From d8f256fac32a09520d8659fb84b4a0dd03ecf7f2 Mon Sep 17 00:00:00 2001 From: Michael James Date: Fri, 7 Sep 2018 15:02:53 -0600 Subject: [PATCH 01/11] manifest and classpath updates for windows --- cave/build/alertviz/build.properties | 2 +- cave/build/cave/build.properties | 2 +- .../META-INF/MANIFEST.MF | 2 +- cave/com.raytheon.uf.viz.damagepath/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- cave/com.raytheon.uf.viz.hpe/.classpath | 2 +- .../com.raytheon.uf.viz.monitor.ffmp/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- cave/com.raytheon.viz.dataaccess/.classpath | 2 +- cave/com.raytheon.viz.product.awips/.classpath | 2 +- .../icons/awipsLogo.png | Bin 0 -> 1911 bytes .../icons/cave_128x128.png | Bin 0 -> 18403 bytes .../icons/cave_16x16.png | Bin 0 -> 1911 bytes .../icons/cave_256x256.png | Bin 0 -> 43101 bytes .../icons/cave_32x32.png | Bin 0 -> 3616 bytes .../icons/cave_48x48.png | Bin 0 -> 20077 bytes .../icons/cave_64x64.png | Bin 0 -> 22700 bytes .../icons/ipr.gif | Bin 0 -> 18040 bytes .../icons/sample.gif | Bin 0 -> 983 bytes cave/com.raytheon.viz.xdat/.classpath | 2 +- cave/com.raytheon.viz.xdat/META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../com.raytheon.uf.common.archive/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath 
| 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../com.raytheon.uf.common.nc.bufr/.classpath | 2 +- edexOsgi/com.raytheon.uf.common.nc4/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- edexOsgi/com.raytheon.uf.common.ohd/.classpath | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- edexOsgi/com.raytheon.uf.common.site/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../com.raytheon.uf.common.sounding/.classpath | 2 +- .../com.raytheon.uf.common.tafqueue/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- edexOsgi/com.raytheon.uf.common.wmo/.classpath | 2 +- edexOsgi/com.raytheon.uf.common.xmrg/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../com.raytheon.uf.edex.archive/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../com.raytheon.uf.edex.awipstools/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../com.raytheon.uf.edex.dat.utils/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../com.raytheon.uf.edex.maintenance/.classpath | 2 +- .../com.raytheon.uf.edex.management/.classpath | 2 +- 
.../META-INF/MANIFEST.MF | 2 +- edexOsgi/com.raytheon.uf.edex.menus/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../com.raytheon.uf.edex.plugin.hpe/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../com.raytheon.uf.edex.plugin.mpe/.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../com.raytheon.uf.edex.plugin.npp/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- edexOsgi/com.raytheon.uf.edex.site/.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../.classpath | 2 +- .../META-INF/MANIFEST.MF | 2 +- .../com.raytheon.wes2bridge.common/.classpath | 2 +- .../.classpath | 2 +- .../com.raytheon.wes2bridge.datalink/.classpath | 2 +- .../com.raytheon.wes2bridge.manager/.classpath | 2 +- 159 files changed, 150 insertions(+), 150 deletions(-) create mode 100644 cave/com.raytheon.viz.product.awips/icons/awipsLogo.png create mode 100755 cave/com.raytheon.viz.product.awips/icons/cave_128x128.png create mode 100755 cave/com.raytheon.viz.product.awips/icons/cave_16x16.png create mode 100644 cave/com.raytheon.viz.product.awips/icons/cave_256x256.png create mode 100755 
cave/com.raytheon.viz.product.awips/icons/cave_32x32.png create mode 100755 cave/com.raytheon.viz.product.awips/icons/cave_48x48.png create mode 100755 cave/com.raytheon.viz.product.awips/icons/cave_64x64.png create mode 100644 cave/com.raytheon.viz.product.awips/icons/ipr.gif create mode 100644 cave/com.raytheon.viz.product.awips/icons/sample.gif diff --git a/cave/build/alertviz/build.properties b/cave/build/alertviz/build.properties index 93f55c244e..87ead1c949 100644 --- a/cave/build/alertviz/build.properties +++ b/cave/build/alertviz/build.properties @@ -185,7 +185,7 @@ skipFetch=true #J2SE-1.3= #J2SE-1.4= #J2SE-1.5= -#JavaSE-1.6= +#JavaSE-1.8= #PersonalJava-1.1= #PersonalJava-1.2= #CDC-1.0/PersonalBasis-1.0= diff --git a/cave/build/cave/build.properties b/cave/build/cave/build.properties index 0add642b7c..32cfb4097e 100644 --- a/cave/build/cave/build.properties +++ b/cave/build/cave/build.properties @@ -156,7 +156,7 @@ skipFetch=true #J2SE-1.3= #J2SE-1.4= #J2SE-1.5= -#JavaSE-1.6= +#JavaSE-1.8= #PersonalJava-1.1= #PersonalJava-1.2= #CDC-1.0/PersonalBasis-1.0= diff --git a/cave/com.raytheon.uf.viz.cwat/META-INF/MANIFEST.MF b/cave/com.raytheon.uf.viz.cwat/META-INF/MANIFEST.MF index 0dfb69570a..c83bc73f6c 100644 --- a/cave/com.raytheon.uf.viz.cwat/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.uf.viz.cwat/META-INF/MANIFEST.MF @@ -13,6 +13,6 @@ Require-Bundle: com.raytheon.uf.common.dataplugin.cwat, com.raytheon.uf.common.dataplugin;bundle-version="1.14.0", com.raytheon.uf.common.datastorage;bundle-version="1.15.0", com.raytheon.uf.common.geospatial;bundle-version="1.14.2" -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.viz.cwat diff --git a/cave/com.raytheon.uf.viz.damagepath/.classpath b/cave/com.raytheon.uf.viz.damagepath/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/cave/com.raytheon.uf.viz.damagepath/.classpath +++ 
b/cave/com.raytheon.uf.viz.damagepath/.classpath @@ -1,6 +1,6 @@ - + diff --git a/cave/com.raytheon.uf.viz.damagepath/META-INF/MANIFEST.MF b/cave/com.raytheon.uf.viz.damagepath/META-INF/MANIFEST.MF index 8beef83af5..dff5ae0ff5 100644 --- a/cave/com.raytheon.uf.viz.damagepath/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.uf.viz.damagepath/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Damage Path Bundle-SymbolicName: com.raytheon.uf.viz.damagepath;singleton:=true Bundle-Version: 1.16.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.viz.core, com.raytheon.uf.viz.drawing, com.raytheon.uf.common.time, diff --git a/cave/com.raytheon.uf.viz.hpe/.classpath b/cave/com.raytheon.uf.viz.hpe/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/cave/com.raytheon.uf.viz.hpe/.classpath +++ b/cave/com.raytheon.uf.viz.hpe/.classpath @@ -1,6 +1,6 @@ - + diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/.classpath b/cave/com.raytheon.uf.viz.monitor.ffmp/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/.classpath +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/.classpath @@ -1,6 +1,6 @@ - + diff --git a/cave/com.raytheon.uf.viz.monitor.ffmp/META-INF/MANIFEST.MF b/cave/com.raytheon.uf.viz.monitor.ffmp/META-INF/MANIFEST.MF index 0ee070fee4..f8c35cbf6e 100644 --- a/cave/com.raytheon.uf.viz.monitor.ffmp/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.uf.viz.monitor.ffmp/META-INF/MANIFEST.MF @@ -22,7 +22,7 @@ Require-Bundle: org.eclipse.core.runtime, com.raytheon.uf.common.style;bundle-version="1.0.0", com.raytheon.uf.common.plugin.hpe;bundle-version="1.14.3", com.raytheon.uf.viz.core.rsc;bundle-version="1.14.0" -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Import-Package: com.raytheon.viz.core Export-Package: com.raytheon.uf.viz.monitor.ffmp, diff --git 
a/cave/com.raytheon.uf.viz.preciprate/META-INF/MANIFEST.MF b/cave/com.raytheon.uf.viz.preciprate/META-INF/MANIFEST.MF index 7250a2c3b3..fac109aa5d 100644 --- a/cave/com.raytheon.uf.viz.preciprate/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.uf.viz.preciprate/META-INF/MANIFEST.MF @@ -19,7 +19,7 @@ Require-Bundle: com.raytheon.uf.viz.core, com.raytheon.uf.common.units, com.raytheon.uf.common.style, com.raytheon.uf.viz.core.rsc;bundle-version="1.16.0" -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.viz.preciprate, com.raytheon.uf.viz.preciprate.xml diff --git a/cave/com.raytheon.uf.viz.qpf/META-INF/MANIFEST.MF b/cave/com.raytheon.uf.viz.qpf/META-INF/MANIFEST.MF index aa718ff2c0..b788993d93 100644 --- a/cave/com.raytheon.uf.viz.qpf/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.uf.viz.qpf/META-INF/MANIFEST.MF @@ -17,7 +17,7 @@ Require-Bundle: com.raytheon.uf.viz.core, com.raytheon.uf.common.numeric;bundle-version="1.14.0", com.raytheon.uf.viz.datacube, com.raytheon.uf.viz.core.rsc;bundle-version="1.16.0" -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.viz.qpf Import-Package: com.raytheon.uf.common.colormap.prefs, diff --git a/cave/com.raytheon.uf.viz.vil/META-INF/MANIFEST.MF b/cave/com.raytheon.uf.viz.vil/META-INF/MANIFEST.MF index 8da229a091..b4ec1b51ae 100644 --- a/cave/com.raytheon.uf.viz.vil/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.uf.viz.vil/META-INF/MANIFEST.MF @@ -12,6 +12,6 @@ Require-Bundle: com.raytheon.uf.viz.core, com.raytheon.uf.common.dataplugin;bundle-version="1.14.0", com.raytheon.uf.common.datastorage;bundle-version="1.15.0", com.raytheon.uf.common.geospatial;bundle-version="1.14.2" -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: 
com.raytheon.uf.viz.vil diff --git a/cave/com.raytheon.viz.avnconfig/META-INF/MANIFEST.MF b/cave/com.raytheon.viz.avnconfig/META-INF/MANIFEST.MF index 93499930de..bb6a0cb484 100644 --- a/cave/com.raytheon.viz.avnconfig/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.viz.avnconfig/META-INF/MANIFEST.MF @@ -13,7 +13,7 @@ Require-Bundle: org.eclipse.ui, com.raytheon.viz.pointdata;bundle-version="1.15.0", org.apache.commons.configuration;bundle-version="1.10.0" Bundle-ActivationPolicy: lazy -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Import-Package: com.raytheon.uf.common.dataplugin.persist, com.raytheon.uf.common.dataplugin.text, com.raytheon.uf.common.dataplugin.text.db, diff --git a/cave/com.raytheon.viz.awipstools/META-INF/MANIFEST.MF b/cave/com.raytheon.viz.awipstools/META-INF/MANIFEST.MF index 36fa0d8503..a9031abfd5 100644 --- a/cave/com.raytheon.viz.awipstools/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.viz.awipstools/META-INF/MANIFEST.MF @@ -30,7 +30,7 @@ Export-Package: com.raytheon.viz.awipstools, com.raytheon.viz.awipstools.ui.action, com.raytheon.viz.awipstools.ui.display, com.raytheon.viz.awipstools.ui.layer -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Import-Package: com.raytheon.uf.common.inventory.exception, com.raytheon.viz.core, com.raytheon.viz.core.interval diff --git a/cave/com.raytheon.viz.dataaccess/.classpath b/cave/com.raytheon.viz.dataaccess/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/cave/com.raytheon.viz.dataaccess/.classpath +++ b/cave/com.raytheon.viz.dataaccess/.classpath @@ -1,6 +1,6 @@ - + diff --git a/cave/com.raytheon.viz.product.awips/.classpath b/cave/com.raytheon.viz.product.awips/.classpath index 1fa3e6803d..eca7bdba8f 100644 --- a/cave/com.raytheon.viz.product.awips/.classpath +++ b/cave/com.raytheon.viz.product.awips/.classpath @@ -1,6 +1,6 @@ - + diff --git a/cave/com.raytheon.viz.product.awips/icons/awipsLogo.png 
b/cave/com.raytheon.viz.product.awips/icons/awipsLogo.png new file mode 100644 index 0000000000000000000000000000000000000000..60e9e344efa559224498112ca07372d25657acf4 GIT binary patch literal 1911 zcmaJ?dsGu=7EkLN#Pv~Ir72EF3gwYZCYg|#;TiIfAi)%(LQ(5vGDAj^Oh`r(XiAk* zd?2z`(G``tO5H_wadmMivNa-}F1V)!v4~ZUx?8AaeW1{@MX{w51lvD6opWZs@4Mgq z-TS+*Gg}SmOM(J|0|Wv=kS;}?!LPIZ@ARqschS%&!7mFrO%|8Q(wv)cQUax!HBo@h zM&wc%6k#sNZ=>P_f=R`UF^kL6r{N@P6BB+NvB&1%(E>qSqQ^myc@zhjs9eS#4-Pc> zK!7pFgDd5FiQb{2EKEwFlgcbiHUBxVK^6M#4m&Ks~%907Q2R=W%L#DimYaenU? zLm)7w!sW$-Pf2Cz4S`fe}aoL$FK)V>p7~5(zN= zfV?-SnZ`5J+Hqg}DITB3FT(BRnFzYw?5y zHRU3mjDurXJK(oSnAr7PJjgSB8iLKC*FQ6Cca0~CPZ{JP91tv)KsK8{t}$&FmqGnP z7J;Qwtx^HY5E$0Tlar-tRHo7- zqiU5JK_nAco!!L|c9NR#%kX~3vDj~7ag~!IIM!)oS?fdx7%VKux-6^%P^rc$4!o%+ zNXG6LM)-^LG-fsBWHwM{t&_C@V_C+T=dhs27zC3`%pyWAmx&OHkccn^3G+J?m6>T+ zB8wrxalH9g*?Zchyo8U$F+m8YVHt^GGPB5pVWf!9Dk?%{3YmyTP)sT(%@SIHf)Gy; z@;A<}8fikrcc*{*Y%KW0v%#Y5{4_cFu~|_OGMAsmGM!p!^jx!M=2>!<3r7#`dUt=# z4`H^d(=p8L5T+%j{M50(U#u@al6j$d!7y>z8gRJly*W?7T}8&8By~yK(M7voE(K3L zO8P_2nI++!%dbadU$0@VbbkAF=+gW8`y+)_k92VkbaM;l8yX!AxlkW%NPdt=e$z2J zP;^i~;(Nt>a?Xxy!!L8@8r&TZO|9C0Z&--`+G4{H?~wImeEmSSWva2p?Y2XmDlW#29MMVwP3=5DshtBRua3a_22m9;B!McpWWy? zwju1U>)P&*py_!BFGmBNnuf|%PjUxV=bZn^W<$?h-khePL$9KPN#S&0&_Cag*m}M? 
zWF|XsuEN#0Y}({Ev=OUo&d`TeM3sBzwNLjRDeiiL-_PrF4DK6&Z{5zf(+jsnE9-VU ziek#Mz_L^88du618}u!Lu1eg!xbmiNchm=w(haP$c;~b-cyFZfZ{goH?`tbas)SE} zy6t9Wwy+4@*c4>T`eVujmZNyU*>9Tyx{IzUtDMz2(ibtMA=OK9NM!uAlyo z(&H|-;jOgcSnmsao4VRhYHMrrN-RNHN4IWU#lH9{7I6B*mWK~#q=m1%)hpaS=hZ5u zVbA=pf&&=0vm~l+Z)(lkt&PnicM2yZwue42zqv8+@=$Ac@NhpZJlr%z9j?z^cz4&O zfx6dC^q}f@rKtx9RYO_S$uCT!bsAku-@XFx>9!AU&OM$&msxJxw9>qa<>xNF+uM_V z0$6)A>~6}(sgs&RxRJPz6TiL0T}*qU>)3FwcjlTewFd6Lt@D~3-(4!QGHw-j)k2&V zes3wg8Fk=(fB%%*hhEy$vBP&_d&NN%h)eVO*bVp zY!7@Td~FR|p1=)PW+Sc(FaGBx^%KXB?U{kT1F;V~XZdb)cg@;V>FuvT_msK!3j&6g z{_jftNzP^Z{_621iNcUAhw6?24d3KPM}Lw}J%+y8mtB)q*a7rxX;9QR{N>2d&Ic>F z`a=tvg!_$s*ZNys(^#SBaeh|D{Pewnp*vI7daX0pRJ30B)1Oo4zC16`GUUNyy=U#3 x?&gwvmkuAxVziMVQ_bM9ej|%-QC^Y-CnyI}tYT;C>}KF- z0uVN~Gc*B7+ZdRcD4Q4rW)`;mq?c`7q#z4pe$pT8@=Wsfq9*1RQl5?`DxM0eMxIti+{UDW0w6wj zo-YD6Ce8*RcN=S4Cmwfx(*L5%^Hu*(Gb1VJzd)R=_(}hlP#W@zAW=I<6A(KCJG~JT z3p0q5n}L~~os*M=4#dL5%*M#X%*f12&&T_M1p4nn`o+!B*px?EOya+JeZBFM znmaq&^Dr{Hxw$d8u`<{>nlUnSb8|B?u`sf*(0@VDJ9*eT8@SWkI+6Vk1u+vRBS#B+ zXA3)9(0?cz7}~iw^OJr_`oFqhV=piNzX{tq{kNjNRL1CTV9&_Rz{F@{^PhhG7qpYJ zvdRB1#{U)CN!7#Ngi+bV$m=xaPo$^HlVOLzZoM*ji)l7{EIqs7;t7+8zh8M)Y) z*g8v#@soaiVKBBZ=8+KRlwfA&WMkrFVdCIoX66!QX5(U)U}KYDW)&8e5dI$;|2JGw z2_|6{VG#*VW>z+4W^r~A5f(8nR#9;gE-_ItHa4dJ!IidkayGCvGWj3A7GHe-8&~}Q ziOVDEXky@O=csCDXZ=4GplELAZ0BTdXAcq;{jU)RQOg?`S=j!kk>)>x^uPKnX5wh! zYGN$mXlDcZFD>&}{9mLnG2&q3W@j>{H(+OHrDroSV4~;dG-Cd$adEL4n=&)8au|{R zH@@-zp}qfQrC%*yJ+c2Efp|=rS&g{4S&iupxw(z#zqHCl&&A5gN^i=>#m&NQWXxpB z$wkWeB_iX04Cntar2lF9vd;fh|M#@`dimcI%f$9;HaUJx8*WmaL;yhaQCdt`)&1X@ z4z#zb+M?hbRgyGiN0P1-D0w=I8fMnS<##ybs+Q{you4iy3k!`^6&KZ!3>C}vYC@5! zsE|c6mqkKSmp+MdjF-HJosX30QX*26U|sj`zG<){<5`av&v{NCC2Ozb1VcTHL!TEU z? 
z$PU^i`1nZ<*fVojK6iaODvV2i!=lreKJX$xrwF7}$cEu{J@XF?1cpaMv=leuU6K$E z;Sd-(AoOpF!~a8~{=3NOJ@d&Zz>tgF{J9E?^7K5%^#HpY49K*)VZw{qIy{6{Qc{Xf zOLH^9>Rw&VqJo{ZN~Co4z$tTy;Fg`s!y|r@#M*Q~zlLX*Ge0gItj2FfI6Q}!UH08ULgvU7B>EEwR>w;V#q>$ifcd3h1hyM<6>XIy>H-;NSi-v`LBTthSpTe>LRd2Pmg|&P zsW!8jy)BxYT#&pO*W<>|(W(@XYKFi%WpJt6n8V!h6n6&%*-22GEiEnq-2s~~6B(4$ zWp%);mTDrHqG=}BXd-g3Hqj@{jrfsmf7ETBb&%jaf6%*HT>mpol9YO>&NP~erK(32|!0W?$=`W;NNVIjD} zqGkcEU-9e0&f%|illZ+7fA<^$)f-uz7;{{%$qZ&Dxm?uq3ZH+2(y1w01fNP$zcH2Y zLi8ud?TcDrKU~`pa%JNDw@C!1bAA!%1gO&-^TT&@1p?Wj*s;OZTv8*K&Z1N&Hnk&x z{mWz95IG{%nK;{io5|FhRB!V)?px#t7^K(^*gO_bxDeYJJos=E5r>H)*);@%;^iP< zh`7jY|4ei~wW!b@$zRvh3v z#@pnVmMJ%nS8<*^W~G`)zIZgjFT*5aw71cDXX|_bs%Tp;P1D321=wih+YENzf0qMN zRgNg@aedP0GzOV#JAPoXKeV<~t1#&yp#`Y#PG3q%z#wbPO=P&QRLiuZ)X6nV^n<{# z=|vc{vUV5L-9P2X+_xW*kktwnlnX@uYD3_;BP*DtM>=&HHfmi8(>|cZ?i;H;dF}m+ z7HGAE^@Ihu*JZ2VtY7Js-pd~q7B&hvG!Hy=aBwNIrrGud9FoVRwOF)aPTa;ClEG~N z4YVN5r#A_!YrEO}*zfLD+*P-1k`!aRY%pRt4o&&GKx$)t3&t@#J&4BOMRGcNE#KNJe#Z>U-_r<)V~FNq24H+O`E zYdl7bd&u1*#Qw?b(sLlGXIll<+#;Y8so2TJzgdvS$K;Wu5?)sL8%Ar5 zM!OE)MbMh>^DX}qsYC3cZ%rYJ}31z;lte_F-kD2Sey)K!c_5NFSb21_70 zB{mMD8=q+p|49&6l&09Y-38Z`q_ro@y)@vv45^sOS@6OV5~0ikM@MVc1a~po%tOC+ zDC+tK3oAJ18KJtvv+#k2-tOIi60pqG9g|@Bmv;dAfXZ@p-Y_CdNRL++ znb?>;keFp58bm=LDty4Xf(ufYhYPtz)jhk##;c-}tElJFHws8V9fgnK2$wj|Av~RE z;8i9IqBy!FmS2Je?K9Nz{4&WGoNKX&*LHt5=SP57@2MIQur)^2$w4<@bWjknt^Ho0 z$QQizRNO2)?E;98+(8tgGSc9Ev4>m8%^P zz1d&RoG=`V?vpZpcTvL>gbVhl(aI$TwBZLT3GqO{fD`R0QMf844a8 z&5~$m5%%#@V*tWSI}oc!eTpr+A@>Gid}b zJ=-!Y{#GbG_!2)%2*mkt)@k{-u!gY+stgBa?N4O^N0tp?V&CDO$8pq1GE38;^6uynZi5OEh( zrqX2eopI2B?t4_c!Kc$IB$%Gl{AosH`o149(>08xTU9eXTNTMoV^m_G_%Y4&lQE%& zJ28F)B-I|m_csVDe>jj>08-m71=SG6H_S+Ai+`TrFgFZseltKW3c@7*dAb~}>Zh}d z^f*(5yS6!7=rmk%#M*+?4##|`kCc_}$_ND-1I^a;-xA9|YzUSHe z{@7y_`X&DkD^xSphyjrrN>dWia{#ASs>4XZ$f$xQo+qZm)q6uf#0S#_h6F^p#wuCK zDt`1bITM4y`13gHp}OxBK6TbO;TOO?_s5iO!zX&LY_6iGKY8&i*+HG^BWeX2@W+2e%{n_3XnhPb&Ne5UefCO!4RgrsVES0|+{g9KJoA|S$ 
z<@k7~6^VB}9P9X0-3!tA4R=m1b{N~YY0)^$%PmA3?O)dex`7{v$VFq*T53Ht9Q%FRMr8V2zOhCZ^N@if!|+Yi zDWfayd20Nyfehe6fxU-eMH3P130EC^y0n*ZdO^K;mi55zcjSC9NV|LMalKRGT{0eS zY5I`QBO0DduuCCcJvlVqrIH>_0@=G=W@^<$WRUz2g*jB)&g}RH6gIrE$*_%)i;9VV zrD{2W+j21x4iRj7ka?z@Lgig(EP)-tFu&N9FvvpnQ-HZd7;rxC_*=KxP}IWBjv~?x)zDNAX#XgpY;n!y49XRW4A%Ljedy0?u(*F2OHncwxB>2o-WLlZ(&Qr zI^H=?e!=8xAppetMApD(pM(I-Z=}h|W!4o^;=S`U`}RMSQQG-Q(BP&c-(IEIv&b?~ z;Kue4&O}il@UIuX&x-yYs0Qyowsu-pdSau=TO)kD?*H(uuf(bzI4tDx@hFGFx}b(6 zQvf|%gr2#E68P9aDoj=@gIBgq!${!!RkOUUpKC|yn>ny%8k#@et`iyu7&2oNo#hhx z6`zt|Q+%w42X-R9f2PAAf*-ya6Y}xW;Y3tqt9?*UXj|4c!2|Kg{l!ItdXQVX1(8*6 zL{Ld-zxjT7liM(X3*F6sbXKvtBQg!e0qwkk5BC{C;1{(T8kk`j)(Ed#bGr*ytLyvW z&Vb*+yX@sb!VIAeubr1s+sFJSauu<^x^g-Nh&ySm#T=LGC{8Xw1<(avnyma#>he*c z2?o03q<^cCYpRH<(y?#R+uz@mAYk}+F{&SO5cOeFBD_dnW+mD-WI>RbhO1BZ_`P{ENV(f1QcHG05 z-E3v7cv9%rLfHA;kABP)nFA+X8|lz1jg-!Wi9~bW4vG+-`IWc=+}iNZp78MCYO`3h zLW_rCza*Y+J6W)Ae;;kPyzgK67@#ML)AU=YU44vXa(H2Bn4)~rSU2^~dC6Wc0HsOr z+@1VF6d|m@0@iK22Qg%tf6A8vpy0XOthY``cw^WX&2|I4Eq39B%JZ=K z1W6G-Vh%@O=7fT~@KOmX#fZ9-Oi>{?D(*dkdh9Exgr~&;_Em2J%pdMK6y_u6b$Y`l zESOYcP%_5nVbiLD4TTU~nuqLpAxiEWrWh=+JTPk4-}^*K(BAj_Y@~`U8*N zA^sppw5l)+VSSbify&BCxf=|a)(kk;TJq5Ge+ZZ3^4@}!e4xp22i^=u%*-)EwuMJK z+F;Xc%+)=LVH?hqOEP!~7`hkiW#gi~p<`6Y;r{7uU9jKH;YkMEuWXtKd4$Zua zu^S7z?7de|&f{F3d_;)FwJ+Vetdaz!?&q1^BrC`=<@^*5Re&nc{5HgMS+_Jyl2v58 zMG4jZ`zHtoyDWsS0~6MY%-gpq)CAIx6$J6iC#4EavL5|?%h&t!6nbpy0D_QDfRrG2 zRTIMN7Ej+g8)@7fk=!{(VpmT|>DoeGe&SwOSh(pTH0;J!9E>Yzu-Meyy5TPOxG>R< zHNPbZt{@s0^_>^PSKwDBY-vz0C&iDS=b9M2yJeeM81POnP=Q2tZ>o(68M0`;vP43_ zXGGHzD`;un!(u1twN>?CPl4Y_Lyy-1o2J2aPPU!X-HE(OMlON4X@6`>P`9<796f<= zE`G2C40cfTmSjdpHr|B7@s$vm?z*pK5F-VMAIv0BXfxS!=>(_nL89$j89D=3Iz&Jd zTRd6ch#A1d3^eCJZ=oPYcb+R|kE9CvR%|qiyO|wq|7#ft0Zeq8osfitq>z=B6}w{L z1iS38ix+exffF80EbrI)-6e0}HniQs73L#yfBpB>zg*=fFjx$EIIh))Hr3<;k?)CJ zaB>{HQE-)|F)FCnEmq8)&s!MMpuTiKUE+ zNnn^oxfw#Fq@|OBx5lJx4!Z*D$&{Tu7Sdnb<|b{&yy~yDPe<7W$1T7MzgFv3AtTKv ze!2H36TVW>M9-S0kQ~#m&&vN7xm2i#GG<2Jb6Z%r+^8F?D1+*qyyk829TnqfLQcp7 
z5i9*xo}+T4`pX&Cda9G!w4K;W-4oj2m$m@BJ$e(;393VW`>{O&8xRm6lcM#$k-=(a z+@SzG4$Z{N+WJ8=Xu)cWbA4)xo_ChtE3Z}viFHW>g)~KXE>{}Hsj{qZC~)`8taLTB zoxsG`TcWm6(-AaInIT>{F*zx|7}Vf1Qww9xq>bB`OK=M9$u^``>aeL>Y836g`>ge( zrVN@V+ZBvBd#nB+v8C95h~j?CzwD%$M4qxi4_+6BiFnlWe`NRB*L3%FBbhUgG<7bW zaYIH3kS^UP^jT`rB5VI!g7S9#p3JG~+JyIK8nF!1d$Py0URcZWM9LYm%VPSp%^I>5 z0zj3SFJzQ@a{O$h+bo;RprizI$MaHY4FQ&?cPR%m$ZWHsqzIjtO&9hdtXnHk0=2=b zC8M)TxUpO+MeOe8$QLcrt#RfmCGPaG-tO#v|Dut;irv^QHdXFhcqG<1s;Bm4)qmE^+JP4uw+gnux1`2fGbwRv9Rq(gXbD(txN} z&kgPS?fJB&L1WGRW9)^DhX*RuvY=t4_aL~^9N?J3_T*FB!TpqIYh~2wnRgNF7-dcl_y5*5d@J+(aC@kdI zK&pKHY9Iqn!>>BCCEJU`v`1bqHyE13r2f$vf@gLM8aj+MUkd_7k@~1G`A>V#z@xY{ z9Wg}q!sHK@k#BD{-@&{k0?+fqnCt%mK65i6WF3m2$_s!!`UAJ#Nt%5JnHv&Of=Nk9 zqFI@lx#OdwC;R~G)ju<4qy4A31^uvf0yjT`$Gb>g8)e*yms5-p0`$jP@JiXWwxlw0 z~&+qd0{XvdMY{ZJidt<#s`!s;;)4wpimw{Nl*d?!dD6Yw&{J z4rrHAv@W7#21TJw{VLg+4WYWYobmgGz&QA|Tcxnj!1q&e$?kOe(a)hAK{<#Gr9kaZ zkpj?CaN(nb9LlhAJgq z?pORdHpR%9y-~a1*8h(nq%VW9| z;ny?O&-HDOsid3{1#f_TWiuB>_iw~sAI?ub0F}BOT26h4oP2}cM6!WW`5XE{j|uWB zvymv1hPSpzKw+19Tt`cWt`{xxOEOaLZJ3u8K4*#$PxRn|X&Um^_XE0v+a+UlkWlZ| z;yelt=IBWPXin8!sS7I5Op@v8I79!n@c$n1-9N^}923VFIF@hlG(yflHZPjTKI?w?9<2<9z zK{5J~iJin>Q?<2Ph!GN@$IH6b!5%9`zK4_6f5kdbI*D?<-eRseoCp$N;pgIVUljz6 z9NVbnv3VJS-hN?Cku6bQbisDr_LgrSJ8o^5Up%*zhH->=OrpgV-uW2@-*`0}*D{UW z_}PjhI1FLQ*C!(0o#N`x1^V7|YX-PvsDUV{q;YrAcYIQ0_Ib(i`B($`f2~l>@?&^(L+t^~6pb!5z0f{7qUSSfJ83{5xXwcduR zGKMju(@@ypz}ya({*JDwZf?yH^0_lmcHQ~C@V7*X`62Rge8W=DW(a{F76x|gjeGuW zV_|HG?&tz^W&Omyr$G{}qVm1lOrp42f40QIJV%x{_jfAK)I5@3G7)BpRo~M6GtCbK z%zF<{P7;0nhL=%I>ouo+Vb8kLUob4;F~#^`0pD)M753|$a;ixFCQ8gZ6smVOg^=m{ zdP1zNabo=PUbq^*!bwjc7?*n-RiUWGdw<7cJ9K`CxE40|4T@j6#g)F$~jXwWQk$Wo~7Y59}yNU2A6 zC8Li{7CbcBYU<|qA^vr4li^>v0-E)DMEaZV0B|E_()84fjAIV~=nsgV{+xlhYO%2@ zn(%!Ij+{FA?ilI1&HZbNy0h`gER_W-X33BBvcD9l!Q5_VSxDSnhdqNAg0QJ_B3Y7+ zpD(XT@A{C^BhXv-FHo!EG<>Hb0yJ@H96?b_Vp>)z%o*IYEZ=AD$Qw6}(e*P+2W7hP~Qot$mvayUnM# zaZ5_`#vwA~Bg)MbCq{$OnokQq^gNINhHW>CRoXt&{cF}}4SC+?lxPX>QmqURUSDyl 
zdS60!nen>zhK~bYMsL60`B(_FIsl}Z1N|!pqTnk(aV{A$uzO)mHT!b7Xb-`NCu^ zd?Q|T)s_txUQ`NXOk<;4+{FK;w0rS#0vabKn23~oy{QP{aGs=prwwB-kxidkEW_A} zLelArWXWx@b7Y1X#iqJ<8hCIk9PDbo-mWjersqISYb%k21T~xcbF~xGaxvl6 z#{Y@+zU+AHg@=b9V?2c7d85sUCzFhwPWw$oK{=T4Y^EmIp6dhYqA#X*O!j!j;W!_I z9!GgKxHoMJljrAS;zemaP8TACICW44qZ-qhOUcH+hMl_35&iDFQo#2_o2i$n`Uhuz zT11}Y0$m}`hC^F(S@u$8)I>)&-0@m?rC0AhF4)(AXDQWuq(^{>odSVAzFm}*kCX#N z;GgDX4zt7o&(3b}!@`#4(hs0C6lR`l zGf+H|9*+|3yV!+~M`Qt~ zl&dQ985-beFEHdBi^s4(oY~btM2dIxo^n|DvnbiKlRfL82_b{=ys5{<;T1O%f?J`t zuMoXpWc(4M)p2k>B6tfYCX>u9ie<1Lsz(T7-Do#)`yMw4XF4alen@-DLJ_y4bp_(F znhUT3qm}UYV1Xm3$MEt>KpG+eCeTT3q8f;5ViAT+EC4J7qiMN{6`E!MXyCJCzLlR? z)_felL(fV-zdMkhjag22{aI+(+9qMzCne8tZFce3;eIKU_Q8OZ0XEnFgZv~E>zq_) zyJGh-AZ49S`?xzIXY2E|7d52jWBZmsVZi6>XM-Tar{qvFX&%sZ$u(xL6!z#>+zw{? z5Y>L>Qyls#%%H{Aq7Noz8xavfLtW3*p}T3o<*o@$go}zwh=sQpMhA%F%5EQ(IH>Rv z!4D7EOx%FpVctk}^XZ^!Ta?Oy+DOj;vzsIsPn6y3im;J1P$&~ZSMo5Rip4jys^R%{S= zkf8>fP%%IKJa%CFLK6Y~Gp_tQuRLOxj=~LWE068M=*8 zwRYfJn$=^Zn9!V7wnUHN%snjzoRqZm@sbaqf8deA4TL6ilE5f}1WJ?PvxGX9plSC$ zJs`m`rQvJZ$M+*bn()M-%i@GnW~%&k`QrXz4kM0uX|58caaV;LEfd)I7#%jRn6t&= zxj~_58Yhv{)7u)s_yfMAGCxgbWb{xuP1g~Hn0T{!74YSvO7Epf+jd~_H)8U5JV({}MOO$3)Z{&vRaU$aT<>|`!6J4+lf{zh^=tr7-P?f_e$ zXvI|6(%_OVW~tdr5%AlqUbu9ju2;$KFHoii!$?C0Rmm(CW{)>nA{EYn{MtW09>p-8 z;n7DNfFe)?^Hp4G5z*X%QY}_dLElq6A&!$(h0!0zi@>TN_9H}RC4OADM{_+n_h?+T zQCFQXc$5BOyUxI&d3&^Hc(Xdx+@tiZ1++lyA5Sdi9)cSioT8Ri#2&gW|6w5N5ztk< z90Zdx<;h_{S#P$J1jOq}7`6L&d%u+}gA}krs49653fQP^&{D$b9$JGH<1F6>c!MKT zgmco+2B|`uTjG#A?7S3k+uQHC`XOIxr9V%kh}Pl>kc`lKm${U*pq&K&A<3Kia@Oav zYIfhy>5&46ahKBK6nWB}{9+>a5x5+X4Iu!DZhCky{+YB*0ASMm*IyU0(JUec3sL0T zyoyxG6Cc4$Me-OQkKb=ZDEIf*B&L@tM0-%zNn#j(1H~ISq*}br3cxRqpCDQt-#brj zG?8t)Q}@cpmBu@um%4Y_!5v|fYj)1oKRJ5J;HU12cRZIifkG2wIDlCXf;7C z#cdC$`YnYM*d67I6|!Mn*Z8xjB%dLj{yUvVv6EutV|~zJe$j@1(E_kbrw-dykU8oe z*tBm--ff47e^C>%IT%_iV*m!%TD<}@114Y(szF;MnlcK(!=)z}_z@#=(8FzBBefUI zq>5CdQ0ZIzs5p|jPnWHb`u@cgt_SE`g%gbaTga6GyK!*jI*Z-EUqyT1R)`EpX4`cF 
z`$tl%?Wwru>@!TUqJBEk%3bfRKJ`g{F|v%6UM#*+gOI-i%;>1y{;+@kkpc$DiHE!Fl~mpf*#y9d zwA5;)(!aa=zCxF9OZM{6Q{#=uXvaNjw|UW|1m3av?!dt7C__TBjD10v03o=q*4??r zvY1SQ8bKjc6C9`-S0ra_CIzbV^L;%?HBQxU+$RjUcYFuTMy1I*VZ&CnL9ZL<^qjG5 z=0*c2lNVUFH+i^nVc98Ep_eGc{ZZF(=+Q{h@=@1-6HKUWkFGhPHW>p~C0Q)`8)2)U zuHpoWVt??s2YzR)(YHnCUxhn7*S-qNvy=NLBn|R=UrH( zVfGzqdN$Vi%<*c5%1LIu(sjYr#hODr)&=tE_k;T4B*?y`ZtZ~@G0N>Jy$3>Yxaajv zpT;PPj`XX5Hn`ryEF%WhYcG;f-s>j;n(zXb3SX&eSpdp7R8j*kU^$Vz-b+po6C5?Iibkc3Z@spnDzi1ZEo+^$8puu>dO7D>9IXiHYqLgi@p4J(=J@a+)?s zab_Q({lR~IiOCGIA?PShqx5d<-SuI zd6swL?hFiXwX(3a

Q8)%CytWiQWv73xfru-YAolj~3zT~W^5@gQWkyo&^sih&r| z)p0V^c7KS=m@3JF)|v2N#YN?vP%9%vmxjMhsJS}LT`9GU0x+OM+7pmiMPO=g4g^9` zheTmP;j=Y1pf?ajmY#?Kkk9?g)h4AjBasT8cpq`Xz7oag;igr0hGDjJ0_HFHFnoiI zM~&KZwX@>qc6s07mc6h^+hd|$VOZn~Ws~{9wzYIQI~&_PFG1`}hTV*= zcBJJoBnKesh*N6;+6o7Ns%mPemeI-CO0&(zjl6!FgKCI3-Mn!lrNB+p&vyE z(*~f6zHK!XNGtlAo5Hhta?MHnB%_>u%w&Efyq7|Q8OChbFQ!>|za*qu`G5x77*z*Q ze+49z6c-AV@wC*`!@I9w&ua~M5olGJ_r`eyDM8<{)O*3iSk8m%`0HyA!!Z-cAl5dU z2?D>n<^k;n8m&IGO(0i2q#jxe4`&k9>KCSx>uXiE73{&udA*}L3V(L*13)xiUZp7I zF-io6j+D25XjRyw@j_I&)n@K!Q0F$-SuqbOe-$KLwa$om*TTlokq`GUb8%-Qp6>dS zO3nSVi<_eJ&3PHTVu78e48!CE=g?%GK#YbUV`_8$`9m-Y3d7I8_7^uJg=6D(nwpxh zFNxo1K95vPArk|=pJGACNM+Nr1V#6Om9v?tx*tevnrF+~oE4THaut8qwa2zLyRO|C z#$T2!xpY)V8pJ6BtTrJ5#H34Xc=`{kRt7z@G4@EfeuX2z)3}^4Bb-6BFd3xTrV~FQ zpg9_yP_uFmhV0_r!9i|RR1|!PuhY#`1JcBI8pxm|D>bp>N#SY}s?3k%S^#y0UGU}9 zUZ4E&d2{%TOZ?b!q`HxtD=I=v8b`1h7pLD-g`&D;9BQGHxKP*8(eYey zzTGcxOor!jIhns6Nur873?3>C*8huIsM1%4N9KhVUSb;`>&TaES!VubO9i zw;c$Odpa-Ktozpv-1I9ig$q!G+4lWPIz@Y6e()psQG(Vu+8M`j6GqX59bw=<0VhH|58rUs@X%sB-n`;^K0UMD?BM zR1UYE^F`XH(Oi-D?I{0ItruICd$sD#ztgXs?iz(CnCM;5cPT}`$ za-oTa7Z-kFyq>vQq^|be%3RPN-yTcuBFU#Z8>^LH8p%qouCD6h;^Mkymdmqan9A+# z?Zc*-4`=`0$xab5NWn6t{tfh+)<1@q751aUp6&cXA}oob)rsQZVI3}Se?J!{4{J*| zNdS9mU-xG+hkeaw(pyAR$Wl)*DLiDkSZ{OV>=dq1E<5`*tIG_KTE>eBO%1Kj_3x8J zBBo|!TvFj0t_!2#F|hWAS;fpgjbL|_GXCgvAkL=ka;_f?m??N`eU4%eHb8%Wpv@$YJG}NJwVQ;?C9+2G@HsL9&tn_;F`uoL+gEkXf*~i z-zkyLlDMiny?=QFhekU4L7g(bq_{)LJdgbX>DA`i`*4)$;k>1~^|Vf<4JxTD#)Ofe zln|E7kA?j)o`J6KGcOYkJ1Y$F$1eh~lLbZI-9GbfuwrN0v+DLeU@u&VDYHty?w{{s zuNiXVomwC0?o$=f`Nn3;A9b~?w^!sJ^)$1|CfhzOoL_SXSC_sxMeH@%%EL{sH8wC@%AMriva z_x`M)ce;ATIAGJS9o3iT2Sk)H5SA7(FT zx40~__kI6&pv`KN&(i$7s?Z#h+r>{RhrhAoekHv^G64A;9yMoh^|!={fpE66cy#?N zp5J1%VH7u64EJI;L7TkY+GYNXu6h0$ey;?S?N6xVuZM8dRaQEbI&N2V)&iLmX&L7a zvWe8><22w&AOSYOBJrf*=cBlr0~T47=wXz>YuQgIEE%SI*!f13L95{Bq~A9`Wb{3| z1Q5=mE2K3qHd}q)UtIQDlq~3**IidlLN;mOh14icXxjEuwF!uQ?h;z+>X_~NUheUU zJoXaWEO?dwan4U^BXT}Q1yTs^2a5a-&b2I{q$kh&v1#p(=fipv#Igv}VmaeIZSk}& 
zPS}^`$TSXGQ>`2?7BQe-R{Th z<)J;=adfK7>7tL0u6nVVdiRKSfV72DJX*d%>DfxqCRp^E2mp9#ZVNl^aVooPpYCTm z*?}I~Ed$ZtT5{ok`RT|=*SQas`h<$DQS0LG9wSCHp!3~aaao!{Bnm(VP&`?d{mBMg zk~sH=6EeW#UgKj5W%g58?!!r$efUZj`vlk(c-9^FKojrvD{dG8z7m+^(S*FIYyNee zogZ&Y6+dc}Xr9?0#8T(EG@FqlL|+U}6WWEm%8X>Ba)cb+F1_@~P&)lTFL*A#QFs-k z#z{vUQ2kQ-46xt5ICR{HVo0NC_tX@F*VV-^s$~2?BPqo1o?V*<>I!Jgd5`@? zdZ<=Trr6KUH4jZ$cvYDgMpV0Rko~5 z|9rY0BK9N23eP6!nwiTXOCk}YKj#)%z5pQLV$w-o)x>z|4{Rizxn)RQEL-Y!*3$e1 z3Zzof|Dq=0SVqpxb0UjA$A^!b#DTAm7hMjaH$ucYSN!X7d&L$$U{1nJ2gsZ-UGne@ zd73dgp-}j1rpmI~HM6}(+$M|fQFyGUrljN$iowdqx90ej5!Fr5Zuu1^sUr2){o2rW zqunEzgE$(7LoS!kV>ErH5PHT5!dykRe29@C7hw014(Ui9oTytO-Cfy3wTc|X`EvV8 zi4??dcFUAqF%Yjd;rnIz#(*3q5UOs65newC5YF}x2OH#2R(Hk+J&76E)r=j*@*)u` z1Q7`8`h(OtfHLQ={;Q}dU~>iG8E8U2B8YAW>ODSQ3&5pc_UYTvXd?*mu|Dq-fz8Os z5U)_LBHjGHR(X6qMC4@H;A(Qapj+VIKxpJk{q5VY^XU79I=F+28DqTxbIxz{H*^YC1$wuY(lyl?dO zpdV+jc{B^8da?9pN3-A&u@;YJvY@FN(v?t;j;AN@9C7x`0gtBZ2f&f`F+t z9@N0A$6r7QOLlAfoFj2nlEZZVUxISPiExM#=($}c=Y;I_8*t83F4C{0qJf=yC@I0ZIiRAHcYdKv-|(Po@ajcE8Q&6#%HPDKp$+c}WR3eWlQ zGW+W+0BvpsgS2UH(FjULx`l2;1XDm*(b&P7| zL8SnFxmbiyUOOB(tIq}$*Y-CU@5dd{+VcTu11WC|vKj29m;G#T-?(1nIS+W`dc)OD z#g49{KIAld%wWCCyhL01Ty|oenG{CT6gNl!k~4Vbu$X@~9lGw3N}UM60;S_m~xYSmHfESfLrT?r|_SXCOt{HZ0U{$5aNZXCh_RN$=K*9 z%clEoj9=hsOB;a~>0xQVl)Jt&+H%#~g`w(TmsXGk7My@;$$8zN;ve7L*&T!J{%s~i z9f)gW!DzW~tN1I%Cbp!3qwO|Ic^rEC8MavWOby{Gx)q3U6z9GRVY+Ix} zemcMnDaMF0p-sla1#T5@MsJgkg)o(|HRy4QXfrX@4J>G;sJXE*_B9v6xxLPnxALD; z^a5eOXV8pwK-L`T)`L=6^l&Rte2lN({mH^+wO&_lF02{p#>pzxM&k9BiauvntM|mZ zp_wz8u1_L&gsQ|<1!eQ&^I@J}yTN&=_ZdgosNm0o!|mLZ{yEl9_X#Z>ygOsaoz2c;>jo5 zO8Rzs=<_7`Ssy0q(e!fX-5DIvfLlOlj7MKuh?Wyb3qoP*x{qt#SgSXiVn&6)z)iPC zN*DZwctL;_hiz>z{IQVJId5mqo`T4fUTim6_q_OR6TaZf=`TZ4$lS6Txr&xi~}I*64sGFdly ztq({V37LrVR0qxD{#>7iXq(#Wb$Lgx@l-OKE0|MM+Z^r{)>;F#`W>F&5B z?@veO5y>36v%_60yuICWeldi+^*1WD@)1cjRIO|3=efL(yR)xQyk6%*2ozeV@@soQ zlZ<5X$AEIh&$x>}Btdq_c>!dH@By1?XD2Xuv1vc7LvDYk(76y4CHB_Oh4wZGkI)&S z`E-^(e;0V(GlT$JXgOZt!2J05fFck4si+t~R6S-y!-IwQZ6QD6bVzy+4o=Bj=LdKI 
zKRf@!`QWf&65uPH3jTU_(G*(WjHF?7vV2XZ8Q7$pf08Z2?B2lmI)kl+VwNFG_R*SzWUI(m3YJ9VauY^FrZG6C zJ=z9iP@{sJ8f{?&4+$^o_qhSbmGEPY*6TfTY_2T%ZS4l}>fh+)auCNQVFpB$Sxl&q zo}KA?zb%dF~B?#}1!r`Q0BOx)V%e4)rP?(tXoWvRx8r89%aL$vKsT z2^@V>I{(~gPVKuXxd`}RM&(CC4c{X5_iJ!rq}%jGncPe3J$h2q;9?+rk$?yS`#ivC z+AiruQq$`CcsZT8Y`>j+9ex=O??r$JxB#I%rXhYj z?Zyd?Pfant^$~wuwRLw3>78g73dYOA+&%`Bn8k*6tc28_C?a9Z;|qtN2CW61A6P=D zJE+m~T}|y@O~A(qmFC$E9j`OMeP+E=ZRwph^?ld$K-s-S{Jx$H##ubXYB1MFHvm~F z_7E*mcz@o!`C-4_>S7h3H={lFvsA1+@1b`!_WAbk@n`TJD%!F_@w5pnTzwW7Z-pw# zKJt2t)1lkfPVn=0zU%(NzEDJ2&aCGTzHjc+5V4oo&_Oetv$GfUCGg9l<*IN~fPm^$ zI5kL2cBb1F3MLCFyyzqRutgBsPG3-KqZ+-Bx*b!`uI!JVQJpXn#lqm%!J^*iXk&G@ zUzno}S!wR^yyKITlYX}ooSTChpMUeolV!`|E7&Lc^4_w@JPy2TXXix8m+$~Qse+$a zY}askfa$>!Uc?a135BQ;8W10X=Ebvk6c)eJVUF8m+xx@n;f^tNKZE*BluQW0L9|TR z>73#A>zqd|Sq09w%yCf3fxPA;K0~QPVM8lYs_il?vGqSl0X%jDnIrJaPhYTL$Ri#^ zpc{jdazz+bYISgqK?Mrw z*rbySmY<;Ar>^#01Y&knx(S?jhYx;ouNXzG;^RSvy$uv#@0kD`)5rt!6z%JLeJ7q~!xCQ*VUXw9x& zyY|hQGiUaU88fCMNz~HXHZqsgdftqKTBioaMzA)l`X(b}m({?D!V1VLtAmP0BN*8u zZTQwN0t986FPHkE3?+r5kIcG(eePnRv&YEQHo&=(3OJir1o+KnTCZ0$QCy<88?J%~Otkb00v^qxD1Z zO9;%=yegIU=+nu%3;;8lHj~NB>g!jkZ4xgX$Rbmm_Od(!*o-!z!WBrPuCA_X>C&bD z*|1^5)?045W#;6`lWzw$Ds)OR% zCa7*Qu?Io06f5ifZ+o*vI^VeTBuGOdc;Z3{<*}g}&sAPq57l)IpvV6^5v%mn(Wn3) zn3NKZF}#+6g}^}_PgZ-&lgO!Uj{JN>LxXwi)~!FTUcGv~+n#+ygvFvxqb4TVGW@X@ zxX=N^puV!A)fV+sCs11zcn!Mt7v2iX`2Bo+Kq|fRu=$FcLg}Cd3U@F1Gn7_Vv2EG9 z#+dnwwbYhF=8>Od^>uaBMBpb>H>kOdoS!o^G&E|;lqnMl2r)4+k-~EfSaHcENPMMA zm|Vs`(Tn%#4(5MpWidVds}5gjuOJG+sLoCTvkNPNX>O^GPknmyGqsUXGb=55qdq}@ z9Nuab+Y!yd9^=M8m5Xyd>xkEOisk8=P|Is};6YlC^Ox@?c)bwfH6ZCV4jehw;h!`B zS6(@#Go zp;M|*Cjdbw0KtJi)PLm2kr_AMc;gL&@bLHd*RcJH2n-x|iIFEkNQEw7nE(*in>c>y zdCPMHS{}G8X79x9@d;@B?whT11nxp7a;Mkptr*Ik`Qe8jcJAA^?@(i7W3^!F#jdG( zVHj?{`DXjeFTc$7@87>AtJ;pW1mQtY9kDUC(y0yY|JeXQJa+Hi4KKX#f@J5;o$f#& zt3cTrl|Y(g;o;$t!-o&g7&U6th}6{7z5xLNzHGq+aRqxz7fna$-qwHPI<@wbK11f6 zzKei?Cf<1d{P~;%2M!#@aRfu#B7xk>gjX@6N1AUKI&`S@`RAYKX3m`1EA#oY1AusJ 
z+O!GYeDh5S5<&ukmjD29cnO+CV2~qG3{Fc+OCB&_Kz{;56dpRA&X>Y1QB!#>cDfcz zws+4WD5NZ!kg!)#vnV`8tu~b6k%I?L=oyS&aw;k+Dg|MSjSVJ2@Ot6Zh~cem(V|68 zB)1Gey>-yiev)0c08m?73#egmHIDuJ_rv?|zc1OcWsAJNzMdpXz1%*+APb>(0|VWN z3a>Q)F%X7)0l^;~h1McG2oGO8(C=!HoT+ipv-BhvrGPmUvK?KD6*b35PfDSOWt8Jl zTwGj&ZmPJXq@=96x|#@_S@=Gg@ZA=H$TkZ?H)yq5)5M7r?GHcvFn7~UH^J39kU-ND z;IDjPh)l?~VO-8)*xo+G*cr8!>fbg5fKAXYy&!mv00@m>73g<1iXw1S()PCEtq?FU z98-CFdwZ$rVaOVd8s$$Q5EcouN8>qAW7tEo3P_f=R`UF^kL6r{N@P6BB+NvB&1%(E>qSqQ^myc@zhjs9eS#4-Pc> zK!7pFgDd5FiQb{2EKEwFlgcbiHUBxVK^6M#4m&Ks~%907Q2R=W%L#DimYaenU? zLm)7w!sW$-Pf2Cz4S`fe}aoL$FK)V>p7~5(zN= zfV?-SnZ`5J+Hqg}DITB3FT(BRnFzYw?5y zHRU3mjDurXJK(oSnAr7PJjgSB8iLKC*FQ6Cca0~CPZ{JP91tv)KsK8{t}$&FmqGnP z7J;Qwtx^HY5E$0Tlar-tRHo7- zqiU5JK_nAco!!L|c9NR#%kX~3vDj~7ag~!IIM!)oS?fdx7%VKux-6^%P^rc$4!o%+ zNXG6LM)-^LG-fsBWHwM{t&_C@V_C+T=dhs27zC3`%pyWAmx&OHkccn^3G+J?m6>T+ zB8wrxalH9g*?Zchyo8U$F+m8YVHt^GGPB5pVWf!9Dk?%{3YmyTP)sT(%@SIHf)Gy; z@;A<}8fikrcc*{*Y%KW0v%#Y5{4_cFu~|_OGMAsmGM!p!^jx!M=2>!<3r7#`dUt=# z4`H^d(=p8L5T+%j{M50(U#u@al6j$d!7y>z8gRJly*W?7T}8&8By~yK(M7voE(K3L zO8P_2nI++!%dbadU$0@VbbkAF=+gW8`y+)_k92VkbaM;l8yX!AxlkW%NPdt=e$z2J zP;^i~;(Nt>a?Xxy!!L8@8r&TZO|9C0Z&--`+G4{H?~wImeEmSSWva2p?Y2XmDlW#29MMVwP3=5DshtBRua3a_22m9;B!McpWWy? zwju1U>)P&*py_!BFGmBNnuf|%PjUxV=bZn^W<$?h-khePL$9KPN#S&0&_Cag*m}M? 
zWF|XsuEN#0Y}({Ev=OUo&d`TeM3sBzwNLjRDeiiL-_PrF4DK6&Z{5zf(+jsnE9-VU ziek#Mz_L^88du618}u!Lu1eg!xbmiNchm=w(haP$c;~b-cyFZfZ{goH?`tbas)SE} zy6t9Wwy+4@*c4>T`eVujmZNyU*>9Tyx{IzUtDMz2(ibtMA=OK9NM!uAlyo z(&H|-;jOgcSnmsao4VRhYHMrrN-RNHN4IWU#lH9{7I6B*mWK~#q=m1%)hpaS=hZ5u zVbA=pf&&=0vm~l+Z)(lkt&PnicM2yZwue42zqv8+@=$Ac@NhpZJlr%z9j?z^cz4&O zfx6dC^q}f@rKtx9RYO_S$uCT!bsAku-@XFx>9!AU&OM$&msxJxw9>qa<>xNF+uM_V z0$6)A>~6}(sgs&RxRJPz6TiL0T}*qU>)3FwcjlTewFd6Lt@D~3-(4!QGHw-j)k2&V zes3wg8Fk=(fB%%*hhEy$vBP&_d&NN%h)eVO*bVp zY!7@Td~FR|p1=)PW+Sc(FaGBx^%KXB?U{kT1F;V~XZdb)cg@;V>FuvT_msK!3j&6g z{_jftNzP^Z{_621iNcUAhw6?24d3KPM}Lw}J%+y8mtB)q*a7rxX;9QR{N>2d&Ic>F z`a=tvg!_$s*ZNys(^#SBaeh|D{Pewnp*vI7daX0pRJ30B)1Oo4zC16`GUUNyy=U#3 x?&gw_L6jE-&FwmN$G&OPV8`{Rx`Uj3<6 z(`#alHEUHxC@DxH!Q;aN001OuDKQlQ0PJ%J27rbByy-fXnt$H#K;l{;RiHV@&B)0N zAZ!XWHY1g`GqNyKF*7pta2z({0{|cbt<DY>8kDW5yf zrvN)MkP)f7ovpnyk2^p4zvS|Kj{gBOl9T?61Z2Zc{$ECE$t#hH0-elA*%{dBO_*4i zNjbS0nAzDmIa%mPS(upF7@3$EnOW(Xxp~;wc$k<-|9z2vYI8C*=TQ-p__walCw_8E z5Xgatke_>TlJGiMVgD+iDj z(4O=kiAKgi7Z5-Br=|Zj1v>|M`TrJd@BD8^eY%X%-N=EFnSqJX&hDRi{fpWeq+<4e z&G=uboz*-X%otV7oPjP*CZF*zr}z)^r|M?#!af|;3#feR{x6?W+ohL-0V!I^hWILtn_SVMojeFoF>elBQ7phQ*&k}Rt^*LfAgFE5AXd~ zmVQD$XX5Z5gLurDSxvaPSxxDUxw%c~KfTID&&A5gN^j1_#m&NQV#;LB$wkiiX(Ho4 ziSs`a=|7;)?EGi=-(~Un@ZW`HX8&1DPM>At;mx%K02De(iwUc_ubk<@c&n*Dyce69 z&v;$J{S4X2ydw{fgoj^JT4=RdeEaT3ucfKG*hE_$nID)Ro}UL>dbbgUsv>pi^Y+Ez;&ST-AJxGiRFytqA zJ8s{zV|mbO=&B^OnJGJHh zTx`@{0VmNNe@{PgW`9WmKNWuJB2v!U%kPF-)?zlFL}WN0o<(< zKfR}JUyk|3A)!}~u3mIbf0;i=BN`bf! 
zJ^jh(kw>!6_Tpn^mcrF!QPQO@Q{WGPn5u06GW+8L#md55kpN_#=8dqBgH)9Uy?~D4yXjlR38>QoKjn69m{Zd6z-yR9b>z^b z2t}JmC?iR}6GijkAs|=5BLEzrxBo_a<4Bk3ueKo_iCHsxXwMapVxr>fF{P%3Yw-bFVW&xDH{wQRkc^VN)9q7{=G-6(5sD~xO zEhX?hAnIg`Nz9B$8ZN4qD~bRiw~G0CQY8zop~1dipzGL?=Ep<4W}9Sq7}$y zW2~KLDIc9i@y-8QgnDpT6oB01f$ zb}1M_T!OIZutaK!mY##G{hh6ZE*cF~807cOv1~fBSEmAy!pFmrnx^DXj@|!f$6es=?rXx`_u)?KJNSh> zi+Grwx4El3B@P7CtYu*zCI?sImE4z8Ti=EkkMZ2~L7SYO{3nenm5ySyZ;SSf<@u-T zv)td&suxM8MCYmVZXeq{NO8JMQ{id}u(qSRibD$u&f<&I8l#zA1W5eHkB?9XJlFrw zVwL!1PLF`wi*g#o>9dlzUqsB59ZXq(C_UFL_r%LFSLV|na;v5^zlv>8-A7}kV$3={q%6AKg5ouSqfP%g!d1lJ^1FrJTc44#%`*feC zR0GCF9pT=3KQ6^Q0*+ddl_;yDI-Z_jA8*6!-A59q6=TXg1x0)i43Jq!pON8>?Y$7c z4soZ)76E$DV{{8C(9b^|;08pAbj{7-41Y*Hre&>-8LLt%6D%{fzZqCp> z3%MBvE4;{doYio?nnevG zO+4Y5iBC7GZrYP2oiEQOn2ZeyT0R}oRj8Cbol4vWGs#xwRjWXSPqEodPqnu&-UpH+ zv!eS=yLkvC38o;~W|ufi*RP1<$PMTFA@oK+R*Zz5{)2`f^hPKDTsz5bysW=GiG3*% zD1=4dYl{9|2;&iJ-lYJDZl#9eQ`5ORHVdhjQ!Mt2tOJCq8DC*~OhyjuV z99cTpIB)w`F4@p&1fQ_2983E)$H6`N4yjg!8#1;gWScngfK&thcz!ldrvfPn{+{lt zFC_zm6WNYQ^M^k;`2ayumal9hp1m6$YDwb@R~sxw$J;e%gYtSE$_gBRU0LijiWUuf zHk{^c>4p7Ru&U)AdXI4V+Z*LsgZ@_L37{^8t2+fC(*3B2);I;VDqc=qgTFl|f1g_y z@HaO?YS4@J@HiGD<+m1uBZfFC-CTs3NiZ=PeQ{k zfzf6{>ohFCa)Jk^LxNAvoB~B?oL%D{%m5nCM^fTqGFn=vNqI@xxehKO-GMn^t;^WP zU~-QsBRm9MUt{@ceI5>z=C!;F(P1K>ACYEvND9E2d33|ZXjm1dY*6gK4X9s>J@E0(=Uw#i z3cpF`&F?P9po*h;tIY?XP>~3=DOxm;q8E7C!ZmW3`)5QH@eUuM^Mua!wcZ**fL|>j zE>1mv$#}p_446wo5DMFY{}LJF{wBn&&#y!VF!anK4iaf1nxf7fV&HO8NHVG#A*#YaX2Iu zyhG5SNN&XYSg2%w)^aYFFx_!%9FIG?N_u;P8d_pNy$8mjH{Lm&8FuH!%~f1A=XACd zgrSUU(r*ZA#SU*CY>{WocHQ5L=EAO91?2q%3VCJ=30bjZTy(H3JQ{nRcV4o{Xr{mK zfwmM!0p&xbk5tyrW9r*(YwR?JV8i)>yw8~TkX@Y(HFXK9=Dja3BiGX;DN?Lj%+Y6y zmVWG=eELBs$lswh%rrBVg)(AT7b`gw2AhoZ-GFz0UP-c@dm4^9{X0c8n@ovc=Gy)vvD>w2N$gEaBt z*6T@tuS>Ea6kjFI=$OKxY(H>IO6uj*GU#wo^ZR`|UYRI*5UP+)zhOFLjJ94n`E>kD ztBZHa>Dr+SX|KCmf9r>)BSXErS2EJq1VYYwHy$wXHUxHxeQdT;<4Q(=y}B9TRqJF& zp+Q3Wkzo4g7`0MP)(9IX@TU(M_m?Www2g5%D2DPLUv{;DZFzeHu8_fZk9Hd#^r8@4 
z;A8Ol^xRExwl_su76{M$9icq(HR&~w^{=y3{I+~!I!Hi&^(B(gc$wqd*x=9xH^{Goa>UPx?8WzH~c{*Dlq@oh<0rQ`1=5+0n$4w zIj$cCK(D0@bGB==zh9Y$kMB#oR;pyfD+CJ>=@gFP)SxxXTct(?2^G{;A^~8606t4{ zyX3_{T1RkBvPzeW;->6(&>L}6`~XJ=X`%=I=sHm5)X%w;knw`1e*zxkjtASjC`)I7 z>!^0i*^)V4XeSo}!oityQ-a-6wkO(oWASiqtAGM)DgJvDOM!Zp2##P3Ampu|vfVLH z?)4W{0NM)?S3X)kO3(c8#vcjH*!bP!C2(HRRKgn4DW#u`R%`R=>Q9@~GGs?^oz}`; z@!CcM?Egt*k!DDLgQx-eOHI?&Fm>{&lTSy4e`r0%!*sAm+UTD4WU1zlsk-WjW2 zFi@?gcnh2eSq4doVO*oLX4hs(^2VFzn zBS(6F`5PoHx*vF;SbjK2`@aookmnG9eC(~YR6Z80}tM%I&)&-Yq0zKXNPS*Yj7 zEv~ED2)@}r%+Dv;&(NX25q@pgKMA$!dq6jU`T}_3g&Y13Ga}A@z*L_NJUjVZX;OEI zj5)k*mXb?ZWS>!t15lOzS{7cK3GG^_98`n1!P_uXsxbN<8nnqmQf!AqLkK}-?}kZ^ zFn^U$T)Yf%Vo6C8Z#1KE&Q_8o`{Sr7HrQu88&n!krR*R7mc#oU z@>TupOQVizw8XA;jD#~zRnp@+UD{n9-HQZwxk%Oe$rzFt_=KPC6;Jxn1ogkfI_@ zSKBvp4Xzi`Ezh%M)^yw&Wd0WDa2iF+fxD)QoNm-+`)-AEq}`4vaJ5ub0Kb_Xx;?@o z!^PkZW+|Vfac=v~jyAm@y9KY3?0}2!^FL)Svy7aYedTzfw z0TX@fcAoO&9<{IXrM7@)`2N0RI*72-5$1RkCKkf%hzcJCRl(e^o+@^ zy010R-_5LjOP9juwb=v9>;0=nO3_{0Ri3L2n5>^FX4jE~GOelg}Oq*KNwOE^CN3NeTA$dkzU; z$5MHSs1)KHjcwXeBk?kB7Xd_3nA%j5?Oh{H(cFH;)5I{8^~AdPU?*7}Mt=C(vp82p zC&^nY&75fAMAA8$Ue|U8D-ofde~B zP@#agtj0rpQdx#H)IgQIy&f4-qT^8u{WzoVM}gGk`FjB)#meeWlO>1sJrgXz6k8Yl zP~5qu*T!pv$;g8;hm?}3#xIZVb;j##INVh*AW=^Z-b)@#FKC%BsGD@4ug`>%p&PpB zJPmGX08?mT&8L7LV&gZ1{cs-F`rgWl#F-D(U~1Hx;^93>ehh+ZO5V!Pd%el&pp8`F zZYDQ*WPuFS*bQMGGMP#^ZoUDIi@rDTy5z>AJ9oyh%Zp9AIKdBJhIACUHtq174UOQM z;n%AfYF*A7UYO-Tfx^}(>oF}Uj&>9>a8_L9_{BfKWsLB5`C_CX>l#ve^RLga1^`~7 zwkNNtM{Ok$4#5|i)|sy7zkwi`pGLoNk4na9rsczXoD?D%|Hi`=sZg)n{XHx8t?LP{ zbF`L|4gV9=wYQx*CezMCh7P8$0t(a&5`(mLA>C3^9>W*MzZNR!d1@FCbxvvTOCSL7 ztdx?2;ZOHdIX&A&7=*c3K+ogb#&Er0lA}Q?JP3B>Xxjhj5Ii*YaAj`~7w}{!vz61* zmeStkmoapd#~~@yOmoZ<$dz@yI5+jvN570YZTkz4ii=(BW(+Tn@X<*QaZovoEz+IA znhQDe-MRB`@A4mDU8p%qmqL;44{fqvsKt-KudMrzS=QSaMxp59f$F-n>^|(Hn}f*W zuB0bsR7DaOL$}=H)AhSpN8$jc@TG2ypukM1G}lig6Wpfv~DcWBu5Ka-n_E^uiZEGLPx2mq}0ufC;8fv zddaG~oGV_&RZqwF%~_hOMFS_twBVIVRTl~RKfOH)0{VBEPBvmvvFucXeLnk7x4mdT 
z!N1H!&N7<6`xLLaRZjLtuoTpuYgc%dVI6je$AC`Y*;1ocEfSUk`$~iY-(k|hX-c>m zFkZrFPIRh=_kil2MSuv@4t&4k3#zn9i40YGumOK1z0APwJVQosYlwv`ntq=k@9Q{v zXx$q-=o|v{$jo6nB`yo4k<`l_LeuX01Zl%MNIb;Q6rCzpWT4>=w7-TI_;=bDv6qm8 z$MeqDg~F6CuMvd1u9jYHSBnVu-}WgTqJH7*Y|}%P7R^t(fQ{>bI3mbBslQ5WDJpX5 z3$P9o;D~#UG@@G|dMr=wUlQEbQFKX|E-atj&cL0jVk-ECgyI<5Y}MC>p^qZj-eAHX zB7a(+PVa;!D|sjBBl=aL@f#<*8`=;*V9#y!U!zUUyhfRm zD-?&zs-!hKq`^JmS|g3ZLLShljWF_hAw7D)078Zaf5iVvp%kPkm&Z-tN&P!-C%p;f zRc>rEp6EcEtibjQZJRR*+VAdA)&0$q`%dopT;XSf*4=R>dqs_>vN=P5w{ElMkEF2;~i2jJwHV7u|qVQD{;E!^alQ)7tg$=X9#b< z=8{VIiwk z%hYn+4rC@=ZQ)m1YsyN0#b3R+e%ve+UIu0b8)#EKBj;E@{&k|QF)m)KV7W$`b`z}E zEZT|JxWfIrIZDyz=#dX>U`rt)r0n=3*~xw-MoNbu!GP7W(xX4GB+au8Yg-?ID6M2}xT5m&_@0{5qd%e*+q7!( z>Uwf{HjI zcgr(KSve>1YBX?;N{Wi*+a|A4G;P;p_d&aLNvS-oX09QGOz5`g$X5D|rQBgs!#XX7bE)sYTpXYs9vA?^FWOISPZHi9-*4c5J=@6B zt;vv`C5~@iNz`6lPE>ZgRMb1n7$=>*alnh#L!2E(%lsTy4FbhCOeosC+j3AQ%;28i^xvx!CL4mJPw&K~JT10Jrhr>-2?zPZ(q+X;Cd4V50FV&^pjU+4DXS^1%L7Kc zb!#@4RKnh=xwbqH~ z24lGVHO!7HV2#!Ka;$sdWph!+|IJeW5fFT&-g5sUSGT{yZ@N;)P3q&KhkzAgt4|Zm zo)_G2ea|+B+c|UWlH{IwJ7J0piWDRD@&vZjggs=RVhAE!Yamx1DCIgk#pQ2bmd;sM z3h2s#yTPjy%ea?K9aJ@QtZ2B3kz#UDA7F*h1!uOv`8@E~q|&7+!fw^gFf_rW)p`P#>-su@+-i8gE&EZ~)LFHem;kUc1-<2cPnsWYu(^D`E_|JnRDOo%TVGXNTJ!eYD<$jQ+=Y6(Xx^8n z9>IS$@!9=`(_(R~ec1fEI=an22oWr)5M+qFaV@-gcXnpLzM#WYcei~spsvhi7aTkX zo}o!$>I7v!v*Qw-hrER}xm)-pgr9OkfsbKpg>xRL8U2X+z)SJ3O2D}dhc;hKg9J2& z_AmAMLCsSlBF^uwYD>3xFFUf#R=QiOG(u`=d@nfg-$O=~BW*4Ibzsw>AoxR{+KCq| zGjIi)u$RK7!FYQfP7z7^p|x6N;=HB}Y@$xbDqX7AAUJaLuD!qwbt4ls zMr-Ck8%qKbzK| zndGLU`Ms@e_ry83GOBhJ>$qJ98gGAG8!{h!WI;73z@fBtXNxj&sH(*7rNdPcQ}VQ( zB|=W)#0}C5S{NSJp>KOB>H8CVLofP-usJDALAgZ?IpxeDNVnY$DA;WW-WK@w<4orZ?BKIJ5aUSFuj_TdXV+3*xv!3 z+K>qsdxm+!yr~r|@2dbn;SBB}_~!h#BMG!;*{(b`*ZzH*XZZ7Ukh`yrFMG8?dMr7V z7=7D0-)gVz%i`2d1xrTVofKToMluCtkQc-q%Ke}5vVbp`=8m-{($mtm-UyrnwNhbv zy9*8$hTEA5k1UPS>+XEN$>WhzL}tn(v7fv#S%`do2pP(IrU#wSo<#dbN8b%}4T|kP zD<`xB$yJnC#x8qr@dW^~b<~JE>sb|T&;CkU*+pUccbIV<61B@CQcN#GKtWoMLApnO 
zaeZV+ca5{1=#T6O5!vRlhR)Ke@W~#DusXT9xrw8#D9`69=bqZe57hb@_`4X9Q4j51 zEtg?CiKZO!p5{f@`Gs9~Qs2HehIYmw(Y`%SW7&S2{UIEsAmrw13%0$5Kh@F>PX*lN zX)~EiY_@kNS;HJ$Hi99SnjN-$IRPfuhDjOtVBug&Ven)~T750$SalSTXWbVvt{38t zd{{HlZSWG>ty$34>lvbEYm4=N!Ar#J>+Wq1oun!mIRfNa!TGv7s?gcLU4Iejn8VH3lT_QBVUi#V z0^~NNrfQnnD_lsLnVBV-7#oifjT0Vec>v(QmebNWz8K1N7&&c9W`%Squf`}Xh(+B! z0B38fdSU%^Du3O!%qPuj942mgJ|0WIy8~7DK#fiu`|f19*pq{*7$H};;J@4a(w^4# zhu_nhY2?44xXisBmq&!?B_2gdhKfbv@*^Cq+pvnK+0J9>UHz^0=4&LI-9EFe=ut*D zSFSl5!ZKSFOoaAz1hK*Wu8w`4W@etisi=|b;qNkq#jZ_7Rz&$m0qQBXoer$>%DVsw z|B;XkFOk?|A*3ls+q`~@VBupMlFB1r?<4Snrf8c9wy_$2W21t)HYLW5exMPlfB>Ay zj2PFE((&1@CG>^m&RI9bg@%T@mcGUzV?mV`M-URK;a;C1iEdVK&|kla z9v+&1tLMu%%dh!6pFjU-UupkYDq@6@<2S960cq;cQiAIH-W)pdIBOLl1vfQ3hldQm zP9YUHSxoiQD_-O7{nRqYXCeP=>eWyNTqWqV0bikr>pH|?_$&?-7knj>U2`>=WX<$!AqF&cAD1O8f|#v zFE}eR=$XBetLq*SF#YjsxFw=U1B0Asvb**9KMwV@G42+?0NJ_i1Tr+-b$Zl>E}mF; zV{!bHj^LK+)!N(E{mU3dDfg%D0Ei?+Xb7G}?mSzP=Nce{ju9oxj-CtnN5}8W)s6Qa zH1jBNh2OT`^=!cpm%xc00PAKNd0?yX#J=+IS~wKYG46@C{De|H`p3>@2H)SGDtk8TLfGBT zo6!CoBT#d&9}dhc7b;U11vu0zE{nXg-ce2VXEp$k$v?^{J1`IF>fL3qnI1N8I>BAy z*2TBp*AAfZ*5JM2OuF&n-i&6yL1eN~ORhQ$2c(Q2Y z=@KJc_{;$!lMP|jLC@KYP2QT0S)NLwFK*2Cicp3fw-kna+n`GQvXcY0eME2)fu_9C z(gDORE`l`FQSpyD{0mY8s=9{c%_c%Q_!e&XB4K^0rGRS`NK2GjObkXTp)~TXK{dK{ zs-IzB4`iYoXa=+U#eMGor7@!hn?ye=-1nzv@@eL_jqslr98h*#kX;zg|mblM?WE zj%Q{(Z7XZIvI;OP@Ci9sr}M9r$|pS7^T1u`yYU(i9^v}sbe23_h~5o0z5+0IeG;}x zC}zbr_nMYY;AA*GFF9FNuEt5eYPlBTt~$|(Z+#(7AO)YG>1=zG_jUbZO??yWI7aYBk=dA01FKbP1e}hcx9br z>H@JF9I5@*SM&8WcDTl-9o(Dieo=pGcFQz9a9X6(Z(f?PAmTyoCsMC2$IJ;n=-RD* zkO1!5ahxZ}J23Z`vT!`#_;^>kV1CEbj(IQ(>@a3!i00lMMiu2Yr~XW0?MBmfF3KMV-6GBx0dt0v*dfs3u{}eUYph)8UFJV{J2$^q_5P7EP}UKM zu6_w=`oms-k||KK;*63?qc{WP)ArIVugyKQ<%a zT*%5K9IC!jj0}p>z}GjudTS5S#F!5{S0Ji@Ii1=e8!?yG*^x*gB)=hfU&vVq3hIWV zpzQ5-@c%{*b4eK{A4_nY?nDkV8)ec1j~<45f4V5Y5RnZ{D*L0rCZ1WMw`y2u^S<~) zwVvYFPXOGRGkUj&=?0U-<#&$lRi}y6>Q&^^JYH{syy|zyps_b#5tnq-=~buIBIJ1 zTq_>mwpx{`y1pw8bv7%=5L8W}A;aQFSJ=i>gMKCm 
zDABH(qo;uth8~Ik(v_V^ZF!E`qs^fG(@@P5E`EXx;hevDN>jbtv(*fZCJ3q5*Abj43Ku? zl*4P|IzjvKV2SoFiI9t-p6}hb^U}$s=Ks|cr=j~cFyr@YtszGe#@n|1B`ujoLo}o@ zAu|*s75&4=f-U0DN!#{nPD)D1#t7AscdTag`1xQ9Db%Sv8Lsa{;xx5?Xm@=!bF-X@KP;=VuQET&zhUd9@$MU!-9E@KGl zCq>3AKK8r{7$KegUNGj!#HTA07=HmyTM@A*WfSt=PrF?92MO@wA{Ihj8Iiq|z5Saj5Z+21wTqk&YF2 z8-~m!-m92z!$){K5sCO16XJNk^b5L2OvGFMR*0rGnlk3RvgP1SQvQ~@9 z0DEvzQ&$gJZm`&Fvz{-vKR!M_?**T5#*J_6(t%lhzr1cfh2G3bn_+EgtdyI1ho58D zjkkIF{hm?wv0L~y-Wbk5AvzQsrF|o1>CSwQa653{(T56?edO$xX!$tP2NfvrhTKB#me zWUNN}h$Y4D5z&zPbreob!pB069rDGNBtCR!@>a_^UkrAeonM5I${{V-nvr~(d{e0sV=mIcK7f&X6$;svB>p$f2Q5=ydC%aKCW*FAXTd2 zB8X@;`N=tMaOL139@0nY)OZy-eyuBuf4<*zyrivxAP5}yC@X6gw!Y}ex!;uXDx3wP zkZ0cTCywZGW9?kV#YBrHr-nI5WRG(%3p$BjI$U!FcjnTr=oj~UWnnY(5~bsCZhs-D(h*5B`0z`*Pp`J&#B6n)h*6ZE{}ZGf9gEf-c@ z*u-TMD;km|lC5fldKM^}aHd)sk7M<=uv&HAFv!Xi0~s1OxKOmc`MrS<5z-D3^33a2 z>6`eE0u{3RCzFt_J0*cmt~9E#qEoAhO9FJCm8`gf=!&*5Sk(Ihn`ia{IU2j42m(5~ zG4QRan)h*kL@Q>m-eNj}FZ)l{NHyL`CLP)9{H;_=CbU{X$4>^n&-;gUCWASPYyP(g z08rY4Pe9mZ`^BYjkI_#=0F6ShUr^PBr*;w6#pxxzsj|OaXS#Gqw|RULxgn7(SyBi|H`Qhkdi|ks@|4 zD(1=15Nc+6Tf=3H86&k&ay!v!T{m>0?SshL8Ul+7_t_@$<_|t1MfB{;K5mcpJsabv z-iuio)Nbg%x4C$xLgi5AJ-OqS9dsGVt%RgNDY0i25>1HA&qZ&EU?dB;8R2vvm<^lLV^`4n)f>-xX4td00@- z%6jh0jMC;QFjGNr=+9vHhdO&6Q{rJ54Ldey+oEkpyzlJuozkiPcgMdbagM&-IU$Lpt zYfv%DJ3w!VF4fK@{-yTYd*L~{|O|4QrXgqqfZeK zn^*vMsXv3YIc0M;wWyqY+dqy1*Nm;Dqral6pqgNjF+f;rT^h{onM23h23n2!Zp zO}+j+Q9`^tV+4~Qi45K(NJq7mZ^`7GdH5*&cT*-&0$PRi-@b7JSv)pc22ccFyB`!L z(^)OXt18Pe323PCwKn0Wjej@J1gFArtMg!CEXJktXtVb z`^?8v3p&0ktAqfhZyPB6wjYz+t>w^L6Sh`&4{0(=qjta@ZX<)1L=Sox7m-Mj_UNg- z;}wEPq;_w6GaB8g%>90&%&Lmi2E7k-LQQ-#WaOT8Kq;D;zusFK(lmy~D{5--8)t6k z>}Z>svH&7UJ*I|8YXe8PHUn=jO5ETiaG3!DxPN@9C=>gOBTvRWM?^>m$hc$u-IKVJ zC3Q9mHV!fC2fR!qu_?d6kfq7}WVPGOkvM!AtFbDarWFewqCVmD@9@AHBKKdDK`dh& ze_470R0`;S$%-3et2d4JFc)Dl_ice#EJ?`+>Sl{hb*gsAyS6xJ)xyLu_jm# zcwYQIWFUm@mKF%MZ@K@qN~}_*MqJzT_0Ga?FID@+ubrQu%?jLw$bFNqb9hxD1DFxXL~8Gt-UNMNbHO|^yEp`rL}nM-=ZeDUia3k0ghrm203#qMPx~zrrMB8^go$^ 
zrwbq~6eX}!aE46qL+;gXW&3s9{_Ky!IaSEwa@+=VTjcr-z%Kf%G+Jr0&O5HVbues8 zjt@yc%&xXMZ5@|kuNWAeron9PnSgd54>Yral!B2o?_vASA@Uvb3-LMcWB$nfb@Q>i zzuq7;O`ePYQ9UpN@w-<|TQr$1OD<40_3V$2AaQ>erK@mzZVsCk-a9i7nn^f&?W_Db zdZ571jnX`Euh!GX7dU~<+(ToO=45fl!fkP5mRB!psntIX4$ch;pYFw3)VffUPBkzS z@+}f}g3a01uSI21awNwwppfLlM#V)W&GZ&(srl?8j7-6*Su%q0?rsK>mg4oDR&2JP z6-eV>A2K@KBiNn~HSc?q3X=Y04g5WxV*T^ImPG(&;zVs3N@l}sNI{>Z$azIYDJB;g zCtMfF%REo?b>G<#J3cvi&gJfCT2t`tZWbpLu6rT@3IThoy6v*_d&T|KjX2NYGjH#E zyb7)0k|+EzsKJ|`ov1bCml&0umS`i;NyQ&fdVsvgAkL-GYLE)r$XEsSmwAdYqOw}?%;!tAUTQ3i@k?KlB@T<>=U47u=s|jV{nGJBLC&vK+3BFp+dVY@MnGGXK zN_LolVK#x__xG(6iYH^gYNmpTy)M|tg`Ljr>z&P{#FM`sR~Bth9^1vhhXVg@$$}qT z9T8gHQRKQqW1OpUxSLiVr=Ro~$f`kJkf3mcgF7dFbvit~bCaWsN$>?8& zr{8huW=$uQbEUewO_Y*Jdc)}pF@m&Y{dDQHJ~*1uKl6Zf`%4*k`&9zmkR$tBllo0v_uziaXIb)hF;=ugE7ovF9>Vrl zpA$tqLuQ=GZZ)y<6>tACX>%T}V{VwU>a?@CGvJErs^Jbqam^-w*_2EpLg35>?Y-Z= zEMqG1$fItiM#1K3PXEo>pwg}8sd8>zn(VP;BWGuCdVIRxEkQ$g@|?~W&&C002`$|U z0yo;9_4V}y_V@RVWzAqt6DYu5yZf0*?07?LGjI(`evA@%)CwU-JP5*I`>PQ;9d2ns z#%rvlDMte1_I7&`;JEa4VT2f4$h{aJ$l6FKfikIHYWJEKhMex$vUyKG0C8s1N>*{8 zH5@CL11unjzg422roD+~vdD_v1y2g6q2E1L3>UsU7R$+D9t~dczAR=AB?4IYO8-$)mjb(wK^>DP$87p?kRPamRe8N8p!qr z3FlZBCUOYi1mVa}Dv{9P<_7LB-u0aa8N;IP)H=wxB{&P5WyiGKC7lOu_<5U=@bOn8 z=&yRK&@H@q?2A-*S!25AX#M${B<=)bQlkH&P*w5lY5o}5-RuklD^Hhl#j#L{;qLYw zIzT~y(x)Y5EIQx(1!7L7C~I$Cj6w-3*txcGftrPge%PnU9{wqO^y$r932h+#CyK+j}rr(<@c&7AG3JWXxSwUqen|HgR9$~23 zXpN5}>EOQC1*wprLSF5GQozs%hzz&1!mq4~-9f+msMF^KnR~I*-x(g@eZyMQdNL9o z9&Q=6eCE`IEEzsdsdRgr%{@eZzNyVIX)0_ohl6K!Xt~s}YMcAvqm~#{Ea0S^sAU+2 zK=bGGZ^ z8&GRDicQGpuEhMo*gRHKR_5pk%bmnCAv;N1)qec)U%{~JC4AH?Q_YLS+s)`{omVHCHln=z~MElC^}EQSP|=k-FQ!jJ_%#V&Y1 zA#t(Iu_E2^0T^?BNr5`HD@T84NuR2#j#GwWnpoqwN6k~rtlXv-C>{pLD$tgMQ1_z z@h&Pz*gFtyti0(UFvi>YtG8UoDCqX8 z$V9Ox_@R%@<}&OEzdSnu;{OMqKw!U}|IEkR$8X-ed9z=7=_LUOody6hGBWVfPd{P) z{P{P!%&WrYvXDhPE<)lZ`*NYyb`tF`_AVst-!&95{+?1J&UP3!r#$(7#0aKz39Va9 z@_UZaLS@roM68rx^Ut4;kmfT80gm(2qj^%}&olS{IGwqu+93BoJYVQ;{nv(@5~y 
zN5VNAoP)3>vl#DeO9!0d@%8P09?x$$hNA=&3>XZO^S<0gv_Dd?D^6o(v%|i>AmsMx zoHfXG5Zyz(U4;BEgJQ8hs|<&;%SCv%A;e4PQd3inS+izcUs6({K3Dw%&j|p$^2#gl z@$m^9J8s+z7y565ORA}kv_Dg!UB#03w9^c_lWYU?4#aBZx!3-uISrRR2Fj~{&-7k`?Ui%YW?*q%CY%MV-W$P(S(GAgx;e^j~?;tv(I)80AQ-Jc=2LfdF7QC zhlPfQSAhwDNgXJ1lGnBL zeP18WM-iD)btMZnhabRs%)4>koV|PZV&~4C9UlO``|djw78c@)E3UZ4XfW1ol;$G) zHi6K`^D40}vsC2&qmyixT|)xJ$ulWr6Dm8f{IeG0MgI(O(i>omLG;Po_}zvyasP=NfXqc2S*z?GT^+Wx#Z+B;?8gO;Fl8xXAGn9 z`_G>V;9SzJ#37>m)Bd*+nI+ZYJRtWRV1P&~uEdlNe#bp4_u@w~YwlRS8`D4Bgk>~W zaN2!nR<9_$GA&gIx1DkE=hbpOe>Q>3Wj!MB!@S`lqtRqBllBip1ZPY(GAEbQdkv&W z))ELX6L#M2-qBKA&IuSXYQ%IeFE9VM-+sH}1Hc<^yn*QG=(zs<`;Xz1Z_1;^RP4t? zdC2E2raOV-&t7zr8tyA{%0Jta*boSw*N~n1-Ju*D&Mz1Ct3s5-T13A- z7HW>Y`{^1sJbmQH19)M>QS8kv6DER`@#s1ioT$UeLC^higan|bNf0QlF(H@!u2Qs< zlUD}r#_|x@hqLQci2yeaqjLH#v%Gu0-GdEjg)Y~`Ooj!ToxsjTN>XizBuZUKG4T7i zK})o+u1VW1)Ju!;^uEvcW{chz^-ViFaPmF^VWD9W)22=x$2B#aOxIxn;K-39ShsE+ z)Bh>HzP{d8o260d=L@T9>%537|vm^y*xw^EL^8 zLZBxS0=)3itU*Yk=j_#IVCWO!hi`5eh97Slfu|<)aB?czYxni|a_$hUI-G;KM2pL6 zjCCICMl|fklZCkR>)q03CH3Y*Gz9V1;@*+T4()mV!?w02&z}KNC$plu4$%r!8_WU( z*x_ftCV#z$2pjTu@uNKtfFoJ`BI`0jZy>X2`E^6lHN@w%+pEp)7mc07f{*1^2u9qL zkA%&(t7gu;iiQ8HufFQI0I*`k3h_8)%EdEyAGG@1-@fY%dp`GPm0@3TmFWA|8MHsw zoA;0M$1`gW3EKSJl+;%Cwr_jkXJC47YZ{(da{w=`KO)YT2S0v%%^<5ZsMu5yl*BF9DR}DaZRqbha_u>CIJ@%L5`MCO%&B&{)ZFDR5-UK?D_Sy>h zEDml-$L0Zt3Y~{&5l_NDt9KMkR#tFU@tVKf7>3saU+b{Apfj$GVX;973-S_7nEO38 zWl+i;gT|9m4f^!cPdg?6Fje{ZZ7^1bNNoZyzYpgn5@Z}nzLYX)Z&q`-GsJ# zpFm>rd%t4HYilv|ziY*3{wS(+d{4DWvi-Y-`r^USDKN=lz|$RUQv7zJ5I20bRpb%4 zq#e`f8Rm=L1Ue>@wZq?UKB^5CT-rT?z%u~OdnL~96N8MBYGGb+GZoJ0<9dn!Pmg+v z?#!EyvOnhHo{?BdvQTR1_y^uoG@Y3h7B9QEyOz<0BqmB9sBm}!_=u$ z$N2gAiA_FT2#>8U_FrCY#4l+@@)k;+K_f_7Y-?tb$Wym#&5k1cM+(X@cgbcvx#plq zQ{^M0qzd=0*o|3>H(}|X49xp{8@}9|DKZ5sYK_=N*HU9L3t)J6%LzO|Howhg&)h4; ztzYiI9(wFSAfQy*0sQm_+s_<&yL$0vY)mf_@&9x>4`wD_WD}2!4?bf9l8)l*VYM}< zZ>;dxJImx%y`qswTATeq(x$N%@mhf1ipv;OzkfnoZkmrmoP zI1{C!>pUC;p9;7LDWR&1oc`ZD+T(jdB{Yjujf>Kyz z5XmGyT1|%$ezw2wU%6X!x$z=E2S#_ry5j}do>k%~Lg7b_gY#hL 
z*BW9EGd}tqvuV*^*(*wD{l5}3$RrBvp=y3Fe|9t;%qzp}Pc{jgndglKW)unk+CTDL zwj}SUqta-?J)^o}a@TMdVHBI4(EXoFbM%kZ`^6mdvgU-avmMNeB7=SK^O1b?it%@< zuW-%lU^Ae9|NdijI-S?jrAw>0!&Y1PpIQw7DwRq&DcCtdHi}=LK7B?qN;T5+Q|`I1 z!1_N5QO*0W+cQ}@!BEJ?3{yQHt-3=9|BkF;T>se?6caFU{Mossm;eD1TyCw!G4D*$ z#1|#-<`|yBBYywrZUhSXj(haNFNg3mfxxTKk+eJPRPl z`*&xT5Eu{**Qo1grhFk|lSA>u#Gbf*V7xF1^d$Hyd50vl=>=Cu-9+whdiFeK8;^ zK-5*VYB`7mBsB(-dA9y|k%%~G;ijL?494aZ1mFe5-Bc8gMvtB2P{*HI~@KgKG>pL$s*M*y`UM5*vt0Qjj7XsH`h7wK-{1AxHGLK?4U4fL(? zKi{}t{IB8cT7MRIiFp0 zAxa9vE0+zB6M!##haj_`MO%*JSV6hdw&~I%CsLg%t-{Q{F-QvbZsj)WQs2RTaY5Kh z0ARIPowUDe-$0|uh~(sCX4?T=dvpBw@%94%ruzni!RGDd?VXUA*wY07Yz;4xd)zA| z*K(&?3!;93sMqZyy1>XSwmLu_0MF;0BT}&L&WU(vTo0jzjH36#X`Ed0=a%is5CuL> zweqkpNDg<^U)Wdoi^H(EVB!3_&@q6QYL&r6AaFu#`z1=H%c`+6t5}3p1HH7kc}SAz zc-$6aD4Ay5si?fh2e zD{4fxUUPg6jXHnnOIR>2cm&{TM=}N8-+T-gzq1wvm9_Za))N@@+K>2pUj}yGHx>P& z+Ab>8C(0k?w7#;6tE3cU)b|5ObtyWXPSvejx9;^^672^7UAlDX>gnmJvx#=nb?VNw@I%^keKoo|cqa$Rq+1q*Zli>*=c$#c?-ub7- z)X*`=VZAGQ$0C8~Jtwg6zI$iGI(R%Vwg)avjTTyworOk=6&LCd%}ltdNNy?q_SH6g zvp-X?t$o?0j#g`JHR1!j;pwTx;oS0i2|=!OfTL+Wdi3bs7Qd-%gZUBL8Y>zp{k0E9 zv$Vo0l+j{Qp>sI^S#W&G7QAs#VuLVHNAg_re9!KmfFEuhkHN7)qT>&1X?qPmCj<44 z^oPMDRhf@1SejS6dz<{-1UQf%;9 zSXqM)w;pfP`^b%1;zN9}Kf6roHf3}5quIHM*<#`};8e81w$$o2003Kje0(=Y^Y=A!sDqx~VT_?-`<@DeGn~;jsh=2~A!)`~Wp5gxZ^tzGQo>?r4mp<6?2lix_ z&@Hjzk#Bbjt@YTX-hwflEs}}NTu319zDQ!BT<~-0d%s}I$pZN4Jvw~K@7WFyRoy<2 z9I9LQM;;y;-_y}ft+hTK_A&INb$0M@rU=0{1^1k49}yXuNUYnty1KfuJpn+>OF}|I z7n5j<%$&mJXlf{3lw`N?)=CPM0-a&cKl|WnxhdTvasSvJo#K4$8J7V>BPVu^5U&+i z*J4Y0f#_t>J1PKwAD>F}J*ELmJ6m&r?=Slv{(I#he75T(RuDxlsHkmodfd?lV?jwx zt#F2FRBcz`*dxLZtG68!or~3Kg(LUGeuH`5+4&a|92^xE78ahCmUgT?O#oYPaIk-H za8Q)RVwTUap#zC?$$1}pwLQV16C8YF!+dmj?vnl@ciz^CfXjLQMfRe+m%-4ukTwzi z#{3Y>ke->^5Ay~mivpwz-9N~=5Yc{~@X@txX?T|qUzBrl5#2YXym4+_g{SNTun9+? 
zcT7x7RBIi9tpyZD@4i~W1q(HQBgm-7-XTm+E(Dk^90)g~jzTPIBA?9yr|l?qpf zx_uC%<|^Fq@bFe|8{8TI5U7)wloYMg>9q_1Hq_s!!Y1m$OUM>2B}&s70svMy2LARn z!!bHJyi*Ima4eBcepSDCv45e}W;l-CffE&BvB6nGVyz&|mKUZ{u2!N63SNAYK zvY{g{h=tleIuHkPN<`y|LZX#LRfcmS44f-EW*BRVGytUKF z#IQECliSf5Ag+ZEE^6WUf33;fw!$wzKJood%v<~`%ISBG}}K zJW*C-5D_571?mT2|JJU|qBg%@Q2}1UdFudCN9(*gnG(AP`}l;@-&$1UAMhwdz+8Fu@g=ZJUs z;UXAbpzHxZnqP)bwjCGDBGgBRATKQ*p4gjy(^CMdhgyaEzuE#1G7}XSSY(jXgg>Qg z9}*venf(&lY|Suumdd*_+qVg`$tm{sLM*XIrKcz9UtfKBd3lX2kGAs(0BqmuV`JkY zI4&u2{+;$Xw{NOwA={AYCp!Z>bb1JfR%o$G{r)oOZPpsU5w|_l|ys~Pf6;=qK ze`rE)Oizs!!q3dfObd2!T!@qu`XD5S_zFkih2{)Y%Y{*GSq~B9trgn8 zs#b1BQZFUgY3S+So~>~Nw#o$1&`_yVdYiqxw_Xac-8)=nFt@sK;<+BYNhHVom5Pqt zwpyA-X)f0t%fZV(?L|D<-nWhFF49Qb>fHI>r4?1+j?aEaFquR@-#rceqJ!}3@m$>a z(R#f7+aVDLm-e@Zc%B^W<2XsqrKmMo#U>=7o*sC1`asNl?77Qp9JK?3-~>fLyC?S7Gja#3=G zU@k}VOR?_gDg5>G-?8wjVbbnEtvvsHT6_;p>WluQ^?9%v_ZBCAvnGlAaPtw|JFXX!$dqY~iBU#^>+h*W_rSVVS6uM(4B7H}ZV>v*KB@bV zLhAJ}rDrU5WE7yd(%`adQf_&*v@wWmCg1|h3E-6HelbC9x^CDR>C0X!u&^--(}!q&o694>XGkA`U*Mh)2E)h*l~-6H}pJSiNN#0q{OM)1@n1Mv-+B+vb@ z3w5=i()(R~7Z=uG2P^~QjYGS(`F&Or!(uk&ykl_vq9FN(Y0!#Hz}DsgFaRiRa+aUX zS)3I1<^@)fhpy8J9Y2W(Ur%%m z^MjvW+vK*>O~^ky)k>g=Z5yv1kc1F#JrbgWaU!o2eaJ*$8#ukFLQ(=3cnv^(0bnM? 
z{*pE|M~VKkm&NGPV!vTou+1hqaM=Oi{6_&~;dgBEqi{My`&wn;w{-ADp%n346LQL` zks2A;c^068Y5!3?`Q0{Qqc5B}0u}W5_=>H}_^NvEf)Y zMt2Dpi!>oI6yxc5;@fR__qT)a*K6@-8ayM_1`{@($i;{@R37X@;mtwJR-4N`sUHfI zUu#uzMQcp}m3S_11?;@rjNK7vwbkvn-l1(dvGY%LrxJc1KOfE#p6Ji#Ps9Wgb{@Ti_o#sHj81P&j73<5S=9c8t3?6o0nTdpK~u5)Dp zvh#2I$$UIW!fh9Ry9QxXU_?>`t{c`Bza7h|=c7ThoL2Vt|Mo`??)Y>A!innteD7r_ zCdxbi<8@elAXEGnSFrubWWR2%X{~#ni`)hqT$7IM{v4b0mSMK~&c7C?11sC$MC)7= z^%@vD;sJ_|zmWu=HJJV0YBG<4fD}t zHdNq#ydzBj#(KT1LwVRA#t=e18MuDK9Nlde|~iXyt_Gtm>7ESQOInE&tziY zEMWz-6&A~Sq@T0bs~U7y%fMYOI+~pTJ3k!6-fM$vM+WxlK=QyW&_Q899)0ivWVRGf zjtcO$3Zj@)R?MXrR>JC&7va9-#7=*`0YnSWB0`NFWSj*Oh5yr?sl=?QqD@Yo znKTH-^^Rzj4*;ouQLW}OuRhH@lS({N@8k&dYI7feh=%xZ5bNb$f8O@DIo7TzdF5Ie zfSFvJ+1CD>wI>4c|3+#}`^5A0!JXU`z^uh{#nqks&{+?}HxEt(m0U(ca=hk8`~KtY zscrzC0M^%VaV&b`$`JD=RA-s;VkA(b3UC z*cHfr6t7ZcHuVt0i(11FoT4PiSOTYOLprA5kE@@|D}%$?r7$Te3I;`mbeT@U*uxIf zh%Z&t!Q(&dpp7TwQn5$#Uo8tD=VjZOJQx@m43qmtU$r*@+lu9s*U`{<08yIwca0W{ z-Siij00;mm{A6o8i~-;>7z|a8vjA9!(O~$c5FZ7AEmYFk(>txyUt=&rW=S=%!^F^~ z;$Y-~g!-x>#NQjVMiZFH0h8EL*gX5HHo14$5^Q~HHf2^<%L1}1YN4>EftFOG(v&*o z0SHY2tL*7($md2U!D<%(u)W~<^XCg4Y!u;Du-2@m5JE3yZX!bpnFQ9Hu#RUQ)-x1j z1OUQ(Rb9yeKp)KV!)M`>Z6~2ls|Se~JnI@4ofK464@(c7z3RVlx}buxGc*ChycJe| zx(mU_C6P)YCnp!j5?#}*H|aG3fT%&!P=O&perSP+uLR`I8pbuTF5OFHZ64X#%Yt!+ zUT%3U9LX+$seNN%=h-|ct8461Ab=X*VrF1Fd2g6(BJ!m5w#iZv<&nuvV7`*Rl00G9 zOLQp>L6c|G!{5tR{3EOf5Hjuh`i8R099ZpW0?PdNlFE*-;jVK-kP8>GF2OYx+h3~*Kv1Z@TGQBA%OjqxXn_k= zpw{>Cl0&eUluMcH-1Zz&u*b#^fQQHQBU)b#ekvJcl~#AT!`dez2!gmn=CR}3W}_)p zsnNov%DO8Med}7d=@{fz)KPF&6B-ao7^lTzeKx}IA20kk9JI2svIOiA0Jb9lKvZ5{ zURy!_Cl-sH@|CR@%y8+cQN#?e(2>!d`#GXOl+Ol1$sEfmf#ruYDD%Yg7pRm$fLcz?XJIQap^WQvIzxVb zz7PP~Jqp0TsJ5V>;F3flwGRhj9i4(A@+vU_Dqw6wL_6l*6Q? 
zXgH8n2;b~E-6ez{&(Y?z9C&Kk4k)kFaGiTS(%TjrEBJi9WDpUhS$>Bw0DNIH0f1aFR{Sws4?u?wrYs#^ z5f|hQ63lAq>=l7rulu*w9fF*SS{N1IgCg;<+){^!=5B(IyR@zm=6$~f9{qL;=!mIf z;(GggR@*F|?bfuqOR$Kt&0QJ!;Kdc7VIP3l0JR_lXde%5E3ubIpscK{P@~b*3(fKN zi~=}7-}&?BFE}^q@rtszJ?lIu&raPW8m+w9z6mL)kO&-}O(w!Ju)c2a6760r45 zE)>@^*!xjh%+Nc;$KD1IgyX@v>{m+Ctz3Ls$Vqyp=NO8K8ok;)YH?A=WzWRt(8;5!4S5 z=C6j_%3As!u9wnm>&ip?d#Cfu;qCQ@U=N{~egw!wqHAJ{p4A$~AqJXV`P8!Qv{}oL zm{4+#!eLB8B>1Zwi-j(f*1*y1B3ku_&&5nko%c66)vOE$Z`xoX#aOe zEdYJ-hK7ckyuAFJ2(k80C7!5eps0|WUS^GdSIYv9Ap$3SN=b3GObaF`f~3sZrwN=~~CJN5&$I{>T6%FWHq zzg7oeTQC5g1?V#~GtQ13J7%nKa;fbQb9B0Igg?mP66Yo8+^10~lMv0BN3~~sh(CRx zOZTV4z^D*-eB$71k|v>$W9jFYJ5R!Ha!};tAlxxL5hf%!P45$<;;f^!`=mn&urKK}`O0aNN zAU7lsXQ;&z+a_2kg=+G~rEXw{((bwk(&6>l<6!5xOEg<+BN0RdfU6xLPCL06O|UgR z2R59%2t_pwa4N5i3N5aEnfbKIc;r)9ifiE#d3M=d2jo%K?kka0($=3*=xpK^%14Mo0zHY6L}h6ge7o?othE8@X|e>e$3iDy z08m0!eqMfdzyAFO8I4AP8DQ&QPUGuCdIZ3}qI%8`(AkHEis-C>$G_b~W4lUn&5Z^l ze6=%$0>IU<0PMAYJf{TI3Mt4W5|@4fr2YsLD-NHvrcx{A6#4ObaBMW@i(|70OgVk* zn@!Zl3?!_eZ)7kG;1~fqopuqk!n53Cq0lFWdz0s=pl#f(9ky8q`j6+WuD(utE+Zq8 zJ1Q2q%3gqWe?{9c01LUm#E1lVxT4+7!?R!BBs-i3KQU!$DLB9n~ z**$!*5HgA@ulk0G9J(jF-WVA|jbXLnQA1e3RW}+Xu`jWkPlH|$|M_*F?R9QUim^A9 z^knHlC(j`U#$T$VYLMqmWGnAd-o=vQ;v$|nKnE5B30VL|qT^&)_;g>! 
z@l8;N7eTq6A$|}|P>zN<+~oPtrjOjaMJiXlhrn1fdgSWY18LFlM)Hwuwy}>a+%$ z!g(cYTY3)cI&+EA_BOvJf&duyE3dOxqK)qpO_NMK*}k1Q)I^vuVQ$Gp`zviG8#s*w zLx7h+MMZg4dV2afC;Puc0e}_w0T7Y&M~@yojJe9zq9DdT)PS|x{ph_;iVTDvDj7vI zhZ*Q@vhnVtXCCVVu4En0E+zYIdV#D^~w2*zz)kB&4VsMYG=Un_S~2DbA|E@diW zH@wH)#ruyT^D#YxK&6mUXCSzk0)+k?Z2x`a$dMBq{Wo_C2H@-K3xfv_=2?K*D%(uM zp+krEO9ev>9DA9u6zTd!1wo9DoR|TO+dVh5t9PBvg)*+_h&gba{@WvKoz(kJwu*c*IzAi>===k_*B`jbKT7}QayWbT%;}<{qCBAk zuxHPnEZH3g02r5?IB}vd5GXz?q*!bT2M}@+i5q&{R-|zLEYk-$?=zKotJ=(uhZ>k7`RfPWggV7`m z|A9j}J^Ix4?Af~qto8pU4gi?sA z-7|UsEWCFL3?L@PrScm3bFa=G4?F+;Fl_nPLojn_A9}x$6=0Q_yWQiiy7dH@$HoRj zsK44){b#G25k${$ZQZtQ8z=buwhSLWoZA0Jn+biDX8g4>88e~svuDqSx88b7Xa=x_ zg@w5(DJe%MOq?)LuhUaRk}JVu`W&zd!h6}IB< zurMHY0P5GTAB2a83mt)Yp{C87H*et+f$YP9?Z}4CKtHt-CMQIILC=jjcSC;;GJ!y~ z7wpf-gXfn0(QNE}#?VA6m>3bjia%?l#Ck7iR~Yg&ESsJ-s5kwH!f%c7OQ{ye5?~4K zF$dI)O35y!i~_y=nDkRtqk&I09fE(a*a6etUJ6ey-UQoEXA=`)1Z;ivVR+`2(RBV{ zz1CpsJMZzwweQv&%rGR(7y3j7*>YiQML*o<3`hU>?%lgnqtTRcvc=~BFcT(BXcI8H zW&pqmB}%0d%>ZyV0}dTJbQD_`$)rMG05~K&pk}*?AYq^ss8)36C+nk>!Ky>)Fz4eH zkX}%BSu;o)(jln+zdy}{j~|{+g_hDmqtzt9Qe)Kq_l_9|JwyB*(m_#JUs$gKX4kJTe=y(+>o@9xOG6Zpj@7{&(r{WM=h;ZYudPR;~%`P1`t)Im z3D710&}Q8^=ggVIE?KgK6##%)uh&--^W~4f{`Id<7)?e2T@hK!*jQ^ORAi7ZOz0H? zzg?&VZ>8c|lbbqpVH44moMM>!e=Fgiw~vL#CJl4w8^9-aJUZb9m^`2tH0TVp+&IKX z4eL&1(B>d7%p7~!Jt!r@oLO99O&Dplc_JoTPG9rwU-!TlTaUtL^KOMH1LNsvu(abLoZsQr%~ zJ(`l0m6gf++&Om5%$zxsW(G2C&|$mH002Fbv9YmCUS1w=1|VX|i5al%-h1zTfV?3! 
z-zk`%BFxvuqa8G3a2#yESPBftJN?{%7C}HEgSti?JWuTP)Jr9>VD1!}{w#dRxT1sn z=*NCh)>!ThaS;w|LAcHpltXocmQIYhQh>BNSQ*L`ww=BRTT?I4ug8W2KyG<81z`+p zK6l$#2=!Cbu0sQ2!l-F+hA^2;C$r!z`G{4J$Rf;KgaZFA&203z6IxBUiB^Q$M)msT&kr572Mj%11!~`Qo?4I?@xoH zS%vV}O~c@!8;4KbsAp9qZe`XvY@gS7HvLE zwK?wuFD$Xk;rj;n89d) z1Y(y?hzo)(xwW)$NLN`%lX0KQDI#njpW68uh2^j-JsaL2=D}a5jcB69w}^U)F|AjP zp~b5NMO5~+#3YFb@PRQzxMPC-;j3*&ZBE(ZntF1-<={&g%Uf%9K^)QUBNL+FwO_VV zW`L#uG6nPzVDG_=DVea2fEj`Vz8A4a{kIIl#AJ&1Q^2Htu`~kYZ07O408FnhE-9|q zvSrH-f%d09HMLsJBFpZy0Du?&=%bG!00>zCZt*X_{IYt~s8M72E+;%Hig?Y32uWDL zoZ$(uKldYdqh8%zq8_C8tfo;I|^Te)?l~k zMb^I|YlU-8A`(MsO#{5JbTbuJ8D|2p+~lS`XL7ibQ!5Ca$|;8RM8NrECm-sVwI5Vk zc}TvF1z!Me?Xzt^?XS}lrq!1!Hmdyvwm-iSY*h*cYX9}MwY4Rj4B5Q@m~a;x8{2sS z02d{L)Q?Vyj_X-`LB%!83;Tfw1SjDOeo2kWZ6%Fmg~rw034uC3WJVP2Z9hrY*=N zQm5i9>vAC&Zq;sWo*6fIxAe8n)b_&05vAt(~@Glj{`GQpk0 z6Jc+58PvPU0Fi51sEyGZ{O7NKgE#J(40nv^PXtwLI}{va*^rYgYfx$)#TC{6m?NN~P`r z03$|>fRQ6d3Tp%Cn;~!3ux#0~pDh+^O)Sf}o`8xOaWig6fFyr~+qMn6l7(*_wF8C@ z)ALH;jg{NszE4)b=9A~?y>QmZtiWl6E10kZk;ucyJKI8*@6DRw!7+VlM`(MiFMG8g zzfmdv`rB`7Iqh#2YJYEUZ|45{@4qhBc-{PLFTC)A5CC`( zg7rtw!mG=-atSw=+ve7(MQ!)mHg@5W0UUGWf00860+=VlOH$}S_oIij5%;qhdH>L+2+eY0GUB899#|k`br`a= z)Y)V)U(w&NhqV~cUFQ~PopICu8+7pK7!3O4ghc=b%~CS7Ul%#c!%K9rAvP-Uu<*fKpt?~9 zqx;0dz}N^{KCCmE9Q+WBFp&jWo54iGTv$LPlZt6d@$Dn})1f8ZW;h8}@#*w1t#=5F zN{pt}ozCGto5{=Fx^KVzcCpcD)CgpRI{#4nzxwK{*Y(a_R{+5Jfajloo@W7kND$wk zrM$en;OC!y`bj|!wzXe?sA}>do0F5-vFOUh6_u6 zsR?j}_CKF@fjI@328_i@H{Z|)e*4!WFmrf6_+jU9ny&oxjIr?TlXt<&7aky=kE2CS z*x^{ub)wObXHuut6O+vdbFcx)({tgS2dC4du5f=J7}B!`ZCeJS?!o<0Y4uP5JTiVD zt#Ps0`f$0w%Zo7JJ!Bm|O#A(dU1|TeD*J7>JOJl4fBt;<_~Vb6f`S5}F91Qn=ht6< zT|b$ueq>=|5x#2>b!c;5ZR$iI;P9u3gW>$D{ZNmks@=4kjfnGuq+cq5!M%FWFyQQw z17OAeRM>thi*^CJEW*%4>>m$ja_=a3Xwq;<>JdVimJfvbdDHLJ6M=r|&PgylK9c-R zYeN!DAD%p@H+@Du0{tK=$Pa>iRIoce8;YyzVO-yyaMzeYw!hyqB!HF`qj`cEi!~Z8 zb?l<4(p7T|sHW3v;fb4vKmfTnjWpk1Wc#MD;txlC)z#KEeEs#;-*dmJZ*T42E&#x@ zfM=h5miGnVo5b8wrO9M!{Njr*{`c8upM65kgalatpN?uP8z$dC4bp=gxcOs};DcRf 
zL8|r^G#u|f3)6Serk?-BDoU^aeDf%He9B1phz^`?RGFg-C88nK|LoA$d$5I{gIt*M89F4+M8+j^L4=QS@q43AG8Nm&32I#%vI zLn!;Ie?9~YAG#HiqR4$CEDP;(Ebh5=coHP^2zEG>!h62+Yz}B>w_4Z9NKI{5Kljc9 z)8V_P?uIR|KMpINdjO({X;InGNa(VOf)P3z(Ud?R1in8x4wWIl*FcA<7`djIo%^|9 zm@m;!1`ppj$kyc-TnIm&ZxsHt^z?J9SFip};P~T(-%Rc}^Ww{|w2Ow5w#SPg3&5tK zk3II7Fg;kyF#*;0i@#r*JR*5SR8&+nb~&}B2Ma%v|L`WypEv}v%J#w~otbvY?T!mm zevwvVku1VW5)z3mV}u?-^yE0XE)=c*yOV` z>f%NoowmR^zkR&qXRyVvETJ?tL8ahU#>^CS*QY8nf(guwNuO?tKjtnp8r~C(9!W1X zn&v8*R?KpI72jBCbsMK=f2w1;Bi%>Rex*h%Bilv_6R`ajJoB3V7B6V1Qe4O)%&^6O z186FAy#8rCz8xViqVejsxw9OxKQ=kg#h>;2fpH)W1@>sLz6tS1&<(J3bwJCJh@Ez90ajkqC{b>?9w$bBhruYA0Dh2_k2*Nz`41)E@(N-xWmwOJ!l8Sgb+ zLpZzxk#_wnh)Xef25)yrBYS=ten{cy`U_9hfzp!xZ(WSG{z!BO@*Lhw8dl}YeQ*nc!2U=gd$S1#$hn`i!iV^rZL= za?4&TKTh%m6yY`et>np^G#f2>P^7M|f7rpplz>5#kp5@)i1D9y#lLp5WppOX;Mo5RKp`veK>j){Y!rr%wXAXsLCZcF zYK32~x7U*8F4xy|y_gW-~jON7qK|DA-*-5 zE7RD3<%P95df40BOMST&)t=A0hy55RB>$(ZCJS#!{yOvePiFIO+I#J!Iy8Q>E0~cr% z!1_94=+28RoKI^xUgc(aV{TjqMP!x+h{LU%?K%uD)w7sCq-&Ii%Lbh@8a8=u7$0Vf zdzuU{U{l$-syy@^P;uFpXPREA_2=ET_k;(!J`K{B+uv(V69{@wP=5u~Eat!4!|p=G zlbGczQpZ_^w!T%7>Q#j%t+H!2`g`=@Uz}Is6-<1BHmR(CjrX~)!Y}UMdmG8jm~kR! 
z0`87Au)y9ZPdVs*olvf7IR2i;yeyr0$oJRASz8isLEBExV1fbGF&T59Goqp_Fa1@X zXopHw`GY}oo=5%Nlw+2R4?M%+9G|Q;w{i9`8zo#KoEq~P z?9VE8=jYSbxL@56JW7R^0pTsmDe7wVz|xZSYF899U7}hDty&DF3 z1Pe+b@?_Y#+n|oc>WECJ-l|W*%pK5sV*i&YxFrA=c`;T6GY2NU`5Z2?MipwX>Sj^nH*8&zhIS_GppyooM-?<;9PX-izk16kwpE)+s8p)rAQAvapjbL z(BDKt^pGJ8vRipjb`;8eRBL!9XCqDO_IuS2N%+xj1N8F)x|4njn1PIzLr^NxrV-yX%g*u)|k)J5l~;wex~6l3ZX#f_N=-p=7(&1ul;coRb=OOM#s z+}O<-kzZ-%C6}sfLUq?>m#h~xF=)0A8b+EYOTZDRIJ&b{)>E=1Z-3bR^ZoUfB6ORz zPn|$$nebzCYvyuoJ88$)pl=7m_7npVfk-3$e!5bP3uCa*79PUOBnObUFE_K?+Eg{e z8*DwBhOEV;^IHmakVT@Keh6vW99fHfZJc*Xs;(8ZKVDIUXeKV@1Tx|FogvqI42?mP zO9x>x@RH7C+jT?rvE~+Lw$f^C%|2;dmCiMN^>J^Drxaad!n!vLw1?e&V!)_o8Jfft zme^6?LMP@-TVwb4CA!ccRi1Xrxl1SLUU?2LSL9jB%jXcjXP5X3aVO$$9U$=(+}fWJ zAj1Jee%$PmU*r@We+rzG-=QrUi13Px)EBILe%C+W-jV-kK`4JDr+xh|AkD{)I!)0$ z!zbxesU6oAw7Wk!X&a`qDD$@dwB9h?K8l&<>T?>u(Rb(Gc&gG4<0*S@4|}c7&d#4e zWiW6>zDi?cMl_wQ<-DB)Qb;x82m6@b72kjp88Ug#4i*~jmi$&in8S)W5gotte|ML( zd0;kk)>xn0A0QWg{~%(&)plNE|(kV%8su%TFLq z#O!NDpqJ@m^zO*=#Fnx#HjPJL@MhF+b#Em zV>it64@E(j9}rLx2tvU=e^Hehj3PCLLuSiZg&t8Z=SUOaLWi6@)k`Y|6S?3!iMSa3 z`({kbKwisd3*>2x&ZP5W2YSkAuS3vg{8yFEL&1!I#G${y$O9v+s{= zU5HC8+};5K>IYn&%1_a|On2i@l0osw~28eZCZ1{TdDSz=sSC_*2UR|EGX2`QtiWVHJzO z@Kx?-OW;A>jQQ!`zkh|@&vFHA#&IEr_~9-!_b3)6?FIj_)gOOy=`VQv8dCT1S2=}) zis@00%Y6r@_6}wXe@bhm=8po%WKaPntj12>h08keC?PgZWt7qe*{-!u+CXxoesV=D z;Y5&0a`M+B^!da8f$c9NuPQegLb*_SuMig{nt(cKIbCKnx#JeNRY7FsWF=Z+{njwU zjfMH%INY@{Led>z)lvwwUHmDfqN1`3?#mx{^(yugQ3CE~1*N;?b~oNdT6z%OGKag3 z6!Q6u&H-!-K4jUqXVFnHb+QyqfH^v;v!h3s5(j<7=qrF;8@ma(3AK35KpP(ffJShg zhb9FmLn(tPcS+?g(1rGqd45+J9t_KyBUy83v?T_FCWxk*jQQYyiTMx_ z?2mbbknlS+?K>Hhzti*eJyWeSw!;B$@hYw2^t)#?Xb=dD=Hlxl9pOECH@!TcVYlRK zqpe`%!`Om3pey<9*erq{E3vA6a=2ts-{EIMGQLPUAHDDs!KZ`MaGrqlJIW($Wmhg~ zbvs({Yr>p3;KuloThC$o00dW49KVc>c9(?!B2-^jHw7jtf4R*x)b*f(d@`!YQ@5WK zt=KUIrO2m>1lB*J?k@V}&E-7Tx1VJ{K@TPDIUg0&;P0Tzo<*QrgSa}8EHEuRpQC-n zuH5&YJS8QlyPMinU)|kTES6a;D!$rIoHEA~ey5BV< z!MrK=8t39GZd6*(Coe(($VU_)^mes363J7B+IV#(XXcHEom(ZidKJCd&`?k?(AU?$ 
zJ02EjzXY_%@_xKQaVn=FNk?IV0aP3TJREke9dTJm;dCVapx)8frn7Tu#!i}~`)NX$ zSN@<`)S*rEO2Wz=VFNaZY+rli^Cs!$q>VMH)RmmSJKdYZg@ovf%)%Qb7%HA!RPb1%!MTi2jsWLiyf>S;6AxNzzx z+tm+nSa#l}@_X6OB5*;w;UD&!x$1D-N#i#4fM@;D&|=`_eRQf5yOm0w#Fgx|?M_-w zNcwzp=hQMmD3SWCBd=8%_-zeelrQ^Ohm8ns ztqV`4KM1LrbH2Gb8EfZs*!0tuyQp5O0RFNW$$1+UI$_>VSN#Mi(2o!)9-GJpuX;<_ z273QW{&`pRBnHX9KZVC+!T{U`#p9!k?v?jXI>LyDUswBD?vA%%CX~gymm+Kw-q@o$ z2kLnIxJRC(BZuIj^8SQp^5hxFrv>f6}H zbkGHQc@Q(8#lG#1uzH~4)ul7SQAelo^eG$wRTh1O9W)zhpPX!c({``WB(w+{?m4Hf zu@ZmdyYTBfi6uzuJ&rjQ{^N2@mKZ^BdGuJ{pF0Wmi`cWQPmPU@e|l*wf^Vz)nVRo_ z@pvl`T38*2FTqEveh^QLzNdIi=5%u7RA}`J>2K&GK}@poK0=j^=5zXwPMnXiu?AD! z5l3O9!fGUuO%&0}c_r@nq?jU~ql#em9K{O(>W|&wd175@6b{>_$`*CxZ|vda%X>7t zIOlKLz|ovNzkJQK?EXm-%G*4UUFEsUl?^%zJs#w?c=5J3ho(m1XyJ<%XiP^~S?^L1R&#M-3{=o8dIPE1#D;Qu{pm#CP1^^0& z|EsRNz(uYFSylAF1!G^Q%S75MK6hkk@$c743H^#c%_IruhL@r~kg=pR;UV&URVW+| zu#}nX$=WrM1G=>0Ea0pDGZrKQw>2nI>itW<&loq!P~6FIYt?@^=_SKROT9?Pq^OITYZpyRHcET z!$Zf~K#9y$INq4)XF4+QYV?I;x>T9`qB3px$@=oy=bI0wbp8&P_9d?4$8@$fM+n`CO37!isuVHVC3x?EV3QqUUnlZWSN9=&9b@GacdtHm5om0XIA-qvo zRL09gq?t&JK72}OX2qJrjyKQ8n@Tm@Y8bM^iPp`g!k2G04VInZq{+QzzL9YX0=*Fc4sl z-6kwze>#^?n53NQ>$(g&<+mnV65H`_qV z{KeY4qf;F8iE9LMLqy6JN~4I5UY>B5YyaOP0#f(XD46a6X^X#|!`1P|Mzw*cX2eQc zGvo1(+{^<+Z?^hRt9*|F_(?9WC5D^$M zlVX@IQzg!!673NsQ)~d2lUF9EnU0zQ7RCgyt1(o3FDn)mI+i+w6caj@Y0mQ37yL7d z2sz)~{M*yK3%Y~$MN4I-k(iYX*VYz7PXqP1r>!Ah=V@9fa48THW9jsJn%bL4qpDVk zcOX%eH{jjE)6>)Yb=ZD~l?H5!AzvCA8iwYV8I57k_ewU~DJ|Bqd8CW#&W;QQZ_`Xv zI?&{wsSOm$lfVRENVffgkrtGZtoE8K`BR*Ti|So@Lx1Q>)ttRllR#EdaW`|nc6U*i zbu6)NXRGp>m8Kp8^!j?7(uK*;ggn?Ghv7zMIzaB$hpyZC0e z>hWgZjJ!<{M>3wW*#qxhxwm@TpAx7zh<2VnL=}7**zaNxgwKL>@IIWLo?iW5&&%%v zc94~pfDWD;3JROlxlY5M*?u&bQPl7SGc@UfjO={!g{pJN ziF7enh>-;64Pyrk0gbks#_;{{gZmCvGlMlLPb1|NSW!Z8&oJv3gX4T`k!tsK7hZcu zmy!pUEq7N;Yz-I#h$9_5Ql=h~`{bx;8oki+chg_Wn+y9j?bX{anD!W_BVJ)Ee*EAE zVMZ0FB+OI)@>@a6>M?A?nOZUw=%^Kr*%n^~7R(QTle-K6H?PhBGG2qszj5VrQZ_g{ zY@6AUm>mrR?TA>I(QfY%Hzd;HPsW@^uvxQx-lSG0B#YgT59}funB$0H#?#D4#5y~F 
z4WW6IzCF&;=rVFJw9r86gxI%-7Gu}jI#Il6N62rNW^=^VG_Yg_aNc+10o1wL?m zAR+%ugE5voZg%sQGK!i%HH^LMY|KQ#BGCjGZw%#;*D!Z!8D7)AzA`bJH< zhQ?s>J|Yj+Hb504kN!tT_>2`SVhU&*uy)b;WvowHslPAf4Y2GM@3S|3t35pp2!wQp zlDtsMO1ys791`P|b_X`$-6ojx`aqltMl`#XlnJy=E~ucOP^KHsTfscaYD;VfyFd~{ zygGUqq2^C{U~S;0UTd=3zx~Y3zDLU;>wx@_D+r7or2nj~1zD~5^9z1wQnQ7e1*0|% z!8$dUal*4--2ddOq>D6?YKieye2aqjQl&gOD~gxmnb|Iw=ouGsu=Yf!vgRAH74PHB z|NI&|b={Vw))}c7k~y_IrLI7$rJ56M?%?}GGJBX0<@J$}^-BrSF8!_|t37M{EJpd0 z@+xYToizy=*WQqcmG!=0#W&9wx4@Epm7Bi=!V_N1zRSbMrp3p{dmc6g0CBBH+Z0Lp zTXk|Ta0!GDt={DYemEs0tm{2DdB@~Zu0q{hZZdvLfvV7Ko!~AeN`!s&0~HF4+KnXo zbjg5g&>bb&v!GK_S|VKg+pE*82A73;x1sm<*X+k~Wrc-=gyMdg6?PV9M8k zqWxZO4~Z`O6Z;^WmWo7pAhAo=l#{F;V*P0SMT(gb9%mh0$poE(4GW7>Iz$z&g|xYGsm$LJ*{oRR}+ip`&Q8Soo-h&(UR?G z?kR7yrJKO^y;vl`--76!x z#!KD#=|vWLl4_4YT1el4S6fdHA>5;WKSF_R+bZ-`V}S)nBn2_EMtyrj{3h|^-f2o3 zzI3z0)^_Uxzefdvc@P@kAU3+NVL&K4|2;iz0EgzyT-c*xEV#!n0>Rxj3dQyhI;)-M zD-PT}*WBuht6zQ_!{nB~9IDun`6JwW-QYC%5kW)oTNDg(J8TtpLv^hxm&XY)w(2Us z&2n`ubI>C_uu5i<+ux@_goDO#{kd7&z?@fL--CIdlzsLHT;g4?1(${;8GzpAfkL6$ zE@XGPEgAbi)Zr3EuJl(<#)kL)!N`U&11kmr$}8#k(*Bm@6q%e*nUvx}Wnvf^I3k=d zlTk(Co@;j9i@gNL0O2#KlxLISRUAs?)!l;BQUwttCp4H0Bko<}6}(rn&Q++&xDm1P zhJzvK@`tMCPnSS96)DRchjx?o$5;{yHP}EEO!+W5 z1n(FCHE;g;Nj1T+FB5nBC+=37+&YN2t@U5jezX|$!8ysatFzUN4#cbLF4}jiHW0h$~*W9xuS@1Ip%1hOUBjA0d!6wPe&;g4ajM&M&zsgjVCnYU(|0buZE_f@4dM>YpB7AT~s zgCB8)&(X5^t+I0FV6JA1We)G=<_1E5&yhMqE9N+!&k8^ZW1sPUV6%nc<9A)*I9MS0 z5ZeQ;aw7!IeB9ix0&4fj{_ze-`5i81*z_mm0Px%=+&{m*pFH#g+IiF5rXAUN{)Po~V%lxvM7VsyE)AEzH%up3zeqhaxatD-4L(OJy?vQqt(Q(#CGf z%ocu>lahjv1_Gf`P)gj?hz02s{;4t1SYMkh0%G!6&{GR_;$(Ul(+Un) zk+^!zYjSdO>~nE(u^liN@gk58&cl@xSvPI5^JKlib^o_3jgJKiG8x}CJkb28Jd)BA zSYguwgI6RCxIrKXmJ}%z*oR^FG*Fl<_JoHL7+$ZvV!H+d>SFcn_4&~|6$1dQH-<)| zbpfpW{Ubg^c(`z@D;(VZYo&K)Jt~KJJib6zX|I%!knlKm+~I#uurR>Ch|#^)I1K%} zKLFp)V5O!xYt_X>Ev%CTlY+c?5P&!yxzrLMtW45UBdoCr1&u1OKygw#P&`U_v~yo) zTPWKr^}e>Sv#}}J*w_@}>&NZ^?gvwUe}5=A%JNv2`Cr z;6Lsf6%a9U=duAhG*PR6oB_6GE&hMWFH{Z!!i 
z+zp&T={Y$$>b_?cX3f4P8Fec6$jSfEP*PIHqQ?u>Rs0`6cBH*DMm)lY5H4e}*yWz( zGq!NP?sX2z`DyRFx+p9@5^V~D@N!vK?oBwedM=kt0A`Scgwla_zSxwH2EbU=N&xo# z6+za7$B_jG!nfdjx#>k#?-NX-mNOG#d+gnL-y-0Y*e^_$8v?4ix&=akywf==5E9RB z?z^1=iB*Fgbk^B=vC)GctmJ#T`&$8|=tzf;Qs6@1IFs_BpmIL}cCQ1S4-d82uJIwbKQ` zA%qG)xlA6{H9rXU?)mwnUO2euU&^?-!jBE32P-C>)bL2dT?oSVp`oI_Wk(p4hFOs% zxTWfEUX6D6+?1%aMf1Rpk6CLPrqJuD?^9vQmG#f7BNdTMQWb$c<y3Buvx-Jz~NH__>q=qLPU0hE|Swd;B9FM)y&e=j3GD5HY@*Z^xrZY!d+h&azqxG zOi^WmMCq2s=Ri&8f2N7x;A(-Dyqv+th_lVwX0PrSTZhA&_Vv9{9m3_7Jh>ZS4M zw8~MhHho@g8uv5<#2xTQ#m%Om61AHvBbVSzobXLe9L(%d4!X)T%Gq29RI7T7 za;THkM{-3riz<GiUE(cr95W}59n*soL}t+d##E*|4NPQ^J!!TyGBqTigJu8#2t?EEoj6Wb zhp-eT14`c1fd(`Dd1wH@;6SiHnc_p^fZb`HbU&Qxm%4gYFrA81bwpXgt^7@CUUZ94 z7R@fy+MW{XL%~p04;X?Cg0VaU3>t?F4rciJv9ZB8)n9h8y#1yarV9S0!tueW{!Pls ziUc-gvS?rw6a}Hcbr4`Q28uwT&}f}~U>!ID34j_R>L0^??B9vvr3@BK_J<*$a2SKJ8P_juHpiCs zUmE|a&9)Emr@?G#Y-S*f!mEde+8;14cmLnfrXr6G_AraiD+<{c&!hx0Xnq_b9;eE? zfl}#IEWr#-Kp@aaI9dm;r;kABn<9|HorlM$toJP;e@Q zj6&%`kTfzJfD@PCwqT)DNn+S#QzT=tOr7ug2CugA?_Fq z1;Wd!K15#^tqbu$>SJ_J6e`>Ut*;8>QG{(a&i^#h9}%xRH@E+c1@G|BVA1?|)5PM9 z&D}d%cmO~INyHo52fuD{q;u@Y+O2P>U#ER zJdEZCc?%xhA@dy}oE-;m%yWE%oe=yHA$Y;NGKsd=ymz*+uOrMgpDz{v{k+ZGSdY*| zVQFNT)w{B+Wq~5xg~%&o$LA*P_pYGV%PR+Cy&a{sfFA_&;dq#G8T2+;|H(Dyh_2PaSp9RUL2nq5B&kZIjrw*spt#;U1pPZnlbGUSK)xLqQpxN%I)sF2h)6Ye%(~pWdI5fm3@m&u9=BDX;waB{n?6EfijUED7 zE+k6S?y2JZG%{6&)-`{v8WvBv7-J@>Ib%Gh=-3ueo>`y5XXAeHM$*(f*G!k1l0i!N z1Xn=sj!0-zEyYG`7HV-$w_!v%ZC7n$X-v}D;N!0RvoK4wwd$`o8qeHme{*jlPhe#O ztdxNf%n1^4%Fa1;&2Y#1-Qd9YPf2NVkU`A8r|0gH!4f0o;g{Ali`1WXrCHZBHO;ww z-P7U(G?&kcBl#9)BELoi=+$&tn#5mgZoXS_wj9wHM!~gSN?Z! 
z=X_F_NFOQPy=BQ>t!=34$&Dw$xp~G%OWPIjIo;y%MxK+olMM%jN@`Z~n{fd1be2f_ z0pqZYF`PN##XyhiblmL)@s~l(K(R4u>sK!z`U0YkJ(}?>*ce6{|7tNs^8|~s6#cs|O!rZp>fu46K-;jh!4l`fA#1cL_ zUT2$q$nDFlo~qRev6Q##>&iddH?nQyDmt0m(SneT^Opz~JFpS9yB8AbG_I)OU!c{n z>toNyUdrOzaHl|ds?&0#UhU-gZ`P#7kdRXS-tj#HjoWqlO^|q3@D@(g1CEdTJK`dDk=MLCU5s zj$b~|RX|EcvaY6s_DHE#M~K+?&{LO)t%Bc2;4^YU@DHovwP9H}3IrR6UzdpRj+>lX z%yT7^=hU2WyABsejDL{hJTy?RX~gPWTC?#3yX<=WBC*I-`FS-*Wh`)ign%Ja5zr_J z8|ZgVzvdT%x;_vY5Cr}dT;vv`dqQvkyneYW|KEb7Yyc?yNn|5 za(m-jVfEqyYI(PEA8SZ#Wd#5T`I4ze2Fyh-aV&0VYjz?#NAPX7@f&$%GL3s;kJX*G zb~9&~izhjI;cpWU5;cm(PUn7m@4=ea&I5(6mE1l&jvED3R95Epd`?~pLSlA~6@o2O z{o)KE#H#N7rv{>4od;zK=lHsu+3z`)pIaGQcA-{_32J){8rwdr-~!I%e)|5_{;VRy z@7{DSm&+vsKm)5sN*o@DT}k^%TTWD5xP?sEV)^jU4ML^l8%>6KEc<(kGJs%Mpp=rUp5v`x{`dusslrr; z)qqvjy}CzFEGl{;Y_x0@cI;?+f9&P()MP~9o2fegwWqLid~wJQ?`pT6nZ1gIZ%#x` z{TQxWMeWypm0z6&w~=%|7q&y4G@0e&?f5V+>%b$SMUC+mHL9_3*}^(XYj`I}O%o!# zy5Wazn@|X7Z?P{8nS48|PUz48HOB@Xa%4__5pPiuybCB|u45 zAL&bXQs;b?F@Kks=CzJOt$R zbEgk1qw)ll4A|=+?!Dz(yW(%r3&Y+=?qc6`PUu|@H7UGF+R#MEEu24l_;9wCOFg-H zO>51oUHK)X@lK4iRsny8WYVeEVZ%uTBNgm@r*>$O&Sv$*L&e3tKE}zi;5;!u1;YRP8hSV+4oZS>Qay zvNN3AP5w5ru*HAPeb>|{X>IZjRYWX*q?0Lh)qGsYuvC9z=&#YMW!U=3T+J5)!ve&o zU<`s!zB&{MI4U9L3A4PquWccQefMGP<4Ia!^{rbT>nn_8Tvg;~j_0AA8sA24h-g}( z@_`=P^z2ewACArvJM{#CyL7v6@O7_~i%Z~CQ|q;6_QZ2s(A;S9gZM2mOP99i_Cxlx zTFw?}Ew`K91Z=hWUgrEj>e!vDD^vovs|R@D+-$i)k88xoMZ1clD0!=gt-jWM3QM0F z&uPfdt^a@~jb7(`zFg@X6ol4eyuDdPk)4m+lalS;@98^KG45S&)Y>|8GQY5J%loC< z&^mM7t@sSJ%<$;kZws{s8b7rq%_zW6$6r2u_RP%NTYCf20B~VAc)qLKF}C^7Ml`d= JmzlUl{|8FqKLP*% literal 0 HcmV?d00001 diff --git a/cave/com.raytheon.viz.product.awips/icons/cave_48x48.png b/cave/com.raytheon.viz.product.awips/icons/cave_48x48.png new file mode 100755 index 0000000000000000000000000000000000000000..08b2f5ffa4d2c48a9865d97f608f935a73051f71 GIT binary patch literal 20077 zcmeI4c{tSF`^TqLl&wc4$(Ai*%!-+@@9Q8-sHibBm@qTU*t4WkDMYC3$`bY1Lv|%X zSu1NwN>W)L$`aqv>O0k3&mZ4Ee%JMzxvnvDpZB@%`+Q#K+~<7ekB@U^CVHG(__qK6 
z08RsaZFAZuc=gS;iT2le{rnl)$7Zs=Ed>Bz-?sW*2Z&GP0|1ml36?fg8?-SBOLCLL z;7AU5IUhGNtv3LmtmZ?;U|sN35eK{zfv6%nQBfl*LcpnrTFaxsXtEaGnV|3Ifw%B8 zvBdhhV39acHPtQ3J}6oMH#`+1;^XE@q@a9MM8CvE(Y~)XgG5EXbfLPah-$1R6tO{@ ziD;2L@FMbZ@<1#Y3K3C6%0cAi6&0bfB2X{{4gy0!5Eu}GM8V-GFj(Z5p+H&>AjOwR#rOb;6tS;C zzQ@tVQ?MQcGL=9gimb-PIFLN4Dx#vRiGIAkU6&gfjsB5}Ncl#O#u4O$A%h@tV33;| z=y#qJsyY7WAb;ygvGgV5LFRZ0$n~%Q33Kj;hJE-Br+gUs2Cy^|Lr;mv?LfDRNhfR0RvP7 zJA#3590Ui%AixelCaf?e{T%XF zkJUt9JN#w}z&IoVjD#uzp^6SrARLQF0Fep~7$6Lag*k!|3J@IpNBlMJf6n3Uj3=tn zc60?G7!HI&Eg@hO6pDf(fnWvNzW#lvUwZvH2!-TG^~QMMHJoV0sQUE?G5fxwh-hhj zIWk0~&=@R%xVp_qe~MrnEAF|@L_B#~VI-D0G7`icB!JiIoBieIf1c_0M* zrBW17@(2`dBm7n=KZ1PiNf!f6!Vw&OzsLAfwXMcj(~~|1?OqLsp<@13l(l`=#KB=v zjwBB^3{{ojhH=7!$V4Y)(9gy-J%1i^S|nGJ2W?n*RhTm9cg<_OR!{rY^O?30V7_d5 zv;**$Hv0ZHt4EQm=a>GpbDQ=8L012Mb6qRbyV^2#T`SYO{`9mW5~!-sZ`tUY*Lbbo zlAyl8u%O+B<5d-C$Dq72h`yQL!;IjAceT+b(C$BJ$5UC)i*~20DckxK=4TbV19-6 z`4#~1q&?3&b`$_WT>$`glA@2?*98E0Fb3KhmOg#&<9#n$&facRtMRWOk-Ah=7Vkni zz);zJg<@=lH}-Ge*T9J#aENw#%opAOY_{c<_aWVQ}29~Q+X9oboQ?oiIV3cOfFQ0iu76FZKs(|jZDXJ@9B*Js`9SUIL7M<}CvD6-1_>J_H-3)fYMFe-XSnZktlOSJ z1iu~%o*W-{{|bjwUaV*wx;9&*Tc(qfb-p^+$*A`|0qrh=D`*S-OXJN|t@V9puF8F= zf!EuqFRF0_1LE&S6C|n&-o$KL0n6S~8Lw*Q16iK3Em8|3rT?8seP0RXSI&|v@2nF{R{O6>6OT)!KdkEsXZTA%q)y8 z22(e?y;EE#;x}E}bnRkord$rf1vgUZl*hNcno~0H5H={UAyrW|-j(6UfxxkStjc&}|?0QtoEwL?O_~CAz zRPlR%!7QH6nq(D-@`N8Rl6qy_g32rlb{028Q&@|1H?{dMbiPzvH#{AC#MKpaGo?Y1=)4Nf$3JIgE>V zl}h?nNE=q`fx(ZOmghDI8r-ix4(i&{`Kjo_WNp^0GFaXk=jW?q=-Upy;E~t8_yiDN z;>mI`+QhW%N*nbiw9qfUMMkHFBE<_GBhTvBJ*_t&KhdSR`uGYneTgOb76BRdB;Z5CyCjOE7J`eFCU2TF_Q>MdkZ-cqI$&PT2AXJvF93w#>Xxz3>DTyGBR zZDVIm_r0*U#I4^eK;&XX=DADVn!ZApl=DuffGK+;6X%{y9Df&A7%Ih`mtCJURBy&R zr5)X>9m1~Xc=4U*%TdR;H!y@m%ZtYXJ53+Sy?l6_n_Ukl3Oql1Td?Xb5h0d>?oKfY z&EI@mXYy5rPoTGob)|VjXaFZi+xswMiCsdS)VgQyxMoHM9UT(`<~*?t7v>LWX{ki% zUm+s~gw@R=w>@_7xphFOY_@!_`tX^&%VJTl{L~LhnjCuh)XjdSGA_I3xvm|&ccaLo zqx1Odcg7zJWb&qW@EU-#Ya5d@r7o+K$F&?87?dGfR2`{$RX+ z|MR-@{m1g%oWnADp7A-Vw>RZBaJj-qj=(26!omV9t*tHW?4GP}xDJ@>iu1p|%MMCZ 
za(Mp=W$h{^32%SFG;il6=1maZE2qRzXyVx2g0K)@l$ zP3D9#_q?r~mu2Z>VGS%XI1_hXyxzg6Ch??m+YJpI%!flrNkLC?pZO0;%gK#T zXj)lWamel4XG8ALnelpDT$GV>&b5XG7Nu?D2+*y|CYd@wu_=x*!jH+Ay<24x+kUS;KSdDi@`yLMPll9J1YJc$lzHo5x8Bnsir&Q~+n=K-ZjV(&tMOhH-<^#BL zc=_~pDT^U*Hd)ru(L{T``iz|xJICtNHg5^B&-vILr9e(C6;X3)t_}!74sh4bb{}bc zEN#)cp@%#vRp#j`y(o3#E&n-f9;K*j;L&&PE7j72ZybC|B2VJ2KFKGz?ByL+6p(%9 z7yKk)ZsxS(r6C0{^@RC>30}l$kuI0^tzpxz^eXP)pX!GYSpYRdwb9Y#b9~zx!xJ3) z`=*8N``O(nE`F?-gBvAqrtz`vzE?)AEm3c>FN_uKU?6Ts+%>>L` zg}O@x5yC`*jZB2=b9OtrzuAa22)*Xqy#CPaYaoy#P0IM=?i%f;$7u*m^R+k6oWwyFERG0I^xhvnsqM4q1FQ`cVMeS89k;GK!6BMMs| zOz|l%R1prN0@(mlF*Zu$-X(m1LMltonxqU zovW(EL}b{rGrR9BT=jNG%6Hm5b26UY+fob`2)PBf1_fp1VhqknxH)NP1RhiatWPuQ ze~RPo+V)V!^>lmM5;?$nVCTzp^-#wQ_og1voP+lSj*T16f6VQvH*#sw*FP`WrG1c> z-%D**<0VPpzyf3!7dkbDcHl+}u{nRfW_IY%A@L~VhU0Rj6)uf`=SuddJ{9-j4F3?T z*J~~8fYaEzM_bss&@Ew!Dy(B0*t$=;-UVS>7|0!2RlUxsR*&cvoJdtPw7rpc46_av zl`auI8esi|_xhggnUAGEkl}I4y+?0m$O{l2v3(*-orC_Zx_qv95&;+Obm&@+2kjX5 z7!9sK) zYT@9UxeLVfYI)1cuV;A3Ik}ivw~x{J6~@W>;@K{d|ESbz{$t)2b~RdWaFD|=>a}~+ zd*ivGk5=NYr(5>)*!gZZ^0fVQ8~qx`%5v)ExxRscfs#Og*ZZDby|)+72b>HchC!97 z@s1yDMl`|6x$99@zB3Xz=wsUKj=j38sms zMoca6n;S;&p4@6AVVm!>u{5toUyCBsb!JDt|HN{b`*4HzKP>rPANXgVR@^KdHL1IR z+4#U_@7)6;{ygFJ?XJ+{n+~;bPd=_lnOpd*((=^f0}BZHChGE4|3;F{(s(SWzwx-Uz#;_# zWu>LrS1`Q>f0m>L zr25-(3jn6-#)|!1 z*j7=`rBm0u^LKD;DsW36Bi;x2-=8j=p8-ip-+8+$M@;f$Q5QG^CHJn6k1HuA`$jgY z4zxq<4vyElEkPG=rQ%()B;kx)_fZW%P={C0w)S+mO0DR}oafL;KRl_&2fd zX_xPa^^3Vn9(!Ee@W7p&OVyt}#dYb`17G%Pil2#z7h7s>Zf@R2*{-qVnQcKY3ib+H z8&20tSU|4t@53gIHd;k$NJk=@6-vN< z>Re8d{D$raf~D`V?>cf=e>Uy9wP8q?Tm8*j8Ah8K9{ zLJN6%;(YR_9a{}0FNsIGIQs!#czDgn7E72Lj#o@nM+*7AilS7njLuZ%W7KqX?y&7n zccVD`l}6x?w|Jv(rE-#;QugUq!Et-rd$&67l?}h_EfYWqPHhmnysaxn)^}xFeE+?p zi22zDk8trF(Cw`_S+DX0$~+ zcB-55j7f7R%6Y$S@N{X}8!3o4uKK)7^(N7&CsiQbGN8OzD2s=Unq}gG&{a-WKNy{p z!3~vC`V@PwcdvEs9$r&$0hMh=GJI$*gEaHFD-Y&1O;%6~gvDCvy?RyDn!Ghqw>iQz z#xVO53nGwH0RE})!y(0*5h`2jQZL04uDuHiW_Ca*Bt{TzoHu0lIYsK=OO*`M@PhCCaUQua$1wLD@70Y@#`IhfOpHX#R 
zyqJt!&Gc9bds-1?3T-THm^IeE-yz9BpnNn(%F23R{G3qu-+RIeyXM=3kAyCYH-BU+ z{ZQSk*ckn=dQMHM8Qe3s9+zswQB-!wa)D{E7i{xBEW|IUibh@uAZ1-TJUfE-q_Dvo0Q8>56Xt zz*3F!wnu+QevY`G7j$aO;{7aeY5U~+{-BYNeqM-+KI%}M+P$>+g~^_`MS+#$!}5^= z6Yq}r*G-|lnqSWu`;OMJ3N6nA)e;j7ZIu_FqL$05FQ~|)r8d~-?bhlhS8$qV8XVG+ z$-=Svj%Ia}=O0d09gmK~1-Y@1au+X52e8iH-|+@AUd)p`|A_tFM8(I!MQq1tXkHE9 z?G-#m;-+c%qULz z!Rc*!7EjuZJ?+G@TM_o9mrmHeO*{ZSWtVdK$zaXs)brf=u9QI5;^RdL&uZ4yZWipx zCv;^US+0`V*?NRrcW2lmM8gSA=x7(pEH5~H9k{0t3#lY3YRwd0G#v7*n7m_G1_#4$ zwUrQo{`0Z;o2tv7hViw_HRHz?NE?E;I&1!ea|<4LpE!`cP0|?QGH2@)7sn-hFfl1M zcH(0O-lvkdC-z!l$^QG-qPSpJon4)s5yfwdfQt%2C9{`87s^dfEMOBTFViPdHmFmb zj@Wfan!e-T3fMHprH29YJ)HKd3I&*F4KH+b~umvav5`*DBVwJz^kN!~qs_TKY8Gked3wL;hhT@7l=Q@8#l07xUB+-96waEaSODM{!@-LHkebc{0LX-(4NcrkU^-BQlcNY6 z>12%(@p5#=^#%ZB6}+6`2z!(phc(I;jgjM8u4&-nKqKY2jKyIfn6nDX4z20qiZbxg zHAMK>BczdB3i6b)UQnC^N0b|!!^_bDgN1s@aeem-#oZq?1GzZ9cX6|q<5D^Z$YBDz zz@g&gisBF#5f?^)#6&qHrA0)=#U&-hggC@Nq7WcR6etQ77L|rVAW#s9ye zbw%1h^;Ok>gu_X4Ty}15&QKuG)6-ML6D;E7Y6}#VmX-#B#DHRA!nhv7SZ|CQ+)EgP zJ^QDVpMF$PScEIu*$wT4;W+RMw{~)OljGt#2=v$Wy^OA5Z&VI8yUKVch=F z6>iQBcl1y3k%+%+oZVd=zJo?0fG7u)BQ6dW$3XNS0XW+^xjA9&oc<-`U+V7=|0%E= z+U8#X9%%m64CRIX7qbJ+pJw0J)Spu=i-Q7#LG@hGxaALbP<2AMA3T?qsyt31fS~heqN0)zkfaz$LP}IrN<|bRC9V#EsEdM?l+=}Ef&c2_*NXZP<~R3$<_ppZfeY~$ z3@AcW3@If6u@QzygCWAWu+qZT64oGLq=Y0K0*8PhHWH#g1ODdhZ-KNhST{HZf%@?r zXj~9$2{1^^MiMCumqthkLqHOu!q!L|h%mw$Wn&{{BW@#skoxKRH}k)_Rz zS0~5s>EjG{#i92Pc@F@&zbvYv^Jt(gtRmmDQqn*jW|FnB`gJ&1Pj|h zq@=~f5lE1YB<}hD=ImFjpKyKy@b`tJiN?A)xqAPmI2htVnK3}Q{AcS=g#-G(4*ng4qXPdgMw9#_#Ngh3EtF)>3?5L8SI3KACvN#N@G&!K+r z_3uuwPBw0ya95O)EiM`5|J*_@{H!P(Dk|T%3=V!69D&9hl$mos{r;!nzXw3r!7;Wd zq&)Da;m-zq6ItNFvD$;sirTa5TdzmWg(hf;;Rp$;-dTv!zJJyRs0;!;ptA^edke>wTHC!PmLjzxhETpf*mfj&L`5v?JUW1$4&P$^w5i{?_x? 
zHmBm`;N*%M7D^s03;d_%->eRH`-A-%R|w$WOCD|m{97A-f0Ki)$ie-4f84%}yMUqx z?>|g`SL2(S9&Y-(8sGGnr4a`0CNK6Q8ea2nRtHBC=ywoG){BPd;dZPbLuXw(Wg5Uu;Ts*>cI3GOi5iUF+hl@wJ4(EfXJ;H?t zgbNSI;o=dl!};K8k8t4uIb1x#bvPe9?GY|KAcu=bxDMxor#-@j2jp<^2-o3!@U%y` z@PHgH9^pEi51#f27aowq#UosY^TE>|;lcxQxOjx?a6Wk2BV2eu4i}Gb9nJ?&dxQ%Q z$l>A0XbYe!gV+wJna!KJRpaQN4O5>gQq>hg$Lwt@d(%9eDJhKxbT1+ zE*^@D^4E`RQ5f7WYCUm3oqg_e?H=xDw;TvfeHZ}X!vg>W1OWis`?$|J0Kgpt0L)tf z0MKLrfY~Y5@`XA8z;;(lRmsq6cq+|1-mt%JS;02X06Q?aMoc{zKuvxYR0-*1NO>Qlw+TjK8mLOz{c53Wfe}`A1;(Y;Lq)#fsvrqe^aWO8_=YDbM#qg zBFYg^9mB1-`;F(l!CWjcx+zP6NAZ1=Sdo~p%MC7|_tU?=l^AWIgH+4p*;61=jNx!JThG<$rW$FDWG(VHkL^s0 zo(>QYoT5_d3s8dCMoS1AYCWeXgvD%W(%uMo#1jor2{4G^??twFG1X|q0ZCm-U}Eg4 zW?Ax%2F+&^t;QAD09u6Fih9G%pJI01U)5X)6 z6sxq{Z|86Q$Qgj9_)x9JZBN%l35_M~5RTt;5A{#Z z`E=>cwd@N33{A5n;jiOBCof-cZt^x-%CrwZL1-7jQtQFrUJ%I$qqsFIB4C|0?%}Ex z%d_i2(qMkFI_L8BIwI&}m!MhJ*x(GYwT_HIruuNy72}x;_4m)MA~HMlPqUI|gfri? z;pt{KRt(fm0H#j)&9AvPE)bNS*eyyJmEC|72c$5_%7SE?cU@sDNsUmV(8Jz^#TD& zf$-Q;odpKpKt1ATTh`1YP^{3MlLdRz;{8t5r^~efK=J0Polm+smwYRIYD1SFEui|0 z5+c5!0dCn;cZSYPrsNP`PK&dOYb;K_G;ITan#M%!lIkP&-muc6HYpk7lG@X9`zc8$ z`ADdrwsFV%Z{-l_z7N$}@_^D5om+pD`ROK3`6U+b$piAjb@W4T%@g*q*A?2p+7LiM z05Z zJPPb7q;=yDK23RM-6m}A#CRtA)EzVdWyT+4X6)Nr#%YM!@x=FU+c|kd7Lj5n&5hn1 zV>0sIpW3t!mQsWgxW;mG3f+oKBtF(7n7X>?IH?fLl$P2rEaI1cBY_ODb?kJdc#7?S zC-|JhY`y6Xz;3p9V?(V!%hgjTH3^|(!SK}>V<7hBdbX=wcyKE(+=N$VSUuv)h_fbm}@rJ{a*Afv? 
zko1W=3<7>D-C?ZA7db6pQtX~rwE5|C=fh8Hnj}jf^NeX>rKYRD#C%sic6v1_)p^Hg zSC^s>q_>5MA|zCj;L}9G>fJAKrgE(;IQm(!yp!g6_M|vSWrfZ3lj|i?6=Ivx;n~WP>hR>@M{SE_xVKlgsbFBk*|of?v>u zQ-^4@lT5tMC4(~Q_8g+NQ^3`Ee35?4@wUrs=4JkMyF6EUeMUlvS{BgJ%7A-4y*>Sp zGhezzvT+QTjE^h7%_i*8@jAJK5@hu66ND=P|n$lnP;?SX{*>`dHNE1DWHZ9 z;lXf9-Yo~&S}P%4t*mDI`@HXwO5zb4!zQa{M4>xzhFhl%eVEJ;rkJa)WZF)7lM~zh zi>KR)r=A8eN;7o}t$dEFVtNFX)%5OdU-hedCw)J8H9VYT;qBv^Z;va+d}<2{DA!od zvC#p1vrsp8M&-Wguq{;VPmK(GBY23S7`ZlC)Hj?;SZ^FPZnQ&211z3O3P1BV3=#X!a_-N}b~w}+(>m0KTfo$>uL>S^pS zRLXOT@`h9ljD?rg|KXX2e!XaN^#O~es_l-@D#J$N~Xv>*S^3VmX?$FIS4C=KHcfO@qml; zHEuz<(m59}RvEr^kZ$fjUe{Vw{)AeN48-~VSqHM}8b;h6ldfwEQRB@(jcozGScnDY zs1WTg_`+1^cZLHkxJ-#)<)5KUG&D%99I%kt6T6kl)RIXF$9Jk&Z7>W5J4;lIb&`|$ zBHlary5&WZ#R{-s=5C#BfTaWG#V6xNP_z)1g`)2nuMTCc01Gt8bdrU$4LEzb>|rnC zR#&2ml%w}E>p8aL!cw<2Fewxsm9q)v4tBR`NY2?G*O*(->pi9Qc`LE05*bJxw;3oU zPI8KM37TYzvM6_N0VJC(!i`4&_B5MfPus!P{DK=|2qL_d}h&

k|;K1&)lo3>?DPdAQO}k(96+i%cz8sBS zj97h0-Bp<(V4voz#p+$>Ur~}Y?%-hKaU;p{8|rxI2kEOAzB!%5hbM&1t{CdGIiS(^ z?%zis0la#5bod#q+hcid;jX8Rn!&ano>|@@xAt1wd)*n$XKT;zQxF~JBu?d8+a9*I ziQlgDQl`s+f4=3p`I=tKWuN3}XU3(LhYP0$Uopw7MucfT;QunnFIWs`frbSmhytD+Q2&N3N(bz>c^xMzOG?B+|Or5Q1Hb{ve9-aWF2;cX7Dnv&zR z{LihQ=*Od3wmwnXCX%Vc6eUhC&0gQPQ2PS}^VTAe>@K(`cxPgDysovd+@Da*;=`N} z_S!cZhGTV%zMrP37=qm&VUL6O^_^p zA_;bq*To_!Xf(9uoDd~;NqY9m1w%tfDJ2z^YO*MzWe9MloVs@~v$?3!~5d=q~cGbV*^^V`j2AELWfJ^b#R)Gm@so0jaJNJTTPOL}lp1?fkTCWf(~Fzu!M z0>x{)Eu{4u9hb&r2D_>d&zokbeA2mHPiP7h{2^n%y*hoCD7wq$oRH8XqP%5Tbk|hz z?dJqq_;*VesJxg(?8pEytJhs|A?8o@^ie@h0 zk6!fs{KrvDXC$yqrjt>_;-<<*Na#)$G0~oNRO=*J79a7m31{L(iL^|eR1@B$q$s*@ zO2CR+NLc}{xXo+%sBV8~om|)~$<9v7b%JKzeJM}H5(=AsCGP7yWN33K7(`8!>(jiK zPDmV1JmvP4eh$7(uFoD|CkAs-3LIJ3AdMoiN~RoikjTyA_W(bP>%SfMF_WY^vd4cl zwxUh4qw-N=F>Q0CJ|dw53;pVb1f0PRriniqLyI^VXe|er|FP zJIokyC3Vm$)>%p@iaY?7>Z3d0uBP0FjA7ot2q3Q!-#l%H_|&L-0#+nFr=crE@xC-) zr--WWr1r(rotGES$TlSQFKoVlM9$zQ6OB6O^iKYw;EA@GXw+Qg{Bq+Ra(5+aIa_O% zrpHqt$cL3JE1?rn`5sJByg~4YQ$)!jfo0DcBb($@4Tj*;GYnOY`kAbmFLm1{9~24& zj})$Ew~kLGQ2XX5o2a*Cb5aOqy4@CAzWLz1gY2NR!;0LwjIP_K<9FVvX1%&1bmA6n z{AknmvHE)dMUUq+BQZL99Jm9nmWk9Si7|!V^Ixo1r^*#?auN=Xj`fs1ET^ONj)5&C zLq@(nHdNNgQ$=d8QGNqQM~#hiYMvJeLD-S;A( zP!fuH{xE<2%&WSytehe1dy^-wkc&2FOtXFo%=w@{UE8%C-*iv&rfPWO`m5Ew;&E-&UPq)gnn?VAjipvr@=NnN=1dg*ha zc}+z&*;yTO-#h{5-o!R~=RtJ}MaYMIs+R*xNDmmrn%)_3^8{1crpx%5ORld>8DeE=r>oTil#GyQgtq>ix|0@y)W5lG9p7EB0d8mJ@JP?BZCR z?~WG1*5{_j_FvfSkgh0EgYb8Y%cr>QjMXDn7YlSCF1a0Te-tS6>uh!Wb zzj{5W?spBc9Dmluhm1iMJCG2efR&9wk|xs}tZzLhL_%|W&m`Rtsv;1lG~kYepLRjN&CT&r&wVp%rRz?9!FzIJz>?@uYHDf> z0SnlMYI=J5k=fJL6xihoF!n0XS}QaC8J)Wv(ZSMOOi~UH%1meW7Z_x}jY$+J%S-I; zd`g$gN#<+VB)K%^dp4+@(J#nl-63R+&v>N?i2HuB@PK#f?4nC_*PQ0_Q|@;RIE_kg zs(!cv66su|U~(9*OR5<6_ccYM&jl)4Apz&LwNvE%phFIro^Ym~$4^L(Rb6#G=^v#o z6emNUl$lJ`ba!yJW#LWb+&zWTshHW5Iu8c720g2;&T#L2yV1YyFs6d=R1kc-{Ncsz z8r(V4@CN19UII2n?jBpFLDRh%L1jMeAdUOT?q{f^Dr?u1OCR(tuC8Qvm6$j*#h){X zB(fSL4clIuo&2(6`t%Q`9JEC#=G#}v)~&^NWxIa<73F~?+5VkzhJK8uytc_tJdZ!K 
zx<0}Td>^;~oVef=PrZ}s?a?GsE?I8CzR944y1-3Q1LL$!u3S7-4bMvBE#w-D`gM#wdR!iT^zWPlLVYzSbR;sWmqUG^k z#tOLUn9~V~jAS3)$$Iid4K3xBoY<7{R;$Rg{Aw&pZngTPG5yT(WxqC7`-)jXJrS~4;(j$HA?*O-xktJJX-yb|73gVlIGoaMA#(Ma zNLsh2NJi7x5;=CZv&w}2ttG~yG+0OC7CKdSWy*k$)T-l6KF2egvY2+{V1*Nq`HFMn z*`CKH{|>U)So~LL*?+`f?|cM!IZDAayry28#)2{Ad=&I2_u-% zSA!i9)$Kxc{vwE7no~8T(LB0U4mXPJmglDhle#BkiY)v(ZjnWb>?QBae)|mfC>&Kn zp_2IuGhZ&E(b64RDPs@)f^F)}fw z+&UqQJ3G3jEV`I(BO1PqlGAf&wZ_r5Nl^Dsl9|Gn$Lbnt=Q{+QE>}$i4We@gRxB-{ znPd%KBJydf2EJ$J((eE-M)hi*Tle-_(tB7nMb^M#_NJYXeJ=UM{#MjAn?1SXt|RZv z68$Fy=I3*YJ$A+x#f>dDHXdKD*dx{w6BHC&@sj0e&-0KekVp?9(fu-hZNE3_%>HbT zhLFSMkZuhbSL;_FyJs`6VbXvmQZ6lv*pBYB3ynie{=vk)!#;DTZr;zMq%l&br%EE% zrsbz9cC?kN8Sy=LY@2+i)~>)uIZ|7LWo7v7x2r4lq4`DQJy9Z=U7pOA*QY2}L#KO| zw>utJyc-%C+KzZoP;KK$yS~jY2y88VR6A_?7F4}h`cnNu;D-(dtYV@l7~SbcY1#)e za0y1|DSmo7VBtG!?s@Iorl|p+;L4cDrDccrdNQdd=Ho3B^(?Ukv~DuaXbHwS%_zm# zXk^$-_b#S)B_Z3vXUgKb#TOT>q}xWBf2;hR%iHn>;kEDlUXU;*B$T=L5bW&?M9HqK z_vtB6EsEr^*th92m$ZCKm-nV$lV3~W1jx&?`!1F=Y}N)Sn()RJaOO)ib>y%n0xkFM zCkPrB*6l1AeH()eWYx(3B zE&EQX-?We1XEa&SlUvvJT5#kYc^{vho9d(&B7_ubFa&YDx=5!!_#L0ES6|q@wx zpim1U>w+fKmOyK4q*{qOdOBmh_E|d*N7#b`aup4IN0T9ln6d7OmR}_PZJ&Y6AA)Tbae0wL`l5pZRb6AXS zZW{_bSez$9DdzVY&A8>aDAU_EvRu117C!yba;OSB%c_{h-G0*3B?WyFN@65F6-&Xj zQIq%Zp$v**Yk?q|sZ%HRLzeY*0tGTw*3;F6_KFv^wA!VaExaaban{{;ps4esmGc7E zNifAa^kLj3$Z#Yy2%b9VARrgRlJ5{{6_^z3Oq)5N{T5`0wzr+_9^Dyue7VMDx+B4U zczF0Nk$I`mY&d1poT@<3XVQ|_6ju=~aoRizqJYe$-gsJ*M74`9qA3m%aXP%m$-P z_=eH624jBB-hSa85g85JyE5CJJ1U_xpGeIDo{VC8hCXe^k78Pvzc6UKQ8~Wd3(Yvi zpd786yG9c8!5a4$rq?o_OFCy(EZWP<%Yz&p9gAu=-d@IVPz7HRG45U1s5UZsO286w zkB>ggP%MQF@HJ4LgMvkapTGps8p)Gzo-V4jecE+@&;Y?l$d&Z+9)Cv(e7eLjkH7a= zUcTY^^V#~ZMQ;ZoR4y>I5q@<^E74hF5ZBqyB>cSc7{pg#a_tHKoZU-1769KZ%6)de h(d=uRlWY56fWC5;@Mq_VgMU8^&{ES?EmXF;{yzx*=8FIT literal 0 HcmV?d00001 diff --git a/cave/com.raytheon.viz.product.awips/icons/ipr.gif b/cave/com.raytheon.viz.product.awips/icons/ipr.gif new file mode 100644 index 
0000000000000000000000000000000000000000..bf385e5e98008471c1a815a7bc96f40293e7d70f GIT binary patch literal 18040 zcmW)HcT^MG_x+ton}h%pYC=sy5fn5i($%4e0Z{{jB1J<{Q2`_EQIt0c0fK@C1Vlv* z2#5+A6%>8o3sM9NMnuKNi;9TleZcy#^5grPKhC=A+%>cA>~+>Uv-jDvEWqE*J?1bt z21EdGz7Zh+rhx;Px&AL#I6XZ*vutf`W+9-uTskn&6A}`)Ey5?z38}8GelYUjzlr~l znXOdLgMkMK2#kpE$;m0N&jEoRoSf9S>YTXh<_7~E(y_6zoZ|A-5M(Tnl^S7RUmjRr zjE02RkBvN#O5<{JJVGL*^~Fvj9a0|-s4tI;NG(r|hzktysm=+ED<0`+jvMHx=b#*E z@dGCgGSC_qml|i!ife8isjluo5Kf#2!s4Lytvxx#tr0mMV;%L)Ju`Pnn_C~$H#bj= zjHHINc9e5+#zu0aXe#P4HikxojG-Z8V?MBO;2+?NQ)S&yrSmp!E?WsXZg96CML29s>~`(wSK8F~CBl)u@z( z4ve4!5vX(~R--dXdyhcWsi&i-nH4uSb8ME+7|W-c#d^SEbyNrDj8(@K$3={dlsEU} zj78*hup-8)BgP^kI#|^?IS)o=vaFm4pHwHOiHQlPz&QI54|{u`oYv}`j**Dg5hs>3 z$0?^ibu0wUK~QN(NNeg0nB`;JP^7sPt#6(IdgevYiPZ8Cr_`}ArwAz;(NSGJ5;0aD zQXlB^V5Hc|-acmriom!rp9!b`!=yQfHRkkxm^3#B#x;*QHTz7o2C|&~51g5;R;T|1 zX9lf_W^^K_HDX(F$5?8_SfJ0C&p@-!z!*Bvj1G9@RD0w^goGdy6C)kfaS^Q%Bjp{< ztxg^}EHr0kJ>fAi;xQ31QO@$HpIKCUOt5k$SZIAUTF*i!M$m}}w0-FZ z9v9#5nzQxUcc;_*J)=^irKhmNgrx0X-R&kDz!O&;%@s$53L;;4%yVe_3mdX zqDlR(zw7rkvcAL{T5{s@26?$RuZS$bNyW8}PWIY4KU6Ps>E)$XX-ht<2?+BsVS}Ncft8FR2xM<0=w-6K3D$mZya@!l zT1@mSU+^Nw@#{lxw}D@44yF4#ibD2@YMteQu~bgY_OXL;IyqlIvwfY+QE!RDTJM83 z!{5MHQl23fO3N)|JeAQBKYNJ!U$@Y2r{hI-F%J(N^MPE?OT%J!q`Vezw7=HlQtzs1 zV?I0kt8kUp^E6Cy#u|yePU9xV>{dVwE>F`-Gn310sBVcnbQu=T z-)i=lOW#$4y7do@B`0-^^n-@qd@VRMmw0^hF~>AvVaKc{=J**T=t@4>oIY58(YlHU zD0y<>bqW;Zfm5|J6^VrTb`Il;jFzb`mpcshoix|;4}@1FC#}(K=^cz}u&ByvyRLDy z^0w}OwfzumyHmccArD2njI^7SJ%B5pTEHd2jsx6>a5O?&jw+}njk6l)Q$ zv+hnWiX+BMFTRtRmqRhK+EplR%`3=j-E$2U`Q+D;U$L)j$}10fOq;Y_r`y@Svr~6g z3;T_ct+ja)LJ@rjrJ>r?K`S-UFlO3 zu{~)l^<$ebPLR80ZT2#srFI7i>^{>Qq{_sXZ*(cnO}|K%yva`21w>HHE4}HBWP)O{ zOhaD(5oa7)h?_dAbbL*R)YsBI^{O=)_OfN4-7xNL!y9h@SYdF(ar}Pw=!(~#eSK@V z`beoQvAoS zucS6UoX1FX0FiySs8f6}xk9e&K&p}%~pkvbJO_z4ILtcVy z){Rxy_=3HHO%5`{w&D_lNjyQ68&? 
zuvV*3agCvK+Wi7#qUu~k_U_*tv10A3Nfk{RT%5b*{Dx$;A>(~(lcNY@hIbv(-o=zy zDwoc4KeU?mu&1mIQJ9hvj=5(~PUmS6C9+wH(ih|%(6qz0kmnKJ59e3fBzQ0=ZoHYY z3k;4AFretC*ZSKiSLg+8HN3Q^lNvwSm&8mAS%SQh#k1*c%?h~E?sn+bmq%3A$lfze zC$wz4t_Tg-=&$Z$FUS~VjB5TcoLr@n_JpKGB%$b1%&#>60+pWzhu`icKIK8x2iat& zDm$Lt2XRjP9MgZ4Mi0(>=J&i}lcQH}dg!JS8@A6S&S7IE;d)i)J9{g97!pF1W@WD2 zig|W{?(d5t{AZ6!4{kH3BWkQlzGQ`gImaD&7qa}zpAYm_inUAU`L7Laj^a-sss@R+M9bf03t zJu5Us8`K*SF>zp~BAGJ*Z*>1@eP^8DraVQpW$#Xm?*dj#h-U2s2PlMOM%6qT7lppP zFeX`5bs}^j`0tD{M5(f)aMy9UX}8_tTDqtxZ(*|8Es`-}%)VxPKGWsW>@%mVNC))Mq#BO>9x-(cfzZ4_s`i zTfp1LT@|7AU&REj<*?g#HB2!>%4=GT8ufq&&uSE?TQ_TY)Z`7GxG9N$x|u{QZ4yD0B$hGTN<2py_qZe@x&8+@fth#59j&; zmFLA%p%YGaPBA6R300Vmi3`5rv65DPdm)r9v~t(!uE_iL1(vE!KefM~KlLNp%ZKQRllNoMd6N+v!cRXu|nANs!SANXFn!Gz4Pn}0c2vuD}EwffD54|W&p!{xs*B4=au!3FISRC*Tl z8+xn_PY>_CFkm*?@@c*rCfSy=;MgsbyR?^(BP$V8bb$7XeCiPPH z?2ZP*E&2#a$Pue_+q=1*ueqxqZZ=cwa#X^Wyx@N>Wqr(tXZ@jNX`I3nLi3Qke+#oc zG`R~KDX!zWpN%u9fWSTgT~hq%D^M%8(&cgu>=wN)Awvn(R`2$a=+F~0voo7kiyTD!uNj^ zE(|Al;Dvp%;*8AVg+f9WQoPtMCGig}Pl4eWkZ>z_xUAJ4>yoi+WPob37arS2p1c|<{FrS67-d^XYWBp zquO=XjO*GM&O7w{f|of>x6X3j@08>0j3~9gb*6E%z`Ar$Qn}%YQMM>t{`NrOr_IH= zjdGoX#n-3g>5bV9vr-Neloo`SRIQ0WzGkZls?#9_W+-j0I+oK~wD}@T5kOR6oE&F5y7JYWeBZpE%8Jj5P_-;Bq2IxU|Fu^!WK58t!={=s8z zh+qyLXDQcSpv)>$Z5T67c|$2S5)xJw<>ex!VeS>ZCueOy2gSc{eL?k_oJ7<8O5e?h?hpObSP_AmQ#9ySocjvIY9o?qs=UmI0rRTdn< z8?y(-t8&6C?zq=1)(||{N48Ja{gCUR9yRngRU&2y_w|wWk4d{x=_ifWCr~R|w$bJ> zLA7}$&lieRX){wcaVKjl(f#BXj21D};SI8V)}40e-C84X6p*f=I(`AjuY`ET+CQ_W z70AA#3Y{&5ds#>BeXFZ{OVAMz4xTc~v^$c+EWMtU!8%m)>UBDgDJ{hsS`Vby;lXZf z_y^TjFwCrw}7v!?;IsgeZj}1U7ES;kHA&^$82q zuCwH)^Hh*MTQ_NkdBZqEYm!_U4A%%q&r=rQYJ$DU(^j+zNI}si@H?RzA6MMTDjr-1 z^md=Tck*P#p_8lLg5kG>?do$j+bWC?s=>Wwi^56JmyPi(kWyz%bnC0S z6@I?Yt={}e^OHl(_uS80%sIk-r`Y=_SM7G{Z@ZG0Mi;!>;z#6=fm|Cg3Yo`)=Att- zyuI2jN?#6VFEwV03>SRg-pN*OS_AJMTxy298fZk^gX{Gb5oPN5XJf6Ojfn#Lg9eAr zEmWKfrJaAAee&rQ-Bl6ihrU(wzvs5xJinWFD*jT&Y&&UU;TDD*4mTtS+}dh2e`xWQ za~{BH_gAJA`8dzk#oTp@iFU_}nqFHIW+@=Tm8{Y=)!CZX<L+c3bJFaZ>tVwzY 
zDi57}zvra2R?X@g#XHAy=a1)2xSruqE{sRWp7w6|k{vxgG*4F_9^<7?q(U5=M--au z*`tpHBm2)4)fy;t$4x0pQn9`tIwxWQmfUd|A)L(K<#y$6*7?k~lW!)Q-|s1G)T&|3 zKJriC)zU+SyT9emL;hsZ@`A=XKPgJ9E}kjF{-7vf-u|`YF;tEKdL0aB2;CO11=$>Q z!4;q*XMGRW`C8`a?g`C$1bw>;p`J+}>LnuwE4anke=uu2l)7gk&zpGGm}vcFF{j3U z=as}gSI^A(>o&jZl<}Xfe<4Cz%Dcdv#D`smSqBsCs20<#sPCl*^wrVD=};gSc9X-C zRAbd4)aL{FAj7h%%VF2?B{%Luu5mDN(}sJ@l$6ZgOj)+zE#Zf<_sH(Tl%H4Ux9e^f z729mProZ*t^6b3ZvTU~|WL`o>caUuS%3o)fmGpVV&*niUXkdG2V$)J*XKVWR%zhn> zEk(@!Em-C}W}zpfJgQ|K8G?p*s#o6}bp;?1LxcP=Cr z8sYWY-|px(Pxdw5Q6}C?NQ7QSRs^rhyM4Jj zMV9SaU7a$X=k>JfX@G2~@lUHsEvX1-PkK11v@gI3`Y3G|mwt?dES+OG@Wb|9E%0C1 z_{bXZ!x!lbUG=dAZ4048s=?Wfq=KrNIM<dlo~GL;4DJhQY>7WssbZCjfI*%e4W0(I@gVh4IGK2nhp30jWpn z_tKia>z)lFw3wrSYA2yXa>=vhDR(n7oEjf`F1mU2ts?7LVXyYd8K`z_##7k zo^|bWjt&C!G)!=be=$~Z$}1yaYjJj4owtH;`RB<`|K0q%sZeoR^Zc(u(r?X%)7fqg zr!2N!G769#Tifv3f1{aFyE_p4Hb5$s@Y}oaW5yzz!}z}PLM!^YM6*E~b^63*b^ zx5cf0s|PFAoLWCiPC8TVZ9#+;1aCRu9|hhhF+oApusr z(uavKAYuYd&20IG!av_`EiG*NQ$ZG536V#Wu7}$=NV_ZX2caNu?{>6@bPgmucXwpl z*3?0l?(w7wZU^by#B_yZ^g)uR99SO!EYWBYD(CB%@sp_|ipSZ@0gLT}%b{Z`gDZN^nt*{t)<(6og7S zIQ8qT0gq79DzfBlG(h3mC}DdA!JSR}d3C~xO?xV6C(ffmch1QeTJ= zD_(c|sM#2b%vHE8eX$k6+hD;AEA`QmdkkXvMOmXE0HsWXy|avxS_`K1=8Y+BXwZvAMHnpC8? 
zv;c*Rc;<&YW!}6uX}Vj#7;d;K1O+Oncdw;%DYVawFxmmTO$&jckT6$ia*jAKOJ%Kx z4Qe}sWGsd&g6Aj(|Lk4w`{_$G%y;E&e>3l^yNHmCBV;)QN*z_W8KdnLNV8L^^4*Qd zSb5Ab9nxakO*{<`?}D^gM06=-IUWCPm6Bk&g zJDcXI?;6h|k07F#J>i5h?r`QyRVTb_K~o}C&W+rQ*226g*&iIP`QxvoHBYOZzoPkT&Zo9R$Q@+q4$cq`=eSPE8b$~wg{+!y!M1`YY z4?ZUwhE)rC%uvQF^q5ayP$B&B(FtIl##bymxCtAT6Zi?xWy@@&n*>LM-^eH1y5V4X z_%((6v>A%yqIkO#5OqNRd3&t1R4gTnR536@R|q9Wx1bY5(fchz&@mFuUPT$KxlP`l zAyb_tW+)OM@pOSHo9M?TIN~l`wZHD7w8>72*4Ga+>gEE>`mEY$>o%OVNg01P9nD(S z6PhXv>nTYV8kQzT-DHa&N6!*<(7Kge24YFwFR#9%%&H=HlYiNi`usM?lBik)t4BC?c@}eMc$6- zDQR-+UWm#x!F`rNmyaPH0!h;iNg?CCS6e&Y1BwX7;g zzLIh9vcxqG!*SrF5aQ!VkyvuCvFt%omWQ%y&It+%iFN&$!hFpRu<5}De^uNgi;?k8 z@^n+?aruS+vR=AWHlA`Yzt!?#bAhkX%`x1}4n^kmZ{lyJ%(2J>dl<}ibAfzIT>jr*1}5d=aQ@;ca$flHN|4DP(8_XIqMpVBAui_4C?My zz_S23B}xSe>9y;`!2s|k$*|p7GmA-_Av4aShO)CV-1Om1Hf<;dVD>oln8twSg!V%u zH96_4SPXB8bDe*;ypC3i158McM8$Dl7tnx&F4ND4H&}DM^S<9z!3$;hX6w{EDxFJC z7UP`6yVRuyHR3(vFx%64UO;g^JH20MYL6rolm({IulE21(nkuTs7C1@j3%!Uj4RPcx(Z?5y|)#V?>E@_8!myRAM z!*619GuiUJUVaAKmEPJr1$pLb6-qtj9ip#5d)KW#L}`mdGv#9JDThG|6|!Gt-HGH0 z7)R8S*P%n_o=Y_ZmYLX%KPa(`QosgNssX%H42d)ZOW8g^wCZA43rU*J8nT;G5}S!5 z(|(gT?%OJg7{zQoFNI2r%c}@_gPAR8OxXCMK|4Dk@5Ewxxz1L_J-?@_R1Wkq*rK+S zHN<9QtKWp^x8%)j=fHaps(s0rSs%%pbP>t7@wiLP9?cWeKOeXJ)+k$?7R!XQuFv!6 zJh(y1CezWS8&~u4^oNgA%+O>y`9q#}wGeevVDLx7E*N1GSrs@DQ~er~jI*VvHEPLDT0Pq!o3AwJrA zYI}y$DQ8NbKbfe~_NqE={)99q?Tz<7t^r4PY1_3FeF(D9h`|xnbe9L#H@m-vO)_BX zD|Lj_u&nbe~|k?!s&*#<%VBt*>SJSN&ht-*wgItxDQ)4 zS?PJ5uCY40wGWA3aL;v|11Dw57P1V1DufMt_d&$z@LiFM(zuMpSAA!l5fwYotr%aB z9@~&bI%`Z{l9T$tmi0Ut%IA_O;oj7_LaI+Ao8lzvTsA}VbcUSPI@n7IxoE>A;saBv zR0GMA6=XJ>{OVMqxK&=_YYnensxE*YE5QHWSec(re{vL8&T4M=p7RJ*(NhY6g;~8& zhxO8bQ)`2yBhc^6Y;E0y=ECxmD!m%bGS90qdy=1KFi)Jh?q+>-YuFuo19u@_9HJ~u zAHXRnDlR=mH4f(rN%|TrF+Aiw(OE%oqwgCU@DJB@1DE`nJ#I)nSG`=<<@h_bzKP(l3)U83Y|yu0Y_6v?<+`@mFm4IAkK7C8N;~Tkm)Y1T8GS&fJF!t4tbGfkcE=8 ze5zmv)_V2j)&DJDm^w_p9;1KW?Evbw#E>nvv=duuI4BA)XW}%_>5aEfSGE67>2>%? 
zW0I0Et(?-ke#OMK4|1qFcJ0j`GdXn;A4mEQ9{?~mjAbdISQJiGQy9*L7Q8~g^1@?X zrgqS5b*<4n0^X!7V;<@zO;Sh#+-FA1kH8bMA*(}Tw#;@-mUx2Z6UV=HHO}YiaUf)S zO_|~h`PnKrn}*?n&Bt-t6b`~6V>^g}?vBXH+ofoayJx2x4w+3g&O|OMoj!~kH(EL< z#HMQEM}3eh0$WOt5M|vKcDP?lp`TZ`mZna6UJBb0=XJXyRrqD%vv+7`d5B~(G?pww8o-;Al%+Yd3u@gI%wbb1VJcMFA!(fu-^XxNK@ zmrV0P47OKKBijwCt(khhkshP-^jVc zs2VD8&=%i?PI0w7A8B3Bx=2NQGMwdr=W?-OCgrfe#n4f(2xD;jMAl!Vc7u@)dA zZa7UTZGSI*ac;@a{q~T;{R2K!iDR{#jd zrCXupQ_yP>l&UEd+Tqi#Q)2zpPO<8;_Y~;vyHu-}KQt_>jg^Uo?WUM}4WB>A!qpt} zaz;?CHBp}_{V9Xmn0rJViJA$Q0%QG6s7d`t$F2nD|N=bT+#_!TO_kOdi~9bscpFBFd3Fz zzZ?`0JcSRHmJ!w1<8Q#V&;29^DG0y()e}_f=kJfJFymSD-6}Yoc8N|KY-HShfrzV> zM3@~Wb|>C}+VkxW>fJhAA}4w^YRyp-i7ozfP_dp|JZ<47&Lt5a?+~LJKuPCpep9gf zJkfcz4Yg5vgh)0+#j_^?)$TSkQ~d3X?HncS60;y#*gnXn{#nvbl-)I$nqiCX*TpNx zS2G616&bh5=c;jC!@Jd1dv(#}h4kSyPDh@nR>!)#t8UmZzdYP!Se2+Dzl)dM^;SAE z&Ha_IzamW5fp1{kpEC!?ux@M|Omq`|8Aymi-@`xj;!QCCcb6JMuwyO_^Lwf%Ytz6=F?Gq1o!`ZaJ}2c zaoxa4dtU=fx96j~fTvGnKHa=)&_y9m)U5k6I+zgtSYd+{8j#fWp-95oV8QLHJX-@b zafdv_k0)IRJt@V5tRf!%&E3lq(Wh)i9<*(E=!5MwAW6LdVG0i~H7v`#E2t>b=L4E> z(6?=6uskHG+E)}8R~uXqW(GtcU7JbuiYrTi5v zQG2a<6Iz(qX;e3K>sge^xR!yOn95o2mypMXo_wa>M<=`NRt`O9A2A-E^=0Ug-7efk z_RqdNTo;LDnD0rCUOW9F{Nb)=)IzyoYsDZb8e#-CZl7ETtAlk=$jOY@e{%^?2U&?? 
zewm!{M)a?9A+z!O-w*oU`yksZ6Mv#c2D4WMkRN~1?jTpP3k)f~N{Q*XaB#KeToq2$ z6a=3c3i*ASw;1SF7!vx(>+F>bIdKPj)_&Tv5D)m~N=%~Y|$zIpRt zlH5hz#QJS-XTK}3l-*^vZe=zDxG~(`9sI1vTU=)B34*8AVKJlROlL2LjOc@5j1vE# zbu1`3d|s(%%dgp5I@r9qN5B>xdgaVeD#;|w$0GF^yx_8lzTkEw_w~yE&GHpRGye6s zmvouh7P4SW+`9A8P}HDN$z1{}HbBMrfXyHNp?(XWr5lP_Qv>+G*5%k+OXl8bg8^KB z;md++Z*%KG>++#1nJ;a}qE^U4?9CTp*!2zK8&a6~6(&<06X(7o$427t@ZjBB^O7L) z8hDS$uJ9{{uUw-tj4?+(#+Evj62gtt%QrUWPi)M3YKEy+x-c4SM-5a>>Ger8x1pi+ z5B+yw!O;)clm_8r=dD_-%A;_cr-Yu;m4+u>XvE7E5Xu{+TO%|~zeba|AW z@>;fjMvBQH6qKe$r5tXX&zeI=ujV_FZDN^={Dr zMmKkeuiYIdhN>v`Q03>1<&k3REtmOaXJK_RU0&#|#>tu|CU;)`nh#z7ICkq_+Y_g3 zY?Je5Rrs0+ptZ@J8IHrDC58t}9IykgzSCL}~fs#rI86n~Zm{X(KoIzh|ZK(&>069acu`q2@XrUut< z%v<-Eb;ns>D1YzXbnJHix@}S^|IW$|{=s)?eD-2XU!IwRsJSJ1^8){&&?R`r;@}r6 zzhquYxxXeUQ#XORNSt9Vo{Njk)zo1*6{8vi;2(c+YZ)R@n zuyMf>FELBUJ@j_)^OVQALqjXm9XG6P%?t8N{{r8s|8qmq%omGpLLO^aoHYy?n^z#9 zobJ>>M{~BTjEg=pe9DNgT%;KnKX5PKw-mKoBE;5&8sik4$))f}Qt!1kX0wg9-TpA| z!(||Ql$;$h)Z`yG@bsLUluk*&$E3kd$G$Oc2^`<(x3!{;_?^OF=#TM7V|cpF`F(lW zj;{yruQ3Wne{hDy+%Dlm^aoN!8OFrZhzuC;!9h9k*?wXc%b=15{hlq}K4s|Or}Nza z5{mroYsyXoKJzthYzuFy-m?|9qN*TQnb>vJiry2J?k!nv^$^Q`zO7*u?gIRnA5*tX zCNb4bm1V0R7Z?r~{Mh}`?&00vlvDv1@h_`oWfjKWjpb%k8FEZ>hZtcNiN7~IJ z==}2xpFujt(j)e>144GwmIM*`PF5r-!O|_t?n54-Mwh|A8jR04xKo&V^cQSzIfbiz zkoaVUkj{^aa_%+HN6(P$i4a@W|vz2;wzO z8!r8E|NFxC1QRrKr-lRXLoD82FqbUPlhpU^r>40vu3eS=HH&P#lJf$O-TiIZ@GefM zh%K~d>q)dd-}8LQ20vo~%_!QL;m;}-%3QYt4hFMrgYP^p=?})I-=9=mwg9J5w{zm;SBPY7NiyrN#Qq$Qh zh!Mn33Pn$iUA!;YdfG;FyPoJL^3k=45_PgA7L&fZbBCP~o~1ywpYV7R8G?UiE|8J6 z**#Gdu1GZmOwdj-wN@qxGyNy#RakgjjF|88w&Xft*7#C40{7N#JxnKZlud*6;|Us} zmlQmq7ibU83Ys0nrnx$eVy5j+LQrU#L0#6PjVAxf)jEa#B0l`nPVHN7&Crza=Jrkc zYS}d5D&SNt+|M&(i!c(k{ewTW1nD$E?h=%i~rkd|Czb$;VD{UyHgP)Q+l; za>z`cWL;UPU4V;$8(Rv~6d61=9Hq{q&zNDalN>|ud+QpfiK1y%9p~=s{mrDD*1|Pd z9eb9plEl$QV+uSc(}0MJ-lVUk-reqbM=+d zuGJl7$*FLxX=hY|TzdsGRzhDb2%V@s5}R|VU|C69K=impzzoT?A|lZ z<9kORcCg6{L=d^|HEv)FP-EqF(tBZH|DzgG*pxRlf=$;3GXrHdhSD?yQr%fMrc&zb 
zhFNpV^N2SY#MJk%A#Jz9O~Qj^oCa&-jhB5Lbg^!ZQG$n;#+$S$#%~SN3HDF$(RbGL z>ELo4iC3AH$pF`ebCFKuAdB7wm zv57OPuNgc&4n315P*_MXQ!90tvXg+6{7WF5Jynb9U5CodmliLDtwNkdm!WC~qd-oi zw|B~_@2b#ZQ6aQ+v+VT2GpmwOikaB zsgCP#1M;eTv3zvNEcbw&V+)ukKlkfI3s33wvCRe#zH(UdA@4973lof1%{v`l4bPAf zH=a*v3R5ZtNsnpgw*eD>1 zW?iPh0}#ysTHjmRIR{viu5_JTGe7^ZL8I-38yA~nF$ngpCqz+~N9YC1!j!M3b+36_ z`Gm6Fq@Sw%y=`&SZhsxek;RaouO{!r^_o3bn>-QLpGn&|Ckg9DCRNy%@?~NJP3PML zmCTu&k$xz=#9D{pRz~EkKHXW4{5aSj;Grn12<1>+FZW+;U9o@t#(y7&A`+n;AkJ;W z?8@5aa|Tl?ZO~r!>_7-3dIKFh>GM-lr%0mDKqXk&@3%FmY{uhRDRqvN(m4S3#} zGRsbcjKI$&=X8QA(O;djJcLdd>8Tzwo_>10Lg=L5FoW zxC)DVLzVBImv5e3)yYw61{tBt^+!-#d%RVDNa63tp_OWdwq1l)A8O6F^Wz!eTR2e> z6+ipV;=3f!uCy@Z2QeWzl5NiQY9U=PD3H!ltvRYVd&}hOG7b@g%o0?&HjWL^pJt1_ z+uw)(GF`r0dmA{~=|+52qssJ|G%_g`QWAc|5$Q!`)9gmx!m;Jr$HUw-K6U3$hc0%l z=wA_;m%K20P4vgIJ1gE|lU-({DQ|X*t2;Mn;J#0yw@B=S{MbEtz+~>qruG=8SH&%L zRx5Th{4m%%{Fe5*50-0^@z*Z9^BHEHT`PXHtGf>Zi`IM8QO)8-3pm5L^>n;kV0g%0A zuOCm`jx~ET;vqQwZtG?|oGpZ&Fn!4vm+l=)*9=>qBW!1fjG4xnw!1d`=NeU(G&u(?1O~+@PCsbs*i<$s*=zF5NQC zyL%hey?%!#G300@vOD60;k7&GI9KdPaMG%i@f)1vfZB%l(3*@ri_op%n#H>wKb|@Q+ilRX*?`n?KISV-k48T6(_P}V-Z0cV zL>WZy#&@ky88xZ8#yKRknlP&1l&u>N25|d7{89$2E<>7tu3wN_6>}x~nzqb`{_kF= z;VLIgMRA^fvLe4aOVg=;`5t|E54S>_Bsk)b5=0XNy!6TKb2VtZaPInuc~@#~U#@XU z@IASutm8749M*X_-&f0R0MI2iO@R7SNR#4TGWA|&HK#0ZiQLx%!M%3kRJ;8}3o%0j zDm6$2fG;%-FlO3Z6XudIp#6StHj^+`^04^G0Q3=9gu@2~I5!yAR!19Wcz-KVn-V2| z{fo{%r=4_=^Wss|6TA3%4Npufg&p-myWza0w)w+JGZY`A&-_)BNchD8`M(Ap*B#^h z4&f=t?46j^4a>u&IHT<**om9#dD|#&v4cQG%W!<@z&ugc1<-pRH-lQ_*4Q)%*-=ag zhq=)=3hT(!SNC)rEk1c$dVh_bHy1~Co0KIDhJD2&#_G%mb$MO`F4i?jD1bB9!F^r5 z?`6%=ao-JmXah8kGxu=VG6g?cVc=UjX$qmH+?}MDEb?3D$~IKxYJKg<&mb~iuYzG= zd}FosmjjM#B4)Fyct-Jak3NCThHg{Eg!lX1i9Y`f=PlyvP(dC!+=qhUiUgHznhFXX zVz+Nh*jT<}tHI@9;j)f8^3XnVq8RxJNJN}G?Cmsp*x}bQ9Yx%l1pPO4nCE(1i{Yb- zW``k7WNM}G%je_0#V9F46B>NzdC}vw+&a^SkjvEJ6C@asPW$Lq|4@Z|!0$I?5rn%BKgPlN_M;*Hg=;;&k3vwr97^=t1}5E!x2&l6@~RO}s!)tnU*??JOWP;u)cf-WZhXH6 z0oRBEe1(hZnM{?=#>W;m-Y=Tnq`GnY-{~&Xa32~Qr#{U~amETWz-rJyMhJ{nT#xlG 
zN_t(p=>Rmm$MDAR@^<3p0{(}S0_oc$2`j=Tt2O1+Q!Owv>D+DRSJe{plkRM>n19u0lQNk`Z0hnRT4w%Us;AfgXe+PtV5=-k?%^b3l7 zfzvP2OC0Q0rO{J*!Ww(S4tDHbqH$RAJ`X4!{rG%{Qr>l^0J{hXmo5SZUQYj6=_&x) zG8kN4yLhA=@nVpq(-?U?;Q5D#z2kUYR#^KQ;gS_$wku3tlBx!x?@Qjird5xGCW?7< z6_f@ngKBv=tTUtRIW(RKIe5D6&a+tikC#h8JVpz8b0{k~j1D(Hh|w~`R@bTg()aOX zxM`c1Sk#5w0<q;8B_C?>g6&6@^yLvG{0=xEPTeOh&hOdibC-C>AohofQ z?uGtOn+PBofSoZnGin$caN0MPgs>XVzk8i-Li(-RFhBMzqjOPcBN}rGNfg_efx`8` z@iNW}RYCE-88L!dO($PY3;+`A_j;3UPwO-D@*&+>&l~HWHEuDoqMlOQ9wfZJ%}KgA z4S2*G0F|UJ-b>ZsrxyY?S2LrbEo1TWuI!WFY9iI4IdF=a}F4{g_#hM2xgRTx$?p0mM ze5`pG>P|}#X^Wn<+}a+(MEakW8TuGTo-6n!+a`G)Wn*_XVBm0bP%@lECjttu`1G@~ z86$7U&UNH=b?172-}h3hj!s-XelP>V3i!iO9z~i3~>SxugW`Db)35z}a9g8B4 ztS@jK7gv2afH2B2ednbw)^HbuYB4+gZ(!!+=jp`;S1ouP1HVOD^p8pB{kN6hHldUm z&Qt_O6I|OKsI%Rht}@!@;`P$+7xpS_{R)R+zeRT=^;G;*{_?CRUI81uyoFGo@nlqp zL_6J%2$x+;i*a-M*QMdq??dM0FGJPudTs%vL=ZL)+p+0$*r#dPt*wWa;OEy8YN<`L z8EKe)1(uxLNg>Y6Knb>UgA7|#Cy3XcK7BCj92GGyZWyxLnwjUxDRUinX+XCvnb3C# zuFEa(u}k!=cOmICVYAQn8M)OeOB{6G&(-0ZBUIJm&w3T5d9yZrSy)C`cHk87N|qb~ zvAfUlZY9%yg28Z@O;^!=0;uV;Uq49Rc&v22D(PIET}k}Ow1s0970^@rMBr1b(sQWa z_%#}HJB7fBgl>EX0qc!96yC3Fvevhg1?(%wtHL0g)eR3`fw zgEEjp$I^9F)KH8ryhG)4mwv@d-=4_rt@i1x2Iw?lIr(5w^@lku`!ZUF-oF%TBBV3^ z&Zrt=oW&^yU3c|3f3qgb$B3l(TLqU^kYg9fwk>GFT^oI@XI=gyi$I_>0sq}DaQHQn zNCgC{m;rxahHI(6xu~3%|Ao6p;XKLBjvuMVf^Uz~g+t4@|l|urxZWGaG-3j-!ZicM6KgKyF>Y3FOU3=E~`vKn$3G z4CE%cZow8%fSKBl2808wVfYfmhFqs-Ll)=ow*R_iW41p)BtQI<0Ekk8c8dmZz#9L+ z3E1Q+7l8DJfD2ecQ%w2^+}3UN4Sm};eNPm?c7UoC5e}p@Z$QooG=;dvfE^gY7D(dH za)61Eln3CU67nt~T#o`r5~}lvsAKzy~}I2lT*7SB0Rf z2()bh0rbEL#A68P_XT{zrvU6Wu(ZRIK=)aBQ^bIo28sv(N(`WE9rEErr!Ab}D-J*% z0uR@9duEk^cV)A@t6O$*fvVWz_|z*Q+&s^J3dF3y1!Thj@WY0(3CcPnzC8>u zPyne^03e*eAlMSXwt&e>uMq%Tuh>x5$NxYvcz}vX&LEKTr=RPTUeBHUj*g_8%Ks9N zw7uGgxN*oV%WX^$6l=hl00-Ei-CRHge1H$_x(nE(3y44fK)@g{0AwtqC=5Ucz$LG! 
z^mItr4G2IVwm=1JLAx04pj`e6m;fo`j|5!Dl-i|ZWTqqivMY>Dxs!?qtOVwe~( zKm>t76Dd>}fPh2?B1DRbXuts&0uv@C9H0OwLjp?z1Qbw!0Yjk$0C48ixszv4pFeTl z+$ogj0&)v5C{WsApjA0`KEx(U^R&X)x4SW z&&)a(EGSfHX#oLFD^M6j6#;?|999TxaiPQob6+Eh2(&`sgaIdh#F)?nuZ)f@Oly9zW;AkoLjO;_8EFh^r=toHqO>>YvxERZq?(Gg*EkSCgcfADV749( z428B1o|IuiJA|=Ao(+YmvVnUz&;SP$M4W(-6b4xd#RODzKs+Hv3nnxGWUNunoL~f? zPRwFXv(a)~s)siMbqSFBf;<2iLJT#bBn>{$ z;6G$Jz=04iuM06mPFu?+n=vVnB(enLfl0*z#AEd)qF^Md)>+f56;|~80l=Sj;UuRJ znlNwx+&9(|z?&a|O#guc{Rkn%Dg@kMjgUi4DycO}h6P5i87g22kOT-ov)h=!RjSoi zb&?g`k3oj9TsTKd%3jLw6(9(CDBc$k$X232W(rcMd1lpUreFp@w9O!cid}jMrZ@Jf zjGysrY8PG`L2kO~qwF*a>d;C~N+0=}@>bs<64-ZNiYaCpMR~^d2J8e9_#p^yn*x`` znqDmLsHHRW*y+2|TlY@UkjhaAH34sm$COXISO7F(T3O{Eyj8m?RKayXM}6omMBZ23 z#anZ{*M-wY_;U2~@1^Xmtf_7aXj|~r1+Od+2Le$*2oQpCbK^IAT6*){XM|Pcphkvs zV|s~K%6O$5kX7&KrIzQ1pI}<{9 literal 0 HcmV?d00001 diff --git a/cave/com.raytheon.viz.product.awips/icons/sample.gif b/cave/com.raytheon.viz.product.awips/icons/sample.gif new file mode 100644 index 0000000000000000000000000000000000000000..34fb3c9d8cb7d489681b7f7aee4bdcd7eaf53610 GIT binary patch literal 983 zcmZ?wbhEHb6krfw_|CxKYUg-n!?izO{@9*?jxd%4aX0yzy`dymabz zw#(eg=y~&N&n)dZv2xzduG}5lraiApo3(c4*{Ylg5#|$JO_EEZ<^|a2`Z*=9ns7DV zy=TR&gYw*7f%auV?ip3tvjRPmcdoho{K?x$_vR?C#t5&<;~V}S*>OMCr>h}%%bLZ9 zmo3`hYEwTICo-TTCZwgTsC&VjZRgJ1eE#fBa^%9R zmmfWS@;bnyJ27HWY}kxYzv(Hl>yu;FCPlAEh+34Muq-8Rb6C)<8qA3{r2e5 z`$vyngh#H=FWlqqvnapfc5%(!sQ4v?r7J61-&eJNEN^;KTK}T7{#i-gJh%G*9vcYdwv_*~xdw!Gz4Va?T!sXyyF@8?w<>X`X=#j%uHV4GRvj@+tE@ zQ%F!a)GKcn^~8abN>4la1UNXVL;{ZWi)lEwyeatDu%Lr6;aASiLrXXW zQm# - + diff --git a/cave/com.raytheon.viz.xdat/META-INF/MANIFEST.MF b/cave/com.raytheon.viz.xdat/META-INF/MANIFEST.MF index 2202c9e47c..86ece15db8 100644 --- a/cave/com.raytheon.viz.xdat/META-INF/MANIFEST.MF +++ b/cave/com.raytheon.viz.xdat/META-INF/MANIFEST.MF @@ -14,7 +14,7 @@ Require-Bundle: org.eclipse.ui, com.raytheon.uf.viz.personalities.cave;bundle-version="1.15.0", 
com.raytheon.viz.ui.personalities.awips;bundle-version="1.12.1174" Bundle-ActivationPolicy: lazy -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Import-Package: com.raytheon.uf.common.ohd, com.raytheon.uf.common.time, com.raytheon.uf.common.time.util, diff --git a/edexOsgi/com.raytheon.edex.plugin.binlightning.legacy/.classpath b/edexOsgi/com.raytheon.edex.plugin.binlightning.legacy/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.edex.plugin.binlightning.legacy/.classpath +++ b/edexOsgi/com.raytheon.edex.plugin.binlightning.legacy/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.edex.plugin.binlightning.legacy/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.edex.plugin.binlightning.legacy/META-INF/MANIFEST.MF index adc21b9440..e1968cb643 100644 --- a/edexOsgi/com.raytheon.edex.plugin.binlightning.legacy/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.edex.plugin.binlightning.legacy/META-INF/MANIFEST.MF @@ -5,4 +5,4 @@ Bundle-SymbolicName: com.raytheon.edex.plugin.binlightning.legacy Bundle-Version: 1.16.0 Bundle-Vendor: RAYTHEON Fragment-Host: com.raytheon.edex.plugin.binlightning;bundle-version="1.14.0" -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 diff --git a/edexOsgi/com.raytheon.edex.plugin.binlightning/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.edex.plugin.binlightning/META-INF/MANIFEST.MF index 0f46d7e326..d39c4f6c5e 100644 --- a/edexOsgi/com.raytheon.edex.plugin.binlightning/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.edex.plugin.binlightning/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Binlightning Plug-in Bundle-SymbolicName: com.raytheon.edex.plugin.binlightning Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.edex.plugin.binlightning.dao Import-Package: 
com.raytheon.edex.esb, com.raytheon.edex.exception, diff --git a/edexOsgi/com.raytheon.edex.product.uframe/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.edex.product.uframe/META-INF/MANIFEST.MF index 9d99866268..a93d3ca042 100644 --- a/edexOsgi/com.raytheon.edex.product.uframe/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.edex.product.uframe/META-INF/MANIFEST.MF @@ -4,4 +4,4 @@ Bundle-Name: Uframe Plug-in Bundle-SymbolicName: com.raytheon.edex.product.uframe;singleton:=true Bundle-Version: 1.12.1174.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 diff --git a/edexOsgi/com.raytheon.messaging.mhs/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.messaging.mhs/META-INF/MANIFEST.MF index 257b0b9b4d..7d1b919c68 100644 --- a/edexOsgi/com.raytheon.messaging.mhs/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.messaging.mhs/META-INF/MANIFEST.MF @@ -4,5 +4,5 @@ Bundle-Name: Mhs Plug-in Bundle-SymbolicName: com.raytheon.messaging.mhs Bundle-Version: 1.12.1174.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.messaging.mhs diff --git a/edexOsgi/com.raytheon.uf.common.archive/.classpath b/edexOsgi/com.raytheon.uf.common.archive/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.archive/.classpath +++ b/edexOsgi/com.raytheon.uf.common.archive/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.archive/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.archive/META-INF/MANIFEST.MF index fdbfc9a0b5..4f380581f9 100644 --- a/edexOsgi/com.raytheon.uf.common.archive/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.archive/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Common Archive Bundle-SymbolicName: com.raytheon.uf.common.archive Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON 
-Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.archive.config, com.raytheon.uf.common.archive.config.select, com.raytheon.uf.common.archive.exception, diff --git a/edexOsgi/com.raytheon.uf.common.awipstools/.classpath b/edexOsgi/com.raytheon.uf.common.awipstools/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.awipstools/.classpath +++ b/edexOsgi/com.raytheon.uf.common.awipstools/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.damagepath/.classpath b/edexOsgi/com.raytheon.uf.common.damagepath/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.damagepath/.classpath +++ b/edexOsgi/com.raytheon.uf.common.damagepath/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.damagepath/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.damagepath/META-INF/MANIFEST.MF index e82b1a0d1a..68c40aa32c 100644 --- a/edexOsgi/com.raytheon.uf.common.damagepath/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.damagepath/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Damage Path Common plugin Bundle-SymbolicName: com.raytheon.uf.common.damagepath Bundle-Version: 1.15.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.serialization.comm;bundle-version="1.14.0", com.raytheon.uf.common.serialization;bundle-version="1.15.0" Export-Package: com.raytheon.uf.common.damagepath.request diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.acars/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.acars/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.acars/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.acars/.classpath @@ -1,6 +1,6 @@ - + diff --git 
a/edexOsgi/com.raytheon.uf.common.dataplugin.acarssounding/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.acarssounding/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.acarssounding/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.acarssounding/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrascat/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrascat/META-INF/MANIFEST.MF index 63e2bbe112..428c73fa9e 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrascat/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrascat/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Bufrascat Plug-in Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.bufrascat Bundle-Version: 1.12.1174.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.dataplugin.bufrascat Require-Bundle: javax.persistence, javax.measure, diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrhdw/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrhdw/META-INF/MANIFEST.MF index 5c73cd8571..dbf7ec50cb 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrhdw/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrhdw/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Bufrhdw Plug-in Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.bufrhdw Bundle-Version: 1.12.1174.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.dataplugin.bufrhdw Require-Bundle: com.raytheon.uf.common.pointdata, javax.persistence, diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmos/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmos/.classpath index ad32c83a78..eca7bdba8f 
100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmos/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmos/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmos/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmos/META-INF/MANIFEST.MF index ed84f2694a..dd35a48445 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmos/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmos/META-INF/MANIFEST.MF @@ -9,6 +9,6 @@ Require-Bundle: org.hibernate;bundle-version="3.5.6", com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174", com.raytheon.uf.common.serialization;bundle-version="1.12.1174", com.raytheon.uf.common.pointdata;bundle-version="1.13.0" -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.common.dataplugin.bufrmos.common diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmthdw/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmthdw/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmthdw/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmthdw/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmthdw/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmthdw/META-INF/MANIFEST.MF index a53878d4f5..9fc69c53c0 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmthdw/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.bufrmthdw/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Bufrmthdw Plug-in Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.bufrmthdw Bundle-Version: 1.12.1174.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.dataplugin.bufrmthdw 
Require-Bundle: com.raytheon.uf.common.pointdata, javax.persistence, diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.cwat/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.cwat/META-INF/MANIFEST.MF index fc52091c05..67b95aa15f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.cwat/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.cwat/META-INF/MANIFEST.MF @@ -11,6 +11,6 @@ Require-Bundle: javax.persistence;bundle-version="1.0.0", com.raytheon.uf.common.geospatial, com.raytheon.uf.common.dataplugin, com.raytheon.uf.common.datastorage -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.common.dataplugin.cwat diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.fog/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.fog/META-INF/MANIFEST.MF index 07bbaa495f..e607dedc3a 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.fog/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.fog/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Fog common Plug-in Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.fog Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.dataplugin.fog, com.raytheon.uf.common.dataplugin.fog.analysis Require-Bundle: javax.persistence;bundle-version="1.0.0", diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.fssobs/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.fssobs/.classpath index 121e527a93..22f30643cb 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.fssobs/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.fssobs/.classpath @@ -1,7 +1,7 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.fssobs/META-INF/MANIFEST.MF 
b/edexOsgi/com.raytheon.uf.common.dataplugin.fssobs/META-INF/MANIFEST.MF index 7a5f2870ee..b5bba80f29 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.fssobs/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.fssobs/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: FSS Obs Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.fssobs Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.dataplugin.fssobs Require-Bundle: com.raytheon.uf.common.dataplugin, com.raytheon.uf.common.serialization, diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.goessounding/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.goessounding/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.goessounding/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.goessounding/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.madis/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.madis/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.madis/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.madis/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.madis/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.madis/META-INF/MANIFEST.MF index 3f884ff841..e8ad006cbd 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.madis/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.madis/META-INF/MANIFEST.MF 
@@ -12,5 +12,5 @@ Require-Bundle: javax.persistence;bundle-version="1.0.0", com.raytheon.uf.common.serialization;bundle-version="1.12.1174", javax.measure;bundle-version="1.0.0", com.raytheon.uf.common.status;bundle-version="1.12.1174" -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.modelsounding/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.modelsounding/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.modelsounding/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.modelsounding/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.nucaps/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.nucaps/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.nucaps/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.nucaps/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.nucaps/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.nucaps/META-INF/MANIFEST.MF index 89f956db43..b0e334d0dd 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.nucaps/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.nucaps/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: NUCAPS Common Plugin Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.npp.nucaps Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174", com.raytheon.uf.common.dataplugin.npp.sounding;bundle-version="1.0.0";visibility:=reexport, com.raytheon.uf.common.pointdata;bundle-version="1.12.1174", diff --git 
a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.sounding/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.sounding/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.sounding/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.sounding/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.sounding/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.sounding/META-INF/MANIFEST.MF index a84d8c6110..a883d93756 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.sounding/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.sounding/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: NPP Sounding DataPlugin Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.npp.sounding Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174", com.raytheon.uf.common.serialization;bundle-version="1.12.1174", javax.persistence;bundle-version="1.0.0", diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.viirs/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.viirs/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.npp.viirs/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.npp.viirs/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.obs/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.obs/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.obs/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.obs/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.poessounding/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.poessounding/.classpath index 
ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.poessounding/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.poessounding/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.preciprate/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.preciprate/META-INF/MANIFEST.MF index 3b907022bb..fd5aa56adf 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.preciprate/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.preciprate/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Perciprate Plug-in Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.preciprate Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Require-Bundle: javax.persistence;bundle-version="1.0.0", com.raytheon.uf.common.serialization, diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.qpf/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.qpf/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.qpf/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.qpf/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.qpf/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.qpf/META-INF/MANIFEST.MF index 5c77a40f73..eb22b30275 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.qpf/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.qpf/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: QPF common Plug-in Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.qpf Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.dataplugin.qpf Require-Bundle: javax.persistence;bundle-version="1.0.0", 
com.raytheon.uf.common.serialization, diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.redbook/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.redbook/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.redbook/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.redbook/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.satellite/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.satellite/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.satellite/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.satellite/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.satellite/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.satellite/META-INF/MANIFEST.MF index 256026bd73..659d200023 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.satellite/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.satellite/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Satellite Common Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.satellite Bundle-Version: 1.15.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174", com.raytheon.uf.common.serialization;bundle-version="1.12.1174", javax.persistence;bundle-version="1.0.0", diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.scan/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.scan/META-INF/MANIFEST.MF index 5a1a87031b..e49cf0e483 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.scan/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.scan/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: QPF common Plug-in Bundle-SymbolicName: com.raytheon.uf.common.dataplugin.scan Bundle-Version: 1.14.0.qualifier 
Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.dataplugin.scan, com.raytheon.uf.common.dataplugin.scan.data Require-Bundle: javax.persistence;bundle-version="1.0.0", diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.shef/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.shef/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.vil/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dataplugin.vil/META-INF/MANIFEST.MF index 242c4f13db..dc1845d54b 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.vil/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.vil/META-INF/MANIFEST.MF @@ -14,5 +14,5 @@ Require-Bundle: javax.persistence;bundle-version="1.0.0", Import-Package: com.raytheon.uf.common.dataplugin.radar, org.apache.commons.logging Export-Package: com.raytheon.uf.common.dataplugin.vil -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/.classpath b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.warning/.classpath +++ b/edexOsgi/com.raytheon.uf.common.dataplugin.warning/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.dissemination/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.dissemination/META-INF/MANIFEST.MF index fc2474486a..0883585ed2 100644 --- a/edexOsgi/com.raytheon.uf.common.dissemination/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.dissemination/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Dissemination Plug-in Bundle-SymbolicName: 
com.raytheon.uf.common.dissemination Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Import-Package: com.raytheon.uf.common.serialization.comm Export-Package: com.raytheon.uf.common.dissemination Require-Bundle: com.raytheon.uf.common.serialization;bundle-version="1.11.17", diff --git a/edexOsgi/com.raytheon.uf.common.gridcoverage/.classpath b/edexOsgi/com.raytheon.uf.common.gridcoverage/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.gridcoverage/.classpath +++ b/edexOsgi/com.raytheon.uf.common.gridcoverage/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.hydro/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.hydro/META-INF/MANIFEST.MF index 246a56c2c8..2ef8c3d81a 100644 --- a/edexOsgi/com.raytheon.uf.common.hydro/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.hydro/META-INF/MANIFEST.MF @@ -10,7 +10,7 @@ Require-Bundle: org.geotools;bundle-version="2.5.2", com.raytheon.uf.common.serialization, com.raytheon.uf.common.util;bundle-version="1.14.0", com.raytheon.uf.common.status;bundle-version="1.15.0" -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.common.hydro, com.raytheon.uf.common.hydro.data, diff --git a/edexOsgi/com.raytheon.uf.common.localization.python/.classpath b/edexOsgi/com.raytheon.uf.common.localization.python/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.localization.python/.classpath +++ b/edexOsgi/com.raytheon.uf.common.localization.python/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.localization.python/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.localization.python/META-INF/MANIFEST.MF index b6bbbaffe7..96a8ec80a4 100644 --- 
a/edexOsgi/com.raytheon.uf.common.localization.python/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.localization.python/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Python Bundle-SymbolicName: com.raytheon.uf.common.localization.python Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.localization;bundle-version="1.12.1174", com.raytheon.uf.common.python;bundle-version="1.12.1174", com.raytheon.uf.common.python.concurrent;bundle-version="1.0.0", diff --git a/edexOsgi/com.raytheon.uf.common.management/.classpath b/edexOsgi/com.raytheon.uf.common.management/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.management/.classpath +++ b/edexOsgi/com.raytheon.uf.common.management/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.management/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.management/META-INF/MANIFEST.MF index fcb733ffe3..b761018253 100644 --- a/edexOsgi/com.raytheon.uf.common.management/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.management/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Management Bundle-SymbolicName: com.raytheon.uf.common.management Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Import-Package: com.raytheon.uf.common.serialization.comm Require-Bundle: com.raytheon.uf.common.serialization diff --git a/edexOsgi/com.raytheon.uf.common.monitor.cpg/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.monitor.cpg/META-INF/MANIFEST.MF index 92ef3a3494..d34745b99b 100644 --- a/edexOsgi/com.raytheon.uf.common.monitor.cpg/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.monitor.cpg/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Cpg Plug-in 
Bundle-SymbolicName: com.raytheon.uf.common.monitor.cpg Bundle-Version: 1.16.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.common.monitor.cpg Require-Bundle: com.raytheon.uf.common.localization;bundle-version="1.11.17", diff --git a/edexOsgi/com.raytheon.uf.common.mpe.gribit2/.classpath b/edexOsgi/com.raytheon.uf.common.mpe.gribit2/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.mpe.gribit2/.classpath +++ b/edexOsgi/com.raytheon.uf.common.mpe.gribit2/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.nc.bufr/.classpath b/edexOsgi/com.raytheon.uf.common.nc.bufr/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.nc.bufr/.classpath +++ b/edexOsgi/com.raytheon.uf.common.nc.bufr/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.nc4/.classpath b/edexOsgi/com.raytheon.uf.common.nc4/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.nc4/.classpath +++ b/edexOsgi/com.raytheon.uf.common.nc4/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.nc4/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.nc4/META-INF/MANIFEST.MF index 713fbba0e4..cb506d79dd 100644 --- a/edexOsgi/com.raytheon.uf.common.nc4/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.nc4/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Nc4 Bundle-SymbolicName: com.raytheon.uf.common.nc4 Bundle-Version: 1.15.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.nc4, com.raytheon.uf.common.nc4.cf Require-Bundle: edu.mit.ll.netcdf;bundle-version="1.3.0", diff --git a/edexOsgi/com.raytheon.uf.common.ohd/.classpath b/edexOsgi/com.raytheon.uf.common.ohd/.classpath 
index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.ohd/.classpath +++ b/edexOsgi/com.raytheon.uf.common.ohd/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.plugin.hpe/.classpath b/edexOsgi/com.raytheon.uf.common.plugin.hpe/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.plugin.hpe/.classpath +++ b/edexOsgi/com.raytheon.uf.common.plugin.hpe/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.registry.event/.classpath b/edexOsgi/com.raytheon.uf.common.registry.event/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.registry.event/.classpath +++ b/edexOsgi/com.raytheon.uf.common.registry.event/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.registry.event/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.registry.event/META-INF/MANIFEST.MF index a452b11917..952151d3dd 100644 --- a/edexOsgi/com.raytheon.uf.common.registry.event/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.registry.event/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: com.raytheon.uf.common.registry.event Bundle-SymbolicName: com.raytheon.uf.common.registry.event Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.common.registry.event Require-Bundle: com.raytheon.uf.common.serialization;bundle-version="1.12.1174", com.raytheon.uf.common.event;bundle-version="1.0.0", diff --git a/edexOsgi/com.raytheon.uf.common.registry.schemas.ebxml/.classpath b/edexOsgi/com.raytheon.uf.common.registry.schemas.ebxml/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.registry.schemas.ebxml/.classpath +++ b/edexOsgi/com.raytheon.uf.common.registry.schemas.ebxml/.classpath @@ -1,6 +1,6 @@ - + diff --git 
a/edexOsgi/com.raytheon.uf.common.registry.schemas.ebxml/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.registry.schemas.ebxml/META-INF/MANIFEST.MF index 5cea5868d6..61e7b74b6b 100644 --- a/edexOsgi/com.raytheon.uf.common.registry.schemas.ebxml/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.registry.schemas.ebxml/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Ebxml Bundle-SymbolicName: com.raytheon.uf.common.registry.schemas.ebxml Bundle-Version: 1.14.1.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: javax.persistence;bundle-version="1.0.0", org.hibernate;bundle-version="1.0.0", com.raytheon.uf.common.serialization;bundle-version="1.12.1174", diff --git a/edexOsgi/com.raytheon.uf.common.registry.schemas.iso19115/.classpath b/edexOsgi/com.raytheon.uf.common.registry.schemas.iso19115/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.registry.schemas.iso19115/.classpath +++ b/edexOsgi/com.raytheon.uf.common.registry.schemas.iso19115/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.registry.schemas.iso19115/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.registry.schemas.iso19115/META-INF/MANIFEST.MF index b76cc61163..d3f41b02ee 100644 --- a/edexOsgi/com.raytheon.uf.common.registry.schemas.iso19115/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.registry.schemas.iso19115/META-INF/MANIFEST.MF @@ -4,4 +4,4 @@ Bundle-Name: Iso19115 Bundle-SymbolicName: com.raytheon.uf.common.registry.schemas.iso19115 Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 diff --git a/edexOsgi/com.raytheon.uf.common.site/.classpath b/edexOsgi/com.raytheon.uf.common.site/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.site/.classpath +++ 
b/edexOsgi/com.raytheon.uf.common.site/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.site/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.site/META-INF/MANIFEST.MF index 9c1c1ab838..abb81ccfeb 100644 --- a/edexOsgi/com.raytheon.uf.common.site/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.site/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Site Plug-in Bundle-SymbolicName: com.raytheon.uf.common.site Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.localization, com.raytheon.uf.common.status, com.raytheon.uf.common.serialization, diff --git a/edexOsgi/com.raytheon.uf.common.sounding/.classpath b/edexOsgi/com.raytheon.uf.common.sounding/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.sounding/.classpath +++ b/edexOsgi/com.raytheon.uf.common.sounding/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.tafqueue/.classpath b/edexOsgi/com.raytheon.uf.common.tafqueue/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.tafqueue/.classpath +++ b/edexOsgi/com.raytheon.uf.common.tafqueue/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.tafqueue/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.tafqueue/META-INF/MANIFEST.MF index bc25eac467..6d49289520 100644 --- a/edexOsgi/com.raytheon.uf.common.tafqueue/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.tafqueue/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Tafqueue Bundle-SymbolicName: com.raytheon.uf.common.tafqueue Bundle-Version: 1.15.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.serialization.comm, javax.persistence;bundle-version="1.0.0", 
org.apache.commons.lang3;bundle-version="3.4.0", diff --git a/edexOsgi/com.raytheon.uf.common.wmo/.classpath b/edexOsgi/com.raytheon.uf.common.wmo/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.wmo/.classpath +++ b/edexOsgi/com.raytheon.uf.common.wmo/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.xmrg/.classpath b/edexOsgi/com.raytheon.uf.common.xmrg/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.common.xmrg/.classpath +++ b/edexOsgi/com.raytheon.uf.common.xmrg/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.common.xmrg/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.common.xmrg/META-INF/MANIFEST.MF index 4d1cdc043b..f06339251a 100644 --- a/edexOsgi/com.raytheon.uf.common.xmrg/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.common.xmrg/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Xmrg Bundle-SymbolicName: com.raytheon.uf.common.xmrg Bundle-Version: 1.16.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.geospatial;bundle-version="1.15.1", com.raytheon.uf.common.util;bundle-version="1.15.0", com.raytheon.uf.common.ohd;bundle-version="1.16.0" diff --git a/edexOsgi/com.raytheon.uf.edex.archive/.classpath b/edexOsgi/com.raytheon.uf.edex.archive/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.archive/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.archive/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.archive/META-INF/MANIFEST.MF index c81f8f74a3..18cf8ed049 100644 --- a/edexOsgi/com.raytheon.uf.edex.archive/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.archive/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Edex Archive Bundle-SymbolicName: com.raytheon.uf.edex.archive Bundle-Version: 
1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.edex.archive, com.raytheon.uf.edex.archive.purge Import-Package: com.raytheon.uf.common.archive.config, diff --git a/edexOsgi/com.raytheon.uf.edex.awipstools/.classpath b/edexOsgi/com.raytheon.uf.edex.awipstools/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.awipstools/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.awipstools/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.awipstools/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.awipstools/META-INF/MANIFEST.MF index 8b01650bc3..e2ebbb399a 100644 --- a/edexOsgi/com.raytheon.uf.edex.awipstools/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.awipstools/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Awipstools Plug-in Bundle-SymbolicName: com.raytheon.uf.edex.awipstools Bundle-Version: 1.12.1174.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.serialization;bundle-version="1.12.1112", com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1112", com.raytheon.uf.common.awipstools;bundle-version="1.12.1112", diff --git a/edexOsgi/com.raytheon.uf.edex.cpgsrv/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.cpgsrv/META-INF/MANIFEST.MF index 7ef45e0722..e3ffadf2d1 100644 --- a/edexOsgi/com.raytheon.uf.edex.cpgsrv/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.cpgsrv/META-INF/MANIFEST.MF @@ -11,7 +11,7 @@ Require-Bundle: com.raytheon.edex.common, com.raytheon.uf.edex.event;bundle-version="1.0.0", com.raytheon.uf.common.event;bundle-version="1.0.0", org.slf4j;bundle-version="1.7.12" -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: 
com.raytheon.uf.edex.cpgsrv Import-Package: com.raytheon.uf.common.monitor.cpg diff --git a/edexOsgi/com.raytheon.uf.edex.dat.utils/.classpath b/edexOsgi/com.raytheon.uf.edex.dat.utils/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.dat.utils/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.dat.utils/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.dat.utils/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.dat.utils/META-INF/MANIFEST.MF index 28a0b456a9..c152d47830 100644 --- a/edexOsgi/com.raytheon.uf.edex.dat.utils/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.dat.utils/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: DatUtils Plug-in Bundle-SymbolicName: com.raytheon.uf.edex.dat.utils Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Import-Package: com.raytheon.uf.common.dataplugin.grid, com.raytheon.uf.common.dataplugin.scan.data, com.raytheon.uf.common.localization, diff --git a/edexOsgi/com.raytheon.uf.edex.grid.staticdata/.classpath b/edexOsgi/com.raytheon.uf.edex.grid.staticdata/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.grid.staticdata/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.grid.staticdata/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.gridcoverage/.classpath b/edexOsgi/com.raytheon.uf.edex.gridcoverage/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.gridcoverage/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.gridcoverage/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.gridcoverage/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.gridcoverage/META-INF/MANIFEST.MF index 4888f499da..b9be75c122 100644 --- a/edexOsgi/com.raytheon.uf.edex.gridcoverage/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.gridcoverage/META-INF/MANIFEST.MF @@ 
-4,7 +4,7 @@ Bundle-Name: Grid Coverage Bundle-SymbolicName: com.raytheon.uf.edex.gridcoverage Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Import-Package: com.raytheon.uf.common.dataplugin, com.raytheon.uf.common.dataplugin.persist, com.raytheon.uf.common.geospatial, diff --git a/edexOsgi/com.raytheon.uf.edex.maintenance/.classpath b/edexOsgi/com.raytheon.uf.edex.maintenance/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.maintenance/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.maintenance/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.management/.classpath b/edexOsgi/com.raytheon.uf.edex.management/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.management/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.management/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.management/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.management/META-INF/MANIFEST.MF index 6d0e8b7b8b..b6c8d74946 100644 --- a/edexOsgi/com.raytheon.uf.edex.management/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.management/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Management Bundle-SymbolicName: com.raytheon.uf.edex.management Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Import-Package: com.raytheon.uf.common.comm, com.raytheon.uf.common.management.request.diagnostic, com.raytheon.uf.common.serialization.comm diff --git a/edexOsgi/com.raytheon.uf.edex.menus/.classpath b/edexOsgi/com.raytheon.uf.edex.menus/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.menus/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.menus/.classpath @@ -1,6 +1,6 @@ - + diff --git 
a/edexOsgi/com.raytheon.uf.edex.menus/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.menus/META-INF/MANIFEST.MF index a69beddf3a..f0f4c5deed 100644 --- a/edexOsgi/com.raytheon.uf.edex.menus/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.menus/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Edex Menus Bundle-SymbolicName: com.raytheon.uf.edex.menus Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.menus;bundle-version="1.12.1174";visibility:=reexport, com.raytheon.uf.common.localization;bundle-version="1.12.1174", com.raytheon.uf.common.status;bundle-version="1.12.1174", diff --git a/edexOsgi/com.raytheon.uf.edex.metartohmdbsrv/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.metartohmdbsrv/META-INF/MANIFEST.MF index 4b2dae9e72..3aba50d6a2 100644 --- a/edexOsgi/com.raytheon.uf.edex.metartohmdbsrv/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.metartohmdbsrv/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Metartohmdb Plug-in Bundle-SymbolicName: com.raytheon.uf.edex.metartohmdbsrv Bundle-Version: 1.16.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.edex.common;bundle-version="1.11.1", com.raytheon.uf.edex.decodertools;bundle-version="1.0.0", org.geotools, diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.acars/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.acars/META-INF/MANIFEST.MF index 8cc163a9b3..b14eb359d9 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.acars/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.acars/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Acars Decoder Plug-in Bundle-SymbolicName: com.raytheon.uf.edex.plugin.acars Bundle-Version: 1.14.1.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 
+Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.edex.common;bundle-version="1.14.0", com.raytheon.uf.common.dataplugin.acars;bundle-version="1.14.0", com.raytheon.uf.common.pointdata;bundle-version="1.13.0", diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrmthdw/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.bufrmthdw/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrmthdw/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrmthdw/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.bufrobs/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.bufrobs/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.bufrobs/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.bufrobs/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.cwa/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.cwa/META-INF/MANIFEST.MF index 376d227812..fccd5c7e95 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.cwa/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.cwa/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Cwa Plug-in Bundle-SymbolicName: com.raytheon.uf.edex.plugin.cwa Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.dataplugin.cwa;bundle-version="1.0.0", com.raytheon.uf.common.pointdata, com.raytheon.uf.edex.pointdata, diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.cwat/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.cwat/META-INF/MANIFEST.MF index 8e1d176c60..2f90714718 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.cwat/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.cwat/META-INF/MANIFEST.MF @@ -24,7 +24,7 @@ Require-Bundle: com.raytheon.uf.edex.cpgsrv;bundle-version="1.11.7";resolution:= 
com.raytheon.uf.edex.database, com.raytheon.uf.common.datastorage, org.slf4j;bundle-version="1.7.12" -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.edex.plugin.cwat, com.raytheon.uf.edex.plugin.cwat.common Import-Package: com.raytheon.edex.urifilter, diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/META-INF/MANIFEST.MF index 57ea32702a..3d3735d7ee 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.ffmp/META-INF/MANIFEST.MF @@ -23,7 +23,7 @@ Import-Package: com.raytheon.uf.common.dataplugin.grid, com.raytheon.uf.common.status, com.raytheon.uf.edex.dat.utils, com.raytheon.uf.edex.plugin.scan.common -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.edex.plugin.ffmp, com.raytheon.uf.edex.plugin.ffmp.common diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.fog/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.fog/META-INF/MANIFEST.MF index 05144997f9..fd0419ce0d 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.fog/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.fog/META-INF/MANIFEST.MF @@ -16,7 +16,7 @@ Require-Bundle: com.raytheon.uf.common.monitor;bundle-version="1.0.0", com.raytheon.uf.edex.decodertools;bundle-version="1.12.1174" Import-Package: com.raytheon.uf.common.status, com.raytheon.uf.edex.decodertools.time -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.edex.plugin.fog, com.raytheon.uf.edex.plugin.fog.common diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/.classpath index ad32c83a78..eca7bdba8f 100644 --- 
a/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/META-INF/MANIFEST.MF index ff0ece16c6..c54338e7b2 100755 --- a/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.fssobs/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Datobs Bundle-SymbolicName: com.raytheon.uf.edex.plugin.fssobs Bundle-Version: 1.14.1.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.edex.cpgsrv;bundle-version="1.12.1153", com.raytheon.edex.common;bundle-version="1.12.1153", com.raytheon.uf.common.dataplugin.fssobs;bundle-version="1.0.0", diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.hpe/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.hpe/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.hpe/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.hpe/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.hpe/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.hpe/META-INF/MANIFEST.MF index e6ce8326e6..c51ae2e215 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.hpe/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.hpe/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Hpe Grib Preprocessor Bundle-SymbolicName: com.raytheon.uf.edex.plugin.hpe Bundle-Version: 1.16.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Require-Bundle: com.raytheon.uf.common.status;bundle-version="1.12.1174", com.raytheon.uf.edex.database;bundle-version="1.0.0", diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.loctables/.classpath 
b/edexOsgi/com.raytheon.uf.edex.plugin.loctables/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.loctables/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.loctables/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.madis/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.madis/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.madis/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.madis/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.madis/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.madis/META-INF/MANIFEST.MF index 3050fb7a5a..7048bb88bb 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.madis/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.madis/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Madis Bundle-SymbolicName: com.raytheon.uf.edex.plugin.madis Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.edex.pointdata;bundle-version="1.12.1174", com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174", com.raytheon.uf.common.dataplugin.madis;bundle-version="1.0.0", diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.mpe.dpa/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.mpe.dpa/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.mpe.dpa/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.mpe.dpa/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.mpe.test/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.mpe.test/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.mpe.test/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.mpe.test/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.mpe/.classpath 
b/edexOsgi/com.raytheon.uf.edex.plugin.mpe/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.mpe/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.mpe/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/META-INF/MANIFEST.MF index a582ce9655..3bd36012b1 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp.crimss/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Crimss Bundle-SymbolicName: com.raytheon.uf.edex.plugin.npp.crimss Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.edex.plugin.npp;bundle-version="1.0.0", com.raytheon.uf.edex.plugin.npp.sounding;bundle-version="1.0.0", com.raytheon.uf.common.dataplugin.npp.crimss;bundle-version="1.0.0";visibility:=reexport, diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/META-INF/MANIFEST.MF index 5a15b0636c..248c6f14bf 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/META-INF/MANIFEST.MF +++ 
b/edexOsgi/com.raytheon.uf.edex.plugin.npp.nucaps/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: NUCAPS EDEX Plugin Bundle-SymbolicName: com.raytheon.uf.edex.plugin.npp.nucaps Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.edex.plugin.npp;bundle-version="1.0.0", com.raytheon.uf.edex.plugin.npp.sounding;bundle-version="1.0.0", com.raytheon.uf.common.dataplugin.npp.nucaps;bundle-version="1.0.0";visibility:=reexport, diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.sounding/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.npp.sounding/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.sounding/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp.sounding/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.sounding/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.npp.sounding/META-INF/MANIFEST.MF index 750aa4622e..6cd743c49e 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.sounding/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp.sounding/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: NPP Sounding Bundle-SymbolicName: com.raytheon.uf.edex.plugin.npp.sounding Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.edex.plugin.npp;bundle-version="1.0.0", com.raytheon.uf.common.dataplugin.npp.sounding;bundle-version="1.0.0";visibility:=reexport, com.raytheon.uf.common.dataplugin;bundle-version="1.12.1174", diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp.viirs/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.npp.viirs/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp.viirs/.classpath +++ 
b/edexOsgi/com.raytheon.uf.edex.plugin.npp.viirs/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.npp/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.npp/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.npp/META-INF/MANIFEST.MF index 3ab81bd202..49123d2d0b 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.npp/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.npp/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: NPP Common EDEX Bundle-SymbolicName: com.raytheon.uf.edex.plugin.npp Bundle-Version: 1.15.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.edex.common;bundle-version="1.12.1174";visibility:=reexport, ucar.nc2;bundle-version="1.0.0";visibility:=reexport, org.geotools;bundle-version="2.6.4" diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.preciprate/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.preciprate/META-INF/MANIFEST.MF index 24a19bc80d..23248b234f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.preciprate/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.preciprate/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Preciprate Plug-in Bundle-SymbolicName: com.raytheon.uf.edex.plugin.preciprate Bundle-Version: 1.15.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.edex.cpgsrv;bundle-version="1.11.7";resolution:=optional, com.raytheon.edex.common;bundle-version="1.11.7", com.raytheon.uf.common.monitor;bundle-version="1.11.7", diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.qpf/META-INF/MANIFEST.MF 
b/edexOsgi/com.raytheon.uf.edex.plugin.qpf/META-INF/MANIFEST.MF index d093cbdb78..8c887e8969 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.qpf/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.qpf/META-INF/MANIFEST.MF @@ -24,6 +24,6 @@ Import-Package: com.raytheon.edex.urifilter, com.raytheon.uf.common.dataplugin.grid, com.raytheon.uf.edex.core, com.raytheon.uf.edex.plugin.scan.common -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Export-Package: com.raytheon.uf.edex.plugin.qpf, com.raytheon.uf.edex.plugin.qpf.common diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.satellite.mcidas/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.scan.common/.classpath b/edexOsgi/com.raytheon.uf.edex.plugin.scan.common/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.scan.common/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.plugin.scan.common/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.scan.common/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.scan.common/META-INF/MANIFEST.MF index f749ad985d..4344890bbf 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.scan.common/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.scan.common/META-INF/MANIFEST.MF @@ -6,7 +6,7 @@ Bundle-Version: 1.0.0.qualifier Bundle-Vendor: RAYTHEON Require-Bundle: com.raytheon.edex.common;bundle-version="1.12.1174", com.raytheon.uf.common.sounding -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Import-Package: com.raytheon.edex.plugin.radar.dao, com.raytheon.uf.common.dataplugin, diff --git 
a/edexOsgi/com.raytheon.uf.edex.plugin.scan/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.scan/META-INF/MANIFEST.MF index bc05987937..7db459dec2 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.scan/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.scan/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Scan Plug-in Bundle-SymbolicName: com.raytheon.uf.edex.plugin.scan Bundle-Version: 1.14.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.dataplugin.binlightning;bundle-version="1.11.26", com.raytheon.uf.common.dataplugin.scan;bundle-version="1.11.26", com.raytheon.uf.common.monitor;bundle-version="1.11.26", diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.vaa/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.vaa/META-INF/MANIFEST.MF index c3f80005ea..4ddfbb1025 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.vaa/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.vaa/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Vaa Plug-in Bundle-SymbolicName: com.raytheon.uf.edex.plugin.vaa Bundle-Version: 1.13.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: org.geotools, javax.persistence, com.raytheon.uf.common.dataplugin, diff --git a/edexOsgi/com.raytheon.uf.edex.plugin.vil/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.plugin.vil/META-INF/MANIFEST.MF index 3014aac5ff..9d8e3e1238 100644 --- a/edexOsgi/com.raytheon.uf.edex.plugin.vil/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.plugin.vil/META-INF/MANIFEST.MF @@ -22,7 +22,7 @@ Import-Package: com.raytheon.edex.urifilter, com.raytheon.uf.common.time, com.raytheon.uf.common.util.registry, com.raytheon.uf.edex.plugin.scan.common -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 
Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.edex.plugin.vil, com.raytheon.uf.edex.plugin.vil.common diff --git a/edexOsgi/com.raytheon.uf.edex.registry.ebxml/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.registry.ebxml/META-INF/MANIFEST.MF index 24f6be76e5..3aac4eeb80 100644 --- a/edexOsgi/com.raytheon.uf.edex.registry.ebxml/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.registry.ebxml/META-INF/MANIFEST.MF @@ -4,7 +4,7 @@ Bundle-Name: Registry Bundle-SymbolicName: com.raytheon.uf.edex.registry.ebxml Bundle-Version: 1.16.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Require-Bundle: com.raytheon.uf.common.registry.schemas.ebxml;bundle-version="1.0.0", org.apache.commons.beanutils;bundle-version="1.8.3", org.apache.xml.security, diff --git a/edexOsgi/com.raytheon.uf.edex.registry.request/.classpath b/edexOsgi/com.raytheon.uf.edex.registry.request/.classpath index 098194ca4b..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.registry.request/.classpath +++ b/edexOsgi/com.raytheon.uf.edex.registry.request/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.registry.request/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.registry.request/META-INF/MANIFEST.MF index b76389e602..815e26df79 100644 --- a/edexOsgi/com.raytheon.uf.edex.registry.request/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.registry.request/META-INF/MANIFEST.MF @@ -4,4 +4,4 @@ Bundle-Name: Request Bundle-SymbolicName: com.raytheon.uf.edex.registry.request Bundle-Version: 1.15.0.qualifier Bundle-Vendor: RAYTHEON -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 diff --git a/edexOsgi/com.raytheon.uf.edex.site/.classpath b/edexOsgi/com.raytheon.uf.edex.site/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/edexOsgi/com.raytheon.uf.edex.site/.classpath +++ 
b/edexOsgi/com.raytheon.uf.edex.site/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/com.raytheon.uf.edex.site/META-INF/MANIFEST.MF b/edexOsgi/com.raytheon.uf.edex.site/META-INF/MANIFEST.MF index a4c60a59a8..49af8becd8 100644 --- a/edexOsgi/com.raytheon.uf.edex.site/META-INF/MANIFEST.MF +++ b/edexOsgi/com.raytheon.uf.edex.site/META-INF/MANIFEST.MF @@ -9,7 +9,7 @@ Require-Bundle: com.raytheon.edex.common, com.raytheon.uf.common.serialization.comm;bundle-version="1.12.1174", com.raytheon.uf.common.localization;bundle-version="1.12.1174", com.raytheon.uf.common.status;bundle-version="1.12.1174" -Bundle-RequiredExecutionEnvironment: JavaSE-1.6 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Bundle-ActivationPolicy: lazy Export-Package: com.raytheon.uf.edex.site, com.raytheon.uf.edex.site.notify diff --git a/edexOsgi/gov.nasa.msfc.sport.edex.plugin.lma/.classpath b/edexOsgi/gov.nasa.msfc.sport.edex.plugin.lma/.classpath index ad32c83a78..eca7bdba8f 100755 --- a/edexOsgi/gov.nasa.msfc.sport.edex.plugin.lma/.classpath +++ b/edexOsgi/gov.nasa.msfc.sport.edex.plugin.lma/.classpath @@ -1,6 +1,6 @@ - + diff --git a/edexOsgi/gov.nasa.msfc.sport.edex.plugin.lma/META-INF/MANIFEST.MF b/edexOsgi/gov.nasa.msfc.sport.edex.plugin.lma/META-INF/MANIFEST.MF index 2e11e804b9..b4d38fdad1 100755 --- a/edexOsgi/gov.nasa.msfc.sport.edex.plugin.lma/META-INF/MANIFEST.MF +++ b/edexOsgi/gov.nasa.msfc.sport.edex.plugin.lma/META-INF/MANIFEST.MF @@ -11,7 +11,7 @@ Require-Bundle: com.raytheon.edex.common, com.raytheon.uf.common.dataplugin.grid, com.raytheon.uf.edex.plugin.level Export-Package: gov.nasa.msfc.sport.edex.plugin.lma -Bundle-RequiredExecutionEnvironment: JavaSE-1.7 +Bundle-RequiredExecutionEnvironment: JavaSE-1.8 Import-Package: com.raytheon.uf.common.localization, com.raytheon.uf.common.menus, com.raytheon.uf.common.menus.xml, diff --git a/javaUtilities/com.raytheon.wes2bridge.common/.classpath b/javaUtilities/com.raytheon.wes2bridge.common/.classpath index ad32c83a78..eca7bdba8f 
100644 --- a/javaUtilities/com.raytheon.wes2bridge.common/.classpath +++ b/javaUtilities/com.raytheon.wes2bridge.common/.classpath @@ -1,6 +1,6 @@ - + diff --git a/javaUtilities/com.raytheon.wes2bridge.configuration/.classpath b/javaUtilities/com.raytheon.wes2bridge.configuration/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/javaUtilities/com.raytheon.wes2bridge.configuration/.classpath +++ b/javaUtilities/com.raytheon.wes2bridge.configuration/.classpath @@ -1,6 +1,6 @@ - + diff --git a/javaUtilities/com.raytheon.wes2bridge.datalink/.classpath b/javaUtilities/com.raytheon.wes2bridge.datalink/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/javaUtilities/com.raytheon.wes2bridge.datalink/.classpath +++ b/javaUtilities/com.raytheon.wes2bridge.datalink/.classpath @@ -1,6 +1,6 @@ - + diff --git a/javaUtilities/com.raytheon.wes2bridge.manager/.classpath b/javaUtilities/com.raytheon.wes2bridge.manager/.classpath index ad32c83a78..eca7bdba8f 100644 --- a/javaUtilities/com.raytheon.wes2bridge.manager/.classpath +++ b/javaUtilities/com.raytheon.wes2bridge.manager/.classpath @@ -1,6 +1,6 @@ - + From 751cbae7b8cbfbf46142bee6b2a71a484822c33e Mon Sep 17 00:00:00 2001 From: Michael James Date: Fri, 7 Sep 2018 15:03:49 -0600 Subject: [PATCH 02/11] unsupported characters in GridUpdater --- .../raytheon/viz/grid/inv/GridUpdater.java | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/GridUpdater.java b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/GridUpdater.java index c1b96c0780..f3051566f7 100644 --- a/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/GridUpdater.java +++ b/cave/com.raytheon.viz.grid/src/com/raytheon/viz/grid/inv/GridUpdater.java @@ -203,31 +203,31 @@ public class GridUpdater extends GridInventoryUpdater { * real state of the record here and it is left to the receiver of * updates to figure it out. 
*/ - GridRecord schrödingersRecord = new GridRecord(); + GridRecord schrodingersRecord = new GridRecord(); DataTime time = record.getDataTime(); - schrödingersRecord.setDataTime(new DataTime(time.getRefTime(), + schrodingersRecord.setDataTime(new DataTime(time.getRefTime(), time.getFcstTime() - value.timeOffset)); - schrödingersRecord.setDatasetId(value.node.getModelName()); + schrodingersRecord.setDatasetId(value.node.getModelName()); Parameter param = new Parameter( value.node.getDesc().getAbbreviation(), value.node.getDesc().getName(), value.node.getDesc().getUnit()); - schrödingersRecord.setParameter(param); - schrödingersRecord.setLevel(value.node.getLevel()); + schrodingersRecord.setParameter(param); + schrodingersRecord.setLevel(value.node.getLevel()); if (value.node instanceof GatherLevelNode) { - schrödingersRecord.setEnsembleId(null); + schrodingersRecord.setEnsembleId(null); } else { - schrödingersRecord.setEnsembleId(record.getEnsembleId()); + schrodingersRecord.setEnsembleId(record.getEnsembleId()); } - schrödingersRecord.setSecondaryId(record.getSecondaryId()); - schrödingersRecord.setLocation(record.getLocation()); + schrodingersRecord.setSecondaryId(record.getSecondaryId()); + schrodingersRecord.setLocation(record.getLocation()); try { - uriUpdateQueue.put(schrödingersRecord.getDataURI()); + uriUpdateQueue.put(schrodingersRecord.getDataURI()); } catch (InterruptedException e) { statusHandler.handle(Priority.PROBLEM, "Failed to send derived update for " - + schrödingersRecord.getDataURI(), + + schrodingersRecord.getDataURI(), e); } } From a43e3bfe190c197291b6eb80b85d800f80674472 Mon Sep 17 00:00:00 2001 From: Michael James Date: Fri, 7 Sep 2018 15:07:12 -0600 Subject: [PATCH 03/11] case sensitivity --- .../base/gfe/smartinit/WNAWAVE.py | 39 --- .../base/grib/models/GribModels_FNMO-58.xml | 100 ------ .../base/colormaps/Grid/gridded data.cmap | 284 ------------------ 3 files changed, 423 deletions(-) delete mode 100644 
edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/WNAWAVE.py delete mode 100644 edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/GribModels_FNMO-58.xml delete mode 100644 edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/colormaps/Grid/gridded data.cmap diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/WNAWAVE.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/WNAWAVE.py deleted file mode 100644 index 3813bbfef4..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/WNAWAVE.py +++ /dev/null @@ -1,39 +0,0 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. 
Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - - - -from GWW import * - -class WNAWAVEForecaster(GWWForecaster): - def __init__(self): - Forecaster.__init__(self, "WNAWAVE", "WNAWAVE") - -def main(): - WNAWAVEForecaster().run() - diff --git a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/GribModels_FNMO-58.xml b/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/GribModels_FNMO-58.xml deleted file mode 100644 index d863fc8dc8..0000000000 --- a/edexOsgi/com.raytheon.edex.plugin.grib/utility/common_static/base/grib/models/GribModels_FNMO-58.xml +++ /dev/null @@ -1,100 +0,0 @@ - - - - - - - - - - nogaps -

58
- 0 - - 401 - 362181001 - 362181002 - 2291 - 361181002 - 720361001 - - - 58 - 18 - - - - - fens -
58
- 0 - - 2291 - 361181002 - - - 50 - -
- - - fnmocWave -
58
- 0 - - 401 - 362181001 - 362181002 - 2291 - 361181002 - - - 110 - -
- - - NOGAPS -
58
- 20 - 218 - - 120 - -
- - - ${REGION}-NOGAPS -
58
- 20 - 242 - - 120 - -
- - - diff --git a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/colormaps/Grid/gridded data.cmap b/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/colormaps/Grid/gridded data.cmap deleted file mode 100644 index cc9697031a..0000000000 --- a/edexOsgi/com.raytheon.uf.common.dataplugin.grid/utility/common_static/base/colormaps/Grid/gridded data.cmap +++ /dev/null @@ -1,284 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file From e7d3052a888725699018fc8e64969f1f4a22a629 Mon Sep 17 00:00:00 2001 From: Michael James Date: Fri, 7 Sep 2018 15:08:36 -0600 Subject: [PATCH 04/11] updates for jep implementation on windows --- .../texteditor/scripting/runner/TextWsPythonScript.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/scripting/runner/TextWsPythonScript.java b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/scripting/runner/TextWsPythonScript.java index 0e68cbcbfc..3ee7be74ec 100644 --- a/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/scripting/runner/TextWsPythonScript.java +++ b/cave/com.raytheon.viz.texteditor/src/com/raytheon/viz/texteditor/scripting/runner/TextWsPythonScript.java @@ -115,7 +115,12 @@ public class TextWsPythonScript { public void dispose() { if (jep != null) { - jep.close(); + try { + jep.close(); + } catch (JepException e) { + // TODO Auto-generated catch block + 
e.printStackTrace(); + } jep = null; } } From 912e680e0a40fd56b78085a072f7879deb61582f Mon Sep 17 00:00:00 2001 From: Michael James Date: Fri, 7 Sep 2018 15:08:55 -0600 Subject: [PATCH 05/11] warngen dialog edits to fit screen --- .../viz/warngen/gui/WarngenDialog.java | 28 +++++++++---------- 1 file changed, 13 insertions(+), 15 deletions(-) diff --git a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java index ce46192bed..43268bf176 100644 --- a/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java +++ b/cave/com.raytheon.viz.warngen/src/com/raytheon/viz/warngen/gui/WarngenDialog.java @@ -262,7 +262,7 @@ IWarningsArrivedListener, ISimulatedTimeChangeListener { /** "OK" button text */ private static final String OK_BTN_LABEL = "Create Text"; - /** "Restart" button text */ + /** "" button text */ private static final String RS_BTN_LABEL = "Restart"; /** "Cancel" button text */ @@ -496,16 +496,6 @@ IWarningsArrivedListener, ISimulatedTimeChangeListener { } }); - instructionsLabel = new Label(mainComposite, SWT.BOLD); - instructionsLabel.setText("Instructions:"); - - gd = new GridData(SWT.FILL, SWT.DEFAULT, true, false); - gd.heightHint = INSTRUCTIONS_HEIGHT_IN_LINES; - instructionsBox = new Text(mainComposite, SWT.BORDER | SWT.READ_ONLY - | SWT.MULTI); - instructionsBox.setText(""); - instructionsBox.setLayoutData(gd); - startTimeTimer(); } @@ -789,6 +779,14 @@ IWarningsArrivedListener, ISimulatedTimeChangeListener { }); createTrackGroup(backupTrackEditComp); createEditGroup(backupTrackEditComp); + + gd = new GridData(SWT.FILL, SWT.FILL, true, true); + gd.heightHint = 40; + instructionsBox = new Text(mainComposite, SWT.NONE | SWT.READ_ONLY + | SWT.MULTI); + instructionsBox.setText(""); + instructionsBox.setLayoutData(gd); + //instructionsBox.setSize(SWT.DEFAULT, SWT.DEFAULT); } @@ -952,7 +950,7 @@ IWarningsArrivedListener, 
ISimulatedTimeChangeListener { && !warngenLayer.getStormTrackState().isNonstationary()) { str += INSTRUCTION_DRAG_STORM + "\n"; } else if (warngenLayer.getStormTrackState().trackVisible) { - str += "Adjust Centroid in any Frame" + "\n"; + str += "Adjust Centroid in any Frame | "; } str += "Adjust box around Warning Area"; } @@ -961,9 +959,9 @@ IWarningsArrivedListener, ISimulatedTimeChangeListener { str = presetInstruct; } instructionsBox.setText(str); - Point p1 = instructionsBox.getSize(); - Point p2 = instructionsBox.computeSize(SWT.DEFAULT, SWT.DEFAULT); - instructionsBox.setSize(new Point(p1.x, p2.y)); + //Point p1 = instructionsBox.getSize(); + //Point p2 = instructionsBox.computeSize(SWT.DEFAULT, SWT.DEFAULT); + //instructionsBox.setSize(new Point(p1.x, p2.y)); } /** From df1f1cf27ab8b6955c8f17070389277d978c01d2 Mon Sep 17 00:00:00 2001 From: Michael James Date: Fri, 7 Sep 2018 15:10:00 -0600 Subject: [PATCH 06/11] windows cave customization --- cave/com.raytheon.uf.viz.d2d.ui/plugin.xml | 18 ---- .../feature.xml | 80 ------------------ .../.settings/org.eclipse.jdt.core.prefs | 7 ++ .../developer.product | 16 +++- .../com.raytheon.viz.product.awips/plugin.xml | 8 +- .../splash.bmp | Bin 315054 -> 315056 bytes 6 files changed, 23 insertions(+), 106 deletions(-) create mode 100644 cave/com.raytheon.viz.product.awips/.settings/org.eclipse.jdt.core.prefs diff --git a/cave/com.raytheon.uf.viz.d2d.ui/plugin.xml b/cave/com.raytheon.uf.viz.d2d.ui/plugin.xml index b5648562d6..962154f13f 100644 --- a/cave/com.raytheon.uf.viz.d2d.ui/plugin.xml +++ b/cave/com.raytheon.uf.viz.d2d.ui/plugin.xml @@ -586,24 +586,6 @@ definitionId="com.raytheon.uf.viz.d2d.ui.inD2DActionSet"> - - - - - - - - - - - - - - - - - - - - @@ -125,10 +101,6 @@ id="com.raytheon.uf.viz.ncep.dataplugins.feature" version="0.0.0"/> - - @@ -145,14 +117,6 @@ id="com.raytheon.uf.viz.d2d.nsharp.feature" version="0.0.0"/> - - - - @@ -189,10 +153,6 @@ id="com.raytheon.uf.viz.d2d.ui.awips.feature" 
version="0.0.0"/> - - @@ -201,10 +161,6 @@ id="com.raytheon.uf.viz.d2d.damagepath.feature" version="0.0.0"/> - - @@ -217,50 +173,14 @@ id="gov.noaa.gsd.viz.ensemble.feature" version="0.0.0"/> - - - - - - - - - - - - - - - - - - diff --git a/cave/com.raytheon.viz.product.awips/.settings/org.eclipse.jdt.core.prefs b/cave/com.raytheon.viz.product.awips/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 0000000000..0c68a61dca --- /dev/null +++ b/cave/com.raytheon.viz.product.awips/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,7 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.8 +org.eclipse.jdt.core.compiler.compliance=1.8 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.source=1.8 diff --git a/cave/com.raytheon.viz.product.awips/developer.product b/cave/com.raytheon.viz.product.awips/developer.product index c5a5284b19..5dcd0ccf24 100644 --- a/cave/com.raytheon.viz.product.awips/developer.product +++ b/cave/com.raytheon.viz.product.awips/developer.product @@ -28,7 +28,7 @@ -Dviz.memory.warn.threshold=10M -Dorg.eclipse.swt.internal.gtk.cairoGraphics=false -Dhttps.certificate.check=false --Djava.library.path=/awips2/python/lib/python2.7/site-packages/jep +-Djava.library.path=C:\Users\mjames\Miniconda2\Lib\site-packages\jep -XX:MaxDirectMemorySize=1G -XX:+UnlockExperimentalVMOptions -XX:G1HeapRegionSize=4M @@ -45,7 +45,7 @@ - + - + + - jdk1.7.0 + org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.8 @@ -78,4 +79,11 @@ + + + + + + + diff --git a/cave/com.raytheon.viz.product.awips/plugin.xml b/cave/com.raytheon.viz.product.awips/plugin.xml index c401782bf6..9a3acf343f 100644 --- a/cave/com.raytheon.viz.product.awips/plugin.xml +++ b/cave/com.raytheon.viz.product.awips/plugin.xml @@ -53,10 +53,6 @@ 
name="startupForegroundColor" value="000000"> - - @@ -65,6 +61,10 @@ name="modelResourceHandler" value="bundleclass://com.raytheon.uf.viz.personalities.cave/com.raytheon.uf.viz.personalities.cave.workbench.VizModelResourceHandler"> + + diff --git a/cave/com.raytheon.viz.ui.personalities.awips/splash.bmp b/cave/com.raytheon.viz.ui.personalities.awips/splash.bmp index a5bf15c5626bbd9e67f580ce8f8c76072b636fde..2b632def59dd373e695368ccdc630d0cba231b78 100644 GIT binary patch delta 981 zcmbV~KTE?<6vba^6rx*;kdNTtpa=ywQ9Ig6nhp`9P%;XIEaH~IRdI0YCS=o^4*dwh zMd<2kM+0@~;G)0g!Zm~{;(_;a|D1c?y)W;+UmNsmoBKzDXWPtCXKk?VEMq?STE=BJ z=A3+W&9EM~U!Ci{<>6cH^kBHPdSOM==4jfq4xgOaZ1$4lcHFkkMpqrHYacfnjVOvf zbEIkNc^(mffeRfwNfIHj<0_vxE*#0KJJ;VS>vOuuj&q<7s6~x~7U~lxJDE&=)~{Bw+R7&~zrOQ~Hmzs2I1+P{AW2K;1$*z0#*z&5ypX-*nYu}35c1uU+`nO GR}TO>s?-Vq From 378b0bcc42ab12ac044a916bd850e42434a2b22f Mon Sep 17 00:00:00 2001 From: Michael James Date: Mon, 17 Sep 2018 22:30:07 -0600 Subject: [PATCH 07/11] 2to3 --- cave/build/tools/convCT.py | 124 +- .../tools/headup/Source/FileTypeConfig.py | 152 +- .../tools/headup/Source/HeaderUpdater.py | 924 +- .../alertViz/python/DebugProcessor.py | 80 +- .../GFESuite/src/gfeClient/gfeClient.py | 282 +- .../GFESuite/src/ifpimage/PngWriter.py | 1066 +- .../GFESuite/src/runifptext/runIFPText.py | 458 +- .../GFESuite/src/runprocedure/runProcedure.py | 468 +- .../help/EXAMPLESmartInit_NAM.py | 1118 +- .../localization/gfe/itool/ISmartScript.py | 466 +- .../localization/gfe/itool/SetupTextEA.py | 658 +- .../localization/gfe/itool/TextProductTest.py | 2231 +-- .../gfe/userPython/procedures/Align_Grids.py | 512 +- .../gfe/userPython/procedures/BOIVerify.py | 16114 ++++++++-------- .../procedures/BOIVerifyAutoCalc.py | 874 +- .../procedures/BOIVerifyBiasCorr.py | 1472 +- .../userPython/procedures/BOIVerifyInfo.py | 4546 ++--- .../gfe/userPython/procedures/CheckTandTd.py | 800 +- .../userPython/procedures/CheckWindGust.py | 476 +- .../userPython/procedures/CompletePopulate.py | 290 
+- .../userPython/procedures/CopyNHCProposed.py | 350 +- .../procedures/CreateNatlTCVZoneGroups.py | 830 +- .../userPython/procedures/CreateProposedSS.py | 1698 +- .../procedures/CreateTCVAreaDictionary.py | 530 +- .../gfe/userPython/procedures/Create_RFD.py | 214 +- .../userPython/procedures/DiffFromClimo.py | 570 +- .../gfe/userPython/procedures/ERQCcheck.py | 1204 +- .../gfe/userPython/procedures/Finalize_KML.py | 478 +- .../userPython/procedures/Fire_Wx_First.py | 304 +- .../userPython/procedures/GenerateCyclone.py | 2554 +-- .../userPython/procedures/HazardRecovery.py | 440 +- .../procedures/ISC_Discrepancies.py | 200 +- .../procedures/Interpolate_Procedure.py | 1090 +- .../userPython/procedures/MakeEditAreaRepo.py | 176 +- .../userPython/procedures/MakeHSEditAreas.py | 5104 ++--- .../gfe/userPython/procedures/MakeHazard.py | 644 +- .../gfe/userPython/procedures/MergeHazards.py | 994 +- .../userPython/procedures/MergeWFOEdits.py | 414 +- .../userPython/procedures/NDFD_QPF_Checks.py | 2650 +-- .../userPython/procedures/NDFDgridCheck.py | 984 +- .../userPython/procedures/PWS_Procedure.py | 674 +- .../userPython/procedures/PlotSPCWatches.py | 666 +- .../userPython/procedures/PlotTPCEvents.py | 462 +- .../procedures/PopulateFromClimo.py | 524 +- .../procedures/Populate_SkyProcedure.py | 248 +- .../procedures/Populate_WPC_PQPF.py | 446 +- .../userPython/procedures/ProcedureCmds.py | 306 +- .../gfe/userPython/procedures/Run_NWPS.py | 532 +- .../procedures/SnowAmtQPFPoPWxCheck.py | 2176 +-- .../gfe/userPython/procedures/StormInfo.py | 392 +- .../procedures/TCFloodingRainThreat.py | 1358 +- .../procedures/TCImpactGraphics_KML.py | 534 +- .../gfe/userPython/procedures/TCMWindTool.py | 3848 ++-- .../procedures/TCStormSurgeThreat.py | 1584 +- .../userPython/procedures/TCTornadoThreat.py | 678 +- .../gfe/userPython/procedures/TCWindThreat.py | 1228 +- .../gfe/userPython/procedures/ViewWCL.py | 762 +- .../gfe/userPython/smartTools/CalculateRFD.py | 1448 +- 
.../smartTools/Curing_from_Green.py | 262 +- .../userPython/smartTools/Enhanced_WxTool.py | 1028 +- .../gfe/userPython/smartTools/ExSS4.py | 154 +- .../gfe/userPython/smartTools/ExSS5.py | 168 +- .../gfe/userPython/smartTools/ExUtil1.py | 158 +- .../gfe/userPython/smartTools/Haines.py | 350 +- .../gfe/userPython/smartTools/LAL_Tool.py | 120 +- .../gfe/userPython/smartTools/MakeTmpGrid.py | 264 +- .../gfe/userPython/smartTools/MixHgt_FWF.py | 392 +- .../gfe/userPython/smartTools/MixHgt_Init.py | 418 +- .../gfe/userPython/smartTools/ModelBlend.py | 1420 +- .../smartTools/MoveFeatureBySpeed.py | 724 +- .../gfe/userPython/smartTools/PERCENTGREEN.py | 262 +- .../userPython/smartTools/Populate_SkyTool.py | 518 +- .../userPython/smartTools/QPF_SmartTool.py | 206 +- .../gfe/userPython/smartTools/Serp.py | 1594 +- .../gfe/userPython/smartTools/SerpISC.py | 958 +- .../userPython/smartTools/Show_ISC_Info.py | 358 +- .../userPython/smartTools/TransWind_NoVar.py | 244 +- .../smartTools/WindGustFromAlgorithm.py | 824 +- .../gfe/userPython/smartTools/getGridsTool.py | 290 +- .../gfe/userPython/smartTools/getMaxGrid.py | 140 +- .../gfe/userPython/smartTools/getSumGrids.py | 134 +- .../gfe/userPython/smartTools/serpFile.py | 530 +- .../userPython/textProducts/HLSTCV_Common.py | 3369 ++-- .../gfe/userPython/textProducts/HSF.py | 5566 +++--- .../userPython/textProducts/HighSeas_AT2.py | 148 +- .../userPython/textProducts/LE_Test_Local.py | 1692 +- .../textProducts/MultipleElementTable.py | 718 +- .../MultipleElementTable_Aux_Local.py | 160 +- .../textUtilities/headline/Analysis.py | 192 +- .../headline/ForecastNarrative.py | 3332 ++-- .../textUtilities/headline/ForecastTable.py | 1056 +- .../textUtilities/headline/FormatterRunner.py | 1108 +- .../textUtilities/headline/HazardsTable.py | 4953 +++-- .../textUtilities/headline/TextFormatter.py | 2020 +- .../textUtilities/headline/offsetTime.py | 298 +- .../textUtilities/regular/CombinedPhrases.py | 1410 +- 
.../regular/ConfigurableIssuance.py | 402 +- .../regular/DefaultCallToActions.py | 1766 +- .../textUtilities/regular/DiscretePhrases.py | 4178 ++-- .../textUtilities/regular/EditAreaUtils.py | 734 +- .../textUtilities/regular/FWS_Overrides.py | 10140 +++++----- .../textUtilities/regular/FirePhrases.py | 1854 +- .../textUtilities/regular/Header.py | 2010 +- .../textUtilities/regular/Interfaces.py | 390 +- .../textUtilities/regular/MarinePhrases.py | 1114 +- .../textUtilities/regular/ModuleAccessor.py | 330 +- .../textUtilities/regular/PhraseBuilder.py | 8426 ++++---- .../regular/Phrase_Test_Local.py | 1576 +- .../textUtilities/regular/SAF_Overrides.py | 2054 +- .../textUtilities/regular/SampleAnalysis.py | 5936 +++--- .../textUtilities/regular/ScalarPhrases.py | 5492 +++--- .../textUtilities/regular/SimpleTableUtils.py | 390 +- .../textUtilities/regular/StringUtils.py | 910 +- .../textUtilities/regular/TableBuilder.py | 1848 +- .../textUtilities/regular/TextRules.py | 220 +- .../textUtilities/regular/TextUtils.py | 1628 +- .../textUtilities/regular/TimeDescriptor.py | 1498 +- .../textUtilities/regular/TimeRangeUtils.py | 540 +- .../textUtilities/regular/Translator.py | 1376 +- .../textUtilities/regular/TropicalHazards.py | 2580 ++- .../textUtilities/regular/UserInfo.py | 242 +- .../textUtilities/regular/Utility.py | 280 +- .../textUtilities/regular/VTECMessageType.py | 112 +- .../textUtilities/regular/VarDictGroker.py | 372 +- .../regular/VectorRelatedPhrases.py | 2992 +-- .../textUtilities/regular/WxPhrases.py | 3878 ++-- .../gfe/userPython/utilities/AppDialog.py | 176 +- .../userPython/utilities/BOIVerifyUtility.py | 10194 +++++----- .../gfe/userPython/utilities/Common.py | 64 +- .../userPython/utilities/DefineMaxWindGUI.py | 460 +- .../userPython/utilities/EditAreaUtilities.py | 144 +- .../userPython/utilities/GridManipulation.py | 1998 +- .../gfe/userPython/utilities/HazardUtils.py | 2218 +-- .../gfe/userPython/utilities/ISC_Utility.py | 2468 +-- 
.../userPython/utilities/IToolInterface.py | 164 +- .../userPython/utilities/MakeHazardConfig.py | 384 +- .../gfe/userPython/utilities/MyDialog.py | 264 +- .../gfe/userPython/utilities/ObjAnal.py | 2308 +-- .../utilities/ProcedureInterface.py | 252 +- .../utilities/ProcessVariableList.py | 406 +- .../gfe/userPython/utilities/ProductParser.py | 622 +- .../gfe/userPython/utilities/SmartScript.py | 5564 +++--- .../utilities/SmartToolInterface.py | 298 +- .../gfe/userPython/utilities/StartupDialog.py | 178 +- .../userPython/utilities/TropicalUtility.py | 2540 ++- .../python/autotest/CreateGrids.py | 1512 +- .../python/autotest/Hazard_HLS_TestScript.py | 1368 +- .../python/autotest/TestScript.py | 1090 +- .../python/pyViz/BundlePainter.py | 136 +- .../python/pyViz/GFEPainter.py | 444 +- .../python/pyViz/testBundlePainter.py | 114 +- .../python/query/DBSSClient.py | 247 +- .../python/query/Evaluator.py | 150 +- .../python/query/Query.py | 465 +- .../python/testFormatters/RecreationFcst.py | 1134 +- .../testFormatters/SmartElementTable.py | 618 +- .../python/utility/loadConfig.py | 190 +- .../HoursRefTimePointDataRetrieve.py | 150 +- .../pointdata/PointDataContainer.py | 130 +- .../pointdata/PointDataRetrieve.py | 232 +- .../localization/pointdata/PointDataView.py | 200 +- .../textws/scripting/twsScripting.py | 356 +- .../common_static/base/gfe/config/Maps.py | 738 +- .../base/gfe/config/serverConfig.py | 6958 +++---- .../common_static/base/gfe/python/AbsTime.py | 232 +- .../common_static/base/gfe/python/BaseTool.py | 1012 +- .../base/gfe/python/DefaultEditAreaNaming.py | 142 +- .../base/gfe/python/JSmartUtils.py | 178 +- .../common_static/base/gfe/python/MetLib.py | 496 +- .../common_static/base/gfe/python/ParmID.py | 244 +- .../base/gfe/python/ShapeTable.py | 160 +- .../base/gfe/python/UnitConvertor.py | 324 +- .../base/gfe/python/createAreaDictionary.py | 1238 +- .../base/gfe/python/createComboFiles.py | 342 +- .../common_static/base/gfe/python/doConfig.py | 1157 +- 
.../base/gfe/python/isc/IrtAccess.py | 1324 +- .../base/gfe/python/isc/IrtServer.py | 1180 +- .../base/gfe/python/isc/ifpnetCDF.py | 2878 +-- .../base/gfe/python/isc/iscDataRec.py | 628 +- .../base/gfe/python/isc/iscExtract.py | 988 +- .../base/gfe/python/isc/iscMosaic.py | 3528 ++-- .../base/gfe/python/isc/iscUtil.py | 572 +- .../base/gfe/smartinit/GFS190.py | 1082 +- .../common_static/base/gfe/smartinit/GFS40.py | 2192 +-- .../common_static/base/gfe/smartinit/GFS75.py | 1078 +- .../common_static/base/gfe/smartinit/GFS80.py | 1124 +- .../base/gfe/smartinit/HIRESWarw.py | 648 +- .../base/gfe/smartinit/HIRESWnmm.py | 648 +- .../common_static/base/gfe/smartinit/HRRR.py | 424 +- .../common_static/base/gfe/smartinit/Init.py | 2820 +-- .../common_static/base/gfe/smartinit/NAM12.py | 3376 ++-- .../common_static/base/gfe/smartinit/NAM40.py | 1154 +- .../common_static/base/gfe/smartinit/NAM80.py | 1100 +- .../common_static/base/gfe/smartinit/NAM95.py | 1104 +- .../common_static/base/gfe/smartinit/RAP13.py | 1116 +- .../common_static/base/gfe/smartinit/RAP40.py | 1066 +- .../common_static/base/gfe/smartinit/gfsLR.py | 1090 +- .../base/gfe/textproducts/Generator.py | 1636 +- .../gfe/textproducts/configureTextProducts.py | 398 +- .../gfe/textproducts/library/SimpleLog.py | 114 +- .../base/gfe/textproducts/moduleTest.py | 60 +- .../gfe/textproducts/templates/product/AFD.py | 2224 +-- .../templates/product/AreaFcst.py | 3034 +-- .../gfe/textproducts/templates/product/CCF.py | 1140 +- .../gfe/textproducts/templates/product/CWF.py | 2654 +-- .../templates/product/CWF_Pacific.py | 2760 +-- .../templates/product/CivilEmerg.py | 402 +- .../product/CivilEmerg_EQR_MultiPil_Local.py | 256 +- .../gfe/textproducts/templates/product/FWF.py | 2088 +- .../templates/product/FWFTable.py | 3818 ++-- .../gfe/textproducts/templates/product/FWM.py | 1180 +- .../templates/product/FWS_Site_MultiPil.py | 272 +- .../product/FWS_Site_MultiPil_Baseline.py | 270 +- 
.../gfe/textproducts/templates/product/GLF.py | 3368 ++-- .../templates/product/GenericHazards.py | 2336 +-- .../templates/product/GenericReport.py | 606 +- .../gfe/textproducts/templates/product/HLS.py | 6342 +++--- .../templates/product/Hazard_AQA_MultiPil.py | 694 +- .../templates/product/Hazard_FFA_MultiPil.py | 1100 +- .../templates/product/Hazard_HLS.py | 14296 +++++++------- .../templates/product/Hazard_HWO_MultiPil.py | 510 +- .../templates/product/Hazard_RFW_MultiPil.py | 1850 +- .../templates/product/Hazard_TCV.py | 7960 ++++---- .../product/Hazard_TCVNHC_MultiPil.py | 1260 +- .../templates/product/Hazard_WCN_MultiPil.py | 1454 +- .../gfe/textproducts/templates/product/MVF.py | 816 +- .../gfe/textproducts/templates/product/NSH.py | 1592 +- .../gfe/textproducts/templates/product/OFF.py | 2186 +-- .../gfe/textproducts/templates/product/PFM.py | 3928 ++-- .../templates/product/SAF_Site_MultiPil.py | 272 +- .../product/SAF_Site_MultiPil_Baseline.py | 270 +- .../gfe/textproducts/templates/product/SFT.py | 1332 +- .../gfe/textproducts/templates/product/SRF.py | 3924 ++-- .../cli/src/activeTable/sendAT.py | 244 +- 234 files changed, 168719 insertions(+), 168717 deletions(-) diff --git a/cave/build/tools/convCT.py b/cave/build/tools/convCT.py index 7bf1f44f0e..d774cca5a2 100644 --- a/cave/build/tools/convCT.py +++ b/cave/build/tools/convCT.py @@ -1,63 +1,63 @@ -#!/usr/bin/env python -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +#!/usr/bin/env python +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. 
persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# Converts netcdf style colormaps to AWIPS II XML colormaps -# -# Usage: ./convCT.py colormap1 colormap2 colormap3 -# -# Requires scipy and numpy -# -# Deposits files in /tmp -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Jun 23, 2008 chammack Initial creation -# - -import pupynere as netcdf -import numpy -import sys -import os - -def convert(i): - return str((i & 0xFF) / 255.0) - -ct = sys.argv -numct = len(ct) - -for k in range(1, numct): - print 'Converting: ' + ct[k] - nc = netcdf.netcdf_file(ct[k], "r") - colors = nc.variables['tableColors'][:][0] - f = open('/tmp/' + os.path.basename(ct[k]).replace('.COLORTABLE', '.cmap'), 'w') - f.write('\n') - - aVal = 1.0 - for i in range(numpy.shape(colors)[1]): - f.write(" \n') - - f.write('\n') - f.close() +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# Converts netcdf style colormaps to AWIPS II XML colormaps +# +# Usage: ./convCT.py colormap1 colormap2 colormap3 +# +# Requires scipy and numpy +# +# Deposits files in /tmp +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jun 23, 2008 chammack Initial creation +# + +import pupynere as netcdf +import numpy +import sys +import os + +def convert(i): + return str((i & 0xFF) / 255.0) + +ct = sys.argv +numct = len(ct) + +for k in range(1, numct): + print('Converting: ' + ct[k]) + nc = netcdf.netcdf_file(ct[k], "r") + colors = nc.variables['tableColors'][:][0] + f = open('/tmp/' + os.path.basename(ct[k]).replace('.COLORTABLE', '.cmap'), 'w') + f.write('\n') + + aVal = 1.0 + for i in range(numpy.shape(colors)[1]): + f.write(" \n') + + f.write('\n') + f.close() diff --git a/cave/build/tools/headup/Source/FileTypeConfig.py b/cave/build/tools/headup/Source/FileTypeConfig.py index 4f700623aa..7acdbb1a30 100644 --- a/cave/build/tools/headup/Source/FileTypeConfig.py +++ b/cave/build/tools/headup/Source/FileTypeConfig.py @@ -1,77 +1,77 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 3 Mar 2010 #3771 jelkins Initial Creation. - -from ConfigParser import ConfigParser -from ConfigParser import NoOptionError -from os import pathsep -from os import listdir -from os.path import join - -class FileTypeConfig(ConfigParser): - """ Handles file type configurations - """ - - def __init__(self,defaultConfig = None,configDirectories = None, - fileType = None): - self.fileType = fileType - - dConf = {"space":" "} - if defaultConfig != None: - dConf.update(defaultConfig) - - ConfigParser.__init__(self,dConf) - - if configDirectories != None: - self.loadConfig(configDirectories) - - def isAvailable(self,fileType = None): - if fileType == None: - fileType = self.fileType - return self.has_section(fileType) - - def loadConfig(self,configDirectories): - for path in configDirectories.split(pathsep): - for file in listdir(path): - if ".cfg" in file: - self.read(join(path,file)) - - def _getConfig(self,configKey,getterFunction,varDict=None): - result = None - try: - if varDict != None: - result = getterFunction(self.fileType,configKey,vars=varDict) - else: - result = getterFunction(self.fileType,configKey) - except NoOptionError: - pass - return result - - def getConfig(self,configKey,varDict=None): - return self._getConfig(configKey,self.get,varDict) - - def getBooleanConfig(self,configKey): - return self._getConfig(configKey,self.getboolean) - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## + +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 3 Mar 2010 #3771 jelkins Initial Creation. + +from configparser import ConfigParser +from configparser import NoOptionError +from os import pathsep +from os import listdir +from os.path import join + +class FileTypeConfig(ConfigParser): + """ Handles file type configurations + """ + + def __init__(self,defaultConfig = None,configDirectories = None, + fileType = None): + self.fileType = fileType + + dConf = {"space":" "} + if defaultConfig != None: + dConf.update(defaultConfig) + + ConfigParser.__init__(self,dConf) + + if configDirectories != None: + self.loadConfig(configDirectories) + + def isAvailable(self,fileType = None): + if fileType == None: + fileType = self.fileType + return self.has_section(fileType) + + def loadConfig(self,configDirectories): + for path in configDirectories.split(pathsep): + for file in listdir(path): + if ".cfg" in file: + self.read(join(path,file)) + + def _getConfig(self,configKey,getterFunction,varDict=None): + result = None + try: + if varDict != None: + result = getterFunction(self.fileType,configKey,vars=varDict) + else: + result = getterFunction(self.fileType,configKey) + except NoOptionError: + pass + return result + + def getConfig(self,configKey,varDict=None): + return self._getConfig(configKey,self.get,varDict) + + def getBooleanConfig(self,configKey): + return self._getConfig(configKey,self.getboolean) + diff --git a/cave/build/tools/headup/Source/HeaderUpdater.py b/cave/build/tools/headup/Source/HeaderUpdater.py index bfc4b2d749..23127992e7 100755 --- a/cave/build/tools/headup/Source/HeaderUpdater.py +++ b/cave/build/tools/headup/Source/HeaderUpdater.py @@ -1,463 +1,463 @@ -#!/usr/bin/env python - -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 3 Mar 2010 #3771 jelkins Initial Creation. - -from __future__ import with_statement - -# the version is derived from the date last updated y.y.m.d -version = "1.0.3.12" - -from optparse import OptionParser -from optparse import OptionGroup -from os import pathsep -from os import rename -from os.path import basename -from os.path import splitext -import sys -import re -import logging - -from FileTypeConfig import FileTypeConfig -import OptionCallback - -_regexCache = {} - -def getRegex(fileType, regexKey, value=None): - global _regexCache - - fileTypeNode = {} - - if fileType in _regexCache: - fileTypeNode = _regexCache[fileType] - else: - _regexCache[fileType] = fileTypeNode - - if not(regexKey in fileTypeNode): - fileTypeNode[regexKey] = None - if value != None: - fileTypeNode[regexKey] = re.compile(value,re.DOTALL) - - return fileTypeNode[regexKey] - -def getLastMatch(matches, matchSplit, endOffset=None, splitGroup= - 1): - result = re.split(matchSplit, matches) - result = result[splitGroup] - if endOffset != None: - result = result[:endOffset] - return re.escape(result) - -def getHeader(headerFileName, fileConfig): - headerText = "" - - with open(headerFileName, 'r') as headerInput: - for line in headerInput: - searchText = fileConfig.getConfig("textSearch") - replaceText = 
fileConfig.getConfig("textReplace") - if searchText != None and replaceText != None: - line = re.sub(re.escape(searchText), replaceText, line) - headerText += fileConfig.getConfig("lineTemplate", {"lineText":line}) - result = fileConfig.getConfig("headerTemplate", {"headerText":headerText[: - 1]}) - return result - -def addOptions(commandLineParser): - commandLineParser.add_option("-a", "--disable-addmissing", dest="addMissing", - default=True, action="store_false", - help="do not add a header if an existing header is not found.") - - commandLineParser.add_option("-v", "--verbose", dest="verbose", - action="callback", callback=OptionCallback.flagWithOption, - help="output what's happening to stderr. -v [DEBUG] enable " - + "debug output") - - commandLineParser.add_option("-i", "--in-place", dest="backupSuffix", - action="callback", callback=OptionCallback.flagWithOption, - help="update FILE in place. -i [BACKUPSUFFIX] create a backup " - + "of the original file.") - - commandLineParser.add_option("-r", "--revert-backup", dest="revertSuffix", - help="revert FILE to FILEREVERTSUFFIX and remove backup") - - commandLineParser.add_option("-t", "--textheader", dest="headerFile", - help="read header text from HEADERFILE") - - commandLineParser.add_option("-s", "--search", dest="searchString", - default="", - help="look for an existing header with a matching SEARCHSTRING.") - - commandLineParser.add_option("-S", "--search-regex", dest="searchPattern", - help="look for an existing header with a matching SEARCHPATTERN.") - - commandLineParser.add_option_group(OptionGroup(commandLineParser, - "SEARCHPATTERN|SEARCHSTRING", "Without specifying a SEARCHPATTERN" - + " or SEARCHSTRING a search will only be made for an existing" - + " header that matches the template. Specify a SEARCHSTRING or" - + " SEARCHPATTERN to enable block and line block header searching." 
- + " If both a SEARCHSTRING and SEARCHPATTERN are given, The" - + " SEARCHPATTERN will override the SEARCHSTRING.")) - - commandLineParser.add_option("-l", "--search-limit", dest="searchLimit", - default=3000, type=int, - help="look for an existing header within the first SEARCHLIMIT " - + "bytes. Recommend setting this to about 200% the size of the current" - + " header. default %default") - - commandLineParser.add_option("-f", "--filetypes", dest="fileTypesDir", - help="include the filetype configurations from FILETYPESDIR. " - + "Multiple directories may be specified using the `" + pathsep - + "' path separater character") - - commandLineParser.add_option("-e", "--ext", dest="fileExtension", - help="specifiy the FILEEXTENSION to use") - - -def main(commandOption=None, FILE=None): - """ Execute HeaderUpdater from the command line - """ - - # define the command line options - commandLineParser = OptionParser(usage="usage: %prog [OPTIONS] [FILE]", - version="%prog " + version) - - commandLineParser.add_option_group(OptionGroup(commandLineParser, - "FILE", "Specify an input FILE. If no FILE is given or if" - + " FILE is `-' read input from stdin. 
When reading from stdin" - + " the -e option is required.")) - - addOptions(commandLineParser) - - # parse the arguments - - commandLineOption = None - args = None - - if commandOption != None: - commandLineOption = commandOption - else: - (commandLineOption, args) = commandLineParser.parse_args() - - if FILE != None: - args = [FILE] - - if len(args) == 1: - inputFileName = args[0] - elif commandLineOption.fileExtension != None: - inputFileName = "-" - else: - commandLineParser.error("stdin requires -e option") - - # setup the logger - logging.basicConfig(stream=sys.stderr, - format='%(name)-12s: %(levelname)-8s %(message)s') - - logger = logging.getLogger(basename(inputFileName)) - - logLevel = logging.WARNING - verbose = commandLineOption.verbose - if verbose != None: - logLevel = logging.INFO - if verbose != "": - if verbose == "DEBUG": - logLevel = logging.DEBUG - - logger.setLevel(logLevel) - - # quickly restore a file from backup - revertSuffix = commandLineOption.revertSuffix - if revertSuffix != None: - try: - rename(inputFileName + revertSuffix, inputFileName) - except OSError, v: - logger.error(v) - return - - # load the filetype configurations - fileTypeConfig = FileTypeConfig() - - fileTypeConfig.fileType = splitext(inputFileName)[1] - - if commandLineOption.fileExtension != None: - fileTypeConfig.fileType = commandLineOption.fileExtension - - if commandLineOption.fileTypesDir != None: - fileTypeConfig.loadConfig(commandLineOption.fileTypesDir) - logger.debug("Loaded fileType configs from: " + commandLineOption.fileTypesDir) - - # check for a configuration for the input file - if not(fileTypeConfig.isAvailable()): - logger.error("no " + fileTypeConfig.fileType + " configuration exists") - return 10 - - # read the inputfile - inputFile = sys.stdin - if inputFileName != "-": - inputFile = open(inputFileName, 'r') - - inputHeader = inputFile.read(commandLineOption.searchLimit) - inputFooter = inputFile.read() - inputFile.close() - - logger.info("Ready to 
process " + inputFileName) - - searchOption = re.escape(commandLineOption.searchString) - - if commandLineOption.searchPattern != None: - searchOption = commandLineOption.searchPattern - - searchString = ".*?" + searchOption + ".*?" - - # these offsets provide an easy way to handle line returns caught - # by the match - headerStartOffset = 0 - headerEndOffset = 0 - - # create the newHeader - newHeader = None - - if commandLineOption.headerFile != None: - newHeader = getHeader(commandLineOption.headerFile, fileTypeConfig) - - # check that we don't already have the new header in the inputFile - notUpdated = False - logger.info("Checking if file already contains updated header") - headerMatch = None if newHeader == None else re.search(re.escape(newHeader), inputHeader, re.DOTALL) - if headerMatch != None: - notUpdated = True - logger.info("File already contains the updated header") - else: - # check if we can find a header matching the template - searchHeader = "\n*" + re.escape(fileTypeConfig.getConfig("headerTemplate", {"headerText":"searchStringPlaceholder"})) + "\n" - searchHeader = re.sub("searchStringPlaceholder", searchString, searchHeader) - logger.info("Checking if file contains a header matching the template") - headerMatch = re.search(searchHeader, inputHeader, re.DOTALL) - - if headerMatch != None: - headerEndOffset = - 1 - logger.info("Searching for the start of the header") - headerStartOffset = len(re.search("\n*", headerMatch.group()).group()) - - # we must check that each line starts with the lineTemplate - validTemplateMatch = True - header = headerMatch.group()[headerStartOffset:headerEndOffset] - logger.info("Ensuring each line in the header starts with the lineTemplate") - for line in header.split("\n")[1: - 1]: - lineSearch = fileTypeConfig.getConfig("lineTemplate", {"lineText":""}) - lineMatch = re.search(re.escape(lineSearch), line) - if lineMatch == None: - validTemplateMatch = False - headerMatch = None - break - - if validTemplateMatch == 
True: - logger.info("Found existing header matching template") - - if headerMatch == None and searchString != ".*?.*?" and fileTypeConfig.getConfig("blockBegin") != None: - # try and find a header located inside a block comment - searchBlock = re.escape(fileTypeConfig.getConfig("blockBegin")) - searchBlock += searchString - searchBlock += re.escape(fileTypeConfig.getConfig("blockEnd")) - - logger.info("Searching for header inside block comment") - headerMatch = re.search(searchBlock, inputHeader, re.DOTALL) - - if headerMatch != None: - blockBegin = re.escape(fileTypeConfig.getConfig("blockBegin")) - isAmbiguousBlock = fileTypeConfig.getConfig("blockBegin") == fileTypeConfig.getConfig("blockEnd") - - splitGroup = - 1 - if isAmbiguousBlock == True: - splitGroup = - 2 - - headerSubGroup = getLastMatch(headerMatch.group(), blockBegin, splitGroup=splitGroup) - headerSubGroup = blockBegin + headerSubGroup - - if isAmbiguousBlock == True: - headerSubGroup += blockBegin - - logger.info("Searching last header inside block comment") - headerMatch = re.search(headerSubGroup, inputHeader, re.DOTALL) - - if headerMatch != None: - logger.info("Found existing header inside block section") - - if headerMatch == None and searchString != ".*?.*?" and fileTypeConfig.getConfig("lineComment") != None: - # try and find a header offset by line comments - # this is only done if the searchRegEx isn't the default, - # otherwise we will probably match something that isn't a header - - lineComment = fileTypeConfig.getConfig("lineComment") - - searchLine = re.escape(lineComment) + ".*?" - searchLine += searchString + "\n" - - # lookahead assertions are AWESOME! - searchLine += "(?!" 
+ re.escape(lineComment) + ")" - - lineHeaderRegex = getRegex(fileTypeConfig.fileType, "lineHeader", searchLine) - - logger.info("Searching for a header in a block of line comments") - headerMatch = lineHeaderRegex.match(inputHeader) - - if headerMatch != None: - logger.info("Splitting the header into its line comment groups") - headerSubGroup = getLastMatch(headerMatch.group(), - "\n(?!" + re.escape(lineComment) + ").*?\n", - 1) - - logger.info("Searching for the last header in a block of line comments") - headerMatch = re.search(headerSubGroup, inputHeader, re.DOTALL) - - # handle situations where the header and placeAfter portion - # are not split by a a line - placeAfter = fileTypeConfig.getConfig("placeAfter") - if headerMatch != None and placeAfter != None: - placeAfterSearch = placeAfter + "(.*)" - logger.info("Searching to see if the header is directly after a placeAfter") - headerMinusPlaceAfter = re.search(placeAfterSearch, headerMatch.group(), re.DOTALL) - if headerMinusPlaceAfter != None: - logger.info("Extracting the header from the placeAfter") - headerMatch = re.search(re.escape( - headerMinusPlaceAfter.group(1)), inputHeader, re.DOTALL) - - # we must check that each line starts with the lineComment - if headerMatch != None: - header = headerMatch.group() - logger.info("Verifying all lines in the header begin with a lineComment") - for line in header.split("\n"): - lineMatch = re.search("^" + re.escape(lineComment) + ".*", line) - if lineMatch == None: - headerMatch = None - break - - if headerMatch != None: - logger.info("Found existing header in line comment section") - - if (headerMatch != None - and commandLineOption.headerFile != None - and notUpdated == False): - # an existing header was found, we will need to replace it - outputHeader = (inputHeader[:headerMatch.start() + headerStartOffset] + - newHeader + inputHeader[headerMatch.end() + headerEndOffset:]) - - logger.info("Updated existing header") - logger.debug("\n" + headerMatch.group() + 
"\nwith: \n" + newHeader) - elif ((commandLineOption.addMissing and fileTypeConfig.getBooleanConfig("addMissing") != False) - and notUpdated == False - and commandLineOption.headerFile != None): - # an existing header was not found, we need to add a new one - - placementSearch = fileTypeConfig.getConfig("placeAfter") - if placementSearch != None: - logger.info("Searching for the placeAfter") - placementMatch = re.search(placementSearch, inputHeader) - - if placementMatch != None: - - outputHeader = inputHeader[:placementMatch.end()] - - if outputHeader[ - 1] != "\n": - outputHeader += "\n" - - outputHeader += newHeader - - if inputHeader[placementMatch.end()] != "\n": - outputHeader += "\n" - - outputHeader += inputHeader[placementMatch.end():] - - logger.info("Added new header after placement match") - logger.debug("\n" + newHeader + "\nplacement match:\n" + - placementMatch.group()) - else: - # we didn't find the placement match - info = "Failed to find placement match, " - - requirePlaceAfter = fileTypeConfig.getBooleanConfig("requirePlaceAfter") - - if requirePlaceAfter == None: - requirePlaceAfter = True - - if requirePlaceAfter == True: - outputHeader = inputHeader - logger.info(info + "no file modifications were made") - notUpdated = True - else: - outputHeader = newHeader - - if len(inputHeader) != 0 and inputHeader[0] != "\n": - outputHeader += "\n" - - outputHeader += inputHeader - - logger.info(info + "but placement matching is not required") - logger.info("Added new header") - logger.debug("\n" + newHeader) - - else: - outputHeader = newHeader - - if inputHeader[0] != "\n": - outputHeader += "\n" - - outputHeader += inputHeader - - logger.info("Added new header") - logger.debug("\n" + newHeader) - else: - # don't do anything - outputHeader = inputHeader - logInfo = "" - if newHeader == None: - logInfo = "No header file provided, " - elif notUpdated == False: - logInfo = "Failed to find existing header, " - logger.info(logInfo + "no file modifications 
were made") - notUpdated = True - - outputStream = sys.stdout - - if commandLineOption.backupSuffix != None: - if commandLineOption.backupSuffix != "" and notUpdated == False: - # create a backup of the original file - backupFileName = inputFileName + commandLineOption.backupSuffix - backupFile = open(backupFileName, 'w') - backupFile.write(inputHeader) - backupFile.write(inputFooter) - backupFile.close() - logger.info("Created backup file: " + backupFileName) - outputStream = open(inputFileName, 'w') - - outputStream.write(outputHeader) - outputStream.write(inputFooter) - - outputStream.flush() - - if outputStream != sys.stdout: - outputStream.close() - if notUpdated == False: - logger.info("Performed in-place update") - -if __name__ == "__main__": +#!/usr/bin/env python + +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 3 Mar 2010 #3771 jelkins Initial Creation. 
+ + + +# the version is derived from the date last updated y.y.m.d +version = "1.0.3.12" + +from optparse import OptionParser +from optparse import OptionGroup +from os import pathsep +from os import rename +from os.path import basename +from os.path import splitext +import sys +import re +import logging + +from FileTypeConfig import FileTypeConfig +import OptionCallback + +_regexCache = {} + +def getRegex(fileType, regexKey, value=None): + global _regexCache + + fileTypeNode = {} + + if fileType in _regexCache: + fileTypeNode = _regexCache[fileType] + else: + _regexCache[fileType] = fileTypeNode + + if not(regexKey in fileTypeNode): + fileTypeNode[regexKey] = None + if value != None: + fileTypeNode[regexKey] = re.compile(value,re.DOTALL) + + return fileTypeNode[regexKey] + +def getLastMatch(matches, matchSplit, endOffset=None, splitGroup= - 1): + result = re.split(matchSplit, matches) + result = result[splitGroup] + if endOffset != None: + result = result[:endOffset] + return re.escape(result) + +def getHeader(headerFileName, fileConfig): + headerText = "" + + with open(headerFileName, 'r') as headerInput: + for line in headerInput: + searchText = fileConfig.getConfig("textSearch") + replaceText = fileConfig.getConfig("textReplace") + if searchText != None and replaceText != None: + line = re.sub(re.escape(searchText), replaceText, line) + headerText += fileConfig.getConfig("lineTemplate", {"lineText":line}) + result = fileConfig.getConfig("headerTemplate", {"headerText":headerText[: - 1]}) + return result + +def addOptions(commandLineParser): + commandLineParser.add_option("-a", "--disable-addmissing", dest="addMissing", + default=True, action="store_false", + help="do not add a header if an existing header is not found.") + + commandLineParser.add_option("-v", "--verbose", dest="verbose", + action="callback", callback=OptionCallback.flagWithOption, + help="output what's happening to stderr. 
-v [DEBUG] enable " + + "debug output") + + commandLineParser.add_option("-i", "--in-place", dest="backupSuffix", + action="callback", callback=OptionCallback.flagWithOption, + help="update FILE in place. -i [BACKUPSUFFIX] create a backup " + + "of the original file.") + + commandLineParser.add_option("-r", "--revert-backup", dest="revertSuffix", + help="revert FILE to FILEREVERTSUFFIX and remove backup") + + commandLineParser.add_option("-t", "--textheader", dest="headerFile", + help="read header text from HEADERFILE") + + commandLineParser.add_option("-s", "--search", dest="searchString", + default="", + help="look for an existing header with a matching SEARCHSTRING.") + + commandLineParser.add_option("-S", "--search-regex", dest="searchPattern", + help="look for an existing header with a matching SEARCHPATTERN.") + + commandLineParser.add_option_group(OptionGroup(commandLineParser, + "SEARCHPATTERN|SEARCHSTRING", "Without specifying a SEARCHPATTERN" + + " or SEARCHSTRING a search will only be made for an existing" + + " header that matches the template. Specify a SEARCHSTRING or" + + " SEARCHPATTERN to enable block and line block header searching." + + " If both a SEARCHSTRING and SEARCHPATTERN are given, The" + + " SEARCHPATTERN will override the SEARCHSTRING.")) + + commandLineParser.add_option("-l", "--search-limit", dest="searchLimit", + default=3000, type=int, + help="look for an existing header within the first SEARCHLIMIT " + + "bytes. Recommend setting this to about 200% the size of the current" + + " header. default %default") + + commandLineParser.add_option("-f", "--filetypes", dest="fileTypesDir", + help="include the filetype configurations from FILETYPESDIR. 
" + + "Multiple directories may be specified using the `" + pathsep + + "' path separater character") + + commandLineParser.add_option("-e", "--ext", dest="fileExtension", + help="specifiy the FILEEXTENSION to use") + + +def main(commandOption=None, FILE=None): + """ Execute HeaderUpdater from the command line + """ + + # define the command line options + commandLineParser = OptionParser(usage="usage: %prog [OPTIONS] [FILE]", + version="%prog " + version) + + commandLineParser.add_option_group(OptionGroup(commandLineParser, + "FILE", "Specify an input FILE. If no FILE is given or if" + + " FILE is `-' read input from stdin. When reading from stdin" + + " the -e option is required.")) + + addOptions(commandLineParser) + + # parse the arguments + + commandLineOption = None + args = None + + if commandOption != None: + commandLineOption = commandOption + else: + (commandLineOption, args) = commandLineParser.parse_args() + + if FILE != None: + args = [FILE] + + if len(args) == 1: + inputFileName = args[0] + elif commandLineOption.fileExtension != None: + inputFileName = "-" + else: + commandLineParser.error("stdin requires -e option") + + # setup the logger + logging.basicConfig(stream=sys.stderr, + format='%(name)-12s: %(levelname)-8s %(message)s') + + logger = logging.getLogger(basename(inputFileName)) + + logLevel = logging.WARNING + verbose = commandLineOption.verbose + if verbose != None: + logLevel = logging.INFO + if verbose != "": + if verbose == "DEBUG": + logLevel = logging.DEBUG + + logger.setLevel(logLevel) + + # quickly restore a file from backup + revertSuffix = commandLineOption.revertSuffix + if revertSuffix != None: + try: + rename(inputFileName + revertSuffix, inputFileName) + except OSError as v: + logger.error(v) + return + + # load the filetype configurations + fileTypeConfig = FileTypeConfig() + + fileTypeConfig.fileType = splitext(inputFileName)[1] + + if commandLineOption.fileExtension != None: + fileTypeConfig.fileType = 
commandLineOption.fileExtension + + if commandLineOption.fileTypesDir != None: + fileTypeConfig.loadConfig(commandLineOption.fileTypesDir) + logger.debug("Loaded fileType configs from: " + commandLineOption.fileTypesDir) + + # check for a configuration for the input file + if not(fileTypeConfig.isAvailable()): + logger.error("no " + fileTypeConfig.fileType + " configuration exists") + return 10 + + # read the inputfile + inputFile = sys.stdin + if inputFileName != "-": + inputFile = open(inputFileName, 'r') + + inputHeader = inputFile.read(commandLineOption.searchLimit) + inputFooter = inputFile.read() + inputFile.close() + + logger.info("Ready to process " + inputFileName) + + searchOption = re.escape(commandLineOption.searchString) + + if commandLineOption.searchPattern != None: + searchOption = commandLineOption.searchPattern + + searchString = ".*?" + searchOption + ".*?" + + # these offsets provide an easy way to handle line returns caught + # by the match + headerStartOffset = 0 + headerEndOffset = 0 + + # create the newHeader + newHeader = None + + if commandLineOption.headerFile != None: + newHeader = getHeader(commandLineOption.headerFile, fileTypeConfig) + + # check that we don't already have the new header in the inputFile + notUpdated = False + logger.info("Checking if file already contains updated header") + headerMatch = None if newHeader == None else re.search(re.escape(newHeader), inputHeader, re.DOTALL) + if headerMatch != None: + notUpdated = True + logger.info("File already contains the updated header") + else: + # check if we can find a header matching the template + searchHeader = "\n*" + re.escape(fileTypeConfig.getConfig("headerTemplate", {"headerText":"searchStringPlaceholder"})) + "\n" + searchHeader = re.sub("searchStringPlaceholder", searchString, searchHeader) + logger.info("Checking if file contains a header matching the template") + headerMatch = re.search(searchHeader, inputHeader, re.DOTALL) + + if headerMatch != None: + 
headerEndOffset = - 1 + logger.info("Searching for the start of the header") + headerStartOffset = len(re.search("\n*", headerMatch.group()).group()) + + # we must check that each line starts with the lineTemplate + validTemplateMatch = True + header = headerMatch.group()[headerStartOffset:headerEndOffset] + logger.info("Ensuring each line in the header starts with the lineTemplate") + for line in header.split("\n")[1: - 1]: + lineSearch = fileTypeConfig.getConfig("lineTemplate", {"lineText":""}) + lineMatch = re.search(re.escape(lineSearch), line) + if lineMatch == None: + validTemplateMatch = False + headerMatch = None + break + + if validTemplateMatch == True: + logger.info("Found existing header matching template") + + if headerMatch == None and searchString != ".*?.*?" and fileTypeConfig.getConfig("blockBegin") != None: + # try and find a header located inside a block comment + searchBlock = re.escape(fileTypeConfig.getConfig("blockBegin")) + searchBlock += searchString + searchBlock += re.escape(fileTypeConfig.getConfig("blockEnd")) + + logger.info("Searching for header inside block comment") + headerMatch = re.search(searchBlock, inputHeader, re.DOTALL) + + if headerMatch != None: + blockBegin = re.escape(fileTypeConfig.getConfig("blockBegin")) + isAmbiguousBlock = fileTypeConfig.getConfig("blockBegin") == fileTypeConfig.getConfig("blockEnd") + + splitGroup = - 1 + if isAmbiguousBlock == True: + splitGroup = - 2 + + headerSubGroup = getLastMatch(headerMatch.group(), blockBegin, splitGroup=splitGroup) + headerSubGroup = blockBegin + headerSubGroup + + if isAmbiguousBlock == True: + headerSubGroup += blockBegin + + logger.info("Searching last header inside block comment") + headerMatch = re.search(headerSubGroup, inputHeader, re.DOTALL) + + if headerMatch != None: + logger.info("Found existing header inside block section") + + if headerMatch == None and searchString != ".*?.*?" 
and fileTypeConfig.getConfig("lineComment") != None: + # try and find a header offset by line comments + # this is only done if the searchRegEx isn't the default, + # otherwise we will probably match something that isn't a header + + lineComment = fileTypeConfig.getConfig("lineComment") + + searchLine = re.escape(lineComment) + ".*?" + searchLine += searchString + "\n" + + # lookahead assertions are AWESOME! + searchLine += "(?!" + re.escape(lineComment) + ")" + + lineHeaderRegex = getRegex(fileTypeConfig.fileType, "lineHeader", searchLine) + + logger.info("Searching for a header in a block of line comments") + headerMatch = lineHeaderRegex.match(inputHeader) + + if headerMatch != None: + logger.info("Splitting the header into its line comment groups") + headerSubGroup = getLastMatch(headerMatch.group(), + "\n(?!" + re.escape(lineComment) + ").*?\n", - 1) + + logger.info("Searching for the last header in a block of line comments") + headerMatch = re.search(headerSubGroup, inputHeader, re.DOTALL) + + # handle situations where the header and placeAfter portion + # are not split by a a line + placeAfter = fileTypeConfig.getConfig("placeAfter") + if headerMatch != None and placeAfter != None: + placeAfterSearch = placeAfter + "(.*)" + logger.info("Searching to see if the header is directly after a placeAfter") + headerMinusPlaceAfter = re.search(placeAfterSearch, headerMatch.group(), re.DOTALL) + if headerMinusPlaceAfter != None: + logger.info("Extracting the header from the placeAfter") + headerMatch = re.search(re.escape( + headerMinusPlaceAfter.group(1)), inputHeader, re.DOTALL) + + # we must check that each line starts with the lineComment + if headerMatch != None: + header = headerMatch.group() + logger.info("Verifying all lines in the header begin with a lineComment") + for line in header.split("\n"): + lineMatch = re.search("^" + re.escape(lineComment) + ".*", line) + if lineMatch == None: + headerMatch = None + break + + if headerMatch != None: + 
logger.info("Found existing header in line comment section") + + if (headerMatch != None + and commandLineOption.headerFile != None + and notUpdated == False): + # an existing header was found, we will need to replace it + outputHeader = (inputHeader[:headerMatch.start() + headerStartOffset] + + newHeader + inputHeader[headerMatch.end() + headerEndOffset:]) + + logger.info("Updated existing header") + logger.debug("\n" + headerMatch.group() + "\nwith: \n" + newHeader) + elif ((commandLineOption.addMissing and fileTypeConfig.getBooleanConfig("addMissing") != False) + and notUpdated == False + and commandLineOption.headerFile != None): + # an existing header was not found, we need to add a new one + + placementSearch = fileTypeConfig.getConfig("placeAfter") + if placementSearch != None: + logger.info("Searching for the placeAfter") + placementMatch = re.search(placementSearch, inputHeader) + + if placementMatch != None: + + outputHeader = inputHeader[:placementMatch.end()] + + if outputHeader[ - 1] != "\n": + outputHeader += "\n" + + outputHeader += newHeader + + if inputHeader[placementMatch.end()] != "\n": + outputHeader += "\n" + + outputHeader += inputHeader[placementMatch.end():] + + logger.info("Added new header after placement match") + logger.debug("\n" + newHeader + "\nplacement match:\n" + + placementMatch.group()) + else: + # we didn't find the placement match + info = "Failed to find placement match, " + + requirePlaceAfter = fileTypeConfig.getBooleanConfig("requirePlaceAfter") + + if requirePlaceAfter == None: + requirePlaceAfter = True + + if requirePlaceAfter == True: + outputHeader = inputHeader + logger.info(info + "no file modifications were made") + notUpdated = True + else: + outputHeader = newHeader + + if len(inputHeader) != 0 and inputHeader[0] != "\n": + outputHeader += "\n" + + outputHeader += inputHeader + + logger.info(info + "but placement matching is not required") + logger.info("Added new header") + logger.debug("\n" + newHeader) + + 
else: + outputHeader = newHeader + + if inputHeader[0] != "\n": + outputHeader += "\n" + + outputHeader += inputHeader + + logger.info("Added new header") + logger.debug("\n" + newHeader) + else: + # don't do anything + outputHeader = inputHeader + logInfo = "" + if newHeader == None: + logInfo = "No header file provided, " + elif notUpdated == False: + logInfo = "Failed to find existing header, " + logger.info(logInfo + "no file modifications were made") + notUpdated = True + + outputStream = sys.stdout + + if commandLineOption.backupSuffix != None: + if commandLineOption.backupSuffix != "" and notUpdated == False: + # create a backup of the original file + backupFileName = inputFileName + commandLineOption.backupSuffix + backupFile = open(backupFileName, 'w') + backupFile.write(inputHeader) + backupFile.write(inputFooter) + backupFile.close() + logger.info("Created backup file: " + backupFileName) + outputStream = open(inputFileName, 'w') + + outputStream.write(outputHeader) + outputStream.write(inputFooter) + + outputStream.flush() + + if outputStream != sys.stdout: + outputStream.close() + if notUpdated == False: + logger.info("Performed in-place update") + +if __name__ == "__main__": main() \ No newline at end of file diff --git a/cave/com.raytheon.uf.viz.alertviz/localization/alertViz/python/DebugProcessor.py b/cave/com.raytheon.uf.viz.alertviz/localization/alertViz/python/DebugProcessor.py index cf3a4154f6..51e8342dfe 100644 --- a/cave/com.raytheon.uf.viz.alertviz/localization/alertViz/python/DebugProcessor.py +++ b/cave/com.raytheon.uf.viz.alertviz/localization/alertViz/python/DebugProcessor.py @@ -1,42 +1,42 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +import AlertVizProcessor + +# +# A debug processor that sends messages to standard out +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/09/09 chammack Initial Creation. +# # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -import AlertVizProcessor - -# -# A debug processor that sends messages to standard out -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/09/09 chammack Initial Creation. -# -# -# - -## -# This is a base file that is not intended to be overridden. -## - -class DebugProcessor(AlertVizProcessor.AlertVizProcessor): - - def process(self, statusMessage, alertMetadata, globalConfiguration): - print "%s %s %s" % (statusMessage.getPriority(), statusMessage.getCategory(), statusMessage.getMessage()) +# + +## +# This is a base file that is not intended to be overridden. 
+## + +class DebugProcessor(AlertVizProcessor.AlertVizProcessor): + + def process(self, statusMessage, alertMetadata, globalConfiguration): + print("%s %s %s" % (statusMessage.getPriority(), statusMessage.getCategory(), statusMessage.getMessage())) diff --git a/cave/com.raytheon.viz.gfe/GFESuite/src/gfeClient/gfeClient.py b/cave/com.raytheon.viz.gfe/GFESuite/src/gfeClient/gfeClient.py index 27941c4381..18dea78320 100755 --- a/cave/com.raytheon.viz.gfe/GFESuite/src/gfeClient/gfeClient.py +++ b/cave/com.raytheon.viz.gfe/GFESuite/src/gfeClient/gfeClient.py @@ -1,141 +1,141 @@ -#!/awips2/python/bin/python -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ----------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Jan 24, 2017 6092 randerso Initial Creation -# -## - -import os -import sys -import time -import argparse - -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request.GfeClientRequest import GfeClientRequest -from dynamicserialize.dstypes.java.util import Date -from awips import ThriftClient -from awips import UsageArgumentParser -from awips.UsageArgumentParser import StoreTimeAction -from awips.UsageArgumentParser import TIME_FORMAT - -def validateArgs(args=None): - - parser = UsageArgumentParser.UsageArgumentParser(conflict_handler="resolve", - prog='gfeClient', add_help=False) - parser.add_argument("script", action="store", - help=argparse.SUPPRESS, - metavar="script") - parser.add_argument("-h", "--host", action="store", dest="host", - help="host name of edex request server", - default=str(os.getenv("DEFAULT_HOST", "localhost")), - metavar="hostname") - parser.add_argument("-p", "--port", action="store", type=int, dest="port", - help="port number of edex request server", - default=int(os.getenv("DEFAULT_PORT", "9581")), - metavar="port") - - ############################################################################ - # -site is used for backward compatibility, --site is preferred - # long names with single dash are non-standard in Unix/Linux - ############################################################################ - parser.add_argument("--site", "-site", action="store", dest="site", required=True, - help="site ID", - metavar="site") - parser.add_argument("-c", "--config", action="store", dest="configFile", required=False, - default="gfeConfig", - help="GFE config file -- default gfeConfig", - metavar="configFile") - parser.add_argument("-u", action="store", dest="userName", required=False, - help="user name 
-- default SITE", - default="SITE", - metavar="userName") - parser.add_argument("-z", "--drt", action=StoreTimeAction, dest="drt", required=False, - help="displaced real time -- format YYYYMMDD_hhmm", - metavar="drt") - - ############################################################################ - # adding this arguments so -s is not recognized as -site in other scripts - # -s is not used by this script - ############################################################################ - parser.add_argument("-s", action="store", dest="startTime", required=False, - help=argparse.SUPPRESS) - - args, scriptArgs = parser.parse_known_args(args) - return parser, args, scriptArgs - -def main(args): - - # if no args other than script add --help so usage is displayed - if len(args) < 2: - args.extend(["--help"]) - - # if --help in args add dummy --site arg so we can display - # full script usage, not just the gfeClient.py usage - if "--help" in args: - args.extend(["--site", "XXX"]) - - parser, gfeClientArgs, scriptArgs = validateArgs(args) - - # add config and user option to scriptArgs - scriptArgs.extend(["-c", gfeClientArgs.configFile, "-u", gfeClientArgs.userName]) - - # add drt option if specified - if gfeClientArgs.drt: - timeString = time.strftime(TIME_FORMAT, gfeClientArgs.drt) - scriptArgs.extend(["-z", timeString]) - - # add startTime option if specified - if gfeClientArgs.startTime: - scriptArgs.extend(["-s", gfeClientArgs.startTime]) - - # shutdown isn't a real script and has no gfeClientArgs to validate - if gfeClientArgs.script.lower() != "shutdown": - - # call the validateArgs() method in the target script - scriptGlobals = {} - scriptLocals = {} - execfile(gfeClientArgs.script, scriptGlobals, scriptLocals) - scriptLocals["validateArgs"](args, [parser]) - - elif "--help" in args: - # Don't do shutdown if --help specified - # this is only for ifpIMAGE since it's calling shutdown until - # PngWriter can be fixed to run more than once in a session - sys.exit(0) - - 
request = GfeClientRequest(gfeClientArgs.script, gfeClientArgs.site, - gfeClientArgs.configFile, gfeClientArgs.userName, - scriptArgs) - if gfeClientArgs.drt: - import calendar - - timeInMillis = calendar.timegm(gfeClientArgs.drt) * 1000 - request.setTime(Date(timeInMillis)) - - thriftClient = ThriftClient.ThriftClient(gfeClientArgs.host, gfeClientArgs.port, "/services") - thriftClient.sendRequest(request) - -if __name__ == '__main__': - sys.exit(main(sys.argv[1:])) +#!/awips2/python/bin/python +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ----------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Jan 24, 2017 6092 randerso Initial Creation +# +## + +import os +import sys +import time +import argparse + +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request.GfeClientRequest import GfeClientRequest +from dynamicserialize.dstypes.java.util import Date +from awips import ThriftClient +from awips import UsageArgumentParser +from awips.UsageArgumentParser import StoreTimeAction +from awips.UsageArgumentParser import TIME_FORMAT + +def validateArgs(args=None): + + parser = UsageArgumentParser.UsageArgumentParser(conflict_handler="resolve", + prog='gfeClient', add_help=False) + parser.add_argument("script", action="store", + help=argparse.SUPPRESS, + metavar="script") + parser.add_argument("-h", "--host", action="store", dest="host", + help="host name of edex request server", + default=str(os.getenv("DEFAULT_HOST", "localhost")), + metavar="hostname") + parser.add_argument("-p", "--port", action="store", type=int, dest="port", + help="port number of edex request server", + default=int(os.getenv("DEFAULT_PORT", "9581")), + metavar="port") + + ############################################################################ + # -site is used for backward compatibility, --site is preferred + # long names with single dash are non-standard in Unix/Linux + ############################################################################ + parser.add_argument("--site", "-site", action="store", dest="site", required=True, + help="site ID", + metavar="site") + parser.add_argument("-c", "--config", action="store", dest="configFile", required=False, + default="gfeConfig", + help="GFE config file -- default gfeConfig", + metavar="configFile") + parser.add_argument("-u", action="store", dest="userName", required=False, + help="user name 
-- default SITE", + default="SITE", + metavar="userName") + parser.add_argument("-z", "--drt", action=StoreTimeAction, dest="drt", required=False, + help="displaced real time -- format YYYYMMDD_hhmm", + metavar="drt") + + ############################################################################ + # adding this arguments so -s is not recognized as -site in other scripts + # -s is not used by this script + ############################################################################ + parser.add_argument("-s", action="store", dest="startTime", required=False, + help=argparse.SUPPRESS) + + args, scriptArgs = parser.parse_known_args(args) + return parser, args, scriptArgs + +def main(args): + + # if no args other than script add --help so usage is displayed + if len(args) < 2: + args.extend(["--help"]) + + # if --help in args add dummy --site arg so we can display + # full script usage, not just the gfeClient.py usage + if "--help" in args: + args.extend(["--site", "XXX"]) + + parser, gfeClientArgs, scriptArgs = validateArgs(args) + + # add config and user option to scriptArgs + scriptArgs.extend(["-c", gfeClientArgs.configFile, "-u", gfeClientArgs.userName]) + + # add drt option if specified + if gfeClientArgs.drt: + timeString = time.strftime(TIME_FORMAT, gfeClientArgs.drt) + scriptArgs.extend(["-z", timeString]) + + # add startTime option if specified + if gfeClientArgs.startTime: + scriptArgs.extend(["-s", gfeClientArgs.startTime]) + + # shutdown isn't a real script and has no gfeClientArgs to validate + if gfeClientArgs.script.lower() != "shutdown": + + # call the validateArgs() method in the target script + scriptGlobals = {} + scriptLocals = {} + exec(compile(open(gfeClientArgs.script).read(), gfeClientArgs.script, 'exec'), scriptGlobals, scriptLocals) + scriptLocals["validateArgs"](args, [parser]) + + elif "--help" in args: + # Don't do shutdown if --help specified + # this is only for ifpIMAGE since it's calling shutdown until + # PngWriter can be fixed to 
run more than once in a session + sys.exit(0) + + request = GfeClientRequest(gfeClientArgs.script, gfeClientArgs.site, + gfeClientArgs.configFile, gfeClientArgs.userName, + scriptArgs) + if gfeClientArgs.drt: + import calendar + + timeInMillis = calendar.timegm(gfeClientArgs.drt) * 1000 + request.setTime(Date(timeInMillis)) + + thriftClient = ThriftClient.ThriftClient(gfeClientArgs.host, gfeClientArgs.port, "/services") + thriftClient.sendRequest(request) + +if __name__ == '__main__': + sys.exit(main(sys.argv[1:])) diff --git a/cave/com.raytheon.viz.gfe/GFESuite/src/ifpimage/PngWriter.py b/cave/com.raytheon.viz.gfe/GFESuite/src/ifpimage/PngWriter.py index f496d292c6..aef8dbd634 100644 --- a/cave/com.raytheon.viz.gfe/GFESuite/src/ifpimage/PngWriter.py +++ b/cave/com.raytheon.viz.gfe/GFESuite/src/ifpimage/PngWriter.py @@ -1,535 +1,535 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Aug 20, 2012 1077 randerso Fixed backgroundColor setting -# Aug 20, 2012 1082 randerso Fixed 1 image per grid -# Aug 29, 2012 1081 dgilling Update usage statement. -# Apr 25, 2015 4952 njensen Updated for new JEP API -# Feb 06, 2017 5959 randerso Removed Java .toString() calls -# Feb 07, 2017 6092 randerso Refactored to support calling validateArgs() -# from gfeClient.py -# -## - -import sys - -def runIfpImage(args): - ############################################################################ - # PngWriter and required imports nested in this function because they - # can only be run under Jep. 
This allows validateArgs to be called from - # a pure Python environment - ############################################################################ - - import string, LogStream, getopt, os, time - import time, TimeRange, AbsTime - import GFEPainter - import loadConfig - - from operator import attrgetter - - from java.util import ArrayList - from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID - from com.raytheon.uf.common.dataplugin.gfe.sample import SampleId - from com.raytheon.uf.common.dataplugin.gfe.weatherelement import WEItem, WEGroup - from com.raytheon.uf.common.time import DataTime - from com.raytheon.uf.viz.core import RGBColors - from com.raytheon.uf.viz.core.localization import LocalizationManager - from com.raytheon.viz.gfe.core import GFEIntervalTimeMatcher - - from com.raytheon.viz.gfe.core.parm import ParmDisplayAttributes - VisMode = ParmDisplayAttributes.VisMode - - from com.raytheon.viz.gfe.edittool import GridID - from com.raytheon.viz.gfe.rsc.colorbar import FitToData - from com.raytheon.viz.gfe.rsc.colorbar import FitToData - - class PngWriter: - def __init__(self, conf="testIFPImage", baseTime=None, - timeRange=None, usrTimeRange=None): - self.site = LocalizationManager.getInstance().getCurrentSite() - - self._topo = 0 - - # import the config file - self.config = __import__(conf) - loadConfig.loadPreferences(self.config) - - self.baseTime = baseTime - - # Create GFEPainter first and get DataManager from painter - self.viz = self.createPainter() - self.dm = self.viz.getDataManager(); - - LogStream.logEvent("Configuration File: ", conf) - - self.ipn = self.getConfig('Png_image', '') - - # user named time range specified? - if usrTimeRange is not None: - s_tr = self.dm.getSelectTimeRangeManager().getRange(usrTimeRange) - if s_tr is None: - s = usrTimeRange + " is not a valid time range name." 
- LogStream.logProblem(s) - raise ValueError(s) - else: - tr = TimeRange.TimeRange(s_tr.toTimeRange()) - self.pngTimeRange = tr - else: - self.pngTimeRange = timeRange - - - def __del__(self): - self.dm = None - self.dbss = None - - def getParms(self): - rval = [] - pids = self.initParms() - self.dm.getParmManager().setDisplayedParms(pids) - for p in pids: - rval.append(self.dm.getParmManager().getParm(p)) - - if self._topo: - tparm = self.dm.getParmManager().parmInExpr("Topo", 1) - self.dm.parmMgr().setParmDisplayable(tparm, 1) - rval.append(tparm) - return rval - - def getBG(self): - bgColor = self.getConfig('bgColor', "black") - trans = self.getConfig('Png_transBG', 0, int) - return bgColor, trans - - def getFileName(self, dir, setime): - # calculate output filename, baseTime is AbsTime - baseTimeFormat = self.getConfig('Png_baseTimeFormat', "%Y%m%d_%H%M") - prefix = self.getConfig('Png_filenamePrefix', '') - if self.baseTime is None: - timeString = setime.stringFmt(baseTimeFormat) - else: - deltaTime = (setime - self.baseTime) / 3600 #in hours - timeString = self.baseTime.stringFmt(baseTimeFormat) + \ - '_F' + `deltaTime` - fname = dir + "/" + prefix + timeString - return fname - - def getFileType(self): - ext = self.getConfig('Png_fileType', 'png') - return ext - - def writeInfo(self, dir, setime, visualInfo): - if len(visualInfo) > 0: - fname = self.getFileName(dir, setime) + ".info" - infoWanted = self.getConfig('Png_infoFiles', 1, int) - if infoWanted != 1: - return - - # Write out information file - fmt = "%Y%m%d_%H%M" - file = open(fname, 'w') - file.write('ValidTime: ' + setime.stringFmt(fmt) + '\n') - file.write('NumOfGrids: ' + `len(visualInfo)` + '\n') - for id, start, end, color, image in visualInfo: - if image: - imgString = 'IMAGE' - else: - imgString = 'GRAPHIC' - file.write('Grid: ' + `id` + ' ' + start.stringFmt(fmt) - + ' ' + end.stringFmt(fmt) + ' ' + color + ' ' - + imgString + '\n') - file.close() - - def initSamples(self): - # Load default 
sample sets - samplesets = self.getConfig('DefaultSamples', []) - sampleParms = self.getConfig('SampleParms', []) - if samplesets is not None: - self.dm.getSampleSetManager().setShowLatLon(False) - # command SampleSet to load each sample set - sampleInv = self.dm.getSampleSetManager().getInventoryAsStrings() - for id in samplesets: - sid = SampleId(id) - for inv in sampleInv: - if sid.getName() == inv: - self.dm.getSampleSetManager().loadSampleSet(sid, 'ADD') - - def initParms(self): - dm = self.dm - btext = self.getConfig('Png_parms', []) - if len(btext) == 0: - LogStream.logProblem("Png_parms missing or empty") - raise UserWarning("Png_parms missing or empty") - - if "Topo" in btext: - self._topo = 1 - btext.remove("Topo") - - ip = self.getConfig('Png_image', None) - if ip == "Topo": - self._topo = 1 - - # Attempt to decode pseudo parms in the config file - wegroup = WEGroup() - wegroup.setName('png') - weItems = jep.jarray(len(btext), WEItem) - for i in range(len(btext)): - text = btext[i].split(' ') - parmid = text[0] + '_00000000_0000' - parmid = string.replace(parmid, ':', ':SITE_GRID_') - cycles = text[1] - p = ParmID(parmid) - weItems[i] = WEItem(p, int(cycles)) - wegroup.setWeItems(weItems) - - # make the text file - # id = AFPS.TextFileID("png",'BUNDLE') - # txtfile = AFPS.TextFile(id, ctext) - - # process the bundle - dbIDs = dm.getParmManager().getAvailableDbs() - availableParmIDs = [] - for i in range(dbIDs.size()): - dbID = dbIDs.get(i) - nextAvailable = dm.getParmManager().getAvailableParms(dbID) - for next in nextAvailable: - availableParmIDs.append(next) - - size = len(availableParmIDs) - jparmIds = jep.jarray(size, ParmID) - for i in range(size): - jparmIds[i] = availableParmIDs[i] - vv = dm.getWEGroupManager().getParmIDs(wegroup, jparmIds) - if len(vv) == 0: - LogStream.logProblem("Png_parms contains no valid weather " - + "elements") - raise UserWarning("Png_parms contains no valid weather elements") - - return vv - - def getConfig(self, opt, 
default, cast=None): - if hasattr(self.config, opt): - try: - getter = attrgetter(opt) - val = getter(self.config) - #val = self.config[opt] - if cast is not None: - return cast(val) - return val - except KeyError: - return default - else: - return default - - def createPainter(self): - # Extract properties needed to construct painter - height = self.getConfig('Png_height', None, int) - width = self.getConfig('Png_width', None, int) - leftExpand = self.getConfig('OfficeDomain_expandLeft', 10, int) - rightExpand = self.getConfig('OfficeDomain_expandRight', 10, int) - topExpand = self.getConfig('OfficeDomain_expandTop', 10, int) - bottomExpand = self.getConfig('OfficeDomain_expandBottom', 10, int) - mask = self.getConfig(self.site + '_mask', None) - wholeDomain = self.getConfig('Png_wholeDomain', 0, int) - - #TODO handle transparent background - bgColor, trans = self.getBG() - - return GFEPainter.GFEPainter(width, height, leftExpand, rightExpand, topExpand, bottomExpand, mask, wholeDomain, bgColor) - - def paint(self, dir): - #mmgr = self.dm.mapMgr() - mv = [] - mids = [] - localFlag = self.getConfig('Png_localTime', 0, int) - snapshotTime = self.getConfig('Png_snapshotTime', 0, int) - useLegend = self.getConfig('Png_legend', 1, int) - maps = self.getConfig('MapBackgrounds_default', []) - fitToDataArea = self.getConfig('Png_fitToDataArea', None) - omitColorbar = self.getConfig('Png_omitColorBar', 0, int) - showLogo = self.getConfig('Png_logo', None) - logoString = self.getConfig('Png_logoString', None) - smooth = self.getConfig('Png_smoothImage', 0, int) - fexten = self.getFileType() - - # get the fit to data edit area, and set the active edit area - if fitToDataArea is not None: - availableSets = self.dm.getRefManager().getAvailableSets() - setSize = availableSets.size() - for x in range(setSize): - s = availableSets.get(x) - if fitToDataArea == s.getName(): - refdata = self.dm.getRefManager().loadRefSet(s) - self.dm.getRefManager().setActiveRefSet(refdata) - - 
maskBasedOnHistory = self.getConfig('Png_historyMask', 0, int) - - viz = self.viz - - if not omitColorbar: - viz.enableColorbar() - - prms = self.getParms() - - # allow user to specify precise interval for creation of images - # rather than the automatically generated set - paintInterval = self.getConfig('Png_interval', None, int) - if paintInterval is not None: - # Interval specified, create interval time matcher - paintIntervalOffset = self.getConfig('Png_intervalOffset', 0, int) - if paintInterval < 0: - paintInterval = 1 - if paintInterval > 24: - paintInterval = 24 - tm = GFEIntervalTimeMatcher() - tm.setTimeMatchingInterval(paintInterval, paintIntervalOffset, self.dm.getParmManager().getSystemTimeRange()) - viz.getDescriptor().setTimeMatcher(tm) - - if useLegend: - snapshotTime = self.getConfig('Png_snapshotTime', 1, int) - descName = self.getConfig('Png_descriptiveWeName', 'SHORT') - - localTime = self.getConfig('Png_localTime', 0, int) - if localTime: - selector = 'Png_legendFormat_LT_' - else: - selector = 'Png_legendFormat_Zulu_' - - durFmt = self.getConfig(selector + 'dur', '') - startFmt = self.getConfig(selector + 'start', '%b %d %H%M%Z to ') - endFmt = self.getConfig(selector + 'end', '%b %d %H%M%Z') - snapshotFmt = self.getConfig(selector + 'snapshot', '%b%d%H%M%Z') - overrideColors = {} - for p in prms: - pname = p.getParmID().compositeNameUI() - if pname == self.ipn: - overrideColors[pname] = "White" - - color = self.getConfig(pname + "_Legend_color", None) - if color: - overrideColors[pname] = color - lang = self.getConfig('Png_legendLanguage', ''); - viz.setupLegend(localTime, snapshotTime, snapshotFmt, descName, durFmt, startFmt, endFmt, overrideColors, lang) - - xOffset = self.getConfig("MapLabelXOffset", None, int) - yOffset = self.getConfig("MapLabelYOffset", None, int) - for map in maps: - color = self.getConfig(map + "_graphicColor", None) - lineWidth = self.getConfig(map + "_lineWidth", None, int) - linePattern = self.getConfig(map + 
"_linePattern", None) - labelAttribute = self.getConfig(map + "_labelAttribute", None) - fontOffset = self.getConfig(map + "_fontOffset", None, int) - viz.addMapBackground(map, color, lineWidth, linePattern, xOffset, - yOffset, labelAttribute, fontOffset) - - graphicParms = [] - fitToDataAlg = None - for p in prms: - pname = p.getParmID().compositeNameUI() - colormap = self.getConfig(pname + '_defaultColorTable', None) - colorMax = self.getConfig(pname + '_maxColorTableValue', None, float) - colorMin = self.getConfig(pname + '_minColorTableValue', None, float) - color = self.getConfig(pname + '_graphicColor', None) - lineWidth = self.getConfig(pname + '_lineWidth', None, int) - viz.addGfeResource(p, colormap=colormap, colorMin=colorMin, colorMax=colorMax, \ - smooth=smooth, color=color, lineWidth=lineWidth) - fitToDataAlg = self.getConfig(pname + '_fitToDataColorTable', None) - if fitToDataAlg is not None: - fit = FitToData(self.dm, p) - if fitToDataAlg == 'All Grids': - fit.fitToData() - fitToDataAlg = None - elif fitToDataAlg == 'All Grids over Area': - fit.fitToData(self.dm.getRefManager().getActiveRefSet()) - fitToDataAlg = None - - if pname == self.ipn: - self.dm.getSpatialDisplayManager().setDisplayMode(p, VisMode.IMAGE) - else: - self.dm.getSpatialDisplayManager().setDisplayMode(p, VisMode.GRAPHIC) - - self.initSamples() - - # Verify all resources are time matched before painting - desc = viz.getDescriptor() - desc.redoTimeMatching() - times = desc.getFramesInfo().getFrameTimes() - - # paint once to get map retrieval started - if len(times) > 0: - viz.paint(times[0]) - - for frame in times: - paintTime = AbsTime.AbsTime(frame.getRefTime()) - if self.overlapsWithGrids(prms, paintTime.javaDate()): - visualInfo = [] - for p in prms: - griddata = p.overlappingGrid(paintTime.javaDate()) - if griddata is None: - continue - - # fit to data special cases - if str(p.getDisplayAttributes().getVisMode()) == 'Image': - fitToDataAlg = 
self.getConfig(p.getParmID().compositeNameUI() + '_fitToDataColorTable', None) - if fitToDataAlg: - fit = FitToData(self.dm, p) - gridid = GridID(p, paintTime.javaDate()) - if fitToDataAlg == 'Single Grid': - fit.fitToData(gridid) - elif fitToDataAlg == 'Single Grid over Area': - fit.fitToData(gridid, self.dm.getRefManager().getActiveRefSet()) - - info = (str(p.getParmID()), AbsTime.AbsTime(griddata.getGridTime().getStart()), - AbsTime.AbsTime(griddata.getGridTime().getEnd()), - RGBColors.getColorName(p.getDisplayAttributes().getBaseColor()), str(p.getDisplayAttributes().getVisMode()) == 'Image') - visualInfo.append(info) - - viz.paint(frame) - fname = self.getFileName(dir, paintTime) + '.' + fexten - viz.outputFiles(fname, showLogo, logoString) - self.writeInfo(dir, paintTime, visualInfo) - else: - LogStream.logEvent("No grids to generate for ", `paintTime`) - - visuals = None - mv = None - iv = None - lv = None - - # return true if there is grid data that overlaps with time t - def overlapsWithGrids(self, prms, t): - for p in prms: - grid = p.overlappingGrid(t) - if grid is not None: - gridTime = TimeRange.TimeRange(grid.getGridTime()) - if self.pngTimeRange.overlaps(gridTime): - return 1 - return 0 - -################################################################################ -# body of runIfpImage method -################################################################################ - def decodeTimeStruct(timeStruct): - return AbsTime.absTimeYMD(timeStruct.tm_year, timeStruct.tm_mon, - timeStruct.tm_mday, - timeStruct.tm_hour, timeStruct.tm_min) - - tr = TimeRange.allTimes() - startTime = tr.startTime() - endTime = tr.endTime() - baseTime = None - - if args.startTime: - startTime = decodeTimeStruct(args.startTime) - - if args.endTime: - endTime = decodeTimeStruct(args.endTime) - - if args.baseTime: - baseTime = decodeTimeStruct(args.baseTime) - - pngTimeRange = TimeRange.TimeRange(startTime, endTime) - - outDir = args.outDir - if outDir == 
DEFAULT_OUTPUT_DIR: - settings = __import__(args.configFile) - if hasattr(settings, "GFESUITE_PRDDIR"): - outDir = getattr(settings, "GFESUITE_PRDDIR") + '/IMAGE' - - if not os.path.exists(outDir): - s = "Missing output directory: " + outDir - LogStream.logProblem(s) - raise IOError(s) - - if not os.path.isdir(outDir): - s = "Specified output directory is not a directory: " + outDir - LogStream.logProblem(s) - raise IOError(s) - - if not os.access(outDir, os.W_OK): - s = "Output directory is not writable: " + outDir - LogStream.logProblem(s) - raise IOError(s) - - LogStream.logEvent("ifpIMAGE Starting") - LogStream.logEvent("Using output directory: " + outDir) - - try: - pngw = PngWriter(args.configFile, baseTime, pngTimeRange, args.usrTimeName) - pngw.paint(outDir) - except Exception, e: - LogStream.logProblem(LogStream.exc()) - LogStream.logEvent("ifpIMAGE Finished") - -def validateArgs(args=None, parents=[]): - ############################################################################ - # imports required for this method must be here so it can be invoked - # from gfeClient.py - ############################################################################ - from awips import UsageArgumentParser - from awips.UsageArgumentParser import StoreTimeAction - - global DEFAULT_OUTPUT_DIR - DEFAULT_OUTPUT_DIR = '../products/IMAGE' - - parser = UsageArgumentParser.UsageArgumentParser(conflict_handler="resolve", - parents=parents, - prog='ifpIMAGE') - parser.add_argument("-c", "--config", action="store", dest="configFile", required=False, - default="gfeConfig", - help="GFE config file -- default gfeConfig", - metavar="configFile") - parser.add_argument("-u", action="store", dest="userName", required=False, - help="user name -- default SITE", - default="SITE", - metavar="userName") - parser.add_argument("-o", action="store", dest="outDir", required=False, - help="Where you want the png files written", - default=DEFAULT_OUTPUT_DIR, - metavar="directory") - 
parser.add_argument("-b", action=StoreTimeAction, dest="baseTime", required=False, - help="Output filenames are relative to baseTime. Basetime format is yyyymmdd_hhmm", - metavar="baseTime") - parser.add_argument("-s", action=StoreTimeAction, dest="startTime", required=False, - help="starting time for images in format YYYYMMDD_hhmm", - metavar="startTime") - parser.add_argument("-e", action=StoreTimeAction, dest="endTime", required=False, - help="ending time for images in format YYYYMMDD_hhmm\n\n", - metavar="endTime") - parser.add_argument("-t", action="store", dest="usrTimeName", required=False, - help="used to specify a user selected time range (e.g., \"Day_3\") 'usrTimeRng' overrides the start/endTime switches.", - metavar="usrTimeRng") - - args = parser.parse_args(args) - - return args - -def error(msg): - print "ERROR: %s\n" % msg - -def main(): - args = validateArgs() - runIfpImage(args) - -if __name__ == "__main__": - main() +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Aug 20, 2012 1077 randerso Fixed backgroundColor setting +# Aug 20, 2012 1082 randerso Fixed 1 image per grid +# Aug 29, 2012 1081 dgilling Update usage statement. +# Apr 25, 2015 4952 njensen Updated for new JEP API +# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# Feb 07, 2017 6092 randerso Refactored to support calling validateArgs() +# from gfeClient.py +# +## + +import sys + +def runIfpImage(args): + ############################################################################ + # PngWriter and required imports nested in this function because they + # can only be run under Jep. 
This allows validateArgs to be called from + # a pure Python environment + ############################################################################ + + import string, LogStream, getopt, os, time + import time, TimeRange, AbsTime + import GFEPainter + import loadConfig + + from operator import attrgetter + + from java.util import ArrayList + from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID + from com.raytheon.uf.common.dataplugin.gfe.sample import SampleId + from com.raytheon.uf.common.dataplugin.gfe.weatherelement import WEItem, WEGroup + from com.raytheon.uf.common.time import DataTime + from com.raytheon.uf.viz.core import RGBColors + from com.raytheon.uf.viz.core.localization import LocalizationManager + from com.raytheon.viz.gfe.core import GFEIntervalTimeMatcher + + from com.raytheon.viz.gfe.core.parm import ParmDisplayAttributes + VisMode = ParmDisplayAttributes.VisMode + + from com.raytheon.viz.gfe.edittool import GridID + from com.raytheon.viz.gfe.rsc.colorbar import FitToData + from com.raytheon.viz.gfe.rsc.colorbar import FitToData + + class PngWriter: + def __init__(self, conf="testIFPImage", baseTime=None, + timeRange=None, usrTimeRange=None): + self.site = LocalizationManager.getInstance().getCurrentSite() + + self._topo = 0 + + # import the config file + self.config = __import__(conf) + loadConfig.loadPreferences(self.config) + + self.baseTime = baseTime + + # Create GFEPainter first and get DataManager from painter + self.viz = self.createPainter() + self.dm = self.viz.getDataManager(); + + LogStream.logEvent("Configuration File: ", conf) + + self.ipn = self.getConfig('Png_image', '') + + # user named time range specified? + if usrTimeRange is not None: + s_tr = self.dm.getSelectTimeRangeManager().getRange(usrTimeRange) + if s_tr is None: + s = usrTimeRange + " is not a valid time range name." 
+ LogStream.logProblem(s) + raise ValueError(s) + else: + tr = TimeRange.TimeRange(s_tr.toTimeRange()) + self.pngTimeRange = tr + else: + self.pngTimeRange = timeRange + + + def __del__(self): + self.dm = None + self.dbss = None + + def getParms(self): + rval = [] + pids = self.initParms() + self.dm.getParmManager().setDisplayedParms(pids) + for p in pids: + rval.append(self.dm.getParmManager().getParm(p)) + + if self._topo: + tparm = self.dm.getParmManager().parmInExpr("Topo", 1) + self.dm.parmMgr().setParmDisplayable(tparm, 1) + rval.append(tparm) + return rval + + def getBG(self): + bgColor = self.getConfig('bgColor', "black") + trans = self.getConfig('Png_transBG', 0, int) + return bgColor, trans + + def getFileName(self, dir, setime): + # calculate output filename, baseTime is AbsTime + baseTimeFormat = self.getConfig('Png_baseTimeFormat', "%Y%m%d_%H%M") + prefix = self.getConfig('Png_filenamePrefix', '') + if self.baseTime is None: + timeString = setime.stringFmt(baseTimeFormat) + else: + deltaTime = (setime - self.baseTime) / 3600 #in hours + timeString = self.baseTime.stringFmt(baseTimeFormat) + \ + '_F' + repr(deltaTime) + fname = dir + "/" + prefix + timeString + return fname + + def getFileType(self): + ext = self.getConfig('Png_fileType', 'png') + return ext + + def writeInfo(self, dir, setime, visualInfo): + if len(visualInfo) > 0: + fname = self.getFileName(dir, setime) + ".info" + infoWanted = self.getConfig('Png_infoFiles', 1, int) + if infoWanted != 1: + return + + # Write out information file + fmt = "%Y%m%d_%H%M" + file = open(fname, 'w') + file.write('ValidTime: ' + setime.stringFmt(fmt) + '\n') + file.write('NumOfGrids: ' + repr(len(visualInfo)) + '\n') + for id, start, end, color, image in visualInfo: + if image: + imgString = 'IMAGE' + else: + imgString = 'GRAPHIC' + file.write('Grid: ' + repr(id) + ' ' + start.stringFmt(fmt) + + ' ' + end.stringFmt(fmt) + ' ' + color + ' ' + + imgString + '\n') + file.close() + + def initSamples(self): + # 
Load default sample sets + samplesets = self.getConfig('DefaultSamples', []) + sampleParms = self.getConfig('SampleParms', []) + if samplesets is not None: + self.dm.getSampleSetManager().setShowLatLon(False) + # command SampleSet to load each sample set + sampleInv = self.dm.getSampleSetManager().getInventoryAsStrings() + for id in samplesets: + sid = SampleId(id) + for inv in sampleInv: + if sid.getName() == inv: + self.dm.getSampleSetManager().loadSampleSet(sid, 'ADD') + + def initParms(self): + dm = self.dm + btext = self.getConfig('Png_parms', []) + if len(btext) == 0: + LogStream.logProblem("Png_parms missing or empty") + raise UserWarning("Png_parms missing or empty") + + if "Topo" in btext: + self._topo = 1 + btext.remove("Topo") + + ip = self.getConfig('Png_image', None) + if ip == "Topo": + self._topo = 1 + + # Attempt to decode pseudo parms in the config file + wegroup = WEGroup() + wegroup.setName('png') + weItems = jep.jarray(len(btext), WEItem) + for i in range(len(btext)): + text = btext[i].split(' ') + parmid = text[0] + '_00000000_0000' + parmid = string.replace(parmid, ':', ':SITE_GRID_') + cycles = text[1] + p = ParmID(parmid) + weItems[i] = WEItem(p, int(cycles)) + wegroup.setWeItems(weItems) + + # make the text file + # id = AFPS.TextFileID("png",'BUNDLE') + # txtfile = AFPS.TextFile(id, ctext) + + # process the bundle + dbIDs = dm.getParmManager().getAvailableDbs() + availableParmIDs = [] + for i in range(dbIDs.size()): + dbID = dbIDs.get(i) + nextAvailable = dm.getParmManager().getAvailableParms(dbID) + for next in nextAvailable: + availableParmIDs.append(next) + + size = len(availableParmIDs) + jparmIds = jep.jarray(size, ParmID) + for i in range(size): + jparmIds[i] = availableParmIDs[i] + vv = dm.getWEGroupManager().getParmIDs(wegroup, jparmIds) + if len(vv) == 0: + LogStream.logProblem("Png_parms contains no valid weather " + + "elements") + raise UserWarning("Png_parms contains no valid weather elements") + + return vv + + def 
getConfig(self, opt, default, cast=None): + if hasattr(self.config, opt): + try: + getter = attrgetter(opt) + val = getter(self.config) + #val = self.config[opt] + if cast is not None: + return cast(val) + return val + except KeyError: + return default + else: + return default + + def createPainter(self): + # Extract properties needed to construct painter + height = self.getConfig('Png_height', None, int) + width = self.getConfig('Png_width', None, int) + leftExpand = self.getConfig('OfficeDomain_expandLeft', 10, int) + rightExpand = self.getConfig('OfficeDomain_expandRight', 10, int) + topExpand = self.getConfig('OfficeDomain_expandTop', 10, int) + bottomExpand = self.getConfig('OfficeDomain_expandBottom', 10, int) + mask = self.getConfig(self.site + '_mask', None) + wholeDomain = self.getConfig('Png_wholeDomain', 0, int) + + #TODO handle transparent background + bgColor, trans = self.getBG() + + return GFEPainter.GFEPainter(width, height, leftExpand, rightExpand, topExpand, bottomExpand, mask, wholeDomain, bgColor) + + def paint(self, dir): + #mmgr = self.dm.mapMgr() + mv = [] + mids = [] + localFlag = self.getConfig('Png_localTime', 0, int) + snapshotTime = self.getConfig('Png_snapshotTime', 0, int) + useLegend = self.getConfig('Png_legend', 1, int) + maps = self.getConfig('MapBackgrounds_default', []) + fitToDataArea = self.getConfig('Png_fitToDataArea', None) + omitColorbar = self.getConfig('Png_omitColorBar', 0, int) + showLogo = self.getConfig('Png_logo', None) + logoString = self.getConfig('Png_logoString', None) + smooth = self.getConfig('Png_smoothImage', 0, int) + fexten = self.getFileType() + + # get the fit to data edit area, and set the active edit area + if fitToDataArea is not None: + availableSets = self.dm.getRefManager().getAvailableSets() + setSize = availableSets.size() + for x in range(setSize): + s = availableSets.get(x) + if fitToDataArea == s.getName(): + refdata = self.dm.getRefManager().loadRefSet(s) + 
self.dm.getRefManager().setActiveRefSet(refdata) + + maskBasedOnHistory = self.getConfig('Png_historyMask', 0, int) + + viz = self.viz + + if not omitColorbar: + viz.enableColorbar() + + prms = self.getParms() + + # allow user to specify precise interval for creation of images + # rather than the automatically generated set + paintInterval = self.getConfig('Png_interval', None, int) + if paintInterval is not None: + # Interval specified, create interval time matcher + paintIntervalOffset = self.getConfig('Png_intervalOffset', 0, int) + if paintInterval < 0: + paintInterval = 1 + if paintInterval > 24: + paintInterval = 24 + tm = GFEIntervalTimeMatcher() + tm.setTimeMatchingInterval(paintInterval, paintIntervalOffset, self.dm.getParmManager().getSystemTimeRange()) + viz.getDescriptor().setTimeMatcher(tm) + + if useLegend: + snapshotTime = self.getConfig('Png_snapshotTime', 1, int) + descName = self.getConfig('Png_descriptiveWeName', 'SHORT') + + localTime = self.getConfig('Png_localTime', 0, int) + if localTime: + selector = 'Png_legendFormat_LT_' + else: + selector = 'Png_legendFormat_Zulu_' + + durFmt = self.getConfig(selector + 'dur', '') + startFmt = self.getConfig(selector + 'start', '%b %d %H%M%Z to ') + endFmt = self.getConfig(selector + 'end', '%b %d %H%M%Z') + snapshotFmt = self.getConfig(selector + 'snapshot', '%b%d%H%M%Z') + overrideColors = {} + for p in prms: + pname = p.getParmID().compositeNameUI() + if pname == self.ipn: + overrideColors[pname] = "White" + + color = self.getConfig(pname + "_Legend_color", None) + if color: + overrideColors[pname] = color + lang = self.getConfig('Png_legendLanguage', ''); + viz.setupLegend(localTime, snapshotTime, snapshotFmt, descName, durFmt, startFmt, endFmt, overrideColors, lang) + + xOffset = self.getConfig("MapLabelXOffset", None, int) + yOffset = self.getConfig("MapLabelYOffset", None, int) + for map in maps: + color = self.getConfig(map + "_graphicColor", None) + lineWidth = self.getConfig(map + "_lineWidth", 
None, int) + linePattern = self.getConfig(map + "_linePattern", None) + labelAttribute = self.getConfig(map + "_labelAttribute", None) + fontOffset = self.getConfig(map + "_fontOffset", None, int) + viz.addMapBackground(map, color, lineWidth, linePattern, xOffset, + yOffset, labelAttribute, fontOffset) + + graphicParms = [] + fitToDataAlg = None + for p in prms: + pname = p.getParmID().compositeNameUI() + colormap = self.getConfig(pname + '_defaultColorTable', None) + colorMax = self.getConfig(pname + '_maxColorTableValue', None, float) + colorMin = self.getConfig(pname + '_minColorTableValue', None, float) + color = self.getConfig(pname + '_graphicColor', None) + lineWidth = self.getConfig(pname + '_lineWidth', None, int) + viz.addGfeResource(p, colormap=colormap, colorMin=colorMin, colorMax=colorMax, \ + smooth=smooth, color=color, lineWidth=lineWidth) + fitToDataAlg = self.getConfig(pname + '_fitToDataColorTable', None) + if fitToDataAlg is not None: + fit = FitToData(self.dm, p) + if fitToDataAlg == 'All Grids': + fit.fitToData() + fitToDataAlg = None + elif fitToDataAlg == 'All Grids over Area': + fit.fitToData(self.dm.getRefManager().getActiveRefSet()) + fitToDataAlg = None + + if pname == self.ipn: + self.dm.getSpatialDisplayManager().setDisplayMode(p, VisMode.IMAGE) + else: + self.dm.getSpatialDisplayManager().setDisplayMode(p, VisMode.GRAPHIC) + + self.initSamples() + + # Verify all resources are time matched before painting + desc = viz.getDescriptor() + desc.redoTimeMatching() + times = desc.getFramesInfo().getFrameTimes() + + # paint once to get map retrieval started + if len(times) > 0: + viz.paint(times[0]) + + for frame in times: + paintTime = AbsTime.AbsTime(frame.getRefTime()) + if self.overlapsWithGrids(prms, paintTime.javaDate()): + visualInfo = [] + for p in prms: + griddata = p.overlappingGrid(paintTime.javaDate()) + if griddata is None: + continue + + # fit to data special cases + if str(p.getDisplayAttributes().getVisMode()) == 'Image': + 
fitToDataAlg = self.getConfig(p.getParmID().compositeNameUI() + '_fitToDataColorTable', None) + if fitToDataAlg: + fit = FitToData(self.dm, p) + gridid = GridID(p, paintTime.javaDate()) + if fitToDataAlg == 'Single Grid': + fit.fitToData(gridid) + elif fitToDataAlg == 'Single Grid over Area': + fit.fitToData(gridid, self.dm.getRefManager().getActiveRefSet()) + + info = (str(p.getParmID()), AbsTime.AbsTime(griddata.getGridTime().getStart()), + AbsTime.AbsTime(griddata.getGridTime().getEnd()), + RGBColors.getColorName(p.getDisplayAttributes().getBaseColor()), str(p.getDisplayAttributes().getVisMode()) == 'Image') + visualInfo.append(info) + + viz.paint(frame) + fname = self.getFileName(dir, paintTime) + '.' + fexten + viz.outputFiles(fname, showLogo, logoString) + self.writeInfo(dir, paintTime, visualInfo) + else: + LogStream.logEvent("No grids to generate for ", repr(paintTime)) + + visuals = None + mv = None + iv = None + lv = None + + # return true if there is grid data that overlaps with time t + def overlapsWithGrids(self, prms, t): + for p in prms: + grid = p.overlappingGrid(t) + if grid is not None: + gridTime = TimeRange.TimeRange(grid.getGridTime()) + if self.pngTimeRange.overlaps(gridTime): + return 1 + return 0 + +################################################################################ +# body of runIfpImage method +################################################################################ + def decodeTimeStruct(timeStruct): + return AbsTime.absTimeYMD(timeStruct.tm_year, timeStruct.tm_mon, + timeStruct.tm_mday, + timeStruct.tm_hour, timeStruct.tm_min) + + tr = TimeRange.allTimes() + startTime = tr.startTime() + endTime = tr.endTime() + baseTime = None + + if args.startTime: + startTime = decodeTimeStruct(args.startTime) + + if args.endTime: + endTime = decodeTimeStruct(args.endTime) + + if args.baseTime: + baseTime = decodeTimeStruct(args.baseTime) + + pngTimeRange = TimeRange.TimeRange(startTime, endTime) + + outDir = args.outDir + if 
outDir == DEFAULT_OUTPUT_DIR: + settings = __import__(args.configFile) + if hasattr(settings, "GFESUITE_PRDDIR"): + outDir = getattr(settings, "GFESUITE_PRDDIR") + '/IMAGE' + + if not os.path.exists(outDir): + s = "Missing output directory: " + outDir + LogStream.logProblem(s) + raise IOError(s) + + if not os.path.isdir(outDir): + s = "Specified output directory is not a directory: " + outDir + LogStream.logProblem(s) + raise IOError(s) + + if not os.access(outDir, os.W_OK): + s = "Output directory is not writable: " + outDir + LogStream.logProblem(s) + raise IOError(s) + + LogStream.logEvent("ifpIMAGE Starting") + LogStream.logEvent("Using output directory: " + outDir) + + try: + pngw = PngWriter(args.configFile, baseTime, pngTimeRange, args.usrTimeName) + pngw.paint(outDir) + except Exception as e: + LogStream.logProblem(LogStream.exc()) + LogStream.logEvent("ifpIMAGE Finished") + +def validateArgs(args=None, parents=[]): + ############################################################################ + # imports required for this method must be here so it can be invoked + # from gfeClient.py + ############################################################################ + from awips import UsageArgumentParser + from awips.UsageArgumentParser import StoreTimeAction + + global DEFAULT_OUTPUT_DIR + DEFAULT_OUTPUT_DIR = '../products/IMAGE' + + parser = UsageArgumentParser.UsageArgumentParser(conflict_handler="resolve", + parents=parents, + prog='ifpIMAGE') + parser.add_argument("-c", "--config", action="store", dest="configFile", required=False, + default="gfeConfig", + help="GFE config file -- default gfeConfig", + metavar="configFile") + parser.add_argument("-u", action="store", dest="userName", required=False, + help="user name -- default SITE", + default="SITE", + metavar="userName") + parser.add_argument("-o", action="store", dest="outDir", required=False, + help="Where you want the png files written", + default=DEFAULT_OUTPUT_DIR, + metavar="directory") + 
parser.add_argument("-b", action=StoreTimeAction, dest="baseTime", required=False, + help="Output filenames are relative to baseTime. Basetime format is yyyymmdd_hhmm", + metavar="baseTime") + parser.add_argument("-s", action=StoreTimeAction, dest="startTime", required=False, + help="starting time for images in format YYYYMMDD_hhmm", + metavar="startTime") + parser.add_argument("-e", action=StoreTimeAction, dest="endTime", required=False, + help="ending time for images in format YYYYMMDD_hhmm\n\n", + metavar="endTime") + parser.add_argument("-t", action="store", dest="usrTimeName", required=False, + help="used to specify a user selected time range (e.g., \"Day_3\") 'usrTimeRng' overrides the start/endTime switches.", + metavar="usrTimeRng") + + args = parser.parse_args(args) + + return args + +def error(msg): + print("ERROR: %s\n" % msg) + +def main(): + args = validateArgs() + runIfpImage(args) + +if __name__ == "__main__": + main() diff --git a/cave/com.raytheon.viz.gfe/GFESuite/src/runifptext/runIFPText.py b/cave/com.raytheon.viz.gfe/GFESuite/src/runifptext/runIFPText.py index 4416f44c2a..47f8c2a416 100644 --- a/cave/com.raytheon.viz.gfe/GFESuite/src/runifptext/runIFPText.py +++ b/cave/com.raytheon.viz.gfe/GFESuite/src/runifptext/runIFPText.py @@ -1,229 +1,229 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# runIFPText.py -# Main program and class for running text formatters from the command-line. -# Based on AWIPS-1 TextFormatter.py code written by hansen. -# -# Author: dgilling -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Feb 07, 2017 6092 randerso Refactored to support calling validateArgs() -# from gfeClient.py -# -## - -import logging -import sys - -LOGGER = None - -def runFormatter(args): - ############################################################################ - # Import of FormatterRunner and loadConfig are nested in this function - # because can only be run under Jep. 
This allows validateArgs to be called - # from a pure Python environment - ############################################################################ - - import FormatterRunner - import loadConfig - - from com.raytheon.viz.gfe.core import DataManager - - LOGGER.info("TextFormatter Starting") - LOGGER.info("CmdLine: " + str(sys.argv[1:])) - - # set default gfe config so no popup windows appear - loadConfig.loadPreferences(args.configFile) - - dataMgr = DataManager.getInstance(None) - - forecasts = FormatterRunner.runFormatter(str(args.databaseID), - dataMgr.getSiteID(), - args.forecastList, args.varDict, - args.vtecMode, args.userName, dataMgr, - args.serverFile, args.editAreas, - args.timeRanges, args.timePeriod, - args.drt, - args.vtecActiveTable, - args.testMode, - args.experimentalMode, - args.serverOutputFile, - args.startTime, args.endTime, - args.language, - args.outputFile, args.appendFile) - - LOGGER.info("Text Formatter Finished") - -def validateArgs(args=None, parents=[]): - ############################################################################ - # imports required for this method must be here so it can be invoked - # from gfeClient.py - ############################################################################ - from awips import UsageArgumentParser - from awips.UsageArgumentParser import StoreDatabaseIDAction - from awips.UsageArgumentParser import StoreTimeAction - - parser = UsageArgumentParser.UsageArgumentParser(conflict_handler="resolve", - parents=parents, - prog='runIFPText') - parser.add_argument("-d", action=StoreDatabaseIDAction, dest="databaseID", required=True, - help="database to run formatter against", - metavar="databaseID") - parser.add_argument("-t", action="append", dest="forecastList", required=True, - help="forecastType", - metavar="forecastList") - parser.add_argument("-c", "--config", action="store", dest="configFile", required=False, - default="gfeConfig", - help="GFE config file -- default gfeConfig", - 
metavar="configFile") - parser.add_argument("-u", action="store", dest="userName", required=False, - help="user name -- default SITE", - default="SITE", - metavar="userName") - parser.add_argument("-o", action="store", dest="outputFile", required=False, - help="output file for text -- default None", - metavar="outputFile") - parser.add_argument("-O", action="store", dest="serverFile", required=False, - help="server output file for text -- default None", - metavar="serverFile") - parser.add_argument("-S", action="store", dest="serverOutputFile", required=False, - help="server controlled output file -- default None", - metavar="serverOutputFile") - parser.add_argument("-A", action="store", dest="appendFile", required=False, - help="append text to given file name", - metavar="appendFile") - parser.add_argument("-l", action="store", dest="language", required=False, - help="language -- english, french, spanish: default english", - choices=['english', 'french', 'spanish'], - metavar="language") - parser.add_argument("-z", "--drt", action=StoreTimeAction, dest="drt", required=False, - help="displaced real time -- format YYYYMMDD_hhmm", - metavar="drt") - group = parser.add_mutually_exclusive_group(required=False) - group.add_argument("-T", action="store_true", dest="testMode", required=False, - help="Generates a \"TEST\" product") - group.add_argument("-E", action="store_true", dest="experimentalMode", required=False, - help="Generates a \"EXPERIMENTAL\" product.") - parser.add_argument("-v", action="store", dest="vtecMode", required=False, - choices=['X', 'O', 'T', 'E'], - help="Specifies vtec mode ('X','O','T','E')", - metavar="vtecMode") - parser.add_argument("-a", action="store", dest="vtecActiveTable", required=False, - choices=['active', 'PRACTICE'], - help="Specifies active table -- 'active' or 'PRACTICE'", - default='active', - metavar="vtecActiveTable") - parser.add_argument("-V", action="store", dest="varDict", required=False, - help="""use this option to 
provide a run-time VariableList - instead of displaying the user dialog. - The dictionary must be in the form of a Python - dictionary string, e.g., - '{("Forecast Product", "productType"): "Morning", - ("Issuance", "issuanceType"): "Routine"}' - The entries must be complete or the product will be cancelled.""", - default="{}", - metavar="varDict") - parser.add_argument("-r", action="append", dest="editAreas", required=False, - help="Edit Area Name", - default=[], - metavar="editAreas") - parser.add_argument("-w", action="append", dest="timeRanges", required=False, - help="named time range (e.g. Today, Tonight)", - default=[], - metavar="timeRanges") - parser.add_argument("-s", action=StoreTimeAction, dest="startTime", required=False, - help="startTime -- format YYYYMMDD_hhmm", - metavar="startTime") - parser.add_argument("-e", action=StoreTimeAction, dest="endTime", required=False, - help="endTime -- format YYYYMMDD_hhmm", - metavar="endTime") - parser.add_argument("-i", action="store", dest="timePeriod", required=False, - type=float, - help="Period for Tables with variable period (rows or columns)", - metavar="timePeriod") - - args = parser.parse_args(args) - - #force VTEC mode to "T" if in TEST mode and another vtecCode is specified - if args.testMode and args.vtecMode is not None: - args.vtecMode = "T" - - #force VTEC mode to "E" if in EXPERIMENTAL mode and another vtecCode - #is specified - elif args.experimentalMode and args.vtecMode is not None: - args.vtecMode = "E" - - #force into TEST mode, if vtec code is 'T' - if args.vtecMode == "T": - args.testMode = True - args.experimentalMode = False - elif args.vtecMode == "E": - args.experimentalMode = True - args.testMode = False - - return args - -def usage(): - validateArgs(['--help']) - -def error(msg): - print "ERROR: %s\n" % msg - -def main2(argv): - # Parse command line - args = validateArgs() - runFormatter(args) - -def __initLogger(): - global LOGGER - logging.basicConfig(level=logging.INFO, - 
format="%(asctime)s %(name)s %(levelname)s: %(message)s", - datefmt="%H:%M:%S") - LOGGER = logging.getLogger("runIFPText.py") - -PROFILE = False -def profMain(arg): - __initLogger() - if PROFILE: - import profile, pstats, sys - limit = 20 - profile.run('main2(sys.argv)', 'pyprof.out') - p = pstats.Stats('pyprof.out') - p.strip_dirs() - p.sort_stats('time', 'calls').print_stats(limit) - p.print_callers(limit) - else: - try: - main2(arg) - except: - LOGGER.exception("Caught Exception: ") - -main = profMain -if __name__ == "__main__": - profMain(sys.argv) +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# runIFPText.py +# Main program and class for running text formatters from the command-line. +# Based on AWIPS-1 TextFormatter.py code written by hansen. 
+# +# Author: dgilling +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Feb 07, 2017 6092 randerso Refactored to support calling validateArgs() +# from gfeClient.py +# +## + +import logging +import sys + +LOGGER = None + +def runFormatter(args): + ############################################################################ + # Import of FormatterRunner and loadConfig are nested in this function + # because can only be run under Jep. This allows validateArgs to be called + # from a pure Python environment + ############################################################################ + + import FormatterRunner + import loadConfig + + from com.raytheon.viz.gfe.core import DataManager + + LOGGER.info("TextFormatter Starting") + LOGGER.info("CmdLine: " + str(sys.argv[1:])) + + # set default gfe config so no popup windows appear + loadConfig.loadPreferences(args.configFile) + + dataMgr = DataManager.getInstance(None) + + forecasts = FormatterRunner.runFormatter(str(args.databaseID), + dataMgr.getSiteID(), + args.forecastList, args.varDict, + args.vtecMode, args.userName, dataMgr, + args.serverFile, args.editAreas, + args.timeRanges, args.timePeriod, + args.drt, + args.vtecActiveTable, + args.testMode, + args.experimentalMode, + args.serverOutputFile, + args.startTime, args.endTime, + args.language, + args.outputFile, args.appendFile) + + LOGGER.info("Text Formatter Finished") + +def validateArgs(args=None, parents=[]): + ############################################################################ + # imports required for this method must be here so it can be invoked + # from gfeClient.py + ############################################################################ + from awips import UsageArgumentParser + from awips.UsageArgumentParser import StoreDatabaseIDAction + from 
awips.UsageArgumentParser import StoreTimeAction + + parser = UsageArgumentParser.UsageArgumentParser(conflict_handler="resolve", + parents=parents, + prog='runIFPText') + parser.add_argument("-d", action=StoreDatabaseIDAction, dest="databaseID", required=True, + help="database to run formatter against", + metavar="databaseID") + parser.add_argument("-t", action="append", dest="forecastList", required=True, + help="forecastType", + metavar="forecastList") + parser.add_argument("-c", "--config", action="store", dest="configFile", required=False, + default="gfeConfig", + help="GFE config file -- default gfeConfig", + metavar="configFile") + parser.add_argument("-u", action="store", dest="userName", required=False, + help="user name -- default SITE", + default="SITE", + metavar="userName") + parser.add_argument("-o", action="store", dest="outputFile", required=False, + help="output file for text -- default None", + metavar="outputFile") + parser.add_argument("-O", action="store", dest="serverFile", required=False, + help="server output file for text -- default None", + metavar="serverFile") + parser.add_argument("-S", action="store", dest="serverOutputFile", required=False, + help="server controlled output file -- default None", + metavar="serverOutputFile") + parser.add_argument("-A", action="store", dest="appendFile", required=False, + help="append text to given file name", + metavar="appendFile") + parser.add_argument("-l", action="store", dest="language", required=False, + help="language -- english, french, spanish: default english", + choices=['english', 'french', 'spanish'], + metavar="language") + parser.add_argument("-z", "--drt", action=StoreTimeAction, dest="drt", required=False, + help="displaced real time -- format YYYYMMDD_hhmm", + metavar="drt") + group = parser.add_mutually_exclusive_group(required=False) + group.add_argument("-T", action="store_true", dest="testMode", required=False, + help="Generates a \"TEST\" product") + group.add_argument("-E", 
action="store_true", dest="experimentalMode", required=False, + help="Generates a \"EXPERIMENTAL\" product.") + parser.add_argument("-v", action="store", dest="vtecMode", required=False, + choices=['X', 'O', 'T', 'E'], + help="Specifies vtec mode ('X','O','T','E')", + metavar="vtecMode") + parser.add_argument("-a", action="store", dest="vtecActiveTable", required=False, + choices=['active', 'PRACTICE'], + help="Specifies active table -- 'active' or 'PRACTICE'", + default='active', + metavar="vtecActiveTable") + parser.add_argument("-V", action="store", dest="varDict", required=False, + help="""use this option to provide a run-time VariableList + instead of displaying the user dialog. + The dictionary must be in the form of a Python + dictionary string, e.g., + '{("Forecast Product", "productType"): "Morning", + ("Issuance", "issuanceType"): "Routine"}' + The entries must be complete or the product will be cancelled.""", + default="{}", + metavar="varDict") + parser.add_argument("-r", action="append", dest="editAreas", required=False, + help="Edit Area Name", + default=[], + metavar="editAreas") + parser.add_argument("-w", action="append", dest="timeRanges", required=False, + help="named time range (e.g. 
Today, Tonight)", + default=[], + metavar="timeRanges") + parser.add_argument("-s", action=StoreTimeAction, dest="startTime", required=False, + help="startTime -- format YYYYMMDD_hhmm", + metavar="startTime") + parser.add_argument("-e", action=StoreTimeAction, dest="endTime", required=False, + help="endTime -- format YYYYMMDD_hhmm", + metavar="endTime") + parser.add_argument("-i", action="store", dest="timePeriod", required=False, + type=float, + help="Period for Tables with variable period (rows or columns)", + metavar="timePeriod") + + args = parser.parse_args(args) + + #force VTEC mode to "T" if in TEST mode and another vtecCode is specified + if args.testMode and args.vtecMode is not None: + args.vtecMode = "T" + + #force VTEC mode to "E" if in EXPERIMENTAL mode and another vtecCode + #is specified + elif args.experimentalMode and args.vtecMode is not None: + args.vtecMode = "E" + + #force into TEST mode, if vtec code is 'T' + if args.vtecMode == "T": + args.testMode = True + args.experimentalMode = False + elif args.vtecMode == "E": + args.experimentalMode = True + args.testMode = False + + return args + +def usage(): + validateArgs(['--help']) + +def error(msg): + print("ERROR: %s\n" % msg) + +def main2(argv): + # Parse command line + args = validateArgs() + runFormatter(args) + +def __initLogger(): + global LOGGER + logging.basicConfig(level=logging.INFO, + format="%(asctime)s %(name)s %(levelname)s: %(message)s", + datefmt="%H:%M:%S") + LOGGER = logging.getLogger("runIFPText.py") + +PROFILE = False +def profMain(arg): + __initLogger() + if PROFILE: + import profile, pstats, sys + limit = 20 + profile.run('main2(sys.argv)', 'pyprof.out') + p = pstats.Stats('pyprof.out') + p.strip_dirs() + p.sort_stats('time', 'calls').print_stats(limit) + p.print_callers(limit) + else: + try: + main2(arg) + except: + LOGGER.exception("Caught Exception: ") + +main = profMain +if __name__ == "__main__": + profMain(sys.argv) diff --git 
a/cave/com.raytheon.viz.gfe/GFESuite/src/runprocedure/runProcedure.py b/cave/com.raytheon.viz.gfe/GFESuite/src/runprocedure/runProcedure.py index 49e9aaf3fe..758eb9e22e 100644 --- a/cave/com.raytheon.viz.gfe/GFESuite/src/runprocedure/runProcedure.py +++ b/cave/com.raytheon.viz.gfe/GFESuite/src/runprocedure/runProcedure.py @@ -1,236 +1,236 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# runProcedure.py -# Main program and class for running Procedures from the command line -# -# Author: hansen -# ---------------------------------------------------------------------------- -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Aug 10, 2016 19248 ryu Fix java import issue (by Nate Jensen) -# Feb 07, 2017 6092 randerso Refactored to support calling validateArgs() -# from gfeClient.py -# Mar 16, 2017 6092 randerso Added check for change of mutableModel -# -## -CLASS_NAME = 'Procedure' -METHOD_NAME = 'execute' - - -def runProcedure(args): - ############################################################################ - # ProcedureRunner and required imports nested in this function because they - # can only be run under Jep. 
This allows validateArgs to be called from - # a pure Python environment - ############################################################################ - - import sys - import time - - import TimeRange, AbsTime, LogStream - import loadConfig - import Exceptions - import MasterInterface - - from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID - from com.raytheon.viz.gfe.core import DataManagerUIFactory - - class ProcedureRunner(MasterInterface.MasterInterface): - def __init__(self, procName): - MasterInterface.MasterInterface.__init__(self) - self.addModule(procName) - - def runProcedure(self, moduleName, className, methodName, **kwargs): - - try: - return self.runMethod(moduleName, className, methodName, **kwargs) - except Exceptions.EditActionError, e: - if "Cancel" == e.errorType() and "Cancel" == e.errorInfo(): - return None - msg = moduleName + ":" + e.errorType() + ": " + e.errorInfo() - raise RuntimeError(msg) - - def run(self, dataMgr, moduleName, className, methodName, varDict=None, editArea=None, timeRange=None): - tr = None - if timeRange: - tr = timeRange.toJavaObj() - preview = dataMgr.getEditActionProcessor().prepareExecute("Procedure", moduleName, editArea, tr, False) - - procArgNames = self.getMethodArgs(moduleName, CLASS_NAME, METHOD_NAME) - - procArgs = {} - for arg in procArgNames: - if arg == 'varDict': - procArgs['varDict'] = varDict - if arg == 'editArea': - procArgs['editArea'] = editArea - if arg == 'timeRange': - procArgs['timeRange'] = timeRange - - try: - self.runProcedure(moduleName, CLASS_NAME, METHOD_NAME, **procArgs) - finally: - # FIXME: This sleep() call is a timing hack - # There seems to be a small delay in processing unlock notifications - # and calling wrapUpExecute() was trying to send save requests for - # grids in the process of already being unlocked - time.sleep(1.5) - dataMgr.getEditActionProcessor().wrapUpExecute(preview, False) - - class RunProcedure: - def __init__(self, procName, - configFile, 
startTime, endTime, timeRange, editArea, - mutableModel, varDict): - - # import the config file - prefs = loadConfig.loadPreferences(configFile) - - LogStream.logEvent("Configuration File: ", configFile) - - if mutableModel is None: - mutableModel = prefs.getString('mutableModel') - else: - prefs.setValue('mutableModel', mutableModel) - - self.__dataMgr = DataManagerUIFactory.getInstance(None) - - currentMutableModel = self.__dataMgr.getParmManager().getMutableDatabase() - desiredMutableModel = self.__dataMgr.getParmManager().decodeDbString(mutableModel) - if currentMutableModel != desiredMutableModel: - DataManagerUIFactory.dispose(None) - self.__dataMgr = DataManagerUIFactory.getInstance(None) - - # Create Time Range - if startTime is not None and endTime is not None: - start = self.decodeTimeStruct(startTime) - end = self.decodeTimeStruct(endTime) - self.__timeRange = TimeRange.TimeRange(start, end) - elif timeRange is not None: - self.__timeRange = TimeRange.TimeRange(self.__dataMgr.getSelectTimeRangeManager().getRange(timeRange).toTimeRange()); - else: - self.__timeRange = TimeRange.default() - - if editArea is not None: - refID = ReferenceID(editArea) - self.__editArea = \ - self.__dataMgr.getRefManager().loadRefSet(refID) - else: - self.__editArea = self.__dataMgr.getRefManager().emptyRefSet() - - LogStream.logVerbose("varDict=", varDict) - - runner = ProcedureRunner(procName) - - errors = runner.getImportErrors() - if len(errors) > 0: - msg = "\n\t".join(["Error importing the following procedures:"] + errors) - LogStream.error(msg) - - runner.instantiate(procName, CLASS_NAME, **{'dbss':self.__dataMgr}) - runner.run(self.__dataMgr, procName, CLASS_NAME, METHOD_NAME, varDict, self.__editArea, self.__timeRange) - - def decodeTimeStruct(self, timeStruct): - return AbsTime.absTimeYMD(timeStruct.tm_year, timeStruct.tm_mon, - timeStruct.tm_mday, - timeStruct.tm_hour, timeStruct.tm_min) - - - runProc = RunProcedure(args.procName, args.configFile, - args.startTime, 
args.endTime, args.timeRange, - args.editArea, args.mutableModel, args.varDict) - -def validateArgs(args=None, parents=[]): - ############################################################################ - # imports required for this method must be here so it can be invoked - # from gfeClient.py - ############################################################################ - from awips import UsageArgumentParser - from awips.UsageArgumentParser import StoreTimeAction - - parser = UsageArgumentParser.UsageArgumentParser(conflict_handler="resolve", - parents=parents, - prog='runProcedure') - parser.add_argument("-n", action="store", dest="procName", required=True, - help="procedureName", - metavar="procName") - parser.add_argument("-c", "--config", action="store", dest="configFile", required=False, - default="gfeConfig", - help="GFE config file -- default gfeConfig", - metavar="configFile") - parser.add_argument("-u", action="store", dest="userName", required=False, - help="user name -- default SITE", - default="SITE", - metavar="userName") - parser.add_argument("-a", action="store", dest="editArea", required=False, - help="editAreaName", - metavar="editArea") - parser.add_argument("-s", action=StoreTimeAction, dest="startTime", required=False, - help="startTime -- format YYYYMMDD_hhmm", - metavar="startTime") - parser.add_argument("-e", action=StoreTimeAction, dest="endTime", required=False, - help="endTime -- format YYYYMMDD_hhmm", - metavar="endTime") - parser.add_argument("-t", action="store", dest="timeRange", required=False, - help="named time range (e.g. 
Today, Tonight)", - metavar="timeRange") - parser.add_argument("-m", action="store", dest="mutableModel", required=False, - help="mutable database", - metavar="mutableModel") - parser.add_argument("-z", "--drt", action=StoreTimeAction, dest="drt", required=False, - help="displaced real time -- format YYYYMMDD_hhmm", - metavar="drt") - parser.add_argument("-V", action="store", dest="varDict", required=False, - help="""use this option to provide a run-time VariableList - instead of displaying the user dialog. - The dictionary must be in the form of a Python - dictionary string, e.g. - '{"Input Variable":"variable value"}'""", - default="{}", - metavar="varDict") - - return parser.parse_args(args) - -def main(): - args = validateArgs() - - if args.drt: - import offsetTime - offsetTime.setDrtOffset(args.drt) - - if args.varDict: - exec "args.varDict = " + args.varDict - - runProcedure(args) - - if args.drt: - import offsetTime - offsetTime.reset() - -if __name__ == "__main__": - main() +# runProcedure.py +# Main program and class for running Procedures from the command line +# +# Author: hansen +# ---------------------------------------------------------------------------- +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Aug 10, 2016 19248 ryu Fix java import issue (by Nate Jensen) +# Feb 07, 2017 6092 randerso Refactored to support calling validateArgs() +# from gfeClient.py +# Mar 16, 2017 6092 randerso Added check for change of mutableModel +# +## +CLASS_NAME = 'Procedure' +METHOD_NAME = 'execute' + + +def runProcedure(args): + ############################################################################ + # ProcedureRunner and required imports nested in this function because they + # can only be run under Jep. 
This allows validateArgs to be called from + # a pure Python environment + ############################################################################ + + import sys + import time + + import TimeRange, AbsTime, LogStream + import loadConfig + import Exceptions + import MasterInterface + + from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID + from com.raytheon.viz.gfe.core import DataManagerUIFactory + + class ProcedureRunner(MasterInterface.MasterInterface): + def __init__(self, procName): + MasterInterface.MasterInterface.__init__(self) + self.addModule(procName) + + def runProcedure(self, moduleName, className, methodName, **kwargs): + + try: + return self.runMethod(moduleName, className, methodName, **kwargs) + except Exceptions.EditActionError as e: + if "Cancel" == e.errorType() and "Cancel" == e.errorInfo(): + return None + msg = moduleName + ":" + e.errorType() + ": " + e.errorInfo() + raise RuntimeError(msg) + + def run(self, dataMgr, moduleName, className, methodName, varDict=None, editArea=None, timeRange=None): + tr = None + if timeRange: + tr = timeRange.toJavaObj() + preview = dataMgr.getEditActionProcessor().prepareExecute("Procedure", moduleName, editArea, tr, False) + + procArgNames = self.getMethodArgs(moduleName, CLASS_NAME, METHOD_NAME) + + procArgs = {} + for arg in procArgNames: + if arg == 'varDict': + procArgs['varDict'] = varDict + if arg == 'editArea': + procArgs['editArea'] = editArea + if arg == 'timeRange': + procArgs['timeRange'] = timeRange + + try: + self.runProcedure(moduleName, CLASS_NAME, METHOD_NAME, **procArgs) + finally: + # FIXME: This sleep() call is a timing hack + # There seems to be a small delay in processing unlock notifications + # and calling wrapUpExecute() was trying to send save requests for + # grids in the process of already being unlocked + time.sleep(1.5) + dataMgr.getEditActionProcessor().wrapUpExecute(preview, False) + + class RunProcedure: + def __init__(self, procName, + configFile, 
startTime, endTime, timeRange, editArea, + mutableModel, varDict): + + # import the config file + prefs = loadConfig.loadPreferences(configFile) + + LogStream.logEvent("Configuration File: ", configFile) + + if mutableModel is None: + mutableModel = prefs.getString('mutableModel') + else: + prefs.setValue('mutableModel', mutableModel) + + self.__dataMgr = DataManagerUIFactory.getInstance(None) + + currentMutableModel = self.__dataMgr.getParmManager().getMutableDatabase() + desiredMutableModel = self.__dataMgr.getParmManager().decodeDbString(mutableModel) + if currentMutableModel != desiredMutableModel: + DataManagerUIFactory.dispose(None) + self.__dataMgr = DataManagerUIFactory.getInstance(None) + + # Create Time Range + if startTime is not None and endTime is not None: + start = self.decodeTimeStruct(startTime) + end = self.decodeTimeStruct(endTime) + self.__timeRange = TimeRange.TimeRange(start, end) + elif timeRange is not None: + self.__timeRange = TimeRange.TimeRange(self.__dataMgr.getSelectTimeRangeManager().getRange(timeRange).toTimeRange()); + else: + self.__timeRange = TimeRange.default() + + if editArea is not None: + refID = ReferenceID(editArea) + self.__editArea = \ + self.__dataMgr.getRefManager().loadRefSet(refID) + else: + self.__editArea = self.__dataMgr.getRefManager().emptyRefSet() + + LogStream.logVerbose("varDict=", varDict) + + runner = ProcedureRunner(procName) + + errors = runner.getImportErrors() + if len(errors) > 0: + msg = "\n\t".join(["Error importing the following procedures:"] + errors) + LogStream.error(msg) + + runner.instantiate(procName, CLASS_NAME, **{'dbss':self.__dataMgr}) + runner.run(self.__dataMgr, procName, CLASS_NAME, METHOD_NAME, varDict, self.__editArea, self.__timeRange) + + def decodeTimeStruct(self, timeStruct): + return AbsTime.absTimeYMD(timeStruct.tm_year, timeStruct.tm_mon, + timeStruct.tm_mday, + timeStruct.tm_hour, timeStruct.tm_min) + + + runProc = RunProcedure(args.procName, args.configFile, + args.startTime, 
args.endTime, args.timeRange, + args.editArea, args.mutableModel, args.varDict) + +def validateArgs(args=None, parents=[]): + ############################################################################ + # imports required for this method must be here so it can be invoked + # from gfeClient.py + ############################################################################ + from awips import UsageArgumentParser + from awips.UsageArgumentParser import StoreTimeAction + + parser = UsageArgumentParser.UsageArgumentParser(conflict_handler="resolve", + parents=parents, + prog='runProcedure') + parser.add_argument("-n", action="store", dest="procName", required=True, + help="procedureName", + metavar="procName") + parser.add_argument("-c", "--config", action="store", dest="configFile", required=False, + default="gfeConfig", + help="GFE config file -- default gfeConfig", + metavar="configFile") + parser.add_argument("-u", action="store", dest="userName", required=False, + help="user name -- default SITE", + default="SITE", + metavar="userName") + parser.add_argument("-a", action="store", dest="editArea", required=False, + help="editAreaName", + metavar="editArea") + parser.add_argument("-s", action=StoreTimeAction, dest="startTime", required=False, + help="startTime -- format YYYYMMDD_hhmm", + metavar="startTime") + parser.add_argument("-e", action=StoreTimeAction, dest="endTime", required=False, + help="endTime -- format YYYYMMDD_hhmm", + metavar="endTime") + parser.add_argument("-t", action="store", dest="timeRange", required=False, + help="named time range (e.g. 
Today, Tonight)", + metavar="timeRange") + parser.add_argument("-m", action="store", dest="mutableModel", required=False, + help="mutable database", + metavar="mutableModel") + parser.add_argument("-z", "--drt", action=StoreTimeAction, dest="drt", required=False, + help="displaced real time -- format YYYYMMDD_hhmm", + metavar="drt") + parser.add_argument("-V", action="store", dest="varDict", required=False, + help="""use this option to provide a run-time VariableList + instead of displaying the user dialog. + The dictionary must be in the form of a Python + dictionary string, e.g. + '{"Input Variable":"variable value"}'""", + default="{}", + metavar="varDict") + + return parser.parse_args(args) + +def main(): + args = validateArgs() + + if args.drt: + import offsetTime + offsetTime.setDrtOffset(args.drt) + + if args.varDict: + exec("args.varDict = " + args.varDict) + + runProcedure(args) + + if args.drt: + import offsetTime + offsetTime.reset() + +if __name__ == "__main__": + main() diff --git a/cave/com.raytheon.viz.gfe/help/EXAMPLESmartInit_NAM.py b/cave/com.raytheon.viz.gfe/help/EXAMPLESmartInit_NAM.py index d8dd25d66e..22f939004f 100644 --- a/cave/com.raytheon.viz.gfe/help/EXAMPLESmartInit_NAM.py +++ b/cave/com.raytheon.viz.gfe/help/EXAMPLESmartInit_NAM.py @@ -1,559 +1,559 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -from Init import * - -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from NAM model -## output. -## -##-------------------------------------------------------------------------- -class NAM12Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "NAM12", "NAM12") - -##-------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. -##-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB950", "MB900","MB850","MB800","MB750", - "MB700","MB650","MB600","MB550", "MB500", - "MB450", "MB400", "MB350"] - -##-------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##-------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - -##-------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool uses the model's boundary layers to calculate a lapse -## rate and then applies that lapse rate to the difference between the -## model topography and the true topography. This algorithm calculates -## the surface temperature for three different sets of points: those that -## fall above the boundary layer, in the boundary layer, and below the -## boundary layer. 
-##-------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL12015, p_SFC, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i-1], - log(self.pres[i]), log(self.pres[i-1]), topo) - val = clip(val, -.00001, 10) - p = where(logical_and(equal(p, -1), higher), - exp(val), p) - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i-1], t_c[i], t_c[i-1], topo) - tmb = where(logical_and(equal(tmb, -1), higher), - tval1, tmb) - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i-1], t_c[i], t_c[i-1], stopo) - tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), - tval2, tms) - - p_SFC = p_SFC / 100 # get te surface pres. in mb - # define the pres. 
of each of the boundary layers - pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, p_SFC - 105, - p_SFC - 135] - # list of temperature grids - temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL12015] - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i-1], temps[i], temps[i-1], p) - gm = greater(pres[i-1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - st = where(logical_and(equal(st, -1), mask), - val, st) - - # where topo level is above highest level in BL fields...use tmb - st = where(logical_and(equal(st,-1),less(p,p_SFC-135)),tmb,st) - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - st = where(equal(st, -1), tmb - tms + t_FHAG2, st) - return self.KtoF(st) - -##-------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. 
-##-------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) - # at the true surface - tsfce = self.esat(self.FtoK(T)) - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz - ws = (0.622 * tsfce) / (newp - tsfce) - rh = w / ws - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10,(Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10,(Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -##-------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model 
-##-------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - def calcSky(self, rh_c, gh_c, topo, p_SFC): - return self.skyFromRH(rh_c, gh_c, topo, p_SFC) - -##-------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. -##-------------------------------------------------------------------------- - def calcPoP(self, gh_c, rh_c, QPF, topo): - rhavg = where(less(gh_c, topo), float32(-1), rh_c) - rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 - count = not_equal(rhavg, -1) - rhavg[equal(rhavg, -1)] = 0 - count = add.reduce(count, 0, dtype=float32) - rhavg = add.reduce(rhavg, 0) - ## add this much based on humidity only - dpop = where(count, rhavg / (count + .001), 0) - 70.0 - dpop[less(dpop, -30)] = -30 - ## calculate the base PoP - pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) - pop += dpop # add the adjustment based on humidity - pop.clip(0, 100, pop) # clip to 100% - return pop - -##-------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. 
-##-------------------------------------------------------------------------- - def calcFzLevel(self, gh_c, t_c, topo): - fzl = self.newGrid(-1) - # for each level in the height cube, find the freezing level - for i in xrange(gh_c.shape[0]): - try: - val = gh_c[i-1] + (gh_c[i] - gh_c[i-1]) / (t_c[i] - t_c[i-1])\ - * (273.15 - t_c[i-1]) - except: - val = gh_c[i] - ## save the height value in fzl - fzl = where(logical_and(equal(fzl, -1), - less_equal(t_c[i], 273.15)), val, fzl) - - return fzl * 3.28 # convert to feet - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. -##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0]-1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex,:,:] - t_c = t_c[:clipindex,:,:] - rh_c = rh_c[:clipindex,:,:] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb=ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i]=self.pres[i] - pmb=clip(pmb,1,1050) - # - # convert temps to C and limit to reasonable values - # - tc=t_c-273.15 - tc=clip(tc,-120,60) - # - # limit RH to reasonable values - # - rh=clip(rh_c,0.5,99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb=self.Wetbulb(tc,rh,pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val=gh_c[i-1]+(gh_c[i]-gh_c[i-1])/(wetb[i]-wetb[i-1])\ - *(-wetb[i-1]) - except: - val=gh_c[i] - snow=where(logical_and(equal(snow,-1),less_equal(wetb[i],0)), - val,snow) - # - # convert to feet - # - snow=snow*3.28 - - return snow - -##-------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##-------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - m1 = less(T, 9) - m2 = greater_equal(T, 30) - snowr = T * -0.5 + 22.5 - snowr[m1] = 20 - snowr[m2] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = self.empty() - fzLevelMask = less_equal(FzLevel - 1000, topo / 0.3048) - snowamt[fzLevelMask] = snowr[fzLevelMask] * QPF[fzLevelMask] - - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) - snowamt[logical_not(snowmask)] = 0 - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". 
-##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - -##-------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##-------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - pt[mask] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) - diffpt = pt[i] - runavg - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i-1], gh_c[i], gh_c[i-1], runavg) - # assign new values if the difference is greater than 3 - mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)), tmh, mh) - return (mh - topo) * 3.28 - -##-------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##-------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] # get the wind grids - dir = wind_FHAG10[1] # get wind dir - mag = mag * 1.94 # convert to knots - dir = clip(dir, 0, 359.5) - return (mag, dir) # assemble speed and dir into a tuple - -##-------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. -##-------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 # 3000 feet - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - # Interpolate (maybe) - magMask = logical_and(equal(famag, -1), mask[i]) - dirMask = logical_and(equal(fadir, -1), mask[i]) - famag[magMask] = wm[i][magMask] - fadir[dirMask] = wd[i][dirMask] - fadir.clip(0, 359.5, fadir) # clip the value to 0, 360 - famag *= 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##-------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. 
This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##-------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.00001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tdir.clip(0, 359.5, tdir) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tmag) # clip speed to 125 knots - return (tmag, tdir) - - -##-------------------------------------------------------------------------- -## Uses a derivation of the Bourgouin allgorithm to calculate precipitation -## type, and other algorithms to determine the coverage and intensity. -## The Bourgoin technique figures out precip type from calculating how -## long a hydrometer is exposed to alternating layers of above zero (C) and -## below zero temperature layers. This tool calculates at each grid point -## which of the four Bourgouin cases apply. Then the appropriate algorithm -## is applied to that case that further refines the precip. type. Once the -## type is determined, other algorithms are used to determine the coverage -## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, -## "A Method to Determine Precipitation Types", by Pierre Bourgouin -##-------------------------------------------------------------------------- - def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC, - bli_BL0180): - gh_c = gh_c[:13,:,:] - t_c = t_c[:13,:,:] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i-1], topo), pres[i-1], p_SFC) - tbot = where(greater(gh_c[i-1], topo), t_c[i-1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - - m = logical_and(equal(aindex, 0), topomask) - a1[m] += a11 - - m = logical_and(equal(aindex, 1), topomask) - a2[m] += a11 - - m = logical_and(equal(aindex, 2), topomask) - a3[m] += a11 - - topomask = logical_and(topomask, cross) - aindex[topomask] += 1 - - m = logical_and(equal(aindex, 0), topomask) - a1[m] += a22 - - m = logical_and(equal(aindex, 1), topomask) - a2[m] += a22 - - m = logical_and(equal(aindex, 2), topomask) - a3[m] += a22 - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - # Case d (snow) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] = 2 - wx[logical_and(srmask, - logical_and(greater_equal(a1, 5.6), - 
less(a1, 13.2)))] = 3 - - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) - convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) - wx[logical_and(not_equal(wx, 0), convecMask)] += 6 - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "Chc" or tcov == "": - tcov = "Sct" - key.append(key[i] + "^" + tcov - + ":T:::") - wx[less_equal(bli_BL0180, -3)] += 13 - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -##-------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##-------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##-------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip., lifted index -## and 3-D relative humidity. -##-------------------------------------------------------------------------- - def calcLAL(self, bli_BL0180, tp_SFC, cp_SFC, rh_c, rh_FHAG2): - lal = self.newGrid(1) - # Add one to lal if we have 0.5 mm of precip. - lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 - - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli_BL0180, -3)] += 1 - lal[less(bli_BL0180, -5)] += 1 - return lal - - -def main(): - NAM12Forecaster().run() - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. 
+# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +from Init import * + +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from NAM model +## output. +## +##-------------------------------------------------------------------------- +class NAM12Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "NAM12", "NAM12") + +##-------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. +##-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB950", "MB900","MB850","MB800","MB750", + "MB700","MB650","MB600","MB550", "MB500", + "MB450", "MB400", "MB350"] + +##-------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##-------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + +##-------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. 
This tool uses the model's boundary layers to calculate a lapse +## rate and then applies that lapse rate to the difference between the +## model topography and the true topography. This algorithm calculates +## the surface temperature for three different sets of points: those that +## fall above the boundary layer, in the boundary layer, and below the +## boundary layer. +##-------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL12015, p_SFC, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i-1], + log(self.pres[i]), log(self.pres[i-1]), topo) + val = clip(val, -.00001, 10) + p = where(logical_and(equal(p, -1), higher), + exp(val), p) + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i-1], t_c[i], t_c[i-1], topo) + tmb = where(logical_and(equal(tmb, -1), higher), + tval1, tmb) + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i-1], t_c[i], t_c[i-1], stopo) + tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), + tval2, tms) + + p_SFC = p_SFC / 100 # get te surface pres. in mb + # define the pres. 
of each of the boundary layers + pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, p_SFC - 105, + p_SFC - 135] + # list of temperature grids + temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL12015] + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i-1], temps[i], temps[i-1], p) + gm = greater(pres[i-1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + st = where(logical_and(equal(st, -1), mask), + val, st) + + # where topo level is above highest level in BL fields...use tmb + st = where(logical_and(equal(st,-1),less(p,p_SFC-135)),tmb,st) + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + st = where(equal(st, -1), tmb - tms + t_FHAG2, st) + return self.KtoF(st) + +##-------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. 
+##-------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) + # at the true surface + tsfce = self.esat(self.FtoK(T)) + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz + ws = (0.622 * tsfce) / (newp - tsfce) + rh = w / ws + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10,(Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10,(Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +##-------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model 
+##-------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + def calcSky(self, rh_c, gh_c, topo, p_SFC): + return self.skyFromRH(rh_c, gh_c, topo, p_SFC) + +##-------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. +##-------------------------------------------------------------------------- + def calcPoP(self, gh_c, rh_c, QPF, topo): + rhavg = where(less(gh_c, topo), float32(-1), rh_c) + rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 + count = not_equal(rhavg, -1) + rhavg[equal(rhavg, -1)] = 0 + count = add.reduce(count, 0, dtype=float32) + rhavg = add.reduce(rhavg, 0) + ## add this much based on humidity only + dpop = where(count, rhavg / (count + .001), 0) - 70.0 + dpop[less(dpop, -30)] = -30 + ## calculate the base PoP + pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) + pop += dpop # add the adjustment based on humidity + pop.clip(0, 100, pop) # clip to 100% + return pop + +##-------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. 
+##-------------------------------------------------------------------------- + def calcFzLevel(self, gh_c, t_c, topo): + fzl = self.newGrid(-1) + # for each level in the height cube, find the freezing level + for i in range(gh_c.shape[0]): + try: + val = gh_c[i-1] + (gh_c[i] - gh_c[i-1]) / (t_c[i] - t_c[i-1])\ + * (273.15 - t_c[i-1]) + except: + val = gh_c[i] + ## save the height value in fzl + fzl = where(logical_and(equal(fzl, -1), + less_equal(t_c[i], 273.15)), val, fzl) + + return fzl * 3.28 # convert to feet + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. +##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0]-1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex,:,:] + t_c = t_c[:clipindex,:,:] + rh_c = rh_c[:clipindex,:,:] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb=ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i]=self.pres[i] + pmb=clip(pmb,1,1050) + # + # convert temps to C and limit to reasonable values + # + tc=t_c-273.15 + tc=clip(tc,-120,60) + # + # limit RH to reasonable values + # + rh=clip(rh_c,0.5,99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb=self.Wetbulb(tc,rh,pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val=gh_c[i-1]+(gh_c[i]-gh_c[i-1])/(wetb[i]-wetb[i-1])\ + *(-wetb[i-1]) + except: + val=gh_c[i] + snow=where(logical_and(equal(snow,-1),less_equal(wetb[i],0)), + val,snow) + # + # convert to feet + # + snow=snow*3.28 + + return snow + +##-------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##-------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + m1 = less(T, 9) + m2 = greater_equal(T, 30) + snowr = T * -0.5 + 22.5 + snowr[m1] = 20 + snowr[m2] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = self.empty() + fzLevelMask = less_equal(FzLevel - 1000, topo / 0.3048) + snowamt[fzLevelMask] = snowr[fzLevelMask] * QPF[fzLevelMask] + + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) + snowamt[logical_not(snowmask)] = 0 + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". 
+##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + +##-------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##-------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + pt[mask] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) + diffpt = pt[i] - runavg + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i-1], gh_c[i], gh_c[i-1], runavg) + # assign new values if the difference is greater than 3 + mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)), tmh, mh) + return (mh - topo) * 3.28 + +##-------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##-------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] # get the wind grids + dir = wind_FHAG10[1] # get wind dir + mag = mag * 1.94 # convert to knots + dir = clip(dir, 0, 359.5) + return (mag, dir) # assemble speed and dir into a tuple + +##-------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. +##-------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 # 3000 feet + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + # Interpolate (maybe) + magMask = logical_and(equal(famag, -1), mask[i]) + dirMask = logical_and(equal(fadir, -1), mask[i]) + famag[magMask] = wm[i][magMask] + fadir[dirMask] = wd[i][dirMask] + fadir.clip(0, 359.5, fadir) # clip the value to 0, 360 + famag *= 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##-------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. 
This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. +##-------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.00001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tdir.clip(0, 359.5, tdir) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tmag) # clip speed to 125 knots + return (tmag, tdir) + + +##-------------------------------------------------------------------------- +## Uses a derivation of the Bourgouin allgorithm to calculate precipitation +## type, and other algorithms to determine the coverage and intensity. +## The Bourgoin technique figures out precip type from calculating how +## long a hydrometer is exposed to alternating layers of above zero (C) and +## below zero temperature layers. This tool calculates at each grid point +## which of the four Bourgouin cases apply. Then the appropriate algorithm +## is applied to that case that further refines the precip. type. Once the +## type is determined, other algorithms are used to determine the coverage +## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, +## "A Method to Determine Precipitation Types", by Pierre Bourgouin +##-------------------------------------------------------------------------- + def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC, + bli_BL0180): + gh_c = gh_c[:13,:,:] + t_c = t_c[:13,:,:] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i-1], topo), pres[i-1], p_SFC) + tbot = where(greater(gh_c[i-1], topo), t_c[i-1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + + m = logical_and(equal(aindex, 0), topomask) + a1[m] += a11 + + m = logical_and(equal(aindex, 1), topomask) + a2[m] += a11 + + m = logical_and(equal(aindex, 2), topomask) + a3[m] += a11 + + topomask = logical_and(topomask, cross) + aindex[topomask] += 1 + + m = logical_and(equal(aindex, 0), topomask) + a1[m] += a22 + + m = logical_and(equal(aindex, 1), topomask) + a2[m] += a22 + + m = logical_and(equal(aindex, 2), topomask) + a3[m] += a22 + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + # Case d (snow) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 2 + wx[logical_and(srmask, + logical_and(greater_equal(a1, 5.6), + 
less(a1, 13.2)))] = 3 + + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) + convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) + wx[logical_and(not_equal(wx, 0), convecMask)] += 6 + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "Chc" or tcov == "": + tcov = "Sct" + key.append(key[i] + "^" + tcov + + ":T:::") + wx[less_equal(bli_BL0180, -3)] += 13 + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +##-------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##-------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##-------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip., lifted index +## and 3-D relative humidity. +##-------------------------------------------------------------------------- + def calcLAL(self, bli_BL0180, tp_SFC, cp_SFC, rh_c, rh_FHAG2): + lal = self.newGrid(1) + # Add one to lal if we have 0.5 mm of precip. 
+ lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 + + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli_BL0180, -3)] += 1 + lal[less(bli_BL0180, -5)] += 1 + return lal + + +def main(): + NAM12Forecaster().run() + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/itool/ISmartScript.py b/cave/com.raytheon.viz.gfe/localization/gfe/itool/ISmartScript.py index 5bfc175f79..5f18a9c870 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/itool/ISmartScript.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/itool/ISmartScript.py @@ -1,235 +1,235 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +######################################################################## +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ISmartScript -- library of methods for ITools +# +# Author: hansen +# ---------------------------------------------------------------------------- +######################################################################## +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Feb 06, 2017 5959 randerso Removed Java .toString() calls # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -######################################################################## -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ISmartScript -- library of methods for ITools -# -# Author: hansen -# ---------------------------------------------------------------------------- -######################################################################## -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Feb 06, 2017 5959 randerso Removed Java .toString() calls -# -## - -## -# This is a base file that is not intended to be overridden. 
-## - -import string, time -import re, numpy -import Exceptions -import SmartScript, AbsTime - -class ISmartScript(SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self.__dataMgr = dbss - self.__parmMgr = self.__dataMgr.getParmManager() - - def executeCmd(self, cmd): - cmd = re.sub("\s*&\s*\Z", "", cmd) - return self._dbss.dataManager().queueServerProcessing(uiname, cmd) - - def getDataType(self, elementName): - parm = self.__parmMgr.getParmInExpr(elementName, 1) - return str(parm.getGridInfo().getGridType()) - - # This method was copied from gfe/ui/ZoneCombiner.py - def writeActiveComboFile(self, list, comboFilename): - from com.raytheon.viz.gfe.smarttool import TextFileUtil - #self._blockNotify = True - s = """ -# ---------------------------------------------------------------------------- -# -# ---------------------------------------------------------------------------- - -# Format: -# Combinations = [ -# ([ list of edit areas as named in the GFE ], label), -# ... 
-# ] -# -# NOTE: This file was automatically generated by GFE Zone Combiner Editor - -""" - s = s + "Combinations = [\n" - s = string.replace(s, "", comboFilename) - for zones,reg in list: - s = s + ' (' + repr(zones) + ', ' + repr(reg) + ' ),\n' - s = s + "]\n\n" - - - # write updated combinations file to server - textFileID = TextFileUtil.getSiteTextFile(comboFilename, "COMBINATIONS") - textFile = open(textFileID.getFile().getPath(), 'w') - textFile.write(s) - textFile.close() - textFileID.save() - - - #self.setStatusText('R', - # 'Combinations file saved: ' + comboFilename) - #self._blockNotify = False - - def saveElements(self, elementList, model="Fcst"): - # Save the given Fcst elements to the server - # Example: - # self.saveElements(["T","Td"]) - for element in elementList: - parm = self.getParm(model, element, "SFC") - if parm: - parm.saveParameter(1) - -## def getInitialGrid(self, elementName, elementType, hazardGrid=None): -## # If hazardGrid is not None, return it (this is assuming we are -## # going to overlay hazards onto the grid) -## # Otherwise, return an empty grid per elementType -## if elementType == "DISCRETE": -## self._makeEmptyHazardGrid(elementName, timeRange) -## elif elementType == "WEATHER": -## self._makeEmptyWxGrid(elementName, timeRange) -## else: -## pass - - def getAbsTime(self, timeStr): - "Create an AbsTime from a string: YYYYMMDD_HHMM" - year = string.atoi(timeStr[0:4]) - month = string.atoi(timeStr[4:6]) - day = string.atoi(timeStr[6:8]) - hour = string.atoi(timeStr[9:11]) - minute = string.atoi(timeStr[11:13]) - return AbsTime.absTimeYMD(year,month,day,hour,minute) - - def getTimeStr(self, absTime): - # Create a time string YYYYMMDD_HHMM given an AbsTime - return absTime.stringFmt("%4Y%2m%2d_%2H%2M") - - def getAbsFromLocal(self, year, month, day, hour, minute): - # Return an AbsTime GMT given the year, month, day, local hour, and minute - ltSecs = time.mktime((year, month, day, hour, minute, 0, -1, -1, -1)) - gmTime = 
time.gmtime(ltSecs) - return AbsTime.absTimeYMD( - gmTime[0],gmTime[1],gmTime[2],gmTime[3],gmTime[4]) - - -############## HazardUtils -- can be removed eventually - - # This method will create an empty hazards-type grid with the specified - # name and timeRange - def _makeEmptyHazardGrid(self, weName, timeRange): - byteGrid = self.empty(int8) - hazKeys = self.getDiscreteKeys("Hazards") - currentKeys = [""] - # make the grid - self.createGrid("Fcst", weName, "DISCRETE", (byteGrid, currentKeys), - timeRange, discreteKeys=hazKeys, - discreteAuxDataLength=4, discreteOverlap=1) - return - - - def _makeMask(self, zoneList): - mask = self.empty(bool) - eaList = self.editAreaList() - for z in zoneList: - if z in eaList: - zoneArea = self.getEditArea(z) - zoneMask = self.encodeEditArea(zoneArea) - mask |= zoneMask - return mask - - # adds the specified hazard to weName over the specified timeRange - # and spatially over the specified mask. Combines the specified - # hazard with the existing hazards by default. 
For replaceMode, - # specify 0 in the combineField - def _addHazard(self, weName, timeRange, addHaz, mask, combine=1): - # set up the inventory first - self._setupHazardsInventory(weName, [timeRange]) - - # get the inventory - trList = self._getWEInventory(weName, timeRange) - - for tr in trList: - byteGrid, hazKey = self.getGrids("Fcst", weName, "SFC", tr, - mode="First", cache=0) - - uniqueKeys = self._getUniqueKeys(byteGrid, hazKey, mask) - for uKey in uniqueKeys: - newKey = self._makeNewKey(uKey, addHaz) - oldIndex = self.getIndex(uKey, hazKey) - newIndex = self.getIndex(newKey, hazKey) - - # calculate the mask - intersection of mask and oldIndex values - editMask = logical_and(equal(byteGrid, oldIndex), mask) - - # poke in the new values - byteGrid[editMask] = newIndex - - self.createGrid("Fcst", weName, "DISCRETE", (byteGrid, hazKey), - tr, discreteOverlap=1, discreteAuxDataLength=4) - - byteGrid, hazKey = self.getGrids("Fcst", weName, "SFC", tr, - mode="First") - noneMask = equal(byteGrid, 0) - noneCount = sum(sum(noneMask)) - - return - - def dirList2(self): - return { - 'N' : 0, - 'NE':45, - 'E' :90, - 'SE':135, - 'S' :180, - 'SW':225, - 'W' :270, - 'NW':315, - } - - def textToDir(self, textDir): - # Return a numeric direction 8-point text direction - return self.dirList2()[textDir] - - def output(self, msg, outFile, prt=1): - # Put message to outFile - # If prt=1, also print the message - if prt==1: - print msg - if outFile is not None: - try: - outFile.write(msg+"\n") - outFile.flush() - except: - pass - - def internalStrip(self, inStr): - while inStr.find(" ") >= 0: - inStr = inStr.replace(" ", " ") - return inStr - - +## + +## +# This is a base file that is not intended to be overridden. 
+## + +import string, time +import re, numpy +import Exceptions +import SmartScript, AbsTime + +class ISmartScript(SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self.__dataMgr = dbss + self.__parmMgr = self.__dataMgr.getParmManager() + + def executeCmd(self, cmd): + cmd = re.sub("\s*&\s*\Z", "", cmd) + return self._dbss.dataManager().queueServerProcessing(uiname, cmd) + + def getDataType(self, elementName): + parm = self.__parmMgr.getParmInExpr(elementName, 1) + return str(parm.getGridInfo().getGridType()) + + # This method was copied from gfe/ui/ZoneCombiner.py + def writeActiveComboFile(self, list, comboFilename): + from com.raytheon.viz.gfe.smarttool import TextFileUtil + #self._blockNotify = True + s = """ +# ---------------------------------------------------------------------------- +# +# ---------------------------------------------------------------------------- + +# Format: +# Combinations = [ +# ([ list of edit areas as named in the GFE ], label), +# ... 
+# ] +# +# NOTE: This file was automatically generated by GFE Zone Combiner Editor + +""" + s = s + "Combinations = [\n" + s = string.replace(s, "", comboFilename) + for zones,reg in list: + s = s + ' (' + repr(zones) + ', ' + repr(reg) + ' ),\n' + s = s + "]\n\n" + + + # write updated combinations file to server + textFileID = TextFileUtil.getSiteTextFile(comboFilename, "COMBINATIONS") + textFile = open(textFileID.getFile().getPath(), 'w') + textFile.write(s) + textFile.close() + textFileID.save() + + + #self.setStatusText('R', + # 'Combinations file saved: ' + comboFilename) + #self._blockNotify = False + + def saveElements(self, elementList, model="Fcst"): + # Save the given Fcst elements to the server + # Example: + # self.saveElements(["T","Td"]) + for element in elementList: + parm = self.getParm(model, element, "SFC") + if parm: + parm.saveParameter(1) + +## def getInitialGrid(self, elementName, elementType, hazardGrid=None): +## # If hazardGrid is not None, return it (this is assuming we are +## # going to overlay hazards onto the grid) +## # Otherwise, return an empty grid per elementType +## if elementType == "DISCRETE": +## self._makeEmptyHazardGrid(elementName, timeRange) +## elif elementType == "WEATHER": +## self._makeEmptyWxGrid(elementName, timeRange) +## else: +## pass + + def getAbsTime(self, timeStr): + "Create an AbsTime from a string: YYYYMMDD_HHMM" + year = string.atoi(timeStr[0:4]) + month = string.atoi(timeStr[4:6]) + day = string.atoi(timeStr[6:8]) + hour = string.atoi(timeStr[9:11]) + minute = string.atoi(timeStr[11:13]) + return AbsTime.absTimeYMD(year,month,day,hour,minute) + + def getTimeStr(self, absTime): + # Create a time string YYYYMMDD_HHMM given an AbsTime + return absTime.stringFmt("%4Y%2m%2d_%2H%2M") + + def getAbsFromLocal(self, year, month, day, hour, minute): + # Return an AbsTime GMT given the year, month, day, local hour, and minute + ltSecs = time.mktime((year, month, day, hour, minute, 0, -1, -1, -1)) + gmTime = 
time.gmtime(ltSecs) + return AbsTime.absTimeYMD( + gmTime[0],gmTime[1],gmTime[2],gmTime[3],gmTime[4]) + + +############## HazardUtils -- can be removed eventually + + # This method will create an empty hazards-type grid with the specified + # name and timeRange + def _makeEmptyHazardGrid(self, weName, timeRange): + byteGrid = self.empty(int8) + hazKeys = self.getDiscreteKeys("Hazards") + currentKeys = [""] + # make the grid + self.createGrid("Fcst", weName, "DISCRETE", (byteGrid, currentKeys), + timeRange, discreteKeys=hazKeys, + discreteAuxDataLength=4, discreteOverlap=1) + return + + + def _makeMask(self, zoneList): + mask = self.empty(bool) + eaList = self.editAreaList() + for z in zoneList: + if z in eaList: + zoneArea = self.getEditArea(z) + zoneMask = self.encodeEditArea(zoneArea) + mask |= zoneMask + return mask + + # adds the specified hazard to weName over the specified timeRange + # and spatially over the specified mask. Combines the specified + # hazard with the existing hazards by default. 
For replaceMode, + # specify 0 in the combineField + def _addHazard(self, weName, timeRange, addHaz, mask, combine=1): + # set up the inventory first + self._setupHazardsInventory(weName, [timeRange]) + + # get the inventory + trList = self._getWEInventory(weName, timeRange) + + for tr in trList: + byteGrid, hazKey = self.getGrids("Fcst", weName, "SFC", tr, + mode="First", cache=0) + + uniqueKeys = self._getUniqueKeys(byteGrid, hazKey, mask) + for uKey in uniqueKeys: + newKey = self._makeNewKey(uKey, addHaz) + oldIndex = self.getIndex(uKey, hazKey) + newIndex = self.getIndex(newKey, hazKey) + + # calculate the mask - intersection of mask and oldIndex values + editMask = logical_and(equal(byteGrid, oldIndex), mask) + + # poke in the new values + byteGrid[editMask] = newIndex + + self.createGrid("Fcst", weName, "DISCRETE", (byteGrid, hazKey), + tr, discreteOverlap=1, discreteAuxDataLength=4) + + byteGrid, hazKey = self.getGrids("Fcst", weName, "SFC", tr, + mode="First") + noneMask = equal(byteGrid, 0) + noneCount = sum(sum(noneMask)) + + return + + def dirList2(self): + return { + 'N' : 0, + 'NE':45, + 'E' :90, + 'SE':135, + 'S' :180, + 'SW':225, + 'W' :270, + 'NW':315, + } + + def textToDir(self, textDir): + # Return a numeric direction 8-point text direction + return self.dirList2()[textDir] + + def output(self, msg, outFile, prt=1): + # Put message to outFile + # If prt=1, also print the message + if prt==1: + print(msg) + if outFile is not None: + try: + outFile.write(msg+"\n") + outFile.flush() + except: + pass + + def internalStrip(self, inStr): + while inStr.find(" ") >= 0: + inStr = inStr.replace(" ", " ") + return inStr + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/itool/SetupTextEA.py b/cave/com.raytheon.viz.gfe/localization/gfe/itool/SetupTextEA.py index 6468792e10..8302d2c719 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/itool/SetupTextEA.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/itool/SetupTextEA.py @@ -1,330 +1,330 @@ 
-## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -import getopt, sys, os, LogStream, numpy, time, JUtil - -from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID - - - -# -- module ----------------------------------------------------------------- -# The setupTextEA program. Sets up the text edit areas -# -- implementation --------------------------------------------------------- -# The ifpServerText program requires the following command line: -# setupTextEA -h hostname -p rpcPortNumber [-u user] -# -# -h host where the ifpServer is running -# -p rpc port number for the ifpServer. -# -u userid, defaults to GFETEST -# --------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -class setupTextEA: - - def __init__(self): - from com.raytheon.viz.gfe.core import DataManager - from java.lang import System - System.setProperty('user.name', 'GFETEST') - self.__host = None - self.__port = None - self.__user = 'GFETEST' - self.__dataMgr = DataManager.getInstance(None) - - def process(self): - import TimeRange - # get list of edit areas that are part of the Zones/FireWx group - from com.raytheon.viz.gfe.smarttool import TextFileUtil, GridCycler - textID = TextFileUtil.getTextFile('Zones', 'editAreaGroups') - zoneList = [] - textFile = open(textID.getFile().getPath()) - textFile.readline() - for line in textFile: - zoneList.append(line.rstrip()) - textFile.close() - textID = TextFileUtil.getTextFile('FireWxZones', 'editAreaGroups') - textFile = open(textID.getFile().getPath()) - textFile.readline() - for line in textFile: - zoneList.append(line.rstrip()) - textFile.close() - - refMgr = self.__dataMgr.getRefManager() - # make the basic edit areas that are required, go sequentially through - # the zoneList - requiredEA = ["west_half","east_half","east_one_third", - "west_one_third", "east_two_thirds","west_two_thirds", - "east_one_quarter", "west_one_quarter", "east_three_quarters", - "west_three_quarters","Superior"] - for x in xrange(len(requiredEA)): - refData = refMgr.loadRefSet(ReferenceID(zoneList[x])) - ea = ReferenceData(refData) - ea.setId(ReferenceID(requiredEA[x])) - refMgr.saveRefSet(ea) - #ea = self.__client.getEditAreaPolygons(zoneList[x]) - #self.__client.saveEditArea(requiredEA[x], ea) - LogStream.logEvent("Saved ", zoneList[x], "under", requiredEA[x]) - - # special EAs (source,destination) - special = [("ISC_Send_Area","FireArea"), ("ISC_Send_Area", "area3")] - for s in special: - refData = refMgr.loadRefSet(ReferenceID(s[0])) - ea = ReferenceData(refData) - ea.setId(ReferenceID(s[1])) - refMgr.saveRefSet(ea) - #ea = self.__client.getEditAreaPolygons(s[0]) - #self.__client.saveEditArea(s[1], ea) - LogStream.logEvent("Saved ", 
s[0], "under", s[1]) - - - # topography simulated based edit areas - # area3 = whole area, AboveElev, BelowElev - LogStream.logEvent("Calculating topo-dependent edit areas...") - topo = self.__dataMgr.getParmManager().getParmInExpr("Topo", True) - topogrid = GridCycler.getInstance().getCorrespondingResult( - topo, TimeRange.allTimes().toJavaObj(), "TimeWtAverage") - topogrid = topogrid[0].getGridSlice().getNDArray() - iscSend = ReferenceID('ISC_Send_Area') - #wholeGrid = self.__client.getEditArea("ISC_Send_Area") - wholeGrid = refMgr.loadRefSet(iscSend).getGrid().getNDArray() - topoAve = 0 - count = 0 - minx, maxx, miny, maxy = self.__extremaOfSetBits(wholeGrid) - for x in range(minx, maxx): - for y in range(miny, maxy): - if wholeGrid[y,x] == 1: - count = count + 1 - topoAve = topoAve + topogrid[y,x] - topoAve = topoAve / count - aboveGrid = wholeGrid * 0 - belowGrid = wholeGrid * 0 - for x in xrange(topogrid.shape[1]): - for y in xrange(topogrid.shape[0]): - if wholeGrid[y,x] == 1: - if topogrid[y,x] > topoAve: - aboveGrid[y,x] = 1 - else: - belowGrid[y,x] = 1 - # area1 and area2 need to be "BelowElev", but should be different - # than area3 - desiredCount = 2000 - count = 0 - area1 = wholeGrid * 0 - area2 = wholeGrid * 0 - for x in xrange(topogrid.shape[1]): - if count < desiredCount: - for y in xrange(topogrid.shape[0]): - if wholeGrid[y,x] == 0 and topogrid[y,x] < topoAve: - area1[y,x] = 1 - belowGrid[y,x] = 1 - count = count + 1 - count = 0 - for x in xrange(topogrid.shape[1]): - if count < desiredCount: - for y in xrange(topogrid.shape[0]): - if wholeGrid[y,x] == 0 and topogrid[y,x] < topoAve and \ - area1[y,x] == 0: - area2[y,x] = 1 - belowGrid[y,x] = 1 - count = count + 1 - - # save all topography-dependent edit areas - self.__saveEA("area1", area1) - LogStream.logEvent("Saved area1 based on area2, area3, and topo <", - topoAve) - self.__saveEA("area2", area2) - LogStream.logEvent("Saved area2 based on area1, area3, and topo <", - topoAve) - 
self.__saveEA("AboveElev", aboveGrid) - LogStream.logEvent("Saved AboveElev based on area3 > ", topoAve) - self.__saveEA("BelowElev", belowGrid) - LogStream.logEvent("Saved BelowElev based on area3 <= ", topoAve) - self.__saveEA("Ridges", aboveGrid) - LogStream.logEvent("Saved Ridges based on area3 > ", topoAve) - self.__saveEA("Valleys", belowGrid) - LogStream.logEvent("Saved Valleys based on area3 < ", topoAve) - self.__saveEA("Inland", aboveGrid) - LogStream.logEvent("Saved Ridges based on area3 > ", topoAve) - self.__saveEA("Coastal", belowGrid) - LogStream.logEvent("Saved Valleys based on area3 < ", topoAve) - - - #city areas, which are a small part of other edit areas - cityBased = [("area1",["city1","city2"]), ("area2", ["city3"]), - ("area3",["city4", "area3_pt"])] - for baseArea,cityAreas in cityBased: - #wholeGrid = self.__client.getEditArea(baseArea) - wholeGrid = refMgr.loadRefSet(ReferenceID(baseArea)).getGrid().getNDArray() - minx, maxx, miny, maxy = self.__extremaOfSetBits(wholeGrid) - cNumber = 0 - print minx, maxx, miny, maxy, wholeGrid.shape - for x in range(minx, maxx): - for y in range(miny, maxy): - if wholeGrid[y,x] == 1: - if cNumber >= len(cityAreas): - break - cityGrid = numpy.logical_and(wholeGrid, 0) - cityGrid[y,x] = 1 - self.__saveEA(cityAreas[cNumber], cityGrid.astype('int8')) - LogStream.logEvent("Saved ", cityAreas[cNumber], - "based on:", baseArea) - cNumber = cNumber + 1 - - # special for ISC areas for CCF database source test - #txt = self.__eagdb["ISC"] - #iscList = cPickle.loads(txt) - textID = TextFileUtil.getTextFile('ISC', 'editAreaGroups') - iscList = [] - textFile = open(textID.getFile().getPath()) - textFile.readline() - for line in textFile: - iscList.append(line.rstrip()) - textFile.close() - count = 0 - while count < 6: - for i in iscList: - if i == "ISC_Send_Area" or i == "ISC_Tool_Area": - continue - wholeGrid = refMgr.loadRefSet(ReferenceID(i)).getGrid().getNDArray() - minx, maxx, miny, maxy = 
self.__extremaOfSetBits(wholeGrid) - if minx == -1: - continue - ok = 1 - print minx, maxx, miny, maxy, wholeGrid.shape - for x in range(minx, maxx): - if ok: - for y in range(miny, maxy): - if wholeGrid[y,x] == 1: - ptGrid = numpy.logical_and(wholeGrid, 0) - ptGrid[y,x] = 1 - name = "isc" + `count` - self.__saveEA(name, ptGrid.astype('int8')) - requiredEA.append(name) - LogStream.logEvent("Saved ", name, - "based on ", i) - ok = 0 - break - else: - break - - count = count + 1 - if count > 6: - break - - - - # store an edit area group with all of the generated edit areas - requiredEA.append("FireArea") - requiredEA.append("AboveElev") - requiredEA.append("BelowElev") - requiredEA.append("Valleys") - requiredEA.append("Ridges") - requiredEA.append("Inland") - requiredEA.append("Coastal") - requiredEA.append("city1") - requiredEA.append("city2") - requiredEA.append("city3") - requiredEA.append("city4") - requiredEA.append("area3") - requiredEA.append("area2") - requiredEA.append("area1") - - refMgr.saveGroup("GFETest", JUtil.pylistToJavaStringList(requiredEA)) - - time.sleep(.5) - - - def __saveEA(self, name, grid): - #save edit area from a grid - gloc = self.__dataMgr.getParmManager().compositeGridLocation() - id = ReferenceID(name) - # convert grid to polygons - grid2d = Grid2DBit.createBitGrid(int(gloc.getNx()), int(gloc.getNy()), grid) - refdata = ReferenceData(gloc, id, grid2d) - - # save the edit area - self.__dataMgr.getRefManager().saveRefSet(refdata) - #self.__client.saveEditArea(name, ea) - - - - def __extremaOfSetBits(self,mask): - "Returns tuple of extrema of set bits (minx,maxx, miny,maxy)" - xs = sum(mask) - ys = sum(mask, 1) - minx = maxx = miny = maxy = -1 - for x in range(xs.shape[0]): - if xs[x] != 0: - if minx == -1: - minx = x - maxx = x - for y in range(ys.shape[0]): - if ys[y] != 0: - if miny == -1: - miny = y - maxy = y - return (minx, maxx, miny, maxy) - - - def __cmdLine(self): - optlist, oargs = getopt.getopt(sys.argv[1:], "h:p:u:") - for 
opt in optlist: - if opt[0] == '-h': - self.__host = opt[1] - elif opt[0] == '-p': - self.__port = int(opt[1]) - elif opt[0] == '-u': - self.__user = opt[1] - - # sanity checks, make sure all required switches are specified - if self.__host is None or self.__port is None: - self.__usage() - raise SyntaxWarning, "Error: Missing host or port" - - - def __usage(self): - print """ -Usage: setupTextEA -h hostname -p rpcPortNumber [-u user] - - -h host where the ifpServer is running - -p rpc port number for the ifpServer. - -u userid, defaults to GFETEST - -""" - - -def main(): - LogStream.logEvent("setupTextEA Starting") - - try: - obj = setupTextEA() - obj.process() - except Exception, e: - LogStream.logProblem(LogStream.exc()) - sys.exit(1) - - LogStream.logEvent("setupTextEA Finished") - - -if __name__ == "__main__": - main() - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +import getopt, sys, os, LogStream, numpy, time, JUtil + +from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID + + + +# -- module ----------------------------------------------------------------- +# The setupTextEA program. Sets up the text edit areas +# -- implementation --------------------------------------------------------- +# The ifpServerText program requires the following command line: +# setupTextEA -h hostname -p rpcPortNumber [-u user] +# +# -h host where the ifpServer is running +# -p rpc port number for the ifpServer. +# -u userid, defaults to GFETEST +# --------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. 
+## + +class setupTextEA: + + def __init__(self): + from com.raytheon.viz.gfe.core import DataManager + from java.lang import System + System.setProperty('user.name', 'GFETEST') + self.__host = None + self.__port = None + self.__user = 'GFETEST' + self.__dataMgr = DataManager.getInstance(None) + + def process(self): + import TimeRange + # get list of edit areas that are part of the Zones/FireWx group + from com.raytheon.viz.gfe.smarttool import TextFileUtil, GridCycler + textID = TextFileUtil.getTextFile('Zones', 'editAreaGroups') + zoneList = [] + textFile = open(textID.getFile().getPath()) + textFile.readline() + for line in textFile: + zoneList.append(line.rstrip()) + textFile.close() + textID = TextFileUtil.getTextFile('FireWxZones', 'editAreaGroups') + textFile = open(textID.getFile().getPath()) + textFile.readline() + for line in textFile: + zoneList.append(line.rstrip()) + textFile.close() + + refMgr = self.__dataMgr.getRefManager() + # make the basic edit areas that are required, go sequentially through + # the zoneList + requiredEA = ["west_half","east_half","east_one_third", + "west_one_third", "east_two_thirds","west_two_thirds", + "east_one_quarter", "west_one_quarter", "east_three_quarters", + "west_three_quarters","Superior"] + for x in range(len(requiredEA)): + refData = refMgr.loadRefSet(ReferenceID(zoneList[x])) + ea = ReferenceData(refData) + ea.setId(ReferenceID(requiredEA[x])) + refMgr.saveRefSet(ea) + #ea = self.__client.getEditAreaPolygons(zoneList[x]) + #self.__client.saveEditArea(requiredEA[x], ea) + LogStream.logEvent("Saved ", zoneList[x], "under", requiredEA[x]) + + # special EAs (source,destination) + special = [("ISC_Send_Area","FireArea"), ("ISC_Send_Area", "area3")] + for s in special: + refData = refMgr.loadRefSet(ReferenceID(s[0])) + ea = ReferenceData(refData) + ea.setId(ReferenceID(s[1])) + refMgr.saveRefSet(ea) + #ea = self.__client.getEditAreaPolygons(s[0]) + #self.__client.saveEditArea(s[1], ea) + LogStream.logEvent("Saved ", 
s[0], "under", s[1]) + + + # topography simulated based edit areas + # area3 = whole area, AboveElev, BelowElev + LogStream.logEvent("Calculating topo-dependent edit areas...") + topo = self.__dataMgr.getParmManager().getParmInExpr("Topo", True) + topogrid = GridCycler.getInstance().getCorrespondingResult( + topo, TimeRange.allTimes().toJavaObj(), "TimeWtAverage") + topogrid = topogrid[0].getGridSlice().getNDArray() + iscSend = ReferenceID('ISC_Send_Area') + #wholeGrid = self.__client.getEditArea("ISC_Send_Area") + wholeGrid = refMgr.loadRefSet(iscSend).getGrid().getNDArray() + topoAve = 0 + count = 0 + minx, maxx, miny, maxy = self.__extremaOfSetBits(wholeGrid) + for x in range(minx, maxx): + for y in range(miny, maxy): + if wholeGrid[y,x] == 1: + count = count + 1 + topoAve = topoAve + topogrid[y,x] + topoAve = topoAve / count + aboveGrid = wholeGrid * 0 + belowGrid = wholeGrid * 0 + for x in range(topogrid.shape[1]): + for y in range(topogrid.shape[0]): + if wholeGrid[y,x] == 1: + if topogrid[y,x] > topoAve: + aboveGrid[y,x] = 1 + else: + belowGrid[y,x] = 1 + # area1 and area2 need to be "BelowElev", but should be different + # than area3 + desiredCount = 2000 + count = 0 + area1 = wholeGrid * 0 + area2 = wholeGrid * 0 + for x in range(topogrid.shape[1]): + if count < desiredCount: + for y in range(topogrid.shape[0]): + if wholeGrid[y,x] == 0 and topogrid[y,x] < topoAve: + area1[y,x] = 1 + belowGrid[y,x] = 1 + count = count + 1 + count = 0 + for x in range(topogrid.shape[1]): + if count < desiredCount: + for y in range(topogrid.shape[0]): + if wholeGrid[y,x] == 0 and topogrid[y,x] < topoAve and \ + area1[y,x] == 0: + area2[y,x] = 1 + belowGrid[y,x] = 1 + count = count + 1 + + # save all topography-dependent edit areas + self.__saveEA("area1", area1) + LogStream.logEvent("Saved area1 based on area2, area3, and topo <", + topoAve) + self.__saveEA("area2", area2) + LogStream.logEvent("Saved area2 based on area1, area3, and topo <", + topoAve) + 
self.__saveEA("AboveElev", aboveGrid) + LogStream.logEvent("Saved AboveElev based on area3 > ", topoAve) + self.__saveEA("BelowElev", belowGrid) + LogStream.logEvent("Saved BelowElev based on area3 <= ", topoAve) + self.__saveEA("Ridges", aboveGrid) + LogStream.logEvent("Saved Ridges based on area3 > ", topoAve) + self.__saveEA("Valleys", belowGrid) + LogStream.logEvent("Saved Valleys based on area3 < ", topoAve) + self.__saveEA("Inland", aboveGrid) + LogStream.logEvent("Saved Ridges based on area3 > ", topoAve) + self.__saveEA("Coastal", belowGrid) + LogStream.logEvent("Saved Valleys based on area3 < ", topoAve) + + + #city areas, which are a small part of other edit areas + cityBased = [("area1",["city1","city2"]), ("area2", ["city3"]), + ("area3",["city4", "area3_pt"])] + for baseArea,cityAreas in cityBased: + #wholeGrid = self.__client.getEditArea(baseArea) + wholeGrid = refMgr.loadRefSet(ReferenceID(baseArea)).getGrid().getNDArray() + minx, maxx, miny, maxy = self.__extremaOfSetBits(wholeGrid) + cNumber = 0 + print(minx, maxx, miny, maxy, wholeGrid.shape) + for x in range(minx, maxx): + for y in range(miny, maxy): + if wholeGrid[y,x] == 1: + if cNumber >= len(cityAreas): + break + cityGrid = numpy.logical_and(wholeGrid, 0) + cityGrid[y,x] = 1 + self.__saveEA(cityAreas[cNumber], cityGrid.astype('int8')) + LogStream.logEvent("Saved ", cityAreas[cNumber], + "based on:", baseArea) + cNumber = cNumber + 1 + + # special for ISC areas for CCF database source test + #txt = self.__eagdb["ISC"] + #iscList = cPickle.loads(txt) + textID = TextFileUtil.getTextFile('ISC', 'editAreaGroups') + iscList = [] + textFile = open(textID.getFile().getPath()) + textFile.readline() + for line in textFile: + iscList.append(line.rstrip()) + textFile.close() + count = 0 + while count < 6: + for i in iscList: + if i == "ISC_Send_Area" or i == "ISC_Tool_Area": + continue + wholeGrid = refMgr.loadRefSet(ReferenceID(i)).getGrid().getNDArray() + minx, maxx, miny, maxy = 
self.__extremaOfSetBits(wholeGrid) + if minx == -1: + continue + ok = 1 + print(minx, maxx, miny, maxy, wholeGrid.shape) + for x in range(minx, maxx): + if ok: + for y in range(miny, maxy): + if wholeGrid[y,x] == 1: + ptGrid = numpy.logical_and(wholeGrid, 0) + ptGrid[y,x] = 1 + name = "isc" + repr(count) + self.__saveEA(name, ptGrid.astype('int8')) + requiredEA.append(name) + LogStream.logEvent("Saved ", name, + "based on ", i) + ok = 0 + break + else: + break + + count = count + 1 + if count > 6: + break + + + + # store an edit area group with all of the generated edit areas + requiredEA.append("FireArea") + requiredEA.append("AboveElev") + requiredEA.append("BelowElev") + requiredEA.append("Valleys") + requiredEA.append("Ridges") + requiredEA.append("Inland") + requiredEA.append("Coastal") + requiredEA.append("city1") + requiredEA.append("city2") + requiredEA.append("city3") + requiredEA.append("city4") + requiredEA.append("area3") + requiredEA.append("area2") + requiredEA.append("area1") + + refMgr.saveGroup("GFETest", JUtil.pylistToJavaStringList(requiredEA)) + + time.sleep(.5) + + + def __saveEA(self, name, grid): + #save edit area from a grid + gloc = self.__dataMgr.getParmManager().compositeGridLocation() + id = ReferenceID(name) + # convert grid to polygons + grid2d = Grid2DBit.createBitGrid(int(gloc.getNx()), int(gloc.getNy()), grid) + refdata = ReferenceData(gloc, id, grid2d) + + # save the edit area + self.__dataMgr.getRefManager().saveRefSet(refdata) + #self.__client.saveEditArea(name, ea) + + + + def __extremaOfSetBits(self,mask): + "Returns tuple of extrema of set bits (minx,maxx, miny,maxy)" + xs = sum(mask) + ys = sum(mask, 1) + minx = maxx = miny = maxy = -1 + for x in range(xs.shape[0]): + if xs[x] != 0: + if minx == -1: + minx = x + maxx = x + for y in range(ys.shape[0]): + if ys[y] != 0: + if miny == -1: + miny = y + maxy = y + return (minx, maxx, miny, maxy) + + + def __cmdLine(self): + optlist, oargs = getopt.getopt(sys.argv[1:], "h:p:u:") + 
for opt in optlist: + if opt[0] == '-h': + self.__host = opt[1] + elif opt[0] == '-p': + self.__port = int(opt[1]) + elif opt[0] == '-u': + self.__user = opt[1] + + # sanity checks, make sure all required switches are specified + if self.__host is None or self.__port is None: + self.__usage() + raise SyntaxWarning("Error: Missing host or port") + + + def __usage(self): + print(""" +Usage: setupTextEA -h hostname -p rpcPortNumber [-u user] + + -h host where the ifpServer is running + -p rpc port number for the ifpServer. + -u userid, defaults to GFETEST + +""") + + +def main(): + LogStream.logEvent("setupTextEA Starting") + + try: + obj = setupTextEA() + obj.process() + except Exception as e: + LogStream.logProblem(LogStream.exc()) + sys.exit(1) + + LogStream.logEvent("setupTextEA Finished") + + +if __name__ == "__main__": + main() + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/itool/TextProductTest.py b/cave/com.raytheon.viz.gfe/localization/gfe/itool/TextProductTest.py index 7e60b9fdd9..f152b11c14 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/itool/TextProductTest.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/itool/TextProductTest.py @@ -1,1115 +1,1116 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TextProductTest -# -# Author: -# ---------------------------------------------------------------------------- - -##INSTRUCTIONS FOR USE: - -## 0. Set up a site with all products. From GFESuite directory: -## --make SITE=TBW dev -## --Add to release/data/textInstall/afos2awips.txt: - -## MIAOFFABC UFUS42 KTBW -## MIANSHABC UFUS42 KTBW -## MIAGLFABC UFUS42 KTBW -## MIAGLFDEF UFUS42 KTBW -## MIASAFABC UFUS42 KTBW - -## --start (or re-start the server) -## NOTE: You will have to repeat this step every time you do -## --make SITE=TBW dev - -## 1. Bring up GFETEST with TestConfig file. -## 1.1 From release/bin, set up default edit areas: -## run/setupTextEA -## 1.2 Copy examples/iTool/IToolLocalConfig.py release/etc/SITE/localConfig.py -## 2. Bring up the iTools dialog: GFE-->Define iTools -## 3. Activate "TextProductTest": MB3-->Activate -## 4. To Test CivilEmergency Products: -## --MB1 over "TextProductTest" -## --Choose mode: Verbose mode will display product output in terminal window. -## --Click "Run" -## --You can watch the progress of the products running from the -## GFE Process Monitor: Products-->Process Monitor -## 5. To Test Hazard Products: -## --MB1 over "TextProductTest" -## --Choose "Hazards_TestScript" -## --Choose mode: Verbose mode will display product output in terminal window. -## --Click "Run" -## --You can watch the progress of the products running from the -## GFE Process Monitor: Products-->Process Monitor -## 6. 
To test the Routine Forecast products: -## --Run release/bin/run/setupTextEA -## --Edit AFD_Site_Definition file "state_IDS" entry -## --MB1 over "TextProductTest" -## --Choose "RoutineLevel1_TestScript" -## --Choose mode: Verbose mode will display product output in terminal window. -## --Click "Run" -## --You can watch the progress of the products running from the -## GFE Process Monitor: Products-->Process Monitor -## -## NOTE: To abort a script while running, click MB2 over TextProductTest. - -## To make your own test script: -## --Follow the example of the existing Test Scripts -## --Each script entry is a dictionary with the following fields: -## "name": required. This must be a unique test name -## "productType": required. This is the name of the text product -## for the -t option in the command line. For example: ZFP_BOU -## You may use the variable in the string. -## Can be None. -## "commentary": optional. A text string describing the test. -## "cmdLineVars": optional. Command line variables for the product. -## Can be a method to call to get the command line variables. -## The method will be sent the product Definition. -## "callVariables": optional. Applied only if cmdLineVars is a method. -## Dictionary of "key:value" pairs to be added to the Defintion -## before calling the cmdLineVars method. -## "database": optional. Default is Fcst database. -## "checkMethod": optional. Method to call to check the product results. -## The method will be given the product text output and should return -## 1 if the tests pass, 0 if failed. -## "checkStrings": optional. List of strings which must be included -## in the product results for the test to pass. -## "orderStrings": optional. If 1, will require that "checkStrings" appear -## in the product in the order given in the checkStrings list. -## "notCheckStrings": optional. List of strings which must NOT be included -## in the product results for the test to pass. -## "combinationsFileName": optional. 
Name of Combinations file to create. -## May include the variable . -## "combinations": optional. Combinations for the Combinations file. -## "deleteGrids": optional. List of tuples each -## representing a grid that should be deleted before running the -## product. The tuple consists of: -## model, -## weather element name, -## level, (Can be "SFC", or D2D level e.g. "MB500") -## start hour for grid to be deleted (relative to gridsStartTime) -## end hour for grid to be deleted (relative to gridsStartTime) -## NOTE: "deleteGrids" happens prior to "createGrids" -## "createGrids": optional. List of tuples each -## representing a grid that should be created before running the -## product. The tuple consists of: -## model, -## weather element name, -## elementType, (Can be "SCALAR", "VECTOR", "WEATHER", or "DISCRETE") -## start hour for grid to be created (relative to gridsStartTime) -## end hour for grid to be created (relative to gridsStartTime) -## Note: start and end hour can be a string expression involving -## ONE OF: MaxTBegin, MaxTEnd, MinTBegin, MinTEnd, -## MaxRHBegin, MaxRHEnd, MinRHBegin, MinRHEnd -## which are relative to midnight of the day of gridsStartTime. -## data value for grid to be created -## (for hazards, value will be combined with other -## values specified for the grid) -## edit areas for the value: may be "all" to specify entire grid -## defaultValue (optional): If the default value for this grid -## is not zero, include it here. For example, "", or 1 for LAL -## "writableCopies": optional. List of tuples each representing a -## writable file copy that will be performed prior to running -## the product. The file is deleted after running the text -## formatter. The tuple consists of: -## fileName: file to be copied from BASE. The name can -## contain which will be replaced by the siteID. No -## file extension is needed. 
-## fileType: directory of source of file, relative to the -## the databases level, such as "TEXT/Combinations" -## destFileName: renaming of the original filename may be -## accomplished here. The "user" is determined by the -## "user" definition in the test script, and defaults to -## GFETEST. -## "fileChanges": optional. List of tuples each representing a file -## that should be changed before running the product. The tuple consists -## of: -## fileName: file to be changed. The name can contain -## which will be replaced by the siteID -## fileType: TEXT category, such as "TextUtility", -## type of change: can be -## "add": the string will be added to the file -## "replace": a given string will be replaced by another -## strings: If add, the string to be added. -## If replace, a tuple of original and replacement strings or -## a list of tuples for multiple replacements in the same file -## cleanup action: What to do when product is finished. Can be: -## "delete": Delete the file and revert to baseline version -## "undo": Undo the add or replace -## "leave": Leave altered file as is -## "publishGrids": optional. If 1, will publish grids before running product. -## "gridsStartTime": optional. Format YYYYMMDD_HHMM OR AbsTime. If present, will -## create and delete grids relative to this time. If not present, -## will use the "Today" select time range start time. -## "drtTime": optional. Format YYYYMMDD_HHMM OR AbsTime. If present, -## will run the product in this displaced real time. -## If not present, and gridsStartTime is present, -## will run the product using gridsStartTime as the displaced real time. -## "decodeVTEC": optional. If 1, will update the active table after running -## the product. -## "internalStrip": optional. If set to 0, will not strip multiple spaces from the -## result string before matching checkString. Default is 1. -## If set to 2, will try both the stripped and non-stripped -## fcstStr for a match. 
if at least one matches each -## checkstring, the test will succeed. Note: This will -## handle products with intermixed narrative and tabular -## formats such as the FWS. -## "clearHazardsTable": optional. If 1, clear out the HazardsTable. -## "vtecMode": optional. Can be set to "X", "E", "T", or "O" (-T/-E) - -## NOTE: by default, VTEC iTool Tests are run in operational mode i.e. -v O (and no -T option) -## (unless 'vtecMode" is specified) -## At some point, we should make sure they run in PRACTICE mode as well -## (for NGIT testing) - -## -# This is a base file that is not intended to be overridden. -## - -import sys, time, os, types, copy, inspect, errno -import LogStream -import AbsTime, TimeRange -import numpy, cPickle - - -OUTPUT_DIR = "/tmp/products/autoTest" - -# Triggers can be: -# Message enums and executeMsg will have the message as its argument - -#Triggers = [AFPS.Message.PROCESS_STATUS] -HideTool = 0 -InitialActivation = 1 - -### If desired, Set up variables to be solicited from the user: -VariableList = [ - ("Test Script Name", [], "check", - [ - "CreateGrids", - "RoutineLevel1_TestScript", - "RoutineLevel1_Baseline_TestScript", - "RoutineLevel1_Region_TestScript ", - "RoutineLevel1_AFD_TestScript", - "RoutineLevel1_AFM_TestScript", - "RoutineLevel1_CCF_TestScript", - "RoutineLevel1_FWF_TestScript", - "RoutineLevel1_FWFTable_TestScript", - "RoutineLevel1_FWS_TestScript", - "RoutineLevel1_GLF_TestScript", - "RoutineLevel1_PFM_TestScript", - "RoutineLevel1_SAF_TestScript", - "RoutineLevel1_SFT_TestScript", - "RoutineLevel1_SRF_TestScript", - "RoutineLevel1_ZFP_TestScript", - "RoutineLevel2_1_TestScript", - "RoutineLevel3_1_TestScript", - "RoutineLevel3_2_TestScript", - "RoutineLevel3_3_TestScript -- Local Effects", - "RoutineLevel4_1_TestScript -- More Local Effects", - "RoutineLevel5_1_TestScript -- SnowAccum, PopWx", - "RoutineLevel5_2_TestScript -- Miscellaneous Product Tests", - "RoutineLevel5_3_TestScript -- Miscellaneous Phrase Tests", - 
"RoutineLevel5_4_TestScript -- Temp Phrases", - "SPW_1_TestScript -- Automatically generated tests for SPW", - "AllowedHazards_TestScript", - "ExpireTime_TestScript", - "ExpireAlg_TestScript", - " ", - "CivilEmerg_TestScript", - "Hazards_TestScript", - "Hazard_HLS_TestScript", - "Hazards_Overview_Options_TestScript", - "HazardsComplex1_TestScript -- CFW", - "VTEC_GHG_Complex1_TestScript -- WSW", - "VTEC_GHG_Complex2_TestScript", - "VTEC_GHG_Complex3_TestScript", - "VTEC_GHG_Complex4_TestScript", - "VTEC_GHG_Complex5_TestScript", - "VTEC_GHG_Complex6_TestScript", - "VTEC_GHG_WCN_TestScript", - "VTEC_GHG_FFA_TestScript", - "VTEC_GHG_GenHaz_TestScript", - "HeadlinesTiming_Watch_TestScript", - "HeadlinesTiming_Warn_TestScript", - "VTEC_EXP_NEW_TestScript", - "VTEC_CrossingYear_TestScript", - "VTEC_TestMode_TestScript", - "VTEC_EXTtoNOW_TestScript", - "VTEC_EXT_UPG_TestScript", - "VTEC_GHG_UPG_SplitETNs_TestScript", - "VTEC_ETN_RESET_Tmode_TestScript", - "VTEC_ETN_Reuse_TestScript", - "VTEC_Reset_Start_TestScript", - "MultipleTZ_TestScript", - "Headline_UPG_TestScript", - "HeadlineSort_TestScript", - "VTEC_Hazard_DR21021_TestScript", - ]), - ("Enter Test Script Name" , "", "alphaNumeric"), - ("Output File " , "/tmp/TestResults.txt", "alphaNumeric"), - ("Failure Limit ", 0, "numeric"), - ("Test Suite", "Run All Tests", "radio", ["Run All Tests", "Individual Tests"]), - ("Reporting Mode" , "Verbose", "radio", ["Verbose", "Moderate", "Brief", "Pretty"]), - ("Run\nSetupTextEA?" , "no", "radio", ["yes", "no"]), - ("Processor?" , "Local", "radio", ["Server", "Local"]), - ("Create Grids?" , "yes", "radio", ["yes", "no"]), - ("Leave File\nChanges?" 
, "no", "radio", ["yes", "no"]), - ] - -# Set up Class -import ISmartScript -import ProcessVariableList - -from com.raytheon.viz.gfe.textformatter import TextProductFinishWaiter, FormatterUtil, TextProductManager -from com.raytheon.viz.gfe.smarttool import TextFileUtil -from com.raytheon.viz.gfe.dialogs.formatterlauncher import ConfigData -ProductStateEnum = ConfigData.ProductStateEnum -from com.raytheon.uf.common.activetable import ActiveTableMode -from com.raytheon.uf.viz.core.notification.jobs import NotificationManagerJob -from com.raytheon.viz.gfe.textformatter.test import AutoTestVTECNotificationListener - -class ProcessInfo: - def __init__(self, entry, name, pid, script): - self.__entry = entry - self.__name = name - self.__pid = pid - self.__script = script - def entry(self): - return self.__entry - def name(self): - return self.__name - def pid(self): - return self.__pid - def script(self): - return self.__script - - -class ITool (ISmartScript.ISmartScript): - def __init__(self, dbss): - ISmartScript.ISmartScript.__init__(self, dbss) - self._dataMgr = dbss - self._process = None - - # Button 1 in ITool Dialog - def execute(self, varDict): - "Testing for Text Products" - self._failLimit = varDict["Failure Limit "] - self._outFile = open(varDict["Output File "], 'w') - self._reportingMode = varDict["Reporting Mode"] - if self._reportingMode not in ["Pretty"]: - self.output("Calling TextProductTest User Invoked", self._outFile) - self._timer = time.time() - self._testSuite = varDict["Test Suite"] - self._lastCreateGrids = None - self._createGridsRunTime = varDict["Create Grids?"] - self._leaveFileChanges = varDict["Leave File\nChanges?"] - processor = varDict["Processor?"] - setupTextEA = varDict["Run\nSetupTextEA?"] - if setupTextEA == "yes": - if self._reportingMode not in ["Pretty"]: - self.output("Calling setupTextEA", self._outFile) - import SetupTextEA - SetupTextEA.main() - scriptNames = varDict["Test Script Name"] - enterName = varDict["Enter Test 
Script Name"] - if enterName: - scriptNames.append(enterName) - if scriptNames == []: - return - self._testScript = [] - for scriptName in scriptNames: - scriptName = scriptName.split(" -- ")[0] - scriptName = scriptName.strip() - if len(scriptName) == 0: - continue - - if sys.modules.has_key(scriptName): - del sys.modules[scriptName] - exec "import " + scriptName - exec "testScript = " + scriptName + ".testScript(self, self._dataMgr)" - self._testScript = self._testScript + testScript - self._process = None - self._passed = 0 - self._failures = 0 - self._scripts = 0 - self._scriptName = `scriptNames` - - # Let User Choose Individual Tests - if self._testSuite == "Individual Tests": - success = self._pareTestScript() - if success is None: - return - - self.__listener = AutoTestVTECNotificationListener() - NotificationManagerJob.addObserver('edex.alerts.vtec', self.__listener) - - # Run the test scripts - for index in range(len(self._testScript)): - self._runTestScript(index) - if self._failures > self._failLimit: - break - time.sleep(2) # avoid some race conditions with fileChanges - - NotificationManagerJob.removeObserver('edex.alerts.vtec', self.__listener) - self._finished() - - - # We will wait for it to finish before incrementing index and running the - # next script (see executeMsg below) - - def _pareTestScript(self): - newScript = [] - nameList = [] - for entry in self._testScript: - nameList.append(entry["name"]) - varList = [("Choose Tests", [], "check", nameList)] - processVarList = ProcessVariableList.ProcessVariableList( - "Choose Individual Tests", varList, varDict={}, parent=None) - self._selectionStatus = processVarList.status() - if not self._selectionStatus == "OK": - return None # User Cancelled - varDict = processVarList.varDict() - testList = varDict["Choose Tests"] - for entry in self._testScript: - if entry["name"] in testList: - newScript.append(entry) - self._testScript = newScript - return 1 - - def _runTestScript(self, index): - entry = 
self._testScript[index] - - # Set defaults - database, user, checkMethod, checkStrings = self._setDefaults() - # Process entry - name = entry["name"] - productType = entry["productType"] - database = entry.get("database", database) - user = entry.get("user", user) - # gridsStartTime - self._gridsStartTime = entry.get("gridsStartTime", None) - if self._gridsStartTime is None: - self._gridsStartTime = self.getTimeRange("Today").startTime() - else: - if not isinstance(self._gridsStartTime, AbsTime.AbsTime): - self._gridsStartTime = self.getAbsTime(self._gridsStartTime) - #print "gridsStartTime", self._gridsStartTime - # drtTime - drtTime = entry.get("drtTime") - if drtTime is None: - drtTime = entry.get("gridsStartTime", None) - #print "\n*********drtTime", drtTime - if drtTime is not None: - if isinstance(drtTime, AbsTime.AbsTime): - drtTime = self.getTimeStr(drtTime) - drtStr = drtTime - self._drtString = drtTime - else: - drtStr = "" - self._drtString = None - #print "drtStr", drtStr - - - self._clearHazardsTable(entry) - self._createCombinationsFile(entry) - self._deleteGrids(entry) - self._createGrids(entry) - self._makeWritableCopy(entry) - self._fileChanges(entry) - - cmdLineVars = self._getCmdLineVars(entry) - vtecMode = entry.get("vtecMode", None) - - if productType is None: - return - - database = database.replace("", self.getSiteID()) - - # Run the product - if self._reportingMode not in ["Pretty"]: - self.output("Running " + name, self._outFile) - message = "Running " + name - self.statusBarMsg(message, "R", category="GFE") - - # this way goes through java in separate threads, debugging doesn't work with it cause each - # thread has its own interpreter.... 
- # however, running the other way has issue with sampler caches not getting dumped between runs - waiter = TextProductFinishWaiter() - FormatterUtil.runFormatterScript(productType, vtecMode, database, cmdLineVars, "PRACTICE", drtTime, 0, waiter, self._dataMgr) - fcst = waiter.waitAndGetProduct() - state = waiter.getState() - -# import FormatterRunner -# try: -# fcst = FormatterRunner.runFormatter(databaseID=database, site="TBW", -# forecastList=[productType], cmdLineVarDict=cmdLineVars, -# vtecMode=vtecMode, vtecActiveTable='PRACTICE', drtTime=drtTime, -# username='GFETEST', dataMgr=self._dataMgr) -# except: -# fcst = '' -# LogStream.logProblem("Error generating product: " + LogStream.exc()) - - # write product to OUTPUT_DIR - - fname = name + ".txt" - path = os.path.join(OUTPUT_DIR, fname) - with open(path, 'w') as out: - out.write(fcst) - - self._doExecuteMsg(name, fcst, entry, drtTime, state) - - def _getCmdLineVars(self, entry): - cmdLineVars = entry.get("cmdLineVars", None) - if cmdLineVars is None: - return "" - # See if this is a dictionary - try: - exec "varDict = " + cmdLineVars - except: - # Process as method - # Otherwise, get the varDict from calling the given method - callMethod = cmdLineVars - productType = entry["productType"] - if sys.modules.has_key(productType): - del sys.modules[productType] - module = __import__(productType) - exec "callMethod = module.TextProduct()." 
+ callMethod - definition = module.TextProduct().Definition - defVars = entry.get("callVariables", None) - if defVars is not None: - for key in defVars.keys(): - definition[key] = defVars[key] - varDict = callMethod(definition) - #for key in varDict.keys(): - # print "varDict['"+str(key)+"'] =", varDict[key] - cmdLineVars = `varDict` - if cmdLineVars is not None: - return cmdLineVars - else: - return "" - - - def _setDefaults(self): - database = self.getSiteID() + "_GRID__Fcst_00000000_0000" - user = "GFETEST" - return database, user, None, None - - def _clearHazardsTable(self, entry): - clearHazards = entry.get("clearHazardsTable", 0) - if clearHazards: - if self._reportingMode not in ["Pretty"]: - self.output("WARNING::Clearing Hazards Table", self._outFile) - self._dataMgr.getClient().clearVTECTable(ActiveTableMode.PRACTICE) - - def _createCombinationsFile(self, entry): - fn = entry.get("combinationsFileName", None) - combinations = entry.get("combinations", None) - if fn is None or combinations is None: - return - fn = fn.replace("", self.getSiteID()) - self.writeActiveComboFile(combinations, fn) - - def _createGrids(self, entry): - createGrids = entry.get("createGrids", None) - if createGrids is None: - return - if self._createGridsRunTime == "no": - return - # Check to see if the last time we created grids we used the same list - # If so, do not repeat - if self._lastCreateGrids == createGrids: - return - self._lastCreateGrids = createGrids - - wxKeys = [] - hazKeys = [] - gridsTR = TimeRange.TimeRange(self._gridsStartTime, self._gridsStartTime + 12 * 3600) - self._determineMaxMinBeginEnd(entry) - hazardGrid = None - createdGrids = {} - for gridEntry in createGrids: - if len(gridEntry) == 7: - model, elementName, elementType, startHour, endHour, value, editAreas = gridEntry - defValue = 0 - elif len(gridEntry) == 8: - model, elementName, elementType, startHour, endHour, value, editAreas, defValue = gridEntry - else: - #print "GridEntries: ", gridEntry - raise 
Exception("Improper # of Grid Entries") - startHour = self._translateHour(startHour) - endHour = self._translateHour(endHour) - timeRange = TimeRange.TimeRange(gridsTR.startTime() + startHour * 3600, - gridsTR.startTime() + endHour * 3600) - #self.output("element name, type " + elementName + " " + elementType, self._outFile) - #self.output("startHour, endHour " + `startHour` +" "+`endHour`, self._outFile) - #self.output(" timeRange "+`timeRange`, self._outFile) - # Get the grid we already created, if it exists - key = (model, elementName, startHour, endHour) - if createdGrids.has_key(key): - grid = createdGrids[key] - else: - grid = self.newGrid(defValue) - - if editAreas == "all": - mask = self.newGrid(True, bool) - else: - mask = self._makeMask(editAreas) - #self.output("mask "+`size(mask)`, self._outFile) - #self.output("grid "+`size(grid)`, self._outFile) - #self.output("value "+`value` , self._outFile) - if elementType == "DISCRETE": - #self._addHazard(elementName, timeRange, value, mask) - value = self.getIndex(value, hazKeys) - #self.output("setting value "+value+" "+hazKeys, self._outFile) - grid[mask] = value - grid = grid.astype('int8') - elementType = self.getDataType(elementName) - self.createGrid(model, elementName, elementType, (grid, hazKeys), timeRange) - elif elementType == "WEATHER": - if value == "NoWx": - value = "::::" - value = self.getIndex(value, wxKeys) - #self.output("setting value "+value+" "+wxKeys, self._outFile) - grid[mask] = value - grid = grid.astype('int8') - elementType = self.getDataType(elementName) - self.createGrid(model, elementName, elementType, (grid, wxKeys), timeRange) - elif elementType == "VECTOR": - grid[mask] = value[0] - dirGrid = self.empty() - dirGrid[mask] = self.textToDir(value[1]) - elementType = self.getDataType(elementName) - self.createGrid(model, elementName, elementType, (grid, dirGrid), timeRange) - else: - grid[mask] = value - elementType = self.getDataType(elementName) - self.createGrid(model, 
elementName, elementType, grid, timeRange) - # Save the grid in the createdGridDict - createdGrids[key] = grid - self.saveElements([elementName], model) - if entry.get("publishGrids", 0): - self.publishElements([elementName], timeRange) - #LogStream.logDebug("Created grid: ", key) - - def _makeWritableCopy(self, entry, user='GFETEST'): - writables = entry.get("writeableCopies", None) - if writables is None: - return - failed = 0 - for fileSrc, fileType, destFilename in writables: - source = fileSrc.replace("", self.getSiteID()) - dest = destFilename.replace("", self.getSiteID()) - #try: - if 1 == 1: - TextFileUtil.makeWritableCopy(source, fileType, - dest, False); - self.output("Made makeWritableCopy: " + source + ' ' + \ - fileType + ' ' + dest, self._outFile) - #except: - else: - failed = failed + 1 - self.output("failed makeWritableCopy: " + source + ' ' + \ - fileType + ' ' + dest, self._outFile) - - if failed == 0: - if self._reportingMode not in ["Pretty"]: - self.output("All Writable Copies successful", self._outFile) - - def _fileChanges(self, entry): - fileChanges = entry.get("fileChanges", None) - if not fileChanges: - return False - - from LockingFile import File - - failed = 0 - for fileName, fileType, changeType, strings, cleanUp in fileChanges: - fileName = fileName.replace("", self.getSiteID()) - # Get the file - lf = TextFileUtil.getTextFile(fileName, fileType) - if lf.getName().endswith(".py"): - if sys.modules.has_key(fileName): - del sys.modules[fileName] - try: - with File(lf.getFile(), '', 'r') as pythonFile: - text = pythonFile.read() - except: - failed = 1 - print "FILE CHANGES failed reading from " + str(lf) - raise - #self.output("FILE CHANGES (initial) from " +str(lf) + "\n" + text, self._outFile) #DEBUG - - # Modify it - if changeType == "add": - text = text + strings - elif changeType == "replace": - # strings may be a tuple (orig, repl) or - # a list of tuples for multiple changes to the same file - if type(strings) == tuple: - strings 
= [strings] - for orig, repl in strings: - strIndex = text.find(orig) - text = text.replace(orig, repl) - - #self.output("FILE CHANGES (chg): " + orig + ' ' + repl, self._outFile) #DEBUG - #self.output("FILE CHANGES (mod): " + text, self._outFile) #DEBUG - - if strIndex < 0: - self.output("File change failed for " + orig, - self._outFile) - failed = 1 - # Write it - destLf = TextFileUtil.getUserTextFile(lf) - try: - with File(destLf.getFile(), '', 'w') as pythonFile: - pythonFile.write(text) - - destLf.save() - - except: - failed = 1 - print "FILE CHANGES failed writing to " + str(destLf) - raise - #self.output("FILE CHANGES (saved) to " + str(destLf) + "\n" + text, self._outFile) #DEBUG - - if len(fileChanges) and not failed: - if self._reportingMode not in ["Pretty"]: - self.output("All File Changes successful", self._outFile) - return True - - def _determineMaxMinBeginEnd(self, entry): - # Determine MaxT MinT MaxRH MinRH begin and end times - # relative to gridsStartTime - localtime = time.localtime(self._gridsStartTime.unixTime()) - localHour = localtime[3] - if localtime[8]: # daylight - maxBegin = 8 - else: - maxBegin = 7 - self._MaxTBegin = maxBegin - localHour # MaxT begins at 7 am standard time - self._MaxTEnd = self._MaxTBegin + 13 - self._MinTBegin = self._MaxTBegin + 12 - self._MinTEnd = self._MaxTBegin + 12 + 14 - - self._MinRHBegin = maxBegin - 4 - localHour # MinRH begins at 3 am standard time - self._MinRHEnd = self._MinRHBegin + 18 - self._MaxRHBegin = self._MinRHBegin + 12 - self._MaxRHEnd = self._MinRHBegin + 12 + 18 - - def _translateHour(self, hour): - if type(hour) is not types.StringType: - return hour - # Suppose hour == "MaxTBegin + 24" and self._MaxTBegin == 1 - for tStr in ["MaxTBegin", "MaxTEnd", "MinTBegin", "MinTEnd", - "MaxRHBegin", "MaxRHEnd", "MinRHBegin", "MinRHEnd"]: - if hour.find(tStr) >= 0: - exec "tHour = self._" + tStr # tHour = self._MaxTBegin - hour = hour.replace(tStr, `tHour`) # hour == "1 + 24" - exec "newHour = " + hour 
- return newHour - - def _deleteGrids(self, entry): - deleteGrids = entry.get("deleteGrids", None) - if deleteGrids is None or deleteGrids == []: - return - self._lastCreateGrids = [] #clear it after deleting grids - for gridEntry in deleteGrids: - model, elementName, level, startHour, endHour = gridEntry - if startHour == "all" or endHour == "all": - timeRange = TimeRange.allTimes() - else: - gridsTR = TimeRange.TimeRange(self._gridsStartTime, self._gridsStartTime + 12 * 3600) - timeRange = TimeRange.TimeRange(gridsTR.startTime() + startHour * 3600, - gridsTR.startTime() + endHour * 3600) - self.deleteGrid(model, elementName, level, timeRange) - self.saveElements([elementName], model) - if entry.get("publishGrids", 0): - self.publishElements([elementName], timeRange) - - # Required if Message is a trigger - def executeMsg(self, msg): - if self._process is None: - return - status = msg.status() #list of messages - for msg in status: - if msg.status() == AFPS.ProcessStatus.FINISHED: - self._doExecuteMsg(msg) #call for each possible message - - # Performs the processing - def _doExecuteMsg(self, name, fcst, entry, drtTime, state): - if self._reportingMode not in ["Pretty"]: - self.output("Calling TextProductTest Message Invoked " + `entry`, self._outFile) - - checkMethod = entry.get("checkMethod", None) - checkStrings = entry.get("checkStrings", None) - notCheckStrings = entry.get("notCheckStrings", None) - orderStrings = entry.get("orderStrings", None) - internalStrip = entry.get("internalStrip", 1) - commentary = entry.get("commentary", None) - - if True: - # Clean up fileChanges - self._cleanUpFiles(entry) - self._cleanUpWritableCopies(entry) - - self.output("\n----------------------------------------------", self._outFile) - - if self._reportingMode not in ["Pretty"]: - self.output(name + " (Elapsed time:" + self._getElapsedTimeStr() + ")", - self._outFile) - else: - self.output(name, self._outFile) - if commentary is not None: - self.output(commentary + "\n", 
self._outFile) - self._scripts += 1 - - if state.equals(ProductStateEnum.Failed): - self.output("Formatter failed!", self._outFile) - success = False - - else: - # Look at results - # If any of the check fails, the test fails - check1 = 1 - check2 = 1 - check3 = 1 - if checkMethod is not None: - check1 = checkMethod(fcst) - if self._reportingMode not in ["Pretty"]: - if not check1: - failMsg = "CHECK METHOD FAILED:" + name - self.output(failMsg, self._outFile) - else: - self.output("CHECK METHOD PASSED: " + name, self._outFile) - - # Prepare results for string searches - if fcst is not None: - fcstStr = fcst.replace("\n", " ") - fcstStrRaw = fcstStr - if internalStrip: - fcstStr = self.internalStrip(fcstStr) - fcstStr = fcstStr.replace("... ", "...") - fcstStrRaw = fcstStrRaw.replace("... ", "...") - - if checkStrings is not None: - check2 = self._checkStrs(name, fcst, checkStrings, - orderStrings, fcstStr, fcstStrRaw, internalStrip) - if check2: - if self._reportingMode not in ["Pretty"]: - self.output("STRING SEARCHES PASSED ", self._outFile) - - if notCheckStrings is not None: - check3 = self._checkStrs(name, fcst, notCheckStrings, 0, - fcstStr, fcstStrRaw, internalStrip, checkMode=0) - if check3: - if self._reportingMode not in ["Pretty"]: - self.output("'NOT' STRING SEARCHES PASSED ", self._outFile) - - success = check1 and check2 and check3 - - if success: - self._passed += 1 - logmsg = name + " Passed" - self.statusBarMsg(logmsg, "R", category="ISC") - else: - self._failures += 1 - logmsg = name + " Failed" - self.statusBarMsg(logmsg, "A", category="ISC") - self.output(logmsg, self._outFile) - - if self._failures > self._failLimit: - self._cleanUp(entry, drtTime) - # Stop processing - return - if self._reportingMode in ["Verbose", "Moderate"]: - if fcst and success: # checkStrings failure will print out the product already - self.output("\n" + fcst, self._outFile) - # DecodeVTEC if requested - # Note for later: if in practice mode, set active - # table to 
runVTECDecoder("PRACTICE", fcst) - if success and entry.get("decodeVTEC", 0): - self.__runVTECDecoder(fcst, drtTime) - - - # wait until table has been modified or 5 seconds - t1 = time.time(); - while not self.__listener.isReceivedNotification(): - time.sleep(0.1) - if time.time() - t1 > 20: - self.output("Vtec Decoder timed out!", self._outFile) - break - t2 = time.time(); - if self._reportingMode in ["Verbose", "Moderate"]: - self.output("Vtec Decoder wait time: " + "%6.2f" % (t2-t1), - self._outFile) - - self._cleanUp(entry, drtTime) - - def _cleanUp(self, entry, drtTime): - if drtTime is not None: - import offsetTime - offsetTime.reset() - reload(offsetTime) - -# fileChanges = entry.get("fileChanges", []) -# for fileName, fileType, changeType, strings, cleanUp in fileChanges: -# fileName = fileName.replace("", self.getSiteID()) -# reload(sys.modules[fileName]) -# productType = entry['productType'] -# if sys.modules.has_key(productType): -# del sys.modules[productType] - - def _cleanUpWritableCopies(self, entry, user="GFETEST"): - writables = entry.get("writeableCopies", None) - if writables is None: - return - for fileSrc, fileType, destFilename in writables: - source = fileSrc.replace("", self.getSiteID()) - dest = destFilename.replace("", self.getSiteID()) - #try: - if 1 == 1: - TextFileUtil.makeWritableCopy(source, fileType, - dest, True); - #except: - else: - pass - self.output("Cleanup writable copies: " + fileSrc, self._outFile) - - def _cleanUpFiles(self, entry): - fileChanges = entry.get("fileChanges", []) - for fileName, fileType, changeType, strings, cleanUp in fileChanges: - fileName = fileName.replace("", self.getSiteID()) - textFileID = TextFileUtil.getTextFile(fileName, fileType) - if self._leaveFileChanges == "no": - if cleanUp in ["delete", "undo"]: - # File changes are made as overrides at the GFETEST user level - # We just remove these files to restore the previous file - destLf = TextFileUtil.getUserTextFile(textFileID) - 
TextFileUtil.deleteTextFile(destLf) - - def _checkStrs(self, name, fcst, checkStrings, orderStrings, fcstStr, - fcstStrRaw, internalStrip, checkMode=1): - # Check the fcstStr for the list of checkStrings - # If a checkString is a tuple, at least one of the - # given tuple strings must be found in the fcstStr (or fcstStrRaw) - # If orderStrings == 1, the strings must occur in order - # in the fcstStr - # If checkMode == 0, the strings should NOT be found in the fcstStr - # If internalStrip == 2, check both the fcstStr, and fcstStrRaw - # versions. If at least one succeeds, the checkString succeeds. - curIndex = -1 - for cStr in checkStrings: - if type(cStr) == types.TupleType: - # Will pass if ANY of these strings are found - - # Not valid with checkMode of zero - if not checkMode: - continue - - found = 0 - for subStr in cStr: - found, strIndex, strIndexFlag = self._checkStr( - subStr, fcstStr, fcstStrRaw, internalStrip) - if found: - if self._reportingMode in ["Verbose"]: - self.output("StringCHECK: " + subStr + ' ' + `strIndex`, self._outFile) - elif self._reportingMode in ["Pretty"]: - self.output("CHECK String: " + subStr, self._outFile) - break - if not found: - self._failed(subStr, name, fcst, fcstStr, checkMode) - return 0 - else: - # Must find exact string - found, strIndex, strIndexFlag = self._checkStr(cStr, fcstStr, fcstStrRaw, internalStrip) - if self._reportingMode in ["Verbose"]: - self.output("StringCHECK: " + cStr + ' ' + `strIndex`, self._outFile) - elif self._reportingMode in ["Pretty"]: - self.output("CHECK String: " + cStr, self._outFile) - if strIndex == -1: - # Handle special case of SHOWERS/RAIN SHOWERS - if cStr.find("showers") >= 0: - cStr = cStr.replace("showers", "rain showers") - found, strIndex, strIndexFlag = self._checkStr( - cStr, fcstStr, fcstStrRaw, internalStrip) - if cStr.find("Showers") >= 0: - cStr = cStr.replace("Showers", "Rain showers") - found, strIndex, strIndexFlag = self._checkStr( - cStr, fcstStr, fcstStrRaw, 
internalStrip) - if strIndex < 0: - if checkMode: - self._failed(cStr, name, fcst, fcstStr, checkMode) - return 0 - else: - if not checkMode: - self._failed(cStr, name, fcst, fcstStr, checkMode) - return 0 - elif checkMode == 0: #not check strings, and not check mode - self._failed(cStr, name, fcst, fcstStr, checkMode) - return 0 - - # Check the ordering - if orderStrings: - if strIndexFlag == "raw": - fcstStrRaw = fcstStrRaw[strIndex:] - else: - fcstStr = fcstStr[strIndex:] - return 1 - - def _checkStr(self, subStr, fcstStr, fcstStrRaw, internalStrip): - # Check for the given string. - # If internalStrip is 2, check both the raw and stripped fcstStrs. - found = 0 - strIndexFlag = "regular" - strIndex = fcstStr.find(subStr) - if strIndex >= 0: - found = 1 - elif internalStrip == 2: - # Try the raw fcstStr - strIndex = fcstStrRaw.find(subStr) - if strIndex >= 0: - found = 1 - strIndexFlag = "raw" - return found, strIndex, strIndexFlag - - def _failed(self, str, name, fcst, fcstStr, checkMode): - failMsg = "STRING SEARCH FAILED: " + name - if checkMode: - errorStr = "Cannot find: " - else: - errorStr = "Found and should not have found: " - failMsg = failMsg + " " + errorStr + `str` - failMsg = failMsg + " Product Result \n" + fcst - self.output(failMsg, self._outFile) - - def _finished(self): - message = "TESTING COMPLETE " + \ - self._scriptName + " \n" + \ - self._getElapsedTimeStr() + \ - "\n" + `self._scripts` + " SCRIPTS RUN.\n" + \ - `self._passed` + " TESTS PASSED.\n" + \ - `self._failures` + " TESTS FAILED." 
- - self.output(message, self._outFile) - self._outFile.close() - - #send user alert message to GFEs - if self._failures: - status = "U" - else: - status = "S" - self.statusBarMsg(message, status, category="GFE") - - def _getElapsedTimeStr(self): - eTime = (time.time() - self._timer) / 60.0 - return "%4.2f" % eTime + " minutes" - - # Optional -- Invoked by Button 2 in ITool Dialog - def cleanUp(self): - self.output("Calling MyTool CleanUp method", self._outFile) - # Can be called at any time to abort the script - self._testScript = [] - self._outFile.close() - - def __runVTECDecoder(self, fcst, drtString=None): - import tempfile, urlparse - from com.raytheon.uf.viz.core import VizApp - - with tempfile.NamedTemporaryFile(mode='w', prefix="autoTestProd", delete=False) as file: - file.write(fcst) - - url = urlparse.urlparse(VizApp.getHttpServer()) - commandString = "VTECDecoder -f " + file.name + " -d -g -a practice -h " + url.hostname - if drtString is not None: - commandString += " -z " + drtString - - expectedPil = fcst.split("\n",2)[1] - self.__listener.resetListener(expectedPil) - os.system(commandString) - - -def main(): - os.environ["TZ"] = 'EST5EDT' - time.tzset() - import _strptime - _strptime._cache_lock.acquire() - _strptime._TimeRE_cache = _strptime.TimeRE() - _strptime._regex_cache = {} - _strptime._cache_lock.release() - - from java.lang import System - System.setProperty('user.name', 'GFETEST') - - from com.raytheon.uf.viz.core.localization import LocalizationManager - from com.raytheon.uf.common.localization import LocalizationContext - LocalizationLevel = LocalizationContext.LocalizationLevel - LocalizationManager.registerContextName(LocalizationLevel.USER, 'GFETEST'); - - import loadConfig - loadConfig.loadPreferences("gfeConfig") - - from com.raytheon.viz.gfe.core import DataManager - dm = DataManager.getInstance(None) - import IToolInterface - # have to add the user dir to the python path here since we just switched users - # TODO look into 
switching it from the java - from com.raytheon.uf.common.dataplugin.gfe.python import GfePyIncludeUtil - - for s in sys.path: - if 'textUtilities' in s \ - or 'textProducts' in s \ - or 'combinations' in s: - sys.path.remove(s) - - for s in str(GfePyIncludeUtil.getHeadlineIncludePath()).split(':'): - sys.path.append(s) - - for s in str(GfePyIncludeUtil.getTextUtilitiesIncludePath()).split(':'): - sys.path.append(s) - - for s in str(GfePyIncludeUtil.getTextProductsIncludePath()).split(':'): - sys.path.append(s) - - for s in str(GfePyIncludeUtil.getCombinationsIncludePath()).split(':'): - sys.path.append(s) - - # create output directory for products - try: - os.makedirs(OUTPUT_DIR) - except OSError, e: - if e.errno != errno.EEXIST: - self.output("%s: '%s'" % (e.strerror,e.filename)) - - scriptDir = GfePyIncludeUtil.getIToolIncludePath() - runner = IToolInterface.IToolInterface(scriptDir) - runner.instantiate('TextProductTest', 'ITool', dbss=dm) - processVariableList = ProcessVariableList.ProcessVariableList('TextProductTest', VariableList) - varDict = processVariableList.varDict() - if varDict is None or len(varDict) == 0: - return - runner.runITool('TextProductTest', 'ITool', 'execute', varDict=varDict) - - -if __name__ == "__main__": - main() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TextProductTest +# +# Author: +# ---------------------------------------------------------------------------- + +##INSTRUCTIONS FOR USE: + +## 0. Set up a site with all products. From GFESuite directory: +## --make SITE=TBW dev +## --Add to release/data/textInstall/afos2awips.txt: + +## MIAOFFABC UFUS42 KTBW +## MIANSHABC UFUS42 KTBW +## MIAGLFABC UFUS42 KTBW +## MIAGLFDEF UFUS42 KTBW +## MIASAFABC UFUS42 KTBW + +## --start (or re-start the server) +## NOTE: You will have to repeat this step every time you do +## --make SITE=TBW dev + +## 1. Bring up GFETEST with TestConfig file. +## 1.1 From release/bin, set up default edit areas: +## run/setupTextEA +## 1.2 Copy examples/iTool/IToolLocalConfig.py release/etc/SITE/localConfig.py +## 2. Bring up the iTools dialog: GFE-->Define iTools +## 3. Activate "TextProductTest": MB3-->Activate +## 4. To Test CivilEmergency Products: +## --MB1 over "TextProductTest" +## --Choose mode: Verbose mode will display product output in terminal window. +## --Click "Run" +## --You can watch the progress of the products running from the +## GFE Process Monitor: Products-->Process Monitor +## 5. To Test Hazard Products: +## --MB1 over "TextProductTest" +## --Choose "Hazards_TestScript" +## --Choose mode: Verbose mode will display product output in terminal window. +## --Click "Run" +## --You can watch the progress of the products running from the +## GFE Process Monitor: Products-->Process Monitor +## 6. 
To test the Routine Forecast products: +## --Run release/bin/run/setupTextEA +## --Edit AFD_Site_Definition file "state_IDS" entry +## --MB1 over "TextProductTest" +## --Choose "RoutineLevel1_TestScript" +## --Choose mode: Verbose mode will display product output in terminal window. +## --Click "Run" +## --You can watch the progress of the products running from the +## GFE Process Monitor: Products-->Process Monitor +## +## NOTE: To abort a script while running, click MB2 over TextProductTest. + +## To make your own test script: +## --Follow the example of the existing Test Scripts +## --Each script entry is a dictionary with the following fields: +## "name": required. This must be a unique test name +## "productType": required. This is the name of the text product +## for the -t option in the command line. For example: ZFP_BOU +## You may use the variable in the string. +## Can be None. +## "commentary": optional. A text string describing the test. +## "cmdLineVars": optional. Command line variables for the product. +## Can be a method to call to get the command line variables. +## The method will be sent the product Definition. +## "callVariables": optional. Applied only if cmdLineVars is a method. +## Dictionary of "key:value" pairs to be added to the Defintion +## before calling the cmdLineVars method. +## "database": optional. Default is Fcst database. +## "checkMethod": optional. Method to call to check the product results. +## The method will be given the product text output and should return +## 1 if the tests pass, 0 if failed. +## "checkStrings": optional. List of strings which must be included +## in the product results for the test to pass. +## "orderStrings": optional. If 1, will require that "checkStrings" appear +## in the product in the order given in the checkStrings list. +## "notCheckStrings": optional. List of strings which must NOT be included +## in the product results for the test to pass. +## "combinationsFileName": optional. 
Name of Combinations file to create. +## May include the variable . +## "combinations": optional. Combinations for the Combinations file. +## "deleteGrids": optional. List of tuples each +## representing a grid that should be deleted before running the +## product. The tuple consists of: +## model, +## weather element name, +## level, (Can be "SFC", or D2D level e.g. "MB500") +## start hour for grid to be deleted (relative to gridsStartTime) +## end hour for grid to be deleted (relative to gridsStartTime) +## NOTE: "deleteGrids" happens prior to "createGrids" +## "createGrids": optional. List of tuples each +## representing a grid that should be created before running the +## product. The tuple consists of: +## model, +## weather element name, +## elementType, (Can be "SCALAR", "VECTOR", "WEATHER", or "DISCRETE") +## start hour for grid to be created (relative to gridsStartTime) +## end hour for grid to be created (relative to gridsStartTime) +## Note: start and end hour can be a string expression involving +## ONE OF: MaxTBegin, MaxTEnd, MinTBegin, MinTEnd, +## MaxRHBegin, MaxRHEnd, MinRHBegin, MinRHEnd +## which are relative to midnight of the day of gridsStartTime. +## data value for grid to be created +## (for hazards, value will be combined with other +## values specified for the grid) +## edit areas for the value: may be "all" to specify entire grid +## defaultValue (optional): If the default value for this grid +## is not zero, include it here. For example, "", or 1 for LAL +## "writableCopies": optional. List of tuples each representing a +## writable file copy that will be performed prior to running +## the product. The file is deleted after running the text +## formatter. The tuple consists of: +## fileName: file to be copied from BASE. The name can +## contain which will be replaced by the siteID. No +## file extension is needed. 
+## fileType: directory of source of file, relative to the +## the databases level, such as "TEXT/Combinations" +## destFileName: renaming of the original filename may be +## accomplished here. The "user" is determined by the +## "user" definition in the test script, and defaults to +## GFETEST. +## "fileChanges": optional. List of tuples each representing a file +## that should be changed before running the product. The tuple consists +## of: +## fileName: file to be changed. The name can contain +## which will be replaced by the siteID +## fileType: TEXT category, such as "TextUtility", +## type of change: can be +## "add": the string will be added to the file +## "replace": a given string will be replaced by another +## strings: If add, the string to be added. +## If replace, a tuple of original and replacement strings or +## a list of tuples for multiple replacements in the same file +## cleanup action: What to do when product is finished. Can be: +## "delete": Delete the file and revert to baseline version +## "undo": Undo the add or replace +## "leave": Leave altered file as is +## "publishGrids": optional. If 1, will publish grids before running product. +## "gridsStartTime": optional. Format YYYYMMDD_HHMM OR AbsTime. If present, will +## create and delete grids relative to this time. If not present, +## will use the "Today" select time range start time. +## "drtTime": optional. Format YYYYMMDD_HHMM OR AbsTime. If present, +## will run the product in this displaced real time. +## If not present, and gridsStartTime is present, +## will run the product using gridsStartTime as the displaced real time. +## "decodeVTEC": optional. If 1, will update the active table after running +## the product. +## "internalStrip": optional. If set to 0, will not strip multiple spaces from the +## result string before matching checkString. Default is 1. +## If set to 2, will try both the stripped and non-stripped +## fcstStr for a match. 
if at least one matches each +## checkstring, the test will succeed. Note: This will +## handle products with intermixed narrative and tabular +## formats such as the FWS. +## "clearHazardsTable": optional. If 1, clear out the HazardsTable. +## "vtecMode": optional. Can be set to "X", "E", "T", or "O" (-T/-E) + +## NOTE: by default, VTEC iTool Tests are run in operational mode i.e. -v O (and no -T option) +## (unless 'vtecMode" is specified) +## At some point, we should make sure they run in PRACTICE mode as well +## (for NGIT testing) + +## +# This is a base file that is not intended to be overridden. +## + +import sys, time, os, types, copy, inspect, errno +import LogStream +import AbsTime, TimeRange +import numpy, pickle +import imp + + +OUTPUT_DIR = "/tmp/products/autoTest" + +# Triggers can be: +# Message enums and executeMsg will have the message as its argument + +#Triggers = [AFPS.Message.PROCESS_STATUS] +HideTool = 0 +InitialActivation = 1 + +### If desired, Set up variables to be solicited from the user: +VariableList = [ + ("Test Script Name", [], "check", + [ + "CreateGrids", + "RoutineLevel1_TestScript", + "RoutineLevel1_Baseline_TestScript", + "RoutineLevel1_Region_TestScript ", + "RoutineLevel1_AFD_TestScript", + "RoutineLevel1_AFM_TestScript", + "RoutineLevel1_CCF_TestScript", + "RoutineLevel1_FWF_TestScript", + "RoutineLevel1_FWFTable_TestScript", + "RoutineLevel1_FWS_TestScript", + "RoutineLevel1_GLF_TestScript", + "RoutineLevel1_PFM_TestScript", + "RoutineLevel1_SAF_TestScript", + "RoutineLevel1_SFT_TestScript", + "RoutineLevel1_SRF_TestScript", + "RoutineLevel1_ZFP_TestScript", + "RoutineLevel2_1_TestScript", + "RoutineLevel3_1_TestScript", + "RoutineLevel3_2_TestScript", + "RoutineLevel3_3_TestScript -- Local Effects", + "RoutineLevel4_1_TestScript -- More Local Effects", + "RoutineLevel5_1_TestScript -- SnowAccum, PopWx", + "RoutineLevel5_2_TestScript -- Miscellaneous Product Tests", + "RoutineLevel5_3_TestScript -- Miscellaneous Phrase 
Tests", + "RoutineLevel5_4_TestScript -- Temp Phrases", + "SPW_1_TestScript -- Automatically generated tests for SPW", + "AllowedHazards_TestScript", + "ExpireTime_TestScript", + "ExpireAlg_TestScript", + " ", + "CivilEmerg_TestScript", + "Hazards_TestScript", + "Hazard_HLS_TestScript", + "Hazards_Overview_Options_TestScript", + "HazardsComplex1_TestScript -- CFW", + "VTEC_GHG_Complex1_TestScript -- WSW", + "VTEC_GHG_Complex2_TestScript", + "VTEC_GHG_Complex3_TestScript", + "VTEC_GHG_Complex4_TestScript", + "VTEC_GHG_Complex5_TestScript", + "VTEC_GHG_Complex6_TestScript", + "VTEC_GHG_WCN_TestScript", + "VTEC_GHG_FFA_TestScript", + "VTEC_GHG_GenHaz_TestScript", + "HeadlinesTiming_Watch_TestScript", + "HeadlinesTiming_Warn_TestScript", + "VTEC_EXP_NEW_TestScript", + "VTEC_CrossingYear_TestScript", + "VTEC_TestMode_TestScript", + "VTEC_EXTtoNOW_TestScript", + "VTEC_EXT_UPG_TestScript", + "VTEC_GHG_UPG_SplitETNs_TestScript", + "VTEC_ETN_RESET_Tmode_TestScript", + "VTEC_ETN_Reuse_TestScript", + "VTEC_Reset_Start_TestScript", + "MultipleTZ_TestScript", + "Headline_UPG_TestScript", + "HeadlineSort_TestScript", + "VTEC_Hazard_DR21021_TestScript", + ]), + ("Enter Test Script Name" , "", "alphaNumeric"), + ("Output File " , "/tmp/TestResults.txt", "alphaNumeric"), + ("Failure Limit ", 0, "numeric"), + ("Test Suite", "Run All Tests", "radio", ["Run All Tests", "Individual Tests"]), + ("Reporting Mode" , "Verbose", "radio", ["Verbose", "Moderate", "Brief", "Pretty"]), + ("Run\nSetupTextEA?" , "no", "radio", ["yes", "no"]), + ("Processor?" , "Local", "radio", ["Server", "Local"]), + ("Create Grids?" , "yes", "radio", ["yes", "no"]), + ("Leave File\nChanges?" 
, "no", "radio", ["yes", "no"]), + ] + +# Set up Class +import ISmartScript +import ProcessVariableList + +from com.raytheon.viz.gfe.textformatter import TextProductFinishWaiter, FormatterUtil, TextProductManager +from com.raytheon.viz.gfe.smarttool import TextFileUtil +from com.raytheon.viz.gfe.dialogs.formatterlauncher import ConfigData +ProductStateEnum = ConfigData.ProductStateEnum +from com.raytheon.uf.common.activetable import ActiveTableMode +from com.raytheon.uf.viz.core.notification.jobs import NotificationManagerJob +from com.raytheon.viz.gfe.textformatter.test import AutoTestVTECNotificationListener + +class ProcessInfo: + def __init__(self, entry, name, pid, script): + self.__entry = entry + self.__name = name + self.__pid = pid + self.__script = script + def entry(self): + return self.__entry + def name(self): + return self.__name + def pid(self): + return self.__pid + def script(self): + return self.__script + + +class ITool (ISmartScript.ISmartScript): + def __init__(self, dbss): + ISmartScript.ISmartScript.__init__(self, dbss) + self._dataMgr = dbss + self._process = None + + # Button 1 in ITool Dialog + def execute(self, varDict): + "Testing for Text Products" + self._failLimit = varDict["Failure Limit "] + self._outFile = open(varDict["Output File "], 'w') + self._reportingMode = varDict["Reporting Mode"] + if self._reportingMode not in ["Pretty"]: + self.output("Calling TextProductTest User Invoked", self._outFile) + self._timer = time.time() + self._testSuite = varDict["Test Suite"] + self._lastCreateGrids = None + self._createGridsRunTime = varDict["Create Grids?"] + self._leaveFileChanges = varDict["Leave File\nChanges?"] + processor = varDict["Processor?"] + setupTextEA = varDict["Run\nSetupTextEA?"] + if setupTextEA == "yes": + if self._reportingMode not in ["Pretty"]: + self.output("Calling setupTextEA", self._outFile) + import SetupTextEA + SetupTextEA.main() + scriptNames = varDict["Test Script Name"] + enterName = varDict["Enter Test 
Script Name"] + if enterName: + scriptNames.append(enterName) + if scriptNames == []: + return + self._testScript = [] + for scriptName in scriptNames: + scriptName = scriptName.split(" -- ")[0] + scriptName = scriptName.strip() + if len(scriptName) == 0: + continue + + if scriptName in sys.modules: + del sys.modules[scriptName] + exec("import " + scriptName) + exec("testScript = " + scriptName + ".testScript(self, self._dataMgr)") + self._testScript = self._testScript + testScript + self._process = None + self._passed = 0 + self._failures = 0 + self._scripts = 0 + self._scriptName = repr(scriptNames) + + # Let User Choose Individual Tests + if self._testSuite == "Individual Tests": + success = self._pareTestScript() + if success is None: + return + + self.__listener = AutoTestVTECNotificationListener() + NotificationManagerJob.addObserver('edex.alerts.vtec', self.__listener) + + # Run the test scripts + for index in range(len(self._testScript)): + self._runTestScript(index) + if self._failures > self._failLimit: + break + time.sleep(2) # avoid some race conditions with fileChanges + + NotificationManagerJob.removeObserver('edex.alerts.vtec', self.__listener) + self._finished() + + + # We will wait for it to finish before incrementing index and running the + # next script (see executeMsg below) + + def _pareTestScript(self): + newScript = [] + nameList = [] + for entry in self._testScript: + nameList.append(entry["name"]) + varList = [("Choose Tests", [], "check", nameList)] + processVarList = ProcessVariableList.ProcessVariableList( + "Choose Individual Tests", varList, varDict={}, parent=None) + self._selectionStatus = processVarList.status() + if not self._selectionStatus == "OK": + return None # User Cancelled + varDict = processVarList.varDict() + testList = varDict["Choose Tests"] + for entry in self._testScript: + if entry["name"] in testList: + newScript.append(entry) + self._testScript = newScript + return 1 + + def _runTestScript(self, index): + entry = 
self._testScript[index] + + # Set defaults + database, user, checkMethod, checkStrings = self._setDefaults() + # Process entry + name = entry["name"] + productType = entry["productType"] + database = entry.get("database", database) + user = entry.get("user", user) + # gridsStartTime + self._gridsStartTime = entry.get("gridsStartTime", None) + if self._gridsStartTime is None: + self._gridsStartTime = self.getTimeRange("Today").startTime() + else: + if not isinstance(self._gridsStartTime, AbsTime.AbsTime): + self._gridsStartTime = self.getAbsTime(self._gridsStartTime) + #print "gridsStartTime", self._gridsStartTime + # drtTime + drtTime = entry.get("drtTime") + if drtTime is None: + drtTime = entry.get("gridsStartTime", None) + #print "\n*********drtTime", drtTime + if drtTime is not None: + if isinstance(drtTime, AbsTime.AbsTime): + drtTime = self.getTimeStr(drtTime) + drtStr = drtTime + self._drtString = drtTime + else: + drtStr = "" + self._drtString = None + #print "drtStr", drtStr + + + self._clearHazardsTable(entry) + self._createCombinationsFile(entry) + self._deleteGrids(entry) + self._createGrids(entry) + self._makeWritableCopy(entry) + self._fileChanges(entry) + + cmdLineVars = self._getCmdLineVars(entry) + vtecMode = entry.get("vtecMode", None) + + if productType is None: + return + + database = database.replace("", self.getSiteID()) + + # Run the product + if self._reportingMode not in ["Pretty"]: + self.output("Running " + name, self._outFile) + message = "Running " + name + self.statusBarMsg(message, "R", category="GFE") + + # this way goes through java in separate threads, debugging doesn't work with it cause each + # thread has its own interpreter.... 
+ # however, running the other way has issue with sampler caches not getting dumped between runs + waiter = TextProductFinishWaiter() + FormatterUtil.runFormatterScript(productType, vtecMode, database, cmdLineVars, "PRACTICE", drtTime, 0, waiter, self._dataMgr) + fcst = waiter.waitAndGetProduct() + state = waiter.getState() + +# import FormatterRunner +# try: +# fcst = FormatterRunner.runFormatter(databaseID=database, site="TBW", +# forecastList=[productType], cmdLineVarDict=cmdLineVars, +# vtecMode=vtecMode, vtecActiveTable='PRACTICE', drtTime=drtTime, +# username='GFETEST', dataMgr=self._dataMgr) +# except: +# fcst = '' +# LogStream.logProblem("Error generating product: " + LogStream.exc()) + + # write product to OUTPUT_DIR + + fname = name + ".txt" + path = os.path.join(OUTPUT_DIR, fname) + with open(path, 'w') as out: + out.write(fcst) + + self._doExecuteMsg(name, fcst, entry, drtTime, state) + + def _getCmdLineVars(self, entry): + cmdLineVars = entry.get("cmdLineVars", None) + if cmdLineVars is None: + return "" + # See if this is a dictionary + try: + exec("varDict = " + cmdLineVars) + except: + # Process as method + # Otherwise, get the varDict from calling the given method + callMethod = cmdLineVars + productType = entry["productType"] + if productType in sys.modules: + del sys.modules[productType] + module = __import__(productType) + exec("callMethod = module.TextProduct()." 
+ callMethod) + definition = module.TextProduct().Definition + defVars = entry.get("callVariables", None) + if defVars is not None: + for key in list(defVars.keys()): + definition[key] = defVars[key] + varDict = callMethod(definition) + #for key in varDict.keys(): + # print "varDict['"+str(key)+"'] =", varDict[key] + cmdLineVars = repr(varDict) + if cmdLineVars is not None: + return cmdLineVars + else: + return "" + + + def _setDefaults(self): + database = self.getSiteID() + "_GRID__Fcst_00000000_0000" + user = "GFETEST" + return database, user, None, None + + def _clearHazardsTable(self, entry): + clearHazards = entry.get("clearHazardsTable", 0) + if clearHazards: + if self._reportingMode not in ["Pretty"]: + self.output("WARNING::Clearing Hazards Table", self._outFile) + self._dataMgr.getClient().clearVTECTable(ActiveTableMode.PRACTICE) + + def _createCombinationsFile(self, entry): + fn = entry.get("combinationsFileName", None) + combinations = entry.get("combinations", None) + if fn is None or combinations is None: + return + fn = fn.replace("", self.getSiteID()) + self.writeActiveComboFile(combinations, fn) + + def _createGrids(self, entry): + createGrids = entry.get("createGrids", None) + if createGrids is None: + return + if self._createGridsRunTime == "no": + return + # Check to see if the last time we created grids we used the same list + # If so, do not repeat + if self._lastCreateGrids == createGrids: + return + self._lastCreateGrids = createGrids + + wxKeys = [] + hazKeys = [] + gridsTR = TimeRange.TimeRange(self._gridsStartTime, self._gridsStartTime + 12 * 3600) + self._determineMaxMinBeginEnd(entry) + hazardGrid = None + createdGrids = {} + for gridEntry in createGrids: + if len(gridEntry) == 7: + model, elementName, elementType, startHour, endHour, value, editAreas = gridEntry + defValue = 0 + elif len(gridEntry) == 8: + model, elementName, elementType, startHour, endHour, value, editAreas, defValue = gridEntry + else: + #print "GridEntries: ", 
gridEntry + raise Exception("Improper # of Grid Entries") + startHour = self._translateHour(startHour) + endHour = self._translateHour(endHour) + timeRange = TimeRange.TimeRange(gridsTR.startTime() + startHour * 3600, + gridsTR.startTime() + endHour * 3600) + #self.output("element name, type " + elementName + " " + elementType, self._outFile) + #self.output("startHour, endHour " + `startHour` +" "+`endHour`, self._outFile) + #self.output(" timeRange "+`timeRange`, self._outFile) + # Get the grid we already created, if it exists + key = (model, elementName, startHour, endHour) + if key in createdGrids: + grid = createdGrids[key] + else: + grid = self.newGrid(defValue) + + if editAreas == "all": + mask = self.newGrid(True, bool) + else: + mask = self._makeMask(editAreas) + #self.output("mask "+`size(mask)`, self._outFile) + #self.output("grid "+`size(grid)`, self._outFile) + #self.output("value "+`value` , self._outFile) + if elementType == "DISCRETE": + #self._addHazard(elementName, timeRange, value, mask) + value = self.getIndex(value, hazKeys) + #self.output("setting value "+value+" "+hazKeys, self._outFile) + grid[mask] = value + grid = grid.astype('int8') + elementType = self.getDataType(elementName) + self.createGrid(model, elementName, elementType, (grid, hazKeys), timeRange) + elif elementType == "WEATHER": + if value == "NoWx": + value = "::::" + value = self.getIndex(value, wxKeys) + #self.output("setting value "+value+" "+wxKeys, self._outFile) + grid[mask] = value + grid = grid.astype('int8') + elementType = self.getDataType(elementName) + self.createGrid(model, elementName, elementType, (grid, wxKeys), timeRange) + elif elementType == "VECTOR": + grid[mask] = value[0] + dirGrid = self.empty() + dirGrid[mask] = self.textToDir(value[1]) + elementType = self.getDataType(elementName) + self.createGrid(model, elementName, elementType, (grid, dirGrid), timeRange) + else: + grid[mask] = value + elementType = self.getDataType(elementName) + 
self.createGrid(model, elementName, elementType, grid, timeRange) + # Save the grid in the createdGridDict + createdGrids[key] = grid + self.saveElements([elementName], model) + if entry.get("publishGrids", 0): + self.publishElements([elementName], timeRange) + #LogStream.logDebug("Created grid: ", key) + + def _makeWritableCopy(self, entry, user='GFETEST'): + writables = entry.get("writeableCopies", None) + if writables is None: + return + failed = 0 + for fileSrc, fileType, destFilename in writables: + source = fileSrc.replace("", self.getSiteID()) + dest = destFilename.replace("", self.getSiteID()) + #try: + if 1 == 1: + TextFileUtil.makeWritableCopy(source, fileType, + dest, False); + self.output("Made makeWritableCopy: " + source + ' ' + \ + fileType + ' ' + dest, self._outFile) + #except: + else: + failed = failed + 1 + self.output("failed makeWritableCopy: " + source + ' ' + \ + fileType + ' ' + dest, self._outFile) + + if failed == 0: + if self._reportingMode not in ["Pretty"]: + self.output("All Writable Copies successful", self._outFile) + + def _fileChanges(self, entry): + fileChanges = entry.get("fileChanges", None) + if not fileChanges: + return False + + from LockingFile import File + + failed = 0 + for fileName, fileType, changeType, strings, cleanUp in fileChanges: + fileName = fileName.replace("", self.getSiteID()) + # Get the file + lf = TextFileUtil.getTextFile(fileName, fileType) + if lf.getName().endswith(".py"): + if fileName in sys.modules: + del sys.modules[fileName] + try: + with File(lf.getFile(), '', 'r') as pythonFile: + text = pythonFile.read() + except: + failed = 1 + print("FILE CHANGES failed reading from " + str(lf)) + raise + #self.output("FILE CHANGES (initial) from " +str(lf) + "\n" + text, self._outFile) #DEBUG + + # Modify it + if changeType == "add": + text = text + strings + elif changeType == "replace": + # strings may be a tuple (orig, repl) or + # a list of tuples for multiple changes to the same file + if type(strings) == 
tuple: + strings = [strings] + for orig, repl in strings: + strIndex = text.find(orig) + text = text.replace(orig, repl) + + #self.output("FILE CHANGES (chg): " + orig + ' ' + repl, self._outFile) #DEBUG + #self.output("FILE CHANGES (mod): " + text, self._outFile) #DEBUG + + if strIndex < 0: + self.output("File change failed for " + orig, + self._outFile) + failed = 1 + # Write it + destLf = TextFileUtil.getUserTextFile(lf) + try: + with File(destLf.getFile(), '', 'w') as pythonFile: + pythonFile.write(text) + + destLf.save() + + except: + failed = 1 + print("FILE CHANGES failed writing to " + str(destLf)) + raise + #self.output("FILE CHANGES (saved) to " + str(destLf) + "\n" + text, self._outFile) #DEBUG + + if len(fileChanges) and not failed: + if self._reportingMode not in ["Pretty"]: + self.output("All File Changes successful", self._outFile) + return True + + def _determineMaxMinBeginEnd(self, entry): + # Determine MaxT MinT MaxRH MinRH begin and end times + # relative to gridsStartTime + localtime = time.localtime(self._gridsStartTime.unixTime()) + localHour = localtime[3] + if localtime[8]: # daylight + maxBegin = 8 + else: + maxBegin = 7 + self._MaxTBegin = maxBegin - localHour # MaxT begins at 7 am standard time + self._MaxTEnd = self._MaxTBegin + 13 + self._MinTBegin = self._MaxTBegin + 12 + self._MinTEnd = self._MaxTBegin + 12 + 14 + + self._MinRHBegin = maxBegin - 4 - localHour # MinRH begins at 3 am standard time + self._MinRHEnd = self._MinRHBegin + 18 + self._MaxRHBegin = self._MinRHBegin + 12 + self._MaxRHEnd = self._MinRHBegin + 12 + 18 + + def _translateHour(self, hour): + if type(hour) is not bytes: + return hour + # Suppose hour == "MaxTBegin + 24" and self._MaxTBegin == 1 + for tStr in ["MaxTBegin", "MaxTEnd", "MinTBegin", "MinTEnd", + "MaxRHBegin", "MaxRHEnd", "MinRHBegin", "MinRHEnd"]: + if hour.find(tStr) >= 0: + exec("tHour = self._" + tStr) # tHour = self._MaxTBegin + hour = hour.replace(tStr, repr(tHour)) # hour == "1 + 24" + 
exec("newHour = " + hour) + return newHour + + def _deleteGrids(self, entry): + deleteGrids = entry.get("deleteGrids", None) + if deleteGrids is None or deleteGrids == []: + return + self._lastCreateGrids = [] #clear it after deleting grids + for gridEntry in deleteGrids: + model, elementName, level, startHour, endHour = gridEntry + if startHour == "all" or endHour == "all": + timeRange = TimeRange.allTimes() + else: + gridsTR = TimeRange.TimeRange(self._gridsStartTime, self._gridsStartTime + 12 * 3600) + timeRange = TimeRange.TimeRange(gridsTR.startTime() + startHour * 3600, + gridsTR.startTime() + endHour * 3600) + self.deleteGrid(model, elementName, level, timeRange) + self.saveElements([elementName], model) + if entry.get("publishGrids", 0): + self.publishElements([elementName], timeRange) + + # Required if Message is a trigger + def executeMsg(self, msg): + if self._process is None: + return + status = msg.status() #list of messages + for msg in status: + if msg.status() == AFPS.ProcessStatus.FINISHED: + self._doExecuteMsg(msg) #call for each possible message + + # Performs the processing + def _doExecuteMsg(self, name, fcst, entry, drtTime, state): + if self._reportingMode not in ["Pretty"]: + self.output("Calling TextProductTest Message Invoked " + repr(entry), self._outFile) + + checkMethod = entry.get("checkMethod", None) + checkStrings = entry.get("checkStrings", None) + notCheckStrings = entry.get("notCheckStrings", None) + orderStrings = entry.get("orderStrings", None) + internalStrip = entry.get("internalStrip", 1) + commentary = entry.get("commentary", None) + + if True: + # Clean up fileChanges + self._cleanUpFiles(entry) + self._cleanUpWritableCopies(entry) + + self.output("\n----------------------------------------------", self._outFile) + + if self._reportingMode not in ["Pretty"]: + self.output(name + " (Elapsed time:" + self._getElapsedTimeStr() + ")", + self._outFile) + else: + self.output(name, self._outFile) + if commentary is not None: + 
self.output(commentary + "\n", self._outFile) + self._scripts += 1 + + if state.equals(ProductStateEnum.Failed): + self.output("Formatter failed!", self._outFile) + success = False + + else: + # Look at results + # If any of the check fails, the test fails + check1 = 1 + check2 = 1 + check3 = 1 + if checkMethod is not None: + check1 = checkMethod(fcst) + if self._reportingMode not in ["Pretty"]: + if not check1: + failMsg = "CHECK METHOD FAILED:" + name + self.output(failMsg, self._outFile) + else: + self.output("CHECK METHOD PASSED: " + name, self._outFile) + + # Prepare results for string searches + if fcst is not None: + fcstStr = fcst.replace("\n", " ") + fcstStrRaw = fcstStr + if internalStrip: + fcstStr = self.internalStrip(fcstStr) + fcstStr = fcstStr.replace("... ", "...") + fcstStrRaw = fcstStrRaw.replace("... ", "...") + + if checkStrings is not None: + check2 = self._checkStrs(name, fcst, checkStrings, + orderStrings, fcstStr, fcstStrRaw, internalStrip) + if check2: + if self._reportingMode not in ["Pretty"]: + self.output("STRING SEARCHES PASSED ", self._outFile) + + if notCheckStrings is not None: + check3 = self._checkStrs(name, fcst, notCheckStrings, 0, + fcstStr, fcstStrRaw, internalStrip, checkMode=0) + if check3: + if self._reportingMode not in ["Pretty"]: + self.output("'NOT' STRING SEARCHES PASSED ", self._outFile) + + success = check1 and check2 and check3 + + if success: + self._passed += 1 + logmsg = name + " Passed" + self.statusBarMsg(logmsg, "R", category="ISC") + else: + self._failures += 1 + logmsg = name + " Failed" + self.statusBarMsg(logmsg, "A", category="ISC") + self.output(logmsg, self._outFile) + + if self._failures > self._failLimit: + self._cleanUp(entry, drtTime) + # Stop processing + return + if self._reportingMode in ["Verbose", "Moderate"]: + if fcst and success: # checkStrings failure will print out the product already + self.output("\n" + fcst, self._outFile) + # DecodeVTEC if requested + # Note for later: if in practice 
mode, set active + # table to runVTECDecoder("PRACTICE", fcst) + if success and entry.get("decodeVTEC", 0): + self.__runVTECDecoder(fcst, drtTime) + + + # wait until table has been modified or 5 seconds + t1 = time.time(); + while not self.__listener.isReceivedNotification(): + time.sleep(0.1) + if time.time() - t1 > 20: + self.output("Vtec Decoder timed out!", self._outFile) + break + t2 = time.time(); + if self._reportingMode in ["Verbose", "Moderate"]: + self.output("Vtec Decoder wait time: " + "%6.2f" % (t2-t1), + self._outFile) + + self._cleanUp(entry, drtTime) + + def _cleanUp(self, entry, drtTime): + if drtTime is not None: + import offsetTime + offsetTime.reset() + imp.reload(offsetTime) + +# fileChanges = entry.get("fileChanges", []) +# for fileName, fileType, changeType, strings, cleanUp in fileChanges: +# fileName = fileName.replace("", self.getSiteID()) +# reload(sys.modules[fileName]) +# productType = entry['productType'] +# if sys.modules.has_key(productType): +# del sys.modules[productType] + + def _cleanUpWritableCopies(self, entry, user="GFETEST"): + writables = entry.get("writeableCopies", None) + if writables is None: + return + for fileSrc, fileType, destFilename in writables: + source = fileSrc.replace("", self.getSiteID()) + dest = destFilename.replace("", self.getSiteID()) + #try: + if 1 == 1: + TextFileUtil.makeWritableCopy(source, fileType, + dest, True); + #except: + else: + pass + self.output("Cleanup writable copies: " + fileSrc, self._outFile) + + def _cleanUpFiles(self, entry): + fileChanges = entry.get("fileChanges", []) + for fileName, fileType, changeType, strings, cleanUp in fileChanges: + fileName = fileName.replace("", self.getSiteID()) + textFileID = TextFileUtil.getTextFile(fileName, fileType) + if self._leaveFileChanges == "no": + if cleanUp in ["delete", "undo"]: + # File changes are made as overrides at the GFETEST user level + # We just remove these files to restore the previous file + destLf = 
TextFileUtil.getUserTextFile(textFileID) + TextFileUtil.deleteTextFile(destLf) + + def _checkStrs(self, name, fcst, checkStrings, orderStrings, fcstStr, + fcstStrRaw, internalStrip, checkMode=1): + # Check the fcstStr for the list of checkStrings + # If a checkString is a tuple, at least one of the + # given tuple strings must be found in the fcstStr (or fcstStrRaw) + # If orderStrings == 1, the strings must occur in order + # in the fcstStr + # If checkMode == 0, the strings should NOT be found in the fcstStr + # If internalStrip == 2, check both the fcstStr, and fcstStrRaw + # versions. If at least one succeeds, the checkString succeeds. + curIndex = -1 + for cStr in checkStrings: + if type(cStr) == tuple: + # Will pass if ANY of these strings are found + + # Not valid with checkMode of zero + if not checkMode: + continue + + found = 0 + for subStr in cStr: + found, strIndex, strIndexFlag = self._checkStr( + subStr, fcstStr, fcstStrRaw, internalStrip) + if found: + if self._reportingMode in ["Verbose"]: + self.output("StringCHECK: " + subStr + ' ' + repr(strIndex), self._outFile) + elif self._reportingMode in ["Pretty"]: + self.output("CHECK String: " + subStr, self._outFile) + break + if not found: + self._failed(subStr, name, fcst, fcstStr, checkMode) + return 0 + else: + # Must find exact string + found, strIndex, strIndexFlag = self._checkStr(cStr, fcstStr, fcstStrRaw, internalStrip) + if self._reportingMode in ["Verbose"]: + self.output("StringCHECK: " + cStr + ' ' + repr(strIndex), self._outFile) + elif self._reportingMode in ["Pretty"]: + self.output("CHECK String: " + cStr, self._outFile) + if strIndex == -1: + # Handle special case of SHOWERS/RAIN SHOWERS + if cStr.find("showers") >= 0: + cStr = cStr.replace("showers", "rain showers") + found, strIndex, strIndexFlag = self._checkStr( + cStr, fcstStr, fcstStrRaw, internalStrip) + if cStr.find("Showers") >= 0: + cStr = cStr.replace("Showers", "Rain showers") + found, strIndex, strIndexFlag = 
self._checkStr( + cStr, fcstStr, fcstStrRaw, internalStrip) + if strIndex < 0: + if checkMode: + self._failed(cStr, name, fcst, fcstStr, checkMode) + return 0 + else: + if not checkMode: + self._failed(cStr, name, fcst, fcstStr, checkMode) + return 0 + elif checkMode == 0: #not check strings, and not check mode + self._failed(cStr, name, fcst, fcstStr, checkMode) + return 0 + + # Check the ordering + if orderStrings: + if strIndexFlag == "raw": + fcstStrRaw = fcstStrRaw[strIndex:] + else: + fcstStr = fcstStr[strIndex:] + return 1 + + def _checkStr(self, subStr, fcstStr, fcstStrRaw, internalStrip): + # Check for the given string. + # If internalStrip is 2, check both the raw and stripped fcstStrs. + found = 0 + strIndexFlag = "regular" + strIndex = fcstStr.find(subStr) + if strIndex >= 0: + found = 1 + elif internalStrip == 2: + # Try the raw fcstStr + strIndex = fcstStrRaw.find(subStr) + if strIndex >= 0: + found = 1 + strIndexFlag = "raw" + return found, strIndex, strIndexFlag + + def _failed(self, str, name, fcst, fcstStr, checkMode): + failMsg = "STRING SEARCH FAILED: " + name + if checkMode: + errorStr = "Cannot find: " + else: + errorStr = "Found and should not have found: " + failMsg = failMsg + " " + errorStr + repr(str) + failMsg = failMsg + " Product Result \n" + fcst + self.output(failMsg, self._outFile) + + def _finished(self): + message = "TESTING COMPLETE " + \ + self._scriptName + " \n" + \ + self._getElapsedTimeStr() + \ + "\n" + repr(self._scripts) + " SCRIPTS RUN.\n" + \ + repr(self._passed) + " TESTS PASSED.\n" + \ + repr(self._failures) + " TESTS FAILED." 
+ + self.output(message, self._outFile) + self._outFile.close() + + #send user alert message to GFEs + if self._failures: + status = "U" + else: + status = "S" + self.statusBarMsg(message, status, category="GFE") + + def _getElapsedTimeStr(self): + eTime = (time.time() - self._timer) / 60.0 + return "%4.2f" % eTime + " minutes" + + # Optional -- Invoked by Button 2 in ITool Dialog + def cleanUp(self): + self.output("Calling MyTool CleanUp method", self._outFile) + # Can be called at any time to abort the script + self._testScript = [] + self._outFile.close() + + def __runVTECDecoder(self, fcst, drtString=None): + import tempfile, urllib.parse + from com.raytheon.uf.viz.core import VizApp + + with tempfile.NamedTemporaryFile(mode='w', prefix="autoTestProd", delete=False) as file: + file.write(fcst) + + url = urllib.parse.urlparse(VizApp.getHttpServer()) + commandString = "VTECDecoder -f " + file.name + " -d -g -a practice -h " + url.hostname + if drtString is not None: + commandString += " -z " + drtString + + expectedPil = fcst.split("\n",2)[1] + self.__listener.resetListener(expectedPil) + os.system(commandString) + + +def main(): + os.environ["TZ"] = 'EST5EDT' + time.tzset() + import _strptime + _strptime._cache_lock.acquire() + _strptime._TimeRE_cache = _strptime.TimeRE() + _strptime._regex_cache = {} + _strptime._cache_lock.release() + + from java.lang import System + System.setProperty('user.name', 'GFETEST') + + from com.raytheon.uf.viz.core.localization import LocalizationManager + from com.raytheon.uf.common.localization import LocalizationContext + LocalizationLevel = LocalizationContext.LocalizationLevel + LocalizationManager.registerContextName(LocalizationLevel.USER, 'GFETEST'); + + import loadConfig + loadConfig.loadPreferences("gfeConfig") + + from com.raytheon.viz.gfe.core import DataManager + dm = DataManager.getInstance(None) + import IToolInterface + # have to add the user dir to the python path here since we just switched users + # TODO look into 
switching it from the java + from com.raytheon.uf.common.dataplugin.gfe.python import GfePyIncludeUtil + + for s in sys.path: + if 'textUtilities' in s \ + or 'textProducts' in s \ + or 'combinations' in s: + sys.path.remove(s) + + for s in str(GfePyIncludeUtil.getHeadlineIncludePath()).split(':'): + sys.path.append(s) + + for s in str(GfePyIncludeUtil.getTextUtilitiesIncludePath()).split(':'): + sys.path.append(s) + + for s in str(GfePyIncludeUtil.getTextProductsIncludePath()).split(':'): + sys.path.append(s) + + for s in str(GfePyIncludeUtil.getCombinationsIncludePath()).split(':'): + sys.path.append(s) + + # create output directory for products + try: + os.makedirs(OUTPUT_DIR) + except OSError as e: + if e.errno != errno.EEXIST: + self.output("%s: '%s'" % (e.strerror,e.filename)) + + scriptDir = GfePyIncludeUtil.getIToolIncludePath() + runner = IToolInterface.IToolInterface(scriptDir) + runner.instantiate('TextProductTest', 'ITool', dbss=dm) + processVariableList = ProcessVariableList.ProcessVariableList('TextProductTest', VariableList) + varDict = processVariableList.varDict() + if varDict is None or len(varDict) == 0: + return + runner.runITool('TextProductTest', 'ITool', 'execute', varDict=varDict) + + +if __name__ == "__main__": + main() diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Align_Grids.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Align_Grids.py index c3440e4e1b..3c90f2544a 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Align_Grids.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Align_Grids.py @@ -1,258 +1,258 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Align_Grids +# +# Author: Paul Jendrowski +# Version: 1.0 - 11/08/2004 +# +# Description: +# This procedure will align the time ranges of selected +# weather elements with the time range of another element. +# For instance, this will redo the PoP grid to match +# the timeranges of individual Wx grids. Note that WindGust +# is always forced to match the Wind grid. +# +# If a grid does not exist, then it will be created from scratch. +# If the grid to align does exist, the max value of all original +# grids that are within the time range of the source grid is +# used for PoP, Sky, WindGust. Rate based parameters +# (QPF, SnowAmt, etc.) are fragmented then summed. +# +# You can add whatever elements you wish to VariableList. 
However, +# you must add an appropriate entry in the __init__ method +# to defing if the variable needs to be fragmented (i.e., rate +# based parameters like QPF must be fragmented to sum correctly), +# split (generally all non-rate based should be split before +# being recombined into the aligned grids). _methdodDict +# defines how to recompute the value for the aligned grid from +# the original grid. This value is any value that can be used +# as the mode argument to SmartScript.getGrids method. In addition, +# you can also use "Last" to get the last grid or "MaxTime" which +# will select the grid that has the highest percentage time in the +# period (if a tie for highest percentage, the first is used). +# method is generally for Wx/discrete grids that cannot be +# averaged or summed. Finally, the execute method needs to be +# edited to define time ranges to use for each new element. +# +# Installation: +# Install this file as a Procedure. This procedure also requires +# getGridsTool and MakeTmpGrid SmartTools to be installed. +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify +MenuItems = ["Edit"] + +Supported_elements=["Wx", "PoP", "Sky", "QPF","SnowAmt","IceAccum","Wind","WindGust"] +VariableList = [ + ("Select Time Range:","All", "radio", + ["Selected Time", "All"]), + ("Source Grid:" , "Wx", "radio",["Wx", "PoP", "QPF"]), + ("Aligned Grids:" , [], "check",Supported_elements), + ] + +import SmartScript +import TimeRange + +import time + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + # These are the elements that need to be split before rederiving the + # values in the newly aligned grids. + self._splitElements = ["Wx", "PoP", "Sky","Wind", "WindGust"] + + # These are the elements that need to be fragmented before + # rederiving the values in the newly aligned grids. + self._fragmentElements = ["QPF", "SnowAmt","IceAccum"] + + # Method to use the populate values in the aligned grid from the + # original values. + #**** There must be a key for every element in the "Aligned Grids:" + #**** VariableList entry!!! + # This value is any value except "List" that can be used + # as the mode argument to SmartScript.getGrids method. In addition, + # you can also use "Last" to get the last grid or "MaxTime" which + # will select the grid that has the highest percentage time in the + # period (if a tie for highest percentage, the first is used). + # method is generally for Wx/discrete grids that cannot be + # averaged or summed. 
+ self._methodDict = { + "Wx" : "MaxTime", + "PoP" : "Max", + "Sky" : "Max", + "WindGust" : "Max", + "Wind" : "TimeWtAverage", + "QPF" : "Sum", + "SnowAmt" : "Sum", + "IceAccum" : "Sum", + } + + def execute(self, editArea, timeRange, varDict): + # First make a list of grids to make from the selection + # Get time range from varDict + period = varDict["Select Time Range:"] + srcGrid = varDict["Source Grid:"] + gridsToMake = [] + + doWindGust = 0 + for elem in varDict["Aligned Grids:"]: + if elem == "WindGust": + doWindGust = 1 + elif elem != srcGrid: + gridsToMake.append(elem) + + # Get current Greenwich Mean Time (GMT) + GMT = time.gmtime(time.time()) + + # Get current hour from GMT + gmthour = GMT[3] + #print "GMT hour=", gmthour + + # Create the timeranges over which to create the grids from scratch + # (timerange varies by element and initial period) + timeRangeDict = {} + srcTR = self._getGridTimeRange("Fcst", srcGrid, "SFC", timeRange) + #print "str=",timeRange, "SrcTR=",srcTR + +#*** If and element is added to VariableList, you must set up the timeRangeDict +#*** for that element in the following if-else block + + if period == "Selected Time": + # Set time range of grids that are actually there + for elem in Supported_elements: + timeRangeDict[elem] = srcTR + timeRangeDict["WindGust"] = self._getGridTimeRange("Fcst", "Wind", "SFC", timeRange) +# print "Selected tr=",timeRange + else: # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Align_Grids -# -# Author: Paul Jendrowski -# Version: 1.0 - 11/08/2004 -# -# Description: -# This procedure will align the time ranges of selected -# weather elements with the time range of another element. 
-# For instance, this will redo the PoP grid to match -# the timeranges of individual Wx grids. Note that WindGust -# is always forced to match the Wind grid. -# -# If a grid does not exist, then it will be created from scratch. -# If the grid to align does exist, the max value of all original -# grids that are within the time range of the source grid is -# used for PoP, Sky, WindGust. Rate based parameters -# (QPF, SnowAmt, etc.) are fragmented then summed. -# -# You can add whatever elements you wish to VariableList. However, -# you must add an appropriate entry in the __init__ method -# to defing if the variable needs to be fragmented (i.e., rate -# based parameters like QPF must be fragmented to sum correctly), -# split (generally all non-rate based should be split before -# being recombined into the aligned grids). _methdodDict -# defines how to recompute the value for the aligned grid from -# the original grid. This value is any value that can be used -# as the mode argument to SmartScript.getGrids method. In addition, -# you can also use "Last" to get the last grid or "MaxTime" which -# will select the grid that has the highest percentage time in the -# period (if a tie for highest percentage, the first is used). -# method is generally for Wx/discrete grids that cannot be -# averaged or summed. Finally, the execute method needs to be -# edited to define time ranges to use for each new element. -# -# Installation: -# Install this file as a Procedure. This procedure also requires -# getGridsTool and MakeTmpGrid SmartTools to be installed. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify -MenuItems = ["Edit"] - -Supported_elements=["Wx", "PoP", "Sky", "QPF","SnowAmt","IceAccum","Wind","WindGust"] -VariableList = [ - ("Select Time Range:","All", "radio", - ["Selected Time", "All"]), - ("Source Grid:" , "Wx", "radio",["Wx", "PoP", "QPF"]), - ("Aligned Grids:" , [], "check",Supported_elements), - ] - -import SmartScript -import TimeRange - -import time - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - # These are the elements that need to be split before rederiving the - # values in the newly aligned grids. - self._splitElements = ["Wx", "PoP", "Sky","Wind", "WindGust"] - - # These are the elements that need to be fragmented before - # rederiving the values in the newly aligned grids. - self._fragmentElements = ["QPF", "SnowAmt","IceAccum"] - - # Method to use the populate values in the aligned grid from the - # original values. - #**** There must be a key for every element in the "Aligned Grids:" - #**** VariableList entry!!! - # This value is any value except "List" that can be used - # as the mode argument to SmartScript.getGrids method. In addition, - # you can also use "Last" to get the last grid or "MaxTime" which - # will select the grid that has the highest percentage time in the - # period (if a tie for highest percentage, the first is used). - # method is generally for Wx/discrete grids that cannot be - # averaged or summed. 
- self._methodDict = { - "Wx" : "MaxTime", - "PoP" : "Max", - "Sky" : "Max", - "WindGust" : "Max", - "Wind" : "TimeWtAverage", - "QPF" : "Sum", - "SnowAmt" : "Sum", - "IceAccum" : "Sum", - } - - def execute(self, editArea, timeRange, varDict): - # First make a list of grids to make from the selection - # Get time range from varDict - period = varDict["Select Time Range:"] - srcGrid = varDict["Source Grid:"] - gridsToMake = [] - - doWindGust = 0 - for elem in varDict["Aligned Grids:"]: - if elem == "WindGust": - doWindGust = 1 - elif elem != srcGrid: - gridsToMake.append(elem) - - # Get current Greenwich Mean Time (GMT) - GMT = time.gmtime(time.time()) - - # Get current hour from GMT - gmthour = GMT[3] - #print "GMT hour=", gmthour - - # Create the timeranges over which to create the grids from scratch - # (timerange varies by element and initial period) - timeRangeDict = {} - srcTR = self._getGridTimeRange("Fcst", srcGrid, "SFC", timeRange) - #print "str=",timeRange, "SrcTR=",srcTR - -#*** If and element is added to VariableList, you must set up the timeRangeDict -#*** for that element in the following if-else block - - if period == "Selected Time": - # Set time range of grids that are actually there - for elem in Supported_elements: - timeRangeDict[elem] = srcTR - timeRangeDict["WindGust"] = self._getGridTimeRange("Fcst", "Wind", "SFC", timeRange) -# print "Selected tr=",timeRange - else: -# -# Here is where you can adjust the ending times of the preset time ranges used by the -# "All" option. To change these, simply modify the ending time of the following calls -# to createTimeRange to meet your local needs. -# - # First set a reasonable time range then determine which grids are - # actually there. The start time is the current hour in GMT time. 
- # Set up elements for full time range - tr = self.createTimeRange(gmthour, 192, "Zulu") - - #for elem in ["Wx", "PoP", "Sky", "QPF","SnowAmt","IceAccum","Wind","WindGust"]: - for elem in Supported_elements: - timeRangeDict[elem] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) - - # Override the default time ranges explicitly for selected elements - tr = self.createTimeRange(gmthour, 60, "Zulu") - timeRangeDict["SnowAmt"] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) - tr = self.createTimeRange(gmthour, 60, "Zulu") - timeRangeDict["IceAccum"] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) - - tr = self.createTimeRange(gmthour, 96, "Zulu") - timeRangeDict["QPF"] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) - tr = self.createTimeRange(gmthour, 96, "Zulu") - timeRangeDict["Wind"] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) - tr = self.createTimeRange(gmthour, 96, "Zulu") - timeRangeDict["WindGust"] = self._getGridTimeRange("Fcst", "Wind", "SFC", tr) - - - # Create temporary grid of each element to be aligned then remake - # the grid from scratch with the time range from the source grid - for elem in gridsToMake: - #print "_alignGrid",elem,srcGrid,timeRangeDict[elem] - self._alignGrid(elem,srcGrid,timeRangeDict[elem]) - if doWindGust: - elem = "WindGust" - #print "_alignGrid",elem,srcGrid,timeRangeDict[elem] - self._alignGrid(elem,"Wind",timeRangeDict[elem]) - - def _alignGrid(self,elem,srcGrid,timeRange): - tmpName = "tmp" + elem - varDict = {} - varDict["Model"] = "Fcst" - varDict["gridName"] = tmpName - madeTmp=0 - # First check if any old grids exist - elemGridInfo = self.getGridInfo("Fcst", elem, "SFC", timeRange) - #print elem, len(elemGridInfo) - if len(elemGridInfo) > 0: - madeTmp = 1 - rslt = self.callSmartTool("MakeTmpGrid", elem, varDict=varDict, - timeRange=timeRange) - - srcGridInfo = self.getGridInfo("Fcst", srcGrid, "SFC", timeRange) - # Step through each of the source grids and create a scratch - # grid with the same 
time range as the source grid. - for info in srcGridInfo: - #print info.gridTime() - self.createFromScratchCmd([elem], info.gridTime(), 0, 0) - # Check if the temporary version of the element needs - # to be split. The temporary grid will be used to recompute - # the values in the scratch grids - if madeTmp and elem in self._splitElements: - if len(self.getGridInfo("Fcst", tmpName, "SFC", info.gridTime())) > 0: - self.splitCmd([tmpName], info.gridTime()) - if madeTmp > 0: - varDict["Element"] = tmpName - if elem in self._fragmentElements: - self.fragmentCmd([tmpName], timeRange) - - if self._methodDict.has_key(elem): - varDict["Mode"] = self._methodDict[elem] - self.callSmartTool("getGridsTool", elem, varDict=varDict, - timeRange=timeRange) - # Delete the temp grids - #try: - # self.deleteObject(tmpName, "FcstGrid") - #except: - # pass - try: - self.unloadWE("Fcst", tmpName, "SFC") - except: - pass - - def _getGridTimeRange(self, model,parm,level,timeRange): - # Returns a timeRange covering any grids that intersect input timeRange - # Returns a timeRange with duration less than 3600 if no grids found - info = self.getGridInfo(model, parm, level, timeRange) - if info != []: - st = info[0].gridTime().startTime() - et = info[len(info) - 1].gridTime().endTime() - tr = TimeRange.TimeRange(st,et) - else: - tr = TimeRange.TimeRange(timeRange.startTime(),timeRange.startTime() + 1) - return tr - -## Error Handling -## Call self.abort(errorString) to stop execution of your script and -## display a message to the user. -## For example: -## if x > 1000: -## self.abort("x is too large") -## +# Here is where you can adjust the ending times of the preset time ranges used by the +# "All" option. To change these, simply modify the ending time of the following calls +# to createTimeRange to meet your local needs. +# + # First set a reasonable time range then determine which grids are + # actually there. The start time is the current hour in GMT time. 
+ # Set up elements for full time range + tr = self.createTimeRange(gmthour, 192, "Zulu") + + #for elem in ["Wx", "PoP", "Sky", "QPF","SnowAmt","IceAccum","Wind","WindGust"]: + for elem in Supported_elements: + timeRangeDict[elem] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) + + # Override the default time ranges explicitly for selected elements + tr = self.createTimeRange(gmthour, 60, "Zulu") + timeRangeDict["SnowAmt"] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) + tr = self.createTimeRange(gmthour, 60, "Zulu") + timeRangeDict["IceAccum"] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) + + tr = self.createTimeRange(gmthour, 96, "Zulu") + timeRangeDict["QPF"] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) + tr = self.createTimeRange(gmthour, 96, "Zulu") + timeRangeDict["Wind"] = self._getGridTimeRange("Fcst", srcGrid, "SFC", tr) + tr = self.createTimeRange(gmthour, 96, "Zulu") + timeRangeDict["WindGust"] = self._getGridTimeRange("Fcst", "Wind", "SFC", tr) + + + # Create temporary grid of each element to be aligned then remake + # the grid from scratch with the time range from the source grid + for elem in gridsToMake: + #print "_alignGrid",elem,srcGrid,timeRangeDict[elem] + self._alignGrid(elem,srcGrid,timeRangeDict[elem]) + if doWindGust: + elem = "WindGust" + #print "_alignGrid",elem,srcGrid,timeRangeDict[elem] + self._alignGrid(elem,"Wind",timeRangeDict[elem]) + + def _alignGrid(self,elem,srcGrid,timeRange): + tmpName = "tmp" + elem + varDict = {} + varDict["Model"] = "Fcst" + varDict["gridName"] = tmpName + madeTmp=0 + # First check if any old grids exist + elemGridInfo = self.getGridInfo("Fcst", elem, "SFC", timeRange) + #print elem, len(elemGridInfo) + if len(elemGridInfo) > 0: + madeTmp = 1 + rslt = self.callSmartTool("MakeTmpGrid", elem, varDict=varDict, + timeRange=timeRange) + + srcGridInfo = self.getGridInfo("Fcst", srcGrid, "SFC", timeRange) + # Step through each of the source grids and create a scratch + # grid with the same 
time range as the source grid. + for info in srcGridInfo: + #print info.gridTime() + self.createFromScratchCmd([elem], info.gridTime(), 0, 0) + # Check if the temporary version of the element needs + # to be split. The temporary grid will be used to recompute + # the values in the scratch grids + if madeTmp and elem in self._splitElements: + if len(self.getGridInfo("Fcst", tmpName, "SFC", info.gridTime())) > 0: + self.splitCmd([tmpName], info.gridTime()) + if madeTmp > 0: + varDict["Element"] = tmpName + if elem in self._fragmentElements: + self.fragmentCmd([tmpName], timeRange) + + if elem in self._methodDict: + varDict["Mode"] = self._methodDict[elem] + self.callSmartTool("getGridsTool", elem, varDict=varDict, + timeRange=timeRange) + # Delete the temp grids + #try: + # self.deleteObject(tmpName, "FcstGrid") + #except: + # pass + try: + self.unloadWE("Fcst", tmpName, "SFC") + except: + pass + + def _getGridTimeRange(self, model,parm,level,timeRange): + # Returns a timeRange covering any grids that intersect input timeRange + # Returns a timeRange with duration less than 3600 if no grids found + info = self.getGridInfo(model, parm, level, timeRange) + if info != []: + st = info[0].gridTime().startTime() + et = info[len(info) - 1].gridTime().endTime() + tr = TimeRange.TimeRange(st,et) + else: + tr = TimeRange.TimeRange(timeRange.startTime(),timeRange.startTime() + 1) + return tr + +## Error Handling +## Call self.abort(errorString) to stop execution of your script and +## display a message to the user. 
+## For example: +## if x > 1000: +## self.abort("x is too large") +## diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerify.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerify.py index 551abdf21a..5a4213bcfc 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerify.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerify.py @@ -1,8057 +1,8057 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# BOIVerify - version 2.0.5 -# -# Main tool to calculate and display verification information. The main -# dialog contains tabs to display: -# (1) the archived grids (or errors) -# (2) grids of stats calculated for every gridpoint -# (3) histograms of the errors over the current edit area -# (4) line graphs of stats over pre-defined edit areas -# (5) graphs of stats vs. scale -# -# The BOIVerifySave tool saves the grid into the verification database -# The BOIVerifyAutoCalc tool calculates and saves the stats for pre-defined -# editareas. -# -# Author: Tim Barker - SOO BOI -# 2005/07/01 - Original Implmentation -# 2005/07/29 - version 0.1 - update to grid database structure -# 2006/11/06 - version 1.0 - First version with time-series graphs. Still -# lots of bugs and not what I would like for a version 1.0 but -# I've put it off way too long anyway. -# 2007/10/25 - version 2.0 -# . moved into a procedure rather than a tool -# . fixed problem with precision="0" for sites that do -# not have WG1 defined -# . fixed 'flashing' of user interface on startup -# . fixed so that clicking on 'stop' during long drawing -# of many stat grids will stop more quickly. -# . 
allowed program name in error messages to be something -# other than BOIVerify (could be GridVerify, etc.) -# . use labels of 'histogram' and 'scatterplot' rather than -# errors and values -# . use date label of 'ending on' rather than 'before' -# . added limits to forecast hours shown -# . added support for probabilistic parms -# . added support for handling common cases -# 2008/05/28 - version 2.0.5 -# . fixed problem with forced large range of line graphs -# for QPF bias, etc. In old code if graph range was less -# than 0.01, it was automatically forced to be 1.0. Now, -# it only forcibly expands the graph range when the actual -# range is less than 1/10 the precision of the parm, and -# even then it only expands the graph range upward by 2 -# times the parm precision. -# -# -# 2010/04/23 ryu Initial port to AWIPS II. Fixed bugs with the "Stat vs. Scale" tab. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Dec 02, 2014 RM #625 ryu Changed checkGroup() as suggested to display models -# in multi-columns when a single column is insufficient. -# Apr 16, 2015 17390 ryu Replacing string.atoi with int for string/integer to integer conversion -# (ListBox.curselection() now returns ints instead of strings.) -# Feb 06, 2017 5959 randerso Removed Java .toString() calls -# ---------------------------------------------------------------------------- -# - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -MenuItems = ["Verify"] - -from numpy import * -from math import * -import Tkinter -import TkDefaults -import AppDialog -import time,calendar,sys,re,traceback,string -import SmartScript -import BOIVerifyUtility -import os # for debug - -from com.raytheon.viz.gfe import GFEPreference -from java.lang import Float - -PROGNAME="BOIVerify" # you can change it if you dont like BOI. Shame on you! -COLORLIST=["blue","green","red","cyan","yellow","purple","orange", - "Gold","Coral","DarkOliveGreen","DarkOrchid","Brown","DeepPink", - "DodgerBlue","DarkSeaGreen"] -HOURSECS=60*60 -DAYSECS=24*HOURSECS - - -def getFloatPref(key, dflt): - if not GFEPreference.contains(key): - print "no config value for", key - return dflt - - val = GFEPreference.getFloatPreference(key) - print "preference for %s:" % key, val - return val - -def setFloatPref(key, value): - if value is not None: - value = Float.parseFloat(str(value)) - GFEPreference.setPreference(key, value) - - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - print "Start of %s - virtual memory:%d resident: %d"%(PROGNAME,memory(),resident()) - self._dbss=dbss - self.userName=self._dbss.getWsId().getUserName() - SmartScript.SmartScript.__init__(self, dbss) - self.statusBarMsg("Starting %s"%PROGNAME,"R") - self.PROGNAME=PROGNAME - self.HOURSECS=60*60 - self.DAYSECS=24*self.HOURSECS - self.COLORLIST=COLORLIST - self.__colorMapParams = {} - return - - def execute(self): - - print "starting execute routine with memory:%d resident: %d"%(memory(),resident()) - # - # See if a child window of GFE is named "%self.PROGNAME Options" - # If so...then program is already running and just make - # a dialog box telling them that... 
- # - #if alreadyRunning: - # self.statusBarMsg("%s is already running"%self.PROGNAME,"U") - # return - # - - try: - self.__setup() - - self.root.withdraw() - self.root.mainloop() - #except: - # traceback.print_exc() - finally: - try: - self.root.destroy() - except: - pass - print "Exiting..." - return - - def __setup(self): - tk = Tkinter.Tk() - self.root = tk - sw = tk.winfo_screenwidth() - sh = tk.winfo_screenheight() - tk.geometry("%dx%d+0+0" % (sw,sh)) - - TkDefaults.setDefaults(tk) - # - # Splash screen... - # - splash=Tkinter.Toplevel(None) - splash.overrideredirect(1) - f=Tkinter.Frame(splash,relief=Tkinter.RIDGE,borderwidth=2, - background="yellow") - txtwid=max(len(self.PROGNAME),10) - text="Starting up\n%s"%self.PROGNAME - lab=Tkinter.Label(f,justify=Tkinter.CENTER,text=text, - fg="black",bg="yellow",width=txtwid+2) - lab.pack(side=Tkinter.TOP) - wTxt=Tkinter.StringVar(f) - lab=Tkinter.Label(f,justify=Tkinter.CENTER,textvariable=wTxt, - fg="black",bg="yellow") - lab.pack(side=Tkinter.TOP) - f.pack(side=Tkinter.TOP,ipadx=50,ipady=10) - wTxt.set(". ") - splash.update_idletasks() - ww=splash.winfo_reqwidth() - wh=splash.winfo_reqheight() - sw=splash.winfo_screenwidth() - sh=splash.winfo_screenheight() - newgeom="%dx%d+%d+%d"%(ww,wh,int(float(sw-ww)/2.0),int(float(sh-wh)/2.0)) - splash.geometry(newgeom) - splash.wait_visibility() - splash.update_idletasks() - # - # Start up the utility - # - try: - # - # Start up the utility - # - self.VU=BOIVerifyUtility.BOIVerifyUtility(self._dbss, None) - print "after setting up VU: memory:%d resident:%d"%(memory(),resident()) - self.setToolType("numeric") - - # - # Setup scaleList. This contains tuples of (numpts,label) where - # numpts is the +/- points to average over, and label is a label - # description of that area. 
- # - self.scaleList=[] - spacing=self.VU.getGridSpacing() - nominalSpacing=self.VU.getCFG('NOMINALSPACING') - rspacing=int((float(spacing)/float(nominalSpacing))+0.5)*nominalSpacing - maxk=max(self.getGridShape()) - for k in xrange(maxk): - curTxt=wTxt.get() - last=curTxt[-1] - rest=curTxt[:-1] - newTxt=last+rest - wTxt.set(newTxt) - splash.update_idletasks() - if k>0: - scale=k*2.0*rspacing - else: - scale=rspacing - iscale=int(scale+0.5) - if ((scale>50)and(scale<100)and(iscale%10!=0)): - continue - if ((scale>=100)and(scale<200)and(iscale%25!=0)): - continue - if ((scale>=200)and(scale<500)and(iscale%50!=0)): - continue - if ((scale>=500)and(iscale%100!=0)): - continue - rdig=0 - for digits in xrange(2): - mult=10**digits - iscale=int(scale*mult) - rscale=int((scale*mult)+0.5) - if iscale==rscale: - rdig=digits - break - rscale=round(scale,rdig) - - if rdig==0: - lab="%d-km"%int(rscale) - else: - fmt="%%.%df-km"%rdig - lab=fmt%rscale - self.scaleList.append((k,lab)) - # - # Setup the self.pts with number of points in named edit areas - # - self.VU.logMsg("Starting points generation memory:%d resident:%d"%(memory(),resident())) - maxareas=self.VU.CFG['STATAREAS'] - editAreaNames=self.VU.listEditAreas() - self.pts=ones(maxareas,) - for i in xrange(len(editAreaNames)): - # - curTxt=wTxt.get() - last=curTxt[-1] - rest=curTxt[:-1] - newTxt=last+rest - wTxt.set(newTxt) - splash.update_idletasks() - # - areaname=editAreaNames[i] - self.VU.logMsg(" %3d memory:%d resident:%d"%(i,memory(),resident())) - npts=self.getPts(areaname) - self.VU.logMsg(" %3d after npts memory:%d resident:%d"%(i,memory(),resident())) - j=self.VU.getEditAreaNumberFromName(areaname) - self.pts[j]=npts - self.VU.logMsg(" %3d after pts memory:%d resident:%d"%(i,memory(),resident())) - self.VU.logMsg(" %3d after del ea memory:%d resident:%d"%(i,memory(),resident())) - if self.pts[j]<1: - self.pts[j]=1 - self.VU.logMsg(" after edit area %3d memory:%d resident:%d"%(i,memory(),resident())) - except: - 
splash.destroy() - self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") - raise Exception - # - # Create all the potential dialogs - # - # - self.VU.logMsg("Starting dialog generation memory:%d resident:%d"%(memory(),resident())) - try: - self.mini=MiniDiag(tk,callbackMethod=self.expandMini, - title="Change",buttonText="%s Options"%self.PROGNAME,loc="lr") - except: - splash.destroy() - self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") - raise Exception - # - try: - self.cases=Cases(tk,callbackMethod=self.closeCases) - except: - splash.destroy() - self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") - raise Exception - # - try: - self.miniCases=MiniDiag(tk, callbackMethod=self.expandCases, - title="Display",buttonText="Number of Cases",loc="ur") - except: - splash.destroy() - self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") - raise Exception - # - try: - self.cd=CanvasDisplay(tk, title="Canvas",callbackMethod=self.closeCD) - except: - splash.destroy() - self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") - raise Exception - # - try: - self.dialog=Verif(self.VU,self.userName,self.scaleList, tk, callbackMethod=self.doVerif) - except: - splash.destroy() - self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") - raise Exception - # - # This one last...so it is always on top during 'working' periods - # - try: - self.working=Working(self.dialog,callbackMethod=self.tryToStop) - except: - splash.destroy() - self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") - raise Exception - # - # Destroy the 'starting' message box - # - splash.destroy() - del wTxt - self.statusBarMsg("%s is now running - memory:%d resident:%d"%(self.PROGNAME,memory(),resident()),"R") - - def getPts(self,areaname): - if areaname=="NONE": - ea=self.newGrid(True, bool) - else: - ea=self.encodeEditArea(areaname) - eb=ravel(ea) - num=add.reduce(eb) - pts=int(num) - del ea - del eb - del num - return pts - 
#================================================================== - # - # Routines for the 'working' dialog. - # - # When the stop button is pressed - tryToStop is called and the stop - # variable is set to 1. - # startWorking sets the stop variable to 0 and sets the label, then - # reveals the working dialog and withdraws the main dialog (unless - # overridden) - # setWorking just updates labels in the working dialog - # checkWorking updates the dialog, and returns the stop variable status - # stopWorking withdraws the working dialog and raises the main dialog - # finishWorking withdraws the working dialog and raises the mini dialog - # - def tryToStop(self): - self.working.stop.set(1) - print "tryToStop was called - should stop soon" - return - def startWorking(self,textString,optionRemove=1): - if optionRemove==1: - self.dialog.withdraw() - self.setWorking(textString) - self.working.stop.set(0) - self.working.deiconify() - self.working.lift() - return - def setWorking(self,textString): - self.working.label.set(textString) - self.working.update() - return - def checkWorking(self): - self.working.update() - return self.working.stop.get() - def setAndCheckWorking(self,textString): - self.working.label.set(textString) - self.working.update() - return self.working.stop.get() - def stopWorking(self): - self.working.withdraw() - self.dialog.deiconify() - self.dialog.lift() - self.dialog.update_idletasks() - return - def finishWorking(self): - self.working.withdraw() - self.working.stop.set(0) - self.mini.deiconify() - self.mini.lift() - self.mini.update_idletasks() - return - #================================================================== - # - # Routines for the 'mini' dialog. - # - # When the button is pressed (or X clicked) - expandMini is called - # - def expandMini(self): - self.mini.withdraw() - self.dialog.deiconify() - self.dialog.lift() - return - # - # Hide the main dialog, and reveal the mini-dialog. 
- # - def hideDialog(self): - self.dialog.withdraw() - self.mini.deiconify() - self.mini.lift() - self.mini.wait_visibility() - self.mini.update_idletasks() - return - #================================================================== - # - # Routines for the cases dialog, and it's mini dialog. - # - def expandCases(self): - geo1=self.miniCases.geometry() - (wh,of)=geo1.split("+",1) - (wid1,hgt1)=wh.split("x",1) - (ofx1,ofy1)=of.split("+",1) - self.miniCases.withdraw() - self.cases.deiconify() - self.cases.lift() - geo2=self.cases.geometry() - (wh,of)=geo2.split("+",1) - (wid2,hgt2)=wh.split("x",1) - (ofx2,ofy2)=of.split("+",1) - newgeo="%s+%d+%d"%(wh,int(ofx1)+int(wid1)-int(wid2),int(ofy1)) - self.cases.geometry(newgeo) - return - def closeCases(self): - self.cases.withdraw() - self.cases.update_idletasks() - self.miniCases.deiconify() - self.miniCases.lift() - self.miniCases.update_idletasks() - return - #================================================================== - # - # Routines for the 'canvas' dialog - # - def closeCD(self): - self.cd.withdraw() - return - #================================================================== - # doVerif - - # This is the routine that really does the verification calculations - # It is called when the user clicks on "Run" "Hide" or "Cancel" - # in the verification dialog. The routine is called with the button - # type of "Run" (do NOT dismiss dialog), "OK" (DISMISS dialog when done) - # or "Quit". The actual removal of the dialog is handled by the - # dialog routines themselves...so all you have to do is return - # right away if the user hit cancel, or do the calculations if they - # hit anything else. 
- # - def doVerif(self,buttonType): - if buttonType=="Quit": - self.root.quit() - del self.pts - self.statusBarMsg("%s is finished with memory:%d resident:%s"%(self.PROGNAME,memory(),resident()),"R") - return - if buttonType=="Hide": - self.hideDialog() - return - # - # When doing calculations - make sure the cases windows - # are closed - # - self.cases.withdraw() - self.miniCases.withdraw() - # - # Do calculations - # - try: - dict=self.dialog.getValues() - tab=dict["tab"] - if tab=="Grid Displays": - self.ShowGrids(dict) - if tab=="Grid Stats": - self.ShowGridsStats(dict) - if tab=="Distributions": - self.ShowDists(dict) - if tab=="Point/Area Stats": - self.ShowStats(dict) - if tab=="Stat vs. Scale": - self.ShowScaleStats(dict) - # - # If something goes wrong during calculations - close everything - # and raise the exception - # - except: - (exctype,excvalue,trace)=sys.exc_info() - traceStrings=traceback.format_exception(exctype,excvalue,trace) - fullstring=string.join(traceStrings) - self.statusBarMsg("Error in %s:\n%s"%(self.PROGNAME,fullstring),"S") - self.root.quit() - return - #================================================================== - # showGrids - read and display the archived forecast/observed grids - # - def ShowGrids(self,DialogDict): - self.VU.logMsg("running ShowGrids:",0) - parmList=DialogDict["Parm"] - display=DialogDict["Display"] - groupBy=DialogDict["Group"] - cycleList=DialogDict["cycleList"] - modelList=DialogDict["Model"] - obsmodel=DialogDict["ObsModel"] - fcstrList=DialogDict["fcstrList"] - fhrStart=DialogDict["fhrStart"] - fhrEnd=DialogDict["fhrEnd"] - dateType=DialogDict["dateType"] - numDays=DialogDict["numDays"] - fromDay=DialogDict["fromDay"] - dayList=DialogDict["dayList"] - dateStyle=DialogDict["dateStyle"] - scale=DialogDict["scale"] - commonCases=DialogDict["commonCases"] - accumHours=DialogDict["accumHours"] - accumFreq=DialogDict["accumFreq"] - # - # Check for good GUI input - # - 
ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, - dayList) - if ret==0: - return - self.startWorking("Working on Grid Display") - # - # - # - numdisplayed=0 - ret=self.setupGM(parmList,modelList) - if ret==1: - self.stopWorking() - return - errpat=re.compile("^(.*?)(spd|dir)Err") - totaliters=len(parmList)*len(modelList) - iter=0 - self.VU.logMsg("going into parmList loop",10) - for parm in parmList: - self.VU.logMsg("in ShowGrids working on %s"%parm,5) - datatype=self.VU.getVerParmType(parm) - verType=self.VU.getVerType(parm) - errColor=self.VU.getVerErrColor(parm) - (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, - parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) - logkey="%s_LogFactor"%parm - logfactor = getFloatPref(logkey, -1) - # - # get info on what that verifies the current parm - # - obsParm=self.VU.getObsParm(parm) - (obsParmUnits,obsParmPrecision,obsParmMinval,obsParmMaxval,obsParmRateFlag,obsParmColorTable, - obsParmDisplayMinval,obsParmDisplayMaxval)=self.getParmInfo(obsmodel,obsParm) - logkey="%s_LogFactor"%obsParm - obslogfactor = getFloatPref(logkey, -1) - - obsGridMode=self.getReadMode(obsmodel,obsParm,0) - # - # Get case times/records for all models - but don't require - # observations...since we might want to display just the - # forecasts in the future without a verifying observation - # - caseInfo=self.VU.getCases(parm,modelList,obsParm,obsmodel, - dateStyle,dateType,fromDay=fromDay, - numDays=numDays,dayList=dayList, - fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd, - accumHours=accumHours,accumFreq=accumFreq, - requireObs=0,commonCases=commonCases, - basetimeOffsets=1, - callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - # - # - # - parmnames=[] - obsnames=[] - for model in modelList: - iter+=1 - if (totaliters>1): - firstString="Getting (%d of %d) %s %s grids"%(iter,totaliters,model,parm) - else: - 
firstString="Getting %s %s grids"%(model,parm) - self.setWorking(firstString) - - tomorrow=time.time()+self.DAYSECS - if self.setAndCheckWorking("%s:"%firstString)==1: - self.stopWorking() - return - count=0 - okeys=[] - fcstGridMode=self.getReadMode(model,parm) - cases=caseInfo[model] - casekeys=cases.keys() - casekeys.sort() - totalcount=len(casekeys) - self.VU.logMsg("%d cases for %s"%(len(casekeys),model),1) - for key in casekeys: - count+=1 - if self.setAndCheckWorking("%s: %d of %d"%(firstString,count,totalcount))==1: - self.stopWorking() - return - (basetimestr,stimestr,etimestr)=key.split(",") - basetime=int(basetimestr) - stime=int(stimestr) - etime=int(etimestr) - (frecList,orecList)=cases[key] - tr=self.VU.getVerTimeRange(stime, etime) - # - # If there is an observed grid - read it and display it - # - obsavailable=0 - if len(orecList)>0: - okey="%s,%s"%(stimestr,etimestr) - obsname="%s0bs"%(parm) # zero instead of O so that is sorts before others - if accumFreq0: - obsdata=self.VU.smoothpm(obsdata,scale) - if okey not in okeys: - obsclip=clip(obsdata,parmMinval,parmMaxval) - self.createGrid("Ver",obsname,"SCALAR",obsclip,tr,"Observed", - None,obsParmPrecision,obsParmMinval,obsParmMaxval, - obsParmUnits) - okeys.append(okey) - numdisplayed+=1 - else: - (obsmag,obsdirec)=obsdata - if scale>0: - (u,v)=self.MagDirToUV(obsmag,obsdirec) - u=self.VU.smoothpm(u,scale) - v=self.VU.smoothpm(v,scale) - (obsmag,obsdirec)=self.UVToMagDir(u,v) - obsdata=(obsmag,obsdirec) - if okey not in okeys: - obsmag=clip(obsmag,parmMinval,parmMaxval) - obsdirec=clip(obsdirec,0.0,360.0) - obsclip=(obsmag,obsdirec) - self.createGrid("Ver",obsname,"VECTOR",obsclip,tr,"Observed", - None,obsParmPrecision,obsParmMinval,obsParmMaxval, - obsParmUnits) - okeys.append(okey) - numdisplayed+=1 - if obsname not in obsnames: - obsnames.append(obsname) - # - # Make forecast grid - # - fcstdata=self.VU.getVerGrids(model,basetime,parm, - stime,etime,mode=fcstGridMode, - recList=frecList) - if 
fcstdata is not None: - if datatype!=1: - if scale>0: - fcstdata=self.VU.smoothpm(fcstdata,scale) - else: - (fcstmag,fcstdirec)=fcstdata - if scale>0: - (u,v)=self.MagDirToUV(fcstmag,fcstdirec) - u=self.VU.smoothpm(u,scale) - v=self.VU.smoothpm(v,scale) - (fcstmag,fcstdirec)=self.UVToMagDir(u,v) - fcstdata=(fcstmag,fcstdirec) - # - # part of name based on grouping method - and model name - # - if groupBy=="Run Time": - basetuple=time.gmtime(basetime) - runTime="%4.4d%2.2d%2.2d%2.2d"%(basetuple[0],basetuple[1],basetuple[2],basetuple[3]) - runHours=self.VU.getFcstHour(basetime,tomorrow) - run=(runHours/6)+1 - runname="run%3.3dfrom%s"%(run,runTime[6:10]) - else: - fhr=self.VU.getFcstHour(basetime,stime) - if fhr<0: - self.VU.logMsg("%d-hour forecasts not shown"%fhr,1) - continue - runname="f%3.3dHr"%(fhr) - if model!="Official": - runname+=model - # - # calculate errors (if needed) and clip to twice the 'bigerr' range - # - if display=="Errors": - if obsavailable==1: - ep=max(self.errPrecision,parmPrecision) - if datatype!=1: - parmname=parm+"Err"+runname - if accumFreq-1: - keyname="%s_LogFactor"%obsname - setFloatPref(keyname, obslogfactor) - parmOb=self.getParm("Ver",obsname,"SFC") - self.setColorTableAndRange(parmOb,obsParmColorTable,obsParmDisplayMinval,obsParmDisplayMaxval) - for parmname in parmnames: - count+=1 - self.setWorking("Setting colorcurves: %d of %d"%(count,totalcount)) - parmOb=self.getParm("Ver",parmname,"SFC") - if display=="Errors": - if datatype!=1: - self.setColorTableAndRange(parmOb,errColor,-bigerr,bigerr) - else: - (errColorMag,errColorDir)=errColor - matchObj=errpat.search(parmname) - if matchObj is not None: - type=matchObj.group(2) - if type=="spd": - self.setColorTableAndRange(parmOb,errColorMag,-bigerrmag,bigerrmag) - else: - self.setColorTableAndRange(parmOb,errColorDir,-bigerrdir,bigerrdir) - else: - self.setColorTableAndRange(parmOb,parmColorTable,parmDisplayMinval,parmDisplayMaxval) - else: - if logfactor>=-1: - 
keyname="%s_LogFactor"%parmname - setFloatPref(keyname, logfactor) - self.setColorTableAndRange(parmOb,parmColorTable,parmDisplayMinval,parmDisplayMaxval) - # - if numdisplayed==0: - self.stopWorking() - self.statusBarMsg("No grids match your selected models/times/parms","U") - return - self.finishWorking() - return - - - #================================================================== - # setColorTableAndRange - Set the color table - # - # - def setColorTableAndRange(self,parm,colorTable,displayMinval,displayMaxval): - spatialMgr = self._dbss.getSpatialDisplayManager() - if displayMinval or displayMaxval or colorTable: - rsc = spatialMgr.getResourcePair(parm).getResource() - from com.raytheon.uf.viz.core.rsc.capabilities import ColorMapCapability - params = rsc.getCapability(ColorMapCapability).getColorMapParameters() - - if colorTable: - if self.__colorMapParams.has_key(colorTable): - colorMap = self.__colorMapParams[colorTable].getColorMap() - else: - from com.raytheon.uf.viz.core.drawables import ColorMapLoader - if "GFE/" not in colorTable: - colorTable = "GFE/" + colorTable - colorMap = ColorMapLoader.loadColorMap(colorTable) - elemType = str(parm.getGridInfo().getGridType()) - if ('DISCRETE' == elemType): - from com.raytheon.viz.gfe.rsc import DiscreteDisplayUtil - DiscreteDisplayUtil.deleteParmColorMap(parm) - params.setColorMap(colorMap) - params.setColorMapName(colorTable) - logfactor = getFloatPref(parm.getParmID().getParmName()+"_LogFactor", None) - if logfactor is not None: - params.setLogFactor(logfactor) - rsc.issueRefresh() - - if displayMinval or displayMaxval: - if (displayMinval != displayMaxval): - params.setColorMapMax(float(displayMaxval)) - params.setColorMapMin(float(displayMinval)) - - parm.getListeners().fireColorTableModified(parm) - - return - - #================================================================== - # showGridsStats - display grid statistics - # - # - def ShowGridsStats(self,DialogDict): - self.VU.logMsg("running 
ShowGridsStats:") - parmList=[] - parm=DialogDict["Parm"] - parmList.append(parm) - display=DialogDict["Display"] - threshold=DialogDict["Threshold"] - cycleList=DialogDict["cycleList"] - modelList=DialogDict["Models"] - obsmodel=DialogDict["ObsModel"] - fcstrList=DialogDict["fcstrList"] - fhrStart=DialogDict["fhrStart"] - fhrEnd=DialogDict["fhrEnd"] - dateType=DialogDict["dateType"] - numDays=DialogDict["numDays"] - fromDay=DialogDict["fromDay"] - dayList=DialogDict["dayList"] - scale=DialogDict["scale"] - dateStyle=DialogDict["dateStyle"] - commonCases=DialogDict["commonCases"] - accumHours=DialogDict["accumHours"] - accumFreq=DialogDict["accumFreq"] - TwoCatType=DialogDict["TwoCatType"] - TwoCatCond=DialogDict["TwoCatCond"] - TwoCatValue=DialogDict["TwoCatValue"] - TwoCatValueString=DialogDict["TwoCatValueString"] - # - # Check for good GUI input - # - ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, - dayList) - if ret==0: - return - # - # If a TwoCat stat - check to see that TwoCatType is OK - # and setup statID - # - if display=="TwoCat": - statName=TwoCatType - statCond=TwoCatCond - statVal=TwoCatValue - statID=self.VU.getStatID(statName) - if statID is None: - self.statusBarMsg("Invalid Statistic Name","U") - return - else: - statID="xxxx" - # - # - # - self.startWorking("Working on Grid Stats") - ret=self.setupGM(parmList,modelList) - if ret==1: - self.stopWorking() - return - casesInfo=[] - numdisplayed=0 - pctColor=self.VU.getCFG('PERCENT_COLOR') - # - # Loop over parm and model - # - totaliters=len(parmList)*len(modelList) - iter=0 - for parm in parmList: - readParm=parm - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - obsParm=self.VU.getObsParm(readParm) - verType=self.VU.getVerType(readParm) - datatype=self.VU.getVerParmType(readParm) - errColor=self.VU.getVerErrColor(readParm) - bigerr=self.VU.getVerBigErr(readParm) - thresholds=self.VU.getVerThresholds(readParm) - if 
datatype==1: - (errColorMag,errColorDir)=errColor - (bigerrmag,bigerrdir)=bigerr - (threshmag,threshdir)=thresholds - if last3=="Dir": - errColor=errColorDir - bigerr=bigerrdir - thresholdValue=threshdir[threshold] - clipval=180 - else: #Spd or vector err magnitude - errColor=errColorMag - bigerr=bigerrmag - thresholdValue=threshmag[threshold] - clipval=bigerr*2 - else: - thresholdValue=thresholds[threshold] - clipval=bigerr*2 - # - # Get mode for reading obs grids - # - obsGridMode=self.getReadMode(obsmodel,obsParm,0) - # - # Get case times/records for all models - # - caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, - dateStyle,dateType,fromDay=fromDay,numDays=numDays, - dayList=dayList,fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd, - accumHours=accumHours,accumFreq=accumFreq, - requireObs=1,commonCases=commonCases, - basetimeOffsets=1, - callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - # - # - # - for model in modelList: - iter+=1 - if (totaliters>1): - firstString="Calculating (%d of %d) %s %s stats"%(iter,totaliters,model,parm) - else: - firstString="Calculating %s %s stats"%(model,parm) - # - # - # - fcstGridMode=self.getReadMode(model,readParm) - (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, - parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(model,readParm) - # - # - # - parmnames=[] - gridsave={} - gridcount={} - hitssave={} - misssave={} - falrsave={} - cornsave={} - maxcases=0 - self.setWorking("%s:finding matches"%firstString) - # - # Get all the cases for this model - # - cases=caseInfo[model] - # - # Sort them by the start/end time, not the basetime - # - casekeys=cases.keys() - casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) - totalcount=len(casekeys) - self.VU.logMsg("%d cases for %s"%(len(casekeys),model),1) - count=0 - lastobs="" - for key in casekeys: - count+=1 - self.VU.logMsg("%s : %s"%(model,key),10) - if 
self.setAndCheckWorking("%s: %d of %d"%(firstString,count,totalcount))==1: - self.stopWorking() - return - (basetimestr,stimestr,etimestr)=key.split(",") - basetime=int(basetimestr) - stime=int(stimestr) - etime=int(etimestr) - (frecList,orecList)=cases[key] - # - # Dont make stats for obs not yet complete - # - if etime>time.time(): # dont make stats for obs not yet complete - count+=1 - continue - # - # check to make sure it is a forecast - # string to store grid under depends on forecast and end hour - # - fhr=self.VU.getFcstHour(basetime,stime) - if fhr<0: - count+=1 - continue - # - # If a new and different obs time - read the obs data - # - obskey=key.split(",",1)[1] - if obskey!=lastobs: - obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, - stime,etime,obsGridMode, - orecList) - # - # Smooth observed grid... - # unless a TwoCat "areal" type - # and smooth vectors in U/V space... - # - if scale>0: - if ((display!="TwoCat")or(statID[0:1]!="a")): - if datatype==1: - (obsmag,obsdir)=obsdata - (u,v)=self.MagDirToUV(obsmag,obsdir) - us=self.VU.smoothpm(u,scale) - vs=self.VU.smoothpm(v,scale) - (obsmag,obsdir)=self.UVToMagDir(us,vs) - obsdata=(obsmag,obsdir) - else: - obsdata=self.VU.smoothpm(obsdata,scale) - # - # For probability types...calculate an obs grid of - # 1 or 0, based on whether the observed threshold - # is met. - # - if verType==1: - obsdata=self.getProbVerGrid(readParm,obsdata) - # - # Save the 'key' for this obs grid - so that we - # don't have to read and calculate it again every - # time...only when a new obs time is encountered - # - lastobs=obskey - # - # get parmname to save as...from cycle/fhr/ehr/model - # - ehr=self.VU.getFcstHour(basetime,etime) - basetuple=time.gmtime(basetime) - fcstcycle=basetuple[3] - parmname="%2.2d%3.3d%3.3d%s"%(fcstcycle,fhr,ehr,model) - # - # Read forecast grid - # - fcstdata=self.VU.getVerGrids(model,basetime,readParm, - stime,etime,fcstGridMode, - frecList) - # - # Smooth forecast grid... 
- # unless a TwoCat "areal" type - # and smooth vectors in U/V space... - # - if scale>0: - if ((display!="TwoCat")or(statID[0:1]!="a")): - if datatype==1: - (fcstmag,fcstdir)=fcstdata - (u,v)=self.MagDirToUV(fcstmag,fcstdir) - us=self.VU.smoothpm(u,scale) - vs=self.VU.smoothpm(v,scale) - (fcstmag,fcstdir)=self.UVToMagDir(us,vs) - fcstdata=(fcstmag,fcstdir) - else: - fcstdata=self.VU.smoothpm(fcstdata,scale) - # - # For TwoCat stats...calculate hits/misses/falsealarms/etc. - # - if display=="TwoCat": - # - # get the forecast/observed grids into fcstGrid/obsGrid - # Normally this is what is in fcstdata/obsdata - but for - # vectors...need to pick the right component and for - # probabilities - need to divide by 100. - # - if datatype==1: - if last3!="Dir": - fcstGrid=fcstdata[0] - obsGrid=obsdata[0] - else: - fcstGrid=fcstdata[1] - obsGrid=obsdata[1] - else: - if verType!=0: - fcstGrid=fcstdata/100.0 - else: - fcstGrid=fcstdata - obsGrid=obsdata - # - # Now get yes/no of forecast/observed occurrence - # - if statCond==">": - obsOccur=greater(obsGrid,statVal) - fcstOccur=greater(fcstGrid,statVal) - elif statCond==">=": - obsOccur=greater_equal(obsGrid,statVal) - fcstOccur=greater_equal(fcstGrid,statVal) - elif statCond=="<": - obsOccur=less(obsGrid,statVal) - fcstOccur=less(fcstGrid,statVal) - elif statCond=="<=": - obsOccur=less_equal(obsGrid,statVal) - fcstOccur=less_equal(fcstGrid,statVal) - # - # do neighborhood look here - # - if statID[0:1]=="a": - if scale>0: - obsOccur=self.VU.arealOccur(obsOccur,scale) - fcstOccur=self.VU.arealOccur(fcstOccur,scale) - # - # Make grids of hits, misses, false alarms, correct negatives - # - notFcst=logical_not(fcstOccur) - notObs=logical_not(obsOccur) - hitsgrid=logical_and(fcstOccur,obsOccur) - missgrid=logical_and(notFcst,obsOccur) - falrgrid=logical_and(fcstOccur,notObs) - corngrid=logical_and(notFcst,notObs) - # - # Make space to store these results - if first one - # - if parmname not in parmnames: - 
parmnames.append(parmname) - hitssave[parmname]=self.empty() - misssave[parmname]=self.empty() - falrsave[parmname]=self.empty() - cornsave[parmname]=self.empty() - # - # Add to the hits/miss/falr/corn values - # - hitssave[parmname]+=hitsgrid - misssave[parmname]+=missgrid - falrsave[parmname]+=falrgrid - cornsave[parmname]+=corngrid - # - # For non-TwoCat displays...calculate the errors - # - else: - if datatype!=1: - if verType==0: - errgrid=fcstdata-obsdata - else: - errgrid=(fcstdata/100.0)-obsdata - else: - last3=parm[-3:] - if (last3=="Spd"): - errgrid=fcstdata[0]-obsdata[0] - elif (last3=="Dir"): - errgrid=fcstdata[1]-obsdata[1] - errgrid=where(greater(errgrid,180.0),360.0-errgrid,errgrid) - errgrid=where(less(errgrid,-180.0),-(360.0+errgrid),errgrid) - else: - (fu,fv)=self.MagDirToUV(fcstdata[0],fcstdata[1]) - (ou,ov)=self.MagDirToUV(obsdata[0],obsdata[1]) - eu=fu-ou - ev=fv-ov - (errmag,errdir)=self.UVToMagDir(eu,ev) - errgrid=errmag - # - # change to different scores - # - if display=="Mean Abs Error": - errgrid=where(less(errgrid,0.0),-errgrid,errgrid) - if display in ["RMS Error","Mean Squared Error"]: - errgrid*=errgrid - if display=="Percent Err <": - errgrid=where(less(errgrid,0.0),-errgrid,errgrid) - errgrid=less(errgrid,thresholdValue) - # - # save list of unique parm names being created - # - if parmname not in parmnames: - parmnames.append(parmname) - gridsave[parmname]=self.empty() - gridcount[parmname]=0 - # - # if doing average errors, add errors to sums - # otherwise...display the grid - # - gridsave[parmname]+=errgrid - gridcount[parmname]+=1 - # - # Calculate the statistics grids for this parm/model - # and display them - # - self.VU.logMsg("Creating stat grids") - pnames=[] - totalcount=len(parmnames) - count=0 - for parmname in parmnames: - # - # If they want to stop - stop adding more grids - # but break out to set the color tables correctly - # - count+=1 - self.setWorking("%s:%d of %d"%(firstString,count,totalcount)) - if 
self.checkWorking()==1: - break - # - # Get timerange to save the final grid into - # - cyc=int(parmname[0:2]) - f1=int(parmname[2:5]) - f2=int(parmname[5:8]) - tr=self.createTimeRange(f1+cyc,f2+cyc,"Zulu") - # - # Make name that will be used in grid manager - # - mod=parmname[8:] - pname="%s%2.2dZ%s"%(parm,cyc,mod) - # - # For TwoCat stats - # - if display=="TwoCat": - hitsgrid=hitssave[parmname] - missgrid=misssave[parmname] - falrgrid=falrsave[parmname] - corngrid=cornsave[parmname] - statgrid=self.VU.getGridBinaryStat(statID,hitsgrid,missgrid, - falrgrid,corngrid) - # - # get case number - for table of cases - # - totgrid=hitsgrid+missgrid+falrgrid+corngrid - n=maximum.reduce(maximum.reduce(totgrid)) - maxcases=max(n,maxcases) - # - # Different stats have different limits - # - minlim=-1.0 - maxlim=1.0 - res=2 - if statID in ["hits","ahits","miss","amiss","fals","afals", - "corn","acorn"]: - minlim=0.0 - maxlim=float(n) - res=0 - elif statID in ["freqo","freqf","fc","afc","pod","apod","far","afar", - "pofd","apofd","ts","ats"]: - minlim=0.0 - maxlim=1.0 - # - # Ones that range from 0 to Infinity : clip at +5.0 - # - elif statID in ["freqbias","afreqbias","oddratio","aoddsratio"]: - minlim=0.0 - maxlim=5.0 - # - # Equitable Threat clips at -0.333 and 1.0 - # - elif statID in ["ets","aets"]: - minlim=-0.3333 - maxlim=1.0 - # - # Hansen Kuipers clips at -1.0 to 1.0 - # - elif statID in ["hk","ahk"]: - minlim=-1.0 - maxlim=1.0 - # - # Heidke ranges from -Infinity to 1, and clips at -5.0 - # - elif statID in ["hss","ahss"]: - minlim=-5.0 - maxlim=1.0 - # - # Clip the grid - # - newgrid=clip(statgrid,minlim,maxlim) - self.createGrid("Ver",pname,"SCALAR",newgrid,tr, - "Forecast",None,res,minlim,maxlim, - "units") - # - # For normal error displays - # - else: - # - # If there weren't any sums saved - dont make - # a grid for it - # - n=gridcount[parmname] - if n<1: - continue - # - # make newgrid the grid to show - # - newgrid=gridsave[parmname]/float(n) - if 
display=="RMS Error": - newgrid=newgrid**0.5 - if verType==1: - newgrid*=100.0 - # - # clip the newgrid based on the parm clipping value - # - newgrid=clip(newgrid,-clipval,clipval) - # - # Percent error grids always range from 0 to 100 - # - if display=="Percent Err <": - newgrid*=100.0 - newgrid=clip(newgrid,0.0,100.0) - self.createGrid("Ver",pname,"SCALAR",newgrid,tr, - "Forecast",None,0,0.0,100.0,"%") - # - # Others can have variable ranges. We clip the - # values at 2 times the 'bigerr' value - # - else: - ep=max(self.errPrecision,parmPrecision) - if datatype!=1: - bigerr=self.VU.getVerBigErr(parm) - clipval=bigerr*2.0 - self.createGrid("Ver",pname,"SCALAR",newgrid,tr, - "Forecast",None,ep,-clipval, - clipval,self.errUnits) - # - # Keep track of grids actually put in grid manager - # - if pname not in pnames: - pnames.append(pname) - numdisplayed+=1 - casesInfo.append("%-25s|%3.3d|%d"%(pname,f1,n)) - # - # Set the colorTables for each unique parm added. - # - self.VU.logMsg("Setting color tables",2) - for pname in pnames: - parmOb=self.getParm("Ver",pname,"SFC") - if display=="TwoCat": - if res==0: - self.setColorTableAndRange(parmOb,pctColor,0,maxcases) - else: - self.setColorTableAndRange(parmOb,pctColor,minlim,maxlim) - else: - if display=="Percent Err <": - self.setColorTableAndRange(parmOb,pctColor,0,100) - else: - self.setColorTableAndRange(parmOb,errColor,-bigerr,bigerr) - if self.checkWorking()==1: - self.stopWorking() - return - # - # - # - if numdisplayed==0: - self.stopWorking() - self.statusBarMsg("No grids match your selected models/times/parms","U") - return - # - self.finishWorking() - # - # Make text with case info - # - casesInfo.sort() - casesText="Number of Cases:\n" - lastmod="" - for info in casesInfo: - (modlong,fhr,num)=info.split("|") - mod=modlong.strip() - if mod!=lastmod: - casesText+="\n %s:\n"%mod - lastmod=mod - casesText+=" %3d-hr: %5d\n"%(int(fhr),int(num)) - # - # Make the case info pop up - # - self.cases.updateText(casesText) 
- self.miniCases.deiconify() - self.miniCases.lift() - self.miniCases.update_idletasks() - self.VU.logMsg("Done making stat grids") - return - #================================================================== - # getProbVerGrid - get grid for probability verification, based - # on the obsdata, and the condition/threshold for - # the specified parmName - # - def getProbVerGrid(self,parmName,obsdata): - outdata=obsdata*0 - obsCondition=self.VU.getObsCondition(parmName) - obsThreshold=self.VU.getObsThreshold(parmName) - if obsCondition==">": - outdata=greater(obsdata,obsThreshold) - elif obsCondition==">=": - outdata=greater_equal(obsdata,obsThreshold) - elif obsCondition=="<": - outdata=less(obsdata,obsThreshold) - elif obsCondition=="<=": - outdata=less_equal(obsdata,obsThreshold) - return outdata - #================================================================== - # showGridsDists - display histograms/scatterplots - # - # - def ShowDists(self,DialogDict): - self.VU.logMsg("running ShowDists:") - parmList=[] - parm=DialogDict["Parm"] - parmList.append(parm) - display=DialogDict["Display"] - cycleList=DialogDict["cycleList"] - modelList=DialogDict["Models"] - obsmodel=DialogDict["ObsModel"] - fcstrList=DialogDict["fcstrList"] - fhrStart=DialogDict["fhrStart"] - fhrEnd=DialogDict["fhrEnd"] - dateType=DialogDict["dateType"] - numDays=DialogDict["numDays"] - fromDay=DialogDict["fromDay"] - dayList=DialogDict["dayList"] - dateStyle=DialogDict["dateStyle"] - scale=DialogDict["scale"] - commonCases=DialogDict["commonCases"] - accumHours=DialogDict["accumHours"] - accumFreq=DialogDict["accumFreq"] - # - # Check for good GUI input - # - ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, - dayList) - if ret==0: - return - # - # Do seperate processing for each type - # - if display=="Error Histogram": - self.errorHistogram(parmList,cycleList,modelList,obsmodel,fcstrList, - fhrStart,fhrEnd,dateType,numDays,fromDay,dayList, - 
dateStyle,scale,commonCases,accumHours,accumFreq) - elif display=="Value Histogram": - self.valueHistogram(parmList,cycleList,modelList,obsmodel,fcstrList, - fhrStart,fhrEnd,dateType,numDays,fromDay,dayList, - dateStyle,scale,commonCases,accumHours,accumFreq) - elif display=="Expected Value": - self.expectedValue(parmList,cycleList,modelList,obsmodel,fcstrList, - fhrStart,fhrEnd,dateType,numDays,fromDay,dayList, - dateStyle,scale,commonCases,accumHours,accumFreq) - elif display=="Scatterplot": - self.scatterPlot(parmList,cycleList,modelList,obsmodel,fcstrList, - fhrStart,fhrEnd,dateType,numDays,fromDay,dayList, - dateStyle,scale,commonCases,accumHours,accumFreq) - return - #================================================================== - # errorHistogram - display error histogram - # - # - def errorHistogram(self,parmList,cycleList,modelList,obsmodel, - fcstrList,fhrStart,fhrEnd,dateType,numDays,fromDay, - dayList,dateStyle,scale,commonCases,accumHours, - accumFreq): - # - # - # Clear display - setup title - # - parm=parmList[0] - self.cd.canvas.delete(Tkinter.ALL) - self.cd.title("Error Histogram - %s"%parm) - # - # Start 'working' display - # - workStart="Working on error histogram" - self.startWorking(workStart,optionRemove=0) - # - # - # - NUMTBUTTONS=12 # normal number of time buttons on a row - configure - NUMMBUTTONS=6 # normal number of model buttons on a row - configure - # - # get the active EditArea into ea. 
If the active edit area is - # None - then assume they want to run it over the entire grid - # - editArea=self.getActiveEditArea() - editAreaMask=self.encodeEditArea(editArea) - npts=add.reduce(add.reduce(editAreaMask)) - if (npts==0): - editArea.invert() - ea=self.encodeEditArea(editArea) - eaflat=ravel(ea) - totalpoints=add.reduce(eaflat) - # - # make space for saving data - # - self.histograms={} # storage for histograms for each model/forecast hour - self.histoWorseLow={} - self.histoWorseHigh={} - self.numCases={} - self.errSums={} - self.errSumSquareds={} - self.errSumAbs={} - # - # - # - totaliters=len(modelList) - iter=0 - # - # For vectors...the parm to read might be different than - # the name of the parm - # - readParm=parm - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - # - # Get information about the parm we are reading - # - (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, - parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) - obsParm=self.VU.getObsParm(readParm) - verType=self.VU.getVerType(readParm) - datatype=self.VU.getVerParmType(readParm) - # - # get binwidth and bigerr for parm...but for vectors its - # complicated by dir/mag/vecerr options - # - binwidth=self.VU.getVerBinWidth(readParm) - bigerr=self.VU.getVerBigErr(readParm) - if datatype==1: - (bwMag,bwDir)=binwidth - (beMag,beDir)=bigerr - if last3=="Dir": - binwidth=bwDir - bigerr=beDir - else: - binwidth=bwMag - bigerr=beMag - (binmin,binmax)=self.getBins(binwidth,bigerr) - self.histosetup(-bigerr,bigerr,binwidth) - - nbin=len(binmin) - nbins=reshape(arange(nbin),(nbin,1)) - abinmin=reshape(array(binmin),(nbin,1)) - abinmax=reshape(array(binmax),(nbin,1)) - # - # Get mode for reading obs grids - # - obsGridMode=self.getReadMode(obsmodel,obsParm,0) - # - # Get case times/records for all models - # - caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, - 
dateStyle,dateType,fromDay=fromDay, - numDays=numDays,dayList=dayList, - fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd, - accumHours=accumHours,accumFreq=accumFreq, - commonCases=commonCases,basetimeOffsets=1, - callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - # - # Loop over each model - # - for model in modelList: - iter+=1 - workNow=workStart+":%s (%d of %d)"%(model,iter,totaliters) - # - fcstGridMode=self.getReadMode(model,readParm) - # - # Get all the cases for this model - # - cases=caseInfo[model] - # - # Sort cases by the start time, not the basetime - # - casekeys=cases.keys() - casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) - totalcount=len(casekeys) - self.VU.logMsg("reading %d cases for %s"%(totalcount,model),1) - count=0 - lastobs="" - for key in casekeys: - count+=1 - self.VU.logMsg("%s : %s"%(model,key),10) - if self.setAndCheckWorking("%s: %d of %d"%(workNow,count,totalcount))==1: - self.stopWorking() - return - (basetimestr,stimestr,etimestr)=key.split(",") - basetime=int(basetimestr) - stime=int(stimestr) - etime=int(etimestr) - (frecList,orecList)=cases[key] - # - # Dont make stats for obs not yet complete - # - if etime>time.time(): - continue - # - # Dont include negative forecast hours - # - fhr=self.VU.getFcstHour(basetime,stime) - if fhr<0: - continue - # - # string to store grid under depends on model and forecast hour - # - savekey="%s-%3.3d"%(model,fhr) - # - # If a new and different obs time - read the obs data - # - obskey=key.split(",",1)[1] - if obskey!=lastobs: - self.VU.logMsg("new Obs grid",10) - obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, - stime,etime,mode=obsGridMode, - recList=orecList) - obsdata=self.scaleGrid(obsdata,scale,datatype) - # - # For probabilistic variables...calculate the - # observed 'yes/no' value - # - if verType==1: - obsdata=self.getProbVerGrid(readParm,obsdata) - # - # - # - if ((datatype!=1)or(last3 in 
["Spd","Dir"])): - if last3=="Spd": - obsgrid=obsdata[0] - elif last3=="Dir": - obsgrid=obsdata[1] - else: - obsgrid=obsdata - obsonly=compress(eaflat,ravel(obsgrid)) - else: - (u,v)=self.MagDirToUV(obsdata[0],obsdata[1]) - obsuonly=compress(eaflat,ravel(u)) - obsvonly=compress(eaflat,ravel(v)) - # - # save the last obskey that we have read so that - # we don't read it again many times - # - lastobs=obskey - # - # Read forecast grid - # - fcstdata=self.VU.getVerGrids(model,basetime,readParm, - stime,etime,mode=fcstGridMode, - recList=frecList) - fcstdata=self.scaleGrid(fcstdata,scale,datatype) - # - # Get the error, handling vector error, etc. - # - if ((datatype!=1)or(last3 in ["Spd","Dir"])): - if last3=="Spd": - fcstgrid=fcstdata[0] - elif last3=="Dir": - fcstgrid=fcstdata[1] - else: - fcstgrid=fcstdata - fcstonly=compress(eaflat,ravel(fcstgrid)) - erronly=fcstonly-obsonly - if last3=="Dir": - erronly=where(greater(erronly,180.0),360.0-erronly,erronly) - erronly=where(less(erronly,-180.0),-(360.0+erronly),erronly) - else: - (fcstmag,fcstdir)=fcstdata - (u,v)=self.MagDirToUV(fcstmag,fcstdir) - uonly=compress(eaflat,ravel(u)) - vonly=compress(eaflat,ravel(v)) - uerr=uonly-obsuonly - verr=vonly-obsvonly - (mag,direc)=self.UVToMagDir(uerr,verr) - erronly=mag - # - # make histograms - # - (errCount,worseLow,worseHigh)=self.histo(erronly) - errSum=add.reduce(erronly) - errSumSquared=add.reduce(erronly*erronly) - errabs=abs(erronly) - errSumAb=add.reduce(errabs) - if self.histograms.has_key(savekey): - self.histograms[savekey]+=errCount - self.histoWorseLow[savekey]+=worseLow - self.histoWorseHigh[savekey]+=worseHigh - self.errSums[savekey]+=errSum - self.errSumSquareds[savekey]+=errSumSquared - self.errSumAbs[savekey]+=errSumAb - self.numCases[savekey]+=1 - else: - self.histograms[savekey]=errCount - self.histoWorseLow[savekey]=worseLow - self.histoWorseHigh[savekey]=worseHigh - self.errSums[savekey]=errSum - self.errSumSquareds[savekey]=errSumSquared - 
self.errSumAbs[savekey]=errSumAb - self.numCases[savekey]=1 - # - # Get all "model-fhr" keys we saved - # - fullkeys=self.histograms.keys() - # - # if no data could be read - stop here - # - if len(fullkeys)<1: - self.stopWorking() - msg="No verification data could be found matching those criteria" - self.statusBarMsg(msg,"U") - return - # - # For buttons...get models/forecasthours actually in the data - # - fullkeys.sort() - fhrstrs=[] - modkeys=[] - for fullkey in fullkeys: - (mod,fhrstr)=fullkey.split("-") - if fhrstr not in fhrstrs: - fhrstrs.append(fhrstr) - if mod not in modkeys: - modkeys.append(mod) - # - # Change fhrstrs (sorted on 3-character 000-999) into - # smaller fhrkeys that are NOT all 3-characters wide - # - fhrstrs.sort() - fhrkeys=[] - for fhrstr in fhrstrs: - fhrkeys.append("%d"%int(fhrstr)) - # - # If an Official button is in there...make it first - # - modkeys.sort() - if "Official" in modkeys: - idx=modkeys.index("Official") - del modkeys[idx] - modkeys.insert(0,"Official") - # - # set colors for each model - # - self.colornames={} - index=0 - for mod in modkeys: - self.colornames[mod]=self.COLORLIST[index] - index+=1 - if index==len(self.COLORLIST): - index=0 - # - # Setup first row of buttons (forecast hours) - # - self.setupBut1(fhrkeys,numbuttons=NUMTBUTTONS,arrows=1,width=3) - # - # Setup second row of buttons (models) - # - self.setupBut2(modkeys,numbuttons=NUMMBUTTONS,arrows=1) - # - # find max number in any bin in any of the histograms - # - histkey1=self.histograms.keys()[0] - maxHist=zeros_like(self.histograms[histkey1]) - for histkey in self.histograms.keys(): - self.histograms[histkey]/=float(self.numCases[histkey]) - maxHist=maximum(maxHist,self.histograms[histkey]) - fullmax=maximum.reduce(maxHist) - # - # Find good tickmark interval for vertical axis and set the - # vertical range to be one tick mark above the fullmax (max - # number in any histogram) - # - numticks=10 - tickInterval=self.niceNumDec(fullmax/(numticks-1),1) - 
graphmax=(int(fullmax/tickInterval)+1)*tickInterval - # - # - # Setup graphing coordinates - # - minx=-bigerr - maxx=bigerr - maxscore=maxx/2.0 - left=self.cd.curwidth*(50.0/700.0) - right=self.cd.curwidth*(650.0/700.0) - bot=self.cd.curheight*(100.0/530.0) - top=self.cd.curheight*(480.0/530.0) - self.setgraph(minx,maxx,0.0,graphmax,left,right,bot,top) - self.histoaxes(graphmax,-bigerr,bigerr,binwidth,tickInterval) - # - # Draw each histogram - # - totalcount=len(self.histograms.keys()) - count=0 - for key in self.histograms.keys(): - count+=1 - if self.setAndCheckWorking("%s: drawing histogram %d of %d"%(workStart,count,totalcount))==1: - self.stopWorking() - return - tagbase=key.split("-") - mod=tagbase[0] - fhr=int(tagbase[1]) - fhrstr="f%d"%fhr - tagtuple=(mod,fhrstr) - flabel="%d-hr forecast"%fhr - self.labelLine(flabel,3,justify="right",tags=tagtuple) - - colorname=self.colornames[mod] - bins=self.histograms[key] - nbin=bins.shape[0] - for i in xrange(nbin): - y=bins[i] - x1=self.histomin+(i*self.histowidth) - x2=x1+self.histowidth - if y>0: - if i==0: - (sx1,sy1)=self.graphcoord(x1,0) - else: - (sx1,sy1)=self.graphcoord(x1,bins[i-1]) - (sx2,sy2)=self.graphcoord(x1,y) - (sx3,sy3)=self.graphcoord(x2,y) - if ((i+1)==nbin): - (sx4,sy4)=self.graphcoord(x2,0) - self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) - elif bins[i+1]==0: - (sx4,sy4)=self.graphcoord(x2,0) - self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) - else: - self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,fill=colorname,tags=tagtuple) - self.but2state[mod]=1 - self.but1state[fhrstr]=1 - lowfcst=self.histoWorseLow[key] - highfcst=self.histoWorseHigh[key] - self.showWorse(lowfcst,highfcst,bigerr,15,colorname,tagtuple) - allpts=self.numCases[key]*totalpoints - avg=self.errSums[key]/allpts - mae=self.errSumAbs[key]/allpts - std=sqrt((self.errSumSquareds[key]/allpts)-(avg*avg)) - rms=sqrt(self.errSumSquareds[key]/allpts) - 
self.showAvg(avg,colorname,tagtuple) - modnum=modkeys.index(mod) - self.showScores(modnum,mod,self.numCases[key],avg,std,mae,rms,colorname,tagtuple) - score=100.0-(self.errSumSquareds[key]/allpts) - self.showScore(score,mod,colorname,tagtuple) - # - # Show first time/model - # - startBut1(self) - startBut2(self) - # - # Label top of graph - # - (x,y)=self.graphcoord(0,graphmax) - self.cd.canvas.create_text(x,y-5,text="Gridpoints per case",fill="black",anchor=Tkinter.S) - # - # Labels - # - ul1="Histogram - %s"%parm - self.cdLabels(ul1,totalpoints,dateStyle,dateType,numDays,fromDay,dayList,cycleList) - # - # Bin width - # - if binwidth<1.0: - str="Bin width: %3.1f"%binwidth - else: - str="Bin width: %d"%binwidth - self.labelLine(str,3,justify="left") - # - # table labels - # - x=self.cd.curwidth*(80.0/700.0) - y=self.cd.curheight*(130.0/530.0) - self.cd.canvas.create_text(x,y,text="Model",anchor=Tkinter.E,fill="black") - x=self.cd.curwidth*(130.0/700.0) - y=self.cd.curheight*(130.0/530.0) - self.cd.canvas.create_text(x,y,text="Cases",anchor=Tkinter.E,fill="black") - x=self.cd.curwidth*(170.0/700.0) - y=self.cd.curheight*(130.0/530.0) - self.cd.canvas.create_text(x,y,text="Avg",anchor=Tkinter.E,fill="black") - x=self.cd.curwidth*(210.0/700.0) - y=self.cd.curheight*(130.0/530.0) - self.cd.canvas.create_text(x,y,text="Std",anchor=Tkinter.E,fill="black") - x=self.cd.curwidth*(250.0/700.0) - y=self.cd.curheight*(130.0/530.0) - self.cd.canvas.create_text(x,y,text="MAE",anchor=Tkinter.E,fill="black") - x=self.cd.curwidth*(290.0/700.0) - y=self.cd.curheight*(130.0/530.0) - self.cd.canvas.create_text(x,y,text="RMS",anchor=Tkinter.E,fill="black") - # - # Color Bar - # - midx=self.cd.curwidth/2.0 - for i in xrange(0,256): - x=midx-128+i - y=50 - colorstr="#%02x%02x00"%(255-i,i) - self.cd.canvas.create_line(x,y-3,x,y+3,fill=colorstr) - self.cd.canvas.create_text(midx-128-5,50,text="Bad",anchor=Tkinter.E) - self.cd.canvas.create_text(midx+128+5,50,text="Good",anchor=Tkinter.W) - 
- self.stopWorking() - self.moveCD() - self.cd.deiconify() - self.cd.lift() - return - #================================================================== - # valueHistogram - display value histogram - # - # - def valueHistogram(self,parmList,cycleList,modelList,obsmodel, - fcstrList,fhrStart,fhrEnd,dateType,numDays,fromDay, - dayList,dateStyle,scale,commonCases,accumHours, - accumFreq): - # - # Clear display - setup title - # - parm=parmList[0] - self.cd.canvas.delete(Tkinter.ALL) - self.cd.title("Value Histogram - %s"%parm) - # - # - # - workStart="Working on value histogram" - self.startWorking(workStart,optionRemove=0) - # - # - # - NUMTBUTTONS=12 # normal number of time buttons on a row - configure - NUMMBUTTONS=6 # normal number of model buttons on a row - configure - # - # get the active EditArea into ea. If the active edit area is - # None - then assume they want to run it over the entire grid - # - editArea=self.getActiveEditArea() - editAreaMask=self.encodeEditArea(editArea) - npts=add.reduce(add.reduce(editAreaMask)) - if (npts==0): - editArea.invert() - ea=self.encodeEditArea(editArea) - eaflat=ravel(ea) - totalpoints=add.reduce(eaflat) - # - # make space for saving data - # - self.histograms={} # storage for histograms for each model/forecast hour - self.numCases={} - # - # Loop over parm and model - # - totaliters=len(modelList) - iter=0 - # - # For vectors...the parm to read might be different than - # the name of the parm - # - readParm=parm - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - # - # Get information about the parm we are reading - # - (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, - parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) - obsParm=self.VU.getObsParm(readParm) - verType=self.VU.getVerType(readParm) - datatype=self.VU.getVerParmType(readParm) - if ((datatype==1)and(last3=="Dir")): - parmMinval=0 - parmMaxval=360 
- # - # get binwidth and bigerr for parm...but for vectors its - # complicated by dir/mag/vecerr options - # - binwidth=self.VU.getVerBinWidth(readParm) - if datatype==1: - (bwMag,bwDir)=binwidth - if last3=="Dir": - binwidth=bwDir - else: - binwidth=bwMag - # - # Setup histogram binning routines - # - self.histosetup(parmMinval,parmMaxval,binwidth) - # - # Get mode for reading obs grids - # - obsGridMode=self.getReadMode(obsmodel,obsParm,0) - # - # Get case times/records for all models - # - caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, - dateStyle,dateType,fromDay=fromDay, - numDays=numDays,dayList=dayList, - fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd, - accumHours=accumHours,accumFreq=accumFreq, - commonCases=commonCases,basetimeOffsets=1, - callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - # - # Loop over each model - # - for model in modelList: - iter+=1 - workNow=workStart+":%s (%d of %d)"%(model,iter,totaliters) - # - fcstGridMode=self.getReadMode(model,readParm) - # - # Get all the cases for this model - # - cases=caseInfo[model] - # - # Sort cases by the start/end time, not the basetime - # - casekeys=cases.keys() - casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) - totalcount=len(casekeys) - self.VU.logMsg("reading %d cases for %s"%(totalcount,model),1) - count=0 - lastobs="" - for key in casekeys: - count+=1 - self.VU.logMsg("%s : %s"%(model,key),10) - if self.setAndCheckWorking("%s: %d of %d"%(workNow,count,totalcount))==1: - self.stopWorking() - return - (basetimestr,stimestr,etimestr)=key.split(",") - basetime=int(basetimestr) - stime=int(stimestr) - etime=int(etimestr) - (frecList,orecList)=cases[key] - # - # Dont make stats for obs not yet complete - # - if etime>time.time(): - continue - # - # Dont include negative forecast hours - # - fhr=self.VU.getFcstHour(basetime,stime) - if fhr<0: - continue - # - # string to store grid under depends on 
model and forecast hour - # - saveKey="%s-%3.3d"%(model,fhr) - # - # If a new and different obs time - read the obs data - # - obskey=key.split(",",1)[1] - if obskey!=lastobs: - self.VU.logMsg("new Obs grid",10) - obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, - stime,etime,mode=obsGridMode, - recList=orecList) - obsdata=self.scaleGrid(obsdata,scale,datatype) - # - # For probabilistic variables...calculate the - # observed 'yes/no' value - # - if verType==1: - obsdata=self.getProbVerGrid(readParm,obsdata)*100.0 - # - # cant do a value histogram of vector wind - # errors...so those get changed to windSpd - # - if ((datatype!=1)or(last3 in ["Spd","Dir"])): - if last3=="Spd": - obsgrid=obsdata[0] - elif last3=="Dir": - obsgrid=obsdata[1] - else: - obsgrid=obsdata - obsonly=compress(eaflat,ravel(obsgrid)) - else: - obsgrid=obsdata[0] - obsonly=compress(eaflat,ravel(obsgrid)) - (obsCount,below,above)=self.histo(obsonly) - lastobs=obskey - # - # Add observed bin counts to counts for same model/fhr - # - obsSaveKey="%s-%3.3d"%(obsmodel,fhr) - if self.histograms.has_key(obsSaveKey): - self.histograms[obsSaveKey]+=obsCount - self.numCases[obsSaveKey]+=1 - else: - self.histograms[obsSaveKey]=obsCount - self.numCases[obsSaveKey]=1 - # - # Read forecast grid and calculate error grid - # - fcstdata=self.VU.getVerGrids(model,basetime,readParm, - stime,etime,mode=fcstGridMode, - recList=frecList) - fcstdata=self.scaleGrid(fcstdata,scale,datatype) - # - # Get the error, handling vector error, etc. 
- # - if ((datatype!=1)or(last3 in ["Spd","Dir"])): - if last3=="Spd": - fcstgrid=fcstdata[0] - elif last3=="Dir": - fcstgrid=fcstdata[1] - else: - fcstgrid=fcstdata - fcstonly=compress(eaflat,ravel(fcstgrid)) - else: - fcstgrid=fcstdata[0] - fcstonly=compress(eaflat,ravel(fcstgrid)) - # - # bin the forecast values - # - (valCount,below,above)=self.histo(fcstonly) - # - # Add bin counts to counts for same model/fhr - # - if self.histograms.has_key(saveKey): - self.histograms[saveKey]+=valCount - self.numCases[saveKey]+=1 - else: - self.histograms[saveKey]=valCount - self.numCases[saveKey]=1 - # - # Get all the keys that will be displayed - we've been storing in - # different places for different things - # - fullkeys=self.histograms.keys() - # - # if no data could be read - stop here - # - if len(fullkeys)<1: - self.stopWorking() - msg="No verification data could be found matching those criteria" - self.statusBarMsg(msg,"U") - return - # - # For buttons...get models/forecasthours actually in the data - # - fullkeys.sort() - fhrstrs=[] - modkeys=[] - for fullkey in fullkeys: - (mod,fhrstr)=fullkey.split("-") - if fhrstr not in fhrstrs: - fhrstrs.append(fhrstr) - if mod not in modkeys: - modkeys.append(mod) - # - # Change fhrstrs (sorted on 3-character 000-999) into - # smaller fhrkeys that are NOT all 3-characters wide - # - fhrstrs.sort() - fhrkeys=[] - for fhrstr in fhrstrs: - fhrkeys.append("%d"%int(fhrstr)) - # - # If an Official button is in there...make it first - # - modkeys.sort() - if "Official" in modkeys: - idx=modkeys.index("Official") - del modkeys[idx] - modkeys.insert(0,"Official") - # - # Put the observed one last - # - if obsmodel in modkeys: - idx=modkeys.index(obsmodel) - del modkeys[idx] - modkeys.append(obsmodel) - # - # set colors for each model - # - self.colornames={} - index=0 - for mod in modkeys: - self.colornames[mod]=self.COLORLIST[index] - index+=1 - if index==len(self.COLORLIST): - index=0 - # - # Setup first row of buttons (forecast 
hours) - # - self.setupBut1(fhrkeys,numbuttons=NUMTBUTTONS,arrows=1,width=3) - # - # Setup second row of buttons (models) - # - self.setupBut2(modkeys,numbuttons=NUMMBUTTONS,arrows=1) - # - # Get min/max of forecast/observed values that need to be shown - # - fullmin=999999.0 - fullmax=-999999.0 - self.setWorking("%s: getting max/min"%(workStart)) - tothisto=zeros((self.histonumbins,)) - maxvalue=zeros((self.histonumbins,)) - minvalue=zeros((self.histonumbins,))+9999999.0 - for key in self.histograms.keys(): - tothisto+=self.histograms[key] - nums=self.histograms[key]/float(self.numCases[key]) - maxvalue=maximum(maxvalue,nums) - minvalue=where(greater(nums,0.0),minimum(minvalue,nums),minvalue) - for i in xrange(self.histonumbins): - minval=self.histomin+(i*self.histowidth) - maxval=minval+self.histowidth - print "%3d %5.3f--%5.3f %d"%(i,minval,maxval,tothisto[i]) - if tothisto[i]>0: - fullmin=min(minval,fullmin) - fullmax=max(maxval,fullmax) - #print " fullmin:",fullmin - #print " fullmax:",fullmax - #fullmin=0.025 - #fullmax=2.025 - #print " fullmin:",fullmin - #print " fullmax:",fullmax - # - # If not many bins shown (i.e. 
nearly constant values)...add bins - # up and down until we get 15 bins - so our values are 'centered' in - # a reaonably wide graph - # - numbins=float(fullmax-fullmin-self.histowidth)/float(self.histowidth) - if numbins<15: - while numbins<15: - fullmax=minimum(fullmax+self.histowidth,parmMaxval+self.histohalf) - fullmin=maximum(fullmin-self.histowidth,parmMinval-self.histohalf) - numbins=float(fullmax-fullmin-self.histowidth)/float(self.histowidth) - if ((numbins<15)and(fullminparmMaxval)): - numbins=16 - #print " fullmin:",fullmin - #print " fullmax:",fullmax - # - # - # - numticks=25 - tickInterval=self.niceNumDec((fullmax-fullmin)/float(numticks-1),1) - print "the tickInterval with 25 desired is:",tickInterval - # - # Dont let tick interval be smaller than parm precision - # - mintick=10**(-parmPrecision) - tickInterval=max(tickInterval,mintick) - print "after checking against precision...tickInterval is:",tickInterval - # - # Set the minimum graph one tick interval below the minimum...but - # not below the parm minimum value - # - graphmin=(floor(float(fullmin)/float(tickInterval))-1)*tickInterval - graphmin=maximum(graphmin,parmMinval) - # - # Set the maximum graph one tick interval above the maximum...but - # not above the parm maximum value - # - graphmax=(floor(float(fullmax)/float(tickInterval))+2)*tickInterval - graphmax=minimum(graphmax,parmMaxval) - print "so final x-coordinate graph from min/max:",graphmin,graphmax - # - # Find the maximum Y value for the bins being displayed - # - maxnum=0 - minnum=999999 - for i in xrange(self.histonumbins): - minval=self.histomin+(i*self.histowidth) - maxval=minval+self.histowidth - if ((minval>=fullmin)and(maxval<=fullmax)): - testmax=maxvalue[i] - maxnum=max(maxnum,testmax) - testmin=minvalue[i] - minnum=min(minnum,testmin) - print "the maximum value to display is:",maxnum - print "the minimum value to display is:",minnum - vint=self.niceNumDec(maxnum/20,1) - print "the vertical tick interval: vint:",vint - 
maxnum=(int(float(maxnum)/float(vint))+1)*vint - print "the maxnumber to graph is:",maxnum - # - # - # - # - left=self.cd.curwidth*(175.0/700.0) - right=self.cd.curwidth*(525.0/700.0) - bot=self.cd.curheight*(130.0/530.0) - top=self.cd.curheight*(480.0/530.0) - if ((verType==1)or(parmRateFlag==1)): - logflag=1 - logmax=log(maxnum) - logmin=log(minnum) - print "old min/max=%f,%f"%(minnum,maxnum) - print "new log range: %7.3f %7.3f"%(logmin,logmax) - self.setgraph(graphmin,graphmax,logmin,logmax,left,right,bot,top) - self.logvalhaxes(graphmin,graphmax,tickInterval,logmin,logmax,parm) - else: - logflag=0 - self.setgraph(graphmin,graphmax,0,maxnum,left,right,bot,top) - self.valhaxes(graphmin,graphmax,tickInterval,maxnum,vint,parm) - - - ul1="Value Histogram - %s"%parm - self.cdLabels(ul1,totalpoints,dateStyle,dateType,numDays,fromDay,dayList,cycleList) - # - # Draw each histogram - # - totalcount=len(self.histograms.keys()) - count=0 - for key in self.histograms.keys(): - count+=1 - if self.setAndCheckWorking("%s: drawing histogram %d of %d"%(workStart,count,totalcount))==1: - self.stopWorking() - return - tagbase=key.split("-") - mod=tagbase[0] - fhr=int(tagbase[1]) - fhrstr="f%d"%fhr - tagtuple=(mod,fhrstr) - flabel="%d-hr forecast"%fhr - self.labelLine(flabel,3,justify="right",tags=tagtuple) - - colorname=self.colornames[mod] - bins=self.histograms[key]/float(self.numCases[key]) - nbin=bins.shape[0] - for i in xrange(nbin): - # - # get x-coords of bin, and ignore bins outside the range - # of x-coordinates that we are showing - # - x1=max(self.histomin+(i*self.histowidth),graphmin) - x2=min(self.histomin+((i+1)*self.histowidth),graphmax) - if x1fullmax: - continue - # - # Logarithmic y-values a little different - # - if logflag==1: - y=bins[i] - if y>0.0: - logy=log(y) - logy=min(logy,logmax) - if i==0: - (sx1,sy1)=self.graphcoord(x1,logmin) - else: - yold=bins[i-1] - if yold>0.0: - logyold=log(yold) - else: - logyold=logmin - (sx1,sy1)=self.graphcoord(x1,logyold) - 
(sx2,sy2)=self.graphcoord(x1,logy) - (sx3,sy3)=self.graphcoord(x2,logy) - if ((i+1)==nbin): - (sx4,sy4)=self.graphcoord(x2,logmin) - self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) - elif bins[i+1]==0: - (sx4,sy4)=self.graphcoord(x2,logmin) - self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) - else: - self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,fill=colorname,tags=tagtuple) - # - # Normal graphing for non-logarithmic y-values - # - else: - y=bins[i] - if y>0: - y=min(y,maxnum) - if i==0: - (sx1,sy1)=self.graphcoord(x1,0) - else: - yold=min(bins[i-1],maxnum) - (sx1,sy1)=self.graphcoord(x1,yold) - (sx2,sy2)=self.graphcoord(x1,y) - (sx3,sy3)=self.graphcoord(x2,y) - if ((i+1)==nbin): - (sx4,sy4)=self.graphcoord(x2,0) - self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) - elif bins[i+1]==0: - (sx4,sy4)=self.graphcoord(x2,0) - self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) - else: - self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,fill=colorname,tags=tagtuple) - self.but2state[mod]=1 - self.but1state[fhrstr]=1 - - - startBut1(self) - startBut2(self) - - self.stopWorking() - self.moveCD() - self.cd.deiconify() - self.cd.lift() - return - #================================================================== - # expectedValue - display expected value for forecast values - # - # - def expectedValue(self,parmList,cycleList,modelList,obsmodel, - fcstrList,fhrStart,fhrEnd,dateType,numDays,fromDay, - dayList,dateStyle,scale,commonCases,accumHours, - accumFreq): - # - # Clear display - setup title - # - parm=parmList[0] - self.cd.canvas.delete(Tkinter.ALL) - self.cd.title("Expected Value Distribution - %s"%parm) - # - # - # - workStart="Working on Expected Value Distribution" - self.startWorking(workStart,optionRemove=0) - # - # - # - NUMTBUTTONS=12 # normal number of time buttons on a row - configure - 
NUMMBUTTONS=6 # normal number of model buttons on a row - configure - # - # get the active EditArea into ea. If the active edit area is - # None - then assume they want to run it over the entire grid - # - editArea=self.getActiveEditArea() - editAreaMask=self.encodeEditArea(editArea) - npts=add.reduce(add.reduce(editAreaMask)) - if (npts==0): - editArea.invert() - editAreaMask=self.encodeEditArea(editArea) - eaflat=ravel(editAreaMask) - totalpoints=add.reduce(eaflat) - # - # make space for saving data - # - self.flists={} # storage for fcst values for each model/forecast hour - self.olists={} # storage for obs values for each model/forecast hour - fullmin=999999.0 - fullmax=-999999.0 - # - # Loop over parm and model - # - totaliters=len(modelList) - iter=0 - # - # For vectors...the parm to read might be different than - # the name of the parm - # - readParm=parm - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - # - # Get information about the parm we are reading - # - (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, - parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) - obsParm=self.VU.getObsParm(readParm) - verType=self.VU.getVerType(readParm) - datatype=self.VU.getVerParmType(readParm) - if ((datatype==1)and(last3=="Dir")): - parmMinval=0 - parmMaxval=360 - # - # get binwidth and bigerr for parm...but for vectors its - # complicated by dir/mag/vecerr options - # - binwidth=self.VU.getVerBinWidth(readParm) - if datatype==1: - (bwMag,bwDir)=binwidth - if last3=="Dir": - binwidth=bwDir - else: - binwidth=bwMag - # - # Setup histogram binning routines - # - self.histosetup(parmMinval,parmMaxval,binwidth) - # - # Get mode for reading obs grids - # - obsGridMode=self.getReadMode(obsmodel,obsParm,0) - # - # Get case times/records for all models - # - caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, - dateStyle,dateType,fromDay=fromDay, - 
numDays=numDays,dayList=dayList, - fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd, - accumHours=accumHours,accumFreq=accumFreq, - commonCases=commonCases,basetimeOffsets=1, - callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - # - # Loop over each model - # - for model in modelList: - iter+=1 - workNow=workStart+":%s (%d of %d)"%(model,iter,totaliters) - # - fcstGridMode=self.getReadMode(model,readParm) - # - # Get all the cases for this model - # - cases=caseInfo[model] - # - # Sort cases by the start/end time, not the basetime - # - casekeys=cases.keys() - casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) - totalcount=len(casekeys) - self.VU.logMsg("reading %d cases for %s"%(totalcount,model),1) - count=0 - lastobs="" - for key in casekeys: - count+=1 - #self.VU.logMsg("%s : %s memory:%d resident:%d"%(model,key,memory(),resident())) - if self.setAndCheckWorking("%s: %d of %d"%(workNow,count,totalcount))==1: - self.stopWorking() - return - (basetimestr,stimestr,etimestr)=key.split(",") - basetime=int(basetimestr) - stime=int(stimestr) - etime=int(etimestr) - (frecList,orecList)=cases[key] - # - # Dont make stats for obs not yet complete - # - if etime>time.time(): - continue - # - # Dont include negative forecast hours - # - fhr=self.VU.getFcstHour(basetime,stime) - if fhr<0: - continue - # - # string to store grid under depends on model and forecast hour - # - saveKey="%s-%3.3d"%(model,fhr) - # - # If a new and different obs time - read the obs data - # - obskey=key.split(",",1)[1] - #self.VU.logMsg("before getObs: %d %d"%(memory(),resident())) - - obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, - stime,etime,mode=obsGridMode, - recList=orecList) - #obsdata1=copy.copy(obsdata) - #del obsdata - obsdata1=obsdata - obsdata1=self.scaleGrid(obsdata1,scale,datatype) - #self.VU.logMsg("after scaling: %d %d"%(memory(),resident())) - # - # For probabilistic variables...calculate the 
- # observed 'yes/no' value - # - if verType==1: - obsdata1=self.getProbVerGrid(readParm,obsdata1)*100.0 - #self.VU.logMsg("after probing: %d %d"%(memory(),resident())) - # - # cant do a value histogram of vector wind - # errors...so those get changed to windSpd - # - if ((datatype!=1)or(last3 in ["Spd","Dir"])): - if last3=="Spd": - obsgrid=obsdata1[0] - elif last3=="Dir": - obsgrid=obsdata1[1] - else: - obsgrid=obsdata1 - else: - obsgrid=obsdata1[0] - obsonly=compress(eaflat,ravel(obsgrid)) - obsList=list(obsonly) - del obsonly - del obsgrid - del obsdata1 - #self.VU.logMsg("down to obsList: %d %d"%(memory(),resident())) - minObs=min(obsList) - maxObs=max(obsList) - fullmin=min(minObs,fullmin) - fullmax=max(maxObs,fullmax) - if self.olists.has_key(saveKey): - self.olists[saveKey].extend(obsList) - self.VU.logMsg("extending") - else: - self.olists[saveKey]=[] - self.olists[saveKey].extend(obsList) - self.VU.logMsg("new key") - #self.VU.logMsg("after adding: %d %d"%(memory(),resident())) - del obsList - #self.VU.logMsg("del of obsList: %d %d"%(memory(),resident())) - # - # Read forecast grid and calculate error grid - # - #self.VU.logMsg("before getGrids: %d %d"%(memory(),resident())) - fcstdata=self.VU.getVerGrids(model,basetime,readParm, - stime,etime,mode=fcstGridMode, - recList=frecList) - #self.VU.logMsg("after getGrids: %d %d"%(memory(),resident())) - #fcstdata1=copy.copy(fcstdata) - #self.VU.logMsg("after copy: %d %d"%(memory(),resident())) - #del fcstdata - #self.VU.logMsg("after del: %d %d"%(memory(),resident())) - fcstdata1=fcstdata - fcstdata1=self.scaleGrid(fcstdata1,scale,datatype) - # - # Get the error, handling vector error, etc. 
- # - if ((datatype!=1)or(last3 in ["Spd","Dir"])): - if last3=="Spd": - fcstgrid=fcstdata1[0] - elif last3=="Dir": - fcstgrid=fcstdata1[1] - else: - fcstgrid=fcstdata1 - else: - fcstgrid=fcstdata1[0] - fcstonly=compress(eaflat,ravel(fcstgrid)) - self.VU.logMsg("pts to save:%s"%fcstonly.shape) - del fcstgrid - del fcstdata1 - fcstList=list(fcstonly) - del fcstonly - #self.VU.logMsg("after fcstList: %d %d"%(memory(),resident())) - minFcst=min(fcstList) - maxFcst=max(fcstList) - fullmin=min(minFcst,fullmin) - fullmax=max(maxFcst,fullmax) - #self.VU.logMsg("after maxmin : %d %d"%(memory(),resident())) - # - # Add values forecast lists for same model/fhr - # - # - if self.flists.has_key(saveKey): - self.flists[saveKey].extend(fcstList) - self.VU.logMsg("extending") - else: - self.flists[saveKey]=[] - self.flists[saveKey].extend(fcstList) - self.VU.logMsg("new key") - #self.VU.logMsg("after saving: %d %d"%(memory(),resident())) - del fcstList - #self.VU.logMsg("after del fList: %d %d"%(memory(),resident())) - # - # Get all the keys that will be displayed - # - fullkeys=self.flists.keys() - # - # if no data could be read - stop here - # - if len(fullkeys)<1: - self.stopWorking() - msg="No verification data could be found matching those criteria" - self.statusBarMsg(msg,"U") - return - # - # For buttons...get models/forecasthours actually in the data - # - fullkeys.sort() - fhrstrs=[] - modkeys=[] - for fullkey in fullkeys: - (mod,fhrstr)=fullkey.split("-") - if fhrstr not in fhrstrs: - fhrstrs.append(fhrstr) - if mod not in modkeys: - modkeys.append(mod) - # - # Change fhrstrs (sorted on 3-character 000-999) into - # smaller fhrkeys that are NOT all 3-characters wide - # - fhrstrs.sort() - fhrkeys=[] - for fhrstr in fhrstrs: - fhrkeys.append("%d"%int(fhrstr)) - # - # If an Official button is in there...make it first - # - modkeys.sort() - if "Official" in modkeys: - idx=modkeys.index("Official") - del modkeys[idx] - modkeys.insert(0,"Official") - # - # set colors for 
each model - # - self.colornames={} - index=0 - for mod in modkeys: - self.colornames[mod]=self.COLORLIST[index] - index+=1 - if index==len(self.COLORLIST): - index=0 - # - # Setup first row of buttons (forecast hours) - # - self.setupBut1(fhrkeys,numbuttons=NUMTBUTTONS,arrows=1,width=3) - # - # Setup second row of buttons (models) - # - self.setupBut2(modkeys,numbuttons=NUMMBUTTONS,arrows=1) - # - # If not many bins shown (i.e. nearly constant values)...add bins - # up and down until we get at least 15 bins - so our values are - # 'centered' in a reaonably wide graph - # - numbins=int(float(fullmax-fullmin)/float(binwidth))+1 - if numbins<15: - while numbins<15: - fullmax=min(fullmax+binwidth,parmMaxval) - fullmin=max(fullmin-binwidth,parmMinval) - numbins=int(float(fullmax-fullmin)/float(binwidth))+1 - if ((numbins<15)and(fullmin==parmMinval)and(fullmax==parmMaxval)): - numbins=16 - # - # - # - numticks=25 - tickInterval=self.niceNumDec((fullmax-fullmin)/float(numticks-1),1) - # - # Dont let tick interval be smaller than parm precision - # - mintick=10**(-parmPrecision) - tickInterval=max(tickInterval,mintick) - # - # Set the minimum graph one tick interval below the minimum...but - # not below the parm minimum value - # - graphmin=(floor(float(fullmin)/float(tickInterval))-1)*tickInterval - graphmin=maximum(graphmin,parmMinval) - # - # Set the maximum graph one tick interval above the maximum...but - # not above the parm maximum value - # - graphmax=(floor(float(fullmax)/float(tickInterval))+2)*tickInterval - graphmax=minimum(graphmax,parmMaxval) - # - # - # - numTicks=int(float(graphmax-graphmin)/float(tickInterval))+1 - # - # Set up the graph axes - # - left=self.cd.curwidth*(50.0/700.0) - right=self.cd.curwidth*(650.0/700.0) - bot=self.cd.curheight*(100.0/530.0) - top=self.cd.curheight*(480.0/530.0) - self.setgraph(graphmin,graphmax,graphmin,graphmax,left,right,bot,top) - self.expaxes(graphmin,graphmax,tickInterval) - # - # Label the top of the graph - # - 
ul1="Expected %s Value for %s Forecast"%(obsmodel,parm) - self.cdLabels(ul1,totalpoints,dateStyle,dateType,numDays,fromDay,dayList,cycleList) - # - # for rateParms, or probability parms, label the length of periods - # - if ((verType==1)or(parmRateFlag==1)): - self.labelLine("%d-hr periods"%accumHours,3) - # - # Draw - # - totalcount=len(fullkeys) - count=0 - for key in fullkeys: - count+=1 - self.VU.logMsg("graph %d memory:%d resident:%d"%(count,memory(),resident())) - if self.setAndCheckWorking("%s: drawing graph %d of %d"%(workStart,count,totalcount))==1: - self.stopWorking() - return - tagbase=key.split("-") - mod=tagbase[0] - modnum=modelList.index(mod) - fhr=int(tagbase[1]) - fhrstr="f%d"%fhr - tagtuple=(mod,fhrstr) - flabel="%d-hr forecast"%fhr - self.labelLine(flabel,3,justify="right",tags=tagtuple) - - colorname=self.colornames[mod] - # - # Turn lists for this model/time back into arrays - # - fcstArray=array(self.flists[key]) - obsArray=array(self.olists[key]) - prevAvg=-99999.9 - for i in xrange(numTicks): - value=graphmin+(i*tickInterval) - valuelow=value-(float(tickInterval)/2.0) - vl1=value-(float(tickInterval)/6.0) - valuehigh=value+(float(tickInterval)/2.0) - vh1=value+(float(tickInterval)/6.0) - pts=logical_and(greater_equal(fcstArray,valuelow),less(fcstArray,valuehigh)) - if sometrue(pts): - #obsDist=sort(compress(pts,obsArray)) - obsDist=compress(pts,obsArray) - numCases=obsDist.shape[0] - #minObs=obsDist[0] - minObs=minimum.reduce(obsDist) - #maxObs=obsDist[numCases-1] - maxObs=maximum.reduce(obsDist) - avgObs=float(add.reduce(obsDist))/float(numCases) - avgObsSquared=float(add.reduce(obsDist*obsDist))/float(numCases) - std=sqrt(avgObsSquared-(avgObs*avgObs)) - #if numCases>1: - # midObs=obsDist[numCases/2] - #else: - # midObs=avgObs - #if numCases>3: - # q1Obs=obsDist[numCases/4] - # q3Obs=obsDist[(3*numCases)/4] - #else: - # q1Obs=avgObs - # q3Obs=avgObs - # - # Graph the average - # - (x1,y1)=self.graphcoord(valuelow,avgObs) - 
(x2,y2)=self.graphcoord(valuehigh,avgObs) - if prevAvg>-99999.0: - self.cd.canvas.create_line(x1,prevAvg,x1,y1,x2,y2,fill=colorname,tags=tagtuple) - else: - self.cd.canvas.create_line(x1,y1,x2,y2,fill=colorname,tags=tagtuple) - prevAvg=y1 - # - # For everything except probability parms...plot min/max/std - # - if verType!=1: - # - # Plot the min - # - (x1,y1)=self.graphcoord(vl1,minObs) - (x2,y2)=self.graphcoord(vh1,minObs) - self.cd.canvas.create_line(x1,y1,x2,y2,fill=colorname,tags=tagtuple) - # - # Plot the max - # - (x1,y1)=self.graphcoord(vl1,maxObs) - (x2,y2)=self.graphcoord(vh1,maxObs) - self.cd.canvas.create_line(x1,y1,x2,y2,fill=colorname,tags=tagtuple) - q1Obs=avgObs-std - q3Obs=avgObs+std - (x1,y1)=self.graphcoord(valuelow,q1Obs) - (x2,y2)=self.graphcoord(valuehigh,q3Obs) - self.cd.canvas.create_polygon(x1,y1,x1,y2,x2,y2,x2,y1,stipple="gray25",fill=colorname,outline="",tags=tagtuple) - del pts - self.but2state[mod]=1 - self.but1state[fhrstr]=1 - del fcstArray - del obsArray - - startBut1(self) - startBut2(self) - - del self.flists - del self.olists - - self.stopWorking() - self.moveCD() - self.cd.deiconify() - self.cd.lift() - self.VU.logMsg("expected value done memory:%d resident:%d"%(memory(),resident())) - return - #================================================================== - # scatterPlot - display scatterplot - # - # - def scatterPlot(self,parmList,cycleList,modelList,obsmodel, - fcstrList,fhrStart,fhrEnd,dateType,numDays,fromDay, - dayList,dateStyle,scale,commonCases,accumHours, - accumFreq): - # - # Clear display - setup title - # - parm=parmList[0] - self.cd.canvas.delete(Tkinter.ALL) - self.cd.title("Scatterplot - %s"%parm) - # - # - # - workStart="Working on Verifying Value Distribution" - self.startWorking(workStart,optionRemove=0) - # - # - # - NUMTBUTTONS=12 # normal number of time buttons on a row - configure - NUMMBUTTONS=6 # normal number of model buttons on a row - configure - # - # get the active EditArea into ea. 
If the active edit area is - # None - then assume they want to run it over the entire grid - # - editArea=self.getActiveEditArea() - editAreaMask=self.encodeEditArea(editArea) - npts=add.reduce(add.reduce(editAreaMask)) - if (npts==0): - editArea.invert() - editAreaMask=self.encodeEditArea(editArea) - eaflat=ravel(editAreaMask) - totalpoints=add.reduce(eaflat) - # - # make space for saving data - # - self.flists={} # storage for fcst values for each model/forecast hour - self.olists={} # storage for obs values for each model/forecast hour - fullmin=999999.0 - fullmax=-999999.0 - # - # Loop over parm and model - # - totaliters=len(modelList) - iter=0 - # - # For vectors...the parm to read might be different than - # the name of the parm - # - readParm=parm - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - # - # Get information about the parm we are reading - # - (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, - parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) - obsParm=self.VU.getObsParm(readParm) - verType=self.VU.getVerType(readParm) - datatype=self.VU.getVerParmType(readParm) - if ((datatype==1)and(last3=="Dir")): - parmMinval=0 - parmMaxval=360 - # - # get binwidth and bigerr for parm...but for vectors its - # complicated by dir/mag/vecerr options - # - binwidth=self.VU.getVerBinWidth(readParm) - if datatype==1: - (bwMag,bwDir)=binwidth - if last3=="Dir": - binwidth=bwDir - else: - binwidth=bwMag - # - # Setup histogram binning routines - # - self.histosetup(parmMinval,parmMaxval,binwidth) - # - # Get mode for reading obs grids - # - obsGridMode=self.getReadMode(obsmodel,obsParm,0) - # - # Get case times/records for all models - # - caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, - dateStyle,dateType,fromDay=fromDay, - numDays=numDays,dayList=dayList, - fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd, - 
accumHours=accumHours,accumFreq=accumFreq, - commonCases=commonCases,basetimeOffsets=1, - callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - # - # Loop over each model - # - for model in modelList: - iter+=1 - workNow=workStart+":%s (%d of %d)"%(model,iter,totaliters) - # - fcstGridMode=self.getReadMode(model,readParm) - # - # Get all the cases for this model - # - cases=caseInfo[model] - # - # Sort cases by the start/end time, not the basetime - # - casekeys=cases.keys() - casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) - totalcount=len(casekeys) - self.VU.logMsg("reading %d cases for %s"%(totalcount,model),1) - count=0 - lastobs="" - for key in casekeys: - count+=1 - self.VU.logMsg("%s : %s"%(model,key),10) - if self.setAndCheckWorking("%s: %d of %d"%(workNow,count,totalcount))==1: - self.stopWorking() - return - (basetimestr,stimestr,etimestr)=key.split(",") - basetime=int(basetimestr) - stime=int(stimestr) - etime=int(etimestr) - (frecList,orecList)=cases[key] - # - # Dont make stats for obs not yet complete - # - if etime>time.time(): - continue - # - # Dont include negative forecast hours - # - fhr=self.VU.getFcstHour(basetime,stime) - if fhr<0: - continue - # - # string to store grid under depends on model and forecast hour - # - saveKey="%s-%3.3d"%(model,fhr) - # - # If a new and different obs time - read the obs data - # - obskey=key.split(",",1)[1] - obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, - stime,etime,mode=obsGridMode, - recList=orecList) - obsdata=self.scaleGrid(obsdata,scale,datatype) - # - # For probabilistic variables...calculate the - # observed 'yes/no' value - # - if verType==1: - obsdata=self.getProbVerGrid(readParm,obsdata)*100.0 - # - # cant do a value histogram of vector wind - # errors...so those get changed to windSpd - # - if ((datatype!=1)or(last3 in ["Spd","Dir"])): - if last3=="Spd": - obsgrid=obsdata[0] - elif last3=="Dir": - obsgrid=obsdata[1] - else: - 
obsgrid=obsdata - obsonly=compress(eaflat,ravel(obsgrid)) - else: - obsgrid=obsdata[0] - obsonly=compress(eaflat,ravel(obsgrid)) - obsList=list(obsonly) - minObs=min(obsList) - maxObs=max(obsList) - fullmin=min(minObs,fullmin) - fullmax=max(maxObs,fullmax) - if self.olists.has_key(saveKey): - self.olists[saveKey].extend(obsList) - else: - self.olists[saveKey]=obsList - # - # Read forecast grid and calculate error grid - # - fcstdata=self.VU.getVerGrids(model,basetime,readParm, - stime,etime,mode=fcstGridMode, - recList=frecList) - fcstdata=self.scaleGrid(fcstdata,scale,datatype) - # - # Get the error, handling vector error, etc. - # - if ((datatype!=1)or(last3 in ["Spd","Dir"])): - if last3=="Spd": - fcstgrid=fcstdata[0] - elif last3=="Dir": - fcstgrid=fcstdata[1] - else: - fcstgrid=fcstdata - fcstonly=compress(eaflat,ravel(fcstgrid)) - else: - fcstgrid=fcstdata[0] - fcstonly=compress(eaflat,ravel(fcstgrid)) - fcstList=list(fcstonly) - minFcst=min(fcstList) - maxFcst=max(fcstList) - fullmin=min(minFcst,fullmin) - fullmax=max(maxFcst,fullmax) - # - # Add values forecast lists for same model/fhr - # - # - if self.flists.has_key(saveKey): - self.flists[saveKey].extend(fcstList) - else: - self.flists[saveKey]=fcstList - # - # Get all the keys that will be displayed - # - fullkeys=self.flists.keys() - # - # if no data could be read - stop here - # - if len(fullkeys)<1: - self.stopWorking() - msg="No verification data could be found matching those criteria" - self.statusBarMsg(msg,"U") - return - # - # For buttons...get models/forecasthours actually in the data - # - fullkeys.sort() - fhrstrs=[] - modkeys=[] - for fullkey in fullkeys: - (mod,fhrstr)=fullkey.split("-") - if fhrstr not in fhrstrs: - fhrstrs.append(fhrstr) - if mod not in modkeys: - modkeys.append(mod) - # - # Change fhrstrs (sorted on 3-character 000-999) into - # smaller fhrkeys that are NOT all 3-characters wide - # - fhrstrs.sort() - fhrkeys=[] - for fhrstr in fhrstrs: - fhrkeys.append("%d"%int(fhrstr)) 
- # - # If an Official button is in there...make it first - # - modkeys.sort() - if "Official" in modkeys: - idx=modkeys.index("Official") - del modkeys[idx] - modkeys.insert(0,"Official") - # - # set colors for each model - # - self.colornames={} - index=0 - for mod in modkeys: - self.colornames[mod]=self.COLORLIST[index] - index+=1 - if index==len(self.COLORLIST): - index=0 - # - # Setup first row of buttons (forecast hours) - # - self.setupBut1(fhrkeys,numbuttons=NUMTBUTTONS,arrows=1,width=3) - # - # Setup second row of buttons (models) - # - self.setupBut2(modkeys,numbuttons=NUMMBUTTONS,arrows=1) - # - # If not many bins shown (i.e. nearly constant values)...add bins - # up and down until we get at least 15 bins - so our values are - # 'centered' in a reaonably wide graph - # - numbins=int(float(fullmax-fullmin)/float(binwidth))+1 - if numbins<15: - while numbins<15: - fullmax=min(fullmax+binwidth,parmMaxval) - fullmin=max(fullmin-binwidth,parmMinval) - numbins=int(float(fullmax-fullmin)/float(binwidth))+1 - if ((numbins<15)and(fullmin==parmMinval)and(fullmax==parmMaxval)): - numbins=16 - # - # - # - numticks=25 - tickInterval=self.niceNumDec((fullmax-fullmin)/float(numticks-1),1) - # - # Dont let tick interval be smaller than parm precision - # - mintick=10**(-parmPrecision) - tickInterval=max(tickInterval,mintick) - # - # Set the minimum graph one tick interval below the minimum...but - # not below the parm minimum value - # - graphmin=(floor(float(fullmin)/float(tickInterval))-1)*tickInterval - graphmin=maximum(graphmin,parmMinval) - # - # Set the maximum graph one tick interval above the maximum...but - # not above the parm maximum value - # - graphmax=(floor(float(fullmax)/float(tickInterval))+2)*tickInterval - graphmax=minimum(graphmax,parmMaxval) - # - # - # - numTicks=int(float(graphmax-graphmin)/float(tickInterval))+1 - # - # Set up the graph axes - # - left=self.cd.curwidth*(50.0/700.0) - right=self.cd.curwidth*(650.0/700.0) - 
bot=self.cd.curheight*(100.0/530.0) - top=self.cd.curheight*(480.0/530.0) - self.setgraph(graphmin,graphmax,graphmin,graphmax,left,right,bot,top) - self.valaxes(graphmin,graphmax,tickInterval) - # - numPts=totalpoints - ul1="Scatterplot - %s"%parm - self.cdLabels(ul1,numPts,dateStyle,dateType,numDays,fromDay,dayList,cycleList) - # - # - # - #self.probaxes() - # - # - # - MaxValuesToShow=1000 - numbins=50 - numPts=totalpoints - counts={} - maxcounts={} - maxnum=0 - self.setWorking("%s: scanning scatterplots"%workStart) - for key in self.flists.keys(): - maxnum=max(maxnum,len(self.flists[key])) - if self.checkWorking()==1: - self.stopWorking() - return - if maxnum>MaxValuesToShow: - binsize=float(graphmax-graphmin)/float(numbins) - #print "binsize=",binsize - totaldcount=len(self.flists.keys()) - dcount=0 - for key in self.flists.keys(): - dcount+=1 - self.setWorking("%s: large scatterplot thinning: %d of %d"%(workStart,dcount,totaldcount)) - if self.checkWorking()==1: - self.stopWorking() - return - if len(self.flists[key])>MaxValuesToShow: - count=zeros((numbins,numbins)) - xpos=minimum(((array(self.olists[key])-graphmin)/binsize).astype(int),numbins-1) - ypos=minimum(((array(self.flists[key])-graphmin)/binsize).astype(int),numbins-1) - xl=list(xpos) - yl=list(ypos) - for i in xrange(len(xl)): - x=xl[i] - y=yl[i] - count[y,x]+=1 - maxcounts[key]=maximum.reduce(maximum.reduce(count)) - #print "maxcounts[",key,"]=",maxcounts[key] - counts[key]=count - # - # Draw - # - totalcount=len(fullkeys) - count=0 - for key in fullkeys: - count+=1 - self.setWorking("%s: drawing scatterplot %d of %d"%(workStart,count,totalcount)) - if self.checkWorking()==1: - self.stopWorking() - return - tagbase=key.split("-") - mod=tagbase[0] - modnum=modelList.index(mod) - fhr=int(tagbase[1]) - fhrstr="f%d"%fhr - tagtuple=(mod,fhrstr) - flabel="%d-hr forecast"%fhr - self.labelLine(flabel,3,justify="right",tags=tagtuple) - - colorname=self.colornames[mod] - - if key not in maxcounts.keys(): - 
ylist=self.flists[key] - xlist=self.olists[key] - for i in xrange(len(ylist)): - (x,y)=self.graphcoord(xlist[i],ylist[i]) - self.cd.canvas.create_line(x-2,y,x+2,y,fill=colorname,tags=tagtuple) - self.cd.canvas.create_line(x,y-2,x,y+2,fill=colorname,tags=tagtuple) - else: - for i in xrange(numbins): - midx=graphmin+((i+0.5)*binsize) - for j in xrange(numbins): - midy=graphmin+((j+0.5)*binsize) - width=(float(counts[key][j,i])/float(maxcounts[key]))*binsize*0.5 - (x0,y0)=self.graphcoord(midx-width,midy-width) - (x1,y1)=self.graphcoord(midx+width,midy+width) - if width>0.01: - self.cd.canvas.create_arc(x0,y0,x1,y1,fill=colorname,outline=colorname,start=0.0,extent=359.9,width=1.0,tags=tagtuple) - - self.but2state[mod]=1 - self.but1state[fhrstr]=1 - - startBut1(self) - startBut2(self) - - self.stopWorking() - self.moveCD() - self.cd.deiconify() - self.cd.lift() - return - #================================================================== - # scaleGrid - smooth a grid by the scale amount. Correctly handles - # vectors indicated by datatype==1. 
- # - def scaleGrid(self,griddata,scale,datatype): - if scale>0: - if datatype!=1: - griddata=self.VU.smoothpm(griddata,scale) - else: - (gridmag,griddir)=griddata - (u,v)=self.MagDirToUV(gridmag,griddir) - us=self.VU.smoothpm(u,scale) - vs=self.VU.smoothpm(v,scale) - (gridmag,griddir)=self.UVToMagDir(us,vs) - griddata=(gridmag,griddir) - return griddata - #================================================================== - # moveCD - if the first time self.cd is displayed - move to a good - # location - # - def moveCD(self): - if self.cd.firstDisplay==1: - self.cd.update_idletasks() - geo=self.cd.geometry() - (mwh,mof)=geo.split("+",1) - (mw,mh)=mwh.split("x",1) - parentgeo=self.root.geometry() - (wh,of)=parentgeo.split("+",1) - (w,h)=wh.split("x",1) - (ox,oy)=of.split("+",1) - xoff=int(ox)+int(w)-int(mw) - yoff=int((int(h)-int(mh))/2.0)+int(oy) - newgeo=mwh+"+%d+%d"%(xoff,yoff) - self.cd.geometry(newgeo) - self.cd.firstDisplay=0 - return - #================================================================== - # showStats - display point/area statistics - # - def ShowStats(self,DialogDict): - self.VU.logMsg("running ShowStats:") - - plotType=DialogDict["PlotType"] - if plotType=="vs. Time": - self.makeTimeSeries(DialogDict) - if plotType=="vs. 
Fcst Hour": - self.makeFhourGraph(DialogDict) - return - #================================================================== - # makeTimeSeries - display time series for point/area - # - def makeTimeSeries(self,DialogDict): - self.VU.logMsg("running makeTimeSeries:") - display=DialogDict["Display"] - areaList=DialogDict["areaList"] - AreaCombine=DialogDict["AreaCombine"] - parmList=DialogDict["Parms"] - threshold=DialogDict["Threshold"] - cycleList=DialogDict["cycleList"] - modelList=DialogDict["Models"] - obsmodel=DialogDict["ObsModel"] - fcstrList=DialogDict["fcstrList"] - fhrStart=DialogDict["fhrStart"] - fhrEnd=DialogDict["fhrEnd"] - dateType=DialogDict["dateType"] - numDays=DialogDict["numDays"] - fromDay=DialogDict["fromDay"] - dayList=DialogDict["dayList"] - dateStyle=DialogDict["dateStyle"] - plotType=DialogDict["PlotType"] - scale=DialogDict["scale"] - commonCases=DialogDict["commonCases"] - accumHours=DialogDict["accumHours"] - accumFreq=DialogDict["accumFreq"] - TwoCatType=DialogDict["TwoCatType"] - TwoCatCond=DialogDict["TwoCatCond"] - TwoCatValue=DialogDict["TwoCatValue"] - TwoCatValueString=DialogDict["TwoCatValueString"] - # - # Check for good GUI input - # - ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, - dayList) - if ret==0: - return - # - # Check that we do not have too many things varying - # - if len(parmList)>1: - parmVary=1 - else: - parmVary=0 - if ((len(areaList)>1)and(AreaCombine==0)): - areaVary=1 - else: - areaVary=0 - if len(modelList)>1: - modelVary=1 - else: - modelVary=0 - totalVary=parmVary+areaVary+modelVary - if totalVary>1: - msg="Can only vary one of parm/area/model when doing 'vs. Time' graphs." 
- self.statusBarMsg(msg,"U") - return - # - # If nothing varying - pick model - # - if totalVary==0: - modelVary=1 - # - # get list of names of selected edit areas into areaNames - # - areaNames=[] - nameList=self.VU.listEditAreas() - descList=self.VU.listEditAreaDescriptions() - for areaDesc in areaList: - if areaDesc=="Current": - areaNames.append("Current") - elif areaDesc in descList: - areaNum=descList.index(areaDesc) - areaNames.append(nameList[areaNum]) - if len(areaNames)<1: - msg="Invalid Edit Area(s) - contact support" - self.statusBarMsg(msg,"U") - return - print "the areaNames are:",areaNames - comboArea=self.empty(bool) - if ((AreaCombine==1)and(len(areaNames)>1)): - for areaName in areaNames: - if areaName=="Current": - areaObject=self.getActiveEditArea() - mask=self.encodeEditArea(areaObject) - any=add.reduce(add.reduce(mask)) - if any==0: - mask=self.newGrid(True, bool) - elif areaName=="NONE": - mask=self.newGrid(True, bool) - else: - mask=self.encodeEditArea(areaName) - comboArea=logical_or(comboArea,mask) - # - # - # - statName=display - if statName=="TwoCat": - statName=TwoCatType - statVal=TwoCatValue - statCond=TwoCatCond - # - # Clear the cd canvas - setup title - # - self.cd.canvas.delete(Tkinter.ALL) - self.cd.title("Statistic Time Series") - workStart="Working on Statistics" - self.startWorking(workStart,optionRemove=0) - # - # - # - outdata={} - fhrList=[] - timemin=1e32 - timemax=-1e32 - valmin=1.0e32 - valmax=-1.0e32 - - countimax=len(parmList)*len(modelList) - counti=0 - for parm in parmList: - readParm=parm - vectorType=-1 - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - if last3=="Spd": - vectorType==0 - else: - vectorType==1 - obsParm=self.VU.getObsParm(readParm) - verType=self.VU.getVerType(readParm) - datatype=self.VU.getVerParmType(readParm) - thresholds=self.VU.getVerThresholds(readParm) - if last3=="Spd": - thresholds=thresholds[0] - elif last3=="Dir": - 
thresholds=thresholds[1] - thresholdValue=thresholds[threshold] - - statsCases=self.VU.getStatCases(parm,modelList,obsmodel,dateStyle,dateType, - fromDay=fromDay,numDays=numDays,dayList=dayList, - fcstrs=fcstrList,cycles=cycleList,fhrStart=fhrStart, - fhrEnd=fhrEnd,accumHours=accumHours,accumFreq=accumFreq, - commonCases=commonCases,basetimeOffsets=1, - callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - gridsCases=self.VU.getCases(readParm,modelList,obsParm,obsmodel,dateStyle,dateType, - fromDay=fromDay,numDays=numDays,dayList=dayList, - fcstrs=fcstrList,cycles=cycleList,fhrStart=fhrStart, - fhrEnd=fhrEnd,accumHours=accumHours,accumFreq=accumFreq, - requireObs=1,commonCases=commonCases,basetimeOffsets=1, - callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - for model in modelList: - counti+=1 - workNow="Reading %s %s (%d of %d)"%(parm,model,counti,countimax) - scases=statsCases[model] - if model in gridsCases.keys(): - gcases=gridsCases[model] - else: - gcases={} - # - # get overall list of keys (both stat and grid) - # - skeys=scases.keys() - gkeys=gcases.keys() - tkeys=skeys - for gkey in gkeys: - if gkey not in tkeys: - tkeys.append(gkey) - - # - # Loop over possible stat or grid cases - # - count=0 - totalcount=len(tkeys) - for key in tkeys: - count+=1 - self.setWorking("%s: %d of %d"%(workNow,count,totalcount)) - if self.checkWorking()==1: - self.stopWorking() - return - if key in scases: - srecList=scases[key] - else: - srecList=None - if key in gcases: - grecList=gcases[key] - else: - grecList=None - # - # - # - (basestr,stimestr,etimestr)=key.split(",") - basetime=int(basestr) - starttime=int(stimestr) - endtime=int(etimestr) - # - # Done show results for grids not yet complete - # - if endtime>time.time(): - continue - # - # Dont show results for grids that start before forecast time - # - fhr=(starttime-basetime)/HOURSECS - if fhr<0: - continue - # - # X-coordinate is 
'starttime' when doing "Verifying On" displays - # and 'basetime' when doing "Forecast on" displays - # - if dateStyle=="Verifying on": - x=starttime - else: - x=basetime - timemin=min(x,timemin) - timemax=max(x,timemax) - # - # Thresholds are different for different variables - # - if statName=="Percent Err <": - statVal=thresholdValue - # - # When AreaCombine is on...we already have the combined - # edit area ready - # - if ((AreaCombine==1)and(len(areaNames)>1)): - eaGrid=comboArea - - outkey="%s,%s,%3.3d,-01"%(parm,model,fhr) - if outkey not in outdata.keys(): - outdata[outkey]=[] - - valx=self.VU.getVerStat(model,basetime,readParm,starttime,endtime, - obsmodel,statName,statVal=statVal, - statCond=statCond,editArea=eaGrid, - smooth=scale,vectorType=vectorType, - srecList=srecList,grecList=grecList) - if valx is None: - print "getVerStat returned None" - continue - valmin=min(valx,valmin) - valmax=max(valx,valmax) - outdata[outkey].append((x,valx)) - # - # When AreaCombine is off...loop over editAreas - # - else: - for areaName in areaNames: - outkey="%s,%s,%3.3d,%s"%(parm,model,fhr,areaName) - if outkey not in outdata.keys(): - outdata[outkey]=[] - if areaName=="Current": - areaObject=self.getActiveEditArea() - ea=self.encodeEditArea(areaObject) - any=add.reduce(add.reduce(ea)) - if any==0: - ea=self.newGrid(True, bool) - elif areaName=="NONE": - ea=self.newGrid(True, bool) - else: - ea=areaName - valx=self.VU.getVerStat(model,basetime,readParm,starttime,endtime, - obsmodel,statName,statVal=statVal, - statCond=statCond,editArea=ea, - smooth=scale,vectorType=vectorType, - srecList=srecList,grecList=grecList) - if valx is None: - print "getVerStat returned None" - continue - valmin=min(valx,valmin) - valmax=max(valx,valmax) - outdata[outkey].append((x,valx)) - #self.VU.setDebug(0) - # - # - # - if fhr not in fhrList: - fhrList.append(fhr) - # - # - # If no data read - don't go further - # - if len(outdata.keys())<1: - self.stopWorking() - msg="No verification 
data could be found matching those criteria" - self.statusBarMsg(msg,"U") - return - #print "done reading" - # - # valmin/valmax usually works - but for bounded stats - # we always want 0.0 to be the lower bound - # - if display in ["RMS Error","Std Dev","Mean Abs Err"]: - valmin=0.0 - if display=="Percent Err <": - valmin=0.0 - valmax=1.0 - #print "value range:",valmin,valmax - # - # time buttons - # - fhrList.sort() - fList=[] - for fhr in fhrList: - fList.append("%d"%fhr) - self.setupBut1(fList,numbuttons=12,arrows=1,width=3) - # - # First part of title line is the type of error - # - if display=="TwoCat": - titleLine="%s Timeseries - "%TwoCatType - elif display!="Percent Err <": - titleLine="%s Timeseries - "%display - else: - titleLine="%s %d Timeseries - "%(display,threshold) - # - # set varList to the thing that varies: model-parm-area - # - if parmVary==1: - varList=parmList[:] - varButtons=6 - titleLine+=modelList[0] - elif modelVary==1: - if "Official" in modelList: - idx=modelList.index("Official") - del modelList[idx] - modelList.insert(0,"Official") - varList=modelList[:] - varButtons=6 - titleLine+=parmList[0] - if display=="TwoCat": - titleLine+=" %s %s"%(TwoCatCond,TwoCatValueString) - else: - varList=areaList[:] - varButtons=3 - titleLine+="%s %s"%(modelList[0],parmList[0]) - # - # Associate colors with the varying model/parm/area - # - self.colornames={} - index=0 - for var in varList: - self.colornames[var]=self.COLORLIST[index] - index+=1 - if index==len(self.COLORLIST): - index=0 - # - # Make buttons - # - self.setupBut2(varList,numbuttons=varButtons,arrows=1) - # - # Setup graphing coordinates - # - numticks=10 - graphrange=valmax-valmin - print "graphrange=",graphrange - tickInterval=self.niceNumDec(graphrange/(numticks-1),1) - #print "tickInterval=",tickInterval - - left=self.cd.curwidth*(50.0/700.0) - right=self.cd.curwidth*(650.0/700.0) - bot=self.cd.curheight*(130.0/530.0) - top=self.cd.curheight*(480.0/530.0) - 
self.setgraph(timemin,timemax,valmin,valmax,left,right,bot,top) - self.graphaxes(timemin,timemax,valmin,valmax) - # - # Draw timeseries lines - # - for key in outdata.keys(): - #print "timeseries for ",key - tagbase=key.split(",") - fhr="f%d"%int(tagbase[2]) - # - if parmVary==1: - varTag=tagbase[0] - elif modelVary==1: - varTag=tagbase[1] - else: - #varTag=self.VU.EditAreaDescriptions[int(tagbase[3])] - areaNum=self.VU.getEditAreaNumberFromName(tagbase[3]) - varTag=self.VU.EditAreaDescriptions[areaNum] - tagtuple=(varTag,fhr) - - colorname=self.colornames[varTag] - points=outdata[key] - points.sort() - gpoints=[] - for point in points: - (xtime,val)=point - (x,y)=self.graphcoord(xtime,val) - gpoints.append(x) - gpoints.append(y) - if len(gpoints)>3: - self.cd.canvas.create_line(gpoints,fill=colorname,tags=tagtuple) - self.but2state[varTag]=1 - self.but1state[fhr]=1 - # - # Label forecast times - # - for fhr in fhrList: - fhrstr="f%d"%fhr - labelstr="%d-hr Forecast"%fhr - self.labelLine(labelstr,3,justify='right',tags=(fhrstr)) - # - # Turn off all but first - # - startBut1(self) - startBut2(self) - # - # Labels at top of graph - # - #if len(areaList)>1: - # if AreaCombine==1: - # numPts=0 - # for areaNum in areaNums: - # numPts+=self.pts[areaNum] - # else: - # numPts=-1 - #else: - # numPts=self.pts[areaNums[0]] - numPts=-1 - self.cdLabels(titleLine,numPts,dateStyle,dateType,numDays,fromDay, - dayList,cycleList) - # - # Done - show the results - # - self.stopWorking() - self.moveCD() - self.cd.deiconify() - self.cd.lift() - - return - - #================================================================== - # makeFhourGraph - display graph of average error at various fhrs - # - def makeFhourGraph(self,DialogDict): - self.VU.logMsg("running makeFhourGraph:") - display=DialogDict["Display"] - areaList=DialogDict["areaList"] - AreaCombine=DialogDict["AreaCombine"] - parmList=DialogDict["Parms"] - threshold=DialogDict["Threshold"] - cycleList=DialogDict["cycleList"] - 
modelList=DialogDict["Models"] - obsmodel=DialogDict["ObsModel"] - fcstrList=DialogDict["fcstrList"] - fhrStart=DialogDict["fhrStart"] - fhrEnd=DialogDict["fhrEnd"] - dateType=DialogDict["dateType"] - numDays=DialogDict["numDays"] - fromDay=DialogDict["fromDay"] - dayList=DialogDict["dayList"] - dateStyle=DialogDict["dateStyle"] - plotType=DialogDict["PlotType"] - scale=DialogDict["scale"] - commonCases=DialogDict["commonCases"] - accumHours=DialogDict["accumHours"] - accumFreq=DialogDict["accumFreq"] - TwoCatType=DialogDict["TwoCatType"] - TwoCatCond=DialogDict["TwoCatCond"] - TwoCatValue=DialogDict["TwoCatValue"] - TwoCatValueString=DialogDict["TwoCatValueString"] - # - # Check for good GUI input - # - ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, - dayList) - if ret==0: - return - # - # Check that we do not have too many things varying - # - if len(parmList)>1: - parmVary=1 - else: - parmVary=0 - if ((len(areaList)>1)and(AreaCombine==0)): - areaVary=1 - else: - areaVary=0 - if len(modelList)>1: - modelVary=1 - else: - modelVary=0 - totalVary=parmVary+areaVary+modelVary - if totalVary>2: - msg="Can only vary two of parm/area/model when doing 'vs. 
Fcst Hour' graphs" - self.statusBarMsg(msg,"U") - return - # - # If only varying one thing - then set the other to model, unless that - # is the one already being done - and then set to parm - # - if totalVary==1: - if modelVary==1: - parmVary=1 - else: - modelVary=1 - # - # - # - if parmVary==1: - if modelVary==1: - varList1=parmList[:] - varList2=modelList[:] - else: - varList1=parmList[:] - varList2=areaList[:] - else: - varList1=areaList[:] - varList2=modelList[:] - # - # Clear the cd canvas - setup title - # - self.cd.canvas.delete(Tkinter.ALL) - self.cd.title("Statistic Graph") - workStart="Working on Statistics" - self.startWorking(workStart,optionRemove=0) - # - # get names of selected edit areas - # - areaNames=[] - descList=self.VU.listEditAreaDescriptions() - nameList=self.VU.listEditAreas() - for areaDesc in areaList: - if areaDesc=="Current": - areaNames.append("Current") - elif areaDesc in descList: - areaNum=descList.index(areaDesc) - areaNames.append(nameList[areaNum]) - if len(areaNames)<1: - msg="Invalid Edit Area(s) - contact support" - self.statusBarMsg(msg,"U") - return - # - # For 'combined areas' - setup the comboArea just once - # - comboArea=self.empty(bool) - if ((AreaCombine==1)and(len(areaNames)>1)): - for areaName in areaNames: - if areaName=="Current": - areaObject=self.getActiveEditArea() - mask=self.encodeEditArea(areaObject) - any=add.reduce(add.reduce(mask)) - if any==0: - mask=self.newGrid(True, bool) - elif areaName=="NONE": - mask=self.newGrid(True, bool) - else: - mask=self.encodeEditArea(areaName) - comboArea=logical_or(comboArea,mask) - # - # If any of the TwoCat stats are requested - then get - # the contingency table entries instead - # - statName=display - if statName=="TwoCat": - statName="cont" - if TwoCatType[0:1]=="A": - statName="acont" - statVal=TwoCatValue - statCond=TwoCatCond - statID=self.VU.getStatID(TwoCatType) - # - # - # - # - # - # - sumdata={} - cntdata={} - hitsdata={} - missdata={} - falrdata={} - 
corndata={} - # - timemin=1e32 - timemax=-1e32 - valmin=1.0e32 - valmax=-1.0e32 - - countimax=len(parmList)*len(modelList) - counti=0 - for parm in parmList: - # - # - # - (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, - parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) - # - # setup readParm - which is usually the same as parm, but - # can be different for the Spd/Dir components of a vector - # - readParm=parm - vectorType=-1 - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if (last3 in ["Spd","Dir"]): - readParm=parm[:-3] - if last3=="Spd": - vectorType==0 - else: - vectorType==1 - # - # Get the observed parm for this parm, the verification type, - # the data type and the thresholds - # - obsParm=self.VU.getObsParm(readParm) - verType=self.VU.getVerType(readParm) - datatype=self.VU.getVerParmType(readParm) - thresholds=self.VU.getVerThresholds(readParm) - if last3=="Spd": - thresholds=thresholds[0] - elif last3=="dir": - thresholds=thresholds[1] - thresholdValue=thresholds[threshold] - # - # If using the threshold stat - set it now - # - if statName=="Percent Err <": - statVal=thresholdValue - # - # Get the statCases for this parm - # - statCases=self.VU.getStatCases(parm,modelList,obsmodel,dateStyle, - dateType,fromDay=fromDay,numDays=numDays, - dayList=dayList,fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, - accumFreq=accumFreq,commonCases=commonCases, - basetimeOffsets=1,callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - # - # Get the gridCases for this parm - # - gridCases=self.VU.getCases(readParm,modelList,obsParm,obsmodel, - dateStyle,dateType,fromDay=fromDay,numDays=numDays, - dayList=dayList,fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, - accumFreq=accumFreq,requireObs=1,commonCases=commonCases, - basetimeOffsets=1,callbackMethod=self.workingCommon) - if 
self.checkWorking()==1: - self.stopWorking() - return - for model in modelList: - counti+=1 - workStart="Reading %s %s (%d of %d)"%(parm,model,counti,countimax) - # - # get cases for this model - # - scases=statCases[model] - if model in gridCases.keys(): - gcases=gridCases[model] - else: - gcases={} - # - # get overall list of keys (both stat and grid) in tkeys - # - skeys=scases.keys() - gkeys=gcases.keys() - tkeys=skeys - for gkey in gkeys: - if gkey not in tkeys: - tkeys.append(gkey) - # - # Loop over possible stat or grid cases - # - count=0 - totalcount=len(tkeys) - for key in tkeys: - # - # Check for user interrupting - # - count+=1 - if self.setAndCheckWorking("%s: %d of %d"%(workStart,count,totalcount))==1: - self.stopWorking() - return - # - # Get times for this case - # - (basestr,stimestr,etimestr)=key.split(",") - basetime=int(basestr) - starttime=int(stimestr) - endtime=int(etimestr) - # - # Do not use results for grids that are not yet complete - # - if endtime>time.time(): - continue - # - # Do not show results for grids that start before - # forecast time - # - fhr=(starttime-basetime)/HOURSECS - if fhr<0: - continue - # - # Get list of records for this cases - # - if key in scases: - srecList=scases[key] - else: - srecList=None - if key in gcases: - grecList=gcases[key] - else: - grecList=None - # - # - # - # - # When areaCombine is on...we already have the combined - # edit area ready - # - if ((AreaCombine==1)and(len(areaNames)>1)): - eaGrid=comboArea - valx=self.VU.getVerStat(model,basetime,readParm,starttime, - endtime,obsmodel,statName,statVal=statVal, - statCond=statCond,editArea=eaGrid, - smooth=scale,vectorType=vectorType, - srecList=srecList,grecList=grecList) - if valx is None: - print "getVerStat returned None" - continue - # - # store sums in parm,model,fhr,areaName keys - # - outkey="%s,%s,%3.3d,-01"%(parm,model,fhr) - if display!="TwoCat": - if outkey not in sumdata.keys(): - sumdata[outkey]=0.0 - cntdata[outkey]=0 - 
sumdata[outkey]+=valx - cntdata[outkey]+=1 - else: - if outkey not in hitsdata.keys(): - hitsdata[outkey]=0 - missdata[outkey]=0 - falrdata[outkey]=0 - corndata[outkey]=0 - (hits,miss,falr,corn)=valx - hitsdata[outkey]+=hits - missdata[outkey]+=miss - falrdata[outkey]+=falr - corndata[outkey]+=corn - else: - for areaName in areaNames: - if areaName=="Current": - areaObject=self.getActiveEditArea() - ea=self.encodeEditArea(areaObject) - any=add.reduce(add.reduce(ea)) - if any==0: - ea=self.newGrid(True, bool) - elif areaName=="NONE": - ea=self.newGrid(True, bool) - else: - ea=areaName - valx=self.VU.getVerStat(model,basetime,readParm,starttime, - endtime,obsmodel,statName,statVal=statVal, - statCond=statCond,editArea=ea, - smooth=scale,vectorType=vectorType, - srecList=srecList,grecList=grecList) - if valx is None: - print "getVerStat returned None" - continue - # - # - # - outkey="%s,%s,%3.3d,%s"%(parm,model,fhr,areaName) - if display!="TwoCat": - if display=="RMS Error": - valx=valx**2 - if outkey not in sumdata.keys(): - sumdata[outkey]=0.0 - cntdata[outkey]=0 - sumdata[outkey]+=valx - cntdata[outkey]+=1 - else: - if outkey not in hitsdata.keys(): - hitsdata[outkey]=0 - missdata[outkey]=0 - falrdata[outkey]=0 - corndata[outkey]=0 - cntdata[outkey]=0 - (hits,miss,falr,corn)=valx - hitsdata[outkey]+=hits - missdata[outkey]+=miss - falrdata[outkey]+=falr - corndata[outkey]+=corn - cntdata[outkey]+=1 - # - # if no data could be read - stop here - # - if len(cntdata.keys())<1: - self.stopWorking() - msg="No verification data could be found matching those criteria" - self.statusBarMsg(msg,"U") - return - # - # We now have the sums...calculate the scores - # - fhrList=[] - valmin=1e32 - valmax=-1e32 - timemin=0 - timemax=0 - outdata={} - for key in cntdata.keys(): - if cntdata[key]<1: - continue - if display!="TwoCat": - stat=float(sumdata[key])/float(cntdata[key]) - if display=="RMS Error": - stat=sqrt(stat) - else: - hits=hitsdata[key] - miss=missdata[key] - 
falr=falrdata[key] - corn=corndata[key] - stat=self.VU.getTwoCatStat(statID,hits,miss,falr,corn) - # - # - # - valmin=min(valmin,stat) - valmax=max(valmax,stat) - (parm,model,fhrstr,areaName)=key.split(",") - areaNum=self.VU.getEditAreaNumberFromName(areaName) - fhr=int(fhrstr) - timemax=max(fhr,timemax) - if fhr not in fhrList: - fhrList.append(fhr) - if parmVary==1: - if modelVary==1: - outkey="%s,%s"%(parm,model) - else: - # - # ******** ???????? number or name? What if 'current' or -1? - # - outkey="%s,%s"%(parm,self.VU.EditAreaDescriptions[int(areaNum)]) - else: - outkey="%s,%s"%(self.VU.EditAreaDescriptions[int(areaNum)],model) - if outkey not in outdata.keys(): - outdata[outkey]=[] - outdata[outkey].append((fhr,stat)) - # - # Bounded ones always show 0.0 - # - if display in ["RMS Error","Std Dev","Mean Abs Error"]: - valmin=0.0 - # - # If values are constant (compared to precision of this element) - # make the graph show slightly more range - # - prec1=10**(-parmPrecision) - minRange=10**(-(parmPrecision+1)) - #minRange=0.1 - graphrange=valmax-valmin - if graphrange3: - self.cd.canvas.create_line(gpoints,fill=colorname,tags=tagtuple) - self.but1state[tag1]=1 - self.but2state[tag2]=1 - # - # Turn off all but first model and first time - # - startBut1(self) - startBut2(self) - # - # Labels - # - if len(areaList)>1: - areaName="Various" - if AreaCombine==1: - numPts=0 - for areaNum in areaNums: - numPts+=self.pts[areaNum] - else: - numPts=-1 - else: - #numPts=self.pts[areaNums[0]] - numPts=-1 - ul1="Average Error Growth - %s"%parm - self.cdLabels(ul1,numPts,dateStyle,dateType,numDays,fromDay,dayList,cycleList) - - self.stopWorking() - self.moveCD() - self.cd.deiconify() - self.cd.lift() - - return - #================================================================== - # getStat - assuming that the statfile is open correctly, get - # the value for the specified 'display', for the - # record, area, and threshold number - # - def 
getStat(self,record,areaNum,display,threshold): - if display=="Bias": - val=self.VU.sncStats[record,areaNum,0] - elif display=="Squared Error": - val=self.VU.sncStats[record,areaNum,1] - elif display=="RMS Error": - val=sqrt(self.VU.sncStats[record,areaNum,1]) - elif display=="Std Dev": - sum=self.VU.sncStats[record,areaNum,0] - sqr=self.VU.sncStats[record,areaNum,1] - val=sqrt(sqr-(sum*sum)) - elif display=="Mean Abs Error": - val=self.VU.sncStats[record,areaNum,2] - elif display=="Mean Fcst": - val=self.VU.sncStats[record,areaNum,3] - elif display=="Mean Squared Fcst": - val=self.VU.sncStats[record,areaNum,4] - elif display=="Mean Obs": - val=self.VU.sncStats[record,areaNum,5] - elif display=="Mean Squared Obs": - val=self.VU.sncStats[record,areaNum,6] - elif display=="Covariance": - val=self.VU.sncStats[record,areaNum,7] - elif display=="Percent Err <": - val=self.VU.sncStats[record,areaNum,8+threshold] - else: - print "unknown stat type" - val=0 - return val - #================================================================== - # ShowScaleStats - make graphs of stat vs scale - # - def ShowScaleStats(self,DialogDict): - self.VU.logMsg("running ShowScaleStats:") - display=DialogDict["Display"] - areaList=DialogDict["areaList"] - AreaCombine=DialogDict["AreaCombine"] - parm=DialogDict["Parm"] - threshold=DialogDict["Threshold"] - cycleList=DialogDict["cycleList"] - modelList=DialogDict["Models"] - obsmodel=DialogDict["ObsModel"] - fcstrList=DialogDict["fcstrList"] - fhrStart=DialogDict["fhrStart"] - fhrEnd=DialogDict["fhrEnd"] - dateType=DialogDict["dateType"] - numDays=DialogDict["numDays"] - fromDay=DialogDict["fromDay"] - dayList=DialogDict["dayList"] - dateStyle=DialogDict["dateStyle"] - commonCases=DialogDict["commonCases"] - accumHours=DialogDict["accumHours"] - accumFreq=DialogDict["accumFreq"] - TwoCatType=DialogDict["TwoCatType"] - TwoCatCond=DialogDict["TwoCatCond"] - TwoCatValue=DialogDict["TwoCatValue"] - 
TwoCatValueString=DialogDict["TwoCatValueString"] - # - # Check for good GUI input - # - parmList=[parm] - ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, - dayList) - if ret==0: - return - # - # Check that we do not have too many things varying - # - parmVary=0 - areaVary=0 - modelVary=1 - # - # Clear the cd canvas - setup title - # - self.cd.canvas.delete(Tkinter.ALL) - self.cd.title("Statistic Graph") - workStart="Working on Statistics vs. Scale" - self.startWorking(workStart,optionRemove=0) - # - # get names of selected edit areas - # - areaNames=[] - descList=self.VU.listEditAreaDescriptions() - nameList=self.VU.listEditAreas() - for areaDesc in areaList: - if areaDesc=="Current": - areaNames.append("Current") - elif areaDesc in descList: - areaNum=descList.index(areaDesc) - areaNames.append(nameList[areaNum]) - if len(areaNames)<1: - msg="Invalid Edit Area(s) - contact support" - self.statusBarMsg(msg,"U") - return - # - # Setup the combined area - # - comboArea=self.empty(bool) - for areaName in areaNames: - if areaName=="Current": - areaObject=self.getActiveEditArea() - mask=self.encodeEditArea(areaObject) - any=add.reduce(add.reduce(mask)) - if any==0: - mask=self.newGrid(True, bool) - elif areaName=="NONE": - mask=self.newGrid(True, bool) - else: - mask=self.encodeEditArea(areaName) - comboArea=logical_or(comboArea,mask) - # - # If any of the TwoCat stats are requested - then get - # the contingency table entries instead - # - statName=display - if statName=="TwoCat": - statName="cont" - if TwoCatType[0:1]=="A": - statName="acont" - statVal=TwoCatValue - statCond=TwoCatCond - statID=self.VU.getStatID(TwoCatType) - # - # - # - sumdata={} - cntdata={} - hitsdata={} - missdata={} - falrdata={} - corndata={} - # - timemin=1e32 - timemax=-1e32 - valmin=1.0e32 - valmax=-1.0e32 - - countimax=len(parmList)*len(modelList) - counti=0 - # - # setup readParm - which is usually the same as parm, but - # can be different for the Spd/Dir components 
of a vector - # - readParm=parm - vectorType=-1 - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if (last3 in ["Spd","Dir"]): - readParm=parm[:-3] - if last3=="Spd": - vectorType==0 - else: - vectorType==1 - # - # Get the observed parm for this parm, the verification type, - # the data type and the thresholds - # - obsParm=self.VU.getObsParm(readParm) - verType=self.VU.getVerType(readParm) - datatype=self.VU.getVerParmType(readParm) - thresholds=self.VU.getVerThresholds(readParm) - if last3=="Spd": - thresholds=thresholds[0] - elif last3=="dir": - thresholds=thresholds[1] - thresholdValue=thresholds[threshold] - # - # If using the threshold stat - set it now - # - if statName=="Percent Err <": - statVal=thresholdValue - # - # Get the statCases for this parm - # - statCases=self.VU.getStatCases(parm,modelList,obsmodel,dateStyle, - dateType,fromDay=fromDay,numDays=numDays, - dayList=dayList,fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, - accumFreq=accumFreq,commonCases=commonCases, - basetimeOffsets=1,callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - # - # Get the gridCases for this parm - # - gridCases=self.VU.getCases(readParm,modelList,obsParm,obsmodel, - dateStyle,dateType,fromDay=fromDay,numDays=numDays, - dayList=dayList,fcstrs=fcstrList,cycles=cycleList, - fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, - accumFreq=accumFreq,requireObs=1,commonCases=commonCases, - basetimeOffsets=1,callbackMethod=self.workingCommon) - if self.checkWorking()==1: - self.stopWorking() - return - for model in modelList: - counti+=1 - workStart="Reading %s %s (%d of %d)"%(parm,model,counti,countimax) - # - # get cases for this model - # - scases=statCases[model] - if model in gridCases.keys(): - gcases=gridCases[model] - else: - gcases={} - # - # get overall list of keys (both stat and grid) in tkeys - # - skeys=scases.keys() - gkeys=gcases.keys() - tkeys=skeys - for gkey in gkeys: 
- if gkey not in tkeys: - tkeys.append(gkey) - # - # Loop over possible stat or grid cases - # - count=0 - totalcount=len(tkeys) - for key in tkeys: - # - # Check for user interrupting - # - count+=1 - if self.setAndCheckWorking("%s: %d of %d"%(workStart,count,totalcount))==1: - self.stopWorking() - return - # - # Get times for this case - # - (basestr,stimestr,etimestr)=key.split(",") - basetime=int(basestr) - starttime=int(stimestr) - endtime=int(etimestr) - # - # Do not use results for grids that are not yet complete - # - if endtime>time.time(): - continue - # - # Do not show results for grids that start before - # forecast time - # - fhr=(starttime-basetime)/HOURSECS - if fhr<0: - continue - # - # Get list of records for this cases - # - if key in scases: - srecList=scases[key] - else: - srecList=None - if key in gcases: - grecList=gcases[key] - else: - grecList=None - # - # Loop over scales - # - smoothList=[] - for (scale,text) in self.scaleList: - smoothList.append(scale) - - valx=self.VU.getVerStatScales(model,basetime,readParm,starttime, - endtime,obsmodel,statName,statVal=statVal, - statCond=statCond,editArea=comboArea, - smoothList=smoothList,vectorType=vectorType, - grecList=grecList) - if valx is None: - print "getVerStatScales returned None" - continue - if len(valx)<1: - print "getVerStatScales returned empty list" - continue - - for i in xrange(len(smoothList)): - (scale,text)=self.scaleList[i] - val=valx[i] - # - # store sums in model,fhr,scale keys - # - outkey="%s,%3.3d,%4.4d"%(model,fhr,scale) - if display!="TwoCat": - if outkey not in sumdata.keys(): - sumdata[outkey]=0.0 - cntdata[outkey]=0 - sumdata[outkey]+=val - cntdata[outkey]+=1 - else: - if outkey not in hitsdata.keys(): - hitsdata[outkey]=0 - missdata[outkey]=0 - falrdata[outkey]=0 - corndata[outkey]=0 - (hits,miss,falr,corn)=val - hitsdata[outkey]+=hits - missdata[outkey]+=miss - falrdata[outkey]+=falr - corndata[outkey]+=corn - # - # if no data could be read - stop here - # - if 
len(cntdata.keys())<1: - self.stopWorking() - msg="No verification data could be found matching those criteria" - self.statusBarMsg(msg,"U") - return - # - # We now have the sums...calculate the scores - # - fhrList=[] - valmin=1e32 - valmax=-1e32 - scalemin=0 - scalemax=0 - outdata={} - for key in cntdata.keys(): - if cntdata[key]<1: - continue - if display!="TwoCat": - stat=float(sumdata[key])/float(cntdata[key]) - if display=="RMS Error": - stat=sqrt(stat) - else: - hits=hitsdata[key] - miss=missdata[key] - falr=falrdata[key] - corn=corndata[key] - stat=self.VU.getTwoCatStat(statID,hits,miss,falr,corn) - # - # - # - valmin=min(valmin,stat) - valmax=max(valmax,stat) - (model,fhrstr,scalestr)=key.split(",") - fhr=int(fhrstr) - scale=int(scalestr) - scalemax=max(scale,scalemax) - if fhr not in fhrList: - fhrList.append(fhr) - outkey="%s,%s"%(fhrstr,model) - if outkey not in outdata.keys(): - outdata[outkey]=[] - outdata[outkey].append((scale,stat)) - # - # Bounded ones always show 0.0 - # - if display in ["RMS Error","Std Dev","Mean Abs Error"]: - valmin=0.0 - # - # If values are constant - show one up - # - graphrange=valmax-valmin - if graphrange<0.01: - valmax+=1.0 - # - # - allkeys=outdata.keys() - allkeys.sort() - # - # Get lists of the actual buttons we have data for - # - varBut1=[] - varBut2=[] - for key in outdata.keys(): - (but1,but2)=key.split(",") - if but1 not in varBut1: - varBut1.append(but1) - if but2 not in varBut2: - varBut2.append(but2) - varBut1.sort() - varBut2.sort() - - # - # In model list - make sure Official comes first - # - if "Official" in varBut2: - idx=varBut2.index("Official") - del varBut2[idx] - varBut2.insert(0,"Official") - # - # Associate colors with the varList2 - # - self.colornames={} - index=0 - for var in varBut2: - self.colornames[var]=self.COLORLIST[index] - index+=1 - if index==len(self.COLORLIST): - index=0 - # - # setup buttons - # - self.setupBut1(varBut1,numbuttons=6,arrows=1) - 
self.setupBut2(varBut2,numbuttons=6,arrows=1) - # - # Setup graphing coordinates - # - numticks=10 - graphrange=valmax-valmin - tickInterval=self.niceNumDec(graphrange/(numticks-1),1) - - left=self.cd.curwidth*(50.0/700.0) - right=self.cd.curwidth*(650.0/700.0) - bot=self.cd.curheight*(130.0/530.0) - top=self.cd.curheight*(480.0/530.0) - self.setgraph(scalemin,scalemax,valmin,valmax,left,right,bot,top) - #self.fhouraxes(timemin,timemax,valmin,valmax) - # - # Draw timeseries lines - # - for key in outdata.keys(): - (tag1,tag2)=key.split(",") - tagtuple=(tag1,tag2) - colorname=self.colornames[tag2] - points=outdata[key] - points.sort() - gpoints=[] - for point in points: - (scale,val)=point - (x,y)=self.graphcoord(scale,val) - gpoints.append(x) - gpoints.append(y) - if len(gpoints)>3: - self.cd.canvas.create_line(gpoints,fill=colorname,tags=tagtuple) - self.but1state[tag1]=1 - self.but2state[tag2]=1 - # - # Turn off all but first model and first time - # - startBut1(self) - startBut2(self) - # - # Labels - # - if len(areaList)>1: - areaName="Various" - if AreaCombine==1: - numPts=0 - for areaNum in areaNums: - numPts+=self.pts[areaNum] - else: - numPts=-1 - else: - #numPts=self.pts[areaNums[0]] - numPts=-1 - ul1="Average Error Growth - %s"%parm - self.cdLabels(ul1,numPts,dateStyle,dateType,numDays,fromDay,dayList,cycleList) - - self.stopWorking() - self.moveCD() - self.cd.deiconify() - self.cd.lift() - - return - #================================================================== - # getReadMode - figure out if parm is a rateParm...and set mode - # to "Sum" if it is. - # If not...and checkProb is set...figure out if the - # parm is a probability parm and set mode to - # "Max" if it is (floating PoP). 
- # Otherwise set to "Average" - # - def getReadMode(self,model,parmName,checkProb=1): - rateFlag=self.VU.getRateFlag(model,parmName) - if (rateFlag==1): - readMode="Sum" - else: - readMode="TimeWtAverage" - if checkProb==1: - verType=self.VU.getVerType(parmName) - if verType==1: - readMode="Max" - return readMode - - #================================================================== - # workingCommon - suitable for a callback that provides a message - # like (x of y), so that it sets the 'working' - # display - and returns a 1 if the 'stop' button - # has been set - # - def workingCommon(self,message): - fullmsg="Finding Common Cases: %s"%message - return self.setAndCheckWorking(fullmsg) - #================================================================== - # setupBut1 - setup button 1 buttons. Names in butList are copied - # to self.but1names[]. Desired buttons on a row in - # numbuttons. arrows flag adds 'prev/next' buttons. - # - # After setup, self.but1names[] holds names. - # self.but1{} holds button references - # self.but1state{} holds button state - def setupBut1(self,butList,numbuttons=5,arrows=0,width=0): - # - # clear old buttons (fbar holds button 1s) - # - slaves=self.cd.fbar.pack_slaves() - if slaves is not None: - for slave in slaves: - slave.destroy() - # - # put generic 'move left' button on the side - # - if ((arrows==1)and(len(butList)>1)): - cb=GenericCallback(prevBut1,self) - prev=Tkinter.Button(self.cd.fbar,text="<",padx=2,pady=0, - fg="black",command=cb) - prev.pack(side=Tkinter.LEFT,fill=Tkinter.Y) - # - # figure number of rows of buttons - and create frames - # - numrows=int((float(len(butList))/float(numbuttons))+0.5) - if numrows<1: - numrows=1 - self.fbarmodrow=[] - for i in xrange(numrows): - self.fbarmodrow.append(Tkinter.Frame(self.cd.fbar)) - numinrow=int(float(len(butList))/float(numrows))+1 - # - # Make buttons - # - self.but1names=butList[:] - self.but1text=butList[:] - for i in xrange(len(self.but1names)): - 
but=self.but1names[i] - if but.isdigit(): - self.but1names[i]="f%s"%but - num=1 - self.but1={} - self.but1state={} - for i in xrange(len(self.but1names)): - but=self.but1names[i] - buttext=self.but1text[i] - cb=GenericCallback(showBut1,self,but) - row=int(float(num)/float(numinrow)) - if width==0: - self.but1[but]=Tkinter.Button(self.fbarmodrow[row],text=buttext, - padx=2,pady=0,fg="black", - command=cb) - else: - self.but1[but]=Tkinter.Button(self.fbarmodrow[row],text=buttext, - width=width,padx=2,pady=0,fg="black", - command=cb) - self.but1[but].pack(side=Tkinter.LEFT) - num+=1 - # - # put generic 'move right' button on the side - # - if ((arrows==1)and(len(butList)>1)): - cb=GenericCallback(nextBut1,self) - next=Tkinter.Button(self.cd.fbar,text=">",padx=2,pady=0, - fg="black",command=cb) - next.pack(side=Tkinter.RIGHT,fill=Tkinter.Y) - # - # pack buttons between possible next/prev buttons - # - for i in xrange(numrows): - self.fbarmodrow[i].pack(side=Tkinter.TOP) - # - # Update cd widget - so size of buttonbars don't affect size of - # the current canvas - # - self.cd.update_idletasks() - hgt1=self.cd.bar.winfo_reqheight() - hgt2=self.cd.fbar.winfo_reqheight() - hgt3=28+536 # size of exit button bar + smallest canvas height - hgt=hgt1+hgt2+hgt3 - self.cd.minsize(706,hgt) - geo=self.cd.geometry() - (wh,of)=geo.split("+",1) - (wid,oldhgt)=wh.split("x",1) - if hgt>int(oldhgt): - self.cd.geometry("%sx%d+%s"%(wid,hgt,of)) - self.cd.update_idletasks() - return - #================================================================== - # setupBut2 - setup button 2 buttons. Names in butList are copied - # to self.but2names[]. Desired buttons on a row in - # numbuttons. arrows flag adds 'prev/next' buttons. - # - # After setup, self.but2names[] holds names. 
- # self.but2{} holds button references - # self.but2state{} holds button state - # - def setupBut2(self,butList,numbuttons=5,arrows=0,width=0): - # - # remove old buttons (bar holds button 2s) - # - slaves=self.cd.bar.pack_slaves() - if slaves is not None: - for slave in slaves: - slave.destroy() - # - # put generic 'move left' button on the side - # - if ((arrows==1)and(len(butList)>1)): - cb=GenericCallback(prevBut2,self) - prev=Tkinter.Button(self.cd.bar,text="<",padx=2,pady=0, - fg="black",command=cb) - prev.pack(side=Tkinter.LEFT,fill=Tkinter.Y) - # - # figure number of rows of buttons - and create frames - # - numrows=int((float(len(butList))/float(numbuttons))+0.5) - if numrows<1: - numrows=1 - self.barmodrow=[] - for i in xrange(numrows): - self.barmodrow.append(Tkinter.Frame(self.cd.bar)) - numinrow=int(float(len(butList))/float(numrows))+1 - # - # Make buttons - # - self.but2names=butList[:] - num=1 - self.but2={} - self.but2state={} - for i in xrange(len(self.but2names)): - but=self.but2names[i] - cb=GenericCallback(showBut2,self,but) - row=int(float(num)/float(numinrow)) - if width==0: - self.but2[but]=Tkinter.Button(self.barmodrow[row],text=but, - padx=2,pady=0,fg=self.colornames[but], - command=cb) - else: - self.but2[but]=Tkinter.Button(self.barmodrow[row],text=but,width=width, - padx=2,pady=0,fg=self.colornames[but], - command=cb) - self.but2[but].pack(side=Tkinter.LEFT) - num+=1 - # - # put generic 'move right' button on the side - # - if ((arrows==1)and(len(butList)>1)): - cb=GenericCallback(nextBut2,self) - next=Tkinter.Button(self.cd.bar,text=">",padx=2,pady=0, - fg="black",command=cb) - next.pack(side=Tkinter.RIGHT,fill=Tkinter.Y) - # - # pack buttons between possible next/prev buttons - # - for i in xrange(numrows): - self.barmodrow[i].pack(side=Tkinter.TOP) - # - # Update cd widget and its minsize - so size of buttonbar - # doesn't affect size of the current canvas - # - self.cd.update_idletasks() - hgt1=self.cd.bar.winfo_reqheight() - 
hgt2=self.cd.fbar.winfo_reqheight() - hgt3=28+536 # size of exit button bar + smallest canvas height - hgt=hgt1+hgt2+hgt3 - self.cd.minsize(706,hgt) - geo=self.cd.geometry() - (wh,of)=geo.split("+",1) - (oldwid,oldhgt)=wh.split("x",1) - if hgt>int(oldhgt): - self.cd.geometry("%sx%d+%s"%(oldwid,hgt,of)) - self.cd.update_idletasks() - return - #================================================================== - # cdLabels - labels at the top of the canvas - # - def cdLabels(self,ul1,numPts,dateStyle,dateType,numDays,fromDay,dayList,cycleList): - # - # Upper Left has variable text - # - self.labelLine(ul1,1,justify="left") - # - str="Gridpoints in editarea: %d"%numPts - self.labelLine(str,2,justify="left") - # - # Dates - # - timelabel=dateStyle - if dateType=="Period Length": - (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(fromDay) - if numDays==1: - timelabel+=" %4.4d/%2.2d/%2.2d"%(gyea,gmon,gday) - else: - timelabel+=" the %d days ending on %4.4d/%2.2d/%2.2d"%(numDays,gyea,gmon,gday) - else: - if len(dayList)==1: - (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(dayList[0]) - timelabel+=" %4.4d/%2.2d/%2.2d"%(gyea,gmon,gday) - else: - timelabel+=" several dates" - self.labelLine(timelabel,1,justify="right") - # - # Cycles - # - runlabel="" - if len(cycleList)>1: - for cyc in cycleList: - runlabel+="%2.2d+"%cyc - runlabel=runlabel[:-1]+" UTC Runs" - else: - runlabel="%2.2d"%cycleList[0]+" UTC Run ONLY" - self.labelLine(runlabel,2,justify="right") - return - #================================================================== - # labLine - draw a label - # - def labelLine(self,text,lineNum,color="black",justify="left", - tags=None): - lineheight=15 - yoff=5 - xoff=5 - - y=((lineNum-1)*lineheight)+yoff - if justify=="left": - x=xoff - anchorType=Tkinter.NW - else: - x=self.cd.curwidth-xoff - anchorType=Tkinter.NE - self.cd.canvas.create_text(x,y,text=text,fill=color, - anchor=anchorType,tags=tags) - return - 
#================================================================== - # checkLists - check lists returned from GUI to make sure at least - # one is chosen - # - def checkLists(self,modelList,parmList,cycleList,fcstrList,dateType, - dayList): - if (len(modelList)<1): - self.statusBarMsg("Must choose at least one model","U") - return 0 - if (len(parmList)<1): - self.statusBarMsg("Must choose at least one parm","U") - return 0 - if (len(cycleList)<1): - self.statusBarMsg("Must choose at least one cycle","U") - return 0 - if (len(fcstrList)<1): - self.statusBarMsg("Must choose at least one forecaster","U") - return 0 - if dateType=="List of dates": - if (len(dayList)<1): - self.statusBarMsg("Must choose at least one date","U") - return 0 - return 1 - #================================================================== - # - # code to scale everything on the canvas so that you always display - # the same area that you started with - # - def resizecanvas(self,event): - scalex=float(event.width)/self.curwidth - scaley=float(event.height)/self.curheight - if ((scalex!=1.0)or(scaley!=1.0)): - self.canvas.scale("all",0.0,0.0,scalex,scaley) - self.curwidth=float(event.width) - self.curheight=float(event.height) - #bw=2 - #self.canwidth=self.curwidth-((bw+1.0)*2.0) - #self.canheight=self.canheight-((bw+1.0)*2.0) - #print "resize canvas gives width/height as: %7.2f,%7.2f"%(self.curwidth,self.curheight) - return - # - # setup graph coordintes - # - def setgraph(self,xmin,xmax,ymin,ymax,sxmin,sxmax,symin,symax): - self.xmin=xmin - self.xmax=xmax - self.ymin=ymin - self.ymax=ymax - self.xmult=(sxmax-sxmin)/(xmax-xmin) - self.xoff=sxmin - self.ymult=(symax-symin)/(ymax-ymin) - self.yoff=symax - def graphcoord(self,x,y): - newx=((x-self.xmin)*self.xmult)+self.xoff - newy=self.yoff-((y-self.ymin)*self.ymult) - return newx,newy - #================================================================== - # - # draw histogram axes - # - def histoaxes(self,maxheight,minx,maxx,binwidth,htick): 
- (sx,sy)=self.graphcoord(0.0,0.0) - (tx,ty)=self.graphcoord(0.0,maxheight) - self.cd.canvas.create_line(sx,sy,tx,ty) - self.vtick(0.0,5,0.0,maxheight,htick,label=1,labeloffset=10, - skipfirst=1,labelinterval=2) - #minx=binmin[1]+(binwidth/2.0) - #maxx=binmax[len(binmax)-2]-(binwidth/2.0) - (sx,sy)=self.graphcoord(minx,0.0) - (tx,ty)=self.graphcoord(maxx,0.0) - self.cd.canvas.create_line(sx,sy,tx,ty) - numticks=10 - tickInterval=self.niceNumDec(maxx/(numticks-1),1) - self.htick(0.0,5,0.0,maxx,tickInterval,label=1, - labeloffset=5,labelinterval=2,skipfirst=1) - self.htick(0.0,5,0.0,maxx,tickInterval,label=1, - labeloffset=5,labelinterval=2,skipfirst=1,negative=1) - #================================================================== - # probaxes - draw axes for probability reliability diagrams - # - def probaxes(self): - (llx,lly)=self.graphcoord(0,0) - (urx,ury)=self.graphcoord(100,100) - self.cd.canvas.create_line(llx,lly,urx,lly,urx,ury,llx,ury,llx,lly,urx,ury) - self.vtick(0,5,0,100,10,label=1, - labelinterval=1,labeloffset=-10.0,labelanchor=Tkinter.E) - self.vtick(100,5,0,100,10,label=1, - labelinterval=1,labeloffset=10.0,labelanchor=Tkinter.W) - self.htick(0,5,0,100,10,label=1, - labelinterval=1,labeloffset=8.0,labelanchor=Tkinter.N) - self.htick(100,5,0,100,10,label=1, - labelinterval=1,labeloffset=-8.0,labelanchor=Tkinter.S) - (midx,ny)=self.graphcoord(50,0) - self.cd.canvas.create_text(midx,ny+20,anchor=Tkinter.N,text="Forecast Probability") - (nx,midy)=self.graphcoord(0,50) - self.cd.canvas.create_text(nx-35,midy,anchor=Tkinter.E,text="O\nb\ns\ne\nr\nv\ne\nd\n \nF\nr\ne\nq\nu\ne\nc\ny") - # - #================================================================== - # - # draw graph axes - # - def graphaxes(self,timemin,timemax,valmin,valmax): - (llx,lly)=self.graphcoord(timemin,valmin) - (urx,ury)=self.graphcoord(timemax,valmax) - self.cd.canvas.create_line(llx,lly,urx,lly,urx,ury,llx,ury,llx,lly) - zeroline=0 - if ((valmin<0.0)and(valmax>0.0)): - 
(lx,zy)=self.graphcoord(timemin,0.0) - self.cd.canvas.create_line(llx,zy,urx,zy) - zeroline=1 - numticks=10 - self.timetick(timemin,timemax,numticks,valmin,5,label=1,labeloffset=10) - self.timetick(timemin,timemax,numticks,valmax,5,label=1,labeloffset=-10, - labelanchor=Tkinter.S) - if zeroline==1: - self.timetick(timemin,timemax,numticks,0.0,5,label=0) - # - numticks=10 - valInterval=self.niceNumDec((valmax-valmin)/(numticks-1),1) - if zeroline==1: - self.vtick(timemin,5,0,valmax,valInterval,label=1, - labeloffset=-10,labelanchor=Tkinter.E) - self.vtick(timemin,5,0,-valmin,valInterval,label=1, - labeloffset=-10,labelanchor=Tkinter.E,negative=1) - self.vtick(timemax,5,0,valmax,valInterval,label=1, - labeloffset=10,labelanchor=Tkinter.W) - self.vtick(timemax,5,0,-valmin,valInterval,label=1, - labeloffset=10,labelanchor=Tkinter.W,negative=1) - else: - self.vtick(timemin,5,valmin,valmax,valInterval,label=1, - labeloffset=-10,labelanchor=Tkinter.E) - self.vtick(timemax,5,valmin,valmax,valInterval,label=1, - labeloffset=10,labelanchor=Tkinter.W) - #================================================================== - # - # draw fhour axes - # - def fhouraxes(self,timemin,timemax,valmin,valmax): - (llx,lly)=self.graphcoord(timemin,valmin) - (urx,ury)=self.graphcoord(timemax,valmax) - self.cd.canvas.create_line(llx,lly,urx,lly,urx,ury,llx,ury,llx,lly) - zeroline=0 - if ((valmin<0.0)and(valmax>0.0)): - (lx,zy)=self.graphcoord(timemin,0.0) - self.cd.canvas.create_line(llx,zy,urx,zy) - zeroline=1 - finterval=6 - if timemax>120: - finterval=24 - if timemax>48: - finterval=12 - #finterval=self.niceNumDec((timemax-timemin)/(numticks-1),1) - self.htick(valmin,5,timemin,timemax,finterval,label=1,labeloffset=+5, - labelanchor=Tkinter.N) - self.htick(valmax,5,timemin,timemax,finterval,label=1,labeloffset=-5, - labelanchor=Tkinter.S) - if zeroline==1: - self.htick(0.0,5,timemin,timemax,finterval,label=0) - #self.timetick(timemin,timemax,numticks,valmin,5,label=1,labeloffset=10) - 
#self.timetick(timemin,timemax,numticks,valmax,5,label=1,labeloffset=-10, - # labelanchor=Tkinter.S) - #if zeroline==1: - # self.timetick(timemin,timemax,numticks,0.0,5,label=0) - # - numticks=10 - valInterval=self.niceNumDec((valmax-valmin)/(numticks-1),1) - if zeroline==1: - self.vtick(timemin,5,0,valmax,valInterval,label=1, - labeloffset=-10,labelanchor=Tkinter.E) - self.vtick(timemin,5,0,-valmin,valInterval,label=1, - labeloffset=-10,labelanchor=Tkinter.E,negative=1, - skipfirst=1) - self.vtick(timemax,5,0,valmax,valInterval,label=1, - labeloffset=10,labelanchor=Tkinter.W) - self.vtick(timemax,5,0,-valmin,valInterval,label=1, - labeloffset=10,labelanchor=Tkinter.W,negative=1, - skipfirst=1) - else: - self.vtick(timemin,5,valmin,valmax,valInterval,label=1, - labeloffset=-10,labelanchor=Tkinter.E) - self.vtick(timemax,5,valmin,valmax,valInterval,label=1, - labeloffset=10,labelanchor=Tkinter.W) - #================================================================== - # - def timetick(self,minsecs,maxsecs,desirednum,yval,ywid,label=1,labeloffset=-10,labelanchor=Tkinter.N): - #print "in timetick with %d-%d, desired:%d"%(minsecs,maxsecs,desirednum) - numrange=desirednum*0.75 - minnum=desirednum-numrange - maxnum=desirednum+numrange - monString=[" ","JAN","FEB","MAR","APR","MAY","JUN","JUL","AUG","SEP","OCT","NOV","DEC"] - #print "minnum-maxnum: %d-%d"%(minnum,maxnum) - HOUR=3600 - DAY=24*HOUR - MONTH=30*DAY - YEAR=365*DAY - tryintervals=[(3,HOUR), - (6,HOUR), - (12,HOUR), - (1,DAY), - (2,DAY), - (5,DAY), - (15,DAY), - (1,MONTH), - (2,MONTH), - (3,MONTH), - (6,MONTH), - (1,YEAR)] - - secondsRange=maxsecs-minsecs - intervals=[] - for (interval,base) in tryintervals: - intervalSeconds=interval*base - num=int(secondsRange/intervalSeconds) - if ((num>minnum)and(numminsecs: - (tx,ty)=self.graphcoord(newTime,yval) - self.cd.canvas.create_line(tx,ty-ywid,tx,ty+ywid) - if label==1: - if bestBase==HOUR: - if ((maxsecs-minsecs)/HOUR)>24: - labelstring="%d %2.2dZ"%(gday,ghou) - 
else: - labelstring="%2.2dZ"%ghou - elif bestBase==DAY: - if ((maxsecs-minsecs)/DAY)>28: - labelstring="%d/%d"%(gmon,gday) - else: - labelstring="%d"%gday - else: - if ((maxsecs-minsecs)/MONTH)>9: - labelstring="%d/%2.2d"%(gmon,gyea%100) - else: - labelstring="%s"%monString[gmon] - #labelstring="%d/%d %2.2dZ"%(gmon,gday,ghou) - self.cd.canvas.create_text(tx,ty+labeloffset,anchor=labelanchor, - text=labelstring) - - #print "tick at %4.4d/%2.2d/%2.2d %2.2dZ"%(gyea,gmon,gday,ghou) - if bestBase==HOUR: - tryTime=calendar.timegm((gyea,gmon,gday,ghou+bestInterval,gmin,gsec,gwda,gyda,gdst)) - elif bestBase==DAY: - tryTime=calendar.timegm((gyea,gmon,gday+bestInterval,ghou,gmin,gsec,gwda,gyda,gdst)) - else: - newmon=gmon+bestInterval - if newmon>12: - gyea+=1 - newmon=1 - tryTime=calendar.timegm((gyea,newmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)) - (nyea,nmon,nday,nhou,nmin,nsec,nwda,nyda,ndst)=time.gmtime(tryTime) - if ((nday!=gday)and(bestBase==HOUR)): - gday+=1 - ghou=0 - tryTime=calendar.timegm((gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)) - elif (((nmon!=gmon)or(nday>26))and(bestBase==DAY)): - gmon+=1 - if gmon>12: - gmon=1 - gyea+=1 - gday=1 - tryTime=calendar.timegm((gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)) - elif ((nyea!=gyea)and(bestBase==MONTH)): - gyea+=1 - gmon=1 - tryTime=calendar.timegm((gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)) - newTime=tryTime - #print "-----" - return - #================================================================== - # draw ticks on a horizontal axis at y-value:yval. The width of - # ticks if ywid. Ticks go between minx and maxx with interval - # tickinterval. If labelinterval is 0, no labels are drawn, - # if 1, the every label is drawn. If labelinterval is 2 then - # every 2nd label is drawn. It labelinterval is 3 then every - # 3rd label is drawn, etc. If skipfirst=1 or skiplast=1 then - # labelling is skipped for those ticks. 
- # - def htick(self,yval,ywid,minx,maxx,tickInterval,label=0,labeloffset=5, - labelinterval=1,skipfirst=0,skiplast=0,labelanchor=Tkinter.N, - negative=0): - numticks=int((maxx-minx)/tickInterval)+1 - labeldigits=max(-floor(log10(tickInterval)),0) - neg=1.0 - if negative==1: - neg=-1.0 - num=0 - for i in xrange(0,numticks): - x=(minx+(i*tickInterval))*neg - (tx,ty)=self.graphcoord(x,yval) - self.cd.canvas.create_line(tx,ty-ywid,tx,ty+ywid) - if label==1: - if (i%labelinterval==0): - if labeldigits==0: - labelstring="%d"%x - else: - format="%%.%df"%labeldigits - labelstring=format%x - skip=0 - if ((skipfirst==1)and(i==0))or((skiplast==1)and(i==(numticks-1))): - skip=1 - if skip==0: - self.cd.canvas.create_text(tx,ty+labeloffset,anchor=labelanchor, - text=labelstring) - def vtick(self,xval,xwid,miny,maxy,tickInterval,label=0,labeloffset=5, - labelinterval=1,skipfirst=0,skiplast=0,labelanchor=Tkinter.W, - negative=0): - numticks=int((maxy-miny)/tickInterval)+1 - labeldigits=max(-floor(log10(tickInterval)),0) - neg=1.0 - if negative==1: - neg=-1.0 - num=0 - for i in xrange(0,numticks): - y=(miny+(i*tickInterval))*neg - (tx,ty)=self.graphcoord(xval,y) - self.cd.canvas.create_line(tx-xwid,ty,tx+xwid,ty) - if label==1: - if (i%labelinterval==0): - if labeldigits==0: - labelstring="%d"%y - else: - format="%%.%df"%labeldigits - labelstring=format%y - skip=0 - if ((skipfirst==1)and(i==0))or((skiplast==1)and(i==(numticks-1))): - skip=1 - if skip==0: - self.cd.canvas.create_text(tx+labeloffset,ty,anchor=labelanchor, - text=labelstring) - - #================================================================== - # - # make axes for value distributions - # - def valaxes(self,minval,maxval,tickInterval): - (nx,ny)=self.graphcoord(minval,minval) - (xx,xy)=self.graphcoord(maxval,maxval) - self.cd.canvas.create_line(nx,ny,xx,ny,xx,xy,nx,xy,nx,ny,xx,xy) - self.vtick(minval,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=-10.0,labelanchor=Tkinter.E) - 
self.vtick(maxval,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=10.0,labelanchor=Tkinter.W) - self.htick(minval,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=8.0,labelanchor=Tkinter.N) - self.htick(maxval,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=-8.0,labelanchor=Tkinter.S) - (midx,dumy)=self.graphcoord((maxval+minval)/2.0,minval) - self.cd.canvas.create_text(midx,ny+20,anchor=Tkinter.N,text="Observed") - (dumx,midy)=self.graphcoord(minval,(maxval+minval)/2.0) - self.cd.canvas.create_text(nx-35,midy,anchor=Tkinter.E,text="F\no\nr\ne\nc\na\ns\nt") - #================================================================== - # - # make axes for expected value distributions - # - def expaxes(self,minval,maxval,tickInterval): - (nx,ny)=self.graphcoord(minval,minval) - (xx,xy)=self.graphcoord(maxval,maxval) - self.cd.canvas.create_line(nx,ny,xx,ny,xx,xy,nx,xy,nx,ny,xx,xy) - self.vtick(minval,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=-10.0,labelanchor=Tkinter.E) - self.vtick(maxval,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=10.0,labelanchor=Tkinter.W) - self.htick(minval,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=8.0,labelanchor=Tkinter.N) - self.htick(maxval,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=-8.0,labelanchor=Tkinter.S) - (midx,dumy)=self.graphcoord((maxval+minval)/2.0,minval) - self.cd.canvas.create_text(midx,ny+20,anchor=Tkinter.N,text="Forecast") - (dumx,midy)=self.graphcoord(minval,(maxval+minval)/2.0) - self.cd.canvas.create_text(nx-35,midy,anchor=Tkinter.E,text="O\nb\ns\ne\nr\nv\ne\nd") - #================================================================== - # - # make axes for value histograms - # - def valhaxes(self,minval,maxval,tickInterval,maxnum,vint,parm): - (nx,ny)=self.graphcoord(minval,0) - (xx,xy)=self.graphcoord(maxval,maxnum) - 
self.cd.canvas.create_line(nx,ny,xx,ny,xx,xy,nx,xy,nx,ny) - #vint=self.niceNumDec(maxnum/20,1) - self.vtick(minval,5,0.0,maxnum,vint,label=1, - labelinterval=3,labeloffset=-10.0,labelanchor=Tkinter.E) - self.vtick(maxval,5,0.0,maxnum,vint,label=1, - labelinterval=3,labeloffset=10.0,labelanchor=Tkinter.W) - self.htick(0,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=8.0,labelanchor=Tkinter.N) - self.htick(maxnum,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=-8.0,labelanchor=Tkinter.S) - (midx,dumy)=self.graphcoord((maxval+minval)/2.0,minval) - self.cd.canvas.create_text(midx,ny+20,anchor=Tkinter.N,text=parm) - (dumx,midy)=self.graphcoord(minval,(0+maxnum)/2.0) - self.cd.canvas.create_text(nx-35,midy,anchor=Tkinter.E,text="N\nu\nm\nb\ne\nr\n \np\ne\nr\n \nc\na\ns\ne") - #================================================================== - # - # make axes for logarithmic value histograms - # - def logvalhaxes(self,minval,maxval,tickInterval,logmin,logmax,parm): - (nx,ny)=self.graphcoord(minval,logmin) - (xx,xy)=self.graphcoord(maxval,logmax) - self.cd.canvas.create_line(nx,ny,xx,ny,xx,xy,nx,xy,nx,ny) - #vint=self.niceNumDec(maxnum/20,1) - lownum=exp(logmin) - hignum=exp(logmax) - print "need ticks from %f to %f"%(lownum,hignum) - expstart=int(floor(log10(lownum))) - expend=int(floor(log10(hignum))) - print " exponents from %d to %d"%(expstart,expend) - for j in xrange(expstart,expend+1): - a=10.0**j - print " loop decade=%f"%a - for i in xrange(1,10): - if i==1: - xwid=5 - else: - xwid=2 - val=float(i)*a - if ((val>lownum)and(val=0: - labelstring="%d"%val - else: - format="%%.%df"%abs(j) - labelstring=format%val - self.cd.canvas.create_text(nx-10.0,ty,anchor=Tkinter.E, - text=labelstring) - self.cd.canvas.create_text(xx+10.0,ty,anchor=Tkinter.W, - text=labelstring) - - - #self.vtick(minval,5,0.0,maxnum,vint,label=1, - # labelinterval=3,labeloffset=-10.0,labelanchor=Tkinter.E) - #self.vtick(maxval,5,0.0,maxnum,vint,label=1, - 
# labelinterval=3,labeloffset=10.0,labelanchor=Tkinter.W) - self.htick(logmin,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=8.0,labelanchor=Tkinter.N) - self.htick(logmax,5,minval,maxval,tickInterval,label=1, - labelinterval=3,labeloffset=-8.0,labelanchor=Tkinter.S) - (midx,dumy)=self.graphcoord((maxval+minval)/2.0,logmin) - self.cd.canvas.create_text(midx,ny+20,anchor=Tkinter.N,text=parm) - (dumx,midy)=self.graphcoord(minval,(logmin+logmax)/2.0) - self.cd.canvas.create_text(nx-35,midy,anchor=Tkinter.E,text="N\nu\nm\nb\ne\nr\n \np\ne\nr\n \nc\na\ns\ne") - #================================================================== - # niceNumDec - pick a nice decimal number - suitable for tick - # marks, etc. - # - def niceNumDec(self,val,roundit): - if val==0: - return 1 - e=floor(log10(val)) - a=10.0**e - f=val/a - if roundit>0: - if f<1.5: - nf=1 - elif f<3.0: - nf=2 - elif f<7.0: - nf=5 - else: - nf=10 - else: - if f<=1.0: - nf=1 - elif f<=2.0: - nf=2.0 - elif f<=5.0: - nf=5.0 - else: - nf=10.0 - return nf*a - #================================================================== - # showScore - draw tick on 'colorcurve' with label of 0-100 score - # - def showScore(self,fullscore,mod,color,taglabel): - midx=self.cd.curwidth/2.0 - #x=midx+(128*((-fullscore+0.5)/0.5)) - if fullscore<0: - fullscore=0.0 - x=midx+(128*((fullscore-50.0)/50.0)) - txt="%d"%int(fullscore) - if mod=="Official": - self.cd.canvas.create_line(x,50-8,x,50-3,fill=color,tags=taglabel) - self.cd.canvas.create_text(x,50-8,text=txt,fill=color,anchor=Tkinter.S,tags=taglabel) - else: - self.cd.canvas.create_line(x,50+3,x,50+8,fill=color,tags=taglabel) - self.cd.canvas.create_text(x,50+8,text=txt,fill=color,anchor=Tkinter.N,tags=taglabel) - #================================================================== - # showWorse - show the number in the first/last bins - which are - # worse than the error limits - # - def showWorse(self,low,high,xmax,yoffset,color,taglist): - x=xmax - y=0 - 
(sx,sy)=self.graphcoord(x,y) - self.cd.canvas.create_text(sx+5,sy,text="Worse",anchor=Tkinter.W) - textstring="%d"%high - self.cd.canvas.create_text(sx+5,sy-yoffset,text=textstring,anchor=Tkinter.W,fill=color,tags=taglist) - x=-xmax - y=0 - (sx,sy)=self.graphcoord(x,y) - self.cd.canvas.create_text(sx-5,sy,text="Worse",anchor=Tkinter.E) - textstring="%d"%low - self.cd.canvas.create_text(sx-5,sy-yoffset,text=textstring,anchor=Tkinter.E,fill=color,tags=taglist) - #================================================================== - # showScores - display modelname,n,avg,std,mae,rms on histogram - # - def showScores(self,modnum,mod,num,avg,std,mae,rms,color,taglist): - font=12 - ystart=self.cd.curheight*(135.0/530.0) - y=ystart+font+(modnum*font) - x=self.cd.curwidth*(80.0/700.0) - self.cd.canvas.create_text(x,y,text=mod,anchor=Tkinter.E,fill=color,tags=taglist) - textstring="%2d"%num - x=self.cd.curwidth*(130.0/700.0) - self.cd.canvas.create_text(x,y,text=textstring,anchor=Tkinter.E, - fill=color,tags=taglist) - textstring="%6.2f"%avg - x=self.cd.curwidth*(170.0/700.0) - self.cd.canvas.create_text(x,y,text=textstring,anchor=Tkinter.E, - fill=color,tags=taglist) - textstring="%5.2f"%std - x=self.cd.curwidth*(210.0/700.0) - self.cd.canvas.create_text(x,y,text=textstring,anchor=Tkinter.E, - fill=color,tags=taglist) - textstring="%5.2f"%mae - x=self.cd.curwidth*(250.0/700.0) - self.cd.canvas.create_text(x,y,text=textstring,anchor=Tkinter.E, - fill=color,tags=taglist) - textstring="%5.2f"%rms - x=self.cd.curwidth*(290.0/700.0) - self.cd.canvas.create_text(x,y,text=textstring,anchor=Tkinter.E, - fill=color,tags=taglist) - return - #================================================================== - # showAvg - draw arrow on histogram axis at average value - # - def showAvg(self,avg,color,taglist): - (sx,sy)=self.graphcoord(avg,0) - self.cd.canvas.create_line(sx,sy+30,sx,sy,fill=color, - arrow=Tkinter.LAST,tags=taglist) - textstring="%.2f"%avg - 
self.cd.canvas.create_text(sx,sy+30,text=textstring,anchor=Tkinter.N, - fill=color,tags=taglist) - #================================================================== - # - # getBins - given a bin width and maxerr value, return - # lists of each bin's min,max, with one of them - # centerred on zero. Last bins may start up to - # a half binwidth more than maxerr - # - def getBins(self,binwidth,maxerr): - halfbin=float(binwidth)/2.0 - binmin=[] - binmax=[] - - mid=0.0 - while ((mid+halfbin)<=(maxerr+halfbin)): - maxx=mid+halfbin - binmin.append(-maxx) - binmax.append(-maxx+binwidth) - binmin.append(maxx-binwidth) - binmax.append(maxx) - mid+=binwidth - binmin.append(-900000.0) - binmax.append(-(mid-halfbin)) - binmin.append(mid-halfbin) - binmax.append(9000000.0) - binmin.sort() - binmax.sort() - return(binmin,binmax) - # - def getProbBins(self,binwidth): - binmin=[] - binmax=[] - halfbin=float(binwidth)/2.0 - num=int(100.0/float(binwidth)) - for i in xrange(num): - binmid=i*binwidth - bot=max(binmid-halfbin,0) - top=min(binmid+halfbin,101) - binmin.append(bot) - binmax.append(top) - return(binmin,binmax) - #================================================================== - # - # binerrs - given a 1-D array of errors, create a 1-D array of - # the number of points with errors inside each bin - # described by the binmin and binmax limits - # - def binerrs(self,err,abinmin,abinmax): - bincnt=add.reduce(logical_and(greater(err,abinmin[:,NewAxis]), - less_equal(err,abinmax[:,NewAxis])),-1) - return bincnt - def histosetup(self,minlimit,maxlimit,binwidth): - self.histowidth=binwidth - self.histohalf=binwidth/2.0 - self.histomin=minlimit-self.histohalf - self.histomax=maxlimit+self.histohalf - self.histonumbins=int(float(self.histomax-self.histomin)/float(self.histowidth)) - self.histobins=resize(arange(self.histonumbins+1),(self.histonumbins+1,1)) - return - def histo(self,data): - worseLow=add.reduce(less(data,self.histomin)) - 
worseHigh=add.reduce(greater(data,self.histomax)) - data=repeat(data,logical_and(less_equal(data,self.histomax), - greater_equal(data,self.histomin))) - data=((data-self.histomin)/self.histowidth).astype(int) - histoData=add.reduce(equal(self.histobins,data),-1) - histoData[-2]+=histoData[-1] - return histoData[:-1],worseLow,worseHigh - def hitcount(self,data,verif): - numless=add.reduce(less(data,self.histomin)) - numgreater=add.reduce(greater(data,self.histomax)) - data=repeat(data,logical_and(less_equal(data,self.histomax), - greater_equal(data,self.histomin))) - data=((data-self.histomin)/self.histowidth).astype(int) - d1=equal(self.histobins,data) - self.VU.logMsg("shape of d1=%s"%str(d1.shape)) - self.VU.logMsg("shape of verif=%s"%str(verif.shape)) - histoData=add.reduce(d1,-1) - self.VU.logMsg("done with histoData reduce") - #hitCount=add.reduce(where(d1,verif,0),-1) - a=where(d1,verif,float32(0)) - hitCount=add.reduce(a,-1) - self.VU.logMsg("done with hitCount reduce") - histoData[-2]+=histoData[-1] - hitCount[-2]+=hitCount[-1] - self.VU.logMsg("returning") - return histoData[:-2],hitCount[:-2] - - #================================================================= - # setupGM - setup Grid Manager - remove all parms from display - # except for parms in parmList for models in modelList - # (or in mutableModel) and WG1 for the mutableModel - # (if available) - # - def setupGM(self,parmList,modelList): - # - # - # - newParmList=[] - for parm in parmList: - if (len(parm)>3): - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - realname=parm[:-3] - if realname not in newParmList: - newParmList.append(realname) - else: - newParmList.append(parm) - else: - newParmList.append(parm) - mutableModel=self.mutableID().modelName() - displayObjList=self._dbss.getParmManager().getDisplayedParms() - totalcount=len(displayObjList) - count=0 - for parmObj in displayObjList: - count+=1 - self.setWorking("Cleaning Grid Manager:%d of %d"%(count,totalcount)) - if 
self.checkWorking()==1: - return 1 - pid=parmObj.getParmID() - pmodel=pid.getDbId().getModelName() - pname=pid.getParmName() - plevel=pid.getParmLevel() - if ((pmodel==mutableModel)and(pname in newParmList)): - continue - if ((pmodel==mutableModel)and(pname=="WG1")): - continue - if ((pmodel in modelList)and(pname in newParmList)): - continue - print pmodel, pname, plevel - self.unloadWE(pmodel,pname,plevel) - # - # if WG1 exists - use that for the units and precision of - # error grids - otherwise use default values - # - (self.errUnits,self.errPrecision,minval,maxval,rateFlag, - ct,dminval,dmaxval)=self.getParmInfo(mutableModel,"WG1") - return 0 - #================================================================== - # - # - # - def getParmInfo(self,mutableModel,parm): - units="units" - precision=0 - minval=0 - maxval=100 - rateflag=0 - colorTable="Gridded Data" - displayMinval=0 - displayMaxval=100 - parm=self.getParm(mutableModel,parm,"SFC") - if parm is not None: - parmInfo = parm.getGridInfo() - units=parmInfo.getUnitString() - precision=parmInfo.getPrecision() - minval=parmInfo.getMinValue() - maxval=parmInfo.getMaxValue() - rateflag=parmInfo.isRateParm() - from com.raytheon.viz.gfe.rsc import DiscreteDisplayUtil - ctInfo = DiscreteDisplayUtil.buildColorMapParameters(parm) - if ctInfo is not None: - colorTable = ctInfo.getColorMapName() - displayMinval = ctInfo.getColorMapMin() - displayMaxval = ctInfo.getColorMapMax() - self.__colorMapParams[colorTable] = ctInfo - return(units,precision,minval,maxval,rateflag,colorTable,displayMinval,displayMaxval) -#============================================================================== -# -# Class for other dialogs. 
-# -""" -class SimpleDialog(AppDialog.Dialog): - def __init__(self, parent=None, name="Simple Dialog", callbackMethod=None, - modal=1): - self.__parent = parent - self.__name = name - self.__modal = modal - self.__callbackMethod = callbackMethod - self.__dialog=AppDialog.Dialog.__init__(self, - parent=self.__parent, - title=self.__name, - modal=self.__modal) - return self.__dialog - - def buttonbox(self): - buttonFrame = Tkinter.Frame(self) - if self.__modal == 1: - Tkinter.Button(buttonFrame, text="Ok", - command=self.__okCB, width=10, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - else: - Tkinter.Button(buttonFrame, text="Run", - command=self.__runCB, width=10, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Run/Dismiss", - command=self.__okCB, width=12, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Cancel", width=10, - command=self.cancelCB).pack(side=Tkinter.RIGHT, pady=5, padx=10) - buttonFrame.pack(side=Tkinter.BOTTOM,expand=0) - def body(self, master): - bodylabel=Tkinter.Label(master,text="This is the body") - bodylabel.pack(side=Tkinter.BOTTOM) - def __runCB(self): - self.__callbackMethod("Run") - def __okCB(self): - self.withdraw() - self.__callbackMethod("OK") - self.ok() - def cancelCB(self): - self.__callbackMethod("Cancel") - self.cancel() -""" -#======================================================================= -class BVDialog(Tkinter.Toplevel): - def __init__(self,parent,title=None,modal=1,hide=0): - self.__modal=modal - Tkinter.Toplevel.__init__(self,parent) - try: - if hide==1: - self.withdraw() - ##self.transient(parent) - if title: - self.title(title) - self.parent=parent - self.buttonbox() - bodyFrame=Tkinter.Frame(self) - self.body(bodyFrame) - bodyFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.BOTH,expand=1) - self.protocol("WM_DELETE_WINDOW",self.cancel) - if parent is not None: - 
self.geometry("+%d+%d"%(parent.winfo_rootx(),parent.winfo_rooty())) - if self.__modal==1: - self.deiconify() - self.wait_window(self) - except: - ##self.destroy() - raise Exception - - def buttonbox(self): - buttonFrame = Tkinter.Frame(self) - if self.__modal == 1: - Tkinter.Button(buttonFrame, text="Ok", - command=self.ok, width=10, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - else: - Tkinter.Button(buttonFrame, text="Run", - command=self.run, width=10, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Run/Dismiss", - command=self.ok, width=12, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Cancel", width=10, - command=self.cancelCB).pack(side=Tkinter.RIGHT, pady=5, padx=10) - buttonFrame.pack(side=Tkinter.BOTTOM,expand=0) - def body(self, master): - pass - def ok(self,event=None): - if not self.validate(): - return - self.withdraw() - self.update_idletasks() - self.apply() - self.cancel() - def cancel(self,event=None): - self.destroy() - def validate(self): - return 1 - def apply(self): - pass - -#======================================================================= -# Working - is a dialog to give user info while 'working'. You can -# set the label for it, or get the value of 'stop', which -# turns to 1 if they hit 'cancel' while this is displayed. -# should be 'withdrawn' while not 'working' on something. 
-# -class Working(BVDialog): - def __init__(self, parent=None, callbackMethod=None): - self.__parent=parent - self.__callbackMethod=callbackMethod - self.stop=Tkinter.IntVar() - self.label=Tkinter.StringVar() - BVDialog.__init__(self,parent=self.__parent, - title="%s Working"%PROGNAME,modal=0,hide=1) - self.update() - self.resizable(0,0) - return - def buttonbox(self): - buttonFrame = Tkinter.Frame(self) - but=Tkinter.Button(buttonFrame,text="Stop",command=self.__callbackMethod) - but.pack(side=Tkinter.LEFT,expand=0) - buttonFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=0) - return - def body(self,master): - lab=Tkinter.Label(master,textvariable=self.label,width=60, - anchor=Tkinter.W) - lab.pack(side=Tkinter.LEFT) - self.label.set("Default Text") - return - def cancel(self): - self.__callbackMethod - return -#======================================================================= -# Cases - a dialog with a scrolled text window showing number of cases -# info. It has one button - a close button. 
-# -class Cases(BVDialog): - def __init__(self,parent,callbackMethod): - self.__parent=parent - self.__callbackMethod=callbackMethod - BVDialog.__init__(self,parent=self.__parent,title="Number of Cases",modal=0,hide=1) - self.update() - geo=self.geometry() - (wh,of)=geo.split("+",1) - (w,h)=wh.split("x",1) - self.minsize(int(w),int(h)) - return - def buttonbox(self): - buttonFrame=Tkinter.Frame(self) - but=Tkinter.Button(self,text="Close",command=self.__callbackMethod) - but.pack(side=Tkinter.TOP) - buttonFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=0) - def body(self,master): - self.sb=Tkinter.Scrollbar(master) - self.sb.pack(side=Tkinter.RIGHT,fill=Tkinter.Y) - self.dataText=Tkinter.Text(master,state=Tkinter.DISABLED,width=25, - height=10) - self.sb.configure(command=self.dataText.yview) - self.dataText.configure(yscrollcommand=self.sb.set) - self.dataText.pack(side=Tkinter.LEFT,fill=Tkinter.BOTH,expand=1) - self.updateText("Default Text") - return - def cancel(self): - self.__callbackMethod() - return - def updateText(self,updateText): - self.dataText.configure(state=Tkinter.NORMAL) - self.dataText.delete(1.0,Tkinter.END) - self.dataText.insert(Tkinter.END,updateText) - self.dataText.configure(state=Tkinter.DISABLED) - return -#======================================================================= -# MiniDiag - is a minimized dialog - to save screen real -# estate. It has one button - to go back to the main -# dialog. 
-# -class MiniDiag(BVDialog): - def __init__(self,parent,callbackMethod,title="Title",buttonText="Button",loc="x"): - self.__parent=parent - self.__callbackMethod=callbackMethod - self.__title=title - self.__buttonText=buttonText - self.__loc=loc - BVDialog.__init__(self,parent=self.__parent,title=self.__title,modal=0,hide=1) - self.update() - self.resizable(0,0) - # - # Set initial location (ul and ur) - # - if self.__loc in ("ur","lr") and parent is not None: - parentgeo=self.__parent.geometry() - (wh,of)=parentgeo.split("+",1) - (w,h)=wh.split("x",1) - (ox,oy)=of.split("+",1) - self.update_idletasks() - geo=self.geometry() - (mwh,mo)=geo.split("+",1) - (mw,mh)=mwh.split("x",1) - if self.__loc=="lr": - newgeo=mwh+"+%d+%d"%(int(ox)+int(w)-int(mw),int(oy)+int(h)-int(mh)) - elif self.__loc=="ur": - newgeo=mwh+"+%d+%d"%(int(ox)+int(w)-int(mw),int(oy)) - self.geometry(newgeo) - self.update_idletasks() - return - def buttonbox(self): - buttonFrame=Tkinter.Frame(self) - but=Tkinter.Button(self,text=self.__buttonText,command=self.__callbackMethod) - but.pack(side=Tkinter.TOP) - buttonFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=0) - def cancel(self): - self.__callbackMethod() - return -#===================================================================== -class CanvasDisplay(BVDialog): - def __init__(self, parent, title="Canvas Display", callbackMethod=None): - self.__parent=parent - self.__title=title - self.__callbackMethod=callbackMethod - self.curwidth=706.0 # initial canvas width - self.curheight=536.0 # initial canvas height - BVDialog.__init__(self,parent=self.__parent, - title=self.__title,modal=0,hide=1) - self.update() - self.firstDisplay=1 - geo=self.geometry() - (mwh,mof)=geo.split("+",1) - (mw,mh)=mwh.split("x",1) - self.minsize(int(mw),int(mh)) - return - def body(self,master): - self.bar=Tkinter.Frame(master) - self.bar.pack(side=Tkinter.BOTTOM) - self.fbar=Tkinter.Frame(master) - self.fbar.pack(side=Tkinter.BOTTOM) - # - borderwidth=2 - 
canwidth=self.curwidth-((borderwidth+1)*2) - canheight=self.curheight-((borderwidth+1)*2) - self.canvas=Tkinter.Canvas(master,width=canwidth,height=canheight, - borderwidth=borderwidth,relief=Tkinter.SUNKEN) - self.canvas.bind("",self.resizecanvas) - self.canvas.pack(fill=Tkinter.BOTH,expand=1) - def buttonbox(self): - buttonFrame=Tkinter.Frame(self) - but=Tkinter.Button(buttonFrame,text="Exit",fg="red",command=self.__callbackMethod) - but.pack(side=Tkinter.LEFT) - self.label=Tkinter.Label(buttonFrame,text=" ") - self.label.pack(side=Tkinter.LEFT,fill=Tkinter.X) - buttonFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.X) - def cancel(self): - self.__callbackMethod() - - def resizecanvas(self,event): - w=float(event.width) - h=float(event.height) - #print "resizecanvas called: %s %s"%(event.width,event.height) - scalex=w/self.curwidth - scaley=h/self.curheight - if ((scalex!=1.0)or(scaley!=1.0)): - self.canvas.scale("all",0.0,0.0,scalex,scaley) - self.curwidth=w - self.curheight=h - return -#======================================================================= -# -# Custom dialog that provides selection for verification stuff -# -class Verif(BVDialog): - def __init__(self, VU, userName, scaleList, parent=None, callbackMethod=None): - self.__VU=VU - self.__parent=parent - self.__callbackMethod=callbackMethod - self.__userName=userName - self.__scaleList=scaleList - BVDialog.__init__(self,parent=self.__parent, - title="%s Options"%PROGNAME,modal=0,hide=1) - # - # find minimum size - # - self.update() - maxw=0 - maxh=0 - self.dispGrids() - self.update_idletasks() - geo=self.geometry() - (maxw,maxh)=self.checkMax(geo,maxw,maxh) - self.dispGridStats() - self.update_idletasks() - geo=self.geometry() - (maxw,maxh)=self.checkMax(geo,maxw,maxh) - self.dispDists() - self.update_idletasks() - geo=self.geometry() - (maxw,maxh)=self.checkMax(geo,maxw,maxh) - self.dispStats() - self.update_idletasks() - geo=self.geometry() - (maxw,maxh)=self.checkMax(geo,maxw,maxh) - 
self.minsize(maxw,maxh) - self.dispGrids() - self.deiconify() - self.lift() - self.wait_visibility() - self.protocol("WM_DELETE_WINDOW",self.__quitCB) - return - def checkMax(self,geo,maxw,maxh): - (wh,of)=geo.split("+",1) - (w,h)=wh.split("x",1) - maxw=max(int(w),maxw) - maxh=max(int(h),maxh) - return(maxw,maxh) - def buttonbox(self): - buttonFrame = Tkinter.Frame(self) - Tkinter.Button(buttonFrame, text="Run",command=self.__runCB, width=6, - state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Hide", - command=self.__hideCB, width=6, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Quit", width=6, - command=self.__quitCB).pack(side=Tkinter.RIGHT, pady=5, padx=10) - buttonFrame.pack(side=Tkinter.BOTTOM,expand=0) - def __runCB(self): - self.__callbackMethod("Run") - def __hideCB(self): - self.__callbackMethod("Hide") - def __quitCB(self): - self.cancel() - def cancel(self): - self.__callbackMethod("Quit") - return - # - # Custom body that has tabbed frames - # - def body(self, master): - # - # The "tab" buttons at the top - # - tabs=[("Grid Displays",self.dispGrids), - ("Grid Stats",self.dispGridStats), - ("Distributions",self.dispDists), - ("Point/Area Stats",self.dispStats), - ("Stat vs. 
Scale",self.dispScaleStats), - ] - tabFrame=Tkinter.Frame(master,relief="sunken",borderwidth=1) - self.tabSetting=Tkinter.StringVar() - self.tabSetting.set("Grid Displays") - for (text,callback) in tabs: - x=Tkinter.Radiobutton(tabFrame,text=text,indicatoron=0, - command=callback,variable=self.tabSetting, - value=text) - col=x.cget("highlightbackground") - x.config(selectcolor=col) - x.pack(side=Tkinter.LEFT) - tabFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X) - # - # Big "body" part of dialog - # - self.BodyFrame=Tkinter.Frame(master) - # - self.col4=Tkinter.Frame(self.BodyFrame) - self.column4(self.col4) - self.col4.pack(side=Tkinter.RIGHT,fill=Tkinter.Y,expand=0) - self.col3=Tkinter.Frame(self.BodyFrame) - self.column3(self.col3) - self.col3.pack(side=Tkinter.RIGHT,fill=Tkinter.Y,expand=0) - self.col2=Tkinter.Frame(self.BodyFrame) - self.column2(self.col2) - self.col2.pack(side=Tkinter.RIGHT,fill=Tkinter.Y,expand=0) - # - self.Grids=Tkinter.Frame(self.BodyFrame) - self.OptionsGrids(self.Grids) - # - self.GridStats=Tkinter.Frame(self.BodyFrame) - self.OptionsGridsStats(self.GridStats) - # - self.ScaleStats=Tkinter.Frame(self.BodyFrame) - self.OptionsScaleStats(self.ScaleStats) - # - self.Dists=Tkinter.Frame(self.BodyFrame) - self.OptionsDists(self.Dists) - # - self.Stats=Tkinter.Frame(self.BodyFrame) - self.OptionsStats(self.Stats) - # - self.BodyFrame.pack(side=Tkinter.TOP,fill=Tkinter.BOTH,expand=1) - # - # setup scales (updating the GridsScale updates all others) - # - self.updateGridsScale() - # - # pack the default one - # - cur=self.tabSetting.get() - for (text,callback) in tabs: - if cur==text: - callback() - return - #================================================================== - # - # Switch tab frame displayed - # - def dispGrids(self): - self.GridStats.pack_forget() - self.ScaleStats.pack_forget() - self.Dists.pack_forget() - self.Stats.pack_forget() - self.Grids.pack(side=Tkinter.RIGHT, - fill=Tkinter.BOTH,expand=1) - def 
dispGridStats(self): - self.Grids.pack_forget() - self.ScaleStats.pack_forget() - self.Dists.pack_forget() - self.Stats.pack_forget() - self.GridStats.pack(side=Tkinter.RIGHT, - fill=Tkinter.BOTH,expand=1) - def dispDists(self): - self.Grids.pack_forget() - self.GridStats.pack_forget() - self.ScaleStats.pack_forget() - self.Stats.pack_forget() - self.Dists.pack(side=Tkinter.RIGHT, - fill=Tkinter.BOTH,expand=1) - def dispStats(self): - self.Grids.pack_forget() - self.GridStats.pack_forget() - self.ScaleStats.pack_forget() - self.Dists.pack_forget() - self.Stats.pack(side=Tkinter.RIGHT, - fill=Tkinter.BOTH,expand=1) - def dispScaleStats(self): - self.Grids.pack_forget() - self.GridStats.pack_forget() - self.Dists.pack_forget() - self.Stats.pack_forget() - self.ScaleStats.pack(side=Tkinter.RIGHT, - fill=Tkinter.BOTH,expand=1) - # - # Get the values associated with the dialog pieces that - # are displayed with the current tab - # - def getValues(self): - values={} - tabtype=self.tabSetting.get() - values["tab"]=tabtype - if tabtype=="Grid Displays": - values=self.getGridsValues(values) - if tabtype=="Grid Stats": - values=self.getGridsStatsValues(values) - if tabtype=="Stat vs. 
Scale": - values=self.getScaleStatsValues(values) - if tabtype=="Distributions": - values=self.getDistsValues(values) - if tabtype=="Point/Area Stats": - values=self.getStatsValues(values) - return values - # - # values on with the Grids tab - # - def getGridsValues(self,values): - values["Display"]=self.GridsDisplay.get() - values["Parm"]=self.getCheckList(self.GridsParms) - values["Group"]=self.GridsGroup.get() - values["Model"]=self.getCheckList(self.Models) - values["ObsModel"]=self.ObsModel.get() - values["fcstrList"]=self.getForecasterListbox() - values["fhrStart"]=self.fhrStart.get() - values["fhrEnd"]=self.fhrEnd.get() - values["commonCases"]=self.Common.get() - values["dateStyle"]=self.Datestyle.get() - values["dateType"]=self.Datetype.get() - values["numDays"]=self.Ndays.get() - values["fromDay"]=self.getFromdayListbox() - values["dayList"]=self.getDaylistListbox() - values["cycleList"]=self.getCycleVals() - values["scale"]=self.GridsScale.get() - values["accumHours"]=self.accumHours.get() - values["accumFreq"]=self.accumFreq.get() - return values - # - # values on with the GridsStats tab - # - def getGridsStatsValues(self,values): - values["Display"]=self.GridsStatsDisplay.get() - #values["Parms"]=self.getCheckList(self.GridsStatsParms) - values["Parm"]=self.GridsStatsParm.get() - values["Threshold"]=self.GridsStatsThreshold.get() - values["Models"]=self.getCheckList(self.Models) - values["ObsModel"]=self.ObsModel.get() - values["fcstrList"]=self.getForecasterListbox() - values["fhrStart"]=self.fhrStart.get() - values["fhrEnd"]=self.fhrEnd.get() - values["commonCases"]=self.Common.get() - values["dateStyle"]=self.Datestyle.get() - values["dateType"]=self.Datetype.get() - values["numDays"]=self.Ndays.get() - values["fromDay"]=self.getFromdayListbox() - values["dayList"]=self.getDaylistListbox() - values["cycleList"]=self.getCycleVals() - values["scale"]=self.GridsStatsScale.get() - values["accumHours"]=self.accumHours.get() - 
values["accumFreq"]=self.accumFreq.get() - values["TwoCatType"]=self.GridsStatsTwoCatType.get() - values["TwoCatCond"]=self.GridsStatsTwoCatCond.get() - str=self.GridsStatsTwoCatValueString.get() - try: - val=float(str) - except: - val=0.0 - values["TwoCatValue"]=val - values["TwoCatValueString"]=str - return values - # - # values on with the Dists tab - # - def getDistsValues(self,values): - values["Display"]=self.DistsDisplay.get() - #values["Parms"]=self.getCheckList(self.DistsParms) - values["Parm"]=self.DistsParm.get() - values["Models"]=self.getCheckList(self.Models) - values["ObsModel"]=self.ObsModel.get() - values["fcstrList"]=self.getForecasterListbox() - values["fhrStart"]=self.fhrStart.get() - values["fhrEnd"]=self.fhrEnd.get() - values["commonCases"]=self.Common.get() - values["dateStyle"]=self.Datestyle.get() - values["dateType"]=self.Datetype.get() - values["numDays"]=self.Ndays.get() - values["fromDay"]=self.getFromdayListbox() - values["dayList"]=self.getDaylistListbox() - values["cycleList"]=self.getCycleVals() - values["scale"]=self.DistsScale.get() - values["accumHours"]=self.accumHours.get() - values["accumFreq"]=self.accumFreq.get() - return values - # - # values on the Stats tab - # - def getStatsValues(self,values): - values["Display"]=self.StatsDisplay.get() - values["areaList"]=self.getListbox(self.StatsAreasListbox) - values["AreaCombine"]=self.StatsAreaCombine.get() - values["Parms"]=self.getCheckList(self.StatsParms) - values["Threshold"]=self.StatsThreshold.get() - values["PlotType"]=self.StatsType.get() - #values["Parm"]=self.StatsParm.get() - values["Models"]=self.getCheckList(self.Models) - values["ObsModel"]=self.ObsModel.get() - values["fcstrList"]=self.getForecasterListbox() - values["fhrStart"]=self.fhrStart.get() - values["fhrEnd"]=self.fhrEnd.get() - values["commonCases"]=self.Common.get() - values["dateStyle"]=self.Datestyle.get() - values["dateType"]=self.Datetype.get() - values["numDays"]=self.Ndays.get() - 
values["fromDay"]=self.getFromdayListbox() - values["dayList"]=self.getDaylistListbox() - values["cycleList"]=self.getCycleVals() - values["scale"]=self.StatsScale.get() - values["accumHours"]=self.accumHours.get() - values["accumFreq"]=self.accumFreq.get() - values["TwoCatType"]=self.statsTwoCatType.get() - values["TwoCatCond"]=self.statsTwoCatCond.get() - str=self.statsTwoCatValueString.get() - try: - val=float(str) - except: - val=0.0 - values["TwoCatValue"]=val - values["TwoCatValueString"]=str - return values - # - # values on with the ScaleStats tab - # - def getScaleStatsValues(self,values): - values["Display"]=self.ScaleStatsDisplay.get() - values["areaList"]=self.getListbox(self.ScaleStatsAreasListbox) - values["AreaCombine"]=self.ScaleStatsAreaCombine.get() - values["Parm"]=self.ScaleStatsParm.get() - values["Threshold"]=self.ScaleStatsThreshold.get() - values["Models"]=self.getCheckList(self.Models) - values["ObsModel"]=self.ObsModel.get() - values["fcstrList"]=self.getForecasterListbox() - values["fhrStart"]=self.fhrStart.get() - values["fhrEnd"]=self.fhrEnd.get() - values["commonCases"]=self.Common.get() - values["dateStyle"]=self.Datestyle.get() - values["dateType"]=self.Datetype.get() - values["numDays"]=self.Ndays.get() - values["fromDay"]=self.getFromdayListbox() - values["dayList"]=self.getDaylistListbox() - values["cycleList"]=self.getCycleVals() - values["scale"]=self.GridsStatsScale.get() - values["accumHours"]=self.accumHours.get() - values["accumFreq"]=self.accumFreq.get() - values["TwoCatType"]=self.scaleStatsTwoCatType.get() - values["TwoCatCond"]=self.scaleStatsTwoCatCond.get() - str=self.scaleStatsTwoCatValueString.get() - try: - val=float(str) - except: - val=0.0 - values["TwoCatValue"]=val - values["TwoCatValueString"]=str - return values - #=============================================================== - # - # Column 2 - model - # - def column2(self,master): - # - # At bottom - ObsModel being used - # - 
obsModelFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - obsModelLabel=Tkinter.Label(obsModelFrame,text="Observed:") - obsModelLabel.pack(side=Tkinter.LEFT) - obsModels=self.__VU.getCFG('OBSMODELS') - namewidth=0 - for model in obsModels: - if len(model)>namewidth: - namewidth=len(model) - self.ObsModel=Tkinter.StringVar() - self.ObsModelButton=Tkinter.Menubutton(obsModelFrame,textvariable=self.ObsModel, - relief=Tkinter.RAISED,indicatoron=1,width=namewidth+1,anchor=Tkinter.W) - self.ObsModelButton.pack(side=Tkinter.RIGHT) - self.ObsModelPopup=Tkinter.Menu(self.ObsModelButton,tearoff=0) - obsModels=self.__VU.getCFG('OBSMODELS') - for model in obsModels: - self.ObsModelPopup.add_radiobutton(label=model,indicatoron=0,value=model, - variable=self.ObsModel) - self.ObsModel.set(obsModels[0]) - obsModelFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=1) - self.ObsModelButton.config(menu=self.ObsModelPopup) - - # - # common Cases checkbox - # - commonFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - self.Common=Tkinter.IntVar() - commonCheck=Tkinter.Checkbutton(commonFrame,text="Common Cases", - variable=self.Common, - onvalue=1,offvalue=0) - self.Common.set(1) - commonCheck.pack(side=Tkinter.TOP,anchor=Tkinter.W) - commonFrame.pack(side=Tkinter.BOTTOM,anchor=Tkinter.N, - fill=Tkinter.X,expand=0) - # - # Models checkbox - # - self.Models=[] - models=self.__VU.listModels() - for model in models: - self.Models.append(Tkinter.StringVar()) - if "Official" in models: - defaultModels=["Official",] - else: - defaultModels=[models[0],] - self.checkGroup(master,"Model:",self.Models,models,defaultModels,Tkinter.BOTH,1) - return - #=============================================================== - # - # Column 3 - Forecaster and common cases - # - def column3(self,master): - XHOUR=self.__VU.getCFG('MAXFORECASTHOUR') - # - # Accumulation Time Periods: - # - accumFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - 
self.accumHours=Tkinter.IntVar() - self.accumFreq=Tkinter.IntVar() - freqFrame=Tkinter.Frame(accumFrame) - flab=Tkinter.Label(freqFrame,text="Every:",width=8) - flab.pack(side=Tkinter.LEFT,anchor=Tkinter.S) - flab=Tkinter.Label(freqFrame,text="hrs") - flab.pack(side=Tkinter.RIGHT,anchor=Tkinter.S) - scaleFreq=Tkinter.Scale(freqFrame,from_=1,to=24, - variable=self.accumFreq, - orient=Tkinter.HORIZONTAL, - sliderlength=15) - accumFrequencyDefault=self.__VU.getCFG("ACCUM_FREQUENCY_DEFAULT") - if accumFrequencyDefault is None: - accumFrequencyDefault=6 - self.accumFreq.set(accumFrequencyDefault) - scaleFreq.pack(side=Tkinter.RIGHT,fill=Tkinter.X,expand=1) - freqFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=1) - hoursFrame=Tkinter.Frame(accumFrame) - flab=Tkinter.Label(hoursFrame,text="Length:",width=8) - flab.pack(side=Tkinter.LEFT,anchor=Tkinter.S) - flab=Tkinter.Label(hoursFrame,text="hrs") - flab.pack(side=Tkinter.RIGHT,anchor=Tkinter.S) - accumResolution=self.__VU.getCFG('ACCUM_RESOLUTION') - if accumResolution is None: - accumResolution=6 - scaleHours=Tkinter.Scale(hoursFrame,from_=accumResolution, - to=XHOUR,resolution=accumResolution, - variable=self.accumHours, - orient=Tkinter.HORIZONTAL, - sliderlength=15) - accumDefaultLength=self.__VU.getCFG("ACCUM_LENGTH_DEFAULT") - if accumDefaultLength is None: - accumDefaultLength=6 - self.accumHours.set(accumDefaultLength) - scaleHours.pack(side=Tkinter.RIGHT,fill=Tkinter.X,expand=1) - hoursFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=1) - flab=Tkinter.Label(accumFrame,text="Accumulation Time Periods:") - flab.pack(side=Tkinter.BOTTOM,expand=0) - accumFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=0) - # - # Forecast Hours start/stop - # - fhrFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - self.fhrStart=Tkinter.IntVar() - self.fhrEnd=Tkinter.IntVar() - fend=Tkinter.Scale(fhrFrame,from_=0,to=XHOUR,variable=self.fhrEnd, - orient=Tkinter.HORIZONTAL,command=self.endMove, - 
sliderlength=15) - self.fhrEnd.set(XHOUR) - fend.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=1) - fstart=Tkinter.Scale(fhrFrame,from_=0,to=XHOUR,variable=self.fhrStart, - orient=Tkinter.HORIZONTAL,command=self.startMove, - sliderlength=15) - self.fhrStart.set(0) - fstart.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=1) - flab=Tkinter.Label(fhrFrame,text="Forecast Hours:") - flab.pack(side=Tkinter.BOTTOM,expand=0) - fhrFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.X,expand=0) - # - # Forecaster names to show... - # - forecasters=["ALL"] - self.forecasterNumbers=[-1] - # - trimming=self.__VU.getCFG('FORECASTER_LIST_TRIMMING') - trimADMIN=self.__VU.getCFG('FORECASTER_LIST_TRIMMING_ADMINISTRATORS') - fFormat=self.__VU.getCFG('FORECASTER_LIST_FORMAT') - fSort=self.__VU.getCFG('FORECASTER_LIST_SORT') - labels=[] - numstrs=self.__VU.getFcstrNums() - for numstr in numstrs: - num=int(numstr) - id=self.__VU.getFcstrID(num) - if ((trimming==1)and(self.__userName not in trimADMIN)and(self.__userName!=id)and(num!=0)): - continue - name=self.__VU.getFcstrName(num) - sort=numstr #defaults to number - if fSort=="id": - sort=id - elif fSort=="name": - sort=name - label=name #defaults to name - if fFormat=="number": - label=numstr - elif fFormat=="id": - label=id - elif fFormat=="number-name": - label="%s - %s"%(numstr,name) - elif fFormat=="number-id": - label="%s - %s"%(numstr,id) - labels.append("%s|%s|%s"%(sort,numstr,label)) - labels.sort() - for entry in labels: - (sstr,numstr,label)=entry.split("|") - forecasters.append(label) - self.forecasterNumbers.append(int(numstr)) - defaultForecasters=["ALL",] - maxwid=0 - for forecaster in forecasters: - wid=len(forecaster) - if wid>maxwid: - maxwid=wid - maxheight=10 - fcstrFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - self.ForecasterListbox=self.sListbox(fcstrFrame,"Forecaster:", - forecasters,defaultForecasters,maxwid+1,maxheight,Tkinter.EXTENDED) - 
fcstrFrame.pack(side=Tkinter.BOTTOM,fill=Tkinter.BOTH,expand=1) - return - # - #================================================================= - # getForecasterListbox - get list of integer forecast numbers for - # forcasters turned on in ForecasterListbox - # - def getForecasterListbox(self): - outlist=[] - itemnums=self.ForecasterListbox.curselection() - try: - itemnums=map(int,itemnums) - except ValueError: pass - for itemnum in itemnums: - outlist.append(self.forecasterNumbers[itemnum]) - return outlist - #================================================================== - # - # If moving fhrStart...check to make sure that it is not more - # than fhrEnd...and if it is...move fhrEnd too. - # - def startMove(self,event): - st=self.fhrStart.get() - en=self.fhrEnd.get() - if en",padx=0,pady=0,command=self.toggleNScale) - self.NTog.pack(side=Tkinter.RIGHT,anchor=Tkinter.E) - labFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X,expand=1) - self.NScale=Tkinter.Scale(nFrame,from_=1,to=50,variable=self.Ndays, - showvalue=1,orient=Tkinter.HORIZONTAL, - sliderlength=15) - self.NScale.pack(side=Tkinter.TOP,anchor=Tkinter.N,fill=Tkinter.X) - nFrame.pack(side=Tkinter.TOP,anchor=Tkinter.N,fill=Tkinter.X) - - gridDayStrings,self.gridDays=self.getRecentDates(self.__VU.GRIDDAYS) - defaultDay=[gridDayStrings[0],] - maxwid=10 - maxheight=5 # number of days to show - self.FromdayListbox=self.sListbox(self.ByPeriod,"Ending on:", - gridDayStrings,defaultDay,maxwid,maxheight,Tkinter.BROWSE) - self.ByPeriod.pack(side=Tkinter.TOP,anchor=Tkinter.N,fill=Tkinter.BOTH,expand=1) - # - # byList - # - self.ByList=Tkinter.Frame(byFrame) - #days,daydates=self.getRecentDates(self.__VU.GRIDDAYS) - defaultDaylist=[] - for i in xrange(7): - defaultDaylist.append(gridDayStrings[i]) - maxwid=10 - maxheight=5 #number of days to show - self.DaylistListbox=self.sListbox(self.ByList,"Include:", - gridDayStrings,defaultDaylist,maxwid,maxheight,Tkinter.EXTENDED) - 
self.ByList.pack(side=Tkinter.TOP,anchor=Tkinter.N,fill=Tkinter.BOTH,expand=1) - # - # datetype - # - datetypeFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - datetypeLabel=Tkinter.Label(datetypeFrame,text="Choose Dates by:") - datetypeLabel.pack(side=Tkinter.TOP,anchor=Tkinter.W) - self.Datetype=Tkinter.StringVar() - datetypeDefault="Period Length" - datetypes=["Period Length","List of dates"] - for datetype in datetypes: - a=Tkinter.Radiobutton(datetypeFrame,text=datetype,command=self.setDatetype, - variable=self.Datetype,value=datetype) - if datetype is datetypeDefault: - a.invoke() - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - datetypeFrame.pack(side=Tkinter.TOP,anchor=Tkinter.N,fill=Tkinter.X) - # - # Now pack the frame with the "byPeriod" or "byList" - # - byFrame.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=Tkinter.BOTH,expand=1) - # - # cycle - # - cycleFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - labFrame=Tkinter.Frame(cycleFrame) - cycleLabel=Tkinter.Label(labFrame,text="Cycle:") - cycleLabel.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - cycleToggle=Tkinter.Button(labFrame,text="ALL",padx=0,pady=0,command=self.toggleCycles) - cycleToggle.pack(side=Tkinter.RIGHT,anchor=Tkinter.E) - labFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X,expand=1) - cyclecol1=Tkinter.Frame(cycleFrame) - cyclecol2=Tkinter.Frame(cycleFrame) - cycleDefault=self.__VU.getCFG('ALLCYCLES') - cycles=self.__VU.getCFG('ALLCYCLES') - self.CycleFlags=[] - self.CycleVals=[] - cnt=0 - for cycle in cycles: - self.CycleFlags.append(Tkinter.IntVar()) - if cnt": - self.NScale.configure(to=self.__VU.STATDAYS) - self.NTog.configure(text="<") - else: - if curN>50: - self.Ndays.set(50) - self.NScale.configure(to=50) - self.NTog.configure(text=">") - return - #to=self.__VU.STATDAYS - #================================================================ - # toggleCycles - toggles all the cycle buttons - # - def toggleCycles(self): - for cycleFlag in 
self.CycleFlags: - val=cycleFlag.get() - val=abs(val-1) - cycleFlag.set(val) - return - #================================================================= - # getCycleVals - get list of values turned on in Cycles - # - def getCycleVals(self): - outlist=[] - for i in xrange(len(self.CycleFlags)): - a=self.CycleFlags[i].get() - if a!=0: - outlist.append(self.CycleVals[i]) - return outlist - #================================================================= - # getFromdayListbox - get unix date for day listed in Fromday - # listbox - # - def getFromdayListbox(self): - itemnums=self.FromdayListbox.curselection() - try: - itemnums=map(int,itemnums) - except ValueError: pass - itemnum=itemnums[0] - outdate=self.gridDays[itemnum] - return outdate - #================================================================= - # getDaylistListbox - get list of integer forecast numbers for - # forcasters turned on in ForecasterListbox - # - def getDaylistListbox(self): - outlist=[] - itemnums=self.DaylistListbox.curselection() - try: - itemnums=map(int,itemnums) - except ValueError: pass - for itemnum in itemnums: - outlist.append(self.gridDays[itemnum]) - return outlist - #================================================================== - # - # Frame that specifies the options for the Grids displays - # - def OptionsGrids(self,master): - # - # parameter - # - self.GridsParms=[] - parms=self.__VU.getVerParms() - for parm in parms: - self.GridsParms.append(Tkinter.StringVar()) - defaultParms=[parms[0],] - self.checkGroup(master,"Parameter:",self.GridsParms,parms, - defaultParms,Tkinter.BOTH,1) - # - # display - # - self.GridsDisplay=Tkinter.StringVar() - displays=["Forecasts","Errors"] - defaultDisplay="Forecasts" - gridDisplayFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - gdLabelFrame=Tkinter.Frame(gridDisplayFrame) - gridDisplayLabel=Tkinter.Label(gdLabelFrame,text="Display:") - gridDisplayLabel.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - - 
self.GridsScale=Tkinter.IntVar() - self.GridsScaleText=Tkinter.StringVar() - but=Tkinter.Menubutton(gdLabelFrame,textvariable=self.GridsScaleText, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.RIGHT,anchor=Tkinter.W) - self.GridsScalePopup=Tkinter.Menu(but,tearoff=0) - for (value,text) in self.__scaleList: - self.GridsScalePopup.add_radiobutton(label=text,indicatoron=0,value=value, - variable=self.GridsScale, - command=self.updateGridsScale) - self.GridsScale.set(0) - #self.updateGridsScale() - but.config(menu=self.GridsScalePopup) - - gdLabelFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X,expand=0) - for item in displays: - a=Tkinter.Radiobutton(gridDisplayFrame,text=item, - variable=self.GridsDisplay,value=item) - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - if item is defaultDisplay: - self.GridsDisplay.set(item) - gridDisplayFrame.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=Tkinter.X,expand=0) - # - # Group by - # - self.GridsGroup=Tkinter.StringVar() - defaultGroup="Forecast Hour" - groups=["Forecast Hour","Run Time"] - self.radioGroup(master,"Group by:",self.GridsGroup,groups,defaultGroup,Tkinter.X,0) - return - # - def updateGridsScale(self): - value=self.GridsScale.get() - for i in xrange(len(self.__scaleList)): - (num,text)=self.__scaleList[i] - if num==value: - self.GridsScaleText.set(text) - self.GridsStatsScale.set(num) - self.GridsStatsScaleText.set(text) - self.DistsScale.set(num) - self.DistsScaleText.set(text) - self.StatsScale.set(num) - self.StatsScaleText.set(text) - break - #================================================================== - # - # Frame that specifies the options for the GridsStats display - # - def OptionsGridsStats(self,master): - # - # parameter - # - self.GridsStatsParm=Tkinter.StringVar() - parms=self.__VU.getVerParmsVect() - #for parm in parms: - # self.GridsStatsParms.append(Tkinter.StringVar()) - defaultParm=parms[0] - self.radioGroup(master,"Parameter:",self.GridsStatsParm,parms, - 
defaultParm,Tkinter.BOTH,1,callback=self.updateGridsStatsThreshold) - # - # display - # - self.GridsStatsDisplay=Tkinter.StringVar() - displays=["Bias","Mean Abs Error","RMS Error","Mean Squared Error"] - defaultDisplay="Bias" - radioFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - scaleFrame=Tkinter.Frame(radioFrame) - radioLabel=Tkinter.Label(scaleFrame,text="Display:") - radioLabel.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - - self.GridsStatsScale=Tkinter.IntVar() - self.GridsStatsScaleText=Tkinter.StringVar() - but=Tkinter.Menubutton(scaleFrame,textvariable=self.GridsStatsScaleText, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.RIGHT,anchor=Tkinter.W) - self.GridsStatsScalePopup=Tkinter.Menu(but,tearoff=0) - for (value,text) in self.__scaleList: - self.GridsStatsScalePopup.add_radiobutton(label=text,indicatoron=0,value=value, - variable=self.GridsStatsScale, - command=self.updateGridsStatsScale) - self.GridsStatsScale.set(0) - #self.updateGridsStatsScale() - - but.config(menu=self.GridsStatsScalePopup) - scaleFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X,expand=1) - - a=Tkinter.Radiobutton(radioFrame,text="Bias", - variable=self.GridsStatsDisplay,value="Bias") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - self.GridsStatsDisplay.set("Bias") - a=Tkinter.Radiobutton(radioFrame,text="Mean Abs Error", - variable=self.GridsStatsDisplay,value="Mean Abs Error") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - a=Tkinter.Radiobutton(radioFrame,text="RMS Error", - variable=self.GridsStatsDisplay,value="RMS Error") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - a=Tkinter.Radiobutton(radioFrame,text="Mean Squared Error", - variable=self.GridsStatsDisplay,value="Mean Squared Error") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - threshFrame=Tkinter.Frame(radioFrame) - a=Tkinter.Radiobutton(threshFrame,text="Percent Err <", - variable=self.GridsStatsDisplay,value="Percent Err <") - a.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - 
self.GridsStatsThreshold=Tkinter.IntVar() - self.GridsStatsThresholdValue=Tkinter.StringVar() - but=Tkinter.Menubutton(threshFrame,textvariable=self.GridsStatsThresholdValue, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.GridsStatsThresholdPopup=Tkinter.Menu(but,tearoff=0) - self.GridsStatsThresholdPopup.add_command(label="stuff") - for i in xrange(self.__VU.getCFG('NUMTHRESH')): - self.GridsStatsThresholdPopup.add_radiobutton(label="xxxx",indicatoron=0,value=i, - variable=self.GridsStatsThreshold, - command=self.pickGridsStatsThreshold) - but.config(menu=self.GridsStatsThresholdPopup) - threshFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W) - - twocatFrame=Tkinter.Frame(radioFrame) - a=Tkinter.Radiobutton(twocatFrame,text="",variable=self.GridsStatsDisplay, - value="TwoCat") - a.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.GridsStatsTwoCatType=Tkinter.StringVar() - but=Tkinter.Menubutton(twocatFrame,textvariable=self.GridsStatsTwoCatType, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.GridsStatsTwoCatTypePopup=Tkinter.Menu(but,tearoff=0) - for stat in ["Hits","Areal Hits","Misses","Areal Misses", - "False Alarms","Areal False Alarms","Correct Negatives", - "Areal Correct Negatives", - "Frequency Observed", - "Frequency Forecast", - "Fraction Correct","Areal Fraction Correct", - "Frequency Bias","Areal Frequency Bias", - "POD","Areal POD","FAR","Areal FAR","Threat Score", - "Areal Threat Score","Equitable Threat Score", - "Areal Equitable Threat Score","True Skill Score", - "Areal True Skill Score","Heidke Skill Score", - "Areal Heidke Skill Score","Odds Ratio","Areal Odds Ratio"]: - self.GridsStatsTwoCatTypePopup.add_radiobutton(label=stat, - indicatoron=0,value=stat, - variable=self.GridsStatsTwoCatType,command=self.updateGridsStatsTwoCatType) - self.GridsStatsTwoCatType.set("Fraction Correct") - but.config(menu=self.GridsStatsTwoCatTypePopup) - 
self.GridsStatsTwoCatCond=Tkinter.StringVar() - but=Tkinter.Menubutton(twocatFrame,textvariable=self.GridsStatsTwoCatCond, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.GridsStatsTwoCatCondPopup=Tkinter.Menu(but,tearoff=0) - for cond in [">",">=","<=","<"]: - self.GridsStatsTwoCatCondPopup.add_radiobutton(label=cond, - indicatoron=0,value=cond, - variable=self.GridsStatsTwoCatCond,command=self.updateGridsStatsTwoCatType) - self.GridsStatsTwoCatCond.set(">") - but.config(menu=self.GridsStatsTwoCatCondPopup) - self.GridsStatsTwoCatValueString=Tkinter.StringVar() - ent=Tkinter.Entry(twocatFrame,textvariable=self.GridsStatsTwoCatValueString, - width=5,relief=Tkinter.SUNKEN) - self.GridsStatsTwoCatValueString.set("0.0") - ent.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - twocatFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W) - radioFrame.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=Tkinter.X,expand=0) - self.updateGridsStatsThreshold() - return - #================================================================== - # - def updateGridsStatsScale(self): - value=self.GridsStatsScale.get() - for i in xrange(len(self.__scaleList)): - (num,text)=self.__scaleList[i] - if num==value: - self.GridsStatsScaleText.set(text) - self.GridsScale.set(num) - self.GridsScaleText.set(text) - self.DistsScale.set(num) - self.DistsScaleText.set(text) - self.StatsScale.set(num) - self.StatsScaleText.set(text) - break - # - # When user actually picks a threshold - then also set the display to - # use the Percent Err < display - # - def pickGridsStatsThreshold(self): - self.GridsStatsDisplay.set("Percent Err <") - self.updateGridsStatsThreshold() - return - def updateGridsStatsTwoCatType(self): - self.GridsStatsDisplay.set("TwoCat") - return - # - # When parm is changed, or when the user picks a threshold - need to - # update the chosen thresholds. 
- # - def updateGridsStatsThreshold(self): - #print "in updateGridsStatsThreshold" - parm=self.GridsStatsParm.get() - #print " parm=",parm - parmList=[parm,] - if len(parmList)<1: - return - tlist=[] - plist=[] - for parm in parmList: - readParm=parm - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - datatype=self.__VU.getVerParmType(readParm) - thresholds=self.__VU.getVerThresholds(readParm) - #print " thresholds for ",parm - #print " are:",thresholds - if datatype==1: - (threshmag,threshdir)=thresholds - if last3=="Dir": - thresholds=threshdir - else: - thresholds=threshmag - #if last3 in ("Spd","Dir"): - # (threshmag,threshdir)=thresholds - # if last3=="Spd": - # thresholds=threshmag - # else: - # thresholds=threshdir - if len(tlist)>0: - same=1 - for j in xrange(len(tlist)): - thresh=tlist[j] - parms=plist[j] - same=1 - for i in xrange(len(thresh)): - if thresh[i]!=thresholds[i]: - same=0 - break - if same==1: - plist[j]+=",%s"%parm - break - if same!=1: - tlist.append(thresholds) - plist.append(parm) - else: - tlist.append(thresholds) - plist.append(parm) - - dthresh=[] - if len(tlist)>1: - for j in xrange(len(tlist)): - thresh=tlist[j] - parms=plist[j] - for i in xrange(len(thresh)): - t=thresh[i] - str="%d"%(t) - if len(dthresh)<(i+1): - dthresh.append(str) - else: - dthresh[i]+=" | %s"%str - else: - thresh=tlist[0] - for i in xrange(len(thresh)): - t=thresh[i] - str="%d"%t - dthresh.append(str) - # - # - parmList=" | ".join(plist) - self.GridsStatsThresholdPopup.entryconfigure(0,label=parmList) - for i in xrange(len(dthresh)): - self.GridsStatsThresholdPopup.entryconfigure(i+1,label=dthresh[i]) - #print " ",i,dthresh[i] - self.GridsStatsThresholdValue.set(dthresh[self.GridsStatsThreshold.get()]) - return - def updateScaleThreshold(self): - parm=self.GridsStatsParm.get() - parmList=[parm,] - if len(parmList)<1: - return - tlist=[] - plist=[] - for parm in parmList: - readParm=parm - last3="xxx" - 
if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - thresholds=self.__VU.getVerThresholds(readParm) - print " thresholds for ",parm - print " are:",thresholds - if last3 in ("Spd","Dir"): - (threshmag,threshdir)=thresholds - if last3=="Spd": - thresholds=threshmag - else: - thresholds=threshdir - if len(tlist)>0: - same=1 - for j in xrange(len(tlist)): - thresh=tlist[j] - parms=plist[j] - same=1 - for i in xrange(len(thresh)): - if thresh[i]!=thresholds[i]: - same=0 - break - if same==1: - plist[j]+=",%s"%parm - break - if same!=1: - tlist.append(thresholds) - plist.append(parm) - else: - tlist.append(thresholds) - plist.append(parm) - - dthresh=[] - if len(tlist)>1: - for j in xrange(len(tlist)): - thresh=tlist[j] - parms=plist[j] - for i in xrange(len(thresh)): - t=thresh[i] - str="%d"%(t) - if len(dthresh)<(i+1): - dthresh.append(str) - else: - dthresh[i]+=" | %s"%str - else: - thresh=tlist[0] - for i in xrange(len(thresh)): - t=thresh[i] - str="%d"%t - dthresh.append(str) - # - # - parmList=" | ".join(plist) - self.ScaleThresholdPopup.entryconfigure(0,label=parmList) - for i in xrange(len(dthresh)): - self.ScaleThresholdPopup.entryconfigure(i+1,label=dthresh[i]) - #print " ",i,dthresh[i] - self.ScaleThresholdValue.set(dthresh[self.ScaleThreshold.get()]) - return - #================================================================== - # - # Frame that specifies the options for the Dists display - # - def OptionsDists(self,master): - # - # parameter - # - self.DistsParm=Tkinter.StringVar() - parms=self.__VU.getVerParmsVect() - defaultParm=parms[0] - self.radioGroup(master,"Parameter:",self.DistsParm,parms,defaultParm,Tkinter.BOTH,1) - # - # display - # - self.DistsDisplay=Tkinter.StringVar() - - radioFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - labFrame=Tkinter.Frame(radioFrame) - - radioLabel=Tkinter.Label(labFrame,text="Display:") - radioLabel.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - - 
self.DistsScale=Tkinter.IntVar() - self.DistsScaleText=Tkinter.StringVar() - but=Tkinter.Menubutton(labFrame,textvariable=self.DistsScaleText, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.RIGHT,anchor=Tkinter.W) - self.DistsScalePopup=Tkinter.Menu(but,tearoff=0) - for (value,text) in self.__scaleList: - self.DistsScalePopup.add_radiobutton(label=text,indicatoron=0,value=value, - variable=self.DistsScale, - command=self.updateDistsScale) - self.DistsScale.set(0) - #self.updateDistsScale() - but.config(menu=self.DistsScalePopup) - labFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X,expand=1) - - a=Tkinter.Radiobutton(radioFrame,text="Error Histogram", - variable=self.DistsDisplay,value="Error Histogram") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - self.DistsDisplay.set("Error Histogram") - a=Tkinter.Radiobutton(radioFrame,text="Value Histogram", - variable=self.DistsDisplay,value="Value Histogram") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - a=Tkinter.Radiobutton(radioFrame,text="Expected Value", - variable=self.DistsDisplay,value="Expected Value") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - a=Tkinter.Radiobutton(radioFrame,text="Scatterplot", - variable=self.DistsDisplay,value="Scatterplot") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - radioFrame.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=Tkinter.X,expand=0) - - #displays=["Error Histogram","Value Histogram","Expected Value","Scatterplot"] - #defaultDisplay="Error Histogram" - #self.radioGroup(master,"Display:",self.DistsDisplay,displays,defaultDisplay,Tkinter.X,0) - return - #================================================================== - # - def updateDistsScale(self): - value=self.DistsScale.get() - for i in xrange(len(self.__scaleList)): - (num,text)=self.__scaleList[i] - if num==value: - self.DistsScaleText.set(text) - self.GridsScale.set(num) - self.GridsScaleText.set(text) - self.GridsStatsScale.set(num) - self.GridsStatsScaleText.set(text) - self.StatsScale.set(num) 
- self.StatsScaleText.set(text) - break - #================================================================== - # - # Frame that specifies the options for the Stats display - # - def OptionsStats(self,master): - # - # parameter - # - self.StatsParms=[] - parms=self.__VU.getVerParmsVect() - for parm in parms: - self.StatsParms.append(Tkinter.StringVar()) - defaultParms=[parms[0],] - self.checkGroup(master,"Parameter:",self.StatsParms,parms, - defaultParms,Tkinter.X,0,callback=self.updateStatsThreshold) - # - # Area list - # - af=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - alist=self.__VU.listEditAreaDescriptions() - alist[0:0]=["Current"] - defaultArea=alist[0] - maxwid=0 - for area in alist: - if len(area)>maxwid: - maxwid=len(area) - if len(alist)>5: - maxheight=5 - else: - maxheight=len(alist) - - acomb=Tkinter.Frame(af) - self.StatsAreaCombine=Tkinter.IntVar() - comb=Tkinter.Checkbutton(acomb,text="Combine",variable=self.StatsAreaCombine) - self.StatsAreaCombine.set(1) - comb.pack(side=Tkinter.RIGHT,anchor=Tkinter.E) - sLabel=Tkinter.Label(acomb,text="Edit Area:") - sLabel.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - acomb.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X) - sb=Tkinter.Scrollbar(af,orient=Tkinter.VERTICAL) - self.StatsAreasListbox=Tkinter.Listbox(af,yscrollcommand=sb.set, - selectmode=Tkinter.EXTENDED,width=maxwid,height=maxheight) - sb.config(command=self.StatsAreasListbox.yview) - sb.pack(side=Tkinter.RIGHT,fill=Tkinter.Y) - self.StatsAreasListbox.pack(side=Tkinter.LEFT,fill=Tkinter.BOTH,expand=1) - idx=0 - for item in alist: - self.StatsAreasListbox.insert(Tkinter.END,item) - if item in defaultArea: - self.StatsAreasListbox.select_set(idx) - idx+=1 - - af.pack(side=Tkinter.TOP,anchor=Tkinter.N,fill=Tkinter.BOTH,expand=1) - # - # display - # - self.StatsDisplay=Tkinter.StringVar() - radioFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - labFrame=Tkinter.Frame(radioFrame) - - 
radioLabel=Tkinter.Label(labFrame,text="Display:") - radioLabel.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - - self.StatsScale=Tkinter.IntVar() - self.StatsScaleText=Tkinter.StringVar() - but=Tkinter.Menubutton(labFrame,textvariable=self.StatsScaleText, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.RIGHT,anchor=Tkinter.W) - self.StatsScalePopup=Tkinter.Menu(but,tearoff=0) - for (value,text) in self.__scaleList: - self.StatsScalePopup.add_radiobutton(label=text,indicatoron=0,value=value, - variable=self.StatsScale, - command=self.updateStatsScale) - self.StatsScale.set(0) - #self.updateStatsScale() - but.config(menu=self.StatsScalePopup) - labFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X,expand=1) - - a=Tkinter.Radiobutton(radioFrame,text="Bias", - variable=self.StatsDisplay,value="Bias") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - self.StatsDisplay.set("Bias") - a=Tkinter.Radiobutton(radioFrame,text="Mean Abs Error", - variable=self.StatsDisplay,value="Mean Abs Error") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - a=Tkinter.Radiobutton(radioFrame,text="RMS Error", - variable=self.StatsDisplay,value="RMS Error") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - a=Tkinter.Radiobutton(radioFrame,text="Mean Squared Error", - variable=self.StatsDisplay,value="Mean Squared Error") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - - threshFrame=Tkinter.Frame(radioFrame) - a=Tkinter.Radiobutton(threshFrame,text="Percent Err <", - variable=self.StatsDisplay,value="Percent Err <") - a.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.StatsThreshold=Tkinter.IntVar() - self.StatsThresholdValue=Tkinter.StringVar() - but=Tkinter.Menubutton(threshFrame,textvariable=self.StatsThresholdValue, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.thresholdStatsPopup=Tkinter.Menu(but,tearoff=0) - self.thresholdStatsPopup.add_command(label="stuff") - for i in xrange(self.__VU.getCFG('NUMTHRESH')): - 
self.thresholdStatsPopup.add_radiobutton(label="xxxx",indicatoron=0,value=i, - variable=self.StatsThreshold, - command=self.pickStatsThreshold) - but.config(menu=self.thresholdStatsPopup) - threshFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W) - twocatFrame=Tkinter.Frame(radioFrame) - a=Tkinter.Radiobutton(twocatFrame,text="",variable=self.StatsDisplay, - value="TwoCat") - a.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.statsTwoCatType=Tkinter.StringVar() - but=Tkinter.Menubutton(twocatFrame,textvariable=self.statsTwoCatType, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.statsTwoCatTypePopup=Tkinter.Menu(but,tearoff=0) - for stat in ["Hits","Areal Hits","Misses","Areal Misses", - "False Alarms","Areal False Alarms","Correct Negatives", - "Areal Correct Negatives", - "Frequency Observed", - "Frequency Forecast", - "Fraction Correct", - "Areal Fraction Correct", - "Frequency Bias","Areal Frequency Bias", - "POD","Areal POD","FAR","Areal FAR","Threat Score", - "Areal Threat Score","Equitable Threat Score", - "Areal Equitable Threat Score","True Skill Score", - "Areal True Skill Score","Heidke Skill Score", - "Areal Heidke Skill Score","Odds Ratio","Areal Odds Ratio"]: - self.statsTwoCatTypePopup.add_radiobutton(label=stat, - indicatoron=0,value=stat, - variable=self.statsTwoCatType,command=self.updateStatsTwoCatType) - self.statsTwoCatType.set("Fraction Correct") - but.config(menu=self.statsTwoCatTypePopup) - self.statsTwoCatCond=Tkinter.StringVar() - but=Tkinter.Menubutton(twocatFrame,textvariable=self.statsTwoCatCond, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.statsTwoCatCondPopup=Tkinter.Menu(but,tearoff=0) - for cond in [">",">=","<=","<"]: - self.statsTwoCatCondPopup.add_radiobutton(label=cond, - indicatoron=0,value=cond, - variable=self.statsTwoCatCond,command=self.updateStatsTwoCatType) - self.statsTwoCatCond.set(">") - but.config(menu=self.statsTwoCatCondPopup) - 
self.statsTwoCatValueString=Tkinter.StringVar() - ent=Tkinter.Entry(twocatFrame,textvariable=self.statsTwoCatValueString, - width=5,relief=Tkinter.SUNKEN) - self.statsTwoCatValueString.set("0.0") - ent.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - twocatFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W) - radioFrame.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=Tkinter.X,expand=0) - self.updateStatsThreshold() - # - # Stat type - # - self.StatsType=Tkinter.StringVar() - stattypes=["vs. Time","vs. Fcst Hour"] - defaulttype="vs. Time" - self.radioGroup(master,"Plot:",self.StatsType,stattypes,defaulttype,Tkinter.X,0) - return - #================================================================== - # - def updateStatsScale(self): - value=self.StatsScale.get() - for i in xrange(len(self.__scaleList)): - (num,text)=self.__scaleList[i] - if num==value: - self.StatsScaleText.set(text) - self.GridsScale.set(num) - self.GridsScaleText.set(text) - self.GridsStatsScale.set(num) - self.GridsStatsScaleText.set(text) - self.DistsScale.set(num) - self.DistsScaleText.set(text) - break - def updateStatsTwoCatType(self): - self.StatsDisplay.set("TwoCat") - return - #================================================================== - # - def pickStatsThreshold(self): - self.StatsDisplay.set("Percent Err <") - self.updateStatsThreshold() - return - - def updateStatsThreshold(self): - parmList=self.getCheckList(self.StatsParms) - if len(parmList)<1: - return - tlist=[] - plist=[] - for parm in parmList: - readParm=parm - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - thresholds=self.__VU.getVerThresholds(readParm) - if last3 in ("Spd","Dir"): - (threshmag,threshdir)=thresholds - if last3=="Spd": - thresholds=threshmag - else: - thresholds=threshdir - if len(tlist)>0: - same=1 - for j in xrange(len(tlist)): - thresh=tlist[j] - parms=plist[j] - same=1 - for i in xrange(len(thresh)): - if thresh[i]!=thresholds[i]: - same=0 - break - if 
same==1: - plist[j]+=",%s"%parm - break - if same!=1: - tlist.append(thresholds) - plist.append(parm) - else: - tlist.append(thresholds) - plist.append(parm) - - dthresh=[] - if len(tlist)>1: - for j in xrange(len(tlist)): - thresh=tlist[j] - parms=plist[j] - for i in xrange(len(thresh)): - t=thresh[i] - str="%d"%(t) - if len(dthresh)<(i+1): - dthresh.append(str) - else: - dthresh[i]+=" | %s"%str - else: - thresh=tlist[0] - for i in xrange(len(thresh)): - t=thresh[i] - str="%d"%t - dthresh.append(str) - # - # - parmList=" | ".join(plist) - self.thresholdStatsPopup.entryconfigure(0,label=parmList) - for i in xrange(len(dthresh)): - self.thresholdStatsPopup.entryconfigure(i+1,label=dthresh[i]) - #print " ",i,dthresh[i] - self.StatsThresholdValue.set(dthresh[self.StatsThreshold.get()]) - return - #================================================================== - # - # Frame that specifies the options for the Scale vs Stat display - # - def OptionsScaleStats(self,master): - # - # parameter - # - self.ScaleStatsParm=Tkinter.StringVar() - parms=self.__VU.getVerParmsVect() - defaultParm=parms[0] - self.radioGroup(master,"Parameter:",self.ScaleStatsParm,parms,defaultParm,Tkinter.BOTH,1) - # - # Area list - # - af=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - alist=self.__VU.listEditAreaDescriptions() - alist[0:0]=["Current"] - defaultArea=alist[0] - maxwid=0 - for area in alist: - if len(area)>maxwid: - maxwid=len(area) - if len(alist)>5: - maxheight=5 - else: - maxheight=len(alist) - - acomb=Tkinter.Frame(af) - self.ScaleStatsAreaCombine=Tkinter.IntVar() - #comb=Tkinter.Checkbutton(acomb,text="Combine",variable=self.StatsAreaCombine) - self.ScaleStatsAreaCombine.set(1) # always set - #comb.pack(side=Tkinter.RIGHT,anchor=Tkinter.E) - sLabel=Tkinter.Label(acomb,text="Edit Area:") - sLabel.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - acomb.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X) - sb=Tkinter.Scrollbar(af,orient=Tkinter.VERTICAL) - 
self.ScaleStatsAreasListbox=Tkinter.Listbox(af,yscrollcommand=sb.set, - selectmode=Tkinter.EXTENDED,width=maxwid,height=maxheight) - sb.config(command=self.ScaleStatsAreasListbox.yview) - sb.pack(side=Tkinter.RIGHT,fill=Tkinter.Y) - self.ScaleStatsAreasListbox.pack(side=Tkinter.LEFT,fill=Tkinter.BOTH,expand=1) - idx=0 - for item in alist: - self.ScaleStatsAreasListbox.insert(Tkinter.END,item) - if item in defaultArea: - self.ScaleStatsAreasListbox.select_set(idx) - idx+=1 - - af.pack(side=Tkinter.TOP,anchor=Tkinter.N,fill=Tkinter.BOTH,expand=1) - # - # display - # - self.ScaleStatsDisplay=Tkinter.StringVar() - radioFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - labFrame=Tkinter.Frame(radioFrame) - - radioLabel=Tkinter.Label(labFrame,text="Display:") - radioLabel.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - labFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W,fill=Tkinter.X,expand=1) - - a=Tkinter.Radiobutton(radioFrame,text="Bias", - variable=self.ScaleStatsDisplay,value="Bias") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - self.ScaleStatsDisplay.set("Bias") - a=Tkinter.Radiobutton(radioFrame,text="Mean Abs Error", - variable=self.ScaleStatsDisplay,value="Mean Abs Error") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - a=Tkinter.Radiobutton(radioFrame,text="RMS Error", - variable=self.ScaleStatsDisplay,value="RMS Error") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - a=Tkinter.Radiobutton(radioFrame,text="Mean Squared Error", - variable=self.ScaleStatsDisplay,value="Mean Squared Error") - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - - threshFrame=Tkinter.Frame(radioFrame) - a=Tkinter.Radiobutton(threshFrame,text="Percent Err <", - variable=self.ScaleStatsDisplay,value="Percent Err <") - a.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.ScaleStatsThreshold=Tkinter.IntVar() - self.ScaleStatsThresholdValue=Tkinter.StringVar() - but=Tkinter.Menubutton(threshFrame,textvariable=self.ScaleStatsThresholdValue, - relief=Tkinter.RAISED,indicatoron=1) - 
but.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.thresholdScaleStatsPopup=Tkinter.Menu(but,tearoff=0) - self.thresholdScaleStatsPopup.add_command(label="stuff") - for i in xrange(self.__VU.getCFG('NUMTHRESH')): - self.thresholdScaleStatsPopup.add_radiobutton(label="xxxx",indicatoron=0,value=i, - variable=self.ScaleStatsThreshold, - command=self.pickScaleStatsThreshold) - but.config(menu=self.thresholdScaleStatsPopup) - threshFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W) - twocatFrame=Tkinter.Frame(radioFrame) - a=Tkinter.Radiobutton(twocatFrame,text="",variable=self.ScaleStatsDisplay, - value="TwoCat") - a.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.scaleStatsTwoCatType=Tkinter.StringVar() - but=Tkinter.Menubutton(twocatFrame,textvariable=self.scaleStatsTwoCatType, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - self.scaleStatsTwoCatTypePopup=Tkinter.Menu(but,tearoff=0) - for stat in ["Hits","Areal Hits","Misses","Areal Misses", - "False Alarms","Areal False Alarms","Correct Negatives", - "Areal Correct Negatives", - "Frequency Observed", - "Frequency Forecast", - "Fraction Correct", - "Areal Fraction Correct", - "Frequency Bias","Areal Frequency Bias", - "POD","Areal POD","FAR","Areal FAR","Threat Score", - "Areal Threat Score","Equitable Threat Score", - "Areal Equitable Threat Score","True Skill Score", - "Areal True Skill Score","Heidke Skill Score", - "Areal Heidke Skill Score","Odds Ratio","Areal Odds Ratio"]: - self.scaleStatsTwoCatTypePopup.add_radiobutton(label=stat, - indicatoron=0,value=stat, - variable=self.scaleStatsTwoCatType,command=self.updateScaleStatsTwoCatType) - self.scaleStatsTwoCatType.set("Fraction Correct") - but.config(menu=self.scaleStatsTwoCatTypePopup) - self.scaleStatsTwoCatCond=Tkinter.StringVar() - but=Tkinter.Menubutton(twocatFrame,textvariable=self.scaleStatsTwoCatCond, - relief=Tkinter.RAISED,indicatoron=1) - but.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - 
self.scaleStatsTwoCatCondPopup=Tkinter.Menu(but,tearoff=0) - for cond in [">",">=","<=","<"]: - self.scaleStatsTwoCatCondPopup.add_radiobutton(label=cond, - indicatoron=0,value=cond, - variable=self.scaleStatsTwoCatCond,command=self.updateScaleStatsTwoCatType) - self.scaleStatsTwoCatCond.set(">") - but.config(menu=self.scaleStatsTwoCatCondPopup) - self.scaleStatsTwoCatValueString=Tkinter.StringVar() - ent=Tkinter.Entry(twocatFrame,textvariable=self.scaleStatsTwoCatValueString, - width=5,relief=Tkinter.SUNKEN) - self.scaleStatsTwoCatValueString.set("0.0") - ent.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - twocatFrame.pack(side=Tkinter.TOP,anchor=Tkinter.W) - radioFrame.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=Tkinter.X,expand=0) - self.updateScaleStatsThreshold() - return - def updateScaleStatsTwoCatType(self): - self.ScaleStatsDisplay.set("TwoCat") - return - #================================================================== - # - def pickScaleStatsThreshold(self): - self.ScaleStatsDisplay.set("Percent Err <") - self.updateScaleStatsThreshold() - return - - def updateScaleStatsThreshold(self): - parm=self.ScaleStatsParm.get() - readParm=parm - last3="xxx" - if len(parm)>3: - last3=parm[-3:] - if ((last3=="Spd")or(last3=="Dir")): - readParm=parm[:-3] - thresholds=self.__VU.getVerThresholds(readParm) - if last3 in ("Spd","Dir"): - (threshmag,threshdir)=thresholds - if last3=="Spd": - thresholds=threshmag - else: - thresholds=threshdir - - dthresh=[] - for i in xrange(len(thresholds)): - t=thresholds[i] - str="%d"%t - dthresh.append(str) - # - # - self.thresholdScaleStatsPopup.entryconfigure(0,label=parm) - for i in xrange(len(dthresh)): - self.thresholdScaleStatsPopup.entryconfigure(i+1,label=dthresh[i]) - self.ScaleStatsThresholdValue.set(dthresh[self.ScaleStatsThreshold.get()]) - return - #================================================================= - # displayGroup - make a group of radio buttons with scale stuff - # - def 
displayGroup(self,master,labeltext,var,valuelist,defaultvalue,filltype,expandflag): - radioFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - radioLabel=Tkinter.Label(radioFrame,text=labeltext) - radioLabel.pack(side=Tkinter.TOP,anchor=Tkinter.W) - for item in valuelist: - a=Tkinter.Radiobutton(radioFrame,text=item, - variable=var,value=item) - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - if item is defaultvalue: - var.set(item) - radioFrame.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=filltype,expand=expandflag) - #================================================================= - # radioGroup - make a group of radio buttons - # - def radioGroup(self,master,labeltext,var,valuelist,defaultvalue,filltype, - expandflag,callback=None): - radioFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - radioLabel=Tkinter.Label(radioFrame,text=labeltext) - radioLabel.pack(side=Tkinter.TOP,anchor=Tkinter.W) - for item in valuelist: - a=Tkinter.Radiobutton(radioFrame,text=item, - variable=var,value=item,command=callback) - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - if item is defaultvalue: - var.set(item) - radioFrame.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=filltype,expand=expandflag) - #================================================================= - # checkGroup - make a group of check buttons - # - def checkGroup(self,master,labeltext,varlist,valuelist, - defaultvalues,filltype,expandflag,callback=None,maxRows=30): - checkFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - checkLabel=Tkinter.Label(checkFrame,text=labeltext) - checkLabel.pack(side=Tkinter.TOP,anchor=Tkinter.W) - cnt=0 - row=0 - col=0 - f=Tkinter.Frame(checkFrame,relief=Tkinter.FLAT,borderwidth=2) - f.pack(side=Tkinter.TOP,anchor=Tkinter.W) - if len(valuelist) > maxRows: - ncols = (len(valuelist) - 1)/maxRows + 1 - maxRows = (len(valuelist) - 1)/ncols + 1 - for item in valuelist: - a=Tkinter.Checkbutton(f,text=item,variable=varlist[cnt], - 
onvalue=item,offvalue="",command=callback) - if item in defaultvalues: - varlist[cnt].set(item) - - a.grid(row=row,column=col,sticky=Tkinter.NW) - print "Formatting row %d col %d for %s" % (row,col,item) - row=row+1 - if row == maxRows: - row = 0 - col =col + 1 - cnt=cnt+1 - checkFrame.pack(side=Tkinter.TOP,fill=filltype,expand=expandflag) - return varlist - #================================================================= - # getCheckList - get list of values turned on in the checkbutton list - # - def getCheckList(self,checklist): - outlist=[] - for i in xrange(len(checklist)): - a=checklist[i].get() - if a!="": - outlist.append(a) - return outlist - #================================================================= - # sListbox - make a listbox with a scrollbar - # - def sListbox(self,master,labeltext,itemlist,defaultItems, - maxwid,maxheight,smode,filltype=Tkinter.BOTH,expandflag=1): - sLabel=Tkinter.Label(master,text=labeltext) - sLabel.pack(side=Tkinter.TOP,anchor=Tkinter.W) - sb=Tkinter.Scrollbar(master,orient=Tkinter.VERTICAL) - slb=Tkinter.Listbox(master,yscrollcommand=sb.set, - selectmode=smode,width=maxwid,height=maxheight) - sb.config(command=slb.yview) - sb.pack(side=Tkinter.RIGHT,fill=Tkinter.Y) - slb.pack(side=Tkinter.LEFT,fill=filltype,expand=expandflag) - idx=0 - for item in itemlist: - slb.insert(Tkinter.END,item) - if item in defaultItems: - slb.select_set(idx) - idx+=1 - return slb - #================================================================= - # getListbox - get list of values turned on in the listbox - # - def getListbox(self,listbox): - outlist=[] - itemnums=listbox.curselection() - try: - itemnums=map(int,itemnums) - except ValueError: pass - for itemnum in itemnums: - outlist.append(listbox.get(itemnum)) - return outlist - #================================================================= - def setDatetype(self): - type=self.Datetype.get() - if type=="Period Length": - self.ByList.pack_forget() - 
self.ByPeriod.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=Tkinter.BOTH,expand=1) - else: - self.ByPeriod.pack_forget() - self.ByList.pack(side=Tkinter.TOP,anchor=Tkinter.NW,fill=Tkinter.BOTH,expand=1) - #================================================================== - # getRecentDates - gets a list of date strings from today through - # numdays in the past. Also returns list of - # unix times for the beginning of each date. - def getRecentDates(self,numdays): - recentDateStrings=[] - recentDates=[] - (nyea,nmon,nday,nhou,nmin,nsec,nwda,nyda,ndst)=time.gmtime() - midtoday=calendar.timegm((nyea,nmon,nday,0,0,0,0,0,0)) - for i in xrange(numdays): - daymid=midtoday-(i*DAYSECS) - (gyr,gmo,gdy,ghr,gmi,gse,gwd,gyd,gds)=time.gmtime(daymid) - recentDateStrings.append("%4.4d/%2.2d/%2.2d"%(gyr,gmo,gdy)) - recentDates.append(daymid) - return recentDateStrings,recentDates -# -# Special global routines used in Histogram callback stuff to -# move data on/off the screen -# -def showmodel(self,modname): - if self.showmod[modname]==1: - self.cd.canvas.move(modname,0,-self.cd.curheight) - self.cd.canvas.lower(modname) - self.showmod[modname]=0 - self.modb[modname].config(fg="grey") - else: - self.cd.canvas.move(modname,0,self.cd.curheight) - self.cd.canvas.lift(modname) - self.showmod[modname]=1 - self.modb[modname].config(fg=self.colornames[modname]) - return -#===================================================================== -# -# Toggle stuff with "but" tag in the but1 list -# -def showBut1(self,but): - if but.isdigit(): - newbut="f%s"%but - but=newbut - if self.but1state.get(but)==1: - self.cd.canvas.move(but,0,-self.cd.curheight) - self.cd.canvas.lower(but) - self.but1state[but]=0 - self.but1[but].config(fg="grey") - else: - self.cd.canvas.move(but,0,self.cd.curheight) - self.cd.canvas.lift(but) - self.but1state[but]=1 - self.but1[but].config(fg="black") -#===================================================================== -# -# Turn off all but1 tags except first 
button -# -def startBut1(self): - for but in self.but1names: - if but!=self.but1names[0]: - showBut1(self,but) -#===================================================================== -# -# Move toggled but1 buttons - one to the left -# -def prevBut1(self): - newbut=[] - for but in self.but1names: - if self.but1state.get(but)==1: - newbut.append(1) - else: - newbut.append(0) - temp=newbut[0] - del newbut[0] - newbut.append(temp) - for i in xrange(len(self.but1names)): - but=self.but1names[i] - now=self.but1state[but] - after=newbut[i] - if ((now==1)and(after==0)): - self.cd.canvas.move(but,0,-self.cd.curheight) - self.cd.canvas.lower(but) - self.but1state[but]=0 - self.but1[but].config(fg="grey") - elif ((now==0)and(after==1)): - self.cd.canvas.move(but,0,self.cd.curheight) - self.cd.canvas.lift(but) - self.but1state[but]=1 - self.but1[but].config(fg="black") -#===================================================================== -# -# Move toggled but1 buttons - one to the right -# -def nextBut1(self): - newbut=[] - for but in self.but1names: - if self.but1state.get(but)==1: - newbut.append(1) - else: - newbut.append(0) - temp=newbut.pop() - newbut[0:0]=[temp] - for i in xrange(len(self.but1names)): - but=self.but1names[i] - now=self.but1state[but] - after=newbut[i] - if ((now==1)and(after==0)): - self.cd.canvas.move(but,0,-self.cd.curheight) - self.cd.canvas.lower(but) - self.but1state[but]=0 - self.but1[but].config(fg="grey") - elif ((now==0)and(after==1)): - self.cd.canvas.move(but,0,self.cd.curheight) - self.cd.canvas.lift(but) - self.but1state[but]=1 - self.but1[but].config(fg="black") -#===================================================================== -# -# Toggle stuff with "but" tag in the but2 list -# -def showBut2(self,but): - if self.but2state.get(but)==1: - self.cd.canvas.move(but,-self.cd.curwidth,0) - self.cd.canvas.lower(but) - self.but2state[but]=0 - self.but2[but].config(fg="grey") - else: - self.cd.canvas.move(but,self.cd.curwidth,0) - 
self.cd.canvas.lift(but) - self.but2state[but]=1 - self.but2[but].config(fg=self.colornames[but]) -#===================================================================== -# -# Turn off all but2 tags except first button -# -def startBut2(self): - for but in self.but2names: - if but!=self.but2names[0]: - showBut2(self,but) -#===================================================================== -# -# Move toggled but2 buttons - one to the left -# -def prevBut2(self): - newbut=[] - for but in self.but2names: - if self.but2state.get(but)==1: - newbut.append(1) - else: - newbut.append(0) - temp=newbut[0] - del newbut[0] - newbut.append(temp) - for i in xrange(len(self.but2names)): - but=self.but2names[i] - now=self.but2state[but] - after=newbut[i] - if ((now==1)and(after==0)): - self.cd.canvas.move(but,-self.cd.curwidth,0) - self.cd.canvas.lower(but) - self.but2state[but]=0 - self.but2[but].config(fg="grey") - elif ((now==0)and(after==1)): - self.cd.canvas.move(but,self.cd.curwidth,0) - self.cd.canvas.lift(but) - self.but2state[but]=1 - self.but2[but].config(fg=self.colornames[but]) -#===================================================================== -# -# Move toggled but2 buttons - one to the right -# -def nextBut2(self): - newbut=[] - for but in self.but2names: - if self.but2state.get(but)==1: - newbut.append(1) - else: - newbut.append(0) - temp=newbut.pop() - newbut[0:0]=[temp] - for i in xrange(len(self.but2names)): - but=self.but2names[i] - now=self.but2state[but] - after=newbut[i] - if ((now==1)and(after==0)): - self.cd.canvas.move(but,-self.cd.curwidth,0) - self.cd.canvas.lower(but) - self.but2state[but]=0 - self.but2[but].config(fg="grey") - elif ((now==0)and(after==1)): - self.cd.canvas.move(but,self.cd.curwidth,0) - self.cd.canvas.lift(but) - self.but2state[but]=1 - self.but2[but].config(fg=self.colornames[but]) -# -# debug stuff for memory usage -# -_proc_status="/proc/%d/status"%os.getpid() -_scale={'kB':1024.0,'mB':1024.0*1024.0, - 
'KB':1024.0,'MB':1024.0*1024.0} -def _VmB(VmKey): - try: - t=open(_proc_status) - v=t.read() - t.close() - except IOError: - return 0.0 - i=v.index(VmKey) - v=v[i:].split(None,3) - if len(v)<3: - return 0.0 - return float(v[1])*_scale[v[2]] -def memory(): - return _VmB('VmSize:') -def resident(): - return _VmB('VmRSS:') -# -# stuff to support a callback with a pre-known variable -# -def GenericCallback(callback, *firstArgs, **firstKWArgs): - if firstKWArgs: - return GC(callback, *firstArgs, **firstKWArgs) - else: - return GCNoKWArgs(callback, *firstArgs) -# -# Classes for callbacks -# -class GC: - def __init__(self,callback,*firstArgs, **firstKWArgs): - self.__callback=callback - self.__firstArgs=firstArgs - self.__firstKWArgs=firstKWArgs - def __call__(self, *lastArgs, **kwArgs): - if kwArgs: - netKWArgs=self.__firstKWArgs.copy() - netKWArgs.update(self.__kwArgs) - else: - netKWArgs=self.__firstKWArgs - return self.__callback (*(self.__firstArgs+lastArgs),**netKWArgs) -class GCNoKWArgs: - def __init__(self, callback, *firstArgs): - self.__callback=callback - self.__firstArgs=firstArgs - def __call__(self, *args, **kwArgs): - return self.__callback (*(self.__firstArgs+args),**kwArgs) +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# BOIVerify - version 2.0.5 +# +# Main tool to calculate and display verification information. The main +# dialog contains tabs to display: +# (1) the archived grids (or errors) +# (2) grids of stats calculated for every gridpoint +# (3) histograms of the errors over the current edit area +# (4) line graphs of stats over pre-defined edit areas +# (5) graphs of stats vs. 
scale +# +# The BOIVerifySave tool saves the grid into the verification database +# The BOIVerifyAutoCalc tool calculates and saves the stats for pre-defined +# editareas. +# +# Author: Tim Barker - SOO BOI +# 2005/07/01 - Original Implmentation +# 2005/07/29 - version 0.1 - update to grid database structure +# 2006/11/06 - version 1.0 - First version with time-series graphs. Still +# lots of bugs and not what I would like for a version 1.0 but +# I've put it off way too long anyway. +# 2007/10/25 - version 2.0 +# . moved into a procedure rather than a tool +# . fixed problem with precision="0" for sites that do +# not have WG1 defined +# . fixed 'flashing' of user interface on startup +# . fixed so that clicking on 'stop' during long drawing +# of many stat grids will stop more quickly. +# . allowed program name in error messages to be something +# other than BOIVerify (could be GridVerify, etc.) +# . use labels of 'histogram' and 'scatterplot' rather than +# errors and values +# . use date label of 'ending on' rather than 'before' +# . added limits to forecast hours shown +# . added support for probabilistic parms +# . added support for handling common cases +# 2008/05/28 - version 2.0.5 +# . fixed problem with forced large range of line graphs +# for QPF bias, etc. In old code if graph range was less +# than 0.01, it was automatically forced to be 1.0. Now, +# it only forcibly expands the graph range when the actual +# range is less than 1/10 the precision of the parm, and +# even then it only expands the graph range upward by 2 +# times the parm precision. +# +# +# 2010/04/23 ryu Initial port to AWIPS II. Fixed bugs with the "Stat vs. Scale" tab. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Dec 02, 2014 RM #625 ryu Changed checkGroup() as suggested to display models +# in multi-columns when a single column is insufficient. 
+# Apr 16, 2015 17390 ryu Replacing string.atoi with int for string/integer to integer conversion +# (ListBox.curselection() now returns ints instead of strings.) +# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# ---------------------------------------------------------------------------- +# + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +MenuItems = ["Verify"] + +from numpy import * +from math import * +import tkinter +import TkDefaults +import AppDialog +import time,calendar,sys,re,traceback,string +import SmartScript +import BOIVerifyUtility +import os # for debug + +from com.raytheon.viz.gfe import GFEPreference +from java.lang import Float + +PROGNAME="BOIVerify" # you can change it if you dont like BOI. Shame on you! +COLORLIST=["blue","green","red","cyan","yellow","purple","orange", + "Gold","Coral","DarkOliveGreen","DarkOrchid","Brown","DeepPink", + "DodgerBlue","DarkSeaGreen"] +HOURSECS=60*60 +DAYSECS=24*HOURSECS + + +def getFloatPref(key, dflt): + if not GFEPreference.contains(key): + print("no config value for", key) + return dflt + + val = GFEPreference.getFloatPreference(key) + print("preference for %s:" % key, val) + return val + +def setFloatPref(key, value): + if value is not None: + value = Float.parseFloat(str(value)) + GFEPreference.setPreference(key, value) + + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + print("Start of %s - virtual memory:%d resident: %d"%(PROGNAME,memory(),resident())) + self._dbss=dbss + self.userName=self._dbss.getWsId().getUserName() + SmartScript.SmartScript.__init__(self, dbss) + self.statusBarMsg("Starting %s"%PROGNAME,"R") + self.PROGNAME=PROGNAME + self.HOURSECS=60*60 + self.DAYSECS=24*self.HOURSECS + self.COLORLIST=COLORLIST + self.__colorMapParams = {} + return + + def execute(self): + + print("starting execute routine with memory:%d resident: 
%d"%(memory(),resident())) + # + # See if a child window of GFE is named "%self.PROGNAME Options" + # If so...then program is already running and just make + # a dialog box telling them that... + # + #if alreadyRunning: + # self.statusBarMsg("%s is already running"%self.PROGNAME,"U") + # return + # + + try: + self.__setup() + + self.root.withdraw() + self.root.mainloop() + #except: + # traceback.print_exc() + finally: + try: + self.root.destroy() + except: + pass + print("Exiting...") + return + + def __setup(self): + tk = tkinter.Tk() + self.root = tk + sw = tk.winfo_screenwidth() + sh = tk.winfo_screenheight() + tk.geometry("%dx%d+0+0" % (sw,sh)) + + TkDefaults.setDefaults(tk) + # + # Splash screen... + # + splash=tkinter.Toplevel(None) + splash.overrideredirect(1) + f=tkinter.Frame(splash,relief=tkinter.RIDGE,borderwidth=2, + background="yellow") + txtwid=max(len(self.PROGNAME),10) + text="Starting up\n%s"%self.PROGNAME + lab=tkinter.Label(f,justify=tkinter.CENTER,text=text, + fg="black",bg="yellow",width=txtwid+2) + lab.pack(side=tkinter.TOP) + wTxt=tkinter.StringVar(f) + lab=tkinter.Label(f,justify=tkinter.CENTER,textvariable=wTxt, + fg="black",bg="yellow") + lab.pack(side=tkinter.TOP) + f.pack(side=tkinter.TOP,ipadx=50,ipady=10) + wTxt.set(". ") + splash.update_idletasks() + ww=splash.winfo_reqwidth() + wh=splash.winfo_reqheight() + sw=splash.winfo_screenwidth() + sh=splash.winfo_screenheight() + newgeom="%dx%d+%d+%d"%(ww,wh,int(float(sw-ww)/2.0),int(float(sh-wh)/2.0)) + splash.geometry(newgeom) + splash.wait_visibility() + splash.update_idletasks() + # + # Start up the utility + # + try: + # + # Start up the utility + # + self.VU=BOIVerifyUtility.BOIVerifyUtility(self._dbss, None) + print("after setting up VU: memory:%d resident:%d"%(memory(),resident())) + self.setToolType("numeric") + + # + # Setup scaleList. This contains tuples of (numpts,label) where + # numpts is the +/- points to average over, and label is a label + # description of that area. 
+ # + self.scaleList=[] + spacing=self.VU.getGridSpacing() + nominalSpacing=self.VU.getCFG('NOMINALSPACING') + rspacing=int((float(spacing)/float(nominalSpacing))+0.5)*nominalSpacing + maxk=max(self.getGridShape()) + for k in range(maxk): + curTxt=wTxt.get() + last=curTxt[-1] + rest=curTxt[:-1] + newTxt=last+rest + wTxt.set(newTxt) + splash.update_idletasks() + if k>0: + scale=k*2.0*rspacing + else: + scale=rspacing + iscale=int(scale+0.5) + if ((scale>50)and(scale<100)and(iscale%10!=0)): + continue + if ((scale>=100)and(scale<200)and(iscale%25!=0)): + continue + if ((scale>=200)and(scale<500)and(iscale%50!=0)): + continue + if ((scale>=500)and(iscale%100!=0)): + continue + rdig=0 + for digits in range(2): + mult=10**digits + iscale=int(scale*mult) + rscale=int((scale*mult)+0.5) + if iscale==rscale: + rdig=digits + break + rscale=round(scale,rdig) + + if rdig==0: + lab="%d-km"%int(rscale) + else: + fmt="%%.%df-km"%rdig + lab=fmt%rscale + self.scaleList.append((k,lab)) + # + # Setup the self.pts with number of points in named edit areas + # + self.VU.logMsg("Starting points generation memory:%d resident:%d"%(memory(),resident())) + maxareas=self.VU.CFG['STATAREAS'] + editAreaNames=self.VU.listEditAreas() + self.pts=ones(maxareas,) + for i in range(len(editAreaNames)): + # + curTxt=wTxt.get() + last=curTxt[-1] + rest=curTxt[:-1] + newTxt=last+rest + wTxt.set(newTxt) + splash.update_idletasks() + # + areaname=editAreaNames[i] + self.VU.logMsg(" %3d memory:%d resident:%d"%(i,memory(),resident())) + npts=self.getPts(areaname) + self.VU.logMsg(" %3d after npts memory:%d resident:%d"%(i,memory(),resident())) + j=self.VU.getEditAreaNumberFromName(areaname) + self.pts[j]=npts + self.VU.logMsg(" %3d after pts memory:%d resident:%d"%(i,memory(),resident())) + self.VU.logMsg(" %3d after del ea memory:%d resident:%d"%(i,memory(),resident())) + if self.pts[j]<1: + self.pts[j]=1 + self.VU.logMsg(" after edit area %3d memory:%d resident:%d"%(i,memory(),resident())) + except: + 
splash.destroy() + self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") + raise Exception + # + # Create all the potential dialogs + # + # + self.VU.logMsg("Starting dialog generation memory:%d resident:%d"%(memory(),resident())) + try: + self.mini=MiniDiag(tk,callbackMethod=self.expandMini, + title="Change",buttonText="%s Options"%self.PROGNAME,loc="lr") + except: + splash.destroy() + self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") + raise Exception + # + try: + self.cases=Cases(tk,callbackMethod=self.closeCases) + except: + splash.destroy() + self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") + raise Exception + # + try: + self.miniCases=MiniDiag(tk, callbackMethod=self.expandCases, + title="Display",buttonText="Number of Cases",loc="ur") + except: + splash.destroy() + self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") + raise Exception + # + try: + self.cd=CanvasDisplay(tk, title="Canvas",callbackMethod=self.closeCD) + except: + splash.destroy() + self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") + raise Exception + # + try: + self.dialog=Verif(self.VU,self.userName,self.scaleList, tk, callbackMethod=self.doVerif) + except: + splash.destroy() + self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") + raise Exception + # + # This one last...so it is always on top during 'working' periods + # + try: + self.working=Working(self.dialog,callbackMethod=self.tryToStop) + except: + splash.destroy() + self.statusBarMsg("%s could not start up"%self.PROGNAME,"R") + raise Exception + # + # Destroy the 'starting' message box + # + splash.destroy() + del wTxt + self.statusBarMsg("%s is now running - memory:%d resident:%d"%(self.PROGNAME,memory(),resident()),"R") + + def getPts(self,areaname): + if areaname=="NONE": + ea=self.newGrid(True, bool) + else: + ea=self.encodeEditArea(areaname) + eb=ravel(ea) + num=add.reduce(eb) + pts=int(num) + del ea + del eb + del num + return pts + 
#================================================================== + # + # Routines for the 'working' dialog. + # + # When the stop button is pressed - tryToStop is called and the stop + # variable is set to 1. + # startWorking sets the stop variable to 0 and sets the label, then + # reveals the working dialog and withdraws the main dialog (unless + # overridden) + # setWorking just updates labels in the working dialog + # checkWorking updates the dialog, and returns the stop variable status + # stopWorking withdraws the working dialog and raises the main dialog + # finishWorking withdraws the working dialog and raises the mini dialog + # + def tryToStop(self): + self.working.stop.set(1) + print("tryToStop was called - should stop soon") + return + def startWorking(self,textString,optionRemove=1): + if optionRemove==1: + self.dialog.withdraw() + self.setWorking(textString) + self.working.stop.set(0) + self.working.deiconify() + self.working.lift() + return + def setWorking(self,textString): + self.working.label.set(textString) + self.working.update() + return + def checkWorking(self): + self.working.update() + return self.working.stop.get() + def setAndCheckWorking(self,textString): + self.working.label.set(textString) + self.working.update() + return self.working.stop.get() + def stopWorking(self): + self.working.withdraw() + self.dialog.deiconify() + self.dialog.lift() + self.dialog.update_idletasks() + return + def finishWorking(self): + self.working.withdraw() + self.working.stop.set(0) + self.mini.deiconify() + self.mini.lift() + self.mini.update_idletasks() + return + #================================================================== + # + # Routines for the 'mini' dialog. + # + # When the button is pressed (or X clicked) - expandMini is called + # + def expandMini(self): + self.mini.withdraw() + self.dialog.deiconify() + self.dialog.lift() + return + # + # Hide the main dialog, and reveal the mini-dialog. 
+ # + def hideDialog(self): + self.dialog.withdraw() + self.mini.deiconify() + self.mini.lift() + self.mini.wait_visibility() + self.mini.update_idletasks() + return + #================================================================== + # + # Routines for the cases dialog, and it's mini dialog. + # + def expandCases(self): + geo1=self.miniCases.geometry() + (wh,of)=geo1.split("+",1) + (wid1,hgt1)=wh.split("x",1) + (ofx1,ofy1)=of.split("+",1) + self.miniCases.withdraw() + self.cases.deiconify() + self.cases.lift() + geo2=self.cases.geometry() + (wh,of)=geo2.split("+",1) + (wid2,hgt2)=wh.split("x",1) + (ofx2,ofy2)=of.split("+",1) + newgeo="%s+%d+%d"%(wh,int(ofx1)+int(wid1)-int(wid2),int(ofy1)) + self.cases.geometry(newgeo) + return + def closeCases(self): + self.cases.withdraw() + self.cases.update_idletasks() + self.miniCases.deiconify() + self.miniCases.lift() + self.miniCases.update_idletasks() + return + #================================================================== + # + # Routines for the 'canvas' dialog + # + def closeCD(self): + self.cd.withdraw() + return + #================================================================== + # doVerif - + # This is the routine that really does the verification calculations + # It is called when the user clicks on "Run" "Hide" or "Cancel" + # in the verification dialog. The routine is called with the button + # type of "Run" (do NOT dismiss dialog), "OK" (DISMISS dialog when done) + # or "Quit". The actual removal of the dialog is handled by the + # dialog routines themselves...so all you have to do is return + # right away if the user hit cancel, or do the calculations if they + # hit anything else. 
+ # + def doVerif(self,buttonType): + if buttonType=="Quit": + self.root.quit() + del self.pts + self.statusBarMsg("%s is finished with memory:%d resident:%s"%(self.PROGNAME,memory(),resident()),"R") + return + if buttonType=="Hide": + self.hideDialog() + return + # + # When doing calculations - make sure the cases windows + # are closed + # + self.cases.withdraw() + self.miniCases.withdraw() + # + # Do calculations + # + try: + dict=self.dialog.getValues() + tab=dict["tab"] + if tab=="Grid Displays": + self.ShowGrids(dict) + if tab=="Grid Stats": + self.ShowGridsStats(dict) + if tab=="Distributions": + self.ShowDists(dict) + if tab=="Point/Area Stats": + self.ShowStats(dict) + if tab=="Stat vs. Scale": + self.ShowScaleStats(dict) + # + # If something goes wrong during calculations - close everything + # and raise the exception + # + except: + (exctype,excvalue,trace)=sys.exc_info() + traceStrings=traceback.format_exception(exctype,excvalue,trace) + fullstring=string.join(traceStrings) + self.statusBarMsg("Error in %s:\n%s"%(self.PROGNAME,fullstring),"S") + self.root.quit() + return + #================================================================== + # showGrids - read and display the archived forecast/observed grids + # + def ShowGrids(self,DialogDict): + self.VU.logMsg("running ShowGrids:",0) + parmList=DialogDict["Parm"] + display=DialogDict["Display"] + groupBy=DialogDict["Group"] + cycleList=DialogDict["cycleList"] + modelList=DialogDict["Model"] + obsmodel=DialogDict["ObsModel"] + fcstrList=DialogDict["fcstrList"] + fhrStart=DialogDict["fhrStart"] + fhrEnd=DialogDict["fhrEnd"] + dateType=DialogDict["dateType"] + numDays=DialogDict["numDays"] + fromDay=DialogDict["fromDay"] + dayList=DialogDict["dayList"] + dateStyle=DialogDict["dateStyle"] + scale=DialogDict["scale"] + commonCases=DialogDict["commonCases"] + accumHours=DialogDict["accumHours"] + accumFreq=DialogDict["accumFreq"] + # + # Check for good GUI input + # + 
ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, + dayList) + if ret==0: + return + self.startWorking("Working on Grid Display") + # + # + # + numdisplayed=0 + ret=self.setupGM(parmList,modelList) + if ret==1: + self.stopWorking() + return + errpat=re.compile("^(.*?)(spd|dir)Err") + totaliters=len(parmList)*len(modelList) + iter=0 + self.VU.logMsg("going into parmList loop",10) + for parm in parmList: + self.VU.logMsg("in ShowGrids working on %s"%parm,5) + datatype=self.VU.getVerParmType(parm) + verType=self.VU.getVerType(parm) + errColor=self.VU.getVerErrColor(parm) + (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, + parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) + logkey="%s_LogFactor"%parm + logfactor = getFloatPref(logkey, -1) + # + # get info on what that verifies the current parm + # + obsParm=self.VU.getObsParm(parm) + (obsParmUnits,obsParmPrecision,obsParmMinval,obsParmMaxval,obsParmRateFlag,obsParmColorTable, + obsParmDisplayMinval,obsParmDisplayMaxval)=self.getParmInfo(obsmodel,obsParm) + logkey="%s_LogFactor"%obsParm + obslogfactor = getFloatPref(logkey, -1) + + obsGridMode=self.getReadMode(obsmodel,obsParm,0) + # + # Get case times/records for all models - but don't require + # observations...since we might want to display just the + # forecasts in the future without a verifying observation + # + caseInfo=self.VU.getCases(parm,modelList,obsParm,obsmodel, + dateStyle,dateType,fromDay=fromDay, + numDays=numDays,dayList=dayList, + fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd, + accumHours=accumHours,accumFreq=accumFreq, + requireObs=0,commonCases=commonCases, + basetimeOffsets=1, + callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + # + # + # + parmnames=[] + obsnames=[] + for model in modelList: + iter+=1 + if (totaliters>1): + firstString="Getting (%d of %d) %s %s grids"%(iter,totaliters,model,parm) + else: + 
firstString="Getting %s %s grids"%(model,parm) + self.setWorking(firstString) + + tomorrow=time.time()+self.DAYSECS + if self.setAndCheckWorking("%s:"%firstString)==1: + self.stopWorking() + return + count=0 + okeys=[] + fcstGridMode=self.getReadMode(model,parm) + cases=caseInfo[model] + casekeys=list(cases.keys()) + casekeys.sort() + totalcount=len(casekeys) + self.VU.logMsg("%d cases for %s"%(len(casekeys),model),1) + for key in casekeys: + count+=1 + if self.setAndCheckWorking("%s: %d of %d"%(firstString,count,totalcount))==1: + self.stopWorking() + return + (basetimestr,stimestr,etimestr)=key.split(",") + basetime=int(basetimestr) + stime=int(stimestr) + etime=int(etimestr) + (frecList,orecList)=cases[key] + tr=self.VU.getVerTimeRange(stime, etime) + # + # If there is an observed grid - read it and display it + # + obsavailable=0 + if len(orecList)>0: + okey="%s,%s"%(stimestr,etimestr) + obsname="%s0bs"%(parm) # zero instead of O so that is sorts before others + if accumFreq0: + obsdata=self.VU.smoothpm(obsdata,scale) + if okey not in okeys: + obsclip=clip(obsdata,parmMinval,parmMaxval) + self.createGrid("Ver",obsname,"SCALAR",obsclip,tr,"Observed", + None,obsParmPrecision,obsParmMinval,obsParmMaxval, + obsParmUnits) + okeys.append(okey) + numdisplayed+=1 + else: + (obsmag,obsdirec)=obsdata + if scale>0: + (u,v)=self.MagDirToUV(obsmag,obsdirec) + u=self.VU.smoothpm(u,scale) + v=self.VU.smoothpm(v,scale) + (obsmag,obsdirec)=self.UVToMagDir(u,v) + obsdata=(obsmag,obsdirec) + if okey not in okeys: + obsmag=clip(obsmag,parmMinval,parmMaxval) + obsdirec=clip(obsdirec,0.0,360.0) + obsclip=(obsmag,obsdirec) + self.createGrid("Ver",obsname,"VECTOR",obsclip,tr,"Observed", + None,obsParmPrecision,obsParmMinval,obsParmMaxval, + obsParmUnits) + okeys.append(okey) + numdisplayed+=1 + if obsname not in obsnames: + obsnames.append(obsname) + # + # Make forecast grid + # + fcstdata=self.VU.getVerGrids(model,basetime,parm, + stime,etime,mode=fcstGridMode, + recList=frecList) + 
if fcstdata is not None: + if datatype!=1: + if scale>0: + fcstdata=self.VU.smoothpm(fcstdata,scale) + else: + (fcstmag,fcstdirec)=fcstdata + if scale>0: + (u,v)=self.MagDirToUV(fcstmag,fcstdirec) + u=self.VU.smoothpm(u,scale) + v=self.VU.smoothpm(v,scale) + (fcstmag,fcstdirec)=self.UVToMagDir(u,v) + fcstdata=(fcstmag,fcstdirec) + # + # part of name based on grouping method - and model name + # + if groupBy=="Run Time": + basetuple=time.gmtime(basetime) + runTime="%4.4d%2.2d%2.2d%2.2d"%(basetuple[0],basetuple[1],basetuple[2],basetuple[3]) + runHours=self.VU.getFcstHour(basetime,tomorrow) + run=(runHours/6)+1 + runname="run%3.3dfrom%s"%(run,runTime[6:10]) + else: + fhr=self.VU.getFcstHour(basetime,stime) + if fhr<0: + self.VU.logMsg("%d-hour forecasts not shown"%fhr,1) + continue + runname="f%3.3dHr"%(fhr) + if model!="Official": + runname+=model + # + # calculate errors (if needed) and clip to twice the 'bigerr' range + # + if display=="Errors": + if obsavailable==1: + ep=max(self.errPrecision,parmPrecision) + if datatype!=1: + parmname=parm+"Err"+runname + if accumFreq-1: + keyname="%s_LogFactor"%obsname + setFloatPref(keyname, obslogfactor) + parmOb=self.getParm("Ver",obsname,"SFC") + self.setColorTableAndRange(parmOb,obsParmColorTable,obsParmDisplayMinval,obsParmDisplayMaxval) + for parmname in parmnames: + count+=1 + self.setWorking("Setting colorcurves: %d of %d"%(count,totalcount)) + parmOb=self.getParm("Ver",parmname,"SFC") + if display=="Errors": + if datatype!=1: + self.setColorTableAndRange(parmOb,errColor,-bigerr,bigerr) + else: + (errColorMag,errColorDir)=errColor + matchObj=errpat.search(parmname) + if matchObj is not None: + type=matchObj.group(2) + if type=="spd": + self.setColorTableAndRange(parmOb,errColorMag,-bigerrmag,bigerrmag) + else: + self.setColorTableAndRange(parmOb,errColorDir,-bigerrdir,bigerrdir) + else: + self.setColorTableAndRange(parmOb,parmColorTable,parmDisplayMinval,parmDisplayMaxval) + else: + if logfactor>=-1: + 
keyname="%s_LogFactor"%parmname + setFloatPref(keyname, logfactor) + self.setColorTableAndRange(parmOb,parmColorTable,parmDisplayMinval,parmDisplayMaxval) + # + if numdisplayed==0: + self.stopWorking() + self.statusBarMsg("No grids match your selected models/times/parms","U") + return + self.finishWorking() + return + + + #================================================================== + # setColorTableAndRange - Set the color table + # + # + def setColorTableAndRange(self,parm,colorTable,displayMinval,displayMaxval): + spatialMgr = self._dbss.getSpatialDisplayManager() + if displayMinval or displayMaxval or colorTable: + rsc = spatialMgr.getResourcePair(parm).getResource() + from com.raytheon.uf.viz.core.rsc.capabilities import ColorMapCapability + params = rsc.getCapability(ColorMapCapability).getColorMapParameters() + + if colorTable: + if colorTable in self.__colorMapParams: + colorMap = self.__colorMapParams[colorTable].getColorMap() + else: + from com.raytheon.uf.viz.core.drawables import ColorMapLoader + if "GFE/" not in colorTable: + colorTable = "GFE/" + colorTable + colorMap = ColorMapLoader.loadColorMap(colorTable) + elemType = str(parm.getGridInfo().getGridType()) + if ('DISCRETE' == elemType): + from com.raytheon.viz.gfe.rsc import DiscreteDisplayUtil + DiscreteDisplayUtil.deleteParmColorMap(parm) + params.setColorMap(colorMap) + params.setColorMapName(colorTable) + logfactor = getFloatPref(parm.getParmID().getParmName()+"_LogFactor", None) + if logfactor is not None: + params.setLogFactor(logfactor) + rsc.issueRefresh() + + if displayMinval or displayMaxval: + if (displayMinval != displayMaxval): + params.setColorMapMax(float(displayMaxval)) + params.setColorMapMin(float(displayMinval)) + + parm.getListeners().fireColorTableModified(parm) + + return + + #================================================================== + # showGridsStats - display grid statistics + # + # + def ShowGridsStats(self,DialogDict): + self.VU.logMsg("running 
ShowGridsStats:") + parmList=[] + parm=DialogDict["Parm"] + parmList.append(parm) + display=DialogDict["Display"] + threshold=DialogDict["Threshold"] + cycleList=DialogDict["cycleList"] + modelList=DialogDict["Models"] + obsmodel=DialogDict["ObsModel"] + fcstrList=DialogDict["fcstrList"] + fhrStart=DialogDict["fhrStart"] + fhrEnd=DialogDict["fhrEnd"] + dateType=DialogDict["dateType"] + numDays=DialogDict["numDays"] + fromDay=DialogDict["fromDay"] + dayList=DialogDict["dayList"] + scale=DialogDict["scale"] + dateStyle=DialogDict["dateStyle"] + commonCases=DialogDict["commonCases"] + accumHours=DialogDict["accumHours"] + accumFreq=DialogDict["accumFreq"] + TwoCatType=DialogDict["TwoCatType"] + TwoCatCond=DialogDict["TwoCatCond"] + TwoCatValue=DialogDict["TwoCatValue"] + TwoCatValueString=DialogDict["TwoCatValueString"] + # + # Check for good GUI input + # + ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, + dayList) + if ret==0: + return + # + # If a TwoCat stat - check to see that TwoCatType is OK + # and setup statID + # + if display=="TwoCat": + statName=TwoCatType + statCond=TwoCatCond + statVal=TwoCatValue + statID=self.VU.getStatID(statName) + if statID is None: + self.statusBarMsg("Invalid Statistic Name","U") + return + else: + statID="xxxx" + # + # + # + self.startWorking("Working on Grid Stats") + ret=self.setupGM(parmList,modelList) + if ret==1: + self.stopWorking() + return + casesInfo=[] + numdisplayed=0 + pctColor=self.VU.getCFG('PERCENT_COLOR') + # + # Loop over parm and model + # + totaliters=len(parmList)*len(modelList) + iter=0 + for parm in parmList: + readParm=parm + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + obsParm=self.VU.getObsParm(readParm) + verType=self.VU.getVerType(readParm) + datatype=self.VU.getVerParmType(readParm) + errColor=self.VU.getVerErrColor(readParm) + bigerr=self.VU.getVerBigErr(readParm) + thresholds=self.VU.getVerThresholds(readParm) + if 
datatype==1: + (errColorMag,errColorDir)=errColor + (bigerrmag,bigerrdir)=bigerr + (threshmag,threshdir)=thresholds + if last3=="Dir": + errColor=errColorDir + bigerr=bigerrdir + thresholdValue=threshdir[threshold] + clipval=180 + else: #Spd or vector err magnitude + errColor=errColorMag + bigerr=bigerrmag + thresholdValue=threshmag[threshold] + clipval=bigerr*2 + else: + thresholdValue=thresholds[threshold] + clipval=bigerr*2 + # + # Get mode for reading obs grids + # + obsGridMode=self.getReadMode(obsmodel,obsParm,0) + # + # Get case times/records for all models + # + caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, + dateStyle,dateType,fromDay=fromDay,numDays=numDays, + dayList=dayList,fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd, + accumHours=accumHours,accumFreq=accumFreq, + requireObs=1,commonCases=commonCases, + basetimeOffsets=1, + callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + # + # + # + for model in modelList: + iter+=1 + if (totaliters>1): + firstString="Calculating (%d of %d) %s %s stats"%(iter,totaliters,model,parm) + else: + firstString="Calculating %s %s stats"%(model,parm) + # + # + # + fcstGridMode=self.getReadMode(model,readParm) + (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, + parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(model,readParm) + # + # + # + parmnames=[] + gridsave={} + gridcount={} + hitssave={} + misssave={} + falrsave={} + cornsave={} + maxcases=0 + self.setWorking("%s:finding matches"%firstString) + # + # Get all the cases for this model + # + cases=caseInfo[model] + # + # Sort them by the start/end time, not the basetime + # + casekeys=list(cases.keys()) + casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) + totalcount=len(casekeys) + self.VU.logMsg("%d cases for %s"%(len(casekeys),model),1) + count=0 + lastobs="" + for key in casekeys: + count+=1 + self.VU.logMsg("%s : %s"%(model,key),10) 
+ if self.setAndCheckWorking("%s: %d of %d"%(firstString,count,totalcount))==1: + self.stopWorking() + return + (basetimestr,stimestr,etimestr)=key.split(",") + basetime=int(basetimestr) + stime=int(stimestr) + etime=int(etimestr) + (frecList,orecList)=cases[key] + # + # Dont make stats for obs not yet complete + # + if etime>time.time(): # dont make stats for obs not yet complete + count+=1 + continue + # + # check to make sure it is a forecast + # string to store grid under depends on forecast and end hour + # + fhr=self.VU.getFcstHour(basetime,stime) + if fhr<0: + count+=1 + continue + # + # If a new and different obs time - read the obs data + # + obskey=key.split(",",1)[1] + if obskey!=lastobs: + obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, + stime,etime,obsGridMode, + orecList) + # + # Smooth observed grid... + # unless a TwoCat "areal" type + # and smooth vectors in U/V space... + # + if scale>0: + if ((display!="TwoCat")or(statID[0:1]!="a")): + if datatype==1: + (obsmag,obsdir)=obsdata + (u,v)=self.MagDirToUV(obsmag,obsdir) + us=self.VU.smoothpm(u,scale) + vs=self.VU.smoothpm(v,scale) + (obsmag,obsdir)=self.UVToMagDir(us,vs) + obsdata=(obsmag,obsdir) + else: + obsdata=self.VU.smoothpm(obsdata,scale) + # + # For probability types...calculate an obs grid of + # 1 or 0, based on whether the observed threshold + # is met. + # + if verType==1: + obsdata=self.getProbVerGrid(readParm,obsdata) + # + # Save the 'key' for this obs grid - so that we + # don't have to read and calculate it again every + # time...only when a new obs time is encountered + # + lastobs=obskey + # + # get parmname to save as...from cycle/fhr/ehr/model + # + ehr=self.VU.getFcstHour(basetime,etime) + basetuple=time.gmtime(basetime) + fcstcycle=basetuple[3] + parmname="%2.2d%3.3d%3.3d%s"%(fcstcycle,fhr,ehr,model) + # + # Read forecast grid + # + fcstdata=self.VU.getVerGrids(model,basetime,readParm, + stime,etime,fcstGridMode, + frecList) + # + # Smooth forecast grid... 
+ # unless a TwoCat "areal" type + # and smooth vectors in U/V space... + # + if scale>0: + if ((display!="TwoCat")or(statID[0:1]!="a")): + if datatype==1: + (fcstmag,fcstdir)=fcstdata + (u,v)=self.MagDirToUV(fcstmag,fcstdir) + us=self.VU.smoothpm(u,scale) + vs=self.VU.smoothpm(v,scale) + (fcstmag,fcstdir)=self.UVToMagDir(us,vs) + fcstdata=(fcstmag,fcstdir) + else: + fcstdata=self.VU.smoothpm(fcstdata,scale) + # + # For TwoCat stats...calculate hits/misses/falsealarms/etc. + # + if display=="TwoCat": + # + # get the forecast/observed grids into fcstGrid/obsGrid + # Normally this is what is in fcstdata/obsdata - but for + # vectors...need to pick the right component and for + # probabilities - need to divide by 100. + # + if datatype==1: + if last3!="Dir": + fcstGrid=fcstdata[0] + obsGrid=obsdata[0] + else: + fcstGrid=fcstdata[1] + obsGrid=obsdata[1] + else: + if verType!=0: + fcstGrid=fcstdata/100.0 + else: + fcstGrid=fcstdata + obsGrid=obsdata + # + # Now get yes/no of forecast/observed occurrence + # + if statCond==">": + obsOccur=greater(obsGrid,statVal) + fcstOccur=greater(fcstGrid,statVal) + elif statCond==">=": + obsOccur=greater_equal(obsGrid,statVal) + fcstOccur=greater_equal(fcstGrid,statVal) + elif statCond=="<": + obsOccur=less(obsGrid,statVal) + fcstOccur=less(fcstGrid,statVal) + elif statCond=="<=": + obsOccur=less_equal(obsGrid,statVal) + fcstOccur=less_equal(fcstGrid,statVal) + # + # do neighborhood look here + # + if statID[0:1]=="a": + if scale>0: + obsOccur=self.VU.arealOccur(obsOccur,scale) + fcstOccur=self.VU.arealOccur(fcstOccur,scale) + # + # Make grids of hits, misses, false alarms, correct negatives + # + notFcst=logical_not(fcstOccur) + notObs=logical_not(obsOccur) + hitsgrid=logical_and(fcstOccur,obsOccur) + missgrid=logical_and(notFcst,obsOccur) + falrgrid=logical_and(fcstOccur,notObs) + corngrid=logical_and(notFcst,notObs) + # + # Make space to store these results - if first one + # + if parmname not in parmnames: + 
parmnames.append(parmname) + hitssave[parmname]=self.empty() + misssave[parmname]=self.empty() + falrsave[parmname]=self.empty() + cornsave[parmname]=self.empty() + # + # Add to the hits/miss/falr/corn values + # + hitssave[parmname]+=hitsgrid + misssave[parmname]+=missgrid + falrsave[parmname]+=falrgrid + cornsave[parmname]+=corngrid + # + # For non-TwoCat displays...calculate the errors + # + else: + if datatype!=1: + if verType==0: + errgrid=fcstdata-obsdata + else: + errgrid=(fcstdata/100.0)-obsdata + else: + last3=parm[-3:] + if (last3=="Spd"): + errgrid=fcstdata[0]-obsdata[0] + elif (last3=="Dir"): + errgrid=fcstdata[1]-obsdata[1] + errgrid=where(greater(errgrid,180.0),360.0-errgrid,errgrid) + errgrid=where(less(errgrid,-180.0),-(360.0+errgrid),errgrid) + else: + (fu,fv)=self.MagDirToUV(fcstdata[0],fcstdata[1]) + (ou,ov)=self.MagDirToUV(obsdata[0],obsdata[1]) + eu=fu-ou + ev=fv-ov + (errmag,errdir)=self.UVToMagDir(eu,ev) + errgrid=errmag + # + # change to different scores + # + if display=="Mean Abs Error": + errgrid=where(less(errgrid,0.0),-errgrid,errgrid) + if display in ["RMS Error","Mean Squared Error"]: + errgrid*=errgrid + if display=="Percent Err <": + errgrid=where(less(errgrid,0.0),-errgrid,errgrid) + errgrid=less(errgrid,thresholdValue) + # + # save list of unique parm names being created + # + if parmname not in parmnames: + parmnames.append(parmname) + gridsave[parmname]=self.empty() + gridcount[parmname]=0 + # + # if doing average errors, add errors to sums + # otherwise...display the grid + # + gridsave[parmname]+=errgrid + gridcount[parmname]+=1 + # + # Calculate the statistics grids for this parm/model + # and display them + # + self.VU.logMsg("Creating stat grids") + pnames=[] + totalcount=len(parmnames) + count=0 + for parmname in parmnames: + # + # If they want to stop - stop adding more grids + # but break out to set the color tables correctly + # + count+=1 + self.setWorking("%s:%d of %d"%(firstString,count,totalcount)) + if 
self.checkWorking()==1: + break + # + # Get timerange to save the final grid into + # + cyc=int(parmname[0:2]) + f1=int(parmname[2:5]) + f2=int(parmname[5:8]) + tr=self.createTimeRange(f1+cyc,f2+cyc,"Zulu") + # + # Make name that will be used in grid manager + # + mod=parmname[8:] + pname="%s%2.2dZ%s"%(parm,cyc,mod) + # + # For TwoCat stats + # + if display=="TwoCat": + hitsgrid=hitssave[parmname] + missgrid=misssave[parmname] + falrgrid=falrsave[parmname] + corngrid=cornsave[parmname] + statgrid=self.VU.getGridBinaryStat(statID,hitsgrid,missgrid, + falrgrid,corngrid) + # + # get case number - for table of cases + # + totgrid=hitsgrid+missgrid+falrgrid+corngrid + n=maximum.reduce(maximum.reduce(totgrid)) + maxcases=max(n,maxcases) + # + # Different stats have different limits + # + minlim=-1.0 + maxlim=1.0 + res=2 + if statID in ["hits","ahits","miss","amiss","fals","afals", + "corn","acorn"]: + minlim=0.0 + maxlim=float(n) + res=0 + elif statID in ["freqo","freqf","fc","afc","pod","apod","far","afar", + "pofd","apofd","ts","ats"]: + minlim=0.0 + maxlim=1.0 + # + # Ones that range from 0 to Infinity : clip at +5.0 + # + elif statID in ["freqbias","afreqbias","oddratio","aoddsratio"]: + minlim=0.0 + maxlim=5.0 + # + # Equitable Threat clips at -0.333 and 1.0 + # + elif statID in ["ets","aets"]: + minlim=-0.3333 + maxlim=1.0 + # + # Hansen Kuipers clips at -1.0 to 1.0 + # + elif statID in ["hk","ahk"]: + minlim=-1.0 + maxlim=1.0 + # + # Heidke ranges from -Infinity to 1, and clips at -5.0 + # + elif statID in ["hss","ahss"]: + minlim=-5.0 + maxlim=1.0 + # + # Clip the grid + # + newgrid=clip(statgrid,minlim,maxlim) + self.createGrid("Ver",pname,"SCALAR",newgrid,tr, + "Forecast",None,res,minlim,maxlim, + "units") + # + # For normal error displays + # + else: + # + # If there weren't any sums saved - dont make + # a grid for it + # + n=gridcount[parmname] + if n<1: + continue + # + # make newgrid the grid to show + # + newgrid=gridsave[parmname]/float(n) + if 
display=="RMS Error": + newgrid=newgrid**0.5 + if verType==1: + newgrid*=100.0 + # + # clip the newgrid based on the parm clipping value + # + newgrid=clip(newgrid,-clipval,clipval) + # + # Percent error grids always range from 0 to 100 + # + if display=="Percent Err <": + newgrid*=100.0 + newgrid=clip(newgrid,0.0,100.0) + self.createGrid("Ver",pname,"SCALAR",newgrid,tr, + "Forecast",None,0,0.0,100.0,"%") + # + # Others can have variable ranges. We clip the + # values at 2 times the 'bigerr' value + # + else: + ep=max(self.errPrecision,parmPrecision) + if datatype!=1: + bigerr=self.VU.getVerBigErr(parm) + clipval=bigerr*2.0 + self.createGrid("Ver",pname,"SCALAR",newgrid,tr, + "Forecast",None,ep,-clipval, + clipval,self.errUnits) + # + # Keep track of grids actually put in grid manager + # + if pname not in pnames: + pnames.append(pname) + numdisplayed+=1 + casesInfo.append("%-25s|%3.3d|%d"%(pname,f1,n)) + # + # Set the colorTables for each unique parm added. + # + self.VU.logMsg("Setting color tables",2) + for pname in pnames: + parmOb=self.getParm("Ver",pname,"SFC") + if display=="TwoCat": + if res==0: + self.setColorTableAndRange(parmOb,pctColor,0,maxcases) + else: + self.setColorTableAndRange(parmOb,pctColor,minlim,maxlim) + else: + if display=="Percent Err <": + self.setColorTableAndRange(parmOb,pctColor,0,100) + else: + self.setColorTableAndRange(parmOb,errColor,-bigerr,bigerr) + if self.checkWorking()==1: + self.stopWorking() + return + # + # + # + if numdisplayed==0: + self.stopWorking() + self.statusBarMsg("No grids match your selected models/times/parms","U") + return + # + self.finishWorking() + # + # Make text with case info + # + casesInfo.sort() + casesText="Number of Cases:\n" + lastmod="" + for info in casesInfo: + (modlong,fhr,num)=info.split("|") + mod=modlong.strip() + if mod!=lastmod: + casesText+="\n %s:\n"%mod + lastmod=mod + casesText+=" %3d-hr: %5d\n"%(int(fhr),int(num)) + # + # Make the case info pop up + # + self.cases.updateText(casesText) 
+ self.miniCases.deiconify() + self.miniCases.lift() + self.miniCases.update_idletasks() + self.VU.logMsg("Done making stat grids") + return + #================================================================== + # getProbVerGrid - get grid for probability verification, based + # on the obsdata, and the condition/threshold for + # the specified parmName + # + def getProbVerGrid(self,parmName,obsdata): + outdata=obsdata*0 + obsCondition=self.VU.getObsCondition(parmName) + obsThreshold=self.VU.getObsThreshold(parmName) + if obsCondition==">": + outdata=greater(obsdata,obsThreshold) + elif obsCondition==">=": + outdata=greater_equal(obsdata,obsThreshold) + elif obsCondition=="<": + outdata=less(obsdata,obsThreshold) + elif obsCondition=="<=": + outdata=less_equal(obsdata,obsThreshold) + return outdata + #================================================================== + # showGridsDists - display histograms/scatterplots + # + # + def ShowDists(self,DialogDict): + self.VU.logMsg("running ShowDists:") + parmList=[] + parm=DialogDict["Parm"] + parmList.append(parm) + display=DialogDict["Display"] + cycleList=DialogDict["cycleList"] + modelList=DialogDict["Models"] + obsmodel=DialogDict["ObsModel"] + fcstrList=DialogDict["fcstrList"] + fhrStart=DialogDict["fhrStart"] + fhrEnd=DialogDict["fhrEnd"] + dateType=DialogDict["dateType"] + numDays=DialogDict["numDays"] + fromDay=DialogDict["fromDay"] + dayList=DialogDict["dayList"] + dateStyle=DialogDict["dateStyle"] + scale=DialogDict["scale"] + commonCases=DialogDict["commonCases"] + accumHours=DialogDict["accumHours"] + accumFreq=DialogDict["accumFreq"] + # + # Check for good GUI input + # + ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, + dayList) + if ret==0: + return + # + # Do seperate processing for each type + # + if display=="Error Histogram": + self.errorHistogram(parmList,cycleList,modelList,obsmodel,fcstrList, + fhrStart,fhrEnd,dateType,numDays,fromDay,dayList, + 
dateStyle,scale,commonCases,accumHours,accumFreq) + elif display=="Value Histogram": + self.valueHistogram(parmList,cycleList,modelList,obsmodel,fcstrList, + fhrStart,fhrEnd,dateType,numDays,fromDay,dayList, + dateStyle,scale,commonCases,accumHours,accumFreq) + elif display=="Expected Value": + self.expectedValue(parmList,cycleList,modelList,obsmodel,fcstrList, + fhrStart,fhrEnd,dateType,numDays,fromDay,dayList, + dateStyle,scale,commonCases,accumHours,accumFreq) + elif display=="Scatterplot": + self.scatterPlot(parmList,cycleList,modelList,obsmodel,fcstrList, + fhrStart,fhrEnd,dateType,numDays,fromDay,dayList, + dateStyle,scale,commonCases,accumHours,accumFreq) + return + #================================================================== + # errorHistogram - display error histogram + # + # + def errorHistogram(self,parmList,cycleList,modelList,obsmodel, + fcstrList,fhrStart,fhrEnd,dateType,numDays,fromDay, + dayList,dateStyle,scale,commonCases,accumHours, + accumFreq): + # + # + # Clear display - setup title + # + parm=parmList[0] + self.cd.canvas.delete(tkinter.ALL) + self.cd.title("Error Histogram - %s"%parm) + # + # Start 'working' display + # + workStart="Working on error histogram" + self.startWorking(workStart,optionRemove=0) + # + # + # + NUMTBUTTONS=12 # normal number of time buttons on a row - configure + NUMMBUTTONS=6 # normal number of model buttons on a row - configure + # + # get the active EditArea into ea. 
If the active edit area is + # None - then assume they want to run it over the entire grid + # + editArea=self.getActiveEditArea() + editAreaMask=self.encodeEditArea(editArea) + npts=add.reduce(add.reduce(editAreaMask)) + if (npts==0): + editArea.invert() + ea=self.encodeEditArea(editArea) + eaflat=ravel(ea) + totalpoints=add.reduce(eaflat) + # + # make space for saving data + # + self.histograms={} # storage for histograms for each model/forecast hour + self.histoWorseLow={} + self.histoWorseHigh={} + self.numCases={} + self.errSums={} + self.errSumSquareds={} + self.errSumAbs={} + # + # + # + totaliters=len(modelList) + iter=0 + # + # For vectors...the parm to read might be different than + # the name of the parm + # + readParm=parm + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + # + # Get information about the parm we are reading + # + (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, + parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) + obsParm=self.VU.getObsParm(readParm) + verType=self.VU.getVerType(readParm) + datatype=self.VU.getVerParmType(readParm) + # + # get binwidth and bigerr for parm...but for vectors its + # complicated by dir/mag/vecerr options + # + binwidth=self.VU.getVerBinWidth(readParm) + bigerr=self.VU.getVerBigErr(readParm) + if datatype==1: + (bwMag,bwDir)=binwidth + (beMag,beDir)=bigerr + if last3=="Dir": + binwidth=bwDir + bigerr=beDir + else: + binwidth=bwMag + bigerr=beMag + (binmin,binmax)=self.getBins(binwidth,bigerr) + self.histosetup(-bigerr,bigerr,binwidth) + + nbin=len(binmin) + nbins=reshape(arange(nbin),(nbin,1)) + abinmin=reshape(array(binmin),(nbin,1)) + abinmax=reshape(array(binmax),(nbin,1)) + # + # Get mode for reading obs grids + # + obsGridMode=self.getReadMode(obsmodel,obsParm,0) + # + # Get case times/records for all models + # + caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, + 
dateStyle,dateType,fromDay=fromDay, + numDays=numDays,dayList=dayList, + fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd, + accumHours=accumHours,accumFreq=accumFreq, + commonCases=commonCases,basetimeOffsets=1, + callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + # + # Loop over each model + # + for model in modelList: + iter+=1 + workNow=workStart+":%s (%d of %d)"%(model,iter,totaliters) + # + fcstGridMode=self.getReadMode(model,readParm) + # + # Get all the cases for this model + # + cases=caseInfo[model] + # + # Sort cases by the start time, not the basetime + # + casekeys=list(cases.keys()) + casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) + totalcount=len(casekeys) + self.VU.logMsg("reading %d cases for %s"%(totalcount,model),1) + count=0 + lastobs="" + for key in casekeys: + count+=1 + self.VU.logMsg("%s : %s"%(model,key),10) + if self.setAndCheckWorking("%s: %d of %d"%(workNow,count,totalcount))==1: + self.stopWorking() + return + (basetimestr,stimestr,etimestr)=key.split(",") + basetime=int(basetimestr) + stime=int(stimestr) + etime=int(etimestr) + (frecList,orecList)=cases[key] + # + # Dont make stats for obs not yet complete + # + if etime>time.time(): + continue + # + # Dont include negative forecast hours + # + fhr=self.VU.getFcstHour(basetime,stime) + if fhr<0: + continue + # + # string to store grid under depends on model and forecast hour + # + savekey="%s-%3.3d"%(model,fhr) + # + # If a new and different obs time - read the obs data + # + obskey=key.split(",",1)[1] + if obskey!=lastobs: + self.VU.logMsg("new Obs grid",10) + obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, + stime,etime,mode=obsGridMode, + recList=orecList) + obsdata=self.scaleGrid(obsdata,scale,datatype) + # + # For probabilistic variables...calculate the + # observed 'yes/no' value + # + if verType==1: + obsdata=self.getProbVerGrid(readParm,obsdata) + # + # + # + if ((datatype!=1)or(last3 
in ["Spd","Dir"])): + if last3=="Spd": + obsgrid=obsdata[0] + elif last3=="Dir": + obsgrid=obsdata[1] + else: + obsgrid=obsdata + obsonly=compress(eaflat,ravel(obsgrid)) + else: + (u,v)=self.MagDirToUV(obsdata[0],obsdata[1]) + obsuonly=compress(eaflat,ravel(u)) + obsvonly=compress(eaflat,ravel(v)) + # + # save the last obskey that we have read so that + # we don't read it again many times + # + lastobs=obskey + # + # Read forecast grid + # + fcstdata=self.VU.getVerGrids(model,basetime,readParm, + stime,etime,mode=fcstGridMode, + recList=frecList) + fcstdata=self.scaleGrid(fcstdata,scale,datatype) + # + # Get the error, handling vector error, etc. + # + if ((datatype!=1)or(last3 in ["Spd","Dir"])): + if last3=="Spd": + fcstgrid=fcstdata[0] + elif last3=="Dir": + fcstgrid=fcstdata[1] + else: + fcstgrid=fcstdata + fcstonly=compress(eaflat,ravel(fcstgrid)) + erronly=fcstonly-obsonly + if last3=="Dir": + erronly=where(greater(erronly,180.0),360.0-erronly,erronly) + erronly=where(less(erronly,-180.0),-(360.0+erronly),erronly) + else: + (fcstmag,fcstdir)=fcstdata + (u,v)=self.MagDirToUV(fcstmag,fcstdir) + uonly=compress(eaflat,ravel(u)) + vonly=compress(eaflat,ravel(v)) + uerr=uonly-obsuonly + verr=vonly-obsvonly + (mag,direc)=self.UVToMagDir(uerr,verr) + erronly=mag + # + # make histograms + # + (errCount,worseLow,worseHigh)=self.histo(erronly) + errSum=add.reduce(erronly) + errSumSquared=add.reduce(erronly*erronly) + errabs=abs(erronly) + errSumAb=add.reduce(errabs) + if savekey in self.histograms: + self.histograms[savekey]+=errCount + self.histoWorseLow[savekey]+=worseLow + self.histoWorseHigh[savekey]+=worseHigh + self.errSums[savekey]+=errSum + self.errSumSquareds[savekey]+=errSumSquared + self.errSumAbs[savekey]+=errSumAb + self.numCases[savekey]+=1 + else: + self.histograms[savekey]=errCount + self.histoWorseLow[savekey]=worseLow + self.histoWorseHigh[savekey]=worseHigh + self.errSums[savekey]=errSum + self.errSumSquareds[savekey]=errSumSquared + 
self.errSumAbs[savekey]=errSumAb + self.numCases[savekey]=1 + # + # Get all "model-fhr" keys we saved + # + fullkeys=list(self.histograms.keys()) + # + # if no data could be read - stop here + # + if len(fullkeys)<1: + self.stopWorking() + msg="No verification data could be found matching those criteria" + self.statusBarMsg(msg,"U") + return + # + # For buttons...get models/forecasthours actually in the data + # + fullkeys.sort() + fhrstrs=[] + modkeys=[] + for fullkey in fullkeys: + (mod,fhrstr)=fullkey.split("-") + if fhrstr not in fhrstrs: + fhrstrs.append(fhrstr) + if mod not in modkeys: + modkeys.append(mod) + # + # Change fhrstrs (sorted on 3-character 000-999) into + # smaller fhrkeys that are NOT all 3-characters wide + # + fhrstrs.sort() + fhrkeys=[] + for fhrstr in fhrstrs: + fhrkeys.append("%d"%int(fhrstr)) + # + # If an Official button is in there...make it first + # + modkeys.sort() + if "Official" in modkeys: + idx=modkeys.index("Official") + del modkeys[idx] + modkeys.insert(0,"Official") + # + # set colors for each model + # + self.colornames={} + index=0 + for mod in modkeys: + self.colornames[mod]=self.COLORLIST[index] + index+=1 + if index==len(self.COLORLIST): + index=0 + # + # Setup first row of buttons (forecast hours) + # + self.setupBut1(fhrkeys,numbuttons=NUMTBUTTONS,arrows=1,width=3) + # + # Setup second row of buttons (models) + # + self.setupBut2(modkeys,numbuttons=NUMMBUTTONS,arrows=1) + # + # find max number in any bin in any of the histograms + # + histkey1=list(self.histograms.keys())[0] + maxHist=zeros_like(self.histograms[histkey1]) + for histkey in list(self.histograms.keys()): + self.histograms[histkey]/=float(self.numCases[histkey]) + maxHist=maximum(maxHist,self.histograms[histkey]) + fullmax=maximum.reduce(maxHist) + # + # Find good tickmark interval for vertical axis and set the + # vertical range to be one tick mark above the fullmax (max + # number in any histogram) + # + numticks=10 + 
tickInterval=self.niceNumDec(fullmax/(numticks-1),1) + graphmax=(int(fullmax/tickInterval)+1)*tickInterval + # + # + # Setup graphing coordinates + # + minx=-bigerr + maxx=bigerr + maxscore=maxx/2.0 + left=self.cd.curwidth*(50.0/700.0) + right=self.cd.curwidth*(650.0/700.0) + bot=self.cd.curheight*(100.0/530.0) + top=self.cd.curheight*(480.0/530.0) + self.setgraph(minx,maxx,0.0,graphmax,left,right,bot,top) + self.histoaxes(graphmax,-bigerr,bigerr,binwidth,tickInterval) + # + # Draw each histogram + # + totalcount=len(list(self.histograms.keys())) + count=0 + for key in list(self.histograms.keys()): + count+=1 + if self.setAndCheckWorking("%s: drawing histogram %d of %d"%(workStart,count,totalcount))==1: + self.stopWorking() + return + tagbase=key.split("-") + mod=tagbase[0] + fhr=int(tagbase[1]) + fhrstr="f%d"%fhr + tagtuple=(mod,fhrstr) + flabel="%d-hr forecast"%fhr + self.labelLine(flabel,3,justify="right",tags=tagtuple) + + colorname=self.colornames[mod] + bins=self.histograms[key] + nbin=bins.shape[0] + for i in range(nbin): + y=bins[i] + x1=self.histomin+(i*self.histowidth) + x2=x1+self.histowidth + if y>0: + if i==0: + (sx1,sy1)=self.graphcoord(x1,0) + else: + (sx1,sy1)=self.graphcoord(x1,bins[i-1]) + (sx2,sy2)=self.graphcoord(x1,y) + (sx3,sy3)=self.graphcoord(x2,y) + if ((i+1)==nbin): + (sx4,sy4)=self.graphcoord(x2,0) + self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) + elif bins[i+1]==0: + (sx4,sy4)=self.graphcoord(x2,0) + self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) + else: + self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,fill=colorname,tags=tagtuple) + self.but2state[mod]=1 + self.but1state[fhrstr]=1 + lowfcst=self.histoWorseLow[key] + highfcst=self.histoWorseHigh[key] + self.showWorse(lowfcst,highfcst,bigerr,15,colorname,tagtuple) + allpts=self.numCases[key]*totalpoints + avg=self.errSums[key]/allpts + mae=self.errSumAbs[key]/allpts + 
std=sqrt((self.errSumSquareds[key]/allpts)-(avg*avg)) + rms=sqrt(self.errSumSquareds[key]/allpts) + self.showAvg(avg,colorname,tagtuple) + modnum=modkeys.index(mod) + self.showScores(modnum,mod,self.numCases[key],avg,std,mae,rms,colorname,tagtuple) + score=100.0-(self.errSumSquareds[key]/allpts) + self.showScore(score,mod,colorname,tagtuple) + # + # Show first time/model + # + startBut1(self) + startBut2(self) + # + # Label top of graph + # + (x,y)=self.graphcoord(0,graphmax) + self.cd.canvas.create_text(x,y-5,text="Gridpoints per case",fill="black",anchor=tkinter.S) + # + # Labels + # + ul1="Histogram - %s"%parm + self.cdLabels(ul1,totalpoints,dateStyle,dateType,numDays,fromDay,dayList,cycleList) + # + # Bin width + # + if binwidth<1.0: + str="Bin width: %3.1f"%binwidth + else: + str="Bin width: %d"%binwidth + self.labelLine(str,3,justify="left") + # + # table labels + # + x=self.cd.curwidth*(80.0/700.0) + y=self.cd.curheight*(130.0/530.0) + self.cd.canvas.create_text(x,y,text="Model",anchor=tkinter.E,fill="black") + x=self.cd.curwidth*(130.0/700.0) + y=self.cd.curheight*(130.0/530.0) + self.cd.canvas.create_text(x,y,text="Cases",anchor=tkinter.E,fill="black") + x=self.cd.curwidth*(170.0/700.0) + y=self.cd.curheight*(130.0/530.0) + self.cd.canvas.create_text(x,y,text="Avg",anchor=tkinter.E,fill="black") + x=self.cd.curwidth*(210.0/700.0) + y=self.cd.curheight*(130.0/530.0) + self.cd.canvas.create_text(x,y,text="Std",anchor=tkinter.E,fill="black") + x=self.cd.curwidth*(250.0/700.0) + y=self.cd.curheight*(130.0/530.0) + self.cd.canvas.create_text(x,y,text="MAE",anchor=tkinter.E,fill="black") + x=self.cd.curwidth*(290.0/700.0) + y=self.cd.curheight*(130.0/530.0) + self.cd.canvas.create_text(x,y,text="RMS",anchor=tkinter.E,fill="black") + # + # Color Bar + # + midx=self.cd.curwidth/2.0 + for i in range(0,256): + x=midx-128+i + y=50 + colorstr="#%02x%02x00"%(255-i,i) + self.cd.canvas.create_line(x,y-3,x,y+3,fill=colorstr) + 
self.cd.canvas.create_text(midx-128-5,50,text="Bad",anchor=tkinter.E) + self.cd.canvas.create_text(midx+128+5,50,text="Good",anchor=tkinter.W) + + self.stopWorking() + self.moveCD() + self.cd.deiconify() + self.cd.lift() + return + #================================================================== + # valueHistogram - display value histogram + # + # + def valueHistogram(self,parmList,cycleList,modelList,obsmodel, + fcstrList,fhrStart,fhrEnd,dateType,numDays,fromDay, + dayList,dateStyle,scale,commonCases,accumHours, + accumFreq): + # + # Clear display - setup title + # + parm=parmList[0] + self.cd.canvas.delete(tkinter.ALL) + self.cd.title("Value Histogram - %s"%parm) + # + # + # + workStart="Working on value histogram" + self.startWorking(workStart,optionRemove=0) + # + # + # + NUMTBUTTONS=12 # normal number of time buttons on a row - configure + NUMMBUTTONS=6 # normal number of model buttons on a row - configure + # + # get the active EditArea into ea. If the active edit area is + # None - then assume they want to run it over the entire grid + # + editArea=self.getActiveEditArea() + editAreaMask=self.encodeEditArea(editArea) + npts=add.reduce(add.reduce(editAreaMask)) + if (npts==0): + editArea.invert() + ea=self.encodeEditArea(editArea) + eaflat=ravel(ea) + totalpoints=add.reduce(eaflat) + # + # make space for saving data + # + self.histograms={} # storage for histograms for each model/forecast hour + self.numCases={} + # + # Loop over parm and model + # + totaliters=len(modelList) + iter=0 + # + # For vectors...the parm to read might be different than + # the name of the parm + # + readParm=parm + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + # + # Get information about the parm we are reading + # + (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, + parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) + obsParm=self.VU.getObsParm(readParm) + 
verType=self.VU.getVerType(readParm) + datatype=self.VU.getVerParmType(readParm) + if ((datatype==1)and(last3=="Dir")): + parmMinval=0 + parmMaxval=360 + # + # get binwidth and bigerr for parm...but for vectors its + # complicated by dir/mag/vecerr options + # + binwidth=self.VU.getVerBinWidth(readParm) + if datatype==1: + (bwMag,bwDir)=binwidth + if last3=="Dir": + binwidth=bwDir + else: + binwidth=bwMag + # + # Setup histogram binning routines + # + self.histosetup(parmMinval,parmMaxval,binwidth) + # + # Get mode for reading obs grids + # + obsGridMode=self.getReadMode(obsmodel,obsParm,0) + # + # Get case times/records for all models + # + caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, + dateStyle,dateType,fromDay=fromDay, + numDays=numDays,dayList=dayList, + fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd, + accumHours=accumHours,accumFreq=accumFreq, + commonCases=commonCases,basetimeOffsets=1, + callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + # + # Loop over each model + # + for model in modelList: + iter+=1 + workNow=workStart+":%s (%d of %d)"%(model,iter,totaliters) + # + fcstGridMode=self.getReadMode(model,readParm) + # + # Get all the cases for this model + # + cases=caseInfo[model] + # + # Sort cases by the start/end time, not the basetime + # + casekeys=list(cases.keys()) + casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) + totalcount=len(casekeys) + self.VU.logMsg("reading %d cases for %s"%(totalcount,model),1) + count=0 + lastobs="" + for key in casekeys: + count+=1 + self.VU.logMsg("%s : %s"%(model,key),10) + if self.setAndCheckWorking("%s: %d of %d"%(workNow,count,totalcount))==1: + self.stopWorking() + return + (basetimestr,stimestr,etimestr)=key.split(",") + basetime=int(basetimestr) + stime=int(stimestr) + etime=int(etimestr) + (frecList,orecList)=cases[key] + # + # Dont make stats for obs not yet complete + # + if etime>time.time(): + continue + 
# + # Dont include negative forecast hours + # + fhr=self.VU.getFcstHour(basetime,stime) + if fhr<0: + continue + # + # string to store grid under depends on model and forecast hour + # + saveKey="%s-%3.3d"%(model,fhr) + # + # If a new and different obs time - read the obs data + # + obskey=key.split(",",1)[1] + if obskey!=lastobs: + self.VU.logMsg("new Obs grid",10) + obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, + stime,etime,mode=obsGridMode, + recList=orecList) + obsdata=self.scaleGrid(obsdata,scale,datatype) + # + # For probabilistic variables...calculate the + # observed 'yes/no' value + # + if verType==1: + obsdata=self.getProbVerGrid(readParm,obsdata)*100.0 + # + # cant do a value histogram of vector wind + # errors...so those get changed to windSpd + # + if ((datatype!=1)or(last3 in ["Spd","Dir"])): + if last3=="Spd": + obsgrid=obsdata[0] + elif last3=="Dir": + obsgrid=obsdata[1] + else: + obsgrid=obsdata + obsonly=compress(eaflat,ravel(obsgrid)) + else: + obsgrid=obsdata[0] + obsonly=compress(eaflat,ravel(obsgrid)) + (obsCount,below,above)=self.histo(obsonly) + lastobs=obskey + # + # Add observed bin counts to counts for same model/fhr + # + obsSaveKey="%s-%3.3d"%(obsmodel,fhr) + if obsSaveKey in self.histograms: + self.histograms[obsSaveKey]+=obsCount + self.numCases[obsSaveKey]+=1 + else: + self.histograms[obsSaveKey]=obsCount + self.numCases[obsSaveKey]=1 + # + # Read forecast grid and calculate error grid + # + fcstdata=self.VU.getVerGrids(model,basetime,readParm, + stime,etime,mode=fcstGridMode, + recList=frecList) + fcstdata=self.scaleGrid(fcstdata,scale,datatype) + # + # Get the error, handling vector error, etc. 
+ # + if ((datatype!=1)or(last3 in ["Spd","Dir"])): + if last3=="Spd": + fcstgrid=fcstdata[0] + elif last3=="Dir": + fcstgrid=fcstdata[1] + else: + fcstgrid=fcstdata + fcstonly=compress(eaflat,ravel(fcstgrid)) + else: + fcstgrid=fcstdata[0] + fcstonly=compress(eaflat,ravel(fcstgrid)) + # + # bin the forecast values + # + (valCount,below,above)=self.histo(fcstonly) + # + # Add bin counts to counts for same model/fhr + # + if saveKey in self.histograms: + self.histograms[saveKey]+=valCount + self.numCases[saveKey]+=1 + else: + self.histograms[saveKey]=valCount + self.numCases[saveKey]=1 + # + # Get all the keys that will be displayed - we've been storing in + # different places for different things + # + fullkeys=list(self.histograms.keys()) + # + # if no data could be read - stop here + # + if len(fullkeys)<1: + self.stopWorking() + msg="No verification data could be found matching those criteria" + self.statusBarMsg(msg,"U") + return + # + # For buttons...get models/forecasthours actually in the data + # + fullkeys.sort() + fhrstrs=[] + modkeys=[] + for fullkey in fullkeys: + (mod,fhrstr)=fullkey.split("-") + if fhrstr not in fhrstrs: + fhrstrs.append(fhrstr) + if mod not in modkeys: + modkeys.append(mod) + # + # Change fhrstrs (sorted on 3-character 000-999) into + # smaller fhrkeys that are NOT all 3-characters wide + # + fhrstrs.sort() + fhrkeys=[] + for fhrstr in fhrstrs: + fhrkeys.append("%d"%int(fhrstr)) + # + # If an Official button is in there...make it first + # + modkeys.sort() + if "Official" in modkeys: + idx=modkeys.index("Official") + del modkeys[idx] + modkeys.insert(0,"Official") + # + # Put the observed one last + # + if obsmodel in modkeys: + idx=modkeys.index(obsmodel) + del modkeys[idx] + modkeys.append(obsmodel) + # + # set colors for each model + # + self.colornames={} + index=0 + for mod in modkeys: + self.colornames[mod]=self.COLORLIST[index] + index+=1 + if index==len(self.COLORLIST): + index=0 + # + # Setup first row of buttons (forecast 
hours) + # + self.setupBut1(fhrkeys,numbuttons=NUMTBUTTONS,arrows=1,width=3) + # + # Setup second row of buttons (models) + # + self.setupBut2(modkeys,numbuttons=NUMMBUTTONS,arrows=1) + # + # Get min/max of forecast/observed values that need to be shown + # + fullmin=999999.0 + fullmax=-999999.0 + self.setWorking("%s: getting max/min"%(workStart)) + tothisto=zeros((self.histonumbins,)) + maxvalue=zeros((self.histonumbins,)) + minvalue=zeros((self.histonumbins,))+9999999.0 + for key in list(self.histograms.keys()): + tothisto+=self.histograms[key] + nums=self.histograms[key]/float(self.numCases[key]) + maxvalue=maximum(maxvalue,nums) + minvalue=where(greater(nums,0.0),minimum(minvalue,nums),minvalue) + for i in range(self.histonumbins): + minval=self.histomin+(i*self.histowidth) + maxval=minval+self.histowidth + print("%3d %5.3f--%5.3f %d"%(i,minval,maxval,tothisto[i])) + if tothisto[i]>0: + fullmin=min(minval,fullmin) + fullmax=max(maxval,fullmax) + #print " fullmin:",fullmin + #print " fullmax:",fullmax + #fullmin=0.025 + #fullmax=2.025 + #print " fullmin:",fullmin + #print " fullmax:",fullmax + # + # If not many bins shown (i.e. 
nearly constant values)...add bins + # up and down until we get 15 bins - so our values are 'centered' in + # a reaonably wide graph + # + numbins=float(fullmax-fullmin-self.histowidth)/float(self.histowidth) + if numbins<15: + while numbins<15: + fullmax=minimum(fullmax+self.histowidth,parmMaxval+self.histohalf) + fullmin=maximum(fullmin-self.histowidth,parmMinval-self.histohalf) + numbins=float(fullmax-fullmin-self.histowidth)/float(self.histowidth) + if ((numbins<15)and(fullminparmMaxval)): + numbins=16 + #print " fullmin:",fullmin + #print " fullmax:",fullmax + # + # + # + numticks=25 + tickInterval=self.niceNumDec((fullmax-fullmin)/float(numticks-1),1) + print("the tickInterval with 25 desired is:",tickInterval) + # + # Dont let tick interval be smaller than parm precision + # + mintick=10**(-parmPrecision) + tickInterval=max(tickInterval,mintick) + print("after checking against precision...tickInterval is:",tickInterval) + # + # Set the minimum graph one tick interval below the minimum...but + # not below the parm minimum value + # + graphmin=(floor(float(fullmin)/float(tickInterval))-1)*tickInterval + graphmin=maximum(graphmin,parmMinval) + # + # Set the maximum graph one tick interval above the maximum...but + # not above the parm maximum value + # + graphmax=(floor(float(fullmax)/float(tickInterval))+2)*tickInterval + graphmax=minimum(graphmax,parmMaxval) + print("so final x-coordinate graph from min/max:",graphmin,graphmax) + # + # Find the maximum Y value for the bins being displayed + # + maxnum=0 + minnum=999999 + for i in range(self.histonumbins): + minval=self.histomin+(i*self.histowidth) + maxval=minval+self.histowidth + if ((minval>=fullmin)and(maxval<=fullmax)): + testmax=maxvalue[i] + maxnum=max(maxnum,testmax) + testmin=minvalue[i] + minnum=min(minnum,testmin) + print("the maximum value to display is:",maxnum) + print("the minimum value to display is:",minnum) + vint=self.niceNumDec(maxnum/20,1) + print("the vertical tick interval: vint:",vint) 
+ maxnum=(int(float(maxnum)/float(vint))+1)*vint + print("the maxnumber to graph is:",maxnum) + # + # + # + # + left=self.cd.curwidth*(175.0/700.0) + right=self.cd.curwidth*(525.0/700.0) + bot=self.cd.curheight*(130.0/530.0) + top=self.cd.curheight*(480.0/530.0) + if ((verType==1)or(parmRateFlag==1)): + logflag=1 + logmax=log(maxnum) + logmin=log(minnum) + print("old min/max=%f,%f"%(minnum,maxnum)) + print("new log range: %7.3f %7.3f"%(logmin,logmax)) + self.setgraph(graphmin,graphmax,logmin,logmax,left,right,bot,top) + self.logvalhaxes(graphmin,graphmax,tickInterval,logmin,logmax,parm) + else: + logflag=0 + self.setgraph(graphmin,graphmax,0,maxnum,left,right,bot,top) + self.valhaxes(graphmin,graphmax,tickInterval,maxnum,vint,parm) + + + ul1="Value Histogram - %s"%parm + self.cdLabels(ul1,totalpoints,dateStyle,dateType,numDays,fromDay,dayList,cycleList) + # + # Draw each histogram + # + totalcount=len(list(self.histograms.keys())) + count=0 + for key in list(self.histograms.keys()): + count+=1 + if self.setAndCheckWorking("%s: drawing histogram %d of %d"%(workStart,count,totalcount))==1: + self.stopWorking() + return + tagbase=key.split("-") + mod=tagbase[0] + fhr=int(tagbase[1]) + fhrstr="f%d"%fhr + tagtuple=(mod,fhrstr) + flabel="%d-hr forecast"%fhr + self.labelLine(flabel,3,justify="right",tags=tagtuple) + + colorname=self.colornames[mod] + bins=self.histograms[key]/float(self.numCases[key]) + nbin=bins.shape[0] + for i in range(nbin): + # + # get x-coords of bin, and ignore bins outside the range + # of x-coordinates that we are showing + # + x1=max(self.histomin+(i*self.histowidth),graphmin) + x2=min(self.histomin+((i+1)*self.histowidth),graphmax) + if x1fullmax: + continue + # + # Logarithmic y-values a little different + # + if logflag==1: + y=bins[i] + if y>0.0: + logy=log(y) + logy=min(logy,logmax) + if i==0: + (sx1,sy1)=self.graphcoord(x1,logmin) + else: + yold=bins[i-1] + if yold>0.0: + logyold=log(yold) + else: + logyold=logmin + 
(sx1,sy1)=self.graphcoord(x1,logyold) + (sx2,sy2)=self.graphcoord(x1,logy) + (sx3,sy3)=self.graphcoord(x2,logy) + if ((i+1)==nbin): + (sx4,sy4)=self.graphcoord(x2,logmin) + self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) + elif bins[i+1]==0: + (sx4,sy4)=self.graphcoord(x2,logmin) + self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) + else: + self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,fill=colorname,tags=tagtuple) + # + # Normal graphing for non-logarithmic y-values + # + else: + y=bins[i] + if y>0: + y=min(y,maxnum) + if i==0: + (sx1,sy1)=self.graphcoord(x1,0) + else: + yold=min(bins[i-1],maxnum) + (sx1,sy1)=self.graphcoord(x1,yold) + (sx2,sy2)=self.graphcoord(x1,y) + (sx3,sy3)=self.graphcoord(x2,y) + if ((i+1)==nbin): + (sx4,sy4)=self.graphcoord(x2,0) + self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) + elif bins[i+1]==0: + (sx4,sy4)=self.graphcoord(x2,0) + self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,sx4,sy4,fill=colorname,tags=tagtuple) + else: + self.cd.canvas.create_line(sx1,sy1,sx2,sy2,sx3,sy3,fill=colorname,tags=tagtuple) + self.but2state[mod]=1 + self.but1state[fhrstr]=1 + + + startBut1(self) + startBut2(self) + + self.stopWorking() + self.moveCD() + self.cd.deiconify() + self.cd.lift() + return + #================================================================== + # expectedValue - display expected value for forecast values + # + # + def expectedValue(self,parmList,cycleList,modelList,obsmodel, + fcstrList,fhrStart,fhrEnd,dateType,numDays,fromDay, + dayList,dateStyle,scale,commonCases,accumHours, + accumFreq): + # + # Clear display - setup title + # + parm=parmList[0] + self.cd.canvas.delete(tkinter.ALL) + self.cd.title("Expected Value Distribution - %s"%parm) + # + # + # + workStart="Working on Expected Value Distribution" + self.startWorking(workStart,optionRemove=0) + # + # + # + NUMTBUTTONS=12 # normal number of time 
buttons on a row - configure + NUMMBUTTONS=6 # normal number of model buttons on a row - configure + # + # get the active EditArea into ea. If the active edit area is + # None - then assume they want to run it over the entire grid + # + editArea=self.getActiveEditArea() + editAreaMask=self.encodeEditArea(editArea) + npts=add.reduce(add.reduce(editAreaMask)) + if (npts==0): + editArea.invert() + editAreaMask=self.encodeEditArea(editArea) + eaflat=ravel(editAreaMask) + totalpoints=add.reduce(eaflat) + # + # make space for saving data + # + self.flists={} # storage for fcst values for each model/forecast hour + self.olists={} # storage for obs values for each model/forecast hour + fullmin=999999.0 + fullmax=-999999.0 + # + # Loop over parm and model + # + totaliters=len(modelList) + iter=0 + # + # For vectors...the parm to read might be different than + # the name of the parm + # + readParm=parm + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + # + # Get information about the parm we are reading + # + (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, + parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) + obsParm=self.VU.getObsParm(readParm) + verType=self.VU.getVerType(readParm) + datatype=self.VU.getVerParmType(readParm) + if ((datatype==1)and(last3=="Dir")): + parmMinval=0 + parmMaxval=360 + # + # get binwidth and bigerr for parm...but for vectors its + # complicated by dir/mag/vecerr options + # + binwidth=self.VU.getVerBinWidth(readParm) + if datatype==1: + (bwMag,bwDir)=binwidth + if last3=="Dir": + binwidth=bwDir + else: + binwidth=bwMag + # + # Setup histogram binning routines + # + self.histosetup(parmMinval,parmMaxval,binwidth) + # + # Get mode for reading obs grids + # + obsGridMode=self.getReadMode(obsmodel,obsParm,0) + # + # Get case times/records for all models + # + caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, + 
dateStyle,dateType,fromDay=fromDay, + numDays=numDays,dayList=dayList, + fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd, + accumHours=accumHours,accumFreq=accumFreq, + commonCases=commonCases,basetimeOffsets=1, + callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + # + # Loop over each model + # + for model in modelList: + iter+=1 + workNow=workStart+":%s (%d of %d)"%(model,iter,totaliters) + # + fcstGridMode=self.getReadMode(model,readParm) + # + # Get all the cases for this model + # + cases=caseInfo[model] + # + # Sort cases by the start/end time, not the basetime + # + casekeys=list(cases.keys()) + casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) + totalcount=len(casekeys) + self.VU.logMsg("reading %d cases for %s"%(totalcount,model),1) + count=0 + lastobs="" + for key in casekeys: + count+=1 + #self.VU.logMsg("%s : %s memory:%d resident:%d"%(model,key,memory(),resident())) + if self.setAndCheckWorking("%s: %d of %d"%(workNow,count,totalcount))==1: + self.stopWorking() + return + (basetimestr,stimestr,etimestr)=key.split(",") + basetime=int(basetimestr) + stime=int(stimestr) + etime=int(etimestr) + (frecList,orecList)=cases[key] + # + # Dont make stats for obs not yet complete + # + if etime>time.time(): + continue + # + # Dont include negative forecast hours + # + fhr=self.VU.getFcstHour(basetime,stime) + if fhr<0: + continue + # + # string to store grid under depends on model and forecast hour + # + saveKey="%s-%3.3d"%(model,fhr) + # + # If a new and different obs time - read the obs data + # + obskey=key.split(",",1)[1] + #self.VU.logMsg("before getObs: %d %d"%(memory(),resident())) + + obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, + stime,etime,mode=obsGridMode, + recList=orecList) + #obsdata1=copy.copy(obsdata) + #del obsdata + obsdata1=obsdata + obsdata1=self.scaleGrid(obsdata1,scale,datatype) + #self.VU.logMsg("after scaling: %d %d"%(memory(),resident())) + # + # 
For probabilistic variables...calculate the + # observed 'yes/no' value + # + if verType==1: + obsdata1=self.getProbVerGrid(readParm,obsdata1)*100.0 + #self.VU.logMsg("after probing: %d %d"%(memory(),resident())) + # + # cant do a value histogram of vector wind + # errors...so those get changed to windSpd + # + if ((datatype!=1)or(last3 in ["Spd","Dir"])): + if last3=="Spd": + obsgrid=obsdata1[0] + elif last3=="Dir": + obsgrid=obsdata1[1] + else: + obsgrid=obsdata1 + else: + obsgrid=obsdata1[0] + obsonly=compress(eaflat,ravel(obsgrid)) + obsList=list(obsonly) + del obsonly + del obsgrid + del obsdata1 + #self.VU.logMsg("down to obsList: %d %d"%(memory(),resident())) + minObs=min(obsList) + maxObs=max(obsList) + fullmin=min(minObs,fullmin) + fullmax=max(maxObs,fullmax) + if saveKey in self.olists: + self.olists[saveKey].extend(obsList) + self.VU.logMsg("extending") + else: + self.olists[saveKey]=[] + self.olists[saveKey].extend(obsList) + self.VU.logMsg("new key") + #self.VU.logMsg("after adding: %d %d"%(memory(),resident())) + del obsList + #self.VU.logMsg("del of obsList: %d %d"%(memory(),resident())) + # + # Read forecast grid and calculate error grid + # + #self.VU.logMsg("before getGrids: %d %d"%(memory(),resident())) + fcstdata=self.VU.getVerGrids(model,basetime,readParm, + stime,etime,mode=fcstGridMode, + recList=frecList) + #self.VU.logMsg("after getGrids: %d %d"%(memory(),resident())) + #fcstdata1=copy.copy(fcstdata) + #self.VU.logMsg("after copy: %d %d"%(memory(),resident())) + #del fcstdata + #self.VU.logMsg("after del: %d %d"%(memory(),resident())) + fcstdata1=fcstdata + fcstdata1=self.scaleGrid(fcstdata1,scale,datatype) + # + # Get the error, handling vector error, etc. 
+ # + if ((datatype!=1)or(last3 in ["Spd","Dir"])): + if last3=="Spd": + fcstgrid=fcstdata1[0] + elif last3=="Dir": + fcstgrid=fcstdata1[1] + else: + fcstgrid=fcstdata1 + else: + fcstgrid=fcstdata1[0] + fcstonly=compress(eaflat,ravel(fcstgrid)) + self.VU.logMsg("pts to save:%s"%fcstonly.shape) + del fcstgrid + del fcstdata1 + fcstList=list(fcstonly) + del fcstonly + #self.VU.logMsg("after fcstList: %d %d"%(memory(),resident())) + minFcst=min(fcstList) + maxFcst=max(fcstList) + fullmin=min(minFcst,fullmin) + fullmax=max(maxFcst,fullmax) + #self.VU.logMsg("after maxmin : %d %d"%(memory(),resident())) + # + # Add values forecast lists for same model/fhr + # + # + if saveKey in self.flists: + self.flists[saveKey].extend(fcstList) + self.VU.logMsg("extending") + else: + self.flists[saveKey]=[] + self.flists[saveKey].extend(fcstList) + self.VU.logMsg("new key") + #self.VU.logMsg("after saving: %d %d"%(memory(),resident())) + del fcstList + #self.VU.logMsg("after del fList: %d %d"%(memory(),resident())) + # + # Get all the keys that will be displayed + # + fullkeys=list(self.flists.keys()) + # + # if no data could be read - stop here + # + if len(fullkeys)<1: + self.stopWorking() + msg="No verification data could be found matching those criteria" + self.statusBarMsg(msg,"U") + return + # + # For buttons...get models/forecasthours actually in the data + # + fullkeys.sort() + fhrstrs=[] + modkeys=[] + for fullkey in fullkeys: + (mod,fhrstr)=fullkey.split("-") + if fhrstr not in fhrstrs: + fhrstrs.append(fhrstr) + if mod not in modkeys: + modkeys.append(mod) + # + # Change fhrstrs (sorted on 3-character 000-999) into + # smaller fhrkeys that are NOT all 3-characters wide + # + fhrstrs.sort() + fhrkeys=[] + for fhrstr in fhrstrs: + fhrkeys.append("%d"%int(fhrstr)) + # + # If an Official button is in there...make it first + # + modkeys.sort() + if "Official" in modkeys: + idx=modkeys.index("Official") + del modkeys[idx] + modkeys.insert(0,"Official") + # + # set colors for 
each model + # + self.colornames={} + index=0 + for mod in modkeys: + self.colornames[mod]=self.COLORLIST[index] + index+=1 + if index==len(self.COLORLIST): + index=0 + # + # Setup first row of buttons (forecast hours) + # + self.setupBut1(fhrkeys,numbuttons=NUMTBUTTONS,arrows=1,width=3) + # + # Setup second row of buttons (models) + # + self.setupBut2(modkeys,numbuttons=NUMMBUTTONS,arrows=1) + # + # If not many bins shown (i.e. nearly constant values)...add bins + # up and down until we get at least 15 bins - so our values are + # 'centered' in a reaonably wide graph + # + numbins=int(float(fullmax-fullmin)/float(binwidth))+1 + if numbins<15: + while numbins<15: + fullmax=min(fullmax+binwidth,parmMaxval) + fullmin=max(fullmin-binwidth,parmMinval) + numbins=int(float(fullmax-fullmin)/float(binwidth))+1 + if ((numbins<15)and(fullmin==parmMinval)and(fullmax==parmMaxval)): + numbins=16 + # + # + # + numticks=25 + tickInterval=self.niceNumDec((fullmax-fullmin)/float(numticks-1),1) + # + # Dont let tick interval be smaller than parm precision + # + mintick=10**(-parmPrecision) + tickInterval=max(tickInterval,mintick) + # + # Set the minimum graph one tick interval below the minimum...but + # not below the parm minimum value + # + graphmin=(floor(float(fullmin)/float(tickInterval))-1)*tickInterval + graphmin=maximum(graphmin,parmMinval) + # + # Set the maximum graph one tick interval above the maximum...but + # not above the parm maximum value + # + graphmax=(floor(float(fullmax)/float(tickInterval))+2)*tickInterval + graphmax=minimum(graphmax,parmMaxval) + # + # + # + numTicks=int(float(graphmax-graphmin)/float(tickInterval))+1 + # + # Set up the graph axes + # + left=self.cd.curwidth*(50.0/700.0) + right=self.cd.curwidth*(650.0/700.0) + bot=self.cd.curheight*(100.0/530.0) + top=self.cd.curheight*(480.0/530.0) + self.setgraph(graphmin,graphmax,graphmin,graphmax,left,right,bot,top) + self.expaxes(graphmin,graphmax,tickInterval) + # + # Label the top of the graph + # + 
ul1="Expected %s Value for %s Forecast"%(obsmodel,parm) + self.cdLabels(ul1,totalpoints,dateStyle,dateType,numDays,fromDay,dayList,cycleList) + # + # for rateParms, or probability parms, label the length of periods + # + if ((verType==1)or(parmRateFlag==1)): + self.labelLine("%d-hr periods"%accumHours,3) + # + # Draw + # + totalcount=len(fullkeys) + count=0 + for key in fullkeys: + count+=1 + self.VU.logMsg("graph %d memory:%d resident:%d"%(count,memory(),resident())) + if self.setAndCheckWorking("%s: drawing graph %d of %d"%(workStart,count,totalcount))==1: + self.stopWorking() + return + tagbase=key.split("-") + mod=tagbase[0] + modnum=modelList.index(mod) + fhr=int(tagbase[1]) + fhrstr="f%d"%fhr + tagtuple=(mod,fhrstr) + flabel="%d-hr forecast"%fhr + self.labelLine(flabel,3,justify="right",tags=tagtuple) + + colorname=self.colornames[mod] + # + # Turn lists for this model/time back into arrays + # + fcstArray=array(self.flists[key]) + obsArray=array(self.olists[key]) + prevAvg=-99999.9 + for i in range(numTicks): + value=graphmin+(i*tickInterval) + valuelow=value-(float(tickInterval)/2.0) + vl1=value-(float(tickInterval)/6.0) + valuehigh=value+(float(tickInterval)/2.0) + vh1=value+(float(tickInterval)/6.0) + pts=logical_and(greater_equal(fcstArray,valuelow),less(fcstArray,valuehigh)) + if sometrue(pts): + #obsDist=sort(compress(pts,obsArray)) + obsDist=compress(pts,obsArray) + numCases=obsDist.shape[0] + #minObs=obsDist[0] + minObs=minimum.reduce(obsDist) + #maxObs=obsDist[numCases-1] + maxObs=maximum.reduce(obsDist) + avgObs=float(add.reduce(obsDist))/float(numCases) + avgObsSquared=float(add.reduce(obsDist*obsDist))/float(numCases) + std=sqrt(avgObsSquared-(avgObs*avgObs)) + #if numCases>1: + # midObs=obsDist[numCases/2] + #else: + # midObs=avgObs + #if numCases>3: + # q1Obs=obsDist[numCases/4] + # q3Obs=obsDist[(3*numCases)/4] + #else: + # q1Obs=avgObs + # q3Obs=avgObs + # + # Graph the average + # + (x1,y1)=self.graphcoord(valuelow,avgObs) + 
(x2,y2)=self.graphcoord(valuehigh,avgObs) + if prevAvg>-99999.0: + self.cd.canvas.create_line(x1,prevAvg,x1,y1,x2,y2,fill=colorname,tags=tagtuple) + else: + self.cd.canvas.create_line(x1,y1,x2,y2,fill=colorname,tags=tagtuple) + prevAvg=y1 + # + # For everything except probability parms...plot min/max/std + # + if verType!=1: + # + # Plot the min + # + (x1,y1)=self.graphcoord(vl1,minObs) + (x2,y2)=self.graphcoord(vh1,minObs) + self.cd.canvas.create_line(x1,y1,x2,y2,fill=colorname,tags=tagtuple) + # + # Plot the max + # + (x1,y1)=self.graphcoord(vl1,maxObs) + (x2,y2)=self.graphcoord(vh1,maxObs) + self.cd.canvas.create_line(x1,y1,x2,y2,fill=colorname,tags=tagtuple) + q1Obs=avgObs-std + q3Obs=avgObs+std + (x1,y1)=self.graphcoord(valuelow,q1Obs) + (x2,y2)=self.graphcoord(valuehigh,q3Obs) + self.cd.canvas.create_polygon(x1,y1,x1,y2,x2,y2,x2,y1,stipple="gray25",fill=colorname,outline="",tags=tagtuple) + del pts + self.but2state[mod]=1 + self.but1state[fhrstr]=1 + del fcstArray + del obsArray + + startBut1(self) + startBut2(self) + + del self.flists + del self.olists + + self.stopWorking() + self.moveCD() + self.cd.deiconify() + self.cd.lift() + self.VU.logMsg("expected value done memory:%d resident:%d"%(memory(),resident())) + return + #================================================================== + # scatterPlot - display scatterplot + # + # + def scatterPlot(self,parmList,cycleList,modelList,obsmodel, + fcstrList,fhrStart,fhrEnd,dateType,numDays,fromDay, + dayList,dateStyle,scale,commonCases,accumHours, + accumFreq): + # + # Clear display - setup title + # + parm=parmList[0] + self.cd.canvas.delete(tkinter.ALL) + self.cd.title("Scatterplot - %s"%parm) + # + # + # + workStart="Working on Verifying Value Distribution" + self.startWorking(workStart,optionRemove=0) + # + # + # + NUMTBUTTONS=12 # normal number of time buttons on a row - configure + NUMMBUTTONS=6 # normal number of model buttons on a row - configure + # + # get the active EditArea into ea. 
If the active edit area is + # None - then assume they want to run it over the entire grid + # + editArea=self.getActiveEditArea() + editAreaMask=self.encodeEditArea(editArea) + npts=add.reduce(add.reduce(editAreaMask)) + if (npts==0): + editArea.invert() + editAreaMask=self.encodeEditArea(editArea) + eaflat=ravel(editAreaMask) + totalpoints=add.reduce(eaflat) + # + # make space for saving data + # + self.flists={} # storage for fcst values for each model/forecast hour + self.olists={} # storage for obs values for each model/forecast hour + fullmin=999999.0 + fullmax=-999999.0 + # + # Loop over parm and model + # + totaliters=len(modelList) + iter=0 + # + # For vectors...the parm to read might be different than + # the name of the parm + # + readParm=parm + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + # + # Get information about the parm we are reading + # + (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, + parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) + obsParm=self.VU.getObsParm(readParm) + verType=self.VU.getVerType(readParm) + datatype=self.VU.getVerParmType(readParm) + if ((datatype==1)and(last3=="Dir")): + parmMinval=0 + parmMaxval=360 + # + # get binwidth and bigerr for parm...but for vectors its + # complicated by dir/mag/vecerr options + # + binwidth=self.VU.getVerBinWidth(readParm) + if datatype==1: + (bwMag,bwDir)=binwidth + if last3=="Dir": + binwidth=bwDir + else: + binwidth=bwMag + # + # Setup histogram binning routines + # + self.histosetup(parmMinval,parmMaxval,binwidth) + # + # Get mode for reading obs grids + # + obsGridMode=self.getReadMode(obsmodel,obsParm,0) + # + # Get case times/records for all models + # + caseInfo=self.VU.getCases(readParm,modelList,obsParm,obsmodel, + dateStyle,dateType,fromDay=fromDay, + numDays=numDays,dayList=dayList, + fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd, + 
accumHours=accumHours,accumFreq=accumFreq, + commonCases=commonCases,basetimeOffsets=1, + callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + # + # Loop over each model + # + for model in modelList: + iter+=1 + workNow=workStart+":%s (%d of %d)"%(model,iter,totaliters) + # + fcstGridMode=self.getReadMode(model,readParm) + # + # Get all the cases for this model + # + cases=caseInfo[model] + # + # Sort cases by the start/end time, not the basetime + # + casekeys=list(cases.keys()) + casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) + totalcount=len(casekeys) + self.VU.logMsg("reading %d cases for %s"%(totalcount,model),1) + count=0 + lastobs="" + for key in casekeys: + count+=1 + self.VU.logMsg("%s : %s"%(model,key),10) + if self.setAndCheckWorking("%s: %d of %d"%(workNow,count,totalcount))==1: + self.stopWorking() + return + (basetimestr,stimestr,etimestr)=key.split(",") + basetime=int(basetimestr) + stime=int(stimestr) + etime=int(etimestr) + (frecList,orecList)=cases[key] + # + # Dont make stats for obs not yet complete + # + if etime>time.time(): + continue + # + # Dont include negative forecast hours + # + fhr=self.VU.getFcstHour(basetime,stime) + if fhr<0: + continue + # + # string to store grid under depends on model and forecast hour + # + saveKey="%s-%3.3d"%(model,fhr) + # + # If a new and different obs time - read the obs data + # + obskey=key.split(",",1)[1] + obsdata=self.VU.getVerGrids(obsmodel,basetime,obsParm, + stime,etime,mode=obsGridMode, + recList=orecList) + obsdata=self.scaleGrid(obsdata,scale,datatype) + # + # For probabilistic variables...calculate the + # observed 'yes/no' value + # + if verType==1: + obsdata=self.getProbVerGrid(readParm,obsdata)*100.0 + # + # cant do a value histogram of vector wind + # errors...so those get changed to windSpd + # + if ((datatype!=1)or(last3 in ["Spd","Dir"])): + if last3=="Spd": + obsgrid=obsdata[0] + elif last3=="Dir": + obsgrid=obsdata[1] + 
else: + obsgrid=obsdata + obsonly=compress(eaflat,ravel(obsgrid)) + else: + obsgrid=obsdata[0] + obsonly=compress(eaflat,ravel(obsgrid)) + obsList=list(obsonly) + minObs=min(obsList) + maxObs=max(obsList) + fullmin=min(minObs,fullmin) + fullmax=max(maxObs,fullmax) + if saveKey in self.olists: + self.olists[saveKey].extend(obsList) + else: + self.olists[saveKey]=obsList + # + # Read forecast grid and calculate error grid + # + fcstdata=self.VU.getVerGrids(model,basetime,readParm, + stime,etime,mode=fcstGridMode, + recList=frecList) + fcstdata=self.scaleGrid(fcstdata,scale,datatype) + # + # Get the error, handling vector error, etc. + # + if ((datatype!=1)or(last3 in ["Spd","Dir"])): + if last3=="Spd": + fcstgrid=fcstdata[0] + elif last3=="Dir": + fcstgrid=fcstdata[1] + else: + fcstgrid=fcstdata + fcstonly=compress(eaflat,ravel(fcstgrid)) + else: + fcstgrid=fcstdata[0] + fcstonly=compress(eaflat,ravel(fcstgrid)) + fcstList=list(fcstonly) + minFcst=min(fcstList) + maxFcst=max(fcstList) + fullmin=min(minFcst,fullmin) + fullmax=max(maxFcst,fullmax) + # + # Add values forecast lists for same model/fhr + # + # + if saveKey in self.flists: + self.flists[saveKey].extend(fcstList) + else: + self.flists[saveKey]=fcstList + # + # Get all the keys that will be displayed + # + fullkeys=list(self.flists.keys()) + # + # if no data could be read - stop here + # + if len(fullkeys)<1: + self.stopWorking() + msg="No verification data could be found matching those criteria" + self.statusBarMsg(msg,"U") + return + # + # For buttons...get models/forecasthours actually in the data + # + fullkeys.sort() + fhrstrs=[] + modkeys=[] + for fullkey in fullkeys: + (mod,fhrstr)=fullkey.split("-") + if fhrstr not in fhrstrs: + fhrstrs.append(fhrstr) + if mod not in modkeys: + modkeys.append(mod) + # + # Change fhrstrs (sorted on 3-character 000-999) into + # smaller fhrkeys that are NOT all 3-characters wide + # + fhrstrs.sort() + fhrkeys=[] + for fhrstr in fhrstrs: + 
fhrkeys.append("%d"%int(fhrstr)) + # + # If an Official button is in there...make it first + # + modkeys.sort() + if "Official" in modkeys: + idx=modkeys.index("Official") + del modkeys[idx] + modkeys.insert(0,"Official") + # + # set colors for each model + # + self.colornames={} + index=0 + for mod in modkeys: + self.colornames[mod]=self.COLORLIST[index] + index+=1 + if index==len(self.COLORLIST): + index=0 + # + # Setup first row of buttons (forecast hours) + # + self.setupBut1(fhrkeys,numbuttons=NUMTBUTTONS,arrows=1,width=3) + # + # Setup second row of buttons (models) + # + self.setupBut2(modkeys,numbuttons=NUMMBUTTONS,arrows=1) + # + # If not many bins shown (i.e. nearly constant values)...add bins + # up and down until we get at least 15 bins - so our values are + # 'centered' in a reaonably wide graph + # + numbins=int(float(fullmax-fullmin)/float(binwidth))+1 + if numbins<15: + while numbins<15: + fullmax=min(fullmax+binwidth,parmMaxval) + fullmin=max(fullmin-binwidth,parmMinval) + numbins=int(float(fullmax-fullmin)/float(binwidth))+1 + if ((numbins<15)and(fullmin==parmMinval)and(fullmax==parmMaxval)): + numbins=16 + # + # + # + numticks=25 + tickInterval=self.niceNumDec((fullmax-fullmin)/float(numticks-1),1) + # + # Dont let tick interval be smaller than parm precision + # + mintick=10**(-parmPrecision) + tickInterval=max(tickInterval,mintick) + # + # Set the minimum graph one tick interval below the minimum...but + # not below the parm minimum value + # + graphmin=(floor(float(fullmin)/float(tickInterval))-1)*tickInterval + graphmin=maximum(graphmin,parmMinval) + # + # Set the maximum graph one tick interval above the maximum...but + # not above the parm maximum value + # + graphmax=(floor(float(fullmax)/float(tickInterval))+2)*tickInterval + graphmax=minimum(graphmax,parmMaxval) + # + # + # + numTicks=int(float(graphmax-graphmin)/float(tickInterval))+1 + # + # Set up the graph axes + # + left=self.cd.curwidth*(50.0/700.0) + 
right=self.cd.curwidth*(650.0/700.0) + bot=self.cd.curheight*(100.0/530.0) + top=self.cd.curheight*(480.0/530.0) + self.setgraph(graphmin,graphmax,graphmin,graphmax,left,right,bot,top) + self.valaxes(graphmin,graphmax,tickInterval) + # + numPts=totalpoints + ul1="Scatterplot - %s"%parm + self.cdLabels(ul1,numPts,dateStyle,dateType,numDays,fromDay,dayList,cycleList) + # + # + # + #self.probaxes() + # + # + # + MaxValuesToShow=1000 + numbins=50 + numPts=totalpoints + counts={} + maxcounts={} + maxnum=0 + self.setWorking("%s: scanning scatterplots"%workStart) + for key in list(self.flists.keys()): + maxnum=max(maxnum,len(self.flists[key])) + if self.checkWorking()==1: + self.stopWorking() + return + if maxnum>MaxValuesToShow: + binsize=float(graphmax-graphmin)/float(numbins) + #print "binsize=",binsize + totaldcount=len(list(self.flists.keys())) + dcount=0 + for key in list(self.flists.keys()): + dcount+=1 + self.setWorking("%s: large scatterplot thinning: %d of %d"%(workStart,dcount,totaldcount)) + if self.checkWorking()==1: + self.stopWorking() + return + if len(self.flists[key])>MaxValuesToShow: + count=zeros((numbins,numbins)) + xpos=minimum(((array(self.olists[key])-graphmin)/binsize).astype(int),numbins-1) + ypos=minimum(((array(self.flists[key])-graphmin)/binsize).astype(int),numbins-1) + xl=list(xpos) + yl=list(ypos) + for i in range(len(xl)): + x=xl[i] + y=yl[i] + count[y,x]+=1 + maxcounts[key]=maximum.reduce(maximum.reduce(count)) + #print "maxcounts[",key,"]=",maxcounts[key] + counts[key]=count + # + # Draw + # + totalcount=len(fullkeys) + count=0 + for key in fullkeys: + count+=1 + self.setWorking("%s: drawing scatterplot %d of %d"%(workStart,count,totalcount)) + if self.checkWorking()==1: + self.stopWorking() + return + tagbase=key.split("-") + mod=tagbase[0] + modnum=modelList.index(mod) + fhr=int(tagbase[1]) + fhrstr="f%d"%fhr + tagtuple=(mod,fhrstr) + flabel="%d-hr forecast"%fhr + self.labelLine(flabel,3,justify="right",tags=tagtuple) + + 
colorname=self.colornames[mod] + + if key not in list(maxcounts.keys()): + ylist=self.flists[key] + xlist=self.olists[key] + for i in range(len(ylist)): + (x,y)=self.graphcoord(xlist[i],ylist[i]) + self.cd.canvas.create_line(x-2,y,x+2,y,fill=colorname,tags=tagtuple) + self.cd.canvas.create_line(x,y-2,x,y+2,fill=colorname,tags=tagtuple) + else: + for i in range(numbins): + midx=graphmin+((i+0.5)*binsize) + for j in range(numbins): + midy=graphmin+((j+0.5)*binsize) + width=(float(counts[key][j,i])/float(maxcounts[key]))*binsize*0.5 + (x0,y0)=self.graphcoord(midx-width,midy-width) + (x1,y1)=self.graphcoord(midx+width,midy+width) + if width>0.01: + self.cd.canvas.create_arc(x0,y0,x1,y1,fill=colorname,outline=colorname,start=0.0,extent=359.9,width=1.0,tags=tagtuple) + + self.but2state[mod]=1 + self.but1state[fhrstr]=1 + + startBut1(self) + startBut2(self) + + self.stopWorking() + self.moveCD() + self.cd.deiconify() + self.cd.lift() + return + #================================================================== + # scaleGrid - smooth a grid by the scale amount. Correctly handles + # vectors indicated by datatype==1. 
+ # + def scaleGrid(self,griddata,scale,datatype): + if scale>0: + if datatype!=1: + griddata=self.VU.smoothpm(griddata,scale) + else: + (gridmag,griddir)=griddata + (u,v)=self.MagDirToUV(gridmag,griddir) + us=self.VU.smoothpm(u,scale) + vs=self.VU.smoothpm(v,scale) + (gridmag,griddir)=self.UVToMagDir(us,vs) + griddata=(gridmag,griddir) + return griddata + #================================================================== + # moveCD - if the first time self.cd is displayed - move to a good + # location + # + def moveCD(self): + if self.cd.firstDisplay==1: + self.cd.update_idletasks() + geo=self.cd.geometry() + (mwh,mof)=geo.split("+",1) + (mw,mh)=mwh.split("x",1) + parentgeo=self.root.geometry() + (wh,of)=parentgeo.split("+",1) + (w,h)=wh.split("x",1) + (ox,oy)=of.split("+",1) + xoff=int(ox)+int(w)-int(mw) + yoff=int((int(h)-int(mh))/2.0)+int(oy) + newgeo=mwh+"+%d+%d"%(xoff,yoff) + self.cd.geometry(newgeo) + self.cd.firstDisplay=0 + return + #================================================================== + # showStats - display point/area statistics + # + def ShowStats(self,DialogDict): + self.VU.logMsg("running ShowStats:") + + plotType=DialogDict["PlotType"] + if plotType=="vs. Time": + self.makeTimeSeries(DialogDict) + if plotType=="vs. 
Fcst Hour": + self.makeFhourGraph(DialogDict) + return + #================================================================== + # makeTimeSeries - display time series for point/area + # + def makeTimeSeries(self,DialogDict): + self.VU.logMsg("running makeTimeSeries:") + display=DialogDict["Display"] + areaList=DialogDict["areaList"] + AreaCombine=DialogDict["AreaCombine"] + parmList=DialogDict["Parms"] + threshold=DialogDict["Threshold"] + cycleList=DialogDict["cycleList"] + modelList=DialogDict["Models"] + obsmodel=DialogDict["ObsModel"] + fcstrList=DialogDict["fcstrList"] + fhrStart=DialogDict["fhrStart"] + fhrEnd=DialogDict["fhrEnd"] + dateType=DialogDict["dateType"] + numDays=DialogDict["numDays"] + fromDay=DialogDict["fromDay"] + dayList=DialogDict["dayList"] + dateStyle=DialogDict["dateStyle"] + plotType=DialogDict["PlotType"] + scale=DialogDict["scale"] + commonCases=DialogDict["commonCases"] + accumHours=DialogDict["accumHours"] + accumFreq=DialogDict["accumFreq"] + TwoCatType=DialogDict["TwoCatType"] + TwoCatCond=DialogDict["TwoCatCond"] + TwoCatValue=DialogDict["TwoCatValue"] + TwoCatValueString=DialogDict["TwoCatValueString"] + # + # Check for good GUI input + # + ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, + dayList) + if ret==0: + return + # + # Check that we do not have too many things varying + # + if len(parmList)>1: + parmVary=1 + else: + parmVary=0 + if ((len(areaList)>1)and(AreaCombine==0)): + areaVary=1 + else: + areaVary=0 + if len(modelList)>1: + modelVary=1 + else: + modelVary=0 + totalVary=parmVary+areaVary+modelVary + if totalVary>1: + msg="Can only vary one of parm/area/model when doing 'vs. Time' graphs." 
+ self.statusBarMsg(msg,"U") + return + # + # If nothing varying - pick model + # + if totalVary==0: + modelVary=1 + # + # get list of names of selected edit areas into areaNames + # + areaNames=[] + nameList=self.VU.listEditAreas() + descList=self.VU.listEditAreaDescriptions() + for areaDesc in areaList: + if areaDesc=="Current": + areaNames.append("Current") + elif areaDesc in descList: + areaNum=descList.index(areaDesc) + areaNames.append(nameList[areaNum]) + if len(areaNames)<1: + msg="Invalid Edit Area(s) - contact support" + self.statusBarMsg(msg,"U") + return + print("the areaNames are:",areaNames) + comboArea=self.empty(bool) + if ((AreaCombine==1)and(len(areaNames)>1)): + for areaName in areaNames: + if areaName=="Current": + areaObject=self.getActiveEditArea() + mask=self.encodeEditArea(areaObject) + any=add.reduce(add.reduce(mask)) + if any==0: + mask=self.newGrid(True, bool) + elif areaName=="NONE": + mask=self.newGrid(True, bool) + else: + mask=self.encodeEditArea(areaName) + comboArea=logical_or(comboArea,mask) + # + # + # + statName=display + if statName=="TwoCat": + statName=TwoCatType + statVal=TwoCatValue + statCond=TwoCatCond + # + # Clear the cd canvas - setup title + # + self.cd.canvas.delete(tkinter.ALL) + self.cd.title("Statistic Time Series") + workStart="Working on Statistics" + self.startWorking(workStart,optionRemove=0) + # + # + # + outdata={} + fhrList=[] + timemin=1e32 + timemax=-1e32 + valmin=1.0e32 + valmax=-1.0e32 + + countimax=len(parmList)*len(modelList) + counti=0 + for parm in parmList: + readParm=parm + vectorType=-1 + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + if last3=="Spd": + vectorType==0 + else: + vectorType==1 + obsParm=self.VU.getObsParm(readParm) + verType=self.VU.getVerType(readParm) + datatype=self.VU.getVerParmType(readParm) + thresholds=self.VU.getVerThresholds(readParm) + if last3=="Spd": + thresholds=thresholds[0] + elif last3=="Dir": + 
thresholds=thresholds[1] + thresholdValue=thresholds[threshold] + + statsCases=self.VU.getStatCases(parm,modelList,obsmodel,dateStyle,dateType, + fromDay=fromDay,numDays=numDays,dayList=dayList, + fcstrs=fcstrList,cycles=cycleList,fhrStart=fhrStart, + fhrEnd=fhrEnd,accumHours=accumHours,accumFreq=accumFreq, + commonCases=commonCases,basetimeOffsets=1, + callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + gridsCases=self.VU.getCases(readParm,modelList,obsParm,obsmodel,dateStyle,dateType, + fromDay=fromDay,numDays=numDays,dayList=dayList, + fcstrs=fcstrList,cycles=cycleList,fhrStart=fhrStart, + fhrEnd=fhrEnd,accumHours=accumHours,accumFreq=accumFreq, + requireObs=1,commonCases=commonCases,basetimeOffsets=1, + callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + for model in modelList: + counti+=1 + workNow="Reading %s %s (%d of %d)"%(parm,model,counti,countimax) + scases=statsCases[model] + if model in list(gridsCases.keys()): + gcases=gridsCases[model] + else: + gcases={} + # + # get overall list of keys (both stat and grid) + # + skeys=list(scases.keys()) + gkeys=list(gcases.keys()) + tkeys=skeys + for gkey in gkeys: + if gkey not in tkeys: + tkeys.append(gkey) + + # + # Loop over possible stat or grid cases + # + count=0 + totalcount=len(tkeys) + for key in tkeys: + count+=1 + self.setWorking("%s: %d of %d"%(workNow,count,totalcount)) + if self.checkWorking()==1: + self.stopWorking() + return + if key in scases: + srecList=scases[key] + else: + srecList=None + if key in gcases: + grecList=gcases[key] + else: + grecList=None + # + # + # + (basestr,stimestr,etimestr)=key.split(",") + basetime=int(basestr) + starttime=int(stimestr) + endtime=int(etimestr) + # + # Done show results for grids not yet complete + # + if endtime>time.time(): + continue + # + # Dont show results for grids that start before forecast time + # + fhr=(starttime-basetime)/HOURSECS + if fhr<0: + continue + # + 
# X-coordinate is 'starttime' when doing "Verifying On" displays + # and 'basetime' when doing "Forecast on" displays + # + if dateStyle=="Verifying on": + x=starttime + else: + x=basetime + timemin=min(x,timemin) + timemax=max(x,timemax) + # + # Thresholds are different for different variables + # + if statName=="Percent Err <": + statVal=thresholdValue + # + # When AreaCombine is on...we already have the combined + # edit area ready + # + if ((AreaCombine==1)and(len(areaNames)>1)): + eaGrid=comboArea + + outkey="%s,%s,%3.3d,-01"%(parm,model,fhr) + if outkey not in list(outdata.keys()): + outdata[outkey]=[] + + valx=self.VU.getVerStat(model,basetime,readParm,starttime,endtime, + obsmodel,statName,statVal=statVal, + statCond=statCond,editArea=eaGrid, + smooth=scale,vectorType=vectorType, + srecList=srecList,grecList=grecList) + if valx is None: + print("getVerStat returned None") + continue + valmin=min(valx,valmin) + valmax=max(valx,valmax) + outdata[outkey].append((x,valx)) + # + # When AreaCombine is off...loop over editAreas + # + else: + for areaName in areaNames: + outkey="%s,%s,%3.3d,%s"%(parm,model,fhr,areaName) + if outkey not in list(outdata.keys()): + outdata[outkey]=[] + if areaName=="Current": + areaObject=self.getActiveEditArea() + ea=self.encodeEditArea(areaObject) + any=add.reduce(add.reduce(ea)) + if any==0: + ea=self.newGrid(True, bool) + elif areaName=="NONE": + ea=self.newGrid(True, bool) + else: + ea=areaName + valx=self.VU.getVerStat(model,basetime,readParm,starttime,endtime, + obsmodel,statName,statVal=statVal, + statCond=statCond,editArea=ea, + smooth=scale,vectorType=vectorType, + srecList=srecList,grecList=grecList) + if valx is None: + print("getVerStat returned None") + continue + valmin=min(valx,valmin) + valmax=max(valx,valmax) + outdata[outkey].append((x,valx)) + #self.VU.setDebug(0) + # + # + # + if fhr not in fhrList: + fhrList.append(fhr) + # + # + # If no data read - don't go further + # + if len(list(outdata.keys()))<1: + 
self.stopWorking() + msg="No verification data could be found matching those criteria" + self.statusBarMsg(msg,"U") + return + #print "done reading" + # + # valmin/valmax usually works - but for bounded stats + # we always want 0.0 to be the lower bound + # + if display in ["RMS Error","Std Dev","Mean Abs Err"]: + valmin=0.0 + if display=="Percent Err <": + valmin=0.0 + valmax=1.0 + #print "value range:",valmin,valmax + # + # time buttons + # + fhrList.sort() + fList=[] + for fhr in fhrList: + fList.append("%d"%fhr) + self.setupBut1(fList,numbuttons=12,arrows=1,width=3) + # + # First part of title line is the type of error + # + if display=="TwoCat": + titleLine="%s Timeseries - "%TwoCatType + elif display!="Percent Err <": + titleLine="%s Timeseries - "%display + else: + titleLine="%s %d Timeseries - "%(display,threshold) + # + # set varList to the thing that varies: model-parm-area + # + if parmVary==1: + varList=parmList[:] + varButtons=6 + titleLine+=modelList[0] + elif modelVary==1: + if "Official" in modelList: + idx=modelList.index("Official") + del modelList[idx] + modelList.insert(0,"Official") + varList=modelList[:] + varButtons=6 + titleLine+=parmList[0] + if display=="TwoCat": + titleLine+=" %s %s"%(TwoCatCond,TwoCatValueString) + else: + varList=areaList[:] + varButtons=3 + titleLine+="%s %s"%(modelList[0],parmList[0]) + # + # Associate colors with the varying model/parm/area + # + self.colornames={} + index=0 + for var in varList: + self.colornames[var]=self.COLORLIST[index] + index+=1 + if index==len(self.COLORLIST): + index=0 + # + # Make buttons + # + self.setupBut2(varList,numbuttons=varButtons,arrows=1) + # + # Setup graphing coordinates + # + numticks=10 + graphrange=valmax-valmin + print("graphrange=",graphrange) + tickInterval=self.niceNumDec(graphrange/(numticks-1),1) + #print "tickInterval=",tickInterval + + left=self.cd.curwidth*(50.0/700.0) + right=self.cd.curwidth*(650.0/700.0) + bot=self.cd.curheight*(130.0/530.0) + 
top=self.cd.curheight*(480.0/530.0) + self.setgraph(timemin,timemax,valmin,valmax,left,right,bot,top) + self.graphaxes(timemin,timemax,valmin,valmax) + # + # Draw timeseries lines + # + for key in list(outdata.keys()): + #print "timeseries for ",key + tagbase=key.split(",") + fhr="f%d"%int(tagbase[2]) + # + if parmVary==1: + varTag=tagbase[0] + elif modelVary==1: + varTag=tagbase[1] + else: + #varTag=self.VU.EditAreaDescriptions[int(tagbase[3])] + areaNum=self.VU.getEditAreaNumberFromName(tagbase[3]) + varTag=self.VU.EditAreaDescriptions[areaNum] + tagtuple=(varTag,fhr) + + colorname=self.colornames[varTag] + points=outdata[key] + points.sort() + gpoints=[] + for point in points: + (xtime,val)=point + (x,y)=self.graphcoord(xtime,val) + gpoints.append(x) + gpoints.append(y) + if len(gpoints)>3: + self.cd.canvas.create_line(gpoints,fill=colorname,tags=tagtuple) + self.but2state[varTag]=1 + self.but1state[fhr]=1 + # + # Label forecast times + # + for fhr in fhrList: + fhrstr="f%d"%fhr + labelstr="%d-hr Forecast"%fhr + self.labelLine(labelstr,3,justify='right',tags=(fhrstr)) + # + # Turn off all but first + # + startBut1(self) + startBut2(self) + # + # Labels at top of graph + # + #if len(areaList)>1: + # if AreaCombine==1: + # numPts=0 + # for areaNum in areaNums: + # numPts+=self.pts[areaNum] + # else: + # numPts=-1 + #else: + # numPts=self.pts[areaNums[0]] + numPts=-1 + self.cdLabels(titleLine,numPts,dateStyle,dateType,numDays,fromDay, + dayList,cycleList) + # + # Done - show the results + # + self.stopWorking() + self.moveCD() + self.cd.deiconify() + self.cd.lift() + + return + + #================================================================== + # makeFhourGraph - display graph of average error at various fhrs + # + def makeFhourGraph(self,DialogDict): + self.VU.logMsg("running makeFhourGraph:") + display=DialogDict["Display"] + areaList=DialogDict["areaList"] + AreaCombine=DialogDict["AreaCombine"] + parmList=DialogDict["Parms"] + 
threshold=DialogDict["Threshold"] + cycleList=DialogDict["cycleList"] + modelList=DialogDict["Models"] + obsmodel=DialogDict["ObsModel"] + fcstrList=DialogDict["fcstrList"] + fhrStart=DialogDict["fhrStart"] + fhrEnd=DialogDict["fhrEnd"] + dateType=DialogDict["dateType"] + numDays=DialogDict["numDays"] + fromDay=DialogDict["fromDay"] + dayList=DialogDict["dayList"] + dateStyle=DialogDict["dateStyle"] + plotType=DialogDict["PlotType"] + scale=DialogDict["scale"] + commonCases=DialogDict["commonCases"] + accumHours=DialogDict["accumHours"] + accumFreq=DialogDict["accumFreq"] + TwoCatType=DialogDict["TwoCatType"] + TwoCatCond=DialogDict["TwoCatCond"] + TwoCatValue=DialogDict["TwoCatValue"] + TwoCatValueString=DialogDict["TwoCatValueString"] + # + # Check for good GUI input + # + ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, + dayList) + if ret==0: + return + # + # Check that we do not have too many things varying + # + if len(parmList)>1: + parmVary=1 + else: + parmVary=0 + if ((len(areaList)>1)and(AreaCombine==0)): + areaVary=1 + else: + areaVary=0 + if len(modelList)>1: + modelVary=1 + else: + modelVary=0 + totalVary=parmVary+areaVary+modelVary + if totalVary>2: + msg="Can only vary two of parm/area/model when doing 'vs. 
Fcst Hour' graphs" + self.statusBarMsg(msg,"U") + return + # + # If only varying one thing - then set the other to model, unless that + # is the one already being done - and then set to parm + # + if totalVary==1: + if modelVary==1: + parmVary=1 + else: + modelVary=1 + # + # + # + if parmVary==1: + if modelVary==1: + varList1=parmList[:] + varList2=modelList[:] + else: + varList1=parmList[:] + varList2=areaList[:] + else: + varList1=areaList[:] + varList2=modelList[:] + # + # Clear the cd canvas - setup title + # + self.cd.canvas.delete(tkinter.ALL) + self.cd.title("Statistic Graph") + workStart="Working on Statistics" + self.startWorking(workStart,optionRemove=0) + # + # get names of selected edit areas + # + areaNames=[] + descList=self.VU.listEditAreaDescriptions() + nameList=self.VU.listEditAreas() + for areaDesc in areaList: + if areaDesc=="Current": + areaNames.append("Current") + elif areaDesc in descList: + areaNum=descList.index(areaDesc) + areaNames.append(nameList[areaNum]) + if len(areaNames)<1: + msg="Invalid Edit Area(s) - contact support" + self.statusBarMsg(msg,"U") + return + # + # For 'combined areas' - setup the comboArea just once + # + comboArea=self.empty(bool) + if ((AreaCombine==1)and(len(areaNames)>1)): + for areaName in areaNames: + if areaName=="Current": + areaObject=self.getActiveEditArea() + mask=self.encodeEditArea(areaObject) + any=add.reduce(add.reduce(mask)) + if any==0: + mask=self.newGrid(True, bool) + elif areaName=="NONE": + mask=self.newGrid(True, bool) + else: + mask=self.encodeEditArea(areaName) + comboArea=logical_or(comboArea,mask) + # + # If any of the TwoCat stats are requested - then get + # the contingency table entries instead + # + statName=display + if statName=="TwoCat": + statName="cont" + if TwoCatType[0:1]=="A": + statName="acont" + statVal=TwoCatValue + statCond=TwoCatCond + statID=self.VU.getStatID(TwoCatType) + # + # + # + # + # + # + sumdata={} + cntdata={} + hitsdata={} + missdata={} + falrdata={} + 
corndata={} + # + timemin=1e32 + timemax=-1e32 + valmin=1.0e32 + valmax=-1.0e32 + + countimax=len(parmList)*len(modelList) + counti=0 + for parm in parmList: + # + # + # + (parmUnits,parmPrecision,parmMinval,parmMaxval,parmRateFlag,parmColorTable, + parmDisplayMinval,parmDisplayMaxval)=self.getParmInfo(self.mutableID(),parm) + # + # setup readParm - which is usually the same as parm, but + # can be different for the Spd/Dir components of a vector + # + readParm=parm + vectorType=-1 + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if (last3 in ["Spd","Dir"]): + readParm=parm[:-3] + if last3=="Spd": + vectorType==0 + else: + vectorType==1 + # + # Get the observed parm for this parm, the verification type, + # the data type and the thresholds + # + obsParm=self.VU.getObsParm(readParm) + verType=self.VU.getVerType(readParm) + datatype=self.VU.getVerParmType(readParm) + thresholds=self.VU.getVerThresholds(readParm) + if last3=="Spd": + thresholds=thresholds[0] + elif last3=="dir": + thresholds=thresholds[1] + thresholdValue=thresholds[threshold] + # + # If using the threshold stat - set it now + # + if statName=="Percent Err <": + statVal=thresholdValue + # + # Get the statCases for this parm + # + statCases=self.VU.getStatCases(parm,modelList,obsmodel,dateStyle, + dateType,fromDay=fromDay,numDays=numDays, + dayList=dayList,fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, + accumFreq=accumFreq,commonCases=commonCases, + basetimeOffsets=1,callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + # + # Get the gridCases for this parm + # + gridCases=self.VU.getCases(readParm,modelList,obsParm,obsmodel, + dateStyle,dateType,fromDay=fromDay,numDays=numDays, + dayList=dayList,fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, + accumFreq=accumFreq,requireObs=1,commonCases=commonCases, + basetimeOffsets=1,callbackMethod=self.workingCommon) + if 
self.checkWorking()==1: + self.stopWorking() + return + for model in modelList: + counti+=1 + workStart="Reading %s %s (%d of %d)"%(parm,model,counti,countimax) + # + # get cases for this model + # + scases=statCases[model] + if model in list(gridCases.keys()): + gcases=gridCases[model] + else: + gcases={} + # + # get overall list of keys (both stat and grid) in tkeys + # + skeys=list(scases.keys()) + gkeys=list(gcases.keys()) + tkeys=skeys + for gkey in gkeys: + if gkey not in tkeys: + tkeys.append(gkey) + # + # Loop over possible stat or grid cases + # + count=0 + totalcount=len(tkeys) + for key in tkeys: + # + # Check for user interrupting + # + count+=1 + if self.setAndCheckWorking("%s: %d of %d"%(workStart,count,totalcount))==1: + self.stopWorking() + return + # + # Get times for this case + # + (basestr,stimestr,etimestr)=key.split(",") + basetime=int(basestr) + starttime=int(stimestr) + endtime=int(etimestr) + # + # Do not use results for grids that are not yet complete + # + if endtime>time.time(): + continue + # + # Do not show results for grids that start before + # forecast time + # + fhr=(starttime-basetime)/HOURSECS + if fhr<0: + continue + # + # Get list of records for this cases + # + if key in scases: + srecList=scases[key] + else: + srecList=None + if key in gcases: + grecList=gcases[key] + else: + grecList=None + # + # + # + # + # When areaCombine is on...we already have the combined + # edit area ready + # + if ((AreaCombine==1)and(len(areaNames)>1)): + eaGrid=comboArea + valx=self.VU.getVerStat(model,basetime,readParm,starttime, + endtime,obsmodel,statName,statVal=statVal, + statCond=statCond,editArea=eaGrid, + smooth=scale,vectorType=vectorType, + srecList=srecList,grecList=grecList) + if valx is None: + print("getVerStat returned None") + continue + # + # store sums in parm,model,fhr,areaName keys + # + outkey="%s,%s,%3.3d,-01"%(parm,model,fhr) + if display!="TwoCat": + if outkey not in list(sumdata.keys()): + sumdata[outkey]=0.0 + 
cntdata[outkey]=0 + sumdata[outkey]+=valx + cntdata[outkey]+=1 + else: + if outkey not in list(hitsdata.keys()): + hitsdata[outkey]=0 + missdata[outkey]=0 + falrdata[outkey]=0 + corndata[outkey]=0 + (hits,miss,falr,corn)=valx + hitsdata[outkey]+=hits + missdata[outkey]+=miss + falrdata[outkey]+=falr + corndata[outkey]+=corn + else: + for areaName in areaNames: + if areaName=="Current": + areaObject=self.getActiveEditArea() + ea=self.encodeEditArea(areaObject) + any=add.reduce(add.reduce(ea)) + if any==0: + ea=self.newGrid(True, bool) + elif areaName=="NONE": + ea=self.newGrid(True, bool) + else: + ea=areaName + valx=self.VU.getVerStat(model,basetime,readParm,starttime, + endtime,obsmodel,statName,statVal=statVal, + statCond=statCond,editArea=ea, + smooth=scale,vectorType=vectorType, + srecList=srecList,grecList=grecList) + if valx is None: + print("getVerStat returned None") + continue + # + # + # + outkey="%s,%s,%3.3d,%s"%(parm,model,fhr,areaName) + if display!="TwoCat": + if display=="RMS Error": + valx=valx**2 + if outkey not in list(sumdata.keys()): + sumdata[outkey]=0.0 + cntdata[outkey]=0 + sumdata[outkey]+=valx + cntdata[outkey]+=1 + else: + if outkey not in list(hitsdata.keys()): + hitsdata[outkey]=0 + missdata[outkey]=0 + falrdata[outkey]=0 + corndata[outkey]=0 + cntdata[outkey]=0 + (hits,miss,falr,corn)=valx + hitsdata[outkey]+=hits + missdata[outkey]+=miss + falrdata[outkey]+=falr + corndata[outkey]+=corn + cntdata[outkey]+=1 + # + # if no data could be read - stop here + # + if len(list(cntdata.keys()))<1: + self.stopWorking() + msg="No verification data could be found matching those criteria" + self.statusBarMsg(msg,"U") + return + # + # We now have the sums...calculate the scores + # + fhrList=[] + valmin=1e32 + valmax=-1e32 + timemin=0 + timemax=0 + outdata={} + for key in list(cntdata.keys()): + if cntdata[key]<1: + continue + if display!="TwoCat": + stat=float(sumdata[key])/float(cntdata[key]) + if display=="RMS Error": + stat=sqrt(stat) + else: + 
hits=hitsdata[key] + miss=missdata[key] + falr=falrdata[key] + corn=corndata[key] + stat=self.VU.getTwoCatStat(statID,hits,miss,falr,corn) + # + # + # + valmin=min(valmin,stat) + valmax=max(valmax,stat) + (parm,model,fhrstr,areaName)=key.split(",") + areaNum=self.VU.getEditAreaNumberFromName(areaName) + fhr=int(fhrstr) + timemax=max(fhr,timemax) + if fhr not in fhrList: + fhrList.append(fhr) + if parmVary==1: + if modelVary==1: + outkey="%s,%s"%(parm,model) + else: + # + # ******** ???????? number or name? What if 'current' or -1? + # + outkey="%s,%s"%(parm,self.VU.EditAreaDescriptions[int(areaNum)]) + else: + outkey="%s,%s"%(self.VU.EditAreaDescriptions[int(areaNum)],model) + if outkey not in list(outdata.keys()): + outdata[outkey]=[] + outdata[outkey].append((fhr,stat)) + # + # Bounded ones always show 0.0 + # + if display in ["RMS Error","Std Dev","Mean Abs Error"]: + valmin=0.0 + # + # If values are constant (compared to precision of this element) + # make the graph show slightly more range + # + prec1=10**(-parmPrecision) + minRange=10**(-(parmPrecision+1)) + #minRange=0.1 + graphrange=valmax-valmin + if graphrange3: + self.cd.canvas.create_line(gpoints,fill=colorname,tags=tagtuple) + self.but1state[tag1]=1 + self.but2state[tag2]=1 + # + # Turn off all but first model and first time + # + startBut1(self) + startBut2(self) + # + # Labels + # + if len(areaList)>1: + areaName="Various" + if AreaCombine==1: + numPts=0 + for areaNum in areaNums: + numPts+=self.pts[areaNum] + else: + numPts=-1 + else: + #numPts=self.pts[areaNums[0]] + numPts=-1 + ul1="Average Error Growth - %s"%parm + self.cdLabels(ul1,numPts,dateStyle,dateType,numDays,fromDay,dayList,cycleList) + + self.stopWorking() + self.moveCD() + self.cd.deiconify() + self.cd.lift() + + return + #================================================================== + # getStat - assuming that the statfile is open correctly, get + # the value for the specified 'display', for the + # record, area, and threshold 
number + # + def getStat(self,record,areaNum,display,threshold): + if display=="Bias": + val=self.VU.sncStats[record,areaNum,0] + elif display=="Squared Error": + val=self.VU.sncStats[record,areaNum,1] + elif display=="RMS Error": + val=sqrt(self.VU.sncStats[record,areaNum,1]) + elif display=="Std Dev": + sum=self.VU.sncStats[record,areaNum,0] + sqr=self.VU.sncStats[record,areaNum,1] + val=sqrt(sqr-(sum*sum)) + elif display=="Mean Abs Error": + val=self.VU.sncStats[record,areaNum,2] + elif display=="Mean Fcst": + val=self.VU.sncStats[record,areaNum,3] + elif display=="Mean Squared Fcst": + val=self.VU.sncStats[record,areaNum,4] + elif display=="Mean Obs": + val=self.VU.sncStats[record,areaNum,5] + elif display=="Mean Squared Obs": + val=self.VU.sncStats[record,areaNum,6] + elif display=="Covariance": + val=self.VU.sncStats[record,areaNum,7] + elif display=="Percent Err <": + val=self.VU.sncStats[record,areaNum,8+threshold] + else: + print("unknown stat type") + val=0 + return val + #================================================================== + # ShowScaleStats - make graphs of stat vs scale + # + def ShowScaleStats(self,DialogDict): + self.VU.logMsg("running ShowScaleStats:") + display=DialogDict["Display"] + areaList=DialogDict["areaList"] + AreaCombine=DialogDict["AreaCombine"] + parm=DialogDict["Parm"] + threshold=DialogDict["Threshold"] + cycleList=DialogDict["cycleList"] + modelList=DialogDict["Models"] + obsmodel=DialogDict["ObsModel"] + fcstrList=DialogDict["fcstrList"] + fhrStart=DialogDict["fhrStart"] + fhrEnd=DialogDict["fhrEnd"] + dateType=DialogDict["dateType"] + numDays=DialogDict["numDays"] + fromDay=DialogDict["fromDay"] + dayList=DialogDict["dayList"] + dateStyle=DialogDict["dateStyle"] + commonCases=DialogDict["commonCases"] + accumHours=DialogDict["accumHours"] + accumFreq=DialogDict["accumFreq"] + TwoCatType=DialogDict["TwoCatType"] + TwoCatCond=DialogDict["TwoCatCond"] + TwoCatValue=DialogDict["TwoCatValue"] + 
TwoCatValueString=DialogDict["TwoCatValueString"] + # + # Check for good GUI input + # + parmList=[parm] + ret=self.checkLists(modelList,parmList,cycleList,fcstrList,dateType, + dayList) + if ret==0: + return + # + # Check that we do not have too many things varying + # + parmVary=0 + areaVary=0 + modelVary=1 + # + # Clear the cd canvas - setup title + # + self.cd.canvas.delete(tkinter.ALL) + self.cd.title("Statistic Graph") + workStart="Working on Statistics vs. Scale" + self.startWorking(workStart,optionRemove=0) + # + # get names of selected edit areas + # + areaNames=[] + descList=self.VU.listEditAreaDescriptions() + nameList=self.VU.listEditAreas() + for areaDesc in areaList: + if areaDesc=="Current": + areaNames.append("Current") + elif areaDesc in descList: + areaNum=descList.index(areaDesc) + areaNames.append(nameList[areaNum]) + if len(areaNames)<1: + msg="Invalid Edit Area(s) - contact support" + self.statusBarMsg(msg,"U") + return + # + # Setup the combined area + # + comboArea=self.empty(bool) + for areaName in areaNames: + if areaName=="Current": + areaObject=self.getActiveEditArea() + mask=self.encodeEditArea(areaObject) + any=add.reduce(add.reduce(mask)) + if any==0: + mask=self.newGrid(True, bool) + elif areaName=="NONE": + mask=self.newGrid(True, bool) + else: + mask=self.encodeEditArea(areaName) + comboArea=logical_or(comboArea,mask) + # + # If any of the TwoCat stats are requested - then get + # the contingency table entries instead + # + statName=display + if statName=="TwoCat": + statName="cont" + if TwoCatType[0:1]=="A": + statName="acont" + statVal=TwoCatValue + statCond=TwoCatCond + statID=self.VU.getStatID(TwoCatType) + # + # + # + sumdata={} + cntdata={} + hitsdata={} + missdata={} + falrdata={} + corndata={} + # + timemin=1e32 + timemax=-1e32 + valmin=1.0e32 + valmax=-1.0e32 + + countimax=len(parmList)*len(modelList) + counti=0 + # + # setup readParm - which is usually the same as parm, but + # can be different for the Spd/Dir components 
of a vector + # + readParm=parm + vectorType=-1 + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if (last3 in ["Spd","Dir"]): + readParm=parm[:-3] + if last3=="Spd": + vectorType==0 + else: + vectorType==1 + # + # Get the observed parm for this parm, the verification type, + # the data type and the thresholds + # + obsParm=self.VU.getObsParm(readParm) + verType=self.VU.getVerType(readParm) + datatype=self.VU.getVerParmType(readParm) + thresholds=self.VU.getVerThresholds(readParm) + if last3=="Spd": + thresholds=thresholds[0] + elif last3=="dir": + thresholds=thresholds[1] + thresholdValue=thresholds[threshold] + # + # If using the threshold stat - set it now + # + if statName=="Percent Err <": + statVal=thresholdValue + # + # Get the statCases for this parm + # + statCases=self.VU.getStatCases(parm,modelList,obsmodel,dateStyle, + dateType,fromDay=fromDay,numDays=numDays, + dayList=dayList,fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, + accumFreq=accumFreq,commonCases=commonCases, + basetimeOffsets=1,callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + # + # Get the gridCases for this parm + # + gridCases=self.VU.getCases(readParm,modelList,obsParm,obsmodel, + dateStyle,dateType,fromDay=fromDay,numDays=numDays, + dayList=dayList,fcstrs=fcstrList,cycles=cycleList, + fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, + accumFreq=accumFreq,requireObs=1,commonCases=commonCases, + basetimeOffsets=1,callbackMethod=self.workingCommon) + if self.checkWorking()==1: + self.stopWorking() + return + for model in modelList: + counti+=1 + workStart="Reading %s %s (%d of %d)"%(parm,model,counti,countimax) + # + # get cases for this model + # + scases=statCases[model] + if model in list(gridCases.keys()): + gcases=gridCases[model] + else: + gcases={} + # + # get overall list of keys (both stat and grid) in tkeys + # + skeys=list(scases.keys()) + gkeys=list(gcases.keys()) + tkeys=skeys + 
for gkey in gkeys: + if gkey not in tkeys: + tkeys.append(gkey) + # + # Loop over possible stat or grid cases + # + count=0 + totalcount=len(tkeys) + for key in tkeys: + # + # Check for user interrupting + # + count+=1 + if self.setAndCheckWorking("%s: %d of %d"%(workStart,count,totalcount))==1: + self.stopWorking() + return + # + # Get times for this case + # + (basestr,stimestr,etimestr)=key.split(",") + basetime=int(basestr) + starttime=int(stimestr) + endtime=int(etimestr) + # + # Do not use results for grids that are not yet complete + # + if endtime>time.time(): + continue + # + # Do not show results for grids that start before + # forecast time + # + fhr=(starttime-basetime)/HOURSECS + if fhr<0: + continue + # + # Get list of records for this cases + # + if key in scases: + srecList=scases[key] + else: + srecList=None + if key in gcases: + grecList=gcases[key] + else: + grecList=None + # + # Loop over scales + # + smoothList=[] + for (scale,text) in self.scaleList: + smoothList.append(scale) + + valx=self.VU.getVerStatScales(model,basetime,readParm,starttime, + endtime,obsmodel,statName,statVal=statVal, + statCond=statCond,editArea=comboArea, + smoothList=smoothList,vectorType=vectorType, + grecList=grecList) + if valx is None: + print("getVerStatScales returned None") + continue + if len(valx)<1: + print("getVerStatScales returned empty list") + continue + + for i in range(len(smoothList)): + (scale,text)=self.scaleList[i] + val=valx[i] + # + # store sums in model,fhr,scale keys + # + outkey="%s,%3.3d,%4.4d"%(model,fhr,scale) + if display!="TwoCat": + if outkey not in list(sumdata.keys()): + sumdata[outkey]=0.0 + cntdata[outkey]=0 + sumdata[outkey]+=val + cntdata[outkey]+=1 + else: + if outkey not in list(hitsdata.keys()): + hitsdata[outkey]=0 + missdata[outkey]=0 + falrdata[outkey]=0 + corndata[outkey]=0 + (hits,miss,falr,corn)=val + hitsdata[outkey]+=hits + missdata[outkey]+=miss + falrdata[outkey]+=falr + corndata[outkey]+=corn + # + # if no data could 
be read - stop here + # + if len(list(cntdata.keys()))<1: + self.stopWorking() + msg="No verification data could be found matching those criteria" + self.statusBarMsg(msg,"U") + return + # + # We now have the sums...calculate the scores + # + fhrList=[] + valmin=1e32 + valmax=-1e32 + scalemin=0 + scalemax=0 + outdata={} + for key in list(cntdata.keys()): + if cntdata[key]<1: + continue + if display!="TwoCat": + stat=float(sumdata[key])/float(cntdata[key]) + if display=="RMS Error": + stat=sqrt(stat) + else: + hits=hitsdata[key] + miss=missdata[key] + falr=falrdata[key] + corn=corndata[key] + stat=self.VU.getTwoCatStat(statID,hits,miss,falr,corn) + # + # + # + valmin=min(valmin,stat) + valmax=max(valmax,stat) + (model,fhrstr,scalestr)=key.split(",") + fhr=int(fhrstr) + scale=int(scalestr) + scalemax=max(scale,scalemax) + if fhr not in fhrList: + fhrList.append(fhr) + outkey="%s,%s"%(fhrstr,model) + if outkey not in list(outdata.keys()): + outdata[outkey]=[] + outdata[outkey].append((scale,stat)) + # + # Bounded ones always show 0.0 + # + if display in ["RMS Error","Std Dev","Mean Abs Error"]: + valmin=0.0 + # + # If values are constant - show one up + # + graphrange=valmax-valmin + if graphrange<0.01: + valmax+=1.0 + # + # + allkeys=list(outdata.keys()) + allkeys.sort() + # + # Get lists of the actual buttons we have data for + # + varBut1=[] + varBut2=[] + for key in list(outdata.keys()): + (but1,but2)=key.split(",") + if but1 not in varBut1: + varBut1.append(but1) + if but2 not in varBut2: + varBut2.append(but2) + varBut1.sort() + varBut2.sort() + + # + # In model list - make sure Official comes first + # + if "Official" in varBut2: + idx=varBut2.index("Official") + del varBut2[idx] + varBut2.insert(0,"Official") + # + # Associate colors with the varList2 + # + self.colornames={} + index=0 + for var in varBut2: + self.colornames[var]=self.COLORLIST[index] + index+=1 + if index==len(self.COLORLIST): + index=0 + # + # setup buttons + # + 
self.setupBut1(varBut1,numbuttons=6,arrows=1) + self.setupBut2(varBut2,numbuttons=6,arrows=1) + # + # Setup graphing coordinates + # + numticks=10 + graphrange=valmax-valmin + tickInterval=self.niceNumDec(graphrange/(numticks-1),1) + + left=self.cd.curwidth*(50.0/700.0) + right=self.cd.curwidth*(650.0/700.0) + bot=self.cd.curheight*(130.0/530.0) + top=self.cd.curheight*(480.0/530.0) + self.setgraph(scalemin,scalemax,valmin,valmax,left,right,bot,top) + #self.fhouraxes(timemin,timemax,valmin,valmax) + # + # Draw timeseries lines + # + for key in list(outdata.keys()): + (tag1,tag2)=key.split(",") + tagtuple=(tag1,tag2) + colorname=self.colornames[tag2] + points=outdata[key] + points.sort() + gpoints=[] + for point in points: + (scale,val)=point + (x,y)=self.graphcoord(scale,val) + gpoints.append(x) + gpoints.append(y) + if len(gpoints)>3: + self.cd.canvas.create_line(gpoints,fill=colorname,tags=tagtuple) + self.but1state[tag1]=1 + self.but2state[tag2]=1 + # + # Turn off all but first model and first time + # + startBut1(self) + startBut2(self) + # + # Labels + # + if len(areaList)>1: + areaName="Various" + if AreaCombine==1: + numPts=0 + for areaNum in areaNums: + numPts+=self.pts[areaNum] + else: + numPts=-1 + else: + #numPts=self.pts[areaNums[0]] + numPts=-1 + ul1="Average Error Growth - %s"%parm + self.cdLabels(ul1,numPts,dateStyle,dateType,numDays,fromDay,dayList,cycleList) + + self.stopWorking() + self.moveCD() + self.cd.deiconify() + self.cd.lift() + + return + #================================================================== + # getReadMode - figure out if parm is a rateParm...and set mode + # to "Sum" if it is. + # If not...and checkProb is set...figure out if the + # parm is a probability parm and set mode to + # "Max" if it is (floating PoP). 
+ # Otherwise set to "Average" + # + def getReadMode(self,model,parmName,checkProb=1): + rateFlag=self.VU.getRateFlag(model,parmName) + if (rateFlag==1): + readMode="Sum" + else: + readMode="TimeWtAverage" + if checkProb==1: + verType=self.VU.getVerType(parmName) + if verType==1: + readMode="Max" + return readMode + + #================================================================== + # workingCommon - suitable for a callback that provides a message + # like (x of y), so that it sets the 'working' + # display - and returns a 1 if the 'stop' button + # has been set + # + def workingCommon(self,message): + fullmsg="Finding Common Cases: %s"%message + return self.setAndCheckWorking(fullmsg) + #================================================================== + # setupBut1 - setup button 1 buttons. Names in butList are copied + # to self.but1names[]. Desired buttons on a row in + # numbuttons. arrows flag adds 'prev/next' buttons. + # + # After setup, self.but1names[] holds names. + # self.but1{} holds button references + # self.but1state{} holds button state + def setupBut1(self,butList,numbuttons=5,arrows=0,width=0): + # + # clear old buttons (fbar holds button 1s) + # + slaves=self.cd.fbar.pack_slaves() + if slaves is not None: + for slave in slaves: + slave.destroy() + # + # put generic 'move left' button on the side + # + if ((arrows==1)and(len(butList)>1)): + cb=GenericCallback(prevBut1,self) + prev=tkinter.Button(self.cd.fbar,text="<",padx=2,pady=0, + fg="black",command=cb) + prev.pack(side=tkinter.LEFT,fill=tkinter.Y) + # + # figure number of rows of buttons - and create frames + # + numrows=int((float(len(butList))/float(numbuttons))+0.5) + if numrows<1: + numrows=1 + self.fbarmodrow=[] + for i in range(numrows): + self.fbarmodrow.append(tkinter.Frame(self.cd.fbar)) + numinrow=int(float(len(butList))/float(numrows))+1 + # + # Make buttons + # + self.but1names=butList[:] + self.but1text=butList[:] + for i in range(len(self.but1names)): + 
but=self.but1names[i] + if but.isdigit(): + self.but1names[i]="f%s"%but + num=1 + self.but1={} + self.but1state={} + for i in range(len(self.but1names)): + but=self.but1names[i] + buttext=self.but1text[i] + cb=GenericCallback(showBut1,self,but) + row=int(float(num)/float(numinrow)) + if width==0: + self.but1[but]=tkinter.Button(self.fbarmodrow[row],text=buttext, + padx=2,pady=0,fg="black", + command=cb) + else: + self.but1[but]=tkinter.Button(self.fbarmodrow[row],text=buttext, + width=width,padx=2,pady=0,fg="black", + command=cb) + self.but1[but].pack(side=tkinter.LEFT) + num+=1 + # + # put generic 'move right' button on the side + # + if ((arrows==1)and(len(butList)>1)): + cb=GenericCallback(nextBut1,self) + next=tkinter.Button(self.cd.fbar,text=">",padx=2,pady=0, + fg="black",command=cb) + next.pack(side=tkinter.RIGHT,fill=tkinter.Y) + # + # pack buttons between possible next/prev buttons + # + for i in range(numrows): + self.fbarmodrow[i].pack(side=tkinter.TOP) + # + # Update cd widget - so size of buttonbars don't affect size of + # the current canvas + # + self.cd.update_idletasks() + hgt1=self.cd.bar.winfo_reqheight() + hgt2=self.cd.fbar.winfo_reqheight() + hgt3=28+536 # size of exit button bar + smallest canvas height + hgt=hgt1+hgt2+hgt3 + self.cd.minsize(706,hgt) + geo=self.cd.geometry() + (wh,of)=geo.split("+",1) + (wid,oldhgt)=wh.split("x",1) + if hgt>int(oldhgt): + self.cd.geometry("%sx%d+%s"%(wid,hgt,of)) + self.cd.update_idletasks() + return + #================================================================== + # setupBut2 - setup button 2 buttons. Names in butList are copied + # to self.but2names[]. Desired buttons on a row in + # numbuttons. arrows flag adds 'prev/next' buttons. + # + # After setup, self.but2names[] holds names. 
+ # self.but2{} holds button references + # self.but2state{} holds button state + # + def setupBut2(self,butList,numbuttons=5,arrows=0,width=0): + # + # remove old buttons (bar holds button 2s) + # + slaves=self.cd.bar.pack_slaves() + if slaves is not None: + for slave in slaves: + slave.destroy() + # + # put generic 'move left' button on the side + # + if ((arrows==1)and(len(butList)>1)): + cb=GenericCallback(prevBut2,self) + prev=tkinter.Button(self.cd.bar,text="<",padx=2,pady=0, + fg="black",command=cb) + prev.pack(side=tkinter.LEFT,fill=tkinter.Y) + # + # figure number of rows of buttons - and create frames + # + numrows=int((float(len(butList))/float(numbuttons))+0.5) + if numrows<1: + numrows=1 + self.barmodrow=[] + for i in range(numrows): + self.barmodrow.append(tkinter.Frame(self.cd.bar)) + numinrow=int(float(len(butList))/float(numrows))+1 + # + # Make buttons + # + self.but2names=butList[:] + num=1 + self.but2={} + self.but2state={} + for i in range(len(self.but2names)): + but=self.but2names[i] + cb=GenericCallback(showBut2,self,but) + row=int(float(num)/float(numinrow)) + if width==0: + self.but2[but]=tkinter.Button(self.barmodrow[row],text=but, + padx=2,pady=0,fg=self.colornames[but], + command=cb) + else: + self.but2[but]=tkinter.Button(self.barmodrow[row],text=but,width=width, + padx=2,pady=0,fg=self.colornames[but], + command=cb) + self.but2[but].pack(side=tkinter.LEFT) + num+=1 + # + # put generic 'move right' button on the side + # + if ((arrows==1)and(len(butList)>1)): + cb=GenericCallback(nextBut2,self) + next=tkinter.Button(self.cd.bar,text=">",padx=2,pady=0, + fg="black",command=cb) + next.pack(side=tkinter.RIGHT,fill=tkinter.Y) + # + # pack buttons between possible next/prev buttons + # + for i in range(numrows): + self.barmodrow[i].pack(side=tkinter.TOP) + # + # Update cd widget and its minsize - so size of buttonbar + # doesn't affect size of the current canvas + # + self.cd.update_idletasks() + hgt1=self.cd.bar.winfo_reqheight() + 
hgt2=self.cd.fbar.winfo_reqheight() + hgt3=28+536 # size of exit button bar + smallest canvas height + hgt=hgt1+hgt2+hgt3 + self.cd.minsize(706,hgt) + geo=self.cd.geometry() + (wh,of)=geo.split("+",1) + (oldwid,oldhgt)=wh.split("x",1) + if hgt>int(oldhgt): + self.cd.geometry("%sx%d+%s"%(oldwid,hgt,of)) + self.cd.update_idletasks() + return + #================================================================== + # cdLabels - labels at the top of the canvas + # + def cdLabels(self,ul1,numPts,dateStyle,dateType,numDays,fromDay,dayList,cycleList): + # + # Upper Left has variable text + # + self.labelLine(ul1,1,justify="left") + # + str="Gridpoints in editarea: %d"%numPts + self.labelLine(str,2,justify="left") + # + # Dates + # + timelabel=dateStyle + if dateType=="Period Length": + (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(fromDay) + if numDays==1: + timelabel+=" %4.4d/%2.2d/%2.2d"%(gyea,gmon,gday) + else: + timelabel+=" the %d days ending on %4.4d/%2.2d/%2.2d"%(numDays,gyea,gmon,gday) + else: + if len(dayList)==1: + (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(dayList[0]) + timelabel+=" %4.4d/%2.2d/%2.2d"%(gyea,gmon,gday) + else: + timelabel+=" several dates" + self.labelLine(timelabel,1,justify="right") + # + # Cycles + # + runlabel="" + if len(cycleList)>1: + for cyc in cycleList: + runlabel+="%2.2d+"%cyc + runlabel=runlabel[:-1]+" UTC Runs" + else: + runlabel="%2.2d"%cycleList[0]+" UTC Run ONLY" + self.labelLine(runlabel,2,justify="right") + return + #================================================================== + # labLine - draw a label + # + def labelLine(self,text,lineNum,color="black",justify="left", + tags=None): + lineheight=15 + yoff=5 + xoff=5 + + y=((lineNum-1)*lineheight)+yoff + if justify=="left": + x=xoff + anchorType=tkinter.NW + else: + x=self.cd.curwidth-xoff + anchorType=tkinter.NE + self.cd.canvas.create_text(x,y,text=text,fill=color, + anchor=anchorType,tags=tags) + return + 
#================================================================== + # checkLists - check lists returned from GUI to make sure at least + # one is chosen + # + def checkLists(self,modelList,parmList,cycleList,fcstrList,dateType, + dayList): + if (len(modelList)<1): + self.statusBarMsg("Must choose at least one model","U") + return 0 + if (len(parmList)<1): + self.statusBarMsg("Must choose at least one parm","U") + return 0 + if (len(cycleList)<1): + self.statusBarMsg("Must choose at least one cycle","U") + return 0 + if (len(fcstrList)<1): + self.statusBarMsg("Must choose at least one forecaster","U") + return 0 + if dateType=="List of dates": + if (len(dayList)<1): + self.statusBarMsg("Must choose at least one date","U") + return 0 + return 1 + #================================================================== + # + # code to scale everything on the canvas so that you always display + # the same area that you started with + # + def resizecanvas(self,event): + scalex=float(event.width)/self.curwidth + scaley=float(event.height)/self.curheight + if ((scalex!=1.0)or(scaley!=1.0)): + self.canvas.scale("all",0.0,0.0,scalex,scaley) + self.curwidth=float(event.width) + self.curheight=float(event.height) + #bw=2 + #self.canwidth=self.curwidth-((bw+1.0)*2.0) + #self.canheight=self.canheight-((bw+1.0)*2.0) + #print "resize canvas gives width/height as: %7.2f,%7.2f"%(self.curwidth,self.curheight) + return + # + # setup graph coordintes + # + def setgraph(self,xmin,xmax,ymin,ymax,sxmin,sxmax,symin,symax): + self.xmin=xmin + self.xmax=xmax + self.ymin=ymin + self.ymax=ymax + self.xmult=(sxmax-sxmin)/(xmax-xmin) + self.xoff=sxmin + self.ymult=(symax-symin)/(ymax-ymin) + self.yoff=symax + def graphcoord(self,x,y): + newx=((x-self.xmin)*self.xmult)+self.xoff + newy=self.yoff-((y-self.ymin)*self.ymult) + return newx,newy + #================================================================== + # + # draw histogram axes + # + def histoaxes(self,maxheight,minx,maxx,binwidth,htick): 
+ (sx,sy)=self.graphcoord(0.0,0.0) + (tx,ty)=self.graphcoord(0.0,maxheight) + self.cd.canvas.create_line(sx,sy,tx,ty) + self.vtick(0.0,5,0.0,maxheight,htick,label=1,labeloffset=10, + skipfirst=1,labelinterval=2) + #minx=binmin[1]+(binwidth/2.0) + #maxx=binmax[len(binmax)-2]-(binwidth/2.0) + (sx,sy)=self.graphcoord(minx,0.0) + (tx,ty)=self.graphcoord(maxx,0.0) + self.cd.canvas.create_line(sx,sy,tx,ty) + numticks=10 + tickInterval=self.niceNumDec(maxx/(numticks-1),1) + self.htick(0.0,5,0.0,maxx,tickInterval,label=1, + labeloffset=5,labelinterval=2,skipfirst=1) + self.htick(0.0,5,0.0,maxx,tickInterval,label=1, + labeloffset=5,labelinterval=2,skipfirst=1,negative=1) + #================================================================== + # probaxes - draw axes for probability reliability diagrams + # + def probaxes(self): + (llx,lly)=self.graphcoord(0,0) + (urx,ury)=self.graphcoord(100,100) + self.cd.canvas.create_line(llx,lly,urx,lly,urx,ury,llx,ury,llx,lly,urx,ury) + self.vtick(0,5,0,100,10,label=1, + labelinterval=1,labeloffset=-10.0,labelanchor=tkinter.E) + self.vtick(100,5,0,100,10,label=1, + labelinterval=1,labeloffset=10.0,labelanchor=tkinter.W) + self.htick(0,5,0,100,10,label=1, + labelinterval=1,labeloffset=8.0,labelanchor=tkinter.N) + self.htick(100,5,0,100,10,label=1, + labelinterval=1,labeloffset=-8.0,labelanchor=tkinter.S) + (midx,ny)=self.graphcoord(50,0) + self.cd.canvas.create_text(midx,ny+20,anchor=tkinter.N,text="Forecast Probability") + (nx,midy)=self.graphcoord(0,50) + self.cd.canvas.create_text(nx-35,midy,anchor=tkinter.E,text="O\nb\ns\ne\nr\nv\ne\nd\n \nF\nr\ne\nq\nu\ne\nc\ny") + # + #================================================================== + # + # draw graph axes + # + def graphaxes(self,timemin,timemax,valmin,valmax): + (llx,lly)=self.graphcoord(timemin,valmin) + (urx,ury)=self.graphcoord(timemax,valmax) + self.cd.canvas.create_line(llx,lly,urx,lly,urx,ury,llx,ury,llx,lly) + zeroline=0 + if ((valmin<0.0)and(valmax>0.0)): + 
(lx,zy)=self.graphcoord(timemin,0.0) + self.cd.canvas.create_line(llx,zy,urx,zy) + zeroline=1 + numticks=10 + self.timetick(timemin,timemax,numticks,valmin,5,label=1,labeloffset=10) + self.timetick(timemin,timemax,numticks,valmax,5,label=1,labeloffset=-10, + labelanchor=tkinter.S) + if zeroline==1: + self.timetick(timemin,timemax,numticks,0.0,5,label=0) + # + numticks=10 + valInterval=self.niceNumDec((valmax-valmin)/(numticks-1),1) + if zeroline==1: + self.vtick(timemin,5,0,valmax,valInterval,label=1, + labeloffset=-10,labelanchor=tkinter.E) + self.vtick(timemin,5,0,-valmin,valInterval,label=1, + labeloffset=-10,labelanchor=tkinter.E,negative=1) + self.vtick(timemax,5,0,valmax,valInterval,label=1, + labeloffset=10,labelanchor=tkinter.W) + self.vtick(timemax,5,0,-valmin,valInterval,label=1, + labeloffset=10,labelanchor=tkinter.W,negative=1) + else: + self.vtick(timemin,5,valmin,valmax,valInterval,label=1, + labeloffset=-10,labelanchor=tkinter.E) + self.vtick(timemax,5,valmin,valmax,valInterval,label=1, + labeloffset=10,labelanchor=tkinter.W) + #================================================================== + # + # draw fhour axes + # + def fhouraxes(self,timemin,timemax,valmin,valmax): + (llx,lly)=self.graphcoord(timemin,valmin) + (urx,ury)=self.graphcoord(timemax,valmax) + self.cd.canvas.create_line(llx,lly,urx,lly,urx,ury,llx,ury,llx,lly) + zeroline=0 + if ((valmin<0.0)and(valmax>0.0)): + (lx,zy)=self.graphcoord(timemin,0.0) + self.cd.canvas.create_line(llx,zy,urx,zy) + zeroline=1 + finterval=6 + if timemax>120: + finterval=24 + if timemax>48: + finterval=12 + #finterval=self.niceNumDec((timemax-timemin)/(numticks-1),1) + self.htick(valmin,5,timemin,timemax,finterval,label=1,labeloffset=+5, + labelanchor=tkinter.N) + self.htick(valmax,5,timemin,timemax,finterval,label=1,labeloffset=-5, + labelanchor=tkinter.S) + if zeroline==1: + self.htick(0.0,5,timemin,timemax,finterval,label=0) + #self.timetick(timemin,timemax,numticks,valmin,5,label=1,labeloffset=10) + 
#self.timetick(timemin,timemax,numticks,valmax,5,label=1,labeloffset=-10, + # labelanchor=Tkinter.S) + #if zeroline==1: + # self.timetick(timemin,timemax,numticks,0.0,5,label=0) + # + numticks=10 + valInterval=self.niceNumDec((valmax-valmin)/(numticks-1),1) + if zeroline==1: + self.vtick(timemin,5,0,valmax,valInterval,label=1, + labeloffset=-10,labelanchor=tkinter.E) + self.vtick(timemin,5,0,-valmin,valInterval,label=1, + labeloffset=-10,labelanchor=tkinter.E,negative=1, + skipfirst=1) + self.vtick(timemax,5,0,valmax,valInterval,label=1, + labeloffset=10,labelanchor=tkinter.W) + self.vtick(timemax,5,0,-valmin,valInterval,label=1, + labeloffset=10,labelanchor=tkinter.W,negative=1, + skipfirst=1) + else: + self.vtick(timemin,5,valmin,valmax,valInterval,label=1, + labeloffset=-10,labelanchor=tkinter.E) + self.vtick(timemax,5,valmin,valmax,valInterval,label=1, + labeloffset=10,labelanchor=tkinter.W) + #================================================================== + # + def timetick(self,minsecs,maxsecs,desirednum,yval,ywid,label=1,labeloffset=-10,labelanchor=tkinter.N): + #print "in timetick with %d-%d, desired:%d"%(minsecs,maxsecs,desirednum) + numrange=desirednum*0.75 + minnum=desirednum-numrange + maxnum=desirednum+numrange + monString=[" ","JAN","FEB","MAR","APR","MAY","JUN","JUL","AUG","SEP","OCT","NOV","DEC"] + #print "minnum-maxnum: %d-%d"%(minnum,maxnum) + HOUR=3600 + DAY=24*HOUR + MONTH=30*DAY + YEAR=365*DAY + tryintervals=[(3,HOUR), + (6,HOUR), + (12,HOUR), + (1,DAY), + (2,DAY), + (5,DAY), + (15,DAY), + (1,MONTH), + (2,MONTH), + (3,MONTH), + (6,MONTH), + (1,YEAR)] + + secondsRange=maxsecs-minsecs + intervals=[] + for (interval,base) in tryintervals: + intervalSeconds=interval*base + num=int(secondsRange/intervalSeconds) + if ((num>minnum)and(numminsecs: + (tx,ty)=self.graphcoord(newTime,yval) + self.cd.canvas.create_line(tx,ty-ywid,tx,ty+ywid) + if label==1: + if bestBase==HOUR: + if ((maxsecs-minsecs)/HOUR)>24: + labelstring="%d %2.2dZ"%(gday,ghou) + 
else: + labelstring="%2.2dZ"%ghou + elif bestBase==DAY: + if ((maxsecs-minsecs)/DAY)>28: + labelstring="%d/%d"%(gmon,gday) + else: + labelstring="%d"%gday + else: + if ((maxsecs-minsecs)/MONTH)>9: + labelstring="%d/%2.2d"%(gmon,gyea%100) + else: + labelstring="%s"%monString[gmon] + #labelstring="%d/%d %2.2dZ"%(gmon,gday,ghou) + self.cd.canvas.create_text(tx,ty+labeloffset,anchor=labelanchor, + text=labelstring) + + #print "tick at %4.4d/%2.2d/%2.2d %2.2dZ"%(gyea,gmon,gday,ghou) + if bestBase==HOUR: + tryTime=calendar.timegm((gyea,gmon,gday,ghou+bestInterval,gmin,gsec,gwda,gyda,gdst)) + elif bestBase==DAY: + tryTime=calendar.timegm((gyea,gmon,gday+bestInterval,ghou,gmin,gsec,gwda,gyda,gdst)) + else: + newmon=gmon+bestInterval + if newmon>12: + gyea+=1 + newmon=1 + tryTime=calendar.timegm((gyea,newmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)) + (nyea,nmon,nday,nhou,nmin,nsec,nwda,nyda,ndst)=time.gmtime(tryTime) + if ((nday!=gday)and(bestBase==HOUR)): + gday+=1 + ghou=0 + tryTime=calendar.timegm((gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)) + elif (((nmon!=gmon)or(nday>26))and(bestBase==DAY)): + gmon+=1 + if gmon>12: + gmon=1 + gyea+=1 + gday=1 + tryTime=calendar.timegm((gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)) + elif ((nyea!=gyea)and(bestBase==MONTH)): + gyea+=1 + gmon=1 + tryTime=calendar.timegm((gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)) + newTime=tryTime + #print "-----" + return + #================================================================== + # draw ticks on a horizontal axis at y-value:yval. The width of + # ticks if ywid. Ticks go between minx and maxx with interval + # tickinterval. If labelinterval is 0, no labels are drawn, + # if 1, the every label is drawn. If labelinterval is 2 then + # every 2nd label is drawn. It labelinterval is 3 then every + # 3rd label is drawn, etc. If skipfirst=1 or skiplast=1 then + # labelling is skipped for those ticks. 
+ # + def htick(self,yval,ywid,minx,maxx,tickInterval,label=0,labeloffset=5, + labelinterval=1,skipfirst=0,skiplast=0,labelanchor=tkinter.N, + negative=0): + numticks=int((maxx-minx)/tickInterval)+1 + labeldigits=max(-floor(log10(tickInterval)),0) + neg=1.0 + if negative==1: + neg=-1.0 + num=0 + for i in range(0,numticks): + x=(minx+(i*tickInterval))*neg + (tx,ty)=self.graphcoord(x,yval) + self.cd.canvas.create_line(tx,ty-ywid,tx,ty+ywid) + if label==1: + if (i%labelinterval==0): + if labeldigits==0: + labelstring="%d"%x + else: + format="%%.%df"%labeldigits + labelstring=format%x + skip=0 + if ((skipfirst==1)and(i==0))or((skiplast==1)and(i==(numticks-1))): + skip=1 + if skip==0: + self.cd.canvas.create_text(tx,ty+labeloffset,anchor=labelanchor, + text=labelstring) + def vtick(self,xval,xwid,miny,maxy,tickInterval,label=0,labeloffset=5, + labelinterval=1,skipfirst=0,skiplast=0,labelanchor=tkinter.W, + negative=0): + numticks=int((maxy-miny)/tickInterval)+1 + labeldigits=max(-floor(log10(tickInterval)),0) + neg=1.0 + if negative==1: + neg=-1.0 + num=0 + for i in range(0,numticks): + y=(miny+(i*tickInterval))*neg + (tx,ty)=self.graphcoord(xval,y) + self.cd.canvas.create_line(tx-xwid,ty,tx+xwid,ty) + if label==1: + if (i%labelinterval==0): + if labeldigits==0: + labelstring="%d"%y + else: + format="%%.%df"%labeldigits + labelstring=format%y + skip=0 + if ((skipfirst==1)and(i==0))or((skiplast==1)and(i==(numticks-1))): + skip=1 + if skip==0: + self.cd.canvas.create_text(tx+labeloffset,ty,anchor=labelanchor, + text=labelstring) + + #================================================================== + # + # make axes for value distributions + # + def valaxes(self,minval,maxval,tickInterval): + (nx,ny)=self.graphcoord(minval,minval) + (xx,xy)=self.graphcoord(maxval,maxval) + self.cd.canvas.create_line(nx,ny,xx,ny,xx,xy,nx,xy,nx,ny,xx,xy) + self.vtick(minval,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=-10.0,labelanchor=tkinter.E) + 
self.vtick(maxval,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=10.0,labelanchor=tkinter.W) + self.htick(minval,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=8.0,labelanchor=tkinter.N) + self.htick(maxval,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=-8.0,labelanchor=tkinter.S) + (midx,dumy)=self.graphcoord((maxval+minval)/2.0,minval) + self.cd.canvas.create_text(midx,ny+20,anchor=tkinter.N,text="Observed") + (dumx,midy)=self.graphcoord(minval,(maxval+minval)/2.0) + self.cd.canvas.create_text(nx-35,midy,anchor=tkinter.E,text="F\no\nr\ne\nc\na\ns\nt") + #================================================================== + # + # make axes for expected value distributions + # + def expaxes(self,minval,maxval,tickInterval): + (nx,ny)=self.graphcoord(minval,minval) + (xx,xy)=self.graphcoord(maxval,maxval) + self.cd.canvas.create_line(nx,ny,xx,ny,xx,xy,nx,xy,nx,ny,xx,xy) + self.vtick(minval,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=-10.0,labelanchor=tkinter.E) + self.vtick(maxval,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=10.0,labelanchor=tkinter.W) + self.htick(minval,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=8.0,labelanchor=tkinter.N) + self.htick(maxval,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=-8.0,labelanchor=tkinter.S) + (midx,dumy)=self.graphcoord((maxval+minval)/2.0,minval) + self.cd.canvas.create_text(midx,ny+20,anchor=tkinter.N,text="Forecast") + (dumx,midy)=self.graphcoord(minval,(maxval+minval)/2.0) + self.cd.canvas.create_text(nx-35,midy,anchor=tkinter.E,text="O\nb\ns\ne\nr\nv\ne\nd") + #================================================================== + # + # make axes for value histograms + # + def valhaxes(self,minval,maxval,tickInterval,maxnum,vint,parm): + (nx,ny)=self.graphcoord(minval,0) + (xx,xy)=self.graphcoord(maxval,maxnum) + 
self.cd.canvas.create_line(nx,ny,xx,ny,xx,xy,nx,xy,nx,ny) + #vint=self.niceNumDec(maxnum/20,1) + self.vtick(minval,5,0.0,maxnum,vint,label=1, + labelinterval=3,labeloffset=-10.0,labelanchor=tkinter.E) + self.vtick(maxval,5,0.0,maxnum,vint,label=1, + labelinterval=3,labeloffset=10.0,labelanchor=tkinter.W) + self.htick(0,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=8.0,labelanchor=tkinter.N) + self.htick(maxnum,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=-8.0,labelanchor=tkinter.S) + (midx,dumy)=self.graphcoord((maxval+minval)/2.0,minval) + self.cd.canvas.create_text(midx,ny+20,anchor=tkinter.N,text=parm) + (dumx,midy)=self.graphcoord(minval,(0+maxnum)/2.0) + self.cd.canvas.create_text(nx-35,midy,anchor=tkinter.E,text="N\nu\nm\nb\ne\nr\n \np\ne\nr\n \nc\na\ns\ne") + #================================================================== + # + # make axes for logarithmic value histograms + # + def logvalhaxes(self,minval,maxval,tickInterval,logmin,logmax,parm): + (nx,ny)=self.graphcoord(minval,logmin) + (xx,xy)=self.graphcoord(maxval,logmax) + self.cd.canvas.create_line(nx,ny,xx,ny,xx,xy,nx,xy,nx,ny) + #vint=self.niceNumDec(maxnum/20,1) + lownum=exp(logmin) + hignum=exp(logmax) + print("need ticks from %f to %f"%(lownum,hignum)) + expstart=int(floor(log10(lownum))) + expend=int(floor(log10(hignum))) + print(" exponents from %d to %d"%(expstart,expend)) + for j in range(expstart,expend+1): + a=10.0**j + print(" loop decade=%f"%a) + for i in range(1,10): + if i==1: + xwid=5 + else: + xwid=2 + val=float(i)*a + if ((val>lownum)and(val=0: + labelstring="%d"%val + else: + format="%%.%df"%abs(j) + labelstring=format%val + self.cd.canvas.create_text(nx-10.0,ty,anchor=tkinter.E, + text=labelstring) + self.cd.canvas.create_text(xx+10.0,ty,anchor=tkinter.W, + text=labelstring) + + + #self.vtick(minval,5,0.0,maxnum,vint,label=1, + # labelinterval=3,labeloffset=-10.0,labelanchor=Tkinter.E) + #self.vtick(maxval,5,0.0,maxnum,vint,label=1, + 
# labelinterval=3,labeloffset=10.0,labelanchor=Tkinter.W) + self.htick(logmin,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=8.0,labelanchor=tkinter.N) + self.htick(logmax,5,minval,maxval,tickInterval,label=1, + labelinterval=3,labeloffset=-8.0,labelanchor=tkinter.S) + (midx,dumy)=self.graphcoord((maxval+minval)/2.0,logmin) + self.cd.canvas.create_text(midx,ny+20,anchor=tkinter.N,text=parm) + (dumx,midy)=self.graphcoord(minval,(logmin+logmax)/2.0) + self.cd.canvas.create_text(nx-35,midy,anchor=tkinter.E,text="N\nu\nm\nb\ne\nr\n \np\ne\nr\n \nc\na\ns\ne") + #================================================================== + # niceNumDec - pick a nice decimal number - suitable for tick + # marks, etc. + # + def niceNumDec(self,val,roundit): + if val==0: + return 1 + e=floor(log10(val)) + a=10.0**e + f=val/a + if roundit>0: + if f<1.5: + nf=1 + elif f<3.0: + nf=2 + elif f<7.0: + nf=5 + else: + nf=10 + else: + if f<=1.0: + nf=1 + elif f<=2.0: + nf=2.0 + elif f<=5.0: + nf=5.0 + else: + nf=10.0 + return nf*a + #================================================================== + # showScore - draw tick on 'colorcurve' with label of 0-100 score + # + def showScore(self,fullscore,mod,color,taglabel): + midx=self.cd.curwidth/2.0 + #x=midx+(128*((-fullscore+0.5)/0.5)) + if fullscore<0: + fullscore=0.0 + x=midx+(128*((fullscore-50.0)/50.0)) + txt="%d"%int(fullscore) + if mod=="Official": + self.cd.canvas.create_line(x,50-8,x,50-3,fill=color,tags=taglabel) + self.cd.canvas.create_text(x,50-8,text=txt,fill=color,anchor=tkinter.S,tags=taglabel) + else: + self.cd.canvas.create_line(x,50+3,x,50+8,fill=color,tags=taglabel) + self.cd.canvas.create_text(x,50+8,text=txt,fill=color,anchor=tkinter.N,tags=taglabel) + #================================================================== + # showWorse - show the number in the first/last bins - which are + # worse than the error limits + # + def showWorse(self,low,high,xmax,yoffset,color,taglist): + x=xmax + y=0 + 
(sx,sy)=self.graphcoord(x,y) + self.cd.canvas.create_text(sx+5,sy,text="Worse",anchor=tkinter.W) + textstring="%d"%high + self.cd.canvas.create_text(sx+5,sy-yoffset,text=textstring,anchor=tkinter.W,fill=color,tags=taglist) + x=-xmax + y=0 + (sx,sy)=self.graphcoord(x,y) + self.cd.canvas.create_text(sx-5,sy,text="Worse",anchor=tkinter.E) + textstring="%d"%low + self.cd.canvas.create_text(sx-5,sy-yoffset,text=textstring,anchor=tkinter.E,fill=color,tags=taglist) + #================================================================== + # showScores - display modelname,n,avg,std,mae,rms on histogram + # + def showScores(self,modnum,mod,num,avg,std,mae,rms,color,taglist): + font=12 + ystart=self.cd.curheight*(135.0/530.0) + y=ystart+font+(modnum*font) + x=self.cd.curwidth*(80.0/700.0) + self.cd.canvas.create_text(x,y,text=mod,anchor=tkinter.E,fill=color,tags=taglist) + textstring="%2d"%num + x=self.cd.curwidth*(130.0/700.0) + self.cd.canvas.create_text(x,y,text=textstring,anchor=tkinter.E, + fill=color,tags=taglist) + textstring="%6.2f"%avg + x=self.cd.curwidth*(170.0/700.0) + self.cd.canvas.create_text(x,y,text=textstring,anchor=tkinter.E, + fill=color,tags=taglist) + textstring="%5.2f"%std + x=self.cd.curwidth*(210.0/700.0) + self.cd.canvas.create_text(x,y,text=textstring,anchor=tkinter.E, + fill=color,tags=taglist) + textstring="%5.2f"%mae + x=self.cd.curwidth*(250.0/700.0) + self.cd.canvas.create_text(x,y,text=textstring,anchor=tkinter.E, + fill=color,tags=taglist) + textstring="%5.2f"%rms + x=self.cd.curwidth*(290.0/700.0) + self.cd.canvas.create_text(x,y,text=textstring,anchor=tkinter.E, + fill=color,tags=taglist) + return + #================================================================== + # showAvg - draw arrow on histogram axis at average value + # + def showAvg(self,avg,color,taglist): + (sx,sy)=self.graphcoord(avg,0) + self.cd.canvas.create_line(sx,sy+30,sx,sy,fill=color, + arrow=tkinter.LAST,tags=taglist) + textstring="%.2f"%avg + 
self.cd.canvas.create_text(sx,sy+30,text=textstring,anchor=tkinter.N, + fill=color,tags=taglist) + #================================================================== + # + # getBins - given a bin width and maxerr value, return + # lists of each bin's min,max, with one of them + # centerred on zero. Last bins may start up to + # a half binwidth more than maxerr + # + def getBins(self,binwidth,maxerr): + halfbin=float(binwidth)/2.0 + binmin=[] + binmax=[] + + mid=0.0 + while ((mid+halfbin)<=(maxerr+halfbin)): + maxx=mid+halfbin + binmin.append(-maxx) + binmax.append(-maxx+binwidth) + binmin.append(maxx-binwidth) + binmax.append(maxx) + mid+=binwidth + binmin.append(-900000.0) + binmax.append(-(mid-halfbin)) + binmin.append(mid-halfbin) + binmax.append(9000000.0) + binmin.sort() + binmax.sort() + return(binmin,binmax) + # + def getProbBins(self,binwidth): + binmin=[] + binmax=[] + halfbin=float(binwidth)/2.0 + num=int(100.0/float(binwidth)) + for i in range(num): + binmid=i*binwidth + bot=max(binmid-halfbin,0) + top=min(binmid+halfbin,101) + binmin.append(bot) + binmax.append(top) + return(binmin,binmax) + #================================================================== + # + # binerrs - given a 1-D array of errors, create a 1-D array of + # the number of points with errors inside each bin + # described by the binmin and binmax limits + # + def binerrs(self,err,abinmin,abinmax): + bincnt=add.reduce(logical_and(greater(err,abinmin[:,NewAxis]), + less_equal(err,abinmax[:,NewAxis])),-1) + return bincnt + def histosetup(self,minlimit,maxlimit,binwidth): + self.histowidth=binwidth + self.histohalf=binwidth/2.0 + self.histomin=minlimit-self.histohalf + self.histomax=maxlimit+self.histohalf + self.histonumbins=int(float(self.histomax-self.histomin)/float(self.histowidth)) + self.histobins=resize(arange(self.histonumbins+1),(self.histonumbins+1,1)) + return + def histo(self,data): + worseLow=add.reduce(less(data,self.histomin)) + 
worseHigh=add.reduce(greater(data,self.histomax)) + data=repeat(data,logical_and(less_equal(data,self.histomax), + greater_equal(data,self.histomin))) + data=((data-self.histomin)/self.histowidth).astype(int) + histoData=add.reduce(equal(self.histobins,data),-1) + histoData[-2]+=histoData[-1] + return histoData[:-1],worseLow,worseHigh + def hitcount(self,data,verif): + numless=add.reduce(less(data,self.histomin)) + numgreater=add.reduce(greater(data,self.histomax)) + data=repeat(data,logical_and(less_equal(data,self.histomax), + greater_equal(data,self.histomin))) + data=((data-self.histomin)/self.histowidth).astype(int) + d1=equal(self.histobins,data) + self.VU.logMsg("shape of d1=%s"%str(d1.shape)) + self.VU.logMsg("shape of verif=%s"%str(verif.shape)) + histoData=add.reduce(d1,-1) + self.VU.logMsg("done with histoData reduce") + #hitCount=add.reduce(where(d1,verif,0),-1) + a=where(d1,verif,float32(0)) + hitCount=add.reduce(a,-1) + self.VU.logMsg("done with hitCount reduce") + histoData[-2]+=histoData[-1] + hitCount[-2]+=hitCount[-1] + self.VU.logMsg("returning") + return histoData[:-2],hitCount[:-2] + + #================================================================= + # setupGM - setup Grid Manager - remove all parms from display + # except for parms in parmList for models in modelList + # (or in mutableModel) and WG1 for the mutableModel + # (if available) + # + def setupGM(self,parmList,modelList): + # + # + # + newParmList=[] + for parm in parmList: + if (len(parm)>3): + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + realname=parm[:-3] + if realname not in newParmList: + newParmList.append(realname) + else: + newParmList.append(parm) + else: + newParmList.append(parm) + mutableModel=self.mutableID().modelName() + displayObjList=self._dbss.getParmManager().getDisplayedParms() + totalcount=len(displayObjList) + count=0 + for parmObj in displayObjList: + count+=1 + self.setWorking("Cleaning Grid Manager:%d of %d"%(count,totalcount)) + if 
self.checkWorking()==1: + return 1 + pid=parmObj.getParmID() + pmodel=pid.getDbId().getModelName() + pname=pid.getParmName() + plevel=pid.getParmLevel() + if ((pmodel==mutableModel)and(pname in newParmList)): + continue + if ((pmodel==mutableModel)and(pname=="WG1")): + continue + if ((pmodel in modelList)and(pname in newParmList)): + continue + print(pmodel, pname, plevel) + self.unloadWE(pmodel,pname,plevel) + # + # if WG1 exists - use that for the units and precision of + # error grids - otherwise use default values + # + (self.errUnits,self.errPrecision,minval,maxval,rateFlag, + ct,dminval,dmaxval)=self.getParmInfo(mutableModel,"WG1") + return 0 + #================================================================== + # + # + # + def getParmInfo(self,mutableModel,parm): + units="units" + precision=0 + minval=0 + maxval=100 + rateflag=0 + colorTable="Gridded Data" + displayMinval=0 + displayMaxval=100 + parm=self.getParm(mutableModel,parm,"SFC") + if parm is not None: + parmInfo = parm.getGridInfo() + units=parmInfo.getUnitString() + precision=parmInfo.getPrecision() + minval=parmInfo.getMinValue() + maxval=parmInfo.getMaxValue() + rateflag=parmInfo.isRateParm() + from com.raytheon.viz.gfe.rsc import DiscreteDisplayUtil + ctInfo = DiscreteDisplayUtil.buildColorMapParameters(parm) + if ctInfo is not None: + colorTable = ctInfo.getColorMapName() + displayMinval = ctInfo.getColorMapMin() + displayMaxval = ctInfo.getColorMapMax() + self.__colorMapParams[colorTable] = ctInfo + return(units,precision,minval,maxval,rateflag,colorTable,displayMinval,displayMaxval) +#============================================================================== +# +# Class for other dialogs. 
+# +""" +class SimpleDialog(AppDialog.Dialog): + def __init__(self, parent=None, name="Simple Dialog", callbackMethod=None, + modal=1): + self.__parent = parent + self.__name = name + self.__modal = modal + self.__callbackMethod = callbackMethod + self.__dialog=AppDialog.Dialog.__init__(self, + parent=self.__parent, + title=self.__name, + modal=self.__modal) + return self.__dialog + + def buttonbox(self): + buttonFrame = Tkinter.Frame(self) + if self.__modal == 1: + Tkinter.Button(buttonFrame, text="Ok", + command=self.__okCB, width=10, state=Tkinter.NORMAL).pack(\ + side=Tkinter.LEFT, pady=5, padx=10) + else: + Tkinter.Button(buttonFrame, text="Run", + command=self.__runCB, width=10, state=Tkinter.NORMAL).pack(\ + side=Tkinter.LEFT, pady=5, padx=10) + Tkinter.Button(buttonFrame, text="Run/Dismiss", + command=self.__okCB, width=12, state=Tkinter.NORMAL).pack(\ + side=Tkinter.LEFT, pady=5, padx=10) + Tkinter.Button(buttonFrame, text="Cancel", width=10, + command=self.cancelCB).pack(side=Tkinter.RIGHT, pady=5, padx=10) + buttonFrame.pack(side=Tkinter.BOTTOM,expand=0) + def body(self, master): + bodylabel=Tkinter.Label(master,text="This is the body") + bodylabel.pack(side=Tkinter.BOTTOM) + def __runCB(self): + self.__callbackMethod("Run") + def __okCB(self): + self.withdraw() + self.__callbackMethod("OK") + self.ok() + def cancelCB(self): + self.__callbackMethod("Cancel") + self.cancel() +""" +#======================================================================= +class BVDialog(tkinter.Toplevel): + def __init__(self,parent,title=None,modal=1,hide=0): + self.__modal=modal + tkinter.Toplevel.__init__(self,parent) + try: + if hide==1: + self.withdraw() + ##self.transient(parent) + if title: + self.title(title) + self.parent=parent + self.buttonbox() + bodyFrame=tkinter.Frame(self) + self.body(bodyFrame) + bodyFrame.pack(side=tkinter.BOTTOM,fill=tkinter.BOTH,expand=1) + self.protocol("WM_DELETE_WINDOW",self.cancel) + if parent is not None: + 
self.geometry("+%d+%d"%(parent.winfo_rootx(),parent.winfo_rooty())) + if self.__modal==1: + self.deiconify() + self.wait_window(self) + except: + ##self.destroy() + raise Exception + + def buttonbox(self): + buttonFrame = tkinter.Frame(self) + if self.__modal == 1: + tkinter.Button(buttonFrame, text="Ok", + command=self.ok, width=10, state=tkinter.NORMAL).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + else: + tkinter.Button(buttonFrame, text="Run", + command=self.run, width=10, state=tkinter.NORMAL).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + tkinter.Button(buttonFrame, text="Run/Dismiss", + command=self.ok, width=12, state=tkinter.NORMAL).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + tkinter.Button(buttonFrame, text="Cancel", width=10, + command=self.cancelCB).pack(side=tkinter.RIGHT, pady=5, padx=10) + buttonFrame.pack(side=tkinter.BOTTOM,expand=0) + def body(self, master): + pass + def ok(self,event=None): + if not self.validate(): + return + self.withdraw() + self.update_idletasks() + self.apply() + self.cancel() + def cancel(self,event=None): + self.destroy() + def validate(self): + return 1 + def apply(self): + pass + +#======================================================================= +# Working - is a dialog to give user info while 'working'. You can +# set the label for it, or get the value of 'stop', which +# turns to 1 if they hit 'cancel' while this is displayed. +# should be 'withdrawn' while not 'working' on something. 
+# +class Working(BVDialog): + def __init__(self, parent=None, callbackMethod=None): + self.__parent=parent + self.__callbackMethod=callbackMethod + self.stop=tkinter.IntVar() + self.label=tkinter.StringVar() + BVDialog.__init__(self,parent=self.__parent, + title="%s Working"%PROGNAME,modal=0,hide=1) + self.update() + self.resizable(0,0) + return + def buttonbox(self): + buttonFrame = tkinter.Frame(self) + but=tkinter.Button(buttonFrame,text="Stop",command=self.__callbackMethod) + but.pack(side=tkinter.LEFT,expand=0) + buttonFrame.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=0) + return + def body(self,master): + lab=tkinter.Label(master,textvariable=self.label,width=60, + anchor=tkinter.W) + lab.pack(side=tkinter.LEFT) + self.label.set("Default Text") + return + def cancel(self): + self.__callbackMethod + return +#======================================================================= +# Cases - a dialog with a scrolled text window showing number of cases +# info. It has one button - a close button. 
+# +class Cases(BVDialog): + def __init__(self,parent,callbackMethod): + self.__parent=parent + self.__callbackMethod=callbackMethod + BVDialog.__init__(self,parent=self.__parent,title="Number of Cases",modal=0,hide=1) + self.update() + geo=self.geometry() + (wh,of)=geo.split("+",1) + (w,h)=wh.split("x",1) + self.minsize(int(w),int(h)) + return + def buttonbox(self): + buttonFrame=tkinter.Frame(self) + but=tkinter.Button(self,text="Close",command=self.__callbackMethod) + but.pack(side=tkinter.TOP) + buttonFrame.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=0) + def body(self,master): + self.sb=tkinter.Scrollbar(master) + self.sb.pack(side=tkinter.RIGHT,fill=tkinter.Y) + self.dataText=tkinter.Text(master,state=tkinter.DISABLED,width=25, + height=10) + self.sb.configure(command=self.dataText.yview) + self.dataText.configure(yscrollcommand=self.sb.set) + self.dataText.pack(side=tkinter.LEFT,fill=tkinter.BOTH,expand=1) + self.updateText("Default Text") + return + def cancel(self): + self.__callbackMethod() + return + def updateText(self,updateText): + self.dataText.configure(state=tkinter.NORMAL) + self.dataText.delete(1.0,tkinter.END) + self.dataText.insert(tkinter.END,updateText) + self.dataText.configure(state=tkinter.DISABLED) + return +#======================================================================= +# MiniDiag - is a minimized dialog - to save screen real +# estate. It has one button - to go back to the main +# dialog. 
+# +class MiniDiag(BVDialog): + def __init__(self,parent,callbackMethod,title="Title",buttonText="Button",loc="x"): + self.__parent=parent + self.__callbackMethod=callbackMethod + self.__title=title + self.__buttonText=buttonText + self.__loc=loc + BVDialog.__init__(self,parent=self.__parent,title=self.__title,modal=0,hide=1) + self.update() + self.resizable(0,0) + # + # Set initial location (ul and ur) + # + if self.__loc in ("ur","lr") and parent is not None: + parentgeo=self.__parent.geometry() + (wh,of)=parentgeo.split("+",1) + (w,h)=wh.split("x",1) + (ox,oy)=of.split("+",1) + self.update_idletasks() + geo=self.geometry() + (mwh,mo)=geo.split("+",1) + (mw,mh)=mwh.split("x",1) + if self.__loc=="lr": + newgeo=mwh+"+%d+%d"%(int(ox)+int(w)-int(mw),int(oy)+int(h)-int(mh)) + elif self.__loc=="ur": + newgeo=mwh+"+%d+%d"%(int(ox)+int(w)-int(mw),int(oy)) + self.geometry(newgeo) + self.update_idletasks() + return + def buttonbox(self): + buttonFrame=tkinter.Frame(self) + but=tkinter.Button(self,text=self.__buttonText,command=self.__callbackMethod) + but.pack(side=tkinter.TOP) + buttonFrame.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=0) + def cancel(self): + self.__callbackMethod() + return +#===================================================================== +class CanvasDisplay(BVDialog): + def __init__(self, parent, title="Canvas Display", callbackMethod=None): + self.__parent=parent + self.__title=title + self.__callbackMethod=callbackMethod + self.curwidth=706.0 # initial canvas width + self.curheight=536.0 # initial canvas height + BVDialog.__init__(self,parent=self.__parent, + title=self.__title,modal=0,hide=1) + self.update() + self.firstDisplay=1 + geo=self.geometry() + (mwh,mof)=geo.split("+",1) + (mw,mh)=mwh.split("x",1) + self.minsize(int(mw),int(mh)) + return + def body(self,master): + self.bar=tkinter.Frame(master) + self.bar.pack(side=tkinter.BOTTOM) + self.fbar=tkinter.Frame(master) + self.fbar.pack(side=tkinter.BOTTOM) + # + borderwidth=2 + 
canwidth=self.curwidth-((borderwidth+1)*2) + canheight=self.curheight-((borderwidth+1)*2) + self.canvas=tkinter.Canvas(master,width=canwidth,height=canheight, + borderwidth=borderwidth,relief=tkinter.SUNKEN) + self.canvas.bind("",self.resizecanvas) + self.canvas.pack(fill=tkinter.BOTH,expand=1) + def buttonbox(self): + buttonFrame=tkinter.Frame(self) + but=tkinter.Button(buttonFrame,text="Exit",fg="red",command=self.__callbackMethod) + but.pack(side=tkinter.LEFT) + self.label=tkinter.Label(buttonFrame,text=" ") + self.label.pack(side=tkinter.LEFT,fill=tkinter.X) + buttonFrame.pack(side=tkinter.BOTTOM,fill=tkinter.X) + def cancel(self): + self.__callbackMethod() + + def resizecanvas(self,event): + w=float(event.width) + h=float(event.height) + #print "resizecanvas called: %s %s"%(event.width,event.height) + scalex=w/self.curwidth + scaley=h/self.curheight + if ((scalex!=1.0)or(scaley!=1.0)): + self.canvas.scale("all",0.0,0.0,scalex,scaley) + self.curwidth=w + self.curheight=h + return +#======================================================================= +# +# Custom dialog that provides selection for verification stuff +# +class Verif(BVDialog): + def __init__(self, VU, userName, scaleList, parent=None, callbackMethod=None): + self.__VU=VU + self.__parent=parent + self.__callbackMethod=callbackMethod + self.__userName=userName + self.__scaleList=scaleList + BVDialog.__init__(self,parent=self.__parent, + title="%s Options"%PROGNAME,modal=0,hide=1) + # + # find minimum size + # + self.update() + maxw=0 + maxh=0 + self.dispGrids() + self.update_idletasks() + geo=self.geometry() + (maxw,maxh)=self.checkMax(geo,maxw,maxh) + self.dispGridStats() + self.update_idletasks() + geo=self.geometry() + (maxw,maxh)=self.checkMax(geo,maxw,maxh) + self.dispDists() + self.update_idletasks() + geo=self.geometry() + (maxw,maxh)=self.checkMax(geo,maxw,maxh) + self.dispStats() + self.update_idletasks() + geo=self.geometry() + (maxw,maxh)=self.checkMax(geo,maxw,maxh) + 
self.minsize(maxw,maxh) + self.dispGrids() + self.deiconify() + self.lift() + self.wait_visibility() + self.protocol("WM_DELETE_WINDOW",self.__quitCB) + return + def checkMax(self,geo,maxw,maxh): + (wh,of)=geo.split("+",1) + (w,h)=wh.split("x",1) + maxw=max(int(w),maxw) + maxh=max(int(h),maxh) + return(maxw,maxh) + def buttonbox(self): + buttonFrame = tkinter.Frame(self) + tkinter.Button(buttonFrame, text="Run",command=self.__runCB, width=6, + state=tkinter.NORMAL).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + tkinter.Button(buttonFrame, text="Hide", + command=self.__hideCB, width=6, state=tkinter.NORMAL).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + tkinter.Button(buttonFrame, text="Quit", width=6, + command=self.__quitCB).pack(side=tkinter.RIGHT, pady=5, padx=10) + buttonFrame.pack(side=tkinter.BOTTOM,expand=0) + def __runCB(self): + self.__callbackMethod("Run") + def __hideCB(self): + self.__callbackMethod("Hide") + def __quitCB(self): + self.cancel() + def cancel(self): + self.__callbackMethod("Quit") + return + # + # Custom body that has tabbed frames + # + def body(self, master): + # + # The "tab" buttons at the top + # + tabs=[("Grid Displays",self.dispGrids), + ("Grid Stats",self.dispGridStats), + ("Distributions",self.dispDists), + ("Point/Area Stats",self.dispStats), + ("Stat vs. 
Scale",self.dispScaleStats), + ] + tabFrame=tkinter.Frame(master,relief="sunken",borderwidth=1) + self.tabSetting=tkinter.StringVar() + self.tabSetting.set("Grid Displays") + for (text,callback) in tabs: + x=tkinter.Radiobutton(tabFrame,text=text,indicatoron=0, + command=callback,variable=self.tabSetting, + value=text) + col=x.cget("highlightbackground") + x.config(selectcolor=col) + x.pack(side=tkinter.LEFT) + tabFrame.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X) + # + # Big "body" part of dialog + # + self.BodyFrame=tkinter.Frame(master) + # + self.col4=tkinter.Frame(self.BodyFrame) + self.column4(self.col4) + self.col4.pack(side=tkinter.RIGHT,fill=tkinter.Y,expand=0) + self.col3=tkinter.Frame(self.BodyFrame) + self.column3(self.col3) + self.col3.pack(side=tkinter.RIGHT,fill=tkinter.Y,expand=0) + self.col2=tkinter.Frame(self.BodyFrame) + self.column2(self.col2) + self.col2.pack(side=tkinter.RIGHT,fill=tkinter.Y,expand=0) + # + self.Grids=tkinter.Frame(self.BodyFrame) + self.OptionsGrids(self.Grids) + # + self.GridStats=tkinter.Frame(self.BodyFrame) + self.OptionsGridsStats(self.GridStats) + # + self.ScaleStats=tkinter.Frame(self.BodyFrame) + self.OptionsScaleStats(self.ScaleStats) + # + self.Dists=tkinter.Frame(self.BodyFrame) + self.OptionsDists(self.Dists) + # + self.Stats=tkinter.Frame(self.BodyFrame) + self.OptionsStats(self.Stats) + # + self.BodyFrame.pack(side=tkinter.TOP,fill=tkinter.BOTH,expand=1) + # + # setup scales (updating the GridsScale updates all others) + # + self.updateGridsScale() + # + # pack the default one + # + cur=self.tabSetting.get() + for (text,callback) in tabs: + if cur==text: + callback() + return + #================================================================== + # + # Switch tab frame displayed + # + def dispGrids(self): + self.GridStats.pack_forget() + self.ScaleStats.pack_forget() + self.Dists.pack_forget() + self.Stats.pack_forget() + self.Grids.pack(side=tkinter.RIGHT, + fill=tkinter.BOTH,expand=1) + def 
dispGridStats(self): + self.Grids.pack_forget() + self.ScaleStats.pack_forget() + self.Dists.pack_forget() + self.Stats.pack_forget() + self.GridStats.pack(side=tkinter.RIGHT, + fill=tkinter.BOTH,expand=1) + def dispDists(self): + self.Grids.pack_forget() + self.GridStats.pack_forget() + self.ScaleStats.pack_forget() + self.Stats.pack_forget() + self.Dists.pack(side=tkinter.RIGHT, + fill=tkinter.BOTH,expand=1) + def dispStats(self): + self.Grids.pack_forget() + self.GridStats.pack_forget() + self.ScaleStats.pack_forget() + self.Dists.pack_forget() + self.Stats.pack(side=tkinter.RIGHT, + fill=tkinter.BOTH,expand=1) + def dispScaleStats(self): + self.Grids.pack_forget() + self.GridStats.pack_forget() + self.Dists.pack_forget() + self.Stats.pack_forget() + self.ScaleStats.pack(side=tkinter.RIGHT, + fill=tkinter.BOTH,expand=1) + # + # Get the values associated with the dialog pieces that + # are displayed with the current tab + # + def getValues(self): + values={} + tabtype=self.tabSetting.get() + values["tab"]=tabtype + if tabtype=="Grid Displays": + values=self.getGridsValues(values) + if tabtype=="Grid Stats": + values=self.getGridsStatsValues(values) + if tabtype=="Stat vs. 
Scale": + values=self.getScaleStatsValues(values) + if tabtype=="Distributions": + values=self.getDistsValues(values) + if tabtype=="Point/Area Stats": + values=self.getStatsValues(values) + return values + # + # values on with the Grids tab + # + def getGridsValues(self,values): + values["Display"]=self.GridsDisplay.get() + values["Parm"]=self.getCheckList(self.GridsParms) + values["Group"]=self.GridsGroup.get() + values["Model"]=self.getCheckList(self.Models) + values["ObsModel"]=self.ObsModel.get() + values["fcstrList"]=self.getForecasterListbox() + values["fhrStart"]=self.fhrStart.get() + values["fhrEnd"]=self.fhrEnd.get() + values["commonCases"]=self.Common.get() + values["dateStyle"]=self.Datestyle.get() + values["dateType"]=self.Datetype.get() + values["numDays"]=self.Ndays.get() + values["fromDay"]=self.getFromdayListbox() + values["dayList"]=self.getDaylistListbox() + values["cycleList"]=self.getCycleVals() + values["scale"]=self.GridsScale.get() + values["accumHours"]=self.accumHours.get() + values["accumFreq"]=self.accumFreq.get() + return values + # + # values on with the GridsStats tab + # + def getGridsStatsValues(self,values): + values["Display"]=self.GridsStatsDisplay.get() + #values["Parms"]=self.getCheckList(self.GridsStatsParms) + values["Parm"]=self.GridsStatsParm.get() + values["Threshold"]=self.GridsStatsThreshold.get() + values["Models"]=self.getCheckList(self.Models) + values["ObsModel"]=self.ObsModel.get() + values["fcstrList"]=self.getForecasterListbox() + values["fhrStart"]=self.fhrStart.get() + values["fhrEnd"]=self.fhrEnd.get() + values["commonCases"]=self.Common.get() + values["dateStyle"]=self.Datestyle.get() + values["dateType"]=self.Datetype.get() + values["numDays"]=self.Ndays.get() + values["fromDay"]=self.getFromdayListbox() + values["dayList"]=self.getDaylistListbox() + values["cycleList"]=self.getCycleVals() + values["scale"]=self.GridsStatsScale.get() + values["accumHours"]=self.accumHours.get() + 
values["accumFreq"]=self.accumFreq.get() + values["TwoCatType"]=self.GridsStatsTwoCatType.get() + values["TwoCatCond"]=self.GridsStatsTwoCatCond.get() + str=self.GridsStatsTwoCatValueString.get() + try: + val=float(str) + except: + val=0.0 + values["TwoCatValue"]=val + values["TwoCatValueString"]=str + return values + # + # values on with the Dists tab + # + def getDistsValues(self,values): + values["Display"]=self.DistsDisplay.get() + #values["Parms"]=self.getCheckList(self.DistsParms) + values["Parm"]=self.DistsParm.get() + values["Models"]=self.getCheckList(self.Models) + values["ObsModel"]=self.ObsModel.get() + values["fcstrList"]=self.getForecasterListbox() + values["fhrStart"]=self.fhrStart.get() + values["fhrEnd"]=self.fhrEnd.get() + values["commonCases"]=self.Common.get() + values["dateStyle"]=self.Datestyle.get() + values["dateType"]=self.Datetype.get() + values["numDays"]=self.Ndays.get() + values["fromDay"]=self.getFromdayListbox() + values["dayList"]=self.getDaylistListbox() + values["cycleList"]=self.getCycleVals() + values["scale"]=self.DistsScale.get() + values["accumHours"]=self.accumHours.get() + values["accumFreq"]=self.accumFreq.get() + return values + # + # values on the Stats tab + # + def getStatsValues(self,values): + values["Display"]=self.StatsDisplay.get() + values["areaList"]=self.getListbox(self.StatsAreasListbox) + values["AreaCombine"]=self.StatsAreaCombine.get() + values["Parms"]=self.getCheckList(self.StatsParms) + values["Threshold"]=self.StatsThreshold.get() + values["PlotType"]=self.StatsType.get() + #values["Parm"]=self.StatsParm.get() + values["Models"]=self.getCheckList(self.Models) + values["ObsModel"]=self.ObsModel.get() + values["fcstrList"]=self.getForecasterListbox() + values["fhrStart"]=self.fhrStart.get() + values["fhrEnd"]=self.fhrEnd.get() + values["commonCases"]=self.Common.get() + values["dateStyle"]=self.Datestyle.get() + values["dateType"]=self.Datetype.get() + values["numDays"]=self.Ndays.get() + 
values["fromDay"]=self.getFromdayListbox() + values["dayList"]=self.getDaylistListbox() + values["cycleList"]=self.getCycleVals() + values["scale"]=self.StatsScale.get() + values["accumHours"]=self.accumHours.get() + values["accumFreq"]=self.accumFreq.get() + values["TwoCatType"]=self.statsTwoCatType.get() + values["TwoCatCond"]=self.statsTwoCatCond.get() + str=self.statsTwoCatValueString.get() + try: + val=float(str) + except: + val=0.0 + values["TwoCatValue"]=val + values["TwoCatValueString"]=str + return values + # + # values on with the ScaleStats tab + # + def getScaleStatsValues(self,values): + values["Display"]=self.ScaleStatsDisplay.get() + values["areaList"]=self.getListbox(self.ScaleStatsAreasListbox) + values["AreaCombine"]=self.ScaleStatsAreaCombine.get() + values["Parm"]=self.ScaleStatsParm.get() + values["Threshold"]=self.ScaleStatsThreshold.get() + values["Models"]=self.getCheckList(self.Models) + values["ObsModel"]=self.ObsModel.get() + values["fcstrList"]=self.getForecasterListbox() + values["fhrStart"]=self.fhrStart.get() + values["fhrEnd"]=self.fhrEnd.get() + values["commonCases"]=self.Common.get() + values["dateStyle"]=self.Datestyle.get() + values["dateType"]=self.Datetype.get() + values["numDays"]=self.Ndays.get() + values["fromDay"]=self.getFromdayListbox() + values["dayList"]=self.getDaylistListbox() + values["cycleList"]=self.getCycleVals() + values["scale"]=self.GridsStatsScale.get() + values["accumHours"]=self.accumHours.get() + values["accumFreq"]=self.accumFreq.get() + values["TwoCatType"]=self.scaleStatsTwoCatType.get() + values["TwoCatCond"]=self.scaleStatsTwoCatCond.get() + str=self.scaleStatsTwoCatValueString.get() + try: + val=float(str) + except: + val=0.0 + values["TwoCatValue"]=val + values["TwoCatValueString"]=str + return values + #=============================================================== + # + # Column 2 - model + # + def column2(self,master): + # + # At bottom - ObsModel being used + # + 
obsModelFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + obsModelLabel=tkinter.Label(obsModelFrame,text="Observed:") + obsModelLabel.pack(side=tkinter.LEFT) + obsModels=self.__VU.getCFG('OBSMODELS') + namewidth=0 + for model in obsModels: + if len(model)>namewidth: + namewidth=len(model) + self.ObsModel=tkinter.StringVar() + self.ObsModelButton=tkinter.Menubutton(obsModelFrame,textvariable=self.ObsModel, + relief=tkinter.RAISED,indicatoron=1,width=namewidth+1,anchor=tkinter.W) + self.ObsModelButton.pack(side=tkinter.RIGHT) + self.ObsModelPopup=tkinter.Menu(self.ObsModelButton,tearoff=0) + obsModels=self.__VU.getCFG('OBSMODELS') + for model in obsModels: + self.ObsModelPopup.add_radiobutton(label=model,indicatoron=0,value=model, + variable=self.ObsModel) + self.ObsModel.set(obsModels[0]) + obsModelFrame.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=1) + self.ObsModelButton.config(menu=self.ObsModelPopup) + + # + # common Cases checkbox + # + commonFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + self.Common=tkinter.IntVar() + commonCheck=tkinter.Checkbutton(commonFrame,text="Common Cases", + variable=self.Common, + onvalue=1,offvalue=0) + self.Common.set(1) + commonCheck.pack(side=tkinter.TOP,anchor=tkinter.W) + commonFrame.pack(side=tkinter.BOTTOM,anchor=tkinter.N, + fill=tkinter.X,expand=0) + # + # Models checkbox + # + self.Models=[] + models=self.__VU.listModels() + for model in models: + self.Models.append(tkinter.StringVar()) + if "Official" in models: + defaultModels=["Official",] + else: + defaultModels=[models[0],] + self.checkGroup(master,"Model:",self.Models,models,defaultModels,tkinter.BOTH,1) + return + #=============================================================== + # + # Column 3 - Forecaster and common cases + # + def column3(self,master): + XHOUR=self.__VU.getCFG('MAXFORECASTHOUR') + # + # Accumulation Time Periods: + # + accumFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + 
self.accumHours=tkinter.IntVar() + self.accumFreq=tkinter.IntVar() + freqFrame=tkinter.Frame(accumFrame) + flab=tkinter.Label(freqFrame,text="Every:",width=8) + flab.pack(side=tkinter.LEFT,anchor=tkinter.S) + flab=tkinter.Label(freqFrame,text="hrs") + flab.pack(side=tkinter.RIGHT,anchor=tkinter.S) + scaleFreq=tkinter.Scale(freqFrame,from_=1,to=24, + variable=self.accumFreq, + orient=tkinter.HORIZONTAL, + sliderlength=15) + accumFrequencyDefault=self.__VU.getCFG("ACCUM_FREQUENCY_DEFAULT") + if accumFrequencyDefault is None: + accumFrequencyDefault=6 + self.accumFreq.set(accumFrequencyDefault) + scaleFreq.pack(side=tkinter.RIGHT,fill=tkinter.X,expand=1) + freqFrame.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=1) + hoursFrame=tkinter.Frame(accumFrame) + flab=tkinter.Label(hoursFrame,text="Length:",width=8) + flab.pack(side=tkinter.LEFT,anchor=tkinter.S) + flab=tkinter.Label(hoursFrame,text="hrs") + flab.pack(side=tkinter.RIGHT,anchor=tkinter.S) + accumResolution=self.__VU.getCFG('ACCUM_RESOLUTION') + if accumResolution is None: + accumResolution=6 + scaleHours=tkinter.Scale(hoursFrame,from_=accumResolution, + to=XHOUR,resolution=accumResolution, + variable=self.accumHours, + orient=tkinter.HORIZONTAL, + sliderlength=15) + accumDefaultLength=self.__VU.getCFG("ACCUM_LENGTH_DEFAULT") + if accumDefaultLength is None: + accumDefaultLength=6 + self.accumHours.set(accumDefaultLength) + scaleHours.pack(side=tkinter.RIGHT,fill=tkinter.X,expand=1) + hoursFrame.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=1) + flab=tkinter.Label(accumFrame,text="Accumulation Time Periods:") + flab.pack(side=tkinter.BOTTOM,expand=0) + accumFrame.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=0) + # + # Forecast Hours start/stop + # + fhrFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + self.fhrStart=tkinter.IntVar() + self.fhrEnd=tkinter.IntVar() + fend=tkinter.Scale(fhrFrame,from_=0,to=XHOUR,variable=self.fhrEnd, + orient=tkinter.HORIZONTAL,command=self.endMove, + 
sliderlength=15) + self.fhrEnd.set(XHOUR) + fend.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=1) + fstart=tkinter.Scale(fhrFrame,from_=0,to=XHOUR,variable=self.fhrStart, + orient=tkinter.HORIZONTAL,command=self.startMove, + sliderlength=15) + self.fhrStart.set(0) + fstart.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=1) + flab=tkinter.Label(fhrFrame,text="Forecast Hours:") + flab.pack(side=tkinter.BOTTOM,expand=0) + fhrFrame.pack(side=tkinter.BOTTOM,fill=tkinter.X,expand=0) + # + # Forecaster names to show... + # + forecasters=["ALL"] + self.forecasterNumbers=[-1] + # + trimming=self.__VU.getCFG('FORECASTER_LIST_TRIMMING') + trimADMIN=self.__VU.getCFG('FORECASTER_LIST_TRIMMING_ADMINISTRATORS') + fFormat=self.__VU.getCFG('FORECASTER_LIST_FORMAT') + fSort=self.__VU.getCFG('FORECASTER_LIST_SORT') + labels=[] + numstrs=self.__VU.getFcstrNums() + for numstr in numstrs: + num=int(numstr) + id=self.__VU.getFcstrID(num) + if ((trimming==1)and(self.__userName not in trimADMIN)and(self.__userName!=id)and(num!=0)): + continue + name=self.__VU.getFcstrName(num) + sort=numstr #defaults to number + if fSort=="id": + sort=id + elif fSort=="name": + sort=name + label=name #defaults to name + if fFormat=="number": + label=numstr + elif fFormat=="id": + label=id + elif fFormat=="number-name": + label="%s - %s"%(numstr,name) + elif fFormat=="number-id": + label="%s - %s"%(numstr,id) + labels.append("%s|%s|%s"%(sort,numstr,label)) + labels.sort() + for entry in labels: + (sstr,numstr,label)=entry.split("|") + forecasters.append(label) + self.forecasterNumbers.append(int(numstr)) + defaultForecasters=["ALL",] + maxwid=0 + for forecaster in forecasters: + wid=len(forecaster) + if wid>maxwid: + maxwid=wid + maxheight=10 + fcstrFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + self.ForecasterListbox=self.sListbox(fcstrFrame,"Forecaster:", + forecasters,defaultForecasters,maxwid+1,maxheight,tkinter.EXTENDED) + 
fcstrFrame.pack(side=tkinter.BOTTOM,fill=tkinter.BOTH,expand=1) + return + # + #================================================================= + # getForecasterListbox - get list of integer forecast numbers for + # forcasters turned on in ForecasterListbox + # + def getForecasterListbox(self): + outlist=[] + itemnums=self.ForecasterListbox.curselection() + try: + itemnums=list(map(int,itemnums)) + except ValueError: pass + for itemnum in itemnums: + outlist.append(self.forecasterNumbers[itemnum]) + return outlist + #================================================================== + # + # If moving fhrStart...check to make sure that it is not more + # than fhrEnd...and if it is...move fhrEnd too. + # + def startMove(self,event): + st=self.fhrStart.get() + en=self.fhrEnd.get() + if en",padx=0,pady=0,command=self.toggleNScale) + self.NTog.pack(side=tkinter.RIGHT,anchor=tkinter.E) + labFrame.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X,expand=1) + self.NScale=tkinter.Scale(nFrame,from_=1,to=50,variable=self.Ndays, + showvalue=1,orient=tkinter.HORIZONTAL, + sliderlength=15) + self.NScale.pack(side=tkinter.TOP,anchor=tkinter.N,fill=tkinter.X) + nFrame.pack(side=tkinter.TOP,anchor=tkinter.N,fill=tkinter.X) + + gridDayStrings,self.gridDays=self.getRecentDates(self.__VU.GRIDDAYS) + defaultDay=[gridDayStrings[0],] + maxwid=10 + maxheight=5 # number of days to show + self.FromdayListbox=self.sListbox(self.ByPeriod,"Ending on:", + gridDayStrings,defaultDay,maxwid,maxheight,tkinter.BROWSE) + self.ByPeriod.pack(side=tkinter.TOP,anchor=tkinter.N,fill=tkinter.BOTH,expand=1) + # + # byList + # + self.ByList=tkinter.Frame(byFrame) + #days,daydates=self.getRecentDates(self.__VU.GRIDDAYS) + defaultDaylist=[] + for i in range(7): + defaultDaylist.append(gridDayStrings[i]) + maxwid=10 + maxheight=5 #number of days to show + self.DaylistListbox=self.sListbox(self.ByList,"Include:", + gridDayStrings,defaultDaylist,maxwid,maxheight,tkinter.EXTENDED) + 
self.ByList.pack(side=tkinter.TOP,anchor=tkinter.N,fill=tkinter.BOTH,expand=1) + # + # datetype + # + datetypeFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + datetypeLabel=tkinter.Label(datetypeFrame,text="Choose Dates by:") + datetypeLabel.pack(side=tkinter.TOP,anchor=tkinter.W) + self.Datetype=tkinter.StringVar() + datetypeDefault="Period Length" + datetypes=["Period Length","List of dates"] + for datetype in datetypes: + a=tkinter.Radiobutton(datetypeFrame,text=datetype,command=self.setDatetype, + variable=self.Datetype,value=datetype) + if datetype is datetypeDefault: + a.invoke() + a.pack(side=tkinter.TOP,anchor=tkinter.W) + datetypeFrame.pack(side=tkinter.TOP,anchor=tkinter.N,fill=tkinter.X) + # + # Now pack the frame with the "byPeriod" or "byList" + # + byFrame.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=tkinter.BOTH,expand=1) + # + # cycle + # + cycleFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + labFrame=tkinter.Frame(cycleFrame) + cycleLabel=tkinter.Label(labFrame,text="Cycle:") + cycleLabel.pack(side=tkinter.LEFT,anchor=tkinter.W) + cycleToggle=tkinter.Button(labFrame,text="ALL",padx=0,pady=0,command=self.toggleCycles) + cycleToggle.pack(side=tkinter.RIGHT,anchor=tkinter.E) + labFrame.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X,expand=1) + cyclecol1=tkinter.Frame(cycleFrame) + cyclecol2=tkinter.Frame(cycleFrame) + cycleDefault=self.__VU.getCFG('ALLCYCLES') + cycles=self.__VU.getCFG('ALLCYCLES') + self.CycleFlags=[] + self.CycleVals=[] + cnt=0 + for cycle in cycles: + self.CycleFlags.append(tkinter.IntVar()) + if cnt": + self.NScale.configure(to=self.__VU.STATDAYS) + self.NTog.configure(text="<") + else: + if curN>50: + self.Ndays.set(50) + self.NScale.configure(to=50) + self.NTog.configure(text=">") + return + #to=self.__VU.STATDAYS + #================================================================ + # toggleCycles - toggles all the cycle buttons + # + def toggleCycles(self): + for cycleFlag in 
self.CycleFlags: + val=cycleFlag.get() + val=abs(val-1) + cycleFlag.set(val) + return + #================================================================= + # getCycleVals - get list of values turned on in Cycles + # + def getCycleVals(self): + outlist=[] + for i in range(len(self.CycleFlags)): + a=self.CycleFlags[i].get() + if a!=0: + outlist.append(self.CycleVals[i]) + return outlist + #================================================================= + # getFromdayListbox - get unix date for day listed in Fromday + # listbox + # + def getFromdayListbox(self): + itemnums=self.FromdayListbox.curselection() + try: + itemnums=list(map(int,itemnums)) + except ValueError: pass + itemnum=itemnums[0] + outdate=self.gridDays[itemnum] + return outdate + #================================================================= + # getDaylistListbox - get list of integer forecast numbers for + # forcasters turned on in ForecasterListbox + # + def getDaylistListbox(self): + outlist=[] + itemnums=self.DaylistListbox.curselection() + try: + itemnums=list(map(int,itemnums)) + except ValueError: pass + for itemnum in itemnums: + outlist.append(self.gridDays[itemnum]) + return outlist + #================================================================== + # + # Frame that specifies the options for the Grids displays + # + def OptionsGrids(self,master): + # + # parameter + # + self.GridsParms=[] + parms=self.__VU.getVerParms() + for parm in parms: + self.GridsParms.append(tkinter.StringVar()) + defaultParms=[parms[0],] + self.checkGroup(master,"Parameter:",self.GridsParms,parms, + defaultParms,tkinter.BOTH,1) + # + # display + # + self.GridsDisplay=tkinter.StringVar() + displays=["Forecasts","Errors"] + defaultDisplay="Forecasts" + gridDisplayFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + gdLabelFrame=tkinter.Frame(gridDisplayFrame) + gridDisplayLabel=tkinter.Label(gdLabelFrame,text="Display:") + gridDisplayLabel.pack(side=tkinter.LEFT,anchor=tkinter.W) + + 
self.GridsScale=tkinter.IntVar() + self.GridsScaleText=tkinter.StringVar() + but=tkinter.Menubutton(gdLabelFrame,textvariable=self.GridsScaleText, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.RIGHT,anchor=tkinter.W) + self.GridsScalePopup=tkinter.Menu(but,tearoff=0) + for (value,text) in self.__scaleList: + self.GridsScalePopup.add_radiobutton(label=text,indicatoron=0,value=value, + variable=self.GridsScale, + command=self.updateGridsScale) + self.GridsScale.set(0) + #self.updateGridsScale() + but.config(menu=self.GridsScalePopup) + + gdLabelFrame.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X,expand=0) + for item in displays: + a=tkinter.Radiobutton(gridDisplayFrame,text=item, + variable=self.GridsDisplay,value=item) + a.pack(side=tkinter.TOP,anchor=tkinter.W) + if item is defaultDisplay: + self.GridsDisplay.set(item) + gridDisplayFrame.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=tkinter.X,expand=0) + # + # Group by + # + self.GridsGroup=tkinter.StringVar() + defaultGroup="Forecast Hour" + groups=["Forecast Hour","Run Time"] + self.radioGroup(master,"Group by:",self.GridsGroup,groups,defaultGroup,tkinter.X,0) + return + # + def updateGridsScale(self): + value=self.GridsScale.get() + for i in range(len(self.__scaleList)): + (num,text)=self.__scaleList[i] + if num==value: + self.GridsScaleText.set(text) + self.GridsStatsScale.set(num) + self.GridsStatsScaleText.set(text) + self.DistsScale.set(num) + self.DistsScaleText.set(text) + self.StatsScale.set(num) + self.StatsScaleText.set(text) + break + #================================================================== + # + # Frame that specifies the options for the GridsStats display + # + def OptionsGridsStats(self,master): + # + # parameter + # + self.GridsStatsParm=tkinter.StringVar() + parms=self.__VU.getVerParmsVect() + #for parm in parms: + # self.GridsStatsParms.append(Tkinter.StringVar()) + defaultParm=parms[0] + self.radioGroup(master,"Parameter:",self.GridsStatsParm,parms, + 
defaultParm,tkinter.BOTH,1,callback=self.updateGridsStatsThreshold) + # + # display + # + self.GridsStatsDisplay=tkinter.StringVar() + displays=["Bias","Mean Abs Error","RMS Error","Mean Squared Error"] + defaultDisplay="Bias" + radioFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + scaleFrame=tkinter.Frame(radioFrame) + radioLabel=tkinter.Label(scaleFrame,text="Display:") + radioLabel.pack(side=tkinter.LEFT,anchor=tkinter.W) + + self.GridsStatsScale=tkinter.IntVar() + self.GridsStatsScaleText=tkinter.StringVar() + but=tkinter.Menubutton(scaleFrame,textvariable=self.GridsStatsScaleText, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.RIGHT,anchor=tkinter.W) + self.GridsStatsScalePopup=tkinter.Menu(but,tearoff=0) + for (value,text) in self.__scaleList: + self.GridsStatsScalePopup.add_radiobutton(label=text,indicatoron=0,value=value, + variable=self.GridsStatsScale, + command=self.updateGridsStatsScale) + self.GridsStatsScale.set(0) + #self.updateGridsStatsScale() + + but.config(menu=self.GridsStatsScalePopup) + scaleFrame.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X,expand=1) + + a=tkinter.Radiobutton(radioFrame,text="Bias", + variable=self.GridsStatsDisplay,value="Bias") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + self.GridsStatsDisplay.set("Bias") + a=tkinter.Radiobutton(radioFrame,text="Mean Abs Error", + variable=self.GridsStatsDisplay,value="Mean Abs Error") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + a=tkinter.Radiobutton(radioFrame,text="RMS Error", + variable=self.GridsStatsDisplay,value="RMS Error") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + a=tkinter.Radiobutton(radioFrame,text="Mean Squared Error", + variable=self.GridsStatsDisplay,value="Mean Squared Error") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + threshFrame=tkinter.Frame(radioFrame) + a=tkinter.Radiobutton(threshFrame,text="Percent Err <", + variable=self.GridsStatsDisplay,value="Percent Err <") + a.pack(side=tkinter.LEFT,anchor=tkinter.W) + 
self.GridsStatsThreshold=tkinter.IntVar() + self.GridsStatsThresholdValue=tkinter.StringVar() + but=tkinter.Menubutton(threshFrame,textvariable=self.GridsStatsThresholdValue, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.GridsStatsThresholdPopup=tkinter.Menu(but,tearoff=0) + self.GridsStatsThresholdPopup.add_command(label="stuff") + for i in range(self.__VU.getCFG('NUMTHRESH')): + self.GridsStatsThresholdPopup.add_radiobutton(label="xxxx",indicatoron=0,value=i, + variable=self.GridsStatsThreshold, + command=self.pickGridsStatsThreshold) + but.config(menu=self.GridsStatsThresholdPopup) + threshFrame.pack(side=tkinter.TOP,anchor=tkinter.W) + + twocatFrame=tkinter.Frame(radioFrame) + a=tkinter.Radiobutton(twocatFrame,text="",variable=self.GridsStatsDisplay, + value="TwoCat") + a.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.GridsStatsTwoCatType=tkinter.StringVar() + but=tkinter.Menubutton(twocatFrame,textvariable=self.GridsStatsTwoCatType, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.GridsStatsTwoCatTypePopup=tkinter.Menu(but,tearoff=0) + for stat in ["Hits","Areal Hits","Misses","Areal Misses", + "False Alarms","Areal False Alarms","Correct Negatives", + "Areal Correct Negatives", + "Frequency Observed", + "Frequency Forecast", + "Fraction Correct","Areal Fraction Correct", + "Frequency Bias","Areal Frequency Bias", + "POD","Areal POD","FAR","Areal FAR","Threat Score", + "Areal Threat Score","Equitable Threat Score", + "Areal Equitable Threat Score","True Skill Score", + "Areal True Skill Score","Heidke Skill Score", + "Areal Heidke Skill Score","Odds Ratio","Areal Odds Ratio"]: + self.GridsStatsTwoCatTypePopup.add_radiobutton(label=stat, + indicatoron=0,value=stat, + variable=self.GridsStatsTwoCatType,command=self.updateGridsStatsTwoCatType) + self.GridsStatsTwoCatType.set("Fraction Correct") + but.config(menu=self.GridsStatsTwoCatTypePopup) + 
self.GridsStatsTwoCatCond=tkinter.StringVar() + but=tkinter.Menubutton(twocatFrame,textvariable=self.GridsStatsTwoCatCond, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.GridsStatsTwoCatCondPopup=tkinter.Menu(but,tearoff=0) + for cond in [">",">=","<=","<"]: + self.GridsStatsTwoCatCondPopup.add_radiobutton(label=cond, + indicatoron=0,value=cond, + variable=self.GridsStatsTwoCatCond,command=self.updateGridsStatsTwoCatType) + self.GridsStatsTwoCatCond.set(">") + but.config(menu=self.GridsStatsTwoCatCondPopup) + self.GridsStatsTwoCatValueString=tkinter.StringVar() + ent=tkinter.Entry(twocatFrame,textvariable=self.GridsStatsTwoCatValueString, + width=5,relief=tkinter.SUNKEN) + self.GridsStatsTwoCatValueString.set("0.0") + ent.pack(side=tkinter.LEFT,anchor=tkinter.W) + twocatFrame.pack(side=tkinter.TOP,anchor=tkinter.W) + radioFrame.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=tkinter.X,expand=0) + self.updateGridsStatsThreshold() + return + #================================================================== + # + def updateGridsStatsScale(self): + value=self.GridsStatsScale.get() + for i in range(len(self.__scaleList)): + (num,text)=self.__scaleList[i] + if num==value: + self.GridsStatsScaleText.set(text) + self.GridsScale.set(num) + self.GridsScaleText.set(text) + self.DistsScale.set(num) + self.DistsScaleText.set(text) + self.StatsScale.set(num) + self.StatsScaleText.set(text) + break + # + # When user actually picks a threshold - then also set the display to + # use the Percent Err < display + # + def pickGridsStatsThreshold(self): + self.GridsStatsDisplay.set("Percent Err <") + self.updateGridsStatsThreshold() + return + def updateGridsStatsTwoCatType(self): + self.GridsStatsDisplay.set("TwoCat") + return + # + # When parm is changed, or when the user picks a threshold - need to + # update the chosen thresholds. 
+ # + def updateGridsStatsThreshold(self): + #print "in updateGridsStatsThreshold" + parm=self.GridsStatsParm.get() + #print " parm=",parm + parmList=[parm,] + if len(parmList)<1: + return + tlist=[] + plist=[] + for parm in parmList: + readParm=parm + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + datatype=self.__VU.getVerParmType(readParm) + thresholds=self.__VU.getVerThresholds(readParm) + #print " thresholds for ",parm + #print " are:",thresholds + if datatype==1: + (threshmag,threshdir)=thresholds + if last3=="Dir": + thresholds=threshdir + else: + thresholds=threshmag + #if last3 in ("Spd","Dir"): + # (threshmag,threshdir)=thresholds + # if last3=="Spd": + # thresholds=threshmag + # else: + # thresholds=threshdir + if len(tlist)>0: + same=1 + for j in range(len(tlist)): + thresh=tlist[j] + parms=plist[j] + same=1 + for i in range(len(thresh)): + if thresh[i]!=thresholds[i]: + same=0 + break + if same==1: + plist[j]+=",%s"%parm + break + if same!=1: + tlist.append(thresholds) + plist.append(parm) + else: + tlist.append(thresholds) + plist.append(parm) + + dthresh=[] + if len(tlist)>1: + for j in range(len(tlist)): + thresh=tlist[j] + parms=plist[j] + for i in range(len(thresh)): + t=thresh[i] + str="%d"%(t) + if len(dthresh)<(i+1): + dthresh.append(str) + else: + dthresh[i]+=" | %s"%str + else: + thresh=tlist[0] + for i in range(len(thresh)): + t=thresh[i] + str="%d"%t + dthresh.append(str) + # + # + parmList=" | ".join(plist) + self.GridsStatsThresholdPopup.entryconfigure(0,label=parmList) + for i in range(len(dthresh)): + self.GridsStatsThresholdPopup.entryconfigure(i+1,label=dthresh[i]) + #print " ",i,dthresh[i] + self.GridsStatsThresholdValue.set(dthresh[self.GridsStatsThreshold.get()]) + return + def updateScaleThreshold(self): + parm=self.GridsStatsParm.get() + parmList=[parm,] + if len(parmList)<1: + return + tlist=[] + plist=[] + for parm in parmList: + readParm=parm + last3="xxx" + if 
len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + thresholds=self.__VU.getVerThresholds(readParm) + print(" thresholds for ",parm) + print(" are:",thresholds) + if last3 in ("Spd","Dir"): + (threshmag,threshdir)=thresholds + if last3=="Spd": + thresholds=threshmag + else: + thresholds=threshdir + if len(tlist)>0: + same=1 + for j in range(len(tlist)): + thresh=tlist[j] + parms=plist[j] + same=1 + for i in range(len(thresh)): + if thresh[i]!=thresholds[i]: + same=0 + break + if same==1: + plist[j]+=",%s"%parm + break + if same!=1: + tlist.append(thresholds) + plist.append(parm) + else: + tlist.append(thresholds) + plist.append(parm) + + dthresh=[] + if len(tlist)>1: + for j in range(len(tlist)): + thresh=tlist[j] + parms=plist[j] + for i in range(len(thresh)): + t=thresh[i] + str="%d"%(t) + if len(dthresh)<(i+1): + dthresh.append(str) + else: + dthresh[i]+=" | %s"%str + else: + thresh=tlist[0] + for i in range(len(thresh)): + t=thresh[i] + str="%d"%t + dthresh.append(str) + # + # + parmList=" | ".join(plist) + self.ScaleThresholdPopup.entryconfigure(0,label=parmList) + for i in range(len(dthresh)): + self.ScaleThresholdPopup.entryconfigure(i+1,label=dthresh[i]) + #print " ",i,dthresh[i] + self.ScaleThresholdValue.set(dthresh[self.ScaleThreshold.get()]) + return + #================================================================== + # + # Frame that specifies the options for the Dists display + # + def OptionsDists(self,master): + # + # parameter + # + self.DistsParm=tkinter.StringVar() + parms=self.__VU.getVerParmsVect() + defaultParm=parms[0] + self.radioGroup(master,"Parameter:",self.DistsParm,parms,defaultParm,tkinter.BOTH,1) + # + # display + # + self.DistsDisplay=tkinter.StringVar() + + radioFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + labFrame=tkinter.Frame(radioFrame) + + radioLabel=tkinter.Label(labFrame,text="Display:") + radioLabel.pack(side=tkinter.LEFT,anchor=tkinter.W) + + 
self.DistsScale=tkinter.IntVar() + self.DistsScaleText=tkinter.StringVar() + but=tkinter.Menubutton(labFrame,textvariable=self.DistsScaleText, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.RIGHT,anchor=tkinter.W) + self.DistsScalePopup=tkinter.Menu(but,tearoff=0) + for (value,text) in self.__scaleList: + self.DistsScalePopup.add_radiobutton(label=text,indicatoron=0,value=value, + variable=self.DistsScale, + command=self.updateDistsScale) + self.DistsScale.set(0) + #self.updateDistsScale() + but.config(menu=self.DistsScalePopup) + labFrame.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X,expand=1) + + a=tkinter.Radiobutton(radioFrame,text="Error Histogram", + variable=self.DistsDisplay,value="Error Histogram") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + self.DistsDisplay.set("Error Histogram") + a=tkinter.Radiobutton(radioFrame,text="Value Histogram", + variable=self.DistsDisplay,value="Value Histogram") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + a=tkinter.Radiobutton(radioFrame,text="Expected Value", + variable=self.DistsDisplay,value="Expected Value") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + a=tkinter.Radiobutton(radioFrame,text="Scatterplot", + variable=self.DistsDisplay,value="Scatterplot") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + radioFrame.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=tkinter.X,expand=0) + + #displays=["Error Histogram","Value Histogram","Expected Value","Scatterplot"] + #defaultDisplay="Error Histogram" + #self.radioGroup(master,"Display:",self.DistsDisplay,displays,defaultDisplay,Tkinter.X,0) + return + #================================================================== + # + def updateDistsScale(self): + value=self.DistsScale.get() + for i in range(len(self.__scaleList)): + (num,text)=self.__scaleList[i] + if num==value: + self.DistsScaleText.set(text) + self.GridsScale.set(num) + self.GridsScaleText.set(text) + self.GridsStatsScale.set(num) + self.GridsStatsScaleText.set(text) + self.StatsScale.set(num) 
+ self.StatsScaleText.set(text) + break + #================================================================== + # + # Frame that specifies the options for the Stats display + # + def OptionsStats(self,master): + # + # parameter + # + self.StatsParms=[] + parms=self.__VU.getVerParmsVect() + for parm in parms: + self.StatsParms.append(tkinter.StringVar()) + defaultParms=[parms[0],] + self.checkGroup(master,"Parameter:",self.StatsParms,parms, + defaultParms,tkinter.X,0,callback=self.updateStatsThreshold) + # + # Area list + # + af=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + alist=self.__VU.listEditAreaDescriptions() + alist[0:0]=["Current"] + defaultArea=alist[0] + maxwid=0 + for area in alist: + if len(area)>maxwid: + maxwid=len(area) + if len(alist)>5: + maxheight=5 + else: + maxheight=len(alist) + + acomb=tkinter.Frame(af) + self.StatsAreaCombine=tkinter.IntVar() + comb=tkinter.Checkbutton(acomb,text="Combine",variable=self.StatsAreaCombine) + self.StatsAreaCombine.set(1) + comb.pack(side=tkinter.RIGHT,anchor=tkinter.E) + sLabel=tkinter.Label(acomb,text="Edit Area:") + sLabel.pack(side=tkinter.LEFT,anchor=tkinter.W) + acomb.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X) + sb=tkinter.Scrollbar(af,orient=tkinter.VERTICAL) + self.StatsAreasListbox=tkinter.Listbox(af,yscrollcommand=sb.set, + selectmode=tkinter.EXTENDED,width=maxwid,height=maxheight) + sb.config(command=self.StatsAreasListbox.yview) + sb.pack(side=tkinter.RIGHT,fill=tkinter.Y) + self.StatsAreasListbox.pack(side=tkinter.LEFT,fill=tkinter.BOTH,expand=1) + idx=0 + for item in alist: + self.StatsAreasListbox.insert(tkinter.END,item) + if item in defaultArea: + self.StatsAreasListbox.select_set(idx) + idx+=1 + + af.pack(side=tkinter.TOP,anchor=tkinter.N,fill=tkinter.BOTH,expand=1) + # + # display + # + self.StatsDisplay=tkinter.StringVar() + radioFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + labFrame=tkinter.Frame(radioFrame) + + 
radioLabel=tkinter.Label(labFrame,text="Display:") + radioLabel.pack(side=tkinter.LEFT,anchor=tkinter.W) + + self.StatsScale=tkinter.IntVar() + self.StatsScaleText=tkinter.StringVar() + but=tkinter.Menubutton(labFrame,textvariable=self.StatsScaleText, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.RIGHT,anchor=tkinter.W) + self.StatsScalePopup=tkinter.Menu(but,tearoff=0) + for (value,text) in self.__scaleList: + self.StatsScalePopup.add_radiobutton(label=text,indicatoron=0,value=value, + variable=self.StatsScale, + command=self.updateStatsScale) + self.StatsScale.set(0) + #self.updateStatsScale() + but.config(menu=self.StatsScalePopup) + labFrame.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X,expand=1) + + a=tkinter.Radiobutton(radioFrame,text="Bias", + variable=self.StatsDisplay,value="Bias") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + self.StatsDisplay.set("Bias") + a=tkinter.Radiobutton(radioFrame,text="Mean Abs Error", + variable=self.StatsDisplay,value="Mean Abs Error") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + a=tkinter.Radiobutton(radioFrame,text="RMS Error", + variable=self.StatsDisplay,value="RMS Error") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + a=tkinter.Radiobutton(radioFrame,text="Mean Squared Error", + variable=self.StatsDisplay,value="Mean Squared Error") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + + threshFrame=tkinter.Frame(radioFrame) + a=tkinter.Radiobutton(threshFrame,text="Percent Err <", + variable=self.StatsDisplay,value="Percent Err <") + a.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.StatsThreshold=tkinter.IntVar() + self.StatsThresholdValue=tkinter.StringVar() + but=tkinter.Menubutton(threshFrame,textvariable=self.StatsThresholdValue, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.thresholdStatsPopup=tkinter.Menu(but,tearoff=0) + self.thresholdStatsPopup.add_command(label="stuff") + for i in range(self.__VU.getCFG('NUMTHRESH')): + 
self.thresholdStatsPopup.add_radiobutton(label="xxxx",indicatoron=0,value=i, + variable=self.StatsThreshold, + command=self.pickStatsThreshold) + but.config(menu=self.thresholdStatsPopup) + threshFrame.pack(side=tkinter.TOP,anchor=tkinter.W) + twocatFrame=tkinter.Frame(radioFrame) + a=tkinter.Radiobutton(twocatFrame,text="",variable=self.StatsDisplay, + value="TwoCat") + a.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.statsTwoCatType=tkinter.StringVar() + but=tkinter.Menubutton(twocatFrame,textvariable=self.statsTwoCatType, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.statsTwoCatTypePopup=tkinter.Menu(but,tearoff=0) + for stat in ["Hits","Areal Hits","Misses","Areal Misses", + "False Alarms","Areal False Alarms","Correct Negatives", + "Areal Correct Negatives", + "Frequency Observed", + "Frequency Forecast", + "Fraction Correct", + "Areal Fraction Correct", + "Frequency Bias","Areal Frequency Bias", + "POD","Areal POD","FAR","Areal FAR","Threat Score", + "Areal Threat Score","Equitable Threat Score", + "Areal Equitable Threat Score","True Skill Score", + "Areal True Skill Score","Heidke Skill Score", + "Areal Heidke Skill Score","Odds Ratio","Areal Odds Ratio"]: + self.statsTwoCatTypePopup.add_radiobutton(label=stat, + indicatoron=0,value=stat, + variable=self.statsTwoCatType,command=self.updateStatsTwoCatType) + self.statsTwoCatType.set("Fraction Correct") + but.config(menu=self.statsTwoCatTypePopup) + self.statsTwoCatCond=tkinter.StringVar() + but=tkinter.Menubutton(twocatFrame,textvariable=self.statsTwoCatCond, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.statsTwoCatCondPopup=tkinter.Menu(but,tearoff=0) + for cond in [">",">=","<=","<"]: + self.statsTwoCatCondPopup.add_radiobutton(label=cond, + indicatoron=0,value=cond, + variable=self.statsTwoCatCond,command=self.updateStatsTwoCatType) + self.statsTwoCatCond.set(">") + but.config(menu=self.statsTwoCatCondPopup) + 
self.statsTwoCatValueString=tkinter.StringVar() + ent=tkinter.Entry(twocatFrame,textvariable=self.statsTwoCatValueString, + width=5,relief=tkinter.SUNKEN) + self.statsTwoCatValueString.set("0.0") + ent.pack(side=tkinter.LEFT,anchor=tkinter.W) + twocatFrame.pack(side=tkinter.TOP,anchor=tkinter.W) + radioFrame.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=tkinter.X,expand=0) + self.updateStatsThreshold() + # + # Stat type + # + self.StatsType=tkinter.StringVar() + stattypes=["vs. Time","vs. Fcst Hour"] + defaulttype="vs. Time" + self.radioGroup(master,"Plot:",self.StatsType,stattypes,defaulttype,tkinter.X,0) + return + #================================================================== + # + def updateStatsScale(self): + value=self.StatsScale.get() + for i in range(len(self.__scaleList)): + (num,text)=self.__scaleList[i] + if num==value: + self.StatsScaleText.set(text) + self.GridsScale.set(num) + self.GridsScaleText.set(text) + self.GridsStatsScale.set(num) + self.GridsStatsScaleText.set(text) + self.DistsScale.set(num) + self.DistsScaleText.set(text) + break + def updateStatsTwoCatType(self): + self.StatsDisplay.set("TwoCat") + return + #================================================================== + # + def pickStatsThreshold(self): + self.StatsDisplay.set("Percent Err <") + self.updateStatsThreshold() + return + + def updateStatsThreshold(self): + parmList=self.getCheckList(self.StatsParms) + if len(parmList)<1: + return + tlist=[] + plist=[] + for parm in parmList: + readParm=parm + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + thresholds=self.__VU.getVerThresholds(readParm) + if last3 in ("Spd","Dir"): + (threshmag,threshdir)=thresholds + if last3=="Spd": + thresholds=threshmag + else: + thresholds=threshdir + if len(tlist)>0: + same=1 + for j in range(len(tlist)): + thresh=tlist[j] + parms=plist[j] + same=1 + for i in range(len(thresh)): + if thresh[i]!=thresholds[i]: + same=0 + break + if 
same==1: + plist[j]+=",%s"%parm + break + if same!=1: + tlist.append(thresholds) + plist.append(parm) + else: + tlist.append(thresholds) + plist.append(parm) + + dthresh=[] + if len(tlist)>1: + for j in range(len(tlist)): + thresh=tlist[j] + parms=plist[j] + for i in range(len(thresh)): + t=thresh[i] + str="%d"%(t) + if len(dthresh)<(i+1): + dthresh.append(str) + else: + dthresh[i]+=" | %s"%str + else: + thresh=tlist[0] + for i in range(len(thresh)): + t=thresh[i] + str="%d"%t + dthresh.append(str) + # + # + parmList=" | ".join(plist) + self.thresholdStatsPopup.entryconfigure(0,label=parmList) + for i in range(len(dthresh)): + self.thresholdStatsPopup.entryconfigure(i+1,label=dthresh[i]) + #print " ",i,dthresh[i] + self.StatsThresholdValue.set(dthresh[self.StatsThreshold.get()]) + return + #================================================================== + # + # Frame that specifies the options for the Scale vs Stat display + # + def OptionsScaleStats(self,master): + # + # parameter + # + self.ScaleStatsParm=tkinter.StringVar() + parms=self.__VU.getVerParmsVect() + defaultParm=parms[0] + self.radioGroup(master,"Parameter:",self.ScaleStatsParm,parms,defaultParm,tkinter.BOTH,1) + # + # Area list + # + af=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + alist=self.__VU.listEditAreaDescriptions() + alist[0:0]=["Current"] + defaultArea=alist[0] + maxwid=0 + for area in alist: + if len(area)>maxwid: + maxwid=len(area) + if len(alist)>5: + maxheight=5 + else: + maxheight=len(alist) + + acomb=tkinter.Frame(af) + self.ScaleStatsAreaCombine=tkinter.IntVar() + #comb=Tkinter.Checkbutton(acomb,text="Combine",variable=self.StatsAreaCombine) + self.ScaleStatsAreaCombine.set(1) # always set + #comb.pack(side=Tkinter.RIGHT,anchor=Tkinter.E) + sLabel=tkinter.Label(acomb,text="Edit Area:") + sLabel.pack(side=tkinter.LEFT,anchor=tkinter.W) + acomb.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X) + sb=tkinter.Scrollbar(af,orient=tkinter.VERTICAL) + 
self.ScaleStatsAreasListbox=tkinter.Listbox(af,yscrollcommand=sb.set, + selectmode=tkinter.EXTENDED,width=maxwid,height=maxheight) + sb.config(command=self.ScaleStatsAreasListbox.yview) + sb.pack(side=tkinter.RIGHT,fill=tkinter.Y) + self.ScaleStatsAreasListbox.pack(side=tkinter.LEFT,fill=tkinter.BOTH,expand=1) + idx=0 + for item in alist: + self.ScaleStatsAreasListbox.insert(tkinter.END,item) + if item in defaultArea: + self.ScaleStatsAreasListbox.select_set(idx) + idx+=1 + + af.pack(side=tkinter.TOP,anchor=tkinter.N,fill=tkinter.BOTH,expand=1) + # + # display + # + self.ScaleStatsDisplay=tkinter.StringVar() + radioFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + labFrame=tkinter.Frame(radioFrame) + + radioLabel=tkinter.Label(labFrame,text="Display:") + radioLabel.pack(side=tkinter.LEFT,anchor=tkinter.W) + labFrame.pack(side=tkinter.TOP,anchor=tkinter.W,fill=tkinter.X,expand=1) + + a=tkinter.Radiobutton(radioFrame,text="Bias", + variable=self.ScaleStatsDisplay,value="Bias") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + self.ScaleStatsDisplay.set("Bias") + a=tkinter.Radiobutton(radioFrame,text="Mean Abs Error", + variable=self.ScaleStatsDisplay,value="Mean Abs Error") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + a=tkinter.Radiobutton(radioFrame,text="RMS Error", + variable=self.ScaleStatsDisplay,value="RMS Error") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + a=tkinter.Radiobutton(radioFrame,text="Mean Squared Error", + variable=self.ScaleStatsDisplay,value="Mean Squared Error") + a.pack(side=tkinter.TOP,anchor=tkinter.W) + + threshFrame=tkinter.Frame(radioFrame) + a=tkinter.Radiobutton(threshFrame,text="Percent Err <", + variable=self.ScaleStatsDisplay,value="Percent Err <") + a.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.ScaleStatsThreshold=tkinter.IntVar() + self.ScaleStatsThresholdValue=tkinter.StringVar() + but=tkinter.Menubutton(threshFrame,textvariable=self.ScaleStatsThresholdValue, + relief=tkinter.RAISED,indicatoron=1) + 
but.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.thresholdScaleStatsPopup=tkinter.Menu(but,tearoff=0) + self.thresholdScaleStatsPopup.add_command(label="stuff") + for i in range(self.__VU.getCFG('NUMTHRESH')): + self.thresholdScaleStatsPopup.add_radiobutton(label="xxxx",indicatoron=0,value=i, + variable=self.ScaleStatsThreshold, + command=self.pickScaleStatsThreshold) + but.config(menu=self.thresholdScaleStatsPopup) + threshFrame.pack(side=tkinter.TOP,anchor=tkinter.W) + twocatFrame=tkinter.Frame(radioFrame) + a=tkinter.Radiobutton(twocatFrame,text="",variable=self.ScaleStatsDisplay, + value="TwoCat") + a.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.scaleStatsTwoCatType=tkinter.StringVar() + but=tkinter.Menubutton(twocatFrame,textvariable=self.scaleStatsTwoCatType, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.LEFT,anchor=tkinter.W) + self.scaleStatsTwoCatTypePopup=tkinter.Menu(but,tearoff=0) + for stat in ["Hits","Areal Hits","Misses","Areal Misses", + "False Alarms","Areal False Alarms","Correct Negatives", + "Areal Correct Negatives", + "Frequency Observed", + "Frequency Forecast", + "Fraction Correct", + "Areal Fraction Correct", + "Frequency Bias","Areal Frequency Bias", + "POD","Areal POD","FAR","Areal FAR","Threat Score", + "Areal Threat Score","Equitable Threat Score", + "Areal Equitable Threat Score","True Skill Score", + "Areal True Skill Score","Heidke Skill Score", + "Areal Heidke Skill Score","Odds Ratio","Areal Odds Ratio"]: + self.scaleStatsTwoCatTypePopup.add_radiobutton(label=stat, + indicatoron=0,value=stat, + variable=self.scaleStatsTwoCatType,command=self.updateScaleStatsTwoCatType) + self.scaleStatsTwoCatType.set("Fraction Correct") + but.config(menu=self.scaleStatsTwoCatTypePopup) + self.scaleStatsTwoCatCond=tkinter.StringVar() + but=tkinter.Menubutton(twocatFrame,textvariable=self.scaleStatsTwoCatCond, + relief=tkinter.RAISED,indicatoron=1) + but.pack(side=tkinter.LEFT,anchor=tkinter.W) + 
self.scaleStatsTwoCatCondPopup=tkinter.Menu(but,tearoff=0) + for cond in [">",">=","<=","<"]: + self.scaleStatsTwoCatCondPopup.add_radiobutton(label=cond, + indicatoron=0,value=cond, + variable=self.scaleStatsTwoCatCond,command=self.updateScaleStatsTwoCatType) + self.scaleStatsTwoCatCond.set(">") + but.config(menu=self.scaleStatsTwoCatCondPopup) + self.scaleStatsTwoCatValueString=tkinter.StringVar() + ent=tkinter.Entry(twocatFrame,textvariable=self.scaleStatsTwoCatValueString, + width=5,relief=tkinter.SUNKEN) + self.scaleStatsTwoCatValueString.set("0.0") + ent.pack(side=tkinter.LEFT,anchor=tkinter.W) + twocatFrame.pack(side=tkinter.TOP,anchor=tkinter.W) + radioFrame.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=tkinter.X,expand=0) + self.updateScaleStatsThreshold() + return + def updateScaleStatsTwoCatType(self): + self.ScaleStatsDisplay.set("TwoCat") + return + #================================================================== + # + def pickScaleStatsThreshold(self): + self.ScaleStatsDisplay.set("Percent Err <") + self.updateScaleStatsThreshold() + return + + def updateScaleStatsThreshold(self): + parm=self.ScaleStatsParm.get() + readParm=parm + last3="xxx" + if len(parm)>3: + last3=parm[-3:] + if ((last3=="Spd")or(last3=="Dir")): + readParm=parm[:-3] + thresholds=self.__VU.getVerThresholds(readParm) + if last3 in ("Spd","Dir"): + (threshmag,threshdir)=thresholds + if last3=="Spd": + thresholds=threshmag + else: + thresholds=threshdir + + dthresh=[] + for i in range(len(thresholds)): + t=thresholds[i] + str="%d"%t + dthresh.append(str) + # + # + self.thresholdScaleStatsPopup.entryconfigure(0,label=parm) + for i in range(len(dthresh)): + self.thresholdScaleStatsPopup.entryconfigure(i+1,label=dthresh[i]) + self.ScaleStatsThresholdValue.set(dthresh[self.ScaleStatsThreshold.get()]) + return + #================================================================= + # displayGroup - make a group of radio buttons with scale stuff + # + def 
displayGroup(self,master,labeltext,var,valuelist,defaultvalue,filltype,expandflag): + radioFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + radioLabel=tkinter.Label(radioFrame,text=labeltext) + radioLabel.pack(side=tkinter.TOP,anchor=tkinter.W) + for item in valuelist: + a=tkinter.Radiobutton(radioFrame,text=item, + variable=var,value=item) + a.pack(side=tkinter.TOP,anchor=tkinter.W) + if item is defaultvalue: + var.set(item) + radioFrame.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=filltype,expand=expandflag) + #================================================================= + # radioGroup - make a group of radio buttons + # + def radioGroup(self,master,labeltext,var,valuelist,defaultvalue,filltype, + expandflag,callback=None): + radioFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + radioLabel=tkinter.Label(radioFrame,text=labeltext) + radioLabel.pack(side=tkinter.TOP,anchor=tkinter.W) + for item in valuelist: + a=tkinter.Radiobutton(radioFrame,text=item, + variable=var,value=item,command=callback) + a.pack(side=tkinter.TOP,anchor=tkinter.W) + if item is defaultvalue: + var.set(item) + radioFrame.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=filltype,expand=expandflag) + #================================================================= + # checkGroup - make a group of check buttons + # + def checkGroup(self,master,labeltext,varlist,valuelist, + defaultvalues,filltype,expandflag,callback=None,maxRows=30): + checkFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + checkLabel=tkinter.Label(checkFrame,text=labeltext) + checkLabel.pack(side=tkinter.TOP,anchor=tkinter.W) + cnt=0 + row=0 + col=0 + f=tkinter.Frame(checkFrame,relief=tkinter.FLAT,borderwidth=2) + f.pack(side=tkinter.TOP,anchor=tkinter.W) + if len(valuelist) > maxRows: + ncols = (len(valuelist) - 1)/maxRows + 1 + maxRows = (len(valuelist) - 1)/ncols + 1 + for item in valuelist: + a=tkinter.Checkbutton(f,text=item,variable=varlist[cnt], + 
onvalue=item,offvalue="",command=callback) + if item in defaultvalues: + varlist[cnt].set(item) + + a.grid(row=row,column=col,sticky=tkinter.NW) + print("Formatting row %d col %d for %s" % (row,col,item)) + row=row+1 + if row == maxRows: + row = 0 + col =col + 1 + cnt=cnt+1 + checkFrame.pack(side=tkinter.TOP,fill=filltype,expand=expandflag) + return varlist + #================================================================= + # getCheckList - get list of values turned on in the checkbutton list + # + def getCheckList(self,checklist): + outlist=[] + for i in range(len(checklist)): + a=checklist[i].get() + if a!="": + outlist.append(a) + return outlist + #================================================================= + # sListbox - make a listbox with a scrollbar + # + def sListbox(self,master,labeltext,itemlist,defaultItems, + maxwid,maxheight,smode,filltype=tkinter.BOTH,expandflag=1): + sLabel=tkinter.Label(master,text=labeltext) + sLabel.pack(side=tkinter.TOP,anchor=tkinter.W) + sb=tkinter.Scrollbar(master,orient=tkinter.VERTICAL) + slb=tkinter.Listbox(master,yscrollcommand=sb.set, + selectmode=smode,width=maxwid,height=maxheight) + sb.config(command=slb.yview) + sb.pack(side=tkinter.RIGHT,fill=tkinter.Y) + slb.pack(side=tkinter.LEFT,fill=filltype,expand=expandflag) + idx=0 + for item in itemlist: + slb.insert(tkinter.END,item) + if item in defaultItems: + slb.select_set(idx) + idx+=1 + return slb + #================================================================= + # getListbox - get list of values turned on in the listbox + # + def getListbox(self,listbox): + outlist=[] + itemnums=listbox.curselection() + try: + itemnums=list(map(int,itemnums)) + except ValueError: pass + for itemnum in itemnums: + outlist.append(listbox.get(itemnum)) + return outlist + #================================================================= + def setDatetype(self): + type=self.Datetype.get() + if type=="Period Length": + self.ByList.pack_forget() + 
self.ByPeriod.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=tkinter.BOTH,expand=1) + else: + self.ByPeriod.pack_forget() + self.ByList.pack(side=tkinter.TOP,anchor=tkinter.NW,fill=tkinter.BOTH,expand=1) + #================================================================== + # getRecentDates - gets a list of date strings from today through + # numdays in the past. Also returns list of + # unix times for the beginning of each date. + def getRecentDates(self,numdays): + recentDateStrings=[] + recentDates=[] + (nyea,nmon,nday,nhou,nmin,nsec,nwda,nyda,ndst)=time.gmtime() + midtoday=calendar.timegm((nyea,nmon,nday,0,0,0,0,0,0)) + for i in range(numdays): + daymid=midtoday-(i*DAYSECS) + (gyr,gmo,gdy,ghr,gmi,gse,gwd,gyd,gds)=time.gmtime(daymid) + recentDateStrings.append("%4.4d/%2.2d/%2.2d"%(gyr,gmo,gdy)) + recentDates.append(daymid) + return recentDateStrings,recentDates +# +# Special global routines used in Histogram callback stuff to +# move data on/off the screen +# +def showmodel(self,modname): + if self.showmod[modname]==1: + self.cd.canvas.move(modname,0,-self.cd.curheight) + self.cd.canvas.lower(modname) + self.showmod[modname]=0 + self.modb[modname].config(fg="grey") + else: + self.cd.canvas.move(modname,0,self.cd.curheight) + self.cd.canvas.lift(modname) + self.showmod[modname]=1 + self.modb[modname].config(fg=self.colornames[modname]) + return +#===================================================================== +# +# Toggle stuff with "but" tag in the but1 list +# +def showBut1(self,but): + if but.isdigit(): + newbut="f%s"%but + but=newbut + if self.but1state.get(but)==1: + self.cd.canvas.move(but,0,-self.cd.curheight) + self.cd.canvas.lower(but) + self.but1state[but]=0 + self.but1[but].config(fg="grey") + else: + self.cd.canvas.move(but,0,self.cd.curheight) + self.cd.canvas.lift(but) + self.but1state[but]=1 + self.but1[but].config(fg="black") +#===================================================================== +# +# Turn off all but1 tags except first 
button +# +def startBut1(self): + for but in self.but1names: + if but!=self.but1names[0]: + showBut1(self,but) +#===================================================================== +# +# Move toggled but1 buttons - one to the left +# +def prevBut1(self): + newbut=[] + for but in self.but1names: + if self.but1state.get(but)==1: + newbut.append(1) + else: + newbut.append(0) + temp=newbut[0] + del newbut[0] + newbut.append(temp) + for i in range(len(self.but1names)): + but=self.but1names[i] + now=self.but1state[but] + after=newbut[i] + if ((now==1)and(after==0)): + self.cd.canvas.move(but,0,-self.cd.curheight) + self.cd.canvas.lower(but) + self.but1state[but]=0 + self.but1[but].config(fg="grey") + elif ((now==0)and(after==1)): + self.cd.canvas.move(but,0,self.cd.curheight) + self.cd.canvas.lift(but) + self.but1state[but]=1 + self.but1[but].config(fg="black") +#===================================================================== +# +# Move toggled but1 buttons - one to the right +# +def nextBut1(self): + newbut=[] + for but in self.but1names: + if self.but1state.get(but)==1: + newbut.append(1) + else: + newbut.append(0) + temp=newbut.pop() + newbut[0:0]=[temp] + for i in range(len(self.but1names)): + but=self.but1names[i] + now=self.but1state[but] + after=newbut[i] + if ((now==1)and(after==0)): + self.cd.canvas.move(but,0,-self.cd.curheight) + self.cd.canvas.lower(but) + self.but1state[but]=0 + self.but1[but].config(fg="grey") + elif ((now==0)and(after==1)): + self.cd.canvas.move(but,0,self.cd.curheight) + self.cd.canvas.lift(but) + self.but1state[but]=1 + self.but1[but].config(fg="black") +#===================================================================== +# +# Toggle stuff with "but" tag in the but2 list +# +def showBut2(self,but): + if self.but2state.get(but)==1: + self.cd.canvas.move(but,-self.cd.curwidth,0) + self.cd.canvas.lower(but) + self.but2state[but]=0 + self.but2[but].config(fg="grey") + else: + self.cd.canvas.move(but,self.cd.curwidth,0) + 
self.cd.canvas.lift(but) + self.but2state[but]=1 + self.but2[but].config(fg=self.colornames[but]) +#===================================================================== +# +# Turn off all but2 tags except first button +# +def startBut2(self): + for but in self.but2names: + if but!=self.but2names[0]: + showBut2(self,but) +#===================================================================== +# +# Move toggled but2 buttons - one to the left +# +def prevBut2(self): + newbut=[] + for but in self.but2names: + if self.but2state.get(but)==1: + newbut.append(1) + else: + newbut.append(0) + temp=newbut[0] + del newbut[0] + newbut.append(temp) + for i in range(len(self.but2names)): + but=self.but2names[i] + now=self.but2state[but] + after=newbut[i] + if ((now==1)and(after==0)): + self.cd.canvas.move(but,-self.cd.curwidth,0) + self.cd.canvas.lower(but) + self.but2state[but]=0 + self.but2[but].config(fg="grey") + elif ((now==0)and(after==1)): + self.cd.canvas.move(but,self.cd.curwidth,0) + self.cd.canvas.lift(but) + self.but2state[but]=1 + self.but2[but].config(fg=self.colornames[but]) +#===================================================================== +# +# Move toggled but2 buttons - one to the right +# +def nextBut2(self): + newbut=[] + for but in self.but2names: + if self.but2state.get(but)==1: + newbut.append(1) + else: + newbut.append(0) + temp=newbut.pop() + newbut[0:0]=[temp] + for i in range(len(self.but2names)): + but=self.but2names[i] + now=self.but2state[but] + after=newbut[i] + if ((now==1)and(after==0)): + self.cd.canvas.move(but,-self.cd.curwidth,0) + self.cd.canvas.lower(but) + self.but2state[but]=0 + self.but2[but].config(fg="grey") + elif ((now==0)and(after==1)): + self.cd.canvas.move(but,self.cd.curwidth,0) + self.cd.canvas.lift(but) + self.but2state[but]=1 + self.but2[but].config(fg=self.colornames[but]) +# +# debug stuff for memory usage +# +_proc_status="/proc/%d/status"%os.getpid() +_scale={'kB':1024.0,'mB':1024.0*1024.0, + 
'KB':1024.0,'MB':1024.0*1024.0} +def _VmB(VmKey): + try: + t=open(_proc_status) + v=t.read() + t.close() + except IOError: + return 0.0 + i=v.index(VmKey) + v=v[i:].split(None,3) + if len(v)<3: + return 0.0 + return float(v[1])*_scale[v[2]] +def memory(): + return _VmB('VmSize:') +def resident(): + return _VmB('VmRSS:') +# +# stuff to support a callback with a pre-known variable +# +def GenericCallback(callback, *firstArgs, **firstKWArgs): + if firstKWArgs: + return GC(callback, *firstArgs, **firstKWArgs) + else: + return GCNoKWArgs(callback, *firstArgs) +# +# Classes for callbacks +# +class GC: + def __init__(self,callback,*firstArgs, **firstKWArgs): + self.__callback=callback + self.__firstArgs=firstArgs + self.__firstKWArgs=firstKWArgs + def __call__(self, *lastArgs, **kwArgs): + if kwArgs: + netKWArgs=self.__firstKWArgs.copy() + netKWArgs.update(self.__kwArgs) + else: + netKWArgs=self.__firstKWArgs + return self.__callback (*(self.__firstArgs+lastArgs),**netKWArgs) +class GCNoKWArgs: + def __init__(self, callback, *firstArgs): + self.__callback=callback + self.__firstArgs=firstArgs + def __call__(self, *args, **kwArgs): + return self.__callback (*(self.__firstArgs+args),**kwArgs) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerifyAutoCalc.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerifyAutoCalc.py index 2af72b4e70..deef92c74f 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerifyAutoCalc.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerifyAutoCalc.py @@ -1,437 +1,437 @@ - -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# BOIVerifyAutoCalc - Version 2.0.5 -# -# Calculate verification stats for the specified parms for all models -# and all edit areas listed in the pre-defined edit area list: -# -# Author: Tim Barker - SOO BOI -# 2005/07/01 - version 0.0 - original implementation -# 2005/07/29 - version 0.1 - update to grid database structure -# 2006/11/06 - version 1.0 - Make only one procedure! And have parms -# specified via the file name FILE (constant below). If -# the editable grid is not there - create it temporarily -# while the tool runs. It runs fast enough to be OK - but -# still very slow. -# 2007/10/25 - version 2.0 - Turned into a procedure rather than a tool. -# Lots of work to make it faster. Still slower than I -# would like. -# 2008/05/28 - version 2.0.5 - Removed accidental inclusion in populate -# menu. Made it not run for periods that havent -# quite finished yet - or have finished only recently. -# -# 2010/04/23 ryu Initial port to AWIPS II. -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 11/21/13 16770 ryu Change name of temporary files -# for dual domain. -# -#============================================================================= - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# -# Do not show this in any menu. 
Should only be run via runProcedure after -# putting the parms to save in /tmp/_FILE (one on each line) -# -#MenuItems = ["Verify"] -# -MenuItems = ["None"] -# -# imports -# -from numpy import * -import os,os.path,time,calendar -import SmartScript,BOIVerifyUtility -# -# CONSTANTS -# -HOURS=60*60 -FILE="BOIVerifyAutoCalc.txt" -# -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self._dbss=dbss - # - # - # - def execute(self, editArea): - self.VU=BOIVerifyUtility.BOIVerifyUtility(self._dbss, None) - self.VU.logMsg("BOIVerifyAutoCalc Procedure Start") - - # - # When testing - I often set Debug up higher to see what is going on - # - #self.VU.setDebug(0) - # - # AUTOCALC_DAYS configuration should hold the number of DAYSBACK - # to scan for new observation data. If not specified - default - # to 5 days - # - DAYSBACK=self.VU.getCFG('AUTOCALC_DAYS') - if DAYSBACK is None: - DAYSBACK=5 - # - # AUTOCALC_TOO_RECENT configuration is number of hours that - # indicates an observation grid is too recent to make calculations - # of it. Thus figuring that it might get revised later - and we - # don't want to take the time to calculate these scores again - - # so don't do it yet. 
- # - TOO_RECENT=self.VU.getCFG('AUTOCALC_TOO_RECENT') - if TOO_RECENT is None: - TOO_RECENT=12 - # - # Read the parms from the FILE file - # - parmlist=[] - filename="/tmp/%s_%s"% (self.getSiteID(), FILE) - if ((os.path.exists(filename)) and (os.path.isfile(filename))): - try: - infile=file(filename,"r") - parmlines=infile.readlines() - infile.close() - os.remove(filename) - for line in parmlines: - parm=line.strip() - if len(parm)>0: - parmlist.append(parm) - except: - self.VU.logMsg("Difficulty reading %s"%filename) - # - # If no parms in parmlist - run for all parms - # - if len(parmlist)==0: - parmlist=self.VU.listParms() - # - # - # - now=time.time() - (nyea,nmon,nday,nhou,nmin,nsec,nwda,nyda,ndst)=time.gmtime(time.time()) - enddate=calendar.timegm((nyea,nmon,nday,0,0,0,0,0,0)) - # - # parms and models obtained from data archive - # editarea list from configuration - # - allParms=self.VU.listParms() - parms=parmlist - models=self.VU.listModels() - # - editAreaNames=self.VU.listEditAreas() - maxareas=self.VU.CFG['STATAREAS'] - maxstats=self.VU.CFG['STATTYPES'] - # - # Setup big arrays where data to be stored will be written - # - sums=zeros((maxareas,maxstats),float32) - sumsDir=zeros((maxareas,maxstats),float32) - sumsMag=zeros((maxareas,maxstats),float32) - # - # Setup the edit areas - # - # pts and eas are only as big as the number of edit - # areas actually being calculated. Then we - # use numedit to put it in the right slot of - # sums. 
- # - numAreas=len(editAreaNames) - shape=self.getGridShape() - allpts=shape[0] * shape[1] - eas=zeros((allpts,numAreas)) - numedit=[] - pts=[] - for i in xrange(numAreas): - areaname=editAreaNames[i] - if areaname=="NONE": - ea=self.newGrid(True, bool) - else: - ea=self.encodeEditArea(areaname) - eas[:,i]=ea.flat - pt=int(add.reduce(add.reduce(ea))) - if pt<1: - pt=1 - pts.append(pt) - numedit.append(self.VU.getEditAreaNumberFromName(areaname)) - self.VU.logMsg("Using %d edit areas out of %d"%(numAreas,maxareas),1) - # - # Loop over all ObsModels - # - obsModels=self.VU.getCFG('OBSMODELS') - for obsModel in obsModels: - self.VU.logMsg("Working on %s observations"%obsModel,0) - # - # Loop over parms - # - for parm in parms: - if parm not in allParms: - continue - self.VU.logMsg(" %s Grids"%parm,0) - datatype=self.VU.getVerParmType(parm) - if datatype is None: - continue - if datatype==1: - statMagSums=zeros((maxstats,allpts),float) - statDirSums=zeros((maxstats,allpts),float) - else: - statSums=zeros((maxstats,allpts),float) - numthresh=self.VU.getCFG('NUMTHRESH') - thresholds=self.VU.getVerThresholds(parm) - if datatype==1: - magThresholds=thresholds[0] - dirThresholds=thresholds[1] - obsParm=self.VU.getObsParm(parm) - obsReadMode=self.VU.getReadMode(obsModel,obsParm) - # - # Loop over ver models - # - for model in models: - if model in self.VU.getCFG('OBSMODELS'): - continue - self.VU.logMsg(" %s model"%model,0) - fcstReadMode=self.VU.getReadMode(model,parm) - # - # Get the cases for obs over past DAYSBACK days - # - caseInfo=self.VU.getCommonCases(parm,model,obsParm,obsModel, - "Verifying on","Period Length",fromDay=enddate, - numDays=DAYSBACK, dayList=[], fhrStart=-24, - fhrEnd=self.VU.MAXFORECASTHOUR,fcstrs=-1, - cycles=-1) - cases=caseInfo[model] - casekeys=cases.keys() - if len(casekeys)<1: - continue - # - # sort keys by the starting time...and put in reverse order - # so that we work backward - # - casekeys.sort() # first sort by basetime - 
casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) - casekeys.reverse() - lastobs="" - for key in casekeys: - pt1=time.time() - (basetimestr,stimestr,etimestr)=key.split(",") - basetime=int(basetimestr) - btup=time.gmtime(basetime) - cycle=btup[3] - stime=int(stimestr) - etime=int(etimestr) - etup=time.gmtime(etime) - (frecList,orecList)=cases[key] - if len(orecList)<1: - continue - fhour=(stime-basetime)/HOURS - if fhour<0: - self.VU.logMsg("%13s %8s %3d-hr Fcst ending %4.4d/%2.2d/%2.2d %2.2dZ -- skipped - no negative forecast hours"%(parm, - model,fhour,etup[0],etup[1],etup[2],etup[3]),2) - continue - # - # Only read the observed grid once - # - obskey=key.split(",",1)[1] - if obskey!=lastobs: - obsgrid=self.VU.getVerGrids(obsModel,stime,obsParm,stime,etime, - obsReadMode,orecList) - #if verType==1: - # obsdata=self.getProbVerGrid(readParm,obsdata) - # - # If this observation only saved recently...dont - # calculate yet - because it may change - # - ovtime=0 - for rec in orecList: - ovtime=max(self.VU.oncVtime[rec],ovtime) - ovhours=float(now-ovtime)/float(HOURS) - if ovhours_FILE (one on each line) +# +#MenuItems = ["Verify"] +# +MenuItems = ["None"] +# +# imports +# +from numpy import * +import os,os.path,time,calendar +import SmartScript,BOIVerifyUtility +# +# CONSTANTS +# +HOURS=60*60 +FILE="BOIVerifyAutoCalc.txt" +# +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss=dbss + # + # + # + def execute(self, editArea): + self.VU=BOIVerifyUtility.BOIVerifyUtility(self._dbss, None) + self.VU.logMsg("BOIVerifyAutoCalc Procedure Start") + + # + # When testing - I often set Debug up higher to see what is going on + # + #self.VU.setDebug(0) + # + # AUTOCALC_DAYS configuration should hold the number of DAYSBACK + # to scan for new observation data. 
If not specified - default + # to 5 days + # + DAYSBACK=self.VU.getCFG('AUTOCALC_DAYS') + if DAYSBACK is None: + DAYSBACK=5 + # + # AUTOCALC_TOO_RECENT configuration is number of hours that + # indicates an observation grid is too recent to make calculations + # of it. Thus figuring that it might get revised later - and we + # don't want to take the time to calculate these scores again - + # so don't do it yet. + # + TOO_RECENT=self.VU.getCFG('AUTOCALC_TOO_RECENT') + if TOO_RECENT is None: + TOO_RECENT=12 + # + # Read the parms from the FILE file + # + parmlist=[] + filename="/tmp/%s_%s"% (self.getSiteID(), FILE) + if ((os.path.exists(filename)) and (os.path.isfile(filename))): + try: + infile=file(filename,"r") + parmlines=infile.readlines() + infile.close() + os.remove(filename) + for line in parmlines: + parm=line.strip() + if len(parm)>0: + parmlist.append(parm) + except: + self.VU.logMsg("Difficulty reading %s"%filename) + # + # If no parms in parmlist - run for all parms + # + if len(parmlist)==0: + parmlist=self.VU.listParms() + # + # + # + now=time.time() + (nyea,nmon,nday,nhou,nmin,nsec,nwda,nyda,ndst)=time.gmtime(time.time()) + enddate=calendar.timegm((nyea,nmon,nday,0,0,0,0,0,0)) + # + # parms and models obtained from data archive + # editarea list from configuration + # + allParms=self.VU.listParms() + parms=parmlist + models=self.VU.listModels() + # + editAreaNames=self.VU.listEditAreas() + maxareas=self.VU.CFG['STATAREAS'] + maxstats=self.VU.CFG['STATTYPES'] + # + # Setup big arrays where data to be stored will be written + # + sums=zeros((maxareas,maxstats),float32) + sumsDir=zeros((maxareas,maxstats),float32) + sumsMag=zeros((maxareas,maxstats),float32) + # + # Setup the edit areas + # + # pts and eas are only as big as the number of edit + # areas actually being calculated. Then we + # use numedit to put it in the right slot of + # sums. 
+ # + numAreas=len(editAreaNames) + shape=self.getGridShape() + allpts=shape[0] * shape[1] + eas=zeros((allpts,numAreas)) + numedit=[] + pts=[] + for i in range(numAreas): + areaname=editAreaNames[i] + if areaname=="NONE": + ea=self.newGrid(True, bool) + else: + ea=self.encodeEditArea(areaname) + eas[:,i]=ea.flat + pt=int(add.reduce(add.reduce(ea))) + if pt<1: + pt=1 + pts.append(pt) + numedit.append(self.VU.getEditAreaNumberFromName(areaname)) + self.VU.logMsg("Using %d edit areas out of %d"%(numAreas,maxareas),1) + # + # Loop over all ObsModels + # + obsModels=self.VU.getCFG('OBSMODELS') + for obsModel in obsModels: + self.VU.logMsg("Working on %s observations"%obsModel,0) + # + # Loop over parms + # + for parm in parms: + if parm not in allParms: + continue + self.VU.logMsg(" %s Grids"%parm,0) + datatype=self.VU.getVerParmType(parm) + if datatype is None: + continue + if datatype==1: + statMagSums=zeros((maxstats,allpts),float) + statDirSums=zeros((maxstats,allpts),float) + else: + statSums=zeros((maxstats,allpts),float) + numthresh=self.VU.getCFG('NUMTHRESH') + thresholds=self.VU.getVerThresholds(parm) + if datatype==1: + magThresholds=thresholds[0] + dirThresholds=thresholds[1] + obsParm=self.VU.getObsParm(parm) + obsReadMode=self.VU.getReadMode(obsModel,obsParm) + # + # Loop over ver models + # + for model in models: + if model in self.VU.getCFG('OBSMODELS'): + continue + self.VU.logMsg(" %s model"%model,0) + fcstReadMode=self.VU.getReadMode(model,parm) + # + # Get the cases for obs over past DAYSBACK days + # + caseInfo=self.VU.getCommonCases(parm,model,obsParm,obsModel, + "Verifying on","Period Length",fromDay=enddate, + numDays=DAYSBACK, dayList=[], fhrStart=-24, + fhrEnd=self.VU.MAXFORECASTHOUR,fcstrs=-1, + cycles=-1) + cases=caseInfo[model] + casekeys=list(cases.keys()) + if len(casekeys)<1: + continue + # + # sort keys by the starting time...and put in reverse order + # so that we work backward + # + casekeys.sort() # first sort by basetime + 
casekeys.sort(lambda x,y: cmp(x.split(",",1)[1],y.split(",",1)[1])) + casekeys.reverse() + lastobs="" + for key in casekeys: + pt1=time.time() + (basetimestr,stimestr,etimestr)=key.split(",") + basetime=int(basetimestr) + btup=time.gmtime(basetime) + cycle=btup[3] + stime=int(stimestr) + etime=int(etimestr) + etup=time.gmtime(etime) + (frecList,orecList)=cases[key] + if len(orecList)<1: + continue + fhour=(stime-basetime)/HOURS + if fhour<0: + self.VU.logMsg("%13s %8s %3d-hr Fcst ending %4.4d/%2.2d/%2.2d %2.2dZ -- skipped - no negative forecast hours"%(parm, + model,fhour,etup[0],etup[1],etup[2],etup[3]),2) + continue + # + # Only read the observed grid once + # + obskey=key.split(",",1)[1] + if obskey!=lastobs: + obsgrid=self.VU.getVerGrids(obsModel,stime,obsParm,stime,etime, + obsReadMode,orecList) + #if verType==1: + # obsdata=self.getProbVerGrid(readParm,obsdata) + # + # If this observation only saved recently...dont + # calculate yet - because it may change + # + ovtime=0 + for rec in orecList: + ovtime=max(self.VU.oncVtime[rec],ovtime) + ovhours=float(now-ovtime)/float(HOURS) + if ovhours_FILE -# -#MenuItems = ["Verify"] -# -MenuItems = ["None"] -# -# Constants -# -FILE="BOIVerifyBiasCorr.txt" -HOURSECS=60*60 -DAYSECS=24*HOURSECS -# -# imports -# -from numpy import * -import os, time -import SmartScript,BOIVerifyUtility -import AbsTime -import Exceptions -# -# -DSPATTR_PORT = 0 # TODO - display attributes not working the same way -# -# -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self._dbss=dbss - - def execute(self, editArea): - (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime() - self.VU=BOIVerifyUtility.BOIVerifyUtility(self._dbss, None) - self.VU.logMsg("BOIVerifyBiasCorr Procedure Start") - - # - # - # BIASCORR_DAYS configuration should hold number of days usually used - # in the bias correction. 
If it doesn't...we default to 30 - # - VERPERIOD=self.VU.getCFG('BIASCORR_DAYS') - if VERPERIOD is None: - VERPERIOD=30 - # - # BIASCORR_MINDAYS configuration should hold minimum number of days - # needed to do a regression. If it doesn't...we default to 14 - # - MINDAYS=self.VU.getCFG('BIASCORR_MINDAYS') - if MINDAYS is None: - MINDAYS=14 - # - # BIASCORR_EXTRAPOLATE_PERCENTAGE configuration should hold the - # percentage of range of recent forecasts that we will extrapolate - # with regression. Outside of this...drop back toward using - # only average bias - # - FUZZ=self.VU.getCFG('BIASCORR_EXTRAPOLATE_PERCENTAGE') - if FUZZ is None: - FUZZ=10 - fuzz=FUZZ/100 - # - # Thes are the Parms we currently know how to bias correct - # - parmList=["T","MaxT","MinT","RH","MaxRH","MinRH"] - self.transformParms=["RH","MaxRH","MinRH"] - # - # Read the force flag, model, and obsmodel from the FILE file - # - force="0" - model="" - obsmodel="" - filename="/tmp/%s_%s"% (self.getSiteID(), FILE) - if ((os.path.exists(filename))and(os.path.isfile(filename))): - try: - infile=file(filename,"r") - inlines=infile.readlines() - infile.close() - os.remove(filename) - if len(inlines)>0: - line=inlines[0] - force=line.strip() - if len(inlines)>1: - line=inlines[1] - model=line.strip() - if len(inlines)>2: - line=inlines[2] - obsmodel=line.strip() - except: - self.VU.logMsg("Difficulty reading %s"%filename) - ## - ## testing - ## - ##model="NAM12" - ##obsmodel="Obs" - ##force="1" - ##self.VU.setDebug(10) - # - # If model is blank - give error and stop - # - if (model==""): - self.VU.logMsg("No model to correct - cannot continue") - return - if (obsmodel==""): - self.VU.logMsg("No obs model specified - cannot continue") - return - # - # if obsmodel is not a valid obsmodel - stop - # - if obsmodel not in self.VU.getCFG('OBSMODELS'): - self.VU.logMsg("%s is not a valid name for observations"%obsmodel) - return - # - # Get corresponding (non-bc) model run with same basetime - # - 
mutableModel=self.mutableID().modelName() - modeltime=self.mutableID().modelTime().unixTime() - mtime=time.strftime("%Y%m%d_%H%M",time.gmtime(modeltime)) - foundit=0 - for run in xrange(0,-10,-1): - dbid=self.findDatabase(model,version=run) - if dbid is not None: - inputtime=dbid.modelTime().unixTime() - if inputtime==modeltime: - foundit=1 - break - if foundit==0: - self.VU.logMsg("%s run from %s could not be found"%(model,mtime)) - self.VU.logMsg("Aborting!") - return - (myr,mmo,mdy,mhr,mmi,mse,mwd,myd,mds)=time.gmtime(modeltime) - yesterday=modeltime-DAYSECS - modelCycle=mhr - bigtr=self.createTimeRange(0,500,"Database",dbid) - self.VU.logMsg("for %s at %2.2dZ"%(model,modelCycle)) - # - # Loop over parms - # - for parm in parmList: - self.VU.logMsg("Working on %s"%parm) - (self.parmUnits,self.parmPrecision,self.parmMinval,self.parmMaxval, - self.parmColorTable,self.parmDisplayMinval, - self.parmDisplayMaxval)=self.getParmInfo(model,parm) - obsparm=self.VU.getObsParm(parm) - # - # get corresponding pairs of observed/fcst grids for last VERPERIOD days - # - dateStyle="Verifying on" - dateType="Period Length" - self.VU.logMsg(" getting cases",5) - cases=self.VU.getCases(parm,model,obsparm,obsmodel,dateStyle,dateType, - fromDay=yesterday,numDays=VERPERIOD, - cycles=mhr,fhrEnd=240) - caseInfo=cases[model] - del cases - fhrInfo={} - for key in caseInfo.keys(): - (basestr,startstr,endstr)=key.split(",") - basetime=int(basestr) - starttime=int(startstr) - fhrInfo[key]=(starttime-basetime)/HOURSECS - self.VU.logMsg(" done with getting cases",5) - # - # Loop over all forecast grids - # - try: - gridInfoList=self.getGridInfo(dbid,parm,"SFC",bigtr) - except Exception, e: - self.VU.logMsg(str(e)) - continue - - for gridInfo in gridInfoList: - starttime=gridInfo.gridTime().startTime().unixTime() - endtime=gridInfo.gridTime().endTime().unixTime() - forecastHour=int((starttime-modeltime)/3600.0) - endHour=int((endtime-modeltime)/3600.0) - self.VU.logMsg(" %d-hr 
forecast"%forecastHour) - # - # Check to see if guess grid is newer than output grid - # (with 10 minute overlap...) - # - origtr=self.createTimeRange(forecastHour,endHour,"Database",dbid) - guesstime=self.getLastUpdate(dbid,parm,"SFC",origtr) - outtime=self.getLastUpdate(mutableModel,parm,"SFC",origtr) - if ((guesstime<=(outtime-(10*60)))and(force=="0")): - self.VU.logMsg(" no new info - skipping") - continue - # - # Read the guess grid - # - inputGrid=self.getGrids(dbid,parm,"SFC",origtr,noDataError=0,cache=0) - if inputGrid is None: - self.VU.logMsg("Could not read input grid") - continue - if parm in self.transformParms: - inputGrid=self.Ztrans(inputGrid) - # - # Get old forecasts for this forecast hour - # - t1=time.time() - result=self.getOldForecasts(caseInfo,fhrInfo,parm,model,obsparm, - obsmodel,forecastHour,self.parmMinval, - self.parmMaxval) - t2=time.time() - self.VU.logMsg(" getting old forecasts took %6.2f"%(t2-t1),5) - (obsList,fcstList)=result - numGrids=len(obsList) - del result - # - # Do not calculate any grids if no previous observations/forecasts - # - if numGrids<1: - continue - # - # - if numGrids=MINDAYS: - self.VU.logMsg(" %d old obs/fcst grids used for regression"%numGrids,2) - # - # - # Linear regression of forecast anomalies to non-average - # forecast errors. - # - t1=time.time() - result=self.getRegression(avgFcst,avgErr,obsGrids,fcstGrids) - t2=time.time() - self.VU.logMsg(" getting Regression took %6.3f"%(t2-t1),5) - (correlation,stdind,stddep)=result - del result - del obsGrids - del fcstGrids - # - # Create the regressed part of error grid from the linear regression - # - regErr=(((correlation*stddep)/stdind)*(inputGrid-avgFcst)) - del avgFcst - del stddep - del stdind - del correlation - else: - self.VU.logMsg(" %d old obs/fcst grids - used plain bias"%numGrids,2) - regErr=self.empty() - # - # Make a multiplier for the regressed error. 
Normally 1.0...but - # when the forecast is more than fuzz% beyond the range of forecasts - # in the training period...start cutting back correction amount - # until getting back to zero regressed amount at (2*fuzz)% beyond. - # - multiplier=self.newGrid(1) - fuzzGrid=maximum((maxFcst-minFcst)*fuzz,0.01) # dont have zero - max1=maxFcst+fuzzGrid - multiplier=where(greater(inputGrid,max1),1.0-((inputGrid-max1)/fuzzGrid),multiplier) - min1=minFcst-fuzzGrid - multiplier=where(less(inputGrid,min1),1.0-((min1-inputGrid)/fuzzGrid),multiplier) - multiplier[less(multiplier,0.0)] = 0.0 - del min1 - del max1 - del minFcst - del maxFcst - del fuzzGrid - if self.VU.getDebug>=1: - count=count_nonzero(less(multiplier,0.98)) - count2=count_nonzero(less(multiplier,0.02)) - if count>0: - self.VU.logMsg(" %d fcst points are outliers and regression was reduced"%count,1) - if count2>0: - self.VU.logMsg(" %d points so far out that only average error used"%count2,1) - # - # Correct the forecast with the predicted error - # - corrected=inputGrid-(avgErr+(regErr*multiplier)) - del regErr - del avgErr - del inputGrid - del multiplier - # - # For Transformed Parms (like RH) transform it back to the real variable - # - if parm in self.transformParms: - corrected=self.Rtrans(corrected) - self.VU.logMsg(" done with making corrected grid",5) - # - # clip to legal values and save - # - corrected=clip(corrected,self.parmMinval,self.parmMaxval) - self.createGrid(mutableModel,parm,"SCALAR",corrected,origtr, - parm,None,self.parmPrecision,self.parmMinval, - self.parmMaxval,self.parmUnits) - del corrected - self.VU.logMsg(" mem:%d res:%d"%(memory(),resident()),10) - self.saveGrid(mutableModel,parm) - # - # Fix issues that can arise via doing each parm independently - # - # - # Make sure MaxT is as high as the max of all T grids - # - self.VU.logMsg("Checking for hourly T grids higher than MaxT") - gridInfoList=self.getGridInfo(mutableModel,"MaxT","SFC",bigtr) - for gridInfo in gridInfoList: - 
newtr=gridInfo.gridTime() - maxtgrid=self.getGrids(mutableModel,"MaxT","SFC",newtr,noDataError=0,cache=0) - if maxtgrid is not None: - maxoftgrid=self.getGrids(mutableModel,"T","SFC",newtr,mode="Max",noDataError=0,cache=0) - if maxoftgrid is not None: - maxt=maximum(maxoftgrid,maxtgrid) - changed=count_nonzero(greater(maxt,maxtgrid)) - if changed>0: - fhr=int((newtr.startTime().unixTime()-modeltime)/3600.0) - self.VU.logMsg("Had to update MaxT at %d-hrs to match hourly T grids at %d points"%(fhr,changed)) - self.createGrid(mutableModel,"MaxT","SCALAR",maxt,newtr) - self.saveGrid(mutableModel,"MaxT") - # - # Make sure Mint is as low as the min of all T grids - # - self.VU.logMsg("Checking for hourly T grids lower than MinT") - gridInfoList=self.getGridInfo(mutableModel,"MinT","SFC",bigtr) - for gridInfo in gridInfoList: - newtr=gridInfo.gridTime() - mintgrid=self.getGrids(mutableModel,"MinT","SFC",newtr,noDataError=0,cache=0) - if mintgrid is not None: - minoftgrid=self.getGrids(mutableModel,"T","SFC",newtr,mode="Min",noDataError=0,cache=0) - if minoftgrid is not None: - mint=minimum(minoftgrid,mintgrid) - changed=count_nonzero(less(mint,mintgrid)) - if changed>0: - fhr=int((newtr.startTime().unixTime()-modeltime)/3600.0) - self.VU.logMsg("Had to update MinT at %d-hrs to match hourly T grids at %d points"%(fhr,changed)) - self.createGrid(mutableModel,"MinT","SCALAR",mint,newtr) - self.saveGrid(mutableModel,"MinT") - # - # Make sure MaxRH is as high as the max of all RH grids - # - self.VU.logMsg("Checking for hourly RH grids higher than MaxRH") - gridInfoList=self.getGridInfo(mutableModel,"MaxRH","SFC",bigtr) - for gridInfo in gridInfoList: - newtr=gridInfo.gridTime() - maxrhgrid=self.getGrids(mutableModel,"MaxRH","SFC",newtr,noDataError=0,cache=0) - if maxrhgrid is not None: - maxofrhgrid=self.getGrids(mutableModel,"RH","SFC",newtr,mode="Max",noDataError=0,cache=0) - if maxofrhgrid is not None: - maxrh=maximum(maxofrhgrid,maxrhgrid) - 
changed=count_nonzero(greater(maxrh,maxrhgrid)) - if changed>0: - fhr=int((newtr.startTime().unixTime()-modeltime)/3600.0) - self.VU.logMsg("Had to update MaxRH at %d-hrs to match hourly RH grids at %d points"%(fhr,changed)) - self.createGrid(mutableModel,"MaxRH","SCALAR",maxrh,newtr) - self.saveGrid(mutableModel,"MaxRH") - # - # Make sure MinRH is as low as the min of all RH grids - # - self.VU.logMsg("Checking for hourly RH grids lower than MinRH") - gridInfoList=self.getGridInfo(mutableModel,"MinRH","SFC",bigtr) - for gridInfo in gridInfoList: - newtr=gridInfo.gridTime() - minrhgrid=self.getGrids(mutableModel,"MinRH","SFC",newtr,noDataError=0,cache=0) - if minrhgrid is not None: - minofrhgrid=self.getGrids(mutableModel,"RH","SFC",newtr,mode="Min",noDataError=0,cache=0) - if minofrhgrid is not None: - minrh=minimum(minofrhgrid,minrhgrid) - changed=count_nonzero(less(minrh,minrhgrid)) - if changed>0: - fhr=int((newtr.startTime().unixTime()-modeltime)/3600.0) - self.VU.logMsg("Had to update MinRH at %d-hrs to match hourly RH grids at %d points"%(fhr,changed)) - self.createGrid(mutableModel,"MinRH","SCALAR",minrh,newtr) - self.saveGrid(mutableModel,"MinRH") - # - # Create Td from T and RH - # - self.VU.logMsg("Making Td from T and RH") - (self.parmUnits,self.parmPrecision,self.parmMinval,self.parmMaxval, - self.parmColorTable,self.parmDisplayMinval, - self.parmDisplayMaxval)=self.getParmInfo(mutableModel,"Td") - Funits=0 - if self.parmUnits.find("F")>-1: - Funits=1 - gridInfoList=self.getGridInfo(mutableModel,"T","SFC",bigtr) - for gridInfo in gridInfoList: - newtr=gridInfo.gridTime() - tgrid=self.getGrids(mutableModel,"T","SFC",newtr,noDataError=0,cache=0) - if tgrid is not None: - rhgrid=self.getGrids(mutableModel,"RH","SFC",newtr,noDataError=0,cache=0) - if rhgrid is not None: - if Funits==1: - tc=(tgrid-32.0)*(5.0/9.0) - else: - tc=tgrid - rh=clip(rhgrid,0.5,99.5)/100.0 - x=(log(rh)/17.67)+(tc/(tc+243.5)) - tdc=(243.5*x)/(1-x) - if Funits==1: - 
Td=(tdc*9.0/5.0)+32.0 - else: - Td=tdc - Td=clip(Td,self.parmMinval,self.parmMaxval) - self.createGrid(mutableModel,"Td","SCALAR",Td,newtr) - self.saveGrid(mutableModel,"Td") - # - # Calculate TdMrn/TdAft from MinT/MaxRH and MaxT/MinRH values - # Only if the TdMrn parameter exists for the mutable database - # - starttime=AbsTime.AbsTime(int(modeltime)) - parmInfoMrn = self.getParmInfo(mutableModel,"TdMrn") - #if ((self.parmUnits!="NONE")or(self.parmPrecision!=-99)): - if parmInfoMrn != None: - self.VU.logMsg("Making TdMrn/TdAft grids") - (self.parmUnits,self.parmPrecision,self.parmMinval,self.parmMaxval, - self.parmColorTable,self.parmDisplayMinval, - self.parmDisplayMaxval) = parmInfoMrn - (runtr,gridTimes)=self.getGridTimes(mutableModel,"TdMrn","SFC",starttime,240) - for newtr in gridTimes: - maxRH=self.getGrids(mutableModel,"MaxRH","SFC",newtr,noDataError=0,cache=0) - if maxRH is not None: - minT=self.getGrids(mutableModel,"MinT","SFC",newtr,noDataError=0,cache=0) - if minT is not None: - if Funits==1: - tc=(minT-32.0)*(5.0/9.0) - else: - tc=MinT - rh=clip(maxRH,0.5,99.5)/100.0 - x=(log(rh)/17.67)+(tc/(tc+243.5)) - tdc=(243.5*x)/(1-x) - if Funits==1: - Td=(tdc*9.0/5.0)+32.0 - else: - Td=tdc - Td=clip(Td,self.parmMinval,self.parmMaxval) - self.createGrid(mutableModel,"TdMrn","SCALAR",Td,newtr) - self.saveGrid(mutableModel,"TdMrn") - (self.parmUnits,self.parmPrecision,self.parmMinval,self.parmMaxval, - self.parmColorTable,self.parmDisplayMinval, - self.parmDisplayMaxval)=self.getParmInfo(mutableModel,"TdAft") - (runtr,gridTimes)=self.getGridTimes(mutableModel,"TdAft","SFC",starttime,240) - for newtr in gridTimes: - minRH=self.getGrids(mutableModel,"MinRH","SFC",newtr,noDataError=0,cache=0) - if minRH is not None: - maxT=self.getGrids(mutableModel,"MaxT","SFC",newtr,noDataError=0,cache=0) - if maxT is not None: - if Funits==1: - tc=(maxT-32.0)*(5.0/9.0) - else: - tc=maxT - rh=clip(minRH,0.5,99.5)/100.0 - x=(log(rh)/17.67)+(tc/(tc+243.5)) - tdc=(243.5*x)/(1-x) - if 
Funits==1: - Td=(tdc*9.0/5.0)+32.0 - else: - Td=tdc - Td=clip(Td,self.parmMinval,self.parmMaxval) - self.createGrid(mutableModel,"TdAft","SCALAR",Td,newtr) - self.saveGrid(mutableModel,"TdAft") - self.VU.logMsg("BOIVerifyBiasCorr Procedure Finished") - return - #================================================================= - # getLastUpdate(self,model,parmname,level,tr) - # - # Gets the last update time for the grid for the model, parmname, - # level and timerange. Should only call this for a timerange where - # only one grid is located. If multiple grids are found - gives the - # last update for the FIRST one in the list. If no grids are found, - # it returns 0. - # - def getLastUpdate(self,model,parmname,level,tr): - ret=-1 - try: - historyList = self.getGridHistory(model, parmname, level, tr) - except: - return ret - if not historyList: - return ret - for history in historyList[0]: - if history[5] is not None: - upd = history[5].unixTime() - else: - upd = None - if upd>ret: - ret=upd - - return ret - #================================================================= - # - # RH transforms - # - def Ztrans(self,ingrid): - c=clip(ingrid,0.1,99.9) - return 0.5*log(c/(100-c)) - def Rtrans(self,ingrid): - v=exp(ingrid*2.0) - return (100*v)/(1.0+v) - #================================================================= - # getOldForecasts - Get list of matching forecast grids and - # observed grids for the specified forecast - # hour. 
- # - def getOldForecasts(self,caseInfo,fhrInfo,parm,model,obsparm,obsmodel,forecastHour, - minval,maxval): - obsList=[] - fcstList=[] - # - # - # - for key in caseInfo.keys(): - (freclist,oreclist)=caseInfo[key] - # - # Have to have both forecast and observed grids - # - if ((len(freclist)<1)or(len(oreclist)<1)): - continue - # - # Use only ones that match the current forecastHour - # - fhr=fhrInfo[key] - if fhr!=forecastHour: - continue - # - # Get average of non-bad forecast grids - # - numgrids=0 - fcstgrid=self.empty() - for frec in freclist: - testgrid=self.VU.readRecord(parm,model,frec) - if self.badGrid(testgrid,minval,maxval): - basetime=self.VU.fncBtime[frec] - (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(basetime) - self.VU.logMsg(" %d-hr forecast from %4.4d/%2.2d/%2.2d %2.2dZ run appears bad - so skipped"%(fhr, - byea,bmon,bday,bhou),2) - continue - fcstgrid+=testgrid - numgrids+=1 - if numgrids==0: - continue - fcstgrid/=float(numgrids) - # - # Get average of non-bad observed grids - # - numgrids=0 - obsgrid=self.empty() - for orec in oreclist: - testgrid=self.VU.readRecord(obsparm,obsmodel,orec) - if self.badGrid(testgrid,minval,maxval): - starttime=self.VU.oncStime[orec] - (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(starttime) - self.VU.logMsg(" Observed grid at %4.4d/%2.2d/%2.2d %2.2dZ appears bad - so skipped"%(syea, - smon,sday,shou),2) - continue - obsgrid+=testgrid - numgrids+=1 - if numgrids==0: - continue - obsgrid/=float(numgrids) - # - # Transform the ob/fcst if needed - # - if parm in self.transformParms: - obsgrid=self.Ztrans(obsgrid) - fcstgrid=self.Ztrans(fcstgrid) - # - # Add grid to lists of grids - # - obsList.append(obsgrid) - fcstList.append(fcstgrid) - # - # Return lists of forecast/observed grids - # - return(obsList,fcstList) - #================================================================= - # getRegression - # - # get linear regression predictors of non-average forecast errors - # from forecast 
anomalies (difference of forecast from average - # forecast) from the pairList forecasts - # - def getRegression(self,avgFcst,avgErr,obsGrids,fcstGrids): - # - # The independent variable is forecast anomaly (forecast-avgFcst) - # - ind=subtract(fcstGrids,avgFcst) - # - # The dependent variable is error anomaly (error-avgErr) - # - dep=subtract(fcstGrids-obsGrids,avgErr) - # - # Get covariance, and square of dependent and independent variables - # across the cases - # - covsum=add.reduce(dep*ind) - depsqr=add.reduce(dep*dep) - indsqr=add.reduce(ind*ind) - # - # Calculate standard deviation of indpendent variables - # - num=obsGrids.shape[0] - stdind=(indsqr/num)**0.5 # standard deviation of independent variable - stddep=(depsqr/num)**0.5 # standard deviation of dependent variable - stdind[less(stdind,0.001)] = 0.001 - stddep[less(stddep,0.001)] = 0.001 - covariance=covsum/num # covariance - correlation=covariance/(stdind*stddep) # correlation coefficient - return(correlation,stdind,stddep) - #================================================================== - # - # badGrid - very crude way to flag bad grids from calculations. - # Just looks for grids with virtually no variance. - # (this won't work for precip that typically does have - # no variance on days with no precip) - # returns 1 if bad, 0 if OK. 
- # - def badGrid(self,grid,minvalue,maxvalue): - gridShape = self.getGridShape() - numpts=gridShape[0]*gridShape[1] - avg=add.reduce(add.reduce(grid))/float(numpts) - sqr=add.reduce(add.reduce(grid*grid))/float(numpts) - var=sqr-(avg*avg) - if var<((maxvalue-minvalue)/1000.0): - return 1 - return 0 - #================================================================== - # - # - def getParmInfo(self,mutableModel,parm): - units="NONE" - precision=-99 - minval=0 - maxval=100 - colorTable="" - displayMinval=0 - displayMaxval=100 - parm=self.getParm(mutableModel,parm,"SFC") - if parm is not None: - parmInfo = parm.getGridInfo() - units=parmInfo.getUnitString() - precision=parmInfo.getPrecision() - minval=parmInfo.getMinValue() - maxval=parmInfo.getMaxValue() - if DSPATTR_PORT: - ctInfo=parmInfo.getDisplayAttributes().colorTable() - if ctInfo is not None: - colorTable=ctInfo.name() - displayMinval=ctInfo.minLimit() - displayMaxval=ctInfo.maxLimit() - return(units,precision,minval,maxval,colorTable, - displayMinval,displayMaxval) - #----------------------------------------------------------------- - # - # Save grid for the specified outdb and parmname - # - def saveGrid(self,outdb,parmname): - p=self.getParm(outdb,parmname,"SFC") - if p is not None: - p.saveParameter(1) - return -# -# debug stuff for memory usage -# -_proc_status="/proc/%d/status"%os.getpid() -_scale={'kB':1024.0,'mB':1024.0*1024.0, - 'KB':1024.0,'MB':1024.0*1024.0} -def _VmB(VmKey): - try: - t=open(_proc_status) - v=t.read() - t.close() - except IOError: - return 0.0 - i=v.index(VmKey) - v=v[i:].split(None,3) - if len(v)<3: - return 0.0 - return float(v[1])*_scale[v[2]] -def memory(): - return _VmB('VmSize:') -def resident(): - return _VmB('VmRSS:') - + +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# BOIVerifyBiasCorr2 - Version 2.0.5 +# +# Run the BOIVerifyBiasCorr tool +# +# Author: Tim Barker - SOO BOI +# 2005/07/01 - version 0.0 - original implementation +# 2005/07/29 - version 0.1 - update to grid database structure +# 2006/11/06 - version 1.0 - Make only one procedure! And have parms +# specified via the file name FILE (constant below). +# 2007/10/25 - version 2.0 - Everything now in procedure - for simpler +# maintenance and less tool/procedure complexity. +# Tones down regression estimate when forecast value is +# well outside recent forecasts - falling back to simple +# average error over past VERPERIOD days. Obs database +# must now be specified. Better logging. Better memory +# usage. +# 2008/05/28 - version 2.0.5 - bugfix for basetime/starttime when an +# archived grid is constant. +# +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 11/21/13 16770 ryu Change name of temporary files +# for dual domain. +#============================================================================= + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# +# Do not show this in any menu. 
Should only be run via runProcedure after +# putting the force flag and model to correct in /tmp/_FILE +# +#MenuItems = ["Verify"] +# +MenuItems = ["None"] +# +# Constants +# +FILE="BOIVerifyBiasCorr.txt" +HOURSECS=60*60 +DAYSECS=24*HOURSECS +# +# imports +# +from numpy import * +import os, time +import SmartScript,BOIVerifyUtility +import AbsTime +import Exceptions +# +# +DSPATTR_PORT = 0 # TODO - display attributes not working the same way +# +# +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss=dbss + + def execute(self, editArea): + (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime() + self.VU=BOIVerifyUtility.BOIVerifyUtility(self._dbss, None) + self.VU.logMsg("BOIVerifyBiasCorr Procedure Start") + + # + # + # BIASCORR_DAYS configuration should hold number of days usually used + # in the bias correction. If it doesn't...we default to 30 + # + VERPERIOD=self.VU.getCFG('BIASCORR_DAYS') + if VERPERIOD is None: + VERPERIOD=30 + # + # BIASCORR_MINDAYS configuration should hold minimum number of days + # needed to do a regression. If it doesn't...we default to 14 + # + MINDAYS=self.VU.getCFG('BIASCORR_MINDAYS') + if MINDAYS is None: + MINDAYS=14 + # + # BIASCORR_EXTRAPOLATE_PERCENTAGE configuration should hold the + # percentage of range of recent forecasts that we will extrapolate + # with regression. 
Outside of this...drop back toward using + # only average bias + # + FUZZ=self.VU.getCFG('BIASCORR_EXTRAPOLATE_PERCENTAGE') + if FUZZ is None: + FUZZ=10 + fuzz=FUZZ/100 + # + # Thes are the Parms we currently know how to bias correct + # + parmList=["T","MaxT","MinT","RH","MaxRH","MinRH"] + self.transformParms=["RH","MaxRH","MinRH"] + # + # Read the force flag, model, and obsmodel from the FILE file + # + force="0" + model="" + obsmodel="" + filename="/tmp/%s_%s"% (self.getSiteID(), FILE) + if ((os.path.exists(filename))and(os.path.isfile(filename))): + try: + infile=file(filename,"r") + inlines=infile.readlines() + infile.close() + os.remove(filename) + if len(inlines)>0: + line=inlines[0] + force=line.strip() + if len(inlines)>1: + line=inlines[1] + model=line.strip() + if len(inlines)>2: + line=inlines[2] + obsmodel=line.strip() + except: + self.VU.logMsg("Difficulty reading %s"%filename) + ## + ## testing + ## + ##model="NAM12" + ##obsmodel="Obs" + ##force="1" + ##self.VU.setDebug(10) + # + # If model is blank - give error and stop + # + if (model==""): + self.VU.logMsg("No model to correct - cannot continue") + return + if (obsmodel==""): + self.VU.logMsg("No obs model specified - cannot continue") + return + # + # if obsmodel is not a valid obsmodel - stop + # + if obsmodel not in self.VU.getCFG('OBSMODELS'): + self.VU.logMsg("%s is not a valid name for observations"%obsmodel) + return + # + # Get corresponding (non-bc) model run with same basetime + # + mutableModel=self.mutableID().modelName() + modeltime=self.mutableID().modelTime().unixTime() + mtime=time.strftime("%Y%m%d_%H%M",time.gmtime(modeltime)) + foundit=0 + for run in range(0,-10,-1): + dbid=self.findDatabase(model,version=run) + if dbid is not None: + inputtime=dbid.modelTime().unixTime() + if inputtime==modeltime: + foundit=1 + break + if foundit==0: + self.VU.logMsg("%s run from %s could not be found"%(model,mtime)) + self.VU.logMsg("Aborting!") + return + 
(myr,mmo,mdy,mhr,mmi,mse,mwd,myd,mds)=time.gmtime(modeltime) + yesterday=modeltime-DAYSECS + modelCycle=mhr + bigtr=self.createTimeRange(0,500,"Database",dbid) + self.VU.logMsg("for %s at %2.2dZ"%(model,modelCycle)) + # + # Loop over parms + # + for parm in parmList: + self.VU.logMsg("Working on %s"%parm) + (self.parmUnits,self.parmPrecision,self.parmMinval,self.parmMaxval, + self.parmColorTable,self.parmDisplayMinval, + self.parmDisplayMaxval)=self.getParmInfo(model,parm) + obsparm=self.VU.getObsParm(parm) + # + # get corresponding pairs of observed/fcst grids for last VERPERIOD days + # + dateStyle="Verifying on" + dateType="Period Length" + self.VU.logMsg(" getting cases",5) + cases=self.VU.getCases(parm,model,obsparm,obsmodel,dateStyle,dateType, + fromDay=yesterday,numDays=VERPERIOD, + cycles=mhr,fhrEnd=240) + caseInfo=cases[model] + del cases + fhrInfo={} + for key in list(caseInfo.keys()): + (basestr,startstr,endstr)=key.split(",") + basetime=int(basestr) + starttime=int(startstr) + fhrInfo[key]=(starttime-basetime)/HOURSECS + self.VU.logMsg(" done with getting cases",5) + # + # Loop over all forecast grids + # + try: + gridInfoList=self.getGridInfo(dbid,parm,"SFC",bigtr) + except Exception as e: + self.VU.logMsg(str(e)) + continue + + for gridInfo in gridInfoList: + starttime=gridInfo.gridTime().startTime().unixTime() + endtime=gridInfo.gridTime().endTime().unixTime() + forecastHour=int((starttime-modeltime)/3600.0) + endHour=int((endtime-modeltime)/3600.0) + self.VU.logMsg(" %d-hr forecast"%forecastHour) + # + # Check to see if guess grid is newer than output grid + # (with 10 minute overlap...) 
+ # + origtr=self.createTimeRange(forecastHour,endHour,"Database",dbid) + guesstime=self.getLastUpdate(dbid,parm,"SFC",origtr) + outtime=self.getLastUpdate(mutableModel,parm,"SFC",origtr) + if ((guesstime<=(outtime-(10*60)))and(force=="0")): + self.VU.logMsg(" no new info - skipping") + continue + # + # Read the guess grid + # + inputGrid=self.getGrids(dbid,parm,"SFC",origtr,noDataError=0,cache=0) + if inputGrid is None: + self.VU.logMsg("Could not read input grid") + continue + if parm in self.transformParms: + inputGrid=self.Ztrans(inputGrid) + # + # Get old forecasts for this forecast hour + # + t1=time.time() + result=self.getOldForecasts(caseInfo,fhrInfo,parm,model,obsparm, + obsmodel,forecastHour,self.parmMinval, + self.parmMaxval) + t2=time.time() + self.VU.logMsg(" getting old forecasts took %6.2f"%(t2-t1),5) + (obsList,fcstList)=result + numGrids=len(obsList) + del result + # + # Do not calculate any grids if no previous observations/forecasts + # + if numGrids<1: + continue + # + # + if numGrids=MINDAYS: + self.VU.logMsg(" %d old obs/fcst grids used for regression"%numGrids,2) + # + # + # Linear regression of forecast anomalies to non-average + # forecast errors. + # + t1=time.time() + result=self.getRegression(avgFcst,avgErr,obsGrids,fcstGrids) + t2=time.time() + self.VU.logMsg(" getting Regression took %6.3f"%(t2-t1),5) + (correlation,stdind,stddep)=result + del result + del obsGrids + del fcstGrids + # + # Create the regressed part of error grid from the linear regression + # + regErr=(((correlation*stddep)/stdind)*(inputGrid-avgFcst)) + del avgFcst + del stddep + del stdind + del correlation + else: + self.VU.logMsg(" %d old obs/fcst grids - used plain bias"%numGrids,2) + regErr=self.empty() + # + # Make a multiplier for the regressed error. 
Normally 1.0...but + # when the forecast is more than fuzz% beyond the range of forecasts + # in the training period...start cutting back correction amount + # until getting back to zero regressed amount at (2*fuzz)% beyond. + # + multiplier=self.newGrid(1) + fuzzGrid=maximum((maxFcst-minFcst)*fuzz,0.01) # dont have zero + max1=maxFcst+fuzzGrid + multiplier=where(greater(inputGrid,max1),1.0-((inputGrid-max1)/fuzzGrid),multiplier) + min1=minFcst-fuzzGrid + multiplier=where(less(inputGrid,min1),1.0-((min1-inputGrid)/fuzzGrid),multiplier) + multiplier[less(multiplier,0.0)] = 0.0 + del min1 + del max1 + del minFcst + del maxFcst + del fuzzGrid + if self.VU.getDebug>=1: + count=count_nonzero(less(multiplier,0.98)) + count2=count_nonzero(less(multiplier,0.02)) + if count>0: + self.VU.logMsg(" %d fcst points are outliers and regression was reduced"%count,1) + if count2>0: + self.VU.logMsg(" %d points so far out that only average error used"%count2,1) + # + # Correct the forecast with the predicted error + # + corrected=inputGrid-(avgErr+(regErr*multiplier)) + del regErr + del avgErr + del inputGrid + del multiplier + # + # For Transformed Parms (like RH) transform it back to the real variable + # + if parm in self.transformParms: + corrected=self.Rtrans(corrected) + self.VU.logMsg(" done with making corrected grid",5) + # + # clip to legal values and save + # + corrected=clip(corrected,self.parmMinval,self.parmMaxval) + self.createGrid(mutableModel,parm,"SCALAR",corrected,origtr, + parm,None,self.parmPrecision,self.parmMinval, + self.parmMaxval,self.parmUnits) + del corrected + self.VU.logMsg(" mem:%d res:%d"%(memory(),resident()),10) + self.saveGrid(mutableModel,parm) + # + # Fix issues that can arise via doing each parm independently + # + # + # Make sure MaxT is as high as the max of all T grids + # + self.VU.logMsg("Checking for hourly T grids higher than MaxT") + gridInfoList=self.getGridInfo(mutableModel,"MaxT","SFC",bigtr) + for gridInfo in gridInfoList: + 
newtr=gridInfo.gridTime() + maxtgrid=self.getGrids(mutableModel,"MaxT","SFC",newtr,noDataError=0,cache=0) + if maxtgrid is not None: + maxoftgrid=self.getGrids(mutableModel,"T","SFC",newtr,mode="Max",noDataError=0,cache=0) + if maxoftgrid is not None: + maxt=maximum(maxoftgrid,maxtgrid) + changed=count_nonzero(greater(maxt,maxtgrid)) + if changed>0: + fhr=int((newtr.startTime().unixTime()-modeltime)/3600.0) + self.VU.logMsg("Had to update MaxT at %d-hrs to match hourly T grids at %d points"%(fhr,changed)) + self.createGrid(mutableModel,"MaxT","SCALAR",maxt,newtr) + self.saveGrid(mutableModel,"MaxT") + # + # Make sure Mint is as low as the min of all T grids + # + self.VU.logMsg("Checking for hourly T grids lower than MinT") + gridInfoList=self.getGridInfo(mutableModel,"MinT","SFC",bigtr) + for gridInfo in gridInfoList: + newtr=gridInfo.gridTime() + mintgrid=self.getGrids(mutableModel,"MinT","SFC",newtr,noDataError=0,cache=0) + if mintgrid is not None: + minoftgrid=self.getGrids(mutableModel,"T","SFC",newtr,mode="Min",noDataError=0,cache=0) + if minoftgrid is not None: + mint=minimum(minoftgrid,mintgrid) + changed=count_nonzero(less(mint,mintgrid)) + if changed>0: + fhr=int((newtr.startTime().unixTime()-modeltime)/3600.0) + self.VU.logMsg("Had to update MinT at %d-hrs to match hourly T grids at %d points"%(fhr,changed)) + self.createGrid(mutableModel,"MinT","SCALAR",mint,newtr) + self.saveGrid(mutableModel,"MinT") + # + # Make sure MaxRH is as high as the max of all RH grids + # + self.VU.logMsg("Checking for hourly RH grids higher than MaxRH") + gridInfoList=self.getGridInfo(mutableModel,"MaxRH","SFC",bigtr) + for gridInfo in gridInfoList: + newtr=gridInfo.gridTime() + maxrhgrid=self.getGrids(mutableModel,"MaxRH","SFC",newtr,noDataError=0,cache=0) + if maxrhgrid is not None: + maxofrhgrid=self.getGrids(mutableModel,"RH","SFC",newtr,mode="Max",noDataError=0,cache=0) + if maxofrhgrid is not None: + maxrh=maximum(maxofrhgrid,maxrhgrid) + 
changed=count_nonzero(greater(maxrh,maxrhgrid)) + if changed>0: + fhr=int((newtr.startTime().unixTime()-modeltime)/3600.0) + self.VU.logMsg("Had to update MaxRH at %d-hrs to match hourly RH grids at %d points"%(fhr,changed)) + self.createGrid(mutableModel,"MaxRH","SCALAR",maxrh,newtr) + self.saveGrid(mutableModel,"MaxRH") + # + # Make sure MinRH is as low as the min of all RH grids + # + self.VU.logMsg("Checking for hourly RH grids lower than MinRH") + gridInfoList=self.getGridInfo(mutableModel,"MinRH","SFC",bigtr) + for gridInfo in gridInfoList: + newtr=gridInfo.gridTime() + minrhgrid=self.getGrids(mutableModel,"MinRH","SFC",newtr,noDataError=0,cache=0) + if minrhgrid is not None: + minofrhgrid=self.getGrids(mutableModel,"RH","SFC",newtr,mode="Min",noDataError=0,cache=0) + if minofrhgrid is not None: + minrh=minimum(minofrhgrid,minrhgrid) + changed=count_nonzero(less(minrh,minrhgrid)) + if changed>0: + fhr=int((newtr.startTime().unixTime()-modeltime)/3600.0) + self.VU.logMsg("Had to update MinRH at %d-hrs to match hourly RH grids at %d points"%(fhr,changed)) + self.createGrid(mutableModel,"MinRH","SCALAR",minrh,newtr) + self.saveGrid(mutableModel,"MinRH") + # + # Create Td from T and RH + # + self.VU.logMsg("Making Td from T and RH") + (self.parmUnits,self.parmPrecision,self.parmMinval,self.parmMaxval, + self.parmColorTable,self.parmDisplayMinval, + self.parmDisplayMaxval)=self.getParmInfo(mutableModel,"Td") + Funits=0 + if self.parmUnits.find("F")>-1: + Funits=1 + gridInfoList=self.getGridInfo(mutableModel,"T","SFC",bigtr) + for gridInfo in gridInfoList: + newtr=gridInfo.gridTime() + tgrid=self.getGrids(mutableModel,"T","SFC",newtr,noDataError=0,cache=0) + if tgrid is not None: + rhgrid=self.getGrids(mutableModel,"RH","SFC",newtr,noDataError=0,cache=0) + if rhgrid is not None: + if Funits==1: + tc=(tgrid-32.0)*(5.0/9.0) + else: + tc=tgrid + rh=clip(rhgrid,0.5,99.5)/100.0 + x=(log(rh)/17.67)+(tc/(tc+243.5)) + tdc=(243.5*x)/(1-x) + if Funits==1: + 
Td=(tdc*9.0/5.0)+32.0 + else: + Td=tdc + Td=clip(Td,self.parmMinval,self.parmMaxval) + self.createGrid(mutableModel,"Td","SCALAR",Td,newtr) + self.saveGrid(mutableModel,"Td") + # + # Calculate TdMrn/TdAft from MinT/MaxRH and MaxT/MinRH values + # Only if the TdMrn parameter exists for the mutable database + # + starttime=AbsTime.AbsTime(int(modeltime)) + parmInfoMrn = self.getParmInfo(mutableModel,"TdMrn") + #if ((self.parmUnits!="NONE")or(self.parmPrecision!=-99)): + if parmInfoMrn != None: + self.VU.logMsg("Making TdMrn/TdAft grids") + (self.parmUnits,self.parmPrecision,self.parmMinval,self.parmMaxval, + self.parmColorTable,self.parmDisplayMinval, + self.parmDisplayMaxval) = parmInfoMrn + (runtr,gridTimes)=self.getGridTimes(mutableModel,"TdMrn","SFC",starttime,240) + for newtr in gridTimes: + maxRH=self.getGrids(mutableModel,"MaxRH","SFC",newtr,noDataError=0,cache=0) + if maxRH is not None: + minT=self.getGrids(mutableModel,"MinT","SFC",newtr,noDataError=0,cache=0) + if minT is not None: + if Funits==1: + tc=(minT-32.0)*(5.0/9.0) + else: + tc=MinT + rh=clip(maxRH,0.5,99.5)/100.0 + x=(log(rh)/17.67)+(tc/(tc+243.5)) + tdc=(243.5*x)/(1-x) + if Funits==1: + Td=(tdc*9.0/5.0)+32.0 + else: + Td=tdc + Td=clip(Td,self.parmMinval,self.parmMaxval) + self.createGrid(mutableModel,"TdMrn","SCALAR",Td,newtr) + self.saveGrid(mutableModel,"TdMrn") + (self.parmUnits,self.parmPrecision,self.parmMinval,self.parmMaxval, + self.parmColorTable,self.parmDisplayMinval, + self.parmDisplayMaxval)=self.getParmInfo(mutableModel,"TdAft") + (runtr,gridTimes)=self.getGridTimes(mutableModel,"TdAft","SFC",starttime,240) + for newtr in gridTimes: + minRH=self.getGrids(mutableModel,"MinRH","SFC",newtr,noDataError=0,cache=0) + if minRH is not None: + maxT=self.getGrids(mutableModel,"MaxT","SFC",newtr,noDataError=0,cache=0) + if maxT is not None: + if Funits==1: + tc=(maxT-32.0)*(5.0/9.0) + else: + tc=maxT + rh=clip(minRH,0.5,99.5)/100.0 + x=(log(rh)/17.67)+(tc/(tc+243.5)) + tdc=(243.5*x)/(1-x) + if 
Funits==1: + Td=(tdc*9.0/5.0)+32.0 + else: + Td=tdc + Td=clip(Td,self.parmMinval,self.parmMaxval) + self.createGrid(mutableModel,"TdAft","SCALAR",Td,newtr) + self.saveGrid(mutableModel,"TdAft") + self.VU.logMsg("BOIVerifyBiasCorr Procedure Finished") + return + #================================================================= + # getLastUpdate(self,model,parmname,level,tr) + # + # Gets the last update time for the grid for the model, parmname, + # level and timerange. Should only call this for a timerange where + # only one grid is located. If multiple grids are found - gives the + # last update for the FIRST one in the list. If no grids are found, + # it returns 0. + # + def getLastUpdate(self,model,parmname,level,tr): + ret=-1 + try: + historyList = self.getGridHistory(model, parmname, level, tr) + except: + return ret + if not historyList: + return ret + for history in historyList[0]: + if history[5] is not None: + upd = history[5].unixTime() + else: + upd = None + if upd>ret: + ret=upd + + return ret + #================================================================= + # + # RH transforms + # + def Ztrans(self,ingrid): + c=clip(ingrid,0.1,99.9) + return 0.5*log(c/(100-c)) + def Rtrans(self,ingrid): + v=exp(ingrid*2.0) + return (100*v)/(1.0+v) + #================================================================= + # getOldForecasts - Get list of matching forecast grids and + # observed grids for the specified forecast + # hour. 
+ # + def getOldForecasts(self,caseInfo,fhrInfo,parm,model,obsparm,obsmodel,forecastHour, + minval,maxval): + obsList=[] + fcstList=[] + # + # + # + for key in list(caseInfo.keys()): + (freclist,oreclist)=caseInfo[key] + # + # Have to have both forecast and observed grids + # + if ((len(freclist)<1)or(len(oreclist)<1)): + continue + # + # Use only ones that match the current forecastHour + # + fhr=fhrInfo[key] + if fhr!=forecastHour: + continue + # + # Get average of non-bad forecast grids + # + numgrids=0 + fcstgrid=self.empty() + for frec in freclist: + testgrid=self.VU.readRecord(parm,model,frec) + if self.badGrid(testgrid,minval,maxval): + basetime=self.VU.fncBtime[frec] + (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(basetime) + self.VU.logMsg(" %d-hr forecast from %4.4d/%2.2d/%2.2d %2.2dZ run appears bad - so skipped"%(fhr, + byea,bmon,bday,bhou),2) + continue + fcstgrid+=testgrid + numgrids+=1 + if numgrids==0: + continue + fcstgrid/=float(numgrids) + # + # Get average of non-bad observed grids + # + numgrids=0 + obsgrid=self.empty() + for orec in oreclist: + testgrid=self.VU.readRecord(obsparm,obsmodel,orec) + if self.badGrid(testgrid,minval,maxval): + starttime=self.VU.oncStime[orec] + (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(starttime) + self.VU.logMsg(" Observed grid at %4.4d/%2.2d/%2.2d %2.2dZ appears bad - so skipped"%(syea, + smon,sday,shou),2) + continue + obsgrid+=testgrid + numgrids+=1 + if numgrids==0: + continue + obsgrid/=float(numgrids) + # + # Transform the ob/fcst if needed + # + if parm in self.transformParms: + obsgrid=self.Ztrans(obsgrid) + fcstgrid=self.Ztrans(fcstgrid) + # + # Add grid to lists of grids + # + obsList.append(obsgrid) + fcstList.append(fcstgrid) + # + # Return lists of forecast/observed grids + # + return(obsList,fcstList) + #================================================================= + # getRegression + # + # get linear regression predictors of non-average forecast errors + # from 
forecast anomalies (difference of forecast from average + # forecast) from the pairList forecasts + # + def getRegression(self,avgFcst,avgErr,obsGrids,fcstGrids): + # + # The independent variable is forecast anomaly (forecast-avgFcst) + # + ind=subtract(fcstGrids,avgFcst) + # + # The dependent variable is error anomaly (error-avgErr) + # + dep=subtract(fcstGrids-obsGrids,avgErr) + # + # Get covariance, and square of dependent and independent variables + # across the cases + # + covsum=add.reduce(dep*ind) + depsqr=add.reduce(dep*dep) + indsqr=add.reduce(ind*ind) + # + # Calculate standard deviation of indpendent variables + # + num=obsGrids.shape[0] + stdind=(indsqr/num)**0.5 # standard deviation of independent variable + stddep=(depsqr/num)**0.5 # standard deviation of dependent variable + stdind[less(stdind,0.001)] = 0.001 + stddep[less(stddep,0.001)] = 0.001 + covariance=covsum/num # covariance + correlation=covariance/(stdind*stddep) # correlation coefficient + return(correlation,stdind,stddep) + #================================================================== + # + # badGrid - very crude way to flag bad grids from calculations. + # Just looks for grids with virtually no variance. + # (this won't work for precip that typically does have + # no variance on days with no precip) + # returns 1 if bad, 0 if OK. 
+ # + def badGrid(self,grid,minvalue,maxvalue): + gridShape = self.getGridShape() + numpts=gridShape[0]*gridShape[1] + avg=add.reduce(add.reduce(grid))/float(numpts) + sqr=add.reduce(add.reduce(grid*grid))/float(numpts) + var=sqr-(avg*avg) + if var<((maxvalue-minvalue)/1000.0): + return 1 + return 0 + #================================================================== + # + # + def getParmInfo(self,mutableModel,parm): + units="NONE" + precision=-99 + minval=0 + maxval=100 + colorTable="" + displayMinval=0 + displayMaxval=100 + parm=self.getParm(mutableModel,parm,"SFC") + if parm is not None: + parmInfo = parm.getGridInfo() + units=parmInfo.getUnitString() + precision=parmInfo.getPrecision() + minval=parmInfo.getMinValue() + maxval=parmInfo.getMaxValue() + if DSPATTR_PORT: + ctInfo=parmInfo.getDisplayAttributes().colorTable() + if ctInfo is not None: + colorTable=ctInfo.name() + displayMinval=ctInfo.minLimit() + displayMaxval=ctInfo.maxLimit() + return(units,precision,minval,maxval,colorTable, + displayMinval,displayMaxval) + #----------------------------------------------------------------- + # + # Save grid for the specified outdb and parmname + # + def saveGrid(self,outdb,parmname): + p=self.getParm(outdb,parmname,"SFC") + if p is not None: + p.saveParameter(1) + return +# +# debug stuff for memory usage +# +_proc_status="/proc/%d/status"%os.getpid() +_scale={'kB':1024.0,'mB':1024.0*1024.0, + 'KB':1024.0,'MB':1024.0*1024.0} +def _VmB(VmKey): + try: + t=open(_proc_status) + v=t.read() + t.close() + except IOError: + return 0.0 + i=v.index(VmKey) + v=v[i:].split(None,3) + if len(v)<3: + return 0.0 + return float(v[1])*_scale[v[2]] +def memory(): + return _VmB('VmSize:') +def resident(): + return _VmB('VmRSS:') + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerifyInfo.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerifyInfo.py index 2794bcc815..59d76686fe 100644 --- 
a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerifyInfo.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/BOIVerifyInfo.py @@ -1,2273 +1,2273 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# BOIVerifyInfo - version 2.0.5 -# -# Allows changes to forecaster numbers - and changes any current records -# to match the new numbers. Allows deletion of grids in the archived -# database (i.e. to remove bad grids) -# -# Author: Tim Barker - SOO BOI -# 2007/11/06 - version 2.0 - Original Documented Implementation -# 2008/05/28 - version 2.0.5 - added ability to show and delete grids -# from obs models -# -# -# 2010/04/23 ryu Initial port to AWIPS II. -# -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -MenuItems = ["Verify"] - -from numpy import * -from math import * -import Tkinter,tkFont,tkMessageBox -import TkDefaults -import time -import SmartScript -import BOIVerifyUtility - -PROGNAME="BOIVerifyInfo" # you can change it if you dont like BOI. Shame on you! - -MONS=["DUM","Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"] -DAYS=["Mon","Tue","Wed","Thu","Fri","Sat","Sun"] -COLORLIST=["Cyan","Orange","PaleGreen","Red","Blue","Magenta","ForestGreen","Sienna", - "Pink","Green","purple","Yellow","Tan","Turquoise","SteelBlue", - "chartreuse","Gold","tomato","Violet","aquamarine","Coral"] -#===================================================================== -# The dummy procedure - which does nothing more than start the -# non-modal BOIVerifyInfo dialog box. 
-# -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - self._dbss=dbss - SmartScript.SmartScript.__init__(self, dbss) - return - #------------------------------------------------------------------ - # execute - simply starts the non-modal "Info" dialog box - # - def execute(self): - self.statusBarMsg("Starting %s"%PROGNAME,"R") - - tk = Tkinter.Tk() - sw = tk.winfo_screenwidth() - sh = tk.winfo_screenheight() - tk.geometry("%dx%d+0+0" % (sw,sh)) - self.root = tk - - TkDefaults.setDefaults(tk) - - self.VU=BOIVerifyUtility.BOIVerifyUtility(self._dbss, None) - self.dialog=VerifInfo(self.VU, parent=tk, - name="BOIVerify Grid Archive Info", - callbackMethod=self.doneInfo, - modal=0, - ) - tk.withdraw() - tk.mainloop() - - self.statusBarMsg("Finished starting %s"%PROGNAME,"R") - return - #------------------------------------------------------------------ - # doneInfo - called when the Info dialog is closed (with the button - # name as an argument - # - def doneInfo(self,buttonType): - self.root.destroy() - return -#===================================================================== -# -# Basic Class for a dialog - similar to IFPDialog.Dialog -# -class Dialog(Tkinter.Toplevel): - def __init__(self,parent=None,title=None,modal=0): - if parent is None: - return - Tkinter.Toplevel.__init__(self,parent) - #self.transient(parent) - if title: - self.title(title) - self.parent=parent - self.result=None - self.buttonbox() - bodyFrame=Tkinter.Frame(self) - self.initial_focus=self.body(bodyFrame) - bodyFrame.pack(padx=5,pady=5,fill=Tkinter.BOTH,expand=1) - bodyFrame.pack_propagate(1) - # - if not self.initial_focus: - self.initial_focus=self - self.protocol("WM_DELETE_WINDOW", self.cancel) - self.geometry("+%d+%d"%(parent.winfo_rootx()+50, - parent.winfo_rooty()+50)) - self.initial_focus.focus_set() - - self.wait_visibility() - self.update_idletasks() - geom=self.geometry() - (wh,rest)=geom.split("+",1) - (wid,hgt)=wh.split("x",1) - self.minsize(wid,hgt) - - 
if modal==1: - self.grab_set() - self.wait_window(self) - return self.result - else: - return self - #------------------------------------------------------------------ - # body - normally overridden with the stuff you want to display - # in the dialog box - # - def body(self,master): - pass - #------------------------------------------------------------------ - # buttonbox - displays the buttonbox at the bottom of the dialog. - # Normally has OK and Cancel buttons - but can be - # overridden to have any buttons desired - # - def buttonbox(self): - box=Tkinter.Frame(self) - w=Tkinter.Button(box,text="Ok",width=10,command=self.ok, - default=Tkinter.ACTIVE) - w.pack(side=Tkinter.LEFT,padx=5,pady=5) - w=Tkinter.Button(box,text="Cancel",width=10,command=self.cancel) - w.pack(side=Tkinter.LEFT,padx=5,pady=5) - box.pack(side=Tkinter.BOTTOM) - #------------------------------------------------------------------ - # ok - called when the OK button is pressed. Calls validate to - # see if the input is OK. If the input is OK it removes - # the dialog and does the action specified in apply - # If the input has some problem - it returns to the dialog - # - def ok(self,event=None): - if not self.validate(): - self.initial_focus.focus_set() - return - self.withdraw() - self.update_idletasks() - self.apply() - self.cancel() - #------------------------------------------------------------------ - # cancel - called when the Cancel button is pressed - and when - # everything else is done. Destroys the dialog - # - def cancel(self,event=None): - self.parent.focus_set() - self.destroy() - #------------------------------------------------------------------ - # validate - normally overridden with stuff that checks the input - # on the dialog box. Should return 1 if input is OK, - # and 0 if not. 
- # - def validate(self): - return 1 - #------------------------------------------------------------------ - # apply - normally overridden with stuff that needs to be done - # when the dialog input has been validated and it is - # OK to proceed. - # - def apply(self): - pass -#===================================================================== -# ChangeCancelDialog - a Dialog to change the forecaster number, -# forecaster ID, or forecater name -# -class ChangeCancelDialog(Dialog): - def __init__(self, VU, numberVar, idVar, nameVar, - parent=None, name="Edit Forecaster", callbackMethod=None, - modal=1): - - self.__parent = parent - self.__name = name - self.__modal = modal - self.__callbackMethod = callbackMethod - self.__VU=VU - self.__numberVar=numberVar - self.__idVar=idVar - self.__nameVar=nameVar - self.__oldNum=self.__numberVar.get() - self.__oldID=self.__idVar.get() - self.__oldName=self.__nameVar.get() - self.__dialog=Dialog.__init__(self,parent=self.__parent, - title=self.__name, - modal=self.__modal) - return - #----------------------------------------------------------------- - # buttonbox - special buttonbox with Change and Cancel buttons - # - def buttonbox(self): - buttonFrame = Tkinter.Frame(self) - Tkinter.Button(buttonFrame, text="Change", width=7, - command=self.changeCB).pack(side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Cancel",width=7, - command=self.cancelCB).pack(side=Tkinter.LEFT,pady=5, padx=10) - buttonFrame.pack(side=Tkinter.BOTTOM,expand=0) - #------------------------------------------------------------------ - # body - special body with the current number, username, and - # display name shown - # - def body(self, master): - Tkinter.Label(master,text="Number:").grid(column=0,row=0,sticky=Tkinter.E) - Tkinter.Label(master,text="Username:").grid(column=0,row=1,sticky=Tkinter.E) - Tkinter.Label(master,text="Display Name:").grid(column=0,row=2,sticky=Tkinter.E) - 
self.numEntry=Tkinter.Entry(master,textvariable=self.__numberVar,width=2) - self.numEntry.grid(column=1,row=0,sticky=Tkinter.W) - self.idEntry=Tkinter.Entry(master,textvariable=self.__idVar,width=8) - self.idEntry.grid(column=1,row=1,sticky=Tkinter.W) - self.nameEntry=Tkinter.Entry(master,textvariable=self.__nameVar,width=25) - self.nameEntry.grid(column=1,row=2,sticky=Tkinter.W) - #------------------------------------------------------------------ - # changeCB - called when they click on Change. Need to validate - # everything to make sure the changes are OK. - # - def changeCB(self): - # - # Check forecaster number for just a number, between 1 and 99 - # - newNumStr=self.__numberVar.get().strip() - try: - num=int(newNumStr) - except: - tkMessageBox.showerror("Error","Forecaster Number needs to be an integer",parent=self) - self.numEntry.selection_range(0,Tkinter.END) - self.numEntry.focus_set() - return - if ((num<0)or(num>99)): - tkMessageBox.showerror("Error","Forecater Number needs to be between 1 and 99", - parent=self) - self.numEntry.selection_range(0,Tkinter.END) - self.numEntry.focus_set() - return - if (num==0): - tkMessageBox.showerror("Error","Forecaster Number 0 cannot be used", - parent=self) - self.numEntry.selection_range(0,Tkinter.END) - self.numEntry.focus_set() - return - # - # If changing numbers - make sure the new number is not already - # in use. 
- # - if num!=int(self.__oldNum): - curNums=self.__VU.getFcstrNums() - for testNum in curNums: - if int(testNum)==num: - tkMessageBox.showerror("Errror","Forecaster Number %d is already in use"%num, - parent=self) - self.numEntry.selection_range(0,Tkinter.END) - self.numEntry.focus_set() - return - # - # Must provide a username - # - newID=self.__idVar.get().strip() - if len(newID)==0: - tkMessageBox.showerror("Error","You must provide a username", - parent=self) - self.idEntry.selection_range(0,Tkinter.END) - self.idEntry.focus_set() - return - # - # username cannot be the same as any other current username - # - if newID!=self.__oldID: - curIDs=self.__VU.getFcstrIDs() - for testID in curIDs.values(): - if testID==newID: - tkMessageBox.showerror("Error","Username %s is already in use"%newID, - parent=self) - self.idEntry.selection_range(0,Tkinter.END) - self.idEntry.focus_set() - return - # - # Must provide a display name - # - newName=self.__nameVar.get().strip() - if len(newName)==0: - tkMessageBox.showerror("Error","You must provide a display name", - parent=self) - self.nameEntry.selection_range(0,Tkinter.END) - self.nameEntry.focus_set() - return - # - # If everything the same as when we started - treat this - # the same as a cancel - # - if ((num==int(self.__oldNum))and(newID==self.__oldID)and(newName==self.__oldName)): - self.cancel() - return - # - # If number changes - need to do a lot more stuff - so ask them - # if they are sure - and do it if they say yes - # - if (num!=int(self.__oldNum)): - oldnum=int(self.__oldNum) - text= "It will take a while to change forecasts " - text+="attributed to old forecaster number %d "%oldnum - text+="to new forecaster number %d.\n"%num - text+="\n" - text+="Are you sure you want to proceed?" 
- ynDiag=tkMessageBox.askyesno("Are you sure?",text, - parent=self,default=tkMessageBox.NO) - if not ynDiag: - self.__callbackMethod("Change") - self.cancel() - return - # - # OK - all input is valid and user wants to proceed - # so remove the 'change' dialog and actually make - # the changes - # - self.withdraw() - self.update_idletasks() - # - # make number changes (could take some time) - # - if (num!=int(self.__oldNum)): - self.changeNumbers() - # - # Change IDs and names - should be fast - # - Names=self.__VU.getFcstrNames() - IDs=self.__VU.getFcstrIDs() - numStr="%2.2d"%num - Names[numStr]=newName - IDs[numStr]=newID - self.__VU.setFcstrs(Names,IDs) - self.__VU.saveFcstrNums() - # - if self.__callbackMethod is not None: - self.__callbackMethod("Change") - self.cancel() - #------------------------------------------------------------------ - # cancelCB - called when they click on Cancel when making - # changes - # - def cancelCB(self): - if self.__callbackMethod is not None: - self.__callbackMethod("Cancel") - self.cancel() - #------------------------------------------------------------------ - # changeNumbers - gets called if they REALLY want to change - # numbers for a forecaster. 
Goes through all - # Official datafiles and changes all grids - # associated with old number to new number - # - def changeNumbers(self): - newNumStr=self.__numberVar.get().strip() - num=int(newNumStr) - oldnum=int(self.__oldNum) - for parm in self.__VU.getVerParms(): - datatype=self.__VU.getVerParmType(parm) - if not self.__VU.checkFile(parm,"Official",modify=1,datatype=datatype): - continue - fnc=self.__VU.fncFcstr[:,:] - count=add.reduce(add.reduce(equal(fnc,oldnum))) - nnc=where(equal(fnc,oldnum),num,fnc) - self.__VU.fncFcstr[:,:]=nnc[:,:].astype(int8) - self.__VU.closeFcstFile() - newID=self.__idVar.get().strip() - newName=self.__nameVar.get().strip() - Names=self.__VU.getFcstrNames() - IDs=self.__VU.getFcstrIDs() - del Names[self.__oldNum] - del IDs[self.__oldNum] - newNumStr="%2.2d"%num - Names[newNumStr]=newName - IDs[newNumStr]=newID - self.__VU.setFcstrs(Names,IDs) - self.__VU.saveFcstrNums() - return -#===================================================================== -# AddCancelDialog - for when they want to add a forecaster -# -class AddCancelDialog(Dialog): - def __init__(self, VU, numberVar, idVar, nameVar, - parent=None, name="Add Forecaster", callbackMethod=None, - modal=1): - - self.__parent = parent - self.__name = name - self.__modal = modal - self.__callbackMethod = callbackMethod - self.__VU=VU - self.__numberVar=numberVar - self.__idVar=idVar - self.__nameVar=nameVar - - self.__dialog=Dialog.__init__(self,parent=self.__parent, - title=self.__name, - modal=self.__modal) - return - #------------------------------------------------------------------ - # buttonbox - special buttonbox with Add and Cancel buttons - # - def buttonbox(self): - buttonFrame = Tkinter.Frame(self) - Tkinter.Button(buttonFrame, text="Add", width=7, - command=self.addCB).pack(side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Cancel",width=7, - command=self.cancelCB).pack(side=Tkinter.LEFT,pady=5, padx=10) - 
buttonFrame.pack(side=Tkinter.BOTTOM,expand=0) - #------------------------------------------------------------------ - # body - special body with the current number, username, and - # display name shown - # - def body(self, master): - Tkinter.Label(master,text="Number:").grid(column=0,row=0,sticky=Tkinter.E) - Tkinter.Label(master,text="Username:").grid(column=0,row=1,sticky=Tkinter.E) - Tkinter.Label(master,text="Display Name:").grid(column=0,row=2,sticky=Tkinter.E) - self.numEntry=Tkinter.Entry(master,textvariable=self.__numberVar,width=2) - self.numEntry.grid(column=1,row=0,sticky=Tkinter.W) - self.idEntry=Tkinter.Entry(master,textvariable=self.__idVar,width=8) - self.idEntry.grid(column=1,row=1,sticky=Tkinter.W) - self.nameEntry=Tkinter.Entry(master,textvariable=self.__nameVar,width=25) - self.nameEntry.grid(column=1,row=2,sticky=Tkinter.W) - #------------------------------------------------------------------ - # addCB - called when they click on Add. Need to validate - # everything to make sure the changes are OK. - # - def addCB(self): - # - # Check forecaster number for just a number, between 1 and 99 - # - newNumStr=self.__numberVar.get().strip() - try: - num=int(newNumStr) - except: - tkMessageBox.showerror("Error","Forecaster Number needs to be an integer number", - parent=self) - self.numEntry.selection_range(0,Tkinter.END) - self.numEntry.focus_set() - return - if ((num<0)or(num>99)): - tkMessageBox.showerror("Error","Forecater Number needs to be between 1 and 99", - parent=self) - self.numEntry.selection_range(0,Tkinter.END) - self.numEntry.focus_set() - return - if (num==0): - tkMessageBox.showerror("Error","Forecaster Number 0 cannot be used", - parent=self) - self.numEntry.selection_range(0,Tkinter.END) - self.numEntry.focus_set() - return - # - # Make sure the new number is not already - # in use. 
- # - curNums=self.__VU.getFcstrNums() - for testNum in curNums: - if int(testNum)==num: - tkMessageBox.showerror("Error","Forecaster Number %d is already in use"%num, - parent=self) - self.numEntry.selection_range(0,Tkinter.END) - self.numEntry.focus_set() - return - # - # Must provide a username - # - newID=self.__idVar.get().strip() - if len(newID)==0: - tkMessageBox.showerror("Error","You must provide a username", - parent=self) - self.idEntry.selection_range(0,Tkinter.END) - self.idEntry.focus_set() - return - # - # username cannot be the same as any other current username - # - curIDs=self.__VU.getFcstrIDs() - for testID in curIDs.values(): - if testID==newID: - tkMessageBox.showerror("Error","Username %s is already in use"%newID, - parent=self) - self.idEntry.selection_range(0,Tkinter.END) - self.idEntry.focus_set() - return - # - # Must provide a display name - # - newName=self.__nameVar.get().strip() - if len(newName)==0: - tkMessageBox.showerror("Error","You must provide a display name", - parent=self) - self.nameEntry.selection_range(0,Tkinter.END) - self.nameEntry.focus_set() - return - # - # No number changes - but change IDs and names - # - self.withdraw() - self.update_idletasks() - Names=self.__VU.getFcstrNames() - IDs=self.__VU.getFcstrIDs() - numStr="%2.2d"%num - Names[numStr]=newName - IDs[numStr]=newID - self.__VU.setFcstrs(Names,IDs) - self.__VU.saveFcstrNums() - # - if self.__callbackMethod is not None: - self.__callbackMethod("Add") - self.cancel() - #------------------------------------------------------------------ - # cancelCB - called when they click on Cancel when making - # changes - # - def cancelCB(self): - if self.__callbackMethod is not None: - self.__callbackMethod("Cancel") - self.cancel() -#============================================================================== -# doneDialog - a generic dialog class with a single DONE button at the bottom -# -class doneDialog(Dialog): - def __init__(self, parent=None, name="nonModal 
Dialog", callbackMethod=None, - modal=1): - - self.__parent = parent - self.__name = name - self.__modal = modal - self.__callbackMethod = callbackMethod - self.__dialog=Dialog.__init__(self,parent=self.__parent, - title=self.__name, - modal=self.__modal) - return self.__dialog - def buttonbox(self): - buttonFrame = Tkinter.Frame(self) - Tkinter.Button(buttonFrame, text="Done", width=10, - command=self.doneCB).pack(side=Tkinter.RIGHT, pady=5, padx=10) - buttonFrame.pack(side=Tkinter.BOTTOM,expand=0) - def body(self, master): - bodylabel=Tkinter.Label(master,text="This is the body of doneDialog") - bodylabel.pack(side=Tkinter.BOTTOM) - def doneCB(self): - if self.__callbackMethod is not None: - self.__callbackMethod("Done") - self.cancel() -#======================================================================= -# ListDialog - shows all the forecasts made by the specified -# forecaster number -# -class ListDialog(doneDialog): - def __init__(self, VU, numstr, idstr, namestr, parent=None, - name="List of forecasts", callbackMethod=None, modal=1): - self.__VU=VU - self.__numstr=numstr - self.__idstr=idstr - self.__namestr=namestr - self.__parent=parent - doneDialog.__init__(self,parent=parent,name=name,callbackMethod=callbackMethod, - modal=modal) - def body(self,master): - sb=Tkinter.Scrollbar(master=master) - sb.pack(side=Tkinter.RIGHT,fill=Tkinter.Y) - self.cf=tkFont.Font(family="Courier",size=-12) - txt=Tkinter.Text(master=master,width=60,height=30,yscrollcommand=sb.set, - font=self.cf) - txt.pack(side=Tkinter.LEFT,fill=Tkinter.BOTH,expand=1) - sb.config(command=txt.yview) - text="" - maxparmwidth=0 - totalgrids=0 - oldnum=int(self.__numstr) - fcstsMade={} - for parm in self.__VU.getVerParms(): - maxparmwidth=max(maxparmwidth,len(parm)) - datatype=self.__VU.getVerParmType(parm) - if not self.__VU.checkFile(parm,"Official",modify=0,datatype=datatype): - continue - fnc=self.__VU.fncFcstr[:,:] - involved=logical_or.reduce(equal(fnc,oldnum),1) - 
recsUsed=compress(involved,self.__VU.fncRecs) - for i in xrange(recsUsed.shape[0]): - rec=int(recsUsed[i]) - totalgrids+=1 - basekey="%d"%self.__VU.fncBtime[rec] - if basekey not in fcstsMade.keys(): - fcstsMade[basekey]={} - if parm not in fcstsMade[basekey].keys(): - fcstsMade[basekey][parm]=(1,self.__VU.fncStime[rec],self.__VU.fncEtime[rec]) - else: - (num,start,end)=fcstsMade[basekey][parm] - num+=1 - start=min(self.__VU.fncStime[rec],start) - end=max(self.__VU.fncEtime[rec],end) - fcstsMade[basekey][parm]=(num,start,end) - self.__VU.closeFcstFile() - # - # Display the data - # - cbb=tkFont.Font(family="Courier",size=-14,weight=tkFont.BOLD) - cb=tkFont.Font(family="Courier",size=-12,weight=tkFont.BOLD) - txt.tag_config("title",font=cbb) - txt.tag_config("date",font=cb) - txt.insert(Tkinter.END,"%s Grids:\n\n"%self.__namestr,("title")) - if totalgrids==0: - txt.insert(Tkinter.END," NONE\n\n",("date")) - bases=fcstsMade.keys() - bases.sort() - bases.reverse() - fmtn=" %%%ds: %%3d grids made from %%3d to %%3d hours\n"%(maxparmwidth) - fmt1=" %%%ds: %%3d grid made from %%3d to %%3d hours\n"%(maxparmwidth) - for base in bases: - (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(int(base)) - txt.insert(Tkinter.END,"%s %s %d, %4.4d %2.2dZ:\n"%(DAYS[gwda], - MONS[gmon],gday,gyea,ghou),("date")) - made=fcstsMade[base] - parms=made.keys() - parms.sort() - for parm in parms: - (num,start,end)=made[parm] - starthr=int((start-int(base))/3600.) - endhr=int((end-int(base))/3600.) 
- fmt=fmtn - if num==1: - fmt=fmt1 - txt.insert(Tkinter.END,fmt%(parm,num,starthr,endhr)) - txt.insert(Tkinter.END,"\n") - txt.config(state=Tkinter.DISABLED) - return -#======================================================================= -# InfoDialog - shows info about the specified grid -# -class InfoDialog(doneDialog): - def __init__(self, VU, model, parm, record, parent=None, - name="Grid Info", callbackMethod=None, modal=1): - self.__VU=VU - self.__model=model - self.__parm=parm - self.__record=record - self.__parent=parent - doneDialog.__init__(self,parent=parent,name=name,callbackMethod=callbackMethod, - modal=modal) - def body(self,master): - sb=Tkinter.Scrollbar(master=master) - sb.pack(side=Tkinter.RIGHT,fill=Tkinter.Y) - self.cf=tkFont.Font(family="Courier",size=-12) - txt=Tkinter.Text(master=master,width=60,height=20,yscrollcommand=sb.set, - font=self.cf) - txt.pack(side=Tkinter.LEFT,fill=Tkinter.BOTH,expand=1) - sb.config(command=txt.yview) - obsmodels=self.__VU.getCFG('OBSMODELS') - text="" - # - if not self.__VU.checkFile(self.__parm,self.__model,modify=0): - text+="\n\nCould not read info for %s %s grid! 
\n\n"%(self.__model,self.__parm) - else: - if self.__model in obsmodels: - btime=self.__VU.oncBtime[self.__record] - stime=self.__VU.oncStime[self.__record] - etime=self.__VU.oncEtime[self.__record] - vtime=self.__VU.oncVtime[self.__record] - fnums=[] - else: - btime=self.__VU.fncBtime[self.__record] - stime=self.__VU.fncStime[self.__record] - etime=self.__VU.fncEtime[self.__record] - vtime=self.__VU.fncVtime[self.__record] - fnums=self.__VU.fncFcstr[self.__record,:] - - datatype=self.__VU.getVerParmType(self.__parm) - gridData=self.__VU.readRecord(self.__parm,self.__model,self.__record) - if datatype!=1: - minval=minimum.reduce(minimum.reduce(gridData)) - maxval=maximum.reduce(maximum.reduce(gridData)) - sum=add.reduce(add.reduce(gridData)) - sumsqr=add.reduce(add.reduce(gridData*gridData)) - else: - (mag,direc)=gridData - minval=minimum.reduce(minimum.reduce(mag)) - maxval=maximum.reduce(maximum.reduce(mag)) - sum=add.reduce(add.reduce(mag)) - sumsqr=add.reduce(add.reduce(mag*mag)) - gs = self.__VU.getGridShape() - numpts=gs[0]*gs[1] - avg=sum/numpts - std=sqrt((sumsqr/numpts)-(avg*avg)) - self.__VU.closeFcstFile() - - prec=self.__VU.getParmPrecision(self.__model,self.__parm) - if prec>0: - fmt1="%%.%df"%prec - fmt2="%%.%df"%(prec+1) - else: - fmt1="%d" - fmt2="%.1f" - (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(btime) - (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(stime) - (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(etime) - (vyea,vmon,vday,vhou,vmin,vsec,vwda,vyda,vdst)=time.gmtime(vtime) - if self.__model in obsmodels: - if (sday==eday): - text+="\n%s %s grid from %2.2d-%2.2dZ %4.4d/%2.2d/%2.2d\n"%(self.__model, - self.__parm,shou,ehou,syea,smon,sday) - else: - if (smon==emon): - text+="\n%s %s grid from %2.2d %2.2dZ through %2.2d %2.2dZ %4.4d/%2.2d\n"%(self.__model, - self.__parm,sday,shou,eday,ehou,syea,smon) - else: - text+="\n%s %s grid from %4.4d/%2.2d/%2.2d %2.2dZ through %4.4d/%2.2d/%2.2d 
%2.2dZ\n"%(self.__model, - self.__parm,syea,smon,sday,shou,eyea,emon,eday,ehou) - else: - text+="\n%s %s grid from %2.2dZ run %4.4d/%2.2d/%2.2d\n"%(self.__model, - self.__parm,bhou,byea,bmon,bday) - text+="\n" - # - # Show forecast hour and valid time - # - if self.__model not in obsmodels: - fhr=int((stime-btime)/3600.0) - text+=" %d-hr forecast\n"%fhr - if (sday==eday): - text+=" Valid: %2.2d-%2.2dZ %4.4d/%2.2d/%2.2d\n"%(shou,ehou, - syea,smon,sday) - else: - if (smon==emon): - text+=" Valid: %2.2d %2.2dZ through %2.2d %2.2dZ %4.4d/%2.2d\n"%(sday, - shou,eday,ehou,syea,smon) - else: - text+=" Valid: %4.4d/%2.2d/%2.2d %2.2dZ through %4.4d/%2.2d/%2.2d %2.2dZ\n"%(syea, - smon,sday,shou,eyea,emon,eday,ehou) - text+="\n" - # - # Show archive time - # - text+=" Archived at %2.2d:%2.2dZ %4.4d/%2.2d/%2.2d\n"%(vhou,vmin,vyea,vmon,vday) - text+="\n" - # - # Show forecasters - # - if self.__model=="Official": - text+=" Forecasters:\n" - for j in xrange(fnums.shape[0]): - if fnums[j]>0: - text+=" %2.2d - %s \n"%(fnums[j],self.__VU.getFcstrName(fnums[j])) - text+="\n" - # - # Show stats - # - minvalStr=fmt1%minval - maxvalStr=fmt1%maxval - avgStr=fmt2%avg - stdStr=fmt2%std - text+=" Minimum: %s\n"%minvalStr - text+=" Maximum: %s\n"%maxvalStr - text+=" Average: %s\n"%avgStr - text+=" Std Dev: %s\n"%stdStr - text+="\n" - # - # - # - txt.insert(Tkinter.END,text) - txt.config(state=Tkinter.DISABLED) - return -#======================================================================= -# -# The main BOIVerify Info dialog box -# -class VerifInfo(doneDialog): - def __init__(self, VU, parent=None, name="nonModal Dialog", - callbackMethod=None, modal=1): - self.__parent=parent - self.__VU=VU - self.fontHeight=18 # in pixels (well, not quite, but close) - self.boxWidth=7 # width of hour, in pixels - self.hourWidth=self.boxWidth+3 - self.rowH=self.fontHeight+5 # 5 pixels to surround box and allow sep line - self.scrollIncY=self.rowH/3 - self.scrollIncX=self.scrollIncY*2 - self.scrbuffer=10 
# within this many pixels of edge - it auto-scrolls - self.yoff=2 # to space down past border - self.cfb=tkFont.Font(family="Arial",size=-self.fontHeight,weight=tkFont.BOLD) - self.cf=tkFont.Font(family="Arial",size=-10) - self.fcstrNames=self.__VU.getFcstrNames() - self.fcstrIDs=self.__VU.getFcstrIDs() - self.fcstrNums=self.__VU.getFcstrNums() - self.usedFcstrs=[] - self.fcbstates=[] - dialog=doneDialog.__init__(self,parent=parent,name=name, - callbackMethod=callbackMethod, - modal=modal) - # - # Now that dialog exists - set minimum size on dialog box - then expand - # - geom=dialog.geometry() - (wh,rest)=geom.split("+",1) - (wid,hgt)=wh.split("x",1) - # - # Now make it a more reasonable width - # - iwid=int(wid)+300 - geom="%dx%s+%s"%(iwid,hgt,rest) - dialog.geometry(geom) - # - # Setup dialog for latest date - # - self.displayDate() - return - #------------------------------------------------------------------ - # newModel - called when a new model is chosen from the list of - # models. Have to read in all the model basetimes and - # find the basetime closest to the currently displayed - # basetime - # - def newModel(self): - # - # Get new model name and setup message while working - # - model=self.ml.getCurrentSelection() - msgWindow=messageWindow("Searching %s Grids"%model,self) - try: - # - # Get new parms and put them in order - # - parmList=self.__VU.listModelParms(model) - self.parmList=self.orderParms(parmList) - self.drawParmNames() - # - # Get time being shown for current model...will - # try to match this time for the new model - # - timeindex=self.tl.getCurrentIndex() - oldbasetime=self.times[timeindex] - # - # Get new basetimes for this new model - want to search through - # parm that has the least grids. 
We'll guess MaxT - but if - # there is no MaxT for this model - use the first parm and - # get all the basetimes for that parm - # - self.times=[] - if "MaxT" in self.parmList: - self.times=self.__VU.getBases("MaxT",model) - else: - self.times=self.__VU.getBases(self.parmList[0],model) - # - # Search through basetimes trying to find the index - # with the basetime closest to the basetime we had - # before - # - self.times.sort() - self.times.reverse() - self.timestrs=[] - defentry=0 - defindex=0 - mindiff=abs(self.times[0]-oldbasetime) - for i in xrange(len(self.times)): - btime=self.times[i] - (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(btime) - self.timestrs.append("%2.2d/%2.2d/%4.4d %2.2dZ"%(gmon,gday,gyea,ghou)) - diff=abs(btime-oldbasetime) - if diff0: - for colorgroup in colorgroups: - (fcstrlist,color)=colorgroup - match=1 - for j in xrange(len(fcstrlist)): - if fcstrlist[j]!=flist[j]: - match=0 - break - if match==1: - colorfill=color - break - if match==0: - colorfill=COLORLIST[len(colorgroups)%len(COLORLIST)] - newgroup=(flist,colorfill) - colorgroups.append(newgroup) - else: # anything but Official...has white/solid timeblocks - colorfill="white" - stippletype="" - # - # Setup tags with rec:(record number), - # parm:(parm name), - # col:(original color) - # - tagtuple=("grid","rec:%d"%rec,"parm:%s"%parm,"col:%s"%colorfill) - # - # find coordinates of box based on time and row (i) - # - shr=(stime-basetime)/3600 - ehr=(etime-basetime)/3600 - x1=shr*(self.hourWidth)+2 - x2=ehr*(self.hourWidth)-2 - y1=(i*self.rowH)+self.yoff+2 - y2=y1+self.fontHeight-1 - # - # Make the timeblock box - # - self.cGrd.create_polygon(x1,y1,x1,y2,x2,y2,x2,y1,fill=colorfill,outline=colorfill, - stipple=stippletype,width=1,tags=tagtuple) - # - # Keep track of max/min times displayed - # - maxpix=max(maxpix,ehr*(self.hourWidth)) - minpix=min(minpix,shr*(self.hourWidth)) - # - # Setup bindings for popups on the grid boxes - # - 
self.cGrd.tag_bind("grid","",self.postPopGrid) - if model=="Official": - self.cGrd.tag_bind("grid","",self.extract) - self.cGrd.bind("",self.buttonstart) - self.cGrd.bind("",self.drag) - self.cGrd.bind("",self.buttonstop) - else: - self.cGrd.tag_unbind("grid","") - self.cGrd.unbind("") - self.cGrd.unbind("") - self.cGrd.unbind("") - # - # Setup scrolling regions for grid canvas and timelabel canvas - # - self.cGrd.configure(scrollregion=(minpix,0,maxpix,self.parmHeight)) - self.cTim.configure(scrollregion=(minpix,0,maxpix,50)) - # - # Horizontally move to the start of this basetime - # (for Official add 12 hours) - # - offset=0 - if model=="Official": - offset=12*self.hourWidth - x0=float(offset-minpix)/float(maxpix-minpix) - self.cGrd.xview("moveto",x0) - self.cTim.xview("moveto",x0) - # - # Make time marks from mintime to maxtime - # tick marks at hourly intervals - # hash marks through grid canvas at 6 hourly intervals - # label centerred above 12Z each day - # - for jtim in xrange(mintime,maxtime,3600): - (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(jtim) - fhr=(jtim-basetime)/3600 - x=fhr*self.hourWidth - if ghou==0: - ywid=30 - self.cGrd.create_line(x,0,x,50*self.rowH,fill="blue",stipple="gray50",tags="hash") - elif (ghou%6==0): - ywid=10 - self.cTim.create_text(x,30-ywid,anchor=Tkinter.S,fill="white",font=self.cf,text="%2.2d"%ghou) - self.cGrd.create_line(x,0,x,50*self.rowH,fill="blue",stipple="gray50",tags="hash") - if ghou==12: - self.cTim.create_text(x,30-ywid-10,anchor=Tkinter.S, - fill="white",font=self.cf, - text="%s %d (%s)"%(MONS[gmon],gday,DAYS[gwda])) - else: - ywid=5 - self.cTim.create_line(x,30,x,30-ywid,fill="white") - self.cGrd.lower("hash") - # - # Update the color boxes next to forecaster names...based on - # the forecasters active making the grids currently displayed - # - self.updateFcstrButtons() - # - # Check to see if the 'set combo' button can be made active - # - self.checkSetButton() - # - # set the baseDisplayed 
time...so future changes in date - # can know what is on the screen now... - # - self.baseDisplayed=basetime - except: - pass - # - # Close the message window - we're done displaying this - # basetime - # - msgWindow.destroy() - return - #----------------------------------------------------------------- - # body - custom body that has GridManager-like qualities. It - # displays gridblocks with forecaster numbers associated - # with each grid - # - def body(self, master): - # - # - # - self.screvent=None # grid canvas timing events - self.baseDisplayed=0 - # - self.modelstrs=self.__VU.listModels() # what if no models ? - obsmodels=self.__VU.getCFG('OBSMODELS') # what if no obs models ? - for model in obsmodels: - if model not in self.modelstrs: - self.modelstrs.append(model) - self.modelstrs.sort() - if "Official" in self.modelstrs: - defmodel="Official" - else: - defmodel=self.modelstrs[0] - # - parmList=self.__VU.listModelParms(defmodel) - self.parmList=self.orderParms(parmList) - # - # Popup Menu for forecaster actions - # - self.popFcstr=Tkinter.Menu(master=master,tearoff=0) - self.popFcstr.add_command(label="Edit",command=self.editFcstr) - self.popFcstr.add_command(label="Delete",command=self.deleteFcstr) - self.popFcstr.add_command(label="List Forecasts",command=self.listFcstr) - # - # Popup Menu for grid button-3 actions - # - self.popGrid=Tkinter.Menu(master=master,tearoff=0) - self.popGrid.add_command(label="Display Info",command=self.gridInfo) - self.popGrid.add_separator() - self.popGrid.add_command(label="Delete Grid",command=self.gridDelete) - # - # Get base times of model - # - self.times=self.__VU.getBases(self.parmList[0],defmodel) - self.times.sort() - self.times.reverse() - self.timestrs=[] - for i in self.times: - print "time:", i - (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(i) - self.timestrs.append("%2.2d/%2.2d/%4.4d %2.2dZ"%(gmon,gday,gyea,ghou)) - defentry=self.timestrs[0] # ?? what if no entries in list? 
- # - pwid=0 - for i in xrange(len(self.parmList)): - parm=self.parmList[i] - pwid=max(pwid,self.cfb.measure(parm)) - parmWidth=pwid+(4*2) # four of the 2-pixel spacers - self.parmHeight=(len(self.parmList)*self.rowH)+(self.yoff*2) - - topframe=Tkinter.Frame(master) - lab=Tkinter.Label(topframe,text="Model:") - lab.pack(side=Tkinter.LEFT) - self.ml=comboList(defmodel,self.modelstrs,parent=topframe, - callbackMethod=self.newModel) - self.ml.cf.pack(side=Tkinter.LEFT,fill=Tkinter.NONE,expand=0) - fr=Tkinter.Frame(topframe,width=10) - fr.pack(side=Tkinter.LEFT) - lab=Tkinter.Label(topframe,text="Date/Cycle:") - lab.pack(side=Tkinter.LEFT,padx=10,anchor=Tkinter.W) - self.prevBut=Tkinter.Button(topframe,text="<<",pady=0,padx=0,command=self.prevCycle) - self.prevBut.pack(side=Tkinter.LEFT) - self.tl=comboList(defentry,self.timestrs,parent=topframe, - callbackMethod=self.changeDate) - self.tl.cf.pack(side=Tkinter.LEFT,fill=Tkinter.NONE,expand=0) - self.nextBut=Tkinter.Button(topframe,text=">>",pady=0,padx=0,command=self.nextCycle) - self.nextBut.pack(side=Tkinter.LEFT) - topframe.pack(side=Tkinter.TOP,anchor="w") - - botframe=Tkinter.Frame(master) - self.sHor=Tkinter.Scrollbar(botframe,orient=Tkinter.HORIZONTAL) - self.sVer=Tkinter.Scrollbar(botframe,orient=Tkinter.VERTICAL) - self.cLab=Tkinter.Canvas(botframe,relief=Tkinter.SUNKEN,width=parmWidth, - height=50,scrollregion=(0,0,parmWidth,self.parmHeight), - bg="black", - bd=2,yscrollcommand=self.sVer.set, - yscrollincrement=self.scrollIncY) - self.cTim=Tkinter.Canvas(botframe,relief=Tkinter.SUNKEN,width=200, - height=30,bd=2,xscrollcommand=self.sHor.set, - bg="black", - scrollregion=(0,0,400,30), - xscrollincrement=self.scrollIncX) - self.cGrd=Tkinter.Canvas(botframe,relief=Tkinter.SUNKEN,width=200, - height=50,bd=2,xscrollcommand=self.sHor.set, - bg="black", - scrollregion=(0,0,400,self.parmHeight), - yscrollcommand=self.sVer.set, - xscrollincrement=self.scrollIncX,yscrollincrement=self.scrollIncY) - 
self.cGrd.bind("",self.buttonstart) - self.cGrd.bind("",self.drag) - self.cGrd.bind("",self.buttonstop) - self.fFcs=Tkinter.Frame(botframe,width=100,height=50,relief=Tkinter.SUNKEN, - bd=2) - self.updateFcstrButtons() - # - # - # - self.sHor.config(command=self.scrollBothX) - self.sVer.config(command=self.scrollBothY) - - self.sHor.grid(row=0,column=1,sticky=Tkinter.W+Tkinter.E) - self.cTim.grid(row=1,column=1,sticky=Tkinter.W+Tkinter.E) - self.cLab.grid(row=2,column=0,sticky=Tkinter.N+Tkinter.S) - self.cGrd.grid(row=2,column=1,sticky=Tkinter.W+Tkinter.E+Tkinter.N+Tkinter.S) - self.sVer.grid(row=2,column=2,sticky=Tkinter.N+Tkinter.S) - self.fFcs.grid(row=2,column=3,sticky=Tkinter.N+Tkinter.S) - - but=Tkinter.Button(master=botframe,text="Add New Forecaster", - command=self.addFcstr,) - but.grid(row=1,column=3) - self.scb=Tkinter.Button(master=botframe,text="Set Forecasters for Selected Grids", - command=self.setCombo,) - self.scb.grid(row=3,column=3,sticky=Tkinter.N+Tkinter.S) - - botframe.grid_rowconfigure(0,weight=0) - botframe.grid_rowconfigure(1,weight=0) - botframe.grid_rowconfigure(2,weight=1,minsize=50) - botframe.grid_rowconfigure(3,weight=0) - botframe.grid_columnconfigure(0,weight=0,minsize=50) - botframe.grid_columnconfigure(1,weight=1,minsize=50) - botframe.grid_columnconfigure(2,weight=0) - botframe.grid_columnconfigure(3,weight=0,minsize=100) - - botframe.pack(side=Tkinter.TOP,expand=1,fill=Tkinter.BOTH) - self.dlgtop=botframe.winfo_toplevel() - self.drawParmNames() - return - #------------------------------------------------------------------ - # setCombo - set the forecaster number info on the currently - # selected grids with the forecasters currently "ON" - # in the checkboxes - def setCombo(self): - # - # scan thorugh fcbstates to get forecaster numbers of - # those that are are "ON". 
fnums is a list of numbers - # - fnums=[] - for i in xrange(len(self.fcstrNums)): - if self.fcbstates[i].get()>0: - fnums.append(self.fcstrNums[i]) - # - # If too many forecasters in the combination...give them - # an error message - # - maxForecasters=self.__VU.getCFG("MAXFCSTRS") - if len(fnums)>maxForecasters: - tkMessageBox.showerror("Error","No more than %d forecasters on any grid"%maxForecasters, - parent=self) - return - # - # Loop through selected grids - # - selItems=self.cGrd.find_withtag("selected") - if selItems is not None: - for item in selItems: - grid=0 - record=-1 - parm="" - tags=self.cGrd.gettags(item) - for tag in tags: - if tag[0:4]=="grid": - grid=1 - elif tag[0:4]=="rec:": - record=int(tag[4:]) - elif tag[0:5]=="parm:": - parm=tag[5:] - if grid==1: - gridModel=self.ml.getCurrentSelection() - if self.__VU.checkFile(parm,gridModel,modify=1): - fcstrs=self.__VU.fncFcstr[record,:] - #print " %s %s %d fcstrs=%s"%(gridModel,parm,record,str(fcstrs)) - for i in xrange(self.__VU.getCFG('MAXFCSTRS')): - self.__VU.fncFcstr[record,i]=0 - for i in xrange(len(fnums)): - self.__VU.fncFcstr[record,i]=int(fnums[i]) - fcstrs=self.__VU.fncFcstr[record,:] - #print " changed to %s"%str(fcstrs) - self.__VU.closeFcstFile() - self.redisplayDate() - return - #------------------------------------------------------------------ - # updateFcstrButtons - update the display of forecaster buttons - # with new list of forecasters. - # - def updateFcstrButtons(self): - # - # get the current on/off states for each number - # - state={} - if len(self.fcbstates)>0: - for i in xrange(len(self.fcstrNums)): - num=self.fcstrNums[i] - state[num]=self.fcbstates[i].get() - stateKeys=state.keys() - # - # Delete any widgets currently in the frame - # the one that caused the callback will not - # be deleted (this is a memory leak!) 
- # - widgets=self.fFcs.pack_slaves() - if widgets is not None: - for widget in widgets: - widget.pack_forget() - del widget - # - # get the updated names/nums/IDs - # - self.fcstrNames=self.__VU.getFcstrNames() - self.fcstrIDs=self.__VU.getFcstrIDs() - self.fcstrNums=self.__VU.getFcstrNums() - # - # - maxwid=0 - for num in self.fcstrNums: - maxwid=max(maxwid,len(self.fcstrNames[num])) - # - self.fcbstates=[] - for i in xrange(len(self.fcstrNums)): - num=self.fcstrNums[i] - label="%s - %s"%(num,self.fcstrNames[num]) - rowframe=Tkinter.Frame(master=self.fFcs,name="f%s"%num) - var=Tkinter.IntVar() - if num in stateKeys: - var.set(state[num]) - else: - var.set(0) - self.fcbstates.append(var) - if i==0: - color="white" - else: - color=COLORLIST[i%len(COLORLIST)] - cb=Tkinter.Checkbutton(master=rowframe,text=label,indicatoron=1, - variable=var,padx=0,pady=0,name="c%s"%num, - command=self.checkSetButton) - #print "checking",num,"against usedFcstrs:",self.usedFcstrs - if ((i==0)or(num in self.usedFcstrs)): - mb=Tkinter.Button(master=rowframe,relief=Tkinter.FLAT, - command=cb.toggle,width=5, - text=" ",padx=0,pady=0,borderwidth=0, - background=color,foreground='white',name="b%s"%num, - activebackground=color,activeforeground='white') - else: - bgcol=rowframe.cget("bg") - mb=Tkinter.Button(master=rowframe, - relief=Tkinter.FLAT,command=cb.toggle,width=5, - text=" ",padx=0,pady=0,borderwidth=0, - background=bgcol,foreground='white',name="b%s"%num, - activebackground=bgcol,activeforeground='white') - mb.pack(side=Tkinter.LEFT) - if i!=0: - mb.bind("",self.postPopFcstr) - - cb.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - rowframe.pack(side=Tkinter.TOP,fill=Tkinter.X,expand=1) - if i!=0: - rowframe.bind("",self.postPopFcstr) - cb.bind("",self.postPopFcstr) - - # - # - # - df=Tkinter.Frame(master=self.fFcs).pack(side=Tkinter.TOP,fill=Tkinter.BOTH,expand=1) - # - # give the dialog a chance to update size on its own - # - tl=self.fFcs.winfo_toplevel() - tl.update_idletasks() - # - # 
Set new minimum size -based on requested width/height - # - rwid=tl.winfo_reqwidth() - rhgt=tl.winfo_reqheight() - tl.minsize(rwid,rhgt) - # - # If height of current grid is not as big as the minimum - # size, then make that change manually (if they have - # modified the size earlier - the automatic propagate wont - # make it bigger) - # - geom=tl.geometry() - (wh,rest)=geom.split("+",1) - (wid,hgt)=wh.split("x",1) - if int(hgt)0: - self.tl.setCurrentIndex(timeindex-1) - self.changeDate() - return - #------------------------------------------------------------------ - # prevCycle - move to the previous basetime in the "tl" self.times - # list of basetimes - # - def prevCycle(self): - timeindex=self.tl.getCurrentIndex() - if timeindex-1: - self.gridRecord=int(tag[4:]) - if tag.find("parm")>-1: - self.gridParm=tag[5:] - self.gridModel=self.ml.getCurrentSelection() - self.popGrid.post(event.x_root,event.y_root) - self.popGrid.grab_set() - #------------------------------------------------------------------ - # gridInfo - post the dialog with info about the particular grid - # called from the popGrid popup menu - # - def gridInfo(self): - InfoDialog(self.__VU,self.gridModel,self.gridParm,self.gridRecord,parent=self) - return - #================================================================== - # gridDelete - delete the specified grid (but give them a chance - # to back out of it first) - # called from the popGrid popup menu - # - def gridDelete(self): - # - # Give them a chance to back out of deleting an archived grid. 
- # - obsmodels=self.__VU.getCFG('OBSMODELS') - model=self.gridModel - parm=self.gridParm - record=self.gridRecord - text="" - # - # Make sure we can open this file - # - if not self.__VU.checkFile(parm,model,modify=0): - text+="Cant delete this grid" - tkMessageBox.showerror("Error",text,parent=self) - return - # - # Make different warning message depending on whether - # it is a forecast grid or an observed grid - # - if model in obsmodels: - stime=self.__VU.oncStime[record] - etime=self.__VU.oncEtime[record] - (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(stime) - (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(etime) - text+="Are you sure you want to delete the %s %s grid "%(model,parm) - text+="from %2.2dZ %4.4d/%2.2d/%2.2d through %2.2dZ %4.4d/%2.2d/%2.2d\n\n"%(shou, - syea,smon,sday,ehou,eyea,emon,eday) - else: - btime=self.__VU.fncBtime[self.gridRecord] - (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(btime) - stime=self.__VU.fncStime[self.gridRecord] - fhr=int((stime-btime)/3600.0) - text+="Are you sure you want to delete the %d-hr %s forecast "%(fhr,self.gridParm) - text+="from the %2.2dZ %4.4d/%2.2d/%2.2d run from %s?\n\n"%(bhou, - byea,bmon,bday,self.gridModel) - # - # But in all cases...give DIRE warning in messsage - # so that they think this through - # - text+="Once deleted it cannot be retreived!\n\n" - text+="CAREFULLY CONSIDER WHAT YOU ARE DOING!" 
- # - # Make sure that they want to continue - # - ynDiag=tkMessageBox.askyesno("Are you sure?",text, - parent=self,default=tkMessageBox.NO) - if not ynDiag: - return - # - # Delete the grid...and if there is an error doing that - # tell them - # - reclist=[record,] - if not self.__VU.deleteRecord(parm,model,reclist): - tkMessageBox.showerror("Error","Could not delete grid",parent=self) - return - # - # Finally...redisplay the grids for the current date - # - self.redisplayDate() - return - #------------------------------------------------------------------ - # postPopFcstr - post the popup menu that allows them to edit info - # about a forecaster, list forecasts made by a - # forecaster, or delete a forecaster - # - def postPopFcstr(self,event): - self.editFnum=str(event.widget)[-2:] - self.editFID=self.fcstrIDs[self.editFnum] - self.editFname=self.fcstrNames[self.editFnum] - self.popFcstr.post(event.x_root,event.y_root) - self.popFcstr.grab_set() - #----------------------------------------------------------------- - # editFcstr - post the dialog where the forecaster number/id/name - # can be changed. - # This is called from the popFcstr popup menu - # - def editFcstr(self): - self.numVar=Tkinter.StringVar() - self.numVar.set(self.editFnum) - self.idVar=Tkinter.StringVar() - self.idVar.set(self.editFID) - self.nameVar=Tkinter.StringVar() - self.nameVar.set(self.editFname) - ChangeCancelDialog(self.__VU, self.numVar,self.idVar,self.nameVar, - parent=self) - self.updateFcstrButtons() - return - #------------------------------------------------------------------ - # deleteFcstr - delete a forecaster from the list of forecasters - # (give them a chance to back out of it first). 
- # Any grids currently attributed to this number - # will be changed into the 'unknown' forecaster - # - def deleteFcstr(self): - # - # Don't let them delete the 'unknown' forecaster - # - num=int(self.editFnum) - name=self.editFname - if num==0: - tkMessageBox.showerror("Error","You cannot delete the Unknown user", - parent=self) - return - # - # see how many grids this number is attributed to - # - msgWindow=messageWindow("Checking on forecaster %2.2d"%num,self) - try: - self.totalgrids=0 - for parm in self.__VU.getVerParms(): - datatype=self.__VU.getVerParmType(parm) - if not self.__VU.checkFile(parm,"Official",modify=0,datatype=datatype): - continue - fnc=self.__VU.fncFcstr[:,:] - numrec=add.reduce(add.reduce(equal(fnc,num))) - self.totalgrids+=numrec - self.__VU.closeFcstFile() - except: - pass - msgWindow.destroy() - # - # Give them a chance to back out of it. - # - if self.totalgrids>0: - text= "There are %d archived grids made by "%self.totalgrids - text+="forecaster number %d : %s.\n"%(num,name) - text+="\n" - text+="Are you sure you want to delete %s and "%name - text+="associate all those grids with the Unknown " - text+="forecaster?" - else: - text= "Are you sure you want to delete forecaster " - text+="number %d : %s ?"%(num,name) - ynDiag=tkMessageBox.askyesno("Are you sure?",text, - parent=self,default=tkMessageBox.NO) - if not ynDiag: - return - # - # setup a message window because this may take a while... 
- # - text="Deleting Forecaster #%d"%num - msgWindow=messageWindow(text,self) - try: - if self.totalgrids>0: - for parm in self.__VU.getVerParms(): - #print "deleting #%d from %s"%(num,parm) - datatype=self.__VU.getVerParmType(parm) - if not self.__VU.checkFile(parm,"Official",modify=1,datatype=datatype): - #print "Could not open %s file for Official"%parm - continue - fnc=self.__VU.fncFcstr[:,:] - involved=logical_or.reduce(equal(fnc,num),1) - recsUsed=compress(involved,self.__VU.fncRecs) - for i in xrange(recsUsed.shape[0]): - rec=recsUsed[i] - fcstrs=fnc[rec,:] - #print " record %d has %s"%(rec,fcstrs) - numfcstrs=add.reduce(greater_equal(fcstrs,0)) - if numfcstrs==1: - fcstrs[equal(fcstrs,num)] = 0 - else: - fcstrs[equal(fcstrs,num)] = -127 - #print " changed to %s"%fcstrs - fnc[rec,:]=fcstrs - self.__VU.fncFcstr[:,:]=fnc[:,:].astype(int8) - self.__VU.closeFcstFile() - numstr="%2.2d"%num - Names=self.__VU.getFcstrNames() - IDs=self.__VU.getFcstrIDs() - del Names[numstr] - del IDs[numstr] - self.__VU.setFcstrs(Names,IDs) - self.__VU.saveFcstrNums() - except: - tkMessageBox.showerror("Error","Could not delete forecaster #%d"%num, - parent=self) - msgWindow.destroy() - # - # re-draw list of forecaster buttons - # - self.updateFcstrButtons() - return - #------------------------------------------------------------------ - # listFcstr - post the dialog where we display all the forecast - # made for this forecaster. - # Called by the PopFcstr popup menu - # - def listFcstr(self): - ListDialog(self.__VU,self.editFnum,self.editFID, - self.editFname,parent=self) - return - #------------------------------------------------------------------ - # addFcstr - post the dialog where we can add a forecaster. 
- # called by the popFcstr popup menu - # - def addFcstr(self): - self.numVar=Tkinter.StringVar() - self.numVar.set("") - self.idVar=Tkinter.StringVar() - self.idVar.set("") - self.nameVar=Tkinter.StringVar() - self.nameVar.set("") - AddCancelDialog(self.__VU, self.numVar,self.idVar,self.nameVar, - parent=self) - self.updateFcstrButtons() - return - #------------------------------------------------------------------ - # drawParmNames - clear the parm name list - and draw text with new - # names - # - def drawParmNames(self): - self.cLab.delete(Tkinter.ALL) - # - # Fill in parameter names - # - for i in xrange(len(self.parmList)): - parm=self.parmList[i] - yrow=i*(self.rowH)+self.yoff - self.cLab.create_text(5,yrow+3,anchor=Tkinter.NW,fill="white", - font=self.cfb,text=parm) - return - #------------------------------------------------------------------ - # scrollBothX - horizontally scrolls timebar and grid canvas - - # unless all of the X-scrollregion is already visible - # - def scrollBothX(self,*args): - sr=self.cGrd.cget('scrollregion').split() - sw=int(sr[2])-int(sr[0]) - wd=self.cGrd.winfo_width() - if wd>=sw: # abort scross/moves if all of xscrollregion already visible - return None - apply(self.cTim.xview,args) - apply(self.cGrd.xview,args) - return None - #------------------------------------------------------------------ - # scrollBothY - vertically scrolls parm lables and grid canvas - - # unless all of the Y-scrollregion is already visible - # - def scrollBothY(self,*args): - sr=self.cGrd.cget('scrollregion').split() - sh=int(sr[3])-int(sr[1]) - hg=self.cGrd.winfo_height() - if hg>=sh: # abort scrolls/moves if all of yscrollregion already visible - return None - apply(self.cLab.yview,args) - apply(self.cGrd.yview,args) - return None - #------------------------------------------------------------------ - # buttonstart - button 1 is pushed down. 
Store current location in - # xx,yy and store the starting location in xcstart,ycstart - # setup to call 'scrtest' (to test for auto-scrolling) - # if button is still down in a few milliseconds - # - def buttonstart(self,event): - self.xx=event.x - self.yy=event.y - self.xcstart=self.cGrd.canvasx(self.xx) - self.ycstart=self.cGrd.canvasy(self.yy) - self.screvent=self.cGrd.after(200,self.scrtest) - # - # If any grid boxes are 'selected' now - turn them off and - # set their color back to their original color - # - selItems=self.cGrd.find_withtag("selected") - if selItems is not None: - for item in selItems: - tags=self.cGrd.gettags(item) - for tag in tags: - if tag[0:4]=="col:": - oldcolor=tag[4:] - self.cGrd.itemconfigure(item,fill=oldcolor,outline=oldcolor,stipple="") - self.cGrd.dtag(Tkinter.ALL,"selected") - # - return "break" - #------------------------------------------------------------------ - # drag - button is held down while moving. Get new location in xx,yy - # and convert to new canvas location in xcnow,ycnow. Draw - # selection box from xcstart,ycstart to xcnow,ycnow. - # - def drag(self,event): - self.xx=event.x - self.yy=event.y - self.xcnow=self.cGrd.canvasx(self.xx) - self.ycnow=self.cGrd.canvasy(self.yy) - self.cGrd.delete('areasel') - self.cGrd.create_rectangle(self.xcstart,self.ycstart, - self.xcnow,self.ycnow, - outline="cyan",tags='areasel') - # - # Get selected grids, and any item inside the selection box - # - selItems=self.cGrd.find_withtag("selected") - inItems=self.cGrd.find_overlapping(self.xcstart,self.ycstart, - self.xcnow,self.ycnow) - # - # Check for grid items inside the selection box that are NOT - # currently in the selected list. 
For these - set them to - # selected and set their color to the highlight color - # - if inItems is not None: - for item in inItems: - tags=self.cGrd.gettags(item) - if "grid" in tags: - if item not in selItems: - newtags=list(tags) - newtags.append("selected") - self.cGrd.itemconfigure(item,fill="yellow",outline="yellow",stipple="gray12") - self.cGrd.itemconfigure(item,tags=tuple(newtags)) - # - # Check currently selected items...and if no longer in the - # selection box, then turn their color back to their original color - # - if selItems is not None: - for item in selItems: - if item not in inItems: - tags=self.cGrd.gettags(item) - if "grid" in tags: - for tag in tags: - if tag[0:4]=="col:": - oldcolor=tag[4:] - self.cGrd.itemconfigure(item,fill=oldcolor,outline=oldcolor,stipple="") - self.cGrd.dtag(item,"selected") - # - # Finally check for status of 'set selected' button - # - self.checkSetButton() - self.cGrd.update_idletasks() - return "break" - #------------------------------------------------------------------ - # scrtest - while button is down but not moving - check to see if - # pointer is in the auto-scrolling zone (within scrbuffer - # of edge of canvas) and scroll if so. If we scroll - - # then update the selection box. 
- # - def scrtest(self): - hg=self.cGrd.winfo_height() - wd=self.cGrd.winfo_width() - scrollflag=0 - if self.xx(wd-self.scrbuffer): - self.scrollBothX('scroll','1','units') - scrollflag=1 - if self.yy(hg-self.scrbuffer): - self.scrollBothY('scroll','1','units') - scrollflag=1 - # - # If we scrolled - update the area that is highlighted - # - if scrollflag==1: - self.xcnow=self.cGrd.canvasx(self.xx) - self.ycnow=self.cGrd.canvasy(self.yy) - self.cGrd.delete('areasel') - self.cGrd.create_rectangle(self.xcstart,self.ycstart, - self.xcnow,self.ycnow, - fill='',outline="cyan",tags='areasel') - self.cGrd.update_idletasks() - # - # Check again for scrolling in a few milliseconds - # - self.screvent=self.cGrd.after(50,self.scrtest) - #------------------------------------------------------------------ - # buttonstop - button 1 is released - save final position in xcnow, - # ycnow. Remove the selection box. - # - def buttonstop(self,event): - if self.screvent is not None: - self.cGrd.after_cancel(self.screvent) - self.screvent=None - self.xx=event.x - self.yy=event.y - self.xcnow=self.cGrd.canvasx(self.xx) - self.ycnow=self.cGrd.canvasy(self.yy) - self.cGrd.delete('areasel') - # - # Get selected grids, and any item inside the selection box - # - selItems=self.cGrd.find_withtag("selected") - inItems=self.cGrd.find_overlapping(self.xcstart,self.ycstart, - self.xcnow,self.ycnow) - # - # Check for grid items inside the selection box that are NOT - # currently in the selected list. 
For these - set them to - # selected and set their color to the highlight color - # - if inItems is not None: - for item in inItems: - tags=self.cGrd.gettags(item) - if "grid" in tags: - if item not in selItems: - newtags=list(tags) - newtags.append("selected") - self.cGrd.itemconfigure(item,fill="yellow",outline="yellow",stipple="gray12") - self.cGrd.itemconfigure(item,tags=tuple(newtags)) - # - # Check currently selected items...and if no longer in the - # selection box, then turn their color back to their original color - # - if selItems is not None: - for item in selItems: - if item not in inItems: - tags=self.cGrd.gettags(item) - if "grid" in tags: - for tag in tags: - if tag[0:4]=="col:": - oldcolor=tag[4:] - self.cGrd.itemconfigure(item,fill=oldcolor,outline=oldcolor,stipple="") - self.cGrd.dtag(item,"selected") - self.checkSetButton() - self.cGrd.update_idletasks() - return "break" - #------------------------------------------------------------------ - # checkSetButton - check to see if the "Set Forecasters for Selected - # Grids" button can be enabled. There have to be - # some selected grids - AND - there have to be - # some selected forecasters - # - def checkSetButton(self): - someFcstrs=0 - for i in xrange(len(self.fcstrNums)): - if self.fcbstates[i].get()>0: - someFcstrs=1 - break - # - if someFcstrs==1: - selItems=self.cGrd.find_withtag("selected") - if selItems is not None: - if len(selItems)>0: - self.scb.configure(state=Tkinter.NORMAL) - return - # - self.scb.configure(state=Tkinter.DISABLED) - return - #------------------------------------------------------------------ - # - # extract forecasters for this grid into the currently selected - # forecasters. 
- # - def extract(self,event): - curgrid=self.cGrd.find_withtag(Tkinter.CURRENT) - grtags=self.cGrd.gettags(curgrid) - for tag in grtags: - if tag.find("rec")>-1: - self.gridRecord=int(tag[4:]) - if tag.find("parm")>-1: - self.gridParm=tag[5:] - self.gridModel=self.ml.getCurrentSelection() - - if self.__VU.checkFile(self.gridParm,self.gridModel,modify=0): - fcstrs=self.__VU.fncFcstr[self.gridRecord,:] - self.fcstrNums=self.__VU.getFcstrNums() - for i in xrange(len(self.fcstrNums)): - self.fcbstates[i].set(0) - for i in xrange(fcstrs.shape[0]): - fnum=fcstrs[i] - if fnum>0: - fnumstr="%2.2d"%fnum - if fnumstr in self.fcstrNums: - idx=self.fcstrNums.index(fnumstr) - self.fcbstates[idx].set(1) - self.checkSetButton() -#===================================================================== -# -# Custom comboList widget -# -# User sees currently selected entry from list of entries, and a -# pulldown button. When pulldown is activated the list is shown - -# with scrollbars (if needed) and the user can click on the entry -# desired. The callbackMethod is called when the user chooses an -# entry - and you can get the currentEntry with getCurrentEntry method -# and currentIndex with getCurrentIndex method. 
-# -class comboList(Tkinter.Frame): - def __init__(self,defaultEntry,entryList,parent=None,callbackMethod=None, - width=0,height=5): - if defaultEntry not in entryList: - return - Tkinter.Frame.__init__(self,parent) - self.__callbackMethod=callbackMethod - self.entries=[] - for entry in entryList: - self.entries.append(entry) - self.currentIndex=self.entries.index(defaultEntry) - self.currentSelection=defaultEntry - - if width==0: - for entry in self.entries: - width=max(len(entry),width) - width+=1 - # - # Make the popup chooser - # - self.opop=Tkinter.Toplevel() - self.opop.withdraw() - of=Tkinter.Frame(self.opop) - if len(self.entries)>height: - os=Tkinter.Scrollbar(of,orient=Tkinter.VERTICAL) - self.ol=Tkinter.Listbox(of,width=width,height=height, - yscrollcommand=os.set, - selectmode=Tkinter.SINGLE, - exportselection=0) - os.config(command=self.ol.yview) - os.pack(side=Tkinter.RIGHT,fill=Tkinter.Y) - else: - self.ol=Tkinter.Listbox(of,width=width,height=height) - for entry in self.entries: - self.ol.insert(Tkinter.END,entry) - self.ol.pack(side=Tkinter.LEFT,fill=Tkinter.BOTH,expand=1) - of.pack(side=Tkinter.TOP) - self.ol.bind("",self.removePopup) - self.opop.transient(parent) - self.opop.overrideredirect(1) - self.opop.update_idletasks() - popwidth=self.opop.winfo_reqwidth() - if (len(self.entries)<=height): - popwidth+=21 - popheight=self.opop.winfo_reqheight() - hpl=popheight/height - # - # Make the display of current entry and pulldown button - # - self.cf=Tkinter.Frame(parent,width=popwidth) - self.cl=Tkinter.Listbox(self.cf,width=width,height=1, - selectmode=Tkinter.SINGLE, - exportselection=0) - self.cl.insert(Tkinter.END,defaultEntry) - self.cl.pack(side=Tkinter.LEFT) - self.cl.update_idletasks() - cw=self.cl.winfo_reqwidth() - ch=self.cl.winfo_reqheight() - canw=popwidth-cw-6 - canh=ch-6 - bw=2 # border width - cc=Tkinter.Canvas(self.cf,width=canw,height=canh, - relief=Tkinter.RAISED,bd=bw) - tsize=min(canw,canh) - toffx=((canw-tsize)/2.0)+bw+bw - 
toffy=((canh-tsize)/2.0)+bw+bw - twid=tsize-bw-bw - x0=toffx - y0=toffy - x1=toffx+twid - y1=toffy - x2=toffx+(twid/2) - y2=toffy+twid - cp=cc.create_polygon(x0,y0,x1,y1,x2,y2,fill="black") - cc.pack(side=Tkinter.LEFT) - #self.cf.pack(side=Tkinter.TOP) - self.cl.bind("",self.postPopup) - cc.bind("",self.postPopup) - return - def postPopup(self,event): - curgeom=self.cf.winfo_geometry() - (wh,rest)=curgeom.split("+",1) - (w,h)=wh.split("x",1) - iw=int(w) - ih=int(h) - x=self.cf.winfo_rootx() - y=self.cf.winfo_rooty() - newgeom="+%d+%d"%(x,y+ih) - self.opop.geometry(newgeom) - self.opop.deiconify() - self.ol.select_clear(0,Tkinter.END) - self.ol.select_set(self.currentIndex) - self.ol.see(self.currentIndex) - popgeom=self.opop.geometry() - (wh,rest)=popgeom.split("+",1) - (iw,ih)=wh.split("x",1) - self.iw=int(iw) - self.ih=int(ih) - self.opop.grab_set_global() # once you get here - you MUST choose - #self.opop.grab_set() - self.opop.focus_set() - self.opop.bind("",self.closePopup) - self.opop.bind("",self.popClick) - return "break" - #------------------------------------------------------------------ - # popClick - Test if they are clicking in the list - if so - they - # might release inside the list - and that will get - # captured in removePopup. 
If not - then close the - # popup without choosing - # - def popClick(self,event): - x=event.x - y=event.y - if ((x>self.iw)or(x<1)): - return self.closePopup(event) - if ((y>self.ih)or(y<1)): - return self.closePopup(event) - return "break" - #------------------------------------------------------------------ - # removePopup - called when they choose one in the list - # - def removePopup(self,event): - selectIndex=int(self.ol.nearest(event.y)) - self.currentIndex=selectIndex - newEntry=self.entries[selectIndex] - self.currentSelection=newEntry - self.opop.grab_release() - self.cl.delete(0) - self.cl.insert(0,newEntry) - self.cf.focus_set() - self.opop.withdraw() - self.__callbackMethod() - return - #------------------------------------------------------------------ - # closePopup - called when they dont pick from the list - but - # need to close the popup - # - def closePopup(self,event): - #self.cf.focus_set() - self.opop.grab_release() - self.opop.unbind("") - self.opop.withdraw() - return - #------------------------------------------------------------------ - # getCurrentSelection - tells you the currently selected text - # - def getCurrentSelection(self): - return self.currentSelection - #------------------------------------------------------------------ - # getCurrentIndex - tells you the index of the currently selected - # text - # - def getCurrentIndex(self): - return self.currentIndex - #------------------------------------------------------------------ - # delIndex - delete the specified index from the entries - # - def delIndex(self,index): - if ((index<0)or(index>=len(self.entries))): - return - curSel=self.currentSelection - self.ol.delete(index) - del self.entries[index] - if curSel in self.entries: - self.currentIndex=self.entries.index[curSel] - else: - self.currentIndex=min(self.currentIndex,len(self.entries)-1) - self.currentSelection=self.entries[self.currentIndex] - self.cl.delete(0,Tkinter.END) - 
self.cl.insert(Tkinter.END,self.currentSelection) - return - #------------------------------------------------------------------ - # setCurrentIndex - set the selected entry to the specified index - # in entries - # - def setCurrentIndex(self,index): - if ((index<0)or(index>=len(self.entries))): - return - self.currentIndex=index - self.currentSelection=self.entries[index] - self.cl.delete(0,Tkinter.END) - self.cl.insert(Tkinter.END,self.currentSelection) - return - #------------------------------------------------------------------ - # setCurrentSelection - set the selected entry to the specified - # entry - # - def setCurrentSelection(self,selection): - if selection not in self.entries: - return - index=self.entries.index[selection] - self.currentIndex=index - self.currentSelection=self.entries[index] - self.cl.delete(0,Tkinter.END) - self.cl.insert(Tkinter.END,self.currentSelection) - return - #------------------------------------------------------------------ - # delValue - delete the specified entry - # - def delValue(self,value): - if value in self.entries: - indexdel=self.entries.index(value) - self.delIndex(indexdel) - return - #------------------------------------------------------------------ - # newEntries - replace all the entries with a new list of entries. - # If the currently selected entry is in the new - # list of entries - then select it in the new list - # otherwise select the first entry - # - def newEntries(self,newList,newDef): - if len(newList)>0: - self.ol.delete(0,Tkinter.END) - self.entries=[] - for entry in newList: - self.entries.append(entry) - self.ol.insert(Tkinter.END,entry) - if newDef in newList: - self.setCurrentIndex(newList.index(newDef)) - else: - self.setCurrentIndex(0) - return -#===================================================================== -# Create a basic 'message window' indicating that something -# is happening. 
Must be careful to destroy this...because there -# is no way for the user to destroy this if something goes wrong. -# -def messageWindow(message,parent=None): - if parent is None: - return - pwid=parent.winfo_width() - phgt=parent.winfo_height() - px=parent.winfo_rootx() - py=parent.winfo_rooty() - msgWindow=Tkinter.Toplevel(master=parent) - msgWindow.resizable(0,0) - msgWindow.transient() - msgWindow.overrideredirect(1) - msgLab=Tkinter.Label(master=msgWindow,text=message, - relief=Tkinter.RIDGE,height=5,width=29,borderwidth=4) - msgLab.pack(side=Tkinter.TOP) - msgWindow.update_idletasks() - wid=msgWindow.winfo_width() - hgt=msgWindow.winfo_height() - nx=int(px+(pwid/2.0)-(wid/2.0)) - ny=int(py+(phgt/2.0)-(hgt/2.0)) - msgWindow.geometry("%dx%d+%d+%d"%(wid,hgt,nx,ny)) - msgWindow.update_idletasks() - return msgWindow -#===================================================================== -# -# stuff to support a callback with a pre-known variable -# -def GenericCallback(callback, *firstArgs, **firstKWArgs): - if firstKWArgs: - return GC(callback, *firstArgs, **firstKWArgs) - else: - return GCNoKWArgs(callback, *firstArgs) -#===================================================================== -# -# Classes for callbacks -# -class GC: - def __init__(self,callback,*firstArgs, **firstKWArgs): - self.__callback=callback - self.__firstArgs=firstArgs - self.__firstKWArgs=firstKWArgs - def __call__(self, *lastArgs, **kwArgs): - if kwArgs: - netKWArgs=self.__firstKWArgs.copy() - netKWArgs.update(self.__kwArgs) - else: - netKWArgs=self.__firstKWArgs - return self.__callback (*(self.__firstArgs+lastArgs),**netKWArgs) -class GCNoKWArgs: - def __init__(self, callback, *firstArgs): - self.__callback=callback - self.__firstArgs=firstArgs - def __call__(self, *args, **kwArgs): - return self.__callback (*(self.__firstArgs+args),**kwArgs) +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", 
without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# BOIVerifyInfo - version 2.0.5 +# +# Allows changes to forecaster numbers - and changes any current records +# to match the new numbers. Allows deletion of grids in the archived +# database (i.e. to remove bad grids) +# +# Author: Tim Barker - SOO BOI +# 2007/11/06 - version 2.0 - Original Documented Implementation +# 2008/05/28 - version 2.0.5 - added ability to show and delete grids +# from obs models +# +# +# 2010/04/23 ryu Initial port to AWIPS II. +# +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +MenuItems = ["Verify"] + +from numpy import * +from math import * +import tkinter,tkinter.font,tkinter.messagebox +import TkDefaults +import time +import SmartScript +import BOIVerifyUtility + +PROGNAME="BOIVerifyInfo" # you can change it if you dont like BOI. Shame on you! + +MONS=["DUM","Jan","Feb","Mar","Apr","May","Jun","Jul","Aug","Sep","Oct","Nov","Dec"] +DAYS=["Mon","Tue","Wed","Thu","Fri","Sat","Sun"] +COLORLIST=["Cyan","Orange","PaleGreen","Red","Blue","Magenta","ForestGreen","Sienna", + "Pink","Green","purple","Yellow","Tan","Turquoise","SteelBlue", + "chartreuse","Gold","tomato","Violet","aquamarine","Coral"] +#===================================================================== +# The dummy procedure - which does nothing more than start the +# non-modal BOIVerifyInfo dialog box. 
+# +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + self._dbss=dbss + SmartScript.SmartScript.__init__(self, dbss) + return + #------------------------------------------------------------------ + # execute - simply starts the non-modal "Info" dialog box + # + def execute(self): + self.statusBarMsg("Starting %s"%PROGNAME,"R") + + tk = tkinter.Tk() + sw = tk.winfo_screenwidth() + sh = tk.winfo_screenheight() + tk.geometry("%dx%d+0+0" % (sw,sh)) + self.root = tk + + TkDefaults.setDefaults(tk) + + self.VU=BOIVerifyUtility.BOIVerifyUtility(self._dbss, None) + self.dialog=VerifInfo(self.VU, parent=tk, + name="BOIVerify Grid Archive Info", + callbackMethod=self.doneInfo, + modal=0, + ) + tk.withdraw() + tk.mainloop() + + self.statusBarMsg("Finished starting %s"%PROGNAME,"R") + return + #------------------------------------------------------------------ + # doneInfo - called when the Info dialog is closed (with the button + # name as an argument + # + def doneInfo(self,buttonType): + self.root.destroy() + return +#===================================================================== +# +# Basic Class for a dialog - similar to IFPDialog.Dialog +# +class Dialog(tkinter.Toplevel): + def __init__(self,parent=None,title=None,modal=0): + if parent is None: + return + tkinter.Toplevel.__init__(self,parent) + #self.transient(parent) + if title: + self.title(title) + self.parent=parent + self.result=None + self.buttonbox() + bodyFrame=tkinter.Frame(self) + self.initial_focus=self.body(bodyFrame) + bodyFrame.pack(padx=5,pady=5,fill=tkinter.BOTH,expand=1) + bodyFrame.pack_propagate(1) + # + if not self.initial_focus: + self.initial_focus=self + self.protocol("WM_DELETE_WINDOW", self.cancel) + self.geometry("+%d+%d"%(parent.winfo_rootx()+50, + parent.winfo_rooty()+50)) + self.initial_focus.focus_set() + + self.wait_visibility() + self.update_idletasks() + geom=self.geometry() + (wh,rest)=geom.split("+",1) + (wid,hgt)=wh.split("x",1) + self.minsize(wid,hgt) + + 
if modal==1: + self.grab_set() + self.wait_window(self) + return self.result + else: + return self + #------------------------------------------------------------------ + # body - normally overridden with the stuff you want to display + # in the dialog box + # + def body(self,master): + pass + #------------------------------------------------------------------ + # buttonbox - displays the buttonbox at the bottom of the dialog. + # Normally has OK and Cancel buttons - but can be + # overridden to have any buttons desired + # + def buttonbox(self): + box=tkinter.Frame(self) + w=tkinter.Button(box,text="Ok",width=10,command=self.ok, + default=tkinter.ACTIVE) + w.pack(side=tkinter.LEFT,padx=5,pady=5) + w=tkinter.Button(box,text="Cancel",width=10,command=self.cancel) + w.pack(side=tkinter.LEFT,padx=5,pady=5) + box.pack(side=tkinter.BOTTOM) + #------------------------------------------------------------------ + # ok - called when the OK button is pressed. Calls validate to + # see if the input is OK. If the input is OK it removes + # the dialog and does the action specified in apply + # If the input has some problem - it returns to the dialog + # + def ok(self,event=None): + if not self.validate(): + self.initial_focus.focus_set() + return + self.withdraw() + self.update_idletasks() + self.apply() + self.cancel() + #------------------------------------------------------------------ + # cancel - called when the Cancel button is pressed - and when + # everything else is done. Destroys the dialog + # + def cancel(self,event=None): + self.parent.focus_set() + self.destroy() + #------------------------------------------------------------------ + # validate - normally overridden with stuff that checks the input + # on the dialog box. Should return 1 if input is OK, + # and 0 if not. 
+ # + def validate(self): + return 1 + #------------------------------------------------------------------ + # apply - normally overridden with stuff that needs to be done + # when the dialog input has been validated and it is + # OK to proceed. + # + def apply(self): + pass +#===================================================================== +# ChangeCancelDialog - a Dialog to change the forecaster number, +# forecaster ID, or forecater name +# +class ChangeCancelDialog(Dialog): + def __init__(self, VU, numberVar, idVar, nameVar, + parent=None, name="Edit Forecaster", callbackMethod=None, + modal=1): + + self.__parent = parent + self.__name = name + self.__modal = modal + self.__callbackMethod = callbackMethod + self.__VU=VU + self.__numberVar=numberVar + self.__idVar=idVar + self.__nameVar=nameVar + self.__oldNum=self.__numberVar.get() + self.__oldID=self.__idVar.get() + self.__oldName=self.__nameVar.get() + self.__dialog=Dialog.__init__(self,parent=self.__parent, + title=self.__name, + modal=self.__modal) + return + #----------------------------------------------------------------- + # buttonbox - special buttonbox with Change and Cancel buttons + # + def buttonbox(self): + buttonFrame = tkinter.Frame(self) + tkinter.Button(buttonFrame, text="Change", width=7, + command=self.changeCB).pack(side=tkinter.LEFT, pady=5, padx=10) + tkinter.Button(buttonFrame, text="Cancel",width=7, + command=self.cancelCB).pack(side=tkinter.LEFT,pady=5, padx=10) + buttonFrame.pack(side=tkinter.BOTTOM,expand=0) + #------------------------------------------------------------------ + # body - special body with the current number, username, and + # display name shown + # + def body(self, master): + tkinter.Label(master,text="Number:").grid(column=0,row=0,sticky=tkinter.E) + tkinter.Label(master,text="Username:").grid(column=0,row=1,sticky=tkinter.E) + tkinter.Label(master,text="Display Name:").grid(column=0,row=2,sticky=tkinter.E) + 
self.numEntry=tkinter.Entry(master,textvariable=self.__numberVar,width=2) + self.numEntry.grid(column=1,row=0,sticky=tkinter.W) + self.idEntry=tkinter.Entry(master,textvariable=self.__idVar,width=8) + self.idEntry.grid(column=1,row=1,sticky=tkinter.W) + self.nameEntry=tkinter.Entry(master,textvariable=self.__nameVar,width=25) + self.nameEntry.grid(column=1,row=2,sticky=tkinter.W) + #------------------------------------------------------------------ + # changeCB - called when they click on Change. Need to validate + # everything to make sure the changes are OK. + # + def changeCB(self): + # + # Check forecaster number for just a number, between 1 and 99 + # + newNumStr=self.__numberVar.get().strip() + try: + num=int(newNumStr) + except: + tkinter.messagebox.showerror("Error","Forecaster Number needs to be an integer",parent=self) + self.numEntry.selection_range(0,tkinter.END) + self.numEntry.focus_set() + return + if ((num<0)or(num>99)): + tkinter.messagebox.showerror("Error","Forecater Number needs to be between 1 and 99", + parent=self) + self.numEntry.selection_range(0,tkinter.END) + self.numEntry.focus_set() + return + if (num==0): + tkinter.messagebox.showerror("Error","Forecaster Number 0 cannot be used", + parent=self) + self.numEntry.selection_range(0,tkinter.END) + self.numEntry.focus_set() + return + # + # If changing numbers - make sure the new number is not already + # in use. 
+ # + if num!=int(self.__oldNum): + curNums=self.__VU.getFcstrNums() + for testNum in curNums: + if int(testNum)==num: + tkinter.messagebox.showerror("Errror","Forecaster Number %d is already in use"%num, + parent=self) + self.numEntry.selection_range(0,tkinter.END) + self.numEntry.focus_set() + return + # + # Must provide a username + # + newID=self.__idVar.get().strip() + if len(newID)==0: + tkinter.messagebox.showerror("Error","You must provide a username", + parent=self) + self.idEntry.selection_range(0,tkinter.END) + self.idEntry.focus_set() + return + # + # username cannot be the same as any other current username + # + if newID!=self.__oldID: + curIDs=self.__VU.getFcstrIDs() + for testID in list(curIDs.values()): + if testID==newID: + tkinter.messagebox.showerror("Error","Username %s is already in use"%newID, + parent=self) + self.idEntry.selection_range(0,tkinter.END) + self.idEntry.focus_set() + return + # + # Must provide a display name + # + newName=self.__nameVar.get().strip() + if len(newName)==0: + tkinter.messagebox.showerror("Error","You must provide a display name", + parent=self) + self.nameEntry.selection_range(0,tkinter.END) + self.nameEntry.focus_set() + return + # + # If everything the same as when we started - treat this + # the same as a cancel + # + if ((num==int(self.__oldNum))and(newID==self.__oldID)and(newName==self.__oldName)): + self.cancel() + return + # + # If number changes - need to do a lot more stuff - so ask them + # if they are sure - and do it if they say yes + # + if (num!=int(self.__oldNum)): + oldnum=int(self.__oldNum) + text= "It will take a while to change forecasts " + text+="attributed to old forecaster number %d "%oldnum + text+="to new forecaster number %d.\n"%num + text+="\n" + text+="Are you sure you want to proceed?" 
+ ynDiag=tkinter.messagebox.askyesno("Are you sure?",text, + parent=self,default=tkinter.messagebox.NO) + if not ynDiag: + self.__callbackMethod("Change") + self.cancel() + return + # + # OK - all input is valid and user wants to proceed + # so remove the 'change' dialog and actually make + # the changes + # + self.withdraw() + self.update_idletasks() + # + # make number changes (could take some time) + # + if (num!=int(self.__oldNum)): + self.changeNumbers() + # + # Change IDs and names - should be fast + # + Names=self.__VU.getFcstrNames() + IDs=self.__VU.getFcstrIDs() + numStr="%2.2d"%num + Names[numStr]=newName + IDs[numStr]=newID + self.__VU.setFcstrs(Names,IDs) + self.__VU.saveFcstrNums() + # + if self.__callbackMethod is not None: + self.__callbackMethod("Change") + self.cancel() + #------------------------------------------------------------------ + # cancelCB - called when they click on Cancel when making + # changes + # + def cancelCB(self): + if self.__callbackMethod is not None: + self.__callbackMethod("Cancel") + self.cancel() + #------------------------------------------------------------------ + # changeNumbers - gets called if they REALLY want to change + # numbers for a forecaster. 
Goes through all + # Official datafiles and changes all grids + # associated with old number to new number + # + def changeNumbers(self): + newNumStr=self.__numberVar.get().strip() + num=int(newNumStr) + oldnum=int(self.__oldNum) + for parm in self.__VU.getVerParms(): + datatype=self.__VU.getVerParmType(parm) + if not self.__VU.checkFile(parm,"Official",modify=1,datatype=datatype): + continue + fnc=self.__VU.fncFcstr[:,:] + count=add.reduce(add.reduce(equal(fnc,oldnum))) + nnc=where(equal(fnc,oldnum),num,fnc) + self.__VU.fncFcstr[:,:]=nnc[:,:].astype(int8) + self.__VU.closeFcstFile() + newID=self.__idVar.get().strip() + newName=self.__nameVar.get().strip() + Names=self.__VU.getFcstrNames() + IDs=self.__VU.getFcstrIDs() + del Names[self.__oldNum] + del IDs[self.__oldNum] + newNumStr="%2.2d"%num + Names[newNumStr]=newName + IDs[newNumStr]=newID + self.__VU.setFcstrs(Names,IDs) + self.__VU.saveFcstrNums() + return +#===================================================================== +# AddCancelDialog - for when they want to add a forecaster +# +class AddCancelDialog(Dialog): + def __init__(self, VU, numberVar, idVar, nameVar, + parent=None, name="Add Forecaster", callbackMethod=None, + modal=1): + + self.__parent = parent + self.__name = name + self.__modal = modal + self.__callbackMethod = callbackMethod + self.__VU=VU + self.__numberVar=numberVar + self.__idVar=idVar + self.__nameVar=nameVar + + self.__dialog=Dialog.__init__(self,parent=self.__parent, + title=self.__name, + modal=self.__modal) + return + #------------------------------------------------------------------ + # buttonbox - special buttonbox with Add and Cancel buttons + # + def buttonbox(self): + buttonFrame = tkinter.Frame(self) + tkinter.Button(buttonFrame, text="Add", width=7, + command=self.addCB).pack(side=tkinter.LEFT, pady=5, padx=10) + tkinter.Button(buttonFrame, text="Cancel",width=7, + command=self.cancelCB).pack(side=tkinter.LEFT,pady=5, padx=10) + 
buttonFrame.pack(side=tkinter.BOTTOM,expand=0) + #------------------------------------------------------------------ + # body - special body with the current number, username, and + # display name shown + # + def body(self, master): + tkinter.Label(master,text="Number:").grid(column=0,row=0,sticky=tkinter.E) + tkinter.Label(master,text="Username:").grid(column=0,row=1,sticky=tkinter.E) + tkinter.Label(master,text="Display Name:").grid(column=0,row=2,sticky=tkinter.E) + self.numEntry=tkinter.Entry(master,textvariable=self.__numberVar,width=2) + self.numEntry.grid(column=1,row=0,sticky=tkinter.W) + self.idEntry=tkinter.Entry(master,textvariable=self.__idVar,width=8) + self.idEntry.grid(column=1,row=1,sticky=tkinter.W) + self.nameEntry=tkinter.Entry(master,textvariable=self.__nameVar,width=25) + self.nameEntry.grid(column=1,row=2,sticky=tkinter.W) + #------------------------------------------------------------------ + # addCB - called when they click on Add. Need to validate + # everything to make sure the changes are OK. + # + def addCB(self): + # + # Check forecaster number for just a number, between 1 and 99 + # + newNumStr=self.__numberVar.get().strip() + try: + num=int(newNumStr) + except: + tkinter.messagebox.showerror("Error","Forecaster Number needs to be an integer number", + parent=self) + self.numEntry.selection_range(0,tkinter.END) + self.numEntry.focus_set() + return + if ((num<0)or(num>99)): + tkinter.messagebox.showerror("Error","Forecater Number needs to be between 1 and 99", + parent=self) + self.numEntry.selection_range(0,tkinter.END) + self.numEntry.focus_set() + return + if (num==0): + tkinter.messagebox.showerror("Error","Forecaster Number 0 cannot be used", + parent=self) + self.numEntry.selection_range(0,tkinter.END) + self.numEntry.focus_set() + return + # + # Make sure the new number is not already + # in use. 
+ # + curNums=self.__VU.getFcstrNums() + for testNum in curNums: + if int(testNum)==num: + tkinter.messagebox.showerror("Error","Forecaster Number %d is already in use"%num, + parent=self) + self.numEntry.selection_range(0,tkinter.END) + self.numEntry.focus_set() + return + # + # Must provide a username + # + newID=self.__idVar.get().strip() + if len(newID)==0: + tkinter.messagebox.showerror("Error","You must provide a username", + parent=self) + self.idEntry.selection_range(0,tkinter.END) + self.idEntry.focus_set() + return + # + # username cannot be the same as any other current username + # + curIDs=self.__VU.getFcstrIDs() + for testID in list(curIDs.values()): + if testID==newID: + tkinter.messagebox.showerror("Error","Username %s is already in use"%newID, + parent=self) + self.idEntry.selection_range(0,tkinter.END) + self.idEntry.focus_set() + return + # + # Must provide a display name + # + newName=self.__nameVar.get().strip() + if len(newName)==0: + tkinter.messagebox.showerror("Error","You must provide a display name", + parent=self) + self.nameEntry.selection_range(0,tkinter.END) + self.nameEntry.focus_set() + return + # + # No number changes - but change IDs and names + # + self.withdraw() + self.update_idletasks() + Names=self.__VU.getFcstrNames() + IDs=self.__VU.getFcstrIDs() + numStr="%2.2d"%num + Names[numStr]=newName + IDs[numStr]=newID + self.__VU.setFcstrs(Names,IDs) + self.__VU.saveFcstrNums() + # + if self.__callbackMethod is not None: + self.__callbackMethod("Add") + self.cancel() + #------------------------------------------------------------------ + # cancelCB - called when they click on Cancel when making + # changes + # + def cancelCB(self): + if self.__callbackMethod is not None: + self.__callbackMethod("Cancel") + self.cancel() +#============================================================================== +# doneDialog - a generic dialog class with a single DONE button at the bottom +# +class doneDialog(Dialog): + def __init__(self, 
parent=None, name="nonModal Dialog", callbackMethod=None, + modal=1): + + self.__parent = parent + self.__name = name + self.__modal = modal + self.__callbackMethod = callbackMethod + self.__dialog=Dialog.__init__(self,parent=self.__parent, + title=self.__name, + modal=self.__modal) + return self.__dialog + def buttonbox(self): + buttonFrame = tkinter.Frame(self) + tkinter.Button(buttonFrame, text="Done", width=10, + command=self.doneCB).pack(side=tkinter.RIGHT, pady=5, padx=10) + buttonFrame.pack(side=tkinter.BOTTOM,expand=0) + def body(self, master): + bodylabel=tkinter.Label(master,text="This is the body of doneDialog") + bodylabel.pack(side=tkinter.BOTTOM) + def doneCB(self): + if self.__callbackMethod is not None: + self.__callbackMethod("Done") + self.cancel() +#======================================================================= +# ListDialog - shows all the forecasts made by the specified +# forecaster number +# +class ListDialog(doneDialog): + def __init__(self, VU, numstr, idstr, namestr, parent=None, + name="List of forecasts", callbackMethod=None, modal=1): + self.__VU=VU + self.__numstr=numstr + self.__idstr=idstr + self.__namestr=namestr + self.__parent=parent + doneDialog.__init__(self,parent=parent,name=name,callbackMethod=callbackMethod, + modal=modal) + def body(self,master): + sb=tkinter.Scrollbar(master=master) + sb.pack(side=tkinter.RIGHT,fill=tkinter.Y) + self.cf=tkinter.font.Font(family="Courier",size=-12) + txt=tkinter.Text(master=master,width=60,height=30,yscrollcommand=sb.set, + font=self.cf) + txt.pack(side=tkinter.LEFT,fill=tkinter.BOTH,expand=1) + sb.config(command=txt.yview) + text="" + maxparmwidth=0 + totalgrids=0 + oldnum=int(self.__numstr) + fcstsMade={} + for parm in self.__VU.getVerParms(): + maxparmwidth=max(maxparmwidth,len(parm)) + datatype=self.__VU.getVerParmType(parm) + if not self.__VU.checkFile(parm,"Official",modify=0,datatype=datatype): + continue + fnc=self.__VU.fncFcstr[:,:] + 
involved=logical_or.reduce(equal(fnc,oldnum),1) + recsUsed=compress(involved,self.__VU.fncRecs) + for i in range(recsUsed.shape[0]): + rec=int(recsUsed[i]) + totalgrids+=1 + basekey="%d"%self.__VU.fncBtime[rec] + if basekey not in list(fcstsMade.keys()): + fcstsMade[basekey]={} + if parm not in list(fcstsMade[basekey].keys()): + fcstsMade[basekey][parm]=(1,self.__VU.fncStime[rec],self.__VU.fncEtime[rec]) + else: + (num,start,end)=fcstsMade[basekey][parm] + num+=1 + start=min(self.__VU.fncStime[rec],start) + end=max(self.__VU.fncEtime[rec],end) + fcstsMade[basekey][parm]=(num,start,end) + self.__VU.closeFcstFile() + # + # Display the data + # + cbb=tkinter.font.Font(family="Courier",size=-14,weight=tkinter.font.BOLD) + cb=tkinter.font.Font(family="Courier",size=-12,weight=tkinter.font.BOLD) + txt.tag_config("title",font=cbb) + txt.tag_config("date",font=cb) + txt.insert(tkinter.END,"%s Grids:\n\n"%self.__namestr,("title")) + if totalgrids==0: + txt.insert(tkinter.END," NONE\n\n",("date")) + bases=list(fcstsMade.keys()) + bases.sort() + bases.reverse() + fmtn=" %%%ds: %%3d grids made from %%3d to %%3d hours\n"%(maxparmwidth) + fmt1=" %%%ds: %%3d grid made from %%3d to %%3d hours\n"%(maxparmwidth) + for base in bases: + (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(int(base)) + txt.insert(tkinter.END,"%s %s %d, %4.4d %2.2dZ:\n"%(DAYS[gwda], + MONS[gmon],gday,gyea,ghou),("date")) + made=fcstsMade[base] + parms=list(made.keys()) + parms.sort() + for parm in parms: + (num,start,end)=made[parm] + starthr=int((start-int(base))/3600.) + endhr=int((end-int(base))/3600.) 
+ fmt=fmtn + if num==1: + fmt=fmt1 + txt.insert(tkinter.END,fmt%(parm,num,starthr,endhr)) + txt.insert(tkinter.END,"\n") + txt.config(state=tkinter.DISABLED) + return +#======================================================================= +# InfoDialog - shows info about the specified grid +# +class InfoDialog(doneDialog): + def __init__(self, VU, model, parm, record, parent=None, + name="Grid Info", callbackMethod=None, modal=1): + self.__VU=VU + self.__model=model + self.__parm=parm + self.__record=record + self.__parent=parent + doneDialog.__init__(self,parent=parent,name=name,callbackMethod=callbackMethod, + modal=modal) + def body(self,master): + sb=tkinter.Scrollbar(master=master) + sb.pack(side=tkinter.RIGHT,fill=tkinter.Y) + self.cf=tkinter.font.Font(family="Courier",size=-12) + txt=tkinter.Text(master=master,width=60,height=20,yscrollcommand=sb.set, + font=self.cf) + txt.pack(side=tkinter.LEFT,fill=tkinter.BOTH,expand=1) + sb.config(command=txt.yview) + obsmodels=self.__VU.getCFG('OBSMODELS') + text="" + # + if not self.__VU.checkFile(self.__parm,self.__model,modify=0): + text+="\n\nCould not read info for %s %s grid! 
\n\n"%(self.__model,self.__parm) + else: + if self.__model in obsmodels: + btime=self.__VU.oncBtime[self.__record] + stime=self.__VU.oncStime[self.__record] + etime=self.__VU.oncEtime[self.__record] + vtime=self.__VU.oncVtime[self.__record] + fnums=[] + else: + btime=self.__VU.fncBtime[self.__record] + stime=self.__VU.fncStime[self.__record] + etime=self.__VU.fncEtime[self.__record] + vtime=self.__VU.fncVtime[self.__record] + fnums=self.__VU.fncFcstr[self.__record,:] + + datatype=self.__VU.getVerParmType(self.__parm) + gridData=self.__VU.readRecord(self.__parm,self.__model,self.__record) + if datatype!=1: + minval=minimum.reduce(minimum.reduce(gridData)) + maxval=maximum.reduce(maximum.reduce(gridData)) + sum=add.reduce(add.reduce(gridData)) + sumsqr=add.reduce(add.reduce(gridData*gridData)) + else: + (mag,direc)=gridData + minval=minimum.reduce(minimum.reduce(mag)) + maxval=maximum.reduce(maximum.reduce(mag)) + sum=add.reduce(add.reduce(mag)) + sumsqr=add.reduce(add.reduce(mag*mag)) + gs = self.__VU.getGridShape() + numpts=gs[0]*gs[1] + avg=sum/numpts + std=sqrt((sumsqr/numpts)-(avg*avg)) + self.__VU.closeFcstFile() + + prec=self.__VU.getParmPrecision(self.__model,self.__parm) + if prec>0: + fmt1="%%.%df"%prec + fmt2="%%.%df"%(prec+1) + else: + fmt1="%d" + fmt2="%.1f" + (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(btime) + (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(stime) + (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(etime) + (vyea,vmon,vday,vhou,vmin,vsec,vwda,vyda,vdst)=time.gmtime(vtime) + if self.__model in obsmodels: + if (sday==eday): + text+="\n%s %s grid from %2.2d-%2.2dZ %4.4d/%2.2d/%2.2d\n"%(self.__model, + self.__parm,shou,ehou,syea,smon,sday) + else: + if (smon==emon): + text+="\n%s %s grid from %2.2d %2.2dZ through %2.2d %2.2dZ %4.4d/%2.2d\n"%(self.__model, + self.__parm,sday,shou,eday,ehou,syea,smon) + else: + text+="\n%s %s grid from %4.4d/%2.2d/%2.2d %2.2dZ through %4.4d/%2.2d/%2.2d 
%2.2dZ\n"%(self.__model, + self.__parm,syea,smon,sday,shou,eyea,emon,eday,ehou) + else: + text+="\n%s %s grid from %2.2dZ run %4.4d/%2.2d/%2.2d\n"%(self.__model, + self.__parm,bhou,byea,bmon,bday) + text+="\n" + # + # Show forecast hour and valid time + # + if self.__model not in obsmodels: + fhr=int((stime-btime)/3600.0) + text+=" %d-hr forecast\n"%fhr + if (sday==eday): + text+=" Valid: %2.2d-%2.2dZ %4.4d/%2.2d/%2.2d\n"%(shou,ehou, + syea,smon,sday) + else: + if (smon==emon): + text+=" Valid: %2.2d %2.2dZ through %2.2d %2.2dZ %4.4d/%2.2d\n"%(sday, + shou,eday,ehou,syea,smon) + else: + text+=" Valid: %4.4d/%2.2d/%2.2d %2.2dZ through %4.4d/%2.2d/%2.2d %2.2dZ\n"%(syea, + smon,sday,shou,eyea,emon,eday,ehou) + text+="\n" + # + # Show archive time + # + text+=" Archived at %2.2d:%2.2dZ %4.4d/%2.2d/%2.2d\n"%(vhou,vmin,vyea,vmon,vday) + text+="\n" + # + # Show forecasters + # + if self.__model=="Official": + text+=" Forecasters:\n" + for j in range(fnums.shape[0]): + if fnums[j]>0: + text+=" %2.2d - %s \n"%(fnums[j],self.__VU.getFcstrName(fnums[j])) + text+="\n" + # + # Show stats + # + minvalStr=fmt1%minval + maxvalStr=fmt1%maxval + avgStr=fmt2%avg + stdStr=fmt2%std + text+=" Minimum: %s\n"%minvalStr + text+=" Maximum: %s\n"%maxvalStr + text+=" Average: %s\n"%avgStr + text+=" Std Dev: %s\n"%stdStr + text+="\n" + # + # + # + txt.insert(tkinter.END,text) + txt.config(state=tkinter.DISABLED) + return +#======================================================================= +# +# The main BOIVerify Info dialog box +# +class VerifInfo(doneDialog): + def __init__(self, VU, parent=None, name="nonModal Dialog", + callbackMethod=None, modal=1): + self.__parent=parent + self.__VU=VU + self.fontHeight=18 # in pixels (well, not quite, but close) + self.boxWidth=7 # width of hour, in pixels + self.hourWidth=self.boxWidth+3 + self.rowH=self.fontHeight+5 # 5 pixels to surround box and allow sep line + self.scrollIncY=self.rowH/3 + self.scrollIncX=self.scrollIncY*2 + self.scrbuffer=10 
# within this many pixels of edge - it auto-scrolls + self.yoff=2 # to space down past border + self.cfb=tkinter.font.Font(family="Arial",size=-self.fontHeight,weight=tkinter.font.BOLD) + self.cf=tkinter.font.Font(family="Arial",size=-10) + self.fcstrNames=self.__VU.getFcstrNames() + self.fcstrIDs=self.__VU.getFcstrIDs() + self.fcstrNums=self.__VU.getFcstrNums() + self.usedFcstrs=[] + self.fcbstates=[] + dialog=doneDialog.__init__(self,parent=parent,name=name, + callbackMethod=callbackMethod, + modal=modal) + # + # Now that dialog exists - set minimum size on dialog box - then expand + # + geom=dialog.geometry() + (wh,rest)=geom.split("+",1) + (wid,hgt)=wh.split("x",1) + # + # Now make it a more reasonable width + # + iwid=int(wid)+300 + geom="%dx%s+%s"%(iwid,hgt,rest) + dialog.geometry(geom) + # + # Setup dialog for latest date + # + self.displayDate() + return + #------------------------------------------------------------------ + # newModel - called when a new model is chosen from the list of + # models. Have to read in all the model basetimes and + # find the basetime closest to the currently displayed + # basetime + # + def newModel(self): + # + # Get new model name and setup message while working + # + model=self.ml.getCurrentSelection() + msgWindow=messageWindow("Searching %s Grids"%model,self) + try: + # + # Get new parms and put them in order + # + parmList=self.__VU.listModelParms(model) + self.parmList=self.orderParms(parmList) + self.drawParmNames() + # + # Get time being shown for current model...will + # try to match this time for the new model + # + timeindex=self.tl.getCurrentIndex() + oldbasetime=self.times[timeindex] + # + # Get new basetimes for this new model - want to search through + # parm that has the least grids. 
We'll guess MaxT - but if + # there is no MaxT for this model - use the first parm and + # get all the basetimes for that parm + # + self.times=[] + if "MaxT" in self.parmList: + self.times=self.__VU.getBases("MaxT",model) + else: + self.times=self.__VU.getBases(self.parmList[0],model) + # + # Search through basetimes trying to find the index + # with the basetime closest to the basetime we had + # before + # + self.times.sort() + self.times.reverse() + self.timestrs=[] + defentry=0 + defindex=0 + mindiff=abs(self.times[0]-oldbasetime) + for i in range(len(self.times)): + btime=self.times[i] + (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(btime) + self.timestrs.append("%2.2d/%2.2d/%4.4d %2.2dZ"%(gmon,gday,gyea,ghou)) + diff=abs(btime-oldbasetime) + if diff0: + for colorgroup in colorgroups: + (fcstrlist,color)=colorgroup + match=1 + for j in range(len(fcstrlist)): + if fcstrlist[j]!=flist[j]: + match=0 + break + if match==1: + colorfill=color + break + if match==0: + colorfill=COLORLIST[len(colorgroups)%len(COLORLIST)] + newgroup=(flist,colorfill) + colorgroups.append(newgroup) + else: # anything but Official...has white/solid timeblocks + colorfill="white" + stippletype="" + # + # Setup tags with rec:(record number), + # parm:(parm name), + # col:(original color) + # + tagtuple=("grid","rec:%d"%rec,"parm:%s"%parm,"col:%s"%colorfill) + # + # find coordinates of box based on time and row (i) + # + shr=(stime-basetime)/3600 + ehr=(etime-basetime)/3600 + x1=shr*(self.hourWidth)+2 + x2=ehr*(self.hourWidth)-2 + y1=(i*self.rowH)+self.yoff+2 + y2=y1+self.fontHeight-1 + # + # Make the timeblock box + # + self.cGrd.create_polygon(x1,y1,x1,y2,x2,y2,x2,y1,fill=colorfill,outline=colorfill, + stipple=stippletype,width=1,tags=tagtuple) + # + # Keep track of max/min times displayed + # + maxpix=max(maxpix,ehr*(self.hourWidth)) + minpix=min(minpix,shr*(self.hourWidth)) + # + # Setup bindings for popups on the grid boxes + # + 
self.cGrd.tag_bind("grid","",self.postPopGrid) + if model=="Official": + self.cGrd.tag_bind("grid","",self.extract) + self.cGrd.bind("",self.buttonstart) + self.cGrd.bind("",self.drag) + self.cGrd.bind("",self.buttonstop) + else: + self.cGrd.tag_unbind("grid","") + self.cGrd.unbind("") + self.cGrd.unbind("") + self.cGrd.unbind("") + # + # Setup scrolling regions for grid canvas and timelabel canvas + # + self.cGrd.configure(scrollregion=(minpix,0,maxpix,self.parmHeight)) + self.cTim.configure(scrollregion=(minpix,0,maxpix,50)) + # + # Horizontally move to the start of this basetime + # (for Official add 12 hours) + # + offset=0 + if model=="Official": + offset=12*self.hourWidth + x0=float(offset-minpix)/float(maxpix-minpix) + self.cGrd.xview("moveto",x0) + self.cTim.xview("moveto",x0) + # + # Make time marks from mintime to maxtime + # tick marks at hourly intervals + # hash marks through grid canvas at 6 hourly intervals + # label centerred above 12Z each day + # + for jtim in range(mintime,maxtime,3600): + (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(jtim) + fhr=(jtim-basetime)/3600 + x=fhr*self.hourWidth + if ghou==0: + ywid=30 + self.cGrd.create_line(x,0,x,50*self.rowH,fill="blue",stipple="gray50",tags="hash") + elif (ghou%6==0): + ywid=10 + self.cTim.create_text(x,30-ywid,anchor=tkinter.S,fill="white",font=self.cf,text="%2.2d"%ghou) + self.cGrd.create_line(x,0,x,50*self.rowH,fill="blue",stipple="gray50",tags="hash") + if ghou==12: + self.cTim.create_text(x,30-ywid-10,anchor=tkinter.S, + fill="white",font=self.cf, + text="%s %d (%s)"%(MONS[gmon],gday,DAYS[gwda])) + else: + ywid=5 + self.cTim.create_line(x,30,x,30-ywid,fill="white") + self.cGrd.lower("hash") + # + # Update the color boxes next to forecaster names...based on + # the forecasters active making the grids currently displayed + # + self.updateFcstrButtons() + # + # Check to see if the 'set combo' button can be made active + # + self.checkSetButton() + # + # set the baseDisplayed 
time...so future changes in date + # can know what is on the screen now... + # + self.baseDisplayed=basetime + except: + pass + # + # Close the message window - we're done displaying this + # basetime + # + msgWindow.destroy() + return + #----------------------------------------------------------------- + # body - custom body that has GridManager-like qualities. It + # displays gridblocks with forecaster numbers associated + # with each grid + # + def body(self, master): + # + # + # + self.screvent=None # grid canvas timing events + self.baseDisplayed=0 + # + self.modelstrs=self.__VU.listModels() # what if no models ? + obsmodels=self.__VU.getCFG('OBSMODELS') # what if no obs models ? + for model in obsmodels: + if model not in self.modelstrs: + self.modelstrs.append(model) + self.modelstrs.sort() + if "Official" in self.modelstrs: + defmodel="Official" + else: + defmodel=self.modelstrs[0] + # + parmList=self.__VU.listModelParms(defmodel) + self.parmList=self.orderParms(parmList) + # + # Popup Menu for forecaster actions + # + self.popFcstr=tkinter.Menu(master=master,tearoff=0) + self.popFcstr.add_command(label="Edit",command=self.editFcstr) + self.popFcstr.add_command(label="Delete",command=self.deleteFcstr) + self.popFcstr.add_command(label="List Forecasts",command=self.listFcstr) + # + # Popup Menu for grid button-3 actions + # + self.popGrid=tkinter.Menu(master=master,tearoff=0) + self.popGrid.add_command(label="Display Info",command=self.gridInfo) + self.popGrid.add_separator() + self.popGrid.add_command(label="Delete Grid",command=self.gridDelete) + # + # Get base times of model + # + self.times=self.__VU.getBases(self.parmList[0],defmodel) + self.times.sort() + self.times.reverse() + self.timestrs=[] + for i in self.times: + print("time:", i) + (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(i) + self.timestrs.append("%2.2d/%2.2d/%4.4d %2.2dZ"%(gmon,gday,gyea,ghou)) + defentry=self.timestrs[0] # ?? what if no entries in list? 
+ # + pwid=0 + for i in range(len(self.parmList)): + parm=self.parmList[i] + pwid=max(pwid,self.cfb.measure(parm)) + parmWidth=pwid+(4*2) # four of the 2-pixel spacers + self.parmHeight=(len(self.parmList)*self.rowH)+(self.yoff*2) + + topframe=tkinter.Frame(master) + lab=tkinter.Label(topframe,text="Model:") + lab.pack(side=tkinter.LEFT) + self.ml=comboList(defmodel,self.modelstrs,parent=topframe, + callbackMethod=self.newModel) + self.ml.cf.pack(side=tkinter.LEFT,fill=tkinter.NONE,expand=0) + fr=tkinter.Frame(topframe,width=10) + fr.pack(side=tkinter.LEFT) + lab=tkinter.Label(topframe,text="Date/Cycle:") + lab.pack(side=tkinter.LEFT,padx=10,anchor=tkinter.W) + self.prevBut=tkinter.Button(topframe,text="<<",pady=0,padx=0,command=self.prevCycle) + self.prevBut.pack(side=tkinter.LEFT) + self.tl=comboList(defentry,self.timestrs,parent=topframe, + callbackMethod=self.changeDate) + self.tl.cf.pack(side=tkinter.LEFT,fill=tkinter.NONE,expand=0) + self.nextBut=tkinter.Button(topframe,text=">>",pady=0,padx=0,command=self.nextCycle) + self.nextBut.pack(side=tkinter.LEFT) + topframe.pack(side=tkinter.TOP,anchor="w") + + botframe=tkinter.Frame(master) + self.sHor=tkinter.Scrollbar(botframe,orient=tkinter.HORIZONTAL) + self.sVer=tkinter.Scrollbar(botframe,orient=tkinter.VERTICAL) + self.cLab=tkinter.Canvas(botframe,relief=tkinter.SUNKEN,width=parmWidth, + height=50,scrollregion=(0,0,parmWidth,self.parmHeight), + bg="black", + bd=2,yscrollcommand=self.sVer.set, + yscrollincrement=self.scrollIncY) + self.cTim=tkinter.Canvas(botframe,relief=tkinter.SUNKEN,width=200, + height=30,bd=2,xscrollcommand=self.sHor.set, + bg="black", + scrollregion=(0,0,400,30), + xscrollincrement=self.scrollIncX) + self.cGrd=tkinter.Canvas(botframe,relief=tkinter.SUNKEN,width=200, + height=50,bd=2,xscrollcommand=self.sHor.set, + bg="black", + scrollregion=(0,0,400,self.parmHeight), + yscrollcommand=self.sVer.set, + xscrollincrement=self.scrollIncX,yscrollincrement=self.scrollIncY) + 
self.cGrd.bind("",self.buttonstart) + self.cGrd.bind("",self.drag) + self.cGrd.bind("",self.buttonstop) + self.fFcs=tkinter.Frame(botframe,width=100,height=50,relief=tkinter.SUNKEN, + bd=2) + self.updateFcstrButtons() + # + # + # + self.sHor.config(command=self.scrollBothX) + self.sVer.config(command=self.scrollBothY) + + self.sHor.grid(row=0,column=1,sticky=tkinter.W+tkinter.E) + self.cTim.grid(row=1,column=1,sticky=tkinter.W+tkinter.E) + self.cLab.grid(row=2,column=0,sticky=tkinter.N+tkinter.S) + self.cGrd.grid(row=2,column=1,sticky=tkinter.W+tkinter.E+tkinter.N+tkinter.S) + self.sVer.grid(row=2,column=2,sticky=tkinter.N+tkinter.S) + self.fFcs.grid(row=2,column=3,sticky=tkinter.N+tkinter.S) + + but=tkinter.Button(master=botframe,text="Add New Forecaster", + command=self.addFcstr,) + but.grid(row=1,column=3) + self.scb=tkinter.Button(master=botframe,text="Set Forecasters for Selected Grids", + command=self.setCombo,) + self.scb.grid(row=3,column=3,sticky=tkinter.N+tkinter.S) + + botframe.grid_rowconfigure(0,weight=0) + botframe.grid_rowconfigure(1,weight=0) + botframe.grid_rowconfigure(2,weight=1,minsize=50) + botframe.grid_rowconfigure(3,weight=0) + botframe.grid_columnconfigure(0,weight=0,minsize=50) + botframe.grid_columnconfigure(1,weight=1,minsize=50) + botframe.grid_columnconfigure(2,weight=0) + botframe.grid_columnconfigure(3,weight=0,minsize=100) + + botframe.pack(side=tkinter.TOP,expand=1,fill=tkinter.BOTH) + self.dlgtop=botframe.winfo_toplevel() + self.drawParmNames() + return + #------------------------------------------------------------------ + # setCombo - set the forecaster number info on the currently + # selected grids with the forecasters currently "ON" + # in the checkboxes + def setCombo(self): + # + # scan thorugh fcbstates to get forecaster numbers of + # those that are are "ON". 
fnums is a list of numbers + # + fnums=[] + for i in range(len(self.fcstrNums)): + if self.fcbstates[i].get()>0: + fnums.append(self.fcstrNums[i]) + # + # If too many forecasters in the combination...give them + # an error message + # + maxForecasters=self.__VU.getCFG("MAXFCSTRS") + if len(fnums)>maxForecasters: + tkinter.messagebox.showerror("Error","No more than %d forecasters on any grid"%maxForecasters, + parent=self) + return + # + # Loop through selected grids + # + selItems=self.cGrd.find_withtag("selected") + if selItems is not None: + for item in selItems: + grid=0 + record=-1 + parm="" + tags=self.cGrd.gettags(item) + for tag in tags: + if tag[0:4]=="grid": + grid=1 + elif tag[0:4]=="rec:": + record=int(tag[4:]) + elif tag[0:5]=="parm:": + parm=tag[5:] + if grid==1: + gridModel=self.ml.getCurrentSelection() + if self.__VU.checkFile(parm,gridModel,modify=1): + fcstrs=self.__VU.fncFcstr[record,:] + #print " %s %s %d fcstrs=%s"%(gridModel,parm,record,str(fcstrs)) + for i in range(self.__VU.getCFG('MAXFCSTRS')): + self.__VU.fncFcstr[record,i]=0 + for i in range(len(fnums)): + self.__VU.fncFcstr[record,i]=int(fnums[i]) + fcstrs=self.__VU.fncFcstr[record,:] + #print " changed to %s"%str(fcstrs) + self.__VU.closeFcstFile() + self.redisplayDate() + return + #------------------------------------------------------------------ + # updateFcstrButtons - update the display of forecaster buttons + # with new list of forecasters. + # + def updateFcstrButtons(self): + # + # get the current on/off states for each number + # + state={} + if len(self.fcbstates)>0: + for i in range(len(self.fcstrNums)): + num=self.fcstrNums[i] + state[num]=self.fcbstates[i].get() + stateKeys=list(state.keys()) + # + # Delete any widgets currently in the frame + # the one that caused the callback will not + # be deleted (this is a memory leak!) 
+ # + widgets=self.fFcs.pack_slaves() + if widgets is not None: + for widget in widgets: + widget.pack_forget() + del widget + # + # get the updated names/nums/IDs + # + self.fcstrNames=self.__VU.getFcstrNames() + self.fcstrIDs=self.__VU.getFcstrIDs() + self.fcstrNums=self.__VU.getFcstrNums() + # + # + maxwid=0 + for num in self.fcstrNums: + maxwid=max(maxwid,len(self.fcstrNames[num])) + # + self.fcbstates=[] + for i in range(len(self.fcstrNums)): + num=self.fcstrNums[i] + label="%s - %s"%(num,self.fcstrNames[num]) + rowframe=tkinter.Frame(master=self.fFcs,name="f%s"%num) + var=tkinter.IntVar() + if num in stateKeys: + var.set(state[num]) + else: + var.set(0) + self.fcbstates.append(var) + if i==0: + color="white" + else: + color=COLORLIST[i%len(COLORLIST)] + cb=tkinter.Checkbutton(master=rowframe,text=label,indicatoron=1, + variable=var,padx=0,pady=0,name="c%s"%num, + command=self.checkSetButton) + #print "checking",num,"against usedFcstrs:",self.usedFcstrs + if ((i==0)or(num in self.usedFcstrs)): + mb=tkinter.Button(master=rowframe,relief=tkinter.FLAT, + command=cb.toggle,width=5, + text=" ",padx=0,pady=0,borderwidth=0, + background=color,foreground='white',name="b%s"%num, + activebackground=color,activeforeground='white') + else: + bgcol=rowframe.cget("bg") + mb=tkinter.Button(master=rowframe, + relief=tkinter.FLAT,command=cb.toggle,width=5, + text=" ",padx=0,pady=0,borderwidth=0, + background=bgcol,foreground='white',name="b%s"%num, + activebackground=bgcol,activeforeground='white') + mb.pack(side=tkinter.LEFT) + if i!=0: + mb.bind("",self.postPopFcstr) + + cb.pack(side=tkinter.LEFT,anchor=tkinter.W) + rowframe.pack(side=tkinter.TOP,fill=tkinter.X,expand=1) + if i!=0: + rowframe.bind("",self.postPopFcstr) + cb.bind("",self.postPopFcstr) + + # + # + # + df=tkinter.Frame(master=self.fFcs).pack(side=tkinter.TOP,fill=tkinter.BOTH,expand=1) + # + # give the dialog a chance to update size on its own + # + tl=self.fFcs.winfo_toplevel() + tl.update_idletasks() + # + # 
Set new minimum size -based on requested width/height + # + rwid=tl.winfo_reqwidth() + rhgt=tl.winfo_reqheight() + tl.minsize(rwid,rhgt) + # + # If height of current grid is not as big as the minimum + # size, then make that change manually (if they have + # modified the size earlier - the automatic propagate wont + # make it bigger) + # + geom=tl.geometry() + (wh,rest)=geom.split("+",1) + (wid,hgt)=wh.split("x",1) + if int(hgt)0: + self.tl.setCurrentIndex(timeindex-1) + self.changeDate() + return + #------------------------------------------------------------------ + # prevCycle - move to the previous basetime in the "tl" self.times + # list of basetimes + # + def prevCycle(self): + timeindex=self.tl.getCurrentIndex() + if timeindex-1: + self.gridRecord=int(tag[4:]) + if tag.find("parm")>-1: + self.gridParm=tag[5:] + self.gridModel=self.ml.getCurrentSelection() + self.popGrid.post(event.x_root,event.y_root) + self.popGrid.grab_set() + #------------------------------------------------------------------ + # gridInfo - post the dialog with info about the particular grid + # called from the popGrid popup menu + # + def gridInfo(self): + InfoDialog(self.__VU,self.gridModel,self.gridParm,self.gridRecord,parent=self) + return + #================================================================== + # gridDelete - delete the specified grid (but give them a chance + # to back out of it first) + # called from the popGrid popup menu + # + def gridDelete(self): + # + # Give them a chance to back out of deleting an archived grid. 
+ # + obsmodels=self.__VU.getCFG('OBSMODELS') + model=self.gridModel + parm=self.gridParm + record=self.gridRecord + text="" + # + # Make sure we can open this file + # + if not self.__VU.checkFile(parm,model,modify=0): + text+="Cant delete this grid" + tkinter.messagebox.showerror("Error",text,parent=self) + return + # + # Make different warning message depending on whether + # it is a forecast grid or an observed grid + # + if model in obsmodels: + stime=self.__VU.oncStime[record] + etime=self.__VU.oncEtime[record] + (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(stime) + (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(etime) + text+="Are you sure you want to delete the %s %s grid "%(model,parm) + text+="from %2.2dZ %4.4d/%2.2d/%2.2d through %2.2dZ %4.4d/%2.2d/%2.2d\n\n"%(shou, + syea,smon,sday,ehou,eyea,emon,eday) + else: + btime=self.__VU.fncBtime[self.gridRecord] + (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(btime) + stime=self.__VU.fncStime[self.gridRecord] + fhr=int((stime-btime)/3600.0) + text+="Are you sure you want to delete the %d-hr %s forecast "%(fhr,self.gridParm) + text+="from the %2.2dZ %4.4d/%2.2d/%2.2d run from %s?\n\n"%(bhou, + byea,bmon,bday,self.gridModel) + # + # But in all cases...give DIRE warning in messsage + # so that they think this through + # + text+="Once deleted it cannot be retreived!\n\n" + text+="CAREFULLY CONSIDER WHAT YOU ARE DOING!" 
+ # + # Make sure that they want to continue + # + ynDiag=tkinter.messagebox.askyesno("Are you sure?",text, + parent=self,default=tkinter.messagebox.NO) + if not ynDiag: + return + # + # Delete the grid...and if there is an error doing that + # tell them + # + reclist=[record,] + if not self.__VU.deleteRecord(parm,model,reclist): + tkinter.messagebox.showerror("Error","Could not delete grid",parent=self) + return + # + # Finally...redisplay the grids for the current date + # + self.redisplayDate() + return + #------------------------------------------------------------------ + # postPopFcstr - post the popup menu that allows them to edit info + # about a forecaster, list forecasts made by a + # forecaster, or delete a forecaster + # + def postPopFcstr(self,event): + self.editFnum=str(event.widget)[-2:] + self.editFID=self.fcstrIDs[self.editFnum] + self.editFname=self.fcstrNames[self.editFnum] + self.popFcstr.post(event.x_root,event.y_root) + self.popFcstr.grab_set() + #----------------------------------------------------------------- + # editFcstr - post the dialog where the forecaster number/id/name + # can be changed. + # This is called from the popFcstr popup menu + # + def editFcstr(self): + self.numVar=tkinter.StringVar() + self.numVar.set(self.editFnum) + self.idVar=tkinter.StringVar() + self.idVar.set(self.editFID) + self.nameVar=tkinter.StringVar() + self.nameVar.set(self.editFname) + ChangeCancelDialog(self.__VU, self.numVar,self.idVar,self.nameVar, + parent=self) + self.updateFcstrButtons() + return + #------------------------------------------------------------------ + # deleteFcstr - delete a forecaster from the list of forecasters + # (give them a chance to back out of it first). 
+ # Any grids currently attributed to this number + # will be changed into the 'unknown' forecaster + # + def deleteFcstr(self): + # + # Don't let them delete the 'unknown' forecaster + # + num=int(self.editFnum) + name=self.editFname + if num==0: + tkinter.messagebox.showerror("Error","You cannot delete the Unknown user", + parent=self) + return + # + # see how many grids this number is attributed to + # + msgWindow=messageWindow("Checking on forecaster %2.2d"%num,self) + try: + self.totalgrids=0 + for parm in self.__VU.getVerParms(): + datatype=self.__VU.getVerParmType(parm) + if not self.__VU.checkFile(parm,"Official",modify=0,datatype=datatype): + continue + fnc=self.__VU.fncFcstr[:,:] + numrec=add.reduce(add.reduce(equal(fnc,num))) + self.totalgrids+=numrec + self.__VU.closeFcstFile() + except: + pass + msgWindow.destroy() + # + # Give them a chance to back out of it. + # + if self.totalgrids>0: + text= "There are %d archived grids made by "%self.totalgrids + text+="forecaster number %d : %s.\n"%(num,name) + text+="\n" + text+="Are you sure you want to delete %s and "%name + text+="associate all those grids with the Unknown " + text+="forecaster?" + else: + text= "Are you sure you want to delete forecaster " + text+="number %d : %s ?"%(num,name) + ynDiag=tkinter.messagebox.askyesno("Are you sure?",text, + parent=self,default=tkinter.messagebox.NO) + if not ynDiag: + return + # + # setup a message window because this may take a while... 
+ # + text="Deleting Forecaster #%d"%num + msgWindow=messageWindow(text,self) + try: + if self.totalgrids>0: + for parm in self.__VU.getVerParms(): + #print "deleting #%d from %s"%(num,parm) + datatype=self.__VU.getVerParmType(parm) + if not self.__VU.checkFile(parm,"Official",modify=1,datatype=datatype): + #print "Could not open %s file for Official"%parm + continue + fnc=self.__VU.fncFcstr[:,:] + involved=logical_or.reduce(equal(fnc,num),1) + recsUsed=compress(involved,self.__VU.fncRecs) + for i in range(recsUsed.shape[0]): + rec=recsUsed[i] + fcstrs=fnc[rec,:] + #print " record %d has %s"%(rec,fcstrs) + numfcstrs=add.reduce(greater_equal(fcstrs,0)) + if numfcstrs==1: + fcstrs[equal(fcstrs,num)] = 0 + else: + fcstrs[equal(fcstrs,num)] = -127 + #print " changed to %s"%fcstrs + fnc[rec,:]=fcstrs + self.__VU.fncFcstr[:,:]=fnc[:,:].astype(int8) + self.__VU.closeFcstFile() + numstr="%2.2d"%num + Names=self.__VU.getFcstrNames() + IDs=self.__VU.getFcstrIDs() + del Names[numstr] + del IDs[numstr] + self.__VU.setFcstrs(Names,IDs) + self.__VU.saveFcstrNums() + except: + tkinter.messagebox.showerror("Error","Could not delete forecaster #%d"%num, + parent=self) + msgWindow.destroy() + # + # re-draw list of forecaster buttons + # + self.updateFcstrButtons() + return + #------------------------------------------------------------------ + # listFcstr - post the dialog where we display all the forecast + # made for this forecaster. + # Called by the PopFcstr popup menu + # + def listFcstr(self): + ListDialog(self.__VU,self.editFnum,self.editFID, + self.editFname,parent=self) + return + #------------------------------------------------------------------ + # addFcstr - post the dialog where we can add a forecaster. 
+ # called by the popFcstr popup menu + # + def addFcstr(self): + self.numVar=tkinter.StringVar() + self.numVar.set("") + self.idVar=tkinter.StringVar() + self.idVar.set("") + self.nameVar=tkinter.StringVar() + self.nameVar.set("") + AddCancelDialog(self.__VU, self.numVar,self.idVar,self.nameVar, + parent=self) + self.updateFcstrButtons() + return + #------------------------------------------------------------------ + # drawParmNames - clear the parm name list - and draw text with new + # names + # + def drawParmNames(self): + self.cLab.delete(tkinter.ALL) + # + # Fill in parameter names + # + for i in range(len(self.parmList)): + parm=self.parmList[i] + yrow=i*(self.rowH)+self.yoff + self.cLab.create_text(5,yrow+3,anchor=tkinter.NW,fill="white", + font=self.cfb,text=parm) + return + #------------------------------------------------------------------ + # scrollBothX - horizontally scrolls timebar and grid canvas - + # unless all of the X-scrollregion is already visible + # + def scrollBothX(self,*args): + sr=self.cGrd.cget('scrollregion').split() + sw=int(sr[2])-int(sr[0]) + wd=self.cGrd.winfo_width() + if wd>=sw: # abort scross/moves if all of xscrollregion already visible + return None + self.cTim.xview(*args) + self.cGrd.xview(*args) + return None + #------------------------------------------------------------------ + # scrollBothY - vertically scrolls parm lables and grid canvas - + # unless all of the Y-scrollregion is already visible + # + def scrollBothY(self,*args): + sr=self.cGrd.cget('scrollregion').split() + sh=int(sr[3])-int(sr[1]) + hg=self.cGrd.winfo_height() + if hg>=sh: # abort scrolls/moves if all of yscrollregion already visible + return None + self.cLab.yview(*args) + self.cGrd.yview(*args) + return None + #------------------------------------------------------------------ + # buttonstart - button 1 is pushed down. 
Store current location in + # xx,yy and store the starting location in xcstart,ycstart + # setup to call 'scrtest' (to test for auto-scrolling) + # if button is still down in a few milliseconds + # + def buttonstart(self,event): + self.xx=event.x + self.yy=event.y + self.xcstart=self.cGrd.canvasx(self.xx) + self.ycstart=self.cGrd.canvasy(self.yy) + self.screvent=self.cGrd.after(200,self.scrtest) + # + # If any grid boxes are 'selected' now - turn them off and + # set their color back to their original color + # + selItems=self.cGrd.find_withtag("selected") + if selItems is not None: + for item in selItems: + tags=self.cGrd.gettags(item) + for tag in tags: + if tag[0:4]=="col:": + oldcolor=tag[4:] + self.cGrd.itemconfigure(item,fill=oldcolor,outline=oldcolor,stipple="") + self.cGrd.dtag(tkinter.ALL,"selected") + # + return "break" + #------------------------------------------------------------------ + # drag - button is held down while moving. Get new location in xx,yy + # and convert to new canvas location in xcnow,ycnow. Draw + # selection box from xcstart,ycstart to xcnow,ycnow. + # + def drag(self,event): + self.xx=event.x + self.yy=event.y + self.xcnow=self.cGrd.canvasx(self.xx) + self.ycnow=self.cGrd.canvasy(self.yy) + self.cGrd.delete('areasel') + self.cGrd.create_rectangle(self.xcstart,self.ycstart, + self.xcnow,self.ycnow, + outline="cyan",tags='areasel') + # + # Get selected grids, and any item inside the selection box + # + selItems=self.cGrd.find_withtag("selected") + inItems=self.cGrd.find_overlapping(self.xcstart,self.ycstart, + self.xcnow,self.ycnow) + # + # Check for grid items inside the selection box that are NOT + # currently in the selected list. 
For these - set them to + # selected and set their color to the highlight color + # + if inItems is not None: + for item in inItems: + tags=self.cGrd.gettags(item) + if "grid" in tags: + if item not in selItems: + newtags=list(tags) + newtags.append("selected") + self.cGrd.itemconfigure(item,fill="yellow",outline="yellow",stipple="gray12") + self.cGrd.itemconfigure(item,tags=tuple(newtags)) + # + # Check currently selected items...and if no longer in the + # selection box, then turn their color back to their original color + # + if selItems is not None: + for item in selItems: + if item not in inItems: + tags=self.cGrd.gettags(item) + if "grid" in tags: + for tag in tags: + if tag[0:4]=="col:": + oldcolor=tag[4:] + self.cGrd.itemconfigure(item,fill=oldcolor,outline=oldcolor,stipple="") + self.cGrd.dtag(item,"selected") + # + # Finally check for status of 'set selected' button + # + self.checkSetButton() + self.cGrd.update_idletasks() + return "break" + #------------------------------------------------------------------ + # scrtest - while button is down but not moving - check to see if + # pointer is in the auto-scrolling zone (within scrbuffer + # of edge of canvas) and scroll if so. If we scroll - + # then update the selection box. 
+ # + def scrtest(self): + hg=self.cGrd.winfo_height() + wd=self.cGrd.winfo_width() + scrollflag=0 + if self.xx(wd-self.scrbuffer): + self.scrollBothX('scroll','1','units') + scrollflag=1 + if self.yy(hg-self.scrbuffer): + self.scrollBothY('scroll','1','units') + scrollflag=1 + # + # If we scrolled - update the area that is highlighted + # + if scrollflag==1: + self.xcnow=self.cGrd.canvasx(self.xx) + self.ycnow=self.cGrd.canvasy(self.yy) + self.cGrd.delete('areasel') + self.cGrd.create_rectangle(self.xcstart,self.ycstart, + self.xcnow,self.ycnow, + fill='',outline="cyan",tags='areasel') + self.cGrd.update_idletasks() + # + # Check again for scrolling in a few milliseconds + # + self.screvent=self.cGrd.after(50,self.scrtest) + #------------------------------------------------------------------ + # buttonstop - button 1 is released - save final position in xcnow, + # ycnow. Remove the selection box. + # + def buttonstop(self,event): + if self.screvent is not None: + self.cGrd.after_cancel(self.screvent) + self.screvent=None + self.xx=event.x + self.yy=event.y + self.xcnow=self.cGrd.canvasx(self.xx) + self.ycnow=self.cGrd.canvasy(self.yy) + self.cGrd.delete('areasel') + # + # Get selected grids, and any item inside the selection box + # + selItems=self.cGrd.find_withtag("selected") + inItems=self.cGrd.find_overlapping(self.xcstart,self.ycstart, + self.xcnow,self.ycnow) + # + # Check for grid items inside the selection box that are NOT + # currently in the selected list. 
For these - set them to + # selected and set their color to the highlight color + # + if inItems is not None: + for item in inItems: + tags=self.cGrd.gettags(item) + if "grid" in tags: + if item not in selItems: + newtags=list(tags) + newtags.append("selected") + self.cGrd.itemconfigure(item,fill="yellow",outline="yellow",stipple="gray12") + self.cGrd.itemconfigure(item,tags=tuple(newtags)) + # + # Check currently selected items...and if no longer in the + # selection box, then turn their color back to their original color + # + if selItems is not None: + for item in selItems: + if item not in inItems: + tags=self.cGrd.gettags(item) + if "grid" in tags: + for tag in tags: + if tag[0:4]=="col:": + oldcolor=tag[4:] + self.cGrd.itemconfigure(item,fill=oldcolor,outline=oldcolor,stipple="") + self.cGrd.dtag(item,"selected") + self.checkSetButton() + self.cGrd.update_idletasks() + return "break" + #------------------------------------------------------------------ + # checkSetButton - check to see if the "Set Forecasters for Selected + # Grids" button can be enabled. There have to be + # some selected grids - AND - there have to be + # some selected forecasters + # + def checkSetButton(self): + someFcstrs=0 + for i in range(len(self.fcstrNums)): + if self.fcbstates[i].get()>0: + someFcstrs=1 + break + # + if someFcstrs==1: + selItems=self.cGrd.find_withtag("selected") + if selItems is not None: + if len(selItems)>0: + self.scb.configure(state=tkinter.NORMAL) + return + # + self.scb.configure(state=tkinter.DISABLED) + return + #------------------------------------------------------------------ + # + # extract forecasters for this grid into the currently selected + # forecasters. 
+ # + def extract(self,event): + curgrid=self.cGrd.find_withtag(tkinter.CURRENT) + grtags=self.cGrd.gettags(curgrid) + for tag in grtags: + if tag.find("rec")>-1: + self.gridRecord=int(tag[4:]) + if tag.find("parm")>-1: + self.gridParm=tag[5:] + self.gridModel=self.ml.getCurrentSelection() + + if self.__VU.checkFile(self.gridParm,self.gridModel,modify=0): + fcstrs=self.__VU.fncFcstr[self.gridRecord,:] + self.fcstrNums=self.__VU.getFcstrNums() + for i in range(len(self.fcstrNums)): + self.fcbstates[i].set(0) + for i in range(fcstrs.shape[0]): + fnum=fcstrs[i] + if fnum>0: + fnumstr="%2.2d"%fnum + if fnumstr in self.fcstrNums: + idx=self.fcstrNums.index(fnumstr) + self.fcbstates[idx].set(1) + self.checkSetButton() +#===================================================================== +# +# Custom comboList widget +# +# User sees currently selected entry from list of entries, and a +# pulldown button. When pulldown is activated the list is shown - +# with scrollbars (if needed) and the user can click on the entry +# desired. The callbackMethod is called when the user chooses an +# entry - and you can get the currentEntry with getCurrentEntry method +# and currentIndex with getCurrentIndex method. 
+# +class comboList(tkinter.Frame): + def __init__(self,defaultEntry,entryList,parent=None,callbackMethod=None, + width=0,height=5): + if defaultEntry not in entryList: + return + tkinter.Frame.__init__(self,parent) + self.__callbackMethod=callbackMethod + self.entries=[] + for entry in entryList: + self.entries.append(entry) + self.currentIndex=self.entries.index(defaultEntry) + self.currentSelection=defaultEntry + + if width==0: + for entry in self.entries: + width=max(len(entry),width) + width+=1 + # + # Make the popup chooser + # + self.opop=tkinter.Toplevel() + self.opop.withdraw() + of=tkinter.Frame(self.opop) + if len(self.entries)>height: + os=tkinter.Scrollbar(of,orient=tkinter.VERTICAL) + self.ol=tkinter.Listbox(of,width=width,height=height, + yscrollcommand=os.set, + selectmode=tkinter.SINGLE, + exportselection=0) + os.config(command=self.ol.yview) + os.pack(side=tkinter.RIGHT,fill=tkinter.Y) + else: + self.ol=tkinter.Listbox(of,width=width,height=height) + for entry in self.entries: + self.ol.insert(tkinter.END,entry) + self.ol.pack(side=tkinter.LEFT,fill=tkinter.BOTH,expand=1) + of.pack(side=tkinter.TOP) + self.ol.bind("",self.removePopup) + self.opop.transient(parent) + self.opop.overrideredirect(1) + self.opop.update_idletasks() + popwidth=self.opop.winfo_reqwidth() + if (len(self.entries)<=height): + popwidth+=21 + popheight=self.opop.winfo_reqheight() + hpl=popheight/height + # + # Make the display of current entry and pulldown button + # + self.cf=tkinter.Frame(parent,width=popwidth) + self.cl=tkinter.Listbox(self.cf,width=width,height=1, + selectmode=tkinter.SINGLE, + exportselection=0) + self.cl.insert(tkinter.END,defaultEntry) + self.cl.pack(side=tkinter.LEFT) + self.cl.update_idletasks() + cw=self.cl.winfo_reqwidth() + ch=self.cl.winfo_reqheight() + canw=popwidth-cw-6 + canh=ch-6 + bw=2 # border width + cc=tkinter.Canvas(self.cf,width=canw,height=canh, + relief=tkinter.RAISED,bd=bw) + tsize=min(canw,canh) + toffx=((canw-tsize)/2.0)+bw+bw + 
toffy=((canh-tsize)/2.0)+bw+bw + twid=tsize-bw-bw + x0=toffx + y0=toffy + x1=toffx+twid + y1=toffy + x2=toffx+(twid/2) + y2=toffy+twid + cp=cc.create_polygon(x0,y0,x1,y1,x2,y2,fill="black") + cc.pack(side=tkinter.LEFT) + #self.cf.pack(side=Tkinter.TOP) + self.cl.bind("",self.postPopup) + cc.bind("",self.postPopup) + return + def postPopup(self,event): + curgeom=self.cf.winfo_geometry() + (wh,rest)=curgeom.split("+",1) + (w,h)=wh.split("x",1) + iw=int(w) + ih=int(h) + x=self.cf.winfo_rootx() + y=self.cf.winfo_rooty() + newgeom="+%d+%d"%(x,y+ih) + self.opop.geometry(newgeom) + self.opop.deiconify() + self.ol.select_clear(0,tkinter.END) + self.ol.select_set(self.currentIndex) + self.ol.see(self.currentIndex) + popgeom=self.opop.geometry() + (wh,rest)=popgeom.split("+",1) + (iw,ih)=wh.split("x",1) + self.iw=int(iw) + self.ih=int(ih) + self.opop.grab_set_global() # once you get here - you MUST choose + #self.opop.grab_set() + self.opop.focus_set() + self.opop.bind("",self.closePopup) + self.opop.bind("",self.popClick) + return "break" + #------------------------------------------------------------------ + # popClick - Test if they are clicking in the list - if so - they + # might release inside the list - and that will get + # captured in removePopup. 
If not - then close the + # popup without choosing + # + def popClick(self,event): + x=event.x + y=event.y + if ((x>self.iw)or(x<1)): + return self.closePopup(event) + if ((y>self.ih)or(y<1)): + return self.closePopup(event) + return "break" + #------------------------------------------------------------------ + # removePopup - called when they choose one in the list + # + def removePopup(self,event): + selectIndex=int(self.ol.nearest(event.y)) + self.currentIndex=selectIndex + newEntry=self.entries[selectIndex] + self.currentSelection=newEntry + self.opop.grab_release() + self.cl.delete(0) + self.cl.insert(0,newEntry) + self.cf.focus_set() + self.opop.withdraw() + self.__callbackMethod() + return + #------------------------------------------------------------------ + # closePopup - called when they dont pick from the list - but + # need to close the popup + # + def closePopup(self,event): + #self.cf.focus_set() + self.opop.grab_release() + self.opop.unbind("") + self.opop.withdraw() + return + #------------------------------------------------------------------ + # getCurrentSelection - tells you the currently selected text + # + def getCurrentSelection(self): + return self.currentSelection + #------------------------------------------------------------------ + # getCurrentIndex - tells you the index of the currently selected + # text + # + def getCurrentIndex(self): + return self.currentIndex + #------------------------------------------------------------------ + # delIndex - delete the specified index from the entries + # + def delIndex(self,index): + if ((index<0)or(index>=len(self.entries))): + return + curSel=self.currentSelection + self.ol.delete(index) + del self.entries[index] + if curSel in self.entries: + self.currentIndex=self.entries.index[curSel] + else: + self.currentIndex=min(self.currentIndex,len(self.entries)-1) + self.currentSelection=self.entries[self.currentIndex] + self.cl.delete(0,tkinter.END) + 
self.cl.insert(tkinter.END,self.currentSelection) + return + #------------------------------------------------------------------ + # setCurrentIndex - set the selected entry to the specified index + # in entries + # + def setCurrentIndex(self,index): + if ((index<0)or(index>=len(self.entries))): + return + self.currentIndex=index + self.currentSelection=self.entries[index] + self.cl.delete(0,tkinter.END) + self.cl.insert(tkinter.END,self.currentSelection) + return + #------------------------------------------------------------------ + # setCurrentSelection - set the selected entry to the specified + # entry + # + def setCurrentSelection(self,selection): + if selection not in self.entries: + return + index=self.entries.index[selection] + self.currentIndex=index + self.currentSelection=self.entries[index] + self.cl.delete(0,tkinter.END) + self.cl.insert(tkinter.END,self.currentSelection) + return + #------------------------------------------------------------------ + # delValue - delete the specified entry + # + def delValue(self,value): + if value in self.entries: + indexdel=self.entries.index(value) + self.delIndex(indexdel) + return + #------------------------------------------------------------------ + # newEntries - replace all the entries with a new list of entries. + # If the currently selected entry is in the new + # list of entries - then select it in the new list + # otherwise select the first entry + # + def newEntries(self,newList,newDef): + if len(newList)>0: + self.ol.delete(0,tkinter.END) + self.entries=[] + for entry in newList: + self.entries.append(entry) + self.ol.insert(tkinter.END,entry) + if newDef in newList: + self.setCurrentIndex(newList.index(newDef)) + else: + self.setCurrentIndex(0) + return +#===================================================================== +# Create a basic 'message window' indicating that something +# is happening. 
Must be careful to destroy this...because there +# is no way for the user to destroy this if something goes wrong. +# +def messageWindow(message,parent=None): + if parent is None: + return + pwid=parent.winfo_width() + phgt=parent.winfo_height() + px=parent.winfo_rootx() + py=parent.winfo_rooty() + msgWindow=tkinter.Toplevel(master=parent) + msgWindow.resizable(0,0) + msgWindow.transient() + msgWindow.overrideredirect(1) + msgLab=tkinter.Label(master=msgWindow,text=message, + relief=tkinter.RIDGE,height=5,width=29,borderwidth=4) + msgLab.pack(side=tkinter.TOP) + msgWindow.update_idletasks() + wid=msgWindow.winfo_width() + hgt=msgWindow.winfo_height() + nx=int(px+(pwid/2.0)-(wid/2.0)) + ny=int(py+(phgt/2.0)-(hgt/2.0)) + msgWindow.geometry("%dx%d+%d+%d"%(wid,hgt,nx,ny)) + msgWindow.update_idletasks() + return msgWindow +#===================================================================== +# +# stuff to support a callback with a pre-known variable +# +def GenericCallback(callback, *firstArgs, **firstKWArgs): + if firstKWArgs: + return GC(callback, *firstArgs, **firstKWArgs) + else: + return GCNoKWArgs(callback, *firstArgs) +#===================================================================== +# +# Classes for callbacks +# +class GC: + def __init__(self,callback,*firstArgs, **firstKWArgs): + self.__callback=callback + self.__firstArgs=firstArgs + self.__firstKWArgs=firstKWArgs + def __call__(self, *lastArgs, **kwArgs): + if kwArgs: + netKWArgs=self.__firstKWArgs.copy() + netKWArgs.update(self.__kwArgs) + else: + netKWArgs=self.__firstKWArgs + return self.__callback (*(self.__firstArgs+lastArgs),**netKWArgs) +class GCNoKWArgs: + def __init__(self, callback, *firstArgs): + self.__callback=callback + self.__firstArgs=firstArgs + def __call__(self, *args, **kwArgs): + return self.__callback (*(self.__firstArgs+args),**kwArgs) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CheckTandTd.py 
b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CheckTandTd.py index 0afe218713..f6516c5941 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CheckTandTd.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CheckTandTd.py @@ -1,401 +1,401 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# CheckTandTd -# -# Author: Tom LeFebvre -# -# Version Date: 4 January 2006 -# Version: 6.5 -# -# 7/27/2015 yteng Use the time range selected in the Grid Manager if any -# -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify -MenuItems = ["Consistency"] - -VariableList = [("Check or Force:" , "Check Only", "radio", - ["Check Only", "Force: TMin<=T<=TMax\n and Td<=T"]), - ] - -import SmartScript -import TimeRange -import AbsTime -from JUtil import JavaWrapperClass -from numpy import * - -MODEL = "Fcst" -LEVEL = "SFC" -DAY_IN_SECS = 24 * 3600 - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - ## - # Get the list of time ranges at the grid whose element name is WEName - # contains grids. The model and level of the weather element are assumed - # to be MODEL and LEVEL, respectively. - # - # @param WEName: Name of a weather element - # @type WEName: string - # @return: time ranges at which WEName has data. 
- # @rtype: Python list of Python TimeRange objects - def getWEInventory(self, WEName, timeRange=None): - if timeRange is None: - yesterday = self._gmtime() - (2 * DAY_IN_SECS) # two days ago - later = self._gmtime() + 10 * DAY_IN_SECS # 10 days from now - timeRange = TimeRange.TimeRange(yesterday, later) - if isinstance(timeRange, JavaWrapperClass): - timeRange = timeRange.toJavaObj() - parm = self.getParm(MODEL, WEName, LEVEL); - inv = parm.getGridInventory(timeRange) - trList = [] - for gd in inv: - tr = TimeRange.TimeRange(gd.getGridTime()) - trList.append(tr) - - return trList - - ## - # Get time ranges locked by other workstations for the weather element named - # weName. The model for weName is taken from this object's mutableID() method; - # the level is LEVEL. - # @param weName: Name of a weather element. - # @type weName: string - # @return: time ranges locked by others - # @rtype: Python list of TimeRanges; if asJava is True, these are Java - # TimeRanges, otherwise they are Python TimeRanges. - def getLocksByOthers(self, weName): - # returns list of time ranges locked by others for this weather element - parm = self.getParm(self.mutableID(), weName, LEVEL) - if parm is None: - return [] - lt = parm.getLockTable() - jlok = lt.lockedByOther(); - lbo = [] - for i in xrange(jlok.size()): - tr = jlok.get(i) - tr = TimeRange.TimeRange(tr) - lbo.append( tr ) - return lbo - - ## - # Filter trList, returning only the time ranges that overlap timeRange. - # @param timeRange: the time range to test against - # @type timeRange: a Python TimeRange - # @param trList: the list of time ranges to filter - # @type trList: Python list of Python TimeRanges - # @return: The time ranges in trList that overlap timeRange. 
- # @rtype: a Python list of Python time ranges - def overlappingTRs(self, timeRange, trList): - newTRList = [] - for tr in trList: - if timeRange.overlaps(tr): - newTRList.append(tr) - - return newTRList - - ## - # method so that timeRanges will be sorted earliest to latest - # @param first: The first time range to compare - # @type first: Python TimeRange - # @param last: The second time range to compare - # @type last: Python TimeRange - # @return: -1 if first starts before last, 1 if first starts after last, - # and 0 if first and last start at the same time. - # @rtype: integer - def trSortMethod(self, first, last): - if first.startTime() < last.startTime(): - return -1 - elif first.startTime() == last.startTime(): - return 0 - else: - return 1 - - ## - # Concatenate minTRList and maxTRList and sort by starting times. - # Duplicate time ranges are NOT eliminated. - # @param minTRList: time ranges of the minT grid - # @type minTRList: Python list of Python TimeRange objects. - # @param maxTRList: time ranges of the maxT grid - # @type maxTRList: Python list of Python TimeRange objects. - # @return: The combined and sorted collection. - # @rtype: Python list of Python TimeRange objects - def combineInventoryLists(self, minTRList, maxTRList): - bigList = minTRList + maxTRList - bigList.sort(self.trSortMethod) - return bigList - - ## - # Main entry point of this procedure. If varDict["Check or Force"] is - # "Check Only", temporary grids will be created. Otherwise, the minT, maxT, - # T, and Td grids may be changed. - # @param varDict: Determines whether temporary grids are created or - # temperature grids are modified. 
- # @type varDict: Python dictionary of strings to strings - def execute(self, timeRange, varDict): - checkOnly = varDict["Check or Force:"] == "Check Only" - - # remove any temporary WEs we created - weList = ["TLessThanMin", "TGreaterThanMax", "TdGreaterThanT", - "MinGreaterThanMax", "MaxLessThanMin"] - for we in weList: - parm = self.getParm(MODEL, we, LEVEL) - if parm is not None: - self.unloadWE(MODEL, we, LEVEL) - - self.setToolType("numeric") - - if timeRange is None or not timeRange.isValid(): - start = self._gmtime() - (2 * DAY_IN_SECS) # two days ago - end = self._gmtime() + (10 * DAY_IN_SECS) # 10 days from now - timeRange = TimeRange.TimeRange(start, end) - - # get all the grids for all elements upfront and update as we modify - # any grids. We need to do this because the GFE caches the original - # version of all grids and there's no way yet to turn this off. - - minTRList = self.getWEInventory("MinT", timeRange) - minTDict = self.getGrids(MODEL, "MinT", LEVEL, minTRList, mode = "First") - - maxTRList = self.getWEInventory("MaxT", timeRange) - maxTDict = self.getGrids(MODEL, "MaxT", LEVEL, maxTRList, mode = "First") - - TTRList = self.getWEInventory("T", timeRange) - tDict = self.getGrids(MODEL, "T", LEVEL, TTRList, mode = "First") - - TdTRList = self.getWEInventory("Td", timeRange) - tdDict = self.getGrids(MODEL, "Td", LEVEL, TdTRList, mode = "First") - - # get the all locks by other users, so we can detect they are locked - # before attempting to modify them - minTLocks = self.getLocksByOthers("MinT") - maxTLocks = self.getLocksByOthers("MaxT") - tLocks = self.getLocksByOthers("T") - tdLocks = self.getLocksByOthers("Td") - - # get the list of edit areas - eaList = self.editAreaList() - - # get the local WFO domain and make a mask with it - # local sites may wish to use a different maks so that a larger area - # is operated on by the tool - for example marine sites may wish to - # expand it to marine zones as well as land. 
- # To change the area, simply use a locally-defined edit area instead - # of self.getSiteID(). Example: siteID = "CWAPlusMarineZones" - #siteID = self.getSiteID() - this was set in A2 - changed to A1 below - siteID = "ISC_Send_Area" - if siteID in eaList: # make sure the edit area is there - siteEA = self.getEditArea(siteID) # get the edit area - siteMask = self.encodeEditArea(siteEA) # make a mask with siteEA - siteMask = siteMask.astype(bool8) - else: - topo = self.getGridShape() - siteMask = ones(topo, bool8) - print siteID, "edit area not found. Using entire GFE domain." - - # Ensure that MinT <= MaxT first - minMaxList = self.combineInventoryLists(minTRList, maxTRList) - foundProblem = False - for i in xrange(0, len(minMaxList) - 1): - if minMaxList[i+1] in minTRList: # previous max modifies min - maxTR = minMaxList[i] - minTR = minMaxList[i+1] - # Make sure these TRs really exist in the inventory - if maxTR not in maxTRList: - continue - if minTR not in minTRList: - continue - - minGrid = minTDict[minTR] - maxGrid = maxTDict[maxTR] - - mask = (minGrid > maxGrid) & siteMask - if not sometrue(mask): # make sure some points are set - continue - - foundProblem = True - - if checkOnly: - self.createGrid(MODEL, "MinGreaterThanMax", "SCALAR", mask.astype(float32), - minTR, minAllowedValue=0.0, maxAllowedValue= 1.0) - else: # force the change - if minTR in minTLocks: - msg = "Can't modify MinT grid at " + str(minTR) + \ - " locked by another user." 
- self.statusBarMsg(msg, "S") - continue - # calculate and modify the MinT grid - minGrid[mask] = maxGrid[mask] - self.createGrid(MODEL, "MinT", "SCALAR", minGrid, minTR) - minTDict[minTR] = minGrid # update the minT dictionary - - elif minMaxList[i+1] in maxTRList: # previous min modifies max - minTR = minMaxList[i] - maxTR = minMaxList[i+1] - # Make sure these TRs really exist in the inventory - if maxTR not in maxTRList: - continue - if minTR not in minTRList: - continue - maxGrid = maxTDict[maxTR] - minGrid = minTDict[minTR] - - mask = (maxGrid < minGrid) & siteMask - if not sometrue(mask): # make sure some points are set - continue - - foundProblem = True - - if checkOnly: - self.createGrid(MODEL, "MaxLessThanMin", "SCALAR", mask.astype(float32), - maxTR, minAllowedValue=0.0, maxAllowedValue= 1.0) - else: # force the change - if maxTR in maxTLocks: - msg = "Can't modify MaxT grid at " + str(maxTR) + \ - " locked by another user." - self.statusBarMsg(msg, "S") - continue - # calculate and modify the MaxT grid - maxGrid[mask] = minGrid[mask] - self.createGrid(MODEL, "MaxT", "SCALAR", maxGrid, maxTR) - # update the minT dictionary with the modified minT grid - maxTDict[maxTR] = maxGrid - - - # Now check for T < MinT - for tr in minTRList: - minTGrid = minTDict[tr] - - tInv = self.overlappingTRs(tr, TTRList) - if tInv == []: # empty list, keep going - continue - - for tymeRng in tInv: - # find points in the siteMask where T < MinT - tGrid = tDict[tymeRng] - tTooLow = (tGrid < minTGrid) & siteMask - if not sometrue(tTooLow): - continue - - foundProblem = True - - if checkOnly: # just make a grid showing the mask where T < MinT - self.createGrid(MODEL, "TLessThanMin", "SCALAR", tTooLow.astype(float32), tymeRng, - minAllowedValue=0.0, maxAllowedValue= 1.0) - else: # force T to the MinT value - if tymeRng in tLocks: - msg = "Can't modify T grid at " + str(tymeRng) + \ - " locked by another user." 
- self.statusBarMsg(msg, "S") - continue - tGrid[tTooLow] = minTGrid[tTooLow] - self.createGrid(MODEL, "T", "SCALAR", tGrid, tymeRng) - tDict[tymeRng] = tGrid # update the tDict - - - # check for T > MaxT - for tr in maxTRList: - # get the grid first - maxTGrid = maxTDict[tr] - - # then warp the end time so we include T grids ending at 01z - startTime = tr.startTime() - endTime = tr.endTime().unixTime() - roundedTime = int((endTime + 43200) / 86400) * 86400 + 3600 - endTime = max(endTime, roundedTime) - endTime = AbsTime.AbsTime(endTime) - timeRange = TimeRange.TimeRange(startTime, endTime) - - # use the warpedTR to fetch the T inventory - tInv = self.overlappingTRs(timeRange, TTRList) - if tInv == []: # empty list, keep going - continue - - for tymeRng in tInv: - # find points in the siteMask where T > MaxT - tGrid = tDict[tymeRng] - tTooHigh = (tGrid > maxTGrid) & siteMask - if not sometrue(tTooHigh): # make sure some points are set - continue - - foundProblem = True - - if checkOnly: # just make a grid - self.createGrid(MODEL, "TGreaterThanMax", "SCALAR", tTooHigh.astype(float32), tymeRng, - minAllowedValue=0.0, maxAllowedValue= 1.0) - else: # force T to the MaxT value - if tymeRng in tLocks: - msg = "Can't modify T grid at " + str(tymeRng) + \ - " locked by another user." 
- self.statusBarMsg(msg, "S") - continue - tGrid[tTooHigh] = maxTGrid[tTooHigh] - self.createGrid(MODEL, "T", "SCALAR", tGrid, tymeRng) - tDict[tymeRng] = tGrid # update the tDict - - # Now check T < Td - for tr in TTRList: - - # make sure there's a matching Td grid - if not tr in TdTRList: - continue - - tGrid = tDict[tr] - tdGrid = tdDict[tr] - - # find points in the siteMask where Td > T - TdTooHigh = (tdGrid > tGrid) & siteMask - if not sometrue(TdTooHigh): # make sure some points are set - continue - - foundProblem = True - - if checkOnly: # just make a grid - self.createGrid(MODEL, "TdGreaterThanT", "SCALAR", TdTooHigh.astype(float32), tr, - minAllowedValue=0.0, maxAllowedValue= 1.0) - else: # force Td <= T - if tr in tdLocks: - msg = "Can't modify Td grid at " + str(tInv[i]) + \ - " locked by another user." - self.statusBarMsg(msg, "S") - continue - tdGrid[TdTooHigh] = tGrid[TdTooHigh] - self.createGrid(MODEL, "Td", "SCALAR", tdGrid, tr) - tdDict[tr] = tdGrid # update the tdDict - - - if not foundProblem: - msg = "CheckTandTd found no inconsistencies." - self.statusBarMsg(msg, "R") - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# CheckTandTd +# +# Author: Tom LeFebvre +# +# Version Date: 4 January 2006 +# Version: 6.5 +# +# 7/27/2015 yteng Use the time range selected in the Grid Manager if any +# +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify +MenuItems = ["Consistency"] + +VariableList = [("Check or Force:" , "Check Only", "radio", + ["Check Only", "Force: TMin<=T<=TMax\n and Td<=T"]), + ] + +import SmartScript +import TimeRange +import AbsTime +from JUtil import JavaWrapperClass +from numpy import * + +MODEL = "Fcst" +LEVEL = "SFC" +DAY_IN_SECS = 24 * 3600 + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + ## + # Get the list of time ranges at the grid whose element name is WEName + # contains grids. The model and level of the weather element are assumed + # to be MODEL and LEVEL, respectively. + # + # @param WEName: Name of a weather element + # @type WEName: string + # @return: time ranges at which WEName has data. 
+ # @rtype: Python list of Python TimeRange objects + def getWEInventory(self, WEName, timeRange=None): + if timeRange is None: + yesterday = self._gmtime() - (2 * DAY_IN_SECS) # two days ago + later = self._gmtime() + 10 * DAY_IN_SECS # 10 days from now + timeRange = TimeRange.TimeRange(yesterday, later) + if isinstance(timeRange, JavaWrapperClass): + timeRange = timeRange.toJavaObj() + parm = self.getParm(MODEL, WEName, LEVEL); + inv = parm.getGridInventory(timeRange) + trList = [] + for gd in inv: + tr = TimeRange.TimeRange(gd.getGridTime()) + trList.append(tr) + + return trList + + ## + # Get time ranges locked by other workstations for the weather element named + # weName. The model for weName is taken from this object's mutableID() method; + # the level is LEVEL. + # @param weName: Name of a weather element. + # @type weName: string + # @return: time ranges locked by others + # @rtype: Python list of TimeRanges; if asJava is True, these are Java + # TimeRanges, otherwise they are Python TimeRanges. + def getLocksByOthers(self, weName): + # returns list of time ranges locked by others for this weather element + parm = self.getParm(self.mutableID(), weName, LEVEL) + if parm is None: + return [] + lt = parm.getLockTable() + jlok = lt.lockedByOther(); + lbo = [] + for i in range(jlok.size()): + tr = jlok.get(i) + tr = TimeRange.TimeRange(tr) + lbo.append( tr ) + return lbo + + ## + # Filter trList, returning only the time ranges that overlap timeRange. + # @param timeRange: the time range to test against + # @type timeRange: a Python TimeRange + # @param trList: the list of time ranges to filter + # @type trList: Python list of Python TimeRanges + # @return: The time ranges in trList that overlap timeRange. 
+ # @rtype: a Python list of Python time ranges + def overlappingTRs(self, timeRange, trList): + newTRList = [] + for tr in trList: + if timeRange.overlaps(tr): + newTRList.append(tr) + + return newTRList + + ## + # method so that timeRanges will be sorted earliest to latest + # @param first: The first time range to compare + # @type first: Python TimeRange + # @param last: The second time range to compare + # @type last: Python TimeRange + # @return: -1 if first starts before last, 1 if first starts after last, + # and 0 if first and last start at the same time. + # @rtype: integer + def trSortMethod(self, first, last): + if first.startTime() < last.startTime(): + return -1 + elif first.startTime() == last.startTime(): + return 0 + else: + return 1 + + ## + # Concatenate minTRList and maxTRList and sort by starting times. + # Duplicate time ranges are NOT eliminated. + # @param minTRList: time ranges of the minT grid + # @type minTRList: Python list of Python TimeRange objects. + # @param maxTRList: time ranges of the maxT grid + # @type maxTRList: Python list of Python TimeRange objects. + # @return: The combined and sorted collection. + # @rtype: Python list of Python TimeRange objects + def combineInventoryLists(self, minTRList, maxTRList): + bigList = minTRList + maxTRList + bigList.sort(self.trSortMethod) + return bigList + + ## + # Main entry point of this procedure. If varDict["Check or Force"] is + # "Check Only", temporary grids will be created. Otherwise, the minT, maxT, + # T, and Td grids may be changed. + # @param varDict: Determines whether temporary grids are created or + # temperature grids are modified. 
+ # @type varDict: Python dictionary of strings to strings + def execute(self, timeRange, varDict): + checkOnly = varDict["Check or Force:"] == "Check Only" + + # remove any temporary WEs we created + weList = ["TLessThanMin", "TGreaterThanMax", "TdGreaterThanT", + "MinGreaterThanMax", "MaxLessThanMin"] + for we in weList: + parm = self.getParm(MODEL, we, LEVEL) + if parm is not None: + self.unloadWE(MODEL, we, LEVEL) + + self.setToolType("numeric") + + if timeRange is None or not timeRange.isValid(): + start = self._gmtime() - (2 * DAY_IN_SECS) # two days ago + end = self._gmtime() + (10 * DAY_IN_SECS) # 10 days from now + timeRange = TimeRange.TimeRange(start, end) + + # get all the grids for all elements upfront and update as we modify + # any grids. We need to do this because the GFE caches the original + # version of all grids and there's no way yet to turn this off. + + minTRList = self.getWEInventory("MinT", timeRange) + minTDict = self.getGrids(MODEL, "MinT", LEVEL, minTRList, mode = "First") + + maxTRList = self.getWEInventory("MaxT", timeRange) + maxTDict = self.getGrids(MODEL, "MaxT", LEVEL, maxTRList, mode = "First") + + TTRList = self.getWEInventory("T", timeRange) + tDict = self.getGrids(MODEL, "T", LEVEL, TTRList, mode = "First") + + TdTRList = self.getWEInventory("Td", timeRange) + tdDict = self.getGrids(MODEL, "Td", LEVEL, TdTRList, mode = "First") + + # get the all locks by other users, so we can detect they are locked + # before attempting to modify them + minTLocks = self.getLocksByOthers("MinT") + maxTLocks = self.getLocksByOthers("MaxT") + tLocks = self.getLocksByOthers("T") + tdLocks = self.getLocksByOthers("Td") + + # get the list of edit areas + eaList = self.editAreaList() + + # get the local WFO domain and make a mask with it + # local sites may wish to use a different maks so that a larger area + # is operated on by the tool - for example marine sites may wish to + # expand it to marine zones as well as land. 
+ # To change the area, simply use a locally-defined edit area instead + # of self.getSiteID(). Example: siteID = "CWAPlusMarineZones" + #siteID = self.getSiteID() - this was set in A2 - changed to A1 below + siteID = "ISC_Send_Area" + if siteID in eaList: # make sure the edit area is there + siteEA = self.getEditArea(siteID) # get the edit area + siteMask = self.encodeEditArea(siteEA) # make a mask with siteEA + siteMask = siteMask.astype(bool8) + else: + topo = self.getGridShape() + siteMask = ones(topo, bool8) + print(siteID, "edit area not found. Using entire GFE domain.") + + # Ensure that MinT <= MaxT first + minMaxList = self.combineInventoryLists(minTRList, maxTRList) + foundProblem = False + for i in range(0, len(minMaxList) - 1): + if minMaxList[i+1] in minTRList: # previous max modifies min + maxTR = minMaxList[i] + minTR = minMaxList[i+1] + # Make sure these TRs really exist in the inventory + if maxTR not in maxTRList: + continue + if minTR not in minTRList: + continue + + minGrid = minTDict[minTR] + maxGrid = maxTDict[maxTR] + + mask = (minGrid > maxGrid) & siteMask + if not sometrue(mask): # make sure some points are set + continue + + foundProblem = True + + if checkOnly: + self.createGrid(MODEL, "MinGreaterThanMax", "SCALAR", mask.astype(float32), + minTR, minAllowedValue=0.0, maxAllowedValue= 1.0) + else: # force the change + if minTR in minTLocks: + msg = "Can't modify MinT grid at " + str(minTR) + \ + " locked by another user." 
+ self.statusBarMsg(msg, "S") + continue + # calculate and modify the MinT grid + minGrid[mask] = maxGrid[mask] + self.createGrid(MODEL, "MinT", "SCALAR", minGrid, minTR) + minTDict[minTR] = minGrid # update the minT dictionary + + elif minMaxList[i+1] in maxTRList: # previous min modifies max + minTR = minMaxList[i] + maxTR = minMaxList[i+1] + # Make sure these TRs really exist in the inventory + if maxTR not in maxTRList: + continue + if minTR not in minTRList: + continue + maxGrid = maxTDict[maxTR] + minGrid = minTDict[minTR] + + mask = (maxGrid < minGrid) & siteMask + if not sometrue(mask): # make sure some points are set + continue + + foundProblem = True + + if checkOnly: + self.createGrid(MODEL, "MaxLessThanMin", "SCALAR", mask.astype(float32), + maxTR, minAllowedValue=0.0, maxAllowedValue= 1.0) + else: # force the change + if maxTR in maxTLocks: + msg = "Can't modify MaxT grid at " + str(maxTR) + \ + " locked by another user." + self.statusBarMsg(msg, "S") + continue + # calculate and modify the MaxT grid + maxGrid[mask] = minGrid[mask] + self.createGrid(MODEL, "MaxT", "SCALAR", maxGrid, maxTR) + # update the minT dictionary with the modified minT grid + maxTDict[maxTR] = maxGrid + + + # Now check for T < MinT + for tr in minTRList: + minTGrid = minTDict[tr] + + tInv = self.overlappingTRs(tr, TTRList) + if tInv == []: # empty list, keep going + continue + + for tymeRng in tInv: + # find points in the siteMask where T < MinT + tGrid = tDict[tymeRng] + tTooLow = (tGrid < minTGrid) & siteMask + if not sometrue(tTooLow): + continue + + foundProblem = True + + if checkOnly: # just make a grid showing the mask where T < MinT + self.createGrid(MODEL, "TLessThanMin", "SCALAR", tTooLow.astype(float32), tymeRng, + minAllowedValue=0.0, maxAllowedValue= 1.0) + else: # force T to the MinT value + if tymeRng in tLocks: + msg = "Can't modify T grid at " + str(tymeRng) + \ + " locked by another user." 
+ self.statusBarMsg(msg, "S") + continue + tGrid[tTooLow] = minTGrid[tTooLow] + self.createGrid(MODEL, "T", "SCALAR", tGrid, tymeRng) + tDict[tymeRng] = tGrid # update the tDict + + + # check for T > MaxT + for tr in maxTRList: + # get the grid first + maxTGrid = maxTDict[tr] + + # then warp the end time so we include T grids ending at 01z + startTime = tr.startTime() + endTime = tr.endTime().unixTime() + roundedTime = int((endTime + 43200) / 86400) * 86400 + 3600 + endTime = max(endTime, roundedTime) + endTime = AbsTime.AbsTime(endTime) + timeRange = TimeRange.TimeRange(startTime, endTime) + + # use the warpedTR to fetch the T inventory + tInv = self.overlappingTRs(timeRange, TTRList) + if tInv == []: # empty list, keep going + continue + + for tymeRng in tInv: + # find points in the siteMask where T > MaxT + tGrid = tDict[tymeRng] + tTooHigh = (tGrid > maxTGrid) & siteMask + if not sometrue(tTooHigh): # make sure some points are set + continue + + foundProblem = True + + if checkOnly: # just make a grid + self.createGrid(MODEL, "TGreaterThanMax", "SCALAR", tTooHigh.astype(float32), tymeRng, + minAllowedValue=0.0, maxAllowedValue= 1.0) + else: # force T to the MaxT value + if tymeRng in tLocks: + msg = "Can't modify T grid at " + str(tymeRng) + \ + " locked by another user." 
+ self.statusBarMsg(msg, "S") + continue + tGrid[tTooHigh] = maxTGrid[tTooHigh] + self.createGrid(MODEL, "T", "SCALAR", tGrid, tymeRng) + tDict[tymeRng] = tGrid # update the tDict + + # Now check T < Td + for tr in TTRList: + + # make sure there's a matching Td grid + if not tr in TdTRList: + continue + + tGrid = tDict[tr] + tdGrid = tdDict[tr] + + # find points in the siteMask where Td > T + TdTooHigh = (tdGrid > tGrid) & siteMask + if not sometrue(TdTooHigh): # make sure some points are set + continue + + foundProblem = True + + if checkOnly: # just make a grid + self.createGrid(MODEL, "TdGreaterThanT", "SCALAR", TdTooHigh.astype(float32), tr, + minAllowedValue=0.0, maxAllowedValue= 1.0) + else: # force Td <= T + if tr in tdLocks: + msg = "Can't modify Td grid at " + str(tr) + \ + " locked by another user." + self.statusBarMsg(msg, "S") + continue + tdGrid[TdTooHigh] = tGrid[TdTooHigh] + self.createGrid(MODEL, "Td", "SCALAR", tdGrid, tr) + tdDict[tr] = tdGrid # update the tdDict + + + if not foundProblem: + msg = "CheckTandTd found no inconsistencies." + self.statusBarMsg(msg, "R") + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CheckWindGust.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CheckWindGust.py index 9a32212feb..2da3595ac3 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CheckWindGust.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CheckWindGust.py @@ -1,238 +1,238 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# CheckWindGust -# -# Author: Tom Mazza, based on Tom LeFebvre's CheckTandTd -# -# Version Date: 6 Oct 2006 -# Version: 1.0 -# -# Modified by Tom Mazza 6 Jun 2005 to use local ISC_Send_Area -# and to redo RH, and, if loaded in GE at the ttime, HeatIndex and WindChill, -# on "Force: TMin<=T<=TMax\n and Td<=T" option anytime T and / or Td are -# changed (change on Td only does not affect WindChill). -# -# 7/27/2015 yteng Use the time range selected in the Grid Manager if any, -# and retrived teh necessary grids for improved efficiency -# -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify -MenuItems = ["Consistency"] - -VariableList = [("Check or Force:" , "Check Only", "radio", - ["Check Only", "Force: WindGust>=Wind"])] - -import SmartScript -import time -import TimeRange -import AbsTime -from JUtil import JavaWrapperClass -from numpy import * - -MODEL = "Fcst" -LEVEL = "SFC" -DAY_IN_SECS = 24 * 3600 - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - - def getWEInventory(self, WEName, timeRange=None): - if timeRange is None: - yesterday = self._gmtime() - (2 * DAY_IN_SECS) # two days ago - later = self._gmtime() + 10 * DAY_IN_SECS # 10 days from now - timeRange = TimeRange.TimeRange(yesterday, later) - if isinstance(timeRange, JavaWrapperClass): - timeRange = timeRange.toJavaObj() - parm = self.getParm(MODEL, WEName, LEVEL); - inv = parm.getGridInventory(timeRange) - trList = [] - for gd in inv: - tr = TimeRange.TimeRange(gd.getGridTime()) - trList.append(tr) - - return trList - - - def getLocksByOthers(self, weName): - # returns list of time ranges locked by others for this weather element - parm = self.getParm(self.mutableID(), weName, LEVEL) - if parm is None: - return [] - lt = parm.getLockTable() - jlok = lt.lockedByOther() - lbo = [] - for i in xrange(jlok.size()): - tr = jlok.get(i) - tr = TimeRange.TimeRange(tr) - lbo.append( tr ) - return lbo - - def overlappingTRs(self, timeRange, trList): - newTRList = [] - for tr in trList: - if timeRange.overlaps(tr): - newTRList.append(tr) - - return newTRList - - # method so that timeRanges will be sorted earliest to latest - def trSortMethod(self, first, last): - if first.startTime() < last.startTime(): - return -1 - elif first.startTime() == last.startTime(): - return 0 - else: - return 1 - - def combineInventoryLists(self, minTRList, maxTRList): - bigList = minTRList + maxTRList - bigList.sort(self.trSortMethod) - return bigList - - def execute(self, timeRange, 
varDict): - checkOnly = varDict["Check or Force:"] == "Check Only" - startWindChill = 10 ## First month to report wind chill - endWindChill = 4 ## Last month to report wind chill - - # Get local edit area simply by using the baseline edit area - eaList = self.editAreaList() - siteID = self.getSiteID() - - if siteID in eaList: # make sure the edit area is there - siteEA = self.getEditArea(siteID) # get the edit area - siteMask = self.encodeEditArea(siteEA) # make a mask with siteEA - siteMask = siteMask.astype(bool8) - else: - topo = self.getGridShape() - siteMask = ones(topo, bool8) - print siteID, "edit area not found. Using entire GFE domain." - - errorsFound = "no" # To keep track of any errors found for - # status bar message at the end. - - # remove any temporary WEs we created - weList = ["WindGustLessThanWindSpeed"] - for we in weList: - parm = self.getParm(MODEL, we, LEVEL) - if parm is not None: - self.unloadWE(MODEL, we, LEVEL) - - self.setToolType("numeric") - - # if no timeRange selected then make a big timeRange - if timeRange is None or not timeRange.isValid(): - start = self._gmtime() - (2 * DAY_IN_SECS) # two days ago - end = self._gmtime() + (10 * DAY_IN_SECS) # 10 days from now - timeRange = TimeRange.TimeRange(start, end) - - # get all the grids for all elements upfront and update as we modify - # any grids. We need to do this because the GFE caches the original - # version of all grids and there's no way yet to turn this off. 
- - WindDirDict = {} - WindSpeedDict = {} - WindGustDict = {} - - WindTRList = self.getWEInventory("Wind", timeRange) - for tr in WindTRList: - grid = self.getGrids(MODEL, "Wind", LEVEL, tr, mode = "First") - WindDirDict[tr] = grid[1] - WindSpeedDict[tr] = grid[0] - - WindGustTRList = self.getWEInventory("WindGust", timeRange) - WindGustDict = self.getGrids(MODEL, "WindGust", LEVEL, WindGustTRList, mode = "First") - - # get the all locks by other users, so we can detect they are locked - # before attempting to modify them - WindLocks = self.getLocksByOthers("Wind") - WindGustLocks = self.getLocksByOthers("WindGust") - - nowZ = time.gmtime(time.time()) - curMon = nowZ[1] - - WindChangeTools = [] - if curMon >= startWindChill or curMon <= endWindChill: - WindChangeTools.append(("WindChillTool", "WindChill")) - - WindGustOnlyChangeTools = [] - databaseID = self.findDatabase(MODEL) - - # Now check WindGust >= WindSpeed # was T < Td - for tr in WindTRList: - - # make sure there's a matching WindGust grid - if not tr in WindGustTRList: - continue - - WindSpeedGrid = WindSpeedDict[tr] - WindGustGrid = WindGustDict[tr] - - # find points in the siteMask where WindGust < Wind - mask = (WindGustGrid < WindSpeedGrid) & siteMask - if not sometrue(mask): # make sure some points are set - continue - - errorsFound = "yes" - if checkOnly: # just make a grid - self.createGrid(MODEL, "WindGustLessThanWindSpeed", "SCALAR", mask.astype(float32), tr, minAllowedValue=0.0, maxAllowedValue= 1.0) - else: # force WindGust >= WindSpeed - if tr in WindGustLocks: - msg = "Can't modify WindGust grid at " + str(tInv[i]) + \ - " locked by another user." 
- self.statusBarMsg(msg, "S") - continue - editArea = self.decodeEditArea(mask.astype(float32)) - WindGustGrid = where(mask, WindSpeedGrid, WindGustGrid) - self.createGrid(MODEL, "WindGust", "SCALAR", WindGustGrid, tr) - WindGustDict[tr] = WindGustGrid # update the tdDict - for toolName, elementName in WindChangeTools: - parm = (elementName, LEVEL, databaseID) - if (toolName in WindGustOnlyChangeTools) and (parm in self.loadedParms()): - gridInfo = self.getGridInfo(MODEL, elementName, LEVEL, tr) - if gridInfo == []: - self.createFromScratchCmd([elementName], tr, - repeat=1, duration=1) - error = self.callSmartTool(toolName, elementName, - editArea, tr) - if error is not None: - break - - - # Send a message to the status bar - if errorsFound == "yes": - self.statusBarMsg('CheckWindGust completed - One or more Flags on the play.', 'R') - else: - self.statusBarMsg('CheckWindGust completed - No Flags!', 'R') +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# CheckWindGust +# +# Author: Tom Mazza, based on Tom LeFebvre's CheckTandTd +# +# Version Date: 6 Oct 2006 +# Version: 1.0 +# +# Modified by Tom Mazza 6 Jun 2005 to use local ISC_Send_Area +# and to redo RH, and, if loaded in GE at the ttime, HeatIndex and WindChill, +# on "Force: TMin<=T<=TMax\n and Td<=T" option anytime T and / or Td are +# changed (change on Td only does not affect WindChill). +# +# 7/27/2015 yteng Use the time range selected in the Grid Manager if any, +# and retrived teh necessary grids for improved efficiency +# +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify +MenuItems = ["Consistency"] + +VariableList = [("Check or Force:" , "Check Only", "radio", + ["Check Only", "Force: WindGust>=Wind"])] + +import SmartScript +import time +import TimeRange +import AbsTime +from JUtil import JavaWrapperClass +from numpy import * + +MODEL = "Fcst" +LEVEL = "SFC" +DAY_IN_SECS = 24 * 3600 + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + + def getWEInventory(self, WEName, timeRange=None): + if timeRange is None: + yesterday = self._gmtime() - (2 * DAY_IN_SECS) # two days ago + later = self._gmtime() + 10 * DAY_IN_SECS # 10 days from now + timeRange = TimeRange.TimeRange(yesterday, later) + if isinstance(timeRange, JavaWrapperClass): + timeRange = timeRange.toJavaObj() + parm = self.getParm(MODEL, WEName, LEVEL); + inv = parm.getGridInventory(timeRange) + trList = [] + for gd in inv: + tr = TimeRange.TimeRange(gd.getGridTime()) + trList.append(tr) + + return trList + + + def getLocksByOthers(self, weName): + # returns list of time 
ranges locked by others for this weather element + parm = self.getParm(self.mutableID(), weName, LEVEL) + if parm is None: + return [] + lt = parm.getLockTable() + jlok = lt.lockedByOther() + lbo = [] + for i in range(jlok.size()): + tr = jlok.get(i) + tr = TimeRange.TimeRange(tr) + lbo.append( tr ) + return lbo + + def overlappingTRs(self, timeRange, trList): + newTRList = [] + for tr in trList: + if timeRange.overlaps(tr): + newTRList.append(tr) + + return newTRList + + # method so that timeRanges will be sorted earliest to latest + def trSortMethod(self, first, last): + if first.startTime() < last.startTime(): + return -1 + elif first.startTime() == last.startTime(): + return 0 + else: + return 1 + + def combineInventoryLists(self, minTRList, maxTRList): + bigList = minTRList + maxTRList + bigList.sort(self.trSortMethod) + return bigList + + def execute(self, timeRange, varDict): + checkOnly = varDict["Check or Force:"] == "Check Only" + startWindChill = 10 ## First month to report wind chill + endWindChill = 4 ## Last month to report wind chill + + # Get local edit area simply by using the baseline edit area + eaList = self.editAreaList() + siteID = self.getSiteID() + + if siteID in eaList: # make sure the edit area is there + siteEA = self.getEditArea(siteID) # get the edit area + siteMask = self.encodeEditArea(siteEA) # make a mask with siteEA + siteMask = siteMask.astype(bool8) + else: + topo = self.getGridShape() + siteMask = ones(topo, bool8) + print(siteID, "edit area not found. Using entire GFE domain.") + + errorsFound = "no" # To keep track of any errors found for + # status bar message at the end. 
+ + # remove any temporary WEs we created + weList = ["WindGustLessThanWindSpeed"] + for we in weList: + parm = self.getParm(MODEL, we, LEVEL) + if parm is not None: + self.unloadWE(MODEL, we, LEVEL) + + self.setToolType("numeric") + + # if no timeRange selected then make a big timeRange + if timeRange is None or not timeRange.isValid(): + start = self._gmtime() - (2 * DAY_IN_SECS) # two days ago + end = self._gmtime() + (10 * DAY_IN_SECS) # 10 days from now + timeRange = TimeRange.TimeRange(start, end) + + # get all the grids for all elements upfront and update as we modify + # any grids. We need to do this because the GFE caches the original + # version of all grids and there's no way yet to turn this off. + + WindDirDict = {} + WindSpeedDict = {} + WindGustDict = {} + + WindTRList = self.getWEInventory("Wind", timeRange) + for tr in WindTRList: + grid = self.getGrids(MODEL, "Wind", LEVEL, tr, mode = "First") + WindDirDict[tr] = grid[1] + WindSpeedDict[tr] = grid[0] + + WindGustTRList = self.getWEInventory("WindGust", timeRange) + WindGustDict = self.getGrids(MODEL, "WindGust", LEVEL, WindGustTRList, mode = "First") + + # get the all locks by other users, so we can detect they are locked + # before attempting to modify them + WindLocks = self.getLocksByOthers("Wind") + WindGustLocks = self.getLocksByOthers("WindGust") + + nowZ = time.gmtime(time.time()) + curMon = nowZ[1] + + WindChangeTools = [] + if curMon >= startWindChill or curMon <= endWindChill: + WindChangeTools.append(("WindChillTool", "WindChill")) + + WindGustOnlyChangeTools = [] + databaseID = self.findDatabase(MODEL) + + # Now check WindGust >= WindSpeed # was T < Td + for tr in WindTRList: + + # make sure there's a matching WindGust grid + if not tr in WindGustTRList: + continue + + WindSpeedGrid = WindSpeedDict[tr] + WindGustGrid = WindGustDict[tr] + + # find points in the siteMask where WindGust < Wind + mask = (WindGustGrid < WindSpeedGrid) & siteMask + if not sometrue(mask): # make sure some 
points are set + continue + + errorsFound = "yes" + if checkOnly: # just make a grid + self.createGrid(MODEL, "WindGustLessThanWindSpeed", "SCALAR", mask.astype(float32), tr, minAllowedValue=0.0, maxAllowedValue= 1.0) + else: # force WindGust >= WindSpeed + if tr in WindGustLocks: + msg = "Can't modify WindGust grid at " + str(tInv[i]) + \ + " locked by another user." + self.statusBarMsg(msg, "S") + continue + editArea = self.decodeEditArea(mask.astype(float32)) + WindGustGrid = where(mask, WindSpeedGrid, WindGustGrid) + self.createGrid(MODEL, "WindGust", "SCALAR", WindGustGrid, tr) + WindGustDict[tr] = WindGustGrid # update the tdDict + for toolName, elementName in WindChangeTools: + parm = (elementName, LEVEL, databaseID) + if (toolName in WindGustOnlyChangeTools) and (parm in self.loadedParms()): + gridInfo = self.getGridInfo(MODEL, elementName, LEVEL, tr) + if gridInfo == []: + self.createFromScratchCmd([elementName], tr, + repeat=1, duration=1) + error = self.callSmartTool(toolName, elementName, + editArea, tr) + if error is not None: + break + + + # Send a message to the status bar + if errorsFound == "yes": + self.statusBarMsg('CheckWindGust completed - One or more Flags on the play.', 'R') + else: + self.statusBarMsg('CheckWindGust completed - No Flags!', 'R') diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CompletePopulate.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CompletePopulate.py index 2f0e004481..5565fba80f 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CompletePopulate.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CompletePopulate.py @@ -1,145 +1,145 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# CompletePopulate -# -# Author: -# ---------------------------------------------------------------------------- - -MenuItems = ["Populate"] - -import SmartScript - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, editArea, timeRange, varDict): - """Copy from models. 
""" - - elements = ["T", "Td", "Wind", "Sky", "Wx", "PoP", "LAL", - "QPF", "LAL", "SnowAmt", "MaxT", "MinT", - "TransWind", "CWR", "SnowLevel", "FzLevel", - "Haines", "MixHgt"] - # - print "Copying grids from models" - dbID = self.findDatabase("RAP13") - tr = self.createTimeRange(0, 168, "Database", dbID) - self.copyCmd(elements, dbID, tr) - # - dbID = self.findDatabase("GFS80") - tr = self.createTimeRange(0, 168, "Database", dbID) - self.copyCmd(elements, dbID, tr) - # - dbID = self.findDatabase("NAM12") - tr = self.createTimeRange(0, 60, "Database", dbID) - self.copyCmd(elements, dbID, tr) - #Copy - dbID = self.findDatabase("RAP40") - tr = self.createTimeRange(0, 12, "Database", dbID) - self.copyCmd(elements, dbID, tr) - - # Create the rest of the data from scratch - timeRange = self.createTimeRange(-24, 168, "LT") - - print "Creating grids from scratch" - elements1 = ['RH', 'WaveHeight', 'WindWaveHgt', 'Swell', - 'Swell2','Period', 'Period2', 'WindGust'] - for element in elements1: - self.createFromScratchCmd([element], timeRange, 6, 6) - - elements2 = ['HrsOfSun', 'InvBurnOffTemp', 'MaxRH', 'MinRH', - 'RHtrend', 'Ttrend', 'Wetflag', 'WindChill', 'HeatIndex'] - for element in elements2: - self.createFromScratchCmd([element], timeRange, 0, 0) - - # Create a Hazards grid - hazKeys = [] - value = self.getIndex("CF.Y^FF.A^HF.W^HI.A^FW.W^TO.A:1234^BZ.W", hazKeys) - grid = self.newGrid(value, dtype=int8) - todayTR = self.getTimeRange("Today") - self.createGrid("Fcst", "Hazards", "DISCRETE", (grid, hazKeys), todayTR) - - print "Interpolating" - # Interpolate - interpElements = ['T', 'Wind', 'Sky', 'TransWind'] - self.interpolateCmd(interpElements, timeRange, "GAPS","SYNC", 3) - - # Make data for the other elements - print "Calling WindGustTool" - self.callSmartTool("WindGustTool", "WindGust", editArea, timeRange, - missingDataMode="Create") - print "Calling WindChillTool" - self.callSmartTool("WindChillTool", "WindChill", editArea, timeRange, - 
missingDataMode="Create") - print "Calling HeatIndexTool" - self.callSmartTool("HeatIndexTool", "HeatIndex", editArea, timeRange, - missingDataMode="Create") - print "Calling RHTool" - self.callSmartTool("RHTool", "RH", editArea, timeRange, - missingDataMode="Create") - print "Calling MinRHTool" - self.callSmartTool("MinRHTool", "MinRH", editArea, timeRange, - missingDataMode="Create") - print "Calling MaxRHTool" - self.callSmartTool("MaxRHTool", "MaxRH", editArea, timeRange, - missingDataMode="Create") - print "Calling TtrendTool" - self.callSmartTool("TtrendTool", "Ttrend", editArea, timeRange, - missingDataMode="Create") - print "Calling RHtrendTool" - self.callSmartTool("RHtrendTool", "RHtrend", editArea, timeRange, - missingDataMode="Create") - print "Calling WetflagTool" - self.callSmartTool("WetflagTool", "Wetflag", editArea, timeRange, - missingDataMode="Create") - print "Calling HrsOfSunTool" - self.callSmartTool("HrsOfSunTool", "HrsOfSun", editArea, timeRange, - missingDataMode="Create") - print "Calling InvBurnOffTempTool" - self.callSmartTool("InvBurnOffTempTool", "InvBurnOffTemp", editArea, timeRange, - missingDataMode="Create") - print "Calling SwellTool" - self.callSmartTool("SwellTool", "Swell", editArea, timeRange, - missingDataMode="Create") - print "Calling Swell2Tool" - self.callSmartTool("Swell2Tool", "Swell2", editArea, timeRange, - missingDataMode="Create") - print "Calling PeriodTool" - self.callSmartTool("PeriodTool", "Period", editArea, timeRange, - missingDataMode="Create") - print "Calling Period2Tool" - self.callSmartTool("Period2Tool", "Period2", editArea, timeRange, - missingDataMode="Create") - print "Calling WindWaveHgtTool" - self.callSmartTool("WindWaveHgtTool", "WindWaveHgt", editArea, timeRange, - missingDataMode="Create") - print "Calling WaveHeightTool" - self.callSmartTool("WaveHeightTool", "WaveHeight", editArea, timeRange, - missingDataMode="Create") - print "CompletePopulate Done" - - - - - +## +# This software was 
developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# CompletePopulate +# +# Author: +# ---------------------------------------------------------------------------- + +MenuItems = ["Populate"] + +import SmartScript + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, editArea, timeRange, varDict): + """Copy from models. 
""" + + elements = ["T", "Td", "Wind", "Sky", "Wx", "PoP", "LAL", + "QPF", "LAL", "SnowAmt", "MaxT", "MinT", + "TransWind", "CWR", "SnowLevel", "FzLevel", + "Haines", "MixHgt"] + # + print("Copying grids from models") + dbID = self.findDatabase("RAP13") + tr = self.createTimeRange(0, 168, "Database", dbID) + self.copyCmd(elements, dbID, tr) + # + dbID = self.findDatabase("GFS80") + tr = self.createTimeRange(0, 168, "Database", dbID) + self.copyCmd(elements, dbID, tr) + # + dbID = self.findDatabase("NAM12") + tr = self.createTimeRange(0, 60, "Database", dbID) + self.copyCmd(elements, dbID, tr) + #Copy + dbID = self.findDatabase("RAP40") + tr = self.createTimeRange(0, 12, "Database", dbID) + self.copyCmd(elements, dbID, tr) + + # Create the rest of the data from scratch + timeRange = self.createTimeRange(-24, 168, "LT") + + print("Creating grids from scratch") + elements1 = ['RH', 'WaveHeight', 'WindWaveHgt', 'Swell', + 'Swell2','Period', 'Period2', 'WindGust'] + for element in elements1: + self.createFromScratchCmd([element], timeRange, 6, 6) + + elements2 = ['HrsOfSun', 'InvBurnOffTemp', 'MaxRH', 'MinRH', + 'RHtrend', 'Ttrend', 'Wetflag', 'WindChill', 'HeatIndex'] + for element in elements2: + self.createFromScratchCmd([element], timeRange, 0, 0) + + # Create a Hazards grid + hazKeys = [] + value = self.getIndex("CF.Y^FF.A^HF.W^HI.A^FW.W^TO.A:1234^BZ.W", hazKeys) + grid = self.newGrid(value, dtype=int8) + todayTR = self.getTimeRange("Today") + self.createGrid("Fcst", "Hazards", "DISCRETE", (grid, hazKeys), todayTR) + + print("Interpolating") + # Interpolate + interpElements = ['T', 'Wind', 'Sky', 'TransWind'] + self.interpolateCmd(interpElements, timeRange, "GAPS","SYNC", 3) + + # Make data for the other elements + print("Calling WindGustTool") + self.callSmartTool("WindGustTool", "WindGust", editArea, timeRange, + missingDataMode="Create") + print("Calling WindChillTool") + self.callSmartTool("WindChillTool", "WindChill", editArea, timeRange, + 
missingDataMode="Create") + print("Calling HeatIndexTool") + self.callSmartTool("HeatIndexTool", "HeatIndex", editArea, timeRange, + missingDataMode="Create") + print("Calling RHTool") + self.callSmartTool("RHTool", "RH", editArea, timeRange, + missingDataMode="Create") + print("Calling MinRHTool") + self.callSmartTool("MinRHTool", "MinRH", editArea, timeRange, + missingDataMode="Create") + print("Calling MaxRHTool") + self.callSmartTool("MaxRHTool", "MaxRH", editArea, timeRange, + missingDataMode="Create") + print("Calling TtrendTool") + self.callSmartTool("TtrendTool", "Ttrend", editArea, timeRange, + missingDataMode="Create") + print("Calling RHtrendTool") + self.callSmartTool("RHtrendTool", "RHtrend", editArea, timeRange, + missingDataMode="Create") + print("Calling WetflagTool") + self.callSmartTool("WetflagTool", "Wetflag", editArea, timeRange, + missingDataMode="Create") + print("Calling HrsOfSunTool") + self.callSmartTool("HrsOfSunTool", "HrsOfSun", editArea, timeRange, + missingDataMode="Create") + print("Calling InvBurnOffTempTool") + self.callSmartTool("InvBurnOffTempTool", "InvBurnOffTemp", editArea, timeRange, + missingDataMode="Create") + print("Calling SwellTool") + self.callSmartTool("SwellTool", "Swell", editArea, timeRange, + missingDataMode="Create") + print("Calling Swell2Tool") + self.callSmartTool("Swell2Tool", "Swell2", editArea, timeRange, + missingDataMode="Create") + print("Calling PeriodTool") + self.callSmartTool("PeriodTool", "Period", editArea, timeRange, + missingDataMode="Create") + print("Calling Period2Tool") + self.callSmartTool("Period2Tool", "Period2", editArea, timeRange, + missingDataMode="Create") + print("Calling WindWaveHgtTool") + self.callSmartTool("WindWaveHgtTool", "WindWaveHgt", editArea, timeRange, + missingDataMode="Create") + print("Calling WaveHeightTool") + self.callSmartTool("WaveHeightTool", "WaveHeight", editArea, timeRange, + missingDataMode="Create") + print("CompletePopulate Done") + + + + + diff --git 
a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CopyNHCProposed.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CopyNHCProposed.py index ed3931088f..1d94b4dec1 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CopyNHCProposed.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CopyNHCProposed.py @@ -1,175 +1,175 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# CopyNHCProposed - Version 3.0 -# -# Author: T LeFebvre/P. Santos -# -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Dec 10, 2015 T LeFebvre -# Sep 19, 2016 19293 randerso Changes for 2017 Tropical Season -# Feb 27, 2017 122217 T Lefebvre Fix to calDiff Grid Call (sending both boolean and grid instead of just grid) -# -######################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Populate"] - -# The ToolList is optional, but recommended, if you are calling -# Smart Tools from your Script. -# If present, it can be used to show which grids will be -# modified by the Script. - -ToolList = [] - -### If desired, Set up variables to be solicited from the user: -## If your script calls Smart Tools, this VariableList should cover -## cover all the variables necessary for the tools. 
- -import re, time - -import AbsTime -import LogStream -import TimeRange -import TropicalUtility -import numpy as np - - -# The GUI portion is commented out for now, since we have only one element. -# This may be restored when other hazards are supported. -# Supported_elements=["ProposedSS"] -# VariableList = [("Choose Hazards:" , ["ProposedSS"], "check", Supported_elements), -# # ["ProposedSS"]), -# ] -class Procedure (TropicalUtility.TropicalUtility): - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - def execute(self): - - # Assign a timeRange from now to 48 hours from now - start = int(self._gmtime().unixTime() / 3600) * 3600 - end = start + 48 * 3600 - timeRange = self.GM_makeTimeRange(start, end) - - # Commented out until this tool suports more than one hazard -# # Copy the ISC data into a grid -# hazardsToCopy = varDict["Choose Hazards:"] -# -# if len(hazardsToCopy) == 0: -# self.statusBarMsg("You must choose at least one hazard.", "U") -# return - # Delete any existing collaboration difference grids - self.unloadWE("Fcst", "CollabDiffSS", "SFC") - - # Hard-coded to the only element supported by this tool. - hazardsToCopy = ["ProposedSS"] - - # This list should come from the GUI. Hard-coded for now. 
- weNames = ["ProposedSS"] # Eventually should come from the GUI - - # Remove any pre-existing grids first - for weName in weNames: - - if weName not in hazardsToCopy: - continue - - trList = self.GM_getWEInventory(weName, "Fcst") - for delTR in trList: - self.deleteGrid("Fcst", weName, "SFC", delTR) - - # Copy any Proposed nc grids into the Fcst - for weName in weNames: - - if weName not in hazardsToCopy: - continue - - iscWeName = weName + "nc" - trList = self.GM_getWEInventory(iscWeName, "ISC", timeRange=timeRange) - - if len(trList) == 0: - LogStream.logProblem("No grids found for ", iscWeName) - continue - - gridTR = trList[-1] # only interested in the latest grid - - proposedGrid = self.getGrids("ISC", iscWeName, "SFC", gridTR) - (iscGrid, iscKeys) = proposedGrid - - start = gridTR.endTime().unixTime() - (48 * 3600) - end = gridTR.endTime().unixTime() - createTR = self.GM_makeTimeRange(start, end) - - self.createGrid("Fcst", weName, "DISCRETE", (iscGrid, iscKeys), - createTR, discreteKeys=iscKeys, discreteOverlap=0, - discreteAuxDataLength=0, defaultColorTable="StormSurgeHazards") - - # This If section added during Jan 2017 SWiT. It creates a diff grid between incoming NHC ProposedSS and what is already in Hazards Grid. - if weName == "ProposedSS": - #hazTRlist = self.GM_getWEInventory("Hazards", "Fcst", gridTR) - hazTRlist = self.GM_getWEInventory("Hazards", "Fcst") - print "hazTRlist: ", hazTRlist - - if len(hazTRlist) == 0: - self.statusBarMsg("No Hazards grids found. No Diff to calculate.", "U") - else: - - HazardList = [] - anySSHazardsFound = False - - # See if we have any SS hazards - for hazTR in hazTRlist: - - # Assume we have SS hazards - ssHazardsFound = False - - hazardsGrid = self.getGrids("Fcst", "Hazards", "SFC", hazTR) - - (hazGrid, hazKeys) = hazardsGrid - - for key in hazKeys: -# print "key", key - if "SS." 
in key: - ssHazardsFound = True - anySSHazardsFound = True - break -# print "B4 anySSHazardsFound, ssHazardsFound", anySSHazardsFound, ssHazardsFound - # If this Hazards grid is not None - HazardList.append((ssHazardsFound, hazardsGrid)) - - print len(HazardList), len(hazTRlist) - - for index in range(len(HazardList)): - print "hazTR: ", hazTRlist[index] - - # Get the state of this Hazards grid - ssFound, hazardsGrid = HazardList[index] -# hazardsGrid = self.getGrids("Fcst", "Hazards", "SFC", hazTR) - - if hazTRlist[index].overlaps(createTR): -# if hazTR.startTime().unixTime() < createTR.endTime().unixTime(): - -# print "\n\nHazard time range is good" - # If any SS hazards were found, only produce the - # diff for grids which had a SS hazard. Otherwise, - # compare all grids - - if not anySSHazardsFound or (anySSHazardsFound and ssFound): - self.calcDiffGrid(HazardList[index][1], proposedGrid, "CollabDiffSS", hazTRlist[index], isWFO=True) - # Display the Storm Surge area to remind forecasters of the domain - stormSurgeEditArea = self.getEditArea("StormSurgeWW_EditArea_Local") - self.setActiveEditArea(stormSurgeEditArea) +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# CopyNHCProposed - Version 3.0 +# +# Author: T LeFebvre/P. 
Santos +# +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Dec 10, 2015 T LeFebvre +# Sep 19, 2016 19293 randerso Changes for 2017 Tropical Season +# Feb 27, 2017 122217 T Lefebvre Fix to calDiff Grid Call (sending both boolean and grid instead of just grid) +# +######################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Populate"] + +# The ToolList is optional, but recommended, if you are calling +# Smart Tools from your Script. +# If present, it can be used to show which grids will be +# modified by the Script. + +ToolList = [] + +### If desired, Set up variables to be solicited from the user: +## If your script calls Smart Tools, this VariableList should cover +## cover all the variables necessary for the tools. + +import re, time + +import AbsTime +import LogStream +import TimeRange +import TropicalUtility +import numpy as np + + +# The GUI portion is commented out for now, since we have only one element. +# This may be restored when other hazards are supported. 
+# Supported_elements=["ProposedSS"] +# VariableList = [("Choose Hazards:" , ["ProposedSS"], "check", Supported_elements), +# # ["ProposedSS"]), +# ] +class Procedure (TropicalUtility.TropicalUtility): + def __init__(self, dbss): + TropicalUtility.TropicalUtility.__init__(self, dbss) + + def execute(self): + + # Assign a timeRange from now to 48 hours from now + start = int(self._gmtime().unixTime() / 3600) * 3600 + end = start + 48 * 3600 + timeRange = self.GM_makeTimeRange(start, end) + + # Commented out until this tool suports more than one hazard +# # Copy the ISC data into a grid +# hazardsToCopy = varDict["Choose Hazards:"] +# +# if len(hazardsToCopy) == 0: +# self.statusBarMsg("You must choose at least one hazard.", "U") +# return + # Delete any existing collaboration difference grids + self.unloadWE("Fcst", "CollabDiffSS", "SFC") + + # Hard-coded to the only element supported by this tool. + hazardsToCopy = ["ProposedSS"] + + # This list should come from the GUI. Hard-coded for now. 
+ weNames = ["ProposedSS"] # Eventually should come from the GUI + + # Remove any pre-existing grids first + for weName in weNames: + + if weName not in hazardsToCopy: + continue + + trList = self.GM_getWEInventory(weName, "Fcst") + for delTR in trList: + self.deleteGrid("Fcst", weName, "SFC", delTR) + + # Copy any Proposed nc grids into the Fcst + for weName in weNames: + + if weName not in hazardsToCopy: + continue + + iscWeName = weName + "nc" + trList = self.GM_getWEInventory(iscWeName, "ISC", timeRange=timeRange) + + if len(trList) == 0: + LogStream.logProblem("No grids found for ", iscWeName) + continue + + gridTR = trList[-1] # only interested in the latest grid + + proposedGrid = self.getGrids("ISC", iscWeName, "SFC", gridTR) + (iscGrid, iscKeys) = proposedGrid + + start = gridTR.endTime().unixTime() - (48 * 3600) + end = gridTR.endTime().unixTime() + createTR = self.GM_makeTimeRange(start, end) + + self.createGrid("Fcst", weName, "DISCRETE", (iscGrid, iscKeys), + createTR, discreteKeys=iscKeys, discreteOverlap=0, + discreteAuxDataLength=0, defaultColorTable="StormSurgeHazards") + + # This If section added during Jan 2017 SWiT. It creates a diff grid between incoming NHC ProposedSS and what is already in Hazards Grid. + if weName == "ProposedSS": + #hazTRlist = self.GM_getWEInventory("Hazards", "Fcst", gridTR) + hazTRlist = self.GM_getWEInventory("Hazards", "Fcst") + print("hazTRlist: ", hazTRlist) + + if len(hazTRlist) == 0: + self.statusBarMsg("No Hazards grids found. No Diff to calculate.", "U") + else: + + HazardList = [] + anySSHazardsFound = False + + # See if we have any SS hazards + for hazTR in hazTRlist: + + # Assume we have SS hazards + ssHazardsFound = False + + hazardsGrid = self.getGrids("Fcst", "Hazards", "SFC", hazTR) + + (hazGrid, hazKeys) = hazardsGrid + + for key in hazKeys: +# print "key", key + if "SS." 
in key: + ssHazardsFound = True + anySSHazardsFound = True + break +# print "B4 anySSHazardsFound, ssHazardsFound", anySSHazardsFound, ssHazardsFound + # If this Hazards grid is not None + HazardList.append((ssHazardsFound, hazardsGrid)) + + print(len(HazardList), len(hazTRlist)) + + for index in range(len(HazardList)): + print("hazTR: ", hazTRlist[index]) + + # Get the state of this Hazards grid + ssFound, hazardsGrid = HazardList[index] +# hazardsGrid = self.getGrids("Fcst", "Hazards", "SFC", hazTR) + + if hazTRlist[index].overlaps(createTR): +# if hazTR.startTime().unixTime() < createTR.endTime().unixTime(): + +# print "\n\nHazard time range is good" + # If any SS hazards were found, only produce the + # diff for grids which had a SS hazard. Otherwise, + # compare all grids + + if not anySSHazardsFound or (anySSHazardsFound and ssFound): + self.calcDiffGrid(HazardList[index][1], proposedGrid, "CollabDiffSS", hazTRlist[index], isWFO=True) + # Display the Storm Surge area to remind forecasters of the domain + stormSurgeEditArea = self.getEditArea("StormSurgeWW_EditArea_Local") + self.setActiveEditArea(stormSurgeEditArea) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateNatlTCVZoneGroups.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateNatlTCVZoneGroups.py index da98ac1c13..0e42e19355 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateNatlTCVZoneGroups.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateNatlTCVZoneGroups.py @@ -1,415 +1,415 @@ -## ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# CreateNatlTCVZoneGroups - Version 3.0 -# -# Author: Matthew Belk (BOX) -# -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- -------------------------------------------- -# Apr 12, 2016 cleanup code and refactor to use -# GridManipulation and TropicalUtility -# Apr 29, 2016 added a popup banner with instructions to run the -# specific text formatter, for a particular storm -# Sep 19, 2016 19293 randerso Initial baseline check in -# -######################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["None"] -import os, re, time - -import AbsTime -import LocalizationSupport -import LogStream -import ProcessVariableList -import TropicalUtility -import numpy as np - - -class Procedure (TropicalUtility.TropicalUtility): - - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - #======================================================================= - # Define the UGC zone code prefix for each state in the domain. The - # keys are the edit area name for each state in the domain we want. 
- - self.searchUGCdict = { - "Alabama":"ALZ", "Arkansas":"ARZ", "Connecticut":"CTZ", - "Delaware":"DEZ", "DistrictofColumbia":"DCZ", "Florida":"FLZ", - "Georgia":"GAZ", "Louisiana":"LAZ", "Maine":"MEZ", "Maryland":"MDZ", - "Massachusetts":"MAZ", "Mississippi":"MSZ", "Missouri":"MOZ", - "NewHampshire":"NHZ", "NewJersey":"NJZ", "NewMexico":"NMZ", - "NewYork":"NYZ", "NorthCarolina":"NCZ", "Oklahoma":"OKZ", - "Pennsylvania":"PAZ", "PuertoRico":"PRZ", "RhodeIsland":"RIZ", - "SouthCarolina":"SCZ", "Tennessee":"TNZ", "Texas":"TXZ", - "VirginIslands":"VIZ", "Virginia":"VAZ", "WestVirginia":"WVZ", - } - - - # Define a method to construct a mask which identifies all areas impacted - # by hazards associated with a particular storm - def constructStormHazardMask(self, searchEtn): - - # Get ready to store the composite mask - finalStormHazardMask = self.empty(np.bool) - - # Examine entire inventory of the Hazards grids - for tr in self.GM_getWEInventory("Hazards"): - - print "+"*90 - hazards = self.getGrids("Fcst", "Hazards", "SFC", tr) - - # Split the Hazards data into its component parts - hazardBytes, hazardKeys = hazards - - # Look at each of the hazards keys - for hazIndex, key in enumerate(hazardKeys): - print "key =", key - - # Ignore the default hazard keys which do not have ETN's - if key in ["", ""]: - continue - - # Get the ETN for this hazard - curETN = self.getETN(key) - print "curETN =", curETN, "\tsearchEtn =", searchEtn - - # If this ETN does not match the storm we are interested in - if curETN is None or searchEtn is None or \ - int(curETN) != int(searchEtn): - continue # move on - - # If we made it this far, mask where this hazard exists - hazMask = hazardBytes == hazIndex - - finalStormHazardMask[hazMask] = True - - # Return the completed hazard mask for this storm - return finalStormHazardMask - - - # Define a method to search breakpoint segment edit areas for hazards - # associated with a specific storm - def searchHazardsBySegment(self, hazardMask): - - # 
Get ready to track our results - results = set() - - # Make a regular expression to look for breakpoint segment areas - segmentPattern = re.compile( - "^(LN\d\d\d\d|ISL\d\d\d|KEY\d\d\d|WT[A-Z][A-Z]\d\d|" + - "NAI\d\d\d|USC\d\d\d)_?") - - # Now examine every search area - for searchArea in self.editAreaList(): - - # If this is not a breakpoint segment area we care about - if segmentPattern.search(searchArea) is None: - continue # Move on -# print searchArea - - # Get the edit area as a mask - try: - searchMask = self.encodeEditArea(searchArea) - except: - LogStream.logEvent("Could not encode a mask for %s" % - (searchArea)) - continue - - # See if these areas overlap - overlap = hazardMask & searchMask - - # If these areas do overlap - if overlap.any(): - - # Add this search area to the results list, if it is not - # already included - results.add(searchArea) - - # Return the results we found - return results - - - # Define a method to search state edit areas for hazards associated with a - # specific storm - def searchHazardsByState(self, hazardMask): - - # Get ready to track our results - results = set() - -# print "-"*60 -# print "Start state search" - - # Now examine every search area - for searchArea in self.searchUGCdict.keys(): -# print "State searchArea =", searchArea - - # Get the edit area as a mask - try: - searchMask = self.encodeEditArea(searchArea) - except: - LogStream.logEvent("Could not encode a mask for %s" % - (searchArea)) - continue - - # See if these areas overlap - overlap = hazardMask & searchMask - - # If these areas do overlap - if overlap.any(): - - # Add this search area to the results list, if it is not - # already included - results.add(searchArea) - - # Return the results we found - return results - - - # Define a method to search state forecast zone edit areas for hazards - # associated with a specific storm - def findZones(self, hazardMask, allEditAreaNames, results, keep=True): - - print "\nI'm starting with %d edit areas" % 
(len(allEditAreaNames)) - - # Now look for all zones associated with this state - for (index, searchArea) in enumerate(allEditAreaNames): -# print "Zone searchArea =", searchArea, index - - # Get this edit area as a mask - try: - searchMask = self.encodeEditArea(searchArea) - except: - LogStream.logEvent("Could not encode a mask for %s" % - (searchArea)) - continue - - # See if these areas overlap - overlap = hazardMask & searchMask - - # If these areas do overlap - if overlap.any(): - - # Add this search area to the results list, if it is not - # already included - if keep: - results.add(searchArea) -# print "Kept Zone searchArea =", searchArea, index -# else: -# print "Removing Zone searchArea =", searchArea, index -# -# # Since we already found this zone, do not look for it -# # with future hazard searches -# allEditAreaNames.remove(searchArea) -# else: -# print "Ignoring Zone searchArea ->", searchArea, index - - print "I now have %d edit areas" % (len(allEditAreaNames)) - print "with %d results" % (len(results)) - - # Return the results we determined - return (results, allEditAreaNames) - - - # Define a method to search state forecast zone edit areas for hazards - # associated with a specific storm - def searchHazardsByZone(self, hazardMask, stateList): - - # Get ready to track our results - stateZones = set() - results = set() - - # Make a list of all know edit area names - print "Getting all edit area names" - allEditAreaNames = self.editAreaList() - print "I have %d edit areas, to start" % (len(allEditAreaNames)) - - # Make a set of zone codes prefixes we need to search further - for state in stateList: - - # Get the string expression to look for UGC zone codes - # associated with this state - try: - stateZones.add(self.searchUGCdict[state]) - except KeyError: - LogStream.logEvent("Could not find this state %s" % (state)) - continue - - print "\tExamining zones in ->", stateZones - - # Make a copy of all edit area names, so we can remove some without - # 
crashing the following loop - copyEditAreaNames = allEditAreaNames[:] - - # Let's eliminate edit areas which do not match the pattern of UGC - # zone code names. All other edit area names will be removed. - for editArea in copyEditAreaNames: - - if len(editArea) != 6 or editArea[:3] not in stateZones: - allEditAreaNames.remove(editArea) -# print "Removing ->", editArea - - # Now that we've removed the areas we know we will not need, take a - # closer look at the areas still left - (results, allEditAreaNames) = self.findZones(hazardMask, - allEditAreaNames, results) - - # Return the results we found - return results - - - def execute(self, varDict): - - if varDict is None: - varDict={} - - # Let's start timing this - print "*" *80 - t0 = time.time() - print time.gmtime(t0), "CreateNatlTCVZoneGroups Starting" - - stormList = self.extractStormInfo() - stormNames = [] - for sDict in stormList: - stormNames.append(sDict["stormName"]) - - # Make the variableList dynamically based on the storm info - bogusStormName = "xyz" - variableList = [] - variableList.append(("StormName", bogusStormName, "radio", stormNames)) - - # Display the GUI - processVarList = ProcessVariableList.ProcessVariableList( - "Create Text Formatter Sampling Combinations", variableList, varDict) - status = processVarList.status() - if status.upper() != "OK": - self.cancel() - - print "varDict =", varDict - # Create a new time range - now = int(self._gmtime().unixTime() / 3600) * 3600 - timeRange = self.GM_makeTimeRange(now, now + 48 * 3600) - - # Get the name of our selected storm - stormName = varDict["StormName"] - if stormName == bogusStormName: - self.statusBarMsg("Please select a storm name.", "U") - return - - # Extract storm number for selected storm - for sDict in stormList: - if sDict["stormName"] == stormName: - stormNum = int(sDict["stormNumber"]) - lastModified = sDict["lastModified"] - pil = sDict["pil"] - - # Get the segment number and filter for valid characters - if stormNum is None: - 
self.abort("You must supply the storm number!") - - # Ensure this is a national VTEC number - if stormNum < 1000: - stormNum = stormNum + 1000 - - # Make sure that the storm info has been updated within the last 7 hours - if self._gmtime().unixTime() - lastModified > 7 * 3600: - self.statusBarMsg("StormInfo for " + stormName + " is old. " + \ - "Please update StormInfo first.", "U") - return - -# LogStream.logEvent("Got this data\n\tpil = %s\tnumber = %s\n" % -# (pil, stormNum)) -# print "Got this data\n\tpil = %s\tnumber = %s\n" % (pil, stormNum) - - #======================================================================= - # Get ready to make a list of all states which need to be examined - # more closely - - closerLookStates = [] -# finalResults = set() - - # Find all areas with hazards associated with this storm - hazardMask = self.constructStormHazardMask(stormNum) - - # Look for breakpoint segments -# segments = self.searchHazardsBySegment(hazardMask) -# print "segments = ", segments -# -# for segment in segments: -# if segment not in finalResults: -# finalResults.add(segment) - - # If there are any areas still impacted by this storm - if hazardMask.any(): - - # Make a list of states we need to examine more closely - closerLookStates = self.searchHazardsByState(hazardMask) - print "CloserLookStates = ", closerLookStates - - # If there are any states we need to look more closely at - results = self.searchHazardsByZone(hazardMask, closerLookStates) - else: - results = set() - - print "results =", results - - # Make a filename for this output - name = "Combinations_%s_%s" % (pil, "NHA") - - # Get the previous version of this combinations file - prevCombo = None - try: - prevCombo = self.loadCombinationsFile("prev" + name) - except: - LogStream.logProblem("Error loading previous combinations file: %s\n" % name, LogStream.exc()) - - # if prevCombo is None or empty - if not prevCombo: - prevCombo = [[]] - - # Add back any cancelled zones - finalSet = results | 
set(prevCombo[0]) - print "finalSet =", finalSet - - - # Now make the final combinations file - self.saveCombinationsFile(name, [list(finalSet)]) - - t1 = time.time() - print "\n\n%f milliseconds to update combinations file" % ((t1 - t0)*1000.0) - - - #======================================================================= - # Notify user which formatter to run if there were any zones left - - if (len(finalSet) > 0): - msg = "You may now create the national TCV for %s " % (stormName) +\ - "through the GFE Formatter Launcher (In GFE, Products->" +\ - "Formatter Launcher). In the Formatter Launcher, Products->" +\ - "Hazard->Hazard_TCV%s." % (pil) + "Click on the gear icon " +\ - "(second from the right). Transmit the product when " +\ - "satisfied it is correct." - - self.statusBarMsg(msg, "A") - - # Let's see how long this took - t3 = time.time() - - print "\n\n%f milliseconds for total process" % ((t3 - t0) * 1000.0) - print self._gmtime().timetuple(), "CreateNatlTCVZoneGroups Done" \ No newline at end of file +## ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# CreateNatlTCVZoneGroups - Version 3.0 +# +# Author: Matthew Belk (BOX) +# +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- -------------------------------------------- +# Apr 12, 2016 cleanup code and refactor to use +# GridManipulation and TropicalUtility +# Apr 29, 2016 added a popup banner with instructions to run the +# specific text formatter, for a particular storm +# Sep 19, 2016 19293 randerso Initial baseline check in +# +######################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["None"] +import os, re, time + +import AbsTime +import LocalizationSupport +import LogStream +import ProcessVariableList +import TropicalUtility +import numpy as np + + +class Procedure (TropicalUtility.TropicalUtility): + + def __init__(self, dbss): + TropicalUtility.TropicalUtility.__init__(self, dbss) + + #======================================================================= + # Define the UGC zone code prefix for each state in the domain. The + # keys are the edit area name for each state in the domain we want. 
+ + self.searchUGCdict = { + "Alabama":"ALZ", "Arkansas":"ARZ", "Connecticut":"CTZ", + "Delaware":"DEZ", "DistrictofColumbia":"DCZ", "Florida":"FLZ", + "Georgia":"GAZ", "Louisiana":"LAZ", "Maine":"MEZ", "Maryland":"MDZ", + "Massachusetts":"MAZ", "Mississippi":"MSZ", "Missouri":"MOZ", + "NewHampshire":"NHZ", "NewJersey":"NJZ", "NewMexico":"NMZ", + "NewYork":"NYZ", "NorthCarolina":"NCZ", "Oklahoma":"OKZ", + "Pennsylvania":"PAZ", "PuertoRico":"PRZ", "RhodeIsland":"RIZ", + "SouthCarolina":"SCZ", "Tennessee":"TNZ", "Texas":"TXZ", + "VirginIslands":"VIZ", "Virginia":"VAZ", "WestVirginia":"WVZ", + } + + + # Define a method to construct a mask which identifies all areas impacted + # by hazards associated with a particular storm + def constructStormHazardMask(self, searchEtn): + + # Get ready to store the composite mask + finalStormHazardMask = self.empty(np.bool) + + # Examine entire inventory of the Hazards grids + for tr in self.GM_getWEInventory("Hazards"): + + print("+"*90) + hazards = self.getGrids("Fcst", "Hazards", "SFC", tr) + + # Split the Hazards data into its component parts + hazardBytes, hazardKeys = hazards + + # Look at each of the hazards keys + for hazIndex, key in enumerate(hazardKeys): + print("key =", key) + + # Ignore the default hazard keys which do not have ETN's + if key in ["", ""]: + continue + + # Get the ETN for this hazard + curETN = self.getETN(key) + print("curETN =", curETN, "\tsearchEtn =", searchEtn) + + # If this ETN does not match the storm we are interested in + if curETN is None or searchEtn is None or \ + int(curETN) != int(searchEtn): + continue # move on + + # If we made it this far, mask where this hazard exists + hazMask = hazardBytes == hazIndex + + finalStormHazardMask[hazMask] = True + + # Return the completed hazard mask for this storm + return finalStormHazardMask + + + # Define a method to search breakpoint segment edit areas for hazards + # associated with a specific storm + def searchHazardsBySegment(self, hazardMask): + + 
# Get ready to track our results + results = set() + + # Make a regular expression to look for breakpoint segment areas + segmentPattern = re.compile( + "^(LN\d\d\d\d|ISL\d\d\d|KEY\d\d\d|WT[A-Z][A-Z]\d\d|" + + "NAI\d\d\d|USC\d\d\d)_?") + + # Now examine every search area + for searchArea in self.editAreaList(): + + # If this is not a breakpoint segment area we care about + if segmentPattern.search(searchArea) is None: + continue # Move on +# print searchArea + + # Get the edit area as a mask + try: + searchMask = self.encodeEditArea(searchArea) + except: + LogStream.logEvent("Could not encode a mask for %s" % + (searchArea)) + continue + + # See if these areas overlap + overlap = hazardMask & searchMask + + # If these areas do overlap + if overlap.any(): + + # Add this search area to the results list, if it is not + # already included + results.add(searchArea) + + # Return the results we found + return results + + + # Define a method to search state edit areas for hazards associated with a + # specific storm + def searchHazardsByState(self, hazardMask): + + # Get ready to track our results + results = set() + +# print "-"*60 +# print "Start state search" + + # Now examine every search area + for searchArea in list(self.searchUGCdict.keys()): +# print "State searchArea =", searchArea + + # Get the edit area as a mask + try: + searchMask = self.encodeEditArea(searchArea) + except: + LogStream.logEvent("Could not encode a mask for %s" % + (searchArea)) + continue + + # See if these areas overlap + overlap = hazardMask & searchMask + + # If these areas do overlap + if overlap.any(): + + # Add this search area to the results list, if it is not + # already included + results.add(searchArea) + + # Return the results we found + return results + + + # Define a method to search state forecast zone edit areas for hazards + # associated with a specific storm + def findZones(self, hazardMask, allEditAreaNames, results, keep=True): + + print("\nI'm starting with %d edit areas" % 
(len(allEditAreaNames))) + + # Now look for all zones associated with this state + for (index, searchArea) in enumerate(allEditAreaNames): +# print "Zone searchArea =", searchArea, index + + # Get this edit area as a mask + try: + searchMask = self.encodeEditArea(searchArea) + except: + LogStream.logEvent("Could not encode a mask for %s" % + (searchArea)) + continue + + # See if these areas overlap + overlap = hazardMask & searchMask + + # If these areas do overlap + if overlap.any(): + + # Add this search area to the results list, if it is not + # already included + if keep: + results.add(searchArea) +# print "Kept Zone searchArea =", searchArea, index +# else: +# print "Removing Zone searchArea =", searchArea, index +# +# # Since we already found this zone, do not look for it +# # with future hazard searches +# allEditAreaNames.remove(searchArea) +# else: +# print "Ignoring Zone searchArea ->", searchArea, index + + print("I now have %d edit areas" % (len(allEditAreaNames))) + print("with %d results" % (len(results))) + + # Return the results we determined + return (results, allEditAreaNames) + + + # Define a method to search state forecast zone edit areas for hazards + # associated with a specific storm + def searchHazardsByZone(self, hazardMask, stateList): + + # Get ready to track our results + stateZones = set() + results = set() + + # Make a list of all know edit area names + print("Getting all edit area names") + allEditAreaNames = self.editAreaList() + print("I have %d edit areas, to start" % (len(allEditAreaNames))) + + # Make a set of zone codes prefixes we need to search further + for state in stateList: + + # Get the string expression to look for UGC zone codes + # associated with this state + try: + stateZones.add(self.searchUGCdict[state]) + except KeyError: + LogStream.logEvent("Could not find this state %s" % (state)) + continue + + print("\tExamining zones in ->", stateZones) + + # Make a copy of all edit area names, so we can remove some without 
+ # crashing the following loop + copyEditAreaNames = allEditAreaNames[:] + + # Let's eliminate edit areas which do not match the pattern of UGC + # zone code names. All other edit area names will be removed. + for editArea in copyEditAreaNames: + + if len(editArea) != 6 or editArea[:3] not in stateZones: + allEditAreaNames.remove(editArea) +# print "Removing ->", editArea + + # Now that we've removed the areas we know we will not need, take a + # closer look at the areas still left + (results, allEditAreaNames) = self.findZones(hazardMask, + allEditAreaNames, results) + + # Return the results we found + return results + + + def execute(self, varDict): + + if varDict is None: + varDict={} + + # Let's start timing this + print("*" *80) + t0 = time.time() + print(time.gmtime(t0), "CreateNatlTCVZoneGroups Starting") + + stormList = self.extractStormInfo() + stormNames = [] + for sDict in stormList: + stormNames.append(sDict["stormName"]) + + # Make the variableList dynamically based on the storm info + bogusStormName = "xyz" + variableList = [] + variableList.append(("StormName", bogusStormName, "radio", stormNames)) + + # Display the GUI + processVarList = ProcessVariableList.ProcessVariableList( + "Create Text Formatter Sampling Combinations", variableList, varDict) + status = processVarList.status() + if status.upper() != "OK": + self.cancel() + + print("varDict =", varDict) + # Create a new time range + now = int(self._gmtime().unixTime() / 3600) * 3600 + timeRange = self.GM_makeTimeRange(now, now + 48 * 3600) + + # Get the name of our selected storm + stormName = varDict["StormName"] + if stormName == bogusStormName: + self.statusBarMsg("Please select a storm name.", "U") + return + + # Extract storm number for selected storm + for sDict in stormList: + if sDict["stormName"] == stormName: + stormNum = int(sDict["stormNumber"]) + lastModified = sDict["lastModified"] + pil = sDict["pil"] + + # Get the segment number and filter for valid characters + if stormNum is 
None: + self.abort("You must supply the storm number!") + + # Ensure this is a national VTEC number + if stormNum < 1000: + stormNum = stormNum + 1000 + + # Make sure that the storm info has been updated within the last 7 hours + if self._gmtime().unixTime() - lastModified > 7 * 3600: + self.statusBarMsg("StormInfo for " + stormName + " is old. " + \ + "Please update StormInfo first.", "U") + return + +# LogStream.logEvent("Got this data\n\tpil = %s\tnumber = %s\n" % +# (pil, stormNum)) +# print "Got this data\n\tpil = %s\tnumber = %s\n" % (pil, stormNum) + + #======================================================================= + # Get ready to make a list of all states which need to be examined + # more closely + + closerLookStates = [] +# finalResults = set() + + # Find all areas with hazards associated with this storm + hazardMask = self.constructStormHazardMask(stormNum) + + # Look for breakpoint segments +# segments = self.searchHazardsBySegment(hazardMask) +# print "segments = ", segments +# +# for segment in segments: +# if segment not in finalResults: +# finalResults.add(segment) + + # If there are any areas still impacted by this storm + if hazardMask.any(): + + # Make a list of states we need to examine more closely + closerLookStates = self.searchHazardsByState(hazardMask) + print("CloserLookStates = ", closerLookStates) + + # If there are any states we need to look more closely at + results = self.searchHazardsByZone(hazardMask, closerLookStates) + else: + results = set() + + print("results =", results) + + # Make a filename for this output + name = "Combinations_%s_%s" % (pil, "NHA") + + # Get the previous version of this combinations file + prevCombo = None + try: + prevCombo = self.loadCombinationsFile("prev" + name) + except: + LogStream.logProblem("Error loading previous combinations file: %s\n" % name, LogStream.exc()) + + # if prevCombo is None or empty + if not prevCombo: + prevCombo = [[]] + + # Add back any cancelled zones + finalSet = 
results | set(prevCombo[0]) + print("finalSet =", finalSet) + + + # Now make the final combinations file + self.saveCombinationsFile(name, [list(finalSet)]) + + t1 = time.time() + print("\n\n%f milliseconds to update combinations file" % ((t1 - t0)*1000.0)) + + + #======================================================================= + # Notify user which formatter to run if there were any zones left + + if (len(finalSet) > 0): + msg = "You may now create the national TCV for %s " % (stormName) +\ + "through the GFE Formatter Launcher (In GFE, Products->" +\ + "Formatter Launcher). In the Formatter Launcher, Products->" +\ + "Hazard->Hazard_TCV%s." % (pil) + "Click on the gear icon " +\ + "(second from the right). Transmit the product when " +\ + "satisfied it is correct." + + self.statusBarMsg(msg, "A") + + # Let's see how long this took + t3 = time.time() + + print("\n\n%f milliseconds for total process" % ((t3 - t0) * 1000.0)) + print(self._gmtime().timetuple(), "CreateNatlTCVZoneGroups Done") \ No newline at end of file diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateProposedSS.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateProposedSS.py index 854e1c8e13..072665392b 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateProposedSS.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateProposedSS.py @@ -1,849 +1,849 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# CreateProposedSS - Version 4.0 -# -# Author: Lefebvre/Belk/Hardin/Santos/Trogdon -# -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- -------------------------------------------- -# Jun 04, 2014 incorporate Inundation Grid and Incremental Grid -# Apr 12, 2016 cleanup code and refactor to use GridManipulation -# and TropicalUtility -# Jul 14, 2016 Fixed Smoothing, Added Manual Options, -# refactored code a little bit. -# Sep 19, 2016 19293 randerso Initial baseline check in -# -# Nov 13-18 2016 - Further tweaks made during SWIT testing -# Dec 15, 2016 - Added UpdateInunMax option -# Dec 21, 2016 - Deleted saveElements line in main execute -# 11/15/2017: Tweaked during SWiT to better handle extended PSurge/PETTS Guidance out to 102 hours, -# improved UpdateInunMax option and made changes to makeInundationTiming methods to accommodate new TCs for -# the TPCSurgeProb and PETSS dbs. -# -######################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["None"] - -import sys, time, re, os - -import AbsTime -import ProcessVariableList -import TimeRange -import TropicalUtility -import numpy as np - - -class Procedure (TropicalUtility.TropicalUtility): - - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - - def trimTimeRange(self, weName, timeRange): - - # Get a list of all grids we have for this parameter - trList = self.GM_getWEInventory(weName) - if len(trList) == 0: # nothing more to do - return - - # Fragment all grids to their smallest time range - self.splitCmd([weName], timeRange) - - # If there is more than 1 grid to deal with - if trList > 1: - - # Make a time range from the start of the first grid, to the start - # of the desired time range - tr = self.GM_makeTimeRange(trList[0].startTime().unixTime(), - timeRange.startTime().unixTime()) - - # Delete all grids in this interim time range - self.deleteCmd([weName], tr) - - # Get the parameter values from the last time range in the list - tr = trList[-1] - grid = self.getGrids("Fcst", weName, "SFC", tr) - - # Make a new grid with these values, using the desired time range - self.createGrid("Fcst", weName, "DISCRETE", grid, timeRange) - - - def getTPCSurgeProbModelTime(self, modelSource, pctStr, level): - - siteID = self.getSiteID() - if modelSource == "PETSS": - dbName = siteID + "_D2D_" + modelSource + "LoRes" - else: - dbName = siteID + "_D2D_TPCSurgeProb" + modelSource - - weName = "Surge" + pctStr + "Pct" - trList = self.GM_getWEInventory(weName, dbName, level) - - if len(trList) == 0: - msgStr = dbName + " " + weName + " " + level + " does not exist in the TPCSurge model. 
" - self.statusBarMsg(msgStr, "S") - return None - - modelStart = trList[0].startTime() - - return modelStart - - def getExceedanceHeight(self, modelSource, pctStr, level): - - siteID = self.getSiteID() - if modelSource == "PETSS": - dbName = siteID + "_D2D_" + modelSource + "LoRes" - else: - dbName = siteID + "_D2D_TPCSurgeProb" + modelSource - - weName = "Surge" + pctStr + "Pct" - trList = self.GM_getWEInventory(weName, dbName, level) - - print "getExceedanceHeight.....TRList for ", dbName, weName, pctStr, level - - # Return None if no grids were found. This should post an error to the user. - if len(trList) == 0: - return None - - modelStart = self.getTPCSurgeProbModelTime(modelSource, pctStr, level) - - # Didn't find grid in the Fcst database so fetch it from model database - grid = self.getGrids(dbName, weName, level, trList[-1]) - - mask = grid <= -10.0 # invalid point - in meters - grid /= 0.3048 # convert meters to feet - grid[mask] = -80.0 # reset values less than -10.0 meters to -80.0 feet - - if level == "SFC": - return grid, modelStart - else: - return grid - - # Create InundationTiming grids from PHISH - def makeTimingGridsFromModel(self, modelSource, pctStr, level, ssea, MHHWMask): - - siteID = self.getSiteID() - if modelSource == "PETSS": - dbName = siteID + "_D2D_" + modelSource + "LoRes" - else: - dbName = siteID + "_D2D_TPCSurgeProb" + modelSource - - weName = "Surge" + pctStr + "Pctincr" - trList = self.GM_getWEInventory(weName, dbName, level) - - print "makeTimingGridsFromModel.....TRList for ", dbName, weName, pctStr, level - - if len(trList) == 0: - self.statusBarMsg("No grids available for model:" + dbName, "S") - return - - # make timeRanges based on the current time not the time of the model - baseTime = int(trList[0].startTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period - endTime = int(trList[-1].endTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period - if endTime < trList[-1].endTime().unixTime(): - 
endTime += 6 * 3600 - timingTRList = self.makeTimingTRs(baseTime, endTime) - - gridList = [] - for tr in trList: - phishGrid = self.getGrids(dbName, weName, level, tr) - if phishGrid is None: - self.statusBarMsg("No PHISH grid available for:" + repr(tr), "S") - continue - - phishMask = phishGrid > -25.0 - phishGrid[phishMask] /= 0.3048 # convert meters to feet - phishGrid[~phishMask] = -80.0 - - phishMask = (phishGrid > 0.0) & ssea - phishGrid[phishMask] = self.GM_smoothGrid(phishGrid, 3, phishMask)[phishMask] - - phishGrid = np.clip(phishGrid, 0.0, 100.0) - phishGrid[~ssea] = 0.0 - phishGrid[MHHWMask] = 0.0 - - gridList.append(phishGrid) - - - for i, grid in enumerate(gridList): - self.createGrid("Fcst", "InundationTiming", "SCALAR", grid, timingTRList[i], precision=1) - - - def makeInundationMaxGrid(self, timingGrids, trList): - - itCube = np.array(timingGrids) - maxGrid = np.amax(itCube, axis=0) - - now = int(self._gmtime().unixTime() / 3600) * 3600 - maxTimeRange = self.GM_makeTimeRange(now, now + 48 * 3600) - - self.createGrid(self.mutableID(), "InundationMax", "SCALAR", maxGrid, maxTimeRange) - - return maxGrid - - # Fetch the VDATUM grid and mask all points below the specified value. - def getVDATUMSGrid(self, weName, threshold): - siteID = self.getSiteID() - dbName = siteID + "_D2D_VDATUMS" - - # First check to see if the grid has been stored as a temporary grid - # and return that. Purely for performance purposes - fcstTRList = self.GM_getWEInventory(weName) - if len(fcstTRList) > 0: - grid = self.getGrids("Fcst", weName, "SFC", fcstTRList[0]) - return grid - - trList = self.GM_getWEInventory(weName, dbName) - - if len(trList) == 0: - msgStr = weName + " does not exist in the VDATUMS model. " - self.statusBarMsg(msgStr, "S") - # TODO: Should we return here or throw an exception since there's no grid to operate on? - - # TODO: why are we looping over all the time ranges retrieving grid - # we're not using. We should just use the last tr in the list. 
- # There's probably only one anyway. - for tr in trList: - grid = self.getGrids(dbName, weName, "SFC", tr, mode="First") - - mask = grid <= threshold # points below threshold - - grid /= 0.3048 # convert meters to feet - grid[mask] = -80.0 # set points to min value below threshold - - return grid - - # returns the combined key. Enforces the rule that keys with the - # same phen returns the one key with the highest priority sig. - def combinedKey(self, subKeys, newKey): - if newKey is None: - return subKeys - - subKeyList = subKeys.split("^") - - # check for same keys - if newKey in subKeyList: - return subKeys - - defaultCombo = subKeys + "^" + newKey - - # check for non-VTEC key - if "." not in newKey: - return defaultCombo - - # more exceptions - these phens are above the law - exceptions = ["TO", "SV", "FF"] - sigList = ["W", "Y", "A"] - if self.keyPhen(newKey) in exceptions: - return defaultCombo - - for sk in subKeyList: - if self.keyPhen(sk) == self.keyPhen(newKey): - subSig = self.keySig(sk) - newSig = self.keySig(newKey) - if subSig == newSig: - return subKeys - - if subSig not in sigList or newSig not in sigList: - continue - - if sigList.index(subSig) > sigList.index(newSig): - subKeys = subKeys.replace(sk, newKey) - - return subKeys - - return defaultCombo - - # makes a new hazard given the oldKey and a new watch phen, - # sig and etn. 
- def makeNewKey(self, oldKey, phenSig): - # check for the dumb cases - if oldKey == "" or oldKey == phenSig: - return phenSig - - # split up the key, add the hazard, sort, and reassemble - parts = oldKey.split("^") - parts.append(phenSig) - parts.sort() # makes sure the same set of subKeys look the same - # assemble the new key - newKey = "" - for p in parts: - if newKey == "": - newKey = p - else: - newKey = self.combinedKey(newKey, p) - # just in case - if newKey == "": - newKey = "" - - return newKey - - - # Returns a list of unique keys for the specified grid and mask - def getUniqueKeys(self, byteGrid, keys, mask=None): - - uniqueKeys = [] - for keyIndex, key in enumerate(keys): - valueMask = byteGrid == keyIndex - valueMask &= mask - if valueMask.any(): - uniqueKeys.append(key) - - return uniqueKeys - - # adds the specified hazard to weName over the specified timeRange - # and spatially over the specified mask. Combines the specified - # hazard with the existing hazards by default. 
For replaceMode, - # specify 0 in the combineField - def combineHazards(self, targetGrid, addHaz, mask, combine=1): - - byteGrid, hazKey = targetGrid - uniqueKeys = self.getUniqueKeys(byteGrid, hazKey, mask) - for uKey in uniqueKeys: - if combine: - newKey = self.makeNewKey(uKey, addHaz) - else: # replace - newKey = addHaz - - oldIndex = self.getIndex(uKey, hazKey) - newIndex = self.getIndex(newKey, hazKey) - - # calculate the mask - intersection of mask and oldIndex values - editMask = (byteGrid == oldIndex) & mask - - # poke in the new values - byteGrid[editMask] = newIndex - - return (byteGrid, hazKey) - - def addHazard(self, targetGrid, addHaz, mask, combine=1): - - # Only interested in SS subKeys, so extract that part first - ssKey = "" - subKeys = self.getSubKeys(addHaz) - for subKey in subKeys: - if "SS" in subKey: - ssKey = subKey - - if ssKey == "": - print "SS subKey not found in key:", addHaz - return - - targetByteGrid, targetKeys = self.combineHazards(targetGrid, - ssKey, mask, combine) - - return targetByteGrid, targetKeys - - def deleteAllGrids(self, weList): - - for weName in weList: - trList = self.GM_getWEInventory(weName) - if len(trList) == 0: - continue - start = trList[0].startTime().unixTime() - end = trList[-1].endTime().unixTime() - tr = self.GM_makeTimeRange(start, end) - - self.deleteCmd([weName], tr) - - return - - def makeDiffGrid(self): - # Get the ProposedSS grid - trList = self.GM_getWEInventory("ProposedSS") - if len(trList) != 1: - self.statusBarMsg("No ProposedSS grids found.", "S") - return - else: - proposedTR = trList[0] - - # Get the Raw guidance grid - trList = self.GM_getWEInventory("tempProposedSS") - if len(trList) != 1: - self.statusBarMsg("No tempProposedSS grids found.", "S") - return - else: - rawGuidTR = trList[0] - - proposedSSGrid = self.getGrids("Fcst", "ProposedSS", "SFC", proposedTR) - rawGuidSSGrid = self.getGrids("Fcst", "tempProposedSS", "SFC", rawGuidTR) - - self.calcDiffGrid(proposedSSGrid, rawGuidSSGrid, 
"PrevGuidDiffSS", - proposedTR) - - return - - def baseGuidanceTime(self): - startTime = int((self._gmtime().unixTime() - (2 * 3600)) / (6 * 3600)) * (6 * 3600) - return startTime - - # Make a list of timeRanges that will be used to make InundationTiming grids - def makeTimingTRs(self, baseTime, endTime): - # Make the inundation timing grids - trList = [] - start = baseTime - end = baseTime + 6 * 3600 - while end <= endTime: - tr = TimeRange.TimeRange(AbsTime.AbsTime(start), AbsTime.AbsTime(end)) - trList.append(tr) - start = end - end += 6 * 3600 - - return trList - - def getTimingGrids(self): - - baseTime = self.baseGuidanceTime() - endTime = baseTime + 78 * 3600 - trList = self.makeTimingTRs(baseTime, endTime) - - gridList = [] - for tr in trList: - gridList.append(self.empty(np.float32)) - - return trList, gridList - - def execute(self, editArea): - - editAreaMask = self.encodeEditArea(editArea) - - # If we did not find an edit are mask - if editArea is None or (not editAreaMask.any()): - # Select the entire domain - editAreaMask = self.newGrid(True, np.bool) - - # Extract the info from all storms - stormList = self.extractStormInfo() - - stormNames = [] - - mutableID = self.mutableID() - - # make a list of the active storms to pass into the variableList - for sDict in stormList: - stormNames.append(sDict["stormName"]) - - variableList = [] - bogusStormName = "WXYZZYXW" - variableList.append(("Data Source", "N-SBN (Default)", "radio", - ["N-SBN (Default)", "Backup", "PETSS", "Manual Replace", "Manual Add", "UpdateInunMax" - ])) - variableList.append(("Indicate Your Situational Forecast Confidence", "Typical (Combined; 10% Exceedance)", - "radio", ["Typical (Combined; 10% Exceedance)", - "Medium (Combined; 20% Exceedance)", - "High (Combined; 30% Exceedance)", - "Higher (Combined; 40% Exceedance)", - "Highest (Combined; 50% Exceedance)"])) - variableList.append(("StormName", bogusStormName, "radio", stormNames)) - variableList.append(("Hazard", "Storm Surge 
Watch", "radio", - ["Storm Surge Watch", "Storm Surge Warning"])) - variableList.append(("Raw Guidance or Proposed SS Grid?", "ProposedSS", "radio", - ["Raw Guidance", "ProposedSS"])) - variableList.append(("Inundation Threshold" , 3, "scale", [1, 8], 1)) - variableList.append(("Manual Inundation settings:", "", "label")) - variableList.append(("Inundation Height" , 3, "scale", [1, 8], 1)) - variableList.append(("Start Hour for Inundation Timing", 0, "scale", [0.0, 72.0], 6.0)) - variableList.append(("End Hour for Inundation Timing", 6, "scale", [0.0, 78.0], 6.0)) - -# variableList.append(("Make Inunudation Timing Grids?", "Yes", "radio", ["Yes", "No"])) - - # Display the GUI and check for cancel - varDict = {} - processVarList = ProcessVariableList.ProcessVariableList("StormSurgeWW", variableList, varDict) - status = processVarList.status() - if status.upper() != "OK": - self.cancel() - - sourceOption = varDict["Data Source"] - - if sourceOption == "N-SBN (Default)": - self._dataSource = "LoRes" - elif sourceOption == "Backup": - self._dataSource = "Manual" - elif sourceOption == "PETSS": - self._dataSource = "PETSS" - elif sourceOption == "Manual Replace": - self._dataSource = "LoRes" - elif sourceOption == "Manual Add": - self._dataSource = "LoRes" - - # Fetch the StormSurge edit area - ssEditArea = self.getEditArea("StormSurgeWW_EditArea") - ssea = self.encodeEditArea(ssEditArea) - - # Below you can configure different edit areas to specify different tide corrections - inundationThresh = varDict["Inundation Threshold"] - - tempAddReplace = varDict["Raw Guidance or Proposed SS Grid?"] - - # Make sure a storm was selected - stormName = varDict["StormName"] - if stormName == bogusStormName: - self.statusBarMsg("Please select a storm name.", "U") - return - - # Extract storm number for selected storm - for sDict in stormList: - if sDict["stormName"] == stormName: - stormNum = int(sDict["stormNumber"]) - lastModified = sDict["lastModified"] - - # Make sure that the 
storm info has been updated within the last 7 hours - if self._gmtime().unixTime() - lastModified > 7 * 3600: - self.statusBarMsg("StormInfo for " + stormName + " is old. " + \ - "Please update StormInfo first.", "U") - return - - # Ensure this is a national VTEC number - if stormNum < 1000: - stormNum = int(stormNum + 1000) - - confidenceStr = varDict["Indicate Your Situational Forecast Confidence"] - - # extract the percent value from this string - pctPos = confidenceStr.find("%") - pctStr = confidenceStr[pctPos - 2:pctPos] - - now = int(self._gmtime().unixTime() / 3600) * 3600 - timeRange = self.GM_makeTimeRange(now, now + 48 * 3600) - - if sourceOption in ["N-SBN (Default)", "Backup", "PETSS"]: - - if stormNum is None: - self.abort("You must supply the storm!") - return - - # Now get the P-surge - surgePctGrid = self.getExceedanceHeight(self._dataSource, pctStr, "FHAG0") - - # Stop the tool if we didn't get the grid we wanted - if surgePctGrid is None: - self.statusBarMsg("No StormSurge guidance found for source " + self._dataSource + ".", "S") - return - - surgePctGrid = np.clip(surgePctGrid, 0.0, 100.0) - surgePctGrid[~ssea] = 0.0 - - # Get NAVD grids - surgePctGridNAVD, modelStart = self.getExceedanceHeight(self._dataSource, pctStr, "SFC") - surgePctGridNAVD = np.clip(surgePctGridNAVD, -80.0, 100.0) - surgePctGridNAVD[~ssea] = -80.0 - - # smooth grids - surgePctGrid = self.GM_smoothGrid(surgePctGrid, 3, (surgePctGrid > 0.0) & ssea) - surgePctGridNAVD = self.GM_smoothGrid(surgePctGridNAVD, 3, (surgePctGridNAVD > -10.0) & ssea) - - # Calculate and display surge guidance grids - msltonavd = self.getVDATUMSGrid("MSLtoNAVD88", -0.40) - navdtomllw = self.getVDATUMSGrid("NAVD88toMLLW", -2.20) - navdtomhhw = self. 
getVDATUMSGrid("NAVD88toMHHW", -3.40) - - validSurgeMask = surgePctGridNAVD > -80.0 - wTopoMask = validSurgeMask & (msltonavd > -80.0) - - surgePctGridMSL = self.newGrid(-80.0) - surgePctGridMSL[wTopoMask] = (surgePctGridNAVD - msltonavd)[wTopoMask] - mllwMask = validSurgeMask & (navdtomllw > -80.0) - - surgePctGridMLLW = self.newGrid(-80.0) - surgePctGridMLLW[mllwMask] = (surgePctGridNAVD + navdtomllw)[mllwMask] - mhhwMask = validSurgeMask & (navdtomhhw > -80.0) - - surgePctGridMHHW = self.newGrid(-80.0) - surgePctGridMHHW[mhhwMask] = (surgePctGridNAVD + navdtomhhw)[mhhwMask] - - surgePctGridMSL = np.clip(surgePctGridMSL, -30.0, 100.0) - surgePctGridMLLW = np.clip(surgePctGridMLLW, -30.0, 100.0) - surgePctGridNAVD = np.clip(surgePctGridNAVD, -30.0, 100.0) - surgePctGridMHHW = np.clip(surgePctGridMHHW, -30.0, 100.0) - - # Clip the MHHW grid at 0.0 - MHHWMask = surgePctGridMHHW <= 0.0 - surgePctGrid[MHHWMask] = 0.0 - - weList = ["InundationMax", "InundationTiming", "SurgeHtPlusTideMSL", "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW", "SurgeHtPlusTideMLLW"] - - self.deleteAllGrids(weList) - - now = int(self._gmtime().unixTime() / 3600) * 3600 - guidanceTR = self.GM_makeTimeRange(now, now + 48 * 3600) - - self.createGrid("Fcst", "SurgeHtPlusTideMSL", "SCALAR", - surgePctGridMSL, guidanceTR, precision=2) - - self.createGrid("Fcst", "SurgeHtPlusTideMLLW", "SCALAR", - surgePctGridMLLW, guidanceTR, precision=2) - - self.createGrid("Fcst", "SurgeHtPlusTideNAVD", "SCALAR", - surgePctGridNAVD, guidanceTR, precision=2) - - self.createGrid("Fcst", "SurgeHtPlusTideMHHW", "SCALAR", - surgePctGridMHHW, guidanceTR, precision=2) - - self.createGrid("Fcst", "SurgeHtPlusTideMLLW", "SCALAR", - surgePctGridMLLW, guidanceTR, precision=2) - - elif sourceOption in ["Manual Replace", "Manual Add"]: - # Figure out the total number of points - gridSize = self.getGridShape() - totalPoints = gridSize[0] * gridSize[1] - # Make sure the user selected a real edit area before continuing - 
selectedMask = self.encodeEditArea(editArea) # make the mask based on the selected edit area - if editArea is None or (not selectedMask.any()) or np.count_nonzero(selectedMask) == totalPoints: - self.statusBarMsg("Please select an edit area before running the Manual Replace or Manual Add option." , "S") - return - -# modelStart = self.getTPCSurgeProbModelTime("LoRes", pctStr, "SFC") - - inundationHeight = float(varDict["Inundation Height"]) - inunStartHour = float(varDict["Start Hour for Inundation Timing"]) - inunEndHour = float(varDict["End Hour for Inundation Timing"]) - - modifyMask = selectedMask & ssea - - if inunStartHour >= inunEndHour: - self.statusBarMsg("Please define the end hour after the start hour.", "S") - return - - surgePctGrid = self.empty(np.float32) - - # Fetch the old grids if we're adding - if sourceOption == "Manual Add": - imTRList = self.GM_getWEInventory("InundationMax", self.mutableID(), "SFC") - if len(imTRList) > 0: - imTR = imTRList[0] - surgePctGrid = self.getGrids(mutableID, "InundationMax", "SFC", imTR) - - surgePctGrid = surgePctGrid * 0 # reset the surgePctGrid - surgePctGrid[modifyMask] = inundationHeight # poke in the new values - - # Make the timing grids - baseTime = self.baseGuidanceTime() - - self.deleteAllGrids(["InundationMax","SurgeHtPlusTideMSL", "SurgeHtPlusTideMLLW", - "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW", "SurgeHtPlusTideMLLW"]) - - if sourceOption == "Manual Replace": # Make new grids and replace all IT grids - self.deleteAllGrids(["InundationTiming"]) - - trList, timingGrids = self.getTimingGrids() # fetch empty grids with times - - for i, tr in enumerate(trList): - start = tr.startTime().unixTime() - end = tr.endTime().unixTime() - - if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: - timingGrids[i][selectedMask] = inundationHeight # populate only where needed - timingGrids[i][~ssea] = 0.0 - - for i, tr in enumerate(trList): - timingGrids[i] = np.clip(timingGrids[i], 
0.0, 100.0) - self.createGrid(mutableID, "InundationTiming", "SCALAR", timingGrids[i], tr) - - # Finally create the surge grid which will be saved as the InundationMax - itCube = np.array(timingGrids) - surgePctGrid = np.amax(itCube, axis = 0) - - self.makeInundationMaxGrid(timingGrids, trList) - - elif sourceOption == "Manual Add": # Just replace the selected grid points over the selected time - # Fetch the existing IT grids - itTRList = self.GM_getWEInventory("InundationTiming", self.mutableID(), "SFC") - if len(itTRList) == 0: - self.statusBarMsg("No InundationTiming grids found at all. Inundation grids required to exist when running with this option. Otherwise run with Manual Replace Option.", "S") - return - - timingGrids = [] - trList = [] - - # Fetch all the timing grids - for tr in itTRList: - grid = self.getGrids(mutableID, "InundationTiming", "SFC", tr) - timingGrids.append(grid) - trList.append(tr) - - # Now poke in the selected value in each grid we need to modify - for i in range(len(timingGrids)): - start = trList[i].startTime().unixTime() - end = trList[i].endTime().unixTime() - if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: - timingGrids[i][modifyMask] = inundationHeight # poke in the values and create the grids - - # Delete the grids before re-creating them - self.deleteAllGrids(["InundationTiming"]) - - # Create the InundationTiming grids - for i in range(len(timingGrids)): - self.createGrid(mutableID, "InundationTiming", "SCALAR", timingGrids[i], trList[i]) - - # Finally create the surge grid which will be saved as the InundationMax - itCube = np.array(timingGrids) - surgePctGrid = np.amax(itCube, axis = 0) - - self.makeInundationMaxGrid(timingGrids, itTRList) - - else: # Then this is UpdateInunMax - - self.deleteAllGrids(["InundationMax","SurgeHtPlusTideMSL", "SurgeHtPlusTideMLLW", - "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW", "SurgeHtPlusTideMLLW"]) - - itTRList = 
self.GM_getWEInventory("InundationTiming", self.mutableID(), "SFC") - - if len(itTRList) == 0: - self.statusBarMsg("No InundationTiming grids found at all. Inundation grids required to exist when running with this option. Otherwise run with Manual Replace Option.", "S") - return - - timingGrids = [] - - # Fetch all the timing grids - for tr in itTRList: - grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr) - grid[~ssea] = 0.0 - timingGrids.append(grid) - self.deleteGrid(mutableID, "InundationTiming", "SFC", tr) - self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1) - - # Finally create the surge grid which will be saved as the InundationMax - - surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList) - - #return - - # Done with manual options - - # Get the hazard key based on the GUI - hazardType = varDict["Hazard"] - - # Make the key with the storm number - if hazardType == "Storm Surge Watch": - ssAddKey = "SS.A:" + str(stormNum) - # ssAddKey = "SS.W:" + str(stormNum) - elif hazardType == "Storm Surge Warning": - ssAddKey = "SS.W:" + str(stormNum) - # ssAddKey = "SS.A:" + str(stormNum) - else: - ssAddKey = "" - - # Calculate the new pSurge mask restricting to the storm surge - # edit area and the selected edit area - pSurgeMask = surgePctGrid > inundationThresh - pSurgeMask = pSurgeMask & ssea - pSurgeMask = pSurgeMask & editAreaMask - - ############################# Now make the Hazard grid and check for conflicts ######################## - - # Make an empty grid which will be populated - ssGrid = self.empty(np.int8) - ssKeys = ["", ssAddKey] - - ssIndex = self.getIndex(ssAddKey, ssKeys) - ssGrid[pSurgeMask] = ssIndex - - # If we're making the temp grid, just make it now with no Hazard merging and return - hazTRList = self.GM_getWEInventory("Hazards") - if tempAddReplace == "Raw Guidance": - for tr in hazTRList: - hazGrid = self.getGrids("Fcst", "Hazards", "SFC", tr) - - if 
self.anyHazardConflictsByPoint(hazGrid, (ssGrid, ssKeys), editAreaMask): - self.statusBarMsg("ETNs do not match Hazards grid in selected area for Raw Guidance.", "U") - return - - self.createGrid("Fcst", "tempProposedSS" , "DISCRETE", (ssGrid, ssKeys), timeRange) - self.makeDiffGrid() - return - - proposedSSTRList = self.GM_getWEInventory("ProposedSS") - - # Next we need to extract the existing SS Hazards from the Hazard grid - # and insert those hazards in the SS grid so we never lose them. - # So iterate over each Hazard grid and add SS values as we go - if len(proposedSSTRList) == 0: - hazSSGrid = self.empty(np.int8) - hazSSKeys = [""] - - # A ProposedSS grid already exists - start with the last one - else: - hazSSGrid, hazSSKeys = self.getGrids("Fcst", "ProposedSS", "SFC", proposedSSTRList[-1]) - - # Get the grids to check for conflicts - for tr in hazTRList: - hazGrid = self.getGrids("Fcst", "Hazards", "SFC", tr) - if self.anyHazardConflictsByPoint(hazGrid, (ssGrid, ssKeys), editAreaMask): - self.statusBarMsg("ETNs do not match Hazards grid in selected area. Please Revert your grids.", "U") - return - - print "No conflicts found...." - - # Merge any existing SS hazards into the ProposedSS grid - if len(hazTRList) > 0: - for hazTR in hazTRList: - hazGrid, hazKeys = self.getGrids("Fcst", "Hazards", "SFC", hazTR) - - # Merge the selected hazards, in this case SS.A and SS.W, - # into the existing ProposedSS grid - (hazSSGrid, hazSSKeys) = self.mergeCertainHazards( - (hazSSGrid, hazSSKeys), (hazGrid, hazKeys), hazTR, - ["SS.W", "SS.A"]) - - # Update these hazards where there was no hazard, using the pSurge grid - noneIndex = self.getIndex("", hazSSKeys) - ssIndex = self.getIndex(ssAddKey, hazSSKeys) - mask = pSurgeMask & (hazSSGrid == noneIndex) - hazSSGrid[mask] = ssIndex - - # Finally upgrade Watch areas to Warnings over the edit area, if necessary - if "SS.W" in ssAddKey: - - # Find Watch points over the edit area - print "Upgrading watches to warnings." 
- # Find the key containing "SS.A" that matches the ETN - etn = self.getETN(ssAddKey) - watchKey = "SS.A:" + etn - - ssWatchIndex = self.getIndex(watchKey, hazSSKeys) - ssWarningIndex = self.getIndex(ssAddKey, hazSSKeys) - mask = (hazSSGrid == ssWatchIndex) & pSurgeMask - hazSSGrid[mask] = ssWarningIndex - - # Now create the new storm surge hazard grid(s) - # Create a new time range -# start = modelStart.unixTime() - 80 * 3600 -# end = start + 78 * 3600 -# timeRange = TimeRange.TimeRange(AbsTime.AbsTime(start), -# AbsTime.AbsTime(end)) - -#*********************************************************************************** - - weNameList = ["ProposedSS", "InitialSS"] - now = int(self._gmtime().unixTime() / 3600) * 3600 - timeRange = self.GM_makeTimeRange(now, now + 48 * 3600) - - # Make an InitialSS grid if it's the first time - for weName in weNameList: - self.trimTimeRange(weName, timeRange) - self.createGrid("Fcst", weName, "DISCRETE", - (hazSSGrid, hazSSKeys), timeRange) - - # Make the timing grids and the max grid from the model - if sourceOption in ["N-SBN (Default)", "Backup", "PETSS"]: - self.makeTimingGridsFromModel(self._dataSource, pctStr, "FHAG0", ssea, MHHWMask) - print "Creating new InundationMax grid..............................................." - self.createGrid("Fcst", "InundationMax", "SCALAR", surgePctGrid, - timeRange, precision=1) - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# CreateProposedSS - Version 4.0 +# +# Author: Lefebvre/Belk/Hardin/Santos/Trogdon +# +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- -------------------------------------------- +# Jun 04, 2014 incorporate Inundation Grid and Incremental Grid +# Apr 12, 2016 cleanup code and refactor to use GridManipulation +# and TropicalUtility +# Jul 14, 2016 Fixed Smoothing, Added Manual Options, +# refactored code a little bit. +# Sep 19, 2016 19293 randerso Initial baseline check in +# +# Nov 13-18 2016 - Further tweaks made during SWIT testing +# Dec 15, 2016 - Added UpdateInunMax option +# Dec 21, 2016 - Deleted saveElements line in main execute +# 11/15/2017: Tweaked during SWiT to better handle extended PSurge/PETTS Guidance out to 102 hours, +# improved UpdateInunMax option and made changes to makeInundationTiming methods to accommodate new TCs for +# the TPCSurgeProb and PETSS dbs. +# +######################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["None"] + +import sys, time, re, os + +import AbsTime +import ProcessVariableList +import TimeRange +import TropicalUtility +import numpy as np + + +class Procedure (TropicalUtility.TropicalUtility): + + def __init__(self, dbss): + TropicalUtility.TropicalUtility.__init__(self, dbss) + + + def trimTimeRange(self, weName, timeRange): + + # Get a list of all grids we have for this parameter + trList = self.GM_getWEInventory(weName) + if len(trList) == 0: # nothing more to do + return + + # Fragment all grids to their smallest time range + self.splitCmd([weName], timeRange) + + # If there is more than 1 grid to deal with + if trList > 1: + + # Make a time range from the start of the first grid, to the start + # of the desired time range + tr = self.GM_makeTimeRange(trList[0].startTime().unixTime(), + timeRange.startTime().unixTime()) + + # Delete all grids in this interim time range + self.deleteCmd([weName], tr) + + # Get the parameter values from the last time range in the list + tr = trList[-1] + grid = self.getGrids("Fcst", weName, "SFC", tr) + + # Make a new grid with these values, using the desired time range + self.createGrid("Fcst", weName, "DISCRETE", grid, timeRange) + + + def getTPCSurgeProbModelTime(self, modelSource, pctStr, level): + + siteID = self.getSiteID() + if modelSource == "PETSS": + dbName = siteID + "_D2D_" + modelSource + "LoRes" + else: + dbName = siteID + "_D2D_TPCSurgeProb" + modelSource + + weName = "Surge" + pctStr + "Pct" + trList = self.GM_getWEInventory(weName, dbName, level) + + if len(trList) == 0: + msgStr = dbName + " " + weName + " " + level + " does not exist in the TPCSurge model. 
" + self.statusBarMsg(msgStr, "S") + return None + + modelStart = trList[0].startTime() + + return modelStart + + def getExceedanceHeight(self, modelSource, pctStr, level): + + siteID = self.getSiteID() + if modelSource == "PETSS": + dbName = siteID + "_D2D_" + modelSource + "LoRes" + else: + dbName = siteID + "_D2D_TPCSurgeProb" + modelSource + + weName = "Surge" + pctStr + "Pct" + trList = self.GM_getWEInventory(weName, dbName, level) + + print("getExceedanceHeight.....TRList for ", dbName, weName, pctStr, level) + + # Return None if no grids were found. This should post an error to the user. + if len(trList) == 0: + return None + + modelStart = self.getTPCSurgeProbModelTime(modelSource, pctStr, level) + + # Didn't find grid in the Fcst database so fetch it from model database + grid = self.getGrids(dbName, weName, level, trList[-1]) + + mask = grid <= -10.0 # invalid point - in meters + grid /= 0.3048 # convert meters to feet + grid[mask] = -80.0 # reset values less than -10.0 meters to -80.0 feet + + if level == "SFC": + return grid, modelStart + else: + return grid + + # Create InundationTiming grids from PHISH + def makeTimingGridsFromModel(self, modelSource, pctStr, level, ssea, MHHWMask): + + siteID = self.getSiteID() + if modelSource == "PETSS": + dbName = siteID + "_D2D_" + modelSource + "LoRes" + else: + dbName = siteID + "_D2D_TPCSurgeProb" + modelSource + + weName = "Surge" + pctStr + "Pctincr" + trList = self.GM_getWEInventory(weName, dbName, level) + + print("makeTimingGridsFromModel.....TRList for ", dbName, weName, pctStr, level) + + if len(trList) == 0: + self.statusBarMsg("No grids available for model:" + dbName, "S") + return + + # make timeRanges based on the current time not the time of the model + baseTime = int(trList[0].startTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period + endTime = int(trList[-1].endTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period + if endTime < trList[-1].endTime().unixTime(): + 
endTime += 6 * 3600 + timingTRList = self.makeTimingTRs(baseTime, endTime) + + gridList = [] + for tr in trList: + phishGrid = self.getGrids(dbName, weName, level, tr) + if phishGrid is None: + self.statusBarMsg("No PHISH grid available for:" + repr(tr), "S") + continue + + phishMask = phishGrid > -25.0 + phishGrid[phishMask] /= 0.3048 # convert meters to feet + phishGrid[~phishMask] = -80.0 + + phishMask = (phishGrid > 0.0) & ssea + phishGrid[phishMask] = self.GM_smoothGrid(phishGrid, 3, phishMask)[phishMask] + + phishGrid = np.clip(phishGrid, 0.0, 100.0) + phishGrid[~ssea] = 0.0 + phishGrid[MHHWMask] = 0.0 + + gridList.append(phishGrid) + + + for i, grid in enumerate(gridList): + self.createGrid("Fcst", "InundationTiming", "SCALAR", grid, timingTRList[i], precision=1) + + + def makeInundationMaxGrid(self, timingGrids, trList): + + itCube = np.array(timingGrids) + maxGrid = np.amax(itCube, axis=0) + + now = int(self._gmtime().unixTime() / 3600) * 3600 + maxTimeRange = self.GM_makeTimeRange(now, now + 48 * 3600) + + self.createGrid(self.mutableID(), "InundationMax", "SCALAR", maxGrid, maxTimeRange) + + return maxGrid + + # Fetch the VDATUM grid and mask all points below the specified value. + def getVDATUMSGrid(self, weName, threshold): + siteID = self.getSiteID() + dbName = siteID + "_D2D_VDATUMS" + + # First check to see if the grid has been stored as a temporary grid + # and return that. Purely for performance purposes + fcstTRList = self.GM_getWEInventory(weName) + if len(fcstTRList) > 0: + grid = self.getGrids("Fcst", weName, "SFC", fcstTRList[0]) + return grid + + trList = self.GM_getWEInventory(weName, dbName) + + if len(trList) == 0: + msgStr = weName + " does not exist in the VDATUMS model. " + self.statusBarMsg(msgStr, "S") + # TODO: Should we return here or throw an exception since there's no grid to operate on? + + # TODO: why are we looping over all the time ranges retrieving grid + # we're not using. We should just use the last tr in the list. 
+ # There's probably only one anyway. + for tr in trList: + grid = self.getGrids(dbName, weName, "SFC", tr, mode="First") + + mask = grid <= threshold # points below threshold + + grid /= 0.3048 # convert meters to feet + grid[mask] = -80.0 # set points to min value below threshold + + return grid + + # returns the combined key. Enforces the rule that keys with the + # same phen returns the one key with the highest priority sig. + def combinedKey(self, subKeys, newKey): + if newKey is None: + return subKeys + + subKeyList = subKeys.split("^") + + # check for same keys + if newKey in subKeyList: + return subKeys + + defaultCombo = subKeys + "^" + newKey + + # check for non-VTEC key + if "." not in newKey: + return defaultCombo + + # more exceptions - these phens are above the law + exceptions = ["TO", "SV", "FF"] + sigList = ["W", "Y", "A"] + if self.keyPhen(newKey) in exceptions: + return defaultCombo + + for sk in subKeyList: + if self.keyPhen(sk) == self.keyPhen(newKey): + subSig = self.keySig(sk) + newSig = self.keySig(newKey) + if subSig == newSig: + return subKeys + + if subSig not in sigList or newSig not in sigList: + continue + + if sigList.index(subSig) > sigList.index(newSig): + subKeys = subKeys.replace(sk, newKey) + + return subKeys + + return defaultCombo + + # makes a new hazard given the oldKey and a new watch phen, + # sig and etn. 
+ def makeNewKey(self, oldKey, phenSig): + # check for the dumb cases + if oldKey == "" or oldKey == phenSig: + return phenSig + + # split up the key, add the hazard, sort, and reassemble + parts = oldKey.split("^") + parts.append(phenSig) + parts.sort() # makes sure the same set of subKeys look the same + # assemble the new key + newKey = "" + for p in parts: + if newKey == "": + newKey = p + else: + newKey = self.combinedKey(newKey, p) + # just in case + if newKey == "": + newKey = "" + + return newKey + + + # Returns a list of unique keys for the specified grid and mask + def getUniqueKeys(self, byteGrid, keys, mask=None): + + uniqueKeys = [] + for keyIndex, key in enumerate(keys): + valueMask = byteGrid == keyIndex + valueMask &= mask + if valueMask.any(): + uniqueKeys.append(key) + + return uniqueKeys + + # adds the specified hazard to weName over the specified timeRange + # and spatially over the specified mask. Combines the specified + # hazard with the existing hazards by default. 
For replaceMode, + # specify 0 in the combineField + def combineHazards(self, targetGrid, addHaz, mask, combine=1): + + byteGrid, hazKey = targetGrid + uniqueKeys = self.getUniqueKeys(byteGrid, hazKey, mask) + for uKey in uniqueKeys: + if combine: + newKey = self.makeNewKey(uKey, addHaz) + else: # replace + newKey = addHaz + + oldIndex = self.getIndex(uKey, hazKey) + newIndex = self.getIndex(newKey, hazKey) + + # calculate the mask - intersection of mask and oldIndex values + editMask = (byteGrid == oldIndex) & mask + + # poke in the new values + byteGrid[editMask] = newIndex + + return (byteGrid, hazKey) + + def addHazard(self, targetGrid, addHaz, mask, combine=1): + + # Only interested in SS subKeys, so extract that part first + ssKey = "" + subKeys = self.getSubKeys(addHaz) + for subKey in subKeys: + if "SS" in subKey: + ssKey = subKey + + if ssKey == "": + print("SS subKey not found in key:", addHaz) + return + + targetByteGrid, targetKeys = self.combineHazards(targetGrid, + ssKey, mask, combine) + + return targetByteGrid, targetKeys + + def deleteAllGrids(self, weList): + + for weName in weList: + trList = self.GM_getWEInventory(weName) + if len(trList) == 0: + continue + start = trList[0].startTime().unixTime() + end = trList[-1].endTime().unixTime() + tr = self.GM_makeTimeRange(start, end) + + self.deleteCmd([weName], tr) + + return + + def makeDiffGrid(self): + # Get the ProposedSS grid + trList = self.GM_getWEInventory("ProposedSS") + if len(trList) != 1: + self.statusBarMsg("No ProposedSS grids found.", "S") + return + else: + proposedTR = trList[0] + + # Get the Raw guidance grid + trList = self.GM_getWEInventory("tempProposedSS") + if len(trList) != 1: + self.statusBarMsg("No tempProposedSS grids found.", "S") + return + else: + rawGuidTR = trList[0] + + proposedSSGrid = self.getGrids("Fcst", "ProposedSS", "SFC", proposedTR) + rawGuidSSGrid = self.getGrids("Fcst", "tempProposedSS", "SFC", rawGuidTR) + + self.calcDiffGrid(proposedSSGrid, rawGuidSSGrid, 
"PrevGuidDiffSS", + proposedTR) + + return + + def baseGuidanceTime(self): + startTime = int((self._gmtime().unixTime() - (2 * 3600)) / (6 * 3600)) * (6 * 3600) + return startTime + + # Make a list of timeRanges that will be used to make InundationTiming grids + def makeTimingTRs(self, baseTime, endTime): + # Make the inundation timing grids + trList = [] + start = baseTime + end = baseTime + 6 * 3600 + while end <= endTime: + tr = TimeRange.TimeRange(AbsTime.AbsTime(start), AbsTime.AbsTime(end)) + trList.append(tr) + start = end + end += 6 * 3600 + + return trList + + def getTimingGrids(self): + + baseTime = self.baseGuidanceTime() + endTime = baseTime + 78 * 3600 + trList = self.makeTimingTRs(baseTime, endTime) + + gridList = [] + for tr in trList: + gridList.append(self.empty(np.float32)) + + return trList, gridList + + def execute(self, editArea): + + editAreaMask = self.encodeEditArea(editArea) + + # If we did not find an edit are mask + if editArea is None or (not editAreaMask.any()): + # Select the entire domain + editAreaMask = self.newGrid(True, np.bool) + + # Extract the info from all storms + stormList = self.extractStormInfo() + + stormNames = [] + + mutableID = self.mutableID() + + # make a list of the active storms to pass into the variableList + for sDict in stormList: + stormNames.append(sDict["stormName"]) + + variableList = [] + bogusStormName = "WXYZZYXW" + variableList.append(("Data Source", "N-SBN (Default)", "radio", + ["N-SBN (Default)", "Backup", "PETSS", "Manual Replace", "Manual Add", "UpdateInunMax" + ])) + variableList.append(("Indicate Your Situational Forecast Confidence", "Typical (Combined; 10% Exceedance)", + "radio", ["Typical (Combined; 10% Exceedance)", + "Medium (Combined; 20% Exceedance)", + "High (Combined; 30% Exceedance)", + "Higher (Combined; 40% Exceedance)", + "Highest (Combined; 50% Exceedance)"])) + variableList.append(("StormName", bogusStormName, "radio", stormNames)) + variableList.append(("Hazard", "Storm Surge 
Watch", "radio", + ["Storm Surge Watch", "Storm Surge Warning"])) + variableList.append(("Raw Guidance or Proposed SS Grid?", "ProposedSS", "radio", + ["Raw Guidance", "ProposedSS"])) + variableList.append(("Inundation Threshold" , 3, "scale", [1, 8], 1)) + variableList.append(("Manual Inundation settings:", "", "label")) + variableList.append(("Inundation Height" , 3, "scale", [1, 8], 1)) + variableList.append(("Start Hour for Inundation Timing", 0, "scale", [0.0, 72.0], 6.0)) + variableList.append(("End Hour for Inundation Timing", 6, "scale", [0.0, 78.0], 6.0)) + +# variableList.append(("Make Inunudation Timing Grids?", "Yes", "radio", ["Yes", "No"])) + + # Display the GUI and check for cancel + varDict = {} + processVarList = ProcessVariableList.ProcessVariableList("StormSurgeWW", variableList, varDict) + status = processVarList.status() + if status.upper() != "OK": + self.cancel() + + sourceOption = varDict["Data Source"] + + if sourceOption == "N-SBN (Default)": + self._dataSource = "LoRes" + elif sourceOption == "Backup": + self._dataSource = "Manual" + elif sourceOption == "PETSS": + self._dataSource = "PETSS" + elif sourceOption == "Manual Replace": + self._dataSource = "LoRes" + elif sourceOption == "Manual Add": + self._dataSource = "LoRes" + + # Fetch the StormSurge edit area + ssEditArea = self.getEditArea("StormSurgeWW_EditArea") + ssea = self.encodeEditArea(ssEditArea) + + # Below you can configure different edit areas to specify different tide corrections + inundationThresh = varDict["Inundation Threshold"] + + tempAddReplace = varDict["Raw Guidance or Proposed SS Grid?"] + + # Make sure a storm was selected + stormName = varDict["StormName"] + if stormName == bogusStormName: + self.statusBarMsg("Please select a storm name.", "U") + return + + # Extract storm number for selected storm + for sDict in stormList: + if sDict["stormName"] == stormName: + stormNum = int(sDict["stormNumber"]) + lastModified = sDict["lastModified"] + + # Make sure that the 
storm info has been updated within the last 7 hours + if self._gmtime().unixTime() - lastModified > 7 * 3600: + self.statusBarMsg("StormInfo for " + stormName + " is old. " + \ + "Please update StormInfo first.", "U") + return + + # Ensure this is a national VTEC number + if stormNum < 1000: + stormNum = int(stormNum + 1000) + + confidenceStr = varDict["Indicate Your Situational Forecast Confidence"] + + # extract the percent value from this string + pctPos = confidenceStr.find("%") + pctStr = confidenceStr[pctPos - 2:pctPos] + + now = int(self._gmtime().unixTime() / 3600) * 3600 + timeRange = self.GM_makeTimeRange(now, now + 48 * 3600) + + if sourceOption in ["N-SBN (Default)", "Backup", "PETSS"]: + + if stormNum is None: + self.abort("You must supply the storm!") + return + + # Now get the P-surge + surgePctGrid = self.getExceedanceHeight(self._dataSource, pctStr, "FHAG0") + + # Stop the tool if we didn't get the grid we wanted + if surgePctGrid is None: + self.statusBarMsg("No StormSurge guidance found for source " + self._dataSource + ".", "S") + return + + surgePctGrid = np.clip(surgePctGrid, 0.0, 100.0) + surgePctGrid[~ssea] = 0.0 + + # Get NAVD grids + surgePctGridNAVD, modelStart = self.getExceedanceHeight(self._dataSource, pctStr, "SFC") + surgePctGridNAVD = np.clip(surgePctGridNAVD, -80.0, 100.0) + surgePctGridNAVD[~ssea] = -80.0 + + # smooth grids + surgePctGrid = self.GM_smoothGrid(surgePctGrid, 3, (surgePctGrid > 0.0) & ssea) + surgePctGridNAVD = self.GM_smoothGrid(surgePctGridNAVD, 3, (surgePctGridNAVD > -10.0) & ssea) + + # Calculate and display surge guidance grids + msltonavd = self.getVDATUMSGrid("MSLtoNAVD88", -0.40) + navdtomllw = self.getVDATUMSGrid("NAVD88toMLLW", -2.20) + navdtomhhw = self. 
getVDATUMSGrid("NAVD88toMHHW", -3.40) + + validSurgeMask = surgePctGridNAVD > -80.0 + wTopoMask = validSurgeMask & (msltonavd > -80.0) + + surgePctGridMSL = self.newGrid(-80.0) + surgePctGridMSL[wTopoMask] = (surgePctGridNAVD - msltonavd)[wTopoMask] + mllwMask = validSurgeMask & (navdtomllw > -80.0) + + surgePctGridMLLW = self.newGrid(-80.0) + surgePctGridMLLW[mllwMask] = (surgePctGridNAVD + navdtomllw)[mllwMask] + mhhwMask = validSurgeMask & (navdtomhhw > -80.0) + + surgePctGridMHHW = self.newGrid(-80.0) + surgePctGridMHHW[mhhwMask] = (surgePctGridNAVD + navdtomhhw)[mhhwMask] + + surgePctGridMSL = np.clip(surgePctGridMSL, -30.0, 100.0) + surgePctGridMLLW = np.clip(surgePctGridMLLW, -30.0, 100.0) + surgePctGridNAVD = np.clip(surgePctGridNAVD, -30.0, 100.0) + surgePctGridMHHW = np.clip(surgePctGridMHHW, -30.0, 100.0) + + # Clip the MHHW grid at 0.0 + MHHWMask = surgePctGridMHHW <= 0.0 + surgePctGrid[MHHWMask] = 0.0 + + weList = ["InundationMax", "InundationTiming", "SurgeHtPlusTideMSL", "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW", "SurgeHtPlusTideMLLW"] + + self.deleteAllGrids(weList) + + now = int(self._gmtime().unixTime() / 3600) * 3600 + guidanceTR = self.GM_makeTimeRange(now, now + 48 * 3600) + + self.createGrid("Fcst", "SurgeHtPlusTideMSL", "SCALAR", + surgePctGridMSL, guidanceTR, precision=2) + + self.createGrid("Fcst", "SurgeHtPlusTideMLLW", "SCALAR", + surgePctGridMLLW, guidanceTR, precision=2) + + self.createGrid("Fcst", "SurgeHtPlusTideNAVD", "SCALAR", + surgePctGridNAVD, guidanceTR, precision=2) + + self.createGrid("Fcst", "SurgeHtPlusTideMHHW", "SCALAR", + surgePctGridMHHW, guidanceTR, precision=2) + + self.createGrid("Fcst", "SurgeHtPlusTideMLLW", "SCALAR", + surgePctGridMLLW, guidanceTR, precision=2) + + elif sourceOption in ["Manual Replace", "Manual Add"]: + # Figure out the total number of points + gridSize = self.getGridShape() + totalPoints = gridSize[0] * gridSize[1] + # Make sure the user selected a real edit area before continuing + 
selectedMask = self.encodeEditArea(editArea) # make the mask based on the selected edit area + if editArea is None or (not selectedMask.any()) or np.count_nonzero(selectedMask) == totalPoints: + self.statusBarMsg("Please select an edit area before running the Manual Replace or Manual Add option." , "S") + return + +# modelStart = self.getTPCSurgeProbModelTime("LoRes", pctStr, "SFC") + + inundationHeight = float(varDict["Inundation Height"]) + inunStartHour = float(varDict["Start Hour for Inundation Timing"]) + inunEndHour = float(varDict["End Hour for Inundation Timing"]) + + modifyMask = selectedMask & ssea + + if inunStartHour >= inunEndHour: + self.statusBarMsg("Please define the end hour after the start hour.", "S") + return + + surgePctGrid = self.empty(np.float32) + + # Fetch the old grids if we're adding + if sourceOption == "Manual Add": + imTRList = self.GM_getWEInventory("InundationMax", self.mutableID(), "SFC") + if len(imTRList) > 0: + imTR = imTRList[0] + surgePctGrid = self.getGrids(mutableID, "InundationMax", "SFC", imTR) + + surgePctGrid = surgePctGrid * 0 # reset the surgePctGrid + surgePctGrid[modifyMask] = inundationHeight # poke in the new values + + # Make the timing grids + baseTime = self.baseGuidanceTime() + + self.deleteAllGrids(["InundationMax","SurgeHtPlusTideMSL", "SurgeHtPlusTideMLLW", + "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW", "SurgeHtPlusTideMLLW"]) + + if sourceOption == "Manual Replace": # Make new grids and replace all IT grids + self.deleteAllGrids(["InundationTiming"]) + + trList, timingGrids = self.getTimingGrids() # fetch empty grids with times + + for i, tr in enumerate(trList): + start = tr.startTime().unixTime() + end = tr.endTime().unixTime() + + if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: + timingGrids[i][selectedMask] = inundationHeight # populate only where needed + timingGrids[i][~ssea] = 0.0 + + for i, tr in enumerate(trList): + timingGrids[i] = np.clip(timingGrids[i], 
0.0, 100.0) + self.createGrid(mutableID, "InundationTiming", "SCALAR", timingGrids[i], tr) + + # Finally create the surge grid which will be saved as the InundationMax + itCube = np.array(timingGrids) + surgePctGrid = np.amax(itCube, axis = 0) + + self.makeInundationMaxGrid(timingGrids, trList) + + elif sourceOption == "Manual Add": # Just replace the selected grid points over the selected time + # Fetch the existing IT grids + itTRList = self.GM_getWEInventory("InundationTiming", self.mutableID(), "SFC") + if len(itTRList) == 0: + self.statusBarMsg("No InundationTiming grids found at all. Inundation grids required to exist when running with this option. Otherwise run with Manual Replace Option.", "S") + return + + timingGrids = [] + trList = [] + + # Fetch all the timing grids + for tr in itTRList: + grid = self.getGrids(mutableID, "InundationTiming", "SFC", tr) + timingGrids.append(grid) + trList.append(tr) + + # Now poke in the selected value in each grid we need to modify + for i in range(len(timingGrids)): + start = trList[i].startTime().unixTime() + end = trList[i].endTime().unixTime() + if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: + timingGrids[i][modifyMask] = inundationHeight # poke in the values and create the grids + + # Delete the grids before re-creating them + self.deleteAllGrids(["InundationTiming"]) + + # Create the InundationTiming grids + for i in range(len(timingGrids)): + self.createGrid(mutableID, "InundationTiming", "SCALAR", timingGrids[i], trList[i]) + + # Finally create the surge grid which will be saved as the InundationMax + itCube = np.array(timingGrids) + surgePctGrid = np.amax(itCube, axis = 0) + + self.makeInundationMaxGrid(timingGrids, itTRList) + + else: # Then this is UpdateInunMax + + self.deleteAllGrids(["InundationMax","SurgeHtPlusTideMSL", "SurgeHtPlusTideMLLW", + "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW", "SurgeHtPlusTideMLLW"]) + + itTRList = 
self.GM_getWEInventory("InundationTiming", self.mutableID(), "SFC") + + if len(itTRList) == 0: + self.statusBarMsg("No InundationTiming grids found at all. Inundation grids required to exist when running with this option. Otherwise run with Manual Replace Option.", "S") + return + + timingGrids = [] + + # Fetch all the timing grids + for tr in itTRList: + grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr) + grid[~ssea] = 0.0 + timingGrids.append(grid) + self.deleteGrid(mutableID, "InundationTiming", "SFC", tr) + self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1) + + # Finally create the surge grid which will be saved as the InundationMax + + surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList) + + #return + + # Done with manual options + + # Get the hazard key based on the GUI + hazardType = varDict["Hazard"] + + # Make the key with the storm number + if hazardType == "Storm Surge Watch": + ssAddKey = "SS.A:" + str(stormNum) + # ssAddKey = "SS.W:" + str(stormNum) + elif hazardType == "Storm Surge Warning": + ssAddKey = "SS.W:" + str(stormNum) + # ssAddKey = "SS.A:" + str(stormNum) + else: + ssAddKey = "" + + # Calculate the new pSurge mask restricting to the storm surge + # edit area and the selected edit area + pSurgeMask = surgePctGrid > inundationThresh + pSurgeMask = pSurgeMask & ssea + pSurgeMask = pSurgeMask & editAreaMask + + ############################# Now make the Hazard grid and check for conflicts ######################## + + # Make an empty grid which will be populated + ssGrid = self.empty(np.int8) + ssKeys = ["", ssAddKey] + + ssIndex = self.getIndex(ssAddKey, ssKeys) + ssGrid[pSurgeMask] = ssIndex + + # If we're making the temp grid, just make it now with no Hazard merging and return + hazTRList = self.GM_getWEInventory("Hazards") + if tempAddReplace == "Raw Guidance": + for tr in hazTRList: + hazGrid = self.getGrids("Fcst", "Hazards", "SFC", tr) + + if 
self.anyHazardConflictsByPoint(hazGrid, (ssGrid, ssKeys), editAreaMask): + self.statusBarMsg("ETNs do not match Hazards grid in selected area for Raw Guidance.", "U") + return + + self.createGrid("Fcst", "tempProposedSS" , "DISCRETE", (ssGrid, ssKeys), timeRange) + self.makeDiffGrid() + return + + proposedSSTRList = self.GM_getWEInventory("ProposedSS") + + # Next we need to extract the existing SS Hazards from the Hazard grid + # and insert those hazards in the SS grid so we never lose them. + # So iterate over each Hazard grid and add SS values as we go + if len(proposedSSTRList) == 0: + hazSSGrid = self.empty(np.int8) + hazSSKeys = [""] + + # A ProposedSS grid already exists - start with the last one + else: + hazSSGrid, hazSSKeys = self.getGrids("Fcst", "ProposedSS", "SFC", proposedSSTRList[-1]) + + # Get the grids to check for conflicts + for tr in hazTRList: + hazGrid = self.getGrids("Fcst", "Hazards", "SFC", tr) + if self.anyHazardConflictsByPoint(hazGrid, (ssGrid, ssKeys), editAreaMask): + self.statusBarMsg("ETNs do not match Hazards grid in selected area. 
Please Revert your grids.", "U") + return + + print("No conflicts found....") + + # Merge any existing SS hazards into the ProposedSS grid + if len(hazTRList) > 0: + for hazTR in hazTRList: + hazGrid, hazKeys = self.getGrids("Fcst", "Hazards", "SFC", hazTR) + + # Merge the selected hazards, in this case SS.A and SS.W, + # into the existing ProposedSS grid + (hazSSGrid, hazSSKeys) = self.mergeCertainHazards( + (hazSSGrid, hazSSKeys), (hazGrid, hazKeys), hazTR, + ["SS.W", "SS.A"]) + + # Update these hazards where there was no hazard, using the pSurge grid + noneIndex = self.getIndex("", hazSSKeys) + ssIndex = self.getIndex(ssAddKey, hazSSKeys) + mask = pSurgeMask & (hazSSGrid == noneIndex) + hazSSGrid[mask] = ssIndex + + # Finally upgrade Watch areas to Warnings over the edit area, if necessary + if "SS.W" in ssAddKey: + + # Find Watch points over the edit area + print("Upgrading watches to warnings.") + # Find the key containing "SS.A" that matches the ETN + etn = self.getETN(ssAddKey) + watchKey = "SS.A:" + etn + + ssWatchIndex = self.getIndex(watchKey, hazSSKeys) + ssWarningIndex = self.getIndex(ssAddKey, hazSSKeys) + mask = (hazSSGrid == ssWatchIndex) & pSurgeMask + hazSSGrid[mask] = ssWarningIndex + + # Now create the new storm surge hazard grid(s) + # Create a new time range +# start = modelStart.unixTime() - 80 * 3600 +# end = start + 78 * 3600 +# timeRange = TimeRange.TimeRange(AbsTime.AbsTime(start), +# AbsTime.AbsTime(end)) + +#*********************************************************************************** + + weNameList = ["ProposedSS", "InitialSS"] + now = int(self._gmtime().unixTime() / 3600) * 3600 + timeRange = self.GM_makeTimeRange(now, now + 48 * 3600) + + # Make an InitialSS grid if it's the first time + for weName in weNameList: + self.trimTimeRange(weName, timeRange) + self.createGrid("Fcst", weName, "DISCRETE", + (hazSSGrid, hazSSKeys), timeRange) + + # Make the timing grids and the max grid from the model + if sourceOption in ["N-SBN 
(Default)", "Backup", "PETSS"]: + self.makeTimingGridsFromModel(self._dataSource, pctStr, "FHAG0", ssea, MHHWMask) + print("Creating new InundationMax grid...............................................") + self.createGrid("Fcst", "InundationMax", "SCALAR", surgePctGrid, + timeRange, precision=1) + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateTCVAreaDictionary.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateTCVAreaDictionary.py index 1df554dde0..944cf42007 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateTCVAreaDictionary.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/CreateTCVAreaDictionary.py @@ -1,265 +1,265 @@ - -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# CreateTCVAreaDictionary -# -# Author: -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Populate"] - -try: # See if this is the AWIPS I environment - import AFPS - AWIPS_ENVIRON = "AWIPS1" -except: # Must be the AWIPS II environment - AWIPS_ENVIRON = "AWIPS2" - - -import SmartScript -from LockingFile import File -from com.raytheon.uf.common.localization import PathManagerFactory -from com.raytheon.uf.common.localization import LocalizationContext -LocalizationType = LocalizationContext.LocalizationType -LocalizationLevel = LocalizationContext.LocalizationLevel -## For documentation on the available commands, -## see the SmartScript Utility, which can be viewed from -## the Edit Actions Dialog Utilities window - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, editArea, timeRange, varDict): - self._siteID = self.getSiteID() - - if AWIPS_ENVIRON == "AWIPS1": - import siteConfig - self._gfeHome = siteConfig.GFESUITE_HOME - self._gfeServer = siteConfig.GFESUITE_SERVER - self._gfePort = siteConfig.GFESUITE_PORT - - self._tcvAreaDictionaryContents = \ -""" -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TCV_AreaDictionary -# TCV_AreaDictionary file -# -# Author: GFE Installation Script -# ---------------------------------------------------------------------------- - -# Here is an example TCVAreaDictionary for just a single zone and with comments -# to talk about the structure of the dictionary. -# -# TCV_AreaDictionary = { -# # Zone -# 'FLZ173': { -# # A list of location names. 
-# 'locationsAffected': [ -# "Miami Beach", -# "Downtown Miami", -# ], -# -# # Potential impacts statements can be overriden here; anything not -# # overriden here will use the generic potential impacts statements -# 'potentialImpactsStatements': { -# # Section name: "Wind", "Storm Surge", "Flooding Rain" or "Tornado" -# "Wind": { -# # Threat level: "None", "Low", "Mod", "High" or "Extreme" -# "Extreme": [ -# # Each string will be on its own line -# "Widespread power outages with some areas experiencing long-term outages", -# "Many bridges and access routes connecting barrier islands impassable", -# "Structural category to sturdy buildings with some having complete wall and roof failures", -# "Complete destruction of mobile homes", -# "Numerous roads impassable from large debris", -# -# ], -# }, -# }, -# -# # Additional information that will be displayed at the end of the segment -# # The structure is a list containing strings and/or lists. Strings in the -# # same list will be idented the same amount. Introducing a list, idents the -# # text until it ends. 
For example: -# # -# # 'infoSection': [ -# # "This will be at tab level 0", -# # [ -# # "A new list was introduced so this is at tab level 1", -# # [ -# # "Yet another list so this is tab level 2", -# # "Still at tab level 2 here", -# # ], -# # "We are back at tab level 1 because we ended the list", -# # ], -# # "We ended the other list and are back at tab level 0 now", -# # ] -# 'infoSection': [ -# "LOCAL EVACUATION AND SHELTERING: MIAMI-DADE COUNTY EMERGENCY MANAGEMENT", -# [ -# "HTTP://WWW.MIAMIDADE.GOV/EMERGENCY/", -# ], -# "FAMILY EMERGENCY PLANS: FEDERAL EMERGENCY MANAGEMENT AGENCY", -# [ -# "HTTP://READY.GOV/", -# ], -# "LOCAL WEATHER CONDITIONS AND FORECASTS: NWS MIAMI FLORIDA", -# [ -# "HTTP://WWW.SRH.NOAA.GOV/MFL/", -# ], -# ], -# }, -# } - -TCV_AreaDictionary = { -""" - self._zoneSkeletonContents = { - 'locationsAffected' : [], - 'potentialImpactsStatements' : {}, - 'infoSection' : [], - } - - TCVAreaDictionary = {} - try: - if AWIPS_ENVIRON == "AWIPS1": - import TCVAreaDictionary - TCVAreaDictionary = TCVAreaDictionary.TCV_AreaDictionary - else: - filename = "gfe/userPython/textUtilities/regular/TCVAreaDictionary.py" - fileContents = self._getFileContents(LocalizationType.CAVE_STATIC, - LocalizationLevel.SITE, - self._siteID, - filename) - - exec(fileContents) - - TCVAreaDictionary = TCV_AreaDictionary - except Exception: - pass - - for zone in self._getZones(): - self._tcvAreaDictionaryContents += " '" + zone + "': {\n" - - # Don't clobber existing dictionary entries - if zone in TCVAreaDictionary: - # Add new entries - for key in self._zoneSkeletonContents: - if key not in TCVAreaDictionary[zone]: - TCVAreaDictionary[zone][key] = self._zoneSkeletonContents[key] - - # Remove entries that are no longer needed - existingKeys = TCVAreaDictionary[zone].keys() - for key in existingKeys: - if key not in self._zoneSkeletonContents: - TCVAreaDictionary[zone].pop(key) - - self._tcvAreaDictionaryContents += self._formatDictionary(TCVAreaDictionary[zone], - 
tabLevel = 2) - else: - self._tcvAreaDictionaryContents += self._formatDictionary(self._zoneSkeletonContents, - tabLevel = 2) - - self._tcvAreaDictionaryContents += " },\n\n" - - self._tcvAreaDictionaryContents += "}\n" - - with open("/tmp/TCVAreaDictionary.TextUtility", "w") as file: - file.write(self._tcvAreaDictionaryContents) - - self._installDictionary() - - def _installDictionary(self): - from subprocess import call - if AWIPS_ENVIRON == "AWIPS1": - call([self._gfeHome + "/bin/ifpServerText", - "-h", self._gfeServer, - "-p", self._gfePort, - "-s", - "-u", "SITE", - "-n", "TCVAreaDictionary", - "-f", "/tmp/TCVAreaDictionary.TextUtility", - "-c", "TextUtility"]) - else: - call(["/awips2/GFESuite/bin/ifpServerText", - "-o", self._siteID, - "-s", - "-u", "SITE", - "-n", "TCVAreaDictionary", - "-f", "/tmp/TCVAreaDictionary.TextUtility", - "-c", "TextUtility"]) - - def _getZones(self): - editAreasFilename = "gfe/combinations/EditAreas_PublicZones_" + \ - self._siteID + ".py" - zonesKey = "Zones_" + self._siteID - - editAreasFileContents = self._getFileContents(LocalizationType.CAVE_STATIC, - LocalizationLevel.CONFIGURED, - self._siteID, - editAreasFilename) - exec(editAreasFileContents) - - # EASourceMap comes from the EditAreas file - return EASourceMap[zonesKey] - - def _getFileContents(self, loctype, loclevel, locname, filename): - pathManager = PathManagerFactory.getPathManager() - context = pathManager.getContext(loctype, loclevel) - context.setContextName(locname) - localizationFile = pathManager.getLocalizationFile(context, filename) - with File(localizationFile.getFile(), filename, 'r') as pythonFile: - fileContents = pythonFile.read() - - return fileContents - - def _formatDictionary(self, dictionary, tabLevel, output=""): - TAB = " " * 4 - - for key in dictionary: - output += TAB*tabLevel + repr(key) + ": " - - value = dictionary[key] - if type(value) is dict: - output += "{\n" - output = self._formatDictionary(value, tabLevel+1, output) - output += 
TAB*tabLevel + "},\n" - elif type(value) is list: - output += "[\n" - output = self._formatList(value, tabLevel+1, output) - output += TAB*tabLevel + "],\n" - else: - output += repr(value) + ",\n" - - return output - - def _formatList(self, theList, tabLevel, output=""): - TAB = " " * 4 - - for value in theList: - if type(value) is dict: - output += TAB*tabLevel + "{\n" - output = self._formatDictionary(value, tabLevel+1, output) - output += TAB*tabLevel + "},\n" - elif type(value) is list: - output += TAB*tabLevel + "[\n" - output = self._formatList(value, tabLevel+1, output) - output += TAB*tabLevel + "],\n" - else: - output += TAB*tabLevel + repr(value) + ",\n" - - return output + +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# CreateTCVAreaDictionary +# +# Author: +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Populate"] + +try: # See if this is the AWIPS I environment + import AFPS + AWIPS_ENVIRON = "AWIPS1" +except: # Must be the AWIPS II environment + AWIPS_ENVIRON = "AWIPS2" + + +import SmartScript +from LockingFile import File +from com.raytheon.uf.common.localization import PathManagerFactory +from com.raytheon.uf.common.localization import LocalizationContext +LocalizationType = LocalizationContext.LocalizationType +LocalizationLevel = LocalizationContext.LocalizationLevel +## For documentation on the available commands, +## see the SmartScript Utility, which can be viewed from +## the Edit Actions Dialog Utilities window + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, editArea, timeRange, varDict): + self._siteID = self.getSiteID() + + if AWIPS_ENVIRON == "AWIPS1": + import siteConfig + self._gfeHome = siteConfig.GFESUITE_HOME + self._gfeServer = siteConfig.GFESUITE_SERVER + self._gfePort = siteConfig.GFESUITE_PORT + + self._tcvAreaDictionaryContents = \ +""" +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TCV_AreaDictionary +# TCV_AreaDictionary file +# +# Author: GFE Installation Script +# ---------------------------------------------------------------------------- + +# Here is an example TCVAreaDictionary for just a single zone and with comments +# to talk about the structure of the dictionary. +# +# TCV_AreaDictionary = { +# # Zone +# 'FLZ173': { +# # A list of location names. 
+# 'locationsAffected': [ +# "Miami Beach", +# "Downtown Miami", +# ], +# +# # Potential impacts statements can be overriden here; anything not +# # overriden here will use the generic potential impacts statements +# 'potentialImpactsStatements': { +# # Section name: "Wind", "Storm Surge", "Flooding Rain" or "Tornado" +# "Wind": { +# # Threat level: "None", "Low", "Mod", "High" or "Extreme" +# "Extreme": [ +# # Each string will be on its own line +# "Widespread power outages with some areas experiencing long-term outages", +# "Many bridges and access routes connecting barrier islands impassable", +# "Structural category to sturdy buildings with some having complete wall and roof failures", +# "Complete destruction of mobile homes", +# "Numerous roads impassable from large debris", +# +# ], +# }, +# }, +# +# # Additional information that will be displayed at the end of the segment +# # The structure is a list containing strings and/or lists. Strings in the +# # same list will be idented the same amount. Introducing a list, idents the +# # text until it ends. 
For example: +# # +# # 'infoSection': [ +# # "This will be at tab level 0", +# # [ +# # "A new list was introduced so this is at tab level 1", +# # [ +# # "Yet another list so this is tab level 2", +# # "Still at tab level 2 here", +# # ], +# # "We are back at tab level 1 because we ended the list", +# # ], +# # "We ended the other list and are back at tab level 0 now", +# # ] +# 'infoSection': [ +# "LOCAL EVACUATION AND SHELTERING: MIAMI-DADE COUNTY EMERGENCY MANAGEMENT", +# [ +# "HTTP://WWW.MIAMIDADE.GOV/EMERGENCY/", +# ], +# "FAMILY EMERGENCY PLANS: FEDERAL EMERGENCY MANAGEMENT AGENCY", +# [ +# "HTTP://READY.GOV/", +# ], +# "LOCAL WEATHER CONDITIONS AND FORECASTS: NWS MIAMI FLORIDA", +# [ +# "HTTP://WWW.SRH.NOAA.GOV/MFL/", +# ], +# ], +# }, +# } + +TCV_AreaDictionary = { +""" + self._zoneSkeletonContents = { + 'locationsAffected' : [], + 'potentialImpactsStatements' : {}, + 'infoSection' : [], + } + + TCVAreaDictionary = {} + try: + if AWIPS_ENVIRON == "AWIPS1": + import TCVAreaDictionary + TCVAreaDictionary = TCVAreaDictionary.TCV_AreaDictionary + else: + filename = "gfe/userPython/textUtilities/regular/TCVAreaDictionary.py" + fileContents = self._getFileContents(LocalizationType.CAVE_STATIC, + LocalizationLevel.SITE, + self._siteID, + filename) + + exec(fileContents) + + TCVAreaDictionary = TCV_AreaDictionary + except Exception: + pass + + for zone in self._getZones(): + self._tcvAreaDictionaryContents += " '" + zone + "': {\n" + + # Don't clobber existing dictionary entries + if zone in TCVAreaDictionary: + # Add new entries + for key in self._zoneSkeletonContents: + if key not in TCVAreaDictionary[zone]: + TCVAreaDictionary[zone][key] = self._zoneSkeletonContents[key] + + # Remove entries that are no longer needed + existingKeys = list(TCVAreaDictionary[zone].keys()) + for key in existingKeys: + if key not in self._zoneSkeletonContents: + TCVAreaDictionary[zone].pop(key) + + self._tcvAreaDictionaryContents += self._formatDictionary(TCVAreaDictionary[zone], + 
tabLevel = 2) + else: + self._tcvAreaDictionaryContents += self._formatDictionary(self._zoneSkeletonContents, + tabLevel = 2) + + self._tcvAreaDictionaryContents += " },\n\n" + + self._tcvAreaDictionaryContents += "}\n" + + with open("/tmp/TCVAreaDictionary.TextUtility", "w") as file: + file.write(self._tcvAreaDictionaryContents) + + self._installDictionary() + + def _installDictionary(self): + from subprocess import call + if AWIPS_ENVIRON == "AWIPS1": + call([self._gfeHome + "/bin/ifpServerText", + "-h", self._gfeServer, + "-p", self._gfePort, + "-s", + "-u", "SITE", + "-n", "TCVAreaDictionary", + "-f", "/tmp/TCVAreaDictionary.TextUtility", + "-c", "TextUtility"]) + else: + call(["/awips2/GFESuite/bin/ifpServerText", + "-o", self._siteID, + "-s", + "-u", "SITE", + "-n", "TCVAreaDictionary", + "-f", "/tmp/TCVAreaDictionary.TextUtility", + "-c", "TextUtility"]) + + def _getZones(self): + editAreasFilename = "gfe/combinations/EditAreas_PublicZones_" + \ + self._siteID + ".py" + zonesKey = "Zones_" + self._siteID + + editAreasFileContents = self._getFileContents(LocalizationType.CAVE_STATIC, + LocalizationLevel.CONFIGURED, + self._siteID, + editAreasFilename) + exec(editAreasFileContents) + + # EASourceMap comes from the EditAreas file + return EASourceMap[zonesKey] + + def _getFileContents(self, loctype, loclevel, locname, filename): + pathManager = PathManagerFactory.getPathManager() + context = pathManager.getContext(loctype, loclevel) + context.setContextName(locname) + localizationFile = pathManager.getLocalizationFile(context, filename) + with File(localizationFile.getFile(), filename, 'r') as pythonFile: + fileContents = pythonFile.read() + + return fileContents + + def _formatDictionary(self, dictionary, tabLevel, output=""): + TAB = " " * 4 + + for key in dictionary: + output += TAB*tabLevel + repr(key) + ": " + + value = dictionary[key] + if type(value) is dict: + output += "{\n" + output = self._formatDictionary(value, tabLevel+1, output) + output += 
TAB*tabLevel + "},\n" + elif type(value) is list: + output += "[\n" + output = self._formatList(value, tabLevel+1, output) + output += TAB*tabLevel + "],\n" + else: + output += repr(value) + ",\n" + + return output + + def _formatList(self, theList, tabLevel, output=""): + TAB = " " * 4 + + for value in theList: + if type(value) is dict: + output += TAB*tabLevel + "{\n" + output = self._formatDictionary(value, tabLevel+1, output) + output += TAB*tabLevel + "},\n" + elif type(value) is list: + output += TAB*tabLevel + "[\n" + output = self._formatList(value, tabLevel+1, output) + output += TAB*tabLevel + "],\n" + else: + output += TAB*tabLevel + repr(value) + ",\n" + + return output diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Create_RFD.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Create_RFD.py index f16655a16a..9d3d05675b 100755 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Create_RFD.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Create_RFD.py @@ -1,107 +1,107 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Create_RFD.py -# -# Author: dtomalak -# Optimized by njensen -# ---------------------------------------------------------------------------- - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Populate"] -import LogStream, time -from math import * - -import time -import AbsTime -import SmartScript -## For documentation on the available commands, -## see the SmartScript Utility, which can be viewed from -## the Edit Actions Dialog Utilities window - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, editArea, timeRange, varDict): - ####CONFIGURABLE SECTION - #### - - ### added to use 14Z if morning issuance, 18Z if update - dur = 12 - startt = 12 - timelength = dur + startt - timeRange = self.createTimeRange(startt, timelength, "Zulu") - print "Delete timerange:", timeRange - self.deleteCmd(['RFDmax'], timeRange) - self.deleteCmd(['PERCENTGREEN'], timeRange) - self.deleteCmd(['RFD'], timeRange) - self.deleteCmd(['Curing'], timeRange) - self.deleteCmd(['GFDI'], timeRange) - - curTime = time.gmtime() - hour = curTime[3] - if hour < 15: - dur = 12 #HOW MANY HOURS OF DATA ARE NEEDED - startt = 12 #WHEN TO START (UTC) - else: - dur = 6 - startt = 18 - ####end of added section - - ####END CONFIGURATIONS!!!!!!!!!!!!!! 
- ############################################################ - timelength = dur + startt - timeRange = self.createTimeRange(startt,timelength ,"Zulu") -## deltimerange = self.createTimeRange(27,startt+24 ,"Zulu") -## RFDarea = self.getEditArea("Surrounding_Offices") -## self.callSmartTool("RFD_isc_to_fcst", "T", RFDarea, timeRange) -## self.callSmartTool("RFDisc_T", "T", None, timeRange) -## self.callSmartTool("RFDisc_Td", "Td", None, timeRange) -## self.callSmartTool("RFDisc_Wind", "Wind", None, timeRange) -## self.callSmartTool("RFDisc_Sky", "Sky", None, timeRange) -## self.callSmartTool("RFDisc_PoP", "PoP", None, timeRange) -## self.callSmartTool("RHTool", "RH", None, timeRange) - -############ &&&&&&&&&&&&&&&&&&& ###################### - - DB = self.findDatabase("ISC") - self.copyCmd(['T', 'Td', 'Wind', 'Sky', 'PoP', 'RH'], DB, timeRange) - self.createFromScratchCmd(['PERCENTGREEN'], timeRange) - self.callSmartTool("PERCENTGREEN", "PERCENTGREEN", None, timeRange) - self.createFromScratchCmd(['RFD'], timeRange, repeat=1, duration=1) - self.callSmartTool("CalculateRFD", "RFD", None, timeRange) - self.createFromScratchCmd(['Curing'], timeRange, repeat=1, duration=1) - self.callSmartTool("Curing_from_Green", "Curing", None, timeRange) - self.createFromScratchCmd(['GFDI'], timeRange, repeat=1, duration=1) - self.callSmartTool("Calc_GFDI", "GFDI", None, timeRange) -## self.callSmartTool("RFDmax", "RFDmax", None, timeRange) - DB = self.findDatabase("ISC") -## self.copyCmd(['T', 'Td', 'Wind', 'Sky', 'PoP'], DB, timeRange) - self.createFromScratchCmd(['RFDmax'], timeRange) - self.callSmartTool("RFDmax", "RFDmax", None, timeRange) - DB = self.findDatabase("Official") - self.copyCmd(['T', 'Td', 'Wind', 'Sky', 'PoP', 'RH'], DB, timeRange) -####Begin day 2 stuff for planning purposes - dur = 12 #HOW MANY HOURS OF DATA ARE NEEDED - startt = 12 #WHEN TO START (UTC) - timelength = dur + startt - timeRange = self.createTimeRange(startt+24,timelength+24,"Zulu") -## 
deltimerange = self.createTimeRange(startt+24, timelength+24,"Zulu") - DB = self.findDatabase("Official") - self.createFromScratchCmd(['PERCENTGREEN'], timeRange) - self.callSmartTool("PERCENTGREEN", "PERCENTGREEN", None, timeRange) - self.createFromScratchCmd(['RFD'], timeRange, repeat=1, duration=1) - self.callSmartTool("CalculateRFD", "RFD", None, timeRange) - self.createFromScratchCmd(['Curing'], timeRange, repeat=1, duration=1) - self.callSmartTool("Curing_from_Green", "Curing", None, timeRange) - self.createFromScratchCmd(['GFDI'], timeRange, repeat=1, duration=1) - self.callSmartTool("Calc_GFDI", "GFDI", None, timeRange) - self.createFromScratchCmd(['RFDmax'], timeRange) - self.callSmartTool("RFDmax", "RFDmax", None, timeRange) - - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Create_RFD.py +# +# Author: dtomalak +# Optimized by njensen +# ---------------------------------------------------------------------------- + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Populate"] +import LogStream, time +from math import * + +import time +import AbsTime +import SmartScript +## For documentation on the available commands, +## see the SmartScript Utility, which can be viewed from +## the Edit Actions Dialog Utilities window + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, editArea, timeRange, varDict): + ####CONFIGURABLE SECTION + #### + + ### added to use 14Z if morning issuance, 18Z if update + dur = 12 + startt = 12 + timelength = dur + startt + timeRange = self.createTimeRange(startt, timelength, "Zulu") + print("Delete timerange:", timeRange) + self.deleteCmd(['RFDmax'], timeRange) + self.deleteCmd(['PERCENTGREEN'], timeRange) + self.deleteCmd(['RFD'], timeRange) + self.deleteCmd(['Curing'], timeRange) + self.deleteCmd(['GFDI'], timeRange) + + curTime = time.gmtime() + hour = curTime[3] + if hour < 15: + dur = 12 #HOW MANY HOURS OF DATA ARE NEEDED + startt = 12 #WHEN TO START (UTC) + else: + dur = 6 + startt = 18 + ####end of added section + + ####END CONFIGURATIONS!!!!!!!!!!!!!! 
+ ############################################################ + timelength = dur + startt + timeRange = self.createTimeRange(startt,timelength ,"Zulu") +## deltimerange = self.createTimeRange(27,startt+24 ,"Zulu") +## RFDarea = self.getEditArea("Surrounding_Offices") +## self.callSmartTool("RFD_isc_to_fcst", "T", RFDarea, timeRange) +## self.callSmartTool("RFDisc_T", "T", None, timeRange) +## self.callSmartTool("RFDisc_Td", "Td", None, timeRange) +## self.callSmartTool("RFDisc_Wind", "Wind", None, timeRange) +## self.callSmartTool("RFDisc_Sky", "Sky", None, timeRange) +## self.callSmartTool("RFDisc_PoP", "PoP", None, timeRange) +## self.callSmartTool("RHTool", "RH", None, timeRange) + +############ &&&&&&&&&&&&&&&&&&& ###################### + + DB = self.findDatabase("ISC") + self.copyCmd(['T', 'Td', 'Wind', 'Sky', 'PoP', 'RH'], DB, timeRange) + self.createFromScratchCmd(['PERCENTGREEN'], timeRange) + self.callSmartTool("PERCENTGREEN", "PERCENTGREEN", None, timeRange) + self.createFromScratchCmd(['RFD'], timeRange, repeat=1, duration=1) + self.callSmartTool("CalculateRFD", "RFD", None, timeRange) + self.createFromScratchCmd(['Curing'], timeRange, repeat=1, duration=1) + self.callSmartTool("Curing_from_Green", "Curing", None, timeRange) + self.createFromScratchCmd(['GFDI'], timeRange, repeat=1, duration=1) + self.callSmartTool("Calc_GFDI", "GFDI", None, timeRange) +## self.callSmartTool("RFDmax", "RFDmax", None, timeRange) + DB = self.findDatabase("ISC") +## self.copyCmd(['T', 'Td', 'Wind', 'Sky', 'PoP'], DB, timeRange) + self.createFromScratchCmd(['RFDmax'], timeRange) + self.callSmartTool("RFDmax", "RFDmax", None, timeRange) + DB = self.findDatabase("Official") + self.copyCmd(['T', 'Td', 'Wind', 'Sky', 'PoP', 'RH'], DB, timeRange) +####Begin day 2 stuff for planning purposes + dur = 12 #HOW MANY HOURS OF DATA ARE NEEDED + startt = 12 #WHEN TO START (UTC) + timelength = dur + startt + timeRange = self.createTimeRange(startt+24,timelength+24,"Zulu") +## 
deltimerange = self.createTimeRange(startt+24, timelength+24,"Zulu") + DB = self.findDatabase("Official") + self.createFromScratchCmd(['PERCENTGREEN'], timeRange) + self.callSmartTool("PERCENTGREEN", "PERCENTGREEN", None, timeRange) + self.createFromScratchCmd(['RFD'], timeRange, repeat=1, duration=1) + self.callSmartTool("CalculateRFD", "RFD", None, timeRange) + self.createFromScratchCmd(['Curing'], timeRange, repeat=1, duration=1) + self.callSmartTool("Curing_from_Green", "Curing", None, timeRange) + self.createFromScratchCmd(['GFDI'], timeRange, repeat=1, duration=1) + self.callSmartTool("Calc_GFDI", "GFDI", None, timeRange) + self.createFromScratchCmd(['RFDmax'], timeRange) + self.callSmartTool("RFDmax", "RFDmax", None, timeRange) + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/DiffFromClimo.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/DiffFromClimo.py index d408707062..9731ab8a86 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/DiffFromClimo.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/DiffFromClimo.py @@ -1,286 +1,286 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# DiffFromClimo -# -# This procedure calculates MinT or MaxT grids based on the NCDC or -# PRISM climatology grids stored in a netCDF file, subtracts the result -# from the current MinT or MaxT grid and creates the difference Grid. -# -# Author: lefebvre -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -VariableList = [("Weather Element:" , "MaxT", "radio", ["MaxT", "MinT"]), - ("Climo Source:" , "PRISM", "radio", ["PRISM", "NCDC"]), - ] -MenuItems = ["Populate"] - -from numpy import * -import SmartScript -import types, copy -import time -import AbsTime -import TimeRange - -MODEL = "Fcst" -LEVEL = "SFC" - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - # Main cubic spline method that accepts a list of grids and int time - # along with a list of times for which grids are to be calculated. - # This method returns the corresponding list of grids that matches the - # interpTimes list. 
- def _cubicSpline(self, grids, times, interpTimes): - - # STEP 1: Create coefficients for cubic spline curve - # zCoefs : List of cubic spline coefficient grids computed to fit the - # curve defined by grids and times - # n : length of grids - 1. - # Determine coefficients - if grids == []: - print "No grids sent to _cublicSpline. No grids returned" - return - gridShape = shape(grids[0]) - - timeGrids = [] - for t in times: - tGrid = zeros(gridShape) + t - timeGrids.append(tGrid) - - n = len(grids) - 1 - zCoefs = self._spline3_coef(n, timeGrids, grids) - - # Create interpolated grids using coefficients - # interpTimes : List of times for which we want interpolated grids - # gridList : List of interpolated Grids - - # Create interpolated grids - gridList = [] - for interpTime in interpTimes: - x = zeros(gridShape) + interpTime # make a grid of times - xGrid = self._spline3_eval(n, timeGrids, grids, zCoefs, x) - gridList.append(xGrid) - - return gridList - - # This method calculates the spline coefficients that are later used to - # calculate grids at the interpolation times. This method is just a helper - # method to _cubicSpline and should not be called directly. 
- def _spline3_coef(self, n, t, y): - gridShape = y[0].shape - # These will get filled in later with grids as values - # They are just place holders - h=[0] * n - b=[0] * n - u=[0] * n - v=[0] * n - z=[0] * (n+1) - # Calculate h and b - # range 0 thru n-1 - for i in xrange(n): - h[i] = t[i+1] - t[i] - b[i] = (y[i+1] - y[i])/h[i] - # Calculate u and v as functions of h and b - # range 1 thru n-1 - u[1] = (2*(h[0] + h[1])) - v[1] = (6*(b[1]-b[0])) - for i in xrange(2, n): - u[i] = (2.0*(h[i]+h[i-1]) - h[i-1].astype(float32)**2.0/u[i-1]) - v[i] = (6.0*(b[i]-b[i-1]) - h[i-1]*v[i-1]/u[i-1]) - # Calculate z - # range 0 thru n - z[n] = zeros(gridShape) - for i in xrange(n-1, 0, -1): - if type(u[i]) is types.IntType: - print "u[i] is IntType!", i - z[i] = (v[i] - h[i]*z[i+1])/u[i] - z[0] = zeros(gridShape) - return z - - # This method accepts the spline coefficients and calculates a grid. - # This method is a help method to _cubicSpline and should not be - # called directly - def _spline3_eval(self, n, t, y, z, x): - for i in xrange(n-1, 0, -1): - if x[0][0]-t[i][0][0] >= 0: - break - h = t[i+1]-t[i] - tmp = (z[i]/2) + (x-t[i]) * (z[i+1]-z[i])/(6*h) - tmp = -(h/6)*(z[i+1]+2*z[i]) + (y[i+1]-y[i])/h + (x-t[i]) * tmp - - return y[i] + (x-t[i]) * tmp - - - ## - # Get the list of time ranges at the grid whose element name is WEName - # contains grids. The level of the weather element is assumed to be LEVEL. - # - # @param dbName: Name of the database to consult - # @type dbName: string - # @param WEName: Name of a weather element - # @type WEName: string - # @param timeRange: Limits of time range of interest, or None for all times - # @type timeRange: Java TimeRange or Nonetype - # @return: time ranges at which WEName has data. 
- # @rtype: Python list of Python TimeRange objects - def _getWEInventory(self, dbName, WEName, timeRange=None): - # set up a timeRange if it is None - if timeRange is None: - timeRange = TimeRange.allTimes() - parm = self.getParm(dbName, WEName, LEVEL) - if parm is None: - print "PFC: dbName =", dbName - print "PFC: WEName =", WEName - print "PFC: parm is None" - return [] - inv = parm.getGridInventory(timeRange.toJavaObj()) - if inv is None: self.statusBarMsg("inv is None","S") - elif len(inv)==0: print self.statusBarMsg("PFC: len(inv)==0","S") - trList = [] - for gd in inv: - tr = TimeRange.TimeRange(gd.getGridTime()) - trList.append(tr) - return trList - - def gridRound(self,grid,mode="Nearest",digits=0): - if mode not in ("RoundUp","RoundDown","Nearest"): - raise TypeError("mode is invalid:" + mode) - if mode=="Nearest": - rgrid=around(grid,digits) - else: - digitvalue=10.0**(-int(digits)) - sign=abs(grid)/grid - rgrid=((grid/digitvalue).astype(Int)*digitvalue) - - if mode=="RoundUp": - rgrid+=(sign*digitvalue) - - return rgrid - - # This main method retrieves the climatology grids, assigns - # appropriate times to each and calls the _cubicSpline method - # to calculate the grid values inbetween the given climatology - # grids. This methods creates grids of MinT or MaxT over the - # timeRange selected in the GridManager. 
- def execute(self, timeRange, varDict): - - # get the climo source - parmName= varDict["Weather Element:"] - climoSource = varDict["Climo Source:"] - - # get times for all the grids that overlap the selected time range - if timeRange is None: - gridTimes = [] - else: - startTime = timeRange.startTime() - hours = timeRange.duration() / 3600 - someTimeRange, gridTimes = self.getGridTimes(MODEL, parmName, LEVEL, - timeRange.startTime(), hours) - if len(gridTimes) == 0: - self.statusBarMsg("Please select a MinT or MaxT timeRange before running the tool.", "S") - return # can't go on - - # make a list of AbsTimes from the parmName times - interpTimes = [] - baseTime = gridTimes[0].startTime() - for g in gridTimes: - interpTimes.append(g.startTime().unixTime()) - - siteID = self.getSiteID() - # get all of the grids from the climo database - dbName = siteID + "_D2D_" + climoSource + "Climo" - - if parmName == "MaxT": - weName = "mxt" - elif parmName == "MinT": - weName = "mnt" - else: - print "Invalid parmName:", parmName - return - - # get the climo grid inventory - trList = self._getWEInventory(dbName, weName) - if len(trList) == 0: - self.statusBarMsg("No climatology grids available for " + parmName, "S") - return # can't go on - - # Figure out what year it is - - currentTime = AbsTime.current().unixTime() - jan01Tuple = (time.gmtime(currentTime)[0],1,1,0,0,0,0,0,0) # 01 Jan this year - jan01Secs = time.mktime(jan01Tuple) # 01 Jan in seconds - - # Fetch the grids from the climo database, but warp the times - # so that they are set to this year. - gridList = [] - times = [] - for tr in trList: - grid = self.getGrids(dbName, weName, LEVEL, tr) - gridList.append(grid) - times.append(tr.startTime().unixTime() + jan01Secs) - - # tack on the Dec. at the beginning and the Jan at the end so - # calculations from Dec 15 to Jan 15 are correct. 
- gridList.insert(0, gridList[-1]) # prepend the last grid - gridList.append(gridList[1]) # append what was the first grid - - days31 = 31 * 24 * 3600 # the number of seconds in 31 days - times.insert(0, times[0] - days31) # 15 Dec the previous year - times.append(times[-1] + days31) # 15 Jan the next year - - interpGrids = self._cubicSpline(gridList, times, interpTimes) - - parm = self.getParm(MODEL, parmName, LEVEL) - parmInfo = parm.getGridInfo() - maxLimit = parmInfo.getMaxValue() - minLimit = parmInfo.getMinValue() - precision = parmInfo.getPrecision() - - # get the Fcst grid inventory - fcstInv = self._getWEInventory(MODEL, parmName) - - for i in xrange(len(gridTimes)): - # convert K to F first - climoGrid = self.KtoF(interpGrids[i]) - climoGrid = clip(climoGrid, minLimit, maxLimit) # clip to min/max limits - # round the grid to the precision of the parm - climoGrid = self.gridRound(climoGrid, "Nearest", precision) - - - if gridTimes[i] in fcstInv: - fcstGrid = self.getGrids(MODEL, parmName, LEVEL, gridTimes[i]) - fcstGrid = self.gridRound(fcstGrid, "Nearest", precision) - - diffGrid = fcstGrid - climoGrid - gridName = parmName + "DiffFromClimo" - self.createGrid(MODEL, gridName, "SCALAR", diffGrid, gridTimes[i], - minAllowedValue=-200.0, maxAllowedValue=200.0) - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# DiffFromClimo +# +# This procedure calculates MinT or MaxT grids based on the NCDC or +# PRISM climatology grids stored in a netCDF file, subtracts the result +# from the current MinT or MaxT grid and creates the difference Grid. +# +# Author: lefebvre +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +VariableList = [("Weather Element:" , "MaxT", "radio", ["MaxT", "MinT"]), + ("Climo Source:" , "PRISM", "radio", ["PRISM", "NCDC"]), + ] +MenuItems = ["Populate"] + +from numpy import * +import SmartScript +import types, copy +import time +import AbsTime +import TimeRange + +MODEL = "Fcst" +LEVEL = "SFC" + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + # Main cubic spline method that accepts a list of grids and int time + # along with a list of times for which grids are to be calculated. + # This method returns the corresponding list of grids that matches the + # interpTimes list. + def _cubicSpline(self, grids, times, interpTimes): + + # STEP 1: Create coefficients for cubic spline curve + # zCoefs : List of cubic spline coefficient grids computed to fit the + # curve defined by grids and times + # n : length of grids - 1. + # Determine coefficients + if grids == []: + print("No grids sent to _cublicSpline. 
No grids returned") + return + gridShape = shape(grids[0]) + + timeGrids = [] + for t in times: + tGrid = zeros(gridShape) + t + timeGrids.append(tGrid) + + n = len(grids) - 1 + zCoefs = self._spline3_coef(n, timeGrids, grids) + + # Create interpolated grids using coefficients + # interpTimes : List of times for which we want interpolated grids + # gridList : List of interpolated Grids + + # Create interpolated grids + gridList = [] + for interpTime in interpTimes: + x = zeros(gridShape) + interpTime # make a grid of times + xGrid = self._spline3_eval(n, timeGrids, grids, zCoefs, x) + gridList.append(xGrid) + + return gridList + + # This method calculates the spline coefficients that are later used to + # calculate grids at the interpolation times. This method is just a helper + # method to _cubicSpline and should not be called directly. + def _spline3_coef(self, n, t, y): + gridShape = y[0].shape + # These will get filled in later with grids as values + # They are just place holders + h=[0] * n + b=[0] * n + u=[0] * n + v=[0] * n + z=[0] * (n+1) + # Calculate h and b + # range 0 thru n-1 + for i in range(n): + h[i] = t[i+1] - t[i] + b[i] = (y[i+1] - y[i])/h[i] + # Calculate u and v as functions of h and b + # range 1 thru n-1 + u[1] = (2*(h[0] + h[1])) + v[1] = (6*(b[1]-b[0])) + for i in range(2, n): + u[i] = (2.0*(h[i]+h[i-1]) - h[i-1].astype(float32)**2.0/u[i-1]) + v[i] = (6.0*(b[i]-b[i-1]) - h[i-1]*v[i-1]/u[i-1]) + # Calculate z + # range 0 thru n + z[n] = zeros(gridShape) + for i in range(n-1, 0, -1): + if type(u[i]) is int: + print("u[i] is IntType!", i) + z[i] = (v[i] - h[i]*z[i+1])/u[i] + z[0] = zeros(gridShape) + return z + + # This method accepts the spline coefficients and calculates a grid. 
+ # This method is a help method to _cubicSpline and should not be + # called directly + def _spline3_eval(self, n, t, y, z, x): + for i in range(n-1, 0, -1): + if x[0][0]-t[i][0][0] >= 0: + break + h = t[i+1]-t[i] + tmp = (z[i]/2) + (x-t[i]) * (z[i+1]-z[i])/(6*h) + tmp = -(h/6)*(z[i+1]+2*z[i]) + (y[i+1]-y[i])/h + (x-t[i]) * tmp + + return y[i] + (x-t[i]) * tmp + + + ## + # Get the list of time ranges at the grid whose element name is WEName + # contains grids. The level of the weather element is assumed to be LEVEL. + # + # @param dbName: Name of the database to consult + # @type dbName: string + # @param WEName: Name of a weather element + # @type WEName: string + # @param timeRange: Limits of time range of interest, or None for all times + # @type timeRange: Java TimeRange or Nonetype + # @return: time ranges at which WEName has data. + # @rtype: Python list of Python TimeRange objects + def _getWEInventory(self, dbName, WEName, timeRange=None): + # set up a timeRange if it is None + if timeRange is None: + timeRange = TimeRange.allTimes() + parm = self.getParm(dbName, WEName, LEVEL) + if parm is None: + print("PFC: dbName =", dbName) + print("PFC: WEName =", WEName) + print("PFC: parm is None") + return [] + inv = parm.getGridInventory(timeRange.toJavaObj()) + if inv is None: self.statusBarMsg("inv is None","S") + elif len(inv)==0: print(self.statusBarMsg("PFC: len(inv)==0","S")) + trList = [] + for gd in inv: + tr = TimeRange.TimeRange(gd.getGridTime()) + trList.append(tr) + return trList + + def gridRound(self,grid,mode="Nearest",digits=0): + if mode not in ("RoundUp","RoundDown","Nearest"): + raise TypeError("mode is invalid:" + mode) + if mode=="Nearest": + rgrid=around(grid,digits) + else: + digitvalue=10.0**(-int(digits)) + sign=abs(grid)/grid + rgrid=((grid/digitvalue).astype(Int)*digitvalue) + + if mode=="RoundUp": + rgrid+=(sign*digitvalue) + + return rgrid + + # This main method retrieves the climatology grids, assigns + # appropriate times to each 
and calls the _cubicSpline method + # to calculate the grid values inbetween the given climatology + # grids. This methods creates grids of MinT or MaxT over the + # timeRange selected in the GridManager. + def execute(self, timeRange, varDict): + + # get the climo source + parmName= varDict["Weather Element:"] + climoSource = varDict["Climo Source:"] + + # get times for all the grids that overlap the selected time range + if timeRange is None: + gridTimes = [] + else: + startTime = timeRange.startTime() + hours = timeRange.duration() / 3600 + someTimeRange, gridTimes = self.getGridTimes(MODEL, parmName, LEVEL, + timeRange.startTime(), hours) + if len(gridTimes) == 0: + self.statusBarMsg("Please select a MinT or MaxT timeRange before running the tool.", "S") + return # can't go on + + # make a list of AbsTimes from the parmName times + interpTimes = [] + baseTime = gridTimes[0].startTime() + for g in gridTimes: + interpTimes.append(g.startTime().unixTime()) + + siteID = self.getSiteID() + # get all of the grids from the climo database + dbName = siteID + "_D2D_" + climoSource + "Climo" + + if parmName == "MaxT": + weName = "mxt" + elif parmName == "MinT": + weName = "mnt" + else: + print("Invalid parmName:", parmName) + return + + # get the climo grid inventory + trList = self._getWEInventory(dbName, weName) + if len(trList) == 0: + self.statusBarMsg("No climatology grids available for " + parmName, "S") + return # can't go on + + # Figure out what year it is + + currentTime = AbsTime.current().unixTime() + jan01Tuple = (time.gmtime(currentTime)[0],1,1,0,0,0,0,0,0) # 01 Jan this year + jan01Secs = time.mktime(jan01Tuple) # 01 Jan in seconds + + # Fetch the grids from the climo database, but warp the times + # so that they are set to this year. + gridList = [] + times = [] + for tr in trList: + grid = self.getGrids(dbName, weName, LEVEL, tr) + gridList.append(grid) + times.append(tr.startTime().unixTime() + jan01Secs) + + # tack on the Dec. 
at the beginning and the Jan at the end so + # calculations from Dec 15 to Jan 15 are correct. + gridList.insert(0, gridList[-1]) # prepend the last grid + gridList.append(gridList[1]) # append what was the first grid + + days31 = 31 * 24 * 3600 # the number of seconds in 31 days + times.insert(0, times[0] - days31) # 15 Dec the previous year + times.append(times[-1] + days31) # 15 Jan the next year + + interpGrids = self._cubicSpline(gridList, times, interpTimes) + + parm = self.getParm(MODEL, parmName, LEVEL) + parmInfo = parm.getGridInfo() + maxLimit = parmInfo.getMaxValue() + minLimit = parmInfo.getMinValue() + precision = parmInfo.getPrecision() + + # get the Fcst grid inventory + fcstInv = self._getWEInventory(MODEL, parmName) + + for i in range(len(gridTimes)): + # convert K to F first + climoGrid = self.KtoF(interpGrids[i]) + climoGrid = clip(climoGrid, minLimit, maxLimit) # clip to min/max limits + # round the grid to the precision of the parm + climoGrid = self.gridRound(climoGrid, "Nearest", precision) + + + if gridTimes[i] in fcstInv: + fcstGrid = self.getGrids(MODEL, parmName, LEVEL, gridTimes[i]) + fcstGrid = self.gridRound(fcstGrid, "Nearest", precision) + + diffGrid = fcstGrid - climoGrid + gridName = parmName + "DiffFromClimo" + self.createGrid(MODEL, gridName, "SCALAR", diffGrid, gridTimes[i], + minAllowedValue=-200.0, maxAllowedValue=200.0) + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ERQCcheck.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ERQCcheck.py index a4124c0ecf..446ced5f95 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ERQCcheck.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ERQCcheck.py @@ -1,603 +1,603 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. 
+## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ERQCcheck -# -# Authors: Matthew H. Belk WFO Taunton, MA Created: 04/25/03 -# Thomas R. Mazza WFO Charleston, WV Last Modified: 3/29/06 -# Some of the modules used by this procedure were edited from modules -# originally written by Bob Stauber, Steve Nelson, Jim Hayes, Paul -# Jendrowski and Tom LeFebvre. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import time -import AbsTime - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify -MenuItems = ["Consistency"] -import LogStream, time - -# The ToolList is optional, but recommended, if you are calling -# Smart Tools from your Script. -# If present, it can be used to show which grids will be -# modified by the Script. - -ToolList = [] - -################################################################################# -# -# -# Control the weather element groups available: -# -availableElementGroups = ["Public", "Fire Weather"] -#availableElementGroups = ["Public", "Fire Weather", "Marine"] -# -# Marine can be eliminated for inland sites. - - -VariableList = [] - ################################################################# - # - # get time range to check - -VariableList.append(("Use Selected Time Range from the Grid Manager ?", "Y", "radio", - ["N", "Y"])) -VariableList.append(("If not, Pick period to start with here:", "Today", "radio", - ["Today (00Z Cycle Only)", "Tonight", "Tomorrow", "Tomorrow Night", "Next Day", "Next Day Night", - "Day3 day", "Day3 night", "Day4", "Day5", "Day6", "Day7"])) -VariableList.append(("...and to end with here:", "Day7", "radio", - ["Today (00Z Cycle Only)", "Tonight", "Tomorrow", "Tomorrow Night", "Next Day", "Next Day Night", - "Day3 day", "Day3 night", "Day4", "Day5", "Day6", "Day7"])) -VariableList.append(("00Z or 12Z cycle (don't use Today if 12Z) ?", "Auto", "radio", - ["Auto", "00Z cycle", "12Z cycle"])) -VariableList.append(("", "", "label")) - - ################################################################# - # - # get elements to check - -VariableList.append(("All (Overrides other choices if not No)", "No", "radio", - ["No", "Highlight only", "Fix All"])) -VariableList.append(("NDFD Grid Check (Checks all elements all 7 days)", "No", "radio", - ["No", "Yes"])) -VariableList.append(("Which element group(s)?" 
, - ["Public"], "check", - availableElementGroups, - )) -VariableList.append(("For each element, choose No not to check, Highlight only to highlight inconsistencies, \ -and Fix to actually fix inconsistencies.", "", "label")) -##VariableList.append(("Checks for Temperatures and Wind Gusts (Checks all 7 days)..." , "", "label")) -VariableList.append(("Checks for Temperatures and Wind Gusts (Checks all 7 days), and for Sky, PoP, Wx, QPF and SnowAmt :", "", "label")) -VariableList.append(("Temperatures", "No", "radio", - ["No", "Highlight only", "Fix"])) -VariableList.append(("Wind Gusts", "No", "radio", - ["No", "Highlight only", "Fix"])) -##VariableList.append(("Zero out wind gusts not in excess of sustained wind by more than:" , "5", "numeric")) -##VariableList.append(("Minimum wind gust to report:" , "15", "numeric")) -##VariableList.append(("Checks for Sky, PoP, Wx, QPF and SnowAmt...", "", "label")) -VariableList.append(("CheckSkyWithPoP", "No", "radio", - ["No", "Highlight only", "Fix"])) -VariableList.append(("Sky vs PoP Relationship:", "add", "radio", - ["add", "multiply", "Sky Limit"])) -##VariableList.append(("CheckPoPwithSky", "No", "radio", -## ["No", "Highlight only", "Fix"])) -##VariableList.append(("CheckPoPwithWx", "No", "radio", -## ["No", "Highlight only", "Fix"])) -##VariableList.append(("CheckWxWithPoP", "No", "radio", -## ["No", "Highlight only", "Fix"])) -##VariableList.append(("NoPoPNoQPF", "No", "radio", -## ["No", "Highlight only", "Fix"])) -##VariableList.append(("NoPoPNoSnowAmt", "No", "radio", -## ["No", "Highlight only", "Fix"])) -##VariableList.append(("Run PPI", "Yes", "radio", ## For offices doing Precipitation Probability Index images for the web -## ["No", "Yes"])) ## (Also uncomment the two PPI sections near the bottom) -VariableList.append(("For wind gusts :", "", "label")) -VariableList.append(("Limit wind gusts in excess of sustained wind by:", "12", "numeric")) -VariableList.append(("Make wind gusts in excess of sustained wind 
by factor of at least:", "1.0", "numeric")) -VariableList.append(("For Sky and PoP :", "", "label")) -VariableList.append(("For add, multiply (smaller factor), by how much ?" , "20", "numeric")) -VariableList.append(("For Sky Limit, only Sky less than Limit affected; it is raised to the Limit:", "", "label")) -VariableList.append(("Enter Sky Limit: the minimum Sky cover needed to support Wx:" , 60, "numeric")) -##VariableList.append(("Enter minimum PoP for measurable precip:", 15, "numeric")) -VariableList.append(("Enter Sky cover for 5% PoP:" , 30, "numeric")) -VariableList.append(('For checks between QPF, SnowAmt, PoP and Wx, if "Cleanup" is selected, then\nonly cleanup actions will run. No checks will be made, regardless of the above settings.', '', 'label')) -VariableList.append(('Check_Cleanup', 'Check', 'radio', ['Check', 'Cleanup'])) -VariableList.append(('Run SnowAmt/QPF Check?', ['Yes'], 'check', ['Yes'])) -VariableList.append(('Run SnowAmt/Wx Check?', ['Yes'], 'check', ['Yes'])) -VariableList.append(('Run QPF/PoP Check?', ['Yes'], 'check', ['Yes'])) -VariableList.append(('Run QPF/Wx Check?', ['Yes'], 'check', ['Yes'])) - -# Procedures and Tools used in QCcheck with credits -# -# In this table, Procedures and Tools used by a Procedure -# are indented once (4 spaces), Procedures and Tools used -# by a Procedure used by a Procedure indented again, etc. -# -# Tools with two pound signs (##) at the beginning of the -# are baseline tools used by ERQCcheck, and are listed -# here for reference only. 
-# -# Procedure or Tool Procedure Tool (Wx Credit -# Element -# Edited) -# -# ERQCcheck Procedure Nelson/Mazza -# NDFDgridCheck Procedure Hayes, James -# CheckTemepratues Procedure LeFebvre, Tom -## RHTool RH -## WindChillTool WindChill -## HeatIndexTool HeatIndex -# CheckWindGust Procedure LeFebvre/Mazza -# CheckSkyWithPoP Sky Nelson, Steve -# CheckPoPwithWx PoP Nelson -# CheckWx Wx Nelson -# CheckQPF QPF -# CheckSnowAmt SnowAmt -# EnufCloudForPoP Sky Mazza -# ForcePoPtoWx PoP -# PoP12hrFmMaxPoP PoP12hr Mazza -# NoPoPNoQPF QPF Mazza -# NoPoPNoSnowAmt SnowAmt Mazza -# -# The following tools and procedures are no longer used by -# ERQCcheck since Tom LeFebvre's CheckTandTd. This reduces -# the total number of Procedures and Tools involved from 31 to 14. -# These Tools and Procedures can safely be removed from your local GFE. -# -# CheckMaxTvsMinTvsMaxT Procedure Stauber/Mazza -# MakeTemporaryMinT MinT Stauber, Bob -# TempMinTfmMinT MinT Stauber/Mazza -# CheckMinTagainstMaxT TempMinT Stauber/Mazza -# CheckMaxTagainstMinT MaxT Stauber -# CheckTagainstMaxTandMinT Procedure Stauber/Mazza -# CheckTagainstMinT T Stauber -# CheckTd Td Stauber -# CheckTagainstMaxT T Stauber -# MaxTvsMinTvsMaxT Procedure Stauber/Mazza -## MakeTemporaryMinT MinT Stauber -## TempMinTfmMinT MinT Stauber -# MinTaobMaxT TempMinT Stauber -# MinTfrTempMinT MinT Stauber -# MaxTaoaMinT MaxT Stauber -# ForceTbetweenMaxTandMinT Procedure Stauber/Mazza -# MakeTaoaMinT T Stauber -# MakeTaobMaxT T Stauber -# TdLessTbyUsrAmt Td Mazza, Thomas -# CheckTd Td - - -import SmartScript -## For documentation on the available commands, -## see the SmartScript Utility, which can be viewed from -## the Edit Actions Dialog Utilities window - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, editArea, timeRange, varDict): - # Checks chosen dependent elements in GFE against appropriate independent elements - 
self.logProcUse("Procedure execution") - - #################################################################################### - # - # Configuration Section - # - # Zulu time threshold for procedure to begin using 12Z as the cycle - # (periods are pushed one day ahead, thereby adding a new day to the forecast) - # - start12Zcycle = 12 - # - # Zulu time for beginning of daytime period, e.g., at what zulu hour do periods - # like "Today", "Tomorrow" and "Day5" begin - # - startDayPeriod = 12 - # - # Difference in hours between zulu time and local standard time (zulu - local) - # - localStandardOffset = 5 - # - # Zulu time for beginning of MaxT grid - # - maxTstartTimeZ = 12 - # - # Duration of MaxT grid - # - maxTdur = 12 - # - # Zulu time for beginning of MinT grid - # - minTstartTimeZ = 0 - # - # Duration of MinT grid - # - minTdur = 13 - # - QCarea = self.getEditArea("ISC_Send_Area") - # - GustLessSusMin = 5 # Zero out wind gusts not in excess of sustained wind by more than - # - minGust = 15 # Minimum wind gust to report - # - TemperatureProc = "CheckTandTd" - # I set this up so that if we chnage the name of CheckTandTd.Procedure, we could - # just chnage it here instead of every opccurence within this procedure. - #TemperatureProc = "CheckTemperatures" - WindGustProc = "CheckWindGust" - startHeatIndex = 4 ## First month to report heat index - endHeatIndex = 10 ## Last month to report heat index - startWindChill = 10 ## First month to report wind chill - endWindChill = 4 ## Last month to report wind chill - # - # End Configuration Section - # - #################################################################################### - - Allareas = editArea ## Hold all areas when procedure invoked w/o wedit area selected. 
- ## This is necessary for grids created from scratch - -## areaFmProc = self.getEditArea(varDict["savedEditArea"]) -## procAreaMask = self.encodeEditArea(areaFmProc) - # Get user chosen time range - - deltaStartDay = startDayPeriod - 12 - varDict["Start 12Z cycle at zulu time"] = start12Zcycle - varDict["Hours past 12Z to start day periods"] = deltaStartDay - varDict["Local standard offset"] = localStandardOffset - varDict["MaxT start time"] = maxTstartTimeZ - varDict["MaxT duration"] = maxTdur - varDict["MinT start time"] = minTstartTimeZ - varDict["MinT duration"] = minTdur - - now = time.localtime(time.time()) - nowZ = time.gmtime(time.time()) - if now[8] == 1: edton = 1 ## Lying on an eiderdown - else: edton = 0 - cycle = varDict["00Z or 12Z cycle (don't use Today if 12Z) ?"] - if cycle == "Auto": - if now[3] >= start12Zcycle: - ## if now[3] > 12 and now[3] - edton < 19: - cycle = "12Z cycle" - else: - cycle = "00Z cycle" - - if varDict["Use Selected Time Range from the Grid Manager ?"] == "Y": - - self.p_timeHeader = time.strftime("%Z", time.localtime(time.time())) - timeHeader = time.strftime("%Z", time.localtime(time.time())) - - present = AbsTime.current() - - timeHeader = self.p_timeHeader - - today = AbsTime.absTimeYMD(present.year, present.month, present.day) - startTimeOffset = (timeRange.startTime() - today) / 3600 - endTimeOffset = (timeRange.endTime() - today) / 3600 -## print startTimeOffset, endTimeOffset - SubtractADay = 0 -## if now[3] - edton > 23 - localStandardOffset: -## SubtractADay = 24 - starth = startTimeOffset - SubtractADay - endh = endTimeOffset - SubtractADay -## print "ERQC: now[3], starth, endh:", now[3], starth, endh - - else: - - startPeriod = varDict["If not, Pick period to start with here:"] - endPeriod = varDict["...and to end with here:"] - - startEnd = [("Today (00Z Cycle Only)", 12, 24), ("Tonight", 24, 36), ("Tomorrow", 36, 48), ("Tomorrow Night", 48, 60), - ("Next Day", 60, 72), ("Next Day Night", 72, 84), ("Day3 day", 
84, 96), ("Day3 night", 96, 108), - ("Day4", 108, 132), ("Day5", 132, 156), ("Day6", 156, 180), ("Day7", 180, 205) - ] - - for i in xrange(len(startEnd)): - - period, startHour, endHour = startEnd[i] - - ################################################################################################ - # - # Now determine the time period chosen - - if period == startPeriod: - starth = startHour - h = starth - if period == endPeriod: - endh = endHour - -#### if starth < firsth: -#### firsth = starth -#### if endh > finalh: -#### finalh = endh - - ################################################################################################ - # - # Handle exceptions - - if endPeriod == "Day6" and cycle == "00Z cycle": - endh = 181 - finalNight = 1 - - if endPeriod == "Day7" or endPeriod == "Day8": - finalNight = 1 - - if now[3] - edton > 23 - localStandardOffset: - starth -= 24 - if starth < 0: - starth = 0 - endh -= 24 - if endh < 0: - endh = 11 - - - starth -= deltaStartDay - endh -= deltaStartDay - - timeRange = self.createTimeRange(starth, endh, "Zulu") - - ######################################################################## - # - # What follows here is a little louie that involves calling other - # procedures, namely CheckTagainstMaxTandMinT and - # ForceTbetweenMaxTandMinT. These procedures repeat the timeRange - # logic above, so they could be run independently. But this means, for - # using the selected time range, where subtracting one from the end time - # is necessary to effect the proper time range, this hour needs added - # again before calling one of these procedures, to avoid the duplicate - # subtraction. 
- - timeRangePlusOneHr = self.createTimeRange(starth, endh + 1, "Zulu") - -################################################################################ -# -# Period Today Tonight Tomorrow Tomorrow Next Day Next Day -# Night Night -# -# starth 12 24 36 48 60 72 -# endH 23 35 47 59 71 83 -# -# Period Day4 Day5 Day6 Day7 -# -# starth 84 108 132 156 -# endH 107 131 155 181 -# -################################################################################ - - # Define variables for wind gust check: -## varDict["Wind Gust QC:"] = ["Ensure gusts where >=10kts Sustained", "Zero out where minimal difference"] -## varDict["Zero out wind gusts not in excess of sustained wind by more than:"] = GustLessSusMin -## varDict["Minimum wind gust to report:"] = minGust - - # Define a list of elements for which to create grids if necessary -## makeList = ['RH', 'HeatIndex', 'WindChill', 'PPI'] -## makeList = ['RH''PPI'] - makeList = [] - makeList.append(("RH")) - makeList.append(("PPI")) - curMon = nowZ[1] - - if curMon >= startWindChill or curMon <= endWindChill: - makeList.append(('WindChill')) - if curMon >= startHeatIndex and curMon <= endHeatIndex: - makeList.append(('HeatIndex')) - - # Get list of SmartTools to run - all = varDict["All (Overrides other choices if not No)"] - if all == "Highlight only": -## varDict["Which element group(s)?"] = ["Public", "Fire Weather", "Marine"], - self.callProcedure("NDFDgridCheck", - timeRange=timeRange, varDict=varDict, editArea=QCarea) - varDict["Check or Force:"] = "Check Only" - self.callProcedure(TemperatureProc, - varDict=varDict, editArea=QCarea) - self.callProcedure(WindGustProc, - varDict=varDict, editArea=QCarea) -## ToolList.append(("WindGustQC", "WindGust")) - ToolList.append(("CheckSkyWithPoP", "PoP")) -## ToolList.append(("CheckPoPwithSky", "PoP")) -# ToolList.append(("CheckPoPwithWx", "PoP")) -# ToolList.append(("CheckWx", "Wx")) -# ToolList.append(("CheckQPF", "QPF")) -# ToolList.append(("CheckQPF", "QPF6hr")) -# 
ToolList.append(("CheckQPF", "QPF12hr")) -# ToolList.append(("CheckSnowAmt", "SnowAmt")) -# ToolList.append(("CheckSnowAmt", "SnowAmt6hr")) -# ToolList.append(("CheckSnowAmt", "SnowAmt12hr")) - - elif all == "Fix All": -## elif all == "Fix All (Force Sky to PoP)" or all == "Fix All (Force PoP to Sky)": -## varDict["Which element group(s)?"] = ["Public", "Fire Weather", "Marine"], - self.callProcedure("NDFDgridCheck", - timeRange=timeRange, varDict=varDict, editArea=QCarea) - varDict["Check or Force:"] = "Force: TMin<=T<=TMax\n and Td<=T" - self.callProcedure(TemperatureProc, - varDict=varDict, editArea=QCarea) - varDict["Check or Force:"] = "Force: WindGust>=Wind" - self.callProcedure(WindGustProc, - varDict=varDict, editArea=QCarea) -## ToolList.append(("WindGustQC", "WindGust")) - ToolList.append(("EnufCloudForPoP", "Sky")) -## if all == "Fix All (Force Sky to PoP)": -## ToolList.append(("EnufCloudForPoP", "Sky")) -## elif all == "Fix All (Force PoP to Sky)": -## ToolList.append(("ForcePoPtoSky", "PoP")) -# ToolList.append(("ForcePoPtoWx", "PoP")) -# ToolList.append(("WxCovMatchPoP", "Wx")) -# ToolList.append(("PoP12hrFmMaxPoP", "PoP12hr")) -# ToolList.append(("NoPoPNoQPF", "QPF")) -# ToolList.append(("QPF6hrFmQPFsum", "QPF6hr")) -# ToolList.append(("QPF12hrFmQPFsum", "QPF12hr")) -# ToolList.append(("NoPoPNoSnowAmt", "SnowAmt")) -## self.createFromScratchCmd(["PPI"], timeRange, # (Also uncomment out Variablelist.append lines at top) -## repeat=1, duration=1) -## ToolList.append(("PPIfmPoP", "PPI")) # For offices doing Precipitation Probability Index images for the web -# ToolList.append(("SnowAmt6hrFmSnowAmt", "SnowAmt6hr")) -# ToolList.append(("SnowAmt12hr", "SnowAmt12hr")) - - else: - if varDict["NDFD Grid Check (Checks all elements all 7 days)"] == "Yes": -## varDict["Which element group(s)?"] = ["Public", "Fire Weather", "Marine"], - self.callProcedure("NDFDgridCheck", - timeRange=timeRange, varDict=varDict, editArea=QCarea) - if varDict["Temperatures"] == 
"Highlight only": - varDict["Check or Force:"] = "Check Only" - self.callProcedure(TemperatureProc, - varDict=varDict, editArea=QCarea) - elif varDict["Temperatures"] == "Fix": - varDict["Check or Force:"] = "Force: TMin<=T<=TMax\n and Td<=T" - self.callProcedure(TemperatureProc, - varDict=varDict, editArea=QCarea) - if varDict["Wind Gusts"] == "Highlight only": - varDict["Check or Force:"] = "Check Only" - self.callProcedure(WindGustProc, - varDict=varDict, editArea=QCarea) - elif varDict["Wind Gusts"] == "Fix": - varDict["Check or Force:"] = "Force: WindGust>=Wind" - self.callProcedure(WindGustProc, - varDict=varDict, editArea=QCarea) -## ToolList.append(("WindGustQC", "WindGust")) - - if varDict["CheckSkyWithPoP"] == "Highlight only": - ToolList.append(("CheckSkyWithPoP", "PoP")) - elif varDict["CheckSkyWithPoP"] == "Fix": - ToolList.append(("EnufCloudForPoP", "Sky")) - -## if varDict["CheckPoPwithSky"] == "Highlight only": -## ToolList.append(("CheckPoPwithSky", "PoP")) -## elif varDict["CheckPoPwithSky"] == "Fix": -## ToolList.append(("ForcePoPtoSky", "PoP")) -## ToolList.append(("PoP12hrFmMaxPoP", "PoP12hr")) - -## if varDict["CheckPoPwithWx"] == "Highlight only": -## ToolList.append(("CheckPoPwithWx", "PoP")) -### ToolList.append(("CheckPoP", "PoP12hr")) -## elif varDict["CheckPoPwithWx"] == "Fix": -## ToolList.append(("ForcePoPtoWx", "PoP")) -### ToolList.append(("PoP12hrFmMaxPoP", "PoP12hr")) -## -## if varDict["CheckWxWithPoP"] == "Highlight only": -## ToolList.append(("CheckWx", "Wx")) -## -## if varDict["CheckWxWithPoP"] == "Fix": -## ToolList.append(("WxCovMatchPoP", "Wx")) -## -## if varDict["NoPoPNoQPF"] == "Highlight only": -## ToolList.append(("CheckQPF", "QPF")) -### ToolList.append(("CheckQPF", "QPF6hr")) -### ToolList.append(("CheckQPF", "QPF12hr")) -## elif varDict["NoPoPNoQPF"] == "Fix": -## ToolList.append(("NoPoPNoQPF", "QPF")) -### ToolList.append(("QPF6hrFmQPFsum", "QPF6hr")) -### ToolList.append(("QPF12hrFmQPFsum", "QPF12hr")) -## if 
varDict["NoPoPNoSnowAmt"] == "Highlight only": -## ToolList.append(("CheckSnowAmt", "SnowAmt")) -### ToolList.append(("CheckSnowAmt", "SnowAmt6hr")) -### ToolList.append(("CheckSnowAmt", "SnowAmt12hr")) -## elif varDict["NoPoPNoSnowAmt"] == "Fix": -## ToolList.append(("NoPoPNoSnowAmt", "SnowAmt")) -## if varDict["Run PPI"] == "Yes": # For offices doing Precipitation Probability Index images for the web -## self.createFromScratchCmd(["PPI"], timeRange, # (Also uncomment out Variablelist.append lines at top) -## repeat=1, duration=1) -## ToolList.append(("PPIfmPoP", "PPI")) - -# elif varDict["NoPoPNoSnowAmt"] == "Re-run from QPF": -# ToolList.append(("SnowDog", "SnowAmt")) -# ToolList.append(("SnowAmt6hrFmSnowAmt", "SnowAmt6hr")) -# ToolList.append(("SnowAmt12hr", "SnowAmt12hr")) - - QPFSnowWxPoPCheck = [] - if 'Yes' in varDict['Run SnowAmt/QPF Check?']: - QPFSnowWxPoPCheck.append((1)) - if 'Yes' in varDict['Run SnowAmt/Wx Check?']: - QPFSnowWxPoPCheck.append((1)) - if 'Yes' in varDict['Run QPF/PoP Check?']: - QPFSnowWxPoPCheck.append((1)) - if 'Yes' in varDict['Run QPF/Wx Check?']: - QPFSnowWxPoPCheck.append((1)) -# print "QPFSnowWxPoPCheck and its length :", QPFSnowWxPoPCheck, len(QPFSnowWxPoPCheck) - if len(QPFSnowWxPoPCheck) > 0: - self.callProcedure("SnowAmtQPFPoPWxCheck", - varDict=varDict, editArea=QCarea, timeRange=timeRange) - - # For each SmartTool in the list - for toolName, elementName in ToolList: - - # Send a message to the status bar - self.statusBarMsg('ER_QC_Check running -> %s' % (toolName), 'R') - - # If this element is in the 'make grids' list - if elementName in makeList: -## makeList = ['RH', 'HeatIndex', 'WindChill', 'PPI'] ## defined above - - # Ensure we have grids for this element - self.createFromScratchCmd([elementName], timeRange, - repeat=1, duration=1) - ## these are all hourly so can create hourly and obviate - ## the need for the fragment call below. 
- -## # Fragment these fields -## self.fragmentCmd([elementName], timeRange) - - editArea = Allareas - - # Execute this SmartTool - error = self.callSmartTool(toolName, elementName, - editArea, timeRange, varDict, - missingDataMode="Create") - - else: - editArea = QCarea - # Execute this SmartTool - if toolName == "WxCovMatchPoP": - error = self.callSmartTool(toolName, elementName, - editArea, timeRange, - missingDataMode="Create") - else: - error = self.callSmartTool(toolName, elementName, - editArea, timeRange, varDict, - missingDataMode="Create") - - if error is not None: - break - - - # Send a message to the status bar - self.statusBarMsg('ER_QC_Check completed!', 'R') - - - - - - def logProcUse(self, string): - gtime = time.gmtime() - ts = "%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d" % (gtime[0], gtime[1], gtime[2], - gtime[3], gtime[4], gtime[5]) - LogStream.logEvent("%s| %s" % (ts, string)) +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ERQCcheck +# +# Authors: Matthew H. Belk WFO Taunton, MA Created: 04/25/03 +# Thomas R. Mazza WFO Charleston, WV Last Modified: 3/29/06 +# Some of the modules used by this procedure were edited from modules +# originally written by Bob Stauber, Steve Nelson, Jim Hayes, Paul +# Jendrowski and Tom LeFebvre. +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +import time +import AbsTime + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify +MenuItems = ["Consistency"] +import LogStream, time + +# The ToolList is optional, but recommended, if you are calling +# Smart Tools from your Script. +# If present, it can be used to show which grids will be +# modified by the Script. + +ToolList = [] + +################################################################################# +# +# +# Control the weather element groups available: +# +availableElementGroups = ["Public", "Fire Weather"] +#availableElementGroups = ["Public", "Fire Weather", "Marine"] +# +# Marine can be eliminated for inland sites. + + +VariableList = [] + ################################################################# + # + # get time range to check + +VariableList.append(("Use Selected Time Range from the Grid Manager ?", "Y", "radio", + ["N", "Y"])) +VariableList.append(("If not, Pick period to start with here:", "Today", "radio", + ["Today (00Z Cycle Only)", "Tonight", "Tomorrow", "Tomorrow Night", "Next Day", "Next Day Night", + "Day3 day", "Day3 night", "Day4", "Day5", "Day6", "Day7"])) +VariableList.append(("...and to end with here:", "Day7", "radio", + ["Today (00Z Cycle Only)", "Tonight", "Tomorrow", "Tomorrow Night", "Next Day", "Next Day Night", + "Day3 day", "Day3 night", "Day4", "Day5", "Day6", "Day7"])) +VariableList.append(("00Z or 12Z cycle (don't use Today if 12Z) ?", "Auto", "radio", + ["Auto", "00Z cycle", "12Z cycle"])) +VariableList.append(("", "", "label")) + + ################################################################# + # + # get elements to check + +VariableList.append(("All (Overrides other choices if not No)", "No", "radio", + ["No", "Highlight only", "Fix All"])) +VariableList.append(("NDFD Grid Check (Checks all elements all 7 days)", "No", "radio", + ["No", "Yes"])) +VariableList.append(("Which element group(s)?" 
, + ["Public"], "check", + availableElementGroups, + )) +VariableList.append(("For each element, choose No not to check, Highlight only to highlight inconsistencies, \ +and Fix to actually fix inconsistencies.", "", "label")) +##VariableList.append(("Checks for Temperatures and Wind Gusts (Checks all 7 days)..." , "", "label")) +VariableList.append(("Checks for Temperatures and Wind Gusts (Checks all 7 days), and for Sky, PoP, Wx, QPF and SnowAmt :", "", "label")) +VariableList.append(("Temperatures", "No", "radio", + ["No", "Highlight only", "Fix"])) +VariableList.append(("Wind Gusts", "No", "radio", + ["No", "Highlight only", "Fix"])) +##VariableList.append(("Zero out wind gusts not in excess of sustained wind by more than:" , "5", "numeric")) +##VariableList.append(("Minimum wind gust to report:" , "15", "numeric")) +##VariableList.append(("Checks for Sky, PoP, Wx, QPF and SnowAmt...", "", "label")) +VariableList.append(("CheckSkyWithPoP", "No", "radio", + ["No", "Highlight only", "Fix"])) +VariableList.append(("Sky vs PoP Relationship:", "add", "radio", + ["add", "multiply", "Sky Limit"])) +##VariableList.append(("CheckPoPwithSky", "No", "radio", +## ["No", "Highlight only", "Fix"])) +##VariableList.append(("CheckPoPwithWx", "No", "radio", +## ["No", "Highlight only", "Fix"])) +##VariableList.append(("CheckWxWithPoP", "No", "radio", +## ["No", "Highlight only", "Fix"])) +##VariableList.append(("NoPoPNoQPF", "No", "radio", +## ["No", "Highlight only", "Fix"])) +##VariableList.append(("NoPoPNoSnowAmt", "No", "radio", +## ["No", "Highlight only", "Fix"])) +##VariableList.append(("Run PPI", "Yes", "radio", ## For offices doing Precipitation Probability Index images for the web +## ["No", "Yes"])) ## (Also uncomment the two PPI sections near the bottom) +VariableList.append(("For wind gusts :", "", "label")) +VariableList.append(("Limit wind gusts in excess of sustained wind by:", "12", "numeric")) +VariableList.append(("Make wind gusts in excess of sustained wind 
by factor of at least:", "1.0", "numeric")) +VariableList.append(("For Sky and PoP :", "", "label")) +VariableList.append(("For add, multiply (smaller factor), by how much ?" , "20", "numeric")) +VariableList.append(("For Sky Limit, only Sky less than Limit affected; it is raised to the Limit:", "", "label")) +VariableList.append(("Enter Sky Limit: the minimum Sky cover needed to support Wx:" , 60, "numeric")) +##VariableList.append(("Enter minimum PoP for measurable precip:", 15, "numeric")) +VariableList.append(("Enter Sky cover for 5% PoP:" , 30, "numeric")) +VariableList.append(('For checks between QPF, SnowAmt, PoP and Wx, if "Cleanup" is selected, then\nonly cleanup actions will run. No checks will be made, regardless of the above settings.', '', 'label')) +VariableList.append(('Check_Cleanup', 'Check', 'radio', ['Check', 'Cleanup'])) +VariableList.append(('Run SnowAmt/QPF Check?', ['Yes'], 'check', ['Yes'])) +VariableList.append(('Run SnowAmt/Wx Check?', ['Yes'], 'check', ['Yes'])) +VariableList.append(('Run QPF/PoP Check?', ['Yes'], 'check', ['Yes'])) +VariableList.append(('Run QPF/Wx Check?', ['Yes'], 'check', ['Yes'])) + +# Procedures and Tools used in QCcheck with credits +# +# In this table, Procedures and Tools used by a Procedure +# are indented once (4 spaces), Procedures and Tools used +# by a Procedure used by a Procedure indented again, etc. +# +# Tools with two pound signs (##) at the beginning of the +# are baseline tools used by ERQCcheck, and are listed +# here for reference only. 
+# +# Procedure or Tool Procedure Tool (Wx Credit +# Element +# Edited) +# +# ERQCcheck Procedure Nelson/Mazza +# NDFDgridCheck Procedure Hayes, James +# CheckTemepratues Procedure LeFebvre, Tom +## RHTool RH +## WindChillTool WindChill +## HeatIndexTool HeatIndex +# CheckWindGust Procedure LeFebvre/Mazza +# CheckSkyWithPoP Sky Nelson, Steve +# CheckPoPwithWx PoP Nelson +# CheckWx Wx Nelson +# CheckQPF QPF +# CheckSnowAmt SnowAmt +# EnufCloudForPoP Sky Mazza +# ForcePoPtoWx PoP +# PoP12hrFmMaxPoP PoP12hr Mazza +# NoPoPNoQPF QPF Mazza +# NoPoPNoSnowAmt SnowAmt Mazza +# +# The following tools and procedures are no longer used by +# ERQCcheck since Tom LeFebvre's CheckTandTd. This reduces +# the total number of Procedures and Tools involved from 31 to 14. +# These Tools and Procedures can safely be removed from your local GFE. +# +# CheckMaxTvsMinTvsMaxT Procedure Stauber/Mazza +# MakeTemporaryMinT MinT Stauber, Bob +# TempMinTfmMinT MinT Stauber/Mazza +# CheckMinTagainstMaxT TempMinT Stauber/Mazza +# CheckMaxTagainstMinT MaxT Stauber +# CheckTagainstMaxTandMinT Procedure Stauber/Mazza +# CheckTagainstMinT T Stauber +# CheckTd Td Stauber +# CheckTagainstMaxT T Stauber +# MaxTvsMinTvsMaxT Procedure Stauber/Mazza +## MakeTemporaryMinT MinT Stauber +## TempMinTfmMinT MinT Stauber +# MinTaobMaxT TempMinT Stauber +# MinTfrTempMinT MinT Stauber +# MaxTaoaMinT MaxT Stauber +# ForceTbetweenMaxTandMinT Procedure Stauber/Mazza +# MakeTaoaMinT T Stauber +# MakeTaobMaxT T Stauber +# TdLessTbyUsrAmt Td Mazza, Thomas +# CheckTd Td + + +import SmartScript +## For documentation on the available commands, +## see the SmartScript Utility, which can be viewed from +## the Edit Actions Dialog Utilities window + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, editArea, timeRange, varDict): + # Checks chosen dependent elements in GFE against appropriate independent elements + 
self.logProcUse("Procedure execution") + + #################################################################################### + # + # Configuration Section + # + # Zulu time threshold for procedure to begin using 12Z as the cycle + # (periods are pushed one day ahead, thereby adding a new day to the forecast) + # + start12Zcycle = 12 + # + # Zulu time for beginning of daytime period, e.g., at what zulu hour do periods + # like "Today", "Tomorrow" and "Day5" begin + # + startDayPeriod = 12 + # + # Difference in hours between zulu time and local standard time (zulu - local) + # + localStandardOffset = 5 + # + # Zulu time for beginning of MaxT grid + # + maxTstartTimeZ = 12 + # + # Duration of MaxT grid + # + maxTdur = 12 + # + # Zulu time for beginning of MinT grid + # + minTstartTimeZ = 0 + # + # Duration of MinT grid + # + minTdur = 13 + # + QCarea = self.getEditArea("ISC_Send_Area") + # + GustLessSusMin = 5 # Zero out wind gusts not in excess of sustained wind by more than + # + minGust = 15 # Minimum wind gust to report + # + TemperatureProc = "CheckTandTd" + # I set this up so that if we chnage the name of CheckTandTd.Procedure, we could + # just chnage it here instead of every opccurence within this procedure. + #TemperatureProc = "CheckTemperatures" + WindGustProc = "CheckWindGust" + startHeatIndex = 4 ## First month to report heat index + endHeatIndex = 10 ## Last month to report heat index + startWindChill = 10 ## First month to report wind chill + endWindChill = 4 ## Last month to report wind chill + # + # End Configuration Section + # + #################################################################################### + + Allareas = editArea ## Hold all areas when procedure invoked w/o wedit area selected. 
+ ## This is necessary for grids created from scratch + +## areaFmProc = self.getEditArea(varDict["savedEditArea"]) +## procAreaMask = self.encodeEditArea(areaFmProc) + # Get user chosen time range + + deltaStartDay = startDayPeriod - 12 + varDict["Start 12Z cycle at zulu time"] = start12Zcycle + varDict["Hours past 12Z to start day periods"] = deltaStartDay + varDict["Local standard offset"] = localStandardOffset + varDict["MaxT start time"] = maxTstartTimeZ + varDict["MaxT duration"] = maxTdur + varDict["MinT start time"] = minTstartTimeZ + varDict["MinT duration"] = minTdur + + now = time.localtime(time.time()) + nowZ = time.gmtime(time.time()) + if now[8] == 1: edton = 1 ## Lying on an eiderdown + else: edton = 0 + cycle = varDict["00Z or 12Z cycle (don't use Today if 12Z) ?"] + if cycle == "Auto": + if now[3] >= start12Zcycle: + ## if now[3] > 12 and now[3] - edton < 19: + cycle = "12Z cycle" + else: + cycle = "00Z cycle" + + if varDict["Use Selected Time Range from the Grid Manager ?"] == "Y": + + self.p_timeHeader = time.strftime("%Z", time.localtime(time.time())) + timeHeader = time.strftime("%Z", time.localtime(time.time())) + + present = AbsTime.current() + + timeHeader = self.p_timeHeader + + today = AbsTime.absTimeYMD(present.year, present.month, present.day) + startTimeOffset = (timeRange.startTime() - today) / 3600 + endTimeOffset = (timeRange.endTime() - today) / 3600 +## print startTimeOffset, endTimeOffset + SubtractADay = 0 +## if now[3] - edton > 23 - localStandardOffset: +## SubtractADay = 24 + starth = startTimeOffset - SubtractADay + endh = endTimeOffset - SubtractADay +## print "ERQC: now[3], starth, endh:", now[3], starth, endh + + else: + + startPeriod = varDict["If not, Pick period to start with here:"] + endPeriod = varDict["...and to end with here:"] + + startEnd = [("Today (00Z Cycle Only)", 12, 24), ("Tonight", 24, 36), ("Tomorrow", 36, 48), ("Tomorrow Night", 48, 60), + ("Next Day", 60, 72), ("Next Day Night", 72, 84), ("Day3 day", 
84, 96), ("Day3 night", 96, 108), + ("Day4", 108, 132), ("Day5", 132, 156), ("Day6", 156, 180), ("Day7", 180, 205) + ] + + for i in range(len(startEnd)): + + period, startHour, endHour = startEnd[i] + + ################################################################################################ + # + # Now determine the time period chosen + + if period == startPeriod: + starth = startHour + h = starth + if period == endPeriod: + endh = endHour + +#### if starth < firsth: +#### firsth = starth +#### if endh > finalh: +#### finalh = endh + + ################################################################################################ + # + # Handle exceptions + + if endPeriod == "Day6" and cycle == "00Z cycle": + endh = 181 + finalNight = 1 + + if endPeriod == "Day7" or endPeriod == "Day8": + finalNight = 1 + + if now[3] - edton > 23 - localStandardOffset: + starth -= 24 + if starth < 0: + starth = 0 + endh -= 24 + if endh < 0: + endh = 11 + + + starth -= deltaStartDay + endh -= deltaStartDay + + timeRange = self.createTimeRange(starth, endh, "Zulu") + + ######################################################################## + # + # What follows here is a little louie that involves calling other + # procedures, namely CheckTagainstMaxTandMinT and + # ForceTbetweenMaxTandMinT. These procedures repeat the timeRange + # logic above, so they could be run independently. But this means, for + # using the selected time range, where subtracting one from the end time + # is necessary to effect the proper time range, this hour needs added + # again before calling one of these procedures, to avoid the duplicate + # subtraction. 
+ + timeRangePlusOneHr = self.createTimeRange(starth, endh + 1, "Zulu") + +################################################################################ +# +# Period Today Tonight Tomorrow Tomorrow Next Day Next Day +# Night Night +# +# starth 12 24 36 48 60 72 +# endH 23 35 47 59 71 83 +# +# Period Day4 Day5 Day6 Day7 +# +# starth 84 108 132 156 +# endH 107 131 155 181 +# +################################################################################ + + # Define variables for wind gust check: +## varDict["Wind Gust QC:"] = ["Ensure gusts where >=10kts Sustained", "Zero out where minimal difference"] +## varDict["Zero out wind gusts not in excess of sustained wind by more than:"] = GustLessSusMin +## varDict["Minimum wind gust to report:"] = minGust + + # Define a list of elements for which to create grids if necessary +## makeList = ['RH', 'HeatIndex', 'WindChill', 'PPI'] +## makeList = ['RH''PPI'] + makeList = [] + makeList.append(("RH")) + makeList.append(("PPI")) + curMon = nowZ[1] + + if curMon >= startWindChill or curMon <= endWindChill: + makeList.append(('WindChill')) + if curMon >= startHeatIndex and curMon <= endHeatIndex: + makeList.append(('HeatIndex')) + + # Get list of SmartTools to run + all = varDict["All (Overrides other choices if not No)"] + if all == "Highlight only": +## varDict["Which element group(s)?"] = ["Public", "Fire Weather", "Marine"], + self.callProcedure("NDFDgridCheck", + timeRange=timeRange, varDict=varDict, editArea=QCarea) + varDict["Check or Force:"] = "Check Only" + self.callProcedure(TemperatureProc, + varDict=varDict, editArea=QCarea) + self.callProcedure(WindGustProc, + varDict=varDict, editArea=QCarea) +## ToolList.append(("WindGustQC", "WindGust")) + ToolList.append(("CheckSkyWithPoP", "PoP")) +## ToolList.append(("CheckPoPwithSky", "PoP")) +# ToolList.append(("CheckPoPwithWx", "PoP")) +# ToolList.append(("CheckWx", "Wx")) +# ToolList.append(("CheckQPF", "QPF")) +# ToolList.append(("CheckQPF", "QPF6hr")) +# 
ToolList.append(("CheckQPF", "QPF12hr")) +# ToolList.append(("CheckSnowAmt", "SnowAmt")) +# ToolList.append(("CheckSnowAmt", "SnowAmt6hr")) +# ToolList.append(("CheckSnowAmt", "SnowAmt12hr")) + + elif all == "Fix All": +## elif all == "Fix All (Force Sky to PoP)" or all == "Fix All (Force PoP to Sky)": +## varDict["Which element group(s)?"] = ["Public", "Fire Weather", "Marine"], + self.callProcedure("NDFDgridCheck", + timeRange=timeRange, varDict=varDict, editArea=QCarea) + varDict["Check or Force:"] = "Force: TMin<=T<=TMax\n and Td<=T" + self.callProcedure(TemperatureProc, + varDict=varDict, editArea=QCarea) + varDict["Check or Force:"] = "Force: WindGust>=Wind" + self.callProcedure(WindGustProc, + varDict=varDict, editArea=QCarea) +## ToolList.append(("WindGustQC", "WindGust")) + ToolList.append(("EnufCloudForPoP", "Sky")) +## if all == "Fix All (Force Sky to PoP)": +## ToolList.append(("EnufCloudForPoP", "Sky")) +## elif all == "Fix All (Force PoP to Sky)": +## ToolList.append(("ForcePoPtoSky", "PoP")) +# ToolList.append(("ForcePoPtoWx", "PoP")) +# ToolList.append(("WxCovMatchPoP", "Wx")) +# ToolList.append(("PoP12hrFmMaxPoP", "PoP12hr")) +# ToolList.append(("NoPoPNoQPF", "QPF")) +# ToolList.append(("QPF6hrFmQPFsum", "QPF6hr")) +# ToolList.append(("QPF12hrFmQPFsum", "QPF12hr")) +# ToolList.append(("NoPoPNoSnowAmt", "SnowAmt")) +## self.createFromScratchCmd(["PPI"], timeRange, # (Also uncomment out Variablelist.append lines at top) +## repeat=1, duration=1) +## ToolList.append(("PPIfmPoP", "PPI")) # For offices doing Precipitation Probability Index images for the web +# ToolList.append(("SnowAmt6hrFmSnowAmt", "SnowAmt6hr")) +# ToolList.append(("SnowAmt12hr", "SnowAmt12hr")) + + else: + if varDict["NDFD Grid Check (Checks all elements all 7 days)"] == "Yes": +## varDict["Which element group(s)?"] = ["Public", "Fire Weather", "Marine"], + self.callProcedure("NDFDgridCheck", + timeRange=timeRange, varDict=varDict, editArea=QCarea) + if varDict["Temperatures"] == 
"Highlight only": + varDict["Check or Force:"] = "Check Only" + self.callProcedure(TemperatureProc, + varDict=varDict, editArea=QCarea) + elif varDict["Temperatures"] == "Fix": + varDict["Check or Force:"] = "Force: TMin<=T<=TMax\n and Td<=T" + self.callProcedure(TemperatureProc, + varDict=varDict, editArea=QCarea) + if varDict["Wind Gusts"] == "Highlight only": + varDict["Check or Force:"] = "Check Only" + self.callProcedure(WindGustProc, + varDict=varDict, editArea=QCarea) + elif varDict["Wind Gusts"] == "Fix": + varDict["Check or Force:"] = "Force: WindGust>=Wind" + self.callProcedure(WindGustProc, + varDict=varDict, editArea=QCarea) +## ToolList.append(("WindGustQC", "WindGust")) + + if varDict["CheckSkyWithPoP"] == "Highlight only": + ToolList.append(("CheckSkyWithPoP", "PoP")) + elif varDict["CheckSkyWithPoP"] == "Fix": + ToolList.append(("EnufCloudForPoP", "Sky")) + +## if varDict["CheckPoPwithSky"] == "Highlight only": +## ToolList.append(("CheckPoPwithSky", "PoP")) +## elif varDict["CheckPoPwithSky"] == "Fix": +## ToolList.append(("ForcePoPtoSky", "PoP")) +## ToolList.append(("PoP12hrFmMaxPoP", "PoP12hr")) + +## if varDict["CheckPoPwithWx"] == "Highlight only": +## ToolList.append(("CheckPoPwithWx", "PoP")) +### ToolList.append(("CheckPoP", "PoP12hr")) +## elif varDict["CheckPoPwithWx"] == "Fix": +## ToolList.append(("ForcePoPtoWx", "PoP")) +### ToolList.append(("PoP12hrFmMaxPoP", "PoP12hr")) +## +## if varDict["CheckWxWithPoP"] == "Highlight only": +## ToolList.append(("CheckWx", "Wx")) +## +## if varDict["CheckWxWithPoP"] == "Fix": +## ToolList.append(("WxCovMatchPoP", "Wx")) +## +## if varDict["NoPoPNoQPF"] == "Highlight only": +## ToolList.append(("CheckQPF", "QPF")) +### ToolList.append(("CheckQPF", "QPF6hr")) +### ToolList.append(("CheckQPF", "QPF12hr")) +## elif varDict["NoPoPNoQPF"] == "Fix": +## ToolList.append(("NoPoPNoQPF", "QPF")) +### ToolList.append(("QPF6hrFmQPFsum", "QPF6hr")) +### ToolList.append(("QPF12hrFmQPFsum", "QPF12hr")) +## if 
varDict["NoPoPNoSnowAmt"] == "Highlight only": +## ToolList.append(("CheckSnowAmt", "SnowAmt")) +### ToolList.append(("CheckSnowAmt", "SnowAmt6hr")) +### ToolList.append(("CheckSnowAmt", "SnowAmt12hr")) +## elif varDict["NoPoPNoSnowAmt"] == "Fix": +## ToolList.append(("NoPoPNoSnowAmt", "SnowAmt")) +## if varDict["Run PPI"] == "Yes": # For offices doing Precipitation Probability Index images for the web +## self.createFromScratchCmd(["PPI"], timeRange, # (Also uncomment out Variablelist.append lines at top) +## repeat=1, duration=1) +## ToolList.append(("PPIfmPoP", "PPI")) + +# elif varDict["NoPoPNoSnowAmt"] == "Re-run from QPF": +# ToolList.append(("SnowDog", "SnowAmt")) +# ToolList.append(("SnowAmt6hrFmSnowAmt", "SnowAmt6hr")) +# ToolList.append(("SnowAmt12hr", "SnowAmt12hr")) + + QPFSnowWxPoPCheck = [] + if 'Yes' in varDict['Run SnowAmt/QPF Check?']: + QPFSnowWxPoPCheck.append((1)) + if 'Yes' in varDict['Run SnowAmt/Wx Check?']: + QPFSnowWxPoPCheck.append((1)) + if 'Yes' in varDict['Run QPF/PoP Check?']: + QPFSnowWxPoPCheck.append((1)) + if 'Yes' in varDict['Run QPF/Wx Check?']: + QPFSnowWxPoPCheck.append((1)) +# print "QPFSnowWxPoPCheck and its length :", QPFSnowWxPoPCheck, len(QPFSnowWxPoPCheck) + if len(QPFSnowWxPoPCheck) > 0: + self.callProcedure("SnowAmtQPFPoPWxCheck", + varDict=varDict, editArea=QCarea, timeRange=timeRange) + + # For each SmartTool in the list + for toolName, elementName in ToolList: + + # Send a message to the status bar + self.statusBarMsg('ER_QC_Check running -> %s' % (toolName), 'R') + + # If this element is in the 'make grids' list + if elementName in makeList: +## makeList = ['RH', 'HeatIndex', 'WindChill', 'PPI'] ## defined above + + # Ensure we have grids for this element + self.createFromScratchCmd([elementName], timeRange, + repeat=1, duration=1) + ## these are all hourly so can create hourly and obviate + ## the need for the fragment call below. 
+ +## # Fragment these fields +## self.fragmentCmd([elementName], timeRange) + + editArea = Allareas + + # Execute this SmartTool + error = self.callSmartTool(toolName, elementName, + editArea, timeRange, varDict, + missingDataMode="Create") + + else: + editArea = QCarea + # Execute this SmartTool + if toolName == "WxCovMatchPoP": + error = self.callSmartTool(toolName, elementName, + editArea, timeRange, + missingDataMode="Create") + else: + error = self.callSmartTool(toolName, elementName, + editArea, timeRange, varDict, + missingDataMode="Create") + + if error is not None: + break + + + # Send a message to the status bar + self.statusBarMsg('ER_QC_Check completed!', 'R') + + + + + + def logProcUse(self, string): + gtime = time.gmtime() + ts = "%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d" % (gtime[0], gtime[1], gtime[2], + gtime[3], gtime[4], gtime[5]) + LogStream.logEvent("%s| %s" % (ts, string)) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Finalize_KML.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Finalize_KML.py index 7c7086137a..40b71a69f0 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Finalize_KML.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Finalize_KML.py @@ -1,239 +1,239 @@ - -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Finalize_KML -# Version 3.0 -# -# Author: Joe Maloney/P. 
Santos -# -# IMPORTANT: Uses /data/local/GFEnhc/archive_ss_gfx.sh -# -# ---------------------------------------------------------------------------- -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- ------------------------------------------ -# Jun 20, 2012 Santos Initial creation -# Apr 12, 2016 LeFebvre Code cleanup and refactoring -# Sep 19, 2016 19293 randerso Initial baseline check in -# -######################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -MenuItems = ["None"] - -import os -import time - -import TropicalUtility -import numpy as np - -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData -CoordinateType = ReferenceData.CoordinateType - - -class Procedure (TropicalUtility.TropicalUtility): - - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - - def makeThreatKML(self, discreteWEName, discreteKeys, discreteGrid_kml, stormNum): - -# CONFIG READ: the directory below is the directory where the KML txt files will be dumped. -# From there it is syncronized to the web servers along with the graphics. If you set up -# your gHLS scripts and data directories in a different place than recommended in the install -# instructions, you would need to change that directory here. Do not change .kml.txt to .kml. -# Only .txt file can be uploaded as include files. In the servers a php script will convert the -# file name so that a browser can properly interpret it as a KML file to be open with Google -# Earth or equivalent application. - -# Also, make sure the ownership of the kml.txt files created below is fxa:fxalpha with permissions set -# to 666. - -# You can test the KML files created by copying them outside AWIPS and renaming them .kml. Then open them with -# Google Earth. 
- - date = time.strftime("%Y%m") - datetime = time.strftime("%Y%m%d") - - # Define a file name for the output KML file - kml_filename = '/data/local/WWTool/kml/' + discreteWEName + '_' + datetime + '_AT' + stormNum + '.kml' - - print "KML FILE IS: ", kml_filename - - with open(kml_filename, 'w') as kml: - kml.write('\n') - kml.write('\n') - kml.write(''+discreteWEName+'.kml\n') - - # TODO: do we need to keep these commented out lines? - #kml.write('\n') - #kml.write('\n') - #kml.write('\n')watch orange - #kml.write('\n')warning red - kml.write('\n') - kml.write('\n') - kml.write(''+discreteWEName+'0\n') - - print "DISCRETEKEYS ARE: ", discreteKeys - - # initialize a flag. It will only be NO for the first polygon in the file. - flag = False - - # Process all of the keys found in this discrete grid - for hazIndex, key in enumerate(discreteKeys): - - # Skip the "" value - if "None" in key: - continue - - # Identify all portions of the grid where this key applies - mask = discreteGrid_kml == hazIndex - - # If there are no areas associated with this key - move on - # (this should not happen often) - if not mask.any(): - continue - - # Make an editArea from the current mask - editArea = self.decodeEditArea(mask) - - # Extract the polygons from the edit area - polygons = editArea.getPolygons(CoordinateType.LATLON) - - # CONFIG READ: The following section is the one that needs the most local config. For each key and threat element - # below you must type in the generic impact definitions that you submitted to the national ghls page. 
For example, - # for the case of MLB for winds, Those would be found here: - # http://www.weather.gov/ghls/php/ghls_index.php?sid=mlb&threat=wind#none - - #------------------------------------------------------------------- - # Handle storm surge watches and warnings, if we're dealing with - # the collaboration grids - - # Storm Surge Watch - if key.startswith("SS.A"): - - if discreteWEName in ["InitialSS","ProposedSS","tempProposedSS"]: - kmlHeader='Storm Surge WatchStorm Surge Watch in Effect\n#SSA\n' - - # Storm Surge Warning - elif key.startswith("SS.W"): - if discreteWEName in ["InitialSS","ProposedSS","tempProposedSS"]: - kmlHeader='Storm Surge WarningStorm Surge Warning in Effect\n#SSW\n' - - #------------------------------------------------------------------- - # Produce KML for every polygon associated with this particular key - - for i in range(polygons.getNumGeometries()): - poly = polygons.getGeometryN(i) - shell = poly.getExteriorRing(); - if shell: - if flag: - kml.write('\n') - - kml.write(kmlHeader) - kml.write('') - print "Outer shell coordinates:" - for c in shell.getCoordinates(): - line = str(c.x) + ',' + str(c.y) + ',0 \n' - kml.write(line) - - kml.write('') - # Now that we've written at least one polygon, set flag to YES - flag = True - - for j in xrange(poly.getNumInteriorRing()): - hole = poly.getInteriorRingN(j) - print "Hole",j,"coordinates:" - kml.write('') - for c in hole.getCoordinates(): - line = str(c.x) + ',' + str(c.y) + ',0 \n' - kml.write(line) - - kml.write('') - - kmlEnd='\n\n' - kml.write(kmlEnd) - - - # For each unique ETN, determine the mask that covers that ETN. 
Return the - # result in a dictionary ETN:mask - def calcStormMasks(self, threatGrid, threatKeys): - - # Initialize the dictionary - stormDict = {} - - # process all the keys within this discrete grid - for index, key in enumerate(threatKeys): - - # Get the storm number, the last two digits, from the ETN - etn = self.getETN(key)[-2:] - - # Skip the "" value, we don't need it. Also skip any key - # which does not contain a valid storm number - if "None" in key or not etn.isdigit(): - continue - - # Identify areas where this key is applied - mask = (threatGrid == index) - - # If we already have a mask for this storm, add to it - if stormDict.has_key(etn): - stormDict[etn] |= mask - - # Otherwise, make a new dictionary entry - else: - stormDict[etn] = mask - - # Return the completed mask dictionary - return stormDict - - - def execute(self): - - # TODO: should this commented out line be removed? - #os.system('/data/local/GFEnhc/archive_ss_gfx.sh') - - # Make a timeRange that starts at top of this hour and 12 hours long - start = int(self._gmtime().unixTime() / 3600) * 3600 - end = start + 12 * 3600 - tr = self.GM_makeTimeRange(start, end) - - # Define some conditions before we start processing - discreteList = ['ProposedSS'] #['InitialSS','ProposedSS','tempProposedSS'] - editAreaMask = self.encodeEditArea("StormSurgeWW_EditArea") - - # Process all of the specified discrete grids - for discreteWEName in discreteList: - - discreteGrid, discreteKeys = self.getGrids("Fcst", discreteWEName, "SFC", tr) - - # Get the masks that cover each ETN for this grid - stormMaskDict = self.calcStormMasks(discreteGrid, discreteKeys) - - # Process the mask for each storm we found - for stormNum in stormMaskDict.keys(): - - print "****************stormNum: ", stormNum - # Initialize grid to -9 everywhere, we're not sure why at this point - discreteGrid_kml = np.zeros(self.getGridShape(), np.int8) - 9 - - # Get the mask associated with this particular storm - threatMask = 
stormMaskDict[stormNum] - - # Update the grid we're going to convert to KML only with data - # where we have actual threats - discreteGrid_kml[threatMask] = discreteGrid[threatMask] - - # Format the final KML for this storm - self.makeThreatKML(discreteWEName, discreteKeys, discreteGrid_kml, stormNum) + +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Finalize_KML +# Version 3.0 +# +# Author: Joe Maloney/P. Santos +# +# IMPORTANT: Uses /data/local/GFEnhc/archive_ss_gfx.sh +# +# ---------------------------------------------------------------------------- +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- ------------------------------------------ +# Jun 20, 2012 Santos Initial creation +# Apr 12, 2016 LeFebvre Code cleanup and refactoring +# Sep 19, 2016 19293 randerso Initial baseline check in +# +######################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +MenuItems = ["None"] + +import os +import time + +import TropicalUtility +import numpy as np + +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData +CoordinateType = ReferenceData.CoordinateType + + +class Procedure (TropicalUtility.TropicalUtility): + + def __init__(self, dbss): + TropicalUtility.TropicalUtility.__init__(self, dbss) + + + def makeThreatKML(self, discreteWEName, discreteKeys, discreteGrid_kml, stormNum): + +# CONFIG READ: the directory below is the directory where the KML txt files will be dumped. +# From there it is syncronized to the web servers along with the graphics. 
If you set up +# your gHLS scripts and data directories in a different place than recommended in the install +# instructions, you would need to change that directory here. Do not change .kml.txt to .kml. +# Only .txt file can be uploaded as include files. In the servers a php script will convert the +# file name so that a browser can properly interpret it as a KML file to be open with Google +# Earth or equivalent application. + +# Also, make sure the ownership of the kml.txt files created below is fxa:fxalpha with permissions set +# to 666. + +# You can test the KML files created by copying them outside AWIPS and renaming them .kml. Then open them with +# Google Earth. + + date = time.strftime("%Y%m") + datetime = time.strftime("%Y%m%d") + + # Define a file name for the output KML file + kml_filename = '/data/local/WWTool/kml/' + discreteWEName + '_' + datetime + '_AT' + stormNum + '.kml' + + print("KML FILE IS: ", kml_filename) + + with open(kml_filename, 'w') as kml: + kml.write('\n') + kml.write('\n') + kml.write(''+discreteWEName+'.kml\n') + + # TODO: do we need to keep these commented out lines? + #kml.write('\n') + #kml.write('\n') + #kml.write('\n')watch orange + #kml.write('\n')warning red + kml.write('\n') + kml.write('\n') + kml.write(''+discreteWEName+'0\n') + + print("DISCRETEKEYS ARE: ", discreteKeys) + + # initialize a flag. It will only be NO for the first polygon in the file. 
+ flag = False + + # Process all of the keys found in this discrete grid + for hazIndex, key in enumerate(discreteKeys): + + # Skip the "" value + if "None" in key: + continue + + # Identify all portions of the grid where this key applies + mask = discreteGrid_kml == hazIndex + + # If there are no areas associated with this key - move on + # (this should not happen often) + if not mask.any(): + continue + + # Make an editArea from the current mask + editArea = self.decodeEditArea(mask) + + # Extract the polygons from the edit area + polygons = editArea.getPolygons(CoordinateType.LATLON) + + # CONFIG READ: The following section is the one that needs the most local config. For each key and threat element + # below you must type in the generic impact definitions that you submitted to the national ghls page. For example, + # for the case of MLB for winds, Those would be found here: + # http://www.weather.gov/ghls/php/ghls_index.php?sid=mlb&threat=wind#none + + #------------------------------------------------------------------- + # Handle storm surge watches and warnings, if we're dealing with + # the collaboration grids + + # Storm Surge Watch + if key.startswith("SS.A"): + + if discreteWEName in ["InitialSS","ProposedSS","tempProposedSS"]: + kmlHeader='Storm Surge WatchStorm Surge Watch in Effect\n#SSA\n' + + # Storm Surge Warning + elif key.startswith("SS.W"): + if discreteWEName in ["InitialSS","ProposedSS","tempProposedSS"]: + kmlHeader='Storm Surge WarningStorm Surge Warning in Effect\n#SSW\n' + + #------------------------------------------------------------------- + # Produce KML for every polygon associated with this particular key + + for i in range(polygons.getNumGeometries()): + poly = polygons.getGeometryN(i) + shell = poly.getExteriorRing(); + if shell: + if flag: + kml.write('\n') + + kml.write(kmlHeader) + kml.write('') + print("Outer shell coordinates:") + for c in shell.getCoordinates(): + line = str(c.x) + ',' + str(c.y) + ',0 \n' + kml.write(line) + 
+ kml.write('') + # Now that we've written at least one polygon, set flag to YES + flag = True + + for j in range(poly.getNumInteriorRing()): + hole = poly.getInteriorRingN(j) + print("Hole",j,"coordinates:") + kml.write('') + for c in hole.getCoordinates(): + line = str(c.x) + ',' + str(c.y) + ',0 \n' + kml.write(line) + + kml.write('') + + kmlEnd='\n\n' + kml.write(kmlEnd) + + + # For each unique ETN, determine the mask that covers that ETN. Return the + # result in a dictionary ETN:mask + def calcStormMasks(self, threatGrid, threatKeys): + + # Initialize the dictionary + stormDict = {} + + # process all the keys within this discrete grid + for index, key in enumerate(threatKeys): + + # Get the storm number, the last two digits, from the ETN + etn = self.getETN(key)[-2:] + + # Skip the "" value, we don't need it. Also skip any key + # which does not contain a valid storm number + if "None" in key or not etn.isdigit(): + continue + + # Identify areas where this key is applied + mask = (threatGrid == index) + + # If we already have a mask for this storm, add to it + if etn in stormDict: + stormDict[etn] |= mask + + # Otherwise, make a new dictionary entry + else: + stormDict[etn] = mask + + # Return the completed mask dictionary + return stormDict + + + def execute(self): + + # TODO: should this commented out line be removed? 
+ #os.system('/data/local/GFEnhc/archive_ss_gfx.sh') + + # Make a timeRange that starts at top of this hour and 12 hours long + start = int(self._gmtime().unixTime() / 3600) * 3600 + end = start + 12 * 3600 + tr = self.GM_makeTimeRange(start, end) + + # Define some conditions before we start processing + discreteList = ['ProposedSS'] #['InitialSS','ProposedSS','tempProposedSS'] + editAreaMask = self.encodeEditArea("StormSurgeWW_EditArea") + + # Process all of the specified discrete grids + for discreteWEName in discreteList: + + discreteGrid, discreteKeys = self.getGrids("Fcst", discreteWEName, "SFC", tr) + + # Get the masks that cover each ETN for this grid + stormMaskDict = self.calcStormMasks(discreteGrid, discreteKeys) + + # Process the mask for each storm we found + for stormNum in list(stormMaskDict.keys()): + + print("****************stormNum: ", stormNum) + # Initialize grid to -9 everywhere, we're not sure why at this point + discreteGrid_kml = np.zeros(self.getGridShape(), np.int8) - 9 + + # Get the mask associated with this particular storm + threatMask = stormMaskDict[stormNum] + + # Update the grid we're going to convert to KML only with data + # where we have actual threats + discreteGrid_kml[threatMask] = discreteGrid[threatMask] + + # Format the final KML for this storm + self.makeThreatKML(discreteWEName, discreteKeys, discreteGrid_kml, stormNum) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Fire_Wx_First.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Fire_Wx_First.py index b0e80b80d2..a94a4dc7ca 100755 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Fire_Wx_First.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Fire_Wx_First.py @@ -1,152 +1,152 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express 
or implied, as to its usefulness for -# any purpose. -# -# Fire_Wx_First.py -# -# Author: dtomalak -# ---------------------------------------------------------------------------- - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Populate"] -import LogStream, time -from math import * - -# The ToolList is optional, but recommended, if you are calling -# Smart Tools from your Script. -# If present, it can be used to show which grids will be -# modified by the Script. - - -##ToolList = [("T_Tool", "T"), -## ("PoP_Tool", "PoP"), -## ("Wind_Tool", "Wind"), -## ] - -### If desired, Set up variables to be solicited from the user: -## If your script calls Smart Tools, this VariableList should -## cover all the variables necessary for the tools. - -VariableList = [ - ("Model" , "NAM12", "radio", ["NAM12","NAM40", "GFS40"]), - ] -#VariableList.append(("Extrapolate:", "Forward in Time", "radio", ["Forward in Time", "Backward in Time"])) -#VariableList.append(("Movement Speed (Kts):", "15", "numeric")) -#VariableList.append(("This is just a label", "", "label")) -#VariableList.append(("5% Sky Cover threshold at RH percentage:", 60., "scale", [44., 74.],2.0)) - -import time -import AbsTime -import SmartScript -## For documentation on the available commands, -## see the SmartScript Utility, which can be viewed from -## the Edit Actions Dialog Utilities window - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, editArea, timeRange, varDict): - self.loadWEGroup("FireWx") - fwModel = varDict["Model"] - self.saveObject("FireModel", fwModel, "ModelType") - - hour = int(time.strftime('%H', time.gmtime(time.time()))) - if hour > 3 and hour < 16: - starttime = 12 - endtime = 61 - elif hour < 3: - startime = 0 - endtime = 61 - else: - starttime = 12 - endtime = 73 - - # 
Clean up old data - print 'Create_Fire_With_Smoke: Deleting old grids.' - tr = self.createTimeRange(starttime, endtime, mode="Zulu") - self.deleteCmd(['MaxRH', 'MinRH', 'Haines', 'MixHgt', 'TransWind', 'VentRate'],tr) - - if fwModel == "NAM12" or fwModel == "NAM40": - print 'Create_Fire_With_Smoke: Creating new scratch grids.' - self.createFromScratchCmd(['MaxRH', 'MinRH'], tr) - self.createFromScratchCmd(['MixHgt'], tr, 3, 1) - self.createFromScratchCmd(['TransWind'], tr, 3, 1) - self.createFromScratchCmd(['VentRate'], tr, 3, 1) - self.createFromScratchCmd(['MixHgt'], tr, 3, 1) - self.createFromScratchCmd(['Haines'], tr, 3, 1) -# self.createFromScratchCmd(['TransWindAve'], tr, 3, 1) -# self.createFromScratchCmd(['VentRateAve'], tr, 3, 1) -# self.createFromScratchCmd(['HainesAve'], tr, 3, 1) -# self.createFromScratchCmd(['HrsOfSun'], tr, 12, 12) - self.createFromScratchCmd(['LAL'], tr, 6, 6) -### self.createFromScratchCmd(['RFD'], tr, 3, 1) - - else: - print 'Create_Fire_With_Smoke: Creating new scratch grids.' - self.createFromScratchCmd(['MaxRH', 'MinRH'], tr) - self.createFromScratchCmd(['MixHgt'], tr, 6, 1) - self.createFromScratchCmd(['TransWind'], tr, 6, 1) - self.createFromScratchCmd(['VentRate'], tr, 6, 1) -# self.createFromScratchCmd(['MixHgtAve'], tr, 6, 1) -# self.createFromScratchCmd(['TransWindAve'], tr, 6, 1) -# self.createFromScratchCmd(['VentRateAve'], tr, 6, 1) - self.createFromScratchCmd(['Haines'], tr, 6, 1) -# self.createFromScratchCmd(['HrsOfSun'], tr, 12, 12) - self.createFromScratchCmd(['LAL'], tr, 6, 6) -### self.createFromScratchCmd(['RFD'], tr, 3, 1) - - # QC the dewpoint, then populate the RH, then MinRH MaxRH - print 'Create_Fire_With_Smoke: Running Td and RH tools.' 
-## self.callSmartTool("Td_SmartTool", "Td", None, tr, missingDataMode="skip") -## self.callSmartTool("RHTool","RH", None, tr, missingDataMode="create") - self.callSmartTool("MaxRH_Tool","MaxRH", None, tr, missingDataMode="skip") - self.callSmartTool("MinRH_Tool","MinRH", None, tr, missingDataMode="skip") - - - # Populate the smoke parameters - print 'Create_Fire_With_Smoke: Starting Haines.' - self.callSmartTool("Haines", "Haines", None, tr, missingDataMode="skip") - print 'Create_Fire_With_Smoke: Starting Mixing Hgt.' - self.callSmartTool("MixHgt_FWF", "MixHgt", None, tr, missingDataMode="skip") - print 'Create_Fire_With_Smoke: Starting Transport Winds.' - self.callSmartTool("TransWind_NoVar", "TransWind", None, tr, missingDataMode="skip") - #print 'Create_Fire_With_Smoke: Starting Vent Rate.' - self.callSmartTool("VentRate", "VentRate", None, tr, missingDataMode="skip") - print 'Create_Fire_With_Smoke: Starting LAL.' - self.callSmartTool("LAL_Tool", "LAL", None, tr, missingDataMode="create") -# self.callSmartTool("CalculateRFD", "RFD", None, tr, missingDataMode="create") - - # Interpolate the smoke parameters to hourly grids - print 'Create_Fire_With_Smoke: Interpolating Grids.' - self.interpolateCmd(["MixHgt", "VentRate", "TransWind", "Haines"], tr, "GAPS", "SYNC", interval = 1) - - # Populate the afternoon average smoke parameters for the ZFP -# print 'Create_Fire_With_Smoke: Starting Average Mixing Hgt.' -# self.callSmartTool("MixHgtAve", "MixHgtAve", None, tr, missingDataMode="skip") -# print 'Create_Fire_With_Smoke: Starting Average Transport Winds.' -# self.callSmartTool("TransWindAve", "TransWindAve", None, tr, missingDataMode="skip") -# print 'Create_Fire_With_Smoke: Starting Average Vent Rate.' -# self.callSmartTool("VentRateAve", "VentRateAve", None, tr, missingDataMode="skip") - - - # Calculate the hours of sun -# print 'Create_Fire_With_Smoke: Starting hours of sun.' 
-# def execute(self, editArea, timeRange, varDict): -# new_timeRange = self.createTimeRange(06, 78, mode="Zulu") -# self.createFromScratchCmd(['HrsOfSun'], new_timeRange, 24, 24) -# self.callSmartTool("Calc_Hours_of_Sun","HrsOfSun", None, tr, missingDataMode="skip") -############## -### Added by Dergan for calculation of RFD (9/14/07) ### -############## -## dur = 10 -## startt = 14 -## timelength = dur + startt -## rfd_tr = self.createTimeRange(startt, timelength, "Zulu") -## self.createFromScratchCmd(['RFD'], rfd_tr, 1, 1) -## self.callSmartTool("CalculateRFD", "RFD", None, rfd_tr, varDict) -## self.createFromScratchCmd(['RFDmax'], rfd_tr, 0, timelength) -## self.callSmartTool("RFDmax", "RFDmax", None, rfd_tr, varDict) +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Fire_Wx_First.py +# +# Author: dtomalak +# ---------------------------------------------------------------------------- + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Populate"] +import LogStream, time +from math import * + +# The ToolList is optional, but recommended, if you are calling +# Smart Tools from your Script. +# If present, it can be used to show which grids will be +# modified by the Script. + + +##ToolList = [("T_Tool", "T"), +## ("PoP_Tool", "PoP"), +## ("Wind_Tool", "Wind"), +## ] + +### If desired, Set up variables to be solicited from the user: +## If your script calls Smart Tools, this VariableList should +## cover all the variables necessary for the tools. 
+ +VariableList = [ + ("Model" , "NAM12", "radio", ["NAM12","NAM40", "GFS40"]), + ] +#VariableList.append(("Extrapolate:", "Forward in Time", "radio", ["Forward in Time", "Backward in Time"])) +#VariableList.append(("Movement Speed (Kts):", "15", "numeric")) +#VariableList.append(("This is just a label", "", "label")) +#VariableList.append(("5% Sky Cover threshold at RH percentage:", 60., "scale", [44., 74.],2.0)) + +import time +import AbsTime +import SmartScript +## For documentation on the available commands, +## see the SmartScript Utility, which can be viewed from +## the Edit Actions Dialog Utilities window + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, editArea, timeRange, varDict): + self.loadWEGroup("FireWx") + fwModel = varDict["Model"] + self.saveObject("FireModel", fwModel, "ModelType") + + hour = int(time.strftime('%H', time.gmtime(time.time()))) + if hour > 3 and hour < 16: + starttime = 12 + endtime = 61 + elif hour < 3: + startime = 0 + endtime = 61 + else: + starttime = 12 + endtime = 73 + + # Clean up old data + print('Create_Fire_With_Smoke: Deleting old grids.') + tr = self.createTimeRange(starttime, endtime, mode="Zulu") + self.deleteCmd(['MaxRH', 'MinRH', 'Haines', 'MixHgt', 'TransWind', 'VentRate'],tr) + + if fwModel == "NAM12" or fwModel == "NAM40": + print('Create_Fire_With_Smoke: Creating new scratch grids.') + self.createFromScratchCmd(['MaxRH', 'MinRH'], tr) + self.createFromScratchCmd(['MixHgt'], tr, 3, 1) + self.createFromScratchCmd(['TransWind'], tr, 3, 1) + self.createFromScratchCmd(['VentRate'], tr, 3, 1) + self.createFromScratchCmd(['MixHgt'], tr, 3, 1) + self.createFromScratchCmd(['Haines'], tr, 3, 1) +# self.createFromScratchCmd(['TransWindAve'], tr, 3, 1) +# self.createFromScratchCmd(['VentRateAve'], tr, 3, 1) +# self.createFromScratchCmd(['HainesAve'], tr, 3, 1) +# self.createFromScratchCmd(['HrsOfSun'], tr, 12, 12) + 
self.createFromScratchCmd(['LAL'], tr, 6, 6) +### self.createFromScratchCmd(['RFD'], tr, 3, 1) + + else: + print('Create_Fire_With_Smoke: Creating new scratch grids.') + self.createFromScratchCmd(['MaxRH', 'MinRH'], tr) + self.createFromScratchCmd(['MixHgt'], tr, 6, 1) + self.createFromScratchCmd(['TransWind'], tr, 6, 1) + self.createFromScratchCmd(['VentRate'], tr, 6, 1) +# self.createFromScratchCmd(['MixHgtAve'], tr, 6, 1) +# self.createFromScratchCmd(['TransWindAve'], tr, 6, 1) +# self.createFromScratchCmd(['VentRateAve'], tr, 6, 1) + self.createFromScratchCmd(['Haines'], tr, 6, 1) +# self.createFromScratchCmd(['HrsOfSun'], tr, 12, 12) + self.createFromScratchCmd(['LAL'], tr, 6, 6) +### self.createFromScratchCmd(['RFD'], tr, 3, 1) + + # QC the dewpoint, then populate the RH, then MinRH MaxRH + print('Create_Fire_With_Smoke: Running Td and RH tools.') +## self.callSmartTool("Td_SmartTool", "Td", None, tr, missingDataMode="skip") +## self.callSmartTool("RHTool","RH", None, tr, missingDataMode="create") + self.callSmartTool("MaxRH_Tool","MaxRH", None, tr, missingDataMode="skip") + self.callSmartTool("MinRH_Tool","MinRH", None, tr, missingDataMode="skip") + + + # Populate the smoke parameters + print('Create_Fire_With_Smoke: Starting Haines.') + self.callSmartTool("Haines", "Haines", None, tr, missingDataMode="skip") + print('Create_Fire_With_Smoke: Starting Mixing Hgt.') + self.callSmartTool("MixHgt_FWF", "MixHgt", None, tr, missingDataMode="skip") + print('Create_Fire_With_Smoke: Starting Transport Winds.') + self.callSmartTool("TransWind_NoVar", "TransWind", None, tr, missingDataMode="skip") + #print 'Create_Fire_With_Smoke: Starting Vent Rate.' 
+ self.callSmartTool("VentRate", "VentRate", None, tr, missingDataMode="skip") + print('Create_Fire_With_Smoke: Starting LAL.') + self.callSmartTool("LAL_Tool", "LAL", None, tr, missingDataMode="create") +# self.callSmartTool("CalculateRFD", "RFD", None, tr, missingDataMode="create") + + # Interpolate the smoke parameters to hourly grids + print('Create_Fire_With_Smoke: Interpolating Grids.') + self.interpolateCmd(["MixHgt", "VentRate", "TransWind", "Haines"], tr, "GAPS", "SYNC", interval = 1) + + # Populate the afternoon average smoke parameters for the ZFP +# print 'Create_Fire_With_Smoke: Starting Average Mixing Hgt.' +# self.callSmartTool("MixHgtAve", "MixHgtAve", None, tr, missingDataMode="skip") +# print 'Create_Fire_With_Smoke: Starting Average Transport Winds.' +# self.callSmartTool("TransWindAve", "TransWindAve", None, tr, missingDataMode="skip") +# print 'Create_Fire_With_Smoke: Starting Average Vent Rate.' +# self.callSmartTool("VentRateAve", "VentRateAve", None, tr, missingDataMode="skip") + + + # Calculate the hours of sun +# print 'Create_Fire_With_Smoke: Starting hours of sun.' 
+# def execute(self, editArea, timeRange, varDict): +# new_timeRange = self.createTimeRange(06, 78, mode="Zulu") +# self.createFromScratchCmd(['HrsOfSun'], new_timeRange, 24, 24) +# self.callSmartTool("Calc_Hours_of_Sun","HrsOfSun", None, tr, missingDataMode="skip") +############## +### Added by Dergan for calculation of RFD (9/14/07) ### +############## +## dur = 10 +## startt = 14 +## timelength = dur + startt +## rfd_tr = self.createTimeRange(startt, timelength, "Zulu") +## self.createFromScratchCmd(['RFD'], rfd_tr, 1, 1) +## self.callSmartTool("CalculateRFD", "RFD", None, rfd_tr, varDict) +## self.createFromScratchCmd(['RFDmax'], rfd_tr, 0, timelength) +## self.callSmartTool("RFDmax", "RFDmax", None, rfd_tr, varDict) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/GenerateCyclone.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/GenerateCyclone.py index 0624026a77..010cd2ae81 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/GenerateCyclone.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/GenerateCyclone.py @@ -1,1277 +1,1277 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# GenerateCyclone -# -# Author: lefebvre -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify -MenuItems = ["Populate"] - -VariableList = [("ProductID:", "", "alphaNumeric"), - ("Background\nModel:", "Fcst", "radio", ["GFS80", "NAM12", "Fcst"]), - ("Number of\n Pie Slices:", "20", "radio", ["4", "12", "20", "36", "72"]), -# ("Time Interval\n(hours):", "1", "radio", ["1", "3", "6", "12"]), - ("Make Grids over\nSelected Time Only:", "No", "radio", ["Yes", "No"]), - ("Decrease Wind over Land by (%):", 0, "scale", [-20, 50], 1), - ] - -import TimeRange -import AbsTime - -import SmartScript - -import string, time -import Exceptions - -from numpy import * - - -## For available commands, see SmartScript - -class TCMDecoder: - def __init__(self): - self.pos = 0 - # key words in TCM products from NCEP - self.keyWordDict = {"FORECAST VALID" : self.decodeWindForecast, - "TPC/NATIONAL HURRICANE CENTER" : self.decodeAltFilename, - "CENTER LOCATED NEAR" : self.decodeCenterLocation, - "CENTER LOCATED INLAND NEAR" : self.decodeCenterLocation, - "MAX SUSTAINED WINDS" : self.decodeMaxSustainedWinds, - "MAX WIND" : self.decodeMaxWind, - "EYE DIAMETER" : self.decodeEyeDiameter, - "KT..." 
: self.decodeRadii, - # key words for JTWC products - "WTPN" : self.decodeJTWCProductTime, - "WARNING POSITION:" : self.decodeJTWCTimeCenter, - "VALID AT:" : self.decodeJTWCWindForecast, - "RADIUS OF" : self.decodeJTWCRadii, - " ---" : self.endJTWCWindForecast, - "REMARKS:" : self.stopDecodingJTWC, - } - - self.fcstList = [] # a place to store all of the forecasts - - self.text = [] # the text product - - self.currentFcst = {} # the current forecast we are docoding - - self.baseProductTime = 0 - - self.foundEyeDiameter = 0 - - self.altFilename = "" - - def calcEyeDiameter(self, center, maxWind): - lat = center[0] # latitude in degrees - maxWindC = maxWind / 1.944 # convert to meters per second - rmw = 46.29 * exp(-0.0153 * maxWindC + 0.0166 * lat) - - # convert to diameter and convert from km to nm - ed = rmw * 2.0 / 1.852 - return ed - - def stripText(self): - endStr = chr(13) + chr(13) + chr(10) - for i in xrange(len(self.text)): - self.text[i] = string.replace(self.text[i], endStr, "") - return - - def getFcstList(self): - return self.fcstList - - def getBaseProductTime(self): - return self.baseProductTime - - def getAltInfoFilename(self): - return self.altFilename - - def currentLine(self): - return self.text[self.pos] - - def nextLine(self): - self.pos += 1 - if self.pos < len(self.text): - return self.text[self.pos] - else: - return "" - - def monthNum(self, monthStr): - monthList = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", - "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"] - - try: - return monthList.index(monthStr) + 1 - except ValueError: - return 0 - - def convertBaseTime(self, timeStr): - # timeStr format: "HHMMZ DAY MON DD YYYY" - - # extract time parts from the str - strList = string.split(timeStr) - if len(strList) != 5: - print "Invalid time string:", timeStr - print "Format should be of the form HHMMZ DAY MON DD YYYY" - return - - hour = int(timeStr[0:2]) - minute = int(timeStr[2:4]) - monthStr = strList[2] - month = self.monthNum(monthStr) - day = 
int(strList[3]) - year = int(strList[4]) - - # time.mktime returns time in seconds but in local time - baseTime = time.mktime((year, month, day, hour, minute, 0, 0, 0, 0)) - - # Adjust to UTC - diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) - - # subtract timeZone and round to the nearest hour - roundedTime = int((baseTime - diffTime) / 3600) * 3600 - - return roundedTime - - def convert_ddhhmm(self, ddhhmmStr, baseTime): - - # remove the slash if present - ddhhmmStr = string.replace(ddhhmmStr, "/", "") - - if baseTime == 0: - baseTime = time.time() - - # extract the time parts - dayStr = ddhhmmStr[0:2] - hourStr = ddhhmmStr[2:4] - minStr = ddhhmmStr[4:6] - day = int(dayStr) - hour = int(hourStr) - minute = int(minStr) - tupleTime = time.gmtime(baseTime) - year = tupleTime[0] - month = tupleTime[1] - # see if we crossed over to a new month - if tupleTime[2] > day: - month += 1 - if month > 12: - month = 1 - year += 1 - - newTuple = (year, month, day, hour, minute, tupleTime[5], - tupleTime[6], tupleTime[7], tupleTime[8]) - - secondsTime = time.mktime(newTuple) - # Adjustment to UTC - diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) - return secondsTime - diffTime # subtract timeZone - - def decodeProductTime(self): - # Time of the product found on the next line - timeStr = self.nextLine() - - # sanity check for the time string - hhmm = timeStr[0:4] - for c in hhmm: - if not c in string.digits: - return - - baseTime = self.convertBaseTime(timeStr) - self.baseProductTime = baseTime - - return - - def decodeAltFilename(self): - nameStr = self.currentLine() - parts = string.split(nameStr) - - self.altFilename = parts[-1] # grab the last string token - return - - def decodeCenterLocation(self): - locStr = self.currentLine() - # check for the repeat center....don't want this one - if string.find(locStr, "REPEAT") >= 0: - return - - keyWord = "NEAR" - pos = string.find(locStr, keyWord) - if pos > -1: # found it - locStr = 
locStr[pos + len(keyWord):] - tokenList = string.split(locStr) - if len(tokenList) >= 2: - lat = self.decodeLatLonToken(tokenList[0]) - lon = self.decodeLatLonToken(tokenList[1]) - - if len(tokenList) > 3: # grab the time - validTime = self.convert_ddhhmm(tokenList[3], self.baseProductTime) - # New fcst (analysis actually) - self.currentFcst = {} - self.currentFcst['validTime'] = validTime - self.currentFcst['centerLocation'] = (lat, lon) - self.currentFcst['radii'] = {} - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - else: - print "Invalid Center Location string:", locStr - return - - def decodeMaxSustainedWinds(self): - keyWord = "MAX SUSTAINED WINDS" - windStr = self.currentLine() - pos = string.find(windStr, keyWord) - if pos > -1: # found it - windList = [] - tokenList = string.split(windStr) - for i in xrange(len(tokenList)): - if string.find(tokenList[i], "KT") >= 0: - windList.append(float(tokenList[i - 1])) - - # Sometimes there is no max wind/gust reported - if windList == []: - print "No Max Sustained Winds or Gusts found." - return - - # store the max wind - self.currentFcst["maxWind"] = windList[0] - self.currentFcst["maxGust"] = windList[1] - - # if we have a center location and a max wind we can calc - # the eye diameter - if self.currentFcst.has_key('centerLocation') and \ - self.currentFcst.has_key('maxWind'): - # if it's zero it's not in the product and the user didn't - # change it, so calculate it based on the Willoughby formula - if self.currentFcst.has_key('eyeDiameter') and \ - self.currentFcst['eyeDiameter'] == 0: - self.currentFcst['eyeDiameter'] = self.calcEyeDiameter( - self.currentFcst['centerLocation'], - self.currentFcst['maxWind']) - else: # otherwise use what's been defined or read from the text - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - - return - - def decodeMaxWind(self): - str = self.currentLine() - str = string.replace(str, '.', ' ') # remove ... 
- tokenList = string.split(str) - if len(tokenList) >= 6: - maxWind = float(tokenList[2]) - maxGust = float(tokenList[5]) - - # store in current fcst - self.currentFcst["maxWind"] = maxWind - self.currentFcst["maxGust"] = maxGust - - # if we have a center location and a max wind we can calc - # the eye diameter - if self.currentFcst.has_key('centerLocation') and \ - self.currentFcst.has_key('maxWind'): - # if it's zero it's not in the product and the user didn't - # change it, so calculate it based on the Willoughby formula - if self.currentFcst.has_key('eyeDiameter') and \ - self.currentFcst['eyeDiameter'] == 0: - self.currentFcst['eyeDiameter'] = self.calcEyeDiameter( - self.currentFcst['centerLocation'], - self.currentFcst['maxWind']) - else: # otherwise use what's been defined or read from the text - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - - return - - def decodeRadii(self): - if self.currentFcst == {}: # can't continue - return - - str = self.currentLine() - str = string.replace(str, '.', ' ') # remove ... 
- tokenList = string.split(str) - # check for KT in the second slot - if len(tokenList) < 4 or tokenList[1] != "KT": - print "Invalid TCM wind string:", str - return - radiiWindValue = float(tokenList[0]) - dirList = ["NE", "SE", "SW", "NW"] - radiusList = [] - for token in tokenList: - for d in dirList: - pos = string.find(token, d) - if pos >= 0: - radiusStr = token[:pos] - radius = float(radiusStr) - radiusList.append(radius) - - - if len(radiusList) == 0: - print "Error decoding radii in string:", str - # store the radii info - if not self.currentFcst.has_key("radii"): - self.currentFcst['radii'] = {} - - self.currentFcst['radii'][radiiWindValue] = radiusList - - return - - def decodeWindForecast(self): - # if we're decoding a new forecast, save the old one first - if self.currentFcst != {}: - self.fcstList.append(self.currentFcst) - self.currentFcst = {} # reset - - str = self.currentLine() - str = string.replace(str, '...', ' ') # remove ... - - tokenList = string.split(str) - # decode the validTime - validTime = self.convert_ddhhmm(tokenList[2], self.baseProductTime) - if self.baseProductTime == 0: - self.baseProductTime = validTime - # decode the center location - if len(tokenList) >= 5: - lat = self.decodeLatLonToken(tokenList[3]) - lon = self.decodeLatLonToken(tokenList[4]) - # If we can't decode the lat or lon it's probably an outlook - # with no guidance so just return - if lat == None or lon == None: - print "Failed to decode latStr:", lat, "lonStr:", lon - return - - # initialize a new forecast and begin filling values - self.currentFcst = {} - self.currentFcst['validTime'] = validTime - self.currentFcst['centerLocation'] = (lat, lon) - self.currentFcst['radii'] = {} - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - - return - - def decodeEyeDiameter(self): - str = self.currentLine() - - tokenList = string.split(str) - diameter = int(tokenList[2]) - - self.currentFcst['eyeDiameter'] = diameter - - # Since we found it in the procuct, set 
the default diameter - self.defaultEyeDiameter = diameter - self.foundEyeDiameter = 1 # mark that we found it - return - - def decodeTCMProduct(self, TCMProduct, eyeDiameter): - self.text = TCMProduct - self.pos = 0 - self.fcstList = [] - self.defaultEyeDiameter = eyeDiameter - - self.stripText() - while self.pos < len(TCMProduct): - line = self.currentLine() - for k in self.keyWordDict.keys(): - if string.find(line, k) > -1: - self.keyWordDict[k]() - break - self.pos += 1 - - # store the last forecast in the list of forecasts - if self.currentFcst != {}: - self.fcstList.append(self.currentFcst) - self.currentFcst = {} # reset - - return - - def decodeLatLonToken(self, latLonStr): - dirList = ['N', 'S', 'E', 'W'] - for d in dirList: - pos = string.find(latLonStr, d) - if pos >= 0: - try: - value = float(latLonStr[0:pos]) - if d == 'S' or d == 'W': - value = -value # flip the numeric sign - return value - except: - # it was not decodable (not numbers) - print "Failed to decode lat/lon token:", latLonStr - return None - - # undecodable latLon for some reason - return None - - def decodeJTWCProductTime(self): - line = self.currentLine() - tokenList = string.split(line) - ddhhmmStr = tokenList[2] - self.baseProductTime = self.convert_ddhhmm(ddhhmmStr, 0) - - self.baseProductTime = int(self.baseProductTime / 3600) * 3600 - return None - - def decodeJTWCTimeCenter(self): - line = self.nextLine() - tokenList = string.split(line) - if len(tokenList) >= 5: - dateTimeStr = tokenList[0][0:6] - latStr = tokenList[3] - lonStr = tokenList[4] - else: - print "Error decoding JTWC Time/Center string:", line - print "Format should be: DDHHMMZx --- NEAR Lat Lon" - return - - # could be None - lat = self.decodeLatLonToken(latStr) - lon = self.decodeLatLonToken(lonStr) - if lon > 0: - lon -= 360.0 - productTime = self.convert_ddhhmm(dateTimeStr, self.baseProductTime) - - # make a new fcst object to store the analysis - self.currentFcst = {} - self.currentFcst['validTime'] = productTime 
- self.currentFcst['centerLocation'] = (lat, lon) - self.currentFcst['radii'] = {} - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - return - - def decodeJTWCWindForecast(self): - line = self.nextLine() - - tokenList = string.split(line) - - # Grab everything just to the left of the first 'Z' - zPos = string.find(tokenList[0], 'Z') - if zPos >= 0: - timeStr = tokenList[0][0:zPos] - validTime = self.convert_ddhhmm(timeStr, self.baseProductTime) - else: - print "couldnt find Z in timeStr:", line - return - - latStr = tokenList[2] - lonStr = tokenList[3] - lat = self.decodeLatLonToken(latStr) - lon = self.decodeLatLonToken(lonStr) - if lon > 0: - lon -= 360.0 - - # make a new currentFcst and store the info - self.currentFcst = {} - self.currentFcst['validTime'] = validTime - self.currentFcst['centerLocation'] = (lat, lon) - self.currentFcst['radii'] = {} - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - return - - def decodeJTWCRadii(self): - line = self.currentLine() - radList = [] - windSpeed = 0 - while string.find(line, "---") == -1 and line != "": - tokenList = string.split(line) - if string.find(line, "RADIUS") >= 0: # it's the first line - # check to see if we need to store the radii first - if radList != []: # we decoded some already - self.currentFcst['radii'][windSpeed] = radList - radList = [] - - # extract the windSpeed for these radii - windSpeed = float(tokenList[2]) - if string.find(line, "QUADRANT") == -1: # no "QUADRANT" found - radius = float(tokenList[6]) - radList = [radius, radius, radius, radius] - else: # QUADRANT found - radius = float(tokenList[6]) - radList = [radius] - else: # no RADIUS found so maybe a QUADRANT line - if string.find(line, "QUADRANT") >= 0: - radius = float(tokenList[0]) - radList.append(radius) - - line = self.nextLine() - - # save the last radii info - if radList != []: - self.currentFcst['radii'][windSpeed] = radList - - # save the whole forecast in the list - self.fcstList.append(self.currentFcst) 
- self.currentFcst = {} - - return - - def endJTWCWindForecast(self): - - if self.currentFcst != {}: - self.fcstList.append(self.currentFcst) - - self.currentFcst = {} - return - - def stopDecodingJTWC(self): - line = "ZZZZZ" - while line != "": - line = self.nextLine() - return - -# end class TCMDecoder - -# begin class CircleEA -# This class helps make circular edit areas and quadrants thereof. -class CircleEA(SmartScript.SmartScript): - def __init__(self, latGrid, lonGrid, center, slices): - pi = 3.1459 - RadsPerDeg = 2 * pi / 360 - cosLatGrid = cos(latGrid * RadsPerDeg) - self.xDist = (lonGrid - center[1]) * 111.1 * cosLatGrid - self.yDist = (latGrid - center[0]) * 111.1 - self.distGrid = sqrt(pow(self.xDist, 2)+ pow(self.yDist, 2)) - - self.tanGrid = arctan2(-self.xDist, -self.yDist) - # mask off all but the specified quadrant. - self.quadList = [] - for quad in xrange(1, slices + 1): - minValue = -pi + (quad - 1) * 2 * pi / slices - maxValue = -pi + quad * 2 * pi / slices - - quadrant = logical_and(greater_equal(self.tanGrid, minValue), - less(self.tanGrid, maxValue)) - self.quadList.append(quadrant) - - return - - # Return an edit area for just one quadrant. 
- # By convention quadrant numbering starts at 1 (due North) and - # progresses clockwise by one slice increment - def getQuadrant(self, quad, radius): - # trim the mask beyond the specified radius - radiusMask = less_equal(self.distGrid, radius) - - quadrant = logical_and(radiusMask, self.quadList[quad - 1]) - return quadrant - - def getDistanceGrid(self): - return self.distGrid - - def getXYDistGrids(self): - return self.xDist, self.yDist - -# end class CircleEA ------------------------------------------------------- - - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - self.fcstWindGrids = {} - - # Use this method if you want to get your product - # from a simple text file - def getTextProductFromFile(self, filename): - try: - f = file(filename, 'r') - except: - print filename, "not found when getting product from file." - return [] - textList = [] - line = f.readline() - textList.append(line) - while line != "": - line = f.readline() - textList.append(line) - f.close() - - return textList - - # Reads decodes depression information using the specified product. 
- def decodeDepressionInfo(self, textProduct): - - for line in textProduct: - parts = string.split(line, ",") - if len(parts) < 20: - continue - if parts[4] == " CARQ" and parts[5] == " 0": - outsideRad = int(parts[18]) - rmw = int(parts[19]) - return (rmw, outsideRad) - - return (0, 0) - - def injectDepressionInfo(self, fcstList, rmw, outsideRad): - if rmw == 0 and outsideRad == 0: - return fcstList - - eyeDiam = (rmw + 8.0) * 2.0 - for f in fcstList: - f['eyeDiameter'] = eyeDiam - # add the maxWind radius - maxWind = f['maxWind'] - maxWindRadii = [rmw, rmw, rmw, rmw] - f['radii'][maxWind] = maxWindRadii - - # Make an arbitrary radius at the outer most isobar - outSpeed = maxWind / 3.0 # somewhat arbitrary - f['radii'][outSpeed] = [outsideRad, outsideRad, outsideRad, outsideRad] - - return fcstList - - def getWEInventory(self, modelName, WEName, level): - yesterday = self._gmtime() - (2 * 24 * 3600) # two days ago - later = self._gmtime() + 10 * 24 * 3600 # 10 days from now - allTimes = TimeRange.TimeRange(yesterday, later) - parm = self.getParm(modelName, WEName, level); - inv = parm.getGridInventory(allTimes.toJavaObj()) - trList = [] - for gd in inv: - tr = TimeRange.TimeRange(gd.getGridTime()) - trList.append(tr) - return trList - - # returns a wind grid from the specified model most closely matched in - # time - def getClosestWindGrid(self, modelName, timeTarget): - t1 = AbsTime.AbsTime(0) - t2 = AbsTime.current() + 300 * 24 * 3600 # 300 days out - timeRange = TimeRange.TimeRange(t1, t2) - siteID = self.getSiteID() - if modelName == "Fcst": - level = "SFC" - elementName = "Wind" - else: - modelName = siteID + "_D2D_" + modelName - level = "FHAG10" - elementName = "wind" - - topo = self.getTopo() - calmGrid = self.makeWindGrid(0.0, 0.0, topo.shape) - gridInfo = [] - try: - gridInfo = self.getGridInfo(modelName, elementName, level, timeRange) - except Exceptions.EditActionError: - print "No grids found for model/level:", modelName, level - if 
string.find(modelName, "GFS") >= 0: - modelName = siteID + "_D2D_" + "AVN" - level = "BL030" - try: - gridInfo = self.getGridInfo(modelName, elementName, level, timeRange) - except Exceptions.EditActionError: - print "No grids found for model", modelName, "level:", level - print "Using calm grid." - return calmGrid - - if len(gridInfo) == 0: - print "No grid info found for:", modelName, "at:", timeRange - print "No grid info...Using calm grid." - return calmGrid - - minDiff = 3600 * 24 * 365 # just a large number - gridIndex = -1 - tr = None - # figure out which grid is closest in time - for i in xrange(len(gridInfo)): - gridTime = gridInfo[i].gridTime() - gTime = gridTime.startTime().unixTime() - diff = abs(gTime - timeTarget) - if diff < minDiff: - tr = gridInfo[i].gridTime() - minDiff = diff - if diff == 0: - break - - if minDiff > 3 * 3600: - print "Returning calm grid as background." - return calmGrid - - grid = calmGrid - # fetch the grid - if modelName == "Fcst": - if self.fcstWindGrids.has_key(tr): - grid = self.fcstWindGrids[tr] - else: - # hunt down any grid that overlaps the timeTarget - for gridTR in self.fcstWindGrids.keys(): - if gridTR.contains(AbsTime.AbsTime(timeTarget)): - grid = self.fcstWindGrids[gridTR] - else: - grid = self.getGrids(modelName, elementName, level, tr, mode="First") - grid = (grid[0] * 1.944, grid[1]) - - return grid - - # makes a direction grid where winds blow counter-clockwise about - # the specified center. - def makeDirectionGrid(self, latGrid, lonGrid, latCenter, lonCenter): - cycWt = 0.7 # cyclonic circulation weight - convWt = 0.3 # convergence weight - cycU = -(latGrid - latCenter) # pure counter-clockwise circulation - cycV = lonGrid - lonCenter - convU = -cycV # pure convergence - convV = cycU - u = cycU * cycWt + convU * convWt - v = cycV * cycWt + convV * convWt - mag, dir = self.UVToMagDir(u, v) - - return dir - - # interpolates radii information based on the specified info. 
- # returns a new radii - def interpRadii(self, t1, t2, newTime, f1Radii, f2Radii): - # set the list of radii based on the first set: f1Radii - radiiList = f1Radii - - newRadii = {} - for r in radiiList: - quadList = [] - for i in xrange(4): # always and only 4 quadrants at this point - r1 = f1Radii[r][i] - if f2Radii.has_key(r): - r2 = f2Radii[r][i] - else: - msg = "Wind forecast missing wind value: " + str(r) + " knots. " - msg += "Recommend defining wind radii for " + str(r) + " knots." - ##self.statusBarMsg(msg, "S") - r2 = r1 # just use the f1 value so we can keep going - radius = r1 + (r2 - r1) * (newTime - t1) / (t2 - t1) - quadList.append(radius) - newRadii[r] = quadList - - return newRadii - - # interpolates the wind forecasts inbetween the two specified forecasts. - # interval is assumed to be specified in hours. - # returns a new list of forecasts with f1 at the front of the list - # and f2 not present at all in the list. - def interpolateWindFcst(self, f1, f2, interval): - intSecs = 3600 * interval - t1 = f1['validTime'] - t2 = f2['validTime'] - # Just return the first fcst if the interval is too big - if t2 - t1 <= intSecs: - return [f1] - - f1Lat = f1['centerLocation'][0] - f1Lon = f1['centerLocation'][1] - f2Lat = f2['centerLocation'][0] - f2Lon = f2['centerLocation'][1] - f1Eye = f1['eyeDiameter'] - f2Eye = f2['eyeDiameter'] - tDiff = f2['validTime'] - f1['validTime'] - f1MaxWind = f1['maxWind'] - f2MaxWind = f2['maxWind'] - timeSlots = int(tDiff / intSecs) - dLat = (f2Lat - f1Lat) / timeSlots - dLon = (f2Lon - f1Lon) / timeSlots - dEye = (f2Eye - f1Eye) / timeSlots - dMaxWind = (f2MaxWind - f1MaxWind) / timeSlots - f1Radii = f1['radii'] - f2Radii = f2['radii'] - fcstList = [f1] # include the first fcst in the list - for i in xrange(1, timeSlots): - newTime = t1 + (i * intSecs) - newLat = f1Lat + (i * dLat) - newLon = f1Lon + (i * dLon) - newEye = f1Eye + (i * dEye) - newMaxWind = f1MaxWind + (i * dMaxWind) - newRadii = self.interpRadii(t1, t2, 
newTime, f1Radii, f2Radii) - f = {} - f['centerLocation'] = (newLat, newLon) - f['eyeDiameter'] = newEye - f['validTime'] = newTime - f['maxWind'] = newMaxWind - f['radii'] = newRadii - fcstList.append(f) - - return fcstList - - # Smooths the specified grid by the specified factor - # With factor == 3, 3x3 smooth, factor == 5 5x5 smooth, etc. - # Even factors (4, 6, 8,...) round up to the next odd value - # If factors <3 are specified, the unmodified grid is returned. - def smoothGrid(self, grid, factor): - # factors of less than 3 are useless or dangerous - if factor < 3: - return grid - st = time.time() - half = int(factor)/ 2 - sg = zeros(grid.shape,float64) - count = zeros(grid.shape,float64) - gridOfOnes = ones(grid.shape,float64) - for y in xrange(-half, half + 1): - for x in xrange(-half, half + 1): - if y < 0: - yTargetSlice = slice(-y, None, None) - ySrcSlice = slice(0, y, None) - if y == 0: - yTargetSlice = slice(0, None, None) - ySrcSlice = slice(0, None, None) - if y > 0: - yTargetSlice = slice(0, -y, None) - ySrcSlice = slice(y, None, None) - if x < 0: - xTargetSlice = slice(-x, None, None) - xSrcSlice = slice(0, x, None) - if x == 0: - xTargetSlice = slice(0, None, None) - xSrcSlice = slice(0, None, None) - if x > 0: - xTargetSlice = slice(0, -x, None) - xSrcSlice = slice(x, None, None) - - target = [yTargetSlice, xTargetSlice] - src = [ySrcSlice, xSrcSlice] - sg[target] += grid[src] - count[target] += gridOfOnes[src] - return sg / count - - def printFcst(self, f, baseTime): - print "==============================================================" - print "Time:", time.asctime(time.gmtime(f['validTime'])), - print "LeadTime:", (f['validTime'] - baseTime) / 3600 + 3 - print "Center:", f['centerLocation'] - print "Eye:", f['eyeDiameter'] - if f.has_key('maxWind'): - print "Max Wind:", f['maxWind'] - radKeys = f['radii'].keys() - sort(radKeys) - print "RADII:" - for r in radKeys: - print r, "kts:", f['radii'][r] - - - # Smooths the direction grid without 
regard to the magnitude - def smoothDirectionGrid(self, dir, factor): - mag = ones(dir.shape, float) # 1.0 everywhere - u, v = self.MagDirToUV(mag, dir) - u = self.smoothGrid(u, factor) - v = self.smoothGrid(v, factor) - mag, dir = self.UVToMagDir(u, v) - return dir - - def makeWindGrid(self, mag, dir, gridShape): - mag = ones(gridShape, float) * mag - dir = ones(gridShape, float) * dir - return mag, dir - - def decreaseWindOverLand(self, grid, fraction, Topo): - mask = greater(Topo, 0.0) - grid = where(mask, grid * fraction, grid) - return grid - - def getTimeConstraintDuration(self, element): - return self.getParm("Fcst", element, "SFC").getGridInfo()\ - .getTimeConstraints().getDuration() - - # Blends the specified grid together - def blendGrids(self, windGrid, bgGrid): - - # make a mask around the edge - windMag = windGrid[0] - backMag = bgGrid[0] - mag = windMag.copy() - - # make a weightingGrid - lower = average(backMag) - # calculate the average value over the area where blending will occur - - upper = lower + 10.0 - - ringMask = logical_and(less(mag, upper), greater(mag, lower)) - - avgGrid = where(ringMask, backMag, float32(0.0)) - - # a nearly calm grid means no blending required - if lower < 1.0: - return windGrid - - wtGrid = greater(mag, upper).astype(float32) - ringMask = logical_and(less(mag, upper), greater(mag, lower)) - wtGrid = where(ringMask, (mag - lower) / (upper - lower), wtGrid) - wtGrid[less(mag, lower)]= 0.0 - wtGrid = self.smoothGrid(wtGrid, 5) - - # calculate the new mag grid - mag *= wtGrid - mag += backMag * (1 - wtGrid) - - # calculate direction grid - onesGrid = ones_like(mag) - gridU, gridV = self.MagDirToUV(onesGrid, windGrid[1]) - bgU, bgV = self.MagDirToUV(onesGrid, bgGrid[1]) - gridU *= wtGrid - gridU += bgU * (1 - wtGrid) - gridV *= wtGrid - gridV += bgV * (1 - wtGrid) - - # get the dirGrid and toss out the magnitude - magGrid, dirGrid = self.UVToMagDir(gridU, gridV) - - return mag, dirGrid - - def getLatLonGrids(self): - # Try 
to get them from the fcst database to save time - startTime = AbsTime.current() - 86400 - endTime = AbsTime.current() + 86400 # 1 day - timeRange = TimeRange.TimeRange(startTime, endTime) - latGrid = self.getGrids("Fcst", "latGrid", "SFC", timeRange, - mode = "First", noDataError = 0) - lonGrid = self.getGrids("Fcst", "lonGrid", "SFC", timeRange, - mode = "First", noDataError = 0) - if latGrid != None and lonGrid != None: - return latGrid, lonGrid - - # make the latGrid and lonGrid - latGrid, lonGrid = SmartScript.SmartScript.getLatLonGrids(self) - - # Temporarliy save them in the forecast database - startTime = AbsTime.current() - endTime = startTime + 86400 * 7 # 7 days - timeRange = TimeRange.TimeRange(startTime, endTime) - self.createGrid("Fcst", "latGrid", "SCALAR", latGrid, timeRange, - descriptiveName=None, timeConstraints=None, - precision=1, minAllowedValue=0.0, - maxAllowedValue=90.0) - - self.createGrid("Fcst", "lonGrid", "SCALAR", lonGrid, timeRange, - descriptiveName=None, timeConstraints=None, - precision=1, minAllowedValue=-360.0, - maxAllowedValue=180.0) - - return latGrid, lonGrid - - # This method interpolates the specified radii in rDict to the - # number of slices specified in pieSlices. This adds more angular - # resolution to the wind forecast which typically comes with 4 slices. 
- def interpolateQuadrants(self, rDict, pieSlices): - # make sure we have something to do first - if pieSlices <= 4: - return rDict - - newDict = {} - for k in rDict.keys(): - rList = rDict[k] # fetch the list of radii - - interpFactor = pieSlices / len(rList) - newList = [] - for i in xrange(-1, len(rList) -1): - minVal = rList[i] - maxVal = rList[i + 1] - dVal = (maxVal - minVal) / interpFactor - for f in xrange(interpFactor): - radius = minVal + dVal * f - newList.append(radius) - - # Since we started with the NW quadrant we need to shift - # the list so that it starts at North to conform to convention - shift = int(pieSlices / 4) - shiftedList = newList[shift:] - shiftedList += newList[:shift] - newDict[k] = shiftedList - return newDict - - # Makes a Rankine Vortex wind speed grid that decreases exponentially - # from the known values at known radii. Inside the Radius of maximum - # wind the wind decreases linearly toward the center - def makeRankine(self, f, latGrid, lonGrid, pieSlices): - st = time.time() - rDict = f['radii'] - validTime = f['validTime'] - center = f['centerLocation'] - circleEA = CircleEA(latGrid, lonGrid, center, pieSlices) - - rDict = self.interpolateQuadrants(rDict, pieSlices) - - # get the distance grid and make sure it's never zero anywhere - distanceGrid = circleEA.getDistanceGrid() / 1.852 # dist in NM - distanceGrid[equal(distanceGrid, 0)] = 0.01 - - # make a grid into which we will define the wind speeds - grid = self.empty() - - # insert the maxWind radii - if f.has_key('maxWind'): - maxWind = f['maxWind'] - if f.has_key('eyeDiameter'): - maxRad = f['eyeDiameter'] / 2.0 + 8.0 - else: - print "Error --- no eye diameter found." - maxRad = 12.5 # half of default 25 nm eye diameter - - # add an entry that uses the max wind and radius - rDict[maxWind] = [maxRad] * pieSlices - - # make a list sorted by average radii value - wsList = rDict.keys() - - if len(wsList) == 0: - print "No radii found. Returning calm grid." 
- return (grid, grid) - - radList = [] - for ws in wsList: - rList = rDict[ws] - sum = 0 - for r in rList: - sum += r - average = sum / len(rList) - radList.append((average, ws)) - - radList.sort() - radList.reverse() - - wsList = [] - for rad, ws in radList: - wsList.append(ws) - - - maxRad, maxWindValue = radList[-1] - maxWindValue += 0.1 - rDict[maxWindValue] = [1.0] * pieSlices - wsList.append(maxWindValue) - - # for each rDict record and quadrant, make the grid one piece at a time - for i in xrange(len(wsList) - 1): - if not rDict.has_key(wsList[i]): - continue - radiusList = rDict[wsList[i]] - nextRadiusList = rDict[wsList[i + 1]] - for quad in xrange(len(radiusList)): - outSpeed = float(wsList[i]) - inSpeed = float(wsList[i + 1]) - outRadius = float(radiusList[quad]) - inRadius = float(nextRadiusList[quad]) - - # get the edit area for this quadrant - mask = circleEA.getQuadrant(quad + 1, outRadius * 1.852) - - # log10 and exp math functions are fussy about zero - if inSpeed == 0.0: - inSpeed = 0.1 - if outSpeed == 0.0: - outSpeed = 0.1 - if inRadius == 0.0: - inRadius = 0.1 - if outRadius == 0.0: - outRadius = 0.1 - - # no wind speed can never exceed the maximum allowable wind speed - if inSpeed > maxWind: - inSpeed = maxWind - if outSpeed > maxWind: - outSpeed = maxWind - - # don't bother with trivial cases - if inRadius < 2.0 and outRadius < 2.0: - continue - if inRadius > outRadius: - continue - - # calculate the exponent so that we exactly fit the next radius - denom = log10(inRadius / outRadius) - if denom == 0: - exponent = 1.0 - else: - exponent = (log10(outSpeed) - log10(inSpeed)) / denom - - # make sure the exponent behaves itself - if exponent > 10.0: - exponent = 10.0 - # inside RMW gets a linear slope to largest of max wind forecasts - if inRadius <= 1.0: - dSdR = (outSpeed - inSpeed) / (outRadius - inRadius) - grid = where(mask, inSpeed + (dSdR * distanceGrid), grid) - else: # outside RMW - grid = where(mask, inSpeed * power((inRadius / 
distanceGrid), exponent), - grid) - grid.clip(0.0, 200.0, grid) - - dirGrid = self.makeDirectionGrid(latGrid, lonGrid, center[0], center[1]) - - # clip values between zero and maxWind - grid.clip(0.0, maxWind, grid) - # apply the wind reduction over land - fraction = 1.0 - (self.lessOverLand / 100.0) - grid = self.decreaseWindOverLand(grid, fraction, self.elevation) - - return (grid, dirGrid) - - def execute(self, varDict, timeRange): - self.setToolType("numeric") - self.toolTimeRange = timeRange - - # define the default eye diameter for bulletins where they are missing - self.dialogEyeDiameter = 0.0 - - Topo = self.getTopo() - -## interval = int(varDict["Time Interval\n(hours):"]) - - tcDuration = self.getTimeConstraintDuration("Wind") - tcHours = int(tcDuration / 3600) # durations are expressed in seconds - # set the time interpolation interval to the duration - interval = tcHours - # get the product ID -## productList1 = varDict["Product to\ndecode:"] -## productList2 = varDict["Product to\n decode:"] -## productList1 = productList1 + productList2 # concatenate -## if len(productList1) != 1: -## self.statusBarMsg("Please select one TCM bulletin only.", "S") -## return None - -## productID = productList1[0] - - # special code for GUM since they do things just a little differently - siteID = self.getSiteID() - if siteID == "GUM": - productID = "GTW" + productID - - bgModelName = varDict["Background\nModel:"] - # If we're using the Fcst, grab all of the grids now - if bgModelName == "Fcst": - inv = self.getWEInventory("Fcst", "Wind", "SFC") - for tr in inv: - self.fcstWindGrids[tr] = self.getGrids("Fcst", "Wind", "SFC", - tr, mode="First") - - pieSlices = int(varDict["Number of\n Pie Slices:"]) - - self.lessOverLand = int(varDict["Decrease Wind over Land by (%):"]) - self.elevation = Topo - -## # Use this method to fetch a text product from a file -## fileName = "Your path and file name goes here" -## textProduct = self.getTextProductFromFile(fileName) -## 
productID = string.split(fileName, "/")[-1] - - # Use this method to fetch a product from the text database - productID = varDict["ProductID:"] - textProduct = self.getTextProductFromDB(productID) - if len(textProduct) < 5: - print productID, "could not be retrieved from text database." - return None - - decoder = TCMDecoder() - decoder.decodeTCMProduct(textProduct, self.dialogEyeDiameter) - fcstList = decoder.getFcstList() - print "Decoded:", len(fcstList), " forecasts." - - # Attempt to get the alternate info from a file or the textDB - altFileName = decoder.getAltInfoFilename() - - altFileName = "/home/eagle6/lefebvre/TPC/" + altFileName - - # get additional info if available - altProduct = self.getTextProductFromDB(altFileName) - -## ## use this version to fetch from a file -## altProduct = self.getTextProductFromFile(altFileName) - - rmw, outsideRad = (0, 0) # initialize - if len(altProduct) < 5: - print altProduct, "alternate info file could not be retrieved from text database." - else: - rmw, outsideRad = self.decodeDepressionInfo(altProduct) - - # Set the baseDecodedTime - validTime of first entry - 3 hours - if len(fcstList) > 0: - self.baseDecodedTime = fcstList[0]['validTime'] - 3 * 3600 - -## # See if the decoded fcst is close to the current time. This is needed -## # so the tool will work on archived data sets - selectionTROnly = 0 -## if abs(time.time() - self.baseDecodedTime) > 2 * 24 * 3600: # older than 2 days -## testMode = 1 - # restrict grids to the selected time period if option is selected. 
- restrictAnswer = varDict["Make Grids over\nSelected Time Only:"] - if restrictAnswer == "Yes": - selectionTROnly = 1 - - # push this info in the fcsts - fcstList = self.injectDepressionInfo(fcstList, rmw, outsideRad) - - # interpolate the wind forecasts we got from the decoder - print "Interpolating wind forecasts:" - selectedStartTime = self.toolTimeRange.startTime().unixTime() - selectedEndTime = self.toolTimeRange.endTime().unixTime() - interpFcstList = [] - for i in xrange(len(fcstList) - 1): - - newFcstList = self.interpolateWindFcst(fcstList[i], fcstList[i+1], - interval) - # if we've processed the last time segment, append the last forecast - if i == len(fcstList) - 2: - newFcstList.append(fcstList[-1]) - - # Make sure the fcst is within the selected time range - for f in newFcstList: - if (selectionTROnly and (f['validTime'] >= selectedStartTime and \ - f['validTime'] < selectedEndTime)) or not selectionTROnly: - interpFcstList.append(f) - - if len(fcstList) == 1: - interpFcstList = fcstList - - if len(interpFcstList) == 0: - self.statusBarMsg("No cyclone forecasts found within the Selected TimeRange", - "S") - else: - print "Generating", len(interpFcstList), "wind grids" - - # get the lat, lon grids - latGrid, lonGrid = self.getLatLonGrids() - - # make a grid for each interpolate forecast - gridCount = 0 - for f in interpFcstList: - windGrid = self.makeRankine(f, latGrid, lonGrid, pieSlices) - - validTime = int(f['validTime'] / 3600) * 3600 - bgGrid = self.getClosestWindGrid(bgModelName, validTime) - - if bgGrid is None: - print "Using calm background grid." 
- bgGrid = self.makeWindGrid(0.0, 0.0, latGrid.shape) - - grid = self.blendGrids(windGrid, bgGrid) - - start = AbsTime.AbsTime(int(validTime)) - timeRange = TimeRange.TimeRange(start, start + interval * 3600) - - name = "Wind" - self.createGrid("Fcst", name, "VECTOR", grid, timeRange, - precision=1, minAllowedValue=0.0, - maxAllowedValue=200.0) - - gridCount += 1 - print "GenerateCyclone tool:", productID, "- Generated",gridCount, \ - "out of", len(interpFcstList), "grids" - - return None +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# GenerateCyclone +# +# Author: lefebvre +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify +MenuItems = ["Populate"] + +VariableList = [("ProductID:", "", "alphaNumeric"), + ("Background\nModel:", "Fcst", "radio", ["GFS80", "NAM12", "Fcst"]), + ("Number of\n Pie Slices:", "20", "radio", ["4", "12", "20", "36", "72"]), +# ("Time Interval\n(hours):", "1", "radio", ["1", "3", "6", "12"]), + ("Make Grids over\nSelected Time Only:", "No", "radio", ["Yes", "No"]), + ("Decrease Wind over Land by (%):", 0, "scale", [-20, 50], 1), + ] + +import TimeRange +import AbsTime + +import SmartScript + +import string, time +import Exceptions + +from numpy import * + + +## For available commands, see SmartScript + +class TCMDecoder: + def __init__(self): + self.pos = 0 + # key words in TCM products from NCEP + self.keyWordDict = {"FORECAST VALID" : self.decodeWindForecast, + "TPC/NATIONAL HURRICANE CENTER" : self.decodeAltFilename, + "CENTER LOCATED NEAR" : self.decodeCenterLocation, + "CENTER LOCATED INLAND NEAR" : self.decodeCenterLocation, + "MAX SUSTAINED WINDS" : self.decodeMaxSustainedWinds, + "MAX WIND" : self.decodeMaxWind, + "EYE DIAMETER" : self.decodeEyeDiameter, + "KT..." 
: self.decodeRadii, + # key words for JTWC products + "WTPN" : self.decodeJTWCProductTime, + "WARNING POSITION:" : self.decodeJTWCTimeCenter, + "VALID AT:" : self.decodeJTWCWindForecast, + "RADIUS OF" : self.decodeJTWCRadii, + " ---" : self.endJTWCWindForecast, + "REMARKS:" : self.stopDecodingJTWC, + } + + self.fcstList = [] # a place to store all of the forecasts + + self.text = [] # the text product + + self.currentFcst = {} # the current forecast we are docoding + + self.baseProductTime = 0 + + self.foundEyeDiameter = 0 + + self.altFilename = "" + + def calcEyeDiameter(self, center, maxWind): + lat = center[0] # latitude in degrees + maxWindC = maxWind / 1.944 # convert to meters per second + rmw = 46.29 * exp(-0.0153 * maxWindC + 0.0166 * lat) + + # convert to diameter and convert from km to nm + ed = rmw * 2.0 / 1.852 + return ed + + def stripText(self): + endStr = chr(13) + chr(13) + chr(10) + for i in range(len(self.text)): + self.text[i] = string.replace(self.text[i], endStr, "") + return + + def getFcstList(self): + return self.fcstList + + def getBaseProductTime(self): + return self.baseProductTime + + def getAltInfoFilename(self): + return self.altFilename + + def currentLine(self): + return self.text[self.pos] + + def nextLine(self): + self.pos += 1 + if self.pos < len(self.text): + return self.text[self.pos] + else: + return "" + + def monthNum(self, monthStr): + monthList = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", + "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"] + + try: + return monthList.index(monthStr) + 1 + except ValueError: + return 0 + + def convertBaseTime(self, timeStr): + # timeStr format: "HHMMZ DAY MON DD YYYY" + + # extract time parts from the str + strList = string.split(timeStr) + if len(strList) != 5: + print("Invalid time string:", timeStr) + print("Format should be of the form HHMMZ DAY MON DD YYYY") + return + + hour = int(timeStr[0:2]) + minute = int(timeStr[2:4]) + monthStr = strList[2] + month = self.monthNum(monthStr) + day = 
int(strList[3]) + year = int(strList[4]) + + # time.mktime returns time in seconds but in local time + baseTime = time.mktime((year, month, day, hour, minute, 0, 0, 0, 0)) + + # Adjust to UTC + diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) + + # subtract timeZone and round to the nearest hour + roundedTime = int((baseTime - diffTime) / 3600) * 3600 + + return roundedTime + + def convert_ddhhmm(self, ddhhmmStr, baseTime): + + # remove the slash if present + ddhhmmStr = string.replace(ddhhmmStr, "/", "") + + if baseTime == 0: + baseTime = time.time() + + # extract the time parts + dayStr = ddhhmmStr[0:2] + hourStr = ddhhmmStr[2:4] + minStr = ddhhmmStr[4:6] + day = int(dayStr) + hour = int(hourStr) + minute = int(minStr) + tupleTime = time.gmtime(baseTime) + year = tupleTime[0] + month = tupleTime[1] + # see if we crossed over to a new month + if tupleTime[2] > day: + month += 1 + if month > 12: + month = 1 + year += 1 + + newTuple = (year, month, day, hour, minute, tupleTime[5], + tupleTime[6], tupleTime[7], tupleTime[8]) + + secondsTime = time.mktime(newTuple) + # Adjustment to UTC + diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) + return secondsTime - diffTime # subtract timeZone + + def decodeProductTime(self): + # Time of the product found on the next line + timeStr = self.nextLine() + + # sanity check for the time string + hhmm = timeStr[0:4] + for c in hhmm: + if not c in string.digits: + return + + baseTime = self.convertBaseTime(timeStr) + self.baseProductTime = baseTime + + return + + def decodeAltFilename(self): + nameStr = self.currentLine() + parts = string.split(nameStr) + + self.altFilename = parts[-1] # grab the last string token + return + + def decodeCenterLocation(self): + locStr = self.currentLine() + # check for the repeat center....don't want this one + if string.find(locStr, "REPEAT") >= 0: + return + + keyWord = "NEAR" + pos = string.find(locStr, keyWord) + if pos > -1: # found it + locStr = 
locStr[pos + len(keyWord):] + tokenList = string.split(locStr) + if len(tokenList) >= 2: + lat = self.decodeLatLonToken(tokenList[0]) + lon = self.decodeLatLonToken(tokenList[1]) + + if len(tokenList) > 3: # grab the time + validTime = self.convert_ddhhmm(tokenList[3], self.baseProductTime) + # New fcst (analysis actually) + self.currentFcst = {} + self.currentFcst['validTime'] = validTime + self.currentFcst['centerLocation'] = (lat, lon) + self.currentFcst['radii'] = {} + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + else: + print("Invalid Center Location string:", locStr) + return + + def decodeMaxSustainedWinds(self): + keyWord = "MAX SUSTAINED WINDS" + windStr = self.currentLine() + pos = string.find(windStr, keyWord) + if pos > -1: # found it + windList = [] + tokenList = string.split(windStr) + for i in range(len(tokenList)): + if string.find(tokenList[i], "KT") >= 0: + windList.append(float(tokenList[i - 1])) + + # Sometimes there is no max wind/gust reported + if windList == []: + print("No Max Sustained Winds or Gusts found.") + return + + # store the max wind + self.currentFcst["maxWind"] = windList[0] + self.currentFcst["maxGust"] = windList[1] + + # if we have a center location and a max wind we can calc + # the eye diameter + if 'centerLocation' in self.currentFcst and \ + 'maxWind' in self.currentFcst: + # if it's zero it's not in the product and the user didn't + # change it, so calculate it based on the Willoughby formula + if 'eyeDiameter' in self.currentFcst and \ + self.currentFcst['eyeDiameter'] == 0: + self.currentFcst['eyeDiameter'] = self.calcEyeDiameter( + self.currentFcst['centerLocation'], + self.currentFcst['maxWind']) + else: # otherwise use what's been defined or read from the text + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + + return + + def decodeMaxWind(self): + str = self.currentLine() + str = string.replace(str, '.', ' ') # remove ... 
+ tokenList = string.split(str) + if len(tokenList) >= 6: + maxWind = float(tokenList[2]) + maxGust = float(tokenList[5]) + + # store in current fcst + self.currentFcst["maxWind"] = maxWind + self.currentFcst["maxGust"] = maxGust + + # if we have a center location and a max wind we can calc + # the eye diameter + if 'centerLocation' in self.currentFcst and \ + 'maxWind' in self.currentFcst: + # if it's zero it's not in the product and the user didn't + # change it, so calculate it based on the Willoughby formula + if 'eyeDiameter' in self.currentFcst and \ + self.currentFcst['eyeDiameter'] == 0: + self.currentFcst['eyeDiameter'] = self.calcEyeDiameter( + self.currentFcst['centerLocation'], + self.currentFcst['maxWind']) + else: # otherwise use what's been defined or read from the text + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + + return + + def decodeRadii(self): + if self.currentFcst == {}: # can't continue + return + + str = self.currentLine() + str = string.replace(str, '.', ' ') # remove ... + tokenList = string.split(str) + # check for KT in the second slot + if len(tokenList) < 4 or tokenList[1] != "KT": + print("Invalid TCM wind string:", str) + return + radiiWindValue = float(tokenList[0]) + dirList = ["NE", "SE", "SW", "NW"] + radiusList = [] + for token in tokenList: + for d in dirList: + pos = string.find(token, d) + if pos >= 0: + radiusStr = token[:pos] + radius = float(radiusStr) + radiusList.append(radius) + + + if len(radiusList) == 0: + print("Error decoding radii in string:", str) + # store the radii info + if "radii" not in self.currentFcst: + self.currentFcst['radii'] = {} + + self.currentFcst['radii'][radiiWindValue] = radiusList + + return + + def decodeWindForecast(self): + # if we're decoding a new forecast, save the old one first + if self.currentFcst != {}: + self.fcstList.append(self.currentFcst) + self.currentFcst = {} # reset + + str = self.currentLine() + str = string.replace(str, '...', ' ') # remove ... 
+ + tokenList = string.split(str) + # decode the validTime + validTime = self.convert_ddhhmm(tokenList[2], self.baseProductTime) + if self.baseProductTime == 0: + self.baseProductTime = validTime + # decode the center location + if len(tokenList) >= 5: + lat = self.decodeLatLonToken(tokenList[3]) + lon = self.decodeLatLonToken(tokenList[4]) + # If we can't decode the lat or lon it's probably an outlook + # with no guidance so just return + if lat == None or lon == None: + print("Failed to decode latStr:", lat, "lonStr:", lon) + return + + # initialize a new forecast and begin filling values + self.currentFcst = {} + self.currentFcst['validTime'] = validTime + self.currentFcst['centerLocation'] = (lat, lon) + self.currentFcst['radii'] = {} + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + + return + + def decodeEyeDiameter(self): + str = self.currentLine() + + tokenList = string.split(str) + diameter = int(tokenList[2]) + + self.currentFcst['eyeDiameter'] = diameter + + # Since we found it in the procuct, set the default diameter + self.defaultEyeDiameter = diameter + self.foundEyeDiameter = 1 # mark that we found it + return + + def decodeTCMProduct(self, TCMProduct, eyeDiameter): + self.text = TCMProduct + self.pos = 0 + self.fcstList = [] + self.defaultEyeDiameter = eyeDiameter + + self.stripText() + while self.pos < len(TCMProduct): + line = self.currentLine() + for k in list(self.keyWordDict.keys()): + if string.find(line, k) > -1: + self.keyWordDict[k]() + break + self.pos += 1 + + # store the last forecast in the list of forecasts + if self.currentFcst != {}: + self.fcstList.append(self.currentFcst) + self.currentFcst = {} # reset + + return + + def decodeLatLonToken(self, latLonStr): + dirList = ['N', 'S', 'E', 'W'] + for d in dirList: + pos = string.find(latLonStr, d) + if pos >= 0: + try: + value = float(latLonStr[0:pos]) + if d == 'S' or d == 'W': + value = -value # flip the numeric sign + return value + except: + # it was not decodable (not 
numbers) + print("Failed to decode lat/lon token:", latLonStr) + return None + + # undecodable latLon for some reason + return None + + def decodeJTWCProductTime(self): + line = self.currentLine() + tokenList = string.split(line) + ddhhmmStr = tokenList[2] + self.baseProductTime = self.convert_ddhhmm(ddhhmmStr, 0) + + self.baseProductTime = int(self.baseProductTime / 3600) * 3600 + return None + + def decodeJTWCTimeCenter(self): + line = self.nextLine() + tokenList = string.split(line) + if len(tokenList) >= 5: + dateTimeStr = tokenList[0][0:6] + latStr = tokenList[3] + lonStr = tokenList[4] + else: + print("Error decoding JTWC Time/Center string:", line) + print("Format should be: DDHHMMZx --- NEAR Lat Lon") + return + + # could be None + lat = self.decodeLatLonToken(latStr) + lon = self.decodeLatLonToken(lonStr) + if lon > 0: + lon -= 360.0 + productTime = self.convert_ddhhmm(dateTimeStr, self.baseProductTime) + + # make a new fcst object to store the analysis + self.currentFcst = {} + self.currentFcst['validTime'] = productTime + self.currentFcst['centerLocation'] = (lat, lon) + self.currentFcst['radii'] = {} + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + return + + def decodeJTWCWindForecast(self): + line = self.nextLine() + + tokenList = string.split(line) + + # Grab everything just to the left of the first 'Z' + zPos = string.find(tokenList[0], 'Z') + if zPos >= 0: + timeStr = tokenList[0][0:zPos] + validTime = self.convert_ddhhmm(timeStr, self.baseProductTime) + else: + print("couldnt find Z in timeStr:", line) + return + + latStr = tokenList[2] + lonStr = tokenList[3] + lat = self.decodeLatLonToken(latStr) + lon = self.decodeLatLonToken(lonStr) + if lon > 0: + lon -= 360.0 + + # make a new currentFcst and store the info + self.currentFcst = {} + self.currentFcst['validTime'] = validTime + self.currentFcst['centerLocation'] = (lat, lon) + self.currentFcst['radii'] = {} + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + return + + 
def decodeJTWCRadii(self): + line = self.currentLine() + radList = [] + windSpeed = 0 + while string.find(line, "---") == -1 and line != "": + tokenList = string.split(line) + if string.find(line, "RADIUS") >= 0: # it's the first line + # check to see if we need to store the radii first + if radList != []: # we decoded some already + self.currentFcst['radii'][windSpeed] = radList + radList = [] + + # extract the windSpeed for these radii + windSpeed = float(tokenList[2]) + if string.find(line, "QUADRANT") == -1: # no "QUADRANT" found + radius = float(tokenList[6]) + radList = [radius, radius, radius, radius] + else: # QUADRANT found + radius = float(tokenList[6]) + radList = [radius] + else: # no RADIUS found so maybe a QUADRANT line + if string.find(line, "QUADRANT") >= 0: + radius = float(tokenList[0]) + radList.append(radius) + + line = self.nextLine() + + # save the last radii info + if radList != []: + self.currentFcst['radii'][windSpeed] = radList + + # save the whole forecast in the list + self.fcstList.append(self.currentFcst) + self.currentFcst = {} + + return + + def endJTWCWindForecast(self): + + if self.currentFcst != {}: + self.fcstList.append(self.currentFcst) + + self.currentFcst = {} + return + + def stopDecodingJTWC(self): + line = "ZZZZZ" + while line != "": + line = self.nextLine() + return + +# end class TCMDecoder + +# begin class CircleEA +# This class helps make circular edit areas and quadrants thereof. +class CircleEA(SmartScript.SmartScript): + def __init__(self, latGrid, lonGrid, center, slices): + pi = 3.1459 + RadsPerDeg = 2 * pi / 360 + cosLatGrid = cos(latGrid * RadsPerDeg) + self.xDist = (lonGrid - center[1]) * 111.1 * cosLatGrid + self.yDist = (latGrid - center[0]) * 111.1 + self.distGrid = sqrt(pow(self.xDist, 2)+ pow(self.yDist, 2)) + + self.tanGrid = arctan2(-self.xDist, -self.yDist) + # mask off all but the specified quadrant. 
+ self.quadList = [] + for quad in range(1, slices + 1): + minValue = -pi + (quad - 1) * 2 * pi / slices + maxValue = -pi + quad * 2 * pi / slices + + quadrant = logical_and(greater_equal(self.tanGrid, minValue), + less(self.tanGrid, maxValue)) + self.quadList.append(quadrant) + + return + + # Return an edit area for just one quadrant. + # By convention quadrant numbering starts at 1 (due North) and + # progresses clockwise by one slice increment + def getQuadrant(self, quad, radius): + # trim the mask beyond the specified radius + radiusMask = less_equal(self.distGrid, radius) + + quadrant = logical_and(radiusMask, self.quadList[quad - 1]) + return quadrant + + def getDistanceGrid(self): + return self.distGrid + + def getXYDistGrids(self): + return self.xDist, self.yDist + +# end class CircleEA ------------------------------------------------------- + + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + self.fcstWindGrids = {} + + # Use this method if you want to get your product + # from a simple text file + def getTextProductFromFile(self, filename): + try: + f = file(filename, 'r') + except: + print(filename, "not found when getting product from file.") + return [] + textList = [] + line = f.readline() + textList.append(line) + while line != "": + line = f.readline() + textList.append(line) + f.close() + + return textList + + # Reads decodes depression information using the specified product. 
+ def decodeDepressionInfo(self, textProduct): + + for line in textProduct: + parts = string.split(line, ",") + if len(parts) < 20: + continue + if parts[4] == " CARQ" and parts[5] == " 0": + outsideRad = int(parts[18]) + rmw = int(parts[19]) + return (rmw, outsideRad) + + return (0, 0) + + def injectDepressionInfo(self, fcstList, rmw, outsideRad): + if rmw == 0 and outsideRad == 0: + return fcstList + + eyeDiam = (rmw + 8.0) * 2.0 + for f in fcstList: + f['eyeDiameter'] = eyeDiam + # add the maxWind radius + maxWind = f['maxWind'] + maxWindRadii = [rmw, rmw, rmw, rmw] + f['radii'][maxWind] = maxWindRadii + + # Make an arbitrary radius at the outer most isobar + outSpeed = maxWind / 3.0 # somewhat arbitrary + f['radii'][outSpeed] = [outsideRad, outsideRad, outsideRad, outsideRad] + + return fcstList + + def getWEInventory(self, modelName, WEName, level): + yesterday = self._gmtime() - (2 * 24 * 3600) # two days ago + later = self._gmtime() + 10 * 24 * 3600 # 10 days from now + allTimes = TimeRange.TimeRange(yesterday, later) + parm = self.getParm(modelName, WEName, level); + inv = parm.getGridInventory(allTimes.toJavaObj()) + trList = [] + for gd in inv: + tr = TimeRange.TimeRange(gd.getGridTime()) + trList.append(tr) + return trList + + # returns a wind grid from the specified model most closely matched in + # time + def getClosestWindGrid(self, modelName, timeTarget): + t1 = AbsTime.AbsTime(0) + t2 = AbsTime.current() + 300 * 24 * 3600 # 300 days out + timeRange = TimeRange.TimeRange(t1, t2) + siteID = self.getSiteID() + if modelName == "Fcst": + level = "SFC" + elementName = "Wind" + else: + modelName = siteID + "_D2D_" + modelName + level = "FHAG10" + elementName = "wind" + + topo = self.getTopo() + calmGrid = self.makeWindGrid(0.0, 0.0, topo.shape) + gridInfo = [] + try: + gridInfo = self.getGridInfo(modelName, elementName, level, timeRange) + except Exceptions.EditActionError: + print("No grids found for model/level:", modelName, level) + if 
string.find(modelName, "GFS") >= 0: + modelName = siteID + "_D2D_" + "AVN" + level = "BL030" + try: + gridInfo = self.getGridInfo(modelName, elementName, level, timeRange) + except Exceptions.EditActionError: + print("No grids found for model", modelName, "level:", level) + print("Using calm grid.") + return calmGrid + + if len(gridInfo) == 0: + print("No grid info found for:", modelName, "at:", timeRange) + print("No grid info...Using calm grid.") + return calmGrid + + minDiff = 3600 * 24 * 365 # just a large number + gridIndex = -1 + tr = None + # figure out which grid is closest in time + for i in range(len(gridInfo)): + gridTime = gridInfo[i].gridTime() + gTime = gridTime.startTime().unixTime() + diff = abs(gTime - timeTarget) + if diff < minDiff: + tr = gridInfo[i].gridTime() + minDiff = diff + if diff == 0: + break + + if minDiff > 3 * 3600: + print("Returning calm grid as background.") + return calmGrid + + grid = calmGrid + # fetch the grid + if modelName == "Fcst": + if tr in self.fcstWindGrids: + grid = self.fcstWindGrids[tr] + else: + # hunt down any grid that overlaps the timeTarget + for gridTR in list(self.fcstWindGrids.keys()): + if gridTR.contains(AbsTime.AbsTime(timeTarget)): + grid = self.fcstWindGrids[gridTR] + else: + grid = self.getGrids(modelName, elementName, level, tr, mode="First") + grid = (grid[0] * 1.944, grid[1]) + + return grid + + # makes a direction grid where winds blow counter-clockwise about + # the specified center. + def makeDirectionGrid(self, latGrid, lonGrid, latCenter, lonCenter): + cycWt = 0.7 # cyclonic circulation weight + convWt = 0.3 # convergence weight + cycU = -(latGrid - latCenter) # pure counter-clockwise circulation + cycV = lonGrid - lonCenter + convU = -cycV # pure convergence + convV = cycU + u = cycU * cycWt + convU * convWt + v = cycV * cycWt + convV * convWt + mag, dir = self.UVToMagDir(u, v) + + return dir + + # interpolates radii information based on the specified info. 
+ # returns a new radii + def interpRadii(self, t1, t2, newTime, f1Radii, f2Radii): + # set the list of radii based on the first set: f1Radii + radiiList = f1Radii + + newRadii = {} + for r in radiiList: + quadList = [] + for i in range(4): # always and only 4 quadrants at this point + r1 = f1Radii[r][i] + if r in f2Radii: + r2 = f2Radii[r][i] + else: + msg = "Wind forecast missing wind value: " + str(r) + " knots. " + msg += "Recommend defining wind radii for " + str(r) + " knots." + ##self.statusBarMsg(msg, "S") + r2 = r1 # just use the f1 value so we can keep going + radius = r1 + (r2 - r1) * (newTime - t1) / (t2 - t1) + quadList.append(radius) + newRadii[r] = quadList + + return newRadii + + # interpolates the wind forecasts inbetween the two specified forecasts. + # interval is assumed to be specified in hours. + # returns a new list of forecasts with f1 at the front of the list + # and f2 not present at all in the list. + def interpolateWindFcst(self, f1, f2, interval): + intSecs = 3600 * interval + t1 = f1['validTime'] + t2 = f2['validTime'] + # Just return the first fcst if the interval is too big + if t2 - t1 <= intSecs: + return [f1] + + f1Lat = f1['centerLocation'][0] + f1Lon = f1['centerLocation'][1] + f2Lat = f2['centerLocation'][0] + f2Lon = f2['centerLocation'][1] + f1Eye = f1['eyeDiameter'] + f2Eye = f2['eyeDiameter'] + tDiff = f2['validTime'] - f1['validTime'] + f1MaxWind = f1['maxWind'] + f2MaxWind = f2['maxWind'] + timeSlots = int(tDiff / intSecs) + dLat = (f2Lat - f1Lat) / timeSlots + dLon = (f2Lon - f1Lon) / timeSlots + dEye = (f2Eye - f1Eye) / timeSlots + dMaxWind = (f2MaxWind - f1MaxWind) / timeSlots + f1Radii = f1['radii'] + f2Radii = f2['radii'] + fcstList = [f1] # include the first fcst in the list + for i in range(1, timeSlots): + newTime = t1 + (i * intSecs) + newLat = f1Lat + (i * dLat) + newLon = f1Lon + (i * dLon) + newEye = f1Eye + (i * dEye) + newMaxWind = f1MaxWind + (i * dMaxWind) + newRadii = self.interpRadii(t1, t2, newTime, 
f1Radii, f2Radii) + f = {} + f['centerLocation'] = (newLat, newLon) + f['eyeDiameter'] = newEye + f['validTime'] = newTime + f['maxWind'] = newMaxWind + f['radii'] = newRadii + fcstList.append(f) + + return fcstList + + # Smooths the specified grid by the specified factor + # With factor == 3, 3x3 smooth, factor == 5 5x5 smooth, etc. + # Even factors (4, 6, 8,...) round up to the next odd value + # If factors <3 are specified, the unmodified grid is returned. + def smoothGrid(self, grid, factor): + # factors of less than 3 are useless or dangerous + if factor < 3: + return grid + st = time.time() + half = int(factor)/ 2 + sg = zeros(grid.shape,float64) + count = zeros(grid.shape,float64) + gridOfOnes = ones(grid.shape,float64) + for y in range(-half, half + 1): + for x in range(-half, half + 1): + if y < 0: + yTargetSlice = slice(-y, None, None) + ySrcSlice = slice(0, y, None) + if y == 0: + yTargetSlice = slice(0, None, None) + ySrcSlice = slice(0, None, None) + if y > 0: + yTargetSlice = slice(0, -y, None) + ySrcSlice = slice(y, None, None) + if x < 0: + xTargetSlice = slice(-x, None, None) + xSrcSlice = slice(0, x, None) + if x == 0: + xTargetSlice = slice(0, None, None) + xSrcSlice = slice(0, None, None) + if x > 0: + xTargetSlice = slice(0, -x, None) + xSrcSlice = slice(x, None, None) + + target = [yTargetSlice, xTargetSlice] + src = [ySrcSlice, xSrcSlice] + sg[target] += grid[src] + count[target] += gridOfOnes[src] + return sg / count + + def printFcst(self, f, baseTime): + print("==============================================================") + print("Time:", time.asctime(time.gmtime(f['validTime'])), end=' ') + print("LeadTime:", (f['validTime'] - baseTime) / 3600 + 3) + print("Center:", f['centerLocation']) + print("Eye:", f['eyeDiameter']) + if 'maxWind' in f: + print("Max Wind:", f['maxWind']) + radKeys = list(f['radii'].keys()) + sort(radKeys) + print("RADII:") + for r in radKeys: + print(r, "kts:", f['radii'][r]) + + + # Smooths the direction grid 
without regard to the magnitude + def smoothDirectionGrid(self, dir, factor): + mag = ones(dir.shape, float) # 1.0 everywhere + u, v = self.MagDirToUV(mag, dir) + u = self.smoothGrid(u, factor) + v = self.smoothGrid(v, factor) + mag, dir = self.UVToMagDir(u, v) + return dir + + def makeWindGrid(self, mag, dir, gridShape): + mag = ones(gridShape, float) * mag + dir = ones(gridShape, float) * dir + return mag, dir + + def decreaseWindOverLand(self, grid, fraction, Topo): + mask = greater(Topo, 0.0) + grid = where(mask, grid * fraction, grid) + return grid + + def getTimeConstraintDuration(self, element): + return self.getParm("Fcst", element, "SFC").getGridInfo()\ + .getTimeConstraints().getDuration() + + # Blends the specified grid together + def blendGrids(self, windGrid, bgGrid): + + # make a mask around the edge + windMag = windGrid[0] + backMag = bgGrid[0] + mag = windMag.copy() + + # make a weightingGrid + lower = average(backMag) + # calculate the average value over the area where blending will occur + + upper = lower + 10.0 + + ringMask = logical_and(less(mag, upper), greater(mag, lower)) + + avgGrid = where(ringMask, backMag, float32(0.0)) + + # a nearly calm grid means no blending required + if lower < 1.0: + return windGrid + + wtGrid = greater(mag, upper).astype(float32) + ringMask = logical_and(less(mag, upper), greater(mag, lower)) + wtGrid = where(ringMask, (mag - lower) / (upper - lower), wtGrid) + wtGrid[less(mag, lower)]= 0.0 + wtGrid = self.smoothGrid(wtGrid, 5) + + # calculate the new mag grid + mag *= wtGrid + mag += backMag * (1 - wtGrid) + + # calculate direction grid + onesGrid = ones_like(mag) + gridU, gridV = self.MagDirToUV(onesGrid, windGrid[1]) + bgU, bgV = self.MagDirToUV(onesGrid, bgGrid[1]) + gridU *= wtGrid + gridU += bgU * (1 - wtGrid) + gridV *= wtGrid + gridV += bgV * (1 - wtGrid) + + # get the dirGrid and toss out the magnitude + magGrid, dirGrid = self.UVToMagDir(gridU, gridV) + + return mag, dirGrid + + def getLatLonGrids(self): 
+ # Try to get them from the fcst database to save time + startTime = AbsTime.current() - 86400 + endTime = AbsTime.current() + 86400 # 1 day + timeRange = TimeRange.TimeRange(startTime, endTime) + latGrid = self.getGrids("Fcst", "latGrid", "SFC", timeRange, + mode = "First", noDataError = 0) + lonGrid = self.getGrids("Fcst", "lonGrid", "SFC", timeRange, + mode = "First", noDataError = 0) + if latGrid != None and lonGrid != None: + return latGrid, lonGrid + + # make the latGrid and lonGrid + latGrid, lonGrid = SmartScript.SmartScript.getLatLonGrids(self) + + # Temporarliy save them in the forecast database + startTime = AbsTime.current() + endTime = startTime + 86400 * 7 # 7 days + timeRange = TimeRange.TimeRange(startTime, endTime) + self.createGrid("Fcst", "latGrid", "SCALAR", latGrid, timeRange, + descriptiveName=None, timeConstraints=None, + precision=1, minAllowedValue=0.0, + maxAllowedValue=90.0) + + self.createGrid("Fcst", "lonGrid", "SCALAR", lonGrid, timeRange, + descriptiveName=None, timeConstraints=None, + precision=1, minAllowedValue=-360.0, + maxAllowedValue=180.0) + + return latGrid, lonGrid + + # This method interpolates the specified radii in rDict to the + # number of slices specified in pieSlices. This adds more angular + # resolution to the wind forecast which typically comes with 4 slices. 
+ def interpolateQuadrants(self, rDict, pieSlices): + # make sure we have something to do first + if pieSlices <= 4: + return rDict + + newDict = {} + for k in list(rDict.keys()): + rList = rDict[k] # fetch the list of radii + + interpFactor = pieSlices / len(rList) + newList = [] + for i in range(-1, len(rList) -1): + minVal = rList[i] + maxVal = rList[i + 1] + dVal = (maxVal - minVal) / interpFactor + for f in range(interpFactor): + radius = minVal + dVal * f + newList.append(radius) + + # Since we started with the NW quadrant we need to shift + # the list so that it starts at North to conform to convention + shift = int(pieSlices / 4) + shiftedList = newList[shift:] + shiftedList += newList[:shift] + newDict[k] = shiftedList + return newDict + + # Makes a Rankine Vortex wind speed grid that decreases exponentially + # from the known values at known radii. Inside the Radius of maximum + # wind the wind decreases linearly toward the center + def makeRankine(self, f, latGrid, lonGrid, pieSlices): + st = time.time() + rDict = f['radii'] + validTime = f['validTime'] + center = f['centerLocation'] + circleEA = CircleEA(latGrid, lonGrid, center, pieSlices) + + rDict = self.interpolateQuadrants(rDict, pieSlices) + + # get the distance grid and make sure it's never zero anywhere + distanceGrid = circleEA.getDistanceGrid() / 1.852 # dist in NM + distanceGrid[equal(distanceGrid, 0)] = 0.01 + + # make a grid into which we will define the wind speeds + grid = self.empty() + + # insert the maxWind radii + if 'maxWind' in f: + maxWind = f['maxWind'] + if 'eyeDiameter' in f: + maxRad = f['eyeDiameter'] / 2.0 + 8.0 + else: + print("Error --- no eye diameter found.") + maxRad = 12.5 # half of default 25 nm eye diameter + + # add an entry that uses the max wind and radius + rDict[maxWind] = [maxRad] * pieSlices + + # make a list sorted by average radii value + wsList = list(rDict.keys()) + + if len(wsList) == 0: + print("No radii found. 
Returning calm grid.") + return (grid, grid) + + radList = [] + for ws in wsList: + rList = rDict[ws] + sum = 0 + for r in rList: + sum += r + average = sum / len(rList) + radList.append((average, ws)) + + radList.sort() + radList.reverse() + + wsList = [] + for rad, ws in radList: + wsList.append(ws) + + + maxRad, maxWindValue = radList[-1] + maxWindValue += 0.1 + rDict[maxWindValue] = [1.0] * pieSlices + wsList.append(maxWindValue) + + # for each rDict record and quadrant, make the grid one piece at a time + for i in range(len(wsList) - 1): + if wsList[i] not in rDict: + continue + radiusList = rDict[wsList[i]] + nextRadiusList = rDict[wsList[i + 1]] + for quad in range(len(radiusList)): + outSpeed = float(wsList[i]) + inSpeed = float(wsList[i + 1]) + outRadius = float(radiusList[quad]) + inRadius = float(nextRadiusList[quad]) + + # get the edit area for this quadrant + mask = circleEA.getQuadrant(quad + 1, outRadius * 1.852) + + # log10 and exp math functions are fussy about zero + if inSpeed == 0.0: + inSpeed = 0.1 + if outSpeed == 0.0: + outSpeed = 0.1 + if inRadius == 0.0: + inRadius = 0.1 + if outRadius == 0.0: + outRadius = 0.1 + + # no wind speed can never exceed the maximum allowable wind speed + if inSpeed > maxWind: + inSpeed = maxWind + if outSpeed > maxWind: + outSpeed = maxWind + + # don't bother with trivial cases + if inRadius < 2.0 and outRadius < 2.0: + continue + if inRadius > outRadius: + continue + + # calculate the exponent so that we exactly fit the next radius + denom = log10(inRadius / outRadius) + if denom == 0: + exponent = 1.0 + else: + exponent = (log10(outSpeed) - log10(inSpeed)) / denom + + # make sure the exponent behaves itself + if exponent > 10.0: + exponent = 10.0 + # inside RMW gets a linear slope to largest of max wind forecasts + if inRadius <= 1.0: + dSdR = (outSpeed - inSpeed) / (outRadius - inRadius) + grid = where(mask, inSpeed + (dSdR * distanceGrid), grid) + else: # outside RMW + grid = where(mask, inSpeed * 
power((inRadius / distanceGrid), exponent), + grid) + grid.clip(0.0, 200.0, grid) + + dirGrid = self.makeDirectionGrid(latGrid, lonGrid, center[0], center[1]) + + # clip values between zero and maxWind + grid.clip(0.0, maxWind, grid) + # apply the wind reduction over land + fraction = 1.0 - (self.lessOverLand / 100.0) + grid = self.decreaseWindOverLand(grid, fraction, self.elevation) + + return (grid, dirGrid) + + def execute(self, varDict, timeRange): + self.setToolType("numeric") + self.toolTimeRange = timeRange + + # define the default eye diameter for bulletins where they are missing + self.dialogEyeDiameter = 0.0 + + Topo = self.getTopo() + +## interval = int(varDict["Time Interval\n(hours):"]) + + tcDuration = self.getTimeConstraintDuration("Wind") + tcHours = int(tcDuration / 3600) # durations are expressed in seconds + # set the time interpolation interval to the duration + interval = tcHours + # get the product ID +## productList1 = varDict["Product to\ndecode:"] +## productList2 = varDict["Product to\n decode:"] +## productList1 = productList1 + productList2 # concatenate +## if len(productList1) != 1: +## self.statusBarMsg("Please select one TCM bulletin only.", "S") +## return None + +## productID = productList1[0] + + # special code for GUM since they do things just a little differently + siteID = self.getSiteID() + if siteID == "GUM": + productID = "GTW" + productID + + bgModelName = varDict["Background\nModel:"] + # If we're using the Fcst, grab all of the grids now + if bgModelName == "Fcst": + inv = self.getWEInventory("Fcst", "Wind", "SFC") + for tr in inv: + self.fcstWindGrids[tr] = self.getGrids("Fcst", "Wind", "SFC", + tr, mode="First") + + pieSlices = int(varDict["Number of\n Pie Slices:"]) + + self.lessOverLand = int(varDict["Decrease Wind over Land by (%):"]) + self.elevation = Topo + +## # Use this method to fetch a text product from a file +## fileName = "Your path and file name goes here" +## textProduct = 
self.getTextProductFromFile(fileName) +## productID = string.split(fileName, "/")[-1] + + # Use this method to fetch a product from the text database + productID = varDict["ProductID:"] + textProduct = self.getTextProductFromDB(productID) + if len(textProduct) < 5: + print(productID, "could not be retrieved from text database.") + return None + + decoder = TCMDecoder() + decoder.decodeTCMProduct(textProduct, self.dialogEyeDiameter) + fcstList = decoder.getFcstList() + print("Decoded:", len(fcstList), " forecasts.") + + # Attempt to get the alternate info from a file or the textDB + altFileName = decoder.getAltInfoFilename() + + altFileName = "/home/eagle6/lefebvre/TPC/" + altFileName + + # get additional info if available + altProduct = self.getTextProductFromDB(altFileName) + +## ## use this version to fetch from a file +## altProduct = self.getTextProductFromFile(altFileName) + + rmw, outsideRad = (0, 0) # initialize + if len(altProduct) < 5: + print(altProduct, "alternate info file could not be retrieved from text database.") + else: + rmw, outsideRad = self.decodeDepressionInfo(altProduct) + + # Set the baseDecodedTime - validTime of first entry - 3 hours + if len(fcstList) > 0: + self.baseDecodedTime = fcstList[0]['validTime'] - 3 * 3600 + +## # See if the decoded fcst is close to the current time. This is needed +## # so the tool will work on archived data sets + selectionTROnly = 0 +## if abs(time.time() - self.baseDecodedTime) > 2 * 24 * 3600: # older than 2 days +## testMode = 1 + # restrict grids to the selected time period if option is selected. 
+ restrictAnswer = varDict["Make Grids over\nSelected Time Only:"] + if restrictAnswer == "Yes": + selectionTROnly = 1 + + # push this info in the fcsts + fcstList = self.injectDepressionInfo(fcstList, rmw, outsideRad) + + # interpolate the wind forecasts we got from the decoder + print("Interpolating wind forecasts:") + selectedStartTime = self.toolTimeRange.startTime().unixTime() + selectedEndTime = self.toolTimeRange.endTime().unixTime() + interpFcstList = [] + for i in range(len(fcstList) - 1): + + newFcstList = self.interpolateWindFcst(fcstList[i], fcstList[i+1], + interval) + # if we've processed the last time segment, append the last forecast + if i == len(fcstList) - 2: + newFcstList.append(fcstList[-1]) + + # Make sure the fcst is within the selected time range + for f in newFcstList: + if (selectionTROnly and (f['validTime'] >= selectedStartTime and \ + f['validTime'] < selectedEndTime)) or not selectionTROnly: + interpFcstList.append(f) + + if len(fcstList) == 1: + interpFcstList = fcstList + + if len(interpFcstList) == 0: + self.statusBarMsg("No cyclone forecasts found within the Selected TimeRange", + "S") + else: + print("Generating", len(interpFcstList), "wind grids") + + # get the lat, lon grids + latGrid, lonGrid = self.getLatLonGrids() + + # make a grid for each interpolate forecast + gridCount = 0 + for f in interpFcstList: + windGrid = self.makeRankine(f, latGrid, lonGrid, pieSlices) + + validTime = int(f['validTime'] / 3600) * 3600 + bgGrid = self.getClosestWindGrid(bgModelName, validTime) + + if bgGrid is None: + print("Using calm background grid.") + bgGrid = self.makeWindGrid(0.0, 0.0, latGrid.shape) + + grid = self.blendGrids(windGrid, bgGrid) + + start = AbsTime.AbsTime(int(validTime)) + timeRange = TimeRange.TimeRange(start, start + interval * 3600) + + name = "Wind" + self.createGrid("Fcst", name, "VECTOR", grid, timeRange, + precision=1, minAllowedValue=0.0, + maxAllowedValue=200.0) + + gridCount += 1 + print("GenerateCyclone tool:", 
productID, "- Generated",gridCount, \ + "out of", len(interpFcstList), "grids") + + return None diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/HazardRecovery.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/HazardRecovery.py index 2bdbfb67e0..f6de86d131 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/HazardRecovery.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/HazardRecovery.py @@ -1,221 +1,221 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# HazardRecovery -# -# This procedure synchonizes the hazards grids with the contents of the -# VTEC active table. -# -# -# Author: lefebvre/mathewson -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 08/09/13 1856 dgilling Fix script so it runs correctly -# in SimulatedTime. -# 10/12/16 5936 dgilling Add additional phensigs to skipPhenSig -# in getActiveTable. -######################################################################## - -## -# This is a base file that is not intended to be overridden. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Hazards"] - -from numpy import * -import SmartScript -import time -import HazardUtils -import ProcessVariableList -import LogStream - - -MYSITEONLY = 0 #set to 1 to only have your sites be considered - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self._dbss = dbss - - def getActiveTable(self, site4ID=None, allZones=None): - #gets the active table, filtered by site4ID, and list of zones - activeTable = {} - vtecTable = self.vtecActiveTable() - vtecTable = self._hazUtils._filterVTECBasedOnGFEMode(vtecTable) - - actionList = ["NEW", "EXA", "EXB", "EXT", "CON"] - tropicalPhens = ["HU", "TY", "TR"] - SPCPhens = ["TO", "SV"] - skipPhenSig = {('FA','W'), ('FA', 'Y'), ('FF','W'), - ('FL','W'), ('FL', 'A'), ('FL','Y'), - ('MA','W'), ('SV','W'), ('TO','W'), ('EW','W')} - - currentTime = self._gmtime().unixTime() - - for v in vtecTable: - # filter based on zones - if allZones is not None and v['id'] not in allZones: - continue - - # filter based on actionCode - action = v['act'] - if action not in actionList: - continue - - # filter out phen/sigs to skip 
(short-fused) - phen = v['phen'] - sig = v['sig'] - if (phen,sig) in skipPhenSig: - continue - - #set up checks - if MYSITEONLY: - spcsiteCheck = (v['officeid'] == site4ID) - nonNatlCheck = (v['officeid'] == site4ID) - else: - spcsiteCheck = (v['officeid'] != 'KWNS') - nonNatlCheck = (v['officeid'] not in ['KNHC','KWNS']) - - #filter - hazKey = None - if v['phen'] in SPCPhens and spcsiteCheck and \ - v['sig'] == "A": - if v['pil'] in ['WCN']: - hazKey = v['phen'] + "." + v['sig'] + ":" + str(v['etn']) - elif nonNatlCheck: - hazKey = v['phen'] + "." + v['sig'] - elif v['phen'] in tropicalPhens and v['officeid'] == "KNHC": - hazKey = v['phen'] + "." + v['sig'] - - if hazKey is None: - continue - - startTm = v['startTime'] - # turncate the startTime to the top of the hour - startTm = int(startTm / 3600) * 3600 - - # filter out past hazards - if v['endTime'] < currentTime or \ - (v['officeid'] == "KNHC" and \ - (v['issueTime'] + 36*3600 < currentTime)): - continue - - # end times can be insanely large. Arbitrarily trim them to 36hr - # for UntilFurtherNotice - endTm = v['endTime'] - if v.get('ufn', 0): - endTm = currentTime + (36 * 3600) - - # now adjust starting times if event has already started to - # be the current hour - if startTm < currentTime: - startTm = currentTime - - dictKey = (hazKey, startTm, endTm, v['act']) - - if not dictKey in activeTable: - activeTable[dictKey] = [] - - activeTable[dictKey].append(v['id']) - - return activeTable - - - def execute(self, editArea, timeRange, varDict): - # get the hazard utilities - self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) - - self.setToolType("numeric") - - # any temporary grids? 
- if self._hazUtils._tempWELoaded(): - self.statusBarMsg("Unload temporary hazard weather elements before running HazardRecovery.", - "S") - return - - # get the active table - site4ID = self.getSite4ID(self.getSiteID()) - allZones = self.editAreaList() - activeTable = self.getActiveTable(site4ID, allZones) - - # define this to define the dialog - variableList = [ - ("Your entire Hazards inventory will be replaced with the " + \ - "contents of the active table.","", "label"), - ] - - # call this to pop the dialog - title = "Hazard Recovery" - processVarList = ProcessVariableList.ProcessVariableList(title, \ - variableList, varDict, parent = None) - status = processVarList.status() - if status.lower() != "ok": - print "status:", status - LogStream.logDebug("HazardRecovery: cancel") - return - LogStream.logDebug("HazardRecovery: OK") - - # see if the Hazards WE is loaded in the GFE, if not abort the tool - if not self._hazUtils._hazardsLoaded(): - self.statusBarMsg("Hazards Weather Element must be loaded in the GFE" + \ - " before running HazardRecovery", "S") - return - - # remove all of the current hazard grids - self._hazUtils._removeAllHazardsGrids() - self._hazUtils._unlockHazards() - - # any hazards at all? - if len(activeTable) == 0: - self.statusBarMsg("There are no hazards in the active table to recover. 
Hazard grids have been cleared.", - "S") - # return - - - keys = activeTable.keys() - allzones = self.editAreaList() - for key, start, end, action in keys: - timeRange = self._hazUtils._makeTimeRange(start, end) - zoneList = activeTable[(key, start, end, action)] - filteredZoneList = [] - for z in zoneList: - if z in allzones: - filteredZoneList.append(z) - - mask = self._hazUtils._makeMask(filteredZoneList) - self._hazUtils._addHazard("Hazards", timeRange, key, mask) - - LogStream.logEvent(self._hazUtils._printTime(start), - self._hazUtils._printTime(end), key, zoneList) - - - - return +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# HazardRecovery +# +# This procedure synchonizes the hazards grids with the contents of the +# VTEC active table. +# +# +# Author: lefebvre/mathewson +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 08/09/13 1856 dgilling Fix script so it runs correctly +# in SimulatedTime. +# 10/12/16 5936 dgilling Add additional phensigs to skipPhenSig +# in getActiveTable. +######################################################################## + +## +# This is a base file that is not intended to be overridden. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Hazards"] + +from numpy import * +import SmartScript +import time +import HazardUtils +import ProcessVariableList +import LogStream + + +MYSITEONLY = 0 #set to 1 to only have your sites be considered + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss = dbss + + def getActiveTable(self, site4ID=None, allZones=None): + #gets the active table, filtered by site4ID, and list of zones + activeTable = {} + vtecTable = self.vtecActiveTable() + vtecTable = self._hazUtils._filterVTECBasedOnGFEMode(vtecTable) + + actionList = ["NEW", "EXA", "EXB", "EXT", "CON"] + tropicalPhens = ["HU", "TY", "TR"] + SPCPhens = ["TO", "SV"] + skipPhenSig = {('FA','W'), ('FA', 'Y'), ('FF','W'), + ('FL','W'), ('FL', 'A'), ('FL','Y'), + ('MA','W'), ('SV','W'), ('TO','W'), ('EW','W')} + + currentTime = self._gmtime().unixTime() + + for v in vtecTable: + # filter based on zones + if allZones is not None and v['id'] not in allZones: + continue + + # filter based on actionCode + action = v['act'] + if action not in actionList: + continue + + # filter out phen/sigs to skip (short-fused) + phen = v['phen'] + sig = v['sig'] + if (phen,sig) in skipPhenSig: + continue + + #set up checks + if MYSITEONLY: + spcsiteCheck = (v['officeid'] == site4ID) + nonNatlCheck = (v['officeid'] == site4ID) + else: + spcsiteCheck = (v['officeid'] != 'KWNS') + nonNatlCheck = (v['officeid'] not in ['KNHC','KWNS']) + + #filter + hazKey = None + if v['phen'] in SPCPhens and spcsiteCheck and \ + v['sig'] == "A": + if v['pil'] in ['WCN']: + hazKey = v['phen'] + "." + v['sig'] + ":" + str(v['etn']) + elif nonNatlCheck: + hazKey = v['phen'] + "." + v['sig'] + elif v['phen'] in tropicalPhens and v['officeid'] == "KNHC": + hazKey = v['phen'] + "." 
+ v['sig'] + + if hazKey is None: + continue + + startTm = v['startTime'] + # turncate the startTime to the top of the hour + startTm = int(startTm / 3600) * 3600 + + # filter out past hazards + if v['endTime'] < currentTime or \ + (v['officeid'] == "KNHC" and \ + (v['issueTime'] + 36*3600 < currentTime)): + continue + + # end times can be insanely large. Arbitrarily trim them to 36hr + # for UntilFurtherNotice + endTm = v['endTime'] + if v.get('ufn', 0): + endTm = currentTime + (36 * 3600) + + # now adjust starting times if event has already started to + # be the current hour + if startTm < currentTime: + startTm = currentTime + + dictKey = (hazKey, startTm, endTm, v['act']) + + if not dictKey in activeTable: + activeTable[dictKey] = [] + + activeTable[dictKey].append(v['id']) + + return activeTable + + + def execute(self, editArea, timeRange, varDict): + # get the hazard utilities + self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) + + self.setToolType("numeric") + + # any temporary grids? 
+ if self._hazUtils._tempWELoaded(): + self.statusBarMsg("Unload temporary hazard weather elements before running HazardRecovery.", + "S") + return + + # get the active table + site4ID = self.getSite4ID(self.getSiteID()) + allZones = self.editAreaList() + activeTable = self.getActiveTable(site4ID, allZones) + + # define this to define the dialog + variableList = [ + ("Your entire Hazards inventory will be replaced with the " + \ + "contents of the active table.","", "label"), + ] + + # call this to pop the dialog + title = "Hazard Recovery" + processVarList = ProcessVariableList.ProcessVariableList(title, \ + variableList, varDict, parent = None) + status = processVarList.status() + if status.lower() != "ok": + print("status:", status) + LogStream.logDebug("HazardRecovery: cancel") + return + LogStream.logDebug("HazardRecovery: OK") + + # see if the Hazards WE is loaded in the GFE, if not abort the tool + if not self._hazUtils._hazardsLoaded(): + self.statusBarMsg("Hazards Weather Element must be loaded in the GFE" + \ + " before running HazardRecovery", "S") + return + + # remove all of the current hazard grids + self._hazUtils._removeAllHazardsGrids() + self._hazUtils._unlockHazards() + + # any hazards at all? + if len(activeTable) == 0: + self.statusBarMsg("There are no hazards in the active table to recover. 
Hazard grids have been cleared.", + "S") + # return + + + keys = list(activeTable.keys()) + allzones = self.editAreaList() + for key, start, end, action in keys: + timeRange = self._hazUtils._makeTimeRange(start, end) + zoneList = activeTable[(key, start, end, action)] + filteredZoneList = [] + for z in zoneList: + if z in allzones: + filteredZoneList.append(z) + + mask = self._hazUtils._makeMask(filteredZoneList) + self._hazUtils._addHazard("Hazards", timeRange, key, mask) + + LogStream.logEvent(self._hazUtils._printTime(start), + self._hazUtils._printTime(end), key, zoneList) + + + + return diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ISC_Discrepancies.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ISC_Discrepancies.py index 3aedbd225d..7a9482e757 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ISC_Discrepancies.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ISC_Discrepancies.py @@ -1,101 +1,101 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ISC_Discrepancies -# -# This procedure runs the Show_ISC_Highlights tool on a set of weather elements. -# The set-up dialog allows the user to choose: -# -# A set of weather elements for which to run the procedure -# A time range -- time range over which to run the procedure -# -# It uses the ISC_Utility and ISC_Utility_Local for setting up -# the algorithm, edit area, and thresholds. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -MenuItems = ["Consistency"] - -import ISC_Utility_Local -import SmartScript -import time -import ProcessVariableList - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - self._dbss = dbss - SmartScript.SmartScript.__init__(self, self._dbss) - - def execute(self, editArea, timeRange, varDict): - # Calls the Show_ISC_Highlights for each element - - #self._utility = ISC_Utility_Local.ISC_Utility_Local( - # self._dbss, self.eaMgr()) - self._utility = ISC_Utility_Local.ISC_Utility_Local( - self._dbss, None) - - # Put up VariableList dialog - variableList = [] - # Determine Elements - elementList = self._utility._getElementList() - variableList.append(("Elements" , elementList, "check", elementList)) - # Determine Time Ranges - trList = self._utility._getTimeRangeList() - variableList.append(("Time Range", "All Grids", "radio", trList)) - varDict = {} - processVarList = ProcessVariableList.ProcessVariableList( - "ISC_Discrepancies", variableList, varDict, - None) - status = processVarList.status() - if status != "OK": - return - - # Determine Elements and Time Range - elements = processVarList.varDict()["Elements"] - trName = processVarList.varDict()["Time Range"] - timeRange = self._utility._convertTimeRange(trName) - area = None - - # Run Smart Tool for each Element over Time Range - time1 = time.time() - for elementName in elements: - if elementName == "Wx": - print "Not yet able to identify discrepancies for Wx" - continue - print "Running Discrepancies for ", elementName - time2 = time.time() - if len(self.getParm("Fcst", elementName, "SFC").getGridInventory(timeRange.toJavaObj())) > 0: - error = self.callSmartTool("Show_ISC_Highlights", elementName, - area, timeRange, varDict, missingDataMode="Skip") - print " Time :", time.time() - time2 - if error is not None: - break - - print "Total Time ", time.time() - time1 - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 
68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ISC_Discrepancies +# +# This procedure runs the Show_ISC_Highlights tool on a set of weather elements. +# The set-up dialog allows the user to choose: +# +# A set of weather elements for which to run the procedure +# A time range -- time range over which to run the procedure +# +# It uses the ISC_Utility and ISC_Utility_Local for setting up +# the algorithm, edit area, and thresholds. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +MenuItems = ["Consistency"] + +import ISC_Utility_Local +import SmartScript +import time +import ProcessVariableList + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + self._dbss = dbss + SmartScript.SmartScript.__init__(self, self._dbss) + + def execute(self, editArea, timeRange, varDict): + # Calls the Show_ISC_Highlights for each element + + #self._utility = ISC_Utility_Local.ISC_Utility_Local( + # self._dbss, self.eaMgr()) + self._utility = ISC_Utility_Local.ISC_Utility_Local( + self._dbss, None) + + # Put up VariableList dialog + variableList = [] + # Determine Elements + elementList = self._utility._getElementList() + variableList.append(("Elements" , elementList, "check", elementList)) + # Determine Time Ranges + trList = self._utility._getTimeRangeList() + variableList.append(("Time Range", "All Grids", "radio", trList)) + varDict = {} + processVarList = ProcessVariableList.ProcessVariableList( + "ISC_Discrepancies", variableList, varDict, + None) + status = processVarList.status() + if status 
!= "OK": + return + + # Determine Elements and Time Range + elements = processVarList.varDict()["Elements"] + trName = processVarList.varDict()["Time Range"] + timeRange = self._utility._convertTimeRange(trName) + area = None + + # Run Smart Tool for each Element over Time Range + time1 = time.time() + for elementName in elements: + if elementName == "Wx": + print("Not yet able to identify discrepancies for Wx") + continue + print("Running Discrepancies for ", elementName) + time2 = time.time() + if len(self.getParm("Fcst", elementName, "SFC").getGridInventory(timeRange.toJavaObj())) > 0: + error = self.callSmartTool("Show_ISC_Highlights", elementName, + area, timeRange, varDict, missingDataMode="Skip") + print(" Time :", time.time() - time2) + if error is not None: + break + + print("Total Time ", time.time() - time1) + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Interpolate_Procedure.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Interpolate_Procedure.py index 89d2f46dc8..cceae0f3f5 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Interpolate_Procedure.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Interpolate_Procedure.py @@ -1,545 +1,545 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Interpolation_4D -# -# Author: -# ---------------------------------------------------------------------------- -MenuItems = ["Edit"] - -VariableList = [ -# ("", "Gaps", "radio", ["Gaps", "Based on Edited Data"]), - ("Algorithm", "Cubic Spline", "radio", ["Cubic Spline", "Tweening"]), - ("", "Gaps", "radio", ["Gaps"]), - ("Grid Type", "Scalar", "radio", ["Scalar", "Discrete"]), - ("Interpolation Interval in Hours", 1, "scale", [1,24], 1), - ("Duration of Grids in Hours", 1, "scale", [1,24], 1), - ("Anti-aliasing Supersampling Level", 4, "scale", [1,12], 1), - ("Anti-aliasing Downsample Mode", "Region-Weighted Averaging", "radio", - ["Region-Weighted Averaging", "Flat Averaging", "Maximizing"]), - ("Region Weighting", 5, "scale", [1,10], 1), - ("Verbose", "no", "radio", ["yes", "no"]), - ] - -from numpy import * -import numpy -import SmartScript -import TimeRange -import types, copy -import random - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - self._dbss = dbss; - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, timeRange, varDict): - # Interpolates active element over given time range - - # This is necessary so that the "getGrids" command will - # return numeric grids from a Procedure since the default - # is point-based. - # This should be fixed in future versions!! 
- self.setToolType("numeric") - - # Get Grids and Times based on timeRange and active element - # grids : List of Existing Grids to be used in interpolating - # times : List of times (seconds past the start time) corresponding to grids - element = self.getActiveElement() - elementName = element.getParmID().getParmName() - grids = self.getGrids("Fcst", elementName, "SFC", timeRange, - mode="List") - gridInfos = self.getGridInfo("Fcst", elementName, "SFC", timeRange) - - if varDict["Verbose"] == "yes": - self._verbose = 1 - else: - self._verbose = 0 - # gridShape : dimension of the grids - if varDict["Grid Type"] == "Scalar": - self._gridShape = shape(grids[0]) - else: - self._gridShape = 1 - times = [zeros(self._gridShape)] - - firstTime = 1 - for gridInfo in gridInfos: - tr = gridInfo.gridTime() - if firstTime: - beginTime = tr.startTime() - firstTime = 0 - else: - times.append(zeros(self._gridShape) + (tr.startTime() - beginTime)) - lastStart = tr.startTime() - - # Determine interpTimes - mode = varDict[""] - interval = varDict["Interpolation Interval in Hours"] - duration = varDict["Duration of Grids in Hours"] - totalDur = timeRange.endTime() - timeRange.startTime() - interpTimes = [] - prevStart = beginTime - if self._verbose: - print "Figuring interpTimes", interval - while 1: - newStart = prevStart + interval * 3600 - prevStart = newStart - if newStart >= lastStart: - break - interpTime = newStart - beginTime - for time in times: - if interpTime == time.flat[0]: - continue - interpTimes.append(interpTime) - if self._verbose: - print "interpTimes" - for t in interpTimes: - print " ", t - - algorithm = varDict["Algorithm"] - if algorithm == "Cubic Spline": - self._cubicSpline(beginTime, duration, elementName, grids, times, interpTimes) - elif algorithm == "Tweening": - sampleFactor = varDict["Anti-aliasing Supersampling Level"] - downsampleMethod = varDict["Anti-aliasing Downsample Mode"] - regionWeighting = varDict["Region Weighting"] - if varDict["Grid Type"] 
== "Scalar": - self._scalarTween(beginTime, duration, elementName, grids, times, interpTimes, - sampleFactor, downsampleMethod, regionWeighting) - else: - self._discreteTween(beginTime, duration, elementName, grids, times, interpTimes, - sampleFactor) - - # Perform interpolation on scalar grids. - # - # Basic options: - # beginTime -- start of the time range to be interpolated - # duration -- length of the time range to - # elementName -- name of the element to be interpolated - # grids -- grids to be interpolated - # times -- time grids for provided data grids - # interpTimes -- times for each interpolated frame - # - # Anti-aliasing options: - # sampleFactor -- up-sample grids by this factor in each direction - # TODO: Introduce quincunx anti-aliasing, possibly? - # (This might be patented.) - # downsampleMethod -- what method to use when bringing the grids - # back to native resolution. - # Options are: - # "Flat Averaging" - Average all the points in a - # point's supersampled area to derive the - # downsampled point. - # "Region-Weighted Averaging" - Like "Flat Averaging," - # but weight interpolated points - # higher than background points. - # "Maximizing" - Select the highest value from a - # point's supersampled area to generate - # the downsampled point. - # regionWeighting -- Parameter for use with "Region-Weighted Averaging" - # downsample method only. By what degree ought in-region - # points be preferred over out-of-region points? - def _scalarTween(self, beginTime, duration, - elementName, grids, times, interpTimes, - sampleFactor, downsampleMethod, regionWeighting): - if self._verbose: - print "Interpolating scalar grids..." - - # Find all points to be interpolated in the source grid. In this case, - # we say all points with a value greater than 0 are to be interpolated. 
- APoints = [] - for x in range(len(grids[0])): - for y in range(len(grids[0][0])): - if(grids[0][x][y] > 0): - for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): - for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): - APoints.append( (tx, ty, grids[0][x][y]) ) - - # Likewise, find all points to be interpolated in the destination grid. - BPoints = [] - for x in range(len(grids[1])): - for y in range(len(grids[1][0])): - if(grids[1][x][y] > 0): - for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): - for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): - BPoints.append( (tx, ty, grids[1][x][y]) ) - - if self._verbose: - print "Points in A: " + str(len(APoints)) - print "Points in B: " + str(len(BPoints)) - - # We always want to interpolate from more points to less points, to get - # maximum "coverage". Interpolate in "reverse" if need be, and set a - # flag so we remember to flip the results around in the end. - toggle = 0 - if len(APoints) < len(BPoints): - toggle = 1 - x = APoints - APoints = BPoints - BPoints = x - - # Generate a random mapping of points in the source grid to points in - # the destination grid. This works remarkably well, especially for - # high supersample values. Interestingly, it looks significantly - # better than many more intelligent-seeming methods (interpolate to - # nearest point, interpolate by overlaid region). - mapping = [] - for i in range(len(APoints)): - mapping.append(random.randrange(0, len(BPoints))) - - # Supersample and interpolate the grids, blowing them up to many times - # their original resolution. This is incredibly memory-intensive, but - # when we produce the downsampled output, it is much smoother. 
- sampleGrids = [] - for t in range(len(interpTimes)): - grid = [] - tfa = 1.0 - float((t + 1.0) / (len(interpTimes) + 1.0)) - tfb = float(t + 1.0) / float(len(interpTimes) + 1.0) - if self._verbose: - print "tfa=" + str(tfa) - print "tfb=" + str(tfb) - for x in range(len(grids[0]) * sampleFactor): - row = [] - for y in range(len(grids[0][0]) * sampleFactor): - row.append(-30) - grid.append(row) - - for i in range(len(mapping)): - grid[int(tfa * APoints[i][0] + tfb * BPoints[mapping[i]][0])][int(tfa * APoints[i][1] + tfb * BPoints[mapping[i]][1])] = tfa * APoints[i][2] + tfb * BPoints[mapping[i]][2] - - sampleGrids.append(grid) - - # Downsample the super-sampled grids using the specified algorithm. - iGrids = [] - for i in range(len(sampleGrids)): - igrid = [] - for x in range(len(grids[0])): - irow = [] - for y in range(len(grids[0][0])): - if downsampleMethod == "Flat Averaging": - value = 0.0 - count = 0 - for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): - for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): - value = value + sampleGrids[i][tx][ty] - count = count + 1 - value = value / float(count) - - elif downsampleMethod == "Region-Weighted Averaging": - value = 0.0 - count = 0 - for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): - for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): - if sampleGrids[i][tx][ty] > 0: - value = value + regionWeighting * sampleGrids[i][tx][ty] - count = count + regionWeighting - else: - value = value + sampleGrids[i][tx][ty] - count = count + 1 - - if count > 0: - value = value / float(count) - else: - value = -30 - - elif downsampleMethod == "Maximizing": - value = sampleGrids[i][x * sampleFactor][y * sampleFactor] - for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): - for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): - if (sampleGrids[i][tx][ty] > value): - value = sampleGrids[i][tx][ty] - - irow.append(int(value)) - 
igrid.append(irow) - iGrids.append(igrid) - - # If we decided above to flip the order of grids in order to - # maintain our more-points to fewer-points interpolation order, - # switch them back at this stage. - if toggle == 1: - iGrids.reverse() - - # Drop the grids back into the GFE. - for i in range(len(interpTimes)): - absTime = beginTime + interpTimes[i] - gridTimeRange = TimeRange.TimeRange(absTime, absTime + duration * 360) - self.createGrid("Fcst", elementName, "SCALAR", numpy.asarray(iGrids[i]), - gridTimeRange) - - # Perform interpolation on discrete grids. - # - # Basic options: - # beginTime -- start of the time range to be interpolated - # duration -- length of the time range to - # elementName -- name of the element to be interpolated - # grids -- grids to be interpolated - # times -- time grids for provided data grids - # interpTimes -- times for each interpolated frame - # - # Anti-aliasing options: - # sampleFactor -- up-sample grids by this factor in each direction - # TODO: Introduce quincunx anti-aliasing, possibly? - # (This might be patented.) - # - # (A number of smoothing options are not available for discrete grids, - # because some operations--such as averaging their values--do not - # make sense.) - def _discreteTween(self, beginTime, duration, - elementName, grids, times, interpTimes, - sampleFactor): - if self._verbose: - print "Interpolating discrete grids (sample=" + str(sampleFactor) + ")" - - # Find all unique "features" (meterological elements) in the - # source grid and a list of the points comprising each of them - # to a set. They will be interpolated independently. - AFeatures = set() - for x in range(len(grids[0][0])): - for y in range(len(grids[0][0][0])): - if not (grids[0][1][grids[0][0][x][y]] in AFeatures): - AFeatures.add(grids[0][1][grids[0][0][x][y]]) - - # Likewise, for the destination grid. 
- BFeatures = set() - for x in range(len(grids[1][0])): - for y in range(len(grids[1][0][0])): - if not (grids[1][1][grids[1][0][x][y]] in BFeatures): - BFeatures.add(grids[1][1][grids[1][0][x][y]]) - - # We're going to put our generated grids into discreteGrids, but we'll - # have to fill them with suitable null values first, and we need to keep - # them around for multiple loop passes, so declare discreteGrids outside - # the loop, here. - discreteGrids = [] - - # We only interpolate on features that exist in both A and B. Currently, - # this fails to do things like turn a heavy rain into a light rain. When - # we fix that, this is the line to change. - features = AFeatures.intersection(BFeatures) - - # We're going to define a dictionary for features that's shared amongst - # interpolated grids. This reduces headache later. - featureTuples = [] - featureTuples.append(grids[0][1][0]) - counter = 0 - - # For each feature, interpolate across all points representing it... - for feature in features: - if feature != "" and feature != "::::": - counter = counter + 1 - - if self._verbose: - print "Interpolating on: " + str(feature) + ", " + str(counter) - - # As in scalarTween, select all the relevant points from the - # source and destination grids. 
- APoints = [] - for x in range(len(grids[0][0])): - for y in range(len(grids[0][0][0])): - if(grids[0][1][grids[0][0][x][y]] == feature): - for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): - for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): - APoints.append( (tx, ty) ) - - BPoints = [] - for x in range(len(grids[1][0])): - for y in range(len(grids[1][0][0])): - if(grids[1][1][grids[1][0][x][y]] == feature): - for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): - for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): - BPoints.append( (tx, ty) ) - - - if self._verbose: - print "Points in A: " + str(len(APoints)) - print "Points in B: " + str(len(BPoints)) - - ## We want to interpolate from more points to less points, - ## to get maximum coverage. Interpolate in reverse if need be. - toggle = 0 - if len(APoints) < len(BPoints): - toggle = 1 - x = APoints - APoints = BPoints - BPoints = x - - ## Generate a random mapping of points in the source form - ## to points in the destination form. This works remarkably - ## well, especially for high supersample values. - mapping = [] - for i in range(len(APoints)): - mapping.append(random.randrange(0, len(BPoints))) - - # As in scalarTween, upsample and dump the results into sampledGrids. - # This step also initializes the grids to (0), if they haven't - # been touched yet. 
- sampleGrids = [] - for t in range(len(interpTimes)): - grid = [] - tfa = 1.0 - float((t + 1.0) / (len(interpTimes) + 1.0)) - tfb = float(t + 1.0) / float(len(interpTimes) + 1.0) - if self._verbose: - print "tfa=" + str(tfa) - print "tfb=" + str(tfb) - for x in range(len(grids[0][0]) * sampleFactor): - row = [] - for y in range(len(grids[0][0][0]) * sampleFactor): - row.append(0) - grid.append(row) - - for i in range(len(mapping)): - grid[int(tfa * APoints[i][0] + tfb * BPoints[mapping[i]][0])][int(tfa * APoints[i][1] + tfb * BPoints[mapping[i]][1])] = counter - - sampleGrids.append(grid) - - # Downsample the grids using maximizing downsample - # (neither of the others really make sense, given that - # you can't really average two discrete points. If - # they do exist, this will only overwrite the grid-stored - # values if their values are set to 0. - iGrids = [] - for i in range(len(sampleGrids)): - igrid = [] - for x in range(len(grids[0][0])): - irow = [] - for y in range(len(grids[0][0][0])): - value = sampleGrids[i][x * sampleFactor][y * sampleFactor] - for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): - for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): - if (sampleGrids[i][tx][ty] > value): - value = sampleGrids[i][tx][ty] - - irow.append(int(value)) - igrid.append(irow) - iGrids.append(igrid) - - if toggle == 1: - iGrids.reverse() - - if len(discreteGrids) == 0: - for x in iGrids: - discreteGrids.append(x) - else: - for i in range(len(discreteGrids)): - for x in range(len(discreteGrids[i])): - for y in range(len(discreteGrids[i][x])): - if iGrids[i][x][y] != 0: - discreteGrids[i][x][y] = iGrids[i][x][y] - - # Now, drop the finished grids, one by one, into a finished queue. - featureTuples.append(feature) - - # ...and add them to the GFE. 
- for i in range(len(interpTimes)): - absTime = beginTime + interpTimes[i] - gridTimeRange = TimeRange.TimeRange(absTime, absTime + duration * 360) - self.createGrid("Fcst", elementName, "DISCRETE", (numpy.asarray(discreteGrids[i]), featureTuples), - gridTimeRange) - - def _cubicSpline(self, beginTime, duration, elementName, grids, times, interpTimes): - - # STEP 1: Create coefficients for cubic spline curve - # zCoefs : List of cubic spline coefficient grids computed to fit the - # curve defined by grids and times - # n : length of grids - 1. - - # Determine coefficients - n = len(grids) - 1 - #print "Calculating coeffs" - #print "n", n - #print "times, grids lengths", len(times), len(grids) - zCoefs = self._spline3_coef(n, times, grids) - #print "Done with coeffs" - - # STEP 2: Create interpolated grids using coefficients - # interpTimes : List of Times for which we want interpolated grids - # xGrids : List of interpolated Grids - - # Create interpolated grids - if self._verbose: - print "Interpolating grids" - for interpTime in interpTimes: - x = zeros(self._gridShape) + interpTime - xGrid = self._spline3_eval(n, times, grids, zCoefs, x) - absTime = beginTime + interpTime - gridTimeRange = TimeRange.TimeRange( - absTime, absTime + duration * 3600) - self.createGrid("Fcst", elementName, "SCALAR", xGrid, - gridTimeRange) - if self._verbose: - print "Done creating new grids" - - def _spline3_coef(self, n, t, y): - gridShape = y[0].shape - # These will get filled in later with grids as values - # They are just place holders - h=[0] * n - b=[0] * n - u=[0] * n - v=[0] * n - z=[0] * (n+1) - # Calculate h and b - # range 0 thru n-1 - #print "Calculating h and b" - for i in range(n): - #print "i", i - h[i] = t[i+1] - t[i] - b[i] = (y[i+1] - y[i])/h[i] - #print "h, b", h[i][0][0], b[i][0][0] - # Calculate u and v as functions of h and b - # range 1 thru n-1 - #print "Calculating u and v" - u[1] = (2*(h[0] + h[1])) - v[1] = (6*(b[1]-b[0])) - #print "u1, v1", u[1][0][0], 
v[1][0][0] - for i in range(2, n): - #print "i", i - u[i] = (2*(h[i]+h[i-1]) - h[i-1]**2/u[i-1]) - v[i] = (6*(b[i]-b[i-1]) - h[i-1]*v[i-1]/u[i-1]) - #print "u, v", u[i][0][0], v[i][0][0] - # Calculate z - # range 0 thru n - z[n] = zeros(gridShape) - #print "Calculating z" - for i in range(n-1, 0, -1): - #print "i", i - if type(u[i]) is types.IntType: - print "u", u[i] - z[i] = (v[i] - h[i]*z[i+1])/u[i] - #print "z", z[i][0][0] - z[0] = zeros(gridShape) - return z - - def _spline3_eval(self, n, t, y, z, x): - for i in range(n-1, 0, -1): - #print "x, t", x[0][0], t[i][0][0] - if x[0][0]-t[i][0][0] >= 0: - break - #print "using i", i - h = t[i+1]-t[i] - tmp = (z[i]/2) + (x-t[i]) * (z[i+1]-z[i])/(6*h) - tmp = -(h/6)*(z[i+1]+2*z[i]) + (y[i+1]-y[i])/h + (x-t[i]) * tmp - return y[i] + (x-t[i]) * tmp - - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# Interpolation_4D +# +# Author: +# ---------------------------------------------------------------------------- +MenuItems = ["Edit"] + +VariableList = [ +# ("", "Gaps", "radio", ["Gaps", "Based on Edited Data"]), + ("Algorithm", "Cubic Spline", "radio", ["Cubic Spline", "Tweening"]), + ("", "Gaps", "radio", ["Gaps"]), + ("Grid Type", "Scalar", "radio", ["Scalar", "Discrete"]), + ("Interpolation Interval in Hours", 1, "scale", [1,24], 1), + ("Duration of Grids in Hours", 1, "scale", [1,24], 1), + ("Anti-aliasing Supersampling Level", 4, "scale", [1,12], 1), + ("Anti-aliasing Downsample Mode", "Region-Weighted Averaging", "radio", + ["Region-Weighted Averaging", "Flat Averaging", "Maximizing"]), + ("Region Weighting", 5, "scale", [1,10], 1), + ("Verbose", "no", "radio", ["yes", "no"]), + ] + +from numpy import * +import numpy +import SmartScript +import TimeRange +import types, copy +import random + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + self._dbss = dbss; + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, timeRange, varDict): + # Interpolates active element over given time range + + # This is necessary so that the "getGrids" command will + # return numeric grids from a Procedure since the default + # is point-based. + # This should be fixed in future versions!! 
+ self.setToolType("numeric") + + # Get Grids and Times based on timeRange and active element + # grids : List of Existing Grids to be used in interpolating + # times : List of times (seconds past the start time) corresponding to grids + element = self.getActiveElement() + elementName = element.getParmID().getParmName() + grids = self.getGrids("Fcst", elementName, "SFC", timeRange, + mode="List") + gridInfos = self.getGridInfo("Fcst", elementName, "SFC", timeRange) + + if varDict["Verbose"] == "yes": + self._verbose = 1 + else: + self._verbose = 0 + # gridShape : dimension of the grids + if varDict["Grid Type"] == "Scalar": + self._gridShape = shape(grids[0]) + else: + self._gridShape = 1 + times = [zeros(self._gridShape)] + + firstTime = 1 + for gridInfo in gridInfos: + tr = gridInfo.gridTime() + if firstTime: + beginTime = tr.startTime() + firstTime = 0 + else: + times.append(zeros(self._gridShape) + (tr.startTime() - beginTime)) + lastStart = tr.startTime() + + # Determine interpTimes + mode = varDict[""] + interval = varDict["Interpolation Interval in Hours"] + duration = varDict["Duration of Grids in Hours"] + totalDur = timeRange.endTime() - timeRange.startTime() + interpTimes = [] + prevStart = beginTime + if self._verbose: + print("Figuring interpTimes", interval) + while 1: + newStart = prevStart + interval * 3600 + prevStart = newStart + if newStart >= lastStart: + break + interpTime = newStart - beginTime + for time in times: + if interpTime == time.flat[0]: + continue + interpTimes.append(interpTime) + if self._verbose: + print("interpTimes") + for t in interpTimes: + print(" ", t) + + algorithm = varDict["Algorithm"] + if algorithm == "Cubic Spline": + self._cubicSpline(beginTime, duration, elementName, grids, times, interpTimes) + elif algorithm == "Tweening": + sampleFactor = varDict["Anti-aliasing Supersampling Level"] + downsampleMethod = varDict["Anti-aliasing Downsample Mode"] + regionWeighting = varDict["Region Weighting"] + if varDict["Grid 
Type"] == "Scalar": + self._scalarTween(beginTime, duration, elementName, grids, times, interpTimes, + sampleFactor, downsampleMethod, regionWeighting) + else: + self._discreteTween(beginTime, duration, elementName, grids, times, interpTimes, + sampleFactor) + + # Perform interpolation on scalar grids. + # + # Basic options: + # beginTime -- start of the time range to be interpolated + # duration -- length of the time range to + # elementName -- name of the element to be interpolated + # grids -- grids to be interpolated + # times -- time grids for provided data grids + # interpTimes -- times for each interpolated frame + # + # Anti-aliasing options: + # sampleFactor -- up-sample grids by this factor in each direction + # TODO: Introduce quincunx anti-aliasing, possibly? + # (This might be patented.) + # downsampleMethod -- what method to use when bringing the grids + # back to native resolution. + # Options are: + # "Flat Averaging" - Average all the points in a + # point's supersampled area to derive the + # downsampled point. + # "Region-Weighted Averaging" - Like "Flat Averaging," + # but weight interpolated points + # higher than background points. + # "Maximizing" - Select the highest value from a + # point's supersampled area to generate + # the downsampled point. + # regionWeighting -- Parameter for use with "Region-Weighted Averaging" + # downsample method only. By what degree ought in-region + # points be preferred over out-of-region points? + def _scalarTween(self, beginTime, duration, + elementName, grids, times, interpTimes, + sampleFactor, downsampleMethod, regionWeighting): + if self._verbose: + print("Interpolating scalar grids...") + + # Find all points to be interpolated in the source grid. In this case, + # we say all points with a value greater than 0 are to be interpolated. 
+ APoints = [] + for x in range(len(grids[0])): + for y in range(len(grids[0][0])): + if(grids[0][x][y] > 0): + for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): + for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): + APoints.append( (tx, ty, grids[0][x][y]) ) + + # Likewise, find all points to be interpolated in the destination grid. + BPoints = [] + for x in range(len(grids[1])): + for y in range(len(grids[1][0])): + if(grids[1][x][y] > 0): + for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): + for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): + BPoints.append( (tx, ty, grids[1][x][y]) ) + + if self._verbose: + print("Points in A: " + str(len(APoints))) + print("Points in B: " + str(len(BPoints))) + + # We always want to interpolate from more points to less points, to get + # maximum "coverage". Interpolate in "reverse" if need be, and set a + # flag so we remember to flip the results around in the end. + toggle = 0 + if len(APoints) < len(BPoints): + toggle = 1 + x = APoints + APoints = BPoints + BPoints = x + + # Generate a random mapping of points in the source grid to points in + # the destination grid. This works remarkably well, especially for + # high supersample values. Interestingly, it looks significantly + # better than many more intelligent-seeming methods (interpolate to + # nearest point, interpolate by overlaid region). + mapping = [] + for i in range(len(APoints)): + mapping.append(random.randrange(0, len(BPoints))) + + # Supersample and interpolate the grids, blowing them up to many times + # their original resolution. This is incredibly memory-intensive, but + # when we produce the downsampled output, it is much smoother. 
+ sampleGrids = [] + for t in range(len(interpTimes)): + grid = [] + tfa = 1.0 - float((t + 1.0) / (len(interpTimes) + 1.0)) + tfb = float(t + 1.0) / float(len(interpTimes) + 1.0) + if self._verbose: + print("tfa=" + str(tfa)) + print("tfb=" + str(tfb)) + for x in range(len(grids[0]) * sampleFactor): + row = [] + for y in range(len(grids[0][0]) * sampleFactor): + row.append(-30) + grid.append(row) + + for i in range(len(mapping)): + grid[int(tfa * APoints[i][0] + tfb * BPoints[mapping[i]][0])][int(tfa * APoints[i][1] + tfb * BPoints[mapping[i]][1])] = tfa * APoints[i][2] + tfb * BPoints[mapping[i]][2] + + sampleGrids.append(grid) + + # Downsample the super-sampled grids using the specified algorithm. + iGrids = [] + for i in range(len(sampleGrids)): + igrid = [] + for x in range(len(grids[0])): + irow = [] + for y in range(len(grids[0][0])): + if downsampleMethod == "Flat Averaging": + value = 0.0 + count = 0 + for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): + for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): + value = value + sampleGrids[i][tx][ty] + count = count + 1 + value = value / float(count) + + elif downsampleMethod == "Region-Weighted Averaging": + value = 0.0 + count = 0 + for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): + for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): + if sampleGrids[i][tx][ty] > 0: + value = value + regionWeighting * sampleGrids[i][tx][ty] + count = count + regionWeighting + else: + value = value + sampleGrids[i][tx][ty] + count = count + 1 + + if count > 0: + value = value / float(count) + else: + value = -30 + + elif downsampleMethod == "Maximizing": + value = sampleGrids[i][x * sampleFactor][y * sampleFactor] + for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): + for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): + if (sampleGrids[i][tx][ty] > value): + value = sampleGrids[i][tx][ty] + + irow.append(int(value)) + 
igrid.append(irow) + iGrids.append(igrid) + + # If we decided above to flip the order of grids in order to + # maintain our more-points to fewer-points interpolation order, + # switch them back at this stage. + if toggle == 1: + iGrids.reverse() + + # Drop the grids back into the GFE. + for i in range(len(interpTimes)): + absTime = beginTime + interpTimes[i] + gridTimeRange = TimeRange.TimeRange(absTime, absTime + duration * 360) + self.createGrid("Fcst", elementName, "SCALAR", numpy.asarray(iGrids[i]), + gridTimeRange) + + # Perform interpolation on discrete grids. + # + # Basic options: + # beginTime -- start of the time range to be interpolated + # duration -- length of the time range to + # elementName -- name of the element to be interpolated + # grids -- grids to be interpolated + # times -- time grids for provided data grids + # interpTimes -- times for each interpolated frame + # + # Anti-aliasing options: + # sampleFactor -- up-sample grids by this factor in each direction + # TODO: Introduce quincunx anti-aliasing, possibly? + # (This might be patented.) + # + # (A number of smoothing options are not available for discrete grids, + # because some operations--such as averaging their values--do not + # make sense.) + def _discreteTween(self, beginTime, duration, + elementName, grids, times, interpTimes, + sampleFactor): + if self._verbose: + print("Interpolating discrete grids (sample=" + str(sampleFactor) + ")") + + # Find all unique "features" (meterological elements) in the + # source grid and a list of the points comprising each of them + # to a set. They will be interpolated independently. + AFeatures = set() + for x in range(len(grids[0][0])): + for y in range(len(grids[0][0][0])): + if not (grids[0][1][grids[0][0][x][y]] in AFeatures): + AFeatures.add(grids[0][1][grids[0][0][x][y]]) + + # Likewise, for the destination grid. 
+ BFeatures = set() + for x in range(len(grids[1][0])): + for y in range(len(grids[1][0][0])): + if not (grids[1][1][grids[1][0][x][y]] in BFeatures): + BFeatures.add(grids[1][1][grids[1][0][x][y]]) + + # We're going to put our generated grids into discreteGrids, but we'll + # have to fill them with suitable null values first, and we need to keep + # them around for multiple loop passes, so declare discreteGrids outside + # the loop, here. + discreteGrids = [] + + # We only interpolate on features that exist in both A and B. Currently, + # this fails to do things like turn a heavy rain into a light rain. When + # we fix that, this is the line to change. + features = AFeatures.intersection(BFeatures) + + # We're going to define a dictionary for features that's shared amongst + # interpolated grids. This reduces headache later. + featureTuples = [] + featureTuples.append(grids[0][1][0]) + counter = 0 + + # For each feature, interpolate across all points representing it... + for feature in features: + if feature != "" and feature != "::::": + counter = counter + 1 + + if self._verbose: + print("Interpolating on: " + str(feature) + ", " + str(counter)) + + # As in scalarTween, select all the relevant points from the + # source and destination grids. 
+ APoints = [] + for x in range(len(grids[0][0])): + for y in range(len(grids[0][0][0])): + if(grids[0][1][grids[0][0][x][y]] == feature): + for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): + for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): + APoints.append( (tx, ty) ) + + BPoints = [] + for x in range(len(grids[1][0])): + for y in range(len(grids[1][0][0])): + if(grids[1][1][grids[1][0][x][y]] == feature): + for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): + for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): + BPoints.append( (tx, ty) ) + + + if self._verbose: + print("Points in A: " + str(len(APoints))) + print("Points in B: " + str(len(BPoints))) + + ## We want to interpolate from more points to less points, + ## to get maximum coverage. Interpolate in reverse if need be. + toggle = 0 + if len(APoints) < len(BPoints): + toggle = 1 + x = APoints + APoints = BPoints + BPoints = x + + ## Generate a random mapping of points in the source form + ## to points in the destination form. This works remarkably + ## well, especially for high supersample values. + mapping = [] + for i in range(len(APoints)): + mapping.append(random.randrange(0, len(BPoints))) + + # As in scalarTween, upsample and dump the results into sampledGrids. + # This step also initializes the grids to (0), if they haven't + # been touched yet. 
+ sampleGrids = [] + for t in range(len(interpTimes)): + grid = [] + tfa = 1.0 - float((t + 1.0) / (len(interpTimes) + 1.0)) + tfb = float(t + 1.0) / float(len(interpTimes) + 1.0) + if self._verbose: + print("tfa=" + str(tfa)) + print("tfb=" + str(tfb)) + for x in range(len(grids[0][0]) * sampleFactor): + row = [] + for y in range(len(grids[0][0][0]) * sampleFactor): + row.append(0) + grid.append(row) + + for i in range(len(mapping)): + grid[int(tfa * APoints[i][0] + tfb * BPoints[mapping[i]][0])][int(tfa * APoints[i][1] + tfb * BPoints[mapping[i]][1])] = counter + + sampleGrids.append(grid) + + # Downsample the grids using maximizing downsample + # (neither of the others really make sense, given that + # you can't really average two discrete points. If + # they do exist, this will only overwrite the grid-stored + # values if their values are set to 0. + iGrids = [] + for i in range(len(sampleGrids)): + igrid = [] + for x in range(len(grids[0][0])): + irow = [] + for y in range(len(grids[0][0][0])): + value = sampleGrids[i][x * sampleFactor][y * sampleFactor] + for tx in range(x * sampleFactor, x * sampleFactor + sampleFactor): + for ty in range(y * sampleFactor, y * sampleFactor + sampleFactor): + if (sampleGrids[i][tx][ty] > value): + value = sampleGrids[i][tx][ty] + + irow.append(int(value)) + igrid.append(irow) + iGrids.append(igrid) + + if toggle == 1: + iGrids.reverse() + + if len(discreteGrids) == 0: + for x in iGrids: + discreteGrids.append(x) + else: + for i in range(len(discreteGrids)): + for x in range(len(discreteGrids[i])): + for y in range(len(discreteGrids[i][x])): + if iGrids[i][x][y] != 0: + discreteGrids[i][x][y] = iGrids[i][x][y] + + # Now, drop the finished grids, one by one, into a finished queue. + featureTuples.append(feature) + + # ...and add them to the GFE. 
+ for i in range(len(interpTimes)): + absTime = beginTime + interpTimes[i] + gridTimeRange = TimeRange.TimeRange(absTime, absTime + duration * 360) + self.createGrid("Fcst", elementName, "DISCRETE", (numpy.asarray(discreteGrids[i]), featureTuples), + gridTimeRange) + + def _cubicSpline(self, beginTime, duration, elementName, grids, times, interpTimes): + + # STEP 1: Create coefficients for cubic spline curve + # zCoefs : List of cubic spline coefficient grids computed to fit the + # curve defined by grids and times + # n : length of grids - 1. + + # Determine coefficients + n = len(grids) - 1 + #print "Calculating coeffs" + #print "n", n + #print "times, grids lengths", len(times), len(grids) + zCoefs = self._spline3_coef(n, times, grids) + #print "Done with coeffs" + + # STEP 2: Create interpolated grids using coefficients + # interpTimes : List of Times for which we want interpolated grids + # xGrids : List of interpolated Grids + + # Create interpolated grids + if self._verbose: + print("Interpolating grids") + for interpTime in interpTimes: + x = zeros(self._gridShape) + interpTime + xGrid = self._spline3_eval(n, times, grids, zCoefs, x) + absTime = beginTime + interpTime + gridTimeRange = TimeRange.TimeRange( + absTime, absTime + duration * 3600) + self.createGrid("Fcst", elementName, "SCALAR", xGrid, + gridTimeRange) + if self._verbose: + print("Done creating new grids") + + def _spline3_coef(self, n, t, y): + gridShape = y[0].shape + # These will get filled in later with grids as values + # They are just place holders + h=[0] * n + b=[0] * n + u=[0] * n + v=[0] * n + z=[0] * (n+1) + # Calculate h and b + # range 0 thru n-1 + #print "Calculating h and b" + for i in range(n): + #print "i", i + h[i] = t[i+1] - t[i] + b[i] = (y[i+1] - y[i])/h[i] + #print "h, b", h[i][0][0], b[i][0][0] + # Calculate u and v as functions of h and b + # range 1 thru n-1 + #print "Calculating u and v" + u[1] = (2*(h[0] + h[1])) + v[1] = (6*(b[1]-b[0])) + #print "u1, v1", u[1][0][0], 
v[1][0][0] + for i in range(2, n): + #print "i", i + u[i] = (2*(h[i]+h[i-1]) - h[i-1]**2/u[i-1]) + v[i] = (6*(b[i]-b[i-1]) - h[i-1]*v[i-1]/u[i-1]) + #print "u, v", u[i][0][0], v[i][0][0] + # Calculate z + # range 0 thru n + z[n] = zeros(gridShape) + #print "Calculating z" + for i in range(n-1, 0, -1): + #print "i", i + if type(u[i]) is int: + print("u", u[i]) + z[i] = (v[i] - h[i]*z[i+1])/u[i] + #print "z", z[i][0][0] + z[0] = zeros(gridShape) + return z + + def _spline3_eval(self, n, t, y, z, x): + for i in range(n-1, 0, -1): + #print "x, t", x[0][0], t[i][0][0] + if x[0][0]-t[i][0][0] >= 0: + break + #print "using i", i + h = t[i+1]-t[i] + tmp = (z[i]/2) + (x-t[i]) * (z[i+1]-z[i])/(6*h) + tmp = -(h/6)*(z[i+1]+2*z[i]) + (y[i+1]-y[i])/h + (x-t[i]) * tmp + return y[i] + (x-t[i]) * tmp + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeEditAreaRepo.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeEditAreaRepo.py index a7a822bb01..931acda80c 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeEditAreaRepo.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeEditAreaRepo.py @@ -1,88 +1,88 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# ---------------------------------------------------------------------------- -## -# -# MakeEditAreaRepo.py -# -# This procedure uses EditAreaUtilities to make an edit area repository for -# saving and retrieving edit areas outside of CAVE. -# -# Author: lefebvre -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ----------- ---------- ----------- -------------------------- -# 11/21/2016 - tlefebvre Added siteID to Edit area repo path so -# domains will stay separated. 
-# 12/02/2016 - tlefebvre Final clean-up -# 12/20/2017 DCS17686 tlefebvre Initial baseline version. -# -## -# ---------------------------------------------------------------------------- -MenuItems = ["Edit"] -import LogStream, time - -VariableList = [] - -import time -import SmartScript -import EditAreaUtilities -import os, stat - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Creates the directory tree with appropriate permissions - def makeFullDirPath(self, dirPath, permissions): - - if not os.path.exists(dirPath): - os.makedirs(dirPath, permissions) - return - - def execute(self, editArea, timeRange, varDict): - - t1 = time.time() - - siteID = self.getSiteID() - - path = "" - - # Define the path for the repository here - -# For High Seas tools -# path = "/data/local/HighSeas/" + siteID + "/EditAreas/" - - if path == "": - self.statusBarMsg("You must define a path in this procedure before creating a repository", "U") - return - - self._eaUtils = EditAreaUtilities.EditAreaUtilities(path) - - permissions = stat.S_IRWXU + stat.S_IRWXG + stat.S_IROTH # 775 permissions on dirs and files - - self.makeFullDirPath(path, permissions) - - eaList = self.editAreaList() - for ea in eaList: - - try: - gfeEA = self.encodeEditArea(ea) - except: - print "error getting", ea, "from GFE." - continue - - self._eaUtils.saveEditArea(ea, gfeEA) - - print "Saved edit area", path + ea - - print "Finished saving all edit areas in", time.time() - t1, "seconds." - - return - - - - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# ---------------------------------------------------------------------------- +## +# +# MakeEditAreaRepo.py +# +# This procedure uses EditAreaUtilities to make an edit area repository for +# saving and retrieving edit areas outside of CAVE. +# +# Author: lefebvre +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ----------- ---------- ----------- -------------------------- +# 11/21/2016 - tlefebvre Added siteID to Edit area repo path so +# domains will stay separated. +# 12/02/2016 - tlefebvre Final clean-up +# 12/20/2017 DCS17686 tlefebvre Initial baseline version. +# +## +# ---------------------------------------------------------------------------- +MenuItems = ["Edit"] +import LogStream, time + +VariableList = [] + +import time +import SmartScript +import EditAreaUtilities +import os, stat + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Creates the directory tree with appropriate permissions + def makeFullDirPath(self, dirPath, permissions): + + if not os.path.exists(dirPath): + os.makedirs(dirPath, permissions) + return + + def execute(self, editArea, timeRange, varDict): + + t1 = time.time() + + siteID = self.getSiteID() + + path = "" + + # Define the path for the repository here + +# For High Seas tools +# path = "/data/local/HighSeas/" + siteID + "/EditAreas/" + + if path == "": + self.statusBarMsg("You must define a path in this procedure before creating a repository", "U") + return + + self._eaUtils = EditAreaUtilities.EditAreaUtilities(path) + + permissions = stat.S_IRWXU + stat.S_IRWXG + stat.S_IROTH # 775 permissions on dirs and files + + self.makeFullDirPath(path, permissions) + + eaList = self.editAreaList() + for ea in eaList: + + try: + gfeEA = self.encodeEditArea(ea) + except: + print("error getting", ea, "from GFE.") + continue + + self._eaUtils.saveEditArea(ea, gfeEA) + + print("Saved edit area", path + ea) + + print("Finished saving all edit areas in", 
time.time() - t1, "seconds.") + + return + + + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeHSEditAreas.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeHSEditAreas.py index e44a8682d7..92945e27f3 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeHSEditAreas.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeHSEditAreas.py @@ -1,2552 +1,2552 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# ---------------------------------------------------------------------------- -## -# Author: lefebvre -# -# HSF -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ----------- ---------- ----------- -------------------------- -# 03/09/2015 - tlefebvre modified formatPolygonEditArea to add -# leading zero when lat < 10 -# 07/29/2016 - tlefebvre Changed edit area retrieval and storage to -# work outside CAVE so edit areas could be shared. -# 12/20/2017 DCS17686 tlefebvre Initial baseline version. -# -## -# ---------------------------------------------------------------------------- - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Populate"] - -import SmartScript -import Tkinter -import tkMessageBox -import time -import types -import math -import MetLib -import numpy as np -import cPickle, os, copy, stat -import TimeRange -import AbsTime -import string -import EditAreaUtilities - -import sys -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # The following section reduces a polygon to just the essential points - - # Calculate the angle formed by the center and y, x - def calcAngle(self, centerY, centerX, y, x): - - lat1 = centerY * self._degToRad - lon1 = centerX * self._degToRad - lat2 = y * self._degToRad - lon2 = x * self._degToRad - - yDiff = (lat2 - lat1) * self._degToRad - xDiff = (lon2 - lon1) * self._degToRad - - angle = math.atan2(yDiff, xDiff) / self._degToRad - if angle < 0: - angle = angle + 360.0 - return angle - - # calculate the great circle distance between the two specified points - # WARNING: resulting distances can be negative - def distance(self, lat1, lon1, lat2, lon2): - - lat1 = lat1 * self._degToRad - lon1 = lon1 * self._degToRad - lat2 = lat2 * self._degToRad - lon2 = lon2 * self._degToRad - - - # some issue with acos and large numbers throws an except - if abs(lon2 - lon1) < 0.000001: - return (lat2 - lat1) / self._degToRad - - dist = math.acos(np.sin(lat1) * np.sin(lat2) + np.cos(lat1) * np.cos(lat2) * \ - np.cos(lon2 - lon1)) / self._degToRad - - - return dist - - # Debugging method to print coordinates in an easy to read format - def printCoords(self, i, x0, y0, x1, y1, x2, y2): - - print "%d---" % i, "%.2f" % y0, "%.2f" % x0, "%.2f" % y1, "%.2f" % x1, "%.2f" % y2, "%.2f" % x2 - - return - - # Returns a list of lat/lons that represent the specified edit area. 
- # These points approximate the edit area with no more than maxPoints - # points. - def simplifyEditArea(self, editArea): - - mask = self.encodeEditArea(editArea) - - - polygons = editArea.getPolygons(ReferenceData.CoordinateType.LATLON) - - coords = polygons.getCoordinates() - - coordList = [] - - for c in coords: - coordList.append((c.x, c.y)) - - # Algorithm configuration section. These values will affect how - # well (or poorly) the algorithm works. - - # Define the number of slices. More gives better radial resolution - # at the price of slightly slower execution. - slices = 36 - - # The resulting simplified area will contain no more than - # this number of points - maxPoints = 6 - - # Any triangle whose area is less than this ratio, the middle - # point gets tossed as being not significant. - areaThreshold = 0.05 - - ### End configuration section - - # Make fixed size arrays of length pieSlices for each access - maxDist = slices * [0] - - # Initialize this list to None so we know if no points were - # found in each slice. - rawLoc = slices * [None] - - # calculate the original area - - gridSize = self.getGridLoc().gridSize() - - # calculate the center of the polygon - - centroid = polygons.getCentroid() - - latCenter = centroid.getY() - lonCenter = centroid.getX() - - originalArea = polygons.getArea() - - # reduce the number of points to a manageable number by - # breaking the edit area up into pie slices and removing - # all points in each slice except the outer most point in - # each pie slice. 
- - for x, y in coordList: - - angle = self.calcAngle(latCenter, lonCenter, y, x) - angleIndex = int(angle / (360.0 / slices)) - dist = self.distance(latCenter, lonCenter, y, x) - if dist > maxDist[angleIndex]: - maxDist[angleIndex] = dist - rawLoc[angleIndex] = (y, x) - - # it's possible no points were found in some sices, so filter them out - loc = [] - for i in rawLoc: - if not i is None: - loc.append(i) - - while True: # loop until we can't remove any more points - - prevCount = len(loc) - minArea = originalArea - areaList = [] - - for i in range(len(loc)): - # figure out the index of the previous and next points - last = i - 1 - next = i + 1 - if next >= len(loc): - next = 0 - - x0 = loc[i][1] - y0 = loc[i][0] - x1 = loc[last][1] - y1 = loc[last][0] - x2 = loc[next][1] - y2 = loc[next][0] - - # make sure the slopes are not zero or infinity - if x2 == x1: - x2 = x2 + 0.000001 - if y2 == y1: - y2 = y2 + 0.000001 - - # it's now safe to calculate the slope of the base of the triagle - slope = (y2 - y1) / (x2 - x1) - - # calculate the point on a line connecting last and next - # points that is perpendicular to the middle point - x = (slope * x1 - y1 + (x0 / slope) + y0) / (slope + (1 / slope)) - y = ((x0 - x) / slope) + y0 - - # calculate the area of the triangle formed by the three points: - # next, current, and previous - height = self.distance(y, x, y0, x0) - base = self.distance(y1, x1, y2, x2) - area = abs(base * height / 2.0) # note area can be negative! 
- areaList.append(area) - - # save the area if it's the smallest - if area < minArea: - minArea = area - delIndex = i - - ######################## end looping through all points ############## - - # calculate the ratio of the area and the original area of the edit area - # Use this later to decide whether to remove the point - ratio = minArea / originalArea - - if len(loc) > maxPoints: - del loc[delIndex] - continue - else: - if ratio < areaThreshold: - del loc[delIndex] - - # If we're down to 3 points: stop - if len(loc) <= 3: - break - - #If no new points have been removed, we're done - if prevCount == len(loc): - break - - return loc - - - ################### End polygon code ################################## - - # Fetches the inventory in the form of a timeRange list - def getWEInventory(self, modelName, weName, timeRange=None): - - if timeRange is None: - timeRange = TimeRange.allTimes() - - trList = [] - # getGridInfo will just die if the modelName or weName is not valid - # so wrap it in a try block and return [] if it fails - try: - gridInfo = self.getGridInfo(modelName, weName, "SFC", timeRange) - except: - return trList - - for g in gridInfo: - if timeRange.overlaps(g.gridTime()): - trList.append(g.gridTime()) - - return trList - - - # Create the list box that holds the names of the scripts - def makeListBox(self): - - labelList = self.getAllEditAreaNames() - labelList.sort() - - label = Tkinter.Label(self._listFrame, text="Edit Area List") - - label.grid(row=0, padx=15, pady=10) - - scrollbar = Tkinter.Scrollbar(self._listFrame, orient=Tkinter.VERTICAL) - self._listbox = Tkinter.Listbox(self._listFrame, yscrollcommand=scrollbar.set, - selectmode=Tkinter.MULTIPLE) - self._listbox.bind("", self.listBoxClick) - #self._listbox.bind("", self.listBoxClick) - - for i in range(len(labelList)): - listItem = self._listbox.insert(Tkinter.END, labelList[i]) - - self._listbox.grid(row=1, padx=5, pady=5) - - scrollbar.config(command=self._listbox.yview) - 
scrollbar.grid(row=1, column=1, sticky=Tkinter.N+Tkinter.S) - - return - - # Make the time period buttons - def makeTimeButtons(self, timeFrame): - label = Tkinter.Label(timeFrame, text="Select Time\nPeriod") - label.grid(row=0, padx=15, pady=10) - - labelList = [] - for i in range(0, 49, 6): - label = str(i) - if len(label) < 2: - label = "0" + label - label = label + "h" - labelList.append(label) - - buttonRow = 1 - for i in range(len(labelList)): - b = Tkinter.Radiobutton(timeFrame, text=labelList[i], - value=labelList[i]) - if labelList[i] == self._defaultTime: - b.select() - self._currentTimePeriod = self._defaultTime - - b.grid(row=buttonRow, column=0, sticky=Tkinter.N+Tkinter.S, pady=2) - buttonRow = buttonRow + 1 - b.bind("", self.timeButtonClick) - return - - - # Callback that fires when a list selector is clicked. - def listBoxClick(self, event=None): - - if self._textBoxCurSelection is not None: - self.textBoxReturnPress() - - # Disable all but the Add New button - if len(self._listbox.curselection()) == 0: - self._clearSelButton.configure(state=Tkinter.DISABLED) - self._replaceSelButton.configure(state=Tkinter.DISABLED) - self._saveSelButton.configure(state=Tkinter.DISABLED) - self._killSelButton.configure(state=Tkinter.DISABLED) -# return - - - # Display the edit area(s) on the GFE - gridSize = (self._gridLoc.gridSize().y, self._gridLoc.gridSize().x) - - mask = np.zeros(gridSize, bool) - - for i in self._listbox.curselection(): - curIndex = int(i) - editAreaName = self._listbox.get(curIndex) - - mask = self._eaUtils.fetchEditArea(editAreaName) - if mask is None: - self.statusBarMsg("Edit Area: " + editAreaName + " not found in HighSeas repository.", "S") - return - - editArea = self.decodeEditArea(mask) - self.setActiveEditArea(editArea) - - # Update the textBox - if sum(sum(mask)) > 0: - editAreaDesc = self.getEditAreaDesc(editAreaName) - self._textBox.configure(font=self._boldFont) - self.updateTextBoxSaveState("Saved") - - 
self.displayAreaDesc(editAreaDesc) - - if len(self._listbox.curselection()) != 1: - # Disable the Replace and Save Buttons - self._replaceSelButton.configure(state=Tkinter.DISABLED) - self._saveSelButton.configure(state=Tkinter.DISABLED) - self._killSelButton.configure(state=Tkinter.DISABLED) - else: - self._replaceSelButton.configure(state=Tkinter.NORMAL) - self._clearSelButton.configure(state=Tkinter.NORMAL) - self._saveSelButton.configure(state=Tkinter.NORMAL) - self._killSelButton.configure(state=Tkinter.NORMAL) - - return - - def enterEvent(self, event=None): - - sys.stdout.flush() - - self._enterCount = self._enterCount + 1 - - if self._enterCount > 4: - self.setActiveEditArea(self._initialEditArea) - self._master.unbind("") - - return - - def redrawListBox(self): - - self._listbox.delete(0, Tkinter.END) - - listItems = [] - for fDict in self._hsDesc: - for areaDict in fDict["areaList"]: - listItems.append(areaDict["areaName"]) - - listItems.sort() - - for i in range(len(listItems)): - self._listbox.insert(Tkinter.END, listItems[i]) - - return - - def selectListBoxItem(self, item): - listBoxItems = self._listbox.get(0, Tkinter.END) - - listBoxItems = list(listBoxItems) - - selectedIndex = listBoxItems.index(item) - - self._listbox.selection_set(selectedIndex) - - return - - def setSelectButtonState(self, buttonState): - - self._clearSelButton.configure(state=buttonState) - self._replaceSelButton.configure(state=buttonState) - self._saveSelButton.configure(state=buttonState) - self._killSelButton.configure(state=buttonState) - - return - - def timeButtonClick(self, event): - - # the event's widget value is a tuple and we want the last one - buttonText = event.widget.config("value")[-1] - - # for some reason, inserting a \n makes that entry a tuple - # so just concatenate the tuple elements into a single string - if type(buttonText) is types.TupleType: - valueText = "" - for t in buttonText: - valueText = valueText + t + " " - valueText = valueText[0:-1] - 
else: - valueText = buttonText - - self._currentTimePeriod = valueText - - return - - # Called when the pie is click upon by the mouse - def pieClick(self, event): - - xCenter = self._circleRadius + (self._circleOffset / 2.0) - yCenter = self._circleRadius + (self._circleOffset / 2.0) - - dist = math.sqrt((event.x - xCenter) * (event.x - xCenter) + \ - (event.y - yCenter) * (event.y - yCenter)) - - xDiff = event.x - xCenter - yDiff = event.y - yCenter - - angle = -math.atan2(yDiff, xDiff) * 360 / (2 * math.pi) - - x1, y1, x2, y2 = self.getCircleCoords() - - # truncate the angle between the center and the click location - angleStart = int(angle / 45.0) * 45.0 - angleExtent = 45.0 - - if angle < 0: - angleExtent = -45.0 - - sliceAngle = angle - if angle < 0: - sliceAngle = angle + 360 - sliceNum = int(sliceAngle / 45.0) - - fillColor = self._pieDeselectColor - - ButtonDownEvent = "4" - ButtonUpEvent = "5" - - if event.type == ButtonDownEvent and not self._pieState[sliceNum] : - self._selectingPieSlices = True - self._deselectingPieSlices = False - elif event.type == ButtonDownEvent and self._pieState[sliceNum]: - self._selectingPieSlices = False - self._deselectingPieSlices = True - elif event.type == ButtonUpEvent: - self._selectingPieSlices = False - self._deselectingPieSlices = False - self.makePolarEditArea() - return - - # select a pieSlice if appropriate - if self._selectingPieSlices: - self._pieState[sliceNum] = True - elif self._deselectingPieSlices: - self._pieState[sliceNum] = False - - # If we're not selecting or deselecting, do nothing - if not self._selectingPieSlices and not self._deselectingPieSlices: - return - - if self._selectingPieSlices: - fillColor = self._pieSelectColor - elif self._deselectingPieSlices: - fillColor = self._pieDeselectColor - - self._canvas.create_arc(x1, y1, x2, y2, style=Tkinter.PIESLICE, - start=angleStart, extent=angleExtent, fill=fillColor) - - return - - # Turns on one pieSlice if they are all off. 
This is so it shows up on the - # screen - def activatePie(self): - - # Don't bother if any slices are already on - for state in self._pieState: - if state: - return - - # Make a fake event so pieClick will be called. - xCenter = self._circleRadius + (self._circleOffset / 2.0) - yCenter = self._circleRadius + (self._circleOffset / 2.0) - - xPos = xCenter + 10 - yPos = yCenter + 3 - - self._canvas.event_generate("", x=xPos, y=yPos) - - return - - def prettyLatString(self, latValue): - if latValue >= 0: - return str(latValue) + "N" - return str(latValue)[1:] + "S" - - def prettyLonString(self, lonValue): - if lonValue >= 0: - return str(lonValue) + "E" - else: - if lonValue < -180.0: - lonValue = lonValue + 360.0 - return str(lonValue) + "E" - - return str(lonValue)[1:] + "W" - - def latScaleMotion(self, value): - - value = float(value) - if value == self._latScaleValue: - return - - self._latScaleValue = value - - latStr = self.prettyLatString(value) - self._latEntry.delete(0, Tkinter.END) - self._latEntry.insert(0, latStr) - - self.activatePie() - self.makePolarEditArea() - return - - def lonScaleMotion(self, value): - - value = float(value) - if value == self._lonScaleValue: - return - - self._lonScaleValue = value - - lonStr = self.prettyLonString(value) - self._lonEntry.delete(0, Tkinter.END) - self._lonEntry.insert(0, lonStr) - - self.activatePie() - self.makePolarEditArea() - - return - - def getLatLonGrids(self): - trList = self.getWEInventory("Fcst", "lat") - if len(trList) == 0: - gridLoc = self.getGridLoc() - latGrid, lonGrid = MetLib.getLatLonGrids(gridLoc) - else: - latGrid = self.getGrids("Fcst", "lat", "SFC", trList[0]) - lonGrid = self.getGrids("Fcst", "lon", "SFC", trList[0]) - - return latGrid, lonGrid - - def radiusScaleMotion(self, value): - - value = float(value) - if value == self._radiusScaleValue: - return - - self._radVar.set(str(value)) - self._radEntry.delete(0, Tkinter.END) - self._radEntry.insert(0, str(value)) - - self._radiusScaleValue 
= value - self.activatePie() - self.makePolarEditArea() - - return - - # A single place where the circle coords can be had. - def getCircleCoords(self): - topLeftX = self._circleOffset - topLeftY = self._circleOffset - bottomRightX = self._circleRadius * 2 - bottomRightY = self._circleRadius * 2 - - return topLeftX, topLeftY, bottomRightX, bottomRightY - - def filterTextEntry(self, entryStr, allowedChars): - - returnStr = "" - for c in entryStr: - if allowedChars.find(c) > -1: - returnStr = returnStr + c - - return returnStr - - def parseLatitude(self): - latStr = self._latEntry.get() - - sign = 1.0 - if latStr.find("S") > -1 or latStr.find("s") > -1: - sign = -1.0 - latStr = latStr[0:-1] - elif latStr.find("N") > -1 or latStr.find("n") > -1: - latStr = latStr[0:-1] - - try: - latValue = float(latStr) * sign - except: - self.statusBarMsg("Invalid latitude value: " + str(self._lonEntry.get()), "S") - return - - return latValue - - def parseLongitude(self): - lonStr = copy.copy(self._lonEntry.get()) - - sign = 1.0 - if lonStr.find("W") > -1 or lonStr.find("w") > -1: - sign = -1.0 - lonStr = lonStr[0:-1] - elif lonStr.find("E") > -1 or lonStr.find("e") > -1: - lonStr = lonStr[0:-1] - - try: - lonValue = float(lonStr) * sign - if lonValue > 0: - lonValue = lonValue - 360 - except: - self.statusBarMsg("Invalid longitude value: " + str(self._lonEntry.get()), "S") - return - - return lonValue - - def latKeyPress(self, event): - # Ignore certain special characters - ignore = ["BackSpace"] - if event.keysym in ignore: - return - - if event.keysym == "Return": - self._latScaleValue = self.parseLatitude() - self.activatePie() - self.makePolarEditArea() - self._latScale.set(self._latScaleValue) - return - - allowedChars = "0123456789-.NSns" - entryStr = self._latEntry.get() + event.char - filterStr = self.filterTextEntry(entryStr, allowedChars) - - # replace the string if not valid - if filterStr != entryStr: - self._latVar.set(str(filterStr)) - # Insert a backspace in the 
queue to remove the offending char - self._latEntry.event_generate("", keysym="BackSpace", - when="tail") - return - - def lonKeyPress(self, event): - # Ignore certain special characters - ignore = ["BackSpace"] - if event.keysym in ignore: - return - - if event.keysym == "Return": - self._lonScaleValue = self.parseLongitude() - self.activatePie() - self.makePolarEditArea() - self._lonScale.set(self._lonScaleValue) - return - - allowedChars = "0123456789-.EWew" - entryStr = self._lonEntry.get() + event.char - filterStr = self.filterTextEntry(entryStr, allowedChars) - - # replace the string if not valid - if filterStr != entryStr: - self._lonVar.set(str(filterStr)) - # Insert a backspace in the queue to remove the offending char - self._lonEntry.event_generate("", keysym="BackSpace", - when="tail") - return - - def radKeyPress(self, event): - # Ignore certain special characters - ignore = ["BackSpace"] - if event.keysym in ignore: - return - - allowedChars = "0123456789." - entryStr = self._radEntry.get() + event.char - filterStr = self.filterTextEntry(entryStr, allowedChars) - - try: - self._radiusScaleValue = float(filterStr) - except: - # replace the string if not valid - self._radVar.set(str(filterStr)) - # Insert a backspace in the queue to remove the offending char - self._radEntry.event_generate("", keysym="BackSpace", when="tail") - return - - if event.keysym == "Return": - self._radiusScale.set(self._radiusScaleValue) - self.makePolarEditArea() - - return - - def makePolarWidgets(self): - - # make the pie slices - x1, y1, x2, y2 = self.getCircleCoords() - canvasWidth = x2 + self._circleOffset - canvasHeight = y2 + self._circleOffset - self._canvas = Tkinter.Canvas(self._polarFrame, width=canvasWidth, - height=canvasHeight) - self._canvas.grid(row=0, column=1, columnspan=2) - - for i in range(0, 8): - self._canvas.create_arc(x1, y1, x2, y2, style=Tkinter.PIESLICE, - start=i*45, extent=45, fill=self._pieDeselectColor) - - self._canvas.bind("", self.pieClick) 
- self._canvas.bind("", self.pieClick) - self._canvas.bind("", self.pieClick) - - # change from/to to minmax lat/lon values - - self._latScale = Tkinter.Scale(self._polarFrame, from_=self._maxLat, to=self._minLat, - orient=Tkinter.VERTICAL, showvalue=0, - resolution=0.5, length=160, - command=self.latScaleMotion) - self._latScale.grid(row=0, column=0, sticky=Tkinter.N) - self._latScale.set(self._latScaleValue) - - self._latVar = Tkinter.StringVar() - self._latVar.set(str(self._latScaleValue)) - - self._latEntry = Tkinter.Entry(self._polarFrame, width=6, textvariable=self._latVar) - - self._latEntry.grid(row=1, column=0, padx=10, sticky=Tkinter.N) - - self._latEntry.bind("", self.latKeyPress) - - latLabel = Tkinter.Label(self._polarFrame, text="Latitude") - latLabel.grid(row=2, column=0, sticky=Tkinter.N) - - self._lonScale = Tkinter.Scale(self._polarFrame, from_=self._minLon, to=self._maxLon, - orient=Tkinter.HORIZONTAL, showvalue=0, - resolution=0.5, length=160, command=self.lonScaleMotion) - self._lonScale.grid(row=1, column=2, sticky=Tkinter.N) - self._lonScale.set(self._lonScaleValue) - - self._lonVar = Tkinter.StringVar() - self._lonEntry = Tkinter.Entry(self._polarFrame, width=6, textvariable=self._lonVar) - self._lonEntry.grid(row=1, column=1, sticky=Tkinter.N) - self._lonEntry.bind("", self.lonKeyPress) - - lonLabel = Tkinter.Label(self._polarFrame, text="Longitude") - lonLabel.grid(row=2, column=1, sticky=Tkinter.NW) - - self._radiusScale = Tkinter.Scale(self._polarFrame, from_=30, to=900, - orient=Tkinter.HORIZONTAL, showvalue=0, - resolution=10, length=160, command=self.radiusScaleMotion) - self._radiusScale.grid(row=3, column=2, pady=10, sticky=Tkinter.W) - self._radiusScale.set(self._defaultRadius) - - self._radVar = Tkinter.StringVar() - self._radEntry = Tkinter.Entry(self._polarFrame, width=6, textvariable=self._radVar) - self._radEntry.grid(row=3, column=0, pady=10, columnspan=2) - self._radEntry.bind("", self.radKeyPress) - - - label = 
Tkinter.Label(self._polarFrame, text="Radius (nm)") - label.grid(row=4, column=0, columnspan=2, sticky=Tkinter.N) - - - return - - # Create the Execute and Cancel buttons - def makeBottomButtons(self): - - button = Tkinter.Button(self._bottomFrame, text="Cancel", - command=self.cancelCommand) - button.grid(row=0, column=0, padx=30, pady=10, sticky=Tkinter.E) - - button = Tkinter.Button(self._bottomFrame, text="Save", - command=self.saveOnlyCommand) - button.grid(row=0, column=1, padx=30, pady=10, sticky=Tkinter.E) - - button = Tkinter.Button(self._bottomFrame, text="Save and Exit", - command=self.saveExitCommand) - button.grid(row=0, column=2, padx=30, pady=10, sticky=Tkinter.E) - - self._bottomFrame.grid() - - return - - def saveOnlyCommand(self): - self.saveHSDescToFile() - - return - - def saveExitCommand(self): - self.saveHSDescToFile() - print "saving desc to file" - self.cancelCommand() - - return - - # Cancels the GUI and the tool. - def cancelCommand(self): - - msg = "Your edits have not been saved. Cancel Anyway?" 
- if not self._editsMade: - self._master.destroy() - self._tkmaster.destroy() - return - - if tkMessageBox.askokcancel("High Sea - Cancel", msg, parent=self._bottomFrame): - self._master.destroy() - self._tkmaster.destroy() - - return - - def getPolarInfo(self, editArea): - lat = None - lon = None - radius = None - pieState = None - - try: - eaMask = self.encodeEditArea(editArea) - except: - eaMask = editArea - - eaMask = eaMask & self._clipMask - polarMask = self.encodeEditArea(self._polarEditArea) & self._clipMask - - if sum(sum(eaMask)) == sum(sum(polarMask)): # they are the same editArea - lat = self._latScaleValue - lon = self._lonScaleValue - radius = self._radiusScaleValue - pieState = self._pieState - return lat, lon, radius, pieState - - - # Return the max width and height of the edit area in lat/lon coordinates - def editAreaExtent(self, editArea): - polygons = editArea.getPolygons(ReferenceData.CoordinateType.LATLON) - - coords = polygons.getCoordinates() - - minX = 10000.0 - minY = 10000.0 - maxX = -10000.0 - maxY = -10000.0 - for c in coords: - minX = min(c.x, minX) - minY = min(c.y, minY) - maxX = max(c.x, maxX) - maxY = max(c.y, maxY) - - diffX = maxX - minX - diffY = maxY - minY - - return diffX, diffY - - # Called when the save button is clicked. 
- def saveSelCommand(self): - editArea = self.getActiveEditArea() - - # Check for null area - mask = self.encodeEditArea(editArea) & self._clipMask - if sum(sum(mask)) == 0: - self.statusBarMsg("Please select an edit area before saving.", "S") - return - - # Check to ensure the edit area is large enough - diffLon, diffLat = self.editAreaExtent(editArea) - - if diffLon < self._minEASize or diffLat < self._minEASize: - self.statusBarMsg("Please select a larger edit area before saving", "S") - return - - editArea = self.decodeEditArea(mask) - self.setActiveEditArea(editArea) - - # get the selected button - curSel = self._listbox.curselection() - if len(curSel) == 0: - return - - listIndex = self._listbox.curselection()[0] - buttonStr = self._listbox.get(listIndex) - - featureName = self.extractFeature(buttonStr) - timePeriod = self._currentTimePeriod - editAreaName = self.getNewAreaName(featureName, timePeriod) - - # get the polar parameters - polarInfo = self.getPolarInfo(editArea) - - if polarInfo is not None: - lat, lon, radius, pieState = polarInfo - else: - lat = None - lon = None - radius = None - pieState = None - - editAreaDesc = self.makeEditAreaDesc(editArea, editAreaName, lat, lon, radius, pieState) - # save the area - - activeArea = self.getActiveEditArea() - - activeMask = self.encodeEditArea(activeArea) - self._eaUtils.saveEditArea(editAreaName, activeMask) - - self.displayAreaDesc(editAreaDesc) - self.addNewArea(featureName, timePeriod, editAreaName, editAreaDesc, - lat, lon, radius, pieState) - - - self.redrawListBox() - self.selectListBoxItem(editAreaName) - - self.setSelectButtonState(Tkinter.NORMAL) - - self._editsMade = True - - def saveNewCommand(self): - - editArea = self.getActiveEditArea() - - # Check for null area - mask = self.encodeEditArea(editArea) & self._clipMask # clip to domain mask - - if sum(sum(mask)) == 0: - self.statusBarMsg("Please select an edit area before saving.", "S") - return - - # Check to ensure the edit area is large 
enough - diffLon, diffLat = self.editAreaExtent(editArea) - - if diffLon < self._minEASize or diffLat < self._minEASize: - self.statusBarMsg("Please select a larger edit area before saving", "S") - return - - editArea = self.decodeEditArea(mask) - self.setActiveEditArea(editArea) - - timePeriod = self._currentTimePeriod - featureName = self.getNewFeatureName() - editAreaName = self.getNewAreaName(featureName, timePeriod) - - # get the polar parameters - polarInfo = self.getPolarInfo(editArea) - - if polarInfo is not None: - lat, lon, radius, pieState = polarInfo - else: - lat = None - lon = None - radius = None - pieState = None - - editAreaDesc = self.makeEditAreaDesc(editArea, editAreaName, lat, lon, radius, pieState) - # save the area - activeArea = self.getActiveEditArea() - - activeMask = self.encodeEditArea(activeArea) - self._eaUtils.saveEditArea(editAreaName, activeMask) - - self.addNewFeature(featureName, timePeriod, editAreaName, editAreaDesc, - lat, lon, radius, pieState) - - - self.redrawListBox() - - self.selectListBoxItem(editAreaName) - - self.setSelectButtonState(Tkinter.NORMAL) - - self.displayAreaDesc(editAreaDesc) - - self._editsMade = True - - return - - def replaceSelCommand(self): - - listIndex = self._listbox.curselection()[0] - editAreaName = self._listbox.get(listIndex) - - editArea = self.getActiveEditArea() - - # Check to ensure the edit area is large enough - diffLon, diffLat = self.editAreaExtent(editArea) - - if diffLon < self._minEASize or diffLat < self._minEASize: - self.statusBarMsg("Please select a larger edit area before saving", "S") - return - - mask = self.encodeEditArea(editArea) & self._clipMask # clip to domain mask - - editArea = self.decodeEditArea(mask) - self.setActiveEditArea(editArea) - - self._eaUtils.saveEditArea(editAreaName, mask) - - # get the polar parameters - lat, lon, radius, pieState = self.getPolarInfo(editArea) - - editAreaDesc = self.makeEditAreaDesc(editArea, editAreaName, lat, lon, radius, pieState) - - 
self.replaceEditAreaDesc(editAreaName, editAreaDesc) - - self.displayAreaDesc(editAreaDesc) - self._editsMade = True - - return - - def clearSelCommand(self): - self.displayAreaDesc("") - - while True: - - if len(self._listbox.curselection()) == 0: - break - - index = self._listbox.curselection()[0] - - editAreaName = self._listbox.get(index) - - self.removeEditArea(editAreaName) - - self._listbox.delete(index) - - self.setSelectButtonState(Tkinter.DISABLED) - - self._editsMade = True - - return - - def clearAllCommand(self): - - self.displayAreaDesc("") - - listSize = self._listbox.size() - for i in range(listSize): - editAreaName = self._listbox.get(i) - self.removeEditArea(editAreaName) - - self._listbox.delete(0, listSize) - - self.setSelectButtonState(Tkinter.DISABLED) - - self._editsMade = True - - return - - # Called when the Reasons window is closed. Just destroy the window. - def closeReasonWindow(self): - - self._killWindow.destroy() - - self.addKilledArea() - - return - - # Called when any reason button is clicked. 
Updates the reason to the GUI - def reasonButtonClick(self, event): - - self._killDesc = self._killVar.get() - - # Update the display on the GUI - self.displayAreaDesc(self._killDesc) - - return - - def displayKillReasonsDialog(self, killReasons): - - self._killWindow = Tkinter.Toplevel(self._master) - self._killWindow.title("Reason") - self._killFrame = Tkinter.Frame(self._killWindow, width=500, height=500) # can't get this to resize - self._killFrame.pack() - self._killVar = Tkinter.StringVar() - self._killVar.set(self._killDesc) - for i in range(len(killReasons)): - b = Tkinter.Radiobutton(self._killFrame, text=killReasons[i], - variable=self._killVar, value=killReasons[i]) - if b == self._defaultKillReason: - b.select() - - b.grid(row=i, column=0, sticky=Tkinter.W, pady=2) - - b.bind("", self.reasonButtonClick) - - self._doneButton = Tkinter.Button(self._killFrame, text= "DONE", command=self.closeReasonWindow) - self._doneButton.grid(row=len(killReasons), sticky=Tkinter.E+Tkinter.W) - - # Update the display on the GUI - self.displayAreaDesc(self._killDesc) - - self._killFrame.grid() - - # make this dialog modal so no other buttons can be clicked. 
- self._killWindow.transient(self._master) - self._killWindow.grab_set() - - return - - def killSelCommand(self): - # make sure something is selected (should never happen) - if len(self._listbox.curselection()) == 0: - return - - self.displayKillReasonsDialog(self._killReasons) - - return - - def addKilledArea(self): - - # Get the selected button names - buttonStr = self._currentTimePeriod - listIndex = self._listbox.curselection()[0] - - editAreaName = self._listbox.get(listIndex) - # Create the editArea name from the button name - pos = editAreaName.find("_") - editAreaName = editAreaName[0:pos+1] + buttonStr + editAreaName[pos+4:] - - activeArea = self.getActiveEditArea() - - activeMask = self.encodeEditArea(activeArea) - self._eaUtils.saveEditArea(editAreaName, activeMask) - - featureName = self.extractFeature(editAreaName) - timePeriod = self.extractTimePeriod(editAreaName) - - # get the polar parameters - polarInfo = self.getPolarInfo(activeArea) - - if polarInfo is not None: - lat, lon, radius, pieState = polarInfo - else: - lat = None - lon = None - radius = None - pieState = None - - editAreaDesc = self._killDesc - - self.addNewFeature(featureName, timePeriod, editAreaName, editAreaDesc, - lat, lon, radius, pieState) - - for hs in self._hsDesc: - if hs["featureName"] == featureName and hs["timePeriod"] == timePeriod: - for area in hs["areaList"]: - if area["areaName"] == editAreaName: - area["areaDesc"] = editAreaDesc - - self.setSelectButtonState(Tkinter.NORMAL) - - self._editsMade = True - - self.redrawListBox() - return - - def makePolarEditArea(self): - lat1 = self._latScaleValue * self._degToRad - lon1 = self._lonScaleValue * self._degToRad - radius = self._radiusScaleValue - - earthRadius = 6371.0 / 1.852 # Nautical miles - - distanceGrid = np.arccos(np.sin(lat1) * np.sin(self._latGrid) + \ - np.cos(lat1) * np.cos(self._latGrid) * \ - np.cos(self._lonGrid - lon1)) * earthRadius - - distanceMask = np.less(distanceGrid, radius) - - xDist = 
self._latGrid - lat1 - yDist = self._lonGrid - lon1 - - tanGrid = np.arctan2(xDist, yDist) / self._degToRad - tanGrid = np.where(np.less(tanGrid, 0.0), tanGrid + 360.0, tanGrid) - tanGrid = tanGrid / 45.0 # convert to 8 "quardants - - # mask off all but the selected quadrants. - circleMask = np.zeros(tanGrid.shape, dtype=np.int8) - for i in range(len(self._pieState)): - if self._pieState[i]: - tanMask = np.logical_and(np.greater_equal(tanGrid, i), np.less(tanGrid, i+1)) - circleMask |= tanMask - - mask = np.logical_and(distanceMask, circleMask) - editArea = self.decodeEditArea(mask) - - self.setActiveEditArea(editArea) - - self._polarEditArea = editArea - - return - - def dirSort(self, dirA, dirB): - - aDir, aCount = dirA - bDir, bCount = dirB - - if aCount > bCount: - return 1 - elif aCount == bCount: - return 0 - else: - return -1 - - # Returns a string that describes a circular edit area - def formatCircleEditArea(self, lat, lon, radius, pieState): - - dirList = ["E", "NE", "N", "NW", "W", "SW", "S", "SE"] - - # Check to see if they're all selected (the whole pie) - if sum(pieState) == len(pieState): - latStr = self.latLonStr(lat, "lat") - lonStr = self.latLonStr(lon, "lon") - - return "WITHIN " + str(int(radius)) + " NM OF CENTER AT " + \ - latStr + lonStr - - quad = [0] * len(pieState) - semi = [0] * len(pieState) - - # Find out if we have six consecutive slices selected - # This is because the rules are different if 3/4 of the - # pie is selected. 
- sixConsec = False - if sum(pieState) >= 6: - for i in range(-2, len(pieState)): - if not pieState[i] and not pieState[i + 1]: - sixConsec = True - break - - for i in range(len(pieState)): - if pieState[i] and pieState[i - 1]: - quad[i] = 1 - - # Begin with the first selected pieSlice - start = 0 - while quad[start]: - start = start - 1 - - for i in range(start, start + len(quad)): - last = i - 1 - this = i - next = i + 1 - afterNext = next + 1 - if next >= len(quad): - next = next - len(quad) - if afterNext >= len(quad): - afterNext = afterNext - len(quad) - - if quad[last] and quad[this] and quad[next]: - semi[this] = 1 - quad[last] = 0 - quad[this] = 0 - quad[next] = 0 - # if 3/4 of the pie is selected then turn off on extra quad - if sixConsec: - quad[afterNext] = 0 - - # Format the slices into words - finalDir = "" - - # Direction order must be clockwise starting from N - # This list orders directions properly. - pieOrderList = [2, 1, 0, 7, 6, 5, 4, 3] - -## for i in range(len(semi)): - for i in pieOrderList: - if semi[i]: - finalDir = dirList[i] + " SEMICIRCLE" - - if sum(semi) > 0 and sum(quad) > 0: - finalDir = finalDir + " AND " - -## for i in range(len(quad)): - for i in pieOrderList: - if quad[i]: - finalDir = finalDir + dirList[i] + " AND " - - if sum(quad) > 0: - finalDir = finalDir[0:-5] # remove the last AND - - if sum(quad) > 0: - finalDir = finalDir + " QUADRANT" - - if sum(quad) > 1: - finalDir = finalDir + "S" - - latStr = self.latLonStr(lat, "lat") - lonStr = self.latLonStr(lon, "lon") - - desc = "WITHIN " + str(int(radius)) + " NM " + finalDir + \ - " OF CENTER AT " + latStr + lonStr - - return desc - - # Formats a lat lon string with appropriate direction labels - def latLonStr(self, coord, latOrLon, precision=1.0): - - if latOrLon == "lat": - if coord >= 0: - dirStr = "N" - else: - dirStr = "S" - - # Exception for southern boundary of EP3 area to force 18.5S output - # This code is needed only for NH1 domain - if coord < -18.5: - precision = 
0.5 - - # Exception for northern boundary of EP3 area to force 3.4S output - # This code is needed only for NH1 domain - if coord < -3.36 and coord > -3.38: - coord = -3.4 - coordStr = "0" + "%.1f" % abs(coord) + dirStr - return coordStr - - elif latOrLon == "lon": - if coord >= 0: - dirStr = "E" - else: - dirStr = "W" - - coordStr = "%.0f" % abs(round(coord)) + dirStr - # if the precision is 0.5, round to the nearest 0.5 degrees - if precision == 0.5: - coord = int((abs(coord) + 0.25) * 2.0) / 2.0 - # Keep the 0.5 if we have it after rounding - if coord != int(coord): - if abs(coord) < 10: - coordStr = "0" + "%.1f" % abs(coord) + dirStr - else: - coordStr = "%.1f" % abs(coord) + dirStr - - return coordStr - - # Determines the precision based on the max extent in both lat and lon - # Uses self._marginalEASize to define the threshold for the precision. - def determinePrecision(self, points): - - latMin = 10000.0 - latMax = -10000.0 - lonMin = 10000.0 - lonMax = -10000.0 - - # Calculate the min/max extent of these points. - for lat, lon in points: - latMin = min(lat, latMin) - latMax = max(lat, latMax) - lonMin = min(lon, lonMin) - lonMax = max(lon, lonMax) - - # Calculate the difference between the maxes and mins - latDiff = latMax - latMin - lonDiff = lonMax - lonMin - - # If we have a marginally small area, set the precision to 0.5 otherwise 1.0 degrees. 
- precision= 1.0 - if latDiff < self._marginalEASize or lonDiff < self._marginalEASize: - precision = 0.5 - - return precision - - # Returns a string that describes a polygon edit area - # modified to add leading "0" when lat < 10 - def formatPolygonEditArea(self, editArea): - - - mask = self.encodeEditArea(editArea) - points = self.simplifyEditArea(editArea) - - precision = self.determinePrecision(points) - - coordList = [] - - fullStr = "" - for y, x in points: - latStr = self.latLonStr(y, "lat", precision) - lonStr = self.latLonStr(x, "lon", precision) - - coordList.append((latStr, lonStr)) - - # close the polygon - coordList.append(coordList[0]) - - lastLat = "" - lastLon = "" - fullStr = "WITHIN AREA BOUNDED BY " - for latStr, lonStr in coordList: - if latStr == lastLat and lonStr == lastLon: - continue - if len(latStr) < 3: - latStr = latStr.zfill(3) - fullStr = fullStr + latStr + lonStr + " TO " - lastLat = latStr - lastLon = lonStr - - # remove the last " TO " - fullStr = fullStr[0:-4] - - return fullStr - - def makeNamedAreaDesc(self, editArea): - - descList = [] - - editAreaMask = self.encodeEditArea(editArea) - for eaName in self._namedAreaMasks.keys(): - overlap = self._namedAreaMasks[eaName] & editAreaMask - - if overlap.any(): - descList.append(self._namedAreaDescip[eaName]) -# Commented out below as was requiring that ALL namedAreas be overlapped in order to return (JL/TL 07/21/2016) -# else: -# return "" - - if len(descList) == 0: - return "" - - finalStr = "...INCLUDING " - - for desc in descList: - finalStr = finalStr + desc - if desc != descList[-1]: # add an AND if we're not at the last one - finalStr = finalStr + " AND " - else: - finalStr = finalStr + "..." 
- - return finalStr - - def makeEditAreaDesc(self, editArea, editAreaName, lat=None, lon=None, radius=None, pieState=None): - - mask = self.encodeEditArea(editArea) - - namedAreaDesc = self.makeNamedAreaDesc(editArea) - - if lat is not None: - desc = self.formatCircleEditArea(lat, lon, radius, pieState) - else: - desc = self.formatPolygonEditArea(editArea) - - # Add delimiters around the lat/lon descriptor so we can replace it when needed - return self._descDelimStart + desc + namedAreaDesc + self._descDelimEnd - - def displayAreaDesc(self, editAreaDesc): - - self._textBox.configure(state=Tkinter.NORMAL) - self._textBox.delete("1.0", Tkinter.END) - self._textBox.insert("1.0", editAreaDesc) - self._textBox.configure(state=Tkinter.DISABLED) - - return - - def textBoxClick(self, event=None): - - self._textBox.configure(font=self._normalFont) - self._textBox.configure(state=Tkinter.NORMAL) - self._textBoxCurSelection = self._listbox.curselection()[0] - - return - - def textBoxReturnPress(self, event=None): - - #listIndex = int(self._listbox.curselection()[0]) - listIndex = self._textBoxCurSelection - editAreaName = self._listbox.get(listIndex, None) - timePeriod = self._currentTimePeriod - editAreaDesc = self._textBox.get("1.0", Tkinter.END) - - editAreaDesc = editAreaDesc.replace(chr(10), "") - self.displayAreaDesc(editAreaDesc) - - self.replaceEditAreaDesc(editAreaName, editAreaDesc) - self._textBox.configure(font=self._boldFont) - self._textBox.configure(state=Tkinter.DISABLED) - self.updateTextBoxSaveState("Saved") - self._textBoxCurSelection = None - - def textBoxKeyPress(self, event): - - if event.char in string.printable: - self.updateTextBoxSaveState("NOT Saved") - - return - - def updateTextBoxSaveState(self, state): - msg = "Edit Area Description..." 
+ state - - if state == "NOT Saved": - color = "red" - else: - color = "black" - self._textLabel.configure(text=msg, foreground=color) - return - - ########################################################################## - - #### Data structure code - - ########################################################################## - - def getBasinName(self, editAreaName): - # Figure out which basin this area is in - basinName = "" - basinList = [] - for basin in self._basinNames: - if basin in self._allEditAreaNames: - - basinMask = self._eaUtils.fetchEditArea(basin) - else: - continue - - areaMask = self._eaUtils.fetchEditArea(editAreaName) - if areaMask is None: - self.statusBarMsg("Edit area: " + " not found in repository.", "S") - continue - - if sum(sum(basinMask & areaMask)) > 0: - basinList.append(basin) - - basinList.sort() - for b in basinList: - if basinName == "": - basinName = b - else: - basinName = basinName + " AND " + b - - if basinName == "": - self.statusBarMsg("No Basin identified for last edit area.", "S") - - return basinName - - - def addNewFeature(self, featureName, timePeriod, editAreaName, editAreaDesc, - lat=None, lon=None, radius=None, pieState=None): - - if self._siteID in self._sitesWithBasins: - basinName = self.getBasinName(editAreaName) - else: - basinName = None - if basinName is not None: - if basinName.find("_") > -1: - basinName = basinName.replace("_", " ") - - # make the low level data structure - areaDict = {"areaName" : editAreaName, - "areaDesc" : editAreaDesc, - "basin" : basinName, - "lat" : lat, - "lon" : lon, - "radius" : radius, - "pieState" : pieState, - } - - featureDict = { - "featureName" : featureName, - "timePeriod" : timePeriod, - "areaList" : [areaDict], # list of areaDict - "basin" : basinName, - } - - self._hsDesc.append(featureDict) - - return featureName - - def addNewArea(self, featureName, timePeriod, editAreaName, editAreaDesc, - lat=None, lon=None, radius=None, pieState=None): - - if self._siteID in 
self._sitesWithBasins: - basinName = self.getBasinName(editAreaName) - else: - basinName = None - - if basinName is not None: - if basinName.find("_") > -1: - basinName = basinName.replace("_", " ") - - - areaDict = {"areaName" : editAreaName, - "areaDesc" : editAreaDesc, - "basin" : basinName, - "lat" : lat, - "lon" : lon, - "radius" : radius, - "pieState" : pieState, - } - - # Find any feature/time - for fDict in self._hsDesc: - if fDict["featureName"] == featureName: - if fDict["timePeriod"] == timePeriod: - # add it to the existing list - fDict["areaList"].append(areaDict) - - newAreaList = [] - for d in fDict["areaList"]: - newAreaList.append(d['areaName']) - newAreaList.sort() - return - - # featureName and time not found so make a new one - self.addNewFeature(featureName, timePeriod, editAreaName, - editAreaDesc, lat, lon, radius, pieState) - - return - - def removeEditArea(self, editAreaName): - # Find the feature - for fDict in self._hsDesc: - # look for the areaName - for areaDict in fDict["areaList"]: - if areaDict["areaName"] == editAreaName: - fDict["areaList"].remove(areaDict) - - if len(fDict["areaList"]) == 0: - self._hsDesc.remove(fDict) - - return - - return - - def replaceEditAreaDesc(self, editAreaName, editAreaDesc): - # Find the feature - - if self._siteID in self._sitesWithBasins: - basinName = self.getBasinName(editAreaName) - else: - basinName = None - - if basinName is not None: - if basinName.find("_") > -1: - basinName = basinName.replace("_", " ") - - for fDict in self._hsDesc: - # look for the areaName - for areaDict in fDict["areaList"]: - if areaDict["areaName"] == editAreaName: - areaDict["areaDesc"] = editAreaDesc - areaDict["basin"] = basinName - return - - # Didn't find areaName - - return - - def getEditAreaDesc(self, editAreaName): - for fDict in self._hsDesc: - # look for the areaName - for areaDict in fDict["areaList"]: - if areaDict["areaName"] == editAreaName: - return areaDict["areaDesc"] - - return - - def 
getAllEditAreaNames(self): - areaNameList = [] - for fDict in self._hsDesc: - for areaDict in fDict["areaList"]: - areaNameList.append(areaDict["areaName"]) - - return areaNameList - - def extractFeature(self, name): - - parts = name.split("_") - - return parts[0] - - def extractTimePeriod(self, name): - - parts = name.split("_") - - return parts[1] - - # Extracts the directory and removes the fileName - def extractDirName(self, path): - parts = path.split("/") - dirName = "" - for i in range(len(parts) - 1): - dirName = dirName + parts[i] + "/" - - return dirName - - # Creates the directory tree with appropriate permissions - def makeFullDirPath(self, dirPath, permissions): - - if not os.path.exists(dirPath): - os.makedirs(dirPath, permissions) - return - - def getNonLatLonText(self, areaDesc): - - areaDescOld = copy.copy(areaDesc) - openBrace = areaDescOld.find(self._descDelimStart) - closeBrace = areaDescOld.find(self._descDelimEnd) - - preText = areaDescOld[0:openBrace] - postText = areaDescOld[closeBrace+1:] - - return preText, postText - - def clipDescAreas(self, hsDesc, clipAreaName): - # make a copy so we don't corrupt the original version of the descriptor - descList = [] - - if clipAreaName in self._allEditAreaNames: - clipMask = self._eaUtils.fetchEditArea(clipAreaName) - else: - self.statusBarMsg("Edit area: " + clipAreaName + " not found in repository.", "S") - return descList - - for desc in hsDesc: - # Process the editAreas first - areaList = desc["areaList"] - newAreaList = [] - - for area in areaList: - # Get the original edit area and clip to the clipArea - oldAreaMask = self._eaUtils.fetchEditArea(area["areaName"]) - if oldAreaMask is None: - self.statusBarMsg("Edit area: " + clipAreaName + " not found in repository.", "S") - continue - - clippedMask = oldAreaMask & clipMask - - if sum(sum(clippedMask)) == 0: # Don't process this area - continue - - newEditArea = self.decodeEditArea(clippedMask) - # save this newly clipped area with a different 
name - newAreaName = area["areaName"] + clipAreaName - self._eaUtils.saveEditArea(newAreaName, clippedMask) - - lat = None - lon = None - radius = None - pieState = None - - if area["lat"] is not None: - lat = area["lat"] - lon = area["lon"] - radius = area["radius"] - pieState = area["pieState"] - - if len(area["areaDesc"]) < 100: - freshAreaDesc = area["areaDesc"] - else: - # Extract the positions of the text before and after the lat/lon text - oldAreaDesc = copy.copy(area["areaDesc"]) - newAreaDesc = self.makeEditAreaDesc(newEditArea, newAreaName, lat, lon, radius, pieState) - - if area["areaDesc"].find(self._descDelimStart) > -1: - # Extract the positions of the text before and after the lat/lon text - preText, postText = self.getNonLatLonText(oldAreaDesc) - # Restore the pre and post text - freshAreaDesc = preText + newAreaDesc + postText - - newAreaDict = { - "areaName" : newAreaName, - "areaDesc" : freshAreaDesc, - "basin" : area["basin"], - "lat" : None, - "lon" : None, - "radius" : None, - "pieState" : None, - } - newAreaList.append(newAreaDict) - - if len(newAreaList) > 0: - descDict = {} - descDict["timePeriod"] = desc["timePeriod"] - descDict["featureName"] = desc["featureName"] - descDict["basin"] = desc["basin"] - descDict["areaList"] = newAreaList - - descList.append(descDict) - - return descList - - def removeTextBoxDelims(self, highSeasDescriptor): - - for descDict in highSeasDescriptor: - for areaDict in descDict["areaList"]: - areaDict['areaDesc'] = areaDict['areaDesc'].replace("{", "") - areaDict['areaDesc'] = areaDict['areaDesc'].replace("}", "") - - return - - # This method does the work of writing the descriptor information to a - # file. This may be called several times one for the tool info and again for - # the formatter info. In addition some sites may create two descriptor files - # one north of the equator and one south. 
- def writeHSDescriptorToFile(self, highSeasDescriptor, highSeasFileName): - - if not os.path.exists(highSeasFileName): - # See if the directory exists and if not make it - dirOnly = self.extractDirName(highSeasFileName) - if not os.path.exists(dirOnly): - # make the fill directory path - self.makeFullDirPath(dirOnly) - - # open the file for write - setPermissions = False - if not os.path.exists(highSeasFileName): # it's a new file set permissions - setPermissions = True - # open the file for write - - try: - f = open(highSeasFileName, "w") - cPickle.dump(highSeasDescriptor, f) - if setPermissions: - os.chmod(highSeasFileName, self._permissions) - except: - msg = "Error opening " + highSeasFileName + " for write." - self.statusBarMsg(msg, "U") - return - - # Contains the logic for saving the descriptor info to files. - def saveHSDescToFile(self): - - print "in saveHSDescToFile................................................" - # First save the info for the tool - highSeasFileName = self.descriptorFileName("Tool") - self.writeHSDescriptorToFile(self._hsDesc, highSeasFileName) - - # Now save the info for the formatter. - # Find out what we need to save - saveAreas = [] - if self._multiProductSites.has_key(self._siteID): - saveAreas = self._multiProductSites[self._siteID] - - # Otherwise we're making two files, but we need to make two new - # descriptors with the areas clipped to the savedAreas - - for area in saveAreas: - newHSDesc = self.clipDescAreas(self._hsDesc, area) - - # remove the delimiters from the descriptors for the formatter version - self.removeTextBoxDelims(newHSDesc) - highSeasFileName = self.descriptorFileName("Formatter", area) - self.writeHSDescriptorToFile(newHSDesc, highSeasFileName) - - # Now sample the weather grids for interesting weather, save that in - # a different list of dictionaries and save that to a file using the - # same structure. - print "interesting weather............................................." 
- self.getInterestingWeather() - - # reset the editsMade flag - self._editsMade = False - - return - - def fetchWxGrids(self): - startTime = int(time.time() / (6 * 3600)) * (6 * 3600) # truncated to last 6 hour period - endTime = startTime + (48 * 3600) + 1 # 60 hours later - - trList = self.getWEInventory("Fcst", "Wx") - - wxGridList = [] - for t in range(startTime, endTime, 24*3600): - # Find the grid that overlaps the first hour of the period in which we're interested - tr = TimeRange.TimeRange(AbsTime.AbsTime(t), AbsTime.AbsTime(t+ 3600)) - for invTR in trList: - if tr.overlaps(invTR): - wxGrid = self.getGrids("Fcst", "Wx", "SFC", tr) - timePeriod = (t - startTime) / 3600 # hours from start - wxGridList.append((wxGrid, timePeriod)) - - return wxGridList - - def getToolWxTypeIntens(self, wxType): - - intens = wxType[-1] - if intens == "+" or intens == "-": - typeOnly = wxType[:-1] - else: - intens = "m" - typeOnly = wxType - -#JL Testing "T"; currently reporting based on Intensity and not Coverage for "T" 05/30/2016 - if typeOnly in ["T", "K", "VA", "F"] and intens == "m": - intens = "" - - return typeOnly, intens - - # Returns a mask where all points with the specified wxType are set to 1 - def getMaskForWxType(self, wxGrid, wxType): - byteGrid, wxKeys = wxGrid - wxMask = np.zeros(byteGrid.shape, np.bool) - for wxKey in wxKeys: - # get subKeys - subKeys = wxKey.split("^") - for subKey in subKeys: - wxParts = subKey.split(":") - wxStr = wxParts[1] + wxParts[2] - if wxStr == wxType: - wxIndex = self.getIndex(wxKey, wxKeys) - mask = np.equal(byteGrid, wxIndex) - wxMask = mask | wxMask - - return wxMask - - - # Discovers the individual sub edit areas of the specified editArea and - # saves them as individual edit areas. 
- def getContiguousEditAreas(self, editArea): - - - # Make a dummy empty editArea that we will reset - emptyEditAreaName = "EmptyEditArea" - bitGrid = editArea.getGrid() - locs = bitGrid.getContiguousBitArrayLocations() - editAreaList = [] - for i in range(len(locs)): - newGrid = bitGrid.contiguousBitArray(locs[i]) - - # Make a dummy empty editArea that we will reset - emptyMask = np.zeros(self.getGridShape(), np.bool) - newEditArea = self.decodeEditArea(emptyMask) - self.saveEditArea(emptyEditAreaName, newEditArea) - # Now use the edit area - newEditArea = self.getEditArea(emptyEditAreaName) - newEditArea.setGrid(newGrid) -# eaMask = self.encodeEditArea(newEditArea) # Why do this? - editAreaList.append(newEditArea) - - return editAreaList - - # Determines if the specified WxKey matches the weather combinations defined in - # wxTypeList. If a match is found, return the wx coverage, type, and intensity, - # otherwise return None. - def matchingWxKey(self, wxKey, wxTypeList): - # Examine each sub key - subKeys = wxKey.split("^") - for subKey in subKeys: - wxParts = subKey.split(":") - # Extract the coverage, type and intensity - if len(wxParts) >= 4: - wxCov = wxParts[0] - wxType = wxParts[1] - wxIntens = wxParts[2] - # Check each allowed type for a match - for wxTypeIntens in wxTypeList: - allowedType, allowedIntens = self.getToolWxTypeIntens(wxTypeIntens) - if wxType == allowedType and wxIntens == allowedIntens: - # We're only interested in T if coverage matches one in self._thunderCoverages EXCEPT - # for the case Iso T m (moderate) where we will convert to Sct T- to represent - # Scattered thunderstorms. 
- if "T" in wxType and wxCov == "Iso": - return wxCov, allowedType, allowedIntens - - if "T" in wxType and wxCov not in self._thunderCoverages: - continue - - return wxCov, allowedType, allowedIntens - - # If we get to here, no match was found - return None - - def makePeriodList(self, wxGrid, timePeriodStr, area): - - # Dictionary used to convert GFE intensities since it's part of an edit area name - intenDict = { - "+" : "h", - "m" : "m", - "" : "n", - "-" : "l", - } - - byteGrid, wxKeys = wxGrid - - # Figure out the Wx types and intensities allowed for the HSF - # Filter out all other Wx type and intensities but these. - wxTypeList = self._allPeriodWxTypes - if timePeriodStr == "00h": - wxTypeList = wxTypeList + self._firstPeriodWxTypes - - areaList = [] # There's a chance that we won't find any wx so define this here - areaMask = self.encodeEditArea(area) - for wxKey in wxKeys: - # Returns coverage, type, and intensity of any matching sub key - covTypeIntens = self.matchingWxKey(wxKey, wxTypeList) - - if covTypeIntens is None: - continue - - wxCov, wxType, wxIntens = covTypeIntens # extract the components - - # Change IsoTm to SctT- so we can report scattered thunderstorms - if wxCov == "Iso" and wxType == "T" and wxIntens == "": - wxCov = "Sct" - wxIntens = "-" - - wxIndex = self.getIndex(wxKey, wxKeys) - mask = (byteGrid == wxIndex) - mask = mask & self._clipMask - if np.sum(mask) == 0: # no points found with this wxType - continue - - # Make an edit area from the mask - wxEditArea = self.decodeEditArea(mask) - # Get the individual contiguous areas from the edit area - wxEAList = self.getContiguousEditAreas(wxEditArea) - - for eaNum, ea in enumerate(wxEAList): - # Only include edit areas that overlap the current domain - eaMask = self.encodeEditArea(ea) - overlap = eaMask & areaMask - - if not overlap.any(): - print "No overlap for ", wxType - continue - - intenStr = intenDict[wxIntens] - eaName = "Z" + timePeriodStr + wxCov + wxType + intenStr + 
str(eaNum).zfill(2) - - self._eaUtils.saveEditArea(eaName, eaMask) - - eaDesc = self.makeEditAreaDesc(ea, eaName) - areaDict = {} - areaDict["timePeriod"] = timePeriodStr - areaDict["areaName"] = eaName - areaDict["areaDesc"] = eaDesc - areaDict["wxType"] = wxType - areaDict["wxCoverage"] = wxCov - areaDict["intensity"] = wxIntens - - areaList.append(areaDict) - - return areaList - - def getInterestingWeather(self): - # Fetch the Wx grids we need - wxGridList = self.fetchWxGrids() - - saveDomains = [] - if self._multiProductSites.has_key(self._siteID): - saveDomains = self._multiProductSites[self._siteID] - else: - self.statusBarMsg("Domains not defined for site:" + siteID, "S") - self.statusBarMsg("Grid based features will not be saved." "S") - return - for domain in saveDomains: - - gridBasedFeatures = [] - - for wxGrid, timePeriod in wxGridList: - timePeriodStr = str(timePeriod).zfill(2) + "h" - periodList = self.makePeriodList(wxGrid, timePeriodStr, domain) - gridBasedFeatures = gridBasedFeatures + periodList - - # Strip and leading and trailing braces - for gFeature in gridBasedFeatures: - gFeature["areaDesc"] = gFeature["areaDesc"].replace("{", "") - gFeature["areaDesc"] = gFeature["areaDesc"].replace("}", "") - - wxFeaturesFileName = self.gridBasedFileName(domain[-3:]) - - self.writeFeatureList(gridBasedFeatures, wxFeaturesFileName) - - return - - def similarArea(self, area1, area2): - - eaName1 = area1["areaName"] - eaName2 = area2["areaName"] - - mask1 = self._eaUtils.fetchEditArea(eaName1) - mask2 = self._eaUtils.fetchEditArea(eaName2) - - overlap = mask1 & mask2 - - if overlap.any(): - return True - - return False - - def findSimilarFeature(self, area): - # Extract the part from the area - wxType = area["wxType"] - wxCov = area["wxCoverage"] - wxIntens = area["intensity"] - - for group in self._sortedFeatures: - # Check for the same weather - if group[0]["wxType"] != wxType or group[0]["wxCoverage"] != wxCov or group[0]["intensity"] != wxIntens: - 
continue - # Check for similar area - if self.similarArea(area, group[0]): - return group - - return None - - def groupGridBasedFeatures(self, gridBasedFeatures): - # Populate the sorted features with the first area if we have any features - self._sortedFeatures = [] - if len(gridBasedFeatures) == 0: - return - - self._sortedFeatures = [[copy.copy(gridBasedFeatures[0])]] - - for i in range(1, len(gridBasedFeatures)): - simGroup = self.findSimilarFeature(gridBasedFeatures[i]) - if simGroup is not None: # make a new group - simGroup.append(copy.copy(gridBasedFeatures[i])) - continue - else: - self._sortedFeatures.append([copy.copy(gridBasedFeatures[i])]) - return - - def gridBasedFileName(self, subArea): - dirPath = "/data/local/HighSeas/Formatter/" + self._siteID + "/HSF_" + subArea + "GridBasedFeatures.pic" - - return dirPath - - # Writes the gridBased features to a file. - def writeFeatureList(self, gridBasedFeatures, gridBasedFileName): - - self._sortedFeatures = [] - self.groupGridBasedFeatures(gridBasedFeatures) - - if not os.path.exists(gridBasedFileName): - # See if the directory exists and if not make it - dirOnly = self.extractDirName(gridBasedFileName) - if not os.path.exists(dirOnly): - # make the fill directory path - self.makeFullDirPath(dirOnly) - - # open the file for write - setPermissions = False - if not os.path.exists(gridBasedFileName): # it's a new file set set permissions - setPermissions = True - try: - with open(gridBasedFileName, "w") as f: - cPickle.dump(self._sortedFeatures, f) - if setPermissions: - os.chmod(gridBasedFileName, self._permissions) - f.close() - except: - msg = "Error opening " + gridBasedFileName + " for write." 
- self.statusBarMsg(msg, "U") - - return - - - def dumpArea(self, areaName): - - for fDict in self._hsDesc: - for areaDict in fDict["areaList"]: - if areaDict["areaName"] == areaName: - print "-------------------------------------" - print "Feature Name:", fDict["featureName"] - print "Time Period:", fDict["timePeriod"] - print "AreaName:", areaDict["areaName"] - print "AreaDesc:", areaDict["areaDesc"] - print "Basin:", areaDict["basin"] - print "Lat:", areaDict["lat"] - print "Lon:", areaDict["lon"] - print "Radius:", areaDict["radius"] - print "PieState", areaDict["pieState"] - return - - def dumpDatabase(self): - - areaList = [] - print - print "+++++++++++++++ START Dump of database +++++++++++++++" - for fDict in self._hsDesc: - for areaDict in fDict["areaList"]: - areaList.append(areaDict["areaName"]) - areaList.sort() - for areaName in areaList: - self.dumpArea(areaName) - - print "+++++++++++++++ END Dump of database +++++++++++++++" - return - - def getNewFeatureName(self): - # make a list of possibleNames - featureNameList = [] - for fDict in self._hsDesc: - featureNameList.append(fDict["featureName"]) - - num = 1 - while True: - featureName = "Feature" + str(num) - if featureName not in featureNameList: - return featureName - - num = num + 1 - if num > 1000: # ininite loop prevention - break - - print "Error, no new feature for getNewFeatureName" - return "" - - def getNewAreaName(self, featureName, timePeriod): - count = 0 - for fDict in self._hsDesc: - if fDict["featureName"] == featureName and fDict["timePeriod"] == timePeriod: - count = count + len(fDict["areaList"]) - - name = featureName + "_" + str(timePeriod) + "_" + "EA" + str(count+1) - - return name - - # Reads the descriptor from the file. - def initializeHSDescriptor(self): - - highSeasFileName = self.descriptorFileName("Tool") - - try: - f = open(highSeasFileName, "r") - self._hsDesc = cPickle.load(f) - except: - msg = "Descriptor file not found. Starting with an empty descriptor." 
- self.statusBarMsg(msg, "S") - self._hsDesc = [] - - self._editsMade = False - - return - - def makeClipMask(self, clipEditAreas): - - clipMask = None - for editArea in clipEditAreas: - if editArea in self._allEditAreaNames: - mask = self._eaUtils.fetchEditArea(editArea) - if clipMask is None: - clipMask = mask - else: - clipMask = clipMask | mask - else: - self.statusBarMsg(editArea + " edit area not found for clip area", "S") - - return clipMask - - ########################################################################## - - #### END -----Data structure code - - ########################################################################## - - - # Main method that glues all of the GIU pieces together. Creates - # all the frames used by other widgets and calls other methods - # to create buttonsn listboxes, and the status window. - def setUpUI(self): - - # create the main objects - self._tkmaster = Tkinter.Tk() - - self._master = Tkinter.Toplevel(self._tkmaster) - self._tkmaster.withdraw() - - # Capture the "x" click to close the GUI - self._master.protocol('WM_DELETE_WINDOW', self.cancelCommand) - - self._master.title('High Sea Edit Areas') - - self._topFrame = Tkinter.Frame(self._master) - self._topFrame.grid() - - self._timeFrame = Tkinter.Frame(self._topFrame, bd=2, relief=Tkinter.GROOVE) - self._timeFrame.grid(row=0, column=0, sticky=Tkinter.N) - self._timeVar = Tkinter.StringVar() - self.makeTimeButtons(self._timeFrame) - - self._listFrame = Tkinter.Frame(self._topFrame) - self._listFrame.grid(row=0, column=1, padx=15) - self.makeListBox() - - self._listButtonFrame = Tkinter.Frame(self._listFrame, bd=2, relief=Tkinter.GROOVE) - self._listButtonFrame.grid(row=2, column=0, columnspan=2, pady=5) - - self._saveNewButton = Tkinter.Button(self._listButtonFrame, text="ADD New Feature", - command=self.saveNewCommand, width=25) - self._saveNewButton.grid(row=0, column=0) - - self._saveSelButton = Tkinter.Button(self._listButtonFrame, text="SAVE To Selected Feature", - 
command=self.saveSelCommand, width=25, - state=Tkinter.DISABLED) - self._saveSelButton.grid(row=1, column=0) - - self._replaceSelButton = Tkinter.Button(self._listButtonFrame, - text="REPLACE Selected Area", command=self.replaceSelCommand, - width=25, state=Tkinter.DISABLED) - self._replaceSelButton.grid(row=2, column=0) - - self._clearSelButton = Tkinter.Button(self._listButtonFrame, - text="REMOVE Selected Area(s)", - command=self.clearSelCommand, width=25, - state=Tkinter.DISABLED) - self._clearSelButton.grid(row=3, column=0) - - self._clearAllButton = Tkinter.Button(self._listButtonFrame, - text="REMOVE All Areas", - command=self.clearAllCommand, width=25) - - self._clearAllButton.grid(row=4, column=0) - - self._killSelButton = Tkinter.Button(self._listButtonFrame, - text="KILL Selected Area", - command=self.killSelCommand, width=25, - state=Tkinter.DISABLED) - self._killSelButton.grid(row=5, column=0) - - - self._circleRadius = 80 - self._circleOffset = 20 - self._polarFrame = Tkinter.Frame(self._topFrame, bd=2, relief=Tkinter.GROOVE) - self._polarFrame.grid(row=0, column=2, padx=20) - self._pieSelectColor = "red" - self._pieDeselectColor = "gray80" - if self._displayPolarWidgets: - self.makePolarWidgets() - - self._textFrame = Tkinter.Frame(self._topFrame) - self._textFrame.grid(row=2, column=0, columnspan=3, pady=20) - self._textBox = Tkinter.Text(self._textFrame, height=3, width=60, wrap=Tkinter.CHAR, - foreground="black", font=self._boldFont) - self._textBox.grid(row=0, column=0, padx=10) - self._textBox.bind("", self.textBoxClick) - self._textBox.bind("", self.textBoxReturnPress) - self._textBox.bind("", self.textBoxKeyPress) - self._textLabel = Tkinter.Label(self._textFrame, text="Edit Area Description") - self._textLabel.grid(row=1, column=0) - - self._bottomFrame = Tkinter.Frame(self._topFrame) - self._bottomFrame.grid(row=3, column=0, columnspan=3) - self.makeBottomButtons() - - self._master.bind("", self.enterEvent) - - return - - # Returns the name 
of the file used to store the edit area information. - # The appType can be "Tool" or "Formatter" only - # The subArea parameter will be used for sites creating more than one - # High Seas product - def descriptorFileName(self, appType, subArea = ""): - - dirPath = "/data/local/HighSeas/" + appType + "/" - - fileName = dirPath + self._siteID + "/" + subArea + "HighSeasDescriptors.pic" - - return fileName - - def makeNamedAreaMasks(self, namedAreaDict): - - namedAreaMasks = {} - for eaName in namedAreaDict.keys(): - eaMask = self._eaUtils.fetchEditArea(eaName) - - if eaMask is None: - self.statusBarMsg("Edit area: " + eaName + " not found in repository.", "S") - continue - - - namedAreaMasks[eaName] = eaMask - - return namedAreaMasks - - # Defines the local effect edit areas. These will be added to the lat/lon descriptors - # to better identify the area. - # These are areas for which we look for local effects. Format: Edit area name : Description - def defineNamedAreas(self): - - allAreas = { - "le_cabocorrientes" : "CABO CORRIENTES", - "CALIFORNIA" : "GULF OF CALIFORNIA", - "le_pmz011" : "SEBASTIAN VIZCAINO BAY", - "le_pmz013" : "WITHIN 60 NM OF SHORE", - "le_pmz015" : "WITHIN 60 NM OF SHORE", - "le_tehuantepec" : "THE GULF OF TEHUANTEPEC", - "le_panama" : "THE GULF OF PANAMA", - "le_pmz115" : "NEAR THE AZUERO PENINSULA", - "le_papagayo" : "THE GULF OF PAPAGAYO", - "le_pmz119" : "THE GULF OF GUAYAQUIL", - "le_pmz123" : "LEE OF GALAPAGOS ISLANDS", - "le_gmz021_straits_of_florida" : "STRAITS OF FLORIDA", - "le_gmz023_s_of_21n_w_of_95w" : "WITHIN 60 NM OF COAST OF VERACRUZ", - "le_gmz025_60nm_of_campeche" : "WITHIN 60 NM OF COAST OF CAMPECHE", - "le_amz011_yucatan_channel" : "IN YUCATAN CHANNEL", - "le_amz013_cuba_jamaica" : "BETWEEN CUBA AND JAMAICA", - "le_gulf_of_honduras" : "GULF OF HONDURAS", - "le_amz023_mona_swell" : "IN MONA PASSAGE", - "le_amz025_atlc_exposures_and_passages" : "IN ATLANTIC EXPOSURES AND PASSAGES", - "le_amz029_nicaraguan_coast" : "WITHIN 60 
NM OF COAST OF NICARAGUA", - "le_amz031_colombian_coast" : "WITHIN 90 NM OF COAST OF COLOMBIA", - "le_amz033_gulf_of_venezuela" : "GULF OF VENEZUELA", - "le_amz035_atlantic" : "ATLANTIC EXPOSURES", - "le_amz117_atlc_exposures" :"ATLANTIC EXPOSURES", - "le_windward_passage" :"APPROACH TO WINDWARD PASSAGE", - } - # Make sure we hvae the edit area before including it in the returned list - allEANames = self._eaUtils.allEditAreaNames() - namedAreaDescrip = {} - for eaName in allAreas.keys(): - if eaName in allEANames: - namedAreaDescrip[eaName] = allAreas[eaName] - - return namedAreaDescrip - - def execute(self, timeRange): - - self._timeRange = timeRange - - siteID = self.getSiteID() - - #editAreasPath = "/scratch/local/HighSeas/EditAreas/" # for Boulder development - editAreasPath = "/data/local/HighSeas/" + siteID + "/EditAreas/" - - self._eaUtils = EditAreaUtilities.EditAreaUtilities(editAreasPath) - - self._gridLoc = self.getGridLoc() - - self._latGrid, self._lonGrid = self.getLatLonGrids() - self._minLat = int(min(self._latGrid.flat)) - self._minLon = int(min(self._lonGrid.flat)) - self._maxLat = int(max(self._latGrid.flat)) + 1.0 - self._maxLon = int(max(self._lonGrid.flat)) + 1.0 - self._degToRad = 2.0 * math.pi / 360.0 - self._latGrid = self._latGrid * self._degToRad - self._lonGrid = self._lonGrid * self._degToRad - - start = int(time.time()) / 3600 * 3600 - end = start + 24 * 3600 - startTime = AbsTime.AbsTime(start) - endTime = AbsTime.AbsTime(end) - - latLonTR = TimeRange.TimeRange(startTime, endTime) - - self._defaultLat = int((self._minLat + self._maxLat) / 2.0) - self._defaultLon = int((self._minLon + self._maxLon) / 2.0) - - self._defaultRadius = 200.0 - self._latScaleValue = self._defaultLat + 0.01 - self._lonScaleValue = self._defaultLon + 0.01 - self._radiusScaleValue = self._defaultRadius + 0.01 - self._pieState = [False, False, False, False, False, False, False, False] - self._defaultTime = "00h" - self._normalFont = ("helvetica", "12", "normal") 
- self._boldFont = ("helvetica", "12", "bold") - self._textBoxCurSelection = None - - self._allEditAreaNames = self._eaUtils.allEditAreaNames() - - - # Configurable section. Probably will get moved to separate config file. - self._siteID = self.getSiteID() - self._sitesWithBasins = ["NH2"] - self._basinNames = ["ATLC", "GULF_OF_MEXICO", "CARIBBEAN", "HSF_NP", "HSF_SP", "HSF_AT1", "HSF_EP1", "HSF_EPi"] - self._killReasons = ["INLAND.", "MOVED ... OF AREA.", "CONDITIONS MERGED.", "ABSORBED.", "CONDITIONS IMPROVE.", - "LITTLE CHANGE.", "NONE."] - self._defaultKillReason = self._killReasons[-1] - self._killDesc = self._defaultKillReason - - if self._siteID == "HPA": - self._clipEditAreas = ["HSF_NP", "HSF_SP"] - elif self._siteID == "NH1": - self._clipEditAreas = ["HSF_EP2", "HSF_EP3"] - elif self._siteID == "NH2": - self._clipEditAreas = ["ATLC", "GOM", "CARIB"] - elif self._siteID == "ONA": - self._clipEditAreas = ["HSF_AT1"] - elif self._siteID == "ONP": - self._clipEditAreas = ["HSF_EP1"] - else: - self._clipEditAreas = [] - - self._clipMask = self.makeClipMask(self._clipEditAreas) - if self._clipMask is None: - self._clipMask = np.ones(self.getGridShape(), np.bool) - - # Define data for sites that make two descriptor files, one for the NH and another for the SH - # These sites make two products from the same GFE domain. The identifiers for each define the - # edit area or mask over which the product is valid. All defined features will be clipped to - # these areas when the descriptors are generated and save to to the file. 
- self._multiProductSites = { - "HPA" : ["HSF_NP", "HSF_SP"], - "NH1" : ["HSF_EP2", "HSF_EP3"], - "NH2" : ["HSF_AT2"], - "ONA" : ["HSF_AT1"], - "ONP" : ["HSF_EP1"], - } - - self._thunderCoverages = ["Sct", "Num", "Wide"] - self._firstPeriodWxTypes = ["T", "T+", "VA"] - self._allPeriodWxTypes = ["F+", "K", "ZY-", "ZY", "ZY+"] - self._descDelimStart = "{" - self._descDelimEnd = "}" - - self._selectingPieSlices = False - self._deselectingPieSlices = False - - self._displayPolarWidgets = True - - self._editsMade = False - - self._selectedEditArea = "" - - self._minEASize = 0.75 - self._marginalEASize = 1.0 - - self._initialEditArea = self.getActiveEditArea() - self._enterCount = 0 - - self._polarEditArea = self._initialEditArea - - self._namedAreaDescip = self.defineNamedAreas() - self._namedAreaMasks = self.makeNamedAreaMasks(self._namedAreaDescip) - - self._permissions = stat.S_IRWXU + stat.S_IRWXG + stat.S_IROTH # 775 permissions on dirs and files - - self.initializeHSDescriptor() - self.setUpUI() - Tkinter.mainloop() - - return +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# ---------------------------------------------------------------------------- +## +# Author: lefebvre +# +# HSF +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ----------- ---------- ----------- -------------------------- +# 03/09/2015 - tlefebvre modified formatPolygonEditArea to add +# leading zero when lat < 10 +# 07/29/2016 - tlefebvre Changed edit area retrieval and storage to +# work outside CAVE so edit areas could be shared. +# 12/20/2017 DCS17686 tlefebvre Initial baseline version. +# +## +# ---------------------------------------------------------------------------- + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Populate"] + +import SmartScript +import tkinter +import tkinter.messagebox +import time +import types +import math +import MetLib +import numpy as np +import pickle, os, copy, stat +import TimeRange +import AbsTime +import string +import EditAreaUtilities + +import sys +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # The following section reduces a polygon to just the essential points + + # Calculate the angle formed by the center and y, x + def calcAngle(self, centerY, centerX, y, x): + + lat1 = centerY * self._degToRad + lon1 = centerX * self._degToRad + lat2 = y * self._degToRad + lon2 = x * self._degToRad + + yDiff = (lat2 - lat1) * self._degToRad + xDiff = (lon2 - lon1) * self._degToRad + + angle = math.atan2(yDiff, xDiff) / self._degToRad + if angle < 0: + angle = angle + 360.0 + return angle + + # calculate the great circle distance between the two specified points + # WARNING: resulting distances can be negative + def distance(self, lat1, lon1, lat2, lon2): + + lat1 = lat1 * self._degToRad + lon1 = lon1 * self._degToRad + lat2 = lat2 * self._degToRad + lon2 = lon2 * self._degToRad + + + # some issue with acos and large numbers throws an except + if abs(lon2 - lon1) < 0.000001: + return (lat2 - lat1) / self._degToRad + + dist = math.acos(np.sin(lat1) * np.sin(lat2) + np.cos(lat1) * np.cos(lat2) * \ + np.cos(lon2 - lon1)) / self._degToRad + + + return dist + + # Debugging method to print coordinates in an easy to read format + def printCoords(self, i, x0, y0, x1, y1, x2, y2): + + print("%d---" % i, "%.2f" % y0, "%.2f" % x0, "%.2f" % y1, "%.2f" % x1, "%.2f" % y2, "%.2f" % x2) + + return + + # Returns a list of lat/lons that represent the specified edit 
area. + # These points approximate the edit area with no more than maxPoints + # points. + def simplifyEditArea(self, editArea): + + mask = self.encodeEditArea(editArea) + + + polygons = editArea.getPolygons(ReferenceData.CoordinateType.LATLON) + + coords = polygons.getCoordinates() + + coordList = [] + + for c in coords: + coordList.append((c.x, c.y)) + + # Algorithm configuration section. These values will affect how + # well (or poorly) the algorithm works. + + # Define the number of slices. More gives better radial resolution + # at the price of slightly slower execution. + slices = 36 + + # The resulting simplified area will contain no more than + # this number of points + maxPoints = 6 + + # Any triangle whose area is less than this ratio, the middle + # point gets tossed as being not significant. + areaThreshold = 0.05 + + ### End configuration section + + # Make fixed size arrays of length pieSlices for each access + maxDist = slices * [0] + + # Initialize this list to None so we know if no points were + # found in each slice. + rawLoc = slices * [None] + + # calculate the original area + + gridSize = self.getGridLoc().gridSize() + + # calculate the center of the polygon + + centroid = polygons.getCentroid() + + latCenter = centroid.getY() + lonCenter = centroid.getX() + + originalArea = polygons.getArea() + + # reduce the number of points to a manageable number by + # breaking the edit area up into pie slices and removing + # all points in each slice except the outer most point in + # each pie slice. 
+ + for x, y in coordList: + + angle = self.calcAngle(latCenter, lonCenter, y, x) + angleIndex = int(angle / (360.0 / slices)) + dist = self.distance(latCenter, lonCenter, y, x) + if dist > maxDist[angleIndex]: + maxDist[angleIndex] = dist + rawLoc[angleIndex] = (y, x) + + # it's possible no points were found in some sices, so filter them out + loc = [] + for i in rawLoc: + if not i is None: + loc.append(i) + + while True: # loop until we can't remove any more points + + prevCount = len(loc) + minArea = originalArea + areaList = [] + + for i in range(len(loc)): + # figure out the index of the previous and next points + last = i - 1 + next = i + 1 + if next >= len(loc): + next = 0 + + x0 = loc[i][1] + y0 = loc[i][0] + x1 = loc[last][1] + y1 = loc[last][0] + x2 = loc[next][1] + y2 = loc[next][0] + + # make sure the slopes are not zero or infinity + if x2 == x1: + x2 = x2 + 0.000001 + if y2 == y1: + y2 = y2 + 0.000001 + + # it's now safe to calculate the slope of the base of the triagle + slope = (y2 - y1) / (x2 - x1) + + # calculate the point on a line connecting last and next + # points that is perpendicular to the middle point + x = (slope * x1 - y1 + (x0 / slope) + y0) / (slope + (1 / slope)) + y = ((x0 - x) / slope) + y0 + + # calculate the area of the triangle formed by the three points: + # next, current, and previous + height = self.distance(y, x, y0, x0) + base = self.distance(y1, x1, y2, x2) + area = abs(base * height / 2.0) # note area can be negative! 
+ areaList.append(area) + + # save the area if it's the smallest + if area < minArea: + minArea = area + delIndex = i + + ######################## end looping through all points ############## + + # calculate the ratio of the area and the original area of the edit area + # Use this later to decide whether to remove the point + ratio = minArea / originalArea + + if len(loc) > maxPoints: + del loc[delIndex] + continue + else: + if ratio < areaThreshold: + del loc[delIndex] + + # If we're down to 3 points: stop + if len(loc) <= 3: + break + + #If no new points have been removed, we're done + if prevCount == len(loc): + break + + return loc + + + ################### End polygon code ################################## + + # Fetches the inventory in the form of a timeRange list + def getWEInventory(self, modelName, weName, timeRange=None): + + if timeRange is None: + timeRange = TimeRange.allTimes() + + trList = [] + # getGridInfo will just die if the modelName or weName is not valid + # so wrap it in a try block and return [] if it fails + try: + gridInfo = self.getGridInfo(modelName, weName, "SFC", timeRange) + except: + return trList + + for g in gridInfo: + if timeRange.overlaps(g.gridTime()): + trList.append(g.gridTime()) + + return trList + + + # Create the list box that holds the names of the scripts + def makeListBox(self): + + labelList = self.getAllEditAreaNames() + labelList.sort() + + label = tkinter.Label(self._listFrame, text="Edit Area List") + + label.grid(row=0, padx=15, pady=10) + + scrollbar = tkinter.Scrollbar(self._listFrame, orient=tkinter.VERTICAL) + self._listbox = tkinter.Listbox(self._listFrame, yscrollcommand=scrollbar.set, + selectmode=tkinter.MULTIPLE) + self._listbox.bind("", self.listBoxClick) + #self._listbox.bind("", self.listBoxClick) + + for i in range(len(labelList)): + listItem = self._listbox.insert(tkinter.END, labelList[i]) + + self._listbox.grid(row=1, padx=5, pady=5) + + scrollbar.config(command=self._listbox.yview) + 
scrollbar.grid(row=1, column=1, sticky=tkinter.N+tkinter.S) + + return + + # Make the time period buttons + def makeTimeButtons(self, timeFrame): + label = tkinter.Label(timeFrame, text="Select Time\nPeriod") + label.grid(row=0, padx=15, pady=10) + + labelList = [] + for i in range(0, 49, 6): + label = str(i) + if len(label) < 2: + label = "0" + label + label = label + "h" + labelList.append(label) + + buttonRow = 1 + for i in range(len(labelList)): + b = tkinter.Radiobutton(timeFrame, text=labelList[i], + value=labelList[i]) + if labelList[i] == self._defaultTime: + b.select() + self._currentTimePeriod = self._defaultTime + + b.grid(row=buttonRow, column=0, sticky=tkinter.N+tkinter.S, pady=2) + buttonRow = buttonRow + 1 + b.bind("", self.timeButtonClick) + return + + + # Callback that fires when a list selector is clicked. + def listBoxClick(self, event=None): + + if self._textBoxCurSelection is not None: + self.textBoxReturnPress() + + # Disable all but the Add New button + if len(self._listbox.curselection()) == 0: + self._clearSelButton.configure(state=tkinter.DISABLED) + self._replaceSelButton.configure(state=tkinter.DISABLED) + self._saveSelButton.configure(state=tkinter.DISABLED) + self._killSelButton.configure(state=tkinter.DISABLED) +# return + + + # Display the edit area(s) on the GFE + gridSize = (self._gridLoc.gridSize().y, self._gridLoc.gridSize().x) + + mask = np.zeros(gridSize, bool) + + for i in self._listbox.curselection(): + curIndex = int(i) + editAreaName = self._listbox.get(curIndex) + + mask = self._eaUtils.fetchEditArea(editAreaName) + if mask is None: + self.statusBarMsg("Edit Area: " + editAreaName + " not found in HighSeas repository.", "S") + return + + editArea = self.decodeEditArea(mask) + self.setActiveEditArea(editArea) + + # Update the textBox + if sum(sum(mask)) > 0: + editAreaDesc = self.getEditAreaDesc(editAreaName) + self._textBox.configure(font=self._boldFont) + self.updateTextBoxSaveState("Saved") + + 
self.displayAreaDesc(editAreaDesc) + + if len(self._listbox.curselection()) != 1: + # Disable the Replace and Save Buttons + self._replaceSelButton.configure(state=tkinter.DISABLED) + self._saveSelButton.configure(state=tkinter.DISABLED) + self._killSelButton.configure(state=tkinter.DISABLED) + else: + self._replaceSelButton.configure(state=tkinter.NORMAL) + self._clearSelButton.configure(state=tkinter.NORMAL) + self._saveSelButton.configure(state=tkinter.NORMAL) + self._killSelButton.configure(state=tkinter.NORMAL) + + return + + def enterEvent(self, event=None): + + sys.stdout.flush() + + self._enterCount = self._enterCount + 1 + + if self._enterCount > 4: + self.setActiveEditArea(self._initialEditArea) + self._master.unbind("") + + return + + def redrawListBox(self): + + self._listbox.delete(0, tkinter.END) + + listItems = [] + for fDict in self._hsDesc: + for areaDict in fDict["areaList"]: + listItems.append(areaDict["areaName"]) + + listItems.sort() + + for i in range(len(listItems)): + self._listbox.insert(tkinter.END, listItems[i]) + + return + + def selectListBoxItem(self, item): + listBoxItems = self._listbox.get(0, tkinter.END) + + listBoxItems = list(listBoxItems) + + selectedIndex = listBoxItems.index(item) + + self._listbox.selection_set(selectedIndex) + + return + + def setSelectButtonState(self, buttonState): + + self._clearSelButton.configure(state=buttonState) + self._replaceSelButton.configure(state=buttonState) + self._saveSelButton.configure(state=buttonState) + self._killSelButton.configure(state=buttonState) + + return + + def timeButtonClick(self, event): + + # the event's widget value is a tuple and we want the last one + buttonText = event.widget.config("value")[-1] + + # for some reason, inserting a \n makes that entry a tuple + # so just concatenate the tuple elements into a single string + if type(buttonText) is tuple: + valueText = "" + for t in buttonText: + valueText = valueText + t + " " + valueText = valueText[0:-1] + else: + 
valueText = buttonText + + self._currentTimePeriod = valueText + + return + + # Called when the pie is click upon by the mouse + def pieClick(self, event): + + xCenter = self._circleRadius + (self._circleOffset / 2.0) + yCenter = self._circleRadius + (self._circleOffset / 2.0) + + dist = math.sqrt((event.x - xCenter) * (event.x - xCenter) + \ + (event.y - yCenter) * (event.y - yCenter)) + + xDiff = event.x - xCenter + yDiff = event.y - yCenter + + angle = -math.atan2(yDiff, xDiff) * 360 / (2 * math.pi) + + x1, y1, x2, y2 = self.getCircleCoords() + + # truncate the angle between the center and the click location + angleStart = int(angle / 45.0) * 45.0 + angleExtent = 45.0 + + if angle < 0: + angleExtent = -45.0 + + sliceAngle = angle + if angle < 0: + sliceAngle = angle + 360 + sliceNum = int(sliceAngle / 45.0) + + fillColor = self._pieDeselectColor + + ButtonDownEvent = "4" + ButtonUpEvent = "5" + + if event.type == ButtonDownEvent and not self._pieState[sliceNum] : + self._selectingPieSlices = True + self._deselectingPieSlices = False + elif event.type == ButtonDownEvent and self._pieState[sliceNum]: + self._selectingPieSlices = False + self._deselectingPieSlices = True + elif event.type == ButtonUpEvent: + self._selectingPieSlices = False + self._deselectingPieSlices = False + self.makePolarEditArea() + return + + # select a pieSlice if appropriate + if self._selectingPieSlices: + self._pieState[sliceNum] = True + elif self._deselectingPieSlices: + self._pieState[sliceNum] = False + + # If we're not selecting or deselecting, do nothing + if not self._selectingPieSlices and not self._deselectingPieSlices: + return + + if self._selectingPieSlices: + fillColor = self._pieSelectColor + elif self._deselectingPieSlices: + fillColor = self._pieDeselectColor + + self._canvas.create_arc(x1, y1, x2, y2, style=tkinter.PIESLICE, + start=angleStart, extent=angleExtent, fill=fillColor) + + return + + # Turns on one pieSlice if they are all off. 
This is so it shows up on the + # screen + def activatePie(self): + + # Don't bother if any slices are already on + for state in self._pieState: + if state: + return + + # Make a fake event so pieClick will be called. + xCenter = self._circleRadius + (self._circleOffset / 2.0) + yCenter = self._circleRadius + (self._circleOffset / 2.0) + + xPos = xCenter + 10 + yPos = yCenter + 3 + + self._canvas.event_generate("", x=xPos, y=yPos) + + return + + def prettyLatString(self, latValue): + if latValue >= 0: + return str(latValue) + "N" + return str(latValue)[1:] + "S" + + def prettyLonString(self, lonValue): + if lonValue >= 0: + return str(lonValue) + "E" + else: + if lonValue < -180.0: + lonValue = lonValue + 360.0 + return str(lonValue) + "E" + + return str(lonValue)[1:] + "W" + + def latScaleMotion(self, value): + + value = float(value) + if value == self._latScaleValue: + return + + self._latScaleValue = value + + latStr = self.prettyLatString(value) + self._latEntry.delete(0, tkinter.END) + self._latEntry.insert(0, latStr) + + self.activatePie() + self.makePolarEditArea() + return + + def lonScaleMotion(self, value): + + value = float(value) + if value == self._lonScaleValue: + return + + self._lonScaleValue = value + + lonStr = self.prettyLonString(value) + self._lonEntry.delete(0, tkinter.END) + self._lonEntry.insert(0, lonStr) + + self.activatePie() + self.makePolarEditArea() + + return + + def getLatLonGrids(self): + trList = self.getWEInventory("Fcst", "lat") + if len(trList) == 0: + gridLoc = self.getGridLoc() + latGrid, lonGrid = MetLib.getLatLonGrids(gridLoc) + else: + latGrid = self.getGrids("Fcst", "lat", "SFC", trList[0]) + lonGrid = self.getGrids("Fcst", "lon", "SFC", trList[0]) + + return latGrid, lonGrid + + def radiusScaleMotion(self, value): + + value = float(value) + if value == self._radiusScaleValue: + return + + self._radVar.set(str(value)) + self._radEntry.delete(0, tkinter.END) + self._radEntry.insert(0, str(value)) + + self._radiusScaleValue 
= value + self.activatePie() + self.makePolarEditArea() + + return + + # A single place where the circle coords can be had. + def getCircleCoords(self): + topLeftX = self._circleOffset + topLeftY = self._circleOffset + bottomRightX = self._circleRadius * 2 + bottomRightY = self._circleRadius * 2 + + return topLeftX, topLeftY, bottomRightX, bottomRightY + + def filterTextEntry(self, entryStr, allowedChars): + + returnStr = "" + for c in entryStr: + if allowedChars.find(c) > -1: + returnStr = returnStr + c + + return returnStr + + def parseLatitude(self): + latStr = self._latEntry.get() + + sign = 1.0 + if latStr.find("S") > -1 or latStr.find("s") > -1: + sign = -1.0 + latStr = latStr[0:-1] + elif latStr.find("N") > -1 or latStr.find("n") > -1: + latStr = latStr[0:-1] + + try: + latValue = float(latStr) * sign + except: + self.statusBarMsg("Invalid latitude value: " + str(self._lonEntry.get()), "S") + return + + return latValue + + def parseLongitude(self): + lonStr = copy.copy(self._lonEntry.get()) + + sign = 1.0 + if lonStr.find("W") > -1 or lonStr.find("w") > -1: + sign = -1.0 + lonStr = lonStr[0:-1] + elif lonStr.find("E") > -1 or lonStr.find("e") > -1: + lonStr = lonStr[0:-1] + + try: + lonValue = float(lonStr) * sign + if lonValue > 0: + lonValue = lonValue - 360 + except: + self.statusBarMsg("Invalid longitude value: " + str(self._lonEntry.get()), "S") + return + + return lonValue + + def latKeyPress(self, event): + # Ignore certain special characters + ignore = ["BackSpace"] + if event.keysym in ignore: + return + + if event.keysym == "Return": + self._latScaleValue = self.parseLatitude() + self.activatePie() + self.makePolarEditArea() + self._latScale.set(self._latScaleValue) + return + + allowedChars = "0123456789-.NSns" + entryStr = self._latEntry.get() + event.char + filterStr = self.filterTextEntry(entryStr, allowedChars) + + # replace the string if not valid + if filterStr != entryStr: + self._latVar.set(str(filterStr)) + # Insert a backspace in the 
queue to remove the offending char + self._latEntry.event_generate("", keysym="BackSpace", + when="tail") + return + + def lonKeyPress(self, event): + # Ignore certain special characters + ignore = ["BackSpace"] + if event.keysym in ignore: + return + + if event.keysym == "Return": + self._lonScaleValue = self.parseLongitude() + self.activatePie() + self.makePolarEditArea() + self._lonScale.set(self._lonScaleValue) + return + + allowedChars = "0123456789-.EWew" + entryStr = self._lonEntry.get() + event.char + filterStr = self.filterTextEntry(entryStr, allowedChars) + + # replace the string if not valid + if filterStr != entryStr: + self._lonVar.set(str(filterStr)) + # Insert a backspace in the queue to remove the offending char + self._lonEntry.event_generate("", keysym="BackSpace", + when="tail") + return + + def radKeyPress(self, event): + # Ignore certain special characters + ignore = ["BackSpace"] + if event.keysym in ignore: + return + + allowedChars = "0123456789." + entryStr = self._radEntry.get() + event.char + filterStr = self.filterTextEntry(entryStr, allowedChars) + + try: + self._radiusScaleValue = float(filterStr) + except: + # replace the string if not valid + self._radVar.set(str(filterStr)) + # Insert a backspace in the queue to remove the offending char + self._radEntry.event_generate("", keysym="BackSpace", when="tail") + return + + if event.keysym == "Return": + self._radiusScale.set(self._radiusScaleValue) + self.makePolarEditArea() + + return + + def makePolarWidgets(self): + + # make the pie slices + x1, y1, x2, y2 = self.getCircleCoords() + canvasWidth = x2 + self._circleOffset + canvasHeight = y2 + self._circleOffset + self._canvas = tkinter.Canvas(self._polarFrame, width=canvasWidth, + height=canvasHeight) + self._canvas.grid(row=0, column=1, columnspan=2) + + for i in range(0, 8): + self._canvas.create_arc(x1, y1, x2, y2, style=tkinter.PIESLICE, + start=i*45, extent=45, fill=self._pieDeselectColor) + + self._canvas.bind("", self.pieClick) 
+ self._canvas.bind("", self.pieClick) + self._canvas.bind("", self.pieClick) + + # change from/to to minmax lat/lon values + + self._latScale = tkinter.Scale(self._polarFrame, from_=self._maxLat, to=self._minLat, + orient=tkinter.VERTICAL, showvalue=0, + resolution=0.5, length=160, + command=self.latScaleMotion) + self._latScale.grid(row=0, column=0, sticky=tkinter.N) + self._latScale.set(self._latScaleValue) + + self._latVar = tkinter.StringVar() + self._latVar.set(str(self._latScaleValue)) + + self._latEntry = tkinter.Entry(self._polarFrame, width=6, textvariable=self._latVar) + + self._latEntry.grid(row=1, column=0, padx=10, sticky=tkinter.N) + + self._latEntry.bind("", self.latKeyPress) + + latLabel = tkinter.Label(self._polarFrame, text="Latitude") + latLabel.grid(row=2, column=0, sticky=tkinter.N) + + self._lonScale = tkinter.Scale(self._polarFrame, from_=self._minLon, to=self._maxLon, + orient=tkinter.HORIZONTAL, showvalue=0, + resolution=0.5, length=160, command=self.lonScaleMotion) + self._lonScale.grid(row=1, column=2, sticky=tkinter.N) + self._lonScale.set(self._lonScaleValue) + + self._lonVar = tkinter.StringVar() + self._lonEntry = tkinter.Entry(self._polarFrame, width=6, textvariable=self._lonVar) + self._lonEntry.grid(row=1, column=1, sticky=tkinter.N) + self._lonEntry.bind("", self.lonKeyPress) + + lonLabel = tkinter.Label(self._polarFrame, text="Longitude") + lonLabel.grid(row=2, column=1, sticky=tkinter.NW) + + self._radiusScale = tkinter.Scale(self._polarFrame, from_=30, to=900, + orient=tkinter.HORIZONTAL, showvalue=0, + resolution=10, length=160, command=self.radiusScaleMotion) + self._radiusScale.grid(row=3, column=2, pady=10, sticky=tkinter.W) + self._radiusScale.set(self._defaultRadius) + + self._radVar = tkinter.StringVar() + self._radEntry = tkinter.Entry(self._polarFrame, width=6, textvariable=self._radVar) + self._radEntry.grid(row=3, column=0, pady=10, columnspan=2) + self._radEntry.bind("", self.radKeyPress) + + + label = 
tkinter.Label(self._polarFrame, text="Radius (nm)") + label.grid(row=4, column=0, columnspan=2, sticky=tkinter.N) + + + return + + # Create the Execute and Cancel buttons + def makeBottomButtons(self): + + button = tkinter.Button(self._bottomFrame, text="Cancel", + command=self.cancelCommand) + button.grid(row=0, column=0, padx=30, pady=10, sticky=tkinter.E) + + button = tkinter.Button(self._bottomFrame, text="Save", + command=self.saveOnlyCommand) + button.grid(row=0, column=1, padx=30, pady=10, sticky=tkinter.E) + + button = tkinter.Button(self._bottomFrame, text="Save and Exit", + command=self.saveExitCommand) + button.grid(row=0, column=2, padx=30, pady=10, sticky=tkinter.E) + + self._bottomFrame.grid() + + return + + def saveOnlyCommand(self): + self.saveHSDescToFile() + + return + + def saveExitCommand(self): + self.saveHSDescToFile() + print("saving desc to file") + self.cancelCommand() + + return + + # Cancels the GUI and the tool. + def cancelCommand(self): + + msg = "Your edits have not been saved. Cancel Anyway?" 
+ if not self._editsMade: + self._master.destroy() + self._tkmaster.destroy() + return + + if tkinter.messagebox.askokcancel("High Sea - Cancel", msg, parent=self._bottomFrame): + self._master.destroy() + self._tkmaster.destroy() + + return + + def getPolarInfo(self, editArea): + lat = None + lon = None + radius = None + pieState = None + + try: + eaMask = self.encodeEditArea(editArea) + except: + eaMask = editArea + + eaMask = eaMask & self._clipMask + polarMask = self.encodeEditArea(self._polarEditArea) & self._clipMask + + if sum(sum(eaMask)) == sum(sum(polarMask)): # they are the same editArea + lat = self._latScaleValue + lon = self._lonScaleValue + radius = self._radiusScaleValue + pieState = self._pieState + return lat, lon, radius, pieState + + + # Return the max width and height of the edit area in lat/lon coordinates + def editAreaExtent(self, editArea): + polygons = editArea.getPolygons(ReferenceData.CoordinateType.LATLON) + + coords = polygons.getCoordinates() + + minX = 10000.0 + minY = 10000.0 + maxX = -10000.0 + maxY = -10000.0 + for c in coords: + minX = min(c.x, minX) + minY = min(c.y, minY) + maxX = max(c.x, maxX) + maxY = max(c.y, maxY) + + diffX = maxX - minX + diffY = maxY - minY + + return diffX, diffY + + # Called when the save button is clicked. 
+ def saveSelCommand(self): + editArea = self.getActiveEditArea() + + # Check for null area + mask = self.encodeEditArea(editArea) & self._clipMask + if sum(sum(mask)) == 0: + self.statusBarMsg("Please select an edit area before saving.", "S") + return + + # Check to ensure the edit area is large enough + diffLon, diffLat = self.editAreaExtent(editArea) + + if diffLon < self._minEASize or diffLat < self._minEASize: + self.statusBarMsg("Please select a larger edit area before saving", "S") + return + + editArea = self.decodeEditArea(mask) + self.setActiveEditArea(editArea) + + # get the selected button + curSel = self._listbox.curselection() + if len(curSel) == 0: + return + + listIndex = self._listbox.curselection()[0] + buttonStr = self._listbox.get(listIndex) + + featureName = self.extractFeature(buttonStr) + timePeriod = self._currentTimePeriod + editAreaName = self.getNewAreaName(featureName, timePeriod) + + # get the polar parameters + polarInfo = self.getPolarInfo(editArea) + + if polarInfo is not None: + lat, lon, radius, pieState = polarInfo + else: + lat = None + lon = None + radius = None + pieState = None + + editAreaDesc = self.makeEditAreaDesc(editArea, editAreaName, lat, lon, radius, pieState) + # save the area + + activeArea = self.getActiveEditArea() + + activeMask = self.encodeEditArea(activeArea) + self._eaUtils.saveEditArea(editAreaName, activeMask) + + self.displayAreaDesc(editAreaDesc) + self.addNewArea(featureName, timePeriod, editAreaName, editAreaDesc, + lat, lon, radius, pieState) + + + self.redrawListBox() + self.selectListBoxItem(editAreaName) + + self.setSelectButtonState(tkinter.NORMAL) + + self._editsMade = True + + def saveNewCommand(self): + + editArea = self.getActiveEditArea() + + # Check for null area + mask = self.encodeEditArea(editArea) & self._clipMask # clip to domain mask + + if sum(sum(mask)) == 0: + self.statusBarMsg("Please select an edit area before saving.", "S") + return + + # Check to ensure the edit area is large 
enough + diffLon, diffLat = self.editAreaExtent(editArea) + + if diffLon < self._minEASize or diffLat < self._minEASize: + self.statusBarMsg("Please select a larger edit area before saving", "S") + return + + editArea = self.decodeEditArea(mask) + self.setActiveEditArea(editArea) + + timePeriod = self._currentTimePeriod + featureName = self.getNewFeatureName() + editAreaName = self.getNewAreaName(featureName, timePeriod) + + # get the polar parameters + polarInfo = self.getPolarInfo(editArea) + + if polarInfo is not None: + lat, lon, radius, pieState = polarInfo + else: + lat = None + lon = None + radius = None + pieState = None + + editAreaDesc = self.makeEditAreaDesc(editArea, editAreaName, lat, lon, radius, pieState) + # save the area + activeArea = self.getActiveEditArea() + + activeMask = self.encodeEditArea(activeArea) + self._eaUtils.saveEditArea(editAreaName, activeMask) + + self.addNewFeature(featureName, timePeriod, editAreaName, editAreaDesc, + lat, lon, radius, pieState) + + + self.redrawListBox() + + self.selectListBoxItem(editAreaName) + + self.setSelectButtonState(tkinter.NORMAL) + + self.displayAreaDesc(editAreaDesc) + + self._editsMade = True + + return + + def replaceSelCommand(self): + + listIndex = self._listbox.curselection()[0] + editAreaName = self._listbox.get(listIndex) + + editArea = self.getActiveEditArea() + + # Check to ensure the edit area is large enough + diffLon, diffLat = self.editAreaExtent(editArea) + + if diffLon < self._minEASize or diffLat < self._minEASize: + self.statusBarMsg("Please select a larger edit area before saving", "S") + return + + mask = self.encodeEditArea(editArea) & self._clipMask # clip to domain mask + + editArea = self.decodeEditArea(mask) + self.setActiveEditArea(editArea) + + self._eaUtils.saveEditArea(editAreaName, mask) + + # get the polar parameters + lat, lon, radius, pieState = self.getPolarInfo(editArea) + + editAreaDesc = self.makeEditAreaDesc(editArea, editAreaName, lat, lon, radius, pieState) + + 
self.replaceEditAreaDesc(editAreaName, editAreaDesc) + + self.displayAreaDesc(editAreaDesc) + self._editsMade = True + + return + + def clearSelCommand(self): + self.displayAreaDesc("") + + while True: + + if len(self._listbox.curselection()) == 0: + break + + index = self._listbox.curselection()[0] + + editAreaName = self._listbox.get(index) + + self.removeEditArea(editAreaName) + + self._listbox.delete(index) + + self.setSelectButtonState(tkinter.DISABLED) + + self._editsMade = True + + return + + def clearAllCommand(self): + + self.displayAreaDesc("") + + listSize = self._listbox.size() + for i in range(listSize): + editAreaName = self._listbox.get(i) + self.removeEditArea(editAreaName) + + self._listbox.delete(0, listSize) + + self.setSelectButtonState(tkinter.DISABLED) + + self._editsMade = True + + return + + # Called when the Reasons window is closed. Just destroy the window. + def closeReasonWindow(self): + + self._killWindow.destroy() + + self.addKilledArea() + + return + + # Called when any reason button is clicked. 
Updates the reason to the GUI + def reasonButtonClick(self, event): + + self._killDesc = self._killVar.get() + + # Update the display on the GUI + self.displayAreaDesc(self._killDesc) + + return + + def displayKillReasonsDialog(self, killReasons): + + self._killWindow = tkinter.Toplevel(self._master) + self._killWindow.title("Reason") + self._killFrame = tkinter.Frame(self._killWindow, width=500, height=500) # can't get this to resize + self._killFrame.pack() + self._killVar = tkinter.StringVar() + self._killVar.set(self._killDesc) + for i in range(len(killReasons)): + b = tkinter.Radiobutton(self._killFrame, text=killReasons[i], + variable=self._killVar, value=killReasons[i]) + if b == self._defaultKillReason: + b.select() + + b.grid(row=i, column=0, sticky=tkinter.W, pady=2) + + b.bind("", self.reasonButtonClick) + + self._doneButton = tkinter.Button(self._killFrame, text= "DONE", command=self.closeReasonWindow) + self._doneButton.grid(row=len(killReasons), sticky=tkinter.E+tkinter.W) + + # Update the display on the GUI + self.displayAreaDesc(self._killDesc) + + self._killFrame.grid() + + # make this dialog modal so no other buttons can be clicked. 
+ self._killWindow.transient(self._master) + self._killWindow.grab_set() + + return + + def killSelCommand(self): + # make sure something is selected (should never happen) + if len(self._listbox.curselection()) == 0: + return + + self.displayKillReasonsDialog(self._killReasons) + + return + + def addKilledArea(self): + + # Get the selected button names + buttonStr = self._currentTimePeriod + listIndex = self._listbox.curselection()[0] + + editAreaName = self._listbox.get(listIndex) + # Create the editArea name from the button name + pos = editAreaName.find("_") + editAreaName = editAreaName[0:pos+1] + buttonStr + editAreaName[pos+4:] + + activeArea = self.getActiveEditArea() + + activeMask = self.encodeEditArea(activeArea) + self._eaUtils.saveEditArea(editAreaName, activeMask) + + featureName = self.extractFeature(editAreaName) + timePeriod = self.extractTimePeriod(editAreaName) + + # get the polar parameters + polarInfo = self.getPolarInfo(activeArea) + + if polarInfo is not None: + lat, lon, radius, pieState = polarInfo + else: + lat = None + lon = None + radius = None + pieState = None + + editAreaDesc = self._killDesc + + self.addNewFeature(featureName, timePeriod, editAreaName, editAreaDesc, + lat, lon, radius, pieState) + + for hs in self._hsDesc: + if hs["featureName"] == featureName and hs["timePeriod"] == timePeriod: + for area in hs["areaList"]: + if area["areaName"] == editAreaName: + area["areaDesc"] = editAreaDesc + + self.setSelectButtonState(tkinter.NORMAL) + + self._editsMade = True + + self.redrawListBox() + return + + def makePolarEditArea(self): + lat1 = self._latScaleValue * self._degToRad + lon1 = self._lonScaleValue * self._degToRad + radius = self._radiusScaleValue + + earthRadius = 6371.0 / 1.852 # Nautical miles + + distanceGrid = np.arccos(np.sin(lat1) * np.sin(self._latGrid) + \ + np.cos(lat1) * np.cos(self._latGrid) * \ + np.cos(self._lonGrid - lon1)) * earthRadius + + distanceMask = np.less(distanceGrid, radius) + + xDist = 
self._latGrid - lat1 + yDist = self._lonGrid - lon1 + + tanGrid = np.arctan2(xDist, yDist) / self._degToRad + tanGrid = np.where(np.less(tanGrid, 0.0), tanGrid + 360.0, tanGrid) + tanGrid = tanGrid / 45.0 # convert to 8 "quardants + + # mask off all but the selected quadrants. + circleMask = np.zeros(tanGrid.shape, dtype=np.int8) + for i in range(len(self._pieState)): + if self._pieState[i]: + tanMask = np.logical_and(np.greater_equal(tanGrid, i), np.less(tanGrid, i+1)) + circleMask |= tanMask + + mask = np.logical_and(distanceMask, circleMask) + editArea = self.decodeEditArea(mask) + + self.setActiveEditArea(editArea) + + self._polarEditArea = editArea + + return + + def dirSort(self, dirA, dirB): + + aDir, aCount = dirA + bDir, bCount = dirB + + if aCount > bCount: + return 1 + elif aCount == bCount: + return 0 + else: + return -1 + + # Returns a string that describes a circular edit area + def formatCircleEditArea(self, lat, lon, radius, pieState): + + dirList = ["E", "NE", "N", "NW", "W", "SW", "S", "SE"] + + # Check to see if they're all selected (the whole pie) + if sum(pieState) == len(pieState): + latStr = self.latLonStr(lat, "lat") + lonStr = self.latLonStr(lon, "lon") + + return "WITHIN " + str(int(radius)) + " NM OF CENTER AT " + \ + latStr + lonStr + + quad = [0] * len(pieState) + semi = [0] * len(pieState) + + # Find out if we have six consecutive slices selected + # This is because the rules are different if 3/4 of the + # pie is selected. 
+ sixConsec = False + if sum(pieState) >= 6: + for i in range(-2, len(pieState)): + if not pieState[i] and not pieState[i + 1]: + sixConsec = True + break + + for i in range(len(pieState)): + if pieState[i] and pieState[i - 1]: + quad[i] = 1 + + # Begin with the first selected pieSlice + start = 0 + while quad[start]: + start = start - 1 + + for i in range(start, start + len(quad)): + last = i - 1 + this = i + next = i + 1 + afterNext = next + 1 + if next >= len(quad): + next = next - len(quad) + if afterNext >= len(quad): + afterNext = afterNext - len(quad) + + if quad[last] and quad[this] and quad[next]: + semi[this] = 1 + quad[last] = 0 + quad[this] = 0 + quad[next] = 0 + # if 3/4 of the pie is selected then turn off on extra quad + if sixConsec: + quad[afterNext] = 0 + + # Format the slices into words + finalDir = "" + + # Direction order must be clockwise starting from N + # This list orders directions properly. + pieOrderList = [2, 1, 0, 7, 6, 5, 4, 3] + +## for i in range(len(semi)): + for i in pieOrderList: + if semi[i]: + finalDir = dirList[i] + " SEMICIRCLE" + + if sum(semi) > 0 and sum(quad) > 0: + finalDir = finalDir + " AND " + +## for i in range(len(quad)): + for i in pieOrderList: + if quad[i]: + finalDir = finalDir + dirList[i] + " AND " + + if sum(quad) > 0: + finalDir = finalDir[0:-5] # remove the last AND + + if sum(quad) > 0: + finalDir = finalDir + " QUADRANT" + + if sum(quad) > 1: + finalDir = finalDir + "S" + + latStr = self.latLonStr(lat, "lat") + lonStr = self.latLonStr(lon, "lon") + + desc = "WITHIN " + str(int(radius)) + " NM " + finalDir + \ + " OF CENTER AT " + latStr + lonStr + + return desc + + # Formats a lat lon string with appropriate direction labels + def latLonStr(self, coord, latOrLon, precision=1.0): + + if latOrLon == "lat": + if coord >= 0: + dirStr = "N" + else: + dirStr = "S" + + # Exception for southern boundary of EP3 area to force 18.5S output + # This code is needed only for NH1 domain + if coord < -18.5: + precision = 
0.5 + + # Exception for northern boundary of EP3 area to force 3.4S output + # This code is needed only for NH1 domain + if coord < -3.36 and coord > -3.38: + coord = -3.4 + coordStr = "0" + "%.1f" % abs(coord) + dirStr + return coordStr + + elif latOrLon == "lon": + if coord >= 0: + dirStr = "E" + else: + dirStr = "W" + + coordStr = "%.0f" % abs(round(coord)) + dirStr + # if the precision is 0.5, round to the nearest 0.5 degrees + if precision == 0.5: + coord = int((abs(coord) + 0.25) * 2.0) / 2.0 + # Keep the 0.5 if we have it after rounding + if coord != int(coord): + if abs(coord) < 10: + coordStr = "0" + "%.1f" % abs(coord) + dirStr + else: + coordStr = "%.1f" % abs(coord) + dirStr + + return coordStr + + # Determines the precision based on the max extent in both lat and lon + # Uses self._marginalEASize to define the threshold for the precision. + def determinePrecision(self, points): + + latMin = 10000.0 + latMax = -10000.0 + lonMin = 10000.0 + lonMax = -10000.0 + + # Calculate the min/max extent of these points. + for lat, lon in points: + latMin = min(lat, latMin) + latMax = max(lat, latMax) + lonMin = min(lon, lonMin) + lonMax = max(lon, lonMax) + + # Calculate the difference between the maxes and mins + latDiff = latMax - latMin + lonDiff = lonMax - lonMin + + # If we have a marginally small area, set the precision to 0.5 otherwise 1.0 degrees. 
+ precision= 1.0 + if latDiff < self._marginalEASize or lonDiff < self._marginalEASize: + precision = 0.5 + + return precision + + # Returns a string that describes a polygon edit area + # modified to add leading "0" when lat < 10 + def formatPolygonEditArea(self, editArea): + + + mask = self.encodeEditArea(editArea) + points = self.simplifyEditArea(editArea) + + precision = self.determinePrecision(points) + + coordList = [] + + fullStr = "" + for y, x in points: + latStr = self.latLonStr(y, "lat", precision) + lonStr = self.latLonStr(x, "lon", precision) + + coordList.append((latStr, lonStr)) + + # close the polygon + coordList.append(coordList[0]) + + lastLat = "" + lastLon = "" + fullStr = "WITHIN AREA BOUNDED BY " + for latStr, lonStr in coordList: + if latStr == lastLat and lonStr == lastLon: + continue + if len(latStr) < 3: + latStr = latStr.zfill(3) + fullStr = fullStr + latStr + lonStr + " TO " + lastLat = latStr + lastLon = lonStr + + # remove the last " TO " + fullStr = fullStr[0:-4] + + return fullStr + + def makeNamedAreaDesc(self, editArea): + + descList = [] + + editAreaMask = self.encodeEditArea(editArea) + for eaName in list(self._namedAreaMasks.keys()): + overlap = self._namedAreaMasks[eaName] & editAreaMask + + if overlap.any(): + descList.append(self._namedAreaDescip[eaName]) +# Commented out below as was requiring that ALL namedAreas be overlapped in order to return (JL/TL 07/21/2016) +# else: +# return "" + + if len(descList) == 0: + return "" + + finalStr = "...INCLUDING " + + for desc in descList: + finalStr = finalStr + desc + if desc != descList[-1]: # add an AND if we're not at the last one + finalStr = finalStr + " AND " + else: + finalStr = finalStr + "..." 
+ + return finalStr + + def makeEditAreaDesc(self, editArea, editAreaName, lat=None, lon=None, radius=None, pieState=None): + + mask = self.encodeEditArea(editArea) + + namedAreaDesc = self.makeNamedAreaDesc(editArea) + + if lat is not None: + desc = self.formatCircleEditArea(lat, lon, radius, pieState) + else: + desc = self.formatPolygonEditArea(editArea) + + # Add delimiters around the lat/lon descriptor so we can replace it when needed + return self._descDelimStart + desc + namedAreaDesc + self._descDelimEnd + + def displayAreaDesc(self, editAreaDesc): + + self._textBox.configure(state=tkinter.NORMAL) + self._textBox.delete("1.0", tkinter.END) + self._textBox.insert("1.0", editAreaDesc) + self._textBox.configure(state=tkinter.DISABLED) + + return + + def textBoxClick(self, event=None): + + self._textBox.configure(font=self._normalFont) + self._textBox.configure(state=tkinter.NORMAL) + self._textBoxCurSelection = self._listbox.curselection()[0] + + return + + def textBoxReturnPress(self, event=None): + + #listIndex = int(self._listbox.curselection()[0]) + listIndex = self._textBoxCurSelection + editAreaName = self._listbox.get(listIndex, None) + timePeriod = self._currentTimePeriod + editAreaDesc = self._textBox.get("1.0", tkinter.END) + + editAreaDesc = editAreaDesc.replace(chr(10), "") + self.displayAreaDesc(editAreaDesc) + + self.replaceEditAreaDesc(editAreaName, editAreaDesc) + self._textBox.configure(font=self._boldFont) + self._textBox.configure(state=tkinter.DISABLED) + self.updateTextBoxSaveState("Saved") + self._textBoxCurSelection = None + + def textBoxKeyPress(self, event): + + if event.char in string.printable: + self.updateTextBoxSaveState("NOT Saved") + + return + + def updateTextBoxSaveState(self, state): + msg = "Edit Area Description..." 
+ state + + if state == "NOT Saved": + color = "red" + else: + color = "black" + self._textLabel.configure(text=msg, foreground=color) + return + + ########################################################################## + + #### Data structure code + + ########################################################################## + + def getBasinName(self, editAreaName): + # Figure out which basin this area is in + basinName = "" + basinList = [] + for basin in self._basinNames: + if basin in self._allEditAreaNames: + + basinMask = self._eaUtils.fetchEditArea(basin) + else: + continue + + areaMask = self._eaUtils.fetchEditArea(editAreaName) + if areaMask is None: + self.statusBarMsg("Edit area: " + " not found in repository.", "S") + continue + + if sum(sum(basinMask & areaMask)) > 0: + basinList.append(basin) + + basinList.sort() + for b in basinList: + if basinName == "": + basinName = b + else: + basinName = basinName + " AND " + b + + if basinName == "": + self.statusBarMsg("No Basin identified for last edit area.", "S") + + return basinName + + + def addNewFeature(self, featureName, timePeriod, editAreaName, editAreaDesc, + lat=None, lon=None, radius=None, pieState=None): + + if self._siteID in self._sitesWithBasins: + basinName = self.getBasinName(editAreaName) + else: + basinName = None + if basinName is not None: + if basinName.find("_") > -1: + basinName = basinName.replace("_", " ") + + # make the low level data structure + areaDict = {"areaName" : editAreaName, + "areaDesc" : editAreaDesc, + "basin" : basinName, + "lat" : lat, + "lon" : lon, + "radius" : radius, + "pieState" : pieState, + } + + featureDict = { + "featureName" : featureName, + "timePeriod" : timePeriod, + "areaList" : [areaDict], # list of areaDict + "basin" : basinName, + } + + self._hsDesc.append(featureDict) + + return featureName + + def addNewArea(self, featureName, timePeriod, editAreaName, editAreaDesc, + lat=None, lon=None, radius=None, pieState=None): + + if self._siteID in 
self._sitesWithBasins: + basinName = self.getBasinName(editAreaName) + else: + basinName = None + + if basinName is not None: + if basinName.find("_") > -1: + basinName = basinName.replace("_", " ") + + + areaDict = {"areaName" : editAreaName, + "areaDesc" : editAreaDesc, + "basin" : basinName, + "lat" : lat, + "lon" : lon, + "radius" : radius, + "pieState" : pieState, + } + + # Find any feature/time + for fDict in self._hsDesc: + if fDict["featureName"] == featureName: + if fDict["timePeriod"] == timePeriod: + # add it to the existing list + fDict["areaList"].append(areaDict) + + newAreaList = [] + for d in fDict["areaList"]: + newAreaList.append(d['areaName']) + newAreaList.sort() + return + + # featureName and time not found so make a new one + self.addNewFeature(featureName, timePeriod, editAreaName, + editAreaDesc, lat, lon, radius, pieState) + + return + + def removeEditArea(self, editAreaName): + # Find the feature + for fDict in self._hsDesc: + # look for the areaName + for areaDict in fDict["areaList"]: + if areaDict["areaName"] == editAreaName: + fDict["areaList"].remove(areaDict) + + if len(fDict["areaList"]) == 0: + self._hsDesc.remove(fDict) + + return + + return + + def replaceEditAreaDesc(self, editAreaName, editAreaDesc): + # Find the feature + + if self._siteID in self._sitesWithBasins: + basinName = self.getBasinName(editAreaName) + else: + basinName = None + + if basinName is not None: + if basinName.find("_") > -1: + basinName = basinName.replace("_", " ") + + for fDict in self._hsDesc: + # look for the areaName + for areaDict in fDict["areaList"]: + if areaDict["areaName"] == editAreaName: + areaDict["areaDesc"] = editAreaDesc + areaDict["basin"] = basinName + return + + # Didn't find areaName + + return + + def getEditAreaDesc(self, editAreaName): + for fDict in self._hsDesc: + # look for the areaName + for areaDict in fDict["areaList"]: + if areaDict["areaName"] == editAreaName: + return areaDict["areaDesc"] + + return + + def 
getAllEditAreaNames(self): + areaNameList = [] + for fDict in self._hsDesc: + for areaDict in fDict["areaList"]: + areaNameList.append(areaDict["areaName"]) + + return areaNameList + + def extractFeature(self, name): + + parts = name.split("_") + + return parts[0] + + def extractTimePeriod(self, name): + + parts = name.split("_") + + return parts[1] + + # Extracts the directory and removes the fileName + def extractDirName(self, path): + parts = path.split("/") + dirName = "" + for i in range(len(parts) - 1): + dirName = dirName + parts[i] + "/" + + return dirName + + # Creates the directory tree with appropriate permissions + def makeFullDirPath(self, dirPath, permissions): + + if not os.path.exists(dirPath): + os.makedirs(dirPath, permissions) + return + + def getNonLatLonText(self, areaDesc): + + areaDescOld = copy.copy(areaDesc) + openBrace = areaDescOld.find(self._descDelimStart) + closeBrace = areaDescOld.find(self._descDelimEnd) + + preText = areaDescOld[0:openBrace] + postText = areaDescOld[closeBrace+1:] + + return preText, postText + + def clipDescAreas(self, hsDesc, clipAreaName): + # make a copy so we don't corrupt the original version of the descriptor + descList = [] + + if clipAreaName in self._allEditAreaNames: + clipMask = self._eaUtils.fetchEditArea(clipAreaName) + else: + self.statusBarMsg("Edit area: " + clipAreaName + " not found in repository.", "S") + return descList + + for desc in hsDesc: + # Process the editAreas first + areaList = desc["areaList"] + newAreaList = [] + + for area in areaList: + # Get the original edit area and clip to the clipArea + oldAreaMask = self._eaUtils.fetchEditArea(area["areaName"]) + if oldAreaMask is None: + self.statusBarMsg("Edit area: " + clipAreaName + " not found in repository.", "S") + continue + + clippedMask = oldAreaMask & clipMask + + if sum(sum(clippedMask)) == 0: # Don't process this area + continue + + newEditArea = self.decodeEditArea(clippedMask) + # save this newly clipped area with a different 
name + newAreaName = area["areaName"] + clipAreaName + self._eaUtils.saveEditArea(newAreaName, clippedMask) + + lat = None + lon = None + radius = None + pieState = None + + if area["lat"] is not None: + lat = area["lat"] + lon = area["lon"] + radius = area["radius"] + pieState = area["pieState"] + + if len(area["areaDesc"]) < 100: + freshAreaDesc = area["areaDesc"] + else: + # Extract the positions of the text before and after the lat/lon text + oldAreaDesc = copy.copy(area["areaDesc"]) + newAreaDesc = self.makeEditAreaDesc(newEditArea, newAreaName, lat, lon, radius, pieState) + + if area["areaDesc"].find(self._descDelimStart) > -1: + # Extract the positions of the text before and after the lat/lon text + preText, postText = self.getNonLatLonText(oldAreaDesc) + # Restore the pre and post text + freshAreaDesc = preText + newAreaDesc + postText + + newAreaDict = { + "areaName" : newAreaName, + "areaDesc" : freshAreaDesc, + "basin" : area["basin"], + "lat" : None, + "lon" : None, + "radius" : None, + "pieState" : None, + } + newAreaList.append(newAreaDict) + + if len(newAreaList) > 0: + descDict = {} + descDict["timePeriod"] = desc["timePeriod"] + descDict["featureName"] = desc["featureName"] + descDict["basin"] = desc["basin"] + descDict["areaList"] = newAreaList + + descList.append(descDict) + + return descList + + def removeTextBoxDelims(self, highSeasDescriptor): + + for descDict in highSeasDescriptor: + for areaDict in descDict["areaList"]: + areaDict['areaDesc'] = areaDict['areaDesc'].replace("{", "") + areaDict['areaDesc'] = areaDict['areaDesc'].replace("}", "") + + return + + # This method does the work of writing the descriptor information to a + # file. This may be called several times one for the tool info and again for + # the formatter info. In addition some sites may create two descriptor files + # one north of the equator and one south. 
def writeHSDescriptorToFile(self, highSeasDescriptor, highSeasFileName):
    """Pickle highSeasDescriptor to highSeasFileName, creating the directory
    tree and setting file permissions when the file is new."""

    newFile = not os.path.exists(highSeasFileName)
    if newFile:
        # See if the directory exists and if not make it
        dirOnly = self.extractDirName(highSeasFileName)
        if not os.path.exists(dirOnly):
            # FIX: makeFullDirPath() requires a permissions argument; the
            # original call omitted it and raised TypeError.
            self.makeFullDirPath(dirOnly, self._permissions)

    try:
        # FIX: pickle requires a binary-mode file object in Python 3
        # ("w" -> "wb"); "with" also guarantees the handle is closed.
        with open(highSeasFileName, "wb") as f:
            pickle.dump(highSeasDescriptor, f)
        if newFile:  # it's a new file, set permissions
            os.chmod(highSeasFileName, self._permissions)
    except Exception:
        msg = "Error opening " + highSeasFileName + " for write."
        self.statusBarMsg(msg, "U")
    return

# Contains the logic for saving the descriptor info to files.
def saveHSDescToFile(self):

    print("in saveHSDescToFile................................................")
    # First save the info for the tool
    highSeasFileName = self.descriptorFileName("Tool")
    self.writeHSDescriptorToFile(self._hsDesc, highSeasFileName)

    # Now save the info for the formatter.
    # Find out what we need to save
    saveAreas = []
    if self._siteID in self._multiProductSites:
        saveAreas = self._multiProductSites[self._siteID]

    # Otherwise we're making two files, but we need to make two new
    # descriptors with the areas clipped to the savedAreas

    for area in saveAreas:
        newHSDesc = self.clipDescAreas(self._hsDesc, area)

        # remove the delimiters from the descriptors for the formatter version
        self.removeTextBoxDelims(newHSDesc)
        highSeasFileName = self.descriptorFileName("Formatter", area)
        self.writeHSDescriptorToFile(newHSDesc, highSeasFileName)

    # Now sample the weather grids for interesting weather, save that in
    # a different list of dictionaries and save that to a file using the
    # same structure.
    print("interesting weather.............................................")
    self.getInterestingWeather()

    # reset the editsMade flag
    self._editsMade = False

    return

def fetchWxGrids(self):
    """Return [(wxGrid, hoursFromStart), ...] for Wx grids sampled every
    24 h over the window starting at the last 6-hour boundary."""

    startTime = int(time.time() / (6 * 3600)) * (6 * 3600)  # truncated to last 6 hour period
    # NOTE(review): original comment said "60 hours later" but the code spans
    # 48 hours (+1 s) — confirm which window is intended.
    endTime = startTime + (48 * 3600) + 1

    trList = self.getWEInventory("Fcst", "Wx")

    wxGridList = []
    for t in range(startTime, endTime, 24 * 3600):
        # Find the grid that overlaps the first hour of the period in which we're interested
        tr = TimeRange.TimeRange(AbsTime.AbsTime(t), AbsTime.AbsTime(t + 3600))
        for invTR in trList:
            if tr.overlaps(invTR):
                wxGrid = self.getGrids("Fcst", "Wx", "SFC", tr)
                # FIX: use integer division — "/" yields a float in Python 3,
                # producing period strings like "0.0h" instead of "00h".
                timePeriod = (t - startTime) // 3600  # hours from start
                wxGridList.append((wxGrid, timePeriod))

    return wxGridList

def getToolWxTypeIntens(self, wxType):
    """Split a combined type+intensity string ("R-", "T+") into (type,
    intensity).  No suffix means moderate ("m"), except T/K/VA/F which map
    to empty intensity."""

    intens = wxType[-1]
    if intens == "+" or intens == "-":
        typeOnly = wxType[:-1]
    else:
        intens = "m"
        typeOnly = wxType

#JL Testing "T"; currently reporting based on Intensity and not Coverage for "T" 05/30/2016
    if typeOnly in ["T", "K", "VA", "F"] and intens == "m":
        intens = ""

    return typeOnly, intens

# Returns a mask where all points with the specified wxType are set to 1
def getMaskForWxType(self, wxGrid, wxType):
    """wxGrid is a (byteGrid, wxKeys) pair; wxType is type+intensity text."""
    byteGrid, wxKeys = wxGrid
    # FIX: np.bool was removed in NumPy 1.24 — use the builtin bool dtype.
    wxMask = np.zeros(byteGrid.shape, bool)
    for wxKey in wxKeys:
        # get subKeys
        subKeys = wxKey.split("^")
        for subKey in subKeys:
            wxParts = subKey.split(":")
            # compare against concatenated type+intensity
            wxStr = wxParts[1] + wxParts[2]
            if wxStr == wxType:
                wxIndex = self.getIndex(wxKey, wxKeys)
                mask = np.equal(byteGrid, wxIndex)
                wxMask = mask | wxMask

    return wxMask

# Discovers the individual sub edit areas of the specified editArea and
# saves them as individual edit areas.
def getContiguousEditAreas(self, editArea):
    """Split editArea into its contiguous pieces, each returned as its own
    edit area object."""

    # Make a dummy empty editArea that we will reset
    emptyEditAreaName = "EmptyEditArea"
    bitGrid = editArea.getGrid()
    locs = bitGrid.getContiguousBitArrayLocations()
    editAreaList = []
    for i in range(len(locs)):
        newGrid = bitGrid.contiguousBitArray(locs[i])

        # Make a dummy empty editArea that we will reset
        # FIX: np.bool was removed in NumPy 1.24 — use the builtin bool.
        emptyMask = np.zeros(self.getGridShape(), bool)
        newEditArea = self.decodeEditArea(emptyMask)
        self.saveEditArea(emptyEditAreaName, newEditArea)
        # Now use the edit area
        newEditArea = self.getEditArea(emptyEditAreaName)
        newEditArea.setGrid(newGrid)
#        eaMask = self.encodeEditArea(newEditArea) # Why do this?
        editAreaList.append(newEditArea)

    return editAreaList

# Determines if the specified WxKey matches the weather combinations defined in
# wxTypeList. If a match is found, return the wx coverage, type, and intensity,
# otherwise return None.
def matchingWxKey(self, wxKey, wxTypeList):
    # Examine each sub key
    subKeys = wxKey.split("^")
    for subKey in subKeys:
        wxParts = subKey.split(":")
        # Extract the coverage, type and intensity
        if len(wxParts) >= 4:
            wxCov = wxParts[0]
            wxType = wxParts[1]
            wxIntens = wxParts[2]
            # Check each allowed type for a match
            for wxTypeIntens in wxTypeList:
                allowedType, allowedIntens = self.getToolWxTypeIntens(wxTypeIntens)
                if wxType == allowedType and wxIntens == allowedIntens:
                    # We're only interested in T if coverage matches one in self._thunderCoverages EXCEPT
                    # for the case Iso T m (moderate) where we will convert to Sct T- to represent
                    # Scattered thunderstorms.
                    if "T" in wxType and wxCov == "Iso":
                        return wxCov, allowedType, allowedIntens

                    if "T" in wxType and wxCov not in self._thunderCoverages:
                        continue

                    return wxCov, allowedType, allowedIntens

    # If we get to here, no match was found
    return None

def makePeriodList(self, wxGrid, timePeriodStr, area):
    """Find matching weather in wxGrid within area, save an edit area for
    each contiguous patch, and return a list of area dictionaries."""

    # Dictionary used to convert GFE intensities since it's part of an edit area name
    intenDict = {
                 "+" : "h",
                 "m" : "m",
                 "" : "n",
                 "-" : "l",
                 }

    byteGrid, wxKeys = wxGrid

    # Figure out the Wx types and intensities allowed for the HSF
    # Filter out all other Wx type and intensities but these.
    wxTypeList = self._allPeriodWxTypes
    if timePeriodStr == "00h":
        wxTypeList = wxTypeList + self._firstPeriodWxTypes

    areaList = []  # There's a chance that we won't find any wx so define this here
    areaMask = self.encodeEditArea(area)
    for wxKey in wxKeys:
        # Returns coverage, type, and intensity of any matching sub key
        covTypeIntens = self.matchingWxKey(wxKey, wxTypeList)

        if covTypeIntens is None:
            continue

        wxCov, wxType, wxIntens = covTypeIntens  # extract the components

        # Change IsoTm to SctT- so we can report scattered thunderstorms
        if wxCov == "Iso" and wxType == "T" and wxIntens == "":
            wxCov = "Sct"
            wxIntens = "-"

        wxIndex = self.getIndex(wxKey, wxKeys)
        mask = (byteGrid == wxIndex)
        mask = mask & self._clipMask
        if np.sum(mask) == 0:  # no points found with this wxType
            continue

        # Make an edit area from the mask
        wxEditArea = self.decodeEditArea(mask)
        # Get the individual contiguous areas from the edit area
        wxEAList = self.getContiguousEditAreas(wxEditArea)

        for eaNum, ea in enumerate(wxEAList):
            # Only include edit areas that overlap the current domain
            eaMask = self.encodeEditArea(ea)
            overlap = eaMask & areaMask

            if not overlap.any():
                print("No overlap for ", wxType)
                continue

            intenStr = intenDict[wxIntens]
            eaName = "Z" + timePeriodStr + wxCov + wxType + intenStr + str(eaNum).zfill(2)

            self._eaUtils.saveEditArea(eaName, eaMask)

            eaDesc = self.makeEditAreaDesc(ea, eaName)
            areaDict = {}
            areaDict["timePeriod"] = timePeriodStr
            areaDict["areaName"] = eaName
            areaDict["areaDesc"] = eaDesc
            areaDict["wxType"] = wxType
            areaDict["wxCoverage"] = wxCov
            areaDict["intensity"] = wxIntens

            areaList.append(areaDict)

    return areaList

def getInterestingWeather(self):
    """Sample the Wx grids for reportable weather in each configured domain
    and write the grid-based feature files."""
    # Fetch the Wx grids we need
    wxGridList = self.fetchWxGrids()

    saveDomains = []
    if self._siteID in self._multiProductSites:
        saveDomains = self._multiProductSites[self._siteID]
    else:
        # FIX: "siteID" was an undefined name here — use self._siteID.
        self.statusBarMsg("Domains not defined for site:" + self._siteID, "S")
        # FIX: a missing comma made "S" concatenate into the message and
        # dropped the severity argument (TypeError).
        self.statusBarMsg("Grid based features will not be saved.", "S")
        return
    for domain in saveDomains:

        gridBasedFeatures = []

        for wxGrid, timePeriod in wxGridList:
            timePeriodStr = str(timePeriod).zfill(2) + "h"
            periodList = self.makePeriodList(wxGrid, timePeriodStr, domain)
            gridBasedFeatures = gridBasedFeatures + periodList

        # Strip any leading and trailing braces
        for gFeature in gridBasedFeatures:
            gFeature["areaDesc"] = gFeature["areaDesc"].replace("{", "")
            gFeature["areaDesc"] = gFeature["areaDesc"].replace("}", "")

        wxFeaturesFileName = self.gridBasedFileName(domain[-3:])

        self.writeFeatureList(gridBasedFeatures, wxFeaturesFileName)

    return

# True when the two areas' masks overlap at any grid point.
def similarArea(self, area1, area2):

    eaName1 = area1["areaName"]
    eaName2 = area2["areaName"]

    mask1 = self._eaUtils.fetchEditArea(eaName1)
    mask2 = self._eaUtils.fetchEditArea(eaName2)

    overlap = mask1 & mask2

    if overlap.any():
        return True

    return False

# Returns the existing group in self._sortedFeatures with the same weather
# and an overlapping area, or None.
def findSimilarFeature(self, area):
    # Extract the part from the area
    wxType = area["wxType"]
    wxCov = area["wxCoverage"]
    wxIntens = area["intensity"]

    for group in self._sortedFeatures:
        # Check for the same weather
        if group[0]["wxType"] != wxType or group[0]["wxCoverage"] != wxCov or group[0]["intensity"] != wxIntens:
            continue
        # Check for similar area
        if self.similarArea(area, group[0]):
            return group

    return None

# Buckets gridBasedFeatures into self._sortedFeatures: features with the
# same weather and overlapping areas share a group.
def groupGridBasedFeatures(self, gridBasedFeatures):
    # Populate the sorted features with the first area if we have any features
    self._sortedFeatures = []
    if len(gridBasedFeatures) == 0:
        return

    self._sortedFeatures = [[copy.copy(gridBasedFeatures[0])]]

    for i in range(1, len(gridBasedFeatures)):
        simGroup = self.findSimilarFeature(gridBasedFeatures[i])
        if simGroup is not None:  # join the existing group
            simGroup.append(copy.copy(gridBasedFeatures[i]))
            continue
        else:  # no similar group found — start a new one
            self._sortedFeatures.append([copy.copy(gridBasedFeatures[i])])
    return

# Path of the grid-based feature file for the given sub-area suffix.
def gridBasedFileName(self, subArea):
    dirPath = "/data/local/HighSeas/Formatter/" + self._siteID + "/HSF_" + subArea + "GridBasedFeatures.pic"

    return dirPath

# Writes the gridBased features to a file.
def writeFeatureList(self, gridBasedFeatures, gridBasedFileName):

    self._sortedFeatures = []
    self.groupGridBasedFeatures(gridBasedFeatures)

    newFile = not os.path.exists(gridBasedFileName)
    if newFile:
        # See if the directory exists and if not make it
        dirOnly = self.extractDirName(gridBasedFileName)
        if not os.path.exists(dirOnly):
            # FIX: makeFullDirPath() requires a permissions argument; the
            # original call omitted it and raised TypeError.
            self.makeFullDirPath(dirOnly, self._permissions)

    try:
        # FIX: pickle needs binary mode in Python 3 ("w" -> "wb"); also
        # removed a redundant f.close() inside the "with" block.
        with open(gridBasedFileName, "wb") as f:
            pickle.dump(self._sortedFeatures, f)
        if newFile:  # it's a new file, set permissions
            os.chmod(gridBasedFileName, self._permissions)
    except Exception:
        msg = "Error opening " + gridBasedFileName + " for write."
        self.statusBarMsg(msg, "U")

    return

# Prints every stored field of the named area (debugging aid).
def dumpArea(self, areaName):

    for fDict in self._hsDesc:
        for areaDict in fDict["areaList"]:
            if areaDict["areaName"] == areaName:
                print("-------------------------------------")
                print("Feature Name:", fDict["featureName"])
                print("Time Period:", fDict["timePeriod"])
                print("AreaName:", areaDict["areaName"])
                print("AreaDesc:", areaDict["areaDesc"])
                print("Basin:", areaDict["basin"])
                print("Lat:", areaDict["lat"])
                print("Lon:", areaDict["lon"])
                print("Radius:", areaDict["radius"])
                print("PieState", areaDict["pieState"])
    return

# Prints the whole descriptor database, sorted by area name (debugging aid).
def dumpDatabase(self):

    areaList = []
    print()
    print("+++++++++++++++ START Dump of database +++++++++++++++")
    for fDict in self._hsDesc:
        for areaDict in fDict["areaList"]:
            areaList.append(areaDict["areaName"])
    areaList.sort()
    for areaName in areaList:
        self.dumpArea(areaName)

    print("+++++++++++++++ END Dump of database +++++++++++++++")
    return

# Returns the first unused "FeatureN" name.
def getNewFeatureName(self):
    # make a list of possibleNames
    featureNameList = []
    for fDict in self._hsDesc:
        featureNameList.append(fDict["featureName"])

    num = 1
    while True:
        featureName = "Feature" + str(num)
        if featureName not in featureNameList:
            return featureName

        num = num + 1
        if num > 1000:  # infinite loop prevention
            break

    print("Error, no new feature for getNewFeatureName")
    return ""

# Returns the next "<feature>_<time>_EAn" name for the feature/period.
def getNewAreaName(self, featureName, timePeriod):
    count = 0
    for fDict in self._hsDesc:
        if fDict["featureName"] == featureName and fDict["timePeriod"] == timePeriod:
            count = count + len(fDict["areaList"])

    name = featureName + "_" + str(timePeriod) + "_" + "EA" + str(count + 1)

    return name

# Reads the descriptor from the file.
def initializeHSDescriptor(self):

    highSeasFileName = self.descriptorFileName("Tool")

    try:
        # FIX: pickle.load needs a binary-mode file in Python 3 ("r" -> "rb");
        # "with" ensures the handle is closed.
        with open(highSeasFileName, "rb") as f:
            self._hsDesc = pickle.load(f)
    except Exception:
        msg = "Descriptor file not found. Starting with an empty descriptor."
        self.statusBarMsg(msg, "S")
        self._hsDesc = []

    self._editsMade = False

    return

# ORs together the masks of the named clip areas; returns None when none
# of them exist.
def makeClipMask(self, clipEditAreas):

    clipMask = None
    for editArea in clipEditAreas:
        if editArea in self._allEditAreaNames:
            mask = self._eaUtils.fetchEditArea(editArea)
            if clipMask is None:
                clipMask = mask
            else:
                clipMask = clipMask | mask
        else:
            self.statusBarMsg(editArea + " edit area not found for clip area", "S")

    return clipMask

##########################################################################

####  END -----Data structure code

##########################################################################


# Main method that glues all of the GUI pieces together. Creates
# all the frames used by other widgets and calls other methods
# to create buttons, listboxes, and the status window.
def setUpUI(self):

    # create the main objects
    self._tkmaster = tkinter.Tk()

    self._master = tkinter.Toplevel(self._tkmaster)
    self._tkmaster.withdraw()

    # Capture the "x" click to close the GUI
    self._master.protocol('WM_DELETE_WINDOW', self.cancelCommand)

    self._master.title('High Sea Edit Areas')

    self._topFrame = tkinter.Frame(self._master)
    self._topFrame.grid()

    self._timeFrame = tkinter.Frame(self._topFrame, bd=2, relief=tkinter.GROOVE)
    self._timeFrame.grid(row=0, column=0, sticky=tkinter.N)
    self._timeVar = tkinter.StringVar()
    self.makeTimeButtons(self._timeFrame)

    self._listFrame = tkinter.Frame(self._topFrame)
    self._listFrame.grid(row=0, column=1, padx=15)
    self.makeListBox()

    self._listButtonFrame = tkinter.Frame(self._listFrame, bd=2, relief=tkinter.GROOVE)
    self._listButtonFrame.grid(row=2, column=0, columnspan=2, pady=5)

    self._saveNewButton = tkinter.Button(self._listButtonFrame, text="ADD New Feature",
                                         command=self.saveNewCommand, width=25)
    self._saveNewButton.grid(row=0, column=0)

    self._saveSelButton = tkinter.Button(self._listButtonFrame, text="SAVE To Selected Feature",
                                         command=self.saveSelCommand, width=25,
                                         state=tkinter.DISABLED)
    self._saveSelButton.grid(row=1, column=0)

    self._replaceSelButton = tkinter.Button(self._listButtonFrame,
                                            text="REPLACE Selected Area", command=self.replaceSelCommand,
                                            width=25, state=tkinter.DISABLED)
    self._replaceSelButton.grid(row=2, column=0)

    self._clearSelButton = tkinter.Button(self._listButtonFrame,
                                          text="REMOVE Selected Area(s)",
                                          command=self.clearSelCommand, width=25,
                                          state=tkinter.DISABLED)
    self._clearSelButton.grid(row=3, column=0)

    self._clearAllButton = tkinter.Button(self._listButtonFrame,
                                          text="REMOVE All Areas",
                                          command=self.clearAllCommand, width=25)

    self._clearAllButton.grid(row=4, column=0)

    self._killSelButton = tkinter.Button(self._listButtonFrame,
                                         text="KILL Selected Area",
                                         command=self.killSelCommand, width=25,
                                         state=tkinter.DISABLED)
    self._killSelButton.grid(row=5, column=0)

    self._circleRadius = 80
    self._circleOffset = 20
    self._polarFrame = tkinter.Frame(self._topFrame, bd=2, relief=tkinter.GROOVE)
    self._polarFrame.grid(row=0, column=2, padx=20)
    self._pieSelectColor = "red"
    self._pieDeselectColor = "gray80"
    if self._displayPolarWidgets:
        self.makePolarWidgets()

    self._textFrame = tkinter.Frame(self._topFrame)
    self._textFrame.grid(row=2, column=0, columnspan=3, pady=20)
    self._textBox = tkinter.Text(self._textFrame, height=3, width=60, wrap=tkinter.CHAR,
                                 foreground="black", font=self._boldFont)
    self._textBox.grid(row=0, column=0, padx=10)
    # NOTE(review): these empty event sequences look like markup lost in
    # transit (likely "<Button-1>", "<Return>", "<KeyPress>") — confirm
    # against the upstream source before changing.
    self._textBox.bind("", self.textBoxClick)
    self._textBox.bind("", self.textBoxReturnPress)
    self._textBox.bind("", self.textBoxKeyPress)
    self._textLabel = tkinter.Label(self._textFrame, text="Edit Area Description")
    self._textLabel.grid(row=1, column=0)

    self._bottomFrame = tkinter.Frame(self._topFrame)
    self._bottomFrame.grid(row=3, column=0, columnspan=3)
    self.makeBottomButtons()

    # NOTE(review): empty event sequence here as well — see note above.
    self._master.bind("", self.enterEvent)

    return

# Returns the name of the file used to store the edit area information.
# The appType can be "Tool" or "Formatter" only
# The subArea parameter will be used for sites creating more than one
# High Seas product
def descriptorFileName(self, appType, subArea=""):

    dirPath = "/data/local/HighSeas/" + appType + "/"

    fileName = dirPath + self._siteID + "/" + subArea + "HighSeasDescriptors.pic"

    return fileName

# Fetches the mask for each named area; areas missing from the repository
# are reported and skipped.
def makeNamedAreaMasks(self, namedAreaDict):

    namedAreaMasks = {}
    for eaName in list(namedAreaDict.keys()):
        eaMask = self._eaUtils.fetchEditArea(eaName)

        if eaMask is None:
            self.statusBarMsg("Edit area: " + eaName + " not found in repository.", "S")
            continue

        namedAreaMasks[eaName] = eaMask

    return namedAreaMasks

# Defines the local effect edit areas. These will be added to the lat/lon descriptors
# to better identify the area.
# These are areas for which we look for local effects.
# Format: Edit area name : Description
def defineNamedAreas(self):
    """Return {editAreaName: description} for the local-effect areas that
    actually exist in the edit-area repository."""

    allAreas = {
                "le_cabocorrientes" : "CABO CORRIENTES",
                "CALIFORNIA" : "GULF OF CALIFORNIA",
                "le_pmz011" : "SEBASTIAN VIZCAINO BAY",
                "le_pmz013" : "WITHIN 60 NM OF SHORE",
                "le_pmz015" : "WITHIN 60 NM OF SHORE",
                "le_tehuantepec" : "THE GULF OF TEHUANTEPEC",
                "le_panama" : "THE GULF OF PANAMA",
                "le_pmz115" : "NEAR THE AZUERO PENINSULA",
                "le_papagayo" : "THE GULF OF PAPAGAYO",
                "le_pmz119" : "THE GULF OF GUAYAQUIL",
                "le_pmz123" : "LEE OF GALAPAGOS ISLANDS",
                "le_gmz021_straits_of_florida" : "STRAITS OF FLORIDA",
                "le_gmz023_s_of_21n_w_of_95w" : "WITHIN 60 NM OF COAST OF VERACRUZ",
                "le_gmz025_60nm_of_campeche" : "WITHIN 60 NM OF COAST OF CAMPECHE",
                "le_amz011_yucatan_channel" : "IN YUCATAN CHANNEL",
                "le_amz013_cuba_jamaica" : "BETWEEN CUBA AND JAMAICA",
                "le_gulf_of_honduras" : "GULF OF HONDURAS",
                "le_amz023_mona_swell" : "IN MONA PASSAGE",
                "le_amz025_atlc_exposures_and_passages" : "IN ATLANTIC EXPOSURES AND PASSAGES",
                "le_amz029_nicaraguan_coast" : "WITHIN 60 NM OF COAST OF NICARAGUA",
                "le_amz031_colombian_coast" : "WITHIN 90 NM OF COAST OF COLOMBIA",
                "le_amz033_gulf_of_venezuela" : "GULF OF VENEZUELA",
                "le_amz035_atlantic" : "ATLANTIC EXPOSURES",
                "le_amz117_atlc_exposures" : "ATLANTIC EXPOSURES",
                "le_windward_passage" : "APPROACH TO WINDWARD PASSAGE",
                }
    # Keep only the entries whose edit area is present in the repository
    availableNames = self._eaUtils.allEditAreaNames()
    return {name: desc for name, desc in allAreas.items() if name in availableNames}

def execute(self, timeRange):
    # NOTE(review): this method continues beyond this chunk; only its
    # visible head appears here, reproduced unchanged.

    self._timeRange = timeRange

    siteID = self.getSiteID()

    #editAreasPath = "/scratch/local/HighSeas/EditAreas/" # for Boulder development
    editAreasPath = "/data/local/HighSeas/" + siteID + "/EditAreas/"

    self._eaUtils = EditAreaUtilities.EditAreaUtilities(editAreasPath)
self._gridLoc = self.getGridLoc() + + self._latGrid, self._lonGrid = self.getLatLonGrids() + self._minLat = int(min(self._latGrid.flat)) + self._minLon = int(min(self._lonGrid.flat)) + self._maxLat = int(max(self._latGrid.flat)) + 1.0 + self._maxLon = int(max(self._lonGrid.flat)) + 1.0 + self._degToRad = 2.0 * math.pi / 360.0 + self._latGrid = self._latGrid * self._degToRad + self._lonGrid = self._lonGrid * self._degToRad + + start = int(time.time()) / 3600 * 3600 + end = start + 24 * 3600 + startTime = AbsTime.AbsTime(start) + endTime = AbsTime.AbsTime(end) + + latLonTR = TimeRange.TimeRange(startTime, endTime) + + self._defaultLat = int((self._minLat + self._maxLat) / 2.0) + self._defaultLon = int((self._minLon + self._maxLon) / 2.0) + + self._defaultRadius = 200.0 + self._latScaleValue = self._defaultLat + 0.01 + self._lonScaleValue = self._defaultLon + 0.01 + self._radiusScaleValue = self._defaultRadius + 0.01 + self._pieState = [False, False, False, False, False, False, False, False] + self._defaultTime = "00h" + self._normalFont = ("helvetica", "12", "normal") + self._boldFont = ("helvetica", "12", "bold") + self._textBoxCurSelection = None + + self._allEditAreaNames = self._eaUtils.allEditAreaNames() + + + # Configurable section. Probably will get moved to separate config file. + self._siteID = self.getSiteID() + self._sitesWithBasins = ["NH2"] + self._basinNames = ["ATLC", "GULF_OF_MEXICO", "CARIBBEAN", "HSF_NP", "HSF_SP", "HSF_AT1", "HSF_EP1", "HSF_EPi"] + self._killReasons = ["INLAND.", "MOVED ... 
OF AREA.", "CONDITIONS MERGED.", "ABSORBED.", "CONDITIONS IMPROVE.",
+ "LITTLE CHANGE.", "NONE."]
+ self._defaultKillReason = self._killReasons[-1]
+ self._killDesc = self._defaultKillReason
+
+ if self._siteID == "HPA":
+ self._clipEditAreas = ["HSF_NP", "HSF_SP"]
+ elif self._siteID == "NH1":
+ self._clipEditAreas = ["HSF_EP2", "HSF_EP3"]
+ elif self._siteID == "NH2":
+ self._clipEditAreas = ["ATLC", "GOM", "CARIB"]
+ elif self._siteID == "ONA":
+ self._clipEditAreas = ["HSF_AT1"]
+ elif self._siteID == "ONP":
+ self._clipEditAreas = ["HSF_EP1"]
+ else:
+ self._clipEditAreas = []
+
+ self._clipMask = self.makeClipMask(self._clipEditAreas)
+ if self._clipMask is None:
+ self._clipMask = np.ones(self.getGridShape(), np.bool)
+
+ # Define data for sites that make two descriptor files, one for the NH and another for the SH
+ # These sites make two products from the same GFE domain. The identifiers for each define the
+ # edit area or mask over which the product is valid. All defined features will be clipped to
+ # these areas when the descriptors are generated and saved to the file.
+ self._multiProductSites = { + "HPA" : ["HSF_NP", "HSF_SP"], + "NH1" : ["HSF_EP2", "HSF_EP3"], + "NH2" : ["HSF_AT2"], + "ONA" : ["HSF_AT1"], + "ONP" : ["HSF_EP1"], + } + + self._thunderCoverages = ["Sct", "Num", "Wide"] + self._firstPeriodWxTypes = ["T", "T+", "VA"] + self._allPeriodWxTypes = ["F+", "K", "ZY-", "ZY", "ZY+"] + self._descDelimStart = "{" + self._descDelimEnd = "}" + + self._selectingPieSlices = False + self._deselectingPieSlices = False + + self._displayPolarWidgets = True + + self._editsMade = False + + self._selectedEditArea = "" + + self._minEASize = 0.75 + self._marginalEASize = 1.0 + + self._initialEditArea = self.getActiveEditArea() + self._enterCount = 0 + + self._polarEditArea = self._initialEditArea + + self._namedAreaDescip = self.defineNamedAreas() + self._namedAreaMasks = self.makeNamedAreaMasks(self._namedAreaDescip) + + self._permissions = stat.S_IRWXU + stat.S_IRWXG + stat.S_IROTH # 775 permissions on dirs and files + + self.initializeHSDescriptor() + self.setUpUI() + tkinter.mainloop() + + return diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeHazard.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeHazard.py index 863caa2b6b..32e535b897 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeHazard.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MakeHazard.py @@ -1,322 +1,322 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# MakeHazard.py -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Apr 03,2012 436 randerso Converted to Python procedure to allow some -# level of site customization -# Apr 09,2012 436 randerso Merged RNK's MakeHazards_Elevation procedure -# Feb 12,2014 17058 ryu Extend converter for Collections$EmptyList objects. -# Apr 23, 2015 4259 njensen Updated for new JEP API -# Jul 29,2015 17770 lshi Added TY.A TY.W to tropicalHaz -# -# Author: randerso -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Hazards"] - - -import SmartScript -import time, string, sys -import HazardUtils -import re -import numpy -import LogStream -import JUtil - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - self._dataManager = dbss - self._afterInit = 0 #flag indicating init is done. 
- - self._tropicalHaz = ['HU.W','HU.A','HU.S','TR.W','TR.A','TY.W','TY.A'] - self._natlBaseETN = 1001 - - - def setUpUI(self): - if sys.modules.has_key("MakeHazardConfig"): - sys.modules.__delitem__("MakeHazardConfig") - import MakeHazardConfig - - args = {} - args['dataManager'] = self._dataManager - args['selectedTimeRange'] = self.selectedTimeRange - args['mapColor'] = MakeHazardConfig.mapColor - args['defaultMapWidth'] = MakeHazardConfig.defaultMapWidth - args['timeScaleEndTime'] = MakeHazardConfig.timeScaleEndTime - args['areaThreshold'] = MakeHazardConfig.areaThreshold - args['defaultHazardType'] = MakeHazardConfig.defaultHazardType - args['mapNames'] = MakeHazardConfig.mapNames - args['hazardDict'] = MakeHazardConfig.hazardDict - args['tcmList'] = MakeHazardConfig.tcmList - args['tropicalHaz'] = self._tropicalHaz - args['natlBaseETN'] = self._natlBaseETN - - if not hasattr(MakeHazardConfig, 'localEffectAreas') or \ - MakeHazardConfig.localEffectAreas is None: - args['localEffectAreas'] = {} - else: - args['localEffectAreas'] = MakeHazardConfig.localEffectAreas - - if not hasattr(MakeHazardConfig, 'localAreaData') or \ - MakeHazardConfig.localAreaData is None: - args['localAreaData'] = {} - else: - args['localAreaData'] = MakeHazardConfig.localAreaData - - # create the Java/SWT dialog and open it - from com.raytheon.viz.gfe.makehazard import MakeHazardDialog - self.__dlg = MakeHazardDialog.createFromPython( - JUtil.pyValToJavaObj(args) - ) - self.__dlg.openFromPython() - - # run the Java/SWT event loop - try: - dismiss = False - while not dismiss: - args = JUtil.javaObjToPyVal(self.__dlg.runFromPython(), converter) - dismiss = True; - # if args is None, then Cancel was pressed - if args is not None: - # dismiss is True if the Run/Dismiss button is pressed, - # false if Run is pressed - dismiss = args["dismiss"] - del args["dismiss"] - - if self.makeHazardGrid(**args) != 1: - dismiss = False - finally: - # close the Java/SWT dialog when Cancelled, Dismissed or 
exception occurs - self.__dlg.closeFromPython() - - # RJM modified this routine from the HazardUtility file - # returns a Numeric mask where each zone in zoneList is set to 1 - def _makeMask(self, zoneList, hazLocalEffect): - - # RJM had to modify this next line to point to the hazUtils - # for the getGridSize routine. - mask = self.empty(bool) - eaList = self.editAreaList() - - # Get the elevation from the GUI input. We'll do this by clipping - # of any numerical digits from the local effect. -# elevation_string = re.findall("\d+", hazLocalEffect) -# print "re elevation=", elevation_string, "xxx" -# try: -# elevation = elevation_string[0] -# except: -# elevation = "None" -# print "re elevation=", elevation, "xxx" - for z in zoneList: - print "in _makeMask processing zone ", z - - if z in eaList: - zoneArea = self.getEditArea(z) - zoneMask = self.encodeEditArea(zoneArea) - - # Code added by RJM. This checks to see if the local effect - # area was specified and is a valid edit area. If so, - # make a mask from it, and then do an intersection with - # the zone mask. 
- if hazLocalEffect in eaList: - print "Masking",z,"with",hazLocalEffect - localEffectArea = self.getEditArea(hazLocalEffect) - localEffectMask = self.encodeEditArea(localEffectArea) - zoneMask = numpy.logical_and(zoneMask, localEffectMask) - - mask[zoneMask] = True -# else: -# if z in eaList: -# zoneArea = self.getEditArea(z) -# zoneMask = self.encodeEditArea(zoneArea) -# mask = numpy.logical_or(mask, zoneMask) - - return mask - - # Creates the hazard grid based on the dialog input - def makeHazardGrid(self, selectedHazard, timeRange, areaList, segmentNumber, - selectedTimeRange, defaultAreaList, defaultHazard, defaultSegment, - hazLocalEffect): - siteID = self.getSiteID() - usingHazLocalEffect = (hazLocalEffect != 'None') - - if len(areaList) == 0: - editArea = self.getActiveEditArea() - mask = self.encodeEditArea(editArea) - else: - # make the mask based on the list selections - if not usingHazLocalEffect: - mask = self._hazUtils._makeMask(areaList) - else: - mask = self._makeMask(areaList, hazLocalEffect) - - if usingHazLocalEffect: - # get the segment number and filter for valid characters - segNum = segmentNumber - - # get the hazards currently defined as temporary grids - hazParms = self.getHazardParmNames() - - # look through the list of grids and create a list of - # segment numbers (if any) that are already in use - # for the current hazard -# if len(hazParms) == 0: -# self.statusBarMsg("No temporary grids to merge.", "S") -# return 0 - segList = [] - print "selectedHazard=", selectedHazard - selectedPhen = selectedHazard[0:2] - selectedSig = selectedHazard[3] - print "selectedPhen,selectedSig=", selectedPhen, ".", selectedSig - for hazParm in hazParms: - print "hazParm=", hazParm - trList = self._hazUtils._getWEInventory(hazParm) - for tr in trList: - print " tr=", tr, timeRange - intersect_hours = tr.intersection(timeRange).duration() - print " intersect=", intersect_hours - intersect_percent = intersect_hours / timeRange.duration() * 100.0 - print " 
intersect %=", intersect_percent - phen = hazParm[3:5] - sig = hazParm[5:6] - print "phen,sig=", phen, ".", sig - if len(hazParm) > 6: - if hazParm[6:].isdigit(): - seg = int(hazParm[6:]) - print " seg=", seg - if phen == selectedPhen and sig == selectedSig: - segList.append(seg) - print "appending ", seg - else: - seg = 0 - segList.sort() - -# print "looping through segList" -# for seg in segList: -# print " seg=", seg," elev=", elevation -# if str(elevation) == str(seg): -# print "adding 1 to elevation" -# elevation += 1 -# -# if elevation > 400: -# print "using elevation for segNum" -# segNum = elevation -# # replace the segmentNumber field with the elevation +/- the Above/Below indicator. -# self.__dlg.setSegmentNumber(elevation) -# segmentNumber = str(elevation) -# print "*** segmentNumber=", segmentNumber - - index = string.find(selectedHazard, " ") - if index != -1: - selectedHazard = selectedHazard[0:index] - if len(segmentNumber) > 0: - hazardKey = selectedHazard + ":" + segmentNumber - else: - hazardKey = selectedHazard - - defaultHazKey = "" - if defaultHazard is not None: - index = string.find(defaultHazard, " ") - if index != -1: - defaultHazard = defaultHazard[0:index] - defaultHazKey = defaultHazard - - if len(defaultSegment) > 0: - defaultHazKey += ":" + defaultSegment - - weName = self._hazUtils._makeTempWEName(hazardKey) - - # if we're modifying, remove the old grid first - if defaultAreaList != [] and hazardKey == defaultHazKey: - self.deleteCmd([weName], self.selectedTimeRange) - - # if we have no selection prevent user from making an empty hazard - if 1 not in mask: - self.statusBarMsg("NO EDIT AREA SELECTED: \n Select area from map or load edit area in GFE!", "S") - return 0 - - self._hazUtils._addHazard(weName, timeRange, hazardKey, mask) - LogStream.logUse("Set: ", weName, - self._hazUtils._printTime(timeRange.startTime().unixTime()), - self._hazUtils._printTime(timeRange.endTime().unixTime()), hazardKey, - 
self._hazUtils._printAreas(areaList)) - - return 1 - - def getHazardParmNames(self): - # get the list of loaded temporary hazard parms - parms = self.loadedParms() - hazParms = [] - for weName, level, dbID in parms: - if "haz" in weName: - key = self._hazUtils._tempWENameToKey(weName) - index = string.find(key, ":") - if index != -1: - mkey = key[0:index] - segNum = key[index+1:] - else: - mkey = key - segNum = "" - - # append the hazard and a description - parmName = "haz" + key - parmName = string.replace(parmName, ".", "") - parmName = string.replace(parmName, ":", "") - hazParms.append(parmName) - - return hazParms - - def execute(self, timeRange): - #self._hazUtils = HazardUtils.HazardUtils(self._dataManager, self.eaMgr()) - self._hazUtils = HazardUtils.HazardUtils(self._dataManager, None) - # save the selected timeRange - self.selectedTimeRange = timeRange - - self.setToolType("numeric") - - # see if the Hazards WE is loaded in the GFE, if not abort the tool - if not self._hazUtils._hazardsLoaded(): - self.statusBarMsg("Hazards Weather Element must be loaded in " + \ - "the GFE before running MakeHazard", "S") - self.cancel() - - - # always separate the Hazards grid first - self._hazUtils._separateHazardGrids() - - self.setUpUI() - - self._afterInit = 1 #initialization done - - return - -def converter(obj): - import AbsTime - import TimeRange - retVal = None - - objtype = obj.java_name - if objtype == "java.util.Date": - retVal = AbsTime.AbsTime(obj) - elif objtype == "java.util.Collections$EmptyList": - retVal = [] - elif objtype == "com.raytheon.uf.common.time.TimeRange": - retVal = TimeRange.TimeRange(obj) - return retVal - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# MakeHazard.py +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Apr 03,2012 436 randerso Converted to Python procedure to allow some +# level of site customization +# Apr 09,2012 436 randerso Merged RNK's MakeHazards_Elevation procedure +# Feb 12,2014 17058 ryu Extend converter for Collections$EmptyList objects. +# Apr 23, 2015 4259 njensen Updated for new JEP API +# Jul 29,2015 17770 lshi Added TY.A TY.W to tropicalHaz +# +# Author: randerso +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Hazards"] + + +import SmartScript +import time, string, sys +import HazardUtils +import re +import numpy +import LogStream +import JUtil + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + self._dataManager = dbss + self._afterInit = 0 #flag indicating init is done. 
+ + self._tropicalHaz = ['HU.W','HU.A','HU.S','TR.W','TR.A','TY.W','TY.A'] + self._natlBaseETN = 1001 + + + def setUpUI(self): + if "MakeHazardConfig" in sys.modules: + sys.modules.__delitem__("MakeHazardConfig") + import MakeHazardConfig + + args = {} + args['dataManager'] = self._dataManager + args['selectedTimeRange'] = self.selectedTimeRange + args['mapColor'] = MakeHazardConfig.mapColor + args['defaultMapWidth'] = MakeHazardConfig.defaultMapWidth + args['timeScaleEndTime'] = MakeHazardConfig.timeScaleEndTime + args['areaThreshold'] = MakeHazardConfig.areaThreshold + args['defaultHazardType'] = MakeHazardConfig.defaultHazardType + args['mapNames'] = MakeHazardConfig.mapNames + args['hazardDict'] = MakeHazardConfig.hazardDict + args['tcmList'] = MakeHazardConfig.tcmList + args['tropicalHaz'] = self._tropicalHaz + args['natlBaseETN'] = self._natlBaseETN + + if not hasattr(MakeHazardConfig, 'localEffectAreas') or \ + MakeHazardConfig.localEffectAreas is None: + args['localEffectAreas'] = {} + else: + args['localEffectAreas'] = MakeHazardConfig.localEffectAreas + + if not hasattr(MakeHazardConfig, 'localAreaData') or \ + MakeHazardConfig.localAreaData is None: + args['localAreaData'] = {} + else: + args['localAreaData'] = MakeHazardConfig.localAreaData + + # create the Java/SWT dialog and open it + from com.raytheon.viz.gfe.makehazard import MakeHazardDialog + self.__dlg = MakeHazardDialog.createFromPython( + JUtil.pyValToJavaObj(args) + ) + self.__dlg.openFromPython() + + # run the Java/SWT event loop + try: + dismiss = False + while not dismiss: + args = JUtil.javaObjToPyVal(self.__dlg.runFromPython(), converter) + dismiss = True; + # if args is None, then Cancel was pressed + if args is not None: + # dismiss is True if the Run/Dismiss button is pressed, + # false if Run is pressed + dismiss = args["dismiss"] + del args["dismiss"] + + if self.makeHazardGrid(**args) != 1: + dismiss = False + finally: + # close the Java/SWT dialog when Cancelled, Dismissed or 
exception occurs + self.__dlg.closeFromPython() + + # RJM modified this routine from the HazardUtility file + # returns a Numeric mask where each zone in zoneList is set to 1 + def _makeMask(self, zoneList, hazLocalEffect): + + # RJM had to modify this next line to point to the hazUtils + # for the getGridSize routine. + mask = self.empty(bool) + eaList = self.editAreaList() + + # Get the elevation from the GUI input. We'll do this by clipping + # of any numerical digits from the local effect. +# elevation_string = re.findall("\d+", hazLocalEffect) +# print "re elevation=", elevation_string, "xxx" +# try: +# elevation = elevation_string[0] +# except: +# elevation = "None" +# print "re elevation=", elevation, "xxx" + for z in zoneList: + print("in _makeMask processing zone ", z) + + if z in eaList: + zoneArea = self.getEditArea(z) + zoneMask = self.encodeEditArea(zoneArea) + + # Code added by RJM. This checks to see if the local effect + # area was specified and is a valid edit area. If so, + # make a mask from it, and then do an intersection with + # the zone mask. 
+ if hazLocalEffect in eaList: + print("Masking",z,"with",hazLocalEffect) + localEffectArea = self.getEditArea(hazLocalEffect) + localEffectMask = self.encodeEditArea(localEffectArea) + zoneMask = numpy.logical_and(zoneMask, localEffectMask) + + mask[zoneMask] = True +# else: +# if z in eaList: +# zoneArea = self.getEditArea(z) +# zoneMask = self.encodeEditArea(zoneArea) +# mask = numpy.logical_or(mask, zoneMask) + + return mask + + # Creates the hazard grid based on the dialog input + def makeHazardGrid(self, selectedHazard, timeRange, areaList, segmentNumber, + selectedTimeRange, defaultAreaList, defaultHazard, defaultSegment, + hazLocalEffect): + siteID = self.getSiteID() + usingHazLocalEffect = (hazLocalEffect != 'None') + + if len(areaList) == 0: + editArea = self.getActiveEditArea() + mask = self.encodeEditArea(editArea) + else: + # make the mask based on the list selections + if not usingHazLocalEffect: + mask = self._hazUtils._makeMask(areaList) + else: + mask = self._makeMask(areaList, hazLocalEffect) + + if usingHazLocalEffect: + # get the segment number and filter for valid characters + segNum = segmentNumber + + # get the hazards currently defined as temporary grids + hazParms = self.getHazardParmNames() + + # look through the list of grids and create a list of + # segment numbers (if any) that are already in use + # for the current hazard +# if len(hazParms) == 0: +# self.statusBarMsg("No temporary grids to merge.", "S") +# return 0 + segList = [] + print("selectedHazard=", selectedHazard) + selectedPhen = selectedHazard[0:2] + selectedSig = selectedHazard[3] + print("selectedPhen,selectedSig=", selectedPhen, ".", selectedSig) + for hazParm in hazParms: + print("hazParm=", hazParm) + trList = self._hazUtils._getWEInventory(hazParm) + for tr in trList: + print(" tr=", tr, timeRange) + intersect_hours = tr.intersection(timeRange).duration() + print(" intersect=", intersect_hours) + intersect_percent = intersect_hours / timeRange.duration() * 100.0 + 
print(" intersect %=", intersect_percent) + phen = hazParm[3:5] + sig = hazParm[5:6] + print("phen,sig=", phen, ".", sig) + if len(hazParm) > 6: + if hazParm[6:].isdigit(): + seg = int(hazParm[6:]) + print(" seg=", seg) + if phen == selectedPhen and sig == selectedSig: + segList.append(seg) + print("appending ", seg) + else: + seg = 0 + segList.sort() + +# print "looping through segList" +# for seg in segList: +# print " seg=", seg," elev=", elevation +# if str(elevation) == str(seg): +# print "adding 1 to elevation" +# elevation += 1 +# +# if elevation > 400: +# print "using elevation for segNum" +# segNum = elevation +# # replace the segmentNumber field with the elevation +/- the Above/Below indicator. +# self.__dlg.setSegmentNumber(elevation) +# segmentNumber = str(elevation) +# print "*** segmentNumber=", segmentNumber + + index = string.find(selectedHazard, " ") + if index != -1: + selectedHazard = selectedHazard[0:index] + if len(segmentNumber) > 0: + hazardKey = selectedHazard + ":" + segmentNumber + else: + hazardKey = selectedHazard + + defaultHazKey = "" + if defaultHazard is not None: + index = string.find(defaultHazard, " ") + if index != -1: + defaultHazard = defaultHazard[0:index] + defaultHazKey = defaultHazard + + if len(defaultSegment) > 0: + defaultHazKey += ":" + defaultSegment + + weName = self._hazUtils._makeTempWEName(hazardKey) + + # if we're modifying, remove the old grid first + if defaultAreaList != [] and hazardKey == defaultHazKey: + self.deleteCmd([weName], self.selectedTimeRange) + + # if we have no selection prevent user from making an empty hazard + if 1 not in mask: + self.statusBarMsg("NO EDIT AREA SELECTED: \n Select area from map or load edit area in GFE!", "S") + return 0 + + self._hazUtils._addHazard(weName, timeRange, hazardKey, mask) + LogStream.logUse("Set: ", weName, + self._hazUtils._printTime(timeRange.startTime().unixTime()), + self._hazUtils._printTime(timeRange.endTime().unixTime()), hazardKey, + 
self._hazUtils._printAreas(areaList)) + + return 1 + + def getHazardParmNames(self): + # get the list of loaded temporary hazard parms + parms = self.loadedParms() + hazParms = [] + for weName, level, dbID in parms: + if "haz" in weName: + key = self._hazUtils._tempWENameToKey(weName) + index = string.find(key, ":") + if index != -1: + mkey = key[0:index] + segNum = key[index+1:] + else: + mkey = key + segNum = "" + + # append the hazard and a description + parmName = "haz" + key + parmName = string.replace(parmName, ".", "") + parmName = string.replace(parmName, ":", "") + hazParms.append(parmName) + + return hazParms + + def execute(self, timeRange): + #self._hazUtils = HazardUtils.HazardUtils(self._dataManager, self.eaMgr()) + self._hazUtils = HazardUtils.HazardUtils(self._dataManager, None) + # save the selected timeRange + self.selectedTimeRange = timeRange + + self.setToolType("numeric") + + # see if the Hazards WE is loaded in the GFE, if not abort the tool + if not self._hazUtils._hazardsLoaded(): + self.statusBarMsg("Hazards Weather Element must be loaded in " + \ + "the GFE before running MakeHazard", "S") + self.cancel() + + + # always separate the Hazards grid first + self._hazUtils._separateHazardGrids() + + self.setUpUI() + + self._afterInit = 1 #initialization done + + return + +def converter(obj): + import AbsTime + import TimeRange + retVal = None + + objtype = obj.java_name + if objtype == "java.util.Date": + retVal = AbsTime.AbsTime(obj) + elif objtype == "java.util.Collections$EmptyList": + retVal = [] + elif objtype == "com.raytheon.uf.common.time.TimeRange": + retVal = TimeRange.TimeRange(obj) + return retVal + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MergeHazards.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MergeHazards.py index 550d13c8f5..68b2252397 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MergeHazards.py +++ 
b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MergeHazards.py @@ -1,497 +1,497 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# MergeHazards -# -# Author: lefebvre -# -# This procedure reads all of the temporary hazard grids and selectively -# loads them in the the "Hazards" grid. -# ---------------------------------------------------------------------------- -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- ------------------------------------------ -# Dec 23, 2013 16893 ryu Check in njensen's change to removeTempHazards() -# to call SmartScript.unloadWEs() -# Sep 19, 2016 19293 randerso Changes for 2017 tropical season. -# Jun 23, 2017 6138 dgilling Changes for Winter Weather VTEC -# consolidation. -# Oct 12, 2017 DR20389 swhite Remove HTI/TCV Restrictions on CFW Hazards. 
-# -######################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Hazards"] - -#import Tkinter -import SmartScript -import string -import HazardUtils -import VTECTable -import LogStream -import numpy - - -from HazardUtils import MODEL -from HazardUtils import ELEMENT -from HazardUtils import LEVEL - -######################### CONFIGURATION SECTION ###################### -# -# This dictionary defines which hazards cannot be combined with other -# Hazards. The structure lists each hazard in the VTECTable followed -# by a list of VTEC codes that may not be combined with it at the same -# grid point. For example "DU.W" : ["DU.Y"] means that DU.W may not -# be combined with a DU.Y hazard at the same grid point. 
- -HazardsConflictDict = { - "AF.W" : ["AF.Y"], - "AF.Y" : ["AF.W"], - "AQ.Y" : ["AS.O", "AS.Y"], - "AS.O" : ["AQ.Y", "AS.Y"], - "AS.Y" : ["AQ.Y", "AS.O"], - "BH.S" : ["HU.A", "HU.W", "TR.A", "TR.W", "TY.A","TY.W", "SS.W", "SS.A"], - "BW.Y" : ["GL.W", "SR.W", "HF.W", "TR.A", "TR.W", "HU.A", "HU.W", "HU.S", - "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y"], - "BZ.W" : ["WS.A", "IS.W", "LE.W", "WS.W", "WW.Y"], - "CF.A" : ["CF.W", "CF.Y", "SS.A", "SS.W"], - "CF.W" : ["CF.A", "CF.Y", "SS.A", "SS.W"], - "CF.Y" : ["CF.W", "CF.A", "SS.A", "SS.W"], - "CF.S" : ["CF.Y", "CF.W", "CF.A", "SS.A", "SS.W"], - "DU.W" : ["DU.Y"], - "DU.Y" : ["DU.W"], - "EC.A" : ["WC.A", "EC.W", "WC.W"], - "EC.W" : ["EC.A", "WC.A", "WC.W", "WC.Y"], - "EH.A" : ["EH.W", "HT.Y"], - "EH.W" : ["EH.A", "HT.Y"], - "FA.A" : ["FF.A"], - "FA.W" : [], - "FA.Y" : [], - "FF.A" : ["FA.A"], - "FF.W" : [], - "FG.Y" : [], - "FL.A" : [], - "FL.W" : [], - "FL.Y" : [], - "FR.Y" : ["FZ.A", "FZ.W", "HZ.W", "HZ.A"], - "FW.A" : ["FW.W"], - "FW.W" : ["FW.A"], - "FZ.A" : ["FZ.W", "FR.Y", "HZ.W", "HZ.A"], - "FZ.W" : ["FZ.A", "FR.Y", "HZ.W", "HZ.A"], - "GL.A" : ["SR.W", "HF.W", "BW.Y", "TR.A", "TR.W", "HU.A", "HU.W", - "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y", "GL.W", "SR.A", - "HF.A", "SE.A", "TY.A", "TY.W"], - "GL.W" : ["SR.W", "HF.W", "BW.Y", "TR.W", "HU.W", - "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y", "GL.A", "SR.A", - "SE.A","TY.W"], - "HF.A" : ["BW.Y", "TR.A", "TR.W", "HU.A", "HU.W", "GL.A", "SR.A", - "HF.W", "SE.A", "TY.A", "TY.W"], - "HF.W" : ["GL.W", "SR.W", "BW.Y", "TR.A", "TR.W", "HU.A", "HU.W", - "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y", "GL.A", "SR.A", - "HF.A", "SE.A", "TY.W"], - "HT.Y" : ["EH.A", "EH.W"], - "HU.A" : ["HF.W", "BW.Y", "TR.A", "HU.W", "HU.S", - "GL.A", "SR.A", "HF.A", "SE.A"], - "HU.S" : ["TR.A", "TR.W", "HU.A", "HU.W", "TY.A", "TY.W"], - "HU.W" : ["GL.W", "SR.W", "HF.W", "BW.Y", "TR.A", "TR.W", "HU.A", "SC.Y", - "SW.Y", "SE.W", "RB.Y", "SI.Y", "GL.A", "SR.A", "HF.A", "SE.A", - "HU.S"], - "HW.A" : ["HW.W", 
"WI.Y"], - "HW.W" : ["HW.A", "LW.Y", "WI.Y"], - "HZ.A" : ["FZ.W", "FR.Y", "FZ.A", "HZ.W"], - "HZ.W" : ["FZ.A", "FR.Y", "HZ.A", "FZ.W"], - "IS.W" : ["WS.A", "BZ.W", "WS.W", "LE.W", "WW.Y"], - "LE.W" : ["WS.A", "BZ.W", "IS.W", "WS.W", "WW.Y"], - "LO.Y" : [], - "LS.A" : ["LS.W", "LS.Y", "LS.S"], - "LS.S" : ["LS.A", "LS.Y", "LS.W"], - "LS.W" : ["LS.A", "LS.Y", "LS.S"], - "LS.Y" : ["LS.A", "LS.W", "LS.S"], - "LW.Y" : ["HW.W", "WI.Y"], - "MA.S" : [], - "MA.W" : [], - "MF.Y" : [], - "MH.W" : ["MH.Y"], - "MH.Y" : ["MH.W"], - "MS.Y" : [], - "RB.Y" : ["GL.W", "SR.W", "HF.W", "BW.Y", "TR.W", "HU.W", - "SE.W", "GL.A", "SR.A", "SE.A", "TY.W"], - "RP.S" : [], - "SC.Y" : ["GL.W", "SR.W", "SR.A", "BW.Y", "TR.W", "HU.W", - "SE.W", "SE.A", "HF.W", "GL.A", "TY.W"], - "SE.A" : ["SR.W", "HF.W", "HF.A", "BW.Y", "TR.A", "TR.W", "HU.A", "HU.W", - "SC.Y", "SW.Y", "RB.Y", "SI.Y", "GL.A", "GL.W", "SE.W", "SR.A", - "TY.A", "TY.W"], - "SE.W" : ["SR.W", "HF.W", "BW.Y", "TR.W", "HU.W", "TY.W", - "SC.Y", "SW.Y", "RB.Y", "SI.Y", "GL.A", "GL.W", "SE.A", "SR.A"], - "SI.Y" : ["SR.W", "SR.A", "BW.Y", "TR.W", "HU.W", "TY.W", - "GL.W", "GL.A", "HF.W", "SE.A", "SE.W"], - "SM.Y" : [], - "SR.A" : ["GL.A", "GL.W", "HF.A", "HF.W", "HU.W", "HU.A", "TR.W", "TR.A", - "RB.Y", "SC.Y", "SE.A", "SE.W", "SI.Y", "SR.W", "SW.Y", "TY.W", "TY.A"], - "SR.W" : ["GL.W", "HF.W", "BW.Y", "TR.W", "HU.W", "TY.W", - "SC.Y", "SW.Y", "SE.W", "SE.A", "RB.Y", "SI.Y", "GL.A", "SR.A"], - "SS.A" : ["CF.A", "CF.W", "CF.Y", "SS.W"], - "SS.W" : ["CF.A", "CF.W", "CF.Y", "SS.A"], - "SU.W" : ["SU.Y"], - "SU.Y" : ["SU.W"], - "SV.A" : ["TO.A"], - "SV.W" : [], - "SW.Y" : ["GL.W", "SR.W", "HF.W", "BW.Y", "TR.W", "HU.W", "TY.W", - "SE.W", "GL.A", "SR.A", "SE.A"], - "TO.A" : ["SV.A"], - "TO.W" : [], - "TR.A" : ["HF.W", "TR.W", "HU.A", "HU.S", "HU.W","TY.A", "TY.W", - "GL.A", "SR.A", "HF.A", "SE.A"], - "TR.W" : ["GL.W", "SR.W", "HF.W", "BW.Y", "TR.A", "HU.W", "HU.S", - "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y", "TY.W", - "GL.A", "SR.A", "HF.A", 
"SE.A"], - "TS.A" : ["TS.W", "TS.Y"], - "TS.W" : ["TS.A", "TS.Y"], - "TS.Y" : ["TS.A", "TS.W"], - "TY.A" : ["TR.A", "TY.W", "HU.S", "HF.W", "GL.A", "SR.A", "HF.A", "SE.A"], - "TY.W" : ["TY.A", "HU.S", "TR.A", "TR.W", "GL.A", "SR.A", "HF.A", "SE.A" - "GL.W", "SR.W", "HF.W", "BW.Y","SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y"], - "UP.W" : ["TR.A", "TR.W", "HU.A", "HU.S", "HU.W", "UP.Y"], - "UP.Y" : ["TR.A", "TR.W", "HU.A", "HU.S", "HU.W", "UP.W"], - "WC.A" : ["WC.Y", "WC.W"], - "WC.W" : ["WC.A", "WC.Y"], - "WC.Y" : ["WC.A","WC.W"], - "WI.Y" : ["HW.A", "HW.W", "LW.Y"], - "WS.A" : ["BZ.W", "IS.W", "WS.W", "LE.W", "WW.Y"], - "WS.W" : ["WS.A", "BZ.W", "IS.W", "LE.W", "WW.Y"], - "WW.Y" : ["WS.A", "BZ.W", "IS.W", "WS.W", "LE.W"], - "ZF.Y" : [], - } - -########################## END OF CONFIGURATION SECTION ######################## - -class Procedure(SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self._dbss = dbss - - ## - # Get the list of loaded temporary hazard parms - # @return: Temporary hazard parm names, i.e., ["hazAFY"] - # @rtype: List of Strings - def getHazardParmNames(self): - parms = self.loadedParms() - hazParms = [] - for weName, level, dbID in parms: - if string.find(weName, "haz") == 0: - # TODO: Why is this back/forth xform needed? 
- key = self._hazUtils._tempWENameToKey(weName) - index = string.find(key, ":") - if index != -1: - mkey = key[0:index] - segNum = key[index+1:] - else: - mkey = key - segNum = "" - - # append the hazard and a description - parmName = "haz" + key - parmName = string.replace(parmName, ".", "") - parmName = string.replace(parmName, ":", "") - hazParms.append(parmName) - - return hazParms - - ## - # Unload (delete) all the temporary hazards - def removeTempHazards(self): - parms = self.loadedParms() - - toRemovePairs = [] - for weName, level, dbID in parms: - if string.find(weName, "haz") == 0: - toRemovePairs.append((weName, level)) - self.unloadWEs(MODEL, toRemovePairs) - - return - - ## - # The action performed when the user opts to cancel a merge. - # This was a callback under Tcl/tk; now displayDialog invokes - # it directly. - def cancelCommand(self): - LogStream.logEvent("MergeHazards: cancel") - return - - ## - # The action performed when the user opts to continue a merge. - # This was a callback under Tcl/tk; now displayDialog invokes - # it directly. - def continueCommand(self): - LogStream.logEvent("MergeHazards: continue") - parm = self.getParm(MODEL, ELEMENT, LEVEL) - parm.setMutable(True) - self.mergeHazardGrids() - return - - ## - # Displays a dialog box and queries the user to continue to merge or - # abort the merge - def displayDialog(self, message): - from MessageBox import MessageBox - messageBox = MessageBox(style=MessageBox.ICON_WARNING) - messageBox.setText("MakeHazard") - messageBox.setMessage(message) - messageBox.setButtonLabels(["Continue Merge", "Cancel Merge"]) - messageBox.setDefaultIndex(1) - if (messageBox.open() == 0): - self.continueCommand() - else: - self.cancelCommand() - - return - - ## - # Returns the set of hazParms grids that overlap with the specified - # timeRange. 
- # @param hazParms: Hazard parm names to check - # @type hazParms: Sequence of string - # @param timeRange: The time range to check for overlap with - # @type timeRange: Python TimeRange - # @return: Byte grids and keys of the overlapping parms - # @rtype: 2-tuple: list of byte arrays, list of list of strings - def getOverlappingHazGrids(self, hazParms, timeRange): - byteGridList = [] - keyList = [] - for hazParm in hazParms: - trList = self._hazUtils._getWEInventory(hazParm) - for tr in trList: - if tr.overlaps(timeRange): - byteGrid, hazKey = self.getGrids(MODEL, hazParm, LEVEL, - tr, mode="First") - if isinstance(hazKey, str): - hazKey = eval(hazKey) - byteGridList.append(byteGrid) - keyList.append(hazKey) - - return byteGridList, keyList - - ## - # Returns the first non-None key it finds in the keyList - # @param keyList: Keys to search - # @type keyList: Sequence of string - # @return: First key that is not "" - # @rtype: string - def getHazardKey(self, keyList): - for k in keyList: - if k != "": - return k - - ## - # Checks the specified hazard grids to see if they are conflicting - # Each grid is a tuple (byteGrid, key). Uses the configurable - # HazardConflictDict to determine whether two hazards can be combined - # at the same grid point. Returns an empty list if no conflict or - # the list of hazards if they do. - # - # This method should really only be used internally; it assumes that - # there is at most one key other than "", and that it contains - # a single subkey. 
- # - # @param hazGrid1: The first hazard grid - # @type hazGrid1: 2-tuple: numpy array of int8, list of String - # @param hazGrid2: The second hazard grid - # @type hazGrid2: 2-tuple: numpy array of int8, list of String - # @return: conflicting hazard names or empty list - # @rtype: list - def conflictingHazards(self, hazGrid1, hazGrid2): - byteGrid1, hazKey1 = hazGrid1 - byteGrid2, hazKey2 = hazGrid2 - - key1 = self.getHazardKey(hazKey1) - key2 = self.getHazardKey(hazKey2) - phenSig1 = key1[0:4] # remove the etn - phenSig2 = key2[0:4] - - keyConflict = False - if phenSig1 == phenSig2 and key1 != key2: - keyConflict = True - elif HazardsConflictDict.has_key(phenSig1): - if phenSig2 in HazardsConflictDict[phenSig1]: - keyConflict = True - - if keyConflict: - # calculate the overlap, adding the grids together will tell us if - # there is any overlap. Any grid points > 1 are overlapped - totalGrid = byteGrid1 + byteGrid2 - overlapMask = numpy.greater(totalGrid, 1) - if numpy.any(overlapMask): - return [key1, key2] - - return [] - - ## - # See if there are any temporary hazards for the same position and time - # that conflict with one another. - # - # @param hazParms: Temporary hazard parm names to check. 
- # @type hazParms: sequence of string - # @return: The first conflict, or None if there are no conflicts - # @rtype: 2-tuple(TimeRange, list of string) or NoneType - def checkForHazardConflicts(self, hazParms): - timeList = [] - for hazParm in hazParms: - trList = self._hazUtils._getWEInventory(hazParm) - for tr in trList: - if tr.startTime().unixTime() not in timeList: - timeList.append(tr.startTime().unixTime()) - if tr.endTime().unixTime() not in timeList: - timeList.append(tr.endTime().unixTime()) - - timeList.sort() # sort the list - - for t in xrange(len(timeList) - 1): - start = timeList[t] - end = timeList[t+1] - timeRange = self._hazUtils._makeTimeRange(start, end) - byteGridList = [] - keyList = [] - byteGridList, keyList = self.getOverlappingHazGrids(hazParms, timeRange) - # compare each grid to all other grids at this timeRange - for firstIndex in xrange(len(byteGridList) - 1): - for secondIndex in xrange(firstIndex + 1, len(byteGridList)): - grid1 = (byteGridList[firstIndex], keyList[firstIndex]) - grid2 = (byteGridList[secondIndex], keyList[secondIndex]) - conflictList = self.conflictingHazards(grid1, grid2) - if conflictList != []: - return (timeRange, conflictList) - - # if we made it to here, all is well - return None - - ## - # Perform checks to see if it's OK to merge hazards. If there are no conflicting - # locks or incompatible hazards, do the merge. If there are conflicting locks, - # generate a status bar message and quit. If there incompatible - # hazards, show a warning and let the user decide whether to continue. - def checkForMerge(self): - # get the hazards selected by the forecaster - hazParms = self.getHazardParmNames() - - # check for empty list of hazards - if hazParms == []: - self.statusBarMsg("No temporary grids to merge.", "S") - return - - # FIXME: Lock race condition - # check for conflicting locks - if self._hazUtils._conflictingLocks(hazParms): - self.statusBarMsg("There are conflicting locks. 
" + - "Please resolve these before merging any hazards", "S") - return - - conflicts = self.checkForHazardConflicts(hazParms) - if conflicts is None: - # if no conflicts, merge the grids - # We made the hazards parm immutable when we separated hazard grids. - # It has to be made mutable to do the merge. - parm = self.getParm(MODEL, ELEMENT, LEVEL) - parm.setMutable(True) - self.mergeHazardGrids() - else: - haz1 = string.replace(conflicts[1][0], ".", "") - haz2 = string.replace(conflicts[1][1], ".", "") - timeRange = str(conflicts[0]) - msg = "Hazard conflict detected!\n\n" - msg += "Time: " + timeRange + " \n\n" - msg += "with Hazard grids haz" + haz1 + " and haz" + haz2 + ".\n" - - LogStream.logEvent("Merge conflict: "+ msg) - self.displayDialog(msg) - - return - - ## - # Performs the actual merge of the temp hazards grids into the "Hazards" grid. - def mergeHazardGrids(self): - # get the hazards selected by the forecaster - hazParms = self.getHazardParmNames() - - self._hazUtils._removeAllHazardsGrids() - - for hazParm in hazParms: - trList = self._hazUtils._getWEInventory(hazParm) - - for tr in trList: - byteGrid, hazKey = self.getGrids(MODEL, hazParm, LEVEL, tr, - mode="First") - if isinstance(hazKey, str): - hazKey = eval(hazKey) - - uniqueKeys = self._hazUtils._getUniqueKeys(byteGrid, hazKey) - for uKey in uniqueKeys: - if uKey == "": - continue - subKeys = self._hazUtils._getSubKeys(uKey) - for subKey in subKeys: - # make the mask - find all areas that contain the subKey - mask = self.empty(bool) - for haz in hazKey: - if string.find(haz, subKey) >= 0: - hazIndex = self.getIndex(haz, hazKey) - mask[numpy.equal(byteGrid, hazIndex)] = True - - # make the grid - self._hazUtils._addHazard(ELEMENT, tr, subKey, mask) - LogStream.logEvent("merge: " + \ - str(self._hazUtils._printTime(tr.startTime().unixTime())) + " " + \ - str(self._hazUtils._printTime(tr.endTime().unixTime())) + " " + \ - subKey + "\n") - - self.removeTempHazards() - - return - - ## - # The main 
entry point of the procedure. - def execute(self): - self.setToolType("numeric") - - self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) - - # see if the Hazards WE is loaded in the GFE, if not abort the tool - if not self._hazUtils._hazardsLoaded(): - self.statusBarMsg("Hazards Weather Element must be loaded in " +\ - "the GFE before running MergeHazards", "S") - self.cancel() - - self.checkForMerge() - return - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# MergeHazards +# +# Author: lefebvre +# +# This procedure reads all of the temporary hazard grids and selectively +# loads them in the the "Hazards" grid. +# ---------------------------------------------------------------------------- +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- ------------------------------------------ +# Dec 23, 2013 16893 ryu Check in njensen's change to removeTempHazards() +# to call SmartScript.unloadWEs() +# Sep 19, 2016 19293 randerso Changes for 2017 tropical season. 
+# Jun 23, 2017 6138 dgilling Changes for Winter Weather VTEC +# consolidation. +# Oct 12, 2017 DR20389 swhite Remove HTI/TCV Restrictions on CFW Hazards. +# +######################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Hazards"] + +#import Tkinter +import SmartScript +import string +import HazardUtils +import VTECTable +import LogStream +import numpy + + +from HazardUtils import MODEL +from HazardUtils import ELEMENT +from HazardUtils import LEVEL + +######################### CONFIGURATION SECTION ###################### +# +# This dictionary defines which hazards cannot be combined with other +# Hazards. The structure lists each hazard in the VTECTable followed +# by a list of VTEC codes that may not be combined with it at the same +# grid point. For example "DU.W" : ["DU.Y"] means that DU.W may not +# be combined with a DU.Y hazard at the same grid point. 
+ +HazardsConflictDict = { + "AF.W" : ["AF.Y"], + "AF.Y" : ["AF.W"], + "AQ.Y" : ["AS.O", "AS.Y"], + "AS.O" : ["AQ.Y", "AS.Y"], + "AS.Y" : ["AQ.Y", "AS.O"], + "BH.S" : ["HU.A", "HU.W", "TR.A", "TR.W", "TY.A","TY.W", "SS.W", "SS.A"], + "BW.Y" : ["GL.W", "SR.W", "HF.W", "TR.A", "TR.W", "HU.A", "HU.W", "HU.S", + "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y"], + "BZ.W" : ["WS.A", "IS.W", "LE.W", "WS.W", "WW.Y"], + "CF.A" : ["CF.W", "CF.Y", "SS.A", "SS.W"], + "CF.W" : ["CF.A", "CF.Y", "SS.A", "SS.W"], + "CF.Y" : ["CF.W", "CF.A", "SS.A", "SS.W"], + "CF.S" : ["CF.Y", "CF.W", "CF.A", "SS.A", "SS.W"], + "DU.W" : ["DU.Y"], + "DU.Y" : ["DU.W"], + "EC.A" : ["WC.A", "EC.W", "WC.W"], + "EC.W" : ["EC.A", "WC.A", "WC.W", "WC.Y"], + "EH.A" : ["EH.W", "HT.Y"], + "EH.W" : ["EH.A", "HT.Y"], + "FA.A" : ["FF.A"], + "FA.W" : [], + "FA.Y" : [], + "FF.A" : ["FA.A"], + "FF.W" : [], + "FG.Y" : [], + "FL.A" : [], + "FL.W" : [], + "FL.Y" : [], + "FR.Y" : ["FZ.A", "FZ.W", "HZ.W", "HZ.A"], + "FW.A" : ["FW.W"], + "FW.W" : ["FW.A"], + "FZ.A" : ["FZ.W", "FR.Y", "HZ.W", "HZ.A"], + "FZ.W" : ["FZ.A", "FR.Y", "HZ.W", "HZ.A"], + "GL.A" : ["SR.W", "HF.W", "BW.Y", "TR.A", "TR.W", "HU.A", "HU.W", + "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y", "GL.W", "SR.A", + "HF.A", "SE.A", "TY.A", "TY.W"], + "GL.W" : ["SR.W", "HF.W", "BW.Y", "TR.W", "HU.W", + "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y", "GL.A", "SR.A", + "SE.A","TY.W"], + "HF.A" : ["BW.Y", "TR.A", "TR.W", "HU.A", "HU.W", "GL.A", "SR.A", + "HF.W", "SE.A", "TY.A", "TY.W"], + "HF.W" : ["GL.W", "SR.W", "BW.Y", "TR.A", "TR.W", "HU.A", "HU.W", + "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y", "GL.A", "SR.A", + "HF.A", "SE.A", "TY.W"], + "HT.Y" : ["EH.A", "EH.W"], + "HU.A" : ["HF.W", "BW.Y", "TR.A", "HU.W", "HU.S", + "GL.A", "SR.A", "HF.A", "SE.A"], + "HU.S" : ["TR.A", "TR.W", "HU.A", "HU.W", "TY.A", "TY.W"], + "HU.W" : ["GL.W", "SR.W", "HF.W", "BW.Y", "TR.A", "TR.W", "HU.A", "SC.Y", + "SW.Y", "SE.W", "RB.Y", "SI.Y", "GL.A", "SR.A", "HF.A", "SE.A", + "HU.S"], + "HW.A" : ["HW.W", 
"WI.Y"], + "HW.W" : ["HW.A", "LW.Y", "WI.Y"], + "HZ.A" : ["FZ.W", "FR.Y", "FZ.A", "HZ.W"], + "HZ.W" : ["FZ.A", "FR.Y", "HZ.A", "FZ.W"], + "IS.W" : ["WS.A", "BZ.W", "WS.W", "LE.W", "WW.Y"], + "LE.W" : ["WS.A", "BZ.W", "IS.W", "WS.W", "WW.Y"], + "LO.Y" : [], + "LS.A" : ["LS.W", "LS.Y", "LS.S"], + "LS.S" : ["LS.A", "LS.Y", "LS.W"], + "LS.W" : ["LS.A", "LS.Y", "LS.S"], + "LS.Y" : ["LS.A", "LS.W", "LS.S"], + "LW.Y" : ["HW.W", "WI.Y"], + "MA.S" : [], + "MA.W" : [], + "MF.Y" : [], + "MH.W" : ["MH.Y"], + "MH.Y" : ["MH.W"], + "MS.Y" : [], + "RB.Y" : ["GL.W", "SR.W", "HF.W", "BW.Y", "TR.W", "HU.W", + "SE.W", "GL.A", "SR.A", "SE.A", "TY.W"], + "RP.S" : [], + "SC.Y" : ["GL.W", "SR.W", "SR.A", "BW.Y", "TR.W", "HU.W", + "SE.W", "SE.A", "HF.W", "GL.A", "TY.W"], + "SE.A" : ["SR.W", "HF.W", "HF.A", "BW.Y", "TR.A", "TR.W", "HU.A", "HU.W", + "SC.Y", "SW.Y", "RB.Y", "SI.Y", "GL.A", "GL.W", "SE.W", "SR.A", + "TY.A", "TY.W"], + "SE.W" : ["SR.W", "HF.W", "BW.Y", "TR.W", "HU.W", "TY.W", + "SC.Y", "SW.Y", "RB.Y", "SI.Y", "GL.A", "GL.W", "SE.A", "SR.A"], + "SI.Y" : ["SR.W", "SR.A", "BW.Y", "TR.W", "HU.W", "TY.W", + "GL.W", "GL.A", "HF.W", "SE.A", "SE.W"], + "SM.Y" : [], + "SR.A" : ["GL.A", "GL.W", "HF.A", "HF.W", "HU.W", "HU.A", "TR.W", "TR.A", + "RB.Y", "SC.Y", "SE.A", "SE.W", "SI.Y", "SR.W", "SW.Y", "TY.W", "TY.A"], + "SR.W" : ["GL.W", "HF.W", "BW.Y", "TR.W", "HU.W", "TY.W", + "SC.Y", "SW.Y", "SE.W", "SE.A", "RB.Y", "SI.Y", "GL.A", "SR.A"], + "SS.A" : ["CF.A", "CF.W", "CF.Y", "SS.W"], + "SS.W" : ["CF.A", "CF.W", "CF.Y", "SS.A"], + "SU.W" : ["SU.Y"], + "SU.Y" : ["SU.W"], + "SV.A" : ["TO.A"], + "SV.W" : [], + "SW.Y" : ["GL.W", "SR.W", "HF.W", "BW.Y", "TR.W", "HU.W", "TY.W", + "SE.W", "GL.A", "SR.A", "SE.A"], + "TO.A" : ["SV.A"], + "TO.W" : [], + "TR.A" : ["HF.W", "TR.W", "HU.A", "HU.S", "HU.W","TY.A", "TY.W", + "GL.A", "SR.A", "HF.A", "SE.A"], + "TR.W" : ["GL.W", "SR.W", "HF.W", "BW.Y", "TR.A", "HU.W", "HU.S", + "SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y", "TY.W", + "GL.A", "SR.A", "HF.A", 
"SE.A"], + "TS.A" : ["TS.W", "TS.Y"], + "TS.W" : ["TS.A", "TS.Y"], + "TS.Y" : ["TS.A", "TS.W"], + "TY.A" : ["TR.A", "TY.W", "HU.S", "HF.W", "GL.A", "SR.A", "HF.A", "SE.A"], + "TY.W" : ["TY.A", "HU.S", "TR.A", "TR.W", "GL.A", "SR.A", "HF.A", "SE.A" + "GL.W", "SR.W", "HF.W", "BW.Y","SC.Y", "SW.Y", "SE.W", "RB.Y", "SI.Y"], + "UP.W" : ["TR.A", "TR.W", "HU.A", "HU.S", "HU.W", "UP.Y"], + "UP.Y" : ["TR.A", "TR.W", "HU.A", "HU.S", "HU.W", "UP.W"], + "WC.A" : ["WC.Y", "WC.W"], + "WC.W" : ["WC.A", "WC.Y"], + "WC.Y" : ["WC.A","WC.W"], + "WI.Y" : ["HW.A", "HW.W", "LW.Y"], + "WS.A" : ["BZ.W", "IS.W", "WS.W", "LE.W", "WW.Y"], + "WS.W" : ["WS.A", "BZ.W", "IS.W", "LE.W", "WW.Y"], + "WW.Y" : ["WS.A", "BZ.W", "IS.W", "WS.W", "LE.W"], + "ZF.Y" : [], + } + +########################## END OF CONFIGURATION SECTION ######################## + +class Procedure(SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss = dbss + + ## + # Get the list of loaded temporary hazard parms + # @return: Temporary hazard parm names, i.e., ["hazAFY"] + # @rtype: List of Strings + def getHazardParmNames(self): + parms = self.loadedParms() + hazParms = [] + for weName, level, dbID in parms: + if string.find(weName, "haz") == 0: + # TODO: Why is this back/forth xform needed? 
+ key = self._hazUtils._tempWENameToKey(weName) + index = string.find(key, ":") + if index != -1: + mkey = key[0:index] + segNum = key[index+1:] + else: + mkey = key + segNum = "" + + # append the hazard and a description + parmName = "haz" + key + parmName = string.replace(parmName, ".", "") + parmName = string.replace(parmName, ":", "") + hazParms.append(parmName) + + return hazParms + + ## + # Unload (delete) all the temporary hazards + def removeTempHazards(self): + parms = self.loadedParms() + + toRemovePairs = [] + for weName, level, dbID in parms: + if string.find(weName, "haz") == 0: + toRemovePairs.append((weName, level)) + self.unloadWEs(MODEL, toRemovePairs) + + return + + ## + # The action performed when the user opts to cancel a merge. + # This was a callback under Tcl/tk; now displayDialog invokes + # it directly. + def cancelCommand(self): + LogStream.logEvent("MergeHazards: cancel") + return + + ## + # The action performed when the user opts to continue a merge. + # This was a callback under Tcl/tk; now displayDialog invokes + # it directly. + def continueCommand(self): + LogStream.logEvent("MergeHazards: continue") + parm = self.getParm(MODEL, ELEMENT, LEVEL) + parm.setMutable(True) + self.mergeHazardGrids() + return + + ## + # Displays a dialog box and queries the user to continue to merge or + # abort the merge + def displayDialog(self, message): + from MessageBox import MessageBox + messageBox = MessageBox(style=MessageBox.ICON_WARNING) + messageBox.setText("MakeHazard") + messageBox.setMessage(message) + messageBox.setButtonLabels(["Continue Merge", "Cancel Merge"]) + messageBox.setDefaultIndex(1) + if (messageBox.open() == 0): + self.continueCommand() + else: + self.cancelCommand() + + return + + ## + # Returns the set of hazParms grids that overlap with the specified + # timeRange. 
+ # @param hazParms: Hazard parm names to check + # @type hazParms: Sequence of string + # @param timeRange: The time range to check for overlap with + # @type timeRange: Python TimeRange + # @return: Byte grids and keys of the overlapping parms + # @rtype: 2-tuple: list of byte arrays, list of list of strings + def getOverlappingHazGrids(self, hazParms, timeRange): + byteGridList = [] + keyList = [] + for hazParm in hazParms: + trList = self._hazUtils._getWEInventory(hazParm) + for tr in trList: + if tr.overlaps(timeRange): + byteGrid, hazKey = self.getGrids(MODEL, hazParm, LEVEL, + tr, mode="First") + if isinstance(hazKey, str): + hazKey = eval(hazKey) + byteGridList.append(byteGrid) + keyList.append(hazKey) + + return byteGridList, keyList + + ## + # Returns the first non-None key it finds in the keyList + # @param keyList: Keys to search + # @type keyList: Sequence of string + # @return: First key that is not "" + # @rtype: string + def getHazardKey(self, keyList): + for k in keyList: + if k != "": + return k + + ## + # Checks the specified hazard grids to see if they are conflicting + # Each grid is a tuple (byteGrid, key). Uses the configurable + # HazardConflictDict to determine whether two hazards can be combined + # at the same grid point. Returns an empty list if no conflict or + # the list of hazards if they do. + # + # This method should really only be used internally; it assumes that + # there is at most one key other than "", and that it contains + # a single subkey. 
+ # + # @param hazGrid1: The first hazard grid + # @type hazGrid1: 2-tuple: numpy array of int8, list of String + # @param hazGrid2: The second hazard grid + # @type hazGrid2: 2-tuple: numpy array of int8, list of String + # @return: conflicting hazard names or empty list + # @rtype: list + def conflictingHazards(self, hazGrid1, hazGrid2): + byteGrid1, hazKey1 = hazGrid1 + byteGrid2, hazKey2 = hazGrid2 + + key1 = self.getHazardKey(hazKey1) + key2 = self.getHazardKey(hazKey2) + phenSig1 = key1[0:4] # remove the etn + phenSig2 = key2[0:4] + + keyConflict = False + if phenSig1 == phenSig2 and key1 != key2: + keyConflict = True + elif phenSig1 in HazardsConflictDict: + if phenSig2 in HazardsConflictDict[phenSig1]: + keyConflict = True + + if keyConflict: + # calculate the overlap, adding the grids together will tell us if + # there is any overlap. Any grid points > 1 are overlapped + totalGrid = byteGrid1 + byteGrid2 + overlapMask = numpy.greater(totalGrid, 1) + if numpy.any(overlapMask): + return [key1, key2] + + return [] + + ## + # See if there are any temporary hazards for the same position and time + # that conflict with one another. + # + # @param hazParms: Temporary hazard parm names to check. 
+ # @type hazParms: sequence of string + # @return: The first conflict, or None if there are no conflicts + # @rtype: 2-tuple(TimeRange, list of string) or NoneType + def checkForHazardConflicts(self, hazParms): + timeList = [] + for hazParm in hazParms: + trList = self._hazUtils._getWEInventory(hazParm) + for tr in trList: + if tr.startTime().unixTime() not in timeList: + timeList.append(tr.startTime().unixTime()) + if tr.endTime().unixTime() not in timeList: + timeList.append(tr.endTime().unixTime()) + + timeList.sort() # sort the list + + for t in range(len(timeList) - 1): + start = timeList[t] + end = timeList[t+1] + timeRange = self._hazUtils._makeTimeRange(start, end) + byteGridList = [] + keyList = [] + byteGridList, keyList = self.getOverlappingHazGrids(hazParms, timeRange) + # compare each grid to all other grids at this timeRange + for firstIndex in range(len(byteGridList) - 1): + for secondIndex in range(firstIndex + 1, len(byteGridList)): + grid1 = (byteGridList[firstIndex], keyList[firstIndex]) + grid2 = (byteGridList[secondIndex], keyList[secondIndex]) + conflictList = self.conflictingHazards(grid1, grid2) + if conflictList != []: + return (timeRange, conflictList) + + # if we made it to here, all is well + return None + + ## + # Perform checks to see if it's OK to merge hazards. If there are no conflicting + # locks or incompatible hazards, do the merge. If there are conflicting locks, + # generate a status bar message and quit. If there incompatible + # hazards, show a warning and let the user decide whether to continue. + def checkForMerge(self): + # get the hazards selected by the forecaster + hazParms = self.getHazardParmNames() + + # check for empty list of hazards + if hazParms == []: + self.statusBarMsg("No temporary grids to merge.", "S") + return + + # FIXME: Lock race condition + # check for conflicting locks + if self._hazUtils._conflictingLocks(hazParms): + self.statusBarMsg("There are conflicting locks. 
" + + "Please resolve these before merging any hazards", "S") + return + + conflicts = self.checkForHazardConflicts(hazParms) + if conflicts is None: + # if no conflicts, merge the grids + # We made the hazards parm immutable when we separated hazard grids. + # It has to be made mutable to do the merge. + parm = self.getParm(MODEL, ELEMENT, LEVEL) + parm.setMutable(True) + self.mergeHazardGrids() + else: + haz1 = string.replace(conflicts[1][0], ".", "") + haz2 = string.replace(conflicts[1][1], ".", "") + timeRange = str(conflicts[0]) + msg = "Hazard conflict detected!\n\n" + msg += "Time: " + timeRange + " \n\n" + msg += "with Hazard grids haz" + haz1 + " and haz" + haz2 + ".\n" + + LogStream.logEvent("Merge conflict: "+ msg) + self.displayDialog(msg) + + return + + ## + # Performs the actual merge of the temp hazards grids into the "Hazards" grid. + def mergeHazardGrids(self): + # get the hazards selected by the forecaster + hazParms = self.getHazardParmNames() + + self._hazUtils._removeAllHazardsGrids() + + for hazParm in hazParms: + trList = self._hazUtils._getWEInventory(hazParm) + + for tr in trList: + byteGrid, hazKey = self.getGrids(MODEL, hazParm, LEVEL, tr, + mode="First") + if isinstance(hazKey, str): + hazKey = eval(hazKey) + + uniqueKeys = self._hazUtils._getUniqueKeys(byteGrid, hazKey) + for uKey in uniqueKeys: + if uKey == "": + continue + subKeys = self._hazUtils._getSubKeys(uKey) + for subKey in subKeys: + # make the mask - find all areas that contain the subKey + mask = self.empty(bool) + for haz in hazKey: + if string.find(haz, subKey) >= 0: + hazIndex = self.getIndex(haz, hazKey) + mask[numpy.equal(byteGrid, hazIndex)] = True + + # make the grid + self._hazUtils._addHazard(ELEMENT, tr, subKey, mask) + LogStream.logEvent("merge: " + \ + str(self._hazUtils._printTime(tr.startTime().unixTime())) + " " + \ + str(self._hazUtils._printTime(tr.endTime().unixTime())) + " " + \ + subKey + "\n") + + self.removeTempHazards() + + return + + ## + # The main 
entry point of the procedure. + def execute(self): + self.setToolType("numeric") + + self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) + + # see if the Hazards WE is loaded in the GFE, if not abort the tool + if not self._hazUtils._hazardsLoaded(): + self.statusBarMsg("Hazards Weather Element must be loaded in " +\ + "the GFE before running MergeHazards", "S") + self.cancel() + + self.checkForMerge() + return + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MergeWFOEdits.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MergeWFOEdits.py index 3c89182f64..ad73ba7afa 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MergeWFOEdits.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/MergeWFOEdits.py @@ -1,207 +1,207 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# MergeWFOEdits - Version 3.1 -# -# Author: LeFebvre, Santos -# -# ---------------------------------------------------------------------------- -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- ------------------------------------------ -# Apr 13, 2016 LeFebvre Code cleanup and refactor -# Sep 10, 2016 Santos Fix fetching of split wfo grids from -# ProposedSSwfo ISC db -# Sep 19, 2016 19293 randerso Initial baseline check in -# -######################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import TimeRange -import TropicalUtility -import numpy as np - - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["None"] - -bogusWFO = ["XYZYX"] # This allows no default selected for the check buttons when the GUI appears -VariableList = [ - ("WFOs", bogusWFO, "check", ["CAR", "GYX", "BOX", "OKX", "PHI", "LWX", - "AKQ", "MHX", "ILM", "CHS", "JAX", "MLB", - "MFL", "KEY", "TBW", "TAE", "MOB", "LIX", - "LCH", "HGX", "CRP", "BRO"]), - ] - -class Procedure (TropicalUtility.TropicalUtility): - - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - # Fetch the ProposedSSwfo grid comprising the last grid where any site has a hazard defined. - def fetchProposedWFOGrid(self, cwaList): - - # Fetch the WFO ISC grid inventory - weName = "ProposedSSwfo" - wfoTRs = self.GM_getWEInventory(weName, "ISC") - - # No grids, time to go home - if len(wfoTRs) == 0: - self.statusBarMsg("No " + weName + " grids found to merge into ProposedSS.", "U") - return None - - cGrid = self.empty(np.int8) - cKeys = [""] - - wfoTRs.reverse() # process the grids latest to oldest - - foundCWAs = [] - for tr in wfoTRs: - #print tr - wfoSSGrid = self.getGrids("ISC", weName, "SFC", tr) - hazGrid, hazKeys = wfoSSGrid - #print "keys:", hazKeys - for cwa in cwaList: - eacwa = "ISC_" + cwa - #print cwa - - # Skip cwas we've already found - if cwa in foundCWAs: - continue - - ea = self.getEditArea(eacwa) - if not ea: - self.statusBarMsg("Edit area for CWA " + cwa + " not found.", "S") - continue - - cwaMask = self.encodeEditArea(ea) - for hazKey in hazKeys: - if hazKey == "": - continue - print "hazKey", hazKey - hazIndex = self.getIndex(hazKey, hazKeys) - hazMask = (hazGrid == hazIndex) - overlap = hazMask & cwaMask - if overlap.any(): - newIndex = self.getIndex(hazKey, cKeys) - print "added key now:", cKeys - cGrid[overlap] = newIndex - if not cwa in foundCWAs: - foundCWAs.append(cwa) - - return cGrid, cKeys - - def execute(self, editArea, varDict): - - cwas = varDict["WFOs"] - if cwas == bogusWFO: - 
self.statusBarMsg("Please select a valid WFO.", "U") - return - - # Make a time range to find the initial storm surge hazards - # Truncate to top of last hour - start = int(self._gmtime().unixTime() / 3600) * 3600 - end = start + 48 * 3600 # 2 days later - timeRange48Hour = self.GM_makeTimeRange(start, end) - - # Fetch the proposed grid and the WFO ISC grid - ssTRs = self.GM_getWEInventory("ProposedSS","Fcst" ) - if len(ssTRs) > 0: - propSSGrid = self.getGrids("Fcst", "ProposedSS", "SFC", ssTRs[-1]) - else: - self.statusBarMsg("No PropsedSS grids found.", "U") - return - - # Fetch InitialSS grid - ssTRs = self.GM_getWEInventory("InitialSS", "Fcst" ) - if len(ssTRs) > 0: - initSSGrid = self.getGrids("Fcst", "InitialSS", "SFC", ssTRs[-1]) - else: - self.statusBarMsg("No InitialSS grids found.", "U") - return - - # Replaced above with this code to fetch a combined Hazards grid - wfoSSGrid = self.fetchProposedWFOGrid(cwas) - - # Calculate the overlap of selected WFO's ISC areas and selected area - selectedMask = self.encodeEditArea(editArea) - - # If no points selected, select all points - if not selectedMask.any(): - selectedMask = self.newGrid(True, np.bool) - - # Make an empty mask grid, so we can use it to add up all the WFO areas - cwaMask = self.newGrid(False, np.bool) - - # Process all the selected CWAs - for cwa in cwas: - - # Get the ISC edit area mask for this office - mask = self.encodeEditArea("ISC_" + cwa.upper()) - cwaMask = cwaMask | mask - - # Check for conflicts with just this CWA - if self.anyHazardConflicts(initSSGrid, wfoSSGrid, mask): - self.statusBarMsg( - "WFO " + cwa + " conflicts with the InitialSS grid." 
+ \ - " Check for discrepancy in either event code or ETN in " + \ - "incoming WFO ISC grids.", "S") - return - - # Use the intersection of the CWA areas and the selected area - selectedMask = selectedMask & cwaMask - - # Further reduce the area to just the StormSurge editArea - ssMask = self.encodeEditArea("StormSurgeWW_EditArea") - selectedMask = selectedMask & ssMask - - # Now check for conflicts with each WFO and the InitialSS grid. - wfoKeys = wfoSSGrid[1] - noneWfoIndex = self.getIndex("", wfoKeys) - - # Finally merge the WFO ISC grids into the ProposedSS grid - wfoGrid, wfoKeys = wfoSSGrid - ssGrid, ssKeys = propSSGrid - ssGrid = ssGrid.copy() # make a copy so we don't affect the original - ssKeys = list(ssKeys) # make a copy - - # Process all the WFO proposed hazards - for wfoIndex, wfoKey in enumerate(wfoKeys): - - # Identify where this WFO hazard intersects the selected area - mask = (wfoGrid == wfoIndex) & selectedMask - - # Get the index of this hazard key within the NHC hazards - ssIndex = self.getIndex(wfoKey, ssKeys) - - # Convert the WFO key index to an NHC key index - ssGrid[mask] = ssIndex - - # Put the merged ProposedSS back together - proposedGrid = (ssGrid, ssKeys) - - # Delete any existing collaboration difference grids - self.unloadWE("Fcst", "CollabDiffSS", "SFC") - - # Delete all versions of this weather element - self.deleteCmd(["ProposedSS"], TimeRange.allTimes()) - - # Create the grid, so we can see it - self.createGrid("Fcst", "ProposedSS", "DISCRETE", proposedGrid, - timeRange48Hour) - - # Calculate the new difference grid for this time range - self.calcDiffGrid(initSSGrid, proposedGrid, "CollabDiffSS", - timeRange48Hour) - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# MergeWFOEdits - Version 3.1 +# +# Author: LeFebvre, Santos +# +# ---------------------------------------------------------------------------- +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- ------------------------------------------ +# Apr 13, 2016 LeFebvre Code cleanup and refactor +# Sep 10, 2016 Santos Fix fetching of split wfo grids from +# ProposedSSwfo ISC db +# Sep 19, 2016 19293 randerso Initial baseline check in +# +######################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +import TimeRange +import TropicalUtility +import numpy as np + + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["None"] + +bogusWFO = ["XYZYX"] # This allows no default selected for the check buttons when the GUI appears +VariableList = [ + ("WFOs", bogusWFO, "check", ["CAR", "GYX", "BOX", "OKX", "PHI", "LWX", + "AKQ", "MHX", "ILM", "CHS", "JAX", "MLB", + "MFL", "KEY", "TBW", "TAE", "MOB", "LIX", + "LCH", "HGX", "CRP", "BRO"]), + ] + +class Procedure (TropicalUtility.TropicalUtility): + + def __init__(self, dbss): + TropicalUtility.TropicalUtility.__init__(self, dbss) + + # Fetch the ProposedSSwfo grid comprising the last grid where any site has a hazard defined. 
+ def fetchProposedWFOGrid(self, cwaList): + + # Fetch the WFO ISC grid inventory + weName = "ProposedSSwfo" + wfoTRs = self.GM_getWEInventory(weName, "ISC") + + # No grids, time to go home + if len(wfoTRs) == 0: + self.statusBarMsg("No " + weName + " grids found to merge into ProposedSS.", "U") + return None + + cGrid = self.empty(np.int8) + cKeys = [""] + + wfoTRs.reverse() # process the grids latest to oldest + + foundCWAs = [] + for tr in wfoTRs: + #print tr + wfoSSGrid = self.getGrids("ISC", weName, "SFC", tr) + hazGrid, hazKeys = wfoSSGrid + #print "keys:", hazKeys + for cwa in cwaList: + eacwa = "ISC_" + cwa + #print cwa + + # Skip cwas we've already found + if cwa in foundCWAs: + continue + + ea = self.getEditArea(eacwa) + if not ea: + self.statusBarMsg("Edit area for CWA " + cwa + " not found.", "S") + continue + + cwaMask = self.encodeEditArea(ea) + for hazKey in hazKeys: + if hazKey == "": + continue + print("hazKey", hazKey) + hazIndex = self.getIndex(hazKey, hazKeys) + hazMask = (hazGrid == hazIndex) + overlap = hazMask & cwaMask + if overlap.any(): + newIndex = self.getIndex(hazKey, cKeys) + print("added key now:", cKeys) + cGrid[overlap] = newIndex + if not cwa in foundCWAs: + foundCWAs.append(cwa) + + return cGrid, cKeys + + def execute(self, editArea, varDict): + + cwas = varDict["WFOs"] + if cwas == bogusWFO: + self.statusBarMsg("Please select a valid WFO.", "U") + return + + # Make a time range to find the initial storm surge hazards + # Truncate to top of last hour + start = int(self._gmtime().unixTime() / 3600) * 3600 + end = start + 48 * 3600 # 2 days later + timeRange48Hour = self.GM_makeTimeRange(start, end) + + # Fetch the proposed grid and the WFO ISC grid + ssTRs = self.GM_getWEInventory("ProposedSS","Fcst" ) + if len(ssTRs) > 0: + propSSGrid = self.getGrids("Fcst", "ProposedSS", "SFC", ssTRs[-1]) + else: + self.statusBarMsg("No PropsedSS grids found.", "U") + return + + # Fetch InitialSS grid + ssTRs = self.GM_getWEInventory("InitialSS", 
"Fcst" ) + if len(ssTRs) > 0: + initSSGrid = self.getGrids("Fcst", "InitialSS", "SFC", ssTRs[-1]) + else: + self.statusBarMsg("No InitialSS grids found.", "U") + return + + # Replaced above with this code to fetch a combined Hazards grid + wfoSSGrid = self.fetchProposedWFOGrid(cwas) + + # Calculate the overlap of selected WFO's ISC areas and selected area + selectedMask = self.encodeEditArea(editArea) + + # If no points selected, select all points + if not selectedMask.any(): + selectedMask = self.newGrid(True, np.bool) + + # Make an empty mask grid, so we can use it to add up all the WFO areas + cwaMask = self.newGrid(False, np.bool) + + # Process all the selected CWAs + for cwa in cwas: + + # Get the ISC edit area mask for this office + mask = self.encodeEditArea("ISC_" + cwa.upper()) + cwaMask = cwaMask | mask + + # Check for conflicts with just this CWA + if self.anyHazardConflicts(initSSGrid, wfoSSGrid, mask): + self.statusBarMsg( + "WFO " + cwa + " conflicts with the InitialSS grid." + \ + " Check for discrepancy in either event code or ETN in " + \ + "incoming WFO ISC grids.", "S") + return + + # Use the intersection of the CWA areas and the selected area + selectedMask = selectedMask & cwaMask + + # Further reduce the area to just the StormSurge editArea + ssMask = self.encodeEditArea("StormSurgeWW_EditArea") + selectedMask = selectedMask & ssMask + + # Now check for conflicts with each WFO and the InitialSS grid. 
+ wfoKeys = wfoSSGrid[1] + noneWfoIndex = self.getIndex("", wfoKeys) + + # Finally merge the WFO ISC grids into the ProposedSS grid + wfoGrid, wfoKeys = wfoSSGrid + ssGrid, ssKeys = propSSGrid + ssGrid = ssGrid.copy() # make a copy so we don't affect the original + ssKeys = list(ssKeys) # make a copy + + # Process all the WFO proposed hazards + for wfoIndex, wfoKey in enumerate(wfoKeys): + + # Identify where this WFO hazard intersects the selected area + mask = (wfoGrid == wfoIndex) & selectedMask + + # Get the index of this hazard key within the NHC hazards + ssIndex = self.getIndex(wfoKey, ssKeys) + + # Convert the WFO key index to an NHC key index + ssGrid[mask] = ssIndex + + # Put the merged ProposedSS back together + proposedGrid = (ssGrid, ssKeys) + + # Delete any existing collaboration difference grids + self.unloadWE("Fcst", "CollabDiffSS", "SFC") + + # Delete all versions of this weather element + self.deleteCmd(["ProposedSS"], TimeRange.allTimes()) + + # Create the grid, so we can see it + self.createGrid("Fcst", "ProposedSS", "DISCRETE", proposedGrid, + timeRange48Hour) + + # Calculate the new difference grid for this time range + self.calcDiffGrid(initSSGrid, proposedGrid, "CollabDiffSS", + timeRange48Hour) + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/NDFD_QPF_Checks.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/NDFD_QPF_Checks.py index c147193670..50e31374f3 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/NDFD_QPF_Checks.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/NDFD_QPF_Checks.py @@ -1,1326 +1,1326 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# NDFD_QPF_Checks (was SnowAmtQPFPoPWxCheck) -# -# Author: Jay Smith, WFO Fairbanks, jay.smith@noaa.gov, 907-458-3721 -# SnowAmtQPFPoPWxCheck Incarnation -# Version: 1.0.0, 09/14/2006 - Initial version -# 1.0.1, 10/12/2006 - Added PoP/QPF check at request of DSPAC -# 1.0.2, 10/18/2006 - Changed PoP/QPF check to treat the PoP as -# floating. Instead of checking each individual PoP grid -# against its corresponding QPF grid, the max of all the -# PoP grids overlapping a QPF grid will be checked. -# 1.1.0, 01/25/2007 - Added options to choose which checks to run. -# Reorganized code so that each check is its own method. -# Added a check for QPF and Wx. Added highlighting for the -# created temporary grids. 
-# 1.1.1, 02/01/2007 - Changed the SnowAmt/Wx check to return -# consistent results for SnowAmt > 0 and Wx grid containing -# S, SW, or IP regardless of whether the frozen precip is -# mixed with freezing and/or liquid precip. -# 1.2.0, 02/13/2007 - Added a configuration option to provide a CWA -# edit area to run the procedure over. A bad edit area or no -# edit area will result in running over the whole domain. -# Modified the SnowAmt/Wx and QPF/Wx checks to handle two -# cases. Case 1: The SnowAmt/QPF grid is 6-hr long and starts -# at 00, 06, 12, or 18 UTC. Then only one of the corresponding -# Wx grids has to meet the consistency rule. Case 2: The -# SnowAmt/QPF grid does not meet the case 1 definition. Then -# all of the corresponding Wx grids must meet the consistency -# rule. -# NDFD_QPF_Checks Incarnation -# Version: 1.0.0, 04/04/2007 - The program now requires the presence of the -# following 3 grids: QPF6hr, SnowAmt6hr, and PoP12hr. The 2 -# 6-hr grids are expected to start at 00, 06, 12, or 18 UTC -# and be exactly 6 hours long. The PoP12hr grids are expected -# to start at 00 or 12 UTC and be exactly 12 hours long. -# The procedure still needs the PoP and Wx grids. With these -# grid changes, the procedure can now finally perform the last -# of the required NDFD checks. This also means there is no -# longer the need for two check cases in the two "Wx" checks. -# Now, Wx is consistent if any of its grids have the appropriate -# type. The procedure can now be run in a "quiet" mode if -# called from another procedure. Any messages generated in -# "quiet" mode will be of "R" severity so no pop-up messages -# are generated but the information is still available from the -# GFE status bar area. All error messages are templated in a -# dictionary in a separate method. This allows me to put all -# the triple-quoted strings, which ignore indentation, in one -# location. 
The code for checking for locked grids was also -# templated in its own method, which chopped about 15 lines of -# code off the front of each check method. There were a couple -# of places where I was applying the "tolerance" values -# incorrectly, which have been fixed. I dropped the -# "Inconsistent" label from all temporary grid names. I was -# making those grid names so long, they didn't actually fit -# in the spatial editor window when the grid manager was on -# the left. Temporary grid names now are just a concatenation -# of the two grids used in the check. -# Version: 1.1.0, The logic for the handling of inconsistencies in the -# SnowAmt6hr/Wx, QPF6hr/Wx, and PoP12hr/QPF6hr checks could -# result in false positive inconsistencies. This is because -# I was checking for inconsistencies on each of these grids -# individually when I needed to be checking the cumulative -# inconsistencies of these grids. With the new logic, if any -# of these checks has inconsistencies, the resulting temporary -# grid will have the same time range as the "controlling" grid, -# which is the first grid listed in each check name. Also, I -# have enforced the precision of the SnowAmt6hr and QPF6hr -# grids in the methods where they are used. SmartScript has a -# method called "around" that does this. I ran into an issue -# when this procedure was called from NDFD_QPF_Suite. I am -# unable to figure out how to uncache the QPF6hr, SnowAmt6hr, -# and PoP12hr grids after they are generated by the -# Collaborate_PoP_SnowAmt_QPF procedure. For the QPF6hr and -# SnowAmt6hr grids, this means getGrids is getting them from -# the cache, which means these float parameters have machine -# precision. This is utterly unacceptable. I have to have -# ifpServer precision. Now, I think I've ensured this. -# 1.1.1, 05/02/2007 - There have been instances of unexpected procedure -# performance if the NDFD QPF parameters are not visible in the -# grid manager when the procedure is run. 
The procedure will -# now require the existence of a weather element group which -# contains just the NDFP QPF parameters. -# 1.2.0, 05/03/2007 - Upon further review, the unexpected procedure -# performance arises when some of the NDFD QPF parameters are -# not present in the grid manager. However, I do not need to -# load a weather element group to make the parameters present. -# I can use the loadParm command on each element instead. Given -# this, the "weGroup" configuration has been removed. Also, -# some people believe the lock checking is overly stringent. -# To some extent, I agree. For the purposes of this procedure, -# other GFE users can have any of the NDFD QPF parmeteres -# locked. The user running the procedure, however, cannot have -# any of the parameters locked; i.e., that person must save -# those elements before running the procedure. -# The procedure performs the following checks: -# 1. If SnowAmt6hr present and >= 0.5 inches, then corresponding QPF6hr grids -# must be >= 0.01 inches. -# 2. If SnowAmt6hr >= 0.1 inches then at least one of the corresponding Wx -# grids must have S, SW, or IP. -# 3. If QPF6hr > 0.0, then at least one of the corresponding PoP grids must -# be > 0 -# 4. If QPF6hr > 0.0 then at least one of the corresponding Wx grids must have -# R, RW, S, SW, RS, IP, L, ZR, ZL. -# 5. If PoP12hr >= 50%, then at least one of the corresponding QPF6hr grids -# must be >= 0.0. -# For all of the checks above, if the initial threshold is not exceeded, then -# the two grids are consistent by definition. In other words: -# 1. If SnowAmt6hr < 0.5, then SnowAmt6hr and QPF6hr are always consistent. -# 2. If SnowAmt6hr < 0.1, then SnowAmt6hr and Wx are always consistent. -# 3. If QPF6hr = 0.0, then QPF6hr and PoP are always consistent. -# 4. If QPF6hr = 0.0, then QPF6hr and Wx are always consistent. -# 5. If PoP12hr < 50%, then PoP12hr and QPF6hr are always consistent. -# For the Wx checks above, only the Wx type is considered. 
-# -# ****** NOTE NOTE NOTE NOTE ****** -# The program checks the PoP12hr, QPF6hr, and SnowAmt6hr grids to make sure -# their time constraints are met. For any grid where the time constraint is -# violated, those grids are not checked. To reiterate the time constraints: -# PoP12hr: starts at either 00 or 12 UTC and is exactly 12 hours duration -# QPF6hr: starts at 00, 06, 12, or 18 UTC and is exactly 6 hours duration -# SnowAmt6hr: starts at 00, 06, 12 or 18 UTC and is exactly 6 hours duration -# ****** NOTE NOTE NOTE NOTE ****** -# -# If discrepancies are found, then the "bad" grids will be highlighted. -# Temporary grids showing where the discrepancies occur will be created and -# also highlighted. -# -# I've written this code such that it's optimized to minimize memory usage -# (at least I think I've done that). As a result, it's not particularly -# optimized for ifpServer database access. In fact, I retrieve the various -# grids from the ifpServer database many times during the procedure's run. -# This will have an impact on how fast the procedure runs (it'll run slower -# than if I had optimized for ifpServer database access). The choice to favor -# memory optimization comes from my belief that there are still "memory leak" -# problems in the GFE and that the consequences of those problems will be most -# manifest when this procedure is most likely to be run (near the end of the -# shift). Funky memory problems are a prime cause of funky application -# behavior like application crashes or spontaneous logouts. So, this procedure -# basically reads a grid into memory, keeps it as long as it's needed, and -# then discards it. -# -# Finally, this procedure is also intended to provide an example to other -# developers of how to write and document code. I have reservations as to how -# well I've succeeded at that task. The code is heavily documented, probably -# excessively so. Also, it's not as well as organized as it could be. 
As you -# look through the various methods, it should become quickly apparent that -# there is a lot of repeated code. I've consciously left some the code this -# way in the hopes that it will be easier to understand by more novice -# programmers and because the code hasn't quite grown to the point where -# updating the repeating code is onerous or overly error-prone. It would be -# better to capture the repeating code in separate methods, but keeping track -# of the where you are in the code becomes harder the more you have to jump -# around from method to method. Anyone who has ever worked with the text -# formatters can sympathize with that. As with all things, there are trade- -# offs involved. UPDATE: 4/3/2007 - Starting with the first NDFD_QPF_Checks -# version, I consolidated quite a bit of the repeating code into separate -# methods. So, there's some improvement on that front. -# -# Acknowledgement: -# Many of the Python "tricks" I use in this procedure I learned from -# reading/perusing the following book: Python Cookbook, Alex Martelli & -# David Ascher, eds., 2002, O'Reilly and Associates -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -MenuItems = ["Consistency"] - -VariableList = [ - ('Check_Cleanup', 'Check', 'radio', ['Check', 'Cleanup']), - ('Run SnowAmt6hr/QPF6hr Check?', ['Yes'], 'check', ['Yes']), - ('Run SnowAmt6hr/Wx Check?', ['Yes'], 'check', ['Yes']), - ('Run QPF6hr/PoP Check?', ['Yes'], 'check', ['Yes']), - ('Run QPF6hr/Wx Check?', ['Yes'], 'check', ['Yes']), - ('Run PoP12hr/QPF6hr Check?', ['Yes'], 'check', ['Yes']), - ('If "Cleanup" is selected, then only cleanup actions will run.\nNo checks will be made, regardless of the above settings.', '', 'label'), - ] - -#### Config section -# Both the QPF and SnowAmt grids have values which are floating point -# numbers. This means comparisons must use a tolerance value. In other -# words, 0.5 may be represented in machine numbers as 0.49999999999 or -# 0.500000000001. By specifying a tolerance value, we account for the -# vagaries of machine representation of floating point numbers while -# keeping the precision of the comparisons to acceptable levels. Depending -# on the comparison being done, the tolerance value will be added to or -# subtracted from the comparison value to allow for machine error in the -# floating point number representation. -# By default in the GFE, QPF precision is to the nearest one-hundredth while -# SnowAmt precision is to the nearest tenth. -qpfTol = 0.00001 # 1/100,000 tolerance vs 1/100 precision -snowAmtTol = 0.0001 # 1/10,000 tolerance vs 1/10 precision -# Inconsistent grid highlight color. One size fits all. To turn off -# highlighting, set the variable to the empty string, ''. -inconGridColor = 'red' -# Temporary grid highlight color. One size fits all. To turn off highlighting, -# set the variable to the empty string, ''. -tempGridColor = 'orange' -# Name of CWA edit area to use instead of running the procedure over the -# whole domain. Set to the empty string, '', if you want the procedure to -# always run over the whole domain. 
If the procedure has a problem with the -# edit area you provide, it will run over the whole domain. You should probably -# choose an edit area that is slightly larger than your entire CWA. It's -# possible that when mapping your GFE grids to NDFD grids that the NDFD thinks -# some GFE grid cells are in your CWA that the GFE does not think are in your -# CWA. Using an edit area slightly larger than the CWA, like the ISC_Send_Area -# which is the mask used when sending grids to the NDFD, should eliminate the -# possibibilty of the NDFD intermittently flagging CWA border "points" as -# inconsistent. Note: running the procedure over a subset of the entire GFE -# domain does not really provide any performance gains. Given the way the -# underlying array data structure works, calculations are almost always made -# at every single grid point first and then a mask is applied to limit the -# meaningful results to the edit area. For the purposes of this procedure, the -# values outside the edit area are set to the appropriate "consistent" result. -# The real benefit of this option is it limits the inconsistent results to the -# areas the forecaster really cares about, which should lessen the workload of -# using this procedure. Marine Offices: Make sure the edit area provided -# includes your marine zones. -cwaEditArea = 'ISC_Send_Area' -#### Config section end - -import SmartScript -from numpy import * - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def __cleanup(self, timeRange): - # Remove any temporary grids created previously. Turn off any - # previous highlighting. - for element in ( - 'SnowAmt6hrQPF6hr', 'SnowAmt6hrWx', 'QPF6hrPoP', 'QPF6hrWx', - 'PoP12hrQPF6hr'): - try: - # From SmartScript - self.unloadWE('Fcst', element, 'SFC') - except: - # A failure is almost certainly no grids to unload. - pass - # Turn off any highlights. 
From SmartScript - self.highlightGrids( - 'Fcst', 'SnowAmt6hr', 'SFC', timeRange, inconGridColor, on=0) - self.highlightGrids( - 'Fcst', 'QPF6hr', 'SFC', timeRange, inconGridColor, on=0) - self.highlightGrids( - 'Fcst', 'Wx', 'SFC', timeRange, inconGridColor, on=0) - self.highlightGrids( - 'Fcst', 'PoP', 'SFC', timeRange, inconGridColor, on=0) - self.highlightGrids( - 'Fcst', 'PoP12hr', 'SFC', timeRange, inconGridColor, on=0) - return - - def __checkConfigValueTypes(self): - # Make sure the values provided in the configuration section are the - # correct type. - # Later in the code are two methods devoted to creating error - # messages. The error messages here could pop up in quite a large - # number of different combinations, which makes capturing them in the - # later methods very complex. Rather than do that and considering - # that these error messages should never appear once the procedure - # is correctly set up, I decided to leave them here. There always has - # to be an exception. :-) - import types - message = '' - badValues = False - if not type(qpfTol) is types.FloatType: - message = '%sThe "qpfTol" variable is not defined as a floating point value. Please contact your IFPS focal point to fix this.\n' % message - badValues = True - if not type(snowAmtTol) is types.FloatType: - message = '%sThe "snowAmtTol" variable is not defined as a floating point value. Please contact your IFPS focal point to fix this.\n' % message - badValues = True - if not type(inconGridColor) is types.StringType: - message = '%sThe "inconGridColor" variable is not defined as a string value. Please contact your IFPS focal point to fix this.\n' % message - badValues = True - if not type(tempGridColor) is types.StringType: - message = '%sThe "tempGridColor" variable is not defined as a string value. 
Please contact your IFPS focal point to fix this.\n' % message - badValues = True - if not type(cwaEditArea) is types.StringType: - message = '%sThe "cwaEditArea" variable is not defined as a string value. Please contact your IFPS focal point to fix this.\n' % message - badValues = True - if badValues: - message = '%sYou will not be able to run the procedure until the problem is corrected.' % message - # The next two commands are from SmartScript - self.statusBarMsg(message, 'U') - self.cancel() - return - - def __checkLockedStatus(self, elementList): - # There can be a significant difference between the values stored - # in memory and the values returned from the database. This is because - # when values are saved, the element's precision (as defined in - # serverConfig.py/localConfig.py) is enforced. Values in memory do not - # have the element's precision enforced; in fact, they have the - # machine precision of the underlying data type. - # At the beginning of each check method, a call to this method is - # made to make sure the grids are saved. A check method will not run - # if the grids it's to check are not saved. This method will return - # a list of boolean values indicating if the elements are locked by me - # and then if the elements are locked by other. - # The lockedByMe and lockedByOther methods are from SmartScript - lockedByMe = [] - lockedByOther = [] - for element in elementList: - if self.lockedByMe(element, 'SFC'): - lockedByMe.append(True) - else: - lockedByMe.append(False) -## if self.lockedByOther(element, 'SFC'): -## lockedByOther.append(True) -## else: -## lockedByOther.append(False) - lockedByOther.append(False) - return lockedByMe + lockedByOther - - def __getMsgSeverity(self, severity): - # For calls to self.statusBarMsg where I intended the severity to be - # something other than 'R', this method is now called to determine - # what the severity should be. 
This procedure can be called from - # another procedure in such a way as to suppress the pop-up type of - # status bar messages. This is done by passing in a varDict with a - # 'Quiet' key which evaluates to 'True'. For those situations, the - # procedure defers to the calling program and turns any non 'R' - # severities into 'R' severity. This allows the message to be - # communicated still to the GFE session, but only via the 'Status' - # line area of the GFE. When run interactively from the GFE, the - # severity this procedure assigns to a message will be used. - # This method is actually invoked in the call to statusBarMsg in - # place of the severity string. As long as the entry for the severity - # in statusBarMsg evaluates to a string type, statusBarMsg will be - # 'happy'. - if self._quiet: - return 'R' - return severity - - def __checkTC(self, element, gridTR): - # The QPF6hr, SnowAmt6hr, and PoP12hr grids have specific time - # constraints that the respective grids must adhere to. In other - # words, it's not acceptable to this procedure for the QPF6hr grid, - # for example, to be stretched to 12 hours long. This method makes - # sure each of the grids exactly conforms to the time constraint - # defintion. The method returns True if good, False if bad. If, for - # some reason, the method gets called with some other element, the - # method will return True. - if element == 'QPF6hr' or element == 'SnowAmt6hr': - startHourTup = (0, 6, 12, 18) - goodDuration = 6 * 3600 - elif element == 'PoP12hr': - startHourTup = (0, 12) - goodDuration = 12 * 3600 - else: - return True - - if gridTR.startTime().hour in startHourTup and \ - gridTR.duration() == goodDuration: - return True - return False - - def _runSnowAmt6hrQPF6hrCheck(self, timeRange): - # This method implements the check that if SnowAmt6hr >= 0.5, then - # QPF6hr must be >= 0.01. - - # If there are locks, post urgent messages and return from the method. 
- snowLockMe, qpfLockMe, snowLockOther, qpfLockOther = \ - self.__checkLockedStatus(['SnowAmt6hr', 'QPF6hr']) - if snowLockMe or qpfLockMe or snowLockOther or qpfLockOther: - # Something's locked, create messages. - self._makeLockMsgs( - snowLockMe, qpfLockMe, snowLockOther, qpfLockOther, - 'SnowAmt6hr', 'QPF6hr', 'snowLockMe', 'qpfLockMe', - 'snowLockOther', 'qpfLockOther', 'SnowAmt6hr/QPF6hr') - return - - # Make sure there are actually SnowAmt6hr grids in the time range. - # The self.getGridInfo command will return an empty list if there - # are no grids in the time range. This is more efficient than using - # self.getGrids with mode='First' and noDataError=0. - # The getGridInfo method is from SmartScript - snowAmtInfoList = self.getGridInfo( - 'Fcst', 'SnowAmt6hr', 'SFC', timeRange) - if snowAmtInfoList == []: - message = self._getMsg( - 'noGrids', element='SnowAmt6hr', timeRange=timeRange, - method='SnowAmt6hr/QPF6hr') - # The statusBarMsg method is from SmartScript - self.statusBarMsg(message, self.__getMsgSeverity('U')) - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. - return - # One might ask why I don't just return the result of self.getGrids - # to a variable and iterate over that. I'm trying to minimize the - # memory footprint of the procedure. Reading all the grids into a - # variable could be a fairly large memory hit. I believe the construct - # below only reads one SnowAmt6hr grid at a time into memory, the one - # that's being checked. (But I can't find the reference that supports - # my belief.) By using the cache=0 switch on all the self.getGrids - # command, I prevent the GFE from saving the grids into memory for me. - # (At least, that's what I think the cache=0 switch does. The - # SmartScript documentation is a little vague on this point.) 
- # The Python builtin command enumerate loops over an iterable object - # and returns a 2-tuple containing the current index of the - # iteration and the object at that index. In cases where I need - # both the index and the object, I think this construct is more - # elegant than: - # for i in xrange(len(iterableObject)): - # object = iterableObject[i] - for snowAmtIndex, snowAmtGrid in enumerate(self.getGrids( - 'Fcst', 'SnowAmt6hr', 'SFC', timeRange, mode='List', cache=0)): - gridTR = snowAmtInfoList[snowAmtIndex].gridTime() - # Check to make sure the grid meets it's time constraints. - if not self.__checkTC('SnowAmt6hr', gridTR): - message = self._getMsg( - 'badTC', element='SnowAmt6hr', timeRange=gridTR) - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # around is from SmartScript - snowAmtGrid = around(snowAmtGrid, 1) - # The greater_equal method is from Numeric. For the given array - # and threshold, a new array of the same dimensions as the input - # array is returned. The new array has the value 1 where the - # input array was greater than or equal to the threshold and - # has the value 0 elsewhere. - # The getGridInfo method is from SmartScript - halfInchMask = greater_equal(snowAmtGrid, 0.5 - snowAmtTol) - qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', gridTR) - # There should always be more QPF6hr grids than SnowAmt6hr grids, - # so if qpfInfoList is empty, then there are missing QPF6hr - # grids. Otherwise, qpfInfoList will have length 1 because - # SnowAmt6hr and QPF6hr have the same time constrain. However, - # the QPF6hr grid that overlaps the SnowAmt6hr grid will still - # need to be checked to make sure it hasn't been stretched. 
- if qpfInfoList == []: - message = self._getMsg( - 'noGrids', element='QPF6hr', timeRange=gridTR, - method='SnowAmt6hr/QPF6hr') - # The statusBarMsg is from SmartScript - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # I don't need the noDataError=0 in the self.getGrids call - # because if there were no grids in the gridTR, the previous - # if block would have caught that. - # The getGrids method is from SmartScript - qpfGrid = self.getGrids( - 'Fcst', 'QPF6hr', 'SFC', gridTR, mode='First', cache=0) - if not self.__checkTC('QPF6hr', gridTR): - message = self._getMsg( - 'badTC', element='QPF6hr', timeRange=gridTR) - # The statusBarMsg method is from SmartScrtipt - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # If we get here, then we have a SnowAmt6hr grid and a QPF6hr - # grid which meet their time constraints and are ready to be - # compared. - # around is from SmartScript - qpfGrid = around(qpfGrid, 2) - # The less method is from Numeric. It behaves analogously to - # the greater_equal method described above using less than for - # the comparison. - qpfMask = less(qpfGrid, 0.01 - qpfTol) - # The following is the "truth" table for the logical - # comparison. - # SnowAmt6hr >= 0.5, 1; SnowAmt6hr < 0.5, 0 - # QPF6hr < 0.01, 1; QPF6hr >= 0.01, 0 - # SnowAmt6hr >= 0.5 (1) and QPF6hr < 0.01 (1) = 1 (Bad result) - # SnowAmt6hr >= 0.5 (1) and QPF6hr >= 0.01 (0) = 0 (Good result) - # SnowAmt6hr < 0.5 (0) and QPF6hr < 0.01 (1) = 0 (Good result) - # SnowAmt6hr < 0.5 (0) and QPF6hr >= 0.01 (0) = 0 (Good result) - # The logical_and method is from Numeric. A logical and comparison - # results in a "True" value if both compared elements are "True". - # Otherwise, the result is "False". - consistMask = logical_and(halfInchMask, qpfMask) - # Now, apply the CWA mask. There's an assumption here that - # all offices will use a mask and provide a valid one, which - # means this step does something meaningful. 
If that assumption - # does not hold, then the next statement doesn't actually - # change anything, even though each and every grid point has a - # comparison check made. - # where is from Numeric. The first argument is a mask. - # The second argument is/are the value/values to use at the - # array points where the mask is one. The third argument - # is/are the value/values to use at the array points - # where the mask is zero. For this comparison, I want - # the values of consistMask where self.cwaMask is one and - # I want the "good result", which is zero, where - # self.cwaMask is zero. - consistMask[logical_not(self.cwaMask)] = 0 - # The ravel and sometrue methods are from Numeric. - if sometrue(ravel(consistMask)): - # The ravel method reduces the rank of the array by one. - # Since we had a 2-d array, the ravel function creates a - # 1-d array (a vector) such that reading the 2-d array from - # left-to-right, top-to-bottom returns the same values as - # reading the 1-d array from left-to-right. The sometrue - # method performs a logical or on subsequent element pairs - # in the 1-d array and returns the final result. If - # there are inconsistencies, the result will be 1. - # The highlightGrids method is from SmartScript. - if inconGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmt6hr', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'QPF6hr', 'SFC', gridTR, inconGridColor) - # The createGrid method is from SmartScript - self.createGrid( - 'Fcst', 'SnowAmt6hrQPF6hr', 'SCALAR', - consistMask, gridTR, - descriptiveName='SnowAmt6hrQPF6hrInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmt6hrQPF6hr', 'SFC', gridTR, - tempGridColor) - self.inconsistent = True - # While not required, I like to terminate my methods with a return - # statement to make it clear this is where the method ends. 
- return - - def _runSnowAmt6hrWxCheck(self, timeRange): - # This implements the check that if SnowAmt6hr >= 0.1, then the Wx grid - # must contain S, SW, or IP, regardless of whether or not there is - # any freezing or liquid types. Finally, the check does not look at - # anything other than the Wx type. In other words, the check will be - # okay if SnowAmt != 0 and Wx has Chc:S:- or Def:SW:-- or Lkly:S:+. - - # If there are locks, post urgent messages and return from the method. - snowLockMe, wxLockMe, snowLockOther, wxLockOther = \ - self.__checkLockedStatus(['SnowAmt6hr', 'Wx']) - if snowLockMe or wxLockMe or snowLockOther or wxLockOther: - # Something's locked, create messages. - self._makeLockMsgs( - snowLockMe, wxLockMe, snowLockOther, wxLockOther, - 'SnowAmt6hr', 'Wx', 'snowLockMe', 'wxLockMe', - 'snowLockOther', 'wxLockOther', 'SnowAmt6hr/Wx') - return - - # Make sure there are actually SnowAmt6hr grids in the time range. - # The getGridInfo method is from SmartScript. - snowAmtInfoList = self.getGridInfo( - 'Fcst', 'SnowAmt6hr', 'SFC', timeRange) - if snowAmtInfoList == []: - message = self._getMsg( - 'noGrids', element='SnowAmt6hr', timeRange=timeRange, - method='SnowAmt6hr/Wx') - # The statusBarMsg method is from SmartScript. - self.statusBarMsg(message, self.__getMsgSeverity('U')) - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. - return - for snowAmtIndex, snowAmtGrid in enumerate(self.getGrids( - 'Fcst', 'SnowAmt6hr', 'SFC', timeRange, mode='List', cache=0)): - gridTR = snowAmtInfoList[snowAmtIndex].gridTime() - # Make sure the snowAmtGrid meets the time constraints. - if not self.__checkTC('SnowAmt6hr', gridTR): - message = self._getMsg( - 'badTC', element='SnowAmt6hr', timeRange=gridTR) - # The statusBarMsg method is from SmartScript. 
- self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # around is from SmartScript - snowAmtGrid = around(snowAmtGrid, 1) - # The greater_equal method is from Numeric. - # The getGridInfo method is from SmartScript. - nonZeroMask = greater_equal(snowAmtGrid, 0.1 - snowAmtTol) - wxInfoList = self.getGridInfo('Fcst', 'Wx', 'SFC', gridTR) - # Check for Wx grid in gridTR - if wxInfoList == []: - message = self._getMsg( - 'noGrids', element='Wx', timeRange=gridTR, - method='SnowAmt6hr/Wx') - # The statusBarMsg method is from SmartScript - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # Now check the overlapping Wx grids. Initialize a totally - # inconsistent grid. - # ones is from Numeric - inconsistGrid = ones(nonZeroMask.shape, int) - for wxIndex, wxGrid in enumerate(self.getGrids( - 'Fcst', 'Wx', 'SFC', gridTR, mode='List', cache=0)): - # The wxMask method is from SmartScript - sMask = self.wxMask(wxGrid, ':S:') - swMask = self.wxMask(wxGrid, ':SW:') - ipMask = self.wxMask(wxGrid, ':IP:') - # The logical_or method is from Numeric. For the two input - # arrays, if both values are "False", then the result is - # "False". Otherwise, the result is "True". - snowMask = logical_or(logical_or(sMask, swMask), ipMask) - # I don't need these arrays any longer. Delete them to free - # up the memory they use. - del (sMask, swMask, ipMask) - # The where method is from Numeric - wxMask = logical_not(snowMask) - # "Truth" table for the logical comparison follows - # SnowAmt6hr >= 0.1, 1; SnowAmt6hr < 0.1, 0 - # Wx has S, SW, or IP, 0; Wx doesn't have S, SW, or IP, 1 - # SnowAmt6hr >= 0.1 (1) and Wx has (0) = 0 (Good result) - # SnowAmt6hr >= 0.1 (1) and Wx doesn't have (1) = 1 (Bad result) - # SnowAmt6hr < 0.1 (0) and Wx has (0) = 0 (Good result) - # SnowAmt6hr < 0.1 (0) and Wx doesn't have (1) = 0 (Good result) - # - # The logical_and, where, sometrue, and ravel methods are all - # from Numeric. 
- consistMask = logical_and(nonZeroMask, wxMask) - consistMask[logical_not(self.cwaMask)] = 0 - # Update inconsistGrid to be the current state of the - # inconsistencies. - inconsistGrid = logical_and(inconsistGrid, consistMask) - if not sometrue(ravel(inconsistGrid)): - # There were no longer any inconsistencies between - # SnowAmt6hr and Wx. - break - else: - # This block will only execute if the for loop runs to - # completion, i.e., the break statement is not executed. - # So, if we get here, we have inconsistencies and need to - # highlight the appropriate grids. - # The highlightGrids method is from SmartScript. - if inconGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmt6hr', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'Wx', 'SFC', gridTR, inconGridColor) - # The createGrid method is from SmartScript - self.createGrid( - 'Fcst', 'SnowAmt6hrWx', 'SCALAR', inconsistGrid, gridTR, - descriptiveName='SnowAmt6hrWxInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmt6hrWx', 'SFC', gridTR, - tempGridColor) - self.inconsistent = True - return - - def _runQPF6hrPoPCheck(self, timeRange): - # This method implements the check that if any QPF6hr grid is non zero - # then one of the corresponding floating PoP grids must also be non - # zero. - - # If there are locks, post urgent messages and return from the method. - qpfLockMe, popLockMe, qpfLockOther, popLockOther = \ - self.__checkLockedStatus(['QPF6hr', 'PoP']) - if qpfLockMe or popLockMe or qpfLockOther or popLockOther: - # Something's locked, create messages. - self._makeLockMsgs( - qpfLockMe, popLockMe, qpfLockOther, popLockOther, - 'QPF6hr', 'PoP', 'qpfLockMe', 'popLockMe', - 'qpfLockOther', 'popLockOther', 'QPF6hr/PoP') - return - - # The getGridInfo method is from SmartScript. - qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', timeRange) - # Make sure there are actually QPF6hr grids in the time range. 
- if qpfInfoList == []: - message = self._getMsg( - 'noGrids', element='QPF6hr', timeRange=timeRange, - method='QPF6hr/PoP') - # The statusBarMsg method is from SmartScript. - self.statusBarMsg(message, self.__getMsgSeverity('U')) - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. - return - for qpfIndex, qpfGrid in enumerate(self.getGrids( - 'Fcst', 'QPF6hr', 'SFC', timeRange, mode='List', cache=0)): - gridTR = qpfInfoList[qpfIndex].gridTime() - # Check the QPF6hr grid time constraints - if not self.__checkTC('QPF6hr', gridTR): - message = self._getMsg( - 'badTC', element='QPF6hr', timeRange=gridTR) - # The statusBarMsg method is from SmartScript. - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # around is from SmartScript - qpfGrid = around(qpfGrid, 2) - # The greater_equal method is from Numeric. The getGrids method - # is from SmartScript. - qpfNonZeroMask = greater_equal(qpfGrid, 0.01 - qpfTol) - popGrid = self.getGrids( - 'Fcst', 'PoP', 'SFC', gridTR, mode='Max', noDataError=0, - cache=0) - # Since I don't need to loop over the PoP grids, just get their - # max, I don't need to call getGridInfo like in other methods. - # With noDataError=0 in the getGrids call, if there are no grids, - # then the special Python value None will be returned. So, I can - # just check that to see if all the PoP grids are missing. If - # there were a gap in the PoP grids, that would not be caught. - # But, no one's PoP grids should ever have a gap in them, right? - if popGrid == None: - message = self._getMsg( - 'noGrids', element='PoP', timeRange=gridTR, - method='QPF6hr/PoP') - # The statusBarMsg method is from SmartScript. - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # The equal method is from Numeric. 
- popZeroMask = equal(popGrid, 0) - # popZeroMask = 1 if PoP = 0; popZeroMask = 0 if PoP != 0 - # qpfNonZeroMask = 1 if QPF6hr > 0; qpfNonZeroMask = 0 if QPF6hr = 0 - # PoP = 0 (1) and QPF6hr = 0 (0) => 0 (Good result) - # PoP != 0 (0) and QPF6hr = 0 (0) => 0 (Good result) - # PoP != 0 (0) and QPF6hr > 0 (1) => 0 (Good result) - # PoP = 0 (1) and QPF6hr > 0 (1) => 1 (Bad result) - # - # The logical_and, where, sometrue, and ravel methods are all - # from Numeric. - consistMask = logical_and(qpfNonZeroMask, popZeroMask) - consistMask[logical_not(self.cwaMask)] = 0 - if sometrue(ravel(consistMask)): - # The good result is if the logical_and returns zeros - # for every grid point, that is "none true". So, if - # the sometrue method evaluates True, there are - # inconsistencies. - # The createGrid and highlightGrids methods are from - # SmartScript. - self.createGrid( - 'Fcst', 'QPF6hrPoP', 'SCALAR', - consistMask, gridTR, - descriptiveName='QPF6hrPoPInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'QPF6hrPoP', 'SFC', gridTR, - tempGridColor) - if inconGridColor: - self.highlightGrids( - 'Fcst', 'QPF6hr', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'PoP', 'SFC', gridTR, inconGridColor) - self.inconsistent = True - return - - def _runQPF6hrWxCheck(self, timeRange): - # This method implements the check that if QPF6hr non zero, then the - # corresponding Wx grids must contain a precipitable Wx type. Note: - # the method only checks the Wx type, no cov/prob, no inten, etc. - - # If there are locks, post urgent messages and return from the method. - qpfLockMe, wxLockMe, qpfLockOther, wxLockOther = \ - self.__checkLockedStatus(['QPF6hr', 'Wx']) - if qpfLockMe or wxLockMe or qpfLockOther or wxLockOther: - # Something's locked, create messages. 
- self._makeLockMsgs( - qpfLockMe, wxLockMe, qpfLockOther, wxLockOther, - 'QPF6hr', 'Wx', 'qpfLockMe', 'wxLockMe', - 'qpfLockOther', 'wxLockOther', 'QPF6hr/Wx') - return - - # The getGridInfo method is from SmartScript. - qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', timeRange) - # Make sure there are actually QPF6hr grids in the time range. - if qpfInfoList == []: - message = self._getMsg( - 'noGrids', element='QPF6hr', timeRange=timeRange, - method='QPF6hr/Wx') - # The statusBarMsg method is from SmartScript. - self.statusBarMsg(message, self.__getMsgSeverity('U')) - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. - return - for qpfIndex, qpfGrid in enumerate(self.getGrids( - 'Fcst', 'QPF6hr', 'SFC', timeRange, mode='List', noDataError=0, - cache=0)): - gridTR = qpfInfoList[qpfIndex].gridTime() - # Make sure the QPF6hr grid meets the time constraints - if not self.__checkTC('QPF6hr', gridTR): - message = self._getMsg( - 'badTC', element='QPF6hr', timeRange=gridTR) - # The statusBarMsg method is from SmartScript. - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # around is from SmartScript - qpfGrid = around(qpfGrid, 2) - # The greater_equal method is from Numeric. - qpfNonZeroMask = greater_equal(qpfGrid, 0.01 - qpfTol) - # The getGridInfo method is from SmartScript. - wxInfoList = self.getGridInfo('Fcst', 'Wx', 'SFC', gridTR) - # Make sure there are Wx grids overlapping the QPF6hr grid - if wxInfoList == []: - message = self._getMsg( - 'noGrids', element='Wx', timeRange=gridTR, - method='QPF6hr/Wx') - # The statusBarMsg method is from SmartScript. - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # Initialize a totally inconsistent grid and loop over the - # overlapping Wx grids. 
- inconsistGrid = ones(qpfNonZeroMask.shape, int) - for wxIndex, wxGrid in enumerate(self.getGrids( - 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, - cache=0)): - # The wxMask method is from SmartScript. - sMask = self.wxMask(wxGrid, ':S:') - swMask = self.wxMask(wxGrid, ':SW:') - ipMask = self.wxMask(wxGrid, ':IP:') - # The logical_or method is from Numeric. - snowMask = logical_or(logical_or(sMask, swMask), ipMask) - # I don't need these three grids any longer, so delete them - # and free up their memory. - del (sMask, swMask, ipMask) - rMask = self.wxMask(wxGrid, ':R:') - rwMask = self.wxMask(wxGrid, ':RW:') - lMask = self.wxMask(wxGrid, ':L:') - zlMask = self.wxMask(wxGrid, ':ZL:') - zrMask = self.wxMask(wxGrid, ':ZR:') - # The logical_or method is from Numeric. - rainMask = logical_or( - rMask, logical_or( - rwMask, logical_or( - lMask, logical_or(zlMask, zrMask)))) - # Again, I don't need these grids any longer, so delete them - # and free up their memory. - del (rMask, rwMask, lMask, zlMask, zrMask) - precipMask = logical_or(snowMask, rainMask) - del (snowMask, rainMask) - wxMask = logical_not(precipMask) - # QPF6hr >= 0.01, 1; QPF6hr < 0.01, 0 - # Wx has precip, 0; Wx doesn't have precip, 1 - # QPF6hr >= 0.01 (1) and Wx has (0) = 0 (Good result) - # QPF6hr >= 0.01 (1) and Wx doesn't have (1) = 1 (Bad result) - # QPF6hr < 0.01 (0) and Wx has (0) = 0 (Good result) - # QPF6hr < 0.01 (0) and Wx doesn't have (1) = 0 (Good result) - # - # The logical_and, where, sometrue, and ravel methods are all - # from Numeric. - consistMask = logical_and(qpfNonZeroMask, wxMask) - consistMask[logical_not(self.cwaMask)] = 0 - # Update the inconsistGrid to the current state of the - # inconsistencies. - inconsistGrid = logical_and(inconsistGrid, consistMask) - if not sometrue(ravel(inconsistGrid)): - # There were no longer any inconsistencies between the Wx - # grids and the QPF6hr grid. 
- break - else: - # This block will only execute if the for loop runs to - # completion, i.e., the break statement is not executed. - # So, if we get here, we have inconsistencies and need to - # highlight the appropriate grids. - # The highlightGrids method is from SmartScript. - if inconGridColor: - self.highlightGrids( - 'Fcst', 'QPF6hr', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'Wx', 'SFC', gridTR, inconGridColor) - - # The createGrid method is from SmartScript. - self.createGrid( - 'Fcst', 'QPF6hrWx', 'SCALAR', inconsistGrid, gridTR, - descriptiveName='QPF6hrWxInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'QPF6hrWx', 'SFC', gridTR, - tempGridColor) - self.inconsistent = True - return - - def _runPoP12hrQPF6hrCheck(self, timeRange): - # This method implements the check that if any PoP12hr grid - # is >= 50%, then at least one of the two corresponding QPF6hr grids - # must be non zero. - - # If there are locks, post urgent messages and return from the method. - qpfLockMe, popLockMe, qpfLockOther, popLockOther = \ - self.__checkLockedStatus(['QPF6hr', 'PoP12hr']) - if qpfLockMe or popLockMe or qpfLockOther or popLockOther: - # Something's locked, create messages. - self._makeLockMsgs( - qpfLockMe, popLockMe, qpfLockOther, popLockOther, - 'QPF6hr', 'PoP12hr', 'qpfLockMe', 'popLockMe', - 'qpfLockOther', 'popLockOther', 'PoP12hr/QPF6hr') - return - - # The getGridInfo method is from SmartScript. - # Make sure there are actually PoP12hr grids in the time range - popInfoList = self.getGridInfo('Fcst', 'PoP12hr', 'SFC', timeRange) - if popInfoList == []: - message = self._getMsg( - 'noGrids', element='PoP12hr', timeRange=timeRange, - method='PoP12hr/QPF6hr') - # The statusBarMsg method is from SmartScript. 
- self.statusBarMsg(message, self.__getMsgSeverity('U')) - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. - return - # This is the one check where it will almost always be the case that - # the "controlling" element (PoP12hr) will extend later in time than - # the "checked" element (QPF6hr). We don't want a lot of annoying - # pop-up messages for missing QPF6hr grids for that case. I will - # determine the end time of the last QPF6hr grid, adjust it back to the - # nearest 00 or 12 UTC time to align it with the PoP12hr grid, and then - # check the end time of the last PoP12hr grid. If the end time for - # the QPF6hr grid is earlier, I will adjust the timeRange variable - # inside this method to end with the QPF6hr grids. - # The getGridInfo method is from SmartScript. - qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', timeRange) - # Make sure there are actually QPF6hr grids in the time range - if qpfInfoList == []: - message = self._getMsg( - 'noGrids', element='QPF6hr', timeRange=timeRange, - method='PoP12hr/QPF6hr') - self.statusBarMsg(message, self.__getMsgSeverity('U')) - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. 
- return - lastQPFTR = qpfInfoList[-1].gridTime() - qpfEndTime = lastQPFTR.endTime().unixTime() - qpfEndHr = lastQPFTR.endTime().hour - qpfEndTime -= ((qpfEndHr % 12) * 3600) - popEndTime = popInfoList[-1].gridTime().endTime().unixTime() - if popEndTime > qpfEndTime: - # Adjust time range to QPF6hr time range - qpfStartTime = qpfInfoList[0].gridTime().startTime().unixTime() - qpfDuration = (qpfEndTime - qpfStartTime) / 3600 - offset = (qpfStartTime - \ - self.timeRange0_1.startTime().unixTime()) / 3600 - timeRange = self.createTimeRange( - offset, offset+qpfDuration, 'Zulu') - message = self._getMsg( - 'changeTR', method='PoP12hr/QPF6hr', timeRange=timeRange) - self.statusBarMsg(message, 'R') - # Because the timeRange has changed, popInfoList needs to be - # updated. qpfInfoList will be updated later. - # The getGridInfo method is from SmartScript. - popInfoList = self.getGridInfo('Fcst', 'PoP12hr', 'SFC', timeRange) - # Now, it's possible there were gaps in the PoP12hr grids and the - # new time range spans a gap. So, we have to check for grid - # existence again. - if popInfoList == []: - message = self._getMsg( - 'noGrids', element='PoP12hr', timeRange=timeRange, - method='PoP12hr/QPF6hr') - self.statusBarMsg(message, self.__getMsgSeverity('U')) - return - for popIndex, popGrid in enumerate(self.getGrids( - 'Fcst', 'PoP12hr', 'SFC', timeRange, mode='List', cache=0)): - gridTR = popInfoList[popIndex].gridTime() - qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', gridTR) - # Check for existence of QPF6hr grids in the time range. 
- if qpfInfoList == []: - message = self._getMsg( - 'noGrids', element='QPF6hr', timeRange=gridTR, - method='PoP12hr/QPF6hr') - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # Check the PoP12hr time constraints - if not self.__checkTC('PoP12hr', gridTR): - message = self._getMsg( - 'badTC', element='PoP12hr', timeRange=gridTR) - self.statusBarMsg(message, self.__getMsgSeverity('U')) - continue - # The greater_equal method is from Numeric. - pop50Mask = greater_equal(popGrid, 50) - # Initialize a totally inconsistent grid. - inconsistGrid = ones(pop50Mask.shape, int) - for qpfIndex, qpfGrid in enumerate(self.getGrids( - 'Fcst', 'QPF6hr', 'SFC', gridTR, mode='List', cache=0)): - qpfGridTR = qpfInfoList[qpfIndex].gridTime() - # Check the QPF6hr time contraints - if not self.__checkTC('QPF6hr', qpfGridTR): - message = self._getMsg( - 'badTC', element='QPF6hr', timeRange=qpfGridTR) - self.statusBarMsg(message, self.__getMsgSeverity('U')) - # If one of the QPF6hr grids has a bad time constraint, - # then I only have one QPF6hr grid. Time to break. - break - # around is from SmartScript - qpfGrid = around(qpfGrid, 2) - # The less method is from Numeric. - qpfMask = less(qpfGrid, 0.01 - qpfTol) - # The following is the "truth" table for the logical - # comparison. - # PoP12hr >= 50, 1; PoP12hr < 50, 0 - # QPF6hr < 0.01, 1; QPF6hr >= 0.01, 0 - # PoP12hr >= 50 (1) and QPF6hr < 0.01 (1) = 1 (Bad result) - # PoP12hr >= 50 (1) and QPF6hr >= 0.01 (0) = 0 (Good result) - # PoP12hr < 50 (0) and QPF6hr < 0.01 (1) = 0 (Good result) - # PoP12hr < 50 (0) and QPF6hr >= 0.01 (0) = 0 (Good result) - # logical_and is from Numeric - # The logical_and, where, sometrue, and ravel methods are all - # from Numeric. - consistMask = logical_and(pop50Mask, qpfMask) - consistMask[logical_not(self.cwaMask)] = 0 - # Update the inconsistentGrid with the state of the - # inconsistencies. 
- inconsistGrid = logical_and(inconsistGrid, consistMask) - # ravel and sometrue are from Numeric - if not sometrue(ravel(inconsistGrid)): - # There were no longer any inconsistencies between the - # QPF6hr grids and PoP12hr grid. - break - else: - # This else block will only execute if the for loop exits - # "naturally", i.e., the above break statement didn't execute. - # This means there were inconsistencies. - # The highlightGrids method is from SmartScript. -## lin_index = nonzero(ravel(inconsistGrid)) -## sh = list(shape(inconsistGrid)) -## sh.reverse() -## new_index = zeros((len(lin_index), len(sh))) -## mod = zeros(len(lin_index)) -## for j in arange(len(lin_index)): -## count = len(sh) -## for i in sh: -## lin_index[j], mod[j] = divmod(lin_index[j], i) -## count = count - 1 -## new_index[j, count] = mod[j] -## print new_index -## print popGrid[0,0], qpfGrid[0,0] - if inconGridColor: - self.highlightGrids( - 'Fcst', 'PoP12hr', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'QPF6hr', 'SFC', gridTR, inconGridColor) - self.createGrid( - 'Fcst', 'PoP12hrQPF6hr', 'SCALAR', inconsistGrid, gridTR, - descriptiveName='PoP12hrQPF6hrInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'PoP12hrQPF6hr', 'SFC', gridTR, - tempGridColor) - self.inconsistent = True - return - - def _makeLockMsgs( - self, lockMe1, lockMe2, lockOther1, lockOther2, element1, element2, - lockMeKey1, lockMeKey2, lockOtherKey1, lockOtherKey2, method): - # As I went through the five check methods, I noted that this code - # was basically being repeated over and over again. So, here's a - # case where I took the repeated code, made it abstract, and turned - # it into a callable method. Now where each of the five methods had - # about 20 lines of code to do this, they now have only 5, which is - # a fairly substantial decrease. 
The trade-off is in somewhat - # lessened code readability because of having to jump from method - # to method to track the code. - # Below, I assign the call to _getMsg to a temporary variable - # called message for readability. Embedding the call to _getMsg - # in the call to statusBarMsg makes those calls much harder to - # follow. - # The statusBarMsg method is from SmartScript. - if lockMe1: - message = self._getMsg( - lockMeKey1, method=method, element=element1) - self.statusBarMsg( - message, self.__getMsgSeverity('U')) - if lockMe2: - message = self._getMsg( - lockMeKey2, method=method, element=element2) - self.statusBarMsg( - message, self.__getMsgSeverity('U')) - if lockOther1: - message = self._getMsg( - lockOtherKey1, method=method, element=element1) - self.statusBarMsg( - message, self.__getMsgSeverity('U')) - if lockOther2: - message = self._getMsg( - lockOtherKey2, method=method, element=element2) - self.statusBarMsg( - message, self.__getMsgSeverity('U')) - return - - def _msgDict(self): - # Since I seem to be incapable of writing concise error messages, - # I decided to capture the error messages in a separate method. - # Because I tend to be verbose, I like to use triple quoted strings - # for messages since this helps minimize (not eliminate) characters - # which extend beyond the right margin of the editor window. But triple - # quoted strings disrupt the indentation patterns of the code, making - # the code harder to read. By capturing all the triple quoted strings - # in a separate method, the indentation issue is mitigated. Doing this - # does add some complexity to the code, but it's a fair trade-off in my - # mind. This method is just a dictionary of all the error messages, - # which may contain string formatting code place holders. Another - # method, _getMsg, will look up the message boiler plates here and have - # the logic to correctly pass the needed variables to the string - # formatting codes. 
- return { -'complete': 'NDFD_QPF_Checks complete.', -'0_240': -'''The NDFD_QPF_Checks procedure did not run over the 0 to 240 hour time period, -it ran over %s. This may be what you desired.''', -'cwaMask': -'''The procedure was not able to use the CWA edit area, %s, -provided in the configuration. You should inform the person responsible for procedures of -this problem. The procedure ran over the whole domain.''', -'incon': 'NDFD_QPF_Checks complete. Inconsistencies found!', -'snowLockMe': -'''You have the %s grid locked. Please save the %s grid. The %s -check was not run.''', -'qpfLockMe': -'''You have the %s grid locked. Please save the %s grid. The %s check was -not run.''', -'snowLockOther': -'''Another user has the %s grid locked. Please have that user save the %s grid. The -%s check was not run.''', -'qpfLockOther': -'''Another user has the %s grid locked. Please have that user save the %s grid. The -%s check was not run.''', -'wxLockMe': -'''You have the %s grid locked. Please save the %s grid. The %s check was not run.''', -'wxLockOther': -'''Another user has the %s grid locked. Please have that user save the %s grid. The %s -check was not run.''', -'popLockMe': -'''You have the %s grid locked. Please save the %s grid. The %s check was -not run.''', -'popLockOther': -'''Another user has the %s grid locked. Please have that user save the %s grid. The -%s check was not run.''', -'noGrids': -'''There are no %s grids in the time range, %s. -The %s Check skipped the time range.''', -'changeTR': -'''The time range of the %s check was changed to ensure the PoP12hr grid is not checked -beyond the time of the last QPF6hr grid. The time range used was %s.''', -'badTC': -'''A %s grid has the following time range: %s, -which does not adhere to the time constraint requirement. This %s grid has not been consistency checked at all. 
-Please fix the grid and re-run the procedure.''', -} - - def _getMsg(self, key, timeRange=None, method=None, element=None): - # This method looks up the needed error message by passing key to - # _msgDict. The other parameters, if provided, are used to expand - # the embedded string formatting codes. The resulting message is - # passed back to the caller. - message = self._msgDict().get(key, '') - if key == '0_240': - message = message % str(timeRange) - return message - if key == 'cwaMask': - message = message % cwaEditArea - return message - if key == 'incon': - if inconGridColor and tempGridColor: - message = '%s Inconsistent grids highlighted %s.\nTemporary grids highlighted %s.' % (message, inconGridColor, tempGridColor) - return message - if inconGridColor: - message = '%s Inconsistent grids highlighted %s.' % ( - message, inconGridColor) - return message - if tempGridColor: - message = '%s Temporary grids highlighted %s.' % ( - message, tempGridColor) - return message - return message - if key == 'snowLockMe' or key == 'qpfLockMe' or key == 'wxLockMe' or \ - key == 'snowLockOther' or key == 'qpfLockOther' or \ - key == 'wxLockOther' or key == 'popLockMe' or \ - key == 'popLockOther': - message = message % (element, element, method) - return message - if key == 'noGrids': - message = message % (element, str(timeRange), method) - return message - if key == 'changeTR': - message = message % (method, str(timeRange)) - return message - if key == 'badTC': - message = message % (element, str(timeRange), element) - return message - # If for some reason the key look-up failed, then the message - # variable will be the empty string, '', and this will be returned. - # Since the calling method expects a string to be returned, I must - # ensure that this happens. - return message - - def execute(self, timeRange, varDict): - # Are we in quiet mode? The variableList above does NOT have a - # variable for 'Quiet', by design. 
When run interactively, some error - # conditions will generate pop-up messages. If an office decides to - # run this procedure as part of an over-arching check procedure, they - # can choose to turn the pop-up messages into routine messages by - # passing in a varDict with a 'Quiet' key that evaluates to 'True'. - # In Python, you can use the 'get' method on a dictionary to test - # for the existence of a key. If the key exists, then 'get' returns - # the value. If the key doesn't exist, then the second argument of - # the 'get' call is returned. If you don't provide a second argument - # to the 'get' call and the key is not found, then None is returned. - # As you can see below, my call to 'get' will return False if the key - # 'Quiet' is not in varDict. The 'get' method allows you to avoid - # constructs like: - # try: - # self._quiet = varDict['Quiet'] - # except KeyError: - # self._quiet = False - # I don't know for sure, but I'd be willing to bet that the 'get' - # method is just a wrapper to the 'try/except' construct very - # similar to the one demonstrated. - self._quiet = varDict.get('Quiet', False) -## self._quiet = True - - # Make sure the configuration values are the correct types. - self.__checkConfigValueTypes() - # createTimeRange is from SmartScript - timeRange0_240 = self.createTimeRange(0, 240, 'Zulu') - checkCleanup = varDict.get('Check_Cleanup', 'Check') - self.__cleanup(timeRange0_240) - if checkCleanup == 'Cleanup': - self.statusBarMsg(self._getMsg('complete'), 'R') - self.cancel() - elementList = ( - 'PoP', 'QPF', 'Wx', 'SnowAmt', 'QPF6hr', 'SnowAmt6hr', 'PoP12hr') - for element in elementList: - self.loadParm('Fcst', element, 'SFC') - if timeRange.endTime().unixTime() - timeRange.startTime().unixTime() < \ - 3600: # No time range selected, use 0 to 240 hour range - timeRange = timeRange0_240 - - # If the user has a time range swept out, send an informational - # message. 
- if (timeRange.startTime().unixTime() != \ - timeRange0_240.startTime().unixTime()) or \ - (timeRange.endTime().unixTime() != \ - timeRange0_240.endTime().unixTime()) or \ - (timeRange.duration() != timeRange0_240.duration()): - # What the incredibly dense expression in the above if statement - # does is compare the start and end of the time range to the start - # and end of the 0-240 hour time range. If either are different, - # then a time range was swept out. - self.statusBarMsg( - self._getMsg( - '0_240', timeRange=timeRange), self.__getMsgSeverity('S')) - - # I'll need to know the unix time of 00Z so I can determine the - # start time of temporary grids later. I'll need this in more than - # one of the methods called later, so this will become an instance - # variable, i.e., prefixed with "self." I also need an instance - # variable that flags whether or not there were inconsistent grids. - self.timeRange0_1 = self.createTimeRange(0, 1, 'Zulu') - self.inconsistent = False - - # A CWA edit area can be provided in the configuration section. - # Attempt to encode that edit area as a Numeric Python mask so that - # the later checks are limited to the edit area. The GFE is not very - # friendly if the encoding fails. The GFE will send a nasty message - # to the user, but continue executing the procedure. No trappable - # error is thrown. As of this writing, the GFE appears to create an - # array of shape (0, 0) if the encoding cannot be done, so I will - # check for that and, if I find it, then set the edit area to the - # domain. - # encodeEditArea comes from SmartScript. For the points that are in - # the edit area, a value of one is assigned. Otherwise, a value of - # zero is assigned. - if cwaEditArea: - self.cwaMask = self.encodeEditArea(cwaEditArea) - if self.cwaMask.shape == (0, 0): - # Use the getGridInfo command to get information about the - # SnowAmt grid. From this, the grid size can be extracted. I - # could use getGridInfo on any valid GFE grid. 
- # getGridInfo is from SmartScript - snowAmtInfoList = self.getGridInfo( - 'Fcst', 'SnowAmt6hr', 'SFC', timeRange) - # I painfully discovered that the array shape is (y, x) - gridSize = (snowAmtInfoList[0].gridLocation().gridSize().y, - snowAmtInfoList[0].gridLocation().gridSize().x) - # ones is from Numeric. It creates an array of the given size - # and data type where all values are one. - self.cwaMask = ones(gridSize, int) - self.statusBarMsg( - self._getMsg('cwaMask'), self.__getMsgSeverity('S')) - else: - snowAmtInfoList = self.getGridInfo( - 'Fcst', 'SnowAmt6hr', 'SFC', timeRange) - gridSize = (snowAmtInfoList[0].gridLocation().gridSize().y, - snowAmtInfoList[0].gridLocation().gridSize().x) - self.cwaMask = ones(gridSize, int) - - # Based on the user's input, run the appropriate checks. - # By making each of these options a checkbox with only one option in - # the VariableList above, if an option is unchecked then an empty - # list, [], will be what's in varDict. If an option is checked then a - # list with the value "Yes", ["Yes"], will be what's in varDict. In - # Python, a conditional expression can be whether or not a data - # structure is empty. In these cases, an empty data structure, - # e.g., an empty list, an empty tuple, an empty dictionary, - # conditionally test to False while non empty data structures - # conditionally test to True. In the if statements below, every varDict - # lookup returns a list: either [] or ["Yes"]. I think the constructs - # below or more elegant and easier to understand. 
- if varDict['Run SnowAmt6hr/QPF6hr Check?']: - # Call the SnowAmt6hr/QPF6hr check method - self._runSnowAmt6hrQPF6hrCheck(timeRange) - if varDict['Run SnowAmt6hr/Wx Check?']: - # Call the SnowAmt6hr/Wx check method - self._runSnowAmt6hrWxCheck(timeRange) - if varDict['Run QPF6hr/PoP Check?']: - # Call the QPF6hr/PoP check method - self._runQPF6hrPoPCheck(timeRange) - if varDict['Run QPF6hr/Wx Check?']: - # Call the QPF6hr/Wx check method - self._runQPF6hrWxCheck(timeRange) - if varDict['Run PoP12hr/QPF6hr Check?']: - # Call the PoP12hr/QPF6hr check method - self._runPoP12hrQPF6hrCheck(timeRange) - if self.inconsistent: - self.statusBarMsg(self._getMsg('incon'), self.__getMsgSeverity('S')) - else: - self.statusBarMsg(self._getMsg('complete'), 'R') +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# NDFD_QPF_Checks (was SnowAmtQPFPoPWxCheck) +# +# Author: Jay Smith, WFO Fairbanks, jay.smith@noaa.gov, 907-458-3721 +# SnowAmtQPFPoPWxCheck Incarnation +# Version: 1.0.0, 09/14/2006 - Initial version +# 1.0.1, 10/12/2006 - Added PoP/QPF check at request of DSPAC +# 1.0.2, 10/18/2006 - Changed PoP/QPF check to treat the PoP as +# floating. Instead of checking each individual PoP grid +# against its corresponding QPF grid, the max of all the +# PoP grids overlapping a QPF grid will be checked. +# 1.1.0, 01/25/2007 - Added options to choose which checks to run. +# Reorganized code so that each check is its own method. +# Added a check for QPF and Wx. Added highlighting for the +# created temporary grids. 
+# 1.1.1, 02/01/2007 - Changed the SnowAmt/Wx check to return +# consistent results for SnowAmt > 0 and Wx grid containing +# S, SW, or IP regardless of whether the frozen precip is +# mixed with freezing and/or liquid precip. +# 1.2.0, 02/13/2007 - Added a configuration option to provide a CWA +# edit area to run the procedure over. A bad edit area or no +# edit area will result in running over the whole domain. +# Modified the SnowAmt/Wx and QPF/Wx checks to handle two +# cases. Case 1: The SnowAmt/QPF grid is 6-hr long and starts +# at 00, 06, 12, or 18 UTC. Then only one of the corresponding +# Wx grids has to meet the consistency rule. Case 2: The +# SnowAmt/QPF grid does not meet the case 1 definition. Then +# all of the corresponding Wx grids must meet the consistency +# rule. +# NDFD_QPF_Checks Incarnation +# Version: 1.0.0, 04/04/2007 - The program now requires the presence of the +# following 3 grids: QPF6hr, SnowAmt6hr, and PoP12hr. The 2 +# 6-hr grids are expected to start at 00, 06, 12, or 18 UTC +# and be exactly 6 hours long. The PoP12hr grids are expected +# to start at 00 or 12 UTC and be exactly 12 hours long. +# The procedure still needs the PoP and Wx grids. With these +# grid changes, the procedure can now finally perform the last +# of the required NDFD checks. This also means there is no +# longer the need for two check cases in the two "Wx" checks. +# Now, Wx is consistent if any of its grids have the appropriate +# type. The procedure can now be run in a "quiet" mode if +# called from another procedure. Any messages generated in +# "quiet" mode will be of "R" severity so no pop-up messages +# are generated but the information is still available from the +# GFE status bar area. All error messages are templated in a +# dictionary in a separate method. This allows me to put all +# the triple-quoted strings, which ignore indentation, in one +# location. 
The code for checking for locked grids was also +# templated in its own method, which chopped about 15 lines of +# code off the front of each check method. There were a couple +# of places where I was applying the "tolerance" values +# incorrectly, which have been fixed. I dropped the +# "Inconsistent" label from all temporary grid names. I was +# making those grid names so long, they didn't actually fit +# in the spatial editor window when the grid manager was on +# the left. Temporary grid names now are just a concatenation +# of the two grids used in the check. +# Version: 1.1.0, The logic for the handling of inconsistencies in the +# SnowAmt6hr/Wx, QPF6hr/Wx, and PoP12hr/QPF6hr checks could +# result in false positive inconsistencies. This is because +# I was checking for inconsistencies on each of these grids +# individually when I needed to be checking the cumulative +# inconsistencies of these grids. With the new logic, if any +# of these checks has inconsistencies, the resulting temporary +# grid will have the same time range as the "controlling" grid, +# which is the first grid listed in each check name. Also, I +# have enforced the precision of the SnowAmt6hr and QPF6hr +# grids in the methods where they are used. SmartScript has a +# method called "around" that does this. I ran into an issue +# when this procedure was called from NDFD_QPF_Suite. I am +# unable to figure out how to uncache the QPF6hr, SnowAmt6hr, +# and PoP12hr grids after they are generated by the +# Collaborate_PoP_SnowAmt_QPF procedure. For the QPF6hr and +# SnowAmt6hr grids, this means getGrids is getting them from +# the cache, which means these float parameters have machine +# precision. This is utterly unacceptable. I have to have +# ifpServer precision. Now, I think I've ensured this. +# 1.1.1, 05/02/2007 - There have been instances of unexpected procedure +# performance if the NDFD QPF parameters are not visible in the +# grid manager when the procedure is run. 
The procedure will
+# now require the existence of a weather element group which
+# contains just the NDFD QPF parameters.
+# 1.2.0, 05/03/2007 - Upon further review, the unexpected procedure
+# performance arises when some of the NDFD QPF parameters are
+# not present in the grid manager. However, I do not need to
+# load a weather element group to make the parameters present.
+# I can use the loadParm command on each element instead. Given
+# this, the "weGroup" configuration has been removed. Also,
+# some people believe the lock checking is overly stringent.
+# To some extent, I agree. For the purposes of this procedure,
+# other GFE users can have any of the NDFD QPF parameters
+# locked. The user running the procedure, however, cannot have
+# any of the parameters locked; i.e., that person must save
+# those elements before running the procedure.
+# The procedure performs the following checks:
+# 1. If SnowAmt6hr present and >= 0.5 inches, then corresponding QPF6hr grids
+# must be >= 0.01 inches.
+# 2. If SnowAmt6hr >= 0.1 inches then at least one of the corresponding Wx
+# grids must have S, SW, or IP.
+# 3. If QPF6hr > 0.0, then at least one of the corresponding PoP grids must
+# be > 0
+# 4. If QPF6hr > 0.0 then at least one of the corresponding Wx grids must have
+# R, RW, S, SW, RS, IP, L, ZR, ZL.
+# 5. If PoP12hr >= 50%, then at least one of the corresponding QPF6hr grids
+# must be >= 0.0.
+# For all of the checks above, if the initial threshold is not exceeded, then
+# the two grids are consistent by definition. In other words:
+# 1. If SnowAmt6hr < 0.5, then SnowAmt6hr and QPF6hr are always consistent.
+# 2. If SnowAmt6hr < 0.1, then SnowAmt6hr and Wx are always consistent.
+# 3. If QPF6hr = 0.0, then QPF6hr and PoP are always consistent.
+# 4. If QPF6hr = 0.0, then QPF6hr and Wx are always consistent.
+# 5. If PoP12hr < 50%, then PoP12hr and QPF6hr are always consistent.
+# For the Wx checks above, only the Wx type is considered. 
+# +# ****** NOTE NOTE NOTE NOTE ****** +# The program checks the PoP12hr, QPF6hr, and SnowAmt6hr grids to make sure +# their time constraints are met. For any grid where the time constraint is +# violated, those grids are not checked. To reiterate the time constraints: +# PoP12hr: starts at either 00 or 12 UTC and is exactly 12 hours duration +# QPF6hr: starts at 00, 06, 12, or 18 UTC and is exactly 6 hours duration +# SnowAmt6hr: starts at 00, 06, 12 or 18 UTC and is exactly 6 hours duration +# ****** NOTE NOTE NOTE NOTE ****** +# +# If discrepancies are found, then the "bad" grids will be highlighted. +# Temporary grids showing where the discrepancies occur will be created and +# also highlighted. +# +# I've written this code such that it's optimized to minimize memory usage +# (at least I think I've done that). As a result, it's not particularly +# optimized for ifpServer database access. In fact, I retrieve the various +# grids from the ifpServer database many times during the procedure's run. +# This will have an impact on how fast the procedure runs (it'll run slower +# than if I had optimized for ifpServer database access). The choice to favor +# memory optimization comes from my belief that there are still "memory leak" +# problems in the GFE and that the consequences of those problems will be most +# manifest when this procedure is most likely to be run (near the end of the +# shift). Funky memory problems are a prime cause of funky application +# behavior like application crashes or spontaneous logouts. So, this procedure +# basically reads a grid into memory, keeps it as long as it's needed, and +# then discards it. +# +# Finally, this procedure is also intended to provide an example to other +# developers of how to write and document code. I have reservations as to how +# well I've succeeded at that task. The code is heavily documented, probably +# excessively so. Also, it's not as well as organized as it could be. 
As you +# look through the various methods, it should become quickly apparent that +# there is a lot of repeated code. I've consciously left some the code this +# way in the hopes that it will be easier to understand by more novice +# programmers and because the code hasn't quite grown to the point where +# updating the repeating code is onerous or overly error-prone. It would be +# better to capture the repeating code in separate methods, but keeping track +# of the where you are in the code becomes harder the more you have to jump +# around from method to method. Anyone who has ever worked with the text +# formatters can sympathize with that. As with all things, there are trade- +# offs involved. UPDATE: 4/3/2007 - Starting with the first NDFD_QPF_Checks +# version, I consolidated quite a bit of the repeating code into separate +# methods. So, there's some improvement on that front. +# +# Acknowledgement: +# Many of the Python "tricks" I use in this procedure I learned from +# reading/perusing the following book: Python Cookbook, Alex Martelli & +# David Ascher, eds., 2002, O'Reilly and Associates +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +MenuItems = ["Consistency"] + +VariableList = [ + ('Check_Cleanup', 'Check', 'radio', ['Check', 'Cleanup']), + ('Run SnowAmt6hr/QPF6hr Check?', ['Yes'], 'check', ['Yes']), + ('Run SnowAmt6hr/Wx Check?', ['Yes'], 'check', ['Yes']), + ('Run QPF6hr/PoP Check?', ['Yes'], 'check', ['Yes']), + ('Run QPF6hr/Wx Check?', ['Yes'], 'check', ['Yes']), + ('Run PoP12hr/QPF6hr Check?', ['Yes'], 'check', ['Yes']), + ('If "Cleanup" is selected, then only cleanup actions will run.\nNo checks will be made, regardless of the above settings.', '', 'label'), + ] + +#### Config section +# Both the QPF and SnowAmt grids have values which are floating point +# numbers. This means comparisons must use a tolerance value. In other +# words, 0.5 may be represented in machine numbers as 0.49999999999 or +# 0.500000000001. By specifying a tolerance value, we account for the +# vagaries of machine representation of floating point numbers while +# keeping the precision of the comparisons to acceptable levels. Depending +# on the comparison being done, the tolerance value will be added to or +# subtracted from the comparison value to allow for machine error in the +# floating point number representation. +# By default in the GFE, QPF precision is to the nearest one-hundredth while +# SnowAmt precision is to the nearest tenth. +qpfTol = 0.00001 # 1/100,000 tolerance vs 1/100 precision +snowAmtTol = 0.0001 # 1/10,000 tolerance vs 1/10 precision +# Inconsistent grid highlight color. One size fits all. To turn off +# highlighting, set the variable to the empty string, ''. +inconGridColor = 'red' +# Temporary grid highlight color. One size fits all. To turn off highlighting, +# set the variable to the empty string, ''. +tempGridColor = 'orange' +# Name of CWA edit area to use instead of running the procedure over the +# whole domain. Set to the empty string, '', if you want the procedure to +# always run over the whole domain. 
If the procedure has a problem with the +# edit area you provide, it will run over the whole domain. You should probably +# choose an edit area that is slightly larger than your entire CWA. It's +# possible that when mapping your GFE grids to NDFD grids that the NDFD thinks +# some GFE grid cells are in your CWA that the GFE does not think are in your +# CWA. Using an edit area slightly larger than the CWA, like the ISC_Send_Area +# which is the mask used when sending grids to the NDFD, should eliminate the +# possibibilty of the NDFD intermittently flagging CWA border "points" as +# inconsistent. Note: running the procedure over a subset of the entire GFE +# domain does not really provide any performance gains. Given the way the +# underlying array data structure works, calculations are almost always made +# at every single grid point first and then a mask is applied to limit the +# meaningful results to the edit area. For the purposes of this procedure, the +# values outside the edit area are set to the appropriate "consistent" result. +# The real benefit of this option is it limits the inconsistent results to the +# areas the forecaster really cares about, which should lessen the workload of +# using this procedure. Marine Offices: Make sure the edit area provided +# includes your marine zones. +cwaEditArea = 'ISC_Send_Area' +#### Config section end + +import SmartScript +from numpy import * + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def __cleanup(self, timeRange): + # Remove any temporary grids created previously. Turn off any + # previous highlighting. + for element in ( + 'SnowAmt6hrQPF6hr', 'SnowAmt6hrWx', 'QPF6hrPoP', 'QPF6hrWx', + 'PoP12hrQPF6hr'): + try: + # From SmartScript + self.unloadWE('Fcst', element, 'SFC') + except: + # A failure is almost certainly no grids to unload. + pass + # Turn off any highlights. 
From SmartScript + self.highlightGrids( + 'Fcst', 'SnowAmt6hr', 'SFC', timeRange, inconGridColor, on=0) + self.highlightGrids( + 'Fcst', 'QPF6hr', 'SFC', timeRange, inconGridColor, on=0) + self.highlightGrids( + 'Fcst', 'Wx', 'SFC', timeRange, inconGridColor, on=0) + self.highlightGrids( + 'Fcst', 'PoP', 'SFC', timeRange, inconGridColor, on=0) + self.highlightGrids( + 'Fcst', 'PoP12hr', 'SFC', timeRange, inconGridColor, on=0) + return + + def __checkConfigValueTypes(self): + # Make sure the values provided in the configuration section are the + # correct type. + # Later in the code are two methods devoted to creating error + # messages. The error messages here could pop up in quite a large + # number of different combinations, which makes capturing them in the + # later methods very complex. Rather than do that and considering + # that these error messages should never appear once the procedure + # is correctly set up, I decided to leave them here. There always has + # to be an exception. :-) + import types + message = '' + badValues = False + if not type(qpfTol) is float: + message = '%sThe "qpfTol" variable is not defined as a floating point value. Please contact your IFPS focal point to fix this.\n' % message + badValues = True + if not type(snowAmtTol) is float: + message = '%sThe "snowAmtTol" variable is not defined as a floating point value. Please contact your IFPS focal point to fix this.\n' % message + badValues = True + if not type(inconGridColor) is bytes: + message = '%sThe "inconGridColor" variable is not defined as a string value. Please contact your IFPS focal point to fix this.\n' % message + badValues = True + if not type(tempGridColor) is bytes: + message = '%sThe "tempGridColor" variable is not defined as a string value. Please contact your IFPS focal point to fix this.\n' % message + badValues = True + if not type(cwaEditArea) is bytes: + message = '%sThe "cwaEditArea" variable is not defined as a string value. 
Please contact your IFPS focal point to fix this.\n' % message + badValues = True + if badValues: + message = '%sYou will not be able to run the procedure until the problem is corrected.' % message + # The next two commands are from SmartScript + self.statusBarMsg(message, 'U') + self.cancel() + return + + def __checkLockedStatus(self, elementList): + # There can be a significant difference between the values stored + # in memory and the values returned from the database. This is because + # when values are saved, the element's precision (as defined in + # serverConfig.py/localConfig.py) is enforced. Values in memory do not + # have the element's precision enforced; in fact, they have the + # machine precision of the underlying data type. + # At the beginning of each check method, a call to this method is + # made to make sure the grids are saved. A check method will not run + # if the grids it's to check are not saved. This method will return + # a list of boolean values indicating if the elements are locked by me + # and then if the elements are locked by other. + # The lockedByMe and lockedByOther methods are from SmartScript + lockedByMe = [] + lockedByOther = [] + for element in elementList: + if self.lockedByMe(element, 'SFC'): + lockedByMe.append(True) + else: + lockedByMe.append(False) +## if self.lockedByOther(element, 'SFC'): +## lockedByOther.append(True) +## else: +## lockedByOther.append(False) + lockedByOther.append(False) + return lockedByMe + lockedByOther + + def __getMsgSeverity(self, severity): + # For calls to self.statusBarMsg where I intended the severity to be + # something other than 'R', this method is now called to determine + # what the severity should be. This procedure can be called from + # another procedure in such a way as to suppress the pop-up type of + # status bar messages. This is done by passing in a varDict with a + # 'Quiet' key which evaluates to 'True'. 
For those situations, the + # procedure defers to the calling program and turns any non 'R' + # severities into 'R' severity. This allows the message to be + # communicated still to the GFE session, but only via the 'Status' + # line area of the GFE. When run interactively from the GFE, the + # severity this procedure assigns to a message will be used. + # This method is actually invoked in the call to statusBarMsg in + # place of the severity string. As long as the entry for the severity + # in statusBarMsg evaluates to a string type, statusBarMsg will be + # 'happy'. + if self._quiet: + return 'R' + return severity + + def __checkTC(self, element, gridTR): + # The QPF6hr, SnowAmt6hr, and PoP12hr grids have specific time + # constraints that the respective grids must adhere to. In other + # words, it's not acceptable to this procedure for the QPF6hr grid, + # for example, to be stretched to 12 hours long. This method makes + # sure each of the grids exactly conforms to the time constraint + # defintion. The method returns True if good, False if bad. If, for + # some reason, the method gets called with some other element, the + # method will return True. + if element == 'QPF6hr' or element == 'SnowAmt6hr': + startHourTup = (0, 6, 12, 18) + goodDuration = 6 * 3600 + elif element == 'PoP12hr': + startHourTup = (0, 12) + goodDuration = 12 * 3600 + else: + return True + + if gridTR.startTime().hour in startHourTup and \ + gridTR.duration() == goodDuration: + return True + return False + + def _runSnowAmt6hrQPF6hrCheck(self, timeRange): + # This method implements the check that if SnowAmt6hr >= 0.5, then + # QPF6hr must be >= 0.01. + + # If there are locks, post urgent messages and return from the method. + snowLockMe, qpfLockMe, snowLockOther, qpfLockOther = \ + self.__checkLockedStatus(['SnowAmt6hr', 'QPF6hr']) + if snowLockMe or qpfLockMe or snowLockOther or qpfLockOther: + # Something's locked, create messages. 
+ self._makeLockMsgs( + snowLockMe, qpfLockMe, snowLockOther, qpfLockOther, + 'SnowAmt6hr', 'QPF6hr', 'snowLockMe', 'qpfLockMe', + 'snowLockOther', 'qpfLockOther', 'SnowAmt6hr/QPF6hr') + return + + # Make sure there are actually SnowAmt6hr grids in the time range. + # The self.getGridInfo command will return an empty list if there + # are no grids in the time range. This is more efficient than using + # self.getGrids with mode='First' and noDataError=0. + # The getGridInfo method is from SmartScript + snowAmtInfoList = self.getGridInfo( + 'Fcst', 'SnowAmt6hr', 'SFC', timeRange) + if snowAmtInfoList == []: + message = self._getMsg( + 'noGrids', element='SnowAmt6hr', timeRange=timeRange, + method='SnowAmt6hr/QPF6hr') + # The statusBarMsg method is from SmartScript + self.statusBarMsg(message, self.__getMsgSeverity('U')) + # I return instead of aborting because the user may have asked for + # other tests that do not have missing grid problems. + return + # One might ask why I don't just return the result of self.getGrids + # to a variable and iterate over that. I'm trying to minimize the + # memory footprint of the procedure. Reading all the grids into a + # variable could be a fairly large memory hit. I believe the construct + # below only reads one SnowAmt6hr grid at a time into memory, the one + # that's being checked. (But I can't find the reference that supports + # my belief.) By using the cache=0 switch on all the self.getGrids + # command, I prevent the GFE from saving the grids into memory for me. + # (At least, that's what I think the cache=0 switch does. The + # SmartScript documentation is a little vague on this point.) + # The Python builtin command enumerate loops over an iterable object + # and returns a 2-tuple containing the current index of the + # iteration and the object at that index. 
In cases where I need + # both the index and the object, I think this construct is more + # elegant than: + # for i in xrange(len(iterableObject)): + # object = iterableObject[i] + for snowAmtIndex, snowAmtGrid in enumerate(self.getGrids( + 'Fcst', 'SnowAmt6hr', 'SFC', timeRange, mode='List', cache=0)): + gridTR = snowAmtInfoList[snowAmtIndex].gridTime() + # Check to make sure the grid meets it's time constraints. + if not self.__checkTC('SnowAmt6hr', gridTR): + message = self._getMsg( + 'badTC', element='SnowAmt6hr', timeRange=gridTR) + self.statusBarMsg(message, self.__getMsgSeverity('U')) + continue + # around is from SmartScript + snowAmtGrid = around(snowAmtGrid, 1) + # The greater_equal method is from Numeric. For the given array + # and threshold, a new array of the same dimensions as the input + # array is returned. The new array has the value 1 where the + # input array was greater than or equal to the threshold and + # has the value 0 elsewhere. + # The getGridInfo method is from SmartScript + halfInchMask = greater_equal(snowAmtGrid, 0.5 - snowAmtTol) + qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', gridTR) + # There should always be more QPF6hr grids than SnowAmt6hr grids, + # so if qpfInfoList is empty, then there are missing QPF6hr + # grids. Otherwise, qpfInfoList will have length 1 because + # SnowAmt6hr and QPF6hr have the same time constrain. However, + # the QPF6hr grid that overlaps the SnowAmt6hr grid will still + # need to be checked to make sure it hasn't been stretched. + if qpfInfoList == []: + message = self._getMsg( + 'noGrids', element='QPF6hr', timeRange=gridTR, + method='SnowAmt6hr/QPF6hr') + # The statusBarMsg is from SmartScript + self.statusBarMsg(message, self.__getMsgSeverity('U')) + continue + # I don't need the noDataError=0 in the self.getGrids call + # because if there were no grids in the gridTR, the previous + # if block would have caught that. 
+ # The getGrids method is from SmartScript + qpfGrid = self.getGrids( + 'Fcst', 'QPF6hr', 'SFC', gridTR, mode='First', cache=0) + if not self.__checkTC('QPF6hr', gridTR): + message = self._getMsg( + 'badTC', element='QPF6hr', timeRange=gridTR) + # The statusBarMsg method is from SmartScrtipt + self.statusBarMsg(message, self.__getMsgSeverity('U')) + continue + # If we get here, then we have a SnowAmt6hr grid and a QPF6hr + # grid which meet their time constraints and are ready to be + # compared. + # around is from SmartScript + qpfGrid = around(qpfGrid, 2) + # The less method is from Numeric. It behaves analogously to + # the greater_equal method described above using less than for + # the comparison. + qpfMask = less(qpfGrid, 0.01 - qpfTol) + # The following is the "truth" table for the logical + # comparison. + # SnowAmt6hr >= 0.5, 1; SnowAmt6hr < 0.5, 0 + # QPF6hr < 0.01, 1; QPF6hr >= 0.01, 0 + # SnowAmt6hr >= 0.5 (1) and QPF6hr < 0.01 (1) = 1 (Bad result) + # SnowAmt6hr >= 0.5 (1) and QPF6hr >= 0.01 (0) = 0 (Good result) + # SnowAmt6hr < 0.5 (0) and QPF6hr < 0.01 (1) = 0 (Good result) + # SnowAmt6hr < 0.5 (0) and QPF6hr >= 0.01 (0) = 0 (Good result) + # The logical_and method is from Numeric. A logical and comparison + # results in a "True" value if both compared elements are "True". + # Otherwise, the result is "False". + consistMask = logical_and(halfInchMask, qpfMask) + # Now, apply the CWA mask. There's an assumption here that + # all offices will use a mask and provide a valid one, which + # means this step does something meaningful. If that assumption + # does not hold, then the next statement doesn't actually + # change anything, even though each and every grid point has a + # comparison check made. + # where is from Numeric. The first argument is a mask. + # The second argument is/are the value/values to use at the + # array points where the mask is one. 
The third argument + # is/are the value/values to use at the array points + # where the mask is zero. For this comparison, I want + # the values of consistMask where self.cwaMask is one and + # I want the "good result", which is zero, where + # self.cwaMask is zero. + consistMask[logical_not(self.cwaMask)] = 0 + # The ravel and sometrue methods are from Numeric. + if sometrue(ravel(consistMask)): + # The ravel method reduces the rank of the array by one. + # Since we had a 2-d array, the ravel function creates a + # 1-d array (a vector) such that reading the 2-d array from + # left-to-right, top-to-bottom returns the same values as + # reading the 1-d array from left-to-right. The sometrue + # method performs a logical or on subsequent element pairs + # in the 1-d array and returns the final result. If + # there are inconsistencies, the result will be 1. + # The highlightGrids method is from SmartScript. + if inconGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmt6hr', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'QPF6hr', 'SFC', gridTR, inconGridColor) + # The createGrid method is from SmartScript + self.createGrid( + 'Fcst', 'SnowAmt6hrQPF6hr', 'SCALAR', + consistMask, gridTR, + descriptiveName='SnowAmt6hrQPF6hrInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmt6hrQPF6hr', 'SFC', gridTR, + tempGridColor) + self.inconsistent = True + # While not required, I like to terminate my methods with a return + # statement to make it clear this is where the method ends. + return + + def _runSnowAmt6hrWxCheck(self, timeRange): + # This implements the check that if SnowAmt6hr >= 0.1, then the Wx grid + # must contain S, SW, or IP, regardless of whether or not there is + # any freezing or liquid types. Finally, the check does not look at + # anything other than the Wx type. 
In other words, the check will be + # okay if SnowAmt != 0 and Wx has Chc:S:- or Def:SW:-- or Lkly:S:+. + + # If there are locks, post urgent messages and return from the method. + snowLockMe, wxLockMe, snowLockOther, wxLockOther = \ + self.__checkLockedStatus(['SnowAmt6hr', 'Wx']) + if snowLockMe or wxLockMe or snowLockOther or wxLockOther: + # Something's locked, create messages. + self._makeLockMsgs( + snowLockMe, wxLockMe, snowLockOther, wxLockOther, + 'SnowAmt6hr', 'Wx', 'snowLockMe', 'wxLockMe', + 'snowLockOther', 'wxLockOther', 'SnowAmt6hr/Wx') + return + + # Make sure there are actually SnowAmt6hr grids in the time range. + # The getGridInfo method is from SmartScript. + snowAmtInfoList = self.getGridInfo( + 'Fcst', 'SnowAmt6hr', 'SFC', timeRange) + if snowAmtInfoList == []: + message = self._getMsg( + 'noGrids', element='SnowAmt6hr', timeRange=timeRange, + method='SnowAmt6hr/Wx') + # The statusBarMsg method is from SmartScript. + self.statusBarMsg(message, self.__getMsgSeverity('U')) + # I return instead of aborting because the user may have asked for + # other tests that do not have missing grid problems. + return + for snowAmtIndex, snowAmtGrid in enumerate(self.getGrids( + 'Fcst', 'SnowAmt6hr', 'SFC', timeRange, mode='List', cache=0)): + gridTR = snowAmtInfoList[snowAmtIndex].gridTime() + # Make sure the snowAmtGrid meets the time constraints. + if not self.__checkTC('SnowAmt6hr', gridTR): + message = self._getMsg( + 'badTC', element='SnowAmt6hr', timeRange=gridTR) + # The statusBarMsg method is from SmartScript. + self.statusBarMsg(message, self.__getMsgSeverity('U')) + continue + # around is from SmartScript + snowAmtGrid = around(snowAmtGrid, 1) + # The greater_equal method is from Numeric. + # The getGridInfo method is from SmartScript. 
+ nonZeroMask = greater_equal(snowAmtGrid, 0.1 - snowAmtTol) + wxInfoList = self.getGridInfo('Fcst', 'Wx', 'SFC', gridTR) + # Check for Wx grid in gridTR + if wxInfoList == []: + message = self._getMsg( + 'noGrids', element='Wx', timeRange=gridTR, + method='SnowAmt6hr/Wx') + # The statusBarMsg method is from SmartScript + self.statusBarMsg(message, self.__getMsgSeverity('U')) + continue + # Now check the overlapping Wx grids. Initialize a totally + # inconsistent grid. + # ones is from Numeric + inconsistGrid = ones(nonZeroMask.shape, int) + for wxIndex, wxGrid in enumerate(self.getGrids( + 'Fcst', 'Wx', 'SFC', gridTR, mode='List', cache=0)): + # The wxMask method is from SmartScript + sMask = self.wxMask(wxGrid, ':S:') + swMask = self.wxMask(wxGrid, ':SW:') + ipMask = self.wxMask(wxGrid, ':IP:') + # The logical_or method is from Numeric. For the two input + # arrays, if both values are "False", then the result is + # "False". Otherwise, the result is "True". + snowMask = logical_or(logical_or(sMask, swMask), ipMask) + # I don't need these arrays any longer. Delete them to free + # up the memory they use. + del (sMask, swMask, ipMask) + # The where method is from Numeric + wxMask = logical_not(snowMask) + # "Truth" table for the logical comparison follows + # SnowAmt6hr >= 0.1, 1; SnowAmt6hr < 0.1, 0 + # Wx has S, SW, or IP, 0; Wx doesn't have S, SW, or IP, 1 + # SnowAmt6hr >= 0.1 (1) and Wx has (0) = 0 (Good result) + # SnowAmt6hr >= 0.1 (1) and Wx doesn't have (1) = 1 (Bad result) + # SnowAmt6hr < 0.1 (0) and Wx has (0) = 0 (Good result) + # SnowAmt6hr < 0.1 (0) and Wx doesn't have (1) = 0 (Good result) + # + # The logical_and, where, sometrue, and ravel methods are all + # from Numeric. + consistMask = logical_and(nonZeroMask, wxMask) + consistMask[logical_not(self.cwaMask)] = 0 + # Update inconsistGrid to be the current state of the + # inconsistencies. 
+ inconsistGrid = logical_and(inconsistGrid, consistMask) + if not sometrue(ravel(inconsistGrid)): + # There were no longer any inconsistencies between + # SnowAmt6hr and Wx. + break + else: + # This block will only execute if the for loop runs to + # completion, i.e., the break statement is not executed. + # So, if we get here, we have inconsistencies and need to + # highlight the appropriate grids. + # The highlightGrids method is from SmartScript. + if inconGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmt6hr', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'Wx', 'SFC', gridTR, inconGridColor) + # The createGrid method is from SmartScript + self.createGrid( + 'Fcst', 'SnowAmt6hrWx', 'SCALAR', inconsistGrid, gridTR, + descriptiveName='SnowAmt6hrWxInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmt6hrWx', 'SFC', gridTR, + tempGridColor) + self.inconsistent = True + return + + def _runQPF6hrPoPCheck(self, timeRange): + # This method implements the check that if any QPF6hr grid is non zero + # then one of the corresponding floating PoP grids must also be non + # zero. + + # If there are locks, post urgent messages and return from the method. + qpfLockMe, popLockMe, qpfLockOther, popLockOther = \ + self.__checkLockedStatus(['QPF6hr', 'PoP']) + if qpfLockMe or popLockMe or qpfLockOther or popLockOther: + # Something's locked, create messages. + self._makeLockMsgs( + qpfLockMe, popLockMe, qpfLockOther, popLockOther, + 'QPF6hr', 'PoP', 'qpfLockMe', 'popLockMe', + 'qpfLockOther', 'popLockOther', 'QPF6hr/PoP') + return + + # The getGridInfo method is from SmartScript. + qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', timeRange) + # Make sure there are actually QPF6hr grids in the time range. + if qpfInfoList == []: + message = self._getMsg( + 'noGrids', element='QPF6hr', timeRange=timeRange, + method='QPF6hr/PoP') + # The statusBarMsg method is from SmartScript. 
+ self.statusBarMsg(message, self.__getMsgSeverity('U')) + # I return instead of aborting because the user may have asked for + # other tests that do not have missing grid problems. + return + for qpfIndex, qpfGrid in enumerate(self.getGrids( + 'Fcst', 'QPF6hr', 'SFC', timeRange, mode='List', cache=0)): + gridTR = qpfInfoList[qpfIndex].gridTime() + # Check the QPF6hr grid time constraints + if not self.__checkTC('QPF6hr', gridTR): + message = self._getMsg( + 'badTC', element='QPF6hr', timeRange=gridTR) + # The statusBarMsg method is from SmartScript. + self.statusBarMsg(message, self.__getMsgSeverity('U')) + continue + # around is from SmartScript + qpfGrid = around(qpfGrid, 2) + # The greater_equal method is from Numeric. The getGrids method + # is from SmartScript. + qpfNonZeroMask = greater_equal(qpfGrid, 0.01 - qpfTol) + popGrid = self.getGrids( + 'Fcst', 'PoP', 'SFC', gridTR, mode='Max', noDataError=0, + cache=0) + # Since I don't need to loop over the PoP grids, just get their + # max, I don't need to call getGridInfo like in other methods. + # With noDataError=0 in the getGrids call, if there are no grids, + # then the special Python value None will be returned. So, I can + # just check that to see if all the PoP grids are missing. If + # there were a gap in the PoP grids, that would not be caught. + # But, no one's PoP grids should ever have a gap in them, right? + if popGrid == None: + message = self._getMsg( + 'noGrids', element='PoP', timeRange=gridTR, + method='QPF6hr/PoP') + # The statusBarMsg method is from SmartScript. + self.statusBarMsg(message, self.__getMsgSeverity('U')) + continue + # The equal method is from Numeric. 
+ popZeroMask = equal(popGrid, 0) + # popZeroMask = 1 if PoP = 0; popZeroMask = 0 if PoP != 0 + # qpfNonZeroMask = 1 if QPF6hr > 0; qpfNonZeroMask = 0 if QPF6hr = 0 + # PoP = 0 (1) and QPF6hr = 0 (0) => 0 (Good result) + # PoP != 0 (0) and QPF6hr = 0 (0) => 0 (Good result) + # PoP != 0 (0) and QPF6hr > 0 (1) => 0 (Good result) + # PoP = 0 (1) and QPF6hr > 0 (1) => 1 (Bad result) + # + # The logical_and, where, sometrue, and ravel methods are all + # from Numeric. + consistMask = logical_and(qpfNonZeroMask, popZeroMask) + consistMask[logical_not(self.cwaMask)] = 0 + if sometrue(ravel(consistMask)): + # The good result is if the logical_and returns zeros + # for every grid point, that is "none true". So, if + # the sometrue method evaluates True, there are + # inconsistencies. + # The createGrid and highlightGrids methods are from + # SmartScript. + self.createGrid( + 'Fcst', 'QPF6hrPoP', 'SCALAR', + consistMask, gridTR, + descriptiveName='QPF6hrPoPInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'QPF6hrPoP', 'SFC', gridTR, + tempGridColor) + if inconGridColor: + self.highlightGrids( + 'Fcst', 'QPF6hr', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'PoP', 'SFC', gridTR, inconGridColor) + self.inconsistent = True + return + + def _runQPF6hrWxCheck(self, timeRange): + # This method implements the check that if QPF6hr non zero, then the + # corresponding Wx grids must contain a precipitable Wx type. Note: + # the method only checks the Wx type, no cov/prob, no inten, etc. + + # If there are locks, post urgent messages and return from the method. + qpfLockMe, wxLockMe, qpfLockOther, wxLockOther = \ + self.__checkLockedStatus(['QPF6hr', 'Wx']) + if qpfLockMe or wxLockMe or qpfLockOther or wxLockOther: + # Something's locked, create messages. 
+ self._makeLockMsgs( + qpfLockMe, wxLockMe, qpfLockOther, wxLockOther, + 'QPF6hr', 'Wx', 'qpfLockMe', 'wxLockMe', + 'qpfLockOther', 'wxLockOther', 'QPF6hr/Wx') + return + + # The getGridInfo method is from SmartScript. + qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', timeRange) + # Make sure there are actually QPF6hr grids in the time range. + if qpfInfoList == []: + message = self._getMsg( + 'noGrids', element='QPF6hr', timeRange=timeRange, + method='QPF6hr/Wx') + # The statusBarMsg method is from SmartScript. + self.statusBarMsg(message, self.__getMsgSeverity('U')) + # I return instead of aborting because the user may have asked for + # other tests that do not have missing grid problems. + return + for qpfIndex, qpfGrid in enumerate(self.getGrids( + 'Fcst', 'QPF6hr', 'SFC', timeRange, mode='List', noDataError=0, + cache=0)): + gridTR = qpfInfoList[qpfIndex].gridTime() + # Make sure the QPF6hr grid meets the time constraints + if not self.__checkTC('QPF6hr', gridTR): + message = self._getMsg( + 'badTC', element='QPF6hr', timeRange=gridTR) + # The statusBarMsg method is from SmartScript. + self.statusBarMsg(message, self.__getMsgSeverity('U')) + continue + # around is from SmartScript + qpfGrid = around(qpfGrid, 2) + # The greater_equal method is from Numeric. + qpfNonZeroMask = greater_equal(qpfGrid, 0.01 - qpfTol) + # The getGridInfo method is from SmartScript. + wxInfoList = self.getGridInfo('Fcst', 'Wx', 'SFC', gridTR) + # Make sure there are Wx grids overlapping the QPF6hr grid + if wxInfoList == []: + message = self._getMsg( + 'noGrids', element='Wx', timeRange=gridTR, + method='QPF6hr/Wx') + # The statusBarMsg method is from SmartScript. + self.statusBarMsg(message, self.__getMsgSeverity('U')) + continue + # Initialize a totally inconsistent grid and loop over the + # overlapping Wx grids. 
+ inconsistGrid = ones(qpfNonZeroMask.shape, int) + for wxIndex, wxGrid in enumerate(self.getGrids( + 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, + cache=0)): + # The wxMask method is from SmartScript. + sMask = self.wxMask(wxGrid, ':S:') + swMask = self.wxMask(wxGrid, ':SW:') + ipMask = self.wxMask(wxGrid, ':IP:') + # The logical_or method is from Numeric. + snowMask = logical_or(logical_or(sMask, swMask), ipMask) + # I don't need these three grids any longer, so delete them + # and free up their memory. + del (sMask, swMask, ipMask) + rMask = self.wxMask(wxGrid, ':R:') + rwMask = self.wxMask(wxGrid, ':RW:') + lMask = self.wxMask(wxGrid, ':L:') + zlMask = self.wxMask(wxGrid, ':ZL:') + zrMask = self.wxMask(wxGrid, ':ZR:') + # The logical_or method is from Numeric. + rainMask = logical_or( + rMask, logical_or( + rwMask, logical_or( + lMask, logical_or(zlMask, zrMask)))) + # Again, I don't need these grids any longer, so delete them + # and free up their memory. + del (rMask, rwMask, lMask, zlMask, zrMask) + precipMask = logical_or(snowMask, rainMask) + del (snowMask, rainMask) + wxMask = logical_not(precipMask) + # QPF6hr >= 0.01, 1; QPF6hr < 0.01, 0 + # Wx has precip, 0; Wx doesn't have precip, 1 + # QPF6hr >= 0.01 (1) and Wx has (0) = 0 (Good result) + # QPF6hr >= 0.01 (1) and Wx doesn't have (1) = 1 (Bad result) + # QPF6hr < 0.01 (0) and Wx has (0) = 0 (Good result) + # QPF6hr < 0.01 (0) and Wx doesn't have (1) = 0 (Good result) + # + # The logical_and, where, sometrue, and ravel methods are all + # from Numeric. + consistMask = logical_and(qpfNonZeroMask, wxMask) + consistMask[logical_not(self.cwaMask)] = 0 + # Update the inconsistGrid to the current state of the + # inconsistencies. + inconsistGrid = logical_and(inconsistGrid, consistMask) + if not sometrue(ravel(inconsistGrid)): + # There were no longer any inconsistencies between the Wx + # grids and the QPF6hr grid. 
+ break + else: + # This block will only execute if the for loop runs to + # completion, i.e., the break statement is not executed. + # So, if we get here, we have inconsistencies and need to + # highlight the appropriate grids. + # The highlightGrids method is from SmartScript. + if inconGridColor: + self.highlightGrids( + 'Fcst', 'QPF6hr', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'Wx', 'SFC', gridTR, inconGridColor) + + # The createGrid method is from SmartScript. + self.createGrid( + 'Fcst', 'QPF6hrWx', 'SCALAR', inconsistGrid, gridTR, + descriptiveName='QPF6hrWxInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'QPF6hrWx', 'SFC', gridTR, + tempGridColor) + self.inconsistent = True + return + + def _runPoP12hrQPF6hrCheck(self, timeRange): + # This method implements the check that if any PoP12hr grid + # is >= 50%, then at least one of the two corresponding QPF6hr grids + # must be non zero. + + # If there are locks, post urgent messages and return from the method. + qpfLockMe, popLockMe, qpfLockOther, popLockOther = \ + self.__checkLockedStatus(['QPF6hr', 'PoP12hr']) + if qpfLockMe or popLockMe or qpfLockOther or popLockOther: + # Something's locked, create messages. + self._makeLockMsgs( + qpfLockMe, popLockMe, qpfLockOther, popLockOther, + 'QPF6hr', 'PoP12hr', 'qpfLockMe', 'popLockMe', + 'qpfLockOther', 'popLockOther', 'PoP12hr/QPF6hr') + return + + # The getGridInfo method is from SmartScript. + # Make sure there are actually PoP12hr grids in the time range + popInfoList = self.getGridInfo('Fcst', 'PoP12hr', 'SFC', timeRange) + if popInfoList == []: + message = self._getMsg( + 'noGrids', element='PoP12hr', timeRange=timeRange, + method='PoP12hr/QPF6hr') + # The statusBarMsg method is from SmartScript. 
+ self.statusBarMsg(message, self.__getMsgSeverity('U')) + # I return instead of aborting because the user may have asked for + # other tests that do not have missing grid problems. + return + # This is the one check where it will almost always be the case that + # the "controlling" element (PoP12hr) will extend later in time than + # the "checked" element (QPF6hr). We don't want a lot of annoying + # pop-up messages for missing QPF6hr grids for that case. I will + # determine the end time of the last QPF6hr grid, adjust it back to the + # nearest 00 or 12 UTC time to align it with the PoP12hr grid, and then + # check the end time of the last PoP12hr grid. If the end time for + # the QPF6hr grid is earlier, I will adjust the timeRange variable + # inside this method to end with the QPF6hr grids. + # The getGridInfo method is from SmartScript. + qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', timeRange) + # Make sure there are actually QPF6hr grids in the time range + if qpfInfoList == []: + message = self._getMsg( + 'noGrids', element='QPF6hr', timeRange=timeRange, + method='PoP12hr/QPF6hr') + self.statusBarMsg(message, self.__getMsgSeverity('U')) + # I return instead of aborting because the user may have asked for + # other tests that do not have missing grid problems. 
+ return + lastQPFTR = qpfInfoList[-1].gridTime() + qpfEndTime = lastQPFTR.endTime().unixTime() + qpfEndHr = lastQPFTR.endTime().hour + qpfEndTime -= ((qpfEndHr % 12) * 3600) + popEndTime = popInfoList[-1].gridTime().endTime().unixTime() + if popEndTime > qpfEndTime: + # Adjust time range to QPF6hr time range + qpfStartTime = qpfInfoList[0].gridTime().startTime().unixTime() + qpfDuration = (qpfEndTime - qpfStartTime) / 3600 + offset = (qpfStartTime - \ + self.timeRange0_1.startTime().unixTime()) / 3600 + timeRange = self.createTimeRange( + offset, offset+qpfDuration, 'Zulu') + message = self._getMsg( + 'changeTR', method='PoP12hr/QPF6hr', timeRange=timeRange) + self.statusBarMsg(message, 'R') + # Because the timeRange has changed, popInfoList needs to be + # updated. qpfInfoList will be updated later. + # The getGridInfo method is from SmartScript. + popInfoList = self.getGridInfo('Fcst', 'PoP12hr', 'SFC', timeRange) + # Now, it's possible there were gaps in the PoP12hr grids and the + # new time range spans a gap. So, we have to check for grid + # existence again. + if popInfoList == []: + message = self._getMsg( + 'noGrids', element='PoP12hr', timeRange=timeRange, + method='PoP12hr/QPF6hr') + self.statusBarMsg(message, self.__getMsgSeverity('U')) + return + for popIndex, popGrid in enumerate(self.getGrids( + 'Fcst', 'PoP12hr', 'SFC', timeRange, mode='List', cache=0)): + gridTR = popInfoList[popIndex].gridTime() + qpfInfoList = self.getGridInfo('Fcst', 'QPF6hr', 'SFC', gridTR) + # Check for existence of QPF6hr grids in the time range. 
+ if qpfInfoList == []: +            message = self._getMsg( +                'noGrids', element='QPF6hr', timeRange=gridTR, +                method='PoP12hr/QPF6hr') +            self.statusBarMsg(message, self.__getMsgSeverity('U')) +            continue +        # Check the PoP12hr time constraints +        if not self.__checkTC('PoP12hr', gridTR): +            message = self._getMsg( +                'badTC', element='PoP12hr', timeRange=gridTR) +            self.statusBarMsg(message, self.__getMsgSeverity('U')) +            continue +        # The greater_equal method is from Numeric. +        pop50Mask = greater_equal(popGrid, 50) +        # Initialize a totally inconsistent grid. +        inconsistGrid = ones(pop50Mask.shape, int) +        for qpfIndex, qpfGrid in enumerate(self.getGrids( +            'Fcst', 'QPF6hr', 'SFC', gridTR, mode='List', cache=0)): +            qpfGridTR = qpfInfoList[qpfIndex].gridTime() +            # Check the QPF6hr time constraints +            if not self.__checkTC('QPF6hr', qpfGridTR): +                message = self._getMsg( +                    'badTC', element='QPF6hr', timeRange=qpfGridTR) +                self.statusBarMsg(message, self.__getMsgSeverity('U')) +                # If one of the QPF6hr grids has a bad time constraint, +                # then I only have one QPF6hr grid. Time to break. +                break +            # around is from SmartScript +            qpfGrid = around(qpfGrid, 2) +            # The less method is from Numeric. +            qpfMask = less(qpfGrid, 0.01 - qpfTol) +            # The following is the "truth" table for the logical +            # comparison. +            # PoP12hr >= 50, 1; PoP12hr < 50, 0 +            # QPF6hr < 0.01, 1; QPF6hr >= 0.01, 0 +            # PoP12hr >= 50 (1) and QPF6hr < 0.01 (1) = 1 (Bad result) +            # PoP12hr >= 50 (1) and QPF6hr >= 0.01 (0) = 0 (Good result) +            # PoP12hr < 50 (0) and QPF6hr < 0.01 (1) = 0 (Good result) +            # PoP12hr < 50 (0) and QPF6hr >= 0.01 (0) = 0 (Good result) +            # logical_and is from Numeric +            # The logical_and, where, sometrue, and ravel methods are all +            # from Numeric. +            consistMask = logical_and(pop50Mask, qpfMask) +            consistMask[logical_not(self.cwaMask)] = 0 +            # Update the inconsistentGrid with the state of the +            # inconsistencies. 
+ inconsistGrid = logical_and(inconsistGrid, consistMask) + # ravel and sometrue are from Numeric + if not sometrue(ravel(inconsistGrid)): + # There were no longer any inconsistencies between the + # QPF6hr grids and PoP12hr grid. + break + else: + # This else block will only execute if the for loop exits + # "naturally", i.e., the above break statement didn't execute. + # This means there were inconsistencies. + # The highlightGrids method is from SmartScript. +## lin_index = nonzero(ravel(inconsistGrid)) +## sh = list(shape(inconsistGrid)) +## sh.reverse() +## new_index = zeros((len(lin_index), len(sh))) +## mod = zeros(len(lin_index)) +## for j in arange(len(lin_index)): +## count = len(sh) +## for i in sh: +## lin_index[j], mod[j] = divmod(lin_index[j], i) +## count = count - 1 +## new_index[j, count] = mod[j] +## print new_index +## print popGrid[0,0], qpfGrid[0,0] + if inconGridColor: + self.highlightGrids( + 'Fcst', 'PoP12hr', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'QPF6hr', 'SFC', gridTR, inconGridColor) + self.createGrid( + 'Fcst', 'PoP12hrQPF6hr', 'SCALAR', inconsistGrid, gridTR, + descriptiveName='PoP12hrQPF6hrInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'PoP12hrQPF6hr', 'SFC', gridTR, + tempGridColor) + self.inconsistent = True + return + + def _makeLockMsgs( + self, lockMe1, lockMe2, lockOther1, lockOther2, element1, element2, + lockMeKey1, lockMeKey2, lockOtherKey1, lockOtherKey2, method): + # As I went through the five check methods, I noted that this code + # was basically being repeated over and over again. So, here's a + # case where I took the repeated code, made it abstract, and turned + # it into a callable method. Now where each of the five methods had + # about 20 lines of code to do this, they now have only 5, which is + # a fairly substantial decrease. 
The trade-off is in somewhat + # lessened code readability because of having to jump from method + # to method to track the code. + # Below, I assign the call to _getMsg to a temporary variable + # called message for readability. Embedding the call to _getMsg + # in the call to statusBarMsg makes those calls much harder to + # follow. + # The statusBarMsg method is from SmartScript. + if lockMe1: + message = self._getMsg( + lockMeKey1, method=method, element=element1) + self.statusBarMsg( + message, self.__getMsgSeverity('U')) + if lockMe2: + message = self._getMsg( + lockMeKey2, method=method, element=element2) + self.statusBarMsg( + message, self.__getMsgSeverity('U')) + if lockOther1: + message = self._getMsg( + lockOtherKey1, method=method, element=element1) + self.statusBarMsg( + message, self.__getMsgSeverity('U')) + if lockOther2: + message = self._getMsg( + lockOtherKey2, method=method, element=element2) + self.statusBarMsg( + message, self.__getMsgSeverity('U')) + return + + def _msgDict(self): + # Since I seem to be incapable of writing concise error messages, + # I decided to capture the error messages in a separate method. + # Because I tend to be verbose, I like to use triple quoted strings + # for messages since this helps minimize (not eliminate) characters + # which extend beyond the right margin of the editor window. But triple + # quoted strings disrupt the indentation patterns of the code, making + # the code harder to read. By capturing all the triple quoted strings + # in a separate method, the indentation issue is mitigated. Doing this + # does add some complexity to the code, but it's a fair trade-off in my + # mind. This method is just a dictionary of all the error messages, + # which may contain string formatting code place holders. Another + # method, _getMsg, will look up the message boiler plates here and have + # the logic to correctly pass the needed variables to the string + # formatting codes. 
+ return { +'complete': 'NDFD_QPF_Checks complete.', +'0_240': +'''The NDFD_QPF_Checks procedure did not run over the 0 to 240 hour time period, +it ran over %s. This may be what you desired.''', +'cwaMask': +'''The procedure was not able to use the CWA edit area, %s, +provided in the configuration. You should inform the person responsible for procedures of +this problem. The procedure ran over the whole domain.''', +'incon': 'NDFD_QPF_Checks complete. Inconsistencies found!', +'snowLockMe': +'''You have the %s grid locked. Please save the %s grid. The %s +check was not run.''', +'qpfLockMe': +'''You have the %s grid locked. Please save the %s grid. The %s check was +not run.''', +'snowLockOther': +'''Another user has the %s grid locked. Please have that user save the %s grid. The +%s check was not run.''', +'qpfLockOther': +'''Another user has the %s grid locked. Please have that user save the %s grid. The +%s check was not run.''', +'wxLockMe': +'''You have the %s grid locked. Please save the %s grid. The %s check was not run.''', +'wxLockOther': +'''Another user has the %s grid locked. Please have that user save the %s grid. The %s +check was not run.''', +'popLockMe': +'''You have the %s grid locked. Please save the %s grid. The %s check was +not run.''', +'popLockOther': +'''Another user has the %s grid locked. Please have that user save the %s grid. The +%s check was not run.''', +'noGrids': +'''There are no %s grids in the time range, %s. +The %s Check skipped the time range.''', +'changeTR': +'''The time range of the %s check was changed to ensure the PoP12hr grid is not checked +beyond the time of the last QPF6hr grid. The time range used was %s.''', +'badTC': +'''A %s grid has the following time range: %s, +which does not adhere to the time constraint requirement. This %s grid has not been consistency checked at all. 
+Please fix the grid and re-run the procedure.''', +} + + def _getMsg(self, key, timeRange=None, method=None, element=None): + # This method looks up the needed error message by passing key to + # _msgDict. The other parameters, if provided, are used to expand + # the embedded string formatting codes. The resulting message is + # passed back to the caller. + message = self._msgDict().get(key, '') + if key == '0_240': + message = message % str(timeRange) + return message + if key == 'cwaMask': + message = message % cwaEditArea + return message + if key == 'incon': + if inconGridColor and tempGridColor: + message = '%s Inconsistent grids highlighted %s.\nTemporary grids highlighted %s.' % (message, inconGridColor, tempGridColor) + return message + if inconGridColor: + message = '%s Inconsistent grids highlighted %s.' % ( + message, inconGridColor) + return message + if tempGridColor: + message = '%s Temporary grids highlighted %s.' % ( + message, tempGridColor) + return message + return message + if key == 'snowLockMe' or key == 'qpfLockMe' or key == 'wxLockMe' or \ + key == 'snowLockOther' or key == 'qpfLockOther' or \ + key == 'wxLockOther' or key == 'popLockMe' or \ + key == 'popLockOther': + message = message % (element, element, method) + return message + if key == 'noGrids': + message = message % (element, str(timeRange), method) + return message + if key == 'changeTR': + message = message % (method, str(timeRange)) + return message + if key == 'badTC': + message = message % (element, str(timeRange), element) + return message + # If for some reason the key look-up failed, then the message + # variable will be the empty string, '', and this will be returned. + # Since the calling method expects a string to be returned, I must + # ensure that this happens. + return message + + def execute(self, timeRange, varDict): + # Are we in quiet mode? The variableList above does NOT have a + # variable for 'Quiet', by design. 
When run interactively, some error + # conditions will generate pop-up messages. If an office decides to + # run this procedure as part of an over-arching check procedure, they + # can choose to turn the pop-up messages into routine messages by + # passing in a varDict with a 'Quiet' key that evaluates to 'True'. + # In Python, you can use the 'get' method on a dictionary to test + # for the existence of a key. If the key exists, then 'get' returns + # the value. If the key doesn't exist, then the second argument of + # the 'get' call is returned. If you don't provide a second argument + # to the 'get' call and the key is not found, then None is returned. + # As you can see below, my call to 'get' will return False if the key + # 'Quiet' is not in varDict. The 'get' method allows you to avoid + # constructs like: + # try: + # self._quiet = varDict['Quiet'] + # except KeyError: + # self._quiet = False + # I don't know for sure, but I'd be willing to bet that the 'get' + # method is just a wrapper to the 'try/except' construct very + # similar to the one demonstrated. + self._quiet = varDict.get('Quiet', False) +## self._quiet = True + + # Make sure the configuration values are the correct types. + self.__checkConfigValueTypes() + # createTimeRange is from SmartScript + timeRange0_240 = self.createTimeRange(0, 240, 'Zulu') + checkCleanup = varDict.get('Check_Cleanup', 'Check') + self.__cleanup(timeRange0_240) + if checkCleanup == 'Cleanup': + self.statusBarMsg(self._getMsg('complete'), 'R') + self.cancel() + elementList = ( + 'PoP', 'QPF', 'Wx', 'SnowAmt', 'QPF6hr', 'SnowAmt6hr', 'PoP12hr') + for element in elementList: + self.loadParm('Fcst', element, 'SFC') + if timeRange.endTime().unixTime() - timeRange.startTime().unixTime() < \ + 3600: # No time range selected, use 0 to 240 hour range + timeRange = timeRange0_240 + + # If the user has a time range swept out, send an informational + # message. 
+ if (timeRange.startTime().unixTime() != \ + timeRange0_240.startTime().unixTime()) or \ + (timeRange.endTime().unixTime() != \ + timeRange0_240.endTime().unixTime()) or \ + (timeRange.duration() != timeRange0_240.duration()): + # What the incredibly dense expression in the above if statement + # does is compare the start and end of the time range to the start + # and end of the 0-240 hour time range. If either are different, + # then a time range was swept out. + self.statusBarMsg( + self._getMsg( + '0_240', timeRange=timeRange), self.__getMsgSeverity('S')) + + # I'll need to know the unix time of 00Z so I can determine the + # start time of temporary grids later. I'll need this in more than + # one of the methods called later, so this will become an instance + # variable, i.e., prefixed with "self." I also need an instance + # variable that flags whether or not there were inconsistent grids. + self.timeRange0_1 = self.createTimeRange(0, 1, 'Zulu') + self.inconsistent = False + + # A CWA edit area can be provided in the configuration section. + # Attempt to encode that edit area as a Numeric Python mask so that + # the later checks are limited to the edit area. The GFE is not very + # friendly if the encoding fails. The GFE will send a nasty message + # to the user, but continue executing the procedure. No trappable + # error is thrown. As of this writing, the GFE appears to create an + # array of shape (0, 0) if the encoding cannot be done, so I will + # check for that and, if I find it, then set the edit area to the + # domain. + # encodeEditArea comes from SmartScript. For the points that are in + # the edit area, a value of one is assigned. Otherwise, a value of + # zero is assigned. + if cwaEditArea: + self.cwaMask = self.encodeEditArea(cwaEditArea) + if self.cwaMask.shape == (0, 0): + # Use the getGridInfo command to get information about the + # SnowAmt grid. From this, the grid size can be extracted. I + # could use getGridInfo on any valid GFE grid. 
+ # getGridInfo is from SmartScript +                snowAmtInfoList = self.getGridInfo( +                    'Fcst', 'SnowAmt6hr', 'SFC', timeRange) +                # I painfully discovered that the array shape is (y, x) +                gridSize = (snowAmtInfoList[0].gridLocation().gridSize().y, +                    snowAmtInfoList[0].gridLocation().gridSize().x) +                # ones is from Numeric. It creates an array of the given size +                # and data type where all values are one. +                self.cwaMask = ones(gridSize, int) +                self.statusBarMsg( +                    self._getMsg('cwaMask'), self.__getMsgSeverity('S')) +            else: +                snowAmtInfoList = self.getGridInfo( +                    'Fcst', 'SnowAmt6hr', 'SFC', timeRange) +                gridSize = (snowAmtInfoList[0].gridLocation().gridSize().y, +                    snowAmtInfoList[0].gridLocation().gridSize().x) +                self.cwaMask = ones(gridSize, int) + +        # Based on the user's input, run the appropriate checks. +        # By making each of these options a checkbox with only one option in +        # the VariableList above, if an option is unchecked then an empty +        # list, [], will be what's in varDict. If an option is checked then a +        # list with the value "Yes", ["Yes"], will be what's in varDict. In +        # Python, a conditional expression can be whether or not a data +        # structure is empty. In these cases, an empty data structure, +        # e.g., an empty list, an empty tuple, an empty dictionary, +        # conditionally test to False while non empty data structures +        # conditionally test to True. In the if statements below, every varDict +        # lookup returns a list: either [] or ["Yes"]. I think the constructs +        # below are more elegant and easier to understand. 
+ if varDict['Run SnowAmt6hr/QPF6hr Check?']: + # Call the SnowAmt6hr/QPF6hr check method + self._runSnowAmt6hrQPF6hrCheck(timeRange) + if varDict['Run SnowAmt6hr/Wx Check?']: + # Call the SnowAmt6hr/Wx check method + self._runSnowAmt6hrWxCheck(timeRange) + if varDict['Run QPF6hr/PoP Check?']: + # Call the QPF6hr/PoP check method + self._runQPF6hrPoPCheck(timeRange) + if varDict['Run QPF6hr/Wx Check?']: + # Call the QPF6hr/Wx check method + self._runQPF6hrWxCheck(timeRange) + if varDict['Run PoP12hr/QPF6hr Check?']: + # Call the PoP12hr/QPF6hr check method + self._runPoP12hrQPF6hrCheck(timeRange) + if self.inconsistent: + self.statusBarMsg(self._getMsg('incon'), self.__getMsgSeverity('S')) + else: + self.statusBarMsg(self._getMsg('complete'), 'R') diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/NDFDgridCheck.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/NDFDgridCheck.py index ad01f1e26e..74f5fc8737 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/NDFDgridCheck.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/NDFDgridCheck.py @@ -1,493 +1,493 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -#---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without -# technical support, and with no warranty, express or implied, as to -# its usefulness for any purpose. -# -# ER_NDFD_GridCheck # # Author: -# Chris Gibson NWSFO SLC 10/03 -# Updated for ER : Jim Hayes 7/03 -# Updated : Tom Mazza 7/04 -# -handles new NDFD times -# -includes configuration section -# -added logic to look for summer or winter grids as appropriate -# Developer email Chris.Gibson@noaa.gov, Thomas.Mazza@noaa.gov. -# -# Checks for grids in the Fcst database for each time required in the -# NDFD matrix. Specifies "groups" of weather elements for Public, Fire Weather, etc. -# Uses 00Z as base time for timerange (TR). Thus, no dependence on new model -# data for TR. -# -# 00Z to 12Z grids are needed for case of evening -# update after 00z. Elements and times are configured in dictionaries -# below. Uses getGridInfo to test if grid exists. -# ---------------------------------------------------------------------------- -# Known version issues: None Tested in GFE RPP 17/18/19 (Does NOT -# work in RPP18.5) -# Works in IFPS15; model name changes to be made for IFPS16 -# edit areas needed: None -# Smart tools needed: None -# Further work: Needs a getTime function to create time ranges for each -# element and depending on the time of day (issuance). -# Potentially this could eliminate the need for -# different tables for day and night shifts. 
-# - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -################################################################################################ -# -# Configuration Section -# -# Here is where to control whether this procedure shows up in GFE and, if so, under which menu; -# control the weather element groups available, i.e., remove "Marine" if in an inland office; -# and to control the start of the day shift, and the start and end of winter and summer to -# determine which public weather elements to check for. -# -availableElementGroups = ["Public", "Fire Weather", "Marine"] -# -# Marine can be eliminated for inland sites. It could be replaced with another group, but -# the elements and valid time info in the def marine_elements_dict_DayShift and _MidShift -# modules below need edited to include the appropriate infoprmation. -# -################################################################################# -# -# Control whether this procedure shows up in GFE and, if so, under which menu: -# -##MenuItems = ["Consistency"] -MenuItems = [""] -# -# -################################################################################# -# -# -# Control the weather element groups available: -# -availableElementGroups = [] -availableElementGroups.append(("Public")) -availableElementGroups.append(("Fire Weather")) -#availableElementGroups.append(("Marine")) ## also uncomment the marine section near the bottom - -VariableList = [] -VariableList.append(("Elements List for Grid Completeness Check" , "", "label")) -VariableList.append(("Which element group(s)?" 
, - ["Public"], "check", - availableElementGroups)) - -# -# -################################################################################# -# -# -# Control the start of the day shift: -# (Mid Shift hard-coded to begin at 00Z) -# -# -startDayShift = 15 # (15Z) -# -# -################################################################################# -# -# -# Control the start and end of winter and summer: -# (Seasons can overlap or be disjoint - e.g., entries of 10, 5, 5 and 10 below, -# respectively, mean May and October are part of both winter and summer, which, -# in turn, means that both winter-specific and summer-specific elements will -# be checked for.) -# -# -StartWinter = 10 # (October is first month of Winter) -EndWinter = 5 # (May is last month of Winter) -StartSummer = 5 # (May is first month of Summer) -EndSummer = 10 # (October is last month of Summer) -# -# End Configuration Section -# -################################################################################################ - - -import SmartScript, string, time -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def public_elements_dict_DayShift_Both(self): ## used 15Z-2359Z - return [ - ## Format ("WeatherElement", Startgrid, interval, Finalgrid), Start/Final based on 00z. 
- ("MaxT", 36, 24, 191), - ("MinT", 24, 24, 180), - ("T", 24, 3, 192), - ("Td", 24, 3, 192), - ("HeatIndex", 24, 3, 96), - ("WindChill", 24, 3, 96), - ("RH", 24, 3, 192), - ("PoP", 24, 12, 192), - ("PoP12hr", 24, 12, 192), - ("Sky", 24, 3, 192), - ("Wind", 24, 3, 192), - ("WindGust", 24, 3, 144), - ("Wx", 24, 3, 192), - ("QPF", 24, 6, 95), - ("QPF6hr", 24, 6, 95), - ("SnowAmt", 24, 6, 71), - ("SnowAmt6hr", 24, 6, 71), - ] - - def public_elements_dict_MidShift_Both(self): ## used 00Z-1459Z - return [ - ("MaxT", 12, 24, 167), - ("MinT", 24, 24, 156), - ("T", 12, 3, 168), - ("Td", 12, 3, 168), - ("HeatIndex", 12, 3, 72), - ("WindChill", 12, 3, 72), - ("RH", 12, 3, 168), - ("PoP", 12, 12, 168), - ("PoP12hr", 12, 12, 168), - ("Sky", 12, 3, 168), - ("Wind", 12, 3,168), - ("WindGust", 12, 3, 132), - ("Wx", 12,3, 168), - ("QPF", 12, 6, 71), - ("QPF6hr", 12, 6, 71), - ("SnowAmt", 12, 6, 47), - ("SnowAmt6hr", 12, 6, 47) - ] - - def public_elements_dict_DayShift_Winter(self): ## used 15Z-2359Z - return [ - ## Format ("WeatherElement", Startgrid, interval, Finalgrid), Start/Final based on 00z. 
- ("MaxT", 36, 24, 191), - ("MinT", 24, 24, 180), - ("T", 24, 3, 192), - ("Td", 24, 3, 192), - ("WindChill", 24, 3, 96), - ("RH", 24, 3, 192), - ("PoP", 24, 12, 192), - ("PoP12hr", 24, 12, 192), - ("Sky", 24, 3, 192), - ("Wind", 24, 3, 192), - ("WindGust", 24, 3, 144), - ("Wx", 24, 3, 192), - ("QPF", 24, 6, 95), - ("QPF6hr", 24, 6, 95), - ("SnowAmt", 24, 6, 71), - ("SnowAmt6hr", 24, 6, 71), - ] - - def public_elements_dict_MidShift_Winter(self): ## used 00Z-1459Z - return [ - ("MaxT", 12, 24, 167), - ("MinT", 24, 24, 156), - ("T", 12, 3, 168), - ("Td", 12, 3, 168), - ("WindChill", 12, 3, 72), - ("RH", 12, 3, 168), - ("PoP", 12, 12, 168), - ("PoP12hr", 12, 12, 168), - ("Sky", 12, 3, 168), - ("Wind", 12, 3,168), - ("WindGust", 12, 3, 132), - ("Wx", 12,3, 168), - ("QPF", 12, 6, 71), - ("QPF6hr", 12, 6, 71), - ("SnowAmt", 12, 6, 47), - ("SnowAmt6hr", 12, 6, 47) - ] - - def public_elements_dict_DayShift_Summer(self): ## used 15Z-2359Z - return [ - ## Format ("WeatherElement", Startgrid, interval, Finalgrid), Start/Final based on 00z. - ("MaxT", 36, 24, 191), - ("MinT", 24, 24, 180), - ("T", 24, 3, 192), - ("Td", 24, 3, 192), - ("HeatIndex", 24, 3, 96), - ("RH", 24, 3, 192), - ("PoP", 24, 12, 192), - ("PoP12hr", 24, 12, 192), - ("Sky", 24, 3, 192), - ("Wind", 24, 3, 192), - ("WindGust", 24, 3, 144), - ("Wx", 24, 3, 192), - ("QPF", 24, 6, 95), - ("QPF6hr", 24, 6, 95), - ] - - def public_elements_dict_MidShift_Summer(self): ## used 00Z-1459Z - return [ - ("MaxT", 12, 24, 167), - ("MinT", 24, 24, 156), - ("T", 12, 3, 168), - ("Td", 12, 3, 168), - ("HeatIndex", 12, 3, 72), - ("RH", 12, 3, 168), - ("PoP", 12, 12, 168), - ("PoP12hr", 12, 12, 168), - ("Sky", 12, 3, 168), - ("Wind", 12, 3,168), - ("WindGust", 12, 3, 132), - ("Wx", 12,3, 168), - ("QPF", 12, 6, 71), - ("QPF6hr", 12, 6, 71), - ] - - def public_elements_dict_DayShift_None(self): ## used 15Z-2359Z - return [ - ## Format ("WeatherElement", Startgrid, interval, Finalgrid), Start/Final based on 00z. 
- ("MaxT", 36, 24, 191), - ("MinT", 24, 24, 180), - ("T", 24, 3, 192), - ("Td", 24, 3, 192), - ("RH", 24, 3, 192), - ("PoP", 24, 12, 192), - ("PoP12hr", 24, 12, 192), - ("Sky", 24, 3, 192), - ("Wind", 24, 3, 192), - ("WindGust", 24, 3, 144), - ("Wx", 24, 3, 192), - ("QPF", 24, 6, 95), - ("QPF6hr", 24, 6, 95), - ] - - def public_elements_dict_MidShift_None(self): ## used 00Z-1459Z - return [ - ("MaxT", 12, 24, 167), - ("MinT", 24, 24, 156), - ("T", 12, 3, 168), - ("Td", 12, 3, 168), - ("RH", 12, 3, 168), - ("PoP", 12, 12, 168), - ("PoP12hr", 12, 12, 168), - ("Sky", 12, 3, 168), - ("Wind", 12, 3,168), - ("WindGust", 12, 3, 132), - ("Wx", 12,3, 168), - ("QPF", 12, 6, 71), - ("QPF6hr", 12, 6, 71), - ] - - def marine_elements_dict_DayShift(self): - return [ - ("WaveHeight", 24, 12, 144), - ("Vsby", 24, 12, 144), - ] - - def marine_elements_dict_MidShift(self): - return [ - ("WaveHeight", 12, 12, 120), - ("Vsby", 12, 12, 120), - ] - - def fwx_elements_dict_DayShift(self): - return [ - ("MinRH", 20, 24, 72), - ("MaxRH", 32, 24, 60), - ("LAL", 18, 6, 72), - ("MixHgt", 24, 12, 72), - ("TransWind", 24, 12, 72), - ("Haines", 18, 6, 72), - ] - - def fwx_elements_dict_MidShift(self): - return [ - ("MinRH", 8, 24, 60), - ("MaxRH", 20, 24, 48), - ("LAL", 6, 6, 60), - ("MixHgt", 12, 12, 60), - ("TransWind", 12, 12, 60), - ("Haines", 6, 6, 60), -] - - def execute(self, editArea, timeRange, varDict): - missingFlag = 99 ## if stays 99 then no valid list selected. - - ## Figure out if day or mid shift ZFP package. - time1 = time.gmtime() - (year, month, day, h, m, s, w, day, dst) = time1 - ## print "ZHour: ", h - ## Adjust hours as needed in Zulu. 
- if h > startDayShift - 1: # day shift - Day = 1 - else: - Day = 0 -# print month, StartWinter, EndSummer - Season = "None" - - ########################################################## - # - # Check for summer and not winter - # - if month > StartSummer - 1 and month > EndWinter: - if month < EndSummer + 1 and month < StartWinter: - Season = "Summer" - # - # - ########################################################## - # - # Check for winter and not summer - # - if Season == "None": - if month < EndWinter + 1 or month > StartWinter - 1: - print month, StartWinter, EndSummer - if month < StartSummer or month > EndSummer: - Season = "Winter" - # - # - ########################################################## - # - # Check for winter and summer - seasons overlap - # - if Season == "None": - if month > StartSummer - 1 and month < EndSummer + 1: - if month < EndWinter + 1 or month > StartWinter - 1: - Season = "Both" - # - # - ########################################################## - # - # Otherwise not winter and not summer - seasons disjoint - # - Season left as "None". - # - print Season - - - ## print "Day: ", Day - ## print "month: ", month - - print varDict["Which element group(s)?"] - - elementGroups = varDict["Which element group(s)?"] - -## for elementList in varDict["Which element group(s)?"]: -## missingFlag = 0 - for elementGroup in elementGroups: - # loop through element groups - missingFlag = 0 - - if elementGroup == "Public": - ## public elements - ## Check for shift time here and use appropriate dictionary. 
- if Day == 1: - if Season == "Summer": - element_list = self.public_elements_dict_DayShift_Summer() - elif Season == "Winter": - element_list = self.public_elements_dict_DayShift_Winter() - elif Season == "Both": - element_list = self.public_elements_dict_DayShift_Both() - else: - element_list = self.public_elements_dict_DayShift_None() - endInterval3 = 84 # end of 3 hourly requirement (see below) - else: - if Season == "Summer": - element_list = self.public_elements_dict_MidShift_Summer() - elif Season == "Winter": - element_list = self.public_elements_dict_MidShift_Winter() - elif Season == "Both": - element_list = self.public_elements_dict_MidShift_Both() - else: - element_list = self.public_elements_dict_MidShift_None() - endInterval3 = 60 # end of 3 hourly requirement (see below) - - print "element_list = ", element_list - for x in xrange(len(element_list)): - # loop through elements - element, StartTime, Interval, FinalTime = element_list[x] - Grid = StartTime - print x, element_list[x], Grid, FinalTime - while (Grid <= FinalTime): - - ## End of 3 hourly requirement: - ## Hourly public elements required every 3 hours in the short term - ## are only required every 6 hours after 72 hours (84 hours from 00Z). - if Interval < 6 and Grid >= endInterval3: - Interval = 6 - - ## get timeRange based on 00z today - ## TR of hour to+1 hour returns 00Z-01Z, 03Z-04Z, etc. This matches - ## NDFD which shows "snapshots" of grids at these times, as opposed - ## to snaphots of 23-00Z, 02-03Z, etc. Satisfies NDFD requirements - ## as of the spring of 2004, when their code was changed from the old - ## NDFD, which showed "snapshots" of grids at 23-00Z, 02-03Z, etc. - ## Either way allows dictionaries to remain 3,6,9,12, etc. 
- ## timeRange = self.createTimeRange(Grid-1, Grid, mode="Zulu") - timeRange = self.createTimeRange(Grid, Grid+1, mode="Zulu") - gridInfo = self.getGridInfo("Fcst", element, "SFC", timeRange) - if gridInfo == []: - missingFlag = 1 - message = "Missing " + element + " grid for " + str(timeRange) - self.statusBarMsg(message ,"S") - Grid += Interval - - elif elementGroup == "Fire Weather": - ## fwx elements - if Day == 1: - element_list = self.fwx_elements_dict_DayShift() - else: - element_list = self.fwx_elements_dict_MidShift() - - for x in xrange(len(element_list)): - # loop through elements - element, StartTime, Interval, FinalTime = element_list[x] - Grid = StartTime - while (Grid <= FinalTime): - ## get timeRange based on 00z today - ## timeRange = self.createTimeRange(Grid-1, Grid, mode="Zulu") - timeRange = self.createTimeRange(Grid, Grid+1, mode="Zulu") - gridInfo = self.getGridInfo("Fcst", element, "SFC", timeRange) - if gridInfo == []: - missingFlag = 1 - message = "Missing " + element + " grid for " + str(timeRange) - self.statusBarMsg(message ,"S") - Grid += Interval - - ## This section for Marine -## elif elementGroup == "Marine": -## if Day == 1: -## element_list = self.marine_elements_dict_DayShift() -## else: -## element_list = self.marine_elements_dict_MidShift() -## -## for x in xrange(len(element_list)): -## # loop through elements -## element, StartTime, Interval, FinalTime = element_list[x] -## Grid = StartTime -## while (Grid <= FinalTime): -## ## get timeRange based on 00z today -## ## timeRange = self.createTimeRange(Grid-1, Grid, mode="Zulu") -## timeRange = self.createTimeRange(Grid, Grid+1, mode="Zulu") -## gridInfo = self.getGridInfo("Fcst", element, "SFC", timeRange) -## if gridInfo == []: -## missingFlag = 1 -## message = "Missing " + element + " grid for " + str(timeRange) -## self.statusBarMsg(message ,"S") -## Grid = Grid + Interval - - if missingFlag == 0: - self.statusBarMsg("-- All necessary grids found. 
--","R") - else: - self.statusBarMsg("-- Missing grids listed below !! --","S") - - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +#---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without +# technical support, and with no warranty, express or implied, as to +# its usefulness for any purpose. +# +# ER_NDFD_GridCheck # # Author: +# Chris Gibson NWSFO SLC 10/03 +# Updated for ER : Jim Hayes 7/03 +# Updated : Tom Mazza 7/04 +# -handles new NDFD times +# -includes configuration section +# -added logic to look for summer or winter grids as appropriate +# Developer email Chris.Gibson@noaa.gov, Thomas.Mazza@noaa.gov. +# +# Checks for grids in the Fcst database for each time required in the +# NDFD matrix. Specifies "groups" of weather elements for Public, Fire Weather, etc. +# Uses 00Z as base time for timerange (TR). Thus, no dependence on new model +# data for TR. +# +# 00Z to 12Z grids are needed for case of evening +# update after 00z. Elements and times are configured in dictionaries +# below. Uses getGridInfo to test if grid exists. +# ---------------------------------------------------------------------------- +# Known version issues: None Tested in GFE RPP 17/18/19 (Does NOT +# work in RPP18.5) +# Works in IFPS15; model name changes to be made for IFPS16 +# edit areas needed: None +# Smart tools needed: None +# Further work: Needs a getTime function to create time ranges for each +# element and depending on the time of day (issuance). +# Potentially this could eliminate the need for +# different tables for day and night shifts. 
+# + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +################################################################################################ +# +# Configuration Section +# +# Here is where to control whether this procedure shows up in GFE and, if so, under which menu; +# control the weather element groups available, i.e., remove "Marine" if in an inland office; +# and to control the start of the day shift, and the start and end of winter and summer to +# determine which public weather elements to check for. +# +availableElementGroups = ["Public", "Fire Weather", "Marine"] +# +# Marine can be eliminated for inland sites. It could be replaced with another group, but +# the elements and valid time info in the def marine_elements_dict_DayShift and _MidShift +# modules below need edited to include the appropriate infoprmation. +# +################################################################################# +# +# Control whether this procedure shows up in GFE and, if so, under which menu: +# +##MenuItems = ["Consistency"] +MenuItems = [""] +# +# +################################################################################# +# +# +# Control the weather element groups available: +# +availableElementGroups = [] +availableElementGroups.append(("Public")) +availableElementGroups.append(("Fire Weather")) +#availableElementGroups.append(("Marine")) ## also uncomment the marine section near the bottom + +VariableList = [] +VariableList.append(("Elements List for Grid Completeness Check" , "", "label")) +VariableList.append(("Which element group(s)?" 
, + ["Public"], "check", + availableElementGroups)) + +# +# +################################################################################# +# +# +# Control the start of the day shift: +# (Mid Shift hard-coded to begin at 00Z) +# +# +startDayShift = 15 # (15Z) +# +# +################################################################################# +# +# +# Control the start and end of winter and summer: +# (Seasons can overlap or be disjoint - e.g., entries of 10, 5, 5 and 10 below, +# respectively, mean May and October are part of both winter and summer, which, +# in turn, means that both winter-specific and summer-specific elements will +# be checked for.) +# +# +StartWinter = 10 # (October is first month of Winter) +EndWinter = 5 # (May is last month of Winter) +StartSummer = 5 # (May is first month of Summer) +EndSummer = 10 # (October is last month of Summer) +# +# End Configuration Section +# +################################################################################################ + + +import SmartScript, string, time +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def public_elements_dict_DayShift_Both(self): ## used 15Z-2359Z + return [ + ## Format ("WeatherElement", Startgrid, interval, Finalgrid), Start/Final based on 00z. 
+ ("MaxT", 36, 24, 191), + ("MinT", 24, 24, 180), + ("T", 24, 3, 192), + ("Td", 24, 3, 192), + ("HeatIndex", 24, 3, 96), + ("WindChill", 24, 3, 96), + ("RH", 24, 3, 192), + ("PoP", 24, 12, 192), + ("PoP12hr", 24, 12, 192), + ("Sky", 24, 3, 192), + ("Wind", 24, 3, 192), + ("WindGust", 24, 3, 144), + ("Wx", 24, 3, 192), + ("QPF", 24, 6, 95), + ("QPF6hr", 24, 6, 95), + ("SnowAmt", 24, 6, 71), + ("SnowAmt6hr", 24, 6, 71), + ] + + def public_elements_dict_MidShift_Both(self): ## used 00Z-1459Z + return [ + ("MaxT", 12, 24, 167), + ("MinT", 24, 24, 156), + ("T", 12, 3, 168), + ("Td", 12, 3, 168), + ("HeatIndex", 12, 3, 72), + ("WindChill", 12, 3, 72), + ("RH", 12, 3, 168), + ("PoP", 12, 12, 168), + ("PoP12hr", 12, 12, 168), + ("Sky", 12, 3, 168), + ("Wind", 12, 3,168), + ("WindGust", 12, 3, 132), + ("Wx", 12,3, 168), + ("QPF", 12, 6, 71), + ("QPF6hr", 12, 6, 71), + ("SnowAmt", 12, 6, 47), + ("SnowAmt6hr", 12, 6, 47) + ] + + def public_elements_dict_DayShift_Winter(self): ## used 15Z-2359Z + return [ + ## Format ("WeatherElement", Startgrid, interval, Finalgrid), Start/Final based on 00z. 
+ ("MaxT", 36, 24, 191), + ("MinT", 24, 24, 180), + ("T", 24, 3, 192), + ("Td", 24, 3, 192), + ("WindChill", 24, 3, 96), + ("RH", 24, 3, 192), + ("PoP", 24, 12, 192), + ("PoP12hr", 24, 12, 192), + ("Sky", 24, 3, 192), + ("Wind", 24, 3, 192), + ("WindGust", 24, 3, 144), + ("Wx", 24, 3, 192), + ("QPF", 24, 6, 95), + ("QPF6hr", 24, 6, 95), + ("SnowAmt", 24, 6, 71), + ("SnowAmt6hr", 24, 6, 71), + ] + + def public_elements_dict_MidShift_Winter(self): ## used 00Z-1459Z + return [ + ("MaxT", 12, 24, 167), + ("MinT", 24, 24, 156), + ("T", 12, 3, 168), + ("Td", 12, 3, 168), + ("WindChill", 12, 3, 72), + ("RH", 12, 3, 168), + ("PoP", 12, 12, 168), + ("PoP12hr", 12, 12, 168), + ("Sky", 12, 3, 168), + ("Wind", 12, 3,168), + ("WindGust", 12, 3, 132), + ("Wx", 12,3, 168), + ("QPF", 12, 6, 71), + ("QPF6hr", 12, 6, 71), + ("SnowAmt", 12, 6, 47), + ("SnowAmt6hr", 12, 6, 47) + ] + + def public_elements_dict_DayShift_Summer(self): ## used 15Z-2359Z + return [ + ## Format ("WeatherElement", Startgrid, interval, Finalgrid), Start/Final based on 00z. + ("MaxT", 36, 24, 191), + ("MinT", 24, 24, 180), + ("T", 24, 3, 192), + ("Td", 24, 3, 192), + ("HeatIndex", 24, 3, 96), + ("RH", 24, 3, 192), + ("PoP", 24, 12, 192), + ("PoP12hr", 24, 12, 192), + ("Sky", 24, 3, 192), + ("Wind", 24, 3, 192), + ("WindGust", 24, 3, 144), + ("Wx", 24, 3, 192), + ("QPF", 24, 6, 95), + ("QPF6hr", 24, 6, 95), + ] + + def public_elements_dict_MidShift_Summer(self): ## used 00Z-1459Z + return [ + ("MaxT", 12, 24, 167), + ("MinT", 24, 24, 156), + ("T", 12, 3, 168), + ("Td", 12, 3, 168), + ("HeatIndex", 12, 3, 72), + ("RH", 12, 3, 168), + ("PoP", 12, 12, 168), + ("PoP12hr", 12, 12, 168), + ("Sky", 12, 3, 168), + ("Wind", 12, 3,168), + ("WindGust", 12, 3, 132), + ("Wx", 12,3, 168), + ("QPF", 12, 6, 71), + ("QPF6hr", 12, 6, 71), + ] + + def public_elements_dict_DayShift_None(self): ## used 15Z-2359Z + return [ + ## Format ("WeatherElement", Startgrid, interval, Finalgrid), Start/Final based on 00z. 
+ ("MaxT", 36, 24, 191), + ("MinT", 24, 24, 180), + ("T", 24, 3, 192), + ("Td", 24, 3, 192), + ("RH", 24, 3, 192), + ("PoP", 24, 12, 192), + ("PoP12hr", 24, 12, 192), + ("Sky", 24, 3, 192), + ("Wind", 24, 3, 192), + ("WindGust", 24, 3, 144), + ("Wx", 24, 3, 192), + ("QPF", 24, 6, 95), + ("QPF6hr", 24, 6, 95), + ] + + def public_elements_dict_MidShift_None(self): ## used 00Z-1459Z + return [ + ("MaxT", 12, 24, 167), + ("MinT", 24, 24, 156), + ("T", 12, 3, 168), + ("Td", 12, 3, 168), + ("RH", 12, 3, 168), + ("PoP", 12, 12, 168), + ("PoP12hr", 12, 12, 168), + ("Sky", 12, 3, 168), + ("Wind", 12, 3,168), + ("WindGust", 12, 3, 132), + ("Wx", 12,3, 168), + ("QPF", 12, 6, 71), + ("QPF6hr", 12, 6, 71), + ] + + def marine_elements_dict_DayShift(self): + return [ + ("WaveHeight", 24, 12, 144), + ("Vsby", 24, 12, 144), + ] + + def marine_elements_dict_MidShift(self): + return [ + ("WaveHeight", 12, 12, 120), + ("Vsby", 12, 12, 120), + ] + + def fwx_elements_dict_DayShift(self): + return [ + ("MinRH", 20, 24, 72), + ("MaxRH", 32, 24, 60), + ("LAL", 18, 6, 72), + ("MixHgt", 24, 12, 72), + ("TransWind", 24, 12, 72), + ("Haines", 18, 6, 72), + ] + + def fwx_elements_dict_MidShift(self): + return [ + ("MinRH", 8, 24, 60), + ("MaxRH", 20, 24, 48), + ("LAL", 6, 6, 60), + ("MixHgt", 12, 12, 60), + ("TransWind", 12, 12, 60), + ("Haines", 6, 6, 60), +] + + def execute(self, editArea, timeRange, varDict): + missingFlag = 99 ## if stays 99 then no valid list selected. + + ## Figure out if day or mid shift ZFP package. + time1 = time.gmtime() + (year, month, day, h, m, s, w, day, dst) = time1 + ## print "ZHour: ", h + ## Adjust hours as needed in Zulu. 
+ if h > startDayShift - 1: # day shift + Day = 1 + else: + Day = 0 +# print month, StartWinter, EndSummer + Season = "None" + + ########################################################## + # + # Check for summer and not winter + # + if month > StartSummer - 1 and month > EndWinter: + if month < EndSummer + 1 and month < StartWinter: + Season = "Summer" + # + # + ########################################################## + # + # Check for winter and not summer + # + if Season == "None": + if month < EndWinter + 1 or month > StartWinter - 1: + print(month, StartWinter, EndSummer) + if month < StartSummer or month > EndSummer: + Season = "Winter" + # + # + ########################################################## + # + # Check for winter and summer - seasons overlap + # + if Season == "None": + if month > StartSummer - 1 and month < EndSummer + 1: + if month < EndWinter + 1 or month > StartWinter - 1: + Season = "Both" + # + # + ########################################################## + # + # Otherwise not winter and not summer - seasons disjoint + # - Season left as "None". + # + print(Season) + + + ## print "Day: ", Day + ## print "month: ", month + + print(varDict["Which element group(s)?"]) + + elementGroups = varDict["Which element group(s)?"] + +## for elementList in varDict["Which element group(s)?"]: +## missingFlag = 0 + for elementGroup in elementGroups: + # loop through element groups + missingFlag = 0 + + if elementGroup == "Public": + ## public elements + ## Check for shift time here and use appropriate dictionary. 
+ if Day == 1: + if Season == "Summer": + element_list = self.public_elements_dict_DayShift_Summer() + elif Season == "Winter": + element_list = self.public_elements_dict_DayShift_Winter() + elif Season == "Both": + element_list = self.public_elements_dict_DayShift_Both() + else: + element_list = self.public_elements_dict_DayShift_None() + endInterval3 = 84 # end of 3 hourly requirement (see below) + else: + if Season == "Summer": + element_list = self.public_elements_dict_MidShift_Summer() + elif Season == "Winter": + element_list = self.public_elements_dict_MidShift_Winter() + elif Season == "Both": + element_list = self.public_elements_dict_MidShift_Both() + else: + element_list = self.public_elements_dict_MidShift_None() + endInterval3 = 60 # end of 3 hourly requirement (see below) + + print("element_list = ", element_list) + for x in range(len(element_list)): + # loop through elements + element, StartTime, Interval, FinalTime = element_list[x] + Grid = StartTime + print(x, element_list[x], Grid, FinalTime) + while (Grid <= FinalTime): + + ## End of 3 hourly requirement: + ## Hourly public elements required every 3 hours in the short term + ## are only required every 6 hours after 72 hours (84 hours from 00Z). + if Interval < 6 and Grid >= endInterval3: + Interval = 6 + + ## get timeRange based on 00z today + ## TR of hour to+1 hour returns 00Z-01Z, 03Z-04Z, etc. This matches + ## NDFD which shows "snapshots" of grids at these times, as opposed + ## to snaphots of 23-00Z, 02-03Z, etc. Satisfies NDFD requirements + ## as of the spring of 2004, when their code was changed from the old + ## NDFD, which showed "snapshots" of grids at 23-00Z, 02-03Z, etc. + ## Either way allows dictionaries to remain 3,6,9,12, etc. 
+ ## timeRange = self.createTimeRange(Grid-1, Grid, mode="Zulu") + timeRange = self.createTimeRange(Grid, Grid+1, mode="Zulu") + gridInfo = self.getGridInfo("Fcst", element, "SFC", timeRange) + if gridInfo == []: + missingFlag = 1 + message = "Missing " + element + " grid for " + str(timeRange) + self.statusBarMsg(message ,"S") + Grid += Interval + + elif elementGroup == "Fire Weather": + ## fwx elements + if Day == 1: + element_list = self.fwx_elements_dict_DayShift() + else: + element_list = self.fwx_elements_dict_MidShift() + + for x in range(len(element_list)): + # loop through elements + element, StartTime, Interval, FinalTime = element_list[x] + Grid = StartTime + while (Grid <= FinalTime): + ## get timeRange based on 00z today + ## timeRange = self.createTimeRange(Grid-1, Grid, mode="Zulu") + timeRange = self.createTimeRange(Grid, Grid+1, mode="Zulu") + gridInfo = self.getGridInfo("Fcst", element, "SFC", timeRange) + if gridInfo == []: + missingFlag = 1 + message = "Missing " + element + " grid for " + str(timeRange) + self.statusBarMsg(message ,"S") + Grid += Interval + + ## This section for Marine +## elif elementGroup == "Marine": +## if Day == 1: +## element_list = self.marine_elements_dict_DayShift() +## else: +## element_list = self.marine_elements_dict_MidShift() +## +## for x in xrange(len(element_list)): +## # loop through elements +## element, StartTime, Interval, FinalTime = element_list[x] +## Grid = StartTime +## while (Grid <= FinalTime): +## ## get timeRange based on 00z today +## ## timeRange = self.createTimeRange(Grid-1, Grid, mode="Zulu") +## timeRange = self.createTimeRange(Grid, Grid+1, mode="Zulu") +## gridInfo = self.getGridInfo("Fcst", element, "SFC", timeRange) +## if gridInfo == []: +## missingFlag = 1 +## message = "Missing " + element + " grid for " + str(timeRange) +## self.statusBarMsg(message ,"S") +## Grid = Grid + Interval + + if missingFlag == 0: + self.statusBarMsg("-- All necessary grids found. 
--","R") + else: + self.statusBarMsg("-- Missing grids listed below !! --","S") + + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PWS_Procedure.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PWS_Procedure.py index bd296bbff5..6b05c7cd7f 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PWS_Procedure.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PWS_Procedure.py @@ -1,337 +1,337 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Authors: Tom LeFebvre, Pablo Santos -# Last Modified: Dec 10, 2010 - new equations to process 12 hour incremental wind speed probability grids (PWS(D,N)) from 6 hourly pws34 and pws64 grids. -# March 23, 2011 - Corrected for proper accounting of input inventory (pws34 and pws64) -# July 18, 2017 - Added option to select Preliminary of Official probablistic model source. 
-Tom -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Populate"] - -import SmartScript -import AbsTime -import TimeRange -import string -import time -from numpy import * - -LEVEL = "SFC" - -VariableList = [("Probabilistic Wind Source?", "Official", "radio", ["Official", "Preliminary"]), - ] -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # caluclates maximum value at each grid point - def maxGrid(self, gridList): - - if len(gridList) < 1: - return None - - gridMax = maximum.reduce(gridList) - return gridMax - - def intGrid(self, gridList): - - if len(gridList) < 1: - return None - - gridInt = [] - i=0 - while i < len(gridList): - gridInt.append(gridList[i] - gridList[i]) - i += 1 - - i=1 - while i < len(gridList): - - gridInt[i] = gridList[i] - gridList[i-1] - i += 1 - - return gridInt - - # return a list of modelIDs that match the specified string. 
- def getModelIDList(self, matchStr): - - availParms = self.availableParms() - - modelList = [] - for pName, level, dbID in availParms: - modelId = dbID.modelIdentifier() - if modelId.find(matchStr) > -1: - if modelId not in modelList: - modelList.append(modelId) - - return modelList - - # Get grid inventory for the specified info - def getWEInventory(self, modelName, WEName, level, timeRange): - weTR = TimeRange.allTimes().toJavaObj() - gridInfo = self.getGridInfo(modelName, WEName, level, weTR) - - trList = [] - for g in gridInfo: - start = g.gridTime().startTime().unixTime() * 1000 - end = g.gridTime().endTime().unixTime() * 1000 - tr = TimeRange.TimeRange(start,end) - if tr.overlaps(timeRange): - trList.append(tr) - - return trList - - # determines the latest pws model currently available - def getLatestPWSModel(self): - - # Use the model source selected from the GUI - modelIDList = self.getModelIDList(self._probWindModelSource) - modelIDList.sort() - - if len(modelIDList) == 0: - self.statusBarMsg("No PWS models found in your inventory.", "S") - return "" - - # the last one is the latest - return modelIDList[-1] - - # Examines the full inventory and computes a timeRange that encompasses - # all grids - def getModelTimeRange(self, modelID, param, weNames): - before = AbsTime.current() - (7 * 24 * 3600) # 7 days ago - later = AbsTime.current() + 8 * 24 * 3600 # 8 days from now - timeRange = TimeRange.TimeRange(before, later) - self.deleteCmd(weNames, timeRange) - gridInv = self.getGridInfo(modelID, param, "FHAG10", timeRange) - if len(gridInv) == 0: - self.statusBarMsg("No grids available for model:" + modelID, "S") - return None - - minTime = later.unixTime() - maxTime = before.unixTime() - for g in gridInv: - start = g.gridTime().startTime().unixTime() - end = g.gridTime().endTime().unixTime() - if start < minTime: - minTime = start - if end > maxTime: - maxTime = end - - - - # adjust the times since this data is cumulative probabilities - #minTime = minTime - 
(6 * 3600) - #minTime = minTime + (14*3600) - #maxTime = maxTime - 3600 - modelTR = TimeRange.TimeRange(AbsTime.AbsTime(minTime), - AbsTime.AbsTime(maxTime)) - #modelTR_adv = TimeRange.TimeRange(AbsTime.AbsTime(minTime+(3*3600)), AbsTime.AbsTime(maxTime)) - #self.remove(['pwsD34'],minTime - (24*3600),minTime, 'Fcst') - #self.remove(['pwsN34'],minTime - (24*3600),minTime, 'Fcst') - #self.remove(['pwsD64'],minTime - (24*3600),minTime, 'Fcst') - #self.remove(['pwsN64'],minTime - (24*3600),minTime, 'Fcst') - - #print "MODELTR", modelTR, minTime - return modelTR, minTime, maxTime - - # Calculate the PWS grid using the formula: - # - # pwsGrid = pws1 + (prob2 - prob1) - # - # where pws1 and pws2 are the first and second incremental wind speed probability input grids in the PWS(D,N) - # time interval and prob1 and prob2 are the first and second cumulative wind speed probability input grid in the - # PWS(D,N) time interval. There's a little extra code devoted to making sure that - # the grids are in the correct time order, since this is very important in the equation. - - - def makePWSScratch(self, weName, timeRange): - # make the D2D elementName - pwsWE = "pws" + weName[-2:] - probWE = "prob" + weName[-2:] - - #print "pwsWE and probWE are: ", pwsWE, probWE - - # fetch the pws and prob grids - modelName = self.getLatestPWSModel() - - modelLevel = "FHAG10" - - # To make sure that we're fetching model grids in the right temporal - # order, get the inventory and sort them first before fetching. 
- pwsInv = self.getWEInventory(modelName, pwsWE, modelLevel, timeRange) - probInv = self.getWEInventory(modelName, probWE, modelLevel, timeRange) - count = 0 - - for item in pwsInv: - count+=1 - - if count > 1: - print "pwsInv is GOOD" - else: - print "pwsInv is BAD DELETING" - self.deleteCmd([weName],timeRange) - - return - - def makePWSGrid(self, weName, timeRange): - # make the D2D elementName - pwsWE = "pws" + weName[-2:] - probWE = "prob" + weName[-2:] - - # print "pwsWE and probWE are: ", pwsWE, probWE - - # fetch the pws and prob grids - modelName = self.getLatestPWSModel() - - modelLevel = "FHAG10" - - # To make sure that we're fetching model grids in the right temporal - # order, get the inventory and sort them first before fetching. - pwsInv = self.getWEInventory(modelName, pwsWE, modelLevel, timeRange) - probInv = self.getWEInventory(modelName, probWE, modelLevel, timeRange) - - pwsInv.sort() - probInv.sort() - - # Now get the grids individually to ensure proper time order - pws1 = self.getGrids(modelName, pwsWE, modelLevel, pwsInv[0], - mode="First") - pws2 = self.getGrids(modelName, pwsWE, modelLevel, pwsInv[1], - mode="First") - prob1 = self.getGrids(modelName, probWE, modelLevel, probInv[0], - mode="First") - prob2 = self.getGrids(modelName, probWE, modelLevel, probInv[1], - mode="First") - - # Calculate the grid - grid = pws1 + prob2 - prob1 - - # clip the grid at 100.0 percent - grid = clip(grid, 0.0, 100.0) - - return grid - - # main method - def execute(self, varDict): - - # Fetch the model source and define the model name - sourceDB = varDict["Probabilistic Wind Source?"] - if sourceDB == "Official": - self._probWindModelSource = "TPCProb" - elif sourceDB == "Preliminary": - self._probWindModelSource = "TPCProbPrelim" - else: - self.statusBarMsg("Unknown model source selected. Aborting.", "U") - return - - - modelID = self.getLatestPWSModel() - if modelID == "": - self.statusBarMsg("The selected model source was not found. 
Aborting.", "U") - return - - print "modelID:", len(modelID) - weNames = ["prob34", "prob64", "pws34int", "pws64int"] - modelTR_cum = self.getModelTimeRange(modelID, "prob34", weNames) - weNames = ["pwsD34", "pwsD64", "pwsN34", "pwsN64"] - modelTR_inc = self.getModelTimeRange(modelID, "pws34", weNames) - if modelTR_cum is None or len(modelTR_cum) < 3: - return - timeRange = modelTR_cum[0] - minTime = modelTR_cum[1] - maxTime = modelTR_cum[2] - - -# THE FOLLOWING SEGMENT POPULATES THE CUMULATIVE PROBABILITIES (prob34 and prob64) -# AND CALCULATES AND POPULATES THE INTERVAL PROBABILITIES (pws34int and pws64int) -# FROM THE CUMULATIVE PROBABILITIES. THESE ARE FOR USE BY THE NEW HLS FORMATTER -# AS WELL AS THEIR COMPANION GRAPHICAL COMPONENT, THAT IS, THE TROPICAL IMPACT -# GRAPHICS. - - gridList_int34 = self.getGrids(modelID, "prob34", "FHAG10", - timeRange, mode = "List") - nt = len(gridList_int34) -# print "LENGTH OF GRID IS: ", nt - - tr = TimeRange.TimeRange(AbsTime.AbsTime(minTime+3*3600), AbsTime.AbsTime(maxTime)) - self.createGrid("Fcst", "prob34", "SCALAR", gridList_int34[nt-1], tr) - intGrid34 = self.intGrid(gridList_int34) - - gridList_int64 = self.getGrids(modelID, "prob64", "FHAG10", - timeRange, mode = "List") - nt=len(gridList_int64) - tr = TimeRange.TimeRange(AbsTime.AbsTime(minTime+3*3600), AbsTime.AbsTime(maxTime)) - self.createGrid("Fcst", "prob64", "SCALAR", gridList_int64[nt-1], tr) - intGrid64 = self.intGrid(gridList_int64) - - i=1 - while i < len(intGrid34): - minT = minTime + (i-1)*21600 - maxTime = minT + 21600 - tr = TimeRange.TimeRange(AbsTime.AbsTime(minT), AbsTime.AbsTime(maxTime)) - self.createGrid("Fcst", "pws34int", "SCALAR", intGrid34[i], tr) - i += 1 - i=1 - while i < len(intGrid64): - minT = minTime + (i-1)*21600 - maxTime = minT + 21600 - tr = TimeRange.TimeRange(AbsTime.AbsTime(minT), AbsTime.AbsTime(maxTime)) - self.createGrid("Fcst", "pws64int", "SCALAR", intGrid64[i], tr) - i += 1 -# -# THE FOLLOWING POPULATES THE INCREMENTAL 
PROBABILITIES (pwsD34, pwsD64, pwsN34, and pwsN64) -# FOR USE BY THE TROPICAL ZFP AND CWF, THE SO CALLED EXPRESSIONS OF UNCERTAINTY. -# - timeRange = modelTR_inc[0] - # print "TIME RANGE IS: ", timeRange - - # create grids from scratch that match the model inventory - weNames = ["pwsD34", "pwsD64", "pwsN34", "pwsN64"] - self.createFromScratchCmd(weNames, timeRange) - - for weName in weNames: - trList = self.getWEInventory("Fcst", weName, "SFC", timeRange) - for tr in trList: - #print "weName TR IS: ", weName, tr - self.makePWSScratch(weName, tr) - - for weName in weNames: - trList = self.getWEInventory("Fcst", weName, "SFC", timeRange) - for tr in trList: - #print "weName TR IS: ", weName, tr - probGrid = self.makePWSGrid(weName, tr) - self.createGrid("Fcst", weName, "SCALAR", probGrid, tr) - - # Post the model time we used to the GFE status bar - modelTimeStr = modelID[-13:] - self.statusBarMsg(modelTimeStr + " used to make pws grids.", "R") - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# Authors: Tom LeFebvre, Pablo Santos +# Last Modified: Dec 10, 2010 - new equations to process 12 hour incremental wind speed probability grids (PWS(D,N)) from 6 hourly pws34 and pws64 grids. +# March 23, 2011 - Corrected for proper accounting of input inventory (pws34 and pws64) +# July 18, 2017 - Added option to select Preliminary of Official probablistic model source. -Tom +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Populate"] + +import SmartScript +import AbsTime +import TimeRange +import string +import time +from numpy import * + +LEVEL = "SFC" + +VariableList = [("Probabilistic Wind Source?", "Official", "radio", ["Official", "Preliminary"]), + ] +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # caluclates maximum value at each grid point + def maxGrid(self, gridList): + + if len(gridList) < 1: + return None + + gridMax = maximum.reduce(gridList) + return gridMax + + def intGrid(self, gridList): + + if len(gridList) < 1: + return None + + gridInt = [] + i=0 + while i < len(gridList): + gridInt.append(gridList[i] - gridList[i]) + i += 1 + + i=1 + while i < len(gridList): + + gridInt[i] = gridList[i] - gridList[i-1] + i += 1 + + return gridInt + + # return a list of modelIDs that match the specified string. 
+ def getModelIDList(self, matchStr): + + availParms = self.availableParms() + + modelList = [] + for pName, level, dbID in availParms: + modelId = dbID.modelIdentifier() + if modelId.find(matchStr) > -1: + if modelId not in modelList: + modelList.append(modelId) + + return modelList + + # Get grid inventory for the specified info + def getWEInventory(self, modelName, WEName, level, timeRange): + weTR = TimeRange.allTimes().toJavaObj() + gridInfo = self.getGridInfo(modelName, WEName, level, weTR) + + trList = [] + for g in gridInfo: + start = g.gridTime().startTime().unixTime() * 1000 + end = g.gridTime().endTime().unixTime() * 1000 + tr = TimeRange.TimeRange(start,end) + if tr.overlaps(timeRange): + trList.append(tr) + + return trList + + # determines the latest pws model currently available + def getLatestPWSModel(self): + + # Use the model source selected from the GUI + modelIDList = self.getModelIDList(self._probWindModelSource) + modelIDList.sort() + + if len(modelIDList) == 0: + self.statusBarMsg("No PWS models found in your inventory.", "S") + return "" + + # the last one is the latest + return modelIDList[-1] + + # Examines the full inventory and computes a timeRange that encompasses + # all grids + def getModelTimeRange(self, modelID, param, weNames): + before = AbsTime.current() - (7 * 24 * 3600) # 7 days ago + later = AbsTime.current() + 8 * 24 * 3600 # 8 days from now + timeRange = TimeRange.TimeRange(before, later) + self.deleteCmd(weNames, timeRange) + gridInv = self.getGridInfo(modelID, param, "FHAG10", timeRange) + if len(gridInv) == 0: + self.statusBarMsg("No grids available for model:" + modelID, "S") + return None + + minTime = later.unixTime() + maxTime = before.unixTime() + for g in gridInv: + start = g.gridTime().startTime().unixTime() + end = g.gridTime().endTime().unixTime() + if start < minTime: + minTime = start + if end > maxTime: + maxTime = end + + + + # adjust the times since this data is cumulative probabilities + #minTime = minTime - 
(6 * 3600) + #minTime = minTime + (14*3600) + #maxTime = maxTime - 3600 + modelTR = TimeRange.TimeRange(AbsTime.AbsTime(minTime), + AbsTime.AbsTime(maxTime)) + #modelTR_adv = TimeRange.TimeRange(AbsTime.AbsTime(minTime+(3*3600)), AbsTime.AbsTime(maxTime)) + #self.remove(['pwsD34'],minTime - (24*3600),minTime, 'Fcst') + #self.remove(['pwsN34'],minTime - (24*3600),minTime, 'Fcst') + #self.remove(['pwsD64'],minTime - (24*3600),minTime, 'Fcst') + #self.remove(['pwsN64'],minTime - (24*3600),minTime, 'Fcst') + + #print "MODELTR", modelTR, minTime + return modelTR, minTime, maxTime + + # Calculate the PWS grid using the formula: + # + # pwsGrid = pws1 + (prob2 - prob1) + # + # where pws1 and pws2 are the first and second incremental wind speed probability input grids in the PWS(D,N) + # time interval and prob1 and prob2 are the first and second cumulative wind speed probability input grid in the + # PWS(D,N) time interval. There's a little extra code devoted to making sure that + # the grids are in the correct time order, since this is very important in the equation. + + + def makePWSScratch(self, weName, timeRange): + # make the D2D elementName + pwsWE = "pws" + weName[-2:] + probWE = "prob" + weName[-2:] + + #print "pwsWE and probWE are: ", pwsWE, probWE + + # fetch the pws and prob grids + modelName = self.getLatestPWSModel() + + modelLevel = "FHAG10" + + # To make sure that we're fetching model grids in the right temporal + # order, get the inventory and sort them first before fetching. 
+ pwsInv = self.getWEInventory(modelName, pwsWE, modelLevel, timeRange) + probInv = self.getWEInventory(modelName, probWE, modelLevel, timeRange) + count = 0 + + for item in pwsInv: + count+=1 + + if count > 1: + print("pwsInv is GOOD") + else: + print("pwsInv is BAD DELETING") + self.deleteCmd([weName],timeRange) + + return + + def makePWSGrid(self, weName, timeRange): + # make the D2D elementName + pwsWE = "pws" + weName[-2:] + probWE = "prob" + weName[-2:] + + # print "pwsWE and probWE are: ", pwsWE, probWE + + # fetch the pws and prob grids + modelName = self.getLatestPWSModel() + + modelLevel = "FHAG10" + + # To make sure that we're fetching model grids in the right temporal + # order, get the inventory and sort them first before fetching. + pwsInv = self.getWEInventory(modelName, pwsWE, modelLevel, timeRange) + probInv = self.getWEInventory(modelName, probWE, modelLevel, timeRange) + + pwsInv.sort() + probInv.sort() + + # Now get the grids individually to ensure proper time order + pws1 = self.getGrids(modelName, pwsWE, modelLevel, pwsInv[0], + mode="First") + pws2 = self.getGrids(modelName, pwsWE, modelLevel, pwsInv[1], + mode="First") + prob1 = self.getGrids(modelName, probWE, modelLevel, probInv[0], + mode="First") + prob2 = self.getGrids(modelName, probWE, modelLevel, probInv[1], + mode="First") + + # Calculate the grid + grid = pws1 + prob2 - prob1 + + # clip the grid at 100.0 percent + grid = clip(grid, 0.0, 100.0) + + return grid + + # main method + def execute(self, varDict): + + # Fetch the model source and define the model name + sourceDB = varDict["Probabilistic Wind Source?"] + if sourceDB == "Official": + self._probWindModelSource = "TPCProb" + elif sourceDB == "Preliminary": + self._probWindModelSource = "TPCProbPrelim" + else: + self.statusBarMsg("Unknown model source selected. Aborting.", "U") + return + + + modelID = self.getLatestPWSModel() + if modelID == "": + self.statusBarMsg("The selected model source was not found. 
Aborting.", "U") + return + + print("modelID:", len(modelID)) + weNames = ["prob34", "prob64", "pws34int", "pws64int"] + modelTR_cum = self.getModelTimeRange(modelID, "prob34", weNames) + weNames = ["pwsD34", "pwsD64", "pwsN34", "pwsN64"] + modelTR_inc = self.getModelTimeRange(modelID, "pws34", weNames) + if modelTR_cum is None or len(modelTR_cum) < 3: + return + timeRange = modelTR_cum[0] + minTime = modelTR_cum[1] + maxTime = modelTR_cum[2] + + +# THE FOLLOWING SEGMENT POPULATES THE CUMULATIVE PROBABILITIES (prob34 and prob64) +# AND CALCULATES AND POPULATES THE INTERVAL PROBABILITIES (pws34int and pws64int) +# FROM THE CUMULATIVE PROBABILITIES. THESE ARE FOR USE BY THE NEW HLS FORMATTER +# AS WELL AS THEIR COMPANION GRAPHICAL COMPONENT, THAT IS, THE TROPICAL IMPACT +# GRAPHICS. + + gridList_int34 = self.getGrids(modelID, "prob34", "FHAG10", + timeRange, mode = "List") + nt = len(gridList_int34) +# print "LENGTH OF GRID IS: ", nt + + tr = TimeRange.TimeRange(AbsTime.AbsTime(minTime+3*3600), AbsTime.AbsTime(maxTime)) + self.createGrid("Fcst", "prob34", "SCALAR", gridList_int34[nt-1], tr) + intGrid34 = self.intGrid(gridList_int34) + + gridList_int64 = self.getGrids(modelID, "prob64", "FHAG10", + timeRange, mode = "List") + nt=len(gridList_int64) + tr = TimeRange.TimeRange(AbsTime.AbsTime(minTime+3*3600), AbsTime.AbsTime(maxTime)) + self.createGrid("Fcst", "prob64", "SCALAR", gridList_int64[nt-1], tr) + intGrid64 = self.intGrid(gridList_int64) + + i=1 + while i < len(intGrid34): + minT = minTime + (i-1)*21600 + maxTime = minT + 21600 + tr = TimeRange.TimeRange(AbsTime.AbsTime(minT), AbsTime.AbsTime(maxTime)) + self.createGrid("Fcst", "pws34int", "SCALAR", intGrid34[i], tr) + i += 1 + i=1 + while i < len(intGrid64): + minT = minTime + (i-1)*21600 + maxTime = minT + 21600 + tr = TimeRange.TimeRange(AbsTime.AbsTime(minT), AbsTime.AbsTime(maxTime)) + self.createGrid("Fcst", "pws64int", "SCALAR", intGrid64[i], tr) + i += 1 +# +# THE FOLLOWING POPULATES THE INCREMENTAL 
PROBABILITIES (pwsD34, pwsD64, pwsN34, and pwsN64) +# FOR USE BY THE TROPICAL ZFP AND CWF, THE SO CALLED EXPRESSIONS OF UNCERTAINTY. +# + timeRange = modelTR_inc[0] + # print "TIME RANGE IS: ", timeRange + + # create grids from scratch that match the model inventory + weNames = ["pwsD34", "pwsD64", "pwsN34", "pwsN64"] + self.createFromScratchCmd(weNames, timeRange) + + for weName in weNames: + trList = self.getWEInventory("Fcst", weName, "SFC", timeRange) + for tr in trList: + #print "weName TR IS: ", weName, tr + self.makePWSScratch(weName, tr) + + for weName in weNames: + trList = self.getWEInventory("Fcst", weName, "SFC", timeRange) + for tr in trList: + #print "weName TR IS: ", weName, tr + probGrid = self.makePWSGrid(weName, tr) + self.createGrid("Fcst", weName, "SCALAR", probGrid, tr) + + # Post the model time we used to the GFE status bar + modelTimeStr = modelID[-13:] + self.statusBarMsg(modelTimeStr + " used to make pws grids.", "R") + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PlotSPCWatches.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PlotSPCWatches.py index 9273909c32..2aad04c5ee 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PlotSPCWatches.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PlotSPCWatches.py @@ -1,334 +1,334 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# PlotSPCWatches -# -# This procedure synchronizes the hazards from SPC that are in the active table. -# -# -# Author: lefebvre -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 06/18/13 #2083 dgilling Code cleanup, reinstated logging -# for every hazard grid written. -# -######################################################################## - -## -# This is a base file that is not intended to be overridden. -## - - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Hazards"] - -from numpy import * -import SmartScript -import time -import HazardUtils -import logging -import UFStatusHandler - - -PLUGIN_NAME = 'com.raytheon.viz.gfe' -CATEGORY = 'GFE' - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self._dbss = dbss - - logging.basicConfig(level=logging.INFO) - self.log = logging.getLogger("PlotSPCWatches") - - self.log.addHandler(UFStatusHandler.UFStatusHandler(PLUGIN_NAME, CATEGORY, level=logging.WARNING)) - - def getWatches(self): - nonSPCRecords = [] - spcRecords = [] - spcCANRecords = [] - - vtecTable = self.vtecActiveTable() - vtecTable = self._hazUtils._filterVTECBasedOnGFEMode(vtecTable) - - phenSigList = ["TO.A", "SV.A"] - pilList = ['WOU','WCN'] - spcActions = ['NEW','CON','EXT','EXA','EXB'] - othActions = ['NEW','CON','EXT','EXA','EXB','CAN','EXP','UPG'] - spcActionsCAN = ['CAN'] - - # step 1: Separate into SPC/nonSPC/spcCAN, keep only certain actions - for v in vtecTable: - # filter based on phen/sig - phenSig = v['phen'] + "." + v['sig'] - if not phenSig in phenSigList: - continue - - # eliminate non-interesting products - if v['pil'] not in pilList: - continue - - # eliminate records in the past - if v['endTime'] < self._gmtime().unixTime(): - continue - - # add to appropriate list - if v['officeid'] == 'KWNS': - if v['act'] in spcActions: - spcRecords.append(v) - elif v['act'] in spcActionsCAN: - spcCANRecords.append(v) - else: - if v['act'] in othActions: - nonSPCRecords.append(v) - - #LogStream.logUse("step1 PlotSPCWatches: spcRec=", spcRecords, - # "\nspcCANRecords=", spcCANRecords, "\n nonSPCRec=", nonSPCRecords) - - # step 2: eliminate records in SPC that are also in non-SPC. Filter - # based on etn, id, phen/sig. Ignore action, i.e., CAN in the nonSPC - # record will override any action in SPC records. Remaining records - # will be the "NEW" watch. 
- compare = ['etn','id','phen','sig'] - filteredSPCWatches = [] - - for spcRec in spcRecords: - removeRecord = False - for nonSPCRec in nonSPCRecords: - if self._recordCompare(spcRec, nonSPCRec, compare): - removeRecord = True #match found in nonSPCRecord - break - if not removeRecord: - filteredSPCWatches.append(spcRec) - - #LogStream.logUse("step2 PlotSPCWatches: elim SPC in nonSPC. ", - #"spcRec=", filteredSPCWatches) - - # step 3: eliminate records in non-SPC that are CAN, EXP - eliminateActions = ['CAN','EXP'] - filteredNonSPCWatches = [] - for nonSPCRec in nonSPCRecords: - if nonSPCRec['act'] not in eliminateActions: - filteredNonSPCWatches.append(nonSPCRec) - - #LogStream.logUse("step3 PlotSPCWatches: elim nonSPC CANEXP. ", - # "nonSPCRec=", filteredSPCWatches) - - # step 4: combine the two data sets, now we have both the new - # watches and the old watches (still in effect) in the same - # table. - watchTable = filteredNonSPCWatches - watchTable.extend(filteredSPCWatches) - - #LogStream.logUse("step4 PlotSPCWatches: combine nonSPC SPC: ", - #watchTable) - - # step 5: Looking at the spcCANrecords, eliminate any records - # in the watchTable that have a matching CAN. This will be records - # from WCNs that are "active", but now SPC has "CAN" the watch. - tmp = [] - compare = ['etn','id','phen','sig'] - for r in watchTable: - removeRecord = 0 - for s in spcCANRecords: - if self._recordCompare(r, s, compare): - removeRecord = 1 #match fround in nonSPCRecord - break - if not removeRecord: - tmp.append(r) - watchTable = tmp - - #LogStream.logUse("step5 PlotSPCWatches: remove active in nonSPC that ", - #" are CAN by SPC", watchTable) - - - - - # step 6: eliminate overlapping watches. Can't have multiple watches - # in the same zone. 
Also trim down the start time - zoneDict = self._convertToZoneDict(watchTable) - - zones = zoneDict.keys() - - for zone in zones: - watch = self._removeSupersededWatches(zoneDict[zone]) - watch['startTime'] = int(watch['startTime'] / 3600) * 3600 - zoneDict[zone] = watch - - #LogStream.logUse("step6 PlotSPCWatches: elim overlap: ", - #watchTable) - - return zoneDict - - # compares two dictionary records for equality - def _recordCompare(self, rec1, rec2, fields): - #Compares two records for equality, based on the fields given. - #Records are dictionaries. Fields are assumed to exist in both recs. - for f in fields: - if rec1[f] != rec2[f]: - return False - return True - - - def removeAllWatches(self): - # remove all SV.A and TO.A grids from the Hazards inventory - trList = self._hazUtils._getWEInventory("Hazards") - for tr in trList: - byteGrid, hazKey = self.getGrids("Fcst", "Hazards", "SFC", tr, - mode="First", cache=0) - uniqueKeys = self._hazUtils._getUniqueKeys(byteGrid, hazKey) - for uKey in uniqueKeys: - subKeys = self._hazUtils._getSubKeys(uKey) - if subKeys is not None: - for subKey in subKeys: - phen = self._hazUtils._keyPhen(subKey) - if phen in ["SV", "TO"]: - self._hazUtils._removeHazard("Hazards", tr, subKey) - - def _convertToZoneDict(self, watchTable): - #returns a dictionary organized by zone for each hazard - hazardsByZone = {} - for h in watchTable: - if hazardsByZone.has_key(h['id']): - hazardsByZone[h['id']].append(h) - else: - hazardsByZone[h['id']] = [h] - return hazardsByZone - - - def _removeSupersededWatches(self, zoneRecords): - # looks for multiple watches in the same zone, eliminates the - # lower etn (older) version. Returns a single record. 
- - # nothing needs calculating - if len(zoneRecords) == 1: - return zoneRecords[0] - - # TEST watches are etns >= 9000, eliminate test watches if there - # are any real watches - testWatches = 0 #etn >= 9000 - normalWatches = 0 #etn < 9000 - for zr in zoneRecords: - if zr['etn'] >= 9000: - testWatches = 1 - else: - normalWatches = 0 - if normalWatches: - list = [] - for zr in zoneRecords: - if zr['etn'] < 9000: - list.append(zr) - zoneRecords = list - - # any left? - if len(zoneRecords) == 1: - return zoneRecords[0] - - # find the higher watch etn for this year - watch = zoneRecords[0] #final choice - for index in xrange(1, len(zoneRecords)): - recYear = time.gmtime(zoneRecords[index]['issueTime'])[0] - watchYear = time.gmtime(watch['issueTime'])[0] - if recYear > watchYear or (recYear == watchYear and \ - zoneRecords[index]['etn'] > watch['etn']): - watch = zoneRecords[index] #higher watch - return watch - - - - def writeHazard(self, key, startTime, endTime, zones): - timeRange = self._hazUtils._makeTimeRange(startTime, endTime) - zoneMask = self._hazUtils._makeMask(zones) - self._hazUtils._addHazard("Hazards", timeRange, key, zoneMask) - self.log.info("{} {} {} {}".format(self._hazUtils._printTime(startTime), - self._hazUtils._printTime(endTime), key, zones)) - - def execute(self): - # get the hazard utilities - self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) - - self.setToolType("numeric") - - # see if the Hazards WE is loaded in the GFE, if not abort the tool - if not self._hazUtils._hazardsLoaded(): - self.log.warning("Hazards Weather Element must be loaded in the GFE before running PlotSPCWatches.") - self.cancel() - - - # if there are any temp grids loaded, refuse to run - if self._hazUtils._tempWELoaded(): - self.log.warning("There are temporary hazard grids loaded. " +\ - "Please merge all hazards grids before running PlotSPCWatches.") - self.cancel() - - # hazard locked anywhere by others? 
- if self.lockedByOther('Hazards', 'SFC'): - self.log.warning("There are conflicting locks (red locks - owned by others) on Hazards. " + \ - "Please resolve these before running PlotSPCWatches") - self.cancel() - - watchTable = self.getWatches() - self.removeAllWatches() - - # AWIPS2 porting note: to improve performance of this procedure, we've - # made a deviation in how the phenomena from the active table are saved - # to grids - # We write to the hazards grid in batches. The batches are based on a - # set of zones all having the same phen-sig, ETN, and valid time. - watchTable = watchTable.values() - def sortkey(x): - key = x['phen'] + x['sig'] + str(x['etn']) + \ - str(self._hazUtils._makeTimeRange(x['startTime'], x['endTime'])) + \ - x['id'] - return key - watchTable.sort(key=sortkey) - - hazKeyToWrite = None - hazStartToWrite = None - hazEndToWrite = None - hazZonesToWrite = [] - for zh in watchTable: - key = zh['phen'] + '.' + zh['sig'] + ":" + str(zh['etn']) - if key != hazKeyToWrite or zh['startTime'] != hazStartToWrite or zh['endTime'] != hazEndToWrite: - # we have a new hazard, save the previously collected hazard - # data to a grid. - if hazZonesToWrite: - self.writeHazard(hazKeyToWrite, hazStartToWrite, hazEndToWrite, hazZonesToWrite) - hazZonesToWrite = [] - hazKeyToWrite = key - hazStartToWrite = zh['startTime'] - hazEndToWrite = zh['endTime'] - hazZonesToWrite.append(zh['id']) - - # write the last set of collected hazard information to a grid - if hazZonesToWrite: - self.writeHazard(hazKeyToWrite, hazStartToWrite, hazEndToWrite, hazZonesToWrite) - - return +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# PlotSPCWatches +# +# This procedure synchronizes the hazards from SPC that are in the active table. +# +# +# Author: lefebvre +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 06/18/13 #2083 dgilling Code cleanup, reinstated logging +# for every hazard grid written. +# +######################################################################## + +## +# This is a base file that is not intended to be overridden. +## + + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Hazards"] + +from numpy import * +import SmartScript +import time +import HazardUtils +import logging +import UFStatusHandler + + +PLUGIN_NAME = 'com.raytheon.viz.gfe' +CATEGORY = 'GFE' + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss = dbss + + logging.basicConfig(level=logging.INFO) + self.log = logging.getLogger("PlotSPCWatches") + + self.log.addHandler(UFStatusHandler.UFStatusHandler(PLUGIN_NAME, CATEGORY, level=logging.WARNING)) + + def getWatches(self): + nonSPCRecords = [] + spcRecords = [] + spcCANRecords = [] + + vtecTable = self.vtecActiveTable() + vtecTable = self._hazUtils._filterVTECBasedOnGFEMode(vtecTable) + + phenSigList = ["TO.A", "SV.A"] + pilList = ['WOU','WCN'] + spcActions = ['NEW','CON','EXT','EXA','EXB'] + othActions = ['NEW','CON','EXT','EXA','EXB','CAN','EXP','UPG'] + spcActionsCAN = ['CAN'] + + # step 1: Separate into SPC/nonSPC/spcCAN, keep only 
certain actions + for v in vtecTable: + # filter based on phen/sig + phenSig = v['phen'] + "." + v['sig'] + if not phenSig in phenSigList: + continue + + # eliminate non-interesting products + if v['pil'] not in pilList: + continue + + # eliminate records in the past + if v['endTime'] < self._gmtime().unixTime(): + continue + + # add to appropriate list + if v['officeid'] == 'KWNS': + if v['act'] in spcActions: + spcRecords.append(v) + elif v['act'] in spcActionsCAN: + spcCANRecords.append(v) + else: + if v['act'] in othActions: + nonSPCRecords.append(v) + + #LogStream.logUse("step1 PlotSPCWatches: spcRec=", spcRecords, + # "\nspcCANRecords=", spcCANRecords, "\n nonSPCRec=", nonSPCRecords) + + # step 2: eliminate records in SPC that are also in non-SPC. Filter + # based on etn, id, phen/sig. Ignore action, i.e., CAN in the nonSPC + # record will override any action in SPC records. Remaining records + # will be the "NEW" watch. + compare = ['etn','id','phen','sig'] + filteredSPCWatches = [] + + for spcRec in spcRecords: + removeRecord = False + for nonSPCRec in nonSPCRecords: + if self._recordCompare(spcRec, nonSPCRec, compare): + removeRecord = True #match found in nonSPCRecord + break + if not removeRecord: + filteredSPCWatches.append(spcRec) + + #LogStream.logUse("step2 PlotSPCWatches: elim SPC in nonSPC. ", + #"spcRec=", filteredSPCWatches) + + # step 3: eliminate records in non-SPC that are CAN, EXP + eliminateActions = ['CAN','EXP'] + filteredNonSPCWatches = [] + for nonSPCRec in nonSPCRecords: + if nonSPCRec['act'] not in eliminateActions: + filteredNonSPCWatches.append(nonSPCRec) + + #LogStream.logUse("step3 PlotSPCWatches: elim nonSPC CANEXP. ", + # "nonSPCRec=", filteredSPCWatches) + + # step 4: combine the two data sets, now we have both the new + # watches and the old watches (still in effect) in the same + # table. 
+ watchTable = filteredNonSPCWatches + watchTable.extend(filteredSPCWatches) + + #LogStream.logUse("step4 PlotSPCWatches: combine nonSPC SPC: ", + #watchTable) + + # step 5: Looking at the spcCANrecords, eliminate any records + # in the watchTable that have a matching CAN. This will be records + # from WCNs that are "active", but now SPC has "CAN" the watch. + tmp = [] + compare = ['etn','id','phen','sig'] + for r in watchTable: + removeRecord = 0 + for s in spcCANRecords: + if self._recordCompare(r, s, compare): + removeRecord = 1 #match fround in nonSPCRecord + break + if not removeRecord: + tmp.append(r) + watchTable = tmp + + #LogStream.logUse("step5 PlotSPCWatches: remove active in nonSPC that ", + #" are CAN by SPC", watchTable) + + + + + # step 6: eliminate overlapping watches. Can't have multiple watches + # in the same zone. Also trim down the start time + zoneDict = self._convertToZoneDict(watchTable) + + zones = list(zoneDict.keys()) + + for zone in zones: + watch = self._removeSupersededWatches(zoneDict[zone]) + watch['startTime'] = int(watch['startTime'] / 3600) * 3600 + zoneDict[zone] = watch + + #LogStream.logUse("step6 PlotSPCWatches: elim overlap: ", + #watchTable) + + return zoneDict + + # compares two dictionary records for equality + def _recordCompare(self, rec1, rec2, fields): + #Compares two records for equality, based on the fields given. + #Records are dictionaries. Fields are assumed to exist in both recs. 
+ for f in fields: + if rec1[f] != rec2[f]: + return False + return True + + + def removeAllWatches(self): + # remove all SV.A and TO.A grids from the Hazards inventory + trList = self._hazUtils._getWEInventory("Hazards") + for tr in trList: + byteGrid, hazKey = self.getGrids("Fcst", "Hazards", "SFC", tr, + mode="First", cache=0) + uniqueKeys = self._hazUtils._getUniqueKeys(byteGrid, hazKey) + for uKey in uniqueKeys: + subKeys = self._hazUtils._getSubKeys(uKey) + if subKeys is not None: + for subKey in subKeys: + phen = self._hazUtils._keyPhen(subKey) + if phen in ["SV", "TO"]: + self._hazUtils._removeHazard("Hazards", tr, subKey) + + def _convertToZoneDict(self, watchTable): + #returns a dictionary organized by zone for each hazard + hazardsByZone = {} + for h in watchTable: + if h['id'] in hazardsByZone: + hazardsByZone[h['id']].append(h) + else: + hazardsByZone[h['id']] = [h] + return hazardsByZone + + + def _removeSupersededWatches(self, zoneRecords): + # looks for multiple watches in the same zone, eliminates the + # lower etn (older) version. Returns a single record. + + # nothing needs calculating + if len(zoneRecords) == 1: + return zoneRecords[0] + + # TEST watches are etns >= 9000, eliminate test watches if there + # are any real watches + testWatches = 0 #etn >= 9000 + normalWatches = 0 #etn < 9000 + for zr in zoneRecords: + if zr['etn'] >= 9000: + testWatches = 1 + else: + normalWatches = 0 + if normalWatches: + list = [] + for zr in zoneRecords: + if zr['etn'] < 9000: + list.append(zr) + zoneRecords = list + + # any left? 
+ if len(zoneRecords) == 1: + return zoneRecords[0] + + # find the higher watch etn for this year + watch = zoneRecords[0] #final choice + for index in range(1, len(zoneRecords)): + recYear = time.gmtime(zoneRecords[index]['issueTime'])[0] + watchYear = time.gmtime(watch['issueTime'])[0] + if recYear > watchYear or (recYear == watchYear and \ + zoneRecords[index]['etn'] > watch['etn']): + watch = zoneRecords[index] #higher watch + return watch + + + + def writeHazard(self, key, startTime, endTime, zones): + timeRange = self._hazUtils._makeTimeRange(startTime, endTime) + zoneMask = self._hazUtils._makeMask(zones) + self._hazUtils._addHazard("Hazards", timeRange, key, zoneMask) + self.log.info("{} {} {} {}".format(self._hazUtils._printTime(startTime), + self._hazUtils._printTime(endTime), key, zones)) + + def execute(self): + # get the hazard utilities + self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) + + self.setToolType("numeric") + + # see if the Hazards WE is loaded in the GFE, if not abort the tool + if not self._hazUtils._hazardsLoaded(): + self.log.warning("Hazards Weather Element must be loaded in the GFE before running PlotSPCWatches.") + self.cancel() + + + # if there are any temp grids loaded, refuse to run + if self._hazUtils._tempWELoaded(): + self.log.warning("There are temporary hazard grids loaded. " +\ + "Please merge all hazards grids before running PlotSPCWatches.") + self.cancel() + + # hazard locked anywhere by others? + if self.lockedByOther('Hazards', 'SFC'): + self.log.warning("There are conflicting locks (red locks - owned by others) on Hazards. " + \ + "Please resolve these before running PlotSPCWatches") + self.cancel() + + watchTable = self.getWatches() + self.removeAllWatches() + + # AWIPS2 porting note: to improve performance of this procedure, we've + # made a deviation in how the phenomena from the active table are saved + # to grids + # We write to the hazards grid in batches. 
The batches are based on a + # set of zones all having the same phen-sig, ETN, and valid time. + watchTable = list(watchTable.values()) + def sortkey(x): + key = x['phen'] + x['sig'] + str(x['etn']) + \ + str(self._hazUtils._makeTimeRange(x['startTime'], x['endTime'])) + \ + x['id'] + return key + watchTable.sort(key=sortkey) + + hazKeyToWrite = None + hazStartToWrite = None + hazEndToWrite = None + hazZonesToWrite = [] + for zh in watchTable: + key = zh['phen'] + '.' + zh['sig'] + ":" + str(zh['etn']) + if key != hazKeyToWrite or zh['startTime'] != hazStartToWrite or zh['endTime'] != hazEndToWrite: + # we have a new hazard, save the previously collected hazard + # data to a grid. + if hazZonesToWrite: + self.writeHazard(hazKeyToWrite, hazStartToWrite, hazEndToWrite, hazZonesToWrite) + hazZonesToWrite = [] + hazKeyToWrite = key + hazStartToWrite = zh['startTime'] + hazEndToWrite = zh['endTime'] + hazZonesToWrite.append(zh['id']) + + # write the last set of collected hazard information to a grid + if hazZonesToWrite: + self.writeHazard(hazKeyToWrite, hazStartToWrite, hazEndToWrite, hazZonesToWrite) + + return diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PlotTPCEvents.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PlotTPCEvents.py index 58158c2976..92e0ce601b 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PlotTPCEvents.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PlotTPCEvents.py @@ -1,232 +1,232 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. 
Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# PlotTPCEvents1 -# -# This procedure synchonizes the hazards from TPC that are in the active table -# for some zones. -# -# -# Author: lefebvre/mathewson -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 09/30/15 18141 ryu Allow processing for TCV issued by CPHC -# 07/11/17 20104 ryu Plot only records from TCV for pacific sites -# and from PTC for other sites -# 07/18/17 20104 ryu Check xxxid to make sure records -# are not from TCVHFO -######################################################################## - -## -# This is a base file that is not intended to be overridden. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Hazards"] - -from numpy import * -import SmartScript -import AbsTime -import HazardUtils -import VTECPartners -import LogStream, logging -import UFStatusHandler - -PLUGIN_NAME = 'com.raytheon.viz.gfe' -CATEGORY = 'GFE' - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self._dbss = dbss - - logging.basicConfig(level=logging.INFO) - self.log = logging.getLogger("PlotTPCEvents") - - self.log.addHandler(UFStatusHandler.UFStatusHandler(PLUGIN_NAME, CATEGORY, level=logging.WARNING)) - - ## - # getWatches returns ({activeWatches}, {cancelledWatches}) - # the returned records are not VTEC table entries, but rather - # a dictionary with key of (phen,sig) and value of zone ids - # - # @return: active watches and cancelled watches - # @rtype: 2-tuple of dicts; each dict is keyed by a 2-tuple of strings, - # and contains lists of strings for its values. - def getWatches(self): - """ - getWatches returns ({activeWatches}, {cancelledWatches}). - The returned records are dictionaries with keys of (phen, sig) - and values of [zoneida, zoneidb,...]. - """ - - active = {} #add records - cancel = {} #remove records - - vtecTable = self.vtecActiveTable() - vtecTable = self._hazUtils._filterVTECBasedOnGFEMode(vtecTable) - - siteID = self.getSiteID() - - sourcePil = "PTC" - if siteID in ['HFO', 'SGX', 'LOX']: - sourcePil = "TCV" - - # step 1: just save the tropical hazards - for v in vtecTable: - - # process only records in the phenSigList and - # from the right pil - if not (v['phen'] in ['HU','TR'] and v['pil'] == sourcePil): - continue - - # only look at the KNHC records - if v['officeid'] not in VTECPartners.VTEC_TPC_SITE: - continue - - # For HFO only: we have to make sure the records are from TCVCPx - # since HFO will issue TCV as well. 
- if v['xxxid'] == siteID: - continue - - key = (v['phen'], v['sig'], v['etn']) - - # cancel events - if v['act'] in ['CAN','EXP','UPG']: - if not key in cancel: - cancel[key] = [] - cancel[key].append(v['id']) - - # active events - elif v['act'] in ["NEW", "EXA", "EXB", "EXT", "CON"]: - if not key in active: - active[key] = [] - active[key].append(v['id']) - - return (active, cancel) - - ## - # Remove all tropical grids from the Hazards inventory that contain - # the specified phen, sig for the given zone. zone can be a single - # zone name, or a list of zones. - # - # @param phen: Phenomenon code - # @type phen: string - # @param sig: Significance code - # @type sig: string - # @param zones: Zones from which hazard should be removed - # @type zones: list of strings or string - def removeHazardByZone(self, phen, sig, zones): - """ - Remove all tropical grids from the Hazards inventory that contain - the specified phen, sig for the given zone. zone can be a single - zone name, or a list of zones. 
- """ - - if type(zones) is not list: - zonemask = self._hazUtils._makeMask([zones]) - else: - zonemask = self._hazUtils._makeMask(zones) - - trList = self._hazUtils._getWEInventory("Hazards") - for tr in trList: - byteGrid, hazKey = self.getGrids("Fcst", "Hazards", "SFC", tr, - mode="First", cache=0) - uniqueKeys = self._hazUtils._getUniqueKeys(byteGrid, hazKey) - - startT = self._hazUtils._printTime(tr.startTime().unixTime()) - endT = self._hazUtils._printTime(tr.startTime().unixTime()) - - for uKey in uniqueKeys: - subKeys = self._hazUtils._getSubKeys(uKey) - if subKeys is not None: - for subKey in subKeys: - hazPhen = self._hazUtils._keyPhen(subKey) - hazSig = self._hazUtils._keySig(subKey) #may have seg - if phen == hazPhen and sig == hazSig[0]: - self._hazUtils._removeHazard("Hazards", tr, subKey, - mask=zonemask) - LogStream.logEvent("Remove: ", startT, endT, - subKey, zones) - return - - def execute(self, editArea, timeRange, varDict): - # get the hazard utilities - self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) - - self.setToolType("numeric") - - #see if the Hazards WE is loaded in the GFE, if not abort the tool - if not self._hazUtils._hazardsLoaded(): - self.statusBarMsg("Hazards Weather Element must be loaded in "+\ - "the GFE before running PlotTPCEvents.", "S") - self.cancel() - - #ensure there are no temp grids loaded, refuse to run - if self._hazUtils._tempWELoaded(): - self.statusBarMsg("There are temporary hazard grids loaded. " + \ - "Please merge all hazards grids before running PlotTPCEvents.", "S") - self.cancel() - - if self.lockedByOther('Hazards', 'SFC'): - self.statusBarMsg("There are conflicting locks (red locks - owned by others) on Hazards. 
" + \ - "Please resolve these before running PlotTPCEvents", "S") - self.cancel() - - # get just the tropical events - (activeEvents, cancelEvents) = self.getWatches() - - # remove any cancelled watches from the grids, but for all times - for phen,sig,etn in cancelEvents.keys(): - self.removeHazardByZone(phen, sig, cancelEvents[(phen,sig,etn)]) - - # remove any active events from the grid, to handle any slight grid - # time differences from previous runs - for phen,sig,etn in activeEvents.keys(): - self.removeHazardByZone(phen, sig, activeEvents[(phen,sig,etn)]) - - #have to fake the start/ending times since tropical events - #have no time information - changed to 48 hours in 9.3 - startT = int(AbsTime.current().unixTime()/3600) * 3600 - endT = startT + 48*3600 - - # add active events - timeRange = self._hazUtils._makeTimeRange(startT, endT) - for phen, sig, etn in activeEvents.keys(): - key = phen + '.' + sig + ':' + str(etn) - zones = activeEvents[(phen, sig, etn)] - zoneMask = self._hazUtils._makeMask(zones) - self._hazUtils._addHazard("Hazards", timeRange, key, zoneMask) - LogStream.logVerbose("Add:", self._hazUtils._printTime(startT), - self._hazUtils._printTime(endT), key, zones) - - LogStream.logVerbose("PlotTPCEvents completed normally.", "(" + - str(len(activeEvents.keys())) + " events)") - return +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# PlotTPCEvents1 +# +# This procedure synchonizes the hazards from TPC that are in the active table +# for some zones. 
+# +# +# Author: lefebvre/mathewson +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 09/30/15 18141 ryu Allow processing for TCV issued by CPHC +# 07/11/17 20104 ryu Plot only records from TCV for pacific sites +# and from PTC for other sites +# 07/18/17 20104 ryu Check xxxid to make sure records +# are not from TCVHFO +######################################################################## + +## +# This is a base file that is not intended to be overridden. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Hazards"] + +from numpy import * +import SmartScript +import AbsTime +import HazardUtils +import VTECPartners +import LogStream, logging +import UFStatusHandler + +PLUGIN_NAME = 'com.raytheon.viz.gfe' +CATEGORY = 'GFE' + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss = dbss + + logging.basicConfig(level=logging.INFO) + self.log = logging.getLogger("PlotTPCEvents") + + self.log.addHandler(UFStatusHandler.UFStatusHandler(PLUGIN_NAME, CATEGORY, level=logging.WARNING)) + + ## + # getWatches returns ({activeWatches}, {cancelledWatches}) + # the returned records are not VTEC table entries, but rather + # a dictionary with key of (phen,sig) and value of zone ids + # + # @return: active watches and cancelled watches + # @rtype: 2-tuple of dicts; each dict is keyed by a 2-tuple of strings, + # and contains lists of strings for its values. + def getWatches(self): + """ + getWatches returns ({activeWatches}, {cancelledWatches}). + The returned records are dictionaries with keys of (phen, sig) + and values of [zoneida, zoneidb,...]. 
+ """ + + active = {} #add records + cancel = {} #remove records + + vtecTable = self.vtecActiveTable() + vtecTable = self._hazUtils._filterVTECBasedOnGFEMode(vtecTable) + + siteID = self.getSiteID() + + sourcePil = "PTC" + if siteID in ['HFO', 'SGX', 'LOX']: + sourcePil = "TCV" + + # step 1: just save the tropical hazards + for v in vtecTable: + + # process only records in the phenSigList and + # from the right pil + if not (v['phen'] in ['HU','TR'] and v['pil'] == sourcePil): + continue + + # only look at the KNHC records + if v['officeid'] not in VTECPartners.VTEC_TPC_SITE: + continue + + # For HFO only: we have to make sure the records are from TCVCPx + # since HFO will issue TCV as well. + if v['xxxid'] == siteID: + continue + + key = (v['phen'], v['sig'], v['etn']) + + # cancel events + if v['act'] in ['CAN','EXP','UPG']: + if not key in cancel: + cancel[key] = [] + cancel[key].append(v['id']) + + # active events + elif v['act'] in ["NEW", "EXA", "EXB", "EXT", "CON"]: + if not key in active: + active[key] = [] + active[key].append(v['id']) + + return (active, cancel) + + ## + # Remove all tropical grids from the Hazards inventory that contain + # the specified phen, sig for the given zone. zone can be a single + # zone name, or a list of zones. + # + # @param phen: Phenomenon code + # @type phen: string + # @param sig: Significance code + # @type sig: string + # @param zones: Zones from which hazard should be removed + # @type zones: list of strings or string + def removeHazardByZone(self, phen, sig, zones): + """ + Remove all tropical grids from the Hazards inventory that contain + the specified phen, sig for the given zone. zone can be a single + zone name, or a list of zones. 
+ """ + + if type(zones) is not list: + zonemask = self._hazUtils._makeMask([zones]) + else: + zonemask = self._hazUtils._makeMask(zones) + + trList = self._hazUtils._getWEInventory("Hazards") + for tr in trList: + byteGrid, hazKey = self.getGrids("Fcst", "Hazards", "SFC", tr, + mode="First", cache=0) + uniqueKeys = self._hazUtils._getUniqueKeys(byteGrid, hazKey) + + startT = self._hazUtils._printTime(tr.startTime().unixTime()) + endT = self._hazUtils._printTime(tr.startTime().unixTime()) + + for uKey in uniqueKeys: + subKeys = self._hazUtils._getSubKeys(uKey) + if subKeys is not None: + for subKey in subKeys: + hazPhen = self._hazUtils._keyPhen(subKey) + hazSig = self._hazUtils._keySig(subKey) #may have seg + if phen == hazPhen and sig == hazSig[0]: + self._hazUtils._removeHazard("Hazards", tr, subKey, + mask=zonemask) + LogStream.logEvent("Remove: ", startT, endT, + subKey, zones) + return + + def execute(self, editArea, timeRange, varDict): + # get the hazard utilities + self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) + + self.setToolType("numeric") + + #see if the Hazards WE is loaded in the GFE, if not abort the tool + if not self._hazUtils._hazardsLoaded(): + self.statusBarMsg("Hazards Weather Element must be loaded in "+\ + "the GFE before running PlotTPCEvents.", "S") + self.cancel() + + #ensure there are no temp grids loaded, refuse to run + if self._hazUtils._tempWELoaded(): + self.statusBarMsg("There are temporary hazard grids loaded. " + \ + "Please merge all hazards grids before running PlotTPCEvents.", "S") + self.cancel() + + if self.lockedByOther('Hazards', 'SFC'): + self.statusBarMsg("There are conflicting locks (red locks - owned by others) on Hazards. 
" + \ + "Please resolve these before running PlotTPCEvents", "S") + self.cancel() + + # get just the tropical events + (activeEvents, cancelEvents) = self.getWatches() + + # remove any cancelled watches from the grids, but for all times + for phen,sig,etn in list(cancelEvents.keys()): + self.removeHazardByZone(phen, sig, cancelEvents[(phen,sig,etn)]) + + # remove any active events from the grid, to handle any slight grid + # time differences from previous runs + for phen,sig,etn in list(activeEvents.keys()): + self.removeHazardByZone(phen, sig, activeEvents[(phen,sig,etn)]) + + #have to fake the start/ending times since tropical events + #have no time information - changed to 48 hours in 9.3 + startT = int(AbsTime.current().unixTime()/3600) * 3600 + endT = startT + 48*3600 + + # add active events + timeRange = self._hazUtils._makeTimeRange(startT, endT) + for phen, sig, etn in list(activeEvents.keys()): + key = phen + '.' + sig + ':' + str(etn) + zones = activeEvents[(phen, sig, etn)] + zoneMask = self._hazUtils._makeMask(zones) + self._hazUtils._addHazard("Hazards", timeRange, key, zoneMask) + LogStream.logVerbose("Add:", self._hazUtils._printTime(startT), + self._hazUtils._printTime(endT), key, zones) + + LogStream.logVerbose("PlotTPCEvents completed normally.", "(" + + str(len(list(activeEvents.keys()))) + " events)") + return diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PopulateFromClimo.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PopulateFromClimo.py index 9538047256..0bd048e685 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PopulateFromClimo.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/PopulateFromClimo.py @@ -1,263 +1,263 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. 
+## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# PopulateFromClimo -# -# This procedure calculates MinT or MaxT grids based on the NCDC or -# PRISM climatology grids stored in a netCDF file. This file must be -# present for this procedure to work. -# -# Author: lefebvre -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -VariableList = [("Weather Element:" , "MaxT", "radio", ["MaxT", "MinT"]), - ("Climo Source:" , "PRISM", "radio", ["PRISM", "NCDC"]), - ] -MenuItems = ["Populate"] - -from numpy import * -import SmartScript -import types, copy -import time -import AbsTime -import TimeRange - -MODEL = "Fcst" -LEVEL = "SFC" - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - ## - # Get the list of time ranges at the grid whose element name is WEName - # contains grids. The level of the weather element is assumed to be LEVEL. - # - # @param dbName: Name of the database to consult - # @type dbName: string - # @param WEName: Name of a weather element - # @type WEName: string - # @param timeRange: Limits of time range of interest, or None for all times - # @type timeRange: Java TimeRange or Nonetype - # @return: time ranges at which WEName has data. - # @rtype: Python list of Python TimeRange objects - def _getWEInventory(self, dbName, WEName, timeRange=None): - # set up a timeRange if it is None - if timeRange is None: - timeRange = TimeRange.allTimes() - parm = self.getParm(dbName, WEName, LEVEL) - if parm is None: - return [] - inv = parm.getGridInventory(timeRange.toJavaObj()) - if inv is None: self.statusBarMsg("inv is None","S") - elif len(inv)==0: print self.statusBarMsg("PFC: len(inv)==0","S") - trList = [] - for gd in inv: - tr = TimeRange.TimeRange(gd.getGridTime()) - trList.append(tr) - return trList - - # Main cubic spline method that accepts a list of grids and int time - # along with a list of times for which grids are to be calculated. - # This method returns the corresponding list of grids that matches the - # interpTimes list. - def _cubicSpline(self, grids, times, interpTimes): - - # STEP 1: Create coefficients for cubic spline curve - # zCoefs : List of cubic spline coefficient grids computed to fit the - # curve defined by grids and times - # n : length of grids - 1. 
- # Determine coefficients - if grids == []: - self.statusBarMsg("No grids sent to _cublicSpline. No grids returned", "S") - return - gridShape = shape(grids[0]) - - timeGrids = [] - for t in times: - tGrid = zeros(gridShape) + t - timeGrids.append(tGrid) - - n = len(grids) - 1 - zCoefs = self._spline3_coef(n, timeGrids, grids) - - # Create interpolated grids using coefficients - # interpTimes : List of times for which we want interpolated grids - # gridList : List of interpolated Grids - - # Create interpolated grids - gridList = [] - for interpTime in interpTimes: - x = zeros(gridShape) + interpTime # make a grid of times - xGrid = self._spline3_eval(n, timeGrids, grids, zCoefs, x) - gridList.append(xGrid) - - return gridList - - # This method calculates the spline coefficients that are later used to - # calculate grids at the interpolation times. This method is just a helper - # method to _cubicSpline and should not be called directly. - def _spline3_coef(self, n, t, y): - gridShape = y[0].shape - # These will get filled in later with grids as values - # They are just place holders - h=[0] * n - b=[0] * n - u=[0] * n - v=[0] * n - z=[0] * (n+1) - # Calculate h and b - # range 0 thru n-1 - for i in xrange(n): - h[i] = t[i+1] - t[i] - b[i] = (y[i+1] - y[i])/h[i] - # Calculate u and v as functions of h and b - # range 1 thru n-1 - u[1] = (2*(h[0] + h[1])) - v[1] = (6*(b[1]-b[0])) - for i in xrange(2, n): - u[i] = (2.0*(h[i]+h[i-1]) - h[i-1].astype(float32)**2.0/u[i-1]) - v[i] = (6.0*(b[i]-b[i-1]) - h[i-1]*v[i-1]/u[i-1]) - # Calculate z - # range 0 thru n - z[n] = zeros(gridShape) - for i in xrange(n-1, 0, -1): - z[i] = (v[i] - h[i]*z[i+1])/u[i] - z[0] = zeros(gridShape) - return z - - # This method accepts the spline coefficients and calculates a grid. 
- # This method is a help method to _cubicSpline and should not be - # called directly - def _spline3_eval(self, n, t, y, z, x): - for i in xrange(n-1, 0, -1): - if x[0][0]-t[i][0][0] >= 0: - break - h = t[i+1]-t[i] - tmp = (z[i]/2) + (x-t[i]) * (z[i+1]-z[i])/(6*h) - tmp = -(h/6)*(z[i+1]+2*z[i]) + (y[i+1]-y[i])/h + (x-t[i]) * tmp - - return y[i] + (x-t[i]) * tmp - - def parmIsLoaded(self, weName): - tupleList = self.loadedParms() # list of all loaded parms - for element, level, databaseID in tupleList: - modelName = databaseID.modelName() - if element == weName and level == "SFC" and modelName == "Fcst": - return 1 - - # if we got this far we didn't find it. - return 0 - - - # This main method retrieves the climatology grids, assigns - # appropriate times to each and calls the _cubicSpline method - # to calculate the grid values inbetween the given climatology - # grids. This methods creates grids of MinT or MaxT over the - # timeRange selected in the GridManager. - def execute(self, timeRange, varDict): - - # get the climo source - parmName= varDict["Weather Element:"] - climoSource = varDict["Climo Source:"] - - if not self.parmIsLoaded(parmName): - self.statusBarMsg("You must load the " + parmName + - " element before you can populate it.", "S") - return # can't go on - - - # get times for all the grids that overlap the selected time range - startTime = timeRange.startTime() - hours = timeRange.duration() / 3600 - someTimeRange, gridTimes = self.getGridTimes("Fcst", parmName, "SFC", - timeRange.startTime(), hours) - - if len(gridTimes) == 0: - self.statusBarMsg("Please select a MinT or MaxT timeRange to populate.", "S") - return # can't go on - - # make a list of AbsTimes from the parmName times - interpTimes = [] - baseTime = gridTimes[0].startTime() - for g in gridTimes: - interpTimes.append(g.startTime().unixTime()) - - siteID = self.getSiteID() - # get all of the grids from the climo database - dbName = siteID + "_D2D_" + climoSource + "Climo" - - if 
parmName == "MaxT": - weName = "mxt" - elif parmName == "MinT": - weName = "mnt" - else: - self.statusBarMsg("Invalid parmName:" + parmName, "S") - return - - # get the climo grid inventory - trList = self._getWEInventory(dbName, weName) - if len(trList) == 0: - self.statusBarMsg("No climatology grids available for " + parmName, "S") - return # can't go on - - # Figure out what year it is - - currentTime = self._gmtime().unixTime() - jan01Tuple = (time.gmtime(currentTime)[0],1,1,0,0,0,0,0,0) # 01 Jan this year - jan01Secs = time.mktime(jan01Tuple) # 01 Jan in seconds - - # Fetch the grids from the climo database, but warp the times - # so that they are set to this year. - gridList = [] - times = [] - for tr in trList: - grid = self.getGrids(dbName, weName, "SFC", tr) - gridList.append(grid) - times.append(tr.startTime().unixTime() + jan01Secs) - - # tack on the Dec. at the beginning and the Jan at the end so - # calculations from Dec 15 to Jan 15 are correct. - gridList.insert(0, gridList[-1]) # prepend the last grid - gridList.append(gridList[1]) # append what was the first grid - - days31 = 31 * 24 * 3600 # the number of seconds in 31 days - times.insert(0, times[0] - days31) # 15 Dec the previous year - times.append(times[-1] + days31) # 15 Jan the next year - - interpGrids = self._cubicSpline(gridList, times, interpTimes) - - parm = self.getParm("Fcst", parmName, "SFC") - maxLimit = parm.getGridInfo().getMaxValue() - minLimit = parm.getGridInfo().getMinValue() - - for i in xrange(len(gridTimes)): - # convert K to F first - grid = self.KtoF(interpGrids[i]) - grid = clip(grid, minLimit, maxLimit) # clip to min/max limits - self.createGrid("Fcst", parmName, "SCALAR", grid, gridTimes[i]) +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# PopulateFromClimo +# +# This procedure calculates MinT or MaxT grids based on the NCDC or +# PRISM climatology grids stored in a netCDF file. This file must be +# present for this procedure to work. +# +# Author: lefebvre +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +VariableList = [("Weather Element:" , "MaxT", "radio", ["MaxT", "MinT"]), + ("Climo Source:" , "PRISM", "radio", ["PRISM", "NCDC"]), + ] +MenuItems = ["Populate"] + +from numpy import * +import SmartScript +import types, copy +import time +import AbsTime +import TimeRange + +MODEL = "Fcst" +LEVEL = "SFC" + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + ## + # Get the list of time ranges at the grid whose element name is WEName + # contains grids. The level of the weather element is assumed to be LEVEL. + # + # @param dbName: Name of the database to consult + # @type dbName: string + # @param WEName: Name of a weather element + # @type WEName: string + # @param timeRange: Limits of time range of interest, or None for all times + # @type timeRange: Java TimeRange or Nonetype + # @return: time ranges at which WEName has data. 
+ # @rtype: Python list of Python TimeRange objects + def _getWEInventory(self, dbName, WEName, timeRange=None): + # set up a timeRange if it is None + if timeRange is None: + timeRange = TimeRange.allTimes() + parm = self.getParm(dbName, WEName, LEVEL) + if parm is None: + return [] + inv = parm.getGridInventory(timeRange.toJavaObj()) + if inv is None: self.statusBarMsg("inv is None","S") + elif len(inv)==0: print(self.statusBarMsg("PFC: len(inv)==0","S")) + trList = [] + for gd in inv: + tr = TimeRange.TimeRange(gd.getGridTime()) + trList.append(tr) + return trList + + # Main cubic spline method that accepts a list of grids and int time + # along with a list of times for which grids are to be calculated. + # This method returns the corresponding list of grids that matches the + # interpTimes list. + def _cubicSpline(self, grids, times, interpTimes): + + # STEP 1: Create coefficients for cubic spline curve + # zCoefs : List of cubic spline coefficient grids computed to fit the + # curve defined by grids and times + # n : length of grids - 1. + # Determine coefficients + if grids == []: + self.statusBarMsg("No grids sent to _cublicSpline. No grids returned", "S") + return + gridShape = shape(grids[0]) + + timeGrids = [] + for t in times: + tGrid = zeros(gridShape) + t + timeGrids.append(tGrid) + + n = len(grids) - 1 + zCoefs = self._spline3_coef(n, timeGrids, grids) + + # Create interpolated grids using coefficients + # interpTimes : List of times for which we want interpolated grids + # gridList : List of interpolated Grids + + # Create interpolated grids + gridList = [] + for interpTime in interpTimes: + x = zeros(gridShape) + interpTime # make a grid of times + xGrid = self._spline3_eval(n, timeGrids, grids, zCoefs, x) + gridList.append(xGrid) + + return gridList + + # This method calculates the spline coefficients that are later used to + # calculate grids at the interpolation times. 
This method is just a helper + # method to _cubicSpline and should not be called directly. + def _spline3_coef(self, n, t, y): + gridShape = y[0].shape + # These will get filled in later with grids as values + # They are just place holders + h=[0] * n + b=[0] * n + u=[0] * n + v=[0] * n + z=[0] * (n+1) + # Calculate h and b + # range 0 thru n-1 + for i in range(n): + h[i] = t[i+1] - t[i] + b[i] = (y[i+1] - y[i])/h[i] + # Calculate u and v as functions of h and b + # range 1 thru n-1 + u[1] = (2*(h[0] + h[1])) + v[1] = (6*(b[1]-b[0])) + for i in range(2, n): + u[i] = (2.0*(h[i]+h[i-1]) - h[i-1].astype(float32)**2.0/u[i-1]) + v[i] = (6.0*(b[i]-b[i-1]) - h[i-1]*v[i-1]/u[i-1]) + # Calculate z + # range 0 thru n + z[n] = zeros(gridShape) + for i in range(n-1, 0, -1): + z[i] = (v[i] - h[i]*z[i+1])/u[i] + z[0] = zeros(gridShape) + return z + + # This method accepts the spline coefficients and calculates a grid. + # This method is a help method to _cubicSpline and should not be + # called directly + def _spline3_eval(self, n, t, y, z, x): + for i in range(n-1, 0, -1): + if x[0][0]-t[i][0][0] >= 0: + break + h = t[i+1]-t[i] + tmp = (z[i]/2) + (x-t[i]) * (z[i+1]-z[i])/(6*h) + tmp = -(h/6)*(z[i+1]+2*z[i]) + (y[i+1]-y[i])/h + (x-t[i]) * tmp + + return y[i] + (x-t[i]) * tmp + + def parmIsLoaded(self, weName): + tupleList = self.loadedParms() # list of all loaded parms + for element, level, databaseID in tupleList: + modelName = databaseID.modelName() + if element == weName and level == "SFC" and modelName == "Fcst": + return 1 + + # if we got this far we didn't find it. + return 0 + + + # This main method retrieves the climatology grids, assigns + # appropriate times to each and calls the _cubicSpline method + # to calculate the grid values inbetween the given climatology + # grids. This methods creates grids of MinT or MaxT over the + # timeRange selected in the GridManager. 
+ def execute(self, timeRange, varDict): + + # get the climo source + parmName= varDict["Weather Element:"] + climoSource = varDict["Climo Source:"] + + if not self.parmIsLoaded(parmName): + self.statusBarMsg("You must load the " + parmName + + " element before you can populate it.", "S") + return # can't go on + + + # get times for all the grids that overlap the selected time range + startTime = timeRange.startTime() + hours = timeRange.duration() / 3600 + someTimeRange, gridTimes = self.getGridTimes("Fcst", parmName, "SFC", + timeRange.startTime(), hours) + + if len(gridTimes) == 0: + self.statusBarMsg("Please select a MinT or MaxT timeRange to populate.", "S") + return # can't go on + + # make a list of AbsTimes from the parmName times + interpTimes = [] + baseTime = gridTimes[0].startTime() + for g in gridTimes: + interpTimes.append(g.startTime().unixTime()) + + siteID = self.getSiteID() + # get all of the grids from the climo database + dbName = siteID + "_D2D_" + climoSource + "Climo" + + if parmName == "MaxT": + weName = "mxt" + elif parmName == "MinT": + weName = "mnt" + else: + self.statusBarMsg("Invalid parmName:" + parmName, "S") + return + + # get the climo grid inventory + trList = self._getWEInventory(dbName, weName) + if len(trList) == 0: + self.statusBarMsg("No climatology grids available for " + parmName, "S") + return # can't go on + + # Figure out what year it is + + currentTime = self._gmtime().unixTime() + jan01Tuple = (time.gmtime(currentTime)[0],1,1,0,0,0,0,0,0) # 01 Jan this year + jan01Secs = time.mktime(jan01Tuple) # 01 Jan in seconds + + # Fetch the grids from the climo database, but warp the times + # so that they are set to this year. + gridList = [] + times = [] + for tr in trList: + grid = self.getGrids(dbName, weName, "SFC", tr) + gridList.append(grid) + times.append(tr.startTime().unixTime() + jan01Secs) + + # tack on the Dec. at the beginning and the Jan at the end so + # calculations from Dec 15 to Jan 15 are correct. 
+ gridList.insert(0, gridList[-1]) # prepend the last grid + gridList.append(gridList[1]) # append what was the first grid + + days31 = 31 * 24 * 3600 # the number of seconds in 31 days + times.insert(0, times[0] - days31) # 15 Dec the previous year + times.append(times[-1] + days31) # 15 Jan the next year + + interpGrids = self._cubicSpline(gridList, times, interpTimes) + + parm = self.getParm("Fcst", parmName, "SFC") + maxLimit = parm.getGridInfo().getMaxValue() + minLimit = parm.getGridInfo().getMinValue() + + for i in range(len(gridTimes)): + # convert K to F first + grid = self.KtoF(interpGrids[i]) + grid = clip(grid, minLimit, maxLimit) # clip to min/max limits + self.createGrid("Fcst", parmName, "SCALAR", grid, gridTimes[i]) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Populate_SkyProcedure.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Populate_SkyProcedure.py index 7dd1b93316..ca3fd4705b 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Populate_SkyProcedure.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Populate_SkyProcedure.py @@ -1,126 +1,126 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Populate_SkyProcedure -# -# Author: Pete Banacos (began: 12/12/06) -# Last Updated: 10/07/08 -# -# This program makes use of the Populate_SkyTool to populate sky grids. -# 10/7/08: Update to allow flexible time range (start and end time doesn't -# need to correspond to model available times) -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Populate_SkyProcedure +# +# Author: Pete Banacos (began: 12/12/06) +# Last Updated: 10/07/08 +# +# This program makes use of the Populate_SkyTool to populate sky grids. 
+# 10/7/08: Update to allow flexible time range (start and end time doesn't +# need to correspond to model available times) +# ---------------------------------------------------------------------------- -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify -MenuItems = ["Populate"] - -# The ToolList is optional, but recommended, if you are calling -# Smart Tools from your Script. -# If present, it can be used to show which grids will be -# modified by the Script. - -ToolList = [("Populate_SkyTool", "Sky"), - ] -VariableList = [("Populate SkyProcedure Version 1.0","","label"), - ("Note:" , "Before running, highlight desired time range in the Grid Manager.", "alphaNumeric"), - ("Model:", "NAM12", "radio", ["GFS40", "NAM12"]), - ("Model Run:", "Current", "radio", ["Current", "Previous"]), - ("Layer depth:", "50mb", "radio", ["50mb", "25mb"]), - ("Use RH w.r.t. ICE @ T < -25C?", "No", "radio", ["Yes", "No"]), - ("", "", "label"), - ("Include high clouds (500-300mb)?", "No", "radio", ["Yes", "No"]), - ("Include clouds below 925mb?", "Yes", "radio", ["Yes", "No"]), - ("5% Sky Cover threshold at RH percentage:", 60., "scale", [44., 74.],2.0), - ("Above value sets RH threshold for CLR skies.", "", "label"), - ("Calibration:", 1.00, "scale", [1.00, 1.50],0.02), - ("Raise calibration to get more sky cover for a given RH.", "", "label"), - (" --- Limit Values Section --- ", "", "label"), - ("Don't give me sky cover above (percent):", 100, "scale", [0, 100], 1), - ("Don't give me sky cover below (percent):", 0, "scale", [0, 100], 1), - ] - -import SmartScript -import time -from Tkinter import * - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, editArea, timeRange, varDict): - - databaseID = self.findDatabase("Fcst") - model1 = varDict["Model:"] - modelrun = varDict["Model Run:"] - 
modeltemp = "D2D_" + model1 - - if modelrun == "Current": - model = self.findDatabase(modeltemp, 0) - else: - model = self.findDatabase(modeltemp, -1) - - time1 = time.gmtime() - (year,month,day,h,m,s,w,day,dst)=time1 - shift = (h*3600) + (m*60) + s - starth = timeRange.startTime().unixTime() - endh = timeRange.endTime().unixTime() - now = int(time.time()) - zero = now - shift - starttime = (starth-zero)/3600 - endtime = (endh-zero)/3600 - - # print timeRange - # print "start_hr:", starttime, "end_hr:", endtime - - if model1 == "NAM12": - self.deleteCmd(['Sky'], timeRange) - for i in xrange(starttime,endtime): - curmod = i % 3 - if curmod == 0: - # print "make grid for hour", i - Sky_timeRange = self.createTimeRange(i,i+1,"Zulu") - # print "Sky_TimeRange:", Sky_timeRange - self.createFromScratchCmd(['Sky'], Sky_timeRange, 1, 1) - self.callSmartTool("Populate_SkyTool",'Sky', timeRange=Sky_timeRange, varDict=varDict) - else: - # Use "GFS40"... - self.deleteCmd(['Sky'], timeRange) - for i in xrange(starttime,endtime): - curmod = i % 6 - if curmod == 0: - # print "make grid for hour", i - Sky_timeRange = self.createTimeRange(i,i+1,"Zulu") - # print "Sky_TimeRange:", Sky_timeRange - self.createFromScratchCmd(['Sky'], Sky_timeRange, 1, 1) - self.callSmartTool("Populate_SkyTool",'Sky', timeRange=Sky_timeRange, varDict=varDict) - - # Interpolate gaps. Use fcst database either side of time range... - self.interpolateCmd(['Sky'], timeRange, "GAPS", "SYNC") +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify +MenuItems = ["Populate"] + +# The ToolList is optional, but recommended, if you are calling +# Smart Tools from your Script. 
+# If present, it can be used to show which grids will be +# modified by the Script. + +ToolList = [("Populate_SkyTool", "Sky"), + ] +VariableList = [("Populate SkyProcedure Version 1.0","","label"), + ("Note:" , "Before running, highlight desired time range in the Grid Manager.", "alphaNumeric"), + ("Model:", "NAM12", "radio", ["GFS40", "NAM12"]), + ("Model Run:", "Current", "radio", ["Current", "Previous"]), + ("Layer depth:", "50mb", "radio", ["50mb", "25mb"]), + ("Use RH w.r.t. ICE @ T < -25C?", "No", "radio", ["Yes", "No"]), + ("", "", "label"), + ("Include high clouds (500-300mb)?", "No", "radio", ["Yes", "No"]), + ("Include clouds below 925mb?", "Yes", "radio", ["Yes", "No"]), + ("5% Sky Cover threshold at RH percentage:", 60., "scale", [44., 74.],2.0), + ("Above value sets RH threshold for CLR skies.", "", "label"), + ("Calibration:", 1.00, "scale", [1.00, 1.50],0.02), + ("Raise calibration to get more sky cover for a given RH.", "", "label"), + (" --- Limit Values Section --- ", "", "label"), + ("Don't give me sky cover above (percent):", 100, "scale", [0, 100], 1), + ("Don't give me sky cover below (percent):", 0, "scale", [0, 100], 1), + ] + +import SmartScript +import time +from tkinter import * + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, editArea, timeRange, varDict): + + databaseID = self.findDatabase("Fcst") + model1 = varDict["Model:"] + modelrun = varDict["Model Run:"] + modeltemp = "D2D_" + model1 + + if modelrun == "Current": + model = self.findDatabase(modeltemp, 0) + else: + model = self.findDatabase(modeltemp, -1) + + time1 = time.gmtime() + (year,month,day,h,m,s,w,day,dst)=time1 + shift = (h*3600) + (m*60) + s + starth = timeRange.startTime().unixTime() + endh = timeRange.endTime().unixTime() + now = int(time.time()) + zero = now - shift + starttime = (starth-zero)/3600 + endtime = (endh-zero)/3600 + + # print timeRange + # print "start_hr:", 
starttime, "end_hr:", endtime + + if model1 == "NAM12": + self.deleteCmd(['Sky'], timeRange) + for i in range(starttime,endtime): + curmod = i % 3 + if curmod == 0: + # print "make grid for hour", i + Sky_timeRange = self.createTimeRange(i,i+1,"Zulu") + # print "Sky_TimeRange:", Sky_timeRange + self.createFromScratchCmd(['Sky'], Sky_timeRange, 1, 1) + self.callSmartTool("Populate_SkyTool",'Sky', timeRange=Sky_timeRange, varDict=varDict) + else: + # Use "GFS40"... + self.deleteCmd(['Sky'], timeRange) + for i in range(starttime,endtime): + curmod = i % 6 + if curmod == 0: + # print "make grid for hour", i + Sky_timeRange = self.createTimeRange(i,i+1,"Zulu") + # print "Sky_TimeRange:", Sky_timeRange + self.createFromScratchCmd(['Sky'], Sky_timeRange, 1, 1) + self.callSmartTool("Populate_SkyTool",'Sky', timeRange=Sky_timeRange, varDict=varDict) + + # Interpolate gaps. Use fcst database either side of time range... + self.interpolateCmd(['Sky'], timeRange, "GAPS", "SYNC") diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Populate_WPC_PQPF.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Populate_WPC_PQPF.py index fbe84de625..d9034ed844 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Populate_WPC_PQPF.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Populate_WPC_PQPF.py @@ -1,223 +1,223 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# PopulateWithERP.py -# -# Author: lefebvre -# This procedure copies the latest version of the hi res QPF grids which is the tpHPCndfd parameter -# from WPC grid found in the D2D HPCERP GFE model db. These grids are transmitted by WPC 4 times a day. -# The 00Z and 12Z cycles of data go out to day 7. 
06Z and 18Z only go out 72 hours. The procedure -# makes an inventory check and populates the QPF grids using the latest HPCERP tpHPCndfd or a combination -# of the latest HPCERP db and the previous one if the latest one is not completely in the system yet. -# When the QPF grids are populated from combination of the latest model cycle and the previous model cycle the -# tool will pop a banner telling you so. Also. the tool copies the guidance into 6 hours QPF grids -# regardless whether the time constraint of your QPF grids is 1, 3, or 6 hours. -# -# If a time range is preselected in the Grid Manager, the tool only runs for that time range. Otherwise if no -# time range is preselected the tool runs from the current 6 hours period out to 7 days using the latest -# guidance available as far out as it is available at the time the tool is ran. -# -# The tpHPCndfd parameter in the HPCERP D2D model db in GFE is the high resolution 2.5 km WPC QPF guidance. -# As of AWIPS build 16.1.2 and 16.2.1 this parameter is equivalent to accessing the following guidance from -# the D2D Volume Browser: -# -# 1. From Sources -> SfcGrid -> HPC -# 2. From Fields -> Precip -> 6hr Precip Accum -# 3. From Planes -> Misc -> Sfc -# -# Contributor: P. Santos - -# Version 2.2 - 27 March 2016 -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Edit"] -import LogStream, time -from math import * -import AbsTime, TimeRange - -VariableList = [] - -import time -import AbsTime -import SmartScript - - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - - # Returns the duration for the QPF parm in hours - def getQPFGridDuration(self): - parm = self.getParm("Fcst", "QPF", "SFC") - - parmDuration = parm.getGridInfo().getTimeConstraints().getDuration() - - return parmDuration / 3600 - - # Fetch the inventory as a list of timeRanges for the specified model - # and element. Trim the list to the specified timeRange, if any. - def getWEInventory(self, modelName, WEName, timeRange=None): - - allTimes = TimeRange.allTimes() - - if timeRange is None: - timeRange = allTimes - - trList = [] - # getGridInfo will just die if the modelName or weName is not valid - # so wrap it in a try block and return [] if it fails - try: - gridInfo = self.getGridInfo(modelName, WEName, "SFC", timeRange) - except: - return trList - - for g in gridInfo: - if timeRange.overlaps(g.gridTime()): - trList.append(g.gridTime()) - - return trList - - # Returns a list of model names matching the specified model name, - # weather element name and level - def getModelList(self, modelName, weName, weLevel): - modelList = [] - - availParms = self.availableParms() - - for pName, level, dbID in availParms: - if dbID.modelName().find(modelName) > -1: - if pName.find(weName) > -1: - if level.find(weLevel) > -1: - if dbID.modelIdentifier() not in modelList: -# modelList.append(dbID.modelIdentifier()) - modelList.append(dbID) - return modelList - - # Fetch the ERP grids at the times specified by the gridTRList. Always fetches the latest model that - # contains the requested grid. - def getERPGrids(self, gridTRList): - # get the list of all available models. They come sorted latest to oldest. 
- modelList = self.getModelList(self._ERPModelName, self._ERPVarName, self._ERPLevel) - modelList = modelList[0:2] # trim the list to the last two versions - - # make a dict and fill with grids with the default value. - # These will be replaced when we find real grids - gridDict = {} - usedPreviousModel = False - - for gridTR in gridTRList: - gridDict[gridTR] = None # initialize to None - for model in modelList: - modelTRList = self.getWEInventory(model, self._ERPVarName) - for modelTR in modelTRList: - if modelTR.overlaps(gridTR): # Find the grid that overlaps - grid = self.getGrids(model, self._ERPVarName, "SFC", modelTR) - if grid is None: - break - gridDict[gridTR] = grid - - # See if we used a previous model version - if model != modelList[0]: - usedPreviousModel = True - break # found a grid, time to stop - - if gridDict[gridTR] is not None: # if we found a grid, break from the model loop - break - - if usedPreviousModel: - self.statusBarMsg("Some grids were populated with a previous version of " + self._ERPModelName, "S") - - return gridDict - - # Make a list of timeRanges rounded and synched to the 6 hour synoptic times. 
- def makeTimeRangeList(self, selectedTimeRange): - - sixHrs = 6 * 3600 - days = 7 - now = int(time.time() / sixHrs) * (sixHrs) - trList = [] - for t in range(now, now + (days * 24 * 3600), sixHrs): - start = AbsTime.AbsTime(t) - end = AbsTime.AbsTime(t + sixHrs) - tr = TimeRange.TimeRange(start, end) - if tr.overlaps(selectedTimeRange) or not selectedTimeRange.isValid(): - trList.append(tr) - - return trList - - - def execute(self, editArea, timeRange, varDict): - - - self._ERPModelName = "HPCERP" - self._ERPVarName = "tpHPCndfd" - self._ERPLevel = "SFC" - - qpfDuration = self.getQPFGridDuration() - - if qpfDuration not in [1, 3, 6]: - self.statusBarMsg("Your QPF grid duration is not compatible with WPC QPF durations.", "S") - return - - modelList = self.getModelList(self._ERPModelName, self._ERPVarName, self._ERPLevel) - if len(modelList) == 0: - self.statusBarMsg("No HPCERP models found to populate. Game over.", "S") - return - - # Fetch the latest model and check its inventory -# latestERPModel = modelList[0] # Fetch the latest model and check its inventory - - # Data checks - if any one fails, the tool aborts - -# # If not at least 72 hours, determined by the first and last available grid, bail out. 
-# modelTRList = self.getWEInventory(latestERPModel, self._ERPVarName) -# if len(modelTRList) == 0: -# self.statusBarMsg("The latest HPCERP model has no grids yet.", "S") -# return - -# # Make sure the first grid starts near the model time -# firstGridTime = int(modelTRList[0].startTime().unixTime() / (3600 * 6)) * (3600* 6) -# if firstGridTime != latestERPModel.modelTime().unixTime(): -# self.statusBarMsg("The latest HPCERP model has not completely arrived (First Grid too late).", "S") -# return - -# # Make sure there's enough grids in the latest model -# lastGridTime = int(modelTRList[-1].startTime().unixTime() / (3600 * 6)) * (3600* 6) -# if (lastGridTime - firstGridTime) / 3600 < 72: -# self.statusBarMsg("The latest HPCERP model has not completely arrived.(not enough grids", "S") -# return - - # Make a list of TimeRanges that we will use to populate the QPF. Trim to the selected timeRange - trList = self.makeTimeRangeList(timeRange) - erpGridDict = self.getERPGrids(trList) - - erpTRList = erpGridDict.keys() - - # Make sure all the grids are there, or bail - for erpTR in erpTRList: - if erpGridDict[erpTR] is None: - self.statusBarMsg("Missing guidance at " + str(erpTR) + " Aborting.", "S") - return - - for tr in trList: - for erpTR in erpTRList: - if erpTR.overlaps(tr): - if erpGridDict[erpTR] is None: - break - grid = erpGridDict[erpTR] / 25.4 # convert mm to inches - self.createGrid("Fcst", "QPF", "SCALAR", grid, tr) - - return - - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# PopulateWithERP.py +# +# Author: lefebvre +# This procedure copies the latest version of the hi res QPF grids which is the tpHPCndfd parameter +# from WPC grid found in the D2D HPCERP GFE model db. These grids are transmitted by WPC 4 times a day. 
+# The 00Z and 12Z cycles of data go out to day 7. 06Z and 18Z only go out 72 hours. The procedure +# makes an inventory check and populates the QPF grids using the latest HPCERP tpHPCndfd or a combination +# of the latest HPCERP db and the previous one if the latest one is not completely in the system yet. +# When the QPF grids are populated from combination of the latest model cycle and the previous model cycle the +# tool will pop a banner telling you so. Also. the tool copies the guidance into 6 hours QPF grids +# regardless whether the time constraint of your QPF grids is 1, 3, or 6 hours. +# +# If a time range is preselected in the Grid Manager, the tool only runs for that time range. Otherwise if no +# time range is preselected the tool runs from the current 6 hours period out to 7 days using the latest +# guidance available as far out as it is available at the time the tool is ran. +# +# The tpHPCndfd parameter in the HPCERP D2D model db in GFE is the high resolution 2.5 km WPC QPF guidance. +# As of AWIPS build 16.1.2 and 16.2.1 this parameter is equivalent to accessing the following guidance from +# the D2D Volume Browser: +# +# 1. From Sources -> SfcGrid -> HPC +# 2. From Fields -> Precip -> 6hr Precip Accum +# 3. From Planes -> Misc -> Sfc +# +# Contributor: P. Santos + +# Version 2.2 - 27 March 2016 +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Edit"] +import LogStream, time +from math import * +import AbsTime, TimeRange + +VariableList = [] + +import time +import AbsTime +import SmartScript + + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + + # Returns the duration for the QPF parm in hours + def getQPFGridDuration(self): + parm = self.getParm("Fcst", "QPF", "SFC") + + parmDuration = parm.getGridInfo().getTimeConstraints().getDuration() + + return parmDuration / 3600 + + # Fetch the inventory as a list of timeRanges for the specified model + # and element. Trim the list to the specified timeRange, if any. + def getWEInventory(self, modelName, WEName, timeRange=None): + + allTimes = TimeRange.allTimes() + + if timeRange is None: + timeRange = allTimes + + trList = [] + # getGridInfo will just die if the modelName or weName is not valid + # so wrap it in a try block and return [] if it fails + try: + gridInfo = self.getGridInfo(modelName, WEName, "SFC", timeRange) + except: + return trList + + for g in gridInfo: + if timeRange.overlaps(g.gridTime()): + trList.append(g.gridTime()) + + return trList + + # Returns a list of model names matching the specified model name, + # weather element name and level + def getModelList(self, modelName, weName, weLevel): + modelList = [] + + availParms = self.availableParms() + + for pName, level, dbID in availParms: + if dbID.modelName().find(modelName) > -1: + if pName.find(weName) > -1: + if level.find(weLevel) > -1: + if dbID.modelIdentifier() not in modelList: +# modelList.append(dbID.modelIdentifier()) + modelList.append(dbID) + return modelList + + # Fetch the ERP grids at the times specified by the gridTRList. Always fetches the latest model that + # contains the requested grid. + def getERPGrids(self, gridTRList): + # get the list of all available models. They come sorted latest to oldest. 
+ modelList = self.getModelList(self._ERPModelName, self._ERPVarName, self._ERPLevel) + modelList = modelList[0:2] # trim the list to the last two versions + + # make a dict and fill with grids with the default value. + # These will be replaced when we find real grids + gridDict = {} + usedPreviousModel = False + + for gridTR in gridTRList: + gridDict[gridTR] = None # initialize to None + for model in modelList: + modelTRList = self.getWEInventory(model, self._ERPVarName) + for modelTR in modelTRList: + if modelTR.overlaps(gridTR): # Find the grid that overlaps + grid = self.getGrids(model, self._ERPVarName, "SFC", modelTR) + if grid is None: + break + gridDict[gridTR] = grid + + # See if we used a previous model version + if model != modelList[0]: + usedPreviousModel = True + break # found a grid, time to stop + + if gridDict[gridTR] is not None: # if we found a grid, break from the model loop + break + + if usedPreviousModel: + self.statusBarMsg("Some grids were populated with a previous version of " + self._ERPModelName, "S") + + return gridDict + + # Make a list of timeRanges rounded and synched to the 6 hour synoptic times. 
+ def makeTimeRangeList(self, selectedTimeRange): + + sixHrs = 6 * 3600 + days = 7 + now = int(time.time() / sixHrs) * (sixHrs) + trList = [] + for t in range(now, now + (days * 24 * 3600), sixHrs): + start = AbsTime.AbsTime(t) + end = AbsTime.AbsTime(t + sixHrs) + tr = TimeRange.TimeRange(start, end) + if tr.overlaps(selectedTimeRange) or not selectedTimeRange.isValid(): + trList.append(tr) + + return trList + + + def execute(self, editArea, timeRange, varDict): + + + self._ERPModelName = "HPCERP" + self._ERPVarName = "tpHPCndfd" + self._ERPLevel = "SFC" + + qpfDuration = self.getQPFGridDuration() + + if qpfDuration not in [1, 3, 6]: + self.statusBarMsg("Your QPF grid duration is not compatible with WPC QPF durations.", "S") + return + + modelList = self.getModelList(self._ERPModelName, self._ERPVarName, self._ERPLevel) + if len(modelList) == 0: + self.statusBarMsg("No HPCERP models found to populate. Game over.", "S") + return + + # Fetch the latest model and check its inventory +# latestERPModel = modelList[0] # Fetch the latest model and check its inventory + + # Data checks - if any one fails, the tool aborts + +# # If not at least 72 hours, determined by the first and last available grid, bail out. 
+# modelTRList = self.getWEInventory(latestERPModel, self._ERPVarName) +# if len(modelTRList) == 0: +# self.statusBarMsg("The latest HPCERP model has no grids yet.", "S") +# return + +# # Make sure the first grid starts near the model time +# firstGridTime = int(modelTRList[0].startTime().unixTime() / (3600 * 6)) * (3600* 6) +# if firstGridTime != latestERPModel.modelTime().unixTime(): +# self.statusBarMsg("The latest HPCERP model has not completely arrived (First Grid too late).", "S") +# return + +# # Make sure there's enough grids in the latest model +# lastGridTime = int(modelTRList[-1].startTime().unixTime() / (3600 * 6)) * (3600* 6) +# if (lastGridTime - firstGridTime) / 3600 < 72: +# self.statusBarMsg("The latest HPCERP model has not completely arrived.(not enough grids", "S") +# return + + # Make a list of TimeRanges that we will use to populate the QPF. Trim to the selected timeRange + trList = self.makeTimeRangeList(timeRange) + erpGridDict = self.getERPGrids(trList) + + erpTRList = list(erpGridDict.keys()) + + # Make sure all the grids are there, or bail + for erpTR in erpTRList: + if erpGridDict[erpTR] is None: + self.statusBarMsg("Missing guidance at " + str(erpTR) + " Aborting.", "S") + return + + for tr in trList: + for erpTR in erpTRList: + if erpTR.overlaps(tr): + if erpGridDict[erpTR] is None: + break + grid = erpGridDict[erpTR] / 25.4 # convert mm to inches + self.createGrid("Fcst", "QPF", "SCALAR", grid, tr) + + return + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ProcedureCmds.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ProcedureCmds.py index 6fa51fdfb7..2fd9b578d1 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ProcedureCmds.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ProcedureCmds.py @@ -1,154 +1,154 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with 
the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ProcedureCmds -- test and example procedure commands -# -# Author: hansen -# ---------------------------------------------------------------------------- - -import MyDialog -import SmartScript - -MenuItems = ["Populate"] - -VariableList = [ - ("Model" , "", "model"), - ("Model Elements" , ["All"], "check", ["All", "Wx", "T", "Td", "Wind", - "MaxT", "MinT", "Sky", "PoP", - "QPF"]), - ("Begin Hour" , 0, "scale", [0, 120]), - ("Initialize From Model", "", "D2D_model"), - ("", 600, "scrollbar"), - ] - -AllElements =['T','Td','MaxT','MinT','Wind','Sky','Wx','PoP','QPF'] - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - def execute(self, editArea, timeRange, varDict): - - # Put up In Progress dialog - dialog = MyDialog.MyDialog(None," Status "," Loading Grids ") - - # Get Variables - model = varDict["Model"] - # getDatabase -- convert user input to databaseID - databaseID = self.getDatabase(model) - elements = varDict["Model Elements"] - if "All" in elements: - elements = AllElements - beginHour = varDict["Begin Hour"] - - # createTimeRange -- Create time ranges relative to chosen model - 
timeRange_120_240 = self.createTimeRange( - 120, 240, "Database", databaseID) - timeRange_begin_60 = self.createTimeRange( - beginHour, 60, "Database", databaseID) - timeRange_begin_24 = self.createTimeRange( - beginHour, 24, "Database", databaseID) - # findDatabase -- Find databaseID for gfsLR model - gfsLR_databaseID = self.findDatabase("gfsLR") - - # copy Commands - print "Copy commands" - copyTimeRange = self.getTimeRange(beginHour, 120, 'gfsLR') - self.copyCmd(elements, gfsLR_databaseID, copyTimeRange) - self.copyCmd(elements, gfsLR_databaseID, timeRange_120_240) - self.copyCmd(elements, databaseID, timeRange_begin_60) - - # createFromScratch - # Create 1-hour grids repeating every 6 hours over the entire time range - print "Create From Scratch" - createTimeRange = self.getTimeRange(3, 4, 'gfsLR') - self.createFromScratchCmd(['MixHgt'], createTimeRange) - - # split - print "Split" - splitTimeRange = self.getTimeRange(beginHour, 48, 'gfsLR') - self.splitCmd(['MixHgt'], splitTimeRange) - self.splitCmd(['MixHgt'], timeRange_begin_60) - - # fragment - print "Fragment" - fragmentTimeRange = self.getTimeRange(beginHour, 48, 'gfsLR') - self.fragmentCmd(['MixHgt'], fragmentTimeRange) - self.fragmentCmd(['MixHgt'], timeRange_begin_60) - - # remove - print "Remove" - removeTimeRange = self.getTimeRange(beginHour, 48, 'gfsLR') - self.deleteCmd(['MixHgt'], removeTimeRange) - print "CreateFromScratchCmd" - self.createFromScratchCmd( - ['MixHgt'], timeRange_begin_60, repeat=6, duration=1) - # deleteCmd - print "Delete" - self.deleteCmd(['MixHgt'], timeRange_begin_60) - self.createFromScratchCmd( - ['MixHgt'], timeRange_begin_60, repeat=6, duration=1) - - # zero - print "Zero" - zeroTimeRange = self.getTimeRange(beginHour, 6, 'gfsLR') - self.zeroCmd(['MixHgt'], zeroTimeRange) - self.zeroCmd(['MixHgt'], timeRange_begin_60) - - # assignValue - print "AssignValue" - assignTimeRange = self.getTimeRange(beginHour, 6, 'gfsLR') - self.assignValueCmd(['MixHgt'], assignTimeRange, 
2000) - self.assignValueCmd(['MixHgt'], timeRange_begin_60, 3000) - - # getEditArea - toolEditArea = self.getEditArea("ISC_Send_Area") - - # callSmartTool -- Run Smart Tool using created time range - print "callSmartTool" - varDict["Initialize From Model: "] = varDict["Initialize From Model"] - self.callSmartTool("MixHgt_Init","MixHgt", - toolEditArea, timeRange_begin_24, varDict) - - # timeShift - print "TimeShift" - shiftTimeRange = self.getTimeRange(beginHour, 24, 'gfsLR') - self.timeShiftCmd(['T', 'Wind'], 1, 3, shiftTimeRange) - self.timeShiftCmd(['T', 'Wind'], 1, 3, timeRange_begin_24) - - # interpolate - print "Interpolate" - interpolateTimeRange = self.getTimeRange(beginHour, 6, 'gfsLR') - self.interpolateCmd(elements, interpolateTimeRange, "GAPS", "SYNC", 0, 0) - self.interpolateCmd(elements, timeRange_begin_24, "GAPS", "ASYNC", 0, 0) - - # - # Destroy In Progress dialog - dialog.destroy() - - def getTimeRange(self, hourStart, hourEnd, modelBase): - databaseID = self.findDatabase(modelBase) - timeRange = self.createTimeRange(hourStart, hourEnd, "Database", databaseID) - return timeRange - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ProcedureCmds -- test and example procedure commands +# +# Author: hansen +# ---------------------------------------------------------------------------- + +import MyDialog +import SmartScript + +MenuItems = ["Populate"] + +VariableList = [ + ("Model" , "", "model"), + ("Model Elements" , ["All"], "check", ["All", "Wx", "T", "Td", "Wind", + "MaxT", "MinT", "Sky", "PoP", + "QPF"]), + ("Begin Hour" , 0, "scale", [0, 120]), + ("Initialize From Model", "", "D2D_model"), + ("", 600, "scrollbar"), + ] + +AllElements =['T','Td','MaxT','MinT','Wind','Sky','Wx','PoP','QPF'] + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + def execute(self, editArea, timeRange, varDict): + + # Put up In Progress dialog + dialog = MyDialog.MyDialog(None," Status "," Loading Grids ") + + # Get Variables + model = varDict["Model"] + # getDatabase -- convert user input to databaseID + databaseID = self.getDatabase(model) + elements = varDict["Model Elements"] + if "All" in elements: + elements = AllElements + beginHour = varDict["Begin Hour"] + + # createTimeRange -- Create time ranges relative to chosen model + timeRange_120_240 = self.createTimeRange( + 120, 240, "Database", databaseID) + timeRange_begin_60 = self.createTimeRange( + beginHour, 60, "Database", databaseID) + timeRange_begin_24 = self.createTimeRange( + beginHour, 24, "Database", databaseID) + # findDatabase -- Find databaseID for gfsLR model + gfsLR_databaseID = self.findDatabase("gfsLR") + + # copy Commands + print("Copy commands") + copyTimeRange = self.getTimeRange(beginHour, 120, 'gfsLR') + self.copyCmd(elements, gfsLR_databaseID, copyTimeRange) + self.copyCmd(elements, gfsLR_databaseID, 
timeRange_120_240) + self.copyCmd(elements, databaseID, timeRange_begin_60) + + # createFromScratch + # Create 1-hour grids repeating every 6 hours over the entire time range + print("Create From Scratch") + createTimeRange = self.getTimeRange(3, 4, 'gfsLR') + self.createFromScratchCmd(['MixHgt'], createTimeRange) + + # split + print("Split") + splitTimeRange = self.getTimeRange(beginHour, 48, 'gfsLR') + self.splitCmd(['MixHgt'], splitTimeRange) + self.splitCmd(['MixHgt'], timeRange_begin_60) + + # fragment + print("Fragment") + fragmentTimeRange = self.getTimeRange(beginHour, 48, 'gfsLR') + self.fragmentCmd(['MixHgt'], fragmentTimeRange) + self.fragmentCmd(['MixHgt'], timeRange_begin_60) + + # remove + print("Remove") + removeTimeRange = self.getTimeRange(beginHour, 48, 'gfsLR') + self.deleteCmd(['MixHgt'], removeTimeRange) + print("CreateFromScratchCmd") + self.createFromScratchCmd( + ['MixHgt'], timeRange_begin_60, repeat=6, duration=1) + # deleteCmd + print("Delete") + self.deleteCmd(['MixHgt'], timeRange_begin_60) + self.createFromScratchCmd( + ['MixHgt'], timeRange_begin_60, repeat=6, duration=1) + + # zero + print("Zero") + zeroTimeRange = self.getTimeRange(beginHour, 6, 'gfsLR') + self.zeroCmd(['MixHgt'], zeroTimeRange) + self.zeroCmd(['MixHgt'], timeRange_begin_60) + + # assignValue + print("AssignValue") + assignTimeRange = self.getTimeRange(beginHour, 6, 'gfsLR') + self.assignValueCmd(['MixHgt'], assignTimeRange, 2000) + self.assignValueCmd(['MixHgt'], timeRange_begin_60, 3000) + + # getEditArea + toolEditArea = self.getEditArea("ISC_Send_Area") + + # callSmartTool -- Run Smart Tool using created time range + print("callSmartTool") + varDict["Initialize From Model: "] = varDict["Initialize From Model"] + self.callSmartTool("MixHgt_Init","MixHgt", + toolEditArea, timeRange_begin_24, varDict) + + # timeShift + print("TimeShift") + shiftTimeRange = self.getTimeRange(beginHour, 24, 'gfsLR') + self.timeShiftCmd(['T', 'Wind'], 1, 3, shiftTimeRange) + 
self.timeShiftCmd(['T', 'Wind'], 1, 3, timeRange_begin_24) + + # interpolate + print("Interpolate") + interpolateTimeRange = self.getTimeRange(beginHour, 6, 'gfsLR') + self.interpolateCmd(elements, interpolateTimeRange, "GAPS", "SYNC", 0, 0) + self.interpolateCmd(elements, timeRange_begin_24, "GAPS", "ASYNC", 0, 0) + + # + # Destroy In Progress dialog + dialog.destroy() + + def getTimeRange(self, hourStart, hourEnd, modelBase): + databaseID = self.findDatabase(modelBase) + timeRange = self.createTimeRange(hourStart, hourEnd, "Database", databaseID) + return timeRange + \ No newline at end of file diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Run_NWPS.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Run_NWPS.py index fe7272af81..5e573cdb25 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Run_NWPS.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/Run_NWPS.py @@ -1,266 +1,266 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Run_NWPS -# Description: AWIPS 2 Version 18.1.1 -# -# This runs a Procedure within the GFE that builds NWPS -# forecast wind grids based on the operational wind forecast grids -# and then sends those Wind grids to the NWPS model. -# -# Authors: Pablo Santos, Alex Gibbs, and Joe Maloney. -# -# Last Modified: 01/23/15 by AG/PS for AWIPS Baseline. -# Last Modified: 09/10/15 by Joe Maloney/PS for mulitsite version of NWPS. -# Last Modified: 10/14/15 by Joe Maloney/PS to remove dependancy on cron. Totally on demand by user. -# This means baseline cron entry for nwps will be removed with 16.2.1 -# Last Modified: 10/30/15 by Joe Maloney, added -q flags to scp/ssh at end. 
-# Last Modified: 11/25/15 by Tom LeFebvre, added switch to run tool from a cron or interactively. -# Last Modified: 11/29/15 by P. Santos, completed adding code to enable running Run_NWPS interactively or from a cron. -# Last modified: 03/18/16 by Joe Maloney, a minor tweak to runManualNWPS_OutsideAWIPS call. -# Last modified: 04/14/16 by T. LeFebvre/P. Santos, for wind inventory check. -# Last modified: 07/18/2016 by J. Maloney/P. Santos, post 16.4.1 code review. -# Last modified: 12/10/2017 by P. Stanko, remove archaic unused options and default RTOFS on in gulf stream, off elsewhere. -# Last modified: 12/10/2017 by P. Stanko, also added call to new baseline script for NWPS warning messages from WCOSS. -# Last modified: 12/12/2017 by P. Stanko, Differentiate between structured and unstructured sites, TAFB and non-TAFB, always try hotstart -# ---------------------------------------------------------------------------- -# -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. Possible items are: Populate, Edit, Consistency, -# Verify, Hazards -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -MenuItems = ["Edit", "Populate"] -import SmartScript, LogStream -import time, os, shutil, TimeRange, AbsTime -import ProcessVariableList - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def fileNameFromIntTime(self, floatTime): - tupleTime = time.gmtime(floatTime) - # print "TUPLETIME IS: ", tupleTime - return time.strftime("%Y%m%d_%H00", tupleTime) - - def getButtonNames(self): - - #currentTime = int(time.time() / 3600) * 3600 # truncated to this hour - currentTime = (self._gmtime().unixTime()/3600)*3600 - - if time.gmtime(currentTime).tm_hour % 6 != 0: - currentTime = currentTime + (6 * 3600) # add six hours - - startTime = int(currentTime / (6 * 3600)) * (6 * 3600) - #print "StartTime from GUI is: ", startTime - - timeStrs = [] - timeList = [] - - for i in range(0, 6): - currentTime = startTime + (6 * i) * 3600 - 108000 # 30 hrs - strTime = self.fileNameFromIntTime(currentTime) - timeList.append(currentTime) - timeStrs.append(strTime) - - return timeStrs,timeList - - def getModelTimeRange(self, modelID, param): - #before = time.time() - (3000 * 24 * 3600) # 3000 days ago. Does not work with DRT - #later = time.time() + 100 * 24 * 3600 # 100 days from now. 
Does not work with DRT - before = self._gmtime().unixTime() - (7 * 24 * 3600) # 7 days ago - later = self._gmtime().unixTime() + 8 * 24 * 3600 # 8 days from now - timeRange = TimeRange.TimeRange(AbsTime.AbsTime(before), AbsTime.AbsTime(later)) - #self.deleteCmd(weNames, timeRange) - gridInfo = self.getGridInfo(modelID, param, "SFC", timeRange) - #print "GRIDINFO IS: ", modelID, gridInfo - if len(gridInfo) == 0: - self.statusBarMsg("No grids available for model:" + modelID, "S") - return None - - minTime = later - maxTime = before - for g in gridInfo: - start = g.gridTime().startTime().unixTime() - end = g.gridTime().endTime().unixTime() - minTime = min(minTime,start) - maxTime = max(maxTime,end) - - modelTR = TimeRange.TimeRange(AbsTime.AbsTime(minTime), AbsTime.AbsTime(maxTime)) - #print "MODELTR", modelTR, minTime, maxTime - return modelTR, minTime, maxTime - - def getWEInventory(self, WEName, dbase="Fcst", level="SFC", - timeRange=TimeRange.allTimes()): - """Return a list of time ranges with available data for a field from - a specific database and level. 
- Args: - string WEName: name of field to inventory - string dbase: name of database to search (default = 'Fcst') - string level: level of data to inventory (default = 'SFC') - Returns: - Python list of Python time range objects - """ - - # print "Getting inventory of -> '%s' from '%s' at '%s'" % \ - # (WEName, dbase, level) - - trList = [] - # getGridInfo will just die if the modelName or weName is not valid - # so wrap it in a try block and return [] if it fails - try: - gridInfo = self.getGridInfo(dbase, WEName, level, timeRange) - trList = [g.gridTime() for g in gridInfo] - except: - self.statusBarMsg("Problems retrieving wind grids", "S") - - return trList - - def execute(self, editArea, timeRange, varDict): - - gulfStreamSites=['KEY', 'MFL', 'MLB', 'JAX', 'CHS', 'ILM', 'MHX', 'AKQ', 'PHI', 'OKX', 'BOX', 'GYX', 'CAR'] - tafbSites=['BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'MFL', 'KEY', 'MLB', 'JAX', 'CHS', 'ILM'] - buttonList, timeList = self.getButtonNames() - GFEDomainname = self.getSiteID() - print "GFEDomain is: ", GFEDomainname - cron = True - - if varDict is None: # This means the tool is being run interactively, so make the GUI. 
- - variableList = [ - ("How Long Do You Want To Run NWPS:" , 144, "scale", [12, 144], 3), - ("**NOTE: NCEP WCOSS Runs Always Go Out 144 Hours Regardless of Your Choice Here","", "label"), - ("Model Start Time:", buttonList[4], "radio", buttonList), - ("Local, NCEP, or Both:", "NCEP", "radio", ["Local","NCEP","Both"]), - ("Waterlevels:", "ESTOFS", "radio", ["ESTOFS","PSURGE", "No"]), - ("If PSURGE\n% Exceedance Hgt:", "10", "radio", ["10", "20", "30", "40", "50"]), - ] - - if GFEDomainname in gulfStreamSites: - variableList.append(("RTOFS Currents:", "Yes", "radio", ["Yes","No"])) - else: - variableList.append(("RTOFS Currents:", "No", "radio", ["Yes","No"])) - - nest="Yes" - - if GFEDomainname in tafbSites: - variableList.append(("Boundary Conditions:", "WAVEWATCH", "radio", ["WAVEWATCH", "TAFB-NWPS", "HURWave", "No"])) - else: - variableList.append(("Boundary Conditions:", "WAVEWATCH", "radio", ["WAVEWATCH", "HURWave", "No"])) - varDict = {} - processVarList = ProcessVariableList.ProcessVariableList("Run_NWPS", variableList, varDict, None) - status = processVarList.status() - if status != "OK": - return - - fcst_length = processVarList.varDict()["How Long Do You Want To Run NWPS:"] - fcstlength = str(fcst_length) - wind="ForecastWindGrids" - modelstarttime = processVarList.varDict()["Model Start Time:"] - wheretorun = processVarList.varDict()["Local, NCEP, or Both:"] - model = "SWAN" - web="Yes" - plot="Yes" - wna = processVarList.varDict()["Boundary Conditions:"] - gstream = processVarList.varDict()["RTOFS Currents:"] - tstep="600" - hotstart="True" - waterlevels = processVarList.varDict()["Waterlevels:"] - excd = processVarList.varDict()["If PSURGE\n% Exceedance Hgt:"] - cron = False - # label it WAVEWATCH in the GUI, but continue to call it WNAWave for WCOSS. 
- if wna == "WAVEWATCH": - wna="WNAWave" - # end interactive GUI portion - - else: - -# This part of if else statement assumes procedure is being run from command -#line with variable list passed on using the -V option to runProcedure. This -#allows to run procedure from a cron. Example default for runProcedure would be: -# All variables shown below passed with -V option are required for procedure to run properly. -# /awips2/GFESuite/bin/runProcedure -n Run_NWPS -c gfeConfig -# -V '{"fcstlength":"102","wind":"ForecastWindGrids","wheretorun":"NCEP","model":"SWAN","web":"Yes","plot":"Yes","wna":"WNAWave","nest":"Yes","gstream":"Yes","tstep":"600","hotstart":"True","waterlevels":"ESTOFS","excd":"10"}' -# If running from a cron, you do not need to create a SITE level override of this baseline procedure if your input variables -# are different because you pass that on from the command line. - - modelstarttime = buttonList[4] - fcstlength = varDict['fcstlength'] - wind = varDict['wind'] - wheretorun = varDict['wheretorun'] - model = varDict['model'] - web = varDict['web'] - plot = varDict['plot'] - wna = varDict['wna'] - nest = varDict['nest'] - gstream = varDict['gstream'] - tstep = varDict['tstep'] - hotstart = varDict['hotstart'] - waterlevels = varDict['waterlevels'] - excd = varDict['excd'] - - modelTR = self.getModelTimeRange("Fcst", "Wind") - startHour = modelTR[1] - endHour = modelTR[2] - timeRange = modelTR[0] - - if (modelstarttime == buttonList[0]): - starttime=timeList[0] - elif (modelstarttime == buttonList[1]): - starttime=timeList[1] - elif (modelstarttime == buttonList[2]): - starttime=timeList[2] - elif (modelstarttime == buttonList[3]): - starttime=timeList[3] - elif (modelstarttime == buttonList[4]): - starttime=timeList[4] - elif (modelstarttime == buttonList[5]): - starttime=timeList[5] - elif (modelstarttime == buttonList[6]): - starttime=timeList[6] - else: - starttime=startHour # Model start Hour if all others empty - - if (startHour > starttime): 
- starttime = startHour - - timeRange1 = TimeRange.TimeRange(AbsTime.AbsTime(starttime - 7*24*3600), AbsTime.AbsTime(starttime + 8*24*3600)) - timeRange2 = TimeRange.TimeRange(AbsTime.AbsTime(starttime), AbsTime.AbsTime(starttime + 8*24*3600)) - - self.deleteCmd(['NWPSwind'], timeRange1) - databaseID = self.findDatabase("Fcst") - self.copyToCmd([('Wind', 'NWPSwind')], databaseID, timeRange2) - self.fragmentCmd(['NWPSwind'], timeRange2) - self.saveElements(["NWPSwind"]) - - trList = self.getWEInventory("NWPSwind") - if len(trList) < 144: - self.statusBarMsg("Not enough Wind grids. You need at least 144 hours.", "S") - return - - inp_args = fcstlength + ":" + wna + ":" + nest + ":" + gstream + ":" + wind + ":" + web + ":" + plot + ":" + tstep + ":" + hotstart + ":" + waterlevels + ":" + model + ":" + excd + ":" + wheretorun - - try: - os.stat('/tmp/nwps/'+GFEDomainname) - except: - os.makedirs('/tmp/nwps/'+GFEDomainname) - os.chmod('/tmp/nwps/'+GFEDomainname,0o775) - - with open('/tmp/nwps/'+GFEDomainname+'/inp_args', 'w') as f: - f.write(inp_args) - os.chmod('/tmp/nwps/'+GFEDomainname+'/inp_args',0o666) - - os.system('mkdir -p /awips2/GFESuite/nwps/'+GFEDomainname+'_var') - os.system('chmod 775 /awips2/GFESuite/nwps/'+GFEDomainname+'_var') - os.system('cp -rpq /tmp/nwps/'+GFEDomainname+'/inp_args /awips2/GFESuite/nwps/'+GFEDomainname+'_var/') - if cron: - os.system('/awips2/GFESuite/nwps/bin/runManualNWPS_OutsideAWIPS.sh '+GFEDomainname) - else: - os.system('/awips2/GFESuite/nwps/bin/runManualNWPS_OutsideAWIPS.sh '+GFEDomainname+' &') - shutil.rmtree('/tmp/nwps/'+GFEDomainname) +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# Run_NWPS +# Description: AWIPS 2 Version 18.1.1 +# +# This runs a Procedure within the GFE that builds NWPS +# forecast wind grids based on the operational wind forecast grids +# and then sends those Wind grids to the NWPS model. +# +# Authors: Pablo Santos, Alex Gibbs, and Joe Maloney. +# +# Last Modified: 01/23/15 by AG/PS for AWIPS Baseline. +# Last Modified: 09/10/15 by Joe Maloney/PS for mulitsite version of NWPS. +# Last Modified: 10/14/15 by Joe Maloney/PS to remove dependancy on cron. Totally on demand by user. +# This means baseline cron entry for nwps will be removed with 16.2.1 +# Last Modified: 10/30/15 by Joe Maloney, added -q flags to scp/ssh at end. +# Last Modified: 11/25/15 by Tom LeFebvre, added switch to run tool from a cron or interactively. +# Last Modified: 11/29/15 by P. Santos, completed adding code to enable running Run_NWPS interactively or from a cron. +# Last modified: 03/18/16 by Joe Maloney, a minor tweak to runManualNWPS_OutsideAWIPS call. +# Last modified: 04/14/16 by T. LeFebvre/P. Santos, for wind inventory check. +# Last modified: 07/18/2016 by J. Maloney/P. Santos, post 16.4.1 code review. +# Last modified: 12/10/2017 by P. Stanko, remove archaic unused options and default RTOFS on in gulf stream, off elsewhere. +# Last modified: 12/10/2017 by P. Stanko, also added call to new baseline script for NWPS warning messages from WCOSS. +# Last modified: 12/12/2017 by P. Stanko, Differentiate between structured and unstructured sites, TAFB and non-TAFB, always try hotstart +# ---------------------------------------------------------------------------- +# +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. Possible items are: Populate, Edit, Consistency, +# Verify, Hazards +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +MenuItems = ["Edit", "Populate"] +import SmartScript, LogStream +import time, os, shutil, TimeRange, AbsTime +import ProcessVariableList + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def fileNameFromIntTime(self, floatTime): + tupleTime = time.gmtime(floatTime) + # print "TUPLETIME IS: ", tupleTime + return time.strftime("%Y%m%d_%H00", tupleTime) + + def getButtonNames(self): + + #currentTime = int(time.time() / 3600) * 3600 # truncated to this hour + currentTime = (self._gmtime().unixTime()/3600)*3600 + + if time.gmtime(currentTime).tm_hour % 6 != 0: + currentTime = currentTime + (6 * 3600) # add six hours + + startTime = int(currentTime / (6 * 3600)) * (6 * 3600) + #print "StartTime from GUI is: ", startTime + + timeStrs = [] + timeList = [] + + for i in range(0, 6): + currentTime = startTime + (6 * i) * 3600 - 108000 # 30 hrs + strTime = self.fileNameFromIntTime(currentTime) + timeList.append(currentTime) + timeStrs.append(strTime) + + return timeStrs,timeList + + def getModelTimeRange(self, modelID, param): + #before = time.time() - (3000 * 24 * 3600) # 3000 days ago. Does not work with DRT + #later = time.time() + 100 * 24 * 3600 # 100 days from now. 
Does not work with DRT + before = self._gmtime().unixTime() - (7 * 24 * 3600) # 7 days ago + later = self._gmtime().unixTime() + 8 * 24 * 3600 # 8 days from now + timeRange = TimeRange.TimeRange(AbsTime.AbsTime(before), AbsTime.AbsTime(later)) + #self.deleteCmd(weNames, timeRange) + gridInfo = self.getGridInfo(modelID, param, "SFC", timeRange) + #print "GRIDINFO IS: ", modelID, gridInfo + if len(gridInfo) == 0: + self.statusBarMsg("No grids available for model:" + modelID, "S") + return None + + minTime = later + maxTime = before + for g in gridInfo: + start = g.gridTime().startTime().unixTime() + end = g.gridTime().endTime().unixTime() + minTime = min(minTime,start) + maxTime = max(maxTime,end) + + modelTR = TimeRange.TimeRange(AbsTime.AbsTime(minTime), AbsTime.AbsTime(maxTime)) + #print "MODELTR", modelTR, minTime, maxTime + return modelTR, minTime, maxTime + + def getWEInventory(self, WEName, dbase="Fcst", level="SFC", + timeRange=TimeRange.allTimes()): + """Return a list of time ranges with available data for a field from + a specific database and level. 
+ Args: + string WEName: name of field to inventory + string dbase: name of database to search (default = 'Fcst') + string level: level of data to inventory (default = 'SFC') + Returns: + Python list of Python time range objects + """ + + # print "Getting inventory of -> '%s' from '%s' at '%s'" % \ + # (WEName, dbase, level) + + trList = [] + # getGridInfo will just die if the modelName or weName is not valid + # so wrap it in a try block and return [] if it fails + try: + gridInfo = self.getGridInfo(dbase, WEName, level, timeRange) + trList = [g.gridTime() for g in gridInfo] + except: + self.statusBarMsg("Problems retrieving wind grids", "S") + + return trList + + def execute(self, editArea, timeRange, varDict): + + gulfStreamSites=['KEY', 'MFL', 'MLB', 'JAX', 'CHS', 'ILM', 'MHX', 'AKQ', 'PHI', 'OKX', 'BOX', 'GYX', 'CAR'] + tafbSites=['BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'MFL', 'KEY', 'MLB', 'JAX', 'CHS', 'ILM'] + buttonList, timeList = self.getButtonNames() + GFEDomainname = self.getSiteID() + print("GFEDomain is: ", GFEDomainname) + cron = True + + if varDict is None: # This means the tool is being run interactively, so make the GUI. 
+ + variableList = [ + ("How Long Do You Want To Run NWPS:" , 144, "scale", [12, 144], 3), + ("**NOTE: NCEP WCOSS Runs Always Go Out 144 Hours Regardless of Your Choice Here","", "label"), + ("Model Start Time:", buttonList[4], "radio", buttonList), + ("Local, NCEP, or Both:", "NCEP", "radio", ["Local","NCEP","Both"]), + ("Waterlevels:", "ESTOFS", "radio", ["ESTOFS","PSURGE", "No"]), + ("If PSURGE\n% Exceedance Hgt:", "10", "radio", ["10", "20", "30", "40", "50"]), + ] + + if GFEDomainname in gulfStreamSites: + variableList.append(("RTOFS Currents:", "Yes", "radio", ["Yes","No"])) + else: + variableList.append(("RTOFS Currents:", "No", "radio", ["Yes","No"])) + + nest="Yes" + + if GFEDomainname in tafbSites: + variableList.append(("Boundary Conditions:", "WAVEWATCH", "radio", ["WAVEWATCH", "TAFB-NWPS", "HURWave", "No"])) + else: + variableList.append(("Boundary Conditions:", "WAVEWATCH", "radio", ["WAVEWATCH", "HURWave", "No"])) + varDict = {} + processVarList = ProcessVariableList.ProcessVariableList("Run_NWPS", variableList, varDict, None) + status = processVarList.status() + if status != "OK": + return + + fcst_length = processVarList.varDict()["How Long Do You Want To Run NWPS:"] + fcstlength = str(fcst_length) + wind="ForecastWindGrids" + modelstarttime = processVarList.varDict()["Model Start Time:"] + wheretorun = processVarList.varDict()["Local, NCEP, or Both:"] + model = "SWAN" + web="Yes" + plot="Yes" + wna = processVarList.varDict()["Boundary Conditions:"] + gstream = processVarList.varDict()["RTOFS Currents:"] + tstep="600" + hotstart="True" + waterlevels = processVarList.varDict()["Waterlevels:"] + excd = processVarList.varDict()["If PSURGE\n% Exceedance Hgt:"] + cron = False + # label it WAVEWATCH in the GUI, but continue to call it WNAWave for WCOSS. 
+ if wna == "WAVEWATCH": + wna="WNAWave" + # end interactive GUI portion + + else: + +# This part of if else statement assumes procedure is being run from command +#line with variable list passed on using the -V option to runProcedure. This +#allows to run procedure from a cron. Example default for runProcedure would be: +# All variables shown below passed with -V option are required for procedure to run properly. +# /awips2/GFESuite/bin/runProcedure -n Run_NWPS -c gfeConfig +# -V '{"fcstlength":"102","wind":"ForecastWindGrids","wheretorun":"NCEP","model":"SWAN","web":"Yes","plot":"Yes","wna":"WNAWave","nest":"Yes","gstream":"Yes","tstep":"600","hotstart":"True","waterlevels":"ESTOFS","excd":"10"}' +# If running from a cron, you do not need to create a SITE level override of this baseline procedure if your input variables +# are different because you pass that on from the command line. + + modelstarttime = buttonList[4] + fcstlength = varDict['fcstlength'] + wind = varDict['wind'] + wheretorun = varDict['wheretorun'] + model = varDict['model'] + web = varDict['web'] + plot = varDict['plot'] + wna = varDict['wna'] + nest = varDict['nest'] + gstream = varDict['gstream'] + tstep = varDict['tstep'] + hotstart = varDict['hotstart'] + waterlevels = varDict['waterlevels'] + excd = varDict['excd'] + + modelTR = self.getModelTimeRange("Fcst", "Wind") + startHour = modelTR[1] + endHour = modelTR[2] + timeRange = modelTR[0] + + if (modelstarttime == buttonList[0]): + starttime=timeList[0] + elif (modelstarttime == buttonList[1]): + starttime=timeList[1] + elif (modelstarttime == buttonList[2]): + starttime=timeList[2] + elif (modelstarttime == buttonList[3]): + starttime=timeList[3] + elif (modelstarttime == buttonList[4]): + starttime=timeList[4] + elif (modelstarttime == buttonList[5]): + starttime=timeList[5] + elif (modelstarttime == buttonList[6]): + starttime=timeList[6] + else: + starttime=startHour # Model start Hour if all others empty + + if (startHour > starttime): 
+ starttime = startHour + + timeRange1 = TimeRange.TimeRange(AbsTime.AbsTime(starttime - 7*24*3600), AbsTime.AbsTime(starttime + 8*24*3600)) + timeRange2 = TimeRange.TimeRange(AbsTime.AbsTime(starttime), AbsTime.AbsTime(starttime + 8*24*3600)) + + self.deleteCmd(['NWPSwind'], timeRange1) + databaseID = self.findDatabase("Fcst") + self.copyToCmd([('Wind', 'NWPSwind')], databaseID, timeRange2) + self.fragmentCmd(['NWPSwind'], timeRange2) + self.saveElements(["NWPSwind"]) + + trList = self.getWEInventory("NWPSwind") + if len(trList) < 144: + self.statusBarMsg("Not enough Wind grids. You need at least 144 hours.", "S") + return + + inp_args = fcstlength + ":" + wna + ":" + nest + ":" + gstream + ":" + wind + ":" + web + ":" + plot + ":" + tstep + ":" + hotstart + ":" + waterlevels + ":" + model + ":" + excd + ":" + wheretorun + + try: + os.stat('/tmp/nwps/'+GFEDomainname) + except: + os.makedirs('/tmp/nwps/'+GFEDomainname) + os.chmod('/tmp/nwps/'+GFEDomainname,0o775) + + with open('/tmp/nwps/'+GFEDomainname+'/inp_args', 'w') as f: + f.write(inp_args) + os.chmod('/tmp/nwps/'+GFEDomainname+'/inp_args',0o666) + + os.system('mkdir -p /awips2/GFESuite/nwps/'+GFEDomainname+'_var') + os.system('chmod 775 /awips2/GFESuite/nwps/'+GFEDomainname+'_var') + os.system('cp -rpq /tmp/nwps/'+GFEDomainname+'/inp_args /awips2/GFESuite/nwps/'+GFEDomainname+'_var/') + if cron: + os.system('/awips2/GFESuite/nwps/bin/runManualNWPS_OutsideAWIPS.sh '+GFEDomainname) + else: + os.system('/awips2/GFESuite/nwps/bin/runManualNWPS_OutsideAWIPS.sh '+GFEDomainname+' &') + shutil.rmtree('/tmp/nwps/'+GFEDomainname) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/SnowAmtQPFPoPWxCheck.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/SnowAmtQPFPoPWxCheck.py index 9d7c3a3c85..786e4f2ba0 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/SnowAmtQPFPoPWxCheck.py +++ 
b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/SnowAmtQPFPoPWxCheck.py @@ -1,1089 +1,1089 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -#---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# SnowAmtQPFPoPWxCheck -# -# Author: Jay Smith, WFO Fairbanks, jay.smith@noaa.gov, 907-458-3721 -# Version: 1.0.0, 09/14/2006 - Initial version -# 1.0.1, 10/12/2006 - Added PoP/QPF check at request of DSPAC -# 1.0.2, 10/18/2006 - Changed PoP/QPF check to treat the PoP as -# floating. Instead of checking each individual PoP grid -# against its corresponding QPF grid, the max of all the -# PoP grids overlapping a QPF grid will be checked. -# 1.1.0, 01/25/2007 - Added options to choose which checks to run. 
-# Reorganized code so that each check is its own method. -# Added a check for QPF and Wx. Added highlighting for the -# created temporary grids. -# 1.1.1, 02/01/2007 - Changed the SnowAmt/Wx check to return -# consistent results for SnowAmt > 0 and Wx grid containing -# S, SW, or IP regardless of whether the frozen precip is -# mixed with freezing and/or liquid precip. -# 1.2.0, 02/13/2007 - Added a configuration option to provide a CWA -# edit area to run the procedure over. A bad edit area or no -# edit area will result in running over the whole domain. -# Modified the SnowAmt/Wx and QPF/Wx checks to handle two -# cases. Case 1: The SnowAmt/QPF grid is 6-hr long and starts -# at 00, 06, 12, or 18 UTC. Then only one of the corresponding -# Wx grids has to meet the consistency rule. Case 2: The -# SnowAmt/QPF grid does not meet the case 1 definition. Then -# all of the corresponding Wx grids must meet the consistency -# rule. -# The procedure performs the following checks: -# 1. If SnowAmt present and >= 0.5 inches, then corresponding QPF grids -# must add up to 0.01 inches. -# 2. If SnowAmt >= 0.1 inches, then there are two cases: -# a. If the SnowAmt grid is exactly 6 hours long and starts at 00, 06, 12, -# or 18 UTC, then at least one of the corresponding Wx grids must have -# S, SW, or IP. -# b. If the SnowAmt grid does not adhere to the time constraints listed in -# in the previous paragraph, then all of the corresponding Wx grids -# must have S, SW, or IP. This more stringent test is required because -# with grids offset from the NDFD time constraints, it's possible for -# the GFE to evaluate the grids as consistent using an "any" -# criteria but have the NDFD flag those same grids as inconsistent. -# 3. If QPF > 0, then at least one of the corresponding PoP grids must be > 0 -# 4. If QPF > 0, then there are two cases: -# a. 
If the QPF grid is exactly 6 hours long and starts at 00, 06, 12, or 18 -# UTC, then at least one of the corresponding Wx grids must have R, RW, -# S, SW, RS, IP, L, ZR, ZL. -# b. If the QPF grid does not adhere to the time constraints listed in the -# previous paragraph, then all corresponding Wx grids must contain a -# precipitating weather type. This more stringent test is required -# because with grids offset from the NDFD time constraints, it's -# possible for the GFE to evaluate grids as consistent using an "any" -# criteria but have the NDFD flag those same grids as inconsistent. -# For all of the checks above, if the initial threshold is not exceeded, then -# the two grids are consistent by definition. In other words: -# 1. If SnowAmt < 0.5, then SnowAmt and QPF are always consistent. -# 2. If SnowAmt < 0.1, then SnowAmt and Wx are always consistent. -# 3. If QPF = 0, then QPF and PoP are always consistent. -# 4. If QPF = 0, then QPF and Wx are always consistent. -# For the Wx checks above, only the Wx type is considered. -# -# ****** NOTE NOTE NOTE NOTE ****** -# At this time, the check for two 6-hour QPF grids vs. one 12-hr PoP grid -# is not implemented because neither of those grid definitions is implemented -# in the GFE baseline. I don't know how to do a check on grids that don't -# exist. -# ****** NOTE NOTE NOTE NOTE ****** -# -# If discrepancies are found, then the "bad" grids will be highlighted. -# Temporary grids showing where the discrepancies occur will be created and -# also highlighted. -# -# Dealing with QPF and SnowAmt is always a pain, because they are "cumulative" -# elements. This procedure will account for the possibility that the SnowAmt and -# QPF grids are not the same duration. It will also account for the possibilty -# that the SnowAmt and QPF grids are not aligned on either or both ends. 
-# The only sane way to handle either situation is to believe that the QPF -# accumulation happens uniformally across the grid's duration and to use -# the proportional amount of the QPF that corresponds the SnowAmt grid's -# duration. Some examples: -# 1. The QPF grid is 3 hours long and there are 3, 1-hour, SnowAmt grids. -# Each SnowAmt grid will be compared to 1/3 the value of the QPF grid. -# 2. The last two hours of a 3-hour QPF grid overlaps a 2-hour SnowAmt grid. -# The SnowAmt grid will be compared to 2/3 the value of the QPF grid. -# 3. Two 3-hour QPF grids align with one 6-hour SnowAmt grid. The first QPF -# grid will be compared to the SnowAmt grid. If the consistency check passes -# on that comparison, the program will continue. If the consistency check -# fails, then the sum of the two QPF grids will be compared to the SnowAmt -# grid. -# 4. The last four hours of a 6-hour QPF grid and the first two hours of a -# 3-hour QPF grid overlap a 6-hour SnowAmt grid. The SnowAmt grid will be -# compared to 2/3 of the first QPF grid. If the consistency check passes, -# the program will continue. If the consistency check fails, then 2/3 of the -# first QPF grid will be added to 2/3 of the second QPF grid and that QPF -# sum will be compared against the SnowAmt grid. -# -# Confused yet? Of course, all of these gyrations can be avoided if the -# QPF and SnowAmt grids are aligned and of the same duration. -# -# Unfortunately, the GFE does not provide a way to deal with proportional -# amounts of the accumulative grids, so I have done this. -# -# I've written this code such that it's optimized to minimize memory usage -# (at least I think I've done that). As a result, it's not particularly -# optimized for ifpServer database access. In fact, I retrieve the various -# grids from the ifpServer database many times during the procedure's run. 
-# This will have an impact on how fast the procedure runs (it'll run slower -# than if I had optimized for ifpServer database access). The choice to favor -# memory optimization comes from my belief that there are still "memory leak" -# problems in the GFE and that the consequences of those problems will be most -# manifest when this procedure is most likely to be run (near the end of the -# shift). Funky memory problems are a prime cause of funky application -# behavior like application crashes or spontaneous logouts. So, this procedure -# basically reads a grid into memory, keeps it as long as it's needed, and -# then discards it. -# -# Finally, this procedure is also intended to provide an example to other -# developers of how to write and document code. I have reservations as to how -# well I've succeeded at that task. The code is heavily documented, probably -# excessively so. Also, it's not as well as organized as it could be. As you -# look through the various methods, it should become quickly apparent that -# there is a lot of repeated code. I've consciously left the code this way in -# the hopes that it will be easier to understand by more novice programmers -# and because the code hasn't quite grown to the point where updating the -# repeating code is onerous or overly error-prone. It would be better to -# capture the repeating code in separate methods, but keeping track of the -# where you are in the code becomes harder the more you have to jump around -# from method to method. As with all things, there are trade-offs involved. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -MenuItems = ["Consistency"] - -VariableList = [] -VariableList.append(('Check_Cleanup', 'Check', 'radio', ['Check', 'Cleanup'])) -VariableList.append(('Run SnowAmt/QPF Check?', ['Yes'], 'check', ['Yes'])) -VariableList.append(('Run SnowAmt/Wx Check?', ['Yes'], 'check', ['Yes'])) -VariableList.append(('Run QPF/PoP Check?', ['Yes'], 'check', ['Yes'])) -VariableList.append(('Run QPF/Wx Check?', ['Yes'], 'check', ['Yes'])) -VariableList.append(('If "Cleanup" is selected, then only cleanup actions will run.\nNo checks will be made, regardless of the above settings.', '', 'label')) - -#### Config section -# Both the QPF and SnowAmt grids have values which are floating point -# numbers. This means comparisons must use a tolerance value. In other -# words, 0.5 may be represented in machine numbers as 0.49999999999 or -# 0.500000000001. By specifying a tolerance value, we account for the -# vagaries of machine representation of floating point numbers while -# keeping the precision of the comparisons to acceptable levels. Depending -# on the comparison being done, the tolerance value will be added to or -# subtracted from the comparison value to allow for machine error in the -# floating point number representation. -# By default in the GFE, QPF precision is to the nearest one-hundredth while -# SnowAmt precision is to the nearest tenth. -qpfTol = 0.00001 # 1/100,000 tolerance vs 1/100 precision -snowAmtTol = 0.0001 # 1/10,000 tolerance vs 1/10 precision -# Inconsistent grid highlight color. One size fits all. To turn off -# highlighting, set the variable to the empty string, ''. -inconGridColor = 'red' -# Temporary grid highlight color. One size fits all. To turn off highlighting, -# set the variable to the empty string, ''. -tempGridColor = 'orange' -# Name of CWA edit area to use instead of running the procedure over the -# whole domain. Set to the empty string, '', if you want the procedure to -# always run over the whole domain. 
If the procedure has a problem with the -# edit area you provide, it will run over the whole domain. You should probably -# choose an edit area that is slightly larger than your entire CWA. It's -# possible that when mapping your GFE grids to NDFD grids that the NDFD thinks -# some GFE grid cells are in your CWA that the GFE does not think are in your -# CWA. Using an edit area slightly larger than the CWA, like the ISC_Send_Area -# which is the mask used when sending grids to the NDFD, should eliminate the -# possibibilty of the NDFD intermittently flagging CWA border "points" as -# inconsistent. Note: running the procedure over a subset of the entire GFE -# domain does not really provide any performance gains. Given the way the -# underlying array data structure works, calculations are almost always made -# at every single grid point first and then a mask is applied to limit the -# meaningful results to the edit area. For the purposes of this procedure, the -# values outside the edit area are set to the appropriate "consistent" result. -# The real benefit of this option is it limits the inconsistent results to the -# areas the forecaster really cares about, which should lessen the workload of -# using this procedure. Marine Offices: Make sure the edit area provided -# includes your marine zones. -cwaEditArea = 'ISC_Send_Area' -#### Config section end - -import SmartScript -from numpy import * - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def __cleanup(self, timeRange): - # Remove any temporary grids created previously. - for element in ( - 'SnowAmtQPFInconsistent', 'SnowAmtWxInconsistent', - 'QPFPoPInconsistent', 'QPFWxInconsistent'): - try: - # From SmartScript - self.unloadWE('Fcst', element, 'SFC') - except: - # A failure is almost certainly no grids to unload. - pass - # Turn off any highlights. 
From SmartScript - self.highlightGrids('Fcst', 'SnowAmt', 'SFC', timeRange, inconGridColor, on=0) - self.highlightGrids('Fcst', 'QPF', 'SFC', timeRange, inconGridColor, on=0) - self.highlightGrids('Fcst', 'Wx', 'SFC', timeRange, inconGridColor, on=0) - self.highlightGrids('Fcst', 'PoP', 'SFC', timeRange, inconGridColor, on=0) - return - - def __checkConfigValueTypes(self): - import types - message = '' - badValues = False - if not type(inconGridColor) is types.StringType: - message = '%sThe "inconGridColor" variable is not defined as a string value. Please contact your IFPS focal point to fix this problem.\n' % message - badValues = True - if not type(tempGridColor) is types.StringType: - message = '%sThe "tempGridColor" variable is not defined as a string value. Please contact your IFPS focal point to fix this problem.\n' % message - badValues = True - if not type(cwaEditArea) is types.StringType: - message = '%sThe "cwaEditArea" variable is not defined as a string value. Please contact your IFPS focal point to fix this problem.\n' % message - badValues = True - if badValues: - message = '%sYou will not be able to run the procedure until the problem is corrected.' % message - # The next two commands are from SmartScript - self.statusBarMsg(message, 'U') - self.cancel() - return - - def _runSnowAmtQPFCheck(self, timeRange): - # This method implements the check that if SnowAmt >= 0.5, then - # QPF must be >= 0.01. - - # There can be a significant difference between the values stored - # in memory and the values returned from the database. This is because - # when values are saved, the element's precision (as defined in - # serverConfig.py/localConfig.py) is enforced. Values in memory do not - # have the element's precision enforced; in fact, they have the - # machine precision of the underlying data type. - # If there are locks, post an urgent message and return from the method. 
- message = '' - # lockedByMe is from SmartScript - if self.lockedByMe('QPF', 'SFC'): - message = '%sYou have the QPF grid locked. Please save the QPF grid.\n' % message - if self.lockedByMe('SnowAmt', 'SFC'): - message = '%sYou have the SnowAmt grid locked. Please save the SnowAmt grid.\n' % message - # lockedByOther is from SmartScript - if self.lockedByOther('QPF', 'SFC'): - message = '%sThe QPF grid is locked by someone else. Please have that person save the QPF grid.\n' % message - if self.lockedByOther('SnowAmt', 'SFC'): - message = '%sThe SnowAmt grid is locked by someone else. Please have that person save the SnowAmt grid.\n' % message - if message: - message = '%sThe SnowAmt/QPF Check was not run.' % message - self.statusBarMsg(message, 'U') - # I return instead of aborting because the user may have asked for - # other tests that do not have locked grid problems. - return - - # Make sure there are actually SnowAmt grids in the time range. - # The self.getGrids command will return None if there are no grids - # in the time range for mode='First' and noDataError=0. The None - # variable cannot be iterated over. Rather than trap in a try/except, - # I'll just check for the condititon. This may not be the most - # Pythonic way of doing things, but it allows me to avoid having - # a bunch of code indented beneath a try statement. If no SnowAmt - # grids are found, post an urgent message and return from the method. - # getGrids is from SmartScript - snowAmtInfoList = self.getGridInfo('Fcst', 'SnowAmt', 'SFC', timeRange) - if [] == snowAmtInfoList: - message = 'There are no SnowAmt grids in the time range you selected.\nThe SnowAmt/QPF Check did not run.' - self.statusBarMsg(message, 'U') - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. 
- return - - # getGridInfo is from SmartScript - # One might ask why I don't just return the result of self.getGrids - # to a variable and iterate over that. I'm trying to minimize the - # memory footprint of the procedure. Reading all the grids into a - # variable could be a fairly large memory hit. The construct below - # only reads one SnowAmt grid at a time into memory, the one that's - # being checked. By using the cache=0 switch on all the self.getGrids - # command, I prevent the GFE from saving the grids into memory for me. - # The Python builtin command enumerate loops over an iterable object - # and returns a 2-tuple containing the current index of the - # iteration and the object at that index. In cases where I need - # both the index and the object, I think this construct is more - # elegant than: - # for i in xrange(len(iterableObject)): - # object = iterableObject[i] - snowAmtGrids = self.getGrids('Fcst', 'SnowAmt', 'SFC', - timeRange, mode='List', noDataError=0,cache=0) - for snowAmtIndex, snowAmtGrid in enumerate(snowAmtGrids): - # greater_equal is from Numeric. For the given array and - # threshold, a new array of the same dimensions as the input - # array is returned. The new array has the value 1 where the - # input array was greater than or equal to the threshold and - # has the value 0 elsewhere. - halfInchMask = greater_equal(snowAmtGrid, 0.5 - snowAmtTol) - gridTR = snowAmtInfoList[snowAmtIndex].gridTime() - # zeros is from Numeric. It creates an array of all zeros for - # the given dimensions and numeric type. - qpfSum = self.empty() - qpfGrids = self.getGrids( - 'Fcst', 'QPF', 'SFC', gridTR, mode='List', noDataError=0, - cache=0) - if qpfGrids is None: - message = '''There are no QPF grids in time range %s. 
-The SnowAmt/QPF Check skipped the time range.''' % gridTR - self.statusBarMsg(message, 'U') - continue - qpfInfoList = self.getGridInfo('Fcst', 'QPF', 'SFC', gridTR) - for qpfIndex, qpfGrid in enumerate(qpfGrids): - snowAmtGridStartTime = gridTR.startTime().unixTime() - qpfGridTR = qpfInfoList[qpfIndex].gridTime() - qpfGridStartTime = qpfGridTR.startTime().unixTime() - fraction = 1.0 - if qpfGridStartTime < snowAmtGridStartTime: - diff = snowAmtGridStartTime - qpfGridStartTime - fraction -= (float(diff) / qpfGridTR.duration()) - snowAmtGridEndTime = gridTR.endTime().unixTime() - qpfGridEndTime = qpfGridTR.endTime().unixTime() - if qpfGridEndTime > snowAmtGridEndTime: - diff = qpfGridEndTime - snowAmtGridEndTime - fraction -= (float(diff) / qpfGridTR.duration()) - # For some reason, the construct: - # qpfSum = qpfSum + (qpfGrid * fraction) - # doesn't assign the expression evaluation back to qpfSum. - # Thus, I use a temporary variable. - qpfTemp = qpfSum + (qpfGrid * fraction) - qpfSum = qpfTemp - del qpfTemp - # less is from Numeric. It behaves analogously to greater_equal, - # described above. - qpfMask = less(qpfSum, 0.01 + qpfTol) - # The following is the "truth" table for the logical - # comparison. - # SnowAmt >= 0.5, 1; SnowAmt < 0.5, 0 - # QPF < 0.01, 1; QPF >= 0.01, 0 - # SnowAmt >= 0.5 (1) and QPF < 0.01 (1) = 1 (Bad result) - # SnowAmt >= 0.5 (1) and QPF >= 0.01 (0) = 0 (Good result) - # SnowAmt < 0.5 (0) and QPF < 0.01 (1) = 0 (Good result) - # SnowAmt < 0.5 (0) and QPF >= 0.01 (0) = 0 (Good result) - # logical_and is from Numeric - consistMask = logical_and(halfInchMask, qpfMask) - # Now, apply the CWA mask. There's an assumption here that - # all offices will use a mask and provide a valid one, which - # means this step does something meaningful. If that assumption - # does not hold, then the next statement doesn't actually - # change anything, even though each and every grid point has a - # comparison check made. - # where is from Numeric. 
The first argument is a mask. - # The second argument is/are the value/values to use at the - # array points where the mask is one. The third argument - # is/are the value/values to use at the array points - # where the mask is zero. For this comparison, I want - # the values of consistMask where self.cwaMask is one and - # I want the "good result", which is zero, where - # self.cwaMask is zero. - consistMask[logical_not(self.cwaMask)] = 0 - # ravel and sometrue are from Numeric. - if not sometrue(ravel(consistMask)): - # This is the good result, even though it may not be - # intuitive. The ravel function reduces the rank of the - # array by one. Since we had a 2-d array, the ravel - # function creates a 1-d array (a vector) such that - # reading the 2-d array from left-to-right, top-to- - # bottom returns the same values as reading the 1-d - # array from left-to-right. The sometrue function - # performs a logical or on subsequent element pairs - # in the 1-d array and returns the final result. If - # there's no inconsistency, the result will be 0. - # Thus, negating the sometrue result gives us the - # positive outcome. Phew. - # Since QPF is an accumulative element, we don't need - # to continue the loop once the QPF sum meets the - # threshold. - break - else: - # This block will only execute if the for loop runs to - # completion, i.e., the break statement is not executed. - # So, if we get here, we have an inconsistency and need to - # highlight the appropriate grids. - if inconGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmt', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'QPF', 'SFC', gridTR, inconGridColor) - # createGrid is from SmartScript - # Since this block of code only executes if the for loop - # runs to completion, then the value of consistMask from - # the for loop will contain all of the inconsistencies. 
- self.createGrid( - 'Fcst', 'SnowAmtQPFInconsistent', 'SCALAR', consistMask, - gridTR, descriptiveName='SnowAmtQPFInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmtQPFInconsistent', 'SFC', gridTR, - tempGridColor) - self.inconsistent = True - # While not required, I like to terminate my methods with a return - # statement to make it clear this is where the method ends. - return - - def _runSnowAmtWxCheck(self, timeRange): - # This implements the check that if SnowAmt >= 0.1, then the Wx grid - # must contain S, SW, or IP, regardless of whether or not there is - # any freezing or liquid types. Finally, the check does not look at - # anything other than the Wx type. In other words, the check will be - # okay if SnowAmt != 0 and Wx has Chc:S:- or Def:SW:-- or Lkly:S:+. - - # There can be a significant difference between the values stored - # in memory and the values returned from the database. This is because - # when values are saved, the element's precision (as defined in - # serverConfig.py/localConfig.py) is enforced. Values in memory do not - # have the element's precision enforced; in fact, they have the - # machine precision of the underlying data type. - # If there are locks, post an urgent message and return from the method. - message = '' - # lockedByMe is from SmartScript - if self.lockedByMe('Wx', 'SFC'): - message = '%sYou have the Wx grid locked. Please save the Wx grid.\n' % message - if self.lockedByMe('SnowAmt', 'SFC'): - message = '%sYou have the SnowAmt grid locked. Please save the SnowAmt grid.\n' % message - # lockedByOther is from SmartScript - if self.lockedByOther('Wx', 'SFC'): - message = '%sThe Wx grid is locked by someone else. Please have that person save the Wx grid.\n' % message - if self.lockedByOther('SnowAmt', 'SFC'): - message = '%sThe SnowAmt grid is locked by someone else. 
Please have that person save the SnowAmt grid.\n' % message - if message: - message = '%sThe SnowAmt/Wx Check was not run.' % message - self.statusBarMsg(message, 'U') - # I return instead of aborting because the user may have asked for - # other tests that do not have locked grid problems. - return - - # Make sure there are actually SnowAmt grids in the time range. - # The self.getGrids command will return None if there are no grids - # in the time range for noDataError=0. The None - # variable cannot be iterated over. Rather than trap in a try/except, - # I'll just check for the condititon. This may not be the most - # Pythonic way of doing things, but it allows me to avoid having - # a bunch of code indented beneath a try statement. If no SnowAmt - # grids are found, post an urgent message and return from the method. - # getGrids is from SmartScript - snowAmtInfoList = self.getGridInfo('Fcst', 'SnowAmt', 'SFC', timeRange) - if [] == snowAmtInfoList: - message = 'There are no SnowAmt grids in the time range you selected.\nThe SnowAmt/Wx Check did not run.' - self.statusBarMsg(message, 'U') - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. - return - - snowAmtGrids = self.getGrids( - 'Fcst', 'SnowAmt', 'SFC', timeRange, mode='List', noDataError=0, - cache=0) - for snowAmtIndex, snowAmtGrid in enumerate(snowAmtGrids): - nonZeroMask = greater_equal(snowAmtGrid, 0.1 - snowAmtTol) - gridTR = snowAmtInfoList[snowAmtIndex].gridTime() - - wxInfoList = self.getGridInfo('Fcst', 'Wx', 'SFC', gridTR) - if [] == wxInfoList: - message = '''There are no Wx grids in time range %s. -The SnowAmt/Wx Check skipped the time range.''' % gridTR - self.statusBarMsg(message, 'U') - continue - # There are two cases, which I'll capture in individual methods - # If the SnowAmt grid is exactly 6 hours long and starts at - # 00, 06, 12, or 18 UTC, then only one overlapping Wx grid needs - # to match. 
Otherwise, all overlapping Wx grids need to match. - if gridTR.duration() / 3600 == 6 and \ - gridTR.startTime().hour in (0, 6, 12, 18): - self._snowAmtWxCheckLocked(nonZeroMask, gridTR, wxInfoList) - else: - self._snowAmtWxCheckUnlocked(nonZeroMask, gridTR, wxInfoList) - return - - def _snowAmtWxCheckLocked(self, nonZeroMask, gridTR, wxInfoList): - # The "Locked" comes from the idea that if the SnowAmt grid meets - # the duration and start time constraints, then it's been "locked". - # I need to capture the consistency masks for each individual Wx grid - # just in case I end up with inconsistencies. - consistMaskList = [] - for wxIndex, wxGrid in enumerate(self.getGrids( - 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, - cache=0)): - # wxMask is from SmartScript - sMask = self.wxMask(wxGrid, ':S:') - swMask = self.wxMask(wxGrid, ':SW:') - ipMask = self.wxMask(wxGrid, ':IP:') - snowMask = logical_or(logical_or(sMask, swMask), ipMask) - del (sMask, swMask, ipMask) - wxMask = logical_not(snowMask) - # "Truth" table for the logical comparison follows - # SnowAmt >= 0.1, 1; SnowAmt < 0.1, 0 - # Wx has S, SW, or IP, 0; Wx doesn't have S, SW, or IP, 1 - # SnowAmt >= 0.1 (1) and Wx has (0) = 0 (Good result) - # SnowAmt >= 0.1 (1) and Wx doesn't have (1) = 1 (Bad result) - # SnowAmt < 0.1 (0) and Wx has (0) = 0 (Good result) - # SnowAmt < 0.1 (0) and Wx doesn't have (1) = 0 (Good result) - # - consistMask = logical_and(nonZeroMask, wxMask) - consistMask[logical_not(self.cwaMask)] = 0 - consistMaskList.append(consistMask) - if not sometrue(ravel(consistMask)): - # There were no inconsistencies with this Wx grid. Since only - # one needs to be consistent, we don't need to do any more - # checks. - break - else: - # This block will only execute if the for loop runs to - # completion, i.e., the break statement is not executed. - # So, if we get here, we have an inconsistency and need to - # highlight the appropriate grids. 
- if inconGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmt', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'Wx', 'SFC', gridTR, inconGridColor) - # createGrid is from SmartScript - for index in xrange(len(wxInfoList)): - # Create temporary grids for each Wx grid. Limit the start and - # end times of the temporary grids so that they don't extend - # beyond the start and end times of the corresponding SnowAmt - # grid. - wxGridTR = wxInfoList[index].gridTime() - tempGridStartTime = wxGridTR.startTime().unixTime() - if tempGridStartTime < gridTR.startTime().unixTime(): - tempGridStartTime = gridTR.startTime().unixTime() - tempGridEndTime = wxGridTR.endTime().unixTime() - if tempGridEndTime > gridTR.endTime().unixTime(): - tempGridEndTime = gridTR.endTime().unixTime() - tempGridDur = (tempGridEndTime - tempGridStartTime) / 3600 - offset = (tempGridStartTime - \ - self.timeRange0_1.startTime().unixTime()) / 3600 - # Because the time range may be different for the temporary - # grid, I need to create and use that time range when - # creating the temporary grid. - tempGridTR = self.createTimeRange( - offset, offset+tempGridDur, 'Zulu') - self.createGrid( - 'Fcst', 'SnowAmtWxInconsistent', 'SCALAR', - consistMaskList[index], tempGridTR, - descriptiveName='SnowAmtWxInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmtWxInconsistent', 'SFC', gridTR, - tempGridColor) - self.inconsistent = True - return - - def _snowAmtWxCheckUnlocked(self, nonZeroMask, gridTR, wxInfoList): - # The "Unlocked" comes from the idea that if the SnowAmt grid does - # not meet the duration and start time constraints, then it's been - # left "unlocked". 
- for wxIndex, wxGrid in enumerate(self.getGrids( - 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, - cache=0)): - # wxMask is from SmartScript - sMask = self.wxMask(wxGrid, ':S:') - swMask = self.wxMask(wxGrid, ':SW:') - ipMask = self.wxMask(wxGrid, ':IP:') - snowMask = logical_or(logical_or(sMask, swMask), ipMask) - del (sMask, swMask, ipMask) - wxMask = logical_not(snowMask) - # "Truth" table for the logical comparison follows - # SnowAmt >= 0.1, 1; SnowAmt < 0.1, 0 - # Wx has S, SW, or IP, 0; Wx doesn't have S, SW, or IP, 1 - # SnowAmt >= 0.1 (1) and Wx has (0) = 0 (Good result) - # SnowAmt >= 0.1 (1) and Wx doesn't have (1) = 1 (Bad result) - # SnowAmt < 0.1 (0) and Wx has (0) = 0 (Good result) - # SnowAmt < 0.1 (0) and Wx doesn't have (1) = 0 (Good result) - # - # All Wx grids overlapping the SnowAmt grid must be consistent. - consistMask = logical_and(nonZeroMask, wxMask) - consistMask[logical_not(self.cwaMask)] = 0 - if sometrue(ravel(consistMask)): - # I'll highlight the SnowAmt grids and Wx grids in - # gridTR as I did with QPF. However, I'll make - # temporary grids here using the Wx grid's time - # range but, the temporary grid cannot start before - # the start of the corresponding SnowAmt grid nor can - # it end after the end of the corresponding SnowAmt grid. 
- wxGridTR = wxInfoList[wxIndex].gridTime() - tempGridStartTime = wxGridTR.startTime().unixTime() - if tempGridStartTime < gridTR.startTime().unixTime(): - # Clip to start of SnowAmt grid - tempGridStartTime = gridTR.startTime().unixTime() - tempGridEndTime = wxGridTR.endTime().unixTime() - if tempGridEndTime > gridTR.endTime().unixTime(): - # Clip to end of SnowAmtGrid - tempGridEndTime = gridTR.endTime().unixTime() - tempGridDur = (tempGridEndTime - tempGridStartTime) / 3600 - offset = (tempGridStartTime - \ - self.timeRange0_1.startTime().unixTime()) / 3600 - # Since either the front or end of the Wx grid's - # time range may have been clipped, create a time - # range using those values. - tempGridTR = self.createTimeRange( - offset, offset+tempGridDur, 'Zulu') - self.createGrid( - 'Fcst', 'SnowAmtWxInconsistent', 'SCALAR', consistMask, - tempGridTR, descriptiveName='SnowAmtWxInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmtWxInconsistent', 'SFC', gridTR, - tempGridColor) - if inconGridColor: - self.highlightGrids( - 'Fcst', 'SnowAmt', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'Wx', 'SFC', wxGridTR, inconGridColor) - self.inconsistent = True - return - - def _runQPFPoPCheck(self, timeRange): - # This method implements the check that if any QPF grid is non zero - # then one of the corresponding floating PoP grids must also be non - # zero. - - # There can be a significant difference between the values stored - # in memory and the values returned from the database. This is because - # when values are saved, the element's precision (as defined in - # serverConfig.py/localConfig.py) is enforced. Values in memory do not - # have the element's precision enforced; in fact, they have the - # machine precision of the underlying data type. - # If there are locks, post an urgent message and return from the method. 
- message = '' - # lockedByMe is from SmartScript - if self.lockedByMe('QPF', 'SFC'): - message = '%sYou have the QPF grid locked. Please save the QPF grid.\n' % message - if self.lockedByMe('PoP', 'SFC'): - message = '%sYou have the PoP grid locked. Please save the PoP grid.\n' % message - # lockedByOther is from SmartScript - if self.lockedByOther('QPF', 'SFC'): - message = '%sThe QPF grid is locked by someone else. Please have that person save the QPF grid.\n' % message - if self.lockedByOther('PoP', 'SFC'): - message = '%sThe PoP grid is locked by someone else. Please have that person save the PoP grid.\n' % message - if message: - message = '%sThe QPF/PoP Check was not run.' % message - self.statusBarMsg(message, 'U') - # I return instead of aborting because the user may have asked for - # other tests that do not have locked grid problems. - return - - # Make sure there are actually QPF grids in the time range. - # The self.getGrids command will return None if there are no grids - # in the time range for mode='First' and noDataError=0. The None - # variable cannot be iterated over. Rather than trap in a try/except, - # I'll just check for the condititon. This may not be the most - # Pythonic way of doing things, but it allows me to avoid having - # a bunch of code indented beneath a try statement. If no SnowAmt - # grids are found, post an urgent message and return from the method. - # getGrids is from SmartScript - qpfInfoList = self.getGridInfo('Fcst', 'QPF', 'SFC', timeRange) - if [] == qpfInfoList: - message = 'There are no QPF grids in the time range you selected.\nThe QPF/PoP Check did not run.' - self.statusBarMsg(message, 'U') - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. 
- return - qpfGrids = self.getGrids( - 'Fcst', 'QPF', 'SFC', timeRange, mode='List', noDataError=0, - cache=0) - for qpfIndex, qpfGrid in enumerate(qpfGrids): - gridTR = qpfInfoList[qpfIndex].gridTime() - - popGrid = self.getGrids( - 'Fcst', 'PoP', 'SFC', gridTR, mode='Max', noDataError=0, - cache=0) - if popGrid is None: - message = '''There are no PoP grids in time range %s. -The QPF/PoP Check skipped the time range.''' % gridTR - self.statusBarMsg(message, 'U') - continue - qpfNonZeroMask = greater(qpfGrid, qpfTol) - popZeroMask = equal(popGrid, 0) - # popZeroMask = 1 if PoP = 0; popZeroMask = 0 if PoP != 0 - # qpfNonZeroMask = 1 if QPF > 0; qpfNonZeroMask = 0 if QPF = 0 - # PoP = 0 (1) and QPF = 0 (0) => 0 (Good result) - # PoP != 0 (0) and QPF = 0 (0) => 0 (Good result) - # PoP != 0 (0) and QPF > 0 (1) => 0 (Good result) - # PoP = 0 (1) and QPF > 0 (1) => 1 (Bad result) - consistMask = logical_and(qpfNonZeroMask, popZeroMask) - consistMask[logical_not(self.cwaMask)] = 0 - if sometrue(ravel(consistMask)): - # The good result is if the logical_and returns zeros - # for every grid point, that is "none true". So, if - # the sometrue method evaluates True, there are - # inconsistencies. - self.createGrid( - 'Fcst', 'QPFPoPInconsistent', 'SCALAR', consistMask, gridTR, - descriptiveName='QPFPoPInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'QPFPoPInconsistent', 'SFC', gridTR, - tempGridColor) - if inconGridColor: - self.highlightGrids( - 'Fcst', 'QPF', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'PoP', 'SFC', gridTR, inconGridColor) - self.inconsistent = True - - ##### Edited by Rob Radzanowski (WFO-CTP) 03-16-2009 to add missing NDFD check for QPF=0 & PoP > 50 - ##### which is causing unexplained yellow banners due to lack of checking for this error. 
- qpfZeroMask = equal(qpfGrid, 0) - popGrid = self.getGrids( - 'Fcst', 'PoP', 'SFC', gridTR, mode='Max', noDataError=0, cache=0) - popGreater50Mask = greater(popGrid, 50) - # popGreater50Mask = 1 if PoP > 50; popGreater50Mask = 0 if PoP <= 50 - # qpfZeroMask = 0 if QPF > 0; qpfZeroMask = 1 if QPF = 0 - # PoP > 50 (1) and QPF > 0 (0) => 0 (Good result) - # PoP > 50 (1) and QPF = 0 (1) => 1 (Bad result) - # PoP <= 50 (0) and QPF > 0 (0) => 0 (Good/Irrelevant result) - # PoP <= 50 (0) and QPF = 0 (1) => 0 (Good result) - - consistMask2 = logical_and(qpfZeroMask, popGreater50Mask) - consistMask2[logical_not(self.cwaMask)] = 0 - if sometrue(ravel(consistMask2)): - # The good result is if the logical_and returns zeros - # for every grid point, that is "none true". So, if - # the sometrue method evaluates True, there are - # inconsistencies. - self.createGrid( - 'Fcst', 'QPFPoPInconsistent', 'SCALAR', consistMask2, gridTR, - descriptiveName='QPFPoPInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - - if tempGridColor: - self.highlightGrids('Fcst', 'QPFPoPInconsistent', 'SFC', gridTR, tempGridColor) - if inconGridColor: - self.highlightGrids('Fcst', 'QPF', 'SFC', gridTR, inconGridColor) - self.highlightGrids('Fcst', 'PoP', 'SFC', gridTR, inconGridColor) - self.inconsistent = True - return - - def _runQPFWxCheck(self, timeRange): - # This method implements the check that if QPF non zero, then the - # corresponding Wx grids must contain a precipitable Wx type. Note: - # the method only checks the Wx type, no cov/prob, no inten, etc. - - # There can be a significant difference between the values stored - # in memory and the values returned from the database. This is because - # when values are saved, the element's precision (as defined in - # serverConfig.py/localConfig.py) is enforced. Values in memory do not - # have the element's precision enforced; in fact, they have the - # machine precision of the underlying data type. 
- # If there are locks, post an urgent message and return from the method. - message = '' - # lockedByMe is from SmartScript - if self.lockedByMe('QPF', 'SFC'): - message = '%sYou have the QPF grid locked. Please save the QPF grid.\n' % message - if self.lockedByMe('Wx', 'SFC'): - message = '%sYou have the Wx grid locked. Please save the Wx grid.\n' % message - # lockedByOther is from SmartScript - if self.lockedByOther('QPF', 'SFC'): - message = '%sThe QPF grid is locked by someone else. Please have that person save the QPF grid.\n' % message - if self.lockedByOther('Wx', 'SFC'): - message = '%sThe Wx grid is locked by someone else. Please have that person save the Wx grid.\n' % message - if message: - message = '%sThe QPF/Wx Check was not run.' % message - self.statusBarMsg(message, 'U') - # I return instead of aborting because the user may have asked for - # other tests that do not have locked grid problems. - return - - # Make sure there are actually QPF grids in the time range. - # I'll just check for the condititon. If no SnowAmt - # grids are found, post an urgent message and return from the method. - qpfInfoList = self.getGridInfo('Fcst', 'QPF', 'SFC', timeRange) - if [] == qpfInfoList: - message = 'There are no QPF grids in the time range you selected.\nThe QPF/PoP Check did not run.' - self.statusBarMsg(message, 'U') - # I return instead of aborting because the user may have asked for - # other tests that do not have missing grid problems. - return - for qpfIndex, qpfGrid in enumerate(self.getGrids( - 'Fcst', 'QPF', 'SFC', timeRange, mode='List', noDataError=0, - cache=0)): - qpfNonZeroMask = greater(qpfGrid, qpfTol) - gridTR = qpfInfoList[qpfIndex].gridTime() - wxInfoList = self.getGridInfo('Fcst', 'Wx', 'SFC', gridTR) - if [] == wxInfoList: - message = '''There are no Wx grids in time range %s. -The QPF/Wx Check skipped the time range.''' % gridTR - self.statusBarMsg(message, 'U') - continue - # There are two cases. 
If the QPF grid is exactly 6 hours long and - # starts at 00, 06, 12, or 18 UTC, then only one of the - # corresponding Wx grids needs to be consistent. Otherwise, all the - # corresponding Wx grids need to be consistent. - if gridTR.duration() / 3600 == 6 and gridTR.startTime().hour in (0, 6, 12, 18): - self._qpfWxCheckLocked(qpfNonZeroMask, gridTR, wxInfoList) - else: - self._qpfWxCheckUnlocked(qpfNonZeroMask, gridTR, wxInfoList) - return - - def _qpfWxCheckLocked(self, qpfNonZeroMask, gridTR, wxInfoList): - # The "Locked" comes from the idea that if the QPF grid is - # exactly 6 hours long and starts at 00, 06, 12, or 18 UTC, then it - # is "locked". - consistMaskList = [] - for wxIndex, wxGrid in enumerate(self.getGrids( - 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, - cache=0)): - # wxMask is from SmartScript - sMask = self.wxMask(wxGrid, ':S:') - swMask = self.wxMask(wxGrid, ':SW:') - ipMask = self.wxMask(wxGrid, ':IP:') - snowMask = logical_or(logical_or(sMask, swMask), ipMask) - del (sMask, swMask, ipMask) - rMask = self.wxMask(wxGrid, ':R:') - rwMask = self.wxMask(wxGrid, ':RW:') - lMask = self.wxMask(wxGrid, ':L:') - zlMask = self.wxMask(wxGrid, ':ZL:') - zrMask = self.wxMask(wxGrid, ':ZR:') - # logical_or is from Numeric - rainMask = logical_or( - rMask, logical_or( - rwMask, logical_or( - lMask, logical_or(zlMask, zrMask)))) - del (rMask, rwMask, lMask, zlMask, zrMask) - precipMask = logical_or(snowMask, rainMask) - del (snowMask, rainMask) - wxMask = logical_not(precipMask) - # QPF >= 0.01, 1; QPF < 0.01, 0 - # Wx has precip, 0; Wx doesn't have precip, 1 - # QPF >= 0.01 (1) and Wx has (0) = 0 (Good result) - # QPF >= 0.01 (1) and Wx doesn't have (1) = 1 (Bad result) - # QPF < 0.01 (0) and Wx has (0) = 0 (Good result) - # QPF < 0.01 (0) and Wx doesn't have (1) = 0 (Good result) - consistMask = logical_and(qpfNonZeroMask, wxMask) - consistMask[logical_not(self.cwaMask)] = 0 - consistMaskList.append(consistMask) - if not 
sometrue(ravel(consistMask)): - # There were no inconsistencies with this Wx grid. Since only - # one needs to be consistent, we don't need to do any more - # checks. - break - else: - # This block will only execute if the for loop runs to - # completion, i.e., the break statement is not executed. - # So, if we get here, we have an inconsistency and need to - # highlight the appropriate grids. - if inconGridColor: - self.highlightGrids( - 'Fcst', 'QPF', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'Wx', 'SFC', gridTR, inconGridColor) - # createGrid is from SmartScript - for index in xrange(len(wxInfoList)): - # Create temporary grids for each Wx grid. Limit the time - # range of the temporary grid so that it doesn't start any - # earlier or any later than the corresponding QPF grid. - wxGridTR = wxInfoList[index].gridTime() - tempGridStartTime = wxGridTR.startTime().unixTime() - if tempGridStartTime < gridTR.startTime().unixTime(): - tempGridStartTime = gridTR.startTime().unixTime() - tempGridEndTime = wxGridTR.endTime().unixTime() - if tempGridEndTime > gridTR.endTime().unixTime(): - tempGridEndTime = gridTR.endTime().unixTime() - tempGridDur = (tempGridEndTime - tempGridStartTime) / 3600 - offset = (tempGridStartTime - \ - self.timeRange0_1.startTime().unixTime()) / 3600 - # Since the temporary grid could have a different time range - # than the Wx grid, I need to create and use that time range - # when creating the temporary grid. 
- tempGridTR = self.createTimeRange( - offset, offset+tempGridDur, 'Zulu') - self.createGrid( - 'Fcst', 'QPFWxInconsistent', 'SCALAR', - consistMaskList[index], tempGridTR, - descriptiveName='QPFWxInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'QPFWxInconsistent', 'SFC', gridTR, - tempGridColor) - self.inconsistent = True - return - - def _qpfWxCheckUnlocked(self, qpfNonZeroMask, gridTR, wxInfoList): - # The "Unlocked" comes from the idea that if the QPF grid is not - # exactly 6 hours long and starting at 00, 06, 12, or 18 UTC, then it - # is "unlocked". - for wxIndex, wxGrid in enumerate(self.getGrids( - 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, - cache=0)): - # wxMask is from SmartScript - sMask = self.wxMask(wxGrid, ':S:') - swMask = self.wxMask(wxGrid, ':SW:') - ipMask = self.wxMask(wxGrid, ':IP:') - snowMask = logical_or(logical_or(sMask, swMask), ipMask) - del (sMask, swMask, ipMask) - rMask = self.wxMask(wxGrid, ':R:') - rwMask = self.wxMask(wxGrid, ':RW:') - lMask = self.wxMask(wxGrid, ':L:') - zlMask = self.wxMask(wxGrid, ':ZL:') - zrMask = self.wxMask(wxGrid, ':ZR:') - # logical_or is from Numeric - rainMask = logical_or( - rMask, logical_or( - rwMask, logical_or( - lMask, logical_or(zlMask, zrMask)))) - del (rMask, rwMask, lMask, zlMask, zrMask) - precipMask = logical_or(snowMask, rainMask) - del (snowMask, rainMask) - wxMask = logical_not(precipMask) - # QPF >= 0.01, 1; QPF < 0.01, 0 - # Wx has precip, 0; Wx doesn't have precip, 1 - # QPF >= 0.01 (1) and Wx has (0) = 0 (Good result) - # QPF >= 0.01 (1) and Wx doesn't have (1) = 1 (Bad result) - # QPF < 0.01 (0) and Wx has (0) = 0 (Good result) - # QPF < 0.01 (0) and Wx doesn't have (1) = 0 (Good result) - # - # All Wx grids overlapping the SnowAmt grid must be consistent. 
- consistMask = logical_and(qpfNonZeroMask, wxMask) - consistMask[logical_not(self.cwaMask)] = 0 - if sometrue(ravel(consistMask)): - wxGridTR = wxInfoList[wxIndex].gridTime() - tempGridStartTime = wxGridTR.startTime().unixTime() - if tempGridStartTime < gridTR.startTime().unixTime(): - # Clip to start of QPF grid - tempGridStartTime = gridTR.startTime().unixTime() - tempGridEndTime = wxGridTR.endTime().unixTime() - if tempGridEndTime > gridTR.endTime().unixTime(): - # Clip to end of QPF Grid - tempGridEndTime = gridTR.endTime().unixTime() - tempGridDur = (tempGridEndTime - tempGridStartTime) / 3600 - offset = (tempGridStartTime - \ - self.timeRange0_1.startTime().unixTime()) / 3600 - # Since either the front or end of the Wx grid's - # time range may have been clipped, create a time - # range using those values. - tempGridTR = self.createTimeRange( - offset, offset+tempGridDur, 'Zulu') - self.createGrid( - 'Fcst', 'QPFWxInconsistent', 'SCALAR', consistMask, - tempGridTR, descriptiveName='QPFWxInconsistent', - minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') - if tempGridColor: - self.highlightGrids( - 'Fcst', 'QPFWxInconsistent', 'SFC', gridTR, - tempGridColor) - if inconGridColor: - self.highlightGrids( - 'Fcst', 'QPF', 'SFC', gridTR, inconGridColor) - self.highlightGrids( - 'Fcst', 'Wx', 'SFC', wxGridTR, inconGridColor) - self.inconsistent = True - return - - def _calcTolerance(self, gridInfo): - precision = gridInfo.gridParmInfo.getPrecision() - return pow(10, -precision) - - def execute(self, timeRange, varDict): - # Make sure the configuration values are the correct types. - self.__checkConfigValueTypes() - # createTimeRange is from SmartScript - timeRange0_240 = self.createTimeRange(0, 241, 'Zulu') - checkCleanup = varDict.get('Check_Cleanup', 'Check') - self.__cleanup(timeRange0_240) - if checkCleanup == 'Cleanup': - message = 'SnowQPFPoPWxCheck complete.' 
- self.statusBarMsg(message, 'R') - self.cancel() - if timeRange.endTime().unixTime() - timeRange.startTime().unixTime() < \ - 3600: # No time range selected, use create a 0 to 240 hour range - timeRange = timeRange0_240 - - # If the user has a time range swept out, send an informational - # message. - if (timeRange.startTime().unixTime() != timeRange0_240.startTime().unixTime()) or \ - (timeRange.endTime().unixTime() != timeRange0_240.endTime().unixTime()) or \ - (timeRange.duration() != timeRange0_240.duration()): - message = 'The SnowAmtQPFPoPWxCheck procedure did not run over the 0 to 240 hour time period,\nit ran over %s. This may be what you desired.' % str(timeRange) - self.statusBarMsg(message, 'S') - - # I'll need to know the unix time of 00Z so I can determine the - # start time of temporary grids later. I'll need this in more than - # one of the methods called later, so this will become an instance - # variable, i.e., prefixed with "self." I also need an instance - # variable that flags whether or not there were inconsistent grids. - self.timeRange0_1 = self.createTimeRange(0, 1, 'Zulu') - self.inconsistent = False - - # A CWA edit area can be provided in the configuration section. - # Attempt to encode that edit area as a Numeric Python mask so that - # the later checks are limited to the edit area. The GFE is not very - # friendly if the encoding fails. The GFE will send a nasty message - # to the user, but continue executing the procedure. No trappable - # error is thrown. As of this writing, the GFE appears to create an - # array of shape (0, 0) if the encoding cannot be done, so I will - # check for that and, if I find it, then set the edit area to the - # domain. - # encodeEditArea comes from SmartScript. For the points that are in - # the edit area, a value of one is assigned. Otherwise, a value of - # zero is assigned. 
- if cwaEditArea: - self.cwaMask = self.encodeEditArea(cwaEditArea) - if self.cwaMask.shape == (0, 0): - # Use the getGridInfo command to get information about the - # SnowAmt grid. From this, the grid size can be extracted. I - # could use getGridInfo on any valid GFE grid. - # getGridInfo is from SmartScript - snowAmtInfoList = self.getGridInfo( - 'Fcst', 'SnowAmt', 'SFC', timeRange) - # I painfully discovered that the array shape is (y, x) - gridSize = (snowAmtInfoList[0].gridLocation().gridSize().y, - snowAmtInfoList[0].gridLocation().gridSize().x) - # ones is from Numeric. It creates an array of the given size - # and data type where all values are one. - self.cwaMask = ones(gridSize, Int) - message = \ -'''The procedure was not able to use the CWA edit area, %s, provided -in the configuration. You should inform the person responsible for procedures -of this problem. The procedure ran over the whole domain.''' % cwaEditArea - self.statusBarMsg(message, 'S') - else: - snowAmtInfoList = self.getGridInfo( - 'Fcst', 'SnowAmt', 'SFC', timeRange) - gridSize = (snowAmtInfoList[0].gridLocation().gridSize().y, - snowAmtInfoList[0].gridLocation().gridSize().x) - self.cwaMask = ones(gridSize, Int) - - # Based on the user's input, run the appropriate checks. - # By making each of these options a checkbox with only one option in - # the VariableList above, if an option is unchecked then an empty - # list, [], will be what's in varDict. If an option is checked then a - # list with the value "Yes", ["Yes"], will be what's in varDict. In - # Python, a conditional expression can be whether or not a data - # structure is empty. In these cases, an empty data structure, - # e.g., an empty list, an empty tuple, an empty dictionary, - # conditionally test to False while non empty data structures - # conditionally test to True. In the if statements below, every varDict - # lookup returns a list: either [] or ["Yes"]. 
I think the constructs - # below or more elegant and easier to understand. - if varDict['Run SnowAmt/QPF Check?']: - # Call the SnowAmt/QPF check method - self._runSnowAmtQPFCheck(timeRange) - if varDict['Run SnowAmt/Wx Check?']: - # Call the SnowAmt/Wx check method - self._runSnowAmtWxCheck(timeRange) - if varDict['Run QPF/PoP Check?']: - # Call the QPF/PoP check method - self._runQPFPoPCheck(timeRange) - if varDict['Run QPF/Wx Check?']: - # Call the QPF/Wx check method - self._runQPFWxCheck(timeRange) - message = 'SnowAmtQPFPoPWxCheck complete.' - if self.inconsistent: - message = '%s Inconsistencies found! Grids highlighted %s and %s.' % ( - message, inconGridColor, tempGridColor) - self.statusBarMsg(message, 'S') - else: - self.statusBarMsg(message, 'R') - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +#---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# SnowAmtQPFPoPWxCheck +# +# Author: Jay Smith, WFO Fairbanks, jay.smith@noaa.gov, 907-458-3721 +# Version: 1.0.0, 09/14/2006 - Initial version +# 1.0.1, 10/12/2006 - Added PoP/QPF check at request of DSPAC +# 1.0.2, 10/18/2006 - Changed PoP/QPF check to treat the PoP as +# floating. Instead of checking each individual PoP grid +# against its corresponding QPF grid, the max of all the +# PoP grids overlapping a QPF grid will be checked. +# 1.1.0, 01/25/2007 - Added options to choose which checks to run. +# Reorganized code so that each check is its own method. +# Added a check for QPF and Wx. Added highlighting for the +# created temporary grids. 
+# 1.1.1, 02/01/2007 - Changed the SnowAmt/Wx check to return +# consistent results for SnowAmt > 0 and Wx grid containing +# S, SW, or IP regardless of whether the frozen precip is +# mixed with freezing and/or liquid precip. +# 1.2.0, 02/13/2007 - Added a configuration option to provide a CWA +# edit area to run the procedure over. A bad edit area or no +# edit area will result in running over the whole domain. +# Modified the SnowAmt/Wx and QPF/Wx checks to handle two +# cases. Case 1: The SnowAmt/QPF grid is 6-hr long and starts +# at 00, 06, 12, or 18 UTC. Then only one of the corresponding +# Wx grids has to meet the consistency rule. Case 2: The +# SnowAmt/QPF grid does not meet the case 1 definition. Then +# all of the corresponding Wx grids must meet the consistency +# rule. +# The procedure performs the following checks: +# 1. If SnowAmt present and >= 0.5 inches, then corresponding QPF grids +# must add up to 0.01 inches. +# 2. If SnowAmt >= 0.1 inches, then there are two cases: +# a. If the SnowAmt grid is exactly 6 hours long and starts at 00, 06, 12, +# or 18 UTC, then at least one of the corresponding Wx grids must have +# S, SW, or IP. +# b. If the SnowAmt grid does not adhere to the time constraints listed in +# in the previous paragraph, then all of the corresponding Wx grids +# must have S, SW, or IP. This more stringent test is required because +# with grids offset from the NDFD time constraints, it's possible for +# the GFE to evaluate the grids as consistent using an "any" +# criteria but have the NDFD flag those same grids as inconsistent. +# 3. If QPF > 0, then at least one of the corresponding PoP grids must be > 0 +# 4. If QPF > 0, then there are two cases: +# a. If the QPF grid is exactly 6 hours long and starts at 00, 06, 12, or 18 +# UTC, then at least one of the corresponding Wx grids must have R, RW, +# S, SW, RS, IP, L, ZR, ZL. +# b. 
If the QPF grid does not adhere to the time constraints listed in the +# previous paragraph, then all corresponding Wx grids must contain a +# precipitating weather type. This more stringent test is required +# because with grids offset from the NDFD time constraints, it's +# possible for the GFE to evaluate grids as consistent using an "any" +# criteria but have the NDFD flag those same grids as inconsistent. +# For all of the checks above, if the initial threshold is not exceeded, then +# the two grids are consistent by definition. In other words: +# 1. If SnowAmt < 0.5, then SnowAmt and QPF are always consistent. +# 2. If SnowAmt < 0.1, then SnowAmt and Wx are always consistent. +# 3. If QPF = 0, then QPF and PoP are always consistent. +# 4. If QPF = 0, then QPF and Wx are always consistent. +# For the Wx checks above, only the Wx type is considered. +# +# ****** NOTE NOTE NOTE NOTE ****** +# At this time, the check for two 6-hour QPF grids vs. one 12-hr PoP grid +# is not implemented because neither of those grid definitions is implemented +# in the GFE baseline. I don't know how to do a check on grids that don't +# exist. +# ****** NOTE NOTE NOTE NOTE ****** +# +# If discrepancies are found, then the "bad" grids will be highlighted. +# Temporary grids showing where the discrepancies occur will be created and +# also highlighted. +# +# Dealing with QPF and SnowAmt is always a pain, because they are "cumulative" +# elements. This procedure will account for the possibility that the SnowAmt and +# QPF grids are not the same duration. It will also account for the possibility +# that the SnowAmt and QPF grids are not aligned on either or both ends. +# The only sane way to handle either situation is to believe that the QPF +# accumulation happens uniformly across the grid's duration and to use +# the proportional amount of the QPF that corresponds to the SnowAmt grid's +# duration. Some examples: +# 1. 
The QPF grid is 3 hours long and there are 3, 1-hour, SnowAmt grids. +# Each SnowAmt grid will be compared to 1/3 the value of the QPF grid. +# 2. The last two hours of a 3-hour QPF grid overlaps a 2-hour SnowAmt grid. +# The SnowAmt grid will be compared to 2/3 the value of the QPF grid. +# 3. Two 3-hour QPF grids align with one 6-hour SnowAmt grid. The first QPF +# grid will be compared to the SnowAmt grid. If the consistency check passes +# on that comparison, the program will continue. If the consistency check +# fails, then the sum of the two QPF grids will be compared to the SnowAmt +# grid. +# 4. The last four hours of a 6-hour QPF grid and the first two hours of a +# 3-hour QPF grid overlap a 6-hour SnowAmt grid. The SnowAmt grid will be +# compared to 2/3 of the first QPF grid. If the consistency check passes, +# the program will continue. If the consistency check fails, then 2/3 of the +# first QPF grid will be added to 2/3 of the second QPF grid and that QPF +# sum will be compared against the SnowAmt grid. +# +# Confused yet? Of course, all of these gyrations can be avoided if the +# QPF and SnowAmt grids are aligned and of the same duration. +# +# Unfortunately, the GFE does not provide a way to deal with proportional +# amounts of the accumulative grids, so I have done this. +# +# I've written this code such that it's optimized to minimize memory usage +# (at least I think I've done that). As a result, it's not particularly +# optimized for ifpServer database access. In fact, I retrieve the various +# grids from the ifpServer database many times during the procedure's run. +# This will have an impact on how fast the procedure runs (it'll run slower +# than if I had optimized for ifpServer database access). 
The choice to favor +# memory optimization comes from my belief that there are still "memory leak" +# problems in the GFE and that the consequences of those problems will be most +# manifest when this procedure is most likely to be run (near the end of the +# shift). Funky memory problems are a prime cause of funky application +# behavior like application crashes or spontaneous logouts. So, this procedure +# basically reads a grid into memory, keeps it as long as it's needed, and +# then discards it. +# +# Finally, this procedure is also intended to provide an example to other +# developers of how to write and document code. I have reservations as to how +# well I've succeeded at that task. The code is heavily documented, probably +# excessively so. Also, it's not as well as organized as it could be. As you +# look through the various methods, it should become quickly apparent that +# there is a lot of repeated code. I've consciously left the code this way in +# the hopes that it will be easier to understand by more novice programmers +# and because the code hasn't quite grown to the point where updating the +# repeating code is onerous or overly error-prone. It would be better to +# capture the repeating code in separate methods, but keeping track of the +# where you are in the code becomes harder the more you have to jump around +# from method to method. As with all things, there are trade-offs involved. +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +MenuItems = ["Consistency"] + +VariableList = [] +VariableList.append(('Check_Cleanup', 'Check', 'radio', ['Check', 'Cleanup'])) +VariableList.append(('Run SnowAmt/QPF Check?', ['Yes'], 'check', ['Yes'])) +VariableList.append(('Run SnowAmt/Wx Check?', ['Yes'], 'check', ['Yes'])) +VariableList.append(('Run QPF/PoP Check?', ['Yes'], 'check', ['Yes'])) +VariableList.append(('Run QPF/Wx Check?', ['Yes'], 'check', ['Yes'])) +VariableList.append(('If "Cleanup" is selected, then only cleanup actions will run.\nNo checks will be made, regardless of the above settings.', '', 'label')) + +#### Config section +# Both the QPF and SnowAmt grids have values which are floating point +# numbers. This means comparisons must use a tolerance value. In other +# words, 0.5 may be represented in machine numbers as 0.49999999999 or +# 0.500000000001. By specifying a tolerance value, we account for the +# vagaries of machine representation of floating point numbers while +# keeping the precision of the comparisons to acceptable levels. Depending +# on the comparison being done, the tolerance value will be added to or +# subtracted from the comparison value to allow for machine error in the +# floating point number representation. +# By default in the GFE, QPF precision is to the nearest one-hundredth while +# SnowAmt precision is to the nearest tenth. +qpfTol = 0.00001 # 1/100,000 tolerance vs 1/100 precision +snowAmtTol = 0.0001 # 1/10,000 tolerance vs 1/10 precision +# Inconsistent grid highlight color. One size fits all. To turn off +# highlighting, set the variable to the empty string, ''. +inconGridColor = 'red' +# Temporary grid highlight color. One size fits all. To turn off highlighting, +# set the variable to the empty string, ''. +tempGridColor = 'orange' +# Name of CWA edit area to use instead of running the procedure over the +# whole domain. Set to the empty string, '', if you want the procedure to +# always run over the whole domain. 
If the procedure has a problem with the
+# edit area you provide, it will run over the whole domain. You should probably
+# choose an edit area that is slightly larger than your entire CWA. It's
+# possible that when mapping your GFE grids to NDFD grids that the NDFD thinks
+# some GFE grid cells are in your CWA that the GFE does not think are in your
+# CWA. Using an edit area slightly larger than the CWA, like the ISC_Send_Area
+# which is the mask used when sending grids to the NDFD, should eliminate the
+# possibility of the NDFD intermittently flagging CWA border "points" as
+# inconsistent. Note: running the procedure over a subset of the entire GFE
+# domain does not really provide any performance gains. Given the way the
+# underlying array data structure works, calculations are almost always made
+# at every single grid point first and then a mask is applied to limit the
+# meaningful results to the edit area. For the purposes of this procedure, the
+# values outside the edit area are set to the appropriate "consistent" result.
+# The real benefit of this option is it limits the inconsistent results to the
+# areas the forecaster really cares about, which should lessen the workload of
+# using this procedure. Marine Offices: Make sure the edit area provided
+# includes your marine zones.
+cwaEditArea = 'ISC_Send_Area'
+#### Config section end
+
+import SmartScript
+from numpy import *
+
+class Procedure (SmartScript.SmartScript):
+    def __init__(self, dbss):
+        SmartScript.SmartScript.__init__(self, dbss)
+
+    def __cleanup(self, timeRange):
+        # Remove any temporary grids created previously.
+        for element in (
+            'SnowAmtQPFInconsistent', 'SnowAmtWxInconsistent',
+            'QPFPoPInconsistent', 'QPFWxInconsistent'):
+            try:
+                # From SmartScript
+                self.unloadWE('Fcst', element, 'SFC')
+            except:
+                # A failure is almost certainly no grids to unload.
+                pass
+        # Turn off any highlights. 
From SmartScript + self.highlightGrids('Fcst', 'SnowAmt', 'SFC', timeRange, inconGridColor, on=0) + self.highlightGrids('Fcst', 'QPF', 'SFC', timeRange, inconGridColor, on=0) + self.highlightGrids('Fcst', 'Wx', 'SFC', timeRange, inconGridColor, on=0) + self.highlightGrids('Fcst', 'PoP', 'SFC', timeRange, inconGridColor, on=0) + return + + def __checkConfigValueTypes(self): + import types + message = '' + badValues = False + if not type(inconGridColor) is bytes: + message = '%sThe "inconGridColor" variable is not defined as a string value. Please contact your IFPS focal point to fix this problem.\n' % message + badValues = True + if not type(tempGridColor) is bytes: + message = '%sThe "tempGridColor" variable is not defined as a string value. Please contact your IFPS focal point to fix this problem.\n' % message + badValues = True + if not type(cwaEditArea) is bytes: + message = '%sThe "cwaEditArea" variable is not defined as a string value. Please contact your IFPS focal point to fix this problem.\n' % message + badValues = True + if badValues: + message = '%sYou will not be able to run the procedure until the problem is corrected.' % message + # The next two commands are from SmartScript + self.statusBarMsg(message, 'U') + self.cancel() + return + + def _runSnowAmtQPFCheck(self, timeRange): + # This method implements the check that if SnowAmt >= 0.5, then + # QPF must be >= 0.01. + + # There can be a significant difference between the values stored + # in memory and the values returned from the database. This is because + # when values are saved, the element's precision (as defined in + # serverConfig.py/localConfig.py) is enforced. Values in memory do not + # have the element's precision enforced; in fact, they have the + # machine precision of the underlying data type. + # If there are locks, post an urgent message and return from the method. 
+
+        message = ''
+        # lockedByMe is from SmartScript
+        if self.lockedByMe('QPF', 'SFC'):
+            message = '%sYou have the QPF grid locked. Please save the QPF grid.\n' % message
+        if self.lockedByMe('SnowAmt', 'SFC'):
+            message = '%sYou have the SnowAmt grid locked. Please save the SnowAmt grid.\n' % message
+        # lockedByOther is from SmartScript
+        if self.lockedByOther('QPF', 'SFC'):
+            message = '%sThe QPF grid is locked by someone else. Please have that person save the QPF grid.\n' % message
+        if self.lockedByOther('SnowAmt', 'SFC'):
+            message = '%sThe SnowAmt grid is locked by someone else. Please have that person save the SnowAmt grid.\n' % message
+        if message:
+            message = '%sThe SnowAmt/QPF Check was not run.' % message
+            self.statusBarMsg(message, 'U')
+            # I return instead of aborting because the user may have asked for
+            # other tests that do not have locked grid problems.
+            return
+
+        # Make sure there are actually SnowAmt grids in the time range.
+        # The self.getGrids command will return None if there are no grids
+        # in the time range for mode='First' and noDataError=0. The None
+        # variable cannot be iterated over. Rather than trap in a try/except,
+        # I'll just check for the condition. This may not be the most
+        # Pythonic way of doing things, but it allows me to avoid having
+        # a bunch of code indented beneath a try statement. If no SnowAmt
+        # grids are found, post an urgent message and return from the method.
+        # getGrids is from SmartScript
+        snowAmtInfoList = self.getGridInfo('Fcst', 'SnowAmt', 'SFC', timeRange)
+        if [] == snowAmtInfoList:
+            message = 'There are no SnowAmt grids in the time range you selected.\nThe SnowAmt/QPF Check did not run.'
+            self.statusBarMsg(message, 'U')
+            # I return instead of aborting because the user may have asked for
+            # other tests that do not have missing grid problems. 
+ return + + # getGridInfo is from SmartScript + # One might ask why I don't just return the result of self.getGrids + # to a variable and iterate over that. I'm trying to minimize the + # memory footprint of the procedure. Reading all the grids into a + # variable could be a fairly large memory hit. The construct below + # only reads one SnowAmt grid at a time into memory, the one that's + # being checked. By using the cache=0 switch on all the self.getGrids + # command, I prevent the GFE from saving the grids into memory for me. + # The Python builtin command enumerate loops over an iterable object + # and returns a 2-tuple containing the current index of the + # iteration and the object at that index. In cases where I need + # both the index and the object, I think this construct is more + # elegant than: + # for i in xrange(len(iterableObject)): + # object = iterableObject[i] + snowAmtGrids = self.getGrids('Fcst', 'SnowAmt', 'SFC', + timeRange, mode='List', noDataError=0,cache=0) + for snowAmtIndex, snowAmtGrid in enumerate(snowAmtGrids): + # greater_equal is from Numeric. For the given array and + # threshold, a new array of the same dimensions as the input + # array is returned. The new array has the value 1 where the + # input array was greater than or equal to the threshold and + # has the value 0 elsewhere. + halfInchMask = greater_equal(snowAmtGrid, 0.5 - snowAmtTol) + gridTR = snowAmtInfoList[snowAmtIndex].gridTime() + # zeros is from Numeric. It creates an array of all zeros for + # the given dimensions and numeric type. + qpfSum = self.empty() + qpfGrids = self.getGrids( + 'Fcst', 'QPF', 'SFC', gridTR, mode='List', noDataError=0, + cache=0) + if qpfGrids is None: + message = '''There are no QPF grids in time range %s. 
+The SnowAmt/QPF Check skipped the time range.''' % gridTR + self.statusBarMsg(message, 'U') + continue + qpfInfoList = self.getGridInfo('Fcst', 'QPF', 'SFC', gridTR) + for qpfIndex, qpfGrid in enumerate(qpfGrids): + snowAmtGridStartTime = gridTR.startTime().unixTime() + qpfGridTR = qpfInfoList[qpfIndex].gridTime() + qpfGridStartTime = qpfGridTR.startTime().unixTime() + fraction = 1.0 + if qpfGridStartTime < snowAmtGridStartTime: + diff = snowAmtGridStartTime - qpfGridStartTime + fraction -= (float(diff) / qpfGridTR.duration()) + snowAmtGridEndTime = gridTR.endTime().unixTime() + qpfGridEndTime = qpfGridTR.endTime().unixTime() + if qpfGridEndTime > snowAmtGridEndTime: + diff = qpfGridEndTime - snowAmtGridEndTime + fraction -= (float(diff) / qpfGridTR.duration()) + # For some reason, the construct: + # qpfSum = qpfSum + (qpfGrid * fraction) + # doesn't assign the expression evaluation back to qpfSum. + # Thus, I use a temporary variable. + qpfTemp = qpfSum + (qpfGrid * fraction) + qpfSum = qpfTemp + del qpfTemp + # less is from Numeric. It behaves analogously to greater_equal, + # described above. + qpfMask = less(qpfSum, 0.01 + qpfTol) + # The following is the "truth" table for the logical + # comparison. + # SnowAmt >= 0.5, 1; SnowAmt < 0.5, 0 + # QPF < 0.01, 1; QPF >= 0.01, 0 + # SnowAmt >= 0.5 (1) and QPF < 0.01 (1) = 1 (Bad result) + # SnowAmt >= 0.5 (1) and QPF >= 0.01 (0) = 0 (Good result) + # SnowAmt < 0.5 (0) and QPF < 0.01 (1) = 0 (Good result) + # SnowAmt < 0.5 (0) and QPF >= 0.01 (0) = 0 (Good result) + # logical_and is from Numeric + consistMask = logical_and(halfInchMask, qpfMask) + # Now, apply the CWA mask. There's an assumption here that + # all offices will use a mask and provide a valid one, which + # means this step does something meaningful. If that assumption + # does not hold, then the next statement doesn't actually + # change anything, even though each and every grid point has a + # comparison check made. + # where is from Numeric. 
The first argument is a mask. + # The second argument is/are the value/values to use at the + # array points where the mask is one. The third argument + # is/are the value/values to use at the array points + # where the mask is zero. For this comparison, I want + # the values of consistMask where self.cwaMask is one and + # I want the "good result", which is zero, where + # self.cwaMask is zero. + consistMask[logical_not(self.cwaMask)] = 0 + # ravel and sometrue are from Numeric. + if not sometrue(ravel(consistMask)): + # This is the good result, even though it may not be + # intuitive. The ravel function reduces the rank of the + # array by one. Since we had a 2-d array, the ravel + # function creates a 1-d array (a vector) such that + # reading the 2-d array from left-to-right, top-to- + # bottom returns the same values as reading the 1-d + # array from left-to-right. The sometrue function + # performs a logical or on subsequent element pairs + # in the 1-d array and returns the final result. If + # there's no inconsistency, the result will be 0. + # Thus, negating the sometrue result gives us the + # positive outcome. Phew. + # Since QPF is an accumulative element, we don't need + # to continue the loop once the QPF sum meets the + # threshold. + break + else: + # This block will only execute if the for loop runs to + # completion, i.e., the break statement is not executed. + # So, if we get here, we have an inconsistency and need to + # highlight the appropriate grids. + if inconGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmt', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'QPF', 'SFC', gridTR, inconGridColor) + # createGrid is from SmartScript + # Since this block of code only executes if the for loop + # runs to completion, then the value of consistMask from + # the for loop will contain all of the inconsistencies. 
+ self.createGrid( + 'Fcst', 'SnowAmtQPFInconsistent', 'SCALAR', consistMask, + gridTR, descriptiveName='SnowAmtQPFInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmtQPFInconsistent', 'SFC', gridTR, + tempGridColor) + self.inconsistent = True + # While not required, I like to terminate my methods with a return + # statement to make it clear this is where the method ends. + return + + def _runSnowAmtWxCheck(self, timeRange): + # This implements the check that if SnowAmt >= 0.1, then the Wx grid + # must contain S, SW, or IP, regardless of whether or not there is + # any freezing or liquid types. Finally, the check does not look at + # anything other than the Wx type. In other words, the check will be + # okay if SnowAmt != 0 and Wx has Chc:S:- or Def:SW:-- or Lkly:S:+. + + # There can be a significant difference between the values stored + # in memory and the values returned from the database. This is because + # when values are saved, the element's precision (as defined in + # serverConfig.py/localConfig.py) is enforced. Values in memory do not + # have the element's precision enforced; in fact, they have the + # machine precision of the underlying data type. + # If there are locks, post an urgent message and return from the method. + message = '' + # lockedByMe is from SmartScript + if self.lockedByMe('Wx', 'SFC'): + message = '%sYou have the Wx grid locked. Please save the Wx grid.\n' % message + if self.lockedByMe('SnowAmt', 'SFC'): + message = '%sYou have the SnowAmt grid locked. Please save the SnowAmt grid.\n' % message + # lockedByOther is from SmartScript + if self.lockedByOther('Wx', 'SFC'): + message = '%sThe Wx grid is locked by someone else. Please have that person save the Wx grid.\n' % message + if self.lockedByOther('SnowAmt', 'SFC'): + message = '%sThe SnowAmt grid is locked by someone else. 
Please have that person save the SnowAmt grid.\n' % message
+        if message:
+            message = '%sThe SnowAmt/Wx Check was not run.' % message
+            self.statusBarMsg(message, 'U')
+            # I return instead of aborting because the user may have asked for
+            # other tests that do not have locked grid problems.
+            return
+
+        # Make sure there are actually SnowAmt grids in the time range.
+        # The self.getGrids command will return None if there are no grids
+        # in the time range for noDataError=0. The None
+        # variable cannot be iterated over. Rather than trap in a try/except,
+        # I'll just check for the condition. This may not be the most
+        # Pythonic way of doing things, but it allows me to avoid having
+        # a bunch of code indented beneath a try statement. If no SnowAmt
+        # grids are found, post an urgent message and return from the method.
+        # getGrids is from SmartScript
+        snowAmtInfoList = self.getGridInfo('Fcst', 'SnowAmt', 'SFC', timeRange)
+        if [] == snowAmtInfoList:
+            message = 'There are no SnowAmt grids in the time range you selected.\nThe SnowAmt/Wx Check did not run.'
+            self.statusBarMsg(message, 'U')
+            # I return instead of aborting because the user may have asked for
+            # other tests that do not have missing grid problems.
+            return
+
+        snowAmtGrids = self.getGrids(
+            'Fcst', 'SnowAmt', 'SFC', timeRange, mode='List', noDataError=0,
+            cache=0)
+        for snowAmtIndex, snowAmtGrid in enumerate(snowAmtGrids):
+            nonZeroMask = greater_equal(snowAmtGrid, 0.1 - snowAmtTol)
+            gridTR = snowAmtInfoList[snowAmtIndex].gridTime()
+
+            wxInfoList = self.getGridInfo('Fcst', 'Wx', 'SFC', gridTR)
+            if [] == wxInfoList:
+                message = '''There are no Wx grids in time range %s.
+The SnowAmt/Wx Check skipped the time range.''' % gridTR
+                self.statusBarMsg(message, 'U')
+                continue
+            # There are two cases, which I'll capture in individual methods
+            # If the SnowAmt grid is exactly 6 hours long and starts at
+            # 00, 06, 12, or 18 UTC, then only one overlapping Wx grid needs
+            # to match. 
Otherwise, all overlapping Wx grids need to match. + if gridTR.duration() / 3600 == 6 and \ + gridTR.startTime().hour in (0, 6, 12, 18): + self._snowAmtWxCheckLocked(nonZeroMask, gridTR, wxInfoList) + else: + self._snowAmtWxCheckUnlocked(nonZeroMask, gridTR, wxInfoList) + return + + def _snowAmtWxCheckLocked(self, nonZeroMask, gridTR, wxInfoList): + # The "Locked" comes from the idea that if the SnowAmt grid meets + # the duration and start time constraints, then it's been "locked". + # I need to capture the consistency masks for each individual Wx grid + # just in case I end up with inconsistencies. + consistMaskList = [] + for wxIndex, wxGrid in enumerate(self.getGrids( + 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, + cache=0)): + # wxMask is from SmartScript + sMask = self.wxMask(wxGrid, ':S:') + swMask = self.wxMask(wxGrid, ':SW:') + ipMask = self.wxMask(wxGrid, ':IP:') + snowMask = logical_or(logical_or(sMask, swMask), ipMask) + del (sMask, swMask, ipMask) + wxMask = logical_not(snowMask) + # "Truth" table for the logical comparison follows + # SnowAmt >= 0.1, 1; SnowAmt < 0.1, 0 + # Wx has S, SW, or IP, 0; Wx doesn't have S, SW, or IP, 1 + # SnowAmt >= 0.1 (1) and Wx has (0) = 0 (Good result) + # SnowAmt >= 0.1 (1) and Wx doesn't have (1) = 1 (Bad result) + # SnowAmt < 0.1 (0) and Wx has (0) = 0 (Good result) + # SnowAmt < 0.1 (0) and Wx doesn't have (1) = 0 (Good result) + # + consistMask = logical_and(nonZeroMask, wxMask) + consistMask[logical_not(self.cwaMask)] = 0 + consistMaskList.append(consistMask) + if not sometrue(ravel(consistMask)): + # There were no inconsistencies with this Wx grid. Since only + # one needs to be consistent, we don't need to do any more + # checks. + break + else: + # This block will only execute if the for loop runs to + # completion, i.e., the break statement is not executed. + # So, if we get here, we have an inconsistency and need to + # highlight the appropriate grids. 
+ if inconGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmt', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'Wx', 'SFC', gridTR, inconGridColor) + # createGrid is from SmartScript + for index in range(len(wxInfoList)): + # Create temporary grids for each Wx grid. Limit the start and + # end times of the temporary grids so that they don't extend + # beyond the start and end times of the corresponding SnowAmt + # grid. + wxGridTR = wxInfoList[index].gridTime() + tempGridStartTime = wxGridTR.startTime().unixTime() + if tempGridStartTime < gridTR.startTime().unixTime(): + tempGridStartTime = gridTR.startTime().unixTime() + tempGridEndTime = wxGridTR.endTime().unixTime() + if tempGridEndTime > gridTR.endTime().unixTime(): + tempGridEndTime = gridTR.endTime().unixTime() + tempGridDur = (tempGridEndTime - tempGridStartTime) / 3600 + offset = (tempGridStartTime - \ + self.timeRange0_1.startTime().unixTime()) / 3600 + # Because the time range may be different for the temporary + # grid, I need to create and use that time range when + # creating the temporary grid. + tempGridTR = self.createTimeRange( + offset, offset+tempGridDur, 'Zulu') + self.createGrid( + 'Fcst', 'SnowAmtWxInconsistent', 'SCALAR', + consistMaskList[index], tempGridTR, + descriptiveName='SnowAmtWxInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmtWxInconsistent', 'SFC', gridTR, + tempGridColor) + self.inconsistent = True + return + + def _snowAmtWxCheckUnlocked(self, nonZeroMask, gridTR, wxInfoList): + # The "Unlocked" comes from the idea that if the SnowAmt grid does + # not meet the duration and start time constraints, then it's been + # left "unlocked". 
+ for wxIndex, wxGrid in enumerate(self.getGrids( + 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, + cache=0)): + # wxMask is from SmartScript + sMask = self.wxMask(wxGrid, ':S:') + swMask = self.wxMask(wxGrid, ':SW:') + ipMask = self.wxMask(wxGrid, ':IP:') + snowMask = logical_or(logical_or(sMask, swMask), ipMask) + del (sMask, swMask, ipMask) + wxMask = logical_not(snowMask) + # "Truth" table for the logical comparison follows + # SnowAmt >= 0.1, 1; SnowAmt < 0.1, 0 + # Wx has S, SW, or IP, 0; Wx doesn't have S, SW, or IP, 1 + # SnowAmt >= 0.1 (1) and Wx has (0) = 0 (Good result) + # SnowAmt >= 0.1 (1) and Wx doesn't have (1) = 1 (Bad result) + # SnowAmt < 0.1 (0) and Wx has (0) = 0 (Good result) + # SnowAmt < 0.1 (0) and Wx doesn't have (1) = 0 (Good result) + # + # All Wx grids overlapping the SnowAmt grid must be consistent. + consistMask = logical_and(nonZeroMask, wxMask) + consistMask[logical_not(self.cwaMask)] = 0 + if sometrue(ravel(consistMask)): + # I'll highlight the SnowAmt grids and Wx grids in + # gridTR as I did with QPF. However, I'll make + # temporary grids here using the Wx grid's time + # range but, the temporary grid cannot start before + # the start of the corresponding SnowAmt grid nor can + # it end after the end of the corresponding SnowAmt grid. 
+ wxGridTR = wxInfoList[wxIndex].gridTime() + tempGridStartTime = wxGridTR.startTime().unixTime() + if tempGridStartTime < gridTR.startTime().unixTime(): + # Clip to start of SnowAmt grid + tempGridStartTime = gridTR.startTime().unixTime() + tempGridEndTime = wxGridTR.endTime().unixTime() + if tempGridEndTime > gridTR.endTime().unixTime(): + # Clip to end of SnowAmtGrid + tempGridEndTime = gridTR.endTime().unixTime() + tempGridDur = (tempGridEndTime - tempGridStartTime) / 3600 + offset = (tempGridStartTime - \ + self.timeRange0_1.startTime().unixTime()) / 3600 + # Since either the front or end of the Wx grid's + # time range may have been clipped, create a time + # range using those values. + tempGridTR = self.createTimeRange( + offset, offset+tempGridDur, 'Zulu') + self.createGrid( + 'Fcst', 'SnowAmtWxInconsistent', 'SCALAR', consistMask, + tempGridTR, descriptiveName='SnowAmtWxInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmtWxInconsistent', 'SFC', gridTR, + tempGridColor) + if inconGridColor: + self.highlightGrids( + 'Fcst', 'SnowAmt', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'Wx', 'SFC', wxGridTR, inconGridColor) + self.inconsistent = True + return + + def _runQPFPoPCheck(self, timeRange): + # This method implements the check that if any QPF grid is non zero + # then one of the corresponding floating PoP grids must also be non + # zero. + + # There can be a significant difference between the values stored + # in memory and the values returned from the database. This is because + # when values are saved, the element's precision (as defined in + # serverConfig.py/localConfig.py) is enforced. Values in memory do not + # have the element's precision enforced; in fact, they have the + # machine precision of the underlying data type. + # If there are locks, post an urgent message and return from the method. 
+
+        message = ''
+        # lockedByMe is from SmartScript
+        if self.lockedByMe('QPF', 'SFC'):
+            message = '%sYou have the QPF grid locked. Please save the QPF grid.\n' % message
+        if self.lockedByMe('PoP', 'SFC'):
+            message = '%sYou have the PoP grid locked. Please save the PoP grid.\n' % message
+        # lockedByOther is from SmartScript
+        if self.lockedByOther('QPF', 'SFC'):
+            message = '%sThe QPF grid is locked by someone else. Please have that person save the QPF grid.\n' % message
+        if self.lockedByOther('PoP', 'SFC'):
+            message = '%sThe PoP grid is locked by someone else. Please have that person save the PoP grid.\n' % message
+        if message:
+            message = '%sThe QPF/PoP Check was not run.' % message
+            self.statusBarMsg(message, 'U')
+            # I return instead of aborting because the user may have asked for
+            # other tests that do not have locked grid problems.
+            return
+
+        # Make sure there are actually QPF grids in the time range.
+        # The self.getGrids command will return None if there are no grids
+        # in the time range for mode='First' and noDataError=0. The None
+        # variable cannot be iterated over. Rather than trap in a try/except,
+        # I'll just check for the condition. This may not be the most
+        # Pythonic way of doing things, but it allows me to avoid having
+        # a bunch of code indented beneath a try statement. If no QPF
+        # grids are found, post an urgent message and return from the method.
+        # getGrids is from SmartScript
+        qpfInfoList = self.getGridInfo('Fcst', 'QPF', 'SFC', timeRange)
+        if [] == qpfInfoList:
+            message = 'There are no QPF grids in the time range you selected.\nThe QPF/PoP Check did not run.'
+            self.statusBarMsg(message, 'U')
+            # I return instead of aborting because the user may have asked for
+            # other tests that do not have missing grid problems. 
+ return + qpfGrids = self.getGrids( + 'Fcst', 'QPF', 'SFC', timeRange, mode='List', noDataError=0, + cache=0) + for qpfIndex, qpfGrid in enumerate(qpfGrids): + gridTR = qpfInfoList[qpfIndex].gridTime() + + popGrid = self.getGrids( + 'Fcst', 'PoP', 'SFC', gridTR, mode='Max', noDataError=0, + cache=0) + if popGrid is None: + message = '''There are no PoP grids in time range %s. +The QPF/PoP Check skipped the time range.''' % gridTR + self.statusBarMsg(message, 'U') + continue + qpfNonZeroMask = greater(qpfGrid, qpfTol) + popZeroMask = equal(popGrid, 0) + # popZeroMask = 1 if PoP = 0; popZeroMask = 0 if PoP != 0 + # qpfNonZeroMask = 1 if QPF > 0; qpfNonZeroMask = 0 if QPF = 0 + # PoP = 0 (1) and QPF = 0 (0) => 0 (Good result) + # PoP != 0 (0) and QPF = 0 (0) => 0 (Good result) + # PoP != 0 (0) and QPF > 0 (1) => 0 (Good result) + # PoP = 0 (1) and QPF > 0 (1) => 1 (Bad result) + consistMask = logical_and(qpfNonZeroMask, popZeroMask) + consistMask[logical_not(self.cwaMask)] = 0 + if sometrue(ravel(consistMask)): + # The good result is if the logical_and returns zeros + # for every grid point, that is "none true". So, if + # the sometrue method evaluates True, there are + # inconsistencies. + self.createGrid( + 'Fcst', 'QPFPoPInconsistent', 'SCALAR', consistMask, gridTR, + descriptiveName='QPFPoPInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'QPFPoPInconsistent', 'SFC', gridTR, + tempGridColor) + if inconGridColor: + self.highlightGrids( + 'Fcst', 'QPF', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'PoP', 'SFC', gridTR, inconGridColor) + self.inconsistent = True + + ##### Edited by Rob Radzanowski (WFO-CTP) 03-16-2009 to add missing NDFD check for QPF=0 & PoP > 50 + ##### which is causing unexplained yellow banners due to lack of checking for this error. 
+ qpfZeroMask = equal(qpfGrid, 0) + popGrid = self.getGrids( + 'Fcst', 'PoP', 'SFC', gridTR, mode='Max', noDataError=0, cache=0) + popGreater50Mask = greater(popGrid, 50) + # popGreater50Mask = 1 if PoP > 50; popGreater50Mask = 0 if PoP <= 50 + # qpfZeroMask = 0 if QPF > 0; qpfZeroMask = 1 if QPF = 0 + # PoP > 50 (1) and QPF > 0 (0) => 0 (Good result) + # PoP > 50 (1) and QPF = 0 (1) => 1 (Bad result) + # PoP <= 50 (0) and QPF > 0 (0) => 0 (Good/Irrelevant result) + # PoP <= 50 (0) and QPF = 0 (1) => 0 (Good result) + + consistMask2 = logical_and(qpfZeroMask, popGreater50Mask) + consistMask2[logical_not(self.cwaMask)] = 0 + if sometrue(ravel(consistMask2)): + # The good result is if the logical_and returns zeros + # for every grid point, that is "none true". So, if + # the sometrue method evaluates True, there are + # inconsistencies. + self.createGrid( + 'Fcst', 'QPFPoPInconsistent', 'SCALAR', consistMask2, gridTR, + descriptiveName='QPFPoPInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + + if tempGridColor: + self.highlightGrids('Fcst', 'QPFPoPInconsistent', 'SFC', gridTR, tempGridColor) + if inconGridColor: + self.highlightGrids('Fcst', 'QPF', 'SFC', gridTR, inconGridColor) + self.highlightGrids('Fcst', 'PoP', 'SFC', gridTR, inconGridColor) + self.inconsistent = True + return + + def _runQPFWxCheck(self, timeRange): + # This method implements the check that if QPF non zero, then the + # corresponding Wx grids must contain a precipitable Wx type. Note: + # the method only checks the Wx type, no cov/prob, no inten, etc. + + # There can be a significant difference between the values stored + # in memory and the values returned from the database. This is because + # when values are saved, the element's precision (as defined in + # serverConfig.py/localConfig.py) is enforced. Values in memory do not + # have the element's precision enforced; in fact, they have the + # machine precision of the underlying data type. 
+
+        # If there are locks, post an urgent message and return from the method.
+        message = ''
+        # lockedByMe is from SmartScript
+        if self.lockedByMe('QPF', 'SFC'):
+            message = '%sYou have the QPF grid locked. Please save the QPF grid.\n' % message
+        if self.lockedByMe('Wx', 'SFC'):
+            message = '%sYou have the Wx grid locked. Please save the Wx grid.\n' % message
+        # lockedByOther is from SmartScript
+        if self.lockedByOther('QPF', 'SFC'):
+            message = '%sThe QPF grid is locked by someone else. Please have that person save the QPF grid.\n' % message
+        if self.lockedByOther('Wx', 'SFC'):
+            message = '%sThe Wx grid is locked by someone else. Please have that person save the Wx grid.\n' % message
+        if message:
+            message = '%sThe QPF/Wx Check was not run.' % message
+            self.statusBarMsg(message, 'U')
+            # I return instead of aborting because the user may have asked for
+            # other tests that do not have locked grid problems.
+            return
+
+        # Make sure there are actually QPF grids in the time range.
+        # I'll just check for the condition. If no QPF
+        # grids are found, post an urgent message and return from the method.
+        qpfInfoList = self.getGridInfo('Fcst', 'QPF', 'SFC', timeRange)
+        if [] == qpfInfoList:
+            message = 'There are no QPF grids in the time range you selected.\nThe QPF/PoP Check did not run.'
+            self.statusBarMsg(message, 'U')
+            # I return instead of aborting because the user may have asked for
+            # other tests that do not have missing grid problems.
+            return
+        for qpfIndex, qpfGrid in enumerate(self.getGrids(
+            'Fcst', 'QPF', 'SFC', timeRange, mode='List', noDataError=0,
+            cache=0)):
+            qpfNonZeroMask = greater(qpfGrid, qpfTol)
+            gridTR = qpfInfoList[qpfIndex].gridTime()
+            wxInfoList = self.getGridInfo('Fcst', 'Wx', 'SFC', gridTR)
+            if [] == wxInfoList:
+                message = '''There are no Wx grids in time range %s.
+The QPF/Wx Check skipped the time range.''' % gridTR
+                self.statusBarMsg(message, 'U')
+                continue
+            # There are two cases. 
If the QPF grid is exactly 6 hours long and + # starts at 00, 06, 12, or 18 UTC, then only one of the + # corresponding Wx grids needs to be consistent. Otherwise, all the + # corresponding Wx grids need to be consistent. + if gridTR.duration() / 3600 == 6 and gridTR.startTime().hour in (0, 6, 12, 18): + self._qpfWxCheckLocked(qpfNonZeroMask, gridTR, wxInfoList) + else: + self._qpfWxCheckUnlocked(qpfNonZeroMask, gridTR, wxInfoList) + return + + def _qpfWxCheckLocked(self, qpfNonZeroMask, gridTR, wxInfoList): + # The "Locked" comes from the idea that if the QPF grid is + # exactly 6 hours long and starts at 00, 06, 12, or 18 UTC, then it + # is "locked". + consistMaskList = [] + for wxIndex, wxGrid in enumerate(self.getGrids( + 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, + cache=0)): + # wxMask is from SmartScript + sMask = self.wxMask(wxGrid, ':S:') + swMask = self.wxMask(wxGrid, ':SW:') + ipMask = self.wxMask(wxGrid, ':IP:') + snowMask = logical_or(logical_or(sMask, swMask), ipMask) + del (sMask, swMask, ipMask) + rMask = self.wxMask(wxGrid, ':R:') + rwMask = self.wxMask(wxGrid, ':RW:') + lMask = self.wxMask(wxGrid, ':L:') + zlMask = self.wxMask(wxGrid, ':ZL:') + zrMask = self.wxMask(wxGrid, ':ZR:') + # logical_or is from Numeric + rainMask = logical_or( + rMask, logical_or( + rwMask, logical_or( + lMask, logical_or(zlMask, zrMask)))) + del (rMask, rwMask, lMask, zlMask, zrMask) + precipMask = logical_or(snowMask, rainMask) + del (snowMask, rainMask) + wxMask = logical_not(precipMask) + # QPF >= 0.01, 1; QPF < 0.01, 0 + # Wx has precip, 0; Wx doesn't have precip, 1 + # QPF >= 0.01 (1) and Wx has (0) = 0 (Good result) + # QPF >= 0.01 (1) and Wx doesn't have (1) = 1 (Bad result) + # QPF < 0.01 (0) and Wx has (0) = 0 (Good result) + # QPF < 0.01 (0) and Wx doesn't have (1) = 0 (Good result) + consistMask = logical_and(qpfNonZeroMask, wxMask) + consistMask[logical_not(self.cwaMask)] = 0 + consistMaskList.append(consistMask) + if not 
sometrue(ravel(consistMask)): + # There were no inconsistencies with this Wx grid. Since only + # one needs to be consistent, we don't need to do any more + # checks. + break + else: + # This block will only execute if the for loop runs to + # completion, i.e., the break statement is not executed. + # So, if we get here, we have an inconsistency and need to + # highlight the appropriate grids. + if inconGridColor: + self.highlightGrids( + 'Fcst', 'QPF', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'Wx', 'SFC', gridTR, inconGridColor) + # createGrid is from SmartScript + for index in range(len(wxInfoList)): + # Create temporary grids for each Wx grid. Limit the time + # range of the temporary grid so that it doesn't start any + # earlier or any later than the corresponding QPF grid. + wxGridTR = wxInfoList[index].gridTime() + tempGridStartTime = wxGridTR.startTime().unixTime() + if tempGridStartTime < gridTR.startTime().unixTime(): + tempGridStartTime = gridTR.startTime().unixTime() + tempGridEndTime = wxGridTR.endTime().unixTime() + if tempGridEndTime > gridTR.endTime().unixTime(): + tempGridEndTime = gridTR.endTime().unixTime() + tempGridDur = (tempGridEndTime - tempGridStartTime) / 3600 + offset = (tempGridStartTime - \ + self.timeRange0_1.startTime().unixTime()) / 3600 + # Since the temporary grid could have a different time range + # than the Wx grid, I need to create and use that time range + # when creating the temporary grid. 
+ tempGridTR = self.createTimeRange( + offset, offset+tempGridDur, 'Zulu') + self.createGrid( + 'Fcst', 'QPFWxInconsistent', 'SCALAR', + consistMaskList[index], tempGridTR, + descriptiveName='QPFWxInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'QPFWxInconsistent', 'SFC', gridTR, + tempGridColor) + self.inconsistent = True + return + + def _qpfWxCheckUnlocked(self, qpfNonZeroMask, gridTR, wxInfoList): + # The "Unlocked" comes from the idea that if the QPF grid is not + # exactly 6 hours long and starting at 00, 06, 12, or 18 UTC, then it + # is "unlocked". + for wxIndex, wxGrid in enumerate(self.getGrids( + 'Fcst', 'Wx', 'SFC', gridTR, mode='List', noDataError=0, + cache=0)): + # wxMask is from SmartScript + sMask = self.wxMask(wxGrid, ':S:') + swMask = self.wxMask(wxGrid, ':SW:') + ipMask = self.wxMask(wxGrid, ':IP:') + snowMask = logical_or(logical_or(sMask, swMask), ipMask) + del (sMask, swMask, ipMask) + rMask = self.wxMask(wxGrid, ':R:') + rwMask = self.wxMask(wxGrid, ':RW:') + lMask = self.wxMask(wxGrid, ':L:') + zlMask = self.wxMask(wxGrid, ':ZL:') + zrMask = self.wxMask(wxGrid, ':ZR:') + # logical_or is from Numeric + rainMask = logical_or( + rMask, logical_or( + rwMask, logical_or( + lMask, logical_or(zlMask, zrMask)))) + del (rMask, rwMask, lMask, zlMask, zrMask) + precipMask = logical_or(snowMask, rainMask) + del (snowMask, rainMask) + wxMask = logical_not(precipMask) + # QPF >= 0.01, 1; QPF < 0.01, 0 + # Wx has precip, 0; Wx doesn't have precip, 1 + # QPF >= 0.01 (1) and Wx has (0) = 0 (Good result) + # QPF >= 0.01 (1) and Wx doesn't have (1) = 1 (Bad result) + # QPF < 0.01 (0) and Wx has (0) = 0 (Good result) + # QPF < 0.01 (0) and Wx doesn't have (1) = 0 (Good result) + # + # All Wx grids overlapping the SnowAmt grid must be consistent. 
+ consistMask = logical_and(qpfNonZeroMask, wxMask) + consistMask[logical_not(self.cwaMask)] = 0 + if sometrue(ravel(consistMask)): + wxGridTR = wxInfoList[wxIndex].gridTime() + tempGridStartTime = wxGridTR.startTime().unixTime() + if tempGridStartTime < gridTR.startTime().unixTime(): + # Clip to start of QPF grid + tempGridStartTime = gridTR.startTime().unixTime() + tempGridEndTime = wxGridTR.endTime().unixTime() + if tempGridEndTime > gridTR.endTime().unixTime(): + # Clip to end of QPF Grid + tempGridEndTime = gridTR.endTime().unixTime() + tempGridDur = (tempGridEndTime - tempGridStartTime) / 3600 + offset = (tempGridStartTime - \ + self.timeRange0_1.startTime().unixTime()) / 3600 + # Since either the front or end of the Wx grid's + # time range may have been clipped, create a time + # range using those values. + tempGridTR = self.createTimeRange( + offset, offset+tempGridDur, 'Zulu') + self.createGrid( + 'Fcst', 'QPFWxInconsistent', 'SCALAR', consistMask, + tempGridTR, descriptiveName='QPFWxInconsistent', + minAllowedValue=0, maxAllowedValue=1, units='Good/Bad') + if tempGridColor: + self.highlightGrids( + 'Fcst', 'QPFWxInconsistent', 'SFC', gridTR, + tempGridColor) + if inconGridColor: + self.highlightGrids( + 'Fcst', 'QPF', 'SFC', gridTR, inconGridColor) + self.highlightGrids( + 'Fcst', 'Wx', 'SFC', wxGridTR, inconGridColor) + self.inconsistent = True + return + + def _calcTolerance(self, gridInfo): + precision = gridInfo.gridParmInfo.getPrecision() + return pow(10, -precision) + + def execute(self, timeRange, varDict): + # Make sure the configuration values are the correct types. + self.__checkConfigValueTypes() + # createTimeRange is from SmartScript + timeRange0_240 = self.createTimeRange(0, 241, 'Zulu') + checkCleanup = varDict.get('Check_Cleanup', 'Check') + self.__cleanup(timeRange0_240) + if checkCleanup == 'Cleanup': + message = 'SnowQPFPoPWxCheck complete.' 
+ self.statusBarMsg(message, 'R') + self.cancel() + if timeRange.endTime().unixTime() - timeRange.startTime().unixTime() < \ + 3600: # No time range selected, use create a 0 to 240 hour range + timeRange = timeRange0_240 + + # If the user has a time range swept out, send an informational + # message. + if (timeRange.startTime().unixTime() != timeRange0_240.startTime().unixTime()) or \ + (timeRange.endTime().unixTime() != timeRange0_240.endTime().unixTime()) or \ + (timeRange.duration() != timeRange0_240.duration()): + message = 'The SnowAmtQPFPoPWxCheck procedure did not run over the 0 to 240 hour time period,\nit ran over %s. This may be what you desired.' % str(timeRange) + self.statusBarMsg(message, 'S') + + # I'll need to know the unix time of 00Z so I can determine the + # start time of temporary grids later. I'll need this in more than + # one of the methods called later, so this will become an instance + # variable, i.e., prefixed with "self." I also need an instance + # variable that flags whether or not there were inconsistent grids. + self.timeRange0_1 = self.createTimeRange(0, 1, 'Zulu') + self.inconsistent = False + + # A CWA edit area can be provided in the configuration section. + # Attempt to encode that edit area as a Numeric Python mask so that + # the later checks are limited to the edit area. The GFE is not very + # friendly if the encoding fails. The GFE will send a nasty message + # to the user, but continue executing the procedure. No trappable + # error is thrown. As of this writing, the GFE appears to create an + # array of shape (0, 0) if the encoding cannot be done, so I will + # check for that and, if I find it, then set the edit area to the + # domain. + # encodeEditArea comes from SmartScript. For the points that are in + # the edit area, a value of one is assigned. Otherwise, a value of + # zero is assigned. 
+ if cwaEditArea: + self.cwaMask = self.encodeEditArea(cwaEditArea) + if self.cwaMask.shape == (0, 0): + # Use the getGridInfo command to get information about the + # SnowAmt grid. From this, the grid size can be extracted. I + # could use getGridInfo on any valid GFE grid. + # getGridInfo is from SmartScript + snowAmtInfoList = self.getGridInfo( + 'Fcst', 'SnowAmt', 'SFC', timeRange) + # I painfully discovered that the array shape is (y, x) + gridSize = (snowAmtInfoList[0].gridLocation().gridSize().y, + snowAmtInfoList[0].gridLocation().gridSize().x) + # ones is from Numeric. It creates an array of the given size + # and data type where all values are one. + self.cwaMask = ones(gridSize, Int) + message = \ +'''The procedure was not able to use the CWA edit area, %s, provided +in the configuration. You should inform the person responsible for procedures +of this problem. The procedure ran over the whole domain.''' % cwaEditArea + self.statusBarMsg(message, 'S') + else: + snowAmtInfoList = self.getGridInfo( + 'Fcst', 'SnowAmt', 'SFC', timeRange) + gridSize = (snowAmtInfoList[0].gridLocation().gridSize().y, + snowAmtInfoList[0].gridLocation().gridSize().x) + self.cwaMask = ones(gridSize, Int) + + # Based on the user's input, run the appropriate checks. + # By making each of these options a checkbox with only one option in + # the VariableList above, if an option is unchecked then an empty + # list, [], will be what's in varDict. If an option is checked then a + # list with the value "Yes", ["Yes"], will be what's in varDict. In + # Python, a conditional expression can be whether or not a data + # structure is empty. In these cases, an empty data structure, + # e.g., an empty list, an empty tuple, an empty dictionary, + # conditionally test to False while non empty data structures + # conditionally test to True. In the if statements below, every varDict + # lookup returns a list: either [] or ["Yes"]. 
I think the constructs + # below or more elegant and easier to understand. + if varDict['Run SnowAmt/QPF Check?']: + # Call the SnowAmt/QPF check method + self._runSnowAmtQPFCheck(timeRange) + if varDict['Run SnowAmt/Wx Check?']: + # Call the SnowAmt/Wx check method + self._runSnowAmtWxCheck(timeRange) + if varDict['Run QPF/PoP Check?']: + # Call the QPF/PoP check method + self._runQPFPoPCheck(timeRange) + if varDict['Run QPF/Wx Check?']: + # Call the QPF/Wx check method + self._runQPFWxCheck(timeRange) + message = 'SnowAmtQPFPoPWxCheck complete.' + if self.inconsistent: + message = '%s Inconsistencies found! Grids highlighted %s and %s.' % ( + message, inconGridColor, tempGridColor) + self.statusBarMsg(message, 'S') + else: + self.statusBarMsg(message, 'R') + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/StormInfo.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/StormInfo.py index 9b2be86c72..5caa16cd77 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/StormInfo.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/StormInfo.py @@ -1,196 +1,196 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# StormInfo - Version 3.0 -# -# Authors: Matt Belk (BOX), Shannon White (OCWWS), Tom LeFebvre (GSD), Pablo Santos (MFL) -# -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- ------------------------------------------ -# Sep 13, 2016 Adjustments from Hermine to add -# Post-Tropical stormType. 
-# Sep 19, 2016 19293 randerso Initial baseline check in -# -######################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import os -import time - -import ProcessVariableList -import StormNames -import TropicalUtility - - -MenuItems = ["None"] - -class Procedure (TropicalUtility.TropicalUtility): - - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - - def execute(self): - - # Get list of available storms - stormList = self.extractStormInfo() - - # Get info on what storm this is - bogusStormName = "ZYXWWXYZ" # Define a bogus storm so none will be defaulted - selectedName = self.determineStorm(stormList, bogusStormName) - - # Ensure we have a choice - if selectedName == bogusStormName: - self.statusBarMsg("Please rerun StormInfo and select a storm name.", "U") - return - else: - stormName = selectedName.strip() - - # Define a dictionary of PILs to use for each basin - PILs = { - "Atlantic": ["AT1", "AT2", "AT3", "AT4", "AT5"], - "Eastern Pacific": ["EP1", "EP2", "EP3", "EP4", "EP5"], - "Central Pacific": ["CP1", "CP2", "CP3", "CP4", "CP5"], - } - - Numbers = ["One", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", - "Nine", "Ten", "Eleven", "Twelve", "Thirteen", "Fourteen", - "Fifteen", "Sixteen", "Seventeen", "Eighteen", "Nineteen", "Twenty", - "Twenty-One", "Twenty-Two", "Twenty-Three", "Twenty-Four", "Twenty-Five"] - - # Set things up for the Atlantic by default - Basin = "Atlantic" - maxLists = len(StormNames.NameDict[Basin]) - - # Get the current UTC year - two digits only - curYear = self._gmtime().timetuple().tm_year % 100 - - stormList = self.extractStormInfo() - - # Build the Variable Lists dynamically based on the chosen storm - - # If New, make default GUI - newStorm = False - if stormName == "New": - newStorm = True - variableList = 
[] - variableList.append(("AWIPS bin number", "", "radio", PILs[Basin])) - variableList.append(("Storm Type", "Tropical Storm", "radio", - ["Potential Tropical Cyclone", "Subtropical Depression", "Subtropical Storm", - "Tropical Depression", "Tropical Storm", "Hurricane", "Post-Tropical Cyclone"])) - variableList.append(("Storm Name" , "", "radio", StormNames.NameDict[Basin][curYear % maxLists])) - variableList.append(("Other Storm Name (e.g. Alpha but NOT Three)", "", "alphaNumeric")) - variableList.append(("Storm Number", 1, "scale", [1, 25], 1)) - variableList.append(("Advisory Type" , "Routine", "radio", - ["Routine", "Special", "Intermediate"])) - variableList.append(("Advisory Number" , "", "alphaNumeric")) - - # If existing storm, build GUI using previous choices - else: - for sDict in stormList: - if sDict["stormName"] == stormName: - PIL = sDict["pil"] - stormType = sDict["stormType"] - stormNum = sDict["stormNumber"] - advisoryType = sDict["advisoryType"] - advisoryNum = sDict["advisoryNumber"] - - variableList = [] - variableList.append(("AWIPS bin number", PIL, "radio", [PIL])) - variableList.append(("Storm Type", stormType, "radio", - ["Potential Tropical Cyclone", "Subtropical Depression", "Subtropical Storm", - "Tropical Depression", "Tropical Storm", "Hurricane", "Post-Tropical Cyclone"])) - if stormName in Numbers: - variableList.append(("Storm Name", "None", "radio", StormNames.NameDict[Basin][curYear % maxLists])) - elif stormName not in StormNames.NameDict[Basin][curYear % maxLists]: - variableList.append(("Other Storm Name (e.g. 
Alpha but NOT Three)", stormName, "alphaNumeric")) - else: - variableList.append(("Storm Name", stormName, "radio", [stormName])) - variableList.append(("Storm Number", stormNum, "radio", [stormNum])) - variableList.append(("Advisory Type" , advisoryType , "radio", - ["Routine", "Special", "Intermediate"])) - variableList.append(("Advisory Number" , advisoryNum, "alphaNumeric")) - - - # Display the GUI - varDict = {} - processVarList = ProcessVariableList.ProcessVariableList( - "Set Advisory Information", variableList, varDict) - status = processVarList.status() - if status.upper() != "OK": - self.cancel() - - # Collect all the info provided by the forecaster - pil = varDict["AWIPS bin number"].strip() - stormType = varDict["Storm Type"].strip() - if varDict.has_key("Other Storm Name (e.g. Alpha but NOT Three)"): - otherStormName = varDict["Other Storm Name (e.g. Alpha but NOT Three)"].strip() - else: - otherStormName = "" - - if otherStormName is not "": - stormName = otherStormName - else: - if len(varDict["Storm Name"]) == 0: - stormName = "None" - else: - stormName = varDict["Storm Name"] - - # QC the storm number - stormNumber = int(varDict["Storm Number"]) - try: - pilModNumber = int(pil[2:]) - if pilModNumber == 5: - pilModNumber = 0 - except: - self.statusBarMsg("You did not provide a correct bin. Please rerun StormInfo.", "U") - return - - if stormNumber % 5 != pilModNumber: - self.statusBarMsg("The chosen storm number is not correct for chosen bin. Please rerun StormInfo.", "S") - return - - if stormName == "None": - stormName = Numbers[int(stormNumber)-1] - - advisoryType = varDict["Advisory Type"].strip() - - try: - advisoryNumber = varDict["Advisory Number"].strip() - except: - advisoryNumber = "" - - if advisoryNumber == "": - self.statusBarMsg("The advisory number is missing. 
Please rerun StormInfo.", "S") - return - - if advisoryType == "Intermediate": - advisoryNumber += "A" - - if not newStorm: - if advisoryNumber == advisoryNum: - self.statusBarMsg("You did not increment the advisory number. Make sure that is what you want. Proceeded without changing Adv number", "A") - - - # Open the file to store all the info for this particular storm - stormDict = {} - stormDict["stormType"] = stormType - stormDict["stormName"] = stormName - stormDict["stormNumber"] = stormNumber - stormDict["advisoryType"] = advisoryType - stormDict["advisoryNumber"] = advisoryNumber - stormDict["pil"] = pil - - # Save the info for this storm - self._saveAdvisory(pil, stormDict) - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# StormInfo - Version 3.0 +# +# Authors: Matt Belk (BOX), Shannon White (OCWWS), Tom LeFebvre (GSD), Pablo Santos (MFL) +# +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- ------------------------------------------ +# Sep 13, 2016 Adjustments from Hermine to add +# Post-Tropical stormType. +# Sep 19, 2016 19293 randerso Initial baseline check in +# +######################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +import os +import time + +import ProcessVariableList +import StormNames +import TropicalUtility + + +MenuItems = ["None"] + +class Procedure (TropicalUtility.TropicalUtility): + + def __init__(self, dbss): + TropicalUtility.TropicalUtility.__init__(self, dbss) + + + def execute(self): + + # Get list of available storms + stormList = self.extractStormInfo() + + # Get info on what storm this is + bogusStormName = "ZYXWWXYZ" # Define a bogus storm so none will be defaulted + selectedName = self.determineStorm(stormList, bogusStormName) + + # Ensure we have a choice + if selectedName == bogusStormName: + self.statusBarMsg("Please rerun StormInfo and select a storm name.", "U") + return + else: + stormName = selectedName.strip() + + # Define a dictionary of PILs to use for each basin + PILs = { + "Atlantic": ["AT1", "AT2", "AT3", "AT4", "AT5"], + "Eastern Pacific": ["EP1", "EP2", "EP3", "EP4", "EP5"], + "Central Pacific": ["CP1", "CP2", "CP3", "CP4", "CP5"], + } + + Numbers = ["One", "Two", "Three", "Four", "Five", "Six", "Seven", "Eight", + "Nine", "Ten", "Eleven", "Twelve", "Thirteen", "Fourteen", + "Fifteen", "Sixteen", "Seventeen", "Eighteen", "Nineteen", "Twenty", + "Twenty-One", "Twenty-Two", "Twenty-Three", "Twenty-Four", "Twenty-Five"] + + # Set things up for the Atlantic by default + Basin = "Atlantic" + maxLists = len(StormNames.NameDict[Basin]) + + # Get the current UTC year - two digits only + curYear = self._gmtime().timetuple().tm_year % 100 + + stormList = self.extractStormInfo() + + # Build the Variable Lists dynamically based on the chosen storm + + # If New, make default GUI + newStorm = False + if stormName == "New": + newStorm = True + variableList = [] + variableList.append(("AWIPS bin number", "", "radio", PILs[Basin])) + variableList.append(("Storm Type", "Tropical Storm", "radio", + ["Potential Tropical Cyclone", "Subtropical Depression", "Subtropical Storm", + "Tropical Depression", "Tropical Storm", "Hurricane", "Post-Tropical 
Cyclone"])) + variableList.append(("Storm Name" , "", "radio", StormNames.NameDict[Basin][curYear % maxLists])) + variableList.append(("Other Storm Name (e.g. Alpha but NOT Three)", "", "alphaNumeric")) + variableList.append(("Storm Number", 1, "scale", [1, 25], 1)) + variableList.append(("Advisory Type" , "Routine", "radio", + ["Routine", "Special", "Intermediate"])) + variableList.append(("Advisory Number" , "", "alphaNumeric")) + + # If existing storm, build GUI using previous choices + else: + for sDict in stormList: + if sDict["stormName"] == stormName: + PIL = sDict["pil"] + stormType = sDict["stormType"] + stormNum = sDict["stormNumber"] + advisoryType = sDict["advisoryType"] + advisoryNum = sDict["advisoryNumber"] + + variableList = [] + variableList.append(("AWIPS bin number", PIL, "radio", [PIL])) + variableList.append(("Storm Type", stormType, "radio", + ["Potential Tropical Cyclone", "Subtropical Depression", "Subtropical Storm", + "Tropical Depression", "Tropical Storm", "Hurricane", "Post-Tropical Cyclone"])) + if stormName in Numbers: + variableList.append(("Storm Name", "None", "radio", StormNames.NameDict[Basin][curYear % maxLists])) + elif stormName not in StormNames.NameDict[Basin][curYear % maxLists]: + variableList.append(("Other Storm Name (e.g. 
Alpha but NOT Three)", stormName, "alphaNumeric")) + else: + variableList.append(("Storm Name", stormName, "radio", [stormName])) + variableList.append(("Storm Number", stormNum, "radio", [stormNum])) + variableList.append(("Advisory Type" , advisoryType , "radio", + ["Routine", "Special", "Intermediate"])) + variableList.append(("Advisory Number" , advisoryNum, "alphaNumeric")) + + + # Display the GUI + varDict = {} + processVarList = ProcessVariableList.ProcessVariableList( + "Set Advisory Information", variableList, varDict) + status = processVarList.status() + if status.upper() != "OK": + self.cancel() + + # Collect all the info provided by the forecaster + pil = varDict["AWIPS bin number"].strip() + stormType = varDict["Storm Type"].strip() + if "Other Storm Name (e.g. Alpha but NOT Three)" in varDict: + otherStormName = varDict["Other Storm Name (e.g. Alpha but NOT Three)"].strip() + else: + otherStormName = "" + + if otherStormName is not "": + stormName = otherStormName + else: + if len(varDict["Storm Name"]) == 0: + stormName = "None" + else: + stormName = varDict["Storm Name"] + + # QC the storm number + stormNumber = int(varDict["Storm Number"]) + try: + pilModNumber = int(pil[2:]) + if pilModNumber == 5: + pilModNumber = 0 + except: + self.statusBarMsg("You did not provide a correct bin. Please rerun StormInfo.", "U") + return + + if stormNumber % 5 != pilModNumber: + self.statusBarMsg("The chosen storm number is not correct for chosen bin. Please rerun StormInfo.", "S") + return + + if stormName == "None": + stormName = Numbers[int(stormNumber)-1] + + advisoryType = varDict["Advisory Type"].strip() + + try: + advisoryNumber = varDict["Advisory Number"].strip() + except: + advisoryNumber = "" + + if advisoryNumber == "": + self.statusBarMsg("The advisory number is missing. 
Please rerun StormInfo.", "S") + return + + if advisoryType == "Intermediate": + advisoryNumber += "A" + + if not newStorm: + if advisoryNumber == advisoryNum: + self.statusBarMsg("You did not increment the advisory number. Make sure that is what you want. Proceeded without changing Adv number", "A") + + + # Open the file to store all the info for this particular storm + stormDict = {} + stormDict["stormType"] = stormType + stormDict["stormName"] = stormName + stormDict["stormNumber"] = stormNumber + stormDict["advisoryType"] = advisoryType + stormDict["advisoryNumber"] = advisoryNumber + stormDict["pil"] = pil + + # Save the info for this storm + self._saveAdvisory(pil, stormDict) + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCFloodingRainThreat.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCFloodingRainThreat.py index e43c74a006..d120138ffb 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCFloodingRainThreat.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCFloodingRainThreat.py @@ -1,679 +1,679 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# InlandFloodThreat -# -# Author: lefebvre,santos -# Last Modified: April 4, 2012 - Fixed to handle negative gridded FFG. -# Migrated procedure for AWIPS2. Updated 6/22/2012. S.O. -# -# Comment on 05/21/2014 (Santos): Some sites used QPF and others QPF6hr. Check that parameter -# in getQPFGrid method. Also in AWIPS 2 FFG is gridded and called just FFG. -# This is fixed in getRFCFFGModels method. -# -# LeFevbre/Santos: This is the version being turned in for baseline in 16.1.2 as of 12/7/2015. 
It includes fixes -#for new ERP data changes that took place in Summer of 2015 and better handling of grid points where there -#is no FFG guidance available. -# Last Modified -# 7/15/2016 - Lefebvre/Santos: working on code to add PQPF to the algorithm. -# 9/2/2016 - Lefebvre/Santos: Finished integrating {QPF into the algorithm -# 9/7/2016 - Lefebvre/Santos: Added better logic for grid missing messages and Don't use guidance option. -# 9/7/2016 - Lefebvre/Santos: Change ppffg timeRanges to anchor on 12Z cycles. -# VERSION 17.1.1 = The one checked in. -# 11/14/2016 - Santos - Modified at testbed in Silver Springs to fix overlap variable to do the composite -# of the rfc list edit areas, not just the overlap with cwa mask. Commented out statusBarMsg for the ppffg inventories. -# 07/21/2017 - Tweaked for 2018 baseline (17.3.1) based on WPC recommendations following upcoming change in ERPs -# to neighborhood based probabilities. Check 2018 version of HTI User Guide for details. PS/TL -# 8/31/2017 - Fixed issues found during Harvey when FFG was zero across large chunks of the area. PS/TL -# 10/21/2017 - Additional tweaks made per Raytheon suggestions during code review. (DR20333) -# 11/14/2017 - Fixed ERP thresholds per WPC recommendations during SWiT. Fixed also minimum value allowed for -# FFG guidance in GFE D2D FFGXXX db to treat NO DATA or exception values as negative in GFE. This change was made in -# the RFCFFGParameterInfo file. Otherwise logic below would not work. -# -#Search for COMMENTS to see any local config step you might need to take. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -MenuItems = ["Populate"] - -VariableList = [("Gridded/Text FFG Blending?" 
, "Yes", "radio", ["Yes", "No"]), - ("Probabilistic QPF Exceedance Level to use:", "10%", "radio", ["Don't Use Prob Guidance", "05%", "10%", "25%", "50%"]), - -# Use the above line for the GUI when testing is finished. the line below will be enabled when we are ingesting 20%, 30%, and 40% percentiles -# ("Probabilistic QPF Exceedance Level to use:", "10%", "radio", ["Don't Use Prob Guidance", "10%", "20%", "30%", "40%", "50%"]), - ] - -import SmartScript -import time -import popen2 -import sys -import AbsTime -import TimeRange -import GridManipulation -import numpy as np - - -class Procedure (GridManipulation.GridManipulation): - def __init__(self, dbss): - GridManipulation.GridManipulation.__init__(self, dbss) - - # get the current time, truncates to the last six hour value. - # returns a timeRange with this startTime until 72 hrs from this time - - def make72hrTimeRange(self, startTime): - - # Make the end time 3 days from the startTime - end = startTime + (72 * 3600) - # Convert them to AbsTimes - startTime = AbsTime.AbsTime(startTime) - endTime = AbsTime.AbsTime(end) - - timeRange = TimeRange.TimeRange(startTime, endTime) - - return timeRange - - - # returns a list of timeRange with the specified duration in hours over the - # specified timeRange - def makeTimeRangeList(self, timeRange, duration): - trList = [] - sTime = timeRange.startTime().unixTime() - delta = duration * 3600 - while sTime < timeRange.endTime().unixTime(): - trList.append(self.GM_makeTimeRange(sTime, sTime + delta)) - sTime = sTime + delta - - return trList - - - # Returns a list of database IDs matching the specified model name, - # weather element name and level - def getModelList(self, modelName, weName, weLevel): - modelList = [] - - availParms = self.availableParms() - - for pName, level, dbID in availParms: - if modelName in dbID.modelName(): - if weName in pName: - if weLevel in level: - if dbID not in modelList: - modelList.append(dbID) - return modelList - - - # A small algorithm to 
determine the day number - def determineDay(self, modelTime, validTime): - - diff = (validTime - modelTime) / 3600 - if diff < 48: - return 1 - elif diff < 72: - return 2 - else: - return 3 - - return 0 - - def baseModelTime(self, modelTime): - - oneDay = 3600 * 24 - offset = 3600 * 0 # hours after which we expect models to arrive - baseTime = (int((modelTime + offset) / oneDay) * oneDay) - offset - - return baseTime - - def getProbBaseTime(self): - ERPModelName = "HPCERP" - ERPVarName = "TP10pct6hr" - ERPLevel = "SFC" - # get the list of all available models. They come sorted latest to oldest. - modelList = self.getModelList(ERPModelName, ERPVarName, ERPLevel) - - if len(modelList) == 0: - self.statusBarMsg("No ERP Guidance found.", "S") - return None - - for model in modelList: - trList = self.GM_getWEInventory(ERPVarName, model) - if len(trList) == 0: - continue - - latestHr = (trList[-1].startTime().unixTime() - model.modelTime().unixTime()) / 3600 - - # return the time of the first model we find with enough data - if latestHr >= 72: - return model.modelTime() - - # If we get here, we have found no models with 72 hours of data so return the latest model time - self.statusBarMsg("No model runs found with 72 hours of grids. Using latest model") - return modelList[0].modelTime() - - # Find the time of the model with a day 3 grid and truncate the modelTime to the last 12Z - def getPpffgBaseTime(self): - ERPModelName = "HPCERP" - ERPVarName = "ppffg" - ERPLevel = "SFC" - # get the list of all available models. They come sorted latest to oldest. 
- modelList = self.getModelList(ERPModelName, ERPVarName, ERPLevel) - - if len(modelList) == 0: - self.statusBarMsg("No ERP Guidance found for ppffg.", "S") - return None - - for model in modelList: - trList = self.GM_getWEInventory(ERPVarName, model) - if len(trList) == 0: - continue - - latestHr = (trList[-1].startTime().unixTime() - model.modelTime().unixTime()) / 3600 - - # return the time of the first model we find with enough data - if latestHr > 48: - - #print "model time with day 3 grid:", model.modelTime() - modelTime = model.modelTime().unixTime() - (12 * 3600) - - # truncate the model time to the last 12Z cycle - baseTime = int(modelTime / (3600 * 24)) * (3600 * 24) + (12 * 3600) - return AbsTime.AbsTime(baseTime) - - # If we get here, we have found no models with 72 hours of data so return the latest model time - self.statusBarMsg("No model runs found with 72 hours of grids. Using latest model") - return modelList[0].modelTime() - - # Format a timeRange string into something smaller and readable - def trStr(self, tr): - return str(tr)[5:7] + "." + str(tr)[11:13] + "-" + str(tr)[29:31] + "." + str(tr)[35:37] + "Z" - - # Fetch ERP probabilistic data using the latest available model. In some cases - # the grids retrieved may originate from more than one model version. In all cases, - # latest guidance available for each time slot will returned. - # Returns a dictionary with key as timeRange and grid as the data. - def getERPGuidance(self, weName, trList): - - ERPModelName = "HPCERP" - ERPLevel = "SFC" - - # Get the list of all available models. They come sorted latest to oldest. 
- modelList = self.getModelList(ERPModelName, weName, ERPLevel) - if len(modelList) == 0: # No grids found for the model/weName combination - return {} # So just return an empty GridDict - - # For each timeRange, find the model with the latest grid and save that - gridDict = {} - for tr in trList: - - # Determine the equivalent d2D timeRange based on GFE QPF tr in the trList - d2dTR = self.GM_makeTimeRange(tr.endTime().unixTime(), tr.endTime().unixTime() + 3600) - - foundGrid = False - for model in modelList: - - # See if the ERP grids we want are in this model cycle - d2dInv = self.GM_getWEInventory(weName, model) - - - if d2dTR not in d2dInv: - continue - - # Fetch the grid and save it - grid = self.getGrids(model, weName, ERPLevel, d2dTR, mode="First") - gridDict[tr] = grid - - modelStr = str(model.modelTime())[0:13] + "Z" - - ### DEBUG DEBUG DEBUG #### Remove when testing is complete - #print "WE:", weName, "GFE TimeRange:", self.trStr(tr), "using model at:", modelStr, " valid:", self.trStr(d2dTR) - ### DEBUG DEBUG DEBUG #### Remove when testing is complete - - if modelList.index(model) != 0: - # Suppress messages for ppffg for the last timeRange only, since ppffg only arrives once per day - if weName == "ppffg" and tr == trList[-1]: - pass -# else: -# self.statusBarMsg("Using " + modelStr + " model for time period " + self.trStr(tr), "S") -# - foundGrid = True - break - - # If we get here, no model was found with the TR we want - if not foundGrid: - gridDict[tr] = None - self.statusBarMsg("No Probabilistic WPC QPF found for timeRange:" + str(tr), "S") - - return gridDict - - # Use this method for testing if you have no luck getting products - # directly from the text database - def getTextProductFromFile(self, filename): - # replace the filename with one on your system - filename = "/tmp/FFGData/" + filename + ".txt" - f = file(filename, 'r') - textList = [] - line = f.readline() - textList.append(line) - while line != "": - line = f.readline() - 
textList.append(line) - f.close() - return textList - - # Retrieves a text product from the text database - def getTextProductFromDB(self, productID): - cmd = "textdb -r " + productID - - (stdout, stdin, stderr) = popen2.popen3(cmd) - - textList = [] - line = stdout.readline() - textList.append(line) - while line != "": - line = stdout.readline() - textList.append(line) - return textList - - - # given a text product, this method decodes the ffg values and - # returns a dictionary {area : value} - def decodeFFGText(self, ffgText): - ffgDict = {} - for s in ffgText: - parts = s.split() - if len(parts) < 4: - continue - if len(parts[0]) != 6: - continue - if parts[0][2] != "Z": - continue - area = parts[0] - value6hr = float(parts[3][:-1]) # strip the "/" - ffgDict[area] = value6hr - - return ffgDict - - - # Fetch all of the gridded FFG model names and return just the - # latest version of each type (region) - def getRFCFFGModelName(self, rfcName): - - # Find all the models matching this description. - modelList = self.getModelList(rfcName, "FFG0624hr", "SFC") - - for model in modelList: - # WARNING!!! This check should be more specific to the DBID string. - if model.modelIdentifier().find(rfcName) > -1: - return model - - return None - - - # Returns the list of RFCs that overlap the local GFE domain - # as defined by getSiteID(). 
- def getOverlappingRFCs(self): - # The list of all the RFCs - RFCEditAreas = ["ISC_PTR", "ISC_RSA", "ISC_STR", "ISC_ACR", "ISC_KRF", - "ISC_MSR", "ISC_TUA", "ISC_FWR", "ISC_ORN", "ISC_TIR", - "ISC_ALR", "ISC_RHA", "ISC_TAR"] - -## cwaEA = self.getSiteID() -## cwaMask = self.encodeEditArea(cwaEA) - #cwaMask = self.encodeEditArea(self.getSiteID()) - - eaList = self.editAreaList() - - rfcList = [] - - for rfc in RFCEditAreas: - - rfcMask = None - if rfc in eaList: - rfcMask = self.encodeEditArea(rfc) - - if rfcMask is None: - continue - - #overlap = cwaMask & rfcMask - overlap = rfcMask - - if overlap.any(): - rfcList.append(rfc) - - return rfcList - - # First try to access the gridded FFG from the D2D files. If they exist - # mosaic all the ones we find and return the composite. If we can't - # find any gridded FFG, then fetch the FFG text product, decode it, - # and create a patchwork grid from the guidance value in each county - # or basin. - def getRFCFlashFloodGrid(self, productList, varDict): - - ffgGrid = self.newGrid(-9.0) - foundGrids = False - blending = varDict["Gridded/Text FFG Blending?"] - - RFCList = self.getOverlappingRFCs() - - ffgWEName = "FFG0624hr" - # Fetch the gridded FFG, mosaic these into a single grid - for rfc in RFCList: - - tmplist = rfc.split('_'); - rfcsid = tmplist[1]; - rfcName = 'FFG' + rfcsid; - # Find the model for this RFC - modelName = self.getRFCFFGModelName(rfcName) - - #print "modelName:", modelName - if modelName is None: - self.statusBarMsg("No FFG database found for " + rfc, "S") - continue - - trList = self.GM_getWEInventory(ffgWEName, modelName) - if len(trList) == 0: - self.statusBarMsg("No FFG grids found in database " + modelName, "S") - continue - - # Get the first grid - tempGrid = self.getGrids(modelName, ffgWEName, "SFC", trList[0], mode="First") - - # Make a mask of the RFC domain - rfcMask = self.encodeEditArea(rfc) - - mask = (tempGrid >= 0.0) & rfcMask - ffgGrid[mask] = tempGrid[mask] - ffgGrid[mask] /= 25.4 - 
foundGrids = True - - # Make another FFG grid from the text guidance - editAreaList = self.editAreaList() - - ffgTextGrid = self.empty() - for prod in productList: - - # Uncomment the next line to fetch FFG data from a file -## ffgText = self.getTextProductFromFile(prod) - - # Use this method to retrieve FFG data from the text database - ffgText = self.getTextProductFromDB(prod) - - ffgDict = self.decodeFFGText(ffgText) - - for area in ffgDict.keys(): - if area not in editAreaList: - continue - refArea = self.getEditArea(area) - mask = self.encodeEditArea(refArea) - value = ffgDict[area] - ffgTextGrid[mask] = value - - # Comment this in to see intermediate FFG from text guidance -## tr = self.getTimeRange("Today") -## self.createGrid("Fcst","FFGFromText", "SCALAR", ffgTextGrid, tr, -## minAllowedValue = -1, maxAllowedValue=100, -## precision=2) - - # Since the gridded FFG tends to have lots of holes in it, - # fill those holes with the text version of the FFG where the - # gridded FFG is less than its non-zero average. - - # if we found the grids fill it in with values from the text products. - - siteID = self.getSiteID() - cwamask = self.encodeEditArea(siteID) - - if foundGrids: - # get the >=zero gridded average - mask = ffgGrid >= 0.0 - - if not mask.any(): # no points in mask - return ffgTextGrid - - if blending == "Yes": - missingMask = (ffgGrid < 0.0) & cwamask - if missingMask.any(): - ffgGrid[missingMask] = ffgTextGrid[missingMask] - - else: - ffgGrid = ffgTextGrid - - return ffgGrid - - # Returns the QPF sum over the specified timeRange - def getQPFGrid(self, timeRange): -# -# This assumes QPF has a constraint of TC6NG. If not and your office uses QPF6 -# or QPF6hr you will need to change this here accordingly. 
-# - # Inventory all QPF grids from the Fcst database - trList = self.GM_getWEInventory("QPF", timeRange=timeRange) - if len(trList) == 0: - return None - - qpfGrid = self.empty() - for tr in trList: - grid = self.getGrids("Fcst", "QPF", "SFC", timeRange, mode="First") - qpfGrid += grid - - return qpfGrid - - def getOverlappingTR(self, tr6, tr24List): - - for tr24 in tr24List: - if tr6.overlaps(tr24): - return tr24 - return None - - def execute(self, varDict): - -# # Find the nominal start time when we will be making grids -# start = int(time.time() / (24 * 3600)) * (24 * 3600) # self._gmtime() -# for i in range(start, start + (72 * 3600),(6 * 3600)): -# bTime = self.baseModelTime(i) -# print "time:", time.asctime(time.gmtime(i)), "BaseTime:", time.asctime(time.gmtime(bTime)) - - - ### CONFIGURATION SECTION ################################ - ### Levels must exactly match the levels in the inland threat - ### weather element. - ### Next two lines changed for 2018 with 17.3.1 baseline. -# ratios = [0.0, 0.75, 1.0, 1.5, 100.0] -# erps = [0.0, 5.0, 10.0, 20.0, 50.0, 100.0] # - - ratios = [0.0, 0.75, 1.0, 1.5, 100.0] - erps = [0.0, 5.0, 10.0, 20.0, 50.0, 100.0] # - - threatMatrix = [ - ["None", "Elevated", "Elevated", "Mod" ], # lowest ERP - ["Elevated", "Elevated", "Mod", "High" ], - ["Elevated", "Mod", "Mod", "High" ], - ["Mod", "Mod", "High", "Extreme"], - ["High", "High", "Extreme", "Extreme"], # highest # ERP - ] # low ------ QPF/FFG ratio -------->high -# -# Old matrix. Keept it for reference. -# threatMatrix = [ -# ["None", "Elevated", "Mod", "High" ], # lowest ERP -# ["Elevated", "Mod", "Mod", "High" ], -# ["Elevated", "Mod", "High", "High" ], -# ["Mod", "High", "High", "Extreme"], -# ["High", "High", "Extreme", "Extreme"], # highest # ERP -# ] # low ------ QPF/FFG ratio -------->high -# - # COMMENTS: The list of FFG products that contain FFG data for your WFO - # The following is an example for Miami. Default list is empty. 
You must - # populate it with your CWA FFG guidance. -# productList = ["ATLFFGMFL", "ATLFFGTBW", "ATLFFGMLB","ATLFFGKEY"] - productList = [] - if len(productList) == 0: - self.statusBarMsg("You have not configured Text FFG in Procedure. Create a site level copy, and configure your text FFG Guidance. Search for COMMENTS in the procedure.", "S") - - ### END CONFIGURATION SECTION ################################# - - try: - threatKeys = self.getDiscreteKeys("FloodingRainThreat") - except: - threatKeys = ["None", "Elevated", "Mod", "High", "Extreme"] - - baseTime = self.getProbBaseTime().unixTime() - anchorTimeRange = self.GM_makeTimeRange(baseTime, baseTime + (3600 * 72)) - - ppffgBaseTime = self.getPpffgBaseTime().unixTime() - ppffgTimeRange = self.GM_makeTimeRange(ppffgBaseTime, ppffgBaseTime + (3600 * 72)) - - #print "Prob TimeRange:", anchorTimeRange - #print "ppffg TimeRange", ppffgTimeRange - - # make a 72 hour timeRange and a list of 6 hour timeRanges based on the anchorTime - probTRList = self.makeTimeRangeList(anchorTimeRange, 6) - ppffgTRList = self.makeTimeRangeList(ppffgTimeRange, 24) - - # Fetch the probabilistic value selected by the user and make the weName - probGridDict = {} - probStr = varDict["Probabilistic QPF Exceedance Level to use:"] - - # If we're not using prob guidance, fill the dictionary with grids of zeros - if probStr == "Don't Use Prob Guidance": - for tr in probTRList: - probGridDict[tr] = np.zeros(self.getGridShape(), np.float32) - probWEName = "TR00pct6hr" - # If we're using prob guidance, make the prob string into an integer so we can figure out the weName - else: - probStr = probStr[:-1] - # Make an int value so we can subtract it from 100 to get the weName - try: - probValue = int(probStr) # This should always succeed, but just in case... - except: - self.statusBarMsg("Error parsing probability string. 
" + probStr, "U") - return - # Fetch the probabilistic guidance - probWEName = "TP" + str(100 - probValue) + "pct6hr" - probGridDict = self.getERPGuidance(probWEName, probTRList) - if not probGridDict: # no prob grids found for this weName - self.statusBarMsg(probWEName + " ERP guidance is not available. Please re-run this tool with a different prob exceeedance level or use Don't Use Prob Guidance option in GUI.", "S") - return - - # Create an empty discrete grid - maxFloodThreat = np.zeros(self.getGridShape(), np.int8) - - # Fetch the FFG grid either from gridded data or the text product - ffgGrid = self.getRFCFlashFloodGrid(productList, varDict) - - # calculate the areas where the FFG is missing. We will fill these values with None eventually - missingFFGMask = ffgGrid < 0.0 - - # Get the ERP grids and stuff them in six hour time blocks to match - # the cummulative QPF grids will create later - ppffgGridDict = self.getERPGuidance("ppffg", ppffgTRList) - - if not ppffgGridDict: # Didn't find any ppffg guidance - self.statusBarMsg("The current ERP guidance is not available. Please re-run this tool at a later time.", "S") - return - - #### DEBUG DEBUG DEBUG ######################################################################################## - - for tr in probTRList: - self.createGrid("Fcst", probWEName, "SCALAR", probGridDict[tr]/25.4, tr, precision=2) - - for tr in ppffgTRList: - self.createGrid("Fcst", "ERP", "SCALAR", ppffgGridDict[tr], tr, precision=2) - - #### DEBUG DEBUG DEBUG ######################################################################################## - - - for i in range(len(probTRList)): - probTR = probTRList[i] - #print "probTR:", probTR - # get the EPR grid - - # Fetch the erp grids and reference by timeRange (24 hours each) - # Use the probTR in this loop and timeRange.overlaps to figure out which erp grid to use. 
- # All other code should be the same - ppffgTR = self.getOverlappingTR(probTR, ppffgTRList) - if ppffgTR is None: - continue - - ppffgGrid = ppffgGridDict[ppffgTR] #+ in pct (%) - # get the probabilistic grid - pQPFGrid = probGridDict[probTR] / 25.4 # convert mm to inches - - qpfFcstGrid = self.getQPFGrid(probTR) - - # Get the maximum of the Fcst QPF and the ERP probabilistic grid - qpfGrid = np.maximum(qpfFcstGrid, pQPFGrid) - - self.createGrid("Fcst", "MaxFcstPQPF", "SCALAR", qpfGrid, probTR, precision=2) - - if ffgGrid is None or qpfGrid is None: - self.statusBarMsg("FlashFlood or QPF grids missing at timeRange:" + - str(probTR), "S") - continue - - if ppffgGrid is None: - self.statusBarMsg("ERP grids missing at timeRange:" + str(probTR), "S") - continue - - tempffgGrid = ffgGrid.copy() - tempffgGrid[ffgGrid == 0.0] = 0.1 - ratioGrid = qpfGrid / tempffgGrid - - # Clip the ratioGrid to 1000.0 to prevent problems when displaying - ratioGrid.clip(0.0, 1000.0, ratioGrid) - - self.createGrid("Fcst", "FFG", "SCALAR", ffgGrid, probTR, - minAllowedValue = -9, maxAllowedValue=10, precision=2) - self.createGrid("Fcst", "QPFtoFFGRatio", "SCALAR", ratioGrid, probTR, - minAllowedValue = 0, maxAllowedValue=1000, precision=2) - - floodThreat = np.zeros(self.getGridShape(), np.int8) - - for e in range(len(erps) - 1): - for r in range(len(ratios) - 1): - eMin = erps[e] - eMax = erps[e+1] - rMin = ratios[r] - rMax = ratios[r+1] - ratioMask = (ratioGrid >= rMin) & (ratioGrid < rMax) - erpMask = (ppffgGrid >= eMin) & (ppffgGrid < eMax) - mask = ratioMask & erpMask - - keyIndex = self.getIndex(threatMatrix[e][r], threatKeys) #e is y and r ix x - floodThreat[mask] = keyIndex - - # Now set the values we found missing to the None key - noneIndex = self.getIndex("None", threatKeys) - floodThreat[missingFFGMask] = noneIndex - - # Create the grid - self.createGrid("Fcst", "FloodThreat", "DISCRETE", - (floodThreat, threatKeys), probTR, - discreteKeys=threatKeys, - discreteOverlap=0, - 
discreteAuxDataLength=2, - defaultColorTable="gHLS_new") - - maxFloodThreat = np.maximum(floodThreat, maxFloodThreat) - - # Make a big timeRange and delete all the FloodingRainThreat grids - startTime = int(self._gmtime().unixTime()/ 3600) * 3600 - (24 * 3600) - endTime = startTime + (24 * 3600 * 10) - dbTR = self.GM_makeTimeRange(startTime, endTime) - cTime = int(self._gmtime().unixTime()/ 3600) * 3600 - end = cTime + (6*3600) - threatTR = self.GM_makeTimeRange(cTime, end) - self.deleteCmd(['FloodingRainThreat'], dbTR) - self.createGrid("Fcst", "FloodingRainThreat", "DISCRETE", - (maxFloodThreat, threatKeys), threatTR, - discreteKeys=threatKeys, - discreteOverlap=0, - discreteAuxDataLength=2, - defaultColorTable="gHLS_new") - - - return +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# InlandFloodThreat +# +# Author: lefebvre,santos +# Last Modified: April 4, 2012 - Fixed to handle negative gridded FFG. +# Migrated procedure for AWIPS2. Updated 6/22/2012. S.O. +# +# Comment on 05/21/2014 (Santos): Some sites used QPF and others QPF6hr. Check that parameter +# in getQPFGrid method. Also in AWIPS 2 FFG is gridded and called just FFG. +# This is fixed in getRFCFFGModels method. +# +# LeFevbre/Santos: This is the version being turned in for baseline in 16.1.2 as of 12/7/2015. It includes fixes +#for new ERP data changes that took place in Summer of 2015 and better handling of grid points where there +#is no FFG guidance available. +# Last Modified +# 7/15/2016 - Lefebvre/Santos: working on code to add PQPF to the algorithm. +# 9/2/2016 - Lefebvre/Santos: Finished integrating {QPF into the algorithm +# 9/7/2016 - Lefebvre/Santos: Added better logic for grid missing messages and Don't use guidance option. 
+# 9/7/2016 - Lefebvre/Santos: Change ppffg timeRanges to anchor on 12Z cycles. +# VERSION 17.1.1 = The one checked in. +# 11/14/2016 - Santos - Modified at testbed in Silver Springs to fix overlap variable to do the composite +# of the rfc list edit areas, not just the overlap with cwa mask. Commented out statusBarMsg for the ppffg inventories. +# 07/21/2017 - Tweaked for 2018 baseline (17.3.1) based on WPC recommendations following upcoming change in ERPs +# to neighborhood based probabilities. Check 2018 version of HTI User Guide for details. PS/TL +# 8/31/2017 - Fixed issues found during Harvey when FFG was zero across large chunks of the area. PS/TL +# 10/21/2017 - Additional tweaks made per Raytheon suggestions during code review. (DR20333) +# 11/14/2017 - Fixed ERP thresholds per WPC recommendations during SWiT. Fixed also minimum value allowed for +# FFG guidance in GFE D2D FFGXXX db to treat NO DATA or exception values as negative in GFE. This change was made in +# the RFCFFGParameterInfo file. Otherwise logic below would not work. +# +#Search for COMMENTS to see any local config step you might need to take. +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +MenuItems = ["Populate"] + +VariableList = [("Gridded/Text FFG Blending?" , "Yes", "radio", ["Yes", "No"]), + ("Probabilistic QPF Exceedance Level to use:", "10%", "radio", ["Don't Use Prob Guidance", "05%", "10%", "25%", "50%"]), + +# Use the above line for the GUI when testing is finished. 
the line below will be enabled when we are ingesting 20%, 30%, and 40% percentiles +# ("Probabilistic QPF Exceedance Level to use:", "10%", "radio", ["Don't Use Prob Guidance", "10%", "20%", "30%", "40%", "50%"]), + ] + +import SmartScript +import time +import popen2 +import sys +import AbsTime +import TimeRange +import GridManipulation +import numpy as np + + +class Procedure (GridManipulation.GridManipulation): + def __init__(self, dbss): + GridManipulation.GridManipulation.__init__(self, dbss) + + # get the current time, truncates to the last six hour value. + # returns a timeRange with this startTime until 72 hrs from this time + + def make72hrTimeRange(self, startTime): + + # Make the end time 3 days from the startTime + end = startTime + (72 * 3600) + # Convert them to AbsTimes + startTime = AbsTime.AbsTime(startTime) + endTime = AbsTime.AbsTime(end) + + timeRange = TimeRange.TimeRange(startTime, endTime) + + return timeRange + + + # returns a list of timeRange with the specified duration in hours over the + # specified timeRange + def makeTimeRangeList(self, timeRange, duration): + trList = [] + sTime = timeRange.startTime().unixTime() + delta = duration * 3600 + while sTime < timeRange.endTime().unixTime(): + trList.append(self.GM_makeTimeRange(sTime, sTime + delta)) + sTime = sTime + delta + + return trList + + + # Returns a list of database IDs matching the specified model name, + # weather element name and level + def getModelList(self, modelName, weName, weLevel): + modelList = [] + + availParms = self.availableParms() + + for pName, level, dbID in availParms: + if modelName in dbID.modelName(): + if weName in pName: + if weLevel in level: + if dbID not in modelList: + modelList.append(dbID) + return modelList + + + # A small algorithm to determine the day number + def determineDay(self, modelTime, validTime): + + diff = (validTime - modelTime) / 3600 + if diff < 48: + return 1 + elif diff < 72: + return 2 + else: + return 3 + + return 0 + + def 
baseModelTime(self, modelTime): + + oneDay = 3600 * 24 + offset = 3600 * 0 # hours after which we expect models to arrive + baseTime = (int((modelTime + offset) / oneDay) * oneDay) - offset + + return baseTime + + def getProbBaseTime(self): + ERPModelName = "HPCERP" + ERPVarName = "TP10pct6hr" + ERPLevel = "SFC" + # get the list of all available models. They come sorted latest to oldest. + modelList = self.getModelList(ERPModelName, ERPVarName, ERPLevel) + + if len(modelList) == 0: + self.statusBarMsg("No ERP Guidance found.", "S") + return None + + for model in modelList: + trList = self.GM_getWEInventory(ERPVarName, model) + if len(trList) == 0: + continue + + latestHr = (trList[-1].startTime().unixTime() - model.modelTime().unixTime()) / 3600 + + # return the time of the first model we find with enough data + if latestHr >= 72: + return model.modelTime() + + # If we get here, we have found no models with 72 hours of data so return the latest model time + self.statusBarMsg("No model runs found with 72 hours of grids. Using latest model") + return modelList[0].modelTime() + + # Find the time of the model with a day 3 grid and truncate the modelTime to the last 12Z + def getPpffgBaseTime(self): + ERPModelName = "HPCERP" + ERPVarName = "ppffg" + ERPLevel = "SFC" + # get the list of all available models. They come sorted latest to oldest. 
+ modelList = self.getModelList(ERPModelName, ERPVarName, ERPLevel) + + if len(modelList) == 0: + self.statusBarMsg("No ERP Guidance found for ppffg.", "S") + return None + + for model in modelList: + trList = self.GM_getWEInventory(ERPVarName, model) + if len(trList) == 0: + continue + + latestHr = (trList[-1].startTime().unixTime() - model.modelTime().unixTime()) / 3600 + + # return the time of the first model we find with enough data + if latestHr > 48: + + #print "model time with day 3 grid:", model.modelTime() + modelTime = model.modelTime().unixTime() - (12 * 3600) + + # truncate the model time to the last 12Z cycle + baseTime = int(modelTime / (3600 * 24)) * (3600 * 24) + (12 * 3600) + return AbsTime.AbsTime(baseTime) + + # If we get here, we have found no models with 72 hours of data so return the latest model time + self.statusBarMsg("No model runs found with 72 hours of grids. Using latest model") + return modelList[0].modelTime() + + # Format a timeRange string into something smaller and readable + def trStr(self, tr): + return str(tr)[5:7] + "." + str(tr)[11:13] + "-" + str(tr)[29:31] + "." + str(tr)[35:37] + "Z" + + # Fetch ERP probabilistic data using the latest available model. In some cases + # the grids retrieved may originate from more than one model version. In all cases, + # latest guidance available for each time slot will returned. + # Returns a dictionary with key as timeRange and grid as the data. + def getERPGuidance(self, weName, trList): + + ERPModelName = "HPCERP" + ERPLevel = "SFC" + + # Get the list of all available models. They come sorted latest to oldest. 
+ modelList = self.getModelList(ERPModelName, weName, ERPLevel) + if len(modelList) == 0: # No grids found for the model/weName combination + return {} # So just return an empty GridDict + + # For each timeRange, find the model with the latest grid and save that + gridDict = {} + for tr in trList: + + # Determine the equivalent d2D timeRange based on GFE QPF tr in the trList + d2dTR = self.GM_makeTimeRange(tr.endTime().unixTime(), tr.endTime().unixTime() + 3600) + + foundGrid = False + for model in modelList: + + # See if the ERP grids we want are in this model cycle + d2dInv = self.GM_getWEInventory(weName, model) + + + if d2dTR not in d2dInv: + continue + + # Fetch the grid and save it + grid = self.getGrids(model, weName, ERPLevel, d2dTR, mode="First") + gridDict[tr] = grid + + modelStr = str(model.modelTime())[0:13] + "Z" + + ### DEBUG DEBUG DEBUG #### Remove when testing is complete + #print "WE:", weName, "GFE TimeRange:", self.trStr(tr), "using model at:", modelStr, " valid:", self.trStr(d2dTR) + ### DEBUG DEBUG DEBUG #### Remove when testing is complete + + if modelList.index(model) != 0: + # Suppress messages for ppffg for the last timeRange only, since ppffg only arrives once per day + if weName == "ppffg" and tr == trList[-1]: + pass +# else: +# self.statusBarMsg("Using " + modelStr + " model for time period " + self.trStr(tr), "S") +# + foundGrid = True + break + + # If we get here, no model was found with the TR we want + if not foundGrid: + gridDict[tr] = None + self.statusBarMsg("No Probabilistic WPC QPF found for timeRange:" + str(tr), "S") + + return gridDict + + # Use this method for testing if you have no luck getting products + # directly from the text database + def getTextProductFromFile(self, filename): + # replace the filename with one on your system + filename = "/tmp/FFGData/" + filename + ".txt" + f = file(filename, 'r') + textList = [] + line = f.readline() + textList.append(line) + while line != "": + line = f.readline() + 
textList.append(line) + f.close() + return textList + + # Retrieves a text product from the text database + def getTextProductFromDB(self, productID): + cmd = "textdb -r " + productID + + (stdout, stdin, stderr) = popen2.popen3(cmd) + + textList = [] + line = stdout.readline() + textList.append(line) + while line != "": + line = stdout.readline() + textList.append(line) + return textList + + + # given a text product, this method decodes the ffg values and + # returns a dictionary {area : value} + def decodeFFGText(self, ffgText): + ffgDict = {} + for s in ffgText: + parts = s.split() + if len(parts) < 4: + continue + if len(parts[0]) != 6: + continue + if parts[0][2] != "Z": + continue + area = parts[0] + value6hr = float(parts[3][:-1]) # strip the "/" + ffgDict[area] = value6hr + + return ffgDict + + + # Fetch all of the gridded FFG model names and return just the + # latest version of each type (region) + def getRFCFFGModelName(self, rfcName): + + # Find all the models matching this description. + modelList = self.getModelList(rfcName, "FFG0624hr", "SFC") + + for model in modelList: + # WARNING!!! This check should be more specific to the DBID string. + if model.modelIdentifier().find(rfcName) > -1: + return model + + return None + + + # Returns the list of RFCs that overlap the local GFE domain + # as defined by getSiteID(). 
+ def getOverlappingRFCs(self): + # The list of all the RFCs + RFCEditAreas = ["ISC_PTR", "ISC_RSA", "ISC_STR", "ISC_ACR", "ISC_KRF", + "ISC_MSR", "ISC_TUA", "ISC_FWR", "ISC_ORN", "ISC_TIR", + "ISC_ALR", "ISC_RHA", "ISC_TAR"] + +## cwaEA = self.getSiteID() +## cwaMask = self.encodeEditArea(cwaEA) + #cwaMask = self.encodeEditArea(self.getSiteID()) + + eaList = self.editAreaList() + + rfcList = [] + + for rfc in RFCEditAreas: + + rfcMask = None + if rfc in eaList: + rfcMask = self.encodeEditArea(rfc) + + if rfcMask is None: + continue + + #overlap = cwaMask & rfcMask + overlap = rfcMask + + if overlap.any(): + rfcList.append(rfc) + + return rfcList + + # First try to access the gridded FFG from the D2D files. If they exist + # mosaic all the ones we find and return the composite. If we can't + # find any gridded FFG, then fetch the FFG text product, decode it, + # and create a patchwork grid from the guidance value in each county + # or basin. + def getRFCFlashFloodGrid(self, productList, varDict): + + ffgGrid = self.newGrid(-9.0) + foundGrids = False + blending = varDict["Gridded/Text FFG Blending?"] + + RFCList = self.getOverlappingRFCs() + + ffgWEName = "FFG0624hr" + # Fetch the gridded FFG, mosaic these into a single grid + for rfc in RFCList: + + tmplist = rfc.split('_'); + rfcsid = tmplist[1]; + rfcName = 'FFG' + rfcsid; + # Find the model for this RFC + modelName = self.getRFCFFGModelName(rfcName) + + #print "modelName:", modelName + if modelName is None: + self.statusBarMsg("No FFG database found for " + rfc, "S") + continue + + trList = self.GM_getWEInventory(ffgWEName, modelName) + if len(trList) == 0: + self.statusBarMsg("No FFG grids found in database " + modelName, "S") + continue + + # Get the first grid + tempGrid = self.getGrids(modelName, ffgWEName, "SFC", trList[0], mode="First") + + # Make a mask of the RFC domain + rfcMask = self.encodeEditArea(rfc) + + mask = (tempGrid >= 0.0) & rfcMask + ffgGrid[mask] = tempGrid[mask] + ffgGrid[mask] /= 25.4 + 
foundGrids = True + + # Make another FFG grid from the text guidance + editAreaList = self.editAreaList() + + ffgTextGrid = self.empty() + for prod in productList: + + # Uncomment the next line to fetch FFG data from a file +## ffgText = self.getTextProductFromFile(prod) + + # Use this method to retrieve FFG data from the text database + ffgText = self.getTextProductFromDB(prod) + + ffgDict = self.decodeFFGText(ffgText) + + for area in list(ffgDict.keys()): + if area not in editAreaList: + continue + refArea = self.getEditArea(area) + mask = self.encodeEditArea(refArea) + value = ffgDict[area] + ffgTextGrid[mask] = value + + # Comment this in to see intermediate FFG from text guidance +## tr = self.getTimeRange("Today") +## self.createGrid("Fcst","FFGFromText", "SCALAR", ffgTextGrid, tr, +## minAllowedValue = -1, maxAllowedValue=100, +## precision=2) + + # Since the gridded FFG tends to have lots of holes in it, + # fill those holes with the text version of the FFG where the + # gridded FFG is less than its non-zero average. + + # if we found the grids fill it in with values from the text products. + + siteID = self.getSiteID() + cwamask = self.encodeEditArea(siteID) + + if foundGrids: + # get the >=zero gridded average + mask = ffgGrid >= 0.0 + + if not mask.any(): # no points in mask + return ffgTextGrid + + if blending == "Yes": + missingMask = (ffgGrid < 0.0) & cwamask + if missingMask.any(): + ffgGrid[missingMask] = ffgTextGrid[missingMask] + + else: + ffgGrid = ffgTextGrid + + return ffgGrid + + # Returns the QPF sum over the specified timeRange + def getQPFGrid(self, timeRange): +# +# This assumes QPF has a constraint of TC6NG. If not and your office uses QPF6 +# or QPF6hr you will need to change this here accordingly. 
+# + # Inventory all QPF grids from the Fcst database + trList = self.GM_getWEInventory("QPF", timeRange=timeRange) + if len(trList) == 0: + return None + + qpfGrid = self.empty() + for tr in trList: + grid = self.getGrids("Fcst", "QPF", "SFC", timeRange, mode="First") + qpfGrid += grid + + return qpfGrid + + def getOverlappingTR(self, tr6, tr24List): + + for tr24 in tr24List: + if tr6.overlaps(tr24): + return tr24 + return None + + def execute(self, varDict): + +# # Find the nominal start time when we will be making grids +# start = int(time.time() / (24 * 3600)) * (24 * 3600) # self._gmtime() +# for i in range(start, start + (72 * 3600),(6 * 3600)): +# bTime = self.baseModelTime(i) +# print "time:", time.asctime(time.gmtime(i)), "BaseTime:", time.asctime(time.gmtime(bTime)) + + + ### CONFIGURATION SECTION ################################ + ### Levels must exactly match the levels in the inland threat + ### weather element. + ### Next two lines changed for 2018 with 17.3.1 baseline. +# ratios = [0.0, 0.75, 1.0, 1.5, 100.0] +# erps = [0.0, 5.0, 10.0, 20.0, 50.0, 100.0] # + + ratios = [0.0, 0.75, 1.0, 1.5, 100.0] + erps = [0.0, 5.0, 10.0, 20.0, 50.0, 100.0] # + + threatMatrix = [ + ["None", "Elevated", "Elevated", "Mod" ], # lowest ERP + ["Elevated", "Elevated", "Mod", "High" ], + ["Elevated", "Mod", "Mod", "High" ], + ["Mod", "Mod", "High", "Extreme"], + ["High", "High", "Extreme", "Extreme"], # highest # ERP + ] # low ------ QPF/FFG ratio -------->high +# +# Old matrix. Keept it for reference. +# threatMatrix = [ +# ["None", "Elevated", "Mod", "High" ], # lowest ERP +# ["Elevated", "Mod", "Mod", "High" ], +# ["Elevated", "Mod", "High", "High" ], +# ["Mod", "High", "High", "Extreme"], +# ["High", "High", "Extreme", "Extreme"], # highest # ERP +# ] # low ------ QPF/FFG ratio -------->high +# + # COMMENTS: The list of FFG products that contain FFG data for your WFO + # The following is an example for Miami. Default list is empty. 
You must + # populate it with your CWA FFG guidance. +# productList = ["ATLFFGMFL", "ATLFFGTBW", "ATLFFGMLB","ATLFFGKEY"] + productList = [] + if len(productList) == 0: + self.statusBarMsg("You have not configured Text FFG in Procedure. Create a site level copy, and configure your text FFG Guidance. Search for COMMENTS in the procedure.", "S") + + ### END CONFIGURATION SECTION ################################# + + try: + threatKeys = self.getDiscreteKeys("FloodingRainThreat") + except: + threatKeys = ["None", "Elevated", "Mod", "High", "Extreme"] + + baseTime = self.getProbBaseTime().unixTime() + anchorTimeRange = self.GM_makeTimeRange(baseTime, baseTime + (3600 * 72)) + + ppffgBaseTime = self.getPpffgBaseTime().unixTime() + ppffgTimeRange = self.GM_makeTimeRange(ppffgBaseTime, ppffgBaseTime + (3600 * 72)) + + #print "Prob TimeRange:", anchorTimeRange + #print "ppffg TimeRange", ppffgTimeRange + + # make a 72 hour timeRange and a list of 6 hour timeRanges based on the anchorTime + probTRList = self.makeTimeRangeList(anchorTimeRange, 6) + ppffgTRList = self.makeTimeRangeList(ppffgTimeRange, 24) + + # Fetch the probabilistic value selected by the user and make the weName + probGridDict = {} + probStr = varDict["Probabilistic QPF Exceedance Level to use:"] + + # If we're not using prob guidance, fill the dictionary with grids of zeros + if probStr == "Don't Use Prob Guidance": + for tr in probTRList: + probGridDict[tr] = np.zeros(self.getGridShape(), np.float32) + probWEName = "TR00pct6hr" + # If we're using prob guidance, make the prob string into an integer so we can figure out the weName + else: + probStr = probStr[:-1] + # Make an int value so we can subtract it from 100 to get the weName + try: + probValue = int(probStr) # This should always succeed, but just in case... + except: + self.statusBarMsg("Error parsing probability string. 
" + probStr, "U") + return + # Fetch the probabilistic guidance + probWEName = "TP" + str(100 - probValue) + "pct6hr" + probGridDict = self.getERPGuidance(probWEName, probTRList) + if not probGridDict: # no prob grids found for this weName + self.statusBarMsg(probWEName + " ERP guidance is not available. Please re-run this tool with a different prob exceeedance level or use Don't Use Prob Guidance option in GUI.", "S") + return + + # Create an empty discrete grid + maxFloodThreat = np.zeros(self.getGridShape(), np.int8) + + # Fetch the FFG grid either from gridded data or the text product + ffgGrid = self.getRFCFlashFloodGrid(productList, varDict) + + # calculate the areas where the FFG is missing. We will fill these values with None eventually + missingFFGMask = ffgGrid < 0.0 + + # Get the ERP grids and stuff them in six hour time blocks to match + # the cummulative QPF grids will create later + ppffgGridDict = self.getERPGuidance("ppffg", ppffgTRList) + + if not ppffgGridDict: # Didn't find any ppffg guidance + self.statusBarMsg("The current ERP guidance is not available. Please re-run this tool at a later time.", "S") + return + + #### DEBUG DEBUG DEBUG ######################################################################################## + + for tr in probTRList: + self.createGrid("Fcst", probWEName, "SCALAR", probGridDict[tr]/25.4, tr, precision=2) + + for tr in ppffgTRList: + self.createGrid("Fcst", "ERP", "SCALAR", ppffgGridDict[tr], tr, precision=2) + + #### DEBUG DEBUG DEBUG ######################################################################################## + + + for i in range(len(probTRList)): + probTR = probTRList[i] + #print "probTR:", probTR + # get the EPR grid + + # Fetch the erp grids and reference by timeRange (24 hours each) + # Use the probTR in this loop and timeRange.overlaps to figure out which erp grid to use. 
+ # All other code should be the same + ppffgTR = self.getOverlappingTR(probTR, ppffgTRList) + if ppffgTR is None: + continue + + ppffgGrid = ppffgGridDict[ppffgTR] #+ in pct (%) + # get the probabilistic grid + pQPFGrid = probGridDict[probTR] / 25.4 # convert mm to inches + + qpfFcstGrid = self.getQPFGrid(probTR) + + # Get the maximum of the Fcst QPF and the ERP probabilistic grid + qpfGrid = np.maximum(qpfFcstGrid, pQPFGrid) + + self.createGrid("Fcst", "MaxFcstPQPF", "SCALAR", qpfGrid, probTR, precision=2) + + if ffgGrid is None or qpfGrid is None: + self.statusBarMsg("FlashFlood or QPF grids missing at timeRange:" + + str(probTR), "S") + continue + + if ppffgGrid is None: + self.statusBarMsg("ERP grids missing at timeRange:" + str(probTR), "S") + continue + + tempffgGrid = ffgGrid.copy() + tempffgGrid[ffgGrid == 0.0] = 0.1 + ratioGrid = qpfGrid / tempffgGrid + + # Clip the ratioGrid to 1000.0 to prevent problems when displaying + ratioGrid.clip(0.0, 1000.0, ratioGrid) + + self.createGrid("Fcst", "FFG", "SCALAR", ffgGrid, probTR, + minAllowedValue = -9, maxAllowedValue=10, precision=2) + self.createGrid("Fcst", "QPFtoFFGRatio", "SCALAR", ratioGrid, probTR, + minAllowedValue = 0, maxAllowedValue=1000, precision=2) + + floodThreat = np.zeros(self.getGridShape(), np.int8) + + for e in range(len(erps) - 1): + for r in range(len(ratios) - 1): + eMin = erps[e] + eMax = erps[e+1] + rMin = ratios[r] + rMax = ratios[r+1] + ratioMask = (ratioGrid >= rMin) & (ratioGrid < rMax) + erpMask = (ppffgGrid >= eMin) & (ppffgGrid < eMax) + mask = ratioMask & erpMask + + keyIndex = self.getIndex(threatMatrix[e][r], threatKeys) #e is y and r ix x + floodThreat[mask] = keyIndex + + # Now set the values we found missing to the None key + noneIndex = self.getIndex("None", threatKeys) + floodThreat[missingFFGMask] = noneIndex + + # Create the grid + self.createGrid("Fcst", "FloodThreat", "DISCRETE", + (floodThreat, threatKeys), probTR, + discreteKeys=threatKeys, + discreteOverlap=0, + 
discreteAuxDataLength=2, + defaultColorTable="gHLS_new") + + maxFloodThreat = np.maximum(floodThreat, maxFloodThreat) + + # Make a big timeRange and delete all the FloodingRainThreat grids + startTime = int(self._gmtime().unixTime()/ 3600) * 3600 - (24 * 3600) + endTime = startTime + (24 * 3600 * 10) + dbTR = self.GM_makeTimeRange(startTime, endTime) + cTime = int(self._gmtime().unixTime()/ 3600) * 3600 + end = cTime + (6*3600) + threatTR = self.GM_makeTimeRange(cTime, end) + self.deleteCmd(['FloodingRainThreat'], dbTR) + self.createGrid("Fcst", "FloodingRainThreat", "DISCRETE", + (maxFloodThreat, threatKeys), threatTR, + discreteKeys=threatKeys, + discreteOverlap=0, + discreteAuxDataLength=2, + defaultColorTable="gHLS_new") + + + return diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCImpactGraphics_KML.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCImpactGraphics_KML.py index 8830ca683b..499110374e 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCImpactGraphics_KML.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCImpactGraphics_KML.py @@ -1,267 +1,267 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TCImpactGraphics_KML -# -# Author: P. Santos/Joe Maloney - 4/19/2011 -# -# Last edited: 27 July 2012 - Shannon/R. Anderson - made A2 compatible -# Last Modified 30 July 2012 - J Maloney/P. Santos - made it work with A2 -# Modified 09 Sept 2014 - J. Maloney - for 2015 season, removed MarineThreat, -# renamed CoastalThreat -> StormSurgeThreat, InlandThreat -> FloodingRainThreat, -# removed verylow from kml styles in header -# Modified 11 Sept 2014 - J. Maloney/S. White - site ID is now automatically -# retrieved from the environment. 
-# Modified 16 Sept 2014 - J. Maloney/T. Lefebvre - impact statements are now -# read from TCVDictionary (in Utilities) and formatted on-the-fly! -# Modified 21 Oct 2014 - J. Maloney - products now go in /awips2/GFESuite/hti/ -# data. -# Modified 9 June, 2017: Remove old labels for 2018. PS -# Modified 21 June 2017 - JCM - added CDATA tags to Placemark descriptions -# Modified 25 July 2017 - PS/SEW - added EA config and threatPhrase dicts -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -MenuItems = ["Populate"] - -import SmartScript -import numpy as np -import time -import os -import TimeRange -import AbsTime -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData -CoordinateType = ReferenceData.CoordinateType -import TCVDictionary - -### CONFIG section READ ########## - -# The kml txt files will be placed in /awips2/GFESuite/hti/data/'threatWEName'.kml.txt. -# From there it is synchronized to the web servers along with the graphics. -# In the servers a php script will convert the # file name so that a browser can properly interpret -# it as a kml file to be open with Google Earth or equivalent application. - -# Also, make sure the ownership of the kml.txt files created is fxa:fxalpha with permissions set -# to 666. - -# You can test the files created by copying them outside AWIPS and renaming them .kml. -# Then open them with Google Earth. 
- -### Make the edit areas below for Wind, FloodingRain, and Tornado -### your local Land-only CWA edit area -###################################################################### - -editAreaDict = { - "StormSurgeThreat" : "StormSurgeWW_EditArea_Local", # Leave as-is - "WindThreat" : "MHX", # Land-only EA - "FloodingRainThreat" : "MHX", # Land-only EA - "TornadoThreat" : "MHX" # Land-only EA - } - -##### End Config ######### - -threatPhraseDict = { - "Wind": { - "Extreme": "Potential for wind greater than 110 mph", - "High": "Potential for wind 74 to 110 mph", - "Mod": "Potential for wind 58 to 73 mph", - "Elevated": "Potential for wind 39 to 57 mph", - "None": "Wind less than 39 mph" - }, - "Storm Surge": { - "Extreme": "Potential for storm surge flooding greater than 9 feet above ground", - "High": "Potential for storm surge flooding greater than 6 feet above ground", - "Mod": "Potential for storm surge flooding greater than 3 feet above ground", - "Elevated": "Potential for storm surge flooding greater than 1 foot above ground", - "None": "Little to no storm surge flooding" - }, - "Flooding Rain": { - "Extreme": "Potential for extreme flooding rain", - "High": "Potential for major flooding rain", - "Mod": "Potential for moderate flooding rain", - "Elevated": "Potential for localized flooding rain", - "None": "Little or no potential for flooding rain" - }, - "Tornado": { - "Extreme": "Potential for an outbreak of tornadoes", - "High": "Potential for many tornadoes", - "Mod": "Potential for several tornadoes", - "Elevated": "Potential for a few tornadoes", - "None": "Tornadoes not expected" - } - } - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def makeTimeRange(self): - cTime = int(self._gmtime().unixTime()/ 3600) * 3600 - startTime = AbsTime.AbsTime(cTime - 12 * 3600) - endTime = AbsTime.AbsTime(cTime + 12 * 3600) # 12 hours - tr = TimeRange.TimeRange(startTime, endTime) - - return tr 
- - def makeThreatKML(self,threatWEName,threatKeys,threatGrid_kml): - - kml_filename = '/awips2/GFESuite/hti/data/' + threatWEName + '.kml.txt' - kml = open(kml_filename, 'w') - kml.write('\n') - kml.write('\n') - kml.write(''+threatWEName+'.kml\n\n') - kml.write('\n') - kml.write('\n') - kml.write('\n') - kml.write('\n') - kml.write(''+threatWEName+'0\n') - kml.write('Product LegendProduct Legend1') - - # each threatWEName has its own separate legend - # need site id, in lowercase - SiteID = self.getSiteID().lower() - - if threatWEName == "StormSurgeThreat": - kml.write('http://www.nws.noaa.gov/images/ghls/' + SiteID + '/stormsurgethreatlegend.png') - elif threatWEName == "WindThreat": - kml.write('http://www.nws.noaa.gov/images/ghls/' + SiteID + '/windthreatlegend.png') - elif threatWEName == "FloodingRainThreat": - kml.write('http://www.nws.noaa.gov/images/ghls/' + SiteID + '/floodingrainthreatlegend.png') - elif threatWEName == "TornadoThreat": - kml.write('http://www.nws.noaa.gov/images/ghls/' + SiteID + '/tornadothreatlegend.png') - - # Complete the kml legend - kml.write('') - - #threatKeys = self.getDiscreteKeys(threatWEName) - #print "THREATKEYS ARE: ", threatKeys - - # initialize a flag. It will only be NO for the first polygon in the file. 
- flag = 'NO' - - for key in threatKeys: - #print "Key:", key - - # get index for this key - hazIndex = self.getIndex(key, threatKeys) - #print "hazIndex:", hazIndex - - mask = np.equal(threatGrid_kml, hazIndex) - - #print "Number of Grid Points: ", sum(sum(mask)) - - if sum(sum(mask)) == 0: - continue - - # make an editArea from the mask - editArea = self.decodeEditArea(mask) - - # extract the polygons from the edit area - polygons = editArea.getPolygons(CoordinateType.LATLON) - - # pull out the impact statements from the TCVDictionary - # We need to match the threatWEName to the entries found - # in the TCVDictionary - if threatWEName == "TornadoThreat": - threat='Tornado' - elif threatWEName == "StormSurgeThreat": - threat='Storm Surge' - elif threatWEName == "WindThreat": - threat='Wind' - else: - threat='Flooding Rain' - - - if key =="Extreme": - styleUrl = '#extreme' - elif key == "High": - styleUrl = '#high' - elif key =="Mod": - styleUrl = '#moderate' - elif key =="Elevated": - styleUrl = '#low' - else: - styleUrl = '#none' - -# Retrieve the new threat description from the dictionary - threatPhrase = threatPhraseDict[threat][key] - - # Extract the appropriate list from the dictionary, join them - # into a string, and make them separate bullets - impactStatement = "" - impactList = TCVDictionary.PotentialImpactStatements[threat][key] - impactStatement = "
* ".join(impactList) - impactStatement = "* " + impactStatement -# print "impactList:", impactList -# print "impactStatement:", impactStatement - - # Put our kml header together - kmlHeader = 'Threat Level - ' + threatPhrase + 'Potential Impacts Include:
' + impactStatement + ']]>
\n' + styleUrl + '\n' - - for i in xrange(polygons.getNumGeometries()): - poly = polygons.getGeometryN(i) - shell = poly.getExteriorRing(); - if shell: - # If shell is true, it's a new polygon - if flag == 'YES': - # If flag is YES, this is not the first polygon we're writing out - # so close the previous polygon before continuing. - kml.write('
\n') - - kml.write(kmlHeader) - kml.write('') - #print "Outer shell coordinates:" - for c in shell.getCoordinates(): - #print "x:",c.x,"y:",c.y - line = str(c.x) + ',' + str(c.y) + ',0 \n' - kml.write(line) - - kml.write('') - # Now that we've written at least one polygon, set flag to YES - flag = 'YES' - - # CHECK FOR INNER LOOPS (HOLES) - for j in xrange(poly.getNumInteriorRing()): - hole = poly.getInteriorRingN(j) - #print "Hole",j,"coordinates:" - kml.write('') - for c in hole.getCoordinates(): - #print "x:",c.x,"y:",c.y - line = str(c.x) + ',' + str(c.y) + ',0 \n' - kml.write(line) - - kml.write('') - - kmlEnd='\n
\n' - kml.write(kmlEnd) - kml.close() - - return - - def execute(self, varDict): - - tr = self.makeTimeRange() - threatlist = ['StormSurgeThreat','WindThreat','FloodingRainThreat','TornadoThreat'] - - for grid in threatlist: - threatWEName = grid - threatGrid, threatKeys = self.getGrids("Fcst", threatWEName, "SFC", tr) - - localEditArea = editAreaDict[threatWEName] -# print "EDIT AREA DICT IS: ", EditArea - editArea = self.getEditArea(localEditArea) - - threatEditArea = self.encodeEditArea(editArea) - threatGrid_kml = np.where(threatEditArea, threatGrid, threatGrid-9.0) - - self.makeThreatKML(threatWEName,threatKeys,threatGrid_kml) - - os.system("/awips2/GFESuite/hti/bin/kml_legend.sh") - - return +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TCImpactGraphics_KML +# +# Author: P. Santos/Joe Maloney - 4/19/2011 +# +# Last edited: 27 July 2012 - Shannon/R. Anderson - made A2 compatible +# Last Modified 30 July 2012 - J Maloney/P. Santos - made it work with A2 +# Modified 09 Sept 2014 - J. Maloney - for 2015 season, removed MarineThreat, +# renamed CoastalThreat -> StormSurgeThreat, InlandThreat -> FloodingRainThreat, +# removed verylow from kml styles in header +# Modified 11 Sept 2014 - J. Maloney/S. White - site ID is now automatically +# retrieved from the environment. +# Modified 16 Sept 2014 - J. Maloney/T. Lefebvre - impact statements are now +# read from TCVDictionary (in Utilities) and formatted on-the-fly! +# Modified 21 Oct 2014 - J. Maloney - products now go in /awips2/GFESuite/hti/ +# data. +# Modified 9 June, 2017: Remove old labels for 2018. 
PS +# Modified 21 June 2017 - JCM - added CDATA tags to Placemark descriptions +# Modified 25 July 2017 - PS/SEW - added EA config and threatPhrase dicts +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +MenuItems = ["Populate"] + +import SmartScript +import numpy as np +import time +import os +import TimeRange +import AbsTime +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData +CoordinateType = ReferenceData.CoordinateType +import TCVDictionary + +### CONFIG section READ ########## + +# The kml txt files will be placed in /awips2/GFESuite/hti/data/'threatWEName'.kml.txt. +# From there it is synchronized to the web servers along with the graphics. +# In the servers a php script will convert the # file name so that a browser can properly interpret +# it as a kml file to be open with Google Earth or equivalent application. + +# Also, make sure the ownership of the kml.txt files created is fxa:fxalpha with permissions set +# to 666. + +# You can test the files created by copying them outside AWIPS and renaming them .kml. +# Then open them with Google Earth. 
+ +### Make the edit areas below for Wind, FloodingRain, and Tornado +### your local Land-only CWA edit area +###################################################################### + +editAreaDict = { + "StormSurgeThreat" : "StormSurgeWW_EditArea_Local", # Leave as-is + "WindThreat" : "MHX", # Land-only EA + "FloodingRainThreat" : "MHX", # Land-only EA + "TornadoThreat" : "MHX" # Land-only EA + } + +##### End Config ######### + +threatPhraseDict = { + "Wind": { + "Extreme": "Potential for wind greater than 110 mph", + "High": "Potential for wind 74 to 110 mph", + "Mod": "Potential for wind 58 to 73 mph", + "Elevated": "Potential for wind 39 to 57 mph", + "None": "Wind less than 39 mph" + }, + "Storm Surge": { + "Extreme": "Potential for storm surge flooding greater than 9 feet above ground", + "High": "Potential for storm surge flooding greater than 6 feet above ground", + "Mod": "Potential for storm surge flooding greater than 3 feet above ground", + "Elevated": "Potential for storm surge flooding greater than 1 foot above ground", + "None": "Little to no storm surge flooding" + }, + "Flooding Rain": { + "Extreme": "Potential for extreme flooding rain", + "High": "Potential for major flooding rain", + "Mod": "Potential for moderate flooding rain", + "Elevated": "Potential for localized flooding rain", + "None": "Little or no potential for flooding rain" + }, + "Tornado": { + "Extreme": "Potential for an outbreak of tornadoes", + "High": "Potential for many tornadoes", + "Mod": "Potential for several tornadoes", + "Elevated": "Potential for a few tornadoes", + "None": "Tornadoes not expected" + } + } + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def makeTimeRange(self): + cTime = int(self._gmtime().unixTime()/ 3600) * 3600 + startTime = AbsTime.AbsTime(cTime - 12 * 3600) + endTime = AbsTime.AbsTime(cTime + 12 * 3600) # 12 hours + tr = TimeRange.TimeRange(startTime, endTime) + + return tr 
+ + def makeThreatKML(self,threatWEName,threatKeys,threatGrid_kml): + + kml_filename = '/awips2/GFESuite/hti/data/' + threatWEName + '.kml.txt' + kml = open(kml_filename, 'w') + kml.write('\n') + kml.write('\n') + kml.write(''+threatWEName+'.kml\n\n') + kml.write('\n') + kml.write('\n') + kml.write('\n') + kml.write('\n') + kml.write(''+threatWEName+'0\n') + kml.write('Product LegendProduct Legend1') + + # each threatWEName has its own separate legend + # need site id, in lowercase + SiteID = self.getSiteID().lower() + + if threatWEName == "StormSurgeThreat": + kml.write('http://www.nws.noaa.gov/images/ghls/' + SiteID + '/stormsurgethreatlegend.png') + elif threatWEName == "WindThreat": + kml.write('http://www.nws.noaa.gov/images/ghls/' + SiteID + '/windthreatlegend.png') + elif threatWEName == "FloodingRainThreat": + kml.write('http://www.nws.noaa.gov/images/ghls/' + SiteID + '/floodingrainthreatlegend.png') + elif threatWEName == "TornadoThreat": + kml.write('http://www.nws.noaa.gov/images/ghls/' + SiteID + '/tornadothreatlegend.png') + + # Complete the kml legend + kml.write('') + + #threatKeys = self.getDiscreteKeys(threatWEName) + #print "THREATKEYS ARE: ", threatKeys + + # initialize a flag. It will only be NO for the first polygon in the file. 
+ flag = 'NO' + + for key in threatKeys: + #print "Key:", key + + # get index for this key + hazIndex = self.getIndex(key, threatKeys) + #print "hazIndex:", hazIndex + + mask = np.equal(threatGrid_kml, hazIndex) + + #print "Number of Grid Points: ", sum(sum(mask)) + + if sum(sum(mask)) == 0: + continue + + # make an editArea from the mask + editArea = self.decodeEditArea(mask) + + # extract the polygons from the edit area + polygons = editArea.getPolygons(CoordinateType.LATLON) + + # pull out the impact statements from the TCVDictionary + # We need to match the threatWEName to the entries found + # in the TCVDictionary + if threatWEName == "TornadoThreat": + threat='Tornado' + elif threatWEName == "StormSurgeThreat": + threat='Storm Surge' + elif threatWEName == "WindThreat": + threat='Wind' + else: + threat='Flooding Rain' + + + if key =="Extreme": + styleUrl = '#extreme' + elif key == "High": + styleUrl = '#high' + elif key =="Mod": + styleUrl = '#moderate' + elif key =="Elevated": + styleUrl = '#low' + else: + styleUrl = '#none' + +# Retrieve the new threat description from the dictionary + threatPhrase = threatPhraseDict[threat][key] + + # Extract the appropriate list from the dictionary, join them + # into a string, and make them separate bullets + impactStatement = "" + impactList = TCVDictionary.PotentialImpactStatements[threat][key] + impactStatement = "
* ".join(impactList) + impactStatement = "* " + impactStatement +# print "impactList:", impactList +# print "impactStatement:", impactStatement + + # Put our kml header together + kmlHeader = 'Threat Level - ' + threatPhrase + 'Potential Impacts Include:
' + impactStatement + ']]>
\n' + styleUrl + '\n' + + for i in range(polygons.getNumGeometries()): + poly = polygons.getGeometryN(i) + shell = poly.getExteriorRing(); + if shell: + # If shell is true, it's a new polygon + if flag == 'YES': + # If flag is YES, this is not the first polygon we're writing out + # so close the previous polygon before continuing. + kml.write('
\n') + + kml.write(kmlHeader) + kml.write('') + #print "Outer shell coordinates:" + for c in shell.getCoordinates(): + #print "x:",c.x,"y:",c.y + line = str(c.x) + ',' + str(c.y) + ',0 \n' + kml.write(line) + + kml.write('') + # Now that we've written at least one polygon, set flag to YES + flag = 'YES' + + # CHECK FOR INNER LOOPS (HOLES) + for j in range(poly.getNumInteriorRing()): + hole = poly.getInteriorRingN(j) + #print "Hole",j,"coordinates:" + kml.write('') + for c in hole.getCoordinates(): + #print "x:",c.x,"y:",c.y + line = str(c.x) + ',' + str(c.y) + ',0 \n' + kml.write(line) + + kml.write('') + + kmlEnd='\n
\n' + kml.write(kmlEnd) + kml.close() + + return + + def execute(self, varDict): + + tr = self.makeTimeRange() + threatlist = ['StormSurgeThreat','WindThreat','FloodingRainThreat','TornadoThreat'] + + for grid in threatlist: + threatWEName = grid + threatGrid, threatKeys = self.getGrids("Fcst", threatWEName, "SFC", tr) + + localEditArea = editAreaDict[threatWEName] +# print "EDIT AREA DICT IS: ", EditArea + editArea = self.getEditArea(localEditArea) + + threatEditArea = self.encodeEditArea(editArea) + threatGrid_kml = np.where(threatEditArea, threatGrid, threatGrid-9.0) + + self.makeThreatKML(threatWEName,threatKeys,threatGrid_kml) + + os.system("/awips2/GFESuite/hti/bin/kml_legend.sh") + + return diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCMWindTool.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCMWindTool.py index deeb46f092..e01a7b7d20 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCMWindTool.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCMWindTool.py @@ -1,1924 +1,1924 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# TCMWindTool -# -# Version 2.7.1 2 Sept 2010 Modified to Fix RCL error -# Version 2.7.2 01 Feb 2011 Fixed Pie Slice Algorithm/Added Backgroun Options -# Version Last 14 Apr 2014 Added User-editable max winds -# Modified On 22 May 2014 Introduced option for handling asymetry -# in inner core (RMW), option to use 85th wind radii reduction based on -# 2009 paper by DeMaria or the NCST Bias Correction scheme outside radius -# of MaxWind, corrected problem with ring of lower wind value introduced -# at times in the transition between 34 knots wind radii and background -# field, and introduced option to use preliminary TCM message being -# pushed to all offices text databases beginning with 2014 season. -# -# Modified: 1 Jun 2014 to fix bugs with Lat grids and add option -# to use WindReductionFactor grids for Mid Atlantic offices. -# Modified: 6 June 2014 to fix bugs with large reduction factors over land. -# Modified: 9 June 2014 to fix GUI option to run or not over Selected Time Range. -# -# Modified: 2 July to fix decoding of PRE TCM files -# Whatever options are needed should be carefully coordinated among -# offices. -# -# Last Modified: October 28, 2016 to add mis to maxwindswath == "Yes" and adjust -# shift variable in interpolateQuadrants method. -# Submitted for 17.1.1 -# -# Author: Tom LeFebvre -# Contributor: Pablo Santos -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify -MenuItems = ["Populate"] - -VariableList = [("Product to\ndecode:", [], "check", - ["preTCM","WRKTCM","TCMAT1", "TCMAT2", "TCMAT3", "TCMAT4", "TCMAT5", - "TCMEP1", "TCMEP2", "TCMEP3", "TCMEP4", "TCMEP5"]), - ("Product to\n decode:", [], "check", - ["PREAT1", "PREAT2", "PREAT3", "PREAT4", "PREAT5", - "PREEP1", "PREEP2", "PREEP3", "PREEP4", "PREEP5"]), -# ("Background\nModel:", "Fcst", "radio", ["GFS0p5degGbl", "UKMET", "ECMWFHiRes", "Fcst"]), - ("Number of Pie Slices?", "16", "radio", ["4", "8", "12", "16", "24"]), - ("Eye Diameter:", 0, "scale", [0, 100], 1), - ("34 knot radius at 3 days (NM):", 100, "scale", [0, 1000], 10), - ("34 knot radius at 4 days (NM):", 100, "scale", [0, 1000], 10), - ("34 knot radius at 5 days (NM):", 100, "scale", [0, 1000], 10), - ("Decrease Wind over Land by (%):", 15, "scale", [-20, 50], 1), - ("Make Grids over \nSelected Time Only:", "No", "radio", ["Yes", "No"]), - ("MaxWind Swath for \nTCWindThreat?", "No", "radio", ["Yes", "No"]), - ("Define Asymmetrical \nMax Winds?", "No", "radio", ["Yes", "No"]), - ("Reduce Radii by 15% or \n NC State Bias Correction", "Reduce by 15%", - "radio", ["Reduce by 15%", "NC State Bias Correction"]), - ("Constant Land\nReduction (Slider Bar)\nor Wind Reduction\nFactor Grid?", - "Constant", "radio", ["Constant", "Grid"]), - ] - -try: # See if this is the AWIPS I environment - from Numeric import * - import AFPS - AWIPS_ENVIRON = "AWIPS1" -except: # Must be the AWIPS II environment - from numpy import * - import AbsTime - import TimeRange - AWIPS_ENVIRON = "AWIPS2" - -import SmartScript -import DefineMaxWindGUI -import MetLib - -import popen2, string, time, os, cPickle -import Exceptions, types, copy - -class TCMDecoder: - def __init__(self): - self.pos = 0 - # key words in TCM products from NCEP - self.keyWordDict = {"HURRICANE CENTER" : self.decodeProductTime, - "FORECAST VALID" : self.decodeWindForecast, - "CENTER LOCATED NEAR" : 
self.decodeCenterLocation, - "CENTER LOCATED INLAND NEAR" : self.decodeCenterLocation, - "MINIMUM CENTRAL PRESSURE" : self.decodeCentralPressure, - "MAX SUSTAINED WINDS" : self.decodeMaxSustainedWinds, - "MAX WIND" : self.decodeMaxWind, - "OUTLOOK VALID" : self.decodeWindForecast, - "EYE DIAMETER" : self.decodeEyeDiameter, - "64 KT..." : self.decodeRadii, - "50 KT..." : self.decodeRadii, - "34 KT..." : self.decodeRadii, - # key words for JTWC products - "WTPN" : self.decodeJTWCProductTime, - "WARNING POSITION:" : self.decodeJTWCTimeCenter, - "VALID AT:" : self.decodeJTWCWindForecast, - "RADIUS OF 034 KT WINDS" : self.decodeJTWCRadii, - "RADIUS OF 050 KT WINDS" : self.decodeJTWCRadii, - "RADIUS OF 064 KT WINDS" : self.decodeJTWCRadii, - "RADIUS OF 100 KT WINDS" : self.decodeJTWCRadii, - " ---" : self.endJTWCWindForecast, - "REMARKS:" : self.stopDecodingJTWC, - } - - self.fcstList = [] # a place to store all of the forecasts - - self.text = [] # the text product - - self.currentFcst = {} # the current forecast we are docoding - - self.baseProductTime = 0 - - self.foundEyeDiameter = 0 - - self.AltFileName = "" - - def calcEyeDiameter(self, center, maxWind): - lat = center[0] # latitude in degrees - maxWind = maxWind / 1.944 # convert to meters per second - rmw = 46.29 * exp(-0.0153 * maxWind + 0.0166 * lat) - - # convert to diameter and convert from km to nm - ed = rmw * 2.0 / 1.852 - return ed - - def stripText(self): - endStr = chr(13) + chr(13) + chr(10) - for i in range(len(self.text)): - self.text[i] = string.replace(self.text[i], endStr, "") - return - - def getFcstList(self): - return self.fcstList - - def getBaseProductTime(self): - return self.baseProductTime - - def getAltInfoFileName(self): - return self.AltFileName - - def currentLine(self): - return self.text[self.pos] - - def nextLine(self): - self.pos = self.pos + 1 - if self.pos < len(self.text): - return self.text[self.pos] - else: - return "" - - def monthNum(self, monthStr): - monthList = ["JAN", 
"FEB", "MAR", "APR", "MAY", "JUN", - "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"] - - try: - return monthList.index(monthStr) + 1 - except ValueError: - return 0 - - def convertBaseTime(self, timeStr): - # timeStr format: "HHMM UTC DAY MON DD YYYY" - - # extract time parts from the str - hour = int(timeStr[0:2]) - minute = int(timeStr[2:4]) - strList = string.split(timeStr) - monthStr = strList[3] - month = self.monthNum(monthStr) - day = int(strList[4]) - year = int(strList[5]) - - # time.mktime returns time in seconds but in local time - baseTime = time.mktime((year, month, day, hour, minute, 0, 0, 0, 0)) - - # Adjustment to UTC - diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) - - # subtract timeZone and round to the nearest hour - roundedTime = int((baseTime - diffTime) / 3600) * 3600 - - return roundedTime - - def convert_ddhhmm(self, ddhhmmStr, baseTime): - - # remove the slash if present - ddhhmmStr = string.replace(ddhhmmStr, "/", "") - - if baseTime == 0: - baseTime = time.time() - - # extract the time parts - dayStr = ddhhmmStr[0:2] - hourStr = ddhhmmStr[2:4] - minStr = ddhhmmStr[4:6] - day = int(dayStr) - hour = int(hourStr) - minute = int(minStr) - tupleTime = time.gmtime(baseTime) - year = tupleTime[0] - month = tupleTime[1] - # see if we crossed over to a new month - if tupleTime[2] > day: - month = month + 1 - if month > 12: - month = 1 - year = year + 1 - - newTuple = (year, month, day, hour, minute, tupleTime[5], - tupleTime[6], tupleTime[7], tupleTime[8]) - - secondsTime = time.mktime(newTuple) - # Adjustment to UTC - diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) - return secondsTime - diffTime # subtract timeZone - - def decodeProductTime(self): - # extract the alt filename - self.decodeAltFileName() - # Time of the product found on the next line - timeStr = self.nextLine() - # sanity check for the time string - hhmm = timeStr[0:4] - for c in hhmm: - if not c in string.digits: - return - - baseTime = 
self.convertBaseTime(timeStr) - self.baseProductTime = baseTime - return - - def decodeAltFileName(self): - nameStr = self.currentLine() - parts = string.split(nameStr) - - self.AltFileName = parts[-1] # grab the last string token - - return - - def decodeCenterLocation(self): - locStr = self.currentLine() - # check for the repeat center....don't want this one - if string.find(locStr, "REPEAT") >= 0: - return - - keyWord = "NEAR" - pos = string.find(locStr, keyWord) - if pos > -1: # found it - locStr = locStr[pos + len(keyWord):] - tokenList = string.split(locStr) - if len(tokenList) >= 2: - lat = self.decodeLatLonToken(tokenList[0]) - lon = self.decodeLatLonToken(tokenList[1]) - - if len(tokenList) > 3: # grab the time - validTime = self.convert_ddhhmm(tokenList[3], self.baseProductTime) - # New fcst (analysis actually) - self.currentFcst = {} - self.currentFcst['validTime'] = validTime - self.currentFcst['centerLocation'] = (lat, lon) - self.currentFcst['radii'] = {} - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - - return - - def decodeCentralPressure(self): - keyWord = "MINIMUM CENTRAL PRESSURE" - presStr = self.currentLine() - pos = string.find(presStr, keyWord) - if pos > -1: # found it - presStr = presStr[pos + len(keyWord):] - - return - - def decodeMaxSustainedWinds(self): - keyWord = "MAX SUSTAINED WINDS" - windStr = self.currentLine() - pos = string.find(windStr, keyWord) - if pos > -1: # found it - windList = [] - tokenList = string.split(windStr) - for i in range(len(tokenList)): - if string.find(tokenList[i], "KT") >= 0: - windList.append(float(tokenList[i - 1])) - - # Sometimes there is no max wind/gust reported - if windList == []: - return - - # store the max wind - self.currentFcst["maxWind"] = windList[0] - self.currentFcst["maxGust"] = windList[1] - - # if we have a center location and a max wind we can calc - # the eye diameter - if self.currentFcst.has_key('centerLocation') and \ - self.currentFcst.has_key('maxWind'): - # if it's 
zero it's not in the product and the user didn't - # change it, so calculate it based on the Willoughby formula - if self.currentFcst.has_key('eyeDiameter') and \ - self.currentFcst['eyeDiameter'] == 0: - self.currentFcst['eyeDiameter'] = self.calcEyeDiameter( - self.currentFcst['centerLocation'], - self.currentFcst['maxWind']) - else: # otherwise use what's been defined or read from the text - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - - return - - def decodeMaxWind(self): - str = self.currentLine() - str = string.replace(str, '.', ' ') # remove ... - tokenList = string.split(str) - if len(tokenList) >= 6: - maxWind = float(tokenList[2]) - maxGust = float(tokenList[5]) - - # store in current fcst - self.currentFcst["maxWind"] = maxWind - self.currentFcst["maxGust"] = maxGust - - # if we have a center location and a max wind we can calc - # the eye diameter - if self.currentFcst.has_key('centerLocation') and \ - self.currentFcst.has_key('maxWind'): - # if it's zero it's not in the product and the user didn't - # change it, so calculate it based on the Willoughby formula - if self.currentFcst.has_key('eyeDiameter') and \ - self.currentFcst['eyeDiameter'] == 0: - self.currentFcst['eyeDiameter'] = self.calcEyeDiameter( - self.currentFcst['centerLocation'], - self.currentFcst['maxWind']) - else: # otherwise use what's been defined or read from the text - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - - return - - def decodeRadii(self): - # if there's no currentFcst dict, we cannot continue - if self.currentFcst == {}: - return - - - str = self.currentLine() - str = string.replace(str, '.', ' ') # remove ... 
- tokenList = string.split(str) - # check for KT in the second slot - if len(tokenList) < 4 or tokenList[1] != "KT": - return - radiiWindValue = float(tokenList[0]) - dirList = ["NE", "SE", "SW", "NW"] - radiusList = [] - for token in tokenList: - for d in dirList: - pos = string.find(token, d) - if pos >= 0: - radiusStr = token[:pos] - radius = float(radiusStr) - radiusList.append(radius) - # store the radii info - self.currentFcst['radii'][radiiWindValue] = radiusList - - return - - def decodeWindForecast(self): - # if we're decoding a new forecast, save the old one first - if self.currentFcst != {}: - self.fcstList.append(self.currentFcst) - self.currentFcst = {} # reset - - str = self.currentLine() - str = string.replace(str, '...', ' ') # remove ... - - tokenList = string.split(str) - # decode the validTime - validTime = self.convert_ddhhmm(tokenList[2], self.baseProductTime) - # decode the center location - if len(tokenList) >= 5: - lat = self.decodeLatLonToken(tokenList[3]) - lon = self.decodeLatLonToken(tokenList[4]) - # If we can't decode the lat or lon it's probably an outlook - # with no guidance so just return - if lat == None or lon == None: - print "Failed to decode latStr:", lat, "lonStr:", lon - return - - # initialize a new forecast and begin filling values - self.currentFcst = {} - self.currentFcst['validTime'] = validTime - self.currentFcst['centerLocation'] = (lat, lon) - self.currentFcst['radii'] = {} - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - - return - - def decodeEyeDiameter(self): - str = self.currentLine() - - tokenList = string.split(str) - diameter = int(tokenList[2]) - - self.currentFcst['eyeDiameter'] = diameter - - # Since we found it in the procuct, set the default diameter - self.defaultEyeDiameter = diameter - self.foundEyeDiameter = 1 # mark that we found it - return - - def decodeTCMProduct(self, TCMProduct, eyeDiameter): - self.text = TCMProduct - self.pos = 0 - self.fcstList = [] - self.defaultEyeDiameter = 
eyeDiameter - - self.stripText() - - try: - while self.pos < len(TCMProduct): - line = self.currentLine() - for k in self.keyWordDict.keys(): - if string.find(line, k) > -1: - self.keyWordDict[k]() - break - self.pos = self.pos + 1 - - # store the last forecast in the list of forecasts - if self.currentFcst != {}: - self.fcstList.append(self.currentFcst) - self.currentFcst = {} # reset - except: - # Some problem occured during the decoding process so return an empty fcst - self.baseProductTime = 0 - self.fcstList = {} # reset - - return - - def decodeLatLonToken(self, latLonStr): - dirList = ['N', 'S', 'E', 'W'] - for d in dirList: - pos = string.find(latLonStr, d) - if pos >= 0: - try: - value = float(latLonStr[0:pos]) - if d == 'S' or d == 'W': - value = -value # flip the numeric sign - return value - except: - # it was not decodable (not numbers) - print "Failed to decode lat/lon token:", latLonStr - return None - - # undecodable latLon for some reason - return None - - def decodeJTWCProductTime(self): - line = self.currentLine() - tokenList = string.split(line) - ddhhmmStr = tokenList[2] - self.baseProductTime = self.convert_ddhhmm(ddhhmmStr, 0) - - self.baseProductTime = int(self.baseProductTime / 3600) * 3600 - return None - - def decodeJTWCTimeCenter(self): - line = self.nextLine() - tokenList = string.split(line) - dateTimeStr = tokenList[0][0:6] - latStr = tokenList[3] - lonStr = tokenList[4] - - # could be None - lat = self.decodeLatLonToken(latStr) - lon = self.decodeLatLonToken(lonStr) - if lon > 0: - lon = lon - 360.0 - productTime = self.convert_ddhhmm(dateTimeStr, self.baseProductTime) - - # make a new fcst object to store the analysis - self.currentFcst = {} - self.currentFcst['validTime'] = productTime - self.currentFcst['centerLocation'] = (lat, lon) - self.currentFcst['radii'] = {} - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - - def decodeJTWCWindForecast(self): - line = self.nextLine() - - tokenList = string.split(line) - - # 
Grab everything just to the left of the first 'Z' - zPos = string.find(tokenList[0], 'Z') - if zPos >= 0: - timeStr = tokenList[0][0:zPos] - validTime = self.convert_ddhhmm(timeStr, self.baseProductTime) - else: - print "couldnt find Z in timeStr:", line - return - - latStr = tokenList[2] - lonStr = tokenList[3] - lat = self.decodeLatLonToken(latStr) - lon = self.decodeLatLonToken(lonStr) - if lon > 0: - lon = lon - 360.0 - - # make a new currentFcst and store the info - self.currentFcst = {} - self.currentFcst['validTime'] = validTime - self.currentFcst['centerLocation'] = (lat, lon) - self.currentFcst['radii'] = {} - self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter - return - - def decodeJTWCRadii(self): - line = self.currentLine() - radList = [] - windSpeed = 0 - while string.find(line, "---") == -1 and line != "": - tokenList = string.split(line) - if string.find(line, "RADIUS") >= 0: # it's the first line - # check to see if we need to store the radii first - if radList != []: # we decoded some already - self.currentFcst['radii'][windSpeed] = radList - radList = [] - - # extract the windSpeed for these radii - windSpeed = float(tokenList[2]) - if string.find(line, "QUADRANT") == -1: # no "QUADRANT" found - radius = float(tokenList[6]) - radList = [radius, radius, radius, radius] - else: # QUADRANT found - radius = float(tokenList[6]) - radList = [radius] - else: # no RADIUS found so maybe a QUADRANT line - if string.find(line, "QUADRANT") >= 0: - radius = float(tokenList[0]) - radList.append(radius) - - line = self.nextLine() - - # save the last radii info - if radList != []: - self.currentFcst['radii'][windSpeed] = radList - - # save the whole forecast in the list - self.fcstList.append(self.currentFcst) - self.currentFcst = {} - - return - - def endJTWCWindForecast(self): - - if self.currentFcst != {}: - self.fcstList.append(self.currentFcst) - - self.currentFcst = {} - return - - def stopDecodingJTWC(self): - line = "ZZZZZ" - while line != "": - 
line = self.nextLine() - return - -# end class TCMDecoder - -# begin class CircleEA -# This class helps make circular edit areas and quadrants thereof. -class CircleEA(SmartScript.SmartScript): - def __init__(self, latGrid, lonGrid, center, slices): - pi = 3.1459 - RadsPerDeg = 2 * pi / 360 - cosLatGrid = cos(latGrid * RadsPerDeg) - self.xDist = (lonGrid - center[1]) * 111.1 * cosLatGrid - self.yDist = (latGrid - center[0]) * 111.1 - self.distGrid = sqrt(pow(self.xDist, 2)+ pow(self.yDist, 2)) - - self.tanGrid = arctan2(-self.xDist, -self.yDist) - # mask off all but the specified quadrant. - self.quadList = [] - for quad in range(1, slices + 1): - minValue = -pi + (quad - 1) * 2 * pi / slices - maxValue = -pi + quad * 2 * pi / slices - - quadrant = logical_and(greater_equal(self.tanGrid, minValue), - less(self.tanGrid, maxValue)) - self.quadList.append(quadrant) - - return - - # Return an edit area for just one quadrant. - # By convention quadrant numbering starts at 1 (due North) and - # progresses clockwise by one slice increment - def getQuadrant(self, quad, radius): - # trim the mask beyond the specified radius - radiusMask = less_equal(self.distGrid, radius) - - quadrant = logical_and(radiusMask, self.quadList[quad - 1]) - return quadrant - - def getDistanceGrid(self): - return self.distGrid - - def getXYDistGrids(self): - return self.xDist, self.yDist - -# end class CircleEA ------------------------------------------------------- - - -class Procedure (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self._dbss = dbss - - # Make a timeRange based on the start and end int times - def makeTimeRange(self, start=0, end=0): - - if AWIPS_ENVIRON == "AWIPS1": - - if start == 0 and end == 0: - return AFPS.TimeRange.allTimes() - - startTime = AFPS.AbsTime(start) - endTime = AFPS.AbsTime(end) - - tr = AFPS.TimeRange(startTime, endTime) - - elif AWIPS_ENVIRON == "AWIPS2": - if start == 0 and end == 0: - startTime = 
AbsTime.AbsTime(start) - endTime = AbsTime.maxFutureTime() - else: - startTime = AbsTime.AbsTime(start) - endTime = AbsTime.AbsTime(end) - - tr = TimeRange.TimeRange(startTime, endTime) - else: - self.statusBarMsg("Unknown AWIPS version", "U") - tr = None - - return tr - - def getParmTimeConstraints(self, weName, dbName): - - parm = self.getParm(dbName, weName, "SFC") - - if AWIPS_ENVIRON == "AWIPS1": - parmStart = parm.timeConstraints().startTime() - parmDuration = parm.timeConstraints().duration() - parmRepeat = parm.timeConstraints().repeatInterval() - - elif AWIPS_ENVIRON == "AWIPS2": - parmStart = parm.getGridInfo().getTimeConstraints().getStartTime() - parmDuration = parm.getGridInfo().getTimeConstraints().getDuration() - parmRepeat = parm.getGridInfo().getTimeConstraints().getRepeatInterval() - else: - self.statusBarMsg("Unknown AWIPS version", "U") - return None, None, None - - return parmStart, parmDuration, parmRepeat - - - # Use this method if you have no luck getting products - # directly from the text database - def getTextProductFromFile(self, filename): - f = file(filename, 'r') - textList = [] - line = f.readline() - textList.append(line) - while line != "": - line = f.readline() - textList.append(line) - f.close() - return textList - - # Retrieves a text product from the text database - def getTextProductFromDB(self, productID): - - cmd = "textdb -r " + productID - - # if your path does not include FXA_HOME/bin, - # this line may work instead of the above line. 
-# cmd = "/awips2/fxa/bin/textdb -r " + productID - - (stdout, stdin, stderr) = popen2.popen3(cmd) - - textList = [] - line = stdout.readline() - textList.append(line) - while line != "": - line = stdout.readline() - textList.append(line) - return textList - - def printFcst(self, f, baseTime=None): - print "==============================================================" - print "Time:", time.asctime(time.gmtime(f['validTime'])), - if baseTime is not None: - print "LeadTime:", (f['validTime'] - baseTime) / 3600 + 3 - print "Center:", f['centerLocation'] - print "Eye:", f['eyeDiameter'] - if f.has_key('maxWind'): - print "Max Wind:", f['maxWind'] - radKeys = f['radii'].keys() - sort(radKeys) - print "RADII:" - for r in radKeys: - print r, "kts:", f['radii'][r] - - def getWEInventory(self, modelName, WEName, level): - allTimes = self.makeTimeRange(0, 0) - gridInfo = self.getGridInfo(modelName, WEName, level, allTimes) - trList = [] - for g in gridInfo: - start = g.gridTime().startTime().unixTime() - end = g.gridTime().endTime().unixTime() - tr = self.makeTimeRange(start, end) - trList.append(tr) - - return trList - - def timeRangeSort(self, a, b): - if a.startTime() <= b.startTime(): - return -1 - else: - return 1 - - # returns a wind grid from the specified model most closely matched in - # time - def getClosestWindGrid(self, modelName, bgDict, timeTarget): - topo = self.getTopo() - calmGrid = self.makeWindGrid(0.0, 0.0, topo.shape) - - if len(bgDict.keys()) == 0: -# print "No background grids available...Using calm grid." 
- return calmGrid - - minDiff = 3600 * 24 * 365 # just a large number - gridIndex = -1 - tr = None - - # sort the keys by time so we get consistent behavior - bgKeys = bgDict.keys() - bgKeys.sort(self.timeRangeSort) - targetTR = self.makeTimeRange(timeTarget, timeTarget + 3600) - # figure out which grid is closest in time - for invTR in bgKeys: - - # if we have an exact match, we're done - if invTR.overlaps(targetTR): - tr = invTR # set the tr - minDiff = 0 - break - - # update stats for "closest" grid - gTime = invTR.startTime().unixTime() - diff = abs(gTime - timeTarget) - - if diff < minDiff: - tr = invTR - minDiff = diff - - # if we're off by more than 4 hours, return a calm grid - if minDiff > (4 * 3600): - return calmGrid - - # return the closest grid in time - if modelName == "Fcst": - grid = bgDict[tr] - else: - grid = bgDict[tr] - grid = (grid[0] * 1.944, grid[1]) # convert from m/s - - return grid - - # makes a direction grid where winds blow counter-clockwise about - # the specified center. - def makeDirectionGrid(self, latGrid, lonGrid, latCenter, lonCenter): - cycWt = 0.7 # cyclonic circulation weight - convWt = 0.3 # convergence weight - cycU = -(latGrid - latCenter) # pure counter-clockwise circulation - cycV = lonGrid - lonCenter - convU = -cycV # pure convergence - convV = cycU - u = cycU * cycWt + convU * convWt - v = cycV * cycWt + convV * convWt - mag, dir = self.UVToMagDir(u, v) - - return dir - - # interpolates radii information based on the specified info. - # returns a new radii - def interpRadii(self, t1, t2, newTime, f1Radii, f2Radii): - # Add radii if they are not there - radiiList = [34.0, 50.0, 64.0, 100.0] - for r in radiiList: - if not f1Radii.has_key(r): - f1Radii[r] = [0, 0, 0, 0] - if not f2Radii.has_key(r): - f2Radii[r] = [0, 0, 0, 0] - - newRadii = {} - for r in radiiList: - quadList = [] - # Check for partial list of radii - if len(f1Radii[r]) < 4 or len(f2Radii[r]) < 4: - print "Partial radii list found. 
Substituting with zero radius." - - # Add zeros if list is partial - while len(f1Radii[r]) < 4: - f1Radii[r].append(0) - while len(f2Radii[r]) < 4: - f2Radii[r].append(0) - - for i in range(4): - r1 = f1Radii[r][i] - r2 = f2Radii[r][i] - radius = r1 + (r2 - r1) * (newTime - t1) / (t2 - t1) - quadList.append(radius) - newRadii[r] = quadList - - return newRadii - - # interpolate the wind forecasts inbetween the two specified forecasts. - # interval is assumed to be specified in hours. - # returns a new list of forecasts with f1 at the front of the list - # and f2 not present at all in the list. - def interpolateWindFcst(self, f1, f2, interval): - - intSecs = 3600 * interval - t1 = f1['validTime'] - t2 = f2['validTime'] - # Just return the first fcst if the interval is too big - if t2 - t1 <= intSecs: - return [f1] - - f1Lat = f1['centerLocation'][0] - f1Lon = f1['centerLocation'][1] - f2Lat = f2['centerLocation'][0] - f2Lon = f2['centerLocation'][1] - f1Eye = f1['eyeDiameter'] - f2Eye = f2['eyeDiameter'] - tDiff = f2['validTime'] - f1['validTime'] - f1MaxWind = f1['maxWind'] - f2MaxWind = f2['maxWind'] - timeSlots = int(tDiff / intSecs) - dLat = (f2Lat - f1Lat) / timeSlots - dLon = (f2Lon - f1Lon) / timeSlots - dEye = (f2Eye - f1Eye) / timeSlots - dMaxWind = (f2MaxWind - f1MaxWind) / timeSlots - f1Radii = f1['radii'] - f2Radii = f2['radii'] - - if f1.has_key("editedMaxWinds"): - emw1 = array(f1["editedMaxWinds"]) - emw2 = array(f2["editedMaxWinds"]) - demw = (emw2 - emw1) / timeSlots - - fcstList = [f1] # include the first fcst in the list - for i in range(1, timeSlots): - newTime = t1 + (i * intSecs) - newLat = f1Lat + (i * dLat) - newLon = f1Lon + (i * dLon) - newEye = f1Eye + (i * dEye) - newMaxWind = f1MaxWind + (i * dMaxWind) - if f1.has_key("editedMaxWinds"): - newEMW = emw1 + (i * demw) - - newRadii = self.interpRadii(t1, t2, newTime, f1Radii, f2Radii) - f = {} - f['centerLocation'] = (newLat, newLon) - f['eyeDiameter'] = newEye - f['validTime'] = newTime - 
f['maxWind'] = newMaxWind - f['radii'] = newRadii - if f1.has_key("editedMaxWinds"): - f['editedMaxWinds'] = list(newEMW) - fcstList.append(f) - - return fcstList - - def calcRadiusList(self, maxWind, rmw, rad34, newRadii): - for i in range(len(newRadii)): - # linearly interpolate - newRadii[i] = rmw + ((rmw - rad34) / (maxWind - 34.0)) / (64.0 - maxWind) - if newRadii[i] < 0: - newRadii[i] = 0 - return newRadii - - - # This method fills in radii/wind values one way for the 36-72 hour period - # and another way for the 72-120 hour period. The concept is to add more - # data values to the wind field so that the wind grids look more realistic. - def extrapolateRadii(self, fcstList, baseDecodedTime, radiiFactor): - for i in range(1, len(fcstList)): - fcst = fcstList[i] - prevFcst = fcstList[i-1] - - # calc the lead time in hours - leadTime = (fcst['validTime'] - baseDecodedTime) / 3600 + 3 - - - extRadius = self.getOutlookRadius(leadTime) - zeroRadius = extRadius * radiiFactor - - if leadTime <= 36: # no extrapolation for these times - continue - # for this period, manufacture new 64 knot radii under specific conditions - if leadTime > 36 and leadTime <= 72: - # make sure we have the data we need - if not prevFcst['radii'].has_key(64): - continue - if not prevFcst['radii'].has_key(50): - continue - if not fcst['radii'].has_key(50): - continue - if fcst['radii'].has_key(64): - continue - - if fcst['maxWind'] <= 64: - continue - - prev50 = prevFcst['radii'][50] - prev64 = prevFcst['radii'][64] - fcst50 = fcst['radii'][50] - newRadii = [0, 0, 0, 0] - for i in range(len(prev50)): - if prev50[i] == 0: - continue - - newRadii[i] = fcst50[i] / prev50[i] * prev64[i] - - if not fcst['radii'].has_key(64): - fcst['radii'][64] = newRadii - # add in a 5 knot radius for better blending - fcst['radii'][5.0] = [zeroRadius, zeroRadius, zeroRadius, zeroRadius] - - elif leadTime > 72: # different algorithm for beyond 72 hours - - # if there are radii already defined, don't extrapolate 
new radii - if fcst.has_key("radii"): - if len(fcst["radii"]) > 0: - continue - - # Stuff radii into the rDict to make a cyclone - maxWind = 0 - if fcst.has_key("maxWind"): - maxWind = fcst["maxWind"] - - rDict = {} - - # add the radii for maxWind at the rmw - if maxWind > 0: - # calculate an rmw - lat = fcst["centerLocation"][0] # latitude in degrees - rmw = 46.29 * exp(-0.0153 * (maxWind / 1.944) + 0.0166 * lat) - rmw = rmw / 1.852 # convert to nautical miles - - for ws in [64.0, 50.0]: - newRadii = [0, 0, 0, 0] - if ws < maxWind: - newRadii = self.calcRadiusList(maxWind, rmw, extRadius, newRadii) - rDict[ws] = newRadii - - rDict[34.0] = [extRadius, extRadius, extRadius, extRadius] - rDict[5.0] = [zeroRadius, zeroRadius, zeroRadius, zeroRadius] - fcst['radii'] = rDict - - return fcstList - - # Smooths the specified grid by the specified factor - # With factor == 3, 3x3 smooth, factor == 5 5x5 smooth, etc. - # Even factors (4, 6, 8,...) round up to the next odd value - # If factors <3 are specified, the unmodified grid is returned. 
- def smoothGrid(self, grid, factor): - # factors of less than 3 are useless or dangerous - if factor < 3: - return grid - - # Specifying the grid type depends on the environment - - typecode = float64 - - st = time.time() - half = int(factor)/ 2 - sg = zeros(grid.shape, typecode) - count = zeros(grid.shape, typecode) - gridOfOnes = ones(grid.shape, typecode) - - for y in range(-half, half + 1): - for x in range(-half, half + 1): - if y < 0: - yTargetSlice = slice(-y, None, None) - ySrcSlice = slice(0, y, None) - if y == 0: - yTargetSlice = slice(0, None, None) - ySrcSlice = slice(0, None, None) - if y > 0: - yTargetSlice = slice(0, -y, None) - ySrcSlice = slice(y, None, None) - if x < 0: - xTargetSlice = slice(-x, None, None) - xSrcSlice = slice(0, x, None) - if x == 0: - xTargetSlice = slice(0, None, None) - xSrcSlice = slice(0, None, None) - if x > 0: - xTargetSlice = slice(0, -x, None) - xSrcSlice = slice(x, None, None) - - target = [yTargetSlice, xTargetSlice] - src = [ySrcSlice, xSrcSlice] - sg[target] = sg[target] + grid[src] - count[target] = count[target] + gridOfOnes[src] - return sg / count - - # Smooths the direction grid without regard to the magnitude - def smoothDirectionGrid(self, dirGrid, factor): - mag = ones(dirGrid.shape, float32) # 1.0 everywhere - u, v = self.MagDirToUV(mag, dirGrid) - u = self.smoothGrid(u, factor) - v = self.smoothGrid(v, factor) - mag, dirGrid = self.UVToMagDir(u, v) - return dirGrid - - def makeWindGrid(self, mag, direct, gridShape): - mag = ones(gridShape, float32) * mag - direct = ones(gridShape, float32) * direct - return mag, direct - - def decreaseWindOverLand(self, grid, fraction, Topo, timeRange): - - if self.lessOverLandGrid == "Grid": - - windFactorGrid = self.getWindReductionFactorGrid("Fcst", timeRange) - if windFactorGrid is not None: - # Restrict reduction to the cyclone winds defined by the TCM - grid = where(self._cycloneMask, grid * (1 - windFactorGrid), grid) - return grid - else: - # If no grid was found 
just return the standard reduction - self.statusBarMsg("Wind Reduction Factor grid not found. Using standard reduction." , "S") - - # If area over which you desire to apply land correction you prefer be - # based on Edit Are instead of areas with Topo greater than zero then - # uncomment the next two lines and specify Edit Area to use. - - #editArea = self.getEditArea("LAND_EDIT_ARE_NAME_HERE") - #mask = self.encodeEditArea(editArea) - - # Restrict reduction to the cyclone winds defined by the TCM - mask = logical_and(greater(Topo, 0.0), self._cycloneMask) - - grid = where(mask, grid * fraction, grid) - - return grid - - # fetches and returns all of the wind reduction factor grids in Fcst DB. - def getWindReductionFactorGrid(self, modelName, timeRange): - try: - inv = self.getWEInventory(modelName, "WindReductionFactor", "SFC") - for tr in inv: - if tr.overlaps(timeRange): - WindRedGrid = self.getGrids(modelName, "WindReductionFactor", "SFC", - timeRange, mode="First") - return WindRedGrid - # If no overlapping grids, return None - return None - except: - return None - - def getTimeConstraintDuration(self, element): - - parmStart, parmDuration, parmRepeat = self.getParmTimeConstraints(element, "Fcst") - return parmDuration - - def getParmMinMaxLimits(self, modelName, weName): - - parm = self.getParm(modelName, weName, "SFC") - - if AWIPS_ENVIRON == "AWIPS1": - return parm.minLimit(), parm.maxLimit() - elif AWIPS_ENVIRON == "AWIPS2": - return parm.getGridInfo().getMinValue(), parm.getGridInfo().getMaxValue() - else: - self.statusBarMsg("Unknown AWIPS version", "U") - return None, None - - return - - # returns the maximum allowable wind speed based on NWS directives - def getMaxAllowableWind(self, maxWind): - minAllowable, maxAllowable = self.getParmMinMaxLimits("Fcst", "Wind") - - return min(maxWind, maxAllowable) - - # returns an interpolated radius based on input radii - def getOutlookRadius(self, leadTime): - leadTimeList = [72, 96, 120] - radiusList = 
[self.day3Radius, self.day4Radius, self.day5Radius] - - if leadTime < leadTimeList[0]: - return radiusList[0] - - for i in range(1, len(leadTimeList)): - if leadTime < leadTimeList[i]: - dt = leadTimeList[i] - leadTimeList[i - 1] - dr = radiusList[i] - radiusList[i - 1] - return radiusList[i - 1] + (leadTime - leadTimeList[i - 1]) * dr / dt - - return radiusList[-1] # return the last item - - # Blends the specified grid together - def blendGrids(self, windGrid, bgGrid): - - # Combine the two grids using the windGrid for the cyclone and the - # background grid everywhere else. - - windMag, windDir = windGrid - bgMag, bgDir = bgGrid - - mask = greater_equal(windMag, 34.0) - - # No background winds inside any defined wind radii - # Add in the point inside the defined wind radii - mask = logical_or(mask, self._cycloneMask) - - magGrid = where(mask, windMag, bgMag) - dirGrid = where(mask, windDir, bgDir) - - return magGrid, dirGrid - - def getLatLonGrids(self): - # Try to get them from the fcst database to save time - try: - trList = self.getWEInventory("Fcst", "latGrid", "SFC") - except: - trList= [] - - if len(trList) > 0: - timeRange = trList[0] - latGrid = self.getGrids("Fcst", "latGrid", "SFC", timeRange, - mode = "First", noDataError = 0) - lonGrid = self.getGrids("Fcst", "lonGrid", "SFC", timeRange, - mode = "First", noDataError = 0) - if latGrid != None and lonGrid != None: - return latGrid, lonGrid - - # make the lat and lon grids - gridLoc = self.getGridLoc() - - latGrid, lonGrid = MetLib.getLatLonGrids(gridLoc) - - start = int(time.time() / (24 * 3600)) * 24 * 3600 - end = start + (24 * 3600) - timeRange = self.makeTimeRange(start, end) - - # Temporarily save them in the forecast database - self.createGrid("Fcst", "latGrid", "SCALAR", latGrid, timeRange, - descriptiveName=None, timeConstraints=None, - precision=1, minAllowedValue=-90.0, - maxAllowedValue=90.0) - - self.createGrid("Fcst", "lonGrid", "SCALAR", lonGrid, timeRange, - descriptiveName=None, 
timeConstraints=None, - precision=1, minAllowedValue=-360.0, - maxAllowedValue=180.0) - - return latGrid, lonGrid - - # This method interpolates the specified radii in rDict to the - # number of slices specified in pieSlices. This adds more angular - # resolution to the wind forecast which typically comes with 4 slices. - def interpolateQuadrants(self, rDict, pieSlices): - # make sure we have something to do first - if pieSlices <= 4: - return rDict - - newDict = {} - for k in rDict.keys(): - rList = rDict[k] # fetch the list of radii - - interpFactor = pieSlices / len(rList) - newList = [] - for i in range(-1, len(rList) -1): - minVal = rList[i] - maxVal = rList[i + 1] - dVal = (maxVal - minVal) / interpFactor - for f in range(interpFactor): - radius = minVal + dVal * f - # make sure we never exceed the forecast radius -## if radius > minVal: -## radius = minVal - newList.append(radius) - - # Since we started with the NW quadrant we need to shift - # the list so that it starts at North to conform to convention - shift = int(pieSlices / 4) - 1 - shiftedList = newList[shift:] - shiftedList = shiftedList + newList[:shift] - newDict[k] = shiftedList - return newDict - - - # fetches and returns all of the wind grids specified by the model - # name. 
Should be called before any new wind grids are created - def getBackgroundGrids(self, modelName): - bgDict = {} - - modelName = "Fcst" - siteID = self.getSiteID() - if modelName == "Fcst": - level = "SFC" - elementName = "Wind" - else: - modelName = siteID + "_D2D_" + modelName - if modelName.find("ECMWFHiRes") > -1: - level = "SFC" - elementName = "wind" - else: - level = "FHAG10" - elementName = "wind" - - inv = self.getWEInventory(modelName, elementName, level) - for tr in inv: - bgDict[tr] = self.getGrids(modelName, elementName, level, - tr, mode="First") - return bgDict - - def secondsToYYYYMMDDHH(self, baseTime): - # convert the base time to a string - gTime = time.gmtime(baseTime) - yearStr = str(gTime.tm_year) - monthStr = str(gTime.tm_mon) - dayStr = str(gTime.tm_mday) - hourStr = str(gTime.tm_hour) - while len(monthStr) < 2: - monthStr = "0" + monthStr - while len(dayStr) < 2: - dayStr = "0" + dayStr - while len(hourStr) < 2: - hourStr = "0" + hourStr - - baseTimeStr = yearStr + monthStr + dayStr + hourStr - - return baseTimeStr - - - # returns the index corresponding to the specified timeStr and fcstHour - def findFcst(self, fcstList, fcstHour): - for i in range(len(fcstList)): - validTime = fcstList[i]["validTime"] - leadTime = (validTime - self.baseDecodedTime) / 3600 - if fcstHour == leadTime: - return i - - return None - - # Accepts the number of slices to interpolate and a list of defined - # wind values. Returns a new list of length slices with the specified - # windList interpolated to the new resolution. 
- def interpWindMax(self, slices, windList): - - maxWindList = [0.0] * slices - - quads = len(windList) - ratio = slices / quads - intOffset = int(ratio / 2) - floatOffset = float(intOffset) / ratio - sliceMap = [] - windPos = [0] * len(windList) - - # Figure out the left and right positions for each new slice - for i in range(slices): - left = int((i - int(ratio/2)) / ratio) - if i % ratio == int(ratio/2): - right = left - windPos[left] = i - else: - right = left + 1 - - if right >= quads: - right = right - quads - - sliceMap.append((left, right)) - - # Do the actual interpolation based on the above positions - interpWindList = [] - for i in range(slices): - left, right = sliceMap[i] - - if left == right: - val = windList[left] - absDist = 1.1111 - elif windPos[left] > windPos[right]: - absDist = slices - abs(windPos[right] - windPos[left]) - else: - absDist = abs(windPos[right] - windPos[left]) - - diff = i - windPos[left] - if diff < 0: - diff = slices + diff - val = windList[left] + diff * ((windList[right] - windList[left]) / absDist) - interpWindList.append(val) - - return interpWindList - - - # Calculate the radius of the maxWind based on teh specified eyeDiameter - def maxWindRadius(self, eyeDiameter=None): - - if eyeDiameter is None: - return 12.5 - - rmw = (eyeDiameter / 2.0) + 8.0 - - return rmw - - def adjustMaxWind(self, outSpeed, inSpeed, outRadius, inRadius, - globalMaxWind, maxWindList, maxWindRadius, quad): - - maxWind = maxWindList[quad] - - # check which speed/radius should be modified - if outSpeed == globalMaxWind: - outSpd = maxWind - outRad = maxWindRadius - inSpd = inSpeed - inRad = inRadius - elif inSpeed == globalMaxWind: - inSpd = maxWind - inRad = maxWindRadius - outSpd = outSpeed - outRad = outRadius - else: - print "ERROR!!! Neither inSpeed or outSpeed is max!!!" - - return outSpd, inSpd, outRad, inRad, maxWind - - # Makes a Rankine Vortex wind speed grid that decreases exponentially - # from the known values at known radii. 
Inside the Radius of maximum - # wind the wind decreases linearly toward the center - def makeRankine(self, f, latGrid, lonGrid, pieSlices, radiiFactor, timeRange): - st = time.time() - rDict = f['radii'] - - rDict = self.interpolateQuadrants(rDict, pieSlices) - - validTime = f['validTime'] - center = f['centerLocation'] - maxWind = f['maxWind'] - - circleEA = CircleEA(latGrid, lonGrid, center, pieSlices) - - # make a list that contains the highest non-zero radius speed - centerWindList = [0] * pieSlices - for k in rDict.keys(): - for i in range(len(rDict[k])): - if rDict[k][i] > 0 and k > centerWindList[i]: - centerWindList[i] = k - - - for k in rDict.keys(): - if rDict[k] == [0] * pieSlices: - del rDict[k] - # make a list of lowest wind speed found with zero radius - # and save the next lowest wind speed for later - if rDict.has_key(100.0): - speedList = [None, 100.0, 50.0, 34.0, 5.0] - else: - speedList = [None, 64.0, 50.0, 34.0, 5.0] - - zeroRadList = [999] * pieSlices - validRadList = [999] * pieSlices - for s in range(len(speedList) - 1): - speed = speedList[s] - nextSpeed = speedList[s + 1] - if not rDict.has_key(speed): - zeroRadList = [speed] * pieSlices - validRadList = [nextSpeed] * pieSlices - else: - for i in range(len(rDict[speed])): - if rDict[speed][i] == 0: - zeroRadList[i] = speed - validRadList[i] = nextSpeed - - # get the distance grid and make sure it's never zero anywhere - distanceGrid = circleEA.getDistanceGrid() / 1.852 # dist in NM - distanceGrid[distanceGrid == 0] = 0.01 - - # make a grid into which we will define the wind speeds - grid = self.empty() - - # The cyclone algorithm depends on the forecast lead time - fcstLeadTime = (validTime - self.baseDecodedTime) / 3600 - - # add the radius for maxWind for interpolation - if f.has_key('eyeDiameter'): - eyeDiameter = f['eyeDiameter'] - else: - print "Error --- no eye diameter found." 
- eyeDiameter = None - - maxWindRadius = self.maxWindRadius(eyeDiameter) - - maxWindList = [] - # add the edited maxWind values, if any - if f.has_key("editedMaxWinds"): - # First interpolate based on pie slices - maxWindList = self.interpWindMax(pieSlices, f["editedMaxWinds"]) - - # Add in the maxWind and radius as a point - if not rDict.has_key('maxWind'): - rDict[maxWind] = [maxWindRadius] * pieSlices - # extract the list and sort it - wsList = rDict.keys() - wsList.sort() - - # insert a dummy wind near the center and append so it's done last - rDict[1] = [1] * pieSlices - wsList.append(1.0) - - # insert an artificial 5 knot radius at a distance proportional - # to the 34 knot radius for that quadrant - tenKnotRadiusList = [108.0] * pieSlices - - if rDict.has_key(34.0): - tenKnotRadList = [] - radList34 = rDict[34.0] - for r in radList34: - tenKnotRadList.append(r * radiiFactor) - - # insert the 5 knot radius at the beginning so is made first - rDict[5.0] = tenKnotRadList - wsList.insert(0, 5.0) - - insideRMWMask = self.empty(bool) - self._cycloneMask = self.empty(bool) - # for each rDict record and quadrant, make the grid one piece at a time - for i in range(len(wsList) - 1): - self.lastRadius = [None] * pieSlices - if not rDict.has_key(wsList[i]): - continue - radiusList = rDict[wsList[i]] - nextRadiusList = rDict[wsList[i + 1]] - - maxRadius = maxWindRadius # temp copy - for quad in range(len(radiusList)): - - maxRadius = maxWindRadius # temp copy - maxWind = f['maxWind'] # reset maxWind as we may fiddle with it - - # fetch the speeds and radii we'll need - outSpeed = float(wsList[i]) - inSpeed = float(wsList[i + 1]) - outRadius = float(radiusList[quad]) - inRadius = float(nextRadiusList[quad]) - - # Here's where the speeds and radii are adjusted based - # on the edited values but only if they have been edited - # and only if we're working on the maxWind. 
- if f.has_key("editedMaxWinds"): - if maxWind == wsList[i] or maxWind == wsList[i+1]: - outSpeed, inSpeed, outRadius, inRadius, maxWind = \ - self.adjustMaxWind(outSpeed, inSpeed, outRadius, - inRadius, maxWind, maxWindList, - maxWindRadius, quad) - - # Some cases require we adjust the maxWindRadius - if outSpeed in [64.0, 50.0, 34.0] and outRadius <= maxWindRadius: - inRadius = outRadius * 0.9 - self.lastRadius[quad] = outRadius - elif inSpeed == 1.0 and self.lastRadius[quad] is not None: - outRadius = self.lastRadius[quad] * 0.9 - #print "Adjusting MaxWindRadius at:", inSpeed, "kts" - self.lastRadius[quad] = None - - # reset the speeds if they exceed the maxWind - if fcstLeadTime <= 72 and zeroRadList[quad] is not None: - if inSpeed >= zeroRadList[quad]: - inSpeed = validRadList[quad] - if outSpeed >= zeroRadList[quad]: - outSpeed = validRadList[quad] - - # set the center value to max fcst wind - if inSpeed == 1.0: - inSpeed = centerWindList[quad] - - # get the edit area for this quadrant - mask = circleEA.getQuadrant(quad + 1, outRadius * 1.852) - - # log10 and exp math functions are fussy about zero - if inSpeed == 0.0: - inSpeed = 0.1 - if outSpeed == 0.0: - outSpeed = 0.1 - if inRadius == 0.0: - inRadius = 0.1 - if outRadius == 0.0: - outRadius = 0.1 - # no wind speed can never exceed the maximum allowable wind speed - if inSpeed > maxWind: - inSpeed = maxWind - if outSpeed > maxWind: - outSpeed = maxWind - - # don't bother with trivial cases - if inRadius < 2.0 and outRadius < 2.0: - continue - if inRadius > outRadius: - continue - - if inSpeed == 0.0 or outSpeed == 0.0: - continue - # calculate the exponent so that we exactly fit the next radius - denom = log10(inRadius / outRadius) - if denom == 0: - exponent = 1.0 - else: - exponent = (log10(outSpeed) - log10(inSpeed)) / denom - - # make sure the exponent behaves itself - if exponent > 10.0: - exponent = 10.0 - # inside RMW gets a linear slope to largest of max wind forecasts - if inRadius <= 1.0: - dSdR 
= (outSpeed - inSpeed) / (outRadius - inRadius) - grid = where(mask, inSpeed + (dSdR * distanceGrid), grid) - insideRMWMask[mask] = True - else: # outside RMW - grid = where(mask, inSpeed * power((inRadius / distanceGrid), exponent), - grid) - if outSpeed >= 34.0 and inSpeed >= 34.0: - self._cycloneMask = logical_or(self._cycloneMask, mask) - - # Apply the NC State correction outside the RMW - if self._applyNCSCorrection: - corrGrid = self.makeCorrectionGrid(latGrid, lonGrid, center) -## self.createGrid("Fcst", "NCSCorr", "SCALAR", corrGrid, self._timeRange, -## precision=3, minAllowedValue=-1.0, maxAllowedValue=1.0) - - m = logical_not(insideRMWMask) - grid[m] *= (1 - corrGrid)[m] - - maxWind = f['maxWind'] # reset again before clipping - - dirGrid = self.makeDirectionGrid(latGrid, lonGrid, center[0], center[1]) - - - # clip values between zero and the maximum allowable wind speed - maxWind = self.getMaxAllowableWind(maxWind) - grid.clip(0.0, maxWind, grid) - # apply the wind reduction over land - fraction = 1.0 - (self.lessOverLand / 100.0) - grid = self.decreaseWindOverLand(grid, fraction, self.elevation, timeRange) - return (grid, dirGrid) - - def makeMaxWindGrid(self, interpFcstList, interval, latGrid, lonGrid, pieSlices, - radiiFactor): - -## if len(interpFcstList) == 0: -## return - - startTime = interpFcstList[0]["validTime"] - endTime = startTime + (123 * 3600) # 123 hours later - - timeRange = self.makeTimeRange(startTime, endTime) - - # Used getGrids to calculate the maximum wind grid. - # - # Fetch the max of the wind grids just generated as this is very fast. 
- maxWindGrid, maxDirGrid = self.getGrids("Fcst", "Wind", "SFC", timeRange, mode="Max") - - maxWindGrid = self.smoothGrid(maxWindGrid,3) - - self.createGrid("Fcst", "TCMMaxWindComposite", "SCALAR", maxWindGrid, timeRange, - precision=1, minAllowedValue=0.0, maxAllowedValue=200.0) - - # save the grid in the server - self.saveObject("TCMMaxWindGrid", maxWindGrid, "WindGrid") - - return - - def validateCycloneForecast(self, fcstList, baseTime): - - # Now check each forecast to make sure that we have a radius for any - # standard wind values less than the maxWind - - if len(fcstList) == 0: - return False - - windValues = [64, 50, 34] - for f in fcstList: - for value in windValues: - if value > f["maxWind"]: - continue - if not f["radii"].has_key(value): - print f["radii"].keys(), "is missing value:", value - return False - - return True - - # Returns a dictionary that lists the min and max allowed wind for each hour - def makeWindDict(self, fcstList): - - windDict = {} - - - for f in fcstList: - windValues = f["radii"].keys() - hour = (f["validTime"] - self.baseDecodedTime) / 3600 - maxWind = f["maxWind"] - minWind = 999999.0 - if len(f["radii"].keys()) == 0: - minWind = 0.0 - - # Grab the first (highest) forecast wind speed value - if len(windValues) > 0: - minWind = windValues[0] - else: - minWind = 0.0 - - windDict[hour] = (minWind, maxWind) - - return windDict - - # Pop up a GUI that will maxWind values for each quadrant and time - def launchMaxWindGUI(self, fcstList): - - windDict = self.makeWindDict(fcstList) - if AWIPS_ENVIRON == "AWIPS1": - eaMgr = self.eaMgr() - else: - eaMgr = None - - self._maxWindGUI = DefineMaxWindGUI.DefineMaxWindGUI(self._dbss, eaMgr) - - newMaxWinds = self._maxWindGUI.displayGUI(windDict) - - if newMaxWinds is not None: - - hourList = newMaxWinds.keys() - hourList.sort() - - self._maxWindGUI.cancelCommand() - - return newMaxWinds - - # Make the NCState bais correction grid based on the forecast. 
- def makeCorrectionGrid(self, latGrid, lonGrid, center): - - - # structure to hold the polynomial coefficients - coeff = [[1.282e-011, -3.067e-008, 2.16e-005, -5.258e-003, 3.794e-001], - [3.768e-011, -4.729e-008, 2.097e-005, -3.904e-003, 2.722e-001], - [4.692e-011, -5.832e-008, 2.565e-005, -4.673e-003, 2.952e-001], - [3.869e-011, -4.486e-008, 1.84e-005, -3.331e-003, 2.738e-001]] - - # make the circle edit area and distance grid - pieSlices = 4 - circleEA = CircleEA(latGrid, lonGrid, center, pieSlices) - - dist = circleEA.getDistanceGrid() # dist in km - - corrGrid = self.empty() - - for quad in range(pieSlices): - - ea = circleEA.getQuadrant(quad + 1, 500.0) - grid = coeff[quad][0] * pow(dist, 4) + coeff[quad][1] * pow(dist, 3) + \ - coeff[quad][2] * pow(dist, 2) + coeff[quad][3] * dist + \ - coeff[quad][4] - - corrGrid = where(ea, grid, corrGrid) - - return corrGrid - - def execute(self, varDict, timeRange): - - RADII_FACTOR = 4.5 - - self.setToolType("numeric") - self.toolTimeRange = timeRange - - # define the default eye diameter for bulletins where they are missing - eyeStr = varDict["Eye Diameter:"] - self.dialogEyeDiameter = float(eyeStr) - maxwindswath = varDict["MaxWind Swath for \nTCWindThreat?"] - - Topo = self.getTopo() - - tcDuration = self.getTimeConstraintDuration("Wind") - tcHours = int(tcDuration / 3600) # durations are expressed in seconds - # set the time interpolation interval to the duration - interval = tcHours - - # get the product ID - productList1 = varDict["Product to\ndecode:"] - productList2 = varDict["Product to\n decode:"] - productList1 = productList1 + productList2 # concatenate - if len(productList1) != 1: - self.statusBarMsg("Please select one TCM bulletin only.", "S") - return None - - productID = productList1[0] - - # get the ID for this site - siteID = self.getSiteID() - - bgModelName = "Fcst" - self.day3Radius = varDict["34 knot radius at 3 days (NM):"] - self.day4Radius = varDict["34 knot radius at 4 days (NM):"] - 
self.day5Radius = varDict["34 knot radius at 5 days (NM):"] - - # grab all of the background grids now before we make any new grids - bgDict = self.getBackgroundGrids(bgModelName) - - # Radial slices hard-coded to 4. Changing this will divide the wind - # forecast into more radial pieces. Recommended alternative values: - # 12, 20, 36, 72. - pieSlices = int(varDict["Number of Pie Slices?"]) - - # define radii factor - may make this configurable - # Multiply 3-5 day radius by this factor to get the zero radius. - # Smaller values ramp the cyclone down to zero more quickly. - - - self.lessOverLand = int(varDict["Decrease Wind over Land by (%):"]) - self.lessOverLandGrid = varDict["Constant Land\nReduction (Slider Bar)\nor Wind Reduction\nFactor Grid?"] - self.elevation = Topo - rclDecoder = TCMDecoder() - tcmDecoder = TCMDecoder() - - msg = "" - - # Fetch the text product - if productID == "preTCM": - textProduct = self.getTextProductFromFile("/tmp/Wilma.txt") - decoder = TCMDecoder() - decoder.decodeTCMProduct(textProduct, self.dialogEyeDiameter) - fcstList = decoder.getFcstList() - baseTime = decoder.getBaseProductTime() - #elif productID == "WRKTCM": - # textProduct = self.getTextProductFromFile("/data/local/research/TPCWindProb/WRKTCM") - else: - # try fetching the RCL first. - rclProductID = "MIARCL" + productID[3:] - print "Attempting to Fetch rclProductID:", rclProductID - rclTextProduct = self.getTextProductFromDB(rclProductID) - completeFcst = False - if len(rclTextProduct) < 5: - #msg = rclProductID + " not found. Using TCM to make cyclone." 
- # self.statusBarMsg(msg, "S") - rclBaseTime = 0 - else: - rclDecoder.decodeTCMProduct(rclTextProduct, self.dialogEyeDiameter) - rclFcstList = rclDecoder.getFcstList() - rclBaseTime = rclDecoder.getBaseProductTime() - completeFcst = self.validateCycloneForecast(rclFcstList, rclBaseTime) - - if productID[:3] == "PRE": - productID = "MIA" + productID - - tcmTextProduct = self.getTextProductFromDB(productID) - - if len(tcmTextProduct) < 5: - msg = productID + " could not be retrieved from the text database." - self.statusBarMsg(msg, "S") - return None # Just return if no TCM is found. Something's really wrong - else: - tcmDecoder.decodeTCMProduct(tcmTextProduct, self.dialogEyeDiameter) - tcmFcstList = tcmDecoder.getFcstList() - tcmBaseTime = tcmDecoder.getBaseProductTime() - - #print "TCM and RCL Base Times are: ", tcmBaseTime, rclBaseTime - if not completeFcst or rclBaseTime != tcmBaseTime: - msg = "Problem decoding " + rclProductID + " Used TCM to make cyclone.\n" - msg = msg + " Used GUI sliders for 3, 4, 5 day forecast." - #self.statusBarMsg(msg, "S") - fcstList = tcmFcstList - baseTime = tcmBaseTime - else: - msg = "RCL message looked good so used that for TCM." - fcstList = rclFcstList - baseTime = rclBaseTime - productID = rclProductID - - print "Decoded:", len(fcstList), " forecasts." - - # Set the baseDecodedTime - validTime of first entry - 3 hours - if len(fcstList) > 0: - self.baseDecodedTime = fcstList[0]['validTime'] - 3 * 3600 - - if varDict["Define Asymmetrical \nMax Winds?"] == "Yes": - - newMaxWinds = self.launchMaxWindGUI(fcstList) - for i in range(len(fcstList)): - fcstHour = (fcstList[i]['validTime'] - baseTime) / 3600 + 3 - maxList = newMaxWinds[fcstHour] - fcstList[i]["editedMaxWinds"] = maxList - - fcstList = self.extrapolateRadii(fcstList, baseTime, RADII_FACTOR) - -## # See if the decoded fcst is close to the current time. 
This is needed -## # so the tool will work on archived data sets (testMode) -## testMode = False -## if abs(time.time() - self.baseDecodedTime) > 2 * 24 * 3600: # older than 2 days -## testMode = True - - # restrict grids to the selected time period if option is selected. - testMode = False - restrictAnswer = varDict["Make Grids over \nSelected Time Only:"] - if restrictAnswer == "Yes": - testMode = True - - # Turn off testMode if the selected timeRange is less than an hour in duration - if self.toolTimeRange.duration() < 3600: - testMode = False - - # interpolate the wind forecasts we got from the decoder - selectedStartTime = self.toolTimeRange.startTime().unixTime() - selectedEndTime = self.toolTimeRange.endTime().unixTime() - interpFcstList = [] - for i in range(len(fcstList) - 1): - newFcstList = self.interpolateWindFcst(fcstList[i], fcstList[i+1], - interval) - - # Make sure the fcst is within the selected time range or we're in testMode - for f in newFcstList: - if (testMode and (f['validTime'] >= selectedStartTime and \ - f['validTime'] < selectedEndTime)) or (not testMode): - interpFcstList.append(f) - - # append the very last forecast on to the end of the interpolated list - if len(fcstList) > 0: - if (testMode and (f['validTime'] >= selectedStartTime and \ - f['validTime'] < selectedEndTime)) or (not testMode): - interpFcstList.append(fcstList[-1]) - - if len(fcstList) == 1: - interpFcstList = fcstList - - if len(interpFcstList) == 0: - self.statusBarMsg("No cyclone forecasts found within the Selected TimeRange", - "S") - else: - # If the wind grids are more than 3 hours long, the first grid ends up being double - # duration. 
So, add an extra duplicate forecast at the beginning and reset - # the validTime - print "tcHours:", tcHours - if tcHours > 3: - interpFcstList.insert(0, copy.deepcopy(interpFcstList[0])) - interpFcstList[0]["validTime"] = (int(interpFcstList[0]["validTime"] / tcDuration) \ - * tcDuration) - interpFcstList[1]["validTime"] = (int(interpFcstList[0]["validTime"] / tcDuration) \ - * tcDuration) + tcDuration - print "Adjusted time for first forecast" - print "Generating", len(interpFcstList), "wind grids" - - # get the lat, lon grids - latGrid, lonGrid = self.getLatLonGrids() - - self._applyNCSCorrection = False - if varDict["Reduce Radii by 15% or \n NC State Bias Correction"] == "Reduce by 15%": - # Reduce the extent of the wind radii per Mark De Maria's research - # Loop through each wind radius and modify in place - for f in interpFcstList: - for windValue in f["radii"]: - for i in range(len(f["radii"][windValue])): - f["radii"][windValue][i] = f["radii"][windValue][i] * 0.85 - elif varDict["Reduce Radii by 15% or \n NC State Bias Correction"] == "NC State Bias Correction": - self._applyNCSCorrection = True - - # make a grid for each interpolate forecast - gridCount = 0 - for f in interpFcstList: - - self._timeRange = timeRange - - validTime = int(f['validTime'] / 3600) * 3600 - bgGrid = self.getClosestWindGrid(bgModelName, bgDict, validTime) - startTime = validTime - endTime = validTime + (interval * 3600) - timeRange = self.makeTimeRange(startTime, endTime) - self._cycloneTimeRange = timeRange - - t1 = time.time() - windGrid = self.makeRankine(f, latGrid, lonGrid, pieSlices, RADII_FACTOR, timeRange) - print "Time to makeRankine:", time.time() - t1 - - magGrid, dirGrid = self.blendGrids(windGrid, bgGrid) - magGrid = self.smoothGrid(magGrid, 5) - dirGrid = self.smoothDirectionGrid(dirGrid, 5) - - name = "Wind" - self.createGrid("Fcst", name, "VECTOR", (magGrid, dirGrid), timeRange, - descriptiveName=None, timeConstraints=None, - precision=1, minAllowedValue=0.0, - 
maxAllowedValue=200.0) - - gridCount = gridCount + 1 - print "TCMWindTool:", productID, "- Generated", gridCount, \ - "out of", len(interpFcstList), "grids", \ - time.asctime(time.gmtime(timeRange.startTime().unixTime())) - - # interpolate through forecast period to very high resolution and make - # a composite maxWind grid from those wind grids - if maxwindswath == "Yes": - t1 = time.time() - self.makeMaxWindGrid(interpFcstList, interval, latGrid, lonGrid, pieSlices, - RADII_FACTOR) - print time.time() - t1, "seconds to generate Max wind composite." - - if msg != "": - self.statusBarMsg(msg, "S") - - return None +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TCMWindTool +# +# Version 2.7.1 2 Sept 2010 Modified to Fix RCL error +# Version 2.7.2 01 Feb 2011 Fixed Pie Slice Algorithm/Added Backgroun Options +# Version Last 14 Apr 2014 Added User-editable max winds +# Modified On 22 May 2014 Introduced option for handling asymetry +# in inner core (RMW), option to use 85th wind radii reduction based on +# 2009 paper by DeMaria or the NCST Bias Correction scheme outside radius +# of MaxWind, corrected problem with ring of lower wind value introduced +# at times in the transition between 34 knots wind radii and background +# field, and introduced option to use preliminary TCM message being +# pushed to all offices text databases beginning with 2014 season. +# +# Modified: 1 Jun 2014 to fix bugs with Lat grids and add option +# to use WindReductionFactor grids for Mid Atlantic offices. +# Modified: 6 June 2014 to fix bugs with large reduction factors over land. +# Modified: 9 June 2014 to fix GUI option to run or not over Selected Time Range. 
+# +# Modified: 2 July to fix decoding of PRE TCM files +# Whatever options are needed should be carefully coordinated among +# offices. +# +# Last Modified: October 28, 2016 to add mis to maxwindswath == "Yes" and adjust +# shift variable in interpolateQuadrants method. +# Submitted for 17.1.1 +# +# Author: Tom LeFebvre +# Contributor: Pablo Santos +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify +MenuItems = ["Populate"] + +VariableList = [("Product to\ndecode:", [], "check", + ["preTCM","WRKTCM","TCMAT1", "TCMAT2", "TCMAT3", "TCMAT4", "TCMAT5", + "TCMEP1", "TCMEP2", "TCMEP3", "TCMEP4", "TCMEP5"]), + ("Product to\n decode:", [], "check", + ["PREAT1", "PREAT2", "PREAT3", "PREAT4", "PREAT5", + "PREEP1", "PREEP2", "PREEP3", "PREEP4", "PREEP5"]), +# ("Background\nModel:", "Fcst", "radio", ["GFS0p5degGbl", "UKMET", "ECMWFHiRes", "Fcst"]), + ("Number of Pie Slices?", "16", "radio", ["4", "8", "12", "16", "24"]), + ("Eye Diameter:", 0, "scale", [0, 100], 1), + ("34 knot radius at 3 days (NM):", 100, "scale", [0, 1000], 10), + ("34 knot radius at 4 days (NM):", 100, "scale", [0, 1000], 10), + ("34 knot radius at 5 days (NM):", 100, "scale", [0, 1000], 10), + ("Decrease Wind over Land by (%):", 15, "scale", [-20, 50], 1), + ("Make Grids over \nSelected Time Only:", "No", "radio", ["Yes", "No"]), + ("MaxWind Swath for \nTCWindThreat?", "No", "radio", ["Yes", "No"]), + ("Define Asymmetrical \nMax Winds?", "No", "radio", ["Yes", "No"]), + ("Reduce Radii by 15% or \n NC State Bias Correction", "Reduce by 15%", + "radio", ["Reduce by 15%", "NC State Bias Correction"]), + ("Constant Land\nReduction (Slider Bar)\nor Wind 
Reduction\nFactor Grid?", + "Constant", "radio", ["Constant", "Grid"]), + ] + +try: # See if this is the AWIPS I environment + from Numeric import * + import AFPS + AWIPS_ENVIRON = "AWIPS1" +except: # Must be the AWIPS II environment + from numpy import * + import AbsTime + import TimeRange + AWIPS_ENVIRON = "AWIPS2" + +import SmartScript +import DefineMaxWindGUI +import MetLib + +import popen2, string, time, os, pickle +import Exceptions, types, copy + +class TCMDecoder: + def __init__(self): + self.pos = 0 + # key words in TCM products from NCEP + self.keyWordDict = {"HURRICANE CENTER" : self.decodeProductTime, + "FORECAST VALID" : self.decodeWindForecast, + "CENTER LOCATED NEAR" : self.decodeCenterLocation, + "CENTER LOCATED INLAND NEAR" : self.decodeCenterLocation, + "MINIMUM CENTRAL PRESSURE" : self.decodeCentralPressure, + "MAX SUSTAINED WINDS" : self.decodeMaxSustainedWinds, + "MAX WIND" : self.decodeMaxWind, + "OUTLOOK VALID" : self.decodeWindForecast, + "EYE DIAMETER" : self.decodeEyeDiameter, + "64 KT..." : self.decodeRadii, + "50 KT..." : self.decodeRadii, + "34 KT..." 
: self.decodeRadii, + # key words for JTWC products + "WTPN" : self.decodeJTWCProductTime, + "WARNING POSITION:" : self.decodeJTWCTimeCenter, + "VALID AT:" : self.decodeJTWCWindForecast, + "RADIUS OF 034 KT WINDS" : self.decodeJTWCRadii, + "RADIUS OF 050 KT WINDS" : self.decodeJTWCRadii, + "RADIUS OF 064 KT WINDS" : self.decodeJTWCRadii, + "RADIUS OF 100 KT WINDS" : self.decodeJTWCRadii, + " ---" : self.endJTWCWindForecast, + "REMARKS:" : self.stopDecodingJTWC, + } + + self.fcstList = [] # a place to store all of the forecasts + + self.text = [] # the text product + + self.currentFcst = {} # the current forecast we are docoding + + self.baseProductTime = 0 + + self.foundEyeDiameter = 0 + + self.AltFileName = "" + + def calcEyeDiameter(self, center, maxWind): + lat = center[0] # latitude in degrees + maxWind = maxWind / 1.944 # convert to meters per second + rmw = 46.29 * exp(-0.0153 * maxWind + 0.0166 * lat) + + # convert to diameter and convert from km to nm + ed = rmw * 2.0 / 1.852 + return ed + + def stripText(self): + endStr = chr(13) + chr(13) + chr(10) + for i in range(len(self.text)): + self.text[i] = string.replace(self.text[i], endStr, "") + return + + def getFcstList(self): + return self.fcstList + + def getBaseProductTime(self): + return self.baseProductTime + + def getAltInfoFileName(self): + return self.AltFileName + + def currentLine(self): + return self.text[self.pos] + + def nextLine(self): + self.pos = self.pos + 1 + if self.pos < len(self.text): + return self.text[self.pos] + else: + return "" + + def monthNum(self, monthStr): + monthList = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", + "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"] + + try: + return monthList.index(monthStr) + 1 + except ValueError: + return 0 + + def convertBaseTime(self, timeStr): + # timeStr format: "HHMM UTC DAY MON DD YYYY" + + # extract time parts from the str + hour = int(timeStr[0:2]) + minute = int(timeStr[2:4]) + strList = string.split(timeStr) + monthStr = strList[3] + month 
= self.monthNum(monthStr) + day = int(strList[4]) + year = int(strList[5]) + + # time.mktime returns time in seconds but in local time + baseTime = time.mktime((year, month, day, hour, minute, 0, 0, 0, 0)) + + # Adjustment to UTC + diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) + + # subtract timeZone and round to the nearest hour + roundedTime = int((baseTime - diffTime) / 3600) * 3600 + + return roundedTime + + def convert_ddhhmm(self, ddhhmmStr, baseTime): + + # remove the slash if present + ddhhmmStr = string.replace(ddhhmmStr, "/", "") + + if baseTime == 0: + baseTime = time.time() + + # extract the time parts + dayStr = ddhhmmStr[0:2] + hourStr = ddhhmmStr[2:4] + minStr = ddhhmmStr[4:6] + day = int(dayStr) + hour = int(hourStr) + minute = int(minStr) + tupleTime = time.gmtime(baseTime) + year = tupleTime[0] + month = tupleTime[1] + # see if we crossed over to a new month + if tupleTime[2] > day: + month = month + 1 + if month > 12: + month = 1 + year = year + 1 + + newTuple = (year, month, day, hour, minute, tupleTime[5], + tupleTime[6], tupleTime[7], tupleTime[8]) + + secondsTime = time.mktime(newTuple) + # Adjustment to UTC + diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) + return secondsTime - diffTime # subtract timeZone + + def decodeProductTime(self): + # extract the alt filename + self.decodeAltFileName() + # Time of the product found on the next line + timeStr = self.nextLine() + # sanity check for the time string + hhmm = timeStr[0:4] + for c in hhmm: + if not c in string.digits: + return + + baseTime = self.convertBaseTime(timeStr) + self.baseProductTime = baseTime + return + + def decodeAltFileName(self): + nameStr = self.currentLine() + parts = string.split(nameStr) + + self.AltFileName = parts[-1] # grab the last string token + + return + + def decodeCenterLocation(self): + locStr = self.currentLine() + # check for the repeat center....don't want this one + if string.find(locStr, "REPEAT") >= 0: + 
return + + keyWord = "NEAR" + pos = string.find(locStr, keyWord) + if pos > -1: # found it + locStr = locStr[pos + len(keyWord):] + tokenList = string.split(locStr) + if len(tokenList) >= 2: + lat = self.decodeLatLonToken(tokenList[0]) + lon = self.decodeLatLonToken(tokenList[1]) + + if len(tokenList) > 3: # grab the time + validTime = self.convert_ddhhmm(tokenList[3], self.baseProductTime) + # New fcst (analysis actually) + self.currentFcst = {} + self.currentFcst['validTime'] = validTime + self.currentFcst['centerLocation'] = (lat, lon) + self.currentFcst['radii'] = {} + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + + return + + def decodeCentralPressure(self): + keyWord = "MINIMUM CENTRAL PRESSURE" + presStr = self.currentLine() + pos = string.find(presStr, keyWord) + if pos > -1: # found it + presStr = presStr[pos + len(keyWord):] + + return + + def decodeMaxSustainedWinds(self): + keyWord = "MAX SUSTAINED WINDS" + windStr = self.currentLine() + pos = string.find(windStr, keyWord) + if pos > -1: # found it + windList = [] + tokenList = string.split(windStr) + for i in range(len(tokenList)): + if string.find(tokenList[i], "KT") >= 0: + windList.append(float(tokenList[i - 1])) + + # Sometimes there is no max wind/gust reported + if windList == []: + return + + # store the max wind + self.currentFcst["maxWind"] = windList[0] + self.currentFcst["maxGust"] = windList[1] + + # if we have a center location and a max wind we can calc + # the eye diameter + if 'centerLocation' in self.currentFcst and \ + 'maxWind' in self.currentFcst: + # if it's zero it's not in the product and the user didn't + # change it, so calculate it based on the Willoughby formula + if 'eyeDiameter' in self.currentFcst and \ + self.currentFcst['eyeDiameter'] == 0: + self.currentFcst['eyeDiameter'] = self.calcEyeDiameter( + self.currentFcst['centerLocation'], + self.currentFcst['maxWind']) + else: # otherwise use what's been defined or read from the text + 
self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + + return + + def decodeMaxWind(self): + str = self.currentLine() + str = string.replace(str, '.', ' ') # remove ... + tokenList = string.split(str) + if len(tokenList) >= 6: + maxWind = float(tokenList[2]) + maxGust = float(tokenList[5]) + + # store in current fcst + self.currentFcst["maxWind"] = maxWind + self.currentFcst["maxGust"] = maxGust + + # if we have a center location and a max wind we can calc + # the eye diameter + if 'centerLocation' in self.currentFcst and \ + 'maxWind' in self.currentFcst: + # if it's zero it's not in the product and the user didn't + # change it, so calculate it based on the Willoughby formula + if 'eyeDiameter' in self.currentFcst and \ + self.currentFcst['eyeDiameter'] == 0: + self.currentFcst['eyeDiameter'] = self.calcEyeDiameter( + self.currentFcst['centerLocation'], + self.currentFcst['maxWind']) + else: # otherwise use what's been defined or read from the text + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + + return + + def decodeRadii(self): + # if there's no currentFcst dict, we cannot continue + if self.currentFcst == {}: + return + + + str = self.currentLine() + str = string.replace(str, '.', ' ') # remove ... 
+ tokenList = string.split(str) + # check for KT in the second slot + if len(tokenList) < 4 or tokenList[1] != "KT": + return + radiiWindValue = float(tokenList[0]) + dirList = ["NE", "SE", "SW", "NW"] + radiusList = [] + for token in tokenList: + for d in dirList: + pos = string.find(token, d) + if pos >= 0: + radiusStr = token[:pos] + radius = float(radiusStr) + radiusList.append(radius) + # store the radii info + self.currentFcst['radii'][radiiWindValue] = radiusList + + return + + def decodeWindForecast(self): + # if we're decoding a new forecast, save the old one first + if self.currentFcst != {}: + self.fcstList.append(self.currentFcst) + self.currentFcst = {} # reset + + str = self.currentLine() + str = string.replace(str, '...', ' ') # remove ... + + tokenList = string.split(str) + # decode the validTime + validTime = self.convert_ddhhmm(tokenList[2], self.baseProductTime) + # decode the center location + if len(tokenList) >= 5: + lat = self.decodeLatLonToken(tokenList[3]) + lon = self.decodeLatLonToken(tokenList[4]) + # If we can't decode the lat or lon it's probably an outlook + # with no guidance so just return + if lat == None or lon == None: + print("Failed to decode latStr:", lat, "lonStr:", lon) + return + + # initialize a new forecast and begin filling values + self.currentFcst = {} + self.currentFcst['validTime'] = validTime + self.currentFcst['centerLocation'] = (lat, lon) + self.currentFcst['radii'] = {} + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + + return + + def decodeEyeDiameter(self): + str = self.currentLine() + + tokenList = string.split(str) + diameter = int(tokenList[2]) + + self.currentFcst['eyeDiameter'] = diameter + + # Since we found it in the procuct, set the default diameter + self.defaultEyeDiameter = diameter + self.foundEyeDiameter = 1 # mark that we found it + return + + def decodeTCMProduct(self, TCMProduct, eyeDiameter): + self.text = TCMProduct + self.pos = 0 + self.fcstList = [] + self.defaultEyeDiameter = 
eyeDiameter + + self.stripText() + + try: + while self.pos < len(TCMProduct): + line = self.currentLine() + for k in list(self.keyWordDict.keys()): + if string.find(line, k) > -1: + self.keyWordDict[k]() + break + self.pos = self.pos + 1 + + # store the last forecast in the list of forecasts + if self.currentFcst != {}: + self.fcstList.append(self.currentFcst) + self.currentFcst = {} # reset + except: + # Some problem occured during the decoding process so return an empty fcst + self.baseProductTime = 0 + self.fcstList = {} # reset + + return + + def decodeLatLonToken(self, latLonStr): + dirList = ['N', 'S', 'E', 'W'] + for d in dirList: + pos = string.find(latLonStr, d) + if pos >= 0: + try: + value = float(latLonStr[0:pos]) + if d == 'S' or d == 'W': + value = -value # flip the numeric sign + return value + except: + # it was not decodable (not numbers) + print("Failed to decode lat/lon token:", latLonStr) + return None + + # undecodable latLon for some reason + return None + + def decodeJTWCProductTime(self): + line = self.currentLine() + tokenList = string.split(line) + ddhhmmStr = tokenList[2] + self.baseProductTime = self.convert_ddhhmm(ddhhmmStr, 0) + + self.baseProductTime = int(self.baseProductTime / 3600) * 3600 + return None + + def decodeJTWCTimeCenter(self): + line = self.nextLine() + tokenList = string.split(line) + dateTimeStr = tokenList[0][0:6] + latStr = tokenList[3] + lonStr = tokenList[4] + + # could be None + lat = self.decodeLatLonToken(latStr) + lon = self.decodeLatLonToken(lonStr) + if lon > 0: + lon = lon - 360.0 + productTime = self.convert_ddhhmm(dateTimeStr, self.baseProductTime) + + # make a new fcst object to store the analysis + self.currentFcst = {} + self.currentFcst['validTime'] = productTime + self.currentFcst['centerLocation'] = (lat, lon) + self.currentFcst['radii'] = {} + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + + def decodeJTWCWindForecast(self): + line = self.nextLine() + + tokenList = string.split(line) + 
+ # Grab everything just to the left of the first 'Z' + zPos = string.find(tokenList[0], 'Z') + if zPos >= 0: + timeStr = tokenList[0][0:zPos] + validTime = self.convert_ddhhmm(timeStr, self.baseProductTime) + else: + print("couldnt find Z in timeStr:", line) + return + + latStr = tokenList[2] + lonStr = tokenList[3] + lat = self.decodeLatLonToken(latStr) + lon = self.decodeLatLonToken(lonStr) + if lon > 0: + lon = lon - 360.0 + + # make a new currentFcst and store the info + self.currentFcst = {} + self.currentFcst['validTime'] = validTime + self.currentFcst['centerLocation'] = (lat, lon) + self.currentFcst['radii'] = {} + self.currentFcst['eyeDiameter'] = self.defaultEyeDiameter + return + + def decodeJTWCRadii(self): + line = self.currentLine() + radList = [] + windSpeed = 0 + while string.find(line, "---") == -1 and line != "": + tokenList = string.split(line) + if string.find(line, "RADIUS") >= 0: # it's the first line + # check to see if we need to store the radii first + if radList != []: # we decoded some already + self.currentFcst['radii'][windSpeed] = radList + radList = [] + + # extract the windSpeed for these radii + windSpeed = float(tokenList[2]) + if string.find(line, "QUADRANT") == -1: # no "QUADRANT" found + radius = float(tokenList[6]) + radList = [radius, radius, radius, radius] + else: # QUADRANT found + radius = float(tokenList[6]) + radList = [radius] + else: # no RADIUS found so maybe a QUADRANT line + if string.find(line, "QUADRANT") >= 0: + radius = float(tokenList[0]) + radList.append(radius) + + line = self.nextLine() + + # save the last radii info + if radList != []: + self.currentFcst['radii'][windSpeed] = radList + + # save the whole forecast in the list + self.fcstList.append(self.currentFcst) + self.currentFcst = {} + + return + + def endJTWCWindForecast(self): + + if self.currentFcst != {}: + self.fcstList.append(self.currentFcst) + + self.currentFcst = {} + return + + def stopDecodingJTWC(self): + line = "ZZZZZ" + while line != "": 
+ line = self.nextLine() + return + +# end class TCMDecoder + +# begin class CircleEA +# This class helps make circular edit areas and quadrants thereof. +class CircleEA(SmartScript.SmartScript): + def __init__(self, latGrid, lonGrid, center, slices): + pi = 3.1459 + RadsPerDeg = 2 * pi / 360 + cosLatGrid = cos(latGrid * RadsPerDeg) + self.xDist = (lonGrid - center[1]) * 111.1 * cosLatGrid + self.yDist = (latGrid - center[0]) * 111.1 + self.distGrid = sqrt(pow(self.xDist, 2)+ pow(self.yDist, 2)) + + self.tanGrid = arctan2(-self.xDist, -self.yDist) + # mask off all but the specified quadrant. + self.quadList = [] + for quad in range(1, slices + 1): + minValue = -pi + (quad - 1) * 2 * pi / slices + maxValue = -pi + quad * 2 * pi / slices + + quadrant = logical_and(greater_equal(self.tanGrid, minValue), + less(self.tanGrid, maxValue)) + self.quadList.append(quadrant) + + return + + # Return an edit area for just one quadrant. + # By convention quadrant numbering starts at 1 (due North) and + # progresses clockwise by one slice increment + def getQuadrant(self, quad, radius): + # trim the mask beyond the specified radius + radiusMask = less_equal(self.distGrid, radius) + + quadrant = logical_and(radiusMask, self.quadList[quad - 1]) + return quadrant + + def getDistanceGrid(self): + return self.distGrid + + def getXYDistGrids(self): + return self.xDist, self.yDist + +# end class CircleEA ------------------------------------------------------- + + +class Procedure (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss = dbss + + # Make a timeRange based on the start and end int times + def makeTimeRange(self, start=0, end=0): + + if AWIPS_ENVIRON == "AWIPS1": + + if start == 0 and end == 0: + return AFPS.TimeRange.allTimes() + + startTime = AFPS.AbsTime(start) + endTime = AFPS.AbsTime(end) + + tr = AFPS.TimeRange(startTime, endTime) + + elif AWIPS_ENVIRON == "AWIPS2": + if start == 0 and end == 0: + startTime = 
AbsTime.AbsTime(start) + endTime = AbsTime.maxFutureTime() + else: + startTime = AbsTime.AbsTime(start) + endTime = AbsTime.AbsTime(end) + + tr = TimeRange.TimeRange(startTime, endTime) + else: + self.statusBarMsg("Unknown AWIPS version", "U") + tr = None + + return tr + + def getParmTimeConstraints(self, weName, dbName): + + parm = self.getParm(dbName, weName, "SFC") + + if AWIPS_ENVIRON == "AWIPS1": + parmStart = parm.timeConstraints().startTime() + parmDuration = parm.timeConstraints().duration() + parmRepeat = parm.timeConstraints().repeatInterval() + + elif AWIPS_ENVIRON == "AWIPS2": + parmStart = parm.getGridInfo().getTimeConstraints().getStartTime() + parmDuration = parm.getGridInfo().getTimeConstraints().getDuration() + parmRepeat = parm.getGridInfo().getTimeConstraints().getRepeatInterval() + else: + self.statusBarMsg("Unknown AWIPS version", "U") + return None, None, None + + return parmStart, parmDuration, parmRepeat + + + # Use this method if you have no luck getting products + # directly from the text database + def getTextProductFromFile(self, filename): + f = file(filename, 'r') + textList = [] + line = f.readline() + textList.append(line) + while line != "": + line = f.readline() + textList.append(line) + f.close() + return textList + + # Retrieves a text product from the text database + def getTextProductFromDB(self, productID): + + cmd = "textdb -r " + productID + + # if your path does not include FXA_HOME/bin, + # this line may work instead of the above line. 
+# cmd = "/awips2/fxa/bin/textdb -r " + productID + + (stdout, stdin, stderr) = popen2.popen3(cmd) + + textList = [] + line = stdout.readline() + textList.append(line) + while line != "": + line = stdout.readline() + textList.append(line) + return textList + + def printFcst(self, f, baseTime=None): + print("==============================================================") + print("Time:", time.asctime(time.gmtime(f['validTime'])), end=' ') + if baseTime is not None: + print("LeadTime:", (f['validTime'] - baseTime) / 3600 + 3) + print("Center:", f['centerLocation']) + print("Eye:", f['eyeDiameter']) + if 'maxWind' in f: + print("Max Wind:", f['maxWind']) + radKeys = list(f['radii'].keys()) + sort(radKeys) + print("RADII:") + for r in radKeys: + print(r, "kts:", f['radii'][r]) + + def getWEInventory(self, modelName, WEName, level): + allTimes = self.makeTimeRange(0, 0) + gridInfo = self.getGridInfo(modelName, WEName, level, allTimes) + trList = [] + for g in gridInfo: + start = g.gridTime().startTime().unixTime() + end = g.gridTime().endTime().unixTime() + tr = self.makeTimeRange(start, end) + trList.append(tr) + + return trList + + def timeRangeSort(self, a, b): + if a.startTime() <= b.startTime(): + return -1 + else: + return 1 + + # returns a wind grid from the specified model most closely matched in + # time + def getClosestWindGrid(self, modelName, bgDict, timeTarget): + topo = self.getTopo() + calmGrid = self.makeWindGrid(0.0, 0.0, topo.shape) + + if len(list(bgDict.keys())) == 0: +# print "No background grids available...Using calm grid." 
+ return calmGrid + + minDiff = 3600 * 24 * 365 # just a large number + gridIndex = -1 + tr = None + + # sort the keys by time so we get consistent behavior + bgKeys = list(bgDict.keys()) + bgKeys.sort(self.timeRangeSort) + targetTR = self.makeTimeRange(timeTarget, timeTarget + 3600) + # figure out which grid is closest in time + for invTR in bgKeys: + + # if we have an exact match, we're done + if invTR.overlaps(targetTR): + tr = invTR # set the tr + minDiff = 0 + break + + # update stats for "closest" grid + gTime = invTR.startTime().unixTime() + diff = abs(gTime - timeTarget) + + if diff < minDiff: + tr = invTR + minDiff = diff + + # if we're off by more than 4 hours, return a calm grid + if minDiff > (4 * 3600): + return calmGrid + + # return the closest grid in time + if modelName == "Fcst": + grid = bgDict[tr] + else: + grid = bgDict[tr] + grid = (grid[0] * 1.944, grid[1]) # convert from m/s + + return grid + + # makes a direction grid where winds blow counter-clockwise about + # the specified center. + def makeDirectionGrid(self, latGrid, lonGrid, latCenter, lonCenter): + cycWt = 0.7 # cyclonic circulation weight + convWt = 0.3 # convergence weight + cycU = -(latGrid - latCenter) # pure counter-clockwise circulation + cycV = lonGrid - lonCenter + convU = -cycV # pure convergence + convV = cycU + u = cycU * cycWt + convU * convWt + v = cycV * cycWt + convV * convWt + mag, dir = self.UVToMagDir(u, v) + + return dir + + # interpolates radii information based on the specified info. + # returns a new radii + def interpRadii(self, t1, t2, newTime, f1Radii, f2Radii): + # Add radii if they are not there + radiiList = [34.0, 50.0, 64.0, 100.0] + for r in radiiList: + if r not in f1Radii: + f1Radii[r] = [0, 0, 0, 0] + if r not in f2Radii: + f2Radii[r] = [0, 0, 0, 0] + + newRadii = {} + for r in radiiList: + quadList = [] + # Check for partial list of radii + if len(f1Radii[r]) < 4 or len(f2Radii[r]) < 4: + print("Partial radii list found. 
Substituting with zero radius.") + + # Add zeros if list is partial + while len(f1Radii[r]) < 4: + f1Radii[r].append(0) + while len(f2Radii[r]) < 4: + f2Radii[r].append(0) + + for i in range(4): + r1 = f1Radii[r][i] + r2 = f2Radii[r][i] + radius = r1 + (r2 - r1) * (newTime - t1) / (t2 - t1) + quadList.append(radius) + newRadii[r] = quadList + + return newRadii + + # interpolate the wind forecasts inbetween the two specified forecasts. + # interval is assumed to be specified in hours. + # returns a new list of forecasts with f1 at the front of the list + # and f2 not present at all in the list. + def interpolateWindFcst(self, f1, f2, interval): + + intSecs = 3600 * interval + t1 = f1['validTime'] + t2 = f2['validTime'] + # Just return the first fcst if the interval is too big + if t2 - t1 <= intSecs: + return [f1] + + f1Lat = f1['centerLocation'][0] + f1Lon = f1['centerLocation'][1] + f2Lat = f2['centerLocation'][0] + f2Lon = f2['centerLocation'][1] + f1Eye = f1['eyeDiameter'] + f2Eye = f2['eyeDiameter'] + tDiff = f2['validTime'] - f1['validTime'] + f1MaxWind = f1['maxWind'] + f2MaxWind = f2['maxWind'] + timeSlots = int(tDiff / intSecs) + dLat = (f2Lat - f1Lat) / timeSlots + dLon = (f2Lon - f1Lon) / timeSlots + dEye = (f2Eye - f1Eye) / timeSlots + dMaxWind = (f2MaxWind - f1MaxWind) / timeSlots + f1Radii = f1['radii'] + f2Radii = f2['radii'] + + if "editedMaxWinds" in f1: + emw1 = array(f1["editedMaxWinds"]) + emw2 = array(f2["editedMaxWinds"]) + demw = (emw2 - emw1) / timeSlots + + fcstList = [f1] # include the first fcst in the list + for i in range(1, timeSlots): + newTime = t1 + (i * intSecs) + newLat = f1Lat + (i * dLat) + newLon = f1Lon + (i * dLon) + newEye = f1Eye + (i * dEye) + newMaxWind = f1MaxWind + (i * dMaxWind) + if "editedMaxWinds" in f1: + newEMW = emw1 + (i * demw) + + newRadii = self.interpRadii(t1, t2, newTime, f1Radii, f2Radii) + f = {} + f['centerLocation'] = (newLat, newLon) + f['eyeDiameter'] = newEye + f['validTime'] = newTime + f['maxWind'] 
= newMaxWind + f['radii'] = newRadii + if "editedMaxWinds" in f1: + f['editedMaxWinds'] = list(newEMW) + fcstList.append(f) + + return fcstList + + def calcRadiusList(self, maxWind, rmw, rad34, newRadii): + for i in range(len(newRadii)): + # linearly interpolate + newRadii[i] = rmw + ((rmw - rad34) / (maxWind - 34.0)) / (64.0 - maxWind) + if newRadii[i] < 0: + newRadii[i] = 0 + return newRadii + + + # This method fills in radii/wind values one way for the 36-72 hour period + # and another way for the 72-120 hour period. The concept is to add more + # data values to the wind field so that the wind grids look more realistic. + def extrapolateRadii(self, fcstList, baseDecodedTime, radiiFactor): + for i in range(1, len(fcstList)): + fcst = fcstList[i] + prevFcst = fcstList[i-1] + + # calc the lead time in hours + leadTime = (fcst['validTime'] - baseDecodedTime) / 3600 + 3 + + + extRadius = self.getOutlookRadius(leadTime) + zeroRadius = extRadius * radiiFactor + + if leadTime <= 36: # no extrapolation for these times + continue + # for this period, manufacture new 64 knot radii under specific conditions + if leadTime > 36 and leadTime <= 72: + # make sure we have the data we need + if 64 not in prevFcst['radii']: + continue + if 50 not in prevFcst['radii']: + continue + if 50 not in fcst['radii']: + continue + if 64 in fcst['radii']: + continue + + if fcst['maxWind'] <= 64: + continue + + prev50 = prevFcst['radii'][50] + prev64 = prevFcst['radii'][64] + fcst50 = fcst['radii'][50] + newRadii = [0, 0, 0, 0] + for i in range(len(prev50)): + if prev50[i] == 0: + continue + + newRadii[i] = fcst50[i] / prev50[i] * prev64[i] + + if 64 not in fcst['radii']: + fcst['radii'][64] = newRadii + # add in a 5 knot radius for better blending + fcst['radii'][5.0] = [zeroRadius, zeroRadius, zeroRadius, zeroRadius] + + elif leadTime > 72: # different algorithm for beyond 72 hours + + # if there are radii already defined, don't extrapolate new radii + if "radii" in fcst: + if 
len(fcst["radii"]) > 0: + continue + + # Stuff radii into the rDict to make a cyclone + maxWind = 0 + if "maxWind" in fcst: + maxWind = fcst["maxWind"] + + rDict = {} + + # add the radii for maxWind at the rmw + if maxWind > 0: + # calculate an rmw + lat = fcst["centerLocation"][0] # latitude in degrees + rmw = 46.29 * exp(-0.0153 * (maxWind / 1.944) + 0.0166 * lat) + rmw = rmw / 1.852 # convert to nautical miles + + for ws in [64.0, 50.0]: + newRadii = [0, 0, 0, 0] + if ws < maxWind: + newRadii = self.calcRadiusList(maxWind, rmw, extRadius, newRadii) + rDict[ws] = newRadii + + rDict[34.0] = [extRadius, extRadius, extRadius, extRadius] + rDict[5.0] = [zeroRadius, zeroRadius, zeroRadius, zeroRadius] + fcst['radii'] = rDict + + return fcstList + + # Smooths the specified grid by the specified factor + # With factor == 3, 3x3 smooth, factor == 5 5x5 smooth, etc. + # Even factors (4, 6, 8,...) round up to the next odd value + # If factors <3 are specified, the unmodified grid is returned. + def smoothGrid(self, grid, factor): + # factors of less than 3 are useless or dangerous + if factor < 3: + return grid + + # Specifying the grid type depends on the environment + + typecode = float64 + + st = time.time() + half = int(factor)/ 2 + sg = zeros(grid.shape, typecode) + count = zeros(grid.shape, typecode) + gridOfOnes = ones(grid.shape, typecode) + + for y in range(-half, half + 1): + for x in range(-half, half + 1): + if y < 0: + yTargetSlice = slice(-y, None, None) + ySrcSlice = slice(0, y, None) + if y == 0: + yTargetSlice = slice(0, None, None) + ySrcSlice = slice(0, None, None) + if y > 0: + yTargetSlice = slice(0, -y, None) + ySrcSlice = slice(y, None, None) + if x < 0: + xTargetSlice = slice(-x, None, None) + xSrcSlice = slice(0, x, None) + if x == 0: + xTargetSlice = slice(0, None, None) + xSrcSlice = slice(0, None, None) + if x > 0: + xTargetSlice = slice(0, -x, None) + xSrcSlice = slice(x, None, None) + + target = [yTargetSlice, xTargetSlice] + src = [ySrcSlice, 
xSrcSlice] + sg[target] = sg[target] + grid[src] + count[target] = count[target] + gridOfOnes[src] + return sg / count + + # Smooths the direction grid without regard to the magnitude + def smoothDirectionGrid(self, dirGrid, factor): + mag = ones(dirGrid.shape, float32) # 1.0 everywhere + u, v = self.MagDirToUV(mag, dirGrid) + u = self.smoothGrid(u, factor) + v = self.smoothGrid(v, factor) + mag, dirGrid = self.UVToMagDir(u, v) + return dirGrid + + def makeWindGrid(self, mag, direct, gridShape): + mag = ones(gridShape, float32) * mag + direct = ones(gridShape, float32) * direct + return mag, direct + + def decreaseWindOverLand(self, grid, fraction, Topo, timeRange): + + if self.lessOverLandGrid == "Grid": + + windFactorGrid = self.getWindReductionFactorGrid("Fcst", timeRange) + if windFactorGrid is not None: + # Restrict reduction to the cyclone winds defined by the TCM + grid = where(self._cycloneMask, grid * (1 - windFactorGrid), grid) + return grid + else: + # If no grid was found just return the standard reduction + self.statusBarMsg("Wind Reduction Factor grid not found. Using standard reduction." , "S") + + # If area over which you desire to apply land correction you prefer be + # based on Edit Are instead of areas with Topo greater than zero then + # uncomment the next two lines and specify Edit Area to use. + + #editArea = self.getEditArea("LAND_EDIT_ARE_NAME_HERE") + #mask = self.encodeEditArea(editArea) + + # Restrict reduction to the cyclone winds defined by the TCM + mask = logical_and(greater(Topo, 0.0), self._cycloneMask) + + grid = where(mask, grid * fraction, grid) + + return grid + + # fetches and returns all of the wind reduction factor grids in Fcst DB. 
+ def getWindReductionFactorGrid(self, modelName, timeRange): + try: + inv = self.getWEInventory(modelName, "WindReductionFactor", "SFC") + for tr in inv: + if tr.overlaps(timeRange): + WindRedGrid = self.getGrids(modelName, "WindReductionFactor", "SFC", + timeRange, mode="First") + return WindRedGrid + # If no overlapping grids, return None + return None + except: + return None + + def getTimeConstraintDuration(self, element): + + parmStart, parmDuration, parmRepeat = self.getParmTimeConstraints(element, "Fcst") + return parmDuration + + def getParmMinMaxLimits(self, modelName, weName): + + parm = self.getParm(modelName, weName, "SFC") + + if AWIPS_ENVIRON == "AWIPS1": + return parm.minLimit(), parm.maxLimit() + elif AWIPS_ENVIRON == "AWIPS2": + return parm.getGridInfo().getMinValue(), parm.getGridInfo().getMaxValue() + else: + self.statusBarMsg("Unknown AWIPS version", "U") + return None, None + + return + + # returns the maximum allowable wind speed based on NWS directives + def getMaxAllowableWind(self, maxWind): + minAllowable, maxAllowable = self.getParmMinMaxLimits("Fcst", "Wind") + + return min(maxWind, maxAllowable) + + # returns an interpolated radius based on input radii + def getOutlookRadius(self, leadTime): + leadTimeList = [72, 96, 120] + radiusList = [self.day3Radius, self.day4Radius, self.day5Radius] + + if leadTime < leadTimeList[0]: + return radiusList[0] + + for i in range(1, len(leadTimeList)): + if leadTime < leadTimeList[i]: + dt = leadTimeList[i] - leadTimeList[i - 1] + dr = radiusList[i] - radiusList[i - 1] + return radiusList[i - 1] + (leadTime - leadTimeList[i - 1]) * dr / dt + + return radiusList[-1] # return the last item + + # Blends the specified grid together + def blendGrids(self, windGrid, bgGrid): + + # Combine the two grids using the windGrid for the cyclone and the + # background grid everywhere else. 
+ + windMag, windDir = windGrid + bgMag, bgDir = bgGrid + + mask = greater_equal(windMag, 34.0) + + # No background winds inside any defined wind radii + # Add in the point inside the defined wind radii + mask = logical_or(mask, self._cycloneMask) + + magGrid = where(mask, windMag, bgMag) + dirGrid = where(mask, windDir, bgDir) + + return magGrid, dirGrid + + def getLatLonGrids(self): + # Try to get them from the fcst database to save time + try: + trList = self.getWEInventory("Fcst", "latGrid", "SFC") + except: + trList= [] + + if len(trList) > 0: + timeRange = trList[0] + latGrid = self.getGrids("Fcst", "latGrid", "SFC", timeRange, + mode = "First", noDataError = 0) + lonGrid = self.getGrids("Fcst", "lonGrid", "SFC", timeRange, + mode = "First", noDataError = 0) + if latGrid != None and lonGrid != None: + return latGrid, lonGrid + + # make the lat and lon grids + gridLoc = self.getGridLoc() + + latGrid, lonGrid = MetLib.getLatLonGrids(gridLoc) + + start = int(time.time() / (24 * 3600)) * 24 * 3600 + end = start + (24 * 3600) + timeRange = self.makeTimeRange(start, end) + + # Temporarily save them in the forecast database + self.createGrid("Fcst", "latGrid", "SCALAR", latGrid, timeRange, + descriptiveName=None, timeConstraints=None, + precision=1, minAllowedValue=-90.0, + maxAllowedValue=90.0) + + self.createGrid("Fcst", "lonGrid", "SCALAR", lonGrid, timeRange, + descriptiveName=None, timeConstraints=None, + precision=1, minAllowedValue=-360.0, + maxAllowedValue=180.0) + + return latGrid, lonGrid + + # This method interpolates the specified radii in rDict to the + # number of slices specified in pieSlices. This adds more angular + # resolution to the wind forecast which typically comes with 4 slices. 
+ def interpolateQuadrants(self, rDict, pieSlices): + # make sure we have something to do first + if pieSlices <= 4: + return rDict + + newDict = {} + for k in list(rDict.keys()): + rList = rDict[k] # fetch the list of radii + + interpFactor = pieSlices / len(rList) + newList = [] + for i in range(-1, len(rList) -1): + minVal = rList[i] + maxVal = rList[i + 1] + dVal = (maxVal - minVal) / interpFactor + for f in range(interpFactor): + radius = minVal + dVal * f + # make sure we never exceed the forecast radius +## if radius > minVal: +## radius = minVal + newList.append(radius) + + # Since we started with the NW quadrant we need to shift + # the list so that it starts at North to conform to convention + shift = int(pieSlices / 4) - 1 + shiftedList = newList[shift:] + shiftedList = shiftedList + newList[:shift] + newDict[k] = shiftedList + return newDict + + + # fetches and returns all of the wind grids specified by the model + # name. Should be called before any new wind grids are created + def getBackgroundGrids(self, modelName): + bgDict = {} + + modelName = "Fcst" + siteID = self.getSiteID() + if modelName == "Fcst": + level = "SFC" + elementName = "Wind" + else: + modelName = siteID + "_D2D_" + modelName + if modelName.find("ECMWFHiRes") > -1: + level = "SFC" + elementName = "wind" + else: + level = "FHAG10" + elementName = "wind" + + inv = self.getWEInventory(modelName, elementName, level) + for tr in inv: + bgDict[tr] = self.getGrids(modelName, elementName, level, + tr, mode="First") + return bgDict + + def secondsToYYYYMMDDHH(self, baseTime): + # convert the base time to a string + gTime = time.gmtime(baseTime) + yearStr = str(gTime.tm_year) + monthStr = str(gTime.tm_mon) + dayStr = str(gTime.tm_mday) + hourStr = str(gTime.tm_hour) + while len(monthStr) < 2: + monthStr = "0" + monthStr + while len(dayStr) < 2: + dayStr = "0" + dayStr + while len(hourStr) < 2: + hourStr = "0" + hourStr + + baseTimeStr = yearStr + monthStr + dayStr + hourStr + + return 
baseTimeStr + + + # returns the index corresponding to the specified timeStr and fcstHour + def findFcst(self, fcstList, fcstHour): + for i in range(len(fcstList)): + validTime = fcstList[i]["validTime"] + leadTime = (validTime - self.baseDecodedTime) / 3600 + if fcstHour == leadTime: + return i + + return None + + # Accepts the number of slices to interpolate and a list of defined + # wind values. Returns a new list of length slices with the specified + # windList interpolated to the new resolution. + def interpWindMax(self, slices, windList): + + maxWindList = [0.0] * slices + + quads = len(windList) + ratio = slices / quads + intOffset = int(ratio / 2) + floatOffset = float(intOffset) / ratio + sliceMap = [] + windPos = [0] * len(windList) + + # Figure out the left and right positions for each new slice + for i in range(slices): + left = int((i - int(ratio/2)) / ratio) + if i % ratio == int(ratio/2): + right = left + windPos[left] = i + else: + right = left + 1 + + if right >= quads: + right = right - quads + + sliceMap.append((left, right)) + + # Do the actual interpolation based on the above positions + interpWindList = [] + for i in range(slices): + left, right = sliceMap[i] + + if left == right: + val = windList[left] + absDist = 1.1111 + elif windPos[left] > windPos[right]: + absDist = slices - abs(windPos[right] - windPos[left]) + else: + absDist = abs(windPos[right] - windPos[left]) + + diff = i - windPos[left] + if diff < 0: + diff = slices + diff + val = windList[left] + diff * ((windList[right] - windList[left]) / absDist) + interpWindList.append(val) + + return interpWindList + + + # Calculate the radius of the maxWind based on teh specified eyeDiameter + def maxWindRadius(self, eyeDiameter=None): + + if eyeDiameter is None: + return 12.5 + + rmw = (eyeDiameter / 2.0) + 8.0 + + return rmw + + def adjustMaxWind(self, outSpeed, inSpeed, outRadius, inRadius, + globalMaxWind, maxWindList, maxWindRadius, quad): + + maxWind = maxWindList[quad] + + # check 
which speed/radius should be modified + if outSpeed == globalMaxWind: + outSpd = maxWind + outRad = maxWindRadius + inSpd = inSpeed + inRad = inRadius + elif inSpeed == globalMaxWind: + inSpd = maxWind + inRad = maxWindRadius + outSpd = outSpeed + outRad = outRadius + else: + print("ERROR!!! Neither inSpeed or outSpeed is max!!!") + + return outSpd, inSpd, outRad, inRad, maxWind + + # Makes a Rankine Vortex wind speed grid that decreases exponentially + # from the known values at known radii. Inside the Radius of maximum + # wind the wind decreases linearly toward the center + def makeRankine(self, f, latGrid, lonGrid, pieSlices, radiiFactor, timeRange): + st = time.time() + rDict = f['radii'] + + rDict = self.interpolateQuadrants(rDict, pieSlices) + + validTime = f['validTime'] + center = f['centerLocation'] + maxWind = f['maxWind'] + + circleEA = CircleEA(latGrid, lonGrid, center, pieSlices) + + # make a list that contains the highest non-zero radius speed + centerWindList = [0] * pieSlices + for k in list(rDict.keys()): + for i in range(len(rDict[k])): + if rDict[k][i] > 0 and k > centerWindList[i]: + centerWindList[i] = k + + + for k in list(rDict.keys()): + if rDict[k] == [0] * pieSlices: + del rDict[k] + # make a list of lowest wind speed found with zero radius + # and save the next lowest wind speed for later + if 100.0 in rDict: + speedList = [None, 100.0, 50.0, 34.0, 5.0] + else: + speedList = [None, 64.0, 50.0, 34.0, 5.0] + + zeroRadList = [999] * pieSlices + validRadList = [999] * pieSlices + for s in range(len(speedList) - 1): + speed = speedList[s] + nextSpeed = speedList[s + 1] + if speed not in rDict: + zeroRadList = [speed] * pieSlices + validRadList = [nextSpeed] * pieSlices + else: + for i in range(len(rDict[speed])): + if rDict[speed][i] == 0: + zeroRadList[i] = speed + validRadList[i] = nextSpeed + + # get the distance grid and make sure it's never zero anywhere + distanceGrid = circleEA.getDistanceGrid() / 1.852 # dist in NM + 
distanceGrid[distanceGrid == 0] = 0.01 + + # make a grid into which we will define the wind speeds + grid = self.empty() + + # The cyclone algorithm depends on the forecast lead time + fcstLeadTime = (validTime - self.baseDecodedTime) / 3600 + + # add the radius for maxWind for interpolation + if 'eyeDiameter' in f: + eyeDiameter = f['eyeDiameter'] + else: + print("Error --- no eye diameter found.") + eyeDiameter = None + + maxWindRadius = self.maxWindRadius(eyeDiameter) + + maxWindList = [] + # add the edited maxWind values, if any + if "editedMaxWinds" in f: + # First interpolate based on pie slices + maxWindList = self.interpWindMax(pieSlices, f["editedMaxWinds"]) + + # Add in the maxWind and radius as a point + if 'maxWind' not in rDict: + rDict[maxWind] = [maxWindRadius] * pieSlices + # extract the list and sort it + wsList = list(rDict.keys()) + wsList.sort() + + # insert a dummy wind near the center and append so it's done last + rDict[1] = [1] * pieSlices + wsList.append(1.0) + + # insert an artificial 5 knot radius at a distance proportional + # to the 34 knot radius for that quadrant + tenKnotRadiusList = [108.0] * pieSlices + + if 34.0 in rDict: + tenKnotRadList = [] + radList34 = rDict[34.0] + for r in radList34: + tenKnotRadList.append(r * radiiFactor) + + # insert the 5 knot radius at the beginning so is made first + rDict[5.0] = tenKnotRadList + wsList.insert(0, 5.0) + + insideRMWMask = self.empty(bool) + self._cycloneMask = self.empty(bool) + # for each rDict record and quadrant, make the grid one piece at a time + for i in range(len(wsList) - 1): + self.lastRadius = [None] * pieSlices + if wsList[i] not in rDict: + continue + radiusList = rDict[wsList[i]] + nextRadiusList = rDict[wsList[i + 1]] + + maxRadius = maxWindRadius # temp copy + for quad in range(len(radiusList)): + + maxRadius = maxWindRadius # temp copy + maxWind = f['maxWind'] # reset maxWind as we may fiddle with it + + # fetch the speeds and radii we'll need + outSpeed = 
float(wsList[i]) + inSpeed = float(wsList[i + 1]) + outRadius = float(radiusList[quad]) + inRadius = float(nextRadiusList[quad]) + + # Here's where the speeds and radii are adjusted based + # on the edited values but only if they have been edited + # and only if we're working on the maxWind. + if "editedMaxWinds" in f: + if maxWind == wsList[i] or maxWind == wsList[i+1]: + outSpeed, inSpeed, outRadius, inRadius, maxWind = \ + self.adjustMaxWind(outSpeed, inSpeed, outRadius, + inRadius, maxWind, maxWindList, + maxWindRadius, quad) + + # Some cases require we adjust the maxWindRadius + if outSpeed in [64.0, 50.0, 34.0] and outRadius <= maxWindRadius: + inRadius = outRadius * 0.9 + self.lastRadius[quad] = outRadius + elif inSpeed == 1.0 and self.lastRadius[quad] is not None: + outRadius = self.lastRadius[quad] * 0.9 + #print "Adjusting MaxWindRadius at:", inSpeed, "kts" + self.lastRadius[quad] = None + + # reset the speeds if they exceed the maxWind + if fcstLeadTime <= 72 and zeroRadList[quad] is not None: + if inSpeed >= zeroRadList[quad]: + inSpeed = validRadList[quad] + if outSpeed >= zeroRadList[quad]: + outSpeed = validRadList[quad] + + # set the center value to max fcst wind + if inSpeed == 1.0: + inSpeed = centerWindList[quad] + + # get the edit area for this quadrant + mask = circleEA.getQuadrant(quad + 1, outRadius * 1.852) + + # log10 and exp math functions are fussy about zero + if inSpeed == 0.0: + inSpeed = 0.1 + if outSpeed == 0.0: + outSpeed = 0.1 + if inRadius == 0.0: + inRadius = 0.1 + if outRadius == 0.0: + outRadius = 0.1 + # no wind speed can never exceed the maximum allowable wind speed + if inSpeed > maxWind: + inSpeed = maxWind + if outSpeed > maxWind: + outSpeed = maxWind + + # don't bother with trivial cases + if inRadius < 2.0 and outRadius < 2.0: + continue + if inRadius > outRadius: + continue + + if inSpeed == 0.0 or outSpeed == 0.0: + continue + # calculate the exponent so that we exactly fit the next radius + denom = log10(inRadius / 
outRadius) + if denom == 0: + exponent = 1.0 + else: + exponent = (log10(outSpeed) - log10(inSpeed)) / denom + + # make sure the exponent behaves itself + if exponent > 10.0: + exponent = 10.0 + # inside RMW gets a linear slope to largest of max wind forecasts + if inRadius <= 1.0: + dSdR = (outSpeed - inSpeed) / (outRadius - inRadius) + grid = where(mask, inSpeed + (dSdR * distanceGrid), grid) + insideRMWMask[mask] = True + else: # outside RMW + grid = where(mask, inSpeed * power((inRadius / distanceGrid), exponent), + grid) + if outSpeed >= 34.0 and inSpeed >= 34.0: + self._cycloneMask = logical_or(self._cycloneMask, mask) + + # Apply the NC State correction outside the RMW + if self._applyNCSCorrection: + corrGrid = self.makeCorrectionGrid(latGrid, lonGrid, center) +## self.createGrid("Fcst", "NCSCorr", "SCALAR", corrGrid, self._timeRange, +## precision=3, minAllowedValue=-1.0, maxAllowedValue=1.0) + + m = logical_not(insideRMWMask) + grid[m] *= (1 - corrGrid)[m] + + maxWind = f['maxWind'] # reset again before clipping + + dirGrid = self.makeDirectionGrid(latGrid, lonGrid, center[0], center[1]) + + + # clip values between zero and the maximum allowable wind speed + maxWind = self.getMaxAllowableWind(maxWind) + grid.clip(0.0, maxWind, grid) + # apply the wind reduction over land + fraction = 1.0 - (self.lessOverLand / 100.0) + grid = self.decreaseWindOverLand(grid, fraction, self.elevation, timeRange) + return (grid, dirGrid) + + def makeMaxWindGrid(self, interpFcstList, interval, latGrid, lonGrid, pieSlices, + radiiFactor): + +## if len(interpFcstList) == 0: +## return + + startTime = interpFcstList[0]["validTime"] + endTime = startTime + (123 * 3600) # 123 hours later + + timeRange = self.makeTimeRange(startTime, endTime) + + # Used getGrids to calculate the maximum wind grid. + # + # Fetch the max of the wind grids just generated as this is very fast. 
+ maxWindGrid, maxDirGrid = self.getGrids("Fcst", "Wind", "SFC", timeRange, mode="Max") + + maxWindGrid = self.smoothGrid(maxWindGrid,3) + + self.createGrid("Fcst", "TCMMaxWindComposite", "SCALAR", maxWindGrid, timeRange, + precision=1, minAllowedValue=0.0, maxAllowedValue=200.0) + + # save the grid in the server + self.saveObject("TCMMaxWindGrid", maxWindGrid, "WindGrid") + + return + + def validateCycloneForecast(self, fcstList, baseTime): + + # Now check each forecast to make sure that we have a radius for any + # standard wind values less than the maxWind + + if len(fcstList) == 0: + return False + + windValues = [64, 50, 34] + for f in fcstList: + for value in windValues: + if value > f["maxWind"]: + continue + if value not in f["radii"]: + print(list(f["radii"].keys()), "is missing value:", value) + return False + + return True + + # Returns a dictionary that lists the min and max allowed wind for each hour + def makeWindDict(self, fcstList): + + windDict = {} + + + for f in fcstList: + windValues = list(f["radii"].keys()) + hour = (f["validTime"] - self.baseDecodedTime) / 3600 + maxWind = f["maxWind"] + minWind = 999999.0 + if len(list(f["radii"].keys())) == 0: + minWind = 0.0 + + # Grab the first (highest) forecast wind speed value + if len(windValues) > 0: + minWind = windValues[0] + else: + minWind = 0.0 + + windDict[hour] = (minWind, maxWind) + + return windDict + + # Pop up a GUI that will maxWind values for each quadrant and time + def launchMaxWindGUI(self, fcstList): + + windDict = self.makeWindDict(fcstList) + if AWIPS_ENVIRON == "AWIPS1": + eaMgr = self.eaMgr() + else: + eaMgr = None + + self._maxWindGUI = DefineMaxWindGUI.DefineMaxWindGUI(self._dbss, eaMgr) + + newMaxWinds = self._maxWindGUI.displayGUI(windDict) + + if newMaxWinds is not None: + + hourList = list(newMaxWinds.keys()) + hourList.sort() + + self._maxWindGUI.cancelCommand() + + return newMaxWinds + + # Make the NCState bais correction grid based on the forecast. 
+ def makeCorrectionGrid(self, latGrid, lonGrid, center): + + + # structure to hold the polynomial coefficients + coeff = [[1.282e-011, -3.067e-008, 2.16e-005, -5.258e-003, 3.794e-001], + [3.768e-011, -4.729e-008, 2.097e-005, -3.904e-003, 2.722e-001], + [4.692e-011, -5.832e-008, 2.565e-005, -4.673e-003, 2.952e-001], + [3.869e-011, -4.486e-008, 1.84e-005, -3.331e-003, 2.738e-001]] + + # make the circle edit area and distance grid + pieSlices = 4 + circleEA = CircleEA(latGrid, lonGrid, center, pieSlices) + + dist = circleEA.getDistanceGrid() # dist in km + + corrGrid = self.empty() + + for quad in range(pieSlices): + + ea = circleEA.getQuadrant(quad + 1, 500.0) + grid = coeff[quad][0] * pow(dist, 4) + coeff[quad][1] * pow(dist, 3) + \ + coeff[quad][2] * pow(dist, 2) + coeff[quad][3] * dist + \ + coeff[quad][4] + + corrGrid = where(ea, grid, corrGrid) + + return corrGrid + + def execute(self, varDict, timeRange): + + RADII_FACTOR = 4.5 + + self.setToolType("numeric") + self.toolTimeRange = timeRange + + # define the default eye diameter for bulletins where they are missing + eyeStr = varDict["Eye Diameter:"] + self.dialogEyeDiameter = float(eyeStr) + maxwindswath = varDict["MaxWind Swath for \nTCWindThreat?"] + + Topo = self.getTopo() + + tcDuration = self.getTimeConstraintDuration("Wind") + tcHours = int(tcDuration / 3600) # durations are expressed in seconds + # set the time interpolation interval to the duration + interval = tcHours + + # get the product ID + productList1 = varDict["Product to\ndecode:"] + productList2 = varDict["Product to\n decode:"] + productList1 = productList1 + productList2 # concatenate + if len(productList1) != 1: + self.statusBarMsg("Please select one TCM bulletin only.", "S") + return None + + productID = productList1[0] + + # get the ID for this site + siteID = self.getSiteID() + + bgModelName = "Fcst" + self.day3Radius = varDict["34 knot radius at 3 days (NM):"] + self.day4Radius = varDict["34 knot radius at 4 days (NM):"] + 
self.day5Radius = varDict["34 knot radius at 5 days (NM):"] + + # grab all of the background grids now before we make any new grids + bgDict = self.getBackgroundGrids(bgModelName) + + # Radial slices hard-coded to 4. Changing this will divide the wind + # forecast into more radial pieces. Recommended alternative values: + # 12, 20, 36, 72. + pieSlices = int(varDict["Number of Pie Slices?"]) + + # define radii factor - may make this configurable + # Multiply 3-5 day radius by this factor to get the zero radius. + # Smaller values ramp the cyclone down to zero more quickly. + + + self.lessOverLand = int(varDict["Decrease Wind over Land by (%):"]) + self.lessOverLandGrid = varDict["Constant Land\nReduction (Slider Bar)\nor Wind Reduction\nFactor Grid?"] + self.elevation = Topo + rclDecoder = TCMDecoder() + tcmDecoder = TCMDecoder() + + msg = "" + + # Fetch the text product + if productID == "preTCM": + textProduct = self.getTextProductFromFile("/tmp/Wilma.txt") + decoder = TCMDecoder() + decoder.decodeTCMProduct(textProduct, self.dialogEyeDiameter) + fcstList = decoder.getFcstList() + baseTime = decoder.getBaseProductTime() + #elif productID == "WRKTCM": + # textProduct = self.getTextProductFromFile("/data/local/research/TPCWindProb/WRKTCM") + else: + # try fetching the RCL first. + rclProductID = "MIARCL" + productID[3:] + print("Attempting to Fetch rclProductID:", rclProductID) + rclTextProduct = self.getTextProductFromDB(rclProductID) + completeFcst = False + if len(rclTextProduct) < 5: + #msg = rclProductID + " not found. Using TCM to make cyclone." 
+ # self.statusBarMsg(msg, "S") + rclBaseTime = 0 + else: + rclDecoder.decodeTCMProduct(rclTextProduct, self.dialogEyeDiameter) + rclFcstList = rclDecoder.getFcstList() + rclBaseTime = rclDecoder.getBaseProductTime() + completeFcst = self.validateCycloneForecast(rclFcstList, rclBaseTime) + + if productID[:3] == "PRE": + productID = "MIA" + productID + + tcmTextProduct = self.getTextProductFromDB(productID) + + if len(tcmTextProduct) < 5: + msg = productID + " could not be retrieved from the text database." + self.statusBarMsg(msg, "S") + return None # Just return if no TCM is found. Something's really wrong + else: + tcmDecoder.decodeTCMProduct(tcmTextProduct, self.dialogEyeDiameter) + tcmFcstList = tcmDecoder.getFcstList() + tcmBaseTime = tcmDecoder.getBaseProductTime() + + #print "TCM and RCL Base Times are: ", tcmBaseTime, rclBaseTime + if not completeFcst or rclBaseTime != tcmBaseTime: + msg = "Problem decoding " + rclProductID + " Used TCM to make cyclone.\n" + msg = msg + " Used GUI sliders for 3, 4, 5 day forecast." + #self.statusBarMsg(msg, "S") + fcstList = tcmFcstList + baseTime = tcmBaseTime + else: + msg = "RCL message looked good so used that for TCM." + fcstList = rclFcstList + baseTime = rclBaseTime + productID = rclProductID + + print("Decoded:", len(fcstList), " forecasts.") + + # Set the baseDecodedTime - validTime of first entry - 3 hours + if len(fcstList) > 0: + self.baseDecodedTime = fcstList[0]['validTime'] - 3 * 3600 + + if varDict["Define Asymmetrical \nMax Winds?"] == "Yes": + + newMaxWinds = self.launchMaxWindGUI(fcstList) + for i in range(len(fcstList)): + fcstHour = (fcstList[i]['validTime'] - baseTime) / 3600 + 3 + maxList = newMaxWinds[fcstHour] + fcstList[i]["editedMaxWinds"] = maxList + + fcstList = self.extrapolateRadii(fcstList, baseTime, RADII_FACTOR) + +## # See if the decoded fcst is close to the current time. 
This is needed +## # so the tool will work on archived data sets (testMode) +## testMode = False +## if abs(time.time() - self.baseDecodedTime) > 2 * 24 * 3600: # older than 2 days +## testMode = True + + # restrict grids to the selected time period if option is selected. + testMode = False + restrictAnswer = varDict["Make Grids over \nSelected Time Only:"] + if restrictAnswer == "Yes": + testMode = True + + # Turn off testMode if the selected timeRange is less than an hour in duration + if self.toolTimeRange.duration() < 3600: + testMode = False + + # interpolate the wind forecasts we got from the decoder + selectedStartTime = self.toolTimeRange.startTime().unixTime() + selectedEndTime = self.toolTimeRange.endTime().unixTime() + interpFcstList = [] + for i in range(len(fcstList) - 1): + newFcstList = self.interpolateWindFcst(fcstList[i], fcstList[i+1], + interval) + + # Make sure the fcst is within the selected time range or we're in testMode + for f in newFcstList: + if (testMode and (f['validTime'] >= selectedStartTime and \ + f['validTime'] < selectedEndTime)) or (not testMode): + interpFcstList.append(f) + + # append the very last forecast on to the end of the interpolated list + if len(fcstList) > 0: + if (testMode and (f['validTime'] >= selectedStartTime and \ + f['validTime'] < selectedEndTime)) or (not testMode): + interpFcstList.append(fcstList[-1]) + + if len(fcstList) == 1: + interpFcstList = fcstList + + if len(interpFcstList) == 0: + self.statusBarMsg("No cyclone forecasts found within the Selected TimeRange", + "S") + else: + # If the wind grids are more than 3 hours long, the first grid ends up being double + # duration. 
So, add an extra duplicate forecast at the beginning and reset + # the validTime + print("tcHours:", tcHours) + if tcHours > 3: + interpFcstList.insert(0, copy.deepcopy(interpFcstList[0])) + interpFcstList[0]["validTime"] = (int(interpFcstList[0]["validTime"] / tcDuration) \ + * tcDuration) + interpFcstList[1]["validTime"] = (int(interpFcstList[0]["validTime"] / tcDuration) \ + * tcDuration) + tcDuration + print("Adjusted time for first forecast") + print("Generating", len(interpFcstList), "wind grids") + + # get the lat, lon grids + latGrid, lonGrid = self.getLatLonGrids() + + self._applyNCSCorrection = False + if varDict["Reduce Radii by 15% or \n NC State Bias Correction"] == "Reduce by 15%": + # Reduce the extent of the wind radii per Mark De Maria's research + # Loop through each wind radius and modify in place + for f in interpFcstList: + for windValue in f["radii"]: + for i in range(len(f["radii"][windValue])): + f["radii"][windValue][i] = f["radii"][windValue][i] * 0.85 + elif varDict["Reduce Radii by 15% or \n NC State Bias Correction"] == "NC State Bias Correction": + self._applyNCSCorrection = True + + # make a grid for each interpolate forecast + gridCount = 0 + for f in interpFcstList: + + self._timeRange = timeRange + + validTime = int(f['validTime'] / 3600) * 3600 + bgGrid = self.getClosestWindGrid(bgModelName, bgDict, validTime) + startTime = validTime + endTime = validTime + (interval * 3600) + timeRange = self.makeTimeRange(startTime, endTime) + self._cycloneTimeRange = timeRange + + t1 = time.time() + windGrid = self.makeRankine(f, latGrid, lonGrid, pieSlices, RADII_FACTOR, timeRange) + print("Time to makeRankine:", time.time() - t1) + + magGrid, dirGrid = self.blendGrids(windGrid, bgGrid) + magGrid = self.smoothGrid(magGrid, 5) + dirGrid = self.smoothDirectionGrid(dirGrid, 5) + + name = "Wind" + self.createGrid("Fcst", name, "VECTOR", (magGrid, dirGrid), timeRange, + descriptiveName=None, timeConstraints=None, + precision=1, minAllowedValue=0.0, 
+ maxAllowedValue=200.0) + + gridCount = gridCount + 1 + print("TCMWindTool:", productID, "- Generated", gridCount, \ + "out of", len(interpFcstList), "grids", \ + time.asctime(time.gmtime(timeRange.startTime().unixTime()))) + + # interpolate through forecast period to very high resolution and make + # a composite maxWind grid from those wind grids + if maxwindswath == "Yes": + t1 = time.time() + self.makeMaxWindGrid(interpFcstList, interval, latGrid, lonGrid, pieSlices, + RADII_FACTOR) + print(time.time() - t1, "seconds to generate Max wind composite.") + + if msg != "": + self.statusBarMsg(msg, "S") + + return None diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCStormSurgeThreat.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCStormSurgeThreat.py index b72b5c68cd..843af3bc1d 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCStormSurgeThreat.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCStormSurgeThreat.py @@ -1,792 +1,792 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# StormSurgeThreat -# -# Author: Tom LeFebvre/Pablo Santos -# April 20, 2012 - To use gridded MSL TO NAVD and MSL to MLLW -# corrections and to get rid of Very Low. -# Last Modified: June 7, 2012 Shannon White - To fix the handling of time -# for A2 so it works for both real time and displaced real time -# Migrated TC Coastal Flood for AWIPS2. Updated 6/22/2012. S.O. -# March 11, 2014 to adapt to new PSURGE 2.0/PHISH and VDATUM Datasets in A1. PS -# May 21, 2014: for new PHISH but in AWIPS 2: PS/SW -# Aug 13, 2014: To rename SurgeHtPlustTide to InundationMax and incorporate InundationTiming. 
PS -# Sept 17, 2014: To finalize changes and clean up for 2015initial Baseline Check in. -# -# Sept 18, 2014: Added code to pull grids from NHC via ISC if PHISH not -# Available on time. Left inactive (commented out) for the moment until that can be fully tested later -# in 2014 or in 2015. -# LeFebvre/Santos, July 27, 2015: Expanded Manual options to include Replace and Add options. -# This allows sites to specify manually different threat levels across different edit areas and time ranges. -# See 2015HTIUserGuide for details. -# -# Feb 11, 2016 LeFebvre (16.2.1): Added code to create zero grids and manual grids when -# PSURGE not available. Added checks for current guidance for PHISH and ISC options. -# -# April 14, 2016: Lefebvre/Santos: Added multabledb to restore ISC option -# -# Last Modified: -# 6/20/2016 - Santos: Added code to fix issue of old grid not being deleted when running Manual/Add option. -# 7/15/2016 - Lefebvre/Santos: Added Code to improved Manual Options, numpy compatibility and future builds, -# common methods. Fixed Smoothing Algorithm. inundation grid zeroed out where MHHW <=0. -# 9/8/2016 - Santos: Updated copyISC method to better handle when grids missing in ISC db. -# VERSION 17.1.1 = The one checked in. -# 9/26/16 - LeFebvre - Removed commented out code to pass code review. -# 10/20/16 - Santos - Removed code that stops procedure from running when guidance for current -# advisory is not available and instead advises forecaster. -# 11/3/2016: Santos - Addressed Code Review Comments. -# 12/21/2016: Santos - Added option to adjust InundationMax from manually adjusted InundationTiming grid. -# Also when running with PHISH or PETSS option computes InundationMax from comp max of InundationTiming for consistency. Previously -# they were both retrieved indply from model source and with smoothing it would result in minor differences between -# InundationMax and InundationTiming. 
-# 01/08/2017: Modified BE CAREFUL line when alerting forecaster PSURGE Data is still from a previous cycle. -# 01/09/2017: Renamed UpdateInunMax in GUI for clarity. Also, introduced on Jan 2017 SWiT ability for procedure to force InundationMax that are > 1 and < 1.5 to 1.5. -# This is because TCV rounds to nearest one foot for categorical HTI threat level consistency with inundation graphic. Not doing this would cause TCV to throw away zones that -# might have more than 3% coverage of inundation > 1 but less than 1.5 altogether. Changing TCV to key on anything with InundationMax >= 1 would not -# do because it would then include zones in TCV with inundation forecasts of less than 1 but >= 0.5 overdoing the threat. -# 07/20/2017: Enabled PETSS option for 2018. PS -# 10/11/2017: LeFebvre - GFE: tool failed due to an old grid being present (DR 20309) -# 11/15/2017: Tweaked during SWiT to better handle extended PSurge/PETTS Guidance out to 102 hours, -# improved UpdateInunMax option and made changes to makeInundationTiming methods to accomodate new TCs for -# the TPCSurgeProb and PETSS dbs. -# 03/20/2018 Check in Pablo's fix. -# 4/3/2018 - Additional fixes needed to enable Manual options to work out to 102 hours. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards - -MenuItems = ["Populate"] - -import TropicalUtility, LogStream -import SmartScript -import numpy as np -import TimeRange -import AbsTime -import time -import sys - -VariableList = [("DEFAULT: Typical. Should only be changed in coordination with NHC SS Unit", "", "label"), - ("Forecast Confidence? 
- (Applies to PHISH/PETSS Only)", "Typical (10% Exceedance; for most systems anytime within 48 hours)", -## "radio", ["Low (Prob-only; 10% Exceedance; for ill behaved systems)", - "radio", ["Typical (10% Exceedance; for most systems anytime within 48 hours)", - "Medium (20% Exceedance; for well-behaved systems within 12 hours of event)", - "High (30% Exceedance; for well-behaved systems within 6-12 hours of event)", - "Higher (40% Exceedance; for well-behaved systems within 6 hours of the event)", - "Highest (50% Exceedance; for well-behaved systems at time of the event)"]), - ("Grid Smoothing?", "Yes", "radio", ["Yes","No"]), - ("Make grids from \nPHISH, PETSS, ISC, or Manually?", "PHISH", "radio", ["PHISH", "PETSS", "ISC", "Manually Replace", "Manually Add", "UpdateInunMax (Edit Inundation Timing Grids)"]), - ("Manual Inundation settings: Time ranges below relative to advisory model cycle", "", "label"), - ("Inundation Height:", 1.0, "scale", [0.0, 3.0], 0.1), - ("Start Hour for Inundation Timing", 0, "scale", [0.0, 96.0], 6.0), - ("End Hour for Inundation Timing", 6, "scale", [0.0, 102.0], 6.0), - ] - -class Procedure (TropicalUtility.TropicalUtility): - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - # Compute a base time for this guidance - def baseGuidanceTime(self): - startTime = int((self._gmtime().unixTime() - (2 * 3600)) / (6 * 3600)) * (6 * 3600) - return startTime - - # Method to get the average topography for each grid point - def getAvgTopoGrid(self, topodb): - - siteID = self.getSiteID() -# print "********************\n TOPO IS: ", topodb - dbName = siteID + "_D2D_" + topodb - - weName = "avgTopo" -# timeRange = TimeRange.allTimes().toJavaObj() - trList = self.GM_getWEInventory(weName, dbName, "SFC") - - #print "NED Topo list is", trList - - if len(trList)== 0: - #print "CRAP!!!" 
- return - for tr in trList: -# print "My time is", tr - topoGrid = self.getGrids(dbName, weName, "SFC", tr, mode="First") - - # Convert topography from meters to feet - topoGrid /= 0.3048 - min = -16000 - max = 16000.0 - mask2 = (topoGrid > max) - topoGrid[topoGrid < min] = -80 - topoGrid[mask2] = self.getTopo()[mask2] - return topoGrid - - # Make a time range of x hours duration from the current time - def makeNewTimeRange(self, hours): - - cTime = int(self._gmtime().unixTime()/ 3600) * 3600 - startTime = AbsTime.AbsTime(cTime) - endTime = startTime + (hours * 3600) - timeRange = TimeRange.TimeRange(startTime, endTime) - - return timeRange - - # Method to find all database versions for the specified model - def getModelIDList(self, matchStr): - - # Make a list of all available parameters - availParms = self.availableParms() - - # Initialize a list of the database identifiers we want to keep - modelList = [] - - # Look through every parameter, then check the database id - for pName, level, dbID in availParms: - modelId = dbID.modelIdentifier() - if matchStr in modelId: - if modelId not in modelList: - modelList.append(modelId) - - return modelList - - # Method to get the selected exceedance height data - def getExceedanceHeight(self, modelName, pctStr, level): - - ap = self.availableParms() - dbName = self.getSiteID() + "_D2D_" + modelName - - modelIDList = self.getModelIDList(modelName) - modelIDList.sort() - - if len(modelIDList) == 0: - return None - - surgeModel = modelIDList[-1] - - weName = "Surge" + pctStr + "Pct" - trList = self.GM_getWEInventory(weName, dbName, level) - - if len(trList) == 0: # No grids found for this database - return None - - baseTime = self.baseGuidanceTime() - - if baseTime > trList[0].startTime().unixTime(): - #modelCycle = AbsTime.AbsTime(self.baseGuidanceTime() - (6*3600)) - message = "BE CAREFUL: " + modelName + " IS STILL FROM A PREVIOUS ADVISORY/MODEL CYCLE" - self.statusBarMsg(message, "A") - #return None - - #print "Retrieving 
", weName, " at ", level - # Make a new time range to span all current data - timeRange = self.GM_makeTimeRange(trList[0].startTime().unixTime(), - trList[-1].endTime().unixTime()) - - grid = self.getGrids(dbName, weName, level, timeRange, mode="Max") - -# for tr in trList: -# grid = self.getGrids(dbName, weName, level, tr, mode="Max") - - # Convert current surge values from meters to feet - mask = (grid <= -100) - grid /= 0.3048 - grid[mask] = -80.0 -# grid[mask] = np.where(mask,surgeVal*3.28, np.float32(-80.0)) - - return grid # convert meters to feet - - # Method to create the inundation timing grids - def makeInundationTiming(self, modelName, pctStr, level, smoothThreatGrid, mutableID, ssea, MHHWMask): - - dbName = self.getSiteID() + "_D2D_" + modelName - weName = "Surge" + pctStr + "Pctincr" - #print "Attempting to retrieve: ", weName, level - # get the StormSurgeProb inventory - surgeTRList = self.GM_getWEInventory(weName, dbName, level) - if len(surgeTRList) == 0: - self.statusBarMsg("No PHISH grid found.", "U") - return - - # Make timeRanges for all 13 grids. Start with the beginning of the first Phish grid - baseTime = int(surgeTRList[0].startTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period - endTime = int(surgeTRList[-1].endTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period - if endTime < surgeTRList[-1].endTime().unixTime(): - endTime += 6 * 3600 - trList = self.makeTimingTRs(baseTime, endTime) - - timingGrids = [] - - self.deleteAllGrids(["InundationTiming"]) - for tr in trList: - - if tr in surgeTRList: - phishGrid = self.getGrids(dbName, weName, level, tr) - else: - phishGrid = self.empty() - -# -# For consistency we need to add smoothing here too as we do in execute. 
-# - if phishGrid is None: - self.statusBarMsg("No PHISH grid available for:" + repr(tr), "S") - continue - - #print "SmoothThreatGrid:", smoothThreatGrid - if smoothThreatGrid == "Yes": -# mask = np.greater(phishGrid, 0.0) & ssea -# phishGrid = np.where(np.greater(phishGrid, 0.0), self.GM_smoothGrid(phishGrid, 3, mask), phishGrid) - mask = (phishGrid > 0.0) & ssea - smoothedPhish = self.GM_smoothGrid(phishGrid, 3, mask) - phishGrid[mask] = smoothedPhish[mask] - - # Convert units from meters to feet -# mask = (phishGrid <= -100) - grid = phishGrid / 0.3048 -# grid[mask] = -80.0 -# grid = np.where(phishGrid>-100, phishGrid*3.28, np.float32(-80.0)) # Convert units from meters to feet - - grid.clip(0.0, 100.0, grid) - grid[~ssea] = 0.0 - grid[MHHWMask] = 0.0 - timingGrids.append(grid) - self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1) - - return trList,timingGrids - - def makeInundationMaxGrid(self, timingGrids, trList): - - itCube = np.array(timingGrids) - maxGrid = np.amax(itCube, axis=0) - - now = int(self._gmtime().unixTime() / 3600) * 3600 - maxTimeRange = self.GM_makeTimeRange(now, now + 48 * 3600) - - self.createGrid(self.mutableID(), "InundationMax", "SCALAR", maxGrid, maxTimeRange) - - return maxGrid - -#************************************************************************************** -# THis procedure was written to extract MSL to NAVD corrections from the VDATUMS D2D -# Database. It is not yet implemented because the VDATUMS database has not been -# finalized. 
- - def deleteAllGrids(self, weList): - - for weName in weList: - trList = self.GM_getWEInventory(weName) - if len(trList) == 0: - continue - start = trList[0].startTime().unixTime() - end = trList[-1].endTime().unixTime() - tr = self.GM_makeTimeRange(start, end) - - self.deleteCmd([weName], tr) - - return - - def getVDATUM(self, weName, limit): - siteID = self.getSiteID() - dbName = siteID + "_D2D_VDATUMS" - - grid = self.getGrids(dbName, weName, "SFC", TimeRange.allTimes(), - mode="First") - - if grid is None: - msgStr = weName + " does not exist in the VDATUMS model. " - self.statusBarMsg(msgStr, "S") - - mask = (grid <= limit) - grid /= 0.3048 - grid[mask] = -80.0 - - # Converted from meters to feet - return grid - -# This procedure was written to extract MSL to NAVD88 corrections from the VDATUMS D2D -# Database. - - def getMSLtoNAVD(self): - return self.getVDATUM("MSLtoNAVD88", -0.40) - - -# This procedure was written to extract MSL to MLLW corrections from the VDATUMS D2D -# Database. - - def getMSLtoMLLW(self): - return self.getVDATUM("MSLtoMLLW", 0.0) - -# This procedure was written to extract MSL to MHHW corrections from the VDATUMS D2D -# Database. - - def getMSLtoMHHW(self): - return self.getVDATUM("MSLtoMHHW", -3.09) - -# This procedure was written to extract NAVD88 to MLLW corrections from the VDATUMS D2D -# Database. - def getNAVDtoMLLW(self): - return self.getVDATUM("NAVD88toMLLW", -2.20) - -# This procedure was written to extract NAVD88 to MLLW corrections from the VDATUMS D2D -# Database. - - def getNAVDtoMHHW(self): - return self.getVDATUM("NAVD88toMHHW", -3.40) - - # Copies the specified weather elements in elementList into the Fcst database. 
- def copyISCGridstoFcst(self, elementList, mutableID): - - # Initialize all the grids we plan to return - - surgePctGrid = None - surgePctGridMSL = None - surgePctGridMLLW = None - surgePctGridMHHW = None - surgePctGridNAVD = None - - baseTime = self.baseGuidanceTime() - - # Remove all the grids first before replacing them later - - self.deleteCmd(elementList, TimeRange.allTimes()) - - # Ensure we're not fetching older ISC grids to avoid the ISC purge bug by - # fetching ISC grids within a specific window. - allTimes = TimeRange.allTimes() - iscStart = AbsTime.AbsTime(baseTime - (10 * 3600)) # 10 hours before the baseTime - iscEnd = allTimes.endTime() # Latest time possible - ISCTRWindow = TimeRange.TimeRange(iscStart, iscEnd) - -# Amended To distinguish when inundation grids are available but not datum ones. - for weName in elementList: - #print "Processing ISC ", weName - GridsCheck = True - iscWeName = weName + "nc" - # get the inventory for the ISC grids - - try: - trList = self.GM_getWEInventory(iscWeName, "ISC", "SFC", ISCTRWindow) - except: - GridsCheck = False - - if len(trList) == 0: - GridsCheck = False - - if (weName == "InundationMax" or weName == "InundationTiming") and not GridsCheck: - self.statusBarMsg("No inundation grids found in ISC database for " + iscWeName + ". Stopping. Revert Forecast db.", "S") - return None, None, None, None, None - - if not GridsCheck: - self.statusBarMsg("No datum grids in ISC database for " + iscWeName + ". Proceeding without it.", "S") - - # Make sure that the ISC grids are current - if GridsCheck: - if baseTime > trList[0].startTime().unixTime(): - if weName == "InundationMax" or weName == "InundationTiming": - self.statusBarMsg("ISC grids for inundation element " + iscWeName + " are not current. They correspond to a previous cycle. Aborting. Revert Forecast db.", "S") - return None, None, None, None, None - else: - self.statusBarMsg("ISC grids for datum element " + iscWeName + " are not current. 
They correspond to a previous cycle. Proceeding without it.", "S") - GridsCheck = False - - for tr in trList: - grid = self.getGrids("ISC", iscWeName, "SFC", tr) - if iscWeName == "InundationMaxnc" or iscWeName == "InundationTimingnc": - grid.clip(0.0, 100.0, grid) - else: - grid.clip(-30.0, 100.0, grid) - - if iscWeName == "InundationTimingnc": - self.createGrid(mutableID, weName, "SCALAR", grid, tr, precision=2) - elif iscWeName == "InundationMaxnc": - surgePctGrid = grid - self.createGrid(mutableID, weName, "SCALAR", grid, tr, precision=2) - elif iscWeName == "SurgeHtPlusTideMSLnc" and GridsCheck: - surgePctGridMSL = grid - elif iscWeName == "SurgeHtPlusTideMLLWnc" and GridsCheck: - surgePctGridMLLW = grid - elif iscWeName == "SurgeHtPlusTideMHHWnc" and GridsCheck: - surgePctGridMHHW = grid - elif iscWeName == "SurgeHtPlusTideNAVDnc" and GridsCheck: - surgePctGridNAVD = grid - - return surgePctGrid,surgePctGridMSL,surgePctGridMLLW,surgePctGridMHHW,surgePctGridNAVD - - # Make a list of timeRanges that will be used to make InundationTiming grids - def makeTimingTRs(self, baseTime, endTime): - # Make the inundation timing grids - trList = [] - start = baseTime - end = baseTime + 6 * 3600 - while end <= endTime: - tr = TimeRange.TimeRange(AbsTime.AbsTime(start), AbsTime.AbsTime(end)) - trList.append(tr) - start = end - end += 6 * 3600 - - return trList - - def getTimingGrids(self): - - baseTime = self.baseGuidanceTime() - endTime = baseTime + 102 * 3600 - gridList= [] - trList = self.makeTimingTRs(baseTime, endTime) - - for tr in trList: - timingGrid = self.empty() - gridList.append(timingGrid) - - return trList, gridList - - def execute(self, varDict, editArea, timeRange): - - t0 = time.time() - - self._timeRange = timeRange - - mutableID = self.mutableID() - - # List of elements - # See if we should copy from ISC. 
If so, do the copy and exit - smoothThreatGrid = varDict["Grid Smoothing?"] - - makeOption = varDict["Make grids from \nPHISH, PETSS, ISC, or Manually?"] - topodb = "NED" - - ssea = self.encodeEditArea("StormSurgeWW_EditArea") - - Topo = self.getAvgTopoGrid(topodb) - - confidenceStr = varDict["Forecast Confidence? - (Applies to PHISH/PETSS Only)"] - - # extract the percent value from this string - pctPos = confidenceStr.find("%") - pctStr = confidenceStr[pctPos - 2:pctPos] - - threatWEName = "StormSurgeThreat" - - #print "pctStr is: ", pctStr - surgePctGrid = None - surgePctGridMSL = None - surgePctGridMLLW = None - surgePctGridNHHW = None - surgePctGridNAVD = None - - if makeOption == "PHISH" or makeOption == "PETSS": - - # Now get the psurge - if makeOption == "PHISH": - modelName = "TPCSurgeProb" - else: - modelName = "PETSS" - surgePctGrid = self.getExceedanceHeight(modelName, pctStr, "FHAG0") - if surgePctGrid is None: - message = "No inundation data found for " + modelName - self.statusBarMsg(message, "S") - return - - phishMask = ~ssea - surgePctGrid[phishMask] = 0.0 - surgePctGridNAVD = self.getExceedanceHeight(modelName, pctStr, "SFC") - if surgePctGridNAVD is None: - message = "No Surge plus Tide NAVD data found for " + modelName - self.statusBarMsg(message, "S") - return - - surgePctGridNAVD[phishMask] = -80.0 - if surgePctGrid is None or surgePctGridNAVD is None: - return - - # - # The following lines are the gridded vdatum corrections. - # - msltonavd = self.getMSLtoNAVD() - msltomllw = self.getMSLtoMLLW() - msltomhhw = self.getMSLtoMHHW() - navdtomllw = self.getNAVDtoMLLW() - navdtomhhw = self.getNAVDtoMHHW() - - # Apply 3x3 smooth within the surge zone - # for values greater than 1 as to not underplay areas adjacent to zero value pixels. - # If you apply a smoother, for consistency among storm surge plus tide and derived - # grids, it must be done here. 
- if smoothThreatGrid == "Yes": - #mask = np.greater(surgePctGrid, 0.0) & ssea - #surgePctGrid = np.where(np.greater(surgePctGrid, 0.0), self.GM_smoothGrid(surgePctGrid,3, mask), surgePctGrid) - -# mask = np.greater(surgePctGridNAVD, -10.0) & ssea -# surgePctGridNAVD = np.where(np.greater(surgePctGridNAVD, -10.0), self.GM_smoothGrid(surgePctGridNAVD,3, mask), surgePctGridNAVD) - - mask = (surgePctGridNAVD > -10.0) & ssea - surgePctGridNAVD = self.GM_smoothGrid(surgePctGridNAVD, 3, mask) - -# surgePctGridMSL= np.where(mask1, surgePctGridNAVD - msltonavd, np.float32(-80.0)) # MSL Grid - navdMask = (surgePctGridNAVD > -80.0) - mask = (msltonavd > -80.0) & navdMask & ssea - - # MSL Grid - surgePctGridMSL = surgePctGridNAVD - msltonavd - surgePctGridMSL[~mask] = -80.0 - -# surgePctGridMLLW = np.where(np.greater(navdtomllw,-80.0) & np.greater(surgePctGridNAVD,-80.0), \ -# surgePctGridNAVD + navdtomllw, np.float32(-80.0)) # MLLW Grid - - # MLLW Grid - mask = (navdtomllw > -80.0) & navdMask - surgePctGridMLLW = surgePctGridNAVD + navdtomllw - surgePctGridMLLW[~mask] = -80.0 - -# surgePctGridMHHW = np.where(np.greater(navdtomhhw,-80.0) & np.greater(surgePctGridNAVD,-80.0), \ -# surgePctGridNAVD + navdtomhhw, np.float32(-80.0)) # MHHW Grid - # MHHW Grid - mask = (navdtomhhw > -80.0) & navdMask - surgePctGridMHHW = surgePctGridNAVD + navdtomhhw - surgePctGridMHHW[~mask] = -80.0 - -# surgeDiffMLLWMHHW = np.where(np.greater(surgePctGridMLLW,-80.0) & np.greater(surgePctGridMHHW, -80.0), \ -# surgePctGridMLLW-surgePctGridMHHW, np.float32(-80.0)) # Diff Grid Between MLLW and MHHW - - # Diff Grid Between MLLW and MHHW (i.e tidal range) - mask = (surgePctGridMLLW > -80.0) & (surgePctGridMHHW > -80.0) - surgeDiffMLLWMHHW = surgePctGridMLLW - surgePctGridMHHW - surgeDiffMLLWMHHW[~mask] = -80.0 - - # Mask - MHHWMask = surgePctGridMHHW <= 0.0 - - #surgePctGrid[MHHWMask] = 0.0 - - trList,timingGrids = self.makeInundationTiming(modelName, pctStr, "FHAG0", smoothThreatGrid, mutableID, 
ssea, MHHWMask) - #surgePctGrid and InundationMax recomputed from InundationTiming sequence for consistency - surgePctGrid = self.makeInundationMaxGrid(timingGrids, trList) - - elif makeOption == "ISC": - - elementList = ["InundationMax","InundationTiming", "SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW", - "SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"] - surgePctGrid,surgePctGridMSL,surgePctGridMLLW,surgePctGridMHHW,surgePctGridNAVD = self.copyISCGridstoFcst(elementList, mutableID) - # if you look in CopyISC method if either InundationMax or InundationTiming is missing the procedure stops all together and notifies forecaster. - if surgePctGrid is None: - return - - elif makeOption == "Manually Replace" or makeOption == "Manually Add": - - inundationHeight = float(varDict["Inundation Height:"]) - inunStartHour = float(varDict["Start Hour for Inundation Timing"]) - inunEndHour = float(varDict["End Hour for Inundation Timing"]) - - selectedMask = self.encodeEditArea(editArea) - if not selectedMask.any(): - self.statusBarMsg("Please define an area over which to assign the inundation values.", "S") - return - - modifyMask = selectedMask & ssea - if not modifyMask.any(): - self.statusBarMsg("Please define an area that intersects the StormSurgeEditArea to assign the inundation values.", "S") - return # Calculate the intersection of the SSEditArea and selected editAre - - if inunStartHour >= inunEndHour: - self.statusBarMsg("Please define the end hour after the start hour.", "S") - return - - surgePctGrid = self.empty() - - # Fetch the old grids if we're adding - if varDict["Make grids from \nPHISH, PETSS, ISC, or Manually?"] == "Manually Add": - imTRList = self.GM_getWEInventory("InundationMax", mutableID, "SFC") - if len(imTRList) > 0: - imTR = imTRList[0] - surgePctGrid = self.getGrids(mutableID, "InundationMax", "SFC", imTR) - - surgePctGrid[modifyMask] = inundationHeight - - # Make the timing grids - baseTime = self.baseGuidanceTime() - if makeOption == "Manually Replace": 
# Make new grids and replace all IT grids - trList, timingGrids = self.getTimingGrids() - - for i in range(len(trList)): - # only modify grids in the specified time range - start = trList[i].startTime().unixTime() - end = trList[i].endTime().unixTime() - - if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: - timingGrids[i] = surgePctGrid # populate only where needed - - timeRange = TimeRange.allTimes() - self.deleteCmd(["InundationTiming"], timeRange) - for i in range(len(trList)): - timingGrids[i].clip(0.0, 100.0, timingGrids[i]) - self.createGrid(mutableID, "InundationTiming", "SCALAR", timingGrids[i], trList[i]) - - elif makeOption == "Manually Add": # Just replace the selected grid points over the selected time - # Fetch the existing IT grids - itTRList = self.GM_getWEInventory("InundationTiming", mutableID, "SFC") - if len(itTRList) == 0: - self.statusBarMsg("No InundationTiming grids found at all.", "S") - return - #Fetch the grids - itGrids = [] - trList = [] - for tr in itTRList: - start = tr.startTime().unixTime() - end = tr.endTime().unixTime() - #print "Checking tr:", tr - if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: - grid = self.getGrids(mutableID, "InundationTiming", "SFC", tr) - itGrids.append(grid) - trList.append(tr) - - if len(itGrids) == 0: - self.statusBarMsg("No InundationTiming grids found for selected start and end hours.", "S") - return - - # Surgically insert grid values into the InundationTiming grids over the selected hours - for i in range(len(trList)): - itGrids[i][modifyMask] = inundationHeight # poke in the values - - self.createGrid(mutableID, "InundationTiming", "SCALAR", itGrids[i], trList[i]) - - timingGrids = [] - for tr in itTRList: - grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr) - grid[~ssea] = 0.0 - timingGrids.append(grid) - - surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList) - - elif makeOption == 
"UpdateInunMax (Edit Inundation Timing Grids)": - - self.deleteAllGrids(["InundationMax","SurgeHtPlusTideMSL", "SurgeHtPlusTideMLLW", - "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW", "SurgeHtPlusTideMLLW"]) - - itTRList = self.GM_getWEInventory("InundationTiming", mutableID, "SFC") - - if len(itTRList) == 0: - self.statusBarMsg("No InundationTiming grids found at all. Inundation grids required to exist when running with this option. Otherwise run with Manual Replace Option.", "S") - return - - timingGrids = [] - - # Fetch all the timing grids - for tr in itTRList: - grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr) - grid[~ssea] = 0.0 - timingGrids.append(grid) - self.deleteGrid(mutableID, "InundationTiming", "SFC", tr) - self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1) - - # Finally create the surge grid which will be saved as the InundationMax - - surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList) - - #return - # Done with manual options - -# Next line introduced on Jan 2017 SWiT. It forces points in InundationMax that are > 1 and < 1.5 to 1.5. This is because TCV rounds to -# nearest one foot for categorical HTI threat level consistency with inundation graphic. Not doing this would cause TCV to throw away zones that -# might have more than 3% coverage of inundation > 1 but less than 1.5 altogether. Changing TCV to key on anything with InundationMax >= 1 would not -# do because it would then include zones in TCV with inundation forecasts of less than 1 but >= 0.5 overdoing the threat. 
- - surgePctGrid[(surgePctGrid > 1.0) & (surgePctGrid < 1.5)] = 1.5 - - threatKeys = self.getDiscreteKeys(threatWEName) - - # Define a mapping between UI names and key names - # keyMap = {"Very Low" :"Very Low", - keyMap = {"Elevated" : "Elevated", - "Moderate" : "Mod", - "High" : "High", - "Extreme" : "Extreme", - } - - threshDict = {} # a dict to store thresholds from the UI - - for key in keyMap.keys(): - - if keyMap[key] == "Extreme": - threshDict[keyMap[key]] = 9 - elif keyMap[key] == "High": - threshDict[keyMap[key]] = 6 - elif keyMap[key] == "Mod": - threshDict[keyMap[key]] = 3 - elif keyMap[key] == "Elevated": - threshDict[keyMap[key]] = 1 - - #print "threshDict[keyMap[key]]: ", keyMap[key], threshDict[keyMap[key]] - - # make a timeRange - 6 hours long - elementList = ["StormSurgeThreat","InundationMax","SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW", - "SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"] - - # make a new timeRange that will be used to create new grids - timeRange = self.makeNewTimeRange(6) - - # Remove old guidance grids and replace them with the new grids - # Delete the old grids first - cTime = int(self._gmtime().unixTime()/ 3600) * 3600 - startTime = AbsTime.AbsTime(cTime - 48*3600) - endTime = startTime + 240*3600 - deleteTimeRange = TimeRange.TimeRange(startTime, endTime) - - for elem in elementList: - self.deleteCmd([elem], deleteTimeRange) - - if makeOption != "Manually Replace" and makeOption != "Manually Add" and makeOption != "UpdateInunMax (Edit Inundation Timing Grids)": - if surgePctGridMSL is not None: - surgePctGridMSL.clip(-30.0, 100.0, surgePctGridMSL) - self.createGrid(mutableID, "SurgeHtPlusTideMSL", "SCALAR", surgePctGridMSL, - timeRange, precision=2) - if surgePctGridMLLW is not None: - surgePctGridMLLW.clip(-30.0, 100.0, surgePctGridMLLW) - self.createGrid(mutableID, "SurgeHtPlusTideMLLW", "SCALAR", surgePctGridMLLW, - timeRange, precision=2) - if surgePctGridNAVD is not None: - surgePctGridNAVD.clip(-30.0, 100.0, 
surgePctGridNAVD) - self.createGrid(mutableID, "SurgeHtPlusTideNAVD", "SCALAR", surgePctGridNAVD, - timeRange, precision=2) - if surgePctGridMHHW is not None: - surgePctGridMHHW.clip(-30.0, 100.0, surgePctGridMHHW) - self.createGrid(mutableID, "SurgeHtPlusTideMHHW", "SCALAR", surgePctGridMHHW, - timeRange, precision=2) - - # Make the grid. Start with the existing grid if we have one otherwise zeros - coastalThreat = self.empty(np.int8) - surgePctGrid.clip(0.0, 100.0, surgePctGrid) - self.createGrid(mutableID, "InundationMax", "SCALAR", surgePctGrid, timeRange, precision=2) - - # Yet another list to define the order in which we set grid values - # This order must be ranked lowest to highest - #keyList = ["Very Low", "Elevated", "Mod", "High", "Extreme"] - keyList = ["Elevated", "Mod", "High", "Extreme"] - - # Set the grid values based on the surgePctGrid grid and thresholds - for key in keyList: - #print "THRESHOLD FOR KEY IS: ", key, threshDict[key] - thresh = threshDict[key] - keyIndex = self.getIndex(key, threatKeys) - #coastalMask = ssea & np.greater_equal(surgePctGrid, thresh) - coastalMask = ssea & np.greater(surgePctGrid, thresh) - coastalThreat[coastalMask] = keyIndex - -# create the CoastalThreat Grid - self.createGrid(mutableID, threatWEName, "DISCRETE", - (coastalThreat, threatKeys), timeRange, - discreteKeys=threatKeys, - discreteOverlap=0, - discreteAuxDataLength=2, - defaultColorTable="Hazards") - - t1 = time.time() - LogStream.logEvent("Finished TCStormSurgeThreat in %f.4 ms" % ((t1-t0) * 1000)) - - return - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# StormSurgeThreat +# +# Author: Tom LeFebvre/Pablo Santos +# April 20, 2012 - To use gridded MSL TO NAVD and MSL to MLLW +# corrections and to get rid of Very Low. 
+# Last Modified: June 7, 2012 Shannon White - To fix the handling of time +# for A2 so it works for both real time and displaced real time +# Migrated TC Coastal Flood for AWIPS2. Updated 6/22/2012. S.O. +# March 11, 2014 to adapt to new PSURGE 2.0/PHISH and VDATUM Datasets in A1. PS +# May 21, 2014: for new PHISH but in AWIPS 2: PS/SW +# Aug 13, 2014: To rename SurgeHtPlustTide to InundationMax and incorporate InundationTiming. PS +# Sept 17, 2014: To finalize changes and clean up for 2015initial Baseline Check in. +# +# Sept 18, 2014: Added code to pull grids from NHC via ISC if PHISH not +# Available on time. Left inactive (commented out) for the moment until that can be fully tested later +# in 2014 or in 2015. +# LeFebvre/Santos, July 27, 2015: Expanded Manual options to include Replace and Add options. +# This allows sites to specify manually different threat levels across different edit areas and time ranges. +# See 2015HTIUserGuide for details. +# +# Feb 11, 2016 LeFebvre (16.2.1): Added code to create zero grids and manual grids when +# PSURGE not available. Added checks for current guidance for PHISH and ISC options. +# +# April 14, 2016: Lefebvre/Santos: Added multabledb to restore ISC option +# +# Last Modified: +# 6/20/2016 - Santos: Added code to fix issue of old grid not being deleted when running Manual/Add option. +# 7/15/2016 - Lefebvre/Santos: Added Code to improved Manual Options, numpy compatibility and future builds, +# common methods. Fixed Smoothing Algorithm. inundation grid zeroed out where MHHW <=0. +# 9/8/2016 - Santos: Updated copyISC method to better handle when grids missing in ISC db. +# VERSION 17.1.1 = The one checked in. +# 9/26/16 - LeFebvre - Removed commented out code to pass code review. +# 10/20/16 - Santos - Removed code that stops procedure from running when guidance for current +# advisory is not available and instead advises forecaster. +# 11/3/2016: Santos - Addressed Code Review Comments. 
+# 12/21/2016: Santos - Added option to adjust InundationMax from manually adjusted InundationTiming grid. +# Also when running with PHISH or PETSS option computes InundationMax from comp max of InundationTiming for consistency. Previously +# they were both retrieved indply from model source and with smoothing it would result in minor differences between +# InundationMax and InundationTiming. +# 01/08/2017: Modified BE CAREFUL line when alerting forecaster PSURGE Data is still from a previous cycle. +# 01/09/2017: Renamed UpdateInunMax in GUI for clarity. Also, introduced on Jan 2017 SWiT ability for procedure to force InundationMax that are > 1 and < 1.5 to 1.5. +# This is because TCV rounds to nearest one foot for categorical HTI threat level consistency with inundation graphic. Not doing this would cause TCV to throw away zones that +# might have more than 3% coverage of inundation > 1 but less than 1.5 altogether. Changing TCV to key on anything with InundationMax >= 1 would not +# do because it would then include zones in TCV with inundation forecasts of less than 1 but >= 0.5 overdoing the threat. +# 07/20/2017: Enabled PETSS option for 2018. PS +# 10/11/2017: LeFebvre - GFE: tool failed due to an old grid being present (DR 20309) +# 11/15/2017: Tweaked during SWiT to better handle extended PSurge/PETTS Guidance out to 102 hours, +# improved UpdateInunMax option and made changes to makeInundationTiming methods to accomodate new TCs for +# the TPCSurgeProb and PETSS dbs. +# 03/20/2018 Check in Pablo's fix. +# 4/3/2018 - Additional fixes needed to enable Manual options to work out to 102 hours. +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify, Hazards + +MenuItems = ["Populate"] + +import TropicalUtility, LogStream +import SmartScript +import numpy as np +import TimeRange +import AbsTime +import time +import sys + +VariableList = [("DEFAULT: Typical. Should only be changed in coordination with NHC SS Unit", "", "label"), + ("Forecast Confidence? - (Applies to PHISH/PETSS Only)", "Typical (10% Exceedance; for most systems anytime within 48 hours)", +## "radio", ["Low (Prob-only; 10% Exceedance; for ill behaved systems)", + "radio", ["Typical (10% Exceedance; for most systems anytime within 48 hours)", + "Medium (20% Exceedance; for well-behaved systems within 12 hours of event)", + "High (30% Exceedance; for well-behaved systems within 6-12 hours of event)", + "Higher (40% Exceedance; for well-behaved systems within 6 hours of the event)", + "Highest (50% Exceedance; for well-behaved systems at time of the event)"]), + ("Grid Smoothing?", "Yes", "radio", ["Yes","No"]), + ("Make grids from \nPHISH, PETSS, ISC, or Manually?", "PHISH", "radio", ["PHISH", "PETSS", "ISC", "Manually Replace", "Manually Add", "UpdateInunMax (Edit Inundation Timing Grids)"]), + ("Manual Inundation settings: Time ranges below relative to advisory model cycle", "", "label"), + ("Inundation Height:", 1.0, "scale", [0.0, 3.0], 0.1), + ("Start Hour for Inundation Timing", 0, "scale", [0.0, 96.0], 6.0), + ("End Hour for Inundation Timing", 6, "scale", [0.0, 102.0], 6.0), + ] + +class Procedure (TropicalUtility.TropicalUtility): + def __init__(self, dbss): + TropicalUtility.TropicalUtility.__init__(self, dbss) + + # Compute a base time for this guidance + def baseGuidanceTime(self): + startTime = int((self._gmtime().unixTime() - (2 * 3600)) / (6 * 3600)) * (6 * 3600) + return startTime + + # Method to get the average topography for each grid point + def getAvgTopoGrid(self, topodb): + + siteID = self.getSiteID() +# print "********************\n TOPO IS: ", topodb + dbName = 
siteID + "_D2D_" + topodb + + weName = "avgTopo" +# timeRange = TimeRange.allTimes().toJavaObj() + trList = self.GM_getWEInventory(weName, dbName, "SFC") + + #print "NED Topo list is", trList + + if len(trList)== 0: + #print "CRAP!!!" + return + for tr in trList: +# print "My time is", tr + topoGrid = self.getGrids(dbName, weName, "SFC", tr, mode="First") + + # Convert topography from meters to feet + topoGrid /= 0.3048 + min = -16000 + max = 16000.0 + mask2 = (topoGrid > max) + topoGrid[topoGrid < min] = -80 + topoGrid[mask2] = self.getTopo()[mask2] + return topoGrid + + # Make a time range of x hours duration from the current time + def makeNewTimeRange(self, hours): + + cTime = int(self._gmtime().unixTime()/ 3600) * 3600 + startTime = AbsTime.AbsTime(cTime) + endTime = startTime + (hours * 3600) + timeRange = TimeRange.TimeRange(startTime, endTime) + + return timeRange + + # Method to find all database versions for the specified model + def getModelIDList(self, matchStr): + + # Make a list of all available parameters + availParms = self.availableParms() + + # Initialize a list of the database identifiers we want to keep + modelList = [] + + # Look through every parameter, then check the database id + for pName, level, dbID in availParms: + modelId = dbID.modelIdentifier() + if matchStr in modelId: + if modelId not in modelList: + modelList.append(modelId) + + return modelList + + # Method to get the selected exceedance height data + def getExceedanceHeight(self, modelName, pctStr, level): + + ap = self.availableParms() + dbName = self.getSiteID() + "_D2D_" + modelName + + modelIDList = self.getModelIDList(modelName) + modelIDList.sort() + + if len(modelIDList) == 0: + return None + + surgeModel = modelIDList[-1] + + weName = "Surge" + pctStr + "Pct" + trList = self.GM_getWEInventory(weName, dbName, level) + + if len(trList) == 0: # No grids found for this database + return None + + baseTime = self.baseGuidanceTime() + + if baseTime > 
trList[0].startTime().unixTime(): + #modelCycle = AbsTime.AbsTime(self.baseGuidanceTime() - (6*3600)) + message = "BE CAREFUL: " + modelName + " IS STILL FROM A PREVIOUS ADVISORY/MODEL CYCLE" + self.statusBarMsg(message, "A") + #return None + + #print "Retrieving ", weName, " at ", level + # Make a new time range to span all current data + timeRange = self.GM_makeTimeRange(trList[0].startTime().unixTime(), + trList[-1].endTime().unixTime()) + + grid = self.getGrids(dbName, weName, level, timeRange, mode="Max") + +# for tr in trList: +# grid = self.getGrids(dbName, weName, level, tr, mode="Max") + + # Convert current surge values from meters to feet + mask = (grid <= -100) + grid /= 0.3048 + grid[mask] = -80.0 +# grid[mask] = np.where(mask,surgeVal*3.28, np.float32(-80.0)) + + return grid # convert meters to feet + + # Method to create the inundation timing grids + def makeInundationTiming(self, modelName, pctStr, level, smoothThreatGrid, mutableID, ssea, MHHWMask): + + dbName = self.getSiteID() + "_D2D_" + modelName + weName = "Surge" + pctStr + "Pctincr" + #print "Attempting to retrieve: ", weName, level + # get the StormSurgeProb inventory + surgeTRList = self.GM_getWEInventory(weName, dbName, level) + if len(surgeTRList) == 0: + self.statusBarMsg("No PHISH grid found.", "U") + return + + # Make timeRanges for all 13 grids. 
Start with the beginning of the first Phish grid + baseTime = int(surgeTRList[0].startTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period + endTime = int(surgeTRList[-1].endTime().unixTime() / (6 * 3600)) * (6 * 3600) #snap to 6 hour period + if endTime < surgeTRList[-1].endTime().unixTime(): + endTime += 6 * 3600 + trList = self.makeTimingTRs(baseTime, endTime) + + timingGrids = [] + + self.deleteAllGrids(["InundationTiming"]) + for tr in trList: + + if tr in surgeTRList: + phishGrid = self.getGrids(dbName, weName, level, tr) + else: + phishGrid = self.empty() + +# +# For consistency we need to add smoothing here too as we do in execute. +# + if phishGrid is None: + self.statusBarMsg("No PHISH grid available for:" + repr(tr), "S") + continue + + #print "SmoothThreatGrid:", smoothThreatGrid + if smoothThreatGrid == "Yes": +# mask = np.greater(phishGrid, 0.0) & ssea +# phishGrid = np.where(np.greater(phishGrid, 0.0), self.GM_smoothGrid(phishGrid, 3, mask), phishGrid) + mask = (phishGrid > 0.0) & ssea + smoothedPhish = self.GM_smoothGrid(phishGrid, 3, mask) + phishGrid[mask] = smoothedPhish[mask] + + # Convert units from meters to feet +# mask = (phishGrid <= -100) + grid = phishGrid / 0.3048 +# grid[mask] = -80.0 +# grid = np.where(phishGrid>-100, phishGrid*3.28, np.float32(-80.0)) # Convert units from meters to feet + + grid.clip(0.0, 100.0, grid) + grid[~ssea] = 0.0 + grid[MHHWMask] = 0.0 + timingGrids.append(grid) + self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1) + + return trList,timingGrids + + def makeInundationMaxGrid(self, timingGrids, trList): + + itCube = np.array(timingGrids) + maxGrid = np.amax(itCube, axis=0) + + now = int(self._gmtime().unixTime() / 3600) * 3600 + maxTimeRange = self.GM_makeTimeRange(now, now + 48 * 3600) + + self.createGrid(self.mutableID(), "InundationMax", "SCALAR", maxGrid, maxTimeRange) + + return maxGrid + 
+#************************************************************************************** +# THis procedure was written to extract MSL to NAVD corrections from the VDATUMS D2D +# Database. It is not yet implemented because the VDATUMS database has not been +# finalized. + + def deleteAllGrids(self, weList): + + for weName in weList: + trList = self.GM_getWEInventory(weName) + if len(trList) == 0: + continue + start = trList[0].startTime().unixTime() + end = trList[-1].endTime().unixTime() + tr = self.GM_makeTimeRange(start, end) + + self.deleteCmd([weName], tr) + + return + + def getVDATUM(self, weName, limit): + siteID = self.getSiteID() + dbName = siteID + "_D2D_VDATUMS" + + grid = self.getGrids(dbName, weName, "SFC", TimeRange.allTimes(), + mode="First") + + if grid is None: + msgStr = weName + " does not exist in the VDATUMS model. " + self.statusBarMsg(msgStr, "S") + + mask = (grid <= limit) + grid /= 0.3048 + grid[mask] = -80.0 + + # Converted from meters to feet + return grid + +# This procedure was written to extract MSL to NAVD88 corrections from the VDATUMS D2D +# Database. + + def getMSLtoNAVD(self): + return self.getVDATUM("MSLtoNAVD88", -0.40) + + +# This procedure was written to extract MSL to MLLW corrections from the VDATUMS D2D +# Database. + + def getMSLtoMLLW(self): + return self.getVDATUM("MSLtoMLLW", 0.0) + +# This procedure was written to extract MSL to MHHW corrections from the VDATUMS D2D +# Database. + + def getMSLtoMHHW(self): + return self.getVDATUM("MSLtoMHHW", -3.09) + +# This procedure was written to extract NAVD88 to MLLW corrections from the VDATUMS D2D +# Database. + def getNAVDtoMLLW(self): + return self.getVDATUM("NAVD88toMLLW", -2.20) + +# This procedure was written to extract NAVD88 to MLLW corrections from the VDATUMS D2D +# Database. + + def getNAVDtoMHHW(self): + return self.getVDATUM("NAVD88toMHHW", -3.40) + + # Copies the specified weather elements in elementList into the Fcst database. 
+ def copyISCGridstoFcst(self, elementList, mutableID): + + # Initialize all the grids we plan to return + + surgePctGrid = None + surgePctGridMSL = None + surgePctGridMLLW = None + surgePctGridMHHW = None + surgePctGridNAVD = None + + baseTime = self.baseGuidanceTime() + + # Remove all the grids first before replacing them later + + self.deleteCmd(elementList, TimeRange.allTimes()) + + # Ensure we're not fetching older ISC grids to avoid the ISC purge bug by + # fetching ISC grids within a specific window. + allTimes = TimeRange.allTimes() + iscStart = AbsTime.AbsTime(baseTime - (10 * 3600)) # 10 hours before the baseTime + iscEnd = allTimes.endTime() # Latest time possible + ISCTRWindow = TimeRange.TimeRange(iscStart, iscEnd) + +# Amended To distinguish when inundation grids are available but not datum ones. + for weName in elementList: + #print "Processing ISC ", weName + GridsCheck = True + iscWeName = weName + "nc" + # get the inventory for the ISC grids + + try: + trList = self.GM_getWEInventory(iscWeName, "ISC", "SFC", ISCTRWindow) + except: + GridsCheck = False + + if len(trList) == 0: + GridsCheck = False + + if (weName == "InundationMax" or weName == "InundationTiming") and not GridsCheck: + self.statusBarMsg("No inundation grids found in ISC database for " + iscWeName + ". Stopping. Revert Forecast db.", "S") + return None, None, None, None, None + + if not GridsCheck: + self.statusBarMsg("No datum grids in ISC database for " + iscWeName + ". Proceeding without it.", "S") + + # Make sure that the ISC grids are current + if GridsCheck: + if baseTime > trList[0].startTime().unixTime(): + if weName == "InundationMax" or weName == "InundationTiming": + self.statusBarMsg("ISC grids for inundation element " + iscWeName + " are not current. They correspond to a previous cycle. Aborting. Revert Forecast db.", "S") + return None, None, None, None, None + else: + self.statusBarMsg("ISC grids for datum element " + iscWeName + " are not current. 
They correspond to a previous cycle. Proceeding without it.", "S") + GridsCheck = False + + for tr in trList: + grid = self.getGrids("ISC", iscWeName, "SFC", tr) + if iscWeName == "InundationMaxnc" or iscWeName == "InundationTimingnc": + grid.clip(0.0, 100.0, grid) + else: + grid.clip(-30.0, 100.0, grid) + + if iscWeName == "InundationTimingnc": + self.createGrid(mutableID, weName, "SCALAR", grid, tr, precision=2) + elif iscWeName == "InundationMaxnc": + surgePctGrid = grid + self.createGrid(mutableID, weName, "SCALAR", grid, tr, precision=2) + elif iscWeName == "SurgeHtPlusTideMSLnc" and GridsCheck: + surgePctGridMSL = grid + elif iscWeName == "SurgeHtPlusTideMLLWnc" and GridsCheck: + surgePctGridMLLW = grid + elif iscWeName == "SurgeHtPlusTideMHHWnc" and GridsCheck: + surgePctGridMHHW = grid + elif iscWeName == "SurgeHtPlusTideNAVDnc" and GridsCheck: + surgePctGridNAVD = grid + + return surgePctGrid,surgePctGridMSL,surgePctGridMLLW,surgePctGridMHHW,surgePctGridNAVD + + # Make a list of timeRanges that will be used to make InundationTiming grids + def makeTimingTRs(self, baseTime, endTime): + # Make the inundation timing grids + trList = [] + start = baseTime + end = baseTime + 6 * 3600 + while end <= endTime: + tr = TimeRange.TimeRange(AbsTime.AbsTime(start), AbsTime.AbsTime(end)) + trList.append(tr) + start = end + end += 6 * 3600 + + return trList + + def getTimingGrids(self): + + baseTime = self.baseGuidanceTime() + endTime = baseTime + 102 * 3600 + gridList= [] + trList = self.makeTimingTRs(baseTime, endTime) + + for tr in trList: + timingGrid = self.empty() + gridList.append(timingGrid) + + return trList, gridList + + def execute(self, varDict, editArea, timeRange): + + t0 = time.time() + + self._timeRange = timeRange + + mutableID = self.mutableID() + + # List of elements + # See if we should copy from ISC. 
If so, do the copy and exit + smoothThreatGrid = varDict["Grid Smoothing?"] + + makeOption = varDict["Make grids from \nPHISH, PETSS, ISC, or Manually?"] + topodb = "NED" + + ssea = self.encodeEditArea("StormSurgeWW_EditArea") + + Topo = self.getAvgTopoGrid(topodb) + + confidenceStr = varDict["Forecast Confidence? - (Applies to PHISH/PETSS Only)"] + + # extract the percent value from this string + pctPos = confidenceStr.find("%") + pctStr = confidenceStr[pctPos - 2:pctPos] + + threatWEName = "StormSurgeThreat" + + #print "pctStr is: ", pctStr + surgePctGrid = None + surgePctGridMSL = None + surgePctGridMLLW = None + surgePctGridNHHW = None + surgePctGridNAVD = None + + if makeOption == "PHISH" or makeOption == "PETSS": + + # Now get the psurge + if makeOption == "PHISH": + modelName = "TPCSurgeProb" + else: + modelName = "PETSS" + surgePctGrid = self.getExceedanceHeight(modelName, pctStr, "FHAG0") + if surgePctGrid is None: + message = "No inundation data found for " + modelName + self.statusBarMsg(message, "S") + return + + phishMask = ~ssea + surgePctGrid[phishMask] = 0.0 + surgePctGridNAVD = self.getExceedanceHeight(modelName, pctStr, "SFC") + if surgePctGridNAVD is None: + message = "No Surge plus Tide NAVD data found for " + modelName + self.statusBarMsg(message, "S") + return + + surgePctGridNAVD[phishMask] = -80.0 + if surgePctGrid is None or surgePctGridNAVD is None: + return + + # + # The following lines are the gridded vdatum corrections. + # + msltonavd = self.getMSLtoNAVD() + msltomllw = self.getMSLtoMLLW() + msltomhhw = self.getMSLtoMHHW() + navdtomllw = self.getNAVDtoMLLW() + navdtomhhw = self.getNAVDtoMHHW() + + # Apply 3x3 smooth within the surge zone + # for values greater than 1 as to not underplay areas adjacent to zero value pixels. + # If you apply a smoother, for consistency among storm surge plus tide and derived + # grids, it must be done here. 
+ if smoothThreatGrid == "Yes": + #mask = np.greater(surgePctGrid, 0.0) & ssea + #surgePctGrid = np.where(np.greater(surgePctGrid, 0.0), self.GM_smoothGrid(surgePctGrid,3, mask), surgePctGrid) + +# mask = np.greater(surgePctGridNAVD, -10.0) & ssea +# surgePctGridNAVD = np.where(np.greater(surgePctGridNAVD, -10.0), self.GM_smoothGrid(surgePctGridNAVD,3, mask), surgePctGridNAVD) + + mask = (surgePctGridNAVD > -10.0) & ssea + surgePctGridNAVD = self.GM_smoothGrid(surgePctGridNAVD, 3, mask) + +# surgePctGridMSL= np.where(mask1, surgePctGridNAVD - msltonavd, np.float32(-80.0)) # MSL Grid + navdMask = (surgePctGridNAVD > -80.0) + mask = (msltonavd > -80.0) & navdMask & ssea + + # MSL Grid + surgePctGridMSL = surgePctGridNAVD - msltonavd + surgePctGridMSL[~mask] = -80.0 + +# surgePctGridMLLW = np.where(np.greater(navdtomllw,-80.0) & np.greater(surgePctGridNAVD,-80.0), \ +# surgePctGridNAVD + navdtomllw, np.float32(-80.0)) # MLLW Grid + + # MLLW Grid + mask = (navdtomllw > -80.0) & navdMask + surgePctGridMLLW = surgePctGridNAVD + navdtomllw + surgePctGridMLLW[~mask] = -80.0 + +# surgePctGridMHHW = np.where(np.greater(navdtomhhw,-80.0) & np.greater(surgePctGridNAVD,-80.0), \ +# surgePctGridNAVD + navdtomhhw, np.float32(-80.0)) # MHHW Grid + # MHHW Grid + mask = (navdtomhhw > -80.0) & navdMask + surgePctGridMHHW = surgePctGridNAVD + navdtomhhw + surgePctGridMHHW[~mask] = -80.0 + +# surgeDiffMLLWMHHW = np.where(np.greater(surgePctGridMLLW,-80.0) & np.greater(surgePctGridMHHW, -80.0), \ +# surgePctGridMLLW-surgePctGridMHHW, np.float32(-80.0)) # Diff Grid Between MLLW and MHHW + + # Diff Grid Between MLLW and MHHW (i.e tidal range) + mask = (surgePctGridMLLW > -80.0) & (surgePctGridMHHW > -80.0) + surgeDiffMLLWMHHW = surgePctGridMLLW - surgePctGridMHHW + surgeDiffMLLWMHHW[~mask] = -80.0 + + # Mask + MHHWMask = surgePctGridMHHW <= 0.0 + + #surgePctGrid[MHHWMask] = 0.0 + + trList,timingGrids = self.makeInundationTiming(modelName, pctStr, "FHAG0", smoothThreatGrid, mutableID, 
ssea, MHHWMask) + #surgePctGrid and InundationMax recomputed from InundationTiming sequence for consistency + surgePctGrid = self.makeInundationMaxGrid(timingGrids, trList) + + elif makeOption == "ISC": + + elementList = ["InundationMax","InundationTiming", "SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW", + "SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"] + surgePctGrid,surgePctGridMSL,surgePctGridMLLW,surgePctGridMHHW,surgePctGridNAVD = self.copyISCGridstoFcst(elementList, mutableID) + # if you look in CopyISC method if either InundationMax or InundationTiming is missing the procedure stops all together and notifies forecaster. + if surgePctGrid is None: + return + + elif makeOption == "Manually Replace" or makeOption == "Manually Add": + + inundationHeight = float(varDict["Inundation Height:"]) + inunStartHour = float(varDict["Start Hour for Inundation Timing"]) + inunEndHour = float(varDict["End Hour for Inundation Timing"]) + + selectedMask = self.encodeEditArea(editArea) + if not selectedMask.any(): + self.statusBarMsg("Please define an area over which to assign the inundation values.", "S") + return + + modifyMask = selectedMask & ssea + if not modifyMask.any(): + self.statusBarMsg("Please define an area that intersects the StormSurgeEditArea to assign the inundation values.", "S") + return # Calculate the intersection of the SSEditArea and selected editAre + + if inunStartHour >= inunEndHour: + self.statusBarMsg("Please define the end hour after the start hour.", "S") + return + + surgePctGrid = self.empty() + + # Fetch the old grids if we're adding + if varDict["Make grids from \nPHISH, PETSS, ISC, or Manually?"] == "Manually Add": + imTRList = self.GM_getWEInventory("InundationMax", mutableID, "SFC") + if len(imTRList) > 0: + imTR = imTRList[0] + surgePctGrid = self.getGrids(mutableID, "InundationMax", "SFC", imTR) + + surgePctGrid[modifyMask] = inundationHeight + + # Make the timing grids + baseTime = self.baseGuidanceTime() + if makeOption == "Manually Replace": 
# Make new grids and replace all IT grids + trList, timingGrids = self.getTimingGrids() + + for i in range(len(trList)): + # only modify grids in the specified time range + start = trList[i].startTime().unixTime() + end = trList[i].endTime().unixTime() + + if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: + timingGrids[i] = surgePctGrid # populate only where needed + + timeRange = TimeRange.allTimes() + self.deleteCmd(["InundationTiming"], timeRange) + for i in range(len(trList)): + timingGrids[i].clip(0.0, 100.0, timingGrids[i]) + self.createGrid(mutableID, "InundationTiming", "SCALAR", timingGrids[i], trList[i]) + + elif makeOption == "Manually Add": # Just replace the selected grid points over the selected time + # Fetch the existing IT grids + itTRList = self.GM_getWEInventory("InundationTiming", mutableID, "SFC") + if len(itTRList) == 0: + self.statusBarMsg("No InundationTiming grids found at all.", "S") + return + #Fetch the grids + itGrids = [] + trList = [] + for tr in itTRList: + start = tr.startTime().unixTime() + end = tr.endTime().unixTime() + #print "Checking tr:", tr + if (start - baseTime) / 3600 >= inunStartHour and (end - baseTime) / 3600 <= inunEndHour: + grid = self.getGrids(mutableID, "InundationTiming", "SFC", tr) + itGrids.append(grid) + trList.append(tr) + + if len(itGrids) == 0: + self.statusBarMsg("No InundationTiming grids found for selected start and end hours.", "S") + return + + # Surgically insert grid values into the InundationTiming grids over the selected hours + for i in range(len(trList)): + itGrids[i][modifyMask] = inundationHeight # poke in the values + + self.createGrid(mutableID, "InundationTiming", "SCALAR", itGrids[i], trList[i]) + + timingGrids = [] + for tr in itTRList: + grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr) + grid[~ssea] = 0.0 + timingGrids.append(grid) + + surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList) + + elif makeOption == 
"UpdateInunMax (Edit Inundation Timing Grids)": + + self.deleteAllGrids(["InundationMax","SurgeHtPlusTideMSL", "SurgeHtPlusTideMLLW", + "SurgeHtPlusTideNAVD", "SurgeHtPlusTideMHHW", "SurgeHtPlusTideMLLW"]) + + itTRList = self.GM_getWEInventory("InundationTiming", mutableID, "SFC") + + if len(itTRList) == 0: + self.statusBarMsg("No InundationTiming grids found at all. Inundation grids required to exist when running with this option. Otherwise run with Manual Replace Option.", "S") + return + + timingGrids = [] + + # Fetch all the timing grids + for tr in itTRList: + grid = self.getGrids(self.mutableID(), "InundationTiming", "SFC", tr) + grid[~ssea] = 0.0 + timingGrids.append(grid) + self.deleteGrid(mutableID, "InundationTiming", "SFC", tr) + self.createGrid(mutableID, "InundationTiming", "SCALAR", grid, tr, precision=1) + + # Finally create the surge grid which will be saved as the InundationMax + + surgePctGrid = self.makeInundationMaxGrid(timingGrids, itTRList) + + #return + # Done with manual options + +# Next line introduced on Jan 2017 SWiT. It forces points in InundationMax that are > 1 and < 1.5 to 1.5. This is because TCV rounds to +# nearest one foot for categorical HTI threat level consistency with inundation graphic. Not doing this would cause TCV to throw away zones that +# might have more than 3% coverage of inundation > 1 but less than 1.5 altogether. Changing TCV to key on anything with InundationMax >= 1 would not +# do because it would then include zones in TCV with inundation forecasts of less than 1 but >= 0.5 overdoing the threat. 
+ + surgePctGrid[(surgePctGrid > 1.0) & (surgePctGrid < 1.5)] = 1.5 + + threatKeys = self.getDiscreteKeys(threatWEName) + + # Define a mapping between UI names and key names + # keyMap = {"Very Low" :"Very Low", + keyMap = {"Elevated" : "Elevated", + "Moderate" : "Mod", + "High" : "High", + "Extreme" : "Extreme", + } + + threshDict = {} # a dict to store thresholds from the UI + + for key in list(keyMap.keys()): + + if keyMap[key] == "Extreme": + threshDict[keyMap[key]] = 9 + elif keyMap[key] == "High": + threshDict[keyMap[key]] = 6 + elif keyMap[key] == "Mod": + threshDict[keyMap[key]] = 3 + elif keyMap[key] == "Elevated": + threshDict[keyMap[key]] = 1 + + #print "threshDict[keyMap[key]]: ", keyMap[key], threshDict[keyMap[key]] + + # make a timeRange - 6 hours long + elementList = ["StormSurgeThreat","InundationMax","SurgeHtPlusTideMSL","SurgeHtPlusTideMLLW", + "SurgeHtPlusTideNAVD","SurgeHtPlusTideMHHW"] + + # make a new timeRange that will be used to create new grids + timeRange = self.makeNewTimeRange(6) + + # Remove old guidance grids and replace them with the new grids + # Delete the old grids first + cTime = int(self._gmtime().unixTime()/ 3600) * 3600 + startTime = AbsTime.AbsTime(cTime - 48*3600) + endTime = startTime + 240*3600 + deleteTimeRange = TimeRange.TimeRange(startTime, endTime) + + for elem in elementList: + self.deleteCmd([elem], deleteTimeRange) + + if makeOption != "Manually Replace" and makeOption != "Manually Add" and makeOption != "UpdateInunMax (Edit Inundation Timing Grids)": + if surgePctGridMSL is not None: + surgePctGridMSL.clip(-30.0, 100.0, surgePctGridMSL) + self.createGrid(mutableID, "SurgeHtPlusTideMSL", "SCALAR", surgePctGridMSL, + timeRange, precision=2) + if surgePctGridMLLW is not None: + surgePctGridMLLW.clip(-30.0, 100.0, surgePctGridMLLW) + self.createGrid(mutableID, "SurgeHtPlusTideMLLW", "SCALAR", surgePctGridMLLW, + timeRange, precision=2) + if surgePctGridNAVD is not None: + surgePctGridNAVD.clip(-30.0, 100.0, 
surgePctGridNAVD) + self.createGrid(mutableID, "SurgeHtPlusTideNAVD", "SCALAR", surgePctGridNAVD, + timeRange, precision=2) + if surgePctGridMHHW is not None: + surgePctGridMHHW.clip(-30.0, 100.0, surgePctGridMHHW) + self.createGrid(mutableID, "SurgeHtPlusTideMHHW", "SCALAR", surgePctGridMHHW, + timeRange, precision=2) + + # Make the grid. Start with the existing grid if we have one otherwise zeros + coastalThreat = self.empty(np.int8) + surgePctGrid.clip(0.0, 100.0, surgePctGrid) + self.createGrid(mutableID, "InundationMax", "SCALAR", surgePctGrid, timeRange, precision=2) + + # Yet another list to define the order in which we set grid values + # This order must be ranked lowest to highest + #keyList = ["Very Low", "Elevated", "Mod", "High", "Extreme"] + keyList = ["Elevated", "Mod", "High", "Extreme"] + + # Set the grid values based on the surgePctGrid grid and thresholds + for key in keyList: + #print "THRESHOLD FOR KEY IS: ", key, threshDict[key] + thresh = threshDict[key] + keyIndex = self.getIndex(key, threatKeys) + #coastalMask = ssea & np.greater_equal(surgePctGrid, thresh) + coastalMask = ssea & np.greater(surgePctGrid, thresh) + coastalThreat[coastalMask] = keyIndex + +# create the CoastalThreat Grid + self.createGrid(mutableID, threatWEName, "DISCRETE", + (coastalThreat, threatKeys), timeRange, + discreteKeys=threatKeys, + discreteOverlap=0, + discreteAuxDataLength=2, + defaultColorTable="Hazards") + + t1 = time.time() + LogStream.logEvent("Finished TCStormSurgeThreat in %f.4 ms" % ((t1-t0) * 1000)) + + return + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCTornadoThreat.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCTornadoThreat.py index 0b782ff5a1..5cd19c8915 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCTornadoThreat.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCTornadoThreat.py @@ -1,339 +1,339 @@ -# 
---------------------------------------------------------------------------- -# -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TornadoFloodThreat -# -# Author: Tom LeFebvre/Pablo Santos -# Updated: April 16, 2012 to lower Low Category Threshold and hide Sliding Bars -# Migrated procedure for AWIPS2. Updated 6/22/2012. S.O. -# Sept 19, 2014: Updated Low to Elevated for 2015 Official Implementation. PS -# Modified: By Belk 07/15/2016 to make efficiency improvements, and -# refactor to make use of a utility containing common methods with other tools -# Modified: By LeFebvre 09/23/2016 finish converstion to numpy conventions. -# CHECKED IN for 17.1.1 -# -# Modified: By LeFebvre 06/12/17 - Fixed bug that incremented TornadoThreat -# beyond Extreme and caused a crash. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Populate"] - -import TropicalUtility -import time -import sys -import AbsTime -import TimeRange -import numpy as np - -VariableList = [("NOTE: Day 1 Tornado Probabilities Used by Procedure:", "", "label"), - #("Very Low" , "02", "radio", ["02"]), - ("Elevated" , "02", "radio", ["02"]), - ("Mod" ,"15", "radio", ["15"]), - ("High","30", "radio", ["30"]), - ("Extreme" ,"45", "radio", ["45"]), - ("Day 2: Prob Svr Wx (ptotsvr) >= 5: Elevated; ptotsvr >= 60%: Mod", "", "label"), - ("Day 2: Prob Sig Svr Wx (ptotxsvr) >= 10: Elevated -> Mod and Mod -> High", "", "label"), - ("Day 3: Prob Svr Wx (ptotsvr) >= 5: Elevated", "", "label"), - ("Day 3: Prob Sig Svr Wx (ptotxsvr) >= 10: Elevated -> Mod", "", "label"), - ("NOTE: After applying logic above", "", "label"), - ("threat level is the max composite from Day 1-3", "", "label") - ] - - -class Procedure (TropicalUtility.TropicalUtility): - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - - def variableExists(self, modelName, weName, weLevel): - - # it turns out the the modelName will not match the dbID().model() - # directly, so it needs to be massaged a bit. 
- parts = modelName.split("_") - if len(parts) >= 4: - modelName = parts[3] - - availParms = self.availableParms() - for pName, level, dbID in availParms: - if dbID.modelName().find(modelName) > -1: - if pName.find(weName) > -1 and level.find(weLevel) > -1: - return True - - return False - - def getWEInventory(self, modelName, WEName, timeRange = None): - - allTimes = TimeRange.allTimes() - - if timeRange is None: - timeRange = allTimes - - weLevel = "SFC" - - if not self.variableExists(modelName, WEName, weLevel): - #print WEName, "does not exist in model", modelName - return [] - - try: - gridInfo = self.getGridInfo(modelName, WEName, weLevel, timeRange) - except: - #print "GridInfo failed for:", modelName, WEName, weLevel, timeRange - return [] - - trList = [] - for g in gridInfo: - start = g.gridTime().startTime().unixTime() - end = g.gridTime().endTime().unixTime() - tr = self.GM_makeTimeRange(start, end) - if timeRange.overlaps(tr): - trList.append(tr) - - return trList - - # get the current time, truncates to the last six hour value. - # returns a timeRange with this startTime until 24 hrs from this time - def make6hrTimeRange(self): - startTime = int(self._gmtime().unixTime()/ (3600 * 6)) * 3600 * 6 - endTime = startTime + (3600 * 6) - - timeRange = self.GM_makeTimeRange(startTime, endTime) - - return timeRange - - # Returns a list of dbIdentifiers that match the specified model - # name, weName and level. 
- def getModelList(self, modelName, weName, weLevel): - modelList = [] - - availParms = self.availableParms() - - for pName, level, dbID in availParms: - if dbID.modelName().find(modelName) > -1: - if pName.find(weName) > -1: - if level.find(weLevel) > -1: - if dbID.modelIdentifier() not in modelList: - modelList.append(dbID.modelIdentifier()) - return modelList - - def determineDay(self, modelTime, validTime): - - diff = (validTime - modelTime) / 3600 - if diff < 36: - return 1 - elif diff >= 36 and diff < 60: - return 2 - elif diff >= 60: - return 3 - - return 0 - - # returns a unix time based on the specified model ID. - def getModelTime(self, modelName): - - timeStr = modelName[-13:] - - year = int(timeStr[0:4]) - month = int(timeStr[4:6]) - day = int(timeStr[6:8]) - hour = int(timeStr[9:11]) - - absTime = AbsTime.absTimeYMD(year, month, day, hour, 0, 0) - absTime = AbsTime.absTimeYMD(year, month, day, hour, 0, 0) - - return absTime.unixTime() - - def getTornadoGrid(self, varName, dayNum): - siteID = self.getSiteID() - SPCModelName = siteID + "_D2D_SPC" - SPCVarName = varName - SPCLevel = "SFC" - - modelList = self.getModelList("SPC", SPCVarName, SPCLevel) - #print "Processing varName, dayNum: ", varName, dayNum - - hours24 = 24 * 3600 - - for modelName in modelList: - - trList = self.getWEInventory(modelName, SPCVarName) - #print "modelName trList:", modelName, trList - if len(trList) == 0: # no grids found for this version - continue # go on to older versions - - modelTime = self.getModelTime(modelName) - # get the current time rounded to the nearest 12Z - currentTime = (int(time.time() / hours24) * hours24) + (12 * 3600) - - for tr in trList: - gridDayNum = self.determineDay(currentTime, - tr.startTime().unixTime()) - #print "modelName, modelTime, and gridDayNum:", modelName, modelTime, gridDayNum - if gridDayNum == dayNum: - grid = self.getGrids(modelName, SPCVarName, SPCLevel, tr) - return grid - - return None - - # This method adjusts an existing threat 
grid - def adjustTornadoGrid(self, tornadoThreat, threatKeys, var, dayNum, extThreshold): - D2DGrid = self.getTornadoGrid(var, dayNum) - if D2DGrid is None: - #print "No grid found for", var, "day:", dayNum - return tornadoThreat - - # Account for offices using the four key arrangement - # Just change the "Very Low" to "Low" in the threshDict - #if "Very Low" not in threatKeys: - # # find all places greater than "Very Low or Low" in the tornadoThreat - # lowIndex = self.getIndex("Low", threatKeys) - #else: - # lowIndex = self.getIndex("Very Low", threatKeys) - #lowMask = greater_equal(tornadoThreat, lowIndex) - -# lowMask = greater(tornadoThreat, 0) - lowMask = tornadoThreat > 0 - - # finds all places in the extreme grid >= to the extThreshold -# xMask = greater_equal(D2DGrid, extThreshold) - xMask = D2DGrid >= extThreshold - - # increment the threat where these masks intersect - mask = lowMask & xMask - - # make sure we're not incrementing too far - extremeIndex = self.getIndex("Extreme", threatKeys) - - # increment the category. This code assumes that the categories are - # defined in increasing order of severity. 
- tornadoThreat[mask] += 1 - - # Clip the adjusted grid to the maximum allowed value - extremeIndex - tornadoThreat = np.clip(tornadoThreat, 0, extremeIndex) - - return tornadoThreat - - - def setTornadoGrid(self, tornadoThreat, threatKeys, var, dayNum, threshDict): - - D2DGrid = self.getTornadoGrid(var, dayNum) - if D2DGrid is None: - #print "No grid found for", var, "day:", dayNum - return tornadoThreat - - # Account for offices using the four key arrangement - # Just change the "Very Low" to "Low" in the threshDict - #print "THREATKEYS ARE: ", threatKeys - #print "THRESHDICT IS: ", threshDict.keys() - if "Very Low" not in threatKeys: - for key in threshDict.keys(): - if threshDict[key] == "Very Low": - threshDict[key] = "Elevated" - dictKeys = threshDict.keys() - - #print "unsorted dictKeys: ", dictKeys - dictKeys.sort() # sort lowest to highest value - #print "sorted dictKeys: ", dictKeys - - - # Set the grid values based on the tornado prob grid and thresholds - for key in dictKeys: - thresh = int(key) - #print "THRESH IS: ", thresh - keyIndex = self.getIndex(threshDict[key], threatKeys) - # make a temp grid where the thresholds are exceeded - tempGrid = self.empty(dtype=np.int8) - tempGrid[D2DGrid >= thresh] = keyIndex - # calculate areas where this temp grid exceeds the threatGrid - mask = tempGrid > tornadoThreat - # update the threatGrid for these areas only - tornadoThreat[mask] = keyIndex - - return tornadoThreat - - def execute(self, varDict): - - threatWEName = "TornadoThreat" - - threatKeys = self.getDiscreteKeys(threatWEName) - - # make a dict to store thresholds from the UI - ptorDict = {} - - for key in threatKeys: - if key == "None": - continue - ptorDict[varDict[key]] = key - - #print "***************************" - #print "ptorDict is:", ptorDict - #print "***************************" - - # Set up the data for processing the various grids. 
- # Each entry consists of the D2D variable to be checked, - # the day number of that grid, and a dictionary that defines - # each threshold value and the corresponding discrete value. - # Note the grids will be processed in the order defined in - # this list. - actionList = [ - ("ptor", 1, ptorDict, "sigtrndprob", 10), # ptorDict comes from the GUI - - ("prsvr", 2, { 5 : "Very Low", - #15 : "Elevated", - 5 : "Elevated", - 60 : "Mod", - }, - "prsigsv", 10), - - ("prsvr", 3, { 5 : "Very Low", - #15 : "Elevated", - 5 : "Elevated", - - }, - "prsigsv", 10), - - ] - - # make a grid of zeros. This will be the TornadoThreat grid - tornadoThreat = self.empty(dtype=np.int8) - - for var, dayNum, threshDict, xVar, xThreshold in actionList: - tornadoThreat = self.setTornadoGrid(tornadoThreat, threatKeys, - var, dayNum, threshDict) - - # now adjust the grid based on the extreme grid category - tornadoThreat = self.adjustTornadoGrid(tornadoThreat, threatKeys, - xVar, dayNum, xThreshold) - - # make a timeRange - 6 hours long, rounded to nearest hour - startTime = int(self._gmtime().unixTime()/ 3600) * 3600 - endTime = startTime + (6 * 3600) - threatTR = self.GM_makeTimeRange(startTime, endTime) - - # remove any old grids that are lying around - startTime = int(self._gmtime().unixTime()/ 3600) * 3600 - (24 * 3600) - endTime = startTime + (24 * 3600 * 10) - removeTR = self.GM_makeTimeRange(startTime, endTime) - self.deleteCmd([threatWEName], removeTR) - - # create the TornadoThreat Grid - self.createGrid("Fcst", threatWEName, "DISCRETE", - (tornadoThreat, threatKeys), threatTR, - discreteKeys=threatKeys, - discreteOverlap=0, - discreteAuxDataLength=2, - defaultColorTable="Hazards") - - return - - +# ---------------------------------------------------------------------------- +# +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# TornadoFloodThreat +# +# Author: Tom LeFebvre/Pablo Santos +# Updated: April 16, 2012 to lower Low Category Threshold and hide Sliding Bars +# Migrated procedure for AWIPS2. Updated 6/22/2012. S.O. +# Sept 19, 2014: Updated Low to Elevated for 2015 Official Implementation. PS +# Modified: By Belk 07/15/2016 to make efficiency improvements, and +# refactor to make use of a utility containing common methods with other tools +# Modified: By LeFebvre 09/23/2016 finish converstion to numpy conventions. +# CHECKED IN for 17.1.1 +# +# Modified: By LeFebvre 06/12/17 - Fixed bug that incremented TornadoThreat +# beyond Extreme and caused a crash. +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Populate"] + +import TropicalUtility +import time +import sys +import AbsTime +import TimeRange +import numpy as np + +VariableList = [("NOTE: Day 1 Tornado Probabilities Used by Procedure:", "", "label"), + #("Very Low" , "02", "radio", ["02"]), + ("Elevated" , "02", "radio", ["02"]), + ("Mod" ,"15", "radio", ["15"]), + ("High","30", "radio", ["30"]), + ("Extreme" ,"45", "radio", ["45"]), + ("Day 2: Prob Svr Wx (ptotsvr) >= 5: Elevated; ptotsvr >= 60%: Mod", "", "label"), + ("Day 2: Prob Sig Svr Wx (ptotxsvr) >= 10: Elevated -> Mod and Mod -> High", "", "label"), + ("Day 3: Prob Svr Wx (ptotsvr) >= 5: Elevated", "", "label"), + ("Day 3: Prob Sig Svr Wx (ptotxsvr) >= 10: Elevated -> Mod", "", "label"), + ("NOTE: After applying logic above", "", "label"), + ("threat level is the max composite from Day 1-3", "", "label") + ] + + +class Procedure (TropicalUtility.TropicalUtility): + def __init__(self, dbss): + 
TropicalUtility.TropicalUtility.__init__(self, dbss) + + + def variableExists(self, modelName, weName, weLevel): + + # it turns out the the modelName will not match the dbID().model() + # directly, so it needs to be massaged a bit. + parts = modelName.split("_") + if len(parts) >= 4: + modelName = parts[3] + + availParms = self.availableParms() + for pName, level, dbID in availParms: + if dbID.modelName().find(modelName) > -1: + if pName.find(weName) > -1 and level.find(weLevel) > -1: + return True + + return False + + def getWEInventory(self, modelName, WEName, timeRange = None): + + allTimes = TimeRange.allTimes() + + if timeRange is None: + timeRange = allTimes + + weLevel = "SFC" + + if not self.variableExists(modelName, WEName, weLevel): + #print WEName, "does not exist in model", modelName + return [] + + try: + gridInfo = self.getGridInfo(modelName, WEName, weLevel, timeRange) + except: + #print "GridInfo failed for:", modelName, WEName, weLevel, timeRange + return [] + + trList = [] + for g in gridInfo: + start = g.gridTime().startTime().unixTime() + end = g.gridTime().endTime().unixTime() + tr = self.GM_makeTimeRange(start, end) + if timeRange.overlaps(tr): + trList.append(tr) + + return trList + + # get the current time, truncates to the last six hour value. + # returns a timeRange with this startTime until 24 hrs from this time + def make6hrTimeRange(self): + startTime = int(self._gmtime().unixTime()/ (3600 * 6)) * 3600 * 6 + endTime = startTime + (3600 * 6) + + timeRange = self.GM_makeTimeRange(startTime, endTime) + + return timeRange + + # Returns a list of dbIdentifiers that match the specified model + # name, weName and level. 
+ def getModelList(self, modelName, weName, weLevel): + modelList = [] + + availParms = self.availableParms() + + for pName, level, dbID in availParms: + if dbID.modelName().find(modelName) > -1: + if pName.find(weName) > -1: + if level.find(weLevel) > -1: + if dbID.modelIdentifier() not in modelList: + modelList.append(dbID.modelIdentifier()) + return modelList + + def determineDay(self, modelTime, validTime): + + diff = (validTime - modelTime) / 3600 + if diff < 36: + return 1 + elif diff >= 36 and diff < 60: + return 2 + elif diff >= 60: + return 3 + + return 0 + + # returns a unix time based on the specified model ID. + def getModelTime(self, modelName): + + timeStr = modelName[-13:] + + year = int(timeStr[0:4]) + month = int(timeStr[4:6]) + day = int(timeStr[6:8]) + hour = int(timeStr[9:11]) + + absTime = AbsTime.absTimeYMD(year, month, day, hour, 0, 0) + absTime = AbsTime.absTimeYMD(year, month, day, hour, 0, 0) + + return absTime.unixTime() + + def getTornadoGrid(self, varName, dayNum): + siteID = self.getSiteID() + SPCModelName = siteID + "_D2D_SPC" + SPCVarName = varName + SPCLevel = "SFC" + + modelList = self.getModelList("SPC", SPCVarName, SPCLevel) + #print "Processing varName, dayNum: ", varName, dayNum + + hours24 = 24 * 3600 + + for modelName in modelList: + + trList = self.getWEInventory(modelName, SPCVarName) + #print "modelName trList:", modelName, trList + if len(trList) == 0: # no grids found for this version + continue # go on to older versions + + modelTime = self.getModelTime(modelName) + # get the current time rounded to the nearest 12Z + currentTime = (int(time.time() / hours24) * hours24) + (12 * 3600) + + for tr in trList: + gridDayNum = self.determineDay(currentTime, + tr.startTime().unixTime()) + #print "modelName, modelTime, and gridDayNum:", modelName, modelTime, gridDayNum + if gridDayNum == dayNum: + grid = self.getGrids(modelName, SPCVarName, SPCLevel, tr) + return grid + + return None + + # This method adjusts an existing threat 
grid + def adjustTornadoGrid(self, tornadoThreat, threatKeys, var, dayNum, extThreshold): + D2DGrid = self.getTornadoGrid(var, dayNum) + if D2DGrid is None: + #print "No grid found for", var, "day:", dayNum + return tornadoThreat + + # Account for offices using the four key arrangement + # Just change the "Very Low" to "Low" in the threshDict + #if "Very Low" not in threatKeys: + # # find all places greater than "Very Low or Low" in the tornadoThreat + # lowIndex = self.getIndex("Low", threatKeys) + #else: + # lowIndex = self.getIndex("Very Low", threatKeys) + #lowMask = greater_equal(tornadoThreat, lowIndex) + +# lowMask = greater(tornadoThreat, 0) + lowMask = tornadoThreat > 0 + + # finds all places in the extreme grid >= to the extThreshold +# xMask = greater_equal(D2DGrid, extThreshold) + xMask = D2DGrid >= extThreshold + + # increment the threat where these masks intersect + mask = lowMask & xMask + + # make sure we're not incrementing too far + extremeIndex = self.getIndex("Extreme", threatKeys) + + # increment the category. This code assumes that the categories are + # defined in increasing order of severity. 
+ tornadoThreat[mask] += 1 + + # Clip the adjusted grid to the maximum allowed value - extremeIndex + tornadoThreat = np.clip(tornadoThreat, 0, extremeIndex) + + return tornadoThreat + + + def setTornadoGrid(self, tornadoThreat, threatKeys, var, dayNum, threshDict): + + D2DGrid = self.getTornadoGrid(var, dayNum) + if D2DGrid is None: + #print "No grid found for", var, "day:", dayNum + return tornadoThreat + + # Account for offices using the four key arrangement + # Just change the "Very Low" to "Low" in the threshDict + #print "THREATKEYS ARE: ", threatKeys + #print "THRESHDICT IS: ", threshDict.keys() + if "Very Low" not in threatKeys: + for key in list(threshDict.keys()): + if threshDict[key] == "Very Low": + threshDict[key] = "Elevated" + dictKeys = list(threshDict.keys()) + + #print "unsorted dictKeys: ", dictKeys + dictKeys.sort() # sort lowest to highest value + #print "sorted dictKeys: ", dictKeys + + + # Set the grid values based on the tornado prob grid and thresholds + for key in dictKeys: + thresh = int(key) + #print "THRESH IS: ", thresh + keyIndex = self.getIndex(threshDict[key], threatKeys) + # make a temp grid where the thresholds are exceeded + tempGrid = self.empty(dtype=np.int8) + tempGrid[D2DGrid >= thresh] = keyIndex + # calculate areas where this temp grid exceeds the threatGrid + mask = tempGrid > tornadoThreat + # update the threatGrid for these areas only + tornadoThreat[mask] = keyIndex + + return tornadoThreat + + def execute(self, varDict): + + threatWEName = "TornadoThreat" + + threatKeys = self.getDiscreteKeys(threatWEName) + + # make a dict to store thresholds from the UI + ptorDict = {} + + for key in threatKeys: + if key == "None": + continue + ptorDict[varDict[key]] = key + + #print "***************************" + #print "ptorDict is:", ptorDict + #print "***************************" + + # Set up the data for processing the various grids. 
+ # Each entry consists of the D2D variable to be checked, + # the day number of that grid, and a dictionary that defines + # each threshold value and the corresponding discrete value. + # Note the grids will be processed in the order defined in + # this list. + actionList = [ + ("ptor", 1, ptorDict, "sigtrndprob", 10), # ptorDict comes from the GUI + + ("prsvr", 2, { 5 : "Very Low", + #15 : "Elevated", + 5 : "Elevated", + 60 : "Mod", + }, + "prsigsv", 10), + + ("prsvr", 3, { 5 : "Very Low", + #15 : "Elevated", + 5 : "Elevated", + + }, + "prsigsv", 10), + + ] + + # make a grid of zeros. This will be the TornadoThreat grid + tornadoThreat = self.empty(dtype=np.int8) + + for var, dayNum, threshDict, xVar, xThreshold in actionList: + tornadoThreat = self.setTornadoGrid(tornadoThreat, threatKeys, + var, dayNum, threshDict) + + # now adjust the grid based on the extreme grid category + tornadoThreat = self.adjustTornadoGrid(tornadoThreat, threatKeys, + xVar, dayNum, xThreshold) + + # make a timeRange - 6 hours long, rounded to nearest hour + startTime = int(self._gmtime().unixTime()/ 3600) * 3600 + endTime = startTime + (6 * 3600) + threatTR = self.GM_makeTimeRange(startTime, endTime) + + # remove any old grids that are lying around + startTime = int(self._gmtime().unixTime()/ 3600) * 3600 - (24 * 3600) + endTime = startTime + (24 * 3600 * 10) + removeTR = self.GM_makeTimeRange(startTime, endTime) + self.deleteCmd([threatWEName], removeTR) + + # create the TornadoThreat Grid + self.createGrid("Fcst", threatWEName, "DISCRETE", + (tornadoThreat, threatKeys), threatTR, + discreteKeys=threatKeys, + discreteOverlap=0, + discreteAuxDataLength=2, + defaultColorTable="Hazards") + + return + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCWindThreat.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCWindThreat.py index db7ffc38fc..ea3f5a1c1c 100644 --- 
a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCWindThreat.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/TCWindThreat.py @@ -1,614 +1,614 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# A2TCWindThreatFinal_3 -# This version is for use with MLB's Graphical HLS. -# This version modifies the gHLS WindThreat element. -# -# Author: lefebvre 9/11/06 -# Modified by: Volkmer/MLB and Santos/MFL 5/23/07 -# More modifications: by LeFebvre/Santos/Sharp 05/7/09 -# LeFebvre/Santos 07/20/10 -# Modified: by Santos 04/26/2011 to accomodate alternate logic for NE Coast and hide bars -# Modified: by Santos 04/20/2012 to get rid off Very Low and tweak GUI. -# Modified: by Shannon/Pablo 06/19/2012 to make A2 and DRT compatible -# Modified: by Pablo/Shannon on 05/21/2014 to fix bug introduced in 2012 when Very Low was eliminated -# Modified: By Santos/Lefebvre 09/17/2014 to make changes for official implementation in 2015. -# Modified: By Belk 07/15/2016 to make efficiency improvements, and -# refactor to make use of a utility containing common methods with other tools -# CHECKED IN FOR 17.1.1: By LeFebvre 09/23/2016 finish converstion to numpy conventions. -# Modified: By LeFebvre 09/26/16 - Removed commented out code to pass code review. -# Modified: By LeFebvre 10/31/16 - Added more code to ensure only one cyclone center point is calculated -# Modified: By LeFebvre 07/18/17 - Added option to populate based on Preliminary or Official prob guidance. -#---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -# -# THIS CODE SHALL NOT BE CHANGED WITHOUT CONSULTING ORIGINAL DEVELOPERS. -# -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. -# Possible items are: Populate, Edit, Consistency, Verify, Hazards -MenuItems = ["Populate","Edit"] - -# The ToolList is optional, but recommended, if you are calling -# Smart Tools from your Script. -# If present, it can be used to show which grids will be -# modified by the Script. - - -VariableList = [("Probabilistic Wind Source?", "Official", "radio", ["Official", "Preliminary"]), - ("Forecast Confidence?", "Typical (Combined; For ill defined or for most systems anytime within 48 hours of landfall or closest approach)", - "radio", ["Typical (Combined; For ill defined or for most systems anytime within 48 hours of landfall or closest approach)", - "High (Combined; for well-behaved systems within 12 hours of landfall or closest approach)", - "Higher (Combined; for very well-behaved systems within 6 hours of landfall or closest approach)", - "Highest (Deterministic-only; assumes a perfect forecast)"]), - ("WindMax Source?", "WFO Mesoscale (default)", "radio", ["WFO Mesoscale (default)","NHC/TCM Synoptic"]), - ] - -import TropicalUtility -import string -import TimeRange -import AbsTime -import time -import MetLib -import sys -import math -import numpy as np - -class Procedure (TropicalUtility.TropicalUtility): - - def __init__(self, dbss): - TropicalUtility.TropicalUtility.__init__(self, dbss) - - - # Finds the TPC prob database, extracts all of the grids and returns - # the last one of each type - def getProbGrids(self): - ap = self.availableParms() - searchStr = self.getSiteID() + "_GRID_D2D_" + self._probWindModelSource - probModel = "" - for elem, level, model in ap: - if searchStr in model.modelIdentifier(): - probModel = model.modelIdentifier() - break - - if probModel == "": - self.statusBarMsg("TPC Wind probability grids not found.", "S") - return None, None, None - - # make a 
big timeRange - timeRange = TimeRange.allTimes() - - # get the TPC prob grids, and keep the last one - prob34List = self.getGrids(probModel, "prob34", "FHAG10", timeRange, - mode = "List") - prob34Grid = prob34List[-1] - - prob50List = self.getGrids(probModel, "prob50", "FHAG10", timeRange, - mode = "List") - prob50Grid = prob50List[-1] - - prob64List = self.getGrids(probModel, "prob64", "FHAG10", timeRange, - mode = "List") - prob64Grid = prob64List[-1] - - return prob34Grid, prob50Grid, prob64Grid - - - # Make a timeRange spanning from the current hour to 6 hours hence - def make6hrTimeRange(self): - cTime = int(self._gmtime().unixTime()/ 3600) * 3600 - startTime = AbsTime.AbsTime(cTime) - endTime = AbsTime.AbsTime(cTime + (6 * 3600)) # 6 hours - tr = TimeRange.TimeRange(startTime, endTime) - - return tr - - - # Make a timeRange spanning the usual -24 to 10-day db - def makeDBTimeRange(self): - cTime = int(self._gmtime().unixTime()/ 3600) * 3600 - startTime = AbsTime.AbsTime(cTime - (24*3600)) - endTime = AbsTime.AbsTime(cTime + (240 * 3600)) # 10 days - tr = TimeRange.TimeRange(startTime, endTime) - - return tr - - - # Removes all grids from a selected set of temporary parms - def removeTempGrids(self): - elementList = ["Prob34", "Prob50", "Prob64", "WindMax"] - ap = self.availableParms() - tr = TimeRange.allTimes() - - for elem, level, model in ap: - modelName = model.modelIdentifier() - if elem in elementList and level == "SFC": - self.deleteCmd([elem], tr) - return - - # Returns the distance between two points - def pointDist(self, a, b): - return math.sqrt((a[0] - a[1]) * (a[0] - a[1])) + ((b[0] - b[1]) * (b[0] - b[1])) - - def distSort(self, a, b): - - return cmp(a, b) - - # Calculates the average location of the specified points - def avgLocation(self, pointList): - # calc avg - xSum = 0.0 - ySum = 0.0 - for x, y in pointList: - xSum = xSum + x - ySum = ySum + y - - avgX = xSum / len(pointList) - avgY = ySum / len(pointList) - - avgLoc = (avgX, avgY) - - 
return avgLoc - - # Given a list of (x, y) pairs, determine the true center of "the cluster" of - # points. - def selectSingleCenter(self, pointList): - - # If there's only one point, we're done. - if len(pointList) == 1: - return pointList[0] - - # calc the distance from each point to the average location of all the points - avgLoc = self.avgLocation(pointList) - #print "avgLoc:", avgLoc - - distList = [] - for x, y in pointList: - dist = self.pointDist(avgLoc, (x, y)) - distList.append((dist, x, y)) - - # sort smallest to largest distance - distList.sort(self.distSort) - #print "DistanceList after sort:", distList - - # Now collect the closest n points - closestPoint = (distList[0][1], distList[0][2]) - #print "closestPoint:", closestPoint - - # See if the closest point -# if self.pointDist(avgLoc, closestPoint) > 20: -# self.statusBarMsg("Warning! No cluster of points found. Center not reliable", "U") - - cluster = [closestPoint] - for d, x, y in distList: - dist = self.pointDist(closestPoint, (x, y)) - if dist < 5: - cluster.append((x, y)) - - avgX, avgY = self.avgLocation(cluster) - - avgX = int(avgX + 0.5) - avgY = int(avgY + 0.5) - - return avgX, avgY - - - # Calculates the grid position of the storm center - def getStormCenter(self, windGrid, tr): - u, v = self.MagDirToUV(windGrid[0], windGrid[1]) - vortGrid = MetLib.vorticity((u, v)) - vortGrid = self.GM_smoothGrid(vortGrid, 3) - vortGrid = np.clip(vortGrid, 0.0, 1000000) - # Find the max vorticity magnitude - #maxVort = np.amax(vortGrid) - maxVort = np.max(vortGrid.flat) - if maxVort < 10: - return None, None - - # Find the location of the max (i.e. 
the center) - mask = (vortGrid == maxVort) - centerList = np.nonzero(mask.flat)[0] - - # Convert the list of nonzero points to x, y coordinates and append to a list - pointList = [] - for c in centerList: - row = c / mask.shape[1] - col = c % mask.shape[1] - pointList.append((col, row)) - - # Call this method to select a single center - centerCol, centerRow = self.selectSingleCenter(pointList) - - return centerCol, centerRow - - - # Using the center, a distance grid from that center and a mask - # indicating the area that includes the maxWind field. This - # method returns the radius of that area in gridpoint units. - # Note this method adds 2 gridpoints since the area tends to - # fall a little short of the actual max wind value. - def calcRadius(self, center, distGrid, mask): - maskPoints = np.nonzero(mask) - if len(maskPoints) == 0: - return 0 - - histDict = {} - yCoords, xCoords = maskPoints - - for i in range(len(yCoords)): - dist = int(distGrid[yCoords[i], xCoords[i]]) - - if dist not in histDict: - histDict[dist] = 1 - else: - histDict[dist] = histDict[dist] + 1 - - histKeys = histDict.keys() - histKeys.sort() - lastKey = 0 - for key in histKeys: - if key - lastKey >=2: - return lastKey + 2 - lastKey = key - - return lastKey + 2 - - - # Makes a grid of distance from the specified center - # which must be expressed as a tuple (x, y) - def makeDistGrid(self, center): - iGrid = np.indices(self.getGridShape()) - yDist = center[1] - iGrid[0] - xDist = center[0] - iGrid[1] - distGrid = np.sqrt(xDist**2 + yDist**2) - - return distGrid - - - # Given a specified wind speed and direction grid along with - # a distance grid, this method computes a mask that covers - # the area defined by the ring of max winds. 
- def makeCoreMask(self, ws, wd, distGrid): - # Find the area inside the max wind ring - u, v = self.MagDirToUV(ws, wd) - windGrad = MetLib.gradient(ws) - distGrad = MetLib.gradient(distGrid) - dotGrid = MetLib.dot(windGrad, distGrad) - - # smooth this grid to remove noise - dotGrid = self.GM_smoothGrid(dotGrid, 9) - mask = dotGrid > 0.0 - - return mask - - - # This method returns the max wind speed value found in the - # specified wind speed grid. This method restricts the search - # to an area just inside and just outside the specified radius. - def getMaxWindValue(self, distGrid, radius, ws, tr): - maxWS = 0.0 - lessMask = distGrid < (radius + 2) - moreMask = distGrid > (radius - 7) - mask = lessMask & moreMask -## self.createGrid("Fcst", "MaxMask", "SCALAR", mask, tr) - ringGrid = ws * mask - thisMax = np.amax(ringGrid) - if thisMax > maxWS: - maxWS = thisMax - - return maxWS - - - # This method adjusts and returns the specified maxWindGrid by - # temporally interpolating each wind grid found in the current - # inventory. A vorticity field is used to determine the storm - # center. Then the wind speed and direction grids are used to - # calculate the area surrounded by the ring of max winds. This - # area is used to calculate the radius of max wind and the all - # of the attributes are used to calculate the max wind field - # over the entire event by interpolating temporally. This max - # wind field is later used to calculate the WindThreat. - def adjustWindMax(self, maxWindGrid): - trList = self.GM_getWEInventory("Wind") - if len(trList) == 0: - return - - adjustedWindMax = maxWindGrid - stormMaxWindValue = -1 - - modifiedMask = np.zeros(self.getGridShape(), np.bool) - - lastXPos = None - lastYPos = None - lastRadius = None - lastWindMax = None - for tr in trList: - - ws, wd = self.getGrids("Fcst", "Wind", "SFC", tr, mode="First") - xPos, yPos = self.getStormCenter((ws, wd), tr) - - # See if the storm is outside the GFE domain. 
If it is, just update - # the adjustedWindMax and the stormMaxWindValue - if xPos is None or yPos is None: - mask = ws > adjustedWindMax - adjustedWindMax[mask] = ws[mask] - - # Update the overall max - gridWindMax = np.amax(ws) # Max value over the whole grid - stormMaxWindValue = max(gridWindMax, stormMaxWindValue) - continue - - # calc change in wind speed as a function of radius - distGrid = self.makeDistGrid((xPos, yPos)) - coreMask = self.makeCoreMask(ws, wd, distGrid) - - # first time through just store the position - if lastXPos == None or lastYPos == None: - lastXPos = xPos - lastYPos = yPos - lastRadius = self.calcRadius((xPos, yPos), distGrid, coreMask) - lastWindMax = self.getMaxWindValue(distGrid, lastRadius, ws, tr) - continue - - # get the maxWindRadius in grid cell coordinates - radius = self.calcRadius((xPos, yPos), distGrid, coreMask) - if radius is None: - continue - - # Get the max wind value, but restrict it to near the radius - maxWindValue = self.getMaxWindValue(distGrid, radius, ws, tr) - #print "unrestricted max Wind Value: ", maxWindValue - - dr = radius - lastRadius - - # Calculate distance from last point - dx = xPos - lastXPos - dy = yPos - lastYPos - dWind = maxWindValue - lastWindMax - - # One iteration per grid point - steps = abs(max(dx, dy)) - - for i in range(steps): - x = lastXPos + (float(dx) / steps) * i - y = lastYPos + (float(dy) / steps) * i - r = lastRadius + (float(dr) / steps) * i - - windValue = lastWindMax + (float(dWind) / steps) * i - distGrid = self.makeDistGrid((x, y)) - - # Change grids at the intersection of a circle defined by - # the center and radius and points that whose value is - # less than the current maxWind value - distMask = distGrid <= r - lessMask = adjustedWindMax < windValue - - - mask = distMask & lessMask # union of dist and less masks - - adjustedWindMax[mask] = windValue - - modifiedMask = mask | modifiedMask - - lastXPos = xPos - lastYPos = yPos - lastRadius = radius - lastWindMax = 
maxWindValue - - # Calculate the maximum wind value over the modified area - stormMaxWindValue = np.amax(adjustedWindMax * modifiedMask) - - #print "stormMaxWindValue: ", stormMaxWindValue - # smooth out moderate values to remove "quadrant effect" - adjustedWindMax = self.GM_smoothGrid(adjustedWindMax, 5) - - return adjustedWindMax, stormMaxWindValue - - - # fetch a grid that represents the maxWind everywhere for the next 5 days - def getWindMax(self, timeRange): - - cTime = int(self._gmtime().unixTime()/ 3600) * 3600 - startTime = AbsTime.AbsTime(cTime) - endTime = startTime + (5 * 24 * 3600) # 5 days from the startTime - tr = TimeRange.TimeRange(startTime, endTime) - - try: - windMax, dir = self.getGrids("Fcst", "Wind", "SFC", tr, mode = "Max") - except: - windMax = None - self.statusBarMsg("No Wind grids found. Please define Wind grids first.", - "S") - return windMax - - - def getMaxWindGrid(self): - try: - grid = self.getObject("TCMMaxWindGrid", "WindGrid") - maxWindValue = np.amax(grid) - return (grid, maxWindValue) - except: - self.statusBarMsg("No TCMMaxWindGrid found.", "S") - return (None, None) - - - def execute(self, varDict): - - # Fetch the model source and define the model name - sourceDB = varDict["Probabilistic Wind Source?"] - if sourceDB == "Official": - self._probWindModelSource = "TPCProb" - elif sourceDB == "Preliminary": - self._probWindModelSource = "TPCProbPrelim" - else: - self.statusBarMsg("Unknown model source selected. 
Aborting.", "U") - return - - # Get confidence value from the dialog - confidenceStr = varDict["Forecast Confidence?"] - tcmwindmax = varDict["WindMax Source?"] - - # define a couple of boolean flags that will come in handy later - # allProb = confidenceStr == "Low (Probability-only; 10% Default-05% NE Coast)" - # allWind = confidenceStr == "Highest (Deterministic-only; MaxWind Composite)" - - #allProb = confidenceStr == "Low (Probability-only; for ill-behaved systems)" - allWind = confidenceStr == "Highest (Deterministic-only; assumes a perfect forecast)" - typical = confidenceStr == "Typical (Combined; For ill defined or for most systems anytime within 48 hours of landfall or closest approach)" - high = confidenceStr == "High (Combined; for well-behaved systems within 12 hours of landfall or closest approach)" - higher = confidenceStr == "Higher (Combined; for very well-behaved systems within 6 hours of landfall or closest approach)" - - #print "allProb and allWind are: ", allProb, allWind - - # extract the percent value from this string - # pctPos = confidenceStr.find("% Default") - # pctStr = confidenceStr[pctPos - 2:pctPos] - pctStr="10" - if high: - pctStr="20" - elif higher: - pctStr="30" - - # Percent thresholds for each confidence category - threatDict = {"10" : [10.0, 10.0, 10.0, 20.0, 30.0], - "20" : [20.0, 20.0, 20.0, 30.0, 40.0], - "30" : [30.0, 30.0, 30.0, 40.0, 50.0], - } - # wind thresholds for each threat category - windDict = {'None' : 0.0, - # 'Very Low' : 34.0, - #'Elevated' : 50.0, - 'Elevated': 34.0, - 'Mod' : 50.0, - 'High1' : 64.0, - 'High2' : 89.0, - 'Extreme' : 96.0, - } - - # Make sure the string is valid. If not then assign any value since the user - # has indicted Highest confidence in the wind field and is not using the - # probabilistic grids at all. 
- - #if not threatDict.has_key(pctStr): - # pctStr = "10" - - #print "pctStr is: ", pctStr - - # Extract the proper list and assign thresholds - thresholdList = threatDict[pctStr] -# - t34TS1 = thresholdList[0] - t50TS2 = thresholdList[1] - t64Cat1 = thresholdList[2] - t64Cat2 = thresholdList[3] - t64Cat3 = thresholdList[4] - - timeRange = self.make6hrTimeRange() - - # Remove previous version of grids. - self.removeTempGrids() - - # set up the indices for the discrete keys - keys = self.getDiscreteKeys("WindThreat") -## print "WindThreat keys are:", keys - noneIndex = self.getIndex("None", keys) - - lowIndex = self.getIndex("Elevated", keys) - modIndex = self.getIndex("Mod", keys) - highIndex = self.getIndex("High", keys) - extremeIndex = self.getIndex("Extreme", keys) - - # Initialize the threat grid - threatGrid = self.empty(np.int8) # a grid of zeros - - # Attempt to get the grid from the server. - windMax, maxWindValue = self.getMaxWindGrid() - - #print "MAXWIND ACROSS DOMAIN IS: ", maxWindValue - - # Use the old-fashioned method - if windMax is None or tcmwindmax == "WFO Mesoscale (default)": - - # Get and adjust a grid of maximum wind over the entire storm - windMax = self.getWindMax(timeRange) - - windMax, maxWindValue = self.adjustWindMax(windMax) - - # Create this grid, if it is valid - if windMax is not None: - self.createGrid("Fcst", "WindMax", "SCALAR", windMax, timeRange, - minAllowedValue=0, maxAllowedValue=200) - - # Assign values to the grid based on the probability grids - if allWind: - - threatGrid[windMax >= windDict["Elevated"]] = lowIndex - threatGrid[windMax >= windDict["Mod"]] = modIndex - threatGrid[windMax >= windDict["High1"]] = highIndex - threatGrid[windMax >= windDict["Extreme"]] = extremeIndex - - else: - - # high and extreme threats require maxWind to meet particular windMax criteria - # Fetch the probabilistic grids - prob34Grid, prob50Grid, prob64Grid = self.getProbGrids() - - #self.createGrid("Fcst", "Prob34", "SCALAR", prob34Grid, 
timeRange) - #self.createGrid("Fcst", "Prob50", "SCALAR", prob50Grid, timeRange) - #self.createGrid("Fcst", "Prob64", "SCALAR", prob64Grid, timeRange) - #print "MAXWIND IS: ", maxWindValue - - threatGrid[prob34Grid >= t34TS1] = lowIndex - threatGrid[prob50Grid >= t50TS2] = modIndex - threatGrid[prob64Grid >= t64Cat1] = highIndex - - if maxWindValue >= windDict['High2']: - threatGrid[prob64Grid >= t64Cat3] = extremeIndex - if maxWindValue >= windDict['Extreme']: - threatGrid[prob64Grid >= t64Cat2] = extremeIndex - - #=================================================================== - # Upgrade windThreat based on windMax grid -## - # Upgrade None to Elevated - windMask = ((windMax >= windDict['Elevated']) & - (windMax < windDict['Mod'])) - threatMask = threatGrid < lowIndex - threatGrid[windMask & threatMask] = lowIndex - - # Upgrade Elevated to Med - windMask = ((windMax >= windDict['Mod']) & - (windMax < windDict['High1'])) - threatMask = threatGrid < modIndex - threatGrid[windMask & threatMask] = modIndex - - # Upgrade Med to High - windMask = ((windMax >= windDict['High1']) & - (windMax < windDict['Extreme'])) - threatMask = threatGrid < highIndex - threatGrid[windMask & threatMask] =highIndex - - # Upgrade High to Extreme - windMask = windMax >= windDict['Extreme'] - threatMask = threatGrid < extremeIndex - threatGrid[windMask & threatMask] = extremeIndex - - # Remove previous version of grid. - dbTimes = self.makeDBTimeRange() - self.deleteCmd(['WindThreat'], dbTimes) - - # create the threat grid - self.createGrid("Fcst", "WindThreat", "DISCRETE", (threatGrid, keys), - timeRange, discreteKeys=keys, discreteAuxDataLength=5, - discreteOverlap=0) - - - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# A2TCWindThreatFinal_3 +# This version is for use with MLB's Graphical HLS. +# This version modifies the gHLS WindThreat element. +# +# Author: lefebvre 9/11/06 +# Modified by: Volkmer/MLB and Santos/MFL 5/23/07 +# More modifications: by LeFebvre/Santos/Sharp 05/7/09 +# LeFebvre/Santos 07/20/10 +# Modified: by Santos 04/26/2011 to accomodate alternate logic for NE Coast and hide bars +# Modified: by Santos 04/20/2012 to get rid off Very Low and tweak GUI. +# Modified: by Shannon/Pablo 06/19/2012 to make A2 and DRT compatible +# Modified: by Pablo/Shannon on 05/21/2014 to fix bug introduced in 2012 when Very Low was eliminated +# Modified: By Santos/Lefebvre 09/17/2014 to make changes for official implementation in 2015. +# Modified: By Belk 07/15/2016 to make efficiency improvements, and +# refactor to make use of a utility containing common methods with other tools +# CHECKED IN FOR 17.1.1: By LeFebvre 09/23/2016 finish converstion to numpy conventions. +# Modified: By LeFebvre 09/26/16 - Removed commented out code to pass code review. +# Modified: By LeFebvre 10/31/16 - Added more code to ensure only one cyclone center point is calculated +# Modified: By LeFebvre 07/18/17 - Added option to populate based on Preliminary or Official prob guidance. +#---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# +# THIS CODE SHALL NOT BE CHANGED WITHOUT CONSULTING ORIGINAL DEVELOPERS. +# +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. +# Possible items are: Populate, Edit, Consistency, Verify, Hazards +MenuItems = ["Populate","Edit"] + +# The ToolList is optional, but recommended, if you are calling +# Smart Tools from your Script. +# If present, it can be used to show which grids will be +# modified by the Script. 
+ + +VariableList = [("Probabilistic Wind Source?", "Official", "radio", ["Official", "Preliminary"]), + ("Forecast Confidence?", "Typical (Combined; For ill defined or for most systems anytime within 48 hours of landfall or closest approach)", + "radio", ["Typical (Combined; For ill defined or for most systems anytime within 48 hours of landfall or closest approach)", + "High (Combined; for well-behaved systems within 12 hours of landfall or closest approach)", + "Higher (Combined; for very well-behaved systems within 6 hours of landfall or closest approach)", + "Highest (Deterministic-only; assumes a perfect forecast)"]), + ("WindMax Source?", "WFO Mesoscale (default)", "radio", ["WFO Mesoscale (default)","NHC/TCM Synoptic"]), + ] + +import TropicalUtility +import string +import TimeRange +import AbsTime +import time +import MetLib +import sys +import math +import numpy as np + +class Procedure (TropicalUtility.TropicalUtility): + + def __init__(self, dbss): + TropicalUtility.TropicalUtility.__init__(self, dbss) + + + # Finds the TPC prob database, extracts all of the grids and returns + # the last one of each type + def getProbGrids(self): + ap = self.availableParms() + searchStr = self.getSiteID() + "_GRID_D2D_" + self._probWindModelSource + probModel = "" + for elem, level, model in ap: + if searchStr in model.modelIdentifier(): + probModel = model.modelIdentifier() + break + + if probModel == "": + self.statusBarMsg("TPC Wind probability grids not found.", "S") + return None, None, None + + # make a big timeRange + timeRange = TimeRange.allTimes() + + # get the TPC prob grids, and keep the last one + prob34List = self.getGrids(probModel, "prob34", "FHAG10", timeRange, + mode = "List") + prob34Grid = prob34List[-1] + + prob50List = self.getGrids(probModel, "prob50", "FHAG10", timeRange, + mode = "List") + prob50Grid = prob50List[-1] + + prob64List = self.getGrids(probModel, "prob64", "FHAG10", timeRange, + mode = "List") + prob64Grid = prob64List[-1] + + 
return prob34Grid, prob50Grid, prob64Grid + + + # Make a timeRange spanning from the current hour to 6 hours hence + def make6hrTimeRange(self): + cTime = int(self._gmtime().unixTime()/ 3600) * 3600 + startTime = AbsTime.AbsTime(cTime) + endTime = AbsTime.AbsTime(cTime + (6 * 3600)) # 6 hours + tr = TimeRange.TimeRange(startTime, endTime) + + return tr + + + # Make a timeRange spanning the usual -24 to 10-day db + def makeDBTimeRange(self): + cTime = int(self._gmtime().unixTime()/ 3600) * 3600 + startTime = AbsTime.AbsTime(cTime - (24*3600)) + endTime = AbsTime.AbsTime(cTime + (240 * 3600)) # 10 days + tr = TimeRange.TimeRange(startTime, endTime) + + return tr + + + # Removes all grids from a selected set of temporary parms + def removeTempGrids(self): + elementList = ["Prob34", "Prob50", "Prob64", "WindMax"] + ap = self.availableParms() + tr = TimeRange.allTimes() + + for elem, level, model in ap: + modelName = model.modelIdentifier() + if elem in elementList and level == "SFC": + self.deleteCmd([elem], tr) + return + + # Returns the distance between two points + def pointDist(self, a, b): + return math.sqrt((a[0] - a[1]) * (a[0] - a[1])) + ((b[0] - b[1]) * (b[0] - b[1])) + + def distSort(self, a, b): + + return cmp(a, b) + + # Calculates the average location of the specified points + def avgLocation(self, pointList): + # calc avg + xSum = 0.0 + ySum = 0.0 + for x, y in pointList: + xSum = xSum + x + ySum = ySum + y + + avgX = xSum / len(pointList) + avgY = ySum / len(pointList) + + avgLoc = (avgX, avgY) + + return avgLoc + + # Given a list of (x, y) pairs, determine the true center of "the cluster" of + # points. + def selectSingleCenter(self, pointList): + + # If there's only one point, we're done. 
+ if len(pointList) == 1: + return pointList[0] + + # calc the distance from each point to the average location of all the points + avgLoc = self.avgLocation(pointList) + #print "avgLoc:", avgLoc + + distList = [] + for x, y in pointList: + dist = self.pointDist(avgLoc, (x, y)) + distList.append((dist, x, y)) + + # sort smallest to largest distance + distList.sort(self.distSort) + #print "DistanceList after sort:", distList + + # Now collect the closest n points + closestPoint = (distList[0][1], distList[0][2]) + #print "closestPoint:", closestPoint + + # See if the closest point +# if self.pointDist(avgLoc, closestPoint) > 20: +# self.statusBarMsg("Warning! No cluster of points found. Center not reliable", "U") + + cluster = [closestPoint] + for d, x, y in distList: + dist = self.pointDist(closestPoint, (x, y)) + if dist < 5: + cluster.append((x, y)) + + avgX, avgY = self.avgLocation(cluster) + + avgX = int(avgX + 0.5) + avgY = int(avgY + 0.5) + + return avgX, avgY + + + # Calculates the grid position of the storm center + def getStormCenter(self, windGrid, tr): + u, v = self.MagDirToUV(windGrid[0], windGrid[1]) + vortGrid = MetLib.vorticity((u, v)) + vortGrid = self.GM_smoothGrid(vortGrid, 3) + vortGrid = np.clip(vortGrid, 0.0, 1000000) + # Find the max vorticity magnitude + #maxVort = np.amax(vortGrid) + maxVort = np.max(vortGrid.flat) + if maxVort < 10: + return None, None + + # Find the location of the max (i.e. the center) + mask = (vortGrid == maxVort) + centerList = np.nonzero(mask.flat)[0] + + # Convert the list of nonzero points to x, y coordinates and append to a list + pointList = [] + for c in centerList: + row = c / mask.shape[1] + col = c % mask.shape[1] + pointList.append((col, row)) + + # Call this method to select a single center + centerCol, centerRow = self.selectSingleCenter(pointList) + + return centerCol, centerRow + + + # Using the center, a distance grid from that center and a mask + # indicating the area that includes the maxWind field. 
This + # method returns the radius of that area in gridpoint units. + # Note this method adds 2 gridpoints since the area tends to + # fall a little short of the actual max wind value. + def calcRadius(self, center, distGrid, mask): + maskPoints = np.nonzero(mask) + if len(maskPoints) == 0: + return 0 + + histDict = {} + yCoords, xCoords = maskPoints + + for i in range(len(yCoords)): + dist = int(distGrid[yCoords[i], xCoords[i]]) + + if dist not in histDict: + histDict[dist] = 1 + else: + histDict[dist] = histDict[dist] + 1 + + histKeys = list(histDict.keys()) + histKeys.sort() + lastKey = 0 + for key in histKeys: + if key - lastKey >=2: + return lastKey + 2 + lastKey = key + + return lastKey + 2 + + + # Makes a grid of distance from the specified center + # which must be expressed as a tuple (x, y) + def makeDistGrid(self, center): + iGrid = np.indices(self.getGridShape()) + yDist = center[1] - iGrid[0] + xDist = center[0] - iGrid[1] + distGrid = np.sqrt(xDist**2 + yDist**2) + + return distGrid + + + # Given a specified wind speed and direction grid along with + # a distance grid, this method computes a mask that covers + # the area defined by the ring of max winds. + def makeCoreMask(self, ws, wd, distGrid): + # Find the area inside the max wind ring + u, v = self.MagDirToUV(ws, wd) + windGrad = MetLib.gradient(ws) + distGrad = MetLib.gradient(distGrid) + dotGrid = MetLib.dot(windGrad, distGrad) + + # smooth this grid to remove noise + dotGrid = self.GM_smoothGrid(dotGrid, 9) + mask = dotGrid > 0.0 + + return mask + + + # This method returns the max wind speed value found in the + # specified wind speed grid. This method restricts the search + # to an area just inside and just outside the specified radius. 
+ def getMaxWindValue(self, distGrid, radius, ws, tr): + maxWS = 0.0 + lessMask = distGrid < (radius + 2) + moreMask = distGrid > (radius - 7) + mask = lessMask & moreMask +## self.createGrid("Fcst", "MaxMask", "SCALAR", mask, tr) + ringGrid = ws * mask + thisMax = np.amax(ringGrid) + if thisMax > maxWS: + maxWS = thisMax + + return maxWS + + + # This method adjusts and returns the specified maxWindGrid by + # temporally interpolating each wind grid found in the current + # inventory. A vorticity field is used to determine the storm + # center. Then the wind speed and direction grids are used to + # calculate the area surrounded by the ring of max winds. This + # area is used to calculate the radius of max wind and the all + # of the attributes are used to calculate the max wind field + # over the entire event by interpolating temporally. This max + # wind field is later used to calculate the WindThreat. + def adjustWindMax(self, maxWindGrid): + trList = self.GM_getWEInventory("Wind") + if len(trList) == 0: + return + + adjustedWindMax = maxWindGrid + stormMaxWindValue = -1 + + modifiedMask = np.zeros(self.getGridShape(), np.bool) + + lastXPos = None + lastYPos = None + lastRadius = None + lastWindMax = None + for tr in trList: + + ws, wd = self.getGrids("Fcst", "Wind", "SFC", tr, mode="First") + xPos, yPos = self.getStormCenter((ws, wd), tr) + + # See if the storm is outside the GFE domain. 
If it is, just update + # the adjustedWindMax and the stormMaxWindValue + if xPos is None or yPos is None: + mask = ws > adjustedWindMax + adjustedWindMax[mask] = ws[mask] + + # Update the overall max + gridWindMax = np.amax(ws) # Max value over the whole grid + stormMaxWindValue = max(gridWindMax, stormMaxWindValue) + continue + + # calc change in wind speed as a function of radius + distGrid = self.makeDistGrid((xPos, yPos)) + coreMask = self.makeCoreMask(ws, wd, distGrid) + + # first time through just store the position + if lastXPos == None or lastYPos == None: + lastXPos = xPos + lastYPos = yPos + lastRadius = self.calcRadius((xPos, yPos), distGrid, coreMask) + lastWindMax = self.getMaxWindValue(distGrid, lastRadius, ws, tr) + continue + + # get the maxWindRadius in grid cell coordinates + radius = self.calcRadius((xPos, yPos), distGrid, coreMask) + if radius is None: + continue + + # Get the max wind value, but restrict it to near the radius + maxWindValue = self.getMaxWindValue(distGrid, radius, ws, tr) + #print "unrestricted max Wind Value: ", maxWindValue + + dr = radius - lastRadius + + # Calculate distance from last point + dx = xPos - lastXPos + dy = yPos - lastYPos + dWind = maxWindValue - lastWindMax + + # One iteration per grid point + steps = abs(max(dx, dy)) + + for i in range(steps): + x = lastXPos + (float(dx) / steps) * i + y = lastYPos + (float(dy) / steps) * i + r = lastRadius + (float(dr) / steps) * i + + windValue = lastWindMax + (float(dWind) / steps) * i + distGrid = self.makeDistGrid((x, y)) + + # Change grids at the intersection of a circle defined by + # the center and radius and points that whose value is + # less than the current maxWind value + distMask = distGrid <= r + lessMask = adjustedWindMax < windValue + + + mask = distMask & lessMask # union of dist and less masks + + adjustedWindMax[mask] = windValue + + modifiedMask = mask | modifiedMask + + lastXPos = xPos + lastYPos = yPos + lastRadius = radius + lastWindMax = 
maxWindValue + + # Calculate the maximum wind value over the modified area + stormMaxWindValue = np.amax(adjustedWindMax * modifiedMask) + + #print "stormMaxWindValue: ", stormMaxWindValue + # smooth out moderate values to remove "quadrant effect" + adjustedWindMax = self.GM_smoothGrid(adjustedWindMax, 5) + + return adjustedWindMax, stormMaxWindValue + + + # fetch a grid that represents the maxWind everywhere for the next 5 days + def getWindMax(self, timeRange): + + cTime = int(self._gmtime().unixTime()/ 3600) * 3600 + startTime = AbsTime.AbsTime(cTime) + endTime = startTime + (5 * 24 * 3600) # 5 days from the startTime + tr = TimeRange.TimeRange(startTime, endTime) + + try: + windMax, dir = self.getGrids("Fcst", "Wind", "SFC", tr, mode = "Max") + except: + windMax = None + self.statusBarMsg("No Wind grids found. Please define Wind grids first.", + "S") + return windMax + + + def getMaxWindGrid(self): + try: + grid = self.getObject("TCMMaxWindGrid", "WindGrid") + maxWindValue = np.amax(grid) + return (grid, maxWindValue) + except: + self.statusBarMsg("No TCMMaxWindGrid found.", "S") + return (None, None) + + + def execute(self, varDict): + + # Fetch the model source and define the model name + sourceDB = varDict["Probabilistic Wind Source?"] + if sourceDB == "Official": + self._probWindModelSource = "TPCProb" + elif sourceDB == "Preliminary": + self._probWindModelSource = "TPCProbPrelim" + else: + self.statusBarMsg("Unknown model source selected. 
Aborting.", "U") + return + + # Get confidence value from the dialog + confidenceStr = varDict["Forecast Confidence?"] + tcmwindmax = varDict["WindMax Source?"] + + # define a couple of boolean flags that will come in handy later + # allProb = confidenceStr == "Low (Probability-only; 10% Default-05% NE Coast)" + # allWind = confidenceStr == "Highest (Deterministic-only; MaxWind Composite)" + + #allProb = confidenceStr == "Low (Probability-only; for ill-behaved systems)" + allWind = confidenceStr == "Highest (Deterministic-only; assumes a perfect forecast)" + typical = confidenceStr == "Typical (Combined; For ill defined or for most systems anytime within 48 hours of landfall or closest approach)" + high = confidenceStr == "High (Combined; for well-behaved systems within 12 hours of landfall or closest approach)" + higher = confidenceStr == "Higher (Combined; for very well-behaved systems within 6 hours of landfall or closest approach)" + + #print "allProb and allWind are: ", allProb, allWind + + # extract the percent value from this string + # pctPos = confidenceStr.find("% Default") + # pctStr = confidenceStr[pctPos - 2:pctPos] + pctStr="10" + if high: + pctStr="20" + elif higher: + pctStr="30" + + # Percent thresholds for each confidence category + threatDict = {"10" : [10.0, 10.0, 10.0, 20.0, 30.0], + "20" : [20.0, 20.0, 20.0, 30.0, 40.0], + "30" : [30.0, 30.0, 30.0, 40.0, 50.0], + } + # wind thresholds for each threat category + windDict = {'None' : 0.0, + # 'Very Low' : 34.0, + #'Elevated' : 50.0, + 'Elevated': 34.0, + 'Mod' : 50.0, + 'High1' : 64.0, + 'High2' : 89.0, + 'Extreme' : 96.0, + } + + # Make sure the string is valid. If not then assign any value since the user + # has indicted Highest confidence in the wind field and is not using the + # probabilistic grids at all. 
+ + #if not threatDict.has_key(pctStr): + # pctStr = "10" + + #print "pctStr is: ", pctStr + + # Extract the proper list and assign thresholds + thresholdList = threatDict[pctStr] +# + t34TS1 = thresholdList[0] + t50TS2 = thresholdList[1] + t64Cat1 = thresholdList[2] + t64Cat2 = thresholdList[3] + t64Cat3 = thresholdList[4] + + timeRange = self.make6hrTimeRange() + + # Remove previous version of grids. + self.removeTempGrids() + + # set up the indices for the discrete keys + keys = self.getDiscreteKeys("WindThreat") +## print "WindThreat keys are:", keys + noneIndex = self.getIndex("None", keys) + + lowIndex = self.getIndex("Elevated", keys) + modIndex = self.getIndex("Mod", keys) + highIndex = self.getIndex("High", keys) + extremeIndex = self.getIndex("Extreme", keys) + + # Initialize the threat grid + threatGrid = self.empty(np.int8) # a grid of zeros + + # Attempt to get the grid from the server. + windMax, maxWindValue = self.getMaxWindGrid() + + #print "MAXWIND ACROSS DOMAIN IS: ", maxWindValue + + # Use the old-fashioned method + if windMax is None or tcmwindmax == "WFO Mesoscale (default)": + + # Get and adjust a grid of maximum wind over the entire storm + windMax = self.getWindMax(timeRange) + + windMax, maxWindValue = self.adjustWindMax(windMax) + + # Create this grid, if it is valid + if windMax is not None: + self.createGrid("Fcst", "WindMax", "SCALAR", windMax, timeRange, + minAllowedValue=0, maxAllowedValue=200) + + # Assign values to the grid based on the probability grids + if allWind: + + threatGrid[windMax >= windDict["Elevated"]] = lowIndex + threatGrid[windMax >= windDict["Mod"]] = modIndex + threatGrid[windMax >= windDict["High1"]] = highIndex + threatGrid[windMax >= windDict["Extreme"]] = extremeIndex + + else: + + # high and extreme threats require maxWind to meet particular windMax criteria + # Fetch the probabilistic grids + prob34Grid, prob50Grid, prob64Grid = self.getProbGrids() + + #self.createGrid("Fcst", "Prob34", "SCALAR", prob34Grid, 
timeRange) + #self.createGrid("Fcst", "Prob50", "SCALAR", prob50Grid, timeRange) + #self.createGrid("Fcst", "Prob64", "SCALAR", prob64Grid, timeRange) + #print "MAXWIND IS: ", maxWindValue + + threatGrid[prob34Grid >= t34TS1] = lowIndex + threatGrid[prob50Grid >= t50TS2] = modIndex + threatGrid[prob64Grid >= t64Cat1] = highIndex + + if maxWindValue >= windDict['High2']: + threatGrid[prob64Grid >= t64Cat3] = extremeIndex + if maxWindValue >= windDict['Extreme']: + threatGrid[prob64Grid >= t64Cat2] = extremeIndex + + #=================================================================== + # Upgrade windThreat based on windMax grid +## + # Upgrade None to Elevated + windMask = ((windMax >= windDict['Elevated']) & + (windMax < windDict['Mod'])) + threatMask = threatGrid < lowIndex + threatGrid[windMask & threatMask] = lowIndex + + # Upgrade Elevated to Med + windMask = ((windMax >= windDict['Mod']) & + (windMax < windDict['High1'])) + threatMask = threatGrid < modIndex + threatGrid[windMask & threatMask] = modIndex + + # Upgrade Med to High + windMask = ((windMax >= windDict['High1']) & + (windMax < windDict['Extreme'])) + threatMask = threatGrid < highIndex + threatGrid[windMask & threatMask] =highIndex + + # Upgrade High to Extreme + windMask = windMax >= windDict['Extreme'] + threatMask = threatGrid < extremeIndex + threatGrid[windMask & threatMask] = extremeIndex + + # Remove previous version of grid. 
+ dbTimes = self.makeDBTimeRange() + self.deleteCmd(['WindThreat'], dbTimes) + + # create the threat grid + self.createGrid("Fcst", "WindThreat", "DISCRETE", (threatGrid, keys), + timeRange, discreteKeys=keys, discreteAuxDataLength=5, + discreteOverlap=0) + + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ViewWCL.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ViewWCL.py index 39e6a83bbc..c13e7a181e 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ViewWCL.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/procedures/ViewWCL.py @@ -1,383 +1,383 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ViewWCL +# +# Author: Matt Davis/ARX, Tom LeFebvre - Modified to fetch inventory # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ViewWCL -# -# Author: Matt Davis/ARX, Tom LeFebvre - Modified to fetch inventory -# -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -# The MenuItems list defines the GFE menu item(s) under which the -# Procedure is to appear. 
-# Possible items are: Populate, Edit, Consistency, Verify, Hazards - -MenuItems = ["Hazards"] - -import os -import SmartScript, re, time -import AbsTime -import TimeRange -import Tkinter -import HazardUtils -from numpy import * - -class Procedure (SmartScript.SmartScript): - - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self._dbss = dbss - - # creates the various UI objects - def setUpUI(self): - - self.__inventoryDict = self.getWCLInventoryDict() - - # first check to see if there's any data we can plot - if len(self.__inventoryDict.keys()) == 0: # no entries in the dict - self.statusBarMsg("There are no current WCLs to view.", "S") - self.cancel() - return - - # make the frames - self.__master = Tkinter.Tk() - self.__master.title('ViewWCL') - self.__topFrame = Tkinter.Frame(self.__master) - - self.__topFrame.pack(side=Tkinter.TOP, expand=Tkinter.YES, - anchor=Tkinter.N, fill=Tkinter.BOTH) - - self.__listFrame = Tkinter.Frame(self.__topFrame, borderwidth=3, - relief=Tkinter.GROOVE) - self.__buttonFrame = Tkinter.Frame(self.__master) - - - self.makeInventoryButtons(self.__inventoryDict) - - # make the buttons at the bottom of the dialog - self.makeRunButton() - self.makeRunDismissButton() - self.makeCancelButton() - - self.__buttonFrame.pack(side=Tkinter.TOP) - - ## - # Get the directory in which decoded WCLs are stored from GFE localization. 
- # - # @return: the WCL directory - # @rtype: string - def getWclDir(self): - # get the path manager - from com.raytheon.uf.common.localization import PathManagerFactory - pathManager = PathManagerFactory.getPathManager() - - # get the proper localization context - from com.raytheon.uf.common.localization import LocalizationContext - LocalizationType = LocalizationContext.LocalizationType - CAVE_STATIC = LocalizationType.valueOf("CAVE_STATIC") - LocalizationLevel = LocalizationContext.LocalizationLevel - SITE = LocalizationLevel.valueOf("SITE") - ctx = pathManager.getContext(CAVE_STATIC, SITE) - - # use localization to get the full path - wclName = os.path.join("gfe", "wcl") - wclDir = pathManager.getFile(ctx, wclName) - return wclDir.getAbsolutePath() - - # gets the text inventory for WCL products and stores in a dictionary - def getWCLInventoryDict(self): - - invDict = {} - wclDir = self.getWclDir(); - - if os.path.exists(wclDir): - for name in os.listdir(wclDir): - if re.search(r'^WCL[A-J]$', name): - absName = os.path.join(wclDir, name) - file = None - text = None - try: - file = open(absName, "r") - text = file.read(0xffff) - finally: - if file is not None: - file.close() - if text is not None: - dataDict = {} - exec text in dataDict - dataDict.setdefault("issueTime", 0) - dataDict.setdefault("expTime", 0) - issueTime = dataDict["issueTime"] - expTime = dataDict["expTime"] - timeDiff = (AbsTime.current().unixTime() - issueTime) - if timeDiff < 6 * 3600 and issueTime < expTime: - invDict[name] = dataDict - - return invDict - - # Make a button for each entry in the inventory - def makeInventoryButtons(self, invDict): - - labelStr = Tkinter.StringVar() - labelStr.set("Name Issuance Time") - label = Tkinter.Label(self.__listFrame, textvariable=labelStr) - label.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO, padx=10, - pady=10) - - invList = [] - for name in invDict: - invList.append((invDict[name]['issueTime'], name)) - invList.sort() - 
invList.reverse() - firstName = invList[0][1] - self.prodSelected = Tkinter.StringVar() - for issueTime, name in invList: - timeStr = time.strftime("%a %d %b %H%MZ", time.gmtime(issueTime)) - buttonStr = name + " " + timeStr - - button = Tkinter.Radiobutton(self.__listFrame, highlightthickness = 0, - text=buttonStr, value=name, - variable=self.prodSelected, - command=self.WCLSelected) - if name == firstName: - button.select() - - button.pack(side=Tkinter.TOP, anchor=Tkinter.W) - - self.__listFrame.pack(side=Tkinter.TOP, expand=Tkinter.NO, - fill=Tkinter.Y, anchor=Tkinter.N) - return - - # called when a selection is made (does nothing) - def WCLSelected(self): - return - - def _deleteWCLGrids(self): - tr = TimeRange.allTimes() - gridInfo = self.getGridInfo("WCL", "ProposedWatches", "SFC", tr) - trList = [] - for g in gridInfo: - self.deleteGrid("WCL", "ProposedWatches", "SFC", g.gridTime()) - - def makeRunButton(self): - # create the Run button - Tkinter.Button(self.__buttonFrame, text="Run", - width=10, command=self.runCommand, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - - ### Makes the Run/Dismiss buttom - def makeRunDismissButton(self): - # create the Combine button - Tkinter.Button(self.__buttonFrame, text="Run/Dismiss", - width=10, command=self.runDismissCommand, - state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5, padx=10) - - ### Makes the Cancel buttom - def makeCancelButton(self): - # create the Combine button - Tkinter.Button(self.__buttonFrame, text="Cancel", - width=10, command=self.cancelCommand, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - - ### called when the Run button is selected - def runCommand(self): - prodName = self.prodSelected.get() - self.plotWCL(prodName) - return - - ### called when the Run/Dismiss button is selected - def runDismissCommand(self): - prodName = self.prodSelected.get() - self.plotWCL(prodName) - self.cancelCommand() - - ### called when the Cancel button is selected - def 
cancelCommand(self): - # unregister the maps - self.__master.destroy() - - # Main block of the tool. Sets up the UI. - def execute(self, timeRange): - self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) - - self.setToolType("numeric") - - # see if the Hazards WE is loaded in the GFE, if not abort the tool - if not self._hazUtils._hazardsLoaded(): - self.statusBarMsg("Hazards Weather Element must be loaded in " + \ - "the GFE before running ViewWCL", "S") - self.cancel() - - self.setUpUI() - self.__master.mainloop() - - return - - # Does the work of plotting the watch areas. Fetches the specified text - # product, deocodes it and creates a temporary grid that shows the areas - # of the proposed watches - def plotWCL(self, productName): - #extract the data fro the dictionary - watchType = self.__inventoryDict[productName]['watchType'] - expTime = self.__inventoryDict[productName]['expTime'] - issueTime = self.__inventoryDict[productName]['issueTime'] - finalUGCList = self.__inventoryDict[productName]['finalUGCList'] - - currentTime = time.time() - startTime = None - endTime = None - - # Select WCL to plot - wclVersion = productName - - # This section reads the active table and decodes current watches - activeTable = self.vtecActiveTable() - - # Remove unwanted data - cleanTable = [] - for each in activeTable: - if not each.has_key('pil'): - continue - if not each['pil'] == 'WCN': - continue - if not each.has_key('endTime'): - continue - if each['endTime'] <= currentTime: - continue - if not each.has_key('act'): - continue - - if each['act'] not in ['CAN', 'EXP']: - cleanTable.append(each) - if startTime is None: - startTime = each['startTime'] - elif startTime > each['startTime']: - startTime = each['startTime'] - if endTime is None: - endTime = each['endTime'] - elif endTime > each['endTime']: - endTime = each['endTime'] - - # Adjust start/end times based on issueTime, expTime - if endTime is None or expTime > endTime: - endTime = expTime - if startTime is 
None or issueTime < startTime: - startTime = issueTime - - # Round to hour - startTime = int(startTime / 3600) * 3600 - - # Change keys for this procedure - for each in cleanTable: - if each['phensig'] == 'SV.A': - each['phensig'] = 'sv.a' - else: - each['phensig'] = 'to.a' - - # Create a master list of all IDs - watchUGCs = [] - for each in cleanTable: - if each['id'] not in watchUGCs: - watchUGCs.append(each['id']) - for each in finalUGCList: - if each not in watchUGCs: - watchUGCs.append(each) - - # Next, loop over the master ID list to determine the final key to - # plot. - finalKeys = [] - for each in watchUGCs: - actualKey = '' - for eachRecord in cleanTable: - if eachRecord['id'] == each: - actualKey = eachRecord['phensig'] - if each in finalUGCList: - if actualKey != '': - if actualKey == 'sv.a': - if watchType == 'SV.A': - actualKey = 'sv->SV' - else: - actualKey = 'sv->TO' - else: - if watchType == 'SV.A': - actualKey = 'to->SV' - else: - actualKey = 'to->TO' - else: - actualKey = watchType - finalKeys.append((each, actualKey)) - - # Get the ending day/time to create a timeRange. Don't have to bother - # with day groups, as watches are always < 24 hrs. 
- #ensure sanity - if abs(time.time() - startTime) > 43200: - startTime = int(time.time() / 3600) * 3600 - if abs(time.time() - endTime) > 43200: - endTime = int(time.time() / 3600) * 3600 + (43200) - timeRange = TimeRange.TimeRange(AbsTime.AbsTime(startTime), - AbsTime.AbsTime(endTime)) - - # Create a dummy grid of zeros - grid = self.empty(int8) - - # Define the allowed keys - keys = ['','SV.A','TO.A','sv.a','to.a','to->SV','sv->SV','to->TO','sv->TO'] - - # Loop over the finalKeys list and plot - eaList = self.editAreaList() - - for each in finalKeys: - watchIndex = self.getIndex(each[1], keys) - - # Set each edit area found in the WCL to the mask value - mask = self.empty(bool) - if each[0] in eaList: - zoneArea = self.getEditArea(each[0]) - zoneMask = self.encodeEditArea(zoneArea) - mask[zoneMask] = True - grid[mask] = watchIndex - - - #remove any existing grid - parms = self.loadedParms() - for weName, level, dbID in parms: - if weName == "ProposedWatches" and level == "SFC" and \ - dbID.modelName() == "WCL": - # found parm, delete any grids - self._deleteWCLGrids() - break - - self.createGrid("WCL", "ProposedWatches", "DISCRETE", (grid, keys), \ - timeRange, discreteKeys=keys, discreteOverlap=0, \ - discreteAuxDataLength=0) - self.setActiveElement("WCL", "ProposedWatches", "SFC", timeRange, \ - colorTable='GFE/WCLHazards') - - return - - - +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +# The MenuItems list defines the GFE menu item(s) under which the +# Procedure is to appear. 
+# Possible items are: Populate, Edit, Consistency, Verify, Hazards + +MenuItems = ["Hazards"] + +import os +import SmartScript, re, time +import AbsTime +import TimeRange +import tkinter +import HazardUtils +from numpy import * + +class Procedure (SmartScript.SmartScript): + + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss = dbss + + # creates the various UI objects + def setUpUI(self): + + self.__inventoryDict = self.getWCLInventoryDict() + + # first check to see if there's any data we can plot + if len(list(self.__inventoryDict.keys())) == 0: # no entries in the dict + self.statusBarMsg("There are no current WCLs to view.", "S") + self.cancel() + return + + # make the frames + self.__master = tkinter.Tk() + self.__master.title('ViewWCL') + self.__topFrame = tkinter.Frame(self.__master) + + self.__topFrame.pack(side=tkinter.TOP, expand=tkinter.YES, + anchor=tkinter.N, fill=tkinter.BOTH) + + self.__listFrame = tkinter.Frame(self.__topFrame, borderwidth=3, + relief=tkinter.GROOVE) + self.__buttonFrame = tkinter.Frame(self.__master) + + + self.makeInventoryButtons(self.__inventoryDict) + + # make the buttons at the bottom of the dialog + self.makeRunButton() + self.makeRunDismissButton() + self.makeCancelButton() + + self.__buttonFrame.pack(side=tkinter.TOP) + + ## + # Get the directory in which decoded WCLs are stored from GFE localization. 
+ # + # @return: the WCL directory + # @rtype: string + def getWclDir(self): + # get the path manager + from com.raytheon.uf.common.localization import PathManagerFactory + pathManager = PathManagerFactory.getPathManager() + + # get the proper localization context + from com.raytheon.uf.common.localization import LocalizationContext + LocalizationType = LocalizationContext.LocalizationType + CAVE_STATIC = LocalizationType.valueOf("CAVE_STATIC") + LocalizationLevel = LocalizationContext.LocalizationLevel + SITE = LocalizationLevel.valueOf("SITE") + ctx = pathManager.getContext(CAVE_STATIC, SITE) + + # use localization to get the full path + wclName = os.path.join("gfe", "wcl") + wclDir = pathManager.getFile(ctx, wclName) + return wclDir.getAbsolutePath() + + # gets the text inventory for WCL products and stores in a dictionary + def getWCLInventoryDict(self): + + invDict = {} + wclDir = self.getWclDir(); + + if os.path.exists(wclDir): + for name in os.listdir(wclDir): + if re.search(r'^WCL[A-J]$', name): + absName = os.path.join(wclDir, name) + file = None + text = None + try: + file = open(absName, "r") + text = file.read(0xffff) + finally: + if file is not None: + file.close() + if text is not None: + dataDict = {} + exec(text, dataDict) + dataDict.setdefault("issueTime", 0) + dataDict.setdefault("expTime", 0) + issueTime = dataDict["issueTime"] + expTime = dataDict["expTime"] + timeDiff = (AbsTime.current().unixTime() - issueTime) + if timeDiff < 6 * 3600 and issueTime < expTime: + invDict[name] = dataDict + + return invDict + + # Make a button for each entry in the inventory + def makeInventoryButtons(self, invDict): + + labelStr = tkinter.StringVar() + labelStr.set("Name Issuance Time") + label = tkinter.Label(self.__listFrame, textvariable=labelStr) + label.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO, padx=10, + pady=10) + + invList = [] + for name in invDict: + invList.append((invDict[name]['issueTime'], name)) + invList.sort() + 
invList.reverse() + firstName = invList[0][1] + self.prodSelected = tkinter.StringVar() + for issueTime, name in invList: + timeStr = time.strftime("%a %d %b %H%MZ", time.gmtime(issueTime)) + buttonStr = name + " " + timeStr + + button = tkinter.Radiobutton(self.__listFrame, highlightthickness = 0, + text=buttonStr, value=name, + variable=self.prodSelected, + command=self.WCLSelected) + if name == firstName: + button.select() + + button.pack(side=tkinter.TOP, anchor=tkinter.W) + + self.__listFrame.pack(side=tkinter.TOP, expand=tkinter.NO, + fill=tkinter.Y, anchor=tkinter.N) + return + + # called when a selection is made (does nothing) + def WCLSelected(self): + return + + def _deleteWCLGrids(self): + tr = TimeRange.allTimes() + gridInfo = self.getGridInfo("WCL", "ProposedWatches", "SFC", tr) + trList = [] + for g in gridInfo: + self.deleteGrid("WCL", "ProposedWatches", "SFC", g.gridTime()) + + def makeRunButton(self): + # create the Run button + tkinter.Button(self.__buttonFrame, text="Run", + width=10, command=self.runCommand, state=tkinter.NORMAL).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + + ### Makes the Run/Dismiss buttom + def makeRunDismissButton(self): + # create the Combine button + tkinter.Button(self.__buttonFrame, text="Run/Dismiss", + width=10, command=self.runDismissCommand, + state=tkinter.NORMAL).pack(side=tkinter.LEFT, pady=5, padx=10) + + ### Makes the Cancel buttom + def makeCancelButton(self): + # create the Combine button + tkinter.Button(self.__buttonFrame, text="Cancel", + width=10, command=self.cancelCommand, state=tkinter.NORMAL).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + + ### called when the Run button is selected + def runCommand(self): + prodName = self.prodSelected.get() + self.plotWCL(prodName) + return + + ### called when the Run/Dismiss button is selected + def runDismissCommand(self): + prodName = self.prodSelected.get() + self.plotWCL(prodName) + self.cancelCommand() + + ### called when the Cancel button is selected + def 
cancelCommand(self): + # unregister the maps + self.__master.destroy() + + # Main block of the tool. Sets up the UI. + def execute(self, timeRange): + self._hazUtils = HazardUtils.HazardUtils(self._dbss, None) + + self.setToolType("numeric") + + # see if the Hazards WE is loaded in the GFE, if not abort the tool + if not self._hazUtils._hazardsLoaded(): + self.statusBarMsg("Hazards Weather Element must be loaded in " + \ + "the GFE before running ViewWCL", "S") + self.cancel() + + self.setUpUI() + self.__master.mainloop() + + return + + # Does the work of plotting the watch areas. Fetches the specified text + # product, deocodes it and creates a temporary grid that shows the areas + # of the proposed watches + def plotWCL(self, productName): + #extract the data fro the dictionary + watchType = self.__inventoryDict[productName]['watchType'] + expTime = self.__inventoryDict[productName]['expTime'] + issueTime = self.__inventoryDict[productName]['issueTime'] + finalUGCList = self.__inventoryDict[productName]['finalUGCList'] + + currentTime = time.time() + startTime = None + endTime = None + + # Select WCL to plot + wclVersion = productName + + # This section reads the active table and decodes current watches + activeTable = self.vtecActiveTable() + + # Remove unwanted data + cleanTable = [] + for each in activeTable: + if 'pil' not in each: + continue + if not each['pil'] == 'WCN': + continue + if 'endTime' not in each: + continue + if each['endTime'] <= currentTime: + continue + if 'act' not in each: + continue + + if each['act'] not in ['CAN', 'EXP']: + cleanTable.append(each) + if startTime is None: + startTime = each['startTime'] + elif startTime > each['startTime']: + startTime = each['startTime'] + if endTime is None: + endTime = each['endTime'] + elif endTime > each['endTime']: + endTime = each['endTime'] + + # Adjust start/end times based on issueTime, expTime + if endTime is None or expTime > endTime: + endTime = expTime + if startTime is None or issueTime < 
startTime: + startTime = issueTime + + # Round to hour + startTime = int(startTime / 3600) * 3600 + + # Change keys for this procedure + for each in cleanTable: + if each['phensig'] == 'SV.A': + each['phensig'] = 'sv.a' + else: + each['phensig'] = 'to.a' + + # Create a master list of all IDs + watchUGCs = [] + for each in cleanTable: + if each['id'] not in watchUGCs: + watchUGCs.append(each['id']) + for each in finalUGCList: + if each not in watchUGCs: + watchUGCs.append(each) + + # Next, loop over the master ID list to determine the final key to + # plot. + finalKeys = [] + for each in watchUGCs: + actualKey = '' + for eachRecord in cleanTable: + if eachRecord['id'] == each: + actualKey = eachRecord['phensig'] + if each in finalUGCList: + if actualKey != '': + if actualKey == 'sv.a': + if watchType == 'SV.A': + actualKey = 'sv->SV' + else: + actualKey = 'sv->TO' + else: + if watchType == 'SV.A': + actualKey = 'to->SV' + else: + actualKey = 'to->TO' + else: + actualKey = watchType + finalKeys.append((each, actualKey)) + + # Get the ending day/time to create a timeRange. Don't have to bother + # with day groups, as watches are always < 24 hrs. 
+ #ensure sanity + if abs(time.time() - startTime) > 43200: + startTime = int(time.time() / 3600) * 3600 + if abs(time.time() - endTime) > 43200: + endTime = int(time.time() / 3600) * 3600 + (43200) + timeRange = TimeRange.TimeRange(AbsTime.AbsTime(startTime), + AbsTime.AbsTime(endTime)) + + # Create a dummy grid of zeros + grid = self.empty(int8) + + # Define the allowed keys + keys = ['','SV.A','TO.A','sv.a','to.a','to->SV','sv->SV','to->TO','sv->TO'] + + # Loop over the finalKeys list and plot + eaList = self.editAreaList() + + for each in finalKeys: + watchIndex = self.getIndex(each[1], keys) + + # Set each edit area found in the WCL to the mask value + mask = self.empty(bool) + if each[0] in eaList: + zoneArea = self.getEditArea(each[0]) + zoneMask = self.encodeEditArea(zoneArea) + mask[zoneMask] = True + grid[mask] = watchIndex + + + #remove any existing grid + parms = self.loadedParms() + for weName, level, dbID in parms: + if weName == "ProposedWatches" and level == "SFC" and \ + dbID.modelName() == "WCL": + # found parm, delete any grids + self._deleteWCLGrids() + break + + self.createGrid("WCL", "ProposedWatches", "DISCRETE", (grid, keys), \ + timeRange, discreteKeys=keys, discreteOverlap=0, \ + discreteAuxDataLength=0) + self.setActiveElement("WCL", "ProposedWatches", "SFC", timeRange, \ + colorTable='GFE/WCLHazards') + + return + + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/CalculateRFD.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/CalculateRFD.py index 6e4c8e7cc3..b8861d1630 100755 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/CalculateRFD.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/CalculateRFD.py @@ -1,724 +1,724 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its 
usefulness for -# any purpose. -# -# CalculateRFD.py -# -# Author: dtomalak -# Optimized by njensen -# ---------------------------------------------------------------------------- - - -ToolType = "numeric" -WeatherElementEdited = "RFD" -from numpy import * -import time -HideTool = 0 - -# Set up Class -import SmartScript -# For available commands, see SmartScript - - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Required Method: Execute - # %comment - # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... - - def execute(self, T, RH, Wind, Sky, PoP, RFD, PERCENTGREEN, varDict, GridTimeRange): - "Put your tool description here" - t0 = time.time() - self._determinemaxmin(T, RH, Wind, PERCENTGREEN) #find absolute max/MIN values - self._popthresh = self._preciptthresh() - #st = time.time() - fuellag = self._create1hrtimelag(T,RH,Sky,GridTimeRange) - #ed = time.time() - #diff = ed- st - #diff = str(diff) - #print diff + " seconds to run fuel lag" - #st = time.time() - finemoist = self._calcfinefuelmoisture(fuellag, PERCENTGREEN, GridTimeRange) - #ed = time.time() - #diff = ed- st - #diff = str(diff) - #print diff + " seconds to run fine moist" - #finefuel, temp, Sky - #st = time.time() - ignite = self._calcignition(finemoist, T, Sky) - #ed = time.time() - #diff = ed- st - #diff = str(diff) - #print diff + " seconds to run ignition" - #st = time.time() - RFD = self._calcRFDINDEX(ignite, Wind) - #ed = time.time() - #diff = ed- st - #diff = str(diff) - #print diff + " seconds to run RFD" - RFDcat = self._catagoricalRFD(RFD) - #CLEAN UP WHERE SIG PRECIP IS OCCURING - #st = time.time() - #njensen RFDcat = where(greater_equal(PoP,self._popthresh),0,RFDcat) - RFDcat[greater_equal(PoP,self._popthresh)] = 0 - #ed = time.time() - #diff = ed- st - #diff = str(diff) - #print diff + " seconds to run RFDcat" - #print Wind[1] - 
#self.createGrid("EXP","finemoiost","SCALAR",finemoist,GridTimeRange) - #self.createGrid("EXP","ignite","SCALAR",ignite,GridTimeRange) - #self.createGrid("EXP","RFD","SCALAR",RFD,GridTimeRange) - #self.createGrid("EXP","SunT","SCALAR",sun,GridTimeRange) - #self.createGrid("EXP","FuelT","SCALAR",fuel,GridTimeRange) - t1 = time.time() - print "inside CalculateRFD_NDJ took:", (t1-t0) - return RFDcat - - #EACH STEP IN THE PROCESS WILL GET ITS OWN MODULE - #it appears that there are 4 main steps - #CALCULATIONS - #SINCE THIS TOOL REQUIRES SEVERAL DIFFERENT CALCULATIONS/COMPARISONS - #WILL DO EACH MODULALLY - def _create1hrtimelag(self, Tgrid, RHgrid,Skygrid,timer): - #Step one Make RH and T Catagory grids - #there are 6 temperature catagories and 21 Rh catagories - #create the temp catagories 0 - 5 - #njensen Tcat = where(Tgrid,0,0) - Tcat = zeros(Tgrid.shape, int32) - count = 0 - starter = 29 - tlist = [] - if self._mint <= starter: - #njensen Tcat = where(less_equal(Tgrid, starter), count, Tcat) - Tcat[Tgrid <= starter] = count - tlist.append(count) - count = 1 - while starter < 109: - if starter > self._maxt: - break - if starter+20 >= self._mint and starter <= self._maxt: - #njensen Tcat = where(logical_and(greater(Tgrid, starter), less_equal(Tgrid,starter + 20)), count, Tcat) - Tcat[logical_and(greater(Tgrid, starter), less_equal(Tgrid,starter + 20))] = count - tlist.append(count) - count = count + 1 - starter = starter + 20 - continue - if self._maxt >= 109: - #njensen Tcat = where(greater_equal(Tgrid, 109), count, Tcat) - Tcat[Tgrid >= 109] = count - tlist.append(count) - #NOW THE RHCAT - #njensen RHcat = where(RHgrid,0,0) - RHcat = zeros(RHgrid.shape, int32) - #njensen RHcat = where(less(RHgrid, self._minrh), 100, RHcat) - RHcat[RHgrid < self._minrh] = 100 - validrh = [] - if self._minrh <= 4: - #njensen RHcat = where(less_equal(RHgrid, 4), 0 , RHcat) - RHcat[RHgrid <= 4] = 0 - validrh.append(0) - starter = 1 - baseRh = 4 - while starter <= 19: - if baseRh > 
self._maxrh: - break - if self._minrh <= baseRh + 5 and baseRh <= self._maxrh: - #since RH values are even 4 % increments can use this short cut - #njensen RHcat = where(logical_and(greater(RHgrid, baseRh), less_equal(RHgrid, baseRh + 5)), starter, RHcat) - RHcat[logical_and(greater(RHgrid, baseRh), less_equal(RHgrid, baseRh + 5))] = starter - validrh.append(starter) - starter = starter + 1 - baseRh = baseRh + 5 - continue - - if self._maxrh > 99: - #njensen RHcat = where(greater(RHgrid, 99), 20, RHcat) - RHcat[RHgrid > 99] = 20 - validrh.append(20) - SKYcat = where(greater_equal(Skygrid, 75), 1, 0) - suntable,cloudtable = self._onehrtimelagtable() - - - ####NOW THE DATA HAS BEEN CATAGORIZED WILL HAVE TO STEP THROUGH EACH - #CATAGORY TO DETERMINE 1hr fuel lag grid - x = tlist[0] - y = validrh[0] - xsize = tlist[-1] - ysize = validrh[-1] - #njensen sunnyfuel = where(Skygrid, 0 , 0) - sunnyfuel = zeros(Skygrid.shape, int32) - hrtimelag = SKYcat -## while x <= xsize: -## row = suntable[x] -## rowcld = cloudtable[x] -## y = 0 -## while y <= ysize: -## value = row[y] -## valuecld = rowcld[y] -## tempo = where(logical_and(equal(Tcat, x), equal(RHcat, y)), 1, 0) -## fl = value#where(equal(SKYcat, 1), valuecld,value) -## sunnyfuel = tempo * fl #all locations that have 0 for tempo will be set to zero -## #sunnyfuel = where(logical_and(equal(Tcat, x), equal(RHcat, y)), value, sunnyfuel) -## y = y + 1 -## hrtimelag = where(greater(sunnyfuel,0),sunnyfuel, hrtimelag) -## continue -## x = x + 1 -## continue - while x <= xsize: - row = suntable[x] - y = validrh[0] - while y <= ysize: - value = row[y] - #njensen sunnyfuel = where(logical_and(equal(Tcat, x), equal(RHcat, y)), value, sunnyfuel) - sunnyfuel[logical_and(equal(Tcat, x), equal(RHcat, y))] = value - y = y + 1 - continue - x = x + 1 - continue - if SKYcat.any() : - #njensen cloudyfuel = where(Skygrid, 0 , 0) - cloudyfuel = zeros(Skygrid.shape, int32) - x = tlist[0] - y = validrh[0] - xsize = tlist[-1] - ysize = 
validrh[-1] - while x <= xsize: - row = cloudtable[x] - #row = suntable[x] - y = validrh[0] - while y <= ysize: - value = row[y] - #njensen cloudyfuel = where(logical_and(equal(Tcat, x), equal(RHcat, y)), value, cloudyfuel) - cloudyfuel[logical_and(equal(Tcat, x), equal(RHcat, y))] = value - y = y + 1 - continue - x = x + 1 - continue - hrtimelag = where(Skygrid,0,0) - if SKYcat.any(): - hrtimelag = where(equal(SKYcat, 1), cloudyfuel, sunnyfuel) - else: - hrtimelag = sunnyfuel - #self.createGrid("EXP","cldmoist","SCALAR",cloudyfuel,timer) - #self.createGrid("EXP","Sunmoist","SCALAR",hrtimelag,timer) - #self.createGrid("EXP","RHCAT","SCALAR",RHcat,timer) - #self.createGrid("EXP","SKYCAT","SCALAR",SKYcat,timer) - - return hrtimelag - def _calcfinefuelmoisture(self, timelagfuel, greeness, timer): - tabledat = self._finefuelmoisturetable() - x = len(timelagfuel) - y = len(timelagfuel[0]) - sizer = x * y - lagdata = reshape(timelagfuel,(sizer,)) - lagdata = sort(lagdata) - lagmin = lagdata[0] - lagmax = lagdata[-1] - #STEP ONE NEED TO CATAGORIZE THE 1hr time time lag and percent green data - #SIMILAR TO TEMPS AND RH - #THERE ARE 15 CATAGORIES FOR Fuel moisture and roughly 9 for precent green - #njensen Fuelcat = where(greeness,0,0) - Fuelcat = zeros(greeness.shape, int32) - #1-6 value the same as the step - stepper = 1 - count = 0 - laglist = [] - flcatlngth = len(tabledat) - if lagmin <= 6: - while stepper <= 6: - if stepper >= lagmin and stepper <= lagmax: - #njensen Fuelcat = where(equal(timelagfuel,stepper),count,Fuelcat) - Fuelcat[equal(timelagfuel,stepper)] = count - laglist.append(count) - stepper = stepper + 1 - count = count + 1 - # print stepper,count - continue - else: - stepper = 7 - count = 5 - if lagmin < 19 and lagmax > 6: - while stepper < 19: - #print stepper - if stepper+1 >= lagmin and stepper <= lagmax: - #njensen Fuelcat = where(logical_and(greater_equal(timelagfuel, stepper), less_equal(timelagfuel,stepper+1)),count,Fuelcat) - 
Fuelcat[logical_and(greater_equal(timelagfuel, stepper), less_equal(timelagfuel,stepper+1))] = count - laglist.append(count) - stepper = stepper + 2 - count = count + 1 - #print stepper, count - continue - else: - stepper = 19 - count = 11 - if lagmin < 25 and lagmax >= 19: - while stepper < 25: - #print str(stepper) + "$$$" - if stepper+2 >= lagmin and stepper <= lagmax: - #Fuelcat = where(logical_and(greater_equal(timelagfuel, stepper), less_equal(timelagfuel,stepper+2)),count,Fuelcat) - Fuelcat[logical_and(greater_equal(timelagfuel, stepper), less_equal(timelagfuel,stepper+2))] = count - laglist.append(count) - stepper = stepper + 3 - print stepper - count = count + 1 - #print stepper,count - continue - else: - stepper = 25 - count = 14 - if lagmax >= 25: - if stepper >= 25: - #njensen Fuelcat = where(greater_equal(timelagfuel, stepper),count,Fuelcat) - Fuelcat[timelagfuel > stepper] = count - laglist.append(count) - #print stepper, count - #NOW PERCENT GREEN CATAGORIES - #ON THE PERCENT GREEN TABLE ONLY THE 1st and LAST are different...all other are 9 - #percent increments - #njensen GREENcat = where(greeness,0,0) - GREENcat = zeros(greeness.shape, int32) - grlist = [] - if self._mingreen < 5: - #njensen GREENcat = where(less(greeness, 5), 0, GREENcat) - GREENcat[greeness < 5] = 0 - grlist.append(0) - count = 1 - ender = 8 - stepper = 5 - while count < ender: - #print count, stepper - if stepper+10 > self._mingreen and stepper <= self._maxgreen: - #njensen GREENcat = where(logical_and(greater_equal(greeness,stepper),less(greeness, stepper+10)),count,GREENcat) - GREENcat[logical_and(greater_equal(greeness,stepper),less(greeness, stepper+10))] = count - grlist.append(count) - count = count + 1 - stepper = stepper + 10 - continue - if self._maxgreen >= 75: - #print count, stepper - #njensen GREENcat = where(greater(greeness, 75), count, GREENcat) - GREENcat[greeness > 75] = count - grlist.append(count) - #CALCULATE THE FINE FUEL MOISTURE - x = laglist[0] - y = 
grlist[0] - xsize = laglist[-1] - ysize = grlist[-1] - #njensen finemoisture = where(greeness, 0 , 0) - finemoisture = zeros(greeness.shape, int32) - #print len(GREENcat[0]), len(Fuelcat[0]), len(finemoisture) - while x <= xsize: - row = tabledat[x] - y = grlist[0] - while y <= ysize: - value = row[y] - #print value - #njensen finemoisture = where(logical_and(equal(Fuelcat, x), equal(GREENcat, y)), value, finemoisture) - finemoisture[logical_and(equal(Fuelcat, x), equal(GREENcat, y))] = value - y = y + 1 - continue - x = x + 1 - continue - ###### Commented out the following line as we were - ###### getting abnormally high values when rh was around 50 percent. - ###### Seems when 1-hr Time Lag Fuel Moisture is cat 8 and Percent Green - ###### is between 5-14 percent, Fine Fuel moisture does not go to a 9 as tables - ###### show they should. - finemoisture = where(equal(finemoisture,0),timelagfuel,finemoisture) - - #self.createGrid("EXP","finemoist","SCALAR",finemoisture,timer) - #self.createGrid("EXP","tlagfuel","SCALAR",timelagfuel,timer) - #self.createGrid("EXP","FuelCat","SCALAR",Fuelcat,timer) - #self.createGrid("EXP","GREENcat","SCALAR",GREENcat,timer) - return finemoisture - - #TABLES SECTION - def _calcignition(self, finefuel, temp, Sky): - tabledat = self._ignitioncomptable() - #njensen - #Fuelcat = where(Sky,0,0) - #SunnyTcat = where(Sky,0,0) - #CloudyTcat = where(Sky, 0, 0) - #ignitionsun = where(Sky,0,0) - #ignitioncld = where(Sky,0,0) - Fuelcat = zeros(Sky.shape, int32) - SunnyTcat = zeros(Sky.shape, int32) - CloudyTcat = zeros(Sky.shape, int32) - ignitionsun = zeros(Sky.shape, int32) - ignitioncld = zeros(Sky.shape, int32) - Skycat = where(greater_equal(Sky,80),1,0) - temp = where(logical_and(equal(Skycat,1), greater_equal(temp, 20)),temp-20,temp) - #njensen temp = where(less(temp,10),10,temp) - temp[temp < 10] = 10 - x = len(temp) - y = len(temp[0]) - newsize = x*y #should be same size for all arrays - tdata = reshape(temp,(newsize,)) - tdata = 
sort(tdata) - self._skymint= tdata[0] - if Skycat.any() and self._skymint < self._mint: - self._mint = self._skymint - x = len(finefuel) - y = len(finefuel[0]) - sizer = x * y - fueldata = reshape(finefuel,(sizer,)) - fueldata = sort(fueldata) - fuelmin = fueldata[0] - fuelmax = fueldata[-1] - #First create Fine fuel catagories - #EXACT SAME METHOD AS finefuelcat - #1-6 value the same as the step - stepper = 1 - fuellist = [] - count = 0 - if fuelmin <= 6: - while stepper <= 6: - if stepper >= fuelmin and stepper <= fuelmax: - #njensen Fuelcat = where(equal(finefuel,stepper),count,Fuelcat) - Fuelcat[equal(finefuel,stepper)] = count - fuellist.append(count) - stepper = stepper + 1 - count = count + 1 - # print stepper,count - continue - else: - stepper = 7 - count = 5 - if fuelmin < 19 and fuelmax > 6: - while stepper < 19: - #print stepper - if stepper+1 >= fuelmin and stepper <= fuelmax: - #njensen Fuelcat = where(logical_and(greater_equal(finefuel, stepper), less_equal(finefuel,stepper+1)),count,Fuelcat) - Fuelcat[logical_and(greater_equal(finefuel, stepper), less_equal(finefuel,stepper+1))] = count - fuellist.append(count) - stepper = stepper + 2 - count = count +1 - #print stepper, count - continue - else: - stepper = 19 - count = 11 - if fuelmin < 25 and fuelmax >= 19: - while stepper < 25: - #print str(stepper) + "$$$" - if stepper+2 >= fuelmin and stepper <= fuelmax: - #njensen Fuelcat = where(logical_and(greater_equal(finefuel, stepper), less_equal(finefuel,stepper+2)),count,Fuelcat) - Fuelcat[logical_and(greater_equal(finefuel, stepper), less_equal(finefuel,stepper+2))] = count - fuellist.append(count) - stepper = stepper + 3 - count = count + 1 - #print stepper,count - continue - else: - stepper = 25 - count = 14 - if fuelmax >= 25: - if stepper >= 25: - #Fuelcat = where(greater_equal(finefuel, stepper),count,Fuelcat) - Fuelcat[finefuel >= stepper] = count - fuellist.append(count) - stepper = 20 - tlist = [] - if self._mint < 20: - #njensen SunnyTcat = 
where(less(temp, stepper), 0 , SunnyTcat) - SunnyTcat[temp < stepper] = 0 - tlist.append(0) - count = 1 - ender =len(tabledat) - 1 - while count < ender: - if stepper+10 > self._mint and stepper <= self._maxt: - #njensen SunnyTcat =where(logical_and(greater_equal(temp,stepper), less(temp,stepper+10)),count, SunnyTcat) - SunnyTcat[logical_and(greater_equal(temp,stepper), less(temp,stepper+10))] = count - tlist.append(count) - count = count+1 - stepper = stepper + 10 - continue - if self._maxt >= stepper: - #njensen SunnyTcat = where(greater_equal(temp, stepper), count, SunnyTcat) - SunnyTcat[temp >= stepper] = count - tlist.append(count) - #IGNITION - x=tlist[0] - y=fuellist[0] - xsize = tlist[-1] - ysize = fuellist[-1] - while x <= xsize: - row = tabledat[x] - y = fuellist[0] - while y <= ysize: - value = row[y] - #njensen ignitionsun = where(logical_and(equal(SunnyTcat, x), equal(Fuelcat, y)), value, ignitionsun) - ignitionsun[logical_and(equal(SunnyTcat, x), equal(Fuelcat, y))] = value - y = y + 1 - continue - x = x + 1 - continue - return ignitionsun - - def _calcRFDINDEX(self, ignition, wind): - #THIS TOOL WILL CALCULATE THE RANGELAND FIRE DANGER...BASED OF THE IGNITION Component - #and WINDSPEED - #variables - x = len(ignition) - y = len(ignition[0]) - sizer = x * y - igdata = reshape(ignition,(sizer,)) - igdata = sort(igdata) - igmin = igdata[0] - igmax = igdata[-1] - #njensen RFD = where(ignition,0,0) - RFD = zeros(ignition.shape, int32) - spd = wind[0] - wind = spd * 1.15 #convert to mph - self._minwind = self._minwind * 1.15 - self._maxwind = self._maxwind * 1.15 - tabledat = self._RFDtable() - #SPLIT DATA INTO x,y "component" grids Ignition -x Wind speed y - #First ignition - count = 1 - #SPECIAL CASE For 0 - iglist = [] - #njensen IGcat = where(ignition,0,0) - IGcat = zeros(ignition.shape, int32) - if igmin <= 0: - #njensen IGcat = where(equal(ignition,0),0,IGcat) - IGcat[equal(ignition,0)] = 0 - #no need to iterate through this axis as RFD will be 0 - 
stepper = 1 - ender = len(tabledat) - 1 - while count <= ender: - #print str(stepper) + "-" + str(stepper+5) + ":::" + str(count) - if stepper+5 > igmin and stepper <= igmax: - #njensen IGcat = where(logical_and(greater_equal(ignition,stepper), less(ignition,stepper+5)), count, IGcat) - IGcat[logical_and(greater_equal(ignition,stepper), less(ignition,stepper+5))] = count - iglist.append(count) - count = count + 1 - stepper = stepper + 5 - continue - #WIND (y axis) - #1-8 mph are every 1 mph - #after 8 mph it is every 2 - ender = len(tabledat[0])-2 - count = 0 - wlist = [] - stepper = 0.0 #am using .5 decimal place to account for conversions leaving remainders - #ie 0-1 = > 0 <1.5 - #njensen Wndcat = where(wind,0,0) - Wndcat = zeros(wind.shape, int32) - test = 0 - while count <=7: - #print count - #print ":::::" - if count == 0 and self._minwind <= 1.4: - #njensen Wndcat = where(logical_and(greater_equal(wind, stepper), less(wind, stepper+1.5)), count, Wndcat) - Wndcat[logical_and(greater_equal(wind, stepper), less(wind, stepper+1.5))] = count - #print stepper, stepper + 1.5 - #print "?????????????????" 
- wlist.append(count) - stepper = stepper + 1.5 - test = 1 - else: - if test != 1: - test = 1 - count = 1 - stepper = 1.5 #should only happen once - if stepper+1.0 > self._minwind and stepper <= self._maxwind: - #njensen Wndcat = where(logical_and(greater_equal(wind, stepper), less(wind, stepper+1)), count, Wndcat) - Wndcat[logical_and(greater_equal(wind, stepper), less(wind, stepper+1))] = count - wlist.append(count) - stepper = stepper + 1 - count = count + 1 - while count <= ender: - #every 2 mph until 26.5 mph - #print count, stepper, stepper + 2 - if stepper+2.0 > self._minwind and stepper <= self._maxwind: - #njensen Wndcat = where(logical_and(greater_equal(wind, stepper), less(wind, stepper+2)), count, Wndcat) - Wndcat[logical_and(greater_equal(wind, stepper), less(wind, stepper+2))] = count - wlist.append(count) - stepper = stepper + 2 - count = count + 1 - continue - if count > ender: - #print count, stepper - if stepper <= self._maxwind: - #njensen Wndcat = where(greater_equal(wind, stepper), count, Wndcat) - Wndcat[wind >= stepper] = count - wlist.append(count) - x=iglist[0] - y=wlist[0] - xsize = iglist[-1] - ysize = wlist[-1] - while x <= xsize: - row = tabledat[x] - y = wlist[0] - while y <= ysize: - value = row[y] - #njensen RFD = where(logical_and(equal(IGcat, x), equal(Wndcat, y)), value, RFD) - RFD[logical_and(equal(IGcat, x), equal(Wndcat, y))] = value - y = y + 1 - continue - x = x + 1 - continue -## if igmin <= 0: -## RFD = where(equal(IGcat,0),0,RFD) - return RFD - - def _catagoricalRFD(self,RFD): - self._cat = self._statecriteria() - keys = self._cat.keys() - #njensen newRfd = where(RFD,0,0) - newRfd = zeros(RFD.shape, int32) - for area in keys: - datalist = self._cat[str(area)] - areamask = self.encodeEditArea(area) - #njensen tempo = where(RFD,0,0) - tempo = zeros(RFD.shape, int32) - for config in datalist: - cat = config[0] - min = config[1] - max = config[2] - #njensen tempo = where(logical_and(greater_equal(RFD,min), 
less_equal(RFD,max)),cat,tempo) - tempo[logical_and(greater_equal(RFD,min), less_equal(RFD,max))] = cat - newRfd = where(greater_equal(areamask,1),tempo,newRfd) - continue - - return newRfd - def _determinemaxmin(self, T, RH,Wind, PERCENTGREEN): - #in order to save time this script will try to determine the maxium - #and minimum range to look for each variable - #for rh - #rhdata = asarray(RH) - #tdata = asarray(T) - #wdata = asarray(Wind[0]) - x = len(RH) - y = len(RH[0]) - newsize = x*y #should be same size for all arrays - rhdata = reshape(RH,(newsize,)) - tdata = reshape(T,(newsize,)) - winddata = reshape(Wind[0],(newsize,)) - greendata = reshape(PERCENTGREEN, (newsize,)) - rhdata = sort(rhdata) - tdata = sort(tdata) - winddata = sort(winddata) - greendata = sort(greendata) - self._minrh = rhdata[0] - self._maxrh = rhdata[-1] - self._mint= tdata[0] - self._maxt = tdata[-1] - self._minwind =winddata[0] - self._maxwind =winddata[-1] - self._mingreen =greendata[0] - self._maxgreen =greendata[-1] - return - - def _onehrtimelagtable(self): - #returns two lists 1 for sunny 1 for cloudy - #BASED ON TABLE 1 - #setup id [temprange[rhrangevalues]] - #docmunetation pending - sunnyhrlag = [[1,2,2,3,4,5,5,6,7,8,8,8,9,9,10,11,12,12,13,13,14], - [1,2,2,3,4,5,5,6,7,7,7,8,9,9,10,10,11,12,13,13,13], - [1,2,2,3,4,5,5,6,6,7,7,8,8,9, 9,10,11,12,12,12,13], - [1,1,2,2,3,4,5,5,6,7,7,8,8,8, 9,10,10,11,12,12,13], - [1,1,2,2,3,4,4,5,6,7,7,8,8,8, 9,10,10,11,12,12,13], - [1,1,2,2,3,4,4,5,6,7,7,8,8,8, 9,10,10,11,12,12,13] - ] - cloudylag = [[1,2,4,5,5,6,7,8,9,10,11,12,12,14,15,17,19,22,25,25,25], - [1,2,3,4,5,6,7,8,9, 9,11,11,12,13,14,16,18,21,24,25,25], - [1,2,3,4,5,6,6,8,8, 9,10,11,11,12,14,16,17,20,23,25,25], - [1,2,3,4,4,5,6,7,8, 9,10,10,11,12,13,15,17,20,23,25,25], - [1,2,3,3,4,5,6,7,8, 9, 9,10,10,11,13,14,16,19,22,25,25], - [1,2,2,3,4,5,6,6,8, 8, 9, 9,10,11,12,14,16,19,21,24,25] - ] - return sunnyhrlag, cloudylag - - def _finefuelmoisturetable(self): - #returns a list based of Table2 
- #will comapre 1 hr fuel moisture to percent green - #will be a two dimensional list with x =1 hr fuel moisture catagory - #and y (the internal list being the the fine fuel moisture value with the - #percent green catagory represented by the y index - #0 represents a NO CHANGE value use the 1 hr time lag value - table = [[0,2,3,4,5,8,13,18,21], - [0,3,4,5,7,10,16,19,22], - [0,4,5,7,9,14,18,20,22], - [0,5,6,8,12,16,19,21,23], - [0,6,8,11,14,18,20,22,23], - [0,7,10,13,16,19,20,22,23], - [0,9,12,15,18,20,21,22,23], - [0,12,15,17,19,20,22,23,24], - [0,14,17,18,20,21,22,23,24], - [0,16,18,19,20,21,22,23,24], - [0,17,19,20,21,22,22,23,24], - [0,19,20,21,21,22,23,23,24], - [0,21,21,22,22,23,23,24,24], - [0,24,24,24,24,24,24,24,25], - [0,25,25,25,25,25,25,25,25] - ] - return table - - def _ignitioncomptable(self): - #will return one table (only difference is temp ranges for cloudy vs sunny - tble = [ - [88,75,64,54,46,39,30,21,14,9,5,2,0,0,0], - [90,77,66,56,48,41,32,22,15,9,5,2,0,0,0], - [93,80,68,58,50,42,33,23,16,10,6,3,0,0,0], - [95,82,71,61,52,44,35,25,17,11,7,3,1,0,0], - [98,85,73,63,54,46,36,26,18,12,7,4,1,0,0], - [100,87,76,65,56,48,38,28,19,13,8,5,1,0,0], - [100,90,78,68,58,50,40,29,21,14,9,5,2,0,0], - [100,93,81,70,61,53,42,31,22,15,10,6,2,0,0], - [100,97,84,73,63,55,44,32,23,16,11,7,3,0,0], - [100,100,87,76,66,57,46,34,25,18,12,8,4,0,0], - [100,100,90,79,69,60,49,36,27,19,13,9,4,1,0], - [100,100,92,80,70,61,50,37,28,20,14,9,5,1,0], - ] - - return tble - def _RFDtable(self): - #IGNITION COMPONENT(x) vs Wind spd (y) - return [ - [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], - [ 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 6, 6], - [ 8, 8, 8, 8, 9, 9, 9, 9, 9, 10, 11, 11, 12, 13, 14, 14, 15, 16], - [ 13, 14, 14, 14, 14, 14, 14, 15, 15, 16, 17, 18, 19, 21, 22, 25, 25, 25], - [ 18, 19, 19, 19, 19, 20, 20, 20, 21, 22, 24, 25, 27, 29, 30, 32, 35, 35], - [ 23, 24, 24, 24, 25, 25, 26, 26, 27, 29, 30, 32, 34, 36, 39, 42, 44, 46], - [ 29, 29, 29, 30, 30, 31, 
31, 32, 33, 35, 37, 39, 41, 44, 47, 51, 54, 57], - [ 34, 34, 34, 35, 35, 36, 37, 38, 39, 41, 43, 46, 49, 52, 56, 60, 64, 68], - [ 39, 39, 40, 40, 40, 41, 42, 43, 45, 47, 50, 53, 57, 60, 64, 69, 74, 79], - [ 44, 44, 45, 45, 46, 47, 48, 49, 51, 54, 57, 60, 64, 68, 73, 78, 83, 89], - [ 49, 49, 50, 51, 51, 52, 53, 55, 57, 60, 63, 67, 71, 76, 81, 87, 92, 97], - [ 54, 55, 55, 56, 57, 58, 59, 60, 63, 66, 70, 74, 79, 84, 90, 96,100,100], - [ 59, 60, 60, 61, 62, 63, 65, 66, 68, 72, 76, 81, 86, 92, 98,100,100,100], - [ 64, 65, 66, 66, 68, 69, 70, 72, 74, 78, 83, 88, 94,100,100,100,100,100], - [ 69, 70, 71, 72, 73, 74, 76, 77, 80, 85, 90, 95,100,100,100,100,100,100], - [ 74, 75, 76, 77, 78, 80, 81, 83, 86, 91, 96,100,100,100,100,100,100,100], - [ 79, 80, 81, 82, 84, 85, 89, 89, 92, 97,100,100,100,100,100,100,100,100], - [ 85, 85, 86, 87, 89, 91, 92, 95, 98,100,100,100,100,100,100,100,100,100], - [ 90, 91, 92, 93, 94, 96, 98,100,100,100,100,100,100,100,100,100,100,100], - [ 95, 96, 97, 98,100,100,100,100,100,100,100,100,100,100,100,100,100,100], - [100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100] - - ] - def _statecriteria(self): - ####CONFIGURATION FOR EACH STATE - ###DICTIONARY FORMAT - ## DICT = {"STATE", [(catnumber,low,high), (catnumber,low,high)]} - statedict = {"Kansas" : [(0,0,30), (1,31,50), (2,51,70), (3,71,94), (4,95,100)], - "Colorado" : [(0,0,30), (1,31,50), (2,51,70), (3,71,94), (4,95,100)], - "Nebraska" : [(0,0,30), (1,31,50), (2,51,70), (3,71,94), (4,95,100)], - "Iowa" : [(0,0,30), (1,31,50), (2,51,70), (3,71,94), (4,95,100)], - } - return statedict - - - def _preciptthresh(self): - #POP WHERE RFD WILL BE SET TO LOW - #SET TO 101 if you want this disabled - return 65 - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# CalculateRFD.py +# +# Author: dtomalak +# Optimized by njensen +# ---------------------------------------------------------------------------- + + +ToolType = "numeric" +WeatherElementEdited = "RFD" +from numpy import * +import time +HideTool = 0 + +# Set up Class +import SmartScript +# For available commands, see SmartScript + + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Required Method: Execute + # %comment + # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... + + def execute(self, T, RH, Wind, Sky, PoP, RFD, PERCENTGREEN, varDict, GridTimeRange): + "Put your tool description here" + t0 = time.time() + self._determinemaxmin(T, RH, Wind, PERCENTGREEN) #find absolute max/MIN values + self._popthresh = self._preciptthresh() + #st = time.time() + fuellag = self._create1hrtimelag(T,RH,Sky,GridTimeRange) + #ed = time.time() + #diff = ed- st + #diff = str(diff) + #print diff + " seconds to run fuel lag" + #st = time.time() + finemoist = self._calcfinefuelmoisture(fuellag, PERCENTGREEN, GridTimeRange) + #ed = time.time() + #diff = ed- st + #diff = str(diff) + #print diff + " seconds to run fine moist" + #finefuel, temp, Sky + #st = time.time() + ignite = self._calcignition(finemoist, T, Sky) + #ed = time.time() + #diff = ed- st + #diff = str(diff) + #print diff + " seconds to run ignition" + #st = time.time() + RFD = self._calcRFDINDEX(ignite, Wind) + #ed = time.time() + #diff = ed- st + #diff = str(diff) + #print diff + " seconds to run RFD" + RFDcat = self._catagoricalRFD(RFD) + #CLEAN UP WHERE SIG PRECIP IS OCCURING + #st = time.time() + #njensen RFDcat = where(greater_equal(PoP,self._popthresh),0,RFDcat) + RFDcat[greater_equal(PoP,self._popthresh)] = 0 + #ed = time.time() + #diff = ed- st + #diff = str(diff) + #print diff + " seconds to run RFDcat" + #print Wind[1] + #self.createGrid("EXP","finemoiost","SCALAR",finemoist,GridTimeRange) + 
#self.createGrid("EXP","ignite","SCALAR",ignite,GridTimeRange) + #self.createGrid("EXP","RFD","SCALAR",RFD,GridTimeRange) + #self.createGrid("EXP","SunT","SCALAR",sun,GridTimeRange) + #self.createGrid("EXP","FuelT","SCALAR",fuel,GridTimeRange) + t1 = time.time() + print("inside CalculateRFD_NDJ took:", (t1-t0)) + return RFDcat + + #EACH STEP IN THE PROCESS WILL GET ITS OWN MODULE + #it appears that there are 4 main steps + #CALCULATIONS + #SINCE THIS TOOL REQUIRES SEVERAL DIFFERENT CALCULATIONS/COMPARISONS + #WILL DO EACH MODULALLY + def _create1hrtimelag(self, Tgrid, RHgrid,Skygrid,timer): + #Step one Make RH and T Catagory grids + #there are 6 temperature catagories and 21 Rh catagories + #create the temp catagories 0 - 5 + #njensen Tcat = where(Tgrid,0,0) + Tcat = zeros(Tgrid.shape, int32) + count = 0 + starter = 29 + tlist = [] + if self._mint <= starter: + #njensen Tcat = where(less_equal(Tgrid, starter), count, Tcat) + Tcat[Tgrid <= starter] = count + tlist.append(count) + count = 1 + while starter < 109: + if starter > self._maxt: + break + if starter+20 >= self._mint and starter <= self._maxt: + #njensen Tcat = where(logical_and(greater(Tgrid, starter), less_equal(Tgrid,starter + 20)), count, Tcat) + Tcat[logical_and(greater(Tgrid, starter), less_equal(Tgrid,starter + 20))] = count + tlist.append(count) + count = count + 1 + starter = starter + 20 + continue + if self._maxt >= 109: + #njensen Tcat = where(greater_equal(Tgrid, 109), count, Tcat) + Tcat[Tgrid >= 109] = count + tlist.append(count) + #NOW THE RHCAT + #njensen RHcat = where(RHgrid,0,0) + RHcat = zeros(RHgrid.shape, int32) + #njensen RHcat = where(less(RHgrid, self._minrh), 100, RHcat) + RHcat[RHgrid < self._minrh] = 100 + validrh = [] + if self._minrh <= 4: + #njensen RHcat = where(less_equal(RHgrid, 4), 0 , RHcat) + RHcat[RHgrid <= 4] = 0 + validrh.append(0) + starter = 1 + baseRh = 4 + while starter <= 19: + if baseRh > self._maxrh: + break + if self._minrh <= baseRh + 5 and baseRh <= 
self._maxrh: + #since RH values are even 4 % increments can use this short cut + #njensen RHcat = where(logical_and(greater(RHgrid, baseRh), less_equal(RHgrid, baseRh + 5)), starter, RHcat) + RHcat[logical_and(greater(RHgrid, baseRh), less_equal(RHgrid, baseRh + 5))] = starter + validrh.append(starter) + starter = starter + 1 + baseRh = baseRh + 5 + continue + + if self._maxrh > 99: + #njensen RHcat = where(greater(RHgrid, 99), 20, RHcat) + RHcat[RHgrid > 99] = 20 + validrh.append(20) + SKYcat = where(greater_equal(Skygrid, 75), 1, 0) + suntable,cloudtable = self._onehrtimelagtable() + + + ####NOW THE DATA HAS BEEN CATAGORIZED WILL HAVE TO STEP THROUGH EACH + #CATAGORY TO DETERMINE 1hr fuel lag grid + x = tlist[0] + y = validrh[0] + xsize = tlist[-1] + ysize = validrh[-1] + #njensen sunnyfuel = where(Skygrid, 0 , 0) + sunnyfuel = zeros(Skygrid.shape, int32) + hrtimelag = SKYcat +## while x <= xsize: +## row = suntable[x] +## rowcld = cloudtable[x] +## y = 0 +## while y <= ysize: +## value = row[y] +## valuecld = rowcld[y] +## tempo = where(logical_and(equal(Tcat, x), equal(RHcat, y)), 1, 0) +## fl = value#where(equal(SKYcat, 1), valuecld,value) +## sunnyfuel = tempo * fl #all locations that have 0 for tempo will be set to zero +## #sunnyfuel = where(logical_and(equal(Tcat, x), equal(RHcat, y)), value, sunnyfuel) +## y = y + 1 +## hrtimelag = where(greater(sunnyfuel,0),sunnyfuel, hrtimelag) +## continue +## x = x + 1 +## continue + while x <= xsize: + row = suntable[x] + y = validrh[0] + while y <= ysize: + value = row[y] + #njensen sunnyfuel = where(logical_and(equal(Tcat, x), equal(RHcat, y)), value, sunnyfuel) + sunnyfuel[logical_and(equal(Tcat, x), equal(RHcat, y))] = value + y = y + 1 + continue + x = x + 1 + continue + if SKYcat.any() : + #njensen cloudyfuel = where(Skygrid, 0 , 0) + cloudyfuel = zeros(Skygrid.shape, int32) + x = tlist[0] + y = validrh[0] + xsize = tlist[-1] + ysize = validrh[-1] + while x <= xsize: + row = cloudtable[x] + #row = suntable[x] + 
y = validrh[0] + while y <= ysize: + value = row[y] + #njensen cloudyfuel = where(logical_and(equal(Tcat, x), equal(RHcat, y)), value, cloudyfuel) + cloudyfuel[logical_and(equal(Tcat, x), equal(RHcat, y))] = value + y = y + 1 + continue + x = x + 1 + continue + hrtimelag = where(Skygrid,0,0) + if SKYcat.any(): + hrtimelag = where(equal(SKYcat, 1), cloudyfuel, sunnyfuel) + else: + hrtimelag = sunnyfuel + #self.createGrid("EXP","cldmoist","SCALAR",cloudyfuel,timer) + #self.createGrid("EXP","Sunmoist","SCALAR",hrtimelag,timer) + #self.createGrid("EXP","RHCAT","SCALAR",RHcat,timer) + #self.createGrid("EXP","SKYCAT","SCALAR",SKYcat,timer) + + return hrtimelag + def _calcfinefuelmoisture(self, timelagfuel, greeness, timer): + tabledat = self._finefuelmoisturetable() + x = len(timelagfuel) + y = len(timelagfuel[0]) + sizer = x * y + lagdata = reshape(timelagfuel,(sizer,)) + lagdata = sort(lagdata) + lagmin = lagdata[0] + lagmax = lagdata[-1] + #STEP ONE NEED TO CATAGORIZE THE 1hr time time lag and percent green data + #SIMILAR TO TEMPS AND RH + #THERE ARE 15 CATAGORIES FOR Fuel moisture and roughly 9 for precent green + #njensen Fuelcat = where(greeness,0,0) + Fuelcat = zeros(greeness.shape, int32) + #1-6 value the same as the step + stepper = 1 + count = 0 + laglist = [] + flcatlngth = len(tabledat) + if lagmin <= 6: + while stepper <= 6: + if stepper >= lagmin and stepper <= lagmax: + #njensen Fuelcat = where(equal(timelagfuel,stepper),count,Fuelcat) + Fuelcat[equal(timelagfuel,stepper)] = count + laglist.append(count) + stepper = stepper + 1 + count = count + 1 + # print stepper,count + continue + else: + stepper = 7 + count = 5 + if lagmin < 19 and lagmax > 6: + while stepper < 19: + #print stepper + if stepper+1 >= lagmin and stepper <= lagmax: + #njensen Fuelcat = where(logical_and(greater_equal(timelagfuel, stepper), less_equal(timelagfuel,stepper+1)),count,Fuelcat) + Fuelcat[logical_and(greater_equal(timelagfuel, stepper), less_equal(timelagfuel,stepper+1))] = 
count + laglist.append(count) + stepper = stepper + 2 + count = count + 1 + #print stepper, count + continue + else: + stepper = 19 + count = 11 + if lagmin < 25 and lagmax >= 19: + while stepper < 25: + #print str(stepper) + "$$$" + if stepper+2 >= lagmin and stepper <= lagmax: + #Fuelcat = where(logical_and(greater_equal(timelagfuel, stepper), less_equal(timelagfuel,stepper+2)),count,Fuelcat) + Fuelcat[logical_and(greater_equal(timelagfuel, stepper), less_equal(timelagfuel,stepper+2))] = count + laglist.append(count) + stepper = stepper + 3 + print(stepper) + count = count + 1 + #print stepper,count + continue + else: + stepper = 25 + count = 14 + if lagmax >= 25: + if stepper >= 25: + #njensen Fuelcat = where(greater_equal(timelagfuel, stepper),count,Fuelcat) + Fuelcat[timelagfuel > stepper] = count + laglist.append(count) + #print stepper, count + #NOW PERCENT GREEN CATAGORIES + #ON THE PERCENT GREEN TABLE ONLY THE 1st and LAST are different...all other are 9 + #percent increments + #njensen GREENcat = where(greeness,0,0) + GREENcat = zeros(greeness.shape, int32) + grlist = [] + if self._mingreen < 5: + #njensen GREENcat = where(less(greeness, 5), 0, GREENcat) + GREENcat[greeness < 5] = 0 + grlist.append(0) + count = 1 + ender = 8 + stepper = 5 + while count < ender: + #print count, stepper + if stepper+10 > self._mingreen and stepper <= self._maxgreen: + #njensen GREENcat = where(logical_and(greater_equal(greeness,stepper),less(greeness, stepper+10)),count,GREENcat) + GREENcat[logical_and(greater_equal(greeness,stepper),less(greeness, stepper+10))] = count + grlist.append(count) + count = count + 1 + stepper = stepper + 10 + continue + if self._maxgreen >= 75: + #print count, stepper + #njensen GREENcat = where(greater(greeness, 75), count, GREENcat) + GREENcat[greeness > 75] = count + grlist.append(count) + #CALCULATE THE FINE FUEL MOISTURE + x = laglist[0] + y = grlist[0] + xsize = laglist[-1] + ysize = grlist[-1] + #njensen finemoisture = where(greeness, 0 
, 0) + finemoisture = zeros(greeness.shape, int32) + #print len(GREENcat[0]), len(Fuelcat[0]), len(finemoisture) + while x <= xsize: + row = tabledat[x] + y = grlist[0] + while y <= ysize: + value = row[y] + #print value + #njensen finemoisture = where(logical_and(equal(Fuelcat, x), equal(GREENcat, y)), value, finemoisture) + finemoisture[logical_and(equal(Fuelcat, x), equal(GREENcat, y))] = value + y = y + 1 + continue + x = x + 1 + continue + ###### Commented out the following line as we were + ###### getting abnormally high values when rh was around 50 percent. + ###### Seems when 1-hr Time Lag Fuel Moisture is cat 8 and Percent Green + ###### is between 5-14 percent, Fine Fuel moisture does not go to a 9 as tables + ###### show they should. + finemoisture = where(equal(finemoisture,0),timelagfuel,finemoisture) + + #self.createGrid("EXP","finemoist","SCALAR",finemoisture,timer) + #self.createGrid("EXP","tlagfuel","SCALAR",timelagfuel,timer) + #self.createGrid("EXP","FuelCat","SCALAR",Fuelcat,timer) + #self.createGrid("EXP","GREENcat","SCALAR",GREENcat,timer) + return finemoisture + + #TABLES SECTION + def _calcignition(self, finefuel, temp, Sky): + tabledat = self._ignitioncomptable() + #njensen + #Fuelcat = where(Sky,0,0) + #SunnyTcat = where(Sky,0,0) + #CloudyTcat = where(Sky, 0, 0) + #ignitionsun = where(Sky,0,0) + #ignitioncld = where(Sky,0,0) + Fuelcat = zeros(Sky.shape, int32) + SunnyTcat = zeros(Sky.shape, int32) + CloudyTcat = zeros(Sky.shape, int32) + ignitionsun = zeros(Sky.shape, int32) + ignitioncld = zeros(Sky.shape, int32) + Skycat = where(greater_equal(Sky,80),1,0) + temp = where(logical_and(equal(Skycat,1), greater_equal(temp, 20)),temp-20,temp) + #njensen temp = where(less(temp,10),10,temp) + temp[temp < 10] = 10 + x = len(temp) + y = len(temp[0]) + newsize = x*y #should be same size for all arrays + tdata = reshape(temp,(newsize,)) + tdata = sort(tdata) + self._skymint= tdata[0] + if Skycat.any() and self._skymint < self._mint: + self._mint = 
self._skymint + x = len(finefuel) + y = len(finefuel[0]) + sizer = x * y + fueldata = reshape(finefuel,(sizer,)) + fueldata = sort(fueldata) + fuelmin = fueldata[0] + fuelmax = fueldata[-1] + #First create Fine fuel catagories + #EXACT SAME METHOD AS finefuelcat + #1-6 value the same as the step + stepper = 1 + fuellist = [] + count = 0 + if fuelmin <= 6: + while stepper <= 6: + if stepper >= fuelmin and stepper <= fuelmax: + #njensen Fuelcat = where(equal(finefuel,stepper),count,Fuelcat) + Fuelcat[equal(finefuel,stepper)] = count + fuellist.append(count) + stepper = stepper + 1 + count = count + 1 + # print stepper,count + continue + else: + stepper = 7 + count = 5 + if fuelmin < 19 and fuelmax > 6: + while stepper < 19: + #print stepper + if stepper+1 >= fuelmin and stepper <= fuelmax: + #njensen Fuelcat = where(logical_and(greater_equal(finefuel, stepper), less_equal(finefuel,stepper+1)),count,Fuelcat) + Fuelcat[logical_and(greater_equal(finefuel, stepper), less_equal(finefuel,stepper+1))] = count + fuellist.append(count) + stepper = stepper + 2 + count = count +1 + #print stepper, count + continue + else: + stepper = 19 + count = 11 + if fuelmin < 25 and fuelmax >= 19: + while stepper < 25: + #print str(stepper) + "$$$" + if stepper+2 >= fuelmin and stepper <= fuelmax: + #njensen Fuelcat = where(logical_and(greater_equal(finefuel, stepper), less_equal(finefuel,stepper+2)),count,Fuelcat) + Fuelcat[logical_and(greater_equal(finefuel, stepper), less_equal(finefuel,stepper+2))] = count + fuellist.append(count) + stepper = stepper + 3 + count = count + 1 + #print stepper,count + continue + else: + stepper = 25 + count = 14 + if fuelmax >= 25: + if stepper >= 25: + #Fuelcat = where(greater_equal(finefuel, stepper),count,Fuelcat) + Fuelcat[finefuel >= stepper] = count + fuellist.append(count) + stepper = 20 + tlist = [] + if self._mint < 20: + #njensen SunnyTcat = where(less(temp, stepper), 0 , SunnyTcat) + SunnyTcat[temp < stepper] = 0 + tlist.append(0) + count = 1 + 
ender =len(tabledat) - 1 + while count < ender: + if stepper+10 > self._mint and stepper <= self._maxt: + #njensen SunnyTcat =where(logical_and(greater_equal(temp,stepper), less(temp,stepper+10)),count, SunnyTcat) + SunnyTcat[logical_and(greater_equal(temp,stepper), less(temp,stepper+10))] = count + tlist.append(count) + count = count+1 + stepper = stepper + 10 + continue + if self._maxt >= stepper: + #njensen SunnyTcat = where(greater_equal(temp, stepper), count, SunnyTcat) + SunnyTcat[temp >= stepper] = count + tlist.append(count) + #IGNITION + x=tlist[0] + y=fuellist[0] + xsize = tlist[-1] + ysize = fuellist[-1] + while x <= xsize: + row = tabledat[x] + y = fuellist[0] + while y <= ysize: + value = row[y] + #njensen ignitionsun = where(logical_and(equal(SunnyTcat, x), equal(Fuelcat, y)), value, ignitionsun) + ignitionsun[logical_and(equal(SunnyTcat, x), equal(Fuelcat, y))] = value + y = y + 1 + continue + x = x + 1 + continue + return ignitionsun + + def _calcRFDINDEX(self, ignition, wind): + #THIS TOOL WILL CALCULATE THE RANGELAND FIRE DANGER...BASED OF THE IGNITION Component + #and WINDSPEED + #variables + x = len(ignition) + y = len(ignition[0]) + sizer = x * y + igdata = reshape(ignition,(sizer,)) + igdata = sort(igdata) + igmin = igdata[0] + igmax = igdata[-1] + #njensen RFD = where(ignition,0,0) + RFD = zeros(ignition.shape, int32) + spd = wind[0] + wind = spd * 1.15 #convert to mph + self._minwind = self._minwind * 1.15 + self._maxwind = self._maxwind * 1.15 + tabledat = self._RFDtable() + #SPLIT DATA INTO x,y "component" grids Ignition -x Wind speed y + #First ignition + count = 1 + #SPECIAL CASE For 0 + iglist = [] + #njensen IGcat = where(ignition,0,0) + IGcat = zeros(ignition.shape, int32) + if igmin <= 0: + #njensen IGcat = where(equal(ignition,0),0,IGcat) + IGcat[equal(ignition,0)] = 0 + #no need to iterate through this axis as RFD will be 0 + stepper = 1 + ender = len(tabledat) - 1 + while count <= ender: + #print str(stepper) + "-" + 
str(stepper+5) + ":::" + str(count) + if stepper+5 > igmin and stepper <= igmax: + #njensen IGcat = where(logical_and(greater_equal(ignition,stepper), less(ignition,stepper+5)), count, IGcat) + IGcat[logical_and(greater_equal(ignition,stepper), less(ignition,stepper+5))] = count + iglist.append(count) + count = count + 1 + stepper = stepper + 5 + continue + #WIND (y axis) + #1-8 mph are every 1 mph + #after 8 mph it is every 2 + ender = len(tabledat[0])-2 + count = 0 + wlist = [] + stepper = 0.0 #am using .5 decimal place to account for conversions leaving remainders + #ie 0-1 = > 0 <1.5 + #njensen Wndcat = where(wind,0,0) + Wndcat = zeros(wind.shape, int32) + test = 0 + while count <=7: + #print count + #print ":::::" + if count == 0 and self._minwind <= 1.4: + #njensen Wndcat = where(logical_and(greater_equal(wind, stepper), less(wind, stepper+1.5)), count, Wndcat) + Wndcat[logical_and(greater_equal(wind, stepper), less(wind, stepper+1.5))] = count + #print stepper, stepper + 1.5 + #print "?????????????????" 
+ wlist.append(count) + stepper = stepper + 1.5 + test = 1 + else: + if test != 1: + test = 1 + count = 1 + stepper = 1.5 #should only happen once + if stepper+1.0 > self._minwind and stepper <= self._maxwind: + #njensen Wndcat = where(logical_and(greater_equal(wind, stepper), less(wind, stepper+1)), count, Wndcat) + Wndcat[logical_and(greater_equal(wind, stepper), less(wind, stepper+1))] = count + wlist.append(count) + stepper = stepper + 1 + count = count + 1 + while count <= ender: + #every 2 mph until 26.5 mph + #print count, stepper, stepper + 2 + if stepper+2.0 > self._minwind and stepper <= self._maxwind: + #njensen Wndcat = where(logical_and(greater_equal(wind, stepper), less(wind, stepper+2)), count, Wndcat) + Wndcat[logical_and(greater_equal(wind, stepper), less(wind, stepper+2))] = count + wlist.append(count) + stepper = stepper + 2 + count = count + 1 + continue + if count > ender: + #print count, stepper + if stepper <= self._maxwind: + #njensen Wndcat = where(greater_equal(wind, stepper), count, Wndcat) + Wndcat[wind >= stepper] = count + wlist.append(count) + x=iglist[0] + y=wlist[0] + xsize = iglist[-1] + ysize = wlist[-1] + while x <= xsize: + row = tabledat[x] + y = wlist[0] + while y <= ysize: + value = row[y] + #njensen RFD = where(logical_and(equal(IGcat, x), equal(Wndcat, y)), value, RFD) + RFD[logical_and(equal(IGcat, x), equal(Wndcat, y))] = value + y = y + 1 + continue + x = x + 1 + continue +## if igmin <= 0: +## RFD = where(equal(IGcat,0),0,RFD) + return RFD + + def _catagoricalRFD(self,RFD): + self._cat = self._statecriteria() + keys = list(self._cat.keys()) + #njensen newRfd = where(RFD,0,0) + newRfd = zeros(RFD.shape, int32) + for area in keys: + datalist = self._cat[str(area)] + areamask = self.encodeEditArea(area) + #njensen tempo = where(RFD,0,0) + tempo = zeros(RFD.shape, int32) + for config in datalist: + cat = config[0] + min = config[1] + max = config[2] + #njensen tempo = where(logical_and(greater_equal(RFD,min), 
less_equal(RFD,max)),cat,tempo) + tempo[logical_and(greater_equal(RFD,min), less_equal(RFD,max))] = cat + newRfd = where(greater_equal(areamask,1),tempo,newRfd) + continue + + return newRfd + def _determinemaxmin(self, T, RH,Wind, PERCENTGREEN): + #in order to save time this script will try to determine the maxium + #and minimum range to look for each variable + #for rh + #rhdata = asarray(RH) + #tdata = asarray(T) + #wdata = asarray(Wind[0]) + x = len(RH) + y = len(RH[0]) + newsize = x*y #should be same size for all arrays + rhdata = reshape(RH,(newsize,)) + tdata = reshape(T,(newsize,)) + winddata = reshape(Wind[0],(newsize,)) + greendata = reshape(PERCENTGREEN, (newsize,)) + rhdata = sort(rhdata) + tdata = sort(tdata) + winddata = sort(winddata) + greendata = sort(greendata) + self._minrh = rhdata[0] + self._maxrh = rhdata[-1] + self._mint= tdata[0] + self._maxt = tdata[-1] + self._minwind =winddata[0] + self._maxwind =winddata[-1] + self._mingreen =greendata[0] + self._maxgreen =greendata[-1] + return + + def _onehrtimelagtable(self): + #returns two lists 1 for sunny 1 for cloudy + #BASED ON TABLE 1 + #setup id [temprange[rhrangevalues]] + #docmunetation pending + sunnyhrlag = [[1,2,2,3,4,5,5,6,7,8,8,8,9,9,10,11,12,12,13,13,14], + [1,2,2,3,4,5,5,6,7,7,7,8,9,9,10,10,11,12,13,13,13], + [1,2,2,3,4,5,5,6,6,7,7,8,8,9, 9,10,11,12,12,12,13], + [1,1,2,2,3,4,5,5,6,7,7,8,8,8, 9,10,10,11,12,12,13], + [1,1,2,2,3,4,4,5,6,7,7,8,8,8, 9,10,10,11,12,12,13], + [1,1,2,2,3,4,4,5,6,7,7,8,8,8, 9,10,10,11,12,12,13] + ] + cloudylag = [[1,2,4,5,5,6,7,8,9,10,11,12,12,14,15,17,19,22,25,25,25], + [1,2,3,4,5,6,7,8,9, 9,11,11,12,13,14,16,18,21,24,25,25], + [1,2,3,4,5,6,6,8,8, 9,10,11,11,12,14,16,17,20,23,25,25], + [1,2,3,4,4,5,6,7,8, 9,10,10,11,12,13,15,17,20,23,25,25], + [1,2,3,3,4,5,6,7,8, 9, 9,10,10,11,13,14,16,19,22,25,25], + [1,2,2,3,4,5,6,6,8, 8, 9, 9,10,11,12,14,16,19,21,24,25] + ] + return sunnyhrlag, cloudylag + + def _finefuelmoisturetable(self): + #returns a list based of Table2 
+ #will comapre 1 hr fuel moisture to percent green + #will be a two dimensional list with x =1 hr fuel moisture catagory + #and y (the internal list being the the fine fuel moisture value with the + #percent green catagory represented by the y index + #0 represents a NO CHANGE value use the 1 hr time lag value + table = [[0,2,3,4,5,8,13,18,21], + [0,3,4,5,7,10,16,19,22], + [0,4,5,7,9,14,18,20,22], + [0,5,6,8,12,16,19,21,23], + [0,6,8,11,14,18,20,22,23], + [0,7,10,13,16,19,20,22,23], + [0,9,12,15,18,20,21,22,23], + [0,12,15,17,19,20,22,23,24], + [0,14,17,18,20,21,22,23,24], + [0,16,18,19,20,21,22,23,24], + [0,17,19,20,21,22,22,23,24], + [0,19,20,21,21,22,23,23,24], + [0,21,21,22,22,23,23,24,24], + [0,24,24,24,24,24,24,24,25], + [0,25,25,25,25,25,25,25,25] + ] + return table + + def _ignitioncomptable(self): + #will return one table (only difference is temp ranges for cloudy vs sunny + tble = [ + [88,75,64,54,46,39,30,21,14,9,5,2,0,0,0], + [90,77,66,56,48,41,32,22,15,9,5,2,0,0,0], + [93,80,68,58,50,42,33,23,16,10,6,3,0,0,0], + [95,82,71,61,52,44,35,25,17,11,7,3,1,0,0], + [98,85,73,63,54,46,36,26,18,12,7,4,1,0,0], + [100,87,76,65,56,48,38,28,19,13,8,5,1,0,0], + [100,90,78,68,58,50,40,29,21,14,9,5,2,0,0], + [100,93,81,70,61,53,42,31,22,15,10,6,2,0,0], + [100,97,84,73,63,55,44,32,23,16,11,7,3,0,0], + [100,100,87,76,66,57,46,34,25,18,12,8,4,0,0], + [100,100,90,79,69,60,49,36,27,19,13,9,4,1,0], + [100,100,92,80,70,61,50,37,28,20,14,9,5,1,0], + ] + + return tble + def _RFDtable(self): + #IGNITION COMPONENT(x) vs Wind spd (y) + return [ + [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], + [ 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 6, 6], + [ 8, 8, 8, 8, 9, 9, 9, 9, 9, 10, 11, 11, 12, 13, 14, 14, 15, 16], + [ 13, 14, 14, 14, 14, 14, 14, 15, 15, 16, 17, 18, 19, 21, 22, 25, 25, 25], + [ 18, 19, 19, 19, 19, 20, 20, 20, 21, 22, 24, 25, 27, 29, 30, 32, 35, 35], + [ 23, 24, 24, 24, 25, 25, 26, 26, 27, 29, 30, 32, 34, 36, 39, 42, 44, 46], + [ 29, 29, 29, 30, 30, 31, 
31, 32, 33, 35, 37, 39, 41, 44, 47, 51, 54, 57], + [ 34, 34, 34, 35, 35, 36, 37, 38, 39, 41, 43, 46, 49, 52, 56, 60, 64, 68], + [ 39, 39, 40, 40, 40, 41, 42, 43, 45, 47, 50, 53, 57, 60, 64, 69, 74, 79], + [ 44, 44, 45, 45, 46, 47, 48, 49, 51, 54, 57, 60, 64, 68, 73, 78, 83, 89], + [ 49, 49, 50, 51, 51, 52, 53, 55, 57, 60, 63, 67, 71, 76, 81, 87, 92, 97], + [ 54, 55, 55, 56, 57, 58, 59, 60, 63, 66, 70, 74, 79, 84, 90, 96,100,100], + [ 59, 60, 60, 61, 62, 63, 65, 66, 68, 72, 76, 81, 86, 92, 98,100,100,100], + [ 64, 65, 66, 66, 68, 69, 70, 72, 74, 78, 83, 88, 94,100,100,100,100,100], + [ 69, 70, 71, 72, 73, 74, 76, 77, 80, 85, 90, 95,100,100,100,100,100,100], + [ 74, 75, 76, 77, 78, 80, 81, 83, 86, 91, 96,100,100,100,100,100,100,100], + [ 79, 80, 81, 82, 84, 85, 89, 89, 92, 97,100,100,100,100,100,100,100,100], + [ 85, 85, 86, 87, 89, 91, 92, 95, 98,100,100,100,100,100,100,100,100,100], + [ 90, 91, 92, 93, 94, 96, 98,100,100,100,100,100,100,100,100,100,100,100], + [ 95, 96, 97, 98,100,100,100,100,100,100,100,100,100,100,100,100,100,100], + [100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100,100] + + ] + def _statecriteria(self): + ####CONFIGURATION FOR EACH STATE + ###DICTIONARY FORMAT + ## DICT = {"STATE", [(catnumber,low,high), (catnumber,low,high)]} + statedict = {"Kansas" : [(0,0,30), (1,31,50), (2,51,70), (3,71,94), (4,95,100)], + "Colorado" : [(0,0,30), (1,31,50), (2,51,70), (3,71,94), (4,95,100)], + "Nebraska" : [(0,0,30), (1,31,50), (2,51,70), (3,71,94), (4,95,100)], + "Iowa" : [(0,0,30), (1,31,50), (2,51,70), (3,71,94), (4,95,100)], + } + return statedict + + + def _preciptthresh(self): + #POP WHERE RFD WILL BE SET TO LOW + #SET TO 101 if you want this disabled + return 65 + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Curing_from_Green.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Curing_from_Green.py index 89c3e13e69..7fa12678fc 100755 --- 
a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Curing_from_Green.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Curing_from_Green.py @@ -1,131 +1,131 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Curing_from_Green.py -# -# Author: dtomalak -# ---------------------------------------------------------------------------- - - -ToolType = "numeric" -WeatherElementEdited = "Curing" -HideTool = 0 - -# You can screen the elements for which your tool will appear by using -# a ScreenList. For example: -# -#ScreenList = ["T","Td"] -#ScreenList = ["SCALAR","VECTOR","WEATHER","DISCRETE"] - -# If desired, Set up variables to be solicited from the user: -# VariableList = [ -# ("Variable name1" , defaultValue1, "numeric"), -# ("Variable name2" , "default value2", "alphaNumeric"), -# ("Variable name3" , ["default value1", "default value2"], "check", -# ["value1", "value2", "value3"]), -# ("Variable name4" , "default value4", "radio", -# ["value1", "value2", "value3"]), -# ("Variable name5" , defaultValue, "scale", -# [minValue, maxValue], resolution), -# ("Variable name6" , "", "model"), -# ("Variable name7" , "", "D2D_model"), -# ("Label contents" , "", "label"), -# ("", dialogHeight, "scrollbar"), -# ] - -# Set up Class -import SmartScript -import types, string, imp, cPickle, time, sys -from math import * -from numpy import * -import re -import Exceptions -import UnitConvertor -# For available commands, see SmartScript - - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Required Method: Execute - # %comment - # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... 
- - def execute(self, Curing, varDict): - "THIS TOOL WILL POPULATE A % GREEN GRID FOR THE RANGELAND FIRE DANGER INDEX" - ####CONFIGURABLE SECTION - ###PERCENT GREEN DATA IS NEEDS TO BE IN FIPS CODE VALUE FORMAT!!! - #STATE DICTIONARY - #DICTIONARY OF EACH DESIRED STATE AND THE FILENAME OF % GREEN FILE - self._statesdict = {"NE" : "ne.green.txt", - "IA" : "ia.green.txt", - } - - #DATA DIRECTORY - name of directory where data is stored - #ex "/home/local/testdat (leave off last /) - datadir = "/data/local/PercentGreen/" - #SET VARIABLES TO "NONE" - ####END CONFIGURATIONS!!!!!!!!!!!!!! - ############################################################ - ############################################################ - ############## MAKE NO CHANGES ########################### - ############################################################ - # - #COLLECT FIPS AREAS IN DATABASE - alleditareas = self.editAreaList() - FIPSonly = [] - statekeys = self._statesdict.keys() - for area in alleditareas: - #TEST FOR FIPS CODES - if len(area) != 6: - continue - else: - test = area[0:2] - test2 = area[2:] - if test in statekeys: - #do something - if string.find(test2, "C") != -1: - #AREA HAS PASSED ALL TESTS>>>IS LIKELY A FIPS CODE - FIPSonly.append(area) - continue - else: - continue - else: - continue - #FOREACH STATE GRAB THE DATA AND PUT IT IN STRING FORMAT - #WILL RETURN ONE LIST FOR ALL STATES - datadict = {} - for state in statekeys: - stfile = self._statesdict[state] - try: - getdat = open(datadir + "/" + stfile, "r") - data = getdat.readlines() - getdat.close() - for line in data: - line = string.strip(line) #CLEAN OUT EXTRA SPACES if there is any - val = string.split(line, " ") - if len(val) > 2: - #PREVENT NON DATA POINTS FROM GETTTING INTO DATA DICT - continue - if val[0] in FIPSonly: - datadict[str(val[0])] = str(val[1]) - else: - continue - except: - continue - #DATA NOW IN DICTIONARY FORM...STEP THROUGH EACH KEY AND ASSIGN A DATA VALUE - #USING WHERE STATEMENTS - newgreen = 
zeros(Curing.shape, int32) - - #WILL DEFAULT TO ZERO IF NO NEW DATA IS FOUND - for zone in datadict.keys(): - area = zone - value = int(datadict[zone]) - areamask = self.encodeEditArea(area) - newgreen[not_equal(areamask,0)] = value - - Curing = 100 - newgreen - return Curing +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Curing_from_Green.py +# +# Author: dtomalak +# ---------------------------------------------------------------------------- + + +ToolType = "numeric" +WeatherElementEdited = "Curing" +HideTool = 0 + +# You can screen the elements for which your tool will appear by using +# a ScreenList. For example: +# +#ScreenList = ["T","Td"] +#ScreenList = ["SCALAR","VECTOR","WEATHER","DISCRETE"] + +# If desired, Set up variables to be solicited from the user: +# VariableList = [ +# ("Variable name1" , defaultValue1, "numeric"), +# ("Variable name2" , "default value2", "alphaNumeric"), +# ("Variable name3" , ["default value1", "default value2"], "check", +# ["value1", "value2", "value3"]), +# ("Variable name4" , "default value4", "radio", +# ["value1", "value2", "value3"]), +# ("Variable name5" , defaultValue, "scale", +# [minValue, maxValue], resolution), +# ("Variable name6" , "", "model"), +# ("Variable name7" , "", "D2D_model"), +# ("Label contents" , "", "label"), +# ("", dialogHeight, "scrollbar"), +# ] + +# Set up Class +import SmartScript +import types, string, imp, pickle, time, sys +from math import * +from numpy import * +import re +import Exceptions +import UnitConvertor +# For available commands, see SmartScript + + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Required Method: Execute + # %comment + # Fill in the arguments you want to use -- WeatherElement1, 
WeatherElement2... + + def execute(self, Curing, varDict): + "THIS TOOL WILL POPULATE A % GREEN GRID FOR THE RANGELAND FIRE DANGER INDEX" + ####CONFIGURABLE SECTION + ###PERCENT GREEN DATA IS NEEDS TO BE IN FIPS CODE VALUE FORMAT!!! + #STATE DICTIONARY + #DICTIONARY OF EACH DESIRED STATE AND THE FILENAME OF % GREEN FILE + self._statesdict = {"NE" : "ne.green.txt", + "IA" : "ia.green.txt", + } + + #DATA DIRECTORY - name of directory where data is stored + #ex "/home/local/testdat (leave off last /) + datadir = "/data/local/PercentGreen/" + #SET VARIABLES TO "NONE" + ####END CONFIGURATIONS!!!!!!!!!!!!!! + ############################################################ + ############################################################ + ############## MAKE NO CHANGES ########################### + ############################################################ + # + #COLLECT FIPS AREAS IN DATABASE + alleditareas = self.editAreaList() + FIPSonly = [] + statekeys = list(self._statesdict.keys()) + for area in alleditareas: + #TEST FOR FIPS CODES + if len(area) != 6: + continue + else: + test = area[0:2] + test2 = area[2:] + if test in statekeys: + #do something + if string.find(test2, "C") != -1: + #AREA HAS PASSED ALL TESTS>>>IS LIKELY A FIPS CODE + FIPSonly.append(area) + continue + else: + continue + else: + continue + #FOREACH STATE GRAB THE DATA AND PUT IT IN STRING FORMAT + #WILL RETURN ONE LIST FOR ALL STATES + datadict = {} + for state in statekeys: + stfile = self._statesdict[state] + try: + getdat = open(datadir + "/" + stfile, "r") + data = getdat.readlines() + getdat.close() + for line in data: + line = string.strip(line) #CLEAN OUT EXTRA SPACES if there is any + val = string.split(line, " ") + if len(val) > 2: + #PREVENT NON DATA POINTS FROM GETTTING INTO DATA DICT + continue + if val[0] in FIPSonly: + datadict[str(val[0])] = str(val[1]) + else: + continue + except: + continue + #DATA NOW IN DICTIONARY FORM...STEP THROUGH EACH KEY AND ASSIGN A DATA VALUE + #USING WHERE 
STATEMENTS + newgreen = zeros(Curing.shape, int32) + + #WILL DEFAULT TO ZERO IF NO NEW DATA IS FOUND + for zone in list(datadict.keys()): + area = zone + value = int(datadict[zone]) + areamask = self.encodeEditArea(area) + newgreen[not_equal(areamask,0)] = value + + Curing = 100 - newgreen + return Curing diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Enhanced_WxTool.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Enhanced_WxTool.py index 1e49e8cbd2..172e4f9853 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Enhanced_WxTool.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Enhanced_WxTool.py @@ -1,517 +1,517 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ----------------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# BUF_WxTool - Version 2.1 +# +# Author: John Rozbicki NWS BUF +# Date Last Modified: 08/25/08 +# +# History of Changes: +# +# Version 2.1 (08/25/08) - Made the following changes: +# +# (1) Added code so that the tool just produces a single pop-up error message if +# thunderstorms (T) or severe thunderstorms (T+) are selected, along with +# multiple invalid thunderstorm attributes. Previously, the tool would generate +# an error message for each invalid attribute. +# +# (2) Added new checks to allow the tool to run to completion if the user selects: +# +# (a) Areal coverage qualifiers; and +# (b) A weather type of R, S, ZR, IP, L, or ZL; and +# (c) A valid alternate precipitation coverage. +# +# Previously, the tool would just ignore any alternate precipitation coverages, +# and return a pop-up error message if just conditions (a) and (b) were met. Now, +# the alternate precipitation coverage is also checked - an error message is returned +# only if conditions (a) and (b) are met and a valid alternate coverage is NOT +# selected. +# +# (3) Changed the error message returned when the user selects a coverage of "Iso", +# "Sct" or "Num" with a precip type of R, S, ZR, IP, L, or ZL. Previously, this +# message stated that all areal coverage qualifiers were not valid for these weather +# types; this really isn't true. The change makes the output less confusing for the +# user. +# +# (4) Made some minor miscellaneous changes to the tool documentation and other +# pop-up error messages. +# +# Version 2.0 (03/27/06) - Made the following changes: +# +# (1) Added the ability to override the default probability/coverage +# obtained from the corresponding PoP grid if desired. 
This can be done +# separately for each individual weather type. This allows the easy creation +# of weather types such as "Lkly RW:Chc SW". This approach also allows for +# the use of other weather types (F, L, and ZL) which aren't necessarily PoP- +# dependent. +# +# (2) In conjunction with (1), added 3 new weather types (F, L, and ZL). Also +# added coverage terms ("Patchy", "Areas") which are specific to these elements. +# +# (3) Added very light (--) to the list of intensities. This makes in possible +# to assign weather types such as RW-- (sprinkles) and SW-- (flurries). +# +# (4) In conjunction with the 3 changes listed above, added the following internal +# checks to ensure that the tool does not create invalid weather types: +# +# (a) A check for the use of "Patchy" and "Areas" coverages with weather +# types other than L, ZL, and F. (prevents invalid types such +# as "Patchy R"). +# +# (b) A check for the use of Areal Coverage qualifiers other than "Patchy", +# "Areas", and "Wide" with L, ZL, and F (prevents invalid types such as +# "Iso ZL"). +# +# (c) A check for the use of very light, light, or moderate intensities with +# F (none of which are valid). +# +# If the conditions in (a) or (b) are met, the tool will display a +# message indicating that the proposed weather type is invalid, +# and exits without modifying the Wx grid. +# +# If the condition in (c) is met, the tool will display a message indicating +# that the proposed intensity cannot be assigned to fog. It will also +# automatically change the intensity to without any further action +# on the part of the forecaster. +# +# (5) Added the ability to select alternate prob/cov terms (i.e., other than +# "Def" or "Wide") if categorical (>=74.5%) PoPs are used. In such cases, +# the following additional options are now available: "Ocnl", "Frq", "Brf", +# "Pds", and "Inter". When used, this setting will apply to all categorical +# precip coverages that are derived from the PoP grid. 
+# +# (6) Added the ability to specify an alternate probability/coverage +# for T if desired. This setting will override the default probability/ +# coverage obtained from the corresponding PoP grid, and makes possible +# to create precip types such as "Def RW:Chc T". +# +# (7) Added an Tornadoes to the list of thunderstorm attributes. A check was also +# added to prevent the use of the Tornadoes attribute with non-severe +# thunderstorms. +# +# (8) Modified the tool to prevent the use of the SmA (small hail) and +# GW (gusty winds) thunderstorm attributes with severe thunderstorms (T+). +# +# (9) Modified the tool to now display a pop-up message if the user tries to +# select invalid thunderstorm attributes. If this occurs, the tool will still +# automatically ignore such attributes and run to completion as before. +# +# (10) Made the minimum PoP threshold for SChc weather site-configurable. This +# allows sites to tailor this setting to meet their individual needs. The +# default setting remains 14.5% - the Eastern Region standard. +# +# IMPORTANT: The above enhancements resulted in numerous changes to the tool GUI and +# the tool code. If you have previously customized the tool for your site, +# it is strongly recommended that you use the following procedure when upgrading: +# +# (1) Save off a copy of the existing BUF_WxTool under a different name. +# (2) Install Version 2.0 of the BUF_WxTool. +# (3) After installing Version 2.0, merge any local changes back into the tool +# (if needed). # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ----------------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# BUF_WxTool - Version 2.1 -# -# Author: John Rozbicki NWS BUF -# Date Last Modified: 08/25/08 -# -# History of Changes: -# -# Version 2.1 (08/25/08) - Made the following changes: -# -# (1) Added code so that the tool just produces a single pop-up error message if -# thunderstorms (T) or severe thunderstorms (T+) are selected, along with -# multiple invalid thunderstorm attributes. Previously, the tool would generate -# an error message for each invalid attribute. -# -# (2) Added new checks to allow the tool to run to completion if the user selects: -# -# (a) Areal coverage qualifiers; and -# (b) A weather type of R, S, ZR, IP, L, or ZL; and -# (c) A valid alternate precipitation coverage. -# -# Previously, the tool would just ignore any alternate precipitation coverages, -# and return a pop-up error message if just conditions (a) and (b) were met. Now, -# the alternate precipitation coverage is also checked - an error message is returned -# only if conditions (a) and (b) are met and a valid alternate coverage is NOT -# selected. -# -# (3) Changed the error message returned when the user selects a coverage of "Iso", -# "Sct" or "Num" with a precip type of R, S, ZR, IP, L, or ZL. Previously, this -# message stated that all areal coverage qualifiers were not valid for these weather -# types; this really isn't true. The change makes the output less confusing for the -# user. -# -# (4) Made some minor miscellaneous changes to the tool documentation and other -# pop-up error messages. -# -# Version 2.0 (03/27/06) - Made the following changes: -# -# (1) Added the ability to override the default probability/coverage -# obtained from the corresponding PoP grid if desired. This can be done -# separately for each individual weather type. This allows the easy creation -# of weather types such as "Lkly RW:Chc SW". This approach also allows for -# the use of other weather types (F, L, and ZL) which aren't necessarily PoP- -# dependent. 
-# -# (2) In conjunction with (1), added 3 new weather types (F, L, and ZL). Also -# added coverage terms ("Patchy", "Areas") which are specific to these elements. -# -# (3) Added very light (--) to the list of intensities. This makes in possible -# to assign weather types such as RW-- (sprinkles) and SW-- (flurries). -# -# (4) In conjunction with the 3 changes listed above, added the following internal -# checks to ensure that the tool does not create invalid weather types: -# -# (a) A check for the use of "Patchy" and "Areas" coverages with weather -# types other than L, ZL, and F. (prevents invalid types such -# as "Patchy R"). -# -# (b) A check for the use of Areal Coverage qualifiers other than "Patchy", -# "Areas", and "Wide" with L, ZL, and F (prevents invalid types such as -# "Iso ZL"). -# -# (c) A check for the use of very light, light, or moderate intensities with -# F (none of which are valid). -# -# If the conditions in (a) or (b) are met, the tool will display a -# message indicating that the proposed weather type is invalid, -# and exits without modifying the Wx grid. -# -# If the condition in (c) is met, the tool will display a message indicating -# that the proposed intensity cannot be assigned to fog. It will also -# automatically change the intensity to without any further action -# on the part of the forecaster. -# -# (5) Added the ability to select alternate prob/cov terms (i.e., other than -# "Def" or "Wide") if categorical (>=74.5%) PoPs are used. In such cases, -# the following additional options are now available: "Ocnl", "Frq", "Brf", -# "Pds", and "Inter". When used, this setting will apply to all categorical -# precip coverages that are derived from the PoP grid. -# -# (6) Added the ability to specify an alternate probability/coverage -# for T if desired. This setting will override the default probability/ -# coverage obtained from the corresponding PoP grid, and makes possible -# to create precip types such as "Def RW:Chc T". 
-# -# (7) Added an Tornadoes to the list of thunderstorm attributes. A check was also -# added to prevent the use of the Tornadoes attribute with non-severe -# thunderstorms. -# -# (8) Modified the tool to prevent the use of the SmA (small hail) and -# GW (gusty winds) thunderstorm attributes with severe thunderstorms (T+). -# -# (9) Modified the tool to now display a pop-up message if the user tries to -# select invalid thunderstorm attributes. If this occurs, the tool will still -# automatically ignore such attributes and run to completion as before. -# -# (10) Made the minimum PoP threshold for SChc weather site-configurable. This -# allows sites to tailor this setting to meet their individual needs. The -# default setting remains 14.5% - the Eastern Region standard. -# -# IMPORTANT: The above enhancements resulted in numerous changes to the tool GUI and -# the tool code. If you have previously customized the tool for your site, -# it is strongly recommended that you use the following procedure when upgrading: -# -# (1) Save off a copy of the existing BUF_WxTool under a different name. -# (2) Install Version 2.0 of the BUF_WxTool. -# (3) After installing Version 2.0, merge any local changes back into the tool -# (if needed). -# -# Version 1.2 (02/23/05) - Updated for IFPS 16.2. Hail (A) is no longer a separate -# weather type in GFE, so removed separate handling of -# hail attributes from other thunderstorm attributes. This -# resulted in a simplification of the code, and the removal -# of all references to "hailstring". Also, fixed a potential -# bug which could result in "DmgW' not being assigned even if -# it was selected by the forecaster. -# -# Version 1.1 (03/21/03) - Changed the PoP thresholds to match the precision of the -# gridded data. For example, the threshold for Chc/Sct was -# 25% before the change, with the change it's now 24.5%. 
-# -# The reason for the change: in Version 1.0, the tool would -# assign an incorrect Wx value for PoP values just below the -# thresholds; for example, "SChc" or "Iso" Wx would be -# assigned for PoPs in the 24.5%-24.9% range. With the change, -# the tool assigns the correct Wx for such PoP values - i.e. -# "Chc" or "Sct" Wx. -# ------------------------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# ----- BEGIN SITE CONFIGURATION SECTION ----- -# -# Here you can configure the minimum PoP threshold for SChc wx. -# The default is the ER-standard setting of 14.5%. -# -SChc_min_PoP_threshold = 14.5 -# -# ------ END SITE CONFIGURATION SECTION ------ - -ToolType = "numeric" -WeatherElementEdited = "Wx" -from numpy import * - -ScreenList = [""] - -##Set up variables to be solicited from the user: - -VariableList = [ - ("Qualifier\nType:", "Prob", "radio", - ["Prob", "Cov"]), - ("Alter\nTerms\nCat\nPoPs:", "None", "radio", - ["None", "Ocnl", "Frq", "Brf", "Pds", "Inter"]), - ("1st\nType:", "RW", "radio", - ["RW", "SW", "R", "S", "ZR", "IP", "L", "ZL", "F"]), - ("1st\nInten:", "-", "radio", - ["--", "-", "m", "+"]), - ("1st Type\nAlternate\nProb/Cov:", "None", "radio", - ["None", "SChc", "Iso", "Chc", "Sct", "Lkly", "Num", "Patchy", "Areas", "Wide"]), - ("2nd\nType:", "None", "radio", - ["None", "RW", "SW", "R", "S", "ZR", "IP", "L", "ZL", "F"]), - ("2nd\nInten:", "-", "radio", - ["--", "-", "m", "+"]), - ("2nd Type\nAlternate\nProb/Cov:", "None", "radio", - ["None", "SChc", "Iso", "Chc", "Sct", "Lkly", "Num", "Patchy", "Areas", "Wide"]), - ("3rd\nType:", "None", "radio", - ["None", "RW", "SW", "R", "S", "ZR", "IP", "L", "ZL", "F"]), - ("3rd\nInten:", "-", "radio", - ["--", "-", "m", "+"]), - ("3rd Type\nAlternate\nProb/Cov:", "None", "radio", - ["None", "SChc", "Iso", "Chc", "Sct", 
"Lkly", "Num", "Patchy", "Areas", "Wide"]), - ("Thunder?", "No", "radio", - ["No", "Yes (T)", "Yes (T+)"]), - ("Thunder\nAlternate\nProb/Cov:", "None", "radio", - ["None", "SChc", "Iso", "Chc", "Sct", "Lkly", "Num"]), - ("Tstm\nAttributes?", ["None"], "check", - ["Small Hail", "Heavy Rain", "Gusty Winds", "Frequent Lightning", "Large Hail", \ - "Damaging Winds", "Tornadoes"]) - ] - -# Set up Class -import SmartScript -# Hack to make Tkinter work: -import sys -sys.argv = [''] -import Tkinter -import threading -## For available commands, see SmartScript - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Required Method: Execute - # Called for each Point for each Grid to be edited - # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... - - def execute(self, PoP, Wx, varDict): - "Sets Wx Coverage within Active Edit Area according to PoP values" - - # Set up variables from the varDict - - qualifiertype = varDict["Qualifier\nType:"] - alt_catpop_probcov = varDict["Alter\nTerms\nCat\nPoPs:"] - type1 = varDict["1st\nType:"] - intensity1 = varDict["1st\nInten:"] - alt_probcov1 = varDict["1st Type\nAlternate\nProb/Cov:"] - type2 = varDict["2nd\nType:"] - intensity2 = varDict["2nd\nInten:"] - alt_probcov2 = varDict["2nd Type\nAlternate\nProb/Cov:"] - type3 = varDict["3rd\nType:"] - intensity3 = varDict["3rd\nInten:"] - alt_probcov3 = varDict["3rd Type\nAlternate\nProb/Cov:"] - thunder = varDict["Thunder?"] - alt_thunder_probcov = varDict["Thunder\nAlternate\nProb/Cov:"] - attributes = varDict["Tstm\nAttributes?"] - - type = [type1, type2, type3] - intensity = [intensity1, intensity2, intensity3] - # Set up alt_probcov list (lists alternate probabilities/coverages for each Wx Type) - alt_probcov = [alt_probcov1, alt_probcov2, alt_probcov3] - - # Block to check for invalid Wx types if the user selects areal coverage qualifiers, and a precip type - # of R, S, ZR, IP, L, or ZL. 
In such cases, return an error message - unless a valid areal coverage - # qualifier is specified as an alternate coverage. The check for valid alternate coverages was added - # with version 2.1; previously, the tool would just ingore valid alternate coverage terms and return - # an error message regardless. - if qualifiertype == "Cov": - for x in range(len(type)): - if ((type[x] == "R") or (type[x] == "S") or (type[x] == "ZR") or (type[x] == "IP")): - if ((alt_probcov[x] == "None") or (alt_probcov[x] == "Iso") or (alt_probcov[x] == "Sct") \ - or (alt_probcov[x] == "Num") or (alt_probcov[x] == "Patchy") or (alt_probcov[x] == "Areas")): - print type[x], alt_probcov[x] - self.errorNotice('Invalid Wx Type', "Areal coverage qualifiers other than 'Wide' may not be used with a "\ - "precip type of R, S, ZR, or IP.\nPlease re-run the tool using different qualifiers.") - return Wx - if ((type[x] == "L") or (type[x] == "ZL")): - if ((alt_probcov[x] == "None") or (alt_probcov[x] == "Iso") or (alt_probcov[x] == "Sct") or (alt_probcov[x] == "Num")): - print type[x], alt_probcov[x] - self.errorNotice('Invalid Wx Type', "Areal coverage qualifiers other than 'Patchy', 'Areas', or 'Wide' may not be used with "\ - "a precip type of L or ZL.\nPlease re-run the tool using different qualifiers.") - return Wx - - # If block to check for invalid Wx types if the user selects alternate prob/cov terms... - for x in range(len(alt_probcov)): - # Add check in case Fog is selected with no alternate qualifier - if (type[x] == "F") and (alt_probcov[x] == "None"): - print type[x] - self.errorNotice('Invalid Wx Type', "You must use a qualifier of 'Patchy', 'Areas', or 'Wide' with F (Fog).\n"\ - "Please re-run the tool using an allowed qualifier.") - return Wx - # Added additional check to the following line to ensure that type[x] is also None. 
This prevents - # the error message(s) below from appearing if a second/third pcpn type is set to "None", yet the - # second/third alt_probcov is some value other than "None". - if (alt_probcov[x] != "None") and (type[x] != "None"): - if ((type[x] == "R") or (type[x] == "S") or (type[x] == "ZR") or (type[x] == "IP") or (type[x] == "L") or (type[x] == "ZL")) and \ - ((alt_probcov[x] == "Iso") or (alt_probcov[x] == "Sct") or (alt_probcov[x] == "Num")): - print type[x] - self.errorNotice('Invalid Wx Type', "The 'Iso', 'Sct', or 'Num' qualifiers may not be used with a precip type of "\ - "R, S, ZR, IP, L, or ZL.\nPlease re-run the tool using a different qualifier.") - return Wx - if ((type[x] != "L") and (type[x] != "ZL") and (type[x] != "F")) and \ - ((alt_probcov[x] == "Patchy") or (alt_probcov[x] == "Areas")): - print type[x] - self.errorNotice('Invalid Wx Type', "'Patchy' or 'Areas' qualifiers may not be used with a precip type of RW, SW, R, S, ZR, or IP.\n"\ - "Please re-run the tool using a different qualifier.") - return Wx - # Add check in case Fog is selected, and the qualifier is set to something other than 'Patchy', 'Areas', - # or "Wide'. 
- if (type[x] == "F") and \ - ((alt_probcov[x] != "Patchy") and (alt_probcov[x] != "Areas") and (alt_probcov[x] != "Wide")): - print type[x] - self.errorNotice('Invalid Wx Type', "Qualifiers other than 'Patchy', 'Areas', or 'Wide' may not be used with F (Fog).\n"\ - "Please re-run the tool using an allowed qualifier.") - return Wx - - # Create Attributes String for later use - - if thunder == "No": - attstring = "" - - if thunder == "Yes (T)": - # If (non-severe) thunder is selected, do not allow LgA, DmgW, or TOR attributes to be used - if attributes == []: - attstring = "" - else: - attstring = "" - invalid_attributes = 0 - - for x in range(len(attributes)): - if attributes[x] == "Small Hail": - attstring += "SmA," - if attributes[x] == "Heavy Rain": - attstring += "HvyRn," - if attributes[x] == "Gusty Winds": - attstring += "GW," - if attributes[x] == "Frequent Lightning": - attstring += "FL" - - # Check for invalid Tstm attributes (cannot use LgA, DmgW, or TOR with T); - # if any are selected - display a pop-up message indicating that these are not valid - # (but let tool run, ignoring the invalid attributes) - if (attributes[x] == "Large Hail") or (attributes[x] == "Damaging Winds") or \ - (attributes[x] == "Tornadoes"): - invalid_attributes = 1 - pass - - if invalid_attributes == 1: - self.errorNotice('Invalid Thunderstorm Attribute(s):', "The 'Large Hail', 'Damaging Winds', and 'Tornadoes' attributes may not be used with "\ - "Non-severe thunderstorms (T).\nThese attributes will not be included in the returned grid.") - #print attstring - - if thunder == "Yes (T+)": - # If severe thunder is selected, do not allow SmA or GW attributes to be used. 
- # Also allow tornadoes to be used (both changes for Version 2.0) - if attributes == []: - attstring = "" - else: - attstring = "" - invalid_attributes = 0 - - for x in range(len(attributes)): - if attributes[x] == "Heavy Rain": - attstring += "HvyRn," - if attributes[x] == "Frequent Lightning": - attstring += "FL," - if attributes[x] == "Large Hail": - attstring += "LgA," - if attributes[x] == "Damaging Winds": - attstring += "DmgW," - if attributes[x] == "Tornadoes": - attstring += "TOR," - # Check for invalid Tstm attributes (cannot use SmA or GW with T+); - # if any are selected - display a pop-up message indicating that these are not valid - # (but let tool run, ignoring the invalid attributes) - if (attributes[x] == "Small Hail") or (attributes[x] == "Gusty Winds"): - invalid_attributes = 1 - pass - - if invalid_attributes == 1: - self.errorNotice('Invalid Thunderstorm Attribute(s):', "The 'Small Hail' and 'Gusty Winds' attributes may not be used with "\ - "Severe thunderstorms (T+).\nThese attributes will not be included in the returned grid.") - #print attstring - - # Initialize the Wx values and keys - wxValues = empty_like(PoP, int8) - keys = [] - - if qualifiertype == "Prob": - wxValues[less_equal(PoP, 100)] = self.getByteValue("Def", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - wxValues[less(PoP, 74.5)] = self.getByteValue("Lkly", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - wxValues[less(PoP, 54.5)] = self.getByteValue("Chc", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - wxValues[less(PoP, 24.5)] = self.getByteValue("SChc", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - wxValues[less(PoP, SChc_min_PoP_threshold)] = self.getByteValue("", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - - if 
qualifiertype == "Cov": - wxValues[less_equal(PoP, 100)] = self.getByteValue("Wide", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - wxValues[less(PoP, 74.5)] = self.getByteValue("Num", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - wxValues[less(PoP, 54.5)] = self.getByteValue("Sct", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - wxValues[less(PoP, 24.5)] = self.getByteValue("Iso", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - wxValues[less(PoP, SChc_min_PoP_threshold)] = self.getByteValue("", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) - - print "keys = ", keys - return (wxValues, keys) - - def getByteValue(self, prevail_cov, type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov): - # Adapted from Convective Smart Tool (billingsley) - - # Use alt_catpop_probcov if set: - if alt_catpop_probcov != "None": - if (prevail_cov == "Def") or (prevail_cov == "Wide"): - prevail_cov = alt_catpop_probcov - - # Use alt_thunder_probcov if set: - if alt_thunder_probcov == "None": - thunder_cov = prevail_cov - else: - thunder_cov = alt_thunder_probcov - - # Set type0cov, type1cov, type2cov - if alt_probcov[0] == "None": - type0cov = prevail_cov - else: - type0cov = alt_probcov[0] - if alt_probcov[1] == "None": - type1cov = prevail_cov - else: - type1cov = alt_probcov[1] - if alt_probcov[2] == "None": - type2cov = prevail_cov - else: - type2cov = alt_probcov[2] - - # Check for intensities with Fog - if intensity is very light (--), light (-) or moderate (m), - # set to ...but also display a pop-up message letting the user know these are not allowed. 
- for x in range(len(type)): - if type[x] == "F": - if (intensity[x] == "--") or (intensity[x] == "-") or (intensity[x] == "m"): - intensity[x] = "" - self.errorNotice('Invalid Fog intensity:', "The '--', '-', and 'm' intensities may not be used with "\ - "a weather type of Fog (F).\nThe intensity has been set to in the returned grid.") - - - # Now create the uglyString... - if type[1] == "None" and type[2] == "None": - if prevail_cov == "": - uglyString = "" - else: - if thunder == "Yes (T+)": - uglyString = thunder_cov + ":T:+::" + attstring + "^" \ - + type0cov + ":" + type[0] + ":" + intensity[0] + "::" - elif thunder == "Yes (T)": - uglyString = thunder_cov + ":T:::" + attstring + "^" \ - + type0cov + ":" + type[0] + ":" + intensity[0] + "::" - else: - uglyString = type0cov + ":" + type[0] + ":" + intensity[0] + "::" - - print uglyString - if type[1] != "None" and type[2] == "None": - if prevail_cov == "": - uglyString = "" - else: - if thunder == "Yes (T+)": - uglyString = thunder_cov + ":T:+::" + attstring + "^" \ - + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ - + type1cov + ":" + type[1] + ":" + intensity[1] + "::" - elif thunder == "Yes (T)": - uglyString = thunder_cov + ":T:::" + attstring + "^" \ - + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ - + type1cov + ":" + type[1] + ":" + intensity[1] + "::" - else: - uglyString = type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ - + type1cov + ":" + type[1] + ":" + intensity[1] + "::" - print uglyString - if type[1] == "None" and type[2] != "None": - if prevail_cov == "": - uglyString = "" - else: - if thunder == "Yes (T+)": - uglyString = thunder_cov + ":T:+::" + attstring + "^" \ - + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ - + type2cov + ":" + type[2] + ":" + intensity[2] + "::" - elif thunder == "Yes (T)": - uglyString = thunder_cov + ":T:::" + attstring + "^" \ - + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ - + type2cov + ":" + type[2] + ":" + 
intensity[2] + "::" - else: - uglyString = type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ - + type2cov + ":" + type[2] + ":" + intensity[2] + "::" - print uglyString - if type[1] != "None" and type[2] != "None": - if prevail_cov == "": - uglyString = "" - else: - if thunder == "Yes (T+)": - uglyString = thunder_cov + ":T:+::" + attstring + "^" \ - + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ - + type1cov + ":" + type[1] + ":" + intensity[1] + "::^" \ - + type2cov + ":" + type[2] + ":" + intensity[2] + "::" - elif thunder == "Yes (T)": - uglyString = thunder_cov + ":T:::" + attstring + "^" \ - + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ - + type1cov + ":" + type[1] + ":" + intensity[1] + "::^" \ - + type2cov + ":" + type[2] + ":" + intensity[2] + "::" - else: - uglyString = type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ - + type1cov + ":" + type[1] + ":" + intensity[1] + "::^" \ - + type2cov + ":" + type[2] + ":" + intensity[2] + "::" - print uglyString - - if "" == uglyString: - uglyString = "::::" - return self.getIndex(uglyString, keys) - - def errorNotice(self, title=None, text=None, **kwargs): - "Show the user a multi-line error message." - # Title and kwargs are not used - # They may be used if this method goes back to a Tk implementation - if text is not None: - self.statusBarMsg(text, "A") - -# Tk implementation of errorNotice -# currently can't use due to short-lived nature of Jep scripts -# def errorNotice(self, title=None, text=None, **kwargs): -# "Show the user a multi-line error message." -# if msg is not None: -# Exit = Tkinter.Tk() -# Exit.title(title) -# kwargs['command'] = Exit.destroy -# kwargs['text'] = text +# Version 1.2 (02/23/05) - Updated for IFPS 16.2. Hail (A) is no longer a separate +# weather type in GFE, so removed separate handling of +# hail attributes from other thunderstorm attributes. This +# resulted in a simplification of the code, and the removal +# of all references to "hailstring". 
Also, fixed a potential +# bug which could result in "DmgW' not being assigned even if +# it was selected by the forecaster. +# +# Version 1.1 (03/21/03) - Changed the PoP thresholds to match the precision of the +# gridded data. For example, the threshold for Chc/Sct was +# 25% before the change, with the change it's now 24.5%. +# +# The reason for the change: in Version 1.0, the tool would +# assign an incorrect Wx value for PoP values just below the +# thresholds; for example, "SChc" or "Iso" Wx would be +# assigned for PoPs in the 24.5%-24.9% range. With the change, +# the tool assigns the correct Wx for such PoP values - i.e. +# "Chc" or "Sct" Wx. +# ------------------------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# ----- BEGIN SITE CONFIGURATION SECTION ----- +# +# Here you can configure the minimum PoP threshold for SChc wx. +# The default is the ER-standard setting of 14.5%. 
+# +SChc_min_PoP_threshold = 14.5 +# +# ------ END SITE CONFIGURATION SECTION ------ + +ToolType = "numeric" +WeatherElementEdited = "Wx" +from numpy import * + +ScreenList = [""] + +##Set up variables to be solicited from the user: + +VariableList = [ + ("Qualifier\nType:", "Prob", "radio", + ["Prob", "Cov"]), + ("Alter\nTerms\nCat\nPoPs:", "None", "radio", + ["None", "Ocnl", "Frq", "Brf", "Pds", "Inter"]), + ("1st\nType:", "RW", "radio", + ["RW", "SW", "R", "S", "ZR", "IP", "L", "ZL", "F"]), + ("1st\nInten:", "-", "radio", + ["--", "-", "m", "+"]), + ("1st Type\nAlternate\nProb/Cov:", "None", "radio", + ["None", "SChc", "Iso", "Chc", "Sct", "Lkly", "Num", "Patchy", "Areas", "Wide"]), + ("2nd\nType:", "None", "radio", + ["None", "RW", "SW", "R", "S", "ZR", "IP", "L", "ZL", "F"]), + ("2nd\nInten:", "-", "radio", + ["--", "-", "m", "+"]), + ("2nd Type\nAlternate\nProb/Cov:", "None", "radio", + ["None", "SChc", "Iso", "Chc", "Sct", "Lkly", "Num", "Patchy", "Areas", "Wide"]), + ("3rd\nType:", "None", "radio", + ["None", "RW", "SW", "R", "S", "ZR", "IP", "L", "ZL", "F"]), + ("3rd\nInten:", "-", "radio", + ["--", "-", "m", "+"]), + ("3rd Type\nAlternate\nProb/Cov:", "None", "radio", + ["None", "SChc", "Iso", "Chc", "Sct", "Lkly", "Num", "Patchy", "Areas", "Wide"]), + ("Thunder?", "No", "radio", + ["No", "Yes (T)", "Yes (T+)"]), + ("Thunder\nAlternate\nProb/Cov:", "None", "radio", + ["None", "SChc", "Iso", "Chc", "Sct", "Lkly", "Num"]), + ("Tstm\nAttributes?", ["None"], "check", + ["Small Hail", "Heavy Rain", "Gusty Winds", "Frequent Lightning", "Large Hail", \ + "Damaging Winds", "Tornadoes"]) + ] + +# Set up Class +import SmartScript +# Hack to make Tkinter work: +import sys +sys.argv = [''] +import tkinter +import threading +## For available commands, see SmartScript + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Required Method: Execute + # Called for each Point for each Grid to be edited + # 
Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... + + def execute(self, PoP, Wx, varDict): + "Sets Wx Coverage within Active Edit Area according to PoP values" + + # Set up variables from the varDict + + qualifiertype = varDict["Qualifier\nType:"] + alt_catpop_probcov = varDict["Alter\nTerms\nCat\nPoPs:"] + type1 = varDict["1st\nType:"] + intensity1 = varDict["1st\nInten:"] + alt_probcov1 = varDict["1st Type\nAlternate\nProb/Cov:"] + type2 = varDict["2nd\nType:"] + intensity2 = varDict["2nd\nInten:"] + alt_probcov2 = varDict["2nd Type\nAlternate\nProb/Cov:"] + type3 = varDict["3rd\nType:"] + intensity3 = varDict["3rd\nInten:"] + alt_probcov3 = varDict["3rd Type\nAlternate\nProb/Cov:"] + thunder = varDict["Thunder?"] + alt_thunder_probcov = varDict["Thunder\nAlternate\nProb/Cov:"] + attributes = varDict["Tstm\nAttributes?"] + + type = [type1, type2, type3] + intensity = [intensity1, intensity2, intensity3] + # Set up alt_probcov list (lists alternate probabilities/coverages for each Wx Type) + alt_probcov = [alt_probcov1, alt_probcov2, alt_probcov3] + + # Block to check for invalid Wx types if the user selects areal coverage qualifiers, and a precip type + # of R, S, ZR, IP, L, or ZL. In such cases, return an error message - unless a valid areal coverage + # qualifier is specified as an alternate coverage. The check for valid alternate coverages was added + # with version 2.1; previously, the tool would just ingore valid alternate coverage terms and return + # an error message regardless. 
+ if qualifiertype == "Cov": + for x in range(len(type)): + if ((type[x] == "R") or (type[x] == "S") or (type[x] == "ZR") or (type[x] == "IP")): + if ((alt_probcov[x] == "None") or (alt_probcov[x] == "Iso") or (alt_probcov[x] == "Sct") \ + or (alt_probcov[x] == "Num") or (alt_probcov[x] == "Patchy") or (alt_probcov[x] == "Areas")): + print(type[x], alt_probcov[x]) + self.errorNotice('Invalid Wx Type', "Areal coverage qualifiers other than 'Wide' may not be used with a "\ + "precip type of R, S, ZR, or IP.\nPlease re-run the tool using different qualifiers.") + return Wx + if ((type[x] == "L") or (type[x] == "ZL")): + if ((alt_probcov[x] == "None") or (alt_probcov[x] == "Iso") or (alt_probcov[x] == "Sct") or (alt_probcov[x] == "Num")): + print(type[x], alt_probcov[x]) + self.errorNotice('Invalid Wx Type', "Areal coverage qualifiers other than 'Patchy', 'Areas', or 'Wide' may not be used with "\ + "a precip type of L or ZL.\nPlease re-run the tool using different qualifiers.") + return Wx + + # If block to check for invalid Wx types if the user selects alternate prob/cov terms... + for x in range(len(alt_probcov)): + # Add check in case Fog is selected with no alternate qualifier + if (type[x] == "F") and (alt_probcov[x] == "None"): + print(type[x]) + self.errorNotice('Invalid Wx Type', "You must use a qualifier of 'Patchy', 'Areas', or 'Wide' with F (Fog).\n"\ + "Please re-run the tool using an allowed qualifier.") + return Wx + # Added additional check to the following line to ensure that type[x] is also None. This prevents + # the error message(s) below from appearing if a second/third pcpn type is set to "None", yet the + # second/third alt_probcov is some value other than "None". 
+ if (alt_probcov[x] != "None") and (type[x] != "None"): + if ((type[x] == "R") or (type[x] == "S") or (type[x] == "ZR") or (type[x] == "IP") or (type[x] == "L") or (type[x] == "ZL")) and \ + ((alt_probcov[x] == "Iso") or (alt_probcov[x] == "Sct") or (alt_probcov[x] == "Num")): + print(type[x]) + self.errorNotice('Invalid Wx Type', "The 'Iso', 'Sct', or 'Num' qualifiers may not be used with a precip type of "\ + "R, S, ZR, IP, L, or ZL.\nPlease re-run the tool using a different qualifier.") + return Wx + if ((type[x] != "L") and (type[x] != "ZL") and (type[x] != "F")) and \ + ((alt_probcov[x] == "Patchy") or (alt_probcov[x] == "Areas")): + print(type[x]) + self.errorNotice('Invalid Wx Type', "'Patchy' or 'Areas' qualifiers may not be used with a precip type of RW, SW, R, S, ZR, or IP.\n"\ + "Please re-run the tool using a different qualifier.") + return Wx + # Add check in case Fog is selected, and the qualifier is set to something other than 'Patchy', 'Areas', + # or "Wide'. + if (type[x] == "F") and \ + ((alt_probcov[x] != "Patchy") and (alt_probcov[x] != "Areas") and (alt_probcov[x] != "Wide")): + print(type[x]) + self.errorNotice('Invalid Wx Type', "Qualifiers other than 'Patchy', 'Areas', or 'Wide' may not be used with F (Fog).\n"\ + "Please re-run the tool using an allowed qualifier.") + return Wx + + # Create Attributes String for later use + + if thunder == "No": + attstring = "" + + if thunder == "Yes (T)": + # If (non-severe) thunder is selected, do not allow LgA, DmgW, or TOR attributes to be used + if attributes == []: + attstring = "" + else: + attstring = "" + invalid_attributes = 0 + + for x in range(len(attributes)): + if attributes[x] == "Small Hail": + attstring += "SmA," + if attributes[x] == "Heavy Rain": + attstring += "HvyRn," + if attributes[x] == "Gusty Winds": + attstring += "GW," + if attributes[x] == "Frequent Lightning": + attstring += "FL" + + # Check for invalid Tstm attributes (cannot use LgA, DmgW, or TOR with T); + # if any are 
selected - display a pop-up message indicating that these are not valid + # (but let tool run, ignoring the invalid attributes) + if (attributes[x] == "Large Hail") or (attributes[x] == "Damaging Winds") or \ + (attributes[x] == "Tornadoes"): + invalid_attributes = 1 + pass + + if invalid_attributes == 1: + self.errorNotice('Invalid Thunderstorm Attribute(s):', "The 'Large Hail', 'Damaging Winds', and 'Tornadoes' attributes may not be used with "\ + "Non-severe thunderstorms (T).\nThese attributes will not be included in the returned grid.") + #print attstring + + if thunder == "Yes (T+)": + # If severe thunder is selected, do not allow SmA or GW attributes to be used. + # Also allow tornadoes to be used (both changes for Version 2.0) + if attributes == []: + attstring = "" + else: + attstring = "" + invalid_attributes = 0 + + for x in range(len(attributes)): + if attributes[x] == "Heavy Rain": + attstring += "HvyRn," + if attributes[x] == "Frequent Lightning": + attstring += "FL," + if attributes[x] == "Large Hail": + attstring += "LgA," + if attributes[x] == "Damaging Winds": + attstring += "DmgW," + if attributes[x] == "Tornadoes": + attstring += "TOR," + # Check for invalid Tstm attributes (cannot use SmA or GW with T+); + # if any are selected - display a pop-up message indicating that these are not valid + # (but let tool run, ignoring the invalid attributes) + if (attributes[x] == "Small Hail") or (attributes[x] == "Gusty Winds"): + invalid_attributes = 1 + pass + + if invalid_attributes == 1: + self.errorNotice('Invalid Thunderstorm Attribute(s):', "The 'Small Hail' and 'Gusty Winds' attributes may not be used with "\ + "Severe thunderstorms (T+).\nThese attributes will not be included in the returned grid.") + #print attstring + + # Initialize the Wx values and keys + wxValues = empty_like(PoP, int8) + keys = [] + + if qualifiertype == "Prob": + wxValues[less_equal(PoP, 100)] = self.getByteValue("Def", type, intensity, thunder, attstring, keys, 
alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + wxValues[less(PoP, 74.5)] = self.getByteValue("Lkly", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + wxValues[less(PoP, 54.5)] = self.getByteValue("Chc", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + wxValues[less(PoP, 24.5)] = self.getByteValue("SChc", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + wxValues[less(PoP, SChc_min_PoP_threshold)] = self.getByteValue("", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + + if qualifiertype == "Cov": + wxValues[less_equal(PoP, 100)] = self.getByteValue("Wide", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + wxValues[less(PoP, 74.5)] = self.getByteValue("Num", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + wxValues[less(PoP, 54.5)] = self.getByteValue("Sct", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + wxValues[less(PoP, 24.5)] = self.getByteValue("Iso", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + wxValues[less(PoP, SChc_min_PoP_threshold)] = self.getByteValue("", type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov) + + print("keys = ", keys) + return (wxValues, keys) + + def getByteValue(self, prevail_cov, type, intensity, thunder, attstring, keys, alt_probcov, alt_catpop_probcov, alt_thunder_probcov): + # Adapted from Convective Smart Tool (billingsley) + + # Use alt_catpop_probcov if set: + if alt_catpop_probcov != "None": + if (prevail_cov == "Def") or (prevail_cov == "Wide"): + prevail_cov = alt_catpop_probcov + + # Use alt_thunder_probcov if set: + if alt_thunder_probcov == "None": + thunder_cov = prevail_cov 
+ else: + thunder_cov = alt_thunder_probcov + + # Set type0cov, type1cov, type2cov + if alt_probcov[0] == "None": + type0cov = prevail_cov + else: + type0cov = alt_probcov[0] + if alt_probcov[1] == "None": + type1cov = prevail_cov + else: + type1cov = alt_probcov[1] + if alt_probcov[2] == "None": + type2cov = prevail_cov + else: + type2cov = alt_probcov[2] + + # Check for intensities with Fog - if intensity is very light (--), light (-) or moderate (m), + # set to ...but also display a pop-up message letting the user know these are not allowed. + for x in range(len(type)): + if type[x] == "F": + if (intensity[x] == "--") or (intensity[x] == "-") or (intensity[x] == "m"): + intensity[x] = "" + self.errorNotice('Invalid Fog intensity:', "The '--', '-', and 'm' intensities may not be used with "\ + "a weather type of Fog (F).\nThe intensity has been set to in the returned grid.") + + + # Now create the uglyString... + if type[1] == "None" and type[2] == "None": + if prevail_cov == "": + uglyString = "" + else: + if thunder == "Yes (T+)": + uglyString = thunder_cov + ":T:+::" + attstring + "^" \ + + type0cov + ":" + type[0] + ":" + intensity[0] + "::" + elif thunder == "Yes (T)": + uglyString = thunder_cov + ":T:::" + attstring + "^" \ + + type0cov + ":" + type[0] + ":" + intensity[0] + "::" + else: + uglyString = type0cov + ":" + type[0] + ":" + intensity[0] + "::" + + print(uglyString) + if type[1] != "None" and type[2] == "None": + if prevail_cov == "": + uglyString = "" + else: + if thunder == "Yes (T+)": + uglyString = thunder_cov + ":T:+::" + attstring + "^" \ + + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ + + type1cov + ":" + type[1] + ":" + intensity[1] + "::" + elif thunder == "Yes (T)": + uglyString = thunder_cov + ":T:::" + attstring + "^" \ + + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ + + type1cov + ":" + type[1] + ":" + intensity[1] + "::" + else: + uglyString = type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ + + 
type1cov + ":" + type[1] + ":" + intensity[1] + "::" + print(uglyString) + if type[1] == "None" and type[2] != "None": + if prevail_cov == "": + uglyString = "" + else: + if thunder == "Yes (T+)": + uglyString = thunder_cov + ":T:+::" + attstring + "^" \ + + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ + + type2cov + ":" + type[2] + ":" + intensity[2] + "::" + elif thunder == "Yes (T)": + uglyString = thunder_cov + ":T:::" + attstring + "^" \ + + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ + + type2cov + ":" + type[2] + ":" + intensity[2] + "::" + else: + uglyString = type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ + + type2cov + ":" + type[2] + ":" + intensity[2] + "::" + print(uglyString) + if type[1] != "None" and type[2] != "None": + if prevail_cov == "": + uglyString = "" + else: + if thunder == "Yes (T+)": + uglyString = thunder_cov + ":T:+::" + attstring + "^" \ + + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ + + type1cov + ":" + type[1] + ":" + intensity[1] + "::^" \ + + type2cov + ":" + type[2] + ":" + intensity[2] + "::" + elif thunder == "Yes (T)": + uglyString = thunder_cov + ":T:::" + attstring + "^" \ + + type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ + + type1cov + ":" + type[1] + ":" + intensity[1] + "::^" \ + + type2cov + ":" + type[2] + ":" + intensity[2] + "::" + else: + uglyString = type0cov + ":" + type[0] + ":" + intensity[0] + "::^" \ + + type1cov + ":" + type[1] + ":" + intensity[1] + "::^" \ + + type2cov + ":" + type[2] + ":" + intensity[2] + "::" + print(uglyString) + + if "" == uglyString: + uglyString = "::::" + return self.getIndex(uglyString, keys) + + def errorNotice(self, title=None, text=None, **kwargs): + "Show the user a multi-line error message." 
+ # Title and kwargs are not used + # They may be used if this method goes back to a Tk implementation + if text is not None: + self.statusBarMsg(text, "A") + +# Tk implementation of errorNotice +# currently can't use due to short-lived nature of Jep scripts +# def errorNotice(self, title=None, text=None, **kwargs): +# "Show the user a multi-line error message." +# if msg is not None: +# Exit = Tkinter.Tk() +# Exit.title(title) +# kwargs['command'] = Exit.destroy +# kwargs['text'] = text # Tkinter.Button(Exit, **kwargs).pack() \ No newline at end of file diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExSS4.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExSS4.py index 103905ab16..8b5b358e30 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExSS4.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExSS4.py @@ -1,77 +1,77 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# ExSS4 -# -# Author: -# ---------------------------------------------------------------------------- - -ToolType = "numeric" -WeatherElementEdited = "T" -from numpy import * -import SmartScript - -VariableList = [("Model:" , "", "D2D_model")] -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, GridTimeRange, Topo, varDict): - "Test Tool to calculate T from model data" - - model = varDict["Model:"] - - # Convert Topo to meters - topo_M = self.convertFtToM(Topo) - - # Make a sounding cubes for T - # Height will increase in the sounding and be the - # first dimension - levels = ["MB1000","MB850", "MB700","MB500"] - gh_Cube, t_Cube = self.makeNumericSounding( - model, "t", levels, GridTimeRange) - - print "Cube shapes ", gh_Cube.shape, t_Cube.shape - - # Make an initial T grid with values of -200 - # This is an out-of-range value to help us identify values that - # have already been set. - T = (Topo * 0) - 200 - - # Work "upward" in the cubes to assign T - # We will only set the value once, i.e. the first time the - # gh height is greater than the Topo - # For each level - for i in xrange(gh_Cube.shape[0]): - # where ( gh > topo and T == -200 ), - # set to t_Cube value, otherwise keep value already set)) - notSet = equal(T, -200) - aboveGround = greater(gh_Cube[i], topo_M) - readyToSet = logical_and(notSet, aboveGround) - T = where(readyToSet, t_Cube[i], T) - - # Convert from K to F - T_F = self.convertKtoF(T) - - return T_F +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. 
+# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ExSS4 +# +# Author: +# ---------------------------------------------------------------------------- + +ToolType = "numeric" +WeatherElementEdited = "T" +from numpy import * +import SmartScript + +VariableList = [("Model:" , "", "D2D_model")] +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, GridTimeRange, Topo, varDict): + "Test Tool to calculate T from model data" + + model = varDict["Model:"] + + # Convert Topo to meters + topo_M = self.convertFtToM(Topo) + + # Make a sounding cubes for T + # Height will increase in the sounding and be the + # first dimension + levels = ["MB1000","MB850", "MB700","MB500"] + gh_Cube, t_Cube = self.makeNumericSounding( + model, "t", levels, GridTimeRange) + + print("Cube shapes ", gh_Cube.shape, t_Cube.shape) + + # Make an initial T grid with values of -200 + # This is an out-of-range value to help us identify values that + # have already been set. + T = (Topo * 0) - 200 + + # Work "upward" in the cubes to assign T + # We will only set the value once, i.e. 
the first time the + # gh height is greater than the Topo + # For each level + for i in range(gh_Cube.shape[0]): + # where ( gh > topo and T == -200 ), + # set to t_Cube value, otherwise keep value already set)) + notSet = equal(T, -200) + aboveGround = greater(gh_Cube[i], topo_M) + readyToSet = logical_and(notSet, aboveGround) + T = where(readyToSet, t_Cube[i], T) + + # Convert from K to F + T_F = self.convertKtoF(T) + + return T_F diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExSS5.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExSS5.py index a7f5c0547e..01d8c43c56 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExSS5.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExSS5.py @@ -1,84 +1,84 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# ExSS5 -# -# Author: -# ---------------------------------------------------------------------------- - -ToolType = "numeric" -WeatherElementEdited = "T" -from numpy import * -import SmartScript - -VariableList = [("Model:" , "", "D2D_model")] -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, GridTimeRange, Topo, varDict): - "This tool accesses QPF and tp grids directly" - - model = varDict["Model:"] - - # Convert Topo to meters - topo_M = self.convertFtToM(Topo) - - # Make a sounding cubes for T - # Height will increase in the sounding and be the - # first dimension - levels = ["MB1000","MB850", "MB700","MB500"] - gh_Cube, t_Cube = self.makeNumericSounding( - model, "t", levels, GridTimeRange) - - print "Cube shapes ", gh_Cube.shape, t_Cube.shape - - # Make an initial T grid with values of -200 - # This is an out-of-range value to help us identify values that - # have already been set. - T = (Topo * 0) - 200 - - # Work "upward" in the cubes to assign T - # We will only set the value once, i.e. the first time the - # gh height is greater than the Topo - # For each level - for i in xrange(gh_Cube.shape[0]): - # where ( gh > topo and T == -200 ), - # set to t_Cube value, otherwise keep value already set)) - notSet = equal(T, -200) - aboveGround = greater(gh_Cube[i], topo_M) - readyToSet = logical_and(notSet, aboveGround) - try: - # Interpolate between levels - T = where(readyToSet, - self.interpolateScalarValues(topo_M,(gh_Cube[i-1],t_Cube[i-1]),(gh_Cube[i],t_Cube[i])), T) - except: - # Handle first level by extrapolating - T = where(readyToSet, - self.extrapolate(topo_M,(gh_Cube[0],t_Cube[0]),(gh_Cube[1],t_Cube[1])), T) - - # Convert from K to F - T_F = self.convertKtoF(T) - - return T_F +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ExSS5 +# +# Author: +# ---------------------------------------------------------------------------- + +ToolType = "numeric" +WeatherElementEdited = "T" +from numpy import * +import SmartScript + +VariableList = [("Model:" , "", "D2D_model")] +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, GridTimeRange, Topo, varDict): + "This tool accesses QPF and tp grids directly" + + model = varDict["Model:"] + + # Convert Topo to meters + topo_M = self.convertFtToM(Topo) + + # Make a sounding cubes for T + # Height will increase in the sounding and be the + # first dimension + levels = ["MB1000","MB850", "MB700","MB500"] + gh_Cube, t_Cube = self.makeNumericSounding( + model, "t", levels, GridTimeRange) + + print("Cube shapes ", gh_Cube.shape, t_Cube.shape) + + # Make an initial T grid with values of -200 + # This is an out-of-range value to help us identify values that + # have already been set. + T = (Topo * 0) - 200 + + # Work "upward" in the cubes to assign T + # We will only set the value once, i.e. 
the first time the + # gh height is greater than the Topo + # For each level + for i in range(gh_Cube.shape[0]): + # where ( gh > topo and T == -200 ), + # set to t_Cube value, otherwise keep value already set)) + notSet = equal(T, -200) + aboveGround = greater(gh_Cube[i], topo_M) + readyToSet = logical_and(notSet, aboveGround) + try: + # Interpolate between levels + T = where(readyToSet, + self.interpolateScalarValues(topo_M,(gh_Cube[i-1],t_Cube[i-1]),(gh_Cube[i],t_Cube[i])), T) + except: + # Handle first level by extrapolating + T = where(readyToSet, + self.extrapolate(topo_M,(gh_Cube[0],t_Cube[0]),(gh_Cube[1],t_Cube[1])), T) + + # Convert from K to F + T_F = self.convertKtoF(T) + + return T_F diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExUtil1.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExUtil1.py index 6f1f9754bb..1110a9ca70 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExUtil1.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ExUtil1.py @@ -1,79 +1,79 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ExUtil1 -# -# Author: -# ---------------------------------------------------------------------------- - -ToolType = "numeric" -WeatherElementEdited = "T" -from numpy import * - -import SmartScript -import Common - -VariableList = [("Model:" , "", "D2D_model")] - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - self._dbss = dbss - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, GridTimeRange, Topo, varDict): - "This tool accesses T grids directly" - self._common = Common.Common(self._dbss) - - model = varDict["Model:"] - - # Convert Topo to meters - topo_M = self._common._convertFtToM(Topo) - - # Make a sounding cubes for T - # Height will increase in the sounding and be the - # first dimension - levels = ["MB1000","MB850", "MB700","MB500"] - gh_Cube, t_Cube = self.makeNumericSounding( - model, "t", levels, GridTimeRange) - - print "Cube shapes ", gh_Cube.shape, t_Cube.shape - - # Make an initial T grid with values of -200 - # This is an out-of-range value to help us identify values that - # have already been set. - T = (Topo * 0) - 200 - - # Work "upward" in the cubes to assign T - # We will only set the value once, i.e. the first time the - # gh height is greater than the Topo - # For each level - for i in xrange(gh_Cube.shape[0]): - # where ( gh > topo and T == -200), - # set to t_Cube value, otherwise keep value already set)) - T = where(logical_and(greater(gh_Cube[i], topo_M), equal(T,-200)), t_Cube[i], T) - - # Convert from K to F - T_F = self.convertKtoF(T) - - return T_F +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ExUtil1 +# +# Author: +# ---------------------------------------------------------------------------- + +ToolType = "numeric" +WeatherElementEdited = "T" +from numpy import * + +import SmartScript +import Common + +VariableList = [("Model:" , "", "D2D_model")] + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + self._dbss = dbss + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, GridTimeRange, Topo, varDict): + "This tool accesses T grids directly" + self._common = Common.Common(self._dbss) + + model = varDict["Model:"] + + # Convert Topo to meters + topo_M = self._common._convertFtToM(Topo) + + # Make a sounding cubes for T + # Height will increase in the sounding and be the + # first dimension + levels = ["MB1000","MB850", "MB700","MB500"] + gh_Cube, t_Cube = self.makeNumericSounding( + model, "t", levels, GridTimeRange) + + print("Cube shapes ", gh_Cube.shape, t_Cube.shape) + + # Make an initial T grid with values of -200 + # This is an out-of-range value to help us identify values that + # have already been set. + T = (Topo * 0) - 200 + + # Work "upward" in the cubes to assign T + # We will only set the value once, i.e. 
the first time the + # gh height is greater than the Topo + # For each level + for i in range(gh_Cube.shape[0]): + # where ( gh > topo and T == -200), + # set to t_Cube value, otherwise keep value already set)) + T = where(logical_and(greater(gh_Cube[i], topo_M), equal(T,-200)), t_Cube[i], T) + + # Convert from K to F + T_F = self.convertKtoF(T) + + return T_F diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Haines.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Haines.py index 90430807a3..b9c032e694 100755 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Haines.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Haines.py @@ -1,175 +1,175 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Haines.py -# -# Author: dtomalak -# ---------------------------------------------------------------------------- - - -ToolType = "numeric" -WeatherElementEdited = "Haines" -from numpy import * -HideTool = 0 -#ScreenList = ["Haines"] -# You can screen the elements for which your tool will appear by using -# a ScreenList. 
For example: -# -#ScreenList = ["T","Td"] -#ScreenList = ["SCALAR","VECTOR","WEATHER","DISCRETE"] - -# If desired, Set up variables to be solicited from the user: -# VariableList = [ -# ("Variable name1" , defaultValue1, "numeric"), -# ("Variable name2" , "default value2", "alphaNumeric"), -# ("Variable name3" , ["default value1", "default value2"], "check", -# ["value1", "value2", "value3"]), -# ("Variable name4" , "default value4", "radio", -# ["value1", "value2", "value3"]), -# ("Variable name5" , defaultValue, "scale", -# [minValue, maxValue], resolution), -# ("Variable name6" , "", "model"), -# ("Variable name7" , "", "D2D_model"), -# ("Label contents" , "", "label"), -# ("", dialogHeight, "scrollbar"), -# ] - -# Set up Class -import SmartScript -import string, time -import AbsTime, TimeRange -# For available commands, see SmartScript - - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Required Method: Execute - # %comment - # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... 
- - - def execute(self, GridTimeRange, varDict): - "Sets Haines according to selected model" - - - fwModel = self.getObject("FireModel", "ModelType") - - if fwModel == "NAM12": - modelSource = "_D2D_NAM12" - elif fwModel == "NAM40": - modelSource = "_D2D_NAM40" - else: - modelSource = "_D2D_GFS40" - - - ############################################ - # - # Valid Modes are 'Low', 'Mid', or 'High' - # - ############################################ - - Mode = 'Low' - -## month = time.strftime('%b%d', time.gmtime()) -## monthrun = month + run - site = self.getSiteID() -## model = site + modelSource + monthrun - model = site + modelSource - print "Using " + model + " for Haines Index Calculation" - print "Haines time range is: \n" + `GridTimeRange` - - T950 = self.getGrids(model,"t","MB950",GridTimeRange,noDataError=0) - if T950 is None: - self.noData() - - T850 = self.getGrids(model,"t","MB850",GridTimeRange,noDataError=0) - if T850 is None: - self.noData() - - T700 = self.getGrids(model,"t","MB700",GridTimeRange,noDataError=0) - if T850 is None: - self.noData() - - T500 = self.getGrids(model,"t","MB500",GridTimeRange,noDataError=0) - if T850 is None: - self.noData() - - RH850 = self.getGrids(model,"rh","MB850",GridTimeRange,noDataError=0) - if RH850 is None: - self.noData() - - RH700 = self.getGrids(model,"rh","MB700",GridTimeRange,noDataError=0) - if RH850 is None: - self.noData() - - # - # Calculate 850 MB Depressions - # - - Dwpt_Factor_850 = (log(RH850/100.0) + ((17.27 * (T850 - 273.3))/(237.3 + (T850 - 273.31))))/ 17.27 - DD850 = T850 - ((Dwpt_Factor_850 * 237.3) / ( 1.0 - Dwpt_Factor_850 ) + 273.3) - - # - # Calculate 700 MB Depressions - # - - Dwpt_Factor_700 = (log(RH700/100.0) + ((17.27 * (T700 - 273.3))/(237.3 + (T700 - 273.31))))/ 17.27 - DD700 = T700 - ((Dwpt_Factor_700 * 237.3) / ( 1.0 - Dwpt_Factor_700 ) + 273.3) - - if string.upper(Mode) == 'LOW': - - # find T difference between levels - Tdiff = (T950 - T850) - - # compute A & B terms - Aterm = 2 - Bterm 
= 2 - Aterm = where(less_equal(Tdiff,3),1,Aterm) - Aterm[greater_equal(Tdiff,8)] = 3 - Bterm = where(less_equal(DD850,5),1,Bterm) - Bterm[greater_equal(DD850,10)] = 3 - - # compute Haines - Haines = Aterm + Bterm - - return Haines - - if string.upper(Mode) == 'MID': - - # find T difference between levels - Tdiff = (T850 - T700) - - # compute A & B terms - Aterm = 2 - Bterm = 2 - Aterm = where(less_equal(Tdiff,5),1,Aterm) - Aterm[greater_equal(Tdiff,11)] = 3 - Bterm = where(less_equal(DD850,5),1,Bterm) - Bterm[greater_equal(DD850,13)] = 3 - - # compute Haines - Haines = Aterm + Bterm - - return Haines - - if string.upper(Mode) == 'HIGH': - - # find T difference between levels - Tdiff = (T700 - T500) - - # compute A & B terms - Aterm = 2 - Bterm = 2 - Aterm = where(less_equal(Tdiff,17),1,Aterm) - Aterm[greater_equal(Tdiff,22)] = 3 - Bterm = where(less_equal(DD700,14),3,Bterm) - Bterm[greater_equal(DD700,21)] = 1 - - # compute Haines - Haines = Aterm + Bterm - - return Haines +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Haines.py +# +# Author: dtomalak +# ---------------------------------------------------------------------------- + + +ToolType = "numeric" +WeatherElementEdited = "Haines" +from numpy import * +HideTool = 0 +#ScreenList = ["Haines"] +# You can screen the elements for which your tool will appear by using +# a ScreenList. 
For example: +# +#ScreenList = ["T","Td"] +#ScreenList = ["SCALAR","VECTOR","WEATHER","DISCRETE"] + +# If desired, Set up variables to be solicited from the user: +# VariableList = [ +# ("Variable name1" , defaultValue1, "numeric"), +# ("Variable name2" , "default value2", "alphaNumeric"), +# ("Variable name3" , ["default value1", "default value2"], "check", +# ["value1", "value2", "value3"]), +# ("Variable name4" , "default value4", "radio", +# ["value1", "value2", "value3"]), +# ("Variable name5" , defaultValue, "scale", +# [minValue, maxValue], resolution), +# ("Variable name6" , "", "model"), +# ("Variable name7" , "", "D2D_model"), +# ("Label contents" , "", "label"), +# ("", dialogHeight, "scrollbar"), +# ] + +# Set up Class +import SmartScript +import string, time +import AbsTime, TimeRange +# For available commands, see SmartScript + + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Required Method: Execute + # %comment + # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... 
+ + + def execute(self, GridTimeRange, varDict): + "Sets Haines according to selected model" + + + fwModel = self.getObject("FireModel", "ModelType") + + if fwModel == "NAM12": + modelSource = "_D2D_NAM12" + elif fwModel == "NAM40": + modelSource = "_D2D_NAM40" + else: + modelSource = "_D2D_GFS40" + + + ############################################ + # + # Valid Modes are 'Low', 'Mid', or 'High' + # + ############################################ + + Mode = 'Low' + +## month = time.strftime('%b%d', time.gmtime()) +## monthrun = month + run + site = self.getSiteID() +## model = site + modelSource + monthrun + model = site + modelSource + print("Using " + model + " for Haines Index Calculation") + print("Haines time range is: \n" + repr(GridTimeRange)) + + T950 = self.getGrids(model,"t","MB950",GridTimeRange,noDataError=0) + if T950 is None: + self.noData() + + T850 = self.getGrids(model,"t","MB850",GridTimeRange,noDataError=0) + if T850 is None: + self.noData() + + T700 = self.getGrids(model,"t","MB700",GridTimeRange,noDataError=0) + if T850 is None: + self.noData() + + T500 = self.getGrids(model,"t","MB500",GridTimeRange,noDataError=0) + if T850 is None: + self.noData() + + RH850 = self.getGrids(model,"rh","MB850",GridTimeRange,noDataError=0) + if RH850 is None: + self.noData() + + RH700 = self.getGrids(model,"rh","MB700",GridTimeRange,noDataError=0) + if RH850 is None: + self.noData() + + # + # Calculate 850 MB Depressions + # + + Dwpt_Factor_850 = (log(RH850/100.0) + ((17.27 * (T850 - 273.3))/(237.3 + (T850 - 273.31))))/ 17.27 + DD850 = T850 - ((Dwpt_Factor_850 * 237.3) / ( 1.0 - Dwpt_Factor_850 ) + 273.3) + + # + # Calculate 700 MB Depressions + # + + Dwpt_Factor_700 = (log(RH700/100.0) + ((17.27 * (T700 - 273.3))/(237.3 + (T700 - 273.31))))/ 17.27 + DD700 = T700 - ((Dwpt_Factor_700 * 237.3) / ( 1.0 - Dwpt_Factor_700 ) + 273.3) + + if string.upper(Mode) == 'LOW': + + # find T difference between levels + Tdiff = (T950 - T850) + + # compute A & B terms + Aterm = 2 + 
Bterm = 2 + Aterm = where(less_equal(Tdiff,3),1,Aterm) + Aterm[greater_equal(Tdiff,8)] = 3 + Bterm = where(less_equal(DD850,5),1,Bterm) + Bterm[greater_equal(DD850,10)] = 3 + + # compute Haines + Haines = Aterm + Bterm + + return Haines + + if string.upper(Mode) == 'MID': + + # find T difference between levels + Tdiff = (T850 - T700) + + # compute A & B terms + Aterm = 2 + Bterm = 2 + Aterm = where(less_equal(Tdiff,5),1,Aterm) + Aterm[greater_equal(Tdiff,11)] = 3 + Bterm = where(less_equal(DD850,5),1,Bterm) + Bterm[greater_equal(DD850,13)] = 3 + + # compute Haines + Haines = Aterm + Bterm + + return Haines + + if string.upper(Mode) == 'HIGH': + + # find T difference between levels + Tdiff = (T700 - T500) + + # compute A & B terms + Aterm = 2 + Bterm = 2 + Aterm = where(less_equal(Tdiff,17),1,Aterm) + Aterm[greater_equal(Tdiff,22)] = 3 + Bterm = where(less_equal(DD700,14),3,Bterm) + Bterm[greater_equal(DD700,21)] = 1 + + # compute Haines + Haines = Aterm + Bterm + + return Haines diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/LAL_Tool.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/LAL_Tool.py index 6921fbdcbe..c473ccfbe8 100755 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/LAL_Tool.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/LAL_Tool.py @@ -1,60 +1,60 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# LAL_Tool - Uses weather, pop and qpf to assign LAL -# -# No local configuration is required for this tool -# -# Author: Brian Brong 8-22-02 -# -# Ported to AWIPS II by Tom LeFebvre -# -# ---------------------------------------------------------------------------- - -ToolType = "numeric" -WeatherElementEdited = "LAL" -from numpy import * -HideTool = 0 -from WxMethods import * - -# Set up Class -import SmartScript - -class Tool(SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, Wx, PoP, QPF, LAL, varDict): - "Assigns LAL based on Wx, PoP, and QPF" - LAL = 1 - - # QPF mask for LAL 6 - qpfmask = less(QPF, 0.10) - - # Wx mask for Thunder in the Wx grids - wxmask = self.wxMask(Wx, ":T:") - - #dry thunderstorm Mask - dryTmask = logical_and(qpfmask, wxmask) - - print "Dry mask has", sum(sum(dryTmask)), "points" - - # Masks for LAL values 2-6 based on PoP and wxmask - # Trouble with using the Wx grid is GFE does not understand - # Widely Scattered for LAL 3. Will use a PoP range 20 to 35 for LAL = 3 - lal2 = logical_and(wxmask, logical_and(greater_equal(PoP, 10), less(PoP, 20))) - lal3 = logical_and(wxmask, logical_and(greater_equal(PoP, 20), less(PoP, 35))) - lal4 = logical_and(wxmask, logical_and(greater_equal(PoP, 35), less(PoP, 60))) - lal5 = logical_and(wxmask, greater_equal(PoP, 60)) - lal6 = logical_and(dryTmask, greater_equal(LAL,3)) - - # Assign LAL values 2-5 based on LAL masks - LAL = where(lal2, 2, where(lal3, 3, where(lal4, 4, where(lal5, 5, LAL)))) - - # Assign LAL 6 where dryTmask and LAL >= 3 - LAL[lal6] = 6 - - # Return the new value - return LAL +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# LAL_Tool - Uses weather, pop and qpf to assign LAL +# +# No local configuration is required for this tool +# +# Author: Brian Brong 8-22-02 +# +# Ported to AWIPS II by Tom LeFebvre +# +# ---------------------------------------------------------------------------- + +ToolType = "numeric" +WeatherElementEdited = "LAL" +from numpy import * +HideTool = 0 +from WxMethods import * + +# Set up Class +import SmartScript + +class Tool(SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, Wx, PoP, QPF, LAL, varDict): + "Assigns LAL based on Wx, PoP, and QPF" + LAL = 1 + + # QPF mask for LAL 6 + qpfmask = less(QPF, 0.10) + + # Wx mask for Thunder in the Wx grids + wxmask = self.wxMask(Wx, ":T:") + + #dry thunderstorm Mask + dryTmask = logical_and(qpfmask, wxmask) + + print("Dry mask has", sum(sum(dryTmask)), "points") + + # Masks for LAL values 2-6 based on PoP and wxmask + # Trouble with using the Wx grid is GFE does not understand + # Widely Scattered for LAL 3. 
Will use a PoP range 20 to 35 for LAL = 3 + lal2 = logical_and(wxmask, logical_and(greater_equal(PoP, 10), less(PoP, 20))) + lal3 = logical_and(wxmask, logical_and(greater_equal(PoP, 20), less(PoP, 35))) + lal4 = logical_and(wxmask, logical_and(greater_equal(PoP, 35), less(PoP, 60))) + lal5 = logical_and(wxmask, greater_equal(PoP, 60)) + lal6 = logical_and(dryTmask, greater_equal(LAL,3)) + + # Assign LAL values 2-5 based on LAL masks + LAL = where(lal2, 2, where(lal3, 3, where(lal4, 4, where(lal5, 5, LAL)))) + + # Assign LAL 6 where dryTmask and LAL >= 3 + LAL[lal6] = 6 + + # Return the new value + return LAL diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MakeTmpGrid.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MakeTmpGrid.py index 96c5aecc6b..e4a8c9ad0b 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MakeTmpGrid.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MakeTmpGrid.py @@ -1,133 +1,133 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# MakeTmpGrid - Creates a temporary grid of the active element. The new grid -# has model name of "Temp" and element name of "tmp" -# -# Author: Paul Jendrowski WFO Blacksburg, VA (RNK) -# paul.jendrowski@noaa.gov -# Version: 3.0 Date: 11/08/2004 -# Change History: -# 11/08/2004 - Added support for Wx and Vector grids. -# 07/17/2003 - Changed timeConstraints in call to createGrid for RPP21.a7 -# values changed from hours to seconds -# Added logic to let calling procedure optionally specify -# name of new grid in argDict (backward compatible) -# 02/21/2003 - original version -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -ToolType = "numeric" -WeatherElementEdited = "None" - -# You can screen the elements for which your tool will appear by using -# a ScreenList. For example: -# -#ScreenList = ["T","Td"] -ScreenList = ["SCALAR","VECTOR","WEATHER"] - -# Set up Class -import SmartScript -import GridInfo - -## For available commands, see SmartScript - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Required Method: Execute - # Called once for each grid - # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... 
- - def execute(self, WEname, variableElement, variableElement_GridInfo, - GridTimeRange, varDict): - "Creates a temporary grid of the active element" - - if self._gridName == "None": - name = "tmp" + WEname - else: - name = self._gridName - - theParm = self.getParm("Fcst", WEname, "SFC") - pInfo = theParm.getGridInfo() - if pInfo is not None: - pInfo = GridInfo.GridInfo(gridParmInfo=pInfo, gridTime=GridTimeRange) - pType = variableElement_GridInfo.getGridType().ordinal() - #print WEname,"is type",pType -# Not exactly sure how to set the timeConstraints, but (1,1,1) seems to work - # Determine new value - hr = 3600 - #print "_getGfeHourVersion=",hr - tc = (0, 1 * hr, 1 * hr) - if pType == 1: - self.createGrid(self._model, name, "SCALAR", variableElement, GridTimeRange, - descriptiveName = name, timeConstraints = tc, - precision = pInfo.precision(), - minAllowedValue = pInfo.minLimit(), - maxAllowedValue = pInfo.maxLimit(), - units = pInfo.units(), - rateParm = pInfo.rateParm()) - elif pType == 2: - self.createGrid(self._model, name, "VECTOR", variableElement, GridTimeRange, - descriptiveName = name) - elif pType == 3: - self.createGrid(self._model, name, "WEATHER", variableElement, GridTimeRange, - descriptiveName = name) - else: - self.noData("This tool does not support element enumeration=" +`pType`+"!") - - # Return the new value - return None - - def preProcessTool(self, varDict): - # Set up for invocation from user as Edit Action or Procedure - self._model = "Temp" - self._gridName = "None" - self._calledfrom = "User" - - if varDict != None: - if varDict.has_key("Model"): - self._model = varDict["Model"] - if varDict.has_key("gridName"): - self._gridName = varDict["gridName"] - self._calledfrom = "Proc" - -# The post process tool sets the temporary grid as active and fits the color -# bar to fit to the data. 
- def postProcessTool(self, WEname, ToolTimeRange): - # If this was called interactively, make the tmp grid active - #print "In postProcessTool" -## if WEname == "Wind": -## return - if self._calledfrom == "User": - name = "tmp" + WEname - try: - self.setActiveElement(self._model, name, "SFC", ToolTimeRange, fitToData=1) - except: - pass +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# MakeTmpGrid - Creates a temporary grid of the active element. The new grid +# has model name of "Temp" and element name of "tmp" +# +# Author: Paul Jendrowski WFO Blacksburg, VA (RNK) +# paul.jendrowski@noaa.gov +# Version: 3.0 Date: 11/08/2004 +# Change History: +# 11/08/2004 - Added support for Wx and Vector grids. +# 07/17/2003 - Changed timeConstraints in call to createGrid for RPP21.a7 +# values changed from hours to seconds +# Added logic to let calling procedure optionally specify +# name of new grid in argDict (backward compatible) +# 02/21/2003 - original version +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +ToolType = "numeric" +WeatherElementEdited = "None" + +# You can screen the elements for which your tool will appear by using +# a ScreenList. 
For example: +# +#ScreenList = ["T","Td"] +ScreenList = ["SCALAR","VECTOR","WEATHER"] + +# Set up Class +import SmartScript +import GridInfo + +## For available commands, see SmartScript + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Required Method: Execute + # Called once for each grid + # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... + + def execute(self, WEname, variableElement, variableElement_GridInfo, + GridTimeRange, varDict): + "Creates a temporary grid of the active element" + + if self._gridName == "None": + name = "tmp" + WEname + else: + name = self._gridName + + theParm = self.getParm("Fcst", WEname, "SFC") + pInfo = theParm.getGridInfo() + if pInfo is not None: + pInfo = GridInfo.GridInfo(gridParmInfo=pInfo, gridTime=GridTimeRange) + pType = variableElement_GridInfo.getGridType().ordinal() + #print WEname,"is type",pType +# Not exactly sure how to set the timeConstraints, but (1,1,1) seems to work + # Determine new value + hr = 3600 + #print "_getGfeHourVersion=",hr + tc = (0, 1 * hr, 1 * hr) + if pType == 1: + self.createGrid(self._model, name, "SCALAR", variableElement, GridTimeRange, + descriptiveName = name, timeConstraints = tc, + precision = pInfo.precision(), + minAllowedValue = pInfo.minLimit(), + maxAllowedValue = pInfo.maxLimit(), + units = pInfo.units(), + rateParm = pInfo.rateParm()) + elif pType == 2: + self.createGrid(self._model, name, "VECTOR", variableElement, GridTimeRange, + descriptiveName = name) + elif pType == 3: + self.createGrid(self._model, name, "WEATHER", variableElement, GridTimeRange, + descriptiveName = name) + else: + self.noData("This tool does not support element enumeration=" +repr(pType)+"!") + + # Return the new value + return None + + def preProcessTool(self, varDict): + # Set up for invocation from user as Edit Action or Procedure + self._model = "Temp" + self._gridName = "None" + self._calledfrom = "User" + 
+ if varDict != None: + if "Model" in varDict: + self._model = varDict["Model"] + if "gridName" in varDict: + self._gridName = varDict["gridName"] + self._calledfrom = "Proc" + +# The post process tool sets the temporary grid as active and fits the color +# bar to fit to the data. + def postProcessTool(self, WEname, ToolTimeRange): + # If this was called interactively, make the tmp grid active + #print "In postProcessTool" +## if WEname == "Wind": +## return + if self._calledfrom == "User": + name = "tmp" + WEname + try: + self.setActiveElement(self._model, name, "SFC", ToolTimeRange, fitToData=1) + except: + pass diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MixHgt_FWF.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MixHgt_FWF.py index 91efc5904a..b534e66383 100755 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MixHgt_FWF.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MixHgt_FWF.py @@ -1,196 +1,196 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# MixHgt_FWF.py -# -# Author: dtomalak -# ---------------------------------------------------------------------------- - - -ToolType = "numeric" -WeatherElementEdited = "MixHgt" -from numpy import * -HideTool = 0 - -# You can screen the elements for which your tool will appear by using -# a ScreenList. 
For example: -# -#ScreenList = ["T","Td"] -#ScreenList = ["SCALAR","VECTOR","WEATHER","DISCRETE"] - -# If desired, Set up variables to be solicited from the user: -# VariableList = [ -# ("Variable name1" , defaultValue1, "numeric"), -# ("Variable name2" , "default value2", "alphaNumeric"), -# ("Variable name3" , ["default value1", "default value2"], "check", -# ["value1", "value2", "value3"]), -# ("Variable name4" , "default value4", "radio", -# ["value1", "value2", "value3"]), -# ("Variable name5" , defaultValue, "scale", -# [minValue, maxValue], resolution), -# ("Variable name6" , "", "model"), -# ("Variable name7" , "", "D2D_model"), -# ("Label contents" , "", "label"), -# ("", dialogHeight, "scrollbar"), -# ] - -# Set up Class -import SmartScript -import string, time -# For available commands, see SmartScript - - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, GridTimeRange, T, Topo): - "Calculates mixing height by looking for lapse rate changes above the surface" - - fwModel = self.getObject("FireModel", "ModelType") - - if fwModel == "GFS40": - modelSource = "_D2D_GFS40" - elif fwModel == "NAM40": - modelSource = "_D2D_NAM40" - else: - modelSource = "_D2D_NAM12" - - # - # This determines the correct full model run to use. - # - - ## hour = int(time.strftime('%H', time.gmtime())) - - ## if hour > 15 or hour < 4: -## run = '_1200' -## else: -## run = '_0000' -## month = time.strftime('%Y%m%d', time.gmtime()) -## day = time.strftime('%d', time.gmtime()) -## monthrun = month + run - site = self.getSiteID() -## model = site + modelSource + monthrun - model = site + modelSource - sfc_model = model - - if fwModel == "NAM40": - sfc_model = site + "_D2D_NAM12" -## sfc_model = site + "_GRID_D2D_NAM12" + monthrun - - print "Using " + model + " for Mixing Height Calculation" - print "MixHgt time range is: \n" + `GridTimeRange` - - # - # Set the levels of the eta sounding to use. 
- # - - self.__D2Dmodel = model - soundingLevels = ["MB975", "MB950", "MB925", "MB900", "MB875","MB850", - "MB825","MB800","MB775","MB750","MB725","MB700", - "MB650","MB600", "MB550","MB500"] - - # - # Get the surface pressure and temperature from the eta model. - # - - - self.__D2Dmodel = sfc_model - sfc_pressure = self.getGrids(sfc_model, 'p', 'SFC', GridTimeRange) - sfc_pressure = sfc_pressure / 100.0 - sfc_model_temp = self.getGrids(sfc_model, 't', 'FHAG2', GridTimeRange) - self.__D2Dmodel = model - - # For working with sounding, convert topo field to meters and sfc temp to kelvin - # There is a two degree offset to allow mixing when the model sounding is not truly - # dry adiabatic in the low levels. - - sfcTempK = self.convertFtoK(T) + 0.00 - topoMeters = Topo/3.281 - - # - # Compute the difference between the model surface temp and the forecast surface temp. - # - - thetaOffset = where(less(sfcTempK, sfc_model_temp), (sfc_model_temp - sfcTempK), 0.00) - - # Create Surface Theta - sfcTheta = sfcTempK * pow((1000 / sfc_pressure), 0.286) - - # Create the Height and Temperature Cubes - sounding = self.makeNumericSounding(self.__D2Dmodel, 't', soundingLevels, GridTimeRange, noDataError=0) - if sounding is None: - self.noData() - ghCube, tCube = sounding - - # Initialize Mixing Hgt Grid - MixHgt = zeros(T.shape) - - # Initialize the mixDepth variable. - mixDepth = zeros(T.shape) - - # Initialize the bottom of the sounding. 1.0 is a seed value to avoid divide by zero. - lastTheta = sfcTheta - lastghCube = topoMeters + 1.0 - - # Climb through sounding, checking to see if the surface theta is less than the upper theta - for level in xrange(ghCube.shape[0]): - # Get pressure at the current level. - pressure = string.atof(soundingLevels[level][2:]) - - # Compute the potential temperature of the environment at the current level. 
- potTemp = (tCube[level] - thetaOffset)* pow((1000 / pressure), 0.286) - - # The mixing height is ready to set when the environmental potential temperature - # is greater than the parcel potential temperature, and the height at the current - # level is above the surface. - readyToSet = logical_and(less_equal(topoMeters, ghCube[level]), logical_and(equal(MixHgt,0), greater(potTemp, sfcTheta))) - - # This part allows the mixing height to be between levels...ie. the sounding could - # have crossed before the current height. This will bring it back to an offset of - # that level. - - - # To turn off adjustment, comment out the next line, and uncomment the other. - # Adjusted: - trueMixHgt = lastghCube + (((sfcTheta - lastTheta)/(potTemp - lastTheta))*(ghCube[level] - lastghCube)) - # Unadjusted: - #trueMixHgt = (ghCube[level] - topoMeters) - - # Apply a check that the adjusted mixing height can never be greater than the - # unadjusted mixing height. - trueMixHgtCheck = (ghCube[level] - topoMeters) - trueMixHgt = where(greater(trueMixHgt, trueMixHgtCheck), trueMixHgtCheck, trueMixHgt) - - # If the surface temp is greater than the eta...mix to the height where the surface - # theta is equal to the environment - mixDepth = where(greater_equal(sfcTempK, sfc_model_temp), trueMixHgt, mixDepth) - - # If the surface temp is colder than the eta, use the eta surface temp to establish the unadjusted height, - # then reduce it by a percentage of the surface temp/mix height temp difference. - mixDepth = where(logical_and(less(sfcTempK, sfc_model_temp), greater_equal(sfcTempK, tCube[level])), ((sfcTempK - tCube[level])/(sfc_model_temp - tCube[level]) * trueMixHgt), mixDepth) - - # If the surface is even colder than the mixing height temp...do not allow mixing. 
- mixDepth[less(sfcTempK,tCube[level])] = 1.0 - - # Set the mixing height for valid points - MixHgt = where(readyToSet, mixDepth, MixHgt) - - # Set parameters for next iteration of loop - lastTheta = potTemp - lastghCube = (ghCube[level] - topoMeters) - - # - # Final adjustments - # - - # Change mixing height from meters to feet - MixHgt = MixHgt * 3.281 - - # Set a minimum value of 250 feet to account for plume mixing - MixHgt[less(MixHgt, 250.0)] = 250.0 - - # return the mixing height - return MixHgt +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# MixHgt_FWF.py +# +# Author: dtomalak +# ---------------------------------------------------------------------------- + + +ToolType = "numeric" +WeatherElementEdited = "MixHgt" +from numpy import * +HideTool = 0 + +# You can screen the elements for which your tool will appear by using +# a ScreenList. 
For example: +# +#ScreenList = ["T","Td"] +#ScreenList = ["SCALAR","VECTOR","WEATHER","DISCRETE"] + +# If desired, Set up variables to be solicited from the user: +# VariableList = [ +# ("Variable name1" , defaultValue1, "numeric"), +# ("Variable name2" , "default value2", "alphaNumeric"), +# ("Variable name3" , ["default value1", "default value2"], "check", +# ["value1", "value2", "value3"]), +# ("Variable name4" , "default value4", "radio", +# ["value1", "value2", "value3"]), +# ("Variable name5" , defaultValue, "scale", +# [minValue, maxValue], resolution), +# ("Variable name6" , "", "model"), +# ("Variable name7" , "", "D2D_model"), +# ("Label contents" , "", "label"), +# ("", dialogHeight, "scrollbar"), +# ] + +# Set up Class +import SmartScript +import string, time +# For available commands, see SmartScript + + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, GridTimeRange, T, Topo): + "Calculates mixing height by looking for lapse rate changes above the surface" + + fwModel = self.getObject("FireModel", "ModelType") + + if fwModel == "GFS40": + modelSource = "_D2D_GFS40" + elif fwModel == "NAM40": + modelSource = "_D2D_NAM40" + else: + modelSource = "_D2D_NAM12" + + # + # This determines the correct full model run to use. + # + + ## hour = int(time.strftime('%H', time.gmtime())) + + ## if hour > 15 or hour < 4: +## run = '_1200' +## else: +## run = '_0000' +## month = time.strftime('%Y%m%d', time.gmtime()) +## day = time.strftime('%d', time.gmtime()) +## monthrun = month + run + site = self.getSiteID() +## model = site + modelSource + monthrun + model = site + modelSource + sfc_model = model + + if fwModel == "NAM40": + sfc_model = site + "_D2D_NAM12" +## sfc_model = site + "_GRID_D2D_NAM12" + monthrun + + print("Using " + model + " for Mixing Height Calculation") + print("MixHgt time range is: \n" + repr(GridTimeRange)) + + # + # Set the levels of the eta sounding to use. 
+ # + + self.__D2Dmodel = model + soundingLevels = ["MB975", "MB950", "MB925", "MB900", "MB875","MB850", + "MB825","MB800","MB775","MB750","MB725","MB700", + "MB650","MB600", "MB550","MB500"] + + # + # Get the surface pressure and temperature from the eta model. + # + + + self.__D2Dmodel = sfc_model + sfc_pressure = self.getGrids(sfc_model, 'p', 'SFC', GridTimeRange) + sfc_pressure = sfc_pressure / 100.0 + sfc_model_temp = self.getGrids(sfc_model, 't', 'FHAG2', GridTimeRange) + self.__D2Dmodel = model + + # For working with sounding, convert topo field to meters and sfc temp to kelvin + # There is a two degree offset to allow mixing when the model sounding is not truly + # dry adiabatic in the low levels. + + sfcTempK = self.convertFtoK(T) + 0.00 + topoMeters = Topo/3.281 + + # + # Compute the difference between the model surface temp and the forecast surface temp. + # + + thetaOffset = where(less(sfcTempK, sfc_model_temp), (sfc_model_temp - sfcTempK), 0.00) + + # Create Surface Theta + sfcTheta = sfcTempK * pow((1000 / sfc_pressure), 0.286) + + # Create the Height and Temperature Cubes + sounding = self.makeNumericSounding(self.__D2Dmodel, 't', soundingLevels, GridTimeRange, noDataError=0) + if sounding is None: + self.noData() + ghCube, tCube = sounding + + # Initialize Mixing Hgt Grid + MixHgt = zeros(T.shape) + + # Initialize the mixDepth variable. + mixDepth = zeros(T.shape) + + # Initialize the bottom of the sounding. 1.0 is a seed value to avoid divide by zero. + lastTheta = sfcTheta + lastghCube = topoMeters + 1.0 + + # Climb through sounding, checking to see if the surface theta is less than the upper theta + for level in range(ghCube.shape[0]): + # Get pressure at the current level. + pressure = string.atof(soundingLevels[level][2:]) + + # Compute the potential temperature of the environment at the current level. 
+ potTemp = (tCube[level] - thetaOffset)* pow((1000 / pressure), 0.286) + + # The mixing height is ready to set when the environmental potential temperature + # is greater than the parcel potential temperature, and the height at the current + # level is above the surface. + readyToSet = logical_and(less_equal(topoMeters, ghCube[level]), logical_and(equal(MixHgt,0), greater(potTemp, sfcTheta))) + + # This part allows the mixing height to be between levels...ie. the sounding could + # have crossed before the current height. This will bring it back to an offset of + # that level. + + + # To turn off adjustment, comment out the next line, and uncomment the other. + # Adjusted: + trueMixHgt = lastghCube + (((sfcTheta - lastTheta)/(potTemp - lastTheta))*(ghCube[level] - lastghCube)) + # Unadjusted: + #trueMixHgt = (ghCube[level] - topoMeters) + + # Apply a check that the adjusted mixing height can never be greater than the + # unadjusted mixing height. + trueMixHgtCheck = (ghCube[level] - topoMeters) + trueMixHgt = where(greater(trueMixHgt, trueMixHgtCheck), trueMixHgtCheck, trueMixHgt) + + # If the surface temp is greater than the eta...mix to the height where the surface + # theta is equal to the environment + mixDepth = where(greater_equal(sfcTempK, sfc_model_temp), trueMixHgt, mixDepth) + + # If the surface temp is colder than the eta, use the eta surface temp to establish the unadjusted height, + # then reduce it by a percentage of the surface temp/mix height temp difference. + mixDepth = where(logical_and(less(sfcTempK, sfc_model_temp), greater_equal(sfcTempK, tCube[level])), ((sfcTempK - tCube[level])/(sfc_model_temp - tCube[level]) * trueMixHgt), mixDepth) + + # If the surface is even colder than the mixing height temp...do not allow mixing. 
+ mixDepth[less(sfcTempK,tCube[level])] = 1.0 + + # Set the mixing height for valid points + MixHgt = where(readyToSet, mixDepth, MixHgt) + + # Set parameters for next iteration of loop + lastTheta = potTemp + lastghCube = (ghCube[level] - topoMeters) + + # + # Final adjustments + # + + # Change mixing height from meters to feet + MixHgt = MixHgt * 3.281 + + # Set a minimum value of 250 feet to account for plume mixing + MixHgt[less(MixHgt, 250.0)] = 250.0 + + # return the mixing height + return MixHgt diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MixHgt_Init.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MixHgt_Init.py index 3fe8c90d33..16308efe53 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MixHgt_Init.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MixHgt_Init.py @@ -1,208 +1,210 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# Numeric_MixHgt_Init -# -# Author: -# ---------------------------------------------------------------------------- - -ToolType = "numeric" -WeatherElementEdited = "MixHgt" -from numpy import * - -VariableList = [("Initialize From Model: " , "", "D2D_model")] - -import SmartScript -import string - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def preProcessTool(self, varDict): - self.__D2Dmodel = varDict["Initialize From Model: "] - print self.__D2Dmodel - - ## This tool calculates mixing height by first calculating the surface - ## potential temp and then looking at increasing levels of the temp. - ## sounding until the potential temp at that level is greater than - ## the surface potential temperature. Then the mixing height is - ## calculated by interpolating between the level at which the pot. temp. - ## was exceeded and the level below it. Mixing height is returned in - ## units of feet above ground. - - def execute(self, GridTimeRange, T, Topo): - "Assigns MixHgt Index" - - levels = ["MB600","MB650","MB700","MB750","MB800","MB850", - "MB900","MB950"] - - # Get the ghCube and tCube - sounding = self.makeNumericSounding(self.__D2Dmodel, 't', levels, - GridTimeRange, noDataError=0) - if sounding is None: - self.noData() - ghCube, tCube = sounding - print ghCube[0][90,80], tCube[0][90,80] - - zeroGrid = zeros(T.shape, dtype=float) - thetaCube = [] - pLevelCube = [] - # Make a potential temperature and pressure cubes - for i in xrange(len(levels)): - pLevel = zeroGrid + string.atof(levels[i][2:]) - potTemp = tCube[i] * pow((1000 / pLevel), 0.286) # in millibars - thetaCube = thetaCube + [potTemp] - pLevelCube = pLevelCube + [pLevel] - pLevelCube = array(pLevelCube) - thetaCube = array(thetaCube) - - ## Calculate the surface potential temperature - T_K = self.convertFtoK(T) - # Get the surface pressure from NAM in pascals - sfcPres = self.getGrids(self.__D2Dmodel, "p", "SFC", GridTimeRange) 
- sfcPres = sfcPres / 100 # convert from Pascals to millibars - sfcTheta = T_K * pow((1000 / sfcPres), 0.286) - - ## find the height where the potential temp > surfacePotTemp - # Initialize to -1 - mixingHt = zeros(T.shape, dtype=float) - 1 - lastTuple = (ghCube[0], thetaCube[0]) - for i in xrange(len(levels)): - pMask = less(pLevelCube[i], sfcPres) - tMask = less(thetaCube[i], sfcTheta) - # Assign only if mixingHt is -1 (i.e. we haven't assigned - # it yet) AND pMask and tMask are true - readyToSet = logical_and(equal(mixingHt,-1), - logical_and(pMask, tMask)) - # Calculate the whole grid at this level - #print "\n", i, readyToSet[90,80] - newMh = self.getMixingHeight((ghCube[i], thetaCube[i]), - lastTuple, sfcTheta) - mixingHt = where(readyToSet, newMh, mixingHt) - lastTuple = (ghCube[i], thetaCube[i]) - - mixingHt[equal(mixingHt,-1)] = 0.0 - mixingHt *= 3.2808 - mask = not_equal(mixingHt, 0.0) - mixingHt[mask] -= Topo[mask] - - #print "MixingHT:", mixingHt[90,80], "pres:", sfcPres[90,80], - #print sfcTheta[90,80], Topo[90,80] - - return mixingHt - - def getMixingHeight(self, (h1, t1), (h2, t2), sfcT): - # Since the sounding increases with height h1 < h2 - try: - result = h1 + (((h2 - h1) / (t2 - t1)) * (sfcT - t1)) - #print h1[90,80], h2[90,80], t1[90,80], t2[90,80], sfcT[90,80] - #print "getMixingHeight", result[90,80] - return result - except: - return h1 - - - -## POINT-BASED VERSION -##VariableList = [("Initialize From Model: " , "", "D2D_model")] - -##import SmartScript -##import string - -##class Tool (SmartScript.SmartScript): -## def __init__(self, dbss): -## SmartScript.SmartScript.__init__(self, dbss) - -## def preProcessTool(self, varDict): -## self.__D2Dmodel = varDict["Initialize From Model: "] - -## ## This tool calculates mixing height by first calculating the surface -## ## potential temp and then looking at increasing levels of the temp. 
-## ## sounding until the potential temp at that level is greater than -## ## the surface potential temperature. Then the mixing height is -## ## calculated by interpolating between the level at which the pot. temp. -## ## was exceeded and the level below it. Mixing height is returned in -## ## units of feet above ground. -## def execute(self, x, y, GridTimeRange, T, Topo): -## "Assigns MixHgt Index" - -## levels = ["MB600","MB650","MB700","MB750","MB800","MB850", -## "MB900","MB950"] - -## sounding = self.makeSounding(self.__D2Dmodel, 't', levels, -## x, y, GridTimeRange, noDataError=0) - -## if sounding is None: -## self.noData() - -## thetaSounding = [] -## index = 0 -## for h, t in sounding: -## pLevel = string.atof(levels[index][2:]) -## potTemp = t * pow((1000 / pLevel), 0.286) # in millibars -## thetaSounding.append((pLevel, h, potTemp)) -## index = index + 1 - -## ## Calculate the surface potential temperature -## T_K = self.convertFtoK(T) - -## # Get the surface pressure from NAM in pascals -## sfcPres = self.getValue(self.__D2Dmodel, "p", "SFC", -## x, y, GridTimeRange) -## sfcPres = sfcPres / 100 # convert from Pascals to millibars - -## sfcTheta = T_K * pow((1000 / sfcPres), 0.286) - -## ## find the height where the potential temp > surfacePotTemp -## mixingHt = 0.0 # initialize -## lastTuple = (thetaSounding[0][1], thetaSounding[0][2]) -## for p, h, t in thetaSounding: -## print "pLevel:", p, "ht:", h, "t:", t, "sfcTheta:", sfcTheta -## if p < sfcPres: -## if t < sfcTheta: -## mixingHt = self.getMixingHeight((h, t), lastTuple, sfcTheta) -## break -## lastTuple = (h, t) - -### print "MixingHT:", mixingHt, "pres:", sfcPres - -## if mixingHt == 0.0: -## return mixingHt - -## mixingHt = mixingHt * 3.2808 # convert meters to feet - -## mixingHt = mixingHt - Topo # subtract Topo to get height above ground - -## return mixingHt - -## def getMixingHeight(self, (h1, t1), (h2, t2), sfcT): -## # Since the sounding increases with height h1 < h2 -### print "h1:", 
h1, "t1:", t1, "h2:", h2, "t2:", t2, "sfcT:", sfcT -## try: -## return h1 + ((h2 - h1) / (t2 - t1) * (sfcT - t1)) -## except: -## return h1 - - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Numeric_MixHgt_Init +# +# Author: +# ---------------------------------------------------------------------------- + +ToolType = "numeric" +WeatherElementEdited = "MixHgt" +from numpy import * + +VariableList = [("Initialize From Model: " , "", "D2D_model")] + +import SmartScript +import string + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def preProcessTool(self, varDict): + self.__D2Dmodel = varDict["Initialize From Model: "] + print(self.__D2Dmodel) + + ## This tool calculates mixing height by first calculating the surface + ## potential temp and then looking at increasing levels of the temp. + ## sounding until the potential temp at that level is greater than + ## the surface potential temperature. Then the mixing height is + ## calculated by interpolating between the level at which the pot. temp. 
+ ## was exceeded and the level below it. Mixing height is returned in + ## units of feet above ground. + + def execute(self, GridTimeRange, T, Topo): + "Assigns MixHgt Index" + + levels = ["MB600","MB650","MB700","MB750","MB800","MB850", + "MB900","MB950"] + + # Get the ghCube and tCube + sounding = self.makeNumericSounding(self.__D2Dmodel, 't', levels, + GridTimeRange, noDataError=0) + if sounding is None: + self.noData() + ghCube, tCube = sounding + print(ghCube[0][90,80], tCube[0][90,80]) + + zeroGrid = zeros(T.shape, dtype=float) + thetaCube = [] + pLevelCube = [] + # Make a potential temperature and pressure cubes + for i in range(len(levels)): + pLevel = zeroGrid + string.atof(levels[i][2:]) + potTemp = tCube[i] * pow((1000 / pLevel), 0.286) # in millibars + thetaCube = thetaCube + [potTemp] + pLevelCube = pLevelCube + [pLevel] + pLevelCube = array(pLevelCube) + thetaCube = array(thetaCube) + + ## Calculate the surface potential temperature + T_K = self.convertFtoK(T) + # Get the surface pressure from NAM in pascals + sfcPres = self.getGrids(self.__D2Dmodel, "p", "SFC", GridTimeRange) + sfcPres = sfcPres / 100 # convert from Pascals to millibars + sfcTheta = T_K * pow((1000 / sfcPres), 0.286) + + ## find the height where the potential temp > surfacePotTemp + # Initialize to -1 + mixingHt = zeros(T.shape, dtype=float) - 1 + lastTuple = (ghCube[0], thetaCube[0]) + for i in range(len(levels)): + pMask = less(pLevelCube[i], sfcPres) + tMask = less(thetaCube[i], sfcTheta) + # Assign only if mixingHt is -1 (i.e. 
we haven't assigned + # it yet) AND pMask and tMask are true + readyToSet = logical_and(equal(mixingHt,-1), + logical_and(pMask, tMask)) + # Calculate the whole grid at this level + #print "\n", i, readyToSet[90,80] + newMh = self.getMixingHeight((ghCube[i], thetaCube[i]), + lastTuple, sfcTheta) + mixingHt = where(readyToSet, newMh, mixingHt) + lastTuple = (ghCube[i], thetaCube[i]) + + mixingHt[equal(mixingHt,-1)] = 0.0 + mixingHt *= 3.2808 + mask = not_equal(mixingHt, 0.0) + mixingHt[mask] -= Topo[mask] + + #print "MixingHT:", mixingHt[90,80], "pres:", sfcPres[90,80], + #print sfcTheta[90,80], Topo[90,80] + + return mixingHt + + def getMixingHeight(self, xxx_todo_changeme, xxx_todo_changeme1, sfcT): + # Since the sounding increases with height h1 < h2 + (h1, t1) = xxx_todo_changeme + (h2, t2) = xxx_todo_changeme1 + try: + result = h1 + (((h2 - h1) / (t2 - t1)) * (sfcT - t1)) + #print h1[90,80], h2[90,80], t1[90,80], t2[90,80], sfcT[90,80] + #print "getMixingHeight", result[90,80] + return result + except: + return h1 + + + +## POINT-BASED VERSION +##VariableList = [("Initialize From Model: " , "", "D2D_model")] + +##import SmartScript +##import string + +##class Tool (SmartScript.SmartScript): +## def __init__(self, dbss): +## SmartScript.SmartScript.__init__(self, dbss) + +## def preProcessTool(self, varDict): +## self.__D2Dmodel = varDict["Initialize From Model: "] + +## ## This tool calculates mixing height by first calculating the surface +## ## potential temp and then looking at increasing levels of the temp. +## ## sounding until the potential temp at that level is greater than +## ## the surface potential temperature. Then the mixing height is +## ## calculated by interpolating between the level at which the pot. temp. +## ## was exceeded and the level below it. Mixing height is returned in +## ## units of feet above ground. 
+## def execute(self, x, y, GridTimeRange, T, Topo): +## "Assigns MixHgt Index" + +## levels = ["MB600","MB650","MB700","MB750","MB800","MB850", +## "MB900","MB950"] + +## sounding = self.makeSounding(self.__D2Dmodel, 't', levels, +## x, y, GridTimeRange, noDataError=0) + +## if sounding is None: +## self.noData() + +## thetaSounding = [] +## index = 0 +## for h, t in sounding: +## pLevel = string.atof(levels[index][2:]) +## potTemp = t * pow((1000 / pLevel), 0.286) # in millibars +## thetaSounding.append((pLevel, h, potTemp)) +## index = index + 1 + +## ## Calculate the surface potential temperature +## T_K = self.convertFtoK(T) + +## # Get the surface pressure from NAM in pascals +## sfcPres = self.getValue(self.__D2Dmodel, "p", "SFC", +## x, y, GridTimeRange) +## sfcPres = sfcPres / 100 # convert from Pascals to millibars + +## sfcTheta = T_K * pow((1000 / sfcPres), 0.286) + +## ## find the height where the potential temp > surfacePotTemp +## mixingHt = 0.0 # initialize +## lastTuple = (thetaSounding[0][1], thetaSounding[0][2]) +## for p, h, t in thetaSounding: +## print "pLevel:", p, "ht:", h, "t:", t, "sfcTheta:", sfcTheta +## if p < sfcPres: +## if t < sfcTheta: +## mixingHt = self.getMixingHeight((h, t), lastTuple, sfcTheta) +## break +## lastTuple = (h, t) + +### print "MixingHT:", mixingHt, "pres:", sfcPres + +## if mixingHt == 0.0: +## return mixingHt + +## mixingHt = mixingHt * 3.2808 # convert meters to feet + +## mixingHt = mixingHt - Topo # subtract Topo to get height above ground + +## return mixingHt + +## def getMixingHeight(self, (h1, t1), (h2, t2), sfcT): +## # Since the sounding increases with height h1 < h2 +### print "h1:", h1, "t1:", t1, "h2:", h2, "t2:", t2, "sfcT:", sfcT +## try: +## return h1 + ((h2 - h1) / (t2 - t1) * (sfcT - t1)) +## except: +## return h1 + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ModelBlend.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ModelBlend.py index 
5c496f9f07..2d30dadd4a 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ModelBlend.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/ModelBlend.py @@ -1,712 +1,712 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Model_Blend version 2.1 +# +# Make combinations of recent (last two) model runs, or the forecast or +# official grids. Can extrapolate by using a combination of negative and +# positive weights. 
Weights cannot add up to zero - and error message is +# generated if user sets them that way. +# +# Author: Tim Barker +# 2016 +# 2009-12-18: Rewritten to run from Java. Removed features that depend +# on prestarted Tkinter app: dialog derived from tkSimpleDialog, +# global-level IntVars and StringVars, IntVars and StringVars +# using the default parent. +# 2006-01-12: Version 2.1. Fixed accumulative elements like QPF/SnowAmt +# to add up model QPF/SnowAmt grids before doing blend. Old +# way averaged them - then blended. Also fixed so that it +# does not read grids from cache - that way changes to Fcst +# grid in one area are reflected when the tool is run again +# in another area. +# 2005-06-01: Version 2.0. Re-worked to be non-modal dialog box, add +# optional edge effects when working on an edit area, +# simplify using previous model runs, and make negative +# weights optional. +# 2002-10-09: Original Implementation from Les Colin Idea +#---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Feb 10, 2016 5283 nabowle Remove NGM support. +# Feb 06, 2017 5959 randerso Removed Java .toString() calls # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Model_Blend version 2.1 -# -# Make combinations of recent (last two) model runs, or the forecast or -# official grids. Can extrapolate by using a combination of negative and -# positive weights. Weights cannot add up to zero - and error message is -# generated if user sets them that way. 
-# -# Author: Tim Barker -# 2016 -# 2009-12-18: Rewritten to run from Java. Removed features that depend -# on prestarted Tkinter app: dialog derived from tkSimpleDialog, -# global-level IntVars and StringVars, IntVars and StringVars -# using the default parent. -# 2006-01-12: Version 2.1. Fixed accumulative elements like QPF/SnowAmt -# to add up model QPF/SnowAmt grids before doing blend. Old -# way averaged them - then blended. Also fixed so that it -# does not read grids from cache - that way changes to Fcst -# grid in one area are reflected when the tool is run again -# in another area. -# 2005-06-01: Version 2.0. Re-worked to be non-modal dialog box, add -# optional edge effects when working on an edit area, -# simplify using previous model runs, and make negative -# weights optional. -# 2002-10-09: Original Implementation from Les Colin Idea -#---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Feb 10, 2016 5283 nabowle Remove NGM support. -# Feb 06, 2017 5959 randerso Removed Java .toString() calls -# -## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -#--------------------------------------------------------------------- -# -# C O N F I G U R A T I O N S E C T I O N -# -#--------------------------------------------------------------------- -# -# To keep the dialog from being too "long", you can specify the -# maximum number of weights in a column of the dialog. It will try -# to balance columns if needed. -# -MAX_IN_COLUMN=15 -# -# If you do not want to allow negative weights (which can be used to -# extrapolate trends), then set USE_NEGATIVE_WEIGHTS to zero. -# -USE_NEGATIVE_WEIGHTS=1 -# -# List of GFE model databases that you will potentially blend. 
-# The name is followed by a number (separated by a colon) that -# gives the number of versions to potentially blend. The versions -# is followed by a list of elements for which to add this model -# (assumed to be for all elements if missing) or if the list starts -# with a ^ character, then it is a list of elements where this model -# will NOT be listed). You can add the "ISC" database with "ISC:1". -# If ISC is specified, the list of elements reflect the destination -# weather elements in the Fcst database, and not the actual weather element -# names in the ISC database (since they can be renamed by the system). -# -Models=("ADJMET:2", - "ADJMETBC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", - "Eta12:2", - "Eta12BC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", - "ADJMAV:2", - "ADJMAVBC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", - "GFS40:2", - "GFS40BC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", - "ADJFWC:1", - "ADJFWCBC:1", - "ADJMEX:2", - "ADJMEXBC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", - "ADJMEH:1:MaxT,MinT,PoP", - "ADJMEN:1:MaxT,MinT,PoP", - "ADJMEL:1:MaxT,MinT,PoP", - "ADJHPC:1:MaxT,MinT,PoP,Sky,Td,Wind", - "ADJKAF:2:MaxT,MinT,Wind,T,Td,MaxRH,MinRH,RH,TdMrn,TdAft", - "ADJKAFBC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", - "NWHAS:3:QPF", - ) - -edgestyleDefault="Flat" -#--------------------------------------------------------------------- -# -# END OF CONFIGURATION SECTION -# -#--------------------------------------------------------------------- -# -# -# -ToolType = "numeric" -WeatherElementEdited = "variableElement" -ScreenList = ["SCALAR","VECTOR"] -# -# -# -from numpy import * -import Tkinter -import AppDialog -import SmartScript - -edgestyles=["Flat","Edge","Taper"] -# -# -# -class ToolDialog(AppDialog.AppDialog): - def __init__(self, title="Tk", callbackMethod=None, labels=None, **kwargs): - self.__callbackMethod = callbackMethod - self.dbIds = [] - self.labels = [] - self.__percents = [] - self.weights = [] - self.__weightVars = [] - 
self.numrows = MAX_IN_COLUMN - self.numcolumns = 1 - if labels is not None: - self.labels.extend(labels) - self.numrows = min(len(labels), MAX_IN_COLUMN) - self.numcolumns = (len(labels)-1)/MAX_IN_COLUMN + 1 - AppDialog.AppDialog.__init__(self, **kwargs) - self.title(title) - - def buttonbox(self): - buttonFrame = Tkinter.Frame(self) - # create the buttons associated with this dialog - Tkinter.Button(buttonFrame, text="Run", - command=self.__runCB, width=10, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Run/Dismiss", - command=self.__okCB, width=12, state=Tkinter.NORMAL).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - Tkinter.Button(buttonFrame, text="Cancel", width=10, - command=self.cancelCB).pack(\ - side=Tkinter.LEFT, pady=5, padx=10) - buttonFrame.pack(side=Tkinter.BOTTOM) - - def body(self, master): - bodyFrame = Tkinter.Frame(master) - self.buildWeightSliders(bodyFrame) - self.buildEdgeControl(bodyFrame) - bodyFrame.pack(side=Tkinter.TOP) - return bodyFrame - - ## - # Validate the inputs. - # Because self.destroy() is called before apply(), any Tkinter variables - # such as StringVar or IntVar instances will be invalid by the time apply() - # is called. Therefore, validate() must also preserve any data in such - # variables that apply() will need. - def validate(self): - rtnval = True; - self.weights = [] - for wv in self.__weightVars: - self.weights.append(wv.get()) - self.edgestyle = self.edgestyleString.get() - self.edgeWidth = self.edgeWidthVar.get() - return rtnval - - ## - # Set the percent labels based on the slider weights. - # This is primarily a callback method invoked by the scale widgets. - # @param weight: Weight of the scale widget that changed - # @type weight: int - # - def setPercents(self, weight): - "Set the percent labels based on the slider weights." 
- total=0 - for wv in self.__weightVars: - total+=wv.get() - if total==0: - for pctVar in self.__percents: - pctVar.set("%4d%%"%0) - else: - wpct = 100 / float(total) - for i, pctVar in enumerate(self.__percents): - pctVar.set("%4d%%"%(self.__weightVars[i].get() * wpct)) - - def __runCB(self): - "The callback invoked by the Run button" - self.validate() - self.__callbackMethod("Run") - - def __okCB(self): - "The callback invoked by the Ok button" - self.validate() - self.__callbackMethod("OK") - self.ok() - - def cancelCB(self): - "The callback invoked by the Cancel button" - self.__callbackMethod("Cancel") - self.cancel() - - def apply(self, event=None): - pass - - def buildWeightSliders(self, master): - hull = Tkinter.Frame(master) - lastColumn = len(self.labels)/MAX_IN_COLUMN - row = 0 - column = 0 - fc = None - if USE_NEGATIVE_WEIGHTS: - origin = -10 - else: - origin = 0 - for labelText in self.labels: - if fc is None: - fc = Tkinter.Frame(hull) - # Create Tk variables for the weight and percent - weightVar = Tkinter.IntVar(master) - pctVar = Tkinter.StringVar(master) - # Store references for other routines - self.__weightVars.append(weightVar) - self.__percents.append(pctVar) - # Initialize the weight and percent variables - weightVar.set(0) - pctVar.set("%4d%%"%0) - # Create labels and sliders - lbl = Tkinter.Label(fc, text=labelText) - slider = Tkinter.Scale(fc,orient=Tkinter.HORIZONTAL, - from_=origin,to=10,resolution=1, - command=self.setPercents, - variable=weightVar,length=150) - lab2=Tkinter.Label(fc,textvariable=pctVar,width=5) - # Grid the items left-to-right in the current row - lbl.grid(row=row, column=0, sticky=Tkinter.SE) - slider.grid(row=row, column=1, sticky=Tkinter.SE) - lab2.grid(row=row,column=2,sticky=Tkinter.SE) - if column < lastColumn: - f2=Tkinter.Frame(fc,bg="black",width=1) - f2.grid(row=row,column=3,sticky=Tkinter.NS) - row+=1 - if row >= MAX_IN_COLUMN: - fc.grid(row=0, column=column, sticky=Tkinter.N) - row=0 - column+=1 - fc = None 
- if fc is not None: - fc.grid(row=0, column=column, sticky=Tkinter.N) - # Revise the weight of the forecast item - self.__weightVars[0].set(1) - self.setPercents(1) - hull.grid(row=0,column=0, sticky=Tkinter.S) - - def buildEdgeControl(self, master): - edgeFrame=Tkinter.Frame(master,relief=Tkinter.GROOVE,borderwidth=2) - edgestyleFrame=Tkinter.Frame(edgeFrame) - edgewidthFrame=Tkinter.Frame(edgeFrame) - # Create the edge style radio buttons - self.edgestyleString=Tkinter.StringVar(master) - for edgestyle in edgestyles: - a=Tkinter.Radiobutton(edgestyleFrame,text=edgestyle, - variable=self.edgestyleString,value=edgestyle) - if edgestyle == edgestyleDefault: - a.invoke() - a.pack(side=Tkinter.TOP,anchor=Tkinter.W) - edgestyleFrame.pack(side=Tkinter.LEFT,anchor=Tkinter.W) - # Create the edge width slider - self.edgeWidthVar=Tkinter.IntVar(master) - self.edgeWidthVar.set(5) - a=Tkinter.Scale(edgewidthFrame,from_=1,to=30,variable=self.edgeWidthVar, - showvalue=1,label="Edge Width:",orient=Tkinter.HORIZONTAL) - a.pack(side=Tkinter.TOP,anchor=Tkinter.N,fill=Tkinter.X) - edgewidthFrame.pack(side=Tkinter.RIGHT,anchor=Tkinter.W,fill=Tkinter.X,expand=1) - - # Add the edge control below the weight sliders - edgeFrame.grid(row=self.numrows,column=0,columnspan=self.numcolumns,sticky=Tkinter.EW) - -#======================================================================== -class TestDialog(object): - "A dummy object used to test the back end." 
- - def __init__(self, title="Tk", callbackMethod=None, labels=None, **kwargs): - print "TestDialog constructor:" - print "Title=", title - print "labels=", labels - print "kwargs=", kwargs - self.__callbackMethod = callbackMethod - self.edgestyle = "Taper" - self.edgeWidth = "" - self.weights = [1] * len(labels) - - def mainloop(self): - self.__callbackMethod("Run") - -#======================================================================== -# -# The real GFE Tool -# -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - self._dbss=dbss - SmartScript.SmartScript.__init__(self, dbss) - - def preProcessGrid(self,WEname): - # - # Setup the arrays of information for the dialog - # box that sets the weights - # - # The mutable database is labeled "Forecast" no matter - # what you do, and put in the first slot on the dialog. - # The "Official" database is hardcoded into the next slot, - # then others are added - # - - self.labels = [] - self.dbIds = [] - - db=self.mutableID() - id=db.modelIdentifier() - self._addModel('Forecast:', id) - # - db=self.findDatabase("Official") - id=db.modelIdentifier() - self._addModel("Official", id) - # - plist = None - allOfficeTypes = None - for modelString in Models: - model, versions, parmlist = self.parseMS(modelString) - if model is None: - continue - if not self.acceptPL(WEname, parmlist): - continue - - # - # Make labels for each of the model runs we want. - # Singleton databases (like FCST or Official) that have - # no date (actually a 1970 date) have no date/run label. 
- # - for run in range(0,-versions,-1): - db=self.findDatabase(model,run) - if db is None: - continue - id=db.modelIdentifier() - if id is None or ""==id or id in self.dbIds: - continue - if "ISC" == db.modelName(): - if allOfficeTypes is None: - allOfficeTypes = self.knownOfficeTypes() - iscOfficeTypes = [self.myOfficeType()] - if plist is None: - plist = self.availableParms() - for pname, plevel, pdb in plist: - if id != pdb.modelIdentifier(): - continue - for ot in allOfficeTypes: - if pname.endswith(ot) and \ - ot not in iscOfficeTypes: - iscOfficeTypes.append(ot) - for otype in iscOfficeTypes: - ltext = "%s (%s):"%(model, otype) - self._addModel(ltext, id) - else: - modtime=db.modelTime() - year=modtime.year - if year==1970: - lbltext="%s:"%model - else: - month=modtime.month - day=modtime.day - hour=modtime.hour - lbltext="%s %2.2d/%2.2d %2.2dZ:" % (model,month,day,hour) - self._addModel(lbltext,id) - # - # Now run the dialog box to get the weights - # resulting weights stored in Weights array - # - self.dlg=ToolDialog("Set Model Weights", - callbackMethod=self.execWeights, - labels=self.labels) -# self.dlg=TestDialog("Set Model Weights", -# callbackMethod=self.execWeights, -# labels=self.labels) - # - # Cancel the tool in the first pre-processGrid routine. - # No Execute routine is done - and grid is not marked as - # edited. 
Any editing will take place when they press a - # Button on the dialog and it calls execWeights - # - self.dlg.mainloop() - self.cancel() - - def parseMS(self, modelstring): - """Parse a model string into a model, versions, and parmlist.""" - model = None - versions = None - parmlist = None - pieces = modelstring.split(":") - len_pcs = len(pieces) - if len_pcs < 4: - model = pieces[0] - versions = 1 - parmlist = 'ALL' - if len_pcs > 1: - try: - versions = abs(int(pieces[1])) - except: - pass - if len_pcs > 2: - parmlist = pieces[2] - return (model, versions, parmlist) - - def acceptPL(self, WEName, parmlist): - """Check WEName against parmlist.""" - invert = False - parms = parmlist.split(",") - if '^'==parms[0][0]: - parms[0] = parms[0][1:] - invert = True - result = ('ALL'==parms[0]) or (WEName in parms) - result = invert ^ result - return result - - ## - # - # - def _addModel(self, text, id): - "Add text and id to self.labels and self.dbIds, respecively." - self.labels.append(text) - self.dbIds.append(id) - - #================================================================= - # - # Dummy execute routine. Tool is cancelled in preProcessGrid - # and all the real action is accomplished in execWeights which - # is called when the user presses a button on the dialog - # - def execute(self,variableElement): - "Specified blend of any/all model/forecast fields" - return variableElement - #================================================================= - # - # execWeights - The main calculation routine called when a button - # is pressed in the dialog. 
Passes in the string - # name of the button pressed - # - def execWeights(self,button): - # - # If user presses cancel, do an immediate return and stop - # - if button=="Cancel": - return - - # - # Get the results from the dialog - # - #for num in range(len(Labels)): - # Weights[num]=ScaleIDs[num].get() - EdgeType=self.dlg.edgestyle - EdgeWidth=self.dlg.edgeWidth - # - # If user presses run or run/dismiss, first add up the - # weights (in the ScaleIDs variables) and check for - # common issues like all weights zero, only weights on - # current grid, or grids add up to zero. - # - totweight=0 - fcstweight=0 - someweights=0 - otherweights=0 - - dbIds = self.dbIds # alias - weights = self.dlg.weights - maxAbsWeight = max( max(weights), abs(min(weights)) ) - someweights = (maxAbsWeight > 0.5) - fcstweight = weights[0] - otherweights = sum(weights[1:]) - totweight = fcstweight + otherweights - - if not someweights: - self.statusBarMsg("ModelBlend has no weights","R") - return - if abs(fcstweight) > 0.5 and otherweights==0: - self.statusBarMsg("ModelBlend Weights add to no change","R") - return - if totweight==0: - self.statusBarMsg("Weights cannot add up to zero","A") - return - # - # Get stuff usually provided by tool code: - # fcst=mutable model database name - # selectTR=the selected timerange - # - fcst = self.mutableID().modelIdentifier() - selectTR = self._dbss.getParmOp().getSelectionTimeRange() - # - # get list of parms that are selected and mutable - # - # Making a derivation from AWIPS1's version of this script. 
- # Instead of calling direct to Java's ParmManager to get the Parm - # objects, we'll use SmartScript's selectedParms() to retrieve native - # Python objects which should save us Java heap space which wouldn't - # be freed otherwise until the user terminates the SmartTool - # - # allParms = self._dbss.getParmManager().getSelectedParms() - allParms = self.selectedParms() - parms = [] - for parm in allParms: - # model = parm.getParmID().getDbId().getModelId() - model = parm[2].modelIdentifier() - if model == fcst: - parms.append(parm) - - # - # loop over the mutable parms. - # get: wxType - type of parm - # WEname - short parm name string - # parmlevel - parm level string - # - for WEname, parmlevel, dbId in parms: - # Another AWIPS1 derivation: Use of different selectedParms() - # call forces us to retrieve Parm to retrieve some of these - # pieces of information - # - parm = self.getParm(dbId, WEname, parmlevel) - rateParm = parm.getGridInfo().isRateParm() - wxType = str(parm.getGridInfo().getGridType()) - del parm - - # - # Get list of grids for this parm within the selcted time range - # and loop over each of those grids - # - gridinfos=self.getGridInfo(fcst,WEname,parmlevel,selectTR) - for gridinfo in gridinfos: - GridTimeRange=gridinfo.gridTime() - # - # Easier when just a scalar - # - if 'SCALAR'==wxType: - # - # read each 'model' grid with a non-zero weight - # add up the weights again, because we cannot count - # weights for grids that cannot be read. 
- # - gsum=self.empty() - totweight=0 - fcstweight=0 - oldgrid=self.getGrids(self.dbIds[0],WEname,"SFC",GridTimeRange,noDataError=0,cache=0) - if oldgrid==None: - self.statusBarMsg("ModelBlend tool could not get Fcst data for " + WEName,"A") - for num, label in enumerate(self.labels): - weight=weights[num] - if weight!=0: - modeType="TimeWtAverage" - if rateParm==1: - modeType="Sum" - #determine source - special if from ISC - idx = label.find("(") - idx1 = label.find(")",idx) - if idx == -1 or idx1 == -1: - WEnameSource = WEname - else: - ot = label[idx+1:idx1] - if ot == self.myOfficeType(): - WEnameSource = WEname - else: - WEnameSource = WEname + ot - grid=self.getGrids(self.dbIds[num],WEnameSource,"SFC",GridTimeRange,mode=modeType,noDataError=0,cache=0) - if grid != None: - gsum+=(grid*weight) - totweight+=weight - if (num==0): - fcstweight=weight - else: - errorstring="ModelBlend tool could not get data for %s" % label - self.statusBarMsg(errorstring,"A") - # - # Check again for no weights, or only weights for the current - # grid - in which case we make no changes and write info message - # otherwise - save the grid - # - if (totweight!=0): - if fcstweight==totweight: - self.statusBarMsg("ModelBlend makes no change","R") - else: - newgrid=gsum/totweight - finalgrid=self.inEditArea(newgrid,oldgrid,EdgeType,EdgeWidth) - self.createGrid(fcst,WEname,wxType,finalgrid,GridTimeRange) - else: - self.statusBarMsg("ModelBlend weights ended up Zero - so cancelled","A") - # - # A little more complicated when a vector - # - if 'VECTOR'==wxType: - # - # read each 'model' grid with a non-zero weight - # add up the weights again, because we cannot count - # weights for grids that cannot be read. 
- # - oldgrid=self.getGrids(dbIds[0],WEname,"SFC",GridTimeRange,noDataError=0,cache=0) - if oldgrid==None: - self.statusBarMsg("ModelBlend tool could not get Fcst data for " + WEName,"A") - (mag,direc)=oldgrid - (uold,vold)=self.MagDirToUV(mag,direc) - - usum=self.empty() - vsum=self.empty() - - totweight=0 - fcstweight=0 - for num, weight in enumerate(weights): - if weight!=0: - grid=self.getGrids(self.dbIds[num],WEname,"SFC",GridTimeRange,noDataError=0,cache=0) - if grid != None: - (mag,direc)=grid - (u,v)=self.MagDirToUV(mag,direc) - usum+=(u*weight) - vsum+=(v*weight) - totweight+=weight - if (num==0): - fcstweight=weight - else: - errorstring="ModelBlend tool could not get data for %s" % self.labels[num] - self.statusBarMsg(errorstring,"A") - # - # Check again for no weights, or only weights for the current - # grid - in which case we make no changes and write info message - # otherwise - save the grid. - # - if (totweight!=0): - if fcstweight==totweight: - self.statusBarMsg("ModelBlend makes no change","R") - else: - unew=usum/totweight - vnew=vsum/totweight - ufinal=self.inEditArea(unew,uold,EdgeType,EdgeWidth) - vfinal=self.inEditArea(vnew,vold,EdgeType,EdgeWidth) - result=self.UVToMagDir(ufinal,vfinal) - self.createGrid(fcst,WEname,wxType,result,GridTimeRange) - #self.callSmartTool("DoNothing",WEname,None,GridTimeRange) - else: - self.statusBarMsg("ModelBlend weights ended up Zero - so cancelled","A") - - #===================================================================== - # inEditArea - Take an old grid and a new grid - and return the - # grid with the proper weighting between the two. - # - # This is where the EdgeType and EdgeWidth of the dialog - # box gets used. If there are no points in the current - # edit area - then we assume they want the entire domain. - # Otherwise we use the current edit area. If FLAT is - # used, then the new grid is returned in the edit area - # and the old grid is returned outside the edit area. 
- # If EDGE or TAPER are used - then we nudge areas inside - # the edit area toward the new grid - based on how close - # it is to the edge of the edit are. - # - # Returns the final grid that should be returned. - # - def inEditArea(self,new,old,EdgeType,EdgeWidth): - # - # Get the active editarea - # - editArea=self.getActiveEditArea() - # - # We don't have the benefit of the usual GFE question about what - # to do with empty edit areas. We assume they want to run it over - # the entire domain - but have to switch the edit area ourselves - # - editAreaMask=editArea.getGrid() - if not editAreaMask.isAnyBitsSet(): - editArea.invert() - # - # Make edgegrid 0-1 across edit area - # - if (EdgeType=="Flat"): - edgegrid=editArea.getGrid().getNDArray() - elif (EdgeType=="Edge"): - edgegrid=self.taperGrid(editArea,EdgeWidth) - else: - edgegrid=self.taperGrid(editArea,0) - # - # return the final grid - # - diff=new-old - final=old+(diff*edgegrid) - return(final) +## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +#--------------------------------------------------------------------- +# +# C O N F I G U R A T I O N S E C T I O N +# +#--------------------------------------------------------------------- +# +# To keep the dialog from being too "long", you can specify the +# maximum number of weights in a column of the dialog. It will try +# to balance columns if needed. +# +MAX_IN_COLUMN=15 +# +# If you do not want to allow negative weights (which can be used to +# extrapolate trends), then set USE_NEGATIVE_WEIGHTS to zero. +# +USE_NEGATIVE_WEIGHTS=1 +# +# List of GFE model databases that you will potentially blend. +# The name is followed by a number (separated by a colon) that +# gives the number of versions to potentially blend. 
The versions +# is followed by a list of elements for which to add this model +# (assumed to be for all elements if missing) or if the list starts +# with a ^ character, then it is a list of elements where this model +# will NOT be listed). You can add the "ISC" database with "ISC:1". +# If ISC is specified, the list of elements reflect the destination +# weather elements in the Fcst database, and not the actual weather element +# names in the ISC database (since they can be renamed by the system). +# +Models=("ADJMET:2", + "ADJMETBC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", + "Eta12:2", + "Eta12BC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", + "ADJMAV:2", + "ADJMAVBC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", + "GFS40:2", + "GFS40BC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", + "ADJFWC:1", + "ADJFWCBC:1", + "ADJMEX:2", + "ADJMEXBC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", + "ADJMEH:1:MaxT,MinT,PoP", + "ADJMEN:1:MaxT,MinT,PoP", + "ADJMEL:1:MaxT,MinT,PoP", + "ADJHPC:1:MaxT,MinT,PoP,Sky,Td,Wind", + "ADJKAF:2:MaxT,MinT,Wind,T,Td,MaxRH,MinRH,RH,TdMrn,TdAft", + "ADJKAFBC:2:MaxT,MinT,MaxRH,MinRH,TdMrn,TdAft,T,Td,RH", + "NWHAS:3:QPF", + ) + +edgestyleDefault="Flat" +#--------------------------------------------------------------------- +# +# END OF CONFIGURATION SECTION +# +#--------------------------------------------------------------------- +# +# +# +ToolType = "numeric" +WeatherElementEdited = "variableElement" +ScreenList = ["SCALAR","VECTOR"] +# +# +# +from numpy import * +import tkinter +import AppDialog +import SmartScript + +edgestyles=["Flat","Edge","Taper"] +# +# +# +class ToolDialog(AppDialog.AppDialog): + def __init__(self, title="Tk", callbackMethod=None, labels=None, **kwargs): + self.__callbackMethod = callbackMethod + self.dbIds = [] + self.labels = [] + self.__percents = [] + self.weights = [] + self.__weightVars = [] + self.numrows = MAX_IN_COLUMN + self.numcolumns = 1 + if labels is not None: + self.labels.extend(labels) + self.numrows = 
min(len(labels), MAX_IN_COLUMN) + self.numcolumns = (len(labels)-1)/MAX_IN_COLUMN + 1 + AppDialog.AppDialog.__init__(self, **kwargs) + self.title(title) + + def buttonbox(self): + buttonFrame = tkinter.Frame(self) + # create the buttons associated with this dialog + tkinter.Button(buttonFrame, text="Run", + command=self.__runCB, width=10, state=tkinter.NORMAL).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + tkinter.Button(buttonFrame, text="Run/Dismiss", + command=self.__okCB, width=12, state=tkinter.NORMAL).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + tkinter.Button(buttonFrame, text="Cancel", width=10, + command=self.cancelCB).pack(\ + side=tkinter.LEFT, pady=5, padx=10) + buttonFrame.pack(side=tkinter.BOTTOM) + + def body(self, master): + bodyFrame = tkinter.Frame(master) + self.buildWeightSliders(bodyFrame) + self.buildEdgeControl(bodyFrame) + bodyFrame.pack(side=tkinter.TOP) + return bodyFrame + + ## + # Validate the inputs. + # Because self.destroy() is called before apply(), any Tkinter variables + # such as StringVar or IntVar instances will be invalid by the time apply() + # is called. Therefore, validate() must also preserve any data in such + # variables that apply() will need. + def validate(self): + rtnval = True; + self.weights = [] + for wv in self.__weightVars: + self.weights.append(wv.get()) + self.edgestyle = self.edgestyleString.get() + self.edgeWidth = self.edgeWidthVar.get() + return rtnval + + ## + # Set the percent labels based on the slider weights. + # This is primarily a callback method invoked by the scale widgets. + # @param weight: Weight of the scale widget that changed + # @type weight: int + # + def setPercents(self, weight): + "Set the percent labels based on the slider weights." 
+ total=0 + for wv in self.__weightVars: + total+=wv.get() + if total==0: + for pctVar in self.__percents: + pctVar.set("%4d%%"%0) + else: + wpct = 100 / float(total) + for i, pctVar in enumerate(self.__percents): + pctVar.set("%4d%%"%(self.__weightVars[i].get() * wpct)) + + def __runCB(self): + "The callback invoked by the Run button" + self.validate() + self.__callbackMethod("Run") + + def __okCB(self): + "The callback invoked by the Ok button" + self.validate() + self.__callbackMethod("OK") + self.ok() + + def cancelCB(self): + "The callback invoked by the Cancel button" + self.__callbackMethod("Cancel") + self.cancel() + + def apply(self, event=None): + pass + + def buildWeightSliders(self, master): + hull = tkinter.Frame(master) + lastColumn = len(self.labels)/MAX_IN_COLUMN + row = 0 + column = 0 + fc = None + if USE_NEGATIVE_WEIGHTS: + origin = -10 + else: + origin = 0 + for labelText in self.labels: + if fc is None: + fc = tkinter.Frame(hull) + # Create Tk variables for the weight and percent + weightVar = tkinter.IntVar(master) + pctVar = tkinter.StringVar(master) + # Store references for other routines + self.__weightVars.append(weightVar) + self.__percents.append(pctVar) + # Initialize the weight and percent variables + weightVar.set(0) + pctVar.set("%4d%%"%0) + # Create labels and sliders + lbl = tkinter.Label(fc, text=labelText) + slider = tkinter.Scale(fc,orient=tkinter.HORIZONTAL, + from_=origin,to=10,resolution=1, + command=self.setPercents, + variable=weightVar,length=150) + lab2=tkinter.Label(fc,textvariable=pctVar,width=5) + # Grid the items left-to-right in the current row + lbl.grid(row=row, column=0, sticky=tkinter.SE) + slider.grid(row=row, column=1, sticky=tkinter.SE) + lab2.grid(row=row,column=2,sticky=tkinter.SE) + if column < lastColumn: + f2=tkinter.Frame(fc,bg="black",width=1) + f2.grid(row=row,column=3,sticky=tkinter.NS) + row+=1 + if row >= MAX_IN_COLUMN: + fc.grid(row=0, column=column, sticky=tkinter.N) + row=0 + column+=1 + fc = None 
+ if fc is not None: + fc.grid(row=0, column=column, sticky=tkinter.N) + # Revise the weight of the forecast item + self.__weightVars[0].set(1) + self.setPercents(1) + hull.grid(row=0,column=0, sticky=tkinter.S) + + def buildEdgeControl(self, master): + edgeFrame=tkinter.Frame(master,relief=tkinter.GROOVE,borderwidth=2) + edgestyleFrame=tkinter.Frame(edgeFrame) + edgewidthFrame=tkinter.Frame(edgeFrame) + # Create the edge style radio buttons + self.edgestyleString=tkinter.StringVar(master) + for edgestyle in edgestyles: + a=tkinter.Radiobutton(edgestyleFrame,text=edgestyle, + variable=self.edgestyleString,value=edgestyle) + if edgestyle == edgestyleDefault: + a.invoke() + a.pack(side=tkinter.TOP,anchor=tkinter.W) + edgestyleFrame.pack(side=tkinter.LEFT,anchor=tkinter.W) + # Create the edge width slider + self.edgeWidthVar=tkinter.IntVar(master) + self.edgeWidthVar.set(5) + a=tkinter.Scale(edgewidthFrame,from_=1,to=30,variable=self.edgeWidthVar, + showvalue=1,label="Edge Width:",orient=tkinter.HORIZONTAL) + a.pack(side=tkinter.TOP,anchor=tkinter.N,fill=tkinter.X) + edgewidthFrame.pack(side=tkinter.RIGHT,anchor=tkinter.W,fill=tkinter.X,expand=1) + + # Add the edge control below the weight sliders + edgeFrame.grid(row=self.numrows,column=0,columnspan=self.numcolumns,sticky=tkinter.EW) + +#======================================================================== +class TestDialog(object): + "A dummy object used to test the back end." 
+ + def __init__(self, title="Tk", callbackMethod=None, labels=None, **kwargs): + print("TestDialog constructor:") + print("Title=", title) + print("labels=", labels) + print("kwargs=", kwargs) + self.__callbackMethod = callbackMethod + self.edgestyle = "Taper" + self.edgeWidth = "" + self.weights = [1] * len(labels) + + def mainloop(self): + self.__callbackMethod("Run") + +#======================================================================== +# +# The real GFE Tool +# +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + self._dbss=dbss + SmartScript.SmartScript.__init__(self, dbss) + + def preProcessGrid(self,WEname): + # + # Setup the arrays of information for the dialog + # box that sets the weights + # + # The mutable database is labeled "Forecast" no matter + # what you do, and put in the first slot on the dialog. + # The "Official" database is hardcoded into the next slot, + # then others are added + # + + self.labels = [] + self.dbIds = [] + + db=self.mutableID() + id=db.modelIdentifier() + self._addModel('Forecast:', id) + # + db=self.findDatabase("Official") + id=db.modelIdentifier() + self._addModel("Official", id) + # + plist = None + allOfficeTypes = None + for modelString in Models: + model, versions, parmlist = self.parseMS(modelString) + if model is None: + continue + if not self.acceptPL(WEname, parmlist): + continue + + # + # Make labels for each of the model runs we want. + # Singleton databases (like FCST or Official) that have + # no date (actually a 1970 date) have no date/run label. 
+ # + for run in range(0,-versions,-1): + db=self.findDatabase(model,run) + if db is None: + continue + id=db.modelIdentifier() + if id is None or ""==id or id in self.dbIds: + continue + if "ISC" == db.modelName(): + if allOfficeTypes is None: + allOfficeTypes = self.knownOfficeTypes() + iscOfficeTypes = [self.myOfficeType()] + if plist is None: + plist = self.availableParms() + for pname, plevel, pdb in plist: + if id != pdb.modelIdentifier(): + continue + for ot in allOfficeTypes: + if pname.endswith(ot) and \ + ot not in iscOfficeTypes: + iscOfficeTypes.append(ot) + for otype in iscOfficeTypes: + ltext = "%s (%s):"%(model, otype) + self._addModel(ltext, id) + else: + modtime=db.modelTime() + year=modtime.year + if year==1970: + lbltext="%s:"%model + else: + month=modtime.month + day=modtime.day + hour=modtime.hour + lbltext="%s %2.2d/%2.2d %2.2dZ:" % (model,month,day,hour) + self._addModel(lbltext,id) + # + # Now run the dialog box to get the weights + # resulting weights stored in Weights array + # + self.dlg=ToolDialog("Set Model Weights", + callbackMethod=self.execWeights, + labels=self.labels) +# self.dlg=TestDialog("Set Model Weights", +# callbackMethod=self.execWeights, +# labels=self.labels) + # + # Cancel the tool in the first pre-processGrid routine. + # No Execute routine is done - and grid is not marked as + # edited. 
Any editing will take place when they press a + # Button on the dialog and it calls execWeights + # + self.dlg.mainloop() + self.cancel() + + def parseMS(self, modelstring): + """Parse a model string into a model, versions, and parmlist.""" + model = None + versions = None + parmlist = None + pieces = modelstring.split(":") + len_pcs = len(pieces) + if len_pcs < 4: + model = pieces[0] + versions = 1 + parmlist = 'ALL' + if len_pcs > 1: + try: + versions = abs(int(pieces[1])) + except: + pass + if len_pcs > 2: + parmlist = pieces[2] + return (model, versions, parmlist) + + def acceptPL(self, WEName, parmlist): + """Check WEName against parmlist.""" + invert = False + parms = parmlist.split(",") + if '^'==parms[0][0]: + parms[0] = parms[0][1:] + invert = True + result = ('ALL'==parms[0]) or (WEName in parms) + result = invert ^ result + return result + + ## + # + # + def _addModel(self, text, id): + "Add text and id to self.labels and self.dbIds, respecively." + self.labels.append(text) + self.dbIds.append(id) + + #================================================================= + # + # Dummy execute routine. Tool is cancelled in preProcessGrid + # and all the real action is accomplished in execWeights which + # is called when the user presses a button on the dialog + # + def execute(self,variableElement): + "Specified blend of any/all model/forecast fields" + return variableElement + #================================================================= + # + # execWeights - The main calculation routine called when a button + # is pressed in the dialog. 
Passes in the string + # name of the button pressed + # + def execWeights(self,button): + # + # If user presses cancel, do an immediate return and stop + # + if button=="Cancel": + return + + # + # Get the results from the dialog + # + #for num in range(len(Labels)): + # Weights[num]=ScaleIDs[num].get() + EdgeType=self.dlg.edgestyle + EdgeWidth=self.dlg.edgeWidth + # + # If user presses run or run/dismiss, first add up the + # weights (in the ScaleIDs variables) and check for + # common issues like all weights zero, only weights on + # current grid, or grids add up to zero. + # + totweight=0 + fcstweight=0 + someweights=0 + otherweights=0 + + dbIds = self.dbIds # alias + weights = self.dlg.weights + maxAbsWeight = max( max(weights), abs(min(weights)) ) + someweights = (maxAbsWeight > 0.5) + fcstweight = weights[0] + otherweights = sum(weights[1:]) + totweight = fcstweight + otherweights + + if not someweights: + self.statusBarMsg("ModelBlend has no weights","R") + return + if abs(fcstweight) > 0.5 and otherweights==0: + self.statusBarMsg("ModelBlend Weights add to no change","R") + return + if totweight==0: + self.statusBarMsg("Weights cannot add up to zero","A") + return + # + # Get stuff usually provided by tool code: + # fcst=mutable model database name + # selectTR=the selected timerange + # + fcst = self.mutableID().modelIdentifier() + selectTR = self._dbss.getParmOp().getSelectionTimeRange() + # + # get list of parms that are selected and mutable + # + # Making a derivation from AWIPS1's version of this script. 
+ # Instead of calling direct to Java's ParmManager to get the Parm + # objects, we'll use SmartScript's selectedParms() to retrieve native + # Python objects which should save us Java heap space which wouldn't + # be freed otherwise until the user terminates the SmartTool + # + # allParms = self._dbss.getParmManager().getSelectedParms() + allParms = self.selectedParms() + parms = [] + for parm in allParms: + # model = parm.getParmID().getDbId().getModelId() + model = parm[2].modelIdentifier() + if model == fcst: + parms.append(parm) + + # + # loop over the mutable parms. + # get: wxType - type of parm + # WEname - short parm name string + # parmlevel - parm level string + # + for WEname, parmlevel, dbId in parms: + # Another AWIPS1 derivation: Use of different selectedParms() + # call forces us to retrieve Parm to retrieve some of these + # pieces of information + # + parm = self.getParm(dbId, WEname, parmlevel) + rateParm = parm.getGridInfo().isRateParm() + wxType = str(parm.getGridInfo().getGridType()) + del parm + + # + # Get list of grids for this parm within the selcted time range + # and loop over each of those grids + # + gridinfos=self.getGridInfo(fcst,WEname,parmlevel,selectTR) + for gridinfo in gridinfos: + GridTimeRange=gridinfo.gridTime() + # + # Easier when just a scalar + # + if 'SCALAR'==wxType: + # + # read each 'model' grid with a non-zero weight + # add up the weights again, because we cannot count + # weights for grids that cannot be read. 
+ # + gsum=self.empty() + totweight=0 + fcstweight=0 + oldgrid=self.getGrids(self.dbIds[0],WEname,"SFC",GridTimeRange,noDataError=0,cache=0) + if oldgrid==None: + self.statusBarMsg("ModelBlend tool could not get Fcst data for " + WEName,"A") + for num, label in enumerate(self.labels): + weight=weights[num] + if weight!=0: + modeType="TimeWtAverage" + if rateParm==1: + modeType="Sum" + #determine source - special if from ISC + idx = label.find("(") + idx1 = label.find(")",idx) + if idx == -1 or idx1 == -1: + WEnameSource = WEname + else: + ot = label[idx+1:idx1] + if ot == self.myOfficeType(): + WEnameSource = WEname + else: + WEnameSource = WEname + ot + grid=self.getGrids(self.dbIds[num],WEnameSource,"SFC",GridTimeRange,mode=modeType,noDataError=0,cache=0) + if grid != None: + gsum+=(grid*weight) + totweight+=weight + if (num==0): + fcstweight=weight + else: + errorstring="ModelBlend tool could not get data for %s" % label + self.statusBarMsg(errorstring,"A") + # + # Check again for no weights, or only weights for the current + # grid - in which case we make no changes and write info message + # otherwise - save the grid + # + if (totweight!=0): + if fcstweight==totweight: + self.statusBarMsg("ModelBlend makes no change","R") + else: + newgrid=gsum/totweight + finalgrid=self.inEditArea(newgrid,oldgrid,EdgeType,EdgeWidth) + self.createGrid(fcst,WEname,wxType,finalgrid,GridTimeRange) + else: + self.statusBarMsg("ModelBlend weights ended up Zero - so cancelled","A") + # + # A little more complicated when a vector + # + if 'VECTOR'==wxType: + # + # read each 'model' grid with a non-zero weight + # add up the weights again, because we cannot count + # weights for grids that cannot be read. 
+ # + oldgrid=self.getGrids(dbIds[0],WEname,"SFC",GridTimeRange,noDataError=0,cache=0) + if oldgrid==None: + self.statusBarMsg("ModelBlend tool could not get Fcst data for " + WEName,"A") + (mag,direc)=oldgrid + (uold,vold)=self.MagDirToUV(mag,direc) + + usum=self.empty() + vsum=self.empty() + + totweight=0 + fcstweight=0 + for num, weight in enumerate(weights): + if weight!=0: + grid=self.getGrids(self.dbIds[num],WEname,"SFC",GridTimeRange,noDataError=0,cache=0) + if grid != None: + (mag,direc)=grid + (u,v)=self.MagDirToUV(mag,direc) + usum+=(u*weight) + vsum+=(v*weight) + totweight+=weight + if (num==0): + fcstweight=weight + else: + errorstring="ModelBlend tool could not get data for %s" % self.labels[num] + self.statusBarMsg(errorstring,"A") + # + # Check again for no weights, or only weights for the current + # grid - in which case we make no changes and write info message + # otherwise - save the grid. + # + if (totweight!=0): + if fcstweight==totweight: + self.statusBarMsg("ModelBlend makes no change","R") + else: + unew=usum/totweight + vnew=vsum/totweight + ufinal=self.inEditArea(unew,uold,EdgeType,EdgeWidth) + vfinal=self.inEditArea(vnew,vold,EdgeType,EdgeWidth) + result=self.UVToMagDir(ufinal,vfinal) + self.createGrid(fcst,WEname,wxType,result,GridTimeRange) + #self.callSmartTool("DoNothing",WEname,None,GridTimeRange) + else: + self.statusBarMsg("ModelBlend weights ended up Zero - so cancelled","A") + + #===================================================================== + # inEditArea - Take an old grid and a new grid - and return the + # grid with the proper weighting between the two. + # + # This is where the EdgeType and EdgeWidth of the dialog + # box gets used. If there are no points in the current + # edit area - then we assume they want the entire domain. + # Otherwise we use the current edit area. If FLAT is + # used, then the new grid is returned in the edit area + # and the old grid is returned outside the edit area. 
+ # If EDGE or TAPER are used - then we nudge areas inside + # the edit area toward the new grid - based on how close + # it is to the edge of the edit are. + # + # Returns the final grid that should be returned. + # + def inEditArea(self,new,old,EdgeType,EdgeWidth): + # + # Get the active editarea + # + editArea=self.getActiveEditArea() + # + # We don't have the benefit of the usual GFE question about what + # to do with empty edit areas. We assume they want to run it over + # the entire domain - but have to switch the edit area ourselves + # + editAreaMask=editArea.getGrid() + if not editAreaMask.isAnyBitsSet(): + editArea.invert() + # + # Make edgegrid 0-1 across edit area + # + if (EdgeType=="Flat"): + edgegrid=editArea.getGrid().getNDArray() + elif (EdgeType=="Edge"): + edgegrid=self.taperGrid(editArea,EdgeWidth) + else: + edgegrid=self.taperGrid(editArea,0) + # + # return the final grid + # + diff=new-old + final=old+(diff*edgegrid) + return(final) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MoveFeatureBySpeed.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MoveFeatureBySpeed.py index 97a901f5d0..8d904b9242 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MoveFeatureBySpeed.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/MoveFeatureBySpeed.py @@ -1,362 +1,362 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# MoveFeatureBySpeed -# -# Author: Thomas R. Mazza -# adapted from EditAreaAdjust tool written by -# Les Colin -# Additional Contribution: Todd Lericos -# Last Updated: Tue 10 Jun 8 -# last Submitted to str: Tue 10 Jun 8 -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import LogStream, time -ToolType = "numeric" -WeatherElementEdited = "variableElement" -from numpy import * -ScreenList = ["SCALAR","VECTOR"] -HideTool = 1 -import math -import AbsTime -from math import * - -####### CONFIGURATION SECTION ######################################################################### -# -# Add or delete models according to whether or not they are available at your office. 
-# - -#sourceList = ["NAM12", "GFS40", "RAP40"] -sourceList = ["NAM12", "GFS40", "RAP40", "wrfnmm", "wrfarw", "WSETA"] - -threeHour = ["NAM12"] -sixHour = ["GFS40"] -RUC = ["RAP13", "RAP40"] - -resolution = 2.5 - -# -####### END CONFIGURATION SECTION ##################################################################### - -sourceList.append(("Fcst")) -sourceList.append(("Observed (enter below)")) - - -import SmartScript -## For available commands, see SmartScript - -VariableList = [] - -VariableList.append(("Source:", "Observed (enter below)", "radio", sourceList)) -VariableList.append(("Wind Level if using model:","MB700","radio",["MB925","MB850","MB700","MB500", "MB925-850", "MB850-700", "MB925-700", "MB925-500", "MB850-500", "MB700-500"])) -VariableList.append(("Movement Speed (Kts):", "15", "numeric")) -VariableList.append(("Movement Direction:" , "90", "numeric")) -VariableList.append(("Backfill upstream edges with:", "Original data", "radio", ["Original data", "Data from very edge\n(Fcst or Model only)", "Zeros"])) - - -# Set up Class - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - - def execute(self, variableElement, variableElement_GridInfo, GridTimeRange, varDict): - "Extrapolate features foward or backward in time based on observed or model speed" - self.logToolUse("Smart Tool execution") - - modelList = ["NAM12", "GFS40", "RAP40", "RAP13", "wrfnmm", "wrfarw", "WSETA", "Fcst"] ##################################################### - - if varDict["Source:"] not in modelList and varDict["Source:"] != "Observed (enter below)": - modelList.append((varDict["Source:"])) - - Backfill = varDict["Backfill upstream edges with:"] - - xLimit = len(variableElement[0]) - yLimit = len(variableElement) - - ######################################################################################### - # - # Get the source for the speed of motion - - source = varDict["Source:"] - levels = [] - if source in 
modelList: - site = self.getSiteID() - - if source == "Fcst": - level = "SFC" - msource = source - wind = "Wind" - Wind=self.getGrids(msource,wind,level,GridTimeRange) - (speed, dir) = (Wind[0], Wind[1]) - else: - #------------------------------------------------------------------------ - # Modification made by CAR to handle gaps in Model grids - #-------------------------------------------------------------------------- - # This section expands the range of time to look for model data to use. - # Model data is only available at certain times (e.g. NAM12: 00,03,06..etc) - # Therefore, if the grid to be created is not at a time where model data - # exists...then this code expands the time range to grab the nearest model - # data. This range will be different for each model. Therefore...if models - # are added to the top this section must be changed. - # - # Edited: Todd Lericos - # Date: 3 Aug 2006 - # - present = AbsTime.current() - today = AbsTime.absTimeYMD(present.year, present.month, present.day) - startTimeModel = (GridTimeRange.startTime() - today) /3600 -# print "over at Move", startTimeModel - - if source in threeHour: - self.modelRange = self.createTimeRange(startTimeModel-1, startTimeModel+2, "Zulu") - elif source in sixHour: - self.modelRange = self.createTimeRange(startTimeModel-2, startTimeModel+4, "Zulu") - else: - self.modelRange = GridTimeRange - - level = varDict["Wind Level if using model:"] - msource=site+"_D2D_"+source - wind = "wind" - if level in ["MB925","MB850","MB700","MB500"]: - Wind=self.getGrids(msource,wind,level,self.modelRange) - (speed, dir) = (Wind[0], Wind[1]) - else: - if level == "MB925-850": - levels = self.buildLevels(950, 850, source) - elif level == "MB850-700": - levels = self.buildLevels(950, 700, source) - elif level == "MB925-700": - levels = self.buildLevels(925, 700, source) - elif level == "MB925-500": - levels = self.buildLevels(925, 500, source) - elif level == "MB850-500": - levels = self.buildLevels(850, 500, 
source) - elif level == "MB700-500": - levels = self.buildLevels(700, 500, source) - i = 0 - j = 0 - for k in xrange(len(levels)): - Wind=self.getGrids(msource,wind,levels[k],GridTimeRange) - (u,v)=self.MagDirToUV(Wind[0], Wind[1]) - i += u - j += v - u = i / len(levels) - v = j / len(levels) - - # - # convert speed/dir arrays to speed/dir arrays - # - (speed, dir) = self.UVToMagDir(u,v) - # - # convert from m/s to kts: - # - if wind == "wind": ## have model data - need to convert from m/s to kts. - speed *= 1.94384449244 - - else: - - speed = varDict["Movement Speed (Kts):"] - dir = varDict["Movement Direction:"] - - - ######################################################################################## - # - # OK, we have the two components of motion, in kts. Now convert to kph and compute - # movement over the grid. - - if source == "Observed (enter below)": - - speed *= 1.852 / resolution ### 1 kt = 1.852 kph. - dir = abs(abs(360 - dir) + 90) - theta = dir % 360 - rads = pi * theta / 180 - - x = int(round(speed * cos(rads))) - y = -int(round(speed * sin(rads))) - - if type(variableElement_GridInfo) is not str: - - newT = zeros(shape(variableElement),dtype=float64) - 80.0 # default value for T - if Backfill == "Zeros": - changedMask = zeros(shape(variableElement),dtype=float64) # default value for T - if x > 0 and y > 0: - newT[y:, x:] = variableElement[:-y, :-x] - if Backfill == "Zeros": - changedMask[y:, x:] = 1 - elif x > 0 and y < 0: - newT[:y, x:] = variableElement[-y:, :-x] - if Backfill == "Zeros": - changedMask[:y, x:] = 1 - elif x < 0 and y > 0: - newT[y:, :x] = variableElement[:-y, -x:] - if Backfill == "Zeros": - changedMask[y:, :x] = 1 - elif x < 0 and y < 0: - newT[:y, :x] = variableElement[-y:, -x:] - if Backfill == "Zeros": - changedMask[:y, :x] = 1 - elif x == 0 and y > 0: - newT[y:, x:] = variableElement[:-y, :] - if Backfill == "Zeros": - changedMask[y:, x:] = 1 - elif x == 0 and y < 0: - newT[:y, x:] = variableElement[-y:, :] - if Backfill 
== "Zeros": - changedMask[:y, x:] = 1 - elif x > 0 and y == 0: - newT[y:, x:] = variableElement[:, :-x] - if Backfill == "Zeros": - changedMask[y:, x:] = 1 - elif x < 0 and y == 0: - newT[y:, :x] = variableElement[:, -x:] - if Backfill == "Zeros": - changedMask[y:, :x] = 1 - - else: - - newT0 = zeros(shape(variableElement[0]),dtype=int32) - 80.0 - newT1 = zeros(shape(variableElement[1]),dtype=int32) - 80.0 - oldT0,oldT1 = variableElement - if Backfill == "Zeros": - changedMask = zeros(shape(variableElement),dtype=float64) # default value for T - - if x > 0 and y > 0: - newT0[y:, x:] = oldT0[:-y, :-x] - newT1[y:, x:] = oldT1[:-y, :-x] - elif x > 0 and y < 0: - newT0[:y, x:] = oldT0[-y:, :-x] - newT1[:y, x:] = oldT1[-y:, :-x] - elif x < 0 and y > 0: - newT0[y:, :x] = oldT0[:-y, -x:] - newT1[y:, :x] = oldT1[:-y, -x:] - elif x < 0 and y < 0: - newT0[:y, :x] = oldT0[-y:, -x:] - newT1[:y, :x] = oldT1[-y:, -x:] - elif x == 0 and y > 0: - newT0[y:, x:] = oldT0[:-y, :] - newT1[y:, x:] = oldT1[:-y, :] - elif x == 0 and y < 0: - newT0[:y, x:] = oldT0[-y:, :] - newT1[:y, x:] = oldT1[-y:, :] - elif x > 0 and y == 0: - newT0[y:, x:] = oldT0[:, :-x] - newT1[y:, x:] = oldT1[:, :-x] - elif x < 0 and y == 0: - newT0[y:, :x] = oldT0[:, -x:] - newT1[y:, :x] = oldT1[:, -x:] - - if Backfill == "Zeros": - changedMask[y:, x:] = 1 - - newT = newT0,newT1 - - else: # source is a model - speed1 = speed * 1.852 / resolution ### 1 kt = 1.852 kph. 
-# print "in move feature tool, missing hours ", missingHours - theta = dir % 360 - - (u,v)=self.MagDirToUV(speed1,theta) - u /= resolution - v /= resolution - if varDict["Movement Direction:"] < 0: - u *= -1 - v *= -1 - newVariableElement = zeros(shape(variableElement),dtype=int16) - if Backfill != "Original data": - changedMask = zeros(shape(variableElement),dtype=float64) + 1 - - for x in xrange(len(variableElement[0])): - - for y in xrange(len(variableElement)): - i = u[y,x] - j = v[y,x] - a = x - i - b = y - j - a = int(clip(a,0,xLimit - 1)) - b = int(clip(b,0,yLimit - 1)) - - newVariableElement[y,x] = variableElement[b,a] - - if Backfill == "Data from very edge\n(Fcst or Model only)": - - if u[y,x] > x : - newVariableElement[y,x] = newVariableElement[y,0] - if u[y,x] < 0 and u[y,x] > xLimit - x: - newVariableElement[y,x] = newVariableElement[y,xLimit] - if v[y,x] > y: - newVariableElement[y,x] = newVariableElement[0,x] - if v[y,x] < 0 and v[y,x] > yLimit - y: - newVariableElement[y,x] = newVariableElement[yLimit,x] - - elif Backfill == "Zeros": - - if u[y,x] > x : - changedMask[y, x] = 0 - if u[y,x] < 0 and abs(u[y,x]) > xLimit - x: - changedMask[y, x] = 0 - if v[y,x] > y: - changedMask[y, x] = 0 - if v[y,x] < 0 and abs(v[y,x]) > yLimit - y: - changedMask[y, x] = 0 - - newT = newVariableElement - - newT = where(less(newT, -30), variableElement, newT) - if Backfill == "Zeros": - newT[(changedMask <= 0)] = 0 - # Return the new value - - return newT.astype(variableElement.dtype) - - def buildLevels(self, base, top, model): - plevels = [] - if model in RUC: - if base == 925: - base = 950 - plevel = base - while plevel >= top: - plevels.append(("MB" + str(plevel))) - plevel -= 50 - else: - plevel = base - while plevel >= top: - plevels.append(("MB" + str(plevel))) - plevel -= 25 - return plevels - - - - def logToolUse(self,string): - gtime=time.gmtime() - ts="%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d"%(gtime[0],gtime[1],gtime[2], - gtime[3],gtime[4],gtime[5]) - 
LogStream.logEvent("%s| %s" % (ts,string)) +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# MoveFeatureBySpeed +# +# Author: Thomas R. Mazza +# adapted from EditAreaAdjust tool written by +# Les Colin +# Additional Contribution: Todd Lericos +# Last Updated: Tue 10 Jun 8 +# last Submitted to str: Tue 10 Jun 8 +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +import LogStream, time +ToolType = "numeric" +WeatherElementEdited = "variableElement" +from numpy import * +ScreenList = ["SCALAR","VECTOR"] +HideTool = 1 +import math +import AbsTime +from math import * + +####### CONFIGURATION SECTION ######################################################################### +# +# Add or delete models according to whether or not they are available at your office. 
+# + +#sourceList = ["NAM12", "GFS40", "RAP40"] +sourceList = ["NAM12", "GFS40", "RAP40", "wrfnmm", "wrfarw", "WSETA"] + +threeHour = ["NAM12"] +sixHour = ["GFS40"] +RUC = ["RAP13", "RAP40"] + +resolution = 2.5 + +# +####### END CONFIGURATION SECTION ##################################################################### + +sourceList.append(("Fcst")) +sourceList.append(("Observed (enter below)")) + + +import SmartScript +## For available commands, see SmartScript + +VariableList = [] + +VariableList.append(("Source:", "Observed (enter below)", "radio", sourceList)) +VariableList.append(("Wind Level if using model:","MB700","radio",["MB925","MB850","MB700","MB500", "MB925-850", "MB850-700", "MB925-700", "MB925-500", "MB850-500", "MB700-500"])) +VariableList.append(("Movement Speed (Kts):", "15", "numeric")) +VariableList.append(("Movement Direction:" , "90", "numeric")) +VariableList.append(("Backfill upstream edges with:", "Original data", "radio", ["Original data", "Data from very edge\n(Fcst or Model only)", "Zeros"])) + + +# Set up Class + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + + def execute(self, variableElement, variableElement_GridInfo, GridTimeRange, varDict): + "Extrapolate features foward or backward in time based on observed or model speed" + self.logToolUse("Smart Tool execution") + + modelList = ["NAM12", "GFS40", "RAP40", "RAP13", "wrfnmm", "wrfarw", "WSETA", "Fcst"] ##################################################### + + if varDict["Source:"] not in modelList and varDict["Source:"] != "Observed (enter below)": + modelList.append((varDict["Source:"])) + + Backfill = varDict["Backfill upstream edges with:"] + + xLimit = len(variableElement[0]) + yLimit = len(variableElement) + + ######################################################################################### + # + # Get the source for the speed of motion + + source = varDict["Source:"] + levels = [] + if source in 
modelList: + site = self.getSiteID() + + if source == "Fcst": + level = "SFC" + msource = source + wind = "Wind" + Wind=self.getGrids(msource,wind,level,GridTimeRange) + (speed, dir) = (Wind[0], Wind[1]) + else: + #------------------------------------------------------------------------ + # Modification made by CAR to handle gaps in Model grids + #-------------------------------------------------------------------------- + # This section expands the range of time to look for model data to use. + # Model data is only available at certain times (e.g. NAM12: 00,03,06..etc) + # Therefore, if the grid to be created is not at a time where model data + # exists...then this code expands the time range to grab the nearest model + # data. This range will be different for each model. Therefore...if models + # are added to the top this section must be changed. + # + # Edited: Todd Lericos + # Date: 3 Aug 2006 + # + present = AbsTime.current() + today = AbsTime.absTimeYMD(present.year, present.month, present.day) + startTimeModel = (GridTimeRange.startTime() - today) /3600 +# print "over at Move", startTimeModel + + if source in threeHour: + self.modelRange = self.createTimeRange(startTimeModel-1, startTimeModel+2, "Zulu") + elif source in sixHour: + self.modelRange = self.createTimeRange(startTimeModel-2, startTimeModel+4, "Zulu") + else: + self.modelRange = GridTimeRange + + level = varDict["Wind Level if using model:"] + msource=site+"_D2D_"+source + wind = "wind" + if level in ["MB925","MB850","MB700","MB500"]: + Wind=self.getGrids(msource,wind,level,self.modelRange) + (speed, dir) = (Wind[0], Wind[1]) + else: + if level == "MB925-850": + levels = self.buildLevels(950, 850, source) + elif level == "MB850-700": + levels = self.buildLevels(950, 700, source) + elif level == "MB925-700": + levels = self.buildLevels(925, 700, source) + elif level == "MB925-500": + levels = self.buildLevels(925, 500, source) + elif level == "MB850-500": + levels = self.buildLevels(850, 500, 
source) + elif level == "MB700-500": + levels = self.buildLevels(700, 500, source) + i = 0 + j = 0 + for k in range(len(levels)): + Wind=self.getGrids(msource,wind,levels[k],GridTimeRange) + (u,v)=self.MagDirToUV(Wind[0], Wind[1]) + i += u + j += v + u = i / len(levels) + v = j / len(levels) + + # + # convert speed/dir arrays to speed/dir arrays + # + (speed, dir) = self.UVToMagDir(u,v) + # + # convert from m/s to kts: + # + if wind == "wind": ## have model data - need to convert from m/s to kts. + speed *= 1.94384449244 + + else: + + speed = varDict["Movement Speed (Kts):"] + dir = varDict["Movement Direction:"] + + + ######################################################################################## + # + # OK, we have the two components of motion, in kts. Now convert to kph and compute + # movement over the grid. + + if source == "Observed (enter below)": + + speed *= 1.852 / resolution ### 1 kt = 1.852 kph. + dir = abs(abs(360 - dir) + 90) + theta = dir % 360 + rads = pi * theta / 180 + + x = int(round(speed * cos(rads))) + y = -int(round(speed * sin(rads))) + + if type(variableElement_GridInfo) is not str: + + newT = zeros(shape(variableElement),dtype=float64) - 80.0 # default value for T + if Backfill == "Zeros": + changedMask = zeros(shape(variableElement),dtype=float64) # default value for T + if x > 0 and y > 0: + newT[y:, x:] = variableElement[:-y, :-x] + if Backfill == "Zeros": + changedMask[y:, x:] = 1 + elif x > 0 and y < 0: + newT[:y, x:] = variableElement[-y:, :-x] + if Backfill == "Zeros": + changedMask[:y, x:] = 1 + elif x < 0 and y > 0: + newT[y:, :x] = variableElement[:-y, -x:] + if Backfill == "Zeros": + changedMask[y:, :x] = 1 + elif x < 0 and y < 0: + newT[:y, :x] = variableElement[-y:, -x:] + if Backfill == "Zeros": + changedMask[:y, :x] = 1 + elif x == 0 and y > 0: + newT[y:, x:] = variableElement[:-y, :] + if Backfill == "Zeros": + changedMask[y:, x:] = 1 + elif x == 0 and y < 0: + newT[:y, x:] = variableElement[-y:, :] + if Backfill 
== "Zeros": + changedMask[:y, x:] = 1 + elif x > 0 and y == 0: + newT[y:, x:] = variableElement[:, :-x] + if Backfill == "Zeros": + changedMask[y:, x:] = 1 + elif x < 0 and y == 0: + newT[y:, :x] = variableElement[:, -x:] + if Backfill == "Zeros": + changedMask[y:, :x] = 1 + + else: + + newT0 = zeros(shape(variableElement[0]),dtype=int32) - 80.0 + newT1 = zeros(shape(variableElement[1]),dtype=int32) - 80.0 + oldT0,oldT1 = variableElement + if Backfill == "Zeros": + changedMask = zeros(shape(variableElement),dtype=float64) # default value for T + + if x > 0 and y > 0: + newT0[y:, x:] = oldT0[:-y, :-x] + newT1[y:, x:] = oldT1[:-y, :-x] + elif x > 0 and y < 0: + newT0[:y, x:] = oldT0[-y:, :-x] + newT1[:y, x:] = oldT1[-y:, :-x] + elif x < 0 and y > 0: + newT0[y:, :x] = oldT0[:-y, -x:] + newT1[y:, :x] = oldT1[:-y, -x:] + elif x < 0 and y < 0: + newT0[:y, :x] = oldT0[-y:, -x:] + newT1[:y, :x] = oldT1[-y:, -x:] + elif x == 0 and y > 0: + newT0[y:, x:] = oldT0[:-y, :] + newT1[y:, x:] = oldT1[:-y, :] + elif x == 0 and y < 0: + newT0[:y, x:] = oldT0[-y:, :] + newT1[:y, x:] = oldT1[-y:, :] + elif x > 0 and y == 0: + newT0[y:, x:] = oldT0[:, :-x] + newT1[y:, x:] = oldT1[:, :-x] + elif x < 0 and y == 0: + newT0[y:, :x] = oldT0[:, -x:] + newT1[y:, :x] = oldT1[:, -x:] + + if Backfill == "Zeros": + changedMask[y:, x:] = 1 + + newT = newT0,newT1 + + else: # source is a model + speed1 = speed * 1.852 / resolution ### 1 kt = 1.852 kph. 
+# print "in move feature tool, missing hours ", missingHours + theta = dir % 360 + + (u,v)=self.MagDirToUV(speed1,theta) + u /= resolution + v /= resolution + if varDict["Movement Direction:"] < 0: + u *= -1 + v *= -1 + newVariableElement = zeros(shape(variableElement),dtype=int16) + if Backfill != "Original data": + changedMask = zeros(shape(variableElement),dtype=float64) + 1 + + for x in range(len(variableElement[0])): + + for y in range(len(variableElement)): + i = u[y,x] + j = v[y,x] + a = x - i + b = y - j + a = int(clip(a,0,xLimit - 1)) + b = int(clip(b,0,yLimit - 1)) + + newVariableElement[y,x] = variableElement[b,a] + + if Backfill == "Data from very edge\n(Fcst or Model only)": + + if u[y,x] > x : + newVariableElement[y,x] = newVariableElement[y,0] + if u[y,x] < 0 and u[y,x] > xLimit - x: + newVariableElement[y,x] = newVariableElement[y,xLimit] + if v[y,x] > y: + newVariableElement[y,x] = newVariableElement[0,x] + if v[y,x] < 0 and v[y,x] > yLimit - y: + newVariableElement[y,x] = newVariableElement[yLimit,x] + + elif Backfill == "Zeros": + + if u[y,x] > x : + changedMask[y, x] = 0 + if u[y,x] < 0 and abs(u[y,x]) > xLimit - x: + changedMask[y, x] = 0 + if v[y,x] > y: + changedMask[y, x] = 0 + if v[y,x] < 0 and abs(v[y,x]) > yLimit - y: + changedMask[y, x] = 0 + + newT = newVariableElement + + newT = where(less(newT, -30), variableElement, newT) + if Backfill == "Zeros": + newT[(changedMask <= 0)] = 0 + # Return the new value + + return newT.astype(variableElement.dtype) + + def buildLevels(self, base, top, model): + plevels = [] + if model in RUC: + if base == 925: + base = 950 + plevel = base + while plevel >= top: + plevels.append(("MB" + str(plevel))) + plevel -= 50 + else: + plevel = base + while plevel >= top: + plevels.append(("MB" + str(plevel))) + plevel -= 25 + return plevels + + + + def logToolUse(self,string): + gtime=time.gmtime() + ts="%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d"%(gtime[0],gtime[1],gtime[2], + gtime[3],gtime[4],gtime[5]) + 
LogStream.logEvent("%s| %s" % (ts,string)) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/PERCENTGREEN.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/PERCENTGREEN.py index 685ab740f3..171a7bebba 100755 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/PERCENTGREEN.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/PERCENTGREEN.py @@ -1,132 +1,132 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# PERCENTGREEN.py -# -# Author: dtomalak -# ---------------------------------------------------------------------------- - - -ToolType = "numeric" -WeatherElementEdited = "PERCENTGREEN" -from numpy import * -HideTool = 0 - -# You can screen the elements for which your tool will appear by using -# a ScreenList. 
For example: -# -#ScreenList = ["T","Td"] -#ScreenList = ["SCALAR","VECTOR","WEATHER","DISCRETE"] - -# If desired, Set up variables to be solicited from the user: -# VariableList = [ -# ("Variable name1" , defaultValue1, "numeric"), -# ("Variable name2" , "default value2", "alphaNumeric"), -# ("Variable name3" , ["default value1", "default value2"], "check", -# ["value1", "value2", "value3"]), -# ("Variable name4" , "default value4", "radio", -# ["value1", "value2", "value3"]), -# ("Variable name5" , defaultValue, "scale", -# [minValue, maxValue], resolution), -# ("Variable name6" , "", "model"), -# ("Variable name7" , "", "D2D_model"), -# ("Label contents" , "", "label"), -# ("", dialogHeight, "scrollbar"), -# ] - -# Set up Class -import SmartScript -import types, string, imp, cPickle, time, sys -from math import * -import re -import Exceptions -import UnitConvertor -# For available commands, see SmartScript - - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Required Method: Execute - # %comment - # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... - - def execute(self, PERCENTGREEN, varDict): - "THIS TOOL WILL POPULATE A % GREEN GRID FOR THE RANGELAND FIRE DANGER INDEX" - ####CONFIGURABLE SECTION - ###PERCENT GREEN DATA IS NEEDS TO BE IN FIPS CODE VALUE FORMAT!!! - #STATE DICTIONARY - #DICTIONARY OF EACH DESIRED STATE AND THE FILENAME OF % GREEN FILE - self._statesdict = {"NE" : "ne.green.txt", - "IA" : "ia.green.txt", - } - - #DATA DIRECTORY - name of directory where data is stored - #ex "/home/local/testdat (leave off last /) - datadir = "/data/local/PercentGreen/" - - #SET VARIABLES TO "NONE" - ####END CONFIGURATIONS!!!!!!!!!!!!!! 
- ############################################################ - ############################################################ - ############## MAKE NO CHANGES ########################### - ############################################################ - # - #COLLECT FIPS AREAS IN DATABASE - alleditareas = self.editAreaList() - FIPSonly = [] - statekeys = self._statesdict.keys() - for area in alleditareas: - #TEST FOR FIPS CODES - if len(area) != 6: - continue - else: - test = area[0:2] - test2 = area[2:] - if test in statekeys: - #do something - if string.find(test2, "C") != -1: - #AREA HAS PASSED ALL TESTS>>>IS LIKELY A FIPS CODE - FIPSonly.append(area) - continue - else: - continue - else: - continue - #FOREACH STATE GRAB THE DATA AND PUT IT IN STRING FORMAT - #WILL RETURN ONE LIST FOR ALL STATES - datadict = {} - for state in statekeys: - stfile = self._statesdict[state] - try: - getdat = open(datadir + "/" + stfile, "r") - data = getdat.readlines() - getdat.close() - for line in data: - line = string.strip(line) #CLEAN OUT EXTRA SPACES if there is any - val = string.split(line, " ") - if len(val) > 2: - #PREVENT NON DATA POINTS FROM GETTTING INTO DATA DICT - continue - if val[0] in FIPSonly: - datadict[str(val[0])] = str(val[1]) - else: - continue - except: - continue - #DATA NOW IN DICTIONARY FORM...STEP THROUGH EACH KEY AND ASSIGN A DATA VALUE - #USING WHERE STATEMENTS - newgreen = zeros(PERCENTGREEN.shape, int32) - - for zone in datadict.keys(): - area = zone - value = int(datadict[zone]) - areamask = self.encodeEditArea(area) - newgreen[not_equal(areamask,0)] = value - - PERCENTGREEN = newgreen - return PERCENTGREEN +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# PERCENTGREEN.py +# +# Author: dtomalak +# ---------------------------------------------------------------------------- + + +ToolType = "numeric" +WeatherElementEdited = "PERCENTGREEN" +from numpy import * +HideTool = 0 + +# You can screen the elements for which your tool will appear by using +# a ScreenList. For example: +# +#ScreenList = ["T","Td"] +#ScreenList = ["SCALAR","VECTOR","WEATHER","DISCRETE"] + +# If desired, Set up variables to be solicited from the user: +# VariableList = [ +# ("Variable name1" , defaultValue1, "numeric"), +# ("Variable name2" , "default value2", "alphaNumeric"), +# ("Variable name3" , ["default value1", "default value2"], "check", +# ["value1", "value2", "value3"]), +# ("Variable name4" , "default value4", "radio", +# ["value1", "value2", "value3"]), +# ("Variable name5" , defaultValue, "scale", +# [minValue, maxValue], resolution), +# ("Variable name6" , "", "model"), +# ("Variable name7" , "", "D2D_model"), +# ("Label contents" , "", "label"), +# ("", dialogHeight, "scrollbar"), +# ] + +# Set up Class +import SmartScript +import types, string, imp, pickle, time, sys +from math import * +import re +import Exceptions +import UnitConvertor +# For available commands, see SmartScript + + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Required Method: Execute + # %comment + # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... + + def execute(self, PERCENTGREEN, varDict): + "THIS TOOL WILL POPULATE A % GREEN GRID FOR THE RANGELAND FIRE DANGER INDEX" + ####CONFIGURABLE SECTION + ###PERCENT GREEN DATA IS NEEDS TO BE IN FIPS CODE VALUE FORMAT!!! 
+ #STATE DICTIONARY + #DICTIONARY OF EACH DESIRED STATE AND THE FILENAME OF % GREEN FILE + self._statesdict = {"NE" : "ne.green.txt", + "IA" : "ia.green.txt", + } + + #DATA DIRECTORY - name of directory where data is stored + #ex "/home/local/testdat (leave off last /) + datadir = "/data/local/PercentGreen/" + + #SET VARIABLES TO "NONE" + ####END CONFIGURATIONS!!!!!!!!!!!!!! + ############################################################ + ############################################################ + ############## MAKE NO CHANGES ########################### + ############################################################ + # + #COLLECT FIPS AREAS IN DATABASE + alleditareas = self.editAreaList() + FIPSonly = [] + statekeys = list(self._statesdict.keys()) + for area in alleditareas: + #TEST FOR FIPS CODES + if len(area) != 6: + continue + else: + test = area[0:2] + test2 = area[2:] + if test in statekeys: + #do something + if string.find(test2, "C") != -1: + #AREA HAS PASSED ALL TESTS>>>IS LIKELY A FIPS CODE + FIPSonly.append(area) + continue + else: + continue + else: + continue + #FOREACH STATE GRAB THE DATA AND PUT IT IN STRING FORMAT + #WILL RETURN ONE LIST FOR ALL STATES + datadict = {} + for state in statekeys: + stfile = self._statesdict[state] + try: + getdat = open(datadir + "/" + stfile, "r") + data = getdat.readlines() + getdat.close() + for line in data: + line = string.strip(line) #CLEAN OUT EXTRA SPACES if there is any + val = string.split(line, " ") + if len(val) > 2: + #PREVENT NON DATA POINTS FROM GETTTING INTO DATA DICT + continue + if val[0] in FIPSonly: + datadict[str(val[0])] = str(val[1]) + else: + continue + except: + continue + #DATA NOW IN DICTIONARY FORM...STEP THROUGH EACH KEY AND ASSIGN A DATA VALUE + #USING WHERE STATEMENTS + newgreen = zeros(PERCENTGREEN.shape, int32) + + for zone in list(datadict.keys()): + area = zone + value = int(datadict[zone]) + areamask = self.encodeEditArea(area) + newgreen[not_equal(areamask,0)] = value + + 
PERCENTGREEN = newgreen + return PERCENTGREEN \ No newline at end of file diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Populate_SkyTool.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Populate_SkyTool.py index 475a02d812..ce18e7a8e7 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Populate_SkyTool.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Populate_SkyTool.py @@ -1,259 +1,259 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Populate_SkyTool -- Version 1.0 -# -# Author: Pete Banacos, WFO BTV (Started: 9/20/06) -# Last update: 1/23/07 -# -# DR 18542 bwhundermark Fixed RH calculation w.r.t ice 06/01/16 -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -ToolType = "numeric" -WeatherElementEdited = "Sky" - -from numpy import * - -### Solicite variables from the forecaster: -VariableList = [ - ("Populate SkyTool Version 1.0","","label"), - ("Model:", "NAM12", "radio", ["GFS40", "NAM12"]), - ("Model Run:", "Current", "radio", ["Current", "Previous"]), - ("Layer depth:", "50mb", "radio", ["50mb", "25mb"]), - ("Use RH w.r.t. ICE @ T < -25C?", "No", "radio", ["Yes", "No"]), - ("", "", "label"), - ("Include high clouds (500-300mb)?", "No", "radio", ["Yes", "No"]), - ("Include clouds below 925mb?", "Yes", "radio", ["Yes", "No"]), - ("5% Sky Cover threshold at RH percentage:", 60., "scale", [44., 74.],2.0), - ("Above value sets RH threshold for CLR skies.", "", "label"), - ("Calibration:", 1.00, "scale", [1.00, 1.50],0.02), - ("Raise calibration to get more sky cover for a given RH.", "", "label"), - (" --- Limit Values Section --- ", "", "label"), - ("Don't give me sky cover above (percent):", 100, "scale", [0, 100], 1), - ("Don't give me sky cover below (percent):", 0, "scale", [0, 100], 1), - ] - -import SmartScript - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, Sky, GridTimeRange, varDict): - "Determine Sky Cover based on exponential function where layer RH is the dependent variable." - - SITE = self.getSiteID() - - layer_depth = varDict["Layer depth:"] - lowPBL = varDict["Include clouds below 925mb?"] - hiCLDS = varDict["Include high clouds (500-300mb)?"] - UseRHi = varDict["Use RH w.r.t. 
ICE @ T < -25C?"] - limit_MAX = varDict["Don't give me sky cover above (percent):"] - limit_MIN = varDict["Don't give me sky cover below (percent):"] - model1 = varDict["Model:"] - modelrun = varDict["Model Run:"] - modeltemp = "D2D_" + model1 - - if modelrun == "Current": - model = self.findDatabase(modeltemp, 0) - else: - model = self.findDatabase(modeltemp, -1) - -# Grab RH values from the numerical model - - print 'GridTimeRange = ', GridTimeRange - RHPBL030 = self.getGrids(model, "rh", "BL030", GridTimeRange) - RHPBL3060 = self.getGrids(model, "rh", "BL3060", GridTimeRange) - RHPBL6090 = self.getGrids(model, "rh", "BL6090", GridTimeRange) - RHPBL90120 = self.getGrids(model, "rh", "BL90120", GridTimeRange) - -# TESTING SECTION - - lvl = ["BL030", "BL3060", "BL6090", "BL90120", - "MB925","MB900","MB875","MB850", - "MB825","MB800","MB775","MB750", - "MB725","MB700","MB675","MB650", - "MB625","MB600","MB575","MB550", - "MB525","MB500","MB450","MB400", - "MB350","MB300"] - -# Populate Temperature list with default of freezing - T_lvl = [273,273,273,273,273,273,273,273,273, - 273,273,273,273,273,273,273,273,273, - 273,273,273,273,273,273,273,273] -# Populate RH list with default of 50% - RH_lvl = [50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50] - e_lvl = [50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50] - es_lvl = [50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50] - esi_lvl = [50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50] - RHi_lvl = [50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50,50, - 50,50,50,50,50,50,50,50] - - for x in range(len(lvl)): - T_lvl[x] = self.getGrids(model, "t", lvl[x], GridTimeRange) - RH_lvl[x] = self.getGrids(model, "rh", lvl[x], GridTimeRange) - es_lvl[x] = 6.11 * exp(5412.*((1./273.)-(1./T_lvl[x]))) - e_lvl[x] = (RH_lvl[x] * es_lvl[x]) / 100. 
-# compute RH with respect to ice, using latent heat of sublimation: - esi_lvl[x] = 6.11 * exp(6133.*((1./273.)-(1./T_lvl[x]))) - RHi_lvl[x] = (e_lvl[x] / esi_lvl[x]) * 100. - -# If grid pt. temperature is less than -25C, use RH over ice... - if UseRHi == "Yes": - mask = T_lvl[x] < 248 - RH_lvl[x][mask] = RHi_lvl[x][mask] - -# Populate 30mb near-sfc AGL layers: - - RHPBL030 = RH_lvl[0] - RHPBL3060 = RH_lvl[1] - RHPBL6090 = RH_lvl[2] - RHPBL90120 = RH_lvl[3] - -# compute layer-averaged RH values (50mb) - - if layer_depth == "50mb": - RHavg925_875 = ((RH_lvl[4]+RH_lvl[5]+RH_lvl[6])/3) - RHavg900_850 = ((RH_lvl[5]+RH_lvl[6]+RH_lvl[7])/3) - RHavg875_825 = ((RH_lvl[6]+RH_lvl[7]+RH_lvl[8])/3) - RHavg850_800 = ((RH_lvl[7]+RH_lvl[8]+RH_lvl[9])/3) - RHavg825_775 = ((RH_lvl[8]+RH_lvl[9]+RH_lvl[10])/3) - RHavg800_750 = ((RH_lvl[9]+RH_lvl[10]+RH_lvl[11])/3) - RHavg775_725 = ((RH_lvl[10]+RH_lvl[11]+RH_lvl[12])/3) - RHavg750_700 = ((RH_lvl[11]+RH_lvl[12]+RH_lvl[13])/3) - RHavg725_675 = ((RH_lvl[12]+RH_lvl[13]+RH_lvl[14])/3) - RHavg700_650 = ((RH_lvl[13]+RH_lvl[14]+RH_lvl[15])/3) - RHavg675_625 = ((RH_lvl[14]+RH_lvl[15]+RH_lvl[16])/3) - RHavg650_600 = ((RH_lvl[15]+RH_lvl[16]+RH_lvl[17])/3) - RHavg625_575 = ((RH_lvl[16]+RH_lvl[17]+RH_lvl[18])/3) - RHavg600_550 = ((RH_lvl[17]+RH_lvl[18]+RH_lvl[19])/3) - RHavg575_525 = ((RH_lvl[18]+RH_lvl[19]+RH_lvl[20])/3) - RHavg550_500 = ((RH_lvl[19]+RH_lvl[20]+RH_lvl[21])/3) - else: -# depth is in 25mb layers - RHavg925_900 = ((RH_lvl[4]+RH_lvl[5])/2) - RHavg900_875 = ((RH_lvl[5]+RH_lvl[6])/2) - RHavg875_850 = ((RH_lvl[6]+RH_lvl[7])/2) - RHavg850_825 = ((RH_lvl[7]+RH_lvl[8])/2) - RHavg825_800 = ((RH_lvl[8]+RH_lvl[9])/2) - RHavg800_775 = ((RH_lvl[9]+RH_lvl[10])/2) - RHavg775_750 = ((RH_lvl[10]+RH_lvl[11])/2) - RHavg750_725 = ((RH_lvl[11]+RH_lvl[12])/2) - RHavg725_700 = ((RH_lvl[12]+RH_lvl[13])/2) - RHavg700_675 = ((RH_lvl[13]+RH_lvl[14])/2) - RHavg675_650 = ((RH_lvl[14]+RH_lvl[15])/2) - RHavg650_625 = ((RH_lvl[15]+RH_lvl[16])/2) - RHavg625_600 = 
((RH_lvl[16]+RH_lvl[17])/2) - RHavg600_575 = ((RH_lvl[17]+RH_lvl[18])/2) - RHavg575_550 = ((RH_lvl[18]+RH_lvl[19])/2) - RHavg550_525 = ((RH_lvl[19]+RH_lvl[20])/2) - RHavg525_500 = ((RH_lvl[20]+RH_lvl[21])/2) - -# Layer depth above 500mb is always 50mb... - RHavg500_450 = ((RH_lvl[21]+RH_lvl[22])/2) - RHavg450_400 = ((RH_lvl[22]+RH_lvl[23])/2) - RHavg400_350 = ((RH_lvl[23]+RH_lvl[24])/2) - RHavg350_300 = ((RH_lvl[24]+RH_lvl[25])/2) - -# Generate List of layers to check. - - my_PBLlist = [RHPBL030, RHPBL3060, RHPBL6090, RHPBL90120] - my_upr_trop_list = [RHavg500_450,RHavg450_400, RHavg400_350, - RHavg350_300] - - if layer_depth == "50mb": - my_list = [RHavg925_875, RHavg900_850, RHavg875_825, - RHavg850_800, RHavg825_775, RHavg800_750, RHavg775_725, - RHavg750_700, RHavg725_675, RHavg700_650, RHavg675_625, - RHavg650_600, RHavg625_575, RHavg600_550, RHavg575_525, - RHavg550_500] - - else: - my_list25 = [RHavg925_900, RHavg900_875, RHavg875_850, RHavg850_825, - RHavg825_800, RHavg800_775, RHavg775_750, RHavg750_725, - RHavg725_700, RHavg700_675, RHavg675_650, RHavg650_625, - RHavg625_600, RHavg600_575, RHavg575_550, RHavg550_525, - RHavg525_500] - - -# Put lowest RH layer being used into place holder... - if lowPBL == "Yes": - holder = RHPBL030 - elif layer_depth == "50mb": - holder = RHavg925_875 - else: - holder = RHavg925_900 - -# check Ground Relative layers first at low-levels, as selected by user. - if lowPBL == "Yes": - for layerRH in my_PBLlist: - holder = where(greater(layerRH, holder), layerRH, holder) - -# Check Layers incrementally - if layer_depth == "50mb": - for layerRH in my_list: - holder= where(greater(layerRH, holder), layerRH, holder) - else: - for layerRH in my_list25: - holder= where(greater(layerRH, holder), layerRH, holder) - -# If user wants high clouds (above 500mb), continue layer checking... 
- if hiCLDS == "Yes": - for layerRH in my_upr_trop_list: - holder = where(greater(layerRH, holder), layerRH, holder) - -# Compute Cloud Amount - Calib = varDict["Calibration:"] - clr_threshold = varDict["5% Sky Cover threshold at RH percentage:"] -# cloudamt = 5. * (exp(.106*(holder-70.))) - cloudamt = 5. * (exp(3.*Calib*((holder-clr_threshold)/(100.-clr_threshold)))) - -# Apply Limit Values Portion as input by User: -# The limit values takes precedence over the PoP QC tool since it is the -# final check done. - cloudamt[greater(cloudamt, limit_MAX)] = limit_MAX - cloudamt[less(cloudamt, limit_MIN)] = limit_MIN - -# Warn user if Min cloud Limit exceeds Max Cloud Limit: - if limit_MIN > limit_MAX: - self.statusBarMsg("Warning: Limit value discrpency noted (MIN cloud amount > MAX cloud amount). Verify settings", ".") - -# Return Value to for Sky grid - Sky = cloudamt - - return Sky +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# Populate_SkyTool -- Version 1.0 +# +# Author: Pete Banacos, WFO BTV (Started: 9/20/06) +# Last update: 1/23/07 +# +# DR 18542 bwhundermark Fixed RH calculation w.r.t ice 06/01/16 +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +ToolType = "numeric" +WeatherElementEdited = "Sky" + +from numpy import * + +### Solicite variables from the forecaster: +VariableList = [ + ("Populate SkyTool Version 1.0","","label"), + ("Model:", "NAM12", "radio", ["GFS40", "NAM12"]), + ("Model Run:", "Current", "radio", ["Current", "Previous"]), + ("Layer depth:", "50mb", "radio", ["50mb", "25mb"]), + ("Use RH w.r.t. ICE @ T < -25C?", "No", "radio", ["Yes", "No"]), + ("", "", "label"), + ("Include high clouds (500-300mb)?", "No", "radio", ["Yes", "No"]), + ("Include clouds below 925mb?", "Yes", "radio", ["Yes", "No"]), + ("5% Sky Cover threshold at RH percentage:", 60., "scale", [44., 74.],2.0), + ("Above value sets RH threshold for CLR skies.", "", "label"), + ("Calibration:", 1.00, "scale", [1.00, 1.50],0.02), + ("Raise calibration to get more sky cover for a given RH.", "", "label"), + (" --- Limit Values Section --- ", "", "label"), + ("Don't give me sky cover above (percent):", 100, "scale", [0, 100], 1), + ("Don't give me sky cover below (percent):", 0, "scale", [0, 100], 1), + ] + +import SmartScript + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, Sky, GridTimeRange, varDict): + "Determine Sky Cover based on exponential function where layer RH is the dependent variable." + + SITE = self.getSiteID() + + layer_depth = varDict["Layer depth:"] + lowPBL = varDict["Include clouds below 925mb?"] + hiCLDS = varDict["Include high clouds (500-300mb)?"] + UseRHi = varDict["Use RH w.r.t. 
ICE @ T < -25C?"] + limit_MAX = varDict["Don't give me sky cover above (percent):"] + limit_MIN = varDict["Don't give me sky cover below (percent):"] + model1 = varDict["Model:"] + modelrun = varDict["Model Run:"] + modeltemp = "D2D_" + model1 + + if modelrun == "Current": + model = self.findDatabase(modeltemp, 0) + else: + model = self.findDatabase(modeltemp, -1) + +# Grab RH values from the numerical model + + print('GridTimeRange = ', GridTimeRange) + RHPBL030 = self.getGrids(model, "rh", "BL030", GridTimeRange) + RHPBL3060 = self.getGrids(model, "rh", "BL3060", GridTimeRange) + RHPBL6090 = self.getGrids(model, "rh", "BL6090", GridTimeRange) + RHPBL90120 = self.getGrids(model, "rh", "BL90120", GridTimeRange) + +# TESTING SECTION + + lvl = ["BL030", "BL3060", "BL6090", "BL90120", + "MB925","MB900","MB875","MB850", + "MB825","MB800","MB775","MB750", + "MB725","MB700","MB675","MB650", + "MB625","MB600","MB575","MB550", + "MB525","MB500","MB450","MB400", + "MB350","MB300"] + +# Populate Temperature list with default of freezing + T_lvl = [273,273,273,273,273,273,273,273,273, + 273,273,273,273,273,273,273,273,273, + 273,273,273,273,273,273,273,273] +# Populate RH list with default of 50% + RH_lvl = [50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50] + e_lvl = [50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50] + es_lvl = [50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50] + esi_lvl = [50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50] + RHi_lvl = [50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50,50, + 50,50,50,50,50,50,50,50] + + for x in range(len(lvl)): + T_lvl[x] = self.getGrids(model, "t", lvl[x], GridTimeRange) + RH_lvl[x] = self.getGrids(model, "rh", lvl[x], GridTimeRange) + es_lvl[x] = 6.11 * exp(5412.*((1./273.)-(1./T_lvl[x]))) + e_lvl[x] = (RH_lvl[x] * es_lvl[x]) / 100. 
+# compute RH with respect to ice, using latent heat of sublimation: + esi_lvl[x] = 6.11 * exp(6133.*((1./273.)-(1./T_lvl[x]))) + RHi_lvl[x] = (e_lvl[x] / esi_lvl[x]) * 100. + +# If grid pt. temperature is less than -25C, use RH over ice... + if UseRHi == "Yes": + mask = T_lvl[x] < 248 + RH_lvl[x][mask] = RHi_lvl[x][mask] + +# Populate 30mb near-sfc AGL layers: + + RHPBL030 = RH_lvl[0] + RHPBL3060 = RH_lvl[1] + RHPBL6090 = RH_lvl[2] + RHPBL90120 = RH_lvl[3] + +# compute layer-averaged RH values (50mb) + + if layer_depth == "50mb": + RHavg925_875 = ((RH_lvl[4]+RH_lvl[5]+RH_lvl[6])/3) + RHavg900_850 = ((RH_lvl[5]+RH_lvl[6]+RH_lvl[7])/3) + RHavg875_825 = ((RH_lvl[6]+RH_lvl[7]+RH_lvl[8])/3) + RHavg850_800 = ((RH_lvl[7]+RH_lvl[8]+RH_lvl[9])/3) + RHavg825_775 = ((RH_lvl[8]+RH_lvl[9]+RH_lvl[10])/3) + RHavg800_750 = ((RH_lvl[9]+RH_lvl[10]+RH_lvl[11])/3) + RHavg775_725 = ((RH_lvl[10]+RH_lvl[11]+RH_lvl[12])/3) + RHavg750_700 = ((RH_lvl[11]+RH_lvl[12]+RH_lvl[13])/3) + RHavg725_675 = ((RH_lvl[12]+RH_lvl[13]+RH_lvl[14])/3) + RHavg700_650 = ((RH_lvl[13]+RH_lvl[14]+RH_lvl[15])/3) + RHavg675_625 = ((RH_lvl[14]+RH_lvl[15]+RH_lvl[16])/3) + RHavg650_600 = ((RH_lvl[15]+RH_lvl[16]+RH_lvl[17])/3) + RHavg625_575 = ((RH_lvl[16]+RH_lvl[17]+RH_lvl[18])/3) + RHavg600_550 = ((RH_lvl[17]+RH_lvl[18]+RH_lvl[19])/3) + RHavg575_525 = ((RH_lvl[18]+RH_lvl[19]+RH_lvl[20])/3) + RHavg550_500 = ((RH_lvl[19]+RH_lvl[20]+RH_lvl[21])/3) + else: +# depth is in 25mb layers + RHavg925_900 = ((RH_lvl[4]+RH_lvl[5])/2) + RHavg900_875 = ((RH_lvl[5]+RH_lvl[6])/2) + RHavg875_850 = ((RH_lvl[6]+RH_lvl[7])/2) + RHavg850_825 = ((RH_lvl[7]+RH_lvl[8])/2) + RHavg825_800 = ((RH_lvl[8]+RH_lvl[9])/2) + RHavg800_775 = ((RH_lvl[9]+RH_lvl[10])/2) + RHavg775_750 = ((RH_lvl[10]+RH_lvl[11])/2) + RHavg750_725 = ((RH_lvl[11]+RH_lvl[12])/2) + RHavg725_700 = ((RH_lvl[12]+RH_lvl[13])/2) + RHavg700_675 = ((RH_lvl[13]+RH_lvl[14])/2) + RHavg675_650 = ((RH_lvl[14]+RH_lvl[15])/2) + RHavg650_625 = ((RH_lvl[15]+RH_lvl[16])/2) + RHavg625_600 = 
((RH_lvl[16]+RH_lvl[17])/2) + RHavg600_575 = ((RH_lvl[17]+RH_lvl[18])/2) + RHavg575_550 = ((RH_lvl[18]+RH_lvl[19])/2) + RHavg550_525 = ((RH_lvl[19]+RH_lvl[20])/2) + RHavg525_500 = ((RH_lvl[20]+RH_lvl[21])/2) + +# Layer depth above 500mb is always 50mb... + RHavg500_450 = ((RH_lvl[21]+RH_lvl[22])/2) + RHavg450_400 = ((RH_lvl[22]+RH_lvl[23])/2) + RHavg400_350 = ((RH_lvl[23]+RH_lvl[24])/2) + RHavg350_300 = ((RH_lvl[24]+RH_lvl[25])/2) + +# Generate List of layers to check. + + my_PBLlist = [RHPBL030, RHPBL3060, RHPBL6090, RHPBL90120] + my_upr_trop_list = [RHavg500_450,RHavg450_400, RHavg400_350, + RHavg350_300] + + if layer_depth == "50mb": + my_list = [RHavg925_875, RHavg900_850, RHavg875_825, + RHavg850_800, RHavg825_775, RHavg800_750, RHavg775_725, + RHavg750_700, RHavg725_675, RHavg700_650, RHavg675_625, + RHavg650_600, RHavg625_575, RHavg600_550, RHavg575_525, + RHavg550_500] + + else: + my_list25 = [RHavg925_900, RHavg900_875, RHavg875_850, RHavg850_825, + RHavg825_800, RHavg800_775, RHavg775_750, RHavg750_725, + RHavg725_700, RHavg700_675, RHavg675_650, RHavg650_625, + RHavg625_600, RHavg600_575, RHavg575_550, RHavg550_525, + RHavg525_500] + + +# Put lowest RH layer being used into place holder... + if lowPBL == "Yes": + holder = RHPBL030 + elif layer_depth == "50mb": + holder = RHavg925_875 + else: + holder = RHavg925_900 + +# check Ground Relative layers first at low-levels, as selected by user. + if lowPBL == "Yes": + for layerRH in my_PBLlist: + holder = where(greater(layerRH, holder), layerRH, holder) + +# Check Layers incrementally + if layer_depth == "50mb": + for layerRH in my_list: + holder= where(greater(layerRH, holder), layerRH, holder) + else: + for layerRH in my_list25: + holder= where(greater(layerRH, holder), layerRH, holder) + +# If user wants high clouds (above 500mb), continue layer checking... 
+ if hiCLDS == "Yes": + for layerRH in my_upr_trop_list: + holder = where(greater(layerRH, holder), layerRH, holder) + +# Compute Cloud Amount + Calib = varDict["Calibration:"] + clr_threshold = varDict["5% Sky Cover threshold at RH percentage:"] +# cloudamt = 5. * (exp(.106*(holder-70.))) + cloudamt = 5. * (exp(3.*Calib*((holder-clr_threshold)/(100.-clr_threshold)))) + +# Apply Limit Values Portion as input by User: +# The limit values takes precedence over the PoP QC tool since it is the +# final check done. + cloudamt[greater(cloudamt, limit_MAX)] = limit_MAX + cloudamt[less(cloudamt, limit_MIN)] = limit_MIN + +# Warn user if Min cloud Limit exceeds Max Cloud Limit: + if limit_MIN > limit_MAX: + self.statusBarMsg("Warning: Limit value discrpency noted (MIN cloud amount > MAX cloud amount). Verify settings", ".") + +# Return Value to for Sky grid + Sky = cloudamt + + return Sky diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/QPF_SmartTool.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/QPF_SmartTool.py index ed29ba8ff1..28e729cec0 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/QPF_SmartTool.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/QPF_SmartTool.py @@ -1,103 +1,103 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# QPF_SmartTool.py -# -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -ToolType = "numeric" -WeatherElementEdited = "QPF" -from numpy import * -import MetLib, time - -HideTool = 0 - -# You can screen the elements for which your tool will appear by using -# a ScreenList. For example: -# - -VariableList = [ - ("Vertical Motion Influence" , 50, "scale", [0,100]), - ] - -# Set up Class -import SmartScript -# For available commands, see SmartScript - - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Smooths the specified grid by the specified factor - # With factor == 3, 3x3 smooth, factor == 5 5x5 smooth, etc. - # Even factors (4, 6, 8,...) round up to the next odd value - # If factors <3 are specified, the unmodified grid is returned. - def smoothGrid(self, grid, factor): - # factors of less than 3 are useless or dangerous - if factor < 3: - return grid - st = time.time() - half = int(factor)/ 2 - sg = zeros(grid.shape,float64) - count = zeros(grid.shape,float64) - gridOfOnes = ones(grid.shape,float64) - for y in xrange(-half, half + 1): - for x in xrange(-half, half + 1): - if y < 0: - yTargetSlice = slice(-y, None, None) - ySrcSlice = slice(0, y, None) - if y == 0: - yTargetSlice = slice(0, None, None) - ySrcSlice = slice(0, None, None) - if y > 0: - yTargetSlice = slice(0, -y, None) - ySrcSlice = slice(y, None, None) - if x < 0: - xTargetSlice = slice(-x, None, None) - xSrcSlice = slice(0, x, None) - if x == 0: - xTargetSlice = slice(0, None, None) - xSrcSlice = slice(0, None, None) - if x > 0: - xTargetSlice = slice(0, -x, None) - xSrcSlice = slice(x, None, None) - - target = [yTargetSlice, xTargetSlice] - src = [ySrcSlice, xSrcSlice] - sg[target] += grid[src] - count[target] += gridOfOnes[src] - return sg / count - - # Required Method: Execute - # %comment - # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... 
- def execute(self, QPF, Wind, varDict): - - # get the scale value - scale = float(varDict["Vertical Motion Influence"]) / 50.0 - - # Calculate the gridient of the topoGrid - topoGrid = self.getTopo() - - d_dx, d_dy = MetLib.gradient(topoGrid) - - # Convert wind to u and v components - u, v = self.MagDirToUV(Wind[0], Wind[1]) - - # Calculate the dot product which is positive when wind blows - # upslope and negative when it blows downslope - dotGrid = MetLib.dot((d_dx, d_dy), (u, -v)) / 5000.0 - dotGrid = self.smoothGrid(dotGrid, 9) - - # adjust the existing QPF grid using the scale and dot product - QPF = QPF * (1 + scale * dotGrid) - - return QPF +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# QPF_SmartTool.py +# +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +ToolType = "numeric" +WeatherElementEdited = "QPF" +from numpy import * +import MetLib, time + +HideTool = 0 + +# You can screen the elements for which your tool will appear by using +# a ScreenList. For example: +# + +VariableList = [ + ("Vertical Motion Influence" , 50, "scale", [0,100]), + ] + +# Set up Class +import SmartScript +# For available commands, see SmartScript + + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Smooths the specified grid by the specified factor + # With factor == 3, 3x3 smooth, factor == 5 5x5 smooth, etc. + # Even factors (4, 6, 8,...) round up to the next odd value + # If factors <3 are specified, the unmodified grid is returned. 
+ def smoothGrid(self, grid, factor): + # factors of less than 3 are useless or dangerous + if factor < 3: + return grid + st = time.time() + half = int(factor)/ 2 + sg = zeros(grid.shape,float64) + count = zeros(grid.shape,float64) + gridOfOnes = ones(grid.shape,float64) + for y in range(-half, half + 1): + for x in range(-half, half + 1): + if y < 0: + yTargetSlice = slice(-y, None, None) + ySrcSlice = slice(0, y, None) + if y == 0: + yTargetSlice = slice(0, None, None) + ySrcSlice = slice(0, None, None) + if y > 0: + yTargetSlice = slice(0, -y, None) + ySrcSlice = slice(y, None, None) + if x < 0: + xTargetSlice = slice(-x, None, None) + xSrcSlice = slice(0, x, None) + if x == 0: + xTargetSlice = slice(0, None, None) + xSrcSlice = slice(0, None, None) + if x > 0: + xTargetSlice = slice(0, -x, None) + xSrcSlice = slice(x, None, None) + + target = [yTargetSlice, xTargetSlice] + src = [ySrcSlice, xSrcSlice] + sg[target] += grid[src] + count[target] += gridOfOnes[src] + return sg / count + + # Required Method: Execute + # %comment + # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... 
+ def execute(self, QPF, Wind, varDict): + + # get the scale value + scale = float(varDict["Vertical Motion Influence"]) / 50.0 + + # Calculate the gridient of the topoGrid + topoGrid = self.getTopo() + + d_dx, d_dy = MetLib.gradient(topoGrid) + + # Convert wind to u and v components + u, v = self.MagDirToUV(Wind[0], Wind[1]) + + # Calculate the dot product which is positive when wind blows + # upslope and negative when it blows downslope + dotGrid = MetLib.dot((d_dx, d_dy), (u, -v)) / 5000.0 + dotGrid = self.smoothGrid(dotGrid, 9) + + # adjust the existing QPF grid using the scale and dot product + QPF = QPF * (1 + scale * dotGrid) + + return QPF diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Serp.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Serp.py index f361744fba..444f3568ee 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Serp.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Serp.py @@ -1,797 +1,797 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# SVN: $Revision: 130 $ $Date: 2010-07-30 17:45:24 +0000 (Fri, 30 Jul 2010) $ -# -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Serp - version 2.6 (AWIPS-2) -# -# Changes the existing field by asking the user to set values at control -# points, then fitting a surface to all the changes (using "serpentine" -# curves), and adding that change grid onto the existing grid. The new -# grid will exactly match the values specified at the control points. -# -# When run over an edit area, only control points "inside" the edit area -# are used. In addition, many "bogus" control points with "no change" are -# added around the edge of the edit area, so that the changes made inside -# blend in nicely to the areas outside the edit area that are not changed. -# -# Original Serpentine Algorithm Author: Les Colin - WFO Boise, ID -# Python implmentation: Tim Barker - SOO Boise, ID -# -# History:--------------------------------------------------------------------- -# 2012/03/27 - version 2.6 : Tim Barker : making clearer GMSG-style config -# syntax. And fixing bad version in latest_stable. -# 2012/03/04 - version 2.5 : Tim Barker : changed GMSG-style config syntax -# again. -# 2012/02/25 - version 2.4 : Tim Barker : GMSG-style config added, Fixed: -# problems when using current samples, issues with parm -# precision, cleanup of code for readability, remove last global -# passing thru to GUI class instead. 
-# 2011/03/14 - version 2.3 : Tim Barker : Fix issues with getGridCell now -# returning floats -# 2011/03/05 - version 2.2 : Tim Barker : Adding features that were in the -# AWIPS-1 version 1.15 -# 2010/07/30 - verison 2.0 : Paul Jendrowski : Preliminary AWIPS 2 version -#============================================================================== -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Feb 06, 2017 5959 randerso Removed Java .toString() calls -# -# - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -ToolType="numeric" -WeatherElementEdited = "variableElement" -ScreenList = ["SCALAR","VECTOR"] -# -# Imports -# -from numpy import * -import ObjAnal -import SmartScript -import copy -import LogStream -from math import log10 -import Tkinter -# -# Get site configuration -# -import SerpConfig as SC -# -# Set defauls if not set in site configuration -# -if "Locations" not in SC.Config: - SC.Config["Locations"]={"Bad Config 1":[("Bad Config 1",40.0,-110.0), - ("Bad Config 2",40.1,-110.0)], - "Bad Config 2":[("Bad Config 3",40.2,-110.0), - ("Bad Config 4",40.3,-110.0)]} -if "DefaultGroup" not in SC.Config: - SC.Config["DefaultGroup"]="Bad Config 1" -if "MaxPointsInColumn" not in SC.Config: - SC.Config["MaxPointsInColumn"]=10 -if "ElevationDefault" not in SC.Config: - SC.Config["ElevationDefault"]="On" -# -# The initial tool dialog - where major options are set -# -VariableList=[] -VariableList.append(("Options for Serp Tool","","label")) -keys=["Current Samples"] -for key in SC.Config["Locations"].keys(): - keys.append(key) -VariableList.append(("Sample Set:",SC.Config["DefaultGroup"],"radio",keys)) -VariableList.append(("Elevation Adjustment",SC.Config["ElevationDefault"],"radio",["On","Off"])) -VariableList.append(("Elevation Factor",36,"numeric")) 
-# -# The actual Tool -# -class Tool (SmartScript.SmartScript): - def __init__(self,dbss): - self._dbss=dbss - SmartScript.SmartScript.__init__(self,dbss) - self.tkroot=None - # - # Global variables used throughout - # - self.guiInfo={} - self.guiInfo['vectedit']=0 - self.guiInfo['minvalue']=0 - self.guiInfo['maxvalue']=100 - self.guiInfo['resolution']=0 - self.guiInfo['masterlabel']="xxx" - - def preProcessTool(self,varDict): - self.OA = ObjAnal.ObjAnal(self._dbss) - self.setname=varDict["Sample Set:"] - if varDict["Elevation Adjustment"]=="On": - self.elevfactor=varDict["Elevation Factor"] - else: - self.elevfactor=0.0 - if self.elevfactor<1: - self.elevfactor=0.0 -#--------------------------------------------------------------------------- -# - def execute(self, Topo, variableElement,variableElement_GridInfo, WEname, - GridTimeRange, editArea): - # - # get variable type, get the vector edit mode, change the variable - # name if we are modifying only a part of a vector, get the min/max - # values for the sliders - # - wxType=str(variableElement_GridInfo.getGridType()) - UpdatedName=self.getVectEdit(WEname,wxType) - self.getResolution(variableElement_GridInfo) - self.getMinMaxValue(variableElement_GridInfo) - self.getMasterLabel(GridTimeRange,UpdatedName) - # - # setup mask for editArea - # - editAreaMask=self.setupEditAreaMask(editArea) - # - # if user wants the current sample set, set up the - # locations array with those values - # - if self.setname=="Current Samples": - err=self.addCurrentSamples(Topo) - if (err==1): - self.statusBarMsg("No sample points defined","U") - self.cancel() - # - # setup sliders with current value at each point - # and save the current values for later - # - err=self.setInitValues(wxType,variableElement,editAreaMask,Topo) - if (err==1): - self.statusBarMsg("No control points defined","U") - self.cancel() - # - # Run the dialog which is a new Tkinter root window every time execute runs. 
- # The dialog will halt execution of the main processing until the dialog - # window is destroyed. The values from the GUI will be in an attribute of - # the ControlValues instance. - # - dialog=ControlValues(self.guiInfo, SC.Config["MaxPointsInColumn"], "Set Control Values") - dialog.mainloop() - if (dialog.result!="OK"): - self.cancel() - # - # If the user pressed OK, get the changes and get the - # remoteness, the average min distance to other control - # points, and the distance weights from each control point - # to all other gridpoints (all values that will be used - # later in the serp algorithm) - # - self.getChangeValues(self.guiInfo['vectedit'], - dialog.Values, - self.guiInfo['InitValues'], - self.InitDirs, - self.InitSpeeds - ) - # - # Handle adding no-change points around the outside of the - # editArea, if the tool is not operating on the whole grid. - # - self.handleEditArea(editAreaMask) - # - # Calculate the change grid - # - zval=self.OA.Serp(self.zlist,self.xloclist,self.yloclist,self.hloclist, - self.elevfactor,Topo) - # - # add result to the original values - - # OR - for vectors, modify the speed/dir - # - absmax=variableElement_GridInfo.getMaxValue() - absmin=variableElement_GridInfo.getMinValue() - if wxType=='SCALAR': # scalar - return clip(variableElement+zval,absmin,absmax) - if wxType=='VECTOR': # vector - speed=variableElement[0] - direc=variableElement[1] - if (self.guiInfo['vectedit']==1): - newspd=speed - newdir=direc+zval - newdir[greater(newdir, 360)] -= 360 - newdir[less(newdir ,0)] += 360 - elif (self.guiInfo['vectedit']==0): - newspd=clip(speed+zval,absmin,absmax) - newdir=direc - else: - newspd=clip(speed+zval,absmin,absmax) - zval=self.OA.Serp(self.ylist,self.xloclist,self.yloclist,self.hloclist, - self.elevfactor,Topo) - newdir=direc+zval - newdir[greater(newdir, 360)] -= 360 - newdir[less(newdir ,0)] += 360 - - return (newspd,newdir) -#--------------------------------------------------------------------------- -# -# Make 
label for controlpoint dialog with timerange of grid -# - def getMasterLabel(self,GridTimeRange,WEname): - startday=GridTimeRange.startTime().day - starthour=GridTimeRange.startTime().hour - endday=GridTimeRange.endTime().day - endhour=GridTimeRange.endTime().hour - self.guiInfo["masterlabel"]="Set %s for %d / %2.2dZ --> %d / %2.2dZ" % (WEname, - startday,starthour,endday,endhour) - return -#--------------------------------------------------------------------------- -# -# Get the vector edit mode (and modify WEname if needed), -# vectedit=0 if a scalar or a vector modifying only magnitude -# vectedit=1 if a vector modifying direction only -# vectedit=2 if a vector modifying both -# - def getVectEdit(self,WEname,wxType): - self.guiInfo["vectedit"]=0 - if (wxType=='VECTOR'): - vecteditstring=self.getVectorEditMode() - if (vecteditstring=="Magnitude Only"): - self.guiInfo["vectedit"]=0 - WEname+="Spd" - if (vecteditstring=="Direction Only"): - self.guiInfo["vectedit"]=1 - WEname+="Dir" - if (vecteditstring=="Both"): - self.guiInfo["vectedit"]=2 - return(WEname) -#--------------------------------------------------------------------------- -# -# Get the resolution of changes (i.e. 
0.01 for QPF, 0.1 for SnowAmount) -# by using the parm precision information -# - def getResolution(self,variableElement_GridInfo): - precision=variableElement_GridInfo.getPrecision() - if (precision==0): - self.guiInfo["resolution"]=1.0 - else: - self.guiInfo["resolution"]=1.0/(10**precision) - return -#--------------------------------------------------------------------------- -# -# Get the minimum/maximum value for the sliders from the variable -# max/min limits -# - def getMinMaxValue(self,variableElement_GridInfo): - self.guiInfo["minvalue"]=variableElement_GridInfo.getMinValue() - self.guiInfo["maxvalue"]=variableElement_GridInfo.getMaxValue() - if (self.guiInfo["vectedit"]==1): - self.guiInfo["minvalue"]=0 - self.guiInfo["maxvalue"]=360 - return -#--------------------------------------------------------------------------- -# -# Add the current sample point lat/lon to the Locations array -# return an err of 1 if no sample points are currently specified -# return an err of 0 if some sample points were found -# - def addCurrentSamples(self,Topo): - shape1=Topo.shape - ymax=shape1[0]-1 - xmax=shape1[1]-1 - self.samplePoints = self.getSamplePoints(None) - curpoints=[] - for sample in self.samplePoints: - (x,y)=sample - if (x<0)or(x>xmax)or(y<0)or(y>ymax): - LogStream.logEvent("serp:sample point at %d,%d is off GFE grid - ignored"%(x,y)) - continue - (lat,lon)=self.getLatLon(x,y) - label="%5.2f %7.2f" % (lat,lon) - curpoints.append((label,lat,lon)) - if (len(curpoints)<1): - return 1 - SC.Config["Locations"]["Current Samples"]=curpoints - return 0 -#--------------------------------------------------------------------------- -# -# Limit direction changes to +/- 180 degrees -# - def limitDirChange(self,dirchg): - while dirchg>180: - dirchg=dirchg-360 - while dirchg<-180: - dirchg=dirchg+360 - return dirchg -#--------------------------------------------------------------------------- -# -# setup InitValues array with current values at points, -# as well as xloclist, 
yloclist, hloclist with location/elevation at points -# - def setInitValues(self,wxType,variableElement,editAreaMask,Topo): - - self.xloclist=[] - self.yloclist=[] - self.hloclist=[] - self.guiInfo['InitValues']=[] - self.guiInfo['Labels']=[] - self.InitSpeeds=[] - self.InitDirs=[] - for i in range(len(SC.Config["Locations"][self.setname])): - (name,lat,lon)=SC.Config["Locations"][self.setname][i] - (x,y)=self.getGridCell(lat,lon) - if ((x is None)or(y is None)): - msg="serp:point %s ignored because it is off the GFE grid"%name - LogStream.logEvent(msg) - continue - # - # Ignore sites not on the GFE grid - # - xint=int(round(x,0)+0.5) - yint=int(round(y,0)+0.5) - if (editAreaMask[yint,xint]<0.5): - LogStream.logEvent("serp:point %s ignored because it is not in editArea"%name) - continue - # - # ignore sites at a gridpoint already included - # - if ((xint in self.xloclist) and (yint in self.yloclist)): - skip=0 - for j in range(len(self.xloclist)): - if ((xint==self.xloclist[j])and(yint==self.yloclist[j])): - skip=1 - continue - if (skip==1): - LogStream.logEvent("serp:point %s ignored because gridpoint is already a control point"%name) - continue - # - # append location to control point list - # - self.guiInfo['Labels'].append(name) - elev=Topo[yint,xint] - self.hloclist.append(elev) - self.xloclist.append(xint) - self.yloclist.append(yint) - # - # get initial value at control points - # - if wxType=='SCALAR': - current=self.round(variableElement[yint,xint],"Nearest",self.guiInfo['resolution']) - else: - if (self.guiInfo['vectedit']==0): - current=self.round(variableElement[0][yint,xint],"Nearest",self.guiInfo['resolution']) - elif (self.guiInfo['vectedit']==1): - current=self.round(variableElement[1][yint,xint],"Nearest",self.guiInfo['resolution']) - else: - curspd=variableElement[0][yint,xint] - curdir=variableElement[1][yint,xint] - self.InitSpeeds.append(curspd) - self.InitDirs.append(curdir) - current="%3d@%-3d" % (int(curdir+0.5),int(curspd+0.5)) - 
self.guiInfo['InitValues'].append(current) - # - # return error if no points in control point list - # - if (len(self.xloclist)<1): - return 1 - return 0 -#--------------------------------------------------------------------------- -# -# get change values at every point (zlist), if a vector change - also get ylist -# - def getChangeValues(self,vectedit,Values,InitValues,InitDirs,InitSpeeds): - - self.zlist=[]; - self.ylist=[]; - for i in range(len(InitValues)): - if (vectedit==2): - valreturn=Values[i] - (dirstr,spdstr)=valreturn.split("@") - dir1=int(dirstr) - spd1=int(spdstr) - dirchg=self.limitDirChange(dir1-InitDirs[i]) - spdchg=spd1-InitSpeeds[i] - self.zlist.append(spdchg) - self.ylist.append(dirchg) - else: - change=Values[i]-InitValues[i] - if (vectedit==1): - change=self.limitDirChange(change) - self.zlist.append(change) -#------------------------------------------------------------------------------ -# -# setupEditAreaMask - sets up a mask for gridpoints inside the editArea -# - def setupEditAreaMask(self,editArea): - if editArea is None: - mask=self.getTopo()*0 - else: - mask=self.encodeEditArea(editArea) - return mask -#------------------------------------------------------------------------------ -# -# handleEditArea - if an editArea is specified, then it adds in "bogus" -# control points that specify "no change" just outside the border of -# the editArea -# - def handleEditArea(self,editAreaMask): - # - # If editArea include all gridpoints - then no bogus points are - # needed - # - Topo=self.getTopo() - allpts=add.reduce(add.reduce(less(Topo*0.0,5))) - numpts=add.reduce(add.reduce(editAreaMask)) - if numpts==allpts: - return - # - # make out1 a grid that is 1 for all pixels just outside the - # editArea - # - mask=editAreaMask*100 - smooth1=self.smoothpm(mask,1) - out1=logical_and(greater(smooth1,0),less(mask,50)) - # - # get list of all x,y coords that are on the edge - # - xl=[] - yl=[] - for iy in range(Topo.shape[0]): - for ix in 
range(Topo.shape[1]): - if out1[iy,ix]>0.5: - xl.append(ix) - yl.append(iy) - # - # Thin the points (if needed) - # - roughMax=250 - if len(xl)>roughMax: - thinamt=float(len(xl))/float(roughMax) - (xpts,ypts)=self.thinpts(xl,yl,thinamt) - else: - xpts=xl - ypts=yl - # - # We can simply add these points to the list of points. - # Normally, we would have to be careful to make sure that - # a duplicate point did not exist. But here, all the normal - # control points are inside the editArea, and all these - # added "bogus" points are outside the editArea, so they are - # guaranteed to not be a duplicate of the others - # - for i in range(len(xpts)): - elev=Topo[ypts[i],xpts[i]] - self.hloclist.append(elev) - self.xloclist.append(xpts[i]) - self.yloclist.append(ypts[i]) - self.zlist.append(0.0) - self.ylist.append(0.0) - # - # - # - return - #------------------------------------------------------------------- - # Given a list of x,y coordinates of points - thin the list - # so that no points are closer than "num" gridpoints to another - # - def thinpts(self,xl,yl,num): - xc=copy.copy(xl) - yc=copy.copy(yl) - xpts=[] - ypts=[] - xp=xc[0] - yp=yc[0] - xpts.append(xp) - ypts.append(yp) - while len(xc)>0: - dlist=self.within(xp,yp,xc,yc,num) - dlist.sort() - dlist.reverse() - for i in range(len(dlist)): - del xc[dlist[i]] - del yc[dlist[i]] - del dlist - if len(xc)>0: - (xnear,ynear)=self.nearest(xp,yp,xc,yc) - xp=xnear - yp=ynear - xpts.append(xp) - ypts.append(yp) - return(xpts,ypts) - #------------------------------------------------------------------- - # Return x,y of point nearest xp,yp - # - def nearest(self,xp,yp,xc,yc): - dist=9.0e10 - for i in range(len(xc)): - dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2) - if dif2=len(self.guiInfo['Labels'])): - continue - fr=Tkinter.Frame(fc,relief=Tkinter.GROOVE,borderwidth=1) - lab=Tkinter.Label(fr,text=self.guiInfo['Labels'][num]) - # Make Tkinter variables for use as widget variables - # textvar to show delta from original value - 
tkStrVar=Tkinter.StringVar() - tkStrVar.set('(0)') - self.ChangeVals.append(tkStrVar) - # The slider values - tkDblVar=Tkinter.DoubleVar() - tkDblVar.set(self.guiInfo['InitValues'][num]) - self.Values.append(tkDblVar) - - if (self.guiInfo['vectedit']==2): - lab.grid(row=0,column=0,sticky=Tkinter.EW) - self.ScaleIDs.append(Tkinter.Entry(fr,width=7)) - self.ScaleIDs[num].delete(0,Tkinter.END) - self.ScaleIDs[num].insert(Tkinter.END,self.guiInfo['InitValues'][num]) - self.ScaleIDs[num].grid(row=1,column=0) - else: - lab.grid(row=0,column=0,columnspan=2,sticky=Tkinter.EW) - self.ScaleIDs.append( - Tkinter.Scale(fr, orient=Tkinter.HORIZONTAL, - from_=self.guiInfo['minvalue'], - to=self.guiInfo['maxvalue'], - resolution=self.guiInfo['resolution'], - variable=self.Values[num], - command=self.setChanges, - length=175 - )) - val=self.guiInfo['InitValues'][num] - self.ScaleIDs[num].set(val) - self.ScaleIDs[num].grid(row=1,column=0,sticky=Tkinter.EW) - chg=Tkinter.Label(fr,textvariable=self.ChangeVals[num]) - chg.grid(row=1,column=1,sticky=Tkinter.S) - fr.columnconfigure(1,minsize=60) - fr.pack(side=Tkinter.TOP,fill=Tkinter.X) - num=num+1 - fc.pack(side=Tkinter.LEFT,fill=Tkinter.Y,expand=0) - # AppDialog wants a widget returned from body to set the focus to. - return frame - - def ok(self, event=None): - """Process the Ok button. The ok method in AppDialog destroys the window - before running the apply method. Need to run apply first to get the - data from the slider widgets.""" - if self.validate(): - self.apply() - self.result="OK" - self.destroy() - - def apply(self): - """Retrieve the values from the scale widgets into attribute Values.""" - self.Values=[] - for num in range(len(self.guiInfo['Labels'])): - self.Values.append(self.ScaleIDs[num].get()) +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# SVN: $Revision: 130 $ $Date: 2010-07-30 17:45:24 +0000 (Fri, 30 Jul 2010) $ +# +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Serp - version 2.6 (AWIPS-2) +# +# Changes the existing field by asking the user to set values at control +# points, then fitting a surface to all the changes (using "serpentine" +# curves), and adding that change grid onto the existing grid. The new +# grid will exactly match the values specified at the control points. +# +# When run over an edit area, only control points "inside" the edit area +# are used. In addition, many "bogus" control points with "no change" are +# added around the edge of the edit area, so that the changes made inside +# blend in nicely to the areas outside the edit area that are not changed. +# +# Original Serpentine Algorithm Author: Les Colin - WFO Boise, ID +# Python implmentation: Tim Barker - SOO Boise, ID +# +# History:--------------------------------------------------------------------- +# 2012/03/27 - version 2.6 : Tim Barker : making clearer GMSG-style config +# syntax. And fixing bad version in latest_stable. +# 2012/03/04 - version 2.5 : Tim Barker : changed GMSG-style config syntax +# again. 
+# 2012/02/25 - version 2.4 : Tim Barker : GMSG-style config added, Fixed: +# problems when using current samples, issues with parm +# precision, cleanup of code for readability, remove last global +# passing thru to GUI class instead. +# 2011/03/14 - version 2.3 : Tim Barker : Fix issues with getGridCell now +# returning floats +# 2011/03/05 - version 2.2 : Tim Barker : Adding features that were in the +# AWIPS-1 version 1.15 +# 2010/07/30 - verison 2.0 : Paul Jendrowski : Preliminary AWIPS 2 version +#============================================================================== +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# +# + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +ToolType="numeric" +WeatherElementEdited = "variableElement" +ScreenList = ["SCALAR","VECTOR"] +# +# Imports +# +from numpy import * +import ObjAnal +import SmartScript +import copy +import LogStream +from math import log10 +import tkinter +# +# Get site configuration +# +import SerpConfig as SC +# +# Set defauls if not set in site configuration +# +if "Locations" not in SC.Config: + SC.Config["Locations"]={"Bad Config 1":[("Bad Config 1",40.0,-110.0), + ("Bad Config 2",40.1,-110.0)], + "Bad Config 2":[("Bad Config 3",40.2,-110.0), + ("Bad Config 4",40.3,-110.0)]} +if "DefaultGroup" not in SC.Config: + SC.Config["DefaultGroup"]="Bad Config 1" +if "MaxPointsInColumn" not in SC.Config: + SC.Config["MaxPointsInColumn"]=10 +if "ElevationDefault" not in SC.Config: + SC.Config["ElevationDefault"]="On" +# +# The initial tool dialog - where major options are set +# +VariableList=[] +VariableList.append(("Options for Serp Tool","","label")) +keys=["Current Samples"] +for key in list(SC.Config["Locations"].keys()): + 
keys.append(key) +VariableList.append(("Sample Set:",SC.Config["DefaultGroup"],"radio",keys)) +VariableList.append(("Elevation Adjustment",SC.Config["ElevationDefault"],"radio",["On","Off"])) +VariableList.append(("Elevation Factor",36,"numeric")) +# +# The actual Tool +# +class Tool (SmartScript.SmartScript): + def __init__(self,dbss): + self._dbss=dbss + SmartScript.SmartScript.__init__(self,dbss) + self.tkroot=None + # + # Global variables used throughout + # + self.guiInfo={} + self.guiInfo['vectedit']=0 + self.guiInfo['minvalue']=0 + self.guiInfo['maxvalue']=100 + self.guiInfo['resolution']=0 + self.guiInfo['masterlabel']="xxx" + + def preProcessTool(self,varDict): + self.OA = ObjAnal.ObjAnal(self._dbss) + self.setname=varDict["Sample Set:"] + if varDict["Elevation Adjustment"]=="On": + self.elevfactor=varDict["Elevation Factor"] + else: + self.elevfactor=0.0 + if self.elevfactor<1: + self.elevfactor=0.0 +#--------------------------------------------------------------------------- +# + def execute(self, Topo, variableElement,variableElement_GridInfo, WEname, + GridTimeRange, editArea): + # + # get variable type, get the vector edit mode, change the variable + # name if we are modifying only a part of a vector, get the min/max + # values for the sliders + # + wxType=str(variableElement_GridInfo.getGridType()) + UpdatedName=self.getVectEdit(WEname,wxType) + self.getResolution(variableElement_GridInfo) + self.getMinMaxValue(variableElement_GridInfo) + self.getMasterLabel(GridTimeRange,UpdatedName) + # + # setup mask for editArea + # + editAreaMask=self.setupEditAreaMask(editArea) + # + # if user wants the current sample set, set up the + # locations array with those values + # + if self.setname=="Current Samples": + err=self.addCurrentSamples(Topo) + if (err==1): + self.statusBarMsg("No sample points defined","U") + self.cancel() + # + # setup sliders with current value at each point + # and save the current values for later + # + 
err=self.setInitValues(wxType,variableElement,editAreaMask,Topo) + if (err==1): + self.statusBarMsg("No control points defined","U") + self.cancel() + # + # Run the dialog which is a new Tkinter root window every time execute runs. + # The dialog will halt execution of the main processing until the dialog + # window is destroyed. The values from the GUI will be in an attribute of + # the ControlValues instance. + # + dialog=ControlValues(self.guiInfo, SC.Config["MaxPointsInColumn"], "Set Control Values") + dialog.mainloop() + if (dialog.result!="OK"): + self.cancel() + # + # If the user pressed OK, get the changes and get the + # remoteness, the average min distance to other control + # points, and the distance weights from each control point + # to all other gridpoints (all values that will be used + # later in the serp algorithm) + # + self.getChangeValues(self.guiInfo['vectedit'], + dialog.Values, + self.guiInfo['InitValues'], + self.InitDirs, + self.InitSpeeds + ) + # + # Handle adding no-change points around the outside of the + # editArea, if the tool is not operating on the whole grid. 
+ # + self.handleEditArea(editAreaMask) + # + # Calculate the change grid + # + zval=self.OA.Serp(self.zlist,self.xloclist,self.yloclist,self.hloclist, + self.elevfactor,Topo) + # + # add result to the original values - + # OR - for vectors, modify the speed/dir + # + absmax=variableElement_GridInfo.getMaxValue() + absmin=variableElement_GridInfo.getMinValue() + if wxType=='SCALAR': # scalar + return clip(variableElement+zval,absmin,absmax) + if wxType=='VECTOR': # vector + speed=variableElement[0] + direc=variableElement[1] + if (self.guiInfo['vectedit']==1): + newspd=speed + newdir=direc+zval + newdir[greater(newdir, 360)] -= 360 + newdir[less(newdir ,0)] += 360 + elif (self.guiInfo['vectedit']==0): + newspd=clip(speed+zval,absmin,absmax) + newdir=direc + else: + newspd=clip(speed+zval,absmin,absmax) + zval=self.OA.Serp(self.ylist,self.xloclist,self.yloclist,self.hloclist, + self.elevfactor,Topo) + newdir=direc+zval + newdir[greater(newdir, 360)] -= 360 + newdir[less(newdir ,0)] += 360 + + return (newspd,newdir) +#--------------------------------------------------------------------------- +# +# Make label for controlpoint dialog with timerange of grid +# + def getMasterLabel(self,GridTimeRange,WEname): + startday=GridTimeRange.startTime().day + starthour=GridTimeRange.startTime().hour + endday=GridTimeRange.endTime().day + endhour=GridTimeRange.endTime().hour + self.guiInfo["masterlabel"]="Set %s for %d / %2.2dZ --> %d / %2.2dZ" % (WEname, + startday,starthour,endday,endhour) + return +#--------------------------------------------------------------------------- +# +# Get the vector edit mode (and modify WEname if needed), +# vectedit=0 if a scalar or a vector modifying only magnitude +# vectedit=1 if a vector modifying direction only +# vectedit=2 if a vector modifying both +# + def getVectEdit(self,WEname,wxType): + self.guiInfo["vectedit"]=0 + if (wxType=='VECTOR'): + vecteditstring=self.getVectorEditMode() + if (vecteditstring=="Magnitude Only"): + 
self.guiInfo["vectedit"]=0 + WEname+="Spd" + if (vecteditstring=="Direction Only"): + self.guiInfo["vectedit"]=1 + WEname+="Dir" + if (vecteditstring=="Both"): + self.guiInfo["vectedit"]=2 + return(WEname) +#--------------------------------------------------------------------------- +# +# Get the resolution of changes (i.e. 0.01 for QPF, 0.1 for SnowAmount) +# by using the parm precision information +# + def getResolution(self,variableElement_GridInfo): + precision=variableElement_GridInfo.getPrecision() + if (precision==0): + self.guiInfo["resolution"]=1.0 + else: + self.guiInfo["resolution"]=1.0/(10**precision) + return +#--------------------------------------------------------------------------- +# +# Get the minimum/maximum value for the sliders from the variable +# max/min limits +# + def getMinMaxValue(self,variableElement_GridInfo): + self.guiInfo["minvalue"]=variableElement_GridInfo.getMinValue() + self.guiInfo["maxvalue"]=variableElement_GridInfo.getMaxValue() + if (self.guiInfo["vectedit"]==1): + self.guiInfo["minvalue"]=0 + self.guiInfo["maxvalue"]=360 + return +#--------------------------------------------------------------------------- +# +# Add the current sample point lat/lon to the Locations array +# return an err of 1 if no sample points are currently specified +# return an err of 0 if some sample points were found +# + def addCurrentSamples(self,Topo): + shape1=Topo.shape + ymax=shape1[0]-1 + xmax=shape1[1]-1 + self.samplePoints = self.getSamplePoints(None) + curpoints=[] + for sample in self.samplePoints: + (x,y)=sample + if (x<0)or(x>xmax)or(y<0)or(y>ymax): + LogStream.logEvent("serp:sample point at %d,%d is off GFE grid - ignored"%(x,y)) + continue + (lat,lon)=self.getLatLon(x,y) + label="%5.2f %7.2f" % (lat,lon) + curpoints.append((label,lat,lon)) + if (len(curpoints)<1): + return 1 + SC.Config["Locations"]["Current Samples"]=curpoints + return 0 +#--------------------------------------------------------------------------- +# +# Limit 
direction changes to +/- 180 degrees +# + def limitDirChange(self,dirchg): + while dirchg>180: + dirchg=dirchg-360 + while dirchg<-180: + dirchg=dirchg+360 + return dirchg +#--------------------------------------------------------------------------- +# +# setup InitValues array with current values at points, +# as well as xloclist, yloclist, hloclist with location/elevation at points +# + def setInitValues(self,wxType,variableElement,editAreaMask,Topo): + + self.xloclist=[] + self.yloclist=[] + self.hloclist=[] + self.guiInfo['InitValues']=[] + self.guiInfo['Labels']=[] + self.InitSpeeds=[] + self.InitDirs=[] + for i in range(len(SC.Config["Locations"][self.setname])): + (name,lat,lon)=SC.Config["Locations"][self.setname][i] + (x,y)=self.getGridCell(lat,lon) + if ((x is None)or(y is None)): + msg="serp:point %s ignored because it is off the GFE grid"%name + LogStream.logEvent(msg) + continue + # + # Ignore sites not on the GFE grid + # + xint=int(round(x,0)+0.5) + yint=int(round(y,0)+0.5) + if (editAreaMask[yint,xint]<0.5): + LogStream.logEvent("serp:point %s ignored because it is not in editArea"%name) + continue + # + # ignore sites at a gridpoint already included + # + if ((xint in self.xloclist) and (yint in self.yloclist)): + skip=0 + for j in range(len(self.xloclist)): + if ((xint==self.xloclist[j])and(yint==self.yloclist[j])): + skip=1 + continue + if (skip==1): + LogStream.logEvent("serp:point %s ignored because gridpoint is already a control point"%name) + continue + # + # append location to control point list + # + self.guiInfo['Labels'].append(name) + elev=Topo[yint,xint] + self.hloclist.append(elev) + self.xloclist.append(xint) + self.yloclist.append(yint) + # + # get initial value at control points + # + if wxType=='SCALAR': + current=self.round(variableElement[yint,xint],"Nearest",self.guiInfo['resolution']) + else: + if (self.guiInfo['vectedit']==0): + current=self.round(variableElement[0][yint,xint],"Nearest",self.guiInfo['resolution']) + elif 
(self.guiInfo['vectedit']==1): + current=self.round(variableElement[1][yint,xint],"Nearest",self.guiInfo['resolution']) + else: + curspd=variableElement[0][yint,xint] + curdir=variableElement[1][yint,xint] + self.InitSpeeds.append(curspd) + self.InitDirs.append(curdir) + current="%3d@%-3d" % (int(curdir+0.5),int(curspd+0.5)) + self.guiInfo['InitValues'].append(current) + # + # return error if no points in control point list + # + if (len(self.xloclist)<1): + return 1 + return 0 +#--------------------------------------------------------------------------- +# +# get change values at every point (zlist), if a vector change - also get ylist +# + def getChangeValues(self,vectedit,Values,InitValues,InitDirs,InitSpeeds): + + self.zlist=[]; + self.ylist=[]; + for i in range(len(InitValues)): + if (vectedit==2): + valreturn=Values[i] + (dirstr,spdstr)=valreturn.split("@") + dir1=int(dirstr) + spd1=int(spdstr) + dirchg=self.limitDirChange(dir1-InitDirs[i]) + spdchg=spd1-InitSpeeds[i] + self.zlist.append(spdchg) + self.ylist.append(dirchg) + else: + change=Values[i]-InitValues[i] + if (vectedit==1): + change=self.limitDirChange(change) + self.zlist.append(change) +#------------------------------------------------------------------------------ +# +# setupEditAreaMask - sets up a mask for gridpoints inside the editArea +# + def setupEditAreaMask(self,editArea): + if editArea is None: + mask=self.getTopo()*0 + else: + mask=self.encodeEditArea(editArea) + return mask +#------------------------------------------------------------------------------ +# +# handleEditArea - if an editArea is specified, then it adds in "bogus" +# control points that specify "no change" just outside the border of +# the editArea +# + def handleEditArea(self,editAreaMask): + # + # If editArea include all gridpoints - then no bogus points are + # needed + # + Topo=self.getTopo() + allpts=add.reduce(add.reduce(less(Topo*0.0,5))) + numpts=add.reduce(add.reduce(editAreaMask)) + if numpts==allpts: + return + 
# + # make out1 a grid that is 1 for all pixels just outside the + # editArea + # + mask=editAreaMask*100 + smooth1=self.smoothpm(mask,1) + out1=logical_and(greater(smooth1,0),less(mask,50)) + # + # get list of all x,y coords that are on the edge + # + xl=[] + yl=[] + for iy in range(Topo.shape[0]): + for ix in range(Topo.shape[1]): + if out1[iy,ix]>0.5: + xl.append(ix) + yl.append(iy) + # + # Thin the points (if needed) + # + roughMax=250 + if len(xl)>roughMax: + thinamt=float(len(xl))/float(roughMax) + (xpts,ypts)=self.thinpts(xl,yl,thinamt) + else: + xpts=xl + ypts=yl + # + # We can simply add these points to the list of points. + # Normally, we would have to be careful to make sure that + # a duplicate point did not exist. But here, all the normal + # control points are inside the editArea, and all these + # added "bogus" points are outside the editArea, so they are + # guaranteed to not be a duplicate of the others + # + for i in range(len(xpts)): + elev=Topo[ypts[i],xpts[i]] + self.hloclist.append(elev) + self.xloclist.append(xpts[i]) + self.yloclist.append(ypts[i]) + self.zlist.append(0.0) + self.ylist.append(0.0) + # + # + # + return + #------------------------------------------------------------------- + # Given a list of x,y coordinates of points - thin the list + # so that no points are closer than "num" gridpoints to another + # + def thinpts(self,xl,yl,num): + xc=copy.copy(xl) + yc=copy.copy(yl) + xpts=[] + ypts=[] + xp=xc[0] + yp=yc[0] + xpts.append(xp) + ypts.append(yp) + while len(xc)>0: + dlist=self.within(xp,yp,xc,yc,num) + dlist.sort() + dlist.reverse() + for i in range(len(dlist)): + del xc[dlist[i]] + del yc[dlist[i]] + del dlist + if len(xc)>0: + (xnear,ynear)=self.nearest(xp,yp,xc,yc) + xp=xnear + yp=ynear + xpts.append(xp) + ypts.append(yp) + return(xpts,ypts) + #------------------------------------------------------------------- + # Return x,y of point nearest xp,yp + # + def nearest(self,xp,yp,xc,yc): + dist=9.0e10 + for i in 
range(len(xc)): + dif2=((xc[i]-xp)**2)+((yc[i]-yp)**2) + if dif2=len(self.guiInfo['Labels'])): + continue + fr=tkinter.Frame(fc,relief=tkinter.GROOVE,borderwidth=1) + lab=tkinter.Label(fr,text=self.guiInfo['Labels'][num]) + # Make Tkinter variables for use as widget variables + # textvar to show delta from original value + tkStrVar=tkinter.StringVar() + tkStrVar.set('(0)') + self.ChangeVals.append(tkStrVar) + # The slider values + tkDblVar=tkinter.DoubleVar() + tkDblVar.set(self.guiInfo['InitValues'][num]) + self.Values.append(tkDblVar) + + if (self.guiInfo['vectedit']==2): + lab.grid(row=0,column=0,sticky=tkinter.EW) + self.ScaleIDs.append(tkinter.Entry(fr,width=7)) + self.ScaleIDs[num].delete(0,tkinter.END) + self.ScaleIDs[num].insert(tkinter.END,self.guiInfo['InitValues'][num]) + self.ScaleIDs[num].grid(row=1,column=0) + else: + lab.grid(row=0,column=0,columnspan=2,sticky=tkinter.EW) + self.ScaleIDs.append( + tkinter.Scale(fr, orient=tkinter.HORIZONTAL, + from_=self.guiInfo['minvalue'], + to=self.guiInfo['maxvalue'], + resolution=self.guiInfo['resolution'], + variable=self.Values[num], + command=self.setChanges, + length=175 + )) + val=self.guiInfo['InitValues'][num] + self.ScaleIDs[num].set(val) + self.ScaleIDs[num].grid(row=1,column=0,sticky=tkinter.EW) + chg=tkinter.Label(fr,textvariable=self.ChangeVals[num]) + chg.grid(row=1,column=1,sticky=tkinter.S) + fr.columnconfigure(1,minsize=60) + fr.pack(side=tkinter.TOP,fill=tkinter.X) + num=num+1 + fc.pack(side=tkinter.LEFT,fill=tkinter.Y,expand=0) + # AppDialog wants a widget returned from body to set the focus to. + return frame + + def ok(self, event=None): + """Process the Ok button. The ok method in AppDialog destroys the window + before running the apply method. 
Need to run apply first to get the + data from the slider widgets.""" + if self.validate(): + self.apply() + self.result="OK" + self.destroy() + + def apply(self): + """Retrieve the values from the scale widgets into attribute Values.""" + self.Values=[] + for num in range(len(self.guiInfo['Labels'])): + self.Values.append(self.ScaleIDs[num].get()) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/SerpISC.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/SerpISC.py index 84944b1515..4da0d7bb85 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/SerpISC.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/SerpISC.py @@ -1,479 +1,479 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# SVN: $Revision$ - $Date$ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# serpISC - version 1.7 -# -# Changes an existing grid to blend better into neighboring ISC grids. -# Can be used as an initial or final step in coordination. 
Only your grids -# are affected: nothing happens to the ISC grids. The ISC button must have -# been clicked on at least once before using this tool. -# -# Every point on the outer perimeter of CWA (i.e, belonging to selected ISCs) -# takes part in a serp adjustment of the existing grid. If any ISC grids are -# missing or not selected on a CWA boundary, your own grid is used there instead. -# -# You can use this tool on one ISC at a time to see how each one would influence -# your grid. To fit all ISC boundaries at once you must have all of them clicked -# on. Running the tool sequentially on each ISC will retain previous results if -# you keep the older ones turned on, but different sequences will yield slightly -# different results. -# -# Make sure your grid does not have an artificial boundary near the CWA border. -# Otherwise, it might already match your ISC neighbor there, so the tool won't -# adjust anything and your artificial boundary will remain. -# -# You can include or exclude as many sample points within your CWA as you like, but -# sample points close to an ISC border can create unrealistic gradients. -# -# You can match a border only partway if you want. Suppose you want to meet your -# ISC neighbor half way. Then set the "percent of full match" to 50. After sending -# your ISC grid, your neighbor will want to match FULL way (not half) to meet the -# newly received grid. You can also use "percent of full match" to nudge your -# grid to your neighbors' grids. -# -# If your grid's duration spans several shorter-duration ISC grids, the ISC -# grids will be time-averaged first (except for PoP which always uses the -# maximum value) and the fit will be inexact. Or, if the ISC grids themselves -# don't match at a CWA boundary (something you can't do in your own grid), the -# the tool will converge intermediate contours to the point of the mismatch, -# and the fit will look artificial. -# -# For winds serp runs twice, once for u and once for v. 
-# -# This tool cannot be used with Wx grids. -# -# Authors: Les Colin - WFO Boise, ID, and Tim Barker - SOO Boise, ID -# -# 2003/06/21 - Revised "remoteness" calculation (to counteract observation- -# clustering). New module is called getGoodRemoteness. -# numpy-Python code: Barker. Algorithm: Colin. -# 2003/06/22 - Analyzes winds in u and v components, rather than by speed -# and direction. -# 2003/06/23 - Finishes tool by copying ISC data outside CWA. -# 2003/10/29 - Runs serp without considering sample points, then runs it -# again only on the samples. ISC-copy feature has been removed. -# 2004/05/30 - Uses improved serp analysis (see Barker). Can include or exclude -# various ISC neighbors. Can include or exclude currently displayed -# samples within your CWA. Samples in the ISC areas are ignored. -# 2004/07/09 - Modified to ignore duplicate sample points (previously, they -# would hang the tool). Also modified tool to allow partial match -# so that CWA grid adjusts only partway toward ISC grid. -# 2004/09/04 - Modified to work on an edit area, perhaps only half way across the -# home CWA. The effect is a taper from a full (or partial) adjustment -# at designated ISC borders to zero change inside the home CWA where -# the edit area stops. -# 2004/09/21 - Now works even if preceded by ISC_Copy (by moving the home CWA-border -# inward one pixel and comparing to nearest ISC neighbor values). -# Tool completes by running an equivalent ISC_Copy on the selected ISC -# borders. Tool now also contains a thinning feature to speed up -# execution. e.g., thinning by 2 runs the tool on alternate border -# points, thinning by 3 runs the tool on every third border point, etc. -# 2004/09/25 - Corrected bug in preceding version in which sample points could possibly -# coincide with the revised home CWA-border points and hang the tool. -# 2004/11/10 - Final ISC_Copy feature made optional. 
-# 2004/11/17 - Corrected return statement at end of tool, and repaired code when -# NOT adjusting for elevation. -# 2008/07/31 - added int() for arguments to createTimeRange for OB8.3. /TB -# 2012/07/13 - Version 1.7. AWIPS2 Port. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -ToolType = "numeric" -WeatherElementEdited = "variableElement" -ScreenList=["SCALAR","VECTOR"] - -# -#==================================================================== -# Part to modify for local configuration -defaultCWA="STO" -VariableList=[ - ("Include these WFOs:",["MTR","EKA","HNX","REV","MFR"],"check",["MTR","EKA","HNX","REV","MFR"]), - ("Intentional mismatch (CWA minus WFO):","0","alphaNumeric"), - ("Currently displayed CWA sample points:","Use","radio",["Use","Don't use"]), - ("Adjust for terrain elevation?","Yes","radio",["Yes","No"]), - ("Elevation Factor",36,"numeric"), - ("Tool thinning-factor:",1,"scale",[1,10],1), - ("Percent of full match",100,"scale",[0,100],1), - ("Copy ISC data in afterward?","No","radio",["Yes","No"]), - ] - -from numpy import * -import ObjAnal -import SmartScript -import time -from math import sin,cos,acos,pi - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - self._dbss=dbss - SmartScript.SmartScript.__init__(self, dbss) - def preProcessTool(self,varDict): - self.OA = ObjAnal.ObjAnal(self._dbss) - - def execute(self, variableElement, variableElement_GridInfo, editArea, varDict, Topo, WEname, GridTimeRange): - - wxType = variableElement_GridInfo.getGridType().ordinal() - defCWA=self.getEditArea(defaultCWA) - defcwa=self.encodeEditArea(defCWA) - nondefcwa=1-defcwa # i.e., toggle - nondefCWA=self.decodeEditArea(nondefcwa) - defea=self.taperGrid(nondefCWA,2)*2 - - # The above line defines the default CWA area as defea==0, the outer 
perimeter of the default CWA - # as defea==1, and further outside as defea==2. - - arbea=self.encodeEditArea(editArea) - nonarbea=1-arbea - nonarbEA=self.decodeEditArea(nonarbea) - arbea=self.taperGrid(nonarbEA,2)*2 - - cwa=self.empty(bool) - ISC=varDict["Include these WFOs:"] - samps=varDict["Currently displayed CWA sample points:"] - thin=varDict["Tool thinning-factor:"] - partial=varDict["Percent of full match"]*.01 - - for WFO in ISC: - CWA=self.getEditArea(WFO) - cwa |= self.encodeEditArea(CWA) - - alltrs=self._getAllHourlyTimeRanges(GridTimeRange) - if ((WEname=="MaxT")or(WEname=="PoP")): - sum=self.newGrid(-150.0) - elif (WEname=="MinT"): - sum=self.newGrid(150.0) - else: - if (wxType==2): - sum=[self.empty(),self.empty()] - else: - sum=self.empty() - cnt=self.empty() - - for tr in alltrs: - isc=self.getComposite(WEname,tr,0) - if isc is None: - - continue - # - # Add to sums, or min/max - # - if wxType==1: # SCALAR - bits,iscgrid=isc - if ((WEname=="MaxT")or(WEname=="PoP")): - sum=where(bits,maximum(iscgrid,sum),sum) - cnt[bits] = 1 - elif (WEname=="MinT"): - sum=where(bits,minimum(iscgrid,sum),sum) - cnt[bits] = 1 - else: - sum=where(bits,sum+iscgrid,sum) - cnt[bits] += 1 - if wxType==2: # VECTOR - bits,mag,dir=isc - (u,v)=self.MagDirToUV(mag,dir) - sum[0]=where(bits,sum[0]+u,sum[0]) - sum[1]=where(bits,sum[1]+v,sum[1]) - cnt[bits] += 1 - # - # now calculate average/max/min, etc. - # (count is always 1 for max/min) - # - if ((wxType==1)or(wxType==2)): - if (wxType==2): - (mag,dir)=variableElement - (u,v)=self.MagDirToUV(mag,dir) - sum[0]=where(equal(cnt,0),u,sum[0]) - sum[1]=where(equal(cnt,0),v,sum[1]) - else: - sum=where(equal(cnt,0),variableElement,sum) - cnt[equal(cnt,0)] = 1 - new=sum/cnt - if (wxType==2): - (mag,dir)=self.UVToMagDir(new[0],new[1]) - newvec=(mag,dir) - - self.elevadjust=0 - self.elevfactor=0. 
- if varDict["Adjust for terrain elevation?"]=="Yes": - self.elevadjust=1 - self.elevfactor=varDict["Elevation Factor"] - if self.elevfactor<1: - self.elevfactor=0. - - self.xloclist=[] - self.yloclist=[] - self.hloclist=[] - self.zlist=[] - self.ulist=[] - self.vlist=[] - - for x in range(1,Topo.shape[1]-1): - for y in range(1,Topo.shape[0]-1): - if (x+y)%thin!=0: - continue - if (arbea[y,x]<2 and defea[y,x]==0): - if (cwa[y,x+1]) or (cwa[y,x-1]) or (cwa[y+1,x]) or (cwa[y-1,x]): - if self.elevadjust==1: - self.hloclist.append(Topo[y,x]) - else: - self.hloclist.append(0.) - self.xloclist.append(x) - self.yloclist.append(y) - if wxType==1: - chgval=0. - n=0 - if cwa[y,x+1]==1: - if self.elevadjust==0: - chgval=chgval+(new[y,x+1]-variableElement[y,x]) - elif self.elevadjust==1: - elevdif=abs(Topo[y,x]-Topo[y,x+1]) - if elevdif<5000.: - # ISC-CWA neighbors more than 5000 ft apart in elevation are too - # dissimilar to compare. - chgval=chgval+(new[y,x+1]-variableElement[y,x])*(1.0-elevdif/5000.) - n=n+1 - if cwa[y,x-1]==1: - if self.elevadjust==0: - chgval=chgval+(new[y,x-1]-variableElement[y,x]) - elif self.elevadjust==1: - elevdif=abs(Topo[y,x]-Topo[y,x-1]) - if elevdif<5000.: - chgval=chgval+(new[y,x-1]-variableElement[y,x])*(1.0-elevdif/5000.) - n=n+1 - if cwa[y+1,x]==1: - if self.elevadjust==0: - chgval=chgval+(new[y+1,x]-variableElement[y,x]) - elif self.elevadjust==1: - elevdif=abs(Topo[y,x]-Topo[y+1,x]) - if elevdif<5000.: - chgval=chgval+(new[y+1,x]-variableElement[y,x])*(1.0-elevdif/5000.) - n=n+1 - if cwa[y-1,x]==1: - if self.elevadjust==0: - chgval=chgval+(new[y-1,x]-variableElement[y,x]) - elif self.elevadjust==1: - elevdif=abs(Topo[y,x]-Topo[y-1,x]) - if elevdif<5000.: - chgval=chgval+(new[y-1,x]-variableElement[y,x])*(1.0-elevdif/5000.) - n=n+1 - self.zlist.append((chgval/n)*partial) - - elif wxType==2: - (magcwa,dircwa)=variableElement - (ucwa,vcwa)=self.MagDirToUV(magcwa,dircwa) - (uisc,visc)=self.MagDirToUV(mag,dir) - chgu=0. - chgv=0. 
- n=0 - if cwa[y,x+1]==1: - if self.elevadjust==0: - chgu=chgu+(uisc[y,x+1]-ucwa[y,x]) - chgv=chgv+(visc[y,x+1]-vcwa[y,x]) - elif self.elevadjust==1: - elevdif=abs(Topo[y,x]-Topo[y,x+1]) - if elevdif<5000.: - chgu=chgu+(uisc[y,x+1]-ucwa[y,x])*(1.0-elevdif/5000.) - chgv=chgv+(visc[y,x+1]-vcwa[y,x])*(1.0-elevdif/5000.) - n=n+1 - if cwa[y,x-1]==1: - if self.elevadjust==0: - chgu=chgu+(uisc[y,x-1]-ucwa[y,x]) - chgv=chgv+(visc[y,x-1]-vcwa[y,x]) - elif self.elevadjust==1: - elevdif=abs(Topo[y,x]-Topo[y,x-1]) - if elevdif<5000.: - chgu=chgu+(uisc[y,x-1]-ucwa[y,x])*(1.0-elevdif/5000.) - chgv=chgv+(visc[y,x-1]-vcwa[y,x])*(1.0-elevdif/5000.) - n=n+1 - if cwa[y+1,x]==1: - if self.elevadjust==0: - chgu=chgu+(uisc[y+1,x]-ucwa[y,x]) - chgv=chgv+(visc[y+1,x]-vcwa[y,x]) - elif self.elevadjust==1: - elevdif=abs(Topo[y,x]-Topo[y+1,x]) - if elevdif<5000.: - chgu=chgu+(uisc[y+1,x]-ucwa[y,x])*(1.0-elevdif/5000.) - chgv=chgv+(visc[y+1,x]-vcwa[y,x])*(1.0-elevdif/5000.) - n=n+1 - if cwa[y-1,x]==1: - if self.elevadjust==0: - chgu=chgu+(uisc[y-1,x]-ucwa[y,x]) - chgv=chgv+(visc[y-1,x]-vcwa[y,x]) - elif self.elevadjust==1: - elevdif=abs(Topo[y,x]-Topo[y-1,x]) - if elevdif<5000.: - chgu=chgu+(uisc[y-1,x]-ucwa[y,x])*(1.0-elevdif/5000.) - chgv=chgv+(visc[y-1,x]-vcwa[y,x])*(1.0-elevdif/5000.) - n=n+1 - self.ulist.append((chgu/n)*partial) - self.vlist.append((chgv/n)*partial) - if arbea[y,x]==1 and defea[y,x]==0: - self.pointok=0 - for nn in range(len(self.xloclist)): - if (y==self.yloclist[nn]) and (x==self.xloclist[nn]): - self.pointok=1 - # In the above line an edit area IS on the screen and here we're looking for boundary points - # inside the home CWA that are more than one pixel from the border. We want to hold these - # points steady (i.e., zero change). - if self.pointok==1: # we already have this point, don't use it twice. - continue - self.xloclist.append(x) - self.yloclist.append(y) - if self.elevadjust==1: - self.hloclist.append(Topo[y,x]) - else: - self.hloclist.append(0.) 
- if wxType==1: - self.zlist.append(0.) - if wxType==2: - self.ulist.append(0.) - self.vlist.append(0.) - - if samps=="Use": - self.samplePoints = self.getSamplePoints(None) - for sample in self.samplePoints: - (x,y)=sample - self.sampleok=0 - for count in range(len(self.xloclist)): - if ((x==self.xloclist[count]) and (y==self.yloclist[count])): - self.sampleok=1 - # self.sampleok becomes 1 for a duplicate entry, so bypass the duplicate. - if self.sampleok==1: - continue - if x<0 or x>Topo.shape[1]-1: - continue - if y<0 or y>Topo.shape[0]-1: - continue - if defea[y,x]!=0: - continue - - if self.elevadjust==1: - self.hloclist.append(Topo[y,x]) - else: - self.hloclist.append(0.) - self.xloclist.append(x) - self.yloclist.append(y) - if wxType==1: - self.zlist.append(0.) - if wxType==2: - self.ulist.append(0.) - self.vlist.append(0.) - # - # Don't proceed if no points - # - if len(self.xloclist)==0: - self.statusBarMsg("No data available to serp to...","R") - return variableElement - else: - print " the number of points being used:",len(self.xloclist) - # - # - # - if wxType==1: # scalar - zval=self.OA.Serp(self.zlist,self.xloclist,self.yloclist,self.hloclist,self.elevfactor,Topo) - # zval is the new scalar-change grid. - if varDict["Copy ISC data in afterward?"]=="Yes": - znew=where(logical_or(equal(defea,0),equal(cwa,0)),variableElement+zval,new) - else: - znew=variableElement+zval - - if wxType==2: # vector - zval=self.OA.Serp(self.ulist,self.xloclist,self.yloclist,self.hloclist,self.elevfactor,Topo) - # zval is the new u-change grid. - if varDict["Copy ISC data in afterward?"]=="Yes": - newu=where(logical_or(equal(defea,0),equal(cwa,0)),ucwa+zval,new[0]) - else: - newu=ucwa+zval - zval=self.OA.Serp(self.vlist,self.xloclist,self.yloclist,self.hloclist,self.elevfactor,Topo) - # this zval is the new v-change grid. 
- if varDict["Copy ISC data in afterward?"]=="Yes": - newv=where(logical_or(equal(defea,0),equal(cwa,0)),vcwa+zval,new[1]) - else: - newv=vcwa+zval - (newspd,newdir)=self.UVToMagDir(newu,newv) - # newspd=where(equal(defea+cwa,0),newspd,mag) - # newdir=where(equal(defea+cwa,0),newdir,dir) - - znew=(newspd,newdir) - - absmax=variableElement_GridInfo.getMaxValue() - absmin=variableElement_GridInfo.getMinValue() - - if wxType==1: - return clip(znew,absmin,absmax) - else: - return znew - - #=================================================================== - # _getAllHourlyTimeRanges - gets a list of all 1-hour time ranges - # within the specified time range - # - def _getAllHourlyTimeRanges(self,tr): - # - # get integer time of UTC midnight today - # - secsinhour=60*60 - lt=time.gmtime() - mid=time.mktime((lt[0],lt[1],lt[2],0,0,0,lt[6],lt[7],lt[8])) - # - # get integer time of input timerange start - # - start=tr.startTime() - year=start.year - month=start.month - day=start.day - hour=start.hour - trs=time.mktime((year,month,day,hour,0,0,lt[6],lt[7],lt[8])) - # - # get integer time of input timerange end - # - end=tr.endTime() - year=end.year - month=end.month - day=end.day - hour=end.hour - tre=time.mktime((year,month,day,hour,0,0,lt[6],lt[7],lt[8])) - # - # The difference between start/end determines number of hours - # - numhours=int((tre-trs)/secsinhour) - # - # Difference between mid/start determines starting offset - # - offset=int((trs-mid)/secsinhour) - # - # create each hourly time range from offset - # - alltrs=[] - for hour in range(0,numhours): - newtr=self.createTimeRange(int(offset+hour),int(offset+hour+1),"Zulu") - alltrs.append(newtr) - - return alltrs +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. 
Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# SVN: $Revision$ - $Date$ +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# serpISC - version 1.7 +# +# Changes an existing grid to blend better into neighboring ISC grids. +# Can be used as an initial or final step in coordination. Only your grids +# are affected: nothing happens to the ISC grids. The ISC button must have +# been clicked on at least once before using this tool. +# +# Every point on the outer perimeter of CWA (i.e, belonging to selected ISCs) +# takes part in a serp adjustment of the existing grid. If any ISC grids are +# missing or not selected on a CWA boundary, your own grid is used there instead. +# +# You can use this tool on one ISC at a time to see how each one would influence +# your grid. To fit all ISC boundaries at once you must have all of them clicked +# on. Running the tool sequentially on each ISC will retain previous results if +# you keep the older ones turned on, but different sequences will yield slightly +# different results. +# +# Make sure your grid does not have an artificial boundary near the CWA border. +# Otherwise, it might already match your ISC neighbor there, so the tool won't +# adjust anything and your artificial boundary will remain. 
+# +# You can include or exclude as many sample points within your CWA as you like, but +# sample points close to an ISC border can create unrealistic gradients. +# +# You can match a border only partway if you want. Suppose you want to meet your +# ISC neighbor half way. Then set the "percent of full match" to 50. After sending +# your ISC grid, your neighbor will want to match FULL way (not half) to meet the +# newly received grid. You can also use "percent of full match" to nudge your +# grid to your neighbors' grids. +# +# If your grid's duration spans several shorter-duration ISC grids, the ISC +# grids will be time-averaged first (except for PoP which always uses the +# maximum value) and the fit will be inexact. Or, if the ISC grids themselves +# don't match at a CWA boundary (something you can't do in your own grid), the +# the tool will converge intermediate contours to the point of the mismatch, +# and the fit will look artificial. +# +# For winds serp runs twice, once for u and once for v. +# +# This tool cannot be used with Wx grids. +# +# Authors: Les Colin - WFO Boise, ID, and Tim Barker - SOO Boise, ID +# +# 2003/06/21 - Revised "remoteness" calculation (to counteract observation- +# clustering). New module is called getGoodRemoteness. +# numpy-Python code: Barker. Algorithm: Colin. +# 2003/06/22 - Analyzes winds in u and v components, rather than by speed +# and direction. +# 2003/06/23 - Finishes tool by copying ISC data outside CWA. +# 2003/10/29 - Runs serp without considering sample points, then runs it +# again only on the samples. ISC-copy feature has been removed. +# 2004/05/30 - Uses improved serp analysis (see Barker). Can include or exclude +# various ISC neighbors. Can include or exclude currently displayed +# samples within your CWA. Samples in the ISC areas are ignored. +# 2004/07/09 - Modified to ignore duplicate sample points (previously, they +# would hang the tool). 
Also modified tool to allow partial match +# so that CWA grid adjusts only partway toward ISC grid. +# 2004/09/04 - Modified to work on an edit area, perhaps only half way across the +# home CWA. The effect is a taper from a full (or partial) adjustment +# at designated ISC borders to zero change inside the home CWA where +# the edit area stops. +# 2004/09/21 - Now works even if preceded by ISC_Copy (by moving the home CWA-border +# inward one pixel and comparing to nearest ISC neighbor values). +# Tool completes by running an equivalent ISC_Copy on the selected ISC +# borders. Tool now also contains a thinning feature to speed up +# execution. e.g., thinning by 2 runs the tool on alternate border +# points, thinning by 3 runs the tool on every third border point, etc. +# 2004/09/25 - Corrected bug in preceding version in which sample points could possibly +# coincide with the revised home CWA-border points and hang the tool. +# 2004/11/10 - Final ISC_Copy feature made optional. +# 2004/11/17 - Corrected return statement at end of tool, and repaired code when +# NOT adjusting for elevation. +# 2008/07/31 - added int() for arguments to createTimeRange for OB8.3. /TB +# 2012/07/13 - Version 1.7. AWIPS2 Port. +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +ToolType = "numeric" +WeatherElementEdited = "variableElement" +ScreenList=["SCALAR","VECTOR"] + +# +#==================================================================== +# Part to modify for local configuration +defaultCWA="STO" +VariableList=[ + ("Include these WFOs:",["MTR","EKA","HNX","REV","MFR"],"check",["MTR","EKA","HNX","REV","MFR"]), + ("Intentional mismatch (CWA minus WFO):","0","alphaNumeric"), + ("Currently displayed CWA sample points:","Use","radio",["Use","Don't use"]), + ("Adjust for terrain elevation?","Yes","radio",["Yes","No"]), + ("Elevation Factor",36,"numeric"), + ("Tool thinning-factor:",1,"scale",[1,10],1), + ("Percent of full match",100,"scale",[0,100],1), + ("Copy ISC data in afterward?","No","radio",["Yes","No"]), + ] + +from numpy import * +import ObjAnal +import SmartScript +import time +from math import sin,cos,acos,pi + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + self._dbss=dbss + SmartScript.SmartScript.__init__(self, dbss) + def preProcessTool(self,varDict): + self.OA = ObjAnal.ObjAnal(self._dbss) + + def execute(self, variableElement, variableElement_GridInfo, editArea, varDict, Topo, WEname, GridTimeRange): + + wxType = variableElement_GridInfo.getGridType().ordinal() + defCWA=self.getEditArea(defaultCWA) + defcwa=self.encodeEditArea(defCWA) + nondefcwa=1-defcwa # i.e., toggle + nondefCWA=self.decodeEditArea(nondefcwa) + defea=self.taperGrid(nondefCWA,2)*2 + + # The above line defines the default CWA area as defea==0, the outer perimeter of the default CWA + # as defea==1, and further outside as defea==2. 
+ + arbea=self.encodeEditArea(editArea) + nonarbea=1-arbea + nonarbEA=self.decodeEditArea(nonarbea) + arbea=self.taperGrid(nonarbEA,2)*2 + + cwa=self.empty(bool) + ISC=varDict["Include these WFOs:"] + samps=varDict["Currently displayed CWA sample points:"] + thin=varDict["Tool thinning-factor:"] + partial=varDict["Percent of full match"]*.01 + + for WFO in ISC: + CWA=self.getEditArea(WFO) + cwa |= self.encodeEditArea(CWA) + + alltrs=self._getAllHourlyTimeRanges(GridTimeRange) + if ((WEname=="MaxT")or(WEname=="PoP")): + sum=self.newGrid(-150.0) + elif (WEname=="MinT"): + sum=self.newGrid(150.0) + else: + if (wxType==2): + sum=[self.empty(),self.empty()] + else: + sum=self.empty() + cnt=self.empty() + + for tr in alltrs: + isc=self.getComposite(WEname,tr,0) + if isc is None: + + continue + # + # Add to sums, or min/max + # + if wxType==1: # SCALAR + bits,iscgrid=isc + if ((WEname=="MaxT")or(WEname=="PoP")): + sum=where(bits,maximum(iscgrid,sum),sum) + cnt[bits] = 1 + elif (WEname=="MinT"): + sum=where(bits,minimum(iscgrid,sum),sum) + cnt[bits] = 1 + else: + sum=where(bits,sum+iscgrid,sum) + cnt[bits] += 1 + if wxType==2: # VECTOR + bits,mag,dir=isc + (u,v)=self.MagDirToUV(mag,dir) + sum[0]=where(bits,sum[0]+u,sum[0]) + sum[1]=where(bits,sum[1]+v,sum[1]) + cnt[bits] += 1 + # + # now calculate average/max/min, etc. + # (count is always 1 for max/min) + # + if ((wxType==1)or(wxType==2)): + if (wxType==2): + (mag,dir)=variableElement + (u,v)=self.MagDirToUV(mag,dir) + sum[0]=where(equal(cnt,0),u,sum[0]) + sum[1]=where(equal(cnt,0),v,sum[1]) + else: + sum=where(equal(cnt,0),variableElement,sum) + cnt[equal(cnt,0)] = 1 + new=sum/cnt + if (wxType==2): + (mag,dir)=self.UVToMagDir(new[0],new[1]) + newvec=(mag,dir) + + self.elevadjust=0 + self.elevfactor=0. + if varDict["Adjust for terrain elevation?"]=="Yes": + self.elevadjust=1 + self.elevfactor=varDict["Elevation Factor"] + if self.elevfactor<1: + self.elevfactor=0. 
+ + self.xloclist=[] + self.yloclist=[] + self.hloclist=[] + self.zlist=[] + self.ulist=[] + self.vlist=[] + + for x in range(1,Topo.shape[1]-1): + for y in range(1,Topo.shape[0]-1): + if (x+y)%thin!=0: + continue + if (arbea[y,x]<2 and defea[y,x]==0): + if (cwa[y,x+1]) or (cwa[y,x-1]) or (cwa[y+1,x]) or (cwa[y-1,x]): + if self.elevadjust==1: + self.hloclist.append(Topo[y,x]) + else: + self.hloclist.append(0.) + self.xloclist.append(x) + self.yloclist.append(y) + if wxType==1: + chgval=0. + n=0 + if cwa[y,x+1]==1: + if self.elevadjust==0: + chgval=chgval+(new[y,x+1]-variableElement[y,x]) + elif self.elevadjust==1: + elevdif=abs(Topo[y,x]-Topo[y,x+1]) + if elevdif<5000.: + # ISC-CWA neighbors more than 5000 ft apart in elevation are too + # dissimilar to compare. + chgval=chgval+(new[y,x+1]-variableElement[y,x])*(1.0-elevdif/5000.) + n=n+1 + if cwa[y,x-1]==1: + if self.elevadjust==0: + chgval=chgval+(new[y,x-1]-variableElement[y,x]) + elif self.elevadjust==1: + elevdif=abs(Topo[y,x]-Topo[y,x-1]) + if elevdif<5000.: + chgval=chgval+(new[y,x-1]-variableElement[y,x])*(1.0-elevdif/5000.) + n=n+1 + if cwa[y+1,x]==1: + if self.elevadjust==0: + chgval=chgval+(new[y+1,x]-variableElement[y,x]) + elif self.elevadjust==1: + elevdif=abs(Topo[y,x]-Topo[y+1,x]) + if elevdif<5000.: + chgval=chgval+(new[y+1,x]-variableElement[y,x])*(1.0-elevdif/5000.) + n=n+1 + if cwa[y-1,x]==1: + if self.elevadjust==0: + chgval=chgval+(new[y-1,x]-variableElement[y,x]) + elif self.elevadjust==1: + elevdif=abs(Topo[y,x]-Topo[y-1,x]) + if elevdif<5000.: + chgval=chgval+(new[y-1,x]-variableElement[y,x])*(1.0-elevdif/5000.) + n=n+1 + self.zlist.append((chgval/n)*partial) + + elif wxType==2: + (magcwa,dircwa)=variableElement + (ucwa,vcwa)=self.MagDirToUV(magcwa,dircwa) + (uisc,visc)=self.MagDirToUV(mag,dir) + chgu=0. + chgv=0. 
+ n=0 + if cwa[y,x+1]==1: + if self.elevadjust==0: + chgu=chgu+(uisc[y,x+1]-ucwa[y,x]) + chgv=chgv+(visc[y,x+1]-vcwa[y,x]) + elif self.elevadjust==1: + elevdif=abs(Topo[y,x]-Topo[y,x+1]) + if elevdif<5000.: + chgu=chgu+(uisc[y,x+1]-ucwa[y,x])*(1.0-elevdif/5000.) + chgv=chgv+(visc[y,x+1]-vcwa[y,x])*(1.0-elevdif/5000.) + n=n+1 + if cwa[y,x-1]==1: + if self.elevadjust==0: + chgu=chgu+(uisc[y,x-1]-ucwa[y,x]) + chgv=chgv+(visc[y,x-1]-vcwa[y,x]) + elif self.elevadjust==1: + elevdif=abs(Topo[y,x]-Topo[y,x-1]) + if elevdif<5000.: + chgu=chgu+(uisc[y,x-1]-ucwa[y,x])*(1.0-elevdif/5000.) + chgv=chgv+(visc[y,x-1]-vcwa[y,x])*(1.0-elevdif/5000.) + n=n+1 + if cwa[y+1,x]==1: + if self.elevadjust==0: + chgu=chgu+(uisc[y+1,x]-ucwa[y,x]) + chgv=chgv+(visc[y+1,x]-vcwa[y,x]) + elif self.elevadjust==1: + elevdif=abs(Topo[y,x]-Topo[y+1,x]) + if elevdif<5000.: + chgu=chgu+(uisc[y+1,x]-ucwa[y,x])*(1.0-elevdif/5000.) + chgv=chgv+(visc[y+1,x]-vcwa[y,x])*(1.0-elevdif/5000.) + n=n+1 + if cwa[y-1,x]==1: + if self.elevadjust==0: + chgu=chgu+(uisc[y-1,x]-ucwa[y,x]) + chgv=chgv+(visc[y-1,x]-vcwa[y,x]) + elif self.elevadjust==1: + elevdif=abs(Topo[y,x]-Topo[y-1,x]) + if elevdif<5000.: + chgu=chgu+(uisc[y-1,x]-ucwa[y,x])*(1.0-elevdif/5000.) + chgv=chgv+(visc[y-1,x]-vcwa[y,x])*(1.0-elevdif/5000.) + n=n+1 + self.ulist.append((chgu/n)*partial) + self.vlist.append((chgv/n)*partial) + if arbea[y,x]==1 and defea[y,x]==0: + self.pointok=0 + for nn in range(len(self.xloclist)): + if (y==self.yloclist[nn]) and (x==self.xloclist[nn]): + self.pointok=1 + # In the above line an edit area IS on the screen and here we're looking for boundary points + # inside the home CWA that are more than one pixel from the border. We want to hold these + # points steady (i.e., zero change). + if self.pointok==1: # we already have this point, don't use it twice. + continue + self.xloclist.append(x) + self.yloclist.append(y) + if self.elevadjust==1: + self.hloclist.append(Topo[y,x]) + else: + self.hloclist.append(0.) 
+ if wxType==1: + self.zlist.append(0.) + if wxType==2: + self.ulist.append(0.) + self.vlist.append(0.) + + if samps=="Use": + self.samplePoints = self.getSamplePoints(None) + for sample in self.samplePoints: + (x,y)=sample + self.sampleok=0 + for count in range(len(self.xloclist)): + if ((x==self.xloclist[count]) and (y==self.yloclist[count])): + self.sampleok=1 + # self.sampleok becomes 1 for a duplicate entry, so bypass the duplicate. + if self.sampleok==1: + continue + if x<0 or x>Topo.shape[1]-1: + continue + if y<0 or y>Topo.shape[0]-1: + continue + if defea[y,x]!=0: + continue + + if self.elevadjust==1: + self.hloclist.append(Topo[y,x]) + else: + self.hloclist.append(0.) + self.xloclist.append(x) + self.yloclist.append(y) + if wxType==1: + self.zlist.append(0.) + if wxType==2: + self.ulist.append(0.) + self.vlist.append(0.) + # + # Don't proceed if no points + # + if len(self.xloclist)==0: + self.statusBarMsg("No data available to serp to...","R") + return variableElement + else: + print(" the number of points being used:",len(self.xloclist)) + # + # + # + if wxType==1: # scalar + zval=self.OA.Serp(self.zlist,self.xloclist,self.yloclist,self.hloclist,self.elevfactor,Topo) + # zval is the new scalar-change grid. + if varDict["Copy ISC data in afterward?"]=="Yes": + znew=where(logical_or(equal(defea,0),equal(cwa,0)),variableElement+zval,new) + else: + znew=variableElement+zval + + if wxType==2: # vector + zval=self.OA.Serp(self.ulist,self.xloclist,self.yloclist,self.hloclist,self.elevfactor,Topo) + # zval is the new u-change grid. + if varDict["Copy ISC data in afterward?"]=="Yes": + newu=where(logical_or(equal(defea,0),equal(cwa,0)),ucwa+zval,new[0]) + else: + newu=ucwa+zval + zval=self.OA.Serp(self.vlist,self.xloclist,self.yloclist,self.hloclist,self.elevfactor,Topo) + # this zval is the new v-change grid. 
+ if varDict["Copy ISC data in afterward?"]=="Yes": + newv=where(logical_or(equal(defea,0),equal(cwa,0)),vcwa+zval,new[1]) + else: + newv=vcwa+zval + (newspd,newdir)=self.UVToMagDir(newu,newv) + # newspd=where(equal(defea+cwa,0),newspd,mag) + # newdir=where(equal(defea+cwa,0),newdir,dir) + + znew=(newspd,newdir) + + absmax=variableElement_GridInfo.getMaxValue() + absmin=variableElement_GridInfo.getMinValue() + + if wxType==1: + return clip(znew,absmin,absmax) + else: + return znew + + #=================================================================== + # _getAllHourlyTimeRanges - gets a list of all 1-hour time ranges + # within the specified time range + # + def _getAllHourlyTimeRanges(self,tr): + # + # get integer time of UTC midnight today + # + secsinhour=60*60 + lt=time.gmtime() + mid=time.mktime((lt[0],lt[1],lt[2],0,0,0,lt[6],lt[7],lt[8])) + # + # get integer time of input timerange start + # + start=tr.startTime() + year=start.year + month=start.month + day=start.day + hour=start.hour + trs=time.mktime((year,month,day,hour,0,0,lt[6],lt[7],lt[8])) + # + # get integer time of input timerange end + # + end=tr.endTime() + year=end.year + month=end.month + day=end.day + hour=end.hour + tre=time.mktime((year,month,day,hour,0,0,lt[6],lt[7],lt[8])) + # + # The difference between start/end determines number of hours + # + numhours=int((tre-trs)/secsinhour) + # + # Difference between mid/start determines starting offset + # + offset=int((trs-mid)/secsinhour) + # + # create each hourly time range from offset + # + alltrs=[] + for hour in range(0,numhours): + newtr=self.createTimeRange(int(offset+hour),int(offset+hour+1),"Zulu") + alltrs.append(newtr) + + return alltrs diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Show_ISC_Info.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Show_ISC_Info.py index 5241eeeb9f..4d0228eba7 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Show_ISC_Info.py 
+++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/Show_ISC_Info.py @@ -1,180 +1,180 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Show_ISC_Info - Version 3.04 (Tim Barker - SOO Boise, ID) -# -# creates a Tk window where it shows information on the ISC discrepancy -# stats for each of the surrounding CWAs (the list of CWAs is controlled -# by the config information in the ISC_Utility_Local). 
For each CWA it -# gives the average discrepancy, the average threshold, the number of -# points considered (some border pairs ignored because of topography -# differences, ocean/land difference, or failure to meet conditional -# criteria like wind speeds above 12 kts, etc.), number of individual -# points that failed, and a colored highlight for whether that border -# passed or failed. -# -# Optionally can have it check multiple parameters when run on certain -# grids. By default, when you run it on RH grids, it checks T and Td -# grids too. -# -# 2006-01-23 - Barker - Version 3.04. Added thresholds for more parameters -# 2006-01-19 - Barker - Version 3.03. Fixed another problm in ISC_Utility -# for non-square GFE domains. -# 2006-01-17 - Barker - Version 3.02. Fixed problem in ISC_Utility for -# non-square GFE domains. -# 2006-01-13 - Barker - Version 3.01. Changed for new NDFD algorithm. -# Thresholds now vary at each gridpoint - overall average -# difference along border must be less than average threshold -# along that border (a much better algorithm!). All -# calculations done in ISC Utility routine. Text summary of -# borders is displayed in format similar to old tool - but -# different because of new agorithm. -# 2004-11-17 - Mathewson - baselined at FSL -# 2004-10-31 - Version 2.4. Remove by-length calculations. Fix error in -# sky threshold. Fix Status Bar messages for IFPS 16. Fix -# accumulative elements. -# 2004-10-12 - Version 2.3. Remove restriction that ISC grids must be -# displayed (not needed in IFPS 15 or 16). Cuts down on number -# of cached grids it stores and increases time between -# recomputes of cached grids -# 2004-09-30 - Version 2.2. Changes to ISC_Utility_Local for handling -# specified edit areas (which can include marine) rather than -# edit areas based only on CWA name. Thresholds changed -# extensively to add thresholds based on grid values. 
Code -# to eliminate border pairs with large elevation differences -# changed to more reasonable code, since NDFD fixed their code. -# Changed to show if the average discrepancy would violate -# the 'by length' weighted average threshold (as NDFD does). -# 2004-09-05 - Version 2.1. Changes to ISC_Utility_Local for -# handling areas where no neighbor exists. -# 2004-08-15 - Version 2.0 - Thresholds are not hard-coded and derived from -# average elevation difference - like in NDFD (but still -# WRONG (in my opinion). -# 2004-06-20 - version 1.1 - Various changes to vector checks to be somewhat -# closer to NDFD checks - though NDFD is in a state of flux -# 2004-06-08 - version 1.0 - Added multi-parameter tests, and the -# handling of vector parms -# 2004-02-11 - Cleaned up old tool used in BOI for quite some time. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# ---------------------------------------------------------------------------- -# -# C O N F I G U R A T I O N S E C T I O N -# -# See ISC_Utility. 
-# -# E N D O F C O N F I G U R A T I O N S E C T I O N -# -#---------------------------------------------------------------------------- -ToolType = "numeric" -WeatherElementEdited = "None" -ScreenList = ["SCALAR","VECTOR"] - -import numpy -from Tkinter import * - -import ISC_Utility_Local -import SmartScript -import time - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - self._dbss = dbss - SmartScript.SmartScript.__init__(self, dbss) - - def preProcessTool(self, WEname): - self._utility = ISC_Utility_Local.ISC_Utility_Local(self._dbss, None) - - def preProcessGrid(self, WEname, GridTimeRange): - # - # Run the check - and get the text listing - # - (numchecked,violate,warning)=self._utility._checkParmBorders(WEname,GridTimeRange,listing=1) - outtext=self._utility._getListing() - # - # Setup window to display results - # - self.chk=Tk() - self.chk.title("ISC Discrepancy Info") - self.frame=Frame(self.chk) - self.button=Button(self.frame,text="Close",fg="red", - command=self.chk.quit) - self.button.pack(side=BOTTOM) - self.frame.pack(side=BOTTOM,fill=X) - self.scrollbar=Scrollbar(self.chk) - self.scrollbar.pack(side=RIGHT,fill=Y) - self.text=Text(self.chk,width=100,height=25, - yscrollcommand=self.scrollbar.set) - self.text.tag_config("extreme",foreground="purple4") - self.text.tag_config("high",foreground="red") - self.text.tag_config("medium",foreground="DarkOrange") - self.text.tag_config("low",foreground="DarkGreen") - self.text.tag_config("none",foreground="black") - self.text.pack(fill=BOTH,expand=1) - # - # Display lines - coloring the last word if it is recognized. 
- # - try: - lines=outtext.split("\n") - for line in lines: - if line[-2:]=="OK": - newline=line[:-2] - self.text.insert(END,newline,"none") - self.text.insert(END,"OK\n","low") - elif line[-7:]=="IGNORED": - newline=line[:-7] - self.text.insert(END,newline,"none") - self.text.insert(END,"IGNORED\n","medium") - elif line[-6:]=="FAILED": - newline=line[:-6] - self.text.insert(END,newline,"none") - self.text.insert(END,"FAILED\n","high") - else: - self.text.insert(END,line+"\n","none") - except: - self.chk.destroy() - raise - else: - self.text.configure(state=DISABLED) - self.scrollbar.config(command=self.text.yview) - self.chk.mainloop() - try: - self.chk.destroy() - except: - pass - self.cancel() - - #====================================================================== - # - # dummy execute routine - # - def execute(self): - "Show ISC discrepancies for each neighbor" - return - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Show_ISC_Info - Version 3.04 (Tim Barker - SOO Boise, ID) +# +# creates a Tk window where it shows information on the ISC discrepancy +# stats for each of the surrounding CWAs (the list of CWAs is controlled +# by the config information in the ISC_Utility_Local). 
For each CWA it +# gives the average discrepancy, the average threshold, the number of +# points considered (some border pairs ignored because of topography +# differences, ocean/land difference, or failure to meet conditional +# criteria like wind speeds above 12 kts, etc.), number of individual +# points that failed, and a colored highlight for whether that border +# passed or failed. +# +# Optionally can have it check multiple parameters when run on certain +# grids. By default, when you run it on RH grids, it checks T and Td +# grids too. +# +# 2006-01-23 - Barker - Version 3.04. Added thresholds for more parameters +# 2006-01-19 - Barker - Version 3.03. Fixed another problm in ISC_Utility +# for non-square GFE domains. +# 2006-01-17 - Barker - Version 3.02. Fixed problem in ISC_Utility for +# non-square GFE domains. +# 2006-01-13 - Barker - Version 3.01. Changed for new NDFD algorithm. +# Thresholds now vary at each gridpoint - overall average +# difference along border must be less than average threshold +# along that border (a much better algorithm!). All +# calculations done in ISC Utility routine. Text summary of +# borders is displayed in format similar to old tool - but +# different because of new agorithm. +# 2004-11-17 - Mathewson - baselined at FSL +# 2004-10-31 - Version 2.4. Remove by-length calculations. Fix error in +# sky threshold. Fix Status Bar messages for IFPS 16. Fix +# accumulative elements. +# 2004-10-12 - Version 2.3. Remove restriction that ISC grids must be +# displayed (not needed in IFPS 15 or 16). Cuts down on number +# of cached grids it stores and increases time between +# recomputes of cached grids +# 2004-09-30 - Version 2.2. Changes to ISC_Utility_Local for handling +# specified edit areas (which can include marine) rather than +# edit areas based only on CWA name. Thresholds changed +# extensively to add thresholds based on grid values. 
Code +# to eliminate border pairs with large elevation differences +# changed to more reasonable code, since NDFD fixed their code. +# Changed to show if the average discrepancy would violate +# the 'by length' weighted average threshold (as NDFD does). +# 2004-09-05 - Version 2.1. Changes to ISC_Utility_Local for +# handling areas where no neighbor exists. +# 2004-08-15 - Version 2.0 - Thresholds are not hard-coded and derived from +# average elevation difference - like in NDFD (but still +# WRONG (in my opinion). +# 2004-06-20 - version 1.1 - Various changes to vector checks to be somewhat +# closer to NDFD checks - though NDFD is in a state of flux +# 2004-06-08 - version 1.0 - Added multi-parameter tests, and the +# handling of vector parms +# 2004-02-11 - Cleaned up old tool used in BOI for quite some time. +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# ---------------------------------------------------------------------------- +# +# C O N F I G U R A T I O N S E C T I O N +# +# See ISC_Utility. 
+# +# E N D O F C O N F I G U R A T I O N S E C T I O N +# +#---------------------------------------------------------------------------- +ToolType = "numeric" +WeatherElementEdited = "None" +ScreenList = ["SCALAR","VECTOR"] + +import numpy +from tkinter import * + +import ISC_Utility_Local +import SmartScript +import time + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + self._dbss = dbss + SmartScript.SmartScript.__init__(self, dbss) + + def preProcessTool(self, WEname): + self._utility = ISC_Utility_Local.ISC_Utility_Local(self._dbss, None) + + def preProcessGrid(self, WEname, GridTimeRange): + # + # Run the check - and get the text listing + # + (numchecked,violate,warning)=self._utility._checkParmBorders(WEname,GridTimeRange,listing=1) + outtext=self._utility._getListing() + # + # Setup window to display results + # + self.chk=Tk() + self.chk.title("ISC Discrepancy Info") + self.frame=Frame(self.chk) + self.button=Button(self.frame,text="Close",fg="red", + command=self.chk.quit) + self.button.pack(side=BOTTOM) + self.frame.pack(side=BOTTOM,fill=X) + self.scrollbar=Scrollbar(self.chk) + self.scrollbar.pack(side=RIGHT,fill=Y) + self.text=Text(self.chk,width=100,height=25, + yscrollcommand=self.scrollbar.set) + self.text.tag_config("extreme",foreground="purple4") + self.text.tag_config("high",foreground="red") + self.text.tag_config("medium",foreground="DarkOrange") + self.text.tag_config("low",foreground="DarkGreen") + self.text.tag_config("none",foreground="black") + self.text.pack(fill=BOTH,expand=1) + # + # Display lines - coloring the last word if it is recognized. 
+ # + try: + lines=outtext.split("\n") + for line in lines: + if line[-2:]=="OK": + newline=line[:-2] + self.text.insert(END,newline,"none") + self.text.insert(END,"OK\n","low") + elif line[-7:]=="IGNORED": + newline=line[:-7] + self.text.insert(END,newline,"none") + self.text.insert(END,"IGNORED\n","medium") + elif line[-6:]=="FAILED": + newline=line[:-6] + self.text.insert(END,newline,"none") + self.text.insert(END,"FAILED\n","high") + else: + self.text.insert(END,line+"\n","none") + except: + self.chk.destroy() + raise + else: + self.text.configure(state=DISABLED) + self.scrollbar.config(command=self.text.yview) + self.chk.mainloop() + try: + self.chk.destroy() + except: + pass + self.cancel() + + #====================================================================== + # + # dummy execute routine + # + def execute(self): + "Show ISC discrepancies for each neighbor" + return + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/TransWind_NoVar.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/TransWind_NoVar.py index fe88020232..2cc2bd25d6 100755 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/TransWind_NoVar.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/TransWind_NoVar.py @@ -1,122 +1,122 @@ -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TransWind2_Tool numeric version of a Transport Wind smarttool. -# -# Author: C. Gibson SLC 10/01 Modified from a wind smarttool written by Tom L. -# -# Designed for the NAM but can be adapted for other models. Two major modes of -# operation 1. boundary layer (BL) winds included, or 2. only pressure "layers" -# used. Follow comments to switch between. Also, you can comment out certain BL -# levels as desired. 
-# -# ---------------------------------------------------------------------------- -# -# This tool modified February 2004/Matt Davis/ARX -# 10/2004 - Updated to allow the NAM12, NAM40, and GFS choices for this procedure. -# NCNWS Fire Bundle Package Release 2.1 -# -ToolType = "numeric" -WeatherElementEdited = "TransWind" -from numpy import * -import SmartScript -import Dialog, time - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, GridTimeRange, Topo, MixHgt, Wind): - - fwModel = self.getObject("FireModel", "ModelType") - - if fwModel == "NAM12": - modelSource = "_D2D_NAM12" - elif fwModel == "NAM40": - modelSource = "_D2D_NAM40" - else: - modelSource = "_D2D_GFS40" - - "Calculates average wind in the mixed layer." - self.setVectorEditMode('Both') - site = self.getSiteID() -## model = site + modelSource + monthrun - model = site + modelSource - print "Using " + model + " for Transport Wind Calculation" - print "Transport Wind time range is: \n" + `GridTimeRange` - - - self.__D2Dmodel = model - - - layers = ["MB975", "MB950", "MB925", "MB900", "MB875","MB850","MB825","MB800","MB775","MB750","MB725","MB700","MB650","MB600"] - - # Get the ghCube - gh = self.makeNumericSounding(self.__D2Dmodel, 'wind', layers, - GridTimeRange, noDataError=1) - if gh is None: - self.noData() - - ghCube, windCube = gh - magCube, dirCube = windCube - - levels = 0 - uTotal = 0 - vTotal = 0 - - u,v = getUV(self, Wind[0], Wind[1]) - levels = levels + 1 - uTotal = uTotal + u - vTotal = vTotal + v - - -# Average winds at pressure levels between Topo and MixHgt - - - Topo_M = Topo / 3.2808 - MixHgt_M = (Topo + MixHgt) / 3.2808 - - for i in xrange(len(layers)): - MHset = less(ghCube[i],MixHgt_M) - toposet = greater(ghCube[i],Topo_M) - readyToSet = logical_and(less(ghCube[i],MixHgt_M), greater(ghCube[i],Topo_M)) - u,v = getUV(self, magCube[i], dirCube[i]) - - levels = where(readyToSet, levels + 1, levels) - 
uTotal = where(readyToSet, uTotal + u, uTotal) - vTotal = where(readyToSet, vTotal + v, vTotal) - - - leveltest = equal(levels, 0) - - levels[leveltest] = 1 - - vTotal = vTotal/levels - uTotal = uTotal/levels - mag_Msec, dir = getMD(self, uTotal, vTotal) - mag_Kts = self.convertMsecToKts(mag_Msec) - - TransWind = (mag_Kts, dir) - # Return the new value - - return TransWind - -# converts mag, dir to u,v -# addapted from BASE init.py -def getUV(self, mag, dir): - rad = dir * 0.0174 - u = mag * sin(rad) - v = mag * cos(rad) - return (u, v) - -# converts u,v to mag and direction. -# adapted from BASE init.py -def getMD(self, u, v): - mag = sqrt(u * u + v * v) - dir = arctan2(u, v) / 0.0174 - - dir[greater_equal(dir, 360)] -= 360 - dir[less(dir, 0)] +=360 - - return (mag, dir) +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TransWind2_Tool numeric version of a Transport Wind smarttool. +# +# Author: C. Gibson SLC 10/01 Modified from a wind smarttool written by Tom L. +# +# Designed for the NAM but can be adapted for other models. Two major modes of +# operation 1. boundary layer (BL) winds included, or 2. only pressure "layers" +# used. Follow comments to switch between. Also, you can comment out certain BL +# levels as desired. +# +# ---------------------------------------------------------------------------- +# +# This tool modified February 2004/Matt Davis/ARX +# 10/2004 - Updated to allow the NAM12, NAM40, and GFS choices for this procedure. 
+# NCNWS Fire Bundle Package Release 2.1 +# +ToolType = "numeric" +WeatherElementEdited = "TransWind" +from numpy import * +import SmartScript +import tkinter.dialog, time + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, GridTimeRange, Topo, MixHgt, Wind): + + fwModel = self.getObject("FireModel", "ModelType") + + if fwModel == "NAM12": + modelSource = "_D2D_NAM12" + elif fwModel == "NAM40": + modelSource = "_D2D_NAM40" + else: + modelSource = "_D2D_GFS40" + + "Calculates average wind in the mixed layer." + self.setVectorEditMode('Both') + site = self.getSiteID() +## model = site + modelSource + monthrun + model = site + modelSource + print("Using " + model + " for Transport Wind Calculation") + print("Transport Wind time range is: \n" + repr(GridTimeRange)) + + + self.__D2Dmodel = model + + + layers = ["MB975", "MB950", "MB925", "MB900", "MB875","MB850","MB825","MB800","MB775","MB750","MB725","MB700","MB650","MB600"] + + # Get the ghCube + gh = self.makeNumericSounding(self.__D2Dmodel, 'wind', layers, + GridTimeRange, noDataError=1) + if gh is None: + self.noData() + + ghCube, windCube = gh + magCube, dirCube = windCube + + levels = 0 + uTotal = 0 + vTotal = 0 + + u,v = getUV(self, Wind[0], Wind[1]) + levels = levels + 1 + uTotal = uTotal + u + vTotal = vTotal + v + + +# Average winds at pressure levels between Topo and MixHgt + + + Topo_M = Topo / 3.2808 + MixHgt_M = (Topo + MixHgt) / 3.2808 + + for i in range(len(layers)): + MHset = less(ghCube[i],MixHgt_M) + toposet = greater(ghCube[i],Topo_M) + readyToSet = logical_and(less(ghCube[i],MixHgt_M), greater(ghCube[i],Topo_M)) + u,v = getUV(self, magCube[i], dirCube[i]) + + levels = where(readyToSet, levels + 1, levels) + uTotal = where(readyToSet, uTotal + u, uTotal) + vTotal = where(readyToSet, vTotal + v, vTotal) + + + leveltest = equal(levels, 0) + + levels[leveltest] = 1 + + vTotal = vTotal/levels + uTotal = uTotal/levels + 
mag_Msec, dir = getMD(self, uTotal, vTotal) + mag_Kts = self.convertMsecToKts(mag_Msec) + + TransWind = (mag_Kts, dir) + # Return the new value + + return TransWind + +# converts mag, dir to u,v +# addapted from BASE init.py +def getUV(self, mag, dir): + rad = dir * 0.0174 + u = mag * sin(rad) + v = mag * cos(rad) + return (u, v) + +# converts u,v to mag and direction. +# adapted from BASE init.py +def getMD(self, u, v): + mag = sqrt(u * u + v * v) + dir = arctan2(u, v) / 0.0174 + + dir[greater_equal(dir, 360)] -= 360 + dir[less(dir, 0)] +=360 + + return (mag, dir) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py index 4b7fad07fb..6a67691446 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/WindGustFromAlgorithm.py @@ -1,413 +1,413 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# New_WindGust_Tool -# -# Authors: Tom Mazza NWS Charleston, WV Created: 04/25/03 -# Matthew H. Belk NWS Taunton, MA Last Modified: 06/16/03 -# Mathewson FSL Modified: 3/30/04 -# -change in model names to OB3 names -#---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 02/10/2016 5283 nabowle Remove NGM support. -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -ToolType = "numeric" -WeatherElementEdited = "WindGust" -from numpy import * -# without this, the builtin max() is used -from numpy import max -import LogStream - -# You can screen the elements for which your tool will appear by using -# a ScreenList. 
For example: - -#ScreenList = ["MixHgt","WindGust", "TransWind"] - -# Set up variables to be solicited from the user: -VariableList = [ - ("Momentum algorithm:", "RUC", "radio", ["RUC", "Power"]), - ("Use BL Winds:", "No", "radio", ["Yes", "No"]), - ("Model:", "NAM12", "radio", - ["GFS80", "NAM12", "gfsLR", "RAP40"]) -] - - -#Set up Class -import SmartScript -## For available commands, see SmartScript - -toolName = 'WindGustFromAlgorithm' - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Define your site ID - self._SITEID = "BOX" - - # Required Method: Execute - # Called once for each grid - # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... - - def execute(self, Wind, MixHgt, Topo, GridTimeRange): - "Determines WindGust using one of two algorithms, one from the RUC or a power relationship. This tool assumes your mixing height has already been adjusted for your surface temperatures." - - sounding = self.makeNumericSounding(self._model, "wind", - self._modelCube, GridTimeRange, - noDataError=0) - - ######################################################################## - # If we don't have a model sounding at this point in time, or the - # size of the grids do not match - if sounding is None: # or sounding[0].shape != Topo.shape: - LogStream.logProblem(toolName, ': cannot obtain a Wind sounding') - return None # leaves current WindGust grid alone - - ######################################################################## - # If we made it this far, split up the sounding into its component - # cubes of height and wind - (gh_Cube, wind_Cube) = sounding - - if gh_Cube is None: - LogStream.logProblem(toolName, 'gh_Cube is None') - return None - - if wind_Cube is None: - LogStream.logProblem(toolName, 'wind_Cube is None') - return None - - ######################################################################## - # Convert topography from feet to meters - self._topo = 
self.ftToM(Topo) - - ######################################################################## - # Initialize a cube to hold BL wind grids - bl_WindCube = {} - - ######################################################################## - # Cycle through all the BL levels we have for this model - for lvl in self._blCube: - - #################################################################### - # Initialize BL wind grid for this level - grid = None - - #################################################################### - # If this is the NAM40/20 model - if self._model.find('NAM40') != -1: - - ################################################################ - # Get BL winds from other NAM40/NAM20 file - tempModel = self._model.replace('NAM40', 'NAM20') - - ################################################################ - # Try to get model BL winds for this time - grid = self.getGrids(tempModel, "wind", lvl, GridTimeRange, - noDataError=0) - - #################################################################### - # Otherwise - else: - - ################################################################ - # Try to get model BL winds for this time - grid = self.getGrids(self._model, "Wind", lvl, GridTimeRange, - noDataError=0) - - #################################################################### - # Add this grid to the BL wind cube - if it is valid - if grid != None: - - ################################################################ - # Store the wind speeds at this BL level - bl_WindCube[lvl] = grid[0] - - #################################################################### - # Otherwise - else: - - ################################################################ - # Store a placeholder - bl_WindCube[lvl] = None - - ######################################################################## - # Convert mixing height from ft ASL to m ASL - mixHgt_m = self.ftToM(MixHgt) - - ######################################################################## - # Make a 3D mask 
where the model sounding level is ABOVE the ground, - # but below the Mixing Height - self._mixedLayer = (gh_Cube >= self._topo) & (gh_Cube <= mixHgt_m) - ######################################################################## - # Method to compute WindGust using a version of the RUC technique - # adapted by Matthew H. Belk (BOX). - - ######################################################################## - # Initialize WindGust using current 10m Wind speeds - (mag, dir) - WindGust = Wind[0] - - ######################################################################## - # Move vertically through the model BL cube - for lvl in self._blCube: - - #################################################################### - # Make a mask where this BL surface is at or below the MixHgt - blMask = MixHgt <= self._blHgt[lvl] - - #################################################################### - # If there are any points in the mixed layer at this surface, and - # there actually is a wind grid - if any(blMask) and bl_WindCube[lvl] != None: - - ################################################################ - # Get wind magnitude at current level - remember model winds - # are in m/s and need to be in kts for comparison - curMag = self.mpsToKt(bl_WindCube[lvl]) - - ################################################################ - # Compute difference between wind at this level and SFC wind - # where points are in the mixed layer - deltaSpd = curMag - Wind[0] - - ################################################################ - # Get the depth of the mixed layer to this point (m AGL) - deltaZ = self._blHgt[lvl] - - ################################################################ - # Adjust change in wind speed by a coefficient - using the - # lesser of 0.5 or (deltaZ / 2000) - # First get the factor, which will range from 0.5 to 1.0, - # higher closer to the ground - delta = max(1.0 - deltaZ/2000.0, 0.5) - - ################################################################ - 
# Employ the power relationship if selected: it focuses in on - # how much lower than one this factor will be (it ranges from - # no less than 1 just above the surface to 0.5 lower than 1 - # 1000 or more feet from the surface). The power relationship - # takes this small number (between 0 and 0.5) to the second - # power, which makes it smaller still. It actually first - # doubles it, then squares it, then halves it again. This - # causes a difference of 0 to stay 0, a difference of 0.5 to - # stay at 0.5, but a difference of 0.25 will become 0.125. - # This difference is then subtracted from one, to get a new, - # equal or larger factor by which to multiply the potential - # wind gust, to arrive at a gust potential that decreases more - # slowly at first with height, then more rapidly later on, to - # arrive at the same factor up at 1000 m and more above the - # surface. The resulting wind gust is always equal to or - # greater than using the RUC algorthm straight up. - - if self._algorithm == 'Power': - delta = 1 - (pow((2 * (1 - delta)), 2)) / 2 - - ################################################################ - # Adjust wind speed difference by chosen coefficient - deltaSpd *= delta - - gustV = Wind[0] + deltaSpd - ################################################################ - # Make a mask where this WindGust is > current WindGust - newGust = gustV > WindGust - - ################################################################ - # Assign new WindGust where new WindGust is greater and the - # surface is still within the mixed layer - WindGustMask = newGust & blMask - WindGust[WindGustMask] = gustV[WindGustMask] - - ######################################################################## - # Move vertically through the model cube - for i in xrange(gh_Cube.shape[0]): - - #################################################################### - # If there are any points in the mixed layer at this surface - if any(self._mixedLayer[i]): - - 
################################################################ - # Get wind magnitude at current level - remember model winds - # are in m/s and need to be in kts for comparison - curMag = self.mpsToKt(wind_Cube[0][i]) - - ################################################################ - # Compute difference between wind at this level and SFC wind - # where points are in the mixed layer - deltaSpd = curMag - Wind[0] - - ################################################################ - # Get the depth of the mixed layer to this point (m AGL) - deltaZ = gh_Cube[i] - self._topo - - ################################################################ - # Adjust change in wind speed by a coefficient - using the - # lesser of 0.5 or (deltaZ / 2000) - # First get the factor, which will range from 0.5 to 1.0, - # higher closer to the ground - delta = max(1.0-deltaZ/2000.0,0.5) - - ################################################################ - # Employ the power relationship if selected: it focuses in on - # how much lower than one this factor will be (it ranges from - # no less than 1 just above the surface to 0.5 lower than 1 - # 1000 or more feet from the surface). The power relationship - # takes this small number (between 0 and 0.5) to the second - # power, which makes it smaller still. It actually first - # doubles it, then squares it, then halves it again. This - # causes a difference of 0 to stay 0, a difference of 0.5 to - # stay at 0.5, but a difference of 0.25 will become 0.125. - # This difference is then subtracted from one, to get a new, - # equal or larger factor by which to multiply the potential - # wind gust, to arrive at a gust potential that decreases more - # slowly at first with height, then more rapidly later on, to - # arrive at the same factor up at 1000 feet and more above the - # surface. The resulting wind gust is always equal to or - # greater than using the RUC algorthm straight up. 
- - if self._algorithm == 'Power': - delta = 1 - (pow((2 * (1 - delta)), 2)) / 2 - - ################################################################ - # Adjust wind speed difference by chosen coefficient - deltaSpd *= delta - - gustV = Wind[0] + deltaSpd - ################################################################ - # Make a mask where this WindGust is > current WindGust - newGust = gustV > WindGust - - ################################################################ - # Assign new WindGust where new WindGust is greater and the - # surface is still within the mixed layer - WindGustMask = newGust & self._mixedLayer[i] - WindGust[WindGustMask] = gustV[WindGustMask] - - ######################################################################## - # Return the computed WindGust - return WindGust - - - - # Optional Methods - # These methods can have the additional argument: - # ToolTimeRange -- selected time range over which we are running the tool - - def preProcessTool(self, varDict): - # Called once at beginning of Tool - # Cannot have WeatherElement or Grid arguments - - ######################################################################## - # Get site ID - try: - siteID=self.mutableID().siteID() - except: - siteID=self._SITEID - - ######################################################################## - # Get name of chosen model - and fix it up so we can use it later on. - # This will grab the latest version of the chosen model from the D2D - # netCDF files. 
- self._model = "%s_D2D_%s" % (siteID, varDict["Model:"]) - - ######################################################################## - # Get chosen algorithm - self._algorithm = varDict["Momentum algorithm:"] - - ######################################################################## - # Get answer if we should use BL winds - useBLwinds = varDict["Use BL Winds:"] - - ######################################################################## - # Initialize a list of model levels - self._modelCube = [] - - ######################################################################## - # Determine model levels available for each model - if self._model.find( 'GFS80') != -1 or \ - self._model.find( 'GFS') != -1: - self._modelCube = ["MB850", "MB700", "MB500", "MB400", "MB300"] - self._blCube = [] - - elif self._model.find( 'NAM12') != -1: - self._modelCube = ["MB1000", "MB950", "MB900", "MB850", "MB800", - "MB750", "MB700", "MB650", "MB600", "MB550", - "MB500", "MB450", "MB400", "MB350"] - self._blCube = ["BL030", "BL03060", "BL6090", "BL90120", "BL12015"] - - elif self._model.find( 'NAM40') != -1 or \ - self._model.find( 'NAM20') != -1: - self._modelCube = ["MB975", "MB950", "MB925", "MB900", "MB875", - "MB850", "MB825", "MB800", "MB775", "MB750", - "MB725", "MB700", "MB675", "MB650", "MB625", - "MB600", "MB550", "MB500", "MB450", "MB400", - "MB350", "MB300"] - self._blCube = ["BL030", "BL03060", "BL6090", "BL90120", "BL120150"] - - elif self._model.find( 'gfsLR') != -1: - self._modelCube = ["MB1000", "MB850", "MB700", "MB500", "MB300"] - self._blCube = [] - - elif self._model.find( 'RAP40') != -1: - self._modelCube = ["MB1000", "MB950", "MB900", "MB850", "MB800", - "MB750", "MB700", "MB650", "MB600", "MB550", - "MB500", "MB450", "MB400", "MB350", "MB300"] - self._blCube = ["BL030", "BL6090", "BL15018"] - - ######################################################################## - # If we should not use the BL winds - if useBLwinds is 'No': - - 
#################################################################### - # Reset the levels in the BL cube so we don't do anything - self._blCube = [] - - ######################################################################## - # Determine height of all possible BL levels available for each model. - # If level is not at a fixed height AGL, use the hydrostatic equation. - # Assume the density of the air is 1 kg/m3 and gravity is 9.80 m/s^2. - # The height will be in m AGL at the center of the layer. Remember - # there are 100 Pa per 1 mb. - self._blHgt = {'BL030' : (15.0 * 100.0/ 9.8), - 'BL3060' : (45.0 * 100.0 / 9.8), - 'BL03060' : (45.0 * 100.0 / 9.8), - 'BL6090' : (75.0 * 100.0 / 9.8), - 'BL90120' : (105.0 * 100.0 / 9.8), - 'BL12015' : (135.0 * 100.0 / 9.8), - 'BL120150': (135.0 * 100.0 / 9.8), - 'BL15018' : (165.0 * 100.0 / 9.8), - 'FH1829' : 1829.0, - 'FH2743' : 2743.0, - 'FH3658' : 3658.0 - } - - LogStream.logDebug(toolName, ': preProcessTool complete.') +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# New_WindGust_Tool +# +# Authors: Tom Mazza NWS Charleston, WV Created: 04/25/03 +# Matthew H. Belk NWS Taunton, MA Last Modified: 06/16/03 +# Mathewson FSL Modified: 3/30/04 +# -change in model names to OB3 names +#---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 02/10/2016 5283 nabowle Remove NGM support. 
+# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +ToolType = "numeric" +WeatherElementEdited = "WindGust" +from numpy import * +# without this, the builtin max() is used +from numpy import max +import LogStream + +# You can screen the elements for which your tool will appear by using +# a ScreenList. For example: + +#ScreenList = ["MixHgt","WindGust", "TransWind"] + +# Set up variables to be solicited from the user: +VariableList = [ + ("Momentum algorithm:", "RUC", "radio", ["RUC", "Power"]), + ("Use BL Winds:", "No", "radio", ["Yes", "No"]), + ("Model:", "NAM12", "radio", + ["GFS80", "NAM12", "gfsLR", "RAP40"]) +] + + +#Set up Class +import SmartScript +## For available commands, see SmartScript + +toolName = 'WindGustFromAlgorithm' + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Define your site ID + self._SITEID = "BOX" + + # Required Method: Execute + # Called once for each grid + # Fill in the arguments you want to use -- WeatherElement1, WeatherElement2... + + def execute(self, Wind, MixHgt, Topo, GridTimeRange): + "Determines WindGust using one of two algorithms, one from the RUC or a power relationship. This tool assumes your mixing height has already been adjusted for your surface temperatures." 
+ + sounding = self.makeNumericSounding(self._model, "wind", + self._modelCube, GridTimeRange, + noDataError=0) + + ######################################################################## + # If we don't have a model sounding at this point in time, or the + # size of the grids do not match + if sounding is None: # or sounding[0].shape != Topo.shape: + LogStream.logProblem(toolName, ': cannot obtain a Wind sounding') + return None # leaves current WindGust grid alone + + ######################################################################## + # If we made it this far, split up the sounding into its component + # cubes of height and wind + (gh_Cube, wind_Cube) = sounding + + if gh_Cube is None: + LogStream.logProblem(toolName, 'gh_Cube is None') + return None + + if wind_Cube is None: + LogStream.logProblem(toolName, 'wind_Cube is None') + return None + + ######################################################################## + # Convert topography from feet to meters + self._topo = self.ftToM(Topo) + + ######################################################################## + # Initialize a cube to hold BL wind grids + bl_WindCube = {} + + ######################################################################## + # Cycle through all the BL levels we have for this model + for lvl in self._blCube: + + #################################################################### + # Initialize BL wind grid for this level + grid = None + + #################################################################### + # If this is the NAM40/20 model + if self._model.find('NAM40') != -1: + + ################################################################ + # Get BL winds from other NAM40/NAM20 file + tempModel = self._model.replace('NAM40', 'NAM20') + + ################################################################ + # Try to get model BL winds for this time + grid = self.getGrids(tempModel, "wind", lvl, GridTimeRange, + noDataError=0) + + 
#################################################################### + # Otherwise + else: + + ################################################################ + # Try to get model BL winds for this time + grid = self.getGrids(self._model, "Wind", lvl, GridTimeRange, + noDataError=0) + + #################################################################### + # Add this grid to the BL wind cube - if it is valid + if grid != None: + + ################################################################ + # Store the wind speeds at this BL level + bl_WindCube[lvl] = grid[0] + + #################################################################### + # Otherwise + else: + + ################################################################ + # Store a placeholder + bl_WindCube[lvl] = None + + ######################################################################## + # Convert mixing height from ft ASL to m ASL + mixHgt_m = self.ftToM(MixHgt) + + ######################################################################## + # Make a 3D mask where the model sounding level is ABOVE the ground, + # but below the Mixing Height + self._mixedLayer = (gh_Cube >= self._topo) & (gh_Cube <= mixHgt_m) + ######################################################################## + # Method to compute WindGust using a version of the RUC technique + # adapted by Matthew H. Belk (BOX). 
+ + ######################################################################## + # Initialize WindGust using current 10m Wind speeds - (mag, dir) + WindGust = Wind[0] + + ######################################################################## + # Move vertically through the model BL cube + for lvl in self._blCube: + + #################################################################### + # Make a mask where this BL surface is at or below the MixHgt + blMask = MixHgt <= self._blHgt[lvl] + + #################################################################### + # If there are any points in the mixed layer at this surface, and + # there actually is a wind grid + if any(blMask) and bl_WindCube[lvl] != None: + + ################################################################ + # Get wind magnitude at current level - remember model winds + # are in m/s and need to be in kts for comparison + curMag = self.mpsToKt(bl_WindCube[lvl]) + + ################################################################ + # Compute difference between wind at this level and SFC wind + # where points are in the mixed layer + deltaSpd = curMag - Wind[0] + + ################################################################ + # Get the depth of the mixed layer to this point (m AGL) + deltaZ = self._blHgt[lvl] + + ################################################################ + # Adjust change in wind speed by a coefficient - using the + # lesser of 0.5 or (deltaZ / 2000) + # First get the factor, which will range from 0.5 to 1.0, + # higher closer to the ground + delta = max(1.0 - deltaZ/2000.0, 0.5) + + ################################################################ + # Employ the power relationship if selected: it focuses in on + # how much lower than one this factor will be (it ranges from + # no less than 1 just above the surface to 0.5 lower than 1 + # 1000 or more feet from the surface). 
The power relationship + # takes this small number (between 0 and 0.5) to the second + # power, which makes it smaller still. It actually first + # doubles it, then squares it, then halves it again. This + # causes a difference of 0 to stay 0, a difference of 0.5 to + # stay at 0.5, but a difference of 0.25 will become 0.125. + # This difference is then subtracted from one, to get a new, + # equal or larger factor by which to multiply the potential + # wind gust, to arrive at a gust potential that decreases more + # slowly at first with height, then more rapidly later on, to + # arrive at the same factor up at 1000 m and more above the + # surface. The resulting wind gust is always equal to or + # greater than using the RUC algorthm straight up. + + if self._algorithm == 'Power': + delta = 1 - (pow((2 * (1 - delta)), 2)) / 2 + + ################################################################ + # Adjust wind speed difference by chosen coefficient + deltaSpd *= delta + + gustV = Wind[0] + deltaSpd + ################################################################ + # Make a mask where this WindGust is > current WindGust + newGust = gustV > WindGust + + ################################################################ + # Assign new WindGust where new WindGust is greater and the + # surface is still within the mixed layer + WindGustMask = newGust & blMask + WindGust[WindGustMask] = gustV[WindGustMask] + + ######################################################################## + # Move vertically through the model cube + for i in range(gh_Cube.shape[0]): + + #################################################################### + # If there are any points in the mixed layer at this surface + if any(self._mixedLayer[i]): + + ################################################################ + # Get wind magnitude at current level - remember model winds + # are in m/s and need to be in kts for comparison + curMag = self.mpsToKt(wind_Cube[0][i]) + + 
################################################################ + # Compute difference between wind at this level and SFC wind + # where points are in the mixed layer + deltaSpd = curMag - Wind[0] + + ################################################################ + # Get the depth of the mixed layer to this point (m AGL) + deltaZ = gh_Cube[i] - self._topo + + ################################################################ + # Adjust change in wind speed by a coefficient - using the + # lesser of 0.5 or (deltaZ / 2000) + # First get the factor, which will range from 0.5 to 1.0, + # higher closer to the ground + delta = max(1.0-deltaZ/2000.0,0.5) + + ################################################################ + # Employ the power relationship if selected: it focuses in on + # how much lower than one this factor will be (it ranges from + # no less than 1 just above the surface to 0.5 lower than 1 + # 1000 or more feet from the surface). The power relationship + # takes this small number (between 0 and 0.5) to the second + # power, which makes it smaller still. It actually first + # doubles it, then squares it, then halves it again. This + # causes a difference of 0 to stay 0, a difference of 0.5 to + # stay at 0.5, but a difference of 0.25 will become 0.125. + # This difference is then subtracted from one, to get a new, + # equal or larger factor by which to multiply the potential + # wind gust, to arrive at a gust potential that decreases more + # slowly at first with height, then more rapidly later on, to + # arrive at the same factor up at 1000 feet and more above the + # surface. The resulting wind gust is always equal to or + # greater than using the RUC algorthm straight up. 
+ + if self._algorithm == 'Power': + delta = 1 - (pow((2 * (1 - delta)), 2)) / 2 + + ################################################################ + # Adjust wind speed difference by chosen coefficient + deltaSpd *= delta + + gustV = Wind[0] + deltaSpd + ################################################################ + # Make a mask where this WindGust is > current WindGust + newGust = gustV > WindGust + + ################################################################ + # Assign new WindGust where new WindGust is greater and the + # surface is still within the mixed layer + WindGustMask = newGust & self._mixedLayer[i] + WindGust[WindGustMask] = gustV[WindGustMask] + + ######################################################################## + # Return the computed WindGust + return WindGust + + + + # Optional Methods + # These methods can have the additional argument: + # ToolTimeRange -- selected time range over which we are running the tool + + def preProcessTool(self, varDict): + # Called once at beginning of Tool + # Cannot have WeatherElement or Grid arguments + + ######################################################################## + # Get site ID + try: + siteID=self.mutableID().siteID() + except: + siteID=self._SITEID + + ######################################################################## + # Get name of chosen model - and fix it up so we can use it later on. + # This will grab the latest version of the chosen model from the D2D + # netCDF files. 
+ self._model = "%s_D2D_%s" % (siteID, varDict["Model:"]) + + ######################################################################## + # Get chosen algorithm + self._algorithm = varDict["Momentum algorithm:"] + + ######################################################################## + # Get answer if we should use BL winds + useBLwinds = varDict["Use BL Winds:"] + + ######################################################################## + # Initialize a list of model levels + self._modelCube = [] + + ######################################################################## + # Determine model levels available for each model + if self._model.find( 'GFS80') != -1 or \ + self._model.find( 'GFS') != -1: + self._modelCube = ["MB850", "MB700", "MB500", "MB400", "MB300"] + self._blCube = [] + + elif self._model.find( 'NAM12') != -1: + self._modelCube = ["MB1000", "MB950", "MB900", "MB850", "MB800", + "MB750", "MB700", "MB650", "MB600", "MB550", + "MB500", "MB450", "MB400", "MB350"] + self._blCube = ["BL030", "BL03060", "BL6090", "BL90120", "BL12015"] + + elif self._model.find( 'NAM40') != -1 or \ + self._model.find( 'NAM20') != -1: + self._modelCube = ["MB975", "MB950", "MB925", "MB900", "MB875", + "MB850", "MB825", "MB800", "MB775", "MB750", + "MB725", "MB700", "MB675", "MB650", "MB625", + "MB600", "MB550", "MB500", "MB450", "MB400", + "MB350", "MB300"] + self._blCube = ["BL030", "BL03060", "BL6090", "BL90120", "BL120150"] + + elif self._model.find( 'gfsLR') != -1: + self._modelCube = ["MB1000", "MB850", "MB700", "MB500", "MB300"] + self._blCube = [] + + elif self._model.find( 'RAP40') != -1: + self._modelCube = ["MB1000", "MB950", "MB900", "MB850", "MB800", + "MB750", "MB700", "MB650", "MB600", "MB550", + "MB500", "MB450", "MB400", "MB350", "MB300"] + self._blCube = ["BL030", "BL6090", "BL15018"] + + ######################################################################## + # If we should not use the BL winds + if useBLwinds is 'No': + + 
#################################################################### + # Reset the levels in the BL cube so we don't do anything + self._blCube = [] + + ######################################################################## + # Determine height of all possible BL levels available for each model. + # If level is not at a fixed height AGL, use the hydrostatic equation. + # Assume the density of the air is 1 kg/m3 and gravity is 9.80 m/s^2. + # The height will be in m AGL at the center of the layer. Remember + # there are 100 Pa per 1 mb. + self._blHgt = {'BL030' : (15.0 * 100.0/ 9.8), + 'BL3060' : (45.0 * 100.0 / 9.8), + 'BL03060' : (45.0 * 100.0 / 9.8), + 'BL6090' : (75.0 * 100.0 / 9.8), + 'BL90120' : (105.0 * 100.0 / 9.8), + 'BL12015' : (135.0 * 100.0 / 9.8), + 'BL120150': (135.0 * 100.0 / 9.8), + 'BL15018' : (165.0 * 100.0 / 9.8), + 'FH1829' : 1829.0, + 'FH2743' : 2743.0, + 'FH3658' : 3658.0 + } + + LogStream.logDebug(toolName, ': preProcessTool complete.') diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getGridsTool.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getGridsTool.py index 029b071079..956ef8f2eb 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getGridsTool.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getGridsTool.py @@ -1,146 +1,146 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# getGridsTool - generic smart tool to use the SmartScript class getGrids -# method to get the max, sum, etc. value of all grids of the -# element defined by varDict["Element"] into the active element. -# The active element should span multiple grids of -# varDict["Element"]. This tool is designed to be -# called by a procedure and not run interactively. -# This is a more generic version of getSumGrids and getMaxGrid -# SmartTools and could be used to replace these tools by adding -# varDict["Mode"] = "method" to the calling procedure. Method -# is any value accepted by the "mode" argument to getGrids plus -# "Last" to get the last grid and "MaxTime" to get the grid with -# the largest percentage time coverage. -# -# Author: Paul Jendrowski WFO Blacksburg, VA (RNK) -# paul.jendrowski@noaa.gov -# Version: 1.0 Date: 11/08/2004 -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -ToolType = "numeric" -WeatherElementEdited = "variableElement" -from numpy import * - -# You can screen the elements for which your tool will appear by using -# a ScreenList. For example: -# -# This tool is normally run from a procedure so hide it! -ScreenList = [""] - -# Set up Class -import SmartScript -## For available commands, see SmartScript - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, GridTimeRange, varDict): - """Gets value of another set of grids. - This tool should be run from a procedure and not interactively.""" - - # Check for required arguments in varDict - if varDict == None: - msg="getGridsTool - No element defined.\n" - msg += " This tool should not be run interactively!" - self.noData(msg) - - if varDict.has_key("Element"): - we = varDict["Element"] - else: - msg="getGridsTool - No element defined.\n" - msg += " This tool should not be run interactively!" - self.noData(msg) - - if varDict.has_key("Mode"): - self.__getMode = varDict["Mode"] - else: - msg="getGridsTool - No Mode defined.\n" - msg += " This tool should not be run interactively!" - self.noData(msg) - - if self.__getMode == "MaxTime" or self.__getMode == "Last": - # Determine the Wx grids that correspond to this grid - # Note: There could be more than one Wx grid within - # the grid time range. If this is the case, - # Wx_GridInfo will be a list. - # If not, make it into a list for processing. - gridInfo = self.getGridInfo("Fcst", varDict["Element"], "SFC", - GridTimeRange) - - if gridInfo is None: - Wx_GridInfo = [] - elif isinstance(gridInfo, (list,tuple)): - Wx_GridInfo = gridInfo - else: - Wx_GridInfo = [gridInfo] - - if self.__getMode == "MaxTime": - - # Determine the percentage of GridTimeRange (PoP Grid) that each - # Wx grid takes up. Put this percentage into a list. 
- # (Note: we have to name the percentage list with the prefix, - # self.__, so that it can be used in the execute method). - max=0 - i=0 - index = -1 - for info in Wx_GridInfo: - wxDuration = float(GridTimeRange.intersection( - info.gridTime()).duration()) - if wxDuration > max: - index=i - max = wxDuration - i += 1 - elif len(Wx_GridInfo) > 0: - index = len(Wx_GridInfo)-1 - else: - index = -1 - - if (self.__getMode == "MaxTime" or self.__getMode == "Last") and index >= 0: - WxLst = self.getGrids("Fcst", we, "SFC", GridTimeRange, mode="List", noDataError=0) - if WxLst is not None and len(WxLst) > index: - Wx = WxLst[index] - else: - Wx = None - else: - Wx = self.getGrids("Fcst",we,"SFC",GridTimeRange, - mode=self.__getMode, noDataError=0) - - # Returning None is bad. - # If this is a temporary grid, try to get a grid from the permanent grid. - # This is most likely the scratch grid we're filling in, but it beats nothing. - if Wx is None and len(we) > 3 and we[0:3]=="tmp": - we = we[3:] - Wx = self.getGrids("Fcst", we, "Sfc", GridTimeRange, mode="Last") - - - # Return the new value - return Wx +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# getGridsTool - generic smart tool to use the SmartScript class getGrids +# method to get the max, sum, etc. value of all grids of the +# element defined by varDict["Element"] into the active element. +# The active element should span multiple grids of +# varDict["Element"]. This tool is designed to be +# called by a procedure and not run interactively. 
+# This is a more generic version of getSumGrids and getMaxGrid +# SmartTools and could be used to replace these tools by adding +# varDict["Mode"] = "method" to the calling procedure. Method +# is any value accepted by the "mode" argument to getGrids plus +# "Last" to get the last grid and "MaxTime" to get the grid with +# the largest percentage time coverage. +# +# Author: Paul Jendrowski WFO Blacksburg, VA (RNK) +# paul.jendrowski@noaa.gov +# Version: 1.0 Date: 11/08/2004 +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +ToolType = "numeric" +WeatherElementEdited = "variableElement" +from numpy import * + +# You can screen the elements for which your tool will appear by using +# a ScreenList. For example: +# +# This tool is normally run from a procedure so hide it! +ScreenList = [""] + +# Set up Class +import SmartScript +## For available commands, see SmartScript + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, GridTimeRange, varDict): + """Gets value of another set of grids. + This tool should be run from a procedure and not interactively.""" + + # Check for required arguments in varDict + if varDict == None: + msg="getGridsTool - No element defined.\n" + msg += " This tool should not be run interactively!" + self.noData(msg) + + if "Element" in varDict: + we = varDict["Element"] + else: + msg="getGridsTool - No element defined.\n" + msg += " This tool should not be run interactively!" + self.noData(msg) + + if "Mode" in varDict: + self.__getMode = varDict["Mode"] + else: + msg="getGridsTool - No Mode defined.\n" + msg += " This tool should not be run interactively!" 
+ self.noData(msg) + + if self.__getMode == "MaxTime" or self.__getMode == "Last": + # Determine the Wx grids that correspond to this grid + # Note: There could be more than one Wx grid within + # the grid time range. If this is the case, + # Wx_GridInfo will be a list. + # If not, make it into a list for processing. + gridInfo = self.getGridInfo("Fcst", varDict["Element"], "SFC", + GridTimeRange) + + if gridInfo is None: + Wx_GridInfo = [] + elif isinstance(gridInfo, (list,tuple)): + Wx_GridInfo = gridInfo + else: + Wx_GridInfo = [gridInfo] + + if self.__getMode == "MaxTime": + + # Determine the percentage of GridTimeRange (PoP Grid) that each + # Wx grid takes up. Put this percentage into a list. + # (Note: we have to name the percentage list with the prefix, + # self.__, so that it can be used in the execute method). + max=0 + i=0 + index = -1 + for info in Wx_GridInfo: + wxDuration = float(GridTimeRange.intersection( + info.gridTime()).duration()) + if wxDuration > max: + index=i + max = wxDuration + i += 1 + elif len(Wx_GridInfo) > 0: + index = len(Wx_GridInfo)-1 + else: + index = -1 + + if (self.__getMode == "MaxTime" or self.__getMode == "Last") and index >= 0: + WxLst = self.getGrids("Fcst", we, "SFC", GridTimeRange, mode="List", noDataError=0) + if WxLst is not None and len(WxLst) > index: + Wx = WxLst[index] + else: + Wx = None + else: + Wx = self.getGrids("Fcst",we,"SFC",GridTimeRange, + mode=self.__getMode, noDataError=0) + + # Returning None is bad. + # If this is a temporary grid, try to get a grid from the permanent grid. + # This is most likely the scratch grid we're filling in, but it beats nothing. 
+ if Wx is None and len(we) > 3 and we[0:3]=="tmp": + we = we[3:] + Wx = self.getGrids("Fcst", we, "Sfc", GridTimeRange, mode="Last") + + + # Return the new value + return Wx diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getMaxGrid.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getMaxGrid.py index bd605b32da..82f69e94f3 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getMaxGrid.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getMaxGrid.py @@ -1,71 +1,71 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# getMaxGrid - generic smart tool to get the max value of all grids of the -# element defined by varDict["Element"] into the active element. -# The active element should span multiple grids of -# varDict["Element"]. This tool is designed to be -# called by a procedure and not run interactively -# -# Author: Paul Jendrowski WFO Blacksburg, VA (RNK) -# paul.jendrowski@noaa.gov -# Version: 1.0 Date: 02/21/2003 -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -ToolType = "numeric" -WeatherElementEdited = "variableElement" -from numpy import * - -# You can screen the elements for which your tool will appear by using -# a ScreenList. For example: -# -# This tool is normally run from a procedure so hide it! -ScreenList = [""] - -# Set up Class -import SmartScript -## For available commands, see SmartScript - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, GridTimeRange, varDict): - "Gets max value of another set of grids. This tool should be run from a procedure and not interactively." - - if varDict is None or not varDict.has_key("Element"): - msg="getMaxGrid - No element defined." - msg += " This tool should not be run interactively!" - self.noData(msg) - we = varDict["Element"] - grid = self.getGrids("Fcst",we,"SFC",GridTimeRange,mode="Max") - - # Return the new value - return grid - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# getMaxGrid - generic smart tool to get the max value of all grids of the +# element defined by varDict["Element"] into the active element. +# The active element should span multiple grids of +# varDict["Element"]. This tool is designed to be +# called by a procedure and not run interactively +# +# Author: Paul Jendrowski WFO Blacksburg, VA (RNK) +# paul.jendrowski@noaa.gov +# Version: 1.0 Date: 02/21/2003 +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +ToolType = "numeric" +WeatherElementEdited = "variableElement" +from numpy import * + +# You can screen the elements for which your tool will appear by using +# a ScreenList. For example: +# +# This tool is normally run from a procedure so hide it! +ScreenList = [""] + +# Set up Class +import SmartScript +## For available commands, see SmartScript + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, GridTimeRange, varDict): + "Gets max value of another set of grids. This tool should be run from a procedure and not interactively." + + if varDict is None or "Element" not in varDict: + msg="getMaxGrid - No element defined." + msg += " This tool should not be run interactively!" 
+ self.noData(msg) + we = varDict["Element"] + grid = self.getGrids("Fcst",we,"SFC",GridTimeRange,mode="Max") + + # Return the new value + return grid + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getSumGrids.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getSumGrids.py index 5803912457..da702553a9 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getSumGrids.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/getSumGrids.py @@ -1,68 +1,68 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# getSumGrids - generic smart tool to sum into the active element the grids -# defined by varDict["Element"]. This tool is designed to be -# called by a procedure and generally only for a snow or qpf grid -# -# Author: Paul Jendrowski WFO Blacksburg, VA (RNK) -# paul.jendrowski@noaa.gov -# Version: 1.0 Date: 02/21/2003 -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -ToolType = "numeric" -WeatherElementEdited = "variableElement" -from numpy import * - -# You can screen the elements for which your tool will appear by using -# a ScreenList. For example: -# -# This tool is normally run from a procedure so hide it! -ScreenList = [""] - -# Set up Class -import SmartScript -## For available commands, see SmartScript - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - def execute(self, GridTimeRange, varDict): - "Sums a grid into current element. This tool should be run from a procedure and not interactively." - - if varDict is None or not varDict.has_key("Element"): - msg="getSumGrids - No element defined." - msg += " This tool should not be run interactively!" - self.noData(msg) - we = varDict["Element"] - grid = self.getGrids("Fcst",we,"SFC",GridTimeRange,mode="Sum") - - # Return the new value - return grid +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# getSumGrids - generic smart tool to sum into the active element the grids +# defined by varDict["Element"]. This tool is designed to be +# called by a procedure and generally only for a snow or qpf grid +# +# Author: Paul Jendrowski WFO Blacksburg, VA (RNK) +# paul.jendrowski@noaa.gov +# Version: 1.0 Date: 02/21/2003 +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +ToolType = "numeric" +WeatherElementEdited = "variableElement" +from numpy import * + +# You can screen the elements for which your tool will appear by using +# a ScreenList. For example: +# +# This tool is normally run from a procedure so hide it! +ScreenList = [""] + +# Set up Class +import SmartScript +## For available commands, see SmartScript + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + def execute(self, GridTimeRange, varDict): + "Sums a grid into current element. This tool should be run from a procedure and not interactively." + + if varDict is None or "Element" not in varDict: + msg="getSumGrids - No element defined." + msg += " This tool should not be run interactively!" 
+ self.noData(msg) + we = varDict["Element"] + grid = self.getGrids("Fcst",we,"SFC",GridTimeRange,mode="Sum") + + # Return the new value + return grid diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/serpFile.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/serpFile.py index 509a7fee0f..605976e287 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/serpFile.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/smartTools/serpFile.py @@ -1,265 +1,265 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# serpFile - version 2.0 -# -# Does the equivalent of the plain 'serp' tool - but gets the location -# and control point information from a file. Thus, this tool needs no -# user input and can be run as part of a cronjob, etc. -# -# The file is a comma delimited file where each data line contains a -# station ID (ignored), a latitude, a longitude, and a data value. 
-# Typical data lines might look like this: -# -# BOI,43.57,-116.22,50.5 -# TWF,42.48,-114.48,43 # comment about this line -# -# To make the file more readable, you can have comment lines which -# start with a # character or are simply whitespace. -# -# Any lines with less than 4 comma delimited values are ignored. Lines -# with more than 4 comma delimited values are potentially used - but -# fields after the first 4 are ignored. -# -# Stations located off the GFE grid are ignored. -# -# Multiple sites lying on the same GFE gridpoint are ignored (only -# the first one is used - and a status bar message is produced -# which tells you that the second (or more) station is being ignored). -# -# No timeRange checking is done - the tool simply operates on the -# current grid, using the values supplied in the file and stores the -# results back into the same grid. Clipping is performed so that the -# values of the new grid do not exceed the allowable values for the -# grid. -# -# This works for SCALAR grids only - not vectors or weather/discrete -# elements -# -# Author: Tim Barker - SOO BOI (serp tool is from Les Colin) -# 2014/06/11 - Modified a couple of things to make it cleaner in A2 -# 2010/08/05 - updated to use ObjAnal utility -# 2003/10/16 - original implementation based on serp tool - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -#======================================================================= -# START OF CONFIGURATION SECTION -# -# The filename to read -# -FILENAME="/tmp/lsrinfo.dat" -# -# If you wish to include elevation adjustment (so that adjustments -# are based on elevation differences as well as horizontal distance -# from the point) then set elevation_factor to a non-zero value. -# -# elevation_factor should be in units of feet/km. 
-# -# If you set it to 1, then 1 foot of elevation difference is -# equivalent to 1km of horizontal distance (this means -# that elevation is VERY important in the analysis). -# -# if you set it to 1000, then 1000 feet of elevation -# difference is equal to 1 km of horizontal distance -# (this means that elevation is NOT important to the -# analysis). -# -# To turn off elevation completely - set the elevation_factor to zero. -# which is the default -# -# A value of 36 feet/km seems work reasonably well for including SOME -# influence of elevation - but not too much. -# -elevation_factor=0.0 -# -# END OF CONFIGURATION SECTION -#======================================================================= -ToolType = "numeric" -WeatherElementEdited = "variableElement" -ScreenList = ["SCALAR"] - -import numpy as np -import SmartScript -import ObjAnal -import os,re - -class Tool (SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - self._dbss=dbss - - def execute(self, Topo, variableElement, variableElement_GridInfo, varDict): - "Match specified points to values in file using objective analysis" - print "Tool serp_file starting" - # - # Setup the utility - # - self.OA=ObjAnal.ObjAnal(self._dbss) - # - # value limits for grid being edited - # - self.valmax=variableElement_GridInfo.getMaxValue() - self.valmin=variableElement_GridInfo.getMinValue() - # - # read data lines from file - # - filename=FILENAME - datalines=self.readFILE(filename) - if len(datalines)<1: - msg="No data in file %s, so grid left unchanged"%(filename) - self.statusBarMsg(msg,"S") - print msg - self.cancel() - # - # setup data locations from file - # - valuelist=self.getDataLocations(datalines,variableElement,Topo) - if (len(valuelist)<1): - msg="No valid data in file %s, so grid left unchanged"%(filename) - self.statusBarMsg(msg,"S") - print msg - self.cancel() - # - # - # - new=self.OA.ObjectiveAnalysis(valuelist,variableElement,"serp", - 
elevfactor=elevation_factor) - # - # clip to grid min/max - # - newclip=np.clip(new,self.valmin,self.valmax) - print "Tool serp_file complete" - return newclip - #================================================================= - # - # Read data values from the data lines - # - def getDataLocations(self,datalines,variableElement,Topo): - # - # setup storage for location info - # - valuelist=[] - self.xloclist=[] - self.yloclist=[] - # - # decode data lines into location info - # - for line in datalines: - (id,latstr,lonstr,valuestr)=line.split(",",3) - latstr=re.sub('[^-0123456789.]','',latstr) - lonstr=re.sub('[^-0123456789.]','',lonstr) - valuestr=re.sub(',.*$','',valuestr) # get rid of any more comma-delimited things at end of line - valuestr=re.sub('#.*$','',valuestr) # get rid of any inline comments at end of field - valuestr=re.sub('[^-0123456789.]','',valuestr) # get rid of non-numeric characters in remaining value - latf=float(latstr) - lonf=float(lonstr) - if (latf<-90.0)or(latf>90.0)or(lonf<-180.0)or(lonf>180.0): - msg="Invalid lat/lon ignored: %s"%line - self.statusBarMsg(msg,"S") - print msg - continue - # - # make sure point is on grid - # - (x,y)=self.getGridCell(latf,lonf) - if ((x is None)or(y is None)): - msg="Data for %s ignored (%6.3f,%8.3f) - location not on GFE grid" % (id,latf,lonf) - self.statusBarMsg(msg,"S") - print msg - continue - xint=int(x) - yint=int(y) - # - # Make sure point has not already been specified - # - if len(self.xloclist)>0: - skip=0 - for i in range(len(self.xloclist)): - if ((self.xloclist[i]==xint) and (self.yloclist[i]==yint)): - msg="Data for %s ignored - data for this GFE gridpoint already specified"%(id) - self.statusBarMsg(msg,"S") - print msg - skip=1 - break - if skip==1: - continue - # - # Make sure value is valid - # - valf=float(valuestr) - if (valfself.valmax): - msg="%s value of %.3f clipped to allowable range of %f-%f"%(id,valf,self.valmin,self.valmax) - self.statusBarMsg(msg,"S") - print msg - 
valf=float(self.valmax) - # - # add it to list - # - valuelist.append((id,xint,yint,Topo[yint,xint],valf)) - self.xloclist.append(xint) - self.yloclist.append(yint) - return valuelist - #=================================================================== - # readFILE - read specified FILE returning only data lines where - # 4 or more comma delimited values occur - # - def readFILE(self,filename): - datalines=[] - # - # make sure the file exists - # - if (not os.path.exists(filename)): - msg="Could not find file %s" % (filename) - self.statusBarMsg(msg,"S") - print msg - return datalines - # - # read the file - # - filespec=file(filename,'r') - lines=filespec.readlines() - filespec.close() - # - # get only data lines - # - for line in lines: - stripline=line.strip() # ignore whitespace at begin/end - if len(stripline)<1: - continue - if line[0:1]=="#": # ignore comment lines - continue - pieces=stripline.split(",",3) - if len(pieces)!=4: # ignore lines with less than 4 comma fields - continue - datalines.append(stripline) - return datalines +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# serpFile - version 2.0 +# +# Does the equivalent of the plain 'serp' tool - but gets the location +# and control point information from a file. Thus, this tool needs no +# user input and can be run as part of a cronjob, etc. +# +# The file is a comma delimited file where each data line contains a +# station ID (ignored), a latitude, a longitude, and a data value. +# Typical data lines might look like this: +# +# BOI,43.57,-116.22,50.5 +# TWF,42.48,-114.48,43 # comment about this line +# +# To make the file more readable, you can have comment lines which +# start with a # character or are simply whitespace. +# +# Any lines with less than 4 comma delimited values are ignored. Lines +# with more than 4 comma delimited values are potentially used - but +# fields after the first 4 are ignored. +# +# Stations located off the GFE grid are ignored. +# +# Multiple sites lying on the same GFE gridpoint are ignored (only +# the first one is used - and a status bar message is produced +# which tells you that the second (or more) station is being ignored). +# +# No timeRange checking is done - the tool simply operates on the +# current grid, using the values supplied in the file and stores the +# results back into the same grid. Clipping is performed so that the +# values of the new grid do not exceed the allowable values for the +# grid. +# +# This works for SCALAR grids only - not vectors or weather/discrete +# elements +# +# Author: Tim Barker - SOO BOI (serp tool is from Les Colin) +# 2014/06/11 - Modified a couple of things to make it cleaner in A2 +# 2010/08/05 - updated to use ObjAnal utility +# 2003/10/16 - original implementation based on serp tool + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +#======================================================================= +# START OF CONFIGURATION SECTION +# +# The filename to read +# +FILENAME="/tmp/lsrinfo.dat" +# +# If you wish to include elevation adjustment (so that adjustments +# are based on elevation differences as well as horizontal distance +# from the point) then set elevation_factor to a non-zero value. +# +# elevation_factor should be in units of feet/km. +# +# If you set it to 1, then 1 foot of elevation difference is +# equivalent to 1km of horizontal distance (this means +# that elevation is VERY important in the analysis). +# +# if you set it to 1000, then 1000 feet of elevation +# difference is equal to 1 km of horizontal distance +# (this means that elevation is NOT important to the +# analysis). +# +# To turn off elevation completely - set the elevation_factor to zero. +# which is the default +# +# A value of 36 feet/km seems work reasonably well for including SOME +# influence of elevation - but not too much. 
+# +elevation_factor=0.0 +# +# END OF CONFIGURATION SECTION +#======================================================================= +ToolType = "numeric" +WeatherElementEdited = "variableElement" +ScreenList = ["SCALAR"] + +import numpy as np +import SmartScript +import ObjAnal +import os,re + +class Tool (SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + self._dbss=dbss + + def execute(self, Topo, variableElement, variableElement_GridInfo, varDict): + "Match specified points to values in file using objective analysis" + print("Tool serp_file starting") + # + # Setup the utility + # + self.OA=ObjAnal.ObjAnal(self._dbss) + # + # value limits for grid being edited + # + self.valmax=variableElement_GridInfo.getMaxValue() + self.valmin=variableElement_GridInfo.getMinValue() + # + # read data lines from file + # + filename=FILENAME + datalines=self.readFILE(filename) + if len(datalines)<1: + msg="No data in file %s, so grid left unchanged"%(filename) + self.statusBarMsg(msg,"S") + print(msg) + self.cancel() + # + # setup data locations from file + # + valuelist=self.getDataLocations(datalines,variableElement,Topo) + if (len(valuelist)<1): + msg="No valid data in file %s, so grid left unchanged"%(filename) + self.statusBarMsg(msg,"S") + print(msg) + self.cancel() + # + # + # + new=self.OA.ObjectiveAnalysis(valuelist,variableElement,"serp", + elevfactor=elevation_factor) + # + # clip to grid min/max + # + newclip=np.clip(new,self.valmin,self.valmax) + print("Tool serp_file complete") + return newclip + #================================================================= + # + # Read data values from the data lines + # + def getDataLocations(self,datalines,variableElement,Topo): + # + # setup storage for location info + # + valuelist=[] + self.xloclist=[] + self.yloclist=[] + # + # decode data lines into location info + # + for line in datalines: + (id,latstr,lonstr,valuestr)=line.split(",",3) + 
latstr=re.sub('[^-0123456789.]','',latstr) + lonstr=re.sub('[^-0123456789.]','',lonstr) + valuestr=re.sub(',.*$','',valuestr) # get rid of any more comma-delimited things at end of line + valuestr=re.sub('#.*$','',valuestr) # get rid of any inline comments at end of field + valuestr=re.sub('[^-0123456789.]','',valuestr) # get rid of non-numeric characters in remaining value + latf=float(latstr) + lonf=float(lonstr) + if (latf<-90.0)or(latf>90.0)or(lonf<-180.0)or(lonf>180.0): + msg="Invalid lat/lon ignored: %s"%line + self.statusBarMsg(msg,"S") + print(msg) + continue + # + # make sure point is on grid + # + (x,y)=self.getGridCell(latf,lonf) + if ((x is None)or(y is None)): + msg="Data for %s ignored (%6.3f,%8.3f) - location not on GFE grid" % (id,latf,lonf) + self.statusBarMsg(msg,"S") + print(msg) + continue + xint=int(x) + yint=int(y) + # + # Make sure point has not already been specified + # + if len(self.xloclist)>0: + skip=0 + for i in range(len(self.xloclist)): + if ((self.xloclist[i]==xint) and (self.yloclist[i]==yint)): + msg="Data for %s ignored - data for this GFE gridpoint already specified"%(id) + self.statusBarMsg(msg,"S") + print(msg) + skip=1 + break + if skip==1: + continue + # + # Make sure value is valid + # + valf=float(valuestr) + if (valfself.valmax): + msg="%s value of %.3f clipped to allowable range of %f-%f"%(id,valf,self.valmin,self.valmax) + self.statusBarMsg(msg,"S") + print(msg) + valf=float(self.valmax) + # + # add it to list + # + valuelist.append((id,xint,yint,Topo[yint,xint],valf)) + self.xloclist.append(xint) + self.yloclist.append(yint) + return valuelist + #=================================================================== + # readFILE - read specified FILE returning only data lines where + # 4 or more comma delimited values occur + # + def readFILE(self,filename): + datalines=[] + # + # make sure the file exists + # + if (not os.path.exists(filename)): + msg="Could not find file %s" % (filename) + self.statusBarMsg(msg,"S") + 
print(msg) + return datalines + # + # read the file + # + filespec=file(filename,'r') + lines=filespec.readlines() + filespec.close() + # + # get only data lines + # + for line in lines: + stripline=line.strip() # ignore whitespace at begin/end + if len(stripline)<1: + continue + if line[0:1]=="#": # ignore comment lines + continue + pieces=stripline.split(",",3) + if len(pieces)!=4: # ignore lines with less than 4 comma fields + continue + datalines.append(stripline) + return datalines diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HLSTCV_Common.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HLSTCV_Common.py index 4130ff1fbb..9af711f7df 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HLSTCV_Common.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HLSTCV_Common.py @@ -1,1685 +1,1684 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# GFE Training Guide->GFE Text Products User Guide section of the GFE Online -# Help for guidance on creating a new text product. 
-## - -# Version 2017.10.04-0 - -import GenericHazards -import JsonSupport -import LocalizationSupport -import string, time, os, errno, re, types, copy, collections -import LogStream, ModuleAccessor, SampleAnalysis, EditAreaUtils -import math -import pprint - -from AbsTime import * -from StartupDialog import IFPDialog as Dialog -from LockingFile import File - -from com.raytheon.viz.core.mode import CAVEMode - -AWIPS_ENVIRON = "AWIPS2" - -class TextProduct(GenericHazards.TextProduct): - Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) - - def __init__(self): - GenericHazards.TextProduct.__init__(self) - self._pp = pprint.PrettyPrinter() - - ############################################################### - ### Hazards and Additional Hazards - ### allowedHazards is used for VTEC records and summary - ### headlines - ### allowedHeadlines are additional hazards reported in - ### certain sections - ############################################################### - - ############################################################### - ### Initialization - ############################################################### - - ############################################################### - ### Analysis Lists, SampleAnalysis Overrides and other - ### analysis related methods - ############################################################### - - ############################################################### - ### Product Parts Implementation - ############################################################### - - ############################################################### - ### Product Dictionary methods for creating, populating and - ### formatting the product dictionary - ############################################################### - - ############################################################### - ### Sampling and Statistics related methods - ############################################################### - - 
############################################################### - ### Area, Zone and Segment related methods - ############################################################### - - ############################################################### - ### Hazards related methods - ############################################################### - - ############################################################### - ### Time related methods - ############################################################### - - ############################################################### - ### Storm Information and TCP related methods - ############################################################### - - ############################################################### - ### Advisory related methods - ############################################################### - - ############################################################### - ### GUI related methods - ############################################################### - - - ############################################################### - ### Hazards and Additional Hazards - - def allowedHazards(self): - tropicalActions = ["NEW", "EXA", "CAN", "CON"] - return [ - ('HU.W',tropicalActions,'Hurricane'), - ('HU.A',tropicalActions,'Hurricane'), - ('SS.W',tropicalActions,'Surge'), - ('SS.A',tropicalActions,'Surge'), - ('TR.W',tropicalActions,'Tropical'), - ('TR.A',tropicalActions,'Tropical'), - ] - - def allowedHeadlines(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - return [ - ('FF.A', allActions, 'Flood'), # FLASH FLOOD WATCH - ('FA.A', allActions, 'Flood'), # FLOOD WATCH - ('TO.A', allActions, 'Convective'), # TORNADO WATCH - ] - - ############################################################### - ### Initialization - - def _initializeVariables(self, argDict): - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - self._argDict = argDict - self._productID = 
self._pil[0:3].upper() - - argDict["definition"] = self._definition - - self._initializeTimeVariables(argDict) - - self._initializeHazardsTable(argDict) - - error = self._initializeStormInformation() - if error is not None: - return error - - # Set up the areaDictionary for all to use - accessor = ModuleAccessor.ModuleAccessor() - self._areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") - - self._tpc = TextProductCommon() - self._tpc.setUp(self._areaDict) - - return None - - ############################################################### - ### Analysis Lists, SampleAnalysis Overrides and other - ### analysis related methods - - def moderated_dict(self, parmHisto, timeRange, componentName): - """ - Specifies the lower percentages and upper percentages of - data to be thrown out for moderated stats. - """ - # COMMENT: This dictionary defines the low and high limit at which - # outliers will be removed when calculating moderated stats. - # By convention the first value listed is the percentage - # allowed for low values and second the percentage allowed - # for high values. 
- - # Get Baseline thresholds - dict = SampleAnalysis.SampleAnalysis.moderated_dict( - self, parmHisto, timeRange, componentName) - - # Change thresholds - dict["Wind"] = (0, 15) - dict["WindGust"] = (0, 15) - dict["pws34int"] = (0, 5) - dict["pws64int"] = (0, 5) - dict["pwsD34"] = (0, 5) - dict["pwsN34"] = (0, 5) - dict["pwsD64"] = (0, 5) - dict["pwsN64"] = (0, 5) - dict["InundationMax"] = (0, 3) - dict["InundationTiming"] = (0, 3) - return dict - - ############################################################### - ### Product Parts Implementation - - ################# Product Level - - def _wmoHeader(self, productDict, productSegmentGroup, arguments=None): - headerDict = collections.OrderedDict() - headerDict['TTAAii'] = self._wmoID - headerDict['originatingOffice'] = self._fullStationID - headerDict['productID'] = self._productID - headerDict['siteID'] = self._site - headerDict['fullStationID'] = self._fullStationID - headerDict['ddhhmmTime'] = self._ddhhmmTime - productDict['wmoHeader'] = headerDict - - def _productHeader(self, productDict, productSegmentGroup, arguments=None): - headerDict = dict() - headerDict['disclaimer'] = 'This XML wrapped text product should be considered COMPLETELY EXPERIMENTAL. The National Weather Service currently makes NO GUARANTEE WHATSOEVER that this product will continue to be supplied without interruption. The format of this product MAY CHANGE AT ANY TIME without notice.' 
- headerDict['cityState'] = self._wfoCityState - headerDict['stormNumber'] = self._getStormNumberStringFromTCP() - # Modify the product name to indicate test or experimental mode if necessary - self._productName = self.checkTestMode( - self._argDict, productSegmentGroup.get('productName') + self._areaName) - headerDict['productName'] = self._productName - headerDict['stormType'] = self._getStormTypeFromTCP() - headerDict['stormName'] = self._getStormNameFromTCP() - headerDict['advisoryType'] = self._getAdvisoryTypeFromTCP() - headerDict['advisoryNumber'] = self._getAdvisoryNumberStringFromTCP() - headerDict['issuedByString'] = self.getIssuedByString() - headerDict['issuanceTimeDate'] = self._timeLabel - productDict['productHeader'] = headerDict - - ################# Mixed Level - - def _ugcHeader(self, productDict, productSegmentGroup, productSegment): - # The UGC header is the formatted list of UGCs along with an expire time - # For example: 'FLZ066>068-071-072-063-069-073>075-168-172>174-070-230515-' - ugcHeader = self._tpc.formatUGCs(self._ugcs, self._expireTime) - productDict['ugcHeader'] = ugcHeader - - ################# Product Parts Processing - - def _processProductParts(self, productGenerator, productDict, productSegmentGroup, productParts): - ''' - @param productDict - @param productSegmentGroup - @param productParts - @return product dictionary created from the product parts - - Note that this method is called recursively such that a product part is allowed to be - a set of subParts specified as follows: - (subPartLabel, list of productParts for each subPart) - For example, we have - ('segments', [list of [segment product parts]]) - - # Product Dictionary - # Contains information for all formats e.g. 
- # partner XML, CAP, and Legacy text - ''' - - - if type(productParts) is types.DictType: - arguments = productParts.get('arguments') - partsList = productParts.get('partsList') - else: - partsList = productParts - - removedParts = [] - for part in partsList: - if type(part) is types.TupleType: - # e.g. subPart == 'segments', subPartsLists == list of parts for each segment - subPart, subPartsLists = part - subParts = [] - for subPartsList in subPartsLists: - subDict = collections.OrderedDict() - self._processProductParts(productGenerator, subDict, productSegmentGroup, subPartsList) - subParts.append(subDict) - # e.g. productDict['segments'] = segment dictionaries - productDict[subPart] = subParts - else: - if part not in self._noOpParts(): - execString = 'productGenerator._'+part+'(productDict, productSegmentGroup, arguments)' - exec execString - if part not in productDict: - removedParts.append(part) - - for part in removedParts: - self.debug_print("in _processProductParts - " + - "Removing product part = %s" % (part), 1) - partsList.remove(part) - - ############################################################### - ### Product Dictionary methods for creating, populating and - ### formatting the product dictionary - - def _createProductDictionary(self, productPartsGenerator, segments, areProductPartsSegmented): - # Create the product dictionary - productSegmentGroup = self._groupSegments(productPartsGenerator, - segments, - areProductPartsSegmented) - - productDict = self._initializeProductDictionary(productSegmentGroup) - productParts = productSegmentGroup.get('productParts') - productDict['productParts'] = productParts - self._processProductParts(self, productDict, productSegmentGroup, productParts) - - return productDict - - def _initializeProductDictionary(self, productSegmentGroup): - ''' - Set up the Product Dictionary for the given Product consisting of a - group of segments. - - Fill in the dictionary information for the product header. 
- - @param productSegmentGroup: holds meta information about the product - @return initialized product dictionary - - *********** - Example segmented product: - - WGUS63 KBOU 080400 - FFABOU - - URGENT - IMMEDIATE BROADCAST REQUESTED - FLOOD WATCH - NATIONAL WEATHER SERVICE DENVER CO - 400 AM GMT TUE FEB 8 2011 - - Overview Headline - Overview - - *********** - Example non-segmented product: - WGUS63 KBOU 080400 - FFWBOU - - ''' - if self._areaName != '': - self._areaName = ' for ' + self._areaName + '\n' - - # Fill in product dictionary information - productDict = collections.OrderedDict() - productDict['productID'] = self._productID - return productDict - - def _formatProductDictionary(self, formatterClass, productDict): - formatter = formatterClass(self) - product = formatter.execute(productDict) - - return product - - ############################################################### - ### Sampling and Statistics related methods - - def _getStatValue(self, statDict, element, method=None, dataType=None): - self.debug_print("In _getStatValue looking for '%s'" % (element), 1) - self.debug_print("method = %s" % (pprint.pformat(method)), 1) - self.debug_print("dataType = %s" % (pprint.pformat(dataType)), 1) - - stats = statDict.get(element, None) - self.debug_print("stats = %s" % (pprint.pformat(stats)), 1) - - if stats is None: return None - if type(stats) is types.ListType: - stats = stats[0] - stats, tr = stats - if dataType==self.VECTOR(): - stats, dir = stats - - value = self.getValue(stats, method) - self.debug_print("value = %s" % (pprint.pformat(value)), 1) - return value - - # Define a class to handle missing statistics - class StatisticsException(Exception): - pass - - ############################################################### - ### Area, Zone and Segment related methods - - def _allAreas(self): - return self._inlandAreas() + self._coastalAreas() - - def _groupSegments(self, productPartsGenerator, segments, areProductPartsSegmented): - ''' - Group the 
segments into the products. The TCV and HLS product generators - only create a single product each so there is only one product segment group. - ''' - - segment_vtecRecords_tuples = self._getSegmentVTECRecordsTuples(segments) - - productSegmentGroup = { - 'productID' : self._productID, - 'productName': self._productName, - 'geoType': 'area', - 'vtecEngine': self._hazardsTable, - 'mapType': 'publicZones', - 'segmented': areProductPartsSegmented, - 'productParts': productPartsGenerator(segment_vtecRecords_tuples), - } - - return productSegmentGroup - - def _getSegmentVTECRecordsTuples(self, segments): - segment_vtecRecords_tuples = [] - for segment in segments: - vtecRecords = self._getVtecRecords(segment) - self.debug_print("vtecRecords for %s =\n\n%s\n" % (segment, self._pp.pformat(vtecRecords))) - segment_vtecRecords_tuples.append((segment, vtecRecords)) - - return segment_vtecRecords_tuples - - def _computeIntersectAreas(self, editAreas, argDict): - editAreaUtils = EditAreaUtils.EditAreaUtils() - editAreaUtils.setUp(None, argDict) - surgeEditArea = editAreaUtils.getEditArea("StormSurgeWW_EditArea", argDict) - intersectAreas = [] - for (_, editAreaLabel) in editAreas: - editArea = editAreaUtils.getEditArea(editAreaLabel, argDict) - intersectAreaLabel = "intersect_"+editAreaLabel - intersectArea = editAreaUtils.intersectAreas(intersectAreaLabel, editArea, surgeEditArea) - grid = intersectArea.getGrid() - if grid.isAnyBitsSet(): # Make sure the intersection isn't empty - editAreaUtils.saveEditAreas([intersectArea]) # Register the new edit area with the system - intersectAreas.append((intersectAreaLabel, intersectAreaLabel)) - - return intersectAreas - - ############################################################### - ### Hazards related methods - - def _initializeHazardsTable(self, argDict): - import VTECMessageType - vtecMode = VTECMessageType.getVTECMessageType(self._productID) - argDict["vtecMode"] = vtecMode - - self._setVTECActiveTable(argDict) - - # Need to 
check hazards against all edit areas in the CWA MAOR - argDict["combinations"]= [(self._allAreas(),"Region1")] - - self._hazardsTable = self._getHazardsTable(argDict, self.filterMethod) - argDict["hazards"] = self._hazardsTable - - def _getHazardsTable(self, argDict, filterMethod): - # Set up edit areas as list of lists - dfEditAreas = argDict["combinations"] - editAreas = [] - for area, label in dfEditAreas: - if type(area) is types.ListType: - editAreas.append(area) - elif type(area) is types.TupleType: #LatLon - editAreas.append([self.__getLatLonAreaName(area)]) - else: - editAreas.append([area]) - # Get Product ID and other info for HazardsTable - stationID4 = self._fullStationID - productCategory = self._productID - definition = argDict['definition'] - sampleThreshold = definition.get("hazardSamplingThreshold", (10, None)) - # Process the hazards - accurateCities = definition.get('accurateCities', 0) - import HazardsTable - hazards = HazardsTable.HazardsTable( - argDict["ifpClient"], editAreas, productCategory, filterMethod, - argDict["databaseID"], - stationID4, argDict["vtecActiveTable"], argDict["vtecMode"], sampleThreshold, - creationTime=argDict["creationTime"], accurateCities=accurateCities, - cityEditAreas=[], dataMgr=argDict['dataMgr']) - return hazards - - def _ignoreActions(self): - # Ignore hazards with these action codes in the overview headlines - # NOTE: the VTEC and segments will still include them correctly. 
- return ['CAN', 'UPG'] - - def _setVTECActiveTable(self, argDict): - gfeMode = CAVEMode.getMode().name() - - self.debug_print("*" *100, 1) - self.debug_print("gfeMode = '%s'" % (gfeMode), 1) - self.debug_print("*" *100, 1) - - if gfeMode == "PRACTICE": - argDict["vtecActiveTable"] = "PRACTICE" - else: - argDict["vtecActiveTable"] = "active" - - def _getVtecRecords(self, segment): - vtecRecords = self._hazardsTable.getHazardList(segment) - # Tropical hazards shouldn't ever have EXT and EXB actions since - # they are "until further notice" - for record in vtecRecords: - if record['act'] == "EXT": - record['act'] = "CON" - elif record['act'] == "EXB": - record['act'] = "EXA" - - return vtecRecords - - def _getAllowedHazardList(self, allowedHazardList=None): - # Get the list of allowed phenSigs (ie. "HU.W") - if allowedHazardList is None: - allowedHazardList = self.allowedHazards() - hazardList = [] - for h in allowedHazardList: - if type(h) is types.TupleType: - hazardList.append(h[0]) - else: - hazardList.append(h) - return hazardList - - def _altFilterMethod(self, hazardTable, allowedHazardsOnly=False): - # Remove hazards not in allowedHeadlines list - allowedHazardList = self._getAllowedHazardList(self.allowedHeadlines()) - return self._filterHazards(hazardTable, allowedHazardList, - allowedHazardsOnly) - - def _filterHazards(self, hazardTable, allowedHazardList, - allowedHazardsOnly=False): - newTable = [] - hazStr = "" - for i in range(len(hazardTable)): - if hazardTable[i]['sig'] != "": # VTEC - hazStr = hazardTable[i]['phen'] + "." 
+ hazardTable[i]['sig'] - else: #non-VTEC - hazStr = hazardTable[i]['phen'] - - if hazStr in allowedHazardList: - newTable.append(hazardTable[i]) - if allowedHazardsOnly: - return newTable - # get a raw list of unique edit areas - zoneList = [] - for t in newTable: - if t['id'] not in zoneList: - zoneList.append(t['id']) - for zone in zoneList: - # Remove lower priority hazards of the same type - self.filterZoneHazards(zone, newTable) - return newTable - - def _getAdditionalHazards(self): - argDict = self._argDict - argDict['definition'] = self._definition - altHazards = self._getHazardsTable(argDict, self._altFilterMethod) - conTable = altHazards.consolidatedTableByID() - - # Consolidate across action codes - hazDict = {} - for hazard in conTable: - hdln=hazard['hdln'] - phen=hazard['phen'] - sig=hazard['sig'] - act=hazard['act'] - if act in self._ignoreActions(): - continue - for area in hazard['id']: - hazDict.setdefault((hdln, phen, sig), []).append(area) - - self.debug_print("hazDict = %s" % (self._pp.pformat(hazDict)), 1) - hazardHdlns=[] - huAreas = [] - self.debug_print("Additional Hazard Headlines", 1) - for key in hazDict.keys(): - hdln, phen, sig = key - huAreas = huAreas + hazDict[key] - hazardHdln = ((hdln, "NEW", phen,sig), hazDict[key], [],[],[]) - self.debug_print(" %s" % (self._pp.pformat(hazardHdln)), 1) - self.debug_print(" %s" % (self._pp.pformat(hazDict[key])), 1) - hazardHdlns.append(hazardHdln) - return hazardHdlns, huAreas - - def _checkHazard(self, hazardHdlns, phenSigList, checkAreaTypes=None, - checkAreas=None, returnList=False, mode="any", includeCAN=False): - # Given a list of hazards in the form - # (key, landList, marineList, coastalList, inlandList) - # where key is (hdln, act, phen, sig) and the lists show which areas - # contain the hazard - # If mode == "any": - # Check to see if any of the given phenSigList = [(phen, sig), (phen, sig)] - # are found - # If mode == "all": - # Check to see if all of the given phenSigList are found 
- # IF checkAreaTypes is given, then check against that particular area type(s) i.e. - # "land", "marine", etc. - # IF checkAreas is given, only return areas that are in that list - # IF returnList=True, returns a list of (key, areas) that meet the criteria - # IF includeCAN is True then CAN hazards will be included as well. - # Otherwise, they are ignored. - # - # E.g. hdlnList = self._checkHazard(hazardHdlns, [("FA","W")], returnList=True) - self.debug_print("_checkHazard hazardHdlns is %s" % (self._pp.pformat(hazardHdlns)), 1) - self.debug_print("_checkHazard phenSigList is %s" % (self._pp.pformat(phenSigList)), 1) - chosen = [] - for key, landList, marineList, coastalList, inlandList in hazardHdlns: - - # We do not want to consider marine hazards in this product - hazAreas = landList - hazValue = (key, hazAreas) - self.debug_print("hazValue is %s" % (repr(hazValue)), 1) - hdln, act, phen, sig = key - if not includeCAN and act == "CAN": - continue - for checkPhen, checkSig in phenSigList: - self.debug_print("checkPhen is %s" % (checkPhen), 1) - self.debug_print("checkSig is %s" % (checkSig), 1) - if phen == checkPhen and sig == checkSig: - if checkAreaTypes is not None: - # Check for land, marine, etc. 
- for checkAreaType in checkAreaTypes: - exec "testList = " + checkAreaType + "List" - self.debug_print("testList is %s" % (testList), 1) - if testList != []: - chosen.append(hazValue) - elif checkAreas is not None: - acceptedAreas=[] - for hazArea in hazAreas: - if hazArea in checkAreas: - acceptedAreas.append(hazArea) - if acceptedAreas!=[]: - chosen.append((key, acceptedAreas)) - else: - chosen.append(hazValue) - if not returnList and chosen!=[]: break - - self.debug_print("In _checkHazard chosen = %s" % - (self._pp.pformat(chosen)), 1) - if not returnList: - return chosen!=[] - return chosen - - ############################################################### - ### Time related methods - - def _initializeTimeVariables(self, argDict): - argDict['creationTime'] = int(time.time()/60)*60 - self._issueTime_secs = argDict['creationTime'] - self._issueTime_ms = self._issueTime_secs * 1000 # in milliseconds - - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._purgeHours = self._purgeTime - self._expireTime = self._issueTime_secs + self._purgeHours*3600 - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - - def _determineTimeRanges(self, argDict): - # Create a 120 hour time range starting from the issuanceHour and broken into 1-hour chunks - # Used for Wind Section - startTime1Hour = self._calculateStartTime(self._issueTime_secs, resolution=1) - self._timeRange1Hour = self.makeTimeRange(startTime1Hour, startTime1Hour+120*3600) - self._timeRangeList1Hour = self._createTimeRangeList(self._timeRange1Hour, hours=1) - - # Create a 120 hour time range starting from the issuanceHour and broken into 3-hour chunks - # Used for Flooding Rain and Tornado Sections - startTime3Hour = self._calculateStartTime(self._issueTime_secs, resolution=3) - timeRange3Hour = self.makeTimeRange(startTime3Hour, startTime3Hour+120*3600) - self._timeRangeList3Hour = self._createTimeRangeList(timeRange3Hour, 
hours=3) - - # Create a time range to look from the current time back 12 hours. - # We will use this to determine if we need to use "additional" - # wording with rainfall for the TCV - self._extraSampleTimeRange = self.makeTimeRange(startTime3Hour-12*3600, startTime3Hour) - - # Create a 120 hour time range starting from the issuanceHour and broken into 6-hour chunks - # Used for Storm Surge Section - startTime6Hour = self._calculateStartTime(self._issueTime_secs, resolution=6) - timeRange6Hour = self.makeTimeRange(startTime6Hour, startTime6Hour+120*3600) - self._timeRangeList6Hour = self._createTimeRangeList(timeRange6Hour, hours=6) - # Create a list of 10 periods (GMT time) corresponding to 6AM and 6PM times (local time) - self._createPeriodList(startTime1Hour) - - def _createTimeRangeList(self, timeRange, hours): - subRanges = self.divideRange(timeRange, hours) - trList = [] - for index, tr in enumerate(subRanges): - self.debug_print("In _createTimeRangeList (%s hour chunks) -> tr = %s" % - (hours, self._pp.pformat(tr)), 1) - trList.append((tr, "Label")) - - return trList - - def _createPeriodList(self, startTime): - # Create the 10 periods - self._periodList = [] - - localtime = time.localtime(startTime.unixTime()) - - # Determine the number of hours to the next 6AM or 6PM period - if localtime.tm_hour < 6: - periodLength = 6 - localtime.tm_hour - elif localtime.tm_hour >= 6 and localtime.tm_hour < 18: - periodLength = 18 - localtime.tm_hour - else: - periodLength = 30 - localtime.tm_hour - - # Don't allow the first period to be less than 3 hours long; - # instead just start with the next period - if periodLength < 3: - periodStart = startTime + periodLength*3600 - period = self.makeTimeRange(periodStart, periodStart+12*3600) - else: - period = self.makeTimeRange(startTime, startTime+periodLength*3600) - - self._periodList.append(period) - - for i in range(1,10): - startTime = period.endTime() # Start where the last period leaves off - period = 
self.makeTimeRange(startTime, startTime+12*3600) - self._periodList.append(period) - - self.debug_print("final periodList =\n\n%s\n" % - (self._pp.pformat(self._periodList)), 1) - - def _calculateStartTime(self, localCreationTime, resolution): - resolution = resolution*3600 - - self.debug_print("In _calculateStartTime incoming res %d localCreationTime = %d" % - (resolution, localCreationTime), 1) - - # Determine how far along we are in a given time block - adjust = localCreationTime % resolution - - # If we are less than halfway though a block we would want. - # If this is for surge (6 hour resolution aka 21600 seconds) and we - # aren't within 30 minutes (aka 1800 seconds) of the next block, go - # back to the start of the current block - if adjust < resolution / 2 or (resolution == 21600 and (resolution - adjust) > 1800): - adjust *= -1 # to move back to beginning of the block - else: - adjust = resolution - adjust # go to next block - - self.debug_print("In _calculateStartTime %d adjust = %d" % - (resolution, adjust), 1) - - startTime = AbsTime(localCreationTime + adjust) - - return startTime - - def _formatPeriod(self, period, wholePeriod=False, shiftToLocal=True, useEndTime=False, - resolution=3): - # Format period (a timeRange) resulting in - # DAY + MORNING / AFTERNOON / EVENING / OVERNIGHT. - # If wholePeriod, format FROM ... TO... 
- - self.debug_print("Format period wholePeriod = %s, period = %s, useEndTime =%s" % - (str(wholePeriod), str(period), str(useEndTime)), 1) - if period is None: - return "" - if useEndTime: - startTime = period.endTime() - else: - startTime = period.startTime() - result = self._getTimeDesc(startTime, resolution, shiftToLocal) - self.debug_print("_getTimeDesc result = '%s'" % (result), 1) - if wholePeriod: - endResult = self._getTimeDesc(period.endTime(), resolution, shiftToLocal) - self.debug_print("_getTimeDesc endResult = '%s'" % (endResult), 1) - if result != endResult: - result=result + " TO "+ endResult - return result - - def _getTimeDesc(self, startTime, resolution=3, shiftToLocal=True): - # Create phrase such as Tuesday morning - # Handle today/tonight and "this" morning/afternoon/etc.. - # - self.debug_print("\n\n**************Formatting Period for GMT startTime %s" % - (repr(startTime)), 1) - labels = self.Labels()["SimpleWorded"] - currentTime = self._timeRange1Hour.startTime() - self.debug_print(" currentTime = %s" % (repr(currentTime)), 1) - if shiftToLocal: - currentLocalTime, shift = self.determineTimeShift() - startTime = startTime + shift - currentTime = currentTime + shift - self.debug_print("shift = %s shifted start = %s current = %s" % - (shift/3600, startTime, currentTime), 1) - hour = startTime.hour - prevDay = False - prevDay, partOfDay = self._getPartOfDay(hour, resolution) -# if prevDay: -# startTime = startTime - 24*3600 - todayFlag = currentTime.day == startTime.day - if todayFlag: - if partOfDay.lower().find("midnight")>0: todayWord = "tonight" - else: todayWord = "this" - weekday = todayWord - else: - weekday = labels["Weekday"][startTime.weekday()] - if partOfDay.find("") >= 0: - result = partOfDay.replace('', weekday) - else: - result = weekday + " " + partOfDay - self.debug_print("Result = '%s'" % (result), 1) - return result - - def _getPartOfDay(self, hour, resolution): - prevDay = False - if resolution == 3: - if hour < 3: - 
prevDay = True - partOfDay = "early morning" -# partOfDay = "after midnight" - elif hour < 6: - partOfDay = "early morning" - elif hour < 9: - partOfDay = "morning" - elif hour < 12: - partOfDay = "late morning" - elif hour < 15: - partOfDay = "early afternoon" - elif hour < 18: - partOfDay = "late afternoon" - elif hour < 21: - partOfDay = "early evening" - else: - partOfDay = "late evening" - else: - if hour < 6: - prevDay = True -# partOfDay = "after midnight" - partOfDay = "early morning" - elif hour < 12: partOfDay = "morning" - elif hour < 18: partOfDay = "afternoon" - else: partOfDay = "evening" - return prevDay, partOfDay - - ############################################################### - ### Storm Information and TCP related methods - - # These variables were previously all set to None - def _initializeStormInformation(self): - self._stormType = None - self._stormName = None - self._advisoryType = None - self._advisoryNumber = None - self._stormNumber = None # This is an 8-digit string like "AL092016" - self._stormID = None # This is a 2-digit string embedded in the storm number ("09" in "AL092016") - - if self._useTestTCP(): - self._TCP = self._testTCP() - elif "Enter PIL below" in self._StormInfo: - if len(self._StormInfo_entry.strip()) == 0: - return "You need to enter the PIL" - else: - # Ensure PIL is in UPPERCASE - self._TCP = self.getPreviousProduct(self._StormInfo_entry.strip().upper()) - else: - self._TCP = self.getPreviousProduct(self._StormInfo) - - self._parseTCP(self._TCP) - - return None - - def _parseTCP(self, tcp): - # This pattern will handle multiple word names - # (including certain special characters). - # This is for the NHC format. 
- mndSearch = re.search("(?im)^.*?(?PHURRICANE|" + - "(POTENTIAL|SUB|POST.?)" + - "?TROPICAL (STORM|DEPRESSION|CYCLONE)|" + - "(SUPER )?TYPHOON|REMNANTS OF) " + - "(?P[A-Z0-9\-\(\) ]+?)" + - "(?PSPECIAL |INTERMEDIATE )" + - "?ADVISORY NUMBER[ ]+" + - "(?P[A-Z0-9]+)[ ]*", tcp) - - if mndSearch is not None: - self._stormType = mndSearch.group("stormType").strip() - self._stormName = mndSearch.group("stormName").strip() - advisoryType = mndSearch.group("advisoryType") - if advisoryType is not None: - self._advisoryType = advisoryType.strip() - self._advisoryNumber = mndSearch.group("advisoryNumber").strip() - - senderSearch = re.search("(?im)^(?P(NWS (National |Central Pacific )?Hurricane Center|" + - "National Weather Service).*?)$", tcp) - - if senderSearch is not None: - sender = senderSearch.group("sender") - senderParts = sender.split(" ") - # If the storm number is mentioned, it will be the last "word" of the line - stormNumber = senderParts[-1] - if len(stormNumber) == 8 and \ - stormNumber[0:2].isalpha() and \ - stormNumber[2:].isdigit(): - self._stormNumber = stormNumber.strip() - self._stormID = stormNumber[2:4] - - def _getStormTypeFromTCP(self): - return self._stormType - - def _getStormNameFromTCP(self): - return self._stormName - - def _getAdvisoryTypeFromTCP(self): - return self._advisoryType - - def _getAdvisoryNumberStringFromTCP(self): - return self._advisoryNumber - - def _getStormNumberStringFromTCP(self): - return self._stormNumber - - def _getStormIDStringFromTCP(self): - return self._stormID - - ## Used for testing and debugging - def _useTestTCP(self): - #return True - return False - - def _testTCP(self): - return \ -"""337 -WTNT34 KNHC 250256 -TCPAT4 - -BULLETIN -TROPICAL STORM ISAAC ADVISORY NUMBER 16 -NWS NATIONAL HURRICANE CENTER MIAMI FL AL092012 -1100 PM EDT FRI AUG 24 2012 - -...ISAAC GETTING BETTER ORGANIZED AS IT MOVES NORTHWESTWARD TOWARD -HAITI... 
- - -SUMMARY OF 1100 PM EDT...0300 UTC...INFORMATION ------------------------------------------------ -LOCATION...17.7N 72.5W -ABOUT 65 MI...100 KM SSW OF PORT AU PRINCE HAITI -ABOUT 245 MI...395 KM SE OF GUANTANAMO CUBA -MAXIMUM SUSTAINED WINDS...70 MPH...110 KM/H -PRESENT MOVEMENT...NW OR 310 DEGREES AT 14 MPH...22 KM/H -MINIMUM CENTRAL PRESSURE...990 MB...29.23 INCHES - - -WATCHES AND WARNINGS --------------------- -CHANGES WITH THIS ADVISORY... - -A HURRICANE WATCH AND A TROPICAL STORM WARNING HAVE BEEN ISSUED FOR -ALL OF THE FLORIDA KEYS...INCLUDING FLORIDA BAY...AND FOR THE COAST -OF THE SOUTHERN FLORIDA PENINSULA FROM OCEAN REEF ON THE EAST COAST -WESTWARD TO BONITA BEACH ON THE WEST COAST. - -A TROPICAL STORM WARNING HAS BEEN ISSUED FOR THE SOUTHEAST FLORIDA -COAST FROM NORTH OF OCEAN REEF NORTHWARD TO JUPITER INLET...AND FOR -LAKE OKEECHOBEE. - -THE GOVERNMENT OF THE BAHAMAS HAS ISSUED A TROPICAL STORM WARNING -FOR ALL OF THE NORTHWESTERN BAHAMAS. - -A TROPICAL STORM WATCH HAS BEEN ISSUED FOR THE EAST-CENTRAL FLORIDA -COAST FROM NORTH OF JUPITER INLET TO SEBASTIAN INLET. - -THE CAYMAN ISLANDS METEOROLOGICAL SERVICE HAS ISSUED A TROPICAL -STORM WATCH FOR THE CAYMAN ISLANDS. - -SUMMARY OF WATCHES AND WARNINGS IN EFFECT... - -A HURRICANE WATCH IS IN EFFECT FOR... -* HAITI -* FLORIDA KEYS INCLUDING THE DRY TORTUGAS -* FLORIDA BAY -* THE FLORIDA EAST COAST FROM OCEAN REEF SOUTHWARD -* THE FLORIDA WEST COAST FROM BONITA BEACH SOUTHWARD - -A TROPICAL STORM WARNING IS IN EFFECT FOR... -* DOMINICAN REPUBLIC -* HAITI -* CUBAN PROVINCES OF CIEGO DE AVILA...SANCTI SPIRITUS...VILLA -CLARA...CAMAGUEY...LAS TUNAS...GRANMA...HOLGUIN...SANTIAGO DE -CUBA...AND GUANTANAMO -* THE BAHAMAS -* TURKS AND CAICOS ISLANDS -* THE FLORIDA KEYS INCLUDING THE DRY TORTUGAS -* THE FLORIDA EAST COAST FROM JUPITER INLET SOUTHWARD -* THE FLORIDA WEST COAST FROM BONITA BEACH SOUTHWARD -* FLORIDA BAY AND LAKE OKEECHOBEE - -A TROPICAL STORM WATCH IS IN EFFECT FOR... 
-* CUBAN PROVINCES OF MATANZAS AND CIENFUEGOS -* JAMAICA -* THE FLORIDA EAST COAST NORTH OF JUPITER INLET TO SEBASTIAN INLET - -A HURRICANE WATCH MEANS THAT HURRICANE CONDITIONS ARE POSSIBLE -WITHIN THE WATCH AREA...IN THIS CASE WITHIN THE NEXT 24 TO 36 HOURS. - -A TROPICAL STORM WARNING MEANS THAT TROPICAL STORM CONDITIONS ARE -EXPECTED SOMEWHERE WITHIN THE WARNING AREA WITHIN 36 HOURS. - -A TROPICAL STORM WATCH MEANS THAT TROPICAL STORM CONDITIONS ARE -POSSIBLE WITHIN THE WATCH AREA...GENERALLY WITHIN 48 HOURS. - -INTERESTS IN THE REMAINDER OF CUBA AND THE REMAINDER OF THE FLORIDA -PENINSULA SHOULD MONITOR THE PROGRESS OF ISAAC. - -FOR STORM INFORMATION SPECIFIC TO YOUR AREA IN THE UNITED -STATES...INCLUDING POSSIBLE INLAND WATCHES AND WARNINGS...PLEASE -MONITOR PRODUCTS ISSUED BY YOUR LOCAL NATIONAL WEATHER SERVICE -FORECAST OFFICE. FOR STORM INFORMATION SPECIFIC TO YOUR AREA OUTSIDE -THE UNITED STATES...PLEASE MONITOR PRODUCTS ISSUED BY YOUR NATIONAL -METEOROLOGICAL SERVICE. - - -DISCUSSION AND 48-HOUR OUTLOOK ------------------------------- -AT 1100 PM EDT...0300 UTC...THE CENTER OF TROPICAL STORM ISAAC WAS -LOCATED NEAR LATITUDE 17.7 NORTH...LONGITUDE 72.5 WEST. ISAAC IS -MOVING TOWARD THE NORTHWEST NEAR 14 MPH...22 KM/H...BUT IS EXPECTED -TO RESUME A FASTER FORWARD SPEED TOWARD THE NORTHWEST TONIGHT -THROUGH SUNDAY. ON THE FORECAST TRACK...THE CENTER OF ISAAC SHOULD -MAKE LANDFALL IN HAITI TONIGHT...MOVE NEAR OR OVER SOUTHEASTERN -CUBA ON SATURDAY...MOVE NEAR OR OVER CENTRAL CUBA SATURDAY NIGHT... -AND APPROACH THE FLORIDA KEYS ON SUNDAY. - -MAXIMUM SUSTAINED WINDS ARE NEAR 70 MPH...110 KM/H...WITH HIGHER -GUSTS. LITTLE CHANGE IN STRENGTH IS LIKELY BEFORE LANDFALL... -FOLLOWED BY SOME WEAKENING AS THE CENTER CROSSES HAITI AND -SOUTHEASTERN CUBA. - -TROPICAL-STORM-FORCE WINDS EXTEND OUTWARD UP TO 230 MILES... -370 KM...MAINLY NORTHWEST AND NORTHEAST OF THE CENTER. - -ESTIMATED MINIMUM CENTRAL PRESSURE IS 990 MB...29.23 INCHES. 
- - -HAZARDS AFFECTING LAND ----------------------- -RAINFALL...TOTAL RAINFALL ACCUMULATIONS OF 8 TO 12 INCHES...WITH -MAXIMUM AMOUNTS OF 20 INCHES...ARE POSSIBLE OVER HISPANIOLA. THESE -RAINS COULD CAUSE LIFE-THREATENING FLASH FLOODS AND MUD SLIDES. -TOTAL RAIN ACCUMULATIONS OF 4 TO 8 INCHES...WITH MAXIMUM AMOUNTS OF -12 INCHES...ARE POSSIBLE ACROSS JAMAICA...THE CENTRAL AND EASTERN -PORTIONS OF CUBA...THE FLORIDA KEYS AND THE SOUTHERN PENINSULA OF -FLORIDA. TOTAL RAIN ACCUMULATIONS OF 2 TO 4 INCHES ARE POSSIBLE -OVER THE CENTRAL AND SOUTHEASTERN BAHAMAS. - -WIND...TROPICAL STORM CONDITIONS ARE SPREADING OVER PORTIONS OF THE -DOMINICAN REPUBLIC AND HAITI...WITH HURRICANE CONDITIONS POSSIBLE IN -HAITI. TROPICAL STORM CONDITIONS ARE EXPECTED OVER THE SOUTHEASTERN -BAHAMAS AND THE TURKS AND CAICOS ISLANDS TONIGHT...ARE EXPECTED -OVER THE CENTRAL BAHAMAS BY SATURDAY OR SATURDAY NIGHT...AND ARE -EXPECTED OVER THE NORTHWESTERN BAHAMAS BY SUNDAY. TROPICAL STORM -CONDITIONS ARE EXPECTED OVER EASTERN CUBA BY TONIGHT AND OVER -CENTRAL CUBA BY SATURDAY OR SATURDAY NIGHT. TROPICAL STORM -CONDITIONS ARE EXPECTED TO REACH NORTHWESTERN CUBA AND THE -NORTHWESTERN BAHAMAS BY SATURDAY NIGHT OR SUNDAY...AND SOUTH -FLORIDA AND THE FLORIDA KEYS ON SUNDAY. HURRICANE CONDITIONS ARE -POSSIBLE OVER THE FLORIDA KEYS...FLORIDA BAY...AND THE SOUTHERNMOST -FLORIDA PENINSULA BY SUNDAY EVENING. - -STORM SURGE...THE COMBINATION OF A STORM SURGE AND THE TIDE WILL -CAUSE NORMALLY DRY AREAS NEAR THE COAST TO BE FLOODED BY RISING -WATERS. THE WATER COULD REACH THE FOLLOWING DEPTHS ABOVE GROUND -IF THE PEAK SURGE OCCURS AT THE TIME OF HIGH TIDE... - -SOUTH FLORIDA INCLUDING THE FLORIDA KEYS...2 TO 4 FT -HISPANIOLA AND EASTERN CUBA...1 TO 3 FT -THE BAHAMAS AND TURKS AND CAICOS...1 TO 3 FT - -THE DEEPEST WATER WILL OCCUR ALONG THE IMMEDIATE COAST IN AREAS OF -ONSHORE FLOW. 
SURGE-RELATED FLOODING DEPENDS ON THE RELATIVE TIMING -OF THE SURGE AND THE TIDAL CYCLE...AND CAN VARY GREATLY OVER SHORT -DISTANCES. FOR INFORMATION SPECIFIC TO YOUR AREA...PLEASE SEE -PRODUCTS ISSUED BY YOUR LOCAL WEATHER SERVICE OFFICE. NEAR THE -COAST...THE SURGE WILL BE ACCOMPANIED BY DANGEROUS WAVES. - -SURF...DANGEROUS SURF AND RIP CURRENT CONDITIONS WILL AFFECT PUERTO -RICO...HISPANIOLA...THE BAHAMAS...THE TURKS AND CAICOS...EASTERN -AND CENTRAL CUBA...AND THE EAST COAST OF FLORIDA AND THE FLORIDA -KEYS DURING THE NEXT COUPLE OF DAYS. PLEASE CONSULT PRODUCTS FROM -YOUR LOCAL WEATHER OFFICE FOR MORE INFORMATION. - - -NEXT ADVISORY -------------- -NEXT INTERMEDIATE ADVISORY...200 AM EDT. -NEXT COMPLETE ADVISORY...500 AM EDT. - -$$ -FORECASTER STEWART""" - - ############################################################### - ### Advisory related methods - - def _initializeAdvisories(self): - self._currentAdvisory = dict() - self._currentAdvisory['ZoneData'] = dict() - self._loadLastTwoAdvisories() - - def _synchronizeAdvisories(self): - # Retrieving a directory causes synching to occur. - # This code can throw an exception but don't catch it - # so that forecasters can be made aware of the issue. 
- file = LocalizationSupport.getLocalizationFile(LocalizationSupport.CAVE_STATIC, - LocalizationSupport.SITE, self._site, - self._getAdvisoryPath()).getFile() - - return file - - def _getLocalAdvisoryDirectoryPath(self): - file = self._synchronizeAdvisories() - path = file.getPath() - - try: - os.makedirs(path) - except OSError as exception: - if exception.errno != errno.EEXIST: - raise - - return path - - def _getStormAdvisoryNames(self): - advisoryDirectoryPath = self._getLocalAdvisoryDirectoryPath() - filenames = os.listdir(advisoryDirectoryPath) - allAdvisories = filter(lambda filename: filename[-5:] == ".json", filenames) - - self.debug_print("allAdvisories = %s" % (self._pp.pformat(allAdvisories))) - - stormAdvisories = filter(lambda filename: self._getStormNumberStringFromTCP() in filename, - allAdvisories) - stormAdvisories = map(lambda filename: filename[:-5], stormAdvisories) - self.debug_print("stormAdvisories = %s" % (self._pp.pformat(stormAdvisories))) - - return stormAdvisories - - def _loadLastTwoAdvisories(self): - stormAdvisories = self._getStormAdvisoryNames() - - # We need to reverse the order of the advisories so the latest - # advisories come first in this list - stormAdvisories.sort(reverse=True) - - lastTwoAdvisories = [] - - # Get the current advisory number string from the TCP - curAdvisoryString = self._getAdvisoryNumberStringFromTCP() - - if self._awipsWANPil.find("TCV") != -1: - for advisory in stormAdvisories: - if not advisory.endswith(curAdvisoryString): - # Different advisory - keep it - lastTwoAdvisories.append(advisory) - - else: # Must be the HLS - lastTwoAdvisories = stormAdvisories[:2] - if len(lastTwoAdvisories) > 0: - self._previousAdvisoryMatchesNumber = lastTwoAdvisories[0].endswith(\ - curAdvisoryString) - - self.debug_print("DEBUG: last two advisories = %s" % - (self._pp.pformat(lastTwoAdvisories)), 1) - self._previousAdvisory = None - if len(lastTwoAdvisories) >= 1: - self._previousAdvisory = 
self._loadAdvisory(lastTwoAdvisories[0]) - - self._previousPreviousAdvisory = None - if len(lastTwoAdvisories) >= 2: - self._previousPreviousAdvisory = self._loadAdvisory(lastTwoAdvisories[1]) - - def _loadAdvisory(self, advisoryName): - self._synchronizeAdvisories() - fileName = self._getAdvisoryFilename(advisoryName) - - try: - pythonDict = JsonSupport.loadFromJson(LocalizationSupport.CAVE_STATIC, - self._site, - fileName) - - self.debug_print("File contents for %s:" % (fileName), 1) - self.debug_print(self._pp.pformat(pythonDict), 1) - - # Only use transmitted advisories - if pythonDict["Transmitted"] == False and advisoryName != "pending": - return None - else: - return pythonDict - except Exception, e: - self.debug_print("Load Exception for %s : %s" % (fileName, e), 1) - return None - - def _getAdvisoryPath(self): - gfeMode = CAVEMode.getMode().name() - if gfeMode == "PRACTICE": - return os.path.join("gfe", "tcvAdvisories", "practice") - else: - return os.path.join("gfe", "tcvAdvisories") - - def _getAdvisoryFilename(self, advisoryName): - advisoryFilename = os.path.join(self._getAdvisoryPath(), - advisoryName+".json") - return advisoryFilename - - ############################################################### - ### GUI related methods - - def _processVariableList(self, definition, parent): - # Get Definition variables - for key in definition.keys(): - exec "self._" + key + "= definition[key]" - - # Overview GUI - while True: - overviewDict = self._displayGUI() - if overviewDict is None: - return None - break - - # Consolidate information from GUI's - varDict = overviewDict - return varDict - - def _GUI_sizing_dict(self): - # This contains values that adjust the GUI sizing. 
- return { - "GUI_height_limit": 900, # limit to GUI height in canvas pixels - "charSize": 9, - } - - def _GUI1_configDict(self): - return { - # Order and inclusion of GUI1 buttons - # Each entry is (name of button in GUI code, desired label on GUI) - "buttonList":[ - ("Run","Run"), - ("Cancel","Cancel"), - ], - } - - def _font_GUI_dict(self): - return { - "headers": ("blue", ("Helvetica", 14, "bold")), - "instructions": (None, ("Helvetica", 12, "italic")), - } - - -import Tkinter -class Common_Dialog(Dialog): - def __init__(self, parent, title, infoDict=None): - self._status = "Cancel" # exception, or user-cancels - self._tkObject_dict = {} # place to store reference to tk objects - self._varDict = {} # all end results must be saved here - self._infoDict = infoDict - self._parent = parent - self._pp = pprint.PrettyPrinter() - Dialog.__init__(self, parent=None, title=title) - - def getVarDict(self): - return self._varDict - - def _makeRadioOrCheckList(self, master, label, elementList, default=None, - buttonSide=Tkinter.TOP, frameSide=Tkinter.LEFT, entryField=None, - headerFG=None, headerFont=None, boxType="radio", - listFrameRelief=Tkinter.GROOVE): - listFrame = Tkinter.Frame(master, relief=listFrameRelief, borderwidth=1) - - if label != "": - listLabel = Tkinter.Label(listFrame, text=label, fg=headerFG, font=headerFont) - listLabel.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO, padx=10) - - ivar = Tkinter.IntVar() - defaultIndex = 0 - ivarList = [] - for element in elementList: - index = elementList.index(element) - if type(element) is types.TupleType: - element, key = element - if boxType== "radio": - button = Tkinter.Radiobutton(listFrame, variable=ivar, text=element, value=index) - else: - ivar = Tkinter.IntVar() - if default is not None and element in default: ivar.set(1) - else: ivar.set(0) - button= Tkinter.Checkbutton(listFrame, variable=ivar, text=element) - ivarList.append(ivar) - button.pack(side=buttonSide, anchor=Tkinter.W, 
expand=Tkinter.YES, padx=4) - # Look for default - if element == default: - defaultIndex = index - - entryObject = None - if entryField is not None: - entryObject = self._makeEntry(listFrame, entryField) - # packing - listFrame.pack(side=frameSide, expand=Tkinter.NO, fill=Tkinter.Y) #, anchor=Tkinter.N) - #listFrame.pack(side=frameSide, expand=Tkinter.YES, fill=Tkinter.Y, anchor=Tkinter.N) - - if boxType == "radio": - ivar.set(defaultIndex) # set the default - if boxType == "check": - ivar = ivarList - return ivar, entryObject - - def _makeEntry(self, frame, text, width=20): - label = Tkinter.Label(frame, text=text) - label.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.NO) - entry = Tkinter.Entry(frame, relief=Tkinter.SUNKEN, width=width) - entry.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.NO) - return entry - - def cancelCB(self): - self._status = "Cancel" - self.cancel() - - def _entryName(self, name): - return name+"_entry" - - def _makeTuple(self,str): - str = re.sub('(?im)[^_a-z]', '', str) - return (str+":",str) - - def _setVarDict(self, key, value, options=None): - if options is not None: - value = options[value] - if type(value) is types.TupleType: - value = value[1] - self._varDict[self._makeTuple(key)] = value - - def status(self): - return self._status - - def buttonbox(self): - # override the existing ok/cancel button box, removing it. - # we do this so that we can attach our own hooks into the functions. - pass - - -######################################################### -# The following defintions are from TextProductCommon. # -# This is just bringing over the minimum amount needed. 
# -######################################################### -import DiscretePhrases -class TextProductCommon(DiscretePhrases.DiscretePhrases): - def __init__(self): - DiscretePhrases.DiscretePhrases.__init__(self) - - def setUp(self, areaDict): - self._areaDictionary = areaDict - - def hazardTimeZones(self, areaList): - ''' - Returns list of time zones for the starting time - and list of time zones for the ending time. - - The areaList provides a complete list of areas for this headline. - startT, endT are the hazard times. - ''' - - # get this time zone - thisTimeZone = os.environ.get('TZ') - if thisTimeZone is None: - thisTimeZone = 'GMT' - - zoneList = [] - areaDict = self._areaDictionary - - # check to see if we have any areas outside our time zone - for areaName in areaList: - if areaName in areaDict.keys(): - entry = areaDict[areaName] - if not entry.has_key('ugcTimeZone'): #add your site id - if thisTimeZone not in zoneList: - zoneList.append(thisTimeZone) - continue # skip it - timeZoneList = entry['ugcTimeZone'] - if type(timeZoneList) is not types.ListType: # a single value - timeZoneList = [str(timeZoneList)] # make it into a list - for timeZone in timeZoneList: - if timeZone not in zoneList: - zoneList.append(timeZone) - - # if the resulting zoneList is empty, put in our time zone - if len(zoneList) == 0: - zoneList.append(thisTimeZone) - - # if the resulting zoneList has our time zone in it, be sure it - # is the first one in the list - try: - index = zoneList.index(thisTimeZone) - if index != 0: - del zoneList[index] - zoneList.insert(0, thisTimeZone) - except: - pass - - return zoneList - - def getExpireTime(self, issueTime, purgeHours, vtecRecords, roundMinutes=15, - fixedExpire=0): - ''' - Given the issuance time, purgeHours, and the vtecRecords (with times converted to ms), - returns the appropriate expiration time. 
- - Expiration time is the earliest of the specified expiration time, 1 hr if a CAN code - is detected, or the ending time of ongoing events (CON, EXT, EXB, NEW). - The issueTime and expireTime are ints in milliseconds. - - @param issueTime in ms - @param purgeHours -- set time past issuance time. - The default for this is set by policy e.g. an FFA expires by default - in 8 hours. However, if there is a hazard end time earlier, then that - is used. - if -1, then hazard end time is to be used - @param vtecRecords in the segment with times converted to ms - @param roundMinutes - @param fixedExpire -- indicates to ignore the VTEC actions when computing the - expiration time - - ''' - if purgeHours > 0: - expireTime = issueTime + purgeHours * 3600 * 1000 - else: - expireTime = None - # Pick the earliest end time of the vtecRecords in the segment - for vtecRecord in vtecRecords: - if expireTime is None or vtecRecord.get('endTime') < expireTime: - expireTime = vtecRecord.get('endTime') - - if not fixedExpire: - canExpFound = 0 - activeFound = 0 - laterActive = None #later end time of all active events - for vtecRecord in vtecRecords: - action = vtecRecord.get('act') - if action in ['CAN','EXP']: - canExpFound = 1 - elif action in ['NEW','CON','EXT','EXB','EXA']: - activeFound = 1 - endTime = vtecRecord.get('endTime') - if endTime != 0: - if laterActive is not None: - laterActive = max(laterActive, endTime) - else: - laterActive = endTime - if laterActive is not None: - expireTime = min(expireTime, laterActive) - elif canExpFound and not activeFound: - expireTime = min(expireTime, issueTime+3600*1000) #1hr from now - - #ensure expireTime is not before issueTime, and is at least 1 hour - if expireTime - issueTime < 3600*1000: - expireTime = issueTime + 3600*1000 - - #round to next 'roundMinutes' - roundValue = roundMinutes*60*1000 #in milliseconds - delta = expireTime % roundValue # in milliseconds - baseTime = int(expireTime/roundValue)*roundValue - if delta/60*1000 >= 1: 
#add the next increment - expireTime = baseTime + roundValue - else: #within 1 minute, don't add the next increment - expireTime = baseTime - - return expireTime - - def getHeadlinesAndSections(self, vtecRecords, productID, issueTime): - ''' - Order vtec records and create the sections for the segment - - @param vtecRecords: vtecRecords for a segment - @param metaDataList: list of (metaData, hazardEvent) for the segment - @param productID: product ID e.g. FFA, CWF, etc. - @param issueTime: in seconds so that it compares to the vtec records - ''' - sections = [] - headlines = [] - headlineStr = '' - hList = copy.deepcopy(vtecRecords) - if len(hList): - if productID in ['CWF','NSH','OFF','GLF']: - hList.sort(self.marineSortHazardAlg) - else: - hList.sort(self.regularSortHazardAlg) - - while len(hList) > 0: - vtecRecord = hList[0] - - # Can't make phrases with vtecRecords with no 'hdln' entry - if vtecRecord['hdln'] == '': - hList.remove(vtecRecord) - continue - - # make sure the vtecRecord is still in effect or within EXP critiera - if (vtecRecord['act'] != 'EXP' and issueTime >= vtecRecord['endTime']) or \ - (vtecRecord['act'] == 'EXP' and issueTime > 30*60 + vtecRecord['endTime']): - hList.remove(vtecRecord) - continue # no headline for expired vtecRecords - - #assemble the vtecRecord type - hazStr = vtecRecord['hdln'] - headlines.append(hazStr) - #hazStr = self.convertToLower(hazStr) - - # if the vtecRecord is a convective watch, tack on the etn - phenSig = vtecRecord['phen'] + '.' 
+ vtecRecord['sig'] - if phenSig in ['TO.A', 'SV.A']: - hazStr = hazStr + ' ' + str(vtecRecord['etn']) - - # add on the action - actionWords = self.actionControlWord(vtecRecord, issueTime) - hazStr = hazStr + ' ' + actionWords - - if len(hazStr): - # Call user hook - localStr = self.hazard_hook( - None, None, vtecRecord['phen'], vtecRecord['sig'], vtecRecord['act'], - vtecRecord['startTime'], vtecRecord['endTime']) # May need to add leading space if non-null - headlineStr = headlineStr + '...' + hazStr + localStr + '...\n' - - # always remove the main vtecRecord from the list - hList.remove(vtecRecord) - - return headlineStr, headlines - - def formatUGCs(self, ugcs, expireTime): - ''' - Create ugc header with expire time - Examples: - 'COC123-112330-' - 'FLZ066>068-071-072-063-069-073>075-168-172>174-070-230515-' - ''' - ugcStr = self.makeUGCString(ugcs) - ddhhmmTime = self.getFormattedTime( - expireTime/1000, '%d%H%M', shiftToLocal=0, stripLeading=0).upper() - ugcStr = ugcStr + '-' + ddhhmmTime + '-' - return ugcStr - - def getFormattedTime(self, time_secs, format='%I%M %p %Z %a %b %d %Y', - shiftToLocal=1, upperCase=0, stripLeading=1): - ''' - Return a text string of the given time in seconds in the given format - This method is used for product headers. 
- ''' - if time_secs == 0: - time_secs = time.time() - if shiftToLocal == 1: - curTime = time.localtime(time_secs) - else: - curTime = time.gmtime(time_secs) - localTime = time.localtime(time_secs) - zoneName = time.strftime('%Z',localTime) - timeStr = time.strftime(format, curTime) - if shiftToLocal == 0: - timeStr = string.replace(timeStr, zoneName, 'GMT') - if stripLeading==1 and (timeStr[0] == '0' or timeStr[0] == ' '): - timeStr = timeStr[1:] - if upperCase == 1: - timeStr = string.upper(timeStr) - timeStr = string.replace(timeStr, ' ', ' ') - return timeStr - - def formatUGC_names(self, ugcs, alphabetize=False, separator='-'): - ''' - For example: Saunders-Douglas-Sarpy-Lancaster-Cass-Otoe- - ''' - nameList = [] - for ugc in ugcs: - entry = self._areaDictionary.get(ugc) - nameList.append(entry.get('ugcName', ugc)) - if alphabetize: - nameList.sort() - return self.formatNameString(nameList, separator) - - def formatNameString(self, nameList, separator, state=None): - nameString = '' - for name in nameList: - nameString+= name + separator - if state: - nameString = nameString.rstrip(separator) + ' ('+state+') ' - return nameString - - def getVal(self, dictionary, key, default=None, altDict=None): - ''' - Convenience method to access dictionary keys and account for :skip and :editable suffixes - - @param dictionary - @param key, potentially without a suffix e.g. 'info' - @return the key value accounting for suffixes e.g. 'info:skip' - ''' - for dictKey in [key, key+':skip', key+':editable']: - if dictionary.get(dictKey): - return dictionary.get(dictKey) - if altDict and altDict.get(dictKey): - return altDict.get(dictKey) - return default - - def formatDatetime(self, dt, format='ISO', timeZone=None): - ''' - @param dt: datetime object - @param format: format string e.g. '%H%M %p %Z %a %e %b %Y' - @param zone: time zone e.g.'CST7CDT'. If None use UTC - @return datetime formatted with time zone e.g. 
'1400 PM CST Mon 12 Feb 2011' - ''' - import datetime - from dateutil import tz - # TODO REMOVE THIS BLOCK AS PART OF THE JSON REFACTOR. - if type(dt) is float: - dt = datetime.fromtimestamp(dt / 1000) - - from_zone = tz.tzutc() - new_time = dt.replace(tzinfo=from_zone) - if timeZone is not None: - to_zone = tz.gettz(timeZone) - new_time = new_time.astimezone(to_zone) - if format == 'ISO': - return new_time.isoformat() - else: - return new_time.strftime(format) - - def flush(self): - ''' Flush the print buffer ''' - os.sys.__stdout__.flush() - - def makeUGCString(self, ugcs): - ''' - Create the UGC string for product / segment headers. - - Examples: - FLZ173- - FLZ066>068-071-072-063-069-073>075-168-172>174-070- - ''' - # if nothing in the list, return empty string - if len(ugcs) == 0: - return '' - ugcList = copy.deepcopy(ugcs) - # Remove any blank UGC lines from the list - listsize=len(ugcList) - j=0 - while j < listsize: - if ugcList[j] == '': - del ugcList[j] - j=j+1 - - # Set up state variables and process initialize ugcStr with first ugc - # in ugcList - inSeq = 0 - ugcStr = ugcList[0] - curState = ugcStr[0:3] - lastNum = int(ugcList[0][3:]) - firstNum = 0 - lastUgc = ugcList[0] - - # By initializing properly we don't need the first item - ugcList.remove(ugcList[0]) - - for ugc in ugcList: - ugcState = ugc[:3] - ugcNumStr = ugc[3:] - num = int(ugcNumStr) - if ugcState == curState: - if num == lastNum + 1: - if inSeq > 0: - # Replace the last ugcNumStr in sequence with the - # current ugcNumStr - # e.g. 
062>063 becomes 062>064 - ugcStr = ugcStr[:len(ugcStr)-3] + ugcNumStr - inSeq += 1 - else: - ugcStr += '>' + ugcNumStr - inSeq = 1 - else: # num != lastNum + 1 - ugcStr = self.checkLastArrow(inSeq, ugcStr) - inSeq = 0 # reset sequence when number not in sequence - ugcStr += '-' + ugcNumStr - else: - ugcStr = self.checkLastArrow(inSeq, ugcStr) - ugcStr += '-' + ugc - curState = ugcState - inSeq = 0 #reset sequence when switching states - lastNum = num - lastUgc = ugc - - # May have to clean up last arrow at the end - ugcStr = self.checkLastArrow(inSeq, ugcStr) - return ugcStr - - def checkLastArrow(self, inSeq, ugcStr): - ''' - Part of formatUGCs - ''' - if inSeq == 1: - # Change the last arrow to - since - # we only had 2 in the sequence e.g. - # 062>063 should be 062-063 - arrowIndex = ugcStr.rfind('>') - if arrowIndex >= 0: - ugcStr = ugcStr[:arrowIndex] + '-' + ugcStr[arrowIndex+1:] - return ugcStr - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# GFE Training Guide->GFE Text Products User Guide section of the GFE Online +# Help for guidance on creating a new text product. 
+## + +# Version 2017.10.04-0 + +import GenericHazards +import JsonSupport +import LocalizationSupport +import string, time, os, errno, re, types, copy, collections +import LogStream, ModuleAccessor, SampleAnalysis, EditAreaUtils +import math +import pprint + +from AbsTime import * +from StartupDialog import IFPDialog as Dialog +from LockingFile import File + +from com.raytheon.viz.core.mode import CAVEMode + +AWIPS_ENVIRON = "AWIPS2" + +class TextProduct(GenericHazards.TextProduct): + Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) + + def __init__(self): + GenericHazards.TextProduct.__init__(self) + self._pp = pprint.PrettyPrinter() + + ############################################################### + ### Hazards and Additional Hazards + ### allowedHazards is used for VTEC records and summary + ### headlines + ### allowedHeadlines are additional hazards reported in + ### certain sections + ############################################################### + + ############################################################### + ### Initialization + ############################################################### + + ############################################################### + ### Analysis Lists, SampleAnalysis Overrides and other + ### analysis related methods + ############################################################### + + ############################################################### + ### Product Parts Implementation + ############################################################### + + ############################################################### + ### Product Dictionary methods for creating, populating and + ### formatting the product dictionary + ############################################################### + + ############################################################### + ### Sampling and Statistics related methods + ############################################################### + + 
############################################################### + ### Area, Zone and Segment related methods + ############################################################### + + ############################################################### + ### Hazards related methods + ############################################################### + + ############################################################### + ### Time related methods + ############################################################### + + ############################################################### + ### Storm Information and TCP related methods + ############################################################### + + ############################################################### + ### Advisory related methods + ############################################################### + + ############################################################### + ### GUI related methods + ############################################################### + + + ############################################################### + ### Hazards and Additional Hazards + + def allowedHazards(self): + tropicalActions = ["NEW", "EXA", "CAN", "CON"] + return [ + ('HU.W',tropicalActions,'Hurricane'), + ('HU.A',tropicalActions,'Hurricane'), + ('SS.W',tropicalActions,'Surge'), + ('SS.A',tropicalActions,'Surge'), + ('TR.W',tropicalActions,'Tropical'), + ('TR.A',tropicalActions,'Tropical'), + ] + + def allowedHeadlines(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + return [ + ('FF.A', allActions, 'Flood'), # FLASH FLOOD WATCH + ('FA.A', allActions, 'Flood'), # FLOOD WATCH + ('TO.A', allActions, 'Convective'), # TORNADO WATCH + ] + + ############################################################### + ### Initialization + + def _initializeVariables(self, argDict): + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + self._argDict = argDict + self._productID = 
self._pil[0:3].upper() + + argDict["definition"] = self._definition + + self._initializeTimeVariables(argDict) + + self._initializeHazardsTable(argDict) + + error = self._initializeStormInformation() + if error is not None: + return error + + # Set up the areaDictionary for all to use + accessor = ModuleAccessor.ModuleAccessor() + self._areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") + + self._tpc = TextProductCommon() + self._tpc.setUp(self._areaDict) + + return None + + ############################################################### + ### Analysis Lists, SampleAnalysis Overrides and other + ### analysis related methods + + def moderated_dict(self, parmHisto, timeRange, componentName): + """ + Specifies the lower percentages and upper percentages of + data to be thrown out for moderated stats. + """ + # COMMENT: This dictionary defines the low and high limit at which + # outliers will be removed when calculating moderated stats. + # By convention the first value listed is the percentage + # allowed for low values and second the percentage allowed + # for high values. 
+ + # Get Baseline thresholds + dict = SampleAnalysis.SampleAnalysis.moderated_dict( + self, parmHisto, timeRange, componentName) + + # Change thresholds + dict["Wind"] = (0, 15) + dict["WindGust"] = (0, 15) + dict["pws34int"] = (0, 5) + dict["pws64int"] = (0, 5) + dict["pwsD34"] = (0, 5) + dict["pwsN34"] = (0, 5) + dict["pwsD64"] = (0, 5) + dict["pwsN64"] = (0, 5) + dict["InundationMax"] = (0, 3) + dict["InundationTiming"] = (0, 3) + return dict + + ############################################################### + ### Product Parts Implementation + + ################# Product Level + + def _wmoHeader(self, productDict, productSegmentGroup, arguments=None): + headerDict = collections.OrderedDict() + headerDict['TTAAii'] = self._wmoID + headerDict['originatingOffice'] = self._fullStationID + headerDict['productID'] = self._productID + headerDict['siteID'] = self._site + headerDict['fullStationID'] = self._fullStationID + headerDict['ddhhmmTime'] = self._ddhhmmTime + productDict['wmoHeader'] = headerDict + + def _productHeader(self, productDict, productSegmentGroup, arguments=None): + headerDict = dict() + headerDict['disclaimer'] = 'This XML wrapped text product should be considered COMPLETELY EXPERIMENTAL. The National Weather Service currently makes NO GUARANTEE WHATSOEVER that this product will continue to be supplied without interruption. The format of this product MAY CHANGE AT ANY TIME without notice.' 
+ headerDict['cityState'] = self._wfoCityState + headerDict['stormNumber'] = self._getStormNumberStringFromTCP() + # Modify the product name to indicate test or experimental mode if necessary + self._productName = self.checkTestMode( + self._argDict, productSegmentGroup.get('productName') + self._areaName) + headerDict['productName'] = self._productName + headerDict['stormType'] = self._getStormTypeFromTCP() + headerDict['stormName'] = self._getStormNameFromTCP() + headerDict['advisoryType'] = self._getAdvisoryTypeFromTCP() + headerDict['advisoryNumber'] = self._getAdvisoryNumberStringFromTCP() + headerDict['issuedByString'] = self.getIssuedByString() + headerDict['issuanceTimeDate'] = self._timeLabel + productDict['productHeader'] = headerDict + + ################# Mixed Level + + def _ugcHeader(self, productDict, productSegmentGroup, productSegment): + # The UGC header is the formatted list of UGCs along with an expire time + # For example: 'FLZ066>068-071-072-063-069-073>075-168-172>174-070-230515-' + ugcHeader = self._tpc.formatUGCs(self._ugcs, self._expireTime) + productDict['ugcHeader'] = ugcHeader + + ################# Product Parts Processing + + def _processProductParts(self, productGenerator, productDict, productSegmentGroup, productParts): + ''' + @param productDict + @param productSegmentGroup + @param productParts + @return product dictionary created from the product parts + + Note that this method is called recursively such that a product part is allowed to be + a set of subParts specified as follows: + (subPartLabel, list of productParts for each subPart) + For example, we have + ('segments', [list of [segment product parts]]) + + # Product Dictionary + # Contains information for all formats e.g. 
+ # partner XML, CAP, and Legacy text + ''' + + + if type(productParts) is dict: + arguments = productParts.get('arguments') + partsList = productParts.get('partsList') + else: + partsList = productParts + + removedParts = [] + for part in partsList: + if type(part) is tuple: + # e.g. subPart == 'segments', subPartsLists == list of parts for each segment + subPart, subPartsLists = part + subParts = [] + for subPartsList in subPartsLists: + subDict = collections.OrderedDict() + self._processProductParts(productGenerator, subDict, productSegmentGroup, subPartsList) + subParts.append(subDict) + # e.g. productDict['segments'] = segment dictionaries + productDict[subPart] = subParts + else: + if part not in self._noOpParts(): + execString = 'productGenerator._'+part+'(productDict, productSegmentGroup, arguments)' + exec(execString) + if part not in productDict: + removedParts.append(part) + + for part in removedParts: + self.debug_print("in _processProductParts - " + + "Removing product part = %s" % (part), 1) + partsList.remove(part) + + ############################################################### + ### Product Dictionary methods for creating, populating and + ### formatting the product dictionary + + def _createProductDictionary(self, productPartsGenerator, segments, areProductPartsSegmented): + # Create the product dictionary + productSegmentGroup = self._groupSegments(productPartsGenerator, + segments, + areProductPartsSegmented) + + productDict = self._initializeProductDictionary(productSegmentGroup) + productParts = productSegmentGroup.get('productParts') + productDict['productParts'] = productParts + self._processProductParts(self, productDict, productSegmentGroup, productParts) + + return productDict + + def _initializeProductDictionary(self, productSegmentGroup): + ''' + Set up the Product Dictionary for the given Product consisting of a + group of segments. + + Fill in the dictionary information for the product header. 
+ + @param productSegmentGroup: holds meta information about the product + @return initialized product dictionary + + *********** + Example segmented product: + + WGUS63 KBOU 080400 + FFABOU + + URGENT - IMMEDIATE BROADCAST REQUESTED + FLOOD WATCH + NATIONAL WEATHER SERVICE DENVER CO + 400 AM GMT TUE FEB 8 2011 + + Overview Headline + Overview + + *********** + Example non-segmented product: + WGUS63 KBOU 080400 + FFWBOU + + ''' + if self._areaName != '': + self._areaName = ' for ' + self._areaName + '\n' + + # Fill in product dictionary information + productDict = collections.OrderedDict() + productDict['productID'] = self._productID + return productDict + + def _formatProductDictionary(self, formatterClass, productDict): + formatter = formatterClass(self) + product = formatter.execute(productDict) + + return product + + ############################################################### + ### Sampling and Statistics related methods + + def _getStatValue(self, statDict, element, method=None, dataType=None): + self.debug_print("In _getStatValue looking for '%s'" % (element), 1) + self.debug_print("method = %s" % (pprint.pformat(method)), 1) + self.debug_print("dataType = %s" % (pprint.pformat(dataType)), 1) + + stats = statDict.get(element, None) + self.debug_print("stats = %s" % (pprint.pformat(stats)), 1) + + if stats is None: return None + if type(stats) is list: + stats = stats[0] + stats, tr = stats + if dataType==self.VECTOR(): + stats, dir = stats + + value = self.getValue(stats, method) + self.debug_print("value = %s" % (pprint.pformat(value)), 1) + return value + + # Define a class to handle missing statistics + class StatisticsException(Exception): + pass + + ############################################################### + ### Area, Zone and Segment related methods + + def _allAreas(self): + return self._inlandAreas() + self._coastalAreas() + + def _groupSegments(self, productPartsGenerator, segments, areProductPartsSegmented): + ''' + Group the segments 
into the products. The TCV and HLS product generators + only create a single product each so there is only one product segment group. + ''' + + segment_vtecRecords_tuples = self._getSegmentVTECRecordsTuples(segments) + + productSegmentGroup = { + 'productID' : self._productID, + 'productName': self._productName, + 'geoType': 'area', + 'vtecEngine': self._hazardsTable, + 'mapType': 'publicZones', + 'segmented': areProductPartsSegmented, + 'productParts': productPartsGenerator(segment_vtecRecords_tuples), + } + + return productSegmentGroup + + def _getSegmentVTECRecordsTuples(self, segments): + segment_vtecRecords_tuples = [] + for segment in segments: + vtecRecords = self._getVtecRecords(segment) + self.debug_print("vtecRecords for %s =\n\n%s\n" % (segment, self._pp.pformat(vtecRecords))) + segment_vtecRecords_tuples.append((segment, vtecRecords)) + + return segment_vtecRecords_tuples + + def _computeIntersectAreas(self, editAreas, argDict): + editAreaUtils = EditAreaUtils.EditAreaUtils() + editAreaUtils.setUp(None, argDict) + surgeEditArea = editAreaUtils.getEditArea("StormSurgeWW_EditArea", argDict) + intersectAreas = [] + for (_, editAreaLabel) in editAreas: + editArea = editAreaUtils.getEditArea(editAreaLabel, argDict) + intersectAreaLabel = "intersect_"+editAreaLabel + intersectArea = editAreaUtils.intersectAreas(intersectAreaLabel, editArea, surgeEditArea) + grid = intersectArea.getGrid() + if grid.isAnyBitsSet(): # Make sure the intersection isn't empty + editAreaUtils.saveEditAreas([intersectArea]) # Register the new edit area with the system + intersectAreas.append((intersectAreaLabel, intersectAreaLabel)) + + return intersectAreas + + ############################################################### + ### Hazards related methods + + def _initializeHazardsTable(self, argDict): + import VTECMessageType + vtecMode = VTECMessageType.getVTECMessageType(self._productID) + argDict["vtecMode"] = vtecMode + + self._setVTECActiveTable(argDict) + + # Need to check 
hazards against all edit areas in the CWA MAOR + argDict["combinations"]= [(self._allAreas(),"Region1")] + + self._hazardsTable = self._getHazardsTable(argDict, self.filterMethod) + argDict["hazards"] = self._hazardsTable + + def _getHazardsTable(self, argDict, filterMethod): + # Set up edit areas as list of lists + dfEditAreas = argDict["combinations"] + editAreas = [] + for area, label in dfEditAreas: + if type(area) is list: + editAreas.append(area) + elif type(area) is tuple: #LatLon + editAreas.append([self.__getLatLonAreaName(area)]) + else: + editAreas.append([area]) + # Get Product ID and other info for HazardsTable + stationID4 = self._fullStationID + productCategory = self._productID + definition = argDict['definition'] + sampleThreshold = definition.get("hazardSamplingThreshold", (10, None)) + # Process the hazards + accurateCities = definition.get('accurateCities', 0) + import HazardsTable + hazards = HazardsTable.HazardsTable( + argDict["ifpClient"], editAreas, productCategory, filterMethod, + argDict["databaseID"], + stationID4, argDict["vtecActiveTable"], argDict["vtecMode"], sampleThreshold, + creationTime=argDict["creationTime"], accurateCities=accurateCities, + cityEditAreas=[], dataMgr=argDict['dataMgr']) + return hazards + + def _ignoreActions(self): + # Ignore hazards with these action codes in the overview headlines + # NOTE: the VTEC and segments will still include them correctly. 
+ return ['CAN', 'UPG'] + + def _setVTECActiveTable(self, argDict): + gfeMode = CAVEMode.getMode().name() + + self.debug_print("*" *100, 1) + self.debug_print("gfeMode = '%s'" % (gfeMode), 1) + self.debug_print("*" *100, 1) + + if gfeMode == "PRACTICE": + argDict["vtecActiveTable"] = "PRACTICE" + else: + argDict["vtecActiveTable"] = "active" + + def _getVtecRecords(self, segment): + vtecRecords = self._hazardsTable.getHazardList(segment) + # Tropical hazards shouldn't ever have EXT and EXB actions since + # they are "until further notice" + for record in vtecRecords: + if record['act'] == "EXT": + record['act'] = "CON" + elif record['act'] == "EXB": + record['act'] = "EXA" + + return vtecRecords + + def _getAllowedHazardList(self, allowedHazardList=None): + # Get the list of allowed phenSigs (ie. "HU.W") + if allowedHazardList is None: + allowedHazardList = self.allowedHazards() + hazardList = [] + for h in allowedHazardList: + if type(h) is tuple: + hazardList.append(h[0]) + else: + hazardList.append(h) + return hazardList + + def _altFilterMethod(self, hazardTable, allowedHazardsOnly=False): + # Remove hazards not in allowedHeadlines list + allowedHazardList = self._getAllowedHazardList(self.allowedHeadlines()) + return self._filterHazards(hazardTable, allowedHazardList, + allowedHazardsOnly) + + def _filterHazards(self, hazardTable, allowedHazardList, + allowedHazardsOnly=False): + newTable = [] + hazStr = "" + for i in range(len(hazardTable)): + if hazardTable[i]['sig'] != "": # VTEC + hazStr = hazardTable[i]['phen'] + "." 
+ hazardTable[i]['sig'] + else: #non-VTEC + hazStr = hazardTable[i]['phen'] + + if hazStr in allowedHazardList: + newTable.append(hazardTable[i]) + if allowedHazardsOnly: + return newTable + # get a raw list of unique edit areas + zoneList = [] + for t in newTable: + if t['id'] not in zoneList: + zoneList.append(t['id']) + for zone in zoneList: + # Remove lower priority hazards of the same type + self.filterZoneHazards(zone, newTable) + return newTable + + def _getAdditionalHazards(self): + argDict = self._argDict + argDict['definition'] = self._definition + altHazards = self._getHazardsTable(argDict, self._altFilterMethod) + conTable = altHazards.consolidatedTableByID() + + # Consolidate across action codes + hazDict = {} + for hazard in conTable: + hdln=hazard['hdln'] + phen=hazard['phen'] + sig=hazard['sig'] + act=hazard['act'] + if act in self._ignoreActions(): + continue + for area in hazard['id']: + hazDict.setdefault((hdln, phen, sig), []).append(area) + + self.debug_print("hazDict = %s" % (self._pp.pformat(hazDict)), 1) + hazardHdlns=[] + huAreas = [] + self.debug_print("Additional Hazard Headlines", 1) + for key in list(hazDict.keys()): + hdln, phen, sig = key + huAreas = huAreas + hazDict[key] + hazardHdln = ((hdln, "NEW", phen,sig), hazDict[key], [],[],[]) + self.debug_print(" %s" % (self._pp.pformat(hazardHdln)), 1) + self.debug_print(" %s" % (self._pp.pformat(hazDict[key])), 1) + hazardHdlns.append(hazardHdln) + return hazardHdlns, huAreas + + def _checkHazard(self, hazardHdlns, phenSigList, checkAreaTypes=None, + checkAreas=None, returnList=False, mode="any", includeCAN=False): + # Given a list of hazards in the form + # (key, landList, marineList, coastalList, inlandList) + # where key is (hdln, act, phen, sig) and the lists show which areas + # contain the hazard + # If mode == "any": + # Check to see if any of the given phenSigList = [(phen, sig), (phen, sig)] + # are found + # If mode == "all": + # Check to see if all of the given phenSigList are 
found + # IF checkAreaTypes is given, then check against that particular area type(s) i.e. + # "land", "marine", etc. + # IF checkAreas is given, only return areas that are in that list + # IF returnList=True, returns a list of (key, areas) that meet the criteria + # IF includeCAN is True then CAN hazards will be included as well. + # Otherwise, they are ignored. + # + # E.g. hdlnList = self._checkHazard(hazardHdlns, [("FA","W")], returnList=True) + self.debug_print("_checkHazard hazardHdlns is %s" % (self._pp.pformat(hazardHdlns)), 1) + self.debug_print("_checkHazard phenSigList is %s" % (self._pp.pformat(phenSigList)), 1) + chosen = [] + for key, landList, marineList, coastalList, inlandList in hazardHdlns: + + # We do not want to consider marine hazards in this product + hazAreas = landList + hazValue = (key, hazAreas) + self.debug_print("hazValue is %s" % (repr(hazValue)), 1) + hdln, act, phen, sig = key + if not includeCAN and act == "CAN": + continue + for checkPhen, checkSig in phenSigList: + self.debug_print("checkPhen is %s" % (checkPhen), 1) + self.debug_print("checkSig is %s" % (checkSig), 1) + if phen == checkPhen and sig == checkSig: + if checkAreaTypes is not None: + # Check for land, marine, etc. 
+ for checkAreaType in checkAreaTypes: + exec("testList = " + checkAreaType + "List") + self.debug_print("testList is %s" % (testList), 1) + if testList != []: + chosen.append(hazValue) + elif checkAreas is not None: + acceptedAreas=[] + for hazArea in hazAreas: + if hazArea in checkAreas: + acceptedAreas.append(hazArea) + if acceptedAreas!=[]: + chosen.append((key, acceptedAreas)) + else: + chosen.append(hazValue) + if not returnList and chosen!=[]: break + + self.debug_print("In _checkHazard chosen = %s" % + (self._pp.pformat(chosen)), 1) + if not returnList: + return chosen!=[] + return chosen + + ############################################################### + ### Time related methods + + def _initializeTimeVariables(self, argDict): + argDict['creationTime'] = int(time.time()/60)*60 + self._issueTime_secs = argDict['creationTime'] + self._issueTime_ms = self._issueTime_secs * 1000 # in milliseconds + + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._purgeHours = self._purgeTime + self._expireTime = self._issueTime_secs + self._purgeHours*3600 + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + + def _determineTimeRanges(self, argDict): + # Create a 120 hour time range starting from the issuanceHour and broken into 1-hour chunks + # Used for Wind Section + startTime1Hour = self._calculateStartTime(self._issueTime_secs, resolution=1) + self._timeRange1Hour = self.makeTimeRange(startTime1Hour, startTime1Hour+120*3600) + self._timeRangeList1Hour = self._createTimeRangeList(self._timeRange1Hour, hours=1) + + # Create a 120 hour time range starting from the issuanceHour and broken into 3-hour chunks + # Used for Flooding Rain and Tornado Sections + startTime3Hour = self._calculateStartTime(self._issueTime_secs, resolution=3) + timeRange3Hour = self.makeTimeRange(startTime3Hour, startTime3Hour+120*3600) + self._timeRangeList3Hour = 
self._createTimeRangeList(timeRange3Hour, hours=3) + + # Create a time range to look from the current time back 12 hours. + # We will use this to determine if we need to use "additional" + # wording with rainfall for the TCV + self._extraSampleTimeRange = self.makeTimeRange(startTime3Hour-12*3600, startTime3Hour) + + # Create a 120 hour time range starting from the issuanceHour and broken into 6-hour chunks + # Used for Storm Surge Section + startTime6Hour = self._calculateStartTime(self._issueTime_secs, resolution=6) + timeRange6Hour = self.makeTimeRange(startTime6Hour, startTime6Hour+120*3600) + self._timeRangeList6Hour = self._createTimeRangeList(timeRange6Hour, hours=6) + # Create a list of 10 periods (GMT time) corresponding to 6AM and 6PM times (local time) + self._createPeriodList(startTime1Hour) + + def _createTimeRangeList(self, timeRange, hours): + subRanges = self.divideRange(timeRange, hours) + trList = [] + for index, tr in enumerate(subRanges): + self.debug_print("In _createTimeRangeList (%s hour chunks) -> tr = %s" % + (hours, self._pp.pformat(tr)), 1) + trList.append((tr, "Label")) + + return trList + + def _createPeriodList(self, startTime): + # Create the 10 periods + self._periodList = [] + + localtime = time.localtime(startTime.unixTime()) + + # Determine the number of hours to the next 6AM or 6PM period + if localtime.tm_hour < 6: + periodLength = 6 - localtime.tm_hour + elif localtime.tm_hour >= 6 and localtime.tm_hour < 18: + periodLength = 18 - localtime.tm_hour + else: + periodLength = 30 - localtime.tm_hour + + # Don't allow the first period to be less than 3 hours long; + # instead just start with the next period + if periodLength < 3: + periodStart = startTime + periodLength*3600 + period = self.makeTimeRange(periodStart, periodStart+12*3600) + else: + period = self.makeTimeRange(startTime, startTime+periodLength*3600) + + self._periodList.append(period) + + for i in range(1,10): + startTime = period.endTime() # Start where the last 
period leaves off + period = self.makeTimeRange(startTime, startTime+12*3600) + self._periodList.append(period) + + self.debug_print("final periodList =\n\n%s\n" % + (self._pp.pformat(self._periodList)), 1) + + def _calculateStartTime(self, localCreationTime, resolution): + resolution = resolution*3600 + + self.debug_print("In _calculateStartTime incoming res %d localCreationTime = %d" % + (resolution, localCreationTime), 1) + + # Determine how far along we are in a given time block + adjust = localCreationTime % resolution + + # If we are less than halfway though a block we would want. + # If this is for surge (6 hour resolution aka 21600 seconds) and we + # aren't within 30 minutes (aka 1800 seconds) of the next block, go + # back to the start of the current block + if adjust < resolution / 2 or (resolution == 21600 and (resolution - adjust) > 1800): + adjust *= -1 # to move back to beginning of the block + else: + adjust = resolution - adjust # go to next block + + self.debug_print("In _calculateStartTime %d adjust = %d" % + (resolution, adjust), 1) + + startTime = AbsTime(localCreationTime + adjust) + + return startTime + + def _formatPeriod(self, period, wholePeriod=False, shiftToLocal=True, useEndTime=False, + resolution=3): + # Format period (a timeRange) resulting in + # DAY + MORNING / AFTERNOON / EVENING / OVERNIGHT. + # If wholePeriod, format FROM ... TO... 
+ + self.debug_print("Format period wholePeriod = %s, period = %s, useEndTime =%s" % + (str(wholePeriod), str(period), str(useEndTime)), 1) + if period is None: + return "" + if useEndTime: + startTime = period.endTime() + else: + startTime = period.startTime() + result = self._getTimeDesc(startTime, resolution, shiftToLocal) + self.debug_print("_getTimeDesc result = '%s'" % (result), 1) + if wholePeriod: + endResult = self._getTimeDesc(period.endTime(), resolution, shiftToLocal) + self.debug_print("_getTimeDesc endResult = '%s'" % (endResult), 1) + if result != endResult: + result=result + " TO "+ endResult + return result + + def _getTimeDesc(self, startTime, resolution=3, shiftToLocal=True): + # Create phrase such as Tuesday morning + # Handle today/tonight and "this" morning/afternoon/etc.. + # + self.debug_print("\n\n**************Formatting Period for GMT startTime %s" % + (repr(startTime)), 1) + labels = self.Labels()["SimpleWorded"] + currentTime = self._timeRange1Hour.startTime() + self.debug_print(" currentTime = %s" % (repr(currentTime)), 1) + if shiftToLocal: + currentLocalTime, shift = self.determineTimeShift() + startTime = startTime + shift + currentTime = currentTime + shift + self.debug_print("shift = %s shifted start = %s current = %s" % + (shift/3600, startTime, currentTime), 1) + hour = startTime.hour + prevDay = False + prevDay, partOfDay = self._getPartOfDay(hour, resolution) +# if prevDay: +# startTime = startTime - 24*3600 + todayFlag = currentTime.day == startTime.day + if todayFlag: + if partOfDay.lower().find("midnight")>0: todayWord = "tonight" + else: todayWord = "this" + weekday = todayWord + else: + weekday = labels["Weekday"][startTime.weekday()] + if partOfDay.find("") >= 0: + result = partOfDay.replace('', weekday) + else: + result = weekday + " " + partOfDay + self.debug_print("Result = '%s'" % (result), 1) + return result + + def _getPartOfDay(self, hour, resolution): + prevDay = False + if resolution == 3: + if hour < 3: + 
prevDay = True + partOfDay = "early morning" +# partOfDay = "after midnight" + elif hour < 6: + partOfDay = "early morning" + elif hour < 9: + partOfDay = "morning" + elif hour < 12: + partOfDay = "late morning" + elif hour < 15: + partOfDay = "early afternoon" + elif hour < 18: + partOfDay = "late afternoon" + elif hour < 21: + partOfDay = "early evening" + else: + partOfDay = "late evening" + else: + if hour < 6: + prevDay = True +# partOfDay = "after midnight" + partOfDay = "early morning" + elif hour < 12: partOfDay = "morning" + elif hour < 18: partOfDay = "afternoon" + else: partOfDay = "evening" + return prevDay, partOfDay + + ############################################################### + ### Storm Information and TCP related methods + + # These variables were previously all set to None + def _initializeStormInformation(self): + self._stormType = None + self._stormName = None + self._advisoryType = None + self._advisoryNumber = None + self._stormNumber = None # This is an 8-digit string like "AL092016" + self._stormID = None # This is a 2-digit string embedded in the storm number ("09" in "AL092016") + + if self._useTestTCP(): + self._TCP = self._testTCP() + elif "Enter PIL below" in self._StormInfo: + if len(self._StormInfo_entry.strip()) == 0: + return "You need to enter the PIL" + else: + # Ensure PIL is in UPPERCASE + self._TCP = self.getPreviousProduct(self._StormInfo_entry.strip().upper()) + else: + self._TCP = self.getPreviousProduct(self._StormInfo) + + self._parseTCP(self._TCP) + + return None + + def _parseTCP(self, tcp): + # This pattern will handle multiple word names + # (including certain special characters). + # This is for the NHC format. 
+ mndSearch = re.search("(?im)^.*?(?PHURRICANE|" + + "(POTENTIAL|SUB|POST.?)" + + "?TROPICAL (STORM|DEPRESSION|CYCLONE)|" + + "(SUPER )?TYPHOON|REMNANTS OF) " + + "(?P[A-Z0-9\-\(\) ]+?)" + + "(?PSPECIAL |INTERMEDIATE )" + + "?ADVISORY NUMBER[ ]+" + + "(?P[A-Z0-9]+)[ ]*", tcp) + + if mndSearch is not None: + self._stormType = mndSearch.group("stormType").strip() + self._stormName = mndSearch.group("stormName").strip() + advisoryType = mndSearch.group("advisoryType") + if advisoryType is not None: + self._advisoryType = advisoryType.strip() + self._advisoryNumber = mndSearch.group("advisoryNumber").strip() + + senderSearch = re.search("(?im)^(?P(NWS (National |Central Pacific )?Hurricane Center|" + + "National Weather Service).*?)$", tcp) + + if senderSearch is not None: + sender = senderSearch.group("sender") + senderParts = sender.split(" ") + # If the storm number is mentioned, it will be the last "word" of the line + stormNumber = senderParts[-1] + if len(stormNumber) == 8 and \ + stormNumber[0:2].isalpha() and \ + stormNumber[2:].isdigit(): + self._stormNumber = stormNumber.strip() + self._stormID = stormNumber[2:4] + + def _getStormTypeFromTCP(self): + return self._stormType + + def _getStormNameFromTCP(self): + return self._stormName + + def _getAdvisoryTypeFromTCP(self): + return self._advisoryType + + def _getAdvisoryNumberStringFromTCP(self): + return self._advisoryNumber + + def _getStormNumberStringFromTCP(self): + return self._stormNumber + + def _getStormIDStringFromTCP(self): + return self._stormID + + ## Used for testing and debugging + def _useTestTCP(self): + #return True + return False + + def _testTCP(self): + return \ +"""337 +WTNT34 KNHC 250256 +TCPAT4 + +BULLETIN +TROPICAL STORM ISAAC ADVISORY NUMBER 16 +NWS NATIONAL HURRICANE CENTER MIAMI FL AL092012 +1100 PM EDT FRI AUG 24 2012 + +...ISAAC GETTING BETTER ORGANIZED AS IT MOVES NORTHWESTWARD TOWARD +HAITI... 
+ + +SUMMARY OF 1100 PM EDT...0300 UTC...INFORMATION +----------------------------------------------- +LOCATION...17.7N 72.5W +ABOUT 65 MI...100 KM SSW OF PORT AU PRINCE HAITI +ABOUT 245 MI...395 KM SE OF GUANTANAMO CUBA +MAXIMUM SUSTAINED WINDS...70 MPH...110 KM/H +PRESENT MOVEMENT...NW OR 310 DEGREES AT 14 MPH...22 KM/H +MINIMUM CENTRAL PRESSURE...990 MB...29.23 INCHES + + +WATCHES AND WARNINGS +-------------------- +CHANGES WITH THIS ADVISORY... + +A HURRICANE WATCH AND A TROPICAL STORM WARNING HAVE BEEN ISSUED FOR +ALL OF THE FLORIDA KEYS...INCLUDING FLORIDA BAY...AND FOR THE COAST +OF THE SOUTHERN FLORIDA PENINSULA FROM OCEAN REEF ON THE EAST COAST +WESTWARD TO BONITA BEACH ON THE WEST COAST. + +A TROPICAL STORM WARNING HAS BEEN ISSUED FOR THE SOUTHEAST FLORIDA +COAST FROM NORTH OF OCEAN REEF NORTHWARD TO JUPITER INLET...AND FOR +LAKE OKEECHOBEE. + +THE GOVERNMENT OF THE BAHAMAS HAS ISSUED A TROPICAL STORM WARNING +FOR ALL OF THE NORTHWESTERN BAHAMAS. + +A TROPICAL STORM WATCH HAS BEEN ISSUED FOR THE EAST-CENTRAL FLORIDA +COAST FROM NORTH OF JUPITER INLET TO SEBASTIAN INLET. + +THE CAYMAN ISLANDS METEOROLOGICAL SERVICE HAS ISSUED A TROPICAL +STORM WATCH FOR THE CAYMAN ISLANDS. + +SUMMARY OF WATCHES AND WARNINGS IN EFFECT... + +A HURRICANE WATCH IS IN EFFECT FOR... +* HAITI +* FLORIDA KEYS INCLUDING THE DRY TORTUGAS +* FLORIDA BAY +* THE FLORIDA EAST COAST FROM OCEAN REEF SOUTHWARD +* THE FLORIDA WEST COAST FROM BONITA BEACH SOUTHWARD + +A TROPICAL STORM WARNING IS IN EFFECT FOR... +* DOMINICAN REPUBLIC +* HAITI +* CUBAN PROVINCES OF CIEGO DE AVILA...SANCTI SPIRITUS...VILLA +CLARA...CAMAGUEY...LAS TUNAS...GRANMA...HOLGUIN...SANTIAGO DE +CUBA...AND GUANTANAMO +* THE BAHAMAS +* TURKS AND CAICOS ISLANDS +* THE FLORIDA KEYS INCLUDING THE DRY TORTUGAS +* THE FLORIDA EAST COAST FROM JUPITER INLET SOUTHWARD +* THE FLORIDA WEST COAST FROM BONITA BEACH SOUTHWARD +* FLORIDA BAY AND LAKE OKEECHOBEE + +A TROPICAL STORM WATCH IS IN EFFECT FOR... 
+* CUBAN PROVINCES OF MATANZAS AND CIENFUEGOS +* JAMAICA +* THE FLORIDA EAST COAST NORTH OF JUPITER INLET TO SEBASTIAN INLET + +A HURRICANE WATCH MEANS THAT HURRICANE CONDITIONS ARE POSSIBLE +WITHIN THE WATCH AREA...IN THIS CASE WITHIN THE NEXT 24 TO 36 HOURS. + +A TROPICAL STORM WARNING MEANS THAT TROPICAL STORM CONDITIONS ARE +EXPECTED SOMEWHERE WITHIN THE WARNING AREA WITHIN 36 HOURS. + +A TROPICAL STORM WATCH MEANS THAT TROPICAL STORM CONDITIONS ARE +POSSIBLE WITHIN THE WATCH AREA...GENERALLY WITHIN 48 HOURS. + +INTERESTS IN THE REMAINDER OF CUBA AND THE REMAINDER OF THE FLORIDA +PENINSULA SHOULD MONITOR THE PROGRESS OF ISAAC. + +FOR STORM INFORMATION SPECIFIC TO YOUR AREA IN THE UNITED +STATES...INCLUDING POSSIBLE INLAND WATCHES AND WARNINGS...PLEASE +MONITOR PRODUCTS ISSUED BY YOUR LOCAL NATIONAL WEATHER SERVICE +FORECAST OFFICE. FOR STORM INFORMATION SPECIFIC TO YOUR AREA OUTSIDE +THE UNITED STATES...PLEASE MONITOR PRODUCTS ISSUED BY YOUR NATIONAL +METEOROLOGICAL SERVICE. + + +DISCUSSION AND 48-HOUR OUTLOOK +------------------------------ +AT 1100 PM EDT...0300 UTC...THE CENTER OF TROPICAL STORM ISAAC WAS +LOCATED NEAR LATITUDE 17.7 NORTH...LONGITUDE 72.5 WEST. ISAAC IS +MOVING TOWARD THE NORTHWEST NEAR 14 MPH...22 KM/H...BUT IS EXPECTED +TO RESUME A FASTER FORWARD SPEED TOWARD THE NORTHWEST TONIGHT +THROUGH SUNDAY. ON THE FORECAST TRACK...THE CENTER OF ISAAC SHOULD +MAKE LANDFALL IN HAITI TONIGHT...MOVE NEAR OR OVER SOUTHEASTERN +CUBA ON SATURDAY...MOVE NEAR OR OVER CENTRAL CUBA SATURDAY NIGHT... +AND APPROACH THE FLORIDA KEYS ON SUNDAY. + +MAXIMUM SUSTAINED WINDS ARE NEAR 70 MPH...110 KM/H...WITH HIGHER +GUSTS. LITTLE CHANGE IN STRENGTH IS LIKELY BEFORE LANDFALL... +FOLLOWED BY SOME WEAKENING AS THE CENTER CROSSES HAITI AND +SOUTHEASTERN CUBA. + +TROPICAL-STORM-FORCE WINDS EXTEND OUTWARD UP TO 230 MILES... +370 KM...MAINLY NORTHWEST AND NORTHEAST OF THE CENTER. + +ESTIMATED MINIMUM CENTRAL PRESSURE IS 990 MB...29.23 INCHES. 
+ + +HAZARDS AFFECTING LAND +---------------------- +RAINFALL...TOTAL RAINFALL ACCUMULATIONS OF 8 TO 12 INCHES...WITH +MAXIMUM AMOUNTS OF 20 INCHES...ARE POSSIBLE OVER HISPANIOLA. THESE +RAINS COULD CAUSE LIFE-THREATENING FLASH FLOODS AND MUD SLIDES. +TOTAL RAIN ACCUMULATIONS OF 4 TO 8 INCHES...WITH MAXIMUM AMOUNTS OF +12 INCHES...ARE POSSIBLE ACROSS JAMAICA...THE CENTRAL AND EASTERN +PORTIONS OF CUBA...THE FLORIDA KEYS AND THE SOUTHERN PENINSULA OF +FLORIDA. TOTAL RAIN ACCUMULATIONS OF 2 TO 4 INCHES ARE POSSIBLE +OVER THE CENTRAL AND SOUTHEASTERN BAHAMAS. + +WIND...TROPICAL STORM CONDITIONS ARE SPREADING OVER PORTIONS OF THE +DOMINICAN REPUBLIC AND HAITI...WITH HURRICANE CONDITIONS POSSIBLE IN +HAITI. TROPICAL STORM CONDITIONS ARE EXPECTED OVER THE SOUTHEASTERN +BAHAMAS AND THE TURKS AND CAICOS ISLANDS TONIGHT...ARE EXPECTED +OVER THE CENTRAL BAHAMAS BY SATURDAY OR SATURDAY NIGHT...AND ARE +EXPECTED OVER THE NORTHWESTERN BAHAMAS BY SUNDAY. TROPICAL STORM +CONDITIONS ARE EXPECTED OVER EASTERN CUBA BY TONIGHT AND OVER +CENTRAL CUBA BY SATURDAY OR SATURDAY NIGHT. TROPICAL STORM +CONDITIONS ARE EXPECTED TO REACH NORTHWESTERN CUBA AND THE +NORTHWESTERN BAHAMAS BY SATURDAY NIGHT OR SUNDAY...AND SOUTH +FLORIDA AND THE FLORIDA KEYS ON SUNDAY. HURRICANE CONDITIONS ARE +POSSIBLE OVER THE FLORIDA KEYS...FLORIDA BAY...AND THE SOUTHERNMOST +FLORIDA PENINSULA BY SUNDAY EVENING. + +STORM SURGE...THE COMBINATION OF A STORM SURGE AND THE TIDE WILL +CAUSE NORMALLY DRY AREAS NEAR THE COAST TO BE FLOODED BY RISING +WATERS. THE WATER COULD REACH THE FOLLOWING DEPTHS ABOVE GROUND +IF THE PEAK SURGE OCCURS AT THE TIME OF HIGH TIDE... + +SOUTH FLORIDA INCLUDING THE FLORIDA KEYS...2 TO 4 FT +HISPANIOLA AND EASTERN CUBA...1 TO 3 FT +THE BAHAMAS AND TURKS AND CAICOS...1 TO 3 FT + +THE DEEPEST WATER WILL OCCUR ALONG THE IMMEDIATE COAST IN AREAS OF +ONSHORE FLOW. 
SURGE-RELATED FLOODING DEPENDS ON THE RELATIVE TIMING +OF THE SURGE AND THE TIDAL CYCLE...AND CAN VARY GREATLY OVER SHORT +DISTANCES. FOR INFORMATION SPECIFIC TO YOUR AREA...PLEASE SEE +PRODUCTS ISSUED BY YOUR LOCAL WEATHER SERVICE OFFICE. NEAR THE +COAST...THE SURGE WILL BE ACCOMPANIED BY DANGEROUS WAVES. + +SURF...DANGEROUS SURF AND RIP CURRENT CONDITIONS WILL AFFECT PUERTO +RICO...HISPANIOLA...THE BAHAMAS...THE TURKS AND CAICOS...EASTERN +AND CENTRAL CUBA...AND THE EAST COAST OF FLORIDA AND THE FLORIDA +KEYS DURING THE NEXT COUPLE OF DAYS. PLEASE CONSULT PRODUCTS FROM +YOUR LOCAL WEATHER OFFICE FOR MORE INFORMATION. + + +NEXT ADVISORY +------------- +NEXT INTERMEDIATE ADVISORY...200 AM EDT. +NEXT COMPLETE ADVISORY...500 AM EDT. + +$$ +FORECASTER STEWART""" + + ############################################################### + ### Advisory related methods + + def _initializeAdvisories(self): + self._currentAdvisory = dict() + self._currentAdvisory['ZoneData'] = dict() + self._loadLastTwoAdvisories() + + def _synchronizeAdvisories(self): + # Retrieving a directory causes synching to occur. + # This code can throw an exception but don't catch it + # so that forecasters can be made aware of the issue. 
+ file = LocalizationSupport.getLocalizationFile(LocalizationSupport.CAVE_STATIC, + LocalizationSupport.SITE, self._site, + self._getAdvisoryPath()).getFile() + + return file + + def _getLocalAdvisoryDirectoryPath(self): + file = self._synchronizeAdvisories() + path = file.getPath() + + try: + os.makedirs(path) + except OSError as exception: + if exception.errno != errno.EEXIST: + raise + + return path + + def _getStormAdvisoryNames(self): + advisoryDirectoryPath = self._getLocalAdvisoryDirectoryPath() + filenames = os.listdir(advisoryDirectoryPath) + allAdvisories = [filename for filename in filenames if filename[-5:] == ".json"] + + self.debug_print("allAdvisories = %s" % (self._pp.pformat(allAdvisories))) + + stormAdvisories = [filename for filename in allAdvisories if self._getStormNumberStringFromTCP() in filename] + stormAdvisories = [filename[:-5] for filename in stormAdvisories] + self.debug_print("stormAdvisories = %s" % (self._pp.pformat(stormAdvisories))) + + return stormAdvisories + + def _loadLastTwoAdvisories(self): + stormAdvisories = self._getStormAdvisoryNames() + + # We need to reverse the order of the advisories so the latest + # advisories come first in this list + stormAdvisories.sort(reverse=True) + + lastTwoAdvisories = [] + + # Get the current advisory number string from the TCP + curAdvisoryString = self._getAdvisoryNumberStringFromTCP() + + if self._awipsWANPil.find("TCV") != -1: + for advisory in stormAdvisories: + if not advisory.endswith(curAdvisoryString): + # Different advisory - keep it + lastTwoAdvisories.append(advisory) + + else: # Must be the HLS + lastTwoAdvisories = stormAdvisories[:2] + if len(lastTwoAdvisories) > 0: + self._previousAdvisoryMatchesNumber = lastTwoAdvisories[0].endswith(\ + curAdvisoryString) + + self.debug_print("DEBUG: last two advisories = %s" % + (self._pp.pformat(lastTwoAdvisories)), 1) + self._previousAdvisory = None + if len(lastTwoAdvisories) >= 1: + self._previousAdvisory = 
self._loadAdvisory(lastTwoAdvisories[0]) + + self._previousPreviousAdvisory = None + if len(lastTwoAdvisories) >= 2: + self._previousPreviousAdvisory = self._loadAdvisory(lastTwoAdvisories[1]) + + def _loadAdvisory(self, advisoryName): + self._synchronizeAdvisories() + fileName = self._getAdvisoryFilename(advisoryName) + + try: + pythonDict = JsonSupport.loadFromJson(LocalizationSupport.CAVE_STATIC, + self._site, + fileName) + + self.debug_print("File contents for %s:" % (fileName), 1) + self.debug_print(self._pp.pformat(pythonDict), 1) + + # Only use transmitted advisories + if pythonDict["Transmitted"] == False and advisoryName != "pending": + return None + else: + return pythonDict + except Exception as e: + self.debug_print("Load Exception for %s : %s" % (fileName, e), 1) + return None + + def _getAdvisoryPath(self): + gfeMode = CAVEMode.getMode().name() + if gfeMode == "PRACTICE": + return os.path.join("gfe", "tcvAdvisories", "practice") + else: + return os.path.join("gfe", "tcvAdvisories") + + def _getAdvisoryFilename(self, advisoryName): + advisoryFilename = os.path.join(self._getAdvisoryPath(), + advisoryName+".json") + return advisoryFilename + + ############################################################### + ### GUI related methods + + def _processVariableList(self, definition, parent): + # Get Definition variables + for key in list(definition.keys()): + exec("self._" + key + "= definition[key]") + + # Overview GUI + while True: + overviewDict = self._displayGUI() + if overviewDict is None: + return None + break + + # Consolidate information from GUI's + varDict = overviewDict + return varDict + + def _GUI_sizing_dict(self): + # This contains values that adjust the GUI sizing. 
+ return { + "GUI_height_limit": 900, # limit to GUI height in canvas pixels + "charSize": 9, + } + + def _GUI1_configDict(self): + return { + # Order and inclusion of GUI1 buttons + # Each entry is (name of button in GUI code, desired label on GUI) + "buttonList":[ + ("Run","Run"), + ("Cancel","Cancel"), + ], + } + + def _font_GUI_dict(self): + return { + "headers": ("blue", ("Helvetica", 14, "bold")), + "instructions": (None, ("Helvetica", 12, "italic")), + } + + +import tkinter +class Common_Dialog(Dialog): + def __init__(self, parent, title, infoDict=None): + self._status = "Cancel" # exception, or user-cancels + self._tkObject_dict = {} # place to store reference to tk objects + self._varDict = {} # all end results must be saved here + self._infoDict = infoDict + self._parent = parent + self._pp = pprint.PrettyPrinter() + Dialog.__init__(self, parent=None, title=title) + + def getVarDict(self): + return self._varDict + + def _makeRadioOrCheckList(self, master, label, elementList, default=None, + buttonSide=tkinter.TOP, frameSide=tkinter.LEFT, entryField=None, + headerFG=None, headerFont=None, boxType="radio", + listFrameRelief=tkinter.GROOVE): + listFrame = tkinter.Frame(master, relief=listFrameRelief, borderwidth=1) + + if label != "": + listLabel = tkinter.Label(listFrame, text=label, fg=headerFG, font=headerFont) + listLabel.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO, padx=10) + + ivar = tkinter.IntVar() + defaultIndex = 0 + ivarList = [] + for element in elementList: + index = elementList.index(element) + if type(element) is tuple: + element, key = element + if boxType== "radio": + button = tkinter.Radiobutton(listFrame, variable=ivar, text=element, value=index) + else: + ivar = tkinter.IntVar() + if default is not None and element in default: ivar.set(1) + else: ivar.set(0) + button= tkinter.Checkbutton(listFrame, variable=ivar, text=element) + ivarList.append(ivar) + button.pack(side=buttonSide, anchor=tkinter.W, expand=tkinter.YES, padx=4) 
+ # Look for default + if element == default: + defaultIndex = index + + entryObject = None + if entryField is not None: + entryObject = self._makeEntry(listFrame, entryField) + # packing + listFrame.pack(side=frameSide, expand=tkinter.NO, fill=tkinter.Y) #, anchor=Tkinter.N) + #listFrame.pack(side=frameSide, expand=Tkinter.YES, fill=Tkinter.Y, anchor=Tkinter.N) + + if boxType == "radio": + ivar.set(defaultIndex) # set the default + if boxType == "check": + ivar = ivarList + return ivar, entryObject + + def _makeEntry(self, frame, text, width=20): + label = tkinter.Label(frame, text=text) + label.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.NO) + entry = tkinter.Entry(frame, relief=tkinter.SUNKEN, width=width) + entry.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.NO) + return entry + + def cancelCB(self): + self._status = "Cancel" + self.cancel() + + def _entryName(self, name): + return name+"_entry" + + def _makeTuple(self,str): + str = re.sub('(?im)[^_a-z]', '', str) + return (str+":",str) + + def _setVarDict(self, key, value, options=None): + if options is not None: + value = options[value] + if type(value) is tuple: + value = value[1] + self._varDict[self._makeTuple(key)] = value + + def status(self): + return self._status + + def buttonbox(self): + # override the existing ok/cancel button box, removing it. + # we do this so that we can attach our own hooks into the functions. + pass + + +######################################################### +# The following defintions are from TextProductCommon. # +# This is just bringing over the minimum amount needed. 
# +######################################################### +import DiscretePhrases +class TextProductCommon(DiscretePhrases.DiscretePhrases): + def __init__(self): + DiscretePhrases.DiscretePhrases.__init__(self) + + def setUp(self, areaDict): + self._areaDictionary = areaDict + + def hazardTimeZones(self, areaList): + ''' + Returns list of time zones for the starting time + and list of time zones for the ending time. + + The areaList provides a complete list of areas for this headline. + startT, endT are the hazard times. + ''' + + # get this time zone + thisTimeZone = os.environ.get('TZ') + if thisTimeZone is None: + thisTimeZone = 'GMT' + + zoneList = [] + areaDict = self._areaDictionary + + # check to see if we have any areas outside our time zone + for areaName in areaList: + if areaName in list(areaDict.keys()): + entry = areaDict[areaName] + if 'ugcTimeZone' not in entry: #add your site id + if thisTimeZone not in zoneList: + zoneList.append(thisTimeZone) + continue # skip it + timeZoneList = entry['ugcTimeZone'] + if type(timeZoneList) is not list: # a single value + timeZoneList = [str(timeZoneList)] # make it into a list + for timeZone in timeZoneList: + if timeZone not in zoneList: + zoneList.append(timeZone) + + # if the resulting zoneList is empty, put in our time zone + if len(zoneList) == 0: + zoneList.append(thisTimeZone) + + # if the resulting zoneList has our time zone in it, be sure it + # is the first one in the list + try: + index = zoneList.index(thisTimeZone) + if index != 0: + del zoneList[index] + zoneList.insert(0, thisTimeZone) + except: + pass + + return zoneList + + def getExpireTime(self, issueTime, purgeHours, vtecRecords, roundMinutes=15, + fixedExpire=0): + ''' + Given the issuance time, purgeHours, and the vtecRecords (with times converted to ms), + returns the appropriate expiration time. 
+ + Expiration time is the earliest of the specified expiration time, 1 hr if a CAN code + is detected, or the ending time of ongoing events (CON, EXT, EXB, NEW). + The issueTime and expireTime are ints in milliseconds. + + @param issueTime in ms + @param purgeHours -- set time past issuance time. + The default for this is set by policy e.g. an FFA expires by default + in 8 hours. However, if there is a hazard end time earlier, then that + is used. + if -1, then hazard end time is to be used + @param vtecRecords in the segment with times converted to ms + @param roundMinutes + @param fixedExpire -- indicates to ignore the VTEC actions when computing the + expiration time + + ''' + if purgeHours > 0: + expireTime = issueTime + purgeHours * 3600 * 1000 + else: + expireTime = None + # Pick the earliest end time of the vtecRecords in the segment + for vtecRecord in vtecRecords: + if expireTime is None or vtecRecord.get('endTime') < expireTime: + expireTime = vtecRecord.get('endTime') + + if not fixedExpire: + canExpFound = 0 + activeFound = 0 + laterActive = None #later end time of all active events + for vtecRecord in vtecRecords: + action = vtecRecord.get('act') + if action in ['CAN','EXP']: + canExpFound = 1 + elif action in ['NEW','CON','EXT','EXB','EXA']: + activeFound = 1 + endTime = vtecRecord.get('endTime') + if endTime != 0: + if laterActive is not None: + laterActive = max(laterActive, endTime) + else: + laterActive = endTime + if laterActive is not None: + expireTime = min(expireTime, laterActive) + elif canExpFound and not activeFound: + expireTime = min(expireTime, issueTime+3600*1000) #1hr from now + + #ensure expireTime is not before issueTime, and is at least 1 hour + if expireTime - issueTime < 3600*1000: + expireTime = issueTime + 3600*1000 + + #round to next 'roundMinutes' + roundValue = roundMinutes*60*1000 #in milliseconds + delta = expireTime % roundValue # in milliseconds + baseTime = int(expireTime/roundValue)*roundValue + if delta/60*1000 >= 1: 
#add the next increment + expireTime = baseTime + roundValue + else: #within 1 minute, don't add the next increment + expireTime = baseTime + + return expireTime + + def getHeadlinesAndSections(self, vtecRecords, productID, issueTime): + ''' + Order vtec records and create the sections for the segment + + @param vtecRecords: vtecRecords for a segment + @param metaDataList: list of (metaData, hazardEvent) for the segment + @param productID: product ID e.g. FFA, CWF, etc. + @param issueTime: in seconds so that it compares to the vtec records + ''' + sections = [] + headlines = [] + headlineStr = '' + hList = copy.deepcopy(vtecRecords) + if len(hList): + if productID in ['CWF','NSH','OFF','GLF']: + hList.sort(self.marineSortHazardAlg) + else: + hList.sort(self.regularSortHazardAlg) + + while len(hList) > 0: + vtecRecord = hList[0] + + # Can't make phrases with vtecRecords with no 'hdln' entry + if vtecRecord['hdln'] == '': + hList.remove(vtecRecord) + continue + + # make sure the vtecRecord is still in effect or within EXP critiera + if (vtecRecord['act'] != 'EXP' and issueTime >= vtecRecord['endTime']) or \ + (vtecRecord['act'] == 'EXP' and issueTime > 30*60 + vtecRecord['endTime']): + hList.remove(vtecRecord) + continue # no headline for expired vtecRecords + + #assemble the vtecRecord type + hazStr = vtecRecord['hdln'] + headlines.append(hazStr) + #hazStr = self.convertToLower(hazStr) + + # if the vtecRecord is a convective watch, tack on the etn + phenSig = vtecRecord['phen'] + '.' 
+ vtecRecord['sig'] + if phenSig in ['TO.A', 'SV.A']: + hazStr = hazStr + ' ' + str(vtecRecord['etn']) + + # add on the action + actionWords = self.actionControlWord(vtecRecord, issueTime) + hazStr = hazStr + ' ' + actionWords + + if len(hazStr): + # Call user hook + localStr = self.hazard_hook( + None, None, vtecRecord['phen'], vtecRecord['sig'], vtecRecord['act'], + vtecRecord['startTime'], vtecRecord['endTime']) # May need to add leading space if non-null + headlineStr = headlineStr + '...' + hazStr + localStr + '...\n' + + # always remove the main vtecRecord from the list + hList.remove(vtecRecord) + + return headlineStr, headlines + + def formatUGCs(self, ugcs, expireTime): + ''' + Create ugc header with expire time + Examples: + 'COC123-112330-' + 'FLZ066>068-071-072-063-069-073>075-168-172>174-070-230515-' + ''' + ugcStr = self.makeUGCString(ugcs) + ddhhmmTime = self.getFormattedTime( + expireTime/1000, '%d%H%M', shiftToLocal=0, stripLeading=0).upper() + ugcStr = ugcStr + '-' + ddhhmmTime + '-' + return ugcStr + + def getFormattedTime(self, time_secs, format='%I%M %p %Z %a %b %d %Y', + shiftToLocal=1, upperCase=0, stripLeading=1): + ''' + Return a text string of the given time in seconds in the given format + This method is used for product headers. 
+ ''' + if time_secs == 0: + time_secs = time.time() + if shiftToLocal == 1: + curTime = time.localtime(time_secs) + else: + curTime = time.gmtime(time_secs) + localTime = time.localtime(time_secs) + zoneName = time.strftime('%Z',localTime) + timeStr = time.strftime(format, curTime) + if shiftToLocal == 0: + timeStr = string.replace(timeStr, zoneName, 'GMT') + if stripLeading==1 and (timeStr[0] == '0' or timeStr[0] == ' '): + timeStr = timeStr[1:] + if upperCase == 1: + timeStr = string.upper(timeStr) + timeStr = string.replace(timeStr, ' ', ' ') + return timeStr + + def formatUGC_names(self, ugcs, alphabetize=False, separator='-'): + ''' + For example: Saunders-Douglas-Sarpy-Lancaster-Cass-Otoe- + ''' + nameList = [] + for ugc in ugcs: + entry = self._areaDictionary.get(ugc) + nameList.append(entry.get('ugcName', ugc)) + if alphabetize: + nameList.sort() + return self.formatNameString(nameList, separator) + + def formatNameString(self, nameList, separator, state=None): + nameString = '' + for name in nameList: + nameString+= name + separator + if state: + nameString = nameString.rstrip(separator) + ' ('+state+') ' + return nameString + + def getVal(self, dictionary, key, default=None, altDict=None): + ''' + Convenience method to access dictionary keys and account for :skip and :editable suffixes + + @param dictionary + @param key, potentially without a suffix e.g. 'info' + @return the key value accounting for suffixes e.g. 'info:skip' + ''' + for dictKey in [key, key+':skip', key+':editable']: + if dictionary.get(dictKey): + return dictionary.get(dictKey) + if altDict and altDict.get(dictKey): + return altDict.get(dictKey) + return default + + def formatDatetime(self, dt, format='ISO', timeZone=None): + ''' + @param dt: datetime object + @param format: format string e.g. '%H%M %p %Z %a %e %b %Y' + @param zone: time zone e.g.'CST7CDT'. If None use UTC + @return datetime formatted with time zone e.g. 
'1400 PM CST Mon 12 Feb 2011' + ''' + import datetime + from dateutil import tz + # TODO REMOVE THIS BLOCK AS PART OF THE JSON REFACTOR. + if type(dt) is float: + dt = datetime.fromtimestamp(dt / 1000) + + from_zone = tz.tzutc() + new_time = dt.replace(tzinfo=from_zone) + if timeZone is not None: + to_zone = tz.gettz(timeZone) + new_time = new_time.astimezone(to_zone) + if format == 'ISO': + return new_time.isoformat() + else: + return new_time.strftime(format) + + def flush(self): + ''' Flush the print buffer ''' + os.sys.__stdout__.flush() + + def makeUGCString(self, ugcs): + ''' + Create the UGC string for product / segment headers. + + Examples: + FLZ173- + FLZ066>068-071-072-063-069-073>075-168-172>174-070- + ''' + # if nothing in the list, return empty string + if len(ugcs) == 0: + return '' + ugcList = copy.deepcopy(ugcs) + # Remove any blank UGC lines from the list + listsize=len(ugcList) + j=0 + while j < listsize: + if ugcList[j] == '': + del ugcList[j] + j=j+1 + + # Set up state variables and process initialize ugcStr with first ugc + # in ugcList + inSeq = 0 + ugcStr = ugcList[0] + curState = ugcStr[0:3] + lastNum = int(ugcList[0][3:]) + firstNum = 0 + lastUgc = ugcList[0] + + # By initializing properly we don't need the first item + ugcList.remove(ugcList[0]) + + for ugc in ugcList: + ugcState = ugc[:3] + ugcNumStr = ugc[3:] + num = int(ugcNumStr) + if ugcState == curState: + if num == lastNum + 1: + if inSeq > 0: + # Replace the last ugcNumStr in sequence with the + # current ugcNumStr + # e.g. 
062>063 becomes 062>064 + ugcStr = ugcStr[:len(ugcStr)-3] + ugcNumStr + inSeq += 1 + else: + ugcStr += '>' + ugcNumStr + inSeq = 1 + else: # num != lastNum + 1 + ugcStr = self.checkLastArrow(inSeq, ugcStr) + inSeq = 0 # reset sequence when number not in sequence + ugcStr += '-' + ugcNumStr + else: + ugcStr = self.checkLastArrow(inSeq, ugcStr) + ugcStr += '-' + ugc + curState = ugcState + inSeq = 0 #reset sequence when switching states + lastNum = num + lastUgc = ugc + + # May have to clean up last arrow at the end + ugcStr = self.checkLastArrow(inSeq, ugcStr) + return ugcStr + + def checkLastArrow(self, inSeq, ugcStr): + ''' + Part of formatUGCs + ''' + if inSeq == 1: + # Change the last arrow to - since + # we only had 2 in the sequence e.g. + # 062>063 should be 062-063 + arrowIndex = ugcStr.rfind('>') + if arrowIndex >= 0: + ugcStr = ugcStr[:arrowIndex] + '-' + ugcStr[arrowIndex+1:] + return ugcStr + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HSF.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HSF.py index 3df206ca8a..388d099cd8 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HSF.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HSF.py @@ -1,2782 +1,2784 @@ -#------------------------------------------------------------------------- -# Description: HSF (High Seas Forecast) -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-#------------------------------------------------------------------------- -# Version: 26 July 2016 - Received from Jeff Lewitsky -## -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ----------- ---------- ----------- -------------------------- -# 07/29/2016 - tlefebvre Changed edit area retrieval and storage to work -# outside CAVE so edit areas could be shared. -# 12/20/2017 DCS17686 tlefebvre Initial baseline version. -# -## -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# HSF.py, HSF ___Definition, HSF__Override -#------------------------------------------------------------------------- -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas, default is Combinations -# -# productName defines name of product e.g. "COASTAL WATERS FORECAST" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "OFFBOS" -# areaName (opt.) Area name for product header, such as "WESTERN NEW YORK" -# wfoCityState City,state that the WFO is located in, such as "BUFFALO, NY" -# -# synopsisUGC UGC code for Synopsis -# synopsisHeading Heading for Synopsis -# -# Optional Configuration Items -# -# editAreaSuffix default None. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. 
-# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined or the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -# -# periodCombining If 1, an attempt will be made to combine components -# or time periods into one. Otherwise no period -# combining will will be done. -# includeEveningPeriod Include a 6 hour Evening period on the 3rd day -# useAbbreviations -# If 1, use marine abbreviations e.g. 
TSTM instead of THUNDERSTORM, -# NW instead of NORTHWEST -# (See marine_abbreviateText in the TextRules module) -# -# Weather-related flags -# hoursSChcEnds - specifies hours past the beginning of the first -# first period of the product to stop including 'Slight -# Chance' or 'Isolated' weather types (ERH policy -# allows values of 1-5 * 12 hour periods) -# -# areaDictionary Modify the AreaDictionary utility with UGC -# information about zones -# -# useHolidays Set to 1 to use holidays in the time period labels -# -# Trouble-shooting items -# passLimit -- Limit on passes allowed through Narrative Tree -# trace -- Set to 1 to turn on trace through Narrative Tree -# -# OVERRIDES -# -# Required Overrides -# -# _Text1(), _Text2() Descriptive text for header -# -# NARRATIVE CUSTOMIZATION POINTS -# The phrases in this product can be customized in many ways by overriding -# infrastructure methods in the Local file. -# You will see common overrides in the Local file and you may change them -# in that there. -# For further customization, you can determine which phrases your product is -# using by examining the Component Product Definitions below. -# Then, you can look up the phrase in the Text Product User Guide which will -# describe the all the relevant override methods associated with the phrase. -# Refer to the Customization section of the Text Product User Guide -# for step-by-step information. 
-# -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Wind (every 3 hours to 3 days, then every 6 hours to 7 days) -# WaveHeight and/or WindWaveHgt -# (every 6 hours to 3 days, then every 12 hours to 7 days) -# Wx (every 6 hours to 3 days, then every 12 hours to 7 days) -# Optional: -# WindGust (every 3 hours to 7 days) -# Swell, Swell2, Period, Period2 (every 6 hours to 7 days) -#------------------------------------------------------------------------- -# Edit Areas Needed: None -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -# Combinations -#------------------------------------------------------------------------- -# Component Products: -# OFFPeriod (component) -# OFFPeriodMid (component) -# OFFExtended (component) -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". 
-#------------------------------------------------------------------------- -# Additional Information: -# -# COMMON OVERRIDES -# from OFF: -# _Text1 -# _Text2 -# _issuance_list -# riverBarForecast_dict -# from MarinePhrases -# inlandWatersAreas -# inlandWatersWave_element -# seasWaveHeight_element -# seasWindWave_element -# waveHeight_wind_threshold -# marine_wind_flag -# marine_wind_combining_flag -# marine_wind_verbose_flag -# from ConfigVariables -# phrase_descriptor_dict -# phrase_connector_dict -# null_nlValue_dict -# first_null_phrase_dict -# null_phrase_dict -# maximum_range_nlValue_dict -# combine_singleValues_flag_dict -# from WxPhrases: -# embedded_visibility_flag -# visibility_wx_threshold -# significant_wx_visibility_subkeys -# wxCoverageDescriptors -# wxTypeDescriptors -# wxAttributeDescriptors -# wxIntensityDescriptors -# wxCombinations -# combine_T_RW -# from SampleAnalysis -# moderated_dict -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Marine Services. 
-#------------------------------------------------------------------------- - -import TextRules -import SampleAnalysis -import ForecastNarrative -import time, string, re, types, cPickle, os, textwrap, sys -import TimeRange -import AbsTime -from math import * -import numpy -import copy -import UserInfo -import subprocess -import xml.etree.ElementTree as ET -import EditAreaUtilities - -import sys -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData -#from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData_CoordinateType as CoordinateType - -class Node: - def __init__(self, childList, methodList): - self.childList = childList - self.methodList = methodList - self.parent = None - # Make tree bi-directional - for child in childList: - child.parent = self - # Keep track of changes made to this node - self.changeFlag = 0 - # Keep track of methods that are done - self.doneList = [] - def getIndex(self): - # If this node is a child, - # return it's index in the childList of the parent - try: - return self.parent.childList.index(self) - except: - return None - def getParent(self): - return self.parent - - def getComponent(self): - # Return this node's ancestor at the second level in the tree - prevNode = None - node = self - i = 0 - while node.getParent() is not None and i < 100: - prevNode = node - node = node.getParent() - i = i + 1 - return prevNode - - def getComponentName(self): - node = self - compName = node.get("componentName") - if compName is not None: - return compName - else: - comp = node.getComponent() - if comp is not None: - return comp.get("name") - else: - return None - - def getNext(self): - if self.parent is not None: - index = self.getIndex() - childList = self.parent.childList - if len(childList) > index+1: - return childList[index+1] - - def getPrev(self): - if self.parent is not None: - index = self.getIndex() - childList = 
self.parent.childList - if index > 0: - return childList[index-1] - - def set(self, member, value): - #print " Setting", member, - if hasattr(self, member): - current = getattr(self, member) - #print "current/value", current, value - if current == value: - #print " No Change" - return - setattr(self, member, value) - self.changeFlag = 1 - #print " Changed" - - def get(self, member, default=None): - if hasattr(self, member): - return getattr(self, member) - else: - return default - - def printNode(self, node, indentStr=""): - print "Node", node - print indentStr + " Methods" - for method in node.methodList: - if method in node.doneList: - done = "DONE" - else: - done = "" - print indentStr + " ", method.__name__, done - print indentStr + " Attributes" - dict = node.__dict__ - for key in dict: - if key == "methodList" or key == "doneList": - continue - print indentStr + " ", key, dict[key] - print indentStr + " Children ", len(node.childList) - for child in node.childList: - self.printNode(child, indentStr + " ") - - def insertChild(self, sibling, newChild, newFirst=0): - # Insert the newChild - # If newFirst, insert newChild before sibling, - # else afterward. 
- newChild.parent = self - new = [] - for child in self.childList: - if child == sibling: - if newFirst: - new.append(newChild) - new.append(child) - else: - new.append(child) - new.append(newChild) - else: - new.append(child) - self.childList = new - - def remove(self): - # Remove this node from it's parent child list - parent = self.parent - new = [] - for child in parent.childList: - if child != self: - new.append(child) - parent.childList = new - # Set the attribute for removing the child - setattr(self, "removed", 1) - - def findChild(self, attr, value): - # Find the child of this node with the given attribute - # of the given value - for child in self.childList: - if child.get(attr) == value: - return child - def getProgeny(self): - # Return a list of all progeny of this node - progeny = self.childList - for child in self.childList: - childProgeny = child.getProgeny() - if childProgeny is not None: - progeny = progeny + child.getProgeny() - return progeny - def replace(self, nodeList): - # Replace the current child node with the node list. - # If top of tree, does nothing. 
- childList = self.parent.childList - newList = [] - for child in childList: - if child == self: - newList = newList + nodeList - else: - newList.append(child) - self.parent.childList = newList - # Remove any children of current node - self.childList = [] - # Make this node defunct - self.doneList = self.methodList - def getTimeRange(self): - if hasattr(self, "timeRange"): - return self.timeRange - # Look for an ancestor that has a timeRange associated with it - if self.parent is not None: - return self.parent.getTimeRange() - return None - def getStatDict(self): - # Assume we are a subPhrase - if hasattr(self, "statDict"): - statDict = self.statDict - disabledElements = self.getAncestor("disabledElements") - if disabledElements is not None: - for key in statDict.keys(): - for element in self.parent.disabledElements: - if key == element: - statDict[element] = None - disabledSubkeys = self.getAncestor("disabledSubkeys") - #print "disabledSubkey", disabledSubkeys - if disabledSubkeys is not None: - disabledWxTypes = [] - for disabledSubkey in disabledSubkeys: - disabledWxTypes.append(disabledSubkey.wxType()) - for key in statDict.keys(): - if key == "Wx": - subkeys = statDict[key] - newList = [] - for subkey in subkeys: - # Need to handle both "dominantWx" and - # "rankedWx" analysis - appendVal = subkey - if type(subkey) is types.TupleType: - subkey, rank = subkey - if subkey not in disabledSubkeys \ - and subkey.wxType() not in disabledWxTypes: - newList.append(appendVal) - statDict[key] = newList - return statDict - else: - return None - def getAreaLabel(self): - if hasattr(self, "areaLabel"): - return self.areaLabel - # Look for an ancestor that has an areaLabel associated with it - if self.parent is not None: - return self.parent.getAreaLabel() - return None - def getAncestor(self, attr): - if hasattr(self, attr): - return getattr(self, attr) - # Look for an ancestor that has the given attribute associated with it - if self.parent is not None: - return 
self.parent.getAncestor(attr) - return None - def setAncestor(self, attr, value): - if hasattr(self, attr): - setattr(self, attr, value) - return None - # Look for an ancestor that has the given attribute associated with it - if self.parent is not None: - return self.parent.setAncestor(attr, value) - return None - def getDescendent(self, attr): - if hasattr(self, attr): - return getattr(self, attr) - # Look for the first descendent that has the given attribute associated with it - for child in self.childList: - value = child.getDescendent(attr) - if value is not None: - return value - return None - -class Narrative(Node, TextRules.TextRules): - # This is the root of the tree and, as such, has some special methods - # and data members - def __init__(self, methodList, componentList, statisticsDictionary, - issuanceInfo, library, histoSampler): - self.stats = statisticsDictionary - # Access to inherited methods - self.library = library - # A histoSampler for access to Topo - self.histoSampler = histoSampler - self.issuanceInfo = issuanceInfo - - # This is the root of the tree - Node.__init__(self, componentList, methodList) - TextRules.TextRules.__init__(self) - - def printTree(self): - print "\n\nNarrative Tree\n" - self.printNode(self, "") - def getTopoHisto(self, areaLabel): - editArea = self.library.findEditArea(None, areaLabel) - return self.get("histoSampler").getTopoHisto(editArea.id()) - def makeNode(self, children, methods, parent=None): - node = Node(children, methods) - node.parent = parent - return node - def statisticsDictionary(self): - return self.statisticsDictionary.dictionary() - def getDataType(self, element): - return self.library.getDataType(element) - def getLimits(self, element): - return self.library.getLimits(element) - def makeComponent(self, name, timeRange, definition): - return self.library.makeComponent(name, timeRange, definition) - def makePhrase(self, phraseDef): - return self.library.makePhrase(phraseDef) - def copyPhrase(self, node, 
timeRange=None, areaLabel=None, parent=None, - copyAttrs=[]): - phraseDef = node.get("phraseDef") - newNode = self.library.makePhrase(phraseDef) - # copy attributes from original node - for attr in copyAttrs: - newVal = node.get(attr) - if type(newVal) is types.ListType: - newList = [] - for item in newVal: - newList.append(item) - newVal = newList - newNode.set(attr, newVal) - if areaLabel is None: - areaLabel = node.getAreaLabel() - newNode.set("areaLabel", areaLabel) - if timeRange is None: - timeRange = node.getTimeRange() - newNode.set("timeRange", timeRange) - if parent is None: - parent = node.parent - newNode.parent = parent - # Preserve attributes - newNode.set("args", node.get("args")) - return newNode - - def addPhrase(self, prevPhrase, timeRange=None, areaLabel=None): - # Make the new phrase follow given phrase - newPhrase = self.copyPhrase(prevPhrase, timeRange, areaLabel) - parent = prevPhrase.parent - parent.insertChild(prevPhrase, newPhrase) - return newPhrase - def addPhraseDef(self, prevPhrase, phraseDef, timeRange=None, areaLabel=None): - # Make the new phrase follow given prevPhrase using the given phraseDef - newPhrase = self.library.makePhrase(phraseDef) - if areaLabel is None: - areaLabel = prevPhrase.getAreaLabel() - newPhrase.set("areaLabel", areaLabel) - if timeRange is None: - timeRange = prevPhrase.getTimeRange() - newPhrase.set("timeRange", timeRange) - parent = prevPhrase.parent - newPhrase.parent = parent - parent.insertChild(prevPhrase, newPhrase) - return newPhrase - -class Statistics: - def __init__(self, statDict): - self._statDict = statDict - def get(self, element, timeRange, areaLabel=None, statLabel="", mergeMethod="List", - intersectWith=None): - return self._statDict.get(element) - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [] - Definition = { - "type": "smart", - "displayName": "None", - "database": "Official", - # Defines output location of finished product. 
- "outputFile": "{prddir}/TEXT/HSF_.txt", - "debug": 0, - # Name of map background for creating Combinations - "mapNameForCombinations": "Marine_Zones_", - - "lineLength": 69, - ## Edit Areas: Create Combinations file with edit area combinations. - "defaultEditAreas" : "Combinations_OFF__", - "editAreaSuffix": None, - # product identifiers - "productName": "HIGH SEAS FORECAST", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "GEORGIA" -- optional - "wfoCityState": "", # Location of WFO - city state - - "synopsisUGC": "", # UGC code for synopsis - "synopsisHeading": ".SYNOPSIS...",# Heading for synopsis - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - - "hazardSamplingThreshold": (0, 1), #(%cov, #points) - - "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time - - "periodCombining" : 0, # If 1, combine periods, if possible - # Product-specific variables: - # Set to one if you want a 6-hour evening period instead of - # 18-hour period without lows - "includeEveningPeriod": 0, - "useAbbreviations": 1, - - "ccc": "MIA", # AFOS node - "tcmBasin": "EP", # AT = Atlantic, EP = East Pacific, CP = Central Pacific - - # CCode flag - added for OPC 11/14/2017 CNJ - "ccode": 0, - - # Weather-related flags - "hoursSChcEnds": 24, - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - "useHolidays": 0, # Set to 1 to use holidays in the time period labels - # Language - "language": "english", - - # Trouble-shooting items - "passLimit": 20, # Limit on passes allowed through - # Narrative Tree - "trace": 0, # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - # Mixed Case - # LowerCase below needs to = 1 for mixed case AND must change mixedCaseProductIds.txt file under TextWS for store to 
not upper all -JL 05/24/2016 - # Also will need to change all hard-coded phrases AND phrases passed from MakeHSEditAreas changed to mixed case - "lowerCase": 0, - "autoStore": 0, - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - #editAreasPath = "/scratch/local/HighSeas/EditAreas/" # for development in Boulder - editAreasPath = "/data/local/HighSeas/NH2/EditAreas/" # for operations - self._eaUtils = EditAreaUtilities.EditAreaUtilities(editAreasPath) - - - def _Text1(self): - return "SUPERSEDED BY NEXT ISSUANCE IN 6 HOURS\n\n" + \ - "SEAS GIVEN AS SIGNIFICANT WAVE HEIGHT...WHICH IS THE AVERAGE\n" + \ - "HEIGHT OF THE HIGHEST 1/3 OF THE WAVES. INDIVIDUAL WAVES MAY BE\n" + \ - "MORE THAN TWICE THE SIGNIFICANT WAVE HEIGHT.\n\n" - - # override _Text2 for each specific basin/product - def _Text2(self): - return "ATLANTIC FROM 07N TO 31N W OF 35W INCLUDING CARIBBEAN SEA AND\n" + \ - "GULF OF MEXICO\n\n" -# def _Text2(self): -# return "E PACIFIC FROM THE EQUATOR TO 30N E OF 140W AND 03.4S TO THE\n" + \ -# "EQUATOR E OF 120W\n\n" - - # Returns the specified product (string) with newlines inserted - # such that no line exceeds maxChars characters. 
- def _wrapLines(self, product, maxChars=64): - - # break out the product into lines - lineList = [] - startPos = 0 - while startPos < len(product): - pos = product.find("\n", startPos) - if pos == startPos: - lineList.append("") - elif pos == -1: - lineList.append(product[startPos:]) #get the rest - break - - line = product[startPos:pos] # slice out line - wrappedLines = textwrap.wrap(line, maxChars) - for w in wrappedLines: - lineList.append(w) - - startPos = pos + 1 - - finalProduct = "" - for line in lineList: - finalProduct = finalProduct + line + "\n" - - return finalProduct - - # Top-level object that calls all main subroutines - def generateForecast(self, argDict): - - print "Generate Forecast" - - # baseline code - gets variables from the Definitions - error = self._getVariables(argDict) - if error is not None: - return error - - # Determine time ranges - issuance times set here - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Creating the Features class from MakeHSFEditAreas tool input - self._createFeatures(argDict) - - # Creating the Features class from grid-based tool input - self._createGridBasedFeatures(argDict) - - # Creating the Features class from TCM conversion script input - self._createTCM_BasedFeatures(argDict) - - # Creating the Features class from VGF / XML input - #self._createDrawableFeatures(argDict) - - - print "Sampling data" - # Sample the data for the areas in the Features created above - error = self._sampleData(argDict) - if error is not None: - return error - - # Populate the Features with the sampled data - self._populateFeatures(argDict) - - # Order the Features based on rules for the HSF product - self._orderFeatures() - - #for feature in self._features: - # feature.printFeature() - - # Building the forecast text string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - fcst = self._makeSection(fcst, argDict, self._warningFeatures, "Warning") - fcst = self._makeSection(fcst, 
argDict, self._synopsisFeatures, "Synopsis") - fcst = self._postProcessProduct(fcst, argDict) - - return fcst - - # sets variables from the Definitions - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._databaseID = argDict["databaseID"] - self._ifpClient = argDict["ifpClient"] - - self._language = argDict["language"] - return None - - # Sets up issuance times - can be done in dictionary elsewhere - # This method or dictionary goes into overrides to change issuance times for each product - def _determineTimeRanges(self, argDict): - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - staticIssueTime=re.sub(r'(\d{3,4} [AP]M).*',r'\1',self._productIssuance) - self._timeLabel = staticIssueTime + " " + self.getCurrentTime( - #argDict, " %a %b %e %Y", stripLeading=1) #commented out per 00 UTC issue per M. Sardi (JL 07/21/16) - argDict, " %a %b %e %Y", shiftToLocal=0, stripLeading=1) - # Re-calculate issueTime - self._issueTime = self.strToGMT(staticIssueTime) - validTimeDict = { - "0430 UTC": 430, - "1030 UTC": 1030, - "1630 UTC": 1630, - "2230 UTC": 2230, - } - validTime = validTimeDict[self._productIssuance] - 430 - self._validTime = `validTime`.zfill(4) + " UTC" - return None - - ############## - # Organize Features - - class Feature: - def __init__(self): - self.name = None - self.basinName = None - # Feature Type -- 'Named', 'GridBased', 'TCM_Based', 'Drawable' - # e.g. 
(Fog / Visibility, Convection, Heavy Freezing Spray) - self.featureType = None - self.periods = [] - - self.highestWarning = None - self.highestWarningTimePeriod = None - self.highestWarningHeadline = None - self.earliestTimePeriod = None - self.wxType = None - self.wxIntensity = None - self.wxCoverage = None - self.phenomenonType = None - self.GBareaList = None - - # Drawable Feature -- ingest from Drawable Features XML - self.drawableFeature = None - self.autoText = None - def printFeature(self): - print "\nFeature -- Feature Type, Basin Name:", self.featureType, self.basinName - print "HighestWarning, HighestWarning TimePeriod:", self.highestWarning, self.highestWarningTimePeriod - print "wxType:", self.wxType, "wxCoverage:", self.wxCoverage, "wxIntensity:", self.wxIntensity - print "phenomenonType:", self.phenomenonType - - if self.featureType == "GridBased": - for area in self.GBareaList: - print "areaName:", area.areaName - print "areaLabel:", area.areaLabel - print "timePeriod:", area.timePeriod - print "headline:", area.headline - print "warningType:", area.warningType - print "phenomenonType:", area.phenomenonType - print "wxType:", area.wxType - print "intensity:", area.intensity - print "coverage:", area.coverage - else: - print "periods:", self.periods - - - #print " Periods" - #for period in self.periods: - # period.printPeriod() - - class Period: - def __init__(self): - self.timePeriod = None - self.areas = [] - self.drawables = [] - def printPeriod(self): - print ' TimeRange', self.timePeriod - print ' Areas' - for area in self.areas: - area.printArea() - for drawable in self.drawables: - drawable.printDrawable() - - class Area: - def __init__(self): - self.areaName = None - self.areaLabel = None - self.refData = None - self.timePeriod = None - - # Named Feature attributes - self.statDict = {} - self.headline = "None" - self.methodList = [] - self.windWave = False - self.windOnly = False - self.waveOnly = False - self.warningType = None - - # 
GridBased Feature attributes - self.phenomenonType = None - self.wxType = None - self.intensity = None - self.coverage = None - - def printArea(self): - print ' name, label', self.areaName, self.areaLabel - print ' warningType', self.warningType - print ' windWave, windOnly, waveOnly', self.windWave, self.windOnly, self.waveOnly - print ' statDict', self.statDict - print ' wxType, intensity', self.wxType, self.intensity - - class Drawable: - def __init__(self): - self.timePeriod = None - self.drawableType = None - self.pressureTag = None - self.latLons = None - self.movement = None - def printDrawable(self): - print ' drawableType', self.drawableType - print ' timePeriod, pressureTag', self.timePeriod, self.pressureTag - print ' latLons', self.latLons - - def _createFeatures(self, argDict): - ''' Set up 'skeleton' Feature objects from toolFeatures - Input toolFeatures: - [ - {'timePeriod': '00h', 'featureName': 'Feature1', 'basin': 'ATLC', - 'areaList': [ - {'lon': None, 'pieState': None, 'radius': None, 'lat': None, - 'areaDesc': 'WITHIN AREA BOUNDED BY 15S170E TO 15S169E TO 16S168E TO 17S169E TO 16S170E TO 15S170E', - 'basin': 'HSF_SP', 'areaName': 'Feature1_00h_EA1'}, - {'lon': None, 'pieState': None, 'radius': None, 'lat': None, - 'areaDesc': 'WITHIN AREA BOUNDED BY 15S171E TO 14S170E TO 15S168E TO 17S168E TO 18S169E TO 17S171E TO 15S171E', - 'basin': 'HSF_SP', 'areaName': 'Feature1_00h_EA2'}, - {'lon': None, 'pieState': None, 'radius': None, 'lat': None, - 'areaDesc': 'WITHIN AREA BOUNDED BY 13S171E TO 13S169E TO 14S168E TO 17S167E TO 18S170E TO 16S172E TO 13S171E', - 'basin': 'HSF_SP', 'areaName': 'Feature1_00h_EA3'} - ] - } - ] - ''' - - # Call a sequence of method in order to get a gridLoc - # We need his to process edit areas. 
- parmNameLevel = "Wind_SFC" - self.setUp(parmNameLevel, argDict) - self._gridLoc = self.getGridLoc() - self._dataMgr = argDict["dataMgr"] - - print "GridLoc:", dir(self._gridLoc) - - - self._savePathFile = self.descriptorFileName() - print "descFileName:", self._savePathFile - # Try to fetch the old object - try: - with open(self._savePathFile, "r") as f: - toolFeatures = cPickle.load(f) - f.close() - except: - toolFeatures = [] - print "Starting with an empty descriptor." - - for f in toolFeatures: - print f - - # Initialize windMax across features - self._windMax = 0.0 - featureNameList = self._getFeatureNames(toolFeatures) - self._features = [] - for featureName, basinName in featureNameList: - print "Createfeature basinName:", basinName - feature = self.Feature() - self._features.append(feature) - feature.name = featureName - feature.basinName = basinName - feature.featureType = 'Named' - feature.periods = [] - - toolTimePeriodList = self._getTimePeriodList(featureName, toolFeatures) - - for toolTimePeriod in toolTimePeriodList: - period = self.Period() - feature.periods.append(period) - - toolAreaList = self._getAreaList(featureName, toolTimePeriod, toolFeatures) - period.timePeriod = self._convertToTimeRange(toolTimePeriod) - - period.areas = [] - for areaName, areaDesc in toolAreaList: - area = self.Area() - period.areas.append(area) - area.areaName = areaName - print "^^^^^^^^^^^^^^^^^^^^^^^^^^^^^AREA NAME IS:", area.areaName - area.refData = None - area.areaLabel = areaDesc - area.timePeriod = period.timePeriod - print "*****************************area desc is:", area.areaLabel - self._processAreaOverlaps(period.areas) - - def _getFeatureNames(self, toolFeatures): - nameList = [] - featureList = [] - for t in toolFeatures: - print "Tool feature:", t - featureName = t.get("featureName") - if featureName not in nameList: - featureList.append((t.get("featureName"), t.get('basin'))) - nameList.append(featureName) - return featureList - - def 
_getTimePeriodList(self, featureName, toolFeatures): - tpList = [] - for t in toolFeatures: - if t["featureName"] != featureName: - continue - if t["timePeriod"] not in tpList: - tpList.append(t["timePeriod"]) - tpList.sort() - return tpList - - def _getAreaList(self, featureName, timePeriod, toolFeatures): - areaList = [] - #print "getting area list:", featureName, timePeriod - for t in toolFeatures: - if t["featureName"] != featureName or t["timePeriod"] != timePeriod: - continue - toolAreaList = t["areaList"] - for area in toolAreaList: - areaTuple = (area["areaName"], area["areaDesc"]) - areaList.append(areaTuple) - return areaList - - def _createGridBasedFeatures(self, argDict): - # Add gridBasedFeatures to self._features - - - # open the file - self._gridBasedPathFile = self.gridBasedFileName() - # Try to fetch the old object - try: - f = open(self._gridBasedPathFile, "r") - gridBasedFeatureList = cPickle.load(f) - f.close() - except: - gridBasedFeatures = [] - print "Starting with an empty gridBasedFeature descriptor." 
- - ### below this is the new code - 11/16/2017 CNJ ### - for gridBasedFeature in gridBasedFeatureList: # step through the feature list from the tool - feature = self.Feature() - feature.featureType = 'GridBased' - feature.wxType = gridBasedFeature[0]["wxType"] - feature.wxIntensity = gridBasedFeature[0]["intensity"] - feature.wxCoverage = gridBasedFeature[0]["wxCoverage"] - - feature.phenomenonType = self._phenomenonTypeDict()[gridBasedFeature[0]["wxType"]] - - feature.earliestTimePeriod = self._convertToTimeRange(gridBasedFeature[0]["timePeriod"]) - # Populate the areas in this feature - feature.GBareaList = [] - feature.periods = [] - for gbArea in gridBasedFeature: - feature.periods.append(self._convertToTimeRange(gridBasedFeature[0]["timePeriod"])) - area = self.Area() - area.areaName = gbArea["areaName"] - area.areaLabel = gbArea["areaDesc"] - area.timePeriod = self._convertToTimeRange(gbArea["timePeriod"]) - area.wxType = gbArea["wxType"] - area.intensity = gbArea["intensity"] - area.coverage = gbArea["wxCoverage"] - area.phenomenonType = feature.phenomenonType - feature.GBareaList.append(area) - # Populate the warning attributes - if feature.wxType == "VA": - feature.highestWarning = "Ashfall" - area.warningType = "Ashfall" - feature.warningTimePeriod = feature.earliestTimePeriod - feature.highestWarningHeadline = self._getGridBasedHeadline(area) - elif feature.wxType == 'ZY' and feature.wxIntensity == '+': - feature.highestWarning = "Heavy Freezing Spray" - area.warningType = "Heavy Freezing Spray" - feature.warningTimePeriod = feature.earliestTimePeriod - feature.highestWarningTimePeriod = feature.earliestTimePeriod - feature.highestWarningHeadline = self._getGridBasedHeadline(area) - - self._features.append(feature) - - - #feature.printFeature() - return - - def _getGridBasedHeadline(self, area): - if area.wxType == 'ZY' and area.intensity == '+': - return "...Heavy Freezing Spray Warning..." 
- elif area.wxType == 'VA': - return "...ASHFALL ADVISORY...\n[VOLCANO NAME] VOLCANO AT POSITION " + \ - "[xx.xN xx.xW] IS CURRENTLY IN A STATE OF UNREST AND COULD ERUPT WITH " + \ - "LITTLE NOTICE. MARINERS TRAVELING IN THE VICINITY OF [VOLCANO NAME] " + \ - "ARE URGED TO EXERCISE CAUTION. IF MARINERS ENCOUNTER VOLCANIC ASH OR " + \ - "FLOATING VOLCANIC DEBRIS...YOU ARE ENCOURAGED TO REPORT THE OBSERVATION " + \ - "TO THE NATIONAL HURRICANE CENTER BY CALLING 305-229-4424.\n" - return "" - - def _createTCM_BasedFeatures(self, argDict): - # Create Feature classes from TCM conversion script input - - ccc = "MIA" - siteID = "AT" - tcmBody="" - for index in ["1", "2", "3", "4", "5"]: - #for index in [tcm1, tcm2, tcm3]: - pil = ccc + "WRK" + siteID + index - tcmText = subprocess.check_output(["/awips2/fxa/bin/textdb", "-r", pil]) - - tcmLines = tcmText.split('\n') - tcmTimeStr = tcmLines[0] # "2100 UTC FRI JAN 15 2016" - if not self._tcmTimeOverlaps(tcmTimeStr): - continue - - tcmBegin = tcmLines[2] - tcmBody = string.join(tcmLines[2:], "\n") - - warningDict = { - "Hurricane": "...Hurricane Warning...", - "Hurricane Force": "...Hurricane Force Wind Warning...", - "Tropical Storm": "...Tropical Storm Warning", - "Storm": "...Storm Warning", - "Gale": "...Gale Warning", - } - - phenomenonDict = { - "Tropical Depression": "Tropical Depression", - "Post-Tropical": "Post-Tropical Cyclone", - "Remnants": "Remnants", - } - - feature = self.Feature() - feature.featureType = 'TCM_Based' - featureAreaList = [] - for key in warningDict: - headline = warningDict.get(key) - - if tcmBegin.find(headline) > -1 or tcmBegin.find(headline.upper()) > -1: - feature.highestWarning = key - feature.highestWarningTimePeriod = self._convertToTimeRange("00h") - break - - if not feature.highestWarning: - for key in phenomenonDict: - phen = phenomenonDict.get(key) - if tcmBegin.find(phen) > -1 or tcmBegin.find(phen.upper()) > -1: - feature.phenomenonType = key - break - feature.earliestTimePeriod = 
self._convertToTimeRange("00h") - feature.autoText = tcmBody.strip() - self._features.append(feature) - - def _tcmTimeOverlaps(self, tcmTimeStr): - tcmTime = self.convertBaseTime(tcmTimeStr) - curTime = time.time() - - ### 3 is the max number of hours for TCM overlap to be true - threshold = 6 * 3600 - - if abs(curTime - tcmTime) < threshold: - return True - return False - - def convertBaseTime(self, timeStr): - # extract time parts from the str - hour = int(timeStr[0:2]) - minute = int(timeStr[2:4]) - strList = timeStr.split(" ") - monthStr = strList[3] - month = self.monthNum(monthStr) - day = int(strList[4]) - year = int(strList[5]) - - # time.mktime returns time in seconds but in local time - baseTime = time.mktime((year, month, day, hour, minute, 0, 0, 0, 0)) - - # Adjustment to UTC - diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) - - # subtract timeZone and round to the nearest hour - roundedTime = int((baseTime - diffTime) / 3600) * 3600 - - return roundedTime - - def monthNum(self, monthStr): - monthList = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", - "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"] - try: - return monthList.index(monthStr) + 1 - except ValueError: - return 0 - - def _readCurrentTCM(self, argDict): - pass - - def _createDrawableFeatures(self, argDict): - # Create Features from VGF / XML Drawable files - - # Associating any drawables that match and existing Named Feature - print "***In Create Drawables***" - remainingDrawables = [] - for drawableElement in self._ingestDrawables(): - - print "DrawableElement:", drawableElement.printDrawable() - - if drawableElement.drawableType not in ['Ridge', 'Ice Edge', 'Gulf Stream']: - if self._associateDrawableElementWithFeature(drawableElement): - continue - remainingDrawables.append(drawableElement) - - # For the remaining Drawables, group them based on compatibility types and proximity - groups = [] - - # group is a list of drawables - # CJ change - group = [remainingDrawables[0]] - 
#group = [drawables[0]] - - remainingDrawables = remainingDrawables[1:] - i = 0 - while remainingDrawables and i < 100: - - group, remainingDrawables, done = self._groupDrawables(group, remainingDrawables) - if done: - groups.append(group) - if len(remainingDrawables) > 0: - group = remainingDrawables[0] - remainingDrawables = remainingDrawables[1:] - i = i + 1 - print "i=", i - if group: - groups.append(group) - - # this line replaced commented out code block above - group = [remainingDrawables] - - # Create a Feature from each group - for group in groups: - # Create a Drawable Feature - feature = self.Feature() - feature.featureType = 'Drawable' - featureAreaList = [] - - # Create all the periods as placeholders - periods = [] - for index in ['00','24','48']: - period = self.Period() - period.timePeriod = self._convertToTimeRange(index+'h') - periods.append(period) - - ### uncommenting the line below causes an infinite loop - #feature.periods = periods - - if type(group) is types.ListType: - for drawable in group: - print "feature.periods:", feature.periods - for period in feature.periods: - if drawable.timePeriod == period.timePeriod: - period.drawables.append(drawable) - print "appending to period.drawables in list type" - else: - continue - else: - for period in feature.periods: - if group.timePeriod == period.timePeriod: - period.drawables.append(group) - print "appending to period.drawables in non-list type" - else: - continue - - - for period in periods: - if period.drawables: - feature.periods.append(period) - feature.periods.sort(self._sortPeriodsByTime) - if len(feature.periods) > 0: - feature.earliestTimePeriod = feature.periods[0].timePeriod - self._features.append(feature) - - def _groupDrawables(self, group, drawables): - # Try to add each drawable to the group - done = True - newGroup = [] -# for g in group: -# print "group is:", g -# newGroup.append(g) - print "group is:", type(group) - newGroup = self._copyDrawables(group) - returnedDrawables = [] 
- if type(group) is types.ListType: - for d1 in group: - for d2 in drawables: - if self._compatibleDrawableTypes(d1, d2): - if self._proximity(d1, d2): - newGroup.append(d2) - done = False - else: - returnedDrawables.append(d2) - return newGroup, returnedDrawables, done - else: - return group, returnedDrawables, True - - def _copyDrawables(self, group): - print "dir:", dir(group) - if type(group) is types.ListType: - newList = [] - for g in group: - newList.append(g) - return newList - else: # it's a singleton - drawable = self.Drawable() - drawable.timePeriod = group.timePeriod - drawable.latLons = group.latLons - drawable.pressureTag = group.pressureTag - drawable.movement = group.movement - drawable.drawableType = group.drawableType - return drawable - - return - - def _ingestDrawables(self): - # Read in the files and use ElementTree to parse them and create Drawables - drawables = [] - print 'IngestDrawables' - for t in ['24']: - #for t in ['00','24','48']: - fileName = '/localapps/dev/HSF/'+t+'.xml' - #Below is where cron files live (note they get purged at H+45) - #fileName = '/data/fxa/LOCAL/getvgf/data/'+t+'.xml' - print "fileName", fileName - - tree = ET.parse(fileName) - timePeriod = self._convertToTimeRange(t+'h') - # Get the Lines - for line in tree.iter("Line"): - drawable = self.Drawable() - pgenType = line.attrib.get('pgenType') - print "pgenType", pgenType - - #pgenExcludeList = ["LINE_SOLID", "LINE_DASHED_6", "FILLED_ARROW", "POINTED_ARROW", "DRY_LINE", "General Text", "Contours", "None"] - pgenExcludeList = ["LINE_SOLID", "LINE_DASHED_6", "FILLED_ARROW", "POINTED_ARROW", "DRY_LINE", "Contours", "None"] - if pgenType in pgenExcludeList: - print "pgenType skipped:", pgenType - continue - drawable.drawableType = self._pgenTypeDecodeDict().get(pgenType) - drawable.timePeriod = timePeriod - drawable.latLons = self._getLatLons(line) - drawable.printDrawable() - drawables.append(drawable) - - # Get the collections with Symbols - for collection in 
tree.iter("DECollection"): - for symbol in collection.iter("Symbol"): - drawable = self.Drawable() - pgenType = symbol.attrib.get('pgenType') - print "pgenType", pgenType - drawable.drawableType = self._pgenTypeDecodeDict().get(pgenType) - drawable.timePeriod = timePeriod - drawable.latLons = self._getLatLons(symbol) - for textline in collection.iter("textline"): - drawable.pressureTag = textline.text + " mb" - print "printing collection drawable" - drawable.printDrawable() - drawables.append(drawable) - return drawables - - def _best_way(self, number): - if number%2==0: - return "even" - else: - return "odd" - - def _getLatLons(self, node): - latLons = [] - for point in node.findall("Point"): - - lat = round(float(point.attrib.get("Lat")),1) - lat = int((lat + 0.25) * 2.0) / 2.0 - lat = float(lat) - latmult = lat * 10 - if (self._best_way(latmult)) == "even": - lat = int(lat) - - lon = round(float(point.attrib.get("Lon")),1) - lon = int((lon + 0.25) * 2.0) / 2.0 - lon = float(lon) - lonmult = lon * 10 - if (self._best_way(lonmult)) == "even": - lon = int(lon) -# lat = float(point.attrib.get("Lat")) -# lon = float(point.attrib.get("Lon")) - latLons.append((lat, lon)) - return latLons - - def _associateDrawableElementWithFeature(self, drawableElement): - # Determine if the drawableElement can be associated with a feature - # If so, determine if is associated - found = False - latLons = drawableElement.latLons - for feature in self._features: - if feature.featureType not in ['Named']: - continue - for period in feature.periods: - if self._drawableElementOverlaps(period.areas, latLons): - period.drawables.append(drawableElement) - print "appending to period.drawables in associate" - found = True - return found - - # TO DO -- complete this - def _compatibleDrawableTypes(self, d1, d2): - compatibleTypes = [('High', 'Ridge'), ('Trough', 'Low'), ('Tropical Wave', 'Low'), ('Low', 'Cold Front')] - t1 = d1.drawableType - t2 = d2.drawableType - if t1 == t2: - return True - if 
(t1, t2) in compatibleTypes or (t2, t1) in compatibleTypes: - return True - else: - return False - - def _sampleData(self, argDict): - elements = self._analysisList(argDict) - periods = [] - areaTuples = [] - for feature in self._features: - if feature.featureType != 'Named': - continue - for period in feature.periods: - periods.append((period.timePeriod, 'timeLabel')) - for area in period.areas: - if area.refData: - editArea = area.refData - else: - editArea = area.areaName - areaTuples.append((editArea, area.areaLabel)) - - sampleInfo = (elements, periods, areaTuples) - #print "\nSampleInfo", sampleInfo - self._sampler = self.getSampler(argDict, sampleInfo) - print "Sampler", self._sampler - -##### - def getSampler(self, argDict, sampleInfo, sampleFromServer=0): - # Get a HistoSampler given - # sampleInfo, which is a list of tuples, or just a single tuple - # of tuples ([elements], [periods], [areas]) - # the elements are [(name, method)] -- basically the analysis list - # the periods [(timeRange, label)] - # areas [(name,label)] or [(refData, label)] or [(refID, label)] - ifpClient = argDict["ifpClient"] - databaseID = argDict["databaseID"] - - from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID, ReferenceData - from com.raytheon.viz.gfe.sampler import SamplerRequest, HistoSampler - from java.util import ArrayList - - # convert input sampleInfo to samplerRequests - samplerRequests = ArrayList() - if type(sampleInfo) == tuple: - sampleInfo = [sampleInfo] - for si in sampleInfo: - elements, periods, areas = si - for e in elements: - parmID = self.getParmID(e[0], databaseID) - for p in periods: - for editArea, areaName in areas: - if type(editArea) is str: - samplerRequests.add(SamplerRequest( \ - parmID, ReferenceID(editArea), p[0].toJavaObj())) - elif str(type(editArea)) == "": - samplerRequests.add(SamplerRequest( \ - parmID, editArea, p[0].toJavaObj())) - - else: - raise Exception, "area specification incorrect" - - # do sampling - if 
sampleFromServer: - sampler = ifpClient.sampleRequest(samplerRequests) - else: - sampler = HistoSampler(ifpClient.getJavaClient(), samplerRequests) - if sampler.isValid() != 1: - print "Cannot Sample: Check for invalid Weather Elements, ",\ - "Invalid Areas", str(samplerRequests) - return None - #print "sampler ", sampler - return sampler - -##### - - def _populateFeatures(self, argDict): - - # Populate Features with product information - elements = self._analysisList(argDict) - - for feature in self._features: - if feature.featureType != 'Named': - continue - featureAreaList = [] - - print "Raw feature basin:", feature.basinName - - for period in feature.periods: - areas = period.areas - #print "******time period:", timePeriod, "**********" - for area in areas: - featureAreaList.append(area) - - if area.refData: - areaData = area.refData - else: - areaData = area.areaName - - print "Populate Features using data:", areaData - statDict = self.getStatDict(self._sampler, elements, period.timePeriod, areaData) - print "elements:", elements - area.statDict = statDict - print "PopulateFeatures....Area....StatDict", statDict - print "Area refdata:", area.areaName - if area.refData: - polygons = area.refData.getPolygons - print "polygon methods:", dir(area.refData.getPolygons) - print "Polygons size:", polygons.__sizeof__() - - # Look for various warnings for this period and area - for warningMethod in self._warningMethods(): - found, headline, methodList, warningType = warningMethod(statDict) - print "warnings into warningMethod:", found, headline - - if found: - area.warningType = warningType - area.headline = headline - area.methodList = methodList - break - else: - if self._checkForWx(statDict): - methodList = self._wxMethodList() - else: - methodList = self._windWaveMethodList() - area.methodList = methodList - self._setWindWave(area, statDict) - - # If there are warnings: Find the highest earliest warning type - if len(featureAreaList) > 0: - 
featureAreaList.sort(self._sortAreasForWarningType) - chosenArea = featureAreaList[0] - feature.highestWarning = chosenArea.warningType - feature.highestWarningHeadline = chosenArea.headline - feature.highestWarningTimePeriod = chosenArea.timePeriod - # Find earliest timePeriod - timePeriodList = sorted(feature.periods, self._sortTimePeriods) - feature.earliestTimePeriod = timePeriodList[0].timePeriod - return - - def _orderFeatures(self): - # Sort at the area level - for feature in self._features: - # Grid based features don't have periods so ignore this - if feature.featureType == "GridBased": - continue - for period in feature.periods: - period.areas.sort(self._sortAreas) - - self._warningFeatures = [] - self._synopsisFeatures = [] - # Sort at the feature level - for feature in self._features: - if feature.highestWarning: - self._warningFeatures.append(feature) - else: - self._synopsisFeatures.append(feature) - self._warningFeatures.sort(self._sortWarningFeatures) - self._synopsisFeatures.sort(self._sortSynopsisFeatures) - - - ########## - - def _preProcessProduct(self, fcst, argDict): - productName = self._productName.strip() - issuedByString = self.getIssuedByString() - if self._windMax > 63: - self._issuanceType = "PAN PAN" - else: - self._issuanceType = "SECURITE" - - fcst = fcst + self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" + \ - productName + "\n" +\ - "NWS " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - - if self._ccode: - ### the next line must be changed to check HSFAT1 pil after testing - 11/14/2017 CNJ - if self._pil == "HSFAT1": - fcst = fcst + "CCODE/1:31:04:01:00/AOW/NWS/CCODE" + "\n" - elif self._pil == "HSFEP1": - fcst = fcst + "CCODE/1:31:12:01:00/AOW+POR/NWS/CCODE" + "\n" - elif self._pil == "HSFEP3": - fcst = fcst + "CCODE/1:31:16:01:00/AOW/NWS/CCODE" + "\n" - else: - pass - - fcst = fcst + self._Text1() + self._issuanceType + "\n\n" + self._Text2() - fcst = 
self._validLabel(fcst, argDict) - return fcst - - - def _makeSection(self, fcst, argDict, features, sectionType): - print "feature dump" - for feature in features: - print "FeatureType:", feature.featureType - print "Feature.phenomenonType:", feature.phenomenonType - - - if sectionType == "Warning": - fcst = fcst + ".WARNINGS.\n\n" - else: - fcst = fcst + ".SYNOPSIS AND FORECAST.\n\n" - - elements = self._analysisList(argDict) - - print "Feature Count:", len(self._features), "Section type:", sectionType - - for feature in features: - print "^^^FEATURE^^^" - #feature.printFeature() - print "^^^end feature^^^" - fname = feature.name - print "### FEATURE NAME:", fname - if feature.featureType == "TCM_Based": - fcst = fcst + feature.autoText + "\n\n" - continue - - # Process Volcanic Ash wxType -# if feature.wxType == 'VA': -# fcst = fcst + feature.area.headline -# #print "Feature wxType is:", feature.wxType, "and is being skipped." -# continue - - if sectionType == "Warning": - print "Section type is Warning" - fcst = fcst + feature.highestWarningHeadline + "\n" - print "highestWarningHeadline", feature.highestWarningHeadline - -# elif sectionType == "Synopsis": - #print "Section type is Synopsis" - print "Formatting feature type:", feature.featureType - if feature.featureType != "GridBased": - for period in feature.periods: - timePeriod = period.timePeriod - #print "******time period:", timePeriod, "*******************" - if feature.periods.index(period) == 0: - basinName = feature.basinName - else: - basinName = None - - # next line was reporting blank time label for convection - #fcst = fcst + self._getTimePeriodLabel(argDict, timePeriod, basinName) - - # next code block to not report blank time label for grid-based feature - if feature.featureType == 'GridBased': - pass - else: - fcst = fcst + self._getTimePeriodLabel(argDict, timePeriod, basinName) - - print "*** before period drawables ***" - # Add in associated drawables - if period.drawables: - print "INTO 
PERIOD.DRAWABLES", period.drawables - for drawable in period.drawables: - fcst = fcst + self._formatDrawable(drawable) #+ ".\n\n" - - print "*** after period drawables ***" - - for area in period.areas: - areaWords = self._getAreaWords(area).rstrip() - elementWords = self._getElementWords(feature, area, sectionType) - fcst = fcst + areaWords + " " + elementWords #+ ". " - #removed space above between "" because when a feature is dissipated - #it was adding an extra space in front of WINDS 20 KT OR LESS. -JL/3/24/16 - - # next two lines may be printing extra carriage returns when there are no features - fcst = fcst + "\n" - elif feature.featureType == "GridBased": - print "Formatting grid based features.............................................." - for area in feature.GBareaList: - areaWords = self._getAreaWords(area).rstrip() - elementWords = self._getGridBasedWxWords(area) - - # next line to include time label for grid based feature - if area.wxType != 'VA': - fcst = fcst + self._getTimePeriodLabel(argDict, area.timePeriod) - #if feature.wxType == 'VA': - if area.wxType == 'VA': - fcst = fcst + "\n\n" - #fcst = fcst + area.headline + "\n\n" - else: - fcst = fcst + elementWords + " " + areaWords + ".\n" - print "**** element words:", elementWords, "****" - print "**** area words:", areaWords, "****" - #fcst = fcst + elementWords + " " + areaWords + ". " - else: - print "Skipping this feature in makeSection.+++++++++++++++++++++++++++++++++++++++++++++++++++" - print "Feature Dump:", feature.printFeature() - - - fcst = fcst + "\n" - - if sectionType == "Warning": - if self._noWarningFeatures(): - print "Found no warning features. 
" - fcst = fcst + ".NONE.\n\n" - - return fcst - - - def _getAreaWords(self, area): - areaLabel = area.areaLabel - # Killed feature have no area label so remove a space - print "+&++++++++++++++++++++++++++++++AREA LABEL:", areaLabel - if areaLabel == "": - return areaLabel - else: - return areaLabel + " " - - def _getElementWords(self, feature, area, sectionType): - # Set up the correct method depending on the weather element or phenomenon - statDict = area.statDict - if feature.featureType == 'GridBased': - periodStr = "" - methodList = self._gridBasedWxMethods(area) - else: - periodStr = ". " - if sectionType == "Warning": - methodList = area.methodList - else: - if self._checkForWx(statDict): - methodList = self._wxMethodList() - else: - methodList = self._windWaveMethodList() - if area.wxType == "F" or area.wxType == "K": - periodStr = "." - words = self._makePhrases("", methodList, statDict, area.timePeriod, area.areaLabel, periodStr) - return words - - def _formatDrawable(self, drawable): - print "### DRAWABLE ###" - drawable.printDrawable() - print "### END DRAWABLE ###" - outputStr = drawable.drawableType + ' from ' - length = len(drawable.latLons) - for index in range(length): - lat, lon = drawable.latLons[index] - - # Format lat/lon output - if lat >= 0: - lathemi = "N" - else: - lathemi = "S" - # check dateline later - if lon > 0: - lonhemi = "E" - else: - lonhemi = "W" - lat = abs(lat) - lon = abs(lon) - - # removed space between lat and lon for drawables - if lat < 10: - outputStr = outputStr + '0' + str(lat) + lathemi + str(lon) + lonhemi - else: - outputStr = outputStr + str(lat) + lathemi + str(lon) + lonhemi - - if index < length-1: - outputStr = outputStr + ' to ' - return outputStr - - def _noSynopsisFeatures(self): - - for f in self._features: - if f.featureType == "Named": - return False - return True - - def _noWarningFeatures(self): - for f in self._features: - print "Feature.highestWarning:", f.highestWarning - if f.highestWarning is not 
None: - return False - return True - - def _postProcessProduct(self, fcst, argDict): - ## Insert Labe and Forecaster Name at bottom of product - #forecasterName = self._forecasterName.strip() - #First line below only needed for HSFEP2# - if self._noSynopsisFeatures(): - fcst = fcst + ".ENTIRE AREA WINDS 20 KT OR LESS. SEAS LESS THAN 8 FT." - else: - fcst = fcst + ".REMAINDER OF AREA WINDS 20 KT OR LESS. SEAS LESS THAN 8 FT." - - self._userInfo = UserInfo.UserInfo() - forecasterName = self._userInfo._getForecasterName(argDict) - - #if fcst.find('HURRICANE') != -1: - if re.search(r'\.\.\.HURRICANE*', fcst): - fcst = re.sub(r'SECURITE', r'PAN PAN', fcst) - if self._ccode: - if self._pil == "HSFAT2": - fcst = re.sub(r'CCODE/1:31:04:01:00/AOW/NWS/CCODE', r'CCODE/2:31:04:11:00/AOW+AOE/NWS/CCODE', fcst) - elif self._pil == "HSFEP1": - fcst = re.sub(r'CCODE/1:31:12:01:00/AOW+POR/NWS/CCODE', r'CCODE/2:31:12:11:00/AOW+POR+AOE/NWS/CCODE', fcst) - elif self._pil == "HSFEP3": - fcst = re.sub(r'CCODE/1:31:16:01:00/AOW/NWS/CCODE', r'CCODE/2:31:16:11:00/AOW+POR+AOE/NWS/CCODE', fcst) - else: - pass - - fcst = fcst + "\n\n" + "$$" + "\n" + ".FORECASTER " + forecasterName + ". NATIONAL HURRICANE CENTER." 
- - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - - fcst = self._wrapLines(fcst) - - return fcst - - # SORT METHODS - def _warningOrder(self): - return [ - "Hurricane", - "Typhoon", - "Hurricane Force", - "Tropical Storm", - "Storm", - "Gale", - "Heavy Freezing Spray", - "Ashfall", - "Space Weather", - None, - ] - - def _basinOrder(self): - return ["ATLC", - "ATLC AND CARIBBEAN", - "ATLC AND CARIBBEAN AND GULF OF MEXICO", - "ATLC AND GULF OF MEXICO", - "CARIBBEAN", - "CARIBBEAN AND GULF OF MEXICO", - "GULF OF MEXICO", - None, - ] - - def _phenomenonOrder(self): - return [ - "Tropical Depression", - "Post-Tropical", - "Remnants", - "Freezing Spray", - "Fog", - "Smoke", - "Convection", - None, - ] - - def _phenomenonTypeDict(self): - return { - 'ZY': 'Freezing Spray', - 'VA': 'Ashfall', - 'T': 'Convection', - 'F' : 'Fog', - 'K' : 'Smoke', - #default: None, - } - - def _drawableOrder(self): - return { - "ITCZ", - "Cold Front", - "Forming Cold Front", - "Dissipating Cold Front", - "Warm Front", - "Forming Warm Front", - "Dissipating Warm Front", - "Stationary Front", - "Forming Stationary Front", - "Dissipating Stationary Front", - "Occluded Front", - "Forming Occluded Front", - "Dissipating Occluded Front", - "Trough", - "Tropical Wave", - "Low", - "High", - "Ridge", - "Ice Edge", - "Gulf Stream", - "Ignore" - } - - def _pgenTypeDecodeDict(self): - return { - 'COLD_FRONT': 'Cold Front', - 'COLD_FRONT_FORM': 'Forming Cold Front', - 'COLD_FRONT_DISS': 'Dissipating Cold Front', - 'WARM_FRONT': 'Warm Front', - 'WARM_FRONT_FORM': 'Forming Warm Front', - 'WARM_FRONT_DISS': 'Dissipating Warm Front', - 'STATIONARY_FRONT': 'Stationary Front', - 'STATIONARY_FRONT_FORM': 'Forming Stationary Front', - 'STATIONARY_FRONT_DISS': 'Dissipating Stationary Front', - 'OCCLUDED_FRONT': 'Occluded Front', - 'OCCULUDED_FRONT_FORM': 'Forming Occluded Front', - 'OCCLUDED_FRONT_DISS': 'Dissipating Occluded Front', - 'TROF': 'Trough', - 
'TROPICAL_TROF': 'Tropical Wave', - 'LINE_DASHED_8': 'Shear Line', - 'FILLED_HIGH_PRESSURE_H': 'High', - 'LOW_X_FILLED': 'Low', - 'ZIGZAG': 'Ridge', - 'ZZZ_LINE': 'ITCZ', - 'DOUBLE_LINE': 'Monsoon Trough', - 'FILLED_CIRCLES': 'Ice Edge', - 'LINE_SOLID': 'Western Edge of the Gulf Stream', - 'LINE_DASHED_2': 'Eastern Edge of the Gulf Stream', - 'LINE_DASHED_6': 'Ignore', - 'FILLED_ARROW': 'Ignore', - 'POINTED_ARROW': 'Ignore', - 'DRY_LINE': 'Ignore', - 'General Text': 'Ignore', - 'Contours': 'Ignore', - 'None': 'Ignore' - } - - - def _sortAreasForWarningType(self, a, b): - # Sorting to find the highest warning type for an entire Feature - order = self._warningOrder() - if order.index(a.warningType) < order.index(b.warningType): - return -1 - elif order.index(a.warningType) > order.index(b.warningType): - return 1 - if a.timePeriod.startTime() < b.timePeriod.startTime(): - return -1 - elif a.timePeriod.startTime() > b.timePeriod.startTime(): - return 1 - return 0 - - def _sortTimePeriods(self,a,b): - # Sorting time periods within a Feature from earliest to latest - if a.timePeriod.startTime() < b.timePeriod.startTime(): - return -1 - elif a.timePeriod.startTime() > b.timePeriod.startTime(): - return 1 - return 0 - - def _sortAreas(self, a, b): - # Sorting areas within a period - # a, b are area objects - #print "SortAreas", "a:", a.windWave, "b:", b.windWave - order = self._warningOrder() - if order.index(a.warningType) < order.index(b.warningType): - return -1 - elif order.index(a.warningType) > order.index(b.warningType): - return 1 - if a.windWave: - return 1 - if b.windWave: - return -1 - if a.windOnly: - return 1 - if b.windOnly: - return -1 - if a.waveOnly: - return 1 - if b.waveOnly: - return -1 - if a.areaName < b.areaName: - return -1 - if a.areaName > b.areaName: - return 1 - - return 0 - - def _sortWarningFeatures(self, a, b): - # Sorting Features with Warnings into product order - # a, b are Feature objects - order = self._warningOrder() - if a.featureType 
== "Named" and b.featureType == "GridBased": - return -1 - elif b.featureType == "Named" and a.featureType == "GridBased": - return 1 - if order.index(a.highestWarning) < order.index(b.highestWarning): - return -1 - elif order.index(a.highestWarning) > order.index(b.highestWarning): - return 1 - if a.highestWarningTimePeriod.startTime() < b.highestWarningTimePeriod.startTime(): - return -1 - elif a.highestWarningTimePeriod.startTime() > b.highestWarningTimePeriod.startTime(): - return 1 - order = self._basinOrder() - if order.index(a.basinName) < order.index(b.basinName): - return -1 - elif order.index(a.basinName) > order.index(b.basinName): - return 1 - return 0 - - def _sortSynopsisFeatures(self, a, b): - # Sorting Features without Warnings into product order - # a, b are Feature objects - if a.featureType == "Named" and b.featureType == "GridBased": - return -1 - elif b.featureType == "Named" and a.featureType == "GridBased": - return 1 - if a.earliestTimePeriod is None: - return 1 - elif b.earliestTimePeriod is None: - return -1 - order = self._phenomenonOrder() - if order.index(a.phenomenonType) < order.index(b.phenomenonType): - return -1 - elif order.index(a.phenomenonType) > order.index(b.phenomenonType): - return 1 - - #print "a.basinName:", a.basinName, "b.basinName:", b.basinName - order = self._basinOrder() - if order.index(a.basinName) < order.index(b.basinName): - return -1 - elif order.index(a.basinName) > order.index(b.basinName): - return 1 - if a.earliestTimePeriod.startTime() > b.earliestTimePeriod.startTime(): - return 1 - #return -1 - elif a.earliestTimePeriod.startTime() < b.earliestTimePeriod.startTime(): - return -1 - #return 1 - return 0 - - def _sortPeriodsByTime(self, a, b): - if a.timePeriod.startTime() > b.timePeriod.startTime(): - return -1 - elif a.timePeriod.startTime() < b.timePeriod.startTime(): - return 1 - return 0 - - # Methods for populating Features, determining which have Warnings - def _warningMethods(self): - return [ - 
self._checkHurricane, - self._checkHurricaneForce, - self._checkTyphoon, - self._checkTropicalStorm, - self._checkStorm, - self._checkGale, - self._checkFreezingSpray, - #self._checkAshfall, - self._checkSpaceWx, - ] - - def _checkHurricane(self, statDict): - hazards = self.getStats(statDict, "Hazards") - for hazardType, timeRange in hazards: - if hazardType == "HU.W": - return True, "...HURRICANE WARNING...", self._windWaveMethodList(), "Hurricane" - return False, None, None, None - - def _checkTyphoon(self, statDict): - hazards = self.getStats(statDict, "Hazards") - for hazardType, timeRange in hazards: - if hazardType == "TY.W": - return True, "...TYPHOON WARNING...", self._windWaveMethodList(), "Typhoon" - return False, None, None, None - - def _checkHurricaneForce(self, statDict): - hazards = self.getStats(statDict, "Hazards") - for hazardType, timeRange in hazards: - if hazardType == "HF.W": - return True, "...HURRICANE FORCE WIND WARNING...", self._windWaveMethodList(), "Hurricane Force" - return False, None, None, None - - def _checkTropicalStorm(self, statDict): - hazards = self.getStats(statDict, "Hazards") - for hazardType, timeRange in hazards: - if hazardType == "TR.W": - return True, "...TROPICAL STORM WARNING...", self._windWaveMethodList(), "Tropical Storm" - return False, None, None, None - - def _checkStorm(self, statDict): - hazards = self.getStats(statDict, "Hazards") - for hazardType, timeRange in hazards: - if hazardType == "SR.W": - return True, "...STORM WARNING...", self._windWaveMethodList(), "Storm" - return False, None, None, None - - def _checkGale(self, statDict): - hazards = self.getStats(statDict, "Hazards") - for hazardType, timeRange in hazards: - if hazardType == "GL.W": - return True, "...GALE WARNING...", self._windWaveMethodList(), "Gale" - return False, None, None, None - - def _checkFreezingSpray(self, statDict): - hazards = self.getStats(statDict, "Hazards") - for hazardType, timeRange in hazards: - if hazardType == "UP.W": 
- return True, "...HEAVY FREEZING SPRAY WARNING...", self._WxMethodList(), "Heavy Freezing Spray" - return False, None, None, None - - def _checkSpaceWx(self, statDict): - return False, None, None, None - - def _setWindWave(self, area, statDict): - minMax, dir = self.getStats(statDict, "Wind", "MinMax") - windMin, windMax = minMax - windThreshold = 22.5 - minMag, waveMax = self.getStats(statDict, "WaveHeight", "MinMax") - waveThreshold = self.nlValue(self.null_nlValue( - None, None, "WaveHeight", "WaveHeight"), waveMax) - if windMax >= windThreshold and waveMax > waveThreshold: - area.windWave = True - elif windMax >= windThreshold: - area.windOnly = True - elif waveMax > waveThreshold: - area.waveOnly = True - - ### Below not working to automatically change Securite to Pan Pan - if windMax > self._windMax: - self._windMax = windMax - - def _validLabel(self, fcst, argDict): - - curTime = argDict.get("creationTime") - timeStr24 = time.strftime(" %a %b %e.", time.gmtime(curTime + 24*3600)) - timeStr48 = time.strftime(" %a %b %e.", time.gmtime(curTime + 48*3600)) - - fcst = fcst + "SYNOPSIS VALID " + self._validTime + \ - self.getCurrentTime(argDict, " %a %b %e.", shiftToLocal=0) + \ - "\n" + "24 hour forecast valid " + self._validTime + \ - timeStr24 + "\n" + \ - "48 hour forecast valid " + self._validTime + \ - timeStr48 + "\n\n" - - return fcst - - ### modified to add leading zero for 06 hour forecast ## - def _getTimePeriodLabel(self, argDict, timeRange, basinName=None): - now = argDict.get("creationTime") - # truncate the current time to the last six hour time - now = int(now / (3600* 6)) * 3600 * 6 - diffTime = timeRange.startTime() - AbsTime.AbsTime(now) - - if basinName: - leading = '.'+ basinName + ' ' - else: - leading = '.' 
- - if diffTime <= 0: - return leading - diffTime = diffTime / 3600 - - ## added code - CJ 3/9/15 - if diffTime < 10: - diffTime = str(diffTime) - diffTime = diffTime.zfill(2) - label = leading + diffTime + " hour forecast " - else: - label = leading + `diffTime`+ " hour forecast " - - return label - - # Methods for translating from Feature Descriptors coming from the Tool - # to Feature Objects - - - def _convertToTimeRange(self, timePeriod): - - hour = int(timePeriod[0:2]) - - baseTime = int(time.time() / (3600 * 6)) * (3600 * 6) - productTime = baseTime + (hour * 3600) - - timeRange = TimeRange.TimeRange(AbsTime.AbsTime(productTime), - AbsTime.AbsTime(productTime + 6 * 3600)) - return timeRange - - - # Returns the name of the file used to store the edit area information. - def descriptorFileName(self): - - domain = self._displayName[-3:] - - #TextProduct instance has no attribute '_siteID below for AT2 - return "/data/local/HighSeas/Formatter/NH2/HSF_AT2HighSeasDescriptors.pic" - - # Returns the name of the file used to store the edit area information. - def gridBasedFileName(self): - - sys, nodeName, release, version, machine = os.uname() - domain = self._displayName[-3:] - - #TextProduct instance has no attribute '_siteID below for AT2 - return "/data/local/HighSeas/Formatter/NH2/HSF_AT2GridBasedFeatures.pic" - - - def _checkForWx(self, statDict): - wxStats = statDict.get("Wx") - if wxStats is None: - return False - for wxStat in wxStats: - subkey, rank = wxStat - vis = self.getVis([subkey]) - if subkey.wxType() == "F"and vis <= 1: - return True - return False - - def _makePhrases(self, fcst, methodList, statDict, timePeriod, areaLabel, periodStr=". 
"): - - phraseWords = "" - print "makePhrases methodList", methodList - - for methodInfo in methodList: - wordMethod, elementName, maxMin, elementType = methodInfo - - tree, node = self._makeTreeNode( - methodInfo, statDict, timePeriod, areaLabel) - #print "statDict after makeTreeNode", statDict - self._applyRanges(tree, node, statDict, elementName, elementType) - #print "statDict after applyRanges", statDict - for subPhrase in node.get("childList"): - wordMethod(tree, subPhrase) - #tree.printNode(node) - self.fillNulls(tree, node) - if elementName == "Wind": - self.embedDescriptor(tree, node) - if wordMethod == self.visibility_words: - descriptor = self.phrase_descriptor(tree, node, "Visibility", "Visibility") - node.set("descriptor", descriptor) - self.assembleSubPhrases(tree, node) - phraseWords = phraseWords + node.get("words") + periodStr - - fcst = fcst + phraseWords - return fcst - - def _gridBasedWxMethods(self, area): - return [(self._gridBasedWxWords(area), "Wx", "List", self.WEATHER())] - - def _gridBasedWxWords(self, area): - - wxType = area.wxType - intensity = area.intensity - coverage = area.coverage - if wxType == 'T' and intensity == '+' and coverage == "Iso": - words = 'isolated strong convection' - elif wxType == 'T' and intensity == '+' and coverage == "Sct": - words = 'scattered strong convection' - elif wxType == 'T' and intensity == '+' and coverage == "Num": - words = 'numerous strong convection' - elif wxType == 'T' and coverage == "Iso": - words = 'isolated moderate convection' - elif wxType == 'T' and coverage == "Sct": - words = 'scattered moderate convection' - elif wxType == 'T' and coverage == "Num": - words = 'numerous moderate convection' - elif wxType == 'F' and intensity == '+': - words = 'dense fog' - elif wxType == 'ZY' and intensity == '+': - words = 'heavy freezing spray' - elif wxType == 'ZY' and intensity == 'm': - words = 'moderate freezing spray' - elif wxType == 'ZY' and intensity == '-': - words = 'light freezing spray' 
- elif wxType == 'VA': - words = '' - elif wxType == 'K': - words = 'dense smoke' - else: - words = 'Wx Type not found' - return self.setWords(node, words) - - def _getGridBasedWxWords(self, area): - wxType = area.wxType - intensity = area.intensity - coverage = area.coverage - if wxType == 'T' and intensity == '+' and coverage == "Iso": - words = 'isolated strong convection' - elif wxType == 'T' and intensity == '+' and coverage == "Sct": - words = 'scattered strong convection' - elif wxType == 'T' and intensity == '+' and coverage == "Num": - words = 'numerous strong convection' - elif wxType == 'T' and coverage == "Iso": - words = 'isolated moderate convection' - elif wxType == 'T' and coverage == "Sct": - words = 'scattered moderate convection' - elif wxType == 'T' and coverage == "Num": - words = 'numerous moderate convection' - elif wxType == 'F' and intensity == '+': - words = 'dense fog' - elif wxType == 'ZY' and intensity == '+': - words = 'heavy freezing spray' - elif wxType == 'ZY' and intensity == 'm': - words = 'moderate freezing spray' - elif wxType == 'ZY' and intensity == '-': - words = 'light freezing spray' - elif wxType == 'VA': - words = '' - elif wxType == 'K': - words = 'dense smoke' - else: - words = 'Wx Type not found' - - return words - - - ##################### - # Overrides - - def _windWaveMethodList(self): - return [ - # WINDS - # wind_phrase - (self.vector_words, "Wind", "Max", self.VECTOR()), - #self.gust_phrase, - # WAVES - #(self.waveHeight_words, "WaveHeight", "Max", self.SCALAR()), - (self.wave_words, "WaveHeight", "Max", self.SCALAR()), - ] - - def _wxMethodList(self): - return [ - # WEATHER - (self.weather_words, "Wx", "List", self.WEATHER()), - (self.visibility_words, "Wx", "List", self.WEATHER()), - ] - - - def _analysisList(self, argDict): - return [ - #("Wind", self.vectorModeratedMinMax), - ("Wind", self.vectorMinMax), - #("WindGust", self.moderatedMax), - ("WaveHeight", self.moderatedMinMax), - ("Wx", self.rankedWx), - 
("Hazards", self.discreteTimeRangesByKey), - ] - - - - - def vector_mag(self, tree, node, minMag, maxMag, units, - elementName="Wind"): - "Create a phrase for a Range of magnitudes" - - # Check for "null" value (below threshold) - threshold = self.nlValue(self.null_nlValue( - tree, node, elementName, elementName), maxMag) - if maxMag < threshold: - return "null" - - # Apply max reported threshold - maxReportedMag = self.maxReported_threshold(tree, node, elementName, elementName) - if maxMag >= maxReportedMag: - maxMag = maxReportedMag - #minMag = 0 - - units = self.units_descriptor(tree, node, "units", units) - if elementName == "Wind": - if self.marine_wind_flag(tree, node): - return self.marine_wind_mag(tree, node, minMag, maxMag, units, elementName) - - # round to the nearest 5 - # Handle special caseS of 22.5 minMag. - if maxMag < 22.5: - return 'null' - if maxMag >= 22.5 and maxMag < 27.5 and minMag >= 22.5 and minMag < 27.5: - words = '25 ' + units - elif minMag >= 20 and minMag < 22.5 and maxMag >= 22.5 and maxMag < 27.5: - words = '20 to 25 '+ units - elif minMag >= 20 and minMag < 22.5 and maxMag >= 27.5 and maxMag < 32.5: - words = '20 to 30 '+units - else: - minMag = int((minMag + 2.5) / 5.0) * 5.0 - maxMag = int((maxMag + 2.5) / 5.0) * 5.0 - - # Check for SingleValue - if maxMag == minMag: #or minMag == 0: - around = self.addSpace( - self.phrase_descriptor(tree, node, "around", elementName)) - words = around + `int(maxMag)` + " " + units - else: - if int(minMag) < threshold: - upTo = self.addSpace( - self.phrase_descriptor(tree, node, "up to", elementName)) - words = upTo + `int(maxMag)` + " " + units - else: - valueConnector = self.value_connector(tree, node, elementName, elementName) - words = `int(minMag)` + valueConnector + `int(maxMag)` + " " + units - - # This is an additional hook for customizing the magnitude wording - words = self.vector_mag_hook(tree, node, minMag, maxMag, units, elementName, words) - - return words - - # OVERRIDE - to get 
single letter directions. - def vector_dir(self, dir): - if not type(dir)== types.StringType: - dir = self.dirToText(dir) - # Commented this out to get single letter directions - Tom. -## dir = string.replace(dir, "N", "north") -## dir = string.replace(dir, "S", "south") -## dir = string.replace(dir, "E", "east") -## dir = string.replace(dir, "W", "west") - return dir - - def element_outUnits_dict(self, tree, node): - dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) - dict["Visibility"] = "NM" - dict["Wind"] = "kts" - dict["WaveHeight"] = "ft" - return dict - - def units_descriptor_dict(self, tree, node): - # Dictionary of descriptors for various units - return { - "units": { - "ft": "FT", - "F":"", - "C":"degrees", - "K":"kelvins", - "%":" percent", - "in":"inches", - "kts":"KT", - "s":"seconds", - "hrs":"hours", - "m/s":"meters/second", - "mph":"mph", - "m":"meters", - "m^2/s":"meters^2/second", - "kt-ft":"knots-feet", - "mm":"millimeters", - "degrees": "degrees", - "percent": "percent", - }, - "unit": { - "ft":"FT", - "F":"", - "C":"degree", - "K":"kelvin", - "%":" percent", - "in":"inch", - "kts":"KT", - "s":"second", - "hrs":"hour", - "m/s":"meter/second", - "mph":"mph", - "m":"meter", - "m^2/s":"meter^2/second", - "kt-ft":"knot-foot", - "mm":"millimeter", - "degree": "degree", - "percent": "percent", - }, - } - - ##### - # NULL value phrases - def first_null_phrase_dict(self, tree, node): - # Phrase to use if values THROUGHOUT the period or - # in the first period are Null (i.e. below threshold OR NoWx) - # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. 
- return { - "Wind": "Winds 20 kt or less", - "Wind20ft": "light winds", - "TransWind": "light winds", - "FreeWind": "light winds", - "Swell": "light swells", - "Swell2": "", - "Wx": "", - "WindGust": "", - "WaveHeight": "Seas less than 8 ft", - "WindWaveHgt": "waves 2 ft or less", - "CWR": "", - } - - def null_phrase_dict(self, tree, node): - # Phrase to use if values THROUGHOUT the period or - # in the first period are Null (i.e. below threshold OR NoWx) - # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. - return { - "Wind": "Winds 20 kt or less", - "Wind20ft": "light winds", - "TransWind": "light winds", - "FreeWind": "light winds", - "Swell": "light swells", - "Swell2": "", - "Wx": "", - "WindGust": "", - "WaveHeight": "Seas less than 8 ft", - "WindWaveHgt": "waves 2 ft or less", - "CWR": "", - } - - def null_nlValue_dict(self, tree, node): - # Threshold below which values are considered "null" and - # reported using the null_phrase (see above) - return { - "otherwise": 0, - "Wind": 20, - #"Wind": 22.5, - "WaveHeight": 8, - #"WindGust": 20, - "Visibility": 1, - } - - def maximum_range_nlValue_dict(self, tree, node): - # Maximum range to be reported within a phrase - # e.g. 5 to 10 mph - # Units depend on the product - dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) - #----------------------------------------------------------------------- - # COMMENT: Override max ranges for certain fields - # This dict specifications allows for wind speed ranges of up to 20 mph - # during tropical cyclone situations allowing for far better wind speed - # phrases. 
- #----------------------------------------------------------------------- - dict["Wind"] = { - (0, 30): 10, - (30,50): 15, - (50, 200):20, - "default":5, - } - - dict["WaveHeight"] = { - (8,10):2, - (10,20):5, - (20,200):10, - "default":1, - } - return dict - - # added to force ranges for sea heights with tropical turned on 9/7/11 CNJ/JL - def minimum_range_nlValue_dict(self, tree, node): - # This threshold is the "smallest" min/max difference allowed between values reported. - # For example, if threshold is set to 5 for "MaxT", and the min value is 45 - # and the max value is 46, the range will be adjusted to at least a 5 degree - # range e.g. 43-48. These are the values that are then submitted for phrasing - # such as: - dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) - # HIGHS IN THE MID 40S - dict["Wind"] = { - (0,30):0, - (30,50):5, - (50,200):10, - "default":5, - } - dict["WaveHeight"] = { - (8,10):1, - (10,16):2, - (16,28):4, - (28,40):6, - (40,200):10, - "default":1, - } - return dict - - def phrase_descriptor_dict(self, tree, node): - # Descriptors for phrases - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - dict["Wind"] = "winds" - dict["WaveHeight"] = "seas" - dict["Visibility"] = "vsby occasionally" - dict["seas"] = "seas" - dict["mixed swell"] = "mixed swell" - dict["waves"] = "seas" - dict["up to"] = "winds to" - dict["around"] = "" - return dict - -## def rounding_method_dict(self, tree, node): -## # Special rounding methods -## # -## return { -## "Wind": self.marineRounding, -## } - - # WxPhrases Overrides - def pop_wx_lower_threshold(self, tree, node): - # Always report weather - return 0 - - # MarinePhrases Overrides - def seasWaveHeight_element(self, tree, node): - # Weather element to use for reporting seas - # "COMBINED SEAS 10 TO 15 FEET." 
- # IF above wind or swell thresholds - return "WaveHeight" - - def waveHeight_wind_threshold(self, tree, node): - # wind value above which waveHeight is reported vs. wind waves - # Unit is knots - return 0 - - def wave_range(self, avg): - # Make wave ranges based off the average wave value - table = ((0, "less than 1 ft"), (1, "1 foot or less"), - (1.5, "1 to 2 ft"), (2, "1 to 3 ft"), - (3, "2 to 4 ft"), (4, "3 to 5 ft"), - (5, "3 to 6 ft"), (6, "4 to 7 ft"), - (7, "5 to 8 ft"), (8, "6 to 10 ft"), - (10, "8 to 12 ft"), (12, "10 to 14 ft"), - (14, "12 to 16 ft"), (18, "14 to 18 ft"), - (20, "15 to 20 ft"), (100, "over 20 ft")) - range = "" - for max, str in table: - if avg <= max: - range = str - break - return range - - - # SampleAnalysis overrides - def moderated_dict(self, parmHisto, timeRange, componentName): - # This dictionary defines the low and high limit at which - # outliers will be removed when calculating moderated stats. - # By convention the first value listed is the percentage - # allowed for low values and second the percentage allowed - # for high values. 
- dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, timeRange, componentName) - dict["Wind"] = (0, 20) - dict["WaveHeight"] = (5,5) - return dict - - def dirList(self): - dirSpan = 22.5 - base = 11.25 - return[ - ('N', 360-base, 361), - ('N', 0, base), - ('N TO NE', base, base+1*dirSpan), - ('NE', base+1*dirSpan, base+2*dirSpan), - ('NE TO E', base+2*dirSpan, base+3*dirSpan), - ('E', base+3*dirSpan, base+4*dirSpan), - ('E TO SE', base+4*dirSpan, base+5*dirSpan), - ('SE', base+5*dirSpan, base+6*dirSpan), - ('SE TO S', base+6*dirSpan, base+7*dirSpan), - ('S', base+7*dirSpan, base+8*dirSpan), - ('S TO SW', base+8*dirSpan, base+9*dirSpan), - ('SW', base+9*dirSpan, base+10*dirSpan), - ('SW TO W', base+10*dirSpan, base+11*dirSpan), - ('W', base+11*dirSpan, base+12*dirSpan), - ('W TO NW', base+12*dirSpan, base+13*dirSpan), - ('NW', base+13*dirSpan, base+14*dirSpan), - ('NW TO N', base+14*dirSpan, base+15*dirSpan), - ] - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. - def allowedHazards(self): - - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", "EXP"] - marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] - return [ - ('HU.W', tropicalActions, 'Tropical'), # HURRICANE WARNING - ('TR.W', tropicalActions, 'Tropical'), # TROPICAL STORM WARNING - ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING - ('SR.W', marineActions, 'Marine'), # STORM WARNING - ('GL.W', marineActions, 'Marine'), # GALE WARNING - ('UP.W', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING - ('MH.Y', allActions, 'Ashfall') # VOLCANIC ASHFALL ADVISORY - ] - - def significant_wx_visibility_subkeys(self, tree, node): - # Weather values that constitute significant weather to - # be reported regardless of visibility. 
- # If your visibility_wx_threshold is None, you do not need - # to set up these subkeys since weather will always be - # reported. - # Set of tuples of weather key search tuples in the form: - # (cov type inten) - # Wildcards are permitted. - return [("* *")] - - def wxCoverageDescriptors(self): - # This is the list of coverages, wxTypes, intensities, attributes for which special - # weather coverage wording is desired. Wildcards (*) can be used to match any value. - # If a weather subkey is not found in this list, default wording - # will be used from the Weather Definition in the server. - # The format of each tuple is: - # (coverage, wxType, intensity, attribute, descriptor) - # For example: - #return [ - # ("Chc", "*", "*", "*", "a chance of"), - # ] - # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments - return [("*", "F", "*", "*", "")] - - - ####################### - - def _makeTreeNode(self, phraseInfo, statDict, timePeriod, areaLabel): - phraseMethod, elementName, maxMin, elementType = phraseInfo - - # Set up temporary tree, node for this phrase - subPhrase = Node([], []) - node = Node([subPhrase], []) - - - treeStatDict = {} - for key in statDict: - treeStatDict[key] = statDict.get(key) - wxStats = treeStatDict.get("Wx") - treeStatDict["Wx"] = [(wxStats, timePeriod)] - - statistics = Statistics(treeStatDict) - tree = Narrative([],[node], statistics, None, self, None) - tree.set("timeRange", timePeriod) - tree.set("areaLabel", areaLabel) - - elementInfo = self.ElementInfo(elementName, maxMin, elementType) - elementInfo.outUnits = self.element_outUnits(tree, node, elementName, elementName) - - subPhrase.set("statDict", statDict) - - node.set("descriptor", self.phrase_descriptor(tree, node, elementName, elementName)) - node.set("firstElement", elementInfo) - node.set("elementInfo", elementInfo) - node.set("elementInfoList", [elementInfo]) - node.set("elementName", elementName) - node.set("setUpMethod", None) - node.set("doneList", 
[None]) - return tree, node - - def _applyRanges(self, tree, node, statDict, elementName, elementType): - if elementType == self.VECTOR(): - speed, dir = self.getStats(statDict, elementName) - min, max = speed - #print "IN _applyRanges min, max", min, max, elementName - min, max = self.applyRanges(tree, node, min, max, elementName) - statDict[elementName] = (min, max), dir - elif elementType == self.SCALAR(): - min, max = self.getStats(statDict, elementName) - #print "min, max", min, max, elementName - min, max = self.applyRanges(tree, node, min, max, elementName) - statDict[elementName] = (min, max) - elif elementType == self.WEATHER(): - return - - # Overriding from TextUtils - def getLimits(self, element): - parmID = self.getParmID(element, self._databaseID) - gridParmInfo = self._ifpClient.getGridParmInfo(parmID) - return gridParmInfo.getMinValue(), gridParmInfo.getMaxValue() - -############################################################################################## -####################### Edit Area support methods ########################################## -############################################################################################## - - def _getEditArea(self, editAreaName): - # Returns an AFPS.ReferenceData object given an edit area name - # as defined in the GFE - - eaMask = self._eaUtils.fetchEditArea(editAreaName) - # Convert to standard Edit Area type - editArea = self.decodeEditArea(eaMask) - - return editArea - - def _editAreaToMask(self, editArea): - - grid = editArea.getGrid().getNDArray().astype(numpy.bool8) - - return grid -# return editArea.getGrid().__numpy__[0].astype(numpy.bool8) - - - def _maskToEditArea(self, mask): - # Returns a refData object for the given mask - from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit - from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID - - nx = mask.shape[1] - ny = mask.shape[0] - bytes = mask.astype('int8') - grid = 
Grid2DBit.createBitGrid(nx, ny, bytes) - - return ReferenceData(self._gridLoc, ReferenceID("test"), grid) - - def _setActiveEditArea(self, area): - - self._dataMgr.getRefManager().setActiveRefSet(area) - return - - def _saveEditArea(self, editAreaName, refData): - # Saves the AFPS.ReferenceData object with the given name - - from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID - refID = ReferenceID(editAreaName) - refData = ReferenceData(refData.getGloc(), refID, refData.getGrid()) - self._dataMgr.getRefManager().saveRefSet(refData) - - - def _processAreaOverlaps(self, areas): - # Taking care of "donuts" - gridSize = (self._gridLoc.gridSize().y, self._gridLoc.gridSize().x) - - sumMask = numpy.zeros(gridSize, bool) - for area in areas: - mask = self._eaUtils.fetchEditArea(area.areaName) - ea = self._maskToEditArea(mask) - - overlap = mask & sumMask - if sum(sum(overlap)): - newMask = numpy.bitwise_xor(mask, overlap) - newRefArea = self._maskToEditArea(newMask) - else: - newRefArea = ea - - self._saveEditArea(area.areaName+"Modified", newRefArea) - area.areaName = area.areaName+"Modified" - print "processOverlap...RefData", area.areaName, newRefArea - sumMask = sumMask | mask - - return - - def _drawableElementOverlaps(self, areas, latLons): - for area in areas: - #ea = self._getEditArea(area.areaName) - eaMask = self._eaUtils.fetchEditArea(area.areaName) - ea = self._maskToEditArea(eaMask) - - polygons = ea.getPolygons(ReferenceData.CoordinateType.LATLON) - coords = polygons.getCoordinates() - for c in coords: - for lat, lon in latLons: - if self._close((c.x, c.y), (lat, lon)): - return True - return False - - def _close(self, (lat1, lon1), (lat2, lon2)): - distanceThreshold = 300 # km - distance = acos(sin(lat1)*sin(lat2)+cos(lat1)*cos(lat2)*cos(lon2-lon1)) * 6371 - if distance < distanceThreshold: - return True - else: - return False - - def _proximity(self, drawable1, drawable2): - for lat1, lon1 in drawable1.latLons: - for lat2, 
lon2 in drawable2.latLons: - if self._close((lat1, lon1), (lat2, lon2)): - return True - return False - - - - - -############################################################################################## -####################### END Edit Area support methods ########################################## -############################################################################################## +#------------------------------------------------------------------------- +# Description: HSF (High Seas Forecast) +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Version: 26 July 2016 - Received from Jeff Lewitsky +## +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ----------- ---------- ----------- -------------------------- +# 07/29/2016 - tlefebvre Changed edit area retrieval and storage to work +# outside CAVE so edit areas could be shared. +# 12/20/2017 DCS17686 tlefebvre Initial baseline version. +# +## +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# HSF.py, HSF ___Definition, HSF__Override +#------------------------------------------------------------------------- +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas, default is Combinations +# +# productName defines name of product e.g. "COASTAL WATERS FORECAST" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "OFFBOS" +# areaName (opt.) 
Area name for product header, such as "WESTERN NEW YORK" +# wfoCityState City,state that the WFO is located in, such as "BUFFALO, NY" +# +# synopsisUGC UGC code for Synopsis +# synopsisHeading Heading for Synopsis +# +# Optional Configuration Items +# +# editAreaSuffix default None. Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined or the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +# +# periodCombining If 1, an attempt will be made to combine components +# or time periods into one. Otherwise no period +# combining will will be done. 
+# includeEveningPeriod Include a 6 hour Evening period on the 3rd day +# useAbbreviations +# If 1, use marine abbreviations e.g. TSTM instead of THUNDERSTORM, +# NW instead of NORTHWEST +# (See marine_abbreviateText in the TextRules module) +# +# Weather-related flags +# hoursSChcEnds - specifies hours past the beginning of the first +# first period of the product to stop including 'Slight +# Chance' or 'Isolated' weather types (ERH policy +# allows values of 1-5 * 12 hour periods) +# +# areaDictionary Modify the AreaDictionary utility with UGC +# information about zones +# +# useHolidays Set to 1 to use holidays in the time period labels +# +# Trouble-shooting items +# passLimit -- Limit on passes allowed through Narrative Tree +# trace -- Set to 1 to turn on trace through Narrative Tree +# +# OVERRIDES +# +# Required Overrides +# +# _Text1(), _Text2() Descriptive text for header +# +# NARRATIVE CUSTOMIZATION POINTS +# The phrases in this product can be customized in many ways by overriding +# infrastructure methods in the Local file. +# You will see common overrides in the Local file and you may change them +# in that there. +# For further customization, you can determine which phrases your product is +# using by examining the Component Product Definitions below. +# Then, you can look up the phrase in the Text Product User Guide which will +# describe the all the relevant override methods associated with the phrase. +# Refer to the Customization section of the Text Product User Guide +# for step-by-step information. 
+# +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Wind (every 3 hours to 3 days, then every 6 hours to 7 days) +# WaveHeight and/or WindWaveHgt +# (every 6 hours to 3 days, then every 12 hours to 7 days) +# Wx (every 6 hours to 3 days, then every 12 hours to 7 days) +# Optional: +# WindGust (every 3 hours to 7 days) +# Swell, Swell2, Period, Period2 (every 6 hours to 7 days) +#------------------------------------------------------------------------- +# Edit Areas Needed: None +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +# Combinations +#------------------------------------------------------------------------- +# Component Products: +# OFFPeriod (component) +# OFFPeriodMid (component) +# OFFExtended (component) +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". 
+#------------------------------------------------------------------------- +# Additional Information: +# +# COMMON OVERRIDES +# from OFF: +# _Text1 +# _Text2 +# _issuance_list +# riverBarForecast_dict +# from MarinePhrases +# inlandWatersAreas +# inlandWatersWave_element +# seasWaveHeight_element +# seasWindWave_element +# waveHeight_wind_threshold +# marine_wind_flag +# marine_wind_combining_flag +# marine_wind_verbose_flag +# from ConfigVariables +# phrase_descriptor_dict +# phrase_connector_dict +# null_nlValue_dict +# first_null_phrase_dict +# null_phrase_dict +# maximum_range_nlValue_dict +# combine_singleValues_flag_dict +# from WxPhrases: +# embedded_visibility_flag +# visibility_wx_threshold +# significant_wx_visibility_subkeys +# wxCoverageDescriptors +# wxTypeDescriptors +# wxAttributeDescriptors +# wxIntensityDescriptors +# wxCombinations +# combine_T_RW +# from SampleAnalysis +# moderated_dict +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Marine Services. 
+#------------------------------------------------------------------------- + +import TextRules +import SampleAnalysis +import ForecastNarrative +import time, string, re, types, pickle, os, textwrap, sys +import TimeRange +import AbsTime +from math import * +import numpy +import copy +import UserInfo +import subprocess +import xml.etree.ElementTree as ET +import EditAreaUtilities + +import sys +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData +#from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData_CoordinateType as CoordinateType + +class Node: + def __init__(self, childList, methodList): + self.childList = childList + self.methodList = methodList + self.parent = None + # Make tree bi-directional + for child in childList: + child.parent = self + # Keep track of changes made to this node + self.changeFlag = 0 + # Keep track of methods that are done + self.doneList = [] + def getIndex(self): + # If this node is a child, + # return it's index in the childList of the parent + try: + return self.parent.childList.index(self) + except: + return None + def getParent(self): + return self.parent + + def getComponent(self): + # Return this node's ancestor at the second level in the tree + prevNode = None + node = self + i = 0 + while node.getParent() is not None and i < 100: + prevNode = node + node = node.getParent() + i = i + 1 + return prevNode + + def getComponentName(self): + node = self + compName = node.get("componentName") + if compName is not None: + return compName + else: + comp = node.getComponent() + if comp is not None: + return comp.get("name") + else: + return None + + def getNext(self): + if self.parent is not None: + index = self.getIndex() + childList = self.parent.childList + if len(childList) > index+1: + return childList[index+1] + + def getPrev(self): + if self.parent is not None: + index = self.getIndex() + childList = 
self.parent.childList + if index > 0: + return childList[index-1] + + def set(self, member, value): + #print " Setting", member, + if hasattr(self, member): + current = getattr(self, member) + #print "current/value", current, value + if current == value: + #print " No Change" + return + setattr(self, member, value) + self.changeFlag = 1 + #print " Changed" + + def get(self, member, default=None): + if hasattr(self, member): + return getattr(self, member) + else: + return default + + def printNode(self, node, indentStr=""): + print("Node", node) + print(indentStr + " Methods") + for method in node.methodList: + if method in node.doneList: + done = "DONE" + else: + done = "" + print(indentStr + " ", method.__name__, done) + print(indentStr + " Attributes") + dict = node.__dict__ + for key in dict: + if key == "methodList" or key == "doneList": + continue + print(indentStr + " ", key, dict[key]) + print(indentStr + " Children ", len(node.childList)) + for child in node.childList: + self.printNode(child, indentStr + " ") + + def insertChild(self, sibling, newChild, newFirst=0): + # Insert the newChild + # If newFirst, insert newChild before sibling, + # else afterward. 
+ newChild.parent = self + new = [] + for child in self.childList: + if child == sibling: + if newFirst: + new.append(newChild) + new.append(child) + else: + new.append(child) + new.append(newChild) + else: + new.append(child) + self.childList = new + + def remove(self): + # Remove this node from it's parent child list + parent = self.parent + new = [] + for child in parent.childList: + if child != self: + new.append(child) + parent.childList = new + # Set the attribute for removing the child + setattr(self, "removed", 1) + + def findChild(self, attr, value): + # Find the child of this node with the given attribute + # of the given value + for child in self.childList: + if child.get(attr) == value: + return child + def getProgeny(self): + # Return a list of all progeny of this node + progeny = self.childList + for child in self.childList: + childProgeny = child.getProgeny() + if childProgeny is not None: + progeny = progeny + child.getProgeny() + return progeny + def replace(self, nodeList): + # Replace the current child node with the node list. + # If top of tree, does nothing. 
+ childList = self.parent.childList + newList = [] + for child in childList: + if child == self: + newList = newList + nodeList + else: + newList.append(child) + self.parent.childList = newList + # Remove any children of current node + self.childList = [] + # Make this node defunct + self.doneList = self.methodList + def getTimeRange(self): + if hasattr(self, "timeRange"): + return self.timeRange + # Look for an ancestor that has a timeRange associated with it + if self.parent is not None: + return self.parent.getTimeRange() + return None + def getStatDict(self): + # Assume we are a subPhrase + if hasattr(self, "statDict"): + statDict = self.statDict + disabledElements = self.getAncestor("disabledElements") + if disabledElements is not None: + for key in list(statDict.keys()): + for element in self.parent.disabledElements: + if key == element: + statDict[element] = None + disabledSubkeys = self.getAncestor("disabledSubkeys") + #print "disabledSubkey", disabledSubkeys + if disabledSubkeys is not None: + disabledWxTypes = [] + for disabledSubkey in disabledSubkeys: + disabledWxTypes.append(disabledSubkey.wxType()) + for key in list(statDict.keys()): + if key == "Wx": + subkeys = statDict[key] + newList = [] + for subkey in subkeys: + # Need to handle both "dominantWx" and + # "rankedWx" analysis + appendVal = subkey + if type(subkey) is tuple: + subkey, rank = subkey + if subkey not in disabledSubkeys \ + and subkey.wxType() not in disabledWxTypes: + newList.append(appendVal) + statDict[key] = newList + return statDict + else: + return None + def getAreaLabel(self): + if hasattr(self, "areaLabel"): + return self.areaLabel + # Look for an ancestor that has an areaLabel associated with it + if self.parent is not None: + return self.parent.getAreaLabel() + return None + def getAncestor(self, attr): + if hasattr(self, attr): + return getattr(self, attr) + # Look for an ancestor that has the given attribute associated with it + if self.parent is not None: + return 
self.parent.getAncestor(attr) + return None + def setAncestor(self, attr, value): + if hasattr(self, attr): + setattr(self, attr, value) + return None + # Look for an ancestor that has the given attribute associated with it + if self.parent is not None: + return self.parent.setAncestor(attr, value) + return None + def getDescendent(self, attr): + if hasattr(self, attr): + return getattr(self, attr) + # Look for the first descendent that has the given attribute associated with it + for child in self.childList: + value = child.getDescendent(attr) + if value is not None: + return value + return None + +class Narrative(Node, TextRules.TextRules): + # This is the root of the tree and, as such, has some special methods + # and data members + def __init__(self, methodList, componentList, statisticsDictionary, + issuanceInfo, library, histoSampler): + self.stats = statisticsDictionary + # Access to inherited methods + self.library = library + # A histoSampler for access to Topo + self.histoSampler = histoSampler + self.issuanceInfo = issuanceInfo + + # This is the root of the tree + Node.__init__(self, componentList, methodList) + TextRules.TextRules.__init__(self) + + def printTree(self): + print("\n\nNarrative Tree\n") + self.printNode(self, "") + def getTopoHisto(self, areaLabel): + editArea = self.library.findEditArea(None, areaLabel) + return self.get("histoSampler").getTopoHisto(editArea.id()) + def makeNode(self, children, methods, parent=None): + node = Node(children, methods) + node.parent = parent + return node + def statisticsDictionary(self): + return self.statisticsDictionary.dictionary() + def getDataType(self, element): + return self.library.getDataType(element) + def getLimits(self, element): + return self.library.getLimits(element) + def makeComponent(self, name, timeRange, definition): + return self.library.makeComponent(name, timeRange, definition) + def makePhrase(self, phraseDef): + return self.library.makePhrase(phraseDef) + def copyPhrase(self, node, 
timeRange=None, areaLabel=None, parent=None, + copyAttrs=[]): + phraseDef = node.get("phraseDef") + newNode = self.library.makePhrase(phraseDef) + # copy attributes from original node + for attr in copyAttrs: + newVal = node.get(attr) + if type(newVal) is list: + newList = [] + for item in newVal: + newList.append(item) + newVal = newList + newNode.set(attr, newVal) + if areaLabel is None: + areaLabel = node.getAreaLabel() + newNode.set("areaLabel", areaLabel) + if timeRange is None: + timeRange = node.getTimeRange() + newNode.set("timeRange", timeRange) + if parent is None: + parent = node.parent + newNode.parent = parent + # Preserve attributes + newNode.set("args", node.get("args")) + return newNode + + def addPhrase(self, prevPhrase, timeRange=None, areaLabel=None): + # Make the new phrase follow given phrase + newPhrase = self.copyPhrase(prevPhrase, timeRange, areaLabel) + parent = prevPhrase.parent + parent.insertChild(prevPhrase, newPhrase) + return newPhrase + def addPhraseDef(self, prevPhrase, phraseDef, timeRange=None, areaLabel=None): + # Make the new phrase follow given prevPhrase using the given phraseDef + newPhrase = self.library.makePhrase(phraseDef) + if areaLabel is None: + areaLabel = prevPhrase.getAreaLabel() + newPhrase.set("areaLabel", areaLabel) + if timeRange is None: + timeRange = prevPhrase.getTimeRange() + newPhrase.set("timeRange", timeRange) + parent = prevPhrase.parent + newPhrase.parent = parent + parent.insertChild(prevPhrase, newPhrase) + return newPhrase + +class Statistics: + def __init__(self, statDict): + self._statDict = statDict + def get(self, element, timeRange, areaLabel=None, statLabel="", mergeMethod="List", + intersectWith=None): + return self._statDict.get(element) + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [] + Definition = { + "type": "smart", + "displayName": "None", + "database": "Official", + # Defines output location of finished product. 
+ "outputFile": "{prddir}/TEXT/HSF_.txt", + "debug": 0, + # Name of map background for creating Combinations + "mapNameForCombinations": "Marine_Zones_", + + "lineLength": 69, + ## Edit Areas: Create Combinations file with edit area combinations. + "defaultEditAreas" : "Combinations_OFF__", + "editAreaSuffix": None, + # product identifiers + "productName": "HIGH SEAS FORECAST", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "GEORGIA" -- optional + "wfoCityState": "", # Location of WFO - city state + + "synopsisUGC": "", # UGC code for synopsis + "synopsisHeading": ".SYNOPSIS...",# Heading for synopsis + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + + "hazardSamplingThreshold": (0, 1), #(%cov, #points) + + "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time + + "periodCombining" : 0, # If 1, combine periods, if possible + # Product-specific variables: + # Set to one if you want a 6-hour evening period instead of + # 18-hour period without lows + "includeEveningPeriod": 0, + "useAbbreviations": 1, + + "ccc": "MIA", # AFOS node + "tcmBasin": "EP", # AT = Atlantic, EP = East Pacific, CP = Central Pacific + + # CCode flag - added for OPC 11/14/2017 CNJ + "ccode": 0, + + # Weather-related flags + "hoursSChcEnds": 24, + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + "useHolidays": 0, # Set to 1 to use holidays in the time period labels + # Language + "language": "english", + + # Trouble-shooting items + "passLimit": 20, # Limit on passes allowed through + # Narrative Tree + "trace": 0, # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + # Mixed Case + # LowerCase below needs to = 1 for mixed case AND must change mixedCaseProductIds.txt file under TextWS for store to 
not upper all -JL 05/24/2016 + # Also will need to change all hard-coded phrases AND phrases passed from MakeHSEditAreas changed to mixed case + "lowerCase": 0, + "autoStore": 0, + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + #editAreasPath = "/scratch/local/HighSeas/EditAreas/" # for development in Boulder + editAreasPath = "/data/local/HighSeas/NH2/EditAreas/" # for operations + self._eaUtils = EditAreaUtilities.EditAreaUtilities(editAreasPath) + + + def _Text1(self): + return "SUPERSEDED BY NEXT ISSUANCE IN 6 HOURS\n\n" + \ + "SEAS GIVEN AS SIGNIFICANT WAVE HEIGHT...WHICH IS THE AVERAGE\n" + \ + "HEIGHT OF THE HIGHEST 1/3 OF THE WAVES. INDIVIDUAL WAVES MAY BE\n" + \ + "MORE THAN TWICE THE SIGNIFICANT WAVE HEIGHT.\n\n" + + # override _Text2 for each specific basin/product + def _Text2(self): + return "ATLANTIC FROM 07N TO 31N W OF 35W INCLUDING CARIBBEAN SEA AND\n" + \ + "GULF OF MEXICO\n\n" +# def _Text2(self): +# return "E PACIFIC FROM THE EQUATOR TO 30N E OF 140W AND 03.4S TO THE\n" + \ +# "EQUATOR E OF 120W\n\n" + + # Returns the specified product (string) with newlines inserted + # such that no line exceeds maxChars characters. 
+ def _wrapLines(self, product, maxChars=64): + + # break out the product into lines + lineList = [] + startPos = 0 + while startPos < len(product): + pos = product.find("\n", startPos) + if pos == startPos: + lineList.append("") + elif pos == -1: + lineList.append(product[startPos:]) #get the rest + break + + line = product[startPos:pos] # slice out line + wrappedLines = textwrap.wrap(line, maxChars) + for w in wrappedLines: + lineList.append(w) + + startPos = pos + 1 + + finalProduct = "" + for line in lineList: + finalProduct = finalProduct + line + "\n" + + return finalProduct + + # Top-level object that calls all main subroutines + def generateForecast(self, argDict): + + print("Generate Forecast") + + # baseline code - gets variables from the Definitions + error = self._getVariables(argDict) + if error is not None: + return error + + # Determine time ranges - issuance times set here + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Creating the Features class from MakeHSFEditAreas tool input + self._createFeatures(argDict) + + # Creating the Features class from grid-based tool input + self._createGridBasedFeatures(argDict) + + # Creating the Features class from TCM conversion script input + self._createTCM_BasedFeatures(argDict) + + # Creating the Features class from VGF / XML input + #self._createDrawableFeatures(argDict) + + + print("Sampling data") + # Sample the data for the areas in the Features created above + error = self._sampleData(argDict) + if error is not None: + return error + + # Populate the Features with the sampled data + self._populateFeatures(argDict) + + # Order the Features based on rules for the HSF product + self._orderFeatures() + + #for feature in self._features: + # feature.printFeature() + + # Building the forecast text string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + fcst = self._makeSection(fcst, argDict, self._warningFeatures, "Warning") + fcst = self._makeSection(fcst, 
argDict, self._synopsisFeatures, "Synopsis") + fcst = self._postProcessProduct(fcst, argDict) + + return fcst + + # sets variables from the Definitions + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._databaseID = argDict["databaseID"] + self._ifpClient = argDict["ifpClient"] + + self._language = argDict["language"] + return None + + # Sets up issuance times - can be done in dictionary elsewhere + # This method or dictionary goes into overrides to change issuance times for each product + def _determineTimeRanges(self, argDict): + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + staticIssueTime=re.sub(r'(\d{3,4} [AP]M).*',r'\1',self._productIssuance) + self._timeLabel = staticIssueTime + " " + self.getCurrentTime( + #argDict, " %a %b %e %Y", stripLeading=1) #commented out per 00 UTC issue per M. Sardi (JL 07/21/16) + argDict, " %a %b %e %Y", shiftToLocal=0, stripLeading=1) + # Re-calculate issueTime + self._issueTime = self.strToGMT(staticIssueTime) + validTimeDict = { + "0430 UTC": 430, + "1030 UTC": 1030, + "1630 UTC": 1630, + "2230 UTC": 2230, + } + validTime = validTimeDict[self._productIssuance] - 430 + self._validTime = repr(validTime).zfill(4) + " UTC" + return None + + ############## + # Organize Features + + class Feature: + def __init__(self): + self.name = None + self.basinName = None + # Feature Type -- 'Named', 'GridBased', 'TCM_Based', 'Drawable' + # e.g. 
(Fog / Visibility, Convection, Heavy Freezing Spray) + self.featureType = None + self.periods = [] + + self.highestWarning = None + self.highestWarningTimePeriod = None + self.highestWarningHeadline = None + self.earliestTimePeriod = None + self.wxType = None + self.wxIntensity = None + self.wxCoverage = None + self.phenomenonType = None + self.GBareaList = None + + # Drawable Feature -- ingest from Drawable Features XML + self.drawableFeature = None + self.autoText = None + def printFeature(self): + print("\nFeature -- Feature Type, Basin Name:", self.featureType, self.basinName) + print("HighestWarning, HighestWarning TimePeriod:", self.highestWarning, self.highestWarningTimePeriod) + print("wxType:", self.wxType, "wxCoverage:", self.wxCoverage, "wxIntensity:", self.wxIntensity) + print("phenomenonType:", self.phenomenonType) + + if self.featureType == "GridBased": + for area in self.GBareaList: + print("areaName:", area.areaName) + print("areaLabel:", area.areaLabel) + print("timePeriod:", area.timePeriod) + print("headline:", area.headline) + print("warningType:", area.warningType) + print("phenomenonType:", area.phenomenonType) + print("wxType:", area.wxType) + print("intensity:", area.intensity) + print("coverage:", area.coverage) + else: + print("periods:", self.periods) + + + #print " Periods" + #for period in self.periods: + # period.printPeriod() + + class Period: + def __init__(self): + self.timePeriod = None + self.areas = [] + self.drawables = [] + def printPeriod(self): + print(' TimeRange', self.timePeriod) + print(' Areas') + for area in self.areas: + area.printArea() + for drawable in self.drawables: + drawable.printDrawable() + + class Area: + def __init__(self): + self.areaName = None + self.areaLabel = None + self.refData = None + self.timePeriod = None + + # Named Feature attributes + self.statDict = {} + self.headline = "None" + self.methodList = [] + self.windWave = False + self.windOnly = False + self.waveOnly = False + self.warningType = 
None + + # GridBased Feature attributes + self.phenomenonType = None + self.wxType = None + self.intensity = None + self.coverage = None + + def printArea(self): + print(' name, label', self.areaName, self.areaLabel) + print(' warningType', self.warningType) + print(' windWave, windOnly, waveOnly', self.windWave, self.windOnly, self.waveOnly) + print(' statDict', self.statDict) + print(' wxType, intensity', self.wxType, self.intensity) + + class Drawable: + def __init__(self): + self.timePeriod = None + self.drawableType = None + self.pressureTag = None + self.latLons = None + self.movement = None + def printDrawable(self): + print(' drawableType', self.drawableType) + print(' timePeriod, pressureTag', self.timePeriod, self.pressureTag) + print(' latLons', self.latLons) + + def _createFeatures(self, argDict): + ''' Set up 'skeleton' Feature objects from toolFeatures + Input toolFeatures: + [ + {'timePeriod': '00h', 'featureName': 'Feature1', 'basin': 'ATLC', + 'areaList': [ + {'lon': None, 'pieState': None, 'radius': None, 'lat': None, + 'areaDesc': 'WITHIN AREA BOUNDED BY 15S170E TO 15S169E TO 16S168E TO 17S169E TO 16S170E TO 15S170E', + 'basin': 'HSF_SP', 'areaName': 'Feature1_00h_EA1'}, + {'lon': None, 'pieState': None, 'radius': None, 'lat': None, + 'areaDesc': 'WITHIN AREA BOUNDED BY 15S171E TO 14S170E TO 15S168E TO 17S168E TO 18S169E TO 17S171E TO 15S171E', + 'basin': 'HSF_SP', 'areaName': 'Feature1_00h_EA2'}, + {'lon': None, 'pieState': None, 'radius': None, 'lat': None, + 'areaDesc': 'WITHIN AREA BOUNDED BY 13S171E TO 13S169E TO 14S168E TO 17S167E TO 18S170E TO 16S172E TO 13S171E', + 'basin': 'HSF_SP', 'areaName': 'Feature1_00h_EA3'} + ] + } + ] + ''' + + # Call a sequence of method in order to get a gridLoc + # We need his to process edit areas. 
+ parmNameLevel = "Wind_SFC"
+ self.setUp(parmNameLevel, argDict)
+ self._gridLoc = self.getGridLoc()
+ self._dataMgr = argDict["dataMgr"]
+
+ print("GridLoc:", dir(self._gridLoc))
+
+
+ self._savePathFile = self.descriptorFileName()
+ print("descFileName:", self._savePathFile)
+ # Try to fetch the old object
+ try:
+ # Pickle streams are binary: text mode ("r") fails under Python 3,
+ # so open in "rb". The with-block closes the file; no explicit
+ # close() is needed.
+ with open(self._savePathFile, "rb") as f:
+ toolFeatures = pickle.load(f)
+ except Exception:
+ # Missing/unreadable descriptor file: start fresh (best effort).
+ toolFeatures = []
+ print("Starting with an empty descriptor.")
+
+ for f in toolFeatures:
+ print(f)
+
+ # Initialize windMax across features
+ self._windMax = 0.0
+ featureNameList = self._getFeatureNames(toolFeatures)
+ self._features = []
+ for featureName, basinName in featureNameList:
+ print("Createfeature basinName:", basinName)
+ feature = self.Feature()
+ self._features.append(feature)
+ feature.name = featureName
+ feature.basinName = basinName
+ feature.featureType = 'Named'
+ feature.periods = []
+
+ toolTimePeriodList = self._getTimePeriodList(featureName, toolFeatures)
+
+ for toolTimePeriod in toolTimePeriodList:
+ period = self.Period()
+ feature.periods.append(period)
+
+ toolAreaList = self._getAreaList(featureName, toolTimePeriod, toolFeatures)
+ period.timePeriod = self._convertToTimeRange(toolTimePeriod)
+
+ period.areas = []
+ for areaName, areaDesc in toolAreaList:
+ area = self.Area()
+ period.areas.append(area)
+ area.areaName = areaName
+ print("^^^^^^^^^^^^^^^^^^^^^^^^^^^^^AREA NAME IS:", area.areaName)
+ area.refData = None
+ area.areaLabel = areaDesc
+ area.timePeriod = period.timePeriod
+ print("*****************************area desc is:", area.areaLabel)
+ self._processAreaOverlaps(period.areas)
+
+ def _getFeatureNames(self, toolFeatures):
+ # Return [(featureName, basin)] with first-seen order, one entry
+ # per distinct featureName in the tool descriptor list.
+ nameList = []
+ featureList = []
+ for t in toolFeatures:
+ print("Tool feature:", t)
+ featureName = t.get("featureName")
+ if featureName not in nameList:
+ featureList.append((t.get("featureName"), t.get('basin')))
+ nameList.append(featureName)
+ return featureList
+
+ def
_getTimePeriodList(self, featureName, toolFeatures):
+ # Return the sorted, de-duplicated list of timePeriod strings for
+ # the given feature name.
+ tpList = []
+ for t in toolFeatures:
+ if t["featureName"] != featureName:
+ continue
+ if t["timePeriod"] not in tpList:
+ tpList.append(t["timePeriod"])
+ tpList.sort()
+ return tpList
+
+ def _getAreaList(self, featureName, timePeriod, toolFeatures):
+ # Return [(areaName, areaDesc)] for the feature/timePeriod pair.
+ areaList = []
+ #print "getting area list:", featureName, timePeriod
+ for t in toolFeatures:
+ if t["featureName"] != featureName or t["timePeriod"] != timePeriod:
+ continue
+ toolAreaList = t["areaList"]
+ for area in toolAreaList:
+ areaTuple = (area["areaName"], area["areaDesc"])
+ areaList.append(areaTuple)
+ return areaList
+
+ def _createGridBasedFeatures(self, argDict):
+ # Add gridBasedFeatures to self._features
+
+
+ # open the file
+ self._gridBasedPathFile = self.gridBasedFileName()
+ # Try to fetch the old object
+ try:
+ # Pickle data is binary; the file must be opened in "rb" mode
+ # under Python 3.
+ f = open(self._gridBasedPathFile, "rb")
+ gridBasedFeatureList = pickle.load(f)
+ f.close()
+ except Exception:
+ # Must bind gridBasedFeatureList -- the name iterated below.
+ # (Previously this bound gridBasedFeatures, so a missing
+ # descriptor file caused a NameError instead of an empty run.)
+ gridBasedFeatureList = []
+ print("Starting with an empty gridBasedFeature descriptor.")
+
+ ### below this is the new code - 11/16/2017 CNJ ###
+ for gridBasedFeature in gridBasedFeatureList: # step through the feature list from the tool
+ feature = self.Feature()
+ feature.featureType = 'GridBased'
+ feature.wxType = gridBasedFeature[0]["wxType"]
+ feature.wxIntensity = gridBasedFeature[0]["intensity"]
+ feature.wxCoverage = gridBasedFeature[0]["wxCoverage"]
+
+ feature.phenomenonType = self._phenomenonTypeDict()[gridBasedFeature[0]["wxType"]]
+
+ feature.earliestTimePeriod = self._convertToTimeRange(gridBasedFeature[0]["timePeriod"])
+ # Populate the areas in this feature
+ feature.GBareaList = []
+ feature.periods = []
+ for gbArea in gridBasedFeature:
+ feature.periods.append(self._convertToTimeRange(gridBasedFeature[0]["timePeriod"]))
+ area = self.Area()
+ area.areaName = gbArea["areaName"]
+ area.areaLabel = gbArea["areaDesc"]
+ area.timePeriod = self._convertToTimeRange(gbArea["timePeriod"])
+ area.wxType =
gbArea["wxType"] + area.intensity = gbArea["intensity"] + area.coverage = gbArea["wxCoverage"] + area.phenomenonType = feature.phenomenonType + feature.GBareaList.append(area) + # Populate the warning attributes + if feature.wxType == "VA": + feature.highestWarning = "Ashfall" + area.warningType = "Ashfall" + feature.warningTimePeriod = feature.earliestTimePeriod + feature.highestWarningHeadline = self._getGridBasedHeadline(area) + elif feature.wxType == 'ZY' and feature.wxIntensity == '+': + feature.highestWarning = "Heavy Freezing Spray" + area.warningType = "Heavy Freezing Spray" + feature.warningTimePeriod = feature.earliestTimePeriod + feature.highestWarningTimePeriod = feature.earliestTimePeriod + feature.highestWarningHeadline = self._getGridBasedHeadline(area) + + self._features.append(feature) + + + #feature.printFeature() + return + + def _getGridBasedHeadline(self, area): + if area.wxType == 'ZY' and area.intensity == '+': + return "...Heavy Freezing Spray Warning..." + elif area.wxType == 'VA': + return "...ASHFALL ADVISORY...\n[VOLCANO NAME] VOLCANO AT POSITION " + \ + "[xx.xN xx.xW] IS CURRENTLY IN A STATE OF UNREST AND COULD ERUPT WITH " + \ + "LITTLE NOTICE. MARINERS TRAVELING IN THE VICINITY OF [VOLCANO NAME] " + \ + "ARE URGED TO EXERCISE CAUTION. 
IF MARINERS ENCOUNTER VOLCANIC ASH OR " + \
+ "FLOATING VOLCANIC DEBRIS...YOU ARE ENCOURAGED TO REPORT THE OBSERVATION " + \
+ "TO THE NATIONAL HURRICANE CENTER BY CALLING 305-229-4424.\n"
+ return ""
+
+ def _createTCM_BasedFeatures(self, argDict):
+ # Create Feature classes from TCM conversion script input
+
+ ccc = "MIA"
+ siteID = "AT"
+ tcmBody=""
+ for index in ["1", "2", "3", "4", "5"]:
+ #for index in [tcm1, tcm2, tcm3]:
+ pil = ccc + "WRK" + siteID + index
+ # check_output() returns bytes under Python 3; decode before the
+ # str operations (split/find) below.
+ tcmText = subprocess.check_output(["/awips2/fxa/bin/textdb", "-r", pil]).decode()
+
+ tcmLines = tcmText.split('\n')
+ tcmTimeStr = tcmLines[0] # "2100 UTC FRI JAN 15 2016"
+ if not self._tcmTimeOverlaps(tcmTimeStr):
+ continue
+
+ tcmBegin = tcmLines[2]
+ # str.join replaces the Python 2-only string.join() helper.
+ tcmBody = "\n".join(tcmLines[2:])
+
+ warningDict = {
+ "Hurricane": "...Hurricane Warning...",
+ "Hurricane Force": "...Hurricane Force Wind Warning...",
+ "Tropical Storm": "...Tropical Storm Warning",
+ "Storm": "...Storm Warning",
+ "Gale": "...Gale Warning",
+ }
+
+ phenomenonDict = {
+ "Tropical Depression": "Tropical Depression",
+ "Post-Tropical": "Post-Tropical Cyclone",
+ "Remnants": "Remnants",
+ }
+
+ feature = self.Feature()
+ feature.featureType = 'TCM_Based'
+ featureAreaList = []
+ for key in warningDict:
+ headline = warningDict.get(key)
+
+ if tcmBegin.find(headline) > -1 or tcmBegin.find(headline.upper()) > -1:
+ feature.highestWarning = key
+ feature.highestWarningTimePeriod = self._convertToTimeRange("00h")
+ break
+
+ if not feature.highestWarning:
+ for key in phenomenonDict:
+ phen = phenomenonDict.get(key)
+ if tcmBegin.find(phen) > -1 or tcmBegin.find(phen.upper()) > -1:
+ feature.phenomenonType = key
+ break
+ feature.earliestTimePeriod = self._convertToTimeRange("00h")
+ feature.autoText = tcmBody.strip()
+ self._features.append(feature)
+
+ def _tcmTimeOverlaps(self, tcmTimeStr):
+ # True when the TCM issuance time is within the threshold of now.
+ tcmTime = self.convertBaseTime(tcmTimeStr)
+ curTime = time.time()
+
+ ### 6 is the max number of hours for TCM overlap to be true
+ threshold
= 6 * 3600 + + if abs(curTime - tcmTime) < threshold: + return True + return False + + def convertBaseTime(self, timeStr): + # extract time parts from the str + hour = int(timeStr[0:2]) + minute = int(timeStr[2:4]) + strList = timeStr.split(" ") + monthStr = strList[3] + month = self.monthNum(monthStr) + day = int(strList[4]) + year = int(strList[5]) + + # time.mktime returns time in seconds but in local time + baseTime = time.mktime((year, month, day, hour, minute, 0, 0, 0, 0)) + + # Adjustment to UTC + diffTime = time.mktime(time.gmtime()) - time.mktime(time.localtime()) + + # subtract timeZone and round to the nearest hour + roundedTime = int((baseTime - diffTime) / 3600) * 3600 + + return roundedTime + + def monthNum(self, monthStr): + monthList = ["JAN", "FEB", "MAR", "APR", "MAY", "JUN", + "JUL", "AUG", "SEP", "OCT", "NOV", "DEC"] + try: + return monthList.index(monthStr) + 1 + except ValueError: + return 0 + + def _readCurrentTCM(self, argDict): + pass + + def _createDrawableFeatures(self, argDict): + # Create Features from VGF / XML Drawable files + + # Associating any drawables that match and existing Named Feature + print("***In Create Drawables***") + remainingDrawables = [] + for drawableElement in self._ingestDrawables(): + + print("DrawableElement:", drawableElement.printDrawable()) + + if drawableElement.drawableType not in ['Ridge', 'Ice Edge', 'Gulf Stream']: + if self._associateDrawableElementWithFeature(drawableElement): + continue + remainingDrawables.append(drawableElement) + + # For the remaining Drawables, group them based on compatibility types and proximity + groups = [] + + # group is a list of drawables + # CJ change + group = [remainingDrawables[0]] + #group = [drawables[0]] + + remainingDrawables = remainingDrawables[1:] + i = 0 + while remainingDrawables and i < 100: + + group, remainingDrawables, done = self._groupDrawables(group, remainingDrawables) + if done: + groups.append(group) + if len(remainingDrawables) > 0: + group = 
remainingDrawables[0] + remainingDrawables = remainingDrawables[1:] + i = i + 1 + print("i=", i) + if group: + groups.append(group) + + # this line replaced commented out code block above + group = [remainingDrawables] + + # Create a Feature from each group + for group in groups: + # Create a Drawable Feature + feature = self.Feature() + feature.featureType = 'Drawable' + featureAreaList = [] + + # Create all the periods as placeholders + periods = [] + for index in ['00','24','48']: + period = self.Period() + period.timePeriod = self._convertToTimeRange(index+'h') + periods.append(period) + + ### uncommenting the line below causes an infinite loop + #feature.periods = periods + + if type(group) is list: + for drawable in group: + print("feature.periods:", feature.periods) + for period in feature.periods: + if drawable.timePeriod == period.timePeriod: + period.drawables.append(drawable) + print("appending to period.drawables in list type") + else: + continue + else: + for period in feature.periods: + if group.timePeriod == period.timePeriod: + period.drawables.append(group) + print("appending to period.drawables in non-list type") + else: + continue + + + for period in periods: + if period.drawables: + feature.periods.append(period) + feature.periods.sort(self._sortPeriodsByTime) + if len(feature.periods) > 0: + feature.earliestTimePeriod = feature.periods[0].timePeriod + self._features.append(feature) + + def _groupDrawables(self, group, drawables): + # Try to add each drawable to the group + done = True + newGroup = [] +# for g in group: +# print "group is:", g +# newGroup.append(g) + print("group is:", type(group)) + newGroup = self._copyDrawables(group) + returnedDrawables = [] + if type(group) is list: + for d1 in group: + for d2 in drawables: + if self._compatibleDrawableTypes(d1, d2): + if self._proximity(d1, d2): + newGroup.append(d2) + done = False + else: + returnedDrawables.append(d2) + return newGroup, returnedDrawables, done + else: + return group, 
returnedDrawables, True + + def _copyDrawables(self, group): + print("dir:", dir(group)) + if type(group) is list: + newList = [] + for g in group: + newList.append(g) + return newList + else: # it's a singleton + drawable = self.Drawable() + drawable.timePeriod = group.timePeriod + drawable.latLons = group.latLons + drawable.pressureTag = group.pressureTag + drawable.movement = group.movement + drawable.drawableType = group.drawableType + return drawable + + return + + def _ingestDrawables(self): + # Read in the files and use ElementTree to parse them and create Drawables + drawables = [] + print('IngestDrawables') + for t in ['24']: + #for t in ['00','24','48']: + fileName = '/localapps/dev/HSF/'+t+'.xml' + #Below is where cron files live (note they get purged at H+45) + #fileName = '/data/fxa/LOCAL/getvgf/data/'+t+'.xml' + print("fileName", fileName) + + tree = ET.parse(fileName) + timePeriod = self._convertToTimeRange(t+'h') + # Get the Lines + for line in tree.iter("Line"): + drawable = self.Drawable() + pgenType = line.attrib.get('pgenType') + print("pgenType", pgenType) + + #pgenExcludeList = ["LINE_SOLID", "LINE_DASHED_6", "FILLED_ARROW", "POINTED_ARROW", "DRY_LINE", "General Text", "Contours", "None"] + pgenExcludeList = ["LINE_SOLID", "LINE_DASHED_6", "FILLED_ARROW", "POINTED_ARROW", "DRY_LINE", "Contours", "None"] + if pgenType in pgenExcludeList: + print("pgenType skipped:", pgenType) + continue + drawable.drawableType = self._pgenTypeDecodeDict().get(pgenType) + drawable.timePeriod = timePeriod + drawable.latLons = self._getLatLons(line) + drawable.printDrawable() + drawables.append(drawable) + + # Get the collections with Symbols + for collection in tree.iter("DECollection"): + for symbol in collection.iter("Symbol"): + drawable = self.Drawable() + pgenType = symbol.attrib.get('pgenType') + print("pgenType", pgenType) + drawable.drawableType = self._pgenTypeDecodeDict().get(pgenType) + drawable.timePeriod = timePeriod + drawable.latLons = 
self._getLatLons(symbol) + for textline in collection.iter("textline"): + drawable.pressureTag = textline.text + " mb" + print("printing collection drawable") + drawable.printDrawable() + drawables.append(drawable) + return drawables + + def _best_way(self, number): + if number%2==0: + return "even" + else: + return "odd" + + def _getLatLons(self, node): + latLons = [] + for point in node.findall("Point"): + + lat = round(float(point.attrib.get("Lat")),1) + lat = int((lat + 0.25) * 2.0) / 2.0 + lat = float(lat) + latmult = lat * 10 + if (self._best_way(latmult)) == "even": + lat = int(lat) + + lon = round(float(point.attrib.get("Lon")),1) + lon = int((lon + 0.25) * 2.0) / 2.0 + lon = float(lon) + lonmult = lon * 10 + if (self._best_way(lonmult)) == "even": + lon = int(lon) +# lat = float(point.attrib.get("Lat")) +# lon = float(point.attrib.get("Lon")) + latLons.append((lat, lon)) + return latLons + + def _associateDrawableElementWithFeature(self, drawableElement): + # Determine if the drawableElement can be associated with a feature + # If so, determine if is associated + found = False + latLons = drawableElement.latLons + for feature in self._features: + if feature.featureType not in ['Named']: + continue + for period in feature.periods: + if self._drawableElementOverlaps(period.areas, latLons): + period.drawables.append(drawableElement) + print("appending to period.drawables in associate") + found = True + return found + + # TO DO -- complete this + def _compatibleDrawableTypes(self, d1, d2): + compatibleTypes = [('High', 'Ridge'), ('Trough', 'Low'), ('Tropical Wave', 'Low'), ('Low', 'Cold Front')] + t1 = d1.drawableType + t2 = d2.drawableType + if t1 == t2: + return True + if (t1, t2) in compatibleTypes or (t2, t1) in compatibleTypes: + return True + else: + return False + + def _sampleData(self, argDict): + elements = self._analysisList(argDict) + periods = [] + areaTuples = [] + for feature in self._features: + if feature.featureType != 'Named': + continue + 
for period in feature.periods:
+ periods.append((period.timePeriod, 'timeLabel'))
+ for area in period.areas:
+ if area.refData:
+ editArea = area.refData
+ else:
+ editArea = area.areaName
+ areaTuples.append((editArea, area.areaLabel))
+
+ sampleInfo = (elements, periods, areaTuples)
+ #print "\nSampleInfo", sampleInfo
+ self._sampler = self.getSampler(argDict, sampleInfo)
+ print("Sampler", self._sampler)
+
+#####
+ def getSampler(self, argDict, sampleInfo, sampleFromServer=0):
+ # Get a HistoSampler given
+ # sampleInfo, which is a list of tuples, or just a single tuple
+ # of tuples ([elements], [periods], [areas])
+ # the elements are [(name, method)] -- basically the analysis list
+ # the periods [(timeRange, label)]
+ # areas [(name,label)] or [(refData, label)] or [(refID, label)]
+ ifpClient = argDict["ifpClient"]
+ databaseID = argDict["databaseID"]
+
+ from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID, ReferenceData
+ from com.raytheon.viz.gfe.sampler import SamplerRequest, HistoSampler
+ from java.util import ArrayList
+
+ # convert input sampleInfo to samplerRequests
+ samplerRequests = ArrayList()
+ if type(sampleInfo) == tuple:
+ sampleInfo = [sampleInfo]
+ for si in sampleInfo:
+ elements, periods, areas = si
+ for e in elements:
+ parmID = self.getParmID(e[0], databaseID)
+ for p in periods:
+ for editArea, areaName in areas:
+ if type(editArea) is str:
+ samplerRequests.add(SamplerRequest( \
+ parmID, ReferenceID(editArea), p[0].toJavaObj()))
+ # NOTE(review): the previous condition compared the type
+ # repr against an empty string (always False), so every
+ # non-str edit area fell through to the exception below.
+ # Presumably the intent is to accept ReferenceData edit
+ # areas here -- confirm against MakeHSFEditAreas callers.
+ elif "ReferenceData" in str(type(editArea)):
+ samplerRequests.add(SamplerRequest( \
+ parmID, editArea, p[0].toJavaObj()))
+
+ else:
+ raise Exception("area specification incorrect")
+
+ # do sampling
+ if sampleFromServer:
+ sampler = ifpClient.sampleRequest(samplerRequests)
+ else:
+ sampler = HistoSampler(ifpClient.getJavaClient(), samplerRequests)
+ if sampler.isValid() != 1:
+ print("Cannot Sample: Check for invalid Weather Elements, ",\
+ "Invalid Areas", str(samplerRequests))
+ return None
+ #print "sampler ", sampler
+ return sampler
+
+#####
+
+ def _populateFeatures(self, argDict):
+
+ # Populate Features with product information
+ elements = self._analysisList(argDict)
+ # cmp_to_key adapts the cmp-style comparators used by this formatter
+ # to Python 3's key-only sort API.
+ from functools import cmp_to_key
+
+ for feature in self._features:
+ if feature.featureType != 'Named':
+ continue
+ featureAreaList = []
+
+ print("Raw feature basin:", feature.basinName)
+
+ for period in feature.periods:
+ areas = period.areas
+ #print "******time period:", timePeriod, "**********"
+ for area in areas:
+ featureAreaList.append(area)
+
+ if area.refData:
+ areaData = area.refData
+ else:
+ areaData = area.areaName
+
+ print("Populate Features using data:", areaData)
+ statDict = self.getStatDict(self._sampler, elements, period.timePeriod, areaData)
+ print("elements:", elements)
+ area.statDict = statDict
+ print("PopulateFeatures....Area....StatDict", statDict)
+ print("Area refdata:", area.areaName)
+ if area.refData:
+ polygons = area.refData.getPolygons
+ print("polygon methods:", dir(area.refData.getPolygons))
+ print("Polygons size:", polygons.__sizeof__())
+
+ # Look for various warnings for this period and area
+ for warningMethod in self._warningMethods():
+ found, headline, methodList, warningType = warningMethod(statDict)
+ print("warnings into warningMethod:", found, headline)
+
+ if found:
+ area.warningType = warningType
+ area.headline = headline
+ area.methodList = methodList
+ break
+ else:
+ if self._checkForWx(statDict):
+ methodList = self._wxMethodList()
+ else:
+ methodList = self._windWaveMethodList()
+ area.methodList = methodList
+ self._setWindWave(area, statDict)
+
+ # If there are warnings: Find the highest earliest warning type
+ if len(featureAreaList) > 0:
+ # Python 3 list.sort() no longer accepts a cmp function;
+ # wrap the comparator with cmp_to_key.
+ featureAreaList.sort(key=cmp_to_key(self._sortAreasForWarningType))
+ chosenArea = featureAreaList[0]
+ feature.highestWarning = chosenArea.warningType
+ feature.highestWarningHeadline = chosenArea.headline
+ feature.highestWarningTimePeriod = chosenArea.timePeriod
+ # Find earliest timePeriod
+ timePeriodList = sorted(feature.periods, key=cmp_to_key(self._sortTimePeriods))
+ feature.earliestTimePeriod = timePeriodList[0].timePeriod
+ return
+
+ def _orderFeatures(self):
+ # Partition features into warning/synopsis lists and order both,
+ # plus the areas within each period.
+ from functools import cmp_to_key
+ # Sort at the area level
+ for feature in self._features:
+ # Grid based features don't have periods so ignore this
+ if feature.featureType == "GridBased":
+ continue
+ for period in feature.periods:
+ period.areas.sort(key=cmp_to_key(self._sortAreas))
+
+ self._warningFeatures = []
+ self._synopsisFeatures = []
+ # Sort at the feature level
+ for feature in self._features:
+ if feature.highestWarning:
+ self._warningFeatures.append(feature)
+ else:
+ self._synopsisFeatures.append(feature)
+ self._warningFeatures.sort(key=cmp_to_key(self._sortWarningFeatures))
+ self._synopsisFeatures.sort(key=cmp_to_key(self._sortSynopsisFeatures))
+
+
+ ##########
+
+ def _preProcessProduct(self, fcst, argDict):
+ # Build the product header: WMO line, pil, product name, office
+ # line, issuance time, optional CCODE line, and the fixed text.
+ productName = self._productName.strip()
+ issuedByString = self.getIssuedByString()
+ if self._windMax > 63:
+ self._issuanceType = "PAN PAN"
+ else:
+ self._issuanceType = "SECURITE"
+
+ fcst = fcst + self._wmoID + " " + self._fullStationID + " " + \
+ self._ddhhmmTime + "\n" + self._pil + "\n\n" + \
+ productName + "\n" +\
+ "NWS " + self._wfoCityState + \
+ "\n" + issuedByString + self._timeLabel + "\n\n"
+
+ if self._ccode:
+ ### the next line must be changed to check HSFAT1 pil after testing - 11/14/2017 CNJ
+ if self._pil == "HSFAT1":
+ fcst = fcst + "CCODE/1:31:04:01:00/AOW/NWS/CCODE" + "\n"
+ elif self._pil == "HSFEP1":
+ fcst = fcst + "CCODE/1:31:12:01:00/AOW+POR/NWS/CCODE" + "\n"
+ elif self._pil == "HSFEP3":
+ fcst = fcst + "CCODE/1:31:16:01:00/AOW/NWS/CCODE" + "\n"
+ else:
+ pass
+
+ fcst = fcst + self._Text1() + self._issuanceType + "\n\n" + self._Text2()
+ fcst = self._validLabel(fcst, argDict)
+ return fcst
+
+
+ def _makeSection(self, fcst, argDict, features, sectionType):
+ print("feature dump")
+ for feature in features:
+ print("FeatureType:", feature.featureType)
+ print("Feature.phenomenonType:", feature.phenomenonType)
+
+
+ if sectionType ==
"Warning": + fcst = fcst + ".WARNINGS.\n\n" + else: + fcst = fcst + ".SYNOPSIS AND FORECAST.\n\n" + + elements = self._analysisList(argDict) + + print("Feature Count:", len(self._features), "Section type:", sectionType) + + for feature in features: + print("^^^FEATURE^^^") + #feature.printFeature() + print("^^^end feature^^^") + fname = feature.name + print("### FEATURE NAME:", fname) + if feature.featureType == "TCM_Based": + fcst = fcst + feature.autoText + "\n\n" + continue + + # Process Volcanic Ash wxType +# if feature.wxType == 'VA': +# fcst = fcst + feature.area.headline +# #print "Feature wxType is:", feature.wxType, "and is being skipped." +# continue + + if sectionType == "Warning": + print("Section type is Warning") + fcst = fcst + feature.highestWarningHeadline + "\n" + print("highestWarningHeadline", feature.highestWarningHeadline) + +# elif sectionType == "Synopsis": + #print "Section type is Synopsis" + print("Formatting feature type:", feature.featureType) + if feature.featureType != "GridBased": + for period in feature.periods: + timePeriod = period.timePeriod + #print "******time period:", timePeriod, "*******************" + if feature.periods.index(period) == 0: + basinName = feature.basinName + else: + basinName = None + + # next line was reporting blank time label for convection + #fcst = fcst + self._getTimePeriodLabel(argDict, timePeriod, basinName) + + # next code block to not report blank time label for grid-based feature + if feature.featureType == 'GridBased': + pass + else: + fcst = fcst + self._getTimePeriodLabel(argDict, timePeriod, basinName) + + print("*** before period drawables ***") + # Add in associated drawables + if period.drawables: + print("INTO PERIOD.DRAWABLES", period.drawables) + for drawable in period.drawables: + fcst = fcst + self._formatDrawable(drawable) #+ ".\n\n" + + print("*** after period drawables ***") + + for area in period.areas: + areaWords = self._getAreaWords(area).rstrip() + elementWords = 
self._getElementWords(feature, area, sectionType) + fcst = fcst + areaWords + " " + elementWords #+ ". " + #removed space above between "" because when a feature is dissipated + #it was adding an extra space in front of WINDS 20 KT OR LESS. -JL/3/24/16 + + # next two lines may be printing extra carriage returns when there are no features + fcst = fcst + "\n" + elif feature.featureType == "GridBased": + print("Formatting grid based features..............................................") + for area in feature.GBareaList: + areaWords = self._getAreaWords(area).rstrip() + elementWords = self._getGridBasedWxWords(area) + + # next line to include time label for grid based feature + if area.wxType != 'VA': + fcst = fcst + self._getTimePeriodLabel(argDict, area.timePeriod) + #if feature.wxType == 'VA': + if area.wxType == 'VA': + fcst = fcst + "\n\n" + #fcst = fcst + area.headline + "\n\n" + else: + fcst = fcst + elementWords + " " + areaWords + ".\n" + print("**** element words:", elementWords, "****") + print("**** area words:", areaWords, "****") + #fcst = fcst + elementWords + " " + areaWords + ". " + else: + print("Skipping this feature in makeSection.+++++++++++++++++++++++++++++++++++++++++++++++++++") + print("Feature Dump:", feature.printFeature()) + + + fcst = fcst + "\n" + + if sectionType == "Warning": + if self._noWarningFeatures(): + print("Found no warning features. ") + fcst = fcst + ".NONE.\n\n" + + return fcst + + + def _getAreaWords(self, area): + areaLabel = area.areaLabel + # Killed feature have no area label so remove a space + print("+&++++++++++++++++++++++++++++++AREA LABEL:", areaLabel) + if areaLabel == "": + return areaLabel + else: + return areaLabel + " " + + def _getElementWords(self, feature, area, sectionType): + # Set up the correct method depending on the weather element or phenomenon + statDict = area.statDict + if feature.featureType == 'GridBased': + periodStr = "" + methodList = self._gridBasedWxMethods(area) + else: + periodStr = ". 
" + if sectionType == "Warning": + methodList = area.methodList + else: + if self._checkForWx(statDict): + methodList = self._wxMethodList() + else: + methodList = self._windWaveMethodList() + if area.wxType == "F" or area.wxType == "K": + periodStr = "." + words = self._makePhrases("", methodList, statDict, area.timePeriod, area.areaLabel, periodStr) + return words + + def _formatDrawable(self, drawable): + print("### DRAWABLE ###") + drawable.printDrawable() + print("### END DRAWABLE ###") + outputStr = drawable.drawableType + ' from ' + length = len(drawable.latLons) + for index in range(length): + lat, lon = drawable.latLons[index] + + # Format lat/lon output + if lat >= 0: + lathemi = "N" + else: + lathemi = "S" + # check dateline later + if lon > 0: + lonhemi = "E" + else: + lonhemi = "W" + lat = abs(lat) + lon = abs(lon) + + # removed space between lat and lon for drawables + if lat < 10: + outputStr = outputStr + '0' + str(lat) + lathemi + str(lon) + lonhemi + else: + outputStr = outputStr + str(lat) + lathemi + str(lon) + lonhemi + + if index < length-1: + outputStr = outputStr + ' to ' + return outputStr + + def _noSynopsisFeatures(self): + + for f in self._features: + if f.featureType == "Named": + return False + return True + + def _noWarningFeatures(self): + for f in self._features: + print("Feature.highestWarning:", f.highestWarning) + if f.highestWarning is not None: + return False + return True + + def _postProcessProduct(self, fcst, argDict): + ## Insert Labe and Forecaster Name at bottom of product + #forecasterName = self._forecasterName.strip() + #First line below only needed for HSFEP2# + if self._noSynopsisFeatures(): + fcst = fcst + ".ENTIRE AREA WINDS 20 KT OR LESS. SEAS LESS THAN 8 FT." + else: + fcst = fcst + ".REMAINDER OF AREA WINDS 20 KT OR LESS. SEAS LESS THAN 8 FT." 
+ + self._userInfo = UserInfo.UserInfo() + forecasterName = self._userInfo._getForecasterName(argDict) + + #if fcst.find('HURRICANE') != -1: + if re.search(r'\.\.\.HURRICANE*', fcst): + fcst = re.sub(r'SECURITE', r'PAN PAN', fcst) + if self._ccode: + if self._pil == "HSFAT2": + fcst = re.sub(r'CCODE/1:31:04:01:00/AOW/NWS/CCODE', r'CCODE/2:31:04:11:00/AOW+AOE/NWS/CCODE', fcst) + elif self._pil == "HSFEP1": + fcst = re.sub(r'CCODE/1:31:12:01:00/AOW+POR/NWS/CCODE', r'CCODE/2:31:12:11:00/AOW+POR+AOE/NWS/CCODE', fcst) + elif self._pil == "HSFEP3": + fcst = re.sub(r'CCODE/1:31:16:01:00/AOW/NWS/CCODE', r'CCODE/2:31:16:11:00/AOW+POR+AOE/NWS/CCODE', fcst) + else: + pass + + fcst = fcst + "\n\n" + "$$" + "\n" + ".FORECASTER " + forecasterName + ". NATIONAL HURRICANE CENTER." + + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + + fcst = self._wrapLines(fcst) + + return fcst + + # SORT METHODS + def _warningOrder(self): + return [ + "Hurricane", + "Typhoon", + "Hurricane Force", + "Tropical Storm", + "Storm", + "Gale", + "Heavy Freezing Spray", + "Ashfall", + "Space Weather", + None, + ] + + def _basinOrder(self): + return ["ATLC", + "ATLC AND CARIBBEAN", + "ATLC AND CARIBBEAN AND GULF OF MEXICO", + "ATLC AND GULF OF MEXICO", + "CARIBBEAN", + "CARIBBEAN AND GULF OF MEXICO", + "GULF OF MEXICO", + None, + ] + + def _phenomenonOrder(self): + return [ + "Tropical Depression", + "Post-Tropical", + "Remnants", + "Freezing Spray", + "Fog", + "Smoke", + "Convection", + None, + ] + + def _phenomenonTypeDict(self): + return { + 'ZY': 'Freezing Spray', + 'VA': 'Ashfall', + 'T': 'Convection', + 'F' : 'Fog', + 'K' : 'Smoke', + #default: None, + } + + def _drawableOrder(self): + return { + "ITCZ", + "Cold Front", + "Forming Cold Front", + "Dissipating Cold Front", + "Warm Front", + "Forming Warm Front", + "Dissipating Warm Front", + "Stationary Front", + "Forming Stationary Front", + "Dissipating Stationary Front", + "Occluded Front", + 
"Forming Occluded Front", + "Dissipating Occluded Front", + "Trough", + "Tropical Wave", + "Low", + "High", + "Ridge", + "Ice Edge", + "Gulf Stream", + "Ignore" + } + + def _pgenTypeDecodeDict(self): + return { + 'COLD_FRONT': 'Cold Front', + 'COLD_FRONT_FORM': 'Forming Cold Front', + 'COLD_FRONT_DISS': 'Dissipating Cold Front', + 'WARM_FRONT': 'Warm Front', + 'WARM_FRONT_FORM': 'Forming Warm Front', + 'WARM_FRONT_DISS': 'Dissipating Warm Front', + 'STATIONARY_FRONT': 'Stationary Front', + 'STATIONARY_FRONT_FORM': 'Forming Stationary Front', + 'STATIONARY_FRONT_DISS': 'Dissipating Stationary Front', + 'OCCLUDED_FRONT': 'Occluded Front', + 'OCCULUDED_FRONT_FORM': 'Forming Occluded Front', + 'OCCLUDED_FRONT_DISS': 'Dissipating Occluded Front', + 'TROF': 'Trough', + 'TROPICAL_TROF': 'Tropical Wave', + 'LINE_DASHED_8': 'Shear Line', + 'FILLED_HIGH_PRESSURE_H': 'High', + 'LOW_X_FILLED': 'Low', + 'ZIGZAG': 'Ridge', + 'ZZZ_LINE': 'ITCZ', + 'DOUBLE_LINE': 'Monsoon Trough', + 'FILLED_CIRCLES': 'Ice Edge', + 'LINE_SOLID': 'Western Edge of the Gulf Stream', + 'LINE_DASHED_2': 'Eastern Edge of the Gulf Stream', + 'LINE_DASHED_6': 'Ignore', + 'FILLED_ARROW': 'Ignore', + 'POINTED_ARROW': 'Ignore', + 'DRY_LINE': 'Ignore', + 'General Text': 'Ignore', + 'Contours': 'Ignore', + 'None': 'Ignore' + } + + + def _sortAreasForWarningType(self, a, b): + # Sorting to find the highest warning type for an entire Feature + order = self._warningOrder() + if order.index(a.warningType) < order.index(b.warningType): + return -1 + elif order.index(a.warningType) > order.index(b.warningType): + return 1 + if a.timePeriod.startTime() < b.timePeriod.startTime(): + return -1 + elif a.timePeriod.startTime() > b.timePeriod.startTime(): + return 1 + return 0 + + def _sortTimePeriods(self,a,b): + # Sorting time periods within a Feature from earliest to latest + if a.timePeriod.startTime() < b.timePeriod.startTime(): + return -1 + elif a.timePeriod.startTime() > b.timePeriod.startTime(): + return 1 + 
return 0 + + def _sortAreas(self, a, b): + # Sorting areas within a period + # a, b are area objects + #print "SortAreas", "a:", a.windWave, "b:", b.windWave + order = self._warningOrder() + if order.index(a.warningType) < order.index(b.warningType): + return -1 + elif order.index(a.warningType) > order.index(b.warningType): + return 1 + if a.windWave: + return 1 + if b.windWave: + return -1 + if a.windOnly: + return 1 + if b.windOnly: + return -1 + if a.waveOnly: + return 1 + if b.waveOnly: + return -1 + if a.areaName < b.areaName: + return -1 + if a.areaName > b.areaName: + return 1 + + return 0 + + def _sortWarningFeatures(self, a, b): + # Sorting Features with Warnings into product order + # a, b are Feature objects + order = self._warningOrder() + if a.featureType == "Named" and b.featureType == "GridBased": + return -1 + elif b.featureType == "Named" and a.featureType == "GridBased": + return 1 + if order.index(a.highestWarning) < order.index(b.highestWarning): + return -1 + elif order.index(a.highestWarning) > order.index(b.highestWarning): + return 1 + if a.highestWarningTimePeriod.startTime() < b.highestWarningTimePeriod.startTime(): + return -1 + elif a.highestWarningTimePeriod.startTime() > b.highestWarningTimePeriod.startTime(): + return 1 + order = self._basinOrder() + if order.index(a.basinName) < order.index(b.basinName): + return -1 + elif order.index(a.basinName) > order.index(b.basinName): + return 1 + return 0 + + def _sortSynopsisFeatures(self, a, b): + # Sorting Features without Warnings into product order + # a, b are Feature objects + if a.featureType == "Named" and b.featureType == "GridBased": + return -1 + elif b.featureType == "Named" and a.featureType == "GridBased": + return 1 + if a.earliestTimePeriod is None: + return 1 + elif b.earliestTimePeriod is None: + return -1 + order = self._phenomenonOrder() + if order.index(a.phenomenonType) < order.index(b.phenomenonType): + return -1 + elif order.index(a.phenomenonType) > 
order.index(b.phenomenonType): + return 1 + + #print "a.basinName:", a.basinName, "b.basinName:", b.basinName + order = self._basinOrder() + if order.index(a.basinName) < order.index(b.basinName): + return -1 + elif order.index(a.basinName) > order.index(b.basinName): + return 1 + if a.earliestTimePeriod.startTime() > b.earliestTimePeriod.startTime(): + return 1 + #return -1 + elif a.earliestTimePeriod.startTime() < b.earliestTimePeriod.startTime(): + return -1 + #return 1 + return 0 + + def _sortPeriodsByTime(self, a, b): + if a.timePeriod.startTime() > b.timePeriod.startTime(): + return -1 + elif a.timePeriod.startTime() < b.timePeriod.startTime(): + return 1 + return 0 + + # Methods for populating Features, determining which have Warnings + def _warningMethods(self): + return [ + self._checkHurricane, + self._checkHurricaneForce, + self._checkTyphoon, + self._checkTropicalStorm, + self._checkStorm, + self._checkGale, + self._checkFreezingSpray, + #self._checkAshfall, + self._checkSpaceWx, + ] + + def _checkHurricane(self, statDict): + hazards = self.getStats(statDict, "Hazards") + for hazardType, timeRange in hazards: + if hazardType == "HU.W": + return True, "...HURRICANE WARNING...", self._windWaveMethodList(), "Hurricane" + return False, None, None, None + + def _checkTyphoon(self, statDict): + hazards = self.getStats(statDict, "Hazards") + for hazardType, timeRange in hazards: + if hazardType == "TY.W": + return True, "...TYPHOON WARNING...", self._windWaveMethodList(), "Typhoon" + return False, None, None, None + + def _checkHurricaneForce(self, statDict): + hazards = self.getStats(statDict, "Hazards") + for hazardType, timeRange in hazards: + if hazardType == "HF.W": + return True, "...HURRICANE FORCE WIND WARNING...", self._windWaveMethodList(), "Hurricane Force" + return False, None, None, None + + def _checkTropicalStorm(self, statDict): + hazards = self.getStats(statDict, "Hazards") + for hazardType, timeRange in hazards: + if hazardType == "TR.W": + 
return True, "...TROPICAL STORM WARNING...", self._windWaveMethodList(), "Tropical Storm" + return False, None, None, None + + def _checkStorm(self, statDict): + hazards = self.getStats(statDict, "Hazards") + for hazardType, timeRange in hazards: + if hazardType == "SR.W": + return True, "...STORM WARNING...", self._windWaveMethodList(), "Storm" + return False, None, None, None + + def _checkGale(self, statDict): + hazards = self.getStats(statDict, "Hazards") + for hazardType, timeRange in hazards: + if hazardType == "GL.W": + return True, "...GALE WARNING...", self._windWaveMethodList(), "Gale" + return False, None, None, None + + def _checkFreezingSpray(self, statDict): + hazards = self.getStats(statDict, "Hazards") + for hazardType, timeRange in hazards: + if hazardType == "UP.W": + return True, "...HEAVY FREEZING SPRAY WARNING...", self._WxMethodList(), "Heavy Freezing Spray" + return False, None, None, None + + def _checkSpaceWx(self, statDict): + return False, None, None, None + + def _setWindWave(self, area, statDict): + minMax, dir = self.getStats(statDict, "Wind", "MinMax") + windMin, windMax = minMax + windThreshold = 22.5 + minMag, waveMax = self.getStats(statDict, "WaveHeight", "MinMax") + waveThreshold = self.nlValue(self.null_nlValue( + None, None, "WaveHeight", "WaveHeight"), waveMax) + if windMax >= windThreshold and waveMax > waveThreshold: + area.windWave = True + elif windMax >= windThreshold: + area.windOnly = True + elif waveMax > waveThreshold: + area.waveOnly = True + + ### Below not working to automatically change Securite to Pan Pan + if windMax > self._windMax: + self._windMax = windMax + + def _validLabel(self, fcst, argDict): + + curTime = argDict.get("creationTime") + timeStr24 = time.strftime(" %a %b %e.", time.gmtime(curTime + 24*3600)) + timeStr48 = time.strftime(" %a %b %e.", time.gmtime(curTime + 48*3600)) + + fcst = fcst + "SYNOPSIS VALID " + self._validTime + \ + self.getCurrentTime(argDict, " %a %b %e.", shiftToLocal=0) + \ + 
"\n" + "24 hour forecast valid " + self._validTime + \ + timeStr24 + "\n" + \ + "48 hour forecast valid " + self._validTime + \ + timeStr48 + "\n\n" + + return fcst + + ### modified to add leading zero for 06 hour forecast ## + def _getTimePeriodLabel(self, argDict, timeRange, basinName=None): + now = argDict.get("creationTime") + # truncate the current time to the last six hour time + now = int(now / (3600* 6)) * 3600 * 6 + diffTime = timeRange.startTime() - AbsTime.AbsTime(now) + + if basinName: + leading = '.'+ basinName + ' ' + else: + leading = '.' + + if diffTime <= 0: + return leading + diffTime = diffTime / 3600 + + ## added code - CJ 3/9/15 + if diffTime < 10: + diffTime = str(diffTime) + diffTime = diffTime.zfill(2) + label = leading + diffTime + " hour forecast " + else: + label = leading + repr(diffTime)+ " hour forecast " + + return label + + # Methods for translating from Feature Descriptors coming from the Tool + # to Feature Objects + + + def _convertToTimeRange(self, timePeriod): + + hour = int(timePeriod[0:2]) + + baseTime = int(time.time() / (3600 * 6)) * (3600 * 6) + productTime = baseTime + (hour * 3600) + + timeRange = TimeRange.TimeRange(AbsTime.AbsTime(productTime), + AbsTime.AbsTime(productTime + 6 * 3600)) + return timeRange + + + # Returns the name of the file used to store the edit area information. + def descriptorFileName(self): + + domain = self._displayName[-3:] + + #TextProduct instance has no attribute '_siteID below for AT2 + return "/data/local/HighSeas/Formatter/NH2/HSF_AT2HighSeasDescriptors.pic" + + # Returns the name of the file used to store the edit area information. 
+ def gridBasedFileName(self): + + sys, nodeName, release, version, machine = os.uname() + domain = self._displayName[-3:] + + #TextProduct instance has no attribute '_siteID below for AT2 + return "/data/local/HighSeas/Formatter/NH2/HSF_AT2GridBasedFeatures.pic" + + + def _checkForWx(self, statDict): + wxStats = statDict.get("Wx") + if wxStats is None: + return False + for wxStat in wxStats: + subkey, rank = wxStat + vis = self.getVis([subkey]) + if subkey.wxType() == "F"and vis <= 1: + return True + return False + + def _makePhrases(self, fcst, methodList, statDict, timePeriod, areaLabel, periodStr=". "): + + phraseWords = "" + print("makePhrases methodList", methodList) + + for methodInfo in methodList: + wordMethod, elementName, maxMin, elementType = methodInfo + + tree, node = self._makeTreeNode( + methodInfo, statDict, timePeriod, areaLabel) + #print "statDict after makeTreeNode", statDict + self._applyRanges(tree, node, statDict, elementName, elementType) + #print "statDict after applyRanges", statDict + for subPhrase in node.get("childList"): + wordMethod(tree, subPhrase) + #tree.printNode(node) + self.fillNulls(tree, node) + if elementName == "Wind": + self.embedDescriptor(tree, node) + if wordMethod == self.visibility_words: + descriptor = self.phrase_descriptor(tree, node, "Visibility", "Visibility") + node.set("descriptor", descriptor) + self.assembleSubPhrases(tree, node) + phraseWords = phraseWords + node.get("words") + periodStr + + fcst = fcst + phraseWords + return fcst + + def _gridBasedWxMethods(self, area): + return [(self._gridBasedWxWords(area), "Wx", "List", self.WEATHER())] + + def _gridBasedWxWords(self, area): + + wxType = area.wxType + intensity = area.intensity + coverage = area.coverage + if wxType == 'T' and intensity == '+' and coverage == "Iso": + words = 'isolated strong convection' + elif wxType == 'T' and intensity == '+' and coverage == "Sct": + words = 'scattered strong convection' + elif wxType == 'T' and intensity == '+' and 
coverage == "Num": + words = 'numerous strong convection' + elif wxType == 'T' and coverage == "Iso": + words = 'isolated moderate convection' + elif wxType == 'T' and coverage == "Sct": + words = 'scattered moderate convection' + elif wxType == 'T' and coverage == "Num": + words = 'numerous moderate convection' + elif wxType == 'F' and intensity == '+': + words = 'dense fog' + elif wxType == 'ZY' and intensity == '+': + words = 'heavy freezing spray' + elif wxType == 'ZY' and intensity == 'm': + words = 'moderate freezing spray' + elif wxType == 'ZY' and intensity == '-': + words = 'light freezing spray' + elif wxType == 'VA': + words = '' + elif wxType == 'K': + words = 'dense smoke' + else: + words = 'Wx Type not found' + return self.setWords(node, words) + + def _getGridBasedWxWords(self, area): + wxType = area.wxType + intensity = area.intensity + coverage = area.coverage + if wxType == 'T' and intensity == '+' and coverage == "Iso": + words = 'isolated strong convection' + elif wxType == 'T' and intensity == '+' and coverage == "Sct": + words = 'scattered strong convection' + elif wxType == 'T' and intensity == '+' and coverage == "Num": + words = 'numerous strong convection' + elif wxType == 'T' and coverage == "Iso": + words = 'isolated moderate convection' + elif wxType == 'T' and coverage == "Sct": + words = 'scattered moderate convection' + elif wxType == 'T' and coverage == "Num": + words = 'numerous moderate convection' + elif wxType == 'F' and intensity == '+': + words = 'dense fog' + elif wxType == 'ZY' and intensity == '+': + words = 'heavy freezing spray' + elif wxType == 'ZY' and intensity == 'm': + words = 'moderate freezing spray' + elif wxType == 'ZY' and intensity == '-': + words = 'light freezing spray' + elif wxType == 'VA': + words = '' + elif wxType == 'K': + words = 'dense smoke' + else: + words = 'Wx Type not found' + + return words + + + ##################### + # Overrides + + def _windWaveMethodList(self): + return [ + # WINDS + # 
wind_phrase + (self.vector_words, "Wind", "Max", self.VECTOR()), + #self.gust_phrase, + # WAVES + #(self.waveHeight_words, "WaveHeight", "Max", self.SCALAR()), + (self.wave_words, "WaveHeight", "Max", self.SCALAR()), + ] + + def _wxMethodList(self): + return [ + # WEATHER + (self.weather_words, "Wx", "List", self.WEATHER()), + (self.visibility_words, "Wx", "List", self.WEATHER()), + ] + + + def _analysisList(self, argDict): + return [ + #("Wind", self.vectorModeratedMinMax), + ("Wind", self.vectorMinMax), + #("WindGust", self.moderatedMax), + ("WaveHeight", self.moderatedMinMax), + ("Wx", self.rankedWx), + ("Hazards", self.discreteTimeRangesByKey), + ] + + + + + def vector_mag(self, tree, node, minMag, maxMag, units, + elementName="Wind"): + "Create a phrase for a Range of magnitudes" + + # Check for "null" value (below threshold) + threshold = self.nlValue(self.null_nlValue( + tree, node, elementName, elementName), maxMag) + if maxMag < threshold: + return "null" + + # Apply max reported threshold + maxReportedMag = self.maxReported_threshold(tree, node, elementName, elementName) + if maxMag >= maxReportedMag: + maxMag = maxReportedMag + #minMag = 0 + + units = self.units_descriptor(tree, node, "units", units) + if elementName == "Wind": + if self.marine_wind_flag(tree, node): + return self.marine_wind_mag(tree, node, minMag, maxMag, units, elementName) + + # round to the nearest 5 + # Handle special caseS of 22.5 minMag. 
+ if maxMag < 22.5: + return 'null' + if maxMag >= 22.5 and maxMag < 27.5 and minMag >= 22.5 and minMag < 27.5: + words = '25 ' + units + elif minMag >= 20 and minMag < 22.5 and maxMag >= 22.5 and maxMag < 27.5: + words = '20 to 25 '+ units + elif minMag >= 20 and minMag < 22.5 and maxMag >= 27.5 and maxMag < 32.5: + words = '20 to 30 '+units + else: + minMag = int((minMag + 2.5) / 5.0) * 5.0 + maxMag = int((maxMag + 2.5) / 5.0) * 5.0 + + # Check for SingleValue + if maxMag == minMag: #or minMag == 0: + around = self.addSpace( + self.phrase_descriptor(tree, node, "around", elementName)) + words = around + repr(int(maxMag)) + " " + units + else: + if int(minMag) < threshold: + upTo = self.addSpace( + self.phrase_descriptor(tree, node, "up to", elementName)) + words = upTo + repr(int(maxMag)) + " " + units + else: + valueConnector = self.value_connector(tree, node, elementName, elementName) + words = repr(int(minMag)) + valueConnector + repr(int(maxMag)) + " " + units + + # This is an additional hook for customizing the magnitude wording + words = self.vector_mag_hook(tree, node, minMag, maxMag, units, elementName, words) + + return words + + # OVERRIDE - to get single letter directions. + def vector_dir(self, dir): + if not type(dir)== bytes: + dir = self.dirToText(dir) + # Commented this out to get single letter directions - Tom. 
+## dir = string.replace(dir, "N", "north") +## dir = string.replace(dir, "S", "south") +## dir = string.replace(dir, "E", "east") +## dir = string.replace(dir, "W", "west") + return dir + + def element_outUnits_dict(self, tree, node): + dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) + dict["Visibility"] = "NM" + dict["Wind"] = "kts" + dict["WaveHeight"] = "ft" + return dict + + def units_descriptor_dict(self, tree, node): + # Dictionary of descriptors for various units + return { + "units": { + "ft": "FT", + "F":"", + "C":"degrees", + "K":"kelvins", + "%":" percent", + "in":"inches", + "kts":"KT", + "s":"seconds", + "hrs":"hours", + "m/s":"meters/second", + "mph":"mph", + "m":"meters", + "m^2/s":"meters^2/second", + "kt-ft":"knots-feet", + "mm":"millimeters", + "degrees": "degrees", + "percent": "percent", + }, + "unit": { + "ft":"FT", + "F":"", + "C":"degree", + "K":"kelvin", + "%":" percent", + "in":"inch", + "kts":"KT", + "s":"second", + "hrs":"hour", + "m/s":"meter/second", + "mph":"mph", + "m":"meter", + "m^2/s":"meter^2/second", + "kt-ft":"knot-foot", + "mm":"millimeter", + "degree": "degree", + "percent": "percent", + }, + } + + ##### + # NULL value phrases + def first_null_phrase_dict(self, tree, node): + # Phrase to use if values THROUGHOUT the period or + # in the first period are Null (i.e. below threshold OR NoWx) + # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. + return { + "Wind": "Winds 20 kt or less", + "Wind20ft": "light winds", + "TransWind": "light winds", + "FreeWind": "light winds", + "Swell": "light swells", + "Swell2": "", + "Wx": "", + "WindGust": "", + "WaveHeight": "Seas less than 8 ft", + "WindWaveHgt": "waves 2 ft or less", + "CWR": "", + } + + def null_phrase_dict(self, tree, node): + # Phrase to use if values THROUGHOUT the period or + # in the first period are Null (i.e. below threshold OR NoWx) + # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. 
+ return { + "Wind": "Winds 20 kt or less", + "Wind20ft": "light winds", + "TransWind": "light winds", + "FreeWind": "light winds", + "Swell": "light swells", + "Swell2": "", + "Wx": "", + "WindGust": "", + "WaveHeight": "Seas less than 8 ft", + "WindWaveHgt": "waves 2 ft or less", + "CWR": "", + } + + def null_nlValue_dict(self, tree, node): + # Threshold below which values are considered "null" and + # reported using the null_phrase (see above) + return { + "otherwise": 0, + "Wind": 20, + #"Wind": 22.5, + "WaveHeight": 8, + #"WindGust": 20, + "Visibility": 1, + } + + def maximum_range_nlValue_dict(self, tree, node): + # Maximum range to be reported within a phrase + # e.g. 5 to 10 mph + # Units depend on the product + dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) + #----------------------------------------------------------------------- + # COMMENT: Override max ranges for certain fields + # This dict specifications allows for wind speed ranges of up to 20 mph + # during tropical cyclone situations allowing for far better wind speed + # phrases. + #----------------------------------------------------------------------- + dict["Wind"] = { + (0, 30): 10, + (30,50): 15, + (50, 200):20, + "default":5, + } + + dict["WaveHeight"] = { + (8,10):2, + (10,20):5, + (20,200):10, + "default":1, + } + return dict + + # added to force ranges for sea heights with tropical turned on 9/7/11 CNJ/JL + def minimum_range_nlValue_dict(self, tree, node): + # This threshold is the "smallest" min/max difference allowed between values reported. + # For example, if threshold is set to 5 for "MaxT", and the min value is 45 + # and the max value is 46, the range will be adjusted to at least a 5 degree + # range e.g. 43-48. 
These are the values that are then submitted for phrasing + # such as: + dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) + # HIGHS IN THE MID 40S + dict["Wind"] = { + (0,30):0, + (30,50):5, + (50,200):10, + "default":5, + } + dict["WaveHeight"] = { + (8,10):1, + (10,16):2, + (16,28):4, + (28,40):6, + (40,200):10, + "default":1, + } + return dict + + def phrase_descriptor_dict(self, tree, node): + # Descriptors for phrases + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + dict["Wind"] = "winds" + dict["WaveHeight"] = "seas" + dict["Visibility"] = "vsby occasionally" + dict["seas"] = "seas" + dict["mixed swell"] = "mixed swell" + dict["waves"] = "seas" + dict["up to"] = "winds to" + dict["around"] = "" + return dict + +## def rounding_method_dict(self, tree, node): +## # Special rounding methods +## # +## return { +## "Wind": self.marineRounding, +## } + + # WxPhrases Overrides + def pop_wx_lower_threshold(self, tree, node): + # Always report weather + return 0 + + # MarinePhrases Overrides + def seasWaveHeight_element(self, tree, node): + # Weather element to use for reporting seas + # "COMBINED SEAS 10 TO 15 FEET." + # IF above wind or swell thresholds + return "WaveHeight" + + def waveHeight_wind_threshold(self, tree, node): + # wind value above which waveHeight is reported vs. 
wind waves + # Unit is knots + return 0 + + def wave_range(self, avg): + # Make wave ranges based off the average wave value + table = ((0, "less than 1 ft"), (1, "1 foot or less"), + (1.5, "1 to 2 ft"), (2, "1 to 3 ft"), + (3, "2 to 4 ft"), (4, "3 to 5 ft"), + (5, "3 to 6 ft"), (6, "4 to 7 ft"), + (7, "5 to 8 ft"), (8, "6 to 10 ft"), + (10, "8 to 12 ft"), (12, "10 to 14 ft"), + (14, "12 to 16 ft"), (18, "14 to 18 ft"), + (20, "15 to 20 ft"), (100, "over 20 ft")) + range = "" + for max, str in table: + if avg <= max: + range = str + break + return range + + + # SampleAnalysis overrides + def moderated_dict(self, parmHisto, timeRange, componentName): + # This dictionary defines the low and high limit at which + # outliers will be removed when calculating moderated stats. + # By convention the first value listed is the percentage + # allowed for low values and second the percentage allowed + # for high values. + dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, timeRange, componentName) + dict["Wind"] = (0, 20) + dict["WaveHeight"] = (5,5) + return dict + + def dirList(self): + dirSpan = 22.5 + base = 11.25 + return[ + ('N', 360-base, 361), + ('N', 0, base), + ('N TO NE', base, base+1*dirSpan), + ('NE', base+1*dirSpan, base+2*dirSpan), + ('NE TO E', base+2*dirSpan, base+3*dirSpan), + ('E', base+3*dirSpan, base+4*dirSpan), + ('E TO SE', base+4*dirSpan, base+5*dirSpan), + ('SE', base+5*dirSpan, base+6*dirSpan), + ('SE TO S', base+6*dirSpan, base+7*dirSpan), + ('S', base+7*dirSpan, base+8*dirSpan), + ('S TO SW', base+8*dirSpan, base+9*dirSpan), + ('SW', base+9*dirSpan, base+10*dirSpan), + ('SW TO W', base+10*dirSpan, base+11*dirSpan), + ('W', base+11*dirSpan, base+12*dirSpan), + ('W TO NW', base+12*dirSpan, base+13*dirSpan), + ('NW', base+13*dirSpan, base+14*dirSpan), + ('NW TO N', base+14*dirSpan, base+15*dirSpan), + ] + + # Returns a list of the Hazards allowed for this product in VTEC format. 
+ # These are sorted in priority order - most important first. + def allowedHazards(self): + + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", "EXP"] + marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] + return [ + ('HU.W', tropicalActions, 'Tropical'), # HURRICANE WARNING + ('TR.W', tropicalActions, 'Tropical'), # TROPICAL STORM WARNING + ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING + ('SR.W', marineActions, 'Marine'), # STORM WARNING + ('GL.W', marineActions, 'Marine'), # GALE WARNING + ('UP.W', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING + ('MH.Y', allActions, 'Ashfall') # VOLCANIC ASHFALL ADVISORY + ] + + def significant_wx_visibility_subkeys(self, tree, node): + # Weather values that constitute significant weather to + # be reported regardless of visibility. + # If your visibility_wx_threshold is None, you do not need + # to set up these subkeys since weather will always be + # reported. + # Set of tuples of weather key search tuples in the form: + # (cov type inten) + # Wildcards are permitted. + return [("* *")] + + def wxCoverageDescriptors(self): + # This is the list of coverages, wxTypes, intensities, attributes for which special + # weather coverage wording is desired. Wildcards (*) can be used to match any value. + # If a weather subkey is not found in this list, default wording + # will be used from the Weather Definition in the server. 
+ # The format of each tuple is: + # (coverage, wxType, intensity, attribute, descriptor) + # For example: + #return [ + # ("Chc", "*", "*", "*", "a chance of"), + # ] + # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments + return [("*", "F", "*", "*", "")] + + + ####################### + + def _makeTreeNode(self, phraseInfo, statDict, timePeriod, areaLabel): + phraseMethod, elementName, maxMin, elementType = phraseInfo + + # Set up temporary tree, node for this phrase + subPhrase = Node([], []) + node = Node([subPhrase], []) + + + treeStatDict = {} + for key in statDict: + treeStatDict[key] = statDict.get(key) + wxStats = treeStatDict.get("Wx") + treeStatDict["Wx"] = [(wxStats, timePeriod)] + + statistics = Statistics(treeStatDict) + tree = Narrative([],[node], statistics, None, self, None) + tree.set("timeRange", timePeriod) + tree.set("areaLabel", areaLabel) + + elementInfo = self.ElementInfo(elementName, maxMin, elementType) + elementInfo.outUnits = self.element_outUnits(tree, node, elementName, elementName) + + subPhrase.set("statDict", statDict) + + node.set("descriptor", self.phrase_descriptor(tree, node, elementName, elementName)) + node.set("firstElement", elementInfo) + node.set("elementInfo", elementInfo) + node.set("elementInfoList", [elementInfo]) + node.set("elementName", elementName) + node.set("setUpMethod", None) + node.set("doneList", [None]) + return tree, node + + def _applyRanges(self, tree, node, statDict, elementName, elementType): + if elementType == self.VECTOR(): + speed, dir = self.getStats(statDict, elementName) + min, max = speed + #print "IN _applyRanges min, max", min, max, elementName + min, max = self.applyRanges(tree, node, min, max, elementName) + statDict[elementName] = (min, max), dir + elif elementType == self.SCALAR(): + min, max = self.getStats(statDict, elementName) + #print "min, max", min, max, elementName + min, max = self.applyRanges(tree, node, min, max, elementName) + statDict[elementName] = 
(min, max) + elif elementType == self.WEATHER(): + return + + # Overriding from TextUtils + def getLimits(self, element): + parmID = self.getParmID(element, self._databaseID) + gridParmInfo = self._ifpClient.getGridParmInfo(parmID) + return gridParmInfo.getMinValue(), gridParmInfo.getMaxValue() + +############################################################################################## +####################### Edit Area support methods ########################################## +############################################################################################## + + def _getEditArea(self, editAreaName): + # Returns an AFPS.ReferenceData object given an edit area name + # as defined in the GFE + + eaMask = self._eaUtils.fetchEditArea(editAreaName) + # Convert to standard Edit Area type + editArea = self.decodeEditArea(eaMask) + + return editArea + + def _editAreaToMask(self, editArea): + + grid = editArea.getGrid().getNDArray().astype(numpy.bool8) + + return grid +# return editArea.getGrid().__numpy__[0].astype(numpy.bool8) + + + def _maskToEditArea(self, mask): + # Returns a refData object for the given mask + from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit + from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID + + nx = mask.shape[1] + ny = mask.shape[0] + bytes = mask.astype('int8') + grid = Grid2DBit.createBitGrid(nx, ny, bytes) + + return ReferenceData(self._gridLoc, ReferenceID("test"), grid) + + def _setActiveEditArea(self, area): + + self._dataMgr.getRefManager().setActiveRefSet(area) + return + + def _saveEditArea(self, editAreaName, refData): + # Saves the AFPS.ReferenceData object with the given name + + from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID + refID = ReferenceID(editAreaName) + refData = ReferenceData(refData.getGloc(), refID, refData.getGrid()) + self._dataMgr.getRefManager().saveRefSet(refData) + + + def _processAreaOverlaps(self, areas): + 
# Taking care of "donuts" + gridSize = (self._gridLoc.gridSize().y, self._gridLoc.gridSize().x) + + sumMask = numpy.zeros(gridSize, bool) + for area in areas: + mask = self._eaUtils.fetchEditArea(area.areaName) + ea = self._maskToEditArea(mask) + + overlap = mask & sumMask + if sum(sum(overlap)): + newMask = numpy.bitwise_xor(mask, overlap) + newRefArea = self._maskToEditArea(newMask) + else: + newRefArea = ea + + self._saveEditArea(area.areaName+"Modified", newRefArea) + area.areaName = area.areaName+"Modified" + print("processOverlap...RefData", area.areaName, newRefArea) + sumMask = sumMask | mask + + return + + def _drawableElementOverlaps(self, areas, latLons): + for area in areas: + #ea = self._getEditArea(area.areaName) + eaMask = self._eaUtils.fetchEditArea(area.areaName) + ea = self._maskToEditArea(eaMask) + + polygons = ea.getPolygons(ReferenceData.CoordinateType.LATLON) + coords = polygons.getCoordinates() + for c in coords: + for lat, lon in latLons: + if self._close((c.x, c.y), (lat, lon)): + return True + return False + + def _close(self, xxx_todo_changeme, xxx_todo_changeme1): + (lat1, lon1) = xxx_todo_changeme + (lat2, lon2) = xxx_todo_changeme1 + distanceThreshold = 300 # km + distance = acos(sin(lat1)*sin(lat2)+cos(lat1)*cos(lat2)*cos(lon2-lon1)) * 6371 + if distance < distanceThreshold: + return True + else: + return False + + def _proximity(self, drawable1, drawable2): + for lat1, lon1 in drawable1.latLons: + for lat2, lon2 in drawable2.latLons: + if self._close((lat1, lon1), (lat2, lon2)): + return True + return False + + + + + +############################################################################################## +####################### END Edit Area support methods ########################################## +############################################################################################## diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HighSeas_AT2.py 
b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HighSeas_AT2.py index 2fd16ce144..d813f4beb1 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HighSeas_AT2.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/HighSeas_AT2.py @@ -1,74 +1,74 @@ -# --------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without -# technical support, and with no warranty, express or implied, as to -# its usefulness for any purpose. -# --------------------------------------------------------------------- -## -# HighSeas -# -# This file should not be edited by the site. -# Site changes should go in HighSeas_Overrides for methods and -# HighSeas_Definition to set up Product -# Definition Settings. -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ----------- ---------- ----------- -------------------------- -# 12/20/2017 DCS17686 tlefebvre Initial baseline version. -# -## -# --------------------------------------------------------------------- - -import HSF -import sys, copy, types, string - -# Construct the names of the definition and override TextUtilities -siteDefinition = "HighSeas_AT2_Definition" -siteOverrides = "HighSeas_AT2_Overrides" - -# Import the local site's Product Definition specifications -exec "import "+siteDefinition - -# Import the local site's Overrides -exec "import "+siteOverrides - -# Import Regional Overrides -#exec "import "+regionOverrides - -# Patches -import Patch_Overrides - - -# These statements get the class object for the region and site overrides class -# The class and the module name (the file name) must be the same! 
- -siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] - -# Get the region and site definitions into a known variable name -exec "localDefinition = " + siteDefinition + ".Definition" - -class TextProduct( - siteOverrides_object, - Patch_Overrides.Patch_Overrides, - HSF.TextProduct - ): - Definition = copy.deepcopy(HSF.TextProduct.Definition) - - - # Get the Site Definition Settings - for key in localDefinition.keys(): - Definition[key] = localDefinition[key] - - # Get the VariableList if overridden in Site - try: - exec "VariableList = "+siteDefinition+".VariableList" - except: - pass - - # Definition overrides should go in OFF_NH2_NT4 Definition - # but may be put here for testing. - # Most common would be the need to set a unique display name - - - def __init__(self): - HSF.TextProduct.__init__(self) +# --------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without +# technical support, and with no warranty, express or implied, as to +# its usefulness for any purpose. +# --------------------------------------------------------------------- +## +# HighSeas +# +# This file should not be edited by the site. +# Site changes should go in HighSeas_Overrides for methods and +# HighSeas_Definition to set up Product +# Definition Settings. +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ----------- ---------- ----------- -------------------------- +# 12/20/2017 DCS17686 tlefebvre Initial baseline version. 
+# +## +# --------------------------------------------------------------------- + +import HSF +import sys, copy, types, string + +# Construct the names of the definition and override TextUtilities +siteDefinition = "HighSeas_AT2_Definition" +siteOverrides = "HighSeas_AT2_Overrides" + +# Import the local site's Product Definition specifications +exec("import "+siteDefinition) + +# Import the local site's Overrides +exec("import "+siteOverrides) + +# Import Regional Overrides +#exec "import "+regionOverrides + +# Patches +import Patch_Overrides + + +# These statements get the class object for the region and site overrides class +# The class and the module name (the file name) must be the same! + +siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] + +# Get the region and site definitions into a known variable name +exec("localDefinition = " + siteDefinition + ".Definition") + +class TextProduct( + siteOverrides_object, + Patch_Overrides.Patch_Overrides, + HSF.TextProduct + ): + Definition = copy.deepcopy(HSF.TextProduct.Definition) + + + # Get the Site Definition Settings + for key in list(localDefinition.keys()): + Definition[key] = localDefinition[key] + + # Get the VariableList if overridden in Site + try: + exec("VariableList = "+siteDefinition+".VariableList") + except: + pass + + # Definition overrides should go in OFF_NH2_NT4 Definition + # but may be put here for testing. 
+ # Most common would be the need to set a unique display name + + + def __init__(self): + HSF.TextProduct.__init__(self) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/LE_Test_Local.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/LE_Test_Local.py index 91b0e5c8e7..bdb655352b 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/LE_Test_Local.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/LE_Test_Local.py @@ -1,847 +1,847 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# Local_Effects_Test_Local -# Local customizations for AreaFcst as Base class to test Local Effects -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import AreaFcst -import string -import TextRules -import types -import copy - -class TextProduct(AreaFcst.TextProduct): - Definition = copy.deepcopy(AreaFcst.TextProduct.Definition) - Definition['displayName'] = "None" - - # REQUIRED CONFIGURATION ITEMS - Definition['displayName'] = "TEST_LocalEffectsTest" - #Definition["outputFile"] = "/awips/GFESuite/products/TEXT/ZFP.txt" - - # Header configuration items - #Definition["productName"] = "ZONE FORECAST PRODUCT" # name of product - #Definition["fullStationID"] = "Kxxx" # full station identifier (4letter) - #Definition["wmoID"] = "FOUS45" # WMO ID - #Definition["pil"] = "ZFPxxx" # product pil - #Definition["areaName"] = "STATENAME" # Name of state, such as "GEORGIA" - #Definition["wfoCity"] = "WfoCity" # Location of WFO - city name - #Definition["wfoState"] = "WfoState" # Location of WFO - state name - - # OPTIONAL CONFIGURATION ITEMS - Definition["defaultEditAreas"] = [ - ("area3", "Area 3"), - # ("area2", "Area 2"), - ] - - Definition["windLE_list"] = 1 - Definition["tempLE_list"] = 1 - Definition["Period_1_version"] = 1 - Definition["tempLE_method"] = 1 - - #Definition["directiveType"] = "C11" - #Definition["directiveType"] = "10-503" # Can be "C11" - #Definition["includeFloodingQuestion"] = 1 # Set to 1 to include flooding question - - #Definition["includeMultipleElementTable"] = 1 # Will include a TempPoPTable - #Definition["cityDictionary"] = "CityDictionary" # For TempPoPTable - - #Definition["areaDictionary"] = "AreaDictionary" # For product headers - #Definition["language"] = "english" - - # Apply to C11 only: - 
#Definition["includeExtended"] = 1 # To include extended forecast - #Definition["extendedLabel"] = 1 # To include extended label - #Definition["includeEveningPeriod"] = 0 # To turn off evening period - - #Definition["includeMultipleElementTable"] = 1 # Will include a MultipleElementTable - # Uncomment just one elementList below - #Definition["elementList"] = ["Temp", "PoP"] # Default - #Definition["elementList"] = ["Temp", "Humidity", "PoP"] - #Definition["singleValueFormat"] = 1 # Default is 0 - - # Sampling Performance - #Definition["sampleFromServer"] = 1 # If 1, sample directly from server - # Trouble-shooting items - #Definition["passLimit"] = 20 # Limit on passes allowed through - # Narrative Tree - #Definition["trace"] = 1 # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - #Definition["debug"] = 1 - - def __init__(self): - AreaFcst.TextProduct.__init__(self) - - # OPTIONAL OVERRIDES - #def DAY(self): - # return 6 - #def NIGHT(self): - # return 18 - - # The thresholds and variables included here were selected because - # they are commonly overridden for your product. - # See the Text Product User Guide for other thresholds and variables - # that may be relevant to your product and for more information - # about the ones included here. 
- - def phrase_descriptor_dict(self, tree, node): - # Descriptors for phrases - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - # Uncomment this line for invoking areal or chance pop descriptor - #dict["PoP"] = self.areal_or_chance_pop_descriptor, - return dict - - def pop_lower_threshold(self, tree, node): - # Pop values below this amount will not be reported - return 20 - - def pop_upper_threshold(self, tree, node): - # Pop values above this amount will not be reported - return 70 - - def pop_wx_lower_threshold(self, tree, node): - # Pop-related Wx will not be reported if Pop is below this threshold - return 20 - - def pop_sky_lower_threshold(self, tree, node): - # Sky condition will not be reported if Pop is above this threshold - return 60 - - def pop_snow_lower_threshold(self, tree, node): - # Snow accumulation will not be reported if Pop is below this threshold - return 60 - - def pop_snowLevel_upper_threshold(self, tree, node): - # Snow level will be reported if Pop is above this threshold - return 60 - - def snowLevel_maximum_phrase(self, tree, node): - # This returns the maximum snow level value to be reported and the - # the corresponding snow level phrase. 
It can be set up by - # edit area as follows: - # editAreaList = [ - # ("area1", 8000, "above 8000 feet"), - # ("area2", 6000, "above 6000 feet"), - # # Don't mention snow level at all in area3: - # ("area3", 0, ""), - # ] - #maxElev = 0 - #phrase = "" - #for area, elev, elevPhrase in editAreaList: - # if self.currentAreaContains(tree, [area]): - # if elev > maxElev: - # maxElev = elev - # phrase = elevPhrase - #return (maxElev, phrase) - return (8000, "above 8000 feet") - - def null_nlValue_dict(self, tree, node): - # Threshold for reporting null values - # Units depend on the element and product - dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) - dict["Wind"] = 5 - return dict - - def first_null_phrase_dict(self, tree, node): - # Phrase to use if values THROUGHOUT the period or - # in the first period are Null (i.e. below threshold OR NoWx) - # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. - dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) - dict["Wind"] = "light winds" - return dict - - def null_phrase_dict(self, tree, node): - # Phrase to use for null values in subPhrases other than the first - # Can be an empty string - # E.g. "NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" - dict = TextRules.TextRules.null_phrase_dict(self, tree, node) - dict["Wind"] = "light" - dict["Wx"] = "" - return dict - - def increment_nlValue_dict(self, tree, node): - # Increment for rounding values - # Units depend on the product - dict = TextRules.TextRules.increment_nlValue_dict(self, tree, node) - dict["Wind"] = 5 - return dict - - def vector_mag_difference_dict(self, tree, node): - # Replaces WIND_THRESHOLD - # Magnitude difference. If the difference between magnitudes - # for the first and second half of a period is greater than this value, - # the different magnitudes will be noted in the phrase. 
- # Units can vary depending on the element - dict = TextRules.TextRules.vector_mag_difference_dict(self, tree, node) - dict["Wind"] = 10 - return dict - - def scalar_difference_nlValue_dict(self, tree, node): - # Scalar difference. If the difference between scalar values - # for 2 sub-periods is greater than this value, - # the different values will be noted in the phrase. - return { - "WindGust": 10, # knots or mph depending on product - "Period": 5, # seconds - "PoP": 10, # percentage - } - - def lake_wind_areaNames(self, tree, node): - # Return list of edit area names for which the lake_wind_phrase - # should be generated - # If you want the phrase potentially generated for all zones, use: - # return ["ALL"] - return [] - - def useWindsForGusts_flag(self, tree, node): - # Turn this on if you want to use the maximum Wind - # for reporting Gusts if a WindGust grid is not found - return 0 - - def maximum_range_nlValue_dict(self, tree, node): - # Maximum range to be reported within a vector phrase - # e.g. 5 to 10 mph - # Units depend on the product - dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) - #dict["MaxT"] = 15 - #dict["MinT"] = 15 - return dict - - def minimum_range_nlValue_dict(self, tree, node): - # This threshold is the "smallest" min/max difference allowed between values reported. - # For example, if threshold is set to 5 for "MaxT", and the min value is 45 - # and the max value is 46, the range will be adjusted to at least a 5 degree - # range e.g. 43-48. 
These are the values that are then submitted for phrasing - # such as: - # HIGHS IN THE MID 40S - dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) - #dict["MaxT"] = 5 - #dict["MinT"] = 5 - return dict - - def range_threshold_nlValue_dict(self, tree, node): - # Range for reporting temperature ranges in temp_range_phrase - # e.g HIGHS 80 TO 85 - dict = TextRules.TextRules.range_threshold_nlValue_dict(self, tree, node) - dict["MaxT"] = 5 - dict["MinT"] = 5 - dict["MinRH"] = 5 - dict["MaxRH"] = 5 - dict["WindChill"] = 5 - dict["HeatIndex"] = 5 - return dict - - def temp_trend_nlValue(self, tree, node): - # THRESHOLD FOR REPORTING TEMPERATURE TRENDS - return 20.0 - - def stdDev_dict(self, parmHisto, timeRange, componentName): - # This dictionary defines the low and high limit at which - # outliers will be removed when calculating stdDev stats. - # These tuples represent the (low, high) number of standard - # deviations. Any values falling outside this range will - # not be included in the calculated statistic. 
- return { - "LAL": (1.0, 1.0), - "MinRH": (1.0, 1.0), - "MaxRH": (1.0, 1.0), - "MinT": (1.0, 1.0), - "MaxT": (1.0, 1.0), - "Haines": (1.0, 1.0), - "PoP" : (1.0, 1.0), - "T" : (1.0, 1.0), - "Wind" : (1.0, 1.0), - } - - def value_connector_dict(self, tree, node): - dict = TextRules.TextRules.value_connector_dict(self, tree, node) - dict["MaxT"] = " to " - dict["MinT"] = " to " - return dict - - def windChillTemp_difference(self, tree, node): - # Difference between wind chill and temperature - # for reporting wind chill - return 5 - - def heatIndexTemp_difference(self, tree, node): - # Difference between heat index and temperature - # for reporting heat index - return 5 - - - def Period_1(self): - exec "value = self.Period_1_version" + `self._Period_1_version` + "()" - return value - - def Period_1_version1(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [3]), - ("Sky", self.binnedPercent, [6]), - ("PoP", self._PoP_analysisMethod("Period_1"), [3]), - ("PoP", self.binnedPercent, [3]), - ("SnowAmt", self.accumMinMax), - ("IceAccum", self.accumMinMax), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("WindGust", self.maximum, [6]), - ("Wx", self.rankedWx, [3]), - ], - "phraseList":[ -## self.skyPopWx_phrase, -## (self.skyPopWx_phrase, self._wxLocalEffects_list()), -## self.sky_phrase, -## self.wind_summary, -## self.reportTrends, -## self.weather_phrase, -## (self.weather_phrase, self._wxLE_list), - -## (self.weather_phrase,self._wxLocalEffects_list()), -## (self.snow_phrase,self._snowAmtLocalEffects_list()), -## (self.total_snow_phrase,self._totalSnowAmtLocalEffects_list()), - - (self.highs_phrase, self._tempLocalEffects_list()), -## (self.highs_phrase, self._tempLocalEffects_method), - - 
(self.wind_withGusts_phrase, self._windLocalEffects_list()), - -## self.popMax_phrase, - ], -## "additionalAreas": [ -## # Areas listed by weather element that will be -## # intersected with the current area then -## # sampled and analysed. -## # E.g. used in local effects methods. -## ("MaxT", ["area2", "area1"]), -## ], - "intersectAreas": [ - # Areas listed by weather element that will be - # intersected with the current area then - # sampled and analysed. - # E.g. used in local effects methods. - ("MaxT", ["AboveElev", "BelowElev"]), - ("Wind", ["AboveElev", "BelowElev"]), - ("WindGust", ["AboveElev", "BelowElev"]), - ("SnowAmt", ["AboveElev", "BelowElev"]), - ("Wx", ["AboveElev", "BelowElev"]), - ("PoP", ["AboveElev", "BelowElev"]), - ], - } - - def TotalSnowSampling(self): - return { - "type": "component", - "methodList": [self.noWords], - "analysisList": [ - ("SnowAmt", self.accumMinMax), - ], - "phraseList":[], - "intersectAreas": [ - ("SnowAmt", ["AboveElev", "BelowElev"]), - ], - } - - def _wxLE_list(self, tree, node): - leArea5 = self.LocalEffectArea("BelowElev", "") - leArea6 = self.LocalEffectArea("AboveElev", "along major rivers", intersectFlag=1) - return [self.LocalEffect([leArea5, leArea6], 0, ", except ")] - - def _tempLocalEffects_list(self): - exec "value = self._tempLocalEffects_list" + `self._tempLE_list` + "()" - return value - - def _tempLocalEffects_list1(self): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", "in the mountains") - return [self.LocalEffect([leArea1, leArea2], 8, ", except ")] - - def _windLocalEffects_list(self): - exec "value = self._windLocalEffects_list" + `self._windLE_list` + "()" - return value - - def _windLocalEffects_list1(self): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", "in the mountains") - return [self.LocalEffect([leArea1, leArea2], 10, ", except ")] - - def Period_2_3(self): - # No Lake Wind phrase - return { - 
"type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "intersectAreas": [ - # Areas listed by weather element that will be - # intersected with the current area then - # sampled and analysed. - # E.g. used in local effects methods. - ("SnowAmt", ["AboveElev", "BelowElev"]), - ("Wx", ["AboveElev", "BelowElev"]), - ("PoP", ["AboveElev", "BelowElev"]), - ], - "analysisList": [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [6]), - ("PoP", self._PoP_analysisMethod("Period_2_3"), [6]), - ("PoP", self.binnedPercent, [6]), - ("SnowAmt", self.accumMinMax), - ("IceAccum", self.accumMinMax), - ("SnowLevel", self.avg), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("WindGust", self.maximum, [6]), - ("Wx", self.rankedWx, [6]), - ("WindChill", self.minMax), - ("HeatIndex", self.minMax), - ], - "phraseList":[ - self.sky_phrase, - self.wind_summary, - self.reportTrends, - (self.weather_phrase,self._wxLocalEffects_list()), - #self.weather_phrase, - self.severeWeather_phrase, - (self.snow_phrase,self._snowAmtLocalEffects_list()), - #self.snow_phrase, - (self.total_snow_phrase,self._totalSnowAmtLocalEffects_list()), - self.snowLevel_phrase, - self.highs_phrase, - self.lows_phrase, - #self.highs_range_phrase, - #self.lows_range_phrase, - self.temp_trends, - self.wind_withGusts_phrase, -# self.lake_wind_phrase, - self.popMax_phrase, - self.windChill_phrase, - self.heatIndex_phrase, - ], - } - - def Period_4_5(self): - # Descriptive snow phrase - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", 
self.median, [6]), - ("PoP", self._PoP_analysisMethod("Period_4_5"), [6]), - ("PoP", self.binnedPercent, [6]), - ("SnowAmt", self.accumMinMax), - ("IceAccum", self.accumMinMax), - ("SnowLevel", self.avg), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("Wx", self.rankedWx, [6]), - ("WindChill", self.minMax), - ("HeatIndex", self.minMax), - ], - "phraseList":[ - self.sky_phrase, - self.wind_summary, - self.reportTrends, - (self.weather_phrase,self._wxLocalEffects_list()), - #self.weather_phrase, - self.severeWeather_phrase, - self.snow_phrase, - self.total_snow_phrase, - self.snowLevel_phrase, - self.highs_phrase, - self.lows_phrase, - #self.highs_range_phrase, - #self.lows_range_phrase, - self.temp_trends, - self.wind_withGusts_phrase, - self.popMax_phrase, - self.windChill_phrase, - self.heatIndex_phrase, - ], - "intersectAreas": [ - # Areas listed by weather element that will be - # intersected with the current area then - # sampled and analysed. - # E.g. used in local effects methods. 
- ("SnowAmt", ["AboveElev", "BelowElev"]), - ("Wx", ["AboveElev", "BelowElev"]), - ("PoP", ["AboveElev", "BelowElev"]), - ], - } - - def _snowAmtLocalEffects_list(self): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", "above timberline") - return [self.LocalEffect([leArea1, leArea2], 2, ", except ")] - - def _totalSnowAmtLocalEffects_list(self): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", "above timberline") - return [self.LocalEffect( - [leArea1, leArea2], self._checkTotalSnow, ", except ")] - - def _checkTotalSnow(self, tree, node, localEffect, leArea1Label, leArea2Label): - totalSnow1 = self.getTotalSnow(tree, node, leArea1Label) - totalSnow2 = self.getTotalSnow(tree, node, leArea2Label) - if totalSnow1 is None or totalSnow2 is None: - return 0 - if type(totalSnow1) is types.TupleType: - min,totalSnow1 = totalSnow1 - if type(totalSnow2) is types.TupleType: - min,totalSnow2 = totalSnow2 - if abs(totalSnow1 - totalSnow2) > 3: - return 1 - return 0 - - def _wxLocalEffects_list(self): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", "above timberline") - return [self.LocalEffect([leArea1, leArea2], 10, ", except ")] - - def _10_503_issuance_list(self, argDict): - seriesDefAM = [ - ("Period_1", "period1"), -## ("Period_2_3", 12), ("Period_2_3", 12), ("Period_4_5", 12), ("Period_4_5", 12), -## ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), -## ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ] - seriesDefPM = [ - ("Period_1", "period1"), - ("Period_2_3", 12), ("Period_2_3", 12), - ("Period_4_5", 12), ("Period_4_5", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), - ] - - return [ - ("Morning", 
self.DAY(), self.NIGHT(), self.NIGHT(), - ".TODAY...", "early in the morning", "late in the afternoon", - 1, seriesDefAM), - ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), self.NIGHT(), - ".TODAY...", "early in the morning", "late in the afternoon", - 1, seriesDefAM), - ("Morning Update", "issuanceHour", self.NIGHT(), self.NIGHT(), - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, seriesDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), - ".REST OF TODAY...", "early in the morning","late in the afternoon", - 1, seriesDefAM), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), - ".TONIGHT...", "late in the night", "early in the evening", - 1, seriesDefPM), - ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + self.DAY(), - ".TONIGHT...", "late in the night", "early in the evening", - 1, seriesDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - ".REST OF TONIGHT...", "early in the morning","early in the evening", - 1, seriesDefPM), - # For the early morning update, this produces: - # REST OF TONIGHT: - # MONDAY - # MONDAY NIGHT - ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), - ".REST OF TONIGHT...", "early in the morning","late in the afternoon", - 0, seriesDefPM), - # Alternative - # For the early morning update, this produces: - # EARLY THIS MORNING: - # TODAY - # TONIGHT - #("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - # ".REST OF TONIGHT...", "late in the night", "early in the evening", - # 1, seriesDefPM), - #("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), - # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", - # 1, seriesDefPM), - ] - -## def checkThreshold(self, tree, node, triggerMethod, leArea1, leArea2): -## # Return 1 if the difference between leArea1 and leArea2 stats is -## # greater than the threshold -## # Handles 
stats that are a min/max or a singleValue -## leArea1Label = self.getLeAreaLabel(tree, node, leArea1) -## leArea2Label = self.getLeAreaLabel(tree, node, leArea2) -## if type(triggerMethod) is types.MethodType: -## flag = triggerMethod(tree, node, node.get("localEffect"), leArea1Label, leArea2Label) -## else: -## first = node.getAncestor("firstElement") -## element = first.name -## dataType = first.dataType -## if dataType == self.WEATHER(): -## mergeMethod = "Average" -## else: -## mergeMethod = "MinMax" -## timeRange = node.getTimeRange() -## area1Stats = tree.stats.get(element, timeRange, leArea1Label, -## mergeMethod=mergeMethod) -## area2Stats = tree.stats.get(element, timeRange, leArea2Label, -## mergeMethod=mergeMethod) -## print "\nLocal effects", element, timeRange -## print leArea1Label, area1Stats -## print leArea2Label, area2Stats -## if area1Stats is None or area2Stats is None: -## return 0 -## flag = self.checkLocalEffectDifference( -## tree, node, dataType, triggerMethod, area1Stats, area2Stats, -## leArea1Label, leArea2Label) -## print "returning", flag -## return flag - - - -################################################### -#### TEST OVERRIDE SECTION - - # EXCEPT VS OTHERWISE WORDING: - - def _windLocalEffects_list2(self): - leArea1 = self.LocalEffectArea("BelowElev", "", "in the valleys") - leArea2 = self.LocalEffectArea("AboveElev", "in the mountains") - return [self.LocalEffect([leArea2, leArea1], 10, ", otherwise ")] - - # USING A METHOD TO SPECIFY LOCAL EFFECT AREAS: - - def _tempLocalEffects_list2(self): - return [self.LocalEffect(self._getTempLeAreas, 8, ", except ")] - - def _getTempLeAreas(self, tree, node): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", "in the mountains") - return [leArea1, leArea2] - - # LOCAL EFFECT AREAS THAT DO NOT INTERSECT THE CURRENT AREA - - def Period_1_version2(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - 
self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [3]), - ("Sky", self.binnedPercent, [6]), - ("PoP", self._PoP_analysisMethod("Period_1"), [3]), - ("PoP", self.binnedPercent, [3]), - ("SnowAmt", self.minMax), - ("IceAmt", self.minMax), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("WindGust", self.maximum, [6]), - ("Wx", self.rankedWx, [3]), - ], - "phraseList":[ -## self.skyPopWx_phrase, -## (self.skyPopWx_phrase, self._wxLocalEffects_list()), -## self.sky_phrase, -## self.wind_summary, -## self.reportTrends, -## self.weather_phrase, -## (self.weather_phrase,self._wxLocalEffects_list()), -## self.severeWeather_phrase, -## -## (self.snow_phrase,self._snowAmtLocalEffects_list()), -## (self.total_snow_phrase,self._totalSnowAmtLocalEffects_list()), - -## (self.highs_phrase, self._tempLocalEffects_list()), - (self.highs_phrase, self._tempLocalEffects_method), - -## (self.wind_withGusts_phrase, self._windLocalEffects_list()), - -## self.popMax_phrase, - ], - "additionalAreas": [ - # Areas listed by weather element that will be - # intersected with the current area then - # sampled and analysed. - # E.g. used in local effects methods. - ("MaxT", ["area2", "area1"]), - ], - "intersectAreas": [ - # Areas listed by weather element that will be - # intersected with the current area then - # sampled and analysed. - # E.g. used in local effects methods. 
- ("MaxT", ["AboveElev", "BelowElev"]), - ("Wind", ["AboveElev", "BelowElev"]), - ("WindGust", ["AboveElev", "BelowElev"]), - ("SnowAmt", ["AboveElev", "BelowElev"]), - ("Wx", ["AboveElev", "BelowElev"]), - ("PoP", ["AboveElev", "BelowElev"]), - ], - } - - def _tempLocalEffects_method(self, tree, node): - exec "value = self._tempLocalEffects_method" + `self._tempLE_method` + "(tree, node)" - return value - - def _tempLocalEffects_method1(self, tree, node): - if self.currentAreaContains(tree, ["area3"]): - leArea1 = self.LocalEffectArea( - "__Current__","",intersectFlag=0) - leArea2 = self.LocalEffectArea("area1", - "in the rush valley", intersectFlag=0) - leArea3 = self.LocalEffectArea( - "area2", "in the benches",intersectFlag=0) - return [ - self.LocalEffect([leArea1, leArea2], 5, ", except "), - self.LocalEffect([leArea1, leArea3], 5, ", except "), - ] - else: - return [] - - def _tempLocalEffects_method2(self, tree, node): - if self.currentAreaContains(tree, ["area3"]): - leArea1 = self.LocalEffectArea( - "__Current__","in the city",intersectFlag=0) - leArea2 = self.LocalEffectArea("area1", - "in the rush valley", intersectFlag=0) - leArea3 = self.LocalEffectArea( - "area2", "in the benches",intersectFlag=0) - return [self.LocalEffect([leArea1, leArea2, leArea3], 5, ", and ")] - #return [ - # self.LocalEffect([leArea1,leArea2],5,", except "), - # self.LocalEffect([leArea1,leArea3],5,", except "), - # ] - else: - return [] - - - def Period_1_version3(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("Sky", self.median, [3]), - ("PoP", self._PoP_analysisMethod("Period_1"), [3]), - ("PoP", self.binnedPercent, [3]), - ("Wx", self.rankedWx, [3]), - ], - "phraseList":[ - (self.sky_phrase, self._skyLocalEffects_list()), - (self.skyPopWx_phrase, self._skyPopWxLocalEffects_list()), - (self.weather_phrase,self._wxLocalEffects_list()), - (self.popMax_phrase, 
self._popLocalEffects_list()), - ], - "additionalAreas": [ - # Areas listed by weather element that will be - # intersected with the current area then - # sampled and analysed. - # E.g. used in local effects methods. - ("Sky", ["area2", "area1"]), - ("Wx", ["area2", "area1"]), - ("PoP", ["area2", "area1"]), - ], - } - - def _skyLocalEffects_list(self): - leArea1 = self.LocalEffectArea( - "__Current__","in the city",intersectFlag=0) - leArea2 = self.LocalEffectArea( - "area1","in the rush valley", intersectFlag=0) - leArea3 = self.LocalEffectArea( - "area2", "in the benches",intersectFlag=0) - return [ - self.LocalEffect([leArea1, leArea2, leArea3], self.checkSkyDifference, ", "), - ] - - def _wxLocalEffects_list(self): - leArea1 = self.LocalEffectArea( - "__Current__","in the city",intersectFlag=0) - leArea2 = self.LocalEffectArea( - "area1","in the rush valley", intersectFlag=0) - leArea3 = self.LocalEffectArea( - "area2", "in the benches",intersectFlag=0) - return [ - self.LocalEffect([leArea1, leArea2, leArea3], 0, ", "), - ] - - def _popLocalEffects_list(self): - leArea1 = self.LocalEffectArea( - "__Current__","in the city",intersectFlag=0) - leArea2 = self.LocalEffectArea( - "area1","in the rush valley", intersectFlag=0) - leArea3 = self.LocalEffectArea( - "area2", "in the benches",intersectFlag=0) - return [ - self.LocalEffect([leArea1, leArea2, leArea3], 20, ", "), - ] - - def _skyPopWxLocalEffects_list(self): - leArea1 = self.LocalEffectArea( - "__Current__","in the city",intersectFlag=0) - leArea2 = self.LocalEffectArea( - "area1","in the rush valley", intersectFlag=0) - leArea3 = self.LocalEffectArea( - "area2", "in the benches",intersectFlag=0) - return [ - self.LocalEffect([leArea1, leArea2, leArea3], self.checkSkyWxDifference, ", "), - ] +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# 
further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Local_Effects_Test_Local +# Local customizations for AreaFcst as Base class to test Local Effects +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +import AreaFcst +import string +import TextRules +import types +import copy + +class TextProduct(AreaFcst.TextProduct): + Definition = copy.deepcopy(AreaFcst.TextProduct.Definition) + Definition['displayName'] = "None" + + # REQUIRED CONFIGURATION ITEMS + Definition['displayName'] = "TEST_LocalEffectsTest" + #Definition["outputFile"] = "/awips/GFESuite/products/TEXT/ZFP.txt" + + # Header configuration items + #Definition["productName"] = "ZONE FORECAST PRODUCT" # name of product + #Definition["fullStationID"] = "Kxxx" # full station identifier (4letter) + #Definition["wmoID"] = "FOUS45" # WMO ID + #Definition["pil"] = "ZFPxxx" # product pil + #Definition["areaName"] = "STATENAME" # Name of state, such as "GEORGIA" + #Definition["wfoCity"] = "WfoCity" # Location of WFO - city name + #Definition["wfoState"] = "WfoState" # Location of WFO - state name + + # OPTIONAL CONFIGURATION ITEMS + Definition["defaultEditAreas"] = [ + ("area3", "Area 3"), + # ("area2", "Area 2"), + ] + + Definition["windLE_list"] = 1 + Definition["tempLE_list"] = 1 + Definition["Period_1_version"] = 1 + Definition["tempLE_method"] = 1 + + #Definition["directiveType"] = "C11" + #Definition["directiveType"] = "10-503" # Can be "C11" + #Definition["includeFloodingQuestion"] = 1 # Set to 1 to include flooding question + + 
#Definition["includeMultipleElementTable"] = 1 # Will include a TempPoPTable + #Definition["cityDictionary"] = "CityDictionary" # For TempPoPTable + + #Definition["areaDictionary"] = "AreaDictionary" # For product headers + #Definition["language"] = "english" + + # Apply to C11 only: + #Definition["includeExtended"] = 1 # To include extended forecast + #Definition["extendedLabel"] = 1 # To include extended label + #Definition["includeEveningPeriod"] = 0 # To turn off evening period + + #Definition["includeMultipleElementTable"] = 1 # Will include a MultipleElementTable + # Uncomment just one elementList below + #Definition["elementList"] = ["Temp", "PoP"] # Default + #Definition["elementList"] = ["Temp", "Humidity", "PoP"] + #Definition["singleValueFormat"] = 1 # Default is 0 + + # Sampling Performance + #Definition["sampleFromServer"] = 1 # If 1, sample directly from server + # Trouble-shooting items + #Definition["passLimit"] = 20 # Limit on passes allowed through + # Narrative Tree + #Definition["trace"] = 1 # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + #Definition["debug"] = 1 + + def __init__(self): + AreaFcst.TextProduct.__init__(self) + + # OPTIONAL OVERRIDES + #def DAY(self): + # return 6 + #def NIGHT(self): + # return 18 + + # The thresholds and variables included here were selected because + # they are commonly overridden for your product. + # See the Text Product User Guide for other thresholds and variables + # that may be relevant to your product and for more information + # about the ones included here. 
+ + def phrase_descriptor_dict(self, tree, node): + # Descriptors for phrases + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + # Uncomment this line for invoking areal or chance pop descriptor + #dict["PoP"] = self.areal_or_chance_pop_descriptor, + return dict + + def pop_lower_threshold(self, tree, node): + # Pop values below this amount will not be reported + return 20 + + def pop_upper_threshold(self, tree, node): + # Pop values above this amount will not be reported + return 70 + + def pop_wx_lower_threshold(self, tree, node): + # Pop-related Wx will not be reported if Pop is below this threshold + return 20 + + def pop_sky_lower_threshold(self, tree, node): + # Sky condition will not be reported if Pop is above this threshold + return 60 + + def pop_snow_lower_threshold(self, tree, node): + # Snow accumulation will not be reported if Pop is below this threshold + return 60 + + def pop_snowLevel_upper_threshold(self, tree, node): + # Snow level will be reported if Pop is above this threshold + return 60 + + def snowLevel_maximum_phrase(self, tree, node): + # This returns the maximum snow level value to be reported and the + # the corresponding snow level phrase. 
It can be set up by + # edit area as follows: + # editAreaList = [ + # ("area1", 8000, "above 8000 feet"), + # ("area2", 6000, "above 6000 feet"), + # # Don't mention snow level at all in area3: + # ("area3", 0, ""), + # ] + #maxElev = 0 + #phrase = "" + #for area, elev, elevPhrase in editAreaList: + # if self.currentAreaContains(tree, [area]): + # if elev > maxElev: + # maxElev = elev + # phrase = elevPhrase + #return (maxElev, phrase) + return (8000, "above 8000 feet") + + def null_nlValue_dict(self, tree, node): + # Threshold for reporting null values + # Units depend on the element and product + dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) + dict["Wind"] = 5 + return dict + + def first_null_phrase_dict(self, tree, node): + # Phrase to use if values THROUGHOUT the period or + # in the first period are Null (i.e. below threshold OR NoWx) + # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. + dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) + dict["Wind"] = "light winds" + return dict + + def null_phrase_dict(self, tree, node): + # Phrase to use for null values in subPhrases other than the first + # Can be an empty string + # E.g. "NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" + dict = TextRules.TextRules.null_phrase_dict(self, tree, node) + dict["Wind"] = "light" + dict["Wx"] = "" + return dict + + def increment_nlValue_dict(self, tree, node): + # Increment for rounding values + # Units depend on the product + dict = TextRules.TextRules.increment_nlValue_dict(self, tree, node) + dict["Wind"] = 5 + return dict + + def vector_mag_difference_dict(self, tree, node): + # Replaces WIND_THRESHOLD + # Magnitude difference. If the difference between magnitudes + # for the first and second half of a period is greater than this value, + # the different magnitudes will be noted in the phrase. 
+ # Units can vary depending on the element + dict = TextRules.TextRules.vector_mag_difference_dict(self, tree, node) + dict["Wind"] = 10 + return dict + + def scalar_difference_nlValue_dict(self, tree, node): + # Scalar difference. If the difference between scalar values + # for 2 sub-periods is greater than this value, + # the different values will be noted in the phrase. + return { + "WindGust": 10, # knots or mph depending on product + "Period": 5, # seconds + "PoP": 10, # percentage + } + + def lake_wind_areaNames(self, tree, node): + # Return list of edit area names for which the lake_wind_phrase + # should be generated + # If you want the phrase potentially generated for all zones, use: + # return ["ALL"] + return [] + + def useWindsForGusts_flag(self, tree, node): + # Turn this on if you want to use the maximum Wind + # for reporting Gusts if a WindGust grid is not found + return 0 + + def maximum_range_nlValue_dict(self, tree, node): + # Maximum range to be reported within a vector phrase + # e.g. 5 to 10 mph + # Units depend on the product + dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) + #dict["MaxT"] = 15 + #dict["MinT"] = 15 + return dict + + def minimum_range_nlValue_dict(self, tree, node): + # This threshold is the "smallest" min/max difference allowed between values reported. + # For example, if threshold is set to 5 for "MaxT", and the min value is 45 + # and the max value is 46, the range will be adjusted to at least a 5 degree + # range e.g. 43-48. 
These are the values that are then submitted for phrasing + # such as: + # HIGHS IN THE MID 40S + dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) + #dict["MaxT"] = 5 + #dict["MinT"] = 5 + return dict + + def range_threshold_nlValue_dict(self, tree, node): + # Range for reporting temperature ranges in temp_range_phrase + # e.g HIGHS 80 TO 85 + dict = TextRules.TextRules.range_threshold_nlValue_dict(self, tree, node) + dict["MaxT"] = 5 + dict["MinT"] = 5 + dict["MinRH"] = 5 + dict["MaxRH"] = 5 + dict["WindChill"] = 5 + dict["HeatIndex"] = 5 + return dict + + def temp_trend_nlValue(self, tree, node): + # THRESHOLD FOR REPORTING TEMPERATURE TRENDS + return 20.0 + + def stdDev_dict(self, parmHisto, timeRange, componentName): + # This dictionary defines the low and high limit at which + # outliers will be removed when calculating stdDev stats. + # These tuples represent the (low, high) number of standard + # deviations. Any values falling outside this range will + # not be included in the calculated statistic. 
+ return { + "LAL": (1.0, 1.0), + "MinRH": (1.0, 1.0), + "MaxRH": (1.0, 1.0), + "MinT": (1.0, 1.0), + "MaxT": (1.0, 1.0), + "Haines": (1.0, 1.0), + "PoP" : (1.0, 1.0), + "T" : (1.0, 1.0), + "Wind" : (1.0, 1.0), + } + + def value_connector_dict(self, tree, node): + dict = TextRules.TextRules.value_connector_dict(self, tree, node) + dict["MaxT"] = " to " + dict["MinT"] = " to " + return dict + + def windChillTemp_difference(self, tree, node): + # Difference between wind chill and temperature + # for reporting wind chill + return 5 + + def heatIndexTemp_difference(self, tree, node): + # Difference between heat index and temperature + # for reporting heat index + return 5 + + + def Period_1(self): + exec("value = self.Period_1_version" + repr(self._Period_1_version) + "()") + return value + + def Period_1_version1(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [3]), + ("Sky", self.binnedPercent, [6]), + ("PoP", self._PoP_analysisMethod("Period_1"), [3]), + ("PoP", self.binnedPercent, [3]), + ("SnowAmt", self.accumMinMax), + ("IceAccum", self.accumMinMax), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("WindGust", self.maximum, [6]), + ("Wx", self.rankedWx, [3]), + ], + "phraseList":[ +## self.skyPopWx_phrase, +## (self.skyPopWx_phrase, self._wxLocalEffects_list()), +## self.sky_phrase, +## self.wind_summary, +## self.reportTrends, +## self.weather_phrase, +## (self.weather_phrase, self._wxLE_list), + +## (self.weather_phrase,self._wxLocalEffects_list()), +## (self.snow_phrase,self._snowAmtLocalEffects_list()), +## (self.total_snow_phrase,self._totalSnowAmtLocalEffects_list()), + + (self.highs_phrase, self._tempLocalEffects_list()), +## (self.highs_phrase, 
self._tempLocalEffects_method), + + (self.wind_withGusts_phrase, self._windLocalEffects_list()), + +## self.popMax_phrase, + ], +## "additionalAreas": [ +## # Areas listed by weather element that will be +## # intersected with the current area then +## # sampled and analysed. +## # E.g. used in local effects methods. +## ("MaxT", ["area2", "area1"]), +## ], + "intersectAreas": [ + # Areas listed by weather element that will be + # intersected with the current area then + # sampled and analysed. + # E.g. used in local effects methods. + ("MaxT", ["AboveElev", "BelowElev"]), + ("Wind", ["AboveElev", "BelowElev"]), + ("WindGust", ["AboveElev", "BelowElev"]), + ("SnowAmt", ["AboveElev", "BelowElev"]), + ("Wx", ["AboveElev", "BelowElev"]), + ("PoP", ["AboveElev", "BelowElev"]), + ], + } + + def TotalSnowSampling(self): + return { + "type": "component", + "methodList": [self.noWords], + "analysisList": [ + ("SnowAmt", self.accumMinMax), + ], + "phraseList":[], + "intersectAreas": [ + ("SnowAmt", ["AboveElev", "BelowElev"]), + ], + } + + def _wxLE_list(self, tree, node): + leArea5 = self.LocalEffectArea("BelowElev", "") + leArea6 = self.LocalEffectArea("AboveElev", "along major rivers", intersectFlag=1) + return [self.LocalEffect([leArea5, leArea6], 0, ", except ")] + + def _tempLocalEffects_list(self): + exec("value = self._tempLocalEffects_list" + repr(self._tempLE_list) + "()") + return value + + def _tempLocalEffects_list1(self): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", "in the mountains") + return [self.LocalEffect([leArea1, leArea2], 8, ", except ")] + + def _windLocalEffects_list(self): + exec("value = self._windLocalEffects_list" + repr(self._windLE_list) + "()") + return value + + def _windLocalEffects_list1(self): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", "in the mountains") + return [self.LocalEffect([leArea1, leArea2], 10, ", except ")] + + def 
Period_2_3(self): + # No Lake Wind phrase + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "intersectAreas": [ + # Areas listed by weather element that will be + # intersected with the current area then + # sampled and analysed. + # E.g. used in local effects methods. + ("SnowAmt", ["AboveElev", "BelowElev"]), + ("Wx", ["AboveElev", "BelowElev"]), + ("PoP", ["AboveElev", "BelowElev"]), + ], + "analysisList": [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MinT", self.stdDevMinMax), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("Period_2_3"), [6]), + ("PoP", self.binnedPercent, [6]), + ("SnowAmt", self.accumMinMax), + ("IceAccum", self.accumMinMax), + ("SnowLevel", self.avg), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("WindGust", self.maximum, [6]), + ("Wx", self.rankedWx, [6]), + ("WindChill", self.minMax), + ("HeatIndex", self.minMax), + ], + "phraseList":[ + self.sky_phrase, + self.wind_summary, + self.reportTrends, + (self.weather_phrase,self._wxLocalEffects_list()), + #self.weather_phrase, + self.severeWeather_phrase, + (self.snow_phrase,self._snowAmtLocalEffects_list()), + #self.snow_phrase, + (self.total_snow_phrase,self._totalSnowAmtLocalEffects_list()), + self.snowLevel_phrase, + self.highs_phrase, + self.lows_phrase, + #self.highs_range_phrase, + #self.lows_range_phrase, + self.temp_trends, + self.wind_withGusts_phrase, +# self.lake_wind_phrase, + self.popMax_phrase, + self.windChill_phrase, + self.heatIndex_phrase, + ], + } + + def Period_4_5(self): + # Descriptive snow phrase + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MinT", self.stdDevMinMax), + ("MaxT", self.stdDevMinMax), + ("T", 
self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("Period_4_5"), [6]), + ("PoP", self.binnedPercent, [6]), + ("SnowAmt", self.accumMinMax), + ("IceAccum", self.accumMinMax), + ("SnowLevel", self.avg), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("Wx", self.rankedWx, [6]), + ("WindChill", self.minMax), + ("HeatIndex", self.minMax), + ], + "phraseList":[ + self.sky_phrase, + self.wind_summary, + self.reportTrends, + (self.weather_phrase,self._wxLocalEffects_list()), + #self.weather_phrase, + self.severeWeather_phrase, + self.snow_phrase, + self.total_snow_phrase, + self.snowLevel_phrase, + self.highs_phrase, + self.lows_phrase, + #self.highs_range_phrase, + #self.lows_range_phrase, + self.temp_trends, + self.wind_withGusts_phrase, + self.popMax_phrase, + self.windChill_phrase, + self.heatIndex_phrase, + ], + "intersectAreas": [ + # Areas listed by weather element that will be + # intersected with the current area then + # sampled and analysed. + # E.g. used in local effects methods. 
+ ("SnowAmt", ["AboveElev", "BelowElev"]), + ("Wx", ["AboveElev", "BelowElev"]), + ("PoP", ["AboveElev", "BelowElev"]), + ], + } + + def _snowAmtLocalEffects_list(self): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", "above timberline") + return [self.LocalEffect([leArea1, leArea2], 2, ", except ")] + + def _totalSnowAmtLocalEffects_list(self): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", "above timberline") + return [self.LocalEffect( + [leArea1, leArea2], self._checkTotalSnow, ", except ")] + + def _checkTotalSnow(self, tree, node, localEffect, leArea1Label, leArea2Label): + totalSnow1 = self.getTotalSnow(tree, node, leArea1Label) + totalSnow2 = self.getTotalSnow(tree, node, leArea2Label) + if totalSnow1 is None or totalSnow2 is None: + return 0 + if type(totalSnow1) is tuple: + min,totalSnow1 = totalSnow1 + if type(totalSnow2) is tuple: + min,totalSnow2 = totalSnow2 + if abs(totalSnow1 - totalSnow2) > 3: + return 1 + return 0 + + def _wxLocalEffects_list(self): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", "above timberline") + return [self.LocalEffect([leArea1, leArea2], 10, ", except ")] + + def _10_503_issuance_list(self, argDict): + seriesDefAM = [ + ("Period_1", "period1"), +## ("Period_2_3", 12), ("Period_2_3", 12), ("Period_4_5", 12), ("Period_4_5", 12), +## ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), +## ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ] + seriesDefPM = [ + ("Period_1", "period1"), + ("Period_2_3", 12), ("Period_2_3", 12), + ("Period_4_5", 12), ("Period_4_5", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), + ] + + return [ + ("Morning", self.DAY(), self.NIGHT(), 
self.NIGHT(), + ".TODAY...", "early in the morning", "late in the afternoon", + 1, seriesDefAM), + ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), self.NIGHT(), + ".TODAY...", "early in the morning", "late in the afternoon", + 1, seriesDefAM), + ("Morning Update", "issuanceHour", self.NIGHT(), self.NIGHT(), + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, seriesDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), + ".REST OF TODAY...", "early in the morning","late in the afternoon", + 1, seriesDefAM), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), + ".TONIGHT...", "late in the night", "early in the evening", + 1, seriesDefPM), + ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + self.DAY(), + ".TONIGHT...", "late in the night", "early in the evening", + 1, seriesDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + ".REST OF TONIGHT...", "early in the morning","early in the evening", + 1, seriesDefPM), + # For the early morning update, this produces: + # REST OF TONIGHT: + # MONDAY + # MONDAY NIGHT + ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), + ".REST OF TONIGHT...", "early in the morning","late in the afternoon", + 0, seriesDefPM), + # Alternative + # For the early morning update, this produces: + # EARLY THIS MORNING: + # TODAY + # TONIGHT + #("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + # ".REST OF TONIGHT...", "late in the night", "early in the evening", + # 1, seriesDefPM), + #("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), + # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", + # 1, seriesDefPM), + ] + +## def checkThreshold(self, tree, node, triggerMethod, leArea1, leArea2): +## # Return 1 if the difference between leArea1 and leArea2 stats is +## # greater than the threshold +## # Handles stats that are a min/max or a 
singleValue +## leArea1Label = self.getLeAreaLabel(tree, node, leArea1) +## leArea2Label = self.getLeAreaLabel(tree, node, leArea2) +## if type(triggerMethod) is types.MethodType: +## flag = triggerMethod(tree, node, node.get("localEffect"), leArea1Label, leArea2Label) +## else: +## first = node.getAncestor("firstElement") +## element = first.name +## dataType = first.dataType +## if dataType == self.WEATHER(): +## mergeMethod = "Average" +## else: +## mergeMethod = "MinMax" +## timeRange = node.getTimeRange() +## area1Stats = tree.stats.get(element, timeRange, leArea1Label, +## mergeMethod=mergeMethod) +## area2Stats = tree.stats.get(element, timeRange, leArea2Label, +## mergeMethod=mergeMethod) +## print "\nLocal effects", element, timeRange +## print leArea1Label, area1Stats +## print leArea2Label, area2Stats +## if area1Stats is None or area2Stats is None: +## return 0 +## flag = self.checkLocalEffectDifference( +## tree, node, dataType, triggerMethod, area1Stats, area2Stats, +## leArea1Label, leArea2Label) +## print "returning", flag +## return flag + + + +################################################### +#### TEST OVERRIDE SECTION + + # EXCEPT VS OTHERWISE WORDING: + + def _windLocalEffects_list2(self): + leArea1 = self.LocalEffectArea("BelowElev", "", "in the valleys") + leArea2 = self.LocalEffectArea("AboveElev", "in the mountains") + return [self.LocalEffect([leArea2, leArea1], 10, ", otherwise ")] + + # USING A METHOD TO SPECIFY LOCAL EFFECT AREAS: + + def _tempLocalEffects_list2(self): + return [self.LocalEffect(self._getTempLeAreas, 8, ", except ")] + + def _getTempLeAreas(self, tree, node): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", "in the mountains") + return [leArea1, leArea2] + + # LOCAL EFFECT AREAS THAT DO NOT INTERSECT THE CURRENT AREA + + def Period_1_version2(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + 
], + "analysisList": [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [3]), + ("Sky", self.binnedPercent, [6]), + ("PoP", self._PoP_analysisMethod("Period_1"), [3]), + ("PoP", self.binnedPercent, [3]), + ("SnowAmt", self.minMax), + ("IceAmt", self.minMax), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("WindGust", self.maximum, [6]), + ("Wx", self.rankedWx, [3]), + ], + "phraseList":[ +## self.skyPopWx_phrase, +## (self.skyPopWx_phrase, self._wxLocalEffects_list()), +## self.sky_phrase, +## self.wind_summary, +## self.reportTrends, +## self.weather_phrase, +## (self.weather_phrase,self._wxLocalEffects_list()), +## self.severeWeather_phrase, +## +## (self.snow_phrase,self._snowAmtLocalEffects_list()), +## (self.total_snow_phrase,self._totalSnowAmtLocalEffects_list()), + +## (self.highs_phrase, self._tempLocalEffects_list()), + (self.highs_phrase, self._tempLocalEffects_method), + +## (self.wind_withGusts_phrase, self._windLocalEffects_list()), + +## self.popMax_phrase, + ], + "additionalAreas": [ + # Areas listed by weather element that will be + # intersected with the current area then + # sampled and analysed. + # E.g. used in local effects methods. + ("MaxT", ["area2", "area1"]), + ], + "intersectAreas": [ + # Areas listed by weather element that will be + # intersected with the current area then + # sampled and analysed. + # E.g. used in local effects methods. 
+ ("MaxT", ["AboveElev", "BelowElev"]), + ("Wind", ["AboveElev", "BelowElev"]), + ("WindGust", ["AboveElev", "BelowElev"]), + ("SnowAmt", ["AboveElev", "BelowElev"]), + ("Wx", ["AboveElev", "BelowElev"]), + ("PoP", ["AboveElev", "BelowElev"]), + ], + } + + def _tempLocalEffects_method(self, tree, node): + exec("value = self._tempLocalEffects_method" + repr(self._tempLE_method) + "(tree, node)") + return value + + def _tempLocalEffects_method1(self, tree, node): + if self.currentAreaContains(tree, ["area3"]): + leArea1 = self.LocalEffectArea( + "__Current__","",intersectFlag=0) + leArea2 = self.LocalEffectArea("area1", + "in the rush valley", intersectFlag=0) + leArea3 = self.LocalEffectArea( + "area2", "in the benches",intersectFlag=0) + return [ + self.LocalEffect([leArea1, leArea2], 5, ", except "), + self.LocalEffect([leArea1, leArea3], 5, ", except "), + ] + else: + return [] + + def _tempLocalEffects_method2(self, tree, node): + if self.currentAreaContains(tree, ["area3"]): + leArea1 = self.LocalEffectArea( + "__Current__","in the city",intersectFlag=0) + leArea2 = self.LocalEffectArea("area1", + "in the rush valley", intersectFlag=0) + leArea3 = self.LocalEffectArea( + "area2", "in the benches",intersectFlag=0) + return [self.LocalEffect([leArea1, leArea2, leArea3], 5, ", and ")] + #return [ + # self.LocalEffect([leArea1,leArea2],5,", except "), + # self.LocalEffect([leArea1,leArea3],5,", except "), + # ] + else: + return [] + + + def Period_1_version3(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("Sky", self.median, [3]), + ("PoP", self._PoP_analysisMethod("Period_1"), [3]), + ("PoP", self.binnedPercent, [3]), + ("Wx", self.rankedWx, [3]), + ], + "phraseList":[ + (self.sky_phrase, self._skyLocalEffects_list()), + (self.skyPopWx_phrase, self._skyPopWxLocalEffects_list()), + (self.weather_phrase,self._wxLocalEffects_list()), + (self.popMax_phrase, 
self._popLocalEffects_list()), + ], + "additionalAreas": [ + # Areas listed by weather element that will be + # intersected with the current area then + # sampled and analysed. + # E.g. used in local effects methods. + ("Sky", ["area2", "area1"]), + ("Wx", ["area2", "area1"]), + ("PoP", ["area2", "area1"]), + ], + } + + def _skyLocalEffects_list(self): + leArea1 = self.LocalEffectArea( + "__Current__","in the city",intersectFlag=0) + leArea2 = self.LocalEffectArea( + "area1","in the rush valley", intersectFlag=0) + leArea3 = self.LocalEffectArea( + "area2", "in the benches",intersectFlag=0) + return [ + self.LocalEffect([leArea1, leArea2, leArea3], self.checkSkyDifference, ", "), + ] + + def _wxLocalEffects_list(self): + leArea1 = self.LocalEffectArea( + "__Current__","in the city",intersectFlag=0) + leArea2 = self.LocalEffectArea( + "area1","in the rush valley", intersectFlag=0) + leArea3 = self.LocalEffectArea( + "area2", "in the benches",intersectFlag=0) + return [ + self.LocalEffect([leArea1, leArea2, leArea3], 0, ", "), + ] + + def _popLocalEffects_list(self): + leArea1 = self.LocalEffectArea( + "__Current__","in the city",intersectFlag=0) + leArea2 = self.LocalEffectArea( + "area1","in the rush valley", intersectFlag=0) + leArea3 = self.LocalEffectArea( + "area2", "in the benches",intersectFlag=0) + return [ + self.LocalEffect([leArea1, leArea2, leArea3], 20, ", "), + ] + + def _skyPopWxLocalEffects_list(self): + leArea1 = self.LocalEffectArea( + "__Current__","in the city",intersectFlag=0) + leArea2 = self.LocalEffectArea( + "area1","in the rush valley", intersectFlag=0) + leArea3 = self.LocalEffectArea( + "area2", "in the benches",intersectFlag=0) + return [ + self.LocalEffect([leArea1, leArea2, leArea3], self.checkSkyWxDifference, ", "), + ] diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/MultipleElementTable.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/MultipleElementTable.py index 
9dfbd32167..ca267cc188 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/MultipleElementTable.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/MultipleElementTable.py @@ -1,360 +1,360 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -#------------------------------------------------------------------------- -# Description: This product creates a Multiple Element Table. -# The possible elements are Temperature (MaxT, MinT), Humidity (MinRH, MaxRH), and PoP -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# MultipleElementTableTable, MultipleElementTable_Local, MultipleElementTable_Aux_Local -#------------------------------------------------------------------------- -# User Configurable Variables: -#------------------------------------------------------------------------- -# Weather Elements Needed: -#------------------------------------------------------------------------- -# Edit Areas Needed: -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -#------------------------------------------------------------------------- -# Component Products: -#------------------------------------------------------------------------- -# Programmers and Support including product team leader's email: -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -#------------------------------------------------------------------------- -# Additional Information: -#------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# GFE Training Guide->GFE Text Products User Guide section of the GFE Online -# Help for guidance on creating a new text product. 
-## - -import TextRules -import SampleAnalysis -import string, time, types - - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - (("Forecast Product", "productIssuance") , "Morning", "radio", - ["Morning","Afternoon"]), - ] - - Definition = { - "type": "smart", - "displayName": "None", - "outputFile": "/awips/GFESuite/products/TEXT/MultipleElementTable.txt", - "defaultEditAreas": [ - ("area1","AREA 1"), - ("area2","AREA 2"), - ("area3","AREA 3"), - ], - # Product-specific variables - "regionList" : [ - ("/33",["AREA 1","AREA 2"]), - ("/19",["AREA 3"]) - ], - # Possible elements are: - # "Temp" -- lists MaxT for daytime, MinT for nighttime - # "PoP" - # "Humidity" -- lists MinRH for daytime, MaxRH for nighttime - "elementList" : ["Temp", "PoP"], - # If set to 1, only one value for each element is listed - "singleValueFormat": 0, # Default is 0 - "includeTitle": 1, - "introLetters": "&&\n ", - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def generateForecast(self, argDict): - # Generate formatted product for a list of edit areas - - # Get variables from varDict and Definition - self._getVariables(argDict) - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from the user if desired - self._areaList = self.getAreaList(argDict) - - # Determine time ranges for which the data will be sampled - self._determineTimeRanges(argDict) - - # Sample the data - self._sampleData(argDict) - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - for editArea, areaLabel in self._areaList: - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - - fcst = self._postProcessProduct(fcst, argDict) - return 
fcst - - def _getVariables(self, argDict): - # Determine whether Morning or Afternoon product type - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - # Make argDict accessible - self.__argDict = argDict - - # Set up any other product-specific variables from the Definition - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - self._currentRegion = None - - # The analysisList tells which weather elements and statistics - # are desired for the product. - self._analysisList = self._getAnalysisList() - - def _determineTimeRanges(self, argDict): - # Determine time ranges for product - # Sets up self._timeRangeList - - try: - byTimeRange = argDict["byTimeRange"] - except: - byTimeRange = 0 - - if byTimeRange: - timeRange = argDict["timeRange"] - day = self.getPeriod(timeRange) - if day == self.DAYTIME(): - self._productIssuance = "Morning" - else: - self._productIssuance = "Afternoon" - # Force singleValueFormat - self._singleValueFormat = 1 - else: - timeRange = None - self._timeRangeList = self.getMultipleElementTableRanges( - self._productIssuance, self._singleValueFormat, timeRange) - return - - def _sampleData(self, argDict): - # Sample the data - self._sampler = self.getSampler(argDict, - (self._analysisList, self._timeRangeList, self._areaList)) - return - - def _preProcessProduct(self, fcst, argDict): - # Set up format spacing and title line spacing - - numElements = len(self._elementList) - if numElements > 2: - self._spaceStr = "" - else: - self._spaceStr = " " - if self._includeTitle == 0: - return fcst - - if self._singleValueFormat == 1: - self._titles = self._titleDict()["SingleValue"] - self._headingLen = 5 - else: - self._titles = self._titleDict()["MultipleValue"] - if numElements > 2: - if self._productIssuance == "Morning": - self._headingLen = 15 - else: - 
self._headingLen = 19 - else: - if self._productIssuance == "Morning": - self._headingLen = 21 - else: - self._headingLen = 28 - - # Create title line - title = self._introLetters + " " - if self._singleValueFormat == 1: - title = title + " " - index = 0 - for element in self._elementList: - title = title + string.center( - self._titles[element], self._headingLen) - if index < len(self._elementList)-1: - title = title + "/" - index += 1 - return fcst + title + "\n" - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # If we are in a new region, add region header - for region, areaList in self._regionList: - if areaLabel in areaList: - break - if region != self._currentRegion: - if self._currentRegion is not None: - # End the Region - fcst = fcst + "\n$$\n\n" - self._currentRegion = region - fcst = fcst + region - - return fcst + "\n" + string.ljust(areaLabel, 10) - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - # Get the Statistics - statList = self.getStatList(self._sampler, self._analysisList, - self._timeRangeList, editArea) - - numElements = len(self._elementList) - index = 0 - for element in self._elementList: - exec "fcst = fcst + self._get" + element + "Values(statList, argDict)" - if index < numElements-1 and self._singleValueFormat == 0: - fcst = fcst + " /" - index +=1 - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst - - def _postProcessProduct(self, fcst, argDict): - fcst = fcst + "\n" - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _getAnalysisList(self): - return [ - ("MinT", self.avg), - ("MaxT", self.avg), - ("MinRH", self.avg), - ("MaxRH", self.avg), - ("PoP", self.stdDevMaxAvg), - ] - - def _titleDict(self): - return { - "MultipleValue": { - "Temp": "TEMPERATURE", - "PoP": "PRECIPITATION", - "Humidity":"HUMIDITY", - 
}, - "SingleValue": - { - "Temp": "TEMP", - "PoP": "POP", - "Humidity":"HUM", - } - } - - def _getTempValues(self, statList, argDict): - # Return a string of Temperature values given statList - stats1 = statList[0] - if self._productIssuance == "Morning": - if self._singleValueFormat == 0: - stats2 = statList[1] - stats3 = statList[2] - t1 = self.getScalarVal(stats1["MaxT"]) - t2 = self.getScalarVal(stats2["MinT"]) - t3 = self.getScalarVal(stats3["MaxT"]) - str = " " + t1+ self._spaceStr +t2+ self._spaceStr +t3 - return str - else: - return self.getScalarVal(stats1["MaxT"]) + " " - else: - if self._singleValueFormat == 0: - stats2 = statList[1] - stats3 = statList[2] - stats4 = statList[3] - t1 = self.getScalarVal(stats1["MinT"]) - t2 = self.getScalarVal(stats2["MaxT"]) - t3 = self.getScalarVal(stats3["MinT"]) - t4 = self.getScalarVal(stats4["MaxT"]) - str = " " +t1+ self._spaceStr +t2+ self._spaceStr +t3+ self._spaceStr+t4 - return str - else: - return self.getScalarVal(stats1["MinT"]) + " " - - def _getHumidityValues(self, statList, argDict): - # Return a string of Humidity values given statList - stats1 = statList[0] - if self._productIssuance == "Morning": - if self._singleValueFormat == 0: - stats2 = statList[1] - stats3 = statList[2] - t1 = self.getScalarVal(stats1["MinRH"]) - t2 = self.getScalarVal(stats2["MaxRH"]) - t3 = self.getScalarVal(stats3["MinRH"]) - return " " +t1+ self._spaceStr +t2+ self._spaceStr+t3 - else: - return self.getScalarVal(stats1["MinRH"]) + " " - else: - if self._singleValueFormat == 0: - stats2 = statList[1] - stats3 = statList[2] - stats4 = statList[3] - t1 = self.getScalarVal(stats1["MaxRH"]) - t2 = self.getScalarVal(stats2["MinRH"]) - t3 = self.getScalarVal(stats3["MaxRH"]) - t4 = self.getScalarVal(stats4["MinRH"]) - return " " +t1+ self._spaceStr +t2+ self._spaceStr +t3+ self._spaceStr +t4 - else: - return self.getScalarVal(stats1["MaxRH"]) + " " - - def _getPoPValues(self, statList, argDict): - # Return a string of PoP values in 
the statList - pop = [] - popStr = "" - if self._singleValueFormat == 0: - index = 0 - for stats in statList: - val = self._getPoPValue(stats) - if index < len(statList)-1: - popStr = popStr + val + self._spaceStr - else: - popStr = popStr + val - index += 1 - popStr = popStr + " " - else: - popStr = self._getPoPValue(statList[0]) + " " - return popStr - - def _getPoPValue(self, stats): - pop = self.getStats(stats,"PoP") - if pop is None: - val = " " - else: - max = self.round(pop, "Nearest", 10) - val = self.getScalarVal(max) - return val +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +#------------------------------------------------------------------------- +# Description: This product creates a Multiple Element Table. +# The possible elements are Temperature (MaxT, MinT), Humidity (MinRH, MaxRH), and PoP +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# MultipleElementTableTable, MultipleElementTable_Local, MultipleElementTable_Aux_Local +#------------------------------------------------------------------------- +# User Configurable Variables: +#------------------------------------------------------------------------- +# Weather Elements Needed: +#------------------------------------------------------------------------- +# Edit Areas Needed: +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. 
Combinations file: +#------------------------------------------------------------------------- +# Component Products: +#------------------------------------------------------------------------- +# Programmers and Support including product team leader's email: +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +#------------------------------------------------------------------------- +# Additional Information: +#------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# GFE Training Guide->GFE Text Products User Guide section of the GFE Online +# Help for guidance on creating a new text product. +## + +import TextRules +import SampleAnalysis +import string, time, types + + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + (("Forecast Product", "productIssuance") , "Morning", "radio", + ["Morning","Afternoon"]), + ] + + Definition = { + "type": "smart", + "displayName": "None", + "outputFile": "/awips/GFESuite/products/TEXT/MultipleElementTable.txt", + "defaultEditAreas": [ + ("area1","AREA 1"), + ("area2","AREA 2"), + ("area3","AREA 3"), + ], + # Product-specific variables + "regionList" : [ + ("/33",["AREA 1","AREA 2"]), + ("/19",["AREA 3"]) + ], + # Possible elements are: + # "Temp" -- lists MaxT for daytime, MinT for nighttime + # "PoP" + # "Humidity" -- lists MinRH for daytime, MaxRH for nighttime + "elementList" : ["Temp", "PoP"], + # If set to 1, only one value for each element is listed + "singleValueFormat": 0, # Default is 0 + "includeTitle": 1, + "introLetters": "&&\n ", + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def generateForecast(self, argDict): + # Generate formatted product for a list of edit areas + 
+ # Get variables from varDict and Definition + self._getVariables(argDict) + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from the user if desired + self._areaList = self.getAreaList(argDict) + + # Determine time ranges for which the data will be sampled + self._determineTimeRanges(argDict) + + # Sample the data + self._sampleData(argDict) + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + for editArea, areaLabel in self._areaList: + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Determine whether Morning or Afternoon product type + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + # Make argDict accessible + self.__argDict = argDict + + # Set up any other product-specific variables from the Definition + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + self._currentRegion = None + + # The analysisList tells which weather elements and statistics + # are desired for the product. 
+ self._analysisList = self._getAnalysisList() + + def _determineTimeRanges(self, argDict): + # Determine time ranges for product + # Sets up self._timeRangeList + + try: + byTimeRange = argDict["byTimeRange"] + except: + byTimeRange = 0 + + if byTimeRange: + timeRange = argDict["timeRange"] + day = self.getPeriod(timeRange) + if day == self.DAYTIME(): + self._productIssuance = "Morning" + else: + self._productIssuance = "Afternoon" + # Force singleValueFormat + self._singleValueFormat = 1 + else: + timeRange = None + self._timeRangeList = self.getMultipleElementTableRanges( + self._productIssuance, self._singleValueFormat, timeRange) + return + + def _sampleData(self, argDict): + # Sample the data + self._sampler = self.getSampler(argDict, + (self._analysisList, self._timeRangeList, self._areaList)) + return + + def _preProcessProduct(self, fcst, argDict): + # Set up format spacing and title line spacing + + numElements = len(self._elementList) + if numElements > 2: + self._spaceStr = "" + else: + self._spaceStr = " " + if self._includeTitle == 0: + return fcst + + if self._singleValueFormat == 1: + self._titles = self._titleDict()["SingleValue"] + self._headingLen = 5 + else: + self._titles = self._titleDict()["MultipleValue"] + if numElements > 2: + if self._productIssuance == "Morning": + self._headingLen = 15 + else: + self._headingLen = 19 + else: + if self._productIssuance == "Morning": + self._headingLen = 21 + else: + self._headingLen = 28 + + # Create title line + title = self._introLetters + " " + if self._singleValueFormat == 1: + title = title + " " + index = 0 + for element in self._elementList: + title = title + string.center( + self._titles[element], self._headingLen) + if index < len(self._elementList)-1: + title = title + "/" + index += 1 + return fcst + title + "\n" + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # If we are in a new region, add region header + for region, areaList in self._regionList: + if areaLabel in 
areaList: + break + if region != self._currentRegion: + if self._currentRegion is not None: + # End the Region + fcst = fcst + "\n$$\n\n" + self._currentRegion = region + fcst = fcst + region + + return fcst + "\n" + string.ljust(areaLabel, 10) + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + # Get the Statistics + statList = self.getStatList(self._sampler, self._analysisList, + self._timeRangeList, editArea) + + numElements = len(self._elementList) + index = 0 + for element in self._elementList: + exec("fcst = fcst + self._get" + element + "Values(statList, argDict)") + if index < numElements-1 and self._singleValueFormat == 0: + fcst = fcst + " /" + index +=1 + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + + def _postProcessProduct(self, fcst, argDict): + fcst = fcst + "\n" + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _getAnalysisList(self): + return [ + ("MinT", self.avg), + ("MaxT", self.avg), + ("MinRH", self.avg), + ("MaxRH", self.avg), + ("PoP", self.stdDevMaxAvg), + ] + + def _titleDict(self): + return { + "MultipleValue": { + "Temp": "TEMPERATURE", + "PoP": "PRECIPITATION", + "Humidity":"HUMIDITY", + }, + "SingleValue": + { + "Temp": "TEMP", + "PoP": "POP", + "Humidity":"HUM", + } + } + + def _getTempValues(self, statList, argDict): + # Return a string of Temperature values given statList + stats1 = statList[0] + if self._productIssuance == "Morning": + if self._singleValueFormat == 0: + stats2 = statList[1] + stats3 = statList[2] + t1 = self.getScalarVal(stats1["MaxT"]) + t2 = self.getScalarVal(stats2["MinT"]) + t3 = self.getScalarVal(stats3["MaxT"]) + str = " " + t1+ self._spaceStr +t2+ self._spaceStr +t3 + return str + else: + return self.getScalarVal(stats1["MaxT"]) + " " + else: + if self._singleValueFormat == 0: + stats2 = 
statList[1] + stats3 = statList[2] + stats4 = statList[3] + t1 = self.getScalarVal(stats1["MinT"]) + t2 = self.getScalarVal(stats2["MaxT"]) + t3 = self.getScalarVal(stats3["MinT"]) + t4 = self.getScalarVal(stats4["MaxT"]) + str = " " +t1+ self._spaceStr +t2+ self._spaceStr +t3+ self._spaceStr+t4 + return str + else: + return self.getScalarVal(stats1["MinT"]) + " " + + def _getHumidityValues(self, statList, argDict): + # Return a string of Humidity values given statList + stats1 = statList[0] + if self._productIssuance == "Morning": + if self._singleValueFormat == 0: + stats2 = statList[1] + stats3 = statList[2] + t1 = self.getScalarVal(stats1["MinRH"]) + t2 = self.getScalarVal(stats2["MaxRH"]) + t3 = self.getScalarVal(stats3["MinRH"]) + return " " +t1+ self._spaceStr +t2+ self._spaceStr+t3 + else: + return self.getScalarVal(stats1["MinRH"]) + " " + else: + if self._singleValueFormat == 0: + stats2 = statList[1] + stats3 = statList[2] + stats4 = statList[3] + t1 = self.getScalarVal(stats1["MaxRH"]) + t2 = self.getScalarVal(stats2["MinRH"]) + t3 = self.getScalarVal(stats3["MaxRH"]) + t4 = self.getScalarVal(stats4["MinRH"]) + return " " +t1+ self._spaceStr +t2+ self._spaceStr +t3+ self._spaceStr +t4 + else: + return self.getScalarVal(stats1["MaxRH"]) + " " + + def _getPoPValues(self, statList, argDict): + # Return a string of PoP values in the statList + pop = [] + popStr = "" + if self._singleValueFormat == 0: + index = 0 + for stats in statList: + val = self._getPoPValue(stats) + if index < len(statList)-1: + popStr = popStr + val + self._spaceStr + else: + popStr = popStr + val + index += 1 + popStr = popStr + " " + else: + popStr = self._getPoPValue(statList[0]) + " " + return popStr + + def _getPoPValue(self, stats): + pop = self.getStats(stats,"PoP") + if pop is None: + val = " " + else: + max = self.round(pop, "Nearest", 10) + val = self.getScalarVal(max) + return val diff --git 
a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/MultipleElementTable_Aux_Local.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/MultipleElementTable_Aux_Local.py index c842a324d9..ac96f72379 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/MultipleElementTable_Aux_Local.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textProducts/MultipleElementTable_Aux_Local.py @@ -1,81 +1,81 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -######################################################################## -# MultipleElementTable_Aux_Local -# This routine is to be used in conjunction with the FWF and AreaFcst -# products -# -# Type: smart -# Local product: -# MultipleElementTable_Aux_Local(type: smart) -# To customize this product for your site: -# Set up MultipleElementTable_Aux_Local (see template below) -# to override variables, definitions, thresholds, and methods -## -########################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import MultipleElementTable -import string, time, re, os, types, copy - -class TextProduct(MultipleElementTable.TextProduct): - Definition = copy.deepcopy(MultipleElementTable.TextProduct.Definition) - Definition["displayName"] = "None" - Definition["regionList"] = [] - - def __init__(self): - MultipleElementTable.TextProduct.__init__(self) - - def _getVariables(self, argDict): - # Determine whether Morning or Afternoon product type - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - # Set up any other product-specific variables from the Definition - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - self._elementList = argDict["elementList"] - self._singleValueFormat = argDict["singleValueFormat"] - self._includeTitle = argDict["includeTitle"] - self._currentRegion = None - - # The analysisList tells which weather elements and statistics - # are desired for the product. 
- self._analysisList = self._getAnalysisList() - - # Headers and Footers to override - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area - #print "Generating Forecast for", areaLabel - header = "\n" + string.ljust(areaLabel, 10) - return fcst + header - - def _postProcessProduct(self, fcst, argDict): - return fcst +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +######################################################################## +# MultipleElementTable_Aux_Local +# This routine is to be used in conjunction with the FWF and AreaFcst +# products +# +# Type: smart +# Local product: +# MultipleElementTable_Aux_Local(type: smart) +# To customize this product for your site: +# Set up MultipleElementTable_Aux_Local (see template below) +# to override variables, definitions, thresholds, and methods +## +########################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +import MultipleElementTable +import string, time, re, os, types, copy + +class TextProduct(MultipleElementTable.TextProduct): + Definition = copy.deepcopy(MultipleElementTable.TextProduct.Definition) + Definition["displayName"] = "None" + Definition["regionList"] = [] + + def __init__(self): + MultipleElementTable.TextProduct.__init__(self) + + def _getVariables(self, argDict): + # Determine whether Morning or Afternoon product type + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + # Set up any other product-specific variables from the Definition + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + self._elementList = argDict["elementList"] + self._singleValueFormat = argDict["singleValueFormat"] + self._includeTitle = argDict["includeTitle"] + self._currentRegion = None + + # The analysisList tells which weather elements and statistics + # are desired for the product. 
+ self._analysisList = self._getAnalysisList() + + # Headers and Footers to override + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area + #print "Generating Forecast for", areaLabel + header = "\n" + string.ljust(areaLabel, 10) + return fcst + header + + def _postProcessProduct(self, fcst, argDict): + return fcst diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/Analysis.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/Analysis.py index 83b8d77368..fc30a4bbb5 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/Analysis.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/Analysis.py @@ -1,97 +1,97 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Analysis.py -# Class for Analysis of grid data producing summary statistics. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import string, types - -from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID - -class Analysis: - def __init__(self, histoSampler): - self._histoSampler = histoSampler - if type(histoSampler) is types.DictionaryType: - try: - sampler = histoSampler[histoSampler.keys()[0]] - except IndexError: - sampler = None - else: - sampler = histoSampler - self.__parmID = sampler.getParmID() - if self.__parmID.equals(ParmID()): - self.__parmID = None - - def getParmID(self, parmNameAndLevel, databaseID): - index = string.find(parmNameAndLevel, "_") - if index == -1: - name = parmNameAndLevel - level = "SFC" - else: - name = parmNameAndLevel[0:index] - level = parmNameAndLevel[index+1:] - return ParmID(name, databaseID, level) - - def createStats(self, analysisDef, referenceID, timeRange, component): - " Create the Statistic dictionary for the forecast " - statDict = {} - - sampler = self._histoSampler - for analysis in analysisDef: - if len(analysis) == 2: - parmNameAndLevel, method = analysis - args = None - else: - parmNameAndLevel, method, args = analysis - dbID = self.__parmID.getDbId() - parmID = self.getParmID(parmNameAndLevel, dbID) - if type(self._histoSampler) is types.DictionaryType: - sampler = self._histoSampler[parmNameAndLevel] - if sampler is not None: - parmHisto = sampler.getParmHisto(parmID, referenceID, timeRange.toJavaObj()) - statName = parmNameAndLevel + "__" + method.__name__ - if parmHisto.getSampleLen() == 0 or sampler is None: - 
stats = None - else: - if args is not None: - stats = method(parmHisto, timeRange, component, args) - if len(args) > 0 and type(args[0]) is types.IntType: - statName = statName + "_" + `args[0]` - else: - stats = method(parmHisto, timeRange, component) - if parmNameAndLevel not in statDict.keys(): - statDict[parmNameAndLevel] = stats - statDict[statName] = stats - - return statDict - -if __name__ == '__main__': - print "Cannot run stand-alone" +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Analysis.py +# Class for Analysis of grid data producing summary statistics. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. 
+## + +import string, types + +from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID + +class Analysis: + def __init__(self, histoSampler): + self._histoSampler = histoSampler + if type(histoSampler) is dict: + try: + sampler = histoSampler[list(histoSampler.keys())[0]] + except IndexError: + sampler = None + else: + sampler = histoSampler + self.__parmID = sampler.getParmID() + if self.__parmID.equals(ParmID()): + self.__parmID = None + + def getParmID(self, parmNameAndLevel, databaseID): + index = string.find(parmNameAndLevel, "_") + if index == -1: + name = parmNameAndLevel + level = "SFC" + else: + name = parmNameAndLevel[0:index] + level = parmNameAndLevel[index+1:] + return ParmID(name, databaseID, level) + + def createStats(self, analysisDef, referenceID, timeRange, component): + " Create the Statistic dictionary for the forecast " + statDict = {} + + sampler = self._histoSampler + for analysis in analysisDef: + if len(analysis) == 2: + parmNameAndLevel, method = analysis + args = None + else: + parmNameAndLevel, method, args = analysis + dbID = self.__parmID.getDbId() + parmID = self.getParmID(parmNameAndLevel, dbID) + if type(self._histoSampler) is dict: + sampler = self._histoSampler[parmNameAndLevel] + if sampler is not None: + parmHisto = sampler.getParmHisto(parmID, referenceID, timeRange.toJavaObj()) + statName = parmNameAndLevel + "__" + method.__name__ + if parmHisto.getSampleLen() == 0 or sampler is None: + stats = None + else: + if args is not None: + stats = method(parmHisto, timeRange, component, args) + if len(args) > 0 and type(args[0]) is int: + statName = statName + "_" + repr(args[0]) + else: + stats = method(parmHisto, timeRange, component) + if parmNameAndLevel not in list(statDict.keys()): + statDict[parmNameAndLevel] = stats + statDict[statName] = stats + + return statDict + +if __name__ == '__main__': + print("Cannot run stand-alone") diff --git 
a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/ForecastNarrative.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/ForecastNarrative.py index 4a6a8f37b3..f45c6f3d92 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/ForecastNarrative.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/ForecastNarrative.py @@ -1,1667 +1,1667 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# ForecastNarrative.py -# -# Forecast type: "narrative" -# Class for processing Narrative Forecasts -# -# Author: hansen -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Feb 06, 2017 5959 randerso Removed Java .toString() calls -# -## - -## -# This is a base file that is not intended to be overridden. -## - -import time, types -import TextRules -import SampleAnalysis -import Translator -import logging -import AbsTime -import TimeRange -from com.raytheon.uf.common.time import TimeRange as JavaTimeRange -from com.raytheon.viz.gfe.sampler import HistoSampler -from com.raytheon.viz.gfe.sampler import SamplerRequest -from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridParmInfo -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID -from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID -from java.util import ArrayList -import copy -import traceback -import JUtil - -class Node: - def __init__(self, childList, methodList): - self.childList = childList - self.methodList = methodList - self.parent = None - # Make tree bi-directional - for child in childList: - child.parent = self - # Keep track of changes made to this node - self.changeFlag = 0 - # Keep track of methods that are done - self.doneList = [] - def getIndex(self): - # If this node is a child, - # return it's index in the childList of the parent - try: - return self.parent.childList.index(self) - except: - return None - def getParent(self): - return self.parent - def getComponent(self): - # Return this node's ancestor at the second level in the tree - prevNode = None - node = self - while node.getParent() is not None: - prevNode = node - node = node.getParent() - return prevNode - def getComponentName(self): - node = 
self - compName = node.get("componentName") - if compName is not None: - return compName - else: - comp = node.getComponent() - if comp is not None: - return comp.get("name") - else: - return None - def getNext(self): - if self.parent is not None: - index = self.getIndex() - childList = self.parent.childList - if len(childList) > index+1: - return childList[index+1] - def getPrev(self): - if self.parent is not None: - index = self.getIndex() - childList = self.parent.childList - if index > 0: - return childList[index-1] - def set(self, member, value): - #print " Setting", member, - if hasattr(self, member): - current = getattr(self, member) - #print "current/value", current, value - if current == value: - #print " No Change" - return - setattr(self, member, value) - self.changeFlag = 1 - #print " Changed" - def get(self, member, default=None): - if hasattr(self, member): - return getattr(self, member) - else: - return default - def printNode(self, node, indentStr=""): - print "Node", node - print indentStr + " Methods" - for method in node.methodList: - if method in node.doneList: - done = "DONE" - else: - done = "" - print indentStr + " ", method.__name__, done - print indentStr + " Attributes" - dict = node.__dict__ - for key in dict: - if key == "methodList" or key == "doneList": - continue - print indentStr + " ", key, dict[key] - print indentStr + " Children ", len(node.childList) - for child in node.childList: - self.printNode(child, indentStr + " ") - def copy(self): - newNode = Node([], []) - dict = self.__dict__ - for key in dict: - newNode.set(key, self.get(key)) - return newNode - def insertChild(self, sibling, newChild, newFirst=0): - # Insert the newChild - # If newFirst, insert newChild before sibling, - # else afterward. 
- newChild.parent = self - new = [] - for child in self.childList: - if child == sibling: - if newFirst: - new.append(newChild) - new.append(child) - else: - new.append(child) - new.append(newChild) - else: - new.append(child) - self.childList = new - def remove(self): - # Remove this node from it's parent child list - parent = self.parent - new = [] - for child in parent.childList: - if child != self: - new.append(child) - parent.childList = new - # Set the attribute for removing the child - setattr(self, "removed", 1) - def findChild(self, attr, value): - # Find the child of this node with the given attribute - # of the given value - for child in self.childList: - if child.get(attr) == value: - return child - def getProgeny(self): - # Return a list of all progeny of this node - progeny = self.childList - for child in self.childList: - childProgeny = child.getProgeny() - if childProgeny is not None: - progeny = progeny + child.getProgeny() - return progeny - def replace(self, nodeList): - # Replace the current child node with the node list. - # If top of tree, does nothing. 
- childList = self.parent.childList - newList = [] - for child in childList: - if child == self: - newList = newList + nodeList - else: - newList.append(child) - self.parent.childList = newList - # Remove any children of current node - self.childList = [] - # Make this node defunct - self.doneList = self.methodList - def getTimeRange(self): - if hasattr(self, "timeRange"): - return self.timeRange - # Look for an ancestor that has a timeRange associated with it - if self.parent is not None: - return self.parent.getTimeRange() - return None - def getStatDict(self): - # Assume we are a subPhrase - if hasattr(self, "statDict"): - statDict = self.statDict - disabledElements = self.getAncestor("disabledElements") - if disabledElements is not None: - for key in statDict.keys(): - for element in self.parent.disabledElements: - if key == element: - statDict[element] = None - disabledSubkeys = self.getAncestor("disabledSubkeys") - #print "disabledSubkey", disabledSubkeys - if disabledSubkeys is not None: - disabledWxTypes = [] - for disabledSubkey in disabledSubkeys: - disabledWxTypes.append(disabledSubkey.wxType()) - for key in statDict.keys(): - if key == "Wx": - subkeys = statDict[key] - newList = [] - for subkey in subkeys: - # Need to handle both "dominantWx" and - # "rankedWx" analysis - appendVal = subkey - if type(subkey) is types.TupleType: - subkey, rank = subkey - if subkey not in disabledSubkeys \ - and subkey.wxType() not in disabledWxTypes: - newList.append(appendVal) - statDict[key] = newList - return statDict - else: - return None - def getAreaLabel(self): - if hasattr(self, "areaLabel"): - return self.areaLabel - # Look for an ancestor that has an areaLabel associated with it - if self.parent is not None: - return self.parent.getAreaLabel() - return None - def getAncestor(self, attr): - if hasattr(self, attr): - return getattr(self, attr) - # Look for an ancestor that has the given attribute associated with it - if self.parent is not None: - return 
self.parent.getAncestor(attr) - return None - def setAncestor(self, attr, value): - if hasattr(self, attr): - setattr(self, attr, value) - return None - # Look for an ancestor that has the given attribute associated with it - if self.parent is not None: - return self.parent.setAncestor(attr, value) - return None - def getDescendent(self, attr): - if hasattr(self, attr): - return getattr(self, attr) - # Look for the first descendent that has the given attribute associated with it - for child in self.childList: - value = child.getDescendent(attr) - if value is not None: - return value - return None - -class Narrative(Node, TextRules.TextRules): - # This is the root of the tree and, as such, has some special methods - # and data members - def __init__(self, methodList, componentList, statisticsDictionary, - issuanceInfo, library, histoSampler): - self.stats = statisticsDictionary - # Access to inherited methods - self.library = library - # A histoSampler for access to Topo - self.histoSampler = histoSampler - self.issuanceInfo = issuanceInfo - - # This is the root of the tree - Node.__init__(self, componentList, methodList) - TextRules.TextRules.__init__(self) - - def printTree(self): - print "\n\nNarrative Tree\n" - self.printNode(self, "") - def getTopoHisto(self, areaLabel): - editArea = self.library.findEditArea(None, areaLabel) - return self.get("histoSampler").getTopoHisto(editArea.getId()) - def makeNode(self, children, methods, parent=None): - node = Node(children, methods) - node.parent = parent - return node - def statisticsDictionary(self): - return self.statisticsDictionary.dictionary() - def getDataType(self, element): - return self.library.getDataType(element) - def getLimits(self, element): - return self.library.getLimits(element) - def makeComponent(self, name, timeRange, definition): - return self.library.makeComponent(name, timeRange, definition) - def makePhrase(self, phraseDef): - return self.library.makePhrase(phraseDef) - def copyPhrase(self, 
node, timeRange=None, areaLabel=None, parent=None, - copyAttrs=[]): - phraseDef = node.get("phraseDef") - newNode = self.library.makePhrase(phraseDef) - # copy attributes from original node - for attr in copyAttrs: - newVal = node.get(attr) - if type(newVal) is types.ListType: - newList = [] - for item in newVal: - newList.append(item) - newVal = newList - newNode.set(attr, newVal) - if areaLabel is None: - areaLabel = node.getAreaLabel() - newNode.set("areaLabel", areaLabel) - if timeRange is None: - timeRange = node.getTimeRange() - newNode.set("timeRange", timeRange) - if parent is None: - parent = node.parent - newNode.parent = parent - # Preserve attributes - newNode.set("args", node.get("args")) - return newNode - - def addPhrase(self, prevPhrase, timeRange=None, areaLabel=None): - # Make the new phrase follow given phrase - newPhrase = self.copyPhrase(prevPhrase, timeRange, areaLabel) - parent = prevPhrase.parent - parent.insertChild(prevPhrase, newPhrase) - return newPhrase - def addPhraseDef(self, prevPhrase, phraseDef, timeRange=None, areaLabel=None): - # Make the new phrase follow given prevPhrase using the given phraseDef - newPhrase = self.library.makePhrase(phraseDef) - if areaLabel is None: - areaLabel = prevPhrase.getAreaLabel() - newPhrase.set("areaLabel", areaLabel) - if timeRange is None: - timeRange = prevPhrase.getTimeRange() - newPhrase.set("timeRange", timeRange) - parent = prevPhrase.parent - newPhrase.parent = parent - parent.insertChild(prevPhrase, newPhrase) - return newPhrase - -class StatisticsDictionary(TextRules.TextRules): - def __init__(self, dictionary, library): - # Dictionary is a multi-level dictionary storing statistics - self.dictionary = dictionary - self.library = library - TextRules.TextRules.__init__(self) - def set(self, element, areaLabel, timeRange, statLabel, value): - # Set the dictionary value according to keyOrder - # E.g. 
dict[element][areaLabel][timeRange][statLabel] = value - keyOrder = ["element", "areaLabel", "timeRange", "statLabel"] - dict = self.dictionary - execStr = "dict" - index = 0 - lastIndex = len(self.keyOrder)-1 - for keyName in keyOrder: - execStr = execStr + "["+keyName+"]" - if index == lastIndex: - exec execStr + "= value" - else: - # Make sure there is at least an empty dictionary - # for this keyName - try: - exec "result = " + execStr - except: - exec execStr + "= {}" - index = index + 1 - def get(self, element, timeRange, areaLabel=None, statLabel="", mergeMethod="List", - intersectWith=None): - if areaLabel is None: - areaLabel = self.areaLabel - if intersectWith is not None: - areaLabel = self.library.getIntersectName(intersectWith, areaLabel) - dictionary = self.dictionary - dataType = self.library.getDataType(element) - #print "Getting stats", element, mergeMethod, timeRange, areaLabel, statLabel - - # Get the raw value (could be simple value OR statsByRange) - try: - # See if there is an exact match entry - value = dictionary[element][areaLabel][timeRange][statLabel] - except: - # Gather statsByRange for anything overlapping the timeRange - value = [] - try: - dict = dictionary[element][areaLabel] - except: - return None - if statLabel != "": - statLabel = element + "__" + statLabel - matchFound = 0 - #if element == "Wx": - # print "\n\nstatLabel", statLabel, dict.keys() - for subRange in dict.keys(): - statDict = dict[subRange] - #if element == "Wx": - # print "statDict keys", statDict.keys() - if statLabel in statDict.keys(): - if subRange.overlaps(timeRange): - # If subRange covers the timeRange, treat as exact match - subValue = statDict[statLabel] - if subRange.contains(timeRange): - value = subValue - matchFound = 1 - break - if self.library.isStatsByRange(dataType, subValue): - for subStats, range in subValue: - #print "appending", subStats, range - value.append((subStats, range)) - else: - #print "appending2", subValue, subRange - 
value.append((subValue, subRange)) - - # IF we have "glued" together stats from timeRanges - # overlapping the time range in question, - # then we have to eliminate duplicates and - # make sure the resulting statsByRange are - # in chronological order. - if matchFound == 0:# and mergeMethod == "List": - # Make sure the subRanges are in order - if len(value) > 0: - temp = [] - #print "before sort", timeRange - for stats, subRange in value: - #print stats, subRange - temp.append((subRange.startTime(), (stats, subRange))) - temp.sort() - value = [] - #print "after sort" - lastRange = None - for t in temp: - stats, subRange = t[1] - if lastRange is not None and subRange == lastRange: - continue - lastRange = subRange - #print t[1] - value.append(t[1]) - - # Apply mergeMethod to the value -## if areaLabel == "_OffShoreArea_intersect_Region_1" or areaLabel == "Region 1": -## if element == "MaxT" and timeRange.duration > 24*3600: -## print "\n Area", areaLabel, timeRange -## print " Merging", value - value = self.library.getMergedStats( - value, mergeMethod, element, timeRange, areaLabel, dataType) - -## if element == "MaxT" and timeRange.duration > 24*3600: -## print " returning", value - return value - - def printDictionary(self, element=None): - if element is None: - print "\n\nStatistics Dictionary\n" - self.printDict(self.dictionary, "") - else: - try: - print "\n\nStatistics Dictionary for "+element+"\n" - self.printDict(self.dictionary[element], "") - except: - pass - def printDict(self, dictionary, indentStr): - for key in dictionary.keys(): - value = dictionary[key] - if type(key) is types.StringType and key == "": - key = "EmptyString" - print indentStr, key - if type(value) is types.DictionaryType: - self.printDict(value, indentStr + " ") - else: - print indentStr, indentStr, value - - -class ForecastNarrative(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - # This class processes a narrative-type text product. 
- # To use this class, set up a NarrativeDefinition indicating which - # components and consecutive time periods are to be in the narrative. - # After instantiating the class, call getNarrativeData to do all the - # sampling and analysis for the narrative. - # Then, for each edit area, call generateForecast. - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - self.__gpiCache = {} - self.log = logging.getLogger("FormatterRunner.ForecastNarrative.ForecastNarrative") - - def getNarrativeData(self, argDict, narrativeDefinition, timeRange, areaList, issuanceInfo): - # Assemble the Tree for the Narrative - # Samples and Analyzes all data for the Narrative - # Assemble the StatisticsDictionary - # If successful, returns None - # Otherwise returns a text string indicating the problem - - # General set up - self.__ut = argDict["utility"] - self.__narrativeDefinition = narrativeDefinition - self.__timeRange = timeRange - self.__areaList = areaList - self.__priorPeriod = self.__ut.set(narrativeDefinition, "priorPeriod", None) - self.__issuanceInfo = issuanceInfo - self.__currentLocalTime, self.__shift = self.determineTimeShift() - self.__lineLength = argDict["lineLength"] - self.__combinations = argDict["combinations"] - self.__ifpClient = argDict["ifpClient"] - self.__argDict = argDict - sampleFromServer = self.__ut.set(narrativeDefinition, "sampleFromServer", 0) - try: - self.__productTR = argDict["productTimeRange"] - #print "Set from argDict" - except: - self.__productTR = timeRange - #print "Setting productTR", self.__productTR - - # Caching for ReferenceData and ParmID's - self.__areaCache = {} - areaList = [] - for editArea, areaLabel in self.__areaList: - if type(editArea) is types.StringType: - #print "Get Edit Area FN: getting edit area 1", editArea - editArea = self.getEditArea(editArea, argDict) - self.__areaCache[areaLabel] = editArea - areaList.append((editArea, areaLabel)) - self.__areaList = areaList - 
self.__elementCache = {} - self.__databaseID = argDict["databaseID"] - - # Break out the Definition into components and sampling information - # sets up self.__compList, self.__samplerRequests, self.__statisticsDict - #print "\nNarrative Def", narrativeDefinition - error = self.__breakOutDefinition(argDict, narrativeDefinition, timeRange, areaList) - if error is not None: - return error - #for component, timeRange, definition in self.__compList: - # print component, timeRange - - # Call the Samplers - # Get sample dictionary - # Sets up samplerDict: WeatherElement: histoSampler - time1 = time.time() - if sampleFromServer: - self.progressMessage(.3, 80, "Sampling Data from Server -- please wait...") - self.__sampler = argDict["ifpClient"].sampleRequest(self.__samplerRequests) - else: - self.progressMessage(.3, 80, "Sampling Data -- please wait...") - self.__sampler = HistoSampler(argDict["ifpClient"].getJavaClient(), self.__samplerRequests) - print "Time to Sample Data", time.time()-time1 - #if error is not None: - # return error - #print "Sampler", self.__sampler - - # Call SampleAnalysis methods to complete statisticsDict - # and instantiate the StatisticsDictionary class - # Sets up self.__statisticsDict - time1 = time.time() - self.progressMessage(.6, 80, "Analyzing Data -- please wait...") - error = self.__createStatisticsDictionary(argDict) - print "Time to Get Statistics", time.time()-time1 - #error = self.__createStatisticsDictionary0(argDict) - if error is not None: - return error - return None - - def ut(self): - return self.__ut - - def sampler(self): - return self.__sampler - - def statisticsDictionary(self): - return self.__statisticsDictionary - - def issuanceInfo(self): - return self.__issuanceInfo - - def generateForecast(self, argDict, editArea=None, areaLabel=None): - self.__createNarrativeTree(argDict) - if editArea is None: - editArea, areaLabel = argDict["editArea"] - if type(editArea) is types.StringType: - editArea = self.getEditArea(editArea, 
argDict) - # Otherwise, set argDict - else: - argDict["editArea"] = (editArea, areaLabel) - self.__narrativeTree.editArea = editArea - self.__narrativeTree.areaLabel = areaLabel - self.__narrativeTree.changeFlag = 0 - self.__narrativeTree.stats.areaLabel = areaLabel - self.__trace = self.__narrativeTree.get("trace") - self.__infiniteTrace = 0 - argDict["combinations"] = self.__narrativeTree.get("combinations") - #self.__trace = 1 - self.__narrativeTree.lastChance = 0 - self.__narrativeTree.fixedIt = 0 - argDict["tree"] = self.__narrativeTree - changesMade = 1 - time1 = time.time() - passes = 0 - while changesMade: - if self.__trace: print "\n####### Pass %d #####\n" % (passes + 1) - - # Tree traversal - changesMade = self.traverseTree(self.__narrativeTree) - if self.__trace: print "\n\nCHANGES IN PASS", changesMade - #changesMade = 1 - passes = passes + 1 - self.__narrativeTree.passes = passes - - # Error recovery: - # - # Check for infinite loop - # There are still changes being made to the tree, - # but we're in an infinite loop - # Check for no changes and empty words - # There are no more changes to the tree, - # but the words have not completed - # If either of these are true: - # --Do a "lastChance" pass to alert methods to - # finish if possible. - # --If still not done, - # Do a "FixIt" pass to fill in leaf - # nodes that have not finished and - # continue execution. 
- - if passes > self.__narrativeTree.get("passLimit") or \ - (changesMade == 0 and self.__narrativeTree.get('words') is None): - if not self.__narrativeTree.lastChance: - # Do last chance pass - print "\nDoing Last Chance Pass" - self.__narrativeTree.lastChance = 1 - changesMade = 1 - else: - # We already did a lastChance pass - if not self.__narrativeTree.fixedIt: - # Fix it, re-set passes and continue - print "\nDoing Fix it Pass" - changesMade = 1 - self.__narrativeTree.passes = 0 - self.__narrativeTree.fixedIt = 1 - self.__problemPhrases = [] - self.errorTraverse(self.__narrativeTree) - else: - # We already fixed it and still not done - # (This should never happen!) - # Stop execution: no more error recovery to attempt - changesMade = 0 - - print "Time for phrase generation for ", areaLabel, ":", time.time()-time1, "Passes", passes - words = self.__narrativeTree.get("words") - if self.__narrativeTree.fixedIt: - self.__problemPhrases = [] - self.errorTraverse(self.__narrativeTree) - problems = self.errorMsg(passes) - self.log.error(problems) - if words is None: - words = problems - return words - - def traverseTree(self, node): - # Top Down traversal - - if self.__trace: - print "Traversing node:", node.get("name"), node.getAreaLabel(), node.getTimeRange() - print " ", node, node.parent - - # Execute methods at this level - methodList = node.methodList - for method in methodList: - # Need to make sure that node has not been removed by some other method - if method not in node.doneList and not hasattr(node, "removed"): - time1 = time.time() - done = method(self.__narrativeTree, node) - if done: - # Add method to doneList - node.doneList.append(method) - - # Trace output - if self.__infiniteTrace: - print "Traversing node:", node.get("name"), node.getAreaLabel(), \ - node.getTimeRange() - print " Method:", method.__name__ - print " Done", done - print " Node", node - if self.__trace: - if done: - doneStr = "DONE" - else: - doneStr = "" - print "Method", 
method.__name__, doneStr, time.time()-time1, - print " Words", node.get("words") - - # Execute methods of children - # If ANY child is changed, we are not done - childrenChanged = 0 - for child in node.childList: - childChanged = self.traverseTree(child) - if childChanged: - childrenChanged = 1 - - # See if we made any changes at this level OR at a child's level - changesMade = childrenChanged | node.changeFlag - if self.__trace: print 'Changes made:', changesMade - # Re-set changeFlag - node.changeFlag = 0 - return changesMade - - def __breakOutDefinition(self, argDict, definition, timeRange, areaList): - # Sets up self.__compList: (componentName, timeRange, definition), - # self.__samplerRequests - # self.__statisticsDict - # - # Set up self.__compList - time1 = time.time() - narrativeDef = self.__ut.set(definition,"narrativeDef", None) - # Get list of tuples: forecastType, timeRange, definition - self.__compList = self.__breakOutTimeRange( - argDict, timeRange, narrativeDef, self.__currentLocalTime, self.__shift) - # If error message, return error string - if type(self.__compList) is types.StringType: - return self.__compList - - #print "Time to make compList", time.time() - time1 - - # Make samplerRequests - firstTime = 1 - samplerRequests = ArrayList() - statisticsDict = {} - moreAreas = [] - - time1 = time.time() - for compName, timeRange, compDefinition in self.__compList: - # Add the analysisList entries to the sampleList and statisticsDict - # for this component - analysisList = self.__ut.set(compDefinition, "analysisList", []) - additionalAnalysisList = self.__ut.set(compDefinition, "additionalAnalysisList", []) - if len(analysisList) == [] and additionalAnalysisList == []: - continue - sampleList = [] - # Make sampleList: (element, methodArgs, editArea, areaLabel) - - # First include the analysisList methods self.__areaList - #print "\nRegular list" - for analysis in analysisList: - element, methodArgs = self.__getAnalysis(analysis) - for editArea, 
areaLabel in self.__areaList: - #print "appending to sampleList", element, editArea.id(), areaLabel - sampleList.append((element, methodArgs, editArea, areaLabel)) - - # Handle additional areas - additionalAreas = self.__ut.set(compDefinition,"additionalAreas", []) - additionalAnalysisList = self.__ut.set( - compDefinition,"additionalAnalysisList", analysisList) - #print "\nForecastNarrative additionalAreas" - for element, leAreaList in additionalAreas: - #print "element, leAreaList", element, leAreaList - for areaLabel in leAreaList: - methodArgsList = self.__findMethods(element, additionalAnalysisList) - for methodArgs in methodArgsList: - editArea = self.findEditArea(None, areaLabel) - #print "appending to sampleList", element, editArea.id(), areaLabel - sampleList.append((element, methodArgs, editArea, areaLabel)) - - # Handle intersect areas - intersectAreas = self.__ut.set(compDefinition,"intersectAreas", []) - # Determine intersectWithAreas - intersectWithAreas = self.__ut.set( - compDefinition,"intersectWithAreas", []) - if intersectWithAreas == []: - intersectWithAreas = self.__areaList - else: - intAreas = [] - for areaLabel in intersectWithAreas: - editArea = self.findEditArea(None, areaLabel) - intAreas.append((editArea, areaLabel)) - intersectWithAreas = intAreas - # Determine intersectAnalysisList - intersectAnalysisList = self.__ut.set( - compDefinition,"intersectAnalysisList", analysisList) - # Set up intersections and sampleList entries - #print "\nIntersect Areas" - for element, leAreaList in intersectAreas: - for leAreaLabel in leAreaList: - editAreas = self.__intersectEditAreas( - leAreaLabel, argDict, intersectWithAreas) - methodArgsList = self.__findMethods(element, intersectAnalysisList) - for editArea in editAreas: - for methodArgs in methodArgsList: - #print "appending to sampleList", element, editArea.id(), areaLabel - sampleList.append((element, methodArgs, editArea, editArea.getId().getName())) - - # Add to samplerRequests and 
statisticsDict - self.__addToRequests(argDict, timeRange, compName, sampleList, - samplerRequests, statisticsDict) - - self.__samplerRequests = samplerRequests - self.__statisticsDict = statisticsDict - #print "Time to create samplerRequests", time.time() - time1 - return None - - def __breakOutTimeRange(self, argDict, timeRange, narrative, currentLocalTime, shift): - "Return a list of tuples: forecastType, timeRange " - # A time period of 0 will be a 1-hour time range but the following - # period will also begin at the same start time. - - # "shift" is the number of hours to add to GMT to get local time. - # All forecasts and start-end times are in GMT time, so the shift is - # used only for labeling purposes, e.g. Today, Tonight, Monday, etc... - - getFcstDef = argDict["getFcstDef"] - crange = timeRange - prevPeriod = 0 - compList = [] - - # Compute midnight of the creation day - creationTime = argDict["creationTime"] - localTime = time.localtime(creationTime) - year = localTime[0] - month = localTime[1] - day = localTime[2] - midnight = AbsTime.absTimeYMD(year, month, day, 0) - shift # midnight LT - - for subType, period in narrative: - #print "subType, period", subType, period - - # Determine sub-TimeRange - if subType == "Custom": - # Handle custom components - added in OB8.2. - # "Custom" components are intended to replace "priorPeriod" which is removed. - # "Custom" component entries in a narrative definition are of the form: - # ("Custom", (componentName, timeRange)) - # where timeRange can be (start_hours, end_hours) or an AFPS.TimeRange. - # Start_hours and end_hours are relative to midnight local time - # of the product creation date. 
- subType, period = period - if type(period) == types.TupleType and len(period) == 2: - startHour, endHour = period - compRange = TimeRange.TimeRange(midnight + startHour*3600, - midnight + endHour*3600) - else: - compRange = period - else: - # Handle normal component - # If period is zero, make a 1 hour time range - if period == 0: - duration = 1 - else: - duration = period - if prevPeriod == 0: - start = crange.startTime() - else: - start = crange.endTime() - compRange = TimeRange.TimeRange(start, start + self.hrToSec(duration)) - crange = compRange - prevPeriod = period - - # Get definition for component - #print "finding in ForecastNarrative", subType - if subType == "Phantom": - found = 1 - argDict["forecastDef"] = {} - else: - found, module = getFcstDef(subType, argDict) - #print "found" - - if found == 0: - s = "\nProblem finding or importing Text Product " + \ - "Definition: " + subType + "\n" - raise Exception, s - forecastDef = argDict["forecastDef"] - - # Append to component list - #print "Appending", subType, compRange, forecastDef - compList.append((subType, compRange, forecastDef)) - - # Re-set argDict - argDict["forecastDef"] = self.__narrativeDefinition - return compList - - def __getAnalysis(self, analysis): - if len(analysis) == 2: - element, method = analysis - args = None - else: - element, method, args = analysis - return element, (method, args) - - def __findMethods(self, element, analysisList): - # Find the entries in the analysisList for the given element - # and return a list of (method, args) for that element - methodArgsList = [] - for analysis in analysisList: - analysisElement, methodArgs = self.__getAnalysis(analysis) - if element == analysisElement: - methodArgsList.append(methodArgs) - return methodArgsList - - def __intersectEditAreas(self, leAreaLabel, argDict, intersectWithAreas): - # Make a list of intersections of the local effect area (leAreaLabel) with - # all the edit areas in self.__areaList - intersectAreas = [] - for 
editArea, areaLabel in intersectWithAreas: - # Get the intersect name and see if it is in the cache - intersectLabel = self.getIntersectName(areaLabel, leAreaLabel) - try: - intersectArea = self.__areaCache[intersectLabel] - except: - leArea = self.findEditArea(None, leAreaLabel) - intersectArea = self.intersectAreas(intersectLabel, editArea, leArea) - self.__areaCache[intersectLabel] = intersectArea - if intersectArea is not None: - #print " Appending", intersectLabel - intersectAreas.append(intersectArea) - else: - print " Empty Intersection, skipping", intersectName - return intersectAreas - - def __addToRequests(self, argDict, timeRange, componentName, - sampleList, samplerRequests, statisticsDict): - innerList = ArrayList(len(sampleList)) - for element, methodArgs, editArea, areaLabel in sampleList: - if element not in statisticsDict.keys(): - statisticsDict[element] = {} - areaDict = statisticsDict[element] - if areaLabel not in areaDict.keys(): - areaDict[areaLabel] = {} - trDict = areaDict[areaLabel] - if timeRange not in trDict.keys(): - trDict[timeRange] = ([], componentName, {}) - parmID = self.__parmID(element) - innerList.add(SamplerRequest(parmID, editArea, timeRange.toJavaObj())) - #print "Adding to sampler Requests", parmID, editArea.id(), timeRange - methodArgsList, componentName, statDict = trDict[timeRange] - methodArgsList.append(methodArgs) - samplerRequests.addAll(innerList) - - def findEditArea(self, editArea, areaLabel): - # Return given editArea or cached editArea for the given label - # Add to cache if necessary - if areaLabel in self.__areaCache.keys(): - return self.__areaCache[areaLabel] - else: - if type(editArea) is str or str(editArea).find('Id') == -1: - #print "Get Edit Area FN: getting edit area 2", areaLabel - editArea = self.getEditAreas(self.__argDict, [areaLabel])[0] - self.__areaCache[areaLabel] = editArea - return editArea - - def __parmID(self, element): - if element in self.__elementCache.keys(): - return 
self.__elementCache[element] - else: - parmID = self.getParmID(element, self.__databaseID) - self.__elementCache[element] = parmID - return parmID - - def __createStatisticsDictionary(self, argDict): - # Call the SamplerAnalysis methods and expand self.__statisticsDict - - # Set up skeleton tree and node to be used when looking up user-configurable thresholds. - tree, node = self.getSkeleton(self.__timeRange, None) - - for element in self.__statisticsDict.keys(): - parmID = self.__parmID(element) - # Get conversion information - dataType = self.getDataType(element) - #print "Element", element, dataType - # Must use product self to get any overrides to the - # TextRules library that are in standard or local file - # These may have to be looked up per areaLabel and timeRange if the field - # wants it (see above), but currently looked up only once per element - # to save on performance - productSelf = self.__argDict["self"] - inUnits = productSelf.element_inUnits(tree, node, element, element) - outUnits = productSelf.element_outUnits(tree, node, element, element) - convertMethod = productSelf.getConvertMethod(inUnits, outUnits, element) - adjustMethod = productSelf.adjust_method(tree, node, element, element) - roundingMethod = productSelf.rounding_method(tree, node, element, element) - #rangeInfo = productSelf.getRangeInfo(tree, node, element) - # - areaDict = self.__statisticsDict[element] - for areaLabel in areaDict.keys(): - tree.areaLabel = areaLabel - node.areaLabel = areaLabel - editArea = self.findEditArea(None, areaLabel) - tree.editArea = editArea - trDict = areaDict[areaLabel] - keys = trDict.keys() - for timeRange in keys: - node.timeRange = timeRange - methodArgsList, componentName, statDict = trDict[timeRange] - node.componentName = componentName - parmHisto = self.__sampler.getParmHisto(parmID, editArea.getId(), timeRange.toJavaObj()) - index = 0 - for methodArgs in methodArgsList: - stats = self.__getStatistics( - element, methodArgs, parmHisto, 
timeRange, editArea, componentName) - #if element == "WindChill": - # method, args = methodArgs - # print "before conversion ", method.__name__, stats, timeRange, editArea.id() - stats = self.__convertStatistics( - tree, node, productSelf, stats, dataType, - convertMethod, adjustMethod, roundingMethod, element, methodArgs) - #if element == "WindChill": - # print "after conversion ", stats - self.__storeStatistics(stats, element, methodArgs, timeRange, trDict, index) - index = index +1 - - # Another pass to remove the methodArgsList, componentName from trDict - for element in self.__statisticsDict.keys(): - areaDict = self.__statisticsDict[element] - for areaLabel in areaDict.keys(): - trDict = areaDict[areaLabel] - for timeRange in trDict.keys(): - methodArgsList, componentName, statDict = trDict[timeRange] - trDict[timeRange] = statDict - - self.__statisticsDictionary = StatisticsDictionary(self.__statisticsDict, self) - #self.__statisticsDictionary.printDictionary("WindGust") - #self.__statisticsDictionary.printDictionary("Wind") - #self.__statisticsDictionary.printDictionary("PoP") - #self.__statisticsDictionary.printDictionary("Wx") - #self.__statisticsDictionary.printDictionary("MaxT") - #self.__statisticsDictionary.printDictionary("WaveHeight") - return None - - def __getStatistics(self, element, methodArgs, parmHisto, timeRange, editArea, componentName): - method, args = methodArgs - if parmHisto.getSampleLen() == 0: - stats = None - else: - if args is not None: - stats = method(parmHisto, timeRange, componentName, args) - else: - stats = method(parmHisto, timeRange, componentName) - parmName = parmHisto.parmID().compositeNameUI() - #if element == "SnowAmt": - # print "Called method", parmName, editArea.id().name(), method.__name__, timeRange - # print " Result", stats - return stats - - def __convertStatistics(self, tree, node, productSelf, statsByRange, dataType, - convertMethod, adjustMethod, roundingMethod, - elementName, methodArgs): - # Converts the 
statistics in the statsByRange given the inUnits, outUnits and increment - # Assumes that stats are either a single value or min/max - # Vectors can be handled as well. - # Weather keys are filtered. - - if statsByRange is None: - return statsByRange - if dataType == self.DISCRETE(): - return statsByRange - - simpleStatFlag = 0 - # Note: we do not checkTupleLists here since we want hourlyTemp - # to pass the "isStatsByRange" test. This way it will go thru - # the conversion code below which works because - # hourlyTemp is composed of 2-tuples and the statement: - # stats,subRange = statsByRange[i] - # assigns the hour to the subRange and then works with the stats. - #print "statsByRange", elementName, statsByRange - if not self.isStatsByRange(dataType, statsByRange, checkTupleLists=0): - simpleStatFlag = 1 - statsByRange = [(statsByRange, "")] - - # Check for binnedPercent which cannot be converted - method, args = methodArgs - if method.__name__ == "binnedPercent": - return statsByRange - - numStats = len(statsByRange) - newList = [] - for i in range(numStats): - stats, subRange = statsByRange[i] - if dataType == self.WEATHER(): - #print "stats before filtering", stats - stats = productSelf.filterSubkeys(tree, node, stats) - #print "stats after filtering", stats - else: - if dataType == self.VECTOR(): - stats, dir = stats - if type(stats) is types.TupleType: - min, max = stats - increment_nlValue = productSelf.increment_nlValue( - tree, node, elementName, elementName) - if min is not None: - min = convertMethod(min) - if type(adjustMethod) is types.MethodType: - min = adjustMethod(min) - min = productSelf.roundValue( - min, roundingMethod, "Nearest", increment_nlValue, 0) - if max is not None: - max = convertMethod(max) - if type(adjustMethod) is types.MethodType: - max = adjustMethod(max) - max = productSelf.roundValue( - max, roundingMethod, "Nearest", increment_nlValue, 1) - #min, max = productSelf.applyRangeValues( - # tree, node, min, max, elementName, 
rangeInfo) - min, max = productSelf.applyRanges(tree, node, min, max, elementName) - stats = (min, max) - else: - if stats is not None: - stats = convertMethod(stats) - if type(adjustMethod) is types.MethodType: - stats = adjustMethod(stats) - increment_nlValue = productSelf.increment_nlValue( - tree, node, elementName, elementName) - stats = productSelf.roundValue( - stats, roundingMethod, "Nearest", increment_nlValue, 1) - if dataType == self.VECTOR(): - stats = (stats, dir) - newList.append(((stats), subRange)) - - if simpleStatFlag: - stats, tr = newList[0] - return stats - return newList - - def __storeStatistics(self, stats, element, methodArgs, timeRange, trDict, index): - method, args = methodArgs - statLabel = element + "__" + method.__name__ - if index == 0: - statLabels = ["", statLabel] - else: - statLabels = [statLabel] - #if element == "Hazards": - # print "Storing Hazards", stats - if type(stats) is types.ListType: - # Expand statsByRange to individual time range entries - # Skip special cases of hourlyTemp, discrete_percentages, - # and list of wx or discrete subkeys - try: - value, tr = stats[0] - except: - tr = 0 - if type(tr) is types.IntType or type(tr) is types.FloatType: - pass - else: - for value, tr in stats: - if tr not in trDict.keys(): - trDict[tr] = ([], "", {}) - methodArgsList, componentName, statDict = trDict[tr] - for statLabel in statLabels: - statDict[statLabel] = value - methodArgsList, componentName, statDict = trDict[timeRange] - for statLabel in statLabels: - #if element == "Hazards": - # print "storing stats", statLabel, stats - statDict[statLabel] = stats - - def __createNarrativeTree(self, argDict): - #Components - componentList = [] - for componentName, timeRange, definition in self.__compList: - component = self.makeComponent(componentName, timeRange, definition) - componentList.append(component) - - # Narrative - methodList = self.__ut.set(self.__narrativeDefinition, "methodList", []) - self.__narrativeTree = 
Narrative(methodList, componentList, - self.__statisticsDictionary, - self.__issuanceInfo, self, self.__sampler) - self.__narrativeTree.set("timeRange", self.__timeRange) - self.__narrativeTree.set("productTimeRange", self.__productTR) - self.__narrativeTree.set("lineLength", self.__lineLength) - self.__narrativeTree.set("ifpClient", ["ifpClient"]) - self.__narrativeTree.set("combinations", self.__combinations) - self.__narrativeTree.set("argDict", self.__argDict) - for attr, default in [ - ("passLimit", 20), - ("trace", 0), - ("troubleList", None), - ("problemList", None), - ]: - val = self.__ut.set(self.__narrativeDefinition, attr, default) - self.__narrativeTree.set(attr, val) - #self.__narrativeTree.printTree() - return None - - def getDataType(self, element): - if element in self.__gpiCache: - return self.__gpiCache[element] - - parmID = self.__parmID(element) - try: - gridParmInfo = self.__ifpClient.getGridParmInfo(parmID) - except RuntimeError, e: - # AWIPS-I doesn't throw an error here best I can tell and - # most of the time when we hit this except it will be because - # a grid parm was requested that the server doesn't know about. - # So we will not force an alert to be thrown here. 
- # self.log.exception("Could not retrieve GridParmInfo for " + str(parmID) - gridParmInfo = GridParmInfo() - gridType = str(gridParmInfo.getGridType()) - if gridType == "VECTOR": - gridType = self.VECTOR() - elif gridType == "SCALAR": - gridType = self.SCALAR() - elif gridType == "WEATHER": - gridType = self.WEATHER() - else: - gridType = self.DISCRETE() - self.__gpiCache[element] = gridType - return gridType - - def getLimits(self, element): - parmID = self.__parmID(element) - gridParmInfo = self.__ifpClient.getGridParmInfo(parmID) - return gridParmInfo.getMinValue(), gridParmInfo.getMaxValue() - - def makeComponent(self, componentName, timeRange, definition): - # Phrases and Subphrases - phrases = self.__ut.set(definition, "phraseList", []) - phraseList = [] - for phraseDef in phrases: - newPhrase = self.makePhrase(phraseDef) - phraseList.append(newPhrase) - # Components - methodList = self.__ut.set(definition, "methodList", []) - component = Node(phraseList, methodList) - component.set("timeRange", timeRange) - component.set("definition", definition) - component.set("name", componentName) - if componentName == "Phantom": - component.set("words", "") - return component - - def makePhrase(self, phraseDef): - # Phrases can be a simple method or a tuple containing - # an optional args list and/or an optional localEffects expressed as - # a list of LocalEffect objects or as a method - args = None - localEffectsList = None - if type(phraseDef) is types.TupleType: - if len(phraseDef) == 2: - phraseDef, object1 = phraseDef - objects = [object1] - else: - phraseDef, object1, object2 = phraseDef - objects = [object1, object2] - for object in objects: - # An object can be: - # A local effect expressed as a method - # A local effect expressed as a list of LocalEffect objects - # A list of arguments - if type(object) is types.MethodType: - localEffectsList = object - else: # must be list - if len(object) == 0: - localEffectsList = object - else: - entry = object[0] - if 
isinstance(entry, self.LocalEffect): - localEffectsList = object - else: - args = object - phraseDict = phraseDef() - phraseMethods = self.__ut.set(phraseDict, "phraseMethods", []) - subPhraseMethods = self.__ut.set(phraseDict, "subPhraseMethods", []) - # Add wordMethod and setUpMethod to methodLists - setUpMethod = self.__ut.set(phraseDict, "setUpMethod", None) - wordMethod = self.__ut.set(phraseDict, "wordMethod", None) - - if 0: - if setUpMethod is not None: - if localEffectsList is not None: - productSelf = self.__argDict["self"] - phraseMethods = [setUpMethod, productSelf.checkLocalEffects] + phraseMethods - else: - phraseMethods = [setUpMethod] + phraseMethods - else: - if setUpMethod is not None: - phraseMethods = [setUpMethod] + phraseMethods - - - - if wordMethod is not None: - subPhraseMethods = subPhraseMethods + [wordMethod] - # Phrases can have child phrases - phraseList = self.__ut.set(phraseDict, "phraseList", []) - phraseChildren = [] - for childPhrase in phraseList: - phraseChildren.append(self.makePhrase(childPhrase)) - # Make new node - phraseNode = Node(phraseChildren, phraseMethods) - phraseNode.set("phraseDef", phraseDef) - phraseNode.set("name", phraseDef.__name__) - phraseNode.set("wordMethod", wordMethod) - phraseNode.set("setUpMethod", setUpMethod) - phraseNode.set("subPhraseMethods", subPhraseMethods) - phraseNode.set("args", args) - phraseNode.set("localEffectsList", localEffectsList) - #print "\nMaking new phrase", phraseNode - #traceback.print_stack(limit=6) - - return phraseNode - - def errorMsg(self, passes): - if passes > self.__narrativeTree.passes: - msg = "\n\nWARNING: TOO MANY PASSES ON TREE. \nTraversal:" - else: - msg = "\n\nWARNING: EMPTY WORDS FROM TREE. Traversal:" - probPhrases = self.getProblemPhrases() - msg += "\nPotential problem phrases are:\n" + probPhrases + \ - """ - - Try overriding "subPhrase_limit" from WxPhrases and setting it to 10. - Then report the problem on the listserver. 
- Also, see the Text Product User Guide section, - "Trouble-shooting Narrative Products". - - """ - return "\n" + `passes` + " PASSES. " + msg - - def getProblemPhrases(self, - attrList=["name", "words", "methodList", "doneList"], - ancestorList=["name"]): - probList = self.__narrativeTree.get("problemList") - if probList is not None: - attrList = probList - probStr = "" - for node in self.__problemPhrases: - probStr += "\n From Component: " + str(node.getComponentName()) - for attr in attrList: - if attr in ["methodList", "doneList"]: - probStr = self.addMethods(probStr, node, attr) - else: - probStr += "\n " + attr + " " + str(node.get(attr)) - probStr += "\n Ancestor attributes" - for ancAttr in ancestorList: - probStr += "\n " + ancAttr + " " + str(node.getAncestor(ancAttr)) - return probStr - - def addMethods(self, probStr, node, attr): - methodList = node.get(attr) - probStr += "\n " + attr - if methodList is not None: - for method in methodList: - probStr += "\n " + method.func_name - return probStr - - def errorTraverse(self, node, attrList=["name", "words"], ancestorList=["name"], - fixPhrases=1): - # Print out relevant attributes of each node - #print "\nNode", node, node.getComponentName() - #for attr in attrList: - # print " ", attr, node.get(attr) - #print " DoneList", node.doneList - #print " MethodList", node.methodList - #print " Ancestor attributes" - #for ancAttr in ancestorList: - # print " ", ancAttr, node.getAncestor(ancAttr) - print node, node.get('name'), node.get('words') - childList = node.get('childList') - if childList is None or childList == []: - print "LEAF NODE" - if node.get('words') is None: - print "WITHOUT WORDS!!", node.getAncestor('name') - errorWords = "|* Please enter " + node.getAncestor('name') + \ - " and refer to log files for more explanation *|" - node.set('words', errorWords) - self.__problemPhrases.append(node) - else: - print " Children" - for child in node.get('childList'): - self.errorTraverse(child, 
attrList=attrList) - - def getSkeleton(self, timeRange, areaLabel): - tree = Node([],[]) - tree.combinations = self.__combinations - tree.timeRange = self.__timeRange - tree.productTimeRange = self.__productTR - tree.areaLabel = areaLabel - if areaLabel is not None: - editArea = self.findEditArea(None, areaLabel) - tree.editArea = editArea - tree.library = self - node = Node([],[]) - node.parent = tree - node.timeRange = timeRange - node.areaLabel = areaLabel - node.componentName = "" - for compName, compTR, compDefinition in self.__compList: - if compTR.contains(timeRange): - node.componentName = compName - node.componentDef = compDefinition - return tree, node - - def getMergedStats(self, value, mergeMethod, elementName, timeRange, areaLabel, dataType=None): - # Merge the stats according to the mergeMethod: - # Works with either single value or tuple stats (e.g. minMax, medianRange) - # Works with either simple stats or statsByRange - productSelf = self.__argDict["self"] - - if value is None: - return value - if dataType is None: - dataType = self.SCALAR() - if not self.isStatsByRange(dataType, value): - if mergeMethod == "List": - return [(value, timeRange)] - else: - if dataType == self.SCALAR() or dataType == self.VECTOR(): - return self.getValue(value, mergeMethod, dataType) - else: - return value - else: - # Take only subRanges that overlap time range - #print "statsByRange", elementName, mergeMethod, timeRange, value - value = self.screenStatsByRange(value, timeRange) - if mergeMethod == "List": - return value - elif mergeMethod == "MergeBins": - return self.mergeBins(value, timeRange) - else: - if value == []: - return None - #print "value for ", elementName, timeRange, areaLabel - #print " value", value - # For performance, we spell out all the cases - if dataType == self.SCALAR(): - if mergeMethod == "Min": - val = None - for stats, subRange in value: - if stats is None: - continue - stats = self.getValue(stats, "Min") - if val is None or stats < val: - 
val = stats - return val - elif mergeMethod == "Max": - val = None - for stats, subRange in value: - if stats is None: - continue - stats = self.getValue(stats, "Max") - if val is None or stats > val: - val = stats - return val - elif mergeMethod == "MinMax": - min = None - max = None - for stats, subRange in value: - if stats is None: - continue - min1, max1 = self.getValue(stats, "MinMax") - if min1 < min or min is None: - min = min1 - if max1 > max or max is None: - max = max1 - if min is None or max is None: - return None - tree, node = self.getSkeleton(timeRange, areaLabel) - min, max = productSelf.applyRanges(tree, node, min, max, elementName) - return (min, max) - else: # Handle sum or average - sum = 0 - count = 0 - for stats, subRange in value: - if stats is None: - continue - stats = self.getValue(stats, "Average") - sum = sum + stats - count += 1 - if count == 0: - return None - if mergeMethod == "Sum": - return sum - else: # Average - average = float(sum)/count - tree, node = self.getSkeleton(timeRange, areaLabel) - return productSelf.roundStatistic(tree, node, average, elementName) - elif dataType == self.VECTOR(): - # Note that in these cases, a mag, dir is returned, but - # the dir is simply taken from the last stat pair so that - # it is somewhat meaningless - if mergeMethod == "Min": - val = None - for stats, subRange in value: - if stats is None: - continue - stats, dir = stats - stats = self.getValue(stats, "Min") - if val is None or stats < val: - val = stats - if val is None: - return None - return (val, dir) - elif mergeMethod == "Max": - val = None - for stats, subRange in value: - if stats is None: - continue - stats, dir = stats - stats = self.getValue(stats, "Max") - if val is None or stats > val: - val = stats - if val is None: - return None - return (val, dir) - elif mergeMethod == "MinMax": - min = None - max = None - for stats, subRange in value: - if stats is None: - continue - stats, dir = stats - min1, max1 = self.getValue(stats, 
"MinMax") - if min1 < min or min is None: - min = min1 - if max1 > max or max is None: - max = max1 - if min is None or max is None: - return None - tree, node = self.getSkeleton(timeRange, areaLabel) - min, max = productSelf.applyRanges(tree, node, min, max, elementName) - return ((min, max), dir) - else: # Handle sum or average - sum = 0 - count = 0 - for stats, subRange in value: - if stats is None: - continue - stats, dir = stats - stats = self.getValue(stats, "Average") - sum = sum + stats - count += 1 - if count == 0: - return None - if mergeMethod == "Sum": - return (sum, dir) - else: # Average - average = float(sum)/count - tree, node = self.getSkeleton(timeRange, areaLabel) - return (productSelf.roundStatistic(tree, node, average, elementName), dir) - elif dataType == self.WEATHER(): - # Weather: add up all subkeys/remove duplicates by - # making wxkey and re-making subkeylist - # Then filter subkeylist - subkeyList = [] - for stats, subRange in value: - if stats is None: - continue - subkeyList = subkeyList + stats - tree, node = self.getSkeleton(timeRange, areaLabel) - #subkeyList = productSelf.combineSubKeys(tree, node, subkeyList) - #print "subkeyList before", subkeyList - subkeyList = productSelf.filterSubkeys(tree, node, subkeyList) - #print "subkeyList after", subkeyList - return subkeyList - else: - # Discrete: add up all keys/ need to remove duplicates? 
- keyList = [] - for stats, subRange in value: - if stats is None: - continue - keyList = keyList + stats - return keyList - - def isStatsByRange(self, dataType, value, checkTupleLists=1): - if dataType == self.VECTOR() or dataType == self.SCALAR(): - if type(value) is types.ListType: - if checkTupleLists: - # Look for special cases like hourlyTemp which - # does not return statsByRange, but is list of - # tuples - try: - firstValue = value[0] - subRange = firstValue[1] - if type(subRange) is types.IntType or \ - type(subRange) is types.FloatType: - return 0 - except: - pass - return 1 - else: - return 0 - else: - # Need to check more closely for Weather and Discrete - # Possibilities: - # **dominantWx: list of subkeys : return 0 - # **rankedWx: list of (subkey, rank) tuples : return 0 - # **dominantDiscreteValue: list of discrete keys: return 0 - # discretePercentages: list of (key, percentage) tuples: return 0 - # discreteTimeRangesByKey: list of (key, timerange) tuples: return 1 - #print "\ngot here", value - if type(value) is types.ListType and len(value) > 0: - try: - stats, tr = value[0] - if isinstance(tr, TimeRange.TimeRange): - return 1 - else: - #print "returning0 0" - return 0 - except: - #print "returning1 0" - return 0 - else: - #print "returning2 0" - return 0 - - def screenStatsByRange(self, statsByRange, timeRange): - newStatsByRange = [] - for stats, subRange in statsByRange: - if subRange.overlaps(timeRange): - newStatsByRange.append((stats, subRange)) - return newStatsByRange - - def mergeBins(self, value, timeRange): - # Value is statsByRange of binLists i.e. (binList, subRange). 
- # Each binList consists of tuples: (low, high, percent) - # timeRange is the time range over which to merge bins - # Return one merged time-weighted binList - if value is None or len(value) == 0: - return None - newPercents = [] - newBins = [] - binList, subRange = value[0] - if binList is None: - return None - numBins = len(binList) - for bin in binList: - low, high, percent = bin - newPercents.append(0.0) - newBins.append((low, high)) - - for binList, subRange in value: - # print "binList, subRange", binList, subRange - if binList is None: - continue - weight = float(subRange.duration())/timeRange.duration() - # If time range is greater than subRange, give a weight of 1 - # so that the percentage never exceeds 100. - if weight > 1.0: - weight = 1.0 - for i in range(numBins): - low, high, percent = binList[i] - newPercents[i] += percent * weight - - # Glue bin values to merged percentages - for i in range(numBins): - low, high = newBins[i] - newBins[i] = ((low, high, newPercents[i])) - return newBins +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# ForecastNarrative.py +# +# Forecast type: "narrative" +# Class for processing Narrative Forecasts +# +# Author: hansen +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# +## + +## +# This is a base file that is not intended to be overridden. +## + +import time, types +import TextRules +import SampleAnalysis +import Translator +import logging +import AbsTime +import TimeRange +from com.raytheon.uf.common.time import TimeRange as JavaTimeRange +from com.raytheon.viz.gfe.sampler import HistoSampler +from com.raytheon.viz.gfe.sampler import SamplerRequest +from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridParmInfo +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID +from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID +from java.util import ArrayList +import copy +import traceback +import JUtil + +class Node: + def __init__(self, childList, methodList): + self.childList = childList + self.methodList = methodList + self.parent = None + # Make tree bi-directional + for child in childList: + child.parent = self + # Keep track of changes made to this node + self.changeFlag = 0 + # Keep track of methods that are done + self.doneList = [] + def getIndex(self): + # If this node is a child, + # return it's index in the childList of the parent + try: + return self.parent.childList.index(self) + except: + return None + def getParent(self): + return self.parent + def getComponent(self): + # Return this node's ancestor at the second level in the tree + prevNode = None + node = self + while node.getParent() is not None: + prevNode = node + node = node.getParent() + return prevNode + def getComponentName(self): + node = 
self + compName = node.get("componentName") + if compName is not None: + return compName + else: + comp = node.getComponent() + if comp is not None: + return comp.get("name") + else: + return None + def getNext(self): + if self.parent is not None: + index = self.getIndex() + childList = self.parent.childList + if len(childList) > index+1: + return childList[index+1] + def getPrev(self): + if self.parent is not None: + index = self.getIndex() + childList = self.parent.childList + if index > 0: + return childList[index-1] + def set(self, member, value): + #print " Setting", member, + if hasattr(self, member): + current = getattr(self, member) + #print "current/value", current, value + if current == value: + #print " No Change" + return + setattr(self, member, value) + self.changeFlag = 1 + #print " Changed" + def get(self, member, default=None): + if hasattr(self, member): + return getattr(self, member) + else: + return default + def printNode(self, node, indentStr=""): + print("Node", node) + print(indentStr + " Methods") + for method in node.methodList: + if method in node.doneList: + done = "DONE" + else: + done = "" + print(indentStr + " ", method.__name__, done) + print(indentStr + " Attributes") + dict = node.__dict__ + for key in dict: + if key == "methodList" or key == "doneList": + continue + print(indentStr + " ", key, dict[key]) + print(indentStr + " Children ", len(node.childList)) + for child in node.childList: + self.printNode(child, indentStr + " ") + def copy(self): + newNode = Node([], []) + dict = self.__dict__ + for key in dict: + newNode.set(key, self.get(key)) + return newNode + def insertChild(self, sibling, newChild, newFirst=0): + # Insert the newChild + # If newFirst, insert newChild before sibling, + # else afterward. 
+ newChild.parent = self + new = [] + for child in self.childList: + if child == sibling: + if newFirst: + new.append(newChild) + new.append(child) + else: + new.append(child) + new.append(newChild) + else: + new.append(child) + self.childList = new + def remove(self): + # Remove this node from it's parent child list + parent = self.parent + new = [] + for child in parent.childList: + if child != self: + new.append(child) + parent.childList = new + # Set the attribute for removing the child + setattr(self, "removed", 1) + def findChild(self, attr, value): + # Find the child of this node with the given attribute + # of the given value + for child in self.childList: + if child.get(attr) == value: + return child + def getProgeny(self): + # Return a list of all progeny of this node + progeny = self.childList + for child in self.childList: + childProgeny = child.getProgeny() + if childProgeny is not None: + progeny = progeny + child.getProgeny() + return progeny + def replace(self, nodeList): + # Replace the current child node with the node list. + # If top of tree, does nothing. 
+ childList = self.parent.childList + newList = [] + for child in childList: + if child == self: + newList = newList + nodeList + else: + newList.append(child) + self.parent.childList = newList + # Remove any children of current node + self.childList = [] + # Make this node defunct + self.doneList = self.methodList + def getTimeRange(self): + if hasattr(self, "timeRange"): + return self.timeRange + # Look for an ancestor that has a timeRange associated with it + if self.parent is not None: + return self.parent.getTimeRange() + return None + def getStatDict(self): + # Assume we are a subPhrase + if hasattr(self, "statDict"): + statDict = self.statDict + disabledElements = self.getAncestor("disabledElements") + if disabledElements is not None: + for key in list(statDict.keys()): + for element in self.parent.disabledElements: + if key == element: + statDict[element] = None + disabledSubkeys = self.getAncestor("disabledSubkeys") + #print "disabledSubkey", disabledSubkeys + if disabledSubkeys is not None: + disabledWxTypes = [] + for disabledSubkey in disabledSubkeys: + disabledWxTypes.append(disabledSubkey.wxType()) + for key in list(statDict.keys()): + if key == "Wx": + subkeys = statDict[key] + newList = [] + for subkey in subkeys: + # Need to handle both "dominantWx" and + # "rankedWx" analysis + appendVal = subkey + if type(subkey) is tuple: + subkey, rank = subkey + if subkey not in disabledSubkeys \ + and subkey.wxType() not in disabledWxTypes: + newList.append(appendVal) + statDict[key] = newList + return statDict + else: + return None + def getAreaLabel(self): + if hasattr(self, "areaLabel"): + return self.areaLabel + # Look for an ancestor that has an areaLabel associated with it + if self.parent is not None: + return self.parent.getAreaLabel() + return None + def getAncestor(self, attr): + if hasattr(self, attr): + return getattr(self, attr) + # Look for an ancestor that has the given attribute associated with it + if self.parent is not None: + return 
self.parent.getAncestor(attr) + return None + def setAncestor(self, attr, value): + if hasattr(self, attr): + setattr(self, attr, value) + return None + # Look for an ancestor that has the given attribute associated with it + if self.parent is not None: + return self.parent.setAncestor(attr, value) + return None + def getDescendent(self, attr): + if hasattr(self, attr): + return getattr(self, attr) + # Look for the first descendent that has the given attribute associated with it + for child in self.childList: + value = child.getDescendent(attr) + if value is not None: + return value + return None + +class Narrative(Node, TextRules.TextRules): + # This is the root of the tree and, as such, has some special methods + # and data members + def __init__(self, methodList, componentList, statisticsDictionary, + issuanceInfo, library, histoSampler): + self.stats = statisticsDictionary + # Access to inherited methods + self.library = library + # A histoSampler for access to Topo + self.histoSampler = histoSampler + self.issuanceInfo = issuanceInfo + + # This is the root of the tree + Node.__init__(self, componentList, methodList) + TextRules.TextRules.__init__(self) + + def printTree(self): + print("\n\nNarrative Tree\n") + self.printNode(self, "") + def getTopoHisto(self, areaLabel): + editArea = self.library.findEditArea(None, areaLabel) + return self.get("histoSampler").getTopoHisto(editArea.getId()) + def makeNode(self, children, methods, parent=None): + node = Node(children, methods) + node.parent = parent + return node + def statisticsDictionary(self): + return self.statisticsDictionary.dictionary() + def getDataType(self, element): + return self.library.getDataType(element) + def getLimits(self, element): + return self.library.getLimits(element) + def makeComponent(self, name, timeRange, definition): + return self.library.makeComponent(name, timeRange, definition) + def makePhrase(self, phraseDef): + return self.library.makePhrase(phraseDef) + def copyPhrase(self, 
node, timeRange=None, areaLabel=None, parent=None, + copyAttrs=[]): + phraseDef = node.get("phraseDef") + newNode = self.library.makePhrase(phraseDef) + # copy attributes from original node + for attr in copyAttrs: + newVal = node.get(attr) + if type(newVal) is list: + newList = [] + for item in newVal: + newList.append(item) + newVal = newList + newNode.set(attr, newVal) + if areaLabel is None: + areaLabel = node.getAreaLabel() + newNode.set("areaLabel", areaLabel) + if timeRange is None: + timeRange = node.getTimeRange() + newNode.set("timeRange", timeRange) + if parent is None: + parent = node.parent + newNode.parent = parent + # Preserve attributes + newNode.set("args", node.get("args")) + return newNode + + def addPhrase(self, prevPhrase, timeRange=None, areaLabel=None): + # Make the new phrase follow given phrase + newPhrase = self.copyPhrase(prevPhrase, timeRange, areaLabel) + parent = prevPhrase.parent + parent.insertChild(prevPhrase, newPhrase) + return newPhrase + def addPhraseDef(self, prevPhrase, phraseDef, timeRange=None, areaLabel=None): + # Make the new phrase follow given prevPhrase using the given phraseDef + newPhrase = self.library.makePhrase(phraseDef) + if areaLabel is None: + areaLabel = prevPhrase.getAreaLabel() + newPhrase.set("areaLabel", areaLabel) + if timeRange is None: + timeRange = prevPhrase.getTimeRange() + newPhrase.set("timeRange", timeRange) + parent = prevPhrase.parent + newPhrase.parent = parent + parent.insertChild(prevPhrase, newPhrase) + return newPhrase + +class StatisticsDictionary(TextRules.TextRules): + def __init__(self, dictionary, library): + # Dictionary is a multi-level dictionary storing statistics + self.dictionary = dictionary + self.library = library + TextRules.TextRules.__init__(self) + def set(self, element, areaLabel, timeRange, statLabel, value): + # Set the dictionary value according to keyOrder + # E.g. 
dict[element][areaLabel][timeRange][statLabel] = value + keyOrder = ["element", "areaLabel", "timeRange", "statLabel"] + dict = self.dictionary + execStr = "dict" + index = 0 + lastIndex = len(self.keyOrder)-1 + for keyName in keyOrder: + execStr = execStr + "["+keyName+"]" + if index == lastIndex: + exec(execStr + "= value") + else: + # Make sure there is at least an empty dictionary + # for this keyName + try: + exec("result = " + execStr) + except: + exec(execStr + "= {}") + index = index + 1 + def get(self, element, timeRange, areaLabel=None, statLabel="", mergeMethod="List", + intersectWith=None): + if areaLabel is None: + areaLabel = self.areaLabel + if intersectWith is not None: + areaLabel = self.library.getIntersectName(intersectWith, areaLabel) + dictionary = self.dictionary + dataType = self.library.getDataType(element) + #print "Getting stats", element, mergeMethod, timeRange, areaLabel, statLabel + + # Get the raw value (could be simple value OR statsByRange) + try: + # See if there is an exact match entry + value = dictionary[element][areaLabel][timeRange][statLabel] + except: + # Gather statsByRange for anything overlapping the timeRange + value = [] + try: + dict = dictionary[element][areaLabel] + except: + return None + if statLabel != "": + statLabel = element + "__" + statLabel + matchFound = 0 + #if element == "Wx": + # print "\n\nstatLabel", statLabel, dict.keys() + for subRange in list(dict.keys()): + statDict = dict[subRange] + #if element == "Wx": + # print "statDict keys", statDict.keys() + if statLabel in list(statDict.keys()): + if subRange.overlaps(timeRange): + # If subRange covers the timeRange, treat as exact match + subValue = statDict[statLabel] + if subRange.contains(timeRange): + value = subValue + matchFound = 1 + break + if self.library.isStatsByRange(dataType, subValue): + for subStats, range in subValue: + #print "appending", subStats, range + value.append((subStats, range)) + else: + #print "appending2", subValue, subRange + 
value.append((subValue, subRange)) + + # IF we have "glued" together stats from timeRanges + # overlapping the time range in question, + # then we have to eliminate duplicates and + # make sure the resulting statsByRange are + # in chronological order. + if matchFound == 0:# and mergeMethod == "List": + # Make sure the subRanges are in order + if len(value) > 0: + temp = [] + #print "before sort", timeRange + for stats, subRange in value: + #print stats, subRange + temp.append((subRange.startTime(), (stats, subRange))) + temp.sort() + value = [] + #print "after sort" + lastRange = None + for t in temp: + stats, subRange = t[1] + if lastRange is not None and subRange == lastRange: + continue + lastRange = subRange + #print t[1] + value.append(t[1]) + + # Apply mergeMethod to the value +## if areaLabel == "_OffShoreArea_intersect_Region_1" or areaLabel == "Region 1": +## if element == "MaxT" and timeRange.duration > 24*3600: +## print "\n Area", areaLabel, timeRange +## print " Merging", value + value = self.library.getMergedStats( + value, mergeMethod, element, timeRange, areaLabel, dataType) + +## if element == "MaxT" and timeRange.duration > 24*3600: +## print " returning", value + return value + + def printDictionary(self, element=None): + if element is None: + print("\n\nStatistics Dictionary\n") + self.printDict(self.dictionary, "") + else: + try: + print("\n\nStatistics Dictionary for "+element+"\n") + self.printDict(self.dictionary[element], "") + except: + pass + def printDict(self, dictionary, indentStr): + for key in list(dictionary.keys()): + value = dictionary[key] + if type(key) is bytes and key == "": + key = "EmptyString" + print(indentStr, key) + if type(value) is dict: + self.printDict(value, indentStr + " ") + else: + print(indentStr, indentStr, value) + + +class ForecastNarrative(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + # This class processes a narrative-type text product. 
+ # To use this class, set up a NarrativeDefinition indicating which + # components and consecutive time periods are to be in the narrative. + # After instantiating the class, call getNarrativeData to do all the + # sampling and analysis for the narrative. + # Then, for each edit area, call generateForecast. + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + self.__gpiCache = {} + self.log = logging.getLogger("FormatterRunner.ForecastNarrative.ForecastNarrative") + + def getNarrativeData(self, argDict, narrativeDefinition, timeRange, areaList, issuanceInfo): + # Assemble the Tree for the Narrative + # Samples and Analyzes all data for the Narrative + # Assemble the StatisticsDictionary + # If successful, returns None + # Otherwise returns a text string indicating the problem + + # General set up + self.__ut = argDict["utility"] + self.__narrativeDefinition = narrativeDefinition + self.__timeRange = timeRange + self.__areaList = areaList + self.__priorPeriod = self.__ut.set(narrativeDefinition, "priorPeriod", None) + self.__issuanceInfo = issuanceInfo + self.__currentLocalTime, self.__shift = self.determineTimeShift() + self.__lineLength = argDict["lineLength"] + self.__combinations = argDict["combinations"] + self.__ifpClient = argDict["ifpClient"] + self.__argDict = argDict + sampleFromServer = self.__ut.set(narrativeDefinition, "sampleFromServer", 0) + try: + self.__productTR = argDict["productTimeRange"] + #print "Set from argDict" + except: + self.__productTR = timeRange + #print "Setting productTR", self.__productTR + + # Caching for ReferenceData and ParmID's + self.__areaCache = {} + areaList = [] + for editArea, areaLabel in self.__areaList: + if type(editArea) is bytes: + #print "Get Edit Area FN: getting edit area 1", editArea + editArea = self.getEditArea(editArea, argDict) + self.__areaCache[areaLabel] = editArea + areaList.append((editArea, areaLabel)) + self.__areaList = areaList + 
self.__elementCache = {} + self.__databaseID = argDict["databaseID"] + + # Break out the Definition into components and sampling information + # sets up self.__compList, self.__samplerRequests, self.__statisticsDict + #print "\nNarrative Def", narrativeDefinition + error = self.__breakOutDefinition(argDict, narrativeDefinition, timeRange, areaList) + if error is not None: + return error + #for component, timeRange, definition in self.__compList: + # print component, timeRange + + # Call the Samplers + # Get sample dictionary + # Sets up samplerDict: WeatherElement: histoSampler + time1 = time.time() + if sampleFromServer: + self.progressMessage(.3, 80, "Sampling Data from Server -- please wait...") + self.__sampler = argDict["ifpClient"].sampleRequest(self.__samplerRequests) + else: + self.progressMessage(.3, 80, "Sampling Data -- please wait...") + self.__sampler = HistoSampler(argDict["ifpClient"].getJavaClient(), self.__samplerRequests) + print("Time to Sample Data", time.time()-time1) + #if error is not None: + # return error + #print "Sampler", self.__sampler + + # Call SampleAnalysis methods to complete statisticsDict + # and instantiate the StatisticsDictionary class + # Sets up self.__statisticsDict + time1 = time.time() + self.progressMessage(.6, 80, "Analyzing Data -- please wait...") + error = self.__createStatisticsDictionary(argDict) + print("Time to Get Statistics", time.time()-time1) + #error = self.__createStatisticsDictionary0(argDict) + if error is not None: + return error + return None + + def ut(self): + return self.__ut + + def sampler(self): + return self.__sampler + + def statisticsDictionary(self): + return self.__statisticsDictionary + + def issuanceInfo(self): + return self.__issuanceInfo + + def generateForecast(self, argDict, editArea=None, areaLabel=None): + self.__createNarrativeTree(argDict) + if editArea is None: + editArea, areaLabel = argDict["editArea"] + if type(editArea) is bytes: + editArea = self.getEditArea(editArea, argDict) 
+ # Otherwise, set argDict + else: + argDict["editArea"] = (editArea, areaLabel) + self.__narrativeTree.editArea = editArea + self.__narrativeTree.areaLabel = areaLabel + self.__narrativeTree.changeFlag = 0 + self.__narrativeTree.stats.areaLabel = areaLabel + self.__trace = self.__narrativeTree.get("trace") + self.__infiniteTrace = 0 + argDict["combinations"] = self.__narrativeTree.get("combinations") + #self.__trace = 1 + self.__narrativeTree.lastChance = 0 + self.__narrativeTree.fixedIt = 0 + argDict["tree"] = self.__narrativeTree + changesMade = 1 + time1 = time.time() + passes = 0 + while changesMade: + if self.__trace: print("\n####### Pass %d #####\n" % (passes + 1)) + + # Tree traversal + changesMade = self.traverseTree(self.__narrativeTree) + if self.__trace: print("\n\nCHANGES IN PASS", changesMade) + #changesMade = 1 + passes = passes + 1 + self.__narrativeTree.passes = passes + + # Error recovery: + # + # Check for infinite loop + # There are still changes being made to the tree, + # but we're in an infinite loop + # Check for no changes and empty words + # There are no more changes to the tree, + # but the words have not completed + # If either of these are true: + # --Do a "lastChance" pass to alert methods to + # finish if possible. + # --If still not done, + # Do a "FixIt" pass to fill in leaf + # nodes that have not finished and + # continue execution. 
+ + if passes > self.__narrativeTree.get("passLimit") or \ + (changesMade == 0 and self.__narrativeTree.get('words') is None): + if not self.__narrativeTree.lastChance: + # Do last chance pass + print("\nDoing Last Chance Pass") + self.__narrativeTree.lastChance = 1 + changesMade = 1 + else: + # We already did a lastChance pass + if not self.__narrativeTree.fixedIt: + # Fix it, re-set passes and continue + print("\nDoing Fix it Pass") + changesMade = 1 + self.__narrativeTree.passes = 0 + self.__narrativeTree.fixedIt = 1 + self.__problemPhrases = [] + self.errorTraverse(self.__narrativeTree) + else: + # We already fixed it and still not done + # (This should never happen!) + # Stop execution: no more error recovery to attempt + changesMade = 0 + + print("Time for phrase generation for ", areaLabel, ":", time.time()-time1, "Passes", passes) + words = self.__narrativeTree.get("words") + if self.__narrativeTree.fixedIt: + self.__problemPhrases = [] + self.errorTraverse(self.__narrativeTree) + problems = self.errorMsg(passes) + self.log.error(problems) + if words is None: + words = problems + return words + + def traverseTree(self, node): + # Top Down traversal + + if self.__trace: + print("Traversing node:", node.get("name"), node.getAreaLabel(), node.getTimeRange()) + print(" ", node, node.parent) + + # Execute methods at this level + methodList = node.methodList + for method in methodList: + # Need to make sure that node has not been removed by some other method + if method not in node.doneList and not hasattr(node, "removed"): + time1 = time.time() + done = method(self.__narrativeTree, node) + if done: + # Add method to doneList + node.doneList.append(method) + + # Trace output + if self.__infiniteTrace: + print("Traversing node:", node.get("name"), node.getAreaLabel(), \ + node.getTimeRange()) + print(" Method:", method.__name__) + print(" Done", done) + print(" Node", node) + if self.__trace: + if done: + doneStr = "DONE" + else: + doneStr = "" + print("Method", 
method.__name__, doneStr, time.time()-time1, end=' ') + print(" Words", node.get("words")) + + # Execute methods of children + # If ANY child is changed, we are not done + childrenChanged = 0 + for child in node.childList: + childChanged = self.traverseTree(child) + if childChanged: + childrenChanged = 1 + + # See if we made any changes at this level OR at a child's level + changesMade = childrenChanged | node.changeFlag + if self.__trace: print('Changes made:', changesMade) + # Re-set changeFlag + node.changeFlag = 0 + return changesMade + + def __breakOutDefinition(self, argDict, definition, timeRange, areaList): + # Sets up self.__compList: (componentName, timeRange, definition), + # self.__samplerRequests + # self.__statisticsDict + # + # Set up self.__compList + time1 = time.time() + narrativeDef = self.__ut.set(definition,"narrativeDef", None) + # Get list of tuples: forecastType, timeRange, definition + self.__compList = self.__breakOutTimeRange( + argDict, timeRange, narrativeDef, self.__currentLocalTime, self.__shift) + # If error message, return error string + if type(self.__compList) is bytes: + return self.__compList + + #print "Time to make compList", time.time() - time1 + + # Make samplerRequests + firstTime = 1 + samplerRequests = ArrayList() + statisticsDict = {} + moreAreas = [] + + time1 = time.time() + for compName, timeRange, compDefinition in self.__compList: + # Add the analysisList entries to the sampleList and statisticsDict + # for this component + analysisList = self.__ut.set(compDefinition, "analysisList", []) + additionalAnalysisList = self.__ut.set(compDefinition, "additionalAnalysisList", []) + if len(analysisList) == [] and additionalAnalysisList == []: + continue + sampleList = [] + # Make sampleList: (element, methodArgs, editArea, areaLabel) + + # First include the analysisList methods self.__areaList + #print "\nRegular list" + for analysis in analysisList: + element, methodArgs = self.__getAnalysis(analysis) + for editArea, 
areaLabel in self.__areaList: + #print "appending to sampleList", element, editArea.id(), areaLabel + sampleList.append((element, methodArgs, editArea, areaLabel)) + + # Handle additional areas + additionalAreas = self.__ut.set(compDefinition,"additionalAreas", []) + additionalAnalysisList = self.__ut.set( + compDefinition,"additionalAnalysisList", analysisList) + #print "\nForecastNarrative additionalAreas" + for element, leAreaList in additionalAreas: + #print "element, leAreaList", element, leAreaList + for areaLabel in leAreaList: + methodArgsList = self.__findMethods(element, additionalAnalysisList) + for methodArgs in methodArgsList: + editArea = self.findEditArea(None, areaLabel) + #print "appending to sampleList", element, editArea.id(), areaLabel + sampleList.append((element, methodArgs, editArea, areaLabel)) + + # Handle intersect areas + intersectAreas = self.__ut.set(compDefinition,"intersectAreas", []) + # Determine intersectWithAreas + intersectWithAreas = self.__ut.set( + compDefinition,"intersectWithAreas", []) + if intersectWithAreas == []: + intersectWithAreas = self.__areaList + else: + intAreas = [] + for areaLabel in intersectWithAreas: + editArea = self.findEditArea(None, areaLabel) + intAreas.append((editArea, areaLabel)) + intersectWithAreas = intAreas + # Determine intersectAnalysisList + intersectAnalysisList = self.__ut.set( + compDefinition,"intersectAnalysisList", analysisList) + # Set up intersections and sampleList entries + #print "\nIntersect Areas" + for element, leAreaList in intersectAreas: + for leAreaLabel in leAreaList: + editAreas = self.__intersectEditAreas( + leAreaLabel, argDict, intersectWithAreas) + methodArgsList = self.__findMethods(element, intersectAnalysisList) + for editArea in editAreas: + for methodArgs in methodArgsList: + #print "appending to sampleList", element, editArea.id(), areaLabel + sampleList.append((element, methodArgs, editArea, editArea.getId().getName())) + + # Add to samplerRequests and 
statisticsDict + self.__addToRequests(argDict, timeRange, compName, sampleList, + samplerRequests, statisticsDict) + + self.__samplerRequests = samplerRequests + self.__statisticsDict = statisticsDict + #print "Time to create samplerRequests", time.time() - time1 + return None + + def __breakOutTimeRange(self, argDict, timeRange, narrative, currentLocalTime, shift): + "Return a list of tuples: forecastType, timeRange " + # A time period of 0 will be a 1-hour time range but the following + # period will also begin at the same start time. + + # "shift" is the number of hours to add to GMT to get local time. + # All forecasts and start-end times are in GMT time, so the shift is + # used only for labeling purposes, e.g. Today, Tonight, Monday, etc... + + getFcstDef = argDict["getFcstDef"] + crange = timeRange + prevPeriod = 0 + compList = [] + + # Compute midnight of the creation day + creationTime = argDict["creationTime"] + localTime = time.localtime(creationTime) + year = localTime[0] + month = localTime[1] + day = localTime[2] + midnight = AbsTime.absTimeYMD(year, month, day, 0) - shift # midnight LT + + for subType, period in narrative: + #print "subType, period", subType, period + + # Determine sub-TimeRange + if subType == "Custom": + # Handle custom components - added in OB8.2. + # "Custom" components are intended to replace "priorPeriod" which is removed. + # "Custom" component entries in a narrative definition are of the form: + # ("Custom", (componentName, timeRange)) + # where timeRange can be (start_hours, end_hours) or an AFPS.TimeRange. + # Start_hours and end_hours are relative to midnight local time + # of the product creation date. 
+ subType, period = period + if type(period) == tuple and len(period) == 2: + startHour, endHour = period + compRange = TimeRange.TimeRange(midnight + startHour*3600, + midnight + endHour*3600) + else: + compRange = period + else: + # Handle normal component + # If period is zero, make a 1 hour time range + if period == 0: + duration = 1 + else: + duration = period + if prevPeriod == 0: + start = crange.startTime() + else: + start = crange.endTime() + compRange = TimeRange.TimeRange(start, start + self.hrToSec(duration)) + crange = compRange + prevPeriod = period + + # Get definition for component + #print "finding in ForecastNarrative", subType + if subType == "Phantom": + found = 1 + argDict["forecastDef"] = {} + else: + found, module = getFcstDef(subType, argDict) + #print "found" + + if found == 0: + s = "\nProblem finding or importing Text Product " + \ + "Definition: " + subType + "\n" + raise Exception(s) + forecastDef = argDict["forecastDef"] + + # Append to component list + #print "Appending", subType, compRange, forecastDef + compList.append((subType, compRange, forecastDef)) + + # Re-set argDict + argDict["forecastDef"] = self.__narrativeDefinition + return compList + + def __getAnalysis(self, analysis): + if len(analysis) == 2: + element, method = analysis + args = None + else: + element, method, args = analysis + return element, (method, args) + + def __findMethods(self, element, analysisList): + # Find the entries in the analysisList for the given element + # and return a list of (method, args) for that element + methodArgsList = [] + for analysis in analysisList: + analysisElement, methodArgs = self.__getAnalysis(analysis) + if element == analysisElement: + methodArgsList.append(methodArgs) + return methodArgsList + + def __intersectEditAreas(self, leAreaLabel, argDict, intersectWithAreas): + # Make a list of intersections of the local effect area (leAreaLabel) with + # all the edit areas in self.__areaList + intersectAreas = [] + for editArea, 
areaLabel in intersectWithAreas: + # Get the intersect name and see if it is in the cache + intersectLabel = self.getIntersectName(areaLabel, leAreaLabel) + try: + intersectArea = self.__areaCache[intersectLabel] + except: + leArea = self.findEditArea(None, leAreaLabel) + intersectArea = self.intersectAreas(intersectLabel, editArea, leArea) + self.__areaCache[intersectLabel] = intersectArea + if intersectArea is not None: + #print " Appending", intersectLabel + intersectAreas.append(intersectArea) + else: + print(" Empty Intersection, skipping", intersectName) + return intersectAreas + + def __addToRequests(self, argDict, timeRange, componentName, + sampleList, samplerRequests, statisticsDict): + innerList = ArrayList(len(sampleList)) + for element, methodArgs, editArea, areaLabel in sampleList: + if element not in list(statisticsDict.keys()): + statisticsDict[element] = {} + areaDict = statisticsDict[element] + if areaLabel not in list(areaDict.keys()): + areaDict[areaLabel] = {} + trDict = areaDict[areaLabel] + if timeRange not in list(trDict.keys()): + trDict[timeRange] = ([], componentName, {}) + parmID = self.__parmID(element) + innerList.add(SamplerRequest(parmID, editArea, timeRange.toJavaObj())) + #print "Adding to sampler Requests", parmID, editArea.id(), timeRange + methodArgsList, componentName, statDict = trDict[timeRange] + methodArgsList.append(methodArgs) + samplerRequests.addAll(innerList) + + def findEditArea(self, editArea, areaLabel): + # Return given editArea or cached editArea for the given label + # Add to cache if necessary + if areaLabel in list(self.__areaCache.keys()): + return self.__areaCache[areaLabel] + else: + if type(editArea) is str or str(editArea).find('Id') == -1: + #print "Get Edit Area FN: getting edit area 2", areaLabel + editArea = self.getEditAreas(self.__argDict, [areaLabel])[0] + self.__areaCache[areaLabel] = editArea + return editArea + + def __parmID(self, element): + if element in list(self.__elementCache.keys()): + 
return self.__elementCache[element] + else: + parmID = self.getParmID(element, self.__databaseID) + self.__elementCache[element] = parmID + return parmID + + def __createStatisticsDictionary(self, argDict): + # Call the SamplerAnalysis methods and expand self.__statisticsDict + + # Set up skeleton tree and node to be used when looking up user-configurable thresholds. + tree, node = self.getSkeleton(self.__timeRange, None) + + for element in list(self.__statisticsDict.keys()): + parmID = self.__parmID(element) + # Get conversion information + dataType = self.getDataType(element) + #print "Element", element, dataType + # Must use product self to get any overrides to the + # TextRules library that are in standard or local file + # These may have to be looked up per areaLabel and timeRange if the field + # wants it (see above), but currently looked up only once per element + # to save on performance + productSelf = self.__argDict["self"] + inUnits = productSelf.element_inUnits(tree, node, element, element) + outUnits = productSelf.element_outUnits(tree, node, element, element) + convertMethod = productSelf.getConvertMethod(inUnits, outUnits, element) + adjustMethod = productSelf.adjust_method(tree, node, element, element) + roundingMethod = productSelf.rounding_method(tree, node, element, element) + #rangeInfo = productSelf.getRangeInfo(tree, node, element) + # + areaDict = self.__statisticsDict[element] + for areaLabel in list(areaDict.keys()): + tree.areaLabel = areaLabel + node.areaLabel = areaLabel + editArea = self.findEditArea(None, areaLabel) + tree.editArea = editArea + trDict = areaDict[areaLabel] + keys = list(trDict.keys()) + for timeRange in keys: + node.timeRange = timeRange + methodArgsList, componentName, statDict = trDict[timeRange] + node.componentName = componentName + parmHisto = self.__sampler.getParmHisto(parmID, editArea.getId(), timeRange.toJavaObj()) + index = 0 + for methodArgs in methodArgsList: + stats = self.__getStatistics( + element, 
methodArgs, parmHisto, timeRange, editArea, componentName) + #if element == "WindChill": + # method, args = methodArgs + # print "before conversion ", method.__name__, stats, timeRange, editArea.id() + stats = self.__convertStatistics( + tree, node, productSelf, stats, dataType, + convertMethod, adjustMethod, roundingMethod, element, methodArgs) + #if element == "WindChill": + # print "after conversion ", stats + self.__storeStatistics(stats, element, methodArgs, timeRange, trDict, index) + index = index +1 + + # Another pass to remove the methodArgsList, componentName from trDict + for element in list(self.__statisticsDict.keys()): + areaDict = self.__statisticsDict[element] + for areaLabel in list(areaDict.keys()): + trDict = areaDict[areaLabel] + for timeRange in list(trDict.keys()): + methodArgsList, componentName, statDict = trDict[timeRange] + trDict[timeRange] = statDict + + self.__statisticsDictionary = StatisticsDictionary(self.__statisticsDict, self) + #self.__statisticsDictionary.printDictionary("WindGust") + #self.__statisticsDictionary.printDictionary("Wind") + #self.__statisticsDictionary.printDictionary("PoP") + #self.__statisticsDictionary.printDictionary("Wx") + #self.__statisticsDictionary.printDictionary("MaxT") + #self.__statisticsDictionary.printDictionary("WaveHeight") + return None + + def __getStatistics(self, element, methodArgs, parmHisto, timeRange, editArea, componentName): + method, args = methodArgs + if parmHisto.getSampleLen() == 0: + stats = None + else: + if args is not None: + stats = method(parmHisto, timeRange, componentName, args) + else: + stats = method(parmHisto, timeRange, componentName) + parmName = parmHisto.parmID().compositeNameUI() + #if element == "SnowAmt": + # print "Called method", parmName, editArea.id().name(), method.__name__, timeRange + # print " Result", stats + return stats + + def __convertStatistics(self, tree, node, productSelf, statsByRange, dataType, + convertMethod, adjustMethod, roundingMethod, + 
elementName, methodArgs): + # Converts the statistics in the statsByRange given the inUnits, outUnits and increment + # Assumes that stats are either a single value or min/max + # Vectors can be handled as well. + # Weather keys are filtered. + + if statsByRange is None: + return statsByRange + if dataType == self.DISCRETE(): + return statsByRange + + simpleStatFlag = 0 + # Note: we do not checkTupleLists here since we want hourlyTemp + # to pass the "isStatsByRange" test. This way it will go thru + # the conversion code below which works because + # hourlyTemp is composed of 2-tuples and the statement: + # stats,subRange = statsByRange[i] + # assigns the hour to the subRange and then works with the stats. + #print "statsByRange", elementName, statsByRange + if not self.isStatsByRange(dataType, statsByRange, checkTupleLists=0): + simpleStatFlag = 1 + statsByRange = [(statsByRange, "")] + + # Check for binnedPercent which cannot be converted + method, args = methodArgs + if method.__name__ == "binnedPercent": + return statsByRange + + numStats = len(statsByRange) + newList = [] + for i in range(numStats): + stats, subRange = statsByRange[i] + if dataType == self.WEATHER(): + #print "stats before filtering", stats + stats = productSelf.filterSubkeys(tree, node, stats) + #print "stats after filtering", stats + else: + if dataType == self.VECTOR(): + stats, dir = stats + if type(stats) is tuple: + min, max = stats + increment_nlValue = productSelf.increment_nlValue( + tree, node, elementName, elementName) + if min is not None: + min = convertMethod(min) + if type(adjustMethod) is types.MethodType: + min = adjustMethod(min) + min = productSelf.roundValue( + min, roundingMethod, "Nearest", increment_nlValue, 0) + if max is not None: + max = convertMethod(max) + if type(adjustMethod) is types.MethodType: + max = adjustMethod(max) + max = productSelf.roundValue( + max, roundingMethod, "Nearest", increment_nlValue, 1) + #min, max = productSelf.applyRangeValues( + # tree, 
node, min, max, elementName, rangeInfo) + min, max = productSelf.applyRanges(tree, node, min, max, elementName) + stats = (min, max) + else: + if stats is not None: + stats = convertMethod(stats) + if type(adjustMethod) is types.MethodType: + stats = adjustMethod(stats) + increment_nlValue = productSelf.increment_nlValue( + tree, node, elementName, elementName) + stats = productSelf.roundValue( + stats, roundingMethod, "Nearest", increment_nlValue, 1) + if dataType == self.VECTOR(): + stats = (stats, dir) + newList.append(((stats), subRange)) + + if simpleStatFlag: + stats, tr = newList[0] + return stats + return newList + + def __storeStatistics(self, stats, element, methodArgs, timeRange, trDict, index): + method, args = methodArgs + statLabel = element + "__" + method.__name__ + if index == 0: + statLabels = ["", statLabel] + else: + statLabels = [statLabel] + #if element == "Hazards": + # print "Storing Hazards", stats + if type(stats) is list: + # Expand statsByRange to individual time range entries + # Skip special cases of hourlyTemp, discrete_percentages, + # and list of wx or discrete subkeys + try: + value, tr = stats[0] + except: + tr = 0 + if type(tr) is int or type(tr) is float: + pass + else: + for value, tr in stats: + if tr not in list(trDict.keys()): + trDict[tr] = ([], "", {}) + methodArgsList, componentName, statDict = trDict[tr] + for statLabel in statLabels: + statDict[statLabel] = value + methodArgsList, componentName, statDict = trDict[timeRange] + for statLabel in statLabels: + #if element == "Hazards": + # print "storing stats", statLabel, stats + statDict[statLabel] = stats + + def __createNarrativeTree(self, argDict): + #Components + componentList = [] + for componentName, timeRange, definition in self.__compList: + component = self.makeComponent(componentName, timeRange, definition) + componentList.append(component) + + # Narrative + methodList = self.__ut.set(self.__narrativeDefinition, "methodList", []) + self.__narrativeTree = 
Narrative(methodList, componentList, + self.__statisticsDictionary, + self.__issuanceInfo, self, self.__sampler) + self.__narrativeTree.set("timeRange", self.__timeRange) + self.__narrativeTree.set("productTimeRange", self.__productTR) + self.__narrativeTree.set("lineLength", self.__lineLength) + self.__narrativeTree.set("ifpClient", ["ifpClient"]) + self.__narrativeTree.set("combinations", self.__combinations) + self.__narrativeTree.set("argDict", self.__argDict) + for attr, default in [ + ("passLimit", 20), + ("trace", 0), + ("troubleList", None), + ("problemList", None), + ]: + val = self.__ut.set(self.__narrativeDefinition, attr, default) + self.__narrativeTree.set(attr, val) + #self.__narrativeTree.printTree() + return None + + def getDataType(self, element): + if element in self.__gpiCache: + return self.__gpiCache[element] + + parmID = self.__parmID(element) + try: + gridParmInfo = self.__ifpClient.getGridParmInfo(parmID) + except RuntimeError as e: + # AWIPS-I doesn't throw an error here best I can tell and + # most of the time when we hit this except it will be because + # a grid parm was requested that the server doesn't know about. + # So we will not force an alert to be thrown here. 
+ # self.log.exception("Could not retrieve GridParmInfo for " + str(parmID) + gridParmInfo = GridParmInfo() + gridType = str(gridParmInfo.getGridType()) + if gridType == "VECTOR": + gridType = self.VECTOR() + elif gridType == "SCALAR": + gridType = self.SCALAR() + elif gridType == "WEATHER": + gridType = self.WEATHER() + else: + gridType = self.DISCRETE() + self.__gpiCache[element] = gridType + return gridType + + def getLimits(self, element): + parmID = self.__parmID(element) + gridParmInfo = self.__ifpClient.getGridParmInfo(parmID) + return gridParmInfo.getMinValue(), gridParmInfo.getMaxValue() + + def makeComponent(self, componentName, timeRange, definition): + # Phrases and Subphrases + phrases = self.__ut.set(definition, "phraseList", []) + phraseList = [] + for phraseDef in phrases: + newPhrase = self.makePhrase(phraseDef) + phraseList.append(newPhrase) + # Components + methodList = self.__ut.set(definition, "methodList", []) + component = Node(phraseList, methodList) + component.set("timeRange", timeRange) + component.set("definition", definition) + component.set("name", componentName) + if componentName == "Phantom": + component.set("words", "") + return component + + def makePhrase(self, phraseDef): + # Phrases can be a simple method or a tuple containing + # an optional args list and/or an optional localEffects expressed as + # a list of LocalEffect objects or as a method + args = None + localEffectsList = None + if type(phraseDef) is tuple: + if len(phraseDef) == 2: + phraseDef, object1 = phraseDef + objects = [object1] + else: + phraseDef, object1, object2 = phraseDef + objects = [object1, object2] + for object in objects: + # An object can be: + # A local effect expressed as a method + # A local effect expressed as a list of LocalEffect objects + # A list of arguments + if type(object) is types.MethodType: + localEffectsList = object + else: # must be list + if len(object) == 0: + localEffectsList = object + else: + entry = object[0] + if 
isinstance(entry, self.LocalEffect): + localEffectsList = object + else: + args = object + phraseDict = phraseDef() + phraseMethods = self.__ut.set(phraseDict, "phraseMethods", []) + subPhraseMethods = self.__ut.set(phraseDict, "subPhraseMethods", []) + # Add wordMethod and setUpMethod to methodLists + setUpMethod = self.__ut.set(phraseDict, "setUpMethod", None) + wordMethod = self.__ut.set(phraseDict, "wordMethod", None) + + if 0: + if setUpMethod is not None: + if localEffectsList is not None: + productSelf = self.__argDict["self"] + phraseMethods = [setUpMethod, productSelf.checkLocalEffects] + phraseMethods + else: + phraseMethods = [setUpMethod] + phraseMethods + else: + if setUpMethod is not None: + phraseMethods = [setUpMethod] + phraseMethods + + + + if wordMethod is not None: + subPhraseMethods = subPhraseMethods + [wordMethod] + # Phrases can have child phrases + phraseList = self.__ut.set(phraseDict, "phraseList", []) + phraseChildren = [] + for childPhrase in phraseList: + phraseChildren.append(self.makePhrase(childPhrase)) + # Make new node + phraseNode = Node(phraseChildren, phraseMethods) + phraseNode.set("phraseDef", phraseDef) + phraseNode.set("name", phraseDef.__name__) + phraseNode.set("wordMethod", wordMethod) + phraseNode.set("setUpMethod", setUpMethod) + phraseNode.set("subPhraseMethods", subPhraseMethods) + phraseNode.set("args", args) + phraseNode.set("localEffectsList", localEffectsList) + #print "\nMaking new phrase", phraseNode + #traceback.print_stack(limit=6) + + return phraseNode + + def errorMsg(self, passes): + if passes > self.__narrativeTree.passes: + msg = "\n\nWARNING: TOO MANY PASSES ON TREE. \nTraversal:" + else: + msg = "\n\nWARNING: EMPTY WORDS FROM TREE. Traversal:" + probPhrases = self.getProblemPhrases() + msg += "\nPotential problem phrases are:\n" + probPhrases + \ + """ + + Try overriding "subPhrase_limit" from WxPhrases and setting it to 10. + Then report the problem on the listserver. 
+ Also, see the Text Product User Guide section, + "Trouble-shooting Narrative Products". + + """ + return "\n" + repr(passes) + " PASSES. " + msg + + def getProblemPhrases(self, + attrList=["name", "words", "methodList", "doneList"], + ancestorList=["name"]): + probList = self.__narrativeTree.get("problemList") + if probList is not None: + attrList = probList + probStr = "" + for node in self.__problemPhrases: + probStr += "\n From Component: " + str(node.getComponentName()) + for attr in attrList: + if attr in ["methodList", "doneList"]: + probStr = self.addMethods(probStr, node, attr) + else: + probStr += "\n " + attr + " " + str(node.get(attr)) + probStr += "\n Ancestor attributes" + for ancAttr in ancestorList: + probStr += "\n " + ancAttr + " " + str(node.getAncestor(ancAttr)) + return probStr + + def addMethods(self, probStr, node, attr): + methodList = node.get(attr) + probStr += "\n " + attr + if methodList is not None: + for method in methodList: + probStr += "\n " + method.__name__ + return probStr + + def errorTraverse(self, node, attrList=["name", "words"], ancestorList=["name"], + fixPhrases=1): + # Print out relevant attributes of each node + #print "\nNode", node, node.getComponentName() + #for attr in attrList: + # print " ", attr, node.get(attr) + #print " DoneList", node.doneList + #print " MethodList", node.methodList + #print " Ancestor attributes" + #for ancAttr in ancestorList: + # print " ", ancAttr, node.getAncestor(ancAttr) + print(node, node.get('name'), node.get('words')) + childList = node.get('childList') + if childList is None or childList == []: + print("LEAF NODE") + if node.get('words') is None: + print("WITHOUT WORDS!!", node.getAncestor('name')) + errorWords = "|* Please enter " + node.getAncestor('name') + \ + " and refer to log files for more explanation *|" + node.set('words', errorWords) + self.__problemPhrases.append(node) + else: + print(" Children") + for child in node.get('childList'): + self.errorTraverse(child, 
attrList=attrList) + + def getSkeleton(self, timeRange, areaLabel): + tree = Node([],[]) + tree.combinations = self.__combinations + tree.timeRange = self.__timeRange + tree.productTimeRange = self.__productTR + tree.areaLabel = areaLabel + if areaLabel is not None: + editArea = self.findEditArea(None, areaLabel) + tree.editArea = editArea + tree.library = self + node = Node([],[]) + node.parent = tree + node.timeRange = timeRange + node.areaLabel = areaLabel + node.componentName = "" + for compName, compTR, compDefinition in self.__compList: + if compTR.contains(timeRange): + node.componentName = compName + node.componentDef = compDefinition + return tree, node + + def getMergedStats(self, value, mergeMethod, elementName, timeRange, areaLabel, dataType=None): + # Merge the stats according to the mergeMethod: + # Works with either single value or tuple stats (e.g. minMax, medianRange) + # Works with either simple stats or statsByRange + productSelf = self.__argDict["self"] + + if value is None: + return value + if dataType is None: + dataType = self.SCALAR() + if not self.isStatsByRange(dataType, value): + if mergeMethod == "List": + return [(value, timeRange)] + else: + if dataType == self.SCALAR() or dataType == self.VECTOR(): + return self.getValue(value, mergeMethod, dataType) + else: + return value + else: + # Take only subRanges that overlap time range + #print "statsByRange", elementName, mergeMethod, timeRange, value + value = self.screenStatsByRange(value, timeRange) + if mergeMethod == "List": + return value + elif mergeMethod == "MergeBins": + return self.mergeBins(value, timeRange) + else: + if value == []: + return None + #print "value for ", elementName, timeRange, areaLabel + #print " value", value + # For performance, we spell out all the cases + if dataType == self.SCALAR(): + if mergeMethod == "Min": + val = None + for stats, subRange in value: + if stats is None: + continue + stats = self.getValue(stats, "Min") + if val is None or stats < val: + 
val = stats + return val + elif mergeMethod == "Max": + val = None + for stats, subRange in value: + if stats is None: + continue + stats = self.getValue(stats, "Max") + if val is None or stats > val: + val = stats + return val + elif mergeMethod == "MinMax": + min = None + max = None + for stats, subRange in value: + if stats is None: + continue + min1, max1 = self.getValue(stats, "MinMax") + if min1 < min or min is None: + min = min1 + if max1 > max or max is None: + max = max1 + if min is None or max is None: + return None + tree, node = self.getSkeleton(timeRange, areaLabel) + min, max = productSelf.applyRanges(tree, node, min, max, elementName) + return (min, max) + else: # Handle sum or average + sum = 0 + count = 0 + for stats, subRange in value: + if stats is None: + continue + stats = self.getValue(stats, "Average") + sum = sum + stats + count += 1 + if count == 0: + return None + if mergeMethod == "Sum": + return sum + else: # Average + average = float(sum)/count + tree, node = self.getSkeleton(timeRange, areaLabel) + return productSelf.roundStatistic(tree, node, average, elementName) + elif dataType == self.VECTOR(): + # Note that in these cases, a mag, dir is returned, but + # the dir is simply taken from the last stat pair so that + # it is somewhat meaningless + if mergeMethod == "Min": + val = None + for stats, subRange in value: + if stats is None: + continue + stats, dir = stats + stats = self.getValue(stats, "Min") + if val is None or stats < val: + val = stats + if val is None: + return None + return (val, dir) + elif mergeMethod == "Max": + val = None + for stats, subRange in value: + if stats is None: + continue + stats, dir = stats + stats = self.getValue(stats, "Max") + if val is None or stats > val: + val = stats + if val is None: + return None + return (val, dir) + elif mergeMethod == "MinMax": + min = None + max = None + for stats, subRange in value: + if stats is None: + continue + stats, dir = stats + min1, max1 = self.getValue(stats, 
"MinMax") + if min1 < min or min is None: + min = min1 + if max1 > max or max is None: + max = max1 + if min is None or max is None: + return None + tree, node = self.getSkeleton(timeRange, areaLabel) + min, max = productSelf.applyRanges(tree, node, min, max, elementName) + return ((min, max), dir) + else: # Handle sum or average + sum = 0 + count = 0 + for stats, subRange in value: + if stats is None: + continue + stats, dir = stats + stats = self.getValue(stats, "Average") + sum = sum + stats + count += 1 + if count == 0: + return None + if mergeMethod == "Sum": + return (sum, dir) + else: # Average + average = float(sum)/count + tree, node = self.getSkeleton(timeRange, areaLabel) + return (productSelf.roundStatistic(tree, node, average, elementName), dir) + elif dataType == self.WEATHER(): + # Weather: add up all subkeys/remove duplicates by + # making wxkey and re-making subkeylist + # Then filter subkeylist + subkeyList = [] + for stats, subRange in value: + if stats is None: + continue + subkeyList = subkeyList + stats + tree, node = self.getSkeleton(timeRange, areaLabel) + #subkeyList = productSelf.combineSubKeys(tree, node, subkeyList) + #print "subkeyList before", subkeyList + subkeyList = productSelf.filterSubkeys(tree, node, subkeyList) + #print "subkeyList after", subkeyList + return subkeyList + else: + # Discrete: add up all keys/ need to remove duplicates? 
+ keyList = [] + for stats, subRange in value: + if stats is None: + continue + keyList = keyList + stats + return keyList + + def isStatsByRange(self, dataType, value, checkTupleLists=1): + if dataType == self.VECTOR() or dataType == self.SCALAR(): + if type(value) is list: + if checkTupleLists: + # Look for special cases like hourlyTemp which + # does not return statsByRange, but is list of + # tuples + try: + firstValue = value[0] + subRange = firstValue[1] + if type(subRange) is int or \ + type(subRange) is float: + return 0 + except: + pass + return 1 + else: + return 0 + else: + # Need to check more closely for Weather and Discrete + # Possibilities: + # **dominantWx: list of subkeys : return 0 + # **rankedWx: list of (subkey, rank) tuples : return 0 + # **dominantDiscreteValue: list of discrete keys: return 0 + # discretePercentages: list of (key, percentage) tuples: return 0 + # discreteTimeRangesByKey: list of (key, timerange) tuples: return 1 + #print "\ngot here", value + if type(value) is list and len(value) > 0: + try: + stats, tr = value[0] + if isinstance(tr, TimeRange.TimeRange): + return 1 + else: + #print "returning0 0" + return 0 + except: + #print "returning1 0" + return 0 + else: + #print "returning2 0" + return 0 + + def screenStatsByRange(self, statsByRange, timeRange): + newStatsByRange = [] + for stats, subRange in statsByRange: + if subRange.overlaps(timeRange): + newStatsByRange.append((stats, subRange)) + return newStatsByRange + + def mergeBins(self, value, timeRange): + # Value is statsByRange of binLists i.e. (binList, subRange). 
+ # Each binList consists of tuples: (low, high, percent) + # timeRange is the time range over which to merge bins + # Return one merged time-weighted binList + if value is None or len(value) == 0: + return None + newPercents = [] + newBins = [] + binList, subRange = value[0] + if binList is None: + return None + numBins = len(binList) + for bin in binList: + low, high, percent = bin + newPercents.append(0.0) + newBins.append((low, high)) + + for binList, subRange in value: + # print "binList, subRange", binList, subRange + if binList is None: + continue + weight = float(subRange.duration())/timeRange.duration() + # If time range is greater than subRange, give a weight of 1 + # so that the percentage never exceeds 100. + if weight > 1.0: + weight = 1.0 + for i in range(numBins): + low, high, percent = binList[i] + newPercents[i] += percent * weight + + # Glue bin values to merged percentages + for i in range(numBins): + low, high = newBins[i] + newBins[i] = ((low, high, newPercents[i])) + return newBins diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/ForecastTable.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/ForecastTable.py index bab68c40fc..be03c853ff 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/ForecastTable.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/ForecastTable.py @@ -1,529 +1,529 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. 
persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ForecastTable.py -# -# Forecast type: "table" -# Class for processing table Forecasts. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import string, getopt, sys, time, types -import TextRules -import SampleAnalysis - -class WeEntry: - def __init__(self, name, label, analysis, format, dataType, - roundVal=5, conversion=None, maxWidth=3): - # Weather element name, - self.__name = name - # Weather Element label -- - # If you want the label to appear on multiple lines, - # separate the lines with vertical bars - # e.g. Maximum|Temperature - self.__label = label - # SampleAnalysis method - self.__analysis = analysis - # Method to format the analyzed value(s)given dataType - # TextRules.singleValue -- - # needs one value : reports single value - # TextRules.range2Value -- - # needs two values : reports range of 2 values - #NOTE: The Format method must accept input which matches - # the output of the analysis method. 
- self.__format = format - # DataType: Scalar or Vector - self.__dataType = dataType - # Rounding increment e.g. 5 = round final value to - # nearest multiple of 5 - self.__roundVal = roundVal - # Conversion method - self.__conversion = conversion - # Number of digits in max value -- used to determine - # column width for entries - self.__maxWidth = maxWidth - # Period can be set during processing for TextRules - # to access. - self.__period = None - def name(self): - return self.__name - def label(self): - return self.__label - def analysis(self): - return self.__analysis - def format(self): - return self.__format - def dataType(self): - return self.__dataType - def roundVal(self): - return self.__roundVal - def conversion(self): - return self.__conversion - def maxWidth(self): - return self.__maxWidth - def setPeriod(self, period): - self.__period = period - def getPeriod(self): - return self.__period - -class TableVariables: - - # Class to hold the row/column variables for the table - - def __init__(self, constVal, rowList, colList, - setElement, setPeriod, setArea): - - # This is the value of the variable held constant. - self.__constVal = constVal - - # rowList and colList are tuples of label, value pairs - self.__rowList = rowList - self.__colList = colList - - # These are executable python statements to set up the - # current element, period, and area from the current - # row, column, or constant value. 
- self.__setElement = setElement - self.__setPeriod = setPeriod - self.__setArea = setArea - - def constVal(self): - return self.__constVal - def rowList(self): - return self.__rowList - def colList(self): - return self.__colList - def setElement(self): - return self.__setElement - def setPeriod(self): - return self.__setPeriod - def setArea(self): - return self.__setArea - -class ForecastTable(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def generateForecast(self, argDict): - # Create a table - - # Set Up table variables - self.__argDict = argDict - self.__language = argDict["language"] - self.__rowHeading = argDict["heading"] - - self.__userDict = {} - - fcstDef = argDict["forecastDef"] - self.__ut = argDict["utility"] - - begText = self.__ut.set(fcstDef,"beginningText","") - endText = self.__ut.set(fcstDef,"endingText","") - if type(begText) in [types.MethodType, types.FunctionType]: - begText = begText(self, fcstDef, argDict) - if type(endText) in [types.MethodType, types.FunctionType]: - endText = endText(self, fcstDef, argDict) - editAreaLoopBegText = self.__ut.set(fcstDef,"editAreaLoopBegText","") - timeRangeLoopBegText = self.__ut.set(fcstDef,"timeRangeLoopBegText","") - editAreaLoopEndText = self.__ut.set(fcstDef,"editAreaLoopEndText","") - timeRangeLoopEndText = self.__ut.set(fcstDef,"timeRangeLoopEndText","") - - self.__loopMethod = self.__ut.set(fcstDef,"loopMethod") - self.__endMethod = self.__ut.set(fcstDef,"endMethod") - if type(self.__loopMethod) == types.StringType: - exec "self.__loopMethod = self."+self.__loopMethod - if type(self.__endMethod) == types.StringType: - exec "self.__endMethod = self."+self.__endMethod - - colJust = self.__ut.set(fcstDef,"columnJustification", - "Center") - if colJust == "Center": - self.__alignMethod = string.center - elif colJust == "Right": - self.__alignMethod = string.rjust - else: - 
self.__alignMethod = string.ljust - - self.__minColWidth = self.__ut.set(fcstDef,"minimumColumnWidth",8) - - constVar = self.__ut.set(fcstDef, "constantVariable", "period") - outerLoop = self.__ut.set(fcstDef, "outerLoop", "EditArea") - self.__editAreas = argDict["editAreas"] - self.__rawRanges = argDict["rawRanges"] - # Loop through constant variable if multiple edit areas - # or ranges given - fcst = begText - if constVar == "EditArea": - if outerLoop == "EditArea": - for editArea in self.__editAreas: - argDict["editArea"] = editArea - fcst = fcst + editAreaLoopBegText - for rawRange, rangeName in self.__rawRanges: - fcst = fcst + timeRangeLoopBegText - argDict["timeRange"] = rawRange - argDict["timeRangeName"] = rangeName - fcst = fcst + self.__generateTable(fcstDef, argDict) - fcst = fcst + timeRangeLoopEndText - fcst = self.fillSpecial(fcst, argDict) - fcst = fcst + editAreaLoopEndText - fcst = self.fillSpecial(fcst, argDict) - else: - for rawRange, rangeName in self.__rawRanges: - argDict["timeRange"] = rawRange - argDict["timeRangeName"] = rangeName - fcst = fcst + timeRangeLoopBegText - for editArea in self.__editAreas: - argDict["editArea"] = editArea - fcst = fcst + editAreaLoopBegText - fcst = fcst + self.__generateTable(fcstDef, argDict) - fcst = fcst + editAreaLoopEndText - fcst = self.fillSpecial(fcst, argDict) - fcst = fcst + timeRangeLoopEndText - fcst = self.fillSpecial(fcst, argDict) - elif constVar == "TimePeriod" or constVar == "WeatherElement": - for rawRange, rangeName in self.__rawRanges: - argDict["timeRange"] = rawRange - argDict["timeRangeName"] = rangeName - fcst = fcst + timeRangeLoopBegText - fcst = fcst + self.__generateTable(fcstDef, argDict) - fcst = fcst + timeRangeLoopEndText - fcst = self.fillSpecial(fcst, argDict) - fcst = fcst + endText - fcst = self.fillSpecial(fcst, argDict) - return fcst - - def __generateTable(self, fcstDef, argDict): - # Set up the constant value, row values, and column variables - tableVars = 
self.__setupVariables(fcstDef, argDict) - if tableVars is None: - return self.__errorMsg - if self.__sampler is None: - return "Cannot Sample Database: Check for Invalid Weather Elements or Edit Areas" - - # Add to argDict so endMethod could use it for labeling the - # table - argDict["tableVars"] = tableVars - - # Create Table - - # Beginning Text - table = "" - # Create table heading and determine column lengths - tableHeading, colLengths = self.__getTableHeading( - self.__rowHeading, tableVars) - table = table + tableHeading - - # Fill in each Row and Column - for rowValue, rowLabel in tableVars.rowList(): - # rowEntries is a list of colValue, value tuples - # describing the entries in this row. - # It is passed to a loopMethod which might be - # collecting statistics or modifying the row. - rowEntries = [] - entries = [rowLabel] - for colValue, colLabel in tableVars.colList(): - entry = self.__getNextEntry( - tableVars, colValue, rowValue, rowEntries) - entries.append(entry) - row = self.__createRow(entries, colLengths) - # Hook for gathering statistics or modifying row - if not self.__loopMethod is None: - args = self.__loopMethod.func_code.co_varnames - if args[0] == "self": - rowText = self.__loopMethod( - self, row, rowLabel, rowEntries, self.__userDict, argDict) - else: - rowText = self.__loopMethod( - row, rowLabel, rowEntries, self.__userDict, argDict) - # For backward compatibility, only use returned value if is not None - if rowText is not None: - row = rowText - table = table + row - - # Call User's end method - if not self.__endMethod is None: - args = self.__endMethod.func_code.co_varnames - if args[0] == "self": - table = self.__endMethod(self, table, self.__userDict, argDict) - else: - table = self.__endMethod(table, self.__userDict, argDict) - - - argDict["element"] = self.__weList[0].name() - - # Translate - table = self.translateForecast(table, self.__language) - # Generate Html - self.__doHtml(argDict, table) - return table - - def 
__createRow(self, rowEntries, colLengths): - # Given a set of rowEntries and corresponding colLengths - # return a row which can span more than one line. - # rowEntries will have vertical bars separating entry lines. - # E.g. rowEntry = "PARTLY|CLOUDY" will appear on two lines - # The first rowEntry is left-justified, the rest use the - # self.__alignMethod - lines = [""] - index = 0 - # First row entry is left-justified - alignMethod = string.ljust - for rowEntry in rowEntries: - # Split entry into list of words - words = string.split(rowEntry, "|") - - if len(words) > len(lines): - # Add more lines if necessary - for i in range(len(words)-len(lines)): - spaces = 0 - for ind in range(0,index): - spaces = spaces + colLengths[ind] - lines.append(alignMethod("",spaces)) - - # Add the words to the appropriate line of the heading - ind = 0 - newlines = [] - for line in lines: - if ind > len(words)-1: - word = "" - else: - word = words[ind] - line = line + alignMethod(word, colLengths[index]) - newlines.append(line) - ind = ind + 1 - lines = newlines - -# for word in words: -# lines[words.index(word)] = lines[words.index(word)] + \ -# alignMethod(word,colLengths[index]) - - # After first time through, switch the alignment method - alignMethod = self.__alignMethod - index = index + 1 - - # Compose the lines of the row - row = "" - for line in lines: - row = row + line + "\n" - return row - - def __setupVariables(self, fcstDef, argDict): - - constVar = self.__ut.set(fcstDef, "constantVariable", "period") - rowVar = self.__ut.set(fcstDef, "rowVariable", "area") - columnVar = self.__ut.set(fcstDef, "columnVariable", "element") - periodLabelMethod = self.__ut.set(fcstDef,"periodLabelMethod", - self.periodLabel) - if type(periodLabelMethod) == types.StringType: - exec "periodLabelMethod = self."+periodLabelMethod - periodLabelFormat = self.__ut.set(fcstDef,"periodLabelFormat", - None) - weTuples = self.__ut.set(fcstDef,"elementList") - self.__weList = 
self.__setUpElements(weTuples) - if self.__weList is None: - return None - period = argDict["timePeriod"] - span = self.__ut.set(fcstDef,"timeSpan", period) - if span == "timePeriod": - span = period - - # Set up Constant variable - # If time varies, we need to compute a variable histo - # for each time period. - if constVar == "TimePeriod": - constVal = argDict["timeRange"] - periods = [(constVal, "")] - setPeriod = "period = constVal" - if constVar == "EditArea": - constVal, constLabel = argDict["editArea"] - areas = [(constVal, constLabel)] - setArea = "area = constVal" - if constVar == "WeatherElement": - constVal = self.__weList[0] - elements = [constVal] - setElement = "element = constVal" - - # Set up row variables and labels - if rowVar == "TimePeriod": - periods = rowList = self.getPeriods( - argDict["timeRange"], period, span, None, periodLabelMethod, - periodLabelFormat) - setPeriod = "period = rowValue" - if rowVar == "EditArea": - areas = rowList = self.__editAreas - setArea = "area = rowValue" - if rowVar == "WeatherElement": - elements = rowList = self.__getElements(self.__weList) - setElement = "element = rowValue" - - # Set up column variables and labels - if columnVar == "TimePeriod": - periods = colList = self.getPeriods( - argDict["timeRange"], period, span, None, periodLabelMethod, - periodLabelFormat) - setPeriod = "period = colValue" - if columnVar == "EditArea": - areas = colList = self.__editAreas - setArea = "area = colValue" - if columnVar == "WeatherElement": - elements = colList = self.__getElements(self.__weList) - setElement = "element = colValue" - - # Create the HistoSampler and the SampleAnalysis objects - elementList = [] - for element in self.__weList: - elementList.append((element.name(), element.analysis())) - sampleInfo = [(elementList, periods, areas)] - self.__sampler = self.getSampler(argDict, sampleInfo) - return TableVariables(constVal, rowList, colList, - setPeriod, setArea, setElement) - - def __getElements(self, 
weList): - # Make a list of label, element tuples using the element list - elementList = [] - for element in weList: - elementList.append((element, element.label())) - return elementList - - def __getTableHeading(self, rowType, tableVars): - # Create the column label line(s) from the colList information - # Return the column lengths - - # The table heading can be multiple lines. - # The rowType is the leftmost heading followed by the - # headings for the columns. - # The heading for a Weather element will contain slashes to denote - # how the heading is split between lines. - # E.g. AREAL/COVERAGE will appear on two lines: - # AREAL centered over COVERAGE - - colLengths = [] - - # Find the maximum rowValue length - maxlen = len(rowType) - for rowValue, rowLabel in tableVars.rowList(): - if len(rowLabel) > maxlen: - maxlen = len(rowLabel) - - # Set up the row type label e.g. City - colLengths.append(maxlen + 2) - rowHeading = string.ljust(rowType, colLengths[0]) - - # Set up the column labels - entries = [rowHeading] - for colValue, colLabel in tableVars.colList(): - # Split label into list of words separated by slashes, - # one word for each line, padded and centered relative to - # longest word - entries.append(colLabel) - words = string.split(colLabel, "|") - maxlen = len(words[0]) - maxlen = maxlen + 2 - if maxlen < self.__minColWidth: - maxlen = self.__minColWidth - colLengths.append(maxlen) - - # Compose the lines of the table heading - tableHeading = self.__createRow(entries, colLengths) - tableHeading = tableHeading + "\n" - - return tableHeading, colLengths - - def __getNextEntry(self, tableVars, colValue, rowValue, rowEntries): - - constVal = tableVars.constVal() - # Set up variables: period, area, element - exec tableVars.setPeriod() - exec tableVars.setArea() - exec tableVars.setElement() - - # Analyze the entry given the area, element, and period - analysisList = [(element.name(), element.analysis())] - analysisList = 
self.convertAnalysisList(analysisList) - statDict = self.getStatDict( - self.__sampler, analysisList, period, area) - value = statDict[element.name()] - rowEntries.append((colValue, value)) - - # Format the analyzed value(s) - element.setPeriod(period) - format = element.format() - #print "element", element.name(), value - # If format method is in quotes, assume a method within this - # inheritance hierarchy. - if type(format) == types.StringType: - exec "format = self."+format - entry = format(element, value) - else: - # Check for first argument "self" - args = format.func_code.co_varnames - if args[0] == "self": - entry = format(self, element, value) - else: - entry = format(element, value) - return entry - - def __doHtml(self, argDict, table): - # Generate HTML if required - if argDict.has_key("template") and not argDict["template"] is None: - template = argDict["template"] - htmlFile = argDict["htmlFile"] - htmlTable = "
" + table + "
" - argDict["type"] = type - valueDict = self.getValueDict(argDict) - valueDict["Table"] = htmlTable - valueDict['AudioText'] = table - argDict["issueRange"] = argDict["timeRange"] - valueDict['Time'] = self.getIssueTime(argDict) - self.generateHtml(valueDict, template, htmlFile) - - def __setUpDict(self, list): - # Set up a dictionary from the list of tuples - if list is None: - return None - dict = {} - for name, value in list: - dict[name] = value - return dict - - def __setUpElements(self, list): - # Set up a list of WeEntries from the list of tuples - if list is None: - self.__errorMsg = "Empty Weather Element List." - return None - newList = [] - for name,label,analysis,format,dataType,roundVal,conversion in list: - if type(conversion) == types.StringType: - exec "conversion = self."+conversion - newList.append(WeEntry(name,label,analysis,format,dataType, - roundVal, conversion)) - return newList - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ForecastTable.py +# +# Forecast type: "table" +# Class for processing table Forecasts. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. 
+## + +import string, getopt, sys, time, types +import TextRules +import SampleAnalysis + +class WeEntry: + def __init__(self, name, label, analysis, format, dataType, + roundVal=5, conversion=None, maxWidth=3): + # Weather element name, + self.__name = name + # Weather Element label -- + # If you want the label to appear on multiple lines, + # separate the lines with vertical bars + # e.g. Maximum|Temperature + self.__label = label + # SampleAnalysis method + self.__analysis = analysis + # Method to format the analyzed value(s)given dataType + # TextRules.singleValue -- + # needs one value : reports single value + # TextRules.range2Value -- + # needs two values : reports range of 2 values + #NOTE: The Format method must accept input which matches + # the output of the analysis method. + self.__format = format + # DataType: Scalar or Vector + self.__dataType = dataType + # Rounding increment e.g. 5 = round final value to + # nearest multiple of 5 + self.__roundVal = roundVal + # Conversion method + self.__conversion = conversion + # Number of digits in max value -- used to determine + # column width for entries + self.__maxWidth = maxWidth + # Period can be set during processing for TextRules + # to access. + self.__period = None + def name(self): + return self.__name + def label(self): + return self.__label + def analysis(self): + return self.__analysis + def format(self): + return self.__format + def dataType(self): + return self.__dataType + def roundVal(self): + return self.__roundVal + def conversion(self): + return self.__conversion + def maxWidth(self): + return self.__maxWidth + def setPeriod(self, period): + self.__period = period + def getPeriod(self): + return self.__period + +class TableVariables: + + # Class to hold the row/column variables for the table + + def __init__(self, constVal, rowList, colList, + setElement, setPeriod, setArea): + + # This is the value of the variable held constant. 
+ self.__constVal = constVal + + # rowList and colList are tuples of label, value pairs + self.__rowList = rowList + self.__colList = colList + + # These are executable python statements to set up the + # current element, period, and area from the current + # row, column, or constant value. + self.__setElement = setElement + self.__setPeriod = setPeriod + self.__setArea = setArea + + def constVal(self): + return self.__constVal + def rowList(self): + return self.__rowList + def colList(self): + return self.__colList + def setElement(self): + return self.__setElement + def setPeriod(self): + return self.__setPeriod + def setArea(self): + return self.__setArea + +class ForecastTable(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def generateForecast(self, argDict): + # Create a table + + # Set Up table variables + self.__argDict = argDict + self.__language = argDict["language"] + self.__rowHeading = argDict["heading"] + + self.__userDict = {} + + fcstDef = argDict["forecastDef"] + self.__ut = argDict["utility"] + + begText = self.__ut.set(fcstDef,"beginningText","") + endText = self.__ut.set(fcstDef,"endingText","") + if type(begText) in [types.MethodType, types.FunctionType]: + begText = begText(self, fcstDef, argDict) + if type(endText) in [types.MethodType, types.FunctionType]: + endText = endText(self, fcstDef, argDict) + editAreaLoopBegText = self.__ut.set(fcstDef,"editAreaLoopBegText","") + timeRangeLoopBegText = self.__ut.set(fcstDef,"timeRangeLoopBegText","") + editAreaLoopEndText = self.__ut.set(fcstDef,"editAreaLoopEndText","") + timeRangeLoopEndText = self.__ut.set(fcstDef,"timeRangeLoopEndText","") + + self.__loopMethod = self.__ut.set(fcstDef,"loopMethod") + self.__endMethod = self.__ut.set(fcstDef,"endMethod") + if type(self.__loopMethod) == bytes: + exec("self.__loopMethod = self."+self.__loopMethod) + if type(self.__endMethod) == bytes: 
+ exec("self.__endMethod = self."+self.__endMethod) + + colJust = self.__ut.set(fcstDef,"columnJustification", + "Center") + if colJust == "Center": + self.__alignMethod = string.center + elif colJust == "Right": + self.__alignMethod = string.rjust + else: + self.__alignMethod = string.ljust + + self.__minColWidth = self.__ut.set(fcstDef,"minimumColumnWidth",8) + + constVar = self.__ut.set(fcstDef, "constantVariable", "period") + outerLoop = self.__ut.set(fcstDef, "outerLoop", "EditArea") + self.__editAreas = argDict["editAreas"] + self.__rawRanges = argDict["rawRanges"] + # Loop through constant variable if multiple edit areas + # or ranges given + fcst = begText + if constVar == "EditArea": + if outerLoop == "EditArea": + for editArea in self.__editAreas: + argDict["editArea"] = editArea + fcst = fcst + editAreaLoopBegText + for rawRange, rangeName in self.__rawRanges: + fcst = fcst + timeRangeLoopBegText + argDict["timeRange"] = rawRange + argDict["timeRangeName"] = rangeName + fcst = fcst + self.__generateTable(fcstDef, argDict) + fcst = fcst + timeRangeLoopEndText + fcst = self.fillSpecial(fcst, argDict) + fcst = fcst + editAreaLoopEndText + fcst = self.fillSpecial(fcst, argDict) + else: + for rawRange, rangeName in self.__rawRanges: + argDict["timeRange"] = rawRange + argDict["timeRangeName"] = rangeName + fcst = fcst + timeRangeLoopBegText + for editArea in self.__editAreas: + argDict["editArea"] = editArea + fcst = fcst + editAreaLoopBegText + fcst = fcst + self.__generateTable(fcstDef, argDict) + fcst = fcst + editAreaLoopEndText + fcst = self.fillSpecial(fcst, argDict) + fcst = fcst + timeRangeLoopEndText + fcst = self.fillSpecial(fcst, argDict) + elif constVar == "TimePeriod" or constVar == "WeatherElement": + for rawRange, rangeName in self.__rawRanges: + argDict["timeRange"] = rawRange + argDict["timeRangeName"] = rangeName + fcst = fcst + timeRangeLoopBegText + fcst = fcst + self.__generateTable(fcstDef, argDict) + fcst = fcst + timeRangeLoopEndText + 
fcst = self.fillSpecial(fcst, argDict) + fcst = fcst + endText + fcst = self.fillSpecial(fcst, argDict) + return fcst + + def __generateTable(self, fcstDef, argDict): + # Set up the constant value, row values, and column variables + tableVars = self.__setupVariables(fcstDef, argDict) + if tableVars is None: + return self.__errorMsg + if self.__sampler is None: + return "Cannot Sample Database: Check for Invalid Weather Elements or Edit Areas" + + # Add to argDict so endMethod could use it for labeling the + # table + argDict["tableVars"] = tableVars + + # Create Table + + # Beginning Text + table = "" + # Create table heading and determine column lengths + tableHeading, colLengths = self.__getTableHeading( + self.__rowHeading, tableVars) + table = table + tableHeading + + # Fill in each Row and Column + for rowValue, rowLabel in tableVars.rowList(): + # rowEntries is a list of colValue, value tuples + # describing the entries in this row. + # It is passed to a loopMethod which might be + # collecting statistics or modifying the row. 
+ rowEntries = [] + entries = [rowLabel] + for colValue, colLabel in tableVars.colList(): + entry = self.__getNextEntry( + tableVars, colValue, rowValue, rowEntries) + entries.append(entry) + row = self.__createRow(entries, colLengths) + # Hook for gathering statistics or modifying row + if not self.__loopMethod is None: + args = self.__loopMethod.__code__.co_varnames + if args[0] == "self": + rowText = self.__loopMethod( + self, row, rowLabel, rowEntries, self.__userDict, argDict) + else: + rowText = self.__loopMethod( + row, rowLabel, rowEntries, self.__userDict, argDict) + # For backward compatibility, only use returned value if is not None + if rowText is not None: + row = rowText + table = table + row + + # Call User's end method + if not self.__endMethod is None: + args = self.__endMethod.__code__.co_varnames + if args[0] == "self": + table = self.__endMethod(self, table, self.__userDict, argDict) + else: + table = self.__endMethod(table, self.__userDict, argDict) + + + argDict["element"] = self.__weList[0].name() + + # Translate + table = self.translateForecast(table, self.__language) + # Generate Html + self.__doHtml(argDict, table) + return table + + def __createRow(self, rowEntries, colLengths): + # Given a set of rowEntries and corresponding colLengths + # return a row which can span more than one line. + # rowEntries will have vertical bars separating entry lines. + # E.g. 
rowEntry = "PARTLY|CLOUDY" will appear on two lines + # The first rowEntry is left-justified, the rest use the + # self.__alignMethod + lines = [""] + index = 0 + # First row entry is left-justified + alignMethod = string.ljust + for rowEntry in rowEntries: + # Split entry into list of words + words = string.split(rowEntry, "|") + + if len(words) > len(lines): + # Add more lines if necessary + for i in range(len(words)-len(lines)): + spaces = 0 + for ind in range(0,index): + spaces = spaces + colLengths[ind] + lines.append(alignMethod("",spaces)) + + # Add the words to the appropriate line of the heading + ind = 0 + newlines = [] + for line in lines: + if ind > len(words)-1: + word = "" + else: + word = words[ind] + line = line + alignMethod(word, colLengths[index]) + newlines.append(line) + ind = ind + 1 + lines = newlines + +# for word in words: +# lines[words.index(word)] = lines[words.index(word)] + \ +# alignMethod(word,colLengths[index]) + + # After first time through, switch the alignment method + alignMethod = self.__alignMethod + index = index + 1 + + # Compose the lines of the row + row = "" + for line in lines: + row = row + line + "\n" + return row + + def __setupVariables(self, fcstDef, argDict): + + constVar = self.__ut.set(fcstDef, "constantVariable", "period") + rowVar = self.__ut.set(fcstDef, "rowVariable", "area") + columnVar = self.__ut.set(fcstDef, "columnVariable", "element") + periodLabelMethod = self.__ut.set(fcstDef,"periodLabelMethod", + self.periodLabel) + if type(periodLabelMethod) == bytes: + exec("periodLabelMethod = self."+periodLabelMethod) + periodLabelFormat = self.__ut.set(fcstDef,"periodLabelFormat", + None) + weTuples = self.__ut.set(fcstDef,"elementList") + self.__weList = self.__setUpElements(weTuples) + if self.__weList is None: + return None + period = argDict["timePeriod"] + span = self.__ut.set(fcstDef,"timeSpan", period) + if span == "timePeriod": + span = period + + # Set up Constant variable + # If time varies, we need 
to compute a variable histo + # for each time period. + if constVar == "TimePeriod": + constVal = argDict["timeRange"] + periods = [(constVal, "")] + setPeriod = "period = constVal" + if constVar == "EditArea": + constVal, constLabel = argDict["editArea"] + areas = [(constVal, constLabel)] + setArea = "area = constVal" + if constVar == "WeatherElement": + constVal = self.__weList[0] + elements = [constVal] + setElement = "element = constVal" + + # Set up row variables and labels + if rowVar == "TimePeriod": + periods = rowList = self.getPeriods( + argDict["timeRange"], period, span, None, periodLabelMethod, + periodLabelFormat) + setPeriod = "period = rowValue" + if rowVar == "EditArea": + areas = rowList = self.__editAreas + setArea = "area = rowValue" + if rowVar == "WeatherElement": + elements = rowList = self.__getElements(self.__weList) + setElement = "element = rowValue" + + # Set up column variables and labels + if columnVar == "TimePeriod": + periods = colList = self.getPeriods( + argDict["timeRange"], period, span, None, periodLabelMethod, + periodLabelFormat) + setPeriod = "period = colValue" + if columnVar == "EditArea": + areas = colList = self.__editAreas + setArea = "area = colValue" + if columnVar == "WeatherElement": + elements = colList = self.__getElements(self.__weList) + setElement = "element = colValue" + + # Create the HistoSampler and the SampleAnalysis objects + elementList = [] + for element in self.__weList: + elementList.append((element.name(), element.analysis())) + sampleInfo = [(elementList, periods, areas)] + self.__sampler = self.getSampler(argDict, sampleInfo) + return TableVariables(constVal, rowList, colList, + setPeriod, setArea, setElement) + + def __getElements(self, weList): + # Make a list of label, element tuples using the element list + elementList = [] + for element in weList: + elementList.append((element, element.label())) + return elementList + + def __getTableHeading(self, rowType, tableVars): + # Create the column 
label line(s) from the colList information + # Return the column lengths + + # The table heading can be multiple lines. + # The rowType is the leftmost heading followed by the + # headings for the columns. + # The heading for a Weather element will contain slashes to denote + # how the heading is split between lines. + # E.g. AREAL/COVERAGE will appear on two lines: + # AREAL centered over COVERAGE + + colLengths = [] + + # Find the maximum rowValue length + maxlen = len(rowType) + for rowValue, rowLabel in tableVars.rowList(): + if len(rowLabel) > maxlen: + maxlen = len(rowLabel) + + # Set up the row type label e.g. City + colLengths.append(maxlen + 2) + rowHeading = string.ljust(rowType, colLengths[0]) + + # Set up the column labels + entries = [rowHeading] + for colValue, colLabel in tableVars.colList(): + # Split label into list of words separated by slashes, + # one word for each line, padded and centered relative to + # longest word + entries.append(colLabel) + words = string.split(colLabel, "|") + maxlen = len(words[0]) + maxlen = maxlen + 2 + if maxlen < self.__minColWidth: + maxlen = self.__minColWidth + colLengths.append(maxlen) + + # Compose the lines of the table heading + tableHeading = self.__createRow(entries, colLengths) + tableHeading = tableHeading + "\n" + + return tableHeading, colLengths + + def __getNextEntry(self, tableVars, colValue, rowValue, rowEntries): + + constVal = tableVars.constVal() + # Set up variables: period, area, element + exec(tableVars.setPeriod()) + exec(tableVars.setArea()) + exec(tableVars.setElement()) + + # Analyze the entry given the area, element, and period + analysisList = [(element.name(), element.analysis())] + analysisList = self.convertAnalysisList(analysisList) + statDict = self.getStatDict( + self.__sampler, analysisList, period, area) + value = statDict[element.name()] + rowEntries.append((colValue, value)) + + # Format the analyzed value(s) + element.setPeriod(period) + format = element.format() + #print 
"element", element.name(), value + # If format method is in quotes, assume a method within this + # inheritance hierarchy. + if type(format) == bytes: + exec("format = self."+format) + entry = format(element, value) + else: + # Check for first argument "self" + args = format.__code__.co_varnames + if args[0] == "self": + entry = format(self, element, value) + else: + entry = format(element, value) + return entry + + def __doHtml(self, argDict, table): + # Generate HTML if required + if "template" in argDict and not argDict["template"] is None: + template = argDict["template"] + htmlFile = argDict["htmlFile"] + htmlTable = "
" + table + "
" + argDict["type"] = type + valueDict = self.getValueDict(argDict) + valueDict["Table"] = htmlTable + valueDict['AudioText'] = table + argDict["issueRange"] = argDict["timeRange"] + valueDict['Time'] = self.getIssueTime(argDict) + self.generateHtml(valueDict, template, htmlFile) + + def __setUpDict(self, list): + # Set up a dictionary from the list of tuples + if list is None: + return None + dict = {} + for name, value in list: + dict[name] = value + return dict + + def __setUpElements(self, list): + # Set up a list of WeEntries from the list of tuples + if list is None: + self.__errorMsg = "Empty Weather Element List." + return None + newList = [] + for name,label,analysis,format,dataType,roundVal,conversion in list: + if type(conversion) == bytes: + exec("conversion = self."+conversion) + newList.append(WeEntry(name,label,analysis,format,dataType, + roundVal, conversion)) + return newList + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/FormatterRunner.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/FormatterRunner.py index 794d61c188..3c72b2265f 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/FormatterRunner.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/FormatterRunner.py @@ -1,554 +1,554 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -import TimeRange, AbsTime -import logging -import TextFormatter -import time, os, string, inspect, sys -import JUtil, VarDictGroker -import RedirectLogging -import UFStatusHandler - -from com.raytheon.uf.viz.core import VizApp -from com.raytheon.uf.common.gfe.ifpclient import PyFPClient - -# -# Runs the text formatter to generate text products -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# May 29, 2008 njensen Initial Creation. -# Dev 10, 2014 14946 ryu Add getTimeZones() function. -# Apr 16, 2015 14946 ryu Fix getTimeZones to return the office TZ if timezone -# is not set for any zone in a segment. -# Apr 20, 2015 4027 randerso Fixes for formatter autotests -# Apr 25, 2015 4952 njensen Updated for new JEP API -# May 06, 2015 4467 randerso Convert to upper case before writing to files if -# mixed case is not enabled for the product. -# Cleaned up file writing code -# Jul 29, 2015 4263 dgilling Support updated TextProductManager. -# Nov 30, 2015 5129 dgilling Support new IFPClient. -# Sep 28, 2016 19293 randerso Log formatter exceptions to formatter log file -# Feb 07, 2017 6092 randerso Changed startTime and endTime to be time.struct_times -# Feb 26, 2018 7230 mapeters Don't reset DRT time to real time -# -## - -## -# This is a base file that is not intended to be overridden. 
-## - -displayNameDict = {} - -# Set up logging info -PLUGIN_NAME = 'com.raytheon.viz.gfe' -CATEGORY = 'GFE' -DEFAULT_LOG_FILENAME = '/tmp/gfe.log' -FILEMODE='w' -PATH_MGR = None - -try: - logging.basicConfig(level=logging.INFO) - logger = logging.getLogger("FormatterRunner") - - - formatter = logging.Formatter("%(asctime)s:%(name)s:%(levelname)s:%(message)s") - - # Get the information for the file logger - from com.raytheon.uf.common.localization import PathManagerFactory - from com.raytheon.uf.common.localization import LocalizationContext - LocalizationType = LocalizationContext.LocalizationType - LocalizationLevel = LocalizationContext.LocalizationLevel - PATH_MGR = PathManagerFactory.getPathManager() -except: - logging.basicConfig(filename=DEFAULT_LOG_FILENAME,level=logging.DEBUG) - logger = logging.getLogger() - logger.exception("Exception occurred") - -## TODO: Remove use of DataManager in this code. Will need to coordinate with -## the field developers to ensure local site overrides aren't relying on having -## access to it. 
-def executeFromJava(databaseID, site, username, dataMgr, forecastList, logFile, cmdLineVarDict=None, - drtTime=None, vtecMode=None, vtecActiveTable="active", testMode=0 ): - if type(forecastList) is not list: - forecastList = [str(forecastList)] - - # Set up the file logger for this product -# ctx = PATH_MGR.getContext(LocalizationType.valueOf('CAVE_STATIC'), LocalizationLevel.valueOf('USER')) -# logFile = PATH_MGR.getFile(ctx, os.path.join('gfe', 'logs', forecastList[0])).getPath() - logger.info("logFile: " + str(logFile)) - fh = logging.FileHandler(filename=logFile, mode=FILEMODE) - fh.setLevel(logging.INFO) - fh.setFormatter(formatter) - logger.addHandler(fh) - - # redirect stdout and stderr to logger - RedirectLogging.redirect(logger, stdout=True, stderr=True) - - logger.info(forecastList[0]) - - site = str(site) - databaseID = str(databaseID) - username = str(username) - - startTime = time.time() - logger.info("Text Formatter Starting") - - try: - forecasts = runFormatter(databaseID=databaseID, site=site, forecastList=forecastList, testMode=testMode, - cmdLineVarDict=cmdLineVarDict, vtecMode=vtecMode, username=username, - dataMgr=dataMgr, drtTime=drtTime, vtecActiveTable=vtecActiveTable) - except: - logger.exception("Error generating text product") - raise - - elapsedTime = (time.time() - startTime)*1000 - logger.info("Text Formatter Finished, took: %d ms",elapsedTime) - - RedirectLogging.restore() - return forecasts - -def getPid(forecast): - # taken from ProductParser.py - import re - - sl = r'^' # start of line - el = r'\s*?\n' # end of line - id3 = r'[A-Za-z]{3}' # 3 charater word - empty = r'^\s*' + el # empty line - - wmoid = r'(?P[A-Z]{4}\d{2})' # wmoid - fsid = r'(?P[A-Z]{4})' # full station id - pit = r'(?P\d{6})' # product issuance time UTC - ff = r'(?P ' + id3 + ')?' 
# "funny" field - - # CI block - ci_start = sl + wmoid + ' ' + fsid + ' ' + pit + ff + el - awipsid = r'(?P(?P[A-Z0-9]{3})(?P[A-Z0-9]{1,3}))' + el - ci_block = r'(?P' + ci_start + awipsid + '\n?)' - - ci_re = re.compile(ci_block) - - pid = None - m = ci_re.search(forecast) - if m is not None: - pid = m.group('cat') - - return pid - -def runFormatter(databaseID, site, forecastList, cmdLineVarDict, vtecMode, - username, dataMgr, serverFile=None, - editAreas=[], timeRanges=[], timePeriod=None, drtTime=None, - vtecActiveTable='active', testMode=0, experimentalMode=0, serverOutputFile=None, - startTime=None, endTime=None, language=None, outputFile=None, appendFile=None - ): - - if cmdLineVarDict: - exec "cmdLineVarDict = " + cmdLineVarDict - else: - cmdLineVarDict = {} - - # Set default Forecast Type - if len(forecastList) == 0: - usage() - logger.error("ForecastList [-t] is empty or missing") - return - - # Can't have both T and E modes - if testMode and experimentalMode: - usage() - logger.error("Can't have both -T and -E switches") - return - - if drtTime: - import offsetTime - offsetTime.setDrtOffset(drtTime) - - # Create Time Range - useRawTR = 0 - if startTime is not None and endTime is not None: - start = decodeTimeStruct(startTime) - end = decodeTimeStruct(endTime) - timeRange = TimeRange.TimeRange(start, end) - # Set so this time range will override all others - useRawTR = 1 - else: - timeRange = None - - # Handle the VTEC modes - if vtecMode is not None and vtecMode not in ['X','O','T','E']: - usage() - logger.error("-v vtecMode must be ['X', 'O', 'T', 'E']") - sys.exit(1) - - #force VTEC mode to "T" if in TEST mode and another vtecCode is specified - if testMode and vtecMode is not None: - vtecMode = "T" - - #force VTEC mode to "E" if in EXPERIMENTAL mode and another vtecCode - #is specified - elif experimentalMode and vtecMode is not None: - vtecMode = "E" - - #force into TEST mode, if vtec code is 'T' - if vtecMode == "T": - testMode = 1 - experimentalMode 
= 0 - elif vtecMode == "E": - experimentalMode = 1 - testMode = 0 - - # Create an ifpClient - ifpClient = PyFPClient(VizApp.getWsId(), site) - - global GridLoc - GridLoc = ifpClient.getDBGridLocation() - #importer = TextIFPImporter(ifpClient) - #importer.install() - import Utility - - import ForecastNarrative - import ForecastTable - - import Analysis - - site = str(ifpClient.getSiteID()[0]) - - # Create dictionary of arguments - argDict = { - #"host" : host, - #"port" : port, - "databaseID": databaseID, - "site" : site, - "cmdLineVarDict": cmdLineVarDict, - "serverFile": serverFile, - "editAreas": editAreas, - "timeRanges": timeRanges, - "timeRange": timeRange, - "timePeriod": timePeriod, - "useRawTR": useRawTR, - "vtecMode": vtecMode, - "vtecActiveTable": vtecActiveTable, - "testMode": testMode, - "experimentalMode": experimentalMode, - "serverOutputFile": serverOutputFile, - } - # Handle command line switches for variables that can be - # set elsewhere i.e. in the command line varDict OR the - # product definition section. - # If there was a command line switch for these items, - # make an entry in argDict. Otherwise, do not. 
- for item in ["language", "outputFile", "appendFile"]: - exec "if " + item + " is not None: argDict['" + item + "'] = " + item - - logger.info("Arguments: " + str(argDict)) - - argDict["ifpClient"] = ifpClient - argDict["utility"] = Utility.Utility(None, None, ifpClient) - #argDict["AFPS"] = AFPS - #argDict["AFPSSup"] = AFPSSup - argDict["Analysis"] = Analysis - argDict["ForecastNarrative"] = ForecastNarrative - argDict["ForecastTable"] = ForecastTable - - # get product creation time to the minute - almost all fmtrs use this - argDict['creationTime'] = int(time.time()/60)*60.0 - - # Set the Site Time Zone - tz = str(ifpClient.getSiteTimeZone()) - os.environ['TZ'] = tz - time.tzset() - - # Create the formatter - formatter = TextFormatter.TextFormatter(dataMgr, ifpClient) - - # For each Forecast Type, - # Create generate forecast - forecasts = "" # returned value - outForecasts = "" # written to output files - for forecastType in forecastList: - forecast = formatter.getForecast(forecastType, argDict) - forecasts = forecasts + forecast - - # Convert data written to files to upper case if required - mixedCase = False - pid = getPid(forecast) - if pid is None: - logger.warning("Unable to determine PID: defaulting to upper case") - else: - from com.raytheon.uf.common.dataplugin.text.db import MixedCaseProductSupport - mixedCase = MixedCaseProductSupport.isMixedCase(str(pid)) - - if mixedCase: - outForecasts = outForecasts + forecast - else: - outForecasts = outForecasts + forecast.upper() - - logger.info("Text:\n" + str(forecasts)) - - try: - outputFile = argDict["outputFile"] - success = writeToFile(outForecasts, outputFile, "w") - if success == 0: - print "Couldn't open output file", outputFile - logger.error("Couldn't open output file: ", outputFile) - sys.exit(1) - except: - pass - - try: - outputFile = argDict["serverOutputFile"] - success = writeToFile(outForecasts, outputFile, "w") - if success == 0: - print "Couldn't open output file", outputFile - 
logger.error("Couldn't open output file: ", outputFile) - sys.exit(1) - except: - pass - - - try: - appendFile = argDict["appendFile"] - success = writeToFile(outForecasts, appendFile, "a") - if success == 0: - print "Couldn't open append file", appendFile - logger.error("Couldn't write to append file: ", appendFile) - sys.exit(1) - except: - pass - - try: - serverFile = argDict["serverFile"] - writeToSite = (username == "SITE") - success = writeToServerFile(outForecasts, serverFile, writeToSite) - if success == 0: - print "Couldn't open server output file", serverFile - logger.error("Couldn't open server output file: ", serverFile) - sys.exit(1) - except: - pass - - del outForecasts - - # Remove any lat/lon areas created temporarily - #global LatLonIds - #argDict["ifpClient"].deleteReferenceData(LatLonIds) - - # Somebody is holding onto an ifpClient and thus the C++ - # object is not being destroyed. This causes the network - # connection to stay open. Below is a kludge to force - # the destruction of the C++ object. - #del ifpClient.this - - # This also means that you may not import any new modules after this - # point!!!!!!!!!!!!!!! 
- return forecasts - -def decodeTimeStruct(timeStruct): - return AbsTime.absTimeYMD(timeStruct.tm_year, timeStruct.tm_mon, - timeStruct.tm_mday, - timeStruct.tm_hour, timeStruct.tm_min) - -def writeToFile(forecasts, outputFile, mode): - if outputFile: - logger.info("Writing forecast to " + outputFile) - try: - with open(outputFile, mode) as outfile: - outfile.write(forecasts) - - os.chmod(outputFile, 0644) - except: - logger.exception("Error writing forecast to "+outputFile) - return 0 - return 1 - -def writeToServerFile(forecasts, outputFile, writeToSite): - if outputFile: - try: - if writeToSite: - ctx = PATH_MGR.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.SITE) - else: - ctx = PATH_MGR.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.USER) - filePath = PATH_MGR.SEPARATOR.join(["gfe", "text", "PRODGEN", outputFile + ".PRODGEN"]) - lFile = PATH_MGR.getLocalizationFile(ctx, filePath) - logger.info("Writing forecast to " + str(lFile)) - - from LockingFile import File - with File(lFile.getFile(), "", 'w') as outfile: - outfile.write(forecasts) - - return lFile.save() - except: - logger.exception("Error writing forecast to " + str(lFile)) - return 0 - return 1 - -def importModules(paths): - global displayNameDict - displayNameDict = {} - - split = paths.split(os.path.pathsep) - for path in split: - if not path in sys.path: - sys.path.append(path) - - inv = [] - if os.path.exists(path): - inv = os.listdir(path) - inv = filter(filterScripts, inv) - - for pid in inv: - name = os.path.splitext(pid)[0] - if sys.modules.has_key(name): - del sys.modules[name] - try: - mod = __import__(name) - except: - logger.exception("Import Failed " + name) - mod = None - definition = None - if mod is not None: - d = mod.__dict__ - #search for Definition at top-level - definition = d.get('Definition', None) - if definition is None: - # search for definition within class name - definition = d.get(name, None) - if definition is None: - tp = d.get('TextProduct', 
None) - if tp is not None: - #search for definition within TextProduct class - definition = getattr(tp, 'Definition', None) - if definition is None or type(definition) is not dict: - logger.info("Formatter: No Definition Found " + - name) - continue - dspName = getDisplayName(definition) - if dspName is None or dspName == "None": - continue - displayNameDict[dspName] = (mod, definition) - -def getScripts(paths, getVtecCodes): - from java.util import ArrayList - from com.raytheon.uf.common.dataplugin.gfe.textproduct import ProductDefinition - from com.raytheon.viz.gfe.textformatter import TextProductConfigData - from com.raytheon.viz.gfe.textformatter import TextProductMetadata - - logger.info("TextProduct FormatterLauncher Processing....") - importModules(paths) - textProducts = ArrayList() - for (displayName, value) in displayNameDict.items(): - (module, definition) = value - moduleName = module.__name__ - pdef = ProductDefinition(JUtil.pyDictToJavaMap(definition)) - productMetadata = TextProductMetadata(moduleName, displayName, pdef) - textProducts.add(productMetadata) - - vtecCodes = {} - if getVtecCodes: - import VTECMessageType - vtecCodes = VTECMessageType.VTECMessageTypeDict - - logger.info("TextProduct FormatterLauncher Done....") - return TextProductConfigData(JUtil.pyValToJavaObj(vtecCodes), textProducts) - -def filterScripts(name): - (filename, ext) = os.path.splitext(name) - return ext == ".py" and not filename.endswith("Definition") - -def getDisplayName(definition): - try: - dspName = definition['displayName'] - except: - dspName = None - return dspName - -def ppDef(definition): - "pretty prints the definition to make it more readable. Returns string." 
- - s = "\n" - if definition is None: - return "" - if type(definition) == dict and len(definition.keys()): - keys = definition.keys() - keys.sort() - #get maximum length of key - maxL = 0 - for k in keys: - maxL = max(len(k), maxL) - # output the data, formatted - fmt = "%-" + `maxL` + "s" - for k in keys: - s = s + fmt % k + ": " + str(definition[k]) + '\n' - return s - else: - return "\n" + `definition` - -## TODO: Investigate if the dependency on DataManager can be removed here. -## At the moment this passes through to ValuesDialog for building special -## widgets in the DialogAreaComposite. -def getVarDict(paths, dspName, dataMgr, ifpClient, issuedBy, dataSource): - importModules(paths) - - tz = str(ifpClient.getSiteTimeZone()) - os.environ['TZ'] = tz - time.tzset() - productDef = displayNameDict[dspName][1] - productDef['database'] = dataSource - vdg = VarDictGroker.VarDictGroker(displayNameDict[dspName][0], productDef, dspName, issuedBy, dataMgr) - return vdg.getVarDict() - -def getVTECMessageType(productCategory): - import VTECMessageType - return VTECMessageType.getVTECMessageType(productCategory) - -def getTimeZones(zones, officeTZ): - import AreaDictionary - timezones = [] - if zones is not None: - for zone in zones: - zdict = AreaDictionary.AreaDictionary.get(zone, {}) - tzs = zdict.get("ugcTimeZone", []) - if type(tzs) is str: - tzs = [tzs] - for tz in tzs: - if tz not in timezones: - timezones.append(tz) - if officeTZ in timezones and officeTZ != timezones[0]: - timezones.remove(officeTZ) - timezones.insert(0, officeTZ) - if len(timezones) == 0: - timezones.append(officeTZ) - return JUtil.pylistToJavaStringList(timezones) - -def reloadModule(moduleName): -# m = __import__(moduleName) -# reload(m) - if sys.modules.has_key(moduleName): - del sys.modules[moduleName] - try: - __import__(moduleName) - except: - logger.exception("Import Failed " + moduleName) - - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to 
Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +import TimeRange, AbsTime +import logging +import TextFormatter +import time, os, string, inspect, sys +import JUtil, VarDictGroker +import RedirectLogging +import UFStatusHandler + +from com.raytheon.uf.viz.core import VizApp +from com.raytheon.uf.common.gfe.ifpclient import PyFPClient + +# +# Runs the text formatter to generate text products +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# May 29, 2008 njensen Initial Creation. +# Dev 10, 2014 14946 ryu Add getTimeZones() function. +# Apr 16, 2015 14946 ryu Fix getTimeZones to return the office TZ if timezone +# is not set for any zone in a segment. +# Apr 20, 2015 4027 randerso Fixes for formatter autotests +# Apr 25, 2015 4952 njensen Updated for new JEP API +# May 06, 2015 4467 randerso Convert to upper case before writing to files if +# mixed case is not enabled for the product. +# Cleaned up file writing code +# Jul 29, 2015 4263 dgilling Support updated TextProductManager. +# Nov 30, 2015 5129 dgilling Support new IFPClient. 
+# Sep 28, 2016 19293 randerso Log formatter exceptions to formatter log file +# Feb 07, 2017 6092 randerso Changed startTime and endTime to be time.struct_times +# Feb 26, 2018 7230 mapeters Don't reset DRT time to real time +# +## + +## +# This is a base file that is not intended to be overridden. +## + +displayNameDict = {} + +# Set up logging info +PLUGIN_NAME = 'com.raytheon.viz.gfe' +CATEGORY = 'GFE' +DEFAULT_LOG_FILENAME = '/tmp/gfe.log' +FILEMODE='w' +PATH_MGR = None + +try: + logging.basicConfig(level=logging.INFO) + logger = logging.getLogger("FormatterRunner") + + + formatter = logging.Formatter("%(asctime)s:%(name)s:%(levelname)s:%(message)s") + + # Get the information for the file logger + from com.raytheon.uf.common.localization import PathManagerFactory + from com.raytheon.uf.common.localization import LocalizationContext + LocalizationType = LocalizationContext.LocalizationType + LocalizationLevel = LocalizationContext.LocalizationLevel + PATH_MGR = PathManagerFactory.getPathManager() +except: + logging.basicConfig(filename=DEFAULT_LOG_FILENAME,level=logging.DEBUG) + logger = logging.getLogger() + logger.exception("Exception occurred") + +## TODO: Remove use of DataManager in this code. Will need to coordinate with +## the field developers to ensure local site overrides aren't relying on having +## access to it. 
+def executeFromJava(databaseID, site, username, dataMgr, forecastList, logFile, cmdLineVarDict=None, + drtTime=None, vtecMode=None, vtecActiveTable="active", testMode=0 ): + if type(forecastList) is not list: + forecastList = [str(forecastList)] + + # Set up the file logger for this product +# ctx = PATH_MGR.getContext(LocalizationType.valueOf('CAVE_STATIC'), LocalizationLevel.valueOf('USER')) +# logFile = PATH_MGR.getFile(ctx, os.path.join('gfe', 'logs', forecastList[0])).getPath() + logger.info("logFile: " + str(logFile)) + fh = logging.FileHandler(filename=logFile, mode=FILEMODE) + fh.setLevel(logging.INFO) + fh.setFormatter(formatter) + logger.addHandler(fh) + + # redirect stdout and stderr to logger + RedirectLogging.redirect(logger, stdout=True, stderr=True) + + logger.info(forecastList[0]) + + site = str(site) + databaseID = str(databaseID) + username = str(username) + + startTime = time.time() + logger.info("Text Formatter Starting") + + try: + forecasts = runFormatter(databaseID=databaseID, site=site, forecastList=forecastList, testMode=testMode, + cmdLineVarDict=cmdLineVarDict, vtecMode=vtecMode, username=username, + dataMgr=dataMgr, drtTime=drtTime, vtecActiveTable=vtecActiveTable) + except: + logger.exception("Error generating text product") + raise + + elapsedTime = (time.time() - startTime)*1000 + logger.info("Text Formatter Finished, took: %d ms",elapsedTime) + + RedirectLogging.restore() + return forecasts + +def getPid(forecast): + # taken from ProductParser.py + import re + + sl = r'^' # start of line + el = r'\s*?\n' # end of line + id3 = r'[A-Za-z]{3}' # 3 charater word + empty = r'^\s*' + el # empty line + + wmoid = r'(?P[A-Z]{4}\d{2})' # wmoid + fsid = r'(?P[A-Z]{4})' # full station id + pit = r'(?P\d{6})' # product issuance time UTC + ff = r'(?P ' + id3 + ')?' 
# "funny" field + + # CI block + ci_start = sl + wmoid + ' ' + fsid + ' ' + pit + ff + el + awipsid = r'(?P(?P[A-Z0-9]{3})(?P[A-Z0-9]{1,3}))' + el + ci_block = r'(?P' + ci_start + awipsid + '\n?)' + + ci_re = re.compile(ci_block) + + pid = None + m = ci_re.search(forecast) + if m is not None: + pid = m.group('cat') + + return pid + +def runFormatter(databaseID, site, forecastList, cmdLineVarDict, vtecMode, + username, dataMgr, serverFile=None, + editAreas=[], timeRanges=[], timePeriod=None, drtTime=None, + vtecActiveTable='active', testMode=0, experimentalMode=0, serverOutputFile=None, + startTime=None, endTime=None, language=None, outputFile=None, appendFile=None + ): + + if cmdLineVarDict: + exec("cmdLineVarDict = " + cmdLineVarDict) + else: + cmdLineVarDict = {} + + # Set default Forecast Type + if len(forecastList) == 0: + usage() + logger.error("ForecastList [-t] is empty or missing") + return + + # Can't have both T and E modes + if testMode and experimentalMode: + usage() + logger.error("Can't have both -T and -E switches") + return + + if drtTime: + import offsetTime + offsetTime.setDrtOffset(drtTime) + + # Create Time Range + useRawTR = 0 + if startTime is not None and endTime is not None: + start = decodeTimeStruct(startTime) + end = decodeTimeStruct(endTime) + timeRange = TimeRange.TimeRange(start, end) + # Set so this time range will override all others + useRawTR = 1 + else: + timeRange = None + + # Handle the VTEC modes + if vtecMode is not None and vtecMode not in ['X','O','T','E']: + usage() + logger.error("-v vtecMode must be ['X', 'O', 'T', 'E']") + sys.exit(1) + + #force VTEC mode to "T" if in TEST mode and another vtecCode is specified + if testMode and vtecMode is not None: + vtecMode = "T" + + #force VTEC mode to "E" if in EXPERIMENTAL mode and another vtecCode + #is specified + elif experimentalMode and vtecMode is not None: + vtecMode = "E" + + #force into TEST mode, if vtec code is 'T' + if vtecMode == "T": + testMode = 1 + experimentalMode 
= 0 + elif vtecMode == "E": + experimentalMode = 1 + testMode = 0 + + # Create an ifpClient + ifpClient = PyFPClient(VizApp.getWsId(), site) + + global GridLoc + GridLoc = ifpClient.getDBGridLocation() + #importer = TextIFPImporter(ifpClient) + #importer.install() + import Utility + + import ForecastNarrative + import ForecastTable + + import Analysis + + site = str(ifpClient.getSiteID()[0]) + + # Create dictionary of arguments + argDict = { + #"host" : host, + #"port" : port, + "databaseID": databaseID, + "site" : site, + "cmdLineVarDict": cmdLineVarDict, + "serverFile": serverFile, + "editAreas": editAreas, + "timeRanges": timeRanges, + "timeRange": timeRange, + "timePeriod": timePeriod, + "useRawTR": useRawTR, + "vtecMode": vtecMode, + "vtecActiveTable": vtecActiveTable, + "testMode": testMode, + "experimentalMode": experimentalMode, + "serverOutputFile": serverOutputFile, + } + # Handle command line switches for variables that can be + # set elsewhere i.e. in the command line varDict OR the + # product definition section. + # If there was a command line switch for these items, + # make an entry in argDict. Otherwise, do not. 
+ for item in ["language", "outputFile", "appendFile"]: + exec("if " + item + " is not None: argDict['" + item + "'] = " + item) + + logger.info("Arguments: " + str(argDict)) + + argDict["ifpClient"] = ifpClient + argDict["utility"] = Utility.Utility(None, None, ifpClient) + #argDict["AFPS"] = AFPS + #argDict["AFPSSup"] = AFPSSup + argDict["Analysis"] = Analysis + argDict["ForecastNarrative"] = ForecastNarrative + argDict["ForecastTable"] = ForecastTable + + # get product creation time to the minute - almost all fmtrs use this + argDict['creationTime'] = int(time.time()/60)*60.0 + + # Set the Site Time Zone + tz = str(ifpClient.getSiteTimeZone()) + os.environ['TZ'] = tz + time.tzset() + + # Create the formatter + formatter = TextFormatter.TextFormatter(dataMgr, ifpClient) + + # For each Forecast Type, + # Create generate forecast + forecasts = "" # returned value + outForecasts = "" # written to output files + for forecastType in forecastList: + forecast = formatter.getForecast(forecastType, argDict) + forecasts = forecasts + forecast + + # Convert data written to files to upper case if required + mixedCase = False + pid = getPid(forecast) + if pid is None: + logger.warning("Unable to determine PID: defaulting to upper case") + else: + from com.raytheon.uf.common.dataplugin.text.db import MixedCaseProductSupport + mixedCase = MixedCaseProductSupport.isMixedCase(str(pid)) + + if mixedCase: + outForecasts = outForecasts + forecast + else: + outForecasts = outForecasts + forecast.upper() + + logger.info("Text:\n" + str(forecasts)) + + try: + outputFile = argDict["outputFile"] + success = writeToFile(outForecasts, outputFile, "w") + if success == 0: + print("Couldn't open output file", outputFile) + logger.error("Couldn't open output file: ", outputFile) + sys.exit(1) + except: + pass + + try: + outputFile = argDict["serverOutputFile"] + success = writeToFile(outForecasts, outputFile, "w") + if success == 0: + print("Couldn't open output file", outputFile) + 
logger.error("Couldn't open output file: ", outputFile) + sys.exit(1) + except: + pass + + + try: + appendFile = argDict["appendFile"] + success = writeToFile(outForecasts, appendFile, "a") + if success == 0: + print("Couldn't open append file", appendFile) + logger.error("Couldn't write to append file: ", appendFile) + sys.exit(1) + except: + pass + + try: + serverFile = argDict["serverFile"] + writeToSite = (username == "SITE") + success = writeToServerFile(outForecasts, serverFile, writeToSite) + if success == 0: + print("Couldn't open server output file", serverFile) + logger.error("Couldn't open server output file: ", serverFile) + sys.exit(1) + except: + pass + + del outForecasts + + # Remove any lat/lon areas created temporarily + #global LatLonIds + #argDict["ifpClient"].deleteReferenceData(LatLonIds) + + # Somebody is holding onto an ifpClient and thus the C++ + # object is not being destroyed. This causes the network + # connection to stay open. Below is a kludge to force + # the destruction of the C++ object. + #del ifpClient.this + + # This also means that you may not import any new modules after this + # point!!!!!!!!!!!!!!! 
+ return forecasts + +def decodeTimeStruct(timeStruct): + return AbsTime.absTimeYMD(timeStruct.tm_year, timeStruct.tm_mon, + timeStruct.tm_mday, + timeStruct.tm_hour, timeStruct.tm_min) + +def writeToFile(forecasts, outputFile, mode): + if outputFile: + logger.info("Writing forecast to " + outputFile) + try: + with open(outputFile, mode) as outfile: + outfile.write(forecasts) + + os.chmod(outputFile, 0o644) + except: + logger.exception("Error writing forecast to "+outputFile) + return 0 + return 1 + +def writeToServerFile(forecasts, outputFile, writeToSite): + if outputFile: + try: + if writeToSite: + ctx = PATH_MGR.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.SITE) + else: + ctx = PATH_MGR.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.USER) + filePath = PATH_MGR.SEPARATOR.join(["gfe", "text", "PRODGEN", outputFile + ".PRODGEN"]) + lFile = PATH_MGR.getLocalizationFile(ctx, filePath) + logger.info("Writing forecast to " + str(lFile)) + + from LockingFile import File + with File(lFile.getFile(), "", 'w') as outfile: + outfile.write(forecasts) + + return lFile.save() + except: + logger.exception("Error writing forecast to " + str(lFile)) + return 0 + return 1 + +def importModules(paths): + global displayNameDict + displayNameDict = {} + + split = paths.split(os.path.pathsep) + for path in split: + if not path in sys.path: + sys.path.append(path) + + inv = [] + if os.path.exists(path): + inv = os.listdir(path) + inv = list(filter(filterScripts, inv)) + + for pid in inv: + name = os.path.splitext(pid)[0] + if name in sys.modules: + del sys.modules[name] + try: + mod = __import__(name) + except: + logger.exception("Import Failed " + name) + mod = None + definition = None + if mod is not None: + d = mod.__dict__ + #search for Definition at top-level + definition = d.get('Definition', None) + if definition is None: + # search for definition within class name + definition = d.get(name, None) + if definition is None: + tp = d.get('TextProduct', 
None) + if tp is not None: + #search for definition within TextProduct class + definition = getattr(tp, 'Definition', None) + if definition is None or type(definition) is not dict: + logger.info("Formatter: No Definition Found " + + name) + continue + dspName = getDisplayName(definition) + if dspName is None or dspName == "None": + continue + displayNameDict[dspName] = (mod, definition) + +def getScripts(paths, getVtecCodes): + from java.util import ArrayList + from com.raytheon.uf.common.dataplugin.gfe.textproduct import ProductDefinition + from com.raytheon.viz.gfe.textformatter import TextProductConfigData + from com.raytheon.viz.gfe.textformatter import TextProductMetadata + + logger.info("TextProduct FormatterLauncher Processing....") + importModules(paths) + textProducts = ArrayList() + for (displayName, value) in list(displayNameDict.items()): + (module, definition) = value + moduleName = module.__name__ + pdef = ProductDefinition(JUtil.pyDictToJavaMap(definition)) + productMetadata = TextProductMetadata(moduleName, displayName, pdef) + textProducts.add(productMetadata) + + vtecCodes = {} + if getVtecCodes: + import VTECMessageType + vtecCodes = VTECMessageType.VTECMessageTypeDict + + logger.info("TextProduct FormatterLauncher Done....") + return TextProductConfigData(JUtil.pyValToJavaObj(vtecCodes), textProducts) + +def filterScripts(name): + (filename, ext) = os.path.splitext(name) + return ext == ".py" and not filename.endswith("Definition") + +def getDisplayName(definition): + try: + dspName = definition['displayName'] + except: + dspName = None + return dspName + +def ppDef(definition): + "pretty prints the definition to make it more readable. Returns string." 
+ + s = "\n" + if definition is None: + return "" + if type(definition) == dict and len(list(definition.keys())): + keys = list(definition.keys()) + keys.sort() + #get maximum length of key + maxL = 0 + for k in keys: + maxL = max(len(k), maxL) + # output the data, formatted + fmt = "%-" + repr(maxL) + "s" + for k in keys: + s = s + fmt % k + ": " + str(definition[k]) + '\n' + return s + else: + return "\n" + repr(definition) + +## TODO: Investigate if the dependency on DataManager can be removed here. +## At the moment this passes through to ValuesDialog for building special +## widgets in the DialogAreaComposite. +def getVarDict(paths, dspName, dataMgr, ifpClient, issuedBy, dataSource): + importModules(paths) + + tz = str(ifpClient.getSiteTimeZone()) + os.environ['TZ'] = tz + time.tzset() + productDef = displayNameDict[dspName][1] + productDef['database'] = dataSource + vdg = VarDictGroker.VarDictGroker(displayNameDict[dspName][0], productDef, dspName, issuedBy, dataMgr) + return vdg.getVarDict() + +def getVTECMessageType(productCategory): + import VTECMessageType + return VTECMessageType.getVTECMessageType(productCategory) + +def getTimeZones(zones, officeTZ): + import AreaDictionary + timezones = [] + if zones is not None: + for zone in zones: + zdict = AreaDictionary.AreaDictionary.get(zone, {}) + tzs = zdict.get("ugcTimeZone", []) + if type(tzs) is str: + tzs = [tzs] + for tz in tzs: + if tz not in timezones: + timezones.append(tz) + if officeTZ in timezones and officeTZ != timezones[0]: + timezones.remove(officeTZ) + timezones.insert(0, officeTZ) + if len(timezones) == 0: + timezones.append(officeTZ) + return JUtil.pylistToJavaStringList(timezones) + +def reloadModule(moduleName): +# m = __import__(moduleName) +# reload(m) + if moduleName in sys.modules: + del sys.modules[moduleName] + try: + __import__(moduleName) + except: + logger.exception("Import Failed " + moduleName) + + diff --git 
a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/HazardsTable.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/HazardsTable.py index 782239cb30..f190e77c44 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/HazardsTable.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/HazardsTable.py @@ -1,2479 +1,2474 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# Port of A1 HazardsTable.py. -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# ??/??/?? ???????? Initial Creation. -# 05/14/13 1842 dgilling Use GFEVtecUtil to handle NEW -# ETN assignment. -# 09/24/13 1843 dgilling Handle GetNextEtnResponse. -# 11/20/13 2490 randerso Corrected error handling in __getActiveTable -# -# 02/05/14 2774 dgilling Fix error logging statements in -# __warnETNduplication() and -# __highestETNActiveTable. 
-# 11/11/14 4953 randerso Changed type of endTime from float to int -# 01/22/2015 4027 randerso Fix comparison of in __getCities -# 02/05/15 4099 randerso Fixed exception handling in __getActiveTable -# 05/07/2015 4027 randerso Fixed error handling, -# added NOTE about false postives for duplicate ETNs -# 10/16/2015 17771 dgilling Remove __sitesIgnoreNatlEtn. -# 10/29/2015 17701 yteng Correct parm selection for Hazards to exclude Hazardsnc -# 12/07/2015 5129 dgilling Support new IFPClient. -# 09/13/2016 19348 ryu Validate ETN for tropical events. -# 11/21/2016 5959 njensen Removed unused imports and made more pythonic -# 02/16/2017 18215 ryu Fix issue of re-creating EXP records when they have -# already been issued before the end time of an event. -# - -## -# This is a base file that is not intended to be overridden. -## - -import time, copy, string, logging -import os -import VTECTableUtil, VTECTable -import TimeRange, AbsTime, ActiveTableVtec -from java.util import ArrayList -from com.raytheon.uf.common.activetable import ActiveTableMode -from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID as JavaDatabaseID -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID -from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey -from com.raytheon.viz.gfe.sampler import HistoSampler, SamplerRequest -from com.raytheon.viz.gfe.vtec import GFEVtecUtil - -# This class makes an object that interfaces to the GFE hazard grid -# sampling code and the TimeCombine code and generates formatted -# hazard strings and VTEC strings for formatters. Alternate active tables -# may be defined for test purposes. 
-class HazardsTable(VTECTableUtil.VTECTableUtil): - def __init__(self, ifpClient, editAreas, productCategory, - filterMethod, databaseID, siteID4, activeTableName="", - vtecMode=None, samplingThreshold=(10, None), hazardEndTime=None, - creationTime=None, dataMgr=None, accurateCities=False, cityEditAreas=[]): - self.log = logging.getLogger("FormatterRunner.HazardsTable.HazardsTable") -# self.log.setLevel(logging.DEBUG) - - - VTECTableUtil.VTECTableUtil.__init__(self, None) - - # save data - self.__ifpClient = ifpClient - self.__databaseID = databaseID - self.__dataMgr = dataMgr - self.__editAreas = editAreas - self.__pil = productCategory - self.__siteID4 = siteID4 - self.__spcSiteID4 = "KWNS" - self.__tpcSiteID4 = "KNHC" - self.filterMethod = filterMethod - self.__activeTable = None - self.__allGEOActiveTable = None #not filtered by edit areas - self.__vtecMode = vtecMode - self.__etnCache = {} - - if activeTableName == "PRACTICE": - self.__activeTableMode = ActiveTableMode.PRACTICE - else: - self.__activeTableMode = ActiveTableMode.OPERATIONAL - - if hazardEndTime is None: - self.__hazardEndTime = None - else: - self.__hazardEndTime = hazardEndTime.unixTime() - - # list of marine products - self.__marineProds = ["CWF", "NSH", "GLF", "MWW", "OFF"] - - # list of phen/sig from national centers and "until further notice" - self.__tpcKeys = self.__processJavaCollection(GFEVtecUtil.TROPICAL_PHENSIGS, self.__convertPhensig) - self.__tpcBaseETN = '1001' - self.__ncKeys = self.__processJavaCollection(GFEVtecUtil.NATIONAL_PHENSIGS, self.__convertPhensig) - self.__ufnKeys = [('HU', 'A'), ('HU', 'W'), ('TR', 'A'), ('TR', 'W'), - ('TY', 'A'), ('TY', 'W'), ('SS', 'A'), ('SS', 'W')] - - self.__marineZonesPrefix = ["AM", "GM", "PZ", "PK", "PH", "PM", "AN", - "PS", "SL"] #list of zone name prefix that are marine zones - - # tuple of (% area coverage, numberGridCells) - self.__samplingThreshold = \ - (samplingThreshold[0] / 100.0, samplingThreshold[1]) - - #determine creation time 
- if creationTime is not None: - self.__time = creationTime - else: - self.__time = time.time() #now time - self.__time = (int(self.__time) / 60) * 60 #truncated to minute - - # accurate cities - self.__accurateCities = accurateCities - self.__cityEditAreas = cityEditAreas - - #convert edit areas to a single zone list - self.__zoneList = self.__singleZoneList(editAreas) - - #sample, and merge vtec codes - self.__rawAnalyzedTable = self.__analyzedTable(self.__zoneList, - self.filterMethod) - - #reorganize raw analyzed table into hazards by zone, might cause - #change in combinations - self.__hazardsByZoneDict = {} - if len(self.__rawAnalyzedTable) > 0: - # organize by id - self.__hazardsByZoneDict = self.__organizeByZone( - self.__rawAnalyzedTable) - - - self.__hazardCombinations = self.__recombineZoneGroups( - self.__hazardsByZoneDict, editAreas) - else: - # if we got an empty table, set the combos to what was specified - self.__hazardCombinations = editAreas - - self.log.debug("RecombinedZoneGroups: initial: " + str(self.__editAreas) + - " final: " + str(self.__hazardCombinations)) - - self.__cityHazards = self.__createCityHazards() - - def activeTable(self): - # Returns the raw active table as a list of dictionaries - return self.__activeTable - - def rawAnalyzedTable(self): - # Returns the raw analyzed table as a list of dictionaries - return self.__rawAnalyzedTable - - def consolidatedTableByID(self): - # Returns the raw analyzed table consolidated by geo IDs, i.e., - # the ['id'] field is a list of ids. - return self.consolidateByID(self.__rawAnalyzedTable) - - def getHazardAreaCombinations(self): - # Returns a list of combinations to use that are guaranteed to - # not have different hazards within each combination. - return self.__hazardCombinations - - def getHazardList(self, editAreaList): - # Find the hazards that apply to the area and timeRange, and returns - # a list of dictionaries. This function can take a single string or - # a list. 
Restriction: only looks at the first element in the list. - # The returned list's 'id' field is a list of zones with that - # hazard. - - if type(editAreaList) is list and len(editAreaList): - ea = editAreaList[0] - eaList = editAreaList - elif type(editAreaList) is str: - ea = editAreaList - eaList = [editAreaList] - else: - return [] - - hazards = [] - - if self.__hazardsByZoneDict.has_key(ea): - haz = self.__hazardsByZoneDict[ea] - for h in haz: - # if a segment number is present copy while removing seg - # from the key - if h.has_key('seg') and h['seg'] != "": - # make a copy and change the key if we need to - newH = copy.deepcopy(h) - newH['id'] = eaList # preserve the old list of areas - # strip segments - updated to make sure GUM TRW/A hazards keep local ETN - if ((newH['phen'], newH['sig']) not in self.__ncKeys): - if string.find(newH['phensig'], ":") >= 0: - newH['phensig'] = newH['phen'] + '.' + newH['sig'] - - hazards.append(newH) - else: - # otherwise just append the hazard record - hazards.append(h) - - # Now consolidate this list of hazards with segment numbers removed. - hazards = self.__consolidateTime(hazards) - - return hazards - - def getVTECString(self, fcstArea): - # Returns a string containing the vtec strings for the given forecast - # area and time range. 
- - # get the list of hazards for this fcst area and time range - hazards = self.getHazardList(fcstArea) #could sort in here - - # sort the list of hazards depending on the type of product - if self.__pil in self.__marineProds: # it's a marine product - hazards.sort(self.__marineHazardsSort) - else: # non-marine product - hazards.sort(self.__hazardsSort) - # hazards need upgrade records to be paired up - hazards = self.__pairUpgradeRecords(hazards) - - # get VTEC strings and VTEC records - vtecStrings = [] - for h in hazards: - vtecS = h['vtecstr'] - if len(vtecS) == 0: - continue - vtecStrings.append(vtecS) - - returnStr = "" - for s in vtecStrings: - returnStr = returnStr + s + '\n' - return returnStr - - # Returns the cities associated with the hazards that could afflict - # the cities in cityList - def getCities(self, cityList, zoneHazards): - if self.__cityHazards is None: - return - - relevant = [] - compare = ('phen', 'sig', 'endTime') - for p in self.__cityHazards: - for h in zoneHazards: - if self.hazardCompare(p, h, compare): - relevant.append(p) - break - - return self.__getCities(cityList, relevant) - - # Get cities associated with a VTEC with an EXP action - # returns None if the grid is deleted - def getCitiesForEXP(self, cityList, zone, phen, sig, expTime): - if self.__cityHazards is None: - return - - # check zone hazards for existence of the grid - if expTime <= self.__time: - for rec in self.__oldZoneTable: - if rec['id'] == zone and \ - rec['phen'] == phen and rec['sig'] == sig and \ - rec['endTime'] == expTime: - break - else: - self.log.info("No grid found for " + \ - `phen` + "." 
+ `sig` + \ - " expired at " + \ - time.asctime(time.gmtime(expTime))) - return - - # filter by phen, sig, expTime - matches = [] - for rec in self.__cityHazards: - if rec['phen'] == phen and rec['sig'] == sig and \ - rec['endTime'] == expTime: - matches.append(rec) - - return self.__getCities(cityList, matches) - - # Get cities that appear in both cityList and hazardList - # Ordering of cities should be same as cityList - def __getCities(self, cityList, hazardList): - cities = [] - for city in cityList: - for p in hazardList: - if p['id'].upper() == city.upper(): - cities.append(city) - break - return cities - - # Check the AT for the last issued records to determine cities - # that were affected by the cancelled/expired events. - # We could include cities from other events, in which case the result - # is uncertain. - - def getCitiesFromPrevious(self, ugcList, checkedVTEC, ignoredVTEC=[]): - - # local function for dict key - def event(rec): - return rec['phen'], rec['sig'], rec['etn'] - - # we only need the records from the lastest issuance of this product - - myRecords = filter(lambda x: x['officeid'] == self.__siteID4 and \ - x['pil'] == self.__pil and \ - x['id'] in ugcList, - self.__activeTable) - - lastIssued = [] - issueT = 0 - for rec in myRecords: - it = rec['issueTime'] - if self.__time >= it > issueT: - lastIssued = [rec] - issueT = it - elif it == issueT: - lastIssued.append(rec) - - if not lastIssued: - return None, 1 - - # keep track of matches - unmatched = {} - for rec in checkedVTEC: - unmatched[event(rec)] = ugcList[:] - - cities = [] - certain = 1 - compare = ('phen', 'sig', 'etn') - - for active in lastIssued: - - if active['act'] in ['CAN', 'EXP']: - # this will definitely make the result uncertain - certain = 0 - continue - elif active['act'] in ['UPG']: - continue - - match = 0 - for rec in checkedVTEC: - if self.hazardCompare(active, rec, compare): - match = 1 - break - - if match: - try: - unmatched[event(active)].remove(active['id']) - 
except ValueError: - certain = 0 - self.log.error("Too many matches for %s.%s:%04d"\ - % event(active)\ - + " in zone %s" % active['id']) - - if active.get('cities') is not None: - for city in active['cities']: - if city not in cities: - cities.append(city) - else: - certain = 0 - msg = "Active table record has no cities attribute." - self.log.error(msg) - - else: - # see if it should be ignored - for rec in ignoredVTEC: - if self.hazardCompare(active, rec, compare): - break - else: - # This active record doesn't match checked or ignored - # VTEC list - flag the result as uncertain - certain = 0 - - # check if all hazard/zone combinations have been covered - # there should be nothing in unmatched dict - - for key, zones in unmatched.items(): - if len(zones) > 0: - certain = 0 - break - - msg = [] - for key, zones in unmatched.items(): - if len(zones) > 0: - msg.append("%s.%s:%d " % key + str(zones)) - if len(msg): - msg = '\n'.join(msg) - self.log.error("The following hazard/zones are not found" - " in active table:\n" + str(msg)) - - return cities, certain - - - def __hazardsSort(self, a, b): - # Returns 1, 0, or -1 depending on whether the first hazard - # is considered higher, equal, or lower priority when compared to - # the second as defined in the VTEC directive. 
- # 1) action code [CAN, EXP, UPG, NEW, EXB, EXA, EXT, CON] - # 2) significance (W, Y, A, O, S) - # 3) start time - # 4) phenomena (alphabetical) - - # check action code - actionCodeOrder = ["CAN", "EXP", "UPG", "NEW", "EXB", "EXA", - "EXT", "CON"] - try: - aIndex = actionCodeOrder.index(a['act']) - bIndex = actionCodeOrder.index(b['act']) - except ValueError: - self.log.error("Invalid action code in hazard %s %s", a, b) - return 0 - - if aIndex > bIndex: - return 1 - elif aIndex < bIndex: - return -1 - - # check sig - sigOrder = ["W", "Y", "A", "O", "S", "F"] - try: - aIndex = sigOrder.index(a['sig']) - bIndex = sigOrder.index(b['sig']) - except ValueError: - self.log.error("Invalid sig code in hazard %s %s", a, b) - return 0 - - if aIndex > bIndex: - return 1 - elif aIndex < bIndex: - return -1 - - # check startTime - if a['startTime'] > b['startTime']: - return 1 - elif a['startTime'] < b['startTime']: - return -1 - - # check phen - if a['phen'] > b['phen']: - return 1 - elif a['phen'] < b['phen']: - return -1 - - self.log.error("Hazards are identical in __hazardsSort %s %s", a, b) - return 0 - - def __marineHazardsSort(self, a, b): - # Returns 1, 0, or -1 depending on whether the first MARINE hazard - # is considered higher, equal, or lower priority when compared to - # the second as defined in the VTEC directive. 
- # 1) start time - # 2) action code [CAN, EXP, UPG, NEW, EXB, EXA, EXT, CON] - # 3) significance (W, Y, A, S) - # 5) phenomena (alphabetical) - - # check startTime - if a['startTime'] > b['startTime']: - return 1 - elif a['startTime'] < b['startTime']: - return -1 - - # check action code - actionCodeOrder = ["CAN", "EXP", "UPG", "NEW", "EXB", "EXA", - "EXT", "CON"] - try: - aIndex = actionCodeOrder.index(a['act']) - bIndex = actionCodeOrder.index(b['act']) - except ValueError: - self.log.error("Invalid action code in hazard %s %s", a, b) - return 0 - - if aIndex > bIndex: - return 1 - elif aIndex < bIndex: - return -1 - - - # check sig - sigOrder = ["W", "Y", "A", "S", "F"] - try: - aIndex = sigOrder.index(a['sig']) - bIndex = sigOrder.index(b['sig']) - except ValueError: - self.log.error("Invalid sig code in hazard %s %s", a, b) - return 0 - - if aIndex > bIndex: - return 1 - elif aIndex < bIndex: - return -1 - - # check phen - if a['phen'] > b['phen']: - return 1 - elif a['phen'] < b['phen']: - return -1 - - self.log.error("Marine Hazards are identical in __marineHazardsSort %s %s", a, b) - return 0 - - def __pairUpgradeRecords(self, hazardsList): - # This method moves items in the hazardsList around such that - # upgrades and downgrades are sequential (UPG, NEW), (CAN, NEW) - # Hazard upgradeFrom fields records must match in the categories: - # start, end, etn, phen, and sig. 
- - # get the list of upgraded or downgraded records - upDownList = [] - for h in hazardsList: - if h.has_key('upgradeFrom') or h.has_key('downgradeFrom'): - upDownList.append(h) - - # temporarily remove these guys from the hazardsList - for upDown in upDownList: - hazardsList.remove(upDown) - - # Hunt down their counterparts and add the record in the correct slot - for upDown in upDownList: - # get the fields from the up/downgradeFrom record - oldRec = {} - if upDown.has_key('upgradeFrom'): - oldRec = upDown['upgradeFrom'] - elif upDown.has_key('downgradeFrom'): - oldRec = upDown['downgradeFrom'] - - # find its match - foundMatch = 0 # set a flag - for h in hazardsList: - if oldRec['etn'] == h['etn'] and \ - oldRec['phen'] == h['phen'] and oldRec['sig'] == h['sig']: - # found a match - hazardsList.insert(hazardsList.index(h) + 1, upDown) # insert after - foundMatch = 1 - break # done with this pass through hazardsList - - if foundMatch == 0: - self.log.error("Match not found for upgrade/downgrade.") - - return hazardsList - - #----------------------------------------------------------------- - # The following set of functions are utility functions. 
- #----------------------------------------------------------------- - - # Pretty-print a time range or a time range list - def __printTR(self, t): - s = "" - if type(t) is list: - s = '[' - for e in t: - s = s + '(' + time.asctime(time.gmtime(e[0])) + \ - ',' + time.asctime(time.gmtime(e[1])) + '),' - s = s + ']' - return s - else: - s = '(' + time.asctime(time.gmtime(t[0])) + \ - ',' + time.asctime(time.gmtime(t[1])) + ')' - return s - - #Pretty-prints the hazard by zone table - def __printHBZ(self, hazardsByZone): - s = '\n' - for id in hazardsByZone.keys(): - s = s + " Hazards for " + `id` + \ - self.printActiveTable(hazardsByZone[id]) - return s - - #provides intersection of two time ranges - def __timeIntersection(self, tr1, tr2): #tr1, tr2 tuples (startT, endT) - if tr1[0] < tr2[0]: - startTime = tr2[0] - else: - startTime = tr1[0] - if tr1[1] > tr2[1]: - endTime = tr2[1] - else: - endTime = tr1[1] - if startTime >= endTime: - return None # no intersection - else: - return (startTime, endTime) - - #provides the time ranges of non-intersection in tr1, based on - #the time range tr2. Returns a list of 0, 1, or 2 items. 
- def __nonTimeIntersection(self, tr1, tr2): - #returns list of non intersections between tr1 and tr2 within tr1 - intersect = self.__timeIntersection(tr1, tr2) - if intersect is None: - return [tr1] - #exact match - if tr1 == tr2: - return [] - #startT same - elif tr1[0] == intersect[0]: - return [(intersect[1], tr1[1])] - #endT same - elif tr1[1] == intersect[1]: - return [(tr1[0], intersect[0])] - #middle - else: - return [(tr1[0], intersect[0]), (intersect[1], tr1[1])] - - # time contains, if time range (tr) contains time (t), return 1 - def __containsT(self, tr, t): - return (t >= tr[0] and t < tr[1]) - - # time overlaps, if tr1 overlaps tr2 (adjacent is not an overlap) - def __overlaps(self, tr1, tr2): - if self.__containsT(tr2, tr1[0]) or self.__containsT(tr1, tr2[0]): - return 1 - return 0 - - # hazard records' time overlaps - def __hazardsOverlap(self, h1, h2): - tr1 = (h1['startTime'], h1['endTime']) - tr2 = (h2['startTime'], h2['endTime']) - if self.__containsT(tr2, tr1[0]) or self.__containsT(tr1, tr2[0]): - return 1 - return 0 - - # time range is adjacent to each other - def __isAdjacent(self, tr1, tr2): - if tr1[0] == tr2[1] or tr1[1] == tr2[0]: - return 1 - return 0 - - # combine two time ranges - def __combineTR(self, tr1, tr2): - return (min(tr1[0], tr2[0]), max(tr1[1], tr2[1])) - - # prepare etn cache. Adds new entries to the etn cache, but doesn't - # figure out the etn values at this point. 
Organizes the information - # by phen.sig, then maintains a list of start/end/etn/ids - def __prepETNCache(self, proposedRecord): - - phensig = (proposedRecord['phen'], proposedRecord['sig']) - id = proposedRecord['id'] - if self.__etnCache.has_key(phensig): - for start, end, etn, ids in self.__etnCache[phensig]: - if proposedRecord['startTime'] == start and \ - proposedRecord['endTime'] == end: - ids.append(id) # add the id - return #already in the cache - times = self.__etnCache[phensig] - times.append((proposedRecord['startTime'], proposedRecord['endTime'], 0, [id])) - - else: - self.__etnCache[phensig] = [(proposedRecord['startTime'], - proposedRecord['endTime'], 0, [id])] - - # assign new etns to the etn cache. This is done after all requests - # for new etns have been made - def __assignNewETNs(self, activeTable): - - # go through each new phen,sig - for phen, sig in self.__etnCache.keys(): - - #determine the first new ETN to use if we need a new one - etn_base = self.__highestETNActiveTable(phen, sig, - self.__allGEOActiveTable) - etn_base = int(etn_base) + 1 #the next one in sequence - - #sort the etn cache by (start, end, etn, ids) - self.__etnCache[(phen, sig)].sort() #sort the start,end,etn,ids - - # keep track of the ids that have been given each etn - coverage = {} - - #process sequentially each (phen, sig). Entries in cache - #are list of startT (0), endT (1), etn# (2), [id] (3). - times = self.__etnCache[(phen, sig)] - for x in xrange(len(times)): - s1, e1, etn1, ids = times[x] - #if no etn, then use a new one - if etn1 == 0: #etn == 0? 
- etn1 = etn_base - etn_base = etn_base + 1 - times[x] = (s1, e1, etn1, ids) - coverage[etn1] = ids[:] - - # the ids for which a record with etn1 already exists - assigned = coverage[etn1] - - #search for all adjacent or overlapping, give it the same etn - for y in xrange(x + 1, len(times)): - s2, e2, etn2, ids2 = times[y] - if etn2 == 0 and \ - (self.__isAdjacent((s1, e1), (s2, e2)) or\ - self.__overlaps((s1, e1), (s2, e2))): - - # check for potential ETN duplication - for id2 in ids2: - if id2 in assigned: - # cannot assign etn1 to this group since etn1 - # is already assigned to a record for the zone - break - else: - # ok to assign etn1 to this group - etn2 = etn1 #reuse the etn - times[y] = (s2, e2, etn2, ids2) - - # add the ids to assigned list - assigned.extend(ids2) - - # find highest etn in active table for phen/sig, returns it. - # This method has been dramatically re-written for A2 to use - # GFEVtecUtil to do preliminary ETN assignment instead of scrubbing - # the whole set of ActiveTableRecords to calculate it. - def __highestETNActiveTable(self, phen, sig, activeTable): - etn_base = 0 - phensig = (phen, sig) - - # find the max ETN... - # 1. highest ETN period for non-tropical - # or - # 2. 
highest ETN > 1000 for the tropical, non-GUM products (tpcKeys) - # - # Local WFOs do not assign these numbers, so they should have - # numbers < 1000 - if phensig not in self.__tpcKeys: - etn_base = GFEVtecUtil.getNextEtn(self.__siteID4, '.'.join(phensig), False, self.__activeTableMode).getNextEtn() - 1 - else: - presentyear = time.gmtime(self.__time).tm_year - for active in activeTable: - activeyear = time.gmtime(active['issueTime']).tm_year - activephensig = (active['phen'], active['sig']) - if phensig == activephensig and presentyear == activeyear: - # causes failure if tropical hazards are less than 1001 - if active['etn'] < int(self.__tpcBaseETN): - self.log.error("Incorrect ETN for tropical hazard.") - return etn_base - - #determine the new etn to use, using the etn cache - def __getNewETN(self, pRecord): - key = (pRecord['phen'], pRecord['sig']) - if self.__etnCache.has_key(key): - times = self.__etnCache[key] - for startT, endT, etn, ids in times: - if pRecord['startTime'] == startT and pRecord['endTime'] == endT: - return etn - return "???" #should never get here - - - - #----------------------------------------------------------------- - # The following set of functions are used to recombining - # records from the raw analyzed table to keep the geographic - # groups together. - #----------------------------------------------------------------- - - def __singleZoneList(self, comboList): - #Utility function to break apart a combinations list (list of list - #of zones) into a set of single zones. Returns the list of zones. - newList = [] - for c in comboList: - for z in c: - newList.append(z) - return newList - - # Returns a dictionary that is keyed on zonename, and contains a list - # of all hazards for that zone. 
- def __organizeByZone(self, hazardList): - hazardsByZone = {} - for h in hazardList: - if hazardsByZone.has_key(h['id']): - hazardsByZone[h['id']].append(h) - else: - hazardsByZone[h['id']] = [h] - - self.log.debug("HazardByZone: " + self.__printHBZ(hazardsByZone)) - return hazardsByZone - - # Returns a dictionary that is keyed on (phen, sig), and contains a list - # of all hazards for each key value. - def __organizeByPhenSig(self, hazardList): - hazards = {} - for h in hazardList: - key = (h['phen'], h['sig']) - hazards.setdefault(key, []).append(h) - - self.log.debug("HazardByPhenSig:" + self.__printHBZ(hazards)) - return hazards - - - #compares two lists of hazards (zone1, zone2) for two zones. Returns - #whether the same hazards exist in both zones. Must be an exact - #match (like a operator==) - def __comboCompare(self, hazardsByZone, zone1, zone2): - compareList = ['phen', 'sig', 'pil', 'startTime', 'endTime', 'officeid', 'act'] - if hazardsByZone.has_key(zone1) and hazardsByZone.has_key(zone2): - list1 = hazardsByZone[zone1] - list2 = hazardsByZone[zone2] - if len(list1) != len(list2): - return 0 - for i in range(len(list1)): - found = 0 - for j in range(len(list2)): - if self.hazardCompare(list1[i], list2[j], compareList): - found = 1 - break - if found == 0: - return 0 - return 1 - - elif not hazardsByZone.has_key(zone1) and \ - not hazardsByZone.has_key(zone2): - return 1 - else: - return 0 - - - #analyzes the hazardsByZone and the list of desired editArea combinations, - #and ensures that the hazards are the same for every zone in each - #combination. If not, separates out those zones. Returns the new - #zone grouping. 
- def __recombineZoneGroups(self, hazardsByZone, editAreas): - outEditAreas = [] - for combo in editAreas: - newCombo = [[combo[0]]] - for i in range(1, len(combo)): - found = 0 - for j in range(len(newCombo)): - if self.__comboCompare(hazardsByZone, newCombo[j][0], - combo[i]): - newCombo[j].append(combo[i]) - found = 1 - break - if found == 0: - newCombo.append([combo[i]]) - for nc in newCombo: - outEditAreas.append(nc) - - return outEditAreas - - #-------------------------------------------------------------- - # The following methods sample Hazard grids, obtain the active - # table, and create the analyzed table (including injecting - # the vtec strings into the table. - #-------------------------------------------------------------- - - def __analyzedTable(self, areas, filter): - # main routine to obtain the analyzed table. Analyzed table - # is the composite between the proposed and active tables. - # filter is the function that filters out the hazards that - # should be considered. - - # Sample the Hazards Grid - atable = self.__getProposedTable(areas) - self.log.info("Proposed Table length: " + str(len(atable))) - self.log.debug("Sampled Proposed Table: " + - self.printActiveTable(atable, combine=True)) - - # Combine time entries - atable = self.__timeCombine(atable) - self.log.info("Time Combine Proposed Table length: " + str(len(atable))) - self.log.info("Proposed Table:" + - self.printActiveTable(atable, combine=True)) - - # Get the active table from the IFPServer - rawactTable = self.__getActiveTable() - self.log.info("Raw Active Table: " + - self.printActiveTable(rawactTable, combine=True)) - if rawactTable is None: - self.log.error("Unable to retrieve VTEC active table. 
" + - "Product VTEC codes may be suspect.") - rawactTable = [] - self.log.info("Raw Active Table length: " + str(len(rawactTable))) - - # Do specific product filtering - self.log.debug("Analyzed Table, prior to site/product filtering: " + - self.printActiveTable(atable, combine=True)) - atable = filter(atable, allowedHazardsOnly=False) - self.log.info(\ - "Filtered Analyzed Table length, prior to VTEC injection: " + - str(len(atable))) - - # Perform site filtering on the active table. We keep - # our site and SPC. - allGEOTable = [] - siteFilter = [self.__siteID4, self.__spcSiteID4] - for a in rawactTable: - if a['officeid'] in siteFilter: - allGEOTable.append(a) - - # Perform GEO (edit area) filtering on the active table. - # Also filter for TEST mode - self.__allGEOActiveTable = copy.deepcopy(allGEOTable) - actTable = [] - for a in self.__allGEOActiveTable: - if a['id'] not in self.__zoneList: - continue #skip over entries not in our zone list - # If we are in TEST mode, filter out all except 'T' - # Otherwise, filter out all 'T' - testEntry = a['vtecstr'].find('/T.') == 0 - if self.__vtecMode == "T": - if testEntry: - actTable.append(a) - else: - if not testEntry: - actTable.append(a) - actTable = filter(actTable, allowedHazardsOnly=True) #also filter the active table - - self.log.info("Filtered Active Table length: " + str(len(actTable))) - self.log.info("Filtered Active Table:" + - self.printActiveTable(actTable, combine=True)) - self.__activeTable = copy.deepcopy(actTable) - - # Merge the proposed and active tables, to arrive at the analyzed table - atable = self.__mergeActiveProposed(atable, actTable, self.__pil, - areas) - self.log.info("Analyzed Table length: " + str(len(atable))) - - # Finished - self.log.info("Analyzed Table: " + self.printActiveTable(atable, - combine=True)) - - return atable - - - def __getActiveTable(self): - #Uses the IFPClient interface to get the VTEC active table from - #the server. Returns None on failure. 
- - try: - table = self.__ifpClient.getVTECActiveTable(self.__activeTableMode) - table = ActiveTableVtec.transformActiveTableToPython(table) - return table - - except: - self.log.exception("Unable to access VTEC Active Table: ") - raise - - def __createCityHazards(self): - if not self.__accurateCities: - return None - - self.log.info("Evaluating hazards for cities.") - - # set up sample requests and get the ParmHistos - eaMap = {} - editAreas = [] - for ea in self.__cityEditAreas: - ea, city = ea - editAreas.append(ea) - id = ea.getId().getName() - eaMap[id] = city - - parmHistos = self.__doSamplingOfHazards(editAreas) - - # make proposed table - pTable = self.__makeCityTable(parmHistos, eaMap) - - # consolidate - pTable = self.__consolidateTime(pTable) - - # remove old - keep those ended within 30 min - cutoff = self.__time - 30 * 60 - pTable = filter(lambda x: x['endTime'] > cutoff, pTable) - - # handle UFN events - convert ending time to max - for proposed in pTable: - if (proposed['phen'], proposed['sig']) in self.__ufnKeys: - proposed['startTime'] = self.__time #now - proposed['endTime'] = 2 ** 31 - 1 #forever - proposed['ufn'] = 1 #until further notice - - self.log.info("Hazards afflicting cities:" + - self.printActiveTable(pTable, combine=True, idType='city')) - - return pTable - - # Create city hazard table from samples - def __makeCityTable(self, parmHistos, eaMap): - rval = [] - - phIter = parmHistos.iterator() - while phIter.hasNext(): - ph = phIter.next() - areaID = ph.area().getId().getName() - areaPoints = ph.numberOfGridPoints() - samples = ph.histoSamples() - city = eaMap.get(areaID) - - for s in samples: - areaTime = TimeRange.TimeRange(s.validTime()) # timerange - histpairs = s.histogram() - for p in histpairs: - subkeys = p.value().discrete().getSubKeys() - for sk in subkeys: - # skip if no hazard - if sk == "": - continue - - d = {} - d['act'] = '' - d['id'] = city - d['phensig'] = sk - d['seg'] = 0 #normally zero, except if aux data - 
d['startTime'] = float(areaTime.startTime().unixTime()) - - # possibly shorten the endTime based on - # self.__hazardEndTime - if self.__hazardEndTime is not None and \ - areaTime.endTime().unixTime() > self.__hazardEndTime: - d['endTime'] = float(self.__hazardEndTime) - else: - d['endTime'] = float(areaTime.endTime().unixTime()) - - if VTECTable.VTECTable.has_key(sk[:4]): - d['phen'] = sk[:2] - d['sig'] = sk[3] - else: # locally defined hazard - d['phen'] = sk - d['sig'] = "" # empty significance - - rval.append(d) - - return rval - - - def __doSamplingOfHazards(self, editAreas): - # Samples the Hazards Grid in the ifpServer. Returns a list - # of ParmHistos. - - # Determine the ParmID for Hazards out of the given database - dbid = JavaDatabaseID(self.__databaseID) -# pid = filter(lambda x: str(x).find("Hazards") != -1, -# self.__ifpClient.getParmList(self.__databaseID))[0] - parmList = self.__ifpClient.getParmList(dbid) - for p in parmList: - if p.getParmName() == "Hazards": - pid = p - break - - # TimeRange to sample - # Use hazardEndTime if present - if self.__hazardEndTime is not None: - tr = TimeRange.TimeRange(AbsTime.AbsTime.current(), - AbsTime.AbsTime(self.__hazardEndTime)) - else: #(everything) - tr = TimeRange.allTimes() - - # Determine the sampler request structures - sampreqs = ArrayList() - for ea in editAreas: - if type(ea) is str: - sampreqs.add(SamplerRequest(pid, - ReferenceID(ea), tr.toJavaObj())) - else: - sampreqs.add(SamplerRequest(pid, ea, tr.toJavaObj())) - - # Perform sampling - hs = HistoSampler(self.__ifpClient.getJavaClient(), sampreqs) - #parmHistos = hs.getParmHisto_SeqOf() - parmHistos = hs.getParmHisto() - - return parmHistos - - # Create proposed table from samples - def __makeProposedTable(self, parmHistos): - rval = [] - size = parmHistos.size() - #for ph in parmHistos: - for x in range(size): - ph = parmHistos.get(x) - areaID = ph.area().getId().getName() - areaPoints = ph.numberOfGridPoints() - samples = ph.histoSamples() - - 
for s in samples: - areaTime = TimeRange.TimeRange(s.validTime()) # timerange - histpairs = s.histogram() - for p in histpairs: - subkeys = p.value().discrete().getSubKeys() - sksize = subkeys.size() - for y in range(sksize): - sk = str(subkeys.get(y)) - d = {} - d['id'] = areaID - d['officeid'] = self.__siteID4 - d['pil'] = self.__pil - d['phensig'] = sk - d['seg'] = 0 #normally zero, except if aux data - d['startTime'] = float(areaTime.startTime().unixTime()) - - # possibly shorten the endTime based on - # self.__hazardEndTime - if self.__hazardEndTime is not None and \ - areaTime.endTime().unixTime() > self.__hazardEndTime: - d['endTime'] = float(self.__hazardEndTime) - else: - d['endTime'] = float(areaTime.endTime().unixTime()) - - d['areaPoints'] = areaPoints - d['valuePoints'] = p.count() - d['act'] = "???" #Determined after merges - d['etn'] = "???" #Mostly Determined after merges - if VTECTable.VTECTable.has_key(sk[:4]): - d['phen'] = sk[:2] - d['sig'] = sk[3] - d['hdln'] = VTECTable.VTECTable[sk[:4]]['hdln'] - else: # locally defined hazard - d['phen'] = sk - d['sig'] = "" # empty significance - desc = \ - DiscreteKey.discreteDefinition(self.__dataMgr.getSiteID()).keyDesc( - "Hazards_SFC", sk) - d['hdln'] = desc - - #special checks for aux data - auxindex = sk.find(':') - if auxindex != -1: - auxData = sk[auxindex + 1:] - #national center uses: aux data is the etn number - if (d['phen'], d['sig']) in self.__ncKeys: - try: - number = int(auxData) - #tropical events may be either seg or etn - if (d['phen'], d['sig']) in self.__tpcKeys: - if number >= int(self.__tpcBaseETN): - d['etn'] = number - else: - d['seg'] = number - else: - d['etn'] = number - except: - self.log.error("Bad auxData for ", - "National Center:" + auxData + str(d)) - - #other aux data interpreted as segment number - else: - try: - segment = int(auxData) - d['seg'] = segment - except: - self.log.error("Bad auxData for seg:" + - auxData + str(d)) - rval.append(d) - return rval - - - # Gets 
the proposed hazards table from the server. - # Note that proposed table has 'areaPoints', and 'valuePoints' within - # it, which will be later stripped out. - def __getProposedTable(self, editAreas): - rval = [] - - # set up sample requests and get the ParmHistos - parmHistos = self.__doSamplingOfHazards(editAreas) - - # make proposed table - pTable = self.__makeProposedTable(parmHistos) - - # handle UFN events - convert ending time to max - for proposed in pTable: - if (proposed['phen'], proposed['sig']) in self.__ufnKeys: - proposed['startTime'] = self.__time #now - proposed['endTime'] = 2 ** 31 - 1 #forever - proposed['ufn'] = 1 #until further notice - return pTable - - - # Utility function to combine - def __timeReduce(self, atable, index): - if index >= len(atable) - 1: - return - if atable[index]['endTime'] == atable[index + 1]['startTime']: - atable[index]['endTime'] = atable[index + 1]['endTime'] - del atable[index + 1] - self.__timeReduce(atable, index) - - # Remove any None Headlines - def __stripNone(self, atable): - # First punt any headlines - return filter(lambda x : x['phensig'] != '', atable) - - # Remove any entries that are in the past - def __stripOld(self, atable): - now = self.__time - return filter(lambda x : x['endTime'] > now, atable) - - # Truncate entries to current hour that start in the past - # must call after stripOld - def __truncateCurrentTime(self, atable): - nowHour = int(self.__time / 3600) * 3600 - for a in atable: - if a['startTime'] < nowHour: - a['startTime'] = nowHour - return atable - - # Remove any entries that occupy less than the sampling threshold - # of the area. Threshold is met for a given % of the area covered - # or a number of grid points covered. If None is given, then that - # critera is not considered. 
- def __coverageFilter(self, atable): - percent = self.__samplingThreshold[0] - points = self.__samplingThreshold[1] - if percent is not None and points is not None: - atable = filter(lambda x : - x['valuePoints'] / float(x['areaPoints']) >= percent or \ - x['valuePoints'] >= points, atable) - elif percent is not None: - atable = filter(lambda x : - x['valuePoints'] / float(x['areaPoints']) >= percent, atable) - elif points is not None: - atable = filter(lambda x : x['valuePoints'] >= points, atable) - else: - return [] #complete filtering - - for i in atable: - del i['valuePoints'] - del i['areaPoints'] - return atable - - # Returns a set of values found under the specified key in atable. - def __keySet(self, atable, key): - tmp = map(lambda x : x[key], atable) - rval = [] - for x in tmp: - if x not in rval: - rval.append(x) - return rval - - # Assumes that atable is for a sinlge area - def __compressTime(self, atable): - # Sort by time - atable.sort(lambda x, y: cmp(x['startTime'], y['startTime'])) - - types = self.__keySet(atable, 'phensig') - - rval = [] - for t in types: - a = filter(lambda x : x['phensig'] == t, atable) - i = 0 - while i < len(a): - self.__timeReduce(a, i) - i = i + 1 - rval = rval + a - - rval.sort(lambda x, y: cmp(x['startTime'], y['startTime'])) - return rval - - def __consolidateTime(self, atable): - actions = self.__keySet(atable, 'act') - rval = [] - for i in actions: - actT = filter(lambda x: x['act'] == i, atable) - areas = self.__keySet(actT, 'id') - for j in areas: - a = filter(lambda x: x['id'] == j, actT) - rval = rval + self.__compressTime(a) - return rval - - def __timeCombine(self, atable): - atable = self.__stripNone(atable) - atable = self.__coverageFilter(atable) - atable = self.__consolidateTime(atable) - - # for cities list - keep these records to check for existence of grid - self.__oldZoneTable = filter(lambda x: - 0 <= self.__time - x['endTime'] < 1800, - atable) - - atable = self.__stripOld(atable) - atable = 
self.__truncateCurrentTime(atable) - return atable - - def __copyFields(self, record, fields): - #copies the specified fields and returns a dictionary - #containing those fields - d = {} - for f in fields: - if record.has_key(f): - d[f] = record[f] - return d - - #------------------------------------------------------------- - # The following functions handle the merging of the - # proposed and active tables. VTEC strings are calculated - # in these routines. - #------------------------------------------------------------- - - # Converts active table EXP codes that are still in effect to CON - # codes. This simplifies the logic of VTEC comparisons. Returns - # the modified active table. - def __convertEXPtoCON(self, aTable): - for a in aTable: - if a['act'] == 'EXP' and a['endTime'] > self.__time: - a['act'] = 'CON' - a['expired'] = True - return aTable - - - # Handles the special case SPC Watches, which are TO.A, SV.A - # Logic: watch in active table that matches one in proposed table from - # my office, if not, then "NEW" action code, copy the times (if within - # 30 minutes) from the SPC active table match into the proposed table. - # If match of active and proposed for my office, then do normal - # logic - but still copy the times but from my active record for my office. - # if within 30 minutes). - def __handleSPCWatches(self, proposedTable, activeTable): - compare = ['phen', 'sig', 'etn'] - for proposed in proposedTable: - # TO.A, SV.A - are the watches originally from SPC - if proposed['phen'] in ['TO', 'SV'] and proposed['sig'] == 'A': - - #attempt to find a match in the active table by my office - #attempt to find a match in the active table by SPC - #We don't care about the geography ('id') at this point. 
- myActive = None - spcActive = None - for active in activeTable: - if self.hazardCompare(proposed, active, compare) and \ - active['act'] not in ['CAN', 'UPG', 'EXP']: - if active['officeid'] == self.__siteID4: - myActive = copy.deepcopy(active) - elif active['officeid'] == self.__spcSiteID4: - spcActive = copy.deepcopy(active) - if myActive is not None and spcActive is not None: - break #for effen - got what we want - - # This is a new watch that we haven't issued before - if myActive is None: - proposed['act'] = "NEW" - - #get the times from the SPC watch - if spcActive is not None: - activeStart = spcActive['startTime'] - activeEnd = spcActive['endTime'] - else: - self.log.error("Unable to match SPC watch for " + - self.printActiveTable(proposed)) - activeStart = proposed['startTime'] - activeEnd = proposed['endTime'] #failsafe code - - # we matched the active table, so we have issued it before - # we get the times from our active watch - else: - activeStart = myActive['startTime'] - activeEnd = myActive['endTime'] - - # we need to adjust the times possibly. We compare active - # vs. proposed, and within 30minutes, then we assume that - # the time hasn't changed. Due to hourly grids, but less - # than that SPC times, we copy over the active table times. - deltaStart = abs(proposed['startTime'] - activeStart) - deltaEnd = abs(proposed['endTime'] - activeEnd) - if deltaStart < 1800: #30 minutes - proposed['startTime'] = activeStart - if deltaEnd < 1800: #30 minutes - proposed['endTime'] = activeEnd - return proposedTable - - # Checks for events that have merged together. This could result - # in dropped VTEC entries so we need to EXT one and CAN the other. - # We remove entries from the active table (memory copy) and generate - # additional CAN events. 
- def __checkForMergedEvents(self, proposedTable, activeTable): - - compare = ['id', 'phen', 'sig', 'pil'] - - createdCANEntries = [] - - for proposed in proposedTable: - matches = [] - - #record match and time overlaps for real events - for active in activeTable: - if self.hazardCompare(proposed, active, compare) and \ - active['act'] not in ['CAN', 'UPG', 'EXP'] and \ - active['endTime'] > self.__time and \ - proposed['startTime'] <= active['endTime'] and \ - proposed['endTime'] >= active['startTime']: - matches.append(active) - - #if multiple records match, we have a merged event - #we need to find the highest etn for the event matches - if len(matches) > 1: - self.log.debug("MERGE event: proposed=" + - self.printActiveTable(proposed) + - " matches=" + self.printActiveTable(matches)) - highestETN = 0 - for m in matches: - highestETN = max(highestETN, m['etn']) - - # find all other entries (non highest etn) and generate - # new CAN records, then remove the entries from activeTable - for m in matches: - if m['etn'] != highestETN: - canEntry = copy.deepcopy(m) - canEntry['act'] = 'CAN' - createdCANEntries.append(canEntry) - self.log.debug("CAN event: %s%s%s", - self.printActiveTable(canEntry), - " remEntry: ", self.printActiveTable(m)) - del activeTable[activeTable.index(m)] - - #append the set of generated CAN events - for c in createdCANEntries: - proposedTable.append(c) - - #return the modified set of records - return (proposedTable, activeTable) - - - # Checks for "CON" continuation and "EXT" extended in time codes. - # An event is considered continued two hazards have the same - # id, phen, sig, and pil, and if the end times match. An event - # is considered to be extended in time if the event overlaps - # in time. 
- def __checkForCONEXT(self, proposedTable, activeTable): - - compare = ['id', 'phen', 'sig', 'pil', 'officeid'] #considered equal - - for proposed in proposedTable: - - if proposed['act'] == 'CAN': - continue #only occurs with merged events - - if len(proposed['sig']): #is VTEC, must compare with active - for active in activeTable: - if self.hazardCompare(proposed, active, compare) and \ - active['act'] not in ['CAN', 'UPG', 'EXP']: -# and not self.__separateETNtrack(proposed, active): - - #convective watch (special case, also compare etn) - if proposed['phen'] in ['SV', 'TO'] and \ - proposed['sig'] == "A" and \ - proposed['etn'] != active['etn']: - continue #allows CAN/NEW for new convect watches - - # times exactly match - if proposed['startTime'] == active['startTime'] and \ - proposed['endTime'] == active['endTime']: - proposed['act'] = 'CON' - proposed['etn'] = active['etn'] - self.__copyTextFields(proposed, active) - - # start times both before current time, end - # times the same, CON state - elif self.__time >= proposed['startTime'] and \ - self.__time >= active['startTime'] and \ - proposed['endTime'] == active['endTime']: - proposed['act'] = 'CON' - proposed['etn'] = active['etn'] - self.__copyTextFields(proposed, active) - - # special case of event ended already, don't - # assign "EXT" even with overlap - elif self.__time >= active['endTime']: - pass #force of a new event since it ended - - # start and/or end times overlap, "EXT" case - # except when user changed the start time - # of an event has gone into effect. 
- elif self.__hazardsOverlap(proposed, active): - - if active['startTime'] <= self.__time: - if proposed['startTime'] <= self.__time or \ - active.has_key('conexted'): - proposed['act'] = 'EXT' - else: - proposed['act'] = 'EXT' - - if proposed['act'] == 'EXT': - active['conexted'] = 1 - proposed['etn'] = active['etn'] - self.__copyTextFields(proposed, active) - - #save original time so we can later determine - #whether it is EXTENDED or SHORTENED - proposed['previousStart'] = active['startTime'] - proposed['previousEnd'] = active['endTime'] - - else: #is Local, no changes to local events - pass - - for active in activeTable: - if active.has_key('conexted'): - del active['conexted'] - - return proposedTable - - # Checks for CAN, EXP, UPG - def __checkForCANEXPUPG(self, pTable, activeTable): - compare1 = ['id', 'phen', 'sig'] - newEntries = [] - - for active in activeTable: - if active['officeid'] != self.__siteID4: - continue #for a different site - - if active['act'] in ['CAN', 'UPG', 'EXP']: - continue #skip these records, event already over - - if active['pil'] != self.__pil: - continue #skip these records, since it is for another prod - - cancel_needed = 1 - - # determine if cancel is needed, cancel (CAN, EXP, UPG). - # Cancel not needed if we have an entry in proposed that - # is already in active and the times overlap, and the active - # ending time is still in the future - for proposed in pTable: - if self.hazardCompare(active, proposed, compare1): - if self.__hazardsOverlap(proposed, active) and \ - self.__time < active['endTime']: - - # active event is in effect and proposed event is in future - # cancel active event - if active['startTime'] <= self.__time and \ - proposed['startTime'] > self.__time: - break - - #convective watch, also check etn - if proposed['phen'] in ['SV', 'TO'] and \ - proposed['sig'] == 'A': - if proposed['etn'] == active['etn']: - cancel_needed = 0 - break - - else: - cancel_needed = 0 - break - - # CAN's have three special forms. 
CAN when a product is no longer - # in the proposed table, EXP when the product is no longer - # in the proposed table, and the end was within 30 min of now, - # and UPG when the phen is the same, but - # sig is upgraded, and the VTEC is still in effect. - # - if cancel_needed == 1: - - # Case One - UPG - # Area matches, phen matches, and we are going from an - # advisory to a watch, a watch to a warning, or an - # advisory to a warning. - - for proposed in pTable: - #find matches in area, do phen later - if self.hazardCompare(active, proposed, ['id']): - - #find overlaps in time - if self.__hazardsOverlap(proposed, active): - - if self.__isUpgrade(proposed, active): - active['act'] = 'UPG' - active['seg'] = 0 - if active not in newEntries: - newEntries.append(active) - cancel_needed = 0 - - # Case Two - EXP - # If it wasn't an UPG, then check for EXP. EXP if entry - # not in the proposed table, and current time is after - # the EXP time. - - if cancel_needed == 1: - timeFromEnd = self.__time - active['endTime'] # +after - if timeFromEnd >= 0: - active['act'] = 'EXP' - active['seg'] = 0 - if active not in newEntries: - newEntries.append(active) - cancel_needed = 0 - - # Final Case - CAN - # Only Allow "CAN" entries if the event is still ongoing, - # otherwise ignore the entry. - if cancel_needed == 1: - if self.__time < active['endTime']: - active['act'] = 'CAN' - active['seg'] = 0 - if active not in newEntries: - newEntries.append(active) - cancel_needed = 0 - - - # add in new entries, change any text to prevText, overviewText to - # prevOverviewText. Strip out any VTEC coding from active table. - for entry in newEntries: - if entry.has_key('segText'): - entry['prevText'] = entry['segText'] - del entry['segText'] - if entry.has_key('overviewText'): - entry['prevOverviewText'] = entry['overviewText'] - del entry['overviewText'] - if entry.has_key('vtec'): - entry['vtecstr'] = "" #erase the VTEC string. 
- del entry['overviewText'] - pTable.append(entry) - return pTable - - - ######################################################################## - # This function checks the pTable against the activeTable to determine # - # EXA or EXB - ######################################################################## - - def __checkForEXAEXB(self, pTable, activeTable): - compare1 = ['id', 'phen', 'sig', 'etn', 'pil', 'officeid'] - compare2 = ['phen', 'sig', 'pil'] - - for proposed in pTable: - - # first check to see if we have already assigned "NEW". This - # is a special case for SPC watches that now appear in the - # proposed table, but haven't been issued yet. In this case, - # we skip processing this record. - if proposed['act'] != "???": - continue - - # Assume first that this is EXA or EXB - exaexb_flag = 1 - - #if we find a match, and it overlaps in time, - #then it isn't an EXA, EXB - for active in activeTable: - if self.hazardCompare(proposed, active, compare1): - #if proposed['startTime'] <= active['endTime'] and - # proposed['endTime'] >= active['startTime'] and - if self.__hazardsOverlap(proposed, active) and \ - active['act'] not in ['CAN', 'EXP', 'UPG']: - exaexb_flag = 0 - - # no match was found, thus this is either a EXA, or EXB, - # match records with phen and sig the same - if exaexb_flag == 1: - #first check for EXA, must check ALL records before - #deciding it isn't an EXA - for active in activeTable: - if self.hazardCompare(proposed, active, compare2): -# and not self.__separateETNtrack(proposed, active): - if active['act'] not in ['CAN', 'UPG', 'EXP']: - - #if times are identical, then we extended in area - if proposed['startTime'] == active['startTime'] and \ - proposed['endTime'] == active['endTime']: - if proposed['etn'] == "???" 
or \ - proposed['etn'] == active['etn']: - proposed['exaexb'] = 'EXA' - proposed['active'] = active - break - - #if start times are both in the past or - #current, but end times equal, then it is - #an EXA - elif proposed['startTime'] <= self.__time and \ - active['startTime'] <= self.__time and \ - proposed['endTime'] == active['endTime']: - if proposed['etn'] == "???" or \ - proposed['etn'] == active['etn']: - proposed['exaexb'] = 'EXA' - proposed['active'] = active - break - - if proposed.has_key('exaexb'): - continue - - #if it isn't an EXA, now we check the records again, but - #check for overlapping or adjacent times, that do - #not occur in the past in the active table, but ensure - #that there is an event in the proposed that overlaps - #with time. Results in EXB - if proposed['act'] == "???": - for active in activeTable: - if self.hazardCompare(proposed, active, compare2): -# and not self.__separateETNtrack(proposed, active): - if active['act'] not in ['CAN', 'UPG', 'EXP']: - #if self.__hazardsOverlap(proposed, active) and - if proposed['startTime'] <= active['endTime'] and \ - proposed['endTime'] >= active['startTime'] and \ - active['endTime'] > self.__time: - if proposed['etn'] == "???" 
or \ - proposed['etn'] == active['etn']: - #ensure record overlaps with proposed - #event - for p1 in pTable: - if p1 == proposed: - continue #skip itself - if self.hazardCompare(p1, proposed, - compare2) and self.__hazardsOverlap(p1, proposed): - proposed['exaexb'] = 'EXB' - proposed['active'] = active - break - break - - # Now set the marked records to EXA/EXB unless - # there is already a record with the same ETN - # for the same phen/sig in the same zone - - # Organize hazards by zone - hazardDict = self.__organizeByZone(pTable) - for zone, hazards in hazardDict.iteritems(): - # then organize by hazard key - hazards = self.__organizeByPhenSig(hazards) - for key, hzds in hazards.iteritems(): - for proposed in hzds: - - if proposed.has_key('exaexb'): - act = proposed.pop('exaexb') - active = proposed.pop('active') - # checking if the etn is used - for p in hzds: - if p['etn'] == active['etn'] and \ - p['act'] != '???': - break - else: - proposed['act'] = act - proposed['etn'] = active['etn'] - self.__copyTextFields(proposed, active) - - if act == 'EXB': - #save original time so we can later - #determine whether it is EXTENDED - #or SHORTENED - proposed['previousStart'] = active['startTime'] - proposed['previousEnd'] = active['endTime'] - - return pTable - - - # Assigns NEW to remaining records. Has to determine the appropriate - # ETN number. - def __checkForNEW(self, pTable, activeTable): - compare = ['id', 'phen', 'sig', 'officeid'] - - #check for any remaining records that have an undefined action - #these records must be "NEW". Need to allocate a new etn, except - #in two cases: one is already identified in the proposed table, - #existing record in active table (phen,sig,id) regardless of pil. 
- # - #Already identified are basic TO.A, SV.A using aux data fields, - - allowedActions = ['NEW', 'CON', 'EXT', 'EXA', 'EXB'] - - for proposed in pTable: - if proposed['act'] == '???': - if proposed['etn'] == "???": - #check in active table for a match (from other product), - #with events that still are occurring - etn = 0 - for act in activeTable: - if self.__hazardsOverlap(proposed, act) and \ - act['act'] in allowedActions and \ - self.hazardCompare(proposed, act, compare) and \ - act['endTime'] > self.__time: - etn = act['etn'] - break - - #not found in active nor proposed, prep for new one - if etn == 0: - self.__prepETNCache(proposed) - else: - proposed['etn'] = etn #match found in active table - proposed['act'] = "NEW" - - # determine any new ETNs - self.__assignNewETNs(activeTable) - self.log.debug("New ETN cache: " + str(self.__etnCache)) - - # process again for records that are now marked NEW, but no etn - for proposed in pTable: - if proposed['act'] == 'NEW' and proposed['etn'] == "???": - proposed['etn'] = self.__getNewETN(proposed) - - return pTable - - - # Eliminates EXP codes from the table (for marine). - # Returns the filtered table. - def __eliminateEXPCodes(self, pTable): - rTable = [] - for h in pTable: - #accept all non-EXP codes - if h['act'] != 'EXP': - rTable.append(h) - - #Convert EXP into CON codes for non-yet expired events (30min) - #since marine does not permit EXP codes - elif h['endTime'] > self.__time: - h['act'] = 'CON' #convert to CON code - rTable.append(h) - - #Ignore the events if at or after the EXP time - else: - pass - - return rTable - - # add in EXP codes (for events just about ready to expire) - def __addEXPCodes(self, pTable): - #looks for events that have "CON", but are within 30 minutes of - #event ending time and converts those events to EXP. 
- for each_hazard in pTable: - if each_hazard['act'] == 'CON': - timeFromEnd = self.__time - each_hazard['endTime'] # +after - if timeFromEnd >= -30 * 60 and timeFromEnd <= 0: - each_hazard['act'] = 'EXP' #convert to expired - return pTable - - # remove EXP (actual EXP codes) when another event of same phen/sig is - # now ongoing, but only if same issuance year - def __removeEXPWithOngoingCodes(self, pTable): - compare = ['phen', 'sig', 'etn', 'id'] - tmp = [] - for h in pTable: - #events with EXP, and after ending time - removeIt = 0 - if h['act'] == 'EXP' and self.__time >= h['endTime']: - hIssueT = h.get('issueTime', self.__time) - hIssueYear = time.gmtime(hIssueT)[0] - for h1 in pTable: - #active event with same phen/sig/etn - h1IssueT = h1.get('issueTime', self.__time) - h1IssueYear = time.gmtime(h1IssueT)[0] - if h1['act'] in ['CON', 'EXA', 'EXB', 'EXT'] and \ - self.hazardCompare(h, h1, compare) and \ - h1IssueYear == hIssueYear: - removeIt = 1 - break - if removeIt == 0: - tmp.append(h) - return tmp - - - # generate VTEC strings for hazards - def __addVTECStrings(self, pTable): - for h in pTable: - # get the three middle characters of the product pil - if h.has_key('pil'): - prodCat = h['pil'] - else: - prodCat = '???' - - # get the VTEC Mode - if self.__vtecMode is None: - h['vtecstr'] = "" - continue - - # Phen and Significance - phen = h['phen'] - sig = h['sig'] - if len(sig) == 0: #local headline, non-VTEC - h['vtecstr'] = "" - continue - - # get the office ID - if h.has_key('officeid'): - siteID = h['officeid'] #4letter id - else: - siteID = "????" - - # get the ETN - if h.has_key('etn'): - if type(h['etn']) is int: - ETN = "%04i" % h['etn'] - else: - ETN = h['etn'] - else: - ETN = "????" - - # get the action - if h.has_key('act'): - action = h['act'] - else: - action = "???" 
- - # adjust time of NEW events to ensure they don't start - # earlier than now - if h['startTime'] < self.__time: - h['startTime'] = self.__time - - - # use 00000000 or explicit times for the start time? - if action is 'NEW' or \ - (action == 'EXT' and h['previousStart'] > self.__time) or \ - (action == 'EXB' and h['previousStart'] > self.__time) or \ - (h['startTime'] > self.__time): - startStr = time.strftime("%y%m%dT%H%MZ-", - time.gmtime(h['startTime'])) - else: - startStr = "000000T0000Z-" #ongoing - - # use 00000000 if event is "Until Further notice" - if h.get('ufn', 0): - endStr = "000000T0000Z/" - else: - endStr = time.strftime("%y%m%dT%H%MZ/", time.gmtime(h['endTime'])) - - # format the beastly string - vtec = '/' + self.__vtecMode + "." + action + "." + \ - siteID + '.' + phen + '.' + sig + '.' + ETN + '.' + \ - startStr + endStr - h['vtecstr'] = vtec - - - # Add in headlines if missing in the table, note that headlines - # are not added for situations of merged events, i.e., an event - # that has a CAN and a ongoing with same phen/sig and overlapping time. - # Leaving 'hdln' blank indicates no headline and no mention in hazard - # products. - def __addHeadlinesIfMissing(self, pTable): - compare = ['id', 'phen', 'sig', 'pil'] - ongoingAct = ['EXT', 'EXB', 'CON', 'NEW', 'EXA'] - for h in pTable: - if h.has_key('hdln'): - continue - phensig = h['phen'] + '.' + h['sig'] - if VTECTable.VTECTable.has_key(phensig): - - #ongoing (merged) and CAN situation? - mergedFound = 0 - for h1 in pTable: - if self.hazardCompare(h, h1, compare) and \ - h['act'] == 'CAN' and h1['act'] in ongoingAct and \ - h1['endTime'] > self.__time and \ - h['startTime'] <= h1['endTime'] and \ - h['endTime'] >= h1['startTime']: - mergedFound = 1 - h['hdln'] = "" - - if mergedFound == 1: - h['hdln'] = "" - else: - h['hdln'] = VTECTable.VTECTable[phensig]['hdln'] - else: - h['hdln'] = "" - - - # isUpgrade(), indicates whether rec2 upgrades rec1, only looks - # at act, phen and sig. 
Proposed gets NEW, EXA or EXB active gets UPG - def __isUpgrade(self, proposed, active): - # To change HazardsTable to have an UPG - # only if the other hazard is a NEW, EXA or EXB and a CAN if the - # associated hazard is CON or EXT. - if proposed['act'] in ['CON', 'EXT']: - return 0 #not an upgrade - else: - if VTECTable.checkForUpgrade(proposed['phen'], proposed['sig'], - active['phen'], active['sig']): - return 1 - else: - return 0 #not an upgrade - - # isDowngrade(), indicates whether rec2 downgrades rec1, only looks - # at phen and sig. Proposed gets NEW, active gets CAN. - def __isDowngrade(self, proposed, active): - if VTECTable.checkForDowngrade(proposed['phen'], proposed['sig'], - active['phen'], active['sig']): - return 1 - else: - return 0 #not an downgrade - - # Checks for records with the same phen/sig for the same geographical - # area (id). Eliminates the records with the lower segment number with - # same times. Combines records with multiple segment numbers with - # different times. Result is only to have 1 record per ID for phen/sig. - def __checkForMultipleSegsInSameID(self, pTable): - - #step 1: reorganize the proposed table by zone, then by phen/sig. - #dict of zones, then dict of phensigs, value is list of records. - #Also create dictionary of originally max segment numbers for phen/sig. - orgHaz = {} - orgMaxSeg = {} #key:phensig, value: max seg number - for p in pTable: - phensig = (p['phen'], p['sig']) - id = p['id'] - if orgHaz.has_key(id): - psOrgHaz = orgHaz[id] - if psOrgHaz.has_key(phensig): - records = psOrgHaz[phensig] - records.append(p) - orgHaz[id][phensig] = records - else: - orgHaz[id][phensig] = [p] - else: - orgHaz[id] = {phensig: [p]} - - # tally the original max segment number per phen/sig - if orgMaxSeg.has_key(phensig): - orgMaxSeg[phensig] = max(p['seg'], orgMaxSeg[phensig]) - else: - orgMaxSeg[phensig] = p['seg'] - - - #step 2: Check for multiple records for phensig and zone. 
- #Mark records that can be combined (adjacent/overlap). - for zone in orgHaz.keys(): - for phensig in orgHaz[zone].keys(): - records = orgHaz[zone][phensig] - # if only 1 record, we have nothing to do - if len(records) == 1: - continue - records.sort(self.__hazardSortSTET) - - #find adjacent/overlapping, mark them as record number in - #the dict entry 'rn', track overall tr in trDict (key is 'rn') - trDict = {} - for x in xrange(len(records)): - xtr = (records[x]['startTime'], records[x]['endTime']) - - #search for adjacent/overlapping - for y in xrange(x + 1, len(records)): - ytr = (records[y]['startTime'], records[y]['endTime']) - rny = records[y].get('rn', None) - if rny is None and (self.__isAdjacent(xtr, ytr) or \ - self.__overlaps(xtr, ytr)): - rnx = records[x].get('rn', x) - records[y]['rn'] = rnx #overlaps/adjacent,reuse rn - records[x]['rn'] = rnx #assign to orig to match - if trDict.has_key(rnx): - trDict[rnx] = self.__combineTR(ytr, trDict[rnx]) - else: - trDict[rnx] = self.__combineTR(xtr, ytr) - - maxSN = self.__maxSegNumber(orgHaz, phensig) #max seg num - - #now assign new segment numbers, reassign starting/ending - #times for the adjacent/overlaps, delete the temp markers - for x in xrange(len(records)): - rnx = records[x].get('rn', None) - if rnx is not None: - records[x]['seg'] = maxSN + rnx + 1 - records[x]['startTime'] = trDict[rnx][0] - records[x]['endTime'] = trDict[rnx][1] - records[x]['phensig'] = records[x]['phen'] + '.' + \ - records[x]['sig'] + ':' + `records[x]['seg']` - del records[x]['rn'] - - #now eliminate records duplicate records - newrecs = [] - for rec in records: - if rec not in newrecs: - newrecs.append(rec) - orgHaz[zone][phensig] = newrecs - - #step 3: Expand back out to list - updatedList = [] - for zone in orgHaz.keys(): - for phensig in orgHaz[zone].keys(): - records = orgHaz[zone][phensig] - for r in records: - updatedList.append(r) - - #step 4: Combine new segments if possible. 
We can tell we have - #generated new segments based on the orgMaxSeg dictionary. We assign - #them the same segments. - compare = ['pil', 'startTime', 'endTime', 'phen', 'sig'] - for x in xrange(len(updatedList)): - p = updatedList[x] - phensig = (p['phen'], p['sig']) - if orgMaxSeg.has_key(phensig): - orgMax = orgMaxSeg[phensig] - if p['seg'] > orgMax: #must be generated segment numb - - #find matching records and assign all the same seg# - #and key - for y in xrange(x + 1, len(updatedList)): - p1 = updatedList[y] - if self.hazardCompare(p, p1, compare) and \ - p1['seg'] > orgMax: - p1['seg'] = p['seg'] - p1['phensig'] = p1['phen'] + '.' + p1['sig'] + \ - ':' + `p1['seg']` - - #step 5: Eliminate duplicate entries - finalList = [] - for p in updatedList: - if p not in finalList: - finalList.append(p) - - return finalList - - # sort function: hazard records by starting time, then ending time - def __hazardSortSTET(self, r1, r2): - if r1['startTime'] < r2['startTime']: - return -1 - elif r1['startTime'] > r2['startTime']: - return 1 - else: - if r1['endTime'] < r2['endTime']: - return -1 - elif r1['endTime'] > r2['endTime']: - return 1 - else: - return 0 - - # returns max segment number for zone, phen/sig directory (support routine) - def __maxSegNumber(self, orgHaz, phensig): - maxSegNumber = 0 - for zone in orgHaz.keys(): - if orgHaz[zone].has_key(phensig): - entries = orgHaz[zone][phensig] - for e in entries: - maxSegNumber = max(maxSegNumber, e['seg']) - return maxSegNumber - - # check for valid etns for all national center products. if not, abort - def __checkValidETNcw(self, pTable): - errorLine = '**************************************************\n' - for p in pTable: - if (p['phen'], p['sig']) in self.__ncKeys and p['officeid'] != 'PGUM': - try: - a = int(p['etn']) - except: - raise Exception, "\n\n" + errorLine + "\n" + \ - "ABORTING: Found National Hazard " + \ - "with no ETN in grids. 
\n" + self.printActiveTable(p) + \ - " Fix your grids by adding watch/storm number." + \ - "\nFor tropical hazards, an override to MakeHazard" + \ - "\n is likely to blame.\n" + errorLine - - # Check the ETN for tropical events - def __validateTropicalETN(self, pTable): - errorLine = '\n**************************************************\n' - for d in pTable: - if d['act'] == 'NEW' and \ - (d['phen'], d['sig']) in self.__tpcKeys and \ - d['etn'] < int(self.__tpcBaseETN): - s = errorLine + \ - "Tropical event %s.%s has an invalid ETN of %d."\ - " Must edit the Hazards grid(s) and assign the correct ETN value.\n" % \ - (d['phen'], d['sig'], int(d['etn'])) + self.printActiveTable(d) + errorLine - self.log.error(s) - raise Exception, s - - # check for valid ETN/Actions in the analyzed table. Cannot have - # a split ETN where one part of ongoing/NEW, and the other part - # is being dropped (e.g., CAN, UPG). pTable is the analyzed active table. - def __checkValidETNsActions(self, pTable): - byZones = self.__organizeByZone(pTable) - compare = ['etn', 'phen', 'sig'] - errorLine = '**************************************************\n' - currentYear = time.gmtime(self.__time)[0] - for key in byZones: - for h in byZones[key]: - if (h['phen'], h['sig']) not in self.__ncKeys: - continue #only interested in checking national keys - if h['act'] in ['EXP', 'UPG', 'CAN']: - hissueTime = h.get('issueTime', 0) - hissueYear = time.gmtime(hissueTime)[0] #issueYear - for h1 in byZones[key]: - if self.hazardCompare(h, h1, compare) and \ - h1['act'] in ['NEW', 'CON', 'EXA', 'EXT', 'EXB'] and \ - currentYear == hissueYear: - raise Exception, "\n\n" + errorLine + "\n" + \ - "ABORTING: Found VTEC Error"\ - " with same ETN, same hazard, conflicting "\ - "actions.\n" + self.printActiveTable(h) + \ - self.printActiveTable(h1) + "\n" + \ - "Fix, if convective watch, by coordinating "\ - "with SPC. 
Otherwise serious software error.\n"\ - "Cannot have new hazard with same ETN as one "\ - "that is no longer in effect (EXP, UPG, CAN)."\ - "\n" + errorLine - - # Remove EXP actions that are 30min past the end of event - # The records were kept for conflict resolution for national events - def __removeOverdueEXPs(self, pTable): - newTable = [] - for p in pTable: - if p['act'] == 'EXP' and \ - (self.__time - p['endTime']) >= 30 * 60: - pass - else: - newTable.append(p) - - return newTable - - # Remove EXP codes that have already been issued - def __removeIssuedEXPs(self, pTable, activeTable): - newTable = [] - for proposed in pTable: - if proposed['act'] == 'EXP' and \ - proposed['endTime'] >= self.__time: - issued = False - for active in activeTable: - if active['pil'] == self.__pil and \ - active['officeid'] == self.__siteID4 and \ - active.has_key('expired'): - if proposed['id'] == active['id'] and \ - proposed['endTime'] == active['endTime']: - issued = True - break - if issued: - continue - newTable.append(proposed) - - return newTable - - #ensure that we don't have two vtecs with same action code, same etns. - #Switch the 2nd one to NEW. - def __checkETNdups(self, pTable): - keyetnmax = {} - compare = ['etn', 'phen', 'sig', 'id'] - compare2 = ['phen', 'sig'] - for p in pTable: - #look for all events to get max etn for each phen/sig - vteckey = p['phen'] + p['sig'] - if not keyetnmax.has_key(vteckey): - etn_max = 0 - for e in pTable: - if self.hazardCompare(p, e, compare2) and \ - e['etn'] > etn_max: - etn_max = e['etn'] - keyetnmax[vteckey] = etn_max - - assigned = {} - for p in pTable: - #only look for EXT, EXA, EXB events - if p['act'] in ['NEW', 'EXP', 'UPG', 'CAN', 'CON']: - continue - vteckey = p['phen'] + p['sig'] - - for p1 in pTable: - #check for matching id,etn,phen,sig,act combinations, these - #are the ones that need to be reassigned. 
- if self.hazardCompare(p, p1, compare) and \ - p['startTime'] > p1['endTime']: - #found a newer record that needs to be reassigned - #see if we have already reassigned one that overlaps in time - # phensig startend etn doublenested dictionary - akey = p['phen'] + p['sig'] - tr = (p['startTime'], p['endTime']) - trs = assigned.get(akey, {}) - etna = None - for tre in trs.keys(): - if self.__overlaps(tr, tre): - etna = trs[tre] #get previously reassigned - #update dictionary if time overlapped - trComb = self.__combineTR(tr, tre) - if tr != trComb: - del trs[tre] - trs[trComb] = etna - assigned[akey] = trs - break - - if etna is not None: - p['act'] = 'NEW' - p['etn'] = etna - - else: - #take the newest record and assign new and give new ETN - p['act'] = 'NEW' - p['etn'] = int(keyetnmax[vteckey]) + 1 - trs[tr] = p['etn'] #new etn assigned - assigned[akey] = trs #put back into dictionary - keyetnmax[vteckey] = p['etn'] #updated for new assign - - def __warnETNduplication(self, pTable): - # Check should only operate on applicable VTEC products. 
- # NOTE: this falsely identifies duplicates across year-end - # since pTable does not have issueTimes we can't determine - # which year the product was originally issued - if self.__pil not in \ - ['CFW', 'FFA', 'MWW', 'NPW', 'RFW', 'WSW']: - return - - dups = [] - byZones = self.__organizeByZone(pTable) - for id, hazards in byZones.iteritems(): - visited = [] - for p in hazards: - key = p['phen'], p['sig'], p['etn'] - if key in visited: - estr = "%s.%s:%d" % key - if estr not in dups: - dups.append(estr) - else: - visited.append(key) - - if len(dups) > 0: - errorLine = '\n******************************************************\n' - self.log.error("Illegal ETN duplication is found for:\n" + \ - str(dups) + errorLine) - - # send message to GFE - msg = "The formatted %s product contains a duplicate ETN.\n"\ - "Please transmit the product and then open a trouble ticket with the NCF."\ - % self.__pil - os.system("sendGfeMessage -u -c GFE -m '" + msg + "'") - - # copy text/overviewText into record from active to proposed - def __copyTextFields(self, proposed, active): - if active.has_key("segText"): - proposed['prevText'] = active['segText'] - if active.has_key("overviewText"): - proposed['prevOverviewText'] = active['overviewText'] - - - # add upgrade/downgrade records from the active table - def __addUpgradeDowngradeRec(self, proposedTable): - compare = ['id', 'pil', 'officeid'] - fields = ['etn', 'startTime', 'endTime', 'phen', 'sig', 'phensig', 'act'] - for rec in proposedTable: - if rec['act'] != 'NEW': - continue - for checkR in proposedTable: - if checkR['act'] not in ['CAN', 'UPG']: - continue - if self.__hazardsOverlap(checkR, rec) and \ - self.hazardCompare(checkR, rec, compare): - ################### - if self.__isDowngrade(rec, checkR): - rec['downgradeFrom'] = self.__copyFields(checkR, fields) - elif self.__isUpgrade(rec, checkR): - rec['upgradeFrom'] = self.__copyFields(checkR, fields) - - return proposedTable - - - 
############################################ - # 'inject' is the main function in vtec.py # - ############################################ - - def __mergeActiveProposed(self, pTable, activeTable, pil, areas): - - # convert active table EXP still in effect to CON - activeTable = self.__convertEXPtoCON(activeTable) - self.log.debug("After convertEXPtoCON: " + - self.printActiveTable(pTable, combine=True)) - - # Special handling for the SPC watches (TO.A, SV.A) - pTable = self.__handleSPCWatches(pTable, activeTable) - self.log.debug("After handleSPCWatches: " + - self.printActiveTable(pTable, combine=True)) - - # Drop multiple segments for same phen/sig in same "id" - pTable = self.__checkForMultipleSegsInSameID(pTable) - self.log.debug("After checkForMultipleSegsInSameID: " + - self.printActiveTable(pTable, combine=True)) - - # Check for Merged Events - pTable, activeTable = self.__checkForMergedEvents(pTable, activeTable) - self.log.debug("After checkForMergedEvents: " + - self.printActiveTable(pTable, combine=True)) - - # Check for CON and EXT actions - pTable = self.__checkForCONEXT(pTable, activeTable) - self.log.debug("After checkForCONEXT: " + - self.printActiveTable(pTable, combine=True)) - - # Check for CAN, EXP, and UPG - pTable = self.__checkForCANEXPUPG(pTable, activeTable) - self.log.debug("After checkForCANEXPUPG: " + - self.printActiveTable(pTable, combine=True)) - - # Check for EXA/EXB - pTable = self.__checkForEXAEXB(pTable, activeTable) - self.log.debug("After checkForEXAEXB: " + - self.printActiveTable(pTable, combine=True)) - - # Assign NEW to remaining records - pTable = self.__checkForNEW(pTable, activeTable) - self.log.debug("After checkForNEW: " + - self.printActiveTable(pTable, combine=True)) - - # Check for upgrades and downgrades, add records if needed - pTable = self.__addUpgradeDowngradeRec(pTable) - self.log.debug("After addUpgradeDowngradeRec: " + - self.printActiveTable(pTable, combine=True)) - - # Convert ongoing events about ready to 
expire (still in the - # proposed grids) to switch from CON to EXP - pTable = self.__addEXPCodes(pTable) - self.log.debug("After addEXPCodes: " + - self.printActiveTable(pTable, combine=True)) - - # Eliminate any EXPs if other events (same phen/sig) in effect - # at present time. - pTable = self.__removeEXPWithOngoingCodes(pTable) - self.log.debug("After removeEXPWithOngoingCodes: " + - self.printActiveTable(pTable, combine=True)) - - # Ensure valid ETN/Actions - no EXP/CAN with valid same ETN - # for national events - self.__checkValidETNsActions(pTable) - self.log.debug("After checkValidETNsActions:" + - self.printActiveTable(pTable, combine=True)) - - # Remove EXPs that are 30mins past the end of events - pTable = self.__removeOverdueEXPs(pTable) - self.log.debug("After removeOverdueEXPs:" + - self.printActiveTable(pTable, combine=True)) - - # Remove EXPs that have already been issued - pTable = self.__removeIssuedEXPs(pTable, activeTable) - self.log.debug("After removeIssuedEXPs:" + - self.printActiveTable(pTable, combine=True)) - - # Ensure that there are not ETN dups in the same segment w/diff - # action codes - self.__checkETNdups(pTable) - self.log.debug("After checkETNdups:" + - self.printActiveTable(pTable, combine=True)) - - # Warn user about ETN duplication if any - self.__warnETNduplication(pTable) - - # Complete the VTEC Strings - self.__addVTECStrings(pTable) - self.log.debug("After addVTECStrings: " + - self.printActiveTable(pTable, combine=True)) - - #add in hdln entries if they are missing - self.__addHeadlinesIfMissing(pTable) - self.log.debug("After addHeadlinesIfMissing: " + - self.printActiveTable(pTable, combine=True)) - - # Ensure that all SV.A and TO.A have valid ETNs - self.__checkValidETNcw(pTable) - - # Ensure that all tropical events have valid ETNs - self.__validateTropicalETN(pTable) - - # Return pTable, which is essentially analyzedTable at this point - return pTable - -# This section no longer needed with tropical ETN consolidation 
-# # is marine zone? -# def __isMarineZone(self, id): -# if id[0:2] in self.__marineZonesPrefix: -# return True; -# else: -# return False; -# -# # marine zones and non-marine zones for tpc phen/sigs follow their own -# # sequence of ETNs and actions. This routine determines if separate -# # ETNs/actions should occur between id1 and id2. Returns true if -# # separate ETN tracks are required - basically if id1 and id2 are one -# # marine and the other not, and the phen/sigs are identical and are tpc -# # phen/sigs. Also returns true if phen/sigs are not identical. Otherwise -# # returns false. Only considers phen/sig/id. -# def __separateETNtrack(self, rec1, rec2): -# ps1 = (rec1['phen'], rec1['sig']) -# ps2 = (rec2['phen'], rec2['sig']) -# # same phen/sig -# if ps1 == ps2: -# # tropical? -# if ps1 in self.__tpcKeys: -# # one a marine zone, the other not?, that requires sepa track -# return (self.__isMarineZone(rec1['id']) != \ -# self.__isMarineZone(rec2['id'])) -# else: -# return False #same phen/sig, not tpc, so. non separate track -# else: -# return true; - - def __processJavaCollection(self, javaObj, processMethod=None): - retVal = [] - iter = javaObj.iterator() - while iter.hasNext(): - nextObj = iter.next() - if processMethod is not None: - nextObj = processMethod(nextObj) - retVal.append(nextObj) - return retVal - - def __convertPhensig(self, javaPhensig): - phenSig = tuple(str(javaPhensig).split('.')) - return phenSig +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. 
+# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +# +# Port of A1 HazardsTable.py. +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# ??/??/?? ???????? Initial Creation. +# 05/14/13 1842 dgilling Use GFEVtecUtil to handle NEW +# ETN assignment. +# 09/24/13 1843 dgilling Handle GetNextEtnResponse. +# 11/20/13 2490 randerso Corrected error handling in __getActiveTable +# +# 02/05/14 2774 dgilling Fix error logging statements in +# __warnETNduplication() and +# __highestETNActiveTable. +# 11/11/14 4953 randerso Changed type of endTime from float to int +# 01/22/2015 4027 randerso Fix comparison of in __getCities +# 02/05/15 4099 randerso Fixed exception handling in __getActiveTable +# 05/07/2015 4027 randerso Fixed error handling, +# added NOTE about false postives for duplicate ETNs +# 10/16/2015 17771 dgilling Remove __sitesIgnoreNatlEtn. +# 10/29/2015 17701 yteng Correct parm selection for Hazards to exclude Hazardsnc +# 12/07/2015 5129 dgilling Support new IFPClient. +# 09/13/2016 19348 ryu Validate ETN for tropical events. +# 11/21/2016 5959 njensen Removed unused imports and made more pythonic +# 02/16/2017 18215 ryu Fix issue of re-creating EXP records when they have +# already been issued before the end time of an event. +# + +## +# This is a base file that is not intended to be overridden. 
+## + +import time, copy, string, logging +import os +import VTECTableUtil, VTECTable +import TimeRange, AbsTime, ActiveTableVtec +from java.util import ArrayList +from com.raytheon.uf.common.activetable import ActiveTableMode +from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID as JavaDatabaseID +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID +from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey +from com.raytheon.viz.gfe.sampler import HistoSampler, SamplerRequest +from com.raytheon.viz.gfe.vtec import GFEVtecUtil + +# This class makes an object that interfaces to the GFE hazard grid +# sampling code and the TimeCombine code and generates formatted +# hazard strings and VTEC strings for formatters. Alternate active tables +# may be defined for test purposes. +class HazardsTable(VTECTableUtil.VTECTableUtil): + def __init__(self, ifpClient, editAreas, productCategory, + filterMethod, databaseID, siteID4, activeTableName="", + vtecMode=None, samplingThreshold=(10, None), hazardEndTime=None, + creationTime=None, dataMgr=None, accurateCities=False, cityEditAreas=[]): + self.log = logging.getLogger("FormatterRunner.HazardsTable.HazardsTable") +# self.log.setLevel(logging.DEBUG) + + + VTECTableUtil.VTECTableUtil.__init__(self, None) + + # save data + self.__ifpClient = ifpClient + self.__databaseID = databaseID + self.__dataMgr = dataMgr + self.__editAreas = editAreas + self.__pil = productCategory + self.__siteID4 = siteID4 + self.__spcSiteID4 = "KWNS" + self.__tpcSiteID4 = "KNHC" + self.filterMethod = filterMethod + self.__activeTable = None + self.__allGEOActiveTable = None #not filtered by edit areas + self.__vtecMode = vtecMode + self.__etnCache = {} + + if activeTableName == "PRACTICE": + self.__activeTableMode = ActiveTableMode.PRACTICE + else: + self.__activeTableMode = ActiveTableMode.OPERATIONAL + + if hazardEndTime is None: + self.__hazardEndTime = None + else: + self.__hazardEndTime = 
hazardEndTime.unixTime() + + # list of marine products + self.__marineProds = ["CWF", "NSH", "GLF", "MWW", "OFF"] + + # list of phen/sig from national centers and "until further notice" + self.__tpcKeys = self.__processJavaCollection(GFEVtecUtil.TROPICAL_PHENSIGS, self.__convertPhensig) + self.__tpcBaseETN = '1001' + self.__ncKeys = self.__processJavaCollection(GFEVtecUtil.NATIONAL_PHENSIGS, self.__convertPhensig) + self.__ufnKeys = [('HU', 'A'), ('HU', 'W'), ('TR', 'A'), ('TR', 'W'), + ('TY', 'A'), ('TY', 'W'), ('SS', 'A'), ('SS', 'W')] + + self.__marineZonesPrefix = ["AM", "GM", "PZ", "PK", "PH", "PM", "AN", + "PS", "SL"] #list of zone name prefix that are marine zones + + # tuple of (% area coverage, numberGridCells) + self.__samplingThreshold = \ + (samplingThreshold[0] / 100.0, samplingThreshold[1]) + + #determine creation time + if creationTime is not None: + self.__time = creationTime + else: + self.__time = time.time() #now time + self.__time = (int(self.__time) / 60) * 60 #truncated to minute + + # accurate cities + self.__accurateCities = accurateCities + self.__cityEditAreas = cityEditAreas + + #convert edit areas to a single zone list + self.__zoneList = self.__singleZoneList(editAreas) + + #sample, and merge vtec codes + self.__rawAnalyzedTable = self.__analyzedTable(self.__zoneList, + self.filterMethod) + + #reorganize raw analyzed table into hazards by zone, might cause + #change in combinations + self.__hazardsByZoneDict = {} + if len(self.__rawAnalyzedTable) > 0: + # organize by id + self.__hazardsByZoneDict = self.__organizeByZone( + self.__rawAnalyzedTable) + + + self.__hazardCombinations = self.__recombineZoneGroups( + self.__hazardsByZoneDict, editAreas) + else: + # if we got an empty table, set the combos to what was specified + self.__hazardCombinations = editAreas + + self.log.debug("RecombinedZoneGroups: initial: " + str(self.__editAreas) + + " final: " + str(self.__hazardCombinations)) + + self.__cityHazards = self.__createCityHazards() + 
+ def activeTable(self): + # Returns the raw active table as a list of dictionaries + return self.__activeTable + + def rawAnalyzedTable(self): + # Returns the raw analyzed table as a list of dictionaries + return self.__rawAnalyzedTable + + def consolidatedTableByID(self): + # Returns the raw analyzed table consolidated by geo IDs, i.e., + # the ['id'] field is a list of ids. + return self.consolidateByID(self.__rawAnalyzedTable) + + def getHazardAreaCombinations(self): + # Returns a list of combinations to use that are guaranteed to + # not have different hazards within each combination. + return self.__hazardCombinations + + def getHazardList(self, editAreaList): + # Find the hazards that apply to the area and timeRange, and returns + # a list of dictionaries. This function can take a single string or + # a list. Restriction: only looks at the first element in the list. + # The returned list's 'id' field is a list of zones with that + # hazard. + + if type(editAreaList) is list and len(editAreaList): + ea = editAreaList[0] + eaList = editAreaList + elif type(editAreaList) is str: + ea = editAreaList + eaList = [editAreaList] + else: + return [] + + hazards = [] + + if ea in self.__hazardsByZoneDict: + haz = self.__hazardsByZoneDict[ea] + for h in haz: + # if a segment number is present copy while removing seg + # from the key + if 'seg' in h and h['seg'] != "": + # make a copy and change the key if we need to + newH = copy.deepcopy(h) + newH['id'] = eaList # preserve the old list of areas + # strip segments - updated to make sure GUM TRW/A hazards keep local ETN + if ((newH['phen'], newH['sig']) not in self.__ncKeys): + if string.find(newH['phensig'], ":") >= 0: + newH['phensig'] = newH['phen'] + '.' + newH['sig'] + + hazards.append(newH) + else: + # otherwise just append the hazard record + hazards.append(h) + + # Now consolidate this list of hazards with segment numbers removed. 
+ hazards = self.__consolidateTime(hazards) + + return hazards + + def getVTECString(self, fcstArea): + # Returns a string containing the vtec strings for the given forecast + # area and time range. + + # get the list of hazards for this fcst area and time range + hazards = self.getHazardList(fcstArea) #could sort in here + + # sort the list of hazards depending on the type of product + if self.__pil in self.__marineProds: # it's a marine product + hazards.sort(self.__marineHazardsSort) + else: # non-marine product + hazards.sort(self.__hazardsSort) + # hazards need upgrade records to be paired up + hazards = self.__pairUpgradeRecords(hazards) + + # get VTEC strings and VTEC records + vtecStrings = [] + for h in hazards: + vtecS = h['vtecstr'] + if len(vtecS) == 0: + continue + vtecStrings.append(vtecS) + + returnStr = "" + for s in vtecStrings: + returnStr = returnStr + s + '\n' + return returnStr + + # Returns the cities associated with the hazards that could afflict + # the cities in cityList + def getCities(self, cityList, zoneHazards): + if self.__cityHazards is None: + return + + relevant = [] + compare = ('phen', 'sig', 'endTime') + for p in self.__cityHazards: + for h in zoneHazards: + if self.hazardCompare(p, h, compare): + relevant.append(p) + break + + return self.__getCities(cityList, relevant) + + # Get cities associated with a VTEC with an EXP action + # returns None if the grid is deleted + def getCitiesForEXP(self, cityList, zone, phen, sig, expTime): + if self.__cityHazards is None: + return + + # check zone hazards for existence of the grid + if expTime <= self.__time: + for rec in self.__oldZoneTable: + if rec['id'] == zone and \ + rec['phen'] == phen and rec['sig'] == sig and \ + rec['endTime'] == expTime: + break + else: + self.log.info("No grid found for " + \ + repr(phen) + "." 
+ repr(sig) + \ + " expired at " + \ + time.asctime(time.gmtime(expTime))) + return + + # filter by phen, sig, expTime + matches = [] + for rec in self.__cityHazards: + if rec['phen'] == phen and rec['sig'] == sig and \ + rec['endTime'] == expTime: + matches.append(rec) + + return self.__getCities(cityList, matches) + + # Get cities that appear in both cityList and hazardList + # Ordering of cities should be same as cityList + def __getCities(self, cityList, hazardList): + cities = [] + for city in cityList: + for p in hazardList: + if p['id'].upper() == city.upper(): + cities.append(city) + break + return cities + + # Check the AT for the last issued records to determine cities + # that were affected by the cancelled/expired events. + # We could include cities from other events, in which case the result + # is uncertain. + + def getCitiesFromPrevious(self, ugcList, checkedVTEC, ignoredVTEC=[]): + + # local function for dict key + def event(rec): + return rec['phen'], rec['sig'], rec['etn'] + + # we only need the records from the lastest issuance of this product + + myRecords = [x for x in self.__activeTable if x['officeid'] == self.__siteID4 and \ + x['pil'] == self.__pil and \ + x['id'] in ugcList] + + lastIssued = [] + issueT = 0 + for rec in myRecords: + it = rec['issueTime'] + if self.__time >= it > issueT: + lastIssued = [rec] + issueT = it + elif it == issueT: + lastIssued.append(rec) + + if not lastIssued: + return None, 1 + + # keep track of matches + unmatched = {} + for rec in checkedVTEC: + unmatched[event(rec)] = ugcList[:] + + cities = [] + certain = 1 + compare = ('phen', 'sig', 'etn') + + for active in lastIssued: + + if active['act'] in ['CAN', 'EXP']: + # this will definitely make the result uncertain + certain = 0 + continue + elif active['act'] in ['UPG']: + continue + + match = 0 + for rec in checkedVTEC: + if self.hazardCompare(active, rec, compare): + match = 1 + break + + if match: + try: + unmatched[event(active)].remove(active['id']) + 
except ValueError: + certain = 0 + self.log.error("Too many matches for %s.%s:%04d"\ + % event(active)\ + + " in zone %s" % active['id']) + + if active.get('cities') is not None: + for city in active['cities']: + if city not in cities: + cities.append(city) + else: + certain = 0 + msg = "Active table record has no cities attribute." + self.log.error(msg) + + else: + # see if it should be ignored + for rec in ignoredVTEC: + if self.hazardCompare(active, rec, compare): + break + else: + # This active record doesn't match checked or ignored + # VTEC list - flag the result as uncertain + certain = 0 + + # check if all hazard/zone combinations have been covered + # there should be nothing in unmatched dict + + for key, zones in list(unmatched.items()): + if len(zones) > 0: + certain = 0 + break + + msg = [] + for key, zones in list(unmatched.items()): + if len(zones) > 0: + msg.append("%s.%s:%d " % key + str(zones)) + if len(msg): + msg = '\n'.join(msg) + self.log.error("The following hazard/zones are not found" + " in active table:\n" + str(msg)) + + return cities, certain + + + def __hazardsSort(self, a, b): + # Returns 1, 0, or -1 depending on whether the first hazard + # is considered higher, equal, or lower priority when compared to + # the second as defined in the VTEC directive. 
+ # 1) action code [CAN, EXP, UPG, NEW, EXB, EXA, EXT, CON] + # 2) significance (W, Y, A, O, S) + # 3) start time + # 4) phenomena (alphabetical) + + # check action code + actionCodeOrder = ["CAN", "EXP", "UPG", "NEW", "EXB", "EXA", + "EXT", "CON"] + try: + aIndex = actionCodeOrder.index(a['act']) + bIndex = actionCodeOrder.index(b['act']) + except ValueError: + self.log.error("Invalid action code in hazard %s %s", a, b) + return 0 + + if aIndex > bIndex: + return 1 + elif aIndex < bIndex: + return -1 + + # check sig + sigOrder = ["W", "Y", "A", "O", "S", "F"] + try: + aIndex = sigOrder.index(a['sig']) + bIndex = sigOrder.index(b['sig']) + except ValueError: + self.log.error("Invalid sig code in hazard %s %s", a, b) + return 0 + + if aIndex > bIndex: + return 1 + elif aIndex < bIndex: + return -1 + + # check startTime + if a['startTime'] > b['startTime']: + return 1 + elif a['startTime'] < b['startTime']: + return -1 + + # check phen + if a['phen'] > b['phen']: + return 1 + elif a['phen'] < b['phen']: + return -1 + + self.log.error("Hazards are identical in __hazardsSort %s %s", a, b) + return 0 + + def __marineHazardsSort(self, a, b): + # Returns 1, 0, or -1 depending on whether the first MARINE hazard + # is considered higher, equal, or lower priority when compared to + # the second as defined in the VTEC directive. 
+ # 1) start time + # 2) action code [CAN, EXP, UPG, NEW, EXB, EXA, EXT, CON] + # 3) significance (W, Y, A, S) + # 5) phenomena (alphabetical) + + # check startTime + if a['startTime'] > b['startTime']: + return 1 + elif a['startTime'] < b['startTime']: + return -1 + + # check action code + actionCodeOrder = ["CAN", "EXP", "UPG", "NEW", "EXB", "EXA", + "EXT", "CON"] + try: + aIndex = actionCodeOrder.index(a['act']) + bIndex = actionCodeOrder.index(b['act']) + except ValueError: + self.log.error("Invalid action code in hazard %s %s", a, b) + return 0 + + if aIndex > bIndex: + return 1 + elif aIndex < bIndex: + return -1 + + + # check sig + sigOrder = ["W", "Y", "A", "S", "F"] + try: + aIndex = sigOrder.index(a['sig']) + bIndex = sigOrder.index(b['sig']) + except ValueError: + self.log.error("Invalid sig code in hazard %s %s", a, b) + return 0 + + if aIndex > bIndex: + return 1 + elif aIndex < bIndex: + return -1 + + # check phen + if a['phen'] > b['phen']: + return 1 + elif a['phen'] < b['phen']: + return -1 + + self.log.error("Marine Hazards are identical in __marineHazardsSort %s %s", a, b) + return 0 + + def __pairUpgradeRecords(self, hazardsList): + # This method moves items in the hazardsList around such that + # upgrades and downgrades are sequential (UPG, NEW), (CAN, NEW) + # Hazard upgradeFrom fields records must match in the categories: + # start, end, etn, phen, and sig. 
+ + # get the list of upgraded or downgraded records + upDownList = [] + for h in hazardsList: + if 'upgradeFrom' in h or 'downgradeFrom' in h: + upDownList.append(h) + + # temporarily remove these guys from the hazardsList + for upDown in upDownList: + hazardsList.remove(upDown) + + # Hunt down their counterparts and add the record in the correct slot + for upDown in upDownList: + # get the fields from the up/downgradeFrom record + oldRec = {} + if 'upgradeFrom' in upDown: + oldRec = upDown['upgradeFrom'] + elif 'downgradeFrom' in upDown: + oldRec = upDown['downgradeFrom'] + + # find its match + foundMatch = 0 # set a flag + for h in hazardsList: + if oldRec['etn'] == h['etn'] and \ + oldRec['phen'] == h['phen'] and oldRec['sig'] == h['sig']: + # found a match + hazardsList.insert(hazardsList.index(h) + 1, upDown) # insert after + foundMatch = 1 + break # done with this pass through hazardsList + + if foundMatch == 0: + self.log.error("Match not found for upgrade/downgrade.") + + return hazardsList + + #----------------------------------------------------------------- + # The following set of functions are utility functions. 
+ #----------------------------------------------------------------- + + # Pretty-print a time range or a time range list + def __printTR(self, t): + s = "" + if type(t) is list: + s = '[' + for e in t: + s = s + '(' + time.asctime(time.gmtime(e[0])) + \ + ',' + time.asctime(time.gmtime(e[1])) + '),' + s = s + ']' + return s + else: + s = '(' + time.asctime(time.gmtime(t[0])) + \ + ',' + time.asctime(time.gmtime(t[1])) + ')' + return s + + #Pretty-prints the hazard by zone table + def __printHBZ(self, hazardsByZone): + s = '\n' + for id in list(hazardsByZone.keys()): + s = s + " Hazards for " + repr(id) + \ + self.printActiveTable(hazardsByZone[id]) + return s + + #provides intersection of two time ranges + def __timeIntersection(self, tr1, tr2): #tr1, tr2 tuples (startT, endT) + if tr1[0] < tr2[0]: + startTime = tr2[0] + else: + startTime = tr1[0] + if tr1[1] > tr2[1]: + endTime = tr2[1] + else: + endTime = tr1[1] + if startTime >= endTime: + return None # no intersection + else: + return (startTime, endTime) + + #provides the time ranges of non-intersection in tr1, based on + #the time range tr2. Returns a list of 0, 1, or 2 items. 
+ def __nonTimeIntersection(self, tr1, tr2): + #returns list of non intersections between tr1 and tr2 within tr1 + intersect = self.__timeIntersection(tr1, tr2) + if intersect is None: + return [tr1] + #exact match + if tr1 == tr2: + return [] + #startT same + elif tr1[0] == intersect[0]: + return [(intersect[1], tr1[1])] + #endT same + elif tr1[1] == intersect[1]: + return [(tr1[0], intersect[0])] + #middle + else: + return [(tr1[0], intersect[0]), (intersect[1], tr1[1])] + + # time contains, if time range (tr) contains time (t), return 1 + def __containsT(self, tr, t): + return (t >= tr[0] and t < tr[1]) + + # time overlaps, if tr1 overlaps tr2 (adjacent is not an overlap) + def __overlaps(self, tr1, tr2): + if self.__containsT(tr2, tr1[0]) or self.__containsT(tr1, tr2[0]): + return 1 + return 0 + + # hazard records' time overlaps + def __hazardsOverlap(self, h1, h2): + tr1 = (h1['startTime'], h1['endTime']) + tr2 = (h2['startTime'], h2['endTime']) + if self.__containsT(tr2, tr1[0]) or self.__containsT(tr1, tr2[0]): + return 1 + return 0 + + # time range is adjacent to each other + def __isAdjacent(self, tr1, tr2): + if tr1[0] == tr2[1] or tr1[1] == tr2[0]: + return 1 + return 0 + + # combine two time ranges + def __combineTR(self, tr1, tr2): + return (min(tr1[0], tr2[0]), max(tr1[1], tr2[1])) + + # prepare etn cache. Adds new entries to the etn cache, but doesn't + # figure out the etn values at this point. 
Organizes the information + # by phen.sig, then maintains a list of start/end/etn/ids + def __prepETNCache(self, proposedRecord): + + phensig = (proposedRecord['phen'], proposedRecord['sig']) + id = proposedRecord['id'] + if phensig in self.__etnCache: + for start, end, etn, ids in self.__etnCache[phensig]: + if proposedRecord['startTime'] == start and \ + proposedRecord['endTime'] == end: + ids.append(id) # add the id + return #already in the cache + times = self.__etnCache[phensig] + times.append((proposedRecord['startTime'], proposedRecord['endTime'], 0, [id])) + + else: + self.__etnCache[phensig] = [(proposedRecord['startTime'], + proposedRecord['endTime'], 0, [id])] + + # assign new etns to the etn cache. This is done after all requests + # for new etns have been made + def __assignNewETNs(self, activeTable): + + # go through each new phen,sig + for phen, sig in list(self.__etnCache.keys()): + + #determine the first new ETN to use if we need a new one + etn_base = self.__highestETNActiveTable(phen, sig, + self.__allGEOActiveTable) + etn_base = int(etn_base) + 1 #the next one in sequence + + #sort the etn cache by (start, end, etn, ids) + self.__etnCache[(phen, sig)].sort() #sort the start,end,etn,ids + + # keep track of the ids that have been given each etn + coverage = {} + + #process sequentially each (phen, sig). Entries in cache + #are list of startT (0), endT (1), etn# (2), [id] (3). + times = self.__etnCache[(phen, sig)] + for x in range(len(times)): + s1, e1, etn1, ids = times[x] + #if no etn, then use a new one + if etn1 == 0: #etn == 0? 
+ etn1 = etn_base + etn_base = etn_base + 1 + times[x] = (s1, e1, etn1, ids) + coverage[etn1] = ids[:] + + # the ids for which a record with etn1 already exists + assigned = coverage[etn1] + + #search for all adjacent or overlapping, give it the same etn + for y in range(x + 1, len(times)): + s2, e2, etn2, ids2 = times[y] + if etn2 == 0 and \ + (self.__isAdjacent((s1, e1), (s2, e2)) or\ + self.__overlaps((s1, e1), (s2, e2))): + + # check for potential ETN duplication + for id2 in ids2: + if id2 in assigned: + # cannot assign etn1 to this group since etn1 + # is already assigned to a record for the zone + break + else: + # ok to assign etn1 to this group + etn2 = etn1 #reuse the etn + times[y] = (s2, e2, etn2, ids2) + + # add the ids to assigned list + assigned.extend(ids2) + + # find highest etn in active table for phen/sig, returns it. + # This method has been dramatically re-written for A2 to use + # GFEVtecUtil to do preliminary ETN assignment instead of scrubbing + # the whole set of ActiveTableRecords to calculate it. + def __highestETNActiveTable(self, phen, sig, activeTable): + etn_base = 0 + phensig = (phen, sig) + + # find the max ETN... + # 1. highest ETN period for non-tropical + # or + # 2. 
highest ETN > 1000 for the tropical, non-GUM products (tpcKeys) + # + # Local WFOs do not assign these numbers, so they should have + # numbers < 1000 + if phensig not in self.__tpcKeys: + etn_base = GFEVtecUtil.getNextEtn(self.__siteID4, '.'.join(phensig), False, self.__activeTableMode).getNextEtn() - 1 + else: + presentyear = time.gmtime(self.__time).tm_year + for active in activeTable: + activeyear = time.gmtime(active['issueTime']).tm_year + activephensig = (active['phen'], active['sig']) + if phensig == activephensig and presentyear == activeyear: + # causes failure if tropical hazards are less than 1001 + if active['etn'] < int(self.__tpcBaseETN): + self.log.error("Incorrect ETN for tropical hazard.") + return etn_base + + #determine the new etn to use, using the etn cache + def __getNewETN(self, pRecord): + key = (pRecord['phen'], pRecord['sig']) + if key in self.__etnCache: + times = self.__etnCache[key] + for startT, endT, etn, ids in times: + if pRecord['startTime'] == startT and pRecord['endTime'] == endT: + return etn + return "???" #should never get here + + + + #----------------------------------------------------------------- + # The following set of functions are used to recombining + # records from the raw analyzed table to keep the geographic + # groups together. + #----------------------------------------------------------------- + + def __singleZoneList(self, comboList): + #Utility function to break apart a combinations list (list of list + #of zones) into a set of single zones. Returns the list of zones. + newList = [] + for c in comboList: + for z in c: + newList.append(z) + return newList + + # Returns a dictionary that is keyed on zonename, and contains a list + # of all hazards for that zone. 
+ def __organizeByZone(self, hazardList): + hazardsByZone = {} + for h in hazardList: + if h['id'] in hazardsByZone: + hazardsByZone[h['id']].append(h) + else: + hazardsByZone[h['id']] = [h] + + self.log.debug("HazardByZone: " + self.__printHBZ(hazardsByZone)) + return hazardsByZone + + # Returns a dictionary that is keyed on (phen, sig), and contains a list + # of all hazards for each key value. + def __organizeByPhenSig(self, hazardList): + hazards = {} + for h in hazardList: + key = (h['phen'], h['sig']) + hazards.setdefault(key, []).append(h) + + self.log.debug("HazardByPhenSig:" + self.__printHBZ(hazards)) + return hazards + + + #compares two lists of hazards (zone1, zone2) for two zones. Returns + #whether the same hazards exist in both zones. Must be an exact + #match (like a operator==) + def __comboCompare(self, hazardsByZone, zone1, zone2): + compareList = ['phen', 'sig', 'pil', 'startTime', 'endTime', 'officeid', 'act'] + if zone1 in hazardsByZone and zone2 in hazardsByZone: + list1 = hazardsByZone[zone1] + list2 = hazardsByZone[zone2] + if len(list1) != len(list2): + return 0 + for i in range(len(list1)): + found = 0 + for j in range(len(list2)): + if self.hazardCompare(list1[i], list2[j], compareList): + found = 1 + break + if found == 0: + return 0 + return 1 + + elif zone1 not in hazardsByZone and \ + zone2 not in hazardsByZone: + return 1 + else: + return 0 + + + #analyzes the hazardsByZone and the list of desired editArea combinations, + #and ensures that the hazards are the same for every zone in each + #combination. If not, separates out those zones. Returns the new + #zone grouping. 
+ def __recombineZoneGroups(self, hazardsByZone, editAreas): + outEditAreas = [] + for combo in editAreas: + newCombo = [[combo[0]]] + for i in range(1, len(combo)): + found = 0 + for j in range(len(newCombo)): + if self.__comboCompare(hazardsByZone, newCombo[j][0], + combo[i]): + newCombo[j].append(combo[i]) + found = 1 + break + if found == 0: + newCombo.append([combo[i]]) + for nc in newCombo: + outEditAreas.append(nc) + + return outEditAreas + + #-------------------------------------------------------------- + # The following methods sample Hazard grids, obtain the active + # table, and create the analyzed table (including injecting + # the vtec strings into the table. + #-------------------------------------------------------------- + + def __analyzedTable(self, areas, filter): + # main routine to obtain the analyzed table. Analyzed table + # is the composite between the proposed and active tables. + # filter is the function that filters out the hazards that + # should be considered. + + # Sample the Hazards Grid + atable = self.__getProposedTable(areas) + self.log.info("Proposed Table length: " + str(len(atable))) + self.log.debug("Sampled Proposed Table: " + + self.printActiveTable(atable, combine=True)) + + # Combine time entries + atable = self.__timeCombine(atable) + self.log.info("Time Combine Proposed Table length: " + str(len(atable))) + self.log.info("Proposed Table:" + + self.printActiveTable(atable, combine=True)) + + # Get the active table from the IFPServer + rawactTable = self.__getActiveTable() + self.log.info("Raw Active Table: " + + self.printActiveTable(rawactTable, combine=True)) + if rawactTable is None: + self.log.error("Unable to retrieve VTEC active table. 
" + + "Product VTEC codes may be suspect.") + rawactTable = [] + self.log.info("Raw Active Table length: " + str(len(rawactTable))) + + # Do specific product filtering + self.log.debug("Analyzed Table, prior to site/product filtering: " + + self.printActiveTable(atable, combine=True)) + atable = list(filter(atable, allowedHazardsOnly=False)) + self.log.info(\ + "Filtered Analyzed Table length, prior to VTEC injection: " + + str(len(atable))) + + # Perform site filtering on the active table. We keep + # our site and SPC. + allGEOTable = [] + siteFilter = [self.__siteID4, self.__spcSiteID4] + for a in rawactTable: + if a['officeid'] in siteFilter: + allGEOTable.append(a) + + # Perform GEO (edit area) filtering on the active table. + # Also filter for TEST mode + self.__allGEOActiveTable = copy.deepcopy(allGEOTable) + actTable = [] + for a in self.__allGEOActiveTable: + if a['id'] not in self.__zoneList: + continue #skip over entries not in our zone list + # If we are in TEST mode, filter out all except 'T' + # Otherwise, filter out all 'T' + testEntry = a['vtecstr'].find('/T.') == 0 + if self.__vtecMode == "T": + if testEntry: + actTable.append(a) + else: + if not testEntry: + actTable.append(a) + actTable = list(filter(actTable, allowedHazardsOnly=True)) #also filter the active table + + self.log.info("Filtered Active Table length: " + str(len(actTable))) + self.log.info("Filtered Active Table:" + + self.printActiveTable(actTable, combine=True)) + self.__activeTable = copy.deepcopy(actTable) + + # Merge the proposed and active tables, to arrive at the analyzed table + atable = self.__mergeActiveProposed(atable, actTable, self.__pil, + areas) + self.log.info("Analyzed Table length: " + str(len(atable))) + + # Finished + self.log.info("Analyzed Table: " + self.printActiveTable(atable, + combine=True)) + + return atable + + + def __getActiveTable(self): + #Uses the IFPClient interface to get the VTEC active table from + #the server. Returns None on failure. 
+ + try: + table = self.__ifpClient.getVTECActiveTable(self.__activeTableMode) + table = ActiveTableVtec.transformActiveTableToPython(table) + return table + + except: + self.log.exception("Unable to access VTEC Active Table: ") + raise + + def __createCityHazards(self): + if not self.__accurateCities: + return None + + self.log.info("Evaluating hazards for cities.") + + # set up sample requests and get the ParmHistos + eaMap = {} + editAreas = [] + for ea in self.__cityEditAreas: + ea, city = ea + editAreas.append(ea) + id = ea.getId().getName() + eaMap[id] = city + + parmHistos = self.__doSamplingOfHazards(editAreas) + + # make proposed table + pTable = self.__makeCityTable(parmHistos, eaMap) + + # consolidate + pTable = self.__consolidateTime(pTable) + + # remove old - keep those ended within 30 min + cutoff = self.__time - 30 * 60 + pTable = [x for x in pTable if x['endTime'] > cutoff] + + # handle UFN events - convert ending time to max + for proposed in pTable: + if (proposed['phen'], proposed['sig']) in self.__ufnKeys: + proposed['startTime'] = self.__time #now + proposed['endTime'] = 2 ** 31 - 1 #forever + proposed['ufn'] = 1 #until further notice + + self.log.info("Hazards afflicting cities:" + + self.printActiveTable(pTable, combine=True, idType='city')) + + return pTable + + # Create city hazard table from samples + def __makeCityTable(self, parmHistos, eaMap): + rval = [] + + phIter = parmHistos.iterator() + while phIter.hasNext(): + ph = next(phIter) + areaID = ph.area().getId().getName() + areaPoints = ph.numberOfGridPoints() + samples = ph.histoSamples() + city = eaMap.get(areaID) + + for s in samples: + areaTime = TimeRange.TimeRange(s.validTime()) # timerange + histpairs = s.histogram() + for p in histpairs: + subkeys = p.value().discrete().getSubKeys() + for sk in subkeys: + # skip if no hazard + if sk == "": + continue + + d = {} + d['act'] = '' + d['id'] = city + d['phensig'] = sk + d['seg'] = 0 #normally zero, except if aux data + 
d['startTime'] = float(areaTime.startTime().unixTime()) + + # possibly shorten the endTime based on + # self.__hazardEndTime + if self.__hazardEndTime is not None and \ + areaTime.endTime().unixTime() > self.__hazardEndTime: + d['endTime'] = float(self.__hazardEndTime) + else: + d['endTime'] = float(areaTime.endTime().unixTime()) + + if sk[:4] in VTECTable.VTECTable: + d['phen'] = sk[:2] + d['sig'] = sk[3] + else: # locally defined hazard + d['phen'] = sk + d['sig'] = "" # empty significance + + rval.append(d) + + return rval + + + def __doSamplingOfHazards(self, editAreas): + # Samples the Hazards Grid in the ifpServer. Returns a list + # of ParmHistos. + + # Determine the ParmID for Hazards out of the given database + dbid = JavaDatabaseID(self.__databaseID) +# pid = filter(lambda x: str(x).find("Hazards") != -1, +# self.__ifpClient.getParmList(self.__databaseID))[0] + parmList = self.__ifpClient.getParmList(dbid) + for p in parmList: + if p.getParmName() == "Hazards": + pid = p + break + + # TimeRange to sample + # Use hazardEndTime if present + if self.__hazardEndTime is not None: + tr = TimeRange.TimeRange(AbsTime.AbsTime.current(), + AbsTime.AbsTime(self.__hazardEndTime)) + else: #(everything) + tr = TimeRange.allTimes() + + # Determine the sampler request structures + sampreqs = ArrayList() + for ea in editAreas: + if type(ea) is str: + sampreqs.add(SamplerRequest(pid, + ReferenceID(ea), tr.toJavaObj())) + else: + sampreqs.add(SamplerRequest(pid, ea, tr.toJavaObj())) + + # Perform sampling + hs = HistoSampler(self.__ifpClient.getJavaClient(), sampreqs) + #parmHistos = hs.getParmHisto_SeqOf() + parmHistos = hs.getParmHisto() + + return parmHistos + + # Create proposed table from samples + def __makeProposedTable(self, parmHistos): + rval = [] + size = parmHistos.size() + #for ph in parmHistos: + for x in range(size): + ph = parmHistos.get(x) + areaID = ph.area().getId().getName() + areaPoints = ph.numberOfGridPoints() + samples = ph.histoSamples() + + for s 
in samples: + areaTime = TimeRange.TimeRange(s.validTime()) # timerange + histpairs = s.histogram() + for p in histpairs: + subkeys = p.value().discrete().getSubKeys() + sksize = subkeys.size() + for y in range(sksize): + sk = str(subkeys.get(y)) + d = {} + d['id'] = areaID + d['officeid'] = self.__siteID4 + d['pil'] = self.__pil + d['phensig'] = sk + d['seg'] = 0 #normally zero, except if aux data + d['startTime'] = float(areaTime.startTime().unixTime()) + + # possibly shorten the endTime based on + # self.__hazardEndTime + if self.__hazardEndTime is not None and \ + areaTime.endTime().unixTime() > self.__hazardEndTime: + d['endTime'] = float(self.__hazardEndTime) + else: + d['endTime'] = float(areaTime.endTime().unixTime()) + + d['areaPoints'] = areaPoints + d['valuePoints'] = p.count() + d['act'] = "???" #Determined after merges + d['etn'] = "???" #Mostly Determined after merges + if sk[:4] in VTECTable.VTECTable: + d['phen'] = sk[:2] + d['sig'] = sk[3] + d['hdln'] = VTECTable.VTECTable[sk[:4]]['hdln'] + else: # locally defined hazard + d['phen'] = sk + d['sig'] = "" # empty significance + desc = \ + DiscreteKey.discreteDefinition(self.__dataMgr.getSiteID()).keyDesc( + "Hazards_SFC", sk) + d['hdln'] = desc + + #special checks for aux data + auxindex = sk.find(':') + if auxindex != -1: + auxData = sk[auxindex + 1:] + #national center uses: aux data is the etn number + if (d['phen'], d['sig']) in self.__ncKeys: + try: + number = int(auxData) + #tropical events may be either seg or etn + if (d['phen'], d['sig']) in self.__tpcKeys: + if number >= int(self.__tpcBaseETN): + d['etn'] = number + else: + d['seg'] = number + else: + d['etn'] = number + except: + self.log.error("Bad auxData for ", + "National Center:" + auxData + str(d)) + + #other aux data interpreted as segment number + else: + try: + segment = int(auxData) + d['seg'] = segment + except: + self.log.error("Bad auxData for seg:" + + auxData + str(d)) + rval.append(d) + return rval + + + # Gets the proposed 
hazards table from the server. + # Note that proposed table has 'areaPoints', and 'valuePoints' within + # it, which will be later stripped out. + def __getProposedTable(self, editAreas): + rval = [] + + # set up sample requests and get the ParmHistos + parmHistos = self.__doSamplingOfHazards(editAreas) + + # make proposed table + pTable = self.__makeProposedTable(parmHistos) + + # handle UFN events - convert ending time to max + for proposed in pTable: + if (proposed['phen'], proposed['sig']) in self.__ufnKeys: + proposed['startTime'] = self.__time #now + proposed['endTime'] = 2 ** 31 - 1 #forever + proposed['ufn'] = 1 #until further notice + return pTable + + + # Utility function to combine + def __timeReduce(self, atable, index): + if index >= len(atable) - 1: + return + if atable[index]['endTime'] == atable[index + 1]['startTime']: + atable[index]['endTime'] = atable[index + 1]['endTime'] + del atable[index + 1] + self.__timeReduce(atable, index) + + # Remove any None Headlines + def __stripNone(self, atable): + # First punt any headlines + return [x for x in atable if x['phensig'] != ''] + + # Remove any entries that are in the past + def __stripOld(self, atable): + now = self.__time + return [x for x in atable if x['endTime'] > now] + + # Truncate entries to current hour that start in the past + # must call after stripOld + def __truncateCurrentTime(self, atable): + nowHour = int(self.__time / 3600) * 3600 + for a in atable: + if a['startTime'] < nowHour: + a['startTime'] = nowHour + return atable + + # Remove any entries that occupy less than the sampling threshold + # of the area. Threshold is met for a given % of the area covered + # or a number of grid points covered. If None is given, then that + # critera is not considered. 
+ def __coverageFilter(self, atable): + percent = self.__samplingThreshold[0] + points = self.__samplingThreshold[1] + if percent is not None and points is not None: + atable = [x for x in atable if x['valuePoints'] / float(x['areaPoints']) >= percent or \ + x['valuePoints'] >= points] + elif percent is not None: + atable = [x for x in atable if x['valuePoints'] / float(x['areaPoints']) >= percent] + elif points is not None: + atable = [x for x in atable if x['valuePoints'] >= points] + else: + return [] #complete filtering + + for i in atable: + del i['valuePoints'] + del i['areaPoints'] + return atable + + # Returns a set of values found under the specified key in atable. + def __keySet(self, atable, key): + tmp = [x[key] for x in atable] + rval = [] + for x in tmp: + if x not in rval: + rval.append(x) + return rval + + # Assumes that atable is for a sinlge area + def __compressTime(self, atable): + # Sort by time + atable.sort(lambda x, y: cmp(x['startTime'], y['startTime'])) + + types = self.__keySet(atable, 'phensig') + + rval = [] + for t in types: + a = [x for x in atable if x['phensig'] == t] + i = 0 + while i < len(a): + self.__timeReduce(a, i) + i = i + 1 + rval = rval + a + + rval.sort(lambda x, y: cmp(x['startTime'], y['startTime'])) + return rval + + def __consolidateTime(self, atable): + actions = self.__keySet(atable, 'act') + rval = [] + for i in actions: + actT = [x for x in atable if x['act'] == i] + areas = self.__keySet(actT, 'id') + for j in areas: + a = [x for x in actT if x['id'] == j] + rval = rval + self.__compressTime(a) + return rval + + def __timeCombine(self, atable): + atable = self.__stripNone(atable) + atable = self.__coverageFilter(atable) + atable = self.__consolidateTime(atable) + + # for cities list - keep these records to check for existence of grid + self.__oldZoneTable = [x for x in atable if 0 <= self.__time - x['endTime'] < 1800] + + atable = self.__stripOld(atable) + atable = self.__truncateCurrentTime(atable) + return 
atable + + def __copyFields(self, record, fields): + #copies the specified fields and returns a dictionary + #containing those fields + d = {} + for f in fields: + if f in record: + d[f] = record[f] + return d + + #------------------------------------------------------------- + # The following functions handle the merging of the + # proposed and active tables. VTEC strings are calculated + # in these routines. + #------------------------------------------------------------- + + # Converts active table EXP codes that are still in effect to CON + # codes. This simplifies the logic of VTEC comparisons. Returns + # the modified active table. + def __convertEXPtoCON(self, aTable): + for a in aTable: + if a['act'] == 'EXP' and a['endTime'] > self.__time: + a['act'] = 'CON' + a['expired'] = True + return aTable + + + # Handles the special case SPC Watches, which are TO.A, SV.A + # Logic: watch in active table that matches one in proposed table from + # my office, if not, then "NEW" action code, copy the times (if within + # 30 minutes) from the SPC active table match into the proposed table. + # If match of active and proposed for my office, then do normal + # logic - but still copy the times but from my active record for my office. + # if within 30 minutes). + def __handleSPCWatches(self, proposedTable, activeTable): + compare = ['phen', 'sig', 'etn'] + for proposed in proposedTable: + # TO.A, SV.A - are the watches originally from SPC + if proposed['phen'] in ['TO', 'SV'] and proposed['sig'] == 'A': + + #attempt to find a match in the active table by my office + #attempt to find a match in the active table by SPC + #We don't care about the geography ('id') at this point. 
+ myActive = None + spcActive = None + for active in activeTable: + if self.hazardCompare(proposed, active, compare) and \ + active['act'] not in ['CAN', 'UPG', 'EXP']: + if active['officeid'] == self.__siteID4: + myActive = copy.deepcopy(active) + elif active['officeid'] == self.__spcSiteID4: + spcActive = copy.deepcopy(active) + if myActive is not None and spcActive is not None: + break #for effen - got what we want + + # This is a new watch that we haven't issued before + if myActive is None: + proposed['act'] = "NEW" + + #get the times from the SPC watch + if spcActive is not None: + activeStart = spcActive['startTime'] + activeEnd = spcActive['endTime'] + else: + self.log.error("Unable to match SPC watch for " + + self.printActiveTable(proposed)) + activeStart = proposed['startTime'] + activeEnd = proposed['endTime'] #failsafe code + + # we matched the active table, so we have issued it before + # we get the times from our active watch + else: + activeStart = myActive['startTime'] + activeEnd = myActive['endTime'] + + # we need to adjust the times possibly. We compare active + # vs. proposed, and within 30minutes, then we assume that + # the time hasn't changed. Due to hourly grids, but less + # than that SPC times, we copy over the active table times. + deltaStart = abs(proposed['startTime'] - activeStart) + deltaEnd = abs(proposed['endTime'] - activeEnd) + if deltaStart < 1800: #30 minutes + proposed['startTime'] = activeStart + if deltaEnd < 1800: #30 minutes + proposed['endTime'] = activeEnd + return proposedTable + + # Checks for events that have merged together. This could result + # in dropped VTEC entries so we need to EXT one and CAN the other. + # We remove entries from the active table (memory copy) and generate + # additional CAN events. 
+ def __checkForMergedEvents(self, proposedTable, activeTable): + + compare = ['id', 'phen', 'sig', 'pil'] + + createdCANEntries = [] + + for proposed in proposedTable: + matches = [] + + #record match and time overlaps for real events + for active in activeTable: + if self.hazardCompare(proposed, active, compare) and \ + active['act'] not in ['CAN', 'UPG', 'EXP'] and \ + active['endTime'] > self.__time and \ + proposed['startTime'] <= active['endTime'] and \ + proposed['endTime'] >= active['startTime']: + matches.append(active) + + #if multiple records match, we have a merged event + #we need to find the highest etn for the event matches + if len(matches) > 1: + self.log.debug("MERGE event: proposed=" + + self.printActiveTable(proposed) + + " matches=" + self.printActiveTable(matches)) + highestETN = 0 + for m in matches: + highestETN = max(highestETN, m['etn']) + + # find all other entries (non highest etn) and generate + # new CAN records, then remove the entries from activeTable + for m in matches: + if m['etn'] != highestETN: + canEntry = copy.deepcopy(m) + canEntry['act'] = 'CAN' + createdCANEntries.append(canEntry) + self.log.debug("CAN event: %s%s%s", + self.printActiveTable(canEntry), + " remEntry: ", self.printActiveTable(m)) + del activeTable[activeTable.index(m)] + + #append the set of generated CAN events + for c in createdCANEntries: + proposedTable.append(c) + + #return the modified set of records + return (proposedTable, activeTable) + + + # Checks for "CON" continuation and "EXT" extended in time codes. + # An event is considered continued two hazards have the same + # id, phen, sig, and pil, and if the end times match. An event + # is considered to be extended in time if the event overlaps + # in time. 
+ def __checkForCONEXT(self, proposedTable, activeTable): + + compare = ['id', 'phen', 'sig', 'pil', 'officeid'] #considered equal + + for proposed in proposedTable: + + if proposed['act'] == 'CAN': + continue #only occurs with merged events + + if len(proposed['sig']): #is VTEC, must compare with active + for active in activeTable: + if self.hazardCompare(proposed, active, compare) and \ + active['act'] not in ['CAN', 'UPG', 'EXP']: +# and not self.__separateETNtrack(proposed, active): + + #convective watch (special case, also compare etn) + if proposed['phen'] in ['SV', 'TO'] and \ + proposed['sig'] == "A" and \ + proposed['etn'] != active['etn']: + continue #allows CAN/NEW for new convect watches + + # times exactly match + if proposed['startTime'] == active['startTime'] and \ + proposed['endTime'] == active['endTime']: + proposed['act'] = 'CON' + proposed['etn'] = active['etn'] + self.__copyTextFields(proposed, active) + + # start times both before current time, end + # times the same, CON state + elif self.__time >= proposed['startTime'] and \ + self.__time >= active['startTime'] and \ + proposed['endTime'] == active['endTime']: + proposed['act'] = 'CON' + proposed['etn'] = active['etn'] + self.__copyTextFields(proposed, active) + + # special case of event ended already, don't + # assign "EXT" even with overlap + elif self.__time >= active['endTime']: + pass #force of a new event since it ended + + # start and/or end times overlap, "EXT" case + # except when user changed the start time + # of an event has gone into effect. 
+ elif self.__hazardsOverlap(proposed, active): + + if active['startTime'] <= self.__time: + if proposed['startTime'] <= self.__time or \ + 'conexted' in active: + proposed['act'] = 'EXT' + else: + proposed['act'] = 'EXT' + + if proposed['act'] == 'EXT': + active['conexted'] = 1 + proposed['etn'] = active['etn'] + self.__copyTextFields(proposed, active) + + #save original time so we can later determine + #whether it is EXTENDED or SHORTENED + proposed['previousStart'] = active['startTime'] + proposed['previousEnd'] = active['endTime'] + + else: #is Local, no changes to local events + pass + + for active in activeTable: + if 'conexted' in active: + del active['conexted'] + + return proposedTable + + # Checks for CAN, EXP, UPG + def __checkForCANEXPUPG(self, pTable, activeTable): + compare1 = ['id', 'phen', 'sig'] + newEntries = [] + + for active in activeTable: + if active['officeid'] != self.__siteID4: + continue #for a different site + + if active['act'] in ['CAN', 'UPG', 'EXP']: + continue #skip these records, event already over + + if active['pil'] != self.__pil: + continue #skip these records, since it is for another prod + + cancel_needed = 1 + + # determine if cancel is needed, cancel (CAN, EXP, UPG). + # Cancel not needed if we have an entry in proposed that + # is already in active and the times overlap, and the active + # ending time is still in the future + for proposed in pTable: + if self.hazardCompare(active, proposed, compare1): + if self.__hazardsOverlap(proposed, active) and \ + self.__time < active['endTime']: + + # active event is in effect and proposed event is in future + # cancel active event + if active['startTime'] <= self.__time and \ + proposed['startTime'] > self.__time: + break + + #convective watch, also check etn + if proposed['phen'] in ['SV', 'TO'] and \ + proposed['sig'] == 'A': + if proposed['etn'] == active['etn']: + cancel_needed = 0 + break + + else: + cancel_needed = 0 + break + + # CAN's have three special forms. 
CAN when a product is no longer + # in the proposed table, EXP when the product is no longer + # in the proposed table, and the end was within 30 min of now, + # and UPG when the phen is the same, but + # sig is upgraded, and the VTEC is still in effect. + # + if cancel_needed == 1: + + # Case One - UPG + # Area matches, phen matches, and we are going from an + # advisory to a watch, a watch to a warning, or an + # advisory to a warning. + + for proposed in pTable: + #find matches in area, do phen later + if self.hazardCompare(active, proposed, ['id']): + + #find overlaps in time + if self.__hazardsOverlap(proposed, active): + + if self.__isUpgrade(proposed, active): + active['act'] = 'UPG' + active['seg'] = 0 + if active not in newEntries: + newEntries.append(active) + cancel_needed = 0 + + # Case Two - EXP + # If it wasn't an UPG, then check for EXP. EXP if entry + # not in the proposed table, and current time is after + # the EXP time. + + if cancel_needed == 1: + timeFromEnd = self.__time - active['endTime'] # +after + if timeFromEnd >= 0: + active['act'] = 'EXP' + active['seg'] = 0 + if active not in newEntries: + newEntries.append(active) + cancel_needed = 0 + + # Final Case - CAN + # Only Allow "CAN" entries if the event is still ongoing, + # otherwise ignore the entry. + if cancel_needed == 1: + if self.__time < active['endTime']: + active['act'] = 'CAN' + active['seg'] = 0 + if active not in newEntries: + newEntries.append(active) + cancel_needed = 0 + + + # add in new entries, change any text to prevText, overviewText to + # prevOverviewText. Strip out any VTEC coding from active table. + for entry in newEntries: + if 'segText' in entry: + entry['prevText'] = entry['segText'] + del entry['segText'] + if 'overviewText' in entry: + entry['prevOverviewText'] = entry['overviewText'] + del entry['overviewText'] + if 'vtec' in entry: + entry['vtecstr'] = "" #erase the VTEC string. 
+ del entry['overviewText'] + pTable.append(entry) + return pTable + + + ######################################################################## + # This function checks the pTable against the activeTable to determine # + # EXA or EXB + ######################################################################## + + def __checkForEXAEXB(self, pTable, activeTable): + compare1 = ['id', 'phen', 'sig', 'etn', 'pil', 'officeid'] + compare2 = ['phen', 'sig', 'pil'] + + for proposed in pTable: + + # first check to see if we have already assigned "NEW". This + # is a special case for SPC watches that now appear in the + # proposed table, but haven't been issued yet. In this case, + # we skip processing this record. + if proposed['act'] != "???": + continue + + # Assume first that this is EXA or EXB + exaexb_flag = 1 + + #if we find a match, and it overlaps in time, + #then it isn't an EXA, EXB + for active in activeTable: + if self.hazardCompare(proposed, active, compare1): + #if proposed['startTime'] <= active['endTime'] and + # proposed['endTime'] >= active['startTime'] and + if self.__hazardsOverlap(proposed, active) and \ + active['act'] not in ['CAN', 'EXP', 'UPG']: + exaexb_flag = 0 + + # no match was found, thus this is either a EXA, or EXB, + # match records with phen and sig the same + if exaexb_flag == 1: + #first check for EXA, must check ALL records before + #deciding it isn't an EXA + for active in activeTable: + if self.hazardCompare(proposed, active, compare2): +# and not self.__separateETNtrack(proposed, active): + if active['act'] not in ['CAN', 'UPG', 'EXP']: + + #if times are identical, then we extended in area + if proposed['startTime'] == active['startTime'] and \ + proposed['endTime'] == active['endTime']: + if proposed['etn'] == "???" 
or \ + proposed['etn'] == active['etn']: + proposed['exaexb'] = 'EXA' + proposed['active'] = active + break + + #if start times are both in the past or + #current, but end times equal, then it is + #an EXA + elif proposed['startTime'] <= self.__time and \ + active['startTime'] <= self.__time and \ + proposed['endTime'] == active['endTime']: + if proposed['etn'] == "???" or \ + proposed['etn'] == active['etn']: + proposed['exaexb'] = 'EXA' + proposed['active'] = active + break + + if 'exaexb' in proposed: + continue + + #if it isn't an EXA, now we check the records again, but + #check for overlapping or adjacent times, that do + #not occur in the past in the active table, but ensure + #that there is an event in the proposed that overlaps + #with time. Results in EXB + if proposed['act'] == "???": + for active in activeTable: + if self.hazardCompare(proposed, active, compare2): +# and not self.__separateETNtrack(proposed, active): + if active['act'] not in ['CAN', 'UPG', 'EXP']: + #if self.__hazardsOverlap(proposed, active) and + if proposed['startTime'] <= active['endTime'] and \ + proposed['endTime'] >= active['startTime'] and \ + active['endTime'] > self.__time: + if proposed['etn'] == "???" 
or \ + proposed['etn'] == active['etn']: + #ensure record overlaps with proposed + #event + for p1 in pTable: + if p1 == proposed: + continue #skip itself + if self.hazardCompare(p1, proposed, + compare2) and self.__hazardsOverlap(p1, proposed): + proposed['exaexb'] = 'EXB' + proposed['active'] = active + break + break + + # Now set the marked records to EXA/EXB unless + # there is already a record with the same ETN + # for the same phen/sig in the same zone + + # Organize hazards by zone + hazardDict = self.__organizeByZone(pTable) + for zone, hazards in hazardDict.items(): + # then organize by hazard key + hazards = self.__organizeByPhenSig(hazards) + for key, hzds in hazards.items(): + for proposed in hzds: + + if 'exaexb' in proposed: + act = proposed.pop('exaexb') + active = proposed.pop('active') + # checking if the etn is used + for p in hzds: + if p['etn'] == active['etn'] and \ + p['act'] != '???': + break + else: + proposed['act'] = act + proposed['etn'] = active['etn'] + self.__copyTextFields(proposed, active) + + if act == 'EXB': + #save original time so we can later + #determine whether it is EXTENDED + #or SHORTENED + proposed['previousStart'] = active['startTime'] + proposed['previousEnd'] = active['endTime'] + + return pTable + + + # Assigns NEW to remaining records. Has to determine the appropriate + # ETN number. + def __checkForNEW(self, pTable, activeTable): + compare = ['id', 'phen', 'sig', 'officeid'] + + #check for any remaining records that have an undefined action + #these records must be "NEW". Need to allocate a new etn, except + #in two cases: one is already identified in the proposed table, + #existing record in active table (phen,sig,id) regardless of pil. 
+ # + #Already identified are basic TO.A, SV.A using aux data fields, + + allowedActions = ['NEW', 'CON', 'EXT', 'EXA', 'EXB'] + + for proposed in pTable: + if proposed['act'] == '???': + if proposed['etn'] == "???": + #check in active table for a match (from other product), + #with events that still are occurring + etn = 0 + for act in activeTable: + if self.__hazardsOverlap(proposed, act) and \ + act['act'] in allowedActions and \ + self.hazardCompare(proposed, act, compare) and \ + act['endTime'] > self.__time: + etn = act['etn'] + break + + #not found in active nor proposed, prep for new one + if etn == 0: + self.__prepETNCache(proposed) + else: + proposed['etn'] = etn #match found in active table + proposed['act'] = "NEW" + + # determine any new ETNs + self.__assignNewETNs(activeTable) + self.log.debug("New ETN cache: " + str(self.__etnCache)) + + # process again for records that are now marked NEW, but no etn + for proposed in pTable: + if proposed['act'] == 'NEW' and proposed['etn'] == "???": + proposed['etn'] = self.__getNewETN(proposed) + + return pTable + + + # Eliminates EXP codes from the table (for marine). + # Returns the filtered table. + def __eliminateEXPCodes(self, pTable): + rTable = [] + for h in pTable: + #accept all non-EXP codes + if h['act'] != 'EXP': + rTable.append(h) + + #Convert EXP into CON codes for non-yet expired events (30min) + #since marine does not permit EXP codes + elif h['endTime'] > self.__time: + h['act'] = 'CON' #convert to CON code + rTable.append(h) + + #Ignore the events if at or after the EXP time + else: + pass + + return rTable + + # add in EXP codes (for events just about ready to expire) + def __addEXPCodes(self, pTable): + #looks for events that have "CON", but are within 30 minutes of + #event ending time and converts those events to EXP. 
+ for each_hazard in pTable: + if each_hazard['act'] == 'CON': + timeFromEnd = self.__time - each_hazard['endTime'] # +after + if timeFromEnd >= -30 * 60 and timeFromEnd <= 0: + each_hazard['act'] = 'EXP' #convert to expired + return pTable + + # remove EXP (actual EXP codes) when another event of same phen/sig is + # now ongoing, but only if same issuance year + def __removeEXPWithOngoingCodes(self, pTable): + compare = ['phen', 'sig', 'etn', 'id'] + tmp = [] + for h in pTable: + #events with EXP, and after ending time + removeIt = 0 + if h['act'] == 'EXP' and self.__time >= h['endTime']: + hIssueT = h.get('issueTime', self.__time) + hIssueYear = time.gmtime(hIssueT)[0] + for h1 in pTable: + #active event with same phen/sig/etn + h1IssueT = h1.get('issueTime', self.__time) + h1IssueYear = time.gmtime(h1IssueT)[0] + if h1['act'] in ['CON', 'EXA', 'EXB', 'EXT'] and \ + self.hazardCompare(h, h1, compare) and \ + h1IssueYear == hIssueYear: + removeIt = 1 + break + if removeIt == 0: + tmp.append(h) + return tmp + + + # generate VTEC strings for hazards + def __addVTECStrings(self, pTable): + for h in pTable: + # get the three middle characters of the product pil + if 'pil' in h: + prodCat = h['pil'] + else: + prodCat = '???' + + # get the VTEC Mode + if self.__vtecMode is None: + h['vtecstr'] = "" + continue + + # Phen and Significance + phen = h['phen'] + sig = h['sig'] + if len(sig) == 0: #local headline, non-VTEC + h['vtecstr'] = "" + continue + + # get the office ID + if 'officeid' in h: + siteID = h['officeid'] #4letter id + else: + siteID = "????" + + # get the ETN + if 'etn' in h: + if type(h['etn']) is int: + ETN = "%04i" % h['etn'] + else: + ETN = h['etn'] + else: + ETN = "????" + + # get the action + if 'act' in h: + action = h['act'] + else: + action = "???" 
+ + # adjust time of NEW events to ensure they don't start + # earlier than now + if h['startTime'] < self.__time: + h['startTime'] = self.__time + + + # use 00000000 or explicit times for the start time? + if action is 'NEW' or \ + (action == 'EXT' and h['previousStart'] > self.__time) or \ + (action == 'EXB' and h['previousStart'] > self.__time) or \ + (h['startTime'] > self.__time): + startStr = time.strftime("%y%m%dT%H%MZ-", + time.gmtime(h['startTime'])) + else: + startStr = "000000T0000Z-" #ongoing + + # use 00000000 if event is "Until Further notice" + if h.get('ufn', 0): + endStr = "000000T0000Z/" + else: + endStr = time.strftime("%y%m%dT%H%MZ/", time.gmtime(h['endTime'])) + + # format the beastly string + vtec = '/' + self.__vtecMode + "." + action + "." + \ + siteID + '.' + phen + '.' + sig + '.' + ETN + '.' + \ + startStr + endStr + h['vtecstr'] = vtec + + + # Add in headlines if missing in the table, note that headlines + # are not added for situations of merged events, i.e., an event + # that has a CAN and a ongoing with same phen/sig and overlapping time. + # Leaving 'hdln' blank indicates no headline and no mention in hazard + # products. + def __addHeadlinesIfMissing(self, pTable): + compare = ['id', 'phen', 'sig', 'pil'] + ongoingAct = ['EXT', 'EXB', 'CON', 'NEW', 'EXA'] + for h in pTable: + if 'hdln' in h: + continue + phensig = h['phen'] + '.' + h['sig'] + if phensig in VTECTable.VTECTable: + + #ongoing (merged) and CAN situation? + mergedFound = 0 + for h1 in pTable: + if self.hazardCompare(h, h1, compare) and \ + h['act'] == 'CAN' and h1['act'] in ongoingAct and \ + h1['endTime'] > self.__time and \ + h['startTime'] <= h1['endTime'] and \ + h['endTime'] >= h1['startTime']: + mergedFound = 1 + h['hdln'] = "" + + if mergedFound == 1: + h['hdln'] = "" + else: + h['hdln'] = VTECTable.VTECTable[phensig]['hdln'] + else: + h['hdln'] = "" + + + # isUpgrade(), indicates whether rec2 upgrades rec1, only looks + # at act, phen and sig. 
Proposed gets NEW, EXA or EXB active gets UPG + def __isUpgrade(self, proposed, active): + # To change HazardsTable to have an UPG + # only if the other hazard is a NEW, EXA or EXB and a CAN if the + # associated hazard is CON or EXT. + if proposed['act'] in ['CON', 'EXT']: + return 0 #not an upgrade + else: + if VTECTable.checkForUpgrade(proposed['phen'], proposed['sig'], + active['phen'], active['sig']): + return 1 + else: + return 0 #not an upgrade + + # isDowngrade(), indicates whether rec2 downgrades rec1, only looks + # at phen and sig. Proposed gets NEW, active gets CAN. + def __isDowngrade(self, proposed, active): + if VTECTable.checkForDowngrade(proposed['phen'], proposed['sig'], + active['phen'], active['sig']): + return 1 + else: + return 0 #not an downgrade + + # Checks for records with the same phen/sig for the same geographical + # area (id). Eliminates the records with the lower segment number with + # same times. Combines records with multiple segment numbers with + # different times. Result is only to have 1 record per ID for phen/sig. + def __checkForMultipleSegsInSameID(self, pTable): + + #step 1: reorganize the proposed table by zone, then by phen/sig. + #dict of zones, then dict of phensigs, value is list of records. + #Also create dictionary of originally max segment numbers for phen/sig. + orgHaz = {} + orgMaxSeg = {} #key:phensig, value: max seg number + for p in pTable: + phensig = (p['phen'], p['sig']) + id = p['id'] + if id in orgHaz: + psOrgHaz = orgHaz[id] + if phensig in psOrgHaz: + records = psOrgHaz[phensig] + records.append(p) + orgHaz[id][phensig] = records + else: + orgHaz[id][phensig] = [p] + else: + orgHaz[id] = {phensig: [p]} + + # tally the original max segment number per phen/sig + if phensig in orgMaxSeg: + orgMaxSeg[phensig] = max(p['seg'], orgMaxSeg[phensig]) + else: + orgMaxSeg[phensig] = p['seg'] + + + #step 2: Check for multiple records for phensig and zone. + #Mark records that can be combined (adjacent/overlap). 
+ for zone in list(orgHaz.keys()): + for phensig in list(orgHaz[zone].keys()): + records = orgHaz[zone][phensig] + # if only 1 record, we have nothing to do + if len(records) == 1: + continue + records.sort(self.__hazardSortSTET) + + #find adjacent/overlapping, mark them as record number in + #the dict entry 'rn', track overall tr in trDict (key is 'rn') + trDict = {} + for x in range(len(records)): + xtr = (records[x]['startTime'], records[x]['endTime']) + + #search for adjacent/overlapping + for y in range(x + 1, len(records)): + ytr = (records[y]['startTime'], records[y]['endTime']) + rny = records[y].get('rn', None) + if rny is None and (self.__isAdjacent(xtr, ytr) or \ + self.__overlaps(xtr, ytr)): + rnx = records[x].get('rn', x) + records[y]['rn'] = rnx #overlaps/adjacent,reuse rn + records[x]['rn'] = rnx #assign to orig to match + if rnx in trDict: + trDict[rnx] = self.__combineTR(ytr, trDict[rnx]) + else: + trDict[rnx] = self.__combineTR(xtr, ytr) + + maxSN = self.__maxSegNumber(orgHaz, phensig) #max seg num + + #now assign new segment numbers, reassign starting/ending + #times for the adjacent/overlaps, delete the temp markers + for x in range(len(records)): + rnx = records[x].get('rn', None) + if rnx is not None: + records[x]['seg'] = maxSN + rnx + 1 + records[x]['startTime'] = trDict[rnx][0] + records[x]['endTime'] = trDict[rnx][1] + records[x]['phensig'] = records[x]['phen'] + '.' + \ + records[x]['sig'] + ':' + repr(records[x]['seg']) + del records[x]['rn'] + + #now eliminate records duplicate records + newrecs = [] + for rec in records: + if rec not in newrecs: + newrecs.append(rec) + orgHaz[zone][phensig] = newrecs + + #step 3: Expand back out to list + updatedList = [] + for zone in list(orgHaz.keys()): + for phensig in list(orgHaz[zone].keys()): + records = orgHaz[zone][phensig] + for r in records: + updatedList.append(r) + + #step 4: Combine new segments if possible. We can tell we have + #generated new segments based on the orgMaxSeg dictionary. 
We assign + #them the same segments. + compare = ['pil', 'startTime', 'endTime', 'phen', 'sig'] + for x in range(len(updatedList)): + p = updatedList[x] + phensig = (p['phen'], p['sig']) + if phensig in orgMaxSeg: + orgMax = orgMaxSeg[phensig] + if p['seg'] > orgMax: #must be generated segment numb + + #find matching records and assign all the same seg# + #and key + for y in range(x + 1, len(updatedList)): + p1 = updatedList[y] + if self.hazardCompare(p, p1, compare) and \ + p1['seg'] > orgMax: + p1['seg'] = p['seg'] + p1['phensig'] = p1['phen'] + '.' + p1['sig'] + \ + ':' + repr(p1['seg']) + + #step 5: Eliminate duplicate entries + finalList = [] + for p in updatedList: + if p not in finalList: + finalList.append(p) + + return finalList + + # sort function: hazard records by starting time, then ending time + def __hazardSortSTET(self, r1, r2): + if r1['startTime'] < r2['startTime']: + return -1 + elif r1['startTime'] > r2['startTime']: + return 1 + else: + if r1['endTime'] < r2['endTime']: + return -1 + elif r1['endTime'] > r2['endTime']: + return 1 + else: + return 0 + + # returns max segment number for zone, phen/sig directory (support routine) + def __maxSegNumber(self, orgHaz, phensig): + maxSegNumber = 0 + for zone in list(orgHaz.keys()): + if phensig in orgHaz[zone]: + entries = orgHaz[zone][phensig] + for e in entries: + maxSegNumber = max(maxSegNumber, e['seg']) + return maxSegNumber + + # check for valid etns for all national center products. if not, abort + def __checkValidETNcw(self, pTable): + errorLine = '**************************************************\n' + for p in pTable: + if (p['phen'], p['sig']) in self.__ncKeys and p['officeid'] != 'PGUM': + try: + a = int(p['etn']) + except: + raise Exception("\n\n" + errorLine + "\n" + \ + "ABORTING: Found National Hazard " + \ + "with no ETN in grids. \n" + self.printActiveTable(p) + \ + " Fix your grids by adding watch/storm number." 
+ \ + "\nFor tropical hazards, an override to MakeHazard" + \ + "\n is likely to blame.\n" + errorLine) + + # Check the ETN for tropical events + def __validateTropicalETN(self, pTable): + errorLine = '\n**************************************************\n' + for d in pTable: + if d['act'] == 'NEW' and \ + (d['phen'], d['sig']) in self.__tpcKeys and \ + d['etn'] < int(self.__tpcBaseETN): + s = errorLine + \ + "Tropical event %s.%s has an invalid ETN of %d."\ + " Must edit the Hazards grid(s) and assign the correct ETN value.\n" % \ + (d['phen'], d['sig'], int(d['etn'])) + self.printActiveTable(d) + errorLine + self.log.error(s) + raise Exception(s) + + # check for valid ETN/Actions in the analyzed table. Cannot have + # a split ETN where one part of ongoing/NEW, and the other part + # is being dropped (e.g., CAN, UPG). pTable is the analyzed active table. + def __checkValidETNsActions(self, pTable): + byZones = self.__organizeByZone(pTable) + compare = ['etn', 'phen', 'sig'] + errorLine = '**************************************************\n' + currentYear = time.gmtime(self.__time)[0] + for key in byZones: + for h in byZones[key]: + if (h['phen'], h['sig']) not in self.__ncKeys: + continue #only interested in checking national keys + if h['act'] in ['EXP', 'UPG', 'CAN']: + hissueTime = h.get('issueTime', 0) + hissueYear = time.gmtime(hissueTime)[0] #issueYear + for h1 in byZones[key]: + if self.hazardCompare(h, h1, compare) and \ + h1['act'] in ['NEW', 'CON', 'EXA', 'EXT', 'EXB'] and \ + currentYear == hissueYear: + raise Exception("\n\n" + errorLine + "\n" + \ + "ABORTING: Found VTEC Error"\ + " with same ETN, same hazard, conflicting "\ + "actions.\n" + self.printActiveTable(h) + \ + self.printActiveTable(h1) + "\n" + \ + "Fix, if convective watch, by coordinating "\ + "with SPC. 
Otherwise serious software error.\n"\ + "Cannot have new hazard with same ETN as one "\ + "that is no longer in effect (EXP, UPG, CAN)."\ + "\n" + errorLine) + + # Remove EXP actions that are 30min past the end of event + # The records were kept for conflict resolution for national events + def __removeOverdueEXPs(self, pTable): + newTable = [] + for p in pTable: + if p['act'] == 'EXP' and \ + (self.__time - p['endTime']) >= 30 * 60: + pass + else: + newTable.append(p) + + return newTable + + # Remove EXP codes that have already been issued + def __removeIssuedEXPs(self, pTable, activeTable): + newTable = [] + for proposed in pTable: + if proposed['act'] == 'EXP' and \ + proposed['endTime'] >= self.__time: + issued = False + for active in activeTable: + if active['pil'] == self.__pil and \ + active['officeid'] == self.__siteID4 and \ + 'expired' in active: + if proposed['id'] == active['id'] and \ + proposed['endTime'] == active['endTime']: + issued = True + break + if issued: + continue + newTable.append(proposed) + + return newTable + + #ensure that we don't have two vtecs with same action code, same etns. + #Switch the 2nd one to NEW. + def __checkETNdups(self, pTable): + keyetnmax = {} + compare = ['etn', 'phen', 'sig', 'id'] + compare2 = ['phen', 'sig'] + for p in pTable: + #look for all events to get max etn for each phen/sig + vteckey = p['phen'] + p['sig'] + if vteckey not in keyetnmax: + etn_max = 0 + for e in pTable: + if self.hazardCompare(p, e, compare2) and \ + e['etn'] > etn_max: + etn_max = e['etn'] + keyetnmax[vteckey] = etn_max + + assigned = {} + for p in pTable: + #only look for EXT, EXA, EXB events + if p['act'] in ['NEW', 'EXP', 'UPG', 'CAN', 'CON']: + continue + vteckey = p['phen'] + p['sig'] + + for p1 in pTable: + #check for matching id,etn,phen,sig,act combinations, these + #are the ones that need to be reassigned. 
+ if self.hazardCompare(p, p1, compare) and \ + p['startTime'] > p1['endTime']: + #found a newer record that needs to be reassigned + #see if we have already reassigned one that overlaps in time + # phensig startend etn doublenested dictionary + akey = p['phen'] + p['sig'] + tr = (p['startTime'], p['endTime']) + trs = assigned.get(akey, {}) + etna = None + for tre in list(trs.keys()): + if self.__overlaps(tr, tre): + etna = trs[tre] #get previously reassigned + #update dictionary if time overlapped + trComb = self.__combineTR(tr, tre) + if tr != trComb: + del trs[tre] + trs[trComb] = etna + assigned[akey] = trs + break + + if etna is not None: + p['act'] = 'NEW' + p['etn'] = etna + + else: + #take the newest record and assign new and give new ETN + p['act'] = 'NEW' + p['etn'] = int(keyetnmax[vteckey]) + 1 + trs[tr] = p['etn'] #new etn assigned + assigned[akey] = trs #put back into dictionary + keyetnmax[vteckey] = p['etn'] #updated for new assign + + def __warnETNduplication(self, pTable): + # Check should only operate on applicable VTEC products. 
+ # NOTE: this falsely identifies duplicates across year-end + # since pTable does not have issueTimes we can't determine + # which year the product was originally issued + if self.__pil not in \ + ['CFW', 'FFA', 'MWW', 'NPW', 'RFW', 'WSW']: + return + + dups = [] + byZones = self.__organizeByZone(pTable) + for id, hazards in byZones.items(): + visited = [] + for p in hazards: + key = p['phen'], p['sig'], p['etn'] + if key in visited: + estr = "%s.%s:%d" % key + if estr not in dups: + dups.append(estr) + else: + visited.append(key) + + if len(dups) > 0: + errorLine = '\n******************************************************\n' + self.log.error("Illegal ETN duplication is found for:\n" + \ + str(dups) + errorLine) + + # send message to GFE + msg = "The formatted %s product contains a duplicate ETN.\n"\ + "Please transmit the product and then open a trouble ticket with the NCF."\ + % self.__pil + os.system("sendGfeMessage -u -c GFE -m '" + msg + "'") + + # copy text/overviewText into record from active to proposed + def __copyTextFields(self, proposed, active): + if "segText" in active: + proposed['prevText'] = active['segText'] + if "overviewText" in active: + proposed['prevOverviewText'] = active['overviewText'] + + + # add upgrade/downgrade records from the active table + def __addUpgradeDowngradeRec(self, proposedTable): + compare = ['id', 'pil', 'officeid'] + fields = ['etn', 'startTime', 'endTime', 'phen', 'sig', 'phensig', 'act'] + for rec in proposedTable: + if rec['act'] != 'NEW': + continue + for checkR in proposedTable: + if checkR['act'] not in ['CAN', 'UPG']: + continue + if self.__hazardsOverlap(checkR, rec) and \ + self.hazardCompare(checkR, rec, compare): + ################### + if self.__isDowngrade(rec, checkR): + rec['downgradeFrom'] = self.__copyFields(checkR, fields) + elif self.__isUpgrade(rec, checkR): + rec['upgradeFrom'] = self.__copyFields(checkR, fields) + + return proposedTable + + + ############################################ + # 
'inject' is the main function in vtec.py # + ############################################ + + def __mergeActiveProposed(self, pTable, activeTable, pil, areas): + + # convert active table EXP still in effect to CON + activeTable = self.__convertEXPtoCON(activeTable) + self.log.debug("After convertEXPtoCON: " + + self.printActiveTable(pTable, combine=True)) + + # Special handling for the SPC watches (TO.A, SV.A) + pTable = self.__handleSPCWatches(pTable, activeTable) + self.log.debug("After handleSPCWatches: " + + self.printActiveTable(pTable, combine=True)) + + # Drop multiple segments for same phen/sig in same "id" + pTable = self.__checkForMultipleSegsInSameID(pTable) + self.log.debug("After checkForMultipleSegsInSameID: " + + self.printActiveTable(pTable, combine=True)) + + # Check for Merged Events + pTable, activeTable = self.__checkForMergedEvents(pTable, activeTable) + self.log.debug("After checkForMergedEvents: " + + self.printActiveTable(pTable, combine=True)) + + # Check for CON and EXT actions + pTable = self.__checkForCONEXT(pTable, activeTable) + self.log.debug("After checkForCONEXT: " + + self.printActiveTable(pTable, combine=True)) + + # Check for CAN, EXP, and UPG + pTable = self.__checkForCANEXPUPG(pTable, activeTable) + self.log.debug("After checkForCANEXPUPG: " + + self.printActiveTable(pTable, combine=True)) + + # Check for EXA/EXB + pTable = self.__checkForEXAEXB(pTable, activeTable) + self.log.debug("After checkForEXAEXB: " + + self.printActiveTable(pTable, combine=True)) + + # Assign NEW to remaining records + pTable = self.__checkForNEW(pTable, activeTable) + self.log.debug("After checkForNEW: " + + self.printActiveTable(pTable, combine=True)) + + # Check for upgrades and downgrades, add records if needed + pTable = self.__addUpgradeDowngradeRec(pTable) + self.log.debug("After addUpgradeDowngradeRec: " + + self.printActiveTable(pTable, combine=True)) + + # Convert ongoing events about ready to expire (still in the + # proposed grids) to 
switch from CON to EXP + pTable = self.__addEXPCodes(pTable) + self.log.debug("After addEXPCodes: " + + self.printActiveTable(pTable, combine=True)) + + # Eliminate any EXPs if other events (same phen/sig) in effect + # at present time. + pTable = self.__removeEXPWithOngoingCodes(pTable) + self.log.debug("After removeEXPWithOngoingCodes: " + + self.printActiveTable(pTable, combine=True)) + + # Ensure valid ETN/Actions - no EXP/CAN with valid same ETN + # for national events + self.__checkValidETNsActions(pTable) + self.log.debug("After checkValidETNsActions:" + + self.printActiveTable(pTable, combine=True)) + + # Remove EXPs that are 30mins past the end of events + pTable = self.__removeOverdueEXPs(pTable) + self.log.debug("After removeOverdueEXPs:" + + self.printActiveTable(pTable, combine=True)) + + # Remove EXPs that have already been issued + pTable = self.__removeIssuedEXPs(pTable, activeTable) + self.log.debug("After removeIssuedEXPs:" + + self.printActiveTable(pTable, combine=True)) + + # Ensure that there are not ETN dups in the same segment w/diff + # action codes + self.__checkETNdups(pTable) + self.log.debug("After checkETNdups:" + + self.printActiveTable(pTable, combine=True)) + + # Warn user about ETN duplication if any + self.__warnETNduplication(pTable) + + # Complete the VTEC Strings + self.__addVTECStrings(pTable) + self.log.debug("After addVTECStrings: " + + self.printActiveTable(pTable, combine=True)) + + #add in hdln entries if they are missing + self.__addHeadlinesIfMissing(pTable) + self.log.debug("After addHeadlinesIfMissing: " + + self.printActiveTable(pTable, combine=True)) + + # Ensure that all SV.A and TO.A have valid ETNs + self.__checkValidETNcw(pTable) + + # Ensure that all tropical events have valid ETNs + self.__validateTropicalETN(pTable) + + # Return pTable, which is essentially analyzedTable at this point + return pTable + +# This section no longer needed with tropical ETN consolidation +# # is marine zone? 
+# def __isMarineZone(self, id): +# if id[0:2] in self.__marineZonesPrefix: +# return True; +# else: +# return False; +# +# # marine zones and non-marine zones for tpc phen/sigs follow their own +# # sequence of ETNs and actions. This routine determines if separate +# # ETNs/actions should occur between id1 and id2. Returns true if +# # separate ETN tracks are required - basically if id1 and id2 are one +# # marine and the other not, and the phen/sigs are identical and are tpc +# # phen/sigs. Also returns true if phen/sigs are not identical. Otherwise +# # returns false. Only considers phen/sig/id. +# def __separateETNtrack(self, rec1, rec2): +# ps1 = (rec1['phen'], rec1['sig']) +# ps2 = (rec2['phen'], rec2['sig']) +# # same phen/sig +# if ps1 == ps2: +# # tropical? +# if ps1 in self.__tpcKeys: +# # one a marine zone, the other not?, that requires sepa track +# return (self.__isMarineZone(rec1['id']) != \ +# self.__isMarineZone(rec2['id'])) +# else: +# return False #same phen/sig, not tpc, so. 
non separate track +# else: +# return true; + + def __processJavaCollection(self, javaObj, processMethod=None): + retVal = [] + iter = javaObj.iterator() + while iter.hasNext(): + nextObj = next(iter) + if processMethod is not None: + nextObj = processMethod(nextObj) + retVal.append(nextObj) + return retVal + + def __convertPhensig(self, javaPhensig): + phenSig = tuple(str(javaPhensig).split('.')) + return phenSig diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/TextFormatter.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/TextFormatter.py index 0e8293d2e1..73b34a46a5 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/TextFormatter.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/TextFormatter.py @@ -1,1010 +1,1010 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# TextFormatter.py -# Main program and Control class for producing Forecasts -# -# Author: hansen -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 02/12/2014 #2591 randerso Added retry when loading combinations fails -# 10/20/2014 #3685 randerso Changed default of lowerCase to True if not specified -# 11/30/2015 #5129 dgilling Support new IFPClient. -# 03/02/2016 #5411 randerso Fixed exception in exception handler -# 07/19/2016 #5749 randerso Fix issue with new Jep in __createArea() -# 10/31/2016 #5979 njensen Cast to primitives for compatibility - -## -# This is a base file that is not intended to be overridden. -## - -import string, getopt, sys, time, os, types, math -import ModuleAccessor -import Utility, logging, traceback -import AbsTime -from java.lang import ThreadDeath -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID, ReferenceData - -GridLoc = None -LatLonIds = [] - -MAX_TRIES = 2 - -# If someone imports TextFormatter and needs this instance -# they should either be fixed to use the IFPImporter module -# or turn this line on (which is a kludge but should make -# the incorrect code run without fixing it). 
-#IFPImporter = IFPImporter.IFPImporter - -class TextFormatter: - ## TODO: Remove dataManager from constructor and do not add it to - ## the argDict - def __init__(self, dataManager, ifpClient): - # Variable for unique combinations - self.__comboNumber = -1 - self.dataMgr = dataManager - self.ifpClient = ifpClient - self.log = logging.getLogger("FormatterRunner.TextFormatter.TextFormatter") - -# def __del__(self): -# for i in LatLonIds: -# self.dataMgr.getRefManager().deleteRefSet(i, False) - - def getForecast(self, fcstName, argDict): - " Create the forecast " - - ForecastNarrative = argDict["ForecastNarrative"] - ForecastTable = argDict["ForecastTable"] - Analysis = argDict["Analysis"] - - argDict["prevStats"] = () - argDict["productName"] = fcstName - argDict["combinations"] = None - # This allows the Interfaces class to later call this - # method for component forecasts. - # See: Interfaces.generateProduct - argDict["getForecast"] = self.getForecast - argDict["getFcstDef"] = self.getFcstDef - argDict["dataMgr"] = self.dataMgr - self.__ut = argDict["utility"] - - # Get the Forecast Definition and type from the server - #print "finding", fcstName - found, module = self.getFcstDef(fcstName, argDict) - #print "found ", found - if found == 0: - text = "Text Product Definition Not Found: " + fcstName + " " + \ - traceback.format_exc() - self.log.error("Text Product Definition Not Found: Caught Exception: " + fcstName, exc_info=True) - raise Exception, text - forecastDef = argDict["forecastDef"] - fcstType = self.__ut.set(forecastDef, "type", None) - argDict["fcstType"] = fcstType - if fcstType is None: - text = "Text Product Type Not Found: " + fcstName + " " + \ - traceback.format_exc() - self.log.error("Text Product Type Not Found: Caught Exception: " + fcstName, exc_info=True) - raise Exception, text - - argDict["varDict"] = argDict["cmdLineVarDict"] - error = self.__getRunTimeVariables(fcstName, forecastDef, fcstType, module, argDict) - if error is not None: - 
return error - - # Sanity checks - # Unless a "smart" product, - # Must have at least one edit area and time range specified - if fcstType != "smart" and fcstType != "component": - if argDict["editAreas"] == []: - text = "No Edit Areas Specified over which to generate product." - text = text + '\nTry setting "defaultEditAreas" in the product Definition' - text = text + '\nOr, if running from the command line, add a -r flag.' - text = text + '\n' + string.join(traceback.format_exc()) - self.log.error("Caught Exception: " + text) - raise Exception, text - if argDict["rawRanges"] == []: - text = "No Time Ranges Specified over which to generate product." - text = text + '\nTry setting "defaultRanges" in the product Definition' - text = text + '\nOr, if running from the command line, add a -w flag.' - text = text + '\n' + string.join(traceback.format_exc()) - self.log.error("Caught Exception: " + text) - raise Exception, text - - argDict["subType"] = fcstName - - # Component Phrase Forecasts - # Create a narrative of one forecast - if fcstType == "component": - timeRange, label = argDict["rawRanges"][0] - forecastDef = self.__createNarrativeDef(fcstName, timeRange) - fcstType = "narrative" - - # Table Forecasts - if fcstType == "table": - forecast = ForecastTable.ForecastTable() - forecast._debug = 0 - forecast._argDict = argDict - try: - text = forecast.generateForecast(argDict) - except: - self.log.error("Caught Exception: ", exc_info=True) - raise Exception, string.join(traceback.format_exc()) - - # Narrative Phrase Forecasts - elif fcstType == "narrative": - forecast = ForecastNarrative.ForecastNarrative() - forecast._debug = 0 - forecast._argDict = argDict - timeRange, label = argDict["rawRanges"][0] - forecast.getNarrativeData( - argDict, forecastDef, timeRange, argDict["editAreas"], None) - text = self.__loop(argDict, forecast, forecastDef) - - # Smart Text - elif fcstType == "smart": - product = module.TextProduct() - product._debug = 0 - argDict["self"] = 
product - argDict["module"] = module - product.setUp("T", argDict) - product._argDict = argDict - - try: - text = product.generateForecast(argDict) - except RuntimeError as e: - if 'java.lang.ThreadDeath' in str(e): - self.log.info("Formatter Canceled") - else: - self.log.error("Caught Exception: ", exc_info=True) - raise e - - # requirement for TEST phrasing for TEST products - if argDict.get('testMode', 0): - testMsg = "\nTHIS IS A TEST MESSAGE. DO NOT TAKE ACTION" + \ - " BASED ON THIS TEST\nMESSAGE.\n" - #split by "$$" - segs = text.split('\n$$') - for i in xrange(len(segs) - 1): #never the last one - if text.find(testMsg) == -1: #not found, add it in - segs[i] = segs[i] + testMsg - text = '\n$$'.join(segs) #put text back together again - if text.find(testMsg) == -1: - text = text + '\n' + testMsg - - # Translator - language = forecastDef.get('language', None) - if language is not None: - self.log.info("Translating product to %s" % language) - text = product.translateForecast(text, language) - # Convert to Upper Case - if not forecastDef.get('lowerCase', True): - text = text.upper() - else: - text = "Text Product Type Invalid " + \ - "(must be 'table', 'component' or 'narrative'): ", fcstName, type - text = text + '\n' + string.join(traceback.format_exc()) - self.log.error("Caught Exception: " + text) - raise Exception, text - - return text - - def __createNarrativeDef(self, fcstName, timeRange): - return { - "methodList": [self.assembleChildWords], - "narrativeDef": [(fcstName, timeRange.duration() / 3600)], - } - - def __loop(self, argDict, forecast, forecastDef): - # Loop through product by edit areas and time ranges - - begText = self.__ut.set(forecastDef, "beginningText", "") - endText = self.__ut.set(forecastDef, "endingText", "") - editAreaLoopBegText = self.__ut.set(forecastDef, "editAreaLoopBegText", "") - timeRangeLoopBegText = self.__ut.set(forecastDef, "timeRangeLoopBegText", "") - editAreaLoopEndText = self.__ut.set(forecastDef, 
"editAreaLoopEndText", "") - timeRangeLoopEndText = self.__ut.set(forecastDef, "timeRangeLoopEndText", "") - outerLoop = self.__ut.set(forecastDef, "outerLoop", "EditArea") - - editAreas = argDict["editAreas"] - rawRanges = argDict["rawRanges"] - # Loop through Edit Areas - text = begText - if outerLoop == "EditArea": - for editArea in editAreas: - argDict["editArea"] = editArea - text = text + editAreaLoopBegText - # Loop through time ranges - if len(rawRanges) > 0: - argDict["issueRange"] = rawRanges[0] - for rawRange, rangeName in rawRanges: - argDict["timeRange"] = rawRange - argDict["timeRangeName"] = rangeName - text = text + timeRangeLoopBegText - # forecastDef gets destroyed in narrative?, so must restore - argDict["forecastDef"] = forecastDef - try: - subText = forecast.generateForecast(argDict) - except: - self.log.error("Caught Exception: ", exc_info=True) - raise Exception, string.join(traceback.format_exc()) - try: - subText, statDict, valueDict = subText - except: - pass - text = text + subText + timeRangeLoopEndText - text = forecast.fillSpecial(text, argDict) - text = text + editAreaLoopEndText - text = forecast.fillSpecial(text, argDict) - else: - for rawRange, rangeName in rawRanges: - argDict["timeRange"] = rawRange - argDict["timeRangeName"] = rangeName - text = text + timeRangeLoopBegText - for editArea in editAreas: - argDict["editArea"] = editArea - text = text + editAreaLoopBegText - # Loop through time ranges - # forecastDef gets destroyed in narrative, so must restore - argDict["forecastDef"] = forecastDef - try: - subText, statDict, valueDict = \ - forecast.generateForecast(argDict) - except: - self.log.error("Caught Exception: ", exc_info=True) - raise Exception, string.join(traceback.format_exc()) - text = text + subText + editAreaLoopEndText - text = forecast.fillSpecial(text, argDict) - text = text + timeRangeLoopEndText - text = forecast.fillSpecial(text, argDict) - return text - - def __getRunTimeVariables(self, fcstName, 
forecastDef, fcstType, module, argDict): - # Input variables can come from various sources: - # varDict from command line - # command line switch e.g. -l language (will be in argDict) - # definition section of product - # We must check all three in that order. - - varDict = argDict["varDict"] - #print "varDict", varDict - - for item, default in [ - ("language", "english"), - ("appendFile", None), - ("lineLength", 69), # no command line option - ("timePeriod", 3), - ]: - try: # Try the varDict - #print "trying varDict", item - argDict[item] = varDict[item] - #print "got it in varDict" - except: - try: # Try argDict i.e. from command line switch - # If so, argDict is already set - #print "trying argDict", item - argValue = argDict[item] - #print "got it in argDict", argValue - except: - argValue = None - # Try - #print "getting from definition", item - if argValue is None: - argDict[item] = self.__ut.set(forecastDef, item, default) - #print "value from definition", argDict[item] - # These need to be integers - for item in ["lineLength", "timePeriod"]: - if argDict[item] is not None: - argDict[item] = int(argDict[item]) - - # Edit Areas and Time Ranges - # - # Set up these argDict values: - # editAreas -- list of (refData, label) pairs - # timeRanges -- list of named ranges - # rawRanges -- list of (rawRange, rangeName) pairs - # - # As we eventually loop through the product, these values will be set: - # editArea -- current edit area pair - # timeRange -- current raw time range - # issueTime -- first raw time range - # timeRangeName -- current time range name (if available) - # combinations -- list of (editAreaList, comboLabel) tuples - # where editAreaList is the list of edit areas composing - # the combination - - # Set up Edit Areas - # May be from these sources: - # "AreaLabel" - # varDict from command line -- list of names - # command line as named reference areas - # defaultEditAreas - # list of (name, label) pairs - # "LatLon" - # OR list of 
((lat,lon,dimension), label) - # "Combinations" - # OR A Combinations file - - # We may have had edit areas entered from the command line - # or from the Interfaces "generateForecast" command - # If so, argDict["editAreas"] will be a list of either - # (name, label) or (refData, label) pairs - editAreaType = "AreaLabel" - - # We may have had edit areas entered from the command line - # If so, argDict["editAreas"] will be a list of either - # (name, label) or (refData, label) pairs - if len(argDict["editAreas"]) == 0: - dfEditAreas = self.__ut.set(forecastDef, "defaultEditAreas", []) - try: # Try the varDict - chosenAreas = varDict["Choose Edit Areas"] - # Turn the list of chosen areas into (name, label) pairs - # using the defaultEditAreas list - dfEditAreas = self.__pairAreaWithLabel(chosenAreas, dfEditAreas) - except: - pass - - # Set up edit areas as ReferenceData's for AreaLabel and LatLon areas - if type(dfEditAreas) == types.StringType: - editAreaType = "Combinations" - # Get edit areas from file with format: - # Combinations = [ - # ([editArea1, editArea2,...],label) - # ... 
- # ] - # For example: - # Combinations = [ - # (["Zones48", "Zones49", "Zones50"],"/48/49/50"), - # (["Zones37","Zones38"], "/37/38"),"/37/38"), - # (["Zones57","Zones58","Zones59"],"57/58/59") - # ] - - comboName = dfEditAreas - for retryCount in xrange(MAX_TRIES): - accessor = ModuleAccessor.ModuleAccessor() - dfEditAreas = accessor.variable(comboName, "Combinations") - if dfEditAreas is None: - if sys.modules.has_key(comboName): - comboMod = sys.modules[comboName] - if comboMod.__file__.endswith(".pyo"): - os.remove(comboMod.__file__) - comboMod = None - del sys.modules[comboName] - - # if not last try, log and try again - if retryCount < MAX_TRIES - 1: - # log but don't pop up - self.log.error("Error loading combinations file: %s, retrying", comboName) - else: - return "COMBINATION FILE NOT FOUND: " + \ - self.__ut.set(forecastDef, "defaultEditAreas", []) - else: - break - - elif len(dfEditAreas) > 0: - refDataList = [] - tempRefData = [] - for area, label in dfEditAreas: - if type(area) is types.TupleType: - # Create a referenceData given lat, lon, dim - refData = self.__createArea(area, argDict) - tempRefData.append(refData) - else: # Get named Reference Data - id = ReferenceID(area) - refData = self.getEditArea(area, argDict) - if refData is None: - return "EDIT AREA NOT FOUND: " + str(id) - refDataList.append((refData, label)) - - argDict["editAreas"] = refDataList - storeReferenceData(self.dataMgr.getRefManager(), tempRefData) - - # Set up HazardsTable - # Product must be: - # --Type "smart" - # --Have an "filterMethod" method - if fcstType == "smart": - product = module.TextProduct() - - # Test Code: Uncomment to test - #allowedHazards = product.allowedHazards() - #filterMethod = product.filterMethod - #print "allowedHazards", allowedHazards - - try: - allowedHazards = product.allowedHazards() - filterMethod = product.filterMethod - except: - allowedHazards = None - - if allowedHazards is not None and allowedHazards != []: - # Set up editAreas as a list 
of combinations - # Cases: - # lat/lon or (area, label) pairs -- call HazardsTable, - # but the edit areas will not change - # Combinations -- call HazardsTable and check for changed combinations - - # Set up edit areas as list of lists - editAreas = [] - - for area, label in dfEditAreas: - if type(area) is types.ListType: - editAreas.append(area) - elif type(area) is types.TupleType: #LatLon - editAreas.append([self.__getLatLonAreaName(area)]) - else: - editAreas.append([area]) - - # if Definition['separateByTimeZone'] set to "effectiveTZ" - # or "actualTZ", change the set of edit areas to ensure - # that time zones are same in each grouping. - separateByTZ = product.Definition.get('separateByTimeZone', - None) - if separateByTZ is not None: - areaDictName = product.Definition.get('areaDictionary', - "AreaDictionary") - editAreas = self._separateByTimeZone(editAreas, - areaDictName, argDict['creationTime'], - effectiveTZ=separateByTZ) - - accurateCities = product.Definition.get('accurateCities', 0) - cityRefData = [] - if accurateCities: - cityLocationName = product.Definition.get('cityLocation', - "CityLocation") - accessor = ModuleAccessor.ModuleAccessor() - citydict = accessor.variable(cityLocationName, - "CityLocation") - - cityEAs = [] - if citydict is None: - msg = "CityLocation dictionary module was not found for"\ - " city location:" - self.log.error(msg + `cityLocationName`) - else: - for ea in editAreas: - for ean in ea: - if ean not in citydict: - msg = "CityLocation dictionary does not "\ - "contain entry for edit area: " - self.log.error(msg + `ean`) - continue - - for city, llrec in citydict[ean].iteritems(): - # Create a referenceData given lat, lon, dim - area = (llrec[0], llrec[1], 0) - refData = self.__createArea(area, argDict) - cityEAs.append(refData) - cityRefData.append((refData, city)) - - # Store temporary reference data in the server - #storeReferenceData(argDict['ifpClient'], cityEAs) - storeReferenceData(self.dataMgr.getRefManager(), 
cityEAs) - - # Get Product ID and other info for HazardsTable - pil = self.__ut.set(forecastDef, "pil", None) - stationID4 = product.Definition['fullStationID'] - productCategory = pil[0:3] #part of the pil - sampleThreshold = product.Definition.get(\ - "hazardSamplingThreshold", (10, None)) - - # Process the hazards - import HazardsTable - hazards = HazardsTable.HazardsTable( - argDict["ifpClient"], editAreas, productCategory, - filterMethod, argDict["databaseID"], stationID4, - argDict["vtecActiveTable"], argDict["vtecMode"], - sampleThreshold, creationTime=argDict["creationTime"], dataMgr=self.dataMgr, - accurateCities=accurateCities, - cityEditAreas=cityRefData) - - # Store hazards object for later use - argDict["hazards"] = hazards - - # Get revised combinations - if editAreaType == "Combinations": - reorganizeCombos = product.Definition.get("reorganizeCombinations", 1) - if reorganizeCombos: - hazardAreas = hazards.getHazardAreaCombinations() - # Add a bogus label - newCombos = [] - for combo in hazardAreas: - newCombos.append((combo, "")) - # Re-assign new combinations - dfEditAreas = newCombos - - # Set up Combinations as ReferenceDatas - if editAreaType == "Combinations": - argDict["editAreas"], dfEditAreas = self.getCombinations(dfEditAreas, argDict) - argDict["combinations"] = dfEditAreas - - # Set up Time Ranges - # May be from these sources: - # varDict from command line - # defaultTimeRanges - # command line as named time ranges - # command line as start and end times OR - # from argDict already set up by Interfaces::generateProduct - # In these cases "useRawTR" will be set to 1 - if len(argDict["timeRanges"]) > 0: - # Use named time ranges from command line - dfRanges = argDict["timeRanges"] - else: - try: # Try the varDict - dfRanges = varDict["Choose Time Ranges"] - except: - dfRanges = self.__ut.set(forecastDef, "defaultRanges", []) - argDict["timeRanges"] = dfRanges - - rawRanges = [] - argDict["rawRanges"] = rawRanges - if argDict["useRawTR"] == 
1: - tr = argDict["timeRange"] - try: - trName = argDict["timeRangeName"] - except: - trName = "" - if tr is not None: - rawRanges.append((tr, trName)) - elif len(dfRanges) == 0: - pass - else: - import TimeRangeUtils - forecast = TimeRangeUtils.TimeRangeUtils() - for rangeName in dfRanges: - rawRange = forecast.getTimeRange(rangeName, argDict) - rawRanges.append((rawRange, rangeName)) - argDict["rawRanges"] = rawRanges - #print "rawRanges", rawRanges - - # Row Label - areaType = self.__ut.set(forecastDef, "areaType", "") - rowVariable = self.__ut.set(forecastDef, "rowVariable", "EditArea") - if rowVariable == "EditArea": - rowLabel = areaType - elif rowVariable == "WeatherElement": - rowLabel = "Weather Element" - else: - rowLabel = "Time Period" - argDict["heading"] = rowLabel - - def __pairAreaWithLabel(self, chosenAreas, defaultEditAreas): - # Pair the chosen edit areas with associated labels from - # default list and return new list - dfEditAreas = [] - for area in chosenAreas: - for name, label in defaultEditAreas: - if area == name: - dfEditAreas.append((name, label)) - elif area == label: - # Pair back with (lat,lon,dim) tuple - dfEditAreas.append((name, label)) - return dfEditAreas - - def __createArea(self, latLonTuple, argDict): - # Return a ReferenceData created for the given lat,lon and dimension - # If dim is zero, make edit area of the one grid - # point closest to the lat/lon value. 
- lat, lon, dim = latLonTuple - name = self.__getLatLonAreaName((lat, lon, dim)) - #print "\ncreateArea", lat, lon, dim, name - if dim != 0: - for x in range(100): - points = makeSquare(lat, lon, dim) - pointList = [] - for point in points: - pointList.append(makePoint(point)) - refData = makeArea(self.ifpClient.getDBGridLocation(), pointList, refname=name) - # Make sure we have at least one grid point in - # the edit area - if refData.getGrid().isAnyBitsSet(): - #print "returning", dim - return refData - # Increment dim and try again - #print "iterating", dim - dim += 0.25 - msg = "\nWARNING!!! EMPTY EDIT AREA. INCREASE LAT/LON AREA DIMENSION!!\n" - self.log.warning(msg) - return None - else: - from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit - # Get grid cell coordinates for lat/lon - gridLoc = self.ifpClient.getDBGridLocation() - nx = int(gridLoc.getNx()) - ny = int(gridLoc.getNy()) - cc2D = gridLoc.gridCell(float(lat), float(lon)) - # convert grid cell to Grid2DBit with single bit set - grid2Dbit = Grid2DBit(nx, ny) - if (nx > cc2D.x >= 0 and ny > cc2D.y >= 0): - grid2Dbit.set(int(cc2D.x), int(cc2D.y)) - #refData = GridLoc.convertToReferenceData(grid2Dbit) - refID = ReferenceID(name) - refData = ReferenceData(gridLoc, refID, grid2Dbit) - return refData - - def __getLatLonAreaName(self, latLonTuple): - lat, lon, dim = latLonTuple - name = "Ref" + '%s%s%s' % (lat, lon, dim) - name = name.replace(".", "") - name = name.replace("-", "") - return name - - def getCombinations(self, combinations, argDict): - editAreas = [] - newCombinations = [] - for comboList, areaLabel in combinations: - newComboList = [] - for editArea in comboList: - #print "Get edit area TF: get edit area combo", editArea - #print "TF2: Get Edit Area set up edit areas", editArea - newArea = self.getEditArea(editArea, argDict) - if comboList.index(editArea) == 0: - comboNumber = self.getComboNumber() - label = "Combo" + `comboNumber` - refId = ReferenceID(label) - #global GridLoc 
- #GridLoc = newArea.getGloc() - area = ReferenceData(newArea) - area.setId(refId) - #GridLoc, refId, newArea.getPolygons("LATLON"), "LATLON") - # randerso: I don't think this is necessary - # area.convertToAWIPS() - newComboList.append(newArea.getId().getName()) - area = self.unionAreas(label, area, newArea) - if argDict["fcstType"] == "table": - # Allow user-supplied area labels to be used for simple tables - editAreas.append((area, areaLabel)) - else: - editAreas.append((area, label)) - newCombinations.append((newComboList, label)) - return editAreas, newCombinations - - def getComboNumber(self): - self.__comboNumber = self.__comboNumber + 1 - return self.__comboNumber - - def getFcstDef(self, fcstName, argDict): - # Find the given Forecast Definition. - # Look for a method in the current forecast smart "TextProduct" class (if one exists) - # i.e. product.fcstName() - # These can be removed eventually: - # Next look for Definition variable i.e. module.Definition - # Next look by name in modules already imported i.e. module.fcstName - # Next try to find a module with that name i.e. module.__name__ = fcstName - # If found, - # set argDict["forecastDef"] to the definition and return 1 and the module - # Else return 0 (not found) - # - # The Definition can be in various forms due to various product types and - # backward compatibility: - # Existing TextProduct class: product.fcstName() - # Find file with fcstName and look for Definition as variable in file - # Otherwise if TextProduct class, instantiate and look for Definition method - # - # Is the method in the current forecast smart TextProduct class? - - try: - product = argDict["self"] - exec "fcstDef = product." + fcstName + "()" - module = argDict["module"] - except: - # See if fcstName is variable in imported modules e.g. 
MyTable = {} - # This can be removed eventually - fcstDef, module = self.__ut.findInImported(fcstName) - if fcstDef is None or type(fcstDef) is not dict: - # Go get new module, fcstName - module = self.__ut.module(fcstName, 0) - if module is None: - return 0, module - try: - # Look for Definition = {} - # This can be removed eventually - exec "fcstDef = module.Definition" - except: - try: - # Look for fcstName = {} - # This can be removed eventually - exec "fcstDef = module." + fcstName - except: - try: - # Try to instantiate smart text product class - # and look for product.Definition() method - fcstDef = module.TextProduct.Definition - #product = module.TextProduct() - #fcstDef = product.Definition() - except: - return 0, module - - # Let the command line Variables override Definition variables - # Handle cases of varDict key as single value or tuple - # e.g. - # ('editAreaSuffix':'_pt') - # or - # ('Edit Area Suffix', 'editAreaSuffix'): '_pt' - - varDict = argDict["cmdLineVarDict"] - for key in fcstDef.keys(): - for varKey in varDict.keys(): - if varKey == key: - fcstDef[key] = varDict[varKey] - elif type(varKey) is types.TupleType: - if varKey[1] == key: - fcstDef[key] = varDict[varKey] - - argDict["forecastDef"] = fcstDef - return 1, module - - def assembleChildWords(self, tree, node): - fcst = "" - for child in node.get("childList"): - words = child.get("words") - if words is None: - return - fcst = fcst + words - node.set("words", fcst) - return 1 - - def unionAreas(self, name, area1, area2): - # OR the areas (ReferenceData objects) - # together and return a ReferenceData object - refData = area1.orEquals(area2) - #refData.convertToLatLon() - refData.setId(ReferenceID(name)) - refData.getGrid() - return refData - - def getEditArea(self, editAreaName, argDict): - # Returns an AFPS.ReferenceData object given an edit area name - # as defined in the GFE - # Apply suffix if appropriate - refID = ReferenceID(editAreaName) - #print "Getting edit area" - definition 
= argDict["forecastDef"] - if definition.has_key("editAreaSuffix"): - eaSuffix = definition["editAreaSuffix"] - #print "eaSuffix", eaSuffix - if eaSuffix is not None: - #inv = argDict["ifpClient"].getReferenceInventory() - inv = self.dataMgr.getRefManager().getAvailableSets() - inventory = [] - sz = inv.size() - for x in range(sz): - invID = inv.get(x) - inventory.append(str(invID.getName())) - suffName = editAreaName + eaSuffix - if suffName in inventory: - #print " Setting suffix id", suffName - refID = ReferenceID(suffName) - #print " Adding editArea", refID - from java.util import ArrayList - refList = ArrayList() - refList.add(refID) - tmp = self.dataMgr.getRefManager().getReferenceData(refList).get(0) - #tmp.getGrid() - return tmp - - def _separateByTimeZone(self, editAreaGroups, areaDictName, creationTime, - effectiveTZ="effectiveTZ"): - #takes the list of areas, and based on the time zones breaks - #them up to ensure that each grouping using the same time zone. - #areaDictName is name of the area dictionary. creationTime is the - #run time of the formatter. EffectiveTZ organizes the groups by - #the effective time zone, rather than the TZ environment variable. - #Typically used for the PFM/AFM. 
- - #print "separateByTimeZone: ", editAreaGroups - out = [] #list of editAreaGroups, with edit areas within each group - - import ModuleAccessor - accessor = ModuleAccessor.ModuleAccessor() - areaDict = accessor.variable(areaDictName, "AreaDictionary") - localTZ = os.environ['TZ'] #current WFO time zone - localTZid = time.strftime("%Z", time.localtime(creationTime)) - #print "Current WFO tz: ", localTZ - for areas in editAreaGroups: - tzDir = {} #key is TZ var (EST5EDT), value is edit areas - tzidDir = {} #key is effective TZ (EST), value is edit areas - #print "Areas in group: ", areas - tzs = None - for area in areas: - #print "processing area: ", area - try: - zoneTZ = areaDict[area]['ugcTimeZone'] - prevTZ = os.environ['TZ'] - os.environ['TZ'] = zoneTZ - time.tzset() - tzid = time.strftime("%Z", time.localtime(creationTime)) - os.environ['TZ'] = prevTZ - time.tzset() - - #print "areadict entry: ", zoneTZ - except: - zoneTZ = localTZ - tzid = localTZid - #print "falling back to WFOtz: ", zoneTZ - self.log.warning("WARNING: Entry " + area + - " missing from AreaDictionary. 
Using default time zone.") - - zones = tzDir.get(zoneTZ, []) - zones.append(area) - tzDir[zoneTZ] = zones - zones = tzidDir.get(tzid, []) - zones.append(area) - tzidDir[tzid] = zones - #print "TZs for areas: ", tzDir - #print "TZids for areas: ", tzidDir - - #organize the effective time zones - if effectiveTZ == "effectiveTZ": - dict = tzidDir - elif effectiveTZ == "actualTZ": - dict = tzDir - else: - self.log.error("Invalid effectiveTZ for separateByTZ() " + - effectiveTZ) - return editAreaGroups - keys = dict.keys() - keys.sort() - for key in keys: - out.append(dict[key]) - #print "After TZ separate: ", out - return out - - - -################################################################# -def makeSquare(lat, lon, km): - " Make a list of square of given km around lat,lon" - latinc = km / 222.0 - loninc = math.cos(lat / 57.17) * km / 222.0 - - latTop = lat + latinc - latBottom = lat - latinc - lonLeft = lon - loninc - lonRight = lon + loninc - - points = [] - points.append(`latTop` + "," + `lonRight`) - points.append(`latTop` + "," + `lonLeft`) - points.append(`latBottom` + "," + `lonLeft`) - points.append(`latBottom` + "," + `lonRight`) - return points - -def makePoint(point): - " Make a CartCoord2D from the point in format: x,y" - from com.vividsolutions.jts.geom import Coordinate - ind = string.find(point, ",") - latStr = point[0:ind - 1] - lonStr = point[ind + 1:len(point)] - lat = float(latStr) - lon = float(lonStr) - return Coordinate(lon, lat) - -def makeArea(gridLoc, pointList, refname=None): - " Make a Reference Area with a unique ReferenceID" - from com.vividsolutions.jts.geom import GeometryFactory, LinearRing, Coordinate, Polygon - from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData - CoordinateType = ReferenceData.CoordinateType - geomFactory = GeometryFactory() - import jep - size = len(pointList) - if pointList[0] != pointList[size - 1]: # closing the loop - pointList.append(pointList[0]) - pointArray = 
jep.jarray(len(pointList), Coordinate) - for i in range(len(pointList)): - pointArray[i] = pointList[i] - lr = geomFactory.createLinearRing(pointArray) - poly = geomFactory.createPolygon(lr, jep.jarray(0, LinearRing)) - polyArray = jep.jarray(1, Polygon) - polyArray[0] = poly - region = geomFactory.createMultiPolygon(polyArray) - if refname is None: - refname = "Ref" + getTime() - refId = ReferenceID(refname) - refData = ReferenceData(gridLoc, refId, region, CoordinateType.LATLON) - # randerso: I don't think this is necessary - # refData.convertToAWIPS() - return refData - -def storeReferenceData(refSetMgr, refData, temp=True): - if type(refData) is not list: - refData = [refData] - for ref in refData: - refSetMgr.saveRefSet(ref) - # Save it's name to delete later - if temp: - for r in refData: - LatLonIds.append(r.getId()) - - -def getTime(): - "Return an ascii string for the current time without spaces or :'s" - timeStr = `time.time()` - timeStr = string.replace(timeStr, ".", "_") - return timeStr - -def getAbsTime(timeStr): - "Create an AbsTime from a string: YYYYMMDD_HHMM" - - year = string.atoi(timeStr[0:4]) - month = string.atoi(timeStr[4:6]) - day = string.atoi(timeStr[6:8]) - hour = string.atoi(timeStr[9:11]) - minute = string.atoi(timeStr[11:13]) - - return AbsTime.absTimeYMD(year, month, day, hour, minute) - -def usage(): - print """ -Usage: python TextFormatter.py - -d database - -t forecastType - [-o output file for text -- default None] - [-O server output file for text -- default None] - [-S server controlled output file -- default None] - [-A append text to given file name] - [-h host -- default orca.fsl.noaa.gov] - [-p port -- default 98000000] - [-l language -- english, french, spanish: default english] - [-z displaced real time -- format YYYYMMDD_HHMM] - [-T] Generates a "TEST" product. - [-E] Generates a "EXPERIMENTAL" product. 
- [-v vtecMode] Specifies vtec mode ('X','O','T','E') - [-a vtecActiveTableName] Specifies alternate active table - [-V vardict] use this option to provide a run-time VariableList - instead of displaying the user dialog. - The dictionary must be in the form of a Python - dictionary string, e.g., - '{("Forecast Product", "productType"):"Morning", - ("Issuance", "issuanceType"):"Routine"}' - The entries must be complete or the product will be cancelled. - - For Simple Table products: - [-r Edit Area Name] - [-w Time Range Name] OR - [-s startTime -e endTime] - [-i Period for Tables with variable period (rows or columns)] - """ - -def writeToFile(forecasts, outputFile, mode): - if not outputFile is None and outputFile != "": - outfile = open(outputFile, mode) - os.chmod(outputFile, 0644) - if outfile is None: - return 0 - else: - outfile.write(forecasts) - outfile.close() - return 1 - -def writeToServerFile(forecasts, outputFile, ifpClient): - if not outputFile is None and outputFile != "": - id = AFPS.TextFileID(outputFile, "PRODGEN") - textFile = AFPS.TextFile(id, forecasts) - ifpClient.saveTextData([textFile]) - return 1 - return 1 +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TextFormatter.py +# Main program and Control class for producing Forecasts +# +# Author: hansen +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 02/12/2014 #2591 randerso Added retry when loading combinations fails +# 10/20/2014 #3685 randerso Changed default of lowerCase to True if not specified +# 11/30/2015 #5129 dgilling Support new IFPClient. +# 03/02/2016 #5411 randerso Fixed exception in exception handler +# 07/19/2016 #5749 randerso Fix issue with new Jep in __createArea() +# 10/31/2016 #5979 njensen Cast to primitives for compatibility + +## +# This is a base file that is not intended to be overridden. +## + +import string, getopt, sys, time, os, types, math +import ModuleAccessor +import Utility, logging, traceback +import AbsTime +from java.lang import ThreadDeath +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID, ReferenceData + +GridLoc = None +LatLonIds = [] + +MAX_TRIES = 2 + +# If someone imports TextFormatter and needs this instance +# they should either be fixed to use the IFPImporter module +# or turn this line on (which is a kludge but should make +# the incorrect code run without fixing it). 
+#IFPImporter = IFPImporter.IFPImporter + +class TextFormatter: + ## TODO: Remove dataManager from constructor and do not add it to + ## the argDict + def __init__(self, dataManager, ifpClient): + # Variable for unique combinations + self.__comboNumber = -1 + self.dataMgr = dataManager + self.ifpClient = ifpClient + self.log = logging.getLogger("FormatterRunner.TextFormatter.TextFormatter") + +# def __del__(self): +# for i in LatLonIds: +# self.dataMgr.getRefManager().deleteRefSet(i, False) + + def getForecast(self, fcstName, argDict): + " Create the forecast " + + ForecastNarrative = argDict["ForecastNarrative"] + ForecastTable = argDict["ForecastTable"] + Analysis = argDict["Analysis"] + + argDict["prevStats"] = () + argDict["productName"] = fcstName + argDict["combinations"] = None + # This allows the Interfaces class to later call this + # method for component forecasts. + # See: Interfaces.generateProduct + argDict["getForecast"] = self.getForecast + argDict["getFcstDef"] = self.getFcstDef + argDict["dataMgr"] = self.dataMgr + self.__ut = argDict["utility"] + + # Get the Forecast Definition and type from the server + #print "finding", fcstName + found, module = self.getFcstDef(fcstName, argDict) + #print "found ", found + if found == 0: + text = "Text Product Definition Not Found: " + fcstName + " " + \ + traceback.format_exc() + self.log.error("Text Product Definition Not Found: Caught Exception: " + fcstName, exc_info=True) + raise Exception(text) + forecastDef = argDict["forecastDef"] + fcstType = self.__ut.set(forecastDef, "type", None) + argDict["fcstType"] = fcstType + if fcstType is None: + text = "Text Product Type Not Found: " + fcstName + " " + \ + traceback.format_exc() + self.log.error("Text Product Type Not Found: Caught Exception: " + fcstName, exc_info=True) + raise Exception(text) + + argDict["varDict"] = argDict["cmdLineVarDict"] + error = self.__getRunTimeVariables(fcstName, forecastDef, fcstType, module, argDict) + if error is not None: + 
return error + + # Sanity checks + # Unless a "smart" product, + # Must have at least one edit area and time range specified + if fcstType != "smart" and fcstType != "component": + if argDict["editAreas"] == []: + text = "No Edit Areas Specified over which to generate product." + text = text + '\nTry setting "defaultEditAreas" in the product Definition' + text = text + '\nOr, if running from the command line, add a -r flag.' + text = text + '\n' + string.join(traceback.format_exc()) + self.log.error("Caught Exception: " + text) + raise Exception(text) + if argDict["rawRanges"] == []: + text = "No Time Ranges Specified over which to generate product." + text = text + '\nTry setting "defaultRanges" in the product Definition' + text = text + '\nOr, if running from the command line, add a -w flag.' + text = text + '\n' + string.join(traceback.format_exc()) + self.log.error("Caught Exception: " + text) + raise Exception(text) + + argDict["subType"] = fcstName + + # Component Phrase Forecasts + # Create a narrative of one forecast + if fcstType == "component": + timeRange, label = argDict["rawRanges"][0] + forecastDef = self.__createNarrativeDef(fcstName, timeRange) + fcstType = "narrative" + + # Table Forecasts + if fcstType == "table": + forecast = ForecastTable.ForecastTable() + forecast._debug = 0 + forecast._argDict = argDict + try: + text = forecast.generateForecast(argDict) + except: + self.log.error("Caught Exception: ", exc_info=True) + raise Exception(string.join(traceback.format_exc())) + + # Narrative Phrase Forecasts + elif fcstType == "narrative": + forecast = ForecastNarrative.ForecastNarrative() + forecast._debug = 0 + forecast._argDict = argDict + timeRange, label = argDict["rawRanges"][0] + forecast.getNarrativeData( + argDict, forecastDef, timeRange, argDict["editAreas"], None) + text = self.__loop(argDict, forecast, forecastDef) + + # Smart Text + elif fcstType == "smart": + product = module.TextProduct() + product._debug = 0 + argDict["self"] = 
product + argDict["module"] = module + product.setUp("T", argDict) + product._argDict = argDict + + try: + text = product.generateForecast(argDict) + except RuntimeError as e: + if 'java.lang.ThreadDeath' in str(e): + self.log.info("Formatter Canceled") + else: + self.log.error("Caught Exception: ", exc_info=True) + raise e + + # requirement for TEST phrasing for TEST products + if argDict.get('testMode', 0): + testMsg = "\nTHIS IS A TEST MESSAGE. DO NOT TAKE ACTION" + \ + " BASED ON THIS TEST\nMESSAGE.\n" + #split by "$$" + segs = text.split('\n$$') + for i in range(len(segs) - 1): #never the last one + if text.find(testMsg) == -1: #not found, add it in + segs[i] = segs[i] + testMsg + text = '\n$$'.join(segs) #put text back together again + if text.find(testMsg) == -1: + text = text + '\n' + testMsg + + # Translator + language = forecastDef.get('language', None) + if language is not None: + self.log.info("Translating product to %s" % language) + text = product.translateForecast(text, language) + # Convert to Upper Case + if not forecastDef.get('lowerCase', True): + text = text.upper() + else: + text = "Text Product Type Invalid " + \ + "(must be 'table', 'component' or 'narrative'): ", fcstName, type + text = text + '\n' + string.join(traceback.format_exc()) + self.log.error("Caught Exception: " + text) + raise Exception(text) + + return text + + def __createNarrativeDef(self, fcstName, timeRange): + return { + "methodList": [self.assembleChildWords], + "narrativeDef": [(fcstName, timeRange.duration() / 3600)], + } + + def __loop(self, argDict, forecast, forecastDef): + # Loop through product by edit areas and time ranges + + begText = self.__ut.set(forecastDef, "beginningText", "") + endText = self.__ut.set(forecastDef, "endingText", "") + editAreaLoopBegText = self.__ut.set(forecastDef, "editAreaLoopBegText", "") + timeRangeLoopBegText = self.__ut.set(forecastDef, "timeRangeLoopBegText", "") + editAreaLoopEndText = self.__ut.set(forecastDef, 
"editAreaLoopEndText", "") + timeRangeLoopEndText = self.__ut.set(forecastDef, "timeRangeLoopEndText", "") + outerLoop = self.__ut.set(forecastDef, "outerLoop", "EditArea") + + editAreas = argDict["editAreas"] + rawRanges = argDict["rawRanges"] + # Loop through Edit Areas + text = begText + if outerLoop == "EditArea": + for editArea in editAreas: + argDict["editArea"] = editArea + text = text + editAreaLoopBegText + # Loop through time ranges + if len(rawRanges) > 0: + argDict["issueRange"] = rawRanges[0] + for rawRange, rangeName in rawRanges: + argDict["timeRange"] = rawRange + argDict["timeRangeName"] = rangeName + text = text + timeRangeLoopBegText + # forecastDef gets destroyed in narrative?, so must restore + argDict["forecastDef"] = forecastDef + try: + subText = forecast.generateForecast(argDict) + except: + self.log.error("Caught Exception: ", exc_info=True) + raise Exception(string.join(traceback.format_exc())) + try: + subText, statDict, valueDict = subText + except: + pass + text = text + subText + timeRangeLoopEndText + text = forecast.fillSpecial(text, argDict) + text = text + editAreaLoopEndText + text = forecast.fillSpecial(text, argDict) + else: + for rawRange, rangeName in rawRanges: + argDict["timeRange"] = rawRange + argDict["timeRangeName"] = rangeName + text = text + timeRangeLoopBegText + for editArea in editAreas: + argDict["editArea"] = editArea + text = text + editAreaLoopBegText + # Loop through time ranges + # forecastDef gets destroyed in narrative, so must restore + argDict["forecastDef"] = forecastDef + try: + subText, statDict, valueDict = \ + forecast.generateForecast(argDict) + except: + self.log.error("Caught Exception: ", exc_info=True) + raise Exception(string.join(traceback.format_exc())) + text = text + subText + editAreaLoopEndText + text = forecast.fillSpecial(text, argDict) + text = text + timeRangeLoopEndText + text = forecast.fillSpecial(text, argDict) + return text + + def __getRunTimeVariables(self, fcstName, 
forecastDef, fcstType, module, argDict): + # Input variables can come from various sources: + # varDict from command line + # command line switch e.g. -l language (will be in argDict) + # definition section of product + # We must check all three in that order. + + varDict = argDict["varDict"] + #print "varDict", varDict + + for item, default in [ + ("language", "english"), + ("appendFile", None), + ("lineLength", 69), # no command line option + ("timePeriod", 3), + ]: + try: # Try the varDict + #print "trying varDict", item + argDict[item] = varDict[item] + #print "got it in varDict" + except: + try: # Try argDict i.e. from command line switch + # If so, argDict is already set + #print "trying argDict", item + argValue = argDict[item] + #print "got it in argDict", argValue + except: + argValue = None + # Try + #print "getting from definition", item + if argValue is None: + argDict[item] = self.__ut.set(forecastDef, item, default) + #print "value from definition", argDict[item] + # These need to be integers + for item in ["lineLength", "timePeriod"]: + if argDict[item] is not None: + argDict[item] = int(argDict[item]) + + # Edit Areas and Time Ranges + # + # Set up these argDict values: + # editAreas -- list of (refData, label) pairs + # timeRanges -- list of named ranges + # rawRanges -- list of (rawRange, rangeName) pairs + # + # As we eventually loop through the product, these values will be set: + # editArea -- current edit area pair + # timeRange -- current raw time range + # issueTime -- first raw time range + # timeRangeName -- current time range name (if available) + # combinations -- list of (editAreaList, comboLabel) tuples + # where editAreaList is the list of edit areas composing + # the combination + + # Set up Edit Areas + # May be from these sources: + # "AreaLabel" + # varDict from command line -- list of names + # command line as named reference areas + # defaultEditAreas + # list of (name, label) pairs + # "LatLon" + # OR list of 
((lat,lon,dimension), label) + # "Combinations" + # OR A Combinations file + + # We may have had edit areas entered from the command line + # or from the Interfaces "generateForecast" command + # If so, argDict["editAreas"] will be a list of either + # (name, label) or (refData, label) pairs + editAreaType = "AreaLabel" + + # We may have had edit areas entered from the command line + # If so, argDict["editAreas"] will be a list of either + # (name, label) or (refData, label) pairs + if len(argDict["editAreas"]) == 0: + dfEditAreas = self.__ut.set(forecastDef, "defaultEditAreas", []) + try: # Try the varDict + chosenAreas = varDict["Choose Edit Areas"] + # Turn the list of chosen areas into (name, label) pairs + # using the defaultEditAreas list + dfEditAreas = self.__pairAreaWithLabel(chosenAreas, dfEditAreas) + except: + pass + + # Set up edit areas as ReferenceData's for AreaLabel and LatLon areas + if type(dfEditAreas) == bytes: + editAreaType = "Combinations" + # Get edit areas from file with format: + # Combinations = [ + # ([editArea1, editArea2,...],label) + # ... 
+ # ] + # For example: + # Combinations = [ + # (["Zones48", "Zones49", "Zones50"],"/48/49/50"), + # (["Zones37","Zones38"], "/37/38"),"/37/38"), + # (["Zones57","Zones58","Zones59"],"57/58/59") + # ] + + comboName = dfEditAreas + for retryCount in range(MAX_TRIES): + accessor = ModuleAccessor.ModuleAccessor() + dfEditAreas = accessor.variable(comboName, "Combinations") + if dfEditAreas is None: + if comboName in sys.modules: + comboMod = sys.modules[comboName] + if comboMod.__file__.endswith(".pyo"): + os.remove(comboMod.__file__) + comboMod = None + del sys.modules[comboName] + + # if not last try, log and try again + if retryCount < MAX_TRIES - 1: + # log but don't pop up + self.log.error("Error loading combinations file: %s, retrying", comboName) + else: + return "COMBINATION FILE NOT FOUND: " + \ + self.__ut.set(forecastDef, "defaultEditAreas", []) + else: + break + + elif len(dfEditAreas) > 0: + refDataList = [] + tempRefData = [] + for area, label in dfEditAreas: + if type(area) is tuple: + # Create a referenceData given lat, lon, dim + refData = self.__createArea(area, argDict) + tempRefData.append(refData) + else: # Get named Reference Data + id = ReferenceID(area) + refData = self.getEditArea(area, argDict) + if refData is None: + return "EDIT AREA NOT FOUND: " + str(id) + refDataList.append((refData, label)) + + argDict["editAreas"] = refDataList + storeReferenceData(self.dataMgr.getRefManager(), tempRefData) + + # Set up HazardsTable + # Product must be: + # --Type "smart" + # --Have an "filterMethod" method + if fcstType == "smart": + product = module.TextProduct() + + # Test Code: Uncomment to test + #allowedHazards = product.allowedHazards() + #filterMethod = product.filterMethod + #print "allowedHazards", allowedHazards + + try: + allowedHazards = product.allowedHazards() + filterMethod = product.filterMethod + except: + allowedHazards = None + + if allowedHazards is not None and allowedHazards != []: + # Set up editAreas as a list of combinations + 
# Cases: + # lat/lon or (area, label) pairs -- call HazardsTable, + # but the edit areas will not change + # Combinations -- call HazardsTable and check for changed combinations + + # Set up edit areas as list of lists + editAreas = [] + + for area, label in dfEditAreas: + if type(area) is list: + editAreas.append(area) + elif type(area) is tuple: #LatLon + editAreas.append([self.__getLatLonAreaName(area)]) + else: + editAreas.append([area]) + + # if Definition['separateByTimeZone'] set to "effectiveTZ" + # or "actualTZ", change the set of edit areas to ensure + # that time zones are same in each grouping. + separateByTZ = product.Definition.get('separateByTimeZone', + None) + if separateByTZ is not None: + areaDictName = product.Definition.get('areaDictionary', + "AreaDictionary") + editAreas = self._separateByTimeZone(editAreas, + areaDictName, argDict['creationTime'], + effectiveTZ=separateByTZ) + + accurateCities = product.Definition.get('accurateCities', 0) + cityRefData = [] + if accurateCities: + cityLocationName = product.Definition.get('cityLocation', + "CityLocation") + accessor = ModuleAccessor.ModuleAccessor() + citydict = accessor.variable(cityLocationName, + "CityLocation") + + cityEAs = [] + if citydict is None: + msg = "CityLocation dictionary module was not found for"\ + " city location:" + self.log.error(msg + repr(cityLocationName)) + else: + for ea in editAreas: + for ean in ea: + if ean not in citydict: + msg = "CityLocation dictionary does not "\ + "contain entry for edit area: " + self.log.error(msg + repr(ean)) + continue + + for city, llrec in citydict[ean].items(): + # Create a referenceData given lat, lon, dim + area = (llrec[0], llrec[1], 0) + refData = self.__createArea(area, argDict) + cityEAs.append(refData) + cityRefData.append((refData, city)) + + # Store temporary reference data in the server + #storeReferenceData(argDict['ifpClient'], cityEAs) + storeReferenceData(self.dataMgr.getRefManager(), cityEAs) + + # Get Product ID and 
other info for HazardsTable + pil = self.__ut.set(forecastDef, "pil", None) + stationID4 = product.Definition['fullStationID'] + productCategory = pil[0:3] #part of the pil + sampleThreshold = product.Definition.get(\ + "hazardSamplingThreshold", (10, None)) + + # Process the hazards + import HazardsTable + hazards = HazardsTable.HazardsTable( + argDict["ifpClient"], editAreas, productCategory, + filterMethod, argDict["databaseID"], stationID4, + argDict["vtecActiveTable"], argDict["vtecMode"], + sampleThreshold, creationTime=argDict["creationTime"], dataMgr=self.dataMgr, + accurateCities=accurateCities, + cityEditAreas=cityRefData) + + # Store hazards object for later use + argDict["hazards"] = hazards + + # Get revised combinations + if editAreaType == "Combinations": + reorganizeCombos = product.Definition.get("reorganizeCombinations", 1) + if reorganizeCombos: + hazardAreas = hazards.getHazardAreaCombinations() + # Add a bogus label + newCombos = [] + for combo in hazardAreas: + newCombos.append((combo, "")) + # Re-assign new combinations + dfEditAreas = newCombos + + # Set up Combinations as ReferenceDatas + if editAreaType == "Combinations": + argDict["editAreas"], dfEditAreas = self.getCombinations(dfEditAreas, argDict) + argDict["combinations"] = dfEditAreas + + # Set up Time Ranges + # May be from these sources: + # varDict from command line + # defaultTimeRanges + # command line as named time ranges + # command line as start and end times OR + # from argDict already set up by Interfaces::generateProduct + # In these cases "useRawTR" will be set to 1 + if len(argDict["timeRanges"]) > 0: + # Use named time ranges from command line + dfRanges = argDict["timeRanges"] + else: + try: # Try the varDict + dfRanges = varDict["Choose Time Ranges"] + except: + dfRanges = self.__ut.set(forecastDef, "defaultRanges", []) + argDict["timeRanges"] = dfRanges + + rawRanges = [] + argDict["rawRanges"] = rawRanges + if argDict["useRawTR"] == 1: + tr = argDict["timeRange"] + 
try: + trName = argDict["timeRangeName"] + except: + trName = "" + if tr is not None: + rawRanges.append((tr, trName)) + elif len(dfRanges) == 0: + pass + else: + import TimeRangeUtils + forecast = TimeRangeUtils.TimeRangeUtils() + for rangeName in dfRanges: + rawRange = forecast.getTimeRange(rangeName, argDict) + rawRanges.append((rawRange, rangeName)) + argDict["rawRanges"] = rawRanges + #print "rawRanges", rawRanges + + # Row Label + areaType = self.__ut.set(forecastDef, "areaType", "") + rowVariable = self.__ut.set(forecastDef, "rowVariable", "EditArea") + if rowVariable == "EditArea": + rowLabel = areaType + elif rowVariable == "WeatherElement": + rowLabel = "Weather Element" + else: + rowLabel = "Time Period" + argDict["heading"] = rowLabel + + def __pairAreaWithLabel(self, chosenAreas, defaultEditAreas): + # Pair the chosen edit areas with associated labels from + # default list and return new list + dfEditAreas = [] + for area in chosenAreas: + for name, label in defaultEditAreas: + if area == name: + dfEditAreas.append((name, label)) + elif area == label: + # Pair back with (lat,lon,dim) tuple + dfEditAreas.append((name, label)) + return dfEditAreas + + def __createArea(self, latLonTuple, argDict): + # Return a ReferenceData created for the given lat,lon and dimension + # If dim is zero, make edit area of the one grid + # point closest to the lat/lon value. + lat, lon, dim = latLonTuple + name = self.__getLatLonAreaName((lat, lon, dim)) + #print "\ncreateArea", lat, lon, dim, name + if dim != 0: + for x in range(100): + points = makeSquare(lat, lon, dim) + pointList = [] + for point in points: + pointList.append(makePoint(point)) + refData = makeArea(self.ifpClient.getDBGridLocation(), pointList, refname=name) + # Make sure we have at least one grid point in + # the edit area + if refData.getGrid().isAnyBitsSet(): + #print "returning", dim + return refData + # Increment dim and try again + #print "iterating", dim + dim += 0.25 + msg = "\nWARNING!!! 
EMPTY EDIT AREA. INCREASE LAT/LON AREA DIMENSION!!\n" + self.log.warning(msg) + return None + else: + from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit + # Get grid cell coordinates for lat/lon + gridLoc = self.ifpClient.getDBGridLocation() + nx = int(gridLoc.getNx()) + ny = int(gridLoc.getNy()) + cc2D = gridLoc.gridCell(float(lat), float(lon)) + # convert grid cell to Grid2DBit with single bit set + grid2Dbit = Grid2DBit(nx, ny) + if (nx > cc2D.x >= 0 and ny > cc2D.y >= 0): + grid2Dbit.set(int(cc2D.x), int(cc2D.y)) + #refData = GridLoc.convertToReferenceData(grid2Dbit) + refID = ReferenceID(name) + refData = ReferenceData(gridLoc, refID, grid2Dbit) + return refData + + def __getLatLonAreaName(self, latLonTuple): + lat, lon, dim = latLonTuple + name = "Ref" + '%s%s%s' % (lat, lon, dim) + name = name.replace(".", "") + name = name.replace("-", "") + return name + + def getCombinations(self, combinations, argDict): + editAreas = [] + newCombinations = [] + for comboList, areaLabel in combinations: + newComboList = [] + for editArea in comboList: + #print "Get edit area TF: get edit area combo", editArea + #print "TF2: Get Edit Area set up edit areas", editArea + newArea = self.getEditArea(editArea, argDict) + if comboList.index(editArea) == 0: + comboNumber = self.getComboNumber() + label = "Combo" + repr(comboNumber) + refId = ReferenceID(label) + #global GridLoc + #GridLoc = newArea.getGloc() + area = ReferenceData(newArea) + area.setId(refId) + #GridLoc, refId, newArea.getPolygons("LATLON"), "LATLON") + # randerso: I don't think this is necessary + # area.convertToAWIPS() + newComboList.append(newArea.getId().getName()) + area = self.unionAreas(label, area, newArea) + if argDict["fcstType"] == "table": + # Allow user-supplied area labels to be used for simple tables + editAreas.append((area, areaLabel)) + else: + editAreas.append((area, label)) + newCombinations.append((newComboList, label)) + return editAreas, newCombinations + + def 
getComboNumber(self): + self.__comboNumber = self.__comboNumber + 1 + return self.__comboNumber + + def getFcstDef(self, fcstName, argDict): + # Find the given Forecast Definition. + # Look for a method in the current forecast smart "TextProduct" class (if one exists) + # i.e. product.fcstName() + # These can be removed eventually: + # Next look for Definition variable i.e. module.Definition + # Next look by name in modules already imported i.e. module.fcstName + # Next try to find a module with that name i.e. module.__name__ = fcstName + # If found, + # set argDict["forecastDef"] to the definition and return 1 and the module + # Else return 0 (not found) + # + # The Definition can be in various forms due to various product types and + # backward compatibility: + # Existing TextProduct class: product.fcstName() + # Find file with fcstName and look for Definition as variable in file + # Otherwise if TextProduct class, instantiate and look for Definition method + # + # Is the method in the current forecast smart TextProduct class? + + try: + product = argDict["self"] + exec("fcstDef = product." + fcstName + "()") + module = argDict["module"] + except: + # See if fcstName is variable in imported modules e.g. MyTable = {} + # This can be removed eventually + fcstDef, module = self.__ut.findInImported(fcstName) + if fcstDef is None or type(fcstDef) is not dict: + # Go get new module, fcstName + module = self.__ut.module(fcstName, 0) + if module is None: + return 0, module + try: + # Look for Definition = {} + # This can be removed eventually + exec("fcstDef = module.Definition") + except: + try: + # Look for fcstName = {} + # This can be removed eventually + exec("fcstDef = module." 
+ fcstName) + except: + try: + # Try to instantiate smart text product class + # and look for product.Definition() method + fcstDef = module.TextProduct.Definition + #product = module.TextProduct() + #fcstDef = product.Definition() + except: + return 0, module + + # Let the command line Variables override Definition variables + # Handle cases of varDict key as single value or tuple + # e.g. + # ('editAreaSuffix':'_pt') + # or + # ('Edit Area Suffix', 'editAreaSuffix'): '_pt' + + varDict = argDict["cmdLineVarDict"] + for key in list(fcstDef.keys()): + for varKey in list(varDict.keys()): + if varKey == key: + fcstDef[key] = varDict[varKey] + elif type(varKey) is tuple: + if varKey[1] == key: + fcstDef[key] = varDict[varKey] + + argDict["forecastDef"] = fcstDef + return 1, module + + def assembleChildWords(self, tree, node): + fcst = "" + for child in node.get("childList"): + words = child.get("words") + if words is None: + return + fcst = fcst + words + node.set("words", fcst) + return 1 + + def unionAreas(self, name, area1, area2): + # OR the areas (ReferenceData objects) + # together and return a ReferenceData object + refData = area1.orEquals(area2) + #refData.convertToLatLon() + refData.setId(ReferenceID(name)) + refData.getGrid() + return refData + + def getEditArea(self, editAreaName, argDict): + # Returns an AFPS.ReferenceData object given an edit area name + # as defined in the GFE + # Apply suffix if appropriate + refID = ReferenceID(editAreaName) + #print "Getting edit area" + definition = argDict["forecastDef"] + if "editAreaSuffix" in definition: + eaSuffix = definition["editAreaSuffix"] + #print "eaSuffix", eaSuffix + if eaSuffix is not None: + #inv = argDict["ifpClient"].getReferenceInventory() + inv = self.dataMgr.getRefManager().getAvailableSets() + inventory = [] + sz = inv.size() + for x in range(sz): + invID = inv.get(x) + inventory.append(str(invID.getName())) + suffName = editAreaName + eaSuffix + if suffName in inventory: + #print " Setting 
suffix id", suffName + refID = ReferenceID(suffName) + #print " Adding editArea", refID + from java.util import ArrayList + refList = ArrayList() + refList.add(refID) + tmp = self.dataMgr.getRefManager().getReferenceData(refList).get(0) + #tmp.getGrid() + return tmp + + def _separateByTimeZone(self, editAreaGroups, areaDictName, creationTime, + effectiveTZ="effectiveTZ"): + #takes the list of areas, and based on the time zones breaks + #them up to ensure that each grouping using the same time zone. + #areaDictName is name of the area dictionary. creationTime is the + #run time of the formatter. EffectiveTZ organizes the groups by + #the effective time zone, rather than the TZ environment variable. + #Typically used for the PFM/AFM. + + #print "separateByTimeZone: ", editAreaGroups + out = [] #list of editAreaGroups, with edit areas within each group + + import ModuleAccessor + accessor = ModuleAccessor.ModuleAccessor() + areaDict = accessor.variable(areaDictName, "AreaDictionary") + localTZ = os.environ['TZ'] #current WFO time zone + localTZid = time.strftime("%Z", time.localtime(creationTime)) + #print "Current WFO tz: ", localTZ + for areas in editAreaGroups: + tzDir = {} #key is TZ var (EST5EDT), value is edit areas + tzidDir = {} #key is effective TZ (EST), value is edit areas + #print "Areas in group: ", areas + tzs = None + for area in areas: + #print "processing area: ", area + try: + zoneTZ = areaDict[area]['ugcTimeZone'] + prevTZ = os.environ['TZ'] + os.environ['TZ'] = zoneTZ + time.tzset() + tzid = time.strftime("%Z", time.localtime(creationTime)) + os.environ['TZ'] = prevTZ + time.tzset() + + #print "areadict entry: ", zoneTZ + except: + zoneTZ = localTZ + tzid = localTZid + #print "falling back to WFOtz: ", zoneTZ + self.log.warning("WARNING: Entry " + area + + " missing from AreaDictionary. 
Using default time zone.") + + zones = tzDir.get(zoneTZ, []) + zones.append(area) + tzDir[zoneTZ] = zones + zones = tzidDir.get(tzid, []) + zones.append(area) + tzidDir[tzid] = zones + #print "TZs for areas: ", tzDir + #print "TZids for areas: ", tzidDir + + #organize the effective time zones + if effectiveTZ == "effectiveTZ": + dict = tzidDir + elif effectiveTZ == "actualTZ": + dict = tzDir + else: + self.log.error("Invalid effectiveTZ for separateByTZ() " + + effectiveTZ) + return editAreaGroups + keys = list(dict.keys()) + keys.sort() + for key in keys: + out.append(dict[key]) + #print "After TZ separate: ", out + return out + + + +################################################################# +def makeSquare(lat, lon, km): + " Make a list of square of given km around lat,lon" + latinc = km / 222.0 + loninc = math.cos(lat / 57.17) * km / 222.0 + + latTop = lat + latinc + latBottom = lat - latinc + lonLeft = lon - loninc + lonRight = lon + loninc + + points = [] + points.append(repr(latTop) + "," + repr(lonRight)) + points.append(repr(latTop) + "," + repr(lonLeft)) + points.append(repr(latBottom) + "," + repr(lonLeft)) + points.append(repr(latBottom) + "," + repr(lonRight)) + return points + +def makePoint(point): + " Make a CartCoord2D from the point in format: x,y" + from com.vividsolutions.jts.geom import Coordinate + ind = string.find(point, ",") + latStr = point[0:ind - 1] + lonStr = point[ind + 1:len(point)] + lat = float(latStr) + lon = float(lonStr) + return Coordinate(lon, lat) + +def makeArea(gridLoc, pointList, refname=None): + " Make a Reference Area with a unique ReferenceID" + from com.vividsolutions.jts.geom import GeometryFactory, LinearRing, Coordinate, Polygon + from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData + CoordinateType = ReferenceData.CoordinateType + geomFactory = GeometryFactory() + import jep + size = len(pointList) + if pointList[0] != pointList[size - 1]: # closing the loop + 
pointList.append(pointList[0]) + pointArray = jep.jarray(len(pointList), Coordinate) + for i in range(len(pointList)): + pointArray[i] = pointList[i] + lr = geomFactory.createLinearRing(pointArray) + poly = geomFactory.createPolygon(lr, jep.jarray(0, LinearRing)) + polyArray = jep.jarray(1, Polygon) + polyArray[0] = poly + region = geomFactory.createMultiPolygon(polyArray) + if refname is None: + refname = "Ref" + getTime() + refId = ReferenceID(refname) + refData = ReferenceData(gridLoc, refId, region, CoordinateType.LATLON) + # randerso: I don't think this is necessary + # refData.convertToAWIPS() + return refData + +def storeReferenceData(refSetMgr, refData, temp=True): + if type(refData) is not list: + refData = [refData] + for ref in refData: + refSetMgr.saveRefSet(ref) + # Save it's name to delete later + if temp: + for r in refData: + LatLonIds.append(r.getId()) + + +def getTime(): + "Return an ascii string for the current time without spaces or :'s" + timeStr = repr(time.time()) + timeStr = string.replace(timeStr, ".", "_") + return timeStr + +def getAbsTime(timeStr): + "Create an AbsTime from a string: YYYYMMDD_HHMM" + + year = string.atoi(timeStr[0:4]) + month = string.atoi(timeStr[4:6]) + day = string.atoi(timeStr[6:8]) + hour = string.atoi(timeStr[9:11]) + minute = string.atoi(timeStr[11:13]) + + return AbsTime.absTimeYMD(year, month, day, hour, minute) + +def usage(): + print(""" +Usage: python TextFormatter.py + -d database + -t forecastType + [-o output file for text -- default None] + [-O server output file for text -- default None] + [-S server controlled output file -- default None] + [-A append text to given file name] + [-h host -- default orca.fsl.noaa.gov] + [-p port -- default 98000000] + [-l language -- english, french, spanish: default english] + [-z displaced real time -- format YYYYMMDD_HHMM] + [-T] Generates a "TEST" product. + [-E] Generates a "EXPERIMENTAL" product. 
+ [-v vtecMode] Specifies vtec mode ('X','O','T','E') + [-a vtecActiveTableName] Specifies alternate active table + [-V vardict] use this option to provide a run-time VariableList + instead of displaying the user dialog. + The dictionary must be in the form of a Python + dictionary string, e.g., + '{("Forecast Product", "productType"):"Morning", + ("Issuance", "issuanceType"):"Routine"}' + The entries must be complete or the product will be cancelled. + + For Simple Table products: + [-r Edit Area Name] + [-w Time Range Name] OR + [-s startTime -e endTime] + [-i Period for Tables with variable period (rows or columns)] + """) + +def writeToFile(forecasts, outputFile, mode): + if not outputFile is None and outputFile != "": + outfile = open(outputFile, mode) + os.chmod(outputFile, 0o644) + if outfile is None: + return 0 + else: + outfile.write(forecasts) + outfile.close() + return 1 + +def writeToServerFile(forecasts, outputFile, ifpClient): + if not outputFile is None and outputFile != "": + id = AFPS.TextFileID(outputFile, "PRODGEN") + textFile = AFPS.TextFile(id, forecasts) + ifpClient.saveTextData([textFile]) + return 1 + return 1 diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/offsetTime.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/offsetTime.py index c2b96dc428..070c914361 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/offsetTime.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/headline/offsetTime.py @@ -1,149 +1,149 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# offsetTime.py -# Handles Displaced Real Time for various applications -# -# Author: hansen/romberg -# ---------------------------------------------------------------------------- -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 11/30/16 6016 randerso Changed to use SimulatedTime.setTimeOffset() -# -## - -## -# This is a base file that is not intended to be overridden. -## - -import time, string -import logging -from awips import TimeUtil - -offset = 0 -timeStr = "" -launchStr = "" - -oldTime = time.time -oldLocaltime = time.localtime -oldGmtime = time.gmtime -oldAsctime = time.asctime -oldCtime = time.ctime -oldStrftime = time.strftime - -log = logging.getLogger("FormatterRunner.offsetTime") -# Method called by modules running applications -# to be run in Displaced Real Time (DRT). 
-# "timeString" can be in 3 formats: -# YYYYMMDD_HHMM -- Desired Displaced Real Time -# SYYYYMMDD_HHMM -- Synchronize to most recent hour -# YYYYMMDD_HHMM,YYYYMMDD_HHMM -- Determine offset based on -# difference between given times -def setDrtOffset(timeString): - global offset, timeStr, launchStr - # Do not re-set offset after set once - if offset != 0: - return - seconds, launchString = TimeUtil.determineDrtOffset(timeString) - # Save the offset and timeStr - timeStr = timeString - launchStr = launchString - # Try to set AbsTime offset as well - # for applications involving C++ - try: - from com.raytheon.uf.common.time import SimulatedTime - stOffset = SimulatedTime.getSystemTime().getOffset() / 1000.0 - if abs(seconds - stOffset) > 60: - offset = seconds - SimulatedTime.getSystemTime().setTimeOffset(offset * 1000) - else: - offset = stOffset - except: - log.exception("Problem setting simulated time ") - - # Override the time module methods - time.gmtime = offsetGmtime - time.time = offsetTime - time.localtime = offsetLocaltime - time.asctime = offsetAsctime - time.ctime = offsetCtime - time.strftime = offsetStrftime - # Log Event - log.info("Setting DRT mode: " + timeStr + \ - "\n Offset: " + `offset` + " seconds" +\ - "\n LaunchStr: " + launchString) - -# Methods substituted for time module when in -# DRT mode -def offsetTime(): - tmp = oldTime() - return tmp + offset - -def offsetGmtime(secs=None): - if secs is None: - secs = oldTime() + offset - return oldGmtime(secs) - -def offsetLocaltime(secs=None): - if secs is None: - secs= oldTime() + offset - return oldLocaltime(secs) - -def offsetAsctime(time_s=None): - if time_s is None: - time_s = time.localtime() - return oldAsctime(time_s) - -def offsetCtime(secs=None): - if secs is None: - secs = oldTime() + offset - return oldCtime(secs) - -def offsetStrftime(format, time_s=None): - if time_s is None: - time_s = time.localtime() - return oldStrftime(format, time_s) - -# Accessor methods -def drtOffset(): - 
return offset - -def drtTimeStr(): - return timeStr - -def drtLaunchStr(): - return launchStr - -def reset(): - #reload(time) - time.time = oldTime - time.localtime = oldLocaltime - time.gmtime = oldGmtime - time.asctime = oldAsctime - time.ctime = oldCtime - time.strftime = oldStrftime - from com.raytheon.uf.common.time import SimulatedTime - SimulatedTime.getSystemTime().setRealTime() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# offsetTime.py +# Handles Displaced Real Time for various applications +# +# Author: hansen/romberg +# ---------------------------------------------------------------------------- +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 11/30/16 6016 randerso Changed to use SimulatedTime.setTimeOffset() +# +## + +## +# This is a base file that is not intended to be overridden. 
+## + +import time, string +import logging +from awips import TimeUtil + +offset = 0 +timeStr = "" +launchStr = "" + +oldTime = time.time +oldLocaltime = time.localtime +oldGmtime = time.gmtime +oldAsctime = time.asctime +oldCtime = time.ctime +oldStrftime = time.strftime + +log = logging.getLogger("FormatterRunner.offsetTime") +# Method called by modules running applications +# to be run in Displaced Real Time (DRT). +# "timeString" can be in 3 formats: +# YYYYMMDD_HHMM -- Desired Displaced Real Time +# SYYYYMMDD_HHMM -- Synchronize to most recent hour +# YYYYMMDD_HHMM,YYYYMMDD_HHMM -- Determine offset based on +# difference between given times +def setDrtOffset(timeString): + global offset, timeStr, launchStr + # Do not re-set offset after set once + if offset != 0: + return + seconds, launchString = TimeUtil.determineDrtOffset(timeString) + # Save the offset and timeStr + timeStr = timeString + launchStr = launchString + # Try to set AbsTime offset as well + # for applications involving C++ + try: + from com.raytheon.uf.common.time import SimulatedTime + stOffset = SimulatedTime.getSystemTime().getOffset() / 1000.0 + if abs(seconds - stOffset) > 60: + offset = seconds + SimulatedTime.getSystemTime().setTimeOffset(offset * 1000) + else: + offset = stOffset + except: + log.exception("Problem setting simulated time ") + + # Override the time module methods + time.gmtime = offsetGmtime + time.time = offsetTime + time.localtime = offsetLocaltime + time.asctime = offsetAsctime + time.ctime = offsetCtime + time.strftime = offsetStrftime + # Log Event + log.info("Setting DRT mode: " + timeStr + \ + "\n Offset: " + repr(offset) + " seconds" +\ + "\n LaunchStr: " + launchString) + +# Methods substituted for time module when in +# DRT mode +def offsetTime(): + tmp = oldTime() + return tmp + offset + +def offsetGmtime(secs=None): + if secs is None: + secs = oldTime() + offset + return oldGmtime(secs) + +def offsetLocaltime(secs=None): + if secs is None: + secs= oldTime() + 
offset + return oldLocaltime(secs) + +def offsetAsctime(time_s=None): + if time_s is None: + time_s = time.localtime() + return oldAsctime(time_s) + +def offsetCtime(secs=None): + if secs is None: + secs = oldTime() + offset + return oldCtime(secs) + +def offsetStrftime(format, time_s=None): + if time_s is None: + time_s = time.localtime() + return oldStrftime(format, time_s) + +# Accessor methods +def drtOffset(): + return offset + +def drtTimeStr(): + return timeStr + +def drtLaunchStr(): + return launchStr + +def reset(): + #reload(time) + time.time = oldTime + time.localtime = oldLocaltime + time.gmtime = oldGmtime + time.asctime = oldAsctime + time.ctime = oldCtime + time.strftime = oldStrftime + from com.raytheon.uf.common.time import SimulatedTime + SimulatedTime.getSystemTime().setRealTime() diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/CombinedPhrases.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/CombinedPhrases.py index a25772a78d..f2c92002e8 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/CombinedPhrases.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/CombinedPhrases.py @@ -1,706 +1,706 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# CombinedPhrases.py -# Methods for producing text forecast from SampleAnalysis statistics. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -import ScalarPhrases -import VectorRelatedPhrases -import WxPhrases -import DiscretePhrases -import string - -class CombinedPhrases(ScalarPhrases.ScalarPhrases, VectorRelatedPhrases.VectorRelatedPhrases, - WxPhrases.WxPhrases, DiscretePhrases.DiscretePhrases): - def __init__(self): - ScalarPhrases.ScalarPhrases.__init__(self) - VectorRelatedPhrases.VectorRelatedPhrases.__init__(self) - WxPhrases.WxPhrases.__init__(self) - DiscretePhrases.DiscretePhrases.__init__(self) - - ############################################ - ### COMBINED ELEMENT PHRASES - - # Weather OR Sky - def weather_orSky_phrase(self): - return { - "phraseList": [ - self.weather_phrase, - self.sky_phrase, - ], - "phraseMethods": [ - self.orSkyTest, - ], - } - def orSkyTest(self, tree, node): - " Develop phrase for weather Stats" - # If there is not weather, report Sky - # Check for empty words - for child in node.get("childList"): - words = child.get("words") - if words is None: - return - # Order the weather phrases - self.orderWxPhrases(tree, node) - # Gather the words for the child phrases - wordList = [] - for child in node.get("childList"): - wordList.append((child.get("name"), child.get("words"))) - wxWords = "" - skyWords = "" - # If weather, use that. Else use sky. - for name, words in wordList: - if name == "weather_phrase": - if wxWords != "": - wxWords = wxWords + ". " - wxWords = wxWords + words - if name == "sky_phrase": - if skyWords != "": - skyWords = skyWords + ". " - skyWords = skyWords + words - if wxWords != "": - return self.setWords(node, wxWords) - else: - return self.setWords(node, skyWords) - - ### Sky, PoP, Wx - -## The skyPopWx_phrase will produce a combined Sky, Pop, Wx phrase IF appropriate. - -## For example: -## "Partly cloudy with a 20 percent chance of showers and thunderstorms." -## "Sunny then partly cloudy with a 20 percent chance of -## showers and thunderstorms in the afternoon." 
-## -## NOTE: IF you are using this phrase, you must also include the -## sky_phrase, weather_phrase and popMax_phrase in your phraseList to be -## used if the combined phrase cannot be generated. -## -## Based on algorithms by Chris Gibson and Brian Walawender -## -## This phrase operates much like the weather_phrase according to these rules : - -## --Sub-phrases are split out initially based on the Wx parameter. In other words, -## "Wx" is the primary element. -## Sub-phrases are consolidated and combined based on "Wx". -## --Only precip-related weather types will be included in the phrase. -## Non-precip-related weather types will be reported in a separate weather -## phrase. -## --After combining, if there are more than 2 sub-phrases, this phrase -## is removed and independent Sky, PoP, and Wx phrases are generated. -## Otherwise, independent sky_phrase, weather_phrase, popMax_phrases -## are removed IF the information is to be included in the skyPopWx_phrase: -## --If there are ANY areal coverage terms in the weather phrase, the PoP will -## be reported in a separate phrase. However, the Sky and Wx can still be reported -## in the combined phrase. -## --Only one PoP value can be reported in a combined phrase, so if there are -## multiple sub-phrases with precip, an independent PoP phrase will be generated. -## E.g.: Partly cloudy with a chance of rain in the morning, then a -## chance of thunderstorms in the afternoon. Probability of precipitation 50 percent. - -## --If the value for Sky is the same throughout the period it will not be repeated. -## Instead of: Partly cloudy with a chance of rain showers in the morning -## then partly cloudy with a slight chance of thunderstorms in the afternoon. -## Produce: Partly cloudy. A chance of rain showers in the morning then a -## slight chance of thunderstorms in the afternoon. 
-## - -## IMPLEMENTATION: This phrase takes into consideration Sky PoP and Wx, -## checking for required criteria ("checkSkyPopWx"), combining on a sub-phrase by -## sub-phrase basis, special handling of non-precip -## ("separateNonPrecip") and the word method ("skyPopWx_words"). - - - - def useCombinedSkyPopWx(self, tree, node): - # If set to 1, the combined skyPopWx_phrase will be used when appropriate - # as long as it is included in the product component definition - # If this is set to zero, the combined skyPopWx_phrase will not be used - return 1 - - def useSkyPopWx_consolidation(self, tree, node): - # If set to 1, the skyPopWx phrase will consolidate weather keys that - # span all time ranges to produce: - # Partly cloudy with a chance of rain. - # Snow in the morning, then sleet in the afternoon. - # - # instead of: - # Partly cloudy. Chance of rain and snow in the morning, - # then a chance of rain and sleet in the afternoon. - return 0 - - def skyPopWx_excludePoP_flag(self, tree, node): - # If set to 1, PoP will not be included in the skyPopWx_phrase - return 0 - - def skyPopWx_phrase(self): - return { - "setUpMethod": self.skyPopWx_setUp, - "wordMethod": self.skyPopWx_words, - "phraseMethods": [ - self.skyPopWx_separateNonPrecip, - self.skyPopWx_consolidateWx, - self.checkLocalEffects, - self.combinePhraseStats, - self.checkSkyPopWx, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.assembleSubPhrases, - self.postProcessPhrase, - ] - } - - def skyPopWx_setUp(self, tree, node): - if self.useCombinedSkyPopWx(tree, node) == 0: - return self.setWords(node, "") - resolution = node.get("resolution") - if resolution is not None: - mergeMethod = "Average" - else: - mergeMethod = "List" - elementInfoList = [self.ElementInfo("Wx", mergeMethod, self.WEATHER())] - self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector, - resolution) - if self.areal_sky_flag(tree, node): - self.disableSkyRelatedWx(tree, node) - spawnedWxPhrases 
= node.get("spawnedWxPhrases") - if spawnedWxPhrases is None: - node.set("spawnedWxPhrases", []) - node.set("allTimeDescriptors", 1) - return self.DONE() - - def skyPopWx_separateNonPrecip(self, tree, node): - # If > designated subPhrases, separate into precip/non-precip - statList = self.getSubStats(node, "Wx") - length = len(statList) - if self.__dict__.get("_leDebug", 0): - print "\n\nSPW separateNonPrecip", node.get('name'), node.getAreaLabel() - print " node", node - print " disabled", node.getAncestor("disabledSubkeys") - print " timerange", node.getTimeRange() - print " statList", statList - #print " doneList", node.doneList - #print " disabled", node.get('disabledSubkeys') - if length > 0: - precip = [] - nonPrecip = [] - for rankList in statList: - subkeys = self.getSubkeys(rankList) - for subkey in subkeys: - if subkey.wxType() == "": - continue - if self.precip_related_flag(tree, node, subkey): - precip.append(subkey) - else: - nonPrecip.append(subkey) - if self.__dict__.get("_leDebug", 0): print "precip, nonPrecip", precip, nonPrecip - if len(precip) >= 0 and len(nonPrecip) >= 1: - # Save this information so we can remove this new phrase later if - # we do not end up doing a combined sky, pop, weather phrase. 
- #print "\nCalling splitWxPhrase for SPW" - newPhrase = self.splitWxPhrase( - tree, node, nonPrecip, precip, - [self.separateNonPrecip, self.skyPopWx_separateNonPrecip, - self.consolidateWx], - newPhraseDef=self.weather_phrase) - spawnedWxPhrases = node.get("spawnedWxPhrases") - spawnedWxPhrases.append(newPhrase) - node.set("spawnedWxPhrases", spawnedWxPhrases) - return self.DONE() - - def skyPopWx_consolidateWx(self, tree, node): - # If any wxTypes span all subPhrases, separate into their own phrase - if self.useSkyPopWx_consolidation(tree, node) == 0: - return self.DONE() - statList = self.getSubStats(node, "Wx") - length = len(statList) - subkeyDict = {} - if self.__dict__.get("_leDebug", 0): - print "\nSPW Consolidating ", node.get('name'), node.getAreaLabel() - print " node", node - print " disabled", node.getAncestor("disabledSubkeys") - print " timerange", node.getTimeRange() - print " statList", statList - #print " doneList", node.doneList - if length > 1: - # Count occurrences of each weather key - for rankList in statList: - subkeys = self.getSubkeys(rankList) - for subkey in subkeys: - if subkey not in subkeyDict.keys(): - subkeyDict[subkey] = 1 - else: - subkeyDict[subkey] += 1 - if self.__dict__.get("_leDebug", 0): - print "subkeyDict", subkeyDict - - # Find subkeys to disable in first phrase and second phrase, - # respectively - list1 = [] - list2 = [] - for subkey in subkeyDict.keys(): - count = subkeyDict[subkey] - if count >= length: - list2.append(subkey) - else: - list1.append(subkey) - if self.__dict__.get("_leDebug", 0): - print "list1", list1 - print "list2", list2 - - if len(list1) > 0 and len(list2) > 0: - newPhrase = self.splitWxPhrase( - tree, node, list1, list2, - [self.consolidateWx, self.separateNonPrecip, - self.skyPopWx_consolidateWx]) - newPhrase.set("includeSky", 0) - newPhrase.set("includePoP", 0) - node.set("includePoP", 0) - return self.DONE() - - # This method takes care of removing independent sky, pop and weather phrases if 
we are - # going to report combined skyPopWx. - # - # For PoP: - # If PoP is included in the combined phrase (includePoP == 1), - # we remove all independent PoP phrases from the component. - # This means that if this is a local effect phrase, - # we remove the local effect Pop phrase in addition to the - # Pop phrase for the component area. - # - # For Sky: - # If Sky is to be included in the combined phrase (includeSky == 1), - # we remove the independent Sky phrase associated with this - # node area. - # - # For Wx: - # We cannot simply remove all independent weather phrases because - # --They may have been spawned from consolidateWx or separateNonPrecip - # and need to remain. - # --If this is a local effect node, weather phrases may have been - # spawned by the component level area and need to remain. - # Thus, we remove non-spawned weather phrases for the node area. - # If this is a local effect area, - # we also remove weather phrases for the component area IFF - # they would be reporting the same subkeys as this skyPopWx phrase. - # - def checkSkyPopWx(self, tree, node): - # Check criteria to see if we can produce a combined phrase - # Enhanced by Dave Zaff - - if self.__dict__.get("_leDebug", 0): print "\nCheckSPW", node.getTimeRange(), node.getAreaLabel() - - # Determine non-empty weather subPhrases - wxSubPhrases = self.getNonEmptyWxSubPhrases(tree, node) - length = len(wxSubPhrases) - #print "length in skyPopWx", length - if length > 2: - return self.skyPopWx_cleanUp(tree, node) - # PoP - includePoP = self.checkIncludePoP(tree, node) - if self.__dict__.get("_leDebug", 0): - print "\nBefore removing independent phrases:" - self.printCompPhrases(tree, node) - print - print "includePoP", includePoP - print "If -1, clean-up. 
If 1, remove area and compArea popMax" - if includePoP == -1: - #print "cleaning up" - return self.skyPopWx_cleanUp(tree, node) - #return self.setWords(node, "") - if includePoP: - # Remove the independent PoP phrases - # Note that we remove both the local effect phrase - # and the component level phrase. - self.removeComponentPhrases( - tree, node, "popMax_phrase", - areaLabels=[ - node.getAreaLabel(), - node.getComponent().getAreaLabel() - ]) - node.set("includePoP", includePoP) - - # Sky - includeSky = self.checkIncludeSky(tree, node, wxSubPhrases) - if self.__dict__.get("_leDebug", 0): - print "includeSky", includeSky - print "If 1, remove sky_phrase for area" - if includeSky: - self.removeComponentPhrases(tree, node, "sky_phrase", - areaLabels=[node.getAreaLabel()]) - node.set("includeSky", includeSky) - - # Wx - # Don't remove the spawned phrases - spawnedWxPhrases = node.get("spawnedWxPhrases") - wxExceptionPhrases = self.getWxExceptions(tree, node, "weather_phrase") - self.removeComponentPhrases( - tree, node, "weather_phrase", - spawnedWxPhrases + wxExceptionPhrases, - areaLabels=[ - node.getAreaLabel(), - node.getComponent().getAreaLabel() - ]) - if self.__dict__.get("_leDebug", 0): - print "Removed weather phrases", node.getAreaLabel() - print "\nAfter removing independent phrases:" - self.printCompPhrases(tree, node) - return self.DONE() - - def getWxExceptions(self, tree, node, phraseName): - # Return a list of the phrases with the given phraseName that - # --do not have the same areaLabel AND - # --do not have the same Wx stats as the given node. 
- nodeStats = self.getWxStats(tree, node) - nodeKeys = [] - for subkey, rank in nodeStats: - nodeKeys.append(subkey) - component = node.getComponent() - progeny = component.getProgeny() - wxExceptions = [] - if self.__dict__.get("_leDebug", 0): print "\nGetting exceptions for", nodeStats - for child in progeny: - if child.getAreaLabel() == node.getAreaLabel(): - continue - name = child.get("name") - if name == phraseName: - # Check the stats - wxStats = self.getWxStats(tree, child) - if self.__dict__.get("_leDebug", 0): print "\nChecking", wxStats - for subkey, rank in wxStats: - if subkey in nodeKeys or subkey.wxType() == "": - continue - if self.__dict__.get("_leDebug", 0): print "Appending" - wxExceptions.append(child) - return wxExceptions - - def getWxStats(self, tree, node): - wxStats = tree.stats.get("Wx", node.getTimeRange(), node.getAreaLabel(), - mergeMethod="Average") - wxStats = self.applyDisabled(tree, node, wxStats) - #print "\ngetWxStats", node.getAreaLabel(), node.getTimeRange() - #print " wxStats", wxStats - return wxStats - - def getNonEmptyWxSubPhrases(self, tree, node): - wxSubPhrases = [] - for child in node.get("childList"): - statDict = child.getStatDict() - rankList = self.getStats(statDict, "Wx") - if rankList is None or len(rankList) == 0: - continue - # See if all subkeys are NoWx - for subkey, rank in rankList: - if subkey.wxType() != "": - wxSubPhrases.append(child) - break - return wxSubPhrases - - def checkIncludePoP(self, tree, node): - # Determine number of sub-phrases with precip-related Wx - # Also, determine if there are any areal coverage terms - # for a precip-related weather type - # Return 1 if we are to include PoP - # 0 if we are not to include PoP - # -1 if there is no precip weather and we are to abort the skyPopWx_phrase - includePoP = 1 - withPrecip = 0 - arealCov = 0 - arealCovs = self.arealCoverages() - pop = self.matchToWx(tree, node, "PoP", algorithm="Max") - if pop is None: - return -1 - - covList=[] - noWx = 1 - 
for subPhrase in node.childList: - precipFound = 0 - statDict = subPhrase.getStatDict() - if statDict is None: - continue - rankList = statDict["Wx"] - subkeys = self.getSubkeys(rankList) - #print "sub-phrase keys", subkeys - for subkey in subkeys: - if pop < self.pop_wx_lower_threshold(tree, node) and \ - not self.pop_related_flag(tree, node, subkey): - noWx = 0 - elif pop >= self.pop_wx_lower_threshold(tree, node) and \ - subkey.wxType() != "": - noWx = 0 - if noWx == 0 and self.precip_related_flag(tree, node, subkey): - precipFound = 1 - cov = subkey.coverage() - if cov in arealCovs: - arealCov = 1 - # Condition added by Dave Zaff: - # If any coverages are different, - # we will report PoP separately - else: - for getCov in covList: - if cov!=getCov: - includePoP=0 - covList.append(cov) - if precipFound: - withPrecip = withPrecip + 1 - - if withPrecip == 0 and noWx: - # Do not use combined skyPopWx_phrase - self.skyPopWx_cleanUp(tree, node) - return -1 # Signal that we are Done - - # Check for excluding PoP - if self.skyPopWx_excludePoP_flag(tree, node): - return 0 - - # If there is more than one sub-phrase with precip, - # report PoP independently. - #print "withPrecip", withPrecip - if withPrecip > 1: - includePoP = 0 - - # If there are ANY areal coverage terms for a precip-related - # weather type, report PoP independently. - if arealCov == 1: - includePoP = 0 - - # Check to see if includePoP has been set previously - prevIncludePoP = node.get("includePoP") - if prevIncludePoP == 0: - includePoP = 0 - - # If PoP >= 60, then report PoP independently - if pop >= 60: - includePoP = 0 - return includePoP - - def checkIncludeSky(self, tree, node, wxSubPhrases): - # Inclusion of Sky in the skyPopWx_phrase. - # The "includeSky" flag is checked in "checkSkyPopWx" and if set, - # the independent sky_phrase is removed. - # The general rules for including Sky are outlined below. 
- # In addition: - # The "includeSky" flag is set to zero when a new skyPopWx_phrase - # is spawned by "skyPopWx_consolideWx" since the sky condition - # will be addressed by the original skyPopWx_phrase. - # The "includeSky" flag may be set in "checkLocalEffects" (PhraseBuilder): - # If we find a local effect for the skyPopWx phrase (using the - # "checkSkyWxDifference" method in PhraseBuilder), then - # we will not includeSky in the new local effect phrases - # UNLESS there was also a sky local effect, in which case we - # will "checkIncludeSky" for the local effect nodes. - # - # Check to see if already set by "skyPopWx_consolidateWx" or by "checkLocalEffects" - includeSky = node.get("includeSky") - if includeSky is None: - # We need to check for the following situations to set "includeSky": - # Number of sub-phrases (length) Wx Sky includeSky - # - # 1 similar similar 1 - # Mostly sunny with a 50 percent chance of rain. - # - # 2 different similar 0 - # Mostly sunny. A chance of rain then a slight chance of snow - # in the afternoon. - # - # 1 similar different 0 - # Mostly sunny in the morning then becoming partly cloudy. A - # 50 percent chance of rain. - # - # 2 different different 1 - # Mostly sunny with a chance of rain then partly cloudy with - # a slight chance of snow in the afternoon. A 50 percent chance - # of rain and snow. - # - # Compare sky for similarity in the 1st and 2nd half of the period. - # Note: We can't count on Sky having a temporal resolution of [6], - # but, since the skyPopWx_phrase bails after 2 sub-phrases, - # looking at the 2 halves of the timeRange is sufficient. 
- timeRange = node.getTimeRange() - areaLabel = node.getAreaLabel() - timeRange1, timeRange2 = self.splitRange(timeRange) - skyStats1 = tree.stats.get("Sky", timeRange1, areaLabel, - mergeMethod="Average") - skyStats2 = tree.stats.get("Sky", timeRange2, areaLabel, - mergeMethod="Average") - sky1 = self.getSkyValue(tree, node, skyStats1, timeRange1) - sky2 = self.getSkyValue(tree, node, skyStats2, timeRange2) - #print "similarSky", self.similarSkyWords_flag(tree, node, sky1, sky2) - # Determine if Wx is similar - similarWx = 1 - wxSubPhraseLen = len(wxSubPhrases) - if wxSubPhraseLen > 1: - similarWx = 0 - # There is one subphrase and we have to see if it covers the - # entire phrase timeRange. If not, we have empty and non-empty - # weather subPhrases and similarWx is 0 - elif wxSubPhraseLen == 1 and wxSubPhrases[0].getTimeRange() != node.getTimeRange(): - similarWx = 0 - if self.similarSkyWords_flag(tree, node, sky1, sky2): - if similarWx: - includeSky = 1 - else: - includeSky = 0 - else: - if similarWx: - includeSky = 0 - else: - includeSky = 1 - return includeSky - - def skyPopWx_cleanUp(self, tree, node): - # Clean up any non-precip node that we spawned - spawnedWxPhrases = node.get("spawnedWxPhrases") - for phrase in spawnedWxPhrases: - phrase.remove() - node.set('childList', []) - return self.setWords(node, "") - - def skyPopWx_words(self, tree, node): - # Create a combined sky, pop, weather sub-phrase - - timeRange = node.getTimeRange() - areaLabel = node.getAreaLabel() - - # Sky words - includeSky = node.getAncestor("includeSky") - if includeSky is None: - includeSky = 1 - # Add sky to the statDict for this node - if includeSky: - skyWords = self.getSkyWords(tree, node) - # If this is the second node, see if the sky words are similar to the first - # If so, do not repeat - index = node.getIndex() - if index == 1: - prevSkyWords = node.getPrev().get("words") - if self.similarSkyWords_flag(tree, node, skyWords, prevSkyWords): - prevSkyWords = 
self.preferredSkyWords(tree, node, skyWords, prevSkyWords) - node.getPrev().set("words", prevSkyWords) - skyWords = "" - else: - skyWords = "" - - # Pop words - includePoP = node.getAncestor("includePoP") - if includePoP: - pop = self.matchToWx(tree, node, "PoP") - if pop < self.pop_lower_threshold(tree, node) or \ - pop > self.pop_upper_threshold(tree, node): - popWords = "" - else: - popStr = self.getPopStr(tree, node, pop) - popWords = "a " + popStr + " percent chance of" - else: - popWords = "" - - - # Weather words - self.weather_words(tree, node) - weatherWords = node.get("words") - if weatherWords == "null": - weatherWords = "" - if weatherWords == "": - popWords = "" - -## print "\n\n", areaLabel, timeRange -## print "includeSky", includeSky -## print "includePoP", includePoP -## print "skyWords", skyWords -## print "popWords", popWords -## print "weatherWords", weatherWords - - if skyWords == "" and weatherWords == "": - return self.setWords(node, "") - - words = "" - if includePoP: - if popWords != "": - weatherWords = string.replace(weatherWords, "a slight chance of", "") - weatherWords = string.replace(weatherWords, "a chance of", "") - weatherWords = string.replace(weatherWords, "slight chance of", "") - weatherWords = string.replace(weatherWords, "chance of", "") - weatherWords = string.replace(weatherWords, "likely", "") - weatherWords = string.replace(weatherWords, "occasional", "") - weatherWords = weatherWords.strip() - - # Do not repeat weather words from previous sub-phrase - node.set("weatherWords", weatherWords) - if node.getIndex() > 0: - weatherWords = self.checkRepeatingString( - tree, node, weatherWords, "weatherWords") - if weatherWords == "": - popWords = popWords.replace(" of", "") - - if popWords == "" and weatherWords == "": - words = skyWords - elif popWords == "" and skyWords == "": - words = weatherWords - elif skyWords == "" and weatherWords == "": - words = popWords - elif popWords == "": - # There must be sky and weather - words 
= skyWords + " with " + weatherWords - elif skyWords == "": - words = popWords + " " + weatherWords - elif weatherWords == "": - words = skyWords + " with " + popWords - else: - words = skyWords + " with " + popWords + " " + weatherWords - else: - weatherWords = weatherWords.lstrip() - if skyWords != "" and weatherWords != "": - words = skyWords + " with " + weatherWords - elif skyWords == "" and weatherWords != "": - words = weatherWords - else: - words = skyWords - return self.setWords(node, words) - - def getSkyWords(self, tree, node): - timeRange = node.getTimeRange() - areaLabel = node.getAreaLabel() - sky = tree.stats.get("Sky", timeRange, areaLabel, mergeMethod="Average") - statDict = node.getStatDict() - statDict["Sky"] = sky - node.set("statDict", statDict) - self.sky_words(tree, node) - skyWords = node.get("words") - return skyWords - - def getSkyValue(self, tree, node, skyStats, timeRange): - if timeRange.duration() > 12*3600: - dayNight = -1 - else: - dayNight = self.getPeriod(timeRange, 1) - return self.sky_value(tree, node, skyStats, dayNight) - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# CombinedPhrases.py +# Methods for producing text forecast from SampleAnalysis statistics. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. 
+## + +import ScalarPhrases +import VectorRelatedPhrases +import WxPhrases +import DiscretePhrases +import string + +class CombinedPhrases(ScalarPhrases.ScalarPhrases, VectorRelatedPhrases.VectorRelatedPhrases, + WxPhrases.WxPhrases, DiscretePhrases.DiscretePhrases): + def __init__(self): + ScalarPhrases.ScalarPhrases.__init__(self) + VectorRelatedPhrases.VectorRelatedPhrases.__init__(self) + WxPhrases.WxPhrases.__init__(self) + DiscretePhrases.DiscretePhrases.__init__(self) + + ############################################ + ### COMBINED ELEMENT PHRASES + + # Weather OR Sky + def weather_orSky_phrase(self): + return { + "phraseList": [ + self.weather_phrase, + self.sky_phrase, + ], + "phraseMethods": [ + self.orSkyTest, + ], + } + def orSkyTest(self, tree, node): + " Develop phrase for weather Stats" + # If there is not weather, report Sky + # Check for empty words + for child in node.get("childList"): + words = child.get("words") + if words is None: + return + # Order the weather phrases + self.orderWxPhrases(tree, node) + # Gather the words for the child phrases + wordList = [] + for child in node.get("childList"): + wordList.append((child.get("name"), child.get("words"))) + wxWords = "" + skyWords = "" + # If weather, use that. Else use sky. + for name, words in wordList: + if name == "weather_phrase": + if wxWords != "": + wxWords = wxWords + ". " + wxWords = wxWords + words + if name == "sky_phrase": + if skyWords != "": + skyWords = skyWords + ". " + skyWords = skyWords + words + if wxWords != "": + return self.setWords(node, wxWords) + else: + return self.setWords(node, skyWords) + + ### Sky, PoP, Wx + +## The skyPopWx_phrase will produce a combined Sky, Pop, Wx phrase IF appropriate. + +## For example: +## "Partly cloudy with a 20 percent chance of showers and thunderstorms." +## "Sunny then partly cloudy with a 20 percent chance of +## showers and thunderstorms in the afternoon." 
+## +## NOTE: IF you are using this phrase, you must also include the +## sky_phrase, weather_phrase and popMax_phrase in your phraseList to be +## used if the combined phrase cannot be generated. +## +## Based on algorithms by Chris Gibson and Brian Walawender +## +## This phrase operates much like the weather_phrase according to these rules : + +## --Sub-phrases are split out initially based on the Wx parameter. In other words, +## "Wx" is the primary element. +## Sub-phrases are consolidated and combined based on "Wx". +## --Only precip-related weather types will be included in the phrase. +## Non-precip-related weather types will be reported in a separate weather +## phrase. +## --After combining, if there are more than 2 sub-phrases, this phrase +## is removed and independent Sky, PoP, and Wx phrases are generated. +## Otherwise, independent sky_phrase, weather_phrase, popMax_phrases +## are removed IF the information is to be included in the skyPopWx_phrase: +## --If there are ANY areal coverage terms in the weather phrase, the PoP will +## be reported in a separate phrase. However, the Sky and Wx can still be reported +## in the combined phrase. +## --Only one PoP value can be reported in a combined phrase, so if there are +## multiple sub-phrases with precip, an independent PoP phrase will be generated. +## E.g.: Partly cloudy with a chance of rain in the morning, then a +## chance of thunderstorms in the afternoon. Probability of precipitation 50 percent. + +## --If the value for Sky is the same throughout the period it will not be repeated. +## Instead of: Partly cloudy with a chance of rain showers in the morning +## then partly cloudy with a slight chance of thunderstorms in the afternoon. +## Produce: Partly cloudy. A chance of rain showers in the morning then a +## slight chance of thunderstorms in the afternoon. 
+## + +## IMPLEMENTATION: This phrase takes into consideration Sky PoP and Wx, +## checking for required criteria ("checkSkyPopWx"), combining on a sub-phrase by +## sub-phrase basis, special handling of non-precip +## ("separateNonPrecip") and the word method ("skyPopWx_words"). + + + + def useCombinedSkyPopWx(self, tree, node): + # If set to 1, the combined skyPopWx_phrase will be used when appropriate + # as long as it is included in the product component definition + # If this is set to zero, the combined skyPopWx_phrase will not be used + return 1 + + def useSkyPopWx_consolidation(self, tree, node): + # If set to 1, the skyPopWx phrase will consolidate weather keys that + # span all time ranges to produce: + # Partly cloudy with a chance of rain. + # Snow in the morning, then sleet in the afternoon. + # + # instead of: + # Partly cloudy. Chance of rain and snow in the morning, + # then a chance of rain and sleet in the afternoon. + return 0 + + def skyPopWx_excludePoP_flag(self, tree, node): + # If set to 1, PoP will not be included in the skyPopWx_phrase + return 0 + + def skyPopWx_phrase(self): + return { + "setUpMethod": self.skyPopWx_setUp, + "wordMethod": self.skyPopWx_words, + "phraseMethods": [ + self.skyPopWx_separateNonPrecip, + self.skyPopWx_consolidateWx, + self.checkLocalEffects, + self.combinePhraseStats, + self.checkSkyPopWx, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.assembleSubPhrases, + self.postProcessPhrase, + ] + } + + def skyPopWx_setUp(self, tree, node): + if self.useCombinedSkyPopWx(tree, node) == 0: + return self.setWords(node, "") + resolution = node.get("resolution") + if resolution is not None: + mergeMethod = "Average" + else: + mergeMethod = "List" + elementInfoList = [self.ElementInfo("Wx", mergeMethod, self.WEATHER())] + self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector, + resolution) + if self.areal_sky_flag(tree, node): + self.disableSkyRelatedWx(tree, node) + spawnedWxPhrases 
= node.get("spawnedWxPhrases") + if spawnedWxPhrases is None: + node.set("spawnedWxPhrases", []) + node.set("allTimeDescriptors", 1) + return self.DONE() + + def skyPopWx_separateNonPrecip(self, tree, node): + # If > designated subPhrases, separate into precip/non-precip + statList = self.getSubStats(node, "Wx") + length = len(statList) + if self.__dict__.get("_leDebug", 0): + print("\n\nSPW separateNonPrecip", node.get('name'), node.getAreaLabel()) + print(" node", node) + print(" disabled", node.getAncestor("disabledSubkeys")) + print(" timerange", node.getTimeRange()) + print(" statList", statList) + #print " doneList", node.doneList + #print " disabled", node.get('disabledSubkeys') + if length > 0: + precip = [] + nonPrecip = [] + for rankList in statList: + subkeys = self.getSubkeys(rankList) + for subkey in subkeys: + if subkey.wxType() == "": + continue + if self.precip_related_flag(tree, node, subkey): + precip.append(subkey) + else: + nonPrecip.append(subkey) + if self.__dict__.get("_leDebug", 0): print("precip, nonPrecip", precip, nonPrecip) + if len(precip) >= 0 and len(nonPrecip) >= 1: + # Save this information so we can remove this new phrase later if + # we do not end up doing a combined sky, pop, weather phrase. 
+ #print "\nCalling splitWxPhrase for SPW" + newPhrase = self.splitWxPhrase( + tree, node, nonPrecip, precip, + [self.separateNonPrecip, self.skyPopWx_separateNonPrecip, + self.consolidateWx], + newPhraseDef=self.weather_phrase) + spawnedWxPhrases = node.get("spawnedWxPhrases") + spawnedWxPhrases.append(newPhrase) + node.set("spawnedWxPhrases", spawnedWxPhrases) + return self.DONE() + + def skyPopWx_consolidateWx(self, tree, node): + # If any wxTypes span all subPhrases, separate into their own phrase + if self.useSkyPopWx_consolidation(tree, node) == 0: + return self.DONE() + statList = self.getSubStats(node, "Wx") + length = len(statList) + subkeyDict = {} + if self.__dict__.get("_leDebug", 0): + print("\nSPW Consolidating ", node.get('name'), node.getAreaLabel()) + print(" node", node) + print(" disabled", node.getAncestor("disabledSubkeys")) + print(" timerange", node.getTimeRange()) + print(" statList", statList) + #print " doneList", node.doneList + if length > 1: + # Count occurrences of each weather key + for rankList in statList: + subkeys = self.getSubkeys(rankList) + for subkey in subkeys: + if subkey not in list(subkeyDict.keys()): + subkeyDict[subkey] = 1 + else: + subkeyDict[subkey] += 1 + if self.__dict__.get("_leDebug", 0): + print("subkeyDict", subkeyDict) + + # Find subkeys to disable in first phrase and second phrase, + # respectively + list1 = [] + list2 = [] + for subkey in list(subkeyDict.keys()): + count = subkeyDict[subkey] + if count >= length: + list2.append(subkey) + else: + list1.append(subkey) + if self.__dict__.get("_leDebug", 0): + print("list1", list1) + print("list2", list2) + + if len(list1) > 0 and len(list2) > 0: + newPhrase = self.splitWxPhrase( + tree, node, list1, list2, + [self.consolidateWx, self.separateNonPrecip, + self.skyPopWx_consolidateWx]) + newPhrase.set("includeSky", 0) + newPhrase.set("includePoP", 0) + node.set("includePoP", 0) + return self.DONE() + + # This method takes care of removing independent sky, pop and 
weather phrases if we are + # going to report combined skyPopWx. + # + # For PoP: + # If PoP is included in the combined phrase (includePoP == 1), + # we remove all independent PoP phrases from the component. + # This means that if this is a local effect phrase, + # we remove the local effect Pop phrase in addition to the + # Pop phrase for the component area. + # + # For Sky: + # If Sky is to be included in the combined phrase (includeSky == 1), + # we remove the independent Sky phrase associated with this + # node area. + # + # For Wx: + # We cannot simply remove all independent weather phrases because + # --They may have been spawned from consolidateWx or separateNonPrecip + # and need to remain. + # --If this is a local effect node, weather phrases may have been + # spawned by the component level area and need to remain. + # Thus, we remove non-spawned weather phrases for the node area. + # If this is a local effect area, + # we also remove weather phrases for the component area IFF + # they would be reporting the same subkeys as this skyPopWx phrase. + # + def checkSkyPopWx(self, tree, node): + # Check criteria to see if we can produce a combined phrase + # Enhanced by Dave Zaff + + if self.__dict__.get("_leDebug", 0): print("\nCheckSPW", node.getTimeRange(), node.getAreaLabel()) + + # Determine non-empty weather subPhrases + wxSubPhrases = self.getNonEmptyWxSubPhrases(tree, node) + length = len(wxSubPhrases) + #print "length in skyPopWx", length + if length > 2: + return self.skyPopWx_cleanUp(tree, node) + # PoP + includePoP = self.checkIncludePoP(tree, node) + if self.__dict__.get("_leDebug", 0): + print("\nBefore removing independent phrases:") + self.printCompPhrases(tree, node) + print() + print("includePoP", includePoP) + print("If -1, clean-up. 
If 1, remove area and compArea popMax") + if includePoP == -1: + #print "cleaning up" + return self.skyPopWx_cleanUp(tree, node) + #return self.setWords(node, "") + if includePoP: + # Remove the independent PoP phrases + # Note that we remove both the local effect phrase + # and the component level phrase. + self.removeComponentPhrases( + tree, node, "popMax_phrase", + areaLabels=[ + node.getAreaLabel(), + node.getComponent().getAreaLabel() + ]) + node.set("includePoP", includePoP) + + # Sky + includeSky = self.checkIncludeSky(tree, node, wxSubPhrases) + if self.__dict__.get("_leDebug", 0): + print("includeSky", includeSky) + print("If 1, remove sky_phrase for area") + if includeSky: + self.removeComponentPhrases(tree, node, "sky_phrase", + areaLabels=[node.getAreaLabel()]) + node.set("includeSky", includeSky) + + # Wx + # Don't remove the spawned phrases + spawnedWxPhrases = node.get("spawnedWxPhrases") + wxExceptionPhrases = self.getWxExceptions(tree, node, "weather_phrase") + self.removeComponentPhrases( + tree, node, "weather_phrase", + spawnedWxPhrases + wxExceptionPhrases, + areaLabels=[ + node.getAreaLabel(), + node.getComponent().getAreaLabel() + ]) + if self.__dict__.get("_leDebug", 0): + print("Removed weather phrases", node.getAreaLabel()) + print("\nAfter removing independent phrases:") + self.printCompPhrases(tree, node) + return self.DONE() + + def getWxExceptions(self, tree, node, phraseName): + # Return a list of the phrases with the given phraseName that + # --do not have the same areaLabel AND + # --do not have the same Wx stats as the given node. 
+ nodeStats = self.getWxStats(tree, node) + nodeKeys = [] + for subkey, rank in nodeStats: + nodeKeys.append(subkey) + component = node.getComponent() + progeny = component.getProgeny() + wxExceptions = [] + if self.__dict__.get("_leDebug", 0): print("\nGetting exceptions for", nodeStats) + for child in progeny: + if child.getAreaLabel() == node.getAreaLabel(): + continue + name = child.get("name") + if name == phraseName: + # Check the stats + wxStats = self.getWxStats(tree, child) + if self.__dict__.get("_leDebug", 0): print("\nChecking", wxStats) + for subkey, rank in wxStats: + if subkey in nodeKeys or subkey.wxType() == "": + continue + if self.__dict__.get("_leDebug", 0): print("Appending") + wxExceptions.append(child) + return wxExceptions + + def getWxStats(self, tree, node): + wxStats = tree.stats.get("Wx", node.getTimeRange(), node.getAreaLabel(), + mergeMethod="Average") + wxStats = self.applyDisabled(tree, node, wxStats) + #print "\ngetWxStats", node.getAreaLabel(), node.getTimeRange() + #print " wxStats", wxStats + return wxStats + + def getNonEmptyWxSubPhrases(self, tree, node): + wxSubPhrases = [] + for child in node.get("childList"): + statDict = child.getStatDict() + rankList = self.getStats(statDict, "Wx") + if rankList is None or len(rankList) == 0: + continue + # See if all subkeys are NoWx + for subkey, rank in rankList: + if subkey.wxType() != "": + wxSubPhrases.append(child) + break + return wxSubPhrases + + def checkIncludePoP(self, tree, node): + # Determine number of sub-phrases with precip-related Wx + # Also, determine if there are any areal coverage terms + # for a precip-related weather type + # Return 1 if we are to include PoP + # 0 if we are not to include PoP + # -1 if there is no precip weather and we are to abort the skyPopWx_phrase + includePoP = 1 + withPrecip = 0 + arealCov = 0 + arealCovs = self.arealCoverages() + pop = self.matchToWx(tree, node, "PoP", algorithm="Max") + if pop is None: + return -1 + + covList=[] + noWx = 1 
+ for subPhrase in node.childList: + precipFound = 0 + statDict = subPhrase.getStatDict() + if statDict is None: + continue + rankList = statDict["Wx"] + subkeys = self.getSubkeys(rankList) + #print "sub-phrase keys", subkeys + for subkey in subkeys: + if pop < self.pop_wx_lower_threshold(tree, node) and \ + not self.pop_related_flag(tree, node, subkey): + noWx = 0 + elif pop >= self.pop_wx_lower_threshold(tree, node) and \ + subkey.wxType() != "": + noWx = 0 + if noWx == 0 and self.precip_related_flag(tree, node, subkey): + precipFound = 1 + cov = subkey.coverage() + if cov in arealCovs: + arealCov = 1 + # Condition added by Dave Zaff: + # If any coverages are different, + # we will report PoP separately + else: + for getCov in covList: + if cov!=getCov: + includePoP=0 + covList.append(cov) + if precipFound: + withPrecip = withPrecip + 1 + + if withPrecip == 0 and noWx: + # Do not use combined skyPopWx_phrase + self.skyPopWx_cleanUp(tree, node) + return -1 # Signal that we are Done + + # Check for excluding PoP + if self.skyPopWx_excludePoP_flag(tree, node): + return 0 + + # If there is more than one sub-phrase with precip, + # report PoP independently. + #print "withPrecip", withPrecip + if withPrecip > 1: + includePoP = 0 + + # If there are ANY areal coverage terms for a precip-related + # weather type, report PoP independently. + if arealCov == 1: + includePoP = 0 + + # Check to see if includePoP has been set previously + prevIncludePoP = node.get("includePoP") + if prevIncludePoP == 0: + includePoP = 0 + + # If PoP >= 60, then report PoP independently + if pop >= 60: + includePoP = 0 + return includePoP + + def checkIncludeSky(self, tree, node, wxSubPhrases): + # Inclusion of Sky in the skyPopWx_phrase. + # The "includeSky" flag is checked in "checkSkyPopWx" and if set, + # the independent sky_phrase is removed. + # The general rules for including Sky are outlined below. 
+ # In addition: + # The "includeSky" flag is set to zero when a new skyPopWx_phrase + # is spawned by "skyPopWx_consolideWx" since the sky condition + # will be addressed by the original skyPopWx_phrase. + # The "includeSky" flag may be set in "checkLocalEffects" (PhraseBuilder): + # If we find a local effect for the skyPopWx phrase (using the + # "checkSkyWxDifference" method in PhraseBuilder), then + # we will not includeSky in the new local effect phrases + # UNLESS there was also a sky local effect, in which case we + # will "checkIncludeSky" for the local effect nodes. + # + # Check to see if already set by "skyPopWx_consolidateWx" or by "checkLocalEffects" + includeSky = node.get("includeSky") + if includeSky is None: + # We need to check for the following situations to set "includeSky": + # Number of sub-phrases (length) Wx Sky includeSky + # + # 1 similar similar 1 + # Mostly sunny with a 50 percent chance of rain. + # + # 2 different similar 0 + # Mostly sunny. A chance of rain then a slight chance of snow + # in the afternoon. + # + # 1 similar different 0 + # Mostly sunny in the morning then becoming partly cloudy. A + # 50 percent chance of rain. + # + # 2 different different 1 + # Mostly sunny with a chance of rain then partly cloudy with + # a slight chance of snow in the afternoon. A 50 percent chance + # of rain and snow. + # + # Compare sky for similarity in the 1st and 2nd half of the period. + # Note: We can't count on Sky having a temporal resolution of [6], + # but, since the skyPopWx_phrase bails after 2 sub-phrases, + # looking at the 2 halves of the timeRange is sufficient. 
+ timeRange = node.getTimeRange() + areaLabel = node.getAreaLabel() + timeRange1, timeRange2 = self.splitRange(timeRange) + skyStats1 = tree.stats.get("Sky", timeRange1, areaLabel, + mergeMethod="Average") + skyStats2 = tree.stats.get("Sky", timeRange2, areaLabel, + mergeMethod="Average") + sky1 = self.getSkyValue(tree, node, skyStats1, timeRange1) + sky2 = self.getSkyValue(tree, node, skyStats2, timeRange2) + #print "similarSky", self.similarSkyWords_flag(tree, node, sky1, sky2) + # Determine if Wx is similar + similarWx = 1 + wxSubPhraseLen = len(wxSubPhrases) + if wxSubPhraseLen > 1: + similarWx = 0 + # There is one subphrase and we have to see if it covers the + # entire phrase timeRange. If not, we have empty and non-empty + # weather subPhrases and similarWx is 0 + elif wxSubPhraseLen == 1 and wxSubPhrases[0].getTimeRange() != node.getTimeRange(): + similarWx = 0 + if self.similarSkyWords_flag(tree, node, sky1, sky2): + if similarWx: + includeSky = 1 + else: + includeSky = 0 + else: + if similarWx: + includeSky = 0 + else: + includeSky = 1 + return includeSky + + def skyPopWx_cleanUp(self, tree, node): + # Clean up any non-precip node that we spawned + spawnedWxPhrases = node.get("spawnedWxPhrases") + for phrase in spawnedWxPhrases: + phrase.remove() + node.set('childList', []) + return self.setWords(node, "") + + def skyPopWx_words(self, tree, node): + # Create a combined sky, pop, weather sub-phrase + + timeRange = node.getTimeRange() + areaLabel = node.getAreaLabel() + + # Sky words + includeSky = node.getAncestor("includeSky") + if includeSky is None: + includeSky = 1 + # Add sky to the statDict for this node + if includeSky: + skyWords = self.getSkyWords(tree, node) + # If this is the second node, see if the sky words are similar to the first + # If so, do not repeat + index = node.getIndex() + if index == 1: + prevSkyWords = node.getPrev().get("words") + if self.similarSkyWords_flag(tree, node, skyWords, prevSkyWords): + prevSkyWords = 
self.preferredSkyWords(tree, node, skyWords, prevSkyWords) + node.getPrev().set("words", prevSkyWords) + skyWords = "" + else: + skyWords = "" + + # Pop words + includePoP = node.getAncestor("includePoP") + if includePoP: + pop = self.matchToWx(tree, node, "PoP") + if pop < self.pop_lower_threshold(tree, node) or \ + pop > self.pop_upper_threshold(tree, node): + popWords = "" + else: + popStr = self.getPopStr(tree, node, pop) + popWords = "a " + popStr + " percent chance of" + else: + popWords = "" + + + # Weather words + self.weather_words(tree, node) + weatherWords = node.get("words") + if weatherWords == "null": + weatherWords = "" + if weatherWords == "": + popWords = "" + +## print "\n\n", areaLabel, timeRange +## print "includeSky", includeSky +## print "includePoP", includePoP +## print "skyWords", skyWords +## print "popWords", popWords +## print "weatherWords", weatherWords + + if skyWords == "" and weatherWords == "": + return self.setWords(node, "") + + words = "" + if includePoP: + if popWords != "": + weatherWords = string.replace(weatherWords, "a slight chance of", "") + weatherWords = string.replace(weatherWords, "a chance of", "") + weatherWords = string.replace(weatherWords, "slight chance of", "") + weatherWords = string.replace(weatherWords, "chance of", "") + weatherWords = string.replace(weatherWords, "likely", "") + weatherWords = string.replace(weatherWords, "occasional", "") + weatherWords = weatherWords.strip() + + # Do not repeat weather words from previous sub-phrase + node.set("weatherWords", weatherWords) + if node.getIndex() > 0: + weatherWords = self.checkRepeatingString( + tree, node, weatherWords, "weatherWords") + if weatherWords == "": + popWords = popWords.replace(" of", "") + + if popWords == "" and weatherWords == "": + words = skyWords + elif popWords == "" and skyWords == "": + words = weatherWords + elif skyWords == "" and weatherWords == "": + words = popWords + elif popWords == "": + # There must be sky and weather + words 
= skyWords + " with " + weatherWords + elif skyWords == "": + words = popWords + " " + weatherWords + elif weatherWords == "": + words = skyWords + " with " + popWords + else: + words = skyWords + " with " + popWords + " " + weatherWords + else: + weatherWords = weatherWords.lstrip() + if skyWords != "" and weatherWords != "": + words = skyWords + " with " + weatherWords + elif skyWords == "" and weatherWords != "": + words = weatherWords + else: + words = skyWords + return self.setWords(node, words) + + def getSkyWords(self, tree, node): + timeRange = node.getTimeRange() + areaLabel = node.getAreaLabel() + sky = tree.stats.get("Sky", timeRange, areaLabel, mergeMethod="Average") + statDict = node.getStatDict() + statDict["Sky"] = sky + node.set("statDict", statDict) + self.sky_words(tree, node) + skyWords = node.get("words") + return skyWords + + def getSkyValue(self, tree, node, skyStats, timeRange): + if timeRange.duration() > 12*3600: + dayNight = -1 + else: + dayNight = self.getPeriod(timeRange, 1) + return self.sky_value(tree, node, skyStats, dayNight) + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ConfigurableIssuance.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ConfigurableIssuance.py index 0b51456b58..e14b211103 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ConfigurableIssuance.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ConfigurableIssuance.py @@ -1,202 +1,202 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. 
law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ConfigurableIssuance.py -# Methods for setting up configurable issuance. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -import TimeRangeUtils -import string, types, time -import TimeRange, AbsTime - -class ConfigurableIssuance(TimeRangeUtils.TimeRangeUtils): - def __init__(self): - TimeRangeUtils.TimeRangeUtils.__init__(self) - - def getIssuanceInfo(self, productIssuance, issuanceList, creationTime=None): - # Create a NarrativeDef for a "narrative" type of product - # from an issuanceList and selected item - - (currentLocalTime, self._shift) = self.determineTimeShift() - if creationTime is None: - day = currentLocalTime.day - month = currentLocalTime.month - year = currentLocalTime.year - hour = currentLocalTime.hour - minutes = currentLocalTime.minute - else: - localTime = time.localtime(creationTime) - year = localTime[0] - month = localTime[1] - day = localTime[2] - hour = localTime[3] - minutes = localTime[4] - - # Determine "issuanceHour" - startTime = AbsTime.absTimeYMD(year,month,day,hour) - - # find the entry for our selection - #print productIssuance, issuanceList - entry = self.getEntry(productIssuance, issuanceList) - desc, startHour, endHour, expireHour, p1Label, \ - lateNightPhrase, lateDayPhrase, todayFlag, narrativeDef = entry - period1Label = p1Label - period1LateDayPhrase = lateDayPhrase - period1LateNightPhrase = lateNightPhrase - - # Take care of "issuanceHour" variable - startHour = self.convertIssuanceHour(startHour, hour, minutes) - endHour = self.convertIssuanceHour(endHour, hour, minutes) - expireHour = self.convertIssuanceHour(expireHour, hour, minutes) - - # Determine startTime and period1 - startTime = AbsTime.absTimeYMD(year, month, day, 0) - startTime = startTime + startHour * 3600 - endTime = AbsTime.absTimeYMD(year, month, day, 0) - endTime = endTime + endHour * 3600 - period1 = (endTime.unixTime() - startTime.unixTime())/3600 - - # Set "period1" if it appears in narrativeDef - newNarrativeDef = [] - totalHours = 0 - firstPeriod = 1 - for component, period in narrativeDef: - # Handle custom components - added in OB8.2. 
- # "Custom" components are intended to replace "priorPeriod" which is removed. - # "Custom" component entries in a narrative definition are of the form: - # ("Custom", (componentName, timeRange)) - # where timeRange can be (start_hours, end_hours) or an AFPS.TimeRange. - # Start_hours and end_hours are relative to midnight local time - # of the product creation date. - - if component == "Custom": - newNarrativeDef.append((component, period)) - continue - - if firstPeriod: - if period == "period1": - period = period1 - else: - period1 = period - firstPeriod = 0 - totalHours = totalHours + period - newNarrativeDef.append((component, period)) - - # Convert to GMT time before making time range - startTime = startTime - self._shift - tr = TimeRange.TimeRange(startTime, startTime + (totalHours * 3600)) - timeRange = tr - period1TimeRange = TimeRange.TimeRange( - tr.startTime(), tr.startTime() + period1*3600) - narrativeDef = newNarrativeDef - # Expiration time -- convert to GMT - expireStartTime = AbsTime.absTimeYMD(year, month, day, 0) - self._shift - expireStartTime = expireStartTime + expireHour * 3600 - expireTime = expireStartTime - issueTime = AbsTime.current() - #issueTime = self.getCurrentTime( - # None, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - #expireTimeRange = AFPS.TimeRange(expireStartTime, expireStartTime + 3600) - #expireTime = string.upper(self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "")) - return Issuance(entry, timeRange, expireTime, issueTime, narrativeDef, - period1TimeRange, period1LateDayPhrase, period1LateNightPhrase, - period1Label, todayFlag) - - def convertIssuanceHour(self, issuanceHour, currentHour, currentMinutes): - if type(issuanceHour) == types.StringType: - if currentMinutes > self.issuanceHour_minutesPastHour(): - currentHour = currentHour + 1 - # Don't cross to the next day - if currentHour == 24: - currentHour = 23 - issuanceHour = string.replace(issuanceHour, "issuanceHour", `currentHour`) - exec "resultHour = " + 
issuanceHour - return resultHour - else: - return issuanceHour - - def getEntry(self, productIssuance, issuanceList): - found =0 - for entry in issuanceList: - issuanceDescription = entry[0] - if productIssuance == issuanceDescription: - found = 1 - break - if found == 0: - return None - else: - return entry - - def issuanceHour_minutesPastHour(self): - # Minutes past the hour after which "issuanceHour" will jump to the next hour - # The exception is Hour 23 which will always be truncated i.e. we won't jump - # to the next day. - # - # Default is to truncate the hour so that we always get the hazards - # reported for that hour. - return 65 - -class Issuance: - def __init__(self, entry, timeRange, expireTime, issueTime, narrativeDef, - period1TimeRange, period1LateDayPhrase, period1LateNightPhrase, - period1Label, todayFlag): - self.__entry = entry - self.__timeRange = timeRange - self.__expireTime = expireTime - self.__issueTime = issueTime - self.__narrativeDef = narrativeDef - self.__period1TimeRange = period1TimeRange - self.__period1LateDayPhrase = period1LateDayPhrase - self.__period1LateNightPhrase = period1LateNightPhrase - self.__period1Label = period1Label - self.__todayFlag = todayFlag - def entry(self): - return self.__entry - def timeRange(self): - return self.__timeRange - def expireTime(self): - return self.__expireTime - def issueTime(self): - return self.__issueTime - def narrativeDef(self): - return self.__narrativeDef - def period1TimeRange(self): - return self.__period1TimeRange - def period1LateDayPhrase(self): - return self.__period1LateDayPhrase - def period1LateNightPhrase(self): - return self.__period1LateNightPhrase - def period1Label(self): - return self.__period1Label - def todayFlag(self): - return self.__todayFlag - - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# 
further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ConfigurableIssuance.py +# Methods for setting up configurable issuance. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import TimeRangeUtils +import string, types, time +import TimeRange, AbsTime + +class ConfigurableIssuance(TimeRangeUtils.TimeRangeUtils): + def __init__(self): + TimeRangeUtils.TimeRangeUtils.__init__(self) + + def getIssuanceInfo(self, productIssuance, issuanceList, creationTime=None): + # Create a NarrativeDef for a "narrative" type of product + # from an issuanceList and selected item + + (currentLocalTime, self._shift) = self.determineTimeShift() + if creationTime is None: + day = currentLocalTime.day + month = currentLocalTime.month + year = currentLocalTime.year + hour = currentLocalTime.hour + minutes = currentLocalTime.minute + else: + localTime = time.localtime(creationTime) + year = localTime[0] + month = localTime[1] + day = localTime[2] + hour = localTime[3] + minutes = localTime[4] + + # Determine "issuanceHour" + startTime = AbsTime.absTimeYMD(year,month,day,hour) + + # find the entry for our selection + #print productIssuance, issuanceList + entry = self.getEntry(productIssuance, issuanceList) + desc, startHour, endHour, expireHour, p1Label, \ + lateNightPhrase, lateDayPhrase, todayFlag, narrativeDef = entry + period1Label = p1Label + period1LateDayPhrase = lateDayPhrase + period1LateNightPhrase = lateNightPhrase + + # Take care of "issuanceHour" variable + startHour = self.convertIssuanceHour(startHour, hour, minutes) + endHour = self.convertIssuanceHour(endHour, hour, minutes) + expireHour = 
self.convertIssuanceHour(expireHour, hour, minutes) + + # Determine startTime and period1 + startTime = AbsTime.absTimeYMD(year, month, day, 0) + startTime = startTime + startHour * 3600 + endTime = AbsTime.absTimeYMD(year, month, day, 0) + endTime = endTime + endHour * 3600 + period1 = (endTime.unixTime() - startTime.unixTime())/3600 + + # Set "period1" if it appears in narrativeDef + newNarrativeDef = [] + totalHours = 0 + firstPeriod = 1 + for component, period in narrativeDef: + # Handle custom components - added in OB8.2. + # "Custom" components are intended to replace "priorPeriod" which is removed. + # "Custom" component entries in a narrative definition are of the form: + # ("Custom", (componentName, timeRange)) + # where timeRange can be (start_hours, end_hours) or an AFPS.TimeRange. + # Start_hours and end_hours are relative to midnight local time + # of the product creation date. + + if component == "Custom": + newNarrativeDef.append((component, period)) + continue + + if firstPeriod: + if period == "period1": + period = period1 + else: + period1 = period + firstPeriod = 0 + totalHours = totalHours + period + newNarrativeDef.append((component, period)) + + # Convert to GMT time before making time range + startTime = startTime - self._shift + tr = TimeRange.TimeRange(startTime, startTime + (totalHours * 3600)) + timeRange = tr + period1TimeRange = TimeRange.TimeRange( + tr.startTime(), tr.startTime() + period1*3600) + narrativeDef = newNarrativeDef + # Expiration time -- convert to GMT + expireStartTime = AbsTime.absTimeYMD(year, month, day, 0) - self._shift + expireStartTime = expireStartTime + expireHour * 3600 + expireTime = expireStartTime + issueTime = AbsTime.current() + #issueTime = self.getCurrentTime( + # None, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + #expireTimeRange = AFPS.TimeRange(expireStartTime, expireStartTime + 3600) + #expireTime = string.upper(self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "")) + return Issuance(entry, 
timeRange, expireTime, issueTime, narrativeDef, + period1TimeRange, period1LateDayPhrase, period1LateNightPhrase, + period1Label, todayFlag) + + def convertIssuanceHour(self, issuanceHour, currentHour, currentMinutes): + if type(issuanceHour) == bytes: + if currentMinutes > self.issuanceHour_minutesPastHour(): + currentHour = currentHour + 1 + # Don't cross to the next day + if currentHour == 24: + currentHour = 23 + issuanceHour = string.replace(issuanceHour, "issuanceHour", repr(currentHour)) + exec("resultHour = " + issuanceHour) + return resultHour + else: + return issuanceHour + + def getEntry(self, productIssuance, issuanceList): + found =0 + for entry in issuanceList: + issuanceDescription = entry[0] + if productIssuance == issuanceDescription: + found = 1 + break + if found == 0: + return None + else: + return entry + + def issuanceHour_minutesPastHour(self): + # Minutes past the hour after which "issuanceHour" will jump to the next hour + # The exception is Hour 23 which will always be truncated i.e. we won't jump + # to the next day. + # + # Default is to truncate the hour so that we always get the hazards + # reported for that hour. 
+ return 65 + +class Issuance: + def __init__(self, entry, timeRange, expireTime, issueTime, narrativeDef, + period1TimeRange, period1LateDayPhrase, period1LateNightPhrase, + period1Label, todayFlag): + self.__entry = entry + self.__timeRange = timeRange + self.__expireTime = expireTime + self.__issueTime = issueTime + self.__narrativeDef = narrativeDef + self.__period1TimeRange = period1TimeRange + self.__period1LateDayPhrase = period1LateDayPhrase + self.__period1LateNightPhrase = period1LateNightPhrase + self.__period1Label = period1Label + self.__todayFlag = todayFlag + def entry(self): + return self.__entry + def timeRange(self): + return self.__timeRange + def expireTime(self): + return self.__expireTime + def issueTime(self): + return self.__issueTime + def narrativeDef(self): + return self.__narrativeDef + def period1TimeRange(self): + return self.__period1TimeRange + def period1LateDayPhrase(self): + return self.__period1LateDayPhrase + def period1LateNightPhrase(self): + return self.__period1LateNightPhrase + def period1Label(self): + return self.__period1Label + def todayFlag(self): + return self.__todayFlag + + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/DefaultCallToActions.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/DefaultCallToActions.py index 655d3c2ee1..0827697528 100755 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/DefaultCallToActions.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/DefaultCallToActions.py @@ -1,883 +1,883 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# DefaultCallToActions.py -# -# This module contains all of the call to action statements based on the -# VTEC phen/sig code. Sites override CallToActions.py to make modifications -# to the list of call to actions. -# -# Author: Matt Davis (ARX), Mathewson -# Updated 9/24 Shannon for WW simplification -# Updated 11/25 Shannon for AS.O -# Updated 3/27/09 Shannon for tropical and AF.W -# Updated 3/29/10 Shannon for tropical -# Updated 1/12/11 Shannon to remove HI/TI hazards and fix typos -# Updated 7/28/15 yteng to change Red Flag Warning CTA language for DR 17777 -# Updated 2/24/16 randerso for mixed case guidelines -# Updated 6/17/16 dgilling to fix a spelling error. -# Updated 7/15/16 randerso replace ellipses with commas -# Updated 6/23/17 dgilling to remove obsolete winter weather phensigs. -# Updated 7/13/17 dgilling renamed to allow easier overrides. -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - - - -class CallToActions(object): - - def pydevDebug(self): - import sys - PYDEVD_PATH='/home/rtran/awipsdr4/Ade/eclipse/plugins/org.python.pydev.debug_1.5.4.2010011921/pysrc' - if sys.path.count(PYDEVD_PATH) < 1: - sys.path.append(PYDEVD_PATH) - - import pydevd - pydevd.settrace() - - # returns the default Call To Action - def defaultCTA(self, phensig): - if self.ctaDict().has_key(phensig): - func = self.ctaDict()[phensig] - items = func() - if len(items) > 0: - return items[0] - return "" #No call to action - - def allCTAs(self, phensig): - if self.ctaDict().has_key(phensig): - func = self.ctaDict()[phensig] - return func() - return [] #no Call to actions - - - def pilCTAs(self, pil): - if self.ctaPilDict().has_key(pil): - func = self.ctaPilDict()[pil] - return func() - return [] #no Call to actions - - - # returns list of generic call to action statements - def genericCTAs(self): - return [ - """Monitor NOAA Weather Radio for the latest information, forecasts, and warnings.""", - """Listen to NOAA Weather Radio or your local media for the latest updates on this situation.""", - ] - - - -##### PLEASE KEEP PHENSIG IN ALPHABETICAL ORDER ####### - -# CallToAction dictionary. The key is the phen/sig, such as "BZ.W". The -# value is a LIST of call to action statements. The default formatter -# uses the first one in the list. Users can add additional entries which -# are accessible in the product editor. The lists are actually function -# calls that the user can override if necessary. 
-# Updated in 9.3 to sync with VTECTable entries - def ctaDict(self): - return { - "AF.W": self.ctaAFW, - "AF.Y": self.ctaAFY, - "AS.O": self.ctaASO, - "AS.Y": self.ctaASY, - "BH.S": self.ctaBHS, - "BW.Y": self.ctaBWY, - "BZ.W": self.ctaBZW, - "CF.A": self.ctaCFA, - "CF.W": self.ctaCFW, - "CF.Y": self.ctaCFY, - "DU.W": self.ctaDUW, - "DU.Y": self.ctaDUY, - "EC.A": self.ctaECA, - "EC.W": self.ctaECW, - "EH.A": self.ctaEHA, - "EH.W": self.ctaEHW, - "FA.A": self.ctaFAA, - "FF.A": self.ctaFFA, - "FG.Y": self.ctaFGY, - "FR.Y": self.ctaFRY, - "FW.A": self.ctaFWA, - "FW.W": self.ctaFWW, - "FZ.A": self.ctaFZA, - "FZ.W": self.ctaFZW, - "GL.A": self.ctaGLA, - "GL.W": self.ctaGLW, - "HF.A": self.ctaHFA, - "HF.W": self.ctaHFW, - "HT.Y": self.ctaHTY, - "HU.A": self.ctaHUA, - "HU.W": self.ctaHUW, - "HW.A": self.ctaHWA, - "HW.W": self.ctaHWW, - "HZ.A": self.ctaHZA, - "HZ.W": self.ctaHZW, - "IS.W": self.ctaISW, - "LE.W": self.ctaLEW, - "LO.Y": self.ctaLOY, - "LS.A": self.ctaLSA, - "LS.W": self.ctaLSW, - "LS.Y": self.ctaLSY, - "LW.Y": self.ctaLWY, - "MF.Y": self.ctaMFY, - "MH.W": self.ctaMHW, - "MH.Y": self.ctaMHY, - "MS.Y": self.ctaMSY, - "RB.Y": self.ctaRBY, - "RP.S": self.ctaRPS, - "SC.Y": self.ctaSCY, - "SE.A": self.ctaSEA, - "SE.W": self.ctaSEW, - "SI.Y": self.ctaSIY, - "SM.Y": self.ctaSMY, - "SR.A": self.ctaSRA, - "SR.W": self.ctaSRW, - "SU.W": self.ctaSUW, - "SU.Y": self.ctaSUY, - "SW.Y": self.ctaSWY, - "TR.A": self.ctaTRA, - "TR.W": self.ctaTRW, - "UP.A": self.ctaUPA, - "UP.W": self.ctaUPW, - "UP.Y": self.ctaUPY, - "WC.A": self.ctaWCA, - "WC.W": self.ctaWCW, - "WC.Y": self.ctaWCY, - "WI.Y": self.ctaWIY, - "WS.A": self.ctaWSA, - "WS.W": self.ctaWSW, - "WW.Y": self.ctaWWY, - "ZF.Y": self.ctaZFY, - } - - -##### PLEASE KEEP PILS IN ALPHABETICAL ORDER ####### - -# CallToAction PIL dictionary. The key is the product pil, such as "HLS". -# The entries are available for a particular product. 
None of these -# are entered automatically by the formatter, but are available through -# the product editor. -# Users can add additional entries which are accessible in the product -# editor. The lists are actually function calls that the user can -# override if necessary. - def ctaPilDict(self): - return { - 'ADR': self.ctaPilADR, - 'AFD': self.ctaPilAFD, - 'AFM': self.ctaPilAFM, - 'AVA': self.ctaPilAVA, - 'AVW': self.ctaPilAVW, - 'CAE': self.ctaPilCAE, - 'CCF': self.ctaPilCCF, - 'CDW': self.ctaPilCDW, - 'CEM': self.ctaPilCEM, - 'CFW': self.ctaPilCFW, - 'CWF': self.ctaPilCWF, - 'EQR': self.ctaPilEQR, - 'EQW': self.ctaPilEQW, - 'ESF': self.ctaPilESF, - 'EVI': self.ctaPilEVI, - 'FFA': self.ctaPilFFA, - 'FRW': self.ctaPilFRW, - 'FWF': self.ctaPilFWF, - 'FWM': self.ctaPilFWM, - 'FWS': self.ctaPilFWS, - 'GLF': self.ctaPilGLF, - 'HLS': self.ctaPilHLS, - 'HMW': self.ctaPilHMW, - 'HWO': self.ctaPilHWO, - 'LAE': self.ctaPilLAE, - 'LEW': self.ctaPilLEW, - 'MWS': self.ctaPilMWS, - 'MVF': self.ctaPilMVF, - 'MWW': self.ctaPilMWW, - 'NOW': self.ctaPilNOW, - 'NPW': self.ctaPilNPW, - 'NSH': self.ctaPilNSH, - 'NUW': self.ctaPilNUW, - 'OFF': self.ctaPilOFF, - 'PFM': self.ctaPilPFM, - 'PNS': self.ctaPilPNS, - 'RFD': self.ctaPilRFD, - 'RFW': self.ctaPilRFW, - 'RHW': self.ctaPilRHW, - 'SAF': self.ctaPilSAF, - 'SRF': self.ctaPilSRF, - 'SFT': self.ctaPilSFT, - 'SPS': self.ctaPilSPS, - 'SPW': self.ctaPilSPW, - 'TOE': self.ctaPilTOE, - 'VOW': self.ctaPilVOW, - 'WCN': self.ctaPilWCN, - 'WSW': self.ctaPilWSW, - 'ZFP': self.ctaPilZFP, - } - - -#------------------------------------------------------------------------ -# CALL TO ACTIONS - winter events -# With the winter weather simplification, specfic winter hazard defs are not -# readily available. Forecaster can choose from the specific defs via the defs below. -# Since these statements are so long, we use the descriptive word format. 
-#------------------------------------------------------------------------ - def winterWScta(self): - return [ - ("***HEAVY SNOW", """A Winter Storm Warning for heavy snow means severe winter weather conditions are expected or occurring. Significant amounts of snow are forecast that will make travel dangerous. Only travel in an emergency. If you must travel, keep an extra flashlight, food, and water in your vehicle in case of an emergency."""), - ("***SLEET", """A Winter Storm Warning for sleet means that a winter storm system is impacting the area with significant amounts of sleet. Travel is likely to be severely impacted."""), - ("***MIXED PRECIP", """A Winter Storm Warning means significant amounts of snow, sleet, and ice are expected or occurring. Strong winds are also possible. This will make travel very hazardous or impossible."""), - ] - - def winterWWcta(self): - return [ - ("***BLOWING SNOW", """A Winter Weather Advisory for blowing snow means that visibilities will be limited due to strong winds blowing snow around. Use caution when traveling, especially in open areas."""), - ("***SLEET", """A Winter Weather Advisory for sleet means periods of sleet are imminent or occurring. Sleet may cause driving to become extremely dangerous, so be prepared to use caution when traveling."""), - ("***SNOW AND BLOWING SNOW", """A Winter Weather Advisory for |*lake effect*| snow and blowing snow means that visibilities will be limited due to a combination of falling and blowing snow. Use caution when traveling, especially in open areas."""), - ("***SNOW", """A Winter Weather Advisory for snow means that periods of snow will cause primarily travel difficulties. Be prepared for snow covered roads and limited visibilities, and use caution while driving."""), - ("***MIXED PRECIP", """A Winter Weather Advisory means that periods of snow, sleet, or freezing rain will cause travel difficulties. 
Be prepared for slippery roads and limited visibilities, and use caution while driving."""), - ] -#------------------------------------------------------------------------ -# CALL TO ACTIONS - individual functions for each phen/sig -#------------------------------------------------------------------------ -# These are lists of strings. The first one is used in the formatters, -# the others are available through the call to actions menu. - - def ctaAFW(self): - return [ -"""An Ashfall Warning means that significant accumulation of volcanic ash is expected or occurring due to a volcanic eruption or resuspension of previously deposited ash. - -Seal windows and doors. Protect electronics and cover air intakes and open water sources. Avoid driving. Remain indoors unless absolutely necessary. Use extreme caution clearing rooftops of ash. - -Listen to NOAA Weather Radio or local media for further information.""", - ] - - def ctaAFY(self): - return [ - """An Ashfall Advisory means that large amounts of ash will be deposited in the advisory area. Persons with respiratory illnesses should remain indoors to avoid inhaling the ash particles, and all persons outside should cover their mouth and nose with a mask or cloth.""", - ] - - def ctaASO(self): - return [ - """An Air Stagnation Outlook is issued when an extended period of weather conditions are anticipated that could contribute to poor ventilation, and thus potentially poor air quality. Be prepared for these conditions to develop in the next 2 to 3 days, and for the issuance of air stagnation advisories as the situation becomes imminent.""", - ] - - def ctaASY(self): - return [ - """An Air Stagnation Advisory indicates that due to limited movement of an air mass across the advisory area, pollution will increase to dangerous levels. 
Persons with respiratory illness should follow their physicians advice for dealing with high levels of air pollution.""", - ] - - def ctaBHS(self): - return [ -"""A Beach Hazards Statement is issued when threats such as rip currents, longshore currents, sneaker waves and other hazards create life-threatening conditions in the surf zone. Caution should be used when in or near the water.""", - ] - - def ctaBWY(self): - return [ - """A Brisk Wind Advisory means that winds will reach Small Craft Advisory criteria in areas that are primarily ice covered. Moving ice floes could damage small craft.""", - ] - - def ctaBZW(self): - return [ - """A Blizzard Warning means severe winter weather conditions are expected or occurring. Falling and blowing snow with strong winds and poor visibilities are likely. This will lead to whiteout conditions, making travel extremely dangerous. Do not travel. If you must travel, have a winter survival kit with you. If you get stranded, stay with your vehicle.""", - ] - - def ctaCFA(self): - return [ - """A Coastal Flood Watch means that conditions favorable for flooding are expected to develop. Coastal residents should be alert for later statements or warnings, and take action to protect property.""", - ] - - def ctaCFW(self): - return [ - """A Coastal Flood Warning means that flooding is occurring or imminent. Coastal residents in the warned area should be alert for rising water, and take appropriate action to protect life and property.""", - ] - - def ctaCFY(self): - return [ - """A Coastal Flood Advisory indicates that onshore winds and tides will combine to generate flooding of low areas along the shore.""", - ] - - def ctaDUW(self): - return [ - """A Blowing Dust Warning means severely limited visibilities are expected with blowing dust. Travel could become extremely dangerous. 
Persons with respiratory problems should make preparations to stay indoors until the storm passes.""", - ] - - def ctaDUY(self): - return [ - """A Blowing Dust Advisory means that blowing dust will restrict visibilities. Travelers are urged to use caution.""", - ] - - def ctaECA(self): - return [ - """An Extreme Cold Watch means that prolonged periods of very cold temperatures are expected. Ensure that outdoor animals have warm shelter, and that children wear a hat and gloves.""", - ] - - def ctaECW(self): - return [ - """An Extreme Cold Warning means that dangerously low temperatures are expected for a prolonged period of time. Frostbite and hypothermia are likely if exposed to these temperatures, so make sure a hat, facemask, and heavy gloves or mittens are available.""", - ] - - def ctaEHA(self): - return [ - """An Excessive Heat Watch means that a prolonged period of hot temperatures is expected. The combination of hot temperatures and high humidity will combine to create a DANGEROUS SITUATION in which heat illnesses are possible. Drink plenty of fluids, stay in an air-conditioned room, stay out of the sun, and check up on relatives and neighbors.""", - """Young children and pets should never be left unattended in vehicles under any circumstances. This is especially true during warm or hot weather when car interiors can reach lethal temperatures in a matter of minutes.""", - ] - - def ctaEHW(self): - return [ - """Take extra precautions, if you work or spend time outside. When possible, reschedule strenuous activities to early morning or evening. Know the signs and symptoms of heat exhaustion and heat stroke. Wear light weight and loose fitting clothing when possible and drink plenty of water.\n\n - To reduce risk during outdoor work, the occupational safety and health administration recommends scheduling frequent rest breaks in shaded or air conditioned environments. Anyone overcome by heat should be moved to a cool and shaded location. 
Heat stroke is an emergency, call 9 1 1.\n\n - An Excessive Heat Warning means that a prolonged period of dangerously hot temperatures will occur. The combination of hot temperatures and high humidity will combine to create a DANGEROUS SITUATION in which heat illnesses are likely. Drink plenty of fluids, stay in an air-conditioned room, stay out of the sun, and check up on relatives and neighbors.\n\n - Young children and pets should never be left unattended in vehicles under any circumstances. This is especially true during warm or hot weather when car interiors can reach lethal temperatures in a matter of minutes.""", - ] - - def ctaFAA(self): - return [ - """A Flood Watch means there is a potential for flooding based on current forecasts.\n\nYou should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.""", - ] - - def ctaFFA(self): - return [ - """A Flash Flood Watch means that conditions may develop that lead to flash flooding. Flash flooding is a VERY DANGEROUS SITUATION.\n\nYou should monitor later forecasts and be prepared to take action should Flash Flood Warnings be issued.""", - ] - - def ctaFGY(self): - return [ -"""A Dense Fog Advisory means visibilities will frequently be reduced to less than one quarter mile. If driving, slow down, use your headlights, and leave plenty of distance ahead of you.""", - ] - - def ctaFRY(self): - return [ - """A Frost Advisory means that widespread frost is expected. Sensitive outdoor plants may be killed if left uncovered.""", - ] - - def ctaFWA(self): - return [ - """A Fire Weather Watch means that critical fire weather conditions are forecast to occur. Listen for later forecasts and possible Red Flag Warnings.""", - ] - - def ctaFWW(self): - return [ - """A Red Flag Warning means that critical fire weather conditions are either occurring now, or will shortly. 
A combination of strong winds, low relative humidity, and warm temperatures can contribute to extreme fire behavior.""", - ] - - def ctaFZA(self): - return [ - """A Freeze Watch means sub-freezing temperatures are possible. These conditions could kill crops and other sensitive vegetation.""", - ] - - def ctaFZW(self): - return [ - """A Freeze Warning means sub-freezing temperatures are imminent or highly likely. These conditions will kill crops and other sensitive vegetation.""", - ] - - def ctaGLA(self): - return [ - """A Gale Watch is issued when the risk of gale force winds of 34 to 47 knots has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", - ] - - def ctaGLW(self): - return [ - """A Gale Warning means winds of 34 to 47 knots are imminent or occurring. Operating a vessel in gale conditions requires experience and properly equipped vessels. It is highly recommended that mariners without the proper experience seek safe harbor prior to the onset of gale conditions.""", - ] - - def ctaHFA(self): - return [ - """A Hurricane Force Wind Watch is issued when the risk of hurricane force winds of 64 knots or greater has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", - ] - - def ctaHFW(self): - return [ - """A Hurricane Force Wind Warning means winds of 64 knots or greater are imminent or occurring. All vessels should remain in port, or take shelter as soon as possible, until winds and waves subside.""", - ] - - def ctaHTY(self): - return [ - """A Heat Advisory means that a period of hot temperatures is expected. The combination of hot temperatures and high humidity will combine to create a situation in which heat illnesses are possible. 
Drink plenty of fluids, stay in an air-conditioned room, stay out of the sun, and check up on relatives and neighbors.\n\n - Take extra precautions, if you work or spend time outside. When possible, reschedule strenuous activities to early morning or evening. Know the signs and symptoms of heat exhaustion and heat stroke. Wear light weight and loose fitting clothing when possible and drink plenty of water.\n\n - To reduce risk during outdoor work, the occupational safety and health administration recommends scheduling frequent rest breaks in shaded or air conditioned environments. Anyone overcome by heat should be moved to a cool and shaded location. Heat stroke is an emergency, call 9 1 1.""", - ] - - def ctaHUA(self): - return [ - """A Hurricane Watch is issued when sustained winds of |* 64 kts or 74 mph *| or higher associated with a hurricane are possible within 48 hours.""", - ] - - def ctaHUW(self): - return [ - """A Hurricane Warning means sustained winds of |* 64 kts or 74 mph *| or higher associated with a hurricane are expected within 36 hours. A Hurricane Warning can remain in effect when dangerously high water or a combination of dangerously high water and exceptionally high waves continue, even though winds may be less than hurricane force.""", - ] - - def ctaHWA(self): - return [ - """A High Wind Watch means there is the potential for a hazardous high wind event. Sustained winds of at least 40 mph, or gusts of 58 mph or stronger may occur. Continue to monitor the latest forecasts.""", - ] - - def ctaHWW(self): - return [ - """A High Wind Warning means a hazardous high wind event is expected or occurring. Sustained wind speeds of at least 40 mph or gusts of 58 mph or more can lead to property damage.""", - ] - - def ctaHZA(self): - return [ - """A Hard Freeze Watch means sub-freezing temperatures are possible. 
These conditions could kill crops and other sensitive vegetation.""", - ] - - def ctaHZW(self): - return [ - """A Hard Freeze Warning means sub-freezing temperatures are imminent or highly likely. These conditions will kill crops and other sensitive vegetation.""", - ] - - def ctaISW(self): - return [ - """An Ice Storm Warning means severe winter weather conditions are expected or occurring. Significant amounts of ice accumulations will make travel dangerous or impossible. Travel is strongly discouraged. Commerce will likely be severely impacted. If you must travel, keep an extra flashlight, food, and water in your vehicle in case of an emergency. Ice accumulations and winds will likely lead to snapped power lines and falling tree branches that add to the danger.""", - ] - - def ctaLEW(self): - return [ - """A Lake Effect Snow Warning means significant amounts of lake-effect snow are forecast that will make travel very hazardous or impossible. Lake-effect snow showers typically align themselves in bands and will likely be intense enough to drop 1 to several inches of snow per hour for several hours. Visibilities vary greatly and can drop to zero within minutes. Travel is strongly discouraged. Commerce could be severely impacted. If you must travel, keep an extra flashlight, food, and water in your vehicle in case of an emergency.""", - ] - - def ctaLOY(self): - return [ - """A Low Water Advisory means water levels are expected to be significantly below average. Mariners should use extreme caution and transit at the slowest safe navigable speed to minimize impact.""", - ] - - def ctaLSA(self): - return [ - """A Lakeshore Flood Watch means that conditions favorable for lakeshore flooding are expected to develop. Residents on or near the shore should take action to protect property, and listen for later statements or warnings.""", - ] - - def ctaLSW(self): - return [ - """A Lakeshore Flood Warning means that flooding is occurring or imminent along the lake. 
Residents on or near the shore in the warned area should be alert for rising water, and take appropriate action to protect life and property.""", - ] - - def ctaLSY(self): - return [ - """A Lakeshore Flood Advisory indicates that onshore winds will generate flooding of low areas along the lakeshore.""", - ] - - def ctaLWY(self): - return [ - """A Lake Wind Advisory indicates that winds will cause rough chop on area lakes. Small boats will be especially prone to capsizing.""", - ] - - def ctaMHW(self): - return [ - """An Ashfall Warning means that significant accumulation of ashfall is expected on vessels. It is recommended that vessels be prepared to take the necessary counter measures before putting to sea or entering the warning area.""", - ] - - def ctaMFY(self): - return [ -"""A Dense Fog Advisory means visibilities will frequently be reduced to less than one mile. Inexperienced mariners, especially those operating smaller vessels should avoid navigating in these conditions. """, - ] - - def ctaMHY(self): - return [ -"""An Ashfall Advisory means that a light accumulation of ashfall is expected on vessels. It is recommended that vessels be prepared to take appropriate counter measures before putting to sea or entering the advisory area.""", - ] - - def ctaMSY(self): - return [ - """A Dense Smoke Advisory means widespread fires will create smoke, limiting visibilities. Inexperienced mariners, especially those operating smaller vessels should avoid navigating in these conditions.""", - ] - - def ctaRBY(self): - return [ - """A Small Craft Advisory for rough bar means that wave conditions are expected to be hazardous to small craft in or near harbor entrances.""", - ] - - def ctaRPS(self): - return [ - """There is a high risk of rip currents. - -Rip currents are powerful channels of water flowing quickly away from shore, which occur most often at low spots or breaks in the sandbar and in the vicinity of structures such as groins, jetties and piers. 
Heed the advice of lifeguards, beach patrol flags and signs. - -If you become caught in a rip current, yell for help. Remain calm, do not exhaust yourself and stay afloat while waiting for help. If you have to swim out of a rip current, swim parallel to shore and back toward the beach when possible. Do not attempt to swim directly against a rip current as you will tire quickly. """, - ] - - def ctaSCY(self): - return [ - """A Small Craft Advisory means that wind speeds of 21 to 33 knots are expected to produce hazardous wave conditions to small craft. Inexperienced mariners, especially those operating smaller vessels should avoid navigating in these conditions.""", - ] - - def ctaSEA(self): - return [ - """A Hazardous Seas Watch is issued when the risk of hazardous seas has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", - ] - - def ctaSEW(self): - return [ - """A Hazardous Seas Warning means hazardous sea conditions are imminent or occurring. Recreational boaters should remain in port, or take shelter until waves subside. Commercial vessels should prepare for rough seas and consider remaining in port or taking shelter in port until hazardous seas subside.""", - ] - - def ctaSIY(self): - return [ - """A Small Craft Advisory for wind means that wind speeds of 21 to 33 knots are expected. Inexperienced mariners, especially those operating smaller vessels should avoid navigating in these conditions.""", - ] - - def ctaSMY(self): - return [ - """A Dense Smoke Advisory means widespread fires will create smoke, limiting visibilities. 
If driving, slow down, use your headlights, and leave plenty of distance ahead of you in case a sudden stop is needed.""", - ] - - def ctaSRA(self): - return [ - """A Storm Watch is issued when the risk of storm force winds of 48 to 63 knots has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", - ] - - def ctaSRW(self): - return [ - """A Storm Warning means winds of 48 to 63 knots are imminent or occurring. Recreational boaters should remain in port, or take shelter until winds and waves subside. Commercial vessels should prepare for very strong winds and dangerous sea conditions, and consider remaining in port or taking shelter in port until winds and waves subside.""", - ] - - def ctaSUW(self): - return [ - """A High Surf Warning indicates that dangerous, battering waves will pound the shoreline. This will result in life-threatening conditions.""", - ] - - def ctaSUY(self): - return [ - """A High Surf Advisory means that high surf will affect beaches in the advisory area, producing localized beach erosion and dangerous swimming conditions.""", - ] - - def ctaSWY(self): - return [ - """A Small Craft Advisory for hazardous seas means that waves are expected to be hazardous to small craft. Mariners should avoid shoaling areas. Long period swell can sharpen into large breaking waves in shoaling areas. It is not unusual for waves to break much farther from shoaling areas than is normally experienced. 
Remember, breaking waves can easily capsize even larger vessels.""", - ] - - def ctaTRA(self): - return [ - """A Tropical Storm Watch means sustained winds of |* 34 to 63 kt or 39 to 73 mph or 63 to 118 km per hr *| are possible due to a tropical storm within 48 hours.""", - ] - - def ctaTRW(self): - return [ - """A Tropical Storm Warning means sustained winds of |* 34 to 63 kt or 39 to 73 mph or 63 to 118 km per hr *| are expected due to a tropical storm within 36 hours.""", - ] - - def ctaUPA(self): - return [ - """A Heavy Freezing Spray Watch is issued when the risk of heavy freezing spray has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", - ] - - def ctaUPW(self): - return [ - """A Heavy Freezing Spray Warning means heavy freezing spray is expected to rapidly accumulate on vessels. These conditions can be extremely hazardous to navigation. It is recommended that mariners not trained to operate in these conditions or vessels not properly equiped to do so, remain in port or avoid the waring area.""", - ] - - def ctaUPY(self): - return [ - """A Freezing Spray Advisory means that light to moderate accumulation of ice is expected on vessels. Operating a vessel in freezing spray can be hazardous. It is recommended that vessels be prepared to take appropriate counter measures before putting to sea or enter the advisory area.""", - ] - - def ctaWCA(self): - return [ - """A Wind Chill Watch means the there is the potential for a combination of very cold air and strong winds to create dangerously low wind chill values. Monitor the latest forecasts and warnings for updates on this situation.""", - ] - - def ctaWCW(self): - return [ - """A Wind Chill Warning means the combination of very cold air and strong winds will create dangerously low wind chill values. 
This will result in frost bite and lead to hypothermia or death if precautions are not taken.""", - ] - - def ctaWCY(self): - return [ - """A Wind Chill Advisory means that very cold air and strong winds will combine to generate low wind chills. This will result in frost bite and lead to hypothermia if precautions are not taken. If you must venture outdoors, make sure you wear a hat and gloves.""", - ] - - def ctaWIY(self): - return [ - """A Wind Advisory means that winds of 35 mph are expected. Winds this strong can make driving difficult, especially for high profile vehicles. Use extra caution.""", - ] - - def ctaWSA(self): - return [ - """A Winter Storm Watch means there is a potential for significant snow, sleet, or ice accumulations that may impact travel. Continue to monitor the latest forecasts.""", - ] - - def ctaWSW(self): - return [ - """|*Choose the appropriate CTA below and delete the rest*| - -A Winter Storm Warning for heavy snow means severe winter weather conditions are expected or occurring. Significant amounts of snow are forecast that will make travel dangerous. Only travel in an emergency. If you must travel, keep an extra flashlight, food, and water in your vehicle in case of an emergency. - -A Winter Storm Warning means significant amounts of snow, sleet, and ice are expected or occurring. Strong winds are also possible. This will make travel very hazardous or impossible. - -A Winter Storm Warning for sleet means that a winter storm system is impacting the area with significant amounts of sleet. Travel is likely to be severely impacted.""", - ] - - def ctaWWY(self): - return [ - """|*Choose the appropriate CTA below and delete the rest*| - -A Winter Weather Advisory means that periods of snow, sleet, or freezing rain will cause travel difficulties. Be prepared for slippery roads and limited visibilities, and use caution while driving. 
- -A Winter Weather Advisory for blowing snow means that visibilities will be limited due to strong winds blowing snow around. Use caution when traveling, especially in open areas. - -A Winter Weather Advisory for sleet means periods of sleet are imminent or occurring. Sleet may cause driving to become extremely dangerous, so be prepared to use caution when traveling. - -A Winter Weather Advisory for |*lake effect*| snow and blowing snow means that visibilities will be limited due to a combination of falling and blowing snow. Use caution when traveling, especially in open areas. - -A Winter Weather Advisory for snow means that periods of snow will cause primarily travel difficulties. Be prepared for snow covered roads and limited visibilities, and use caution while driving.""", - - ] - - def ctaZFY(self): - return [ - """A Freezing Fog Advisory means visibilities will frequently be reduced to less than one quarter mile. If driving, slow down, use your headlights, and leave plenty of distance ahead of you. Also, be alert for frost on bridge decks causing slippery roads.""", - ] - - -#------------------------------------------------------------------------ -# CALL TO ACTIONS - individual functions for each product pil -#------------------------------------------------------------------------ -# These are lists of strings. These are available through the call to -# actions menu. 
- - def ctaPilADR(self): - return [ - ] - - def ctaPilAFD(self): - return [ - ] - - def ctaPilAFM(self): - return [ - ] - - def ctaPilAVA(self): - return [ - ] - - def ctaPilAVW(self): - return [ - ] - - def ctaPilCAE(self): - return [ - ] - - def ctaPilCCF(self): - return [ - ] - - def ctaPilCDW(self): - return [ - ] - - def ctaPilCEM(self): - return [ - ] - - def ctaPilCFW(self): - return [("***RIP CURRENTS CTA", """Rip currents are powerful channels of water flowing quickly away from shore, which occur most often at low spots or breaks in the sandbar and in the vicinity of structures such as groins, jetties and piers. Heed the advice of lifeguards, beach patrol flags and signs. - -If you become caught in a rip current, yell for help. Remain calm, do not exhaust yourself and stay afloat while waiting for help. If you have to swim out of a rip current, SWIM PARALLEL TO SHORE and back toward the beach when possible. Do not attempt to swim directly against a rip current as you will tire quickly."""), - ("***LONGSHORE CURRENTS CTA", """Longshore currents commonly occur when waves approach the shoreline at an angle. 
They can be particularly dangerous near a jetty or pier."""), - ("***SNEAKER WAVES CTA", """Add CTA here."""), - ("***RED TIDE CTA", """Add CTA here"""), - ("***SEA NETTLES CTA", """Add CTA here"""), - ("***TSUNAMI DEBRIS CTA", """Add CTA here"""), - ("***OTHER BEACH HAZARDS CTA", """Add CTA here"""), - ] - - def ctaPilCWF(self): - return [ - ] - - def ctaPilEQR(self): - return [ - ] - - def ctaPilESF(self): - return [ - ] - - def ctaPilEQW(self): - return [ - ] - - def ctaPilEVI(self): - return [ - ] - - def ctaPilFFA(self): - return [ - ] - - def ctaPilFRW(self): - return [ - ] - - def ctaPilFWF(self): - return [ - ] - - def ctaPilFWM(self): - return [ - ] - - def ctaPilFWS(self): - return [ - ] - - def ctaPilGLF(self): - return [ - ] - - def ctaPilHLS(self): - return [("***MINOR FLOODING", """Residents can expect minor flooding of roads, especially those with poor drainage. Known intersections with very poor drainage may have water levels up to 3 feet. Other poor drainage areas will have water rises of 1 foot."""), - ("***WIDESPREAD FLOODING", """Residents can expect widespread flooding. In poor drainage areas, minor to moderate property damage is expected, and several main thoroughfares may be closed. Known intersections with very poor drainage may have water levels up to 5 feet. Other poor drainage areas will have water rises up to 3 feet. Levels will rise 1 foot elsewhere."""), - """Small streams will surpass bank full, but only for one hour or less.""", - ("***WIDESPREAD STREAM FLOODING", """Most small streams and creeks will surpass bank full, for up to 3 hours. Larger rivers will rise, and those which respond quickly to very heavy rain may briefly exceed flood stage."""), - ("***PRIOR NOTICE OF EXTENSIVE AREAL FLOODING", """Extensive flooding is expected |**today or tonight or next day**|. \n\n Persons living near or in poor drainage locations should prepare for possible evacuation later |**today or tonight or next day**|. 
In these areas, significant property damage will occur, and some power outages are likely. Minor property damage is possible elsewhere. \n\nWater levels in very poor drainage areas will approach 7 feet. Other poor drainage locations will have rises between 3 and 5 feet. Elsewhere, expect water rises to near 2 feet. Numerous main roads will be closed. Driving is highly discouraged except for emergencies."""), - ("***DANGEROUS FLOODING", """This is a dangerous flood situation! \n\nPersons living in or near poor drainage areas should evacuate immediately. Significant property damage will occur in these locations. Minor property damage is possible in other areas. Some power outages are expected. \n\n Water levels in very poor drainage areas will approach 7 feet. Other poor drainage locations will have rises between 3 and 5 feet. Elsewhere, expect water rises to near 2 feet. Numerous main roads will be closed. Driving is highly discouraged until well after flood waters recede. \n\n Move to safety immediately."""), - ("***PRIOR NOTICE OF EXTENSIVE RIVER FLOODING", """Extensive flooding is expected |**today or tonight or next day**|. \n\n By |**time**|, all small streams and creeks will have surpassed bank full. These conditions will last between 3 and 6 hours. Some streams will exceed their banks by several feet and may flood nearby homes. Evacuations are possible.\n\n Rivers in affected areas will rise, with some reaching or exceeding flood stage. Normally quick-rising rivers will exceed flood stage by several feet, flooding homes along the riverside. Pastures will also flood, but livestock losses should be minimal. Several secondary roads and bridges will be washed out. Driving is highly discouraged."""), - ("***DANGEROUS RIVER FLOODING", """This is a DANGEROUS SITUATION! \n\nAll streams, creeks, and some rivers will surpass bankfull, for between 3 and 6 hours. Some streams will exceed their banks by several feet, flooding nearby homes. Evacuations are possible. 
\n\n Rivers in affected areas will rise, with some reaching or exceeding flood stage. Normally quick rising rivers will exceed flood stage by several feet, flooding homes along the riverside. Pastures will also flood, but livestock losses should be minimal."""), - ("***CATASTROPHIC FLOODING EXPECTED", """Catastrophic flooding is expected later |**edit day or night periods**|. \n\n A state of emergency has been issued |**by agency**| for |**edit area here**|. \n\n Residents in flood prone areas should rush to completion preparations to protect their property, then move to a place of safety, this |**edit time period**|. Mandatory evacuations are underway. \n\n |** opening paragraph describing antecedent rainfall and expected heavier rainfall **| \n\n life threatening flooding is likely! In urban areas, extensive property damage will occur in all poor drainage areas, with moderate to major property damage elsewhere. Widespread power outages are likely. \n\n In rural locations, all streams, creeks, and arroyos will surpass bank full for more than 6 hours. Each will exceed their banks by several feet, flooding homes, even those up to one half mile away from the banks. \n\n In all areas, hundreds of roads will flood. Dozens of secondary roads may become washed out in rural areas. Numerous low water bridges will likely wash out as well. \n\n Water levels will exceed 5 feet in all poor drainage urban areas, and average at least 2 feet elsewhere. All rivers in affected areas will rise, and most will exceed flood stage. Quick rising rivers will exceed flood stage, and reach near record crests, causing inundation of nearby homes. In rural locations, extensive pastureland flooding will occur as water levels rise to 2 feet or more. Widespread livestock losses are likely."""), - ("***CATASTROPHIC FLOODING OCCURRING", """Catastrophic flooding is occurring in |**edit area**|. 
\n\n States of emergency remain in effect for the following locations: \n\n |**edit counties and cities here**| \n\n residents remain prohibited from venturing out. Law enforcement and |**military support group edit here**| evacuations are now underway. \n\n This remains a life threatening situation! Extensive property damage is occurring in all poor drainage areas. Elsewhere, moderate to major property damage is occurring. Hundreds of roads are closed, and some are likely damaged. Several area bridges are washed out. Streams, creeks, and arroyos are several feet above bank full, and will remain so for hours. Many rivers are nearing flood stage, and some have already surpassed it. Homes near these rivers are likely flooded. Flood waters will continue for several more hours. \n\n Water levels are in excess of 5 feet in all poor drainage areas. Elsewhere, average water levels are at least 2 feet. Power outages are widespread. \n\n Stay tuned to NOAA Weather Radio for further information on this dangerous flood. Heed all evacuation orders from law enforcement or military personnel."""), - ("***GENERATOR PRECAUTIONS", """If you plan on using a portable generator, be sure to observe all safety precautions to avoid carbon monoxide poisoning, electrocution, or fire. Be sure to operate your generator in a dry outdoor area away from windows, doors and vents. Carbon monoxide poisoning deaths can occur due to improperly located portable generators!"""), - ("***FLAMMABLES PRECAUTION", """Flammable liquids such as gasoline or kerosene should only be stored outside of the living areas in properly labeled, non glass safety containers. 
Do not store in an attached garage as gas fumes can travel into the home and potentially ignite, especially if the home has natural or propane gas lines that could become damaged during the hurricane."""), - ("***HURRICANE WARNING DEFINITION", """A Hurricane Warning means sustained winds of |* 64 kts or 74 mph *| or higher associated with a hurricane are expected within 36 hours. A Hurricane Warning can remain in effect when dangerously high water or a combination of dangerously high water and exceptionally high waves continue, even though winds may be less than hurricane force."""), - ("***HURRICANE WATCH DEFINITION", """A Hurricane Watch is issued when sustained winds of |* 64 kts or 74 mph *| or higher associated with a hurricane are possible within 48 hours."""), - ("***HURRICANE WIND WARNING DEFINITION", """A Hurricane Wind Warning is issued when a landfalling hurricane is expected to spread hurricane force winds well inland. Serious property damage, power outages, blowing debris, and fallen trees can be expected as winds reach or exceed 74 mph."""), - ("***HURRICANE WIND WATCH DEFINITION", """A Hurricane Wind Watch is issued when a landfalling hurricane is expected to spread hurricane force winds well inland within the next 48 hours. Prepare for winds in excess of 74 mph."""), - ("***TROPICAL STORM WARNING DEFINITION", """A Tropical Storm Warning means sustained winds of |* 34 to 63 kt or 39 to 73 mph or 63 to 118 km per hr *| are expected due to a tropical cyclone within 36 hours."""), - ("***TROPICAL STORM WIND WARNING DEFINITION", """A Tropical Storm Wind Warning means winds of 39 to 73 mph are expected due to a landfalling hurricane or tropical storm. 
Winds of this magnitude are likely to cause sporadic power outages, fallen trees, minor property damage, and dangerous driving conditions for high profile vehicles."""), - ("***TROPICAL STORM WATCH DEFINITION", """A Tropical Storm Watch means sustained winds of |* 34 to 63 kt or 39 to 73 mph or 63 to 118 km per hr *| are possible due to a tropical cyclone within 48 hours."""), - ("***TROPICAL STORM WIND WATCH DEFINITION", """A Tropical Storm Wind Watch means winds of 39 to 73 mph are expected due to a landfalling hurricane or tropical storm within 48 hours."""), - ] - - def ctaPilHMW(self): - return [ - ] - - def ctaPilHWO(self): - return [ - ] - - def ctaPilLAE(self): - return [ - ] - - def ctaPilLEW(self): - return [ - ] - - def ctaPilMWS(self): - return [ - ] - - def ctaPilMWW(self): - return [ - """Mariners should pay close attention to the marine forecast, and consider wind and sea conditions in planning.""", - ] - - def ctaPilMVF(self): - return [ - ] - - def ctaPilNOW(self): - return [ - ] - - def ctaPilNPW(self): - return [ - ] - - def ctaPilNSH(self): - return [ - ] - - def ctaPilNUW(self): - return [ - ] - - def ctaPilOFF(self): - return [ - ] - - def ctaPilPFM(self): - return [ - ] - - def ctaPilPNS(self): - return [ - ] - - def ctaPilRFD(self): - return [ - ] - - def ctaPilRFW(self): - return [ - ] - - def ctaPilRHW(self): - return [ - ] - - def ctaPilSAF(self): - return [ - ] - - def ctaPilSRF(self): - return [ - ] - - def ctaPilSFT(self): - return [ - ] - - def ctaPilSPS(self): - return [ - ] - - def ctaPilSPW(self): - return [ - ] - - def ctaPilTOE(self): - return [ - ] - - def ctaPilVOW(self): - return [ - ] - - def ctaPilWCN(self): - return [ - ] - - def ctaPilWSW(self): - return [ - ] - - def ctaPilZFP(self): - return [ - ] +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# DefaultCallToActions.py +# +# This module contains all of the call to action statements based on the +# VTEC phen/sig code. Sites override CallToActions.py to make modifications +# to the list of call to actions. +# +# Author: Matt Davis (ARX), Mathewson +# Updated 9/24 Shannon for WW simplification +# Updated 11/25 Shannon for AS.O +# Updated 3/27/09 Shannon for tropical and AF.W +# Updated 3/29/10 Shannon for tropical +# Updated 1/12/11 Shannon to remove HI/TI hazards and fix typos +# Updated 7/28/15 yteng to change Red Flag Warning CTA language for DR 17777 +# Updated 2/24/16 randerso for mixed case guidelines +# Updated 6/17/16 dgilling to fix a spelling error. +# Updated 7/15/16 randerso replace ellipses with commas +# Updated 6/23/17 dgilling to remove obsolete winter weather phensigs. +# Updated 7/13/17 dgilling renamed to allow easier overrides. +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. 
+## + + + +class CallToActions(object): + + def pydevDebug(self): + import sys + PYDEVD_PATH='/home/rtran/awipsdr4/Ade/eclipse/plugins/org.python.pydev.debug_1.5.4.2010011921/pysrc' + if sys.path.count(PYDEVD_PATH) < 1: + sys.path.append(PYDEVD_PATH) + + import pydevd + pydevd.settrace() + + # returns the default Call To Action + def defaultCTA(self, phensig): + if phensig in self.ctaDict(): + func = self.ctaDict()[phensig] + items = func() + if len(items) > 0: + return items[0] + return "" #No call to action + + def allCTAs(self, phensig): + if phensig in self.ctaDict(): + func = self.ctaDict()[phensig] + return func() + return [] #no Call to actions + + + def pilCTAs(self, pil): + if pil in self.ctaPilDict(): + func = self.ctaPilDict()[pil] + return func() + return [] #no Call to actions + + + # returns list of generic call to action statements + def genericCTAs(self): + return [ + """Monitor NOAA Weather Radio for the latest information, forecasts, and warnings.""", + """Listen to NOAA Weather Radio or your local media for the latest updates on this situation.""", + ] + + + +##### PLEASE KEEP PHENSIG IN ALPHABETICAL ORDER ####### + +# CallToAction dictionary. The key is the phen/sig, such as "BZ.W". The +# value is a LIST of call to action statements. The default formatter +# uses the first one in the list. Users can add additional entries which +# are accessible in the product editor. The lists are actually function +# calls that the user can override if necessary. 
+# Updated in 9.3 to sync with VTECTable entries + def ctaDict(self): + return { + "AF.W": self.ctaAFW, + "AF.Y": self.ctaAFY, + "AS.O": self.ctaASO, + "AS.Y": self.ctaASY, + "BH.S": self.ctaBHS, + "BW.Y": self.ctaBWY, + "BZ.W": self.ctaBZW, + "CF.A": self.ctaCFA, + "CF.W": self.ctaCFW, + "CF.Y": self.ctaCFY, + "DU.W": self.ctaDUW, + "DU.Y": self.ctaDUY, + "EC.A": self.ctaECA, + "EC.W": self.ctaECW, + "EH.A": self.ctaEHA, + "EH.W": self.ctaEHW, + "FA.A": self.ctaFAA, + "FF.A": self.ctaFFA, + "FG.Y": self.ctaFGY, + "FR.Y": self.ctaFRY, + "FW.A": self.ctaFWA, + "FW.W": self.ctaFWW, + "FZ.A": self.ctaFZA, + "FZ.W": self.ctaFZW, + "GL.A": self.ctaGLA, + "GL.W": self.ctaGLW, + "HF.A": self.ctaHFA, + "HF.W": self.ctaHFW, + "HT.Y": self.ctaHTY, + "HU.A": self.ctaHUA, + "HU.W": self.ctaHUW, + "HW.A": self.ctaHWA, + "HW.W": self.ctaHWW, + "HZ.A": self.ctaHZA, + "HZ.W": self.ctaHZW, + "IS.W": self.ctaISW, + "LE.W": self.ctaLEW, + "LO.Y": self.ctaLOY, + "LS.A": self.ctaLSA, + "LS.W": self.ctaLSW, + "LS.Y": self.ctaLSY, + "LW.Y": self.ctaLWY, + "MF.Y": self.ctaMFY, + "MH.W": self.ctaMHW, + "MH.Y": self.ctaMHY, + "MS.Y": self.ctaMSY, + "RB.Y": self.ctaRBY, + "RP.S": self.ctaRPS, + "SC.Y": self.ctaSCY, + "SE.A": self.ctaSEA, + "SE.W": self.ctaSEW, + "SI.Y": self.ctaSIY, + "SM.Y": self.ctaSMY, + "SR.A": self.ctaSRA, + "SR.W": self.ctaSRW, + "SU.W": self.ctaSUW, + "SU.Y": self.ctaSUY, + "SW.Y": self.ctaSWY, + "TR.A": self.ctaTRA, + "TR.W": self.ctaTRW, + "UP.A": self.ctaUPA, + "UP.W": self.ctaUPW, + "UP.Y": self.ctaUPY, + "WC.A": self.ctaWCA, + "WC.W": self.ctaWCW, + "WC.Y": self.ctaWCY, + "WI.Y": self.ctaWIY, + "WS.A": self.ctaWSA, + "WS.W": self.ctaWSW, + "WW.Y": self.ctaWWY, + "ZF.Y": self.ctaZFY, + } + + +##### PLEASE KEEP PILS IN ALPHABETICAL ORDER ####### + +# CallToAction PIL dictionary. The key is the product pil, such as "HLS". +# The entries are available for a particular product. 
None of these +# are entered automatically by the formatter, but are available through +# the product editor. +# Users can add additional entries which are accessible in the product +# editor. The lists are actually function calls that the user can +# override if necessary. + def ctaPilDict(self): + return { + 'ADR': self.ctaPilADR, + 'AFD': self.ctaPilAFD, + 'AFM': self.ctaPilAFM, + 'AVA': self.ctaPilAVA, + 'AVW': self.ctaPilAVW, + 'CAE': self.ctaPilCAE, + 'CCF': self.ctaPilCCF, + 'CDW': self.ctaPilCDW, + 'CEM': self.ctaPilCEM, + 'CFW': self.ctaPilCFW, + 'CWF': self.ctaPilCWF, + 'EQR': self.ctaPilEQR, + 'EQW': self.ctaPilEQW, + 'ESF': self.ctaPilESF, + 'EVI': self.ctaPilEVI, + 'FFA': self.ctaPilFFA, + 'FRW': self.ctaPilFRW, + 'FWF': self.ctaPilFWF, + 'FWM': self.ctaPilFWM, + 'FWS': self.ctaPilFWS, + 'GLF': self.ctaPilGLF, + 'HLS': self.ctaPilHLS, + 'HMW': self.ctaPilHMW, + 'HWO': self.ctaPilHWO, + 'LAE': self.ctaPilLAE, + 'LEW': self.ctaPilLEW, + 'MWS': self.ctaPilMWS, + 'MVF': self.ctaPilMVF, + 'MWW': self.ctaPilMWW, + 'NOW': self.ctaPilNOW, + 'NPW': self.ctaPilNPW, + 'NSH': self.ctaPilNSH, + 'NUW': self.ctaPilNUW, + 'OFF': self.ctaPilOFF, + 'PFM': self.ctaPilPFM, + 'PNS': self.ctaPilPNS, + 'RFD': self.ctaPilRFD, + 'RFW': self.ctaPilRFW, + 'RHW': self.ctaPilRHW, + 'SAF': self.ctaPilSAF, + 'SRF': self.ctaPilSRF, + 'SFT': self.ctaPilSFT, + 'SPS': self.ctaPilSPS, + 'SPW': self.ctaPilSPW, + 'TOE': self.ctaPilTOE, + 'VOW': self.ctaPilVOW, + 'WCN': self.ctaPilWCN, + 'WSW': self.ctaPilWSW, + 'ZFP': self.ctaPilZFP, + } + + +#------------------------------------------------------------------------ +# CALL TO ACTIONS - winter events +# With the winter weather simplification, specific winter hazard defs are not +# readily available. Forecaster can choose from the specific defs via the defs below. +# Since these statements are so long, we use the descriptive word format.
+#------------------------------------------------------------------------ + def winterWScta(self): + return [ + ("***HEAVY SNOW", """A Winter Storm Warning for heavy snow means severe winter weather conditions are expected or occurring. Significant amounts of snow are forecast that will make travel dangerous. Only travel in an emergency. If you must travel, keep an extra flashlight, food, and water in your vehicle in case of an emergency."""), + ("***SLEET", """A Winter Storm Warning for sleet means that a winter storm system is impacting the area with significant amounts of sleet. Travel is likely to be severely impacted."""), + ("***MIXED PRECIP", """A Winter Storm Warning means significant amounts of snow, sleet, and ice are expected or occurring. Strong winds are also possible. This will make travel very hazardous or impossible."""), + ] + + def winterWWcta(self): + return [ + ("***BLOWING SNOW", """A Winter Weather Advisory for blowing snow means that visibilities will be limited due to strong winds blowing snow around. Use caution when traveling, especially in open areas."""), + ("***SLEET", """A Winter Weather Advisory for sleet means periods of sleet are imminent or occurring. Sleet may cause driving to become extremely dangerous, so be prepared to use caution when traveling."""), + ("***SNOW AND BLOWING SNOW", """A Winter Weather Advisory for |*lake effect*| snow and blowing snow means that visibilities will be limited due to a combination of falling and blowing snow. Use caution when traveling, especially in open areas."""), + ("***SNOW", """A Winter Weather Advisory for snow means that periods of snow will cause primarily travel difficulties. Be prepared for snow covered roads and limited visibilities, and use caution while driving."""), + ("***MIXED PRECIP", """A Winter Weather Advisory means that periods of snow, sleet, or freezing rain will cause travel difficulties. 
Be prepared for slippery roads and limited visibilities, and use caution while driving."""), + ] +#------------------------------------------------------------------------ +# CALL TO ACTIONS - individual functions for each phen/sig +#------------------------------------------------------------------------ +# These are lists of strings. The first one is used in the formatters, +# the others are available through the call to actions menu. + + def ctaAFW(self): + return [ +"""An Ashfall Warning means that significant accumulation of volcanic ash is expected or occurring due to a volcanic eruption or resuspension of previously deposited ash. + +Seal windows and doors. Protect electronics and cover air intakes and open water sources. Avoid driving. Remain indoors unless absolutely necessary. Use extreme caution clearing rooftops of ash. + +Listen to NOAA Weather Radio or local media for further information.""", + ] + + def ctaAFY(self): + return [ + """An Ashfall Advisory means that large amounts of ash will be deposited in the advisory area. Persons with respiratory illnesses should remain indoors to avoid inhaling the ash particles, and all persons outside should cover their mouth and nose with a mask or cloth.""", + ] + + def ctaASO(self): + return [ + """An Air Stagnation Outlook is issued when an extended period of weather conditions are anticipated that could contribute to poor ventilation, and thus potentially poor air quality. Be prepared for these conditions to develop in the next 2 to 3 days, and for the issuance of air stagnation advisories as the situation becomes imminent.""", + ] + + def ctaASY(self): + return [ + """An Air Stagnation Advisory indicates that due to limited movement of an air mass across the advisory area, pollution will increase to dangerous levels. 
Persons with respiratory illness should follow their physician's advice for dealing with high levels of air pollution.""", + ] + + def ctaBHS(self): + return [ +"""A Beach Hazards Statement is issued when threats such as rip currents, longshore currents, sneaker waves and other hazards create life-threatening conditions in the surf zone. Caution should be used when in or near the water.""", + ] + + def ctaBWY(self): + return [ + """A Brisk Wind Advisory means that winds will reach Small Craft Advisory criteria in areas that are primarily ice covered. Moving ice floes could damage small craft.""", + ] + + def ctaBZW(self): + return [ + """A Blizzard Warning means severe winter weather conditions are expected or occurring. Falling and blowing snow with strong winds and poor visibilities are likely. This will lead to whiteout conditions, making travel extremely dangerous. Do not travel. If you must travel, have a winter survival kit with you. If you get stranded, stay with your vehicle.""", + ] + + def ctaCFA(self): + return [ + """A Coastal Flood Watch means that conditions favorable for flooding are expected to develop. Coastal residents should be alert for later statements or warnings, and take action to protect property.""", + ] + + def ctaCFW(self): + return [ + """A Coastal Flood Warning means that flooding is occurring or imminent. Coastal residents in the warned area should be alert for rising water, and take appropriate action to protect life and property.""", + ] + + def ctaCFY(self): + return [ + """A Coastal Flood Advisory indicates that onshore winds and tides will combine to generate flooding of low areas along the shore.""", + ] + + def ctaDUW(self): + return [ + """A Blowing Dust Warning means severely limited visibilities are expected with blowing dust. Travel could become extremely dangerous.
Persons with respiratory problems should make preparations to stay indoors until the storm passes.""", + ] + + def ctaDUY(self): + return [ + """A Blowing Dust Advisory means that blowing dust will restrict visibilities. Travelers are urged to use caution.""", + ] + + def ctaECA(self): + return [ + """An Extreme Cold Watch means that prolonged periods of very cold temperatures are expected. Ensure that outdoor animals have warm shelter, and that children wear a hat and gloves.""", + ] + + def ctaECW(self): + return [ + """An Extreme Cold Warning means that dangerously low temperatures are expected for a prolonged period of time. Frostbite and hypothermia are likely if exposed to these temperatures, so make sure a hat, facemask, and heavy gloves or mittens are available.""", + ] + + def ctaEHA(self): + return [ + """An Excessive Heat Watch means that a prolonged period of hot temperatures is expected. The combination of hot temperatures and high humidity will combine to create a DANGEROUS SITUATION in which heat illnesses are possible. Drink plenty of fluids, stay in an air-conditioned room, stay out of the sun, and check up on relatives and neighbors.""", + """Young children and pets should never be left unattended in vehicles under any circumstances. This is especially true during warm or hot weather when car interiors can reach lethal temperatures in a matter of minutes.""", + ] + + def ctaEHW(self): + return [ + """Take extra precautions, if you work or spend time outside. When possible, reschedule strenuous activities to early morning or evening. Know the signs and symptoms of heat exhaustion and heat stroke. Wear light weight and loose fitting clothing when possible and drink plenty of water.\n\n + To reduce risk during outdoor work, the occupational safety and health administration recommends scheduling frequent rest breaks in shaded or air conditioned environments. Anyone overcome by heat should be moved to a cool and shaded location. 
Heat stroke is an emergency, call 9 1 1.\n\n + An Excessive Heat Warning means that a prolonged period of dangerously hot temperatures will occur. The combination of hot temperatures and high humidity will combine to create a DANGEROUS SITUATION in which heat illnesses are likely. Drink plenty of fluids, stay in an air-conditioned room, stay out of the sun, and check up on relatives and neighbors.\n\n + Young children and pets should never be left unattended in vehicles under any circumstances. This is especially true during warm or hot weather when car interiors can reach lethal temperatures in a matter of minutes.""", + ] + + def ctaFAA(self): + return [ + """A Flood Watch means there is a potential for flooding based on current forecasts.\n\nYou should monitor later forecasts and be alert for possible Flood Warnings. Those living in areas prone to flooding should be prepared to take action should flooding develop.""", + ] + + def ctaFFA(self): + return [ + """A Flash Flood Watch means that conditions may develop that lead to flash flooding. Flash flooding is a VERY DANGEROUS SITUATION.\n\nYou should monitor later forecasts and be prepared to take action should Flash Flood Warnings be issued.""", + ] + + def ctaFGY(self): + return [ +"""A Dense Fog Advisory means visibilities will frequently be reduced to less than one quarter mile. If driving, slow down, use your headlights, and leave plenty of distance ahead of you.""", + ] + + def ctaFRY(self): + return [ + """A Frost Advisory means that widespread frost is expected. Sensitive outdoor plants may be killed if left uncovered.""", + ] + + def ctaFWA(self): + return [ + """A Fire Weather Watch means that critical fire weather conditions are forecast to occur. Listen for later forecasts and possible Red Flag Warnings.""", + ] + + def ctaFWW(self): + return [ + """A Red Flag Warning means that critical fire weather conditions are either occurring now, or will shortly. 
A combination of strong winds, low relative humidity, and warm temperatures can contribute to extreme fire behavior.""", + ] + + def ctaFZA(self): + return [ + """A Freeze Watch means sub-freezing temperatures are possible. These conditions could kill crops and other sensitive vegetation.""", + ] + + def ctaFZW(self): + return [ + """A Freeze Warning means sub-freezing temperatures are imminent or highly likely. These conditions will kill crops and other sensitive vegetation.""", + ] + + def ctaGLA(self): + return [ + """A Gale Watch is issued when the risk of gale force winds of 34 to 47 knots has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", + ] + + def ctaGLW(self): + return [ + """A Gale Warning means winds of 34 to 47 knots are imminent or occurring. Operating a vessel in gale conditions requires experience and properly equipped vessels. It is highly recommended that mariners without the proper experience seek safe harbor prior to the onset of gale conditions.""", + ] + + def ctaHFA(self): + return [ + """A Hurricane Force Wind Watch is issued when the risk of hurricane force winds of 64 knots or greater has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", + ] + + def ctaHFW(self): + return [ + """A Hurricane Force Wind Warning means winds of 64 knots or greater are imminent or occurring. All vessels should remain in port, or take shelter as soon as possible, until winds and waves subside.""", + ] + + def ctaHTY(self): + return [ + """A Heat Advisory means that a period of hot temperatures is expected. The combination of hot temperatures and high humidity will combine to create a situation in which heat illnesses are possible. 
Drink plenty of fluids, stay in an air-conditioned room, stay out of the sun, and check up on relatives and neighbors.\n\n + Take extra precautions, if you work or spend time outside. When possible, reschedule strenuous activities to early morning or evening. Know the signs and symptoms of heat exhaustion and heat stroke. Wear light weight and loose fitting clothing when possible and drink plenty of water.\n\n + To reduce risk during outdoor work, the occupational safety and health administration recommends scheduling frequent rest breaks in shaded or air conditioned environments. Anyone overcome by heat should be moved to a cool and shaded location. Heat stroke is an emergency, call 9 1 1.""", + ] + + def ctaHUA(self): + return [ + """A Hurricane Watch is issued when sustained winds of |* 64 kts or 74 mph *| or higher associated with a hurricane are possible within 48 hours.""", + ] + + def ctaHUW(self): + return [ + """A Hurricane Warning means sustained winds of |* 64 kts or 74 mph *| or higher associated with a hurricane are expected within 36 hours. A Hurricane Warning can remain in effect when dangerously high water or a combination of dangerously high water and exceptionally high waves continue, even though winds may be less than hurricane force.""", + ] + + def ctaHWA(self): + return [ + """A High Wind Watch means there is the potential for a hazardous high wind event. Sustained winds of at least 40 mph, or gusts of 58 mph or stronger may occur. Continue to monitor the latest forecasts.""", + ] + + def ctaHWW(self): + return [ + """A High Wind Warning means a hazardous high wind event is expected or occurring. Sustained wind speeds of at least 40 mph or gusts of 58 mph or more can lead to property damage.""", + ] + + def ctaHZA(self): + return [ + """A Hard Freeze Watch means sub-freezing temperatures are possible. 
These conditions could kill crops and other sensitive vegetation.""", + ] + + def ctaHZW(self): + return [ + """A Hard Freeze Warning means sub-freezing temperatures are imminent or highly likely. These conditions will kill crops and other sensitive vegetation.""", + ] + + def ctaISW(self): + return [ + """An Ice Storm Warning means severe winter weather conditions are expected or occurring. Significant amounts of ice accumulations will make travel dangerous or impossible. Travel is strongly discouraged. Commerce will likely be severely impacted. If you must travel, keep an extra flashlight, food, and water in your vehicle in case of an emergency. Ice accumulations and winds will likely lead to snapped power lines and falling tree branches that add to the danger.""", + ] + + def ctaLEW(self): + return [ + """A Lake Effect Snow Warning means significant amounts of lake-effect snow are forecast that will make travel very hazardous or impossible. Lake-effect snow showers typically align themselves in bands and will likely be intense enough to drop 1 to several inches of snow per hour for several hours. Visibilities vary greatly and can drop to zero within minutes. Travel is strongly discouraged. Commerce could be severely impacted. If you must travel, keep an extra flashlight, food, and water in your vehicle in case of an emergency.""", + ] + + def ctaLOY(self): + return [ + """A Low Water Advisory means water levels are expected to be significantly below average. Mariners should use extreme caution and transit at the slowest safe navigable speed to minimize impact.""", + ] + + def ctaLSA(self): + return [ + """A Lakeshore Flood Watch means that conditions favorable for lakeshore flooding are expected to develop. Residents on or near the shore should take action to protect property, and listen for later statements or warnings.""", + ] + + def ctaLSW(self): + return [ + """A Lakeshore Flood Warning means that flooding is occurring or imminent along the lake. 
Residents on or near the shore in the warned area should be alert for rising water, and take appropriate action to protect life and property.""", + ] + + def ctaLSY(self): + return [ + """A Lakeshore Flood Advisory indicates that onshore winds will generate flooding of low areas along the lakeshore.""", + ] + + def ctaLWY(self): + return [ + """A Lake Wind Advisory indicates that winds will cause rough chop on area lakes. Small boats will be especially prone to capsizing.""", + ] + + def ctaMHW(self): + return [ + """An Ashfall Warning means that significant accumulation of ashfall is expected on vessels. It is recommended that vessels be prepared to take the necessary counter measures before putting to sea or entering the warning area.""", + ] + + def ctaMFY(self): + return [ +"""A Dense Fog Advisory means visibilities will frequently be reduced to less than one mile. Inexperienced mariners, especially those operating smaller vessels should avoid navigating in these conditions. """, + ] + + def ctaMHY(self): + return [ +"""An Ashfall Advisory means that a light accumulation of ashfall is expected on vessels. It is recommended that vessels be prepared to take appropriate counter measures before putting to sea or entering the advisory area.""", + ] + + def ctaMSY(self): + return [ + """A Dense Smoke Advisory means widespread fires will create smoke, limiting visibilities. Inexperienced mariners, especially those operating smaller vessels should avoid navigating in these conditions.""", + ] + + def ctaRBY(self): + return [ + """A Small Craft Advisory for rough bar means that wave conditions are expected to be hazardous to small craft in or near harbor entrances.""", + ] + + def ctaRPS(self): + return [ + """There is a high risk of rip currents. + +Rip currents are powerful channels of water flowing quickly away from shore, which occur most often at low spots or breaks in the sandbar and in the vicinity of structures such as groins, jetties and piers. 
Heed the advice of lifeguards, beach patrol flags and signs. + +If you become caught in a rip current, yell for help. Remain calm, do not exhaust yourself and stay afloat while waiting for help. If you have to swim out of a rip current, swim parallel to shore and back toward the beach when possible. Do not attempt to swim directly against a rip current as you will tire quickly. """, + ] + + def ctaSCY(self): + return [ + """A Small Craft Advisory means that wind speeds of 21 to 33 knots are expected to produce hazardous wave conditions to small craft. Inexperienced mariners, especially those operating smaller vessels should avoid navigating in these conditions.""", + ] + + def ctaSEA(self): + return [ + """A Hazardous Seas Watch is issued when the risk of hazardous seas has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", + ] + + def ctaSEW(self): + return [ + """A Hazardous Seas Warning means hazardous sea conditions are imminent or occurring. Recreational boaters should remain in port, or take shelter until waves subside. Commercial vessels should prepare for rough seas and consider remaining in port or taking shelter in port until hazardous seas subside.""", + ] + + def ctaSIY(self): + return [ + """A Small Craft Advisory for wind means that wind speeds of 21 to 33 knots are expected. Inexperienced mariners, especially those operating smaller vessels should avoid navigating in these conditions.""", + ] + + def ctaSMY(self): + return [ + """A Dense Smoke Advisory means widespread fires will create smoke, limiting visibilities. 
If driving, slow down, use your headlights, and leave plenty of distance ahead of you in case a sudden stop is needed.""", + ] + + def ctaSRA(self): + return [ + """A Storm Watch is issued when the risk of storm force winds of 48 to 63 knots has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", + ] + + def ctaSRW(self): + return [ + """A Storm Warning means winds of 48 to 63 knots are imminent or occurring. Recreational boaters should remain in port, or take shelter until winds and waves subside. Commercial vessels should prepare for very strong winds and dangerous sea conditions, and consider remaining in port or taking shelter in port until winds and waves subside.""", + ] + + def ctaSUW(self): + return [ + """A High Surf Warning indicates that dangerous, battering waves will pound the shoreline. This will result in life-threatening conditions.""", + ] + + def ctaSUY(self): + return [ + """A High Surf Advisory means that high surf will affect beaches in the advisory area, producing localized beach erosion and dangerous swimming conditions.""", + ] + + def ctaSWY(self): + return [ + """A Small Craft Advisory for hazardous seas means that waves are expected to be hazardous to small craft. Mariners should avoid shoaling areas. Long period swell can sharpen into large breaking waves in shoaling areas. It is not unusual for waves to break much farther from shoaling areas than is normally experienced. 
Remember, breaking waves can easily capsize even larger vessels.""", + ] + + def ctaTRA(self): + return [ + """A Tropical Storm Watch means sustained winds of |* 34 to 63 kt or 39 to 73 mph or 63 to 118 km per hr *| are possible due to a tropical storm within 48 hours.""", + ] + + def ctaTRW(self): + return [ + """A Tropical Storm Warning means sustained winds of |* 34 to 63 kt or 39 to 73 mph or 63 to 118 km per hr *| are expected due to a tropical storm within 36 hours.""", + ] + + def ctaUPA(self): + return [ + """A Heavy Freezing Spray Watch is issued when the risk of heavy freezing spray has significantly increased, but the specific timing and/or location is still uncertain. It is intended to provide additional lead time for mariners who may wish to consider altering their plans.""", + ] + + def ctaUPW(self): + return [ + """A Heavy Freezing Spray Warning means heavy freezing spray is expected to rapidly accumulate on vessels. These conditions can be extremely hazardous to navigation. It is recommended that mariners not trained to operate in these conditions or vessels not properly equipped to do so, remain in port or avoid the warning area.""", + ] + + def ctaUPY(self): + return [ + """A Freezing Spray Advisory means that light to moderate accumulation of ice is expected on vessels. Operating a vessel in freezing spray can be hazardous. It is recommended that vessels be prepared to take appropriate counter measures before putting to sea or entering the advisory area.""", + ] + + def ctaWCA(self): + return [ + """A Wind Chill Watch means there is the potential for a combination of very cold air and strong winds to create dangerously low wind chill values. Monitor the latest forecasts and warnings for updates on this situation.""", + ] + + def ctaWCW(self): + return [ + """A Wind Chill Warning means the combination of very cold air and strong winds will create dangerously low wind chill values.
This will result in frost bite and lead to hypothermia or death if precautions are not taken.""", + ] + + def ctaWCY(self): + return [ + """A Wind Chill Advisory means that very cold air and strong winds will combine to generate low wind chills. This will result in frost bite and lead to hypothermia if precautions are not taken. If you must venture outdoors, make sure you wear a hat and gloves.""", + ] + + def ctaWIY(self): + return [ + """A Wind Advisory means that winds of 35 mph are expected. Winds this strong can make driving difficult, especially for high profile vehicles. Use extra caution.""", + ] + + def ctaWSA(self): + return [ + """A Winter Storm Watch means there is a potential for significant snow, sleet, or ice accumulations that may impact travel. Continue to monitor the latest forecasts.""", + ] + + def ctaWSW(self): + return [ + """|*Choose the appropriate CTA below and delete the rest*| + +A Winter Storm Warning for heavy snow means severe winter weather conditions are expected or occurring. Significant amounts of snow are forecast that will make travel dangerous. Only travel in an emergency. If you must travel, keep an extra flashlight, food, and water in your vehicle in case of an emergency. + +A Winter Storm Warning means significant amounts of snow, sleet, and ice are expected or occurring. Strong winds are also possible. This will make travel very hazardous or impossible. + +A Winter Storm Warning for sleet means that a winter storm system is impacting the area with significant amounts of sleet. Travel is likely to be severely impacted.""", + ] + + def ctaWWY(self): + return [ + """|*Choose the appropriate CTA below and delete the rest*| + +A Winter Weather Advisory means that periods of snow, sleet, or freezing rain will cause travel difficulties. Be prepared for slippery roads and limited visibilities, and use caution while driving. 
+ +A Winter Weather Advisory for blowing snow means that visibilities will be limited due to strong winds blowing snow around. Use caution when traveling, especially in open areas. + +A Winter Weather Advisory for sleet means periods of sleet are imminent or occurring. Sleet may cause driving to become extremely dangerous, so be prepared to use caution when traveling. + +A Winter Weather Advisory for |*lake effect*| snow and blowing snow means that visibilities will be limited due to a combination of falling and blowing snow. Use caution when traveling, especially in open areas. + +A Winter Weather Advisory for snow means that periods of snow will cause primarily travel difficulties. Be prepared for snow covered roads and limited visibilities, and use caution while driving.""", + + ] + + def ctaZFY(self): + return [ + """A Freezing Fog Advisory means visibilities will frequently be reduced to less than one quarter mile. If driving, slow down, use your headlights, and leave plenty of distance ahead of you. Also, be alert for frost on bridge decks causing slippery roads.""", + ] + + +#------------------------------------------------------------------------ +# CALL TO ACTIONS - individual functions for each product pil +#------------------------------------------------------------------------ +# These are lists of strings. These are available through the call to +# actions menu. 
+ + def ctaPilADR(self): + return [ + ] + + def ctaPilAFD(self): + return [ + ] + + def ctaPilAFM(self): + return [ + ] + + def ctaPilAVA(self): + return [ + ] + + def ctaPilAVW(self): + return [ + ] + + def ctaPilCAE(self): + return [ + ] + + def ctaPilCCF(self): + return [ + ] + + def ctaPilCDW(self): + return [ + ] + + def ctaPilCEM(self): + return [ + ] + + def ctaPilCFW(self): + return [("***RIP CURRENTS CTA", """Rip currents are powerful channels of water flowing quickly away from shore, which occur most often at low spots or breaks in the sandbar and in the vicinity of structures such as groins, jetties and piers. Heed the advice of lifeguards, beach patrol flags and signs. + +If you become caught in a rip current, yell for help. Remain calm, do not exhaust yourself and stay afloat while waiting for help. If you have to swim out of a rip current, SWIM PARALLEL TO SHORE and back toward the beach when possible. Do not attempt to swim directly against a rip current as you will tire quickly."""), + ("***LONGSHORE CURRENTS CTA", """Longshore currents commonly occur when waves approach the shoreline at an angle. 
They can be particularly dangerous near a jetty or pier."""), + ("***SNEAKER WAVES CTA", """Add CTA here."""), + ("***RED TIDE CTA", """Add CTA here"""), + ("***SEA NETTLES CTA", """Add CTA here"""), + ("***TSUNAMI DEBRIS CTA", """Add CTA here"""), + ("***OTHER BEACH HAZARDS CTA", """Add CTA here"""), + ] + + def ctaPilCWF(self): + return [ + ] + + def ctaPilEQR(self): + return [ + ] + + def ctaPilESF(self): + return [ + ] + + def ctaPilEQW(self): + return [ + ] + + def ctaPilEVI(self): + return [ + ] + + def ctaPilFFA(self): + return [ + ] + + def ctaPilFRW(self): + return [ + ] + + def ctaPilFWF(self): + return [ + ] + + def ctaPilFWM(self): + return [ + ] + + def ctaPilFWS(self): + return [ + ] + + def ctaPilGLF(self): + return [ + ] + + def ctaPilHLS(self): + return [("***MINOR FLOODING", """Residents can expect minor flooding of roads, especially those with poor drainage. Known intersections with very poor drainage may have water levels up to 3 feet. Other poor drainage areas will have water rises of 1 foot."""), + ("***WIDESPREAD FLOODING", """Residents can expect widespread flooding. In poor drainage areas, minor to moderate property damage is expected, and several main thoroughfares may be closed. Known intersections with very poor drainage may have water levels up to 5 feet. Other poor drainage areas will have water rises up to 3 feet. Levels will rise 1 foot elsewhere."""), + """Small streams will surpass bank full, but only for one hour or less.""", + ("***WIDESPREAD STREAM FLOODING", """Most small streams and creeks will surpass bank full, for up to 3 hours. Larger rivers will rise, and those which respond quickly to very heavy rain may briefly exceed flood stage."""), + ("***PRIOR NOTICE OF EXTENSIVE AREAL FLOODING", """Extensive flooding is expected |**today or tonight or next day**|. \n\n Persons living near or in poor drainage locations should prepare for possible evacuation later |**today or tonight or next day**|. 
In these areas, significant property damage will occur, and some power outages are likely. Minor property damage is possible elsewhere. \n\nWater levels in very poor drainage areas will approach 7 feet. Other poor drainage locations will have rises between 3 and 5 feet. Elsewhere, expect water rises to near 2 feet. Numerous main roads will be closed. Driving is highly discouraged except for emergencies."""), + ("***DANGEROUS FLOODING", """This is a dangerous flood situation! \n\nPersons living in or near poor drainage areas should evacuate immediately. Significant property damage will occur in these locations. Minor property damage is possible in other areas. Some power outages are expected. \n\n Water levels in very poor drainage areas will approach 7 feet. Other poor drainage locations will have rises between 3 and 5 feet. Elsewhere, expect water rises to near 2 feet. Numerous main roads will be closed. Driving is highly discouraged until well after flood waters recede. \n\n Move to safety immediately."""), + ("***PRIOR NOTICE OF EXTENSIVE RIVER FLOODING", """Extensive flooding is expected |**today or tonight or next day**|. \n\n By |**time**|, all small streams and creeks will have surpassed bank full. These conditions will last between 3 and 6 hours. Some streams will exceed their banks by several feet and may flood nearby homes. Evacuations are possible.\n\n Rivers in affected areas will rise, with some reaching or exceeding flood stage. Normally quick-rising rivers will exceed flood stage by several feet, flooding homes along the riverside. Pastures will also flood, but livestock losses should be minimal. Several secondary roads and bridges will be washed out. Driving is highly discouraged."""), + ("***DANGEROUS RIVER FLOODING", """This is a DANGEROUS SITUATION! \n\nAll streams, creeks, and some rivers will surpass bankfull, for between 3 and 6 hours. Some streams will exceed their banks by several feet, flooding nearby homes. Evacuations are possible. 
\n\n Rivers in affected areas will rise, with some reaching or exceeding flood stage. Normally quick rising rivers will exceed flood stage by several feet, flooding homes along the riverside. Pastures will also flood, but livestock losses should be minimal."""), + ("***CATASTROPHIC FLOODING EXPECTED", """Catastrophic flooding is expected later |**edit day or night periods**|. \n\n A state of emergency has been issued |**by agency**| for |**edit area here**|. \n\n Residents in flood prone areas should rush to completion preparations to protect their property, then move to a place of safety, this |**edit time period**|. Mandatory evacuations are underway. \n\n |** opening paragraph describing antecedent rainfall and expected heavier rainfall **| \n\n life threatening flooding is likely! In urban areas, extensive property damage will occur in all poor drainage areas, with moderate to major property damage elsewhere. Widespread power outages are likely. \n\n In rural locations, all streams, creeks, and arroyos will surpass bank full for more than 6 hours. Each will exceed their banks by several feet, flooding homes, even those up to one half mile away from the banks. \n\n In all areas, hundreds of roads will flood. Dozens of secondary roads may become washed out in rural areas. Numerous low water bridges will likely wash out as well. \n\n Water levels will exceed 5 feet in all poor drainage urban areas, and average at least 2 feet elsewhere. All rivers in affected areas will rise, and most will exceed flood stage. Quick rising rivers will exceed flood stage, and reach near record crests, causing inundation of nearby homes. In rural locations, extensive pastureland flooding will occur as water levels rise to 2 feet or more. Widespread livestock losses are likely."""), + ("***CATASTROPHIC FLOODING OCCURRING", """Catastrophic flooding is occurring in |**edit area**|. 
\n\n States of emergency remain in effect for the following locations: \n\n |**edit counties and cities here**| \n\n residents remain prohibited from venturing out. Law enforcement and |**military support group edit here**| evacuations are now underway. \n\n This remains a life threatening situation! Extensive property damage is occurring in all poor drainage areas. Elsewhere, moderate to major property damage is occurring. Hundreds of roads are closed, and some are likely damaged. Several area bridges are washed out. Streams, creeks, and arroyos are several feet above bank full, and will remain so for hours. Many rivers are nearing flood stage, and some have already surpassed it. Homes near these rivers are likely flooded. Flood waters will continue for several more hours. \n\n Water levels are in excess of 5 feet in all poor drainage areas. Elsewhere, average water levels are at least 2 feet. Power outages are widespread. \n\n Stay tuned to NOAA Weather Radio for further information on this dangerous flood. Heed all evacuation orders from law enforcement or military personnel."""), + ("***GENERATOR PRECAUTIONS", """If you plan on using a portable generator, be sure to observe all safety precautions to avoid carbon monoxide poisoning, electrocution, or fire. Be sure to operate your generator in a dry outdoor area away from windows, doors and vents. Carbon monoxide poisoning deaths can occur due to improperly located portable generators!"""), + ("***FLAMMABLES PRECAUTION", """Flammable liquids such as gasoline or kerosene should only be stored outside of the living areas in properly labeled, non glass safety containers. 
Do not store in an attached garage as gas fumes can travel into the home and potentially ignite, especially if the home has natural or propane gas lines that could become damaged during the hurricane."""), + ("***HURRICANE WARNING DEFINITION", """A Hurricane Warning means sustained winds of |* 64 kts or 74 mph *| or higher associated with a hurricane are expected within 36 hours. A Hurricane Warning can remain in effect when dangerously high water or a combination of dangerously high water and exceptionally high waves continue, even though winds may be less than hurricane force."""), + ("***HURRICANE WATCH DEFINITION", """A Hurricane Watch is issued when sustained winds of |* 64 kts or 74 mph *| or higher associated with a hurricane are possible within 48 hours."""), + ("***HURRICANE WIND WARNING DEFINITION", """A Hurricane Wind Warning is issued when a landfalling hurricane is expected to spread hurricane force winds well inland. Serious property damage, power outages, blowing debris, and fallen trees can be expected as winds reach or exceed 74 mph."""), + ("***HURRICANE WIND WATCH DEFINITION", """A Hurricane Wind Watch is issued when a landfalling hurricane is expected to spread hurricane force winds well inland within the next 48 hours. Prepare for winds in excess of 74 mph."""), + ("***TROPICAL STORM WARNING DEFINITION", """A Tropical Storm Warning means sustained winds of |* 34 to 63 kt or 39 to 73 mph or 63 to 118 km per hr *| are expected due to a tropical cyclone within 36 hours."""), + ("***TROPICAL STORM WIND WARNING DEFINITION", """A Tropical Storm Wind Warning means winds of 39 to 73 mph are expected due to a landfalling hurricane or tropical storm. 
Winds of this magnitude are likely to cause sporadic power outages, fallen trees, minor property damage, and dangerous driving conditions for high profile vehicles."""), + ("***TROPICAL STORM WATCH DEFINITION", """A Tropical Storm Watch means sustained winds of |* 34 to 63 kt or 39 to 73 mph or 63 to 118 km per hr *| are possible due to a tropical cyclone within 48 hours."""), + ("***TROPICAL STORM WIND WATCH DEFINITION", """A Tropical Storm Wind Watch means winds of 39 to 73 mph are expected due to a landfalling hurricane or tropical storm within 48 hours."""), + ] + + def ctaPilHMW(self): + return [ + ] + + def ctaPilHWO(self): + return [ + ] + + def ctaPilLAE(self): + return [ + ] + + def ctaPilLEW(self): + return [ + ] + + def ctaPilMWS(self): + return [ + ] + + def ctaPilMWW(self): + return [ + """Mariners should pay close attention to the marine forecast, and consider wind and sea conditions in planning.""", + ] + + def ctaPilMVF(self): + return [ + ] + + def ctaPilNOW(self): + return [ + ] + + def ctaPilNPW(self): + return [ + ] + + def ctaPilNSH(self): + return [ + ] + + def ctaPilNUW(self): + return [ + ] + + def ctaPilOFF(self): + return [ + ] + + def ctaPilPFM(self): + return [ + ] + + def ctaPilPNS(self): + return [ + ] + + def ctaPilRFD(self): + return [ + ] + + def ctaPilRFW(self): + return [ + ] + + def ctaPilRHW(self): + return [ + ] + + def ctaPilSAF(self): + return [ + ] + + def ctaPilSRF(self): + return [ + ] + + def ctaPilSFT(self): + return [ + ] + + def ctaPilSPS(self): + return [ + ] + + def ctaPilSPW(self): + return [ + ] + + def ctaPilTOE(self): + return [ + ] + + def ctaPilVOW(self): + return [ + ] + + def ctaPilWCN(self): + return [ + ] + + def ctaPilWSW(self): + return [ + ] + + def ctaPilZFP(self): + return [ + ] diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/DiscretePhrases.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/DiscretePhrases.py index 
7669abd2a1..ac44bd2669 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/DiscretePhrases.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/DiscretePhrases.py @@ -1,2090 +1,2090 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# DiscretePhrases.py -# Methods for producing text forecast from SampleAnalysis statistics. 
-# -# Author: hansen -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 04/28/2015 4027 randerso Changes for mixed case -# Added sort for consistent ordering of multiple timezones -# 10/02/2017 20335 ryu Add storm surge w/w to tpcEvents so -# no timing phrase appear in headline -# -## - -## -# This is a base file that is not intended to be overridden. -## - -import PhraseBuilder -import ModuleAccessor -import types, copy, time, string, sets, os -import SampleAnalysis -import TimeRange, AbsTime - -class DiscretePhrases(PhraseBuilder.PhraseBuilder): - def __init__(self): - PhraseBuilder.PhraseBuilder.__init__(self) - - ### Local non-VTEC headlines. - # To sample the Hazards grid and produce locally generated headlines - # independent of the VTEC Headlines structure, follow these steps: - # - # 1. Put an "allowedHeadlines" method into your product with the - # same format as the "allowedHazards" method. - # - # 2. Generate the headlines using "generateProduct" in, for example, the - # _preProcessArea method: - - # headlines = self.generateProduct("Headlines", argDict, area = editArea, - # areaLabel=areaLabel, - # timeRange = self._timeRange) - # fcst = fcst + headlines - # - # 3. If desired, override "headlinesTiming" to adjust or remove the time descriptors - # for the headline. - # - - def headlinesTiming(self, tree, node, key, timeRange, areaLabel, issuanceTime): - # Return - # "startPhraseType" and "endPhraseType" - # Each can be one of these phraseTypes: - # "EXPLICIT" will return words such as "5 PM" - # "FUZZY4" will return words such as "THIS EVENING" - # "DAY_NIGHT_ONLY" use only weekday or weekday "NIGHT" e.g. - # "SUNDAY" or "SUNDAY NIGHT" or "TODAY" or "TONIGHT" - # Note: You will probably want to set both the - # startPhraseType and endPhraseType to DAY_NIGHT_ONLY to - # have this work correctly. 
- # "NONE" will result in no words - # OR a method which takes arguments: - # issueTime, eventTime, timeZone, and timeType - # and returns: - # phraseType, (hourStr, hourTZstr, description) - # You can use "timingWordTableFUZZY8" as an example to - # write your own method. - # - # If you simply return None, no timing words will be used. - - # Note that you can use the information given to determine which - # timing phrases to use. In particular, the "key" is the Hazard - # key so different local headlines can use different timing. - # - startPhraseType = "FUZZY" - endPhraseType = "FUZZY" - - #Example code - #startTime = timeRange.startTime().unixTime() - #if startTime <= issuanceTime + 12 * 3600: # 12 hours past issuance - #startPhraseType = "EXPLICIT" - #endTime = timeRange.endTime().unixTime() - #if endTime <= issuanceTime + 12 * 3600: # 12 hours past issuance - #endPhraseType = "EXPLICIT" - - #return startPhraseType, endPhraseType - return None, None - - def Headlines(self): - return { - "type": "component", - "lineLength": 69, - "methodList": [ - self.assembleChildWords, - self.wordWrap, - ], - "analysisList":[ - ("Hazards", - SampleAnalysis.SampleAnalysis().discreteTimeRangesByKey), - ], - - "phraseList":[ - self.headlines_phrase, - ], - "autoSentence": 0, - } - - def headlines_phrase(self): - return { - "setUpMethod": self.headlines_setUp, - "wordMethod": self.headlines_words, - "phraseMethods": [self.assembleSubPhrases, - self.postProcessPhrase, - ] - } - - def headlines_setUp(self, tree, node): - self.subPhraseSetUp(tree, node, [], self.scalarConnector) - return self.DONE() - - def headlines_words(self, tree, node): - "Create the phrase for local headlines from the Hazards grids" - - words = "" - areaLabel = tree.getAreaLabel() - headlines = tree.stats.get("Hazards", tree.getTimeRange(), - areaLabel, mergeMethod = "List") - if headlines is None: - return self.setWords(node, "") - - # Sort the headlines by startTime - temp = [] - for h, tr in headlines: - 
temp.append((tr.startTime(), (h, tr))) - temp.sort() - newList = [] - for t in temp: - newList.append(t[1]) - headlines = newList - - # Fetch the set of local headlines allowed for this product - allowedHeadlines = [] - for key, allActions, cat in self.allowedHeadlines(): - allowedHeadlines.append(key) - issuanceTime = self._issueTime.unixTime() - - from com.raytheon.uf.viz.core.localization import LocalizationManager - siteId = LocalizationManager.getInstance().getSite() - for key, tr in headlines: # value == list of subkeys - if key not in allowedHeadlines: - continue - - timeDescriptor = self.headlinesTimeRange_descriptor( - tree, node, key, tr, areaLabel, issuanceTime) - from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey - headlineWords = DiscreteKey.discreteDefinition(siteId).keyDesc( - "Hazards" + "_SFC", key) - if headlineWords == "": # Don't process the "" key - continue - hookWords = self.hazard_hook(tree, node, key, "", "",tr.startTime(), tr.endTime()) - headlinePhrase = "..." 
+ headlineWords + timeDescriptor +hookWords + "...\n" - words = words + headlinePhrase - - words = self.convertToUpper(words) - return self.setWords(node, words) - - def headlinesTimeRange_descriptor(self, tree, node, key, tr, areaLabel, issuanceTime): - # Return a time range descriptor for the headline - # This method can be overridden to customize timing descriptors for - # non-VTEC local headlines - - headlinesTiming = self.headlinesTiming(tree, node, key, tr, - areaLabel, issuanceTime) - if headlinesTiming is None: - return "" - try: - startPhraseType, endPhraseType = headlinesTiming - except: - # For backward compatibility -- the startBoundary argument - # was formerly part of the headlinesTiming method - startPhraseType, endPhraseType, startBoundary = headlinesTiming - startTime = tr.startTime().unixTime() - endTime = tr.endTime().unixTime() - tree.combinations = self._combinations - areaList = self.getCurrentAreaNames(tree, areaLabel) - hazRec = { - 'id': areaList, - 'startTime': startTime, - 'endTime': endTime, - 'act': "NEW", - } - if startPhraseType == "FUZZY": - startPhraseType = "FUZZY4" - if endPhraseType == "FUZZY": - endPhraseType = "FUZZY4" - - phrase = self.getTimingPhrase( - hazRec, issuanceTime, startPhraseType, endPhraseType) - return " " + phrase - - -############################################################################################ - ### WARNING!!!! VTEC CODE -- DO NOT OVERRIDE ANY CODE BELOW THIS POINT!!!!! - - ### IF YOU USE A METHOD BELOW THIS POINT AND WANT TO ALTER IT, - ### COPY IT TO YOUR LOCAL FILE AND RE-NAME IT. THEN OVERRIDE ANY - ### METHODS THAT CALL IT AND USE THE NEW NAME. - - def getHazards(self, argDict, areaList): - # This is for setting up the argDict hazards entry AFTER the TextFormatter - # has created the Hazards Table. - # This is necessary for products that allow the user to specify through - # the GUI which edit areas will be sampled. 
- # Set up edit areas - editAreas = [] - for area, label in areaList: - editAreas.append([area]) - # Process the hazards - import HazardsTable - hazards = HazardsTable.HazardsTable( - argDict["ifpClient"], editAreas, self._pil[0:3], - self.filterMethod, argDict["databaseID"], - self._fullStationID, - activeTableName = argDict['vtecActiveTable'], - vtecMode=argDict['vtecMode'], - dataMgr=argDict['dataMgr']) - # Store hazards object for later use - argDict["hazards"] = hazards - - def Hazards(self): - return { - "type": "component", - "lineLength": 66, - "methodList": [ - self.assembleChildWords, - self.wordWrap, - ], - "analysisList":[], - - "phraseList":[ - self.hazards_phrase, - ], - "autoSentence": 0, - } - - def hazards_phrase(self): - return { - "setUpMethod": self.hazards_setUp, - "wordMethod": self.hazards_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def hazards_setUp(self, tree, node): - self.subPhraseSetUp(tree, node, [], self.scalarConnector) - - return self.DONE() - - def hazards_words(self, tree, node): - "Create the phrase for any watches, warnings or advisories" - hazardsTable = self._hazards - tree.combinations = self._combinations - if self._combinations is None: - areaLabel = None - else: - areaLabel = tree.getAreaLabel() - - editAreas = self.getCurrentAreaNames(tree, areaLabel) - try: - # Remove suffixes if necessary - if self._editAreaSuffix is not None: - editAreas = self.removeSuffixes(editAreas, self._editAreaSuffix) - except: - pass - - - # Check for a particular entry in argDict that is inserted when - # we're formatting hazards type products like WSW, NPW. 
- argDict = tree.get("argDict") - # look for segmentAreas in the argDict and override editAreas - if argDict.has_key("segmentAreas"): - editAreas = argDict['segmentAreas'] # override editAreas - - words = self.getHazardString(tree, node, editAreas) - - words = self.convertToUpper(words) # convert to upper case - - return self.setWords(node, words) - - ##### VTEC methods ##### - - # Return just a simple list of hazards in the form phen.sig (WS.W) - def getAllowedHazardList(self): - allowedHazardList = self.allowedHazards() - - hazardList = [] - for h in allowedHazardList: - if type(h) is types.TupleType: - hazardList.append(h[0]) - else: - hazardList.append(h) - - return hazardList - - # Return the list of action codes given the hazard, if hazard not found - # or actions not specified, return "ALL codes" - - def getAllowedActionCodes(self, hazard): - allowedHazardList = self.allowedHazards() - - for h in allowedHazardList: - if type(h) is types.TupleType: - if h[0] == hazard: - return h[1] - return ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", "EXP"] - - # Returns the words to be used in the headline for 'act' field in the - # specified hazard. - def actionControlWord(self, hazard, issuanceTime): - if not hazard.has_key('act'): - print "Error! No field act in hazard record." - return "" - - actionCode = hazard['act'] - if actionCode in ["NEW", "EXA", "EXB"]: - return "in effect" - elif actionCode == "CON": - return "remains in effect" - elif actionCode == "CAN": - return "is cancelled" - elif actionCode == "EXT": - return "now in effect" - elif actionCode == "EXP": - deltaTime = issuanceTime - hazard['endTime'] - if deltaTime >= 0: - return "has expired" - else: - return "will expire" - elif actionCode == "UPG": - return "no longer in effect" - else: - print actionCode, "not recognized in actionControlWord." 
- return "" - - # - # Determine the category for Hazard overrides - # - def getHazardCategory(self, hazard): - allowedHazardList = self.allowedHazards() - - for h in allowedHazardList: - if h[0] == hazard: - if len(h) == 3: - if type(h[2]) is types.StringType: - return h[2] - elif len(h) == 4: - if type(h[3]) is types.StringType: - return h[3] - - return None - # - # Determine the priority of a Hazard (lower count = higher priority) - # - - def getHazardImportance(self, hazard): - allowedHazardList = self.allowedHazards() - count = 0 - for h in allowedHazardList: - count = count + 1 - if h[0] == hazard: - return count - - return 1000 # no priority - - - # This method uses the allowedHazards() list to determine which - # hazardTable entry has the most important priority and removes - # the entry or piece thereof in place. Returns 1 if something was - # modified and 0 otherwise - def fixHazardConflict(self, index1, index2, hazardTable): - - allowedHazardList = self.getAllowedHazardList() - phen1 = hazardTable[index1]['phen'] - phen2 = hazardTable[index2]['phen'] - sig1 = hazardTable[index1]['sig'] - sig2 = hazardTable[index2]['sig'] - act1 = hazardTable[index1]['act'] - act2 = hazardTable[index2]['act'] - haz1 = phen1 + "." + sig1 - haz2 = phen2 + "." 
+ sig2 - ignoreList = ['CAN', 'EXP', 'UPG'] - if haz1 in allowedHazardList and haz2 in allowedHazardList and \ - act1 not in ignoreList and act2 not in ignoreList: - - - if (self.getHazardCategory(haz1) != self.getHazardCategory(haz2)) or \ - self.getHazardCategory(haz1) is None or \ - self.getHazardCategory(haz2) is None: - return 0 - - else: - return 0 # no changes were made - - if self.getHazardImportance(haz1) < self.getHazardImportance(haz2): - lowIndex = index2 - highIndex = index1 - else: - lowIndex = index1 - highIndex = index2 - - # - # Added to prevent a current lower TO.A from overiding a higher SV.A - # - - if hazardTable[lowIndex]['phen'] == 'SV' and \ - hazardTable[lowIndex]['sig'] == 'A' and \ - hazardTable[highIndex]['phen'] == 'TO' and \ - hazardTable[highIndex]['sig'] == 'A': - if (int(hazardTable[lowIndex]['etn']) > int(hazardTable[highIndex]['etn']) and - (int(hazardTable[highIndex]['etn']) - int(hazardTable[lowIndex]['etn'])) > 50): - lowIndexTemp = lowIndex - lowIndex = highIndex - highIndex = lowIndexTemp - - lowStart = hazardTable[lowIndex]['startTime'] - lowEnd = hazardTable[lowIndex]['endTime'] - highStart = hazardTable[highIndex]['startTime'] - highEnd = hazardTable[highIndex]['endTime'] - - # first check to see if high pri completely covers low pri - if highStart <= lowStart and highEnd >= lowEnd: # remove low priority - del hazardTable[lowIndex] - - # next check to see if high pri lies within low pri - elif lowStart <= highStart and lowEnd >= highEnd: # high pri in middle - if lowStart < highStart: - h = copy.deepcopy(hazardTable[lowIndex]) - # trim the early piece - hazardTable[lowIndex]['endTime'] = highStart - if lowEnd > highEnd: - # make a new end piece - h['startTime'] = highEnd - hazardTable.append(h) - elif lowStart == highStart: - hazardTable[lowIndex]['startTime'] = highEnd - - elif highEnd >= lowStart: - hazardTable[lowIndex]['startTime'] = highEnd # change low start - - elif highStart <= lowEnd: - 
hazardTable[lowIndex]['endTime'] = highStart # change low end - - return 1 - - - # This method removes all entries of the specified hazardTable that - # are not in the allowedHazards list. - def filterAllowedHazards(self, hazardTable): - - newTable = [] - allowedHazardList = self.getAllowedHazardList() - - hazStr = "" - for i in range(len(hazardTable)): - if hazardTable[i]['sig'] != "": # VTEC - hazStr = hazardTable[i]['phen'] + "." + hazardTable[i]['sig'] - else: #non-VTEC - hazStr = hazardTable[i]['phen'] - - if hazStr in allowedHazardList: - newTable.append(hazardTable[i]) - return newTable - - # This method searches all entries of the specified hazardTable for - # entries matching the specified zone. Then for each entry it finds - # it looks for a conflicting entry in time. If it finds one, it calls - # fixHazardsConflict, which fixes the table and then calls itself again - # recursively with the fixed table. If it doesn't find one it returns - # None. - def filterZoneHazards(self, zone, hazardTable): - for i in range(len(hazardTable)): - if hazardTable[i]['id'] == zone: - for j in range(len(hazardTable)): - if hazardTable[j]['id'] == zone and i != j: - tr1 = TimeRange.TimeRange( - AbsTime.AbsTime(int(hazardTable[i]['startTime'])), - AbsTime.AbsTime(int(hazardTable[i]['endTime']))) - tr2 = TimeRange.TimeRange( - AbsTime.AbsTime(int(hazardTable[j]['startTime'])), - AbsTime.AbsTime(int(hazardTable[j]['endTime']))) - if tr1.overlaps(tr2): - if self.fixHazardConflict(i, j, hazardTable): - self.filterZoneHazards(zone, hazardTable) - return None - return None - - # Main method that drives the code to filter hazards that conflict in time. - # Only one hazard of the same phenomenon is allowed per zone per time. - # This method processes the table, removing any time conflicts, so the one - # hazard per zone, time rule is adhered to. 
- def filterMethod(self, hazardTable, allowedHazardsOnly=False): - # Remove hazards not in allowedHazards list - newTable = self.filterAllowedHazards(hazardTable) - if allowedHazardsOnly: - return newTable - - # get a raw list of unique edit areas - zoneList = [] - for t in newTable: - if t['id'] not in zoneList: - zoneList.append(t['id']) - - for zone in zoneList: - # Remove lower priority hazards of the same type - self.filterZoneHazards(zone, newTable) - - return newTable - - - # function returns the timing phrase to use for the area, hazard, - # and issuance time. Can force the type of timing phrase given the - # stype and etype. The stype/etype may be: NONE, EXPLICIT, FUZZY4, - # FUZZY8, or DAY_NIGHT_ONLY. Returns phrase like: - # FROM 4 PM MST THIS AFTERNOON THROUGH TUESDAY EVENING - def getTimingPhrase(self, hazRec, issueTime, stype=None, etype=None): - #Returns the timing phrase to use - - # Get the timing type - if stype is None or etype is None: - stype, etype = self.getTimingType(hazRec, issueTime) - - # Get the time zones for the areas - timeZones = self.hazardTimeZones(hazRec['id']) - - # Get the starting time - stext = [] - if type(stype) is types.MethodType: - for tz in timeZones: - newType, info = stype( - issueTime, hazRec['startTime'], tz, "start") - if info is not None and info not in stext: - stext.append(info) - stype = newType - elif stype == "EXPLICIT": - for tz in timeZones: - info = self.timingWordTableEXPLICIT(issueTime, - hazRec['startTime'], tz, "start") - if info not in stext: - stext.append(info) - elif stype == "FUZZY4": - for tz in timeZones: - info = self.timingWordTableFUZZY4(issueTime, - hazRec['startTime'], tz, "start") - if info not in stext: - stext.append(info) - elif stype == "FUZZY8": - for tz in timeZones: - info = self.timingWordTableFUZZY8(issueTime, - hazRec['startTime'], tz, "start") - if info not in stext: - stext.append(info) - elif stype == "DAY_NIGHT_ONLY": - for tz in timeZones: - info = 
self.timingWordTableDAYNIGHT(issueTime, - hazRec['startTime'], tz, "start") - if info not in stext: - stext.append(info) - - # Get the ending time - etext = [] - if type(etype) is types.MethodType: - for tz in timeZones: - newType, info = etype( - issueTime, hazRec['endTime'], tz, "end") - if info is not None and info not in etext: - etext.append(info) - etype = newType - elif etype == "EXPLICIT": - for tz in timeZones: - info = self.timingWordTableEXPLICIT(issueTime, - hazRec['endTime'], tz, "end") - if info not in etext: - etext.append(info) - elif etype == "FUZZY4": - for tz in timeZones: - info = self.timingWordTableFUZZY4(issueTime, - hazRec['endTime'], tz, "end") - if info not in etext: - etext.append(info) - elif etype == "FUZZY8": - for tz in timeZones: - info = self.timingWordTableFUZZY8(issueTime, - hazRec['endTime'], tz, "end") - if info not in etext: - etext.append(info) - elif etype == "DAY_NIGHT_ONLY": - for tz in timeZones: - info = self.timingWordTableDAYNIGHT(issueTime, - hazRec['endTime'], tz, "end") - if info not in etext: - etext.append(info) - - # timing connection types - startPrefix, endPrefix = self.getTimingConnectorType((stype, etype), - hazRec['act']) - - # get the timing phrase - phrase = self.calculateTimingPhrase(stype, etype, stext, etext, - startPrefix, endPrefix) - - return phrase - - # calculates the timing phrase based on the timing type, the calculated - # timing words, and the prefixes - def calculateTimingPhrase(self, stype, etype, stext, etext, startPrefix, - endPrefix): - - if (stype, etype) == ("NONE", "NONE"): - return "" #no timing phrase - - elif (stype, etype) in [("NONE", "EXPLICIT")]: - return self.ctp_NONE_EXPLICIT(stext,etext,startPrefix,endPrefix) - - elif (stype, etype) in [("NONE", "FUZZY4"), ("NONE", "FUZZY8")]: - return self.ctp_NONE_FUZZY(stext,etext,startPrefix,endPrefix) - - elif (stype, etype) in [("EXPLICIT", "EXPLICIT")]: - return self.ctp_EXPLICIT_EXPLICIT(stext,etext,startPrefix, - endPrefix) - - elif 
(stype, etype) in [("EXPLICIT", "FUZZY4"), ("EXPLICIT", "FUZZY8")]: - return self.ctp_EXPLICIT_FUZZY(stext,etext,startPrefix,endPrefix) - - elif (stype, etype) in [("FUZZY4", "FUZZY4"), ("FUZZY8", "FUZZY4"), - ("FUZZY4", "FUZZY8"), ("FUZZY8", "FUZZY8")]: - return self.ctp_FUZZY_FUZZY(stext,etext,startPrefix,endPrefix) - - elif (stype, etype) in [("NONE", "DAY_NIGHT_ONLY")]: - return self.ctp_NONE_DAYNIGHT(stext,etext,startPrefix,endPrefix) - - elif (stype, etype) in [("EXPLICIT", "DAY_NIGHT_ONLY")]: - return self.ctp_EXPLICIT_DAYNIGHT(stext,etext,startPrefix, - endPrefix) - - elif (stype, etype) in [("FUZZY4", "DAY_NIGHT_ONLY"), - ("FUZZY8", "DAY_NIGHT_ONLY")]: - return self.ctp_FUZZY_DAYNIGHT(stext,etext,startPrefix,endPrefix) - - elif (stype, etype) in [("DAY_NIGHT_ONLY", "DAY_NIGHT_ONLY")]: - return self.ctp_DAYNIGHT_DAYNIGHT(stext,etext,startPrefix, - endPrefix) - - elif (stype, etype) in [("DAY_NIGHT_ONLY", "NONE")]: - return self.ctp_DAYNIGHT_NONE(stext,etext,startPrefix,endPrefix) - - elif (stype, etype) in [("DAY_NIGHT_ONLY", "EXPLICIT")]: - return self.ctp_DAYNIGHT_EXPLICIT(stext,etext,startPrefix, - endPrefix) - - elif (stype, etype) in [("DAY_NIGHT_ONLY", "FUZZY4"), - ("DAY_NIGHT_ONLY", "FUZZY8")]: - return self.ctp_DAYNIGHT_FUZZY(stext,etext,startPrefix,endPrefix) - - else: - return "" - - - #calculates the NONE/EXPLICIT timing phrase - def ctp_NONE_EXPLICIT(self, stext, etext, startPrefix, endPrefix): - #single time zone - if len(etext) == 1: - hourStr, hourTZstr, description = etext[0] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - return endPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' + \ - description - - #multiple time zones - elif len(etext) > 1: - hourStr, hourTZstr, description = etext[0] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = endPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' - for x in xrange(1, len(etext)): - hourStr, hourTZstr, othDescription = etext[x] - #special cases NOON - if hourStr 
== "12 PM": - hourStr = "noon" - s = s + "/" + hourStr + ' ' + hourTZstr + "/ " - s = s + description - return s - - #calculates the NONE/FUZZY timing phrase - def ctp_NONE_FUZZY(self, stext, etext, startPrefix, endPrefix): - #returns phrase like: THROUGH THIS EVENING - hourStr, hourTZstr, description = etext[0] #ending text - s = endPrefix + ' ' + description - return s - - #calculates the NONE/EXPLICIT timing phrase - def ctp_EXPLICIT_EXPLICIT(self, stext, etext, startPrefix, endPrefix): - #return phrases like: - # FROM 2 AM WEDNESDAY TO 2 AM CST THURSDAY - # FROM 2 AM TO 5 AM CST THURSDAY - # FROM 2 AM CST /1 AM MST/ WEDNESDAY TO 2 AM CST /1 AM MST/ THURSDAY - # FROM 2 AM CST /1 AM MST/ TO 6 AM CST /5AM MST/ THURSDAY - - shourStr, shourTZstr, sdescription = stext[0] #starting text - ehourStr, ehourTZstr, edescription = etext[0] #ending text - - #special cases NOON - if shourStr == "12 PM": - shourStr = "noon" - - #special cases NOON - if ehourStr == "12 PM": - ehourStr = "noon" - - # special case EARLY THIS MORNING and THIS MORNING, replace with - # just THIS MORNING - if sdescription == "early this morning" and \ - edescription == "this morning": - sdescription = "this morning" #combine two phrases - - - # single time zone, same time zone for start/end times - same day - if len(stext) == 1 and len(etext) == 1 and \ - shourTZstr == ehourTZstr and sdescription == edescription: - return startPrefix + ' ' + shourStr + ' ' + endPrefix + ' ' +\ - ehourStr + ' ' + ehourTZstr + ' ' + edescription - - # single time zone, same time zone for start/end times - diff day - if len(stext) == 1 and len(etext) == 1 and \ - shourTZstr == ehourTZstr and sdescription != edescription: - return startPrefix + ' ' + shourStr + ' ' + sdescription + \ - ' ' + endPrefix + ' ' + ehourStr + ' ' + ehourTZstr + \ - ' ' + edescription - - # mult time zones, same day for start/end times - if sdescription == edescription: - s = startPrefix + ' ' + shourStr + ' ' + shourTZstr + ' ' - for x in 
xrange(1, len(stext)): - hourStr, hourTZstr, description = stext[x] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = s + "/" + hourStr + ' ' + hourTZstr + "/ " - s = s + endPrefix + ' ' + ehourStr + ' ' + ehourTZstr + ' ' - for x in xrange(1, len(etext)): - hourStr, hourTZstr, description = etext[x] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = s + "/" + hourStr + ' ' + hourTZstr + "/ " - s = s + edescription - return s - - # mult time zones, different day for start/end times - else: - s = startPrefix + ' ' + shourStr + ' ' + shourTZstr + ' ' - for x in xrange(1, len(stext)): - hourStr, hourTZstr, description = stext[x] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = s + "/" + hourStr + ' ' + hourTZstr + "/ " - s = s + sdescription + ' ' + endPrefix + ' ' + ehourStr + \ - ' ' + ehourTZstr + ' ' - for x in xrange(1, len(etext)): - hourStr, hourTZstr, description = etext[x] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = s + "/" + hourStr + ' ' + hourTZstr + "/ " - s = s + edescription - return s - - #calculates the NONE/EXPLICIT timing phrase - def ctp_EXPLICIT_FUZZY(self, stext, etext, startPrefix, endPrefix): - #returns phrase like: - # FROM 2 AM CST WEDNESDAY THROUGH LATE WEDNESDAY NIGHT - # FROM 2 AM CST /1 AM MST/ WEDNESDAY THROUGH LATE WEDNESDAY NIGHT - - #start phrase - hourStr, hourTZstr, description0 = stext[0] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = startPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' - for x in xrange(1, len(stext)): - hourStr, hourTZstr, description = stext[x] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = s + "/" + hourStr + ' ' + hourTZstr + "/ " - s = s + description0 + ' ' - - #end phrase - hourStr, hourTZstr, description = etext[0] - s = s + endPrefix + ' ' + description - - return s - - #calculates the FUZZY/FUZZY timing phrase - def ctp_FUZZY_FUZZY(self, stext, etext, startPrefix, 
endPrefix): - #return phrases like FROM THIS EVENING THROUGH LATE WEDNESDAY NIGHT - #return phrases like LATE WEDNESDAY NIGHT - - hourStr, hourTZstr, s_description = stext[0] #starting text - hourStr, hourTZstr, e_description = etext[0] #ending text - - #special case of description the same - if s_description == e_description: - return s_description - - #normal case of different descriptions - s = startPrefix + ' ' + s_description + ' ' + endPrefix + ' ' +\ - e_description - - return s - - def ctp_NONE_DAYNIGHT(self,stext,etext,startPrefix,endPrefix): - #return phrases like THROUGH WEDNESDAY - - hourStr, hourTZstr, e_description = etext[0] #ending text - - s = endPrefix + ' ' + e_description - - return s - - def ctp_EXPLICIT_DAYNIGHT(self, stext, etext, startPrefix, endPrefix): - #returns phrase like: - # FROM 2 AM CST WEDNESDAY THROUGH WEDNESDAY - # FROM 2 AM CST /1 AM MST/ WEDNESDAY THROUGH WEDNESDAY - - #start phrase - hourStr, hourTZstr, description0 = stext[0] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = startPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' - for x in xrange(1, len(stext)): - hourStr, hourTZstr, description = stext[x] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = s + "/" + hourStr + ' ' + hourTZstr + "/ " - s = s + description0 + ' ' - - #end phrase - hourStr, hourTZstr, description = etext[0] - s = s + endPrefix + ' ' + description - - return s - - def ctp_FUZZY_DAYNIGHT(self, stext,etext,startPrefix,endPrefix): - #return phrases like FROM THIS EVENING THROUGH WEDNESDAY NIGHT - - hourStr, hourTZstr, s_description = stext[0] #starting text - hourStr, hourTZstr, e_description = etext[0] #ending text - - #special case of description the same - if s_description == e_description: - return s_description - - #normal case of different descriptions - s = startPrefix + ' ' + s_description + ' ' + endPrefix + ' ' +\ - e_description - - return s - - def 
ctp_DAYNIGHT_DAYNIGHT(self,stext,etext,startPrefix,endPrefix): - #return phrases like FROM TONIGHT THROUGH WEDNESDAY - - hourStr, hourTZstr, s_description = stext[0] #starting text - hourStr, hourTZstr, e_description = etext[0] #ending text - - #special case of description the same - if s_description == e_description: - return s_description - - #normal case of different descriptions - s = startPrefix + ' ' + s_description + ' ' + endPrefix + ' ' +\ - e_description - - return s - - def ctp_DAYNIGHT_EXPLICIT(self, stext,etext,startPrefix,endPrefix): - #returns phrase like: - # FROM TUESDAY UNTIL 2 AM CST WEDNESDAY - # FROM TUESDAY UNTIL 2 AM CST /1 AM MST/ WEDNESDAY - - #start phrase - hourStr, hourTZstr, description = stext[0] - s = startPrefix + ' ' + description + ' ' - - #end phrase - hourStr, hourTZstr, description0 = etext[0] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = s + endPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' - for x in xrange(1, len(etext)): - hourStr, hourTZstr, description = etext[x] - #special cases NOON - if hourStr == "12 PM": - hourStr = "noon" - s = s + "/" + hourStr + ' ' + hourTZstr + "/ " - s = s + description0 + ' ' - - return s - - def ctp_DAYNIGHT_NONE(self, stext,etext,startPrefix,endPrefix): - #return phrases like FROM TONIGHT - - hourStr, hourTZstr, s_description = stext[0] #starting text - - s = startPrefix + ' ' + s_description - - return s - - def ctp_DAYNIGHT_FUZZY(self,stext,etext,startPrefix,endPrefix): - #return phrases like FROM TONIGHT THROUGH WEDNESDAY NIGHT - - hourStr, hourTZstr, s_description = stext[0] #starting text - hourStr, hourTZstr, e_description = etext[0] #ending text - - #special case of description the same - if s_description == e_description: - return s_description - - #normal case of different descriptions - s = startPrefix + ' ' + s_description + ' ' + endPrefix + ' ' +\ - e_description - - return s - - - def getTimingConnectorType(self, timingType, action): - # Returns the 
start and end prefix for the given start and end phrase - # type and action code. - d = {("NONE", "NONE"): (None, None), - ("NONE", "EXPLICIT"): (None, "until"), - ("NONE", "FUZZY4"): (None, "through"), - ("NONE", "FUZZY8"): (None, "through"), - ("EXPLICIT", "EXPLICIT"): ("from", "to"), - ("EXPLICIT", "FUZZY4"): ("from", "through"), - ("EXPLICIT", "FUZZY8"): ("from", "through"), - ("FUZZY4", "FUZZY4"): ("from", "through"), - ("FUZZY4", "FUZZY8"): ("from", "through"), - ("FUZZY8", "FUZZY4"): ("from", "through"), - ("FUZZY8", "FUZZY8"): ("from", "through"), - ("NONE", "DAY_NIGHT_ONLY"): (None, "through"), - ("EXPLICIT", "DAY_NIGHT_ONLY"): ("from", "through"), - ("FUZZY4", "DAY_NIGHT_ONLY"): ("from", "through"), - ("FUZZY8", "DAY_NIGHT_ONLY"): ("from", "through"), - ("DAY_NIGHT_ONLY", "DAY_NIGHT_ONLY"): ("from", "through"), - ("DAY_NIGHT_ONLY", "NONE"): ("from", None), - ("DAY_NIGHT_ONLY", "EXPLICIT"): ("from", "to"), - ("DAY_NIGHT_ONLY", "FUZZY4"): ("from", "through"), - ("DAY_NIGHT_ONLY", "FUZZY8"): ("from", "through"), - } - - # special case for expirations. 
- if action == 'EXP': - return (None, "at") - - return d.get(timingType, ("", "")) - - def getTimingType(self, hazRec, issueTime): - #Returns the timing type based on the issuanceTime and hazard record - #Returns (startType, endType), which is NONE, EXPLICIT, FUZZY4, FUZZY8 - - # Get the local headlines customizable timing - tr = self.makeTimeRange(hazRec['startTime'], hazRec['endTime']) - locStart, locEnd = self.getLocalHeadlinesTiming( - None, None, hazRec['phen'], tr, hazRec['id'], issueTime) - - #time from issuanceTime - deltaTstart = hazRec['startTime'] - issueTime #seconds past now - deltaTend = hazRec['endTime'] - issueTime #seconds past now - - HR=3600 #convenience constants - MIN=60 #convenience constants - - # record in the past, ignore - if deltaTend <= 0: - return ("NONE", "NONE") - - # upgrades and cancels - if hazRec['act'] in ['UPG', 'CAN']: - return ("NONE", "NONE") #upgrades/cancels never get timing phrases - - # expirations EXP codes are always expressed explictly, only end time - if hazRec['act'] == 'EXP': - return ('NONE', 'EXPLICIT') - - phensig = hazRec['phen'] + '.' + hazRec['sig'] - - # SPC Watches always get explicit times, 3 hour start mention - spcWatches = ['TO.A', 'SV.A'] - if phensig in spcWatches: - if deltaTstart < 3*HR: - return ('NONE', 'EXPLICIT') - else: - return ('EXPLICIT', 'EXPLICIT') - - # Tropical events never get times at all - tpcEvents = ['TY.A','TY.W','HU.A','HU.S','HU.W','TR.A','TR.W', - 'SS.A','SS.W'] - if phensig in tpcEvents: - return ('NONE', 'NONE') - - # special marine case? 
- marineHazList = ["SC.Y", "SW.Y", "GL.W", "SR.W", 'HF.W', 'BW.Y', - 'UP.W', 'UP.Y', 'RB.Y', 'SE.W', 'SI.Y'] #treat like watches - marinePils = ['CWF', 'OFF', 'NSH', 'GLF'] #specific marine pils - oconusSites = ['PGUM','PHFO','PAFC','PAJK','PAFG'] - - # regular products - not marine - if hazRec['pil'] not in marinePils: - #advisories/warnings - if hazRec['sig'] in ['Y','W']: #advisories/warnings - explicit - if deltaTstart < 3*HR: #no start time in first 3 hours - start = 'NONE' - else: - start = 'EXPLICIT' #explicit start time after 3 hours - end = 'EXPLICIT' #end time always explicit - - #watches - elif hazRec['sig'] in ['A']: #watches - mix of explicit/fuzzy - if deltaTstart < 3*HR: #no start time in first 3 hours - start = 'NONE' - elif deltaTstart < 12*HR: - start = 'EXPLICIT' #explicit start time 3-12 hours - else: - start = 'FUZZY4' #fuzzy times after 12 (4/day) - if deltaTend < 12*HR: #explicit end time 0-12 hours - end = 'EXPLICIT' - else: - end = 'FUZZY4' #fuzzy times after 12 (4/day) - - #local hazards - elif locStart is not None and locEnd is not None: - start = locStart - end = locEnd - else: - if deltaTstart < 3*HR: #no start time in first 3 hours - start = 'NONE' - elif deltaTstart < 12*HR: - start = 'EXPLICIT' #explicit start time 3-12 hours - else: - start = 'FUZZY4' #fuzzy times after 12 (4/day) - if deltaTend < 12*HR: #explicit end time 0-12 hours - end = 'EXPLICIT' - else: - end = 'FUZZY4' #fuzzy times after 12 (4/day) - - - # marine - CONUS - elif hazRec['officeid'] not in oconusSites: - - #advisories/warnings - explicit, but not some phensigs - if hazRec['sig'] in ['Y','W'] and phensig not in marineHazList: - if deltaTstart < 3*HR: #no start time in first 3 hours - start = 'NONE' - else: - start = 'EXPLICIT' #explicit start time after 3 hours - end = 'EXPLICIT' #end time always explicit - - #watches - mix of explicit/fuzzy, some phensig treated as watches - elif hazRec['sig'] in ['A'] or phensig in marineHazList: - if deltaTstart < 3*HR: #no 
start time in first 3 hours - start = 'NONE' - elif deltaTstart < 12*HR: - start = 'EXPLICIT' #explicit start time 3-12 hours - else: - start = 'FUZZY4' #fuzzy times after 12 (4/day) - if deltaTend < 12*HR: #explicit end time 0-12 hours - end = 'EXPLICIT' - else: - end = 'FUZZY4' #fuzzy times after 12 (4/day) - - #local hazards - treat as watches - elif locStart is not None and locEnd is not None: - start = locStart - end = locEnd - else: - if deltaTstart < 3*HR: #no start time in first 3 hours - start = 'NONE' - elif deltaTstart < 12*HR: - start = 'EXPLICIT' #explicit start time 3-12 hours - else: - start = 'FUZZY4' #fuzzy times after 12 (4/day) - if deltaTend < 12*HR: #explicit end time 0-12 hours - end = 'EXPLICIT' - else: - end = 'FUZZY4' #fuzzy times after 12 (4/day) - - # marine - OCONUS - else: - - #advisories/warnings - explicit, but not some phensigs - if hazRec['sig'] in ['Y','W'] and phensig not in marineHazList: - if deltaTstart < 3*HR: #no start time in first 3 hours - start = 'NONE' - else: - start = 'EXPLICIT' #explicit start time after 3 hours - end = 'EXPLICIT' #end time always explicit - - #special marine phensigs - treat as watches, with fuzzy8 - elif phensig in marineHazList: - if deltaTstart < 3*HR: #no start time in first 3 hours - start = 'NONE' - else: - start = 'FUZZY8' #fuzzy start times - end = 'FUZZY8' #always fuzzy end times - - - #regular watches - fuzzy4 - elif hazRec['sig'] in ['A']: - if deltaTstart < 3*HR: #no start time in first 3 hours - start = 'NONE' - elif deltaTstart < 12*HR: - start = 'EXPLICIT' #explicit start time 3-12 hours - else: - start = 'FUZZY4' #fuzzy times after 12 (4/day) - if deltaTend < 12*HR: #explicit end time 0-12 hours - end = 'EXPLICIT' - else: - end = 'FUZZY4' #fuzzy times after 12 (4/day) - - #local hazards - treat as watches - elif locStart is not None and locEnd is not None: - start = locStart - end = locEnd - else: - if deltaTstart < 3*HR: #no start time in first 3 hours - start = 'NONE' - elif 
deltaTstart < 12*HR: - start = 'EXPLICIT' #explicit start time 3-12 hours - else: - start = 'FUZZY4' #fuzzy times after 12 (4/day) - if deltaTend < 12*HR: #explicit end time 0-12 hours - end = 'EXPLICIT' - else: - end = 'FUZZY4' #fuzzy times after 12 (4/day) - - return (start, end) - - def getLocalHeadlinesTiming(self,tree, node, key, tr, - areaLabel, issuanceTime): - headlinesTiming = self.headlinesTiming(tree, node, key, tr, - areaLabel, issuanceTime) - if headlinesTiming is None: - locStart = None - locEnd = None - else: - locStart, locEnd = headlinesTiming - if locStart == "FUZZY": - locStart = "FUZZY4" - if locEnd == "FUZZY": - locEnd = "FUZZY4" - return locStart, locEnd - - def hazardTimeZones(self, areaList): - #returns list of time zones for the starting time - #and list of time zones for the ending time. The areaList provides - #a complete list of areas for this headline. startT, endT are the - #hazard times. - - # sort the areaList so time zones are in consistent order - areaList.sort() - - # get this time zone - thisTimeZone = os.environ["TZ"] - zoneList = [] - - # get the AreaDictionary that contains time zones per edit area - areaDictName = self._areaDictionary - accessor = ModuleAccessor.ModuleAccessor() - areaDict = accessor.variable(areaDictName, "AreaDictionary") - - # check to see if we have any areas outside our time zone - for areaName in areaList: - if areaName in areaDict.keys(): - entry = areaDict[areaName] - if not entry.has_key("ugcTimeZone"): #add your site id - if thisTimeZone not in zoneList: - zoneList.append(thisTimeZone) - continue # skip it - timeZoneList = entry["ugcTimeZone"] - if type(timeZoneList) == types.StringType: # a single value - timeZoneList = [timeZoneList] # make it into a list - for timeZone in timeZoneList: - if timeZone not in zoneList: - zoneList.append(timeZone) - - # if the resulting zoneList is empty, put in our time zone - if len(zoneList) == 0: - zoneList.append(thisTimeZone) - - # if the resulting zoneList has 
our time zone in it, be sure it - # is the first one in the list - try: - index = zoneList.index(thisTimeZone) - if index != 0: - del zoneList[index] - zoneList.insert(0, thisTimeZone) - except: - pass - - return zoneList - - def timingWordTableEXPLICIT(self, issueTime, eventTime, timezone, - timeType='start'): - #returns (timeValue, timeZone, descriptiveWord). - #eventTime is either the starting or ending time, based on - #the timeType flag. timezone is the time zone for the hazard area - - HR=3600 - sameDay = [ - (0*HR, 6*HR, "early this morning"), #midnght-559am - (6*HR, 12*HR-1, "this morning"), #600am-1159am - (12*HR, 12*HR+1, "today"), #noon - (12*HR+1, 18*HR-1, "this afternoon"), #1201pm-559pm - (18*HR, 24*HR, "this evening")] #6pm-1159pm - - nextDay = [ - (0*HR, 0*HR+1, "tonight"), #midnght - (0*HR, 24*HR, ""),] #midnght-1159pm - - subsequentDay = [ - (0*HR, 0*HR+1, " night"), #midnght - (0*HR, 24*HR, ""),] #midnght-1159pm - - - #determine local time - myTimeZone = os.environ["TZ"] # save the defined time zone - os.environ["TZ"] = timezone # set the new time zone - ltissue = time.localtime(issueTime) # issuance local time - ltevent = time.localtime(eventTime) # event local time - #get the hour string (e.g., 8 PM) - hourStr = time.strftime("%I %p", ltevent) - if hourStr[0] == '0': - hourStr = hourStr[1:] #eliminate leading zero - - #get the time zone (e.g., MDT) - hourTZstr = time.strftime("%Z", ltevent) - - #determine the delta days from issuance to event - diffDays = ltevent[7] - ltissue[7] #julian day - if diffDays < 0: #year wrap around, assume Dec/Jan - diffDays = ltevent[2] + 31 - ltissue[2] #day of month - - #get description time phrase - description = "" - hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute - if diffDays == 0: - for (startT, endT, desc) in sameDay: - if hourmin >= startT and hourmin < endT and timeType=='start': - description = desc - break - elif hourmin <= endT and timeType=='end': - description = desc - break - - else: - 
#choose proper table - if diffDays == 1: - table = nextDay - else: - table = subsequentDay - for (startT, endT, desc) in table: - hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute - if hourmin >= startT and hourmin < endT and timeType=='start': - description = desc - break - elif hourmin <= endT and timeType=='end': - description = desc - break - dow = ltevent[6] #day of week - dowMinusOne = ltevent[6] - 1 - if dowMinusOne < 0: - dowMinusOne = 6 #week wraparound - description = string.replace(description, "", - self.asciiDayOfWeek(dow)) #day of week - description = string.replace(description, "", - self.asciiDayOfWeek(dowMinusOne)) #day of week - - #special cases NOON - if hourStr == "12 PM" and description == "today": - hourStr = "noon" - - #special cases MIDNIGHT - if hourStr == "12 AM": - hourStr = "midnight" - - os.environ["TZ"] = myTimeZone # reset the defined time zone - - return (hourStr, hourTZstr, description) - - - def timingWordTableFUZZY4(self, issueTime, eventTime, timeZone, - timeType='start'): - #returns (timeValue, timeZone, descriptiveWord). - #eventTime is either the starting or ending time, based on - #the timeType flag. 
timezone is the time zone for the hazard area - #table is local time, start, end, descriptive phrase - HR=3600 - sameDay = [ - (0*HR, 6*HR, "early this morning"), #midnght-559am - (6*HR, 12*HR, "this morning"), #600am-noon - (12*HR, 18*HR, "this afternoon"), #1200pm-559pm - (18*HR, 24*HR, "this evening")] #6pm-1159pm - - nextDay = [ - (0*HR, 0*HR, "this evening"), #midnght tonight - (0*HR, 6*HR, "late tonight"), #midnght-559am - (6*HR, 12*HR, " morning"), #600am-noon - (12*HR, 18*HR, " afternoon"), #1200pm-559pm - (18*HR, 24*HR, " evening")] #6pm-1159pm - - subsequentDay = [ - (0*HR, 0*HR, " evening"), #midnght ystdy - (0*HR, 6*HR, "late night"), #midnght-559am - (6*HR, 12*HR, " morning"), #600am-noon - (12*HR, 18*HR, " afternoon"), #1200pm-559pm - (18*HR, 24*HR, " evening")] #6pm-1159pm - - - #determine local time - myTimeZone = os.environ["TZ"] # save the defined time zone - os.environ["TZ"] = timeZone # set the new time zone - ltissue = time.localtime(issueTime) # issuance local time - ltevent = time.localtime(eventTime) # event local time - - #determine the delta days from issuance to event - diffDays = ltevent[7] - ltissue[7] #julian day - if diffDays < 0: #year wrap around, assume Dec/Jan - diffDays = ltevent[2] + 31 - ltissue[2] #day of month - - #get description time phrase - description = "" - hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute - if diffDays == 0: - for (startT, endT, desc) in sameDay: - if hourmin >= startT and hourmin < endT and timeType=='start': - description = desc - break - elif hourmin <= endT and timeType=='end': - description = desc - break - - else: - #choose proper table - if diffDays == 1: - table = nextDay - else: - table = subsequentDay - for (startT, endT, desc) in table: - hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute - if hourmin >= startT and hourmin < endT and timeType=='start': - description = desc - break - elif hourmin <= endT and timeType=='end': - description = desc - break - dow = ltevent[6] #day of 
week - dowMinusOne = ltevent[6] - 1 - if dowMinusOne < 0: - dowMinusOne = 6 #week wraparound - description = string.replace(description, "", - self.asciiDayOfWeek(dow)) #day of week - description = string.replace(description, "", - self.asciiDayOfWeek(dowMinusOne)) #day of week - - os.environ["TZ"] = myTimeZone # reset the defined time zone - - hourStr = None - hourTZstr = None - return (hourStr, hourTZstr, description) - - - def timingWordTableFUZZY8(self, issueTime, eventTime, timeZone, - timeType='start'): - #returns the descriptive word for the event. eventTime is either - #the starting or ending time, based on the timeType flag. - #table is local time, start, end, descriptive phrase-A - - HR=3600 - sameDay = [ - (0*HR, 3*HR, "late night"), #midnght-259am - (3*HR, 6*HR, "early this morning"), #300am-559am - (6*HR, 9*HR, "this morning"), #600am-859am - (9*HR, 12*HR, "late this morning"), #900am-1159am - (12*HR, 15*HR, "early this afternoon"), #noon-259pm - (15*HR, 18*HR, "late this afternoon"), #300pm-559pm - (18*HR, 21*HR, "this evening"), #600pm-859pm - (21*HR, 24*HR, "tonight")] #900pm-1159pm - - nextDayStart = [ - (0*HR, 3*HR, "late night"), #midnght-259am - (3*HR, 6*HR, "early morning"), #300am-559am - (6*HR, 12*HR, " morning"), #600am-noon - (12*HR, 18*HR, " afternoon"), #1200pm-559pm - (18*HR, 24*HR, " evening")] #6pm-1159pm - - nextDayEnd = [ - (0*HR, 0*HR, "tonight"), #midnght tonight - (0*HR, 3*HR, "late night"), #midnght-259am - (3*HR, 6*HR, "early morning"), #300am-559am - (6*HR, 12*HR, " morning"), #600am-noon - (12*HR, 18*HR, " afternoon"), #1200pm-559pm - (18*HR, 24*HR, " night")] #6pm-1159pm - - subsequentDayStart = [ - (0*HR, 6*HR, "late night"), #midnght-559am - (6*HR, 12*HR, " morning"), #600am-noon - (12*HR, 18*HR, " afternoon"), #1200pm-559pm - (18*HR, 24*HR, " evening")] #6pm-1159pm - - subsequentDayEnd = [ - (0*HR, 0*HR, " night"), #midnght tonight - (0*HR, 6*HR, "early morning"), #midnght-559am - (6*HR, 12*HR, " morning"), #600am-noon - 
(12*HR, 18*HR, " afternoon"), #1200pm-559pm - (18*HR, 24*HR, " night")] #6pm-1159pm - - - #determine local time - myTimeZone = os.environ["TZ"] # save the defined time zone - os.environ["TZ"] = timeZone # set the new time zone - ltissue = time.localtime(issueTime) # issuance local time - ltevent = time.localtime(eventTime) # event local time - - #determine the delta days from issuance to event - diffDays = ltevent[7] - ltissue[7] #julian day - if diffDays < 0: #year wrap around, assume Dec/Jan - diffDays = ltevent[2] + 31 - ltissue[2] #day of month - - #get description time phrase - description = "" - hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute - if diffDays == 0: - for (startT, endT, desc) in sameDay: - if hourmin >= startT and hourmin < endT and timeType=='start': - description = desc - break - elif hourmin <= endT and timeType=='end': - description = desc - break - - else: - #choose proper table - if timeType == 'start': - if diffDays == 1: - table = nextDayStart - else: - table = subsequentDayStart - else: - if diffDays == 1: - table = nextDayEnd - else: - table = subsequentDayEnd - for (startT, endT, desc) in table: - hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute - if hourmin >= startT and hourmin < endT and timeType=='start': - description = desc - break - elif hourmin <= endT and timeType=='end': - description = desc - break - - #do substitution - dow = ltevent[6] #day of week - dowMinusOne = ltevent[6] - 1 - if dowMinusOne < 0: - dowMinusOne = 6 #week wraparound - description = string.replace(description, "", - self.asciiDayOfWeek(dow)) #day of week - description = string.replace(description, "", - self.asciiDayOfWeek(dowMinusOne)) #day of week - - os.environ["TZ"] = myTimeZone # reset the defined time zone - - hourStr = None - hourTZstr = None - return (hourStr, hourTZstr, description) - - def timingWordTableDAYNIGHT(self, issueTime, eventTime, timeZone, - timeType='start'): - #returns (timeValue, timeZone, descriptiveWord). 
- #eventTime is either the starting or ending time, based on - #the timeType flag. timezone is the time zone for the hazard area - #table is local time, start, end, descriptive phrase - HR=3600 - sameDay = [ - (0*HR, self.DAY()*HR, "early today"), #midnght-559am - (self.DAY()*HR, self.NIGHT()*HR, "today"), #600am-6pm - (self.NIGHT()*HR, 24*HR, "tonight")] #6pm-midnight - - nextDay = [ - (0*HR, self.DAY()*HR, "tonight"), #midnght-559am - (self.DAY()*HR, self.NIGHT()*HR, ""), #600am-6pm - (self.NIGHT()*HR, 24*HR, " night")] #6pm-midnight - - subsequentDay = [ - (0*HR, self.DAY()*HR, " night"), #midnght-559am - (self.DAY()*HR, self.NIGHT()*HR, ""), #600am-6pm - (self.NIGHT()*HR, 24*HR, " night")] #6pm-midnight - - #determine local time - myTimeZone = os.environ["TZ"] # save the defined time zone - os.environ["TZ"] = timeZone # set the new time zone - ltissue = time.localtime(issueTime) # issuance local time - ltevent = time.localtime(eventTime) # event local time - - #determine the delta days from issuance to event - diffDays = ltevent[7] - ltissue[7] #julian day - if diffDays < 0: #year wrap around, assume Dec/Jan - diffDays = ltevent[2] + 31 - ltissue[2] #day of month - - #get description time phrase - description = "" - hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute - if diffDays == 0: - for (startT, endT, desc) in sameDay: - if hourmin >= startT and hourmin < endT and timeType=='start': - description = desc - break - elif hourmin <= endT and timeType=='end': - description = desc - break - - else: - #choose proper table - if diffDays == 1: - table = nextDay - else: - table = subsequentDay - for (startT, endT, desc) in table: - hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute - if hourmin >= startT and hourmin < endT and timeType=='start': - description = desc - break - elif hourmin <= endT and timeType=='end': - description = desc - break - dow = ltevent[6] #day of week - dowMinusOne = ltevent[6] - 1 - if dowMinusOne < 0: - dowMinusOne = 6 #week 
wraparound - description = string.replace(description, "", - self.asciiDayOfWeek(dow)) #day of week - description = string.replace(description, "", - self.asciiDayOfWeek(dowMinusOne)) #day of week - - os.environ["TZ"] = myTimeZone # reset the defined time zone - - hourStr = None - hourTZstr = None - return (hourStr, hourTZstr, description) - - - def asciiDayOfWeek(self, number): - #converts number (0-Monday) to day of week - days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', - 'Saturday', 'Sunday'] - if number >= 0 and number < 7: - return days[number] - else: - return "?" + `number` + "?" - - - # Returns the headline phrase based on the specified hazard. - # The hazard record contains all geoIDs in the hazard['id'] field, - # not just a single one. Doesn't add the dots. - def makeStandardPhrase(self, hazard, issuanceTime): - - # hdln field present? - if not hazard.has_key('hdln'): - return "" - - # make sure the hazard is still in effect or within EXP critiera - if (hazard['act'] != 'EXP' and issuanceTime >= hazard['endTime']) or \ - (hazard['act'] == 'EXP' and issuanceTime > 30*60 + hazard['endTime']): - return "" # no headline for expired hazards - - #assemble the hazard type - hazStr = hazard['hdln'] - - # if the hazard is a convective watch, tack on the etn - phenSig = hazard['phen'] + "." + hazard['sig'] - if phenSig in ["TO.A", "SV.A"]: - hazStr = hazStr + " " + str(hazard["etn"]) - - # add on the action - actionWords = self.actionControlWord(hazard, issuanceTime) - hazStr = hazStr + ' ' + actionWords - - #get the timing words - timeWords = self.getTimingPhrase(hazard, issuanceTime) - if len(timeWords): - hazStr = hazStr + ' ' + timeWords - - return hazStr - - def timeCompare(self, haz1, haz2): - if haz1['startTime'] < haz2['startTime']: - return -1 - elif haz1['startTime'] == haz2['startTime']: - return 0 - else: - return 1 - - # Sorts headlines for marine products. 
sort algorithm - # cronological ordering by start time, then action, - # then significance, then phen alphabetically. - def marineSortHazardAlg(self, r1, r2): - #1st by start time - if r1['startTime'] < r2['startTime']: - return -1 - elif r1['startTime'] > r2['startTime']: - return 1 - - #2nd by action - actionCodeOrder = ["CAN", "EXP", "UPG", "NEW", "EXB", "EXA", - "EXT", "ROU", "CON"] - try: - aIndex = actionCodeOrder.index(r1['act']) - except: - aIndex = 99 - try: - bIndex = actionCodeOrder.index(r2['act']) - except: - bIndex = 99 - if aIndex < bIndex: - return -1 - elif aIndex > bIndex: - return 1 - - #3rd by significance - sig = ['W','Y','A'] - try: - index1 = sig.index(r1['sig']) - except: - index1 = 99 - try: - index2 = sig.index(r2['sig']) - except: - index2 = 99 - if index1 < index2: - return -1 - elif index1 > index2: - return 1 - - #4th by phen (alphabetically) - if r1['phen'] < r2['phen']: - return -1 - elif r1['phen'] > r2['phen']: - return 1 - - #equal - return 0 - - - # Sorts headlines for regular products. 
- def regularSortHazardAlg(self, r1, r2): - actActions = ["NEW", "EXB", "EXA", "EXT", "ROU", "CON"] - inactActions = ["CAN", "EXP", "UPG"] - actionCodeOrder = actActions + inactActions - - # 1st by general action category - if r1['act'] in actActions and r2['act'] in inactActions: - return -1 - elif r1['act'] in inactActions and r2['act'] in actActions: - return 1 - - # 2nd by chronological event starting time - if r1['startTime'] < r2['startTime']: - return -1 - elif r1['startTime'] > r2['startTime']: - return 1 - - # 3rd by action code order - try: - aIndex = actionCodeOrder.index(r1['act']) - except: - aIndex = 99 - try: - bIndex = actionCodeOrder.index(r2['act']) - except: - bIndex = 99 - if aIndex < bIndex: - return -1 - elif aIndex > bIndex: - return 1 - - #4th by significance - sig = ['W','Y','A'] - try: - index1 = sig.index(r1['sig']) - except: - index1 = 99 - try: - index2 = sig.index(r2['sig']) - except: - index2 = 99 - if index1 < index2: - return -1 - elif index1 > index2: - return 1 - - #5th by phen (alphabetically) - if r1['phen'] < r2['phen']: - return -1 - elif r1['phen'] > r2['phen']: - return 1 - - #equal - return 0 - - - # Makes multiple headlines based on the hazards list and returns the lot. - def makeHeadlinePhrases(self, tree, node, hazardList, issuanceTime, - testMode=0): - returnStr = "" - # make a deepcopy since we plan to mess with it. - hList = copy.deepcopy(hazardList) - - # sort headlines in appropriate order - if len(hList): - if hList[0]['pil'] in ['CWF','NSH','OFF','GLF']: - hList.sort(self.marineSortHazardAlg) - else: - hList.sort(self.regularSortHazardAlg) - - while len(hList) > 0: - hazard = hList[0] - - # Can't make phrases with hazards with no 'hdln' entry - if hazard['hdln'] == "": - hList.remove(hazard) - continue - - phenSig = hazard['phen'] + "." 
+ hazard['sig'] - actionCodeList = self.getAllowedActionCodes(phenSig) - - # if the action is not in the actionCodeList, skip it - if hazard['sig'] != "": # it's not locally defined - if not hazard['act'] in actionCodeList: - print "...Ignoring action code:", hazard['act'], \ - hazard['hdln'] - hList.remove(hazard) - continue - - # get the headline phrase - hazStr = self.makeStandardPhrase(hazard, issuanceTime) - if len(hazStr): - # Call user hook - localStr = self.addSpace(self.hazard_hook( - tree, node, hazard['phen'], hazard['sig'], hazard['act'], - hazard['startTime'], hazard['endTime']), "leading") - returnStr = returnStr + "..." + hazStr + localStr + "...\n" - - # always remove the main hazard from the list - hList.remove(hazard) - - return returnStr - - # Returns a formatted string announcing the hazards that are valid with - # timing phrases - def getHazardString(self, tree, node, fcstArea): - if len(fcstArea) <= 0: - return "" - hazardTable = self._hazards.getHazardList(fcstArea) - returnStr = "" - issuanceTime = self._issueTime.unixTime() - - returnStr = self.makeHeadlinePhrases(tree, node, hazardTable, - issuanceTime) - #Test mode? - returnStr = self.headlinePhraseTESTcheck(tree.get("argDict"), - returnStr) - - return returnStr.upper() - - - # The organizeHazard method brings in the raw analyzed table, - # then organizes it by edit area, returing a list of - # editArea lists. The first element of the list must the the first - # segment in a hazard based product. Ensures that a group of edit areas - # does not contain both zones and FIPS code - per 10-1702. - def organizeHazards(self, rawATable): - - # Initialize data structures to be used. - byIdDict = {} - byHazardDict = {} - masterEditAreaList = [] - - # Loop over the activeTable, and organize by editArea - - # - # Added code to discard segment identifer when cancelling a product. - # this was creating bogus segments. 
- # - - for eachHazard in rawATable: - if byIdDict.has_key(eachHazard['id']): - byIdDict[eachHazard['id']].append(\ - (eachHazard['phen'], eachHazard['sig'], eachHazard['seg'], - eachHazard['act'], eachHazard['startTime'], eachHazard['endTime'], - eachHazard['etn'])) - else: - byIdDict[eachHazard['id']] = [(eachHazard['phen'], - eachHazard['sig'], eachHazard['seg'], - eachHazard['act'], eachHazard['startTime'], eachHazard['endTime'], - eachHazard['etn'])] - - # - # Go through the sorted dictionary, organize into combos - # - - idsList = byIdDict.keys() - unsortedHazards = byIdDict.values() - sortedHazards = [] - for eachHazard in unsortedHazards: - if not self.__sortedContains(eachHazard, sortedHazards): - sortedHazards.append(eachHazard) - - # - # The following section determines the VTEC/segment ordering - # - - weightedList = [] - - # - # this list ranks by 'sig' and 'act' from least [0] to most - # importance [n]. All CANs go at the end, because cancel is the - # most important action. - # - - segmentVTECOrderList = [ - # Place holder for local hazards - 'LocalHazard', - 'F.ROU', 'F.CON', 'F.EXT', 'F.EXA', 'F.EXB', 'F.NEW', - 'F.UPG', 'S.ROU', 'S.CON', 'S.EXT', 'S.EXA', 'S.EXB', - 'S.NEW', 'S.UPG', 'A.ROU', 'A.CON', 'A.EXT', 'A.EXA', - 'A.EXB', 'A.NEW', 'A.UPG', 'Y.ROU', 'Y.CON', 'Y.EXT', - 'Y.EXA', 'Y.EXB', 'Y.NEW', 'Y.UPG', 'W.ROU', 'W.CON', - 'W.EXT', 'W.EXA', 'W.EXB', 'W.NEW', 'W.UPG', 'F.EXP', - 'F.CAN', 'S.EXP', 'S.CAN', 'A.EXP', 'A.CAN', 'Y.EXP', - 'Y.CAN', 'W.EXP', 'W.CAN'] - - for eachHazard in sortedHazards: - tempEditAreaList = [] - tempElementWeight = -1.0 - tempElementWeightCheck = -1.0 - secondaryWeight = 0.0 - segmentWeight = 0.0 - timeWeight = 0.0 - - # - # Figure out the maximum weight based on each - # element in the hazard combination. 
- # - - for eachElement in eachHazard: - - # - # This section checks of the hazard's index in - # segmentVTECOrderList - # - - if eachElement[1] is not None and eachElement[3] is not None: - sigAction = eachElement[1] + '.' + eachElement[3] - if sigAction in segmentVTECOrderList: - tempElementWeightCheck = float(segmentVTECOrderList.index(sigAction)) - else: - # Local hazards are not in list so - # assign it least importance - tempElementWeightCheck = 0.0 - - # - # secondaryWeight is a cumulative value << 1 that allows - # combinations of actions and sigs to take precedence over - # single actions or sigs of the same primary importance. For - # instance, a BZ.W^WC.Y will come before BZ.W by itself, - # even though they are the same priority. It also takes - # into account the hazards position in the allowedHazardTable, - # so that a a blizzard warning will trump a winter storm - # warning - # - - # - # from 1 (important) to 1001 (undefined). 1 is added to - # prevent division errors. - # - - allowedHazardValue = float(self.getHazardImportance(\ - eachElement[0] + '.' + eachElement[1])) + 1.0 - # - # Ensure that secondary weight never approaches 1 (ten thousandths...) - # - - secondaryWeight = secondaryWeight + 0.0001/allowedHazardValue - - # - # Check the tempElementWeightCheck against the - # tempElementWeight. If it's more, then the current hazard - # is the higher priority of the combo, and set - # tempElementWeight to it's index value. If it's less, then - # this hazard is of lower priority, but do give it a little - # weight (<< 1) so that for instance a warn + advisory - # segment will come before just a warn segment. - # - - if tempElementWeightCheck > tempElementWeight: - # This hazard is more important - tempElementWeight = tempElementWeightCheck - - # - # Add a factor for segment number. Lowest segments go first. Never - # Approach one (millionths...) - # - - segmentWeight = 1.0/(10000000.0 + float(eachElement[2])) - - # - # Add a factor for time. 
Earliest start times go first. Never - # approach one (e-10) - # - - timeWeight = 1.0/float(eachElement[4] + 100.0) - - # - # Assign the sum of weights before adding - # list for sorting - # - - tempElementWeight = tempElementWeight + secondaryWeight +\ - segmentWeight + timeWeight - - secondaryWeight = 0.0 - segmentWeight = 0.0 - timeWeight = 0.0 - - for eachID in idsList: - if sets.Set(byIdDict[eachID]) == sets.Set(eachHazard): - tempEditAreaList.append(eachID) - - weightedList.append((tempElementWeight, tempEditAreaList)) - - # Sort the list by weight - weightedList.sort(self._wtListSort) - - # Make the list of geoareas - finalList = [] - for w in weightedList: - finalList.append(w[1]) - - # Seperate out the zones and FIPS into separate UGC blocks - s = [] - for s1 in finalList: - fips = [] - zones = [] - for s2 in s1: - if s2[2] == 'Z': - zones.append(s2) - elif s2[2] == 'C': - fips.append(s2) - if len(fips): - s.append(fips) - if len(zones): - s.append(zones) - finalList = s - - return finalList - - - # Determines if hazard in sorted hazards. Hazard can be a list, thus we - # need to compare all elements for their inclusion, rather than simply - # using the "in" operator. - def __sortedContains(self, hazard, sorted_hazards): - hazard.sort() - for indSorted in sorted_hazards: - indSorted.sort() - if hazard == indSorted: - return 1 - return 0 - - # Sorts tuples of (weight, list, time), by weight - def _wtListSort(self, a, b): - if a[0] > b[0]: - return -1 - elif a[0] == b[0]: - return 0 - else: - return 1 - - - # Modifies string to have ...TEST... if we are in TEST mode. This - # to the MND header. Modifies string to have EXPERIMENTAL... if - # we are in EXPERIMENTAL mode. - def checkTestMode(self, argDict, str): - # testMode is set, then we are in product test mode. - # modify the str to have beginning and ending TEST indication. 
- if argDict.get('testMode', 0): - return "TEST..."+str+"...TEST" - elif argDict.get('experimentalMode', 0): - return "EXPERIMENTAL..." + str - else: - return str - - # Modifies headline string to have TEST if we are in TEST mode. - def headlinePhraseTESTcheck(self, argDict, str): - if argDict.get('testMode', 0): - lines = str.split('\n') - str = "...THIS MESSAGE IS FOR TEST PURPOSES ONLY...\n" - for x in xrange(len(lines)-1): #-1 for trailing new line - line = lines[x] - - #beginning of line - if line.find("...") == 0: - line = line[0:3] + "TEST " + line[3:] - #end of line - index = line.rfind("...") - if index != 0 and index == len(line)-3: - line = line[0:-3] + " TEST..." - - lines[x] = line - - return str + string.join(lines,'\n') - - #normal mode (not test mode) - else: - return str - - # utility for attribution, takes hazard description ['hdln'] field and - # adds TEST if appropriate in test mode, adds "A" or "AN" as appropriate - # if desired. - def hazardName(self, name, argDict, addA=False): - - if len(name) == 0: - return name - - # test mode - if argDict.get('testMode', 0): - phrase = 'Test ' + name #test mode, prepend "TEST" - else: - phrase = name - - # want A or AN? - if addA: - if phrase[0] in ['A','E','I','O','U','a','e','i','o','u']: - phrase = "an " + phrase - else: - phrase = "a " + phrase - return phrase - - - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# DiscretePhrases.py +# Methods for producing text forecast from SampleAnalysis statistics. 
+# +# Author: hansen +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 04/28/2015 4027 randerso Changes for mixed case +# Added sort for consistent ordering of multiple timezones +# 10/02/2017 20335 ryu Add storm surge w/w to tpcEvents so +# no timing phrase appear in headline +# +## + +## +# This is a base file that is not intended to be overridden. +## + +import PhraseBuilder +import ModuleAccessor +import types, copy, time, string, sets, os +import SampleAnalysis +import TimeRange, AbsTime + +class DiscretePhrases(PhraseBuilder.PhraseBuilder): + def __init__(self): + PhraseBuilder.PhraseBuilder.__init__(self) + + ### Local non-VTEC headlines. + # To sample the Hazards grid and produce locally generated headlines + # independent of the VTEC Headlines structure, follow these steps: + # + # 1. Put an "allowedHeadlines" method into your product with the + # same format as the "allowedHazards" method. + # + # 2. Generate the headlines using "generateProduct" in, for example, the + # _preProcessArea method: + + # headlines = self.generateProduct("Headlines", argDict, area = editArea, + # areaLabel=areaLabel, + # timeRange = self._timeRange) + # fcst = fcst + headlines + # + # 3. If desired, override "headlinesTiming" to adjust or remove the time descriptors + # for the headline. + # + + def headlinesTiming(self, tree, node, key, timeRange, areaLabel, issuanceTime): + # Return + # "startPhraseType" and "endPhraseType" + # Each can be one of these phraseTypes: + # "EXPLICIT" will return words such as "5 PM" + # "FUZZY4" will return words such as "THIS EVENING" + # "DAY_NIGHT_ONLY" use only weekday or weekday "NIGHT" e.g. + # "SUNDAY" or "SUNDAY NIGHT" or "TODAY" or "TONIGHT" + # Note: You will probably want to set both the + # startPhraseType and endPhraseType to DAY_NIGHT_ONLY to + # have this work correctly. 
+ # "NONE" will result in no words + # OR a method which takes arguments: + # issueTime, eventTime, timeZone, and timeType + # and returns: + # phraseType, (hourStr, hourTZstr, description) + # You can use "timingWordTableFUZZY8" as an example to + # write your own method. + # + # If you simply return None, no timing words will be used. + + # Note that you can use the information given to determine which + # timing phrases to use. In particular, the "key" is the Hazard + # key so different local headlines can use different timing. + # + startPhraseType = "FUZZY" + endPhraseType = "FUZZY" + + #Example code + #startTime = timeRange.startTime().unixTime() + #if startTime <= issuanceTime + 12 * 3600: # 12 hours past issuance + #startPhraseType = "EXPLICIT" + #endTime = timeRange.endTime().unixTime() + #if endTime <= issuanceTime + 12 * 3600: # 12 hours past issuance + #endPhraseType = "EXPLICIT" + + #return startPhraseType, endPhraseType + return None, None + + def Headlines(self): + return { + "type": "component", + "lineLength": 69, + "methodList": [ + self.assembleChildWords, + self.wordWrap, + ], + "analysisList":[ + ("Hazards", + SampleAnalysis.SampleAnalysis().discreteTimeRangesByKey), + ], + + "phraseList":[ + self.headlines_phrase, + ], + "autoSentence": 0, + } + + def headlines_phrase(self): + return { + "setUpMethod": self.headlines_setUp, + "wordMethod": self.headlines_words, + "phraseMethods": [self.assembleSubPhrases, + self.postProcessPhrase, + ] + } + + def headlines_setUp(self, tree, node): + self.subPhraseSetUp(tree, node, [], self.scalarConnector) + return self.DONE() + + def headlines_words(self, tree, node): + "Create the phrase for local headlines from the Hazards grids" + + words = "" + areaLabel = tree.getAreaLabel() + headlines = tree.stats.get("Hazards", tree.getTimeRange(), + areaLabel, mergeMethod = "List") + if headlines is None: + return self.setWords(node, "") + + # Sort the headlines by startTime + temp = [] + for h, tr in headlines: + 
temp.append((tr.startTime(), (h, tr))) + temp.sort() + newList = [] + for t in temp: + newList.append(t[1]) + headlines = newList + + # Fetch the set of local headlines allowed for this product + allowedHeadlines = [] + for key, allActions, cat in self.allowedHeadlines(): + allowedHeadlines.append(key) + issuanceTime = self._issueTime.unixTime() + + from com.raytheon.uf.viz.core.localization import LocalizationManager + siteId = LocalizationManager.getInstance().getSite() + for key, tr in headlines: # value == list of subkeys + if key not in allowedHeadlines: + continue + + timeDescriptor = self.headlinesTimeRange_descriptor( + tree, node, key, tr, areaLabel, issuanceTime) + from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey + headlineWords = DiscreteKey.discreteDefinition(siteId).keyDesc( + "Hazards" + "_SFC", key) + if headlineWords == "": # Don't process the "" key + continue + hookWords = self.hazard_hook(tree, node, key, "", "",tr.startTime(), tr.endTime()) + headlinePhrase = "..." 
+ headlineWords + timeDescriptor +hookWords + "...\n" + words = words + headlinePhrase + + words = self.convertToUpper(words) + return self.setWords(node, words) + + def headlinesTimeRange_descriptor(self, tree, node, key, tr, areaLabel, issuanceTime): + # Return a time range descriptor for the headline + # This method can be overridden to customize timing descriptors for + # non-VTEC local headlines + + headlinesTiming = self.headlinesTiming(tree, node, key, tr, + areaLabel, issuanceTime) + if headlinesTiming is None: + return "" + try: + startPhraseType, endPhraseType = headlinesTiming + except: + # For backward compatibility -- the startBoundary argument + # was formerly part of the headlinesTiming method + startPhraseType, endPhraseType, startBoundary = headlinesTiming + startTime = tr.startTime().unixTime() + endTime = tr.endTime().unixTime() + tree.combinations = self._combinations + areaList = self.getCurrentAreaNames(tree, areaLabel) + hazRec = { + 'id': areaList, + 'startTime': startTime, + 'endTime': endTime, + 'act': "NEW", + } + if startPhraseType == "FUZZY": + startPhraseType = "FUZZY4" + if endPhraseType == "FUZZY": + endPhraseType = "FUZZY4" + + phrase = self.getTimingPhrase( + hazRec, issuanceTime, startPhraseType, endPhraseType) + return " " + phrase + + +############################################################################################ + ### WARNING!!!! VTEC CODE -- DO NOT OVERRIDE ANY CODE BELOW THIS POINT!!!!! + + ### IF YOU USE A METHOD BELOW THIS POINT AND WANT TO ALTER IT, + ### COPY IT TO YOUR LOCAL FILE AND RE-NAME IT. THEN OVERRIDE ANY + ### METHODS THAT CALL IT AND USE THE NEW NAME. + + def getHazards(self, argDict, areaList): + # This is for setting up the argDict hazards entry AFTER the TextFormatter + # has created the Hazards Table. + # This is necessary for products that allow the user to specify through + # the GUI which edit areas will be sampled. 
+ # Set up edit areas + editAreas = [] + for area, label in areaList: + editAreas.append([area]) + # Process the hazards + import HazardsTable + hazards = HazardsTable.HazardsTable( + argDict["ifpClient"], editAreas, self._pil[0:3], + self.filterMethod, argDict["databaseID"], + self._fullStationID, + activeTableName = argDict['vtecActiveTable'], + vtecMode=argDict['vtecMode'], + dataMgr=argDict['dataMgr']) + # Store hazards object for later use + argDict["hazards"] = hazards + + def Hazards(self): + return { + "type": "component", + "lineLength": 66, + "methodList": [ + self.assembleChildWords, + self.wordWrap, + ], + "analysisList":[], + + "phraseList":[ + self.hazards_phrase, + ], + "autoSentence": 0, + } + + def hazards_phrase(self): + return { + "setUpMethod": self.hazards_setUp, + "wordMethod": self.hazards_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def hazards_setUp(self, tree, node): + self.subPhraseSetUp(tree, node, [], self.scalarConnector) + + return self.DONE() + + def hazards_words(self, tree, node): + "Create the phrase for any watches, warnings or advisories" + hazardsTable = self._hazards + tree.combinations = self._combinations + if self._combinations is None: + areaLabel = None + else: + areaLabel = tree.getAreaLabel() + + editAreas = self.getCurrentAreaNames(tree, areaLabel) + try: + # Remove suffixes if necessary + if self._editAreaSuffix is not None: + editAreas = self.removeSuffixes(editAreas, self._editAreaSuffix) + except: + pass + + + # Check for a particular entry in argDict that is inserted when + # we're formatting hazards type products like WSW, NPW. 
+ argDict = tree.get("argDict") + # look for segmentAreas in the argDict and override editAreas + if "segmentAreas" in argDict: + editAreas = argDict['segmentAreas'] # override editAreas + + words = self.getHazardString(tree, node, editAreas) + + words = self.convertToUpper(words) # convert to upper case + + return self.setWords(node, words) + + ##### VTEC methods ##### + + # Return just a simple list of hazards in the form phen.sig (WS.W) + def getAllowedHazardList(self): + allowedHazardList = self.allowedHazards() + + hazardList = [] + for h in allowedHazardList: + if type(h) is tuple: + hazardList.append(h[0]) + else: + hazardList.append(h) + + return hazardList + + # Return the list of action codes given the hazard, if hazard not found + # or actions not specified, return "ALL codes" + + def getAllowedActionCodes(self, hazard): + allowedHazardList = self.allowedHazards() + + for h in allowedHazardList: + if type(h) is tuple: + if h[0] == hazard: + return h[1] + return ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", "EXP"] + + # Returns the words to be used in the headline for 'act' field in the + # specified hazard. + def actionControlWord(self, hazard, issuanceTime): + if 'act' not in hazard: + print("Error! 
No field act in hazard record.") + return "" + + actionCode = hazard['act'] + if actionCode in ["NEW", "EXA", "EXB"]: + return "in effect" + elif actionCode == "CON": + return "remains in effect" + elif actionCode == "CAN": + return "is cancelled" + elif actionCode == "EXT": + return "now in effect" + elif actionCode == "EXP": + deltaTime = issuanceTime - hazard['endTime'] + if deltaTime >= 0: + return "has expired" + else: + return "will expire" + elif actionCode == "UPG": + return "no longer in effect" + else: + print(actionCode, "not recognized in actionControlWord.") + return "" + + # + # Determine the category for Hazard overrides + # + def getHazardCategory(self, hazard): + allowedHazardList = self.allowedHazards() + + for h in allowedHazardList: + if h[0] == hazard: + if len(h) == 3: + if type(h[2]) is bytes: + return h[2] + elif len(h) == 4: + if type(h[3]) is bytes: + return h[3] + + return None + # + # Determine the priority of a Hazard (lower count = higher priority) + # + + def getHazardImportance(self, hazard): + allowedHazardList = self.allowedHazards() + count = 0 + for h in allowedHazardList: + count = count + 1 + if h[0] == hazard: + return count + + return 1000 # no priority + + + # This method uses the allowedHazards() list to determine which + # hazardTable entry has the most important priority and removes + # the entry or piece thereof in place. Returns 1 if something was + # modified and 0 otherwise + def fixHazardConflict(self, index1, index2, hazardTable): + + allowedHazardList = self.getAllowedHazardList() + phen1 = hazardTable[index1]['phen'] + phen2 = hazardTable[index2]['phen'] + sig1 = hazardTable[index1]['sig'] + sig2 = hazardTable[index2]['sig'] + act1 = hazardTable[index1]['act'] + act2 = hazardTable[index2]['act'] + haz1 = phen1 + "." + sig1 + haz2 = phen2 + "." 
+ sig2 + ignoreList = ['CAN', 'EXP', 'UPG'] + if haz1 in allowedHazardList and haz2 in allowedHazardList and \ + act1 not in ignoreList and act2 not in ignoreList: + + + if (self.getHazardCategory(haz1) != self.getHazardCategory(haz2)) or \ + self.getHazardCategory(haz1) is None or \ + self.getHazardCategory(haz2) is None: + return 0 + + else: + return 0 # no changes were made + + if self.getHazardImportance(haz1) < self.getHazardImportance(haz2): + lowIndex = index2 + highIndex = index1 + else: + lowIndex = index1 + highIndex = index2 + + # + # Added to prevent a current lower TO.A from overiding a higher SV.A + # + + if hazardTable[lowIndex]['phen'] == 'SV' and \ + hazardTable[lowIndex]['sig'] == 'A' and \ + hazardTable[highIndex]['phen'] == 'TO' and \ + hazardTable[highIndex]['sig'] == 'A': + if (int(hazardTable[lowIndex]['etn']) > int(hazardTable[highIndex]['etn']) and + (int(hazardTable[highIndex]['etn']) - int(hazardTable[lowIndex]['etn'])) > 50): + lowIndexTemp = lowIndex + lowIndex = highIndex + highIndex = lowIndexTemp + + lowStart = hazardTable[lowIndex]['startTime'] + lowEnd = hazardTable[lowIndex]['endTime'] + highStart = hazardTable[highIndex]['startTime'] + highEnd = hazardTable[highIndex]['endTime'] + + # first check to see if high pri completely covers low pri + if highStart <= lowStart and highEnd >= lowEnd: # remove low priority + del hazardTable[lowIndex] + + # next check to see if high pri lies within low pri + elif lowStart <= highStart and lowEnd >= highEnd: # high pri in middle + if lowStart < highStart: + h = copy.deepcopy(hazardTable[lowIndex]) + # trim the early piece + hazardTable[lowIndex]['endTime'] = highStart + if lowEnd > highEnd: + # make a new end piece + h['startTime'] = highEnd + hazardTable.append(h) + elif lowStart == highStart: + hazardTable[lowIndex]['startTime'] = highEnd + + elif highEnd >= lowStart: + hazardTable[lowIndex]['startTime'] = highEnd # change low start + + elif highStart <= lowEnd: + 
hazardTable[lowIndex]['endTime'] = highStart # change low end + + return 1 + + + # This method removes all entries of the specified hazardTable that + # are not in the allowedHazards list. + def filterAllowedHazards(self, hazardTable): + + newTable = [] + allowedHazardList = self.getAllowedHazardList() + + hazStr = "" + for i in range(len(hazardTable)): + if hazardTable[i]['sig'] != "": # VTEC + hazStr = hazardTable[i]['phen'] + "." + hazardTable[i]['sig'] + else: #non-VTEC + hazStr = hazardTable[i]['phen'] + + if hazStr in allowedHazardList: + newTable.append(hazardTable[i]) + return newTable + + # This method searches all entries of the specified hazardTable for + # entries matching the specified zone. Then for each entry it finds + # it looks for a conflicting entry in time. If it finds one, it calls + # fixHazardsConflict, which fixes the table and then calls itself again + # recursively with the fixed table. If it doesn't find one it returns + # None. + def filterZoneHazards(self, zone, hazardTable): + for i in range(len(hazardTable)): + if hazardTable[i]['id'] == zone: + for j in range(len(hazardTable)): + if hazardTable[j]['id'] == zone and i != j: + tr1 = TimeRange.TimeRange( + AbsTime.AbsTime(int(hazardTable[i]['startTime'])), + AbsTime.AbsTime(int(hazardTable[i]['endTime']))) + tr2 = TimeRange.TimeRange( + AbsTime.AbsTime(int(hazardTable[j]['startTime'])), + AbsTime.AbsTime(int(hazardTable[j]['endTime']))) + if tr1.overlaps(tr2): + if self.fixHazardConflict(i, j, hazardTable): + self.filterZoneHazards(zone, hazardTable) + return None + return None + + # Main method that drives the code to filter hazards that conflict in time. + # Only one hazard of the same phenomenon is allowed per zone per time. + # This method processes the table, removing any time conflicts, so the one + # hazard per zone, time rule is adhered to. 
+ def filterMethod(self, hazardTable, allowedHazardsOnly=False): + # Remove hazards not in allowedHazards list + newTable = self.filterAllowedHazards(hazardTable) + if allowedHazardsOnly: + return newTable + + # get a raw list of unique edit areas + zoneList = [] + for t in newTable: + if t['id'] not in zoneList: + zoneList.append(t['id']) + + for zone in zoneList: + # Remove lower priority hazards of the same type + self.filterZoneHazards(zone, newTable) + + return newTable + + + # function returns the timing phrase to use for the area, hazard, + # and issuance time. Can force the type of timing phrase given the + # stype and etype. The stype/etype may be: NONE, EXPLICIT, FUZZY4, + # FUZZY8, or DAY_NIGHT_ONLY. Returns phrase like: + # FROM 4 PM MST THIS AFTERNOON THROUGH TUESDAY EVENING + def getTimingPhrase(self, hazRec, issueTime, stype=None, etype=None): + #Returns the timing phrase to use + + # Get the timing type + if stype is None or etype is None: + stype, etype = self.getTimingType(hazRec, issueTime) + + # Get the time zones for the areas + timeZones = self.hazardTimeZones(hazRec['id']) + + # Get the starting time + stext = [] + if type(stype) is types.MethodType: + for tz in timeZones: + newType, info = stype( + issueTime, hazRec['startTime'], tz, "start") + if info is not None and info not in stext: + stext.append(info) + stype = newType + elif stype == "EXPLICIT": + for tz in timeZones: + info = self.timingWordTableEXPLICIT(issueTime, + hazRec['startTime'], tz, "start") + if info not in stext: + stext.append(info) + elif stype == "FUZZY4": + for tz in timeZones: + info = self.timingWordTableFUZZY4(issueTime, + hazRec['startTime'], tz, "start") + if info not in stext: + stext.append(info) + elif stype == "FUZZY8": + for tz in timeZones: + info = self.timingWordTableFUZZY8(issueTime, + hazRec['startTime'], tz, "start") + if info not in stext: + stext.append(info) + elif stype == "DAY_NIGHT_ONLY": + for tz in timeZones: + info = 
self.timingWordTableDAYNIGHT(issueTime, + hazRec['startTime'], tz, "start") + if info not in stext: + stext.append(info) + + # Get the ending time + etext = [] + if type(etype) is types.MethodType: + for tz in timeZones: + newType, info = etype( + issueTime, hazRec['endTime'], tz, "end") + if info is not None and info not in etext: + etext.append(info) + etype = newType + elif etype == "EXPLICIT": + for tz in timeZones: + info = self.timingWordTableEXPLICIT(issueTime, + hazRec['endTime'], tz, "end") + if info not in etext: + etext.append(info) + elif etype == "FUZZY4": + for tz in timeZones: + info = self.timingWordTableFUZZY4(issueTime, + hazRec['endTime'], tz, "end") + if info not in etext: + etext.append(info) + elif etype == "FUZZY8": + for tz in timeZones: + info = self.timingWordTableFUZZY8(issueTime, + hazRec['endTime'], tz, "end") + if info not in etext: + etext.append(info) + elif etype == "DAY_NIGHT_ONLY": + for tz in timeZones: + info = self.timingWordTableDAYNIGHT(issueTime, + hazRec['endTime'], tz, "end") + if info not in etext: + etext.append(info) + + # timing connection types + startPrefix, endPrefix = self.getTimingConnectorType((stype, etype), + hazRec['act']) + + # get the timing phrase + phrase = self.calculateTimingPhrase(stype, etype, stext, etext, + startPrefix, endPrefix) + + return phrase + + # calculates the timing phrase based on the timing type, the calculated + # timing words, and the prefixes + def calculateTimingPhrase(self, stype, etype, stext, etext, startPrefix, + endPrefix): + + if (stype, etype) == ("NONE", "NONE"): + return "" #no timing phrase + + elif (stype, etype) in [("NONE", "EXPLICIT")]: + return self.ctp_NONE_EXPLICIT(stext,etext,startPrefix,endPrefix) + + elif (stype, etype) in [("NONE", "FUZZY4"), ("NONE", "FUZZY8")]: + return self.ctp_NONE_FUZZY(stext,etext,startPrefix,endPrefix) + + elif (stype, etype) in [("EXPLICIT", "EXPLICIT")]: + return self.ctp_EXPLICIT_EXPLICIT(stext,etext,startPrefix, + endPrefix) + + elif 
(stype, etype) in [("EXPLICIT", "FUZZY4"), ("EXPLICIT", "FUZZY8")]: + return self.ctp_EXPLICIT_FUZZY(stext,etext,startPrefix,endPrefix) + + elif (stype, etype) in [("FUZZY4", "FUZZY4"), ("FUZZY8", "FUZZY4"), + ("FUZZY4", "FUZZY8"), ("FUZZY8", "FUZZY8")]: + return self.ctp_FUZZY_FUZZY(stext,etext,startPrefix,endPrefix) + + elif (stype, etype) in [("NONE", "DAY_NIGHT_ONLY")]: + return self.ctp_NONE_DAYNIGHT(stext,etext,startPrefix,endPrefix) + + elif (stype, etype) in [("EXPLICIT", "DAY_NIGHT_ONLY")]: + return self.ctp_EXPLICIT_DAYNIGHT(stext,etext,startPrefix, + endPrefix) + + elif (stype, etype) in [("FUZZY4", "DAY_NIGHT_ONLY"), + ("FUZZY8", "DAY_NIGHT_ONLY")]: + return self.ctp_FUZZY_DAYNIGHT(stext,etext,startPrefix,endPrefix) + + elif (stype, etype) in [("DAY_NIGHT_ONLY", "DAY_NIGHT_ONLY")]: + return self.ctp_DAYNIGHT_DAYNIGHT(stext,etext,startPrefix, + endPrefix) + + elif (stype, etype) in [("DAY_NIGHT_ONLY", "NONE")]: + return self.ctp_DAYNIGHT_NONE(stext,etext,startPrefix,endPrefix) + + elif (stype, etype) in [("DAY_NIGHT_ONLY", "EXPLICIT")]: + return self.ctp_DAYNIGHT_EXPLICIT(stext,etext,startPrefix, + endPrefix) + + elif (stype, etype) in [("DAY_NIGHT_ONLY", "FUZZY4"), + ("DAY_NIGHT_ONLY", "FUZZY8")]: + return self.ctp_DAYNIGHT_FUZZY(stext,etext,startPrefix,endPrefix) + + else: + return "" + + + #calculates the NONE/EXPLICIT timing phrase + def ctp_NONE_EXPLICIT(self, stext, etext, startPrefix, endPrefix): + #single time zone + if len(etext) == 1: + hourStr, hourTZstr, description = etext[0] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + return endPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' + \ + description + + #multiple time zones + elif len(etext) > 1: + hourStr, hourTZstr, description = etext[0] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = endPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' + for x in range(1, len(etext)): + hourStr, hourTZstr, othDescription = etext[x] + #special cases NOON + if hourStr 
== "12 PM": + hourStr = "noon" + s = s + "/" + hourStr + ' ' + hourTZstr + "/ " + s = s + description + return s + + #calculates the NONE/FUZZY timing phrase + def ctp_NONE_FUZZY(self, stext, etext, startPrefix, endPrefix): + #returns phrase like: THROUGH THIS EVENING + hourStr, hourTZstr, description = etext[0] #ending text + s = endPrefix + ' ' + description + return s + + #calculates the NONE/EXPLICIT timing phrase + def ctp_EXPLICIT_EXPLICIT(self, stext, etext, startPrefix, endPrefix): + #return phrases like: + # FROM 2 AM WEDNESDAY TO 2 AM CST THURSDAY + # FROM 2 AM TO 5 AM CST THURSDAY + # FROM 2 AM CST /1 AM MST/ WEDNESDAY TO 2 AM CST /1 AM MST/ THURSDAY + # FROM 2 AM CST /1 AM MST/ TO 6 AM CST /5AM MST/ THURSDAY + + shourStr, shourTZstr, sdescription = stext[0] #starting text + ehourStr, ehourTZstr, edescription = etext[0] #ending text + + #special cases NOON + if shourStr == "12 PM": + shourStr = "noon" + + #special cases NOON + if ehourStr == "12 PM": + ehourStr = "noon" + + # special case EARLY THIS MORNING and THIS MORNING, replace with + # just THIS MORNING + if sdescription == "early this morning" and \ + edescription == "this morning": + sdescription = "this morning" #combine two phrases + + + # single time zone, same time zone for start/end times - same day + if len(stext) == 1 and len(etext) == 1 and \ + shourTZstr == ehourTZstr and sdescription == edescription: + return startPrefix + ' ' + shourStr + ' ' + endPrefix + ' ' +\ + ehourStr + ' ' + ehourTZstr + ' ' + edescription + + # single time zone, same time zone for start/end times - diff day + if len(stext) == 1 and len(etext) == 1 and \ + shourTZstr == ehourTZstr and sdescription != edescription: + return startPrefix + ' ' + shourStr + ' ' + sdescription + \ + ' ' + endPrefix + ' ' + ehourStr + ' ' + ehourTZstr + \ + ' ' + edescription + + # mult time zones, same day for start/end times + if sdescription == edescription: + s = startPrefix + ' ' + shourStr + ' ' + shourTZstr + ' ' + for x in 
range(1, len(stext)): + hourStr, hourTZstr, description = stext[x] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = s + "/" + hourStr + ' ' + hourTZstr + "/ " + s = s + endPrefix + ' ' + ehourStr + ' ' + ehourTZstr + ' ' + for x in range(1, len(etext)): + hourStr, hourTZstr, description = etext[x] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = s + "/" + hourStr + ' ' + hourTZstr + "/ " + s = s + edescription + return s + + # mult time zones, different day for start/end times + else: + s = startPrefix + ' ' + shourStr + ' ' + shourTZstr + ' ' + for x in range(1, len(stext)): + hourStr, hourTZstr, description = stext[x] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = s + "/" + hourStr + ' ' + hourTZstr + "/ " + s = s + sdescription + ' ' + endPrefix + ' ' + ehourStr + \ + ' ' + ehourTZstr + ' ' + for x in range(1, len(etext)): + hourStr, hourTZstr, description = etext[x] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = s + "/" + hourStr + ' ' + hourTZstr + "/ " + s = s + edescription + return s + + #calculates the NONE/EXPLICIT timing phrase + def ctp_EXPLICIT_FUZZY(self, stext, etext, startPrefix, endPrefix): + #returns phrase like: + # FROM 2 AM CST WEDNESDAY THROUGH LATE WEDNESDAY NIGHT + # FROM 2 AM CST /1 AM MST/ WEDNESDAY THROUGH LATE WEDNESDAY NIGHT + + #start phrase + hourStr, hourTZstr, description0 = stext[0] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = startPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' + for x in range(1, len(stext)): + hourStr, hourTZstr, description = stext[x] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = s + "/" + hourStr + ' ' + hourTZstr + "/ " + s = s + description0 + ' ' + + #end phrase + hourStr, hourTZstr, description = etext[0] + s = s + endPrefix + ' ' + description + + return s + + #calculates the FUZZY/FUZZY timing phrase + def ctp_FUZZY_FUZZY(self, stext, etext, startPrefix, 
endPrefix): + #return phrases like FROM THIS EVENING THROUGH LATE WEDNESDAY NIGHT + #return phrases like LATE WEDNESDAY NIGHT + + hourStr, hourTZstr, s_description = stext[0] #starting text + hourStr, hourTZstr, e_description = etext[0] #ending text + + #special case of description the same + if s_description == e_description: + return s_description + + #normal case of different descriptions + s = startPrefix + ' ' + s_description + ' ' + endPrefix + ' ' +\ + e_description + + return s + + def ctp_NONE_DAYNIGHT(self,stext,etext,startPrefix,endPrefix): + #return phrases like THROUGH WEDNESDAY + + hourStr, hourTZstr, e_description = etext[0] #ending text + + s = endPrefix + ' ' + e_description + + return s + + def ctp_EXPLICIT_DAYNIGHT(self, stext, etext, startPrefix, endPrefix): + #returns phrase like: + # FROM 2 AM CST WEDNESDAY THROUGH WEDNESDAY + # FROM 2 AM CST /1 AM MST/ WEDNESDAY THROUGH WEDNESDAY + + #start phrase + hourStr, hourTZstr, description0 = stext[0] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = startPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' + for x in range(1, len(stext)): + hourStr, hourTZstr, description = stext[x] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = s + "/" + hourStr + ' ' + hourTZstr + "/ " + s = s + description0 + ' ' + + #end phrase + hourStr, hourTZstr, description = etext[0] + s = s + endPrefix + ' ' + description + + return s + + def ctp_FUZZY_DAYNIGHT(self, stext,etext,startPrefix,endPrefix): + #return phrases like FROM THIS EVENING THROUGH WEDNESDAY NIGHT + + hourStr, hourTZstr, s_description = stext[0] #starting text + hourStr, hourTZstr, e_description = etext[0] #ending text + + #special case of description the same + if s_description == e_description: + return s_description + + #normal case of different descriptions + s = startPrefix + ' ' + s_description + ' ' + endPrefix + ' ' +\ + e_description + + return s + + def 
ctp_DAYNIGHT_DAYNIGHT(self,stext,etext,startPrefix,endPrefix): + #return phrases like FROM TONIGHT THROUGH WEDNESDAY + + hourStr, hourTZstr, s_description = stext[0] #starting text + hourStr, hourTZstr, e_description = etext[0] #ending text + + #special case of description the same + if s_description == e_description: + return s_description + + #normal case of different descriptions + s = startPrefix + ' ' + s_description + ' ' + endPrefix + ' ' +\ + e_description + + return s + + def ctp_DAYNIGHT_EXPLICIT(self, stext,etext,startPrefix,endPrefix): + #returns phrase like: + # FROM TUESDAY UNTIL 2 AM CST WEDNESDAY + # FROM TUESDAY UNTIL 2 AM CST /1 AM MST/ WEDNESDAY + + #start phrase + hourStr, hourTZstr, description = stext[0] + s = startPrefix + ' ' + description + ' ' + + #end phrase + hourStr, hourTZstr, description0 = etext[0] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = s + endPrefix + ' ' + hourStr + ' ' + hourTZstr + ' ' + for x in range(1, len(etext)): + hourStr, hourTZstr, description = etext[x] + #special cases NOON + if hourStr == "12 PM": + hourStr = "noon" + s = s + "/" + hourStr + ' ' + hourTZstr + "/ " + s = s + description0 + ' ' + + return s + + def ctp_DAYNIGHT_NONE(self, stext,etext,startPrefix,endPrefix): + #return phrases like FROM TONIGHT + + hourStr, hourTZstr, s_description = stext[0] #starting text + + s = startPrefix + ' ' + s_description + + return s + + def ctp_DAYNIGHT_FUZZY(self,stext,etext,startPrefix,endPrefix): + #return phrases like FROM TONIGHT THROUGH WEDNESDAY NIGHT + + hourStr, hourTZstr, s_description = stext[0] #starting text + hourStr, hourTZstr, e_description = etext[0] #ending text + + #special case of description the same + if s_description == e_description: + return s_description + + #normal case of different descriptions + s = startPrefix + ' ' + s_description + ' ' + endPrefix + ' ' +\ + e_description + + return s + + + def getTimingConnectorType(self, timingType, action): + # Returns the 
start and end prefix for the given start and end phrase + # type and action code. + d = {("NONE", "NONE"): (None, None), + ("NONE", "EXPLICIT"): (None, "until"), + ("NONE", "FUZZY4"): (None, "through"), + ("NONE", "FUZZY8"): (None, "through"), + ("EXPLICIT", "EXPLICIT"): ("from", "to"), + ("EXPLICIT", "FUZZY4"): ("from", "through"), + ("EXPLICIT", "FUZZY8"): ("from", "through"), + ("FUZZY4", "FUZZY4"): ("from", "through"), + ("FUZZY4", "FUZZY8"): ("from", "through"), + ("FUZZY8", "FUZZY4"): ("from", "through"), + ("FUZZY8", "FUZZY8"): ("from", "through"), + ("NONE", "DAY_NIGHT_ONLY"): (None, "through"), + ("EXPLICIT", "DAY_NIGHT_ONLY"): ("from", "through"), + ("FUZZY4", "DAY_NIGHT_ONLY"): ("from", "through"), + ("FUZZY8", "DAY_NIGHT_ONLY"): ("from", "through"), + ("DAY_NIGHT_ONLY", "DAY_NIGHT_ONLY"): ("from", "through"), + ("DAY_NIGHT_ONLY", "NONE"): ("from", None), + ("DAY_NIGHT_ONLY", "EXPLICIT"): ("from", "to"), + ("DAY_NIGHT_ONLY", "FUZZY4"): ("from", "through"), + ("DAY_NIGHT_ONLY", "FUZZY8"): ("from", "through"), + } + + # special case for expirations. 
+ if action == 'EXP': + return (None, "at") + + return d.get(timingType, ("", "")) + + def getTimingType(self, hazRec, issueTime): + #Returns the timing type based on the issuanceTime and hazard record + #Returns (startType, endType), which is NONE, EXPLICIT, FUZZY4, FUZZY8 + + # Get the local headlines customizable timing + tr = self.makeTimeRange(hazRec['startTime'], hazRec['endTime']) + locStart, locEnd = self.getLocalHeadlinesTiming( + None, None, hazRec['phen'], tr, hazRec['id'], issueTime) + + #time from issuanceTime + deltaTstart = hazRec['startTime'] - issueTime #seconds past now + deltaTend = hazRec['endTime'] - issueTime #seconds past now + + HR=3600 #convenience constants + MIN=60 #convenience constants + + # record in the past, ignore + if deltaTend <= 0: + return ("NONE", "NONE") + + # upgrades and cancels + if hazRec['act'] in ['UPG', 'CAN']: + return ("NONE", "NONE") #upgrades/cancels never get timing phrases + + # expirations EXP codes are always expressed explictly, only end time + if hazRec['act'] == 'EXP': + return ('NONE', 'EXPLICIT') + + phensig = hazRec['phen'] + '.' + hazRec['sig'] + + # SPC Watches always get explicit times, 3 hour start mention + spcWatches = ['TO.A', 'SV.A'] + if phensig in spcWatches: + if deltaTstart < 3*HR: + return ('NONE', 'EXPLICIT') + else: + return ('EXPLICIT', 'EXPLICIT') + + # Tropical events never get times at all + tpcEvents = ['TY.A','TY.W','HU.A','HU.S','HU.W','TR.A','TR.W', + 'SS.A','SS.W'] + if phensig in tpcEvents: + return ('NONE', 'NONE') + + # special marine case? 
+ marineHazList = ["SC.Y", "SW.Y", "GL.W", "SR.W", 'HF.W', 'BW.Y', + 'UP.W', 'UP.Y', 'RB.Y', 'SE.W', 'SI.Y'] #treat like watches + marinePils = ['CWF', 'OFF', 'NSH', 'GLF'] #specific marine pils + oconusSites = ['PGUM','PHFO','PAFC','PAJK','PAFG'] + + # regular products - not marine + if hazRec['pil'] not in marinePils: + #advisories/warnings + if hazRec['sig'] in ['Y','W']: #advisories/warnings - explicit + if deltaTstart < 3*HR: #no start time in first 3 hours + start = 'NONE' + else: + start = 'EXPLICIT' #explicit start time after 3 hours + end = 'EXPLICIT' #end time always explicit + + #watches + elif hazRec['sig'] in ['A']: #watches - mix of explicit/fuzzy + if deltaTstart < 3*HR: #no start time in first 3 hours + start = 'NONE' + elif deltaTstart < 12*HR: + start = 'EXPLICIT' #explicit start time 3-12 hours + else: + start = 'FUZZY4' #fuzzy times after 12 (4/day) + if deltaTend < 12*HR: #explicit end time 0-12 hours + end = 'EXPLICIT' + else: + end = 'FUZZY4' #fuzzy times after 12 (4/day) + + #local hazards + elif locStart is not None and locEnd is not None: + start = locStart + end = locEnd + else: + if deltaTstart < 3*HR: #no start time in first 3 hours + start = 'NONE' + elif deltaTstart < 12*HR: + start = 'EXPLICIT' #explicit start time 3-12 hours + else: + start = 'FUZZY4' #fuzzy times after 12 (4/day) + if deltaTend < 12*HR: #explicit end time 0-12 hours + end = 'EXPLICIT' + else: + end = 'FUZZY4' #fuzzy times after 12 (4/day) + + + # marine - CONUS + elif hazRec['officeid'] not in oconusSites: + + #advisories/warnings - explicit, but not some phensigs + if hazRec['sig'] in ['Y','W'] and phensig not in marineHazList: + if deltaTstart < 3*HR: #no start time in first 3 hours + start = 'NONE' + else: + start = 'EXPLICIT' #explicit start time after 3 hours + end = 'EXPLICIT' #end time always explicit + + #watches - mix of explicit/fuzzy, some phensig treated as watches + elif hazRec['sig'] in ['A'] or phensig in marineHazList: + if deltaTstart < 3*HR: #no 
start time in first 3 hours + start = 'NONE' + elif deltaTstart < 12*HR: + start = 'EXPLICIT' #explicit start time 3-12 hours + else: + start = 'FUZZY4' #fuzzy times after 12 (4/day) + if deltaTend < 12*HR: #explicit end time 0-12 hours + end = 'EXPLICIT' + else: + end = 'FUZZY4' #fuzzy times after 12 (4/day) + + #local hazards - treat as watches + elif locStart is not None and locEnd is not None: + start = locStart + end = locEnd + else: + if deltaTstart < 3*HR: #no start time in first 3 hours + start = 'NONE' + elif deltaTstart < 12*HR: + start = 'EXPLICIT' #explicit start time 3-12 hours + else: + start = 'FUZZY4' #fuzzy times after 12 (4/day) + if deltaTend < 12*HR: #explicit end time 0-12 hours + end = 'EXPLICIT' + else: + end = 'FUZZY4' #fuzzy times after 12 (4/day) + + # marine - OCONUS + else: + + #advisories/warnings - explicit, but not some phensigs + if hazRec['sig'] in ['Y','W'] and phensig not in marineHazList: + if deltaTstart < 3*HR: #no start time in first 3 hours + start = 'NONE' + else: + start = 'EXPLICIT' #explicit start time after 3 hours + end = 'EXPLICIT' #end time always explicit + + #special marine phensigs - treat as watches, with fuzzy8 + elif phensig in marineHazList: + if deltaTstart < 3*HR: #no start time in first 3 hours + start = 'NONE' + else: + start = 'FUZZY8' #fuzzy start times + end = 'FUZZY8' #always fuzzy end times + + + #regular watches - fuzzy4 + elif hazRec['sig'] in ['A']: + if deltaTstart < 3*HR: #no start time in first 3 hours + start = 'NONE' + elif deltaTstart < 12*HR: + start = 'EXPLICIT' #explicit start time 3-12 hours + else: + start = 'FUZZY4' #fuzzy times after 12 (4/day) + if deltaTend < 12*HR: #explicit end time 0-12 hours + end = 'EXPLICIT' + else: + end = 'FUZZY4' #fuzzy times after 12 (4/day) + + #local hazards - treat as watches + elif locStart is not None and locEnd is not None: + start = locStart + end = locEnd + else: + if deltaTstart < 3*HR: #no start time in first 3 hours + start = 'NONE' + elif 
deltaTstart < 12*HR: + start = 'EXPLICIT' #explicit start time 3-12 hours + else: + start = 'FUZZY4' #fuzzy times after 12 (4/day) + if deltaTend < 12*HR: #explicit end time 0-12 hours + end = 'EXPLICIT' + else: + end = 'FUZZY4' #fuzzy times after 12 (4/day) + + return (start, end) + + def getLocalHeadlinesTiming(self,tree, node, key, tr, + areaLabel, issuanceTime): + headlinesTiming = self.headlinesTiming(tree, node, key, tr, + areaLabel, issuanceTime) + if headlinesTiming is None: + locStart = None + locEnd = None + else: + locStart, locEnd = headlinesTiming + if locStart == "FUZZY": + locStart = "FUZZY4" + if locEnd == "FUZZY": + locEnd = "FUZZY4" + return locStart, locEnd + + def hazardTimeZones(self, areaList): + #returns list of time zones for the starting time + #and list of time zones for the ending time. The areaList provides + #a complete list of areas for this headline. startT, endT are the + #hazard times. + + # sort the areaList so time zones are in consistent order + areaList.sort() + + # get this time zone + thisTimeZone = os.environ["TZ"] + zoneList = [] + + # get the AreaDictionary that contains time zones per edit area + areaDictName = self._areaDictionary + accessor = ModuleAccessor.ModuleAccessor() + areaDict = accessor.variable(areaDictName, "AreaDictionary") + + # check to see if we have any areas outside our time zone + for areaName in areaList: + if areaName in list(areaDict.keys()): + entry = areaDict[areaName] + if "ugcTimeZone" not in entry: #add your site id + if thisTimeZone not in zoneList: + zoneList.append(thisTimeZone) + continue # skip it + timeZoneList = entry["ugcTimeZone"] + if type(timeZoneList) == bytes: # a single value + timeZoneList = [timeZoneList] # make it into a list + for timeZone in timeZoneList: + if timeZone not in zoneList: + zoneList.append(timeZone) + + # if the resulting zoneList is empty, put in our time zone + if len(zoneList) == 0: + zoneList.append(thisTimeZone) + + # if the resulting zoneList has our time 
zone in it, be sure it + # is the first one in the list + try: + index = zoneList.index(thisTimeZone) + if index != 0: + del zoneList[index] + zoneList.insert(0, thisTimeZone) + except: + pass + + return zoneList + + def timingWordTableEXPLICIT(self, issueTime, eventTime, timezone, + timeType='start'): + #returns (timeValue, timeZone, descriptiveWord). + #eventTime is either the starting or ending time, based on + #the timeType flag. timezone is the time zone for the hazard area + + HR=3600 + sameDay = [ + (0*HR, 6*HR, "early this morning"), #midnght-559am + (6*HR, 12*HR-1, "this morning"), #600am-1159am + (12*HR, 12*HR+1, "today"), #noon + (12*HR+1, 18*HR-1, "this afternoon"), #1201pm-559pm + (18*HR, 24*HR, "this evening")] #6pm-1159pm + + nextDay = [ + (0*HR, 0*HR+1, "tonight"), #midnght + (0*HR, 24*HR, ""),] #midnght-1159pm + + subsequentDay = [ + (0*HR, 0*HR+1, " night"), #midnght + (0*HR, 24*HR, ""),] #midnght-1159pm + + + #determine local time + myTimeZone = os.environ["TZ"] # save the defined time zone + os.environ["TZ"] = timezone # set the new time zone + ltissue = time.localtime(issueTime) # issuance local time + ltevent = time.localtime(eventTime) # event local time + #get the hour string (e.g., 8 PM) + hourStr = time.strftime("%I %p", ltevent) + if hourStr[0] == '0': + hourStr = hourStr[1:] #eliminate leading zero + + #get the time zone (e.g., MDT) + hourTZstr = time.strftime("%Z", ltevent) + + #determine the delta days from issuance to event + diffDays = ltevent[7] - ltissue[7] #julian day + if diffDays < 0: #year wrap around, assume Dec/Jan + diffDays = ltevent[2] + 31 - ltissue[2] #day of month + + #get description time phrase + description = "" + hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute + if diffDays == 0: + for (startT, endT, desc) in sameDay: + if hourmin >= startT and hourmin < endT and timeType=='start': + description = desc + break + elif hourmin <= endT and timeType=='end': + description = desc + break + + else: + #choose proper 
table + if diffDays == 1: + table = nextDay + else: + table = subsequentDay + for (startT, endT, desc) in table: + hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute + if hourmin >= startT and hourmin < endT and timeType=='start': + description = desc + break + elif hourmin <= endT and timeType=='end': + description = desc + break + dow = ltevent[6] #day of week + dowMinusOne = ltevent[6] - 1 + if dowMinusOne < 0: + dowMinusOne = 6 #week wraparound + description = string.replace(description, "", + self.asciiDayOfWeek(dow)) #day of week + description = string.replace(description, "", + self.asciiDayOfWeek(dowMinusOne)) #day of week + + #special cases NOON + if hourStr == "12 PM" and description == "today": + hourStr = "noon" + + #special cases MIDNIGHT + if hourStr == "12 AM": + hourStr = "midnight" + + os.environ["TZ"] = myTimeZone # reset the defined time zone + + return (hourStr, hourTZstr, description) + + + def timingWordTableFUZZY4(self, issueTime, eventTime, timeZone, + timeType='start'): + #returns (timeValue, timeZone, descriptiveWord). + #eventTime is either the starting or ending time, based on + #the timeType flag. 
timezone is the time zone for the hazard area + #table is local time, start, end, descriptive phrase + HR=3600 + sameDay = [ + (0*HR, 6*HR, "early this morning"), #midnght-559am + (6*HR, 12*HR, "this morning"), #600am-noon + (12*HR, 18*HR, "this afternoon"), #1200pm-559pm + (18*HR, 24*HR, "this evening")] #6pm-1159pm + + nextDay = [ + (0*HR, 0*HR, "this evening"), #midnght tonight + (0*HR, 6*HR, "late tonight"), #midnght-559am + (6*HR, 12*HR, " morning"), #600am-noon + (12*HR, 18*HR, " afternoon"), #1200pm-559pm + (18*HR, 24*HR, " evening")] #6pm-1159pm + + subsequentDay = [ + (0*HR, 0*HR, " evening"), #midnght ystdy + (0*HR, 6*HR, "late night"), #midnght-559am + (6*HR, 12*HR, " morning"), #600am-noon + (12*HR, 18*HR, " afternoon"), #1200pm-559pm + (18*HR, 24*HR, " evening")] #6pm-1159pm + + + #determine local time + myTimeZone = os.environ["TZ"] # save the defined time zone + os.environ["TZ"] = timeZone # set the new time zone + ltissue = time.localtime(issueTime) # issuance local time + ltevent = time.localtime(eventTime) # event local time + + #determine the delta days from issuance to event + diffDays = ltevent[7] - ltissue[7] #julian day + if diffDays < 0: #year wrap around, assume Dec/Jan + diffDays = ltevent[2] + 31 - ltissue[2] #day of month + + #get description time phrase + description = "" + hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute + if diffDays == 0: + for (startT, endT, desc) in sameDay: + if hourmin >= startT and hourmin < endT and timeType=='start': + description = desc + break + elif hourmin <= endT and timeType=='end': + description = desc + break + + else: + #choose proper table + if diffDays == 1: + table = nextDay + else: + table = subsequentDay + for (startT, endT, desc) in table: + hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute + if hourmin >= startT and hourmin < endT and timeType=='start': + description = desc + break + elif hourmin <= endT and timeType=='end': + description = desc + break + dow = ltevent[6] #day of 
week + dowMinusOne = ltevent[6] - 1 + if dowMinusOne < 0: + dowMinusOne = 6 #week wraparound + description = string.replace(description, "", + self.asciiDayOfWeek(dow)) #day of week + description = string.replace(description, "", + self.asciiDayOfWeek(dowMinusOne)) #day of week + + os.environ["TZ"] = myTimeZone # reset the defined time zone + + hourStr = None + hourTZstr = None + return (hourStr, hourTZstr, description) + + + def timingWordTableFUZZY8(self, issueTime, eventTime, timeZone, + timeType='start'): + #returns the descriptive word for the event. eventTime is either + #the starting or ending time, based on the timeType flag. + #table is local time, start, end, descriptive phrase-A + + HR=3600 + sameDay = [ + (0*HR, 3*HR, "late night"), #midnght-259am + (3*HR, 6*HR, "early this morning"), #300am-559am + (6*HR, 9*HR, "this morning"), #600am-859am + (9*HR, 12*HR, "late this morning"), #900am-1159am + (12*HR, 15*HR, "early this afternoon"), #noon-259pm + (15*HR, 18*HR, "late this afternoon"), #300pm-559pm + (18*HR, 21*HR, "this evening"), #600pm-859pm + (21*HR, 24*HR, "tonight")] #900pm-1159pm + + nextDayStart = [ + (0*HR, 3*HR, "late night"), #midnght-259am + (3*HR, 6*HR, "early morning"), #300am-559am + (6*HR, 12*HR, " morning"), #600am-noon + (12*HR, 18*HR, " afternoon"), #1200pm-559pm + (18*HR, 24*HR, " evening")] #6pm-1159pm + + nextDayEnd = [ + (0*HR, 0*HR, "tonight"), #midnght tonight + (0*HR, 3*HR, "late night"), #midnght-259am + (3*HR, 6*HR, "early morning"), #300am-559am + (6*HR, 12*HR, " morning"), #600am-noon + (12*HR, 18*HR, " afternoon"), #1200pm-559pm + (18*HR, 24*HR, " night")] #6pm-1159pm + + subsequentDayStart = [ + (0*HR, 6*HR, "late night"), #midnght-559am + (6*HR, 12*HR, " morning"), #600am-noon + (12*HR, 18*HR, " afternoon"), #1200pm-559pm + (18*HR, 24*HR, " evening")] #6pm-1159pm + + subsequentDayEnd = [ + (0*HR, 0*HR, " night"), #midnght tonight + (0*HR, 6*HR, "early morning"), #midnght-559am + (6*HR, 12*HR, " morning"), #600am-noon + 
(12*HR, 18*HR, " afternoon"), #1200pm-559pm + (18*HR, 24*HR, " night")] #6pm-1159pm + + + #determine local time + myTimeZone = os.environ["TZ"] # save the defined time zone + os.environ["TZ"] = timeZone # set the new time zone + ltissue = time.localtime(issueTime) # issuance local time + ltevent = time.localtime(eventTime) # event local time + + #determine the delta days from issuance to event + diffDays = ltevent[7] - ltissue[7] #julian day + if diffDays < 0: #year wrap around, assume Dec/Jan + diffDays = ltevent[2] + 31 - ltissue[2] #day of month + + #get description time phrase + description = "" + hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute + if diffDays == 0: + for (startT, endT, desc) in sameDay: + if hourmin >= startT and hourmin < endT and timeType=='start': + description = desc + break + elif hourmin <= endT and timeType=='end': + description = desc + break + + else: + #choose proper table + if timeType == 'start': + if diffDays == 1: + table = nextDayStart + else: + table = subsequentDayStart + else: + if diffDays == 1: + table = nextDayEnd + else: + table = subsequentDayEnd + for (startT, endT, desc) in table: + hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute + if hourmin >= startT and hourmin < endT and timeType=='start': + description = desc + break + elif hourmin <= endT and timeType=='end': + description = desc + break + + #do substitution + dow = ltevent[6] #day of week + dowMinusOne = ltevent[6] - 1 + if dowMinusOne < 0: + dowMinusOne = 6 #week wraparound + description = string.replace(description, "", + self.asciiDayOfWeek(dow)) #day of week + description = string.replace(description, "", + self.asciiDayOfWeek(dowMinusOne)) #day of week + + os.environ["TZ"] = myTimeZone # reset the defined time zone + + hourStr = None + hourTZstr = None + return (hourStr, hourTZstr, description) + + def timingWordTableDAYNIGHT(self, issueTime, eventTime, timeZone, + timeType='start'): + #returns (timeValue, timeZone, descriptiveWord). 
+ #eventTime is either the starting or ending time, based on + #the timeType flag. timezone is the time zone for the hazard area + #table is local time, start, end, descriptive phrase + HR=3600 + sameDay = [ + (0*HR, self.DAY()*HR, "early today"), #midnght-559am + (self.DAY()*HR, self.NIGHT()*HR, "today"), #600am-6pm + (self.NIGHT()*HR, 24*HR, "tonight")] #6pm-midnight + + nextDay = [ + (0*HR, self.DAY()*HR, "tonight"), #midnght-559am + (self.DAY()*HR, self.NIGHT()*HR, ""), #600am-6pm + (self.NIGHT()*HR, 24*HR, " night")] #6pm-midnight + + subsequentDay = [ + (0*HR, self.DAY()*HR, " night"), #midnght-559am + (self.DAY()*HR, self.NIGHT()*HR, ""), #600am-6pm + (self.NIGHT()*HR, 24*HR, " night")] #6pm-midnight + + #determine local time + myTimeZone = os.environ["TZ"] # save the defined time zone + os.environ["TZ"] = timeZone # set the new time zone + ltissue = time.localtime(issueTime) # issuance local time + ltevent = time.localtime(eventTime) # event local time + + #determine the delta days from issuance to event + diffDays = ltevent[7] - ltissue[7] #julian day + if diffDays < 0: #year wrap around, assume Dec/Jan + diffDays = ltevent[2] + 31 - ltissue[2] #day of month + + #get description time phrase + description = "" + hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute + if diffDays == 0: + for (startT, endT, desc) in sameDay: + if hourmin >= startT and hourmin < endT and timeType=='start': + description = desc + break + elif hourmin <= endT and timeType=='end': + description = desc + break + + else: + #choose proper table + if diffDays == 1: + table = nextDay + else: + table = subsequentDay + for (startT, endT, desc) in table: + hourmin = ltevent[3]*3600 + ltevent[4]*60 #hour, minute + if hourmin >= startT and hourmin < endT and timeType=='start': + description = desc + break + elif hourmin <= endT and timeType=='end': + description = desc + break + dow = ltevent[6] #day of week + dowMinusOne = ltevent[6] - 1 + if dowMinusOne < 0: + dowMinusOne = 6 #week 
wraparound + description = string.replace(description, "", + self.asciiDayOfWeek(dow)) #day of week + description = string.replace(description, "", + self.asciiDayOfWeek(dowMinusOne)) #day of week + + os.environ["TZ"] = myTimeZone # reset the defined time zone + + hourStr = None + hourTZstr = None + return (hourStr, hourTZstr, description) + + + def asciiDayOfWeek(self, number): + #converts number (0-Monday) to day of week + days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', + 'Saturday', 'Sunday'] + if number >= 0 and number < 7: + return days[number] + else: + return "?" + repr(number) + "?" + + + # Returns the headline phrase based on the specified hazard. + # The hazard record contains all geoIDs in the hazard['id'] field, + # not just a single one. Doesn't add the dots. + def makeStandardPhrase(self, hazard, issuanceTime): + + # hdln field present? + if 'hdln' not in hazard: + return "" + + # make sure the hazard is still in effect or within EXP critiera + if (hazard['act'] != 'EXP' and issuanceTime >= hazard['endTime']) or \ + (hazard['act'] == 'EXP' and issuanceTime > 30*60 + hazard['endTime']): + return "" # no headline for expired hazards + + #assemble the hazard type + hazStr = hazard['hdln'] + + # if the hazard is a convective watch, tack on the etn + phenSig = hazard['phen'] + "." + hazard['sig'] + if phenSig in ["TO.A", "SV.A"]: + hazStr = hazStr + " " + str(hazard["etn"]) + + # add on the action + actionWords = self.actionControlWord(hazard, issuanceTime) + hazStr = hazStr + ' ' + actionWords + + #get the timing words + timeWords = self.getTimingPhrase(hazard, issuanceTime) + if len(timeWords): + hazStr = hazStr + ' ' + timeWords + + return hazStr + + def timeCompare(self, haz1, haz2): + if haz1['startTime'] < haz2['startTime']: + return -1 + elif haz1['startTime'] == haz2['startTime']: + return 0 + else: + return 1 + + # Sorts headlines for marine products. 
sort algorithm + # cronological ordering by start time, then action, + # then significance, then phen alphabetically. + def marineSortHazardAlg(self, r1, r2): + #1st by start time + if r1['startTime'] < r2['startTime']: + return -1 + elif r1['startTime'] > r2['startTime']: + return 1 + + #2nd by action + actionCodeOrder = ["CAN", "EXP", "UPG", "NEW", "EXB", "EXA", + "EXT", "ROU", "CON"] + try: + aIndex = actionCodeOrder.index(r1['act']) + except: + aIndex = 99 + try: + bIndex = actionCodeOrder.index(r2['act']) + except: + bIndex = 99 + if aIndex < bIndex: + return -1 + elif aIndex > bIndex: + return 1 + + #3rd by significance + sig = ['W','Y','A'] + try: + index1 = sig.index(r1['sig']) + except: + index1 = 99 + try: + index2 = sig.index(r2['sig']) + except: + index2 = 99 + if index1 < index2: + return -1 + elif index1 > index2: + return 1 + + #4th by phen (alphabetically) + if r1['phen'] < r2['phen']: + return -1 + elif r1['phen'] > r2['phen']: + return 1 + + #equal + return 0 + + + # Sorts headlines for regular products. 
+ def regularSortHazardAlg(self, r1, r2): + actActions = ["NEW", "EXB", "EXA", "EXT", "ROU", "CON"] + inactActions = ["CAN", "EXP", "UPG"] + actionCodeOrder = actActions + inactActions + + # 1st by general action category + if r1['act'] in actActions and r2['act'] in inactActions: + return -1 + elif r1['act'] in inactActions and r2['act'] in actActions: + return 1 + + # 2nd by chronological event starting time + if r1['startTime'] < r2['startTime']: + return -1 + elif r1['startTime'] > r2['startTime']: + return 1 + + # 3rd by action code order + try: + aIndex = actionCodeOrder.index(r1['act']) + except: + aIndex = 99 + try: + bIndex = actionCodeOrder.index(r2['act']) + except: + bIndex = 99 + if aIndex < bIndex: + return -1 + elif aIndex > bIndex: + return 1 + + #4th by significance + sig = ['W','Y','A'] + try: + index1 = sig.index(r1['sig']) + except: + index1 = 99 + try: + index2 = sig.index(r2['sig']) + except: + index2 = 99 + if index1 < index2: + return -1 + elif index1 > index2: + return 1 + + #5th by phen (alphabetically) + if r1['phen'] < r2['phen']: + return -1 + elif r1['phen'] > r2['phen']: + return 1 + + #equal + return 0 + + + # Makes multiple headlines based on the hazards list and returns the lot. + def makeHeadlinePhrases(self, tree, node, hazardList, issuanceTime, + testMode=0): + returnStr = "" + # make a deepcopy since we plan to mess with it. + hList = copy.deepcopy(hazardList) + + # sort headlines in appropriate order + if len(hList): + if hList[0]['pil'] in ['CWF','NSH','OFF','GLF']: + hList.sort(self.marineSortHazardAlg) + else: + hList.sort(self.regularSortHazardAlg) + + while len(hList) > 0: + hazard = hList[0] + + # Can't make phrases with hazards with no 'hdln' entry + if hazard['hdln'] == "": + hList.remove(hazard) + continue + + phenSig = hazard['phen'] + "." 
+ hazard['sig'] + actionCodeList = self.getAllowedActionCodes(phenSig) + + # if the action is not in the actionCodeList, skip it + if hazard['sig'] != "": # it's not locally defined + if not hazard['act'] in actionCodeList: + print("...Ignoring action code:", hazard['act'], \ + hazard['hdln']) + hList.remove(hazard) + continue + + # get the headline phrase + hazStr = self.makeStandardPhrase(hazard, issuanceTime) + if len(hazStr): + # Call user hook + localStr = self.addSpace(self.hazard_hook( + tree, node, hazard['phen'], hazard['sig'], hazard['act'], + hazard['startTime'], hazard['endTime']), "leading") + returnStr = returnStr + "..." + hazStr + localStr + "...\n" + + # always remove the main hazard from the list + hList.remove(hazard) + + return returnStr + + # Returns a formatted string announcing the hazards that are valid with + # timing phrases + def getHazardString(self, tree, node, fcstArea): + if len(fcstArea) <= 0: + return "" + hazardTable = self._hazards.getHazardList(fcstArea) + returnStr = "" + issuanceTime = self._issueTime.unixTime() + + returnStr = self.makeHeadlinePhrases(tree, node, hazardTable, + issuanceTime) + #Test mode? + returnStr = self.headlinePhraseTESTcheck(tree.get("argDict"), + returnStr) + + return returnStr.upper() + + + # The organizeHazard method brings in the raw analyzed table, + # then organizes it by edit area, returing a list of + # editArea lists. The first element of the list must the the first + # segment in a hazard based product. Ensures that a group of edit areas + # does not contain both zones and FIPS code - per 10-1702. + def organizeHazards(self, rawATable): + + # Initialize data structures to be used. + byIdDict = {} + byHazardDict = {} + masterEditAreaList = [] + + # Loop over the activeTable, and organize by editArea + + # + # Added code to discard segment identifer when cancelling a product. + # this was creating bogus segments. 
+ # + + for eachHazard in rawATable: + if eachHazard['id'] in byIdDict: + byIdDict[eachHazard['id']].append(\ + (eachHazard['phen'], eachHazard['sig'], eachHazard['seg'], + eachHazard['act'], eachHazard['startTime'], eachHazard['endTime'], + eachHazard['etn'])) + else: + byIdDict[eachHazard['id']] = [(eachHazard['phen'], + eachHazard['sig'], eachHazard['seg'], + eachHazard['act'], eachHazard['startTime'], eachHazard['endTime'], + eachHazard['etn'])] + + # + # Go through the sorted dictionary, organize into combos + # + + idsList = list(byIdDict.keys()) + unsortedHazards = list(byIdDict.values()) + sortedHazards = [] + for eachHazard in unsortedHazards: + if not self.__sortedContains(eachHazard, sortedHazards): + sortedHazards.append(eachHazard) + + # + # The following section determines the VTEC/segment ordering + # + + weightedList = [] + + # + # this list ranks by 'sig' and 'act' from least [0] to most + # importance [n]. All CANs go at the end, because cancel is the + # most important action. + # + + segmentVTECOrderList = [ + # Place holder for local hazards + 'LocalHazard', + 'F.ROU', 'F.CON', 'F.EXT', 'F.EXA', 'F.EXB', 'F.NEW', + 'F.UPG', 'S.ROU', 'S.CON', 'S.EXT', 'S.EXA', 'S.EXB', + 'S.NEW', 'S.UPG', 'A.ROU', 'A.CON', 'A.EXT', 'A.EXA', + 'A.EXB', 'A.NEW', 'A.UPG', 'Y.ROU', 'Y.CON', 'Y.EXT', + 'Y.EXA', 'Y.EXB', 'Y.NEW', 'Y.UPG', 'W.ROU', 'W.CON', + 'W.EXT', 'W.EXA', 'W.EXB', 'W.NEW', 'W.UPG', 'F.EXP', + 'F.CAN', 'S.EXP', 'S.CAN', 'A.EXP', 'A.CAN', 'Y.EXP', + 'Y.CAN', 'W.EXP', 'W.CAN'] + + for eachHazard in sortedHazards: + tempEditAreaList = [] + tempElementWeight = -1.0 + tempElementWeightCheck = -1.0 + secondaryWeight = 0.0 + segmentWeight = 0.0 + timeWeight = 0.0 + + # + # Figure out the maximum weight based on each + # element in the hazard combination. 
+ # + + for eachElement in eachHazard: + + # + # This section checks of the hazard's index in + # segmentVTECOrderList + # + + if eachElement[1] is not None and eachElement[3] is not None: + sigAction = eachElement[1] + '.' + eachElement[3] + if sigAction in segmentVTECOrderList: + tempElementWeightCheck = float(segmentVTECOrderList.index(sigAction)) + else: + # Local hazards are not in list so + # assign it least importance + tempElementWeightCheck = 0.0 + + # + # secondaryWeight is a cumulative value << 1 that allows + # combinations of actions and sigs to take precedence over + # single actions or sigs of the same primary importance. For + # instance, a BZ.W^WC.Y will come before BZ.W by itself, + # even though they are the same priority. It also takes + # into account the hazards position in the allowedHazardTable, + # so that a a blizzard warning will trump a winter storm + # warning + # + + # + # from 1 (important) to 1001 (undefined). 1 is added to + # prevent division errors. + # + + allowedHazardValue = float(self.getHazardImportance(\ + eachElement[0] + '.' + eachElement[1])) + 1.0 + # + # Ensure that secondary weight never approaches 1 (ten thousandths...) + # + + secondaryWeight = secondaryWeight + 0.0001/allowedHazardValue + + # + # Check the tempElementWeightCheck against the + # tempElementWeight. If it's more, then the current hazard + # is the higher priority of the combo, and set + # tempElementWeight to it's index value. If it's less, then + # this hazard is of lower priority, but do give it a little + # weight (<< 1) so that for instance a warn + advisory + # segment will come before just a warn segment. + # + + if tempElementWeightCheck > tempElementWeight: + # This hazard is more important + tempElementWeight = tempElementWeightCheck + + # + # Add a factor for segment number. Lowest segments go first. Never + # Approach one (millionths...) + # + + segmentWeight = 1.0/(10000000.0 + float(eachElement[2])) + + # + # Add a factor for time. 
Earliest start times go first. Never + # approach one (e-10) + # + + timeWeight = 1.0/float(eachElement[4] + 100.0) + + # + # Assign the sum of weights before adding + # list for sorting + # + + tempElementWeight = tempElementWeight + secondaryWeight +\ + segmentWeight + timeWeight + + secondaryWeight = 0.0 + segmentWeight = 0.0 + timeWeight = 0.0 + + for eachID in idsList: + if sets.Set(byIdDict[eachID]) == sets.Set(eachHazard): + tempEditAreaList.append(eachID) + + weightedList.append((tempElementWeight, tempEditAreaList)) + + # Sort the list by weight + weightedList.sort(self._wtListSort) + + # Make the list of geoareas + finalList = [] + for w in weightedList: + finalList.append(w[1]) + + # Seperate out the zones and FIPS into separate UGC blocks + s = [] + for s1 in finalList: + fips = [] + zones = [] + for s2 in s1: + if s2[2] == 'Z': + zones.append(s2) + elif s2[2] == 'C': + fips.append(s2) + if len(fips): + s.append(fips) + if len(zones): + s.append(zones) + finalList = s + + return finalList + + + # Determines if hazard in sorted hazards. Hazard can be a list, thus we + # need to compare all elements for their inclusion, rather than simply + # using the "in" operator. + def __sortedContains(self, hazard, sorted_hazards): + hazard.sort() + for indSorted in sorted_hazards: + indSorted.sort() + if hazard == indSorted: + return 1 + return 0 + + # Sorts tuples of (weight, list, time), by weight + def _wtListSort(self, a, b): + if a[0] > b[0]: + return -1 + elif a[0] == b[0]: + return 0 + else: + return 1 + + + # Modifies string to have ...TEST... if we are in TEST mode. This + # to the MND header. Modifies string to have EXPERIMENTAL... if + # we are in EXPERIMENTAL mode. + def checkTestMode(self, argDict, str): + # testMode is set, then we are in product test mode. + # modify the str to have beginning and ending TEST indication. 
+ if argDict.get('testMode', 0): + return "TEST..."+str+"...TEST" + elif argDict.get('experimentalMode', 0): + return "EXPERIMENTAL..." + str + else: + return str + + # Modifies headline string to have TEST if we are in TEST mode. + def headlinePhraseTESTcheck(self, argDict, str): + if argDict.get('testMode', 0): + lines = str.split('\n') + str = "...THIS MESSAGE IS FOR TEST PURPOSES ONLY...\n" + for x in range(len(lines)-1): #-1 for trailing new line + line = lines[x] + + #beginning of line + if line.find("...") == 0: + line = line[0:3] + "TEST " + line[3:] + #end of line + index = line.rfind("...") + if index != 0 and index == len(line)-3: + line = line[0:-3] + " TEST..." + + lines[x] = line + + return str + string.join(lines,'\n') + + #normal mode (not test mode) + else: + return str + + # utility for attribution, takes hazard description ['hdln'] field and + # adds TEST if appropriate in test mode, adds "A" or "AN" as appropriate + # if desired. + def hazardName(self, name, argDict, addA=False): + + if len(name) == 0: + return name + + # test mode + if argDict.get('testMode', 0): + phrase = 'Test ' + name #test mode, prepend "TEST" + else: + phrase = name + + # want A or AN? + if addA: + if phrase[0] in ['A','E','I','O','U','a','e','i','o','u']: + phrase = "an " + phrase + else: + phrase = "a " + phrase + return phrase + + + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/EditAreaUtils.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/EditAreaUtils.py index 8cf85a4c9d..ae5006ba2f 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/EditAreaUtils.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/EditAreaUtils.py @@ -1,367 +1,367 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# EditAreaUtils.py -# Utilities for dealing with Edit Areas. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -import string, types, time -import math, logging -import TextUtils -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID -from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit as JavaGrid2DBit -from java.util import ArrayList - -class EditAreaUtils(TextUtils.TextUtils): - def __init__(self): - self.__gridLoc = None - TextUtils.TextUtils.__init__(self) - self.__comboNumber = -1 - self.log = logging.getLogger("FormatterRunner.EditAreaUtils.EditAreaUtils") - - def setUp(self, parmNameAndLevel, argDict): - # Must set this up in order to do intersections and unions - self.__ifpClient = argDict["ifpClient"] - self.__gridLoc = self.__ifpClient.getDBGridLocation() - - def getIFPClient(self): - return self.__ifpClient - - def getGridLoc(self): - return self.__gridLoc - - def getAreaList(self, argDict): - # Get the list of edit areas and display names for the product - return argDict["editAreas"] - - def currentAreaContains(self, tree, areaNameList, areaName=None): - # Returns 1 if any of the current edit area(s) are equal to - # OR intersect with any of the areas listed in the areaNameList - # If a Combinations file is being used, this method - # will return 1 if ANY of the current component edit areas - # are in the areaNameList - # Otherwise, returns 0 - # - # Example: - # inlandWaters = self.inlandWatersAreas(tree, node) - # if self.currentAreaContains(tree, inlandWaters): - - curAreaNames = self.getCurrentAreaNames(tree, areaName) - for curAreaName in curAreaNames: - for areaName in areaNameList: - if curAreaName == areaName: - return 1 - if self.inQuery(tree, areaName, curAreaName): - return 1 - return 0 - - def currentAreaConsistsOf(self, tree, areaNameList): - # Returns 1 if the current edit area is equal to OR consists - # exclusively of areas listed in the areaNameList - # If a Combinations file is being used, this method - # will return 1 if ALL of the current component edit areas - # are in the areaNameList - # 
Otherwise, returns 0 - areaNames = self.getCurrentAreaNames(tree) - for areaName in areaNames: - if not areaName in areaNameList: - return 0 - return 1 - - def getEditAreas(self, argDict, areas): - # Get the ReferenceArea given - # a name or ReferenceID or ReferenceData(just return) - refDataList = [] - for area in areas: - if type(area) is str: # Get area from name - id = ReferenceID(area) - jlist = ArrayList() - jlist.add(id) - tmp = argDict["ifpClient"].getReferenceData(jlist) - refDataList.append(tmp.get(0)) - else: - if str(area).find('Id') > -1: - refDataList.append(area) - else: - jlist = ArrayList() - jlist.add(id) - tmp = argDict["ifpClient"].getReferenceData(jlist) - refDataList.append(tmp.get(0)) - - return refDataList - - def getEditArea(self, editAreaName, argDict): - # Returns an AFPS.ReferenceData object given an edit area name - # as defined in the GFE - refID = ReferenceID(editAreaName) - refList = ArrayList() - refList.add(refID) - tmp = argDict["ifpClient"].getReferenceData(refList).get(0) - if tmp is not None: - tmp.getGrid() - return tmp - - def createLatLonArea(self, lat, lon, dim): - # Create a square edit area given a latitude, longitude, - # and kilometer dimension for the sides. - # Example: - # area = self.createLatLonArea(40.93, -106.26, 5) - # - # If dim is zero, make edit area of the one grid - # point closest to the lat/lon value. - # - name = self.getLatLonAreaName((lat, lon, dim)) - if dim != 0: - for x in range(100): - points = self.makeSquare(lat, lon, dim) - pointList = [] - for point in points: - pointList.append(self.makePoint(point)) - refData = self.makeArea(pointList, refname=name) - # Make sure we have at least one grid point in - # the edit area - if refData.getGrid().isAnyBitsSet(): - return refData - # Increment dim and try again - dim += 0.25 - msg = "\nWARNING!!! EMPTY EDIT AREA. 
INCREASE LAT/LON AREA DIMENSION!!\n" - self.log.warning(msg) - return None - else: - # Get grid cell coordinates for lat/lon - gridLoc = self.getGridLoc() - cc2D = gridLoc.gridCell(lat, lon) - # convert grid cell to Grid2DBit with single bit set - grid2Dbit = JavaGrid2DBit( - gridLoc.gridSize().x, gridLoc.gridSize().y) - grid2Dbit.set(int(cc2D.x), int(cc2D.y)) - #refData = gridLoc.convertToReferenceData(grid2Dbit) - refID = ReferenceID(name) - refData = ReferenceData(gridLoc, refID, grid2Dbit) - #refData.setId(refID) - return refData - - def getLatLonAreaName(self, latLonTuple): - lat, lon, dim = latLonTuple - name = "Ref" + '%s%s%s' % (lat, lon, dim) - name = name.replace(".","") - name = name.replace("-","") - return name - - def makeSquare(self, lat, lon, km): - " Make a list of square of given km around lat,lon" - # The 222 value should probably by 111. - latinc = km/222.0 - loninc = math.cos(lat/57.17) * km / 222.0 - - latTop = lat + latinc - latBottom =lat - latinc - lonLeft = lon - loninc - lonRight = lon + loninc - - points = [] - points.append(`latTop`+","+ `lonRight`) - points.append(`latTop`+","+ `lonLeft`) - points.append(`latBottom`+","+ `lonLeft`) - points.append(`latBottom`+","+`lonRight`) - return points - - def makePoint(self, point): - " Make a CartCoord2D from the point in format: x,y" - from com.vividsolutions.jts.geom import Coordinate - ind = string.find(point,",") - latStr = point[0:ind-1] - lonStr = point[ind+1:len(point)] - lat = float(latStr) - lon = float(lonStr) - return Coordinate(lon,lat) - - def makeArea(self, pointList, refname=None): - " Make a Reference Area with a unique ReferenceID" - from com.vividsolutions.jts.geom import GeometryFactory, LinearRing, Coordinate, Polygon - from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData - CoordinateType = ReferenceData.CoordinateType - geomFactory = GeometryFactory() - import jep - size = len(pointList) - if pointList[0] != pointList[size-1]: # closing the loop - 
pointList.append(pointList[0]) - pointArray = jep.jarray(len(pointList), Coordinate) - for i in range(len(pointList)): - pointArray[i] = pointList[i] - lr = geomFactory.createLinearRing(pointArray) - poly = geomFactory.createPolygon(lr, jep.jarray(0, LinearRing)) - polyArray = jep.jarray(1, Polygon) - polyArray[0] = poly - region = geomFactory.createMultiPolygon(polyArray) - if refname is None: - refname = "Ref" + getTime() - refId = ReferenceID(refname) - refData = ReferenceData(self.__gridLoc, refId, region, CoordinateType.valueOf("LATLON")) - # randerso: I don't think this is necessary - # refData.convertToAWIPS() - return refData - - def getTime(self): - "Return an ascii string for the current time without spaces or :'s" - timeStr = `time.time()` - timeStr = string.replace(timeStr,".","_") - return timeStr - - def getIntersectName(self, areaName, localEffectArea): - name = "_" + localEffectArea + "_intersect_"+areaName - name = string.replace(name, " ", "_") - return name - - def inQuery(self, tree, areaName, queryStr): - # Check to see if areaName is part of an intersection - if queryStr[0] == "_": - queryStr = queryStr[1:] - queryNames = queryStr.split("_intersect_") - for queryName in queryNames: - queryName = queryName.replace("_", " ") - queryAreaNames = self.getCurrentAreaNames(tree, queryName) - if areaName in queryAreaNames: - return 1 - return 0 - - def unionAreas(self, name, area1, area2): - # OR the areas (ReferenceData objects) - # together and return a ReferenceData object - cpy = ReferenceData(area1) - refData = cpy.orEquals(area2) - #refData.convertToLatLon() - refData.setId(ReferenceID(name)) - refData.getGrid() - return refData - - def intersectAreas(self, name, area1, area2): - # AND the areas (ReferenceData objects) - # together and return a ReferenceData object - cpy = ReferenceData(area1) - refData = cpy.andEquals(area2) - #refData.convertToLatLon() - refData.setId(ReferenceID(name)) - refData.getGrid() - return refData - - def 
getCurrentAreaNames(self, tree, areaName=None): - # Returns the current list of areaNames being processed. - # If the areaName is None, use the current area - # If a Combinations file is being used, there may - # be multiple names in the list - # Otherwise, it will be a list of one edit area name - # - # "tree" could be a narrative tree OR argDict - combinations = None - if type(tree) is types.DictType: - # tree is argDict - if areaName is None: - editArea, areaLabel = tree["editArea"] - areaName = editArea.getId().getName() - else: - areaLabel = areaName - combinations = tree["combinations"] - else: - # tree - if areaName is None: - editArea = tree.get("editArea") - areaName = editArea.getId().getName() - areaLabel = tree.get("areaLabel") - else: - areaLabel = areaName - combinations = tree.get("combinations") - areaNames = [areaName] - - if combinations is not None: - for comboList, label in combinations: - if label == areaLabel: - areaNames = comboList - return areaNames - - def saveEditAreas(self, editAreas): - javaEditAreas = ArrayList() - for editArea in editAreas: - javaEditAreas.add(editArea) - # Save a list of ReferenceData objects - ifpClient = self.getIFPClient() - ifpClient.saveReferenceData(javaEditAreas) - - def getComboNumber(self): - # Put initial comboNumber from constructor into EditAreaUtils as well. 
- self.__comboNumber = self.__comboNumber + 1 - return self.__comboNumber - - def getUnion(self, argDict, areaLabels, areaPrefix): - GridLoc = self.getIFPClient().getDBGridLocation() - area = None - for areaLabel in areaLabels: - newArea = self.getEditArea(areaLabel, argDict) - if areaLabels.index(areaLabel) == 0: - comboNumber = self.getComboNumber() - label = areaPrefix + `int(time.time())` + `comboNumber` - refId = ReferenceID(label) - from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData - CoordinateType = ReferenceData.CoordinateType - coordType = CoordinateType.valueOf('LATLON') - area = ReferenceData( - GridLoc, refId, newArea.getPolygons(coordType), coordType) - # randerso: I don't think this is necessary - # area.convertToAWIPS() - area = self.unionAreas(label, area, newArea) - return area - - def cleanOutEditAreas(self, areaPrefix): - # Delete all edit areas that have the given areaPrefix - #time1 = time.time() - areaList = [] - inventory = self.getIFPClient().getReferenceInventory() - for areaId in inventory: - areaName = areaId.name() - if areaName.find(areaPrefix) == 0: - areaList.append(areaName) - self.deleteEditAreas(areaList) - #print "Time to delete", time.time() - time1 - - def deleteEditAreas(self, editAreas): - # Delete a list of ReferenceData, ReferenceID, or string objects - ifpClient = self.getIFPClient() - ids = ArrayList() - for area in editAreas: - if type(area) is str: - ids.add(ReferenceID(area)) - else: - try: - # reference data - ids.add(area.getId()) - except: - # reference id - ids.add(area) - ifpClient.deleteReferenceData(ids) +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. 
persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# EditAreaUtils.py +# Utilities for dealing with Edit Areas. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import string, types, time +import math, logging +import TextUtils +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID +from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit as JavaGrid2DBit +from java.util import ArrayList + +class EditAreaUtils(TextUtils.TextUtils): + def __init__(self): + self.__gridLoc = None + TextUtils.TextUtils.__init__(self) + self.__comboNumber = -1 + self.log = logging.getLogger("FormatterRunner.EditAreaUtils.EditAreaUtils") + + def setUp(self, parmNameAndLevel, argDict): + # Must set this up in order to do intersections and unions + self.__ifpClient = argDict["ifpClient"] + self.__gridLoc = self.__ifpClient.getDBGridLocation() + + def getIFPClient(self): + return self.__ifpClient + + def getGridLoc(self): + return self.__gridLoc + + def getAreaList(self, argDict): + # Get the list of edit areas and display names for the product + return argDict["editAreas"] + + def currentAreaContains(self, tree, areaNameList, areaName=None): + # Returns 1 if any of the current edit area(s) are equal to + # OR intersect with any of the areas listed in the areaNameList + # If 
a Combinations file is being used, this method + # will return 1 if ANY of the current component edit areas + # are in the areaNameList + # Otherwise, returns 0 + # + # Example: + # inlandWaters = self.inlandWatersAreas(tree, node) + # if self.currentAreaContains(tree, inlandWaters): + + curAreaNames = self.getCurrentAreaNames(tree, areaName) + for curAreaName in curAreaNames: + for areaName in areaNameList: + if curAreaName == areaName: + return 1 + if self.inQuery(tree, areaName, curAreaName): + return 1 + return 0 + + def currentAreaConsistsOf(self, tree, areaNameList): + # Returns 1 if the current edit area is equal to OR consists + # exclusively of areas listed in the areaNameList + # If a Combinations file is being used, this method + # will return 1 if ALL of the current component edit areas + # are in the areaNameList + # Otherwise, returns 0 + areaNames = self.getCurrentAreaNames(tree) + for areaName in areaNames: + if not areaName in areaNameList: + return 0 + return 1 + + def getEditAreas(self, argDict, areas): + # Get the ReferenceArea given + # a name or ReferenceID or ReferenceData(just return) + refDataList = [] + for area in areas: + if type(area) is str: # Get area from name + id = ReferenceID(area) + jlist = ArrayList() + jlist.add(id) + tmp = argDict["ifpClient"].getReferenceData(jlist) + refDataList.append(tmp.get(0)) + else: + if str(area).find('Id') > -1: + refDataList.append(area) + else: + jlist = ArrayList() + jlist.add(id) + tmp = argDict["ifpClient"].getReferenceData(jlist) + refDataList.append(tmp.get(0)) + + return refDataList + + def getEditArea(self, editAreaName, argDict): + # Returns an AFPS.ReferenceData object given an edit area name + # as defined in the GFE + refID = ReferenceID(editAreaName) + refList = ArrayList() + refList.add(refID) + tmp = argDict["ifpClient"].getReferenceData(refList).get(0) + if tmp is not None: + tmp.getGrid() + return tmp + + def createLatLonArea(self, lat, lon, dim): + # Create a square edit area given 
a latitude, longitude, + # and kilometer dimension for the sides. + # Example: + # area = self.createLatLonArea(40.93, -106.26, 5) + # + # If dim is zero, make edit area of the one grid + # point closest to the lat/lon value. + # + name = self.getLatLonAreaName((lat, lon, dim)) + if dim != 0: + for x in range(100): + points = self.makeSquare(lat, lon, dim) + pointList = [] + for point in points: + pointList.append(self.makePoint(point)) + refData = self.makeArea(pointList, refname=name) + # Make sure we have at least one grid point in + # the edit area + if refData.getGrid().isAnyBitsSet(): + return refData + # Increment dim and try again + dim += 0.25 + msg = "\nWARNING!!! EMPTY EDIT AREA. INCREASE LAT/LON AREA DIMENSION!!\n" + self.log.warning(msg) + return None + else: + # Get grid cell coordinates for lat/lon + gridLoc = self.getGridLoc() + cc2D = gridLoc.gridCell(lat, lon) + # convert grid cell to Grid2DBit with single bit set + grid2Dbit = JavaGrid2DBit( + gridLoc.gridSize().x, gridLoc.gridSize().y) + grid2Dbit.set(int(cc2D.x), int(cc2D.y)) + #refData = gridLoc.convertToReferenceData(grid2Dbit) + refID = ReferenceID(name) + refData = ReferenceData(gridLoc, refID, grid2Dbit) + #refData.setId(refID) + return refData + + def getLatLonAreaName(self, latLonTuple): + lat, lon, dim = latLonTuple + name = "Ref" + '%s%s%s' % (lat, lon, dim) + name = name.replace(".","") + name = name.replace("-","") + return name + + def makeSquare(self, lat, lon, km): + " Make a list of square of given km around lat,lon" + # The 222 value should probably by 111. 
+ latinc = km/222.0 + loninc = math.cos(lat/57.17) * km / 222.0 + + latTop = lat + latinc + latBottom =lat - latinc + lonLeft = lon - loninc + lonRight = lon + loninc + + points = [] + points.append(repr(latTop)+","+ repr(lonRight)) + points.append(repr(latTop)+","+ repr(lonLeft)) + points.append(repr(latBottom)+","+ repr(lonLeft)) + points.append(repr(latBottom)+","+repr(lonRight)) + return points + + def makePoint(self, point): + " Make a CartCoord2D from the point in format: x,y" + from com.vividsolutions.jts.geom import Coordinate + ind = string.find(point,",") + latStr = point[0:ind-1] + lonStr = point[ind+1:len(point)] + lat = float(latStr) + lon = float(lonStr) + return Coordinate(lon,lat) + + def makeArea(self, pointList, refname=None): + " Make a Reference Area with a unique ReferenceID" + from com.vividsolutions.jts.geom import GeometryFactory, LinearRing, Coordinate, Polygon + from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData + CoordinateType = ReferenceData.CoordinateType + geomFactory = GeometryFactory() + import jep + size = len(pointList) + if pointList[0] != pointList[size-1]: # closing the loop + pointList.append(pointList[0]) + pointArray = jep.jarray(len(pointList), Coordinate) + for i in range(len(pointList)): + pointArray[i] = pointList[i] + lr = geomFactory.createLinearRing(pointArray) + poly = geomFactory.createPolygon(lr, jep.jarray(0, LinearRing)) + polyArray = jep.jarray(1, Polygon) + polyArray[0] = poly + region = geomFactory.createMultiPolygon(polyArray) + if refname is None: + refname = "Ref" + getTime() + refId = ReferenceID(refname) + refData = ReferenceData(self.__gridLoc, refId, region, CoordinateType.valueOf("LATLON")) + # randerso: I don't think this is necessary + # refData.convertToAWIPS() + return refData + + def getTime(self): + "Return an ascii string for the current time without spaces or :'s" + timeStr = repr(time.time()) + timeStr = string.replace(timeStr,".","_") + return timeStr + + def 
getIntersectName(self, areaName, localEffectArea): + name = "_" + localEffectArea + "_intersect_"+areaName + name = string.replace(name, " ", "_") + return name + + def inQuery(self, tree, areaName, queryStr): + # Check to see if areaName is part of an intersection + if queryStr[0] == "_": + queryStr = queryStr[1:] + queryNames = queryStr.split("_intersect_") + for queryName in queryNames: + queryName = queryName.replace("_", " ") + queryAreaNames = self.getCurrentAreaNames(tree, queryName) + if areaName in queryAreaNames: + return 1 + return 0 + + def unionAreas(self, name, area1, area2): + # OR the areas (ReferenceData objects) + # together and return a ReferenceData object + cpy = ReferenceData(area1) + refData = cpy.orEquals(area2) + #refData.convertToLatLon() + refData.setId(ReferenceID(name)) + refData.getGrid() + return refData + + def intersectAreas(self, name, area1, area2): + # AND the areas (ReferenceData objects) + # together and return a ReferenceData object + cpy = ReferenceData(area1) + refData = cpy.andEquals(area2) + #refData.convertToLatLon() + refData.setId(ReferenceID(name)) + refData.getGrid() + return refData + + def getCurrentAreaNames(self, tree, areaName=None): + # Returns the current list of areaNames being processed. 
+ # If the areaName is None, use the current area + # If a Combinations file is being used, there may + # be multiple names in the list + # Otherwise, it will be a list of one edit area name + # + # "tree" could be a narrative tree OR argDict + combinations = None + if type(tree) is dict: + # tree is argDict + if areaName is None: + editArea, areaLabel = tree["editArea"] + areaName = editArea.getId().getName() + else: + areaLabel = areaName + combinations = tree["combinations"] + else: + # tree + if areaName is None: + editArea = tree.get("editArea") + areaName = editArea.getId().getName() + areaLabel = tree.get("areaLabel") + else: + areaLabel = areaName + combinations = tree.get("combinations") + areaNames = [areaName] + + if combinations is not None: + for comboList, label in combinations: + if label == areaLabel: + areaNames = comboList + return areaNames + + def saveEditAreas(self, editAreas): + javaEditAreas = ArrayList() + for editArea in editAreas: + javaEditAreas.add(editArea) + # Save a list of ReferenceData objects + ifpClient = self.getIFPClient() + ifpClient.saveReferenceData(javaEditAreas) + + def getComboNumber(self): + # Put initial comboNumber from constructor into EditAreaUtils as well. 
+ self.__comboNumber = self.__comboNumber + 1 + return self.__comboNumber + + def getUnion(self, argDict, areaLabels, areaPrefix): + GridLoc = self.getIFPClient().getDBGridLocation() + area = None + for areaLabel in areaLabels: + newArea = self.getEditArea(areaLabel, argDict) + if areaLabels.index(areaLabel) == 0: + comboNumber = self.getComboNumber() + label = areaPrefix + repr(int(time.time())) + repr(comboNumber) + refId = ReferenceID(label) + from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData + CoordinateType = ReferenceData.CoordinateType + coordType = CoordinateType.valueOf('LATLON') + area = ReferenceData( + GridLoc, refId, newArea.getPolygons(coordType), coordType) + # randerso: I don't think this is necessary + # area.convertToAWIPS() + area = self.unionAreas(label, area, newArea) + return area + + def cleanOutEditAreas(self, areaPrefix): + # Delete all edit areas that have the given areaPrefix + #time1 = time.time() + areaList = [] + inventory = self.getIFPClient().getReferenceInventory() + for areaId in inventory: + areaName = areaId.name() + if areaName.find(areaPrefix) == 0: + areaList.append(areaName) + self.deleteEditAreas(areaList) + #print "Time to delete", time.time() - time1 + + def deleteEditAreas(self, editAreas): + # Delete a list of ReferenceData, ReferenceID, or string objects + ifpClient = self.getIFPClient() + ids = ArrayList() + for area in editAreas: + if type(area) is str: + ids.add(ReferenceID(area)) + else: + try: + # reference data + ids.add(area.getId()) + except: + # reference id + ids.add(area) + ifpClient.deleteReferenceData(ids) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/FWS_Overrides.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/FWS_Overrides.py index 0967f5b4ee..85fa811454 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/FWS_Overrides.py +++ 
b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/FWS_Overrides.py @@ -1,5070 +1,5070 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# --------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without -# technical support, and with no warranty, express or implied, as to -# its usefulness for any purpose. -# -# FWS_Overrides -# -# This file provides any product specific overrides for the -# FWS product. This file is part of the baseline. -# -# Definition Section: -# Overrides: -# Additions: -# -# Methods: -# Overrides: -# Additions: -# -# --------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -import string, time, re, os, types, copy -import TextRules -import ProcessVariableList -import math -import HazardsTable, TimeRange, AbsTime - -# Define overrides of Product Definition settings and -# default values of additional Definition settings -# ( This Definition section must be before the Class definition) - -#***** THIS NEXT LINE IS REQUIRED ***** -Definition = {} -# -# FWS Definitions: -# Definition statements must start in column 1 - -# REQUIRED CONFIGURATION ITEMS -#Definition['displayName'] = "FWS" -Definition["statePil"] = "GTF" # State Pil ID - -Definition["productName"] = "Spot Forecast" # name of product -Definition["fullStationID"] = "" # full station identifier (4letter) -Definition["wmoID"] = "" # WMO ID -Definition["pil"] = "" -Definition["stqPil"] = "STQ" # STQ pil -Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. -Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. - -Definition["summaryExtended"] = 0 -Definition["individualExtended"] = 1 -Definition["extendedLabel"] = 1 - -### FWS settings of baseline options: ### -Definition["mapNameForCombinations"] = None -Definition["defaultEditAreas"] = [] - -# agencyList - This is a list of agency abbreviations as you want them to -# appear in the product header. For Example... -# Spot Forecast for Willie Fire...USFS GNF -# where "USFS GNF" is an example of agency abbreviation. -# The FWS formatter will read the STQ spot request product -# and will try to first guess the agency abbreviation from the -# "REQUESTING AGENCY" line of the STQ product. If the first guess -# is found in your agencyList list, then the Quality Control GUI -# with have that agency pre-selected. If list is left empty, -# then the formatter will always use what the user submitted -# for the agency. 
- -#Definition["agencyList"] = [ -# (1,"Agency 1"), -# (2,"Agency 2"), -# (3,"Agency 3"), -# (4,"Agency 4"), -# (5,"Agency 5"), -# ] -Definition["agencyList"] = [] - -# forecasterList - This is a list of forecaster numbers, forecaster awips login name, -# and forecaster last names. The Quality Control GUI will -# list the forecaster's last name and the forecaster will -# check all of the forecaster's names that were involved -# in that forecast. - -Definition["forecasterList"] = [ - (1,"forecastera","Forecaster A"), - (2,"forecasterb","Forecaster B"), - (3,"forecasterc","Forecaster C"), - (4,"forecasterd","Forecaster D"), - (5,"forecastere","Forecaster E"), - ] - -# stqNumberVersions - When you launch the FWS formatter, you will get a GUI -# that asks you to select which spot request you want to -# format a spot forecast for. This variable specifies -# how many spots you want to list in the GUI. If you do -# increase the number, then make sure you increase the -# number of versions stored in awips. -Definition["stqNumberVersions"] = 10 - -# stqWmoID - helps find the timestamp line in the STQ product. Only change if -# WSH changes the WMO id of the STQ product. - -Definition["stqWmoID"] = "BMBB91 K" - -# wind20ftHeader: This definition set to "1" allows offices to -# format winds in this format... -# -# Wind (20 ft)........ -# Slope/valley.......WEST 10-20 MPH -# Ridgetop...........NORTHWEST 20 MPH -# -# By setting this definition to "0", you will get... -# -# Wind (20 ft)........WEST 10-20 MPH -# Ridgetop wind.......NORTHWEST 20 MPH -Definition["wind20ftHeader"] = 1 # Use 1 for yes, 0 for no - -# typeList - This is a list of project types and are formatted in the "REASON FOR -# REQUEST" line of the FWS forecast. Do not edit this list unless WSH -# directs you to do so. 
- -Definition["typeList"] = ["WILDFIRE", "PRESCRIBED", "HAZMAT", "SAR", "TEST"] - -# Set shortTermOnly to 1 if you don't want to give your forecasters an option -# include extended forecasts and/or outlooks with their spot forecasts. -Definition["shortTermOnly"] = 1 -#Definition["shortTermOnly"] = 0 - -Definition["outputFile"] = "{prddir}/TEXT/FWS.txt" - -# Definitions to insert unrepresentativeness of the forecast -# instructions for the user. -Definition["insertUnrepresentStatement"] = 1 # Use 1 for yes, 0 for no -Definition["unrepresentStatement"] = "If conditions become unrepresentative, " + \ - "contact the National Weather\nService." -# Definitions to insert the FWF discussion from a separate file. -# Discussion is edited separately in XNOW for the FWF forecast. -# Advantage of this is to have a first guess for the discussion in -# the Spot forecast...saving some composition time. -Definition["insertDiscussionFromFile"] = 0 # Use 1 for yes, 0 for no -Definition["discussionFile"] = "/home/local_apps/xnow/temp/DISFWF" - -# Definitions to insert the FWF 8 to 14 day outlook from a separate -# file if the user requests that information in their request. (Not -# very likely). Outlook is edited separately in XNOW for the FWF -# Forecast with the advantage of saving time in the composition of -# the Spot Forecast. -Definition["insertOutlookFromFile"] = 0 # Use 1 for yes, 0 for no -Definition["outlookFile"] = "/home/local_apps/xnow/temp/OLKFWF" - - -# wildfireElementList is a subset list of the requestedElementList list. -# The directive states that Sky/Weather, Temp, RH, and Winds are required -# for wildfire spot forecasts. Even if the user doesn't select these elements, -# the formatter will put them in anyway because of the directive requirements. - -# You may add weather elements corresponding to the entries you see in your STQ product. 
- -Definition["wildfireElementList"] = [ - "SKY/WEATHER", - "TEMPERATURE", - "HUMIDITY", - "20 FOOT WINDS", - "EYE LEVEL WINDS", - ] - -Definition["stqPil"] = "STQ" # STQ pil - -# Definitions to insert unrepresentativeness of the forecast -# instructions for the user. -#Definition["insertUnrepresentStatement"] = 0 # Use 1 for yes, 0 for no -#Definition["unrepresentStatement"] = "If conditions become unrepresentative, " + \ -# "contact the National Weather\nService." - -# wind20ftHeader: This definition set to "1" allows offices to -# format winds in this format... -# -# Wind (20 ft)........ -# Slope/valley.......WEST 10-20 MPH -# Ridgetop...........NORTHWEST 20 MPH -# -# By setting this definition to "0", you will get... -# -# Wind (20 ft)........WEST 10-20 MPH -# Ridgetop wind.......NORTHWEST 20 MPH -#Definition["wind20ftHeader"] = 0 # Use 1 for yes (default), 0 for no - -# Definitions to insert the FWF discussion from a separate file. -# Discussion is edited separately in XNOW for the FWF forecast. -# Advantage of this is to have a first guess for the discussion in -# the Spot forecast...saving some composition time. -#Definition["insertDiscussionFromFile"] = 1 # Use 1 for yes, 0 for no -#Definition["discussionFile"] = "/home/local_apps/xnow/temp/DISFWFBYZ" - -# Definitions to insert the FWF 8 to 14 day outlook from a separate -# file if the user requests that information in their request. (Not -# very likely). Outlook is edited separately in XNOW for the FWF -# Forecast with the advantage of saving time in the composition of -# the Spot Forecast. 
-#Definition["insertOutlookFromFile"] = 1 # Use 1 for yes, 0 for no -#Definition["outlookFile"] = "/home/local_apps/xnow/temp/OLKFWFBYZ" - -#Definition["tempLocalEffects"] = 1 # Set to 1 to enable Temp and RH local effects AFTER - # creating AboveElev and BelowElev edit areas -#Definition["windLocalEffects"] = 1 # Set to 1 to enable wind local effects AFTER - # creating Ridges and Valleys edit areas -# OPTIONAL CONFIGURATION ITEMS -#Definition["database"] = "Official" # Source database. "Official", "Fcst", or "ISC" -#Definition["debug"] = 1 -#Definition["lineLength"] = 66 #Product line length - -# Set the following variable to 1 if you want Lightning Activity -# reported with phrases like "1-8 STRIKES", "9-15 STRIKES", etc. -#Definition["lightningPhrases"] = 1 - -# The following variable sets a wind adjustment factor for surface -# (20 ft) winds. Wind speeds will be multiplied by this factor. -# Winds reported by RAWS sites are frequently lower than ASOS winds -# due to the fact that they measure wind speeds at lower heights. -# A common adjustment factor is 80% (0.80). If you want no adjustment -# to the winds then set this variable to 1.00 -#Definition["windAdjustmentFactor"] = 1.00 - -# The following variable sets a wind adjustment factor for eye level -# winds. Wind speeds will be multiplied by this factor. Eye level -# winds are frequently lower than ASOS winds due to the fact that -# winds are slower when measured closer to the ground. A common -# adjustment factor is 60% (0.60). 
If you want no adjustment to -# the winds then set this variable to 1.00 -Definition["eyeWindAdjustmentFactor"] = 0.60 - -#Definition["language"] = "english" - -# Trouble-shooting items -#Definition["passLimit"] = 20 # Limit on passes allowed through - # Narrative Tree -#Definition["trace"] = 1 # Set to 1 to turn on trace -# useRH If 1, use RH grids instead of MaxRH, MinRH -Definition["useRH"] = 0 - -### *** START TABULAR TEST SECTION *** ### - -Definition["fwfPil"] = "FWFBYZ" # FWF pil - -# Definitions to insert the FWF discussion from a separate file or -# from the last FWF your office has issued. - -# Discussion is edited separately in GFE for the FWF forecast. -# Advantage of this is to have a first guess for the discussion in -# the Spot forecast...saving some composition time. - -# Use 1 to get Discussion from a file -# Use 2 to get Discussion from your last FWF product -# Use 0 to use a blank Discussion template -Definition["insertDiscussionFromFile"] = 2 -Definition["discussionFile"] = "/data/local/DISFWFBYZ" - -# Definitions to insert the FWF 8 to 14 day outlook from a separate -# file if the user requests that information in their request. (Not -# very likely). Outlook is edited separately in XNOW for the FWF -# Forecast with the advantage of saving time in the composition of -# the Spot Forecast. - -# Use 1 to get Outlook from a file -# Use 2 to get Outlook from your last FWF product -# Use 0 to use a blank Outlook template -Definition["insertOutlookFromFile"] = 2 # Use 1 for yes, 0 for no -Definition["outlookFile"] = "/data/local/OLKFWFBYZ" - -# If set to 1, the user can enter a creation date/time -# for product generation. It will be as if the product was run -# at the creation time specified by the user at run-time. -Definition["includeCreationTimeOnGUI"] = 1 -#Definition["includeCreationTimeOnGUI"] = 0 - -# forecastTypeList - This definition contains a list of spot forecast formats that a -# forecaster can select via the formatter gui. 
The formats are: -# -# Narrative Only: The spot forecast is in a narrative format. -# Tabular/Narrative: This format is a tabular/narrative mix as specified -# in the "_rowList" (see FWS_Overrides). -# -# For each forecastType, you can specify a label that will appear in the GUI. -# -Definition["forecastTypeList"] = [ - # Label Forecast Type - ("Narrative Only", "Narrative Only"), - ("Tabular/Narrative", "Tabular/Narrative"), - ("Tabular Only", "Tabular Only"), - - # If your _rowList specifies an all Tabular product, - # you may want to change this entry to: - #("Tabular", "Tabular/Narrative"), - ] - -# defaultForecastType - This defintion sets the default setting for which spot forecast -# format your WFO wants to use. Value for definition must be included -# in the forecastTypeList definition and must be either "Narrative", -# "Tabular", or "With Ignition Forecast". -#Definition["defaultForecastType"] = "Narrative Only" -Definition["defaultForecastType"] = "Tabular/Narrative" - -# withIgnitionTimes: If "yes", ertain weather elements can be configured to include -# an ignition time forecast within the narrative. -Definition["withIgnitionTimes"] = "no" - -# includeIgnitionOptionOnGUI: If 1, the GUI will include this option at run-time. -Definition["includeIgnitionOptionOnGUI"] = 1 -#Definition["includeIgnitionOptionOnGUI"] = 0 - -# tabularResolutionDict - This definition contains the list of table time resolutions -# (per period) that you want to appear in the GUI to the forecaster. -# Some WFOs may not want to give forecasters an option to generate -# a table with an 1 hour resolution (for example), so you can -# delete "1" from this list and it will not appear on the gui. -# Possible values are 1, 2, 3, 4 hours and 123 in which case, -# hourly resolution will be 1 hour in the 1st period, -# 2 hours in the 2nd period, and 3 hours in the third period -# (if needed).. 
-Definition["tabularResolutionDict"] = { - "Today": [1, 2, 3, 4, "None"], - "Tonight": [1, 2, 3, 4, "None"], - "Tomorrow": [1, 2, 3, 4, "None"], - } - -# defaultTabularResolution - This definition must be set to one of values listed in the -# tabularResolutionList definition. This will be the value -# that the gui will use for a default for each period. -# Values are limited to 1, 2, 3, and 4 and must be included in -# the tabularResolutionList definition. -Definition["defaultTabularResolution"] = { - "Today": 2, - "Tonight": 2, - "Tomorrow": 2 - } - -# tabularAllPeriods - Setting this definition to "no" will generate a 12 -# hour table only in the first period. -# The table will start at either the current time or -# the ignition time depending on the setting of -# tableStartTimeMode (see below). -# Setting this definition to "yes" will allow tables in -# all periods with snapshot values covering the -# time range of each period. - -Definition["tabularAllPeriods"] = "yes" -#Definition["tabularAllPeriods"] = "no" - -# tabularForWildfire - This is a nationally mandated setting which requires -# a narrative forecast for wildfire spot requests. When -# set to "no", a narrative will be produced, even if the -# tabular option is selected. Your office must issue a -# PDD to switch this definition to "yes". - -Definition["tabularForWildfire"] = "no" -#Definition["tabularForWildfire"] = "yes" - -# tableStartTimeMode - The setting of this definition will tell the formatter how to -# determine the start time for the table. -# If "productStart" is used, then the table will start at the -# beginning of the product issuance. -# If "ignitionTime" is used, then the formatter will use the ignition time -# if it is within the first period of the product. -# Otherwise the formatter will use the productStart time. -# If "current" is used, then the table will start at the time the -# formatter was launched. 
- -#Definition["tableStartTimeMode"] = "current" -#Definition["tableStartTimeMode"] = "productStart" -Definition["tableStartTimeMode"] = "ignitionTime" - -# tableStartTimeOffset - When the ignition time is used for the table start time, -# you can start the table a set number of hours before the -# ignition time. You can configure the tableStartTimeOffset -# definition for this purpose. Value is the number of hours -# before the ignition time desired. Note, if this new time -# is before the product start time, then the product start -# time will be used. -Definition["tableStartTimeOffset"] = 0 - -# ignitionForecastTypeList - The formatter can produce Ignition Time/Request Time -# forecasts for certain weather elements, like T and RH. -# This list will produce this forecast as a default for -# certain types of spot requests. List elements must be -# a subset of the typeList definition. - -#Definition["ignitionForecastTypeList"] = ["PRESCRIBED"] -Definition["ignitionForecastTypeList"] = [] - -# elementFormatDict - This defines the format as "alpha" or "numeric" for various -# tabular weather elements. - -# Sky - This definition allows a WFO to use a text description of the -# Sky Cover or use numeric values. Examples are as follows... -# alpha 1hr : SKY.............MC MC MC MC MC MC MC MC MC MC MC MC PC -# alpha 2hr : SKY.............MCLDY MCLDY MCLDY MCLDY MCLDY MCLDY PCLDY -# numeric 2hr: Sky (%).........90 90 90 83 83 83 69 - -# Tabular Wind Format Definitions -# "alpha" will cause the direction to be formatted in the alphabetic characters -# of N, NW, W, SW, S, SE, E, and NE. -# "numeric" will return the wind direction in tens of degrees. i.e. 000, 010, -# etc. 
When a numeric wind direction is combined with wind speed it will look -# something like this...030/10 - -Definition["elementFormatDict"] = { - "Sky" : "numeric", - "Wind": "alpha", - "Wind20ft": "alpha", - "EyeWind": "alpha", - "RidgeWind": "alpha", - "TransWind": "alpha", - "TransMetWind": "alpha", - } - -# bothAlphaNumericDict - For certain elements both alpha and numeric values -# are needed. In particular, sky cover and wind direction. -# Only possible values are "Yes" or "No". -# If you do configure a wind element to "Yes", then -# ensure the corresponding setting for elementFormatDict -# is set to "alpha". Otherwise, you will get two lines -# of numeric values. - -Definition["bothAlphaNumericDict"] = { - "Sky" : "No", - "Wind": "No", - "Wind20ft": "No", - "EyeWind": "No", - "SfcWind": "No", - "RidgeWind": "No", - "TransWind": "No", - } - -# tabularMixingHeightUnits - This definition allows the WFO to specify their preferance -# on how mixing height is expressed. In thousands of feet or -# in just feet? The definition can only be set to "kft" or -# "ft". Note: the 1 hour resolution table is limited to a -# three character space, so mixing height will always be -# expressed in kft when the 1 hour resolution is selected -# regardless to what this definition is set to. Examples... -# KFT 2hr: Mix hgt (kft)...0.3 0.3 0.3 0.3 0.3 7.9 11 -# FT 2hr : Mix hgt (ft)....300 300 300 300 300 7900 11100 - -#Definition["tabularMixingHeightUnits"] = "kft" # So we can fit a number in a 3 character space. -Definition["tabularMixingHeightUnits"] = "ft" # Will only be used for 2,3, or 4 time resolutions. - -# transportWindLabel - Some WFOs use "Transport Winds", while others use "Mixing Winds". -# They are one in the same in terms of the forecast. This definition -# allows the WFO to configure their preference for the tabular section. 
-#Definition["transportWindLabel"] = "mix" -Definition["transportWindLabel"] = "tran" - -# includeMetricDispersion - Some users need mixing height and transport winds -# in metric units. If you want to include the metric -# in addition to english values, then set definition -# to "yes". Otherwise "no". - -#Definition["includeMetricDispersion"] = "yes" -Definition["includeMetricDispersion"] = "no" - -# 20ftWindParm - Some WFOs actually produce a Wind20ft grid, so the 20 FOOT WIND -# phrase can be configured to sample that grid (the "Wind20ft" -# setting). Other WFOs just use a conversion factor (windAdjustmentFactor) -# of what they have in the Wind grid (the "Wind" setting). - -Definition["20ftWindParm"] = "Wind" -#Definition["20ftWindParm"] = "Wind20ft" - -# wind20ftHeader: This definition set to "1" allows offices to -# format winds in this format... -# -# Wind (20 ft)........ -# Slope/valley.......WEST 10-20 MPH -# Ridgetop...........NORTHWEST 20 MPH -# -# By setting this definition to "0", you will get... -# -# Wind (20 ft)........WEST 10-20 MPH -# Ridgetop wind.......NORTHWEST 20 MPH -Definition["wind20ftHeader"] = 0 # Use 1 for yes, 0 for no -#Definition["wind20ftHeader"] = 1 # Use 1 for yes, 0 for no - -# tableWindElementSplit - When the time resolution of the table is 1 hour, then -# I am forced to create separate lines for wind direction, -# wind speed, and wind gust speed. When the resolution -# is 2 hours or more, then a WFO has a choice of formats. -# They can set tableWindElementSplit to "yes" and wind -# direction, speed, and gusts will remain in their -# separate lines. Or the WFO can set tableWindElementSplit -# to "no". For the two hour resolution, direction and -# speed will be combined. For three and four hour -# resolution, direction, speed, and gusts will be -# combined. Examples follow... 
- -# yes 2hr: 20 ft wind dir..SW W W W W W W -# : 20 ft wind spd..26 26 18 18 18 14 14 -# : 20 ft wind gust.40 40 - -# no 2 hr: 20 ft wind......W 26 W 26 W 18 W 18 W 18 W 14 W 14 -# : 20 ft wind gust.40 40 - -# yes 3hr: 20 ft wind dir..W W W W W -# : 20 ft wind spd..26 25 13 14 13 -# : 20 ft wind gust.40 40 - -# no 3 hr: 20 ft wind......W 26G40 W 25G40 W 13 W 14 W 13 - -#Definition["tableWindElementSplit"] = "yes" -Definition["tableWindElementSplit"] = "no" - -# tableEyeWindElementSplit - When the time resolution of the table is 1 hour, then -# I am forced to create separate lines for wind direction, -# wind speed, and wind gust speed. When the resolution -# is 2 hours or more, then a WFO has a choice of formats. -# They can set tableEyeWindElementSplit to "yes" and wind -# direction, speed, and gusts will remain in their -# separate lines. Or the WFO can set tableEyeWindElementSplit -# to "no". For the two hour resolution, direction and -# speed will be combined. For three and four hour -# resolution, direction, speed, and gusts will be -# combined. Examples follow... - -# yes 2hr: Eye lvl wnd dir.SW W W W W W W -# : Eye lvl wnd spd.26 26 18 18 18 14 14 -# : Eye lvl wnd gst.40 40 - -# no 2 hr: Eye level wind..W 26 W 26 W 18 W 18 W 18 W 14 W 14 -# : Eye lvl wnd gst.40 40 - -# yes 3hr: Eye lvl wnd dir.W W W W W -# : Eye lvl wnd spd.26 25 13 14 13 -# : Eye lvl wnd gst.40 40 - -# no 3 hr: Eye level wind..W 26G40 W 25G40 W 13 W 14 W 13 - -#Definition["tableEyeWindElementSplit"] = "yes" -Definition["tableEyeWindElementSplit"] = "no" - -# tableRidgeElementSplit - When the time resolution of the table is 1 hour, then -# I am forced to create separate lines for wind direction -# and wind speed. When the resolution is 2 hours or more, -# then a WFO has a choice of formats. They can set -# tableRidgeElementSplit to "yes" and wind direction and -# speed will remain in their separate lines. 
Or the WFO -# can set tableRidgeElementSplit to "no" and the wind -# direction and speed will be combined into one line. -# Examples follow... - -# yes 2hr: Ridge wnd dir..W W W W W W W -# : Ridge wnd spd..36 36 36 36 36 36 16 - -# no 2 hr: Ridgetop wind...W 36 W 36 W 36 W 36 W 36 W 36 W 16 - -#Definition["tableRidgeElementSplit"] = "yes" -Definition["tableRidgeElementSplit"] = "no" - -# tableTransElementSplit - When the time resolution of the table is 1 hour, then -# I am forced to create separate lines for wind direction -# and wind speed. When the resolution is 2 hours or more, -# then a WFO has a choice of formats. They can set -# tableTransElementSplit to "yes" and wind direction and -# speed will remain in their separate lines. Or the WFO -# can set tableTransElementSplit to "no" and the wind -# direction and speed will be combined into one line. -# Examples follow... - -# yes 2hr: Transp wind dir.W W W W W W W -# : Transp wind spd.8 8 8 8 8 8 20 - -# no 2 hr: Transport wind..W 8 W 8 W 8 W 8 W 8 W 8 W 20 - -#Definition["tableTransElementSplit"] = "yes" -Definition["tableTransElementSplit"] = "no" - -# tableSwellElementSplit - When the time resolution of the table is 1 hour, then -# I am forced to create separate lines for swell direction -# and swell height. When the resolution is 2 hours or more, -# then a WFO has a choice of formats. They can set -# tableSwellElementSplit to "yes" and swell direction and -# height will remain in their separate lines. Or the WFO -# can set tableSwellElementSplit to "no" and the swell -# direction and height will be combined into one line. -# Examples follow... 
- -# yes 2hr: Swell direction.W W W W W W W -# : Swell hgt (ft)..36 36 36 36 36 36 16 - -# no 2 hr: Swell hgt (ft)..W 36 W 36 W 36 W 36 W 36 W 36 W 16 - -#Definition["tableSwellElementSplit"] = "yes" -Definition["tableSwellElementSplit"] = "no" - -# tableSfcWindElementSplit - When the time resolution of the table is 1 hour, then -# I am forced to create separate lines for wind direction, -# wind speed, and wind gust speed. When the resolution -# is 2 hours or more, then a WFO has a choice of formats. -# They can set tableSfcWindElementSplit to "yes" and wind -# direction, speed, and gusts will remain in their -# separate lines. Or the WFO can set tableSfcWindElementSplit -# to "no". For the two hour resolution, direction and -# speed will be combined. For three and four hour -# resolution, direction, speed, and gusts will be -# combined. Examples follow... - -# yes 2hr: Surface wnd dir.SW W W W W W W -# : Surface wnd spd.26 26 18 18 18 14 14 -# : Surface wnd gst.40 40 - -# no 2 hr: Surface wind....W 26 W 26 W 18 W 18 W 18 W 14 W 14 -# : Surface wnd gst.40 40 - -# yes 3hr: Surface wnd dir.W W W W W -# : Surface wnd spd.26 25 13 14 13 -# : Surface wnd gst.40 40 - -# no 3 hr: Surface wind....W 26G40 W 25G40 W 13 W 14 W 13 - -#Definition["tableSfcWindElementSplit"] = "yes" -Definition["tableSfcWindElementSplit"] = "no" - -# cwrParm - Some WFOs (especially in wetter climates) use the PoP grid for -# chance of wetting rain, whereas offices in dry climates create a -# CWR grid that has values lower than the PoP grid. Value values -# for this definition is either "CWR" or "PoP". 
- -Definition["cwrParm"] = "PoP" - - -### *** END TABULAR TEST SECTION *** ### - -# END definitions -############################################################ - -#********************************************************************** -# MAKE NO CHANGES HERE -# The minimum contents of this file are the above Definition = {} line -# plus following class definition and the __init__ method with only -# the "pass" line in it. - -class FWS_Overrides: - """Class NNN_FILETYPE - Version: IFPS""" - - def __init__(self): - pass - -# End MAKE NO CHANGES HERE -#********************************************************************** - # Add methods here making sure to indent inside the class statement - # FWS Overrides ------------------------ - - # It is helpful to put a debug statement at the beginning of each - # method to help with trouble-shooting. - #def _method(self): - #self.debug_print("Debug: _method in FWS_Overrides") - - def _processVariableList(self, definition): - - # Get Definition variables - for key in definition.keys(): - exec "self._" + key + "= definition[key]" - - # Load in a user specified number of STQ products into the formatter. - products = self._getStqProducts() - - # Get the information for the specific fire. - # IF there are STQ products in the directory, - # selection GUI will be displayed - cancel = self._getFireInfo(products) - if cancel: - # User cancelled - return None - - # Get the user information for the specific fire - # and return the resulting varDict - return self._displayFireInfo() - - def _getStqProducts(self): - # Load in a user specified number of STQ products into the formatter. 
- # If no products found, return empty list - products = [] - version = 0 - stqPil = self._statePil + self._stqPil - searchString="" - for version in range(self._stqNumberVersions): - product = self.getPreviousProduct(stqPil, searchString, version=version) - if product is None or product == "": - break - - # Let's filter the product just in case single quote is put - # into the the request. - product = string.replace(product,"\'","") - - product = string.split(product, "\n") - missingFlag=1 - feedbackFlag=0 - deleteFlag=0 - for line in product: - line = string.replace(line, "\n", "") - if "PROJECT NAME" in line: - missingFlag=0 - if "Feedback was just received for project" in line: - feedbackFlag=1 - if "The Spot Forecast Request for project" in line: - deleteFlag=1 - if not missingFlag and not feedbackFlag and not deleteFlag: - products.append(product) - return products - - def _getFireInfo(self, products): - # If there were STQ products, display their names for user to select - # Return 1 if user cancels - product, issuance, forecasters = self._getFireProduct(products) - if issuance is None: - return 1 # User cancelled - if len(products) > 0: - self._noStqProduct = 0 - else: - product = None - self._noStqProduct = 1 - self._getProductInfo(product, issuance, forecasters) - - def _getFireProduct(self, products): - # Create the fireNameList used for the spot selection menu. 
- fireNameList = [] - ofileList = [] - validProductFound = 0 - productNumber = 0 - masterProductList = [] - for product in products: - fireName = "NAME MISSING" - timeStamp = "DDHHMM" - tag = "YYYYMMDD.XXXXX.NN" - tagFlag = 0 - feedbackFlag=0 - deleteFlag=0 - for line in product: - line = string.replace(line, "\n", "") - if "PROJECT NAME" in line: - fireName = string.upper(line[22:]) - if self._stqWmoID in line: - timeStamp = line[12:] - if "OFILE" in line: - tag = string.upper(line[8:]) - if tag not in ofileList: - ofileList.append(tag) - tagFlag = 1 - productNumber = productNumber + 1 - if tagFlag: - fireNameList.append(`productNumber` + ") " + fireName + \ - " -- " + timeStamp + " -- " + tag) - masterProductList.append(product) - validProductFound = 1 - - varList = [] - - if validProductFound: - fireNameList.append("Manually Enter in Request Info") - desFireName = "Please Choose a Fire", "fireName" - varList.append((desFireName, fireNameList[0], "radio", fireNameList)) - - # Product Issuance Processing - issuanceList = [ - "Morning", "Morning Update", "Afternoon Update", - "Afternoon", "Afternoon with 4 periods", "Evening Update", - "Evening Update with 4 periods", "Early Morning Update", - "Early Morning Update with 4 periods", "Next Day" - ] - desIssuanceList = "Product Issuance:", "productIssuance" - varList.append((desIssuanceList, issuanceList[0], "radio", issuanceList)) - - # Forecaster List Section of the GUI - forecasterNameList = [] - defaultForecasterNameList = [] - cmd = "whoami" - db = os.popen(cmd,'r') - awipsLogin = db.read() - db.close() - awipsLogin = string.replace(awipsLogin, "\n", "") - for forecaster in self._forecasterList: - id, awipsName, name = forecaster - forecasterNameList.append(name) - if awipsLogin == awipsName: - defaultForecasterNameList.append(name) - desForecasterNameList = "Forecaster:", "forecaster" - varList.append((desForecasterNameList, defaultForecasterNameList, "check", forecasterNameList)) - - if 
self._includeCreationTimeOnGUI: - # Get start date and time from user - desCreationDate = "Forecast Start Date (ex. 5/25/06)", "creationDate" - varList.append((desCreationDate, "", "alphaNumeric")) - desCreationTime = "Forecast Start Time in LT (ex 0900)", "creationTime" - varList.append((desCreationTime, "", "alphaNumeric")) - - # Launch the Spot Request selection GUI. - varDict = self._callProcessVariableList("Select Spot Request", varList, varDict={}) - if varDict is None: - return None, None, None - - productIssuance = varDict[desIssuanceList] - forecasters = varDict[desForecasterNameList] - if self._includeCreationTimeOnGUI: - self._creationDate = varDict[desCreationDate] - self._creationTime = varDict[desCreationTime] - - if validProductFound: - if varDict[desFireName] == "Manually Enter in Request Info": - return None, productIssuance, forecasters - else: - stqIndex = fireNameList.index(varDict[desFireName]) - return masterProductList[stqIndex], productIssuance, forecasters - else: - return None, productIssuance, forecasters - - def _callProcessVariableList(self, title, varList, varDict): - processVarList = ProcessVariableList.ProcessVariableList( - title, varList, varDict={}) - self._selectionStatus = processVarList.status() - if not self._selectionStatus == "OK": - return None # User Cancelled - return processVarList.varDict() - - - def _weInfoList(self): - # This is the list of possible weather parameters listed under the - # ...WEATHER PARAMETERS REQUESTED... section in your STQ Product. - # These are listed in the order they will appear in the product. - # - # Weather Elements: If you have a weather element to add, - # then send an email to Virgil.Middendorf@noaa.gov with your addition. - # I will baseline it. - # - # Phrases: You can override this method and edit the phrase method if you - # don't like the one used in baseline. 
- - # For each element, we list: - # --an identifier - # --flag to indicate if this is a default element - # --the FWF phrase (or list of phrases) to include in the product - # --a list of search strings that must appear in - # the STQ product to specify the element. - # Each search string in the list may be a tuple in which case any of - # the entries in the tuple will satsify the search. - - if self._useRH: - dayRH = "RH" - nightRH = "RH" - else: - dayRH = "MinRH" - nightRH = "MaxRH" - if self._wind20ftHeader: - wind = [self.fireWind_label_phrase, self.fireWind_compoundPhrase] - else: - wind = [self.fireWind_compoundPhrase] - return [ - ("SKY/WEATHER", 1, self.skyWeather_byTimeRange_compoundPhrase, - [("SKY", "CLOUDS"), "WEATHER"]), - ("BEGIN/END OF PCPN", 0, self.pcpnTiming_phrase, - ["BEGIN", "END", "PCPN"]), - ("TEMPERATURE", 1, (self.dayOrNight_phrase, ["MaxT", "MinT", 1, 1]), - [("TEMPERATURE", "TEMP")]), - ("HUMIDITY", 1, (self.dayOrNight_phrase, [dayRH, nightRH, 1, 1]), - [("RH", "HUMIDITY")]), - ("DEWPOINT", 0, self.td_phrase, - ["DEWPOINT"]), - ("20 FOOT WINDS", 0, wind, - ["20", "WIND", ("FT", "FOOT")]), - ("EYE LEVEL WINDS", 1, self.fireEyeWind_compoundPhrase, - [("EYE","10"), "WIND"]), - ("SURFACE WINDS", 0, self.fireSfcWind_compoundPhrase, - ["SURFACE", "WIND"]), - ("WIND SHIFT", 0, self.fireWindShift_label_phrase, - ["WIND", "SHIFT"]), - ("RIDGE TOP WIND", 0, self.freeWind_phrase, - ["WIND", "RIDGE", "TOP"]), - ("SURROUNDING RIDGE", 0, self.surroundingRidgeWind_phrase, - ["SURROUNDING", "RIDGE", "WIND"]), - ("CWR", 0, self.cwr_phrase, - [("CWR", "WETTING RAIN")]), - ("POP", 0, self.pop_phrase, - [("PRECIPITATION", "CHANCE OF PCPN", "POP")]), - ("LIGHTNING ACTIVITY LEVEL", 0, self.lal_phrase, - [("LAL", "LIGHTNING")]), - ("SMOKE DISPERSION", 1, [self.mixingHgt_phrase, self.transportWind_phrase], - [("SMOKE", "DISPERSION")]), - ("MIXING HEIGHT", 0, self.mixingHgt_phrase, - ["MIXING"]), - ("TRANSPORT WINDS", 0, self.transportWind_phrase, - 
["TRANSPORT", "WIND"]), - ("LDSI", 0, self.ldsi_phrase, - ["LDSI"]), - ("LVORI", 0, self.lvori_phrase, - ["LVORI"]), - ("ADI",0, self.adi_phrase, - ["ADI"]), - ("DISPERSION INDEX", 0, self.dsi_phrase, - ["DISPERSION", "INDEX"]), - ("CLEARING INDEX", 0, self.smokeDispersal_phrase, - ["CLEARING", "INDEX"]), - ("STABILITY CLASS", 0, self.stabilityClass_phrase, - ["STABILITY"]), - ("MARINE LAYER", 0, self.marineLayer_phrase, - ["MARINE", "LAYER"]), - ("HAINES INDEX", 0, self.haines_phrase, - ["HAINES", "INDEX"]), - ("VENTILATION RATE", 0, self.smokeDispersal_phrase, - ["VENTILATION", "RATE"]), - ("SWELL HEIGHT", 0, self.swell_phrase, - ["SWELL", "HEIGHT"]), - ("WAVE HEIGHT", 0, self.waveHeight_phrase, - ["WAVE","HEIGHT"]), - ("SWELL PERIOD", 0, self.period_phrase, - ["SWELL", "PERIOD"]), - ("WIND WAVE", 0, self.windWave_phrase, - ["WIND", "WAVE"]), - ("RAINFALL AMOUNT", 0, self.qpf_phrase, - ["RAINFALL", "AMOUNT"]), - ("SNOWFALL AMOUNT", 0, self.snow_phrase, - ["SNOWFALL", "AMOUNT"]), - ("FREEZING LEVEL", 0, self.freezingLevel_phrase, - ["FREEZING", "LEVEL"]), - ("CEILING", 0, self.ceiling_phrase, - ["CEILING"]), - ("VISIBILITY", 0, self.visibility_phrase, - ["VISIBILITY"]), - ("ICING", 0, self.icing_phrase, - ["ICING"]), - ("HAZARDS", 0, self.ceiling_phrase, - ["HAZARDS"]), - ("HEAT INDEX", 0, self.heatIndex_phrase, - ["HEAT", "INDEX"]), - ] - - def _weInfoHiddenList(self): - # This is the list of possible weather parameters that are NOT listed - # under the ...WEATHER PARAMETERS REQUESTED... section in your STQ - # Product. There are times when a WFO will want to format certain - # weather elements in the spot forecast, but do NOT want those elements - # to be listed in the website. - - # These elements will be appended below the "weather parameters requested" - # elements and will be in the order as specified in _weInfoHiddenList - # - # All weather elements will be commented out and they are the same - # weather elements listed in _weInfoList. 
If you have a weather element - # to add, then send an email to Virgil.Middendorf@noaa.gov for baselining. - # - # Phrases: Phrases associated with each element listed in this method - # is still configured in the _weInfoList method - - # For each element, we list: - # --an identifier - # --flag to indicate if this is a default element - - return [ -## ("SKY/WEATHER", 0), -## ("BEGIN/END OF PCPN", 0), -## ("TEMPERATURE", 0), -## ("HUMIDITY", 0), -## ("DEWPOINT", 0), -## ("20 FOOT WINDS", 0), -## ("EYE LEVEL WINDS", 0), -## ("SURFACE WINDS", 0), -## ("WIND SHIFT", 0), -## ("RIDGE TOP WIND", 0), -## ("SURROUNDING RIDGE", 0), -## ("CWR", 0), -## ("POP", 0), -## ("LIGHTNING ACTIVITY LEVEL", 0), -## ("SMOKE DISPERSION", 0), -## ("MIXING HEIGHT", 0), -## ("TRANSPORT WINDS", 0), -## ("DISPERSION INDEX", 0), -## ("LDSI", 0), -## ("LVORI", 0), -## ("ADI", 0), -## ("CLEARING INDEX", 0), -## ("STABILITY CLASS", 0), -## ("MARINE LAYER", 0), -## ("HAINES INDEX", 0), -## ("VENTILATION RATE", 0), -## ("SWELL HEIGHT", 0), -## ("WAVE HEIGHT", 0), -## ("SWELL PERIOD", 0), -## ("WIND WAVE", 0), -## ("RAINFALL AMOUNT", 0), -## ("SNOWFALL AMOUNT", 0), -## ("FREEZING LEVEL", 0), -## ("CEILING", 0), -## ("VISIBILITY", 0), -## ("ICING", 0), -## ("HAZARDS", 0), -## ("HEAT INDEX", 0), - ] - - def _rowList(self, colWidth=1): - - ### 20 foot wind tabular phrase configuration ### - - if self._tableWindElementSplit == "no" and colWidth == 7: # 2 hourly - if self._bothAlphaNumericDict.get(self._20ftWindParm, "No") == "No": - wind = [("20 ft wind......", self._wind_value), - ("20 ft wind gust.", self._windGust_value)] - else: - wind = [("20 ft wind......", self._wind_value), - ("20 ft wind gust.", self._windGust_value), - ("20 ft wind dir..", self._windNumDir_value)] - elif self._tableWindElementSplit == "no" and colWidth > 7: # 3-4 hourly - if self._bothAlphaNumericDict.get(self._20ftWindParm, "No") == "No": - wind = [("20 ft wind......", self._windWithGust_value)] - else: - wind = [("20 ft 
wind......", self._windWithGust_value), - ("20 ft wind dir..", self._windNumDir_value)] - else: - if self._bothAlphaNumericDict.get(self._20ftWindParm, "No") == "No": - wind = [("20 ft wind dir..", self._windDir_value), # 1 hourly - ("20 ft wind spd..", self._windSpd_value), - ("20 ft wind gust.", self._windGust_value)] - else: - wind = [("20 ft wind dir..", self._windDir_value), # 1 hourly - ("20 ft wind dir..", self._windNumDir_value), - ("20 ft wind spd..", self._windSpd_value), - ("20 ft wind gust.", self._windGust_value)] - - ### eye level wind tabular phrase configuration ### - - if self._tableEyeWindElementSplit =="no" and colWidth == 7: - if self._bothAlphaNumericDict.get("EyeWind", "No") == "No": - eyewind = [("Eye level wind..", self._eyewind_value), - ("Eye lvl wnd gst.", self._eyewindGust_value)] - else: - eyewind = [("Eye level wind..", self._eyewind_value), - ("Eye lvl wnd gst.", self._eyewindGust_value), - ("Eye lvl wnd dir.", self._eyewindNumDir_value)] - elif self._tableEyeWindElementSplit == "no" and colWidth > 7: - if self._bothAlphaNumericDict.get("EyeWind", "No") == "No": - eyewind = [("Eye level wind..", self._eyewindWithGust_value)] - else: - eyewind = [("Eye level wind..", self._eyewindWithGust_value), - ("Eye lvl wnd dir.", self._eyewindNumDir_value)] - else: - if self._bothAlphaNumericDict.get("EyeWind", "No") == "No": - eyewind = [("Eye lvl wnd dir.", self._eyewindDir_value), - ("Eye lvl wnd spd.", self._eyewindSpd_value), - ("Eye lvl wnd gst.", self._eyewindGust_value)] - else: - eyewind = [("Eye lvl wnd dir.", self._eyewindDir_value), - ("Eye lvl wnd dir.", self._eyewindNumDir_value), - ("Eye lvl wnd spd.", self._eyewindSpd_value), - ("Eye lvl wnd gst.", self._eyewindGust_value)] - - ### surface wind (10m) tabular phrase configuration ### - - if self._tableSfcWindElementSplit =="no" and colWidth == 7: - - if self._bothAlphaNumericDict.get("SfcWind", "No") == "No": - sfcwind = [("Surface wind....", self._sfcwind_value), - ("Surface wnd 
gst.", self._sfcwindGust_value)] - else: - sfcwind = [("Surface wind....", self._sfcwind_value), - ("Surface wnd gst.", self._sfcwindGust_value), - ("Surface wnd dir.", self._sfcwindNumDir_value)] - - elif self._tableSfcWindElementSplit == "no" and colWidth > 7: - - if self._bothAlphaNumericDict.get("SfcWind", "No") == "No": - sfcwind = [("Surface wind....", self._sfcwindWithGust_value)] - else: - sfcwind = [("Surface wind....", self._sfcwindWithGust_value), - ("Surface wnd dir.", self._sfcwindNumDir_value)] - - else: - - if self._bothAlphaNumericDict.get("SfcWind", "No") == "No": - sfcwind = [("Surface wnd dir.", self._sfcwindDir_value), - ("Surface wnd spd.", self._sfcwindSpd_value), - ("Surface wnd gst.", self._sfcwindGust_value)] - else: - sfcwind = [("Surface wnd dir.", self._sfcwindDir_value), - ("Surface wnd dir.", self._sfcwindNumDir_value), - ("Surface wnd spd.", self._sfcwindSpd_value), - ("Surface wnd gst.", self._sfcwindGust_value)] - - ### ridge top wind tabular phrase configuration ### - - if self._tableRidgeElementSplit == "no" and colWidth >=7: - if self._bothAlphaNumericDict.get("RidgeWind", "No") == "No": - ridge = [("Ridgetop wind...", self._ridge_value)] - else: - ridge = [("Ridgetop wind...", self._ridge_value), - ("Ridge wnd dir..", self._ridgeNumDir_value)] - else: - if self._bothAlphaNumericDict.get("RidgeWind", "No") == "No": - ridge = [("Ridge wnd dir..", self._ridgeDir_value), - ("Ridge wnd spd..", self._ridgeSpd_value)] - else: - ridge = [("Ridge wnd dir..", self._ridgeDir_value), - ("Ridge wnd dir..", self._ridgeNumDir_value), - ("Ridge wnd spd..", self._ridgeSpd_value)] - - ### swell tabular phrase configuration ### - - if self._tableSwellElementSplit == "no" and colWidth >=7: - swell = [("Swell hgt (ft)..", self._swell_value)] - else: - swell = [("Swell direction.", self._swellDir_value), - ("Swell hgt (ft)..", self._swellHgt_value)] - - ### Mixing Height and Transport wind label configuration ### - - if self._tabularMixingHeightUnits 
== "ft" and colWidth > 4: - mixLabel = "Mix hgt (ft)...." - mixMetricLabel = "Mix hgt (m)....." - else: - mixLabel = "Mix hgt (kft)..." - mixMetricLabel = "Mix hgt (km)...." - - if self._transportWindLabel == "mix": - transLabel = "Mixing wind......" - transMetricLabel = "Mix wind (m/s).." - transDirLabel = "Mixng wind dir.." - transSpdLabel = "Mixng wind spd.." - transSpdMetricLabel = "Mix wnd spd m/s." - else: - transLabel = "Transport wind.." - transMetricLabel = "Tran wind (m/s)." - transDirLabel = "Transp wind dir." - transSpdLabel = "Transp wind spd." - transSpdMetricLabel = "Trans spd (m/s)." - - if self._tableTransElementSplit == "no" and colWidth >=7: - # Baseline - if self._includeMetricDispersion == "yes": - if self._bothAlphaNumericDict.get("TransWind", "No") == "No": - smoke = [(mixLabel, self._mixingHeight_value), - (mixMetricLabel, self._mixingHeightMetric_value), - (transLabel, self._trans_value), - (transMetricLabel, self._transMetric_value)] - trans = [(transLabel, self._trans_value), - (transMetricLabel, self._transMetric_value)] - else: - smoke = [(mixLabel, self._mixingHeight_value), - (mixMetricLabel, self._mixingHeightMetric_value), - (transLabel, self._trans_value), - (transDirLabel, self._transNumDir_value), - (transMetricLabel, self._transMetric_value)] - trans = [(transLabel, self._trans_value), - (transDirLabel, self._transNumDir_value), - (transMetricLabel, self._transMetric_value)] - else: - if self._bothAlphaNumericDict.get("TransWind", "No") == "No": - smoke = [(mixLabel, self._mixingHeight_value), - (transLabel, self._trans_value)] - trans = [(transLabel, self._trans_value)] - else: - smoke = [(mixLabel, self._mixingHeight_value), - (transLabel, self._trans_value), - (transDirLabel, self._transNumDir_value)] - trans = [(transLabel, self._trans_value), - (transDirLabel, self._transNumDir_value)] - else: - # Baseline - if self._includeMetricDispersion == "yes": - if self._bothAlphaNumericDict.get("TransWind", "No") == "No": - smoke = 
[(mixLabel, self._mixingHeight_value), - (mixMetricLabel, self._mixingHeightMetric_value), - (transDirLabel, self._transDir_value), - (transSpdLabel, self._transSpd_value), - (transSpdMetricLabel, self._transSpdMetric_value)] - trans = [(transDirLabel, self._transDir_value), - (transSpdLabel, self._transSpd_value), - (transSpdMetricLabel, self._transSpdMetric_value)] - else: - smoke = [(mixLabel, self._mixingHeight_value), - (mixMetricLabel, self._mixingHeightMetric_value), - (transDirLabel, self._transDir_value), - (transDirLabel, self._transNumDir_value), - (transSpdLabel, self._transSpd_value), - (transSpdMetricLabel, self._transSpdMetric_value)] - trans = [(transDirLabel, self._transDir_value), - (transDirLabel, self._transNumDir_value), - (transSpdLabel, self._transSpd_value), - (transSpdMetricLabel, self._transSpdMetric_value)] - else: - if self._bothAlphaNumericDict.get("TransWind", "No") == "No": - smoke = [(mixLabel, self._mixingHeight_value), - (transDirLabel, self._transDir_value), - (transSpdLabel, self._transSpd_value)] - trans = [(transDirLabel, self._transDir_value), - (transSpdLabel, self._transSpd_value)] - else: - smoke = [(mixLabel, self._mixingHeight_value), - (transDirLabel, self._transDir_value), - (transDirLabel, self._transNumDir_value), - (transSpdLabel, self._transSpd_value)] - trans = [(transDirLabel, self._transDir_value), - (transDirLabel, self._transNumDir_value), - (transSpdLabel, self._transSpd_value)] - if self._includeMetricDispersion == "yes": - mix = [(mixLabel, self._mixingHeight_value), - (mixMetricLabel, self._mixingHeightMetric_value)] - else: - mix = [(mixLabel, self._mixingHeight_value)] - - ### sky/wx/hazard tabular phrase configuration ### - - if self._elementFormatDict.get("Sky", "alpha") == "alpha": - if self._bothAlphaNumericDict.get("Sky", "No") == "No": - skywx = [("Sky cover.......", self._sky_value), - ("Weather cov.....", self._weatherCov_value), - ("Weather type....", self._weatherType_value), - ("Tstm 
cov........", self._tstmCov_value)] - else: - skywx = [("Sky cover.......", self._sky_value), - ("Sky (%).........", self._numSky_value), - ("Weather cov.....", self._weatherCov_value), - ("Weather type....", self._weatherType_value), - ("Tstm cov........", self._tstmCov_value)] - else: - skywx = [("Sky (%).........", self._sky_value), - ("Weather cov.....", self._weatherCov_value), - ("Weather type....", self._weatherType_value), - ("Tstm cov........", self._tstmCov_value)] - - hazard = [("Hazard VTEC 1..", self._wwa_value), - ("Hazard VTEC 2..", self._wwa2_value), - ("Hazard VTEC 3..", self._wwa3_value)] - - return [ - # Set to Directive requirements - # Each entry is a tuple: - # (Narrative Element, narrativeToo, tableRows) - # - # If narrativeToo is 1, then the narrative phrase will be included - # in the narrative portion of the product as well. - # tableRows is a list of (label:method) pairs. - # - ("SKY/WEATHER" , 1, skywx), - ("TEMPERATURE" , 1,[("Temp............", self._temp_value)]), - ("DEWPOINT" , 1,[("Dewpoint........", self._td_value)]), - ("HUMIDITY" , 1,[("RH..............", self._rh_value)]), - ("20 FOOT WINDS" , 1, wind), - ("EYE LEVEL WINDS" , 1, eyewind), - ("SURFACE WINDS" , 1, sfcwind), - #("RIDGE TOP WIND" , 1, ridge), - #("SMOKE DISPERSION" , 1, smoke), - #("MIXING HEIGHT" , 1, mix), - #("TRANSPORT WINDS" , 1, trans), - ("DISPERSION INDEX" , 1,[("Dispersion......", self._dsi_value)]), - ("LDSI" , 1,[("Dispersion idx..", self._ldsi_value)]), - ("LVORI" , 1,[("LVORI...........", self._lvori_value)]), - ("ADI" , 1,[("ADI.............", self._adi_value)]), - #("CWR" , 1,[("CWR.............", self._cwr_value)]), - ("POP" , 1,[("Chc of pcpn (%).", self._pop_value)]), - #("LIGHTNING ACTIVITY LEVEL", 1,[("LAL.............", self._lal_value)]), - ("HAINES INDEX" , 1,[("Haines Index....", self._haines_value)]), - ("VENTILATION RATE" , 1,[("Vrate kt-ft/1000", self._ventrate_value)]), - ("SWELL HEIGHT" , 1, swell), - ("SWELL PERIOD" , 1,[("Swell period 
(s)", self._swellPeriod_value)]), - ("WIND WAVE" , 1,[("Wind wave (ft)..", self._windWave_value)]), - ("WAVE HEIGHT" , 1,[("Wave height (ft)", self._waveHeight_value)]), - ("FREEZING LEVEL" , 1,[("Fz level (kft)..", self._freezingLevel_value)]), - ("CEILING" , 1,[("Ceiling (kft)...", self._ceiling_value)]), - ("VISIBILITY" , 1,[("Visibility (sm).", self._visibility_value)]), - ("ICING" , 1,[("Icing...........", self._ceiling_value)]), - ("HAZARDS" , 0, hazard), - ("HEAT INDEX" , 1,[("Heat index (F)..", self._heatIndex_value)]), - ] - - def _getProductInfo(self, product, issuance, forecasters): - # Parse the spot request information selected and - # return the FireInfo for display. - timezone = os.environ["TZ"] - spotRequestInfo = [ - ("PROJECT NAME:", "fireName", "'xxxx'"), - ("PROJECT TYPE:", "fireType", "'WILDFIRE'"), - ("REQUESTING AGENCY:", "requestingAgency", "'xxxx'"), - ("REQUESTING OFFICIAL:", "agencyContact", "'yyyy'"), - ("DLAT:", "fireLatitude", "28.27"), - ("DLON:", "fireLongitude", "82.19"), - ("SIZE (ACRES):", "fireSize", "1"), - ("SITE:", "wfoID", "''"), - ("OFILE:", "webSiteTag", "''"), - ("TIMEZONE:", "webTimeZone", "timezone"), - ("DATE:", "fireDate", "'1/1/01'"), - ("TIME:", "fireTime", "'1300'"), - ] - - obs = [] - self._spotList = ["This is a New Incident"] - remarksFlag = 0 - remarks = "" - - self._periodElementDict = { - "Today": [], "Tonight": [], "Tomorrow": [] - } - - self._periodAllElementDict = { - "Today": [], "Tonight": [], "Tomorrow": [] - } - - # Set default values - for field, variable, default in spotRequestInfo: - exec "self._"+variable + " = " + default - - # If no issuance to use, we are done. - if issuance is None: - return - self._productIssuance = issuance - - # If no forecasters included, we are done. 
- if forecasters is None: - return - self._forecasters = forecasters - - # If no product to parse, we are done - if product is None: - # Use default list of weather elements - for element, defaultFlag, phrases, searchStrings in self._weInfoList(): - if defaultFlag: - self._periodAllElementDict["Today"].append(element) - self._periodAllElementDict["Tonight"].append(element) - self._periodAllElementDict["Tomorrow"].append(element) - self._periodElementDict["Today"].append(element) - self._periodElementDict["Tonight"].append(element) - self._periodElementDict["Tomorrow"].append(element) - #self._allPeriodElementDict["Today"].append(element) - #self._allPeriodElementDict["Tonight"].append(element) - #self._allPeriodElementDict["Tomorrow"].append(element) - #if defaultFlag: - # self._periodElementDict["Today"].append(element) - # self._periodElementDict["Tonight"].append(element) - # self._periodElementDict["Tomorrow"].append(element) - for element, defaultFlag in self._weInfoHiddenList(): - if defaultFlag: - self._periodElementDict["Today"].append(element) - self._periodElementDict["Tonight"].append(element) - self._periodElementDict["Tomorrow"].append(element) - self._periodAllElementDict["Today"].append(element) - self._periodAllElementDict["Tonight"].append(element) - self._periodAllElementDict["Tomorrow"].append(element) - return - - # Parse product - wxParmFlag = 0 - for line in product: - print line - if line.find("...WEATHER PARAMETERS REQUESTED...") >= 0: - wxParmFlag = 1 - if line.find("SITE:") >= 0: - wxParmFlag = 0 - # If the line has a colon, split it into fieldName/value - cleanLine = string.replace(string.upper(line),"\n", "") - cleanLine = cleanLine.strip() - index = cleanLine.find(":") - if index >= 0: - # Handle STQ fields (lines with a colon) - fieldName = cleanLine[:index].strip() - value = cleanLine[index+1:].strip() - - for field, variable, default in spotRequestInfo: - if field in cleanLine and cleanLine.find(field) == 0: - # Assign to variable - 
exec "self._"+variable + " = value" - - if wxParmFlag: - for element, defaultFlag, phrases, searchStrings in self._weInfoList(): - if self._checkStrs(searchStrings, fieldName) == 1: - #Enter flags in dictionary e.g. 1,1,1 for Today, Tonight, Tomorrow - flags = value.split(",") - if flags[0] == "1": - self._periodElementDict["Today"].append(element) - if flags[1] == "1": - self._periodElementDict["Tonight"].append(element) - if flags[2] == "1": - self._periodElementDict["Tomorrow"].append(element) - self._periodAllElementDict["Today"].append(element) - self._periodAllElementDict["Tonight"].append(element) - self._periodAllElementDict["Tomorrow"].append(element) - - if "ELEV=" in line and "TIME=" in line: - ob = string.replace(string.upper(line),"\n","") - if "ELEV= TIME=" not in ob: - obs.append(ob) - if remarksFlag and "FORECAST ELEMENTS" not in line: - remarks = remarks + line - if "...REMARKS..." in line: - remarksFlag = 1 - if "...WEATHER PARAMETERS REQUESTED..." in line: - remarksFlag = 0 - remarks = string.replace(remarks,"\n\n","\n") - remarks = string.replace(remarks,"\n\n","\n") - - for element, defaultFlag in self._weInfoHiddenList(): - if defaultFlag: - if len(self._periodElementDict["Today"]) != 0: - self._periodElementDict["Today"].append(element) - if len(self._periodElementDict["Tonight"]) != 0: - self._periodElementDict["Tonight"].append(element) - if len(self._periodElementDict["Tomorrow"]) != 0: - self._periodElementDict["Tomorrow"].append(element) - self._periodAllElementDict["Today"].append(element) - self._periodAllElementDict["Tonight"].append(element) - self._periodAllElementDict["Tomorrow"].append(element) - - def _displayFireInfo(self): - - # Build and display GUI using the fireInfo - varList = [] - - # Fire Type Section of the GUI - desTypeList = "Type of Fire:", "fireType" - varList.append((desTypeList, self._fireType, "radio", self._typeList)) - - # requesting Agency Section of the GUI - desAgencyNameList = "Agency:", "requestingAgency" - 
agencyNameList = [] - findAgencyFlag = 0 - for agency in self._agencyList: - id,name = agency - agencyNameList.append(name) - if self._requestingAgency == name: - findAgencyFlag = 1 - requestingAgencyDefault = self._requestingAgency - if not findAgencyFlag: - agencyNameList.append("Unlisted") - requestingAgencyDefault = "Unlisted" - varList.append((desAgencyNameList, requestingAgencyDefault, "radio", agencyNameList)) - - # Include Extendeds/Outlook Section of the GUI - if not self._shortTermOnly: - questionList = ["Include Day 3-5 Extended?", - "Include Day 6-7 Extended?", - "Include Day 8-14 Outlook?"] - desExtendedQuestions = "Check Items to Include:","extendedQuestions" - varList.append((desExtendedQuestions, [], "check", questionList)) - - # Forecast Type - desFcstType = "What Type of Forecast?", "forecastType" - labelList = [] - for label, forecastType in self._forecastTypeList: - labelList.append(label) - varList.append((desFcstType, self._defaultForecastType, "radio", labelList)) - - # Include Ignition Time Forecast Section of the GUI - if self._includeIgnitionOptionOnGUI: - desIT = ("Include Ignition Times?", "withIgnitionTimes") - varList.append((desIT, self._withIgnitionTimes, "radio", ["yes", "no"])) - - # Unlisted Agency Name Section of the GUI - if not findAgencyFlag: - desOtherAgencyName = "Name of Agency if not listed....", "otherAgencyName" - varList.append((desOtherAgencyName, self._requestingAgency, "alphaNumeric")) - - # Fire Name Section of the GUI - desFireName = "Name of Fire ...................................", "fireName" - varList.append((desFireName, self._fireName, "alphaNumeric")) - - # Fire Time Section of the GUI - desFireTime = "Time of Fire .....................................", "fireTime" - varList.append((desFireTime, self._fireTime, "alphaNumeric")) - - # Fire Date Section of the GUI - desFireDate = "Date of Fire .....................................", "fireDate" - varList.append((desFireDate, self._fireDate, "alphaNumeric")) - - 
# Agency Contact Section of the GUI - desAgencyContact = "Name of Agency Contact..........", "agencyContact" - varList.append((desAgencyContact, self._agencyContact, "alphaNumeric")) - - # Fire Latitude Section of the GUI - desFireLatitude = "Fire Latitude (Deg).......................", "fireLatitude" - varList.append((desFireLatitude, self._fireLatitude, "alphaNumeric")) - - # Fire Longitude Section of the GUI - desFireLongitude = "Fire Longitude (Deg)...................", "fireLongitude" - varList.append((desFireLongitude, self._fireLongitude, "alphaNumeric")) - - # Fire Size Section of the GUI - desFireSize = "Fire Size (Acres) .........................", "fireSize" - varList.append((desFireSize, self._fireSize, "alphaNumeric")) - - # Forecast Elements Section of the GUI - tableHoursDesc = "Tab Hrs" - if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: - desElementList = "Today Elements", "todayElements" - varList.append((desElementList, self._periodElementDict["Today"], - "check", self._periodAllElementDict["Today"])) - desTableRes = tableHoursDesc,"todayTableRes" - varList.append((desTableRes, self._defaultTabularResolution["Today"],"radio", - self._tabularResolutionDict["Today"])) - desElementList = "Tonight Elements", "tonightElements" - varList.append((desElementList, self._periodElementDict["Tonight"] , - "check", self._periodAllElementDict["Tonight"] )) - if self._tabularAllPeriods == "yes": - desTableRes = tableHoursDesc,"tonightTableRes" - varList.append((desTableRes, self._defaultTabularResolution["Tonight"],"radio", - self._tabularResolutionDict["Tonight"])) - desElementList = "Tomorrow Elements", "tomorrowElements" - varList.append((desElementList, self._periodElementDict["Tomorrow"], - "check", self._periodAllElementDict["Tomorrow"] )) - if self._tabularAllPeriods == "yes": - desTableRes = tableHoursDesc,"tomorrowTableRes" - varList.append((desTableRes, self._defaultTabularResolution["Tomorrow"],"radio", - 
self._tabularResolutionDict["Tomorrow"])) - - if self._productIssuance in ["Afternoon with 4 periods", "Evening Update with 4 periods", - "Early Morning Update with 4 periods"]: - desElementList = "Tomorrow Night Elements", "tomorrowNightElements" - varList.append((desElementList, self._periodElementDict["Tomorrow"], - "check", self._periodAllElementDict["Tomorrow"] )) - if self._tabularAllPeriods == "yes": - desTableRes = tableHoursDesc,"tomorrowNightTableRes" - varList.append((desTableRes, self._defaultTabularResolution["Tomorrow"],"radio", - self._tabularResolutionDict["Tomorrow"])) - desElementList = "Next Day Elements", "nextDayElements" - varList.append((desElementList, self._periodElementDict["Tomorrow"], - "check", self._periodAllElementDict["Tomorrow"] )) - if self._tabularAllPeriods == "yes": - desTableRes = tableHoursDesc,"nextDayTableRes" - varList.append((desTableRes, self._defaultTabularResolution["Tomorrow"],"radio", - self._tabularResolutionDict["Tomorrow"])) - - # Launch the Spot Request Quality Control GUI. - varDict = self._callProcessVariableList("Input Info", varList, varDict={}) - if varDict is None: - return None - - # Set up varDict for forecastType using labels - value = varDict[desFcstType] - for label, forecastType in self._forecastTypeList: - if label == value: - varDict[desFcstType] = forecastType - break - - # This section of code filters the forecaster entries to ensure that - # single quotes are not included. - if not findAgencyFlag: - try: - varDict[desOtherAgencyName] = string.replace(varDict[desOtherAgencyName],"\'","") - except AttributeError: - print "Other Agency Name is not a string." - try: - varDict[desFireName] = string.replace(varDict[desFireName],"\'","") - except AttributeError: - print "Fire Name is not a string." - try: - varDict[desAgencyContact] = string.replace(varDict[desAgencyContact],"\'","") - except AttributeError: - print "Fire Size is not a string." 
- try: - varDict[desFireSize] = string.replace(str(varDict[desFireSize]),"\'","") - except AttributeError: - print "Fire Size is not a string." - try: - varDict[desFireLatitude] = string.replace(str(varDict[desFireLatitude]),"\'","") - except AttributeError: - print "Latitude is not a string." - try: - varDict[desFireLongitude] = string.replace(str(varDict[desFireLongitude]),"\'","") - except AttributeError: - print "Longitude is not a string." - try: - varDict[desFireTime] = string.replace(str(varDict[desFireTime]),"\'","") - except AttributeError: - print "Ignition Time is not a string." - try: - varDict[desFireDate] = string.replace(str(varDict[desFireDate]),"\'","") - except AttributeError: - print "Ignition Date is not a string." - - # This section of code filters the forecaster entries to ensure that - # double quotes are not included. - if not findAgencyFlag: - try: - varDict[desOtherAgencyName] = string.replace(varDict[desOtherAgencyName],"\"","") - except AttributeError: - print "Other Agency Name is not a string." - try: - varDict[desFireName] = string.replace(varDict[desFireName],"\"","") - except AttributeError: - print "Fire Name is not a string." - try: - varDict[desAgencyContact] = string.replace(varDict[desAgencyContact],"\"","") - except AttributeError: - print "Fire Size is not a string." - try: - varDict[desFireSize] = string.replace(varDict[desFireSize],"\"","") - except AttributeError: - print "Fire Size is not a string." - try: - varDict[desFireLatitude] = string.replace(varDict[desFireLatitude],"\"","") - except AttributeError: - print "Latitude is not a string." - try: - varDict[desFireLongitude] = string.replace(varDict[desFireLongitude],"\"","") - except AttributeError: - print "Longitude is not a string." - try: - varDict[desFireTime] = string.replace(varDict[desFireTime],"\"","") - except AttributeError: - print "Ignition Time is not a string." 
- try: - varDict[desFireDate] = string.replace(varDict[desFireDate],"\"","") - except AttributeError: - print "Ignition Date is not a string." - - # convert lat/lon to floats - try: - varDict[desFireLatitude] = string.atof(varDict[desFireLatitude]) - except ValueError: - print "Latitude is not a float." - try: - varDict[desFireLongitude] = string.atof(varDict[desFireLongitude]) - except ValueError: - print "Longitude is not a float." - - # Convert fireTime - fireTime = varDict[desFireTime] - fireTime = "000" + str(int(float(fireTime))) - fireTime = fireTime[-4:] - varDict[desFireTime] = fireTime - - # Here are more varDict settings that need to be set before we launch - # the formatter. - varDict[("Product Issuance:", "productIssuance")] = self._productIssuance - varDict[("Forecaster:", "forecaster")] = self._forecasters - if self._includeCreationTimeOnGUI: - varDict[("Creation Date", "creationDate")] = self._creationDate - varDict[("Creation Time", "creationTime")] = self._creationTime - varDict[("WebSiteTag:", "webSiteTag")] = self._webSiteTag - varDict[("WFOid:", "wfoID")] = self._wfoID - varDict[("TimeZone:", "fireTZ")] = self._webTimeZone - - if self._shortTermOnly: - varDict[("Check Items to Include:","extendedQuestions")] = [] - - return varDict - - # From FWF. Needed to change .EXTENDED... to .FORECAST DAYS 3 THROUGH 7 - def setLabel(self, tree, component): - if self._includeExtended: - if "Include Day 3-5 Extended?" not in self._extendedQuestions: - component.set("words", ".FORECAST DAYS 6 THROUGH 7...\n") - else: - component.set("words", ".FORECAST DAYS 3 THROUGH 7...\n") - else: - component.set("words", ".FORECAST DAYS 3 THROUGH 5...\n") - return self.DONE() - - # From FWF. Modifed to write the output to a file in a user specified - # directory on the local lx machine. In addition, added sections to - # insert headlines, discussion, and 8-14 day outlook. 
- def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Quality Control Gui data - error = self._qualityControlFormData() - if error is not None: - return error - - # Get the areaList -- derived from the lat, lon, size of fire (acres), - # and the name of the fire. - error = self._determineAreaList(argDict) - if error is not None: - return error - - # Set the extended forecast configuration based on what was - # requested by the user. - error = self._setExtendedConfig() - if error is not None: - return error - - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the unrepresentative statement for the Product - fcst = self._makeUnrepresentStatement(fcst, argDict) - - # Generate the Headlines for the Product - for editArea, areaLabel in self._areaList: - fcst = self._makeHeadline(fcst, editArea, areaLabel, argDict) - - # Generate the Discussion section - fcst = self._makeDiscussion(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - - # Generate the summary extended section (if wanted) - fcst = self._makeSummaryExtended(fcst, argDict) - - # Generate the 8 to 14 Day Outlook section - error = 
self._generateOutlookLabels(argDict) - if error is not None: - return error - fcst = self._make8to14DayOutlook(fcst, argDict) - - fcst = self._postProcessProduct(fcst, argDict) - - return fcst - - def _determineTimeRanges(self, argDict): - # Set up the Narrative Definition and initial Time Range - self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict), argDict["creationTime"]) - - if self._tableStartTimeMode == "current": - # Add a "custom" component to sample data from current time - # to product start time - ct = self._issuanceInfo.issueTime() - currentTime = AbsTime.absTimeYMD(ct.year, ct.month, ct.day, - ct.hour) - productStart = self._issuanceInfo.timeRange().startTime() - tr = TimeRange.TimeRange(currentTime, productStart) - if tr.duration() > 0: - self._issuanceInfo.narrativeDef().append(\ - ("Custom", ("PreFirePeriod1", tr))) - - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._expireTime = self._issuanceInfo.expireTime() - self._issueTime = self._issuanceInfo.issueTime() - self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - if self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - # Determine the extended range - if self._individualExtended == 1: - self._extendedStart = self._timeRange.endTime() - 24*5*3600 - else: - self._extendedStart = self._timeRange.endTime() - self._extendedRange = TimeRange.TimeRange( - self._extendedStart, self._extendedStart + 3600) - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - return None - -# Quality Control Form Information from user dialog to ensure completeness. 
- - - # 04/24/07: Tabular/Narrative is okay for wildfires. Changed code to make - # Tabular Only into Tabular/Narrative for wildfires. - - # From FWS_Overrides. Fixed a bug that causes the formatter to crash - # when a number was entered for the agency or the contact. Added a - # method called _convertToStr to do this cleanly. - def _qualityControlFormData(self): - - # If fireSize is not an integer, then I default the size to 1 acre. - # This will allow the formatter to run even if the user puts invalid - # characters into the size field (like 10-20). - try: - self._fireSize = int(float(self._fireSize)+0.5) - except ValueError: - self._fireSize = 1 - if self._fireSize <= 0: - self._fireSize = 1 - - try: - lat = float(self._fireLatitude) - except ValueError: - return "Invalid latitude value." - if lat < 0.0 or lat > 90: - return "Invalid latitude value." - - try: - lon = float(self._fireLongitude) - except ValueError: - return "Invalid longitude value." - if lon < 0.0 or lon > 180.0: - return "Invalid longitude value. Must be positive." - - if len(self._forecaster) == 0: - return "You must select at least one forecaster in the list." - - if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: - elementLists = [self._todayElements, self._tonightElements, self._tomorrowElements] - elif self._productIssuance in ["Afternoon with 4 periods", "Evening Update with 4 periods", - "Early Morning Update with 4 periods"]: - elementLists = [self._tonightElements, self._tomorrowElements, - self._tomorrowNightElements, self._nextDayElements] - else: - elementLists = [self._tonightElements, self._tomorrowElements] - - # Check to make sure at least weather element is requested. - elementsFound = 0 - for elementList in elementLists: - if len(elementList) > 0: - elementsFound = 1 - break - if not elementsFound and \ - "Include Day 3-5 Extended?" not in self._extendedQuestions and \ - "Include Day 6-7 Extended?" 
not in self._extendedQuestions and \ - "Include Day 8-14 Outlook?" not in self._extendedQuestions: - return "You must select at least one weather element to " + \ - "forecast in the gui." - - # Code to ensure the wildfireElementList parameters are included in - # the FWS product (if this is a wildfire incident) was added to this - # method. - if self._fireType.upper() == "WILDFIRE": - for element in self._wildfireElementList: - for elementList in elementLists: - if element not in elementList and len(elementList) != 0: - elementList.append(element) - if self._tabularForWildfire == "no" and \ - self._forecastType == "Tabular Only": - self._forecastType = "Tabular/Narrative" - - self._fireName = self._convertToStr(self._fireName) - if len(self._fireName) == 0: - return "You must enter the Name of Fire." - - self._agencyContact = self._convertToStr(self._agencyContact) - if len(self._agencyContact) == 0: - return "You must enter the Name of Agency Contact." - - self._requestingAgency = self._convertToStr(self._requestingAgency) - if len(self._requestingAgency) == 0: - return "You must choose a requesting agency." - - if self._requestingAgency == "Unlisted": - self._otherAgencyName = self._convertToStr(self._otherAgencyName) - if len(self._otherAgencyName) == 0: - return "You must enter a requesting agency." 
- - def _convertToStr(self, var): - try: - stringSize = len(var) - return var - except TypeError: - try: - var = `int(var+0.5)` - except TypeError: - var = `var` - return var - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - self._todayTableRes = "None" - self._tonightTableRes = "None" - self._tomorrowTableRes = "None" - self._tomorrowNightTableRes = "None" - self._nextDayTableRes = "None" - self._todayElements = [] - self._tonightElements = [] - self._tomorrowElements = [] - self._tomorrowNightElements = [] - self._nextDayElements = [] - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._language = argDict["language"] - - # Adjust creationTime if user-supplied creation date and time - if self._includeCreationTimeOnGUI: - if self._creationDate != "": - argDict["creationTime"] = self._getTime(self._creationDate, self._creationTime) - return None - - def _parseTime(self,date,t,rtz): - try: - cTime = time.strptime(t + ' ' + date + ' ' + rtz, '%H%M %m/%d/%y %Z') - except ValueError: - cTime = time.strptime(t + ' ' + date + ' ' + rtz, '%H%M %m/%d/%Y %Z') - - return cTime - - def _getTime(self, date, t): - # Make a unix time integer from the given date and time strings - if t == "": - t = "0000" - else: - t = "000" + `int(t)` - t = t[-4:] - rtz = self._getActualTimeZone() - stz = time.tzname[0] - dtz = time.tzname[1] - otz = stz[0:1] - ptz = rtz[0:1] - offset = 0 - if otz == ptz: - cTime = self._parseTime (date,t,rtz) - else: - if ptz == "E": - if otz == "E": - offset = 0 - elif otz == "C": - offset = -1 - elif otz == "M": - offset = -2 - elif otz == "P": - offset = -3 - elif ptz == "C": - if 
otz == "E": - offset = 1 - elif otz == "C": - offset = 0 - elif otz == "M": - offset = -1 - elif otz == "P": - offset = -2 - elif ptz == "M": - if otz == "E": - offset = 2 - elif otz == "C": - offset = 1 - elif otz == "M": - offset = 0 - elif otz == "P": - offset = -1 - elif ptz == "P": - if otz == "E": - offset = 3 - elif otz == "C": - offset = 2 - elif otz == "M": - offset = 1 - elif otz == "P": - offset = 0 - if stz[1:3] == rtz[1:3]: - cTime = self._parseTime (date,t,stz) - else: - cTime = self._parseTime (date,t,dtz) - - return time.mktime(cTime) + offset*3600 - - def _getActualTimeZone(self): - # Return the correct time zone based on DST and fireTZ variable - if self._fireTZ.find('/') >= 0: - standardTimeZone, daylightTimeZone = time.tzname - elif len(self._fireTZ) == 9: - standardTimeZone = self._fireTZ[:4] - daylightTimeZone = self._fireTZ[5:] - else: - standardTimeZone = self._fireTZ[:3] - daylightTimeZone = self._fireTZ[4:] - - if self.daylight() == 1: - actualTimeZone = daylightTimeZone - else: - actualTimeZone = standardTimeZone - return actualTimeZone - - def _makeFcstTimeStatement(self, fcst, argDict): - requestWords = self._getRequestWords() - rtz = self._getActualTimeZone() - stz = time.tzname[0] - dtz = time.tzname[1] - otz = stz[0:1] - ptz = rtz[0:1] - if otz == ptz: - self._fireDateTime = self._parseTime (self._fireDate,self._fireTime,rtz) - fcst = fcst + time.strftime( - 'Forecast is based on ' + requestWords + ' time of %H%M %Z on %B %d. 
', - self._fireDateTime) - else: - offset = 0 - if ptz == "E": - if otz == "E": - offset = 0 - elif otz == "C": - offset = -1 - elif otz == "M": - offset = -2 - elif otz == "P": - offset = -3 - elif ptz == "C": - if otz == "E": - offset = 1 - elif otz == "C": - offset = 0 - elif otz == "M": - offset = -1 - elif otz == "P": - offset = -2 - elif ptz == "M": - if otz == "E": - offset = 2 - elif otz == "C": - offset = 1 - elif otz == "M": - offset = 0 - elif otz == "P": - offset = -1 - elif ptz == "P": - if otz == "E": - offset = 3 - elif otz == "C": - offset = 2 - elif otz == "M": - offset = 1 - elif otz == "P": - offset = 0 - if stz[1:3] == rtz[1:3]: - self._fireDateTime = self._parseTime (self._fireDate,self._fireTime,stz) - tempTime = time.mktime(self._fireDateTime) + offset*3600 - self._fireDateTime = time.localtime(tempTime) - else: - self._fireDateTime = self._parseTime (self._fireDate,self._fireTime,dtz) - tempTime = time.mktime(self._fireDateTime) + offset*3600 - self._fireDateTime = time.localtime(tempTime) - try: - fireDateTime = time.strptime( - self._fireTime + ' ' + self._fireDate, '%H%M %m/%d/%y') - except ValueError: - fireDateTime = time.strptime( - self._fireTime + ' ' + self._fireDate, '%H%M %m/%d/%Y') - fcst = fcst + time.strftime( - 'Forecast is based on ' + requestWords + ' time of %H%M ' + rtz + ' on %B %d. ', - fireDateTime) - fcst = fcst + "\n" - self._makeFireTimeRange() - return fcst - - def _makeFireTimeRange(self): - # Make a 1-hour fire time range for the fireTime - if self._withIgnitionTimes == "no": - return None - fireDateTime = time.mktime(self._fireDateTime) - self._fireTR = self.makeTimeRange(fireDateTime, fireDateTime+3600) - print "Fire Time Range:", self._fireTR - - def _checkFireTR(self, tr): - if self._fireTR is None: - return 0 - return self._fireTR.overlaps(tr) - - # This is a new method that Matt Davis wrote. Figures out whether or not - # we are using a ignition time, request time, or incident time. 
- def _getRequestWords(self): - if self._fireType.upper() == "WILDFIRE": - return "request" - elif self._fireType.upper() == "PRESCRIBED": - return "ignition" - else: - return "incident" - - # Import the discussion from a previously edited discussion file. - def _makeDiscussion(self, fcst, argDict): - - discussionHeader = "" - discussionHeader = ".DISCUSSION...\n" - - if self._insertDiscussionFromFile == 1: - discussion = "" - if os.path.isfile(self._discussionFile): - input = open(self._discussionFile) - text = input.readlines() - for line in text: - discussion = discussion + line - discussion = string.join(string.split(discussion,"\n\n"),"\n") - discussion = string.join(string.split(discussion,"\n\n"),"\n") - return fcst + discussionHeader + discussion + "\n" - else: - discussion = "...Put discussion text here..." - return fcst + discussionHeader + discussion + "\n\n" - elif self._insertDiscussionFromFile == 2: - version = 0 - fwfPil = self._statePil + self._fwfPil - searchString="" - product = self.getPreviousProduct(fwfPil, searchString, version=version) - product = string.split(product, "\n") - discussion = "" - disFlag = 0 - foundDiscussion = 0 - for line in product: - if string.find(line,"Discussion...") != -1: - disFlag = 1 - foundDiscussion = 1 - try: - if line[2] == "Z" and line[-1] == "-" and \ - (line[6] == "-" or line[6] == ">"): - disFlag = 0 - except IndexError: - #print "Discussion Index Error",line - a = 0 - if line[:2] == "$$": - disFlag = 0 - if disFlag: - discussion = discussion + line + "\n" - if foundDiscussion: - return fcst + discussion + "\n\n" - else: - discussion = "...Put discussion text here..." - return fcst + discussionHeader + discussion + "\n\n" - else: - return fcst + discussionHeader + "\n\n\n" - -# Create areaList based on lat/lon/size/firename. - def _determineAreaList(self, argDict): - - # Size of the fire is entered as acres. - # Convert this area into square kilometers. 
- # createLatLonArea only needs the length of the side of a square. - size_out = int(math.sqrt(float(self._fireSize)/247.0) + 0.5) - area = self.createLatLonArea(float(self._fireLatitude), - float(0.0 - self._fireLongitude), - size_out) - # SET UP FOR HAZARDS - # Save to server - self.saveEditAreas([area]) - # Create Hazards Table for this area - hazards = HazardsTable.HazardsTable( - argDict["ifpClient"], [[area.getId().getName()]], "FWS", - self.filterMethod, argDict["databaseID"], - self._fullStationID, - activeTableName = argDict['vtecActiveTable'], - vtecMode = argDict['vtecMode'], - dataMgr=argDict['dataMgr']) - argDict["hazards"] = hazards - # Remove from server - self.deleteEditAreas([area]) - - self._areaList = [(area, self._fireName)] - -# Set the extended configuration based on user input. - def _setExtendedConfig(self): - - # Include extended forecast if wanted and allowed. - if "Include Day 3-5 Extended?" not in self._extendedQuestions and \ - "Include Day 6-7 Extended?" not in self._extendedQuestions: - if self._individualExtended == 1: - self._individualExtended = 0 - if self._summaryExtended == 1: - self._summaryExtended = 0 - self._extendedLabel = 0 - else: - if self._individualExtended == 1: - self._individualExtended = 1 - if self._summaryExtended == 1: - self._summaryExtended = 1 - self._extendedLabel = 1 - - if "Include Day 3-5 Extended?" in self._extendedQuestions: - self._includeExtendedShortTerm = 1 - else: - self._includeExtendedShortTerm = 0 - - if "Include Day 6-7 Extended?" in self._extendedQuestions: - self._includeExtended = 1 - else: - self._includeExtended = 0 - - # Add the "if conditions become unrep..." statement. - def _makeUnrepresentStatement(self, fcst, argDict): - - if self._insertUnrepresentStatement == 1: - return fcst + self._unrepresentStatement + "\n\n" - else: - return fcst - - # Place the headlines above the discussion. 
- def _makeHeadline(self, fcst, editArea, areaLabel, argDict): - - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - #hlList = tree.stats.get("Hazards", self._timeRange, areaLabel) - #print "hlList = ",hlList - headlines = self.generateProduct("Hazards", argDict, - area = editArea, areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - return fcst - - # From FWF. Modified to eliminate the UGC header not needed in the FWS. - # Since Headlines are placed above the discussion...that eliminated too. - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst - - # From FWF. Modified to eliminate everything. - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - if self._individualExtended == 1: - fcst = fcst + "\n" - return fcst - - # Deal with the summary extended more cleanly. - def _makeSummaryExtended(self, fcst, argDict): - - # Add one extended - if self._summaryExtended == 1: - extended = self.generateProduct("ExtendedNarrative", - argDict, area=self._summaryArea, - timeRange=self._extendedRange) - fcst = fcst + extended - fcst = fcst + "\n" - return fcst - -# From FWS_Overrides. changes commented on the right margin. 
(just two lines) - - def _generateOutlookLabels(self, argDict): - - today = argDict["creationTime"] - if self._productIssuance in ["Morning", "Morning Update", "Afternoon Update", "Next Day"]: - day8 = today + 7*24*3600 - day14 = today + 13*24*3600 - dow = time.gmtime(today)[6] - if dow == 0 or dow == 2 or dow == 4: - self._insertOutlookFlag = 1 - else: - self._insertOutlookFlag = 0 - self._insertOutlookFlag = 1 - else: - currentHour = time.gmtime(today)[3] - if currentHour < 16: - day8 = today + 7*24*3600 - day14 = today + 13*24*3600 - dow = time.gmtime(today)[6] - if dow == 0 or dow == 2 or dow == 4: - self._insertOutlookFlag = 1 - else: - self._insertOutlookFlag = 0 - self._insertOutlookFlag = 1 - else: - day8 = today + 8*24*3600 - day14 = today + 14*24*3600 - dow = time.gmtime(today + 24*3600)[6] - if dow == 1 or dow == 3 or dow == 6: - self._insertOutlookFlag = 1 - else: - self._insertOutlookFlag = 0 - self._insertOutlookFlag = 1 - - self._outlookDay8Label = time.strftime("%A %B %d",time.gmtime(day8)).upper() - self._outlookDay14Label = time.strftime("%A %B %d",time.gmtime(day14)).upper() - - return None - - # Import the 8 to 14 day outlook into the product - # if the user requests it for the spot forecast. - def _make8to14DayOutlook(self, fcst, argDict): - - if "Include Day 8-14 Outlook?" not in self._extendedQuestions: - return fcst - - outlookHeader = ".OUTLOOK FOR " + self._outlookDay8Label + " THROUGH " \ - + self._outlookDay14Label + "...\n" - outlookHeader = string.upper(outlookHeader) - - if self._insertOutlookFromFile == 1: - outlook = "" - if os.path.isfile(self._outlookFile): - input = open(self._outlookFile) - text = input.readlines() - for line in text: - outlook = outlook + line - outlook = string.join(string.split(outlook,"\n\n"),"\n") - outlook = string.join(string.split(outlook,"\n\n"),"\n") - return fcst + outlookHeader + outlook + "\n" - else: - outlook = "...Put 8 to 14 day outlook text here..." 
- return fcst + outlookHeader + outlook + "\n\n" - elif self._insertDiscussionFromFile == 2: - version = 0 - fwfPil = self._statePil + self._fwfPil - searchString="" - product = self.getPreviousProduct(fwfPil, searchString, version=version) - product = string.split(product, "\n") - outlook = "" - outFlag = 0 - foundOutlook = 0 - for line in product: - if line[:2] == "$$": - outFlag = 0 - if outFlag: - outlook = outlook + line + "\n" - if string.find(line,".OUTLOOK") != -1: - outFlag = 1 - foundOutlook = 1 - if foundOutlook: - return fcst + outlookHeader + outlook + "\n\n" - else: - outlook = "...Put 8 to 14 day outlook text here..." - return fcst + outlookHeader + outlook + "\n\n" - else: - return fcst + outlookHeader + "\n\n\n" - - # From FWF. Modified to append the fire name and agency name to the - # product name. Modified to eliminate the discussion from method. - # Modified to include Matt Davis' enhancement (unlisted agency) - def _preProcessProduct(self, fcst, argDict): - - if self._requestingAgency == "Unlisted": - newFireName = self._fireName + "..." + self._otherAgencyName - else: - newFireName = self._fireName + "..." 
+ self._requestingAgency - productLabel = self._productName + " for " + newFireName - - productLabel = self.checkTestMode(argDict, productLabel) - - issuedByString = self.getIssuedByString() - - # Product header - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productLabel + \ - "\nNational Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - # Add time disclaimer - self._fireTR = None - if self._withIgnitionTimes == "yes" or self._tableStartTimeMode == "ignitionTime": - fcst = self._makeFcstTimeStatement(fcst, argDict) - try: - timeTup = time.strptime(self._timeLabel, '%I%M %p %Z %a %b %d %Y') - issueTime = time.mktime(timeTup) - except: - issueTime = time.time() - now = time.time() - if ((issueTime - now) < -24*3600) or ((issueTime - now) > 9*24*3600): - message = \ -'''|* The start time for this product is %s. -This is either more than a day in the past or more than 9 days -in the future. *|''' % self._timeLabel - fcst = '%s\n%s\n\n' % (fcst, message) - return fcst - - def _postProcessProduct(self, fcst, argDict): - fcst = string.join(string.split(fcst, "\n\n\n"), "\n") - forecasterString = string.join(self._forecaster,"/") - if self._webSiteTag == "": - tagLineString = "" - else: - tagLineString = ".TAG " + self._webSiteTag + "/" + self._wfoID + "\n" - fcst = fcst + "$$\nForecaster..." + forecasterString + "\n" + \ - "Requested by..." + self._agencyContact + "\n" + \ - "Type of request..." 
+ self._fireType + "\n" + tagLineString - #self.storeAWIPS(fcst, self._awipsProductID) - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - -############################################################################# -# Weather Element Sampling and Phrase Configuration # -############################################################################# - - def _issuance_list(self, argDict): - narrativeDef = [] - if self._tabularAllPeriods == "yes": - phantom = "Phantom" - else: - # If we are generating a 12-hour table - # in the first period, need to have an empty - # narrative so that the sampling will get done. - phantom = "EmptyFirePeriod" - if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: - # Add the first period - if len(self._todayElements) == 0: - period = (phantom, "period1") - else: - period = ("FirePeriod1", "period1") - narrativeDef.append(period) - - if len(self._tonightElements) == 0: - period = (phantom, 12) - else: - period = ("FirePeriod2", 12) - narrativeDef.append(period) - - # Add the third period - if len(self._tomorrowElements) == 0: - period = (phantom, 12) - else: - period = ("FirePeriod3", 12) - narrativeDef.append(period) - else: - # Add the first period. 
- if len(self._tonightElements) == 0: - period = (phantom, "period1") - else: - period = ("FirePeriod2", "period1") - narrativeDef.append(period) - - # Add the second period - if len(self._tomorrowElements) == 0: - period = (phantom, 12) - else: - period = ("FirePeriod3", 12) - narrativeDef.append(period) - if self._productIssuance in ["Afternoon with 4 periods", - "Evening Update with 4 periods", - "Early Morning Update with 4 periods"]: - # Add the third period - if len(self._tomorrowNightElements) == 0: - period = (phantom, 12) - else: - period = ("FirePeriod4", 12) - narrativeDef.append(period) - - # Add the fourth period - if len(self._nextDayElements) == 0: - period = (phantom, 12) - else: - period = ("FirePeriod5", 12) - narrativeDef.append(period) - - # Add extended if configured to appear - if "Include Day 3-5 Extended?" in self._extendedQuestions: - if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: - extendedShortTerm = [ - ("FireExtendedShortTerm", 24), - ("FireExtendedShortTerm", 24), - ("FireExtendedShortTerm", 24), - ] - elif self._productIssuance in ["Afternoon with 4 periods", - "Evening Update with 4 periods", - "Early Morning Update with 4 periods"]: - extendedShortTerm = [ - ("FireExtendedShortTerm", 24), - ("FireExtendedShortTerm", 24), - ("FireExtendedShortTerm", 24), - ] - else: - extendedShortTerm = [ - ("FireExtendedShortTerm", 24), - ("FireExtendedShortTerm", 24), - ("FireExtendedShortTerm", 24), - ("FireExtendedShortTerm", 24), - ] - else: - if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: - extendedShortTerm = [ - ("Phantom", 24), - ("Phantom", 24), - ("Phantom", 24), - ] - elif self._productIssuance in ["Afternoon with 4 periods", - "Evening Update with 4 periods", - "Early Morning Update with 4 periods"]: - extendedShortTerm = [ - ("Phantom", 24), - ("Phantom", 24), - ("Phantom", 24), - ] - else: - extendedShortTerm = [ - ("Phantom", 24), - ("Phantom", 
24), - ("Phantom", 24), - ("Phantom", 24), - ] - if "Include Day 6-7 Extended?" in self._extendedQuestions: - extended = [ - ("FireExtended", 24), - ("FireExtended", 24), - ] - else: - extended = [] - - # Combine sections - try: - if self._individualExtended == 1: - if self._extendedLabel == 1: - narrativeDef.append(("ExtendedLabel",0)) - if self._includeExtendedShortTerm or self._includeExtended: - narrativeDef = narrativeDef + extendedShortTerm - if self._includeExtended: - narrativeDef = narrativeDef + extended - except: - pass - return [ - ("Next Day", 24 + self.DAY(), 24 + self.NIGHT(), 24 + self.NIGHT(), - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDef), - ("Morning", self.DAY(), self.NIGHT(), self.NIGHT(), - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDef), - ("Morning Update", "issuanceHour", self.NIGHT(), self.NIGHT(), - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDef), - ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), - ".REST OF TODAY...", "early in the morning","late in the afternoon", - 1, narrativeDef), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDef), - ("Afternoon with 4 periods", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDef), - ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - ".REST OF TONIGHT...", "late in the night","early in the evening", - 1, narrativeDef), - ("Evening Update with 4 periods", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - ".REST OF TONIGHT...", "late in the night","early in the evening", - 1, narrativeDef), - # For the early morning update, this produces: - # Rest of Tonight: - # Monday - # Monday Night - ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), - ".REST 
OF TONIGHT...", "early in the morning","late in the afternoon", - 0, narrativeDef), - ("Early Morning Update with 4 periods", "issuanceHour", self.DAY(), self.DAY(), - ".REST OF TONIGHT...", "early in the morning","late in the afternoon", - 0, narrativeDef), - ] - - def FirePeriod1(self): - phraseList = self.getFirePeriod_phraseList(self._todayElements) - analysisList = self.getFirePeriod_analysisList() - intersectAreas = self.getFirePeriod_intersectAreas(1) - return { - "type": "component", - "methodList": [ - self.assembleIndentedPhrases, - ], - "analysisList": analysisList, - "phraseList": phraseList, - "intersectAreas": intersectAreas, - } - - def FirePeriod2(self): - phraseList = self.getFirePeriod_phraseList(self._tonightElements) - analysisList = self.getFirePeriod_analysisList() - intersectAreas = self.getFirePeriod_intersectAreas(2) - return { - "type": "component", - "methodList": [ - self.assembleIndentedPhrases, - ], - "analysisList": analysisList, - "phraseList": phraseList, - "intersectAreas": intersectAreas, - } - - def FirePeriod3(self): - phraseList = self.getFirePeriod_phraseList(self._tomorrowElements) - analysisList = self.getFirePeriod_analysisList() - intersectAreas = self.getFirePeriod_intersectAreas(3) - return { - "type": "component", - "methodList": [ - self.assembleIndentedPhrases, - ], - "analysisList": analysisList, - "phraseList": phraseList, - "intersectAreas": intersectAreas, - } - - def FirePeriod4(self): - phraseList = self.getFirePeriod_phraseList(self._tomorrowNightElements) - analysisList = self.getFirePeriod_analysisList() - intersectAreas = self.getFirePeriod_intersectAreas(4) - return { - "type": "component", - "methodList": [ - self.assembleIndentedPhrases, - ], - "analysisList": analysisList, - "phraseList": phraseList, - "intersectAreas": intersectAreas, - } - - def FirePeriod5(self): - phraseList = self.getFirePeriod_phraseList(self._nextDayElements) - analysisList = self.getFirePeriod_analysisList() - intersectAreas = 
self.getFirePeriod_intersectAreas(5) - return { - "type": "component", - "methodList": [ - self.assembleIndentedPhrases, - ], - "analysisList": analysisList, - "phraseList": phraseList, - "intersectAreas": intersectAreas, - } - - def EmptyFirePeriod(self): - phraseList = [] - analysisList = self.getFirePeriod_analysisList() - intersectAreas = self.getFirePeriod_intersectAreas(1) - return { - "type": "component", - "methodList": [ - self.assembleIndentedPhrases, - ], - "analysisList": analysisList, - "phraseList": phraseList, - "intersectAreas": intersectAreas, - } - - def PreFirePeriod1(self): - analysisList = self.getFirePeriod_analysisList() - intersectAreas = self.getFirePeriod_intersectAreas(1) - return { - "type": "component", - "methodList": [self.noWords], - "analysisList": analysisList, - "phraseList": [], - "intersectAreas": intersectAreas, - } - - def createLabel(self, tree, node, timeRange, issuanceInfo, currentLocalTime, shift, index=0): - # Make a label given the timeRange in GMT and the shift to - # convert it to local time. currentLocalTime can be used to - # compare to current day. 
- if timeRange.duration() <= 3600: - return "" - - curLocal, shift = self.determineTimeShift() - if index == 0 and self._equalDates(currentLocalTime, curLocal): - try: - label = issuanceInfo.period1Label() - if label != "": - return label + "\n" - except: - pass - try: - today = issuanceInfo.todayFlag() - except: - today = 1 - try: - useHolidays = self._useHolidays - except: - useHolidays = 1 - nextDay24HourLabel = self.nextDay24HourLabel_flag(tree, node) - splitDay24HourLabel = self.splitDay24HourLabel_flag(tree, node) - label = self.getWeekday(timeRange, holidays=useHolidays, shiftToLocal=1, - labelType="CapitalWithPeriod", today=today, - tomorrow=0, nextDay24HourLabel=nextDay24HourLabel, - splitDay24HourLabel=splitDay24HourLabel) - return label + "\n" - - def _equalDates(self, t1, t2): - # If AbsTimes t1 and t2 represent the same day, month, year - # return 1 else 0 - d1 = t1.day - d2 = t2.day - m1 = t1.month - m2 = t2.month - y1 = t1.year - y2 = t2.year - if d1==d2 and m1==m2 and y1==y2: - return 1 - else: - return 0 - - def increment_nlValue_dict(self, tree, node): - # Increment for rounding values - # Units depend on the product - dict = TextRules.TextRules.increment_nlValue_dict(self, tree, node) - dict["Wind"] = 1 - dict["Wind20ft"] = 1 - dict["TransWind"] = 1 - dict["CWR"] = 1 - dict["QPF"] = .0001 - dict["Vsby"] = .01 - return dict - - def scalar_difference_nlValue_dict(self, tree, node): - # Scalar difference. If the difference between scalar values - # for 2 sub-periods is greater than this value, - # the different values will be noted in the phrase. 
- dict = TextRules.TextRules.scalar_difference_nlValue_dict(self, tree, node) - dict["Vsby"] = { - (0.00,1.00) : 0.25, - (1.00,3.00) : 0.50, - (3.00,5.00) : 1.00, - "default" : 2.00, - } - dict["PredHgt"] = { - (0,10) : 1, - (10,30) : 5, - (30,100) : 10, - "default" : 25, - } - dict["Td"] = 5 - dict["PoP"] = 10 - return dict - - def getFirePeriod_phraseList(self, periodElements): - phraseList = [] - if self._forecastType in ["Tabular/Narrative"]: - # Figure out which narrative phrases should be included - narratives = [] - tableElements = [] - for rowElement, narrativeToo, tableRows in self._rowList(): - tableElements.append(rowElement) - if narrativeToo: - narratives.append(rowElement) - if self._forecastType in ["Narrative Only", "Tabular/Narrative"]: - for elementId in periodElements: - for element, default, phrases, searchStrings in self._weInfoList(): - if elementId == element: - if self._forecastType == "Tabular/Narrative": - if elementId in tableElements and elementId not in narratives: - break - if type(phrases) is not types.ListType: - phrases = [phrases] - phraseList += phrases - if self._forecastType in ["Tabular/Narrative", "Tabular Only"]: - phraseList.append(self._fwsTable_phrase) - return phraseList - - # From FWS_Overrides. Added one hourly sampling for T and RH. - # This sampling is used for the ignition time forecasts. 
- def getFirePeriod_analysisList(self): - if self._forecastType in ["Tabular/Narrative", "Tabular Only"] or \ - self._withIgnitionTimes == "yes": - analysisList = [ - ("Sky", self.median, [1]), - ("Wx", self.rankedWx, [1]), - ("PoP", self.stdDevMaxAvg, [1]), - ("PoP", self.binnedPercent, [1]), - ("LAL", self.maximum, [1]), - ("LAL", self.binnedPercent, [1]), - ("MaxT", self.moderatedMinMax), - ("MinT", self.moderatedMinMax), - ("MaxRH", self.moderatedMinMax), - ("MinRH", self.moderatedMinMax), - ("RH", self.avg, [1]), - ("RH", self.moderatedMinMax), - ("MaxT", self.avg), # for trends - ("MinT", self.avg), # for trends - ("MaxRH", self.avg), # for trends - ("MinRH", self.avg), # for trends - ("RH", self.avg), # for trends - ("T", self.avg, [1]), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Td", self.avg, [1]), - ("Td", self.hourlyTemp), - ("Td", self.minMax), - ("Wind", self.vectorMinMax, [1]), - ("WindGust", self.maximum, [1]), - ("Wind20ft", self.vectorMinMax, [1]), - ("Haines", self.maximum, [1]), - ("TransWind", self.vectorAvg, [1]), - ("FreeWind", self.vectorAvg, [1]), - ("MixHgt", self.moderatedMin, [1]), - ("VentRate", self.minMax, [1]), - ("DSI", self.maximum,[1]), - ("LDSI", self.maximum,[1]), - ("LVORI", self.maximum,[1]), - ("ADI", self.maximum,[1]), - ("CWR", self.maximum, [1]), - ("Stability", self.maximum, [1]), - ("MarineLayer", self.maximum, [1]), - ("Swell", self.vectorMinMax, [1]), - ("Period", self.maximum, [1]), - ("WindWaveHgt", self.maximum, [1]), - ("WaveHeight", self.maximum, [1]), - ("QPF", self.accumSum, [6]), - ("SnowAmt", self.accumSum, [6]), - ("FzLevel", self.median, [1]), - ("Hazards", self.dominantDiscreteValue, [1]), - ("Vsby", self.minimum, [1]), - ("PredHgt", self.minimum, [1]), - ("HeatIndex", self.maximum, [1]), - ("ApparentT", self.maximum, [1]), - ] - else: - analysisList = [ - ("Sky", self.median, [6]), - ("PoP", self.stdDevMaxAvg, [6]), - ("PoP", self.binnedPercent, [6]), - ("Wx", self.rankedWx, [6]), - ("LAL", 
self.maximum, [12]), - ("LAL", self.binnedPercent, [0]), - ("MaxT", self.moderatedMinMax), - ("MinT", self.moderatedMinMax), - ("MaxRH", self.moderatedMinMax), - ("MinRH", self.moderatedMinMax), - ("RH", self.avg, [1]), - ("RH", self.moderatedMinMax), - ("MaxT", self.avg), # for trends - ("MinT", self.avg), # for trends - ("MaxRH", self.avg), # for trends - ("MinRH", self.avg), # for trends - ("RH", self.avg), # for trends - ("T", self.avg, [1]), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Td", self.avg, [1]), - ("Td", self.hourlyTemp), - ("Td", self.minMax), - ("Wind", self.vectorMinMax, [6]), - ("WindGust", self.maximum, [6]), - ("Wind20ft", self.vectorMinMax, [6]), - ("Haines", self.maximum), - ("TransWind", self.vectorAvg, [12]), - ("FreeWind", self.vectorAvg, [12]), - ("MixHgt", self.moderatedMin, [1]), - ("VentRate", self.minMax), - ("CWR", self.maximum), - ("DSI", self.maximum,[12]), - ("LDSI", self.maximum,[12]), - ("LVORI", self.maximum,[12]), - ("ADI", self.maximum,[12]), - ("Stability", self.maximum), - ("MarineLayer", self.maximum), - ("Swell", self.vectorMinMax, [6]), - ("Period", self.maximum, [6]), - ("WindWaveHgt", self.maximum, [6]), - ("WaveHeight", self.maximum, [6]), - ("QPF", self.accumMinMax, [6]), - ("SnowAmt", self.accumMinMax, [6]), - ("FzLevel", self.median, [6]), - ("Hazards", self.dominantDiscreteValue), - ("Vsby", self.minimum, [6]), - ("PredHgt", self.minimum, [6]), - ("HeatIndex", self.maximum, [6]), - ("ApparentT", self.maximum, [6]), - ] - return analysisList - - def getFirePeriod_intersectAreas(self, periodNum): - return [] - - # From ConfigVariables. - - def phrase_descriptor_dict(self, tree, node): - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - if self._wind20ftHeader: - dict["WIND.(20 FT)........"]="Wind (20 ft)........" - dict["20-foot winds......."]=" Slope/valley......." - dict["Free winds.........."]=" Ridgetop..........." - dict["Surrounding ridge..."]=" Surrounding ridge.." 
- else: - dict["20-foot winds......."]="Wind (20 ft)........" - dict["Free winds.........."]="Ridgetop wind......." - dict["Surrounding ridge..."]="Surrounding ridge..." - dict["Eye level winds....."]="Eye level winds....." - dict["Surface winds......."]="Surface winds......." - dict["Wind shift.........."]="Wind shift.........." - if self._transportWindLabel == "mix": - dict["Transport winds....."]="Mixing winds........" - else: - dict["Transport winds....."]="Transport winds....." - dict["CWR................."]="CWR................." - dict["DSI................."]="Dispersion.........." - dict["LDSI................"]="Dispersion index...." - dict["LVORI..............."]="LVORI..............." - dict["ADI................."]="ADI................." - dict["POP................."]="Chance of pcpn......" - dict["Dewpoint............"]="Dewpoint............" - dict["Begin/end of pcpn..."]="Begin/end of pcpn..." - dict["Stability class....."]="Stability class....." - dict["Wind wave..........."]="Wind wave..........." - dict["Rainfall amount....."]="Rainfall amount....." - dict["Snowfall amount....."]="Snowfall amount....." - dict["Swell period........"]="Swell period........" - dict["Swell height........"]="Swell height........" - dict["Freezing level......"]="Freezing level......" - dict["Ceiling............."]="Ceiling............." - dict["Visibility.........."]="Visibility.........." - dict["Icing..............."]="Icing..............." - dict["Heat index.........."]="Heat index.........." - dict["erraticWind"]="gusty and erratic winds expected near thunderstorms" - if self._withIgnitionTimes == "yes": - dict["MinT_FireWx"]="Temperature........." - dict["MaxT_FireWx"]="Temperature........." - dict["MinRH_FireWx"]="RH.................." - dict["MaxRH_FireWx"]="RH.................." - return dict - - # From FirePhrases. Changed to eliminate the area test. Thus, - # this label will appear even though there is no ridgetop wind. 
- def fireWind_label_setUp(self, tree, node): - self.setWords(node, "") - node.set("descriptor", "") - node.set("indentLabel", "WIND.(20 FT)........") - return self.DONE() - - # The methods below this line override baseline - # methods to accomodate ignition times. - # They were derived by Tracy Hansen from code originally - # from Matt Davis and renamed by Virgil Middendorf. - # The last two methods were created by Matt Davis to - # check and make time ranges for the ignition time forecasts. - - def fire_dayOrNight_words(self, tree, node): - # Temp or RH elements - elementName = node.getAncestor("elementName") - statDict = node.getStatDict() - if elementName == "MaxT" or elementName == "MinT": - stats = self.getTempStats(tree, node) - if stats is None: - return self.setWords(node.parent, "MISSING") - connector = self.value_connector(tree, node, elementName, elementName) - igWords = `int(self.getValue(stats, "avg"))` - words = self.getTempRangePhrase(tree, node, stats, elementName) - else: # MinRH, MaxRH or RH - stats = self.getStats(statDict, elementName) - if stats is None: - return self.setWords(node.parent, "MISSING") - connector = self.value_connector(tree, node, elementName, elementName) - igWords = `int(self.getValue(stats, "avg"))` - min, max = self.getValue(stats, "MinMax") - if min > 100: - min = 100 - if max > 100: - max = 100 - if min == max: - words = `int(min)` - else: - words = `int(min)` + connector + `int(max)` - outUnits = self.element_outUnits(tree, node, elementName, elementName) - units = self.units_descriptor(tree, node,"units", outUnits) - words = words + units - igWords = igWords + units - - # Add ignition element if applicable - if self._withIgnitionTimes == "yes": - dayNight = self.getPeriod(node.getTimeRange(), 1) - if dayNight == self.DAYTIME(): - tempElement = "Max" - rhElement = "Min" - else: - tempElement = "Min" - rhElement = "Max" - if elementName == "MaxT" or elementName == "MinT": - ignitionElement = "T" - elementType = 
tempElement - else: - ignitionElement = "RH" - elementType = rhElement - if self._checkFireTR(node.getTimeRange()): - ignitionStats = tree.stats.get( - ignitionElement, self._fireTR, node.getAreaLabel(), mergeMethod="Max") - if ignitionStats is not None: - ignitionPhrase = `int(self.getValue(ignitionStats))` - reqType = self._getRequestWords() - words = ignitionPhrase + units + " at " + reqType + "..." + elementType + " " + igWords - else: - words = elementType + " " + igWords - else: - words = elementType + " " + igWords - return self.setWords(node, words) - - def fireWind_compoundPhrase(self): - return { - "phraseList": [ - self.wind_summary, - #self.wind_phrase, - self.wind_withGusts_phrase, - self.erraticWind_phrase - ], - "phraseMethods": [ - self.assembleSentences, - self.fireWind_finishUp - ], - } - - def fireWind_finishUp(self, tree, node): - "Create a phrase for Winds" - if self.currentAreaContains( - tree, self.ridgeValleyAreas(tree, node)) == 1: - return self.setWords(node, "") - words = node.get("words") - if words is None: - return - if words == "": - words = "MISSING" - - # Add ignitionTime if appropriate - igWords = "" - if self._checkFireTR(node.getTimeRange()): - ignitionWindStats = tree.stats.get( - "Wind", self._fireTR, node.getAreaLabel(), mergeMethod="Max") - if ignitionWindStats is not None: - igMagStr = `int(ignitionWindStats[0])` - igDirStr = self.vector_dir(int(ignitionWindStats[1])) - reqType = self._getRequestWords() - igWords = "Winds " + igDirStr + " at " + igMagStr + " mph at " + reqType + ", otherwise " - - words = igWords + words - node.set("descriptor", "") - node.set("indentLabel", "20-foot winds.......") - node.set("compound", 1) - return self.setWords(node, words) - - def fireSfcWind_compoundPhrase(self): - return { - "phraseList": [ - self.wind_summary, - self.wind_phrase, - ], - "phraseMethods": [ - self.consolidateSubPhrases, - self.assembleSentences, - self.fireSfcWind_finishUp - ], - } - - def fireSfcWind_finishUp(self, 
tree, node): - "Create a phrase for Winds" - # Empty phrase if doing ridge/valley winds - if self.currentAreaContains( - tree, self.ridgeValleyAreas(tree, node)) == 1: - return self.setWords(node, "") - words = node.get("words") - if words is None: - return - if words == "": - words = "MISSING" - node.set("descriptor", "") - node.set("indentLabel", "Surface winds.......") - node.set("compound", 1) - return self.setWords(node, words) - - def erraticWind_phrase(self): - return { - "setUpMethod": self.erraticWind_setUp, - "wordMethod": self.erraticWind_words, - "phraseMethods": [ - self.preProcessWx, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.assembleSubPhrases, - self.postProcessPhrase, - ], - } - - def erraticWind_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] - self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector) - # Set this flag used by the "checkWeatherSimilarity" method - node.set("noIntensityCombining", 1) - self.determineSevereTimeDescriptors(tree, node) - return self.DONE() - - def erraticWind_words(self, tree, node): - # If T is in the Wx grids, then produce phrase. 
- # Wx Statistics: rankedWx - - statDict = node.getStatDict() - rankList = self.getStats(statDict, "Wx") - if rankList is None or len(rankList) == 0: - return self.setWords(node, "") - # Check against PoP - #rankList = self.checkPoP(tree, node, rankList) - subkeyList = self.getSubkeys(rankList) - - severe = 0 - thunder = 0 - attrTextList = [] - for subkey in subkeyList: - wxType = subkey.wxType() - if wxType == "T": - thunder = 1 - intensity = subkey.intensity() - if intensity == "+": - severe = 1 - wxDef = subkey.wxDef() - for attr in subkey.attributes(): - if attr in ["Primary", "Mention", "Dry"]: - continue - attrText = wxDef.attributeDesc(subkey.wxType(), attr).lower() - if attrText not in attrTextList: - attrTextList.append(attrText) - - if thunder == 0: - return self.setWords(node, "") - words = self.phrase_descriptor(tree, node, "erraticWind", "Wx") - - return self.setWords(node, words) - - def smokeDispersal_words(self, tree, node): - "Create phrase for Smoke Dispersal" - statDict = node.getStatDict() - stats = self.getStats(statDict, "VentRate") - if stats is None: - return self.setWords(node.parent, "MISSING") - - if self._checkFireTR(node.getTimeRange()): - # Handle phrase if including ignition time - minVal, maxVal = self.getValue(stats, "MinMax") - dayNight = self.getPeriod(node.getTimeRange(), 1) - if dayNight == self.DAYTIME(): - vr = int(maxVal) - ventType = "Max" - mergeMethod = "Max" - else: - vr = int(minVal) - ventType = "Min" - mergeMethod = "Min" - vrCat = self.smokeDispersal_valueStr(vr) - words = ventType + "..." + vrCat + " " + " /" + `vr` + " knot-ft/" - reqType = self._getRequestWords() - ignitionDispersal = tree.stats.get( - "VentRate", self._fireTR, node.getAreaLabel(), mergeMethod=mergeMethod) - vrCat = self.smokeDispersal_valueStr(ignitionDispersal) - igWords = vrCat + " /" + `int(ignitionDispersal)` + " knot-ft/ at " + reqType + ". 
\n" - words = igWords + " " + words - else: - # Handle phrase with range if not including ignition time - vr1, vr2 = self.getValue(stats, "MinMax") - vr1 = int(vr1) - vr2 = int(vr2) - vrCat1 = self.smokeDispersal_valueStr(vr1) - vrCat2 = self.smokeDispersal_valueStr(vr2) - # Single Value input - if vr1 == vr2: - words = vrCat1 + " (" + `vr1` + " knot-ft)" - # Range - else: - words = vrCat1 + " to " + vrCat2 + " (" + `vr1` + "-" + \ - `vr2` + " knot-ft)" - return self.setWords(node, words) - - # SMOKE DISPERSAL CATEGORIES - def smokeDispersal_valueStr(self, value): - "Convert smoke dispersal value to corresponding category" - - if value < 13000 : - return "poor" - - if value >= 13000 and value < 30000: - return "fair" - - if value >= 30000 and value < 60000 : - return "good" - - if value >= 60000 : - return "excellent" - - ### MixHgt - def mixingHgt_words(self, tree, node): - "Create phrase for Mixing Height" - - statDict = node.getStatDict() - stats = self.getStats(statDict, "MixHgt") - if stats is None: - return self.setWords(node.parent, "MISSING") - - mix1, mix2 = self.getValue(stats, "MinMax") - outUnits = self.element_outUnits(tree, node, "MixHgt", "MixHgt") - mix1 = int(mix1) - mix2 = int(mix2) - threshold = self.nlValue(self.null_nlValue( - tree, node, "MixHgt", "MixHgt"), max) - if int(mix1) < threshold and int(mix2) < threshold: - return self.setWords(node, "null") - - # Single Value input - if mix1 == mix2: - words = `mix1` + " " + outUnits + " AGL" - # Range - else: - words = `mix1`+ "-" + `mix2` + " " + outUnits + " AGL" - - # Handle ignition time - if self._checkFireTR(node.getTimeRange()): - reqType = self._getRequestWords() - ignitionMixStats = tree.stats.get( - "MixHgt", self._fireTR, node.getAreaLabel(), mergeMethod="Max") - igWords = `int(ignitionMixStats)` + " " + outUnits + " AGL at " + reqType +", otherwise " - words = igWords + words - - return self.setWords(node, words) - - def adi_phrase(self): - return { - "setUpMethod": self.adi_setUp, - 
"wordMethod": self.adi_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def adi_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("ADI", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "ADI.................") - return self.DONE() - - def adi_words(self, tree, node): - statDict = node.getStatDict() - adi = self.getStats(statDict, "ADI") - if adi is None: - return self.setWords(node.parent, "MISSING") - adi = self.getValue(adi) - words = `int(adi + 0.5)` - return self.setWords(node, words) - - def haines_words(self, tree, node): - "Create phrase for Haines Index" - statDict = node.getStatDict() - stats = self.getStats(statDict, "Haines") - if stats is None: - return self.setWords(node.parent, "MISSING") - - # Handle ignition time - ignitionFlag = 0 - if self._checkFireTR(node.getTimeRange()): - haines1 = int(self.getValue(stats, "Max")) - ignitionStats = tree.stats.get("Haines", self._fireTR, node.getAreaLabel(), - mergeMethod="Max") - if ignitionStats is not None: - ignitionPhrase = `int(self.getValue(ignitionStats))` - #print "Haines ignitionStats", ignitionStats - reqType = self._getRequestWords() - hainesDict = self.hainesDict() - words = ignitionPhrase + " " + hainesDict[int(ignitionPhrase)] + \ - " at " + reqType + "...max " + `haines1` - ignitionFlag = 1 - if not ignitionFlag: - haines1, haines2 = self.getValue(stats, "MinMax") - hainesDict = self.hainesDict() - haines1 = int(haines1) - haines2 = int(haines2) - words1 = hainesDict[haines1] - words2 = hainesDict[haines2] - - # Single Value input - if haines1 == haines2: - words = `haines1` + " " + words1 - # Range - else: - if words1 == words2: - words = words1 - else: - words = words1 + " to " + words2 - words = `haines1` + " to " + `haines2` + " OR " + words - return self.setWords(node, words) - - def cwr_words(self, tree, node): - # Handle ignition time - if 
self._checkFireTR(node.getTimeRange()): - cwr = tree.stats.get(self._cwrParm, self._fireTR, node.getAreaLabel(), mergeMethod="Max") - else: - cwr = tree.stats.get(self._cwrParm, node.getTimeRange(), node.getAreaLabel(), mergeMethod="Max") - if cwr is None: - return self.setWords(node.parent, "MISSING") - cwr = self.getValue(cwr) - threshold = self.nlValue(self.null_nlValue(tree, node, "CWR", "CWR"), cwr) - if int(cwr) < threshold: - return self.setWords(node, "null") - else: - words = `int(cwr)` + " percent" - return self.setWords(node, words) - - def windWave_phrase(self): - return { - "setUpMethod": self.windWave_setUp, - "wordMethod": self.windWave_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def windWave_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("WindWaveHgt", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Wind wave...........") - return self.DONE() - - def windWave_words(self, tree, node): - "Create phrase Wind Wave" - statDict = node.getStatDict() - height = self.getValue(self.getStats(statDict, "WindWaveHgt"), "Max") - if height is None: - return self.setWords(node.parent, "MISSING") - words = `int(height + 0.5)` + " FEET" - return self.setWords(node, words) - - def waveHeight_phrase(self): - return { - "setUpMethod": self.waveHeight_setUp, - "wordMethod": self.waveHeight_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def waveHeight_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("WaveHeight", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "WAVE HEIGHT.........") - return self.DONE() - - def waveHeight_words(self, tree, node): - "Create phrase Wind Wave" - statDict = node.getStatDict() - height = self.getValue(self.getStats(statDict, "WaveHeight"), "Max") - if height is None: - return 
self.setWords(node.parent, "MISSING") - words = `int(height + 0.5)` + " FEET" - return self.setWords(node, words) - - def qpf_phrase(self): - return { - "setUpMethod": self.qpf_setUp, - "wordMethod": self.qpf_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def qpf_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("QPF", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Rainfall amount.....") - return self.DONE() - - def qpf_words(self, tree, node): - "Create phrase QPF" - statDict = node.getStatDict() - qpf = self.getValue(self.getStats(statDict, "QPF"), "Max") - if qpf is None: - return self.setWords(node.parent, "MISSING") - if qpf == 0.0: - qpfWords = "0.00" - else: - qpf = qpf + 0.005 - qpfWords = string.strip("%5.2f" % qpf) - words = qpfWords + " INCHES" - return self.setWords(node, words) - - def period_phrase(self): - return { - "setUpMethod": self.period_setUp, - "wordMethod": self.period_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def period_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Period", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Swell period........") - return self.DONE() - - def period_words(self, tree, node): - "Create phrase Swell Period" - statDict = node.getStatDict() - period = self.getValue(self.getStats(statDict, "Period"), "Max") - if period is None: - return self.setWords(node.parent, "MISSING") - words = `int(period + 0.5)` + " SECONDS" - return self.setWords(node, words) - - def swell_phrase(self): - return { - "setUpMethod": self.swell_setUp, - "wordMethod": self.swell_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - - def swell_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Swell", self.VECTOR())] - self.subPhraseSetUp(tree, node, elementInfoList, 
self.vectorConnector) - node.set("descriptor", "") - node.set("indentLabel", "Swell height........") - return self.DONE() - - def swell_words(self, tree, node): - "Create phrase Swell Height" - statDict = node.getStatDict() - stats = self.getStats(statDict, "Swell") - if stats is None: - return self.setWords(node, "") - height, dir = self.getValue(stats, "Max", self.VECTOR()) - if height is None: - return self.setWords(node.parent, "MISSING") - if dir >= 22.5 and dir < 67.5: - dirWords = "northeast" - elif dir >= 67.5 and dir < 112.5: - dirWords = "east" - elif dir >= 112.5 and dir < 157.5: - dirWords = "southeast" - elif dir >= 157.5 and dir < 202.5: - dirWords = "south" - elif dir >= 202.5 and dir < 247.5: - dirWords = "southwest" - elif dir >= 247.5 and dir < 292.5: - dirWords = "west" - elif dir >= 292.5 and dir < 337.5: - dirWords = "northwest" - else: - dirWords = "north" - heightWords = `int(height + 0.5)` - words = dirWords + " swell " + heightWords + " feet" - return self.setWords(node, words) - - def ceiling_phrase(self): - return { - "setUpMethod": self.ceiling_setUp, - "wordMethod": self.ceiling_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def ceiling_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("PredHgt", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Ceiling (kft).......") - return self.DONE() - - def ceiling_words(self, tree, node): - "Create phrase Visibility" - statDict = node.getStatDict() - hgt = self.getValue(self.getStats(statDict, "PredHgt"), "Min") - if hgt is None: - return self.setWords(node.parent, "MISSING") - hgt = hgt / 10.0 - if hgt == 0.0: - hgtWords = "less than 0.1" - else: - if hgt < 10: - hgtWords = string.strip("%5.1f" % hgt) - else: - hgtWords = `int(hgt + 0.5)` - words = hgtWords - return self.setWords(node, words) - - def visibility_phrase(self): - return { - "setUpMethod": self.visibility_setUp, - 
"wordMethod": self.visibility_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def visibility_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Vsby", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Visibility (sm).....") - return self.DONE() - - def visibility_words(self, tree, node): - "Create phrase Visibility" - statDict = node.getStatDict() - vis = self.getValue(self.getStats(statDict, "Vsby"), "Min") - if vis is None: - return self.setWords(node.parent, "MISSING") - if vis == 0.0: - visWords = "0.0" - else: - if vis < 3: - visWords = string.strip("%5.2f" % vis) - else: - visWords = `int(vis + 0.5)` - words = visWords - return self.setWords(node, words) - - def icing_phrase(self): - return { - "setUpMethod": self.icing_setUp, - "phraseMethods": [self.postProcessPhrase], - } - - def icing_setUp(self, tree, node): - self.setWords(node, "") - node.set("descriptor", "") - node.set("indentLabel", "Icing...............") - return self.DONE() - - def freezingLevel_phrase(self): - return { - "setUpMethod": self.freezingLevel_setUp, - "wordMethod": self.freezingLevel_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def freezingLevel_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("FzLevel", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Freezing level......") - return self.DONE() - - def freezingLevel_words(self, tree, node): - "Create phrase for Freezing Level" - - statDict = node.getStatDict() - stats = self.getStats(statDict, "FzLevel") - if stats is None: - return self.setWords(node.parent, "MISSING") - - mix1, mix2 = self.getValue(stats, "MinMax") - outUnits = self.element_outUnits(tree, node, "FzLevel", "FzLevel") - mix1 = int(mix1) - mix2 = int(mix2) - threshold = self.nlValue(self.null_nlValue( - tree, node, "FzLevel", 
"FzLevel"), max) - if int(mix1) < threshold and int(mix2) < threshold: - return self.setWords(node, "null") - - # Single Value input - if mix1 == mix2: - words = `mix1` + " " + outUnits - # Range - else: - words = `mix1`+ "-" + `mix2` + " " + outUnits - - # Handle ignition time - if self._checkFireTR(node.getTimeRange()): - reqType = self._getRequestWords() - ignitionMixStats = tree.stats.get( - "FzLevel", self._fireTR, node.getAreaLabel(), mergeMethod="Max") - igWords = `int(ignitionMixStats)` + " " + outUnits + " at " + reqType +", otherwise " - words = igWords + words - - return self.setWords(node, words) - - def snow_phrase(self): - return { - "setUpMethod": self.snow_setUp, - "wordMethod": self.snow_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def snow_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("SnowAmt", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Snowfall amount.....") - return self.DONE() - - def snow_words(self, tree, node): - "Create phrase Snow" - statDict = node.getStatDict() - snow = self.getValue(self.getStats(statDict, "SnowAmt"), "Max") - if snow is None: - return self.setWords(node.parent, "MISSING") - if snow == 0.0: - snowWords = "0.0" - else: - snow = snow + 0.05 - snowWords = string.strip("%5.1f" % snow) - words = snowWords + " INCHES" - return self.setWords(node, words) - - def heatIndex_phrase(self): - return { - "setUpMethod": self.heatIndex_setUp, - "wordMethod": self.heatIndex_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def heatIndex_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("HeatIndex", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Heat index..........") - return self.DONE() - - def heatIndex_words(self, tree, node): - "Create phrase Td" - statDict = node.getStatDict() - hi = 
self.getValue(self.getStats(statDict, "HeatIndex"), "Max") - if hi is None: - return self.setWords(node.parent, "MISSING") - words = `int(hi)` - return self.setWords(node, words) - - ### Methods for Spot Table ### - - def _fwsTable_phrase(self): - return { - "setUpMethod": self._fwsTable_setUp, - "wordMethod": self._fwsTable_words, - "phraseMethods": [ - self.assembleChildWords, - ], - } - - def _fwsTable_setUp(self, tree, node): - elementInfoList = [] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - return self.DONE() - - def _fwsTable_words(self, tree, node): - # See if we're doing a table for this time period - tableVars = self._determineTableVars(tree, node) - if tableVars is None: - return self.setWords(node, "") - timeRangeList, statList, colWidth, header, elements = tableVars - #print "header", header - #print "colWidth", colWidth - #print "timeRangeList" - #for tr, label in timeRangeList: - # print tr, label - words = header - argList = [tree, node, colWidth] - for rowElement, narrativeToo, tableRows in self._rowList(colWidth): - if rowElement not in elements: - continue - for label, method in tableRows: - # Call makeRow adding to words - words += self.makeRow( - label, colWidth, timeRangeList, statList, - method, argList, justify="l") - return self.setWords(node, words) - - def _determineTableVars(self, tree, node): - # Make timeRangeList, empty statList, colWidth, and header - - # Get table resolution (period) based on today, tonight, tomorrow - componentName = node.getComponentName() - period = None - - if self._productIssuance in ["Afternoon with 4 periods", - "Evening Update with 4 periods", - "Early Morning Update with 4 periods"]: - tablePeriodList = [ - ("FirePeriod1", self._todayTableRes, self._todayElements), - ("FirePeriod2", self._tonightTableRes, self._tonightElements), - ("FirePeriod3", self._tomorrowTableRes, self._tomorrowElements), - ("FirePeriod4", self._tomorrowNightTableRes, 
self._tomorrowNightElements), - ("FirePeriod5", self._nextDayTableRes, self._nextDayElements), - ] - else: - tablePeriodList = [ - ("FirePeriod1", self._todayTableRes, self._todayElements), - ("FirePeriod2", self._tonightTableRes, self._tonightElements), - ("FirePeriod3", self._tomorrowTableRes, self._tomorrowElements), - ] - - for name, variable, elements in tablePeriodList: - if componentName == name: - period = variable - tableElements = elements - if period is None or period == "None": - # No table for this component - return None - - # Determine colWidth given the period - colWidth = 4 - for hrs, colWidth in self._colWidths(): - if period == hrs: - break - - # Determine Time Ranges over which to create table - fireTimeZone = self._getActualTimeZone() - timeRange = self._determineTableTimeRange(tree, node, fireTimeZone) - timeRangeList = self.getPeriods(timeRange, period, 1, None) - - # Make header - header = "Time ("+fireTimeZone+") " - for tr, label in timeRangeList: - label = self._makeTableLabel(tree, tr, colWidth) - header += string.ljust(label, colWidth) - header += "\n" - - # Make empty statList (dummy for calling "makeRow") - statList = [] - for i in range(len(timeRangeList)): - statList.append({}) - return timeRangeList, statList, colWidth, header, tableElements - - def _colWidths(self): - # Lists table resolutions hours, corresponding column width - return [ - (1, 4), - (2, 7), - (3, 10), - (4, 13), - ] - - def _determineTableTimeRange(self, tree, node, fireTimeZone): - tr = node.getTimeRange() - # See if this is first period of product - prev = node.getComponent().getPrev() - if prev is None: - # Adjust timeRange if necessary - if self._tableStartTimeMode == "current": - currentTime = tree.get('argDict').get('creationTime') - currentTime = int(currentTime/3600.0)*3600.0 - tr = self.makeTimeRange(currentTime, tr.endTime().unixTime()) - elif self._tableStartTimeMode == "ignitionTime": - fireDateTime = time.mktime(self._fireDateTime) - fireDateTime = 
int(fireDateTime/3600.0)*3600.0 - fireTime = fireDateTime - (self._tableStartTimeOffset * 3600) - if fireTime >= tr.startTime().unixTime() and \ - fireTime < tr.endTime().unixTime(): - tr = self.makeTimeRange(fireTime, tr.endTime().unixTime()) - if self._tabularAllPeriods == "yes": - timeRange = tr - else: - # One 12-hour period - timeRange = self.makeTimeRange(tr.startTime(), - tr.startTime()+12*3600) - #print "Table time range", timeRange, node.getTimeRange() - return timeRange - - def _makeTableLabel(self, tree, timeRange, colWidth): - localTime, shift = self.determineTimeShift() - rtz = self._getActualTimeZone() - stz = time.tzname[0] - dtz = time.tzname[1] - otz = stz[0:1] - ptz = rtz[0:1] - if otz == ptz: - start = timeRange.startTime() + shift - else: - offset = 0 - if ptz == "E": - if otz == "E": - offset = 0 - elif otz == "C": - offset = 1 - elif otz == "M": - offset = 2 - elif otz == "P": - offset = 3 - elif ptz == "C": - if otz == "E": - offset = -1 - elif otz == "C": - offset = 0 - elif otz == "M": - offset = 1 - elif otz == "P": - offset = 2 - elif ptz == "M": - if otz == "E": - offset = -2 - elif otz == "C": - offset = -1 - elif otz == "M": - offset = 0 - elif otz == "P": - offset = 1 - elif ptz == "P": - if otz == "E": - offset = -3 - elif otz == "C": - offset = -2 - elif otz == "M": - offset = -1 - elif otz == "P": - offset = 0 - if stz[1:3] == rtz[1:3]: - start = timeRange.startTime() + shift + offset*3600 - else: - start = timeRange.startTime() + shift + offset*3600 - militaryHour = start.hour - hour, ampm = self.hourAmPm(militaryHour) - for low, hi, shortVal, longVal in self._tableLabels(): - if militaryHour >= low and militaryHour <= hi: - if colWidth > 4: - val = longVal - else: - val = shortVal - val = val.replace("hour", `hour`) - break - return val - - def _tableLabels(self): - return [ - (0,0, "Mid", "Midngt"), - (1,9, "hourAM", "hour AM"), - (10,11, "hourA", "hour AM"), - (12,12, "12P", "Noon"), - (13,21, "hourPM", "hour PM"), - (22,23, 
"hourP", "hour PM"), - ] - - def assembleIndentedPhrases(self, tree, component): - # Assemble and indent component phrases and add Label - # Qualify the phrases with local effect qualifiers - # if present. - # e.g. "near the coast" - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - self.consolidateLocalEffectPhrases(tree, component) - self.combineConjunctivePhrases(tree, component) - fcst = "" - lastQualifier = None - lastPhrase = None - self.orderWxPhrases(tree, component) - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - if words == "": - if self.removeEmptyPhrase(tree, phrase): - continue - - # Handle multiple element table phrase - # that appears per period - # No need to indent or qualify - name = phrase.get("name") - if name == "multipleElementTable_perPeriod_phrase": - fcst = fcst + words - continue - if name == "_fwsTable_phrase": - if words != "": - fcst = fcst + "\n" + words - continue - - if phrase.get("compound"): - makeSentence = 0 - else: - makeSentence = 1 - words, lastQualifier = self.qualifyWords( - phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase, - makeSentence=makeSentence) - lastPhrase = phrase - indentLabel = phrase.get("indentLabel") - label = self.phrase_descriptor( - tree, phrase, indentLabel, indentLabel) - #print "indentLabel, label", indentLabel, label - if indentLabel is not None and label == "": - label = indentLabel - if words == "": - words = " " - words = self.labelIndent(words, label) - fcst = fcst + words - if fcst == "": - return self.setWords(component,"") - # Add label - issuanceInfo = tree.get("issuanceInfo") - index = component.getIndex() - curLocalTime, shift = self.determineTimeShift() - creationTime = tree.get('argDict').get('creationTime') - curLocalTime = AbsTime.AbsTime(creationTime) - label = self.createLabel(tree, component, component.get("timeRange"), - issuanceInfo, curLocalTime, shift, index) 
- return self.setWords(component, label + "\n" + fcst + "\n") - - def _getTableStats(self, tree, element, tr, area, mergeMethod="Max", getValueMethod="Average"): - stats = tree.stats.get(element, tr, area, mergeMethod=mergeMethod) - if stats is None: - return None - return self.getValue(stats, getValueMethod) - - def _sky_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - sky = self._getTableStats(tree, "Sky", timeRange, node.getAreaLabel()) - if sky is None: - value = "M" - elif self._elementFormatDict.get("Sky", "numeric") == "numeric": - value = `int(sky + 0.5)` - else: - for threshold, shortVal, longVal in self._skyTableValues(): - if sky <= threshold: - if colWidth <= 4: - value = shortVal - else: - value = longVal - break - return value - - def _numSky_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - sky = self._getTableStats(tree, "Sky", timeRange, node.getAreaLabel()) - if sky is None: - value = "M" - else: - value = `int(sky + 0.5)` - return value - - def _skyTableValues(self): - return [ - (5, "CLR", "CLEAR"), - (25,"MCR", "MCLEAR"), - (50,"PC", "PCLDY"), - (69,"MC", "MCLDY"), - (87,"MC", "MCLDY"), - (100,"CDY", "CLOUDY"), - ] - - def _weatherType_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - areaLabel = node.getAreaLabel() - wxStats = tree.stats.get("Wx", timeRange, areaLabel, mergeMethod="Max") - #print "wxStats = ", wxStats,tr - if wxStats is None or len(wxStats) == 0: - return "M" - # If there are 2 subkeys tied for the highest rank, - # search for a weather mix table. 
- # Otherwise, use the weather code table to find the weather value - coRank = None - hiRank = -1 - for subkey, rank in wxStats: - # Find top 2 ranked subkeys to look for mixtures - if rank > hiRank and subkey.wxType() != "T": - hiKey = subkey - hiRank = rank - elif rank == hiRank and subkey.wxType() != "T": - coKey = subkey - coRank = rank - if hiRank == -1: - return "" - keyAttrs = hiKey.attributes() - keyType = hiKey.wxType() - if coRank == hiRank: - keyAttrs, keyType = self._matchMix(hiKey, coKey, colWidth) - value = self._matchType(keyAttrs, keyType, colWidth) - return value - - def _matchType(self, keyAttrs, keyType, colWidth): - # Try to match the weatherCodeTable to the subkey - # If no match found, return None - value = None - for wxAttr, wxType, shortVal, longVal in self._weatherCodeTable(): - if wxAttr == "" or wxAttr in keyAttrs: - if wxType == keyType: - if colWidth == 4: - value = shortVal - else: - value = longVal - break - if value is None: - if colWidth == 4: - value = "???" - else: - value = "??????" 
- return value - - def _matchMix(self, hiKey, coKey, colWidth): - # Try to match the weather mix - # Return the attribute and wxType - # If not found, return the hiKey attributes and wxType - for attr1, type1, attr2, type2, keyAttr, keyType in self._weatherMixTable(): - for key1, key2 in [(hiKey, coKey), (coKey, hiKey)]: - if type1 == key1.wxType() and type2 == key2.wxType(): - if len(key1.attributes()) == 0 and \ - len(key2.attributes()) == 0 and \ - attr1 == "" and attr2 == "": - # Match found - return [keyAttr], keyType - elif len(key1.attributes()) == 0 and \ - len(key2.attributes()) != 0 and \ - attr1 == "" and attr2 in key2.attributes(): - # Match found - return [keyAttr], keyType - elif len(key1.attributes()) != 0 and \ - len(key2.attributes()) == 0 and \ - attr1 in key1.attributes() and attr2 == "": - # Match found - return [keyAttr], keyType - elif len(key1.attributes()) != 0 and \ - len(key2.attributes()) != 0 and \ - attr1 in key1.attributes() and \ - attr2 in key2.attributes(): - # Match found - return [keyAttr], keyType - - # No match found - return hiKey.attributes(), hiKey.wxType() - - def _weatherCodeTable(self): - return [ - ("", "", "", "NONE" ), - ("Dry", "T", "DYT","DRYTSM"), - ("", "T", "TSM","TSTORM"), - ("GW", "T", "TSM","TSTORM"), - ("SmA", "T", "TSM","TSTORM"), - ("", "S", "SN", "SNOW" ), - ("", "R", "RN", "RAIN" ), - ("", "SW", "SW", "SNSHWR"), - ("", "RW", "RW", "RNSHWR"), - ("", "L", "DZL","DRZL" ), - ("", "ZR", "FZR","FZRAIN"), - ("", "ZL", "FZD","FZDRZL"), - ("", "IP", "SLT","SLEET" ), - ("", "F", "FOG","FOG" ), - ("", "ZF", "FZF","FZFOG" ), - ("", "IF", "IFG","ICEFOG"), - ("", "IC", "ICR","ICECRL"), - ("", "H", "HAZ","HAZE" ), - ("", "BS", "BSN","BLSNOW"), - ("", "BN", "BSD","BLSAND"), - ("", "BD", "BDT","BLDUST"), - ("", "K", "SMK","SMOKE" ), - ("", "FR", "FST","FROST" ), - ("", "ZY", "FZS","FZSPRY"), - ("", "VA", "ASH","VOLASH"), - # Mixed Weather Types - ("", "RS", "RS", "RNSN" ), - ("", "LF", "DZF","DZL/FG"), - ("", "SF", 
"SNF","SN/FG "), - ("", "RF", "RNF","RN/FG "), - ("", "ZRS", "ZRS","ZRN/SN"), - # Unknown Mixed Weather Type - ("", "XX", "???","??????"), - ] - - def _weatherMixTable(self): - return [ - ("", "S", "","R", "", "RS"), - ("", "SW","","RW","", "RS"), - ("", "RW","","T", "", "T"), - ("Dry","T", "","RW","Dry","T"), - ("", "L", "","F", "", "LF"), - ("", "S", "","F", "", "SF"), - ("", "R", "","F", "", "RF"), - ("", "SW","","F", "", "SF"), - ("", "RW","","F", "", "RF"), - ("", "ZR","","S", "", "ZRS"), - ("", "ZR","","SW","", "ZRS"), - ] - - def _tstmCov_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - areaLabel = node.getAreaLabel() - wxStats = tree.stats.get("Wx", timeRange, areaLabel, mergeMethod="Max") - if wxStats is None or len(wxStats) == 0: - return "M" - hiRank = -1 - for subkey, rank in wxStats: - print "*** vtm ***" - print subkey, rank - if rank > hiRank and subkey.wxType() == "T": - hiKey = subkey - hiRank = rank - if hiRank == -1: - return "" - value = None - for cov, shortVal, longVal in self._coverageCodeTable(): - if hiKey.coverage() == cov: - if colWidth == 4: - value = shortVal - else: - value = longVal - break - if value is None: - if colWidth == 4: - value = "???" - else: - value = "??????" - return value - - def _weatherCov_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - areaLabel = node.getAreaLabel() - wxStats = tree.stats.get("Wx", timeRange, areaLabel, mergeMethod="Max") - if wxStats is None or len(wxStats) == 0: - return "M" - hiRank = -1 - for subkey, rank in wxStats: - if rank > hiRank and subkey.wxType() != "T": - hiKey = subkey - hiRank = rank - if hiRank == -1: - return "" - value = None - for cov, shortVal, longVal in self._coverageCodeTable(): - if hiKey.coverage() == cov: - if colWidth == 4: - value = shortVal - else: - value = longVal - break - if value is None: - if colWidth == 4: - value = "???" - else: - value = "??????" 
- return value - - def _coverageCodeTable(self): - return [ - ("","", ""), - ("SChc", "SCH","S CHC"), - ("Iso", "ISO","ISOLTD"), - ("Chc", "CHC","CHANCE"), - ("Sct", "SCT","SCTTRD"), - ("Lkly", "LKY","LIKELY"), - ("Num", "NUM","NUMRUS"), - ("Def", "DEF","DEFNTE"), - ("Wide", "WID","WIDSPD"), - ("Ocnl", "OCL","OCNL"), - ("Frq", "FRQ","FRQNT"), - ("Brf", "BRF","BRIEF"), - ("Pds", "PDS","PERIOD"), - ("Inter", "ITR","ITRMT"), - ("Areas", "ARS","AREAS"), - ("Patchy", "PTY","PATCHY") - ] - - def _temp_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - temp = self._getTableStats(tree, "T", timeRange, node.getAreaLabel()) - if temp is None: - return "M" - if temp >= 0: - temp = int(temp + 0.5) - else: - temp = int(temp - 0.5) - return `temp` - - def _rh_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - rh =self._getTableStats(tree, "RH", timeRange, node.getAreaLabel()) - if rh is None: - return "M" - rh = int(rh + 0.5) - return `rh` - - # Wind Methods - # Utility for Wind Methods - - # Tabular Transport Wind bug found by John DeBlock and Stephen Miller. - # tree.stats.get was using self._20ftWindParm instead of element. 
- def _getWindDirSpdStr(self, tree, node, timeRange, element, formatElement=None, units=None): - windStats = tree.stats.get(element, timeRange, node.getAreaLabel(), - mergeMethod="Max") - if windStats is None: - return None - wspd,wdir = windStats - if formatElement is None: - formatElement = element - if self._elementFormatDict.get(formatElement, "alpha") == "alpha": - wdir = int(wdir + 0.5) - dirString = self._dirConvert(wdir) - else: - dir = int(wdir/10.0 + 0.5) * 10 - if dir < 10: - dirString = "00" + `dir` - elif dir < 100: - dirString = "0" + `dir` - else: - dirString = `dir` - if element == "Wind": - wspd = wspd * self._windAdjustmentFactor - if units == "Metric": - wspd = int(wspd*.44704 + 0.5) - else: - wspd = int(wspd + 0.5) - spdString = `wspd` - return dirString, spdString - - def _getWindNumDirSpdStr(self, tree, node, timeRange, element, formatElement=None, units=None): - windStats = tree.stats.get(element, timeRange, node.getAreaLabel(), - mergeMethod="Max") - if windStats is None: - return None - wspd,wdir = windStats - if formatElement is None: - formatElement = element - dir = int(wdir/10.0 + 0.5) * 10 - if dir < 10: - dirString = "00" + `dir` - elif dir < 100: - dirString = "0" + `dir` - else: - dirString = `dir` - if element == "Wind": - wspd = wspd * self._windAdjustmentFactor - if units == "Metric": - wspd = int(wspd*.44704 + 0.5) - else: - wspd = int(wspd + 0.5) - spdString = `wspd` - return dirString, spdString - - def _getEyeWindDirSpdStr(self, tree, node, timeRange, element, formatElement=None, units=None): - windStats = tree.stats.get(element, timeRange, node.getAreaLabel(), - mergeMethod="Max") - if windStats is None: - return None - wspd,wdir = windStats - if formatElement is None: - formatElement = element - if self._elementFormatDict.get(formatElement, "alpha") == "alpha": - wdir = int(wdir + 0.5) - dirString = self._dirConvert(wdir) - else: - dir = int(wdir/10.0 + 0.5) * 10 - if dir < 10: - dirString = "00" + `dir` - elif dir < 100: - 
dirString = "0" + `dir` - else: - dirString = `dir` - if element == "Wind": - wspd = wspd * self._eyeWindAdjustmentFactor - if units == "Metric": - wspd = int(wspd*.44704 + 0.5) - else: - wspd = int(wspd + 0.5) - spdString = `wspd` - return dirString, spdString - - def _getSfcWindDirSpdStr(self, tree, node, timeRange, element, formatElement=None, units=None): - windStats = tree.stats.get(element, timeRange, node.getAreaLabel(), - mergeMethod="Max") - if windStats is None: - return None - wspd,wdir = windStats - if formatElement is None: - formatElement = element - if self._elementFormatDict.get(formatElement, "alpha") == "alpha": - wdir = int(wdir + 0.5) - dirString = self._dirConvert(wdir) - else: - dir = int(wdir/10.0 + 0.5) * 10 - if dir < 10: - dirString = "00" + `dir` - elif dir < 100: - dirString = "0" + `dir` - else: - dirString = `dir` - if units == "Metric": - wspd = int(wspd*.44704 + 0.5) - else: - wspd = int(wspd + 0.5) - spdString = `wspd` - return dirString, spdString - - def _dirConvert(self, wdir): - dirString = "" - if wdir >= 338 or wdir <= 22: - dirString = "N" - elif wdir >= 23 and wdir <= 67: - dirString = "NE" - elif wdir >= 68 and wdir <= 112: - dirString = "E" - elif wdir >= 113 and wdir <= 157: - dirString = "SE" - elif wdir >= 158 and wdir <= 202: - dirString = "S" - elif wdir >= 203 and wdir <= 247: - dirString = "SW" - elif wdir >= 248 and wdir <= 292: - dirString = "W" - elif wdir >= 293 and wdir <= 337: - dirString = "NW" - return dirString - - def _adjustEyeWind(self, value): - # adjustment for winds - factor = self.nlValue(self._eyeWindAdjustmentFactor, value) - value = value * factor - return value - - def _wind_value(self, statDict, timeRange, argList, element=None, formatElement=None): - if element is None: - element = self._20ftWindParm - if formatElement is None: - formatElement = self._20ftWindParm - tree, node, colWidth = tuple(argList) - if formatElement == "TransMetWind": - windString = self._getWindDirSpdStr(tree, node, 
timeRange, element, formatElement, "Metric") - elif formatElement == "EyeWind": - windString = self._getEyeWindDirSpdStr(tree, node, timeRange, element, formatElement) - elif formatElement == "SfcWind": - windString = self._getSfcWindDirSpdStr(tree, node, timeRange, element, formatElement) - else: - windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) - if windString is None: - return "M" - dirString, spdString = windString - if self._elementFormatDict.get(formatElement, "alpha") == "alpha": - value = dirString + " " + spdString - else: - value = dirString + "/" + spdString - return value - - def _windWithGust_value(self, statDict, timeRange, argList, element=None, formatElement=None): - if element is None: - element = self._20ftWindParm - if formatElement is None: - formatElement = self._20ftWindParm - tree, node, colWidth = tuple(argList) - if formatElement == "EyeWind": - windString = self._getEyeWindDirSpdStr(tree, node, timeRange, element, formatElement) - elif formatElement == "SfcWind": - windString = self._getSfcWindDirSpdStr(tree, node, timeRange, element, formatElement) - else: - windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) - if windString is None: - return "M" - dirString, spdString = windString - gust = self._getTableStats(tree, "WindGust", timeRange, node.getAreaLabel(), - getValueMethod="Max") - if gust is None: - gstString = "GMM" - gstString = "" - gust = int(self.getValue(gust) + 0.5) - if gust > string.atoi(spdString): - gstString = "G" + `gust` - if self._elementFormatDict.get(formatElement, "alpha") == "alpha": - value = dirString + " " + spdString + gstString - else: - value = dirString + "/" + spdString + gstString - return value - - def _windDir_value(self, statDict, timeRange, argList, element=None, formatElement=None): - if element is None: - element = self._20ftWindParm - if formatElement is None: - formatElement = self._20ftWindParm - tree, node, colWidth = tuple(argList) 
- windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) - if windString is None: - return "M" - dirString, spdString = windString - return dirString - - def _windNumDir_value(self, statDict, timeRange, argList, element=None, formatElement=None): - if element is None: - element = self._20ftWindParm - if formatElement is None: - formatElement = self._20ftWindParm - tree, node, colWidth = tuple(argList) - windString = self._getWindNumDirSpdStr(tree, node, timeRange, element, formatElement) - if windString is None: - return "M" - dirString, spdString = windString - return dirString - - def _eyewindNumDir_value(self, statDict, timeRange, argList): - return self._windNumDir_value(statDict, timeRange, argList, "Wind", "EyeWind") - - def _sfcwind_value(self, statDict, timeRange, argList): - return self._wind_value(statDict, timeRange, argList, "Wind", "SfcWind") - - def _sfcwindWithGust_value(self, statDict, timeRange, argList): - return self._windWithGust_value(statDict, timeRange, argList, "Wind", "SfcWind") - - def _sfcwindDir_value(self, statDict, timeRange, argList): - return self._windDir_value(statDict, timeRange, argList, "Wind", "SfcWind") - - def _sfcwindSpd_value(self, statDict, timeRange, argList): - return self._windSpd_value(statDict, timeRange, argList, "Wind", "SfcWind") - - def _sfcwindGust_value(self, statDict, timeRange, argList): - return self._windGust_value(statDict, timeRange, argList, "Wind", "SfcWind") - - def _sfcwindNumDir_value(self, statDict, timeRange, argList): - return self._windNumDir_value(statDict, timeRange, argList, "Wind", "SfcWind") - - def _ridgeNumDir_value(self, statDict, timeRange, argList): - return self._windNumDir_value(statDict, timeRange, argList, "FreeWind", "RidgeWind") - - def _transNumDir_value(self, statDict, timeRange, argList): - return self._windNumDir_value(statDict, timeRange, argList, "TransWind", "TransWind") - - def _windSpd_value(self, statDict, timeRange, argList, element=None, 
formatElement=None): - if element is None: - element = self._20ftWindParm - if formatElement is None: - formatElement = self._20ftWindParm - tree, node, colWidth = tuple(argList) - if formatElement == "TransMetWind": - windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement, "Metric") - elif formatElement == "EyeWind": - windString = self._getEyeWindDirSpdStr(tree, node, timeRange, element, formatElement) - elif formatElement == "SfcWind": - windString = self._getSfcWindDirSpdStr(tree, node, timeRange, element, formatElement) - else: - windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) - if windString is None: - return "M" - dirString, spdString = windString - return spdString - - def _windGust_value(self, statDict, timeRange, argList, element=None, formatElement=None ): - if element is None: - element = self._20ftWindParm - if formatElement is None: - formatElement = self._20ftWindParm - tree, node, colWidth = tuple(argList) - windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) - if windString is None: - spdString = '0' - else: - dirString, spdString = windString - gust = self._getTableStats(tree, "WindGust", timeRange, node.getAreaLabel(), - getValueMethod="Max") - if gust is None: - return "M" - gstString = " " - gust = int(gust + 0.5) - if gust > string.atoi(spdString): - gstString = `gust` - return gstString - - def _eyewind_value(self, statDict, timeRange, argList): - return self._wind_value(statDict, timeRange, argList, "Wind", "EyeWind") - - def _eyewindWithGust_value(self, statDict, timeRange, argList): - return self._windWithGust_value(statDict, timeRange, argList, "Wind", "EyeWind") - - def _eyewindDir_value(self, statDict, timeRange, argList): - return self._windDir_value(statDict, timeRange, argList, "Wind", "EyeWind") - - def _eyewindSpd_value(self, statDict, timeRange, argList): - return self._windSpd_value(statDict, timeRange, argList, "Wind", "EyeWind") - - def 
_eyewindGust_value(self, statDict, timeRange, argList): - return self._windGust_value(statDict, timeRange, argList, "Wind", "EyeWind") - - def _ridge_value(self, statDict, timeRange, argList): - return self._wind_value(statDict, timeRange, argList,"FreeWind", "RidgeWind" ) - - def _ridgeDir_value(self, statDict, timeRange, argList): - return self._windDir_value(statDict, timeRange, argList, "FreeWind", "RidgeWind") - - def _ridgeSpd_value(self, statDict, timeRange, argList): - return self._windSpd_value(statDict, timeRange, argList, "FreeWind", "RidgeWind") - - def _trans_value(self, statDict, timeRange, argList): - return self._wind_value(statDict, timeRange, argList, "TransWind", "TransWind") - - def _transDir_value(self, statDict, timeRange, argList): - return self._windDir_value(statDict, timeRange, argList, "TransWind", "TransWind") - - def _transSpd_value(self, statDict, timeRange, argList): - return self._windSpd_value(statDict, timeRange, argList, "TransWind", "TransWind") - - def _transMetric_value(self, statDict, timeRange, argList): - return self._wind_value(statDict, timeRange, argList, "TransWind", "TransMetWind") - - def _transSpdMetric_value(self, statDict, timeRange, argList): - return self._windSpd_value(statDict, timeRange, argList, "TransWind", "TransMetWind") - - def _mixingHeight_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - mix = self._getTableStats(tree, "MixHgt", timeRange, node.getAreaLabel()) - if mix is None: - return "M" - if self._tabularMixingHeightUnits == "ft" and colWidth != 4: - mixft = int(mix/100.0+0.5) * 100 - if mixft < 100: - value = "BLW100" - else: - value = `mixft` - else: - if mix < 50: - mix = 100.0 - kmix = mix / 1000.0 - kmix = round(kmix,1) - if kmix < 10: - value = str(round(kmix,1)) - else: - kmix = mix / 1000.0 - kmix = int(kmix + 0.5) - value = `kmix` - return value - - def _mixingHeightMetric_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - 
mix = self._getTableStats(tree, "MixHgt", timeRange, node.getAreaLabel()) - if mix is None: - return "M" - if self._tabularMixingHeightUnits == "ft" and colWidth != 4: - mixMetric = mix * 0.3048 - mixRounded = int(mixMetric/10.0+0.5) * 10 - if mixRounded < 10: - value = "BLW10M" - else: - value = `mixRounded` - else: - if mix < 330: - mix = 330.0 - mixMetric = mix * 0.3048 / 1000.0 - kmix = round(mixMetric,1) - if kmix < 10: - value = str(round(kmix,1)) - else: - value = `kmix` - return value - - def _cwr_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - cwr = self._getTableStats(tree, self._cwrParm, timeRange, node.getAreaLabel()) - if cwr is None: - return "M" - return `int(cwr/10 + 0.5)*10` - - def _pop_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - pop = self._getTableStats(tree, "PoP", timeRange, node.getAreaLabel()) - if pop is None: - return "M" - return `int(pop/10 + 0.5)*10` - - def _lal_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - lal = self._getTableStats(tree, "LAL", timeRange, node.getAreaLabel()) - if lal is None: - return "M" - return `int(lal+0.5)` - - def _dsi_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - dsi = self._getTableStats(tree, "DSI", timeRange, node.getAreaLabel()) - if dsi is None: - return "M" - return `int(dsi + 0.5)` - - - def _ldsi_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - #dsi = self._getTableStats(tree, "DSI", timeRange, node.getAreaLabel()) - dsi = self._getTableStats(tree, "LDSI", timeRange, node.getAreaLabel()) - if dsi is None: - return "M" - return `int(dsi + 0.5)` - - def _lvori_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - #lvori = self._getTableStats(tree, "DSI", timeRange, node.getAreaLabel()) - lvori = self._getTableStats(tree, "LVORI", timeRange, node.getAreaLabel()) - if lvori is None: - 
return "M" - return `int(lvori + 0.5)` - - def _adi_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - adi = self._getTableStats(tree, "ADI", timeRange, node.getAreaLabel()) - if adi is None: - return "M" - return `int(adi + 0.5)` - - def _haines_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - stats = self._getTableStats(tree, "Haines", timeRange, node.getAreaLabel()) - if stats is None: - return "M" - return `int(stats + 0.5)` - - def _ventrate_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - ventrate = self._getTableStats(tree, "VentRate", timeRange, node.getAreaLabel()) - if ventrate is None: - return "M" - return `int(ventrate/1000.0 + 0.5)` - - def _windWave_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - stats = self._getTableStats(tree, "WindWaveHgt", timeRange, node.getAreaLabel()) - if stats is None: - return "M" - return `int(stats + 0.5)` - - def _waveHeight_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - stats = self._getTableStats(tree, "WaveHeight", timeRange, node.getAreaLabel()) - if stats is None: - return "M" - return `int(stats + 0.5)` - - def _swellPeriod_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - stats = self._getTableStats(tree, "Period", timeRange, node.getAreaLabel()) - if stats is None: - return "M" - return `int(stats + 0.5)` - - def _swell_value(self, statDict, timeRange, argList): - return self._wind_value(statDict, timeRange, argList,"Swell", "RidgeWind" ) - - def _swellDir_value(self, statDict, timeRange, argList): - return self._windDir_value(statDict, timeRange, argList, "Swell", "RidgeWind") - - def _swellHgt_value(self, statDict, timeRange, argList): - return self._windSpd_value(statDict, timeRange, argList, "Swell", "RidgeWind") - - def _freezingLevel_value(self, statDict, timeRange, argList): - tree, node, 
colWidth = tuple(argList) - mix = self._getTableStats(tree, "FzLevel", timeRange, node.getAreaLabel()) - if mix is None: - return "M" - if mix < 50: - mix = 100.0 - kmix = mix / 1000.0 - kmix = round(kmix,1) - if kmix < 10: - value = str(round(kmix,1)) - else: - kmix = mix / 1000.0 - kmix = int(kmix + 0.5) - value = `kmix` - return value - - def _ceiling_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - temp = self._getTableStats(tree, "PredHgt", timeRange, node.getAreaLabel()) - if temp is None: - return " " - temp = temp / 10.0 - if temp < 10: - tempWords = string.strip("%4.1f" % temp) - else: - tempWords = `int(temp + 0.5)` - return tempWords - - def _visibility_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - temp = self._getTableStats(tree, "Vsby", timeRange, node.getAreaLabel()) - if temp is None: - return " " - print "colWidth =", colWidth - if colWidth > 4: - if temp < 1.0: - tempWords = string.strip("%4.2f" % temp) - elif temp >= 1.0 and temp < 3.0: - tempWords = string.strip("%4.1f" % temp) - else: - tempWords = `int(temp + 0.5)` - else: - if temp < 1.0: - tempWords = string.strip("%3.2f" % temp) - tempWords = tempWords[1:] - elif temp >= 1.0 and temp < 3.0: - tempWords = string.strip("%3.1f" % temp) - else: - tempWords = `int(temp + 0.5)` - return tempWords - - def _icing_value(self, statDict, timeRange, argList): - return " " - - def _td_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - temp = self._getTableStats(tree, "Td", timeRange, node.getAreaLabel()) - if temp is None: - return "M" - if temp >= 0: - temp = int(temp + 0.5) - else: - temp = int(temp - 0.5) - return `temp` - - def _heatIndex_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - temp = self._getTableStats(tree, "HeatIndex", timeRange, node.getAreaLabel()) - if temp is None: - return "M" - if temp >= 0: - temp = int(temp + 0.5) - else: - temp = 
int(temp - 0.5) - return `temp` - - def _wwa_exclude(self,stats): - list = [] - index = 0 - newstats = [] - while index < len(stats): - eidx = 0 - flag = 1 - while eidx < len(list): - if stats[index] == list[eidx]: - flag = 0 - eidx = eidx + 1 - if flag: - newstats.append(stats[index]) - index = index + 1 - return newstats - - def _wwa_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - stats = self._getTableStats(tree, "Hazards", timeRange, node.getAreaLabel()) - if stats is None: - return " " - if stats[0] == "": - return " " - stats = self._wwa_exclude(stats) - return stats[0][0:2] + stats[0][3:4] - - def _wwa2_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - stats = self._getTableStats(tree, "Hazards", timeRange, node.getAreaLabel()) - if stats is None: - return " " - stats = self._wwa_exclude(stats) - if len(stats) < 2: - return " " - return stats[1][0:2] + stats[1][3:4] - - def _wwa3_value(self, statDict, timeRange, argList): - tree, node, colWidth = tuple(argList) - stats = self._getTableStats(tree, "Hazards", timeRange, node.getAreaLabel()) - if stats is None: - return " " - stats = self._wwa_exclude(stats) - if len(stats) < 3: - return " " - return stats[2][0:2] + stats[2][3:4] - - ### NEW NARRATIVE PHRASES ### - - def dsi_phrase(self): - return { - "setUpMethod": self.dsi_setUp, - "wordMethod": self.dsi_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def dsi_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("DSI", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "DSI.................") - return self.DONE() - - def dsi_words(self, tree, node) : - "Create phrase Probability of Precipitation" - statDict = node.getStatDict() - dsi = self.getStats(statDict, "DSI") - if dsi is None: - return self.setWords(node.parent, "MISSING") - dsi = self.getValue(dsi) - words = `int(dsi + 
0.5)` - return self.setWords(node, words) - - def ldsi_phrase(self): - return { - "setUpMethod": self.ldsi_setUp, - "wordMethod": self.ldsi_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def ldsi_setUp(self, tree, node): - #elementInfoList = [self.ElementInfo("DSI", "List")] - elementInfoList = [self.ElementInfo("LDSI", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "LDSI................") - return self.DONE() - - def ldsi_words(self, tree, node): - "Create phrase Probability of Precipitation" - statDict = node.getStatDict() - #ldsi = self.getStats(statDict, "DSI") - ldsi = self.getStats(statDict, "LDSI") - if ldsi is None: - return self.setWords(node.parent, "MISSING") - ldsi = self.getValue(ldsi) - words = `int(ldsi + 0.5)` - return self.setWords(node, words) - - def lvori_phrase(self): - return { - "setUpMethod": self.lvori_setUp, - "wordMethod": self.lvori_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def lvori_setUp(self, tree, node): - #elementInfoList = [self.ElementInfo("DSI", "List")] - elementInfoList = [self.ElementInfo("LVORI", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "LVORI...............") - return self.DONE() - - def lvori_words(self, tree, node): - statDict = node.getStatDict() - lvori = self.getStats(statDict, "LVORI") - #lvori = self.getStats(statDict, "DSI") - if lvori is None: - return self.setWords(node.parent, "MISSING") - lvori = self.getValue(lvori) - words = `int(lvori + 0.5)` - return self.setWords(node, words) - - def pop_phrase(self): - return { - "setUpMethod": self.pop_setUp, - "wordMethod": self.pop_words, - "phraseMethods": self.standard_phraseMethods(), - } - def pop_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("PoP", "Max")] - self.subPhraseSetUp(tree, node, elementInfoList, 
self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "POP.................") - return self.DONE() - - def pop_words(self, tree, node) : - "Create phrase Probability of Precipitation" - statDict = node.getStatDict() - popStats = self.getStats(statDict, "PoP") - if popStats is None: - return self.setWords(node.parent, "MISSING") - pop = self.getValue(popStats) - threshold = self.nlValue(self.null_nlValue( - tree, node, "PoP", "PoP"), pop) - if int(pop) < threshold: - return self.setWords(node, "null") - else: - words = `int(pop)` + " percent" - return self.setWords(node, words) - - ### *** END TABULAR TEST SECTION HERE *** ### - -# I had to create these phrases or labels so the FWS formatter will work -# for any WFO out of the baseline. I created labels for elements that -# grids are not created for (that I know of). If offices do have grids -# for these elements, then they can create the phrase to get it into -# the FWS product. - - # For EYE LEVEL WINDS - def fireEyeWind_compoundPhrase(self): - return { - "phraseList": [ - self.wind_summary, - self.wind_phrase, - ], - "phraseMethods": [ - self.consolidateSubPhrases, - self.assembleSentences, - self.fireEyeWind_finishUp - ], - } - def fireEyeWind_finishUp(self, tree, node): - "Create a phrase for Winds" - # Empty phrase if doing ridge/valley winds - if self.currentAreaContains( - tree, self.ridgeValleyAreas(tree, node)) == 1: - return self.setWords(node, "") - words = node.get("words") - if words is None: - return - if words == "": - words = "MISSING" - node.set("descriptor", "") - node.set("indentLabel", "Eye level winds.....") - node.set("compound", 1) - return self.setWords(node, words) - - # For Wind shift. Just need the label since there is not phrase. 
- def fireWindShift_label_phrase(self): - return { - "setUpMethod": self.fireWindShift_label_setUp, - "phraseMethods": [self.postProcessPhrase], - } - - def fireWindShift_label_setUp(self, tree, node): - self.setWords(node, "") - node.set("descriptor", "") - node.set("indentLabel", "Wind shift..........") - return self.DONE() - - # For Surrounding Ridge Wind. - def surroundingRidgeWind_phrase(self): - return { - "setUpMethod": self.surroundingRidgeWind_setUp, - "wordMethod": self.vector_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - def surroundingRidgeWind_setUp(self, tree, node): - self.wind_setUp(tree, node, gustFlag=0, element="FreeWind") - node.set("descriptor", "") - node.set("indentLabel","Surrounding ridge...") - return self.DONE() - - # For Chance of Preciptiation. - def pop_phrase(self): - return { - "setUpMethod": self.pop_setUp, - "wordMethod": self.pop_words, - "phraseMethods": self.standard_phraseMethods(), - } - def pop_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("PoP", "Average")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "POP.................") - return self.DONE() - - def pop_words(self, tree, node) : - "Create phrase Probability of Precipitation" - statDict = node.getStatDict() - popStats = self.getStats(statDict, "PoP") - if popStats is None: - return self.setWords(node.parent, "MISSING") - pop = self.getValue(popStats) - threshold = self.nlValue(self.null_nlValue( - tree, node, "PoP", "PoP"), pop) - if int(pop) < threshold: - return self.setWords(node, "null") - else: - words = `int(pop)` + " percent" - return self.setWords(node, words) - - # For Stability Class. 
- def stabilityClass_phrase(self): - return { - "setUpMethod": self.stabilityClass_setUp, - "wordMethod": self.stabilityClass_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def stabilityClass_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Stability", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Stability class.....") - return self.DONE() - - def stabilityClass_words(self, tree, node) : - "Create phrase Stability Class" - statDict = node.getStatDict() - stability = self.getStats(statDict, "Stability") - if stability is None: - return self.setWords(node.parent, "MISSING") - words = `int(self.getValue(stability))` - return self.setWords(node, words) - - # For Marine Layer. - def marineLayer_phrase(self): - return { - "setUpMethod": self.marineLayer_setUp, - "wordMethod": self.marineLayer_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def marineLayer_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("MarineLayer", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Marine layer........") - return self.DONE() - - def marineLayer_words(self, tree, node) : - "Create phrase MarineLayer" - statDict = node.getStatDict() - marineLayer = self.getStats(statDict, "MarineLayer") - if marineLayer is None: - return self.setWords(node.parent, "MISSING") - words = `int(self.getValue(marineLayer))` - return self.setWords(node, words) - - def td_phrase(self): - return { - "setUpMethod": self.td_setUp, - "wordMethod": self.td_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def td_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Td", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Dewpoint............") - return self.DONE() - - 
def td_words(self, tree, node): - "Create phrase Td" - statDict = node.getStatDict() - td = self.getValue(self.getStats(statDict, "Td"), "Avg") - if td is None: - return self.setWords(node.parent, "MISSING") - words = `int(td)` - return self.setWords(node, words) - - # For Begin/End of Preciptiation. - def pcpnTiming_phrase(self): - return { - "setUpMethod": self.pcpnTiming_setUp, - "phraseMethods": [self.postProcessPhrase], - } - - def pcpnTiming_setUp(self, tree, node): - self.setWords(node, " ") - node.set("descriptor", "") - node.set("indentLabel", "Begin/end of pcpn...") - return self.DONE() - - def _checkStrs(self, checkStrings, inputStr, orderStrings=0, checkMode=1): - # Check the inputStr for the list of checkStrings. - # If a checkString is a tuple, at least one of the - # given tuple strings must be found in the inputStr - # If orderStrings == 1, the strings must occur in order in the inputStr - # If checkMode == 0, the strings should NOT be found in the inputStr - # Returns 1 if successful, the failed checkString if not. 
- curIndex = -1 - for cStr in checkStrings: - if type(cStr) == types.TupleType: - # Will pass if ANY of these strings are found - # Not valid with checkMode of zero - if not checkMode: - continue - found = 0 - for subStr in cStr: - strIndex = inputStr.find(subStr) - if strIndex >= 0: - found = 1 - break - else: - found = 0 - if not found: - return subStr - else: - # Must find exact string - strIndex = inputStr.find(cStr) - if strIndex < 0: - if checkMode: - return cStr - else: - if not checkMode: - return cStr - # Check the ordering - if orderStrings: - inputStr = inputStr[strIndex:] - return 1 - - - -### For Testing -## def getPreviousProduct(self, stqPil, searchString, version=0): -## f = open("/home/eagle6/hansen/ui/middendorf/GTFSTQBYZ"+`version`, "r") -## product = f.read() -## f.close() -## #print "returning", product -## return product - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# --------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without +# technical support, and with no warranty, express or implied, as to +# its usefulness for any purpose. +# +# FWS_Overrides +# +# This file provides any product specific overrides for the +# FWS product. This file is part of the baseline. 
+# +# Definition Section: +# Overrides: +# Additions: +# +# Methods: +# Overrides: +# Additions: +# +# --------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import string, time, re, os, types, copy +import TextRules +import ProcessVariableList +import math +import HazardsTable, TimeRange, AbsTime + +# Define overrides of Product Definition settings and +# default values of additional Definition settings +# ( This Definition section must be before the Class definition) + +#***** THIS NEXT LINE IS REQUIRED ***** +Definition = {} +# +# FWS Definitions: +# Definition statements must start in column 1 + +# REQUIRED CONFIGURATION ITEMS +#Definition['displayName'] = "FWS" +Definition["statePil"] = "GTF" # State Pil ID + +Definition["productName"] = "Spot Forecast" # name of product +Definition["fullStationID"] = "" # full station identifier (4letter) +Definition["wmoID"] = "" # WMO ID +Definition["pil"] = "" +Definition["stqPil"] = "STQ" # STQ pil +Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. +Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. + +Definition["summaryExtended"] = 0 +Definition["individualExtended"] = 1 +Definition["extendedLabel"] = 1 + +### FWS settings of baseline options: ### +Definition["mapNameForCombinations"] = None +Definition["defaultEditAreas"] = [] + +# agencyList - This is a list of agency abbreviations as you want them to +# appear in the product header. For Example... +# Spot Forecast for Willie Fire...USFS GNF +# where "USFS GNF" is an example of agency abbreviation. +# The FWS formatter will read the STQ spot request product +# and will try to first guess the agency abbreviation from the +# "REQUESTING AGENCY" line of the STQ product. If the first guess +# is found in your agencyList list, then the Quality Control GUI +# with have that agency pre-selected. 
If list is left empty, +# then the formatter will always use what the user submitted +# for the agency. + +#Definition["agencyList"] = [ +# (1,"Agency 1"), +# (2,"Agency 2"), +# (3,"Agency 3"), +# (4,"Agency 4"), +# (5,"Agency 5"), +# ] +Definition["agencyList"] = [] + +# forecasterList - This is a list of forecaster numbers, forecaster awips login name, +# and forecaster last names. The Quality Control GUI will +# list the forecaster's last name and the forecaster will +# check all of the forecaster's names that were involved +# in that forecast. + +Definition["forecasterList"] = [ + (1,"forecastera","Forecaster A"), + (2,"forecasterb","Forecaster B"), + (3,"forecasterc","Forecaster C"), + (4,"forecasterd","Forecaster D"), + (5,"forecastere","Forecaster E"), + ] + +# stqNumberVersions - When you launch the FWS formatter, you will get a GUI +# that asks you to select which spot request you want to +# format a spot forecast for. This variable specifies +# how many spots you want to list in the GUI. If you do +# increase the number, then make sure you increase the +# number of versions stored in awips. +Definition["stqNumberVersions"] = 10 + +# stqWmoID - helps find the timestamp line in the STQ product. Only change if +# WSH changes the WMO id of the STQ product. + +Definition["stqWmoID"] = "BMBB91 K" + +# wind20ftHeader: This definition set to "1" allows offices to +# format winds in this format... +# +# Wind (20 ft)........ +# Slope/valley.......WEST 10-20 MPH +# Ridgetop...........NORTHWEST 20 MPH +# +# By setting this definition to "0", you will get... +# +# Wind (20 ft)........WEST 10-20 MPH +# Ridgetop wind.......NORTHWEST 20 MPH +Definition["wind20ftHeader"] = 1 # Use 1 for yes, 0 for no + +# typeList - This is a list of project types and are formatted in the "REASON FOR +# REQUEST" line of the FWS forecast. Do not edit this list unless WSH +# directs you to do so. 
+ +Definition["typeList"] = ["WILDFIRE", "PRESCRIBED", "HAZMAT", "SAR", "TEST"] + +# Set shortTermOnly to 1 if you don't want to give your forecasters an option +# include extended forecasts and/or outlooks with their spot forecasts. +Definition["shortTermOnly"] = 1 +#Definition["shortTermOnly"] = 0 + +Definition["outputFile"] = "{prddir}/TEXT/FWS.txt" + +# Definitions to insert unrepresentativeness of the forecast +# instructions for the user. +Definition["insertUnrepresentStatement"] = 1 # Use 1 for yes, 0 for no +Definition["unrepresentStatement"] = "If conditions become unrepresentative, " + \ + "contact the National Weather\nService." +# Definitions to insert the FWF discussion from a separate file. +# Discussion is edited separately in XNOW for the FWF forecast. +# Advantage of this is to have a first guess for the discussion in +# the Spot forecast...saving some composition time. +Definition["insertDiscussionFromFile"] = 0 # Use 1 for yes, 0 for no +Definition["discussionFile"] = "/home/local_apps/xnow/temp/DISFWF" + +# Definitions to insert the FWF 8 to 14 day outlook from a separate +# file if the user requests that information in their request. (Not +# very likely). Outlook is edited separately in XNOW for the FWF +# Forecast with the advantage of saving time in the composition of +# the Spot Forecast. +Definition["insertOutlookFromFile"] = 0 # Use 1 for yes, 0 for no +Definition["outlookFile"] = "/home/local_apps/xnow/temp/OLKFWF" + + +# wildfireElementList is a subset list of the requestedElementList list. +# The directive states that Sky/Weather, Temp, RH, and Winds are required +# for wildfire spot forecasts. Even if the user doesn't select these elements, +# the formatter will put them in anyway because of the directive requirements. + +# You may add weather elements corresponding to the entries you see in your STQ product. 
+ +Definition["wildfireElementList"] = [ + "SKY/WEATHER", + "TEMPERATURE", + "HUMIDITY", + "20 FOOT WINDS", + "EYE LEVEL WINDS", + ] + +Definition["stqPil"] = "STQ" # STQ pil + +# Definitions to insert unrepresentativeness of the forecast +# instructions for the user. +#Definition["insertUnrepresentStatement"] = 0 # Use 1 for yes, 0 for no +#Definition["unrepresentStatement"] = "If conditions become unrepresentative, " + \ +# "contact the National Weather\nService." + +# wind20ftHeader: This definition set to "1" allows offices to +# format winds in this format... +# +# Wind (20 ft)........ +# Slope/valley.......WEST 10-20 MPH +# Ridgetop...........NORTHWEST 20 MPH +# +# By setting this definition to "0", you will get... +# +# Wind (20 ft)........WEST 10-20 MPH +# Ridgetop wind.......NORTHWEST 20 MPH +#Definition["wind20ftHeader"] = 0 # Use 1 for yes (default), 0 for no + +# Definitions to insert the FWF discussion from a separate file. +# Discussion is edited separately in XNOW for the FWF forecast. +# Advantage of this is to have a first guess for the discussion in +# the Spot forecast...saving some composition time. +#Definition["insertDiscussionFromFile"] = 1 # Use 1 for yes, 0 for no +#Definition["discussionFile"] = "/home/local_apps/xnow/temp/DISFWFBYZ" + +# Definitions to insert the FWF 8 to 14 day outlook from a separate +# file if the user requests that information in their request. (Not +# very likely). Outlook is edited separately in XNOW for the FWF +# Forecast with the advantage of saving time in the composition of +# the Spot Forecast. 
+#Definition["insertOutlookFromFile"] = 1 # Use 1 for yes, 0 for no +#Definition["outlookFile"] = "/home/local_apps/xnow/temp/OLKFWFBYZ" + +#Definition["tempLocalEffects"] = 1 # Set to 1 to enable Temp and RH local effects AFTER + # creating AboveElev and BelowElev edit areas +#Definition["windLocalEffects"] = 1 # Set to 1 to enable wind local effects AFTER + # creating Ridges and Valleys edit areas +# OPTIONAL CONFIGURATION ITEMS +#Definition["database"] = "Official" # Source database. "Official", "Fcst", or "ISC" +#Definition["debug"] = 1 +#Definition["lineLength"] = 66 #Product line length + +# Set the following variable to 1 if you want Lightning Activity +# reported with phrases like "1-8 STRIKES", "9-15 STRIKES", etc. +#Definition["lightningPhrases"] = 1 + +# The following variable sets a wind adjustment factor for surface +# (20 ft) winds. Wind speeds will be multiplied by this factor. +# Winds reported by RAWS sites are frequently lower than ASOS winds +# due to the fact that they measure wind speeds at lower heights. +# A common adjustment factor is 80% (0.80). If you want no adjustment +# to the winds then set this variable to 1.00 +#Definition["windAdjustmentFactor"] = 1.00 + +# The following variable sets a wind adjustment factor for eye level +# winds. Wind speeds will be multiplied by this factor. Eye level +# winds are frequently lower than ASOS winds due to the fact that +# winds are slower when measured closer to the ground. A common +# adjustment factor is 60% (0.60). 
If you want no adjustment to +# the winds then set this variable to 1.00 +Definition["eyeWindAdjustmentFactor"] = 0.60 + +#Definition["language"] = "english" + +# Trouble-shooting items +#Definition["passLimit"] = 20 # Limit on passes allowed through + # Narrative Tree +#Definition["trace"] = 1 # Set to 1 to turn on trace +# useRH If 1, use RH grids instead of MaxRH, MinRH +Definition["useRH"] = 0 + +### *** START TABULAR TEST SECTION *** ### + +Definition["fwfPil"] = "FWFBYZ" # FWF pil + +# Definitions to insert the FWF discussion from a separate file or +# from the last FWF your office has issued. + +# Discussion is edited separately in GFE for the FWF forecast. +# Advantage of this is to have a first guess for the discussion in +# the Spot forecast...saving some composition time. + +# Use 1 to get Discussion from a file +# Use 2 to get Discussion from your last FWF product +# Use 0 to use a blank Discussion template +Definition["insertDiscussionFromFile"] = 2 +Definition["discussionFile"] = "/data/local/DISFWFBYZ" + +# Definitions to insert the FWF 8 to 14 day outlook from a separate +# file if the user requests that information in their request. (Not +# very likely). Outlook is edited separately in XNOW for the FWF +# Forecast with the advantage of saving time in the composition of +# the Spot Forecast. + +# Use 1 to get Outlook from a file +# Use 2 to get Outlook from your last FWF product +# Use 0 to use a blank Outlook template +Definition["insertOutlookFromFile"] = 2 # Use 1 for yes, 0 for no +Definition["outlookFile"] = "/data/local/OLKFWFBYZ" + +# If set to 1, the user can enter a creation date/time +# for product generation. It will be as if the product was run +# at the creation time specified by the user at run-time. +Definition["includeCreationTimeOnGUI"] = 1 +#Definition["includeCreationTimeOnGUI"] = 0 + +# forecastTypeList - This definition contains a list of spot forecast formats that a +# forecaster can select via the formatter gui. 
The formats are:
+#
+# Narrative Only: The spot forecast is in a narrative format.
+# Tabular/Narrative: This format is a tabular/narrative mix as specified
+# in the "_rowList" (see FWS_Overrides).
+#
+# For each forecastType, you can specify a label that will appear in the GUI.
+#
+Definition["forecastTypeList"] = [
+    # Label Forecast Type
+    ("Narrative Only", "Narrative Only"),
+    ("Tabular/Narrative", "Tabular/Narrative"),
+    ("Tabular Only", "Tabular Only"),
+
+    # If your _rowList specifies an all Tabular product,
+    # you may want to change this entry to:
+    #("Tabular", "Tabular/Narrative"),
+    ]
+
+# defaultForecastType - This definition sets the default setting for which spot forecast
+# format your WFO wants to use. Value for definition must be included
+# in the forecastTypeList definition and must be either "Narrative",
+# "Tabular", or "With Ignition Forecast".
+#Definition["defaultForecastType"] = "Narrative Only"
+Definition["defaultForecastType"] = "Tabular/Narrative"
+
+# withIgnitionTimes: If "yes", certain weather elements can be configured to include
+# an ignition time forecast within the narrative.
+Definition["withIgnitionTimes"] = "no"
+
+# includeIgnitionOptionOnGUI: If 1, the GUI will include this option at run-time.
+Definition["includeIgnitionOptionOnGUI"] = 1
+#Definition["includeIgnitionOptionOnGUI"] = 0
+
+# tabularResolutionDict - This definition contains the list of table time resolutions
+# (per period) that you want to appear in the GUI to the forecaster.
+# Some WFOs may not want to give forecasters an option to generate
+# a table with an 1 hour resolution (for example), so you can
+# delete "1" from this list and it will not appear on the gui.
+# Possible values are 1, 2, 3, 4 hours and 123 in which case,
+# hourly resolution will be 1 hour in the 1st period,
+# 2 hours in the 2nd period, and 3 hours in the third period
+# (if needed)..
+Definition["tabularResolutionDict"] = { + "Today": [1, 2, 3, 4, "None"], + "Tonight": [1, 2, 3, 4, "None"], + "Tomorrow": [1, 2, 3, 4, "None"], + } + +# defaultTabularResolution - This definition must be set to one of values listed in the +# tabularResolutionList definition. This will be the value +# that the gui will use for a default for each period. +# Values are limited to 1, 2, 3, and 4 and must be included in +# the tabularResolutionList definition. +Definition["defaultTabularResolution"] = { + "Today": 2, + "Tonight": 2, + "Tomorrow": 2 + } + +# tabularAllPeriods - Setting this definition to "no" will generate a 12 +# hour table only in the first period. +# The table will start at either the current time or +# the ignition time depending on the setting of +# tableStartTimeMode (see below). +# Setting this definition to "yes" will allow tables in +# all periods with snapshot values covering the +# time range of each period. + +Definition["tabularAllPeriods"] = "yes" +#Definition["tabularAllPeriods"] = "no" + +# tabularForWildfire - This is a nationally mandated setting which requires +# a narrative forecast for wildfire spot requests. When +# set to "no", a narrative will be produced, even if the +# tabular option is selected. Your office must issue a +# PDD to switch this definition to "yes". + +Definition["tabularForWildfire"] = "no" +#Definition["tabularForWildfire"] = "yes" + +# tableStartTimeMode - The setting of this definition will tell the formatter how to +# determine the start time for the table. +# If "productStart" is used, then the table will start at the +# beginning of the product issuance. +# If "ignitionTime" is used, then the formatter will use the ignition time +# if it is within the first period of the product. +# Otherwise the formatter will use the productStart time. +# If "current" is used, then the table will start at the time the +# formatter was launched. 
+ +#Definition["tableStartTimeMode"] = "current" +#Definition["tableStartTimeMode"] = "productStart" +Definition["tableStartTimeMode"] = "ignitionTime" + +# tableStartTimeOffset - When the ignition time is used for the table start time, +# you can start the table a set number of hours before the +# ignition time. You can configure the tableStartTimeOffset +# definition for this purpose. Value is the number of hours +# before the ignition time desired. Note, if this new time +# is before the product start time, then the product start +# time will be used. +Definition["tableStartTimeOffset"] = 0 + +# ignitionForecastTypeList - The formatter can produce Ignition Time/Request Time +# forecasts for certain weather elements, like T and RH. +# This list will produce this forecast as a default for +# certain types of spot requests. List elements must be +# a subset of the typeList definition. + +#Definition["ignitionForecastTypeList"] = ["PRESCRIBED"] +Definition["ignitionForecastTypeList"] = [] + +# elementFormatDict - This defines the format as "alpha" or "numeric" for various +# tabular weather elements. + +# Sky - This definition allows a WFO to use a text description of the +# Sky Cover or use numeric values. Examples are as follows... +# alpha 1hr : SKY.............MC MC MC MC MC MC MC MC MC MC MC MC PC +# alpha 2hr : SKY.............MCLDY MCLDY MCLDY MCLDY MCLDY MCLDY PCLDY +# numeric 2hr: Sky (%).........90 90 90 83 83 83 69 + +# Tabular Wind Format Definitions +# "alpha" will cause the direction to be formatted in the alphabetic characters +# of N, NW, W, SW, S, SE, E, and NE. +# "numeric" will return the wind direction in tens of degrees. i.e. 000, 010, +# etc. 
When a numeric wind direction is combined with wind speed it will look
+# something like this...030/10
+
+Definition["elementFormatDict"] = {
+    "Sky" : "numeric",
+    "Wind": "alpha",
+    "Wind20ft": "alpha",
+    "EyeWind": "alpha",
+    "RidgeWind": "alpha",
+    "TransWind": "alpha",
+    "TransMetWind": "alpha",
+    }
+
+# bothAlphaNumericDict - For certain elements both alpha and numeric values
+# are needed. In particular, sky cover and wind direction.
+# Only possible values are "Yes" or "No".
+# If you do configure a wind element to "Yes", then
+# ensure the corresponding setting for elementFormatDict
+# is set to "alpha". Otherwise, you will get two lines
+# of numeric values.
+
+Definition["bothAlphaNumericDict"] = {
+    "Sky" : "No",
+    "Wind": "No",
+    "Wind20ft": "No",
+    "EyeWind": "No",
+    "SfcWind": "No",
+    "RidgeWind": "No",
+    "TransWind": "No",
+    }
+
+# tabularMixingHeightUnits - This definition allows the WFO to specify their preference
+# on how mixing height is expressed. In thousands of feet or
+# in just feet? The definition can only be set to "kft" or
+# "ft". Note: the 1 hour resolution table is limited to a
+# three character space, so mixing height will always be
+# expressed in kft when the 1 hour resolution is selected
+# regardless to what this definition is set to. Examples...
+# KFT 2hr: Mix hgt (kft)...0.3 0.3 0.3 0.3 0.3 7.9 11
+# FT 2hr : Mix hgt (ft)....300 300 300 300 300 7900 11100
+
+#Definition["tabularMixingHeightUnits"] = "kft" # So we can fit a number in a 3 character space.
+Definition["tabularMixingHeightUnits"] = "ft" # Will only be used for 2,3, or 4 time resolutions.
+
+# transportWindLabel - Some WFOs use "Transport Winds", while others use "Mixing Winds".
+# They are one and the same in terms of the forecast. This definition
+# allows the WFO to configure their preference for the tabular section.
+#Definition["transportWindLabel"] = "mix" +Definition["transportWindLabel"] = "tran" + +# includeMetricDispersion - Some users need mixing height and transport winds +# in metric units. If you want to include the metric +# in addition to english values, then set definition +# to "yes". Otherwise "no". + +#Definition["includeMetricDispersion"] = "yes" +Definition["includeMetricDispersion"] = "no" + +# 20ftWindParm - Some WFOs actually produce a Wind20ft grid, so the 20 FOOT WIND +# phrase can be configured to sample that grid (the "Wind20ft" +# setting). Other WFOs just use a conversion factor (windAdjustmentFactor) +# of what they have in the Wind grid (the "Wind" setting). + +Definition["20ftWindParm"] = "Wind" +#Definition["20ftWindParm"] = "Wind20ft" + +# wind20ftHeader: This definition set to "1" allows offices to +# format winds in this format... +# +# Wind (20 ft)........ +# Slope/valley.......WEST 10-20 MPH +# Ridgetop...........NORTHWEST 20 MPH +# +# By setting this definition to "0", you will get... +# +# Wind (20 ft)........WEST 10-20 MPH +# Ridgetop wind.......NORTHWEST 20 MPH +Definition["wind20ftHeader"] = 0 # Use 1 for yes, 0 for no +#Definition["wind20ftHeader"] = 1 # Use 1 for yes, 0 for no + +# tableWindElementSplit - When the time resolution of the table is 1 hour, then +# I am forced to create separate lines for wind direction, +# wind speed, and wind gust speed. When the resolution +# is 2 hours or more, then a WFO has a choice of formats. +# They can set tableWindElementSplit to "yes" and wind +# direction, speed, and gusts will remain in their +# separate lines. Or the WFO can set tableWindElementSplit +# to "no". For the two hour resolution, direction and +# speed will be combined. For three and four hour +# resolution, direction, speed, and gusts will be +# combined. Examples follow... 
+ +# yes 2hr: 20 ft wind dir..SW W W W W W W +# : 20 ft wind spd..26 26 18 18 18 14 14 +# : 20 ft wind gust.40 40 + +# no 2 hr: 20 ft wind......W 26 W 26 W 18 W 18 W 18 W 14 W 14 +# : 20 ft wind gust.40 40 + +# yes 3hr: 20 ft wind dir..W W W W W +# : 20 ft wind spd..26 25 13 14 13 +# : 20 ft wind gust.40 40 + +# no 3 hr: 20 ft wind......W 26G40 W 25G40 W 13 W 14 W 13 + +#Definition["tableWindElementSplit"] = "yes" +Definition["tableWindElementSplit"] = "no" + +# tableEyeWindElementSplit - When the time resolution of the table is 1 hour, then +# I am forced to create separate lines for wind direction, +# wind speed, and wind gust speed. When the resolution +# is 2 hours or more, then a WFO has a choice of formats. +# They can set tableEyeWindElementSplit to "yes" and wind +# direction, speed, and gusts will remain in their +# separate lines. Or the WFO can set tableEyeWindElementSplit +# to "no". For the two hour resolution, direction and +# speed will be combined. For three and four hour +# resolution, direction, speed, and gusts will be +# combined. Examples follow... + +# yes 2hr: Eye lvl wnd dir.SW W W W W W W +# : Eye lvl wnd spd.26 26 18 18 18 14 14 +# : Eye lvl wnd gst.40 40 + +# no 2 hr: Eye level wind..W 26 W 26 W 18 W 18 W 18 W 14 W 14 +# : Eye lvl wnd gst.40 40 + +# yes 3hr: Eye lvl wnd dir.W W W W W +# : Eye lvl wnd spd.26 25 13 14 13 +# : Eye lvl wnd gst.40 40 + +# no 3 hr: Eye level wind..W 26G40 W 25G40 W 13 W 14 W 13 + +#Definition["tableEyeWindElementSplit"] = "yes" +Definition["tableEyeWindElementSplit"] = "no" + +# tableRidgeElementSplit - When the time resolution of the table is 1 hour, then +# I am forced to create separate lines for wind direction +# and wind speed. When the resolution is 2 hours or more, +# then a WFO has a choice of formats. They can set +# tableRidgeElementSplit to "yes" and wind direction and +# speed will remain in their separate lines. 
Or the WFO +# can set tableRidgeElementSplit to "no" and the wind +# direction and speed will be combined into one line. +# Examples follow... + +# yes 2hr: Ridge wnd dir..W W W W W W W +# : Ridge wnd spd..36 36 36 36 36 36 16 + +# no 2 hr: Ridgetop wind...W 36 W 36 W 36 W 36 W 36 W 36 W 16 + +#Definition["tableRidgeElementSplit"] = "yes" +Definition["tableRidgeElementSplit"] = "no" + +# tableTransElementSplit - When the time resolution of the table is 1 hour, then +# I am forced to create separate lines for wind direction +# and wind speed. When the resolution is 2 hours or more, +# then a WFO has a choice of formats. They can set +# tableTransElementSplit to "yes" and wind direction and +# speed will remain in their separate lines. Or the WFO +# can set tableTransElementSplit to "no" and the wind +# direction and speed will be combined into one line. +# Examples follow... + +# yes 2hr: Transp wind dir.W W W W W W W +# : Transp wind spd.8 8 8 8 8 8 20 + +# no 2 hr: Transport wind..W 8 W 8 W 8 W 8 W 8 W 8 W 20 + +#Definition["tableTransElementSplit"] = "yes" +Definition["tableTransElementSplit"] = "no" + +# tableSwellElementSplit - When the time resolution of the table is 1 hour, then +# I am forced to create separate lines for swell direction +# and swell height. When the resolution is 2 hours or more, +# then a WFO has a choice of formats. They can set +# tableSwellElementSplit to "yes" and swell direction and +# height will remain in their separate lines. Or the WFO +# can set tableSwellElementSplit to "no" and the swell +# direction and height will be combined into one line. +# Examples follow... 
+ +# yes 2hr: Swell direction.W W W W W W W +# : Swell hgt (ft)..36 36 36 36 36 36 16 + +# no 2 hr: Swell hgt (ft)..W 36 W 36 W 36 W 36 W 36 W 36 W 16 + +#Definition["tableSwellElementSplit"] = "yes" +Definition["tableSwellElementSplit"] = "no" + +# tableSfcWindElementSplit - When the time resolution of the table is 1 hour, then +# I am forced to create separate lines for wind direction, +# wind speed, and wind gust speed. When the resolution +# is 2 hours or more, then a WFO has a choice of formats. +# They can set tableSfcWindElementSplit to "yes" and wind +# direction, speed, and gusts will remain in their +# separate lines. Or the WFO can set tableSfcWindElementSplit +# to "no". For the two hour resolution, direction and +# speed will be combined. For three and four hour +# resolution, direction, speed, and gusts will be +# combined. Examples follow... + +# yes 2hr: Surface wnd dir.SW W W W W W W +# : Surface wnd spd.26 26 18 18 18 14 14 +# : Surface wnd gst.40 40 + +# no 2 hr: Surface wind....W 26 W 26 W 18 W 18 W 18 W 14 W 14 +# : Surface wnd gst.40 40 + +# yes 3hr: Surface wnd dir.W W W W W +# : Surface wnd spd.26 25 13 14 13 +# : Surface wnd gst.40 40 + +# no 3 hr: Surface wind....W 26G40 W 25G40 W 13 W 14 W 13 + +#Definition["tableSfcWindElementSplit"] = "yes" +Definition["tableSfcWindElementSplit"] = "no" + +# cwrParm - Some WFOs (especially in wetter climates) use the PoP grid for +# chance of wetting rain, whereas offices in dry climates create a +# CWR grid that has values lower than the PoP grid. Value values +# for this definition is either "CWR" or "PoP". 
+ +Definition["cwrParm"] = "PoP" + + +### *** END TABULAR TEST SECTION *** ### + +# END definitions +############################################################ + +#********************************************************************** +# MAKE NO CHANGES HERE +# The minimum contents of this file are the above Definition = {} line +# plus following class definition and the __init__ method with only +# the "pass" line in it. + +class FWS_Overrides: + """Class NNN_FILETYPE - Version: IFPS""" + + def __init__(self): + pass + +# End MAKE NO CHANGES HERE +#********************************************************************** + # Add methods here making sure to indent inside the class statement + # FWS Overrides ------------------------ + + # It is helpful to put a debug statement at the beginning of each + # method to help with trouble-shooting. + #def _method(self): + #self.debug_print("Debug: _method in FWS_Overrides") + + def _processVariableList(self, definition): + + # Get Definition variables + for key in list(definition.keys()): + exec("self._" + key + "= definition[key]") + + # Load in a user specified number of STQ products into the formatter. + products = self._getStqProducts() + + # Get the information for the specific fire. + # IF there are STQ products in the directory, + # selection GUI will be displayed + cancel = self._getFireInfo(products) + if cancel: + # User cancelled + return None + + # Get the user information for the specific fire + # and return the resulting varDict + return self._displayFireInfo() + + def _getStqProducts(self): + # Load in a user specified number of STQ products into the formatter. 
+ # If no products found, return empty list + products = [] + version = 0 + stqPil = self._statePil + self._stqPil + searchString="" + for version in range(self._stqNumberVersions): + product = self.getPreviousProduct(stqPil, searchString, version=version) + if product is None or product == "": + break + + # Let's filter the product just in case single quote is put + # into the the request. + product = string.replace(product,"\'","") + + product = string.split(product, "\n") + missingFlag=1 + feedbackFlag=0 + deleteFlag=0 + for line in product: + line = string.replace(line, "\n", "") + if "PROJECT NAME" in line: + missingFlag=0 + if "Feedback was just received for project" in line: + feedbackFlag=1 + if "The Spot Forecast Request for project" in line: + deleteFlag=1 + if not missingFlag and not feedbackFlag and not deleteFlag: + products.append(product) + return products + + def _getFireInfo(self, products): + # If there were STQ products, display their names for user to select + # Return 1 if user cancels + product, issuance, forecasters = self._getFireProduct(products) + if issuance is None: + return 1 # User cancelled + if len(products) > 0: + self._noStqProduct = 0 + else: + product = None + self._noStqProduct = 1 + self._getProductInfo(product, issuance, forecasters) + + def _getFireProduct(self, products): + # Create the fireNameList used for the spot selection menu. 
+ fireNameList = [] + ofileList = [] + validProductFound = 0 + productNumber = 0 + masterProductList = [] + for product in products: + fireName = "NAME MISSING" + timeStamp = "DDHHMM" + tag = "YYYYMMDD.XXXXX.NN" + tagFlag = 0 + feedbackFlag=0 + deleteFlag=0 + for line in product: + line = string.replace(line, "\n", "") + if "PROJECT NAME" in line: + fireName = string.upper(line[22:]) + if self._stqWmoID in line: + timeStamp = line[12:] + if "OFILE" in line: + tag = string.upper(line[8:]) + if tag not in ofileList: + ofileList.append(tag) + tagFlag = 1 + productNumber = productNumber + 1 + if tagFlag: + fireNameList.append(repr(productNumber) + ") " + fireName + \ + " -- " + timeStamp + " -- " + tag) + masterProductList.append(product) + validProductFound = 1 + + varList = [] + + if validProductFound: + fireNameList.append("Manually Enter in Request Info") + desFireName = "Please Choose a Fire", "fireName" + varList.append((desFireName, fireNameList[0], "radio", fireNameList)) + + # Product Issuance Processing + issuanceList = [ + "Morning", "Morning Update", "Afternoon Update", + "Afternoon", "Afternoon with 4 periods", "Evening Update", + "Evening Update with 4 periods", "Early Morning Update", + "Early Morning Update with 4 periods", "Next Day" + ] + desIssuanceList = "Product Issuance:", "productIssuance" + varList.append((desIssuanceList, issuanceList[0], "radio", issuanceList)) + + # Forecaster List Section of the GUI + forecasterNameList = [] + defaultForecasterNameList = [] + cmd = "whoami" + db = os.popen(cmd,'r') + awipsLogin = db.read() + db.close() + awipsLogin = string.replace(awipsLogin, "\n", "") + for forecaster in self._forecasterList: + id, awipsName, name = forecaster + forecasterNameList.append(name) + if awipsLogin == awipsName: + defaultForecasterNameList.append(name) + desForecasterNameList = "Forecaster:", "forecaster" + varList.append((desForecasterNameList, defaultForecasterNameList, "check", forecasterNameList)) + + if 
self._includeCreationTimeOnGUI: + # Get start date and time from user + desCreationDate = "Forecast Start Date (ex. 5/25/06)", "creationDate" + varList.append((desCreationDate, "", "alphaNumeric")) + desCreationTime = "Forecast Start Time in LT (ex 0900)", "creationTime" + varList.append((desCreationTime, "", "alphaNumeric")) + + # Launch the Spot Request selection GUI. + varDict = self._callProcessVariableList("Select Spot Request", varList, varDict={}) + if varDict is None: + return None, None, None + + productIssuance = varDict[desIssuanceList] + forecasters = varDict[desForecasterNameList] + if self._includeCreationTimeOnGUI: + self._creationDate = varDict[desCreationDate] + self._creationTime = varDict[desCreationTime] + + if validProductFound: + if varDict[desFireName] == "Manually Enter in Request Info": + return None, productIssuance, forecasters + else: + stqIndex = fireNameList.index(varDict[desFireName]) + return masterProductList[stqIndex], productIssuance, forecasters + else: + return None, productIssuance, forecasters + + def _callProcessVariableList(self, title, varList, varDict): + processVarList = ProcessVariableList.ProcessVariableList( + title, varList, varDict={}) + self._selectionStatus = processVarList.status() + if not self._selectionStatus == "OK": + return None # User Cancelled + return processVarList.varDict() + + + def _weInfoList(self): + # This is the list of possible weather parameters listed under the + # ...WEATHER PARAMETERS REQUESTED... section in your STQ Product. + # These are listed in the order they will appear in the product. + # + # Weather Elements: If you have a weather element to add, + # then send an email to Virgil.Middendorf@noaa.gov with your addition. + # I will baseline it. + # + # Phrases: You can override this method and edit the phrase method if you + # don't like the one used in baseline. 
+ + # For each element, we list: + # --an identifier + # --flag to indicate if this is a default element + # --the FWF phrase (or list of phrases) to include in the product + # --a list of search strings that must appear in + # the STQ product to specify the element. + # Each search string in the list may be a tuple in which case any of + # the entries in the tuple will satsify the search. + + if self._useRH: + dayRH = "RH" + nightRH = "RH" + else: + dayRH = "MinRH" + nightRH = "MaxRH" + if self._wind20ftHeader: + wind = [self.fireWind_label_phrase, self.fireWind_compoundPhrase] + else: + wind = [self.fireWind_compoundPhrase] + return [ + ("SKY/WEATHER", 1, self.skyWeather_byTimeRange_compoundPhrase, + [("SKY", "CLOUDS"), "WEATHER"]), + ("BEGIN/END OF PCPN", 0, self.pcpnTiming_phrase, + ["BEGIN", "END", "PCPN"]), + ("TEMPERATURE", 1, (self.dayOrNight_phrase, ["MaxT", "MinT", 1, 1]), + [("TEMPERATURE", "TEMP")]), + ("HUMIDITY", 1, (self.dayOrNight_phrase, [dayRH, nightRH, 1, 1]), + [("RH", "HUMIDITY")]), + ("DEWPOINT", 0, self.td_phrase, + ["DEWPOINT"]), + ("20 FOOT WINDS", 0, wind, + ["20", "WIND", ("FT", "FOOT")]), + ("EYE LEVEL WINDS", 1, self.fireEyeWind_compoundPhrase, + [("EYE","10"), "WIND"]), + ("SURFACE WINDS", 0, self.fireSfcWind_compoundPhrase, + ["SURFACE", "WIND"]), + ("WIND SHIFT", 0, self.fireWindShift_label_phrase, + ["WIND", "SHIFT"]), + ("RIDGE TOP WIND", 0, self.freeWind_phrase, + ["WIND", "RIDGE", "TOP"]), + ("SURROUNDING RIDGE", 0, self.surroundingRidgeWind_phrase, + ["SURROUNDING", "RIDGE", "WIND"]), + ("CWR", 0, self.cwr_phrase, + [("CWR", "WETTING RAIN")]), + ("POP", 0, self.pop_phrase, + [("PRECIPITATION", "CHANCE OF PCPN", "POP")]), + ("LIGHTNING ACTIVITY LEVEL", 0, self.lal_phrase, + [("LAL", "LIGHTNING")]), + ("SMOKE DISPERSION", 1, [self.mixingHgt_phrase, self.transportWind_phrase], + [("SMOKE", "DISPERSION")]), + ("MIXING HEIGHT", 0, self.mixingHgt_phrase, + ["MIXING"]), + ("TRANSPORT WINDS", 0, self.transportWind_phrase, + 
["TRANSPORT", "WIND"]), + ("LDSI", 0, self.ldsi_phrase, + ["LDSI"]), + ("LVORI", 0, self.lvori_phrase, + ["LVORI"]), + ("ADI",0, self.adi_phrase, + ["ADI"]), + ("DISPERSION INDEX", 0, self.dsi_phrase, + ["DISPERSION", "INDEX"]), + ("CLEARING INDEX", 0, self.smokeDispersal_phrase, + ["CLEARING", "INDEX"]), + ("STABILITY CLASS", 0, self.stabilityClass_phrase, + ["STABILITY"]), + ("MARINE LAYER", 0, self.marineLayer_phrase, + ["MARINE", "LAYER"]), + ("HAINES INDEX", 0, self.haines_phrase, + ["HAINES", "INDEX"]), + ("VENTILATION RATE", 0, self.smokeDispersal_phrase, + ["VENTILATION", "RATE"]), + ("SWELL HEIGHT", 0, self.swell_phrase, + ["SWELL", "HEIGHT"]), + ("WAVE HEIGHT", 0, self.waveHeight_phrase, + ["WAVE","HEIGHT"]), + ("SWELL PERIOD", 0, self.period_phrase, + ["SWELL", "PERIOD"]), + ("WIND WAVE", 0, self.windWave_phrase, + ["WIND", "WAVE"]), + ("RAINFALL AMOUNT", 0, self.qpf_phrase, + ["RAINFALL", "AMOUNT"]), + ("SNOWFALL AMOUNT", 0, self.snow_phrase, + ["SNOWFALL", "AMOUNT"]), + ("FREEZING LEVEL", 0, self.freezingLevel_phrase, + ["FREEZING", "LEVEL"]), + ("CEILING", 0, self.ceiling_phrase, + ["CEILING"]), + ("VISIBILITY", 0, self.visibility_phrase, + ["VISIBILITY"]), + ("ICING", 0, self.icing_phrase, + ["ICING"]), + ("HAZARDS", 0, self.ceiling_phrase, + ["HAZARDS"]), + ("HEAT INDEX", 0, self.heatIndex_phrase, + ["HEAT", "INDEX"]), + ] + + def _weInfoHiddenList(self): + # This is the list of possible weather parameters that are NOT listed + # under the ...WEATHER PARAMETERS REQUESTED... section in your STQ + # Product. There are times when a WFO will want to format certain + # weather elements in the spot forecast, but do NOT want those elements + # to be listed in the website. + + # These elements will be appended below the "weather parameters requested" + # elements and will be in the order as specified in _weInfoHiddenList + # + # All weather elements will be commented out and they are the same + # weather elements listed in _weInfoList. 
If you have a weather element + # to add, then send an email to Virgil.Middendorf@noaa.gov for baselining. + # + # Phrases: Phrases associated with each element listed in this method + # is still configured in the _weInfoList method + + # For each element, we list: + # --an identifier + # --flag to indicate if this is a default element + + return [ +## ("SKY/WEATHER", 0), +## ("BEGIN/END OF PCPN", 0), +## ("TEMPERATURE", 0), +## ("HUMIDITY", 0), +## ("DEWPOINT", 0), +## ("20 FOOT WINDS", 0), +## ("EYE LEVEL WINDS", 0), +## ("SURFACE WINDS", 0), +## ("WIND SHIFT", 0), +## ("RIDGE TOP WIND", 0), +## ("SURROUNDING RIDGE", 0), +## ("CWR", 0), +## ("POP", 0), +## ("LIGHTNING ACTIVITY LEVEL", 0), +## ("SMOKE DISPERSION", 0), +## ("MIXING HEIGHT", 0), +## ("TRANSPORT WINDS", 0), +## ("DISPERSION INDEX", 0), +## ("LDSI", 0), +## ("LVORI", 0), +## ("ADI", 0), +## ("CLEARING INDEX", 0), +## ("STABILITY CLASS", 0), +## ("MARINE LAYER", 0), +## ("HAINES INDEX", 0), +## ("VENTILATION RATE", 0), +## ("SWELL HEIGHT", 0), +## ("WAVE HEIGHT", 0), +## ("SWELL PERIOD", 0), +## ("WIND WAVE", 0), +## ("RAINFALL AMOUNT", 0), +## ("SNOWFALL AMOUNT", 0), +## ("FREEZING LEVEL", 0), +## ("CEILING", 0), +## ("VISIBILITY", 0), +## ("ICING", 0), +## ("HAZARDS", 0), +## ("HEAT INDEX", 0), + ] + + def _rowList(self, colWidth=1): + + ### 20 foot wind tabular phrase configuration ### + + if self._tableWindElementSplit == "no" and colWidth == 7: # 2 hourly + if self._bothAlphaNumericDict.get(self._20ftWindParm, "No") == "No": + wind = [("20 ft wind......", self._wind_value), + ("20 ft wind gust.", self._windGust_value)] + else: + wind = [("20 ft wind......", self._wind_value), + ("20 ft wind gust.", self._windGust_value), + ("20 ft wind dir..", self._windNumDir_value)] + elif self._tableWindElementSplit == "no" and colWidth > 7: # 3-4 hourly + if self._bothAlphaNumericDict.get(self._20ftWindParm, "No") == "No": + wind = [("20 ft wind......", self._windWithGust_value)] + else: + wind = [("20 ft 
wind......", self._windWithGust_value), + ("20 ft wind dir..", self._windNumDir_value)] + else: + if self._bothAlphaNumericDict.get(self._20ftWindParm, "No") == "No": + wind = [("20 ft wind dir..", self._windDir_value), # 1 hourly + ("20 ft wind spd..", self._windSpd_value), + ("20 ft wind gust.", self._windGust_value)] + else: + wind = [("20 ft wind dir..", self._windDir_value), # 1 hourly + ("20 ft wind dir..", self._windNumDir_value), + ("20 ft wind spd..", self._windSpd_value), + ("20 ft wind gust.", self._windGust_value)] + + ### eye level wind tabular phrase configuration ### + + if self._tableEyeWindElementSplit =="no" and colWidth == 7: + if self._bothAlphaNumericDict.get("EyeWind", "No") == "No": + eyewind = [("Eye level wind..", self._eyewind_value), + ("Eye lvl wnd gst.", self._eyewindGust_value)] + else: + eyewind = [("Eye level wind..", self._eyewind_value), + ("Eye lvl wnd gst.", self._eyewindGust_value), + ("Eye lvl wnd dir.", self._eyewindNumDir_value)] + elif self._tableEyeWindElementSplit == "no" and colWidth > 7: + if self._bothAlphaNumericDict.get("EyeWind", "No") == "No": + eyewind = [("Eye level wind..", self._eyewindWithGust_value)] + else: + eyewind = [("Eye level wind..", self._eyewindWithGust_value), + ("Eye lvl wnd dir.", self._eyewindNumDir_value)] + else: + if self._bothAlphaNumericDict.get("EyeWind", "No") == "No": + eyewind = [("Eye lvl wnd dir.", self._eyewindDir_value), + ("Eye lvl wnd spd.", self._eyewindSpd_value), + ("Eye lvl wnd gst.", self._eyewindGust_value)] + else: + eyewind = [("Eye lvl wnd dir.", self._eyewindDir_value), + ("Eye lvl wnd dir.", self._eyewindNumDir_value), + ("Eye lvl wnd spd.", self._eyewindSpd_value), + ("Eye lvl wnd gst.", self._eyewindGust_value)] + + ### surface wind (10m) tabular phrase configuration ### + + if self._tableSfcWindElementSplit =="no" and colWidth == 7: + + if self._bothAlphaNumericDict.get("SfcWind", "No") == "No": + sfcwind = [("Surface wind....", self._sfcwind_value), + ("Surface wnd 
gst.", self._sfcwindGust_value)] + else: + sfcwind = [("Surface wind....", self._sfcwind_value), + ("Surface wnd gst.", self._sfcwindGust_value), + ("Surface wnd dir.", self._sfcwindNumDir_value)] + + elif self._tableSfcWindElementSplit == "no" and colWidth > 7: + + if self._bothAlphaNumericDict.get("SfcWind", "No") == "No": + sfcwind = [("Surface wind....", self._sfcwindWithGust_value)] + else: + sfcwind = [("Surface wind....", self._sfcwindWithGust_value), + ("Surface wnd dir.", self._sfcwindNumDir_value)] + + else: + + if self._bothAlphaNumericDict.get("SfcWind", "No") == "No": + sfcwind = [("Surface wnd dir.", self._sfcwindDir_value), + ("Surface wnd spd.", self._sfcwindSpd_value), + ("Surface wnd gst.", self._sfcwindGust_value)] + else: + sfcwind = [("Surface wnd dir.", self._sfcwindDir_value), + ("Surface wnd dir.", self._sfcwindNumDir_value), + ("Surface wnd spd.", self._sfcwindSpd_value), + ("Surface wnd gst.", self._sfcwindGust_value)] + + ### ridge top wind tabular phrase configuration ### + + if self._tableRidgeElementSplit == "no" and colWidth >=7: + if self._bothAlphaNumericDict.get("RidgeWind", "No") == "No": + ridge = [("Ridgetop wind...", self._ridge_value)] + else: + ridge = [("Ridgetop wind...", self._ridge_value), + ("Ridge wnd dir..", self._ridgeNumDir_value)] + else: + if self._bothAlphaNumericDict.get("RidgeWind", "No") == "No": + ridge = [("Ridge wnd dir..", self._ridgeDir_value), + ("Ridge wnd spd..", self._ridgeSpd_value)] + else: + ridge = [("Ridge wnd dir..", self._ridgeDir_value), + ("Ridge wnd dir..", self._ridgeNumDir_value), + ("Ridge wnd spd..", self._ridgeSpd_value)] + + ### swell tabular phrase configuration ### + + if self._tableSwellElementSplit == "no" and colWidth >=7: + swell = [("Swell hgt (ft)..", self._swell_value)] + else: + swell = [("Swell direction.", self._swellDir_value), + ("Swell hgt (ft)..", self._swellHgt_value)] + + ### Mixing Height and Transport wind label configuration ### + + if self._tabularMixingHeightUnits 
== "ft" and colWidth > 4: + mixLabel = "Mix hgt (ft)...." + mixMetricLabel = "Mix hgt (m)....." + else: + mixLabel = "Mix hgt (kft)..." + mixMetricLabel = "Mix hgt (km)...." + + if self._transportWindLabel == "mix": + transLabel = "Mixing wind......" + transMetricLabel = "Mix wind (m/s).." + transDirLabel = "Mixng wind dir.." + transSpdLabel = "Mixng wind spd.." + transSpdMetricLabel = "Mix wnd spd m/s." + else: + transLabel = "Transport wind.." + transMetricLabel = "Tran wind (m/s)." + transDirLabel = "Transp wind dir." + transSpdLabel = "Transp wind spd." + transSpdMetricLabel = "Trans spd (m/s)." + + if self._tableTransElementSplit == "no" and colWidth >=7: + # Baseline + if self._includeMetricDispersion == "yes": + if self._bothAlphaNumericDict.get("TransWind", "No") == "No": + smoke = [(mixLabel, self._mixingHeight_value), + (mixMetricLabel, self._mixingHeightMetric_value), + (transLabel, self._trans_value), + (transMetricLabel, self._transMetric_value)] + trans = [(transLabel, self._trans_value), + (transMetricLabel, self._transMetric_value)] + else: + smoke = [(mixLabel, self._mixingHeight_value), + (mixMetricLabel, self._mixingHeightMetric_value), + (transLabel, self._trans_value), + (transDirLabel, self._transNumDir_value), + (transMetricLabel, self._transMetric_value)] + trans = [(transLabel, self._trans_value), + (transDirLabel, self._transNumDir_value), + (transMetricLabel, self._transMetric_value)] + else: + if self._bothAlphaNumericDict.get("TransWind", "No") == "No": + smoke = [(mixLabel, self._mixingHeight_value), + (transLabel, self._trans_value)] + trans = [(transLabel, self._trans_value)] + else: + smoke = [(mixLabel, self._mixingHeight_value), + (transLabel, self._trans_value), + (transDirLabel, self._transNumDir_value)] + trans = [(transLabel, self._trans_value), + (transDirLabel, self._transNumDir_value)] + else: + # Baseline + if self._includeMetricDispersion == "yes": + if self._bothAlphaNumericDict.get("TransWind", "No") == "No": + smoke = 
[(mixLabel, self._mixingHeight_value), + (mixMetricLabel, self._mixingHeightMetric_value), + (transDirLabel, self._transDir_value), + (transSpdLabel, self._transSpd_value), + (transSpdMetricLabel, self._transSpdMetric_value)] + trans = [(transDirLabel, self._transDir_value), + (transSpdLabel, self._transSpd_value), + (transSpdMetricLabel, self._transSpdMetric_value)] + else: + smoke = [(mixLabel, self._mixingHeight_value), + (mixMetricLabel, self._mixingHeightMetric_value), + (transDirLabel, self._transDir_value), + (transDirLabel, self._transNumDir_value), + (transSpdLabel, self._transSpd_value), + (transSpdMetricLabel, self._transSpdMetric_value)] + trans = [(transDirLabel, self._transDir_value), + (transDirLabel, self._transNumDir_value), + (transSpdLabel, self._transSpd_value), + (transSpdMetricLabel, self._transSpdMetric_value)] + else: + if self._bothAlphaNumericDict.get("TransWind", "No") == "No": + smoke = [(mixLabel, self._mixingHeight_value), + (transDirLabel, self._transDir_value), + (transSpdLabel, self._transSpd_value)] + trans = [(transDirLabel, self._transDir_value), + (transSpdLabel, self._transSpd_value)] + else: + smoke = [(mixLabel, self._mixingHeight_value), + (transDirLabel, self._transDir_value), + (transDirLabel, self._transNumDir_value), + (transSpdLabel, self._transSpd_value)] + trans = [(transDirLabel, self._transDir_value), + (transDirLabel, self._transNumDir_value), + (transSpdLabel, self._transSpd_value)] + if self._includeMetricDispersion == "yes": + mix = [(mixLabel, self._mixingHeight_value), + (mixMetricLabel, self._mixingHeightMetric_value)] + else: + mix = [(mixLabel, self._mixingHeight_value)] + + ### sky/wx/hazard tabular phrase configuration ### + + if self._elementFormatDict.get("Sky", "alpha") == "alpha": + if self._bothAlphaNumericDict.get("Sky", "No") == "No": + skywx = [("Sky cover.......", self._sky_value), + ("Weather cov.....", self._weatherCov_value), + ("Weather type....", self._weatherType_value), + ("Tstm 
cov........", self._tstmCov_value)] + else: + skywx = [("Sky cover.......", self._sky_value), + ("Sky (%).........", self._numSky_value), + ("Weather cov.....", self._weatherCov_value), + ("Weather type....", self._weatherType_value), + ("Tstm cov........", self._tstmCov_value)] + else: + skywx = [("Sky (%).........", self._sky_value), + ("Weather cov.....", self._weatherCov_value), + ("Weather type....", self._weatherType_value), + ("Tstm cov........", self._tstmCov_value)] + + hazard = [("Hazard VTEC 1..", self._wwa_value), + ("Hazard VTEC 2..", self._wwa2_value), + ("Hazard VTEC 3..", self._wwa3_value)] + + return [ + # Set to Directive requirements + # Each entry is a tuple: + # (Narrative Element, narrativeToo, tableRows) + # + # If narrativeToo is 1, then the narrative phrase will be included + # in the narrative portion of the product as well. + # tableRows is a list of (label:method) pairs. + # + ("SKY/WEATHER" , 1, skywx), + ("TEMPERATURE" , 1,[("Temp............", self._temp_value)]), + ("DEWPOINT" , 1,[("Dewpoint........", self._td_value)]), + ("HUMIDITY" , 1,[("RH..............", self._rh_value)]), + ("20 FOOT WINDS" , 1, wind), + ("EYE LEVEL WINDS" , 1, eyewind), + ("SURFACE WINDS" , 1, sfcwind), + #("RIDGE TOP WIND" , 1, ridge), + #("SMOKE DISPERSION" , 1, smoke), + #("MIXING HEIGHT" , 1, mix), + #("TRANSPORT WINDS" , 1, trans), + ("DISPERSION INDEX" , 1,[("Dispersion......", self._dsi_value)]), + ("LDSI" , 1,[("Dispersion idx..", self._ldsi_value)]), + ("LVORI" , 1,[("LVORI...........", self._lvori_value)]), + ("ADI" , 1,[("ADI.............", self._adi_value)]), + #("CWR" , 1,[("CWR.............", self._cwr_value)]), + ("POP" , 1,[("Chc of pcpn (%).", self._pop_value)]), + #("LIGHTNING ACTIVITY LEVEL", 1,[("LAL.............", self._lal_value)]), + ("HAINES INDEX" , 1,[("Haines Index....", self._haines_value)]), + ("VENTILATION RATE" , 1,[("Vrate kt-ft/1000", self._ventrate_value)]), + ("SWELL HEIGHT" , 1, swell), + ("SWELL PERIOD" , 1,[("Swell period 
(s)", self._swellPeriod_value)]), + ("WIND WAVE" , 1,[("Wind wave (ft)..", self._windWave_value)]), + ("WAVE HEIGHT" , 1,[("Wave height (ft)", self._waveHeight_value)]), + ("FREEZING LEVEL" , 1,[("Fz level (kft)..", self._freezingLevel_value)]), + ("CEILING" , 1,[("Ceiling (kft)...", self._ceiling_value)]), + ("VISIBILITY" , 1,[("Visibility (sm).", self._visibility_value)]), + ("ICING" , 1,[("Icing...........", self._ceiling_value)]), + ("HAZARDS" , 0, hazard), + ("HEAT INDEX" , 1,[("Heat index (F)..", self._heatIndex_value)]), + ] + + def _getProductInfo(self, product, issuance, forecasters): + # Parse the spot request information selected and + # return the FireInfo for display. + timezone = os.environ["TZ"] + spotRequestInfo = [ + ("PROJECT NAME:", "fireName", "'xxxx'"), + ("PROJECT TYPE:", "fireType", "'WILDFIRE'"), + ("REQUESTING AGENCY:", "requestingAgency", "'xxxx'"), + ("REQUESTING OFFICIAL:", "agencyContact", "'yyyy'"), + ("DLAT:", "fireLatitude", "28.27"), + ("DLON:", "fireLongitude", "82.19"), + ("SIZE (ACRES):", "fireSize", "1"), + ("SITE:", "wfoID", "''"), + ("OFILE:", "webSiteTag", "''"), + ("TIMEZONE:", "webTimeZone", "timezone"), + ("DATE:", "fireDate", "'1/1/01'"), + ("TIME:", "fireTime", "'1300'"), + ] + + obs = [] + self._spotList = ["This is a New Incident"] + remarksFlag = 0 + remarks = "" + + self._periodElementDict = { + "Today": [], "Tonight": [], "Tomorrow": [] + } + + self._periodAllElementDict = { + "Today": [], "Tonight": [], "Tomorrow": [] + } + + # Set default values + for field, variable, default in spotRequestInfo: + exec("self._"+variable + " = " + default) + + # If no issuance to use, we are done. + if issuance is None: + return + self._productIssuance = issuance + + # If no forecasters included, we are done. 
+ if forecasters is None: + return + self._forecasters = forecasters + + # If no product to parse, we are done + if product is None: + # Use default list of weather elements + for element, defaultFlag, phrases, searchStrings in self._weInfoList(): + if defaultFlag: + self._periodAllElementDict["Today"].append(element) + self._periodAllElementDict["Tonight"].append(element) + self._periodAllElementDict["Tomorrow"].append(element) + self._periodElementDict["Today"].append(element) + self._periodElementDict["Tonight"].append(element) + self._periodElementDict["Tomorrow"].append(element) + #self._allPeriodElementDict["Today"].append(element) + #self._allPeriodElementDict["Tonight"].append(element) + #self._allPeriodElementDict["Tomorrow"].append(element) + #if defaultFlag: + # self._periodElementDict["Today"].append(element) + # self._periodElementDict["Tonight"].append(element) + # self._periodElementDict["Tomorrow"].append(element) + for element, defaultFlag in self._weInfoHiddenList(): + if defaultFlag: + self._periodElementDict["Today"].append(element) + self._periodElementDict["Tonight"].append(element) + self._periodElementDict["Tomorrow"].append(element) + self._periodAllElementDict["Today"].append(element) + self._periodAllElementDict["Tonight"].append(element) + self._periodAllElementDict["Tomorrow"].append(element) + return + + # Parse product + wxParmFlag = 0 + for line in product: + print(line) + if line.find("...WEATHER PARAMETERS REQUESTED...") >= 0: + wxParmFlag = 1 + if line.find("SITE:") >= 0: + wxParmFlag = 0 + # If the line has a colon, split it into fieldName/value + cleanLine = string.replace(string.upper(line),"\n", "") + cleanLine = cleanLine.strip() + index = cleanLine.find(":") + if index >= 0: + # Handle STQ fields (lines with a colon) + fieldName = cleanLine[:index].strip() + value = cleanLine[index+1:].strip() + + for field, variable, default in spotRequestInfo: + if field in cleanLine and cleanLine.find(field) == 0: + # Assign to variable + 
exec("self._"+variable + " = value") + + if wxParmFlag: + for element, defaultFlag, phrases, searchStrings in self._weInfoList(): + if self._checkStrs(searchStrings, fieldName) == 1: + #Enter flags in dictionary e.g. 1,1,1 for Today, Tonight, Tomorrow + flags = value.split(",") + if flags[0] == "1": + self._periodElementDict["Today"].append(element) + if flags[1] == "1": + self._periodElementDict["Tonight"].append(element) + if flags[2] == "1": + self._periodElementDict["Tomorrow"].append(element) + self._periodAllElementDict["Today"].append(element) + self._periodAllElementDict["Tonight"].append(element) + self._periodAllElementDict["Tomorrow"].append(element) + + if "ELEV=" in line and "TIME=" in line: + ob = string.replace(string.upper(line),"\n","") + if "ELEV= TIME=" not in ob: + obs.append(ob) + if remarksFlag and "FORECAST ELEMENTS" not in line: + remarks = remarks + line + if "...REMARKS..." in line: + remarksFlag = 1 + if "...WEATHER PARAMETERS REQUESTED..." in line: + remarksFlag = 0 + remarks = string.replace(remarks,"\n\n","\n") + remarks = string.replace(remarks,"\n\n","\n") + + for element, defaultFlag in self._weInfoHiddenList(): + if defaultFlag: + if len(self._periodElementDict["Today"]) != 0: + self._periodElementDict["Today"].append(element) + if len(self._periodElementDict["Tonight"]) != 0: + self._periodElementDict["Tonight"].append(element) + if len(self._periodElementDict["Tomorrow"]) != 0: + self._periodElementDict["Tomorrow"].append(element) + self._periodAllElementDict["Today"].append(element) + self._periodAllElementDict["Tonight"].append(element) + self._periodAllElementDict["Tomorrow"].append(element) + + def _displayFireInfo(self): + + # Build and display GUI using the fireInfo + varList = [] + + # Fire Type Section of the GUI + desTypeList = "Type of Fire:", "fireType" + varList.append((desTypeList, self._fireType, "radio", self._typeList)) + + # requesting Agency Section of the GUI + desAgencyNameList = "Agency:", "requestingAgency" 
+ agencyNameList = [] + findAgencyFlag = 0 + for agency in self._agencyList: + id,name = agency + agencyNameList.append(name) + if self._requestingAgency == name: + findAgencyFlag = 1 + requestingAgencyDefault = self._requestingAgency + if not findAgencyFlag: + agencyNameList.append("Unlisted") + requestingAgencyDefault = "Unlisted" + varList.append((desAgencyNameList, requestingAgencyDefault, "radio", agencyNameList)) + + # Include Extendeds/Outlook Section of the GUI + if not self._shortTermOnly: + questionList = ["Include Day 3-5 Extended?", + "Include Day 6-7 Extended?", + "Include Day 8-14 Outlook?"] + desExtendedQuestions = "Check Items to Include:","extendedQuestions" + varList.append((desExtendedQuestions, [], "check", questionList)) + + # Forecast Type + desFcstType = "What Type of Forecast?", "forecastType" + labelList = [] + for label, forecastType in self._forecastTypeList: + labelList.append(label) + varList.append((desFcstType, self._defaultForecastType, "radio", labelList)) + + # Include Ignition Time Forecast Section of the GUI + if self._includeIgnitionOptionOnGUI: + desIT = ("Include Ignition Times?", "withIgnitionTimes") + varList.append((desIT, self._withIgnitionTimes, "radio", ["yes", "no"])) + + # Unlisted Agency Name Section of the GUI + if not findAgencyFlag: + desOtherAgencyName = "Name of Agency if not listed....", "otherAgencyName" + varList.append((desOtherAgencyName, self._requestingAgency, "alphaNumeric")) + + # Fire Name Section of the GUI + desFireName = "Name of Fire ...................................", "fireName" + varList.append((desFireName, self._fireName, "alphaNumeric")) + + # Fire Time Section of the GUI + desFireTime = "Time of Fire .....................................", "fireTime" + varList.append((desFireTime, self._fireTime, "alphaNumeric")) + + # Fire Date Section of the GUI + desFireDate = "Date of Fire .....................................", "fireDate" + varList.append((desFireDate, self._fireDate, "alphaNumeric")) + 
+ # Agency Contact Section of the GUI + desAgencyContact = "Name of Agency Contact..........", "agencyContact" + varList.append((desAgencyContact, self._agencyContact, "alphaNumeric")) + + # Fire Latitude Section of the GUI + desFireLatitude = "Fire Latitude (Deg).......................", "fireLatitude" + varList.append((desFireLatitude, self._fireLatitude, "alphaNumeric")) + + # Fire Longitude Section of the GUI + desFireLongitude = "Fire Longitude (Deg)...................", "fireLongitude" + varList.append((desFireLongitude, self._fireLongitude, "alphaNumeric")) + + # Fire Size Section of the GUI + desFireSize = "Fire Size (Acres) .........................", "fireSize" + varList.append((desFireSize, self._fireSize, "alphaNumeric")) + + # Forecast Elements Section of the GUI + tableHoursDesc = "Tab Hrs" + if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: + desElementList = "Today Elements", "todayElements" + varList.append((desElementList, self._periodElementDict["Today"], + "check", self._periodAllElementDict["Today"])) + desTableRes = tableHoursDesc,"todayTableRes" + varList.append((desTableRes, self._defaultTabularResolution["Today"],"radio", + self._tabularResolutionDict["Today"])) + desElementList = "Tonight Elements", "tonightElements" + varList.append((desElementList, self._periodElementDict["Tonight"] , + "check", self._periodAllElementDict["Tonight"] )) + if self._tabularAllPeriods == "yes": + desTableRes = tableHoursDesc,"tonightTableRes" + varList.append((desTableRes, self._defaultTabularResolution["Tonight"],"radio", + self._tabularResolutionDict["Tonight"])) + desElementList = "Tomorrow Elements", "tomorrowElements" + varList.append((desElementList, self._periodElementDict["Tomorrow"], + "check", self._periodAllElementDict["Tomorrow"] )) + if self._tabularAllPeriods == "yes": + desTableRes = tableHoursDesc,"tomorrowTableRes" + varList.append((desTableRes, self._defaultTabularResolution["Tomorrow"],"radio", + 
self._tabularResolutionDict["Tomorrow"])) + + if self._productIssuance in ["Afternoon with 4 periods", "Evening Update with 4 periods", + "Early Morning Update with 4 periods"]: + desElementList = "Tomorrow Night Elements", "tomorrowNightElements" + varList.append((desElementList, self._periodElementDict["Tomorrow"], + "check", self._periodAllElementDict["Tomorrow"] )) + if self._tabularAllPeriods == "yes": + desTableRes = tableHoursDesc,"tomorrowNightTableRes" + varList.append((desTableRes, self._defaultTabularResolution["Tomorrow"],"radio", + self._tabularResolutionDict["Tomorrow"])) + desElementList = "Next Day Elements", "nextDayElements" + varList.append((desElementList, self._periodElementDict["Tomorrow"], + "check", self._periodAllElementDict["Tomorrow"] )) + if self._tabularAllPeriods == "yes": + desTableRes = tableHoursDesc,"nextDayTableRes" + varList.append((desTableRes, self._defaultTabularResolution["Tomorrow"],"radio", + self._tabularResolutionDict["Tomorrow"])) + + # Launch the Spot Request Quality Control GUI. + varDict = self._callProcessVariableList("Input Info", varList, varDict={}) + if varDict is None: + return None + + # Set up varDict for forecastType using labels + value = varDict[desFcstType] + for label, forecastType in self._forecastTypeList: + if label == value: + varDict[desFcstType] = forecastType + break + + # This section of code filters the forecaster entries to ensure that + # single quotes are not included. 
+ if not findAgencyFlag: + try: + varDict[desOtherAgencyName] = string.replace(varDict[desOtherAgencyName],"\'","") + except AttributeError: + print("Other Agency Name is not a string.") + try: + varDict[desFireName] = string.replace(varDict[desFireName],"\'","") + except AttributeError: + print("Fire Name is not a string.") + try: + varDict[desAgencyContact] = string.replace(varDict[desAgencyContact],"\'","") + except AttributeError: + print("Fire Size is not a string.") + try: + varDict[desFireSize] = string.replace(str(varDict[desFireSize]),"\'","") + except AttributeError: + print("Fire Size is not a string.") + try: + varDict[desFireLatitude] = string.replace(str(varDict[desFireLatitude]),"\'","") + except AttributeError: + print("Latitude is not a string.") + try: + varDict[desFireLongitude] = string.replace(str(varDict[desFireLongitude]),"\'","") + except AttributeError: + print("Longitude is not a string.") + try: + varDict[desFireTime] = string.replace(str(varDict[desFireTime]),"\'","") + except AttributeError: + print("Ignition Time is not a string.") + try: + varDict[desFireDate] = string.replace(str(varDict[desFireDate]),"\'","") + except AttributeError: + print("Ignition Date is not a string.") + + # This section of code filters the forecaster entries to ensure that + # double quotes are not included. 
+ if not findAgencyFlag: + try: + varDict[desOtherAgencyName] = string.replace(varDict[desOtherAgencyName],"\"","") + except AttributeError: + print("Other Agency Name is not a string.") + try: + varDict[desFireName] = string.replace(varDict[desFireName],"\"","") + except AttributeError: + print("Fire Name is not a string.") + try: + varDict[desAgencyContact] = string.replace(varDict[desAgencyContact],"\"","") + except AttributeError: + print("Fire Size is not a string.") + try: + varDict[desFireSize] = string.replace(varDict[desFireSize],"\"","") + except AttributeError: + print("Fire Size is not a string.") + try: + varDict[desFireLatitude] = string.replace(varDict[desFireLatitude],"\"","") + except AttributeError: + print("Latitude is not a string.") + try: + varDict[desFireLongitude] = string.replace(varDict[desFireLongitude],"\"","") + except AttributeError: + print("Longitude is not a string.") + try: + varDict[desFireTime] = string.replace(varDict[desFireTime],"\"","") + except AttributeError: + print("Ignition Time is not a string.") + try: + varDict[desFireDate] = string.replace(varDict[desFireDate],"\"","") + except AttributeError: + print("Ignition Date is not a string.") + + # convert lat/lon to floats + try: + varDict[desFireLatitude] = string.atof(varDict[desFireLatitude]) + except ValueError: + print("Latitude is not a float.") + try: + varDict[desFireLongitude] = string.atof(varDict[desFireLongitude]) + except ValueError: + print("Longitude is not a float.") + + # Convert fireTime + fireTime = varDict[desFireTime] + fireTime = "000" + str(int(float(fireTime))) + fireTime = fireTime[-4:] + varDict[desFireTime] = fireTime + + # Here are more varDict settings that need to be set before we launch + # the formatter. 
+ varDict[("Product Issuance:", "productIssuance")] = self._productIssuance + varDict[("Forecaster:", "forecaster")] = self._forecasters + if self._includeCreationTimeOnGUI: + varDict[("Creation Date", "creationDate")] = self._creationDate + varDict[("Creation Time", "creationTime")] = self._creationTime + varDict[("WebSiteTag:", "webSiteTag")] = self._webSiteTag + varDict[("WFOid:", "wfoID")] = self._wfoID + varDict[("TimeZone:", "fireTZ")] = self._webTimeZone + + if self._shortTermOnly: + varDict[("Check Items to Include:","extendedQuestions")] = [] + + return varDict + + # From FWF. Needed to change .EXTENDED... to .FORECAST DAYS 3 THROUGH 7 + def setLabel(self, tree, component): + if self._includeExtended: + if "Include Day 3-5 Extended?" not in self._extendedQuestions: + component.set("words", ".FORECAST DAYS 6 THROUGH 7...\n") + else: + component.set("words", ".FORECAST DAYS 3 THROUGH 7...\n") + else: + component.set("words", ".FORECAST DAYS 3 THROUGH 5...\n") + return self.DONE() + + # From FWF. Modifed to write the output to a file in a user specified + # directory on the local lx machine. In addition, added sections to + # insert headlines, discussion, and 8-14 day outlook. + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Quality Control Gui data + error = self._qualityControlFormData() + if error is not None: + return error + + # Get the areaList -- derived from the lat, lon, size of fire (acres), + # and the name of the fire. + error = self._determineAreaList(argDict) + if error is not None: + return error + + # Set the extended forecast configuration based on what was + # requested by the user. 
+ error = self._setExtendedConfig() + if error is not None: + return error + + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the unrepresentative statement for the Product + fcst = self._makeUnrepresentStatement(fcst, argDict) + + # Generate the Headlines for the Product + for editArea, areaLabel in self._areaList: + fcst = self._makeHeadline(fcst, editArea, areaLabel, argDict) + + # Generate the Discussion section + fcst = self._makeDiscussion(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + + # Generate the summary extended section (if wanted) + fcst = self._makeSummaryExtended(fcst, argDict) + + # Generate the 8 to 14 Day Outlook section + error = self._generateOutlookLabels(argDict) + if error is not None: + return error + fcst = self._make8to14DayOutlook(fcst, argDict) + + fcst = self._postProcessProduct(fcst, argDict) + + return fcst + + def _determineTimeRanges(self, argDict): + # Set up the Narrative Definition and initial Time Range + self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict), argDict["creationTime"]) + + if self._tableStartTimeMode == "current": + # Add a "custom" component to sample data from current time + # to product start time + ct = 
self._issuanceInfo.issueTime() + currentTime = AbsTime.absTimeYMD(ct.year, ct.month, ct.day, + ct.hour) + productStart = self._issuanceInfo.timeRange().startTime() + tr = TimeRange.TimeRange(currentTime, productStart) + if tr.duration() > 0: + self._issuanceInfo.narrativeDef().append(\ + ("Custom", ("PreFirePeriod1", tr))) + + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._expireTime = self._issuanceInfo.expireTime() + self._issueTime = self._issuanceInfo.issueTime() + self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + if self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + # Determine the extended range + if self._individualExtended == 1: + self._extendedStart = self._timeRange.endTime() - 24*5*3600 + else: + self._extendedStart = self._timeRange.endTime() + self._extendedRange = TimeRange.TimeRange( + self._extendedStart, self._extendedStart + 3600) + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + return None + +# Quality Control Form Information from user dialog to ensure completeness. + + + # 04/24/07: Tabular/Narrative is okay for wildfires. Changed code to make + # Tabular Only into Tabular/Narrative for wildfires. + + # From FWS_Overrides. Fixed a bug that causes the formatter to crash + # when a number was entered for the agency or the contact. Added a + # method called _convertToStr to do this cleanly. + def _qualityControlFormData(self): + + # If fireSize is not an integer, then I default the size to 1 acre. + # This will allow the formatter to run even if the user puts invalid + # characters into the size field (like 10-20). 
+ try: + self._fireSize = int(float(self._fireSize)+0.5) + except ValueError: + self._fireSize = 1 + if self._fireSize <= 0: + self._fireSize = 1 + + try: + lat = float(self._fireLatitude) + except ValueError: + return "Invalid latitude value." + if lat < 0.0 or lat > 90: + return "Invalid latitude value." + + try: + lon = float(self._fireLongitude) + except ValueError: + return "Invalid longitude value." + if lon < 0.0 or lon > 180.0: + return "Invalid longitude value. Must be positive." + + if len(self._forecaster) == 0: + return "You must select at least one forecaster in the list." + + if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: + elementLists = [self._todayElements, self._tonightElements, self._tomorrowElements] + elif self._productIssuance in ["Afternoon with 4 periods", "Evening Update with 4 periods", + "Early Morning Update with 4 periods"]: + elementLists = [self._tonightElements, self._tomorrowElements, + self._tomorrowNightElements, self._nextDayElements] + else: + elementLists = [self._tonightElements, self._tomorrowElements] + + # Check to make sure at least weather element is requested. + elementsFound = 0 + for elementList in elementLists: + if len(elementList) > 0: + elementsFound = 1 + break + if not elementsFound and \ + "Include Day 3-5 Extended?" not in self._extendedQuestions and \ + "Include Day 6-7 Extended?" not in self._extendedQuestions and \ + "Include Day 8-14 Outlook?" not in self._extendedQuestions: + return "You must select at least one weather element to " + \ + "forecast in the gui." + + # Code to ensure the wildfireElementList parameters are included in + # the FWS product (if this is a wildfire incident) was added to this + # method. 
+ if self._fireType.upper() == "WILDFIRE": + for element in self._wildfireElementList: + for elementList in elementLists: + if element not in elementList and len(elementList) != 0: + elementList.append(element) + if self._tabularForWildfire == "no" and \ + self._forecastType == "Tabular Only": + self._forecastType = "Tabular/Narrative" + + self._fireName = self._convertToStr(self._fireName) + if len(self._fireName) == 0: + return "You must enter the Name of Fire." + + self._agencyContact = self._convertToStr(self._agencyContact) + if len(self._agencyContact) == 0: + return "You must enter the Name of Agency Contact." + + self._requestingAgency = self._convertToStr(self._requestingAgency) + if len(self._requestingAgency) == 0: + return "You must choose a requesting agency." + + if self._requestingAgency == "Unlisted": + self._otherAgencyName = self._convertToStr(self._otherAgencyName) + if len(self._otherAgencyName) == 0: + return "You must enter a requesting agency." + + def _convertToStr(self, var): + try: + stringSize = len(var) + return var + except TypeError: + try: + var = repr(int(var+0.5)) + except TypeError: + var = repr(var) + return var + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + self._todayTableRes = "None" + self._tonightTableRes = "None" + self._tomorrowTableRes = "None" + self._tomorrowNightTableRes = "None" + self._nextDayTableRes = "None" + self._todayElements = [] + self._tonightElements = [] + self._tomorrowElements = [] + self._tomorrowNightElements = [] + self._nextDayElements = [] + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._language = 
argDict["language"] + + # Adjust creationTime if user-supplied creation date and time + if self._includeCreationTimeOnGUI: + if self._creationDate != "": + argDict["creationTime"] = self._getTime(self._creationDate, self._creationTime) + return None + + def _parseTime(self,date,t,rtz): + try: + cTime = time.strptime(t + ' ' + date + ' ' + rtz, '%H%M %m/%d/%y %Z') + except ValueError: + cTime = time.strptime(t + ' ' + date + ' ' + rtz, '%H%M %m/%d/%Y %Z') + + return cTime + + def _getTime(self, date, t): + # Make a unix time integer from the given date and time strings + if t == "": + t = "0000" + else: + t = "000" + repr(int(t)) + t = t[-4:] + rtz = self._getActualTimeZone() + stz = time.tzname[0] + dtz = time.tzname[1] + otz = stz[0:1] + ptz = rtz[0:1] + offset = 0 + if otz == ptz: + cTime = self._parseTime (date,t,rtz) + else: + if ptz == "E": + if otz == "E": + offset = 0 + elif otz == "C": + offset = -1 + elif otz == "M": + offset = -2 + elif otz == "P": + offset = -3 + elif ptz == "C": + if otz == "E": + offset = 1 + elif otz == "C": + offset = 0 + elif otz == "M": + offset = -1 + elif otz == "P": + offset = -2 + elif ptz == "M": + if otz == "E": + offset = 2 + elif otz == "C": + offset = 1 + elif otz == "M": + offset = 0 + elif otz == "P": + offset = -1 + elif ptz == "P": + if otz == "E": + offset = 3 + elif otz == "C": + offset = 2 + elif otz == "M": + offset = 1 + elif otz == "P": + offset = 0 + if stz[1:3] == rtz[1:3]: + cTime = self._parseTime (date,t,stz) + else: + cTime = self._parseTime (date,t,dtz) + + return time.mktime(cTime) + offset*3600 + + def _getActualTimeZone(self): + # Return the correct time zone based on DST and fireTZ variable + if self._fireTZ.find('/') >= 0: + standardTimeZone, daylightTimeZone = time.tzname + elif len(self._fireTZ) == 9: + standardTimeZone = self._fireTZ[:4] + daylightTimeZone = self._fireTZ[5:] + else: + standardTimeZone = self._fireTZ[:3] + daylightTimeZone = self._fireTZ[4:] + + if self.daylight() == 1: + 
actualTimeZone = daylightTimeZone + else: + actualTimeZone = standardTimeZone + return actualTimeZone + + def _makeFcstTimeStatement(self, fcst, argDict): + requestWords = self._getRequestWords() + rtz = self._getActualTimeZone() + stz = time.tzname[0] + dtz = time.tzname[1] + otz = stz[0:1] + ptz = rtz[0:1] + if otz == ptz: + self._fireDateTime = self._parseTime (self._fireDate,self._fireTime,rtz) + fcst = fcst + time.strftime( + 'Forecast is based on ' + requestWords + ' time of %H%M %Z on %B %d. ', + self._fireDateTime) + else: + offset = 0 + if ptz == "E": + if otz == "E": + offset = 0 + elif otz == "C": + offset = -1 + elif otz == "M": + offset = -2 + elif otz == "P": + offset = -3 + elif ptz == "C": + if otz == "E": + offset = 1 + elif otz == "C": + offset = 0 + elif otz == "M": + offset = -1 + elif otz == "P": + offset = -2 + elif ptz == "M": + if otz == "E": + offset = 2 + elif otz == "C": + offset = 1 + elif otz == "M": + offset = 0 + elif otz == "P": + offset = -1 + elif ptz == "P": + if otz == "E": + offset = 3 + elif otz == "C": + offset = 2 + elif otz == "M": + offset = 1 + elif otz == "P": + offset = 0 + if stz[1:3] == rtz[1:3]: + self._fireDateTime = self._parseTime (self._fireDate,self._fireTime,stz) + tempTime = time.mktime(self._fireDateTime) + offset*3600 + self._fireDateTime = time.localtime(tempTime) + else: + self._fireDateTime = self._parseTime (self._fireDate,self._fireTime,dtz) + tempTime = time.mktime(self._fireDateTime) + offset*3600 + self._fireDateTime = time.localtime(tempTime) + try: + fireDateTime = time.strptime( + self._fireTime + ' ' + self._fireDate, '%H%M %m/%d/%y') + except ValueError: + fireDateTime = time.strptime( + self._fireTime + ' ' + self._fireDate, '%H%M %m/%d/%Y') + fcst = fcst + time.strftime( + 'Forecast is based on ' + requestWords + ' time of %H%M ' + rtz + ' on %B %d. 
', + fireDateTime) + fcst = fcst + "\n" + self._makeFireTimeRange() + return fcst + + def _makeFireTimeRange(self): + # Make a 1-hour fire time range for the fireTime + if self._withIgnitionTimes == "no": + return None + fireDateTime = time.mktime(self._fireDateTime) + self._fireTR = self.makeTimeRange(fireDateTime, fireDateTime+3600) + print("Fire Time Range:", self._fireTR) + + def _checkFireTR(self, tr): + if self._fireTR is None: + return 0 + return self._fireTR.overlaps(tr) + + # This is a new method that Matt Davis wrote. Figures out whether or not + # we are using a ignition time, request time, or incident time. + def _getRequestWords(self): + if self._fireType.upper() == "WILDFIRE": + return "request" + elif self._fireType.upper() == "PRESCRIBED": + return "ignition" + else: + return "incident" + + # Import the discussion from a previously edited discussion file. + def _makeDiscussion(self, fcst, argDict): + + discussionHeader = "" + discussionHeader = ".DISCUSSION...\n" + + if self._insertDiscussionFromFile == 1: + discussion = "" + if os.path.isfile(self._discussionFile): + input = open(self._discussionFile) + text = input.readlines() + for line in text: + discussion = discussion + line + discussion = string.join(string.split(discussion,"\n\n"),"\n") + discussion = string.join(string.split(discussion,"\n\n"),"\n") + return fcst + discussionHeader + discussion + "\n" + else: + discussion = "...Put discussion text here..." 
+ return fcst + discussionHeader + discussion + "\n\n" + elif self._insertDiscussionFromFile == 2: + version = 0 + fwfPil = self._statePil + self._fwfPil + searchString="" + product = self.getPreviousProduct(fwfPil, searchString, version=version) + product = string.split(product, "\n") + discussion = "" + disFlag = 0 + foundDiscussion = 0 + for line in product: + if string.find(line,"Discussion...") != -1: + disFlag = 1 + foundDiscussion = 1 + try: + if line[2] == "Z" and line[-1] == "-" and \ + (line[6] == "-" or line[6] == ">"): + disFlag = 0 + except IndexError: + #print "Discussion Index Error",line + a = 0 + if line[:2] == "$$": + disFlag = 0 + if disFlag: + discussion = discussion + line + "\n" + if foundDiscussion: + return fcst + discussion + "\n\n" + else: + discussion = "...Put discussion text here..." + return fcst + discussionHeader + discussion + "\n\n" + else: + return fcst + discussionHeader + "\n\n\n" + +# Create areaList based on lat/lon/size/firename. + def _determineAreaList(self, argDict): + + # Size of the fire is entered as acres. + # Convert this area into square kilometers. + # createLatLonArea only needs the length of the side of a square. + size_out = int(math.sqrt(float(self._fireSize)/247.0) + 0.5) + area = self.createLatLonArea(float(self._fireLatitude), + float(0.0 - self._fireLongitude), + size_out) + # SET UP FOR HAZARDS + # Save to server + self.saveEditAreas([area]) + # Create Hazards Table for this area + hazards = HazardsTable.HazardsTable( + argDict["ifpClient"], [[area.getId().getName()]], "FWS", + self.filterMethod, argDict["databaseID"], + self._fullStationID, + activeTableName = argDict['vtecActiveTable'], + vtecMode = argDict['vtecMode'], + dataMgr=argDict['dataMgr']) + argDict["hazards"] = hazards + # Remove from server + self.deleteEditAreas([area]) + + self._areaList = [(area, self._fireName)] + +# Set the extended configuration based on user input. 
+ def _setExtendedConfig(self): + + # Include extended forecast if wanted and allowed. + if "Include Day 3-5 Extended?" not in self._extendedQuestions and \ + "Include Day 6-7 Extended?" not in self._extendedQuestions: + if self._individualExtended == 1: + self._individualExtended = 0 + if self._summaryExtended == 1: + self._summaryExtended = 0 + self._extendedLabel = 0 + else: + if self._individualExtended == 1: + self._individualExtended = 1 + if self._summaryExtended == 1: + self._summaryExtended = 1 + self._extendedLabel = 1 + + if "Include Day 3-5 Extended?" in self._extendedQuestions: + self._includeExtendedShortTerm = 1 + else: + self._includeExtendedShortTerm = 0 + + if "Include Day 6-7 Extended?" in self._extendedQuestions: + self._includeExtended = 1 + else: + self._includeExtended = 0 + + # Add the "if conditions become unrep..." statement. + def _makeUnrepresentStatement(self, fcst, argDict): + + if self._insertUnrepresentStatement == 1: + return fcst + self._unrepresentStatement + "\n\n" + else: + return fcst + + # Place the headlines above the discussion. + def _makeHeadline(self, fcst, editArea, areaLabel, argDict): + + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + #hlList = tree.stats.get("Hazards", self._timeRange, areaLabel) + #print "hlList = ",hlList + headlines = self.generateProduct("Hazards", argDict, + area = editArea, areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + return fcst + + # From FWF. Modified to eliminate the UGC header not needed in the FWS. + # Since Headlines are placed above the discussion...that eliminated too. + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + + # From FWF. Modified to eliminate everything. + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + if self._individualExtended == 1: + fcst = fcst + "\n" + return fcst + + # Deal with the summary extended more cleanly. 
+ def _makeSummaryExtended(self, fcst, argDict): + + # Add one extended + if self._summaryExtended == 1: + extended = self.generateProduct("ExtendedNarrative", + argDict, area=self._summaryArea, + timeRange=self._extendedRange) + fcst = fcst + extended + fcst = fcst + "\n" + return fcst + +# From FWS_Overrides. changes commented on the right margin. (just two lines) + + def _generateOutlookLabels(self, argDict): + + today = argDict["creationTime"] + if self._productIssuance in ["Morning", "Morning Update", "Afternoon Update", "Next Day"]: + day8 = today + 7*24*3600 + day14 = today + 13*24*3600 + dow = time.gmtime(today)[6] + if dow == 0 or dow == 2 or dow == 4: + self._insertOutlookFlag = 1 + else: + self._insertOutlookFlag = 0 + self._insertOutlookFlag = 1 + else: + currentHour = time.gmtime(today)[3] + if currentHour < 16: + day8 = today + 7*24*3600 + day14 = today + 13*24*3600 + dow = time.gmtime(today)[6] + if dow == 0 or dow == 2 or dow == 4: + self._insertOutlookFlag = 1 + else: + self._insertOutlookFlag = 0 + self._insertOutlookFlag = 1 + else: + day8 = today + 8*24*3600 + day14 = today + 14*24*3600 + dow = time.gmtime(today + 24*3600)[6] + if dow == 1 or dow == 3 or dow == 6: + self._insertOutlookFlag = 1 + else: + self._insertOutlookFlag = 0 + self._insertOutlookFlag = 1 + + self._outlookDay8Label = time.strftime("%A %B %d",time.gmtime(day8)).upper() + self._outlookDay14Label = time.strftime("%A %B %d",time.gmtime(day14)).upper() + + return None + + # Import the 8 to 14 day outlook into the product + # if the user requests it for the spot forecast. + def _make8to14DayOutlook(self, fcst, argDict): + + if "Include Day 8-14 Outlook?" 
not in self._extendedQuestions:
+ return fcst
+
+ outlookHeader = ".OUTLOOK FOR " + self._outlookDay8Label + " THROUGH " \
+ + self._outlookDay14Label + "...\n"
+ outlookHeader = string.upper(outlookHeader)
+
+ if self._insertOutlookFromFile == 1:
+ outlook = ""
+ if os.path.isfile(self._outlookFile):
+ input = open(self._outlookFile)
+ text = input.readlines()
+ for line in text:
+ outlook = outlook + line
+ outlook = string.join(string.split(outlook,"\n\n"),"\n")
+ outlook = string.join(string.split(outlook,"\n\n"),"\n")
+ return fcst + outlookHeader + outlook + "\n"
+ else:
+ outlook = "...Put 8 to 14 day outlook text here..."
+ return fcst + outlookHeader + outlook + "\n\n"
+ elif self._insertOutlookFromFile == 2:
+ version = 0
+ fwfPil = self._statePil + self._fwfPil
+ searchString=""
+ product = self.getPreviousProduct(fwfPil, searchString, version=version)
+ product = string.split(product, "\n")
+ outlook = ""
+ outFlag = 0
+ foundOutlook = 0
+ for line in product:
+ if line[:2] == "$$":
+ outFlag = 0
+ if outFlag:
+ outlook = outlook + line + "\n"
+ if string.find(line,".OUTLOOK") != -1:
+ outFlag = 1
+ foundOutlook = 1
+ if foundOutlook:
+ return fcst + outlookHeader + outlook + "\n\n"
+ else:
+ outlook = "...Put 8 to 14 day outlook text here..."
+ return fcst + outlookHeader + outlook + "\n\n"
+ else:
+ return fcst + outlookHeader + "\n\n\n"
+
+ # From FWF. Modified to append the fire name and agency name to the
+ # product name. Modified to eliminate the discussion from method.
+ # Modified to include Matt Davis' enhancement (unlisted agency)
+ def _preProcessProduct(self, fcst, argDict):
+
+ if self._requestingAgency == "Unlisted":
+ newFireName = self._fireName + "..." + self._otherAgencyName
+ else:
+ newFireName = self._fireName + "..."
+ self._requestingAgency + productLabel = self._productName + " for " + newFireName + + productLabel = self.checkTestMode(argDict, productLabel) + + issuedByString = self.getIssuedByString() + + # Product header + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productLabel + \ + "\nNational Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + # Add time disclaimer + self._fireTR = None + if self._withIgnitionTimes == "yes" or self._tableStartTimeMode == "ignitionTime": + fcst = self._makeFcstTimeStatement(fcst, argDict) + try: + timeTup = time.strptime(self._timeLabel, '%I%M %p %Z %a %b %d %Y') + issueTime = time.mktime(timeTup) + except: + issueTime = time.time() + now = time.time() + if ((issueTime - now) < -24*3600) or ((issueTime - now) > 9*24*3600): + message = \ +'''|* The start time for this product is %s. +This is either more than a day in the past or more than 9 days +in the future. *|''' % self._timeLabel + fcst = '%s\n%s\n\n' % (fcst, message) + return fcst + + def _postProcessProduct(self, fcst, argDict): + fcst = string.join(string.split(fcst, "\n\n\n"), "\n") + forecasterString = string.join(self._forecaster,"/") + if self._webSiteTag == "": + tagLineString = "" + else: + tagLineString = ".TAG " + self._webSiteTag + "/" + self._wfoID + "\n" + fcst = fcst + "$$\nForecaster..." + forecasterString + "\n" + \ + "Requested by..." + self._agencyContact + "\n" + \ + "Type of request..." 
+ self._fireType + "\n" + tagLineString + #self.storeAWIPS(fcst, self._awipsProductID) + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + +############################################################################# +# Weather Element Sampling and Phrase Configuration # +############################################################################# + + def _issuance_list(self, argDict): + narrativeDef = [] + if self._tabularAllPeriods == "yes": + phantom = "Phantom" + else: + # If we are generating a 12-hour table + # in the first period, need to have an empty + # narrative so that the sampling will get done. + phantom = "EmptyFirePeriod" + if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: + # Add the first period + if len(self._todayElements) == 0: + period = (phantom, "period1") + else: + period = ("FirePeriod1", "period1") + narrativeDef.append(period) + + if len(self._tonightElements) == 0: + period = (phantom, 12) + else: + period = ("FirePeriod2", 12) + narrativeDef.append(period) + + # Add the third period + if len(self._tomorrowElements) == 0: + period = (phantom, 12) + else: + period = ("FirePeriod3", 12) + narrativeDef.append(period) + else: + # Add the first period. 
+ if len(self._tonightElements) == 0: + period = (phantom, "period1") + else: + period = ("FirePeriod2", "period1") + narrativeDef.append(period) + + # Add the second period + if len(self._tomorrowElements) == 0: + period = (phantom, 12) + else: + period = ("FirePeriod3", 12) + narrativeDef.append(period) + if self._productIssuance in ["Afternoon with 4 periods", + "Evening Update with 4 periods", + "Early Morning Update with 4 periods"]: + # Add the third period + if len(self._tomorrowNightElements) == 0: + period = (phantom, 12) + else: + period = ("FirePeriod4", 12) + narrativeDef.append(period) + + # Add the fourth period + if len(self._nextDayElements) == 0: + period = (phantom, 12) + else: + period = ("FirePeriod5", 12) + narrativeDef.append(period) + + # Add extended if configured to appear + if "Include Day 3-5 Extended?" in self._extendedQuestions: + if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: + extendedShortTerm = [ + ("FireExtendedShortTerm", 24), + ("FireExtendedShortTerm", 24), + ("FireExtendedShortTerm", 24), + ] + elif self._productIssuance in ["Afternoon with 4 periods", + "Evening Update with 4 periods", + "Early Morning Update with 4 periods"]: + extendedShortTerm = [ + ("FireExtendedShortTerm", 24), + ("FireExtendedShortTerm", 24), + ("FireExtendedShortTerm", 24), + ] + else: + extendedShortTerm = [ + ("FireExtendedShortTerm", 24), + ("FireExtendedShortTerm", 24), + ("FireExtendedShortTerm", 24), + ("FireExtendedShortTerm", 24), + ] + else: + if self._productIssuance in ["Next Day", "Morning", "Morning Update", "Afternoon Update"]: + extendedShortTerm = [ + ("Phantom", 24), + ("Phantom", 24), + ("Phantom", 24), + ] + elif self._productIssuance in ["Afternoon with 4 periods", + "Evening Update with 4 periods", + "Early Morning Update with 4 periods"]: + extendedShortTerm = [ + ("Phantom", 24), + ("Phantom", 24), + ("Phantom", 24), + ] + else: + extendedShortTerm = [ + ("Phantom", 24), + ("Phantom", 
24), + ("Phantom", 24), + ("Phantom", 24), + ] + if "Include Day 6-7 Extended?" in self._extendedQuestions: + extended = [ + ("FireExtended", 24), + ("FireExtended", 24), + ] + else: + extended = [] + + # Combine sections + try: + if self._individualExtended == 1: + if self._extendedLabel == 1: + narrativeDef.append(("ExtendedLabel",0)) + if self._includeExtendedShortTerm or self._includeExtended: + narrativeDef = narrativeDef + extendedShortTerm + if self._includeExtended: + narrativeDef = narrativeDef + extended + except: + pass + return [ + ("Next Day", 24 + self.DAY(), 24 + self.NIGHT(), 24 + self.NIGHT(), + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDef), + ("Morning", self.DAY(), self.NIGHT(), self.NIGHT(), + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDef), + ("Morning Update", "issuanceHour", self.NIGHT(), self.NIGHT(), + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDef), + ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), + ".REST OF TODAY...", "early in the morning","late in the afternoon", + 1, narrativeDef), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDef), + ("Afternoon with 4 periods", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDef), + ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + ".REST OF TONIGHT...", "late in the night","early in the evening", + 1, narrativeDef), + ("Evening Update with 4 periods", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + ".REST OF TONIGHT...", "late in the night","early in the evening", + 1, narrativeDef), + # For the early morning update, this produces: + # Rest of Tonight: + # Monday + # Monday Night + ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), + ".REST 
OF TONIGHT...", "early in the morning","late in the afternoon", + 0, narrativeDef), + ("Early Morning Update with 4 periods", "issuanceHour", self.DAY(), self.DAY(), + ".REST OF TONIGHT...", "early in the morning","late in the afternoon", + 0, narrativeDef), + ] + + def FirePeriod1(self): + phraseList = self.getFirePeriod_phraseList(self._todayElements) + analysisList = self.getFirePeriod_analysisList() + intersectAreas = self.getFirePeriod_intersectAreas(1) + return { + "type": "component", + "methodList": [ + self.assembleIndentedPhrases, + ], + "analysisList": analysisList, + "phraseList": phraseList, + "intersectAreas": intersectAreas, + } + + def FirePeriod2(self): + phraseList = self.getFirePeriod_phraseList(self._tonightElements) + analysisList = self.getFirePeriod_analysisList() + intersectAreas = self.getFirePeriod_intersectAreas(2) + return { + "type": "component", + "methodList": [ + self.assembleIndentedPhrases, + ], + "analysisList": analysisList, + "phraseList": phraseList, + "intersectAreas": intersectAreas, + } + + def FirePeriod3(self): + phraseList = self.getFirePeriod_phraseList(self._tomorrowElements) + analysisList = self.getFirePeriod_analysisList() + intersectAreas = self.getFirePeriod_intersectAreas(3) + return { + "type": "component", + "methodList": [ + self.assembleIndentedPhrases, + ], + "analysisList": analysisList, + "phraseList": phraseList, + "intersectAreas": intersectAreas, + } + + def FirePeriod4(self): + phraseList = self.getFirePeriod_phraseList(self._tomorrowNightElements) + analysisList = self.getFirePeriod_analysisList() + intersectAreas = self.getFirePeriod_intersectAreas(4) + return { + "type": "component", + "methodList": [ + self.assembleIndentedPhrases, + ], + "analysisList": analysisList, + "phraseList": phraseList, + "intersectAreas": intersectAreas, + } + + def FirePeriod5(self): + phraseList = self.getFirePeriod_phraseList(self._nextDayElements) + analysisList = self.getFirePeriod_analysisList() + intersectAreas = 
self.getFirePeriod_intersectAreas(5) + return { + "type": "component", + "methodList": [ + self.assembleIndentedPhrases, + ], + "analysisList": analysisList, + "phraseList": phraseList, + "intersectAreas": intersectAreas, + } + + def EmptyFirePeriod(self): + phraseList = [] + analysisList = self.getFirePeriod_analysisList() + intersectAreas = self.getFirePeriod_intersectAreas(1) + return { + "type": "component", + "methodList": [ + self.assembleIndentedPhrases, + ], + "analysisList": analysisList, + "phraseList": phraseList, + "intersectAreas": intersectAreas, + } + + def PreFirePeriod1(self): + analysisList = self.getFirePeriod_analysisList() + intersectAreas = self.getFirePeriod_intersectAreas(1) + return { + "type": "component", + "methodList": [self.noWords], + "analysisList": analysisList, + "phraseList": [], + "intersectAreas": intersectAreas, + } + + def createLabel(self, tree, node, timeRange, issuanceInfo, currentLocalTime, shift, index=0): + # Make a label given the timeRange in GMT and the shift to + # convert it to local time. currentLocalTime can be used to + # compare to current day. 
+ if timeRange.duration() <= 3600: + return "" + + curLocal, shift = self.determineTimeShift() + if index == 0 and self._equalDates(currentLocalTime, curLocal): + try: + label = issuanceInfo.period1Label() + if label != "": + return label + "\n" + except: + pass + try: + today = issuanceInfo.todayFlag() + except: + today = 1 + try: + useHolidays = self._useHolidays + except: + useHolidays = 1 + nextDay24HourLabel = self.nextDay24HourLabel_flag(tree, node) + splitDay24HourLabel = self.splitDay24HourLabel_flag(tree, node) + label = self.getWeekday(timeRange, holidays=useHolidays, shiftToLocal=1, + labelType="CapitalWithPeriod", today=today, + tomorrow=0, nextDay24HourLabel=nextDay24HourLabel, + splitDay24HourLabel=splitDay24HourLabel) + return label + "\n" + + def _equalDates(self, t1, t2): + # If AbsTimes t1 and t2 represent the same day, month, year + # return 1 else 0 + d1 = t1.day + d2 = t2.day + m1 = t1.month + m2 = t2.month + y1 = t1.year + y2 = t2.year + if d1==d2 and m1==m2 and y1==y2: + return 1 + else: + return 0 + + def increment_nlValue_dict(self, tree, node): + # Increment for rounding values + # Units depend on the product + dict = TextRules.TextRules.increment_nlValue_dict(self, tree, node) + dict["Wind"] = 1 + dict["Wind20ft"] = 1 + dict["TransWind"] = 1 + dict["CWR"] = 1 + dict["QPF"] = .0001 + dict["Vsby"] = .01 + return dict + + def scalar_difference_nlValue_dict(self, tree, node): + # Scalar difference. If the difference between scalar values + # for 2 sub-periods is greater than this value, + # the different values will be noted in the phrase. 
+ dict = TextRules.TextRules.scalar_difference_nlValue_dict(self, tree, node) + dict["Vsby"] = { + (0.00,1.00) : 0.25, + (1.00,3.00) : 0.50, + (3.00,5.00) : 1.00, + "default" : 2.00, + } + dict["PredHgt"] = { + (0,10) : 1, + (10,30) : 5, + (30,100) : 10, + "default" : 25, + } + dict["Td"] = 5 + dict["PoP"] = 10 + return dict + + def getFirePeriod_phraseList(self, periodElements): + phraseList = [] + if self._forecastType in ["Tabular/Narrative"]: + # Figure out which narrative phrases should be included + narratives = [] + tableElements = [] + for rowElement, narrativeToo, tableRows in self._rowList(): + tableElements.append(rowElement) + if narrativeToo: + narratives.append(rowElement) + if self._forecastType in ["Narrative Only", "Tabular/Narrative"]: + for elementId in periodElements: + for element, default, phrases, searchStrings in self._weInfoList(): + if elementId == element: + if self._forecastType == "Tabular/Narrative": + if elementId in tableElements and elementId not in narratives: + break + if type(phrases) is not list: + phrases = [phrases] + phraseList += phrases + if self._forecastType in ["Tabular/Narrative", "Tabular Only"]: + phraseList.append(self._fwsTable_phrase) + return phraseList + + # From FWS_Overrides. Added one hourly sampling for T and RH. + # This sampling is used for the ignition time forecasts. 
+ def getFirePeriod_analysisList(self): + if self._forecastType in ["Tabular/Narrative", "Tabular Only"] or \ + self._withIgnitionTimes == "yes": + analysisList = [ + ("Sky", self.median, [1]), + ("Wx", self.rankedWx, [1]), + ("PoP", self.stdDevMaxAvg, [1]), + ("PoP", self.binnedPercent, [1]), + ("LAL", self.maximum, [1]), + ("LAL", self.binnedPercent, [1]), + ("MaxT", self.moderatedMinMax), + ("MinT", self.moderatedMinMax), + ("MaxRH", self.moderatedMinMax), + ("MinRH", self.moderatedMinMax), + ("RH", self.avg, [1]), + ("RH", self.moderatedMinMax), + ("MaxT", self.avg), # for trends + ("MinT", self.avg), # for trends + ("MaxRH", self.avg), # for trends + ("MinRH", self.avg), # for trends + ("RH", self.avg), # for trends + ("T", self.avg, [1]), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Td", self.avg, [1]), + ("Td", self.hourlyTemp), + ("Td", self.minMax), + ("Wind", self.vectorMinMax, [1]), + ("WindGust", self.maximum, [1]), + ("Wind20ft", self.vectorMinMax, [1]), + ("Haines", self.maximum, [1]), + ("TransWind", self.vectorAvg, [1]), + ("FreeWind", self.vectorAvg, [1]), + ("MixHgt", self.moderatedMin, [1]), + ("VentRate", self.minMax, [1]), + ("DSI", self.maximum,[1]), + ("LDSI", self.maximum,[1]), + ("LVORI", self.maximum,[1]), + ("ADI", self.maximum,[1]), + ("CWR", self.maximum, [1]), + ("Stability", self.maximum, [1]), + ("MarineLayer", self.maximum, [1]), + ("Swell", self.vectorMinMax, [1]), + ("Period", self.maximum, [1]), + ("WindWaveHgt", self.maximum, [1]), + ("WaveHeight", self.maximum, [1]), + ("QPF", self.accumSum, [6]), + ("SnowAmt", self.accumSum, [6]), + ("FzLevel", self.median, [1]), + ("Hazards", self.dominantDiscreteValue, [1]), + ("Vsby", self.minimum, [1]), + ("PredHgt", self.minimum, [1]), + ("HeatIndex", self.maximum, [1]), + ("ApparentT", self.maximum, [1]), + ] + else: + analysisList = [ + ("Sky", self.median, [6]), + ("PoP", self.stdDevMaxAvg, [6]), + ("PoP", self.binnedPercent, [6]), + ("Wx", self.rankedWx, [6]), + ("LAL", 
self.maximum, [12]), + ("LAL", self.binnedPercent, [0]), + ("MaxT", self.moderatedMinMax), + ("MinT", self.moderatedMinMax), + ("MaxRH", self.moderatedMinMax), + ("MinRH", self.moderatedMinMax), + ("RH", self.avg, [1]), + ("RH", self.moderatedMinMax), + ("MaxT", self.avg), # for trends + ("MinT", self.avg), # for trends + ("MaxRH", self.avg), # for trends + ("MinRH", self.avg), # for trends + ("RH", self.avg), # for trends + ("T", self.avg, [1]), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Td", self.avg, [1]), + ("Td", self.hourlyTemp), + ("Td", self.minMax), + ("Wind", self.vectorMinMax, [6]), + ("WindGust", self.maximum, [6]), + ("Wind20ft", self.vectorMinMax, [6]), + ("Haines", self.maximum), + ("TransWind", self.vectorAvg, [12]), + ("FreeWind", self.vectorAvg, [12]), + ("MixHgt", self.moderatedMin, [1]), + ("VentRate", self.minMax), + ("CWR", self.maximum), + ("DSI", self.maximum,[12]), + ("LDSI", self.maximum,[12]), + ("LVORI", self.maximum,[12]), + ("ADI", self.maximum,[12]), + ("Stability", self.maximum), + ("MarineLayer", self.maximum), + ("Swell", self.vectorMinMax, [6]), + ("Period", self.maximum, [6]), + ("WindWaveHgt", self.maximum, [6]), + ("WaveHeight", self.maximum, [6]), + ("QPF", self.accumMinMax, [6]), + ("SnowAmt", self.accumMinMax, [6]), + ("FzLevel", self.median, [6]), + ("Hazards", self.dominantDiscreteValue), + ("Vsby", self.minimum, [6]), + ("PredHgt", self.minimum, [6]), + ("HeatIndex", self.maximum, [6]), + ("ApparentT", self.maximum, [6]), + ] + return analysisList + + def getFirePeriod_intersectAreas(self, periodNum): + return [] + + # From ConfigVariables. + + def phrase_descriptor_dict(self, tree, node): + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + if self._wind20ftHeader: + dict["WIND.(20 FT)........"]="Wind (20 ft)........" + dict["20-foot winds......."]=" Slope/valley......." + dict["Free winds.........."]=" Ridgetop..........." + dict["Surrounding ridge..."]=" Surrounding ridge.." 
+ else: + dict["20-foot winds......."]="Wind (20 ft)........" + dict["Free winds.........."]="Ridgetop wind......." + dict["Surrounding ridge..."]="Surrounding ridge..." + dict["Eye level winds....."]="Eye level winds....." + dict["Surface winds......."]="Surface winds......." + dict["Wind shift.........."]="Wind shift.........." + if self._transportWindLabel == "mix": + dict["Transport winds....."]="Mixing winds........" + else: + dict["Transport winds....."]="Transport winds....." + dict["CWR................."]="CWR................." + dict["DSI................."]="Dispersion.........." + dict["LDSI................"]="Dispersion index...." + dict["LVORI..............."]="LVORI..............." + dict["ADI................."]="ADI................." + dict["POP................."]="Chance of pcpn......" + dict["Dewpoint............"]="Dewpoint............" + dict["Begin/end of pcpn..."]="Begin/end of pcpn..." + dict["Stability class....."]="Stability class....." + dict["Wind wave..........."]="Wind wave..........." + dict["Rainfall amount....."]="Rainfall amount....." + dict["Snowfall amount....."]="Snowfall amount....." + dict["Swell period........"]="Swell period........" + dict["Swell height........"]="Swell height........" + dict["Freezing level......"]="Freezing level......" + dict["Ceiling............."]="Ceiling............." + dict["Visibility.........."]="Visibility.........." + dict["Icing..............."]="Icing..............." + dict["Heat index.........."]="Heat index.........." + dict["erraticWind"]="gusty and erratic winds expected near thunderstorms" + if self._withIgnitionTimes == "yes": + dict["MinT_FireWx"]="Temperature........." + dict["MaxT_FireWx"]="Temperature........." + dict["MinRH_FireWx"]="RH.................." + dict["MaxRH_FireWx"]="RH.................." + return dict + + # From FirePhrases. Changed to eliminate the area test. Thus, + # this label will appear even though there is no ridgetop wind. 
+ def fireWind_label_setUp(self, tree, node): + self.setWords(node, "") + node.set("descriptor", "") + node.set("indentLabel", "WIND.(20 FT)........") + return self.DONE() + + # The methods below this line override baseline + # methods to accomodate ignition times. + # They were derived by Tracy Hansen from code originally + # from Matt Davis and renamed by Virgil Middendorf. + # The last two methods were created by Matt Davis to + # check and make time ranges for the ignition time forecasts. + + def fire_dayOrNight_words(self, tree, node): + # Temp or RH elements + elementName = node.getAncestor("elementName") + statDict = node.getStatDict() + if elementName == "MaxT" or elementName == "MinT": + stats = self.getTempStats(tree, node) + if stats is None: + return self.setWords(node.parent, "MISSING") + connector = self.value_connector(tree, node, elementName, elementName) + igWords = repr(int(self.getValue(stats, "avg"))) + words = self.getTempRangePhrase(tree, node, stats, elementName) + else: # MinRH, MaxRH or RH + stats = self.getStats(statDict, elementName) + if stats is None: + return self.setWords(node.parent, "MISSING") + connector = self.value_connector(tree, node, elementName, elementName) + igWords = repr(int(self.getValue(stats, "avg"))) + min, max = self.getValue(stats, "MinMax") + if min > 100: + min = 100 + if max > 100: + max = 100 + if min == max: + words = repr(int(min)) + else: + words = repr(int(min)) + connector + repr(int(max)) + outUnits = self.element_outUnits(tree, node, elementName, elementName) + units = self.units_descriptor(tree, node,"units", outUnits) + words = words + units + igWords = igWords + units + + # Add ignition element if applicable + if self._withIgnitionTimes == "yes": + dayNight = self.getPeriod(node.getTimeRange(), 1) + if dayNight == self.DAYTIME(): + tempElement = "Max" + rhElement = "Min" + else: + tempElement = "Min" + rhElement = "Max" + if elementName == "MaxT" or elementName == "MinT": + ignitionElement = "T" + 
elementType = tempElement + else: + ignitionElement = "RH" + elementType = rhElement + if self._checkFireTR(node.getTimeRange()): + ignitionStats = tree.stats.get( + ignitionElement, self._fireTR, node.getAreaLabel(), mergeMethod="Max") + if ignitionStats is not None: + ignitionPhrase = repr(int(self.getValue(ignitionStats))) + reqType = self._getRequestWords() + words = ignitionPhrase + units + " at " + reqType + "..." + elementType + " " + igWords + else: + words = elementType + " " + igWords + else: + words = elementType + " " + igWords + return self.setWords(node, words) + + def fireWind_compoundPhrase(self): + return { + "phraseList": [ + self.wind_summary, + #self.wind_phrase, + self.wind_withGusts_phrase, + self.erraticWind_phrase + ], + "phraseMethods": [ + self.assembleSentences, + self.fireWind_finishUp + ], + } + + def fireWind_finishUp(self, tree, node): + "Create a phrase for Winds" + if self.currentAreaContains( + tree, self.ridgeValleyAreas(tree, node)) == 1: + return self.setWords(node, "") + words = node.get("words") + if words is None: + return + if words == "": + words = "MISSING" + + # Add ignitionTime if appropriate + igWords = "" + if self._checkFireTR(node.getTimeRange()): + ignitionWindStats = tree.stats.get( + "Wind", self._fireTR, node.getAreaLabel(), mergeMethod="Max") + if ignitionWindStats is not None: + igMagStr = repr(int(ignitionWindStats[0])) + igDirStr = self.vector_dir(int(ignitionWindStats[1])) + reqType = self._getRequestWords() + igWords = "Winds " + igDirStr + " at " + igMagStr + " mph at " + reqType + ", otherwise " + + words = igWords + words + node.set("descriptor", "") + node.set("indentLabel", "20-foot winds.......") + node.set("compound", 1) + return self.setWords(node, words) + + def fireSfcWind_compoundPhrase(self): + return { + "phraseList": [ + self.wind_summary, + self.wind_phrase, + ], + "phraseMethods": [ + self.consolidateSubPhrases, + self.assembleSentences, + self.fireSfcWind_finishUp + ], + } + + def 
fireSfcWind_finishUp(self, tree, node): + "Create a phrase for Winds" + # Empty phrase if doing ridge/valley winds + if self.currentAreaContains( + tree, self.ridgeValleyAreas(tree, node)) == 1: + return self.setWords(node, "") + words = node.get("words") + if words is None: + return + if words == "": + words = "MISSING" + node.set("descriptor", "") + node.set("indentLabel", "Surface winds.......") + node.set("compound", 1) + return self.setWords(node, words) + + def erraticWind_phrase(self): + return { + "setUpMethod": self.erraticWind_setUp, + "wordMethod": self.erraticWind_words, + "phraseMethods": [ + self.preProcessWx, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.assembleSubPhrases, + self.postProcessPhrase, + ], + } + + def erraticWind_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] + self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector) + # Set this flag used by the "checkWeatherSimilarity" method + node.set("noIntensityCombining", 1) + self.determineSevereTimeDescriptors(tree, node) + return self.DONE() + + def erraticWind_words(self, tree, node): + # If T is in the Wx grids, then produce phrase. 
+ # Wx Statistics: rankedWx + + statDict = node.getStatDict() + rankList = self.getStats(statDict, "Wx") + if rankList is None or len(rankList) == 0: + return self.setWords(node, "") + # Check against PoP + #rankList = self.checkPoP(tree, node, rankList) + subkeyList = self.getSubkeys(rankList) + + severe = 0 + thunder = 0 + attrTextList = [] + for subkey in subkeyList: + wxType = subkey.wxType() + if wxType == "T": + thunder = 1 + intensity = subkey.intensity() + if intensity == "+": + severe = 1 + wxDef = subkey.wxDef() + for attr in subkey.attributes(): + if attr in ["Primary", "Mention", "Dry"]: + continue + attrText = wxDef.attributeDesc(subkey.wxType(), attr).lower() + if attrText not in attrTextList: + attrTextList.append(attrText) + + if thunder == 0: + return self.setWords(node, "") + words = self.phrase_descriptor(tree, node, "erraticWind", "Wx") + + return self.setWords(node, words) + + def smokeDispersal_words(self, tree, node): + "Create phrase for Smoke Dispersal" + statDict = node.getStatDict() + stats = self.getStats(statDict, "VentRate") + if stats is None: + return self.setWords(node.parent, "MISSING") + + if self._checkFireTR(node.getTimeRange()): + # Handle phrase if including ignition time + minVal, maxVal = self.getValue(stats, "MinMax") + dayNight = self.getPeriod(node.getTimeRange(), 1) + if dayNight == self.DAYTIME(): + vr = int(maxVal) + ventType = "Max" + mergeMethod = "Max" + else: + vr = int(minVal) + ventType = "Min" + mergeMethod = "Min" + vrCat = self.smokeDispersal_valueStr(vr) + words = ventType + "..." + vrCat + " " + " /" + repr(vr) + " knot-ft/" + reqType = self._getRequestWords() + ignitionDispersal = tree.stats.get( + "VentRate", self._fireTR, node.getAreaLabel(), mergeMethod=mergeMethod) + vrCat = self.smokeDispersal_valueStr(ignitionDispersal) + igWords = vrCat + " /" + repr(int(ignitionDispersal)) + " knot-ft/ at " + reqType + ". 
\n" + words = igWords + " " + words + else: + # Handle phrase with range if not including ignition time + vr1, vr2 = self.getValue(stats, "MinMax") + vr1 = int(vr1) + vr2 = int(vr2) + vrCat1 = self.smokeDispersal_valueStr(vr1) + vrCat2 = self.smokeDispersal_valueStr(vr2) + # Single Value input + if vr1 == vr2: + words = vrCat1 + " (" + repr(vr1) + " knot-ft)" + # Range + else: + words = vrCat1 + " to " + vrCat2 + " (" + repr(vr1) + "-" + \ + repr(vr2) + " knot-ft)" + return self.setWords(node, words) + + # SMOKE DISPERSAL CATEGORIES + def smokeDispersal_valueStr(self, value): + "Convert smoke dispersal value to corresponding category" + + if value < 13000 : + return "poor" + + if value >= 13000 and value < 30000: + return "fair" + + if value >= 30000 and value < 60000 : + return "good" + + if value >= 60000 : + return "excellent" + + ### MixHgt + def mixingHgt_words(self, tree, node): + "Create phrase for Mixing Height" + + statDict = node.getStatDict() + stats = self.getStats(statDict, "MixHgt") + if stats is None: + return self.setWords(node.parent, "MISSING") + + mix1, mix2 = self.getValue(stats, "MinMax") + outUnits = self.element_outUnits(tree, node, "MixHgt", "MixHgt") + mix1 = int(mix1) + mix2 = int(mix2) + threshold = self.nlValue(self.null_nlValue( + tree, node, "MixHgt", "MixHgt"), max) + if int(mix1) < threshold and int(mix2) < threshold: + return self.setWords(node, "null") + + # Single Value input + if mix1 == mix2: + words = repr(mix1) + " " + outUnits + " AGL" + # Range + else: + words = repr(mix1)+ "-" + repr(mix2) + " " + outUnits + " AGL" + + # Handle ignition time + if self._checkFireTR(node.getTimeRange()): + reqType = self._getRequestWords() + ignitionMixStats = tree.stats.get( + "MixHgt", self._fireTR, node.getAreaLabel(), mergeMethod="Max") + igWords = repr(int(ignitionMixStats)) + " " + outUnits + " AGL at " + reqType +", otherwise " + words = igWords + words + + return self.setWords(node, words) + + def adi_phrase(self): + return { + 
"setUpMethod": self.adi_setUp, + "wordMethod": self.adi_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def adi_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("ADI", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "ADI.................") + return self.DONE() + + def adi_words(self, tree, node): + statDict = node.getStatDict() + adi = self.getStats(statDict, "ADI") + if adi is None: + return self.setWords(node.parent, "MISSING") + adi = self.getValue(adi) + words = repr(int(adi + 0.5)) + return self.setWords(node, words) + + def haines_words(self, tree, node): + "Create phrase for Haines Index" + statDict = node.getStatDict() + stats = self.getStats(statDict, "Haines") + if stats is None: + return self.setWords(node.parent, "MISSING") + + # Handle ignition time + ignitionFlag = 0 + if self._checkFireTR(node.getTimeRange()): + haines1 = int(self.getValue(stats, "Max")) + ignitionStats = tree.stats.get("Haines", self._fireTR, node.getAreaLabel(), + mergeMethod="Max") + if ignitionStats is not None: + ignitionPhrase = repr(int(self.getValue(ignitionStats))) + #print "Haines ignitionStats", ignitionStats + reqType = self._getRequestWords() + hainesDict = self.hainesDict() + words = ignitionPhrase + " " + hainesDict[int(ignitionPhrase)] + \ + " at " + reqType + "...max " + repr(haines1) + ignitionFlag = 1 + if not ignitionFlag: + haines1, haines2 = self.getValue(stats, "MinMax") + hainesDict = self.hainesDict() + haines1 = int(haines1) + haines2 = int(haines2) + words1 = hainesDict[haines1] + words2 = hainesDict[haines2] + + # Single Value input + if haines1 == haines2: + words = repr(haines1) + " " + words1 + # Range + else: + if words1 == words2: + words = words1 + else: + words = words1 + " to " + words2 + words = repr(haines1) + " to " + repr(haines2) + " OR " + words + return self.setWords(node, words) + + def cwr_words(self, tree, node): + # 
Handle ignition time + if self._checkFireTR(node.getTimeRange()): + cwr = tree.stats.get(self._cwrParm, self._fireTR, node.getAreaLabel(), mergeMethod="Max") + else: + cwr = tree.stats.get(self._cwrParm, node.getTimeRange(), node.getAreaLabel(), mergeMethod="Max") + if cwr is None: + return self.setWords(node.parent, "MISSING") + cwr = self.getValue(cwr) + threshold = self.nlValue(self.null_nlValue(tree, node, "CWR", "CWR"), cwr) + if int(cwr) < threshold: + return self.setWords(node, "null") + else: + words = repr(int(cwr)) + " percent" + return self.setWords(node, words) + + def windWave_phrase(self): + return { + "setUpMethod": self.windWave_setUp, + "wordMethod": self.windWave_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def windWave_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("WindWaveHgt", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Wind wave...........") + return self.DONE() + + def windWave_words(self, tree, node): + "Create phrase Wind Wave" + statDict = node.getStatDict() + height = self.getValue(self.getStats(statDict, "WindWaveHgt"), "Max") + if height is None: + return self.setWords(node.parent, "MISSING") + words = repr(int(height + 0.5)) + " FEET" + return self.setWords(node, words) + + def waveHeight_phrase(self): + return { + "setUpMethod": self.waveHeight_setUp, + "wordMethod": self.waveHeight_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def waveHeight_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("WaveHeight", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "WAVE HEIGHT.........") + return self.DONE() + + def waveHeight_words(self, tree, node): + "Create phrase Wind Wave" + statDict = node.getStatDict() + height = self.getValue(self.getStats(statDict, "WaveHeight"), "Max") + if height is 
None: + return self.setWords(node.parent, "MISSING") + words = repr(int(height + 0.5)) + " FEET" + return self.setWords(node, words) + + def qpf_phrase(self): + return { + "setUpMethod": self.qpf_setUp, + "wordMethod": self.qpf_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def qpf_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("QPF", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Rainfall amount.....") + return self.DONE() + + def qpf_words(self, tree, node): + "Create phrase QPF" + statDict = node.getStatDict() + qpf = self.getValue(self.getStats(statDict, "QPF"), "Max") + if qpf is None: + return self.setWords(node.parent, "MISSING") + if qpf == 0.0: + qpfWords = "0.00" + else: + qpf = qpf + 0.005 + qpfWords = string.strip("%5.2f" % qpf) + words = qpfWords + " INCHES" + return self.setWords(node, words) + + def period_phrase(self): + return { + "setUpMethod": self.period_setUp, + "wordMethod": self.period_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def period_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Period", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Swell period........") + return self.DONE() + + def period_words(self, tree, node): + "Create phrase Swell Period" + statDict = node.getStatDict() + period = self.getValue(self.getStats(statDict, "Period"), "Max") + if period is None: + return self.setWords(node.parent, "MISSING") + words = repr(int(period + 0.5)) + " SECONDS" + return self.setWords(node, words) + + def swell_phrase(self): + return { + "setUpMethod": self.swell_setUp, + "wordMethod": self.swell_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + + def swell_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Swell", self.VECTOR())] + self.subPhraseSetUp(tree, 
node, elementInfoList, self.vectorConnector) + node.set("descriptor", "") + node.set("indentLabel", "Swell height........") + return self.DONE() + + def swell_words(self, tree, node): + "Create phrase Swell Height" + statDict = node.getStatDict() + stats = self.getStats(statDict, "Swell") + if stats is None: + return self.setWords(node, "") + height, dir = self.getValue(stats, "Max", self.VECTOR()) + if height is None: + return self.setWords(node.parent, "MISSING") + if dir >= 22.5 and dir < 67.5: + dirWords = "northeast" + elif dir >= 67.5 and dir < 112.5: + dirWords = "east" + elif dir >= 112.5 and dir < 157.5: + dirWords = "southeast" + elif dir >= 157.5 and dir < 202.5: + dirWords = "south" + elif dir >= 202.5 and dir < 247.5: + dirWords = "southwest" + elif dir >= 247.5 and dir < 292.5: + dirWords = "west" + elif dir >= 292.5 and dir < 337.5: + dirWords = "northwest" + else: + dirWords = "north" + heightWords = repr(int(height + 0.5)) + words = dirWords + " swell " + heightWords + " feet" + return self.setWords(node, words) + + def ceiling_phrase(self): + return { + "setUpMethod": self.ceiling_setUp, + "wordMethod": self.ceiling_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def ceiling_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("PredHgt", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Ceiling (kft).......") + return self.DONE() + + def ceiling_words(self, tree, node): + "Create phrase Visibility" + statDict = node.getStatDict() + hgt = self.getValue(self.getStats(statDict, "PredHgt"), "Min") + if hgt is None: + return self.setWords(node.parent, "MISSING") + hgt = hgt / 10.0 + if hgt == 0.0: + hgtWords = "less than 0.1" + else: + if hgt < 10: + hgtWords = string.strip("%5.1f" % hgt) + else: + hgtWords = repr(int(hgt + 0.5)) + words = hgtWords + return self.setWords(node, words) + + def visibility_phrase(self): + return { + 
"setUpMethod": self.visibility_setUp, + "wordMethod": self.visibility_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def visibility_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Vsby", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Visibility (sm).....") + return self.DONE() + + def visibility_words(self, tree, node): + "Create phrase Visibility" + statDict = node.getStatDict() + vis = self.getValue(self.getStats(statDict, "Vsby"), "Min") + if vis is None: + return self.setWords(node.parent, "MISSING") + if vis == 0.0: + visWords = "0.0" + else: + if vis < 3: + visWords = string.strip("%5.2f" % vis) + else: + visWords = repr(int(vis + 0.5)) + words = visWords + return self.setWords(node, words) + + def icing_phrase(self): + return { + "setUpMethod": self.icing_setUp, + "phraseMethods": [self.postProcessPhrase], + } + + def icing_setUp(self, tree, node): + self.setWords(node, "") + node.set("descriptor", "") + node.set("indentLabel", "Icing...............") + return self.DONE() + + def freezingLevel_phrase(self): + return { + "setUpMethod": self.freezingLevel_setUp, + "wordMethod": self.freezingLevel_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def freezingLevel_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("FzLevel", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Freezing level......") + return self.DONE() + + def freezingLevel_words(self, tree, node): + "Create phrase for Freezing Level" + + statDict = node.getStatDict() + stats = self.getStats(statDict, "FzLevel") + if stats is None: + return self.setWords(node.parent, "MISSING") + + mix1, mix2 = self.getValue(stats, "MinMax") + outUnits = self.element_outUnits(tree, node, "FzLevel", "FzLevel") + mix1 = int(mix1) + mix2 = int(mix2) + threshold = 
self.nlValue(self.null_nlValue( + tree, node, "FzLevel", "FzLevel"), max) + if int(mix1) < threshold and int(mix2) < threshold: + return self.setWords(node, "null") + + # Single Value input + if mix1 == mix2: + words = repr(mix1) + " " + outUnits + # Range + else: + words = repr(mix1)+ "-" + repr(mix2) + " " + outUnits + + # Handle ignition time + if self._checkFireTR(node.getTimeRange()): + reqType = self._getRequestWords() + ignitionMixStats = tree.stats.get( + "FzLevel", self._fireTR, node.getAreaLabel(), mergeMethod="Max") + igWords = repr(int(ignitionMixStats)) + " " + outUnits + " at " + reqType +", otherwise " + words = igWords + words + + return self.setWords(node, words) + + def snow_phrase(self): + return { + "setUpMethod": self.snow_setUp, + "wordMethod": self.snow_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def snow_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("SnowAmt", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Snowfall amount.....") + return self.DONE() + + def snow_words(self, tree, node): + "Create phrase Snow" + statDict = node.getStatDict() + snow = self.getValue(self.getStats(statDict, "SnowAmt"), "Max") + if snow is None: + return self.setWords(node.parent, "MISSING") + if snow == 0.0: + snowWords = "0.0" + else: + snow = snow + 0.05 + snowWords = string.strip("%5.1f" % snow) + words = snowWords + " INCHES" + return self.setWords(node, words) + + def heatIndex_phrase(self): + return { + "setUpMethod": self.heatIndex_setUp, + "wordMethod": self.heatIndex_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def heatIndex_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("HeatIndex", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Heat index..........") + return self.DONE() + + def heatIndex_words(self, 
tree, node): + "Create phrase Td" + statDict = node.getStatDict() + hi = self.getValue(self.getStats(statDict, "HeatIndex"), "Max") + if hi is None: + return self.setWords(node.parent, "MISSING") + words = repr(int(hi)) + return self.setWords(node, words) + + ### Methods for Spot Table ### + + def _fwsTable_phrase(self): + return { + "setUpMethod": self._fwsTable_setUp, + "wordMethod": self._fwsTable_words, + "phraseMethods": [ + self.assembleChildWords, + ], + } + + def _fwsTable_setUp(self, tree, node): + elementInfoList = [] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + return self.DONE() + + def _fwsTable_words(self, tree, node): + # See if we're doing a table for this time period + tableVars = self._determineTableVars(tree, node) + if tableVars is None: + return self.setWords(node, "") + timeRangeList, statList, colWidth, header, elements = tableVars + #print "header", header + #print "colWidth", colWidth + #print "timeRangeList" + #for tr, label in timeRangeList: + # print tr, label + words = header + argList = [tree, node, colWidth] + for rowElement, narrativeToo, tableRows in self._rowList(colWidth): + if rowElement not in elements: + continue + for label, method in tableRows: + # Call makeRow adding to words + words += self.makeRow( + label, colWidth, timeRangeList, statList, + method, argList, justify="l") + return self.setWords(node, words) + + def _determineTableVars(self, tree, node): + # Make timeRangeList, empty statList, colWidth, and header + + # Get table resolution (period) based on today, tonight, tomorrow + componentName = node.getComponentName() + period = None + + if self._productIssuance in ["Afternoon with 4 periods", + "Evening Update with 4 periods", + "Early Morning Update with 4 periods"]: + tablePeriodList = [ + ("FirePeriod1", self._todayTableRes, self._todayElements), + ("FirePeriod2", self._tonightTableRes, self._tonightElements), + ("FirePeriod3", self._tomorrowTableRes, 
self._tomorrowElements), + ("FirePeriod4", self._tomorrowNightTableRes, self._tomorrowNightElements), + ("FirePeriod5", self._nextDayTableRes, self._nextDayElements), + ] + else: + tablePeriodList = [ + ("FirePeriod1", self._todayTableRes, self._todayElements), + ("FirePeriod2", self._tonightTableRes, self._tonightElements), + ("FirePeriod3", self._tomorrowTableRes, self._tomorrowElements), + ] + + for name, variable, elements in tablePeriodList: + if componentName == name: + period = variable + tableElements = elements + if period is None or period == "None": + # No table for this component + return None + + # Determine colWidth given the period + colWidth = 4 + for hrs, colWidth in self._colWidths(): + if period == hrs: + break + + # Determine Time Ranges over which to create table + fireTimeZone = self._getActualTimeZone() + timeRange = self._determineTableTimeRange(tree, node, fireTimeZone) + timeRangeList = self.getPeriods(timeRange, period, 1, None) + + # Make header + header = "Time ("+fireTimeZone+") " + for tr, label in timeRangeList: + label = self._makeTableLabel(tree, tr, colWidth) + header += string.ljust(label, colWidth) + header += "\n" + + # Make empty statList (dummy for calling "makeRow") + statList = [] + for i in range(len(timeRangeList)): + statList.append({}) + return timeRangeList, statList, colWidth, header, tableElements + + def _colWidths(self): + # Lists table resolutions hours, corresponding column width + return [ + (1, 4), + (2, 7), + (3, 10), + (4, 13), + ] + + def _determineTableTimeRange(self, tree, node, fireTimeZone): + tr = node.getTimeRange() + # See if this is first period of product + prev = node.getComponent().getPrev() + if prev is None: + # Adjust timeRange if necessary + if self._tableStartTimeMode == "current": + currentTime = tree.get('argDict').get('creationTime') + currentTime = int(currentTime/3600.0)*3600.0 + tr = self.makeTimeRange(currentTime, tr.endTime().unixTime()) + elif self._tableStartTimeMode == 
"ignitionTime": + fireDateTime = time.mktime(self._fireDateTime) + fireDateTime = int(fireDateTime/3600.0)*3600.0 + fireTime = fireDateTime - (self._tableStartTimeOffset * 3600) + if fireTime >= tr.startTime().unixTime() and \ + fireTime < tr.endTime().unixTime(): + tr = self.makeTimeRange(fireTime, tr.endTime().unixTime()) + if self._tabularAllPeriods == "yes": + timeRange = tr + else: + # One 12-hour period + timeRange = self.makeTimeRange(tr.startTime(), + tr.startTime()+12*3600) + #print "Table time range", timeRange, node.getTimeRange() + return timeRange + + def _makeTableLabel(self, tree, timeRange, colWidth): + localTime, shift = self.determineTimeShift() + rtz = self._getActualTimeZone() + stz = time.tzname[0] + dtz = time.tzname[1] + otz = stz[0:1] + ptz = rtz[0:1] + if otz == ptz: + start = timeRange.startTime() + shift + else: + offset = 0 + if ptz == "E": + if otz == "E": + offset = 0 + elif otz == "C": + offset = 1 + elif otz == "M": + offset = 2 + elif otz == "P": + offset = 3 + elif ptz == "C": + if otz == "E": + offset = -1 + elif otz == "C": + offset = 0 + elif otz == "M": + offset = 1 + elif otz == "P": + offset = 2 + elif ptz == "M": + if otz == "E": + offset = -2 + elif otz == "C": + offset = -1 + elif otz == "M": + offset = 0 + elif otz == "P": + offset = 1 + elif ptz == "P": + if otz == "E": + offset = -3 + elif otz == "C": + offset = -2 + elif otz == "M": + offset = -1 + elif otz == "P": + offset = 0 + if stz[1:3] == rtz[1:3]: + start = timeRange.startTime() + shift + offset*3600 + else: + start = timeRange.startTime() + shift + offset*3600 + militaryHour = start.hour + hour, ampm = self.hourAmPm(militaryHour) + for low, hi, shortVal, longVal in self._tableLabels(): + if militaryHour >= low and militaryHour <= hi: + if colWidth > 4: + val = longVal + else: + val = shortVal + val = val.replace("hour", repr(hour)) + break + return val + + def _tableLabels(self): + return [ + (0,0, "Mid", "Midngt"), + (1,9, "hourAM", "hour AM"), + (10,11, 
"hourA", "hour AM"), + (12,12, "12P", "Noon"), + (13,21, "hourPM", "hour PM"), + (22,23, "hourP", "hour PM"), + ] + + def assembleIndentedPhrases(self, tree, component): + # Assemble and indent component phrases and add Label + # Qualify the phrases with local effect qualifiers + # if present. + # e.g. "near the coast" + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + self.consolidateLocalEffectPhrases(tree, component) + self.combineConjunctivePhrases(tree, component) + fcst = "" + lastQualifier = None + lastPhrase = None + self.orderWxPhrases(tree, component) + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + if words == "": + if self.removeEmptyPhrase(tree, phrase): + continue + + # Handle multiple element table phrase + # that appears per period + # No need to indent or qualify + name = phrase.get("name") + if name == "multipleElementTable_perPeriod_phrase": + fcst = fcst + words + continue + if name == "_fwsTable_phrase": + if words != "": + fcst = fcst + "\n" + words + continue + + if phrase.get("compound"): + makeSentence = 0 + else: + makeSentence = 1 + words, lastQualifier = self.qualifyWords( + phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase, + makeSentence=makeSentence) + lastPhrase = phrase + indentLabel = phrase.get("indentLabel") + label = self.phrase_descriptor( + tree, phrase, indentLabel, indentLabel) + #print "indentLabel, label", indentLabel, label + if indentLabel is not None and label == "": + label = indentLabel + if words == "": + words = " " + words = self.labelIndent(words, label) + fcst = fcst + words + if fcst == "": + return self.setWords(component,"") + # Add label + issuanceInfo = tree.get("issuanceInfo") + index = component.getIndex() + curLocalTime, shift = self.determineTimeShift() + creationTime = tree.get('argDict').get('creationTime') + curLocalTime = AbsTime.AbsTime(creationTime) + label = 
self.createLabel(tree, component, component.get("timeRange"), + issuanceInfo, curLocalTime, shift, index) + return self.setWords(component, label + "\n" + fcst + "\n") + + def _getTableStats(self, tree, element, tr, area, mergeMethod="Max", getValueMethod="Average"): + stats = tree.stats.get(element, tr, area, mergeMethod=mergeMethod) + if stats is None: + return None + return self.getValue(stats, getValueMethod) + + def _sky_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + sky = self._getTableStats(tree, "Sky", timeRange, node.getAreaLabel()) + if sky is None: + value = "M" + elif self._elementFormatDict.get("Sky", "numeric") == "numeric": + value = repr(int(sky + 0.5)) + else: + for threshold, shortVal, longVal in self._skyTableValues(): + if sky <= threshold: + if colWidth <= 4: + value = shortVal + else: + value = longVal + break + return value + + def _numSky_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + sky = self._getTableStats(tree, "Sky", timeRange, node.getAreaLabel()) + if sky is None: + value = "M" + else: + value = repr(int(sky + 0.5)) + return value + + def _skyTableValues(self): + return [ + (5, "CLR", "CLEAR"), + (25,"MCR", "MCLEAR"), + (50,"PC", "PCLDY"), + (69,"MC", "MCLDY"), + (87,"MC", "MCLDY"), + (100,"CDY", "CLOUDY"), + ] + + def _weatherType_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + areaLabel = node.getAreaLabel() + wxStats = tree.stats.get("Wx", timeRange, areaLabel, mergeMethod="Max") + #print "wxStats = ", wxStats,tr + if wxStats is None or len(wxStats) == 0: + return "M" + # If there are 2 subkeys tied for the highest rank, + # search for a weather mix table. 
+ # Otherwise, use the weather code table to find the weather value + coRank = None + hiRank = -1 + for subkey, rank in wxStats: + # Find top 2 ranked subkeys to look for mixtures + if rank > hiRank and subkey.wxType() != "T": + hiKey = subkey + hiRank = rank + elif rank == hiRank and subkey.wxType() != "T": + coKey = subkey + coRank = rank + if hiRank == -1: + return "" + keyAttrs = hiKey.attributes() + keyType = hiKey.wxType() + if coRank == hiRank: + keyAttrs, keyType = self._matchMix(hiKey, coKey, colWidth) + value = self._matchType(keyAttrs, keyType, colWidth) + return value + + def _matchType(self, keyAttrs, keyType, colWidth): + # Try to match the weatherCodeTable to the subkey + # If no match found, return None + value = None + for wxAttr, wxType, shortVal, longVal in self._weatherCodeTable(): + if wxAttr == "" or wxAttr in keyAttrs: + if wxType == keyType: + if colWidth == 4: + value = shortVal + else: + value = longVal + break + if value is None: + if colWidth == 4: + value = "???" + else: + value = "??????" 
+ return value + + def _matchMix(self, hiKey, coKey, colWidth): + # Try to match the weather mix + # Return the attribute and wxType + # If not found, return the hiKey attributes and wxType + for attr1, type1, attr2, type2, keyAttr, keyType in self._weatherMixTable(): + for key1, key2 in [(hiKey, coKey), (coKey, hiKey)]: + if type1 == key1.wxType() and type2 == key2.wxType(): + if len(key1.attributes()) == 0 and \ + len(key2.attributes()) == 0 and \ + attr1 == "" and attr2 == "": + # Match found + return [keyAttr], keyType + elif len(key1.attributes()) == 0 and \ + len(key2.attributes()) != 0 and \ + attr1 == "" and attr2 in key2.attributes(): + # Match found + return [keyAttr], keyType + elif len(key1.attributes()) != 0 and \ + len(key2.attributes()) == 0 and \ + attr1 in key1.attributes() and attr2 == "": + # Match found + return [keyAttr], keyType + elif len(key1.attributes()) != 0 and \ + len(key2.attributes()) != 0 and \ + attr1 in key1.attributes() and \ + attr2 in key2.attributes(): + # Match found + return [keyAttr], keyType + + # No match found + return hiKey.attributes(), hiKey.wxType() + + def _weatherCodeTable(self): + return [ + ("", "", "", "NONE" ), + ("Dry", "T", "DYT","DRYTSM"), + ("", "T", "TSM","TSTORM"), + ("GW", "T", "TSM","TSTORM"), + ("SmA", "T", "TSM","TSTORM"), + ("", "S", "SN", "SNOW" ), + ("", "R", "RN", "RAIN" ), + ("", "SW", "SW", "SNSHWR"), + ("", "RW", "RW", "RNSHWR"), + ("", "L", "DZL","DRZL" ), + ("", "ZR", "FZR","FZRAIN"), + ("", "ZL", "FZD","FZDRZL"), + ("", "IP", "SLT","SLEET" ), + ("", "F", "FOG","FOG" ), + ("", "ZF", "FZF","FZFOG" ), + ("", "IF", "IFG","ICEFOG"), + ("", "IC", "ICR","ICECRL"), + ("", "H", "HAZ","HAZE" ), + ("", "BS", "BSN","BLSNOW"), + ("", "BN", "BSD","BLSAND"), + ("", "BD", "BDT","BLDUST"), + ("", "K", "SMK","SMOKE" ), + ("", "FR", "FST","FROST" ), + ("", "ZY", "FZS","FZSPRY"), + ("", "VA", "ASH","VOLASH"), + # Mixed Weather Types + ("", "RS", "RS", "RNSN" ), + ("", "LF", "DZF","DZL/FG"), + ("", "SF", 
"SNF","SN/FG "), + ("", "RF", "RNF","RN/FG "), + ("", "ZRS", "ZRS","ZRN/SN"), + # Unknown Mixed Weather Type + ("", "XX", "???","??????"), + ] + + def _weatherMixTable(self): + return [ + ("", "S", "","R", "", "RS"), + ("", "SW","","RW","", "RS"), + ("", "RW","","T", "", "T"), + ("Dry","T", "","RW","Dry","T"), + ("", "L", "","F", "", "LF"), + ("", "S", "","F", "", "SF"), + ("", "R", "","F", "", "RF"), + ("", "SW","","F", "", "SF"), + ("", "RW","","F", "", "RF"), + ("", "ZR","","S", "", "ZRS"), + ("", "ZR","","SW","", "ZRS"), + ] + + def _tstmCov_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + areaLabel = node.getAreaLabel() + wxStats = tree.stats.get("Wx", timeRange, areaLabel, mergeMethod="Max") + if wxStats is None or len(wxStats) == 0: + return "M" + hiRank = -1 + for subkey, rank in wxStats: + print("*** vtm ***") + print(subkey, rank) + if rank > hiRank and subkey.wxType() == "T": + hiKey = subkey + hiRank = rank + if hiRank == -1: + return "" + value = None + for cov, shortVal, longVal in self._coverageCodeTable(): + if hiKey.coverage() == cov: + if colWidth == 4: + value = shortVal + else: + value = longVal + break + if value is None: + if colWidth == 4: + value = "???" + else: + value = "??????" + return value + + def _weatherCov_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + areaLabel = node.getAreaLabel() + wxStats = tree.stats.get("Wx", timeRange, areaLabel, mergeMethod="Max") + if wxStats is None or len(wxStats) == 0: + return "M" + hiRank = -1 + for subkey, rank in wxStats: + if rank > hiRank and subkey.wxType() != "T": + hiKey = subkey + hiRank = rank + if hiRank == -1: + return "" + value = None + for cov, shortVal, longVal in self._coverageCodeTable(): + if hiKey.coverage() == cov: + if colWidth == 4: + value = shortVal + else: + value = longVal + break + if value is None: + if colWidth == 4: + value = "???" + else: + value = "??????" 
+ return value + + def _coverageCodeTable(self): + return [ + ("","", ""), + ("SChc", "SCH","S CHC"), + ("Iso", "ISO","ISOLTD"), + ("Chc", "CHC","CHANCE"), + ("Sct", "SCT","SCTTRD"), + ("Lkly", "LKY","LIKELY"), + ("Num", "NUM","NUMRUS"), + ("Def", "DEF","DEFNTE"), + ("Wide", "WID","WIDSPD"), + ("Ocnl", "OCL","OCNL"), + ("Frq", "FRQ","FRQNT"), + ("Brf", "BRF","BRIEF"), + ("Pds", "PDS","PERIOD"), + ("Inter", "ITR","ITRMT"), + ("Areas", "ARS","AREAS"), + ("Patchy", "PTY","PATCHY") + ] + + def _temp_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + temp = self._getTableStats(tree, "T", timeRange, node.getAreaLabel()) + if temp is None: + return "M" + if temp >= 0: + temp = int(temp + 0.5) + else: + temp = int(temp - 0.5) + return repr(temp) + + def _rh_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + rh =self._getTableStats(tree, "RH", timeRange, node.getAreaLabel()) + if rh is None: + return "M" + rh = int(rh + 0.5) + return repr(rh) + + # Wind Methods + # Utility for Wind Methods + + # Tabular Transport Wind bug found by John DeBlock and Stephen Miller. + # tree.stats.get was using self._20ftWindParm instead of element. 
+ def _getWindDirSpdStr(self, tree, node, timeRange, element, formatElement=None, units=None): + windStats = tree.stats.get(element, timeRange, node.getAreaLabel(), + mergeMethod="Max") + if windStats is None: + return None + wspd,wdir = windStats + if formatElement is None: + formatElement = element + if self._elementFormatDict.get(formatElement, "alpha") == "alpha": + wdir = int(wdir + 0.5) + dirString = self._dirConvert(wdir) + else: + dir = int(wdir/10.0 + 0.5) * 10 + if dir < 10: + dirString = "00" + repr(dir) + elif dir < 100: + dirString = "0" + repr(dir) + else: + dirString = repr(dir) + if element == "Wind": + wspd = wspd * self._windAdjustmentFactor + if units == "Metric": + wspd = int(wspd*.44704 + 0.5) + else: + wspd = int(wspd + 0.5) + spdString = repr(wspd) + return dirString, spdString + + def _getWindNumDirSpdStr(self, tree, node, timeRange, element, formatElement=None, units=None): + windStats = tree.stats.get(element, timeRange, node.getAreaLabel(), + mergeMethod="Max") + if windStats is None: + return None + wspd,wdir = windStats + if formatElement is None: + formatElement = element + dir = int(wdir/10.0 + 0.5) * 10 + if dir < 10: + dirString = "00" + repr(dir) + elif dir < 100: + dirString = "0" + repr(dir) + else: + dirString = repr(dir) + if element == "Wind": + wspd = wspd * self._windAdjustmentFactor + if units == "Metric": + wspd = int(wspd*.44704 + 0.5) + else: + wspd = int(wspd + 0.5) + spdString = repr(wspd) + return dirString, spdString + + def _getEyeWindDirSpdStr(self, tree, node, timeRange, element, formatElement=None, units=None): + windStats = tree.stats.get(element, timeRange, node.getAreaLabel(), + mergeMethod="Max") + if windStats is None: + return None + wspd,wdir = windStats + if formatElement is None: + formatElement = element + if self._elementFormatDict.get(formatElement, "alpha") == "alpha": + wdir = int(wdir + 0.5) + dirString = self._dirConvert(wdir) + else: + dir = int(wdir/10.0 + 0.5) * 10 + if dir < 10: + dirString = 
"00" + repr(dir) + elif dir < 100: + dirString = "0" + repr(dir) + else: + dirString = repr(dir) + if element == "Wind": + wspd = wspd * self._eyeWindAdjustmentFactor + if units == "Metric": + wspd = int(wspd*.44704 + 0.5) + else: + wspd = int(wspd + 0.5) + spdString = repr(wspd) + return dirString, spdString + + def _getSfcWindDirSpdStr(self, tree, node, timeRange, element, formatElement=None, units=None): + windStats = tree.stats.get(element, timeRange, node.getAreaLabel(), + mergeMethod="Max") + if windStats is None: + return None + wspd,wdir = windStats + if formatElement is None: + formatElement = element + if self._elementFormatDict.get(formatElement, "alpha") == "alpha": + wdir = int(wdir + 0.5) + dirString = self._dirConvert(wdir) + else: + dir = int(wdir/10.0 + 0.5) * 10 + if dir < 10: + dirString = "00" + repr(dir) + elif dir < 100: + dirString = "0" + repr(dir) + else: + dirString = repr(dir) + if units == "Metric": + wspd = int(wspd*.44704 + 0.5) + else: + wspd = int(wspd + 0.5) + spdString = repr(wspd) + return dirString, spdString + + def _dirConvert(self, wdir): + dirString = "" + if wdir >= 338 or wdir <= 22: + dirString = "N" + elif wdir >= 23 and wdir <= 67: + dirString = "NE" + elif wdir >= 68 and wdir <= 112: + dirString = "E" + elif wdir >= 113 and wdir <= 157: + dirString = "SE" + elif wdir >= 158 and wdir <= 202: + dirString = "S" + elif wdir >= 203 and wdir <= 247: + dirString = "SW" + elif wdir >= 248 and wdir <= 292: + dirString = "W" + elif wdir >= 293 and wdir <= 337: + dirString = "NW" + return dirString + + def _adjustEyeWind(self, value): + # adjustment for winds + factor = self.nlValue(self._eyeWindAdjustmentFactor, value) + value = value * factor + return value + + def _wind_value(self, statDict, timeRange, argList, element=None, formatElement=None): + if element is None: + element = self._20ftWindParm + if formatElement is None: + formatElement = self._20ftWindParm + tree, node, colWidth = tuple(argList) + if formatElement == 
"TransMetWind": + windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement, "Metric") + elif formatElement == "EyeWind": + windString = self._getEyeWindDirSpdStr(tree, node, timeRange, element, formatElement) + elif formatElement == "SfcWind": + windString = self._getSfcWindDirSpdStr(tree, node, timeRange, element, formatElement) + else: + windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) + if windString is None: + return "M" + dirString, spdString = windString + if self._elementFormatDict.get(formatElement, "alpha") == "alpha": + value = dirString + " " + spdString + else: + value = dirString + "/" + spdString + return value + + def _windWithGust_value(self, statDict, timeRange, argList, element=None, formatElement=None): + if element is None: + element = self._20ftWindParm + if formatElement is None: + formatElement = self._20ftWindParm + tree, node, colWidth = tuple(argList) + if formatElement == "EyeWind": + windString = self._getEyeWindDirSpdStr(tree, node, timeRange, element, formatElement) + elif formatElement == "SfcWind": + windString = self._getSfcWindDirSpdStr(tree, node, timeRange, element, formatElement) + else: + windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) + if windString is None: + return "M" + dirString, spdString = windString + gust = self._getTableStats(tree, "WindGust", timeRange, node.getAreaLabel(), + getValueMethod="Max") + if gust is None: + gstString = "GMM" + gstString = "" + gust = int(self.getValue(gust) + 0.5) + if gust > string.atoi(spdString): + gstString = "G" + repr(gust) + if self._elementFormatDict.get(formatElement, "alpha") == "alpha": + value = dirString + " " + spdString + gstString + else: + value = dirString + "/" + spdString + gstString + return value + + def _windDir_value(self, statDict, timeRange, argList, element=None, formatElement=None): + if element is None: + element = self._20ftWindParm + if formatElement is None: + 
formatElement = self._20ftWindParm + tree, node, colWidth = tuple(argList) + windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) + if windString is None: + return "M" + dirString, spdString = windString + return dirString + + def _windNumDir_value(self, statDict, timeRange, argList, element=None, formatElement=None): + if element is None: + element = self._20ftWindParm + if formatElement is None: + formatElement = self._20ftWindParm + tree, node, colWidth = tuple(argList) + windString = self._getWindNumDirSpdStr(tree, node, timeRange, element, formatElement) + if windString is None: + return "M" + dirString, spdString = windString + return dirString + + def _eyewindNumDir_value(self, statDict, timeRange, argList): + return self._windNumDir_value(statDict, timeRange, argList, "Wind", "EyeWind") + + def _sfcwind_value(self, statDict, timeRange, argList): + return self._wind_value(statDict, timeRange, argList, "Wind", "SfcWind") + + def _sfcwindWithGust_value(self, statDict, timeRange, argList): + return self._windWithGust_value(statDict, timeRange, argList, "Wind", "SfcWind") + + def _sfcwindDir_value(self, statDict, timeRange, argList): + return self._windDir_value(statDict, timeRange, argList, "Wind", "SfcWind") + + def _sfcwindSpd_value(self, statDict, timeRange, argList): + return self._windSpd_value(statDict, timeRange, argList, "Wind", "SfcWind") + + def _sfcwindGust_value(self, statDict, timeRange, argList): + return self._windGust_value(statDict, timeRange, argList, "Wind", "SfcWind") + + def _sfcwindNumDir_value(self, statDict, timeRange, argList): + return self._windNumDir_value(statDict, timeRange, argList, "Wind", "SfcWind") + + def _ridgeNumDir_value(self, statDict, timeRange, argList): + return self._windNumDir_value(statDict, timeRange, argList, "FreeWind", "RidgeWind") + + def _transNumDir_value(self, statDict, timeRange, argList): + return self._windNumDir_value(statDict, timeRange, argList, "TransWind", "TransWind") + 
+ def _windSpd_value(self, statDict, timeRange, argList, element=None, formatElement=None): + if element is None: + element = self._20ftWindParm + if formatElement is None: + formatElement = self._20ftWindParm + tree, node, colWidth = tuple(argList) + if formatElement == "TransMetWind": + windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement, "Metric") + elif formatElement == "EyeWind": + windString = self._getEyeWindDirSpdStr(tree, node, timeRange, element, formatElement) + elif formatElement == "SfcWind": + windString = self._getSfcWindDirSpdStr(tree, node, timeRange, element, formatElement) + else: + windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) + if windString is None: + return "M" + dirString, spdString = windString + return spdString + + def _windGust_value(self, statDict, timeRange, argList, element=None, formatElement=None ): + if element is None: + element = self._20ftWindParm + if formatElement is None: + formatElement = self._20ftWindParm + tree, node, colWidth = tuple(argList) + windString = self._getWindDirSpdStr(tree, node, timeRange, element, formatElement) + if windString is None: + spdString = '0' + else: + dirString, spdString = windString + gust = self._getTableStats(tree, "WindGust", timeRange, node.getAreaLabel(), + getValueMethod="Max") + if gust is None: + return "M" + gstString = " " + gust = int(gust + 0.5) + if gust > string.atoi(spdString): + gstString = repr(gust) + return gstString + + def _eyewind_value(self, statDict, timeRange, argList): + return self._wind_value(statDict, timeRange, argList, "Wind", "EyeWind") + + def _eyewindWithGust_value(self, statDict, timeRange, argList): + return self._windWithGust_value(statDict, timeRange, argList, "Wind", "EyeWind") + + def _eyewindDir_value(self, statDict, timeRange, argList): + return self._windDir_value(statDict, timeRange, argList, "Wind", "EyeWind") + + def _eyewindSpd_value(self, statDict, timeRange, argList): + return 
self._windSpd_value(statDict, timeRange, argList, "Wind", "EyeWind") + + def _eyewindGust_value(self, statDict, timeRange, argList): + return self._windGust_value(statDict, timeRange, argList, "Wind", "EyeWind") + + def _ridge_value(self, statDict, timeRange, argList): + return self._wind_value(statDict, timeRange, argList,"FreeWind", "RidgeWind" ) + + def _ridgeDir_value(self, statDict, timeRange, argList): + return self._windDir_value(statDict, timeRange, argList, "FreeWind", "RidgeWind") + + def _ridgeSpd_value(self, statDict, timeRange, argList): + return self._windSpd_value(statDict, timeRange, argList, "FreeWind", "RidgeWind") + + def _trans_value(self, statDict, timeRange, argList): + return self._wind_value(statDict, timeRange, argList, "TransWind", "TransWind") + + def _transDir_value(self, statDict, timeRange, argList): + return self._windDir_value(statDict, timeRange, argList, "TransWind", "TransWind") + + def _transSpd_value(self, statDict, timeRange, argList): + return self._windSpd_value(statDict, timeRange, argList, "TransWind", "TransWind") + + def _transMetric_value(self, statDict, timeRange, argList): + return self._wind_value(statDict, timeRange, argList, "TransWind", "TransMetWind") + + def _transSpdMetric_value(self, statDict, timeRange, argList): + return self._windSpd_value(statDict, timeRange, argList, "TransWind", "TransMetWind") + + def _mixingHeight_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + mix = self._getTableStats(tree, "MixHgt", timeRange, node.getAreaLabel()) + if mix is None: + return "M" + if self._tabularMixingHeightUnits == "ft" and colWidth != 4: + mixft = int(mix/100.0+0.5) * 100 + if mixft < 100: + value = "BLW100" + else: + value = repr(mixft) + else: + if mix < 50: + mix = 100.0 + kmix = mix / 1000.0 + kmix = round(kmix,1) + if kmix < 10: + value = str(round(kmix,1)) + else: + kmix = mix / 1000.0 + kmix = int(kmix + 0.5) + value = repr(kmix) + return value + + def 
_mixingHeightMetric_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + mix = self._getTableStats(tree, "MixHgt", timeRange, node.getAreaLabel()) + if mix is None: + return "M" + if self._tabularMixingHeightUnits == "ft" and colWidth != 4: + mixMetric = mix * 0.3048 + mixRounded = int(mixMetric/10.0+0.5) * 10 + if mixRounded < 10: + value = "BLW10M" + else: + value = repr(mixRounded) + else: + if mix < 330: + mix = 330.0 + mixMetric = mix * 0.3048 / 1000.0 + kmix = round(mixMetric,1) + if kmix < 10: + value = str(round(kmix,1)) + else: + value = repr(kmix) + return value + + def _cwr_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + cwr = self._getTableStats(tree, self._cwrParm, timeRange, node.getAreaLabel()) + if cwr is None: + return "M" + return repr(int(cwr/10 + 0.5)*10) + + def _pop_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + pop = self._getTableStats(tree, "PoP", timeRange, node.getAreaLabel()) + if pop is None: + return "M" + return repr(int(pop/10 + 0.5)*10) + + def _lal_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + lal = self._getTableStats(tree, "LAL", timeRange, node.getAreaLabel()) + if lal is None: + return "M" + return repr(int(lal+0.5)) + + def _dsi_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + dsi = self._getTableStats(tree, "DSI", timeRange, node.getAreaLabel()) + if dsi is None: + return "M" + return repr(int(dsi + 0.5)) + + + def _ldsi_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + #dsi = self._getTableStats(tree, "DSI", timeRange, node.getAreaLabel()) + dsi = self._getTableStats(tree, "LDSI", timeRange, node.getAreaLabel()) + if dsi is None: + return "M" + return repr(int(dsi + 0.5)) + + def _lvori_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + #lvori = self._getTableStats(tree, "DSI", 
timeRange, node.getAreaLabel()) + lvori = self._getTableStats(tree, "LVORI", timeRange, node.getAreaLabel()) + if lvori is None: + return "M" + return repr(int(lvori + 0.5)) + + def _adi_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + adi = self._getTableStats(tree, "ADI", timeRange, node.getAreaLabel()) + if adi is None: + return "M" + return repr(int(adi + 0.5)) + + def _haines_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + stats = self._getTableStats(tree, "Haines", timeRange, node.getAreaLabel()) + if stats is None: + return "M" + return repr(int(stats + 0.5)) + + def _ventrate_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + ventrate = self._getTableStats(tree, "VentRate", timeRange, node.getAreaLabel()) + if ventrate is None: + return "M" + return repr(int(ventrate/1000.0 + 0.5)) + + def _windWave_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + stats = self._getTableStats(tree, "WindWaveHgt", timeRange, node.getAreaLabel()) + if stats is None: + return "M" + return repr(int(stats + 0.5)) + + def _waveHeight_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + stats = self._getTableStats(tree, "WaveHeight", timeRange, node.getAreaLabel()) + if stats is None: + return "M" + return repr(int(stats + 0.5)) + + def _swellPeriod_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + stats = self._getTableStats(tree, "Period", timeRange, node.getAreaLabel()) + if stats is None: + return "M" + return repr(int(stats + 0.5)) + + def _swell_value(self, statDict, timeRange, argList): + return self._wind_value(statDict, timeRange, argList,"Swell", "RidgeWind" ) + + def _swellDir_value(self, statDict, timeRange, argList): + return self._windDir_value(statDict, timeRange, argList, "Swell", "RidgeWind") + + def _swellHgt_value(self, statDict, timeRange, argList): + 
return self._windSpd_value(statDict, timeRange, argList, "Swell", "RidgeWind") + + def _freezingLevel_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + mix = self._getTableStats(tree, "FzLevel", timeRange, node.getAreaLabel()) + if mix is None: + return "M" + if mix < 50: + mix = 100.0 + kmix = mix / 1000.0 + kmix = round(kmix,1) + if kmix < 10: + value = str(round(kmix,1)) + else: + kmix = mix / 1000.0 + kmix = int(kmix + 0.5) + value = repr(kmix) + return value + + def _ceiling_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + temp = self._getTableStats(tree, "PredHgt", timeRange, node.getAreaLabel()) + if temp is None: + return " " + temp = temp / 10.0 + if temp < 10: + tempWords = string.strip("%4.1f" % temp) + else: + tempWords = repr(int(temp + 0.5)) + return tempWords + + def _visibility_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + temp = self._getTableStats(tree, "Vsby", timeRange, node.getAreaLabel()) + if temp is None: + return " " + print("colWidth =", colWidth) + if colWidth > 4: + if temp < 1.0: + tempWords = string.strip("%4.2f" % temp) + elif temp >= 1.0 and temp < 3.0: + tempWords = string.strip("%4.1f" % temp) + else: + tempWords = repr(int(temp + 0.5)) + else: + if temp < 1.0: + tempWords = string.strip("%3.2f" % temp) + tempWords = tempWords[1:] + elif temp >= 1.0 and temp < 3.0: + tempWords = string.strip("%3.1f" % temp) + else: + tempWords = repr(int(temp + 0.5)) + return tempWords + + def _icing_value(self, statDict, timeRange, argList): + return " " + + def _td_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + temp = self._getTableStats(tree, "Td", timeRange, node.getAreaLabel()) + if temp is None: + return "M" + if temp >= 0: + temp = int(temp + 0.5) + else: + temp = int(temp - 0.5) + return repr(temp) + + def _heatIndex_value(self, statDict, timeRange, argList): + tree, node, colWidth = 
tuple(argList) + temp = self._getTableStats(tree, "HeatIndex", timeRange, node.getAreaLabel()) + if temp is None: + return "M" + if temp >= 0: + temp = int(temp + 0.5) + else: + temp = int(temp - 0.5) + return repr(temp) + + def _wwa_exclude(self,stats): + list = [] + index = 0 + newstats = [] + while index < len(stats): + eidx = 0 + flag = 1 + while eidx < len(list): + if stats[index] == list[eidx]: + flag = 0 + eidx = eidx + 1 + if flag: + newstats.append(stats[index]) + index = index + 1 + return newstats + + def _wwa_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + stats = self._getTableStats(tree, "Hazards", timeRange, node.getAreaLabel()) + if stats is None: + return " " + if stats[0] == "": + return " " + stats = self._wwa_exclude(stats) + return stats[0][0:2] + stats[0][3:4] + + def _wwa2_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + stats = self._getTableStats(tree, "Hazards", timeRange, node.getAreaLabel()) + if stats is None: + return " " + stats = self._wwa_exclude(stats) + if len(stats) < 2: + return " " + return stats[1][0:2] + stats[1][3:4] + + def _wwa3_value(self, statDict, timeRange, argList): + tree, node, colWidth = tuple(argList) + stats = self._getTableStats(tree, "Hazards", timeRange, node.getAreaLabel()) + if stats is None: + return " " + stats = self._wwa_exclude(stats) + if len(stats) < 3: + return " " + return stats[2][0:2] + stats[2][3:4] + + ### NEW NARRATIVE PHRASES ### + + def dsi_phrase(self): + return { + "setUpMethod": self.dsi_setUp, + "wordMethod": self.dsi_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def dsi_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("DSI", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "DSI.................") + return self.DONE() + + def dsi_words(self, tree, node) : + "Create phrase Probability of 
Precipitation" + statDict = node.getStatDict() + dsi = self.getStats(statDict, "DSI") + if dsi is None: + return self.setWords(node.parent, "MISSING") + dsi = self.getValue(dsi) + words = repr(int(dsi + 0.5)) + return self.setWords(node, words) + + def ldsi_phrase(self): + return { + "setUpMethod": self.ldsi_setUp, + "wordMethod": self.ldsi_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def ldsi_setUp(self, tree, node): + #elementInfoList = [self.ElementInfo("DSI", "List")] + elementInfoList = [self.ElementInfo("LDSI", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "LDSI................") + return self.DONE() + + def ldsi_words(self, tree, node): + "Create phrase Probability of Precipitation" + statDict = node.getStatDict() + #ldsi = self.getStats(statDict, "DSI") + ldsi = self.getStats(statDict, "LDSI") + if ldsi is None: + return self.setWords(node.parent, "MISSING") + ldsi = self.getValue(ldsi) + words = repr(int(ldsi + 0.5)) + return self.setWords(node, words) + + def lvori_phrase(self): + return { + "setUpMethod": self.lvori_setUp, + "wordMethod": self.lvori_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def lvori_setUp(self, tree, node): + #elementInfoList = [self.ElementInfo("DSI", "List")] + elementInfoList = [self.ElementInfo("LVORI", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "LVORI...............") + return self.DONE() + + def lvori_words(self, tree, node): + statDict = node.getStatDict() + lvori = self.getStats(statDict, "LVORI") + #lvori = self.getStats(statDict, "DSI") + if lvori is None: + return self.setWords(node.parent, "MISSING") + lvori = self.getValue(lvori) + words = repr(int(lvori + 0.5)) + return self.setWords(node, words) + + def pop_phrase(self): + return { + "setUpMethod": self.pop_setUp, + "wordMethod": self.pop_words, + 
"phraseMethods": self.standard_phraseMethods(), + } + def pop_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("PoP", "Max")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "POP.................") + return self.DONE() + + def pop_words(self, tree, node) : + "Create phrase Probability of Precipitation" + statDict = node.getStatDict() + popStats = self.getStats(statDict, "PoP") + if popStats is None: + return self.setWords(node.parent, "MISSING") + pop = self.getValue(popStats) + threshold = self.nlValue(self.null_nlValue( + tree, node, "PoP", "PoP"), pop) + if int(pop) < threshold: + return self.setWords(node, "null") + else: + words = repr(int(pop)) + " percent" + return self.setWords(node, words) + + ### *** END TABULAR TEST SECTION HERE *** ### + +# I had to create these phrases or labels so the FWS formatter will work +# for any WFO out of the baseline. I created labels for elements that +# grids are not created for (that I know of). If offices do have grids +# for these elements, then they can create the phrase to get it into +# the FWS product. + + # For EYE LEVEL WINDS + def fireEyeWind_compoundPhrase(self): + return { + "phraseList": [ + self.wind_summary, + self.wind_phrase, + ], + "phraseMethods": [ + self.consolidateSubPhrases, + self.assembleSentences, + self.fireEyeWind_finishUp + ], + } + def fireEyeWind_finishUp(self, tree, node): + "Create a phrase for Winds" + # Empty phrase if doing ridge/valley winds + if self.currentAreaContains( + tree, self.ridgeValleyAreas(tree, node)) == 1: + return self.setWords(node, "") + words = node.get("words") + if words is None: + return + if words == "": + words = "MISSING" + node.set("descriptor", "") + node.set("indentLabel", "Eye level winds.....") + node.set("compound", 1) + return self.setWords(node, words) + + # For Wind shift. Just need the label since there is not phrase. 
+ def fireWindShift_label_phrase(self): + return { + "setUpMethod": self.fireWindShift_label_setUp, + "phraseMethods": [self.postProcessPhrase], + } + + def fireWindShift_label_setUp(self, tree, node): + self.setWords(node, "") + node.set("descriptor", "") + node.set("indentLabel", "Wind shift..........") + return self.DONE() + + # For Surrounding Ridge Wind. + def surroundingRidgeWind_phrase(self): + return { + "setUpMethod": self.surroundingRidgeWind_setUp, + "wordMethod": self.vector_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + def surroundingRidgeWind_setUp(self, tree, node): + self.wind_setUp(tree, node, gustFlag=0, element="FreeWind") + node.set("descriptor", "") + node.set("indentLabel","Surrounding ridge...") + return self.DONE() + + # For Chance of Preciptiation. + def pop_phrase(self): + return { + "setUpMethod": self.pop_setUp, + "wordMethod": self.pop_words, + "phraseMethods": self.standard_phraseMethods(), + } + def pop_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("PoP", "Average")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "POP.................") + return self.DONE() + + def pop_words(self, tree, node) : + "Create phrase Probability of Precipitation" + statDict = node.getStatDict() + popStats = self.getStats(statDict, "PoP") + if popStats is None: + return self.setWords(node.parent, "MISSING") + pop = self.getValue(popStats) + threshold = self.nlValue(self.null_nlValue( + tree, node, "PoP", "PoP"), pop) + if int(pop) < threshold: + return self.setWords(node, "null") + else: + words = repr(int(pop)) + " percent" + return self.setWords(node, words) + + # For Stability Class. 
+ def stabilityClass_phrase(self): + return { + "setUpMethod": self.stabilityClass_setUp, + "wordMethod": self.stabilityClass_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def stabilityClass_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Stability", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Stability class.....") + return self.DONE() + + def stabilityClass_words(self, tree, node) : + "Create phrase Stability Class" + statDict = node.getStatDict() + stability = self.getStats(statDict, "Stability") + if stability is None: + return self.setWords(node.parent, "MISSING") + words = repr(int(self.getValue(stability))) + return self.setWords(node, words) + + # For Marine Layer. + def marineLayer_phrase(self): + return { + "setUpMethod": self.marineLayer_setUp, + "wordMethod": self.marineLayer_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def marineLayer_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("MarineLayer", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Marine layer........") + return self.DONE() + + def marineLayer_words(self, tree, node) : + "Create phrase MarineLayer" + statDict = node.getStatDict() + marineLayer = self.getStats(statDict, "MarineLayer") + if marineLayer is None: + return self.setWords(node.parent, "MISSING") + words = repr(int(self.getValue(marineLayer))) + return self.setWords(node, words) + + def td_phrase(self): + return { + "setUpMethod": self.td_setUp, + "wordMethod": self.td_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def td_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Td", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Dewpoint............") + return 
self.DONE() + + def td_words(self, tree, node): + "Create phrase Td" + statDict = node.getStatDict() + td = self.getValue(self.getStats(statDict, "Td"), "Avg") + if td is None: + return self.setWords(node.parent, "MISSING") + words = repr(int(td)) + return self.setWords(node, words) + + # For Begin/End of Preciptiation. + def pcpnTiming_phrase(self): + return { + "setUpMethod": self.pcpnTiming_setUp, + "phraseMethods": [self.postProcessPhrase], + } + + def pcpnTiming_setUp(self, tree, node): + self.setWords(node, " ") + node.set("descriptor", "") + node.set("indentLabel", "Begin/end of pcpn...") + return self.DONE() + + def _checkStrs(self, checkStrings, inputStr, orderStrings=0, checkMode=1): + # Check the inputStr for the list of checkStrings. + # If a checkString is a tuple, at least one of the + # given tuple strings must be found in the inputStr + # If orderStrings == 1, the strings must occur in order in the inputStr + # If checkMode == 0, the strings should NOT be found in the inputStr + # Returns 1 if successful, the failed checkString if not. 
+ curIndex = -1 + for cStr in checkStrings: + if type(cStr) == tuple: + # Will pass if ANY of these strings are found + # Not valid with checkMode of zero + if not checkMode: + continue + found = 0 + for subStr in cStr: + strIndex = inputStr.find(subStr) + if strIndex >= 0: + found = 1 + break + else: + found = 0 + if not found: + return subStr + else: + # Must find exact string + strIndex = inputStr.find(cStr) + if strIndex < 0: + if checkMode: + return cStr + else: + if not checkMode: + return cStr + # Check the ordering + if orderStrings: + inputStr = inputStr[strIndex:] + return 1 + + + +### For Testing +## def getPreviousProduct(self, stqPil, searchString, version=0): +## f = open("/home/eagle6/hansen/ui/middendorf/GTFSTQBYZ"+`version`, "r") +## product = f.read() +## f.close() +## #print "returning", product +## return product + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/FirePhrases.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/FirePhrases.py index 84ddd99b95..2e81d16e28 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/FirePhrases.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/FirePhrases.py @@ -1,928 +1,928 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# FirePhrases.py -# Methods for producing text forecast from SampleAnalysis statistics. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -import ScalarPhrases -import VectorRelatedPhrases -import WxPhrases -import DiscretePhrases - -class FirePhrases(ScalarPhrases.ScalarPhrases, VectorRelatedPhrases.VectorRelatedPhrases, - WxPhrases.WxPhrases, DiscretePhrases.DiscretePhrases): - def __init__(self): - ScalarPhrases.ScalarPhrases.__init__(self) - VectorRelatedPhrases.VectorRelatedPhrases.__init__(self) - WxPhrases.WxPhrases.__init__(self) - DiscretePhrases.DiscretePhrases.__init__(self) - - ############################################ - ### FIRE WEATHER PHRASES - - ### Sky and Weather - def includeSkyRanges_flag(self, tree, node): - # Set to 0 if you do not want ranges reported with sky phrases: - # Partly cloudy (35-40 PERCENT) - return 1 - - def skyWeather_byTimeRange_compoundPhrase(self): - return { - "phraseList": [ - self.fireSky_phrase, - self.weather_phrase, - ], - "phraseMethods": [ - self.consolidateSubPhrases, - self.assembleSentences, - self.skyWeather_finishUp, - ], - } - def skyWeather_finishUp(self, tree, node): - "Create a phrase for sky/weather" - words = node.get("words") - if words is None: - return - if words == "": - words = "MISSING" - node.set("descriptor", "") - node.set("indentLabel", "Sky/weather.........") - node.set("compound", 1) - return self.DONE() - - def fireSky_phrase(self): - return { - "setUpMethod": self.fireSky_setUp, - "wordMethod": self.fireSky_words, - "phraseMethods": [ - self.checkLocalEffects, - self.combineSky, - self.combineWords, - self.fillNulls, - self.assembleSubPhrases, - self.postProcessPhrase, - ], - } - def fireSky_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Sky", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def fireSky_words(self, tree, node): - # Report average as a worded summary, also return average - statDict = node.getStatDict() - stats = self.getStats(statDict, "Sky") - if stats is None: - return self.setWords(node, "") - avg = self.getValue(stats) - 
dayNight = self.getPeriod(node.getTimeRange(), 1) - words = self.sky_value(tree, node, avg, dayNight) - words = self.addSkyRange(tree, node, words, avg) - return self.setWords(node, words) - - def addSkyRange(self, tree, node, words, avg): - # Add range if desired - if self.includeSkyRanges_flag(tree, node): - roundAvg = int(self.round(avg, "Nearest", 5)) - if roundAvg < 5: - min = 0 - else: - min = roundAvg-5 - if roundAvg >= 100: - max = 100 - else: - max = roundAvg + 5 - units = self.units_descriptor(tree, node, "units", "%") - words = words + " (" + `min` + "-" + `max` + units + ")" - return words - - # Trends - ### MinT, MaxT, MinRH, MaxRH, RH - def trend_DayOrNight_phrase(self): - return { - "setUpMethod": self.trend_DayOrNight_setUp, - "wordMethod": self.trend_DayOrNight_words, - "phraseMethods": [ - self.checkLocalEffects, - self.assembleSubPhrases, - self.postProcessPhrase, - ], - } - def trend_DayOrNight_setUp(self, tree, node): - dayElement, nightElement, trendElement, indent, endWithPeriod = node.get("args") - dayNight = self.getPeriod(node.getTimeRange(), 1) - if dayNight == self.DAYTIME(): - elementName = dayElement - else: - elementName = nightElement - - elementInfoList = [self.ElementInfo(elementName, "MinMax")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("trendElement", trendElement) - node.set("descriptor", "") - if indent == 1: - node.set("indentLabel", " 24 hr trend......") - return self.DONE() - - def trend_DayOrNight_words(self, tree, node): - "Compare current analysis to previous analysis for trends" - elementName = node.get("elementName") - - higher = self.phrase_descriptor(tree, node, "higher", elementName ) - lower = self.phrase_descriptor(tree, node, "lower", elementName ) - threshold = self.trend_threshold(tree, node, elementName, elementName) - if elementName == "MinT" or elementName == "MaxT": - units = self.units_descriptor(tree, node, "units", "degrees") - unit = self.units_descriptor(tree, 
node, "unit", "degree") - if elementName == "MaxRH" or elementName == "MinRH" or elementName == "RH": - units = self.units_descriptor(tree, node, "units", "%") - unit = self.units_descriptor(tree, node, "unit", "%") - trendElement = node.getAncestor("trendElement") - words, diff = self.getTrend( - tree, node, elementName, trendElement, "", units, unit, threshold, - higher, lower) - if words is not None: - if diff == 0: - words = self.phrase_descriptor(tree, node, "unchanged", elementName ) - else: - words = self.phrase_descriptor(tree, node, "missing", elementName ) - return self.setWords(node, words) - - def getTrend(self, tree, phrase, element, trendElement, introWords, units, unit, threshold, - positiveDescriptor, negativeDescriptor): - - timeRange = phrase.getTimeRange() - areaLabel = phrase.getAreaLabel() - absDiff, rawDiff = self.getTrendStats( - tree, phrase, element, timeRange, areaLabel, trendElement) - - if absDiff is None: - return absDiff, rawDiff - - diff = int(absDiff) - if absDiff >= threshold: - if rawDiff >=0: - descriptor = positiveDescriptor - else: - descriptor = negativeDescriptor - if diff == 1: - units = unit - introWords = self.addSpace(introWords) - if units != "%": - units = " " + units - words = introWords + `diff` + units + " " + descriptor - else: - words = "" - #print "returning", words, diff - return words, diff - - def getTrendStats(self, tree, phrase, element, timeRange, areaLabel, trendElement): - # NO CONVERSION DONE - # Try the trend element first - stats = tree.stats.get(trendElement, timeRange, areaLabel, - mergeMethod="Average") - #print "\ngetTrendStats" - if stats is not None: - rawDiff = int(self.getValue(stats)) - absDiff = abs(rawDiff) - else: - #Use Max/Min element - curStats = tree.stats.get(element, timeRange, areaLabel, - mergeMethod = "Average", statLabel="mode") - #print "curstats", curStats - if curStats is None: - return None, 0 - - prevTimeRange = self.adjustTimeRange(timeRange, -24) - prevStats = 
tree.stats.get(element, prevTimeRange, areaLabel, - mergeMethod = "Average", statLabel="mode") - #print "prevstats", prevStats - if prevStats is None: - return None, 0 - - prevStats = self.getValue(prevStats) - curStats = self.getValue(curStats) - #print "cur, prev", element, curStats, prevStats - absDiff = self.absDiff(curStats, prevStats) - rawDiff = curStats - prevStats - #print "absDiff, rawDiff", absDiff, rawDiff - return absDiff, rawDiff - - def dayOrNight_phrase(self): - return { - "setUpMethod": self.dayOrNight_setUp, - "wordMethod": self.fire_dayOrNight_words, - "phraseMethods": [ - self.checkLocalEffects, - self.assembleSubPhrases, - self.postProcessPhrase, - ], - } - def dayOrNight_setUp(self, tree, node): - dayElement, nightElement, indent, endWithPeriod = node.get("args") - elementName = self.dayOrNight_element(tree, node, dayElement, nightElement) - indentName = elementName+"_FireWx" - method = "MinMax" - if elementName == "RH": - dayNight = self.getPeriod(node.getTimeRange(), 1) - if dayNight == self.DAYTIME(): - indentName = "MinRH_FireWx" - method = "Min" - else: - indentName = "MaxRH_FireWx" - method = "Max" - elementInfoList = [self.ElementInfo(elementName, method)] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", indentName) - return self.DONE() - - def fire_dayOrNight_words(self, tree, node): - elementName = node.getAncestor("elementName") - statDict = node.getStatDict() - if elementName == "MaxT" or elementName == "MinT": - stats = self.getTempStats(tree, node) - if stats is None: - return self.setWords(node.parent, "MISSING") - words = self.getTempRangePhrase(tree, node, stats, elementName) - else: # MinRH, MaxRH or RH - stats = self.getStats(statDict, elementName) - if stats is None: - return self.setWords(node.parent, "MISSING") - connector = self.value_connector(tree, node, elementName, elementName) - min, max = self.getValue(stats, "MinMax") - if min == max: - 
words = `int(min)` - else: - words = `int(min)` + connector + `int(max)` - outUnits = self.element_outUnits(tree, node, elementName, elementName) - units = self.units_descriptor(tree, node,"units", outUnits) - words = words + units - return self.setWords(node, words) - - ### CWR - def cwr_phrase(self): - return { - "setUpMethod": self.cwr_setUp, - "wordMethod": self.cwr_words, - "phraseMethods": self.standard_phraseMethods(), - } - def cwr_setUp(self, tree, node): - try: - cwr = self._cwrParm - except: - cwr = "CWR" - elementInfoList = [self.ElementInfo(cwr, "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "CWR.................") - return self.DONE() - - def cwr_words(self, tree, node) : - "Create phrase Probability of Precipitation" - statDict = node.getStatDict() - cwr = self.getStats(statDict, "CWR") - if cwr is None: - return self.setWords(node.parent, "MISSING") - cwr = self.getValue(cwr) - threshold = self.nlValue(self.null_nlValue( - tree, node, "CWR", "CWR"), cwr) - if int(cwr) < threshold: - return self.setWords(node, "null") - else: - words = `int(cwr)` + " percent" - return self.setWords(node, words) - - ### VentRate or smoke dispersal phrase - def smokeDispersal_phrase(self): - return { - "setUpMethod": self.smokeDispersal_setUp, - "wordMethod": self.smokeDispersal_words, - "phraseMethods": self.standard_phraseMethods(), - } - def smokeDispersal_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("VentRate", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel","Smoke dispersal.....") - return self.DONE() - - def smokeDispersal_words(self, tree, node): - "Create phrase for Smoke Dispersal" - - statDict = node.getStatDict() - stats = self.getStats(statDict, "VentRate") - if stats is None: - return self.setWords(node.parent, "MISSING") - vr1, vr2 = self.getValue(stats, 
"MinMax") - - vr1 = int(vr1) - vr2 = int(vr2) - vrCat1 = self.smokeDispersal_valueStr(vr1) - vrCat2 = self.smokeDispersal_valueStr(vr2) - # Single Value input - if vr1 == vr2: - words = vrCat1 + " (" + `vr1` + " knot-ft)" - # Range - else: - words = vrCat1 + " to " + vrCat2 + " (" + `vr1` + "-" + \ - `vr2` + " knot-ft)" - return self.setWords(node, words) - - # SMOKE DISPERSAL CATEGORIES - def smokeDispersal_valueStr(self, value): - "Convert smoke dispersal value to corresponding category" - - if value < 40000 : - return "poor" - - if value >= 40000 and value < 60000: - return "fair" - - if value >= 60000 and value < 100000 : - return "good" - - if value >= 100000 and value < 150000 : - return "very good" - - if value >= 150000 : - return "excellent" - - ### Fire Winds - def transportWind_phrase(self): - return { - "setUpMethod": self.transportWind_setUp, - "wordMethod": self.vector_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - def transportWind_setUp(self, tree, node): - self.wind_setUp(tree, node, gustFlag=0, element="TransWind") - node.set("descriptor", "") - node.set("indentLabel","Transport winds.....") - return self.DONE() - - def freeWind_phrase(self): - return { - "setUpMethod": self.freeWind_setUp, - "wordMethod": self.vector_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - def freeWind_setUp(self, tree, node): - self.wind_setUp(tree, node, gustFlag=0, element="FreeWind") - node.set("descriptor", "") - node.set("indentLabel","Free winds..........") - return self.DONE() - - def ridgeValleyAreas(self, tree, node): - # List of edit area names for which we want - # ridge/valley winds reported: - # - # 20-foot winds... - # Valleys/lwr slopes... - # Ridges/upr slopes.... - # - # e.g. 
- # return ["Area1"] - # - return [] - - def valleyRidgeAreaNames(self, tree, node): - # These are the areas for valleys and ridges, respectively, - # to be intersected with the current edit area for - # reporting valley winds and ridge winds, respectively. - # NOTE: If you change these area names, you will also - # need to change the names in the FirePeriod "intersectAreas" - # section. - return "Valleys", "Ridges" - - def fireWind_compoundPhrase(self): - return { - "phraseList": [ - self.wind_summary, - self.wind_phrase, - ], - "phraseMethods": [ - self.consolidateSubPhrases, - self.assembleSentences, - self.fireWind_finishUp - ], - } - def fireWind_finishUp(self, tree, node): - "Create a phrase for Winds" - # Empty phrase if doing ridge/valley winds - if self.currentAreaContains( - tree, self.ridgeValleyAreas(tree, node)) == 1: - return self.setWords(node, "") - words = node.get("words") - if words is None: - return - if words == "": - words = "MISSING" - node.set("descriptor", "") - node.set("indentLabel", "20-foot winds.......") - node.set("compound", 1) - return self.setWords(node, words) - - def fireWind_label_phrase(self): - return { - "setUpMethod": self.fireWind_label_setUp, - "phraseMethods": [self.postProcessPhrase], - } - def fireWind_label_setUp(self, tree, node): - if self.currentAreaContains( - tree, self.ridgeValleyAreas(tree, node)) == 0: - return self.setWords(node, "") - self.setWords(node, "") - node.set("descriptor", "") - node.set("indentLabel", "20-foot winds.......") - return self.DONE() - - # Valley/Ridge split set-up - def fireValleyWind_compoundPhrase(self): - return { - "phraseList": [ - self.wind_summary, - self.wind_phrase, - ], - "phraseMethods": [ - self.fireRidgeValleyWind_setUp, - self.consolidateSubPhrases, - self.assembleSentences, - self.fireValleyWind_finishUp - ], - } - def fireRidgeValleyWind_setUp(self, tree, node): - # Used for set-up of fireRidgeWind_compoundPhrase as well. 
- if self.currentAreaContains( - tree, self.ridgeValleyAreas(tree, node)) == 0: - return self.setWords(node, "") - # Set up intersect area to be used for the node - areaName = node.getAreaLabel() - phraseName = node.get("name") - valleys, ridges = self.valleyRidgeAreaNames(tree, node) - if phraseName.find("Valley") >= 0: - area = valleys - else: - area = ridges - intersectName = self.getIntersectName(areaName, area) - #print "setting intersect", intersectName - node.set("areaLabel", intersectName) - return self.DONE() - - def fireValleyWind_finishUp(self, tree, node): - "Create a phrase for Winds" - words = node.get("words") - if words is None: - return - if words == "": - words = "MISSING" - node.set("descriptor", "") - node.set("indentLabel"," Valleys/lwr slopes...") - node.set("compound", 1) - return self.setWords(node, words) - - def fireRidgeWind_compoundPhrase(self): - return { - "phraseList": [ - self.wind_summary, - self.wind_phrase, - ], - "phraseMethods": [ - self.fireRidgeValleyWind_setUp, - self.consolidateSubPhrases, - self.assembleSentences, - self.fireRidgeWind_finishUp - ], - } - def fireRidgeWind_finishUp(self, tree, node): - "Create a phrase for Winds" - words = node.get("words") - if words is None: - return - if words == "": - words = "MISSING" - node.set("descriptor", "") - node.set("indentLabel", " Ridges/upr slopes....") - node.set("compound", 1) - return self.setWords(node, words) - - ### Haines - def hainesDict(self): - return { - 0:"or very low potential for large plume dominated fire growth", - 1:"or very low potential for large plume dominated fire growth", - 2:"or very low potential for large plume dominated fire growth", - 3:"or very low potential for large plume dominated fire growth", - 4:"or low potential for large plume dominated fire growth", - 5:"or moderate potential for large plume dominated fire growth", - 6:"or high potential for large plume dominated fire growth", - 7:"or high potential for large plume dominated fire growth", 
- 8:"or high potential for large plume dominated fire growth", - 9:"or high potential for large plume dominated fire growth", - 10:"or high potential for large plume dominated fire growth" - } - - def haines_phrase(self): - return { - "setUpMethod": self.haines_setUp, - "wordMethod": self.haines_words, - "phraseMethods": [ - self.consolidatePhrase, - self.checkLocalEffects, - self.combinePhraseStats, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.assembleSubPhrases, - self.postProcessPhrase, - ] - } - def haines_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Haines", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel","Haines Index........") - return self.DONE() - - def haines_words(self, tree, node): - "Create phrase for Haines Index" - statDict = node.getStatDict() - stats = self.getStats(statDict, "Haines") - if stats is None: - return self.setWords(node.parent, "MISSING") - - haines1, haines2 = self.getValue(stats, "MinMax") - hainesDict = self.hainesDict() - haines1 = int(haines1) - haines2 = int(haines2) - words1 = hainesDict[haines1] - words2 = hainesDict[haines2] - - # Single Value input - if haines1 == haines2: - words = `haines1` + " " + words1 - # Range - else: - if words1 == words2: - words = words1 - else: - words = words1 + " to " + words2 - words = `haines1` + " to " + `haines2` + " OR " + words - return self.setWords(node, words) - - ### Humidity - def humidityRecovery_percentage(self, tree, node): - # If the maximum humidity is greater than this percentage, - # humidity recovery will be Excellent. 
- return 50 - - def humidityRecovery_phrase(self): - return { - "setUpMethod": self.humidityRecovery_setUp, - "wordMethod": self.humidityRecovery_words, - "phraseMethods": self.standard_phraseMethods(), - } - def humidityRecovery_setUp(self, tree, node): - timeRange = node.getTimeRange() - dayNight = self.getPeriod(timeRange,1) - if dayNight != self.NIGHTTIME(): - return self.setWords(node, "") - if self._useRH: - elementName = "RH" - else: - elementName = "MaxRH" - elementInfoList = [self.ElementInfo(elementName, "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Humidity recovery...") - return self.DONE() - - def humidityRecovery_words(self, tree, node): - "Create phrase for Humidity recovery" - - if self._useRH: - elementName = "RH" - else: - elementName = "MaxRH" - statDict = node.getStatDict() - curStats = self.getStats(statDict, elementName) - if curStats is None: - return self.setWords(node.parent, "MISSING") - maxRH = self.getValue(curStats, "Max") - if maxRH > self.humidityRecovery_percentage(tree, node): - return self.setWords(node, "Excellent") - timeRange = node.getTimeRange() - prevTimeRange = self.adjustTimeRange(timeRange, -24) - prevStats = tree.stats.get(elementName, prevTimeRange, node.getAreaLabel(), - statLabel="mode", mergeMethod="Max") - if prevStats is None: - return self.setWords(node, "") - curStats = self.getValue(curStats) - prevStats = self.getValue(prevStats) - - diff = curStats - prevStats - words = "" - for threshold, label in self.humidityRecovery_valueList(tree, node): - if diff <= threshold: - words = label - break - return self.setWords(node, words) - - # Humidity recovery values - def humidityRecovery_valueList(self, tree, node): - "Used to convert percent difference to corresponding category" - # If you want to return different thresholds based on edit areas: - # - # editAreaNames = ["area1", "area2"] - # if self.currentAreaContains(tree, 
editAreaNames): - # return [ - # (15, "Poor"), - # (20, "Fair"), - # (30, "Good"), - # ] - return [ - (25, "Poor"), - (55, "Moderate"), - (70, "Good"), - (100,"Excellent"), - ] - - ### LAL - ## ################################### - ## # General LAL Definition # - ## # mainly defined for wrn 1/2 of US# - ## # but still different across the # - ## # country (from Dave Metze # - ## ################################### - - ## LALs.....(L)ightning (A)ctivity (L)evels numbered 1 through 6. - - ## * LAL 1 - No thunderstorms. - ## * LAL 2 - isolated/slight chance thunderstorms..............PoP 5-14% - ## * LAL 3 - isolated/slight chance thunderstorms..............PoP 15-24% - ## * LAL 4 - scattered/chance thunderstorms....................PoP 25-54% - ## * LAL 5 - numerous/likely to wide/def thunderstorms.........PoP 55-100% - ## * LAL 6 - Dry lightning(thunderstorms) for LAL 3 through 5..PoP 15-100% - - ## The number of lightning strikes is also a variable, but not sure if the - ## science of meteorology is in place to forecast the actual number of - ## lightning strikes, so most offices use the potential/coverage of - ## thunderstorms to determine the LAL value. - - def lal_phrase(self): - return { - "setUpMethod": self.lal_setUp, - "wordMethod": self.lal_words, - "phraseMethods": [ - self.consolidatePhrase, - self.checkLocalEffects, - self.combinePhraseStats, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.assembleSubPhrases, - self.postProcessPhrase, - ] - } - - def lal_setUp(self, tree, node): - # Wait for all Wx (and other) phrases to complete - # NOTE that we are sending the areaLabel so IF you have a local - # effect set up for LAL, you must have the same local effect set up - # for weather, AND LAL and weather should be consistent in the grids. 
- phraseList = self.checkPhrasesDone( - tree, node, areaLabel=node.getAreaLabel(), - exceptions=[node.get('name')]) - if phraseList is None: - return - - # Check to see if the weather phrase changed it's resolution. - # If so, we want to match it in the LAL phrase - nodeRes = None - for phrase in phraseList: - firstElement = phrase.get("firstElement") - if firstElement is not None and firstElement.name=="Wx": - resolution = phrase.get('resolution') - if resolution is not None: - nodeRes = resolution - break - - # Use resolution of Wx phrase - elementInfoList = [self.ElementInfo("LAL", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector, - resolution=nodeRes) - node.set("descriptor", "") - node.set("indentLabel", "LAL.................") - return self.DONE() - - def lal_words(self, tree, node) : - "Create phrase for Lightning Activity Level" - #statDict = node.getStatDict() - #stats = self.getStats(statDict, "LAL") - # Check low pop. If low, set LAL to 1. - popThreshold = self.pop_wx_lower_threshold(tree, node) - lowPopFlag = self.lowPop_flag(tree, node, popThreshold) - if lowPopFlag == 1: - lal = 1 - else: - lal = self.matchToWx(tree, node, "LAL") - if lal is None: - return self.setWords(node, "null") - if self._lightningPhrases: - words = self.lal_value(tree, node, lal) - else: - words = `int(lal)` - return self.setWords(node, words) - - def lal_value(self, tree, node, lal): - value = "No Tstms" - if lal > 1: - value = "1-8 strikes" - if lal > 2: - value = "9-15 strikes" - if lal > 3: - value = "16-25 strikes" - if lal > 4: - value = ">25 strikes" - if lal > 5: - value = "Dry lightning" - return value - - def coverageLAL_value(self, coverage): - # LAL ranges that correspond to each of the weather coverages - lalValue = self.coverageLAL_table() - return lalValue[coverage] - - def coverageLAL_table(self): - # LAL ranges that correspond to each of the weather coverages - return { - "": (1,1), - "Iso": self.lal_2_3, - "SChc": self.lal_2_3, 
- "Patchy": self.lal_wx_value, - "Areas":self.lal_wx_value, - "Chc": self.lal_wx_value, - "Sct": self.lal_wx_value, - "Lkly": self.lal_wx_value, - "Num": self.lal_wx_value, - "Brf": self.lal_wx_value, - "Frq": self.lal_wx_value, - "Ocnl": self.lal_wx_value, - "Pds": self.lal_wx_value, - "Inter":self.lal_wx_value, - "Def": self.lal_wx_value, - "Wide": self.lal_wx_value, - } - - def lal_2_3(self, tree, node, highKey): - pop = tree.stats.get("PoP", node.getTimeRange(), node.getAreaLabel(), - mergeMethod="Max") - if pop > 10: - if highKey.wxType() == "T" and "Dry" in highKey.attributes(): - return (6,6) - else: - return (3,3) - else: - return (2,2) - - def lal_wx_value(self, tree, node, highKey): - # Check for Dry Thunderstorms - if highKey.wxType() == "T" and "Dry" in highKey.attributes(): - return (6,6) - coverage = highKey.coverage() - lal_dict = { - "Patchy": (2,2), - "Areas":(4,4), - "Chc": (4,4), - "Sct": (4,4), - "Lkly": (5,5), - "Num": (5,5), - "Brf": (5,5), - "Frq": (5,5), - "Ocnl": (5,5), - "Pds": (5,5), - "Inter":(5,5), - "Def": (5,5), - "Wide": (5,5), - } - return lal_dict[coverage] - - - ### MixHgt - def mixingHgt_phrase(self): - return { - "setUpMethod": self.mixingHgt_setUp, - "wordMethod": self.mixingHgt_words, - "phraseMethods": self.standard_phraseMethods(), - } - def mixingHgt_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("MixHgt", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "Mixing height.......") - return self.DONE() - - def mixingHgt_words(self, tree, node): - "Create phrase for Mixing Height" - - statDict = node.getStatDict() - stats = self.getStats(statDict, "MixHgt") - if stats is None: - return self.setWords(node.parent, "MISSING") - - mix1, mix2 = self.getValue(stats, "MinMax") - outUnits = self.element_outUnits(tree, node, "MixHgt", "MixHgt") - mix1 = int(mix1) - mix2 = int(mix2) - threshold = self.nlValue(self.null_nlValue( - tree, 
node, "MixHgt", "MixHgt"), max) - if int(mix1) < threshold and int(mix2) < threshold: - return self.setWords(node, "null") - - # Single Value input - if mix1 == mix2: - words = `mix1` + " " + outUnits + " AGL" - # Range - else: - words = `mix1`+ "-" + `mix2` + " " + outUnits + " AGL" - return self.setWords(node, words) - - ###--------------------------------------------------------- - ### OPTIONAL Phrase: Contributed by Ben Moyer, LOX - ### Marine Layer - taken from mixingHgt phrase methods - ###--------------------------------------------------------- - - def marineLayer_phrase(self): - return { - "setUpMethod": self.marineLayer_setUp, - "wordMethod": self.marineLayer_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def marineLayer_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("MarineLayer", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - - # Do not generate for desert areas - #desertArea = ["Desert"] - #if self.currentAreaContains(tree, desertArea): - # return self.DONE() - - node.set("descriptor", "") - node.set("indentLabel", "Marine layer........") - return self.DONE() - - def marineLayer_words(self, tree, node): - "Create phrase for Marine Layer" - - statDict = node.getStatDict() - stats = self.getStats(statDict, "MarineLayer") - if stats is None: - return self.setWords(node.parent, "MISSING") - - mix1, mix2 = self.getValue(stats, "MinMax") - outUnits = self.element_outUnits(tree, node, "MixHgt", "MixHgt") - mix1 = int(mix1) - mix2 = int(mix2) - - # Single Value input - # Makes "0 ft asl" be returned as "none" - if mix1 == 0 and mix2 == 0: - words = "none" - elif mix1 == mix2: - words = `mix1` + " " + outUnits + " asl" - # Makes phrases such as "0-800 ft asl" be simply "800 ft asl" - elif mix1 == 0 and mix2 > 0: - words = `mix2` + " " + outUnits + " asl" - # Range - else: - words = `mix1`+ "-" + `mix2` + " " + outUnits + " asl" - return self.setWords(node, words) +# Contractor Name: Raytheon 
Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# FirePhrases.py +# Methods for producing text forecast from SampleAnalysis statistics. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import ScalarPhrases +import VectorRelatedPhrases +import WxPhrases +import DiscretePhrases + +class FirePhrases(ScalarPhrases.ScalarPhrases, VectorRelatedPhrases.VectorRelatedPhrases, + WxPhrases.WxPhrases, DiscretePhrases.DiscretePhrases): + def __init__(self): + ScalarPhrases.ScalarPhrases.__init__(self) + VectorRelatedPhrases.VectorRelatedPhrases.__init__(self) + WxPhrases.WxPhrases.__init__(self) + DiscretePhrases.DiscretePhrases.__init__(self) + + ############################################ + ### FIRE WEATHER PHRASES + + ### Sky and Weather + def includeSkyRanges_flag(self, tree, node): + # Set to 0 if you do not want ranges reported with sky phrases: + # Partly cloudy (35-40 PERCENT) + return 1 + + def skyWeather_byTimeRange_compoundPhrase(self): + return { + "phraseList": [ + self.fireSky_phrase, + self.weather_phrase, + ], + "phraseMethods": [ + self.consolidateSubPhrases, + self.assembleSentences, + self.skyWeather_finishUp, + ], + } + def skyWeather_finishUp(self, tree, node): + "Create a phrase for sky/weather" + words = node.get("words") + if words is None: + return + if words == "": + words = "MISSING" + node.set("descriptor", "") + node.set("indentLabel", "Sky/weather.........") + node.set("compound", 1) + return 
self.DONE() + + def fireSky_phrase(self): + return { + "setUpMethod": self.fireSky_setUp, + "wordMethod": self.fireSky_words, + "phraseMethods": [ + self.checkLocalEffects, + self.combineSky, + self.combineWords, + self.fillNulls, + self.assembleSubPhrases, + self.postProcessPhrase, + ], + } + def fireSky_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Sky", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def fireSky_words(self, tree, node): + # Report average as a worded summary, also return average + statDict = node.getStatDict() + stats = self.getStats(statDict, "Sky") + if stats is None: + return self.setWords(node, "") + avg = self.getValue(stats) + dayNight = self.getPeriod(node.getTimeRange(), 1) + words = self.sky_value(tree, node, avg, dayNight) + words = self.addSkyRange(tree, node, words, avg) + return self.setWords(node, words) + + def addSkyRange(self, tree, node, words, avg): + # Add range if desired + if self.includeSkyRanges_flag(tree, node): + roundAvg = int(self.round(avg, "Nearest", 5)) + if roundAvg < 5: + min = 0 + else: + min = roundAvg-5 + if roundAvg >= 100: + max = 100 + else: + max = roundAvg + 5 + units = self.units_descriptor(tree, node, "units", "%") + words = words + " (" + repr(min) + "-" + repr(max) + units + ")" + return words + + # Trends + ### MinT, MaxT, MinRH, MaxRH, RH + def trend_DayOrNight_phrase(self): + return { + "setUpMethod": self.trend_DayOrNight_setUp, + "wordMethod": self.trend_DayOrNight_words, + "phraseMethods": [ + self.checkLocalEffects, + self.assembleSubPhrases, + self.postProcessPhrase, + ], + } + def trend_DayOrNight_setUp(self, tree, node): + dayElement, nightElement, trendElement, indent, endWithPeriod = node.get("args") + dayNight = self.getPeriod(node.getTimeRange(), 1) + if dayNight == self.DAYTIME(): + elementName = dayElement + else: + elementName = nightElement + + elementInfoList = [self.ElementInfo(elementName, "MinMax")] + 
self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("trendElement", trendElement) + node.set("descriptor", "") + if indent == 1: + node.set("indentLabel", " 24 hr trend......") + return self.DONE() + + def trend_DayOrNight_words(self, tree, node): + "Compare current analysis to previous analysis for trends" + elementName = node.get("elementName") + + higher = self.phrase_descriptor(tree, node, "higher", elementName ) + lower = self.phrase_descriptor(tree, node, "lower", elementName ) + threshold = self.trend_threshold(tree, node, elementName, elementName) + if elementName == "MinT" or elementName == "MaxT": + units = self.units_descriptor(tree, node, "units", "degrees") + unit = self.units_descriptor(tree, node, "unit", "degree") + if elementName == "MaxRH" or elementName == "MinRH" or elementName == "RH": + units = self.units_descriptor(tree, node, "units", "%") + unit = self.units_descriptor(tree, node, "unit", "%") + trendElement = node.getAncestor("trendElement") + words, diff = self.getTrend( + tree, node, elementName, trendElement, "", units, unit, threshold, + higher, lower) + if words is not None: + if diff == 0: + words = self.phrase_descriptor(tree, node, "unchanged", elementName ) + else: + words = self.phrase_descriptor(tree, node, "missing", elementName ) + return self.setWords(node, words) + + def getTrend(self, tree, phrase, element, trendElement, introWords, units, unit, threshold, + positiveDescriptor, negativeDescriptor): + + timeRange = phrase.getTimeRange() + areaLabel = phrase.getAreaLabel() + absDiff, rawDiff = self.getTrendStats( + tree, phrase, element, timeRange, areaLabel, trendElement) + + if absDiff is None: + return absDiff, rawDiff + + diff = int(absDiff) + if absDiff >= threshold: + if rawDiff >=0: + descriptor = positiveDescriptor + else: + descriptor = negativeDescriptor + if diff == 1: + units = unit + introWords = self.addSpace(introWords) + if units != "%": + units = " " + units + words = introWords 
+ repr(diff) + units + " " + descriptor + else: + words = "" + #print "returning", words, diff + return words, diff + + def getTrendStats(self, tree, phrase, element, timeRange, areaLabel, trendElement): + # NO CONVERSION DONE + # Try the trend element first + stats = tree.stats.get(trendElement, timeRange, areaLabel, + mergeMethod="Average") + #print "\ngetTrendStats" + if stats is not None: + rawDiff = int(self.getValue(stats)) + absDiff = abs(rawDiff) + else: + #Use Max/Min element + curStats = tree.stats.get(element, timeRange, areaLabel, + mergeMethod = "Average", statLabel="mode") + #print "curstats", curStats + if curStats is None: + return None, 0 + + prevTimeRange = self.adjustTimeRange(timeRange, -24) + prevStats = tree.stats.get(element, prevTimeRange, areaLabel, + mergeMethod = "Average", statLabel="mode") + #print "prevstats", prevStats + if prevStats is None: + return None, 0 + + prevStats = self.getValue(prevStats) + curStats = self.getValue(curStats) + #print "cur, prev", element, curStats, prevStats + absDiff = self.absDiff(curStats, prevStats) + rawDiff = curStats - prevStats + #print "absDiff, rawDiff", absDiff, rawDiff + return absDiff, rawDiff + + def dayOrNight_phrase(self): + return { + "setUpMethod": self.dayOrNight_setUp, + "wordMethod": self.fire_dayOrNight_words, + "phraseMethods": [ + self.checkLocalEffects, + self.assembleSubPhrases, + self.postProcessPhrase, + ], + } + def dayOrNight_setUp(self, tree, node): + dayElement, nightElement, indent, endWithPeriod = node.get("args") + elementName = self.dayOrNight_element(tree, node, dayElement, nightElement) + indentName = elementName+"_FireWx" + method = "MinMax" + if elementName == "RH": + dayNight = self.getPeriod(node.getTimeRange(), 1) + if dayNight == self.DAYTIME(): + indentName = "MinRH_FireWx" + method = "Min" + else: + indentName = "MaxRH_FireWx" + method = "Max" + elementInfoList = [self.ElementInfo(elementName, method)] + self.subPhraseSetUp(tree, node, elementInfoList, 
self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", indentName) + return self.DONE() + + def fire_dayOrNight_words(self, tree, node): + elementName = node.getAncestor("elementName") + statDict = node.getStatDict() + if elementName == "MaxT" or elementName == "MinT": + stats = self.getTempStats(tree, node) + if stats is None: + return self.setWords(node.parent, "MISSING") + words = self.getTempRangePhrase(tree, node, stats, elementName) + else: # MinRH, MaxRH or RH + stats = self.getStats(statDict, elementName) + if stats is None: + return self.setWords(node.parent, "MISSING") + connector = self.value_connector(tree, node, elementName, elementName) + min, max = self.getValue(stats, "MinMax") + if min == max: + words = repr(int(min)) + else: + words = repr(int(min)) + connector + repr(int(max)) + outUnits = self.element_outUnits(tree, node, elementName, elementName) + units = self.units_descriptor(tree, node,"units", outUnits) + words = words + units + return self.setWords(node, words) + + ### CWR + def cwr_phrase(self): + return { + "setUpMethod": self.cwr_setUp, + "wordMethod": self.cwr_words, + "phraseMethods": self.standard_phraseMethods(), + } + def cwr_setUp(self, tree, node): + try: + cwr = self._cwrParm + except: + cwr = "CWR" + elementInfoList = [self.ElementInfo(cwr, "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "CWR.................") + return self.DONE() + + def cwr_words(self, tree, node) : + "Create phrase Probability of Precipitation" + statDict = node.getStatDict() + cwr = self.getStats(statDict, "CWR") + if cwr is None: + return self.setWords(node.parent, "MISSING") + cwr = self.getValue(cwr) + threshold = self.nlValue(self.null_nlValue( + tree, node, "CWR", "CWR"), cwr) + if int(cwr) < threshold: + return self.setWords(node, "null") + else: + words = repr(int(cwr)) + " percent" + return self.setWords(node, words) + + ### VentRate or 
smoke dispersal phrase + def smokeDispersal_phrase(self): + return { + "setUpMethod": self.smokeDispersal_setUp, + "wordMethod": self.smokeDispersal_words, + "phraseMethods": self.standard_phraseMethods(), + } + def smokeDispersal_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("VentRate", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel","Smoke dispersal.....") + return self.DONE() + + def smokeDispersal_words(self, tree, node): + "Create phrase for Smoke Dispersal" + + statDict = node.getStatDict() + stats = self.getStats(statDict, "VentRate") + if stats is None: + return self.setWords(node.parent, "MISSING") + vr1, vr2 = self.getValue(stats, "MinMax") + + vr1 = int(vr1) + vr2 = int(vr2) + vrCat1 = self.smokeDispersal_valueStr(vr1) + vrCat2 = self.smokeDispersal_valueStr(vr2) + # Single Value input + if vr1 == vr2: + words = vrCat1 + " (" + repr(vr1) + " knot-ft)" + # Range + else: + words = vrCat1 + " to " + vrCat2 + " (" + repr(vr1) + "-" + \ + repr(vr2) + " knot-ft)" + return self.setWords(node, words) + + # SMOKE DISPERSAL CATEGORIES + def smokeDispersal_valueStr(self, value): + "Convert smoke dispersal value to corresponding category" + + if value < 40000 : + return "poor" + + if value >= 40000 and value < 60000: + return "fair" + + if value >= 60000 and value < 100000 : + return "good" + + if value >= 100000 and value < 150000 : + return "very good" + + if value >= 150000 : + return "excellent" + + ### Fire Winds + def transportWind_phrase(self): + return { + "setUpMethod": self.transportWind_setUp, + "wordMethod": self.vector_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + def transportWind_setUp(self, tree, node): + self.wind_setUp(tree, node, gustFlag=0, element="TransWind") + node.set("descriptor", "") + node.set("indentLabel","Transport winds.....") + return self.DONE() + + def freeWind_phrase(self): + return { + "setUpMethod": 
self.freeWind_setUp, + "wordMethod": self.vector_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + def freeWind_setUp(self, tree, node): + self.wind_setUp(tree, node, gustFlag=0, element="FreeWind") + node.set("descriptor", "") + node.set("indentLabel","Free winds..........") + return self.DONE() + + def ridgeValleyAreas(self, tree, node): + # List of edit area names for which we want + # ridge/valley winds reported: + # + # 20-foot winds... + # Valleys/lwr slopes... + # Ridges/upr slopes.... + # + # e.g. + # return ["Area1"] + # + return [] + + def valleyRidgeAreaNames(self, tree, node): + # These are the areas for valleys and ridges, respectively, + # to be intersected with the current edit area for + # reporting valley winds and ridge winds, respectively. + # NOTE: If you change these area names, you will also + # need to change the names in the FirePeriod "intersectAreas" + # section. + return "Valleys", "Ridges" + + def fireWind_compoundPhrase(self): + return { + "phraseList": [ + self.wind_summary, + self.wind_phrase, + ], + "phraseMethods": [ + self.consolidateSubPhrases, + self.assembleSentences, + self.fireWind_finishUp + ], + } + def fireWind_finishUp(self, tree, node): + "Create a phrase for Winds" + # Empty phrase if doing ridge/valley winds + if self.currentAreaContains( + tree, self.ridgeValleyAreas(tree, node)) == 1: + return self.setWords(node, "") + words = node.get("words") + if words is None: + return + if words == "": + words = "MISSING" + node.set("descriptor", "") + node.set("indentLabel", "20-foot winds.......") + node.set("compound", 1) + return self.setWords(node, words) + + def fireWind_label_phrase(self): + return { + "setUpMethod": self.fireWind_label_setUp, + "phraseMethods": [self.postProcessPhrase], + } + def fireWind_label_setUp(self, tree, node): + if self.currentAreaContains( + tree, self.ridgeValleyAreas(tree, node)) == 0: + return self.setWords(node, "") + self.setWords(node, "") + node.set("descriptor", "") + 
node.set("indentLabel", "20-foot winds.......") + return self.DONE() + + # Valley/Ridge split set-up + def fireValleyWind_compoundPhrase(self): + return { + "phraseList": [ + self.wind_summary, + self.wind_phrase, + ], + "phraseMethods": [ + self.fireRidgeValleyWind_setUp, + self.consolidateSubPhrases, + self.assembleSentences, + self.fireValleyWind_finishUp + ], + } + def fireRidgeValleyWind_setUp(self, tree, node): + # Used for set-up of fireRidgeWind_compoundPhrase as well. + if self.currentAreaContains( + tree, self.ridgeValleyAreas(tree, node)) == 0: + return self.setWords(node, "") + # Set up intersect area to be used for the node + areaName = node.getAreaLabel() + phraseName = node.get("name") + valleys, ridges = self.valleyRidgeAreaNames(tree, node) + if phraseName.find("Valley") >= 0: + area = valleys + else: + area = ridges + intersectName = self.getIntersectName(areaName, area) + #print "setting intersect", intersectName + node.set("areaLabel", intersectName) + return self.DONE() + + def fireValleyWind_finishUp(self, tree, node): + "Create a phrase for Winds" + words = node.get("words") + if words is None: + return + if words == "": + words = "MISSING" + node.set("descriptor", "") + node.set("indentLabel"," Valleys/lwr slopes...") + node.set("compound", 1) + return self.setWords(node, words) + + def fireRidgeWind_compoundPhrase(self): + return { + "phraseList": [ + self.wind_summary, + self.wind_phrase, + ], + "phraseMethods": [ + self.fireRidgeValleyWind_setUp, + self.consolidateSubPhrases, + self.assembleSentences, + self.fireRidgeWind_finishUp + ], + } + def fireRidgeWind_finishUp(self, tree, node): + "Create a phrase for Winds" + words = node.get("words") + if words is None: + return + if words == "": + words = "MISSING" + node.set("descriptor", "") + node.set("indentLabel", " Ridges/upr slopes....") + node.set("compound", 1) + return self.setWords(node, words) + + ### Haines + def hainesDict(self): + return { + 0:"or very low potential for large 
plume dominated fire growth", + 1:"or very low potential for large plume dominated fire growth", + 2:"or very low potential for large plume dominated fire growth", + 3:"or very low potential for large plume dominated fire growth", + 4:"or low potential for large plume dominated fire growth", + 5:"or moderate potential for large plume dominated fire growth", + 6:"or high potential for large plume dominated fire growth", + 7:"or high potential for large plume dominated fire growth", + 8:"or high potential for large plume dominated fire growth", + 9:"or high potential for large plume dominated fire growth", + 10:"or high potential for large plume dominated fire growth" + } + + def haines_phrase(self): + return { + "setUpMethod": self.haines_setUp, + "wordMethod": self.haines_words, + "phraseMethods": [ + self.consolidatePhrase, + self.checkLocalEffects, + self.combinePhraseStats, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.assembleSubPhrases, + self.postProcessPhrase, + ] + } + def haines_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Haines", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel","Haines Index........") + return self.DONE() + + def haines_words(self, tree, node): + "Create phrase for Haines Index" + statDict = node.getStatDict() + stats = self.getStats(statDict, "Haines") + if stats is None: + return self.setWords(node.parent, "MISSING") + + haines1, haines2 = self.getValue(stats, "MinMax") + hainesDict = self.hainesDict() + haines1 = int(haines1) + haines2 = int(haines2) + words1 = hainesDict[haines1] + words2 = hainesDict[haines2] + + # Single Value input + if haines1 == haines2: + words = repr(haines1) + " " + words1 + # Range + else: + if words1 == words2: + words = words1 + else: + words = words1 + " to " + words2 + words = repr(haines1) + " to " + repr(haines2) + " OR " + words + return self.setWords(node, words) 
+ + ### Humidity + def humidityRecovery_percentage(self, tree, node): + # If the maximum humidity is greater than this percentage, + # humidity recovery will be Excellent. + return 50 + + def humidityRecovery_phrase(self): + return { + "setUpMethod": self.humidityRecovery_setUp, + "wordMethod": self.humidityRecovery_words, + "phraseMethods": self.standard_phraseMethods(), + } + def humidityRecovery_setUp(self, tree, node): + timeRange = node.getTimeRange() + dayNight = self.getPeriod(timeRange,1) + if dayNight != self.NIGHTTIME(): + return self.setWords(node, "") + if self._useRH: + elementName = "RH" + else: + elementName = "MaxRH" + elementInfoList = [self.ElementInfo(elementName, "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Humidity recovery...") + return self.DONE() + + def humidityRecovery_words(self, tree, node): + "Create phrase for Humidity recovery" + + if self._useRH: + elementName = "RH" + else: + elementName = "MaxRH" + statDict = node.getStatDict() + curStats = self.getStats(statDict, elementName) + if curStats is None: + return self.setWords(node.parent, "MISSING") + maxRH = self.getValue(curStats, "Max") + if maxRH > self.humidityRecovery_percentage(tree, node): + return self.setWords(node, "Excellent") + timeRange = node.getTimeRange() + prevTimeRange = self.adjustTimeRange(timeRange, -24) + prevStats = tree.stats.get(elementName, prevTimeRange, node.getAreaLabel(), + statLabel="mode", mergeMethod="Max") + if prevStats is None: + return self.setWords(node, "") + curStats = self.getValue(curStats) + prevStats = self.getValue(prevStats) + + diff = curStats - prevStats + words = "" + for threshold, label in self.humidityRecovery_valueList(tree, node): + if diff <= threshold: + words = label + break + return self.setWords(node, words) + + # Humidity recovery values + def humidityRecovery_valueList(self, tree, node): + "Used to convert percent difference to 
corresponding category" + # If you want to return different thresholds based on edit areas: + # + # editAreaNames = ["area1", "area2"] + # if self.currentAreaContains(tree, editAreaNames): + # return [ + # (15, "Poor"), + # (20, "Fair"), + # (30, "Good"), + # ] + return [ + (25, "Poor"), + (55, "Moderate"), + (70, "Good"), + (100,"Excellent"), + ] + + ### LAL + ## ################################### + ## # General LAL Definition # + ## # mainly defined for wrn 1/2 of US# + ## # but still different across the # + ## # country (from Dave Metze # + ## ################################### + + ## LALs.....(L)ightning (A)ctivity (L)evels numbered 1 through 6. + + ## * LAL 1 - No thunderstorms. + ## * LAL 2 - isolated/slight chance thunderstorms..............PoP 5-14% + ## * LAL 3 - isolated/slight chance thunderstorms..............PoP 15-24% + ## * LAL 4 - scattered/chance thunderstorms....................PoP 25-54% + ## * LAL 5 - numerous/likely to wide/def thunderstorms.........PoP 55-100% + ## * LAL 6 - Dry lightning(thunderstorms) for LAL 3 through 5..PoP 15-100% + + ## The number of lightning strikes is also a variable, but not sure if the + ## science of meteorology is in place to forecast the actual number of + ## lightning strikes, so most offices use the potential/coverage of + ## thunderstorms to determine the LAL value. + + def lal_phrase(self): + return { + "setUpMethod": self.lal_setUp, + "wordMethod": self.lal_words, + "phraseMethods": [ + self.consolidatePhrase, + self.checkLocalEffects, + self.combinePhraseStats, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.assembleSubPhrases, + self.postProcessPhrase, + ] + } + + def lal_setUp(self, tree, node): + # Wait for all Wx (and other) phrases to complete + # NOTE that we are sending the areaLabel so IF you have a local + # effect set up for LAL, you must have the same local effect set up + # for weather, AND LAL and weather should be consistent in the grids. 
+ phraseList = self.checkPhrasesDone( + tree, node, areaLabel=node.getAreaLabel(), + exceptions=[node.get('name')]) + if phraseList is None: + return + + # Check to see if the weather phrase changed it's resolution. + # If so, we want to match it in the LAL phrase + nodeRes = None + for phrase in phraseList: + firstElement = phrase.get("firstElement") + if firstElement is not None and firstElement.name=="Wx": + resolution = phrase.get('resolution') + if resolution is not None: + nodeRes = resolution + break + + # Use resolution of Wx phrase + elementInfoList = [self.ElementInfo("LAL", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector, + resolution=nodeRes) + node.set("descriptor", "") + node.set("indentLabel", "LAL.................") + return self.DONE() + + def lal_words(self, tree, node) : + "Create phrase for Lightning Activity Level" + #statDict = node.getStatDict() + #stats = self.getStats(statDict, "LAL") + # Check low pop. If low, set LAL to 1. + popThreshold = self.pop_wx_lower_threshold(tree, node) + lowPopFlag = self.lowPop_flag(tree, node, popThreshold) + if lowPopFlag == 1: + lal = 1 + else: + lal = self.matchToWx(tree, node, "LAL") + if lal is None: + return self.setWords(node, "null") + if self._lightningPhrases: + words = self.lal_value(tree, node, lal) + else: + words = repr(int(lal)) + return self.setWords(node, words) + + def lal_value(self, tree, node, lal): + value = "No Tstms" + if lal > 1: + value = "1-8 strikes" + if lal > 2: + value = "9-15 strikes" + if lal > 3: + value = "16-25 strikes" + if lal > 4: + value = ">25 strikes" + if lal > 5: + value = "Dry lightning" + return value + + def coverageLAL_value(self, coverage): + # LAL ranges that correspond to each of the weather coverages + lalValue = self.coverageLAL_table() + return lalValue[coverage] + + def coverageLAL_table(self): + # LAL ranges that correspond to each of the weather coverages + return { + "": (1,1), + "Iso": self.lal_2_3, + "SChc": 
self.lal_2_3, + "Patchy": self.lal_wx_value, + "Areas":self.lal_wx_value, + "Chc": self.lal_wx_value, + "Sct": self.lal_wx_value, + "Lkly": self.lal_wx_value, + "Num": self.lal_wx_value, + "Brf": self.lal_wx_value, + "Frq": self.lal_wx_value, + "Ocnl": self.lal_wx_value, + "Pds": self.lal_wx_value, + "Inter":self.lal_wx_value, + "Def": self.lal_wx_value, + "Wide": self.lal_wx_value, + } + + def lal_2_3(self, tree, node, highKey): + pop = tree.stats.get("PoP", node.getTimeRange(), node.getAreaLabel(), + mergeMethod="Max") + if pop > 10: + if highKey.wxType() == "T" and "Dry" in highKey.attributes(): + return (6,6) + else: + return (3,3) + else: + return (2,2) + + def lal_wx_value(self, tree, node, highKey): + # Check for Dry Thunderstorms + if highKey.wxType() == "T" and "Dry" in highKey.attributes(): + return (6,6) + coverage = highKey.coverage() + lal_dict = { + "Patchy": (2,2), + "Areas":(4,4), + "Chc": (4,4), + "Sct": (4,4), + "Lkly": (5,5), + "Num": (5,5), + "Brf": (5,5), + "Frq": (5,5), + "Ocnl": (5,5), + "Pds": (5,5), + "Inter":(5,5), + "Def": (5,5), + "Wide": (5,5), + } + return lal_dict[coverage] + + + ### MixHgt + def mixingHgt_phrase(self): + return { + "setUpMethod": self.mixingHgt_setUp, + "wordMethod": self.mixingHgt_words, + "phraseMethods": self.standard_phraseMethods(), + } + def mixingHgt_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("MixHgt", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "Mixing height.......") + return self.DONE() + + def mixingHgt_words(self, tree, node): + "Create phrase for Mixing Height" + + statDict = node.getStatDict() + stats = self.getStats(statDict, "MixHgt") + if stats is None: + return self.setWords(node.parent, "MISSING") + + mix1, mix2 = self.getValue(stats, "MinMax") + outUnits = self.element_outUnits(tree, node, "MixHgt", "MixHgt") + mix1 = int(mix1) + mix2 = int(mix2) + threshold = 
self.nlValue(self.null_nlValue( + tree, node, "MixHgt", "MixHgt"), max) + if int(mix1) < threshold and int(mix2) < threshold: + return self.setWords(node, "null") + + # Single Value input + if mix1 == mix2: + words = repr(mix1) + " " + outUnits + " AGL" + # Range + else: + words = repr(mix1)+ "-" + repr(mix2) + " " + outUnits + " AGL" + return self.setWords(node, words) + + ###--------------------------------------------------------- + ### OPTIONAL Phrase: Contributed by Ben Moyer, LOX + ### Marine Layer - taken from mixingHgt phrase methods + ###--------------------------------------------------------- + + def marineLayer_phrase(self): + return { + "setUpMethod": self.marineLayer_setUp, + "wordMethod": self.marineLayer_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def marineLayer_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("MarineLayer", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + + # Do not generate for desert areas + #desertArea = ["Desert"] + #if self.currentAreaContains(tree, desertArea): + # return self.DONE() + + node.set("descriptor", "") + node.set("indentLabel", "Marine layer........") + return self.DONE() + + def marineLayer_words(self, tree, node): + "Create phrase for Marine Layer" + + statDict = node.getStatDict() + stats = self.getStats(statDict, "MarineLayer") + if stats is None: + return self.setWords(node.parent, "MISSING") + + mix1, mix2 = self.getValue(stats, "MinMax") + outUnits = self.element_outUnits(tree, node, "MixHgt", "MixHgt") + mix1 = int(mix1) + mix2 = int(mix2) + + # Single Value input + # Makes "0 ft asl" be returned as "none" + if mix1 == 0 and mix2 == 0: + words = "none" + elif mix1 == mix2: + words = repr(mix1) + " " + outUnits + " asl" + # Makes phrases such as "0-800 ft asl" be simply "800 ft asl" + elif mix1 == 0 and mix2 > 0: + words = repr(mix2) + " " + outUnits + " asl" + # Range + else: + words = repr(mix1)+ "-" + repr(mix2) + " " + outUnits + " 
asl" + return self.setWords(node, words) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Header.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Header.py index 3dc42b4b86..0be5bd074b 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Header.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Header.py @@ -1,1005 +1,1005 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Header.py -# Methods for producing headers. -# -# Author: hansen -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 10/20/2014 #3685 randerso Changes to support mixed case products -# 07/15/2016 #5749 randerso Format lists with commas -# -## - -## -# This is a base file that is not intended to be overridden. 
-## - -import EditAreaUtils -import StringUtils -import ModuleAccessor -import string, os, time, types, re -import TimeRangeUtils -import TimeRange, AbsTime -import logging - -class Header(EditAreaUtils.EditAreaUtils, StringUtils.StringUtils): -# TimeRangeUtils): - def __init__(self): - EditAreaUtils.EditAreaUtils.__init__(self) - StringUtils.StringUtils.__init__(self) -# TimeRangeUtils.TimeRangeUtils.__init__(self) - self.log = logging.getLogger("FormatterRunner.Header.Header") - - def makeAreaHeader(self, argDict, areaLabel, issueTime, expireTime, - areaDictName, defaultEditAreas, - cityDescriptor ="Including the cities of", - areaList=None, includeCities=1, includeZoneNames=1, - includeIssueTime=1, includeCodes=1, includeVTECString=1, - hVTECString=None, accurateCities=False): - # Make a UGC area header for the given areaLabel - # Determine list of areas (there could be more than one if we are using a combination) - - if areaDictName is None or areaDictName == "None": - return areaLabel + "\n" - - # If we didn't supply an areaList, - # Use combinations file or defaultEditAreas - if areaList is None: - combinations = argDict["combinations"] - if combinations is not None: - areaList = self.getCurrentAreaNames(argDict, areaLabel) - else: - for editArea, label in defaultEditAreas: - if label == areaLabel: - areaList = [editArea] - - try: - # Remove suffixes if necessary - if self._editAreaSuffix is not None: - areaList = self.removeSuffixes(areaList, self._editAreaSuffix) - except: - pass - - # Access the UGC information for the area(s) if available - accessor = ModuleAccessor.ModuleAccessor() - areaDict = accessor.variable(areaDictName, "AreaDictionary") - ugcCityList = [] - if areaDict is None: # create a dummy header - codeString = "STZxxx-" - nameString = areaLabel - cityString = "" - else: - codeString = "" - nameString = "" - cityString = "" - areaList, ugcList = self.makeUGCList(areaDict, areaList) - codeString = self.makeUGCString(ugcList) - ugcNameList = [] 
- for areaName in areaList: - if areaName in areaDict.keys(): - if areaDict.has_key(areaName): - entry = areaDict[areaName] - else: - entry = {} - log.error(\ - "AreaDictionary missing definition for [" + \ - areaName + "].") - if entry.has_key('ugcName'): - ugcName = entry['ugcName'] - else: - ugcName = areaName #missing UGCname - log.error(\ - "AreaDictionary missing ugcName definition for [" + \ - areaName + "].") - if ugcName not in ugcNameList: - ugcNameList.append(ugcName) - if entry.has_key("ugcCities"): - cities = entry["ugcCities"] - for c in cities: - if len(c) and c not in ugcCityList: - ugcCityList.append(c) - else: - ugcNameList.append(areaName) - log.error("AreaDictionary does not contain " +\ - 'ugcName definition for ', areaName) - - if self.alphabetizeHeaders() == 1: - # Alphabetize both lists. - ugcNameList.sort() - ugcCityList.sort() - - # Build nameString and cityString strings: - for ugcName in ugcNameList: - nameString = nameString + ugcName + "-" - - cityString = self.punctuateList(ugcCityList) - - # Compute accurate city list - if accurateCities and \ - len(ugcCityList) > 0 and argDict.has_key("hazards"): - ugcCityList, cityString = self.makeAccurateCityList(areaList, \ - ugcCityList, argDict) - - # get the VTEC string from the HazardsTable - VTECString = "" - VTECRecords = [] - if argDict.has_key("hazards") and includeVTECString: - hazards = argDict["hazards"] - VTECString = hazards.getVTECString(areaList) - #must have VTECString, in order to have hVTEC string - if hVTECString is not None and len(VTECString) and len(hVTECString): - VTECString = VTECString + hVTECString + "\n" - - # expiration time is dependent upon the passed in expiration time - # and the VTEC strings. 
expireT is seconds since epoch - if type(expireTime) is types.IntType or\ - type(expireTime) is types.FloatType: - expireTime = AbsTime.AbsTime(int(expireTime)) - try: - if self._fixedExpire == 1: - fixed = 1 - else: - fixed = 0 - except: - fixed = 0 - expireT = self.getExpireTime(issueTime.unixTime(), - expireTime.unixTime(), VTECString, fixedExpire = fixed) - - # format the expiration time - expireTimeRange = TimeRange.TimeRange(AbsTime.AbsTime(expireT), - AbsTime.AbsTime(expireT+1)) - expireTime = self.timeDisplay(expireTimeRange, "", "","%d%H%M", "") - codeString = self.endline(codeString + "-" + expireTime + "-", - linelength=self._lineLength, breakStr=["-"]) - - # get this time zone - thisTimeZone = os.environ["TZ"] - zoneList = [] - # check to see if we have any areas outside our time zone - for areaName in areaList: - if areaName in areaDict.keys(): - entry = areaDict[areaName] - if not entry.has_key("ugcTimeZone"): #add your site tz - if thisTimeZone not in zoneList: - zoneList.append(thisTimeZone) - continue # skip this entry - timeZoneList = entry["ugcTimeZone"] - if type(timeZoneList) == types.StringType: # a single value - timeZoneList = [timeZoneList] # make it into a list - for timeZone in timeZoneList: - if timeZone not in zoneList: - zoneList.append(timeZone) - - # if the resulting zoneList is empty, put in our time zone - if len(zoneList) == 0: - zoneList.append(thisTimeZone) - - # if the resulting zoneList has our time zone in it, be sure it - # is the first one in the list - try: - index = zoneList.index(thisTimeZone) - if index != 0: - del zoneList[index] - zoneList.insert(0, thisTimeZone) - except: - pass - - # now create the time string - issueTimeStr = '' - timeStrs = [] - for timeZone in zoneList: - timeStr = self.formatTimeString( - issueTime.unixTime(), "%l%M %p %Z %a %b %e %Y", timeZone) - timeStr = string.replace(timeStr, " ", " ") - timeStr = string.strip(timeStr) - if timeStr not in timeStrs: - timeStrs.append(timeStr) - if 
len(timeStrs) == 1: - issueTimeStr = timeStrs[0] - else: - issueTimeStr = timeStrs[0] - for i in xrange(1, len(timeStrs)): - issueTimeStr = issueTimeStr + " /" + timeStrs[i] + "/" - - try: - if self._useRegionLabel == 1: - if (areaLabel != ""): - nameString = areaLabel - except: - pass - - - nameString = self.endline(nameString, linelength=self._lineLength,breakStr=["-"]) - if cityString != "": - numCities = len(ugcCityList) - if numCities == 1: - def preserveCase(matchobj): - orig = matchobj.group(0) - repl = 'city' - retv = '' - for i in range(len(repl)): - c = repl[i] - if orig[i].isupper(): - c = c.upper() - retv = retv + c - return retv - cityDescriptor = re.sub("cities", preserveCase, cityDescriptor, flags=re.IGNORECASE) - cityString = self.endline(cityDescriptor + " " + cityString, - linelength=self._lineLength, breakStr=[", "]) - issueTimeStr = issueTimeStr + "\n\n" - try: - if self._includeHeader == 0: - issueTimeStr = "\n" - codeString = "" - cityString = "" - except: - pass - if includeCities == 0: - cityString = "" - if includeZoneNames == 0: - nameString = "" - if includeIssueTime == 0: - issueTimeStr = "" - if includeCodes == 0: - codeString = "" - if includeVTECString == 0: - VTECString = "" - header = codeString + VTECString + nameString + cityString + issueTimeStr - return header - - # Make accurate city list based on the grids - # In case of missing grid (CAN/EXP actions), check active table for cities - # in the previous product - # When cities cannot be determined with certainty, add framing code to city - # list so it may be unlocked for editing - - def makeAccurateCityList(self, areaList, ugcCityList, argDict): - hazards = argDict["hazards"] - vtecS = hazards.getHazardList(areaList) - - # Separate hazards according to action - canRecords = [] - upgRecords = [] - expRecords = [] - actRecords = [] - for vtec in vtecS: - if vtec['act'] == 'CAN': - canRecords.append(vtec) - elif vtec['act'] == 'UPG': - upgRecords.append(vtec) - elif vtec['act'] == 
'EXP': - expRecords.append(vtec) - else: - actRecords.append(vtec) - - # Now determine the cities corresponding to the active grids - citylist = hazards.getCities(ugcCityList, actRecords) - - # See if we can determine cities for EXP records from grids - unresolved = [] - if len(expRecords): - for expRec in expRecords: - cities = hazards.getCitiesForEXP(ugcCityList, - areaList[0], expRec['phen'], expRec['sig'], expRec['endTime']) - if cities is None: - unresolved.append(expRec) - else: - citylist += cities - - # check if the full list is used and if we need to check the - # previous product - - fullListUsed = 1 - for city in ugcCityList: - if city not in citylist: - fullListUsed = 0 - break - - editable = 0 - if fullListUsed: - citylist = ugcCityList[:] - - elif (unresolved + canRecords): - - # For VTEC not associated with a grid, - # try to extract cities from previous product - # If without absolute certainty, make city list editable - - cities, certain = hazards.getCitiesFromPrevious(areaList, - expRecords + canRecords, - ignoredVTEC = upgRecords) - if cities is not None: - citylist += cities - editable = not certain - else: - # failed...use the full list and make it editable - editable = 1 - citylist = ugcCityList[:] - - # filter and order - newlist = [] - for city in ugcCityList: - if city in citylist: - newlist.append(city) - citylist = newlist - - cityString = self.punctuateList(citylist) - - # add framing code so the city list will be editable - if editable and len(cityString) > 0: - cityString = '|*' + cityString + '*|' - - return citylist, cityString - - - # Return new areaList and associated ugcList both sorted by ugcCode. - # Extracts ugcCode from the area dictionary for the each areaName in areaList. - # Will accept complex UGC strings in the area dictionary such as: - # ORZ001-004-WAZ021>023-029. 
- # However, in this case, one areaName could correspond to multiple - # ugcCodes and thus, the areaList is not guaranteed to follow - # the sorted ugcCode list order. - def makeUGCList(self, areaDict, areaList): - # Make a list of (areaName, ugcCode) tuples - areaUgcList = [] - for areaName in areaList: - if areaName in areaDict.keys(): - ugc = areaDict[areaName]['ugcCode'] - if ugc.find("-") >= 0 or ugc.find(">") >= 0: - ugcs = self.expandComplexUgc(ugc) - for ugc in ugcs: - areaUgcList.append((areaName, ugc)) - else: - areaUgcList.append((areaName, ugc)) - - # Sort this list in ugc order - areaUgcList.sort(self.ugcSort) - - # Make new "parallel" lists of areaNames and ugcCodes - ugcList = [] - newAreaList = [] - for areaName, ugcCode in areaUgcList: - if areaName not in newAreaList: - newAreaList.append(areaName) - if ugcCode not in ugcList: - ugcList.append(ugcCode) - - #print "newAreaList, ugcList", newAreaList, ugcList - return newAreaList, ugcList - - - def expandComplexUgc(self, complexUgc): - # Given a complex ugc string e.g. 
ORZ001-004-WAZ021>023-029, - # return a list of all ugcs represented - ugcList = [] - curState = "" - arrowFlag = 0 - lastNum = 0 - # While we still have a dash or arrow - while len(complexUgc) > 0: - dash = complexUgc.find("-") - arrow = complexUgc.find(">") - # Peel off the next ugc from the complexUgc string - if dash < 0 and arrow < 0: - sep = len(complexUgc) - elif arrow >= 0 and arrow < dash: - sep = arrow - elif dash >= 0 and dash < arrow: - sep = dash - elif dash >= 0: - sep = dash - else: - sep = arrow - ugc = complexUgc[:sep] - complexUgc = complexUgc[sep+1:] - # Add this ugc to the list - nextUgcs, curState, lastNum = self.expandUgc(ugc, curState, lastNum, arrowFlag) - arrowFlag = 0 - if sep == arrow: - arrowFlag = 1 - for nextUgc in nextUgcs: - ugcList.append(nextUgc) - return ugcList - - def expandUgc(self, ugc, curState, lastNum, arrowFlag): - # If the ugc has a state identifier on it, - # return it as is and return it's state as the curState. - # Otherwise append the curState to the ugc and - # return it and curState - if curState == "": - curState = ugc[:3] - return [ugc], curState, int(ugc[3:]) - state = ugc[:3] - try: - # If simply a number, add the current state - num = int(state) - # Check for arrow - ugcList = [] - if arrowFlag: - for ugcNum in range(lastNum+1, num+1): - curNum = str(ugcNum) - curNum = curNum.zfill(3) - ugcList.append(curState + curNum) - else: - ugcList.append(curState + ugc) - return ugcList, curState, num - except: - curState = state - return [ugc], curState, int(ugc[3:]) - - def ugcSort(self, val1, val2): - name1, ugc1 = val1 - name2, ugc2 = val2 - if ugc1 > ugc2: - return 1 - elif ugc1 == ugc2: - return 0 - else: - return -1 - - ### creates a UGCCode header string from the specified list of UGC codes. 
- def makeUGCString(self, ugcList): - # if nothing in the list, return empty string - if len(ugcList) == 0: - return "" - - # Remove any blank UGC lines from the list - listsize=len(ugcList) - j=0 - while j < listsize: - if ugcList[j] == "": - del ugcList[j] - j=j+1 - - # Set up state variables and process intialize ugcStr with first ugc - # in ugcList - inSeq = 0 - ugcStr = ugcList[0] - curState = ugcStr[0:3] - lastNum = int(ugcList[0][3:]) - firstNum = 0 - lastUgc = ugcList[0] - #print "ugcList", ugcList - - # By initializing properly we don't need the first item - ugcList.remove(ugcList[0]) - - for ugc in ugcList: - ugcState = ugc[:3] - ugcNumStr = ugc[3:] - num = int(ugcNumStr) - if ugcState == curState: - if num == lastNum + 1: - if inSeq > 0: - # Replace the last ugcNumStr in sequence with the - # current ugcNumStr - # e.g. 062>063 becomes 062>064 - ugcStr = ugcStr[:len(ugcStr)-3] + ugcNumStr - inSeq += 1 - else: - ugcStr += ">" + ugcNumStr - inSeq = 1 - else: # num != lastNum + 1 - ugcStr = self.checkLastArrow(inSeq, ugcStr) - inSeq = 0 # reset sequence when number not in sequence - ugcStr += "-" + ugcNumStr - else: - ugcStr = self.checkLastArrow(inSeq, ugcStr) - ugcStr += "-" + ugc - curState = ugcState - inSeq = 0 #reset sequence when switching states - lastNum = num - lastUgc = ugc - - # May have to clean up last arrow at the end - ugcStr = self.checkLastArrow(inSeq, ugcStr) - #print "returning", ugcStr - return ugcStr - - def checkLastArrow(self, inSeq, ugcStr): - if inSeq == 1: - # Change the last arrow to - since - # we only had 2 in the sequence e.g. 
- # 062>063 should be 062-063 - arrowIndex = ugcStr.rfind(">") - if arrowIndex >= 0: - ugcStr = ugcStr[:arrowIndex] + "-" + ugcStr[arrowIndex+1:] - return ugcStr - - - # Header support for Watch/Warning products - def getCityList(self, areaList, label="This includes the cities of", - lineLength=66, areaDictName="AreaDictionary", addPeriod = False, - forceAlphaSort=False): - # Returns a list of cities (from the AreaDictionary) - - # Access the UGC information for the area(s) if available - areaDict = ModuleAccessor.ModuleAccessor().variable(areaDictName, "AreaDictionary") - if areaDict is None: - return "" - cities = [] - for areaName in areaList: - entry = areaDict[areaName] - if entry.has_key("ugcCities"): - ct = entry['ugcCities'] - for c in ct: - if len(c): - cities.append(c) - if len(cities) and (self.alphabetizeHeaders() == 1 or forceAlphaSort): - cities.sort() - - cityString = self.punctuateList(cities) - - if len(cityString) == 0: - return "" - else: - if addPeriod: - cityString = cityString + '.' - return self.endline(label + " " + cityString, lineLength, breakStr=[", "]) + "\n" - - # Returns a list of strings that describe the "areaList", such as - # Southwest Kansas, along with their county/zone names. Format returned - # is [(stateName, portionOfState, [(county/zone list,type)])]. The type - # is PARISH, COUNTY, ZONE, INDEPENDENT CITY. Duplicate names are - # eliminated. 
- def getGeneralAreaList(self, areaList, areaDictName="AreaDictionary"): - - # Access the UGC information for the area(s) if available - areaDict = ModuleAccessor.ModuleAccessor().variable(areaDictName, - "AreaDictionary") - if areaDict is None: - return [] - - geoAreas = {} - for areaName in areaList: - entry = areaDict[areaName] - if entry.has_key("ugcName"): - # Get state - state = areaName[0:2] - if entry.has_key("fullStateName"): - state = entry["fullStateName"] - #Special District of Columbia case - if state.upper() == "DISTRICT OF COLUMBIA": - state = "The District of Columbia" - # Get part-of-state information - partOfState = "" - if entry.has_key("partOfState"): - partOfState = entry["partOfState"] - - # get the county/zone name - zoneName = entry["ugcName"] - if entry.has_key("locationName"): - zoneName = entry["locationName"] #alternative name - if entry.has_key("ugcCode"): - codeType = entry["ugcCode"][2] - if codeType == "Z": - nameType = "zone" - elif codeType == "C": - indCty=entry.get("independentCity", 0) - if indCty == 1: - nameType = "independent city" - elif state == "Louisiana": - nameType = "parish" - else: - nameType = "county" - else: - codeType == "?" 
- value = (state, partOfState) - znt = (zoneName, nameType) - - if geoAreas.has_key(value): - names = geoAreas[value] - if znt not in names: - names.append(znt) - else: - geoAreas[value] = [znt] - - #now sort the zoneName or countyNames - for state, partState in geoAreas.keys(): - names = geoAreas[(state,partState)] - names.sort() - - #now separate them by land and water - #Anything to do with WATERS or other related items go last - waters = ['WATERS','LAKE','RIVER'] - gaLAND = [] - gaWATER = [] - for g,pg in geoAreas.keys(): - names = geoAreas[(g,pg)] - words = g.split(' ') - found = 0 - for w in waters: - if w in words: - found = 1 - break - if found: - gaWATER.append((g,pg,names)) - else: - gaLAND.append((g,pg,names)) - - #convert the output to a list with land first, then waters - geoAreas = [] - for g in gaLAND: - geoAreas.append(g) - for g in gaWATER: - geoAreas.append(g) - - geoAreas.sort() - - return geoAreas - - - - def formatCountyColumns(self, counties, colWidth, lineLength): - result = '' - curCol = 0 - for county in counties: - #print "county", len(county), county - # Need 2 spaces between county names - columns = (len(county)+2)/(colWidth+1) + 1 - countyWidth = columns*colWidth - if curCol > 0 and curCol + countyWidth > lineLength: - # Need to start on new line - result = result + "\n" - curCol = 0 - result = result + county.ljust(countyWidth) - curCol = curCol + countyWidth - return result - - def formatCountyString(self, state, counties): - # In Minnesota, (if state defined) plus list - if len(state): - result = "\n\nIn " + state + ", \n" - else: - result = "\n" - - return result + self.punctuateList(counties) - - def getIssuedByString(self, words = "Issued by National Weather Service "): - issuedByString = "" - try: - if self._issuedBy is not None: - issuedByString = "Issued by National Weather Service " + \ - self.getSiteInfo("wfoCityState",self._issuedBy) + "\n" - except: - pass - return issuedByString - - def timeFromDDHHMM(self, dtgString): - # 
converts a DDHHMM string into a time value. - #group1=day, group2=hour, group3=minute, returns seconds since epoch - try: - wmo_day = int(dtgString[0:2]) - wmo_hour = int(dtgString[2:4]) - wmo_min = int(dtgString[4:6]) - except: - s = "timeFromDDHHMM(), input string not in DDHHMM format: " +\ - dtgString - raise Exception, s - - #reset time zone to GMT for this function - prevTZ = os.environ.get('TZ', None) - os.environ['TZ'] = "GMT0" - time.tzset() - - #assemble time tuple - gmtuple = time.gmtime(time.time()) - wmo_year = gmtuple[0] #based on current time - wmo_month = gmtuple[1] #based on current time - current_day = gmtuple[2] - if current_day - wmo_day > 15: - # next month - wmo_month = wmo_month + 1 - if wmo_month > 12: - wmo_month = 1 - wmo_year = wmo_year + 1 - elif current_day - wmo_day < -15: - # previous month - wmo_month = wmo_month -1 - if wmo_month < 1: - wmo_month = 12 - wmo_year = wmo_year - 1 - - s = `wmo_year` + "%02i" % wmo_month + "%02i" % wmo_day + \ - "%02i" % wmo_hour + "%02i" % wmo_min + "UTC" - timeTuple = time.strptime(s, "%Y%m%d%H%M%Z") - wmoTime = time.mktime(timeTuple) #TZ is GMT0, so this mktime works - - #reset the time zone - if prevTZ is not None: - os.environ['TZ'] = prevTZ - else: - del os.environ['TZ'] - time.tzset() - return wmoTime - - # Given the issuance time, offset in hours from local time midnight, - # vtecString, returns the expire time (time_t), and the expire time - # as ddhhmm. 
- def getExpireTimeFromLToffset(self, issueTime, offsetLTHours, - vtecString, roundMinutes=15, fixedExpire=0): - - # get issue time in local time, then today's midnight local time - lclTime = time.localtime(issueTime) - midnight = time.mktime((lclTime[0], lclTime[1], lclTime[2], - 0, 0, 0, 0, 0, 0)) #midnight today local time - - # calculate expire time as offset into the future - expireTime = offsetLTHours*3600 + midnight #actual time for expire - - #use the other getExpireTime() - expireTime = self.getExpireTime(issueTime, expireTime, vtecString, - roundMinutes, fixedExpire) - - ddhhmm = time.strftime("%d%H%M", time.gmtime(expireTime)) - - return (expireTime, ddhhmm) - - # Given the issuance time, expiration time (desired), and the VTEC codes, - # returns the appropriate expiration time. Expiration time is the - # earliest of the specified expiration time, 1 hr if a CAN code - # is detected, or the ending time of ongoing events (CON, EXT, EXB, NEW). - # The issueTime and expireTime are ints, VTECCodes are a list of - # records from the Hazard's analyzed table. The fixedExpire flag - # indicates to ignore the VTEC actions when computing the expiration time. - def getExpireTime(self, issueTime, expireTime, vtecString, roundMinutes=15, - fixedExpire=0): - - - #hazard product - if not fixedExpire: - #vtec regular expression - vtecRE = '/[OTEX]\.([A-Z]{3})\.([A-Z]{4})\.([A-Z]{2})\.' 
+ \ - '([WAYSOFN])\.([0-9]{4})\.([0-9]{6})T([0-9]{4})Z\-' + \ - '([0-9]{6})T([0-9]{4})Z/' - - #break up the VTEC strings, decode ending time - vstrs = vtecString.split('\n') - canExpFound = 0 - activeFound = 0 - laterActive = None #later end time of all active events - for vs in vstrs: - search = re.search(vtecRE, vs) - if search: - action = search.group(1) - if action in ['CAN','EXP']: - canExpFound = 1 - elif action in ['NEW','CON','EXT','EXB','EXA']: - activeFound = 1 - endTime = self.timeFromYYYYMMDD_HHMM(search.group(8), - search.group(9)) - if endTime != 0: - if laterActive is not None: - laterActive = max(laterActive, endTime) - else: - laterActive = endTime - if laterActive is not None: - expireTime = min(expireTime, laterActive) - elif canExpFound and not activeFound: - expireTime = min(expireTime, issueTime+3600) #1hr from now - - #ensure expireTime is not before issueTime, and is at least 1 hour - if expireTime - issueTime < 3600: - expireTime = issueTime + 3600 - - - #round to next "roundMinutes" - roundValue = roundMinutes*60 #in seconds - delta = expireTime % roundValue # in seconds - baseTime = int(expireTime/roundValue)*roundValue - if delta/60 >= 1: #add the next increment - expireTime = baseTime + roundValue - else: #within 1 minute, don't add the next increment - expireTime = baseTime - - return expireTime - - - def timeFromYYYYMMDD_HHMM(self, yyyymmdd, hhmm): - #returns a value of seconds since epoch from a encoded YYYYMMDD - #and hhmm string. 
- - #if all zeros, return 0 - if yyyymmdd == "000000" and hhmm == "0000": - return 0 - else: - #reset time zone to GMT for this function - prevTZ = os.environ.get('TZ', None) - os.environ['TZ'] = "GMT0" - time.tzset() - - timeString = yyyymmdd + hhmm - timeTuple = time.strptime(timeString, "%y%m%d%H%M") - seconds = time.mktime(timeTuple) #TZ is GMT0, so mktime works - - #reset the time zone - if prevTZ is not None: - os.environ['TZ'] = prevTZ - else: - del os.environ['TZ'] - time.tzset() - return seconds - -############################# - - def getAreaDictEntry(self, accessArg, dict, entryName, firstOnly=0): - # Access the given area dictionary for the given entryName - # using the accessArg to find the area (or areas) - # "accessArg" can be: - # tree or argDict: Gets entry from current area - # "all": Gets all dict entries and returns a list - # a list of area labels: Gets a list of dict entries for the given areaLabels - # If firstOnly==1, return only the value for the first area found - # Otherwise, return a list of entry values from which duplicates have been removed - # If not found, return an empty list - entryValues = [] - for areaLabel in dict.keys(): - if accessArg != "all": - if type(accessArg) is types.DictType: - # tree or argDict - if not self.currentAreaContains(tree, [areaLabel]): - continue - if type(accessArg) is types.ListType: - # list of areas - if areaLabel not in accessArg: - continue - entry = dict[areaLabel] - if entry.has_key(entryName): - entryValue = entry[entryName] - if firstOnly: - # Only the FIRST entryValue is taken if multiple ones exist - return entryValue - else: - if type(entryValue) is types.ListType: - entryValues += entryValue - else: - entryValues.append(entryValue) - return self.removeDups(entryValues) - -############################# - - def synopsisUGC(self, siteID, pil=None): - # Get the synopsis UGC for the CWF - synopsisDict = self.synopsisCWF_dict() - if pil is None: - ugcInfo = synopsisDict.get(siteID) - else: - 
ugcInfo = synopsisDict.get((siteID, pil)) - if ugcInfo is None: - return "" - ugc = ugcInfo[0] - if type(ugc) is types.ListType: - # Add hyphens e.g. AMZ600-GMZ606 - ugc = "-".join(ugc) - return ugc - - def synopsisHeading(self, siteID, pil=None): - # Get the synopsis heading for the CWF - synopsisDict = self.synopsisCWF_dict() - if pil is None: - ugcInfo = synopsisDict.get(siteID) - else: - ugcInfo = synopsisDict.get((siteID, pil)) - if ugcInfo is None: - return "" - return self.endline(ugcInfo[1], self._lineLength) - - def synopsisCWF_dict(self): - #key is site id, or (site id, pil) for sites that produce multiple CWFs - #value is ('ugcCode','ugcDescription') - return { - 'CAR': ('ANZ005', - 'Synopsis for Eastport ME to Stonington (Deer Isle) ME out 25 NM'), - 'GYX': ('ANZ100', - 'Synopsis for Stonington (Deer Isle) ME to Merrimack River MA out 25 NM'), - 'BOX': ('ANZ200', - 'Synopsis for MA and RI waters'), - 'OKX': ('ANZ300', - 'Synopsis for Long Island waters and New York Harbor'), - 'PHI': ('ANZ400', - 'Synopsis for Sandy Hook NJ to Fenwick Island DE'), - 'LWX': ('ANZ500', - 'Synopsis for North Chesapeake Bay and the Tidal Potomac'), - 'AKQ': ('ANZ600', - 'Synopsis for Fenwick Island DE to Currituck Beach Light NC out ' +\ - '20 NM including Virginia portion of the Chesapeake Bay and ' +\ - 'Currituck Sound'), - 'MHX': ('AMZ100', - 'Synopsis for Currituck Beach Light to Surf City NC out 20 NM ' +\ - 'including Albemarle Sound and Pamlico Sound'), - 'ILM': ('AMZ200', - 'Synopsis for Surf City NC to South Santee River SC out 20 NM'), - 'CHS': ('AMZ300', - 'Synopsis for South Santee River SC to Savannah GA out 20 NM ' +\ - 'and Savannah GA to Altamaha Sound GA out 60 NM, including ' +\ - 'Charleston Harbor and Grays Reef National Marine Sanctuary'), - 'JAX': ('AMZ400', - 'Synopsis for Altamaha Sound GA to Flagler Beach FL out 60 NM'), - 'MLB': ('AMZ500', - 'Synopsis for Flagler Beach to Jupiter Inlet FL out 60 NM'), - 'MFL': (['AMZ600','GMZ606'], - 'Synopsis for 
Jupiter Inlet to Ocean Reef FL including Biscayne ' +\ - 'Bay out 60 NM and for East Cape Sable to Bonita Beach FL out 60 NM'), - 'SJU': ('AMZ700', - 'Synopsis for PR and U.S. Virgin Islands coastal waters'), - 'KEY': ('GMZ005', - 'Synopsis for Florida Bay and the Keys'), - 'TBW': ('GMZ800', - 'Synopsis for Bonita Beach to Suwannee River FL out 60 NM'), - 'TAE': ('GMZ700', - 'Synopsis for Suwannee River to Destin FL out 60 NM'), - 'MOB': ('GMZ600', - 'Synopsis for Destin FL to Pascagoula MS out 60 NM including Mobile Bay'), - 'LIX': (['GMZ500','GMZ501'], - 'Synopsis for Pascagoula MS to Southwest Pass of the Mississippi River' +\ - ' and Southwest Pass of the Mississippi River to Atchafalaya River LA' +\ - ' out 60 NM'), - 'LCH': ('GMZ400', - 'Synopsis for Atchafalaya River LA to High Island TX out 60 NM'), - 'HGX': ('GMZ300', - 'Synopsis for High Island to Matagorda Ship Channel TX out 60 NM'), - 'CRP': ('GMZ200', - 'Synopsis for Baffin Bay to Matagorda Ship Channel out 60 NM'), - 'BRO': ('GMZ100', - 'Synopsis from Baffin Bary to Rio Grande River TX out 60 NM'), - 'SEW': ('PZZ100', - 'Synopsis for Northern Washington Coast and Puget Sound'), - 'PQR': ('PZZ200', - 'Synopsis for Southern Washington and Northern Oregon Coast'), - 'MFR': ('PZZ300', - 'Synopsis for the Southern Oregon Coastal Waters'), - 'EKA': ('PZZ400', - 'Synopsis for Northern California Waters'), - 'MTR': ('PZZ500', - 'Synopsis for Central California Coast and Bays'), - 'LOX': ('PZZ600', - 'Synopsis for Southern California Coast and Santa Barbara Channel'), - 'SGX': ('PZZ700', - 'Synopsis for Far Southern California Coast'), - 'AJK': ('PKZ098', - 'Synopsis for Inside Waters'), - 'AER': (['PKZ196','PKZ197'], - 'Synopsis for the North Gulf Coast and Valdez Port Narrows and Arm'), - 'ALU': (['PKZ198','PKZ199'], - 'Synopsis for Southwest Alaska and the Aluetians ' +\ - 'including Chiniak and Marmot Bays'), - ('AFG','WCZ'): ('PKZ299', 'Synopsis for Northwest Coast'), - ('AFG','NSB'): ('PKZ298', 
'Synopsis for Arctic Coast'), - 'HFO': ('PHZ100', 'Synopsis for Hawaiian Waters'), - 'GUM': ('PMZ150', 'Synopsis for Marianas Waters'), - } - - - - - - - - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Header.py +# Methods for producing headers. +# +# Author: hansen +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 10/20/2014 #3685 randerso Changes to support mixed case products +# 07/15/2016 #5749 randerso Format lists with commas +# +## + +## +# This is a base file that is not intended to be overridden. 
+## + +import EditAreaUtils +import StringUtils +import ModuleAccessor +import string, os, time, types, re +import TimeRangeUtils +import TimeRange, AbsTime +import logging + +class Header(EditAreaUtils.EditAreaUtils, StringUtils.StringUtils): +# TimeRangeUtils): + def __init__(self): + EditAreaUtils.EditAreaUtils.__init__(self) + StringUtils.StringUtils.__init__(self) +# TimeRangeUtils.TimeRangeUtils.__init__(self) + self.log = logging.getLogger("FormatterRunner.Header.Header") + + def makeAreaHeader(self, argDict, areaLabel, issueTime, expireTime, + areaDictName, defaultEditAreas, + cityDescriptor ="Including the cities of", + areaList=None, includeCities=1, includeZoneNames=1, + includeIssueTime=1, includeCodes=1, includeVTECString=1, + hVTECString=None, accurateCities=False): + # Make a UGC area header for the given areaLabel + # Determine list of areas (there could be more than one if we are using a combination) + + if areaDictName is None or areaDictName == "None": + return areaLabel + "\n" + + # If we didn't supply an areaList, + # Use combinations file or defaultEditAreas + if areaList is None: + combinations = argDict["combinations"] + if combinations is not None: + areaList = self.getCurrentAreaNames(argDict, areaLabel) + else: + for editArea, label in defaultEditAreas: + if label == areaLabel: + areaList = [editArea] + + try: + # Remove suffixes if necessary + if self._editAreaSuffix is not None: + areaList = self.removeSuffixes(areaList, self._editAreaSuffix) + except: + pass + + # Access the UGC information for the area(s) if available + accessor = ModuleAccessor.ModuleAccessor() + areaDict = accessor.variable(areaDictName, "AreaDictionary") + ugcCityList = [] + if areaDict is None: # create a dummy header + codeString = "STZxxx-" + nameString = areaLabel + cityString = "" + else: + codeString = "" + nameString = "" + cityString = "" + areaList, ugcList = self.makeUGCList(areaDict, areaList) + codeString = self.makeUGCString(ugcList) + ugcNameList = [] 
+ for areaName in areaList: + if areaName in list(areaDict.keys()): + if areaName in areaDict: + entry = areaDict[areaName] + else: + entry = {} + log.error(\ + "AreaDictionary missing definition for [" + \ + areaName + "].") + if 'ugcName' in entry: + ugcName = entry['ugcName'] + else: + ugcName = areaName #missing UGCname + log.error(\ + "AreaDictionary missing ugcName definition for [" + \ + areaName + "].") + if ugcName not in ugcNameList: + ugcNameList.append(ugcName) + if "ugcCities" in entry: + cities = entry["ugcCities"] + for c in cities: + if len(c) and c not in ugcCityList: + ugcCityList.append(c) + else: + ugcNameList.append(areaName) + log.error("AreaDictionary does not contain " +\ + 'ugcName definition for ', areaName) + + if self.alphabetizeHeaders() == 1: + # Alphabetize both lists. + ugcNameList.sort() + ugcCityList.sort() + + # Build nameString and cityString strings: + for ugcName in ugcNameList: + nameString = nameString + ugcName + "-" + + cityString = self.punctuateList(ugcCityList) + + # Compute accurate city list + if accurateCities and \ + len(ugcCityList) > 0 and "hazards" in argDict: + ugcCityList, cityString = self.makeAccurateCityList(areaList, \ + ugcCityList, argDict) + + # get the VTEC string from the HazardsTable + VTECString = "" + VTECRecords = [] + if "hazards" in argDict and includeVTECString: + hazards = argDict["hazards"] + VTECString = hazards.getVTECString(areaList) + #must have VTECString, in order to have hVTEC string + if hVTECString is not None and len(VTECString) and len(hVTECString): + VTECString = VTECString + hVTECString + "\n" + + # expiration time is dependent upon the passed in expiration time + # and the VTEC strings. 
expireT is seconds since epoch + if type(expireTime) is int or\ + type(expireTime) is float: + expireTime = AbsTime.AbsTime(int(expireTime)) + try: + if self._fixedExpire == 1: + fixed = 1 + else: + fixed = 0 + except: + fixed = 0 + expireT = self.getExpireTime(issueTime.unixTime(), + expireTime.unixTime(), VTECString, fixedExpire = fixed) + + # format the expiration time + expireTimeRange = TimeRange.TimeRange(AbsTime.AbsTime(expireT), + AbsTime.AbsTime(expireT+1)) + expireTime = self.timeDisplay(expireTimeRange, "", "","%d%H%M", "") + codeString = self.endline(codeString + "-" + expireTime + "-", + linelength=self._lineLength, breakStr=["-"]) + + # get this time zone + thisTimeZone = os.environ["TZ"] + zoneList = [] + # check to see if we have any areas outside our time zone + for areaName in areaList: + if areaName in list(areaDict.keys()): + entry = areaDict[areaName] + if "ugcTimeZone" not in entry: #add your site tz + if thisTimeZone not in zoneList: + zoneList.append(thisTimeZone) + continue # skip this entry + timeZoneList = entry["ugcTimeZone"] + if type(timeZoneList) == bytes: # a single value + timeZoneList = [timeZoneList] # make it into a list + for timeZone in timeZoneList: + if timeZone not in zoneList: + zoneList.append(timeZone) + + # if the resulting zoneList is empty, put in our time zone + if len(zoneList) == 0: + zoneList.append(thisTimeZone) + + # if the resulting zoneList has our time zone in it, be sure it + # is the first one in the list + try: + index = zoneList.index(thisTimeZone) + if index != 0: + del zoneList[index] + zoneList.insert(0, thisTimeZone) + except: + pass + + # now create the time string + issueTimeStr = '' + timeStrs = [] + for timeZone in zoneList: + timeStr = self.formatTimeString( + issueTime.unixTime(), "%l%M %p %Z %a %b %e %Y", timeZone) + timeStr = string.replace(timeStr, " ", " ") + timeStr = string.strip(timeStr) + if timeStr not in timeStrs: + timeStrs.append(timeStr) + if len(timeStrs) == 1: + issueTimeStr = 
timeStrs[0] + else: + issueTimeStr = timeStrs[0] + for i in range(1, len(timeStrs)): + issueTimeStr = issueTimeStr + " /" + timeStrs[i] + "/" + + try: + if self._useRegionLabel == 1: + if (areaLabel != ""): + nameString = areaLabel + except: + pass + + + nameString = self.endline(nameString, linelength=self._lineLength,breakStr=["-"]) + if cityString != "": + numCities = len(ugcCityList) + if numCities == 1: + def preserveCase(matchobj): + orig = matchobj.group(0) + repl = 'city' + retv = '' + for i in range(len(repl)): + c = repl[i] + if orig[i].isupper(): + c = c.upper() + retv = retv + c + return retv + cityDescriptor = re.sub("cities", preserveCase, cityDescriptor, flags=re.IGNORECASE) + cityString = self.endline(cityDescriptor + " " + cityString, + linelength=self._lineLength, breakStr=[", "]) + issueTimeStr = issueTimeStr + "\n\n" + try: + if self._includeHeader == 0: + issueTimeStr = "\n" + codeString = "" + cityString = "" + except: + pass + if includeCities == 0: + cityString = "" + if includeZoneNames == 0: + nameString = "" + if includeIssueTime == 0: + issueTimeStr = "" + if includeCodes == 0: + codeString = "" + if includeVTECString == 0: + VTECString = "" + header = codeString + VTECString + nameString + cityString + issueTimeStr + return header + + # Make accurate city list based on the grids + # In case of missing grid (CAN/EXP actions), check active table for cities + # in the previous product + # When cities cannot be determined with certainty, add framing code to city + # list so it may be unlocked for editing + + def makeAccurateCityList(self, areaList, ugcCityList, argDict): + hazards = argDict["hazards"] + vtecS = hazards.getHazardList(areaList) + + # Separate hazards according to action + canRecords = [] + upgRecords = [] + expRecords = [] + actRecords = [] + for vtec in vtecS: + if vtec['act'] == 'CAN': + canRecords.append(vtec) + elif vtec['act'] == 'UPG': + upgRecords.append(vtec) + elif vtec['act'] == 'EXP': + expRecords.append(vtec) + 
else: + actRecords.append(vtec) + + # Now determine the cities corresponding to the active grids + citylist = hazards.getCities(ugcCityList, actRecords) + + # See if we can determine cities for EXP records from grids + unresolved = [] + if len(expRecords): + for expRec in expRecords: + cities = hazards.getCitiesForEXP(ugcCityList, + areaList[0], expRec['phen'], expRec['sig'], expRec['endTime']) + if cities is None: + unresolved.append(expRec) + else: + citylist += cities + + # check if the full list is used and if we need to check the + # previous product + + fullListUsed = 1 + for city in ugcCityList: + if city not in citylist: + fullListUsed = 0 + break + + editable = 0 + if fullListUsed: + citylist = ugcCityList[:] + + elif (unresolved + canRecords): + + # For VTEC not associated with a grid, + # try to extract cities from previous product + # If without absolute certainty, make city list editable + + cities, certain = hazards.getCitiesFromPrevious(areaList, + expRecords + canRecords, + ignoredVTEC = upgRecords) + if cities is not None: + citylist += cities + editable = not certain + else: + # failed...use the full list and make it editable + editable = 1 + citylist = ugcCityList[:] + + # filter and order + newlist = [] + for city in ugcCityList: + if city in citylist: + newlist.append(city) + citylist = newlist + + cityString = self.punctuateList(citylist) + + # add framing code so the city list will be editable + if editable and len(cityString) > 0: + cityString = '|*' + cityString + '*|' + + return citylist, cityString + + + # Return new areaList and associated ugcList both sorted by ugcCode. + # Extracts ugcCode from the area dictionary for the each areaName in areaList. + # Will accept complex UGC strings in the area dictionary such as: + # ORZ001-004-WAZ021>023-029. + # However, in this case, one areaName could correspond to multiple + # ugcCodes and thus, the areaList is not guaranteed to follow + # the sorted ugcCode list order. 
+ def makeUGCList(self, areaDict, areaList): + # Make a list of (areaName, ugcCode) tuples + areaUgcList = [] + for areaName in areaList: + if areaName in list(areaDict.keys()): + ugc = areaDict[areaName]['ugcCode'] + if ugc.find("-") >= 0 or ugc.find(">") >= 0: + ugcs = self.expandComplexUgc(ugc) + for ugc in ugcs: + areaUgcList.append((areaName, ugc)) + else: + areaUgcList.append((areaName, ugc)) + + # Sort this list in ugc order + areaUgcList.sort(self.ugcSort) + + # Make new "parallel" lists of areaNames and ugcCodes + ugcList = [] + newAreaList = [] + for areaName, ugcCode in areaUgcList: + if areaName not in newAreaList: + newAreaList.append(areaName) + if ugcCode not in ugcList: + ugcList.append(ugcCode) + + #print "newAreaList, ugcList", newAreaList, ugcList + return newAreaList, ugcList + + + def expandComplexUgc(self, complexUgc): + # Given a complex ugc string e.g. ORZ001-004-WAZ021>023-029, + # return a list of all ugcs represented + ugcList = [] + curState = "" + arrowFlag = 0 + lastNum = 0 + # While we still have a dash or arrow + while len(complexUgc) > 0: + dash = complexUgc.find("-") + arrow = complexUgc.find(">") + # Peel off the next ugc from the complexUgc string + if dash < 0 and arrow < 0: + sep = len(complexUgc) + elif arrow >= 0 and arrow < dash: + sep = arrow + elif dash >= 0 and dash < arrow: + sep = dash + elif dash >= 0: + sep = dash + else: + sep = arrow + ugc = complexUgc[:sep] + complexUgc = complexUgc[sep+1:] + # Add this ugc to the list + nextUgcs, curState, lastNum = self.expandUgc(ugc, curState, lastNum, arrowFlag) + arrowFlag = 0 + if sep == arrow: + arrowFlag = 1 + for nextUgc in nextUgcs: + ugcList.append(nextUgc) + return ugcList + + def expandUgc(self, ugc, curState, lastNum, arrowFlag): + # If the ugc has a state identifier on it, + # return it as is and return it's state as the curState. 
+ # Otherwise append the curState to the ugc and + # return it and curState + if curState == "": + curState = ugc[:3] + return [ugc], curState, int(ugc[3:]) + state = ugc[:3] + try: + # If simply a number, add the current state + num = int(state) + # Check for arrow + ugcList = [] + if arrowFlag: + for ugcNum in range(lastNum+1, num+1): + curNum = str(ugcNum) + curNum = curNum.zfill(3) + ugcList.append(curState + curNum) + else: + ugcList.append(curState + ugc) + return ugcList, curState, num + except: + curState = state + return [ugc], curState, int(ugc[3:]) + + def ugcSort(self, val1, val2): + name1, ugc1 = val1 + name2, ugc2 = val2 + if ugc1 > ugc2: + return 1 + elif ugc1 == ugc2: + return 0 + else: + return -1 + + ### creates a UGCCode header string from the specified list of UGC codes. + def makeUGCString(self, ugcList): + # if nothing in the list, return empty string + if len(ugcList) == 0: + return "" + + # Remove any blank UGC lines from the list + listsize=len(ugcList) + j=0 + while j < listsize: + if ugcList[j] == "": + del ugcList[j] + j=j+1 + + # Set up state variables and process intialize ugcStr with first ugc + # in ugcList + inSeq = 0 + ugcStr = ugcList[0] + curState = ugcStr[0:3] + lastNum = int(ugcList[0][3:]) + firstNum = 0 + lastUgc = ugcList[0] + #print "ugcList", ugcList + + # By initializing properly we don't need the first item + ugcList.remove(ugcList[0]) + + for ugc in ugcList: + ugcState = ugc[:3] + ugcNumStr = ugc[3:] + num = int(ugcNumStr) + if ugcState == curState: + if num == lastNum + 1: + if inSeq > 0: + # Replace the last ugcNumStr in sequence with the + # current ugcNumStr + # e.g. 
062>063 becomes 062>064 + ugcStr = ugcStr[:len(ugcStr)-3] + ugcNumStr + inSeq += 1 + else: + ugcStr += ">" + ugcNumStr + inSeq = 1 + else: # num != lastNum + 1 + ugcStr = self.checkLastArrow(inSeq, ugcStr) + inSeq = 0 # reset sequence when number not in sequence + ugcStr += "-" + ugcNumStr + else: + ugcStr = self.checkLastArrow(inSeq, ugcStr) + ugcStr += "-" + ugc + curState = ugcState + inSeq = 0 #reset sequence when switching states + lastNum = num + lastUgc = ugc + + # May have to clean up last arrow at the end + ugcStr = self.checkLastArrow(inSeq, ugcStr) + #print "returning", ugcStr + return ugcStr + + def checkLastArrow(self, inSeq, ugcStr): + if inSeq == 1: + # Change the last arrow to - since + # we only had 2 in the sequence e.g. + # 062>063 should be 062-063 + arrowIndex = ugcStr.rfind(">") + if arrowIndex >= 0: + ugcStr = ugcStr[:arrowIndex] + "-" + ugcStr[arrowIndex+1:] + return ugcStr + + + # Header support for Watch/Warning products + def getCityList(self, areaList, label="This includes the cities of", + lineLength=66, areaDictName="AreaDictionary", addPeriod = False, + forceAlphaSort=False): + # Returns a list of cities (from the AreaDictionary) + + # Access the UGC information for the area(s) if available + areaDict = ModuleAccessor.ModuleAccessor().variable(areaDictName, "AreaDictionary") + if areaDict is None: + return "" + cities = [] + for areaName in areaList: + entry = areaDict[areaName] + if "ugcCities" in entry: + ct = entry['ugcCities'] + for c in ct: + if len(c): + cities.append(c) + if len(cities) and (self.alphabetizeHeaders() == 1 or forceAlphaSort): + cities.sort() + + cityString = self.punctuateList(cities) + + if len(cityString) == 0: + return "" + else: + if addPeriod: + cityString = cityString + '.' + return self.endline(label + " " + cityString, lineLength, breakStr=[", "]) + "\n" + + # Returns a list of strings that describe the "areaList", such as + # Southwest Kansas, along with their county/zone names. 
Format returned + # is [(stateName, portionOfState, [(county/zone list,type)])]. The type + # is PARISH, COUNTY, ZONE, INDEPENDENT CITY. Duplicate names are + # eliminated. + def getGeneralAreaList(self, areaList, areaDictName="AreaDictionary"): + + # Access the UGC information for the area(s) if available + areaDict = ModuleAccessor.ModuleAccessor().variable(areaDictName, + "AreaDictionary") + if areaDict is None: + return [] + + geoAreas = {} + for areaName in areaList: + entry = areaDict[areaName] + if "ugcName" in entry: + # Get state + state = areaName[0:2] + if "fullStateName" in entry: + state = entry["fullStateName"] + #Special District of Columbia case + if state.upper() == "DISTRICT OF COLUMBIA": + state = "The District of Columbia" + # Get part-of-state information + partOfState = "" + if "partOfState" in entry: + partOfState = entry["partOfState"] + + # get the county/zone name + zoneName = entry["ugcName"] + if "locationName" in entry: + zoneName = entry["locationName"] #alternative name + if "ugcCode" in entry: + codeType = entry["ugcCode"][2] + if codeType == "Z": + nameType = "zone" + elif codeType == "C": + indCty=entry.get("independentCity", 0) + if indCty == 1: + nameType = "independent city" + elif state == "Louisiana": + nameType = "parish" + else: + nameType = "county" + else: + codeType == "?" 
+ value = (state, partOfState) + znt = (zoneName, nameType) + + if value in geoAreas: + names = geoAreas[value] + if znt not in names: + names.append(znt) + else: + geoAreas[value] = [znt] + + #now sort the zoneName or countyNames + for state, partState in list(geoAreas.keys()): + names = geoAreas[(state,partState)] + names.sort() + + #now separate them by land and water + #Anything to do with WATERS or other related items go last + waters = ['WATERS','LAKE','RIVER'] + gaLAND = [] + gaWATER = [] + for g,pg in list(geoAreas.keys()): + names = geoAreas[(g,pg)] + words = g.split(' ') + found = 0 + for w in waters: + if w in words: + found = 1 + break + if found: + gaWATER.append((g,pg,names)) + else: + gaLAND.append((g,pg,names)) + + #convert the output to a list with land first, then waters + geoAreas = [] + for g in gaLAND: + geoAreas.append(g) + for g in gaWATER: + geoAreas.append(g) + + geoAreas.sort() + + return geoAreas + + + + def formatCountyColumns(self, counties, colWidth, lineLength): + result = '' + curCol = 0 + for county in counties: + #print "county", len(county), county + # Need 2 spaces between county names + columns = (len(county)+2)/(colWidth+1) + 1 + countyWidth = columns*colWidth + if curCol > 0 and curCol + countyWidth > lineLength: + # Need to start on new line + result = result + "\n" + curCol = 0 + result = result + county.ljust(countyWidth) + curCol = curCol + countyWidth + return result + + def formatCountyString(self, state, counties): + # In Minnesota, (if state defined) plus list + if len(state): + result = "\n\nIn " + state + ", \n" + else: + result = "\n" + + return result + self.punctuateList(counties) + + def getIssuedByString(self, words = "Issued by National Weather Service "): + issuedByString = "" + try: + if self._issuedBy is not None: + issuedByString = "Issued by National Weather Service " + \ + self.getSiteInfo("wfoCityState",self._issuedBy) + "\n" + except: + pass + return issuedByString + + def timeFromDDHHMM(self, 
dtgString): + # converts a DDHHMM string into a time value. + #group1=day, group2=hour, group3=minute, returns seconds since epoch + try: + wmo_day = int(dtgString[0:2]) + wmo_hour = int(dtgString[2:4]) + wmo_min = int(dtgString[4:6]) + except: + s = "timeFromDDHHMM(), input string not in DDHHMM format: " +\ + dtgString + raise Exception(s) + + #reset time zone to GMT for this function + prevTZ = os.environ.get('TZ', None) + os.environ['TZ'] = "GMT0" + time.tzset() + + #assemble time tuple + gmtuple = time.gmtime(time.time()) + wmo_year = gmtuple[0] #based on current time + wmo_month = gmtuple[1] #based on current time + current_day = gmtuple[2] + if current_day - wmo_day > 15: + # next month + wmo_month = wmo_month + 1 + if wmo_month > 12: + wmo_month = 1 + wmo_year = wmo_year + 1 + elif current_day - wmo_day < -15: + # previous month + wmo_month = wmo_month -1 + if wmo_month < 1: + wmo_month = 12 + wmo_year = wmo_year - 1 + + s = repr(wmo_year) + "%02i" % wmo_month + "%02i" % wmo_day + \ + "%02i" % wmo_hour + "%02i" % wmo_min + "UTC" + timeTuple = time.strptime(s, "%Y%m%d%H%M%Z") + wmoTime = time.mktime(timeTuple) #TZ is GMT0, so this mktime works + + #reset the time zone + if prevTZ is not None: + os.environ['TZ'] = prevTZ + else: + del os.environ['TZ'] + time.tzset() + return wmoTime + + # Given the issuance time, offset in hours from local time midnight, + # vtecString, returns the expire time (time_t), and the expire time + # as ddhhmm. 
+ def getExpireTimeFromLToffset(self, issueTime, offsetLTHours, + vtecString, roundMinutes=15, fixedExpire=0): + + # get issue time in local time, then today's midnight local time + lclTime = time.localtime(issueTime) + midnight = time.mktime((lclTime[0], lclTime[1], lclTime[2], + 0, 0, 0, 0, 0, 0)) #midnight today local time + + # calculate expire time as offset into the future + expireTime = offsetLTHours*3600 + midnight #actual time for expire + + #use the other getExpireTime() + expireTime = self.getExpireTime(issueTime, expireTime, vtecString, + roundMinutes, fixedExpire) + + ddhhmm = time.strftime("%d%H%M", time.gmtime(expireTime)) + + return (expireTime, ddhhmm) + + # Given the issuance time, expiration time (desired), and the VTEC codes, + # returns the appropriate expiration time. Expiration time is the + # earliest of the specified expiration time, 1 hr if a CAN code + # is detected, or the ending time of ongoing events (CON, EXT, EXB, NEW). + # The issueTime and expireTime are ints, VTECCodes are a list of + # records from the Hazard's analyzed table. The fixedExpire flag + # indicates to ignore the VTEC actions when computing the expiration time. + def getExpireTime(self, issueTime, expireTime, vtecString, roundMinutes=15, + fixedExpire=0): + + + #hazard product + if not fixedExpire: + #vtec regular expression + vtecRE = '/[OTEX]\.([A-Z]{3})\.([A-Z]{4})\.([A-Z]{2})\.' 
+ \ + '([WAYSOFN])\.([0-9]{4})\.([0-9]{6})T([0-9]{4})Z\-' + \ + '([0-9]{6})T([0-9]{4})Z/' + + #break up the VTEC strings, decode ending time + vstrs = vtecString.split('\n') + canExpFound = 0 + activeFound = 0 + laterActive = None #later end time of all active events + for vs in vstrs: + search = re.search(vtecRE, vs) + if search: + action = search.group(1) + if action in ['CAN','EXP']: + canExpFound = 1 + elif action in ['NEW','CON','EXT','EXB','EXA']: + activeFound = 1 + endTime = self.timeFromYYYYMMDD_HHMM(search.group(8), + search.group(9)) + if endTime != 0: + if laterActive is not None: + laterActive = max(laterActive, endTime) + else: + laterActive = endTime + if laterActive is not None: + expireTime = min(expireTime, laterActive) + elif canExpFound and not activeFound: + expireTime = min(expireTime, issueTime+3600) #1hr from now + + #ensure expireTime is not before issueTime, and is at least 1 hour + if expireTime - issueTime < 3600: + expireTime = issueTime + 3600 + + + #round to next "roundMinutes" + roundValue = roundMinutes*60 #in seconds + delta = expireTime % roundValue # in seconds + baseTime = int(expireTime/roundValue)*roundValue + if delta/60 >= 1: #add the next increment + expireTime = baseTime + roundValue + else: #within 1 minute, don't add the next increment + expireTime = baseTime + + return expireTime + + + def timeFromYYYYMMDD_HHMM(self, yyyymmdd, hhmm): + #returns a value of seconds since epoch from a encoded YYYYMMDD + #and hhmm string. 
+ + #if all zeros, return 0 + if yyyymmdd == "000000" and hhmm == "0000": + return 0 + else: + #reset time zone to GMT for this function + prevTZ = os.environ.get('TZ', None) + os.environ['TZ'] = "GMT0" + time.tzset() + + timeString = yyyymmdd + hhmm + timeTuple = time.strptime(timeString, "%y%m%d%H%M") + seconds = time.mktime(timeTuple) #TZ is GMT0, so mktime works + + #reset the time zone + if prevTZ is not None: + os.environ['TZ'] = prevTZ + else: + del os.environ['TZ'] + time.tzset() + return seconds + +############################# + + def getAreaDictEntry(self, accessArg, dict, entryName, firstOnly=0): + # Access the given area dictionary for the given entryName + # using the accessArg to find the area (or areas) + # "accessArg" can be: + # tree or argDict: Gets entry from current area + # "all": Gets all dict entries and returns a list + # a list of area labels: Gets a list of dict entries for the given areaLabels + # If firstOnly==1, return only the value for the first area found + # Otherwise, return a list of entry values from which duplicates have been removed + # If not found, return an empty list + entryValues = [] + for areaLabel in list(dict.keys()): + if accessArg != "all": + if type(accessArg) is dict: + # tree or argDict + if not self.currentAreaContains(tree, [areaLabel]): + continue + if type(accessArg) is list: + # list of areas + if areaLabel not in accessArg: + continue + entry = dict[areaLabel] + if entryName in entry: + entryValue = entry[entryName] + if firstOnly: + # Only the FIRST entryValue is taken if multiple ones exist + return entryValue + else: + if type(entryValue) is list: + entryValues += entryValue + else: + entryValues.append(entryValue) + return self.removeDups(entryValues) + +############################# + + def synopsisUGC(self, siteID, pil=None): + # Get the synopsis UGC for the CWF + synopsisDict = self.synopsisCWF_dict() + if pil is None: + ugcInfo = synopsisDict.get(siteID) + else: + ugcInfo = synopsisDict.get((siteID, 
pil)) + if ugcInfo is None: + return "" + ugc = ugcInfo[0] + if type(ugc) is list: + # Add hyphens e.g. AMZ600-GMZ606 + ugc = "-".join(ugc) + return ugc + + def synopsisHeading(self, siteID, pil=None): + # Get the synopsis heading for the CWF + synopsisDict = self.synopsisCWF_dict() + if pil is None: + ugcInfo = synopsisDict.get(siteID) + else: + ugcInfo = synopsisDict.get((siteID, pil)) + if ugcInfo is None: + return "" + return self.endline(ugcInfo[1], self._lineLength) + + def synopsisCWF_dict(self): + #key is site id, or (site id, pil) for sites that produce multiple CWFs + #value is ('ugcCode','ugcDescription') + return { + 'CAR': ('ANZ005', + 'Synopsis for Eastport ME to Stonington (Deer Isle) ME out 25 NM'), + 'GYX': ('ANZ100', + 'Synopsis for Stonington (Deer Isle) ME to Merrimack River MA out 25 NM'), + 'BOX': ('ANZ200', + 'Synopsis for MA and RI waters'), + 'OKX': ('ANZ300', + 'Synopsis for Long Island waters and New York Harbor'), + 'PHI': ('ANZ400', + 'Synopsis for Sandy Hook NJ to Fenwick Island DE'), + 'LWX': ('ANZ500', + 'Synopsis for North Chesapeake Bay and the Tidal Potomac'), + 'AKQ': ('ANZ600', + 'Synopsis for Fenwick Island DE to Currituck Beach Light NC out ' +\ + '20 NM including Virginia portion of the Chesapeake Bay and ' +\ + 'Currituck Sound'), + 'MHX': ('AMZ100', + 'Synopsis for Currituck Beach Light to Surf City NC out 20 NM ' +\ + 'including Albemarle Sound and Pamlico Sound'), + 'ILM': ('AMZ200', + 'Synopsis for Surf City NC to South Santee River SC out 20 NM'), + 'CHS': ('AMZ300', + 'Synopsis for South Santee River SC to Savannah GA out 20 NM ' +\ + 'and Savannah GA to Altamaha Sound GA out 60 NM, including ' +\ + 'Charleston Harbor and Grays Reef National Marine Sanctuary'), + 'JAX': ('AMZ400', + 'Synopsis for Altamaha Sound GA to Flagler Beach FL out 60 NM'), + 'MLB': ('AMZ500', + 'Synopsis for Flagler Beach to Jupiter Inlet FL out 60 NM'), + 'MFL': (['AMZ600','GMZ606'], + 'Synopsis for Jupiter Inlet to Ocean Reef FL including 
Biscayne ' +\ + 'Bay out 60 NM and for East Cape Sable to Bonita Beach FL out 60 NM'), + 'SJU': ('AMZ700', + 'Synopsis for PR and U.S. Virgin Islands coastal waters'), + 'KEY': ('GMZ005', + 'Synopsis for Florida Bay and the Keys'), + 'TBW': ('GMZ800', + 'Synopsis for Bonita Beach to Suwannee River FL out 60 NM'), + 'TAE': ('GMZ700', + 'Synopsis for Suwannee River to Destin FL out 60 NM'), + 'MOB': ('GMZ600', + 'Synopsis for Destin FL to Pascagoula MS out 60 NM including Mobile Bay'), + 'LIX': (['GMZ500','GMZ501'], + 'Synopsis for Pascagoula MS to Southwest Pass of the Mississippi River' +\ + ' and Southwest Pass of the Mississippi River to Atchafalaya River LA' +\ + ' out 60 NM'), + 'LCH': ('GMZ400', + 'Synopsis for Atchafalaya River LA to High Island TX out 60 NM'), + 'HGX': ('GMZ300', + 'Synopsis for High Island to Matagorda Ship Channel TX out 60 NM'), + 'CRP': ('GMZ200', + 'Synopsis for Baffin Bay to Matagorda Ship Channel out 60 NM'), + 'BRO': ('GMZ100', + 'Synopsis from Baffin Bary to Rio Grande River TX out 60 NM'), + 'SEW': ('PZZ100', + 'Synopsis for Northern Washington Coast and Puget Sound'), + 'PQR': ('PZZ200', + 'Synopsis for Southern Washington and Northern Oregon Coast'), + 'MFR': ('PZZ300', + 'Synopsis for the Southern Oregon Coastal Waters'), + 'EKA': ('PZZ400', + 'Synopsis for Northern California Waters'), + 'MTR': ('PZZ500', + 'Synopsis for Central California Coast and Bays'), + 'LOX': ('PZZ600', + 'Synopsis for Southern California Coast and Santa Barbara Channel'), + 'SGX': ('PZZ700', + 'Synopsis for Far Southern California Coast'), + 'AJK': ('PKZ098', + 'Synopsis for Inside Waters'), + 'AER': (['PKZ196','PKZ197'], + 'Synopsis for the North Gulf Coast and Valdez Port Narrows and Arm'), + 'ALU': (['PKZ198','PKZ199'], + 'Synopsis for Southwest Alaska and the Aluetians ' +\ + 'including Chiniak and Marmot Bays'), + ('AFG','WCZ'): ('PKZ299', 'Synopsis for Northwest Coast'), + ('AFG','NSB'): ('PKZ298', 'Synopsis for Arctic Coast'), + 'HFO': ('PHZ100', 
'Synopsis for Hawaiian Waters'), + 'GUM': ('PMZ150', 'Synopsis for Marianas Waters'), + } + + + + + + + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Interfaces.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Interfaces.py index 4cbec9ff39..cad14bdea7 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Interfaces.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Interfaces.py @@ -1,196 +1,196 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# Interfaces.py -# Methods for interfacing to the Text Formatter infrastructure components. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import string, types -import Analysis -import Translator -import TextUtils -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID - -class Interfaces(TextUtils.TextUtils): - def __init__(self): - TextUtils.TextUtils.__init__(self) - - def getSampler(self, argDict, sampleInfo, sampleFromServer=0): - # Get a HistoSampler given - # sampleInfo, which is a list of tuples, or just a single tuple - # of tuples ([elements], [periods], [areas]) - # the elements are [(name, method)] -- basically the analysis list - # the periods [(timeRange, label)] - # areas [(name,label)] or [(refData, label)] or [(refID, label)] - ifpClient = argDict["ifpClient"] - databaseID = argDict["databaseID"] - - from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID, ReferenceData - from com.raytheon.viz.gfe.sampler import SamplerRequest, HistoSampler - from java.util import ArrayList - - # convert input sampleInfo to samplerRequests - samplerRequests = ArrayList() - if type(sampleInfo) == tuple: - sampleInfo = [sampleInfo] - for si in sampleInfo: - elements, periods, areas = si - for e in elements: - parmID = self.getParmID(e[0], databaseID) - for p in periods: - for a in areas: - if type(a[0]) is str: - samplerRequests.add(SamplerRequest( \ - parmID, ReferenceID(a[0]), p[0].toJavaObj())) - elif hasattr(a[0], "java_name") and \ - (a[0].java_name == ReferenceID.java_name or - a[0].java_name == ReferenceData.java_name): - samplerRequests.add( - SamplerRequest(parmID, a[0], p[0].toJavaObj())) - else: - raise TypeError("area is not String, ReferenceID, or ReferenceData") - - # do sampling - if sampleFromServer: - sampler = ifpClient.sampleRequest(samplerRequests) - else: - sampler = 
HistoSampler(ifpClient.getJavaClient(), samplerRequests) - if sampler.isValid() != 1: - print "Cannot Sample: Check for invalid Weather Elements, ",\ - "Invalid Areas", str(samplerRequests) - return None - #print "sampler ", sampler - return sampler - - # Interfaces to Analysis - def getStatList(self, sampler, analysisList, timeRanges, editArea, componentName=None): - # For each period, get Statistics specified in analysisList - # over the Edit Area - statList = [] - for timeRange, label in timeRanges: - stats = self.getStatDict( - sampler, analysisList, timeRange, editArea, componentName) - statList.append(stats) - return statList - - def getStatDict(self, sampler, analysisList, timeRange, area, componentName=None): - # Get Statistic dictionary for given analysisList, timeRange, - # and area - # Area can be ReferenceData or text string - - # Convert area to ID if necessary - if area is None: - return None - if isinstance(area, str): - area = ReferenceID(area) - else: - area = area.getId() - - return Analysis.Analysis(sampler).createStats( - analysisList, area, timeRange, componentName) - - def getStats(self, statDict, entry, argDict=None): - if statDict is None: - return None - try: - stats = statDict[entry] - except: - stats = None - # Look for any temporal resolution e.g. - # if entry = "Wind__vectorAvg", - # return any temporal resolution for - # vectorAvg - if string.find(entry, "__") >= 0: - for key in statDict.keys(): - if string.find(key, entry) >= 0: - stats = statDict[key] - #if stats is None: - # Check for notification - # self.checkMissingData_notification(statDict, argDict, entry) - return stats - - - # Interfaces to TextFormatter - def generateProduct(self, productName, argDict, area=None, - timeRange=None, elements=None, areaLabel="", - timeRangeName=""): - # Generate the given product and return a text string - # representing the results. - # If areas are specified or a timeRange given, the - # product will be generated accordingly. 
- # Time Range is assumed to be an AFPS.TimeRange. - # Time Range Name is the name of the time range, if available - # Area is the name of the area over which to generate the - # product. This need only be supplied if you are - # looping through areas. - # Otherwise, this information is assumed to be included - # in argDict. - - # Set up time range - if timeRange is not None: - argDict["timeRange"] = timeRange - argDict["useRawTR"] = 1 - argDict["timeRangeName"] = timeRangeName - # Set up area -- It must be a ReferenceID - if area is not None: - argDict["editAreas"] = [(area, areaLabel)] - if "areaList" in argDict.keys(): - #area = self.getEditArea(area, argDict) - argDict["areaList"][0] = (area, areaLabel) - - # Reinitialize variables - argDict["timePeriod"] = None - argDict["language"] = None - # Preserve information - saveSelf = argDict["self"] - combinations = argDict["combinations"] - # Generate the forecast - getForecast = argDict["getForecast"] - fcst = getForecast(productName, argDict) - - # Restore information - argDict["self"] = saveSelf - argDict["combinations"] = combinations - return fcst - - # Interfaces to Translator - def translateExpr(self, phrase, language): - "Translate the time or heading" - if language == "english" or phrase == "": - return phrase - trans = Translator.Translator(language) - return trans.getExpression(phrase) - - def translateForecast(self, forecast, language): - "Translate the forecast" - if language == "english": - return forecast - trans = Translator.Translator(language) - return trans.getForecast(forecast) +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Interfaces.py +# Methods for interfacing to the Text Formatter infrastructure components. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import string, types +import Analysis +import Translator +import TextUtils +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID + +class Interfaces(TextUtils.TextUtils): + def __init__(self): + TextUtils.TextUtils.__init__(self) + + def getSampler(self, argDict, sampleInfo, sampleFromServer=0): + # Get a HistoSampler given + # sampleInfo, which is a list of tuples, or just a single tuple + # of tuples ([elements], [periods], [areas]) + # the elements are [(name, method)] -- basically the analysis list + # the periods [(timeRange, label)] + # areas [(name,label)] or [(refData, label)] or [(refID, label)] + ifpClient = argDict["ifpClient"] + databaseID = argDict["databaseID"] + + from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID, ReferenceData + from com.raytheon.viz.gfe.sampler import SamplerRequest, HistoSampler + from java.util import ArrayList + + # convert input sampleInfo to samplerRequests + samplerRequests = ArrayList() + if type(sampleInfo) == tuple: + sampleInfo = [sampleInfo] + for si in sampleInfo: + elements, periods, areas = si + for e in elements: + parmID = self.getParmID(e[0], databaseID) + for p in periods: + for a in areas: + if type(a[0]) is str: + samplerRequests.add(SamplerRequest( \ + parmID, ReferenceID(a[0]), p[0].toJavaObj())) + elif hasattr(a[0], "java_name") and \ + (a[0].java_name == ReferenceID.java_name or + a[0].java_name == ReferenceData.java_name): + samplerRequests.add( + 
SamplerRequest(parmID, a[0], p[0].toJavaObj())) + else: + raise TypeError("area is not String, ReferenceID, or ReferenceData") + + # do sampling + if sampleFromServer: + sampler = ifpClient.sampleRequest(samplerRequests) + else: + sampler = HistoSampler(ifpClient.getJavaClient(), samplerRequests) + if sampler.isValid() != 1: + print("Cannot Sample: Check for invalid Weather Elements, ",\ + "Invalid Areas", str(samplerRequests)) + return None + #print "sampler ", sampler + return sampler + + # Interfaces to Analysis + def getStatList(self, sampler, analysisList, timeRanges, editArea, componentName=None): + # For each period, get Statistics specified in analysisList + # over the Edit Area + statList = [] + for timeRange, label in timeRanges: + stats = self.getStatDict( + sampler, analysisList, timeRange, editArea, componentName) + statList.append(stats) + return statList + + def getStatDict(self, sampler, analysisList, timeRange, area, componentName=None): + # Get Statistic dictionary for given analysisList, timeRange, + # and area + # Area can be ReferenceData or text string + + # Convert area to ID if necessary + if area is None: + return None + if isinstance(area, str): + area = ReferenceID(area) + else: + area = area.getId() + + return Analysis.Analysis(sampler).createStats( + analysisList, area, timeRange, componentName) + + def getStats(self, statDict, entry, argDict=None): + if statDict is None: + return None + try: + stats = statDict[entry] + except: + stats = None + # Look for any temporal resolution e.g. 
+ # if entry = "Wind__vectorAvg", + # return any temporal resolution for + # vectorAvg + if string.find(entry, "__") >= 0: + for key in list(statDict.keys()): + if string.find(key, entry) >= 0: + stats = statDict[key] + #if stats is None: + # Check for notification + # self.checkMissingData_notification(statDict, argDict, entry) + return stats + + + # Interfaces to TextFormatter + def generateProduct(self, productName, argDict, area=None, + timeRange=None, elements=None, areaLabel="", + timeRangeName=""): + # Generate the given product and return a text string + # representing the results. + # If areas are specified or a timeRange given, the + # product will be generated accordingly. + # Time Range is assumed to be an AFPS.TimeRange. + # Time Range Name is the name of the time range, if available + # Area is the name of the area over which to generate the + # product. This need only be supplied if you are + # looping through areas. + # Otherwise, this information is assumed to be included + # in argDict. 
+ + # Set up time range + if timeRange is not None: + argDict["timeRange"] = timeRange + argDict["useRawTR"] = 1 + argDict["timeRangeName"] = timeRangeName + # Set up area -- It must be a ReferenceID + if area is not None: + argDict["editAreas"] = [(area, areaLabel)] + if "areaList" in list(argDict.keys()): + #area = self.getEditArea(area, argDict) + argDict["areaList"][0] = (area, areaLabel) + + # Reinitialize variables + argDict["timePeriod"] = None + argDict["language"] = None + # Preserve information + saveSelf = argDict["self"] + combinations = argDict["combinations"] + # Generate the forecast + getForecast = argDict["getForecast"] + fcst = getForecast(productName, argDict) + + # Restore information + argDict["self"] = saveSelf + argDict["combinations"] = combinations + return fcst + + # Interfaces to Translator + def translateExpr(self, phrase, language): + "Translate the time or heading" + if language == "english" or phrase == "": + return phrase + trans = Translator.Translator(language) + return trans.getExpression(phrase) + + def translateForecast(self, forecast, language): + "Translate the forecast" + if language == "english": + return forecast + trans = Translator.Translator(language) + return trans.getForecast(forecast) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/MarinePhrases.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/MarinePhrases.py index 7608460d02..bee7d365cd 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/MarinePhrases.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/MarinePhrases.py @@ -1,558 +1,558 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# MarinePhrases.py -# Methods for producing text forecast from SampleAnalysis statistics. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -import ScalarPhrases -import VectorRelatedPhrases -import WxPhrases -import DiscretePhrases -import re, string - -class MarinePhrases(ScalarPhrases.ScalarPhrases, VectorRelatedPhrases.VectorRelatedPhrases, - WxPhrases.WxPhrases, DiscretePhrases.DiscretePhrases): - def __init__(self): - ScalarPhrases.ScalarPhrases.__init__(self) - VectorRelatedPhrases.VectorRelatedPhrases.__init__(self) - WxPhrases.WxPhrases.__init__(self) - DiscretePhrases.DiscretePhrases.__init__(self) - - ############################################ - ### MARINE PHRASES - - def marine_wind_flag(self, tree, node): - # If 1, Wind wording will reflect the - # crossing of significant thresholds such as gales. - # E.g. "West gales to 35 knots." instead of "West winds 35 knots." - return 0 - - def marine_wind_combining_flag(self, tree, node): - # If 1, Wind combining will reflect the - # crossing of significant thresholds such as gales. - # E.g. "HURRICANE FORCE WINDS TO 100 KNOTS." instead of - # "NORTH HURRICANE FORCE WINDS TO 100 KNOTS EASING TO - # HURRICANE FORCE WINDS TO 80 KNOTS IN THE AFTERNOON." - return 0 - - def marine_wind_verbose_flag(self, tree, node): - # Applies only if marine_wind_flag is 1 and the - # and marine_wind_combining_flag is 0. - # If 1, Wind phrasing will repeat special descriptors to produce: - # "NORTH HURRICANE FORCE WINDS TO 100 KNOTS EASING TO - # HURRICANCE FORCE WINDS TO 80 KNOTS IN THE AFTERNOON." - # If 0, will produce: - # "NORTH HURRICANE FORCE WINDS TO 100 KNOTS EASING TO - # 80 KNOTS IN THE AFTERNOON." 
- # - return 1 - - def marine_wind_phrase(self): - return { - "setUpMethod": self.marine_wind_setUp, - "wordMethod": self.vector_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - - def marine_wind_withGusts_phrase(self): - return { - "setUpMethod": self.marine_wind_withGusts_setUp, - "wordMethod": self.vector_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - - def marine_wind_setUp(self, tree, node): - return self.wind_setUp(tree, node, gustFlag=0, - connectorMethod=self.marine_vectorConnector) - - def marine_wind_withGusts_setUp(self, tree, node): - return self.wind_setUp(tree, node, gustFlag=1, - connectorMethod=self.marine_vectorConnector) - - def noWaveHeight_phrase(self, tree, node, elementName1, elementName2=""): - if elementName1 != elementName2: - elementNames = elementName1 + " or " + elementName2 - else: - elementNames = elementName1 - return "|* Insufficient grids for " + elementNames + " during " + \ - str(node.getTimeRange())+ "*|" - - def waveHeight_wind_threshold(self, tree, node): - # Wind value above which waveHeight (combined seas) - # is reported vs. wind waves. - # Also, the Swell phrase is omitted if this threshold is exceeded. - # Unit is knots - return 34 - - def inlandWatersAreas(self, tree, node): - # List of edit area names that are inland or bay waters - # as opposed to "seas" - # The phrasing for these areas will be treated differently - # (see the wave_phrase) - # - # e.g. - # return ["TampaBayWaters"] - return [] - - def inlandWatersWave_element(self, tree, node): - # Weather element first and second choice to use for reporting inland waters waves - # If there is incomplete or no data for the first element, the second will be used. - return ("WindWaveHgt", "WaveHeight") - - def combinedSeas_threshold(self, tree, node): - # See wave_phrase - # If waves and swells are above this threshold, - # combined seas will be reported AND no Swell phrase will be reported. 
- # Units: feet - return 7 - - def seasWaveHeight_element(self, tree, node): - # Weather element to use for reporting seas waves - # IF above wind or swell thresholds - return "WaveHeight" - - def seasWindWave_element(self, tree, node): - # Weather element to use for reporting seas waves - # IF above wind or swell thresholds - return "WindWaveHgt" - - ### WaveHeight and WindWaveHgt - def wave_withPeriods_phrase(self): - return { - "setUpMethod": self.wave_withPeriods_setUp, - "wordMethod": self.wave_words, - "phraseMethods": self.standard_phraseMethods() - } - def wave_phrase(self): - return { - "setUpMethod": self.wave_setUp, - "wordMethod": self.wave_words, - "phraseMethods": self.standard_phraseMethods() - } - - def wave_withPeriods_setUp(self, tree, node): - return self.wave_setUp(tree, node, periodFlag=1) - - def wave_setUp(self, tree, node, periodFlag=0): - areaLabel = node.getAreaLabel() - timeRange = node.getTimeRange() - - inlandWaters = self.inlandWatersAreas(tree, node) - if self.currentAreaContains(tree, inlandWaters) == 1: - elementName, elementName2 = self.inlandWatersWave_element(tree, node) - statsByRange = tree.stats.get(elementName, timeRange, areaLabel, mergeMethod="List") - if statsByRange is None: - elementName = elementName2 - # Do not report Period for inland waters - periodFlag = 0 - descriptor = self.phrase_descriptor(tree, node, "inland waters", elementName) - node.set("descriptor", descriptor) - elif self.seasFlag(tree, node): - # Use wave height elementName (default) - elementName = self.seasWaveHeight_element(tree, node) - descriptor = self.phrase_descriptor(tree, node, "seas", elementName) - node.set("descriptor", descriptor) - else: - # Use wind waves (default) - elementName = self.seasWindWave_element(tree, node) - periodFlag = 0 - descriptor = self.phrase_descriptor(tree, node, "waves", elementName) - node.set("descriptor", descriptor) - - wave = self.ElementInfo(elementName, "List") - elementInfoList = [wave] - if periodFlag: - 
node.set("periodFlag", 1) - period = self.ElementInfo("Period", "Average", primary=0) - elementInfoList.append(period) - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def seasFlag(self, tree, node): - # Return 1 if we are to report combined seas - timeRange = node.getTimeRange() - areaLabel = node.getAreaLabel() - winds = tree.stats.get("Wind", timeRange, areaLabel, mergeMethod="Max") - if winds is None: - return 0 - maxWind, dir = winds - - # Determine if we will report combined seas OR wind waves - seasFlag = 0 - if maxWind > self.waveHeight_wind_threshold(tree, node): - seasFlag = 1 - else: - swell = tree.stats.get("Swell", timeRange, areaLabel, mergeMethod="Max") - swell2 = tree.stats.get("Swell2", timeRange, areaLabel, mergeMethod="Max") - maxWave = tree.stats.get("WindWaveHgt", timeRange, areaLabel, mergeMethod="Max") - if swell is None or maxWave is None: - pass # Leave seasFlag at zero - else: - # We'll decide to report combined seas by looking at - # the MAX of waves and swells over the entire time period - swells, dir = swell - if swell2 is None: - swells2 = 0 - else: - swells2, dir = swell2 - threshold = self.combinedSeas_threshold(tree, node) - if maxWave > threshold and \ - (swells > threshold or swells2 > threshold): - seasFlag = 1 - return seasFlag - - def wave_words(self, tree, node): - # Return a phrase for wave and optionally Period for the given subPhrase - elementInfo = node.getAncestor("firstElement") - elementName = elementInfo.name - statDict = node.getStatDict() - if statDict is None: - return self.setWords(node,"") - wave = self.getStats(statDict, elementName) - if wave is None: - return self.setWords(node, "") - min, max = self.getValue(wave, "MinMax") - threshold = self.nlValue(self.null_nlValue( - tree, node, elementName, elementName), max) - if int(min) < threshold and int(max) < threshold: - return self.setWords(node, "null") - waveStr = self.getScalarRangeStr(tree, node, elementName, 
min, max) - units = self.units_descriptor(tree, node, "units", "ft") - waveUnit = self.units_descriptor(tree, node, "unit", "ft") - if int(min) == 1 and int(max) == 1: - units = waveUnit - words = waveStr + " " + units - if "Period" in statDict.keys(): - period = self.getStats(statDict, "Period") - if period is not None: - avg = self.getValue(period, "Average") - periodUnits = self.units_descriptor(tree, node, "units", "s") - periodUnit = self.units_descriptor(tree, node, "unit", "s") - avg = int(avg) - if avg == 1: - periodUnits = periodUnit - periodDescriptor = self.phrase_descriptor( - tree, node, "dominant period", elementName) - words = words + " " + periodDescriptor + " " + `avg` + " " + periodUnits - return self.setWords(node, words) - - def waveHeight_phrase(self): - return { - "setUpMethod": self.waveHeight_setUp, - "wordMethod": self.waveHeight_words, - "phraseMethods": self.standard_phraseMethods() - } - def waveHeight_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("WaveHeight", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def waveHeight_words(self, tree, node): - "Create phrase for waves" - statDict = node.getStatDict() - stats = self.getStats(statDict, "WaveHeight") - if stats is None: - nodataPhrase = self.noWaveHeight_phrase( - tree, node, "WaveHeight", "WaveHeight") - return self.setWords(node.parent, nodataPhrase) - - min, max = self.getValue(stats, "MinMax") - avg = (min + max)/2 - words = self.wave_range(avg) - return self.setWords(node, words) - - def wave_range(self, avg): - # Make wave ranges based off the average wave value - table = ((0, "less than 1 foot"), (1, "1 foot or less"), - (1.5, "1 to 2 feet"), (2, "1 to 3 feet"), - (3, "2 to 4 feet"), (4, "3 to 5 feet"), - (5, "3 to 6 feet"), (6, "4 to 7 feet"), - (7, "5 to 8 feet"), (8, "6 to 10 feet"), - (10, "8 to 12 feet"), (12, "10 to 14 feet"), - (14, "12 to 16 feet"), (18, "14 to 18 feet"), - (20, "15 to 20 
feet"), (100, "over 20 feet")) - range = "" - for max, str in table: - if avg <= max: - range = str - break - return range - - ### Chop - def chop_phrase(self): - return { - "setUpMethod": self.chop_setUp, - "wordMethod": self.chop_words, - "phraseMethods": self.standard_phraseMethods() - } - def chop_setUp(self, tree, node): - # Only generate this phrase for inland waters areas - inlandWaters = self.inlandWatersAreas(tree, node) - if self.currentAreaContains(tree, inlandWaters) == 0: - return self.setWords(node, "") - - # Set up for only one subPhrase. - chop = self.ElementInfo("Wind", "Max", self.VECTOR()) - - # Uncomment the following line if you want the chop_phrase to - # have subPhrases e.g. "A light chop in morning." - #chop = self.ElementInfo("Wind", "List", self.VECTOR()) - elementInfoList = [chop] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - descriptor = self.phrase_descriptor(tree, node, "chop", "chop") - node.set("descriptor", descriptor) - return self.DONE() - - def chop_words(self, tree, node): - "Create phrase for chop" - statDict = node.getStatDict() - stats = self.getStats(statDict, "Wind") - if stats is None: - return self.setWords(node, "") - maxWind, dir = self.getValue(stats, "Max", self.VECTOR()) - if maxWind <= 7: - value = "smooth" - elif maxWind > 7 and maxWind <= 12: - value = "a light chop" - elif maxWind > 12 and maxWind <= 17: - value = "a moderate chop" - elif maxWind > 17 and maxWind <= 22: - value = "choppy" - elif maxWind > 22 and maxWind <= 27: - value = "rough" - elif maxWind > 27 and maxWind <= 32: - value = "very rough" - elif maxWind > 32: - value = "extremely rough" - else: - value = "!!!Chop phrase problem!!!" 
- return self.setWords(node, value) - - ### Swell - def swell_phrase(self): - return { - "setUpMethod": self.swell_setUp, - "wordMethod": self.swell_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - - def swell_withPeriods_phrase(self): - return { - "setUpMethod": self.swell_withPeriods_setUp, - "wordMethod": self.swell_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - - def swell_withPeriods_setUp(self, tree, node): - return self.swell_setUp(tree, node, periodFlag=1) - - def swell_setUp(self, tree, node, periodFlag=0): - # Do not report swells for inland waters - inlandWaters = self.inlandWatersAreas(tree, node) - if self.currentAreaContains(tree, inlandWaters) == 1: - return self.setWords(node, "") - - # Do not report swells if we are reporting combined seas - if self.seasFlag(tree, node) == 1: - return self.setWords(node, "") - - swell = self.ElementInfo("Swell", "List", self.VECTOR()) - elementInfoList = [swell] - if periodFlag: - swellPhrase = self.swell_withPeriods_phrase - else: - swellPhrase = self.swell_phrase - swell2 = self.ElementInfo( - "Swell2", "MinMax", self.VECTOR(), phraseDef=swellPhrase) #, primary=0) - elementInfoList = [swell, swell2] - if periodFlag: - node.set("periodFlag", 1) - period = self.ElementInfo("Period", "MinMax", primary=0) - period2 = self.ElementInfo("Period2", "MinMax", primary=0) - elementInfoList.append(period) - elementInfoList.append(period2) - self.subPhraseSetUp(tree, node, elementInfoList, self.vectorConnector) - return self.DONE() - - def swell_words(self, tree, node): - # Create phrase for swell for a given set of stats in statsByRange - #print "\n in swell words" - periodFlag = node.getAncestor("periodFlag") - statDict = node.getStatDict() - - #Check for Swell alone - swell2 = self.getStats(statDict, "Swell2") - if swell2 is None: - oneSwell = 1 - else: - oneSwell = 0 - - # Swell and Swell2 subPhrases - subPhraseParts = [] - elementInfoList = node.getAncestor("elementInfoList") - 
for swell, period in [("Swell", "Period"), ("Swell2", "Period2")]: - if swell == "Swell": - checkRepeating = 1 - else: - checkRepeating = 0 - for elementInfo in elementInfoList: - if elementInfo.name == swell: - swellInfo = elementInfo - break - swellWords = self.simple_vector_phrase(tree, node, swellInfo, checkRepeating) - if swellWords == "null" or swellWords == "": - subPhraseParts.append("") - continue - # Add Period - periodPhrase = "" - if periodFlag == 1: - periodStats = self.getStats(statDict, period) - periodPhrase = self.embedded_period_phrase(tree, node, periodStats) - swellWords = swellWords + periodPhrase - subPhraseParts.append(swellWords) - - #print "swell", node.getTimeRange(), subPhraseParts - if subPhraseParts[0] != "" and subPhraseParts[1] != "": - words = subPhraseParts[0] + " and " + subPhraseParts[1] - # Check for mixed swell on first subPhrase - if node.getIndex() == 0: - mixedSwell = self.checkMixedSwell(tree, node, statDict) - if mixedSwell: - mixedSwellDesc = self.phrase_descriptor(tree, node, "mixed swell", "Swell") - phrase = node.getParent() - phrase.set("descriptor", mixedSwellDesc) - phrase.doneList.append(self.embedDescriptor) - elif subPhraseParts[0] != "": - words = subPhraseParts[0] - elif subPhraseParts[1] != "": - words = subPhraseParts[1] - else: - words = "null" - - return self.setWords(node, words) - - def checkMixedSwell(self, tree, node, statDict): - # Check for mixed swell wording - # Return mixed swell phrase if appropriate - # Otherwise, return None - swell = self.getStats(statDict, "Swell") - swell2 = self.getStats(statDict, "Swell2") - if swell is None or swell2 is None: - return 0 - swellMag, swellDir = swell - swell2Mag, swell2Dir = swell2 - swellMag = self.getValue(swellMag) - swell2Mag = self.getValue(swell2Mag) - if self.direction_difference(swellDir, swell2Dir) >= 90.0 and \ - swellMag > 0 and \ - swell2Mag / swellMag > 0.50: - return 1 - else: - return 0 - - ### Period - def embedded_period_phrase(self, tree, 
node, periodStats): - # Create a period phrase to be embedded with a Swell phrase - if periodStats is None: - return "" - period = int(self.getValue(periodStats)) - - outUnits = self.element_outUnits(tree, node, "Period", "Period") - units = self.units_descriptor(tree, node, "units", outUnits) - unit = self.units_descriptor(tree, node, "unit", outUnits) - if period == 1: - units = unit - return " at " + `period` + " " + units - - def period_phrase(self): - return { - "setUpMethod": self.period_setUp, - "wordMethod": self.period_words, - "phraseMethods": self.standard_phraseMethods() - } - def period_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Period", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def period_words(self, tree, node): - # Return a phrase for Period for the given index in statsByRange - statDict = node.getStatDict() - stats = self.getStats(statDict, "Period") - if stats is None: - return self.setWords(node, "") - periodValue = int(self.getValue(stats)) - - outUnits = self.element_outUnits(tree, node, "Period", "Period") - units = self.units_descriptor(tree, node, "units", outUnits) - unit = self.units_descriptor(tree, node, "unit", outUnits) - if periodValue == 1: - units = unit - return self.setWords(node, `periodValue` + " " + units) - - def marine_abbreviateText(self, fcst): - #add a space at the beginning to create a word boundary on the first word - #(space is removed at end of method). 
- fcst = " " + fcst - fcst = re.sub(r'\n', r' ',fcst) - fcst = re.sub(r'(?i)(\W)NORTH(\W)', r'\1N\2',fcst) - fcst = re.sub(r'(?i)(\W)SOUTH(\W)', r'\1S\2',fcst) - fcst = re.sub(r'(?i)(\W)EAST(\W)', r'\1E\2',fcst) - fcst = re.sub(r'(?i)(\W)WEST(\W)', r'\1W\2',fcst) - fcst = re.sub(r'(?i)(\W)NORTHEAST(\W)', r'\1NE\2',fcst) - fcst = re.sub(r'(?i)(\W)SOUTHEAST(\W)', r'\1SE\2',fcst) - fcst = re.sub(r'(?i)(\W)SOUTHWEST(\W)', r'\1SW\2',fcst) - fcst = re.sub(r'(?i)(\W)NORTHWEST(\W)', r'\1NW\2',fcst) - fcst = re.sub(r'(?i)(\W)KNOTS?(\W)', r'\1kt\2',fcst) -## fcst = re.sub(r'(?i)(\W)FOOT(\W)', r'\1FT\2',fcst) - fcst = re.sub(r'(?i)(\W)FEET(\W)', r'\1ft\2',fcst) - fcst = re.sub(r'(?i)(\W)POSITION(\W)', r'\1PSN\2',fcst) - fcst = re.sub(r'(?i)(\W)VISIBILITY(\W)', r'\1VSBY\2',fcst) - fcst = re.sub(r'(?i)(\W)THUNDERSTORM', r'\1TSTM',fcst) - fcst = re.sub(r'(?i)(\W)AVERAGE(\W)', r'\1AVG\2',fcst) - fcst = re.sub(r'(?i)(\W)NAUTICAL MILES?(\W)', r'\1nm\2',fcst) - fcst = re.sub(r'(?i)(\W)ATLANTIC(\W)', r'\1ATLC\2',fcst) - fcst = re.sub(r'(?i)(\W)FATHOMS?(\W)', r'\1fm\2',fcst) - fcst = re.sub(r'(?i)(\W)LONGITUDE(\W)', r'\1LONG\2',fcst) - fcst = re.sub(r'(?i)(\W)PACIFIC(\W)', r'\1PAC\2',fcst) - fcst = re.sub(r'(?i)(\W)DEGREES?(\W)', r'\1deg\2',fcst) - fcst = re.sub(r'(?i)(\W)MILLIBARS?(\W)', r'\1mb\2',fcst) - fcst = re.sub(r'(?i)(\W)PRESSURE(\W)', r'\1PRES\2',fcst) - fcst = re.sub(r'(?i)(\W)(SUN)DAY(\W)', r'\1\2\3',fcst) - fcst = re.sub(r'(?i)(\W)(MON)DAY(\W)', r'\1\2\3',fcst) - fcst = re.sub(r'(?i)(\W)(TUE)SDAY(\W)', r'\1\2\3',fcst) - fcst = re.sub(r'(?i)(\W)(WED)NESDAY(\W)', r'\1\2\3',fcst) - fcst = re.sub(r'(?i)(\W)(THU)RSDAY(\W)', r'\1\2\3',fcst) - fcst = re.sub(r'(?i)(\W)(FRI)DAY(\W)', r'\1\2\3',fcst) - fcst = re.sub(r'(?i)(\W)(SAT)URDAY(\W)', r'\1\2\3',fcst) - fcst = re.sub(r'^ ', r'',fcst) - return fcst - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights 
File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# MarinePhrases.py +# Methods for producing text forecast from SampleAnalysis statistics. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import ScalarPhrases +import VectorRelatedPhrases +import WxPhrases +import DiscretePhrases +import re, string + +class MarinePhrases(ScalarPhrases.ScalarPhrases, VectorRelatedPhrases.VectorRelatedPhrases, + WxPhrases.WxPhrases, DiscretePhrases.DiscretePhrases): + def __init__(self): + ScalarPhrases.ScalarPhrases.__init__(self) + VectorRelatedPhrases.VectorRelatedPhrases.__init__(self) + WxPhrases.WxPhrases.__init__(self) + DiscretePhrases.DiscretePhrases.__init__(self) + + ############################################ + ### MARINE PHRASES + + def marine_wind_flag(self, tree, node): + # If 1, Wind wording will reflect the + # crossing of significant thresholds such as gales. + # E.g. "West gales to 35 knots." instead of "West winds 35 knots." + return 0 + + def marine_wind_combining_flag(self, tree, node): + # If 1, Wind combining will reflect the + # crossing of significant thresholds such as gales. + # E.g. "HURRICANE FORCE WINDS TO 100 KNOTS." instead of + # "NORTH HURRICANE FORCE WINDS TO 100 KNOTS EASING TO + # HURRICANE FORCE WINDS TO 80 KNOTS IN THE AFTERNOON." + return 0 + + def marine_wind_verbose_flag(self, tree, node): + # Applies only if marine_wind_flag is 1 and the + # and marine_wind_combining_flag is 0. 
+ # If 1, Wind phrasing will repeat special descriptors to produce: + # "NORTH HURRICANE FORCE WINDS TO 100 KNOTS EASING TO + # HURRICANCE FORCE WINDS TO 80 KNOTS IN THE AFTERNOON." + # If 0, will produce: + # "NORTH HURRICANE FORCE WINDS TO 100 KNOTS EASING TO + # 80 KNOTS IN THE AFTERNOON." + # + return 1 + + def marine_wind_phrase(self): + return { + "setUpMethod": self.marine_wind_setUp, + "wordMethod": self.vector_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + + def marine_wind_withGusts_phrase(self): + return { + "setUpMethod": self.marine_wind_withGusts_setUp, + "wordMethod": self.vector_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + + def marine_wind_setUp(self, tree, node): + return self.wind_setUp(tree, node, gustFlag=0, + connectorMethod=self.marine_vectorConnector) + + def marine_wind_withGusts_setUp(self, tree, node): + return self.wind_setUp(tree, node, gustFlag=1, + connectorMethod=self.marine_vectorConnector) + + def noWaveHeight_phrase(self, tree, node, elementName1, elementName2=""): + if elementName1 != elementName2: + elementNames = elementName1 + " or " + elementName2 + else: + elementNames = elementName1 + return "|* Insufficient grids for " + elementNames + " during " + \ + str(node.getTimeRange())+ "*|" + + def waveHeight_wind_threshold(self, tree, node): + # Wind value above which waveHeight (combined seas) + # is reported vs. wind waves. + # Also, the Swell phrase is omitted if this threshold is exceeded. + # Unit is knots + return 34 + + def inlandWatersAreas(self, tree, node): + # List of edit area names that are inland or bay waters + # as opposed to "seas" + # The phrasing for these areas will be treated differently + # (see the wave_phrase) + # + # e.g. 
+ # return ["TampaBayWaters"] + return [] + + def inlandWatersWave_element(self, tree, node): + # Weather element first and second choice to use for reporting inland waters waves + # If there is incomplete or no data for the first element, the second will be used. + return ("WindWaveHgt", "WaveHeight") + + def combinedSeas_threshold(self, tree, node): + # See wave_phrase + # If waves and swells are above this threshold, + # combined seas will be reported AND no Swell phrase will be reported. + # Units: feet + return 7 + + def seasWaveHeight_element(self, tree, node): + # Weather element to use for reporting seas waves + # IF above wind or swell thresholds + return "WaveHeight" + + def seasWindWave_element(self, tree, node): + # Weather element to use for reporting seas waves + # IF above wind or swell thresholds + return "WindWaveHgt" + + ### WaveHeight and WindWaveHgt + def wave_withPeriods_phrase(self): + return { + "setUpMethod": self.wave_withPeriods_setUp, + "wordMethod": self.wave_words, + "phraseMethods": self.standard_phraseMethods() + } + def wave_phrase(self): + return { + "setUpMethod": self.wave_setUp, + "wordMethod": self.wave_words, + "phraseMethods": self.standard_phraseMethods() + } + + def wave_withPeriods_setUp(self, tree, node): + return self.wave_setUp(tree, node, periodFlag=1) + + def wave_setUp(self, tree, node, periodFlag=0): + areaLabel = node.getAreaLabel() + timeRange = node.getTimeRange() + + inlandWaters = self.inlandWatersAreas(tree, node) + if self.currentAreaContains(tree, inlandWaters) == 1: + elementName, elementName2 = self.inlandWatersWave_element(tree, node) + statsByRange = tree.stats.get(elementName, timeRange, areaLabel, mergeMethod="List") + if statsByRange is None: + elementName = elementName2 + # Do not report Period for inland waters + periodFlag = 0 + descriptor = self.phrase_descriptor(tree, node, "inland waters", elementName) + node.set("descriptor", descriptor) + elif self.seasFlag(tree, node): + # Use wave height 
elementName (default) + elementName = self.seasWaveHeight_element(tree, node) + descriptor = self.phrase_descriptor(tree, node, "seas", elementName) + node.set("descriptor", descriptor) + else: + # Use wind waves (default) + elementName = self.seasWindWave_element(tree, node) + periodFlag = 0 + descriptor = self.phrase_descriptor(tree, node, "waves", elementName) + node.set("descriptor", descriptor) + + wave = self.ElementInfo(elementName, "List") + elementInfoList = [wave] + if periodFlag: + node.set("periodFlag", 1) + period = self.ElementInfo("Period", "Average", primary=0) + elementInfoList.append(period) + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def seasFlag(self, tree, node): + # Return 1 if we are to report combined seas + timeRange = node.getTimeRange() + areaLabel = node.getAreaLabel() + winds = tree.stats.get("Wind", timeRange, areaLabel, mergeMethod="Max") + if winds is None: + return 0 + maxWind, dir = winds + + # Determine if we will report combined seas OR wind waves + seasFlag = 0 + if maxWind > self.waveHeight_wind_threshold(tree, node): + seasFlag = 1 + else: + swell = tree.stats.get("Swell", timeRange, areaLabel, mergeMethod="Max") + swell2 = tree.stats.get("Swell2", timeRange, areaLabel, mergeMethod="Max") + maxWave = tree.stats.get("WindWaveHgt", timeRange, areaLabel, mergeMethod="Max") + if swell is None or maxWave is None: + pass # Leave seasFlag at zero + else: + # We'll decide to report combined seas by looking at + # the MAX of waves and swells over the entire time period + swells, dir = swell + if swell2 is None: + swells2 = 0 + else: + swells2, dir = swell2 + threshold = self.combinedSeas_threshold(tree, node) + if maxWave > threshold and \ + (swells > threshold or swells2 > threshold): + seasFlag = 1 + return seasFlag + + def wave_words(self, tree, node): + # Return a phrase for wave and optionally Period for the given subPhrase + elementInfo = node.getAncestor("firstElement") + 
elementName = elementInfo.name + statDict = node.getStatDict() + if statDict is None: + return self.setWords(node,"") + wave = self.getStats(statDict, elementName) + if wave is None: + return self.setWords(node, "") + min, max = self.getValue(wave, "MinMax") + threshold = self.nlValue(self.null_nlValue( + tree, node, elementName, elementName), max) + if int(min) < threshold and int(max) < threshold: + return self.setWords(node, "null") + waveStr = self.getScalarRangeStr(tree, node, elementName, min, max) + units = self.units_descriptor(tree, node, "units", "ft") + waveUnit = self.units_descriptor(tree, node, "unit", "ft") + if int(min) == 1 and int(max) == 1: + units = waveUnit + words = waveStr + " " + units + if "Period" in list(statDict.keys()): + period = self.getStats(statDict, "Period") + if period is not None: + avg = self.getValue(period, "Average") + periodUnits = self.units_descriptor(tree, node, "units", "s") + periodUnit = self.units_descriptor(tree, node, "unit", "s") + avg = int(avg) + if avg == 1: + periodUnits = periodUnit + periodDescriptor = self.phrase_descriptor( + tree, node, "dominant period", elementName) + words = words + " " + periodDescriptor + " " + repr(avg) + " " + periodUnits + return self.setWords(node, words) + + def waveHeight_phrase(self): + return { + "setUpMethod": self.waveHeight_setUp, + "wordMethod": self.waveHeight_words, + "phraseMethods": self.standard_phraseMethods() + } + def waveHeight_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("WaveHeight", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def waveHeight_words(self, tree, node): + "Create phrase for waves" + statDict = node.getStatDict() + stats = self.getStats(statDict, "WaveHeight") + if stats is None: + nodataPhrase = self.noWaveHeight_phrase( + tree, node, "WaveHeight", "WaveHeight") + return self.setWords(node.parent, nodataPhrase) + + min, max = self.getValue(stats, "MinMax") + avg = (min 
+ max)/2 + words = self.wave_range(avg) + return self.setWords(node, words) + + def wave_range(self, avg): + # Make wave ranges based off the average wave value + table = ((0, "less than 1 foot"), (1, "1 foot or less"), + (1.5, "1 to 2 feet"), (2, "1 to 3 feet"), + (3, "2 to 4 feet"), (4, "3 to 5 feet"), + (5, "3 to 6 feet"), (6, "4 to 7 feet"), + (7, "5 to 8 feet"), (8, "6 to 10 feet"), + (10, "8 to 12 feet"), (12, "10 to 14 feet"), + (14, "12 to 16 feet"), (18, "14 to 18 feet"), + (20, "15 to 20 feet"), (100, "over 20 feet")) + range = "" + for max, str in table: + if avg <= max: + range = str + break + return range + + ### Chop + def chop_phrase(self): + return { + "setUpMethod": self.chop_setUp, + "wordMethod": self.chop_words, + "phraseMethods": self.standard_phraseMethods() + } + def chop_setUp(self, tree, node): + # Only generate this phrase for inland waters areas + inlandWaters = self.inlandWatersAreas(tree, node) + if self.currentAreaContains(tree, inlandWaters) == 0: + return self.setWords(node, "") + + # Set up for only one subPhrase. + chop = self.ElementInfo("Wind", "Max", self.VECTOR()) + + # Uncomment the following line if you want the chop_phrase to + # have subPhrases e.g. "A light chop in morning." 
+ #chop = self.ElementInfo("Wind", "List", self.VECTOR()) + elementInfoList = [chop] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + descriptor = self.phrase_descriptor(tree, node, "chop", "chop") + node.set("descriptor", descriptor) + return self.DONE() + + def chop_words(self, tree, node): + "Create phrase for chop" + statDict = node.getStatDict() + stats = self.getStats(statDict, "Wind") + if stats is None: + return self.setWords(node, "") + maxWind, dir = self.getValue(stats, "Max", self.VECTOR()) + if maxWind <= 7: + value = "smooth" + elif maxWind > 7 and maxWind <= 12: + value = "a light chop" + elif maxWind > 12 and maxWind <= 17: + value = "a moderate chop" + elif maxWind > 17 and maxWind <= 22: + value = "choppy" + elif maxWind > 22 and maxWind <= 27: + value = "rough" + elif maxWind > 27 and maxWind <= 32: + value = "very rough" + elif maxWind > 32: + value = "extremely rough" + else: + value = "!!!Chop phrase problem!!!" + return self.setWords(node, value) + + ### Swell + def swell_phrase(self): + return { + "setUpMethod": self.swell_setUp, + "wordMethod": self.swell_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + + def swell_withPeriods_phrase(self): + return { + "setUpMethod": self.swell_withPeriods_setUp, + "wordMethod": self.swell_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + + def swell_withPeriods_setUp(self, tree, node): + return self.swell_setUp(tree, node, periodFlag=1) + + def swell_setUp(self, tree, node, periodFlag=0): + # Do not report swells for inland waters + inlandWaters = self.inlandWatersAreas(tree, node) + if self.currentAreaContains(tree, inlandWaters) == 1: + return self.setWords(node, "") + + # Do not report swells if we are reporting combined seas + if self.seasFlag(tree, node) == 1: + return self.setWords(node, "") + + swell = self.ElementInfo("Swell", "List", self.VECTOR()) + elementInfoList = [swell] + if periodFlag: + swellPhrase = 
self.swell_withPeriods_phrase + else: + swellPhrase = self.swell_phrase + swell2 = self.ElementInfo( + "Swell2", "MinMax", self.VECTOR(), phraseDef=swellPhrase) #, primary=0) + elementInfoList = [swell, swell2] + if periodFlag: + node.set("periodFlag", 1) + period = self.ElementInfo("Period", "MinMax", primary=0) + period2 = self.ElementInfo("Period2", "MinMax", primary=0) + elementInfoList.append(period) + elementInfoList.append(period2) + self.subPhraseSetUp(tree, node, elementInfoList, self.vectorConnector) + return self.DONE() + + def swell_words(self, tree, node): + # Create phrase for swell for a given set of stats in statsByRange + #print "\n in swell words" + periodFlag = node.getAncestor("periodFlag") + statDict = node.getStatDict() + + #Check for Swell alone + swell2 = self.getStats(statDict, "Swell2") + if swell2 is None: + oneSwell = 1 + else: + oneSwell = 0 + + # Swell and Swell2 subPhrases + subPhraseParts = [] + elementInfoList = node.getAncestor("elementInfoList") + for swell, period in [("Swell", "Period"), ("Swell2", "Period2")]: + if swell == "Swell": + checkRepeating = 1 + else: + checkRepeating = 0 + for elementInfo in elementInfoList: + if elementInfo.name == swell: + swellInfo = elementInfo + break + swellWords = self.simple_vector_phrase(tree, node, swellInfo, checkRepeating) + if swellWords == "null" or swellWords == "": + subPhraseParts.append("") + continue + # Add Period + periodPhrase = "" + if periodFlag == 1: + periodStats = self.getStats(statDict, period) + periodPhrase = self.embedded_period_phrase(tree, node, periodStats) + swellWords = swellWords + periodPhrase + subPhraseParts.append(swellWords) + + #print "swell", node.getTimeRange(), subPhraseParts + if subPhraseParts[0] != "" and subPhraseParts[1] != "": + words = subPhraseParts[0] + " and " + subPhraseParts[1] + # Check for mixed swell on first subPhrase + if node.getIndex() == 0: + mixedSwell = self.checkMixedSwell(tree, node, statDict) + if mixedSwell: + mixedSwellDesc = 
self.phrase_descriptor(tree, node, "mixed swell", "Swell") + phrase = node.getParent() + phrase.set("descriptor", mixedSwellDesc) + phrase.doneList.append(self.embedDescriptor) + elif subPhraseParts[0] != "": + words = subPhraseParts[0] + elif subPhraseParts[1] != "": + words = subPhraseParts[1] + else: + words = "null" + + return self.setWords(node, words) + + def checkMixedSwell(self, tree, node, statDict): + # Check for mixed swell wording + # Return mixed swell phrase if appropriate + # Otherwise, return None + swell = self.getStats(statDict, "Swell") + swell2 = self.getStats(statDict, "Swell2") + if swell is None or swell2 is None: + return 0 + swellMag, swellDir = swell + swell2Mag, swell2Dir = swell2 + swellMag = self.getValue(swellMag) + swell2Mag = self.getValue(swell2Mag) + if self.direction_difference(swellDir, swell2Dir) >= 90.0 and \ + swellMag > 0 and \ + swell2Mag / swellMag > 0.50: + return 1 + else: + return 0 + + ### Period + def embedded_period_phrase(self, tree, node, periodStats): + # Create a period phrase to be embedded with a Swell phrase + if periodStats is None: + return "" + period = int(self.getValue(periodStats)) + + outUnits = self.element_outUnits(tree, node, "Period", "Period") + units = self.units_descriptor(tree, node, "units", outUnits) + unit = self.units_descriptor(tree, node, "unit", outUnits) + if period == 1: + units = unit + return " at " + repr(period) + " " + units + + def period_phrase(self): + return { + "setUpMethod": self.period_setUp, + "wordMethod": self.period_words, + "phraseMethods": self.standard_phraseMethods() + } + def period_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Period", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def period_words(self, tree, node): + # Return a phrase for Period for the given index in statsByRange + statDict = node.getStatDict() + stats = self.getStats(statDict, "Period") + if stats is None: + return 
self.setWords(node, "") + periodValue = int(self.getValue(stats)) + + outUnits = self.element_outUnits(tree, node, "Period", "Period") + units = self.units_descriptor(tree, node, "units", outUnits) + unit = self.units_descriptor(tree, node, "unit", outUnits) + if periodValue == 1: + units = unit + return self.setWords(node, repr(periodValue) + " " + units) + + def marine_abbreviateText(self, fcst): + #add a space at the beginning to create a word boundary on the first word + #(space is removed at end of method). + fcst = " " + fcst + fcst = re.sub(r'\n', r' ',fcst) + fcst = re.sub(r'(?i)(\W)NORTH(\W)', r'\1N\2',fcst) + fcst = re.sub(r'(?i)(\W)SOUTH(\W)', r'\1S\2',fcst) + fcst = re.sub(r'(?i)(\W)EAST(\W)', r'\1E\2',fcst) + fcst = re.sub(r'(?i)(\W)WEST(\W)', r'\1W\2',fcst) + fcst = re.sub(r'(?i)(\W)NORTHEAST(\W)', r'\1NE\2',fcst) + fcst = re.sub(r'(?i)(\W)SOUTHEAST(\W)', r'\1SE\2',fcst) + fcst = re.sub(r'(?i)(\W)SOUTHWEST(\W)', r'\1SW\2',fcst) + fcst = re.sub(r'(?i)(\W)NORTHWEST(\W)', r'\1NW\2',fcst) + fcst = re.sub(r'(?i)(\W)KNOTS?(\W)', r'\1kt\2',fcst) +## fcst = re.sub(r'(?i)(\W)FOOT(\W)', r'\1FT\2',fcst) + fcst = re.sub(r'(?i)(\W)FEET(\W)', r'\1ft\2',fcst) + fcst = re.sub(r'(?i)(\W)POSITION(\W)', r'\1PSN\2',fcst) + fcst = re.sub(r'(?i)(\W)VISIBILITY(\W)', r'\1VSBY\2',fcst) + fcst = re.sub(r'(?i)(\W)THUNDERSTORM', r'\1TSTM',fcst) + fcst = re.sub(r'(?i)(\W)AVERAGE(\W)', r'\1AVG\2',fcst) + fcst = re.sub(r'(?i)(\W)NAUTICAL MILES?(\W)', r'\1nm\2',fcst) + fcst = re.sub(r'(?i)(\W)ATLANTIC(\W)', r'\1ATLC\2',fcst) + fcst = re.sub(r'(?i)(\W)FATHOMS?(\W)', r'\1fm\2',fcst) + fcst = re.sub(r'(?i)(\W)LONGITUDE(\W)', r'\1LONG\2',fcst) + fcst = re.sub(r'(?i)(\W)PACIFIC(\W)', r'\1PAC\2',fcst) + fcst = re.sub(r'(?i)(\W)DEGREES?(\W)', r'\1deg\2',fcst) + fcst = re.sub(r'(?i)(\W)MILLIBARS?(\W)', r'\1mb\2',fcst) + fcst = re.sub(r'(?i)(\W)PRESSURE(\W)', r'\1PRES\2',fcst) + fcst = re.sub(r'(?i)(\W)(SUN)DAY(\W)', r'\1\2\3',fcst) + fcst = re.sub(r'(?i)(\W)(MON)DAY(\W)', r'\1\2\3',fcst) + 
fcst = re.sub(r'(?i)(\W)(TUE)SDAY(\W)', r'\1\2\3',fcst) + fcst = re.sub(r'(?i)(\W)(WED)NESDAY(\W)', r'\1\2\3',fcst) + fcst = re.sub(r'(?i)(\W)(THU)RSDAY(\W)', r'\1\2\3',fcst) + fcst = re.sub(r'(?i)(\W)(FRI)DAY(\W)', r'\1\2\3',fcst) + fcst = re.sub(r'(?i)(\W)(SAT)URDAY(\W)', r'\1\2\3',fcst) + fcst = re.sub(r'^ ', r'',fcst) + return fcst + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ModuleAccessor.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ModuleAccessor.py index 68dcc4fb70..89b5ccb3d3 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ModuleAccessor.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ModuleAccessor.py @@ -1,166 +1,166 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ModuleAccessor.py -# Access to the internals of Modules -# -# Author: hansen -# ---------------------------------------------------------------------------- -######################################################################## - -## -# This is a base file that is not intended to be overridden. -## - -import types, sys -import traceback - -class ModuleAccessor: - # Used to access objects within Modules - def __init__(self, errorCB=None): - # Used for error messages - self.__errorCB = errorCB - - def module(self, moduleName, showError=1): - # Return the module with the given name - try: - if sys.modules.has_key(moduleName): - #del sys.modules[moduleName] # this is bad for the automated tests code replacement - return sys.modules[moduleName] - module = __import__(moduleName) - except: - if showError and self.__errorCB is not None: - self.__errorCB("Problem finding or importing module: " - + moduleName, tracebackFlag=1) - return None - return module - - def variables(self, moduleName, variableList, showError=1): - # Return the global variables in the given module - module = self.module(moduleName, showError) - if module is None: - return None - variables = [] - for variableName in variableList: - if variableName in module.__dict__.keys(): - variables.append(module.__dict__[variableName]) - else: - variables.append(None) - return tuple(variables) - - def variable(self, moduleName, variableName, showError=1): - # Return the global variable in the given module - module = self.module(moduleName, showError) - if module is None: - return None - if variableName in module.__dict__.keys(): - return module.__dict__[variableName] - else: - return None - - def classDefinition(self, moduleName, className): - # Returns 
the class in the given module - if className is None: - return None, None - module = self.module(moduleName, 1) - if module is None: - return None, None - # Look for Class - classDefinition = self.getClassDefinition(module, className) - return module, classDefinition - - def getClassDefinition(self, module, className): - # Check for the given class in the module - if className in module.__dict__.keys() and \ - type(module.__dict__[className]) is types.ClassType: - return module.__dict__[className] - else: - return None - - def getFunctions(self, moduleName, functionNames, className=None, - classArgs=None, classOnly=0): - # Returns a dictionary containing the executable functions - # Looks first for functions in a class - # If not found, looks for functions in the module itself - # These functions are definitions, not executable instances - module, classDefinition = self.classDefinition(moduleName, className) - if module is None: - return None, None, None - - classInstance = None - if classDefinition is not None: - # Create the callable class instance and set up the - # functions - classInstance = classDefinition(classArgs) - functionDict = self.getClassFunctions( - classInstance, functionNames) - elif not classOnly == 1: - # Look for a Function with same name as module - functionDict = self.getModuleFunctions( - module, functionNames) - else: - return None, None, None - - return module, classInstance, functionDict - - def getClassFunctions(self, classInstance, functionNames): - # Returns a dictionary containing the functions specified - # in the given classInstance - functionDict = {} - for functionName in functionNames: - functionDict[functionName] = getattr(classInstance, - functionName, None) - return functionDict - - def getModuleFunctions(self, module, functionNames): - # Returns a dictionary containing the functions specified - # for the given module - functionDict = {} - for functionName in functionNames: - if functionName in module.__dict__.keys(): - result 
= module.__dict__[functionName] - else: - result = None - functionDict[functionName] = result - return functionDict - - def callMethod(self, method, argCallback, classInstance=None): - # Get arguments and call the method - if method is None: - return None - elif hasattr(method, 'im_func'): # It is a user defined method - co = method.im_func.func_code - elif hasattr(method, 'func_code'): # It is a user defined function - co = method.func_code - else: # Don't know what it is - return None - - # Set up variables and values for arguments in args - argValueList = argCallback(co.co_varnames[:co.co_argcount], []) - if type(argValueList) is not types.ListType: - error = argValueList - return error - - # Format the arguments and call the method - return method(*argValueList) +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ModuleAccessor.py +# Access to the internals of Modules +# +# Author: hansen +# ---------------------------------------------------------------------------- +######################################################################## + +## +# This is a base file that is not intended to be overridden. 
+## + +import types, sys +import traceback + +class ModuleAccessor: + # Used to access objects within Modules + def __init__(self, errorCB=None): + # Used for error messages + self.__errorCB = errorCB + + def module(self, moduleName, showError=1): + # Return the module with the given name + try: + if moduleName in sys.modules: + #del sys.modules[moduleName] # this is bad for the automated tests code replacement + return sys.modules[moduleName] + module = __import__(moduleName) + except: + if showError and self.__errorCB is not None: + self.__errorCB("Problem finding or importing module: " + + moduleName, tracebackFlag=1) + return None + return module + + def variables(self, moduleName, variableList, showError=1): + # Return the global variables in the given module + module = self.module(moduleName, showError) + if module is None: + return None + variables = [] + for variableName in variableList: + if variableName in list(module.__dict__.keys()): + variables.append(module.__dict__[variableName]) + else: + variables.append(None) + return tuple(variables) + + def variable(self, moduleName, variableName, showError=1): + # Return the global variable in the given module + module = self.module(moduleName, showError) + if module is None: + return None + if variableName in list(module.__dict__.keys()): + return module.__dict__[variableName] + else: + return None + + def classDefinition(self, moduleName, className): + # Returns the class in the given module + if className is None: + return None, None + module = self.module(moduleName, 1) + if module is None: + return None, None + # Look for Class + classDefinition = self.getClassDefinition(module, className) + return module, classDefinition + + def getClassDefinition(self, module, className): + # Check for the given class in the module + if className in list(module.__dict__.keys()) and \ + type(module.__dict__[className]) is type: + return module.__dict__[className] + else: + return None + + def getFunctions(self, 
moduleName, functionNames, className=None, + classArgs=None, classOnly=0): + # Returns a dictionary containing the executable functions + # Looks first for functions in a class + # If not found, looks for functions in the module itself + # These functions are definitions, not executable instances + module, classDefinition = self.classDefinition(moduleName, className) + if module is None: + return None, None, None + + classInstance = None + if classDefinition is not None: + # Create the callable class instance and set up the + # functions + classInstance = classDefinition(classArgs) + functionDict = self.getClassFunctions( + classInstance, functionNames) + elif not classOnly == 1: + # Look for a Function with same name as module + functionDict = self.getModuleFunctions( + module, functionNames) + else: + return None, None, None + + return module, classInstance, functionDict + + def getClassFunctions(self, classInstance, functionNames): + # Returns a dictionary containing the functions specified + # in the given classInstance + functionDict = {} + for functionName in functionNames: + functionDict[functionName] = getattr(classInstance, + functionName, None) + return functionDict + + def getModuleFunctions(self, module, functionNames): + # Returns a dictionary containing the functions specified + # for the given module + functionDict = {} + for functionName in functionNames: + if functionName in list(module.__dict__.keys()): + result = module.__dict__[functionName] + else: + result = None + functionDict[functionName] = result + return functionDict + + def callMethod(self, method, argCallback, classInstance=None): + # Get arguments and call the method + if method is None: + return None + elif hasattr(method, 'im_func'): # It is a user defined method + co = method.__func__.__code__ + elif hasattr(method, 'func_code'): # It is a user defined function + co = method.__code__ + else: # Don't know what it is + return None + + # Set up variables and values for arguments in 
args + argValueList = argCallback(co.co_varnames[:co.co_argcount], []) + if type(argValueList) is not list: + error = argValueList + return error + + # Format the arguments and call the method + return method(*argValueList) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/PhraseBuilder.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/PhraseBuilder.py index b63665cfd7..26cb71c6eb 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/PhraseBuilder.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/PhraseBuilder.py @@ -1,4215 +1,4215 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# PhraseBuilder.py +# Methods for building phrases for Narrative products. # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# PhraseBuilder.py -# Methods for building phrases for Narrative products. -# -# Author: hansen -# History -# Time Ticket Number Developer Comments -# ----------------------------------------------------------------------------- -# 12/28/2012 DR 15596 J.Zeng Added checkWeatherSimilarity -# for two lists based on Virgil's -# suggestion -# 04/20/2015 4027 randerso Changes for mixed case product generation. -# 01/08/2016 5129 dgilling Fix signatures to calls in WeatherSubKey. -# 07/15/2016 5749 randerso Replace ellipses with commas -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -import types -import TimeDescriptor -import ConfigVariables -import StringUtils -import UnitConvertor -import TimeRange, WeatherSubKey - -class PhraseBuilder(ConfigVariables.ConfigVariables, - StringUtils.StringUtils, - TimeDescriptor.TimeDescriptor, - UnitConvertor.UnitConvertor): - - def __init__(self): - ConfigVariables.ConfigVariables.__init__(self) - StringUtils.StringUtils.__init__(self) - TimeDescriptor.TimeDescriptor.__init__(self) - UnitConvertor.UnitConvertor.__init__(self) - - def SCALAR(self): - return 0 - def MAGNITUDE(self): - return 1 - def DIRECTION(self): - return 2 - def VECTOR(self): - return 3 - def VECTOR_TEXT(self): - return 4 - def VECTOR_NUM(self): - return 5 - def WEATHER(self): - return 6 - def DISCRETE(self): - return 7 - - def DONE(self): - return 1 - - def call(self, method, tree, node): - # Try to call with tree, node arguments - try: - return method(tree, node) - except: - return method() - - def setWords(self, node, words): - node.set("words", words) - # If the words are empty, we can close down node - if words == "": - #node.remove() - node.doneList = node.methodList - return 1 - - def useCommas(self, tree, node, words): - # Instead of: - # "Chance of rain and snow and freezing rain..." - # use - # "Chance of rain, snow and freezing rain..." 
- if words.count(" and ") > 1: - hiIndex = words.rfind(" and ") - words = words[:hiIndex].replace(" and ", ", ") + words[hiIndex:] - return words - - def setDone(self, node): - # Set the node doneList to the methodList - # so that the node is defunct - node.doneList = node.methodList - - def standard_phraseMethods(self): - return [ - self.consolidatePhrase, - self.checkLocalEffects, - self.combinePhraseStats, - self.consolidateTrends, - self.chooseMostImportant, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.assembleSubPhrases, - self.postProcessPhrase, - ] - - def postProcessPhrase(self, tree, node): - words = node.get("words") - rval = None - if words is not None: - words = words.replace("rain showers and thunderstorms", "showers and thunderstorms") - # To handle snow amt local effects - words = words.replace("except of", "except") - # Translate phrase - # This is necessary so that word-wrap works correctly - try: - words = self.translateForecast(words, self._language) - except: - words = self.translateForecast(words, "english") - rval = self.setWords(node, words) - return rval - - def roundStatistic(self, tree, node, value, elementName): - roundingMethod = self.rounding_method(tree, node, elementName, elementName) - nlIncrement = self.nlValue(self.increment_nlValue( - tree, node, elementName, elementName), value) - return self.roundValue(value, roundingMethod, "Nearest", nlIncrement) - - def findWords(self, tree, node, firstElementName, areaLabel=None, phraseList=None, - ignoreAreaIfLastChance=1, phraseLevel=0, attributes=None): - # Return a text string which is the concatenation of the words for all - # leaves of the tree in the same component as the given node - # and have the given firstElement at their phrase level. - # "Leaves" will be sub-phrases in most case unless a phrase node has - # words set and has empty sub-phrases. - # The method returns None unless all leaves that meet the qualifications - # have words set. 
- # - # If an areaLabel is given, it is applied to the selection of leaves. - # Otherwise all leaves meeting the firstElementName criteria are accepted. - # "areaLabel" can optionally be a list of areaLabels. - # - # If a phraseList is given, only leaves belonging to a phrase - # in that list are examined. - # If phraseLevel is 1 and words are set at the phrase level, those - # are concatenated to the words as well. - # - # if attributes is not None, return a dictionary: - # {attribute: [list of attribute values for leaves with words]} - # - - leaves = self.getLeaves(tree, node) - found = 0 - words = "" - attrDict = {} - #print "\nFind Words", firstElementName - if areaLabel is not None: - if type(areaLabel) is not types.ListType: - areaLabel = [areaLabel] - for child in leaves: - firstElement = child.getAncestor("firstElement") - if firstElement is None: - continue - if firstElement.name == firstElementName or firstElementName is None: - if phraseList is not None: - if child.getAncestor("name") not in phraseList: - continue - #print "firstElement", firstElement.name - #print "child", child, child.get("childList") - #print "words", child.get('words') - - # Check the area - if ignoreAreaIfLastChance: - # If last time thru because no changes were made to tree, - # ignore area - if tree.get("lastChance") == 1: - areaLabel = None - if areaLabel is not None: - if child.getAreaLabel() not in areaLabel: - continue - - # This is a phrase for which we want words - childWords = child.get('words') - if childWords is not None: - found = 1 - #print "Adding words", child.get('name'), child.getAreaLabel() - #print " ", child.getTimeRange() - #print " ", childWords - words = words + " " + childWords - if attributes is not None: - for attribute in attributes: - attrVal = child.getAncestor(attribute) - if attrVal is not None: - self.addToDictionary(attrDict, attribute, attrVal) - if phraseLevel: - parentWords = child.parent.get('words') - if parentWords is not None: - words = words 
+ " " + parentWords - else: - # wait for words to complete - words = None - break - if not found: - words = None - if attributes is not None: - return words, attrDict - else: - return words - - def addToDictionary(self, dictionary, key, value): - # Add the given value to the dictionary where the key - # entry is a list of values - if dictionary.has_key(key): - if type(value) is types.ListType: - dictionary[key] += value - else: - dictionary[key].append(value) - else: - if type(value) is types.ListType: - dictionary[key] = value - else: - dictionary[key] = [value] - - def getLeaves(self, tree, node): - # Return a list of "leaves" for the component to which the node belongs. - # "Leaves" will be nodes that have no children (sub-phrases) - # EXCEPT if a node has words set AND it's children do not. - # (Some phrases e.g. waveHeight_phrase, will sometimes by-pass - # the sub-phrases and set the phrase words directly.) - # - component = node.getComponent() - progeny = component.getProgeny() - leaves = [] - for child in progeny: - childList = child.get('childList') - #print "child", child.getAncestor('name'), childList - #print " ", child.get('words'), child.getAreaLabel() - if childList == [] or childList is None: - # This is a leaf - # Check to see if it has words - leaf = child - if child.get('words') is None: - # If not AND it's parent has words, - # take the parent as the leaf. 
- if child.parent.get('words') is not None: - leaf = child.parent - leaves.append(leaf) - return leaves - - def dayOrNight_element(self, tree, node, dayElement, nightElement): - dayNight = self.getPeriod(node.getTimeRange(), 1) - if dayNight == self.DAYTIME(): - element = dayElement - else: - element = nightElement - return element - - def removeComponentPhrases(self, tree, node, phraseName, exceptions=[], areaLabels=[]): - # Remove all phrases with the given phraseName in the current component - # If areaLabel is not None, check for a match there as well and only - # remove the phrase if it is for that area. - component = node.getComponent() - progeny = component.getProgeny() - #print "\nRemoving Phrases", phraseName, areaLabels, exceptions - for child in progeny: - name = child.get("name") - if name == phraseName: - #print "child", child, child.getAreaLabel() - if child not in exceptions: - if areaLabels != []: - if child.getAreaLabel() in areaLabels: - #print "Removing 1", name, areaLabels, child - #import traceback - #traceback.print_stack(limit=3) - child.remove() - else: - child.remove() - #print "Removing 2", name, areaLabels - #import traceback - #traceback.print_stack(limit=3) - - class ElementInfo: - def __init__(self, name, mergeMethod="Average", dataType=0, - statLabel="", primary=1, phraseDef=None): - self.name = name - self.mergeMethod = mergeMethod - self.statLabel = statLabel - self.primary=primary - self.dataType = dataType - self.phraseDef = phraseDef -## def name(self): -## return self.name -## def mergeMethod(self): -## return self.mergeMethod -## def statLabel(self): -## return self.statLabel -## def primary(self): -## return self.primary() -## def dataType(self): -## return self.dataType() -## def phraseDef(self): -## return self.phraseDef() - - class LocalEffectArea: - def __init__(self, areaLabel, areaWords, conjAreaWords=None, intersectFlag=1): - # AreaLabel can be "__Current__" which will use the - # current edit area. 
- self.areaLabel = areaLabel - # Words to describe this local effect area. - # This can be a text string or a method - # arguments: (tree, node, LocalEffectArea) - # returns a text string - self.areaWords = areaWords - # This can be a text string or a method - # arguments: (tree, node, LocalEffectArea) - # returns a text string - # If you want different area words for the conjunctive - # phrase versus the embedded phrase, add them here - if conjAreaWords is None: - self.conjAreaWords = areaWords - else: - self.conjAreaWords = conjAreaWords - # If 1, area is to be intersected with the current area to - # look for local effects and the area must be listed as an - # intersectArea in the current product component - self.intersectFlag = intersectFlag - - class LocalEffect: - def __init__(self, leAreaList, triggerMethod, exceptionWords): - # List of LocalEffectAreas - # Currently only two areas can be handled by the local effect methods - self.leAreaList = leAreaList - self.triggerMethod = triggerMethod - self.exceptionWords = exceptionWords - - def makeRangeStats(self, tree, dataType, stats, timeRange): - if stats is None: - return None - if tree.library.isStatsByRange(dataType, stats, timeRange): - return stats - else: - return[(stats, timeRange)] - - def sumPrevStats(self, tree, node, areaLabel, elementName, - mergeMethod="Average", increment=1): - # Return a sum of stats going backward in time - # until there is a zero value (when rounded to the nearest increment) - # and NOT including the current value - # For example: - # Period1 = 2 inches snow amt - # Period2 = 0 inches - # Period3 = 2 inches - # Period4 = 2 inches - # Period5 = 4 inches - # If sumPrevStats is called during evaluation of - # Period5, the sum will be 4 (sum of Period3 and Period4) - - # Calculate past snow - prodTR = tree.getTimeRange() - pastSnowMin = 0 - pastSnowMax = 0 - pastSnowTimeRange = self.makeTimeRange(prodTR.startTime() - 12*3600, - prodTR.startTime()) - stats = 
tree.stats.get("SnowAmt", pastSnowTimeRange, - areaLabel, mergeMethod="MinMax") - - if stats is not None: - pastSnowMin, pastSnowMax = self.getValue(stats, "MinMax") - pastSnowMin = int(pastSnowMin+0.5) - pastSnowMax = int(pastSnowMax+0.5) - else: - pastSnowMin = 0 - pastSnowMax = 0 - - minSum = pastSnowMin - maxSum = pastSnowMax - - # Calculate snow in forecast periods - childList = node.getParent().get("childList") - timeRange = node.getTimeRange() - for child in childList: - childTimeRange = child.getTimeRange() - if childTimeRange == timeRange: - break - - stats = tree.stats.get("SnowAmt", childTimeRange, - areaLabel, mergeMethod="MinMax") - if stats is None: - continue - min, max = self.getValue(stats, "MinMax") - min = int(min+0.5) - max = int(max+0.5) - threshold = self.pop_snow_lower_threshold(tree, node) - popStats = self.matchToWx(tree, node, "PoP", childTimeRange) - if popStats < threshold: - min = 0 - max = 0 - if max == 0: - # Start over - minSum = 0 - maxSum = 0 - else: - minSum = minSum + min - maxSum = maxSum + max - - return minSum, maxSum - - def getScalarRangeStr(self, tree, node, element, min, max): - min1=int(min) - max1=int(max) - if min1 == max1: - return `min1` - else: - maxRange = self.maximum_range_nlValue(tree, node, element, element) - maxRange = self.nlValue(maxRange, max1) - if (maxRange == 0): - return `max1` - if abs(min1-max1) > maxRange: - min1 = max1 - maxRange - connector = self.value_connector(tree, node, element, element) - return `min1` + connector + `max1` - - def makeSentence(self, tree, node): - "Make a sentence from the words at the node level" - words = node.get("words") - if words is None: - return - if words == "": - words = "MISSING." - else: - words = self.sentence(words) - return self.setWords(node, words) - - def chooseElement(self, tree, node, elementNameList): - # Return first elementName for which there are stats for - # this node. If none, return last elementName. 
- for elementName in elementNameList: - stats = tree.stats.get(elementName, node.getTimeRange(), - node.getAreaLabel(), mergeMethod="Avg") - if stats is not None: - return elementName - return elementName - - # Narrative Level - def assembleChildWords(self, tree, node): - fcst = "" - for child in node.get("childList"): - words = child.get("words") - if words is None: - return - fcst = fcst + words - return self.setWords(node, fcst) - - # Component Level - - def noWords(self, tree, component): - self.setWords(component, "") - return self.DONE() - - def assembleSentences(self, tree, node): - for phrase in node.get("childList"): - words = phrase.get("words") - if words is None: - return - fcst = "" - lastQualifier = None - lastPhrase = None - self.orderWxPhrases(tree, node) - self.consolidateLocalEffectPhrases(tree, node) - for child in node.get("childList"): - words = child.get("words") - words, lastQualifier = self.qualifyWords( - child, words, "conjunctiveQualifier", lastQualifier, - lastPhrase) - lastPhrase = child - fcst = fcst + words - return self.setWords(node, fcst) - - - def assemblePhrases(self, tree, component): - # Assemble component phrases and add Label - # Qualify the phrases with local effect qualifiers - # if present. - # e.g. 
"near the coast" - phrases = [] - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - if words != "": - phrases.append(phrase) - #print "\nAssemblePhrases" - # Remove empty word phrases - component.childList = phrases - self.orderWxPhrases(tree, component) - self.consolidateLocalEffectPhrases(tree, component) - #print - fcst = "" - lastQualifier = None - lastPhrase = None - phraseList = [] - includeOnlyPhrases = self.includeOnlyPhrases_list(tree, component) - - for phrase in component.get("childList"): - words = phrase.get("words") - words = self.adjustWords(tree, phrase, words) - #print phrase.get('name'), phrase.getAreaLabel() - #print " ", words - if type(includeOnlyPhrases) is types.ListType and len(includeOnlyPhrases) > 0 and \ - phrase.get('name') not in includeOnlyPhrases: - # Do not include this phrase - continue - - words, lastQualifier = self.qualifyWords( - phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase) - lastPhrase = phrase - if words not in phraseList: - phraseList.append(words) - fcst = fcst + words - # Add label - curLocalTime, shift = self.determineTimeShift() - issuanceInfo = tree.get("issuanceInfo") - index = component.getIndex() - label = self.createLabel(tree, component, component.get("timeRange"), - issuanceInfo, curLocalTime, shift, index) - fcst = self.combineSentences(fcst) - return self.setWords(component, label + fcst) - - def adjustWords(self, tree, phrase, words): - # Make any special adjustments to phrases - # This one is necessary for popMax since we have - # removed repeating popType wording assuming - # an embedded PoP phrase will result. - # If it happens to end up as a conjunctive, - # then we have to put the popType back. 
- if phrase.get('name') in ["popMax_phrase"]: - if not phrase.get('embedded'): - popType = phrase.getDescendent('popType') - if words.find(popType) == -1: - desc = phrase.get('descriptor') - words = words.replace(desc, desc + " " + popType) - return words - - def assembleIndentedPhrases(self, tree, component): - # Assemble and indent component phrases and add Label - # Qualify the phrases with local effect qualifiers - # if present. - # e.g. "near the coast" - for phrase in component.get("childList"): - words = phrase.get("words") - #print phrase, words - if words is None: - return - - # DR_18964 - self.consolidateLEPerPhraseInstance(tree, component) - #self.consolidatePerPhraseNameGroup(tree, component) - - fcst = "" - lastQualifier = None - lastPhrase = None - self.orderWxPhrases(tree, component) - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - if words == "": - if self.removeEmptyPhrase(tree, phrase): - continue - - # Handle multiple element table phrase - # that appears per period - # No need to indent or qualify - name = phrase.get("name") - if name == "multipleElementTable_perPeriod_phrase": - fcst = fcst + words - continue - - if phrase.get("compound"): - makeSentence = 0 - else: - makeSentence = 1 - words, lastQualifier = self.qualifyWords( - phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase, - makeSentence=makeSentence) - lastPhrase = phrase - indentLabel = phrase.get("indentLabel") - label = self.phrase_descriptor( - tree, phrase, indentLabel, indentLabel) - #print "indentLabel, label", indentLabel, label - if indentLabel is not None and label == "": - label = indentLabel - if words == "": - words = " " - words = self.labelIndent(words, label) - print phrase, words - fcst = fcst + words - # Add label - curLocalTime, shift = self.determineTimeShift() - issuanceInfo = tree.get("issuanceInfo") - index = component.getIndex() - label = self.createLabel(tree, component, 
component.get("timeRange"), - issuanceInfo, curLocalTime, shift, index) - return self.setWords(component, label + "\n" + fcst + "\n") - - def consolidateLEPerPhraseInstance(self, tree, component): - # Do the LE consolidation/combination for each (compound) phrase - # create a pseudo component for the LE phrases to hang onto - pseudo = component.copy() - - lePhraseNameGroups = [] - le_groups = {} - for phrase in component.get("childList"): - if phrase.get("localEffect"): - lePhraseNameGroup, firstName = self.getLePhraseNameGroup( - tree, component, lePhraseNameGroups, phrase) - le_groups.setdefault(lePhraseNameGroup, []).append(phrase) - - for name, nodes in le_groups.iteritems(): - #print name, nodes - - # put the nodes under the pseudo-component and do the - # LE consolidation/combination - pseudo.set("childList", nodes) - self.consolidateLocalEffectPhrases(tree, pseudo) - #self.combineConjunctivePhrases(tree, pseudo) - - # add the resultant nodes back under the component, - # replacing the original ones - newChildren = pseudo.get("childList") - #print 'new nodes:', newChildren - if newChildren == nodes: - continue - - childList = [] - inserted = 0 - for child in component.get("childList"): - if child in nodes: - if not inserted: - childList += newChildren - inserted = 1 - else: - childList.append(child) - component.set("childList", childList) - - # delete the pseudo component - pseudo.remove() - pseudo.set("parent", None) - pseudo.set("childList", []) - - - def consolidatePerPhraseNameGroup(self, tree, component): - # Do the LE consolidation/combination for each (compound) phrase - # create a pseudo component for the LE phrases to hang onto - pseudo = component.copy() - - lePhraseNameGroups = self.lePhraseNameGroups(tree, component) - le_groups = {} - for phrase in component.get("childList"): - if phrase.get("localEffect"): - lePhraseNameGroup, firstName = self.getLePhraseNameGroup( - tree, component, lePhraseNameGroups, phrase) - 
le_groups.setdefault(lePhraseNameGroup, []).append(phrase) - - for name, nodes in le_groups.iteritems(): - #print name, nodes - - # put the nodes under the pseudo-component and do the - # LE consolidation/combination - pseudo.childList = nodes - self.consolidateLocalEffectPhrases(tree, pseudo) - self.combineConjunctivePhrases(tree, pseudo) - - # add the resultant nodes back under the component, - # replacing the original ones - newChildren = pseudo.childList - childList = [] - inserted = 0 - for child in component.childList: - if child in nodes: - if not inserted: - childList += newChildren - inserted = 1 - else: - childList.append(child) - component.childList = childList - - # delete the pseudo component - pseudo.remove() - - def weatherPhraseNames(self, tree, node): - return ["weather_phrase", "skyPopWx_phrase"] - - def orderWxPhrases(self, tree, component): - # Sort the weather phrases (weather_phrase, skyPopWx_phrase) - # according to their time span for non-empty sub-phrases. - # Then replace the weather phrases in the component childList. - # We will assume that all the weather phrases for the - # component are consecutive. 
- wxPhraseNames = self.weatherPhraseNames(tree, component) - phraseList = component.get("childList") - wxPhrases = [] - for phrase in phraseList: - if phrase.get("name") in wxPhraseNames: - wxPhrases.append(phrase) - #print "appending", phrase.get('name'), phrase.get('words') - wxPhrases.sort(self.sortPhraseTimeSpans) - #print "sorted list" - #for phrase in wxPhrases: - # print phrase.get('name'), phrase.get('words') - - newPhraseList = [] - firstTime = 1 - for phrase in phraseList: - if phrase.get("name") in wxPhraseNames: - if firstTime: - # Add in the sorted wxPhrases - newPhraseList += wxPhrases - firstTime = 0 - continue - # If not a wx phrase, append to the list - newPhraseList.append(phrase) - component.set("childList", newPhraseList) - - def sortPhraseTimeSpans(self, phrase1, phrase2): - # Determine which phrase should come first according - # to it's time span of non-empty sub-phrases - # First, determine the timeSpan of each phrase: - defaultTR = TimeRange.default() - for phrase in [phrase1, phrase2]: - # If time span already calculated, skip it - timeSpan = phrase.get("timeSpan") - if timeSpan is not None: - continue - startTime = None - endTime = None - for subPhrase in phrase.get("childList"): - if subPhrase.get("words") != "": - tr = subPhrase.getTimeRange() - trStart = tr.startTime() - trEnd = tr.endTime() - if startTime is None: - startTime = trStart - elif startTime > trStart: - startTime = trStart - if endTime is None: - endTime = trEnd - elif endTime < trEnd: - endTime = trEnd - if startTime is not None and endTime is not None: - # Make a time span for this phrase's words - phraseTR = TimeRange.TimeRange(startTime, endTime) - phrase.set("timeSpan", phraseTR) - else: - phrase.set("timeSpan", defaultTR) - # Order the phrases according to their time spans. 
- timeSpan1 = phrase1.get("timeSpan") - timeSpan2 = phrase2.get("timeSpan") - if timeSpan1 == defaultTR or timeSpan2 == defaultTR: - return 0 - #print "\ntimeSpan1, timeSpan2", timeSpan2, timeSpan2 - #print " order", self.orderTimeRanges(timeSpan2, timeSpan2) - return self.orderTimeRanges(timeSpan1, timeSpan2) - - def orderTimeRanges(self, tr1, tr2): - # If tr1 should come before tr2, return -1 - # If equal, return 0, else return 1 - #print "\nin orderTimeRanges", tr1, tr2 - s1 = tr1.startTime() - s2 = tr2.startTime() - if s1 < s2: - #print "return1 -1" - return -1 - elif s2 < s1: - #print "return2 1" - return 1 - else: - # They start at the same time - e1 = tr1.endTime() - e2 = tr2.endTime() - # Put the one with the shortest - # span first - if e1 < e2: - #print "return3 -1" - return -1 - elif e1 > e2: - #print "return4 1" - return 1 - else: - #print "return5 0" - return 0 - - def consolidateSubPhrases(self, tree, component): - ## Timing: This method runs at the component level - ## AFTER all sub-phrase words have been set and - ## BEFORE they have been assembled into phrases at the phrase level. - ## - ## Purpose: Check for duplicate subPhrases and consolidate - # them into one. - ## - ## For example: (see Case 2 below) - ## Chance of thunderstorms in the morning (windward) - ## Chance of thunderstorms in the morning (leeward) - ## Chance of rain in the afternoon (windward) - ## Chance of snow in the afternoon (leeward) - ## - ## becomes: - ## Chance of thunderstorms in the morning (unqualified) - ## Chance of rain in the afternoon (windward) - ## Chance of snow in the afternoon (leeward) - - # Set a flag to make sure we pass by this method the first time - # so that the phrase set-up methods have a chance to run and - # create sub-phrases before we try to consolidate them - if component.get('first') is None: - component.set('first', 1) - return - - # Make sure all subPhrases have completed i.e. 
have words set - subPhraseList = [] - leaves = self.getLeaves(tree, component) - leFlag = 0 - for child in leaves: - words = child.get("words") - #print "Consolidate SubPhrases", child.getAncestor("name"), words - if words is None: - #print "Returning" - return - le = child.getAncestor('localEffect') - if le is not None: - leFlag = 1 - subPhraseList.append(child) - - # If no localEffects, skip this method - if not leFlag: - #print "In Consolidate SubPhrases: No local effects" - return self.DONE() - - if self.__dict__.get("_leDebug", 0): - print "\nConsolidateSubPhrases", tree.get('passes') - - # Create subPhraseDict = - # {(words, tr, lePhraseNameGroup): - # list of subPhrases with those words, tr, and lePhraseNameGroup} - lePhraseNameGroups = self.lePhraseNameGroups(tree, component) - subPhraseDict = {} - for subPhrase in subPhraseList: - tr = subPhrase.getTimeRange() - words = subPhrase.get("words") - lePhraseNameGroup, firstName = self.getLePhraseNameGroup( - tree, component, lePhraseNameGroups, subPhrase.parent) - if words == "": - continue - if self.__dict__.get("_leDebug", 0): - print subPhrase.getAncestor("name")#, subPhrase.parent - print " ", subPhrase.getAreaLabel(), tr, words - print " local effect", subPhrase.getAncestor('localEffect') - self.addToDictionary(subPhraseDict, (words,tr,lePhraseNameGroup), subPhrase) - if self.__dict__.get('_leDebug', 0): print "subPhraseDict", subPhraseDict - - # Check for duplicate subPhrases and consolidate them into one. - # Case 1: If the duplicates are all for the same areaLabel, - # set the areaLabel for the consolidated subPhrase to that. - # Case 2: If the duplicates are for a local effect and - # cover all possible local effect areas for their phrase, - # create a new phrase for component.getAreaLabel() - # with this subPhrase wording. Remove the local effect subPhrases. 
- # Case 3: If the duplicates are for a local effect - # and they cover a subset of the local effect areas, - # leave them alone except for removing any component.getAreaLabel() - # duplicate subPhrases. - compArea = component.getAreaLabel() - if self.__dict__.get('_leDebug',0): - print "\nDetermine Case for each set of duplicate phrases. compArea", compArea - for key in subPhraseDict.keys(): - words, tr, lePhraseNameGroup = key - subPhrases = subPhraseDict[key] - if len(subPhrases) <= 1: - continue - # We have duplicate subPhrases to consolidate. - # Gather the areaLabels for these duplicate subphrases - # and the possible localEffect Area labels - areaLabels, leAreas = self.gatherDupAreaLabels( - tree, component, compArea, subPhrases) - if self.__dict__.get('_leDebug',0): - print "\n", words - print " ", tr, len(subPhrases) - print "areaLabels, leAreas", areaLabels, leAreas - # Determine the consolidated areaLabel - if len(areaLabels) == 1: - # Case 1 - if self.__dict__.get('_leDebug',0): print "CASE 1" - # Remove all but the first subPhrase - for subPhrase in subPhrases[1:]: - subPhrase.set('words', "") - else: - parent = subPhrases[0].parent - localEffect = subPhrases[0].getAncestor('localEffect') - if localEffect is None: - continue - # See if all local effect areas are covered - allAreasCovered = self.allLeAreasCovered( - tree, component, compArea, leAreas, areaLabels) - if allAreasCovered: - # Case 2: Consolidate - if self.__dict__.get('_leDebug',0): print "CASE 2" - parent = subPhrases[0].parent - newNode = tree.copyPhrase( - parent, areaLabel=compArea, - copyAttrs=["doneList", "disabledSubkeys", "disabledElements", - "firstElement", "elementName", "elementInfoList", - "descriptor", "indentLabel"]) - component.insertChild(parent, newNode) - newSubPhrase = subPhrases[0].copy() - newNode.set('childList', [newSubPhrase]) - for subPhrase in subPhrases: - subPhrase.set('words', "") - else: - # Case 3: Throw out any compArea subPhrase and - # leave local effect 
ones alone for now - if self.__dict__.get('_leDebug',0): print "CASE 3" - for subPhrase in subPhrases: - if subPhrase.getAreaLabel() == compArea: - subPhrase.set("words", "") - return self.DONE() - - def gatherDupAreaLabels(self, tree, component, compArea, subPhrases): - areaLabels = [] - leAreas = [] - for subPhrase in subPhrases: - subArea = subPhrase.getAreaLabel() - if subArea not in areaLabels: - areaLabels.append(subArea) - #print "subArea", subArea, subPhrase.getAncestor('name') - if subArea != compArea: - localEffect = subPhrase.getAncestor("localEffect") - if localEffect is not None: - leAreas += self.getLeAreaList(tree, subPhrase, localEffect) - return areaLabels, leAreas - - def allLeAreasCovered(self, tree, component, compArea, leAreas, areaLabels): - allAreasCovered = 1 - if leAreas != []: - # Determine if the subPhrases cover all possible local effect areas - for leArea in leAreas: - if leArea.intersectFlag: - areaName = self.getIntersectName( - compArea, leArea.areaLabel) - elif leArea.areaLabel == "__Current__": - areaName = compArea - else: - areaName = leArea.areaLabel - #print "le Area Name", areaName - if areaName not in areaLabels: - allAreasCovered = 0 - break - return allAreasCovered - - def consolidateSubPhrases_trigger(self, tree, node): - # Checking to see if consolidateSubPhrases has been - # completed (if it is on the component methodList) - # This assumes that "node" is a phrase and its parent is - # a component. - parent = node.parent - if self.consolidateSubPhrases in parent.methodList and \ - self.consolidateSubPhrases not in parent.doneList: - return 0 - return 1 - - - def consolidateLocalEffectPhrases(self, tree, node): - # Organize the local effect and non-local effect phrases. - # "node" can be a component or a compound phrase. - # Convert to embedded local effect phrases if appropriate. 
- # Apply the Local Effect thresholds: - # repeatingEmbedded_localEffect_threshold - # repeatingPhrase_localEffect_threshold - hasLE = 0 - for phrase in node.get('childList'): - le = phrase.get('localEffect') - if le is not None: - hasLE = 1 - break - if not hasLE: - # No local effect phrases so no work to to be done - return - if not self.incorporateNonLocalEffectPhrases(tree, node): - self.convertToEmbedded(tree, node) - self.orderLocalEffectPhrases(tree, node) - - # Add later as an enhancement - def incorporateNonLocalEffectPhrases(self, tree, node): - # Try to incorporate non-qualified phrases - # If there is exactly one leArea group in the set of phrases - # AND this group is composed of intersect areas - # AND there is more than one local effect phrase - # AND the number of non-local effect phrases - # < repeatingPhrase_localEffect_threshold: - # Convert them to conjunctive local effect phrases - # (one for each intersect local effect area) - # return 1 - # Else: - # return 0 - - # EXAMPLE: - # Instead of: - # Chance of thunderstorms in the morning. - # Windward...Cloudy...Rain likely...Chance of precipitation 70 percent. - # Leeward...Partly cloudy...Scattered showers...Chance of precipitation 30 - # percent. Highs in the 40s. Winds 20 mph. - # - # We will produce: - # Windward...Cloudy....Rain likely...Chance of thunderstorms in the morning... - # Chance of precipitation 70 percent. - # Leeward...Partly cloudy...Scattered showers...Chance of thunderstorms in - # the morning...Chance of precipitation 30 percent. Highs in the 40s. - # Winds 20 mph. - return 0 - - # Replaces combineConjunctiveLocalEffects - def convertToEmbedded(self, tree, component): - # - # Converts conjunctive local effects to embedded if possible. - # For each leGroup: - # If number of possible embedded phrases - # < repeatingEmbedded_localEffect_threshold - # AND there are NO mandatory conjunctives: - # Replace conjunctive phrases with an embedded phrase. 
- # - if self.__dict__.get('_leDebug',0): print "\nConvert to embedded" - lePhraseNameGroups = self.lePhraseNameGroups(tree, component) - lePhraseDict = self.createLePhraseDict(tree, component, lePhraseNameGroups) - if self.__dict__.get('_leDebug',0): print "\nlePhraseDict", lePhraseDict - - repeatThreshold = self.repeatingEmbedded_localEffect_threshold( - tree, component) - qualifiersDict = self.createQualifiersDict( - tree, component, lePhraseDict, repeatThreshold) - if self.__dict__.get('_leDebug',0): print "\nqualifiersDict", qualifiersDict - - self.createEmbeddedPhrases( - tree, component, lePhraseDict, qualifiersDict, repeatThreshold) - if self.__dict__.get('_leDebug',0):print "\nlePhraseDict", lePhraseDict - - self.insertEmbeddedPhrases( - tree, component, lePhraseDict, lePhraseNameGroups) - - def createLePhraseDict(self, tree, component, lePhraseNameGroups): - # Organize phrases in the component by lePhraseNameGroups. lePhraseDict: - # lePhraseNameGroup: { - # qualifiers: e.g. ["leeward", "windward"] - # phrases: [phrases] - # firstElementName: firstElement.name - # } - lePhraseDict = {} - for phrase in component.get("childList"): - if self.__dict__.get('_leDebug',0): - print "phrase", phrase.get('name'), phrase.get('words') - print " ", phrase.getAreaLabel() - print " ", phrase.get('conjunctiveQualifier') - print " ", phrase.get('embeddedQualifier') - localEffect = phrase.get('localEffect') - if localEffect is None: - continue - lePhraseNameGroup, firstName = self.getLePhraseNameGroup( - tree, component, lePhraseNameGroups, phrase) - qualifier = phrase.get('embeddedQualifier') - # Add the entry to the dictionary - if lePhraseDict.has_key(lePhraseNameGroup): - entry = lePhraseDict[lePhraseNameGroup] - if qualifier not in entry["qualifiers"]: - entry["qualifiers"].append(qualifier) - entry["phrases"].append(phrase) - else: - lePhraseDict[lePhraseNameGroup] = { - "qualifiers": [qualifier], - "phrases": [phrase], - "firstElementName": firstName, - } - return 
lePhraseDict - - def createQualifiersDict(self, tree, component, lePhraseDict, repeatThreshold): - # Find out how many potential embedded phrases there are for each - # unique set of qualifiers. - # Create a qualifiersDict: qualifiers: count - # for lePhraseNameGroup in lePhraseDict: - # Can it be embedded i.e. are there NO mandatory conjunctives? - # If so, flag it as such and increase count for it's qualifier set. - # - qualifiersDict = {} - for lePhraseNameGroup in lePhraseDict.keys(): - embedded = 1 - nameDict = lePhraseDict[lePhraseNameGroup] - phrases = nameDict["phrases"] - # For the phraseNameGroup to be embedded, - # all phrases in the phrase name group must qualify - # to be embedded i.e.the phrase must have just one subphrase - # which is non-empty and covers the phrase time range. - # - # Also, create areaCountDict to keep track of number of - # potentially embedded phrases per areaLabel: - # areaLabel:count - # Unless the phraseNameGroup has multiple phrases (e.g. sky, pop, wx), - # the count for each areaLabel will be 1. - # If the number of embedded phrases for any area exceeds the - # repeatThreshold, do not do an embedded phrase. 
- # - areaCountDict = {} - for phrase in phrases: - subPhrases = phrase.get('childList') - if len(subPhrases) != 1: - embedded = 0 - break - subPhrase = subPhrases[0] - if subPhrase.get("words") == "" or \ - subPhrase.getTimeRange() != phrase.getTimeRange(): - embedded = 0 - break - # Keep track of count for each area - areaLabel = phrase.getAreaLabel() - if areaCountDict.has_key(areaLabel): - areaCountDict[areaLabel] += 1 - else: - areaCountDict[areaLabel] = 1 - if areaCountDict[areaLabel] > repeatThreshold: - if self.__dict__.get('_leDebug',0): - print "areaCount exceeded", areaLabel, areaCountDict[areaLabel] - embedded = 0 - break - if embedded: - qualifiers = nameDict["qualifiers"] - # Sort, removeDups and re-store qualifiers as a tuple - # We convert the qualifiers from a list to a tuple - # so that the qualifiers can be dictionary keys in qualifiersDict - qualifiers.sort() - qualifiers = self.removeDups(qualifiers) - tQualifiers = tuple(qualifiers) - nameDict["qualifiers"] = tQualifiers - if qualifiersDict.has_key(tQualifiers): - qualifiersDict[tQualifiers] += 1 - else: - qualifiersDict[tQualifiers] = 1 - nameDict["embedded"] = embedded - return qualifiersDict - - def createEmbeddedPhrases(self, tree, component, lePhraseDict, qualifiersDict, - repeatThreshold): - # Convert to embedded if repeatingEmbedded_localEffect_threshold is not exceeded. 
- # for lePhraseNameGroup in lePhraseDict: - # If it's leGroup count < repeatingEmbedded_localEffect_threshold: - # convert to embedded and add to lePhraseNameGroup entry - # - for lePhraseNameGroup in lePhraseDict: - nameDict = lePhraseDict[lePhraseNameGroup] - if not nameDict["embedded"]: - continue - qualifiers = nameDict["qualifiers"] - count = qualifiersDict[qualifiers] - if count > repeatThreshold: - nameDict["embedded"] = 0 - continue - # Create an embedded phrase - phrases = nameDict["phrases"] - nameDict["embeddedPhrase"] = self.makeEmbeddedFromConjunctiveLE( - tree, component, phrases) - - def insertEmbeddedPhrases(self, tree, component, lePhraseDict, lePhraseNameGroups): - # Insert the embedded phrases at the proper places in the component - # phraseList and remove the associated conjunctive phrases. - # Embedded phrases are inserted at the site of the first - # associated conjunctive phrase. - # - newPhraseList = [] - # doneList keeps track of those lePhraseNameGroups for which we've already - # inserted the embedded phrase - doneList = [] - if self.__dict__.get('_leDebug',0):print "\nStep" - for phrase in component.get("childList"): - if self.__dict__.get('_leDebug',0):print "phrase", phrase.get('words') - localEffect = phrase.get('localEffect') - if localEffect is None: - newPhraseList.append(phrase) - continue - # Determine lePhraseNameGroup for this phrase - lePhraseNameGroup, firstName = self.getLePhraseNameGroup( - tree, component, lePhraseNameGroups, phrase) - if self.__dict__.get('_leDebug',0): - print " lePhraseNameGroup", lePhraseNameGroup - nameDict = lePhraseDict[lePhraseNameGroup] - if not nameDict["embedded"]: - newPhraseList.append(phrase) - continue - if lePhraseNameGroup not in doneList: - # insert the embedded phrase - newPhraseList.append(nameDict["embeddedPhrase"]) - doneList.append(lePhraseNameGroup) - component.set("childList", newPhraseList) - - def getLePhraseNameGroup(self, tree, node, lePhraseNameGroups, phrase): - # Unless the 
group is an explicitly defined by lePhraseNameGroups, - # the name returned will be the . - # Make sure all the phrases for each lePhraseNameGroup have the - # same firstElement UNLESS the lePhraseNameGroup is listed explicitly - # in self.lePhraseNameGroups. - # If not, make a separate dictionary entry name for each firstElement. - # For example: In the FWF, the"dayOrNight_phrase" may have MaxT for some phrases - # and MinRH for others. We want to keep them separate when converting - # to embedded local effect phrases. - explicitGroup = 0 - phraseName = phrase.get('name') - for group in lePhraseNameGroups: - if phraseName in group: - lePhraseNameGroup = group - explicitGroup = 1 - # Check the firstElement - firstElement = phrase.get('firstElement') - if firstElement is None: - firstName = "None" - else: - firstName = firstElement.name - if not explicitGroup: - lePhraseNameGroup = phraseName + "_" + firstName - return lePhraseNameGroup, firstName - - def orderLocalEffectPhrases(self, tree, node): - # - # Group all conjunctive local effect phrases - # for each local effect area together - # (at the location of the first occurrence). - # - # EXAMPLE: - # LEEWARD...SUNNY IN THE MORNING THEN BECOMING PARTLY SUNNY...SCATTERED SHOWERS. - # WINDWARD...MOSTLY CLOUDY WITH SCATTERED SHOWERS - # - # instead of: - # LEEWARD...SUNNY IN THE MORNING THEN BECOMING PARTLY SUNNY. - # WINDWARD...MOSTLY CLOUDY WITH SCATTERED SHOWERS. - # LEEWARD...SCATTERED SHOWERS. 
- # - phraseList = node.get("childList") - newList = [] - doneAreas = [] - #print "\nOrder LE phrases" - for phrase in phraseList: - localEffect = phrase.get("localEffect") - areaLabel = phrase.getAreaLabel() - embedded = phrase.get("embedded") - #print "PHRASE", phrase.get('words') - if localEffect is None or embedded: - newList.append(phrase) - else: - #print " phrase", phrase.get("name") - #print " conjqualifier", phrase.get("conjunctiveQualifier") - #print " area", areaLabel - if areaLabel in doneAreas: - # We already added this phrase to the newList - # as a Local Effect area - continue - newList.append(phrase) - # Gather the other phrases for this local effect area. - index = phraseList.index(phrase) - for p in phraseList[index+1:]: - p_localEffect = p.get("localEffect") - p_area = p.getAreaLabel() - if p_localEffect is not None and p_area == areaLabel: - newList.append(p) - doneAreas.append(areaLabel) - node.set("childList", newList) - - def combineConjunctivePhrases(self, tree, component): - # Check for Conjunctive Local Effects and make sure - # we do not repeat the indented label. - # - # For example: - # LAL.................IN THE VALLEYS ...1. - # LAL.................IN THE MOUNTAINS...3 UNTIL 2400, THEN 1. - # - # Should be: - # LAL.................IN THE VALLEYS ...1. - # IN THE MOUNTAINS...3 UNTIL 2400, THEN 1. 
- # - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - newChildList = [] - lastName = "" - lastElement = "" - lastPhrase = None - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - curName = phrase.get("name") - curElement = phrase.get("elementName") - if lastPhrase is None: - lastPhrase = phrase - lastName = curName - else: - # Look for a local effect phrase to be combined with - # lastPhrase - localEffect = phrase.get("localEffect") - embedded = phrase.get('embedded') - #print "phrase", curName, lastName, localEffect - if localEffect is not None and curName == lastName and \ - embedded != 1 and curElement == lastElement: - # Combine this phrase words into last one - # Add conjunctive qualifier - #print "combining" - phraseWords = phrase.get("words") - qualifier = phrase.get("conjunctiveQualifier") - if qualifier is not None and qualifier != "": - phraseWords = qualifier + " "+ phraseWords - newWords = lastPhrase.get("words") + "." 
+ \ - phraseWords - lastPhrase.set("words", newWords) - else: - # Add phrase to new list - #print "switching" - newChildList.append(lastPhrase) - lastPhrase = phrase - lastName = curName - lastElement = curElement - # Clean up lastPhrase - if lastPhrase is not None: - newChildList.append(lastPhrase) - component.set("childList", newChildList) - - - def makeEmbeddedFromConjunctiveLE(self, tree, component, conjList): - # Make an embedded phrase from the list of conjunctive phrases - # - conjWords = "" - embeddedPhrase = tree.copyPhrase( - conjList[0], areaLabel = component.getAreaLabel(), - copyAttrs=["doneList", "descriptor", "indentLabel", - "embeddedDescriptor", "localEffect"]) - descriptor = self.addSpace(embeddedPhrase.get("descriptor")) - if descriptor == "": - descriptor = self.addSpace(embeddedPhrase.get("embeddedDescriptor")) - #print "\nIn makeEmbeddedFromConjunctiveLE", descriptor - index = 0 - localEffect = embeddedPhrase.get("localEffect") - fcst = "" - # Gather words - for phrase in conjList: - words = phrase.get("words") - if words == "": - continue - if not index == 0: - # Get connector - connector = localEffect.exceptionWords - fcst = fcst + connector - # Get rid of duplicate descriptor - if descriptor != "": - words = words.replace(descriptor, "") - # Local Effect Descriptor - areaWords = phrase.getAncestor("embeddedQualifier") - if areaWords is None: - areaWords = "" - areaWords = self.addSpace(areaWords, "leading") - fcst = fcst + words + areaWords - index = index + 1 - embeddedPhrase.set("words", fcst) - embeddedPhrase.set('embedded', 1) - self.postProcessPhrase(tree, embeddedPhrase) - #print "embedded words", embeddedPhrase.get('words') - return embeddedPhrase - - def qualifyWords(self, node, words, qualifierName, lastQualifier, - lastPhrase, makeSentence=1): - # Qualifies words with local effect qualifiers - # Also, if makeSentence==1, makes the words into a sentence - # when appropriate. 
- # Returns the modified words and the qualifier (if any) - # - # Logic: - # If empty words, skip. - # If no qualifier: - # if makeSentence: - # makeSentence and return words and lastQualifier - # If there is a qualifier: - # Handle a new qualifier. - # If qualifier is new and non-empty: - # Add the qualifier and ellipses to beginning of words - # Handle a continuation: If the next phrase will be qualified - # with the same qualifier, - # Add ellipses to the end of the words. In this case, - # we will not add a period to the end of the words - # when making a sentence. - # if makeSentence, make the words into a sentence with or without - # a period at the end. - # return words and qualifier - # - qualifier = node.get(qualifierName) - #print "\nQualify words: qualifier, lastQualifier, words", qualifier, lastQualifier, words - if words == "": - return words, lastQualifier - addPeriod = 1 - if qualifier is not None: - if qualifier != lastQualifier and qualifier != "": - words = qualifier + ", " + words - next = self.getNext_nonEmpty(node, "words") - if next is not None: - nextQualifier = next.get(qualifierName) - #print "nextQualifier, qualifier", nextQualifier, "X", qualifier, "X", words - if nextQualifier == qualifier: - addPeriod = 0 - words = words + ", " - if makeSentence: - words = self.sentence(words, addPeriod) - #print "returning", words - return words, qualifier - - def getNext_nonEmpty(self, node, attrName): - next = node.getNext() - while 1: - if next is None: - break - val = next.get(attrName) - if val is not None and val != "": - break - next = next.getNext() - return next - - def namesEqual(self, name1, name2): - weatherPhrases = ["skyPopWx_phrase", "weather_phrase"] - if name1 == name2 or \ - (name1 in weatherPhrases and name2 in weatherPhrases): - namesEqual = 1 - else: - namesEqual = 0 - return namesEqual - - def wordWrap(self, tree, component): - # Wrap the component.words() - compWords = component.get("words") - if compWords is None: - return - 
compWords = self.endline(compWords, tree.get("lineLength")) - return self.setWords(component, compWords) - - def createLabel(self, tree, node, timeRange, issuanceInfo, currentLocalTime, shift, index=0): - # Make a label given the timeRange in GMT and the shift to - # convert it to local time. currentLocalTime can be used to - # compare to current day. - - # NOTE: If you make changes to this method, change the SAF_Overrides - # file as it is overridden there. - - if timeRange.duration() <= 3600: - return "" - if index == 0: - try: - label = issuanceInfo.period1Label() - if label != "": - return label - except: - pass - try: - today = issuanceInfo.todayFlag() - except: - today = 1 - try: - useHolidays = self._useHolidays - except: - useHolidays = 1 - nextDay24HourLabel = self.nextDay24HourLabel_flag(tree, node) - splitDay24HourLabel = self.splitDay24HourLabel_flag(tree, node) - label = self.getWeekday(timeRange, holidays=useHolidays, shiftToLocal=1, - labelType="CapitalWithPeriod", today=today, - tomorrow=0, nextDay24HourLabel=nextDay24HourLabel, - splitDay24HourLabel=splitDay24HourLabel) - return label - - - # Ordering phrases - def orderPhrases(self, tree, component): - # Reorder highs and lows based on start period - - reorderList = [] - timeRange=component.getTimeRange() - areaLabel= component.getAreaLabel() - - if timeRange.duration() >= 24 * 3600: - startTR = TimeRange.TimeRange(timeRange.startTime(), - timeRange.startTime() + (12 * 3600)) - dayNight = self.getPeriod(startTR, 1) - if dayNight == self.NIGHTTIME(): - reorderList.append(("lows_phrase", "highs_phrase")) - reorderList.append(("lows_range_phrase", "highs_range_phrase")) - - for phrase1, phrase2 in reorderList: - self.moveAbove(tree, component, phrase1, phrase2) - - return self.DONE() - - def moveAbove(self, tree, component, phrase1, phrase2): - # Move the phrase phrase1 above phrase2 in the - # component list - - # Find Phrase to move - savedPhrase = "" - for phrase in component.childList: - name = 
phrase.get("name") - if name == phrase1: - savedPhrase = phrase - break - # Create new phrase list inserting savedPhrase in - # the new location - newPhraseList = [] - for phrase in component.childList: - name = phrase.get("name") - if name == phrase1: - continue - if name == phrase2 and savedPhrase != "": - newPhraseList.append(savedPhrase) - newPhraseList.append(phrase) - component.set("childList", newPhraseList) - - return self.DONE() - - # Phrase Level - def subPhraseSetUp(self, tree, phrase, elementInfoList, connectorMethod, resolution=None): - # Set up subPhrase nodes and "statDict" statistics for multiple elements. - # The temporal resolution of the first element determines the number of subPhrases created. - # If the elementInfoList is empty, one empty subPhrase is created with an empty "statDict". - # If there is noData for the first element, one empty subPhrase is created. - # - # Sets up the following attributes: - # Phrase Level - # descriptor -- based on phrase_descriptor for the first element - # connectorMethod -- based on setUp arguments - # elementInfoList -- adds "outUnits" to each elementInfo in list - # firstElement -- elementInfo for first in elementInfoList - # elementName -- elementName for first in elementInfoList - # - # SubPhrase Level - # elementName -- elementName for first in elementInfoList - # timeRange - # statDict -- entries for all elements in elementInfoList - # - timeRange = phrase.getTimeRange() - areaLabel = phrase.getAreaLabel() - statDictList = [] - - if len(elementInfoList) < 1: - # Make phrase with one empty subphrase - self.makeEmptySubPhrase(tree, phrase, None) - return self.DONE() - - # Make sub ranges based on first element - first = elementInfoList[0] - elementName = first.name - first.outUnits = self.element_outUnits(tree, phrase, first.name, first.name) - - # Check to see if the timeRange is great enough to collapse sub-phrases - # automatically - hours = self.collapseSubPhrase_hours(tree, phrase, elementName, 
elementName) - if timeRange.duration() > hours * 3600: - first.mergeMethod = self.mergeMethod(tree, phrase, elementName, elementName) - - #print "Getting first", first.name, timeRange, areaLabel - # Check to see if we are requesting a particular time resolution - if resolution is not None: - # Create sub-ranges with this resolution and provide list - # of stats for each time range - subRanges = self.divideRange(timeRange, resolution) - stats = [] - for subRange in subRanges: - subStats = tree.stats.get(first.name, subRange, areaLabel, - first.statLabel, first.mergeMethod) - stats.append((subStats, subRange)) - else: - stats = tree.stats.get(first.name, timeRange, areaLabel, - first.statLabel, first.mergeMethod) - #print "stats", stats - statsByRange = self.makeRangeStats(tree, first.dataType, stats, timeRange) - #print "statsByRange", first.name, statsByRange - - # Case of no data for first element - if statsByRange is None: - self.makeEmptySubPhrase(tree, phrase, first) - return self.setWords(phrase, "") - - phrase.set("emptyPhrase", 0) - # Set up descriptor and connector - if phrase.get("descriptor") is None: - descriptor = self.phrase_descriptor(tree, phrase, first.name, first.name) - phrase.set("descriptor", descriptor) - phrase.set("connectorMethod", connectorMethod) - - # Create sub phrases based on first element - # Create subPhrase List of (statDict, subRange) pairs - # This list will be added to by each element - subPhraseList = [] - for stats, subRange in statsByRange: - #print "stats going into statDict", stats - subPhraseList.append(({first.name:stats}, subRange)) - - for subPhrase in subPhraseList: - # Add each additional element to the sub range statDict - statDict, subRange = subPhrase - for elementInfo in elementInfoList[1:]: - name = elementInfo.name - elementInfo.outUnits = self.element_outUnits(tree, subPhrase, name, name) - #print "Getting sub stats", elementInfo - stats = tree.stats.get( elementInfo.name, subRange, areaLabel, - 
elementInfo.statLabel, elementInfo.mergeMethod) - # Add to subPhrase statDict for each subPhrase - statDict[name] = stats - - # Make SubPhrase children - subPhraseMethods = phrase.get("subPhraseMethods") - childList = [] - #print "subPhraseList", subPhraseList - for statDict, subRange in subPhraseList: - subPhrase = tree.makeNode([], subPhraseMethods, phrase) - #print "statDict", statDict - subPhrase.set("statDict", statDict) - subPhrase.set("timeRange", subRange) - subPhrase.set("changeFlag", 0) - subPhrase.set("elementName", elementName) - childList.append(subPhrase) - - #print "Setting childList" - phrase.set("firstElement", first) - phrase.set("elementInfoList", elementInfoList) - phrase.set("elementName", elementName) - phrase.set("childList", childList) - if childList == []: - self.setWords(phrase, "") - - #print "AFTER SET-UP" - #if elementName == "Wx": - # print phrase.printNode(phrase) - return self.DONE() - - def makeEmptySubPhrase(self, tree, phrase, firstElement): - phrase.set("emptyPhrase", 1) - phrase.set("firstElement", firstElement) - phrase.set("connectorMethod", None) - subPhraseMethods = phrase.get("subPhraseMethods") - subPhrase = tree.makeNode([], subPhraseMethods, phrase) - if firstElement is None: - phrase.set("elementInfoList", []) - phrase.set("elementName", None) - subPhrase.set("elementName", None) - subPhrase.set("statDict", {}) - else: - phrase.set("elementInfoList", [firstElement]) - phrase.set("elementName", firstElement.name) - subPhrase.set("elementName", firstElement.name) - subPhrase.set("statDict", {firstElement.name:None}) - subPhrase.set("timeRange", phrase.getTimeRange()) - subPhrase.set("changeFlag", 0) - phrase.set("childList", [subPhrase]) - - - ### Checking for differences between sub-phrases - def checkForDifferences(self, tree, node, elementInfo, magOnly=0, dirOnly=0): - # Return 1 if there are differences among the subPhrase values - # for the given element. - # If VECTOR and magOnly==1, only the magnitude is checked. 
- # If VECTOR and dirOnly==1, only the direction is checked. - # If no data, return 1 as well - elementName = elementInfo.name - dataType = elementInfo.dataType - statList = self.getSubStats(node, elementName) - if len(statList) > 1: - # Check each subphrase against the first - # Return when a difference is found - if dataType == self.SCALAR(): - if statList[0] is None: - return 1 - value = self.getValue(statList[0], "MinMax") - min1, max1 = value - for statVal in statList[1:]: - if statVal is None: - return 1 - statVal = self.getValue(statVal, "MinMax") - min2, max2 = statVal - differenceFlag = self.checkScalarDifference( - tree, node, elementName, min1, max1, min2, max2) - if differenceFlag: - return 1 - return 0 - if dataType == self.VECTOR(): - if statList[0] is None: - return 1 - mag, dir1 = self.getValue(statList[0], "MinMax", self.VECTOR()) - min1, max1 = mag - for stats in statList[1:]: - if stats is None: - return 1 - statMag, dir2 = self.getValue(stats, "MinMax", self.VECTOR()) - min2, max2 = statMag - differenceFlag = self.checkVectorDifference( - tree, node, elementName, min1, max1, dir1, min2, max2, dir2, magOnly, dirOnly) - if differenceFlag: - return 1 - return 0 - if dataType == self.WEATHER() or dataType == self.DISCRETE(): - wx = statList[0] - for wxVal in statList[1:]: - if wxVal is None or wx is None: - return 1 - if wxVal != wx: - return 1 - return 0 - - def checkScalarDifference(self, tree, node, elementName, min1, max1, min2, max2): - # Return 1 if the min/max pairs show a difference - # First see if both are below null threshold - threshold = self.null_nlValue(tree, node, elementName, elementName) - threshold1 = self.nlValue(threshold, max1) - threshold2 = self.nlValue(threshold, max2) - if max1 < threshold1 and max2 < threshold2: - return 0 - # See if only one is below null threshold - if self.null_alwaysDistinct_flag(tree, node, elementName, elementName): - if max1 < threshold1 or max2 < threshold2: - return 1 - # If one set of min/max has 
only one value, - # and that value matches the min or - # max of the other set, show no difference. - if min1 == max1 and (min1==min2 or max1==max2): - return 0 - if min2 == max2 and (min1==min2 or max1==max2): - return 0 - # Compare mins and compare maxs - diff_nlValue = self.scalar_difference_nlValue(tree, node, elementName, elementName) - diff_min = self.nlValue(diff_nlValue, min(min1, min2)) - diff_max = self.nlValue(diff_nlValue, max(max1, max2)) - if abs(min1-min2) < diff_min and abs(max1-max2) < diff_max: - return 0 - return 1 - - def checkVectorDifference(self, tree, node, elementName, - min1, max1, dir1, min2, max2, dir2, magOnly=0, dirOnly=0): - # Return 1 if the min/max/dir pairs show a difference - #print "Checking", elementName, min1, max2, dir1, min2, max2, dir2, magOnly - if magOnly == 0 or dirOnly == 1: - # DR_18632 -# if self.direction_difference(dir1, dir2) >= self.vector_dir_difference( -# tree, node, elementName, elementName): -# return 1 - diff = self.direction_difference(dir1, dir2) - nlValue_dict = self.vector_dir_difference_nlValue( - tree, node, elementName, elementName) - threshold_min = self.nlValue(nlValue_dict, min(min1, min2)) - threshold_max = self.nlValue(nlValue_dict, max(max1, max2)) - if diff >= min(threshold_min, threshold_max): - return 1 - if dirOnly == 1: - return 0 - - # Check magnitude - # Compare mins and maxs - - # Add special check for marine wording: - # This will prevent: - # NORTHWEST GALES TO 35 KNOTS RISING TO GALES TO 35 KNOTS AFTER MIDNIGHT. - # And will facilitate: - # "N WINDS 30 KT IN THE MORNING INCREASING TO - # GALES TO 35 KT EARLY IN THE AFTERNOON, THEN - # EASING TO 30 KT LATE IN THE AFTERNOON." 
- if elementName == "Wind": - if self.marine_wind_combining_flag(tree, node): - if max1 > 30 or max2 > 30: - # Check for both within the same warning thresholds - warnThreshold1 = self.getWarnThreshold(max1) - warnThreshold2 = self.getWarnThreshold(max2) - if warnThreshold1 == warnThreshold2: - return 0 - else: - return 1 - - # First see if both are below null threshold - threshold = self.null_nlValue(tree, node, elementName, elementName) - threshold1 = self.nlValue(threshold, max1) - threshold2 = self.nlValue(threshold, max2) - if max1 < threshold1 and max2 < threshold2: - return 0 - # See if only one is below null threshold - if self.null_alwaysDistinct_flag(tree, node, elementName, elementName): - if max1 < threshold1 or max2 < threshold2: - return 1 - # If one set of min/max has only one value, - # and that value matches the min or - # max of the other set, show no difference. - if min1 == max1 and (min1==min2 or max1==max2): - return 0 - if min2 == max2 and (min1==min2 or max1==max2): - return 0 - # Check for magnitude differences - mag_nlValue = self.vector_mag_difference_nlValue( - tree, node, elementName, elementName) - mag_diff_min = self.nlValue(mag_nlValue, min(min1, min2)) - mag_diff_max = self.nlValue(mag_nlValue, max(max1, max2)) - if abs(min1-min2) >= mag_diff_min or abs(max1-max2) >= mag_diff_max: - return 1 - return 0 - - def getWarnThreshold(self, max): - if max >= 65: - return 3 - elif max > 45: - return 2 - elif max > 30: - return 1 - else: - return 0 - - def maskSubkeys(self, subkeyList, intensity=None): - # Make a new weather key masking the given intensity with the given value - if intensity is not None: - newkeyList = [] - for subkey in subkeyList: - newSubkey = WeatherSubKey.weatherSubKey(self._argDict['site'], subkey.coverage(), subkey.wxType(), intensity, - subkey.visibility(), subkey.attributes()) - newkeyList.append(newSubkey) - subkeyList = newkeyList - return subkeyList - - def checkWeatherSimilarity(self, tree, node, rankList1, 
rankList2, - node1=None, node2=None, tr1=None, tr2=None, - al1=None, al2=None): - ### FIXES BUG BY FORCING ATTRIBUTES CHECK WHEN CHECKING FOR SIMILAR WX - # Return 0 if the two sets of subkeys in the rankLists are significantly - # different - # If the keys can be considered similar: - # Return 1 if the first set of keys presides - # Return 2 if the second set of keys presides - # Return a new aggregated rankList if there are multiple subkeys in the - # rankLists AND they are similar. - # - # Optional nodes and time ranges may be supplied. These are used for - # accessing PoP stats. All are necessary since "similar_diurnal" does - # comparisons for various time ranges and local effects does comparisons - # for various areas. - - #print "\nCheckWxSimilarity" - - # The ranks are available, but not currently used - stats1 = self.getSubkeys(rankList1) - stats2 = self.getSubkeys(rankList2) - # Sort for comparison - stats1.sort(self.rankedSortOrder) - stats2.sort(self.rankedSortOrder) - - diff = [] - for element in stats1: - test = 1 - for el in stats2: - if str(element) == str(el): - test = 0 - if test and str(element) not in diff: - diff.append(str(element)) - for element in stats2: - test = 1 - for el in stats1: - if str(element) == str(el): - test = 0 - if test and str(element) not in diff: - diff.append(str(element)) - if len(diff) == 0: - return 1 - - if stats1 == stats2: - #print 'checkWx return 1' - return 1 - - # Check for equal length of statistics - if len(stats1) == len(stats2): - # If there is only one subkey to worry about - if len(stats1) == 1: - # If the types, intensities, and coverages are similar - if self.similarWxTypes(tree, node, stats1[0], stats2[0]): - if self.similarIntensities(tree, node, stats1[0], stats2[0]): - if self.similarAttributes(tree, node, stats1[0], stats2[0]): - flag = self.similarCoverages(tree, node, stats1[0], stats2[0]) - if flag > 0: - #print "returning flag", flag - return flag - else: - # Different wxTypes are not similar - 
#print "returning diff wxTypes 0" - return 0 - # Node can turn off this check. - # Some phrases (severeWeather_phrase, heavyPrecip_phrase, heavyRain_phrase) - # are checking intensities, so we don't want to loose them - if node.getAncestor("noIntensityCombining") != 1: - # Make new subkeys that all have the same intensity - stats1 = self.maskSubkeys(stats1, intensity="-") - stats2 = self.maskSubkeys(stats2, intensity="-") - if stats1 == stats2: - #print 'checkWx return 1' - return 1 - # Handle case of len(stats) > 1 - if len(stats1) > 1: - return self.checkSubkeysSimilarity( - tree, node, rankList1, rankList2, node1, node2, tr1, tr2, al1, al2) - - # Check the PoP. - # If low for both time periods and areas - # AND there is no non-precip Wx - # then we can assume the Wx is the same - if node1 is None: - node1 = node - if node2 is None: - node2 = node - if tr1 is None: - tr1 = node1.getTimeRange() - if tr2 is None: - tr2 = node2.getTimeRange() - if al1 is None: - al1 = node1.getAreaLabel() - if al2 is None: - al2 = node2.getAreaLabel() - popstats1 = self.matchToWx(tree, node1, "PoP", tr1, al1) - popstats2 = self.matchToWx(tree, node2, "PoP", tr2, al2) - #print "popstats", popstats1, popstats2 - if popstats1 < self.pop_wx_lower_threshold(tree, node1) and \ - popstats2 < self.pop_wx_lower_threshold(tree, node2): - for subkey in stats1: - if not self.pop_related_flag(tree, node1, subkey): - return 0 - for subkey in stats2: - if not self.pop_related_flag(tree, node2, subkey): - return 0 - return 1 - #print 'checkWx return 0' - return 0 - - def checkSubkeysSimilarity(self, tree, node, rankList1, rankList2, - node1, node2, tr1, tr2, al1, al2): - # Return 0 if the two sets of subkeys in the rankLists are significantly - # different - # Otherwise, return a new rankList of the combined subkeys and ranks - # sorted in rank order - # - # We combine if: - # The set of wxTypes in rankList1 is equal to the set of wxTypes in rankList2 AND - # Each wxType individually can be 
combined i.e. they have similar coverages - #print "\nCheckSubkeysSimilarity" - - # Sort ranklists by wxType - list1 = self.removeNoWx(rankList1) - list2 = self.removeNoWx(rankList2) - list1.sort(self.rankedWxTypeOrder) - list2.sort(self.rankedWxTypeOrder) - - #print "rankList1, rankList2", rankList1, rankList2 - - newRankList = [] - for i in range(len(list1)): - subkey1, rank1 = list1[i] - wxType1 = subkey1.wxType() - subkey2, rank2 = list2[i] - wxType2 = subkey2.wxType() - if not wxType1 == wxType2: - # We cannot combine - return 0 - # See of the wxTypes have similar coverages - similarFlag = self.checkWeatherSimilarity( - tree, node, [list1[i]],[list2[i]], node1, node2, tr1, tr2, al1, al2) - if similarFlag == 0: - return 0 - newRank = int((rank1 + rank2)/2.0) - newSubkey = self.makeAggregateSubkey(subkey1, rank1, subkey2, rank2) - newRankList.append((newSubkey, newRank)) -## if similarFlag == 1: -## newRankList.append((subkey1, newRank)) -## else: -## newRankList.append((subkey2, newRank)) - # Sort newRankList - newRankList.sort(self.rankedSortOrder) - #print "returning", newRankList - return newRankList - - def removeNoWx(self, rankList): - newList = [] - for subkey, rank in rankList: - if subkey.wxType() == "": - continue - newList.append((subkey, rank)) - return newList - - def similarWxTypes(self, tree, node, subkey1, subkey2): - # If wxTypes should be similar, return 1 - # else return 0 - wxType1 = subkey1.wxType() - wxType2 = subkey2.wxType() - inten1 = subkey1.intensity() - inten2 = subkey2.intensity() - # Take care of sprinkles and flurries - if wxType1 == wxType2 and wxType1 in ["RW", "SW"]: - if inten1 != inten2 and (inten1 == "--" or inten2 == "--"): - return 0 - if wxType1 == wxType2: - return 1 - return 0 - - def similarIntensities(self, tree, node, subkey1, subkey2): - intenList = ['', '-', 'm'] - # If intensities are close enough - inten1 = subkey1.intensity() - inten2 = subkey2.intensity() - if (inten1==inten2 or - (inten1 in intenList and inten2 
in intenList)): - return 1 - return 0 - - def similarCoverages(self, tree, node, subkey1, subkey2): - # Return 0 if coverages of subkey1 and subkey2 are significantly - # different - # Return 1 if coverages are similar and the coverage of subkey1 - # is dominant. - # Return 2 if coverages are similar and the coverage of subkey2 - # is dominant. - cov1 = subkey1.coverage() - cov2 = subkey2.coverage() - for coverageList in self.similarCoverageLists(tree, node, subkey1, subkey2): - if (cov1 in coverageList and cov2 in coverageList): - index1 = coverageList.index(cov1) - index2 = coverageList.index(cov2) - if index1 >= index2: - #print 'checkWx return 1 - use subkey1' - return 1 - else: - #print 'checkWx return 2 - use subkey2' - return 2 - return 0 - - def similarAttributeLists(self): - # Lists weather attributes that can be combined or considered equal. - # These lists are examined when producing rankLists, - # combining sub-phrases, and - # determining if there is a local effect to report. - # Used by - # PhraseBuilder:checkWeatherSimilarity - # SampleAnalysis: getDominantValues - return [ - ["DmgW", "GW"], - ["LgA", "SmA"], - ] - - def similarAttributes(self, tree, node, subkey1, subkey2): - # If weather attributes are similar, return 1; otherwise return 0 - attrs1 = subkey1.attributes() - attrs2 = subkey2.attributes() - attrs1 = self.removeSpecialAttributes(attrs1) - attrs2 = self.removeSpecialAttributes(attrs2) - attrs1.sort() - attrs2.sort() - - # If the lists are equal, they are similar. 
- if attrs1 == attrs2: - return True - - # Otherwise, check that each attribute for subkey1 matches - # an attribute for subkey2 and vice versa - if self.matchAttrs(attrs1, attrs2) and self.matchAttrs(attrs2, attrs1): - return True - else: - return False - - def removeSpecialAttributes(self, attrs): - rv = [] - for attr in attrs: - if attr not in ["MX", "OR", "Mention", "Primary"]: - rv.append(attr) - return rv - - def matchAttrs(self, attrs1, attrs2): - for attr1 in attrs1: - if not self.checkAttrs(attr1, attrs2): - return False - return True - - def checkAttrs(self, attr1, attrs2): - # Check to see if there is a match for attr1 in attrs2 - # A "match" is equality OR there exists an attr2 in attrs2 - # such that both attr1 and attr2 are in one of the similarAttrsLists - # E.g., "GW" is attr1, "DmgW" is attr2 and attrList is ["GW","DmgW"] - if attr1 in attrs2: - return True - for attrList in self.similarAttributeLists(): - if attr1 in attrList: - for attr2 in attrs2: - if attr2 in attrList: - return True - return False - - - # Consolidation - def consolidatePhrase(self, tree, phrase): - # See if ready to process - if not self.phrase_trigger(tree, phrase, setUpOnly=1): - return - # Separate out primary elements that are constant throughout phrase - elementInfoList = phrase.get("elementInfoList") - if elementInfoList is None or len(elementInfoList) <= 1: - return self.DONE() - subPhrases = phrase.get("childList") - if len(subPhrases) <= 1: - return self.DONE() - first = 1 - constants = [] - nonConstants = [] - constantFirst = 0 - disabled = phrase.get("disabledElements", []) - if disabled is None: - disabled = [] - for elementInfo in elementInfoList: - if elementInfo.name in disabled: - continue - if elementInfo.primary: - # Primary elements - diffFlag = self.checkForDifferences(tree, phrase, elementInfo) - #print "element", elementInfo.name, diffFlag - if diffFlag == 0: - constants.append(elementInfo) - # If first element is in constant list, - # make that the first 
phrase when split - # else, make it the second phrase - if first: - constantFirst = 1 - else: - nonConstants.append(elementInfo) - else: - # Secondary elements remain with first element - if constantFirst: - constants.append(elementInfo) - else: - nonConstants.append(elementInfo) - first = 0 - # Split off elements that are not in the same list as the first element - #print "Constants", constantFirst - #for eleInfo in constants: - # print eleInfo.name - #print "NonConstants" - #for eleInfo in nonConstants: - # print eleInfo.name - if constantFirst: - splitElements = nonConstants - curElements = constants - else: - splitElements = constants - curElements = nonConstants - length = len(splitElements) - if length > 0 and length < len(elementInfoList): - self.splitPhrase(tree, phrase, curElements, splitElements) - return self.DONE() - - def splitPhrase(self, tree, phrase, curElements, splitElements): - # For each element in splitElements e.g. Swell2 or WindGust: - # set the current phrase disabledElements - # (to turn them off for the original phrase) - # add a new phrase for the split element - # For each element left in the current phrase e.g. 
Swell or Wind, - # set the new phrase disabled elements - # (to turn them off in the new phrase) - # - disabledElements = [] - newDis = [] - for elementInfo in curElements: - newDis.append(elementInfo.name) - for elementInfo in splitElements: - disabledElements.append(elementInfo.name) - newPhrase = tree.addPhraseDef(phrase, elementInfo.phraseDef) - newPhrase.set("disabledElements", newDis) - currentNone = phrase.getAncestor("disabledElements") - if currentNone is not None: - phrase.set("disabledElements", disabledElements + currentNone) - else: - phrase.set("disabledElements", disabledElements) - for key in ["spawnedWxPhrases", "conjunctiveQualifier", - "embeddedQualifier", "localEffect", "localEffectsList", - "firstElement", "elementName", "elementInfoList"]: - #"descriptor", "indentLabel"]: - newPhrase.set(key, phrase.get(key)) - #print "\nSplitPhrase: New phrase", newPhrase, newDis - #print "Current phrase disabled", phrase, phrase.get("disabledElements") - - def consolidateDirection(self, tree, phrase): - # See if ready to process - if not self.phrase_trigger(tree, phrase, setUpOnly=1): - return - # If vector direction is progressive and mags are similar, - - # use only first and last subPhrases - elementInfoList = phrase.get("elementInfoList") - if elementInfoList is None or len(elementInfoList) < 1: - return self.DONE() - subPhrases = phrase.get("childList") - if len(subPhrases) <= 1: - return self.DONE() - firstElement = phrase.get("firstElement") - diffFlag = self.checkForDifferences(tree, phrase, firstElement, magOnly=1) - if diffFlag == 0: - vectorStats = self.getSubStats(phrase, firstElement.name) - dirList = [] - for mag, dir in vectorStats: - dirList.append(dir) - progression = self.checkProgression(dirList) - if progression: - childList = phrase.get("childList") - new = [] - new.append(childList[0]) - new.append(childList[len(childList)-1]) - phrase.set("childList", new) - return self.DONE() - - def consolidateTrends(self, tree, phrase): - # See if 
we need to ignore this method - if self.ignoreTrends(tree, phrase): - return self.DONE() - # See if ready to process - if not self.phrase_trigger(tree, phrase, setUpOnly=1): - return - # If there is a progression of magnitudes, - # use only first and last subPhrases with no time descriptor - elementInfoList = phrase.get("elementInfoList") - if elementInfoList is None or len(elementInfoList) < 1: - return self.DONE() - subPhrases = phrase.get("childList") - if len(subPhrases) <= 2: - return self.DONE() - firstElement = phrase.get("firstElement") - # If Vector, make sure directions are the same - diffFlag = 0 - dataType = firstElement.dataType - if dataType == self.VECTOR(): - diffFlag = self.checkForDifferences(tree, phrase, firstElement, dirOnly=1) - # Check for an increasing or decreasing magnitude progression - if diffFlag == 0: - statList = self.getSubStats(phrase, firstElement.name) - trend = self.checkTrend(statList, dataType) - # If trend, take first and last children only - if trend: - childList = phrase.get("childList") - new = [] - new.append(childList[0]) - new.append(childList[len(childList)-1]) - phrase.set("childList", new) - # Turn off time descriptors - phrase.set("noTimeDescriptors", 1) - return self.DONE() - - def ignoreTrends(self, tree, node): - if node.get('name') in ["windChill_phrase", "windBased_windChill_phrase", - "heatIndex_phrase", "apparentT_phrase"]: - return 1 - else: - return 0 - - def checkProgression(self, dirList): - # make a list of differences - diffList = [] - for i in range(1, len(dirList)): - diff = dirList[i] - dirList[i-1] # calc difference - # normalize the difference to remove the 359 -> 0 effect - if diff > 180: - diff = diff - 360 - elif diff < -180: - diff = diff + 360 - diffList.append(diff) - - minVal = min(diffList) - maxVal = max(diffList) - - # any diffs >= 90 not allowed - if maxVal >= 90 or minVal <= -90: - return 0 - - # see if all the diff are of the same sign, if not return 0 - if minVal * maxVal < 0: - return 
0 - else: - return 1 - - def checkTrend(self, statList, dataType): - # check to see if increasing/decreasing values - lastMax = None - trend = None - for stats in statList: - if dataType == self.VECTOR(): - stats, dir = stats - min, max = self.getValue(stats, "MinMax") - if lastMax is None: - lastMax = max - elif trend is None: - trend = lastMax > max - lastMax = max - else: - # Test for an decreasing trend - if trend == 1 and lastMax > max: - lastMax = max - continue - # Test for an increasing trend - if trend == 0 and lastMax <= max: - lastMax = max - continue - return 0 - return 1 - - def chooseMostImportant(self, tree, phrase): - # If there is more than 1 sub-phrase AND mostImportant_dict - # is set, report only the "Min" or "Max" sub-phrase using - # the "mostImportant_descriptor" - if not self.phrase_trigger(tree, phrase, setUpOnly=1): - return - elementInfoList = phrase.get("elementInfoList") - if elementInfoList is None or len(elementInfoList) < 1: - return self.DONE() - subPhrases = phrase.get("childList") - if len(subPhrases) <= 1: - return self.DONE() - elementName = phrase.get("elementName") - mostImportant = self.mostImportant(tree, phrase, elementName, elementName) - if mostImportant is None: - return self.DONE() - - # Find the index of the sub-phrase with the Min or Max value - firstElement = phrase.get("firstElement") - statList = self.getSubStats(phrase, elementName) - dataType = firstElement.dataType - for i in range(len(statList)): - if dataType == self.VECTOR(): - stats, dir = statList[i] - else: - stats = statList[i] - min, max = self.getValue(stats, "MinMax") - if i == 0: - if mostImportant == "Min": - importantVal = min - else: - importantVal = max - importantIndex = 0 - else: - if mostImportant == "Min": - if min < importantVal: - importantVal = min - importantIndex = i - else: - if max > importantVal: - importantVal = max - importantIndex = i - # Null out the other sub-phrases - for i in range(len(subPhrases)): - if i != importantIndex: - 
statDict = subPhrases[i].getStatDict() - statDict[elementName] = None - #print "subPhrase", subPhrases[i].getTimeRange() - # Set up the mostImportant_descriptor - descriptor = self.mostImportant_descriptor( - tree, phrase, elementName, elementName) - if descriptor is not None: - phrase.set("descriptor", descriptor) - return self.DONE() - - def getSubStats(self, phrase, elementName): - # Return a list of stats for the subPhrases - statList = [] - for subPhrase in phrase.childList: - statDict = subPhrase.getStatDict() - if statDict is None: - continue - statList.append(statDict[elementName]) - return statList - - def splitWxPhrase(self, tree, node, disabledSubkeys1, disabledSubkeys2, doneList, - newPhraseDef=None): - # Set disableSubkeys1 for original node - # Create a new node with disabledSubkeys2 - # Set new phrase doneList using "doneList" - # Add to new phrase according to newPhraseDef if provided, - # otherwise, duplicate current node - # Make sure to propagate "disabledSubkeys" and "spawnedWxPhrases" - # from the original node to the new node - #print "\nSplit Wx Phrase: original node", node.get("name"), node.getAreaLabel() - #print " ", node - #import traceback - #traceback.print_stack(limit=3) - #print " localEffect", node.get('localEffect') - #print " parent", node.parent - #tree.printNode(node) - disabled = node.getAncestor("disabledSubkeys") - if disabled is None: - disabled = [] - disabledSubkeys1 = disabledSubkeys1 + disabled - node.set("disabledSubkeys", disabledSubkeys1) - if newPhraseDef is None: - newPhrase = tree.addPhrase(node) - disabledSubkeys2 = disabledSubkeys2 + disabled - else: - newPhrase = tree.addPhraseDef(node, newPhraseDef) - newPhrase.set("disabledSubkeys", disabledSubkeys2) - newPhrase.set("doneList", doneList) - #print " disabled", node.get("disabledSubkeys") - #print "new node", newPhrase, newPhrase.get("name") - #print " parent", newPhrase.parent - for key in ["spawnedWxPhrases", "conjunctiveQualifier", - "embeddedQualifier", 
"localEffect", "localEffectsList", - "firstElement", "elementName", "elementInfoList"]: - #"descriptor", "indentLabel"]: - #print " setting ", key, node.get(key) - newPhrase.set(key, node.get(key)) - #print " ", newPhrase.getAreaLabel() - #print " disabled", newPhrase.get("disabledSubkeys") - #tree.printNode(newPhrase) - return newPhrase - - ## Combining - ## If you want to alter the combining criteria, - ## override starred (**) methods - ## - ## combinePhraseStats (phrase level) - ## combineWords (phrase level) - ## combineComponentStats (tree level) - ## combineWords (any level) - - ## combineChildren -- loops through child nodes - ## (tree, node, combineMethod) - - ## **combineScalars (subPhrase1, subPhrase2) - ## **combineVectors (subPhrase1, subPhrase2) - ## **combineWx (subPhrase1, subPhrase2) - ## **combineComponents (component1, component2) - - ## combine2SubPhrases (tree, phrase, subPhrase1, subPhrase2) - ## combine2Components (tree, tree, component1, component2) - - ## combineChildWords (any node with children that has words) - ## combine2Children (tree, node, child1, child2) - - def combinePhraseStats(self, tree, phrase): - # See if ready to process - if not self.phrase_trigger(tree, phrase, setUpOnly=1): - return - return self.combineChildren(tree, phrase, self.combineStats) - #print "before combine ", phrase.get("elementName"), len(phrase.get("childList")) - #result = self.combineChildren(tree, phrase, self.combineStats) - #print "after combine ", phrase.get("elementName"), len(phrase.get("childList")) - #return result - - def recallCombinePhraseStats(self, tree, phrase): - # This is needed because we want to call combinePhraseStats twice in some phrases. - # If we simply put in two calls to a method, it gets put on the "doneList" with - # the first call and is not called again. 
- # See if ready to process - if not self.phrase_trigger(tree, phrase, setUpOnly=1): - return - return self.combinePhraseStats(tree, phrase) - - def combineComponentStats(self, tree, node): - return self.combineChildren(tree, node, self.combineComponents) - - def combineWords(self, tree, node): - # Check for data - if not self.phrase_trigger(tree, node): - return - children = node.get("childList") - if len(children) <= 1: - return self.DONE() - return self.combineChildren(tree, node, self.combineChildWords) - - def combineChildren(self, tree, node, combineMethod): - # Combine similar nodes if possible - length = len(node.childList) - if length <= 1: - return self.DONE() - index = 1 - while index < len(node.childList): - # Try to combine with previous subPhrase - combineFlag, combinedChild = combineMethod( - tree, node, node.childList[index-1], node.childList[index]) - if combineFlag: - # Reset childList for phrase - node.childList[index-1] = combinedChild - del node.childList[index] - else: - index = index + 1 - return self.DONE() - - def combineComponents(self, tree, node, component1, component2): - # Criteria to set combine_flag - - #print "\nTrying to combine" - # Don't combine components with different names since their - # analysis lists could be different. - comp1Name = component1.get("name") - comp2Name = component2.get("name") - if comp1Name != comp2Name: - #print "Different component names", comp1Name, comp2Name - return 0, None - - # Make sure we don't combine periods any earlier than we should - noCombineUntil = self.periodCombining_startHour(tree, node) - if self.hoursPastProductStart(tree, component1) <= noCombineUntil: - return 0, None - - ## call the "similar" methods to see if each element is roughly - ## the same. Any element that is not similar will cause the - ## combine flag to evaluate to 0 or false. 
- elements = self.periodCombining_elementList(tree, component1) - combine_flag = 1 - for element in elements: - #print "trying to combine", element - exec "combine_flag = combine_flag and self.similar"+element+\ - "(tree, component1, component2)" - #print "result", combine_flag - - if combine_flag: - #print "combining" - newComp = self.combine2Components(tree, tree, component1, component2) - return 1, newComp - return 0, None - - def similarWind(self, tree, comp1, comp2): - # Returns true if the wind stats are similar - # Also, return true (combine) if past the first 5 period since - # wind is not reported in these periods - - # these numbers determine if components are close enough to combine - magThreshold = 10 - dirThreshold = 45 - - al1 = comp1.getAreaLabel() - al2 = comp2.getAreaLabel() - tr1 = comp1.getTimeRange() - tr2 = comp2.getTimeRange() - stats1 = tree.stats.get("Wind", tr1, al1, mergeMethod = "Average") - stats2 = tree.stats.get("Wind", tr2, al2, mergeMethod = "Average") - - # If past the first 5 periods, return 1 (combine) - hours = self.hoursPastProductStart(tree, comp1) - if hours >= 5*12: - return 1 - - # check for none - if stats1 is None or stats2 is None: - return 0 - - mag1 = stats1[0] - mag2 = stats2[0] - dir1 = stats1[1] - dir2 = stats2[1] - # calculate the differences, mag and dir - magDiff = abs(mag1 - mag2) - dirDiff = abs(dir1 - dir2) - - # account for the 360 to 0 problem - if dirDiff > 180: - dirDiff = abs(dirDiff - 360.0) - - if magDiff <= magThreshold and dirDiff <= dirThreshold: - return 1 - - return 0 - - def hoursPastProductStart(self, tree, node): - # Compute the hours past the product start time (prodTR) - # that the current time range (curTR) starts. - # If the prodTR is not a multiple of 12, then it is either - # --an update and the first period is less than 12 hours, or - # --a pre-first period issuance. - # In these case, we return the hours past the product start - # as if the first period was a full 12-hour period. 
- # For example, - # A morning update issuance starting at 10 am would - # have an hoursPastProductStart for the first period - # of 4 hours. - # A pre-first period issuance starting at 4 am would - # have an hoursPastProductStart for the first period - # of -2 hours. - prodTR = tree.getTimeRange() - curTR = node.getTimeRange() - prodHours = prodTR.duration()/3600 - prodMod = prodHours%12 - if prodMod > 0: - try: - # check for 'pre-first period issuances' - period1Hours = self._issuanceInfo.period1TimeRange().duration()/3600 - if period1Hours > 12: - adjustHours = prodMod - else: - adjustHours = -(12-prodMod) - except: - adjustHours = 0 - else: - adjustHours = 0 - prodStart = prodTR.startTime() + adjustHours*3600 - return (curTR.startTime() - prodStart)/3600 - - def similarSky(self, tree, comp1, comp2): - # Returns true if sky stats are similar - # Necessary because of the override to sky_valueList above - al1 = comp1.getAreaLabel() - al2 = comp2.getAreaLabel() - tr1 = comp1.getTimeRange() - tr2 = comp2.getTimeRange() - return self.similarSkyLogic(tree, comp1, comp2, tr1, al1, tr2, al2) - - def similarWx(self, tree, comp1, comp2): - # Returns true if wx stats are similar - al1 = comp1.getAreaLabel() - al2 = comp2.getAreaLabel() - tr1 = comp1.getTimeRange() - tr2 = comp2.getTimeRange() - return self.similarWxLogic(tree, comp1, comp2, tr1, al1, tr2, al2) - - def similarPoP(self, tree, comp1, comp2): - # returns true if PoP stats are similar - stats1 = self.matchToWx(tree, comp1, "PoP") - stats2 = self.matchToWx(tree, comp2, "PoP") - - if stats1 is None and stats2 is None: - return 1 - - # check for none - #if stats1 is None or stats2 is None: - # return 0 - - if stats1 == stats2: - return 1 - - if stats1 < self.pop_lower_threshold(tree, comp1) and \ - stats2 < self.pop_lower_threshold(tree, comp2): - return 1 - - if stats1 > self.pop_upper_threshold(tree, comp1) and \ - stats2 > self.pop_upper_threshold(tree, comp2): - return 1 - - return 0 - - - ## Submitted by 
Brian Walawender 3/05 - ## The problem with combining long time periods, is that the - ## combined period is growing 12 hours at a time. If you get rid of the bleed - ## over grids for MinT and MaxT (SampleAnalysis temporalCoverage_hours_dict), - ## then you start returning None for either MaxT or MinT during these 12 hour periods. - ## To combat this, I check the duration of tr1 and tr2. - ## If it is 12 or less then I check to see if it is day or night. - ## For MaxT, it will return a combine if the period is 12 hours or less and it is a - ## nighttime period. - ## For MinT, it will return a combine if the period is 12 hours - ## or less and it is a daytime period. This allowed long periods to be grouped - ## together without bleed over. - - def similarMaxT(self, tree, comp1, comp2): - # returns true if temp stats are similar - - # this number determines if components are close enough to combine - tempThreshold = 5 # degrees - - al1 = comp1.getAreaLabel() - al2 = comp2.getAreaLabel() - tr1 = comp1.getTimeRange() - tr2 = comp2.getTimeRange() - - hours = (tr2.endTime()-tr1.startTime())/3600 - if hours <= 24: - return 1 - - if (tr1.duration()/3600) <= 12: - dayNight = self.getPeriod(tr1, 1) - if dayNight == self.NIGHTTIME(): - return 1 - - if (tr2.duration()/3600) <= 12: - dayNight = self.getPeriod(tr2, 1) - if dayNight == self.NIGHTTIME(): - return 1 - - stats1 = tree.stats.get("MaxT", tr1, al1, mergeMethod = "Average") - stats2 = tree.stats.get("MaxT", tr2, al2, mergeMethod = "Average") - # check for none - if stats1 is None or stats2 is None: - return 0 - - if abs(stats1 - stats2) < tempThreshold: - return 1 - - return 0 - - def similarMinT(self, tree, comp1, comp2): - # returns true if temp stats are similar - - # this number determines if components are close enough to combine - tempThreshold = 5 # degrees - al1 = comp1.getAreaLabel() - al2 = comp2.getAreaLabel() - tr1 = comp1.getTimeRange() - tr2 = comp2.getTimeRange() - hours = 
(tr2.endTime()-tr1.startTime())/3600 - if hours <= 24: - return 1 - - if (tr1.duration()/3600) <= 12: - dayNight = self.getPeriod(tr1, 1) - if dayNight == self.DAYTIME(): - return 1 - - if (tr2.duration()/3600) <= 12: - dayNight = self.getPeriod(tr2, 1) - if dayNight == self.DAYTIME(): - return 1 - - # check for none - stats1 = tree.stats.get("MinT", tr1, al1, mergeMethod = "Average") - stats2 = tree.stats.get("MinT", tr2, al2, mergeMethod = "Average") - - if stats1 is None or stats2 is None: - return 0 - - if abs(stats1 - stats2) < tempThreshold: - return 1 - - return 0 - - - def similarWaveHeight(self, tree, comp1, comp2): - # returns true if seas stats are similar - - # this number dtermines if components are close enough to combine - seaThreshold = 4 # feet - - al1 = comp1.getAreaLabel() - al2 = comp2.getAreaLabel() - tr1 = comp1.getTimeRange() - tr2 = comp2.getTimeRange() - stats1 = tree.stats.get("WaveHeight", tr1, al1, mergeMethod ="Average") - stats2 = tree.stats.get("WaveHeight", tr2, al2, mergeMethod ="Average") - - # check for none - if stats1 is None or stats2 is None: - return 0 - - if stats1 == None or stats2 == None: - return 0 - - if abs(stats1 - stats2) < seaThreshold: - return 1 - return 0 - - def similarDiurnalSkyWx(self, tree, comp1, comp2): - return self.similar_diurnal(tree, comp1, comp2, ["Sky", "Wx"]) - - def similar_diurnal(self, tree, comp1, comp2, elementList): - # Returns true if stats for the given elements are similar - # in the night and morning AND the afternoon and evening. - # NOTE: the night and morning MAY be similar to the afternoon - # and evening, so word methods need to test for this case. - # - # Meant to handle the case of clouds and fog in the - # night and morning clearing in the afternoon and - # evening. - # Assumes comp2 is a 12-hour period. 
- - #print "similar_diurnal" - al1 = comp1.getAreaLabel() - al2 = comp2.getAreaLabel() - comp1TR = comp1.getTimeRange() - comp2TR = comp2.getTimeRange() - # comp2 morning, comp2 afternoon OR - # comp2 evening, comp2 night - c2tr1, c2tr2 = self.divideRange(comp2TR,6) - comparisons = [] - if comp1TR.duration() == 12*3600: - # Compare comp1 night to comp2 morning - # and comp1 evening to comp2 afternoon - # OR comp1 afternoon to comp2 evening - # and comp1 morning to comp2 night - c1tr1, c1tr2 = self.divideRange(comp1TR,6) - comparisons.append((c1tr1, c2tr2)) - comparisons.append((c1tr2, c2tr1)) - else: - # We have already combined at least once so - # comp1 is at least 24 hours. Use the most - # recent 24 hours for comparison. - # if comp2 is daytime: - # compare comp1 morning to comp2 morning - # and comp1 afternoon to comp2 afternoon - # else - # compare comp1 evening to comp2 evening - # and comp1 night to comp2 night - subRanges = self.divideRange(comp1TR, 6) - length = len(subRanges)-1 - c1tr1 = subRanges[length-3] - c1tr2 = subRanges[length-2] - comparisons.append((c1tr1, c2tr1)) - comparisons.append((c1tr2, c2tr2)) - - # Do comparisons - wordDict = {} - for element in elementList: - wordDict[element] = [] - #print "\nComparisons" - for tr1, tr2 in comparisons: - for element in elementList: - #print "comparing", tr1, tr2 - #print " ", element - exec "flag = self.similar"+element+\ - "Logic(tree, comp1, comp2, tr1, al1, tr2, al2)" - #print "flag", flag - if not flag: - #print "returning 0" - return 0 - #print "returning 1" - return 1 - - def similarSkyLogic(self, tree, comp1, comp2, tr1, al1, tr2, al2): - stats1 = tree.stats.get("Sky", tr1, al1, mergeMethod ="Average") - stats2 = tree.stats.get("Sky", tr2, al2, mergeMethod ="Average") - # check for none - #print "stats1", stats1 - #print "stats2", stats2 - if stats1 is None or stats2 is None: - return 0 - if stats1 == None or stats2 == None: - return 0 - saveTR1 = comp1.timeRange - saveTR2 = comp2.timeRange - 
comp1.timeRange = tr1 - comp2.timeRange = tr2 - words1 = self.sky_value(tree, comp1, self.getValue(stats1), -1) - words2 = self.sky_value(tree, comp2, self.getValue(stats2), -1) - comp1.timeRange = saveTR1 - comp2.timeRange = saveTR2 - #print "words1, words2", words1, words2 - if words1 == words2: - return 1 - #if words1.find("partly") > -1 and words2.find("partly")> -1: - # return 1 - return 0 - - def similarWxLogic(self, tree, comp1, comp2, tr1, al1, tr2, al2): - # Returns true if wx stats are similar - stats1 = tree.stats.get("Wx", tr1, al1, mergeMethod = "Average") - stats2 = tree.stats.get("Wx", tr2, al2, mergeMethod = "Average") - # check for none - #print "stats1, stats2", stats1, stats2 - if stats1 is None or stats2 is None: - return 0 - stats1 = self.cleanOutNoWx(stats1) - stats2 = self.cleanOutNoWx(stats2) - similarWx = self.checkWeatherSimilarity( - tree, comp1, stats1, stats2, comp1, comp2, tr1, tr2, al1, al2) - #print "similarWx", similarWx - if similarWx == 0: - return 0 - else: - return 1 - - def cleanOutNoWx(self, stats): - # Cleans out NoWx from stats list - if stats is None: - return None - newList = [] - for stat in stats: - if type(stat) is types.TupleType: - subkey, rank = stat - else: - subkey = stat - if subkey.wxType() == "": - continue - newList.append(stat) - return newList - - def combineStats(self, tree, phrase, subPhrase1, subPhrase2): - firstElement = phrase.get("firstElement") - elementName = firstElement.name - dataType = firstElement.dataType - if dataType == self.SCALAR(): - combineFlag, newVal = self.combineScalars( - tree, phrase, subPhrase1, subPhrase2, elementName) - elif dataType == self.VECTOR(): - combineFlag, newVal = self.combineVectors( - tree, phrase, subPhrase1, subPhrase2, elementName) - elif dataType == self.WEATHER(): - combineFlag, newVal = self.combineWeather( - tree, phrase, subPhrase1, subPhrase2, elementName) - elif dataType == self.DISCRETE(): - combineFlag, newVal = self.combineDiscrete( - tree, phrase, 
subPhrase1, subPhrase2, elementName) - if combineFlag: - elementInfoList = phrase.get("elementInfoList") - newSubPhrase = self.combine2SubPhrases( - tree, phrase, subPhrase1, subPhrase2, elementInfoList, newVal) - return 1, newSubPhrase - else: - return 0, None - - def combineScalars(self, tree, node, subPhrase1, subPhrase2, elementName): - min1, max1 = self.getScalarData(tree, subPhrase1, elementName, "MinMax") - min2, max2 = self.getScalarData(tree, subPhrase2, elementName, "MinMax") - #print "combining", min1, max1, min2, max2 - if min1 is None and max1 is None and min2 is None and max2 is None: - return 1, None - if min1 is None or max1 is None or min2 is None or max2 is None: - return 0, None - - differenceFlag = self.checkScalarDifference( - tree, subPhrase1, elementName, min1, max1, min2, max2) - if differenceFlag == 0: - combine_singleValues = self.combine_singleValues_flag( - tree, subPhrase1, elementName, elementName) - if combine_singleValues == 1: - newValue = self.average(min(min1, min2), max(max1, max2)) - newValue = self.roundStatistic(tree, subPhrase1, newValue, elementName) - else: - # Combine using mins and maxs to catch slow trends - min1 = self.roundStatistic(tree, subPhrase1, min(min1, min2), elementName) - max1 = self.roundStatistic(tree, subPhrase1, max(max1, max2), elementName) - min1, max1 = self.applyRanges(tree, node, min1, max1, elementName) - newValue = (min1, max1) - #print "combined" - return 1, newValue - #print "not combined" - return 0, None - - def combineVectors(self, tree, phrase, subPhrase1, subPhrase2, elementName): - mag1, dir1, dirStr1 = self.getVectorData(tree, subPhrase1, elementName, "MinMax") - mag2, dir2, dirStr2 = self.getVectorData(tree, subPhrase2, elementName, "MinMax") - if mag1 is None and mag2 is None: - return 1, (None, dir1) - if mag1 is None or mag2 is None: - return 0, (None, dir1) - - min1, max1 = mag1 - min2, max2 = mag2 - - differenceFlag = self.checkVectorDifference( - tree, subPhrase1, elementName, min1, 
max1, dir1, min2, max2, dir2) - if differenceFlag == 0: - combine_singleValues = self.combine_singleValues_flag( - tree, subPhrase1, elementName, elementName) - if combine_singleValues == 1: - newMag, newDir = self.vectorAverage((min(min1, min2), dir1), (max(max1, max2), dir2)) - newMag = self.roundStatistic(tree, subPhrase1, newMag, elementName) - newValue = (newMag, newDir) - else: - # Combine using mins and maxs to catch slow trends - newMin = min(min1, min2) - newMax = max(max1, max2) - newMin, newMax = self.applyRanges(tree, phrase, newMin, newMax, elementName) - magAvg, newDir = self.vectorAverage((newMin, dir1), (newMax, dir2)) - newValue = ((newMin, newMax), newDir) - return 1, newValue - return 0, None - - def combineWeather(self, tree, phrase, subPhrase1, subPhrase2, elementName): - # This method now only used for skyPopWx and visibility phrases - statDict1 = subPhrase1.getStatDict() - stats1 = statDict1[elementName] - statDict2 = subPhrase2.getStatDict() - stats2 = statDict2[elementName] - if stats1 is None and stats2 is None: - return 1, None - if stats1 is None or stats2 is None: - return 0, None - - subkeys1 = self.getSubkeys(stats1) - subkeys2 = self.getSubkeys(stats2) - # Special case of combining based only on Visibility - combineVisibility = phrase.get("combineVisibility") - if combineVisibility == 1: - # Combine if low visibility is the same for each subPhrase - lowVis1 = self.getVis(subkeys1) - lowVis2 = self.getVis(subkeys2) - if lowVis1 == lowVis2: - return 1, stats1 - else: - return 0, None - - # Check weather key differences - similarResult = self.checkWeatherSimilarity( - tree, phrase, stats1, stats2, subPhrase1, subPhrase2) - if type(similarResult) is types.ListType: - return 1, similarResult - elif similarResult == 1: - return 1, stats1 - elif similarResult == 2: - return 1, stats2 - else: - return 0, None - - def combineDiscrete(self, tree, phrase, subPhrase1, subPhrase2, elementName): - statDict1 = subPhrase1.getStatDict() - stats1 = 
statDict1[elementName] - statDict2 = subPhrase2.getStatDict() - stats2 = statDict2[elementName] - if stats1 is None and stats2 is None: - return 1, None - if stats1 is None or stats2 is None: - return 0, None - - if stats1 == stats2: - return 1, stats1 - return 0, None - - def combineChildWords(self, tree, node, child1, child2): - words1 = child1.get("words") - if words1 is None: - return 0, None - words2 = child2.get("words") - if words2 is None: - return 0, None - - if words1 == words2: - newChild = self.combine2Children(tree, node, child1, child2) - return 1, newChild - return 0, None - - def combine2SubPhrases(self, tree, node, subPhrase1, subPhrase2, elementInfoList, newVal): - # Combine time ranges - subRange1 = subPhrase1.get("timeRange") - subRange2 = subPhrase2.get("timeRange") - newTimeRange = TimeRange.TimeRange(subRange1.startTime(), subRange2.endTime()) - - # Make new Node so methods will be re-run - # Preserve other elements in statDict - newSubPhrase = tree.makeNode([], subPhrase1.methodList) - # Make new statDict based on new time range - first = elementInfoList[0] - statDict = {} - statDict[first.name] = newVal - areaLabel = node.getAreaLabel() - for elementInfo in elementInfoList[1:]: - stats = tree.stats.get( - elementInfo.name, newTimeRange, areaLabel, elementInfo.statLabel, - elementInfo.mergeMethod) - statDict[elementInfo.name] = stats - newSubPhrase.set("statDict", statDict) - newSubPhrase.set("timeRange", newTimeRange) - newSubPhrase.parent = node - return newSubPhrase - - def combine2Components(self, tree, node, comp1, comp2): - # Combine time ranges - timeRange1 = comp1.getTimeRange() - timeRange2 = comp2.getTimeRange() - newTimeRange = TimeRange.TimeRange(timeRange1.startTime(), timeRange2.endTime()) - # Get fresh component definition so methods will be re-run - newComp = tree.makeComponent(comp1.get("name"), newTimeRange, comp1.get("definition")) - newComp.parent = tree - return newComp - - def combine2Children(self, tree, node, child1, 
child2): - # Combine time ranges and take same values so methods will not be re-run - # Used for combining words - timeRange1 = child1.get("timeRange") - timeRange2 = child2.get("timeRange") - newTimeRange = TimeRange.TimeRange(timeRange1.startTime(), timeRange2.endTime()) - child1.set("timeRange", newTimeRange) - return child1 - - #################################### - - def fillNulls(self, tree, node): - # Data Needed: subPhrase "words" - # Fill in the subPhrases designated as "null" with configurable - # null phrases (first_null_phrase, null_phrase) - - # See if ready to process - if not self.phrase_trigger(tree, node): - return - - index = 0 - #print "fillNulls node", node.get("firstElement"), node.get("elementName") - try: - elementName = node.get("firstElement").name - except: - return self.DONE() - firstNullPhrase = self.first_null_phrase(tree, node, elementName, elementName) - nullPhrase = self.null_phrase(tree, node, elementName, elementName) - for subPhrase in node.get("childList"): - words = subPhrase.get("words") - if words is None: - return - if words == "null": - if index == 0: - subPhrase.set("words", firstNullPhrase) - else: - subPhrase.set("words", nullPhrase) - subPhrase.set("null",1) - index = index + 1 - # Collapse empty word sub-phrases - self.collapsePhraseWords(tree, node) - return self.DONE() - - def collapsePhraseWords(self, tree, phrase): - # Collapse empty word sub-phrases - childList = phrase.childList - if len(childList) <= 1: - return - newList = [] - lastWords = None - index = 0 - emptyIndex = None - for subPhrase in phrase.childList: - subWords = subPhrase.get("words") - if subWords == "": - if lastWords == "": - # Add to empty phrase - subRange = subPhrase.getTimeRange() - emptyRange = childList[emptyIndex].getTimeRange() - newRange = TimeRange.TimeRange(emptyRange.startTime(), subRange.endTime()) - childList[emptyIndex].set("timeRange", newRange) - else: - # Start an empty phrase - emptyIndex = index - else: - if lastWords == "": - 
newList.append(childList[emptyIndex]) - emptyIndex = None - newList.append(subPhrase) - lastWords = subWords - index = index + 1 - if emptyIndex is not None: - newList.append(childList[emptyIndex]) - phrase.childList = newList - - - def timeDescriptorModeration(self, tree, phrase): - # Moderates the time descriptor - # Needs subPhrase "words" - # Looks at subPhrase "null" or empty - # Sets subPhrase "timeDescFlag" indicating whether or not - # to generate a timeDescriptor for this subPhrase - # - # Algorithm: - # if last subPhrase is null, (make sure to flag last non-null) - # If odd number of subPhrases, - # flag even else flag odd - # elif the first even subPhrase is null, flag odd subPhrases - # else, flag even subPhrases - # - # See if ready to process - if not self.phrase_trigger(tree, phrase): - return - childList = phrase.get("childList") - length = len(childList) - if length == 0: - return self.DONE() - # Set all subPhrases if time descriptors are always - # to be on OR off - flag = None - if phrase.get("noTimeDescriptors") == 1: - flag = 0 - elif phrase.get("allTimeDescriptors") == 1: - flag = 1 - if flag is not None: - for subPhrase in childList: - subPhrase.set("timeDescFlag", flag) - return self.DONE() - # If one subPhrase, we need time descriptor IF - # the subPhrase time range differs from the - # phrase time range - if length == 1: - subPhrase = childList[0] - if subPhrase.getTimeRange() == phrase.getTimeRange(): - flag = 0 - else: - flag = 1 - subPhrase.set("timeDescFlag",flag) - return self.DONE() - odd = length%2 - lastNull = self.isNull(childList[length-1]) - if lastNull: - if odd: - flagOdd = 0 - else: - flagOdd = 1 - else: - firstEven = self.isNull(childList[1]) - if firstEven: - flagOdd = 1 - else: - flagOdd = 0 - - index = 0 - for subPhrase in childList: - #print "words", subPhrase.get("words") - #print "null", subPhrase.get("null") - flag = 0 - if index%2 == 0: # odd subPhrase - if flagOdd == 1: - flag = 1 - else: # even subPhrase - if 
flagOdd == 0: - flag = 1 - - # Uncomment the following line if you want ALL sub-phrases - # to have a time descriptor. - # - #flag = 1 - # - # Alternatively, you could test per weather element: - # - #if phrase.get("elementName") == "Wx": - # flag = 1 - - subPhrase.set("timeDescFlag", flag) - index = index + 1 - return self.DONE() - - def isNull(self, subPhrase): - if subPhrase.get("null") == 1: - return 1 - if subPhrase.get("words") == "": - return 1 - return 0 - - def checkPhrasesDone(self, tree, node, areaLabel=None, exceptions=[]): - # Check that all phrases (except those with names listed in exceptions) - # are done for the component associated with node - # If areaLabel is not None, check only those phrases that have - # the given areaLabel. - # Return the list of phrases that are done. - - # We need to look at all progeny -- not just children - # since phrases can have child phrases - leaves = self.getLeaves(tree, node) - phraseList = [] - for child in leaves: - childWords = child.get("words") - childName = child.getAncestor('name') - #print " child", childName, childWords - if childName is None or childName in exceptions: - continue - if areaLabel is not None: - if child.getAreaLabel() != areaLabel: - continue - if childWords is not None: - phraseList.append(child) - else: - # If no words yet, return - #print "returning to wait" - return None - if phraseList is []: - return None - else: - return phraseList - - def phrase_trigger(self, tree, phrase, setUpOnly=0): - # Return 1 if trigger is met, else 0 - # If setUpOnly == 1, trigger will be met if setUp method - # has been completed - # Make sure set-up method was completed - #if len(phrase.get("childList")) == 0 and phrase.get("words") is None: - # return 0 - if not phrase.setUpMethod in phrase.doneList: - return 0 - if setUpOnly: - return 1 - # Make sure sub-phrases have words - for subPhrase in phrase.get("childList"): - # Check to make sure we have words - words = subPhrase.get("words") - if words is 
None: - return 0 - return 1 - - def assembleSubPhrases(self, tree, phrase): - # Assembles sub-phrases adding the time descriptor - # Check for data - - # See if ready to process - if not self.phrase_trigger(tree, phrase): - return - if not self.consolidateSubPhrases_trigger(tree, phrase): - return - - #print "NODE", phrase.get("name"), phrase.getTimeRange() - if self.useUntilPhrasing(tree, phrase): - return self.assembleUntilSubPhrases(tree, phrase) - - fcst = "" - index = 0 - - #print "\nAssemble Subphrases", phrase.get('name'), phrase - - for subPhrase in phrase.get("childList"): - # Check to make sure we have words - words = subPhrase.get("words") - if words is None: - return - #print " words", words - #print " ", subPhrase.getTimeRange(), subPhrase - #print " ", subPhrase.getAncestor("conjunctiveQualifier") - #print " ", subPhrase.getAreaLabel() - if words == "": - continue - - if index == 0: - #if not subPhrase.get("null"): - if not self.isNull(subPhrase): - # Get descriptor - descriptor = phrase.get("descriptor") - if descriptor is not None and descriptor != "": - fcst = fcst + descriptor + " " - else: - # Get connector - connectorMethod = phrase.get("connectorMethod") - connector = connectorMethod(tree, subPhrase) - - if index == 2: - # Add conjunctive "THEN" to make 3+ subPhrase phrases - # flow better. e.g. - # "N WIND 10 TO 20 KT RISING TO 30 KT EARLY IN THE - # AFTERNOON, THEN RISING TO GALES TO 40 KT LATE - # IN THE AFTERNOON." 
- elementName = phrase.getAncestor("elementName") - useThenConnector = self.useThenConnector( - tree, phrase, elementName, elementName) - if useThenConnector: - thenConnector = self.thenConnector( - tree, phrase, elementName, elementName) - if thenConnector != "": - # Add another time descriptor - subPhrase.set("timeDescFlag", 1) - connector = thenConnector + connector - - fcst = fcst + connector - - # Time Descriptor - timeDescriptor = self.format( - self.subPhrase_timeDescriptor(tree, phrase, subPhrase)) - - # Get words again in case they were changed by connector method - fcst = fcst + subPhrase.get("words") + timeDescriptor - index = index + 1 - #print " words", fcst - phrase.set("words", fcst) - return self.DONE() - - def assembleUntilSubPhrases(self, tree, phrase): - # Create a phrase that reports a list of (value, timeRange) - # tuples. Optionally, an associated range may be added to the - # phrase values. - - elementName = phrase.getAncestor("elementName") - untilFormat = self.untilPhrasing_format(tree, phrase, elementName, elementName) - timeRange = phrase.getTimeRange() - - # Make lists of consecutive subphrases - phraseLists = [] - curList = [] - lastTR = None - for subPhrase in phrase.get("childList"): - tr = subPhrase.getTimeRange() - # First time thru -- start curList - if lastTR is None: - curList.append(subPhrase) - lastTR = tr - continue - # Check for consecutive sub ranges - if tr.startTime() == lastTR.endTime(): - curList.append(subPhrase) - # If not consecutive, clear out curList - # and append current subPhrase - else: - if curList != []: - phraseLists.append(curList) - curList = [] - curList.append(subPhrase) - lastTR = tr - if curList != []: - phraseLists.append(curList) - - #print "\nUNTIL NODE", phrase.getTimeRange() - phrases = [] - for phraseList in phraseLists: - words = "" - index = 0 - subWords = "" - firstWords = 1 - for subPhrase in phraseList: - # Check to make sure we have words - lastWords = subWords - subWords = 
subPhrase.get("words") - if subWords is None: - return - #print " words", subWords, subPhrase.getTimeRange() - - if index == 0: - #if not subPhrase.get("null"): - if not self.isNull(subPhrase): - # Get descriptor - descriptor = phrase.get("descriptor") - if descriptor is not None and descriptor != "": - words = words + descriptor + " " - index += 1 - if subWords == "": - continue - subRange = subPhrase.getTimeRange() - # Add connector , then if words came before this - if not firstWords: - words = words + ", then " - # Use after if lastWords were empty and subRange - # starts after timeRange - words = words + subWords - if lastWords == "": - if subRange.startTime() != timeRange.startTime(): - afterTime = self.getTimeStr(untilFormat, subRange, "begin") - if afterTime == "0000": - afterTime = "2400" - words = words + " after " + afterTime - # Use until if the subRange ends before the time range - if subRange.endTime() < timeRange.endTime(): - untilTime = self.getTimeStr(untilFormat, subRange, "end") - if untilTime == "0000": - untilTime = "2400" - words = words + " until " + untilTime - firstWords = 0 - - phrases.append(words) - - # String together phrases and insert periods - index = 0 - words = "" - for str in phrases: - words = words + str - if index < len(phrases)-1: - if phrases[index+1] != "": - if not words == "": - words = words + ". 
" - index += 1 - - return self.setWords(phrase, words) - - def getTimeStr(self, format, timeRange, endBegin): - if endBegin == "end": - if format == "military": - return self.timeDisplay(timeRange, "LT","","","%H")+"00" - else: - str = self.timeDisplay(timeRange, "LT", "", "", "%I %p") - if str[0] == "0": - str = str[1:] - return str - else: - if format == "military": - return self.timeDisplay(timeRange, "LT","","%H","")+"00" - else: - str = self.timeDisplay(timeRange, "LT", "", "%I %p", "") - if str[0] == "0": - str = str[1:] - return str - - def useUntilPhrasing(self, tree, phrase): - # Check to see if the subPhrases warrant "until" phrasing - # Can be set for the phrase - elementName = phrase.getAncestor("elementName") - if self.untilPhrasing_flag(tree, phrase, elementName, elementName): - return 1 - elif self.onTheFly_untilPhrasing_flag( - tree, phrase, elementName, elementName) != 1: - return 0 - # Examine sub-phrase time ranges - tr = phrase.getTimeRange() - timeStart = tr.startTime() - timeEnd = tr.endTime() - for subPhrase in phrase.get("childList"): - #print "subTimeRange", subPhrase.getTimeRange() - subTr = subPhrase.getTimeRange() - subStart = subTr.startTime() - subEnd = subTr.endTime() - # See if subRange end time or start time is - # not a multiple of 3 hours back from timeRange end time. - # If start time is the same as phrase start time, - # do not count as until phrasing. 
- if timeEnd != subEnd: - timeDiff = timeEnd - subEnd - #print timeDiff, timeDiff % (3*3600) - if timeDiff % (3*3600) != 0: - return 1 - if timeStart != subStart: - timeDiff = timeEnd - subEnd - if timeDiff % (3*3600) != 0: - return 1 - return 0 - - def format(self, str): - if str is None: - str = "" - str = self.addSpace(str, "leading") - return str - - def subPhrase_timeDescriptor(self, tree, phrase, subPhrase): - if subPhrase.get("timeDescFlag"): - subRange = subPhrase.getTimeRange() - phraseRange = phrase.getTimeRange() - if phrase.get("name") in self.weatherPhraseNames(tree, phrase) and \ - len(phrase.get("childList")) > 1 and subRange == phraseRange: - dayNight = self.getPeriod(phraseRange, 1) - elementName = phrase.get("elementName") - if dayNight == self.DAYTIME(): - return self.phrase_descriptor( - tree, phrase, "through the day", elementName) - elif dayNight == self.NIGHTTIME(): - return self.phrase_descriptor( - tree, phrase, "through the night", elementName) - else: - return self.timePeriod_descriptor(tree, phrase, subRange) - else: - return "" - - # Connectors - def scalarConnector(self, tree, subPhrase): - # return connector phrase to connect subPhrase and previous one - elementName = subPhrase.getAncestor("firstElement").name - then = self.phrase_connector(tree, subPhrase, "then", elementName) - #if subPhrase.get("null") or subPhrase.getPrev().get("null"): - prev = subPhrase.getPrev() - if self.isNull(subPhrase) or self.isNull(prev): - return then - # Check for either subPhrase specifying only special connector - connector = subPhrase.get("connector") - if connector is not None: - return connector - connector = prev.get("connector") - if connector is not None: - return connector - - # Check for increasing/decreasing values - subPhrase1 = subPhrase.getPrev() - val1 = self.getScalarData(tree, subPhrase1, elementName, "Average") - val2 = self.getScalarData(tree, subPhrase, elementName, "Average") - if val1 > val2: - connector = self.phrase_connector( - 
tree, subPhrase, "decreasing to", elementName) - elif val1 < val2: - connector = self.phrase_connector( - tree, subPhrase, "increasing to", elementName) - else: - connector = then - return connector - - def wxConnector(self, tree, subPhrase): - # Return connector string to connect subPhrase and previous one. - # If subPhrases cover neighboring time ranges, connect them with "then" - # Otherwise, connect them with ". " - # Make sure that we do not connect more than two subPhrases in a row - # with a "then" connector by setting and re-setting the "useThenConnector" - # flag at the phrase level. - thenConnector = self.phrase_connector(tree, subPhrase, "then", "Wx") - connector = '. ' - prev = subPhrase.getPrev() - if prev is None: - return "" - phrase = subPhrase.getParent() - index = subPhrase.getIndex() - if index == 1: - # Initialize so that we are ready to use the thenConnector - # if appropriate - phrase.set("useThenConnector", 1) - - useThenConnector = phrase.get("useThenConnector") - prevEnd = prev.getTimeRange().endTime() - # If the start time of this subPhrase is the same - # as the end time of the previous subphrase - if useThenConnector and prevEnd == subPhrase.getTimeRange().startTime(): - # use the then connector - connector = ', then ' - # Re-set useThenConnector so we don't get - # a long string of ", then" connected sub-phrases - phrase.set("useThenConnector", 0) - else: - # Can re-set connector so we are ready to use the - # then connector on the next subPhrase - phrase.set("useThenConnector", 1) - subPhrase.set("words", subPhrase.get("words").capitalize()) - - return connector - - def visConnector(self, tree, subPhrase): - # return connector phrase to connect subPhrase and previous one - elementName = subPhrase.getAncestor("firstElement").name - then = self.phrase_connector(tree, subPhrase, "then", elementName) - #if subPhrase.get("null") or subPhrase.getPrev().get("null"): - prev = subPhrase.getPrev() - if self.isNull(subPhrase) or 
self.isNull(prev): - return then - # Check for either subPhrase specifying only special connector - connector = subPhrase.get("connector") - if connector is not None: - return connector - connector = prev.get("connector") - if connector is not None: - return connector - - # Check for increasing/decreasing values - subPhrase1 = subPhrase.getPrev() - # Get vis for previous sub-phrase - statDict = subPhrase1.getStatDict() - rankList = self.getStats(statDict, "Wx") - if rankList is None or len(rankList) == 0: - return self.setWords(node, "") - subkeyList = self.getSubkeys(rankList) - val1 = self.getVis(subkeyList) - if val1 is None: - return then - # Get vis for current sub-phrase - statDict = subPhrase.getStatDict() - rankList = self.getStats(statDict, "Wx") - if rankList is None or len(rankList) == 0: - return self.setWords(node, "") - subkeyList = self.getSubkeys(rankList) - val2 = self.getVis(subkeyList) - if val2 is None: - return then - - if val1 > val2: - connector = self.phrase_connector( - tree, subPhrase, "decreasing to", elementName) - elif val1 < val2: - connector = self.phrase_connector( - tree, subPhrase, "increasing to", elementName) - else: - connector = then - return connector - - def vectorConnector(self, tree, subPhrase): - # return connector phrase to connect subPhrase and previous one - elementName = subPhrase.getAncestor("firstElement").name - becoming = self.phrase_connector(tree, subPhrase, "becoming", elementName) - #if subPhrase.get("null") or subPhrase.getPrev().get("null"): - if self.isNull(subPhrase) or self.isNull(subPhrase.getPrev()): - return becoming - - subPhrase1 = subPhrase.getPrev() - mag1, dir1, dirStr1 = self.getVectorData( - tree, subPhrase1, elementName, "Average") - mag2, dir2, dirStr2 = self.getVectorData( - tree, subPhrase, elementName, "Average") - - increasingTo = self.phrase_connector( - tree, subPhrase, "increasing to", elementName) - decreasingTo = self.phrase_connector( - tree, subPhrase, "decreasing to", elementName) - 
- # Directions same - if dirStr1 == dirStr2: - increment = self.nlValue(self.increment_nlValue( - tree, subPhrase, elementName, elementName), mag1) - # Magnitudes same - if abs(mag1-mag2) < increment: - connector = becoming - # Magnitudes different - elif mag1 < mag2: - connector = increasingTo - else: - connector = decreasingTo - # Directions different - else: - magDiff = self.nlValue(self.vector_mag_difference_nlValue( - tree, subPhrase, elementName, elementName), mag1) - # Magnitudes same - if abs(mag1 - mag2) < magDiff: - connector = self.phrase_connector( - tree, subPhrase, "shifting to the", elementName) - # Magnitudes different - else: - # If high wind conditions report both "becoming" and - # "increasing/decreasing" - # Southeast winds around 70 mph becoming south - # and increasing to around 105 mph - increasing = mag1 < mag2 - if max(mag1, mag2) > self.highValue_threshold( - tree, subPhrase, elementName, elementName): - dirStr = subPhrase.get("dirStr") - words = subPhrase.get("words") - words = words.replace(dirStr+" ", "") - subPhrase.set("words", words) - direction = becoming + dirStr + " and" - if increasing: - connector = direction + increasingTo - else: - connector = direction + decreasingTo - # Otherwise, report both "increasing" or "becoming" - # SOUTHEAST WINDS AROUND 20 MPH BECOMING SOUTH - # AROUND 15 MPH - else: - if increasing: - connector = increasingTo - else: - connector = becoming - return connector - - def marine_vectorConnector(self, tree, subPhrase): - # return connector phrase to connect subPhrase and previous one - elementName = subPhrase.parent.get("firstElement").name - if self.isNull(subPhrase) or self.isNull(subPhrase.getPrev()): - return self.phrase_connector(tree, subPhrase, "becoming", elementName) - - subPhrase1 = subPhrase.getPrev() - mag1, dir1, dirStr1 = self.getVectorData( - tree, subPhrase1, elementName, "Average") - mag2, dir2, dirStr2 = self.getVectorData( - tree, subPhrase, elementName, "Average") - - if dirStr1 == 
dirStr2: - increment = self.nlValue(self.increment_nlValue( - tree, subPhrase, elementName, elementName), mag1) - if abs(mag2-mag1) < increment: - connector = self.phrase_connector(tree, subPhrase, "becoming", elementName) - elif mag1 < mag2: - connector = self.phrase_connector(tree, subPhrase, "rising to", elementName) - else: - connector = self.phrase_connector(tree, subPhrase, "easing to", elementName) - else: - magDiff = self.nlValue(self.vector_mag_difference_nlValue( - tree, subPhrase, elementName, elementName), mag1) - if abs(mag2 - mag1) < magDiff: - # Put in test for sea breeze i.e. becoming onshore - if self.seaBreeze_flag(tree, subPhrase, elementName) == 1: - connector = self.phrase_connector(tree, subPhrase, "becoming onshore", elementName) - # Remove subPhrase words - subPhrase.set("words", "") - else: - movement = self.direction_movement(dir1, dir2) - if movement > 0: # clockwise - connector = self.phrase_connector(tree, subPhrase, "veering", elementName) - else: - connector = self.phrase_connector(tree, subPhrase, "backing", elementName) - else: - connector = self.phrase_connector(tree, subPhrase, "becoming", elementName) - return connector - - def removeDirection(self, tree, subPhrase): - # Remove the direction from the subPhrase words - dirStr = subPhrase.get("dirStr") - if dirStr is not None: - words = subPhrase.get("words") - words = words.replace(dirStr, "") - subPhrase.set("words",words) - - def getVectorData(self, tree, subPhrase, elementName, accessMethod): - # Get vector data for subPhrase for the given elementName - statDict = subPhrase.getStatDict() - stats = statDict[elementName] - if stats is None: - return None, None, None - mag, dir = stats - mag = self.getValue(mag, accessMethod) - dirStr= self.dirToText(dir) - return mag, dir, dirStr - - def getScalarData(self, tree, subPhrase, elementName, accessMethod): - # Get scalar data for subPhrase for the given elementName - matchingInfo = self.matchToWxInfo(tree, subPhrase, elementName, 
elementName) - if matchingInfo != "": - val = self.matchToWx(tree, subPhrase, elementName) - if accessMethod == "MinMax": - val = (val, val) - else: - firstElement = subPhrase.getAncestor("firstElement") - dataType = firstElement.dataType - statDict = subPhrase.getStatDict() - val = statDict[elementName] - val = self.getValue(val, accessMethod, dataType) - return val - - def seaBreeze_flag(self, tree, subPhrase, elementName): - # Return 1 if an onshore breeze is detected from the prior range - # Get local effects areas directions for Offshore previous subPhrase - # and Onshore for subPhrase - # offshoreDir, onshoreDir - offShoreArea, onShoreArea = self.seaBreeze_areaLabels(tree, subPhrase) - if offShoreArea is None: - return 0 - subPhrase1 = subPhrase.getPrev() - if subPhrase1 is None: - return 0 - timeRange1 = subPhrase1.getTimeRange() - timeRange2 = subPhrase.getTimeRange() - areaLabel1 = subPhrase1.getAreaLabel() - areaLabel2 = subPhrase.getAreaLabel() - offshore = tree.stats.get( - "Wind", timeRange1, offShoreArea, mergeMethod="Max", - intersectWith=areaLabel1) - onshore = tree.stats.get( - "Wind", timeRange2, onShoreArea, mergeMethod="Max", - intersectWith=areaLabel2) - if offshore is None or onshore is None: - return 0 - mag, offshoreDir = offshore - mag, onshoreDir = onshore - # Get thresholds - offshore1, offshore2, onshore1, onshore2 = self.seaBreeze_thresholds(tree, subPhrase) - if self.direction_between(offshoreDir, offshore1, offshore2) and \ - self.direction_between(onshoreDir, onshore1, onshore2): - return 1 - return 0 - - # Subphrase Level - def checkRepeatingString(self, tree, node, str, strName, matchAreaLabels=1): - # Given a text string, str, and a descriptive name for that string, - # see if it repeats in the previous phrase, sub-phrase or embedded phrase. - # If we find a repeating string, return an empty string - # Otherwise return the original string. 
- # If matchAreaLabels, the areaLabel of previous node must match - # that of the current if we are to return an empty string. - # This prevents phrases such as: - # Chance of rain and snow 20 percent windward rain and snow 40 percent leeward. - # - - # Check sub-phrases - #print "Check Repeating", node.getAncestor('name'), str - #print " matchAreaLabels", matchAreaLabels - prevNode = node.getPrev() - if prevNode is not None: - if matchAreaLabels and \ - prevNode.getAreaLabel() != node.getAreaLabel(): - return str - prevStr = prevNode.get(strName) - if prevStr is not None and str == prevStr: - # Do not repeat previous str - #print "return 1" - return "" - # Check degenerate conjunctive local effect - # We are looking for these conditions: - # --This phrase has only one sub-phrase - # --The previous phrase has only one sub-phrase AND - # has the same name as the current phrase (e.g. popMax_phrase - # --The str for the sub-phrases are the same - phrase = node.getParent() - #tree.printNode(phrase.parent) - if len(phrase.childList) == 1: - prevPhrase = phrase.getPrev() - if prevPhrase is not None: - if matchAreaLabels and \ - prevPhrase.getAreaLabel() != node.getAreaLabel(): - return str - if prevPhrase.get("name") == phrase.get("name"): - if len(prevPhrase.childList) == 1: - prevSubPhrase = prevPhrase.childList[0] - prevStr = prevSubPhrase.get(strName) - if prevSubPhrase.get('words') is None: - # Must wait for previous words to finish - return -1 - if prevStr is not None and str == prevStr: - # Do not repeat previous str - #print "return 2" - return "" - return str - - # Local Effects - - def checkLocalEffects(self, tree, node): - localEffectsList = self.getLocalEffectsList(tree, node) - #print " le list", localEffectsList - if localEffectsList is None or len(localEffectsList) == 0: - return self.DONE() - childList = node.get("childList") - if childList is None or len(childList) < 1: - return self.DONE() - if self.__dict__.get('_leDebug',0): - print "\nChecking local 
effects for", node.get('name'), node.getAreaLabel() - print " node", node - print " parent", node.parent - print " disabled", node.get('disabledSubkeys'), node.getAncestor('disabledSubkeys') - print "\ncomp phrases before:" - self.printCompPhrases(tree, node) - - for localEffect in localEffectsList: - # If ANY subPhrase has a local effect, create conjunctive local effect. - # If ALL subPhrases have the same local effect "groups", use that grouping. - # Otherwise, create a conjunctive phrase for each local effect area. - flag = 0 - firstTime = 1 - sameGroups = 1 - for checkNode in childList: - nodeFlag, nodeGroups = self.checkLocalEffect(tree, checkNode, localEffect) - if nodeFlag: - flag = 1 - if firstTime: - groups = nodeGroups - firstTime = 0 - elif groups != nodeGroups: - # flag must be 1 - sameGroups = 0 - break - if flag: - # Create conjunctive local effect - #print "Creating conjunctive local effect" - if sameGroups == 0: - groups = [] - leAreaList = self.getLeAreaList(tree, node, localEffect) - for leArea in leAreaList: - groups.append([leArea]) - nodeList = self.makeLocalEffectNodes(tree, node, localEffect, groups) - - # Applies only to the skyPopWx_phrase - # Set up includeSky for new local effect nodes - includeSky = self.getIncludeSky(tree, node) - for newNode in nodeList: - newNode.set("includeSky", includeSky) - if self.__dict__.get('_leDebug',0): - print "newNode", newNode.get("name"), newNode.get("areaLabel") - print " includeSky", includeSky, newNode - node.replace(nodeList) - - - if flag: # There is a local effect - self.localEffect_hook(tree, node) - if self.__dict__.get('_leDebug',0): - print "\ncomp phrases after:", self.printCompPhrases(tree, node) - return self.DONE() - - def checkLocalEffect(self, tree, node, localEffect): - # Check each local effect area against all others for the given node. - # Determine "groups" i.e. group the local effect areas according to - # similar statistics. 
- # Return - # -- a flag to indicate if any local effect areas showed differing - # statistics. - # -- the "groups" - triggerMethod = localEffect.triggerMethod - leAreaList = self.getLeAreaList(tree, node, localEffect) - if len(leAreaList) == 0: - return 0, [] - - # Begin with one group consisting of first local effect edit area - groups = [[leAreaList[0]]] - # This loop checks each subsequent local effect edit area against - # the existing groups and appends it to the first group which - # has similar statistics. - # If no existing group has similar statistics, a new group is - # created. - for leArea1 in leAreaList[1:]: - addedToExisting = 0 - for group in groups: - leArea2 = group[0] - difference = self.checkThreshold( - tree, node, triggerMethod, leArea1, leArea2, localEffect) - if difference == 0: - # Similar statistics, so - # append it to the current group - group.append(leArea1) - addedToExisting = 1 - break - if addedToExisting == 0: - # Did not find similar group, so create a new group - groups.append([leArea1]) - if len(groups) == 1: - flag = 0 - else: - flag = 1 - return flag, groups - - def getLocalEffectsList(self, tree, node): - leList = node.get("localEffectsList") - if type(leList) is types.MethodType: - return leList(tree, node) - else: - return leList - - def getLeAreaList(self, tree, node, localEffect): - leAreaList = localEffect.leAreaList - if type(leAreaList) is types.MethodType: - return leAreaList(tree, node) - else: - return leAreaList - - def getLeAreaLabel(self, tree, node, leArea): - if leArea.areaLabel == "__Current__": - return node.getAreaLabel() - elif leArea.intersectFlag: - return self.getIntersectName(node.getAreaLabel(), leArea.areaLabel) - #return self.getIntersectName(tree.getAreaLabel(), leArea.areaLabel) - else: - return leArea.areaLabel - - def getLeQualifiers(self, tree, node, group): - # Return the qualifiers for this group of leAreas - # There is a qualifer for embedded local effect phrases - # and one for conjunctive 
local effect phrases. - embeddedQualifier = "" - conjQualifier = "" - length = len(group) - index = 0 - for leArea in group: - areaWords = leArea.areaWords - if type(areaWords) is types.MethodType: - areaWords = areaWords(tree, node, leArea) - embeddedQualifier = embeddedQualifier + areaWords - conjWords = leArea.conjAreaWords - if type(conjWords) is types.MethodType: - conjWords = conjWords(tree, node, leArea) - conjQualifier = conjQualifier + conjWords - # if last one, do not add conjunction - if index == length - 1: break - embeddedQualifier = embeddedQualifier + " and " - conjQualifier = conjQualifier + " and " - index = index + 1 - return embeddedQualifier, conjQualifier - - def checkThreshold(self, tree, node, triggerMethod, leArea1, leArea2, localEffect): - # Return 1 if the difference between leArea1 and leArea2 stats is - # greater than the threshold - # Handles stats that are a min/max or a singleValue - leArea1Label = self.getLeAreaLabel(tree, node, leArea1) - leArea2Label = self.getLeAreaLabel(tree, node, leArea2) - if type(triggerMethod) is types.MethodType: - flag = triggerMethod(tree, node, localEffect, leArea1Label, leArea2Label) - else: - first = node.getAncestor("firstElement") - element = first.name - dataType = first.dataType - if dataType == self.WEATHER(): - mergeMethod = "Average" - else: - mergeMethod = "MinMax" - timeRange = node.getTimeRange() - area1Stats = tree.stats.get(element, timeRange, leArea1Label, - mergeMethod=mergeMethod) - area2Stats = tree.stats.get(element, timeRange, leArea2Label, - mergeMethod=mergeMethod) - area1Stats = self.applyDisabled(tree, node, area1Stats) - area2Stats = self.applyDisabled(tree, node, area2Stats) - if self.__dict__.get("_leDebug", 0): - print "\nCheckThreshold", element, timeRange - print leArea1Label, area1Stats - print leArea2Label, area2Stats - if area1Stats is None or area2Stats is None: - return 0 - flag = self.checkLocalEffectDifference( - tree, node, dataType, triggerMethod, area1Stats, 
area2Stats, - leArea1Label, leArea2Label) - if self.__dict__.get("_leDebug", 0): - print "returning", flag - return flag - - def applyDisabled(self, tree, node, stats): - if stats is None: - return stats - disabledSubkeys = node.getAncestor('disabledSubkeys') - #print "/n applyDisabled: disabled", disabledSubkeys - #print "stats", stats - if disabledSubkeys is not None: - newStats = [] - for subkey, rank in stats: - if subkey not in disabledSubkeys: - newStats.append((subkey, rank)) - stats = newStats - if stats == []: - emptyKey = WeatherSubKey.weatherSubKey(self._argDict['site'], - "", "", "", "", []) - stats = [(emptyKey, 100)] - return stats - - def checkLocalEffectDifference(self, tree, node, dataType, threshold, - area1Stats, area2Stats, al1, al2): - if dataType == self.DISCRETE(): - if area1Stats != area2Stats: - return 1 - else: - return 0 - if dataType == self.WEATHER(): - flag = self.checkWeatherSimilarity( - tree, node, area1Stats, area2Stats, al1=al1, al2=al2) - # checkWeatherSimilarity returns 0 if there IS a difference and, thus, - # should be a local effect - if flag == 0: - return 1 - else: - return 0 - if dataType == self.VECTOR(): - area1Stats, dir = area1Stats - area2Stats, dir = area2Stats - - if type(area1Stats) is types.TupleType: - min1, max1 = area1Stats - min2, max2 = area2Stats - diff1 = self.absDiff(min1, min2) - diff2 = self.absDiff(max1, max2) - # Check to see if one range is included within the other - if self.rangeIncluded(min1, max1, min2, max2) == 1: - return 0 - if self.rangeIncluded(min2, max2, min1, max1) == 1: - return 0 - # Check to see if either min or max is greater than threshold - if diff1 > threshold or diff2 > threshold: - return 1 - else: - return 0 - else: - absDiff = self.absDiff(area1Stats, area2Stats) - if absDiff > threshold: - return 1 - else: - return 0 - - def checkSkyWxDifference(self, tree, node, localEffect, leArea1Label, leArea2Label): - timeRange = node.getTimeRange() - wxStats1 = tree.stats.get("Wx", 
timeRange, leArea1Label, - mergeMethod="Average") - wxStats2 = tree.stats.get("Wx", timeRange, leArea2Label, - mergeMethod="Average") - wxStats1 = self.applyDisabled(tree, node, wxStats1) - wxStats2 = self.applyDisabled(tree, node, wxStats2) - #print "wxStats1", wxStats1 - #print "wxStats2", wxStats2 - wxSame = self.checkWeatherSimilarity( - tree, node, wxStats1, wxStats2, al1=leArea1Label, al2=leArea2Label) - #print "wxSame", wxSame - if wxSame == 0: - wxDiff = 1 - else: - wxDiff = 0 - - skyDiff = self.checkSkyDifference(tree, node, localEffect, - leArea1Label, leArea2Label) - - # Determine if ANY of the sub-phrases have a sky local effect - # and store this information at the parent level for later - # use by the "checkLocalEffects" method. - skyLE = node.parent.get("skyLE") - if skyLE is None: - node.parent.set("skyLE", 0) - if skyDiff: - node.parent.set("skyLE", 1) - #return wxDiff - return skyDiff or wxDiff - - def getIncludeSky(self, tree, node): - # If this is called, then we know we have a LE - # i.e. there was a wx local effect. 
- skyLE = node.get("skyLE") - if skyLE: - return None # Want to make sure we check at the LE level - else: - return 0 - - def checkSkyDifference(self, tree, node, localEffect, - leArea1Label, leArea2Label): - timeRange = node.getTimeRange() - skyValue1 = tree.stats.get("Sky", timeRange, leArea1Label, - mergeMethod="Average") - skyValue2 = tree.stats.get("Sky", timeRange, leArea2Label, - mergeMethod="Average") - if timeRange.duration() > 12*3600: - dayNight = -1 - else: - dayNight = self.getPeriod(timeRange, 1) - words1 = self.sky_value(tree, node, self.getValue(skyValue1), dayNight) - words2 = self.sky_value(tree, node, self.getValue(skyValue2), dayNight) - return not self.similarSkyWords_flag(tree, node, words1, words2) - - def localEffect_hook(self, tree, node): - return - - def rangeIncluded(self, min1, max1, min2, max2): - # Return 1 if min1, max1 are included in min2, max2 - if min1 >= min2 and max1 <= max2: - return 1 - return 0 - - def absDiff(self, val1, val2): - # Return the absolute difference between the values - # Note: this handles negative values - if (val1 > 0 and val2 > 0) or (val1 < 0 and val2 < 0): - return abs(val1 - val2) - else: - return abs(val1) + abs(val2) - - def makeLocalEffectNodes(self, tree, node, localEffect, groups): - # Make a node phrase for each group of local effect areas in groups - nodeList = [] - for group in groups: - leAreaLabel = self.getLeAreaLabel(tree, node, group[0]) - newNode = tree.copyPhrase( - node, node.getTimeRange(), leAreaLabel, - copyAttrs=["disabledSubkeys", "disabledElements", - "firstElement", "elementName", "elementInfoList", - "descriptor", "indentLabel"]) - embeddedQualifier, conjQualifier = self.getLeQualifiers(tree, node, group) - newNode.set("embeddedQualifier", embeddedQualifier) - newNode.set("conjunctiveQualifier", conjQualifier) - newNode.set("localEffect", localEffect) - newNode.set("leGroup", group) - nodeList.append(newNode) - return nodeList - - def printCompPhrases(self, tree, node): - comp = 
node.getComponent() - print "Component phrases for", node - for phrase in comp.get('childList'): - print phrase.get('name'), phrase.getAreaLabel(), phrase - print " ", phrase.get('words') +# Author: hansen +# History +# Time Ticket Number Developer Comments +# ----------------------------------------------------------------------------- +# 12/28/2012 DR 15596 J.Zeng Added checkWeatherSimilarity +# for two lists based on Virgil's +# suggestion +# 04/20/2015 4027 randerso Changes for mixed case product generation. +# 01/08/2016 5129 dgilling Fix signatures to calls in WeatherSubKey. +# 07/15/2016 5749 randerso Replace ellipses with commas +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import types +import TimeDescriptor +import ConfigVariables +import StringUtils +import UnitConvertor +import TimeRange, WeatherSubKey + +class PhraseBuilder(ConfigVariables.ConfigVariables, + StringUtils.StringUtils, + TimeDescriptor.TimeDescriptor, + UnitConvertor.UnitConvertor): + + def __init__(self): + ConfigVariables.ConfigVariables.__init__(self) + StringUtils.StringUtils.__init__(self) + TimeDescriptor.TimeDescriptor.__init__(self) + UnitConvertor.UnitConvertor.__init__(self) + + def SCALAR(self): + return 0 + def MAGNITUDE(self): + return 1 + def DIRECTION(self): + return 2 + def VECTOR(self): + return 3 + def VECTOR_TEXT(self): + return 4 + def VECTOR_NUM(self): + return 5 + def WEATHER(self): + return 6 + def DISCRETE(self): + return 7 + + def DONE(self): + return 1 + + def call(self, method, tree, node): + # Try to call with tree, node arguments + try: + return method(tree, node) + except: + return method() + + def setWords(self, node, words): + node.set("words", words) + # If the words are empty, we can close down node + if words == "": + #node.remove() + node.doneList = node.methodList + return 1 + + def useCommas(self, tree, node, words): + # Instead of: + # "Chance 
of rain and snow and freezing rain..." + # use + # "Chance of rain, snow and freezing rain..." + if words.count(" and ") > 1: + hiIndex = words.rfind(" and ") + words = words[:hiIndex].replace(" and ", ", ") + words[hiIndex:] + return words + + def setDone(self, node): + # Set the node doneList to the methodList + # so that the node is defunct + node.doneList = node.methodList + + def standard_phraseMethods(self): + return [ + self.consolidatePhrase, + self.checkLocalEffects, + self.combinePhraseStats, + self.consolidateTrends, + self.chooseMostImportant, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.assembleSubPhrases, + self.postProcessPhrase, + ] + + def postProcessPhrase(self, tree, node): + words = node.get("words") + rval = None + if words is not None: + words = words.replace("rain showers and thunderstorms", "showers and thunderstorms") + # To handle snow amt local effects + words = words.replace("except of", "except") + # Translate phrase + # This is necessary so that word-wrap works correctly + try: + words = self.translateForecast(words, self._language) + except: + words = self.translateForecast(words, "english") + rval = self.setWords(node, words) + return rval + + def roundStatistic(self, tree, node, value, elementName): + roundingMethod = self.rounding_method(tree, node, elementName, elementName) + nlIncrement = self.nlValue(self.increment_nlValue( + tree, node, elementName, elementName), value) + return self.roundValue(value, roundingMethod, "Nearest", nlIncrement) + + def findWords(self, tree, node, firstElementName, areaLabel=None, phraseList=None, + ignoreAreaIfLastChance=1, phraseLevel=0, attributes=None): + # Return a text string which is the concatenation of the words for all + # leaves of the tree in the same component as the given node + # and have the given firstElement at their phrase level. + # "Leaves" will be sub-phrases in most case unless a phrase node has + # words set and has empty sub-phrases. 
+ # The method returns None unless all leaves that meet the qualifications + # have words set. + # + # If an areaLabel is given, it is applied to the selection of leaves. + # Otherwise all leaves meeting the firstElementName criteria are accepted. + # "areaLabel" can optionally be a list of areaLabels. + # + # If a phraseList is given, only leaves belonging to a phrase + # in that list are examined. + # If phraseLevel is 1 and words are set at the phrase level, those + # are concatenated to the words as well. + # + # if attributes is not None, return a dictionary: + # {attribute: [list of attribute values for leaves with words]} + # + + leaves = self.getLeaves(tree, node) + found = 0 + words = "" + attrDict = {} + #print "\nFind Words", firstElementName + if areaLabel is not None: + if type(areaLabel) is not list: + areaLabel = [areaLabel] + for child in leaves: + firstElement = child.getAncestor("firstElement") + if firstElement is None: + continue + if firstElement.name == firstElementName or firstElementName is None: + if phraseList is not None: + if child.getAncestor("name") not in phraseList: + continue + #print "firstElement", firstElement.name + #print "child", child, child.get("childList") + #print "words", child.get('words') + + # Check the area + if ignoreAreaIfLastChance: + # If last time thru because no changes were made to tree, + # ignore area + if tree.get("lastChance") == 1: + areaLabel = None + if areaLabel is not None: + if child.getAreaLabel() not in areaLabel: + continue + + # This is a phrase for which we want words + childWords = child.get('words') + if childWords is not None: + found = 1 + #print "Adding words", child.get('name'), child.getAreaLabel() + #print " ", child.getTimeRange() + #print " ", childWords + words = words + " " + childWords + if attributes is not None: + for attribute in attributes: + attrVal = child.getAncestor(attribute) + if attrVal is not None: + self.addToDictionary(attrDict, attribute, attrVal) + if phraseLevel: + 
parentWords = child.parent.get('words') + if parentWords is not None: + words = words + " " + parentWords + else: + # wait for words to complete + words = None + break + if not found: + words = None + if attributes is not None: + return words, attrDict + else: + return words + + def addToDictionary(self, dictionary, key, value): + # Add the given value to the dictionary where the key + # entry is a list of values + if key in dictionary: + if type(value) is list: + dictionary[key] += value + else: + dictionary[key].append(value) + else: + if type(value) is list: + dictionary[key] = value + else: + dictionary[key] = [value] + + def getLeaves(self, tree, node): + # Return a list of "leaves" for the component to which the node belongs. + # "Leaves" will be nodes that have no children (sub-phrases) + # EXCEPT if a node has words set AND it's children do not. + # (Some phrases e.g. waveHeight_phrase, will sometimes by-pass + # the sub-phrases and set the phrase words directly.) + # + component = node.getComponent() + progeny = component.getProgeny() + leaves = [] + for child in progeny: + childList = child.get('childList') + #print "child", child.getAncestor('name'), childList + #print " ", child.get('words'), child.getAreaLabel() + if childList == [] or childList is None: + # This is a leaf + # Check to see if it has words + leaf = child + if child.get('words') is None: + # If not AND it's parent has words, + # take the parent as the leaf. 
+ if child.parent.get('words') is not None: + leaf = child.parent + leaves.append(leaf) + return leaves + + def dayOrNight_element(self, tree, node, dayElement, nightElement): + dayNight = self.getPeriod(node.getTimeRange(), 1) + if dayNight == self.DAYTIME(): + element = dayElement + else: + element = nightElement + return element + + def removeComponentPhrases(self, tree, node, phraseName, exceptions=[], areaLabels=[]): + # Remove all phrases with the given phraseName in the current component + # If areaLabel is not None, check for a match there as well and only + # remove the phrase if it is for that area. + component = node.getComponent() + progeny = component.getProgeny() + #print "\nRemoving Phrases", phraseName, areaLabels, exceptions + for child in progeny: + name = child.get("name") + if name == phraseName: + #print "child", child, child.getAreaLabel() + if child not in exceptions: + if areaLabels != []: + if child.getAreaLabel() in areaLabels: + #print "Removing 1", name, areaLabels, child + #import traceback + #traceback.print_stack(limit=3) + child.remove() + else: + child.remove() + #print "Removing 2", name, areaLabels + #import traceback + #traceback.print_stack(limit=3) + + class ElementInfo: + def __init__(self, name, mergeMethod="Average", dataType=0, + statLabel="", primary=1, phraseDef=None): + self.name = name + self.mergeMethod = mergeMethod + self.statLabel = statLabel + self.primary=primary + self.dataType = dataType + self.phraseDef = phraseDef +## def name(self): +## return self.name +## def mergeMethod(self): +## return self.mergeMethod +## def statLabel(self): +## return self.statLabel +## def primary(self): +## return self.primary() +## def dataType(self): +## return self.dataType() +## def phraseDef(self): +## return self.phraseDef() + + class LocalEffectArea: + def __init__(self, areaLabel, areaWords, conjAreaWords=None, intersectFlag=1): + # AreaLabel can be "__Current__" which will use the + # current edit area. 
+ self.areaLabel = areaLabel + # Words to describe this local effect area. + # This can be a text string or a method + # arguments: (tree, node, LocalEffectArea) + # returns a text string + self.areaWords = areaWords + # This can be a text string or a method + # arguments: (tree, node, LocalEffectArea) + # returns a text string + # If you want different area words for the conjunctive + # phrase versus the embedded phrase, add them here + if conjAreaWords is None: + self.conjAreaWords = areaWords + else: + self.conjAreaWords = conjAreaWords + # If 1, area is to be intersected with the current area to + # look for local effects and the area must be listed as an + # intersectArea in the current product component + self.intersectFlag = intersectFlag + + class LocalEffect: + def __init__(self, leAreaList, triggerMethod, exceptionWords): + # List of LocalEffectAreas + # Currently only two areas can be handled by the local effect methods + self.leAreaList = leAreaList + self.triggerMethod = triggerMethod + self.exceptionWords = exceptionWords + + def makeRangeStats(self, tree, dataType, stats, timeRange): + if stats is None: + return None + if tree.library.isStatsByRange(dataType, stats, timeRange): + return stats + else: + return[(stats, timeRange)] + + def sumPrevStats(self, tree, node, areaLabel, elementName, + mergeMethod="Average", increment=1): + # Return a sum of stats going backward in time + # until there is a zero value (when rounded to the nearest increment) + # and NOT including the current value + # For example: + # Period1 = 2 inches snow amt + # Period2 = 0 inches + # Period3 = 2 inches + # Period4 = 2 inches + # Period5 = 4 inches + # If sumPrevStats is called during evaluation of + # Period5, the sum will be 4 (sum of Period3 and Period4) + + # Calculate past snow + prodTR = tree.getTimeRange() + pastSnowMin = 0 + pastSnowMax = 0 + pastSnowTimeRange = self.makeTimeRange(prodTR.startTime() - 12*3600, + prodTR.startTime()) + stats = 
tree.stats.get("SnowAmt", pastSnowTimeRange, + areaLabel, mergeMethod="MinMax") + + if stats is not None: + pastSnowMin, pastSnowMax = self.getValue(stats, "MinMax") + pastSnowMin = int(pastSnowMin+0.5) + pastSnowMax = int(pastSnowMax+0.5) + else: + pastSnowMin = 0 + pastSnowMax = 0 + + minSum = pastSnowMin + maxSum = pastSnowMax + + # Calculate snow in forecast periods + childList = node.getParent().get("childList") + timeRange = node.getTimeRange() + for child in childList: + childTimeRange = child.getTimeRange() + if childTimeRange == timeRange: + break + + stats = tree.stats.get("SnowAmt", childTimeRange, + areaLabel, mergeMethod="MinMax") + if stats is None: + continue + min, max = self.getValue(stats, "MinMax") + min = int(min+0.5) + max = int(max+0.5) + threshold = self.pop_snow_lower_threshold(tree, node) + popStats = self.matchToWx(tree, node, "PoP", childTimeRange) + if popStats < threshold: + min = 0 + max = 0 + if max == 0: + # Start over + minSum = 0 + maxSum = 0 + else: + minSum = minSum + min + maxSum = maxSum + max + + return minSum, maxSum + + def getScalarRangeStr(self, tree, node, element, min, max): + min1=int(min) + max1=int(max) + if min1 == max1: + return repr(min1) + else: + maxRange = self.maximum_range_nlValue(tree, node, element, element) + maxRange = self.nlValue(maxRange, max1) + if (maxRange == 0): + return repr(max1) + if abs(min1-max1) > maxRange: + min1 = max1 - maxRange + connector = self.value_connector(tree, node, element, element) + return repr(min1) + connector + repr(max1) + + def makeSentence(self, tree, node): + "Make a sentence from the words at the node level" + words = node.get("words") + if words is None: + return + if words == "": + words = "MISSING." + else: + words = self.sentence(words) + return self.setWords(node, words) + + def chooseElement(self, tree, node, elementNameList): + # Return first elementName for which there are stats for + # this node. If none, return last elementName. 
+ for elementName in elementNameList: + stats = tree.stats.get(elementName, node.getTimeRange(), + node.getAreaLabel(), mergeMethod="Avg") + if stats is not None: + return elementName + return elementName + + # Narrative Level + def assembleChildWords(self, tree, node): + fcst = "" + for child in node.get("childList"): + words = child.get("words") + if words is None: + return + fcst = fcst + words + return self.setWords(node, fcst) + + # Component Level + + def noWords(self, tree, component): + self.setWords(component, "") + return self.DONE() + + def assembleSentences(self, tree, node): + for phrase in node.get("childList"): + words = phrase.get("words") + if words is None: + return + fcst = "" + lastQualifier = None + lastPhrase = None + self.orderWxPhrases(tree, node) + self.consolidateLocalEffectPhrases(tree, node) + for child in node.get("childList"): + words = child.get("words") + words, lastQualifier = self.qualifyWords( + child, words, "conjunctiveQualifier", lastQualifier, + lastPhrase) + lastPhrase = child + fcst = fcst + words + return self.setWords(node, fcst) + + + def assemblePhrases(self, tree, component): + # Assemble component phrases and add Label + # Qualify the phrases with local effect qualifiers + # if present. + # e.g. 
"near the coast" + phrases = [] + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + if words != "": + phrases.append(phrase) + #print "\nAssemblePhrases" + # Remove empty word phrases + component.childList = phrases + self.orderWxPhrases(tree, component) + self.consolidateLocalEffectPhrases(tree, component) + #print + fcst = "" + lastQualifier = None + lastPhrase = None + phraseList = [] + includeOnlyPhrases = self.includeOnlyPhrases_list(tree, component) + + for phrase in component.get("childList"): + words = phrase.get("words") + words = self.adjustWords(tree, phrase, words) + #print phrase.get('name'), phrase.getAreaLabel() + #print " ", words + if type(includeOnlyPhrases) is list and len(includeOnlyPhrases) > 0 and \ + phrase.get('name') not in includeOnlyPhrases: + # Do not include this phrase + continue + + words, lastQualifier = self.qualifyWords( + phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase) + lastPhrase = phrase + if words not in phraseList: + phraseList.append(words) + fcst = fcst + words + # Add label + curLocalTime, shift = self.determineTimeShift() + issuanceInfo = tree.get("issuanceInfo") + index = component.getIndex() + label = self.createLabel(tree, component, component.get("timeRange"), + issuanceInfo, curLocalTime, shift, index) + fcst = self.combineSentences(fcst) + return self.setWords(component, label + fcst) + + def adjustWords(self, tree, phrase, words): + # Make any special adjustments to phrases + # This one is necessary for popMax since we have + # removed repeating popType wording assuming + # an embedded PoP phrase will result. + # If it happens to end up as a conjunctive, + # then we have to put the popType back. 
+ if phrase.get('name') in ["popMax_phrase"]: + if not phrase.get('embedded'): + popType = phrase.getDescendent('popType') + if words.find(popType) == -1: + desc = phrase.get('descriptor') + words = words.replace(desc, desc + " " + popType) + return words + + def assembleIndentedPhrases(self, tree, component): + # Assemble and indent component phrases and add Label + # Qualify the phrases with local effect qualifiers + # if present. + # e.g. "near the coast" + for phrase in component.get("childList"): + words = phrase.get("words") + #print phrase, words + if words is None: + return + + # DR_18964 + self.consolidateLEPerPhraseInstance(tree, component) + #self.consolidatePerPhraseNameGroup(tree, component) + + fcst = "" + lastQualifier = None + lastPhrase = None + self.orderWxPhrases(tree, component) + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + if words == "": + if self.removeEmptyPhrase(tree, phrase): + continue + + # Handle multiple element table phrase + # that appears per period + # No need to indent or qualify + name = phrase.get("name") + if name == "multipleElementTable_perPeriod_phrase": + fcst = fcst + words + continue + + if phrase.get("compound"): + makeSentence = 0 + else: + makeSentence = 1 + words, lastQualifier = self.qualifyWords( + phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase, + makeSentence=makeSentence) + lastPhrase = phrase + indentLabel = phrase.get("indentLabel") + label = self.phrase_descriptor( + tree, phrase, indentLabel, indentLabel) + #print "indentLabel, label", indentLabel, label + if indentLabel is not None and label == "": + label = indentLabel + if words == "": + words = " " + words = self.labelIndent(words, label) + print(phrase, words) + fcst = fcst + words + # Add label + curLocalTime, shift = self.determineTimeShift() + issuanceInfo = tree.get("issuanceInfo") + index = component.getIndex() + label = self.createLabel(tree, component, 
component.get("timeRange"), + issuanceInfo, curLocalTime, shift, index) + return self.setWords(component, label + "\n" + fcst + "\n") + + def consolidateLEPerPhraseInstance(self, tree, component): + # Do the LE consolidation/combination for each (compound) phrase + # create a pseudo component for the LE phrases to hang onto + pseudo = component.copy() + + lePhraseNameGroups = [] + le_groups = {} + for phrase in component.get("childList"): + if phrase.get("localEffect"): + lePhraseNameGroup, firstName = self.getLePhraseNameGroup( + tree, component, lePhraseNameGroups, phrase) + le_groups.setdefault(lePhraseNameGroup, []).append(phrase) + + for name, nodes in le_groups.items(): + #print name, nodes + + # put the nodes under the pseudo-component and do the + # LE consolidation/combination + pseudo.set("childList", nodes) + self.consolidateLocalEffectPhrases(tree, pseudo) + #self.combineConjunctivePhrases(tree, pseudo) + + # add the resultant nodes back under the component, + # replacing the original ones + newChildren = pseudo.get("childList") + #print 'new nodes:', newChildren + if newChildren == nodes: + continue + + childList = [] + inserted = 0 + for child in component.get("childList"): + if child in nodes: + if not inserted: + childList += newChildren + inserted = 1 + else: + childList.append(child) + component.set("childList", childList) + + # delete the pseudo component + pseudo.remove() + pseudo.set("parent", None) + pseudo.set("childList", []) + + + def consolidatePerPhraseNameGroup(self, tree, component): + # Do the LE consolidation/combination for each (compound) phrase + # create a pseudo component for the LE phrases to hang onto + pseudo = component.copy() + + lePhraseNameGroups = self.lePhraseNameGroups(tree, component) + le_groups = {} + for phrase in component.get("childList"): + if phrase.get("localEffect"): + lePhraseNameGroup, firstName = self.getLePhraseNameGroup( + tree, component, lePhraseNameGroups, phrase) + 
le_groups.setdefault(lePhraseNameGroup, []).append(phrase) + + for name, nodes in le_groups.items(): + #print name, nodes + + # put the nodes under the pseudo-component and do the + # LE consolidation/combination + pseudo.childList = nodes + self.consolidateLocalEffectPhrases(tree, pseudo) + self.combineConjunctivePhrases(tree, pseudo) + + # add the resultant nodes back under the component, + # replacing the original ones + newChildren = pseudo.childList + childList = [] + inserted = 0 + for child in component.childList: + if child in nodes: + if not inserted: + childList += newChildren + inserted = 1 + else: + childList.append(child) + component.childList = childList + + # delete the pseudo component + pseudo.remove() + + def weatherPhraseNames(self, tree, node): + return ["weather_phrase", "skyPopWx_phrase"] + + def orderWxPhrases(self, tree, component): + # Sort the weather phrases (weather_phrase, skyPopWx_phrase) + # according to their time span for non-empty sub-phrases. + # Then replace the weather phrases in the component childList. + # We will assume that all the weather phrases for the + # component are consecutive. 
+ wxPhraseNames = self.weatherPhraseNames(tree, component) + phraseList = component.get("childList") + wxPhrases = [] + for phrase in phraseList: + if phrase.get("name") in wxPhraseNames: + wxPhrases.append(phrase) + #print "appending", phrase.get('name'), phrase.get('words') + wxPhrases.sort(self.sortPhraseTimeSpans) + #print "sorted list" + #for phrase in wxPhrases: + # print phrase.get('name'), phrase.get('words') + + newPhraseList = [] + firstTime = 1 + for phrase in phraseList: + if phrase.get("name") in wxPhraseNames: + if firstTime: + # Add in the sorted wxPhrases + newPhraseList += wxPhrases + firstTime = 0 + continue + # If not a wx phrase, append to the list + newPhraseList.append(phrase) + component.set("childList", newPhraseList) + + def sortPhraseTimeSpans(self, phrase1, phrase2): + # Determine which phrase should come first according + # to it's time span of non-empty sub-phrases + # First, determine the timeSpan of each phrase: + defaultTR = TimeRange.default() + for phrase in [phrase1, phrase2]: + # If time span already calculated, skip it + timeSpan = phrase.get("timeSpan") + if timeSpan is not None: + continue + startTime = None + endTime = None + for subPhrase in phrase.get("childList"): + if subPhrase.get("words") != "": + tr = subPhrase.getTimeRange() + trStart = tr.startTime() + trEnd = tr.endTime() + if startTime is None: + startTime = trStart + elif startTime > trStart: + startTime = trStart + if endTime is None: + endTime = trEnd + elif endTime < trEnd: + endTime = trEnd + if startTime is not None and endTime is not None: + # Make a time span for this phrase's words + phraseTR = TimeRange.TimeRange(startTime, endTime) + phrase.set("timeSpan", phraseTR) + else: + phrase.set("timeSpan", defaultTR) + # Order the phrases according to their time spans. 
+ timeSpan1 = phrase1.get("timeSpan") + timeSpan2 = phrase2.get("timeSpan") + if timeSpan1 == defaultTR or timeSpan2 == defaultTR: + return 0 + #print "\ntimeSpan1, timeSpan2", timeSpan2, timeSpan2 + #print " order", self.orderTimeRanges(timeSpan2, timeSpan2) + return self.orderTimeRanges(timeSpan1, timeSpan2) + + def orderTimeRanges(self, tr1, tr2): + # If tr1 should come before tr2, return -1 + # If equal, return 0, else return 1 + #print "\nin orderTimeRanges", tr1, tr2 + s1 = tr1.startTime() + s2 = tr2.startTime() + if s1 < s2: + #print "return1 -1" + return -1 + elif s2 < s1: + #print "return2 1" + return 1 + else: + # They start at the same time + e1 = tr1.endTime() + e2 = tr2.endTime() + # Put the one with the shortest + # span first + if e1 < e2: + #print "return3 -1" + return -1 + elif e1 > e2: + #print "return4 1" + return 1 + else: + #print "return5 0" + return 0 + + def consolidateSubPhrases(self, tree, component): + ## Timing: This method runs at the component level + ## AFTER all sub-phrase words have been set and + ## BEFORE they have been assembled into phrases at the phrase level. + ## + ## Purpose: Check for duplicate subPhrases and consolidate + # them into one. + ## + ## For example: (see Case 2 below) + ## Chance of thunderstorms in the morning (windward) + ## Chance of thunderstorms in the morning (leeward) + ## Chance of rain in the afternoon (windward) + ## Chance of snow in the afternoon (leeward) + ## + ## becomes: + ## Chance of thunderstorms in the morning (unqualified) + ## Chance of rain in the afternoon (windward) + ## Chance of snow in the afternoon (leeward) + + # Set a flag to make sure we pass by this method the first time + # so that the phrase set-up methods have a chance to run and + # create sub-phrases before we try to consolidate them + if component.get('first') is None: + component.set('first', 1) + return + + # Make sure all subPhrases have completed i.e. 
have words set + subPhraseList = [] + leaves = self.getLeaves(tree, component) + leFlag = 0 + for child in leaves: + words = child.get("words") + #print "Consolidate SubPhrases", child.getAncestor("name"), words + if words is None: + #print "Returning" + return + le = child.getAncestor('localEffect') + if le is not None: + leFlag = 1 + subPhraseList.append(child) + + # If no localEffects, skip this method + if not leFlag: + #print "In Consolidate SubPhrases: No local effects" + return self.DONE() + + if self.__dict__.get("_leDebug", 0): + print("\nConsolidateSubPhrases", tree.get('passes')) + + # Create subPhraseDict = + # {(words, tr, lePhraseNameGroup): + # list of subPhrases with those words, tr, and lePhraseNameGroup} + lePhraseNameGroups = self.lePhraseNameGroups(tree, component) + subPhraseDict = {} + for subPhrase in subPhraseList: + tr = subPhrase.getTimeRange() + words = subPhrase.get("words") + lePhraseNameGroup, firstName = self.getLePhraseNameGroup( + tree, component, lePhraseNameGroups, subPhrase.parent) + if words == "": + continue + if self.__dict__.get("_leDebug", 0): + print(subPhrase.getAncestor("name"))#, subPhrase.parent + print(" ", subPhrase.getAreaLabel(), tr, words) + print(" local effect", subPhrase.getAncestor('localEffect')) + self.addToDictionary(subPhraseDict, (words,tr,lePhraseNameGroup), subPhrase) + if self.__dict__.get('_leDebug', 0): print("subPhraseDict", subPhraseDict) + + # Check for duplicate subPhrases and consolidate them into one. + # Case 1: If the duplicates are all for the same areaLabel, + # set the areaLabel for the consolidated subPhrase to that. + # Case 2: If the duplicates are for a local effect and + # cover all possible local effect areas for their phrase, + # create a new phrase for component.getAreaLabel() + # with this subPhrase wording. Remove the local effect subPhrases. 
+ # Case 3: If the duplicates are for a local effect + # and they cover a subset of the local effect areas, + # leave them alone except for removing any component.getAreaLabel() + # duplicate subPhrases. + compArea = component.getAreaLabel() + if self.__dict__.get('_leDebug',0): + print("\nDetermine Case for each set of duplicate phrases. compArea", compArea) + for key in list(subPhraseDict.keys()): + words, tr, lePhraseNameGroup = key + subPhrases = subPhraseDict[key] + if len(subPhrases) <= 1: + continue + # We have duplicate subPhrases to consolidate. + # Gather the areaLabels for these duplicate subphrases + # and the possible localEffect Area labels + areaLabels, leAreas = self.gatherDupAreaLabels( + tree, component, compArea, subPhrases) + if self.__dict__.get('_leDebug',0): + print("\n", words) + print(" ", tr, len(subPhrases)) + print("areaLabels, leAreas", areaLabels, leAreas) + # Determine the consolidated areaLabel + if len(areaLabels) == 1: + # Case 1 + if self.__dict__.get('_leDebug',0): print("CASE 1") + # Remove all but the first subPhrase + for subPhrase in subPhrases[1:]: + subPhrase.set('words', "") + else: + parent = subPhrases[0].parent + localEffect = subPhrases[0].getAncestor('localEffect') + if localEffect is None: + continue + # See if all local effect areas are covered + allAreasCovered = self.allLeAreasCovered( + tree, component, compArea, leAreas, areaLabels) + if allAreasCovered: + # Case 2: Consolidate + if self.__dict__.get('_leDebug',0): print("CASE 2") + parent = subPhrases[0].parent + newNode = tree.copyPhrase( + parent, areaLabel=compArea, + copyAttrs=["doneList", "disabledSubkeys", "disabledElements", + "firstElement", "elementName", "elementInfoList", + "descriptor", "indentLabel"]) + component.insertChild(parent, newNode) + newSubPhrase = subPhrases[0].copy() + newNode.set('childList', [newSubPhrase]) + for subPhrase in subPhrases: + subPhrase.set('words', "") + else: + # Case 3: Throw out any compArea subPhrase and + # leave 
local effect ones alone for now + if self.__dict__.get('_leDebug',0): print("CASE 3") + for subPhrase in subPhrases: + if subPhrase.getAreaLabel() == compArea: + subPhrase.set("words", "") + return self.DONE() + + def gatherDupAreaLabels(self, tree, component, compArea, subPhrases): + areaLabels = [] + leAreas = [] + for subPhrase in subPhrases: + subArea = subPhrase.getAreaLabel() + if subArea not in areaLabels: + areaLabels.append(subArea) + #print "subArea", subArea, subPhrase.getAncestor('name') + if subArea != compArea: + localEffect = subPhrase.getAncestor("localEffect") + if localEffect is not None: + leAreas += self.getLeAreaList(tree, subPhrase, localEffect) + return areaLabels, leAreas + + def allLeAreasCovered(self, tree, component, compArea, leAreas, areaLabels): + allAreasCovered = 1 + if leAreas != []: + # Determine if the subPhrases cover all possible local effect areas + for leArea in leAreas: + if leArea.intersectFlag: + areaName = self.getIntersectName( + compArea, leArea.areaLabel) + elif leArea.areaLabel == "__Current__": + areaName = compArea + else: + areaName = leArea.areaLabel + #print "le Area Name", areaName + if areaName not in areaLabels: + allAreasCovered = 0 + break + return allAreasCovered + + def consolidateSubPhrases_trigger(self, tree, node): + # Checking to see if consolidateSubPhrases has been + # completed (if it is on the component methodList) + # This assumes that "node" is a phrase and its parent is + # a component. + parent = node.parent + if self.consolidateSubPhrases in parent.methodList and \ + self.consolidateSubPhrases not in parent.doneList: + return 0 + return 1 + + + def consolidateLocalEffectPhrases(self, tree, node): + # Organize the local effect and non-local effect phrases. + # "node" can be a component or a compound phrase. + # Convert to embedded local effect phrases if appropriate. 
+ # Apply the Local Effect thresholds: + # repeatingEmbedded_localEffect_threshold + # repeatingPhrase_localEffect_threshold + hasLE = 0 + for phrase in node.get('childList'): + le = phrase.get('localEffect') + if le is not None: + hasLE = 1 + break + if not hasLE: + # No local effect phrases so no work to to be done + return + if not self.incorporateNonLocalEffectPhrases(tree, node): + self.convertToEmbedded(tree, node) + self.orderLocalEffectPhrases(tree, node) + + # Add later as an enhancement + def incorporateNonLocalEffectPhrases(self, tree, node): + # Try to incorporate non-qualified phrases + # If there is exactly one leArea group in the set of phrases + # AND this group is composed of intersect areas + # AND there is more than one local effect phrase + # AND the number of non-local effect phrases + # < repeatingPhrase_localEffect_threshold: + # Convert them to conjunctive local effect phrases + # (one for each intersect local effect area) + # return 1 + # Else: + # return 0 + + # EXAMPLE: + # Instead of: + # Chance of thunderstorms in the morning. + # Windward...Cloudy...Rain likely...Chance of precipitation 70 percent. + # Leeward...Partly cloudy...Scattered showers...Chance of precipitation 30 + # percent. Highs in the 40s. Winds 20 mph. + # + # We will produce: + # Windward...Cloudy....Rain likely...Chance of thunderstorms in the morning... + # Chance of precipitation 70 percent. + # Leeward...Partly cloudy...Scattered showers...Chance of thunderstorms in + # the morning...Chance of precipitation 30 percent. Highs in the 40s. + # Winds 20 mph. + return 0 + + # Replaces combineConjunctiveLocalEffects + def convertToEmbedded(self, tree, component): + # + # Converts conjunctive local effects to embedded if possible. + # For each leGroup: + # If number of possible embedded phrases + # < repeatingEmbedded_localEffect_threshold + # AND there are NO mandatory conjunctives: + # Replace conjunctive phrases with an embedded phrase. 
+ # + if self.__dict__.get('_leDebug',0): print("\nConvert to embedded") + lePhraseNameGroups = self.lePhraseNameGroups(tree, component) + lePhraseDict = self.createLePhraseDict(tree, component, lePhraseNameGroups) + if self.__dict__.get('_leDebug',0): print("\nlePhraseDict", lePhraseDict) + + repeatThreshold = self.repeatingEmbedded_localEffect_threshold( + tree, component) + qualifiersDict = self.createQualifiersDict( + tree, component, lePhraseDict, repeatThreshold) + if self.__dict__.get('_leDebug',0): print("\nqualifiersDict", qualifiersDict) + + self.createEmbeddedPhrases( + tree, component, lePhraseDict, qualifiersDict, repeatThreshold) + if self.__dict__.get('_leDebug',0):print("\nlePhraseDict", lePhraseDict) + + self.insertEmbeddedPhrases( + tree, component, lePhraseDict, lePhraseNameGroups) + + def createLePhraseDict(self, tree, component, lePhraseNameGroups): + # Organize phrases in the component by lePhraseNameGroups. lePhraseDict: + # lePhraseNameGroup: { + # qualifiers: e.g. 
["leeward", "windward"] + # phrases: [phrases] + # firstElementName: firstElement.name + # } + lePhraseDict = {} + for phrase in component.get("childList"): + if self.__dict__.get('_leDebug',0): + print("phrase", phrase.get('name'), phrase.get('words')) + print(" ", phrase.getAreaLabel()) + print(" ", phrase.get('conjunctiveQualifier')) + print(" ", phrase.get('embeddedQualifier')) + localEffect = phrase.get('localEffect') + if localEffect is None: + continue + lePhraseNameGroup, firstName = self.getLePhraseNameGroup( + tree, component, lePhraseNameGroups, phrase) + qualifier = phrase.get('embeddedQualifier') + # Add the entry to the dictionary + if lePhraseNameGroup in lePhraseDict: + entry = lePhraseDict[lePhraseNameGroup] + if qualifier not in entry["qualifiers"]: + entry["qualifiers"].append(qualifier) + entry["phrases"].append(phrase) + else: + lePhraseDict[lePhraseNameGroup] = { + "qualifiers": [qualifier], + "phrases": [phrase], + "firstElementName": firstName, + } + return lePhraseDict + + def createQualifiersDict(self, tree, component, lePhraseDict, repeatThreshold): + # Find out how many potential embedded phrases there are for each + # unique set of qualifiers. + # Create a qualifiersDict: qualifiers: count + # for lePhraseNameGroup in lePhraseDict: + # Can it be embedded i.e. are there NO mandatory conjunctives? + # If so, flag it as such and increase count for it's qualifier set. + # + qualifiersDict = {} + for lePhraseNameGroup in list(lePhraseDict.keys()): + embedded = 1 + nameDict = lePhraseDict[lePhraseNameGroup] + phrases = nameDict["phrases"] + # For the phraseNameGroup to be embedded, + # all phrases in the phrase name group must qualify + # to be embedded i.e.the phrase must have just one subphrase + # which is non-empty and covers the phrase time range. + # + # Also, create areaCountDict to keep track of number of + # potentially embedded phrases per areaLabel: + # areaLabel:count + # Unless the phraseNameGroup has multiple phrases (e.g. 
sky, pop, wx), + # the count for each areaLabel will be 1. + # If the number of embedded phrases for any area exceeds the + # repeatThreshold, do not do an embedded phrase. + # + areaCountDict = {} + for phrase in phrases: + subPhrases = phrase.get('childList') + if len(subPhrases) != 1: + embedded = 0 + break + subPhrase = subPhrases[0] + if subPhrase.get("words") == "" or \ + subPhrase.getTimeRange() != phrase.getTimeRange(): + embedded = 0 + break + # Keep track of count for each area + areaLabel = phrase.getAreaLabel() + if areaLabel in areaCountDict: + areaCountDict[areaLabel] += 1 + else: + areaCountDict[areaLabel] = 1 + if areaCountDict[areaLabel] > repeatThreshold: + if self.__dict__.get('_leDebug',0): + print("areaCount exceeded", areaLabel, areaCountDict[areaLabel]) + embedded = 0 + break + if embedded: + qualifiers = nameDict["qualifiers"] + # Sort, removeDups and re-store qualifiers as a tuple + # We convert the qualifiers from a list to a tuple + # so that the qualifiers can be dictionary keys in qualifiersDict + qualifiers.sort() + qualifiers = self.removeDups(qualifiers) + tQualifiers = tuple(qualifiers) + nameDict["qualifiers"] = tQualifiers + if tQualifiers in qualifiersDict: + qualifiersDict[tQualifiers] += 1 + else: + qualifiersDict[tQualifiers] = 1 + nameDict["embedded"] = embedded + return qualifiersDict + + def createEmbeddedPhrases(self, tree, component, lePhraseDict, qualifiersDict, + repeatThreshold): + # Convert to embedded if repeatingEmbedded_localEffect_threshold is not exceeded. 
+ # for lePhraseNameGroup in lePhraseDict: + # If it's leGroup count < repeatingEmbedded_localEffect_threshold: + # convert to embedded and add to lePhraseNameGroup entry + # + for lePhraseNameGroup in lePhraseDict: + nameDict = lePhraseDict[lePhraseNameGroup] + if not nameDict["embedded"]: + continue + qualifiers = nameDict["qualifiers"] + count = qualifiersDict[qualifiers] + if count > repeatThreshold: + nameDict["embedded"] = 0 + continue + # Create an embedded phrase + phrases = nameDict["phrases"] + nameDict["embeddedPhrase"] = self.makeEmbeddedFromConjunctiveLE( + tree, component, phrases) + + def insertEmbeddedPhrases(self, tree, component, lePhraseDict, lePhraseNameGroups): + # Insert the embedded phrases at the proper places in the component + # phraseList and remove the associated conjunctive phrases. + # Embedded phrases are inserted at the site of the first + # associated conjunctive phrase. + # + newPhraseList = [] + # doneList keeps track of those lePhraseNameGroups for which we've already + # inserted the embedded phrase + doneList = [] + if self.__dict__.get('_leDebug',0):print("\nStep") + for phrase in component.get("childList"): + if self.__dict__.get('_leDebug',0):print("phrase", phrase.get('words')) + localEffect = phrase.get('localEffect') + if localEffect is None: + newPhraseList.append(phrase) + continue + # Determine lePhraseNameGroup for this phrase + lePhraseNameGroup, firstName = self.getLePhraseNameGroup( + tree, component, lePhraseNameGroups, phrase) + if self.__dict__.get('_leDebug',0): + print(" lePhraseNameGroup", lePhraseNameGroup) + nameDict = lePhraseDict[lePhraseNameGroup] + if not nameDict["embedded"]: + newPhraseList.append(phrase) + continue + if lePhraseNameGroup not in doneList: + # insert the embedded phrase + newPhraseList.append(nameDict["embeddedPhrase"]) + doneList.append(lePhraseNameGroup) + component.set("childList", newPhraseList) + + def getLePhraseNameGroup(self, tree, node, lePhraseNameGroups, phrase): + # Unless 
the group is an explicitly defined by lePhraseNameGroups, + # the name returned will be the . + # Make sure all the phrases for each lePhraseNameGroup have the + # same firstElement UNLESS the lePhraseNameGroup is listed explicitly + # in self.lePhraseNameGroups. + # If not, make a separate dictionary entry name for each firstElement. + # For example: In the FWF, the"dayOrNight_phrase" may have MaxT for some phrases + # and MinRH for others. We want to keep them separate when converting + # to embedded local effect phrases. + explicitGroup = 0 + phraseName = phrase.get('name') + for group in lePhraseNameGroups: + if phraseName in group: + lePhraseNameGroup = group + explicitGroup = 1 + # Check the firstElement + firstElement = phrase.get('firstElement') + if firstElement is None: + firstName = "None" + else: + firstName = firstElement.name + if not explicitGroup: + lePhraseNameGroup = phraseName + "_" + firstName + return lePhraseNameGroup, firstName + + def orderLocalEffectPhrases(self, tree, node): + # + # Group all conjunctive local effect phrases + # for each local effect area together + # (at the location of the first occurrence). + # + # EXAMPLE: + # LEEWARD...SUNNY IN THE MORNING THEN BECOMING PARTLY SUNNY...SCATTERED SHOWERS. + # WINDWARD...MOSTLY CLOUDY WITH SCATTERED SHOWERS + # + # instead of: + # LEEWARD...SUNNY IN THE MORNING THEN BECOMING PARTLY SUNNY. + # WINDWARD...MOSTLY CLOUDY WITH SCATTERED SHOWERS. + # LEEWARD...SCATTERED SHOWERS. 
+ # + phraseList = node.get("childList") + newList = [] + doneAreas = [] + #print "\nOrder LE phrases" + for phrase in phraseList: + localEffect = phrase.get("localEffect") + areaLabel = phrase.getAreaLabel() + embedded = phrase.get("embedded") + #print "PHRASE", phrase.get('words') + if localEffect is None or embedded: + newList.append(phrase) + else: + #print " phrase", phrase.get("name") + #print " conjqualifier", phrase.get("conjunctiveQualifier") + #print " area", areaLabel + if areaLabel in doneAreas: + # We already added this phrase to the newList + # as a Local Effect area + continue + newList.append(phrase) + # Gather the other phrases for this local effect area. + index = phraseList.index(phrase) + for p in phraseList[index+1:]: + p_localEffect = p.get("localEffect") + p_area = p.getAreaLabel() + if p_localEffect is not None and p_area == areaLabel: + newList.append(p) + doneAreas.append(areaLabel) + node.set("childList", newList) + + def combineConjunctivePhrases(self, tree, component): + # Check for Conjunctive Local Effects and make sure + # we do not repeat the indented label. + # + # For example: + # LAL.................IN THE VALLEYS ...1. + # LAL.................IN THE MOUNTAINS...3 UNTIL 2400, THEN 1. + # + # Should be: + # LAL.................IN THE VALLEYS ...1. + # IN THE MOUNTAINS...3 UNTIL 2400, THEN 1. 
+ # + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + newChildList = [] + lastName = "" + lastElement = "" + lastPhrase = None + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + curName = phrase.get("name") + curElement = phrase.get("elementName") + if lastPhrase is None: + lastPhrase = phrase + lastName = curName + else: + # Look for a local effect phrase to be combined with + # lastPhrase + localEffect = phrase.get("localEffect") + embedded = phrase.get('embedded') + #print "phrase", curName, lastName, localEffect + if localEffect is not None and curName == lastName and \ + embedded != 1 and curElement == lastElement: + # Combine this phrase words into last one + # Add conjunctive qualifier + #print "combining" + phraseWords = phrase.get("words") + qualifier = phrase.get("conjunctiveQualifier") + if qualifier is not None and qualifier != "": + phraseWords = qualifier + " "+ phraseWords + newWords = lastPhrase.get("words") + "." 
+ \ + phraseWords + lastPhrase.set("words", newWords) + else: + # Add phrase to new list + #print "switching" + newChildList.append(lastPhrase) + lastPhrase = phrase + lastName = curName + lastElement = curElement + # Clean up lastPhrase + if lastPhrase is not None: + newChildList.append(lastPhrase) + component.set("childList", newChildList) + + + def makeEmbeddedFromConjunctiveLE(self, tree, component, conjList): + # Make an embedded phrase from the list of conjunctive phrases + # + conjWords = "" + embeddedPhrase = tree.copyPhrase( + conjList[0], areaLabel = component.getAreaLabel(), + copyAttrs=["doneList", "descriptor", "indentLabel", + "embeddedDescriptor", "localEffect"]) + descriptor = self.addSpace(embeddedPhrase.get("descriptor")) + if descriptor == "": + descriptor = self.addSpace(embeddedPhrase.get("embeddedDescriptor")) + #print "\nIn makeEmbeddedFromConjunctiveLE", descriptor + index = 0 + localEffect = embeddedPhrase.get("localEffect") + fcst = "" + # Gather words + for phrase in conjList: + words = phrase.get("words") + if words == "": + continue + if not index == 0: + # Get connector + connector = localEffect.exceptionWords + fcst = fcst + connector + # Get rid of duplicate descriptor + if descriptor != "": + words = words.replace(descriptor, "") + # Local Effect Descriptor + areaWords = phrase.getAncestor("embeddedQualifier") + if areaWords is None: + areaWords = "" + areaWords = self.addSpace(areaWords, "leading") + fcst = fcst + words + areaWords + index = index + 1 + embeddedPhrase.set("words", fcst) + embeddedPhrase.set('embedded', 1) + self.postProcessPhrase(tree, embeddedPhrase) + #print "embedded words", embeddedPhrase.get('words') + return embeddedPhrase + + def qualifyWords(self, node, words, qualifierName, lastQualifier, + lastPhrase, makeSentence=1): + # Qualifies words with local effect qualifiers + # Also, if makeSentence==1, makes the words into a sentence + # when appropriate. 
+ # Returns the modified words and the qualifier (if any) + # + # Logic: + # If empty words, skip. + # If no qualifier: + # if makeSentence: + # makeSentence and return words and lastQualifier + # If there is a qualifier: + # Handle a new qualifier. + # If qualifier is new and non-empty: + # Add the qualifier and ellipses to beginning of words + # Handle a continuation: If the next phrase will be qualified + # with the same qualifier, + # Add ellipses to the end of the words. In this case, + # we will not add a period to the end of the words + # when making a sentence. + # if makeSentence, make the words into a sentence with or without + # a period at the end. + # return words and qualifier + # + qualifier = node.get(qualifierName) + #print "\nQualify words: qualifier, lastQualifier, words", qualifier, lastQualifier, words + if words == "": + return words, lastQualifier + addPeriod = 1 + if qualifier is not None: + if qualifier != lastQualifier and qualifier != "": + words = qualifier + ", " + words + next = self.getNext_nonEmpty(node, "words") + if next is not None: + nextQualifier = next.get(qualifierName) + #print "nextQualifier, qualifier", nextQualifier, "X", qualifier, "X", words + if nextQualifier == qualifier: + addPeriod = 0 + words = words + ", " + if makeSentence: + words = self.sentence(words, addPeriod) + #print "returning", words + return words, qualifier + + def getNext_nonEmpty(self, node, attrName): + next = node.getNext() + while 1: + if next is None: + break + val = next.get(attrName) + if val is not None and val != "": + break + next = next.getNext() + return next + + def namesEqual(self, name1, name2): + weatherPhrases = ["skyPopWx_phrase", "weather_phrase"] + if name1 == name2 or \ + (name1 in weatherPhrases and name2 in weatherPhrases): + namesEqual = 1 + else: + namesEqual = 0 + return namesEqual + + def wordWrap(self, tree, component): + # Wrap the component.words() + compWords = component.get("words") + if compWords is None: + return + 
compWords = self.endline(compWords, tree.get("lineLength")) + return self.setWords(component, compWords) + + def createLabel(self, tree, node, timeRange, issuanceInfo, currentLocalTime, shift, index=0): + # Make a label given the timeRange in GMT and the shift to + # convert it to local time. currentLocalTime can be used to + # compare to current day. + + # NOTE: If you make changes to this method, change the SAF_Overrides + # file as it is overridden there. + + if timeRange.duration() <= 3600: + return "" + if index == 0: + try: + label = issuanceInfo.period1Label() + if label != "": + return label + except: + pass + try: + today = issuanceInfo.todayFlag() + except: + today = 1 + try: + useHolidays = self._useHolidays + except: + useHolidays = 1 + nextDay24HourLabel = self.nextDay24HourLabel_flag(tree, node) + splitDay24HourLabel = self.splitDay24HourLabel_flag(tree, node) + label = self.getWeekday(timeRange, holidays=useHolidays, shiftToLocal=1, + labelType="CapitalWithPeriod", today=today, + tomorrow=0, nextDay24HourLabel=nextDay24HourLabel, + splitDay24HourLabel=splitDay24HourLabel) + return label + + + # Ordering phrases + def orderPhrases(self, tree, component): + # Reorder highs and lows based on start period + + reorderList = [] + timeRange=component.getTimeRange() + areaLabel= component.getAreaLabel() + + if timeRange.duration() >= 24 * 3600: + startTR = TimeRange.TimeRange(timeRange.startTime(), + timeRange.startTime() + (12 * 3600)) + dayNight = self.getPeriod(startTR, 1) + if dayNight == self.NIGHTTIME(): + reorderList.append(("lows_phrase", "highs_phrase")) + reorderList.append(("lows_range_phrase", "highs_range_phrase")) + + for phrase1, phrase2 in reorderList: + self.moveAbove(tree, component, phrase1, phrase2) + + return self.DONE() + + def moveAbove(self, tree, component, phrase1, phrase2): + # Move the phrase phrase1 above phrase2 in the + # component list + + # Find Phrase to move + savedPhrase = "" + for phrase in component.childList: + name = 
phrase.get("name") + if name == phrase1: + savedPhrase = phrase + break + # Create new phrase list inserting savedPhrase in + # the new location + newPhraseList = [] + for phrase in component.childList: + name = phrase.get("name") + if name == phrase1: + continue + if name == phrase2 and savedPhrase != "": + newPhraseList.append(savedPhrase) + newPhraseList.append(phrase) + component.set("childList", newPhraseList) + + return self.DONE() + + # Phrase Level + def subPhraseSetUp(self, tree, phrase, elementInfoList, connectorMethod, resolution=None): + # Set up subPhrase nodes and "statDict" statistics for multiple elements. + # The temporal resolution of the first element determines the number of subPhrases created. + # If the elementInfoList is empty, one empty subPhrase is created with an empty "statDict". + # If there is noData for the first element, one empty subPhrase is created. + # + # Sets up the following attributes: + # Phrase Level + # descriptor -- based on phrase_descriptor for the first element + # connectorMethod -- based on setUp arguments + # elementInfoList -- adds "outUnits" to each elementInfo in list + # firstElement -- elementInfo for first in elementInfoList + # elementName -- elementName for first in elementInfoList + # + # SubPhrase Level + # elementName -- elementName for first in elementInfoList + # timeRange + # statDict -- entries for all elements in elementInfoList + # + timeRange = phrase.getTimeRange() + areaLabel = phrase.getAreaLabel() + statDictList = [] + + if len(elementInfoList) < 1: + # Make phrase with one empty subphrase + self.makeEmptySubPhrase(tree, phrase, None) + return self.DONE() + + # Make sub ranges based on first element + first = elementInfoList[0] + elementName = first.name + first.outUnits = self.element_outUnits(tree, phrase, first.name, first.name) + + # Check to see if the timeRange is great enough to collapse sub-phrases + # automatically + hours = self.collapseSubPhrase_hours(tree, phrase, elementName, 
elementName) + if timeRange.duration() > hours * 3600: + first.mergeMethod = self.mergeMethod(tree, phrase, elementName, elementName) + + #print "Getting first", first.name, timeRange, areaLabel + # Check to see if we are requesting a particular time resolution + if resolution is not None: + # Create sub-ranges with this resolution and provide list + # of stats for each time range + subRanges = self.divideRange(timeRange, resolution) + stats = [] + for subRange in subRanges: + subStats = tree.stats.get(first.name, subRange, areaLabel, + first.statLabel, first.mergeMethod) + stats.append((subStats, subRange)) + else: + stats = tree.stats.get(first.name, timeRange, areaLabel, + first.statLabel, first.mergeMethod) + #print "stats", stats + statsByRange = self.makeRangeStats(tree, first.dataType, stats, timeRange) + #print "statsByRange", first.name, statsByRange + + # Case of no data for first element + if statsByRange is None: + self.makeEmptySubPhrase(tree, phrase, first) + return self.setWords(phrase, "") + + phrase.set("emptyPhrase", 0) + # Set up descriptor and connector + if phrase.get("descriptor") is None: + descriptor = self.phrase_descriptor(tree, phrase, first.name, first.name) + phrase.set("descriptor", descriptor) + phrase.set("connectorMethod", connectorMethod) + + # Create sub phrases based on first element + # Create subPhrase List of (statDict, subRange) pairs + # This list will be added to by each element + subPhraseList = [] + for stats, subRange in statsByRange: + #print "stats going into statDict", stats + subPhraseList.append(({first.name:stats}, subRange)) + + for subPhrase in subPhraseList: + # Add each additional element to the sub range statDict + statDict, subRange = subPhrase + for elementInfo in elementInfoList[1:]: + name = elementInfo.name + elementInfo.outUnits = self.element_outUnits(tree, subPhrase, name, name) + #print "Getting sub stats", elementInfo + stats = tree.stats.get( elementInfo.name, subRange, areaLabel, + 
elementInfo.statLabel, elementInfo.mergeMethod) + # Add to subPhrase statDict for each subPhrase + statDict[name] = stats + + # Make SubPhrase children + subPhraseMethods = phrase.get("subPhraseMethods") + childList = [] + #print "subPhraseList", subPhraseList + for statDict, subRange in subPhraseList: + subPhrase = tree.makeNode([], subPhraseMethods, phrase) + #print "statDict", statDict + subPhrase.set("statDict", statDict) + subPhrase.set("timeRange", subRange) + subPhrase.set("changeFlag", 0) + subPhrase.set("elementName", elementName) + childList.append(subPhrase) + + #print "Setting childList" + phrase.set("firstElement", first) + phrase.set("elementInfoList", elementInfoList) + phrase.set("elementName", elementName) + phrase.set("childList", childList) + if childList == []: + self.setWords(phrase, "") + + #print "AFTER SET-UP" + #if elementName == "Wx": + # print phrase.printNode(phrase) + return self.DONE() + + def makeEmptySubPhrase(self, tree, phrase, firstElement): + phrase.set("emptyPhrase", 1) + phrase.set("firstElement", firstElement) + phrase.set("connectorMethod", None) + subPhraseMethods = phrase.get("subPhraseMethods") + subPhrase = tree.makeNode([], subPhraseMethods, phrase) + if firstElement is None: + phrase.set("elementInfoList", []) + phrase.set("elementName", None) + subPhrase.set("elementName", None) + subPhrase.set("statDict", {}) + else: + phrase.set("elementInfoList", [firstElement]) + phrase.set("elementName", firstElement.name) + subPhrase.set("elementName", firstElement.name) + subPhrase.set("statDict", {firstElement.name:None}) + subPhrase.set("timeRange", phrase.getTimeRange()) + subPhrase.set("changeFlag", 0) + phrase.set("childList", [subPhrase]) + + + ### Checking for differences between sub-phrases + def checkForDifferences(self, tree, node, elementInfo, magOnly=0, dirOnly=0): + # Return 1 if there are differences among the subPhrase values + # for the given element. + # If VECTOR and magOnly==1, only the magnitude is checked. 
+ # If VECTOR and dirOnly==1, only the direction is checked. + # If no data, return 1 as well + elementName = elementInfo.name + dataType = elementInfo.dataType + statList = self.getSubStats(node, elementName) + if len(statList) > 1: + # Check each subphrase against the first + # Return when a difference is found + if dataType == self.SCALAR(): + if statList[0] is None: + return 1 + value = self.getValue(statList[0], "MinMax") + min1, max1 = value + for statVal in statList[1:]: + if statVal is None: + return 1 + statVal = self.getValue(statVal, "MinMax") + min2, max2 = statVal + differenceFlag = self.checkScalarDifference( + tree, node, elementName, min1, max1, min2, max2) + if differenceFlag: + return 1 + return 0 + if dataType == self.VECTOR(): + if statList[0] is None: + return 1 + mag, dir1 = self.getValue(statList[0], "MinMax", self.VECTOR()) + min1, max1 = mag + for stats in statList[1:]: + if stats is None: + return 1 + statMag, dir2 = self.getValue(stats, "MinMax", self.VECTOR()) + min2, max2 = statMag + differenceFlag = self.checkVectorDifference( + tree, node, elementName, min1, max1, dir1, min2, max2, dir2, magOnly, dirOnly) + if differenceFlag: + return 1 + return 0 + if dataType == self.WEATHER() or dataType == self.DISCRETE(): + wx = statList[0] + for wxVal in statList[1:]: + if wxVal is None or wx is None: + return 1 + if wxVal != wx: + return 1 + return 0 + + def checkScalarDifference(self, tree, node, elementName, min1, max1, min2, max2): + # Return 1 if the min/max pairs show a difference + # First see if both are below null threshold + threshold = self.null_nlValue(tree, node, elementName, elementName) + threshold1 = self.nlValue(threshold, max1) + threshold2 = self.nlValue(threshold, max2) + if max1 < threshold1 and max2 < threshold2: + return 0 + # See if only one is below null threshold + if self.null_alwaysDistinct_flag(tree, node, elementName, elementName): + if max1 < threshold1 or max2 < threshold2: + return 1 + # If one set of min/max has 
only one value, + # and that value matches the min or + # max of the other set, show no difference. + if min1 == max1 and (min1==min2 or max1==max2): + return 0 + if min2 == max2 and (min1==min2 or max1==max2): + return 0 + # Compare mins and compare maxs + diff_nlValue = self.scalar_difference_nlValue(tree, node, elementName, elementName) + diff_min = self.nlValue(diff_nlValue, min(min1, min2)) + diff_max = self.nlValue(diff_nlValue, max(max1, max2)) + if abs(min1-min2) < diff_min and abs(max1-max2) < diff_max: + return 0 + return 1 + + def checkVectorDifference(self, tree, node, elementName, + min1, max1, dir1, min2, max2, dir2, magOnly=0, dirOnly=0): + # Return 1 if the min/max/dir pairs show a difference + #print "Checking", elementName, min1, max2, dir1, min2, max2, dir2, magOnly + if magOnly == 0 or dirOnly == 1: + # DR_18632 +# if self.direction_difference(dir1, dir2) >= self.vector_dir_difference( +# tree, node, elementName, elementName): +# return 1 + diff = self.direction_difference(dir1, dir2) + nlValue_dict = self.vector_dir_difference_nlValue( + tree, node, elementName, elementName) + threshold_min = self.nlValue(nlValue_dict, min(min1, min2)) + threshold_max = self.nlValue(nlValue_dict, max(max1, max2)) + if diff >= min(threshold_min, threshold_max): + return 1 + if dirOnly == 1: + return 0 + + # Check magnitude + # Compare mins and maxs + + # Add special check for marine wording: + # This will prevent: + # NORTHWEST GALES TO 35 KNOTS RISING TO GALES TO 35 KNOTS AFTER MIDNIGHT. + # And will facilitate: + # "N WINDS 30 KT IN THE MORNING INCREASING TO + # GALES TO 35 KT EARLY IN THE AFTERNOON, THEN + # EASING TO 30 KT LATE IN THE AFTERNOON." 
+ if elementName == "Wind": + if self.marine_wind_combining_flag(tree, node): + if max1 > 30 or max2 > 30: + # Check for both within the same warning thresholds + warnThreshold1 = self.getWarnThreshold(max1) + warnThreshold2 = self.getWarnThreshold(max2) + if warnThreshold1 == warnThreshold2: + return 0 + else: + return 1 + + # First see if both are below null threshold + threshold = self.null_nlValue(tree, node, elementName, elementName) + threshold1 = self.nlValue(threshold, max1) + threshold2 = self.nlValue(threshold, max2) + if max1 < threshold1 and max2 < threshold2: + return 0 + # See if only one is below null threshold + if self.null_alwaysDistinct_flag(tree, node, elementName, elementName): + if max1 < threshold1 or max2 < threshold2: + return 1 + # If one set of min/max has only one value, + # and that value matches the min or + # max of the other set, show no difference. + if min1 == max1 and (min1==min2 or max1==max2): + return 0 + if min2 == max2 and (min1==min2 or max1==max2): + return 0 + # Check for magnitude differences + mag_nlValue = self.vector_mag_difference_nlValue( + tree, node, elementName, elementName) + mag_diff_min = self.nlValue(mag_nlValue, min(min1, min2)) + mag_diff_max = self.nlValue(mag_nlValue, max(max1, max2)) + if abs(min1-min2) >= mag_diff_min or abs(max1-max2) >= mag_diff_max: + return 1 + return 0 + + def getWarnThreshold(self, max): + if max >= 65: + return 3 + elif max > 45: + return 2 + elif max > 30: + return 1 + else: + return 0 + + def maskSubkeys(self, subkeyList, intensity=None): + # Make a new weather key masking the given intensity with the given value + if intensity is not None: + newkeyList = [] + for subkey in subkeyList: + newSubkey = WeatherSubKey.weatherSubKey(self._argDict['site'], subkey.coverage(), subkey.wxType(), intensity, + subkey.visibility(), subkey.attributes()) + newkeyList.append(newSubkey) + subkeyList = newkeyList + return subkeyList + + def checkWeatherSimilarity(self, tree, node, rankList1, 
                               rankList2,
                               node1=None, node2=None, tr1=None, tr2=None,
                               al1=None, al2=None):
        ### FIXES BUG BY FORCING ATTRIBUTES CHECK WHEN CHECKING FOR SIMILAR WX
        # Return 0 if the two sets of subkeys in the rankLists are significantly
        # different
        # If the keys can be considered similar:
        #   Return 1 if the first set of keys presides
        #   Return 2 if the second set of keys presides
        # Return a new aggregated rankList if there are multiple subkeys in the
        # rankLists AND they are similar.
        #
        # Optional nodes and time ranges may be supplied. These are used for
        # accessing PoP stats. All are necessary since "similar_diurnal" does
        # comparisons for various time ranges and local effects does comparisons
        # for various areas.

        #print "\nCheckWxSimilarity"

        # The ranks are available, but not currently used
        stats1 = self.getSubkeys(rankList1)
        stats2 = self.getSubkeys(rankList2)
        # Sort for comparison
        stats1.sort(self.rankedSortOrder)
        stats2.sort(self.rankedSortOrder)

        # Build the symmetric difference of the two subkey sets, comparing
        # by str() representation; an empty difference means identical Wx.
        diff = []
        for element in stats1:
            test = 1
            for el in stats2:
                if str(element) == str(el):
                    test = 0
            if test and str(element) not in diff:
                diff.append(str(element))
        for element in stats2:
            test = 1
            for el in stats1:
                if str(element) == str(el):
                    test = 0
            if test and str(element) not in diff:
                diff.append(str(element))
        if len(diff) == 0:
            return 1

        if stats1 == stats2:
            #print 'checkWx return 1'
            return 1

        # Check for equal length of statistics
        if len(stats1) == len(stats2):
            # If there is only one subkey to worry about
            if len(stats1) == 1:
                # If the types, intensities, and coverages are similar
                if self.similarWxTypes(tree, node, stats1[0], stats2[0]):
                    if self.similarIntensities(tree, node, stats1[0], stats2[0]):
                        if self.similarAttributes(tree, node, stats1[0], stats2[0]):
                            flag = self.similarCoverages(tree, node, stats1[0], stats2[0])
                            if flag > 0:
                                #print "returning flag", flag
                                return flag
                else:
                    # Different wxTypes are not similar
                    #print "returning diff wxTypes 0"
                    return 0
            # Node can turn off this check.
            # Some phrases (severeWeather_phrase, heavyPrecip_phrase, heavyRain_phrase)
            # are checking intensities, so we don't want to loose them
            if node.getAncestor("noIntensityCombining") != 1:
                # Make new subkeys that all have the same intensity
                stats1 = self.maskSubkeys(stats1, intensity="-")
                stats2 = self.maskSubkeys(stats2, intensity="-")
                if stats1 == stats2:
                    #print 'checkWx return 1'
                    return 1
            # Handle case of len(stats) > 1
            if len(stats1) > 1:
                return self.checkSubkeysSimilarity(
                    tree, node, rankList1, rankList2, node1, node2, tr1, tr2, al1, al2)

        # Check the PoP.
        # If low for both time periods and areas
        # AND there is no non-precip Wx
        # then we can assume the Wx is the same
        if node1 is None:
            node1 = node
        if node2 is None:
            node2 = node
        if tr1 is None:
            tr1 = node1.getTimeRange()
        if tr2 is None:
            tr2 = node2.getTimeRange()
        if al1 is None:
            al1 = node1.getAreaLabel()
        if al2 is None:
            al2 = node2.getAreaLabel()
        popstats1 = self.matchToWx(tree, node1, "PoP", tr1, al1)
        popstats2 = self.matchToWx(tree, node2, "PoP", tr2, al2)
        #print "popstats", popstats1, popstats2
        if popstats1 < self.pop_wx_lower_threshold(tree, node1) and \
           popstats2 < self.pop_wx_lower_threshold(tree, node2):
            for subkey in stats1:
                if not self.pop_related_flag(tree, node1, subkey):
                    return 0
            for subkey in stats2:
                if not self.pop_related_flag(tree, node2, subkey):
                    return 0
            return 1
        #print 'checkWx return 0'
        return 0

    def checkSubkeysSimilarity(self, tree, node, rankList1, rankList2,
                               node1, node2, tr1, tr2, al1, al2):
        # Return 0 if the two sets of subkeys in the rankLists are significantly
        # different
        # Otherwise, return a new rankList of the combined subkeys and ranks
        # sorted in rank order
        #
        # We combine if:
        #  The set of wxTypes in rankList1 is equal to the set of wxTypes in rankList2 AND
        #  Each wxType individually can be
        #  combined i.e. they have similar coverages
        #print "\nCheckSubkeysSimilarity"

        # Sort ranklists by wxType
        list1 = self.removeNoWx(rankList1)
        list2 = self.removeNoWx(rankList2)
        list1.sort(self.rankedWxTypeOrder)
        list2.sort(self.rankedWxTypeOrder)

        #print "rankList1, rankList2", rankList1, rankList2

        # NOTE(review): assumes list1 and list2 have equal length after
        # removeNoWx; a shorter list2 would raise IndexError here. Callers
        # appear to compare lengths before calling -- confirm.
        newRankList = []
        for i in range(len(list1)):
            subkey1, rank1 = list1[i]
            wxType1 = subkey1.wxType()
            subkey2, rank2 = list2[i]
            wxType2 = subkey2.wxType()
            if not wxType1 == wxType2:
                # We cannot combine
                return 0
            # See of the wxTypes have similar coverages
            similarFlag = self.checkWeatherSimilarity(
                tree, node, [list1[i]],[list2[i]], node1, node2, tr1, tr2, al1, al2)
            if similarFlag == 0:
                return 0
            # Average the ranks and aggregate the two subkeys.
            newRank = int((rank1 + rank2)/2.0)
            newSubkey = self.makeAggregateSubkey(subkey1, rank1, subkey2, rank2)
            newRankList.append((newSubkey, newRank))
##            if similarFlag == 1:
##                newRankList.append((subkey1, newRank))
##            else:
##                newRankList.append((subkey2, newRank))
        # Sort newRankList
        newRankList.sort(self.rankedSortOrder)
        #print "returning", newRankList
        return newRankList

    def removeNoWx(self, rankList):
        # Return a copy of rankList (entries are (subkey, rank) tuples) with
        # all "NoWx" entries -- those whose wxType is the empty string --
        # removed.
        newList = []
        for subkey, rank in rankList:
            if subkey.wxType() == "":
                continue
            newList.append((subkey, rank))
        return newList

    def similarWxTypes(self, tree, node, subkey1, subkey2):
        # If wxTypes should be similar, return 1
        # else return 0
        wxType1 = subkey1.wxType()
        wxType2 = subkey2.wxType()
        inten1 = subkey1.intensity()
        inten2 = subkey2.intensity()
        # Take care of sprinkles and flurries: a "--" intensity RW/SW is NOT
        # similar to the same type at another intensity.
        if wxType1 == wxType2 and wxType1 in ["RW", "SW"]:
            if inten1 != inten2 and (inten1 == "--" or inten2 == "--"):
                return 0
        if wxType1 == wxType2:
            return 1
        return 0

    def similarIntensities(self, tree, node, subkey1, subkey2):
        # Intensities '', '-', 'm' are treated as interchangeable.
        intenList = ['', '-', 'm']
        # If intensities are close enough
        inten1 = subkey1.intensity()
        inten2 = subkey2.intensity()
        if (inten1==inten2 or
            (inten1 in intenList and inten2
             in intenList)):
            return 1
        return 0

    def similarCoverages(self, tree, node, subkey1, subkey2):
        # Return 0 if coverages of subkey1 and subkey2 are significantly
        # different
        # Return 1 if coverages are similar and the coverage of subkey1
        # is dominant.
        # Return 2 if coverages are similar and the coverage of subkey2
        # is dominant.
        cov1 = subkey1.coverage()
        cov2 = subkey2.coverage()
        for coverageList in self.similarCoverageLists(tree, node, subkey1, subkey2):
            if (cov1 in coverageList and cov2 in coverageList):
                index1 = coverageList.index(cov1)
                index2 = coverageList.index(cov2)
                # Dominance follows position within the coverage list;
                # ties go to subkey1.
                if index1 >= index2:
                    #print 'checkWx return 1 - use subkey1'
                    return 1
                else:
                    #print 'checkWx return 2 - use subkey2'
                    return 2
        return 0

    def similarAttributeLists(self):
        # Lists weather attributes that can be combined or considered equal.
        # These lists are examined when producing rankLists,
        # combining sub-phrases, and
        # determining if there is a local effect to report.
        # Used by
        #   PhraseBuilder:checkWeatherSimilarity
        #   SampleAnalysis: getDominantValues
        return [
            ["DmgW", "GW"],
            ["LgA", "SmA"],
            ]

    def similarAttributes(self, tree, node, subkey1, subkey2):
        # If weather attributes are similar, return 1; otherwise return 0
        attrs1 = subkey1.attributes()
        attrs2 = subkey2.attributes()
        # Bookkeeping attributes (MX, OR, Mention, Primary) never affect
        # similarity, so strip them before comparing.
        attrs1 = self.removeSpecialAttributes(attrs1)
        attrs2 = self.removeSpecialAttributes(attrs2)
        attrs1.sort()
        attrs2.sort()

        # If the lists are equal, they are similar.
        if attrs1 == attrs2:
            return True

        # Otherwise, check that each attribute for subkey1 matches
        # an attribute for subkey2 and vice versa
        if self.matchAttrs(attrs1, attrs2) and self.matchAttrs(attrs2, attrs1):
            return True
        else:
            return False

    def removeSpecialAttributes(self, attrs):
        # Return a copy of attrs without the bookkeeping attributes that
        # should not influence attribute comparisons.
        rv = []
        for attr in attrs:
            if attr not in ["MX", "OR", "Mention", "Primary"]:
                rv.append(attr)
        return rv

    def matchAttrs(self, attrs1, attrs2):
        # True only if EVERY attribute in attrs1 has a match in attrs2
        # (see checkAttrs for the definition of a match).
        for attr1 in attrs1:
            if not self.checkAttrs(attr1, attrs2):
                return False
        return True

    def checkAttrs(self, attr1, attrs2):
        # Check to see if there is a match for attr1 in attrs2
        # A "match" is equality OR there exists an attr2 in attrs2
        # such that both attr1 and attr2 are in one of the similarAttrsLists
        # E.g., "GW" is attr1, "DmgW" is attr2 and attrList is ["GW","DmgW"]
        if attr1 in attrs2:
            return True
        for attrList in self.similarAttributeLists():
            if attr1 in attrList:
                for attr2 in attrs2:
                    if attr2 in attrList:
                        return True
        return False


    # Consolidation
    def consolidatePhrase(self, tree, phrase):
        # Partition a multi-element phrase's primary elements into those that
        # are constant across the phrase and those that change, then split
        # the phrase accordingly (continues below this chunk).
        # See if ready to process
        if not self.phrase_trigger(tree, phrase, setUpOnly=1):
            return
        # Separate out primary elements that are constant throughout phrase
        elementInfoList = phrase.get("elementInfoList")
        if elementInfoList is None or len(elementInfoList) <= 1:
            return self.DONE()
        subPhrases = phrase.get("childList")
        if len(subPhrases) <= 1:
            return self.DONE()
        first = 1
        constants = []
        nonConstants = []
        constantFirst = 0
        disabled = phrase.get("disabledElements", [])
        if disabled is None:
            disabled = []
        for elementInfo in elementInfoList:
            if elementInfo.name in disabled:
                continue
            if elementInfo.primary:
                # Primary elements
                diffFlag = self.checkForDifferences(tree, phrase, elementInfo)
                #print "element", elementInfo.name, diffFlag
                if diffFlag == 0:
                    constants.append(elementInfo)
                    # If first element is in constant list,
                    # make that the first
                    # phrase when split
                    # else, make it the second phrase
                    if first:
                        constantFirst = 1
                else:
                    nonConstants.append(elementInfo)
            else:
                # Secondary elements remain with first element
                if constantFirst:
                    constants.append(elementInfo)
                else:
                    nonConstants.append(elementInfo)
            first = 0
        # Split off elements that are not in the same list as the first element
        #print "Constants", constantFirst
        #for eleInfo in constants:
        #    print eleInfo.name
        #print "NonConstants"
        #for eleInfo in nonConstants:
        #    print eleInfo.name
        if constantFirst:
            splitElements = nonConstants
            curElements = constants
        else:
            splitElements = constants
            curElements = nonConstants
        length = len(splitElements)
        # Only split when there is something to split AND something remains.
        if length > 0 and length < len(elementInfoList):
            self.splitPhrase(tree, phrase, curElements, splitElements)
        return self.DONE()

    def splitPhrase(self, tree, phrase, curElements, splitElements):
        # For each element in splitElements e.g. Swell2 or WindGust:
        #   set the current phrase disabledElements
        #     (to turn them off for the original phrase)
        #   add a new phrase for the split element
        # For each element left in the current phrase e.g. Swell or Wind,
        #   set the new phrase disabled elements
        #     (to turn them off in the new phrase)
        #
        disabledElements = []
        newDis = []
        for elementInfo in curElements:
            newDis.append(elementInfo.name)
        for elementInfo in splitElements:
            disabledElements.append(elementInfo.name)
            newPhrase = tree.addPhraseDef(phrase, elementInfo.phraseDef)
            newPhrase.set("disabledElements", newDis)
        currentNone = phrase.getAncestor("disabledElements")
        if currentNone is not None:
            phrase.set("disabledElements", disabledElements + currentNone)
        else:
            phrase.set("disabledElements", disabledElements)
        # NOTE(review): nesting reconstructed from a flattened diff -- as
        # written, only the last newPhrase created above receives the copied
        # keys below; confirm against the upstream source.
        for key in ["spawnedWxPhrases", "conjunctiveQualifier",
                    "embeddedQualifier", "localEffect", "localEffectsList",
                    "firstElement", "elementName", "elementInfoList"]:
                    #"descriptor", "indentLabel"]:
            newPhrase.set(key, phrase.get(key))
        #print "\nSplitPhrase: New phrase", newPhrase, newDis
        #print "Current phrase disabled", phrase, phrase.get("disabledElements")

    def consolidateDirection(self, tree, phrase):
        # See if ready to process
        if not self.phrase_trigger(tree, phrase, setUpOnly=1):
            return
        # If vector direction is progressive and mags are similar,

        # use only first and last subPhrases
        elementInfoList = phrase.get("elementInfoList")
        if elementInfoList is None or len(elementInfoList) < 1:
            return self.DONE()
        subPhrases = phrase.get("childList")
        if len(subPhrases) <= 1:
            return self.DONE()
        firstElement = phrase.get("firstElement")
        # magOnly=1: directions are handled by checkProgression below.
        diffFlag = self.checkForDifferences(tree, phrase, firstElement, magOnly=1)
        if diffFlag == 0:
            vectorStats = self.getSubStats(phrase, firstElement.name)
            dirList = []
            for mag, dir in vectorStats:
                dirList.append(dir)
            progression = self.checkProgression(dirList)
            if progression:
                childList = phrase.get("childList")
                new = []
                new.append(childList[0])
                new.append(childList[len(childList)-1])
                phrase.set("childList", new)
        return self.DONE()

    def consolidateTrends(self, tree, phrase):
        # See if
        # we need to ignore this method
        if self.ignoreTrends(tree, phrase):
            return self.DONE()
        # See if ready to process
        if not self.phrase_trigger(tree, phrase, setUpOnly=1):
            return
        # If there is a progression of magnitudes,
        # use only first and last subPhrases with no time descriptor
        elementInfoList = phrase.get("elementInfoList")
        if elementInfoList is None or len(elementInfoList) < 1:
            return self.DONE()
        subPhrases = phrase.get("childList")
        if len(subPhrases) <= 2:
            return self.DONE()
        firstElement = phrase.get("firstElement")
        # If Vector, make sure directions are the same
        diffFlag = 0
        dataType = firstElement.dataType
        if dataType == self.VECTOR():
            diffFlag = self.checkForDifferences(tree, phrase, firstElement, dirOnly=1)
        # Check for an increasing or decreasing magnitude progression
        if diffFlag == 0:
            statList = self.getSubStats(phrase, firstElement.name)
            trend = self.checkTrend(statList, dataType)
            # If trend, take first and last children only
            if trend:
                childList = phrase.get("childList")
                new = []
                new.append(childList[0])
                new.append(childList[len(childList)-1])
                phrase.set("childList", new)
                # Turn off time descriptors
                phrase.set("noTimeDescriptors", 1)
        return self.DONE()

    def ignoreTrends(self, tree, node):
        # Trend consolidation is skipped for phrases reporting derived /
        # apparent temperatures.
        if node.get('name') in ["windChill_phrase", "windBased_windChill_phrase",
                                "heatIndex_phrase", "apparentT_phrase"]:
            return 1
        else:
            return 0

    def checkProgression(self, dirList):
        # Return 1 if the directions (degrees) in dirList form a steady
        # progression: every step turns the same way and is under 90 degrees.
        # make a list of differences
        diffList = []
        for i in range(1, len(dirList)):
            diff = dirList[i] - dirList[i-1] # calc difference
            # normalize the difference to remove the 359 -> 0 effect
            if diff > 180:
                diff = diff - 360
            elif diff < -180:
                diff = diff + 360
            diffList.append(diff)

        minVal = min(diffList)
        maxVal = max(diffList)

        # any diffs >= 90 not allowed
        if maxVal >= 90 or minVal <= -90:
            return 0

        # see if all the diff are of the same sign, if not return 0
        if minVal * maxVal < 0:
            return 0
        else:
            return 1

    def checkTrend(self, statList, dataType):
        # check to see if increasing/decreasing values
        # Return 1 when the sub-phrase maxima form a monotone trend whose
        # direction is fixed by the first two values; else 0.
        lastMax = None
        trend = None
        for stats in statList:
            if dataType == self.VECTOR():
                stats, dir = stats
            min, max = self.getValue(stats, "MinMax")
            if lastMax is None:
                lastMax = max
            elif trend is None:
                # trend is True (1) for decreasing, False (0) for increasing
                trend = lastMax > max
                lastMax = max
            else:
                # Test for an decreasing trend
                if trend == 1 and lastMax > max:
                    lastMax = max
                    continue
                # Test for an increasing trend
                if trend == 0 and lastMax <= max:
                    lastMax = max
                    continue
                return 0
        return 1

    def chooseMostImportant(self, tree, phrase):
        # If there is more than 1 sub-phrase AND mostImportant_dict
        # is set, report only the "Min" or "Max" sub-phrase using
        # the "mostImportant_descriptor"
        if not self.phrase_trigger(tree, phrase, setUpOnly=1):
            return
        elementInfoList = phrase.get("elementInfoList")
        if elementInfoList is None or len(elementInfoList) < 1:
            return self.DONE()
        subPhrases = phrase.get("childList")
        if len(subPhrases) <= 1:
            return self.DONE()
        elementName = phrase.get("elementName")
        mostImportant = self.mostImportant(tree, phrase, elementName, elementName)
        if mostImportant is None:
            return self.DONE()

        # Find the index of the sub-phrase with the Min or Max value
        firstElement = phrase.get("firstElement")
        statList = self.getSubStats(phrase, elementName)
        dataType = firstElement.dataType
        for i in range(len(statList)):
            if dataType == self.VECTOR():
                stats, dir = statList[i]
            else:
                stats = statList[i]
            min, max = self.getValue(stats, "MinMax")
            if i == 0:
                if mostImportant == "Min":
                    importantVal = min
                else:
                    importantVal = max
                importantIndex = 0
            else:
                if mostImportant == "Min":
                    if min < importantVal:
                        importantVal = min
                        importantIndex = i
                else:
                    if max > importantVal:
                        importantVal = max
                        importantIndex = i
        # Null out the other sub-phrases
        for i in range(len(subPhrases)):
            if i != importantIndex:
                statDict = subPhrases[i].getStatDict()
                statDict[elementName] = None
                #print "subPhrase", subPhrases[i].getTimeRange()
        # Set up the mostImportant_descriptor
        descriptor = self.mostImportant_descriptor(
            tree, phrase, elementName, elementName)
        if descriptor is not None:
            phrase.set("descriptor", descriptor)
        return self.DONE()

    def getSubStats(self, phrase, elementName):
        # Return a list of stats for the subPhrases
        # (sub-phrases without a statDict are skipped).
        statList = []
        for subPhrase in phrase.childList:
            statDict = subPhrase.getStatDict()
            if statDict is None:
                continue
            statList.append(statDict[elementName])
        return statList

    def splitWxPhrase(self, tree, node, disabledSubkeys1, disabledSubkeys2, doneList,
                      newPhraseDef=None):
        # Set disableSubkeys1 for original node
        # Create a new node with disabledSubkeys2
        # Set new phrase doneList using "doneList"
        # Add to new phrase according to newPhraseDef if provided,
        # otherwise, duplicate current node
        # Make sure to propagate "disabledSubkeys" and "spawnedWxPhrases"
        # from the original node to the new node
        #print "\nSplit Wx Phrase: original node", node.get("name"), node.getAreaLabel()
        #print "   ", node
        #import traceback
        #traceback.print_stack(limit=3)
        #print "  localEffect", node.get('localEffect')
        #print "  parent", node.parent
        #tree.printNode(node)
        disabled = node.getAncestor("disabledSubkeys")
        if disabled is None:
            disabled = []
        disabledSubkeys1 = disabledSubkeys1 + disabled
        node.set("disabledSubkeys", disabledSubkeys1)
        if newPhraseDef is None:
            newPhrase = tree.addPhrase(node)
            # Inherited disables are added only when duplicating this node.
            disabledSubkeys2 = disabledSubkeys2 + disabled
        else:
            newPhrase = tree.addPhraseDef(node, newPhraseDef)
        newPhrase.set("disabledSubkeys", disabledSubkeys2)
        newPhrase.set("doneList", doneList)
        #print "  disabled", node.get("disabledSubkeys")
        #print "new node", newPhrase, newPhrase.get("name")
        #print "  parent", newPhrase.parent
        for key in ["spawnedWxPhrases", "conjunctiveQualifier",
                    "embeddedQualifier", "localEffect", "localEffectsList",
                    "firstElement", "elementName", "elementInfoList"]:
                    #"descriptor", "indentLabel"]:
            #print "  setting ", key, node.get(key)
            newPhrase.set(key, node.get(key))
        #print "  ", newPhrase.getAreaLabel()
        #print "  disabled", newPhrase.get("disabledSubkeys")
        #tree.printNode(newPhrase)
        return newPhrase

    ## Combining
    ## If you want to alter the combining criteria,
    ## override starred (**) methods
    ##
    ##  combinePhraseStats (phrase level)
    ##  combineWords (phrase level)
    ##  combineComponentStats (tree level)
    ##  combineWords (any level)

    ##    combineChildren -- loops through child nodes
    ##      (tree, node, combineMethod)

    ##      **combineScalars (subPhrase1, subPhrase2)
    ##      **combineVectors (subPhrase1, subPhrase2)
    ##      **combineWx (subPhrase1, subPhrase2)
    ##      **combineComponents (component1, component2)

    ##      combine2SubPhrases (tree, phrase, subPhrase1, subPhrase2)
    ##      combine2Components (tree, tree, component1, component2)

    ##    combineChildWords (any node with children that has words)
    ##      combine2Children (tree, node, child1, child2)

    def combinePhraseStats(self, tree, phrase):
        # See if ready to process
        if not self.phrase_trigger(tree, phrase, setUpOnly=1):
            return
        return self.combineChildren(tree, phrase, self.combineStats)
        #print "before combine ", phrase.get("elementName"), len(phrase.get("childList"))
        #result = self.combineChildren(tree, phrase, self.combineStats)
        #print "after combine ", phrase.get("elementName"), len(phrase.get("childList"))
        #return result

    def recallCombinePhraseStats(self, tree, phrase):
        # This is needed because we want to call combinePhraseStats twice in some phrases.
        # If we simply put in two calls to a method, it gets put on the "doneList" with
        # the first call and is not called again.
        # See if ready to process
        if not self.phrase_trigger(tree, phrase, setUpOnly=1):
            return
        return self.combinePhraseStats(tree, phrase)

    def combineComponentStats(self, tree, node):
        # Combine adjacent similar components (periods) of the tree.
        return self.combineChildren(tree, node, self.combineComponents)

    def combineWords(self, tree, node):
        # Check for data
        if not self.phrase_trigger(tree, node):
            return
        children = node.get("childList")
        if len(children) <= 1:
            return self.DONE()
        return self.combineChildren(tree, node, self.combineChildWords)

    def combineChildren(self, tree, node, combineMethod):
        # Combine similar nodes if possible.
        # Walks the childList pairwise; on a successful combine the merged
        # child replaces the pair and the index stays put so the merged
        # child can combine again with its new neighbor.
        length = len(node.childList)
        if length <= 1:
            return self.DONE()
        index = 1
        while index < len(node.childList):
            # Try to combine with previous subPhrase
            combineFlag, combinedChild = combineMethod(
                tree, node, node.childList[index-1], node.childList[index])
            if combineFlag:
                # Reset childList for phrase
                node.childList[index-1] = combinedChild
                del node.childList[index]
            else:
                index = index + 1
        return self.DONE()

    def combineComponents(self, tree, node, component1, component2):
        # Criteria to set combine_flag

        #print "\nTrying to combine"
        # Don't combine components with different names since their
        # analysis lists could be different.
        comp1Name = component1.get("name")
        comp2Name = component2.get("name")
        if comp1Name != comp2Name:
            #print "Different component names", comp1Name, comp2Name
            return 0, None

        # Make sure we don't combine periods any earlier than we should
        noCombineUntil = self.periodCombining_startHour(tree, node)
        if self.hoursPastProductStart(tree, component1) <= noCombineUntil:
            return 0, None

        ## call the "similar" methods to see if each element is roughly
        ## the same.  Any element that is not similar will cause the
        ## combine flag to evaluate to 0 or false.
        # Run each configured similar<Element> check; any failure vetoes
        # the combination.
        elements = self.periodCombining_elementList(tree, component1)
        combine_flag = 1
        for element in elements:
            #print "trying to combine", element
            exec("combine_flag = combine_flag and self.similar"+element+\
                 "(tree, component1, component2)")
            #print "result", combine_flag

        if combine_flag:
            #print "combining"
            newComp = self.combine2Components(tree, tree, component1, component2)
            return 1, newComp
        return 0, None

    def similarWind(self, tree, comp1, comp2):
        # Returns true if the wind stats are similar
        # Also, return true (combine) if past the first 5 period since
        # wind is not reported in these periods

        # these numbers determine if components are close enough to combine
        magThreshold = 10
        dirThreshold = 45

        al1 = comp1.getAreaLabel()
        al2 = comp2.getAreaLabel()
        tr1 = comp1.getTimeRange()
        tr2 = comp2.getTimeRange()
        stats1 = tree.stats.get("Wind", tr1, al1, mergeMethod = "Average")
        stats2 = tree.stats.get("Wind", tr2, al2, mergeMethod = "Average")

        # If past the first 5 periods, return 1 (combine)
        hours = self.hoursPastProductStart(tree, comp1)
        if hours >= 5*12:
            return 1

        # check for none
        if stats1 is None or stats2 is None:
            return 0

        mag1 = stats1[0]
        mag2 = stats2[0]
        dir1 = stats1[1]
        dir2 = stats2[1]
        # calculate the differences, mag and dir
        magDiff = abs(mag1 - mag2)
        dirDiff = abs(dir1 - dir2)

        # account for the 360 to 0 problem
        if dirDiff > 180:
            dirDiff = abs(dirDiff - 360.0)

        if magDiff <= magThreshold and dirDiff <= dirThreshold:
            return 1

        return 0

    def hoursPastProductStart(self, tree, node):
        # Compute the hours past the product start time (prodTR)
        # that the current time range (curTR) starts.
        # If the prodTR is not a multiple of 12, then it is either
        # --an update and the first period is less than 12 hours, or
        # --a pre-first period issuance.
        # In these case, we return the hours past the product start
        # as if the first period was a full 12-hour period.
        # For example,
        #   A morning update issuance starting at 10 am would
        #   have an hoursPastProductStart for the first period
        #   of 4 hours.
        #   A pre-first period issuance starting at 4 am would
        #   have an hoursPastProductStart for the first period
        #   of -2 hours.
        prodTR = tree.getTimeRange()
        curTR = node.getTimeRange()
        prodHours = prodTR.duration()/3600
        prodMod = prodHours%12
        if prodMod > 0:
            try:
                # check for 'pre-first period issuances'
                period1Hours = self._issuanceInfo.period1TimeRange().duration()/3600
                if period1Hours > 12:
                    adjustHours = prodMod
                else:
                    adjustHours = -(12-prodMod)
            # NOTE(review): bare except deliberately treats any failure
            # (e.g. missing _issuanceInfo) as "no adjustment".
            except:
                adjustHours = 0
        else:
            adjustHours = 0
        prodStart = prodTR.startTime() + adjustHours*3600
        return (curTR.startTime() - prodStart)/3600

    def similarSky(self, tree, comp1, comp2):
        # Returns true if sky stats are similar
        # Necessary because of the override to sky_valueList above
        al1 = comp1.getAreaLabel()
        al2 = comp2.getAreaLabel()
        tr1 = comp1.getTimeRange()
        tr2 = comp2.getTimeRange()
        return self.similarSkyLogic(tree, comp1, comp2, tr1, al1, tr2, al2)

    def similarWx(self, tree, comp1, comp2):
        # Returns true if wx stats are similar
        al1 = comp1.getAreaLabel()
        al2 = comp2.getAreaLabel()
        tr1 = comp1.getTimeRange()
        tr2 = comp2.getTimeRange()
        return self.similarWxLogic(tree, comp1, comp2, tr1, al1, tr2, al2)

    def similarPoP(self, tree, comp1, comp2):
        # returns true if PoP stats are similar
        stats1 = self.matchToWx(tree, comp1, "PoP")
        stats2 = self.matchToWx(tree, comp2, "PoP")

        if stats1 is None and stats2 is None:
            return 1

        # check for none
        #if stats1 is None or stats2 is None:
        #    return 0

        if stats1 == stats2:
            return 1

        # NOTE(review): when exactly one stat is None these comparisons rely
        # on Python 2 ordering (None compares less than numbers) -- confirm
        # this behavior is intended.
        if stats1 < self.pop_lower_threshold(tree, comp1) and \
           stats2 < self.pop_lower_threshold(tree, comp2):
            return 1

        if stats1 > self.pop_upper_threshold(tree, comp1) and \
           stats2 > self.pop_upper_threshold(tree, comp2):
            return 1

        return 0


    ## Submitted by
    ## Brian Walawender 3/05
    ## The problem with combining long time periods, is that the
    ## combined period is growing 12 hours at a time. If you get rid of the bleed
    ## over grids for MinT and MaxT (SampleAnalysis temporalCoverage_hours_dict),
    ## then you start returning None for either MaxT or MinT during these 12 hour periods.
    ## To combat this, I check the duration of tr1 and tr2.
    ## If it is 12 or less then I check to see if it is day or night.
    ## For MaxT, it will return a combine if the period is 12 hours or less and it is a
    ## nighttime period.
    ## For MinT, it will return a combine if the period is 12 hours
    ## or less and it is a daytime period. This allowed long periods to be grouped
    ## together without bleed over.

    def similarMaxT(self, tree, comp1, comp2):
        # returns true if temp stats are similar

        # this number determines if components are close enough to combine
        tempThreshold = 5 # degrees

        al1 = comp1.getAreaLabel()
        al2 = comp2.getAreaLabel()
        tr1 = comp1.getTimeRange()
        tr2 = comp2.getTimeRange()

        # Combined spans of a day or less are always similar.
        hours = (tr2.endTime()-tr1.startTime())/3600
        if hours <= 24:
            return 1

        # A short (<= 12h) nighttime period has no MaxT of its own
        # (no bleed-over), so allow the combine.
        if (tr1.duration()/3600) <= 12:
            dayNight = self.getPeriod(tr1, 1)
            if dayNight == self.NIGHTTIME():
                return 1

        if (tr2.duration()/3600) <= 12:
            dayNight = self.getPeriod(tr2, 1)
            if dayNight == self.NIGHTTIME():
                return 1

        stats1 = tree.stats.get("MaxT", tr1, al1, mergeMethod = "Average")
        stats2 = tree.stats.get("MaxT", tr2, al2, mergeMethod = "Average")
        # check for none
        if stats1 is None or stats2 is None:
            return 0

        if abs(stats1 - stats2) < tempThreshold:
            return 1

        return 0

    def similarMinT(self, tree, comp1, comp2):
        # returns true if temp stats are similar

        # this number determines if components are close enough to combine
        tempThreshold = 5 # degrees
        al1 = comp1.getAreaLabel()
        al2 = comp2.getAreaLabel()
        tr1 = comp1.getTimeRange()
        tr2 = comp2.getTimeRange()
        hours =
(tr2.endTime()-tr1.startTime())/3600 + if hours <= 24: + return 1 + + if (tr1.duration()/3600) <= 12: + dayNight = self.getPeriod(tr1, 1) + if dayNight == self.DAYTIME(): + return 1 + + if (tr2.duration()/3600) <= 12: + dayNight = self.getPeriod(tr2, 1) + if dayNight == self.DAYTIME(): + return 1 + + # check for none + stats1 = tree.stats.get("MinT", tr1, al1, mergeMethod = "Average") + stats2 = tree.stats.get("MinT", tr2, al2, mergeMethod = "Average") + + if stats1 is None or stats2 is None: + return 0 + + if abs(stats1 - stats2) < tempThreshold: + return 1 + + return 0 + + + def similarWaveHeight(self, tree, comp1, comp2): + # returns true if seas stats are similar + + # this number dtermines if components are close enough to combine + seaThreshold = 4 # feet + + al1 = comp1.getAreaLabel() + al2 = comp2.getAreaLabel() + tr1 = comp1.getTimeRange() + tr2 = comp2.getTimeRange() + stats1 = tree.stats.get("WaveHeight", tr1, al1, mergeMethod ="Average") + stats2 = tree.stats.get("WaveHeight", tr2, al2, mergeMethod ="Average") + + # check for none + if stats1 is None or stats2 is None: + return 0 + + if stats1 == None or stats2 == None: + return 0 + + if abs(stats1 - stats2) < seaThreshold: + return 1 + return 0 + + def similarDiurnalSkyWx(self, tree, comp1, comp2): + return self.similar_diurnal(tree, comp1, comp2, ["Sky", "Wx"]) + + def similar_diurnal(self, tree, comp1, comp2, elementList): + # Returns true if stats for the given elements are similar + # in the night and morning AND the afternoon and evening. + # NOTE: the night and morning MAY be similar to the afternoon + # and evening, so word methods need to test for this case. + # + # Meant to handle the case of clouds and fog in the + # night and morning clearing in the afternoon and + # evening. + # Assumes comp2 is a 12-hour period. 
+ + #print "similar_diurnal" + al1 = comp1.getAreaLabel() + al2 = comp2.getAreaLabel() + comp1TR = comp1.getTimeRange() + comp2TR = comp2.getTimeRange() + # comp2 morning, comp2 afternoon OR + # comp2 evening, comp2 night + c2tr1, c2tr2 = self.divideRange(comp2TR,6) + comparisons = [] + if comp1TR.duration() == 12*3600: + # Compare comp1 night to comp2 morning + # and comp1 evening to comp2 afternoon + # OR comp1 afternoon to comp2 evening + # and comp1 morning to comp2 night + c1tr1, c1tr2 = self.divideRange(comp1TR,6) + comparisons.append((c1tr1, c2tr2)) + comparisons.append((c1tr2, c2tr1)) + else: + # We have already combined at least once so + # comp1 is at least 24 hours. Use the most + # recent 24 hours for comparison. + # if comp2 is daytime: + # compare comp1 morning to comp2 morning + # and comp1 afternoon to comp2 afternoon + # else + # compare comp1 evening to comp2 evening + # and comp1 night to comp2 night + subRanges = self.divideRange(comp1TR, 6) + length = len(subRanges)-1 + c1tr1 = subRanges[length-3] + c1tr2 = subRanges[length-2] + comparisons.append((c1tr1, c2tr1)) + comparisons.append((c1tr2, c2tr2)) + + # Do comparisons + wordDict = {} + for element in elementList: + wordDict[element] = [] + #print "\nComparisons" + for tr1, tr2 in comparisons: + for element in elementList: + #print "comparing", tr1, tr2 + #print " ", element + exec("flag = self.similar"+element+\ + "Logic(tree, comp1, comp2, tr1, al1, tr2, al2)") + #print "flag", flag + if not flag: + #print "returning 0" + return 0 + #print "returning 1" + return 1 + + def similarSkyLogic(self, tree, comp1, comp2, tr1, al1, tr2, al2): + stats1 = tree.stats.get("Sky", tr1, al1, mergeMethod ="Average") + stats2 = tree.stats.get("Sky", tr2, al2, mergeMethod ="Average") + # check for none + #print "stats1", stats1 + #print "stats2", stats2 + if stats1 is None or stats2 is None: + return 0 + if stats1 == None or stats2 == None: + return 0 + saveTR1 = comp1.timeRange + saveTR2 = comp2.timeRange + 
        # Temporarily retarget the component time ranges so sky_value wording
        # reflects the sub-ranges being compared, then restore them.
        comp1.timeRange = tr1
        comp2.timeRange = tr2
        words1 = self.sky_value(tree, comp1, self.getValue(stats1), -1)
        words2 = self.sky_value(tree, comp2, self.getValue(stats2), -1)
        comp1.timeRange = saveTR1
        comp2.timeRange = saveTR2
        #print "words1, words2", words1, words2
        if words1 == words2:
            return 1
        #if words1.find("partly") > -1 and words2.find("partly")> -1:
        #    return 1
        return 0

    def similarWxLogic(self, tree, comp1, comp2, tr1, al1, tr2, al2):
        # Returns true if wx stats are similar
        stats1 = tree.stats.get("Wx", tr1, al1, mergeMethod = "Average")
        stats2 = tree.stats.get("Wx", tr2, al2, mergeMethod = "Average")
        # check for none
        #print "stats1, stats2", stats1, stats2
        if stats1 is None or stats2 is None:
            return 0
        stats1 = self.cleanOutNoWx(stats1)
        stats2 = self.cleanOutNoWx(stats2)
        similarWx = self.checkWeatherSimilarity(
            tree, comp1, stats1, stats2, comp1, comp2, tr1, tr2, al1, al2)
        #print "similarWx", similarWx
        if similarWx == 0:
            return 0
        else:
            return 1

    def cleanOutNoWx(self, stats):
        # Cleans out NoWx from stats list.
        # Entries may be bare subkeys or (subkey, rank) tuples; in either
        # form, entries whose wxType is the empty string are dropped.
        if stats is None:
            return None
        newList = []
        for stat in stats:
            if type(stat) is tuple:
                subkey, rank = stat
            else:
                subkey = stat
            if subkey.wxType() == "":
                continue
            newList.append(stat)
        return newList

    def combineStats(self, tree, phrase, subPhrase1, subPhrase2):
        # Dispatch to the type-specific combiner for the phrase's first
        # element; on success, wrap the merged value in a new sub-phrase.
        # Returns (1, newSubPhrase) or (0, None).
        # NOTE(review): if dataType matches none of SCALAR/VECTOR/WEATHER/
        # DISCRETE, combineFlag is unbound and this raises NameError --
        # presumably every dataType is covered; confirm.
        firstElement = phrase.get("firstElement")
        elementName = firstElement.name
        dataType = firstElement.dataType
        if dataType == self.SCALAR():
            combineFlag, newVal = self.combineScalars(
                tree, phrase, subPhrase1, subPhrase2, elementName)
        elif dataType == self.VECTOR():
            combineFlag, newVal = self.combineVectors(
                tree, phrase, subPhrase1, subPhrase2, elementName)
        elif dataType == self.WEATHER():
            combineFlag, newVal = self.combineWeather(
                tree, phrase, subPhrase1, subPhrase2, elementName)
        elif dataType == self.DISCRETE():
            combineFlag, newVal = self.combineDiscrete(
                tree, phrase, subPhrase1, subPhrase2, elementName)
        if combineFlag:
            elementInfoList = phrase.get("elementInfoList")
            newSubPhrase = self.combine2SubPhrases(
                tree, phrase, subPhrase1, subPhrase2, elementInfoList, newVal)
            return 1, newSubPhrase
        else:
            return 0, None

    def combineScalars(self, tree, node, subPhrase1, subPhrase2, elementName):
        # Merge two scalar sub-phrases when their values are not
        # significantly different. Returns (1, newValue) to combine,
        # (1, None) when both ranges are missing, (0, None) otherwise.
        min1, max1 = self.getScalarData(tree, subPhrase1, elementName, "MinMax")
        min2, max2 = self.getScalarData(tree, subPhrase2, elementName, "MinMax")
        #print "combining", min1, max1, min2, max2
        if min1 is None and max1 is None and min2 is None and max2 is None:
            return 1, None
        if min1 is None or max1 is None or min2 is None or max2 is None:
            return 0, None

        differenceFlag = self.checkScalarDifference(
            tree, subPhrase1, elementName, min1, max1, min2, max2)
        if differenceFlag == 0:
            combine_singleValues = self.combine_singleValues_flag(
                tree, subPhrase1, elementName, elementName)
            if combine_singleValues == 1:
                newValue = self.average(min(min1, min2), max(max1, max2))
                newValue = self.roundStatistic(tree, subPhrase1, newValue, elementName)
            else:
                # Combine using mins and maxs to catch slow trends
                min1 = self.roundStatistic(tree, subPhrase1, min(min1, min2), elementName)
                max1 = self.roundStatistic(tree, subPhrase1, max(max1, max2), elementName)
                min1, max1 = self.applyRanges(tree, node, min1, max1, elementName)
                newValue = (min1, max1)
            #print "combined"
            return 1, newValue
        #print "not combined"
        return 0, None

    def combineVectors(self, tree, phrase, subPhrase1, subPhrase2, elementName):
        # Merge two vector sub-phrases (definition continues past this chunk).
        mag1, dir1, dirStr1 = self.getVectorData(tree, subPhrase1, elementName, "MinMax")
        mag2, dir2, dirStr2 = self.getVectorData(tree, subPhrase2, elementName, "MinMax")
        if mag1 is None and mag2 is None:
            return 1, (None, dir1)
        if mag1 is None or mag2 is None:
            return 0, (None, dir1)

        min1, max1 = mag1
        min2, max2 = mag2

        differenceFlag = self.checkVectorDifference(
            tree, subPhrase1, elementName, min1, max1, dir1,
min2, max2, dir2) + if differenceFlag == 0: + combine_singleValues = self.combine_singleValues_flag( + tree, subPhrase1, elementName, elementName) + if combine_singleValues == 1: + newMag, newDir = self.vectorAverage((min(min1, min2), dir1), (max(max1, max2), dir2)) + newMag = self.roundStatistic(tree, subPhrase1, newMag, elementName) + newValue = (newMag, newDir) + else: + # Combine using mins and maxs to catch slow trends + newMin = min(min1, min2) + newMax = max(max1, max2) + newMin, newMax = self.applyRanges(tree, phrase, newMin, newMax, elementName) + magAvg, newDir = self.vectorAverage((newMin, dir1), (newMax, dir2)) + newValue = ((newMin, newMax), newDir) + return 1, newValue + return 0, None + + def combineWeather(self, tree, phrase, subPhrase1, subPhrase2, elementName): + # This method now only used for skyPopWx and visibility phrases + statDict1 = subPhrase1.getStatDict() + stats1 = statDict1[elementName] + statDict2 = subPhrase2.getStatDict() + stats2 = statDict2[elementName] + if stats1 is None and stats2 is None: + return 1, None + if stats1 is None or stats2 is None: + return 0, None + + subkeys1 = self.getSubkeys(stats1) + subkeys2 = self.getSubkeys(stats2) + # Special case of combining based only on Visibility + combineVisibility = phrase.get("combineVisibility") + if combineVisibility == 1: + # Combine if low visibility is the same for each subPhrase + lowVis1 = self.getVis(subkeys1) + lowVis2 = self.getVis(subkeys2) + if lowVis1 == lowVis2: + return 1, stats1 + else: + return 0, None + + # Check weather key differences + similarResult = self.checkWeatherSimilarity( + tree, phrase, stats1, stats2, subPhrase1, subPhrase2) + if type(similarResult) is list: + return 1, similarResult + elif similarResult == 1: + return 1, stats1 + elif similarResult == 2: + return 1, stats2 + else: + return 0, None + + def combineDiscrete(self, tree, phrase, subPhrase1, subPhrase2, elementName): + statDict1 = subPhrase1.getStatDict() + stats1 = statDict1[elementName] + 
statDict2 = subPhrase2.getStatDict() + stats2 = statDict2[elementName] + if stats1 is None and stats2 is None: + return 1, None + if stats1 is None or stats2 is None: + return 0, None + + if stats1 == stats2: + return 1, stats1 + return 0, None + + def combineChildWords(self, tree, node, child1, child2): + words1 = child1.get("words") + if words1 is None: + return 0, None + words2 = child2.get("words") + if words2 is None: + return 0, None + + if words1 == words2: + newChild = self.combine2Children(tree, node, child1, child2) + return 1, newChild + return 0, None + + def combine2SubPhrases(self, tree, node, subPhrase1, subPhrase2, elementInfoList, newVal): + # Combine time ranges + subRange1 = subPhrase1.get("timeRange") + subRange2 = subPhrase2.get("timeRange") + newTimeRange = TimeRange.TimeRange(subRange1.startTime(), subRange2.endTime()) + + # Make new Node so methods will be re-run + # Preserve other elements in statDict + newSubPhrase = tree.makeNode([], subPhrase1.methodList) + # Make new statDict based on new time range + first = elementInfoList[0] + statDict = {} + statDict[first.name] = newVal + areaLabel = node.getAreaLabel() + for elementInfo in elementInfoList[1:]: + stats = tree.stats.get( + elementInfo.name, newTimeRange, areaLabel, elementInfo.statLabel, + elementInfo.mergeMethod) + statDict[elementInfo.name] = stats + newSubPhrase.set("statDict", statDict) + newSubPhrase.set("timeRange", newTimeRange) + newSubPhrase.parent = node + return newSubPhrase + + def combine2Components(self, tree, node, comp1, comp2): + # Combine time ranges + timeRange1 = comp1.getTimeRange() + timeRange2 = comp2.getTimeRange() + newTimeRange = TimeRange.TimeRange(timeRange1.startTime(), timeRange2.endTime()) + # Get fresh component definition so methods will be re-run + newComp = tree.makeComponent(comp1.get("name"), newTimeRange, comp1.get("definition")) + newComp.parent = tree + return newComp + + def combine2Children(self, tree, node, child1, child2): + # Combine time 
ranges and take same values so methods will not be re-run + # Used for combining words + timeRange1 = child1.get("timeRange") + timeRange2 = child2.get("timeRange") + newTimeRange = TimeRange.TimeRange(timeRange1.startTime(), timeRange2.endTime()) + child1.set("timeRange", newTimeRange) + return child1 + + #################################### + + def fillNulls(self, tree, node): + # Data Needed: subPhrase "words" + # Fill in the subPhrases designated as "null" with configurable + # null phrases (first_null_phrase, null_phrase) + + # See if ready to process + if not self.phrase_trigger(tree, node): + return + + index = 0 + #print "fillNulls node", node.get("firstElement"), node.get("elementName") + try: + elementName = node.get("firstElement").name + except: + return self.DONE() + firstNullPhrase = self.first_null_phrase(tree, node, elementName, elementName) + nullPhrase = self.null_phrase(tree, node, elementName, elementName) + for subPhrase in node.get("childList"): + words = subPhrase.get("words") + if words is None: + return + if words == "null": + if index == 0: + subPhrase.set("words", firstNullPhrase) + else: + subPhrase.set("words", nullPhrase) + subPhrase.set("null",1) + index = index + 1 + # Collapse empty word sub-phrases + self.collapsePhraseWords(tree, node) + return self.DONE() + + def collapsePhraseWords(self, tree, phrase): + # Collapse empty word sub-phrases + childList = phrase.childList + if len(childList) <= 1: + return + newList = [] + lastWords = None + index = 0 + emptyIndex = None + for subPhrase in phrase.childList: + subWords = subPhrase.get("words") + if subWords == "": + if lastWords == "": + # Add to empty phrase + subRange = subPhrase.getTimeRange() + emptyRange = childList[emptyIndex].getTimeRange() + newRange = TimeRange.TimeRange(emptyRange.startTime(), subRange.endTime()) + childList[emptyIndex].set("timeRange", newRange) + else: + # Start an empty phrase + emptyIndex = index + else: + if lastWords == "": + 
newList.append(childList[emptyIndex]) + emptyIndex = None + newList.append(subPhrase) + lastWords = subWords + index = index + 1 + if emptyIndex is not None: + newList.append(childList[emptyIndex]) + phrase.childList = newList + + + def timeDescriptorModeration(self, tree, phrase): + # Moderates the time descriptor + # Needs subPhrase "words" + # Looks at subPhrase "null" or empty + # Sets subPhrase "timeDescFlag" indicating whether or not + # to generate a timeDescriptor for this subPhrase + # + # Algorithm: + # if last subPhrase is null, (make sure to flag last non-null) + # If odd number of subPhrases, + # flag even else flag odd + # elif the first even subPhrase is null, flag odd subPhrases + # else, flag even subPhrases + # + # See if ready to process + if not self.phrase_trigger(tree, phrase): + return + childList = phrase.get("childList") + length = len(childList) + if length == 0: + return self.DONE() + # Set all subPhrases if time descriptors are always + # to be on OR off + flag = None + if phrase.get("noTimeDescriptors") == 1: + flag = 0 + elif phrase.get("allTimeDescriptors") == 1: + flag = 1 + if flag is not None: + for subPhrase in childList: + subPhrase.set("timeDescFlag", flag) + return self.DONE() + # If one subPhrase, we need time descriptor IF + # the subPhrase time range differs from the + # phrase time range + if length == 1: + subPhrase = childList[0] + if subPhrase.getTimeRange() == phrase.getTimeRange(): + flag = 0 + else: + flag = 1 + subPhrase.set("timeDescFlag",flag) + return self.DONE() + odd = length%2 + lastNull = self.isNull(childList[length-1]) + if lastNull: + if odd: + flagOdd = 0 + else: + flagOdd = 1 + else: + firstEven = self.isNull(childList[1]) + if firstEven: + flagOdd = 1 + else: + flagOdd = 0 + + index = 0 + for subPhrase in childList: + #print "words", subPhrase.get("words") + #print "null", subPhrase.get("null") + flag = 0 + if index%2 == 0: # odd subPhrase + if flagOdd == 1: + flag = 1 + else: # even subPhrase + if 
flagOdd == 0: + flag = 1 + + # Uncomment the following line if you want ALL sub-phrases + # to have a time descriptor. + # + #flag = 1 + # + # Alternatively, you could test per weather element: + # + #if phrase.get("elementName") == "Wx": + # flag = 1 + + subPhrase.set("timeDescFlag", flag) + index = index + 1 + return self.DONE() + + def isNull(self, subPhrase): + if subPhrase.get("null") == 1: + return 1 + if subPhrase.get("words") == "": + return 1 + return 0 + + def checkPhrasesDone(self, tree, node, areaLabel=None, exceptions=[]): + # Check that all phrases (except those with names listed in exceptions) + # are done for the component associated with node + # If areaLabel is not None, check only those phrases that have + # the given areaLabel. + # Return the list of phrases that are done. + + # We need to look at all progeny -- not just children + # since phrases can have child phrases + leaves = self.getLeaves(tree, node) + phraseList = [] + for child in leaves: + childWords = child.get("words") + childName = child.getAncestor('name') + #print " child", childName, childWords + if childName is None or childName in exceptions: + continue + if areaLabel is not None: + if child.getAreaLabel() != areaLabel: + continue + if childWords is not None: + phraseList.append(child) + else: + # If no words yet, return + #print "returning to wait" + return None + if phraseList is []: + return None + else: + return phraseList + + def phrase_trigger(self, tree, phrase, setUpOnly=0): + # Return 1 if trigger is met, else 0 + # If setUpOnly == 1, trigger will be met if setUp method + # has been completed + # Make sure set-up method was completed + #if len(phrase.get("childList")) == 0 and phrase.get("words") is None: + # return 0 + if not phrase.setUpMethod in phrase.doneList: + return 0 + if setUpOnly: + return 1 + # Make sure sub-phrases have words + for subPhrase in phrase.get("childList"): + # Check to make sure we have words + words = subPhrase.get("words") + if words is 
None: + return 0 + return 1 + + def assembleSubPhrases(self, tree, phrase): + # Assembles sub-phrases adding the time descriptor + # Check for data + + # See if ready to process + if not self.phrase_trigger(tree, phrase): + return + if not self.consolidateSubPhrases_trigger(tree, phrase): + return + + #print "NODE", phrase.get("name"), phrase.getTimeRange() + if self.useUntilPhrasing(tree, phrase): + return self.assembleUntilSubPhrases(tree, phrase) + + fcst = "" + index = 0 + + #print "\nAssemble Subphrases", phrase.get('name'), phrase + + for subPhrase in phrase.get("childList"): + # Check to make sure we have words + words = subPhrase.get("words") + if words is None: + return + #print " words", words + #print " ", subPhrase.getTimeRange(), subPhrase + #print " ", subPhrase.getAncestor("conjunctiveQualifier") + #print " ", subPhrase.getAreaLabel() + if words == "": + continue + + if index == 0: + #if not subPhrase.get("null"): + if not self.isNull(subPhrase): + # Get descriptor + descriptor = phrase.get("descriptor") + if descriptor is not None and descriptor != "": + fcst = fcst + descriptor + " " + else: + # Get connector + connectorMethod = phrase.get("connectorMethod") + connector = connectorMethod(tree, subPhrase) + + if index == 2: + # Add conjunctive "THEN" to make 3+ subPhrase phrases + # flow better. e.g. + # "N WIND 10 TO 20 KT RISING TO 30 KT EARLY IN THE + # AFTERNOON, THEN RISING TO GALES TO 40 KT LATE + # IN THE AFTERNOON." 
+ elementName = phrase.getAncestor("elementName") + useThenConnector = self.useThenConnector( + tree, phrase, elementName, elementName) + if useThenConnector: + thenConnector = self.thenConnector( + tree, phrase, elementName, elementName) + if thenConnector != "": + # Add another time descriptor + subPhrase.set("timeDescFlag", 1) + connector = thenConnector + connector + + fcst = fcst + connector + + # Time Descriptor + timeDescriptor = self.format( + self.subPhrase_timeDescriptor(tree, phrase, subPhrase)) + + # Get words again in case they were changed by connector method + fcst = fcst + subPhrase.get("words") + timeDescriptor + index = index + 1 + #print " words", fcst + phrase.set("words", fcst) + return self.DONE() + + def assembleUntilSubPhrases(self, tree, phrase): + # Create a phrase that reports a list of (value, timeRange) + # tuples. Optionally, an associated range may be added to the + # phrase values. + + elementName = phrase.getAncestor("elementName") + untilFormat = self.untilPhrasing_format(tree, phrase, elementName, elementName) + timeRange = phrase.getTimeRange() + + # Make lists of consecutive subphrases + phraseLists = [] + curList = [] + lastTR = None + for subPhrase in phrase.get("childList"): + tr = subPhrase.getTimeRange() + # First time thru -- start curList + if lastTR is None: + curList.append(subPhrase) + lastTR = tr + continue + # Check for consecutive sub ranges + if tr.startTime() == lastTR.endTime(): + curList.append(subPhrase) + # If not consecutive, clear out curList + # and append current subPhrase + else: + if curList != []: + phraseLists.append(curList) + curList = [] + curList.append(subPhrase) + lastTR = tr + if curList != []: + phraseLists.append(curList) + + #print "\nUNTIL NODE", phrase.getTimeRange() + phrases = [] + for phraseList in phraseLists: + words = "" + index = 0 + subWords = "" + firstWords = 1 + for subPhrase in phraseList: + # Check to make sure we have words + lastWords = subWords + subWords = 
subPhrase.get("words") + if subWords is None: + return + #print " words", subWords, subPhrase.getTimeRange() + + if index == 0: + #if not subPhrase.get("null"): + if not self.isNull(subPhrase): + # Get descriptor + descriptor = phrase.get("descriptor") + if descriptor is not None and descriptor != "": + words = words + descriptor + " " + index += 1 + if subWords == "": + continue + subRange = subPhrase.getTimeRange() + # Add connector , then if words came before this + if not firstWords: + words = words + ", then " + # Use after if lastWords were empty and subRange + # starts after timeRange + words = words + subWords + if lastWords == "": + if subRange.startTime() != timeRange.startTime(): + afterTime = self.getTimeStr(untilFormat, subRange, "begin") + if afterTime == "0000": + afterTime = "2400" + words = words + " after " + afterTime + # Use until if the subRange ends before the time range + if subRange.endTime() < timeRange.endTime(): + untilTime = self.getTimeStr(untilFormat, subRange, "end") + if untilTime == "0000": + untilTime = "2400" + words = words + " until " + untilTime + firstWords = 0 + + phrases.append(words) + + # String together phrases and insert periods + index = 0 + words = "" + for str in phrases: + words = words + str + if index < len(phrases)-1: + if phrases[index+1] != "": + if not words == "": + words = words + ". 
" + index += 1 + + return self.setWords(phrase, words) + + def getTimeStr(self, format, timeRange, endBegin): + if endBegin == "end": + if format == "military": + return self.timeDisplay(timeRange, "LT","","","%H")+"00" + else: + str = self.timeDisplay(timeRange, "LT", "", "", "%I %p") + if str[0] == "0": + str = str[1:] + return str + else: + if format == "military": + return self.timeDisplay(timeRange, "LT","","%H","")+"00" + else: + str = self.timeDisplay(timeRange, "LT", "", "%I %p", "") + if str[0] == "0": + str = str[1:] + return str + + def useUntilPhrasing(self, tree, phrase): + # Check to see if the subPhrases warrant "until" phrasing + # Can be set for the phrase + elementName = phrase.getAncestor("elementName") + if self.untilPhrasing_flag(tree, phrase, elementName, elementName): + return 1 + elif self.onTheFly_untilPhrasing_flag( + tree, phrase, elementName, elementName) != 1: + return 0 + # Examine sub-phrase time ranges + tr = phrase.getTimeRange() + timeStart = tr.startTime() + timeEnd = tr.endTime() + for subPhrase in phrase.get("childList"): + #print "subTimeRange", subPhrase.getTimeRange() + subTr = subPhrase.getTimeRange() + subStart = subTr.startTime() + subEnd = subTr.endTime() + # See if subRange end time or start time is + # not a multiple of 3 hours back from timeRange end time. + # If start time is the same as phrase start time, + # do not count as until phrasing. 
+ if timeEnd != subEnd: + timeDiff = timeEnd - subEnd + #print timeDiff, timeDiff % (3*3600) + if timeDiff % (3*3600) != 0: + return 1 + if timeStart != subStart: + timeDiff = timeEnd - subEnd + if timeDiff % (3*3600) != 0: + return 1 + return 0 + + def format(self, str): + if str is None: + str = "" + str = self.addSpace(str, "leading") + return str + + def subPhrase_timeDescriptor(self, tree, phrase, subPhrase): + if subPhrase.get("timeDescFlag"): + subRange = subPhrase.getTimeRange() + phraseRange = phrase.getTimeRange() + if phrase.get("name") in self.weatherPhraseNames(tree, phrase) and \ + len(phrase.get("childList")) > 1 and subRange == phraseRange: + dayNight = self.getPeriod(phraseRange, 1) + elementName = phrase.get("elementName") + if dayNight == self.DAYTIME(): + return self.phrase_descriptor( + tree, phrase, "through the day", elementName) + elif dayNight == self.NIGHTTIME(): + return self.phrase_descriptor( + tree, phrase, "through the night", elementName) + else: + return self.timePeriod_descriptor(tree, phrase, subRange) + else: + return "" + + # Connectors + def scalarConnector(self, tree, subPhrase): + # return connector phrase to connect subPhrase and previous one + elementName = subPhrase.getAncestor("firstElement").name + then = self.phrase_connector(tree, subPhrase, "then", elementName) + #if subPhrase.get("null") or subPhrase.getPrev().get("null"): + prev = subPhrase.getPrev() + if self.isNull(subPhrase) or self.isNull(prev): + return then + # Check for either subPhrase specifying only special connector + connector = subPhrase.get("connector") + if connector is not None: + return connector + connector = prev.get("connector") + if connector is not None: + return connector + + # Check for increasing/decreasing values + subPhrase1 = subPhrase.getPrev() + val1 = self.getScalarData(tree, subPhrase1, elementName, "Average") + val2 = self.getScalarData(tree, subPhrase, elementName, "Average") + if val1 > val2: + connector = self.phrase_connector( + 
tree, subPhrase, "decreasing to", elementName) + elif val1 < val2: + connector = self.phrase_connector( + tree, subPhrase, "increasing to", elementName) + else: + connector = then + return connector + + def wxConnector(self, tree, subPhrase): + # Return connector string to connect subPhrase and previous one. + # If subPhrases cover neighboring time ranges, connect them with "then" + # Otherwise, connect them with ". " + # Make sure that we do not connect more than two subPhrases in a row + # with a "then" connector by setting and re-setting the "useThenConnector" + # flag at the phrase level. + thenConnector = self.phrase_connector(tree, subPhrase, "then", "Wx") + connector = '. ' + prev = subPhrase.getPrev() + if prev is None: + return "" + phrase = subPhrase.getParent() + index = subPhrase.getIndex() + if index == 1: + # Initialize so that we are ready to use the thenConnector + # if appropriate + phrase.set("useThenConnector", 1) + + useThenConnector = phrase.get("useThenConnector") + prevEnd = prev.getTimeRange().endTime() + # If the start time of this subPhrase is the same + # as the end time of the previous subphrase + if useThenConnector and prevEnd == subPhrase.getTimeRange().startTime(): + # use the then connector + connector = ', then ' + # Re-set useThenConnector so we don't get + # a long string of ", then" connected sub-phrases + phrase.set("useThenConnector", 0) + else: + # Can re-set connector so we are ready to use the + # then connector on the next subPhrase + phrase.set("useThenConnector", 1) + subPhrase.set("words", subPhrase.get("words").capitalize()) + + return connector + + def visConnector(self, tree, subPhrase): + # return connector phrase to connect subPhrase and previous one + elementName = subPhrase.getAncestor("firstElement").name + then = self.phrase_connector(tree, subPhrase, "then", elementName) + #if subPhrase.get("null") or subPhrase.getPrev().get("null"): + prev = subPhrase.getPrev() + if self.isNull(subPhrase) or 
self.isNull(prev): + return then + # Check for either subPhrase specifying only special connector + connector = subPhrase.get("connector") + if connector is not None: + return connector + connector = prev.get("connector") + if connector is not None: + return connector + + # Check for increasing/decreasing values + subPhrase1 = subPhrase.getPrev() + # Get vis for previous sub-phrase + statDict = subPhrase1.getStatDict() + rankList = self.getStats(statDict, "Wx") + if rankList is None or len(rankList) == 0: + return self.setWords(node, "") + subkeyList = self.getSubkeys(rankList) + val1 = self.getVis(subkeyList) + if val1 is None: + return then + # Get vis for current sub-phrase + statDict = subPhrase.getStatDict() + rankList = self.getStats(statDict, "Wx") + if rankList is None or len(rankList) == 0: + return self.setWords(node, "") + subkeyList = self.getSubkeys(rankList) + val2 = self.getVis(subkeyList) + if val2 is None: + return then + + if val1 > val2: + connector = self.phrase_connector( + tree, subPhrase, "decreasing to", elementName) + elif val1 < val2: + connector = self.phrase_connector( + tree, subPhrase, "increasing to", elementName) + else: + connector = then + return connector + + def vectorConnector(self, tree, subPhrase): + # return connector phrase to connect subPhrase and previous one + elementName = subPhrase.getAncestor("firstElement").name + becoming = self.phrase_connector(tree, subPhrase, "becoming", elementName) + #if subPhrase.get("null") or subPhrase.getPrev().get("null"): + if self.isNull(subPhrase) or self.isNull(subPhrase.getPrev()): + return becoming + + subPhrase1 = subPhrase.getPrev() + mag1, dir1, dirStr1 = self.getVectorData( + tree, subPhrase1, elementName, "Average") + mag2, dir2, dirStr2 = self.getVectorData( + tree, subPhrase, elementName, "Average") + + increasingTo = self.phrase_connector( + tree, subPhrase, "increasing to", elementName) + decreasingTo = self.phrase_connector( + tree, subPhrase, "decreasing to", elementName) + 
+ # Directions same + if dirStr1 == dirStr2: + increment = self.nlValue(self.increment_nlValue( + tree, subPhrase, elementName, elementName), mag1) + # Magnitudes same + if abs(mag1-mag2) < increment: + connector = becoming + # Magnitudes different + elif mag1 < mag2: + connector = increasingTo + else: + connector = decreasingTo + # Directions different + else: + magDiff = self.nlValue(self.vector_mag_difference_nlValue( + tree, subPhrase, elementName, elementName), mag1) + # Magnitudes same + if abs(mag1 - mag2) < magDiff: + connector = self.phrase_connector( + tree, subPhrase, "shifting to the", elementName) + # Magnitudes different + else: + # If high wind conditions report both "becoming" and + # "increasing/decreasing" + # Southeast winds around 70 mph becoming south + # and increasing to around 105 mph + increasing = mag1 < mag2 + if max(mag1, mag2) > self.highValue_threshold( + tree, subPhrase, elementName, elementName): + dirStr = subPhrase.get("dirStr") + words = subPhrase.get("words") + words = words.replace(dirStr+" ", "") + subPhrase.set("words", words) + direction = becoming + dirStr + " and" + if increasing: + connector = direction + increasingTo + else: + connector = direction + decreasingTo + # Otherwise, report both "increasing" or "becoming" + # SOUTHEAST WINDS AROUND 20 MPH BECOMING SOUTH + # AROUND 15 MPH + else: + if increasing: + connector = increasingTo + else: + connector = becoming + return connector + + def marine_vectorConnector(self, tree, subPhrase): + # return connector phrase to connect subPhrase and previous one + elementName = subPhrase.parent.get("firstElement").name + if self.isNull(subPhrase) or self.isNull(subPhrase.getPrev()): + return self.phrase_connector(tree, subPhrase, "becoming", elementName) + + subPhrase1 = subPhrase.getPrev() + mag1, dir1, dirStr1 = self.getVectorData( + tree, subPhrase1, elementName, "Average") + mag2, dir2, dirStr2 = self.getVectorData( + tree, subPhrase, elementName, "Average") + + if dirStr1 == 
dirStr2: + increment = self.nlValue(self.increment_nlValue( + tree, subPhrase, elementName, elementName), mag1) + if abs(mag2-mag1) < increment: + connector = self.phrase_connector(tree, subPhrase, "becoming", elementName) + elif mag1 < mag2: + connector = self.phrase_connector(tree, subPhrase, "rising to", elementName) + else: + connector = self.phrase_connector(tree, subPhrase, "easing to", elementName) + else: + magDiff = self.nlValue(self.vector_mag_difference_nlValue( + tree, subPhrase, elementName, elementName), mag1) + if abs(mag2 - mag1) < magDiff: + # Put in test for sea breeze i.e. becoming onshore + if self.seaBreeze_flag(tree, subPhrase, elementName) == 1: + connector = self.phrase_connector(tree, subPhrase, "becoming onshore", elementName) + # Remove subPhrase words + subPhrase.set("words", "") + else: + movement = self.direction_movement(dir1, dir2) + if movement > 0: # clockwise + connector = self.phrase_connector(tree, subPhrase, "veering", elementName) + else: + connector = self.phrase_connector(tree, subPhrase, "backing", elementName) + else: + connector = self.phrase_connector(tree, subPhrase, "becoming", elementName) + return connector + + def removeDirection(self, tree, subPhrase): + # Remove the direction from the subPhrase words + dirStr = subPhrase.get("dirStr") + if dirStr is not None: + words = subPhrase.get("words") + words = words.replace(dirStr, "") + subPhrase.set("words",words) + + def getVectorData(self, tree, subPhrase, elementName, accessMethod): + # Get vector data for subPhrase for the given elementName + statDict = subPhrase.getStatDict() + stats = statDict[elementName] + if stats is None: + return None, None, None + mag, dir = stats + mag = self.getValue(mag, accessMethod) + dirStr= self.dirToText(dir) + return mag, dir, dirStr + + def getScalarData(self, tree, subPhrase, elementName, accessMethod): + # Get scalar data for subPhrase for the given elementName + matchingInfo = self.matchToWxInfo(tree, subPhrase, elementName, 
elementName) + if matchingInfo != "": + val = self.matchToWx(tree, subPhrase, elementName) + if accessMethod == "MinMax": + val = (val, val) + else: + firstElement = subPhrase.getAncestor("firstElement") + dataType = firstElement.dataType + statDict = subPhrase.getStatDict() + val = statDict[elementName] + val = self.getValue(val, accessMethod, dataType) + return val + + def seaBreeze_flag(self, tree, subPhrase, elementName): + # Return 1 if an onshore breeze is detected from the prior range + # Get local effects areas directions for Offshore previous subPhrase + # and Onshore for subPhrase + # offshoreDir, onshoreDir + offShoreArea, onShoreArea = self.seaBreeze_areaLabels(tree, subPhrase) + if offShoreArea is None: + return 0 + subPhrase1 = subPhrase.getPrev() + if subPhrase1 is None: + return 0 + timeRange1 = subPhrase1.getTimeRange() + timeRange2 = subPhrase.getTimeRange() + areaLabel1 = subPhrase1.getAreaLabel() + areaLabel2 = subPhrase.getAreaLabel() + offshore = tree.stats.get( + "Wind", timeRange1, offShoreArea, mergeMethod="Max", + intersectWith=areaLabel1) + onshore = tree.stats.get( + "Wind", timeRange2, onShoreArea, mergeMethod="Max", + intersectWith=areaLabel2) + if offshore is None or onshore is None: + return 0 + mag, offshoreDir = offshore + mag, onshoreDir = onshore + # Get thresholds + offshore1, offshore2, onshore1, onshore2 = self.seaBreeze_thresholds(tree, subPhrase) + if self.direction_between(offshoreDir, offshore1, offshore2) and \ + self.direction_between(onshoreDir, onshore1, onshore2): + return 1 + return 0 + + # Subphrase Level + def checkRepeatingString(self, tree, node, str, strName, matchAreaLabels=1): + # Given a text string, str, and a descriptive name for that string, + # see if it repeats in the previous phrase, sub-phrase or embedded phrase. + # If we find a repeating string, return an empty string + # Otherwise return the original string. 
+ # If matchAreaLabels, the areaLabel of previous node must match + # that of the current if we are to return an empty string. + # This prevents phrases such as: + # Chance of rain and snow 20 percent windward rain and snow 40 percent leeward. + # + + # Check sub-phrases + #print "Check Repeating", node.getAncestor('name'), str + #print " matchAreaLabels", matchAreaLabels + prevNode = node.getPrev() + if prevNode is not None: + if matchAreaLabels and \ + prevNode.getAreaLabel() != node.getAreaLabel(): + return str + prevStr = prevNode.get(strName) + if prevStr is not None and str == prevStr: + # Do not repeat previous str + #print "return 1" + return "" + # Check degenerate conjunctive local effect + # We are looking for these conditions: + # --This phrase has only one sub-phrase + # --The previous phrase has only one sub-phrase AND + # has the same name as the current phrase (e.g. popMax_phrase + # --The str for the sub-phrases are the same + phrase = node.getParent() + #tree.printNode(phrase.parent) + if len(phrase.childList) == 1: + prevPhrase = phrase.getPrev() + if prevPhrase is not None: + if matchAreaLabels and \ + prevPhrase.getAreaLabel() != node.getAreaLabel(): + return str + if prevPhrase.get("name") == phrase.get("name"): + if len(prevPhrase.childList) == 1: + prevSubPhrase = prevPhrase.childList[0] + prevStr = prevSubPhrase.get(strName) + if prevSubPhrase.get('words') is None: + # Must wait for previous words to finish + return -1 + if prevStr is not None and str == prevStr: + # Do not repeat previous str + #print "return 2" + return "" + return str + + # Local Effects + + def checkLocalEffects(self, tree, node): + localEffectsList = self.getLocalEffectsList(tree, node) + #print " le list", localEffectsList + if localEffectsList is None or len(localEffectsList) == 0: + return self.DONE() + childList = node.get("childList") + if childList is None or len(childList) < 1: + return self.DONE() + if self.__dict__.get('_leDebug',0): + print("\nChecking local 
effects for", node.get('name'), node.getAreaLabel()) + print(" node", node) + print(" parent", node.parent) + print(" disabled", node.get('disabledSubkeys'), node.getAncestor('disabledSubkeys')) + print("\ncomp phrases before:") + self.printCompPhrases(tree, node) + + for localEffect in localEffectsList: + # If ANY subPhrase has a local effect, create conjunctive local effect. + # If ALL subPhrases have the same local effect "groups", use that grouping. + # Otherwise, create a conjunctive phrase for each local effect area. + flag = 0 + firstTime = 1 + sameGroups = 1 + for checkNode in childList: + nodeFlag, nodeGroups = self.checkLocalEffect(tree, checkNode, localEffect) + if nodeFlag: + flag = 1 + if firstTime: + groups = nodeGroups + firstTime = 0 + elif groups != nodeGroups: + # flag must be 1 + sameGroups = 0 + break + if flag: + # Create conjunctive local effect + #print "Creating conjunctive local effect" + if sameGroups == 0: + groups = [] + leAreaList = self.getLeAreaList(tree, node, localEffect) + for leArea in leAreaList: + groups.append([leArea]) + nodeList = self.makeLocalEffectNodes(tree, node, localEffect, groups) + + # Applies only to the skyPopWx_phrase + # Set up includeSky for new local effect nodes + includeSky = self.getIncludeSky(tree, node) + for newNode in nodeList: + newNode.set("includeSky", includeSky) + if self.__dict__.get('_leDebug',0): + print("newNode", newNode.get("name"), newNode.get("areaLabel")) + print(" includeSky", includeSky, newNode) + node.replace(nodeList) + + + if flag: # There is a local effect + self.localEffect_hook(tree, node) + if self.__dict__.get('_leDebug',0): + print("\ncomp phrases after:", self.printCompPhrases(tree, node)) + return self.DONE() + + def checkLocalEffect(self, tree, node, localEffect): + # Check each local effect area against all others for the given node. + # Determine "groups" i.e. group the local effect areas according to + # similar statistics. 
+ # Return + # -- a flag to indicate if any local effect areas showed differing + # statistics. + # -- the "groups" + triggerMethod = localEffect.triggerMethod + leAreaList = self.getLeAreaList(tree, node, localEffect) + if len(leAreaList) == 0: + return 0, [] + + # Begin with one group consisting of first local effect edit area + groups = [[leAreaList[0]]] + # This loop checks each subsequent local effect edit area against + # the existing groups and appends it to the first group which + # has similar statistics. + # If no existing group has similar statistics, a new group is + # created. + for leArea1 in leAreaList[1:]: + addedToExisting = 0 + for group in groups: + leArea2 = group[0] + difference = self.checkThreshold( + tree, node, triggerMethod, leArea1, leArea2, localEffect) + if difference == 0: + # Similar statistics, so + # append it to the current group + group.append(leArea1) + addedToExisting = 1 + break + if addedToExisting == 0: + # Did not find similar group, so create a new group + groups.append([leArea1]) + if len(groups) == 1: + flag = 0 + else: + flag = 1 + return flag, groups + + def getLocalEffectsList(self, tree, node): + leList = node.get("localEffectsList") + if type(leList) is types.MethodType: + return leList(tree, node) + else: + return leList + + def getLeAreaList(self, tree, node, localEffect): + leAreaList = localEffect.leAreaList + if type(leAreaList) is types.MethodType: + return leAreaList(tree, node) + else: + return leAreaList + + def getLeAreaLabel(self, tree, node, leArea): + if leArea.areaLabel == "__Current__": + return node.getAreaLabel() + elif leArea.intersectFlag: + return self.getIntersectName(node.getAreaLabel(), leArea.areaLabel) + #return self.getIntersectName(tree.getAreaLabel(), leArea.areaLabel) + else: + return leArea.areaLabel + + def getLeQualifiers(self, tree, node, group): + # Return the qualifiers for this group of leAreas + # There is a qualifer for embedded local effect phrases + # and one for conjunctive 
local effect phrases. + embeddedQualifier = "" + conjQualifier = "" + length = len(group) + index = 0 + for leArea in group: + areaWords = leArea.areaWords + if type(areaWords) is types.MethodType: + areaWords = areaWords(tree, node, leArea) + embeddedQualifier = embeddedQualifier + areaWords + conjWords = leArea.conjAreaWords + if type(conjWords) is types.MethodType: + conjWords = conjWords(tree, node, leArea) + conjQualifier = conjQualifier + conjWords + # if last one, do not add conjunction + if index == length - 1: break + embeddedQualifier = embeddedQualifier + " and " + conjQualifier = conjQualifier + " and " + index = index + 1 + return embeddedQualifier, conjQualifier + + def checkThreshold(self, tree, node, triggerMethod, leArea1, leArea2, localEffect): + # Return 1 if the difference between leArea1 and leArea2 stats is + # greater than the threshold + # Handles stats that are a min/max or a singleValue + leArea1Label = self.getLeAreaLabel(tree, node, leArea1) + leArea2Label = self.getLeAreaLabel(tree, node, leArea2) + if type(triggerMethod) is types.MethodType: + flag = triggerMethod(tree, node, localEffect, leArea1Label, leArea2Label) + else: + first = node.getAncestor("firstElement") + element = first.name + dataType = first.dataType + if dataType == self.WEATHER(): + mergeMethod = "Average" + else: + mergeMethod = "MinMax" + timeRange = node.getTimeRange() + area1Stats = tree.stats.get(element, timeRange, leArea1Label, + mergeMethod=mergeMethod) + area2Stats = tree.stats.get(element, timeRange, leArea2Label, + mergeMethod=mergeMethod) + area1Stats = self.applyDisabled(tree, node, area1Stats) + area2Stats = self.applyDisabled(tree, node, area2Stats) + if self.__dict__.get("_leDebug", 0): + print("\nCheckThreshold", element, timeRange) + print(leArea1Label, area1Stats) + print(leArea2Label, area2Stats) + if area1Stats is None or area2Stats is None: + return 0 + flag = self.checkLocalEffectDifference( + tree, node, dataType, triggerMethod, area1Stats, 
area2Stats, + leArea1Label, leArea2Label) + if self.__dict__.get("_leDebug", 0): + print("returning", flag) + return flag + + def applyDisabled(self, tree, node, stats): + if stats is None: + return stats + disabledSubkeys = node.getAncestor('disabledSubkeys') + #print "/n applyDisabled: disabled", disabledSubkeys + #print "stats", stats + if disabledSubkeys is not None: + newStats = [] + for subkey, rank in stats: + if subkey not in disabledSubkeys: + newStats.append((subkey, rank)) + stats = newStats + if stats == []: + emptyKey = WeatherSubKey.weatherSubKey(self._argDict['site'], + "", "", "", "", []) + stats = [(emptyKey, 100)] + return stats + + def checkLocalEffectDifference(self, tree, node, dataType, threshold, + area1Stats, area2Stats, al1, al2): + if dataType == self.DISCRETE(): + if area1Stats != area2Stats: + return 1 + else: + return 0 + if dataType == self.WEATHER(): + flag = self.checkWeatherSimilarity( + tree, node, area1Stats, area2Stats, al1=al1, al2=al2) + # checkWeatherSimilarity returns 0 if there IS a difference and, thus, + # should be a local effect + if flag == 0: + return 1 + else: + return 0 + if dataType == self.VECTOR(): + area1Stats, dir = area1Stats + area2Stats, dir = area2Stats + + if type(area1Stats) is tuple: + min1, max1 = area1Stats + min2, max2 = area2Stats + diff1 = self.absDiff(min1, min2) + diff2 = self.absDiff(max1, max2) + # Check to see if one range is included within the other + if self.rangeIncluded(min1, max1, min2, max2) == 1: + return 0 + if self.rangeIncluded(min2, max2, min1, max1) == 1: + return 0 + # Check to see if either min or max is greater than threshold + if diff1 > threshold or diff2 > threshold: + return 1 + else: + return 0 + else: + absDiff = self.absDiff(area1Stats, area2Stats) + if absDiff > threshold: + return 1 + else: + return 0 + + def checkSkyWxDifference(self, tree, node, localEffect, leArea1Label, leArea2Label): + timeRange = node.getTimeRange() + wxStats1 = tree.stats.get("Wx", timeRange, 
leArea1Label, + mergeMethod="Average") + wxStats2 = tree.stats.get("Wx", timeRange, leArea2Label, + mergeMethod="Average") + wxStats1 = self.applyDisabled(tree, node, wxStats1) + wxStats2 = self.applyDisabled(tree, node, wxStats2) + #print "wxStats1", wxStats1 + #print "wxStats2", wxStats2 + wxSame = self.checkWeatherSimilarity( + tree, node, wxStats1, wxStats2, al1=leArea1Label, al2=leArea2Label) + #print "wxSame", wxSame + if wxSame == 0: + wxDiff = 1 + else: + wxDiff = 0 + + skyDiff = self.checkSkyDifference(tree, node, localEffect, + leArea1Label, leArea2Label) + + # Determine if ANY of the sub-phrases have a sky local effect + # and store this information at the parent level for later + # use by the "checkLocalEffects" method. + skyLE = node.parent.get("skyLE") + if skyLE is None: + node.parent.set("skyLE", 0) + if skyDiff: + node.parent.set("skyLE", 1) + #return wxDiff + return skyDiff or wxDiff + + def getIncludeSky(self, tree, node): + # If this is called, then we know we have a LE + # i.e. there was a wx local effect. 
+ skyLE = node.get("skyLE") + if skyLE: + return None # Want to make sure we check at the LE level + else: + return 0 + + def checkSkyDifference(self, tree, node, localEffect, + leArea1Label, leArea2Label): + timeRange = node.getTimeRange() + skyValue1 = tree.stats.get("Sky", timeRange, leArea1Label, + mergeMethod="Average") + skyValue2 = tree.stats.get("Sky", timeRange, leArea2Label, + mergeMethod="Average") + if timeRange.duration() > 12*3600: + dayNight = -1 + else: + dayNight = self.getPeriod(timeRange, 1) + words1 = self.sky_value(tree, node, self.getValue(skyValue1), dayNight) + words2 = self.sky_value(tree, node, self.getValue(skyValue2), dayNight) + return not self.similarSkyWords_flag(tree, node, words1, words2) + + def localEffect_hook(self, tree, node): + return + + def rangeIncluded(self, min1, max1, min2, max2): + # Return 1 if min1, max1 are included in min2, max2 + if min1 >= min2 and max1 <= max2: + return 1 + return 0 + + def absDiff(self, val1, val2): + # Return the absolute difference between the values + # Note: this handles negative values + if (val1 > 0 and val2 > 0) or (val1 < 0 and val2 < 0): + return abs(val1 - val2) + else: + return abs(val1) + abs(val2) + + def makeLocalEffectNodes(self, tree, node, localEffect, groups): + # Make a node phrase for each group of local effect areas in groups + nodeList = [] + for group in groups: + leAreaLabel = self.getLeAreaLabel(tree, node, group[0]) + newNode = tree.copyPhrase( + node, node.getTimeRange(), leAreaLabel, + copyAttrs=["disabledSubkeys", "disabledElements", + "firstElement", "elementName", "elementInfoList", + "descriptor", "indentLabel"]) + embeddedQualifier, conjQualifier = self.getLeQualifiers(tree, node, group) + newNode.set("embeddedQualifier", embeddedQualifier) + newNode.set("conjunctiveQualifier", conjQualifier) + newNode.set("localEffect", localEffect) + newNode.set("leGroup", group) + nodeList.append(newNode) + return nodeList + + def printCompPhrases(self, tree, node): + comp = 
node.getComponent() + print("Component phrases for", node) + for phrase in comp.get('childList'): + print(phrase.get('name'), phrase.getAreaLabel(), phrase) + print(" ", phrase.get('words')) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Phrase_Test_Local.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Phrase_Test_Local.py index 2eebd28bb3..99b3dc5ea5 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Phrase_Test_Local.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Phrase_Test_Local.py @@ -1,789 +1,789 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Phrase_Test_Local -# Local customizations for AreaFcst as Base class for testing Narrative Phrases -# Refer to Test Cases for Text Products: tp003 -# Author: -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import AreaFcst -import string -import TextRules -import string, time, re, os, types, copy - -class TextProduct(AreaFcst.TextProduct): - Definition = copy.deepcopy(AreaFcst.TextProduct.Definition) - - # REQUIRED CONFIGURATION ITEMS - Definition['displayName'] = "TEST_PhraseTest" - #Definition["outputFile"] = "/awips/GFESuite/products/TEXT/ZFP.txt" - - # Header configuration items - #Definition["productName"] = "Zone Forecast Product" # name of product - #Definition["fullStationID"] = "Kxxx" # full station identifier (4letter) - #Definition["wmoID"] = "FOUS45" # WMO ID - #Definition["pil"] = "ZFPxxx" # product pil - #Definition["areaName"] = "stateName" # Name of state, such as "Georgia" - #Definition["wfoCity"] = "WfoCity" # Location of WFO - city name - #Definition["wfoState"] = "WfoState" # Location of WFO - state name - - # OPTIONAL CONFIGURATION ITEMS - Definition["defaultEditAreas"] = [ - ("area3", "Area 1"), - # ("area2", "Area 2"), - ] - - Definition["Period_1_version"] = 1 - - #Definition["directiveType"] = "C11" - #Definition["directiveType"] = "10-503" # Can be "C11" - #Definition["includeFloodingQuestion"] = 1 # Set to 1 to include flooding question - - #Definition["includeMultipleElementTable"] = 1 # Will include a TempPoPTable - #Definition["cityDictionary"] = "CityDictionary" # For 
TempPoPTable - - #Definition["areaDictionary"] = "AreaDictionary" # For product headers - #Definition["language"] = "english" - #Definition["hoursSChcEnds"] = 0 - - # Apply to C11 only: - #Definition["includeExtended"] = 1 # To include extended forecast - #Definition["extendedLabel"] = 1 # To include extended label - #Definition["includeEveningPeriod"] = 0 # To turn off evening period - - #Definition["includeMultipleElementTable"] = 1 # Will include a MultipleElementTable - # Uncomment just one elementList below - #Definition["elementList"] = ["Temp", "PoP"] # Default - #Definition["elementList"] = ["Temp", "Humidity", "PoP"] - #Definition["singleValueFormat"] = 1 # Default is 0 - - # Sampling Performance - #Definition["sampleFromServer"] = 1 # If 1, sample directly from server - # Trouble-shooting items - #Definition["passLimit"] = 20 # Limit on passes allowed through - # Narrative Tree - #Definition["trace"] = 1 # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - #Definition["debug"] = 1 - - Definition["arealSkyAnalysis"] = 1 - - def __init__(self): - AreaFcst.TextProduct.__init__(self) - - # OPTIONAL OVERRIDES - #def DAY(self): - # return 6 - #def NIGHT(self): - # return 18 - - # The thresholds and variables included here were selected because - # they are commonly overridden for your product. - # See the Text Product User Guide for other thresholds and variables - # that may be relevant to your product and for more information - # about the ones included here. - - def phrase_descriptor_dict(self, tree, node): - # Descriptors for phrases - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - # This is the default. 
Triggers if ALL coverage terms are areal - #dict["PoP"] = self.allAreal_or_chance_pop_descriptor, - # Uncomment this line for invoking areal or chance pop descriptor - # Triggers if ANY coverage terms are areal - #dict["PoP"] = self.areal_or_chance_pop_descriptor, - # Uncomment this line to use "chance" descriptor in all cases - #dict["PoP"] = "chance of" - return dict - - def pop_snow_lower_threshold(self, tree, node): - # Snow accumulation will not be reported if Pop is below this threshold - return 60 - - def pop_snowLevel_upper_threshold(self, tree, node): - # Snow level will be reported if Pop is above this threshold - return 60 - - def snowLevel_maximum_phrase(self, tree, node): - # This returns the maximum snow level value to be reported and the - # the corresponding snow level phrase. It can be set up by - # edit area as follows: - # editAreaList = [ - # ("area1", 8000, "above 8000 feet"), - # ("area2", 6000, "above 6000 feet"), - # # Don't mention snow level at all in area3: - # ("area3", 0, ""), - # ] - #maxElev = 0 - #phrase = "" - #for area, elev, elevPhrase in editAreaList: - # if self.currentAreaContains(tree, [area]): - # if elev > maxElev: - # maxElev = elev - # phrase = elevPhrase - #return (maxElev, phrase) - return (8000, "above 8000 feet") - - def vector_mag_difference_dict(self, tree, node): - # Replaces WIND_THRESHOLD - # Magnitude difference. If the difference between magnitudes - # for the first and second half of a period is greater than this value, - # the different magnitudes will be noted in the phrase. - # Units can vary depending on the element - dict = TextRules.TextRules.vector_mag_difference_dict(self, tree, node) - #dict["Wind"] = 20 - return dict - - def scalar_difference_nlValue_dict(self, tree, node): - # Scalar difference. If the difference between scalar values - # for 2 sub-periods is greater than this value, - # the different values will be noted in the phrase. 
- dict = TextRules.TextRules.scalar_difference_nlValue_dict(self, tree, node) - #dict["WindGust"] = 20 - return dict - - def lake_wind_areaNames(self, tree, node): - # Return list of edit area names for which the lake_wind_phrase - # should be generated - # If you want the phrase potentially generated for all zones, use: - # return ["ALL"] - return [] - - def useWindsForGusts_flag(self, tree, node): - # Turn this on if you want to use the maximum Wind - # for reporting Gusts if a WindGust grid is not found - return 0 - - def range_threshold_nlValue_dict(self, tree, node): - # Range for reporting temperature ranges in temp_range_phrase - # e.g HIGHS 80 TO 85 - dict = TextRules.TextRules.range_threshold_nlValue_dict(self, tree, node) - dict["MaxT"] = 5 - dict["MinT"] = 5 - dict["MinRH"] = 5 - dict["MaxRH"] = 5 - return dict - - def temp_trend_nlValue(self, tree, node): - # THRESHOLD FOR REPORTING TEMPERATURE TRENDS - return 20.0 - - def stdDev_dict(self, parmHisto, timeRange, componentName): - # This dictionary defines the low and high limit at which - # outliers will be removed when calculating stdDev stats. - # These tuples represent the (low, high) number of standard - # deviations. Any values falling outside this range will - # not be included in the calculated statistic. 
- return { - "LAL": (1.0, 1.0), - "MinRH": (1.0, 1.0), - "MaxRH": (1.0, 1.0), - "MinT": (1.0, 1.0), - "MaxT": (1.0, 1.0), - "Haines": (1.0, 1.0), - "PoP" : (1.0, 1.0), - "T" : (1.0, 1.0), - "Wind" : (1.0, 1.0), - } - - def value_connector_dict(self, tree, node): - dict = TextRules.TextRules.value_connector_dict(self, tree, node) - dict["MaxT"] = " to " - dict["MinT"] = " to " - return dict - - def windChillTemp_difference(self, tree, node): - # Difference between wind chill and temperature - # for reporting wind chill - return 5 - - def heatIndexTemp_difference(self, tree, node): - # Difference between heat index and temperature - # for reporting heat index - return 5 - - def Period_1(self): - exec "value = self.Period_1_version" + `self._Period_1_version` + "()" - return value - - def Period_1_version1(self): - component = { - "type": "component", - "methodList": [ - self.orderPhrases, - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [3]), - ("PoP", self._PoP_analysisMethod("Period_1"), [3]), - ("PoP", self.binnedPercent, [3]), - ("Wind", self.vectorModeratedMinMax, [0]), - ("Wind", self.vectorMinMax, [0]), - ("WindGust", self.maximum, [0]), - ("Wx", self.rankedWx, [3]), - ("WindChill", self.minMax), - ("HeatIndex", self.minMax), - ("SnowAmt", self.accumMinMax), - ], - "phraseList":[ - self.sky_phrase, - self.skyPopWx_phrase, -## (self.skyPopWx_phrase, self._wxLocalEffects_list()), - self.wind_summary, - self.reportTrends, - self.weather_phrase, -## (self.weather_phrase,self._wxLocalEffects_list()), - self.heavyPrecip_phrase, - self.severeWeather_phrase, - self.visibility_phrase, - self.snow_phrase, - self.extremeTemps_phrase, - self.steady_temp_trends, - self.highs_phrase, - self.lows_phrase, - self.temp_trends, - self.wind_withGusts_phrase, - self.popMax_phrase, - 
self.windChill_phrase, - self.heatIndex_phrase, - ], -## "additionalAreas": [ -## # Areas listed by weather element that will be -## # intersected with the current area then -## # sampled and analysed. -## # E.g. used in local effects methods. -## ("Sky", ["Rush_Valley"]), -## ("Wx", ["Rush_Valley"]), -## ("PoP", ["Rush_Valley"]), -## ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [3])) - if self._useStormTotalSnow: - phraseList = component["phraseList"] - index = phraseList.index(self.total_snow_phrase) - phraseList[index] = self.stormTotalSnow_phrase - component["phraseList"] = phraseList - return component - - def _wxLocalEffects_addlArea_list(self): - leArea1 = self.LocalEffectArea("__Current__", "", intersectFlag=0) - leArea2 = self.LocalEffectArea("Rush_Valley", "in the Rush Valley", - intersectFlag=0) - return [self.LocalEffect([leArea1, leArea2], 10, ", except ")] - - def _10_503_issuance_list(self, argDict): - seriesDefAM = [ - ("Period_1", "period1"), #("Phantom", 12), -## ("Period_2_3", 12), ("Period_2_3", 12), -## ("Period_4_5", 12), ("Period_4_5", 12), -## ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), -## ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ] - seriesDefPM = [ - ("Period_1", "period1"), - ("Period_2_3", 12), ("Period_2_3", 12), - ("Period_4_5", 12), ("Period_4_5", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), - ] - - return [ - ("Morning", self.DAY(), self.NIGHT(), self.NIGHT(), - ".TODAY...", "early in the morning", "late in the afternoon", - 1, seriesDefAM), - ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), self.NIGHT(), - ".TODAY...", "early in the morning", "late in the afternoon", - 1, seriesDefAM), - ("Morning Update", "issuanceHour", 
self.NIGHT(), self.NIGHT(), - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, seriesDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), - ".REST OF TODAY...", "early in the morning","late in the afternoon", - 1, seriesDefAM), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), - ".TONIGHT...", "late in the night", "early in the evening", - 1, seriesDefPM), - ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + self.DAY(), - ".TONIGHT...", "late in the night", "early in the evening", - 1, seriesDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - ".REST OF TONIGHT...", "early in the morning","early in the evening", - 1, seriesDefPM), - # For the early morning update, this produces: - # REST OF TONIGHT: - # MONDAY - # MONDAY NIGHT - ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), - ".REST OF TONIGHT...", "early in the morning","late in the afternoon", - 0, seriesDefPM), - # Alternative - # For the early morning update, this produces: - # EARLY THIS MORNING: - # TODAY - # TONIGHT - #("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - # ".REST OF TONIGHT...", "late in the night", "early in the evening", - # 1, seriesDefPM), - #("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), - # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", - # 1, seriesDefPM), - ] - - - -###################################### -## Section F: -## -## Period_1 For Rush_Valley skyPopWx Local Effects -## -## def Period_1(self): -## return { -## "type": "component", -## "methodList": [ -## self.orderPhrases, -## self.consolidateSubPhrases, -## self.assemblePhrases, -## self.wordWrap, -## ], -## "analysisList": [ -## ("MinT", self.stdDevMinMax), -## ("MaxT", self.stdDevMinMax), -## ("T", self.hourlyTemp), -## ("T", self.minMax), -## ("Sky", self.median, [3]), -## ("PoP", 
self._PoP_analysisMethod("Period_1"), [3]), -## ("PoP", self.binnedPercent, [3]), -## #("Wind", self.vectorMedianRange, [0]), -## ("Wind", self.vectorModeratedMinMax, [0]), -## ("Wind", self.vectorMinMax, [0]), -## ("WindGust", self.maximum, [0]), -## ("Wx", self.rankedWx, [3]), -## ("WindChill", self.minMax), -## ("HeatIndex", self.minMax), -## ], -## "phraseList":[ -## self.sky_phrase, -#### self.skyPopWx_phrase, -## (self.skyPopWx_phrase, self._wxLocalEffects_addlArea_list()), -## self.wind_summary, -## self.reportTrends, -#### self.weather_phrase, -## (self.weather_phrase,self._wxLocalEffects_addlArea_list()), -## self.heavyPrecip_phrase, -## self.severeWeather_phrase, -## self.highs_phrase, -## self.lows_phrase, -## self.temp_trends, -## self.wind_withGusts_phrase, -## self.popMax_phrase, -## self.windChill_phrase, -## self.heatIndex_phrase, -## ], -## "additionalAreas": [ -## # Areas listed by weather element that will be -## # intersected with the current area then -## # sampled and analysed. -## # E.g. used in local effects methods. -## ("Sky", ["Rush_Valley"]), -## ("Wx", ["Rush_Valley"]), -## ("PoP", ["Rush_Valley"]), -## ], -## } -## -## Period_1 For skyPopWx Local Effects -## - - - - def Period_1_version2(self): - return { - "type": "component", - "methodList": [ - self.orderPhrases, - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("Sky", self.median, [3]), - ("PoP", self._PoP_analysisMethod("Period_1"), [3]), - ("PoP", self.binnedPercent, [3]), - ("Wx", self.rankedWx, [3]), - ], - "phraseList":[ - (self.sky_phrase, self._skyLocalEffects_list()), - (self.skyPopWx_phrase, self._skyPopWxLocalEffects_list()), - (self.weather_phrase,self._wxLocalEffects_list()), - (self.popMax_phrase, self._popLocalEffects_list()), - ], - "intersectAreas": [ - # Areas listed by weather element that will be - # intersected with the current area then - # sampled and analysed. - # E.g. used in local effects methods. 
- ("Sky", ["AboveElev", "BelowElev"]), - ("Wx", ["AboveElev", "BelowElev"]), - ("PoP", ["AboveElev", "BelowElev"]), - ], - } - - def _skyLocalEffects_list(self): - leArea1 = self.LocalEffectArea("AboveElev", "windward") - leArea2 = self.LocalEffectArea("BelowElev", "leeward") - return [self.LocalEffect([leArea1, leArea2], self.checkSkyDifference, ", ")] - - def _wxLocalEffects_list(self): - leArea1 = self.LocalEffectArea("AboveElev", "windward") - leArea2 = self.LocalEffectArea("BelowElev", "leeward") - return [self.LocalEffect([leArea1, leArea2], 0, ", ")] - - def _popLocalEffects_list(self): - leArea1 = self.LocalEffectArea("AboveElev", "windward") - leArea2 = self.LocalEffectArea("BelowElev", "leeward") - return [self.LocalEffect([leArea1, leArea2], 20, ", ")] - - def _skyPopWxLocalEffects_list(self): - leArea1 = self.LocalEffectArea("AboveElev", "windward") - leArea2 = self.LocalEffectArea("BelowElev", "leeward") - # Set threshold to be used by checkSkyWxDifference - self._skyLocalEffectThreshold = 38 - return [self.LocalEffect([leArea1, leArea2], - self.checkSkyWxDifference, ", ")] - - -###################################### - - -## Section G: Unit Conversion - - def element_outUnits_dict(self, tree, node): - dict = AreaFcst.TextProduct.element_outUnits_dict(self, tree, node) - # Default is mph - #dict["Wind"] = "kts" - return dict - -## Section G: Standard and Non-Standard Rounding - - def increment_nlValue_dict(self, tree, node): - # Increment for rounding values - # Units depend on the product - dict = TextRules.TextRules.increment_nlValue_dict(self, tree, node) - # Default is 5 - #dict["Wind"] = 10 - #dict["Wind"] = { - # 'default': 15, - # (0, 21): 5, - # (21, 40): 10, - # } - return dict - -## Section G: Range Adjustment - - def minimum_range_nlValue_dict(self, tree, node): - # This threshold is the "smallest" min/max difference allowed between values reported. 
- # For example, if threshold is set to 5 for "MaxT", and the min value is 45 - # and the max value is 46, the range will be adjusted to at least a 5 degree - # range e.g. 43-48. These are the values that are then submitted for phrasing - # such as: - # HIGHS IN THE MID 40S - dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) - # Default is 0 - #dict["Wind"] = 10 - return dict - - def minimum_range_bias_nlValue_dict(self, tree, node): - # "Min", "Average", "Max" - # Should the minimum_range be taken from the "min" "average" or "max" - # value of the current range? - dict = TextRules.TextRules.minimum_range_bias_nlValue_dict(self, tree, node) - # Default is Max - #dict["Wind"] = "Average" - return dict - - def maximum_range_nlValue_dict(self, tree, node): - # Maximum range to be reported within a vector phrase - # e.g. 5 to 10 mph - # Units depend on the product - dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) - #dict["Wind"] = 10 - return dict - -## Section G: Null Values - - def null_nlValue_dict(self, tree, node): - # Threshold for reporting null values - # Units depend on the element and product - dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) - dict["Wind"] = 5 - return dict - - def first_null_phrase_dict(self, tree, node): - # Phrase to use if values THROUGHOUT the period or - # in the first period are Null (i.e. below threshold OR NoWx) - # E.g. light winds. or light winds becoming N 5 MPH. - dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) - #dict["Wind"] = "light winds" - #dict["Wind"] = "" - return dict - - def null_phrase_dict(self, tree, node): - # Phrase to use for null values in subPhrases other than the first - # Can be an empty string - # E.g. 
"north winds 20 to 25 Knots becoming light" - dict = TextRules.TextRules.null_phrase_dict(self, tree, node) - #dict["Wind"] = "light" - #dict["Wind"] = "" - dict["Wx"] = "" - return dict - -## Section G: Until Phrasing - - def untilPhrasing_flag_dict(self, tree, node): - # If set to 1, "until" time descriptor phrasing will be used. - # E.g. "north winds 20 MPH until 10 AM, then 35 MPH" - return { - "otherwise": 0, - #"Wind" : 1, - } - - def onTheFly_untilPhrasing_flag_dict(self, tree, node): - # If set to 1, "until" time descriptor phrasing will be used. - # E.g. "north winds 20 MPH until 10 AM, then 35 MPH" - return { - "otherwise": 0, - #"Wind" : 1, - } - - def untilPhrasing_format_dict(self, tree, node): - # Format for "until" time descriptors. - # If "military": until 1000 - # If "standard": until 10 AM - return { - "otherwise": "military", - #"Wind": "standard", - } - -###################################### -## Section G: Period_1 For Visibility Tests -## def Period_1(self): -## return { -## "type": "component", -## "methodList": [ -## self.orderPhrases, -## self.consolidateSubPhrases, -## self.assemblePhrases, -## self.wordWrap, -## ], -## "analysisList": [ -## ("MinT", self.stdDevMinMax), -## ("MaxT", self.stdDevMinMax), -## ("T", self.hourlyTemp), -## ("T", self.minMax), -## ("Sky", self.median, [3]), -## ("PoP", self._PoP_analysisMethod("Period_1"), [3]), -## ("PoP", self.binnedPercent, [3]), -## #("Wind", self.vectorMedianRange, [0]), -## ("Wind", self.vectorModeratedMinMax, [0]), -## ("Wind", self.vectorMinMax, [0]), -## ("WindGust", self.maximum, [0]), -## ("Wx", self.rankedWx, [3]), -## ("WindChill", self.minMax), -## ("HeatIndex", self.minMax), -## ], -## "phraseList":[ -## self.sky_phrase, -#### self.skyPopWx_phrase, -#### (self.skyPopWx_phrase, self._wxLocalEffects_list()), -## self.wind_summary, -## self.reportTrends, -## self.weather_phrase, -#### (self.weather_phrase,self._wxLocalEffects_list()), -## self.heavyPrecip_phrase, -## 
self.severeWeather_phrase, -## self.highs_phrase, -## self.lows_phrase, -## self.temp_trends, -## self.wind_withGusts_phrase, -## self.popMax_phrase, -## self.windChill_phrase, -## self.heatIndex_phrase, -## ], -#### "additionalAreas": [ -#### # Areas listed by weather element that will be -#### # intersected with the current area then -#### # sampled and analysed. -#### # E.g. used in local effects methods. -#### ("Sky", ["Rush_Valley"]), -#### ("Wx", ["Rush_Valley"]), -#### ("PoP", ["Rush_Valley"]), -#### ], -## } - -## # Handling visibility within the weather phrase -## def embedded_visibility_flag(self, tree, node): -## # If 1, report visibility embedded with the -## # weather phrase. Set this to 0 if you are using the -## # visibility_phrase. -## return 0 - -###################################### - - -## Section G: Weather Key Filtering - - def wxCombinations(self): - # This is the list of which wxTypes should be combined into one. - # For example, if ("RW", "R") appears, then wxTypes of "RW" and "R" will - # be combined into one key and the key with the dominant coverage will - # be used as the combined key. - # You may also specify a method which will be - # -- given arguments subkey1 and subkey2 and - # -- should return - # -- a flag = 1 if they are to be combined, 0 otherwise - # -- the combined key to be used - # Note: The method will be called twice, once with (subkey1, subkey2) - # and once with (subkey2, subkey1) so you can assume one ordering. 
- # See the example below, "combine_T_RW" - # - return [ - ("RW", "R"), - ("SW", "S"), - self.combine_T_RW, - ] - - def combine_T_RW(self, subkey1, subkey2): - # Combine T and RW only if the coverage of T - # is dominant over the coverage of RW and - # RW does not have + intensity - wxType1 = subkey1.wxType() - wxType2 = subkey2.wxType() - if wxType1 == "T" and wxType2 == "RW": - if subkey2.intensity() != "+": - order = self.dominantCoverageOrder(subkey1, subkey2) - if order == -1 or order == 0: - return 1, subkey1 - return 0, None - - def useSkyPopWx_consolidation(self, tree, node): - # If set to 1, the skyPopWx phrase will consolidate weather keys that - # span all time ranges to produce: - # Partly cloudy with a chance of rain. - # Snow in the morning, then sleet in the afternoon. - # - # instead of: - # Partly cloudy. Chance of rain and snow in the morning, - # then a chance of rain and sleet in the afternoon. - - #return 1 - return 0 - - def areal_sky_flag(self, tree, node): - # Set to 1 if you want to use areal (e.g. patchy clouds, areas of clouds) - # vs. traditional sky wording when appropriate. - # BE SURE AND SET THE "arealSkyAnalysis" flag to 1 in the Definition section! - # You may want to base this decision on the current edit area and/or - # component e.g. "Period_1" - return 0 - - def matchToWxInfo_dict(self, tree, node): - # The system will automatically match the following elements to - # the highest ranking weather subkey coverage. - # Each entry is a tuple of (increment, algorithm, noPrecipValue) where - - # increment: This is the increment from the low "bin" value - # to be added. For example, PoP has a bin of 55-65, so - # its increment is 5 to end up with values as multiples of 10. - - # algorithm: Can be - # Max: The MAXIMUM value that falls within the coverage range - # for the highest ranking subkey will be chosen. 
- # Mode: The MOST FREQUENT (over space and time) value that - # falls within the coverage range for the highest ranking - # subkey will be chosen. - # MaxMode: This is the MAXIMUM value over time of the MOST - # FREQUENT values over area for each of the grids in the timeRange. - # In other words, for each grid, we find the Mode i.e. MOST FREQUENT - # value that falls within the coverage range for the highest - # ranking subkey. Then we find the MAXIMUM of these values - # over the grids again falling within the coverage values. - # AnalysisMethod: This will simply use whatever analysis method - # is specified as the first entry in the product component - # for the element. For example, if you have - # - # ("PoP", self.stdDevMaxAvg, [3]), - # ("PoP", self.binnedPercent, [3]), - # - # the "stdDevMaxAvg" method will be used. - # noPrecipValue: The value that should be returned if there is - # no precipitating weather. Can be: - # None - # Max: The maximum value found that has a greater > 0% occurrence. - # AnalysisMethod: As above, will return the result of the product - # component analysis method e.g. stdDevMaxAvg or maximum. - # - # EXAMPLE 1: Suppose we have: - - # Wx Hours 1-12: Chc R (coverage range is 30-60) - # PoP Hours 1-3: 40% (over 70% of area), 50% (over 30% of area) - # Hours 4-12: 30 - - # For the 12-hour PoP, - # If set to Max, we will get PoP: 50 - # If set to Mode, we will get PoP: 30 - # If set to MaxMode, we will get PoP: 40 - - # For the Hours 1-3 PoP: - # If set to Max, we will get PoP: 50 - # If set to Mode, we will get PoP: 40 - # If set to MaxMode, we will get PoP: 40 - - # NOTE: IF you add a new element to this list, you MUST include - # a coverage table named "coverage_value". Follow - # the example for "coveragePoP_value" in CommonUtils. You can - # then access the element value by calling "matchToWx" (WxPhrases). 
- # - # Test case 5_1 PopWx1 - return { - "PoP": (5, "Max", None), # 50 - #"PoP": (5, "Mode", None), # 30 - #"PoP": (5, "MaxMode", None), # 40 - #"PoP": (5, "AnalysisMethod", None), # 40 - "LAL": (0, "Max", "Max"), - } - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Phrase_Test_Local +# Local customizations for AreaFcst as Base class for testing Narrative Phrases +# Refer to Test Cases for Text Products: tp003 +# Author: +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +import AreaFcst +import string +import TextRules +import string, time, re, os, types, copy + +class TextProduct(AreaFcst.TextProduct): + Definition = copy.deepcopy(AreaFcst.TextProduct.Definition) + + # REQUIRED CONFIGURATION ITEMS + Definition['displayName'] = "TEST_PhraseTest" + #Definition["outputFile"] = "/awips/GFESuite/products/TEXT/ZFP.txt" + + # Header configuration items + #Definition["productName"] = "Zone Forecast Product" # name of product + #Definition["fullStationID"] = "Kxxx" # full station identifier (4letter) + #Definition["wmoID"] = "FOUS45" # WMO ID + #Definition["pil"] = "ZFPxxx" # product pil + #Definition["areaName"] = "stateName" # Name of state, such as "Georgia" + #Definition["wfoCity"] = "WfoCity" # Location of WFO - city name + #Definition["wfoState"] = "WfoState" # Location of WFO - state name + + # OPTIONAL CONFIGURATION ITEMS + Definition["defaultEditAreas"] = [ + ("area3", "Area 1"), + # ("area2", "Area 2"), + ] + + Definition["Period_1_version"] = 1 + + #Definition["directiveType"] = "C11" + #Definition["directiveType"] = "10-503" # Can be "C11" + #Definition["includeFloodingQuestion"] = 1 # Set to 1 to include flooding question + + #Definition["includeMultipleElementTable"] = 1 # Will include a TempPoPTable + #Definition["cityDictionary"] = "CityDictionary" # For TempPoPTable + + #Definition["areaDictionary"] = "AreaDictionary" # For product headers + #Definition["language"] = "english" + #Definition["hoursSChcEnds"] = 0 + + # Apply to C11 only: + #Definition["includeExtended"] = 1 # To include extended forecast + #Definition["extendedLabel"] = 1 # To include extended label + #Definition["includeEveningPeriod"] = 0 # To turn off evening period + + #Definition["includeMultipleElementTable"] = 1 # Will include a MultipleElementTable + # Uncomment just one elementList below + #Definition["elementList"] = ["Temp", "PoP"] # Default + #Definition["elementList"] = ["Temp", "Humidity", "PoP"] + #Definition["singleValueFormat"] = 1 # 
Default is 0 + + # Sampling Performance + #Definition["sampleFromServer"] = 1 # If 1, sample directly from server + # Trouble-shooting items + #Definition["passLimit"] = 20 # Limit on passes allowed through + # Narrative Tree + #Definition["trace"] = 1 # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + #Definition["debug"] = 1 + + Definition["arealSkyAnalysis"] = 1 + + def __init__(self): + AreaFcst.TextProduct.__init__(self) + + # OPTIONAL OVERRIDES + #def DAY(self): + # return 6 + #def NIGHT(self): + # return 18 + + # The thresholds and variables included here were selected because + # they are commonly overridden for your product. + # See the Text Product User Guide for other thresholds and variables + # that may be relevant to your product and for more information + # about the ones included here. + + def phrase_descriptor_dict(self, tree, node): + # Descriptors for phrases + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + # This is the default. Triggers if ALL coverage terms are areal + #dict["PoP"] = self.allAreal_or_chance_pop_descriptor, + # Uncomment this line for invoking areal or chance pop descriptor + # Triggers if ANY coverage terms are areal + #dict["PoP"] = self.areal_or_chance_pop_descriptor, + # Uncomment this line to use "chance" descriptor in all cases + #dict["PoP"] = "chance of" + return dict + + def pop_snow_lower_threshold(self, tree, node): + # Snow accumulation will not be reported if Pop is below this threshold + return 60 + + def pop_snowLevel_upper_threshold(self, tree, node): + # Snow level will be reported if Pop is above this threshold + return 60 + + def snowLevel_maximum_phrase(self, tree, node): + # This returns the maximum snow level value to be reported and the + # the corresponding snow level phrase. 
It can be set up by + # edit area as follows: + # editAreaList = [ + # ("area1", 8000, "above 8000 feet"), + # ("area2", 6000, "above 6000 feet"), + # # Don't mention snow level at all in area3: + # ("area3", 0, ""), + # ] + #maxElev = 0 + #phrase = "" + #for area, elev, elevPhrase in editAreaList: + # if self.currentAreaContains(tree, [area]): + # if elev > maxElev: + # maxElev = elev + # phrase = elevPhrase + #return (maxElev, phrase) + return (8000, "above 8000 feet") + + def vector_mag_difference_dict(self, tree, node): + # Replaces WIND_THRESHOLD + # Magnitude difference. If the difference between magnitudes + # for the first and second half of a period is greater than this value, + # the different magnitudes will be noted in the phrase. + # Units can vary depending on the element + dict = TextRules.TextRules.vector_mag_difference_dict(self, tree, node) + #dict["Wind"] = 20 + return dict + + def scalar_difference_nlValue_dict(self, tree, node): + # Scalar difference. If the difference between scalar values + # for 2 sub-periods is greater than this value, + # the different values will be noted in the phrase. 
+ dict = TextRules.TextRules.scalar_difference_nlValue_dict(self, tree, node) + #dict["WindGust"] = 20 + return dict + + def lake_wind_areaNames(self, tree, node): + # Return list of edit area names for which the lake_wind_phrase + # should be generated + # If you want the phrase potentially generated for all zones, use: + # return ["ALL"] + return [] + + def useWindsForGusts_flag(self, tree, node): + # Turn this on if you want to use the maximum Wind + # for reporting Gusts if a WindGust grid is not found + return 0 + + def range_threshold_nlValue_dict(self, tree, node): + # Range for reporting temperature ranges in temp_range_phrase + # e.g HIGHS 80 TO 85 + dict = TextRules.TextRules.range_threshold_nlValue_dict(self, tree, node) + dict["MaxT"] = 5 + dict["MinT"] = 5 + dict["MinRH"] = 5 + dict["MaxRH"] = 5 + return dict + + def temp_trend_nlValue(self, tree, node): + # THRESHOLD FOR REPORTING TEMPERATURE TRENDS + return 20.0 + + def stdDev_dict(self, parmHisto, timeRange, componentName): + # This dictionary defines the low and high limit at which + # outliers will be removed when calculating stdDev stats. + # These tuples represent the (low, high) number of standard + # deviations. Any values falling outside this range will + # not be included in the calculated statistic. 
+ return { + "LAL": (1.0, 1.0), + "MinRH": (1.0, 1.0), + "MaxRH": (1.0, 1.0), + "MinT": (1.0, 1.0), + "MaxT": (1.0, 1.0), + "Haines": (1.0, 1.0), + "PoP" : (1.0, 1.0), + "T" : (1.0, 1.0), + "Wind" : (1.0, 1.0), + } + + def value_connector_dict(self, tree, node): + dict = TextRules.TextRules.value_connector_dict(self, tree, node) + dict["MaxT"] = " to " + dict["MinT"] = " to " + return dict + + def windChillTemp_difference(self, tree, node): + # Difference between wind chill and temperature + # for reporting wind chill + return 5 + + def heatIndexTemp_difference(self, tree, node): + # Difference between heat index and temperature + # for reporting heat index + return 5 + + def Period_1(self): + exec("value = self.Period_1_version" + repr(self._Period_1_version) + "()") + return value + + def Period_1_version1(self): + component = { + "type": "component", + "methodList": [ + self.orderPhrases, + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("MinT", self.stdDevMinMax), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [3]), + ("PoP", self._PoP_analysisMethod("Period_1"), [3]), + ("PoP", self.binnedPercent, [3]), + ("Wind", self.vectorModeratedMinMax, [0]), + ("Wind", self.vectorMinMax, [0]), + ("WindGust", self.maximum, [0]), + ("Wx", self.rankedWx, [3]), + ("WindChill", self.minMax), + ("HeatIndex", self.minMax), + ("SnowAmt", self.accumMinMax), + ], + "phraseList":[ + self.sky_phrase, + self.skyPopWx_phrase, +## (self.skyPopWx_phrase, self._wxLocalEffects_list()), + self.wind_summary, + self.reportTrends, + self.weather_phrase, +## (self.weather_phrase,self._wxLocalEffects_list()), + self.heavyPrecip_phrase, + self.severeWeather_phrase, + self.visibility_phrase, + self.snow_phrase, + self.extremeTemps_phrase, + self.steady_temp_trends, + self.highs_phrase, + self.lows_phrase, + self.temp_trends, + self.wind_withGusts_phrase, + self.popMax_phrase, + 
self.windChill_phrase, + self.heatIndex_phrase, + ], +## "additionalAreas": [ +## # Areas listed by weather element that will be +## # intersected with the current area then +## # sampled and analysed. +## # E.g. used in local effects methods. +## ("Sky", ["Rush_Valley"]), +## ("Wx", ["Rush_Valley"]), +## ("PoP", ["Rush_Valley"]), +## ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [3])) + if self._useStormTotalSnow: + phraseList = component["phraseList"] + index = phraseList.index(self.total_snow_phrase) + phraseList[index] = self.stormTotalSnow_phrase + component["phraseList"] = phraseList + return component + + def _wxLocalEffects_addlArea_list(self): + leArea1 = self.LocalEffectArea("__Current__", "", intersectFlag=0) + leArea2 = self.LocalEffectArea("Rush_Valley", "in the Rush Valley", + intersectFlag=0) + return [self.LocalEffect([leArea1, leArea2], 10, ", except ")] + + def _10_503_issuance_list(self, argDict): + seriesDefAM = [ + ("Period_1", "period1"), #("Phantom", 12), +## ("Period_2_3", 12), ("Period_2_3", 12), +## ("Period_4_5", 12), ("Period_4_5", 12), +## ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), +## ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ] + seriesDefPM = [ + ("Period_1", "period1"), + ("Period_2_3", 12), ("Period_2_3", 12), + ("Period_4_5", 12), ("Period_4_5", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), + ] + + return [ + ("Morning", self.DAY(), self.NIGHT(), self.NIGHT(), + ".TODAY...", "early in the morning", "late in the afternoon", + 1, seriesDefAM), + ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), self.NIGHT(), + ".TODAY...", "early in the morning", "late in the afternoon", + 1, seriesDefAM), + ("Morning Update", "issuanceHour", 
self.NIGHT(), self.NIGHT(), + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, seriesDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), + ".REST OF TODAY...", "early in the morning","late in the afternoon", + 1, seriesDefAM), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), + ".TONIGHT...", "late in the night", "early in the evening", + 1, seriesDefPM), + ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + self.DAY(), + ".TONIGHT...", "late in the night", "early in the evening", + 1, seriesDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + ".REST OF TONIGHT...", "early in the morning","early in the evening", + 1, seriesDefPM), + # For the early morning update, this produces: + # REST OF TONIGHT: + # MONDAY + # MONDAY NIGHT + ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), + ".REST OF TONIGHT...", "early in the morning","late in the afternoon", + 0, seriesDefPM), + # Alternative + # For the early morning update, this produces: + # EARLY THIS MORNING: + # TODAY + # TONIGHT + #("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + # ".REST OF TONIGHT...", "late in the night", "early in the evening", + # 1, seriesDefPM), + #("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), + # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", + # 1, seriesDefPM), + ] + + + +###################################### +## Section F: +## +## Period_1 For Rush_Valley skyPopWx Local Effects +## +## def Period_1(self): +## return { +## "type": "component", +## "methodList": [ +## self.orderPhrases, +## self.consolidateSubPhrases, +## self.assemblePhrases, +## self.wordWrap, +## ], +## "analysisList": [ +## ("MinT", self.stdDevMinMax), +## ("MaxT", self.stdDevMinMax), +## ("T", self.hourlyTemp), +## ("T", self.minMax), +## ("Sky", self.median, [3]), +## ("PoP", 
self._PoP_analysisMethod("Period_1"), [3]), +## ("PoP", self.binnedPercent, [3]), +## #("Wind", self.vectorMedianRange, [0]), +## ("Wind", self.vectorModeratedMinMax, [0]), +## ("Wind", self.vectorMinMax, [0]), +## ("WindGust", self.maximum, [0]), +## ("Wx", self.rankedWx, [3]), +## ("WindChill", self.minMax), +## ("HeatIndex", self.minMax), +## ], +## "phraseList":[ +## self.sky_phrase, +#### self.skyPopWx_phrase, +## (self.skyPopWx_phrase, self._wxLocalEffects_addlArea_list()), +## self.wind_summary, +## self.reportTrends, +#### self.weather_phrase, +## (self.weather_phrase,self._wxLocalEffects_addlArea_list()), +## self.heavyPrecip_phrase, +## self.severeWeather_phrase, +## self.highs_phrase, +## self.lows_phrase, +## self.temp_trends, +## self.wind_withGusts_phrase, +## self.popMax_phrase, +## self.windChill_phrase, +## self.heatIndex_phrase, +## ], +## "additionalAreas": [ +## # Areas listed by weather element that will be +## # intersected with the current area then +## # sampled and analysed. +## # E.g. used in local effects methods. +## ("Sky", ["Rush_Valley"]), +## ("Wx", ["Rush_Valley"]), +## ("PoP", ["Rush_Valley"]), +## ], +## } +## +## Period_1 For skyPopWx Local Effects +## + + + + def Period_1_version2(self): + return { + "type": "component", + "methodList": [ + self.orderPhrases, + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("Sky", self.median, [3]), + ("PoP", self._PoP_analysisMethod("Period_1"), [3]), + ("PoP", self.binnedPercent, [3]), + ("Wx", self.rankedWx, [3]), + ], + "phraseList":[ + (self.sky_phrase, self._skyLocalEffects_list()), + (self.skyPopWx_phrase, self._skyPopWxLocalEffects_list()), + (self.weather_phrase,self._wxLocalEffects_list()), + (self.popMax_phrase, self._popLocalEffects_list()), + ], + "intersectAreas": [ + # Areas listed by weather element that will be + # intersected with the current area then + # sampled and analysed. + # E.g. used in local effects methods. 
+ ("Sky", ["AboveElev", "BelowElev"]), + ("Wx", ["AboveElev", "BelowElev"]), + ("PoP", ["AboveElev", "BelowElev"]), + ], + } + + def _skyLocalEffects_list(self): + leArea1 = self.LocalEffectArea("AboveElev", "windward") + leArea2 = self.LocalEffectArea("BelowElev", "leeward") + return [self.LocalEffect([leArea1, leArea2], self.checkSkyDifference, ", ")] + + def _wxLocalEffects_list(self): + leArea1 = self.LocalEffectArea("AboveElev", "windward") + leArea2 = self.LocalEffectArea("BelowElev", "leeward") + return [self.LocalEffect([leArea1, leArea2], 0, ", ")] + + def _popLocalEffects_list(self): + leArea1 = self.LocalEffectArea("AboveElev", "windward") + leArea2 = self.LocalEffectArea("BelowElev", "leeward") + return [self.LocalEffect([leArea1, leArea2], 20, ", ")] + + def _skyPopWxLocalEffects_list(self): + leArea1 = self.LocalEffectArea("AboveElev", "windward") + leArea2 = self.LocalEffectArea("BelowElev", "leeward") + # Set threshold to be used by checkSkyWxDifference + self._skyLocalEffectThreshold = 38 + return [self.LocalEffect([leArea1, leArea2], + self.checkSkyWxDifference, ", ")] + + +###################################### + + +## Section G: Unit Conversion + + def element_outUnits_dict(self, tree, node): + dict = AreaFcst.TextProduct.element_outUnits_dict(self, tree, node) + # Default is mph + #dict["Wind"] = "kts" + return dict + +## Section G: Standard and Non-Standard Rounding + + def increment_nlValue_dict(self, tree, node): + # Increment for rounding values + # Units depend on the product + dict = TextRules.TextRules.increment_nlValue_dict(self, tree, node) + # Default is 5 + #dict["Wind"] = 10 + #dict["Wind"] = { + # 'default': 15, + # (0, 21): 5, + # (21, 40): 10, + # } + return dict + +## Section G: Range Adjustment + + def minimum_range_nlValue_dict(self, tree, node): + # This threshold is the "smallest" min/max difference allowed between values reported. 
+ # For example, if threshold is set to 5 for "MaxT", and the min value is 45 + # and the max value is 46, the range will be adjusted to at least a 5 degree + # range e.g. 43-48. These are the values that are then submitted for phrasing + # such as: + # HIGHS IN THE MID 40S + dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) + # Default is 0 + #dict["Wind"] = 10 + return dict + + def minimum_range_bias_nlValue_dict(self, tree, node): + # "Min", "Average", "Max" + # Should the minimum_range be taken from the "min" "average" or "max" + # value of the current range? + dict = TextRules.TextRules.minimum_range_bias_nlValue_dict(self, tree, node) + # Default is Max + #dict["Wind"] = "Average" + return dict + + def maximum_range_nlValue_dict(self, tree, node): + # Maximum range to be reported within a vector phrase + # e.g. 5 to 10 mph + # Units depend on the product + dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) + #dict["Wind"] = 10 + return dict + +## Section G: Null Values + + def null_nlValue_dict(self, tree, node): + # Threshold for reporting null values + # Units depend on the element and product + dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) + dict["Wind"] = 5 + return dict + + def first_null_phrase_dict(self, tree, node): + # Phrase to use if values THROUGHOUT the period or + # in the first period are Null (i.e. below threshold OR NoWx) + # E.g. light winds. or light winds becoming N 5 MPH. + dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) + #dict["Wind"] = "light winds" + #dict["Wind"] = "" + return dict + + def null_phrase_dict(self, tree, node): + # Phrase to use for null values in subPhrases other than the first + # Can be an empty string + # E.g. 
"north winds 20 to 25 Knots becoming light" + dict = TextRules.TextRules.null_phrase_dict(self, tree, node) + #dict["Wind"] = "light" + #dict["Wind"] = "" + dict["Wx"] = "" + return dict + +## Section G: Until Phrasing + + def untilPhrasing_flag_dict(self, tree, node): + # If set to 1, "until" time descriptor phrasing will be used. + # E.g. "north winds 20 MPH until 10 AM, then 35 MPH" + return { + "otherwise": 0, + #"Wind" : 1, + } + + def onTheFly_untilPhrasing_flag_dict(self, tree, node): + # If set to 1, "until" time descriptor phrasing will be used. + # E.g. "north winds 20 MPH until 10 AM, then 35 MPH" + return { + "otherwise": 0, + #"Wind" : 1, + } + + def untilPhrasing_format_dict(self, tree, node): + # Format for "until" time descriptors. + # If "military": until 1000 + # If "standard": until 10 AM + return { + "otherwise": "military", + #"Wind": "standard", + } + +###################################### +## Section G: Period_1 For Visibility Tests +## def Period_1(self): +## return { +## "type": "component", +## "methodList": [ +## self.orderPhrases, +## self.consolidateSubPhrases, +## self.assemblePhrases, +## self.wordWrap, +## ], +## "analysisList": [ +## ("MinT", self.stdDevMinMax), +## ("MaxT", self.stdDevMinMax), +## ("T", self.hourlyTemp), +## ("T", self.minMax), +## ("Sky", self.median, [3]), +## ("PoP", self._PoP_analysisMethod("Period_1"), [3]), +## ("PoP", self.binnedPercent, [3]), +## #("Wind", self.vectorMedianRange, [0]), +## ("Wind", self.vectorModeratedMinMax, [0]), +## ("Wind", self.vectorMinMax, [0]), +## ("WindGust", self.maximum, [0]), +## ("Wx", self.rankedWx, [3]), +## ("WindChill", self.minMax), +## ("HeatIndex", self.minMax), +## ], +## "phraseList":[ +## self.sky_phrase, +#### self.skyPopWx_phrase, +#### (self.skyPopWx_phrase, self._wxLocalEffects_list()), +## self.wind_summary, +## self.reportTrends, +## self.weather_phrase, +#### (self.weather_phrase,self._wxLocalEffects_list()), +## self.heavyPrecip_phrase, +## 
self.severeWeather_phrase, +## self.highs_phrase, +## self.lows_phrase, +## self.temp_trends, +## self.wind_withGusts_phrase, +## self.popMax_phrase, +## self.windChill_phrase, +## self.heatIndex_phrase, +## ], +#### "additionalAreas": [ +#### # Areas listed by weather element that will be +#### # intersected with the current area then +#### # sampled and analysed. +#### # E.g. used in local effects methods. +#### ("Sky", ["Rush_Valley"]), +#### ("Wx", ["Rush_Valley"]), +#### ("PoP", ["Rush_Valley"]), +#### ], +## } + +## # Handling visibility within the weather phrase +## def embedded_visibility_flag(self, tree, node): +## # If 1, report visibility embedded with the +## # weather phrase. Set this to 0 if you are using the +## # visibility_phrase. +## return 0 + +###################################### + + +## Section G: Weather Key Filtering + + def wxCombinations(self): + # This is the list of which wxTypes should be combined into one. + # For example, if ("RW", "R") appears, then wxTypes of "RW" and "R" will + # be combined into one key and the key with the dominant coverage will + # be used as the combined key. + # You may also specify a method which will be + # -- given arguments subkey1 and subkey2 and + # -- should return + # -- a flag = 1 if they are to be combined, 0 otherwise + # -- the combined key to be used + # Note: The method will be called twice, once with (subkey1, subkey2) + # and once with (subkey2, subkey1) so you can assume one ordering. 
+ # See the example below, "combine_T_RW" + # + return [ + ("RW", "R"), + ("SW", "S"), + self.combine_T_RW, + ] + + def combine_T_RW(self, subkey1, subkey2): + # Combine T and RW only if the coverage of T + # is dominant over the coverage of RW and + # RW does not have + intensity + wxType1 = subkey1.wxType() + wxType2 = subkey2.wxType() + if wxType1 == "T" and wxType2 == "RW": + if subkey2.intensity() != "+": + order = self.dominantCoverageOrder(subkey1, subkey2) + if order == -1 or order == 0: + return 1, subkey1 + return 0, None + + def useSkyPopWx_consolidation(self, tree, node): + # If set to 1, the skyPopWx phrase will consolidate weather keys that + # span all time ranges to produce: + # Partly cloudy with a chance of rain. + # Snow in the morning, then sleet in the afternoon. + # + # instead of: + # Partly cloudy. Chance of rain and snow in the morning, + # then a chance of rain and sleet in the afternoon. + + #return 1 + return 0 + + def areal_sky_flag(self, tree, node): + # Set to 1 if you want to use areal (e.g. patchy clouds, areas of clouds) + # vs. traditional sky wording when appropriate. + # BE SURE AND SET THE "arealSkyAnalysis" flag to 1 in the Definition section! + # You may want to base this decision on the current edit area and/or + # component e.g. "Period_1" + return 0 + + def matchToWxInfo_dict(self, tree, node): + # The system will automatically match the following elements to + # the highest ranking weather subkey coverage. + # Each entry is a tuple of (increment, algorithm, noPrecipValue) where + + # increment: This is the increment from the low "bin" value + # to be added. For example, PoP has a bin of 55-65, so + # its increment is 5 to end up with values as multiples of 10. + + # algorithm: Can be + # Max: The MAXIMUM value that falls within the coverage range + # for the highest ranking subkey will be chosen. 
+ # Mode: The MOST FREQUENT (over space and time) value that + # falls within the coverage range for the highest ranking + # subkey will be chosen. + # MaxMode: This is the MAXIMUM value over time of the MOST + # FREQUENT values over area for each of the grids in the timeRange. + # In other words, for each grid, we find the Mode i.e. MOST FREQUENT + # value that falls within the coverage range for the highest + # ranking subkey. Then we find the MAXIMUM of these values + # over the grids again falling within the coverage values. + # AnalysisMethod: This will simply use whatever analysis method + # is specified as the first entry in the product component + # for the element. For example, if you have + # + # ("PoP", self.stdDevMaxAvg, [3]), + # ("PoP", self.binnedPercent, [3]), + # + # the "stdDevMaxAvg" method will be used. + # noPrecipValue: The value that should be returned if there is + # no precipitating weather. Can be: + # None + # Max: The maximum value found that has a greater > 0% occurrence. + # AnalysisMethod: As above, will return the result of the product + # component analysis method e.g. stdDevMaxAvg or maximum. + # + # EXAMPLE 1: Suppose we have: + + # Wx Hours 1-12: Chc R (coverage range is 30-60) + # PoP Hours 1-3: 40% (over 70% of area), 50% (over 30% of area) + # Hours 4-12: 30 + + # For the 12-hour PoP, + # If set to Max, we will get PoP: 50 + # If set to Mode, we will get PoP: 30 + # If set to MaxMode, we will get PoP: 40 + + # For the Hours 1-3 PoP: + # If set to Max, we will get PoP: 50 + # If set to Mode, we will get PoP: 40 + # If set to MaxMode, we will get PoP: 40 + + # NOTE: IF you add a new element to this list, you MUST include + # a coverage table named "coverage_value". Follow + # the example for "coveragePoP_value" in CommonUtils. You can + # then access the element value by calling "matchToWx" (WxPhrases). 
+ # + # Test case 5_1 PopWx1 + return { + "PoP": (5, "Max", None), # 50 + #"PoP": (5, "Mode", None), # 30 + #"PoP": (5, "MaxMode", None), # 40 + #"PoP": (5, "AnalysisMethod", None), # 40 + "LAL": (0, "Max", "Max"), + } + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SAF_Overrides.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SAF_Overrides.py index 1f16fe6953..61208196be 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SAF_Overrides.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SAF_Overrides.py @@ -1,1028 +1,1028 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# --------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without -# technical support, and with no warranty, express or implied, as to -# its usefulness for any purpose. -# -# ---------------------------------------------------------------------------- -# History -# Time Ticket Number Developer Comments -# ----------------------------------------------------------------------------- -# 05/25/2016 18594 ryu Fix placing punctuation in the middle of a -# headline. -# ---------------------------------------------------------------------------- -# SAF_Overrides -# -# This file provides any product specific overrides for the -# SAF product. This file is part of the baseline. -# -# Definition Section: -# Overrides: -# Additions: -# -# Methods: -# Overrides: -# Additions: -# -# --------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import string, time, re, os, types, copy -import TextRules -import TimeRange, AbsTime - -# Define overrides of Product Definition settings and -# default values of additional Definition settings -# ( This Definition section must be before the Class definition) - -#***** THIS NEXT LINE IS REQUIRED ***** -Definition = {} -# -# SAF Definitions: -# Definition statements must start in column 1 - -### SAF settings of baseline options: ### - -#Definition['displayName'] = "SAF" -Definition["productName"] = "Zone Forecast Prodact" # name of product -Definition["outputFile"] = "{prddir}/TEXT/SAF.txt" -Definition["extendedLabel"] = 1 # To include extended label -Definition["includeEveningPeriod"] = 0 # To turn off evening period - -# lacList - Listening area code(s). -# Must be a list of LACs to create forecasts for multiple towers. 
-Definition["lacList"] = [] - -# lacAreaDict - For each defaultEditArea, enter it's associated LAC -Definition["lacAreaDict"] = {} - -# pilDict - Dictionary with keys of LAC and values of -# product pil used as CRS ID in the CRS header. - -Definition["pilDict"] = {} - -############################ -# OPTIONAL SET-UP: - -# Flag to repeat first period at the end. 1 or 0 -Definition["repeat1stPeriod"] = 0 - -# summaryExtended - flag 0 or 1 to generate a summary extended -# forecast. If 1, you must define summaryAreaDict. -# Also turn off extendedLabel and includeExtended -Definition["summaryExtended"] = 0 - -# summaryAreaDict - dictionary with keys of the LAC ID -# and values of a tuple of (editAreaName, areaLabel) where -# areaLabel is a label string such as "Western Virginia" -# editAreaName must be the name of a GFE defined edit area -Definition["summaryAreaDict"] = {} - -# summaryExtendedIntro is a string to introduce the extended -# such as "The extended forecast for" -Definition["summaryExtendedIntro"] = "The extended forecast for" - -# lacFileDict - Dictionary with keys of LAC and values -# of full pathname of where to store the data on disk. -Definition["lacFileDict"] = {} - -### New Definitions not in the baseline ### - -# END definitions -############################################################ - -#********************************************************************** -# MAKE NO CHANGES HERE -# The minimum contents of this file are the above Definition = {} line -# plus following class definition and the __init__ method with only -# the "pass" line in it. 
- -class SAF_Overrides: - """Class NNN_FILETYPE - Version: IFPS""" - - def __init__(self): - pass - -# End MAKE NO CHANGES HERE -#********************************************************************** - # Add methods here making sure to indent inside the class statement - # SAF Overrides ------------------------ - - # It is helpful to put a debug statement at the beginning of each - # method to help with trouble-shooting. - #def _method(self): - #self.debug_print("Debug: _method in SAF_Overrides") - - def generateForecast(self, argDict): - """This allows for multiple towers to be generated by the formatter. - Each tower will be a separate section in the output text. - The forecast for each tower can be sent to a different PIL in - the textdb and better yet, written directly to a file in the - NWR pending or ready directory. - This requires Definition["lacList"] to be specified as a list. - """ - self.debug_print("") - # Generate Text Phrases for a list of edit areas - - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." 
- - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcstDict = self._initializeFcst(argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(50) - # All edit areas for all transmitters must be in _areaList - # with each areaLabel prepended with the LAC - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - areaLac = self._getAreaLac(editArea, areaLabel, fcstDict, argDict) - fcst = fcstDict[areaLac] - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fcstDict[areaLac] = fcst - fraction = fractionOne - - finalFcst = self._getFinalFcst(fcstDict, argDict) - return finalFcst - - def _getAreaLac(self, editArea, areaLabel, fcstDict, argDict): - self.debug_print("") - eaName = editArea.getId().getName() - if self._multiTower: - try: - areaLac = self._lacAreaDict[eaName] - except: - msg = "Edit Area Name " + eaName + " " + areaLabel + \ - " not in lacAreaDict " - raise "defaultEditAreasError", msg - else: - areaLac = self._lac - return areaLac - - def _determineTimeRanges(self, argDict): - self.debug_print("") - # Set up the Narrative Definition and initial Time Range - self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict)) - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._expireTime = self._issuanceInfo.expireTime() - self._issueTime = self._issuanceInfo.issueTime() - self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - if 
self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) - self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") - - # This section determines when to start the extended forecast - # for the summary extended. - (currentLocalTime, self._shift) = self.determineTimeShift() - day = currentLocalTime.day - month = currentLocalTime.month - year = currentLocalTime.year - #print "_determineTimeRanges: p1 TR=",self._issuanceInfo.period1TimeRange() - p1_endtime = self._issuanceInfo.period1TimeRange().endTime() - - # If the first period ends at the same time as self.NIGHT(), then - # the product starts with a daytime issuance, otherwise a nighttime - # issuance and the extended will start 24 hrs later - nightTime = AbsTime.absTimeYMD(year,month,day,self.NIGHT()) - self._shift - dayTime = AbsTime.absTimeYMD(year,month,day,self.DAY()) - self._shift - #print "_determineTimeRanges: p1, day, night=",p1_endtime,dayTime,nightTime - # Offset is hours from self.DAY() - if p1_endtime == nightTime: - # Daytime run - today, tonight, day2, night2 - offset = 48 - elif p1_endtime == dayTime: - # PM run after midnight- rest of night, today, tonight, day2, night2 - offset = 48 - else: - # Normal pm run - tonight, day1, night1, day2, night2 - offset = 72 - - # Determine "issuanceHour" - startTime = AbsTime.absTimeYMD(year,month,day,self.DAY()) - # Convert to GMT time before making time range - startTime = startTime - self._shift + offset*3600 - #print "_determineTimeRanges: p1 TR=",self._issuanceInfo.period1TimeRange() - self._extendedTimeRange = 
TimeRange.TimeRange(startTime, startTime + 3600) - #print "_determineTimeRanges: extended TR=",self._extendedTimeRange - - return None - - def _initializeFcst(self, argDict): - self.debug_print("") - # For backward compatibility, if there are Definition entries - # of "lac" and "pil", use them instead of lacList and pilList - try: - self._lacList = [self._lac] - self._pilDict = {self._lac:self._pil} - self._multiTower = 0 - except: - self._multiTower = 1 - - fcstDict = {} - for lac in self._lacList: - fcst = "" - # Make sure there is a crsID for each LAC - if self._pilDict.has_key(lac): - self._currentPil = self._pilDict[lac] - self._currentLac = lac - fcstDict[lac] = self._preProcessProduct(fcst, argDict) - else: - msg = "Setup Error! Definition['pilDict']" + \ - " has no entry for LAC ID=" + lac - raise "definitionError", msg - return fcstDict - - def _preProcessProduct(self, fcst, argDict): - self.debug_print("") - # SAF Heading: - # aT_ENGOKCZFPLTS03053014120305301412 CD OKZ036c0305310612 - language = argDict["language"] - if language == "english": - languageStr = "T_ENG" - elif language == "spanish": - languageStr = "T_SPA" - elif language == "french": - languageStr = "T_FRE" - issueTimeStr = self.formatTimeString( - self._issueTime.unixTime(), "%y%m%d%H%M", "GMT") - expireTimeStr = self.formatTimeString( - self._expireTime.unixTime(),"%y%m%d%H%M", "GMT") - fcst = fcst + languageStr - fcst = fcst + self._currentPil - fcst = fcst + issueTimeStr + issueTimeStr - fcst = fcst + " CD " - fcst = fcst + self._currentLac - fcst = fcst + expireTimeStr + "\n\n" - - # Get Synopsis from previous forecast - #productID = "PDXCWFPQR" - #synopsis = self.getPreviousProduct(productID, "SYNOPSIS") - #SynopsisHeading = ".SYNOPSIS FOR SOUTHERN WASHINGTON AND NORTHERN OREGON COAST..." 
- #synopsis = re.sub(r'\n', r' ', synopsis) - #synopsis = self.endline(synopsis, linelength=self._lineLength) - #fcst = fcst + "-\n" + SynopsisHeading + "\n" + synopsis + "\n$$\n\n" - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - self.debug_print("") - # This is the header for an edit area combination - #ERH fcst=fcst+"$$\nNow for the official National " - fcst=fcst+"Now for the official National " - fcst=fcst+"Weather Service forecast\nfor "+areaLabel+"\n\n" - if self.allowedHazards() != []: - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - headlines = self.generateProduct("Hazards", argDict, - area = editArea, areaLabel=areaLabel, - timeRange = self._timeRange) - if headlines != "": - # Replace ellipsis at end of each headline with period - headlines = headlines.replace("...\n", ".\n") - - # Remove other ellipses - headlines = headlines.replace("...", "") - - # Remove multiple trailing new lines and replace - # with a single one - headlines = string.strip(headlines) + "\n" - - fcst = fcst + headlines + "\n" - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - """Save first period text for repeating at the end of the SAF. """ - self.debug_print("") - if self._repeat1stPeriod == 1: - # Clean up the area label to avoid possibly - # repeat "forecast for" - e=re.compile('.*forecast for',re.IGNORECASE) - intro = "Again, the forecast for " + \ - e.sub("",areaLabel).strip() - # Now strip off any punctuation on the area label - # and add the period label, ie, today, tonight - intro=intro.rstrip(",.") + " for " + \ - self._1stPeriodLabel + ", " - # Wrap it up! 
- text = self.endline(intro + self._1stPeriodFcst) - fcst = fcst + "\n" + text - return fcst + "\n" - - def _postProcessProduct(self, fcst, argDict): - self.debug_print("") - if self._summaryExtended == 1: - fcst = fcst + self._getSummaryExtended(argDict) - - fcst = string.replace(fcst, "%expireTime", self._expireTimeStr) - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - - fcst = fcst.replace("...", ", ") - #This is to get rid of leading "." at the start of a fcst period. - fcst = fcst.replace("\n.", "\n") - #Get rid of $$ - fcst = fcst.replace("$$\n", "") - - escA="" + "a" - escB="" + "b" - fcst = escA + fcst + "\n" + escB - - #Now uncapitalize special characters - fcst = self._lower_special_chars(fcst) - return fcst - - def _getFinalFcst(self, fcstDict, argDict): - self.debug_print("") - # Assemble the entries from fcstDict into a fcst string - finalFcst = "" - for lac in self._lacList: - fcst = fcstDict[lac] - self._currentLac = lac - fcst = self._postProcessProduct(fcst, argDict) - finalFcst = finalFcst + fcst + "\n\n" - # Write tower specific text to disk. Normally - # this will be the /data/fxa/workFiles/nwr/pending - # directory. Must be full pathname! - if self._lacFileDict.has_key(lac): - f=open(self._lacFileDict[lac],"w") - f.write(fcst) - f.close() - return finalFcst - - def setLabel(self, tree, component): - exLabel= "\n\nAnd now the extended forecast for the radio listening area.\n" - component.set("words", exLabel) - return self.DONE() - - def createLabel(self, tree, node, timeRange, issuanceInfo, currentLocalTime, shift, index=0): - # Make a label given the timeRange in GMT and the shift to - # convert it to local time. currentLocalTime can be used to - # compare to current day. 
- - if timeRange.duration() <= 3600: - return "" - if index == 0: - try: - label = issuanceInfo.period1Label() - if label != "": - return label - except: - pass - try: - today = issuanceInfo.todayFlag() - except: - today = 1 - try: - useHolidays = self._useHolidays - except: - useHolidays = 1 - nextDay24HourLabel = self.nextDay24HourLabel_flag(tree, node) - splitDay24HourLabel = self.splitDay24HourLabel_flag(tree, node) - label = self.getWeekday(timeRange, holidays=1, shiftToLocal=1, - labelType="CapitalWithComma", today=today, - tomorrow=0, nextDay24HourLabel=nextDay24HourLabel, - splitDay24HourLabel=splitDay24HourLabel) - return label - - def Labels(self): - labels = TextRules.TextRules.Labels(self) - labels["CapitalWithComma"] = { - "PrePunctuation": "", - "PostPunctuation": ", ", - "Weekday" : { - 6 : "Sunday", - 0 : "Monday", - 1 : "Tuesday", - 2 : "Wednesday", - 3 : "Thursday", - 4 : "Friday", - 5 : "Saturday" - }, - "Now": "Now", - "Today":"Today", - "Tonight": "Tonight", - "Rest of Today":"Rest of Today", - "Rest of Tonight": "Rest of Tonight", - "Night": "Night", - "Evening": "Evening", - "Afternoon": "This Afternoon", - } - return labels - - def _issuance_list(self, argDict): - """This shows how to add customized issuance lists via Definition - settings. Supply the name of a different issuance_list method - in Definition["directiveType"]. There must be a method with - the name supplied or else the formatter will bomb out.""" - self.debug_print("") - # This method sets up configurable issuance times with associated - # narrative definitions. See the Text Product User Guide for documentation. - if self._definition["directiveType"] == "C11": - return self._C11_issuance_list(argDict) - elif self._definition["directiveType"] == "10-503": - return self._10_503_issuance_list(argDict) - else: - # Warning! if self._definition["directiveType"] is not - # a defined method, the formatter will error out. - exec "meth=self." 
+ self._definition["directiveType"] - return meth(argDict) - - def _C11_issuance_list(self, argDict): - try: - if self._definition["includeEveningPeriod"] == 1: - narrativeDefAM = [ - ("FirstFcstPeriod", "period1"), - ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), - ("LaterPeriod", 12), ("LaterPeriod", 12), - ("Evening", 6), - ] - narrativeDefPM = [ - ("FirstFcstPeriod", "period1"), - ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), - ("LaterPeriod", 12), ("LaterPeriod", 12), ("LaterPeriod", 12), - ("Evening", 6), - ] - else: - narrativeDefAM = [ - ("FirstFcstPeriod", "period1"), - ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), - ("LaterPeriod", 12), ("LaterPeriodWithoutLows", 18), - ] - narrativeDefPM = [ - ("FirstFcstPeriod", "period1"), - ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), - ("LaterPeriod", 12), ("LaterPeriod", 12), ("LaterPeriodWithoutLows", 18), - ] - extended = [("C11Extended", 24),("C11Extended", 24), ("C11Extended", 24),("C11Extended", 24)] - if self._includeExtended == 1: - if self._extendedLabel == 1: - narrativeDefAM.append(("ExtendedLabel",0)) - narrativeDefPM.append(("ExtendedLabel",0)) - narrativeDefAM = narrativeDefAM + extended - narrativeDefPM = narrativeDefPM + extended - except: - narrativeDefAM = None - narrativeDefPM = None - - try: - if self._numPeriods != "All": - numPeriods = string.atoi(self._numPeriods) - narrativeDefAM = narrativeDefAM[0:numPeriods] - narrativeDefPM = narrativeDefPM[0:numPeriods] - except: - pass - - narrativeDefAM.append(("Custom", ("ExtraSampling", (-24, 12)))) - narrativeDefPM.append(("Custom", ("ExtraSampling", (-24, 24)))) - - #(description, startHour, endHour, period1 label, period1 lateNight lateDay phrase, todayFlag, series definition) - return [ - ("Morning", self.DAY(), self.NIGHT(), self.NIGHT(), - "Today, ", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), self.NIGHT(), - "Today, ", "early in the morning", "late 
in the afternoon", - 1, narrativeDefAM), - ("Morning Update", "issuanceHour", self.NIGHT(), self.NIGHT(), - "Rest of Today, ", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), - "Rest of Today, ", "early in the morning","late in the afternoon", - 1, narrativeDefAM), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), - "Tonight, ", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + self.DAY(), - "Tonight, ", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - "Rest of Tonight, ", "late in the night","early in the evening", - 1, narrativeDefPM), - ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), - "Rest of Tonight, ", "early in the morning","late in the afternoon", - 0, narrativeDefPM), - ] - - def _10_503_issuance_list(self, argDict): - narrativeDefAM = [ - ("Period_1", "period1"), - ("Period_2_3", 12), ("Period_2_3", 12), - ("Period_4_5", 12), ("Period_4_5", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ] - narrativeDefPM = [ - ("Period_1", "period1"), - ("Period_2_3", 12), ("Period_2_3", 12), - ("Period_4_5", 12), ("Period_4_5", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), - ] - try: - if self._numPeriods != "All": - numPeriods = string.atoi(self._numPeriods) - narrativeDefAM = narrativeDefAM[0:numPeriods] - narrativeDefPM = narrativeDefPM[0:numPeriods] - except: - pass - - narrativeDefAM.append(("Custom", ("ExtraSampling", (-24, 12)))) - 
narrativeDefPM.append(("Custom", ("ExtraSampling", (-24, 24)))) - - return [ - ("Morning", self.DAY(), self.NIGHT(), self.NIGHT(), - "Today, ", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), self.NIGHT(), - "Today, ", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning Update", "issuanceHour", self.NIGHT(), self.NIGHT(), - "Rest of Today, ", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), - "Rest of Today, ", "early in the morning","late in the afternoon", - 1, narrativeDefAM), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), - "Tonight, ", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + self.DAY(), - "Tonight, ", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - "Rest of Tonight, ", "early in the morning","early in the evening", - 1, narrativeDefPM), - # For the early morning update, this produces: - # Rest of Tonight: - # MONDAY - # MONDAY NIGHT - ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), - "Rest of Tonight, ", "early in the morning","late in the afternoon", - 0, narrativeDefPM), - ] - - def range_threshold_dict(self, statDict, argDict): - # Range for reporting temperature ranges in temp_range_phrase - # e.g HIGHS 80 TO 85 - return { - "MaxT": 10, - "MinT": 10, - } - - def timePeriod_descriptor_list(self, tree, node): - """SAFmulti_ER_Overrides version of timeDescriptor.timePeriod_descriptor_list. 
- - Added descriptors for 12 hour periods.""" - self.debug_print("") - - # Based on code from Dave Zaff - - # Contains definition for localtime start/end times and phrase - # Tuples, 0=startHrLT, 1=endHrLT, 2=phrase - day = self.DAY() - return [ - (day, (day+3)%24, "early in the morning"), # 6a-9a - (day, (day+6)%24, "in the morning"), # 6a-noon - (day, (day+9)%24, "until late afternoon"), # 6a-3p - (day, (day+12)%24, "during the day"), # 6a-6p - #(day, (day+15)%24, "until early evening"), # 6a-9p - (day, (day+15)%24, "until evening"), # 6a-9p - (day, (day+18)%24, "through the evening"), # 6a-midnite - - ((day+3)%24, (day+6)%24, "late in the morning"), # 9a-noon - ((day+3)%24, (day+9)%24, "in the late morning and early afternoon"), # 9a-3p - ((day+3)%24, (day+12)%24, "in the late morning and afternoon"), # 9a-6p - #((day+3)%24, (day+15)%24, "until early evening"), # 9a-9p - ((day+3)%24, (day+15)%24, "until evening"), # 9a-9p - ((day+3)%24, (day+18)%24, "through the evening"), # 9a-midnite - - ((day+6)%24, (day+9)%24, "early in the afternoon"), # noon-3p - ((day+6)%24, (day+12)%24, "in the afternoon"), # noon-6p - ((day+6)%24, (day+15)%24, "in the afternoon and evening"),# noon-9p - ((day+6)%24, (day+18)%24, "in the afternoon and evening"),# noon-midnite - - ((day+9)%24, (day+12)%24, self.lateDay_descriptor), # 3p-6p - #((day+9)%24, (day+15)%24, "early in the evening"), # 3p-9p - ((day+9)%24, (day+15)%24, "in the evening"), # 3p-9p - #((day+9)%24, (day+18)%24, "in the evening"), # 3p-midnite - ((day+9)%24, (day+18)%24, "until midnight"), # 3p-midnite - ((day+9)%24, (day+21)%24, "this evening and overnight"), # 3p-3a - ((day+9)%24, day, ""), # 3p-6a - - ((day+12)%24, (day+15)%24, "early in the evening"), # 6p-9p - #((day+12)%24, (day+18)%24, "in the evening"), # 6p-midnite - ((day+12)%24, (day+18)%24, "until midnight"), # 6p-midnite - ((day+12)%24, (day+21)%24, "this evening and overnight"), # 6p-3a - ((day+12)%24, day, "overnight"), # 6p-6a - - #((day+15)%24, 
(day+18)%24, "late in the evening"), # 9p-midnite - ((day+15)%24, (day+18)%24, "until midnight"), # 9p-midnite - - #((day+15)%24, (day+21)%24, "in the late evening and early morning"),# 9p-3a - ((day+15)%24, (day+21)%24, "overnight"), # 9p-3a - #((day+15)%24, day, "in the late evening and overnight"), # 9p-6a - ((day+15)%24, day, "overnight"), # 9p-6a - - ((day+18)%24, (day+21)%24, "after midnight"), # midnite-3a - ((day+18)%24, day, "after midnight"), # midnite-6a - ((day+18)%24, (day+6)%24, ""), # midnite-noon - - ((day+21)%24, day, self.lateNight_descriptor), # 3a-6a - ((day+21)%24, (day+3)%24, "early in the morning"), # 3a-9a - ((day+21)%24, (day+6)%24, "early in the morning"), # 3a-noon - ((day+21)%24, (day+9)%24, "until afternoon"), # 3a-3p - ((day+21)%24, (day+12)%24, ""), # 3a-6p - ] - - def assemblePhrases(self, tree, component): - # Assemble component phrases and add Label - # Qualify the phrases with local effect qualifiers - # if present. - # e.g. "near the coast" - """Override to save first period text for repeating at the end - of the SAF. From Steve Nelson. Coordinated with change to - _postProcessArea. 
""" - self.debug_print("") - phrases = [] - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - if words != "": - phrases.append(phrase) - # Remove empty word phrases - component.childList = phrases - self.orderWxPhrases(tree, component) - self.consolidateLocalEffectPhrases(tree, component) - fcst = "" - lastQualifier = None - lastPhrase = None - phraseList = [] - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - words, lastQualifier = self.qualifyWords( - phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase) - lastPhrase = phrase - if words not in phraseList: - phraseList.append(words) - fcst = fcst + words - # Add label - curLocalTime, shift = self.determineTimeShift() - issuanceInfo = tree.get("issuanceInfo") - index = component.getIndex() - label = self.createLabel(tree, component, component.get("timeRange"), - issuanceInfo, curLocalTime, shift, index) - fcst = self.combineSentences(fcst) - - # From Steve Nelson - if index == 0: - self._1stPeriodLabel = label.strip() - # Strip off any leading dots... - self._1stPeriodLabel = self._1stPeriodLabel.lstrip(".") - - if self._1stPeriodLabel[0:4] == "rest": - self._1stPeriodLabel = "The " + self._1stPeriodLabel - #Now strip off any trailing punctuation on the label - self._1stPeriodLabel = self._1stPeriodLabel.rstrip(",.") - self._1stPeriodFcst = fcst - # end - return self.setWords(component, label + fcst) - - def _five12hr_24hrExtended_issuance_list(self, argDict): - """This specifies a product with 5 (4 for AM) 12hr periods followed - by five 24 hr periods for an extended forecast. Requires - new methods _a24hr_Period and _a24hr_Period_WithoutLows - to define the extended periods. self._numPeriods is used to limit - the number of 12 hr periods actually produced as in SAF_Overrides. 
- """ - self.debug_print("") - # Based on code from Dave Zaff - - # dz 031010 - from Brian Walawender - # Code below sets the start time for the afternoon update - # Local noon or the current hour (whichever is greater) - - currentTime = time.time() - updateHour = self.DAY() + 6 - minute = time.strftime("%M", time.localtime(currentTime)) - minute = int(minute) - if minute < 15: - currentHour = time.strftime("%H", time.localtime(currentTime)) - else: - seconds = currentTime - seconds = seconds + 3600 - currentHour = time.strftime("%H", time.localtime(seconds)) - - currentHour = int(currentHour) - updateHour = int(updateHour) - - if currentHour > updateHour: - updateHour = currentHour - - - narrativeDefAM = [ - ("Period_1", "period1"), - ("Period_2_3", 12), ("Period_2_3", 12), - ("Period_4_5", 12), - ] - narrativeDefPM = [ - ("Period_1", "period1"), - ("Period_2_3", 12), ("Period_2_3", 12), - ("Period_4_5", 12), ("Period_4_5", 12), - ] - # Set the extended to use 24 hour periods starting at 6 am. 
- narrativeExtended=[ - ("_a24hr_Period_WithoutLows", 24), ("_a24hr_Period", 24), - ("_a24hr_Period", 24), ("_a24hr_Period", 24),("_a24hr_Period", 24), - ] - try: - if self._includeExtended == 1: - if self._extendedLabel == 1: - narrativeDefAM.append(("ExtendedLabel",0)) - narrativeDefPM.append(("ExtendedLabel",0)) - narrativeDefAM = narrativeDefAM + narrativeExtended - narrativeDefPM = narrativeDefPM + narrativeExtended - except: - pass - - try: - if self._numPeriods != "All": - numPeriods = string.atoi(self._numPeriods) - narrativeDefAM = narrativeDefAM[0:numPeriods] - narrativeDefPM = narrativeDefPM[0:numPeriods] - except: - pass - return [ - ("TODAY - beginning at 6AM", self.DAY(), self.NIGHT(), self.NIGHT(), - ".Today...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Early this morning/today - beginning at 4AM", self.DAY()-2, self.NIGHT(), self.NIGHT(), - ".Today...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Rest of Today - beginning now", "issuanceHour", self.NIGHT(), self.NIGHT(), - ".Rest of Today...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Afternoon Update - beginning at noon/afternoon", updateHour, self.NIGHT(), self.NIGHT(), - ".This Afternoon...", "early in the afternoon","late in the afternoon", - 1, narrativeDefAM), - # End times are tomorrow: - ("TONIGHT - beginning at 6PM", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), - ".Tonight...", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Rest of tonight - beginning now", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - ".Rest of Tonight...", "early in the morning","early in the evening", - 1, narrativeDefPM), - # For the early morning update, this produces: - # Rest of Tonight: - # MONDAY - # MONDAY NIGHT - ("Rest of tonight - use after midnight", "issuanceHour", self.DAY(), self.DAY(), - ".Rest of Tonight...", "early in the morning","late in the afternoon", - 0, 
narrativeDefPM), - # Alternative - # For the early morning update, this produces: - # Early this morning: - # Today - # Tonight - #("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), - # ".Rest of Tonight...", "late in the night", "early in the evening", - # 1, narrativeDefPM), - #("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), - # ".Early this morning...", "early in the morning", "late in the afternoon", - # 1, narrativeDefPM), - ] - - def _a24hr_Period(self): - """Defines a 24 hr long component for the extended forecast.""" - self.debug_print("") - # Based on code from Dave Zaff - - HrSky=12 - HrPoP=12 - HrWind=12 - HrWx=12 - - component = { - "type": "component", - "methodList": [ - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("MinT", self.stdDevFirstAvg), - ("MaxT", self.stdDevAvg), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [HrSky]), - ("PoP", self._PoP_analysisMethod("_a24hr_Period"),[HrPoP]), - ("PoP", self.binnedPercent, [HrPoP]), - ("Wind", self.vectorMedianRange, [HrWind]), - ("WindGust", self.moderatedMax, [HrWind]), - #("SnowAmt", self.minMax), - ("Wx", self.rankedWx, [HrWx]), - #("WindChill", self.minMax), - #("HeatIndex", self.minMax), - ], - "phraseList":[ - self.wind_summary, - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - #self.severeWeather_phrase, - #self.descriptive_snow_phrase, - self.highs_phrase, - self.lows_phrase, - self.steady_temp_trends, - #self.highs_range_phrase, - #self.lows_range_phrase, - self.temp_trends, - self.popMax_phrase, - #self.windChill_phrase, - #self.heatIndex_phrase, - ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [HrSky])) - return component - - def _a24hr_Period_WithoutLows(self): - """This should be the same as _a24hr_Period except no lows phrase in - the phraseList.""" - self.debug_print("") - # Based on code from Dave Zaff - - HrSky=12 - 
HrPoP=12 - HrWind=12 - HrWx=12 - - component = { - "type": "component", - "methodList": [ - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("MinT", self.stdDevFirstAvg), - ("MaxT", self.stdDevAvg), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [HrSky]), - ("PoP", self._PoP_analysisMethod("_a24hr_Period"),[HrPoP]), - ("PoP", self.binnedPercent, [HrPoP]), - ("Wind", self.vectorMedianRange, [HrWind]), - ("WindGust", self.moderatedMax, [HrWind]), - #("SnowAmt", self.minMax), - ("Wx", self.rankedWx, [HrWx]), - #("WindChill", self.minMax), - #("HeatIndex", self.minMax), - ], - "phraseList":[ - self.wind_summary, - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - #self.severeWeather_phrase, - #self.descriptive_snow_phrase, - self.highs_phrase, - self.steady_temp_trends, - #self.highs_range_phrase, - self.temp_trends, - self.popMax_phrase, - #self.windChill_phrase, - #self.heatIndex_phrase, - ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [6])) - return component - - def _ExtendedNarrative(self): - """This defines a component product for a summary extended forecast - which uses a different edit area from the short term forecasts. - This is so there can be several areas for the short term forecasts - for a tower, and just one summary extended forecast for the entire - listening area. """ - self.debug_print("") - - # check for period combining first - # Set the extended to use 24 hour periods. 
- narrativeExtended=[ - ("_a24hr_Period_WithoutLows", 24), ("_a24hr_Period", 24), - ("_a24hr_Period", 24), ("_a24hr_Period", 24),("_a24hr_Period", 24), - ] - if self._periodCombining: - methodList = [self.combineComponentStats, self.assembleChildWords] - else: - methodList = [self.assembleChildWords] - - return { - "type": "narrative", - "displayName": None, - "timePeriodMethod ": self.timeRangeLabel, - ## Components - "methodList": methodList, - "narrativeDef": narrativeExtended, - } - - def _lower_special_chars(self, fcst): - """Replaces special characters that need to be lower case.""" - self.debug_print("") - - escB="" + "b" - escA="" + "a" - # Make sure escape sequences are not capitalized - fcst = fcst.replace("" +"A", escA) - fcst = fcst.replace("" + "B", escB) - # Also there may be a special char in the lac - lacupper = string.upper(self._currentLac) - fcst = fcst.replace(lacupper, self._currentLac) - - return fcst - - def _getSummaryExtended(self, argDict): - """Creates a component product summary extended forecast.""" - self.debug_print("") - - # _summaryAreaDict is a dictionary with keys of the LAC ID - # and values a tuple of (editAreaName, areaLabel) - # _summaryExtendedIntro is a string to indroduce the extended - # such as "The extended forecast for" - # areaLabel is a label string such as "Western Virginia" - # editAreaName must be the name of a GFE defined edit area - fcst = "" - if self._summaryAreaDict.has_key(self._currentLac): - editAreaName, areaLabel = self._summaryAreaDict[self._currentLac] - intro = self._summaryExtendedIntro - intro = intro + " " + areaLabel - # Note endline appends a newline - intro = self.endline(intro) - extended = self.generateProduct("_ExtendedNarrative", argDict, - area = editAreaName, timeRange=self._extendedTimeRange) - fcst = intro + extended - - return fcst - - def allowedHazards(self): - return [] +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 
68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# --------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without +# technical support, and with no warranty, express or implied, as to +# its usefulness for any purpose. +# +# ---------------------------------------------------------------------------- +# History +# Time Ticket Number Developer Comments +# ----------------------------------------------------------------------------- +# 05/25/2016 18594 ryu Fix placing punctuation in the middle of a +# headline. +# ---------------------------------------------------------------------------- +# SAF_Overrides +# +# This file provides any product specific overrides for the +# SAF product. This file is part of the baseline. +# +# Definition Section: +# Overrides: +# Additions: +# +# Methods: +# Overrides: +# Additions: +# +# --------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import string, time, re, os, types, copy +import TextRules +import TimeRange, AbsTime + +# Define overrides of Product Definition settings and +# default values of additional Definition settings +# ( This Definition section must be before the Class definition) + +#***** THIS NEXT LINE IS REQUIRED ***** +Definition = {} +# +# SAF Definitions: +# Definition statements must start in column 1 + +### SAF settings of baseline options: ### + +#Definition['displayName'] = "SAF" +Definition["productName"] = "Zone Forecast Prodact" # name of product +Definition["outputFile"] = "{prddir}/TEXT/SAF.txt" +Definition["extendedLabel"] = 1 # To include extended label +Definition["includeEveningPeriod"] = 0 # To turn off evening period + +# lacList - Listening area code(s). +# Must be a list of LACs to create forecasts for multiple towers. 
+Definition["lacList"] = [] + +# lacAreaDict - For each defaultEditArea, enter it's associated LAC +Definition["lacAreaDict"] = {} + +# pilDict - Dictionary with keys of LAC and values of +# product pil used as CRS ID in the CRS header. + +Definition["pilDict"] = {} + +############################ +# OPTIONAL SET-UP: + +# Flag to repeat first period at the end. 1 or 0 +Definition["repeat1stPeriod"] = 0 + +# summaryExtended - flag 0 or 1 to generate a summary extended +# forecast. If 1, you must define summaryAreaDict. +# Also turn off extendedLabel and includeExtended +Definition["summaryExtended"] = 0 + +# summaryAreaDict - dictionary with keys of the LAC ID +# and values of a tuple of (editAreaName, areaLabel) where +# areaLabel is a label string such as "Western Virginia" +# editAreaName must be the name of a GFE defined edit area +Definition["summaryAreaDict"] = {} + +# summaryExtendedIntro is a string to introduce the extended +# such as "The extended forecast for" +Definition["summaryExtendedIntro"] = "The extended forecast for" + +# lacFileDict - Dictionary with keys of LAC and values +# of full pathname of where to store the data on disk. +Definition["lacFileDict"] = {} + +### New Definitions not in the baseline ### + +# END definitions +############################################################ + +#********************************************************************** +# MAKE NO CHANGES HERE +# The minimum contents of this file are the above Definition = {} line +# plus following class definition and the __init__ method with only +# the "pass" line in it. 
+ +class SAF_Overrides: + """Class NNN_FILETYPE - Version: IFPS""" + + def __init__(self): + pass + +# End MAKE NO CHANGES HERE +#********************************************************************** + # Add methods here making sure to indent inside the class statement + # SAF Overrides ------------------------ + + # It is helpful to put a debug statement at the beginning of each + # method to help with trouble-shooting. + #def _method(self): + #self.debug_print("Debug: _method in SAF_Overrides") + + def generateForecast(self, argDict): + """This allows for multiple towers to be generated by the formatter. + Each tower will be a separate section in the output text. + The forecast for each tower can be sent to a different PIL in + the textdb and better yet, written directly to a file in the + NWR pending or ready directory. + This requires Definition["lacList"] to be specified as a list. + """ + self.debug_print("") + # Generate Text Phrases for a list of edit areas + + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcstDict = self._initializeFcst(argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(50) + # All edit areas for all transmitters must be in _areaList + # with each areaLabel prepended with the LAC + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + areaLac = self._getAreaLac(editArea, areaLabel, fcstDict, argDict) + fcst = fcstDict[areaLac] + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fcstDict[areaLac] = fcst + fraction = fractionOne + + finalFcst = self._getFinalFcst(fcstDict, argDict) + return finalFcst + + def _getAreaLac(self, editArea, areaLabel, fcstDict, argDict): + self.debug_print("") + eaName = editArea.getId().getName() + if self._multiTower: + try: + areaLac = self._lacAreaDict[eaName] + except: + msg = "Edit Area Name " + eaName + " " + areaLabel + \ + " not in lacAreaDict " + raise "defaultEditAreasError", msg + else: + areaLac = self._lac + return areaLac + + def _determineTimeRanges(self, argDict): + self.debug_print("") + # Set up the Narrative Definition and initial Time Range + self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict)) + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._expireTime = self._issuanceInfo.expireTime() + self._issueTime = self._issuanceInfo.issueTime() + self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + if 
self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) + self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") + + # This section determines when to start the extended forecast + # for the summary extended. + (currentLocalTime, self._shift) = self.determineTimeShift() + day = currentLocalTime.day + month = currentLocalTime.month + year = currentLocalTime.year + #print "_determineTimeRanges: p1 TR=",self._issuanceInfo.period1TimeRange() + p1_endtime = self._issuanceInfo.period1TimeRange().endTime() + + # If the first period ends at the same time as self.NIGHT(), then + # the product starts with a daytime issuance, otherwise a nighttime + # issuance and the extended will start 24 hrs later + nightTime = AbsTime.absTimeYMD(year,month,day,self.NIGHT()) - self._shift + dayTime = AbsTime.absTimeYMD(year,month,day,self.DAY()) - self._shift + #print "_determineTimeRanges: p1, day, night=",p1_endtime,dayTime,nightTime + # Offset is hours from self.DAY() + if p1_endtime == nightTime: + # Daytime run - today, tonight, day2, night2 + offset = 48 + elif p1_endtime == dayTime: + # PM run after midnight- rest of night, today, tonight, day2, night2 + offset = 48 + else: + # Normal pm run - tonight, day1, night1, day2, night2 + offset = 72 + + # Determine "issuanceHour" + startTime = AbsTime.absTimeYMD(year,month,day,self.DAY()) + # Convert to GMT time before making time range + startTime = startTime - self._shift + offset*3600 + #print "_determineTimeRanges: p1 TR=",self._issuanceInfo.period1TimeRange() + self._extendedTimeRange = 
TimeRange.TimeRange(startTime, startTime + 3600) + #print "_determineTimeRanges: extended TR=",self._extendedTimeRange + + return None + + def _initializeFcst(self, argDict): + self.debug_print("") + # For backward compatibility, if there are Definition entries + # of "lac" and "pil", use them instead of lacList and pilList + try: + self._lacList = [self._lac] + self._pilDict = {self._lac:self._pil} + self._multiTower = 0 + except: + self._multiTower = 1 + + fcstDict = {} + for lac in self._lacList: + fcst = "" + # Make sure there is a crsID for each LAC + if lac in self._pilDict: + self._currentPil = self._pilDict[lac] + self._currentLac = lac + fcstDict[lac] = self._preProcessProduct(fcst, argDict) + else: + msg = "Setup Error! Definition['pilDict']" + \ + " has no entry for LAC ID=" + lac + raise "definitionError", msg + return fcstDict + + def _preProcessProduct(self, fcst, argDict): + self.debug_print("") + # SAF Heading: + # aT_ENGOKCZFPLTS03053014120305301412 CD OKZ036c0305310612 + language = argDict["language"] + if language == "english": + languageStr = "T_ENG" + elif language == "spanish": + languageStr = "T_SPA" + elif language == "french": + languageStr = "T_FRE" + issueTimeStr = self.formatTimeString( + self._issueTime.unixTime(), "%y%m%d%H%M", "GMT") + expireTimeStr = self.formatTimeString( + self._expireTime.unixTime(),"%y%m%d%H%M", "GMT") + fcst = fcst + languageStr + fcst = fcst + self._currentPil + fcst = fcst + issueTimeStr + issueTimeStr + fcst = fcst + " CD " + fcst = fcst + self._currentLac + fcst = fcst + expireTimeStr + "\n\n" + + # Get Synopsis from previous forecast + #productID = "PDXCWFPQR" + #synopsis = self.getPreviousProduct(productID, "SYNOPSIS") + #SynopsisHeading = ".SYNOPSIS FOR SOUTHERN WASHINGTON AND NORTHERN OREGON COAST..." 
+ #synopsis = re.sub(r'\n', r' ', synopsis) + #synopsis = self.endline(synopsis, linelength=self._lineLength) + #fcst = fcst + "-\n" + SynopsisHeading + "\n" + synopsis + "\n$$\n\n" + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + self.debug_print("") + # This is the header for an edit area combination + #ERH fcst=fcst+"$$\nNow for the official National " + fcst=fcst+"Now for the official National " + fcst=fcst+"Weather Service forecast\nfor "+areaLabel+"\n\n" + if self.allowedHazards() != []: + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + headlines = self.generateProduct("Hazards", argDict, + area = editArea, areaLabel=areaLabel, + timeRange = self._timeRange) + if headlines != "": + # Replace ellipsis at end of each headline with period + headlines = headlines.replace("...\n", ".\n") + + # Remove other ellipses + headlines = headlines.replace("...", "") + + # Remove multiple trailing new lines and replace + # with a single one + headlines = string.strip(headlines) + "\n" + + fcst = fcst + headlines + "\n" + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + """Save first period text for repeating at the end of the SAF. """ + self.debug_print("") + if self._repeat1stPeriod == 1: + # Clean up the area label to avoid possibly + # repeat "forecast for" + e=re.compile('.*forecast for',re.IGNORECASE) + intro = "Again, the forecast for " + \ + e.sub("",areaLabel).strip() + # Now strip off any punctuation on the area label + # and add the period label, ie, today, tonight + intro=intro.rstrip(",.") + " for " + \ + self._1stPeriodLabel + ", " + # Wrap it up! 
+ text = self.endline(intro + self._1stPeriodFcst) + fcst = fcst + "\n" + text + return fcst + "\n" + + def _postProcessProduct(self, fcst, argDict): + self.debug_print("") + if self._summaryExtended == 1: + fcst = fcst + self._getSummaryExtended(argDict) + + fcst = string.replace(fcst, "%expireTime", self._expireTimeStr) + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + + fcst = fcst.replace("...", ", ") + #This is to get rid of leading "." at the start of a fcst period. + fcst = fcst.replace("\n.", "\n") + #Get rid of $$ + fcst = fcst.replace("$$\n", "") + + escA="" + "a" + escB="" + "b" + fcst = escA + fcst + "\n" + escB + + #Now uncapitalize special characters + fcst = self._lower_special_chars(fcst) + return fcst + + def _getFinalFcst(self, fcstDict, argDict): + self.debug_print("") + # Assemble the entries from fcstDict into a fcst string + finalFcst = "" + for lac in self._lacList: + fcst = fcstDict[lac] + self._currentLac = lac + fcst = self._postProcessProduct(fcst, argDict) + finalFcst = finalFcst + fcst + "\n\n" + # Write tower specific text to disk. Normally + # this will be the /data/fxa/workFiles/nwr/pending + # directory. Must be full pathname! + if lac in self._lacFileDict: + f=open(self._lacFileDict[lac],"w") + f.write(fcst) + f.close() + return finalFcst + + def setLabel(self, tree, component): + exLabel= "\n\nAnd now the extended forecast for the radio listening area.\n" + component.set("words", exLabel) + return self.DONE() + + def createLabel(self, tree, node, timeRange, issuanceInfo, currentLocalTime, shift, index=0): + # Make a label given the timeRange in GMT and the shift to + # convert it to local time. currentLocalTime can be used to + # compare to current day. 
+ + if timeRange.duration() <= 3600: + return "" + if index == 0: + try: + label = issuanceInfo.period1Label() + if label != "": + return label + except: + pass + try: + today = issuanceInfo.todayFlag() + except: + today = 1 + try: + useHolidays = self._useHolidays + except: + useHolidays = 1 + nextDay24HourLabel = self.nextDay24HourLabel_flag(tree, node) + splitDay24HourLabel = self.splitDay24HourLabel_flag(tree, node) + label = self.getWeekday(timeRange, holidays=1, shiftToLocal=1, + labelType="CapitalWithComma", today=today, + tomorrow=0, nextDay24HourLabel=nextDay24HourLabel, + splitDay24HourLabel=splitDay24HourLabel) + return label + + def Labels(self): + labels = TextRules.TextRules.Labels(self) + labels["CapitalWithComma"] = { + "PrePunctuation": "", + "PostPunctuation": ", ", + "Weekday" : { + 6 : "Sunday", + 0 : "Monday", + 1 : "Tuesday", + 2 : "Wednesday", + 3 : "Thursday", + 4 : "Friday", + 5 : "Saturday" + }, + "Now": "Now", + "Today":"Today", + "Tonight": "Tonight", + "Rest of Today":"Rest of Today", + "Rest of Tonight": "Rest of Tonight", + "Night": "Night", + "Evening": "Evening", + "Afternoon": "This Afternoon", + } + return labels + + def _issuance_list(self, argDict): + """This shows how to add customized issuance lists via Definition + settings. Supply the name of a different issuance_list method + in Definition["directiveType"]. There must be a method with + the name supplied or else the formatter will bomb out.""" + self.debug_print("") + # This method sets up configurable issuance times with associated + # narrative definitions. See the Text Product User Guide for documentation. + if self._definition["directiveType"] == "C11": + return self._C11_issuance_list(argDict) + elif self._definition["directiveType"] == "10-503": + return self._10_503_issuance_list(argDict) + else: + # Warning! if self._definition["directiveType"] is not + # a defined method, the formatter will error out. + exec("meth=self." 
+ self._definition["directiveType"]) + return meth(argDict) + + def _C11_issuance_list(self, argDict): + try: + if self._definition["includeEveningPeriod"] == 1: + narrativeDefAM = [ + ("FirstFcstPeriod", "period1"), + ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), + ("LaterPeriod", 12), ("LaterPeriod", 12), + ("Evening", 6), + ] + narrativeDefPM = [ + ("FirstFcstPeriod", "period1"), + ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), + ("LaterPeriod", 12), ("LaterPeriod", 12), ("LaterPeriod", 12), + ("Evening", 6), + ] + else: + narrativeDefAM = [ + ("FirstFcstPeriod", "period1"), + ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), + ("LaterPeriod", 12), ("LaterPeriodWithoutLows", 18), + ] + narrativeDefPM = [ + ("FirstFcstPeriod", "period1"), + ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), + ("LaterPeriod", 12), ("LaterPeriod", 12), ("LaterPeriodWithoutLows", 18), + ] + extended = [("C11Extended", 24),("C11Extended", 24), ("C11Extended", 24),("C11Extended", 24)] + if self._includeExtended == 1: + if self._extendedLabel == 1: + narrativeDefAM.append(("ExtendedLabel",0)) + narrativeDefPM.append(("ExtendedLabel",0)) + narrativeDefAM = narrativeDefAM + extended + narrativeDefPM = narrativeDefPM + extended + except: + narrativeDefAM = None + narrativeDefPM = None + + try: + if self._numPeriods != "All": + numPeriods = string.atoi(self._numPeriods) + narrativeDefAM = narrativeDefAM[0:numPeriods] + narrativeDefPM = narrativeDefPM[0:numPeriods] + except: + pass + + narrativeDefAM.append(("Custom", ("ExtraSampling", (-24, 12)))) + narrativeDefPM.append(("Custom", ("ExtraSampling", (-24, 24)))) + + #(description, startHour, endHour, period1 label, period1 lateNight lateDay phrase, todayFlag, series definition) + return [ + ("Morning", self.DAY(), self.NIGHT(), self.NIGHT(), + "Today, ", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), self.NIGHT(), + "Today, ", "early in the morning", "late 
in the afternoon", + 1, narrativeDefAM), + ("Morning Update", "issuanceHour", self.NIGHT(), self.NIGHT(), + "Rest of Today, ", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), + "Rest of Today, ", "early in the morning","late in the afternoon", + 1, narrativeDefAM), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), + "Tonight, ", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + self.DAY(), + "Tonight, ", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + "Rest of Tonight, ", "late in the night","early in the evening", + 1, narrativeDefPM), + ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), + "Rest of Tonight, ", "early in the morning","late in the afternoon", + 0, narrativeDefPM), + ] + + def _10_503_issuance_list(self, argDict): + narrativeDefAM = [ + ("Period_1", "period1"), + ("Period_2_3", 12), ("Period_2_3", 12), + ("Period_4_5", 12), ("Period_4_5", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ] + narrativeDefPM = [ + ("Period_1", "period1"), + ("Period_2_3", 12), ("Period_2_3", 12), + ("Period_4_5", 12), ("Period_4_5", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), + ] + try: + if self._numPeriods != "All": + numPeriods = string.atoi(self._numPeriods) + narrativeDefAM = narrativeDefAM[0:numPeriods] + narrativeDefPM = narrativeDefPM[0:numPeriods] + except: + pass + + narrativeDefAM.append(("Custom", ("ExtraSampling", (-24, 12)))) + 
narrativeDefPM.append(("Custom", ("ExtraSampling", (-24, 24)))) + + return [ + ("Morning", self.DAY(), self.NIGHT(), self.NIGHT(), + "Today, ", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), self.NIGHT(), + "Today, ", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning Update", "issuanceHour", self.NIGHT(), self.NIGHT(), + "Rest of Today, ", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), self.NIGHT(), + "Rest of Today, ", "early in the morning","late in the afternoon", + 1, narrativeDefAM), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), + "Tonight, ", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + self.DAY(), + "Tonight, ", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + "Rest of Tonight, ", "early in the morning","early in the evening", + 1, narrativeDefPM), + # For the early morning update, this produces: + # Rest of Tonight: + # MONDAY + # MONDAY NIGHT + ("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), + "Rest of Tonight, ", "early in the morning","late in the afternoon", + 0, narrativeDefPM), + ] + + def range_threshold_dict(self, statDict, argDict): + # Range for reporting temperature ranges in temp_range_phrase + # e.g HIGHS 80 TO 85 + return { + "MaxT": 10, + "MinT": 10, + } + + def timePeriod_descriptor_list(self, tree, node): + """SAFmulti_ER_Overrides version of timeDescriptor.timePeriod_descriptor_list. 
+ + Added descriptors for 12 hour periods.""" + self.debug_print("") + + # Based on code from Dave Zaff + + # Contains definition for localtime start/end times and phrase + # Tuples, 0=startHrLT, 1=endHrLT, 2=phrase + day = self.DAY() + return [ + (day, (day+3)%24, "early in the morning"), # 6a-9a + (day, (day+6)%24, "in the morning"), # 6a-noon + (day, (day+9)%24, "until late afternoon"), # 6a-3p + (day, (day+12)%24, "during the day"), # 6a-6p + #(day, (day+15)%24, "until early evening"), # 6a-9p + (day, (day+15)%24, "until evening"), # 6a-9p + (day, (day+18)%24, "through the evening"), # 6a-midnite + + ((day+3)%24, (day+6)%24, "late in the morning"), # 9a-noon + ((day+3)%24, (day+9)%24, "in the late morning and early afternoon"), # 9a-3p + ((day+3)%24, (day+12)%24, "in the late morning and afternoon"), # 9a-6p + #((day+3)%24, (day+15)%24, "until early evening"), # 9a-9p + ((day+3)%24, (day+15)%24, "until evening"), # 9a-9p + ((day+3)%24, (day+18)%24, "through the evening"), # 9a-midnite + + ((day+6)%24, (day+9)%24, "early in the afternoon"), # noon-3p + ((day+6)%24, (day+12)%24, "in the afternoon"), # noon-6p + ((day+6)%24, (day+15)%24, "in the afternoon and evening"),# noon-9p + ((day+6)%24, (day+18)%24, "in the afternoon and evening"),# noon-midnite + + ((day+9)%24, (day+12)%24, self.lateDay_descriptor), # 3p-6p + #((day+9)%24, (day+15)%24, "early in the evening"), # 3p-9p + ((day+9)%24, (day+15)%24, "in the evening"), # 3p-9p + #((day+9)%24, (day+18)%24, "in the evening"), # 3p-midnite + ((day+9)%24, (day+18)%24, "until midnight"), # 3p-midnite + ((day+9)%24, (day+21)%24, "this evening and overnight"), # 3p-3a + ((day+9)%24, day, ""), # 3p-6a + + ((day+12)%24, (day+15)%24, "early in the evening"), # 6p-9p + #((day+12)%24, (day+18)%24, "in the evening"), # 6p-midnite + ((day+12)%24, (day+18)%24, "until midnight"), # 6p-midnite + ((day+12)%24, (day+21)%24, "this evening and overnight"), # 6p-3a + ((day+12)%24, day, "overnight"), # 6p-6a + + #((day+15)%24, 
(day+18)%24, "late in the evening"), # 9p-midnite + ((day+15)%24, (day+18)%24, "until midnight"), # 9p-midnite + + #((day+15)%24, (day+21)%24, "in the late evening and early morning"),# 9p-3a + ((day+15)%24, (day+21)%24, "overnight"), # 9p-3a + #((day+15)%24, day, "in the late evening and overnight"), # 9p-6a + ((day+15)%24, day, "overnight"), # 9p-6a + + ((day+18)%24, (day+21)%24, "after midnight"), # midnite-3a + ((day+18)%24, day, "after midnight"), # midnite-6a + ((day+18)%24, (day+6)%24, ""), # midnite-noon + + ((day+21)%24, day, self.lateNight_descriptor), # 3a-6a + ((day+21)%24, (day+3)%24, "early in the morning"), # 3a-9a + ((day+21)%24, (day+6)%24, "early in the morning"), # 3a-noon + ((day+21)%24, (day+9)%24, "until afternoon"), # 3a-3p + ((day+21)%24, (day+12)%24, ""), # 3a-6p + ] + + def assemblePhrases(self, tree, component): + # Assemble component phrases and add Label + # Qualify the phrases with local effect qualifiers + # if present. + # e.g. "near the coast" + """Override to save first period text for repeating at the end + of the SAF. From Steve Nelson. Coordinated with change to + _postProcessArea. 
""" + self.debug_print("") + phrases = [] + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + if words != "": + phrases.append(phrase) + # Remove empty word phrases + component.childList = phrases + self.orderWxPhrases(tree, component) + self.consolidateLocalEffectPhrases(tree, component) + fcst = "" + lastQualifier = None + lastPhrase = None + phraseList = [] + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + words, lastQualifier = self.qualifyWords( + phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase) + lastPhrase = phrase + if words not in phraseList: + phraseList.append(words) + fcst = fcst + words + # Add label + curLocalTime, shift = self.determineTimeShift() + issuanceInfo = tree.get("issuanceInfo") + index = component.getIndex() + label = self.createLabel(tree, component, component.get("timeRange"), + issuanceInfo, curLocalTime, shift, index) + fcst = self.combineSentences(fcst) + + # From Steve Nelson + if index == 0: + self._1stPeriodLabel = label.strip() + # Strip off any leading dots... + self._1stPeriodLabel = self._1stPeriodLabel.lstrip(".") + + if self._1stPeriodLabel[0:4] == "rest": + self._1stPeriodLabel = "The " + self._1stPeriodLabel + #Now strip off any trailing punctuation on the label + self._1stPeriodLabel = self._1stPeriodLabel.rstrip(",.") + self._1stPeriodFcst = fcst + # end + return self.setWords(component, label + fcst) + + def _five12hr_24hrExtended_issuance_list(self, argDict): + """This specifies a product with 5 (4 for AM) 12hr periods followed + by five 24 hr periods for an extended forecast. Requires + new methods _a24hr_Period and _a24hr_Period_WithoutLows + to define the extended periods. self._numPeriods is used to limit + the number of 12 hr periods actually produced as in SAF_Overrides. 
+ """ + self.debug_print("") + # Based on code from Dave Zaff + + # dz 031010 - from Brian Walawender + # Code below sets the start time for the afternoon update + # Local noon or the current hour (whichever is greater) + + currentTime = time.time() + updateHour = self.DAY() + 6 + minute = time.strftime("%M", time.localtime(currentTime)) + minute = int(minute) + if minute < 15: + currentHour = time.strftime("%H", time.localtime(currentTime)) + else: + seconds = currentTime + seconds = seconds + 3600 + currentHour = time.strftime("%H", time.localtime(seconds)) + + currentHour = int(currentHour) + updateHour = int(updateHour) + + if currentHour > updateHour: + updateHour = currentHour + + + narrativeDefAM = [ + ("Period_1", "period1"), + ("Period_2_3", 12), ("Period_2_3", 12), + ("Period_4_5", 12), + ] + narrativeDefPM = [ + ("Period_1", "period1"), + ("Period_2_3", 12), ("Period_2_3", 12), + ("Period_4_5", 12), ("Period_4_5", 12), + ] + # Set the extended to use 24 hour periods starting at 6 am. 
+ narrativeExtended=[ + ("_a24hr_Period_WithoutLows", 24), ("_a24hr_Period", 24), + ("_a24hr_Period", 24), ("_a24hr_Period", 24),("_a24hr_Period", 24), + ] + try: + if self._includeExtended == 1: + if self._extendedLabel == 1: + narrativeDefAM.append(("ExtendedLabel",0)) + narrativeDefPM.append(("ExtendedLabel",0)) + narrativeDefAM = narrativeDefAM + narrativeExtended + narrativeDefPM = narrativeDefPM + narrativeExtended + except: + pass + + try: + if self._numPeriods != "All": + numPeriods = string.atoi(self._numPeriods) + narrativeDefAM = narrativeDefAM[0:numPeriods] + narrativeDefPM = narrativeDefPM[0:numPeriods] + except: + pass + return [ + ("TODAY - beginning at 6AM", self.DAY(), self.NIGHT(), self.NIGHT(), + ".Today...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Early this morning/today - beginning at 4AM", self.DAY()-2, self.NIGHT(), self.NIGHT(), + ".Today...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Rest of Today - beginning now", "issuanceHour", self.NIGHT(), self.NIGHT(), + ".Rest of Today...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Afternoon Update - beginning at noon/afternoon", updateHour, self.NIGHT(), self.NIGHT(), + ".This Afternoon...", "early in the afternoon","late in the afternoon", + 1, narrativeDefAM), + # End times are tomorrow: + ("TONIGHT - beginning at 6PM", self.NIGHT(), 24 + self.DAY(), 24 + self.DAY(), + ".Tonight...", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Rest of tonight - beginning now", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + ".Rest of Tonight...", "early in the morning","early in the evening", + 1, narrativeDefPM), + # For the early morning update, this produces: + # Rest of Tonight: + # MONDAY + # MONDAY NIGHT + ("Rest of tonight - use after midnight", "issuanceHour", self.DAY(), self.DAY(), + ".Rest of Tonight...", "early in the morning","late in the afternoon", + 0, 
narrativeDefPM), + # Alternative + # For the early morning update, this produces: + # Early this morning: + # Today + # Tonight + #("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + self.DAY(), + # ".Rest of Tonight...", "late in the night", "early in the evening", + # 1, narrativeDefPM), + #("Early Morning Update", "issuanceHour", self.DAY(), self.DAY(), + # ".Early this morning...", "early in the morning", "late in the afternoon", + # 1, narrativeDefPM), + ] + + def _a24hr_Period(self): + """Defines a 24 hr long component for the extended forecast.""" + self.debug_print("") + # Based on code from Dave Zaff + + HrSky=12 + HrPoP=12 + HrWind=12 + HrWx=12 + + component = { + "type": "component", + "methodList": [ + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("MinT", self.stdDevFirstAvg), + ("MaxT", self.stdDevAvg), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [HrSky]), + ("PoP", self._PoP_analysisMethod("_a24hr_Period"),[HrPoP]), + ("PoP", self.binnedPercent, [HrPoP]), + ("Wind", self.vectorMedianRange, [HrWind]), + ("WindGust", self.moderatedMax, [HrWind]), + #("SnowAmt", self.minMax), + ("Wx", self.rankedWx, [HrWx]), + #("WindChill", self.minMax), + #("HeatIndex", self.minMax), + ], + "phraseList":[ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + #self.severeWeather_phrase, + #self.descriptive_snow_phrase, + self.highs_phrase, + self.lows_phrase, + self.steady_temp_trends, + #self.highs_range_phrase, + #self.lows_range_phrase, + self.temp_trends, + self.popMax_phrase, + #self.windChill_phrase, + #self.heatIndex_phrase, + ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [HrSky])) + return component + + def _a24hr_Period_WithoutLows(self): + """This should be the same as _a24hr_Period except no lows phrase in + the phraseList.""" + self.debug_print("") + # Based on code from Dave Zaff + + HrSky=12 + 
HrPoP=12 + HrWind=12 + HrWx=12 + + component = { + "type": "component", + "methodList": [ + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("MinT", self.stdDevFirstAvg), + ("MaxT", self.stdDevAvg), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [HrSky]), + ("PoP", self._PoP_analysisMethod("_a24hr_Period"),[HrPoP]), + ("PoP", self.binnedPercent, [HrPoP]), + ("Wind", self.vectorMedianRange, [HrWind]), + ("WindGust", self.moderatedMax, [HrWind]), + #("SnowAmt", self.minMax), + ("Wx", self.rankedWx, [HrWx]), + #("WindChill", self.minMax), + #("HeatIndex", self.minMax), + ], + "phraseList":[ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + #self.severeWeather_phrase, + #self.descriptive_snow_phrase, + self.highs_phrase, + self.steady_temp_trends, + #self.highs_range_phrase, + self.temp_trends, + self.popMax_phrase, + #self.windChill_phrase, + #self.heatIndex_phrase, + ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + return component + + def _ExtendedNarrative(self): + """This defines a component product for a summary extended forecast + which uses a different edit area from the short term forecasts. + This is so there can be several areas for the short term forecasts + for a tower, and just one summary extended forecast for the entire + listening area. """ + self.debug_print("") + + # check for period combining first + # Set the extended to use 24 hour periods. 
+ narrativeExtended=[ + ("_a24hr_Period_WithoutLows", 24), ("_a24hr_Period", 24), + ("_a24hr_Period", 24), ("_a24hr_Period", 24),("_a24hr_Period", 24), + ] + if self._periodCombining: + methodList = [self.combineComponentStats, self.assembleChildWords] + else: + methodList = [self.assembleChildWords] + + return { + "type": "narrative", + "displayName": None, + "timePeriodMethod ": self.timeRangeLabel, + ## Components + "methodList": methodList, + "narrativeDef": narrativeExtended, + } + + def _lower_special_chars(self, fcst): + """Replaces special characters that need to be lower case.""" + self.debug_print("") + + escB="" + "b" + escA="" + "a" + # Make sure escape sequences are not capitalized + fcst = fcst.replace("" +"A", escA) + fcst = fcst.replace("" + "B", escB) + # Also there may be a special char in the lac + lacupper = string.upper(self._currentLac) + fcst = fcst.replace(lacupper, self._currentLac) + + return fcst + + def _getSummaryExtended(self, argDict): + """Creates a component product summary extended forecast.""" + self.debug_print("") + + # _summaryAreaDict is a dictionary with keys of the LAC ID + # and values a tuple of (editAreaName, areaLabel) + # _summaryExtendedIntro is a string to indroduce the extended + # such as "The extended forecast for" + # areaLabel is a label string such as "Western Virginia" + # editAreaName must be the name of a GFE defined edit area + fcst = "" + if self._currentLac in self._summaryAreaDict: + editAreaName, areaLabel = self._summaryAreaDict[self._currentLac] + intro = self._summaryExtendedIntro + intro = intro + " " + areaLabel + # Note endline appends a newline + intro = self.endline(intro) + extended = self.generateProduct("_ExtendedNarrative", argDict, + area = editAreaName, timeRange=self._extendedTimeRange) + fcst = intro + extended + + return fcst + + def allowedHazards(self): + return [] diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SampleAnalysis.py 
b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SampleAnalysis.py index 71dc08e01b..d35c1c27b9 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SampleAnalysis.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SampleAnalysis.py @@ -1,2968 +1,2968 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# SampleAnalysis.py -# Class for producing summary statistics from Sampler data. -# Typically used for Text Product generation. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import string, sys, types - -import logging -import TimeRange, AbsTime, WeatherSubKey, JUtil -from math import * -from com.raytheon.viz.gfe.sampler import HistValue, HistPair -from com.raytheon.viz.gfe.textformatter import FormatterUtil - - ## For complete documentation on the analysis methods available in this class, - ## refer to the Text Product User Guide. 
- - ## Utility Methods: - - ## The following methods return an integer that indicated the expected - ## return type.... - ## SCALAR - ## MAGNITUDE - ## DIRECTION - ## VECTOR - ## WEATHER - ## - ## getModeratedLimits - ## returns the min and max percentages allowed for all moderated methods - ## getStdDevLimits - ## returns the min and max standard deviation limits for all stdDev methods - ## getAccumSum - ## returns the accumulated sum for the specified time period - ## getAccumMinMax - ## return the min and max for the specified time period - ## getAverage - ## returns the absolute average - ## getMinMax - ## returns the absolute minimum and maximum - ## getStdDevAvg - ## returns the average after filtering data based on standard deviation - ## getStdDevMinMax - ## returns the min and max values after filtering based in standard deviation - ## getModeratedAvg - ## returns the average after filtering based on percentage - ## getModeratedMinMax - ## returns the min and max after filtering based on percentage - ## getVectorAvg - ## returns the absolute vector average with no filtering - ## getDominantDirection - ## returns either the Average or Most Frequent direction based on: - ## vectorDirection_algorithm - ## getAverageDirection - ## returns the average direction over the specified time period - ## getMostFrequentDirection - ## returns the most frequent direction over the specified time period - ## temporalCoverage_flag - ## returns 1 if the specified grid sufficiently overlaps the sample time period - ## getDominantWx - ## returns the dominant weather over the sample period - ## getDominantDiscreteValue - ## returns the dominant discrete values over the sample period - ## getDominant -- handles both getDominantWx and getDominantDiscreteValue - ## getSubkey_percentages -- gather all the Weather or Discrete SubKeys and percentages - ## dirList - ## converts a numerical direction into a string (e.g., N, SE) - ## determineGridWeight - ## returns the time wieght for 
a particular grid ( 1.0 = full weight) - ## createStats - ## reports statistics based on the method specified - ## splitRange(timeRange, numPeriods) - ## splits a timerange into the specified number of periods and returns a - ## timeRange list - ## getGridTimeRanges - ## returns the list of timeRanges after splitting along sample time periods - ## divideRange(timeRange, hours) - ## splits a timeRange into sub periods each with the duration specified - ## getMode - ## returns a range around the median indicating the most frequent values - ## getMedian - ## returns median value over given timeRange - ## getMedianRange - ## returns 2-value range chosen around median, uses getRange - ## getMedianHistPair - ## returns median histPair over given timeRange - ## getHistPairMean - ## given two HistPairs returns a HistPair representing the mean of the two - ## getModeRange - ## range chosen around mode, uses getRange - ## getModeHistPair - ## returns most common HistPair over given timeRange - ## getRange - ## range chosen around given histPair - ## returns: scalar: (min, max) - ## vector: (minMag, maxMag, avgDir) - ## getDeviation - ## returns a deviation around median to include in range - ## getBinnedPercent - ## returns a list of tuples representing "bins" and corresponding - ## percentages of values in each bin - ## - ## Conversion methods - ## UVToMagDir - ## MagDirToUV - ## convertAnalysisList - ## Breakdown of Sampler data: - ## HistoSampler : Parms, TimeRanges, EditAreas - ## Contains SeqOf - ## ParmHisto: Parm, TimeRange, EditArea - ## Contains SeqOf : (one for each grid overlapping timeRange) - ## HistSample is a histogram for a Parm(implicit), Grid, Area - ## Contains SeqOf - ## HistPair : Parm(implicit), Grid(implicit), Area(implicit) - ## count, HistValue (value) - ## count = how many times that value occurred within the grid - ## HistValue value: scalar, Vector (magnitude, direction), weather - -import CommonUtils -import logging - -class 
SampleAnalysis(CommonUtils.CommonUtils): - def __init__(self): - CommonUtils.CommonUtils.__init__(self) - self.log = logging.getLogger("FormatterRunner.SampleAnalysis.SampleAnalysis") - - ### CONSTANTS -- Do not override - def SCALAR(self): - return 0 - def MAGNITUDE(self): - return 1 - def DIRECTION(self): - return 2 - def VECTOR(self): - return 3 - def WEATHER(self): - return 4 - def DISCRETE(self): - return 5 - - ### GLOBAL THRESHOLDS AND VARIABLES - ### To override, override the associated method in your text product class. - # To be included in the analysis, a grid must either: - # 1. Be completely contained in the time range OR - # 2. Meet BOTH the temporalCoverage_percentage and temporalCoverage_hours - # requirements. - # The temporalCoverage_percentage is: - # The percentage of the TIMERANGE covered by the - # grid in order to include it in the analysis. - # The temporalCoverage_hours is: - # The required hours of overlap of a grid with the TIMERANGE - # in order to include it in the analysis. - # In addition, if the temporalCoverage_hours is greater than or equal to the - # TIMERANGE duration and the grid covers the entire TIMERANGE, - # it will be included. - def temporalCoverage_percentage(self, parmHisto, timeRange, componentName): - # This is the percentage of the TIMERANGE covered by the - # grid in order to include it in the analysis. - # Percentage of temporal coverage default value (if not found in temporalCoverage_dict) - # Used by temporalCoverage_flag - return 20 - - def temporalCoverage_dict(self, parmHisto, timeRange, componentName): - # This is temporalCoverage percentage by weather element - # Used by temporalCoverage_flag - return { - "LAL": 0, - "MinRH": 0, - "MaxRH": 0, - "MinT": 1, - "MaxT": 1, - "Haines": 0, - "PoP" : 0, - "Hazards" : 0, - } - - def temporalCoverage_hours(self, parmHisto, timeRange, componentName): - # This is the required hours of overlap of a grid with the TIMERANGE - # in order to include it in the analysis. 
- # In addition, if the temporalCoverage_hours is greater than or equal to the - # TIMERANGE duration and the grid covers the entire TIMERANGE, - # it will be included. - # Temporal coverage hours default value - # (if not found in temporalCoverage_hours_dict) - # Used by temporalCoverage_flag - return 0 - - def temporalCoverage_hours_dict(self, parmHisto, timeRange, componentName): - # This is the temporalCoverage_hours specified per weather element. - # Used by temporalCoverage_flag - return { - #"MinRH": 0, - #"MaxRH": 0, - "MinT": 5, - "MaxT": 5, - #"Haines":0, - #"PoP" : 0, - "pws34": 4, - "pws64": 4, - "pwsD34": 4, - "pwsN34": 4, - "pwsD64": 4, - "pwsN64": 4, - } - - def moderated_dict(self, parmHisto, timeRange, componentName): - # This dictionary defines the low and high limit at which - # outliers will be removed when calculating moderated stats. - # By convention the first value listed is the percentage - # allowed for low values and second the percentage allowed - # for high values. 
- return { - "T" : (10, 10), - "Wind": (0, 20), - "LAL": (10, 10), - "MinRH": (10, 10), - "MaxRH": (10, 10), - "MinT": (10, 10), - "MaxT": (10, 10), - "Haines": (10, 10), - "PoP" : (10, 10), - } - - def getModeratedLimits(self, parmHisto, timeRange, componentName): - compositeNameUI = parmHisto.getCompositeNameUI() - # get the stdDict min and max values - modMin = self.moderatedDefault(parmHisto, timeRange, componentName) - modMax = self.moderatedDefault(parmHisto, timeRange, componentName) - modDict = self.moderated_dict(parmHisto, timeRange, componentName) - if modDict.has_key(compositeNameUI): - modMin, modMax = modDict[compositeNameUI] - - return modMin, modMax - - def moderatedDefault(self, parmHisto, timeRange, componentName): - "Value used by moderated functions if not explicitly defined in moderated_dict" - return 5 - - def maxMode_increment_dict(self, parmHisto, timeRange, componentName): - return { - "PoP" : 10, - } - - def stdDev_dict(self, parmHisto, timeRange, componentName): - # This dictionary defines the low and high limit at which - # outliers will be removed when calculating stdDev stats. - # These tuples represent the (low, high) number of standard - # deviations. Any values falling outside this range will - # not be included in the calculated statistic. 
- return { - "LAL": (1.0, 1.0), - "MinRH": (1.0, 1.0), - "MaxRH": (1.0, 1.0), - "MinT": (1.0, 1.0), - "MaxT": (1.0, 1.0), - "Haines": (1.0, 1.0), - "PoP" : (1.0, 1.0), - "T" : (1.0, 1.0), - "Wind" : (1.0, 1.0), - } - - def getStdDevLimits(self, parmHisto, timeRange, componentName): - compositeNameUI = parmHisto.getCompositeNameUI() - # get the stdDict min and max values - stdDevDict = self.stdDev_dict(parmHisto, timeRange, componentName) - minStdDev = self.stdDevDefault(parmHisto, timeRange, componentName) - maxStdDev = self.stdDevDefault(parmHisto, timeRange, componentName) - if stdDevDict.has_key(compositeNameUI): - minStdDev, maxStdDev = stdDevDict[compositeNameUI] - - return minStdDev, maxStdDev - - def stdDevDefault(self, parmHisto, timeRange, componentName): - "Value used by all moderated functions if not explicitly defined in stdDev_dict" - return 1.0 - - def vectorDirection_algorithm(self, parmHisto, timeRange, componentName): - # Algorithm to use for computing vector direction for vector analysis methods. 
- # Can be "Average" or "MostFrequent" - return "Average" - - # Variables for converting Wind Direction from degrees to letters - def dirList(self): - dirSpan = 45 # 45 degrees per direction - base = 22.5 # start with N - return [ - ('N', 360-base, 361), - ('N', 0, base), - ('NE',base , base + 1*dirSpan), - ('E', base + 1*dirSpan, base + 2*dirSpan), - ('SE',base + 2*dirSpan, base + 3*dirSpan), - ('S', base + 3*dirSpan, base + 4*dirSpan), - ('SW',base + 4*dirSpan, base + 5*dirSpan), - ('W', base + 5*dirSpan, base + 6*dirSpan), - ('NW',base + 6*dirSpan, base + 7*dirSpan) - ] - - - ######################################## - ## SCALAR - - def avg(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getAverage(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def minMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - results = self.createStats(parmHisto,timeRange, componentName, args, primaryMethod) - return results - - def minimum(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractMinMax(result, "Min") - - def maximum(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractMinMax(result, "Max") - - def accumMinMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getAccumMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def 
accumSum(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getAccumSum(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def moderatedAccumMinMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModAccumMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def moderatedAccumSum(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModAccumSum(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def stdDevAccumMinMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevAccumMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def stdDevAccumSum(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevAccumSum(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def median(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getMedian(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def medianRange(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getMedianRange(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def mode(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getMode(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, 
primaryMethod) - - def modeRange(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModeRange(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def maxMode(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getMaxMode(self.SCALAR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def stdDevAvg(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevAvg(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def stdDevMin(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractMinMax(result, "Min") - - def stdDevMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractMinMax(result, "Max") - - def stdDevMinMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def stdDevFirstAvg(self, parmHisto, timeRange, componentName, args=None): - return self.getStdDevAvg(self.SCALAR(), parmHisto, timeRange, componentName, 1) - - def stdDevFirstMinMax(self, parmHisto, timeRange, componentName, args=None): - return self.getStdDevMinMax(self.SCALAR(), parmHisto, timeRange, componentName, 1) - - def moderatedAvg(self, parmHisto, 
timeRange, componentName, args=None): - primaryMethod = "self.getModeratedAvg(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def moderatedMin(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModeratedMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractMinMax(result, "Min") - - def moderatedMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModeratedMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractMinMax(result, "Max") - - def moderatedMinMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModeratedMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def binnedPercent(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getBinnedPercent(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def moderatedFirstAvg(self, parmHisto, timeRange, componentName, args=None): - return self.getModeratedAvg(self.SCALAR(), parmHisto, timeRange, componentName, 1) - - def moderatedFirstMinMax(self, parmHisto, timeRange, componentName, args=None): - return self.getModeratedMinMax(self.SCALAR(), parmHisto, timeRange, componentName, 1) - - def minMaxAvg(self, parmHisto, timeRange, componentName, args=None): - # Find Min and Max values - minMax = self.minMax( parmHisto, timeRange, componentName) - avg = self.avg(parmHisto, timeRange, componentName) - if minMax is None or avg is None: - return None - else: - 
min, max = minMax - return (min, max, avg) - - def minMaxSum(self, parmHisto, timeRange, componentName, args=None): - values = parmHisto.minMaxSum() - if values is None: - return None - else: - minV, maxV, sumV = values - return minV, maxV, sumV - - def maxAvg(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getMaxAvg(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def stdDevMaxAvg(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevMaxAvg(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def moderatedMaxAvg(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModeratedMaxAvg(self.SCALAR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def firstAvg(self, parmHisto, timeRange, componentName, args=None): - return self.getAverage(self.SCALAR(), parmHisto, timeRange, componentName, 1) - - def firstMinMax(self, parmHisto, timeRange, componentName, args=None): - return self.getMinMax(self.SCALAR(), parmHisto, timeRange, componentName, 1) - - def hourlyTemp(self, parmHisto, timeRange, componentName, args=None): - "Create hourly temperature stats" - # Produces a list of hourly temperature values in tuples - # Each tuple has an average temperature value and - # its hour of occurrence - # Assumptions: - # If there is no data for an hour, None is - # given instead of a temp value - - if parmHisto.getSampleLen() == 0: - return None - - start = timeRange.startTime() - start = AbsTime.absTimeYMD(start.year, start.month, - start.day, start.hour, 0, 0) - stats = [] - while start < timeRange.endTime(): - # Create Time Range for current hour - end = start + 3600 # 1 hour in 
seconds - hour = start.hour - tr = TimeRange.TimeRange(start, end) - #Get the Average T for current hour - value = self.getAverage(self.SCALAR(), parmHisto, tr, componentName) - - # Append Value and Hour to Stat List - stats.append((value, hour)) - start = end - return stats - - ######################################## - ## VECTOR - - def vectorAvg(self, parmHisto, timeRange, componentName, args=None): - "Create a Vector ((minMag, maxMag), TextDir) Stats" - primaryMethod = "self.getAverage(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def vectorMinMax(self, parmHisto, timeRange, componentName, args=None): - "Create a Vector ((minMag, maxMag), TextDir) Stats" - primaryMethod = "self.getMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def vectorMin(self, parmHisto, timeRange, componentName, args=None): - "Create a Vector ((minMag, maxMag), TextDir) Stats" - primaryMethod = "self.getMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractVectorMinMax(result, "Min") - - def vectorMax(self, parmHisto, timeRange, componentName, args=None): - "Create a Vector ((minMag, maxMag), TextDir) Stats" - primaryMethod = "self.getMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractVectorMinMax(result, "Max") - - def vectorMedian(self, parmHisto, timeRange, componentName, args=None): - "Create a Vector (median, TextDir) Stats" - primaryMethod = "self.getMedian(self.VECTOR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def vectorMode(self, parmHisto, 
timeRange, componentName, args=None): - "Create a Vector (mode, TextDir) Stats" - primaryMethod = "self.getMode(self.VECTOR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def vectorMedianRange(self, parmHisto, timeRange, componentName, args=None): - "Create a Vector (medianRange, TextDir) Stats" - primaryMethod = "self.getMedianRange(self.VECTOR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def vectorModeRange(self, parmHisto, timeRange, componentName, args=None): - "Create a Vector (modeRange, TextDir) Stats" - primaryMethod = "self.getModeRange(self.VECTOR(), parmHisto, timeRange, componentName)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def vectorStdDevAvg(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevAvg(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod,) - return result - - def vectorStdDevMinMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def vectorStdDevMin(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractVectorMinMax(result, "Min") - - def vectorStdDevMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getStdDevMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return 
self.extractVectorMinMax(result, "Max") - - def vectorModeratedAvg(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModeratedAvg(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod,) - return result - - def vectorModeratedMinMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModeratedMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def vectorBinnedPercent(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getBinnedPercent(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return result - - def vectorModeratedMin(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModeratedMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractVectorMinMax(result, "Min") - - def vectorModeratedMax(self, parmHisto, timeRange, componentName, args=None): - primaryMethod = "self.getModeratedMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" - result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - return self.extractVectorMinMax(result, "Max") - - def vectorMagMinMax(self, parmHisto, timeRange, componentName, args=None): - "Create a Vector min/max Stats" - return self.getMinMax(self.MAGNITUDE(), parmHisto, timeRange, componentName) - - def vectorMagMin(self, parmHisto, timeRange, componentName, args=None): - "Create a Vector min Stats" - minResult, maxResult = self.getMinMax(self.MAGNITUDE(), parmHisto, timeRange, - componentName) - return minResult - - def vectorMagMax(self, parmHisto, timeRange, componentName, args=None): - 
"Create a Vector max Stats" - minResult, maxResult = self.getMinMax(self.MAGNITUDE(), parmHisto, timeRange, - componentName) - return maxResult - - ## This method is being kept for "table" type products - def vectorRange(self, parmHisto, timeRange, componentName=None): - "Create a Vector Stats" - # Split Time Period in half - # For each half find average values: - # mag1 = min Period 1, mag2 = max Period 1, - # mag3 = min Period 2, mag4 = max Period 2 - periods = self.splitRange(timeRange) - period1 = periods[0] - period2 = periods[1] - result1 = self.getAverage(self.VECTOR(), parmHisto, period1, componentName) - result2 = self.getAverage(self.VECTOR(), parmHisto, period2, componentName) - - if result1 is None or result2 is None: - return None - mag1, dir1 = result1 - mag2, dir2 = result2 - return (mag1, mag2, dir1, dir2) - - ######################################## - ## WEATHER - ## - ## dominantWx - ## - ## Thresholds and variables: - - def coverage_weights_dict(self): - # Weight (between 0 and 1) for the coverage terms - return { - "": 0, - "Iso": .15, - "SChc": .15, - "Patchy": .15, - "Areas": .4, - "Chc": .4, - "Sct": .4, - "Lkly": .7, - "Num": .7, - "Brf": 1.0, - "Frq": 1.0, - "Ocnl": 1.0, - "Pds": 1.0, - "Inter": 1.0, - "Def": 1.0, - "Wide": 1.0, - } - - def wxkey_coverage_weight(self, parmHisto, timeRange, componentName, wxkey): - # Return a weight (between 0 and 1) for the wxkey coverage term - cov = wxkey.coverage() - return self.coverage_weights_dict()[cov] - - def wxkey_coverage_percentage(self, parmHisto, timeRange, componentName, wxkey): - # Return the required coverage percentage for the given wxkey which will be - # compared to its "rank" i.e. the percentage of areal coverage over the time period. 
- wxType = wxkey.wxType() - wxCov = wxkey.coverage() - inten = wxkey.intensity() - # These rules were from the workshop - if wxType == "T" and inten == "+": - return 0 - if wxType in ["ZR", "ZL"]: - return 0 - # Large Hail - attrList = wxkey.attributes() - if "LgA" in attrList: - return 0 - # Heavy Fog - if wxType == "F" and inten == "+": - return 0 - # Low visibility - if wxType in ["F", "H", "BS", "K", "BD"]: - vis = wxkey.visibility() - if vis == "1/4SM" or vis == "0SM": - return 0 - if wxType in ["T", "R", "RW", "S", "SW", "L", "IP"]: - return 15 - # For the rest: ["F", "H", "BS", "K", "BD", "SA", "LC", "FR", "WG", "VA"] - return 15 - - def checkPercentages(self, parmHisto, timeRange, componentName, wxKey, keyRankDict): - # If a wxKey does not pass the wxkey_coverage_percentage, this method will be called - # to give another chance. - # You can use the keyRankDict: - # subkey : (rank, percent coverage) - # to allow the wxKey to pass based on other values in the grid. - # For example: If I have 10% RW 10% SW, neither RW or SW will be reported - # Using the keyRankDict, I can allow them to pass when I learn - # that 20% of my area is covered with precip. - # Here's how this might be done: - # - #precip = ["SW", "RW", "R", "S"] - #totalPrecip = 0 - #if wxKey.wxType() in precip: - # for subkey in keyRankDict.keys(): - # if subkey.wxType() in precip: - # rank, percent = keyRankDict[subkey] - # totalPrecip += percent - #if totalPrecip > 15: - # return 1 - #else: - # return 0 - return 0 - - def attribute_coverage_percentage(self, parmHisto, timeRange, componentName, wxType, attr): - # Return the required coverage percentage for the given attribute. - # May be based on the wxType and attr if desired. 
- if attr == "Dry": - return 20 - else: - return 0 - - def dominantKeys_threshold(self, parmHisto, timeRange, componentName): - # This is the maximum number of weather keys desired from the rankedWx method - return 10 - - def cleanOutEmptyValues(self, parmHisto, timeRange, componentName, dataType): - return 0 - - def noWx_percentage(self, parmHisto, timeRange, componentName): - # If the raw rank (areal and temporal coverage) of NoWx exceeds this value, - # NoWx will be reported (all other weather keys will be ignored). - return 100 - - def dominantWx(self, parmHisto, timeRange, componentName, args=None): - "Return a list of dominant wx subkeys in order by ranking" - primaryMethod = "self.getDominantWx(parmHisto, timeRange, componentName, withRank=0)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def rankedWx(self, parmHisto, timeRange, componentName, args=None): - "Return a list of ranked (subkey, ranking) tuples" - primaryMethod = "self.getDominantWx(parmHisto, timeRange, componentName, withRank=1)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def getDominantWx(self, parmHisto, timeRange, componentName, withRank=0): - # Determine the dominant Wx considering the following: - # areal coverage over time - return self.getDominantValues(parmHisto, timeRange, componentName, - dataType="WEATHER", withRank=withRank) - - - def getDominantValues(self, parmHisto, timeRange, componentName, - dataType="WEATHER", withRank=0, withAux=0): - # Determine the dominant Wx subkey OR Discrete subkey - # considering the following: - # areal coverage over time, called the "rank" - # Sub-methods: - # temporalCoverage_flag - # wxKey_coverage_percentage and dominantKeys_threshold OR - # discreteKey_coverage_percentage and dominantDiscreteKeys_threshold - # The algorithm: - # Temporal coverage by grid: Any individual grid must - # cover enough time in the timerange to set the temporalCoverage_flag. 
- # Loop over all samples, which are for all grids and - # all weather or discrete keys on each of those grids. - # For WEATHER, - # We aggregate weather types i.e. - # for each weather type (S, RW, R, etc.) we compute - # an aggregate subkey such that the resulting rankList - # will have just one entry per weather type. - # Aggregate Subkey: - # coverage: time-weighted average of coverages - # intensity: highest ranking OR if ranks are close, dominant - # visibility: lowest visibility - # attributes: aggregate attributes - # Rank: percentage of areal/temporal coverage over time - # For WEATHER, we weight this by the coverage term - # If a subkey does not meet the "wxkey_coverage_percentage" threshold - # or "discreteKey_coverage_percentage", it is removed. - # For WEATHER, in all cases, if a subkey has a Primary or Mention attribute, - # it automatically "wins" and is used. - # Finally, the highest ranking dominantKeys_threshold OR - # dominantDiscreteKeys_threshold number of keys are returned. - # If withRank == 1: return the ranked list of (subkey, rank) tuples - # Else: return the list of subkeys - # - totalHours = 0 - totalPoints = parmHisto.numberOfGridPoints() - compositeNameUI = parmHisto.getCompositeNameUI() - - # Loop over all samples, which are for all grids and - # all keys on each of those grids. - # In this process, we aggregate subkey types e.g. - # for each weather type (S, RW, R, etc.) we compute - # an aggregate subkey such that the resulting rankList - # will have just one entry per weather type. - # For discrete, we will have just one entry per - # discrete subkey (with or without Aux value). - - #print "\n\nIn getDominantValues: DataType, TimeRange", dataType, timeRange - #print "STEP 1 -- Aggregate per grid" - - subkeyTypeDict = {} - # STEP 1: - # For each wxType found in the grids, - # gather its 'hours' of coverage and 'count' of points covered. 
- for histSample in parmHisto.histoSamples(): - validTime = TimeRange.TimeRange(histSample.validTime()) - if self.temporalCoverage_flag( - parmHisto, timeRange, componentName, histSample) == 0: - continue - # Get the number of hours inside the timeRange that this - # sample comes from (i.e., we can get a sample that lasts - # for 3 weeks - but only 1 hour of it is inside the - # timeRange - and we only want to rank it by the 1 hour - # inside the range) - # - hours = validTime.intersection(timeRange).duration()/3600 - if hours < 1: - continue - - totalHours += hours - - # Gather the subkey Types for this grid in subkeyTypeDict - # Each entry ends up being a list of tuples: (subkey, hours, count) - self.gatherSubkeyTypes( - parmHisto, timeRange, componentName, histSample, dataType, hours, - subkeyTypeDict, withAux) - - # STEP 2: For each subkeyType, - # --determine an aggregate subkey and rank i.e. - # aggregate areal coverage over time percentage - # --compare the rank to coverage threshold for the wxType. 
- rankList = [] - #print "subkeyTypeDict", subkeyTypeDict - subkeyTypePointsDict = {} - noWxRawRank = 0 - keyRankDict = {} # Holds: aggregateKey: (rank, rawRank) - for subkeyType in subkeyTypeDict.keys(): - #print "\nsubkeyType", subkeyType - subkeyList = subkeyTypeDict[subkeyType] - if dataType == "WEATHER": - covDict = {} - intenDict = {} - visDict = {} - attrList = [] - attrDict = {} - primaryKey = None - mentionKey = None - subkeyTypeRank = 0 - # IF WEATHER: - # Gather the coverages, intensities, visibilities and attributes - # for this weather type - # If Primary or Mention, use the subkey as the aggregate key - # Determine a subkeyType rank - subkeyTotalPoints = 0 - for subkey, hours, count in subkeyList: - #print " subkey, hours, count", subkey, hours, count - subkeyTotalPoints += count - if dataType == "WEATHER": - attrs = subkey.attributes() - attrList = attrList + attrs - if "Primary" in attrs: - primaryKey = subkey - continue - if "Mention" in attrs: - mentionKey = subkey - continue - self.addToDict(covDict, subkey.coverage(), hours, count, 1) - self.addToDict(intenDict, subkey.intensity(), hours, count) - self.addToDict(visDict, subkey.visibility(), hours, count) - for attr in attrs: - self.addToDict(attrDict, attr, hours, count) - subkeyTypeRank += hours * count - subkeyTypePointsDict[subkeyType] = subkeyTotalPoints - - # Determine aggregate key - #print " subkeyTypeRank", subkeyTypeRank, subkeyTotalPoints - #print " totalHours, totalPoints", totalHours, totalPoints - subkeyPoints = subkeyTypePointsDict[subkeyType] - if dataType == "WEATHER": - aggregateKey = self.getWxAggregateKey( - parmHisto, timeRange, componentName, - primaryKey, mentionKey, subkeyType, covDict, intenDict, visDict, - attrList, attrDict, totalHours, totalPoints, subkeyPoints) - else: - aggregateKey = subkeyType - - # Determine rawRank and rank for the aggregateKey - if dataType == "WEATHER" \ - and "Primary" in aggregateKey.attributes(): - rank = 200 - rawRank = 200 - else: - 
rawRank = int(round(float(subkeyTypeRank)/(totalHours*totalPoints)*100.0)) - if dataType == "WEATHER": - # Save the raw rank for NoWx - if aggregateKey.wxType() == "": - noWxRawRank = rawRank - # Multiply by the coverage weight - rank = int(rawRank * self.wxkey_coverage_weight( - parmHisto, timeRange, componentName, aggregateKey)) - else: - rank = rawRank - #print " aggregateKey, rank", aggregateKey, rank, rawRank - keyRankDict[aggregateKey] = (rank, rawRank) - - # Check to see if each aggregateKey meets the required coverage percentage - for aggregateKey in keyRankDict.keys(): - rank, rawRank = keyRankDict[aggregateKey] - if dataType == "WEATHER" \ - and ("Mention" in aggregateKey.attributes() \ - or "Primary" in aggregateKey.attributes()): - rankList.append((aggregateKey, rank)) - else: - if dataType == "WEATHER": - # Use rawRank which is the percentage of areal/temporal coverage - threshold = self.wxkey_coverage_percentage( - parmHisto, timeRange, componentName, aggregateKey) - flag = rawRank >= threshold - else: - threshold = self.discreteKey_coverage_percentage( - parmHisto, timeRange, componentName, aggregateKey) - flag = rawRank >= threshold - if not flag: - # Get another chance to pass - flag = self.checkPercentages( - parmHisto, timeRange, componentName, aggregateKey, keyRankDict) - if flag: - rankList.append((aggregateKey, rank)) - else: - pass - #print "didn't make the cut", rank, aggregateKey - - #print " rankList", rankList - if len(rankList) == 0: - return None - - # Check the NoWx Threshold - if noWxRawRank > self.noWx_percentage(parmHisto, timeRange, componentName): - # Report NoWx - newList = [] - for key, rank in rankList: - if key.wxType() == "": - newList.append((key, rank)) - rankList = newList - - # Clean out NoWx and None (Discrete) - if self.cleanOutEmptyValues(parmHisto, timeRange, componentName, dataType): - newList = [] - for subkey, rank in rankList: - if dataType == "WEATHER": - if subkey.wxType() == "": - continue - else: # DISCRETE - 
if subkey == "": - continue - newList.append((subkey, rank)) - rankList = newList - - # Sort into ranked order - # Limit the number of keys returned - if dataType == "WEATHER": - rankList.sort(self.rankedSortOrder) - rankList = [ - (WeatherSubKey.weatherSubKey(self._argDict["site"], subkey.coverage(), subkey.wxType(), subkey.intensity(), - subkey.visibility(), - self.removeSimilarAttrs(subkey.attributes())), - rank) for subkey, rank in rankList - ] - dominantKeys = self.dominantKeys_threshold(parmHisto, timeRange, componentName) - else: # DISCRETE - rankList.sort() - dominantKeys = self.dominantDiscreteKeys_threshold(parmHisto, timeRange, componentName) - - if len(rankList) > dominantKeys: - rankList = rankList[0:dominantKeys] - if self._debug: - print "\nSampleAnalysis::ranked", dataType, " \n TimeRange ", timeRange - print " Area", parmHisto.area().getId() - if withRank: - if self._debug: - print " returning with rank %s" % (rankList) - return rankList - else: - newList = [] - for subkey, rank in rankList: - newList.append(subkey) - if self._debug: - print " returning %s" % (newList) - return newList - - def gatherSubkeyTypes(self, parmHisto, timeRange, componentName, - histSample, dataType, hours, subkeyTypeDict, withAux): - for histPair in histSample.histogram(): - count = float(histPair.count()) - if dataType == "WEATHER": - subkey = WeatherSubKey.WeatherSubKey(histPair.value().weather().get(0)) - subkeyType = subkey.wxType() - if subkeyType == "RW" or subkeyType == "SW": - if subkey.intensity() == "--": - subkeyType = subkeyType + "--" - else: # DISCRETE - subkeyType = histPair.value().discrete().get(0) - if withAux == 0: - subkeyType = histPair.value().discrete().baseData(histPair.value().discrete().getSiteId(), subkeyType) - subkey = subkeyType - if subkeyTypeDict.has_key(subkeyType): - subkeyTypeDict[subkeyType].append((subkey, hours, count)) - # Make new entry - else: - subkeyTypeDict[subkeyType] = [(subkey, hours, count)] - - def getWxAggregateKey(self, 
parmHisto, timeRange, componentName, - primaryKey, mentionKey, subkeyType, covDict, intenDict, visDict, - attrList, attrDict, totalHours, totalPoints, subkeyPoints): - # Compute the aggregate key - # If Primary was an attribute in any subkey, take it as the aggregate - # plus all the other attributes - # Otherwise, if Mention was an attribute in any subkey, take it as the - # aggregate plus all the other attributes - # Otherwise, compute the aggregate subkey from the coverage, intensity, - # visibilities for this subkeyType weighted by temporal and areal coverages - #print "covDict", covDict - #print "intenDict", intenDict - #print "visDict", visDict - #print "attrDict", attrDict - if subkeyType in ["RW--", "SW--"]: - subkeyType = subkeyType.replace("--","") - aggregateKey = None - if primaryKey is not None: - aggregateKey = primaryKey - elif mentionKey is not None: - aggregateKey = mentionKey - - if aggregateKey is None: - algorithm = self.aggregateCov_algorithm(parmHisto, timeRange, componentName) - aggCov = algorithm(parmHisto, timeRange, componentName, - subkeyType, "coverage", covDict, totalHours, totalPoints, subkeyPoints) - aggInten = self.getAggregate(parmHisto, timeRange, componentName, - subkeyType, "intensity", intenDict, totalHours, totalPoints, subkeyPoints) - aggVis = self.getAggregate(parmHisto, timeRange, componentName, - subkeyType, "visibility", visDict, totalHours, totalPoints, subkeyPoints) - aggAttrs = self.getAggregateAttributes( - parmHisto, timeRange, componentName, - subkeyType, attrDict, totalHours, totalPoints, subkeyPoints) - else: - aggCov = aggregateKey.coverage() - aggInten = aggregateKey.intensity() - aggVis = aggregateKey.visibility() - aggAttrs = aggregateKey.attributes() - attrList = self.removeDups(attrList) - aggregateKey = WeatherSubKey.weatherSubKey(self._argDict["site"], aggCov, subkeyType, aggInten, aggVis, aggAttrs) - #print "aggregateKey", aggregateKey - return aggregateKey - - def addToDict(self, dict, key, hours, count, 
tuple=0): - # Add to a dictionary whose values are lists - - if dict.has_key(key): - - if tuple: - (curValue, curMaxCov) = dict[key] - - if count > curMaxCov: - curMaxCov = count - - dict[key] = ((curValue + hours*count), curMaxCov) - - else: - curValue = dict[key] - - dict[key] = curValue + hours * count - - # Make new entry - else: - if tuple: - dict[key] = ((hours * count), count) - else: - dict[key] = hours * count - - def aggregateCov_algorithm(self, parmHisto, timeRange, componentName): - # The algorithm for choosing the coverage term for multiple - # instances of a weather type. - # "getAggregateCov" chooses the coverage with the highest rank - # (in terms of areal and temporal coverage.) - # "getExistingWeightedAggregateCov" chooses the coverage with the - # highest WEIGHTED rank that exists in the grids. - # "getWeightedAggregateCov" computes a weighted average coverage - # If the resulting coverage is not in the grids, - # "creates" an appropriate coverage. - # "getHighestWeightedAggregateCov" returns the coverage with the - # highest weight in "coverage_weights_dict" - # - # For example, - # If you have - # Iso T(covers 50% of the zone) - # Sct T(covers 25% of the zone) - # Num T(covers 25% of the zone) - # "getAggregateCov" returns "Iso" - # "getWeightedAggregateCov" returns "Sct" - # "getExistingWeightedAggregateCov" returns "Num" - # "getHighestWeightedAggregateCov" returns "Num" - # - # If you have - # Iso T(covers 60% of the zone) - # Num T(covers 40% of the zone) - # "getAggregateCov" returns "Iso" - # "getWeightedAggregateCov" returns "Sct" - # "getExistingWeightedAggregateCov" returns "Num" - # "getHighestWeightedAggregateCov" returns "Num" - # - return self.getAggregateCov - # return self.getWeightedAggregateCov - # return self.getExistingWeightedAggregateCov - # return self.getHighestWeightedAggregateCov - - # Submitted by Matt Belk 8/04 - def getAggregateCov(self, parmHisto, timeRange, componentName, - wxType, wxPart, dict, totalHours, 
totalPoints, - subkeyPoints): - # From the entries in the dictionary, - # find the aggregate coverage - # Return coverage in one of two ways: - # - # 1) Coverage covers >= 90% of zone, or - # 2) Coverage has the highest subkey rank - - # If there is only one coverage - if len(dict) == 1: - for key in dict.keys(): - return key - - # Get ready to track properties of all coverage terms - maxRank = 0.0 - aggCov = None - sameRank = [] - highRank = [] - - # For each coverage - for key in dict.keys(): - # If this is a tuple - if type(dict[key]) == type(()): - (sum, max) = dict[key] # get the point sum and max coverage - # Otherwise, get the point sum and assume a max coverage - else: - sum = dict[key] - max = 0 - - # Compute subkey rank - subkeyRank = float(sum)/(totalHours * totalPoints) * 100.0 - # If this is the highest subkey rank we have so far - if subkeyRank > maxRank: - # Store this as the highest subkey rank - maxRank = subkeyRank - aggCov = key - sameRank = [] - sameRank.append(key) - # Otherwise, if this ties as the highest subkey rank - elif subkeyRank == maxRank: - sameRank.append(key) - - # If the areal coverage of this subkey coverage is >= 90% - arealThreshold = self.__dict__.get("_aggregateCov_arealThreshold", 90.0) - if (max * 100.0)/float(totalPoints) >= arealThreshold: - # Store this as a candidate for highest coverage key - highRank.append(key) - - # Get ready to process sameRank list (if needed) - maxVal = 0 - # If there is more than one key in the highRank list - if len(highRank) > 0: - # Use this list to find the aggregate coverage - testRank = highRank - # Otherwise, if there are items in the sameRank list - elif len(sameRank) > 0: - testRank = sameRank - - # Grab the most significant coverage - for cov in testRank: - # Compute the PoP range and a test value for this coverage - (lowVal, highVal) = self.coveragePoP_value(cov) - avgVal = (lowVal + highVal)/2.0 - # If this is the most significant value - if avgVal >= maxVal: - # Use this coverage - 
aggCov = cov - maxVal = avgVal - - # if the aggregate coverage is still not found - if aggCov is None: - aggCov = self.processNoAggCov(dict, wxType) - self.debug_print('in getAggregateCov -> returning %s' % (aggCov), 1) - return aggCov - - def aggregateCov_weights_dict(self): - # Weight (between 0 and 1) for the coverage terms - return { - "": 0, - "Iso": .1, - "SChc": .1, - "Patchy": .1, - "Areas": .4, - "Chc": .4, - "Sct": .4, - "Lkly": .7, - "Num": .7, - "Brf": .9, - "Frq": .9, - "Ocnl": .9, - "Pds": .9, - "Inter": .9, - "Def": .9, - "Wide": .9, - } - - def aggregateCov_weight(self, parmHisto, timeRange, componentName, cov): - # Return a weight (between 0 and 1) for the coverage term - return self.aggregateCov_weights_dict()[cov] - - def getExistingWeightedAggregateCov(self, parmHisto, timeRange, componentName, - wxType, wxPart, dict, totalHours, totalPoints, - subkeyPoints): - # From the entries in the dictionary, find the aggregate coverage by - # using a weighting scheme. - # If the resulting coverage is not in the grids, use the coverage - # with the greatest weight. - if len(dict) == 1: - for key in dict.keys(): - return key - aggCov, wtSum = self.getAggCov_and_WtSum(parmHisto, timeRange, componentName, - wxType, wxPart, dict, totalHours, totalPoints, - subkeyPoints) - if aggCov is None: - aggCov = self.processNoAggCov(dict, wxType) - return aggCov - - def getWeightedAggregateCov(self, parmHisto, timeRange, componentName, - wxType, wxPart, dict, totalHours, totalPoints, - subkeyPoints): - # From the entries in the dictionary, find the aggregate coverage by - # using a weighting scheme. - # If the resulting coverage is not in the grids, "create" an appropriate - # coverage. 
- if len(dict) == 1: - for key in dict.keys(): - return key - aggCov, wtSum = self.getAggCov_and_WtSum(parmHisto, timeRange, componentName, - wxType, wxPart, dict, totalHours, totalPoints, - subkeyPoints) - # Assign the new coverage - popValue = self.coveragePoP_table() - inGrids = 0 - candidates = [] - aggCov = None - for key in popValue.keys(): - lowVal, highVal = popValue[key] - #print "key, low, high", key, lowVal, highVal - # Ranges are inclusive and not contiguous, - # so we have to adjust - lowVal = lowVal - 10 - if wtSum > lowVal and wtSum <= highVal: - # If this coverage was in the grids, - # choose it and we're done - #print "dict", dict - if key in dict.keys(): - aggCov = key - inGrids = 1 - break - else: - candidates.append(key) - - #print "inGrids", inGrids - if not inGrids: - # If the weighted average was not in the grids, - # we need to choose a coverage or prob term from - # the candidates - - # Determine coverage or probability based on - # first dictionary key - arealCovs = self.arealCoverages() - for key in dict.keys(): - if key in arealCovs: - areal = 1 - else: - areal = 0 - break - for cov in candidates: - if cov in arealCovs: - covAreal = 1 - else: - covAreal = 0 - if covAreal == areal: - # Make sure this cov can be used with - # the wxType - availableCoverages = WeatherSubKey.availableCoverages(self._argDict["site"], wxType) - if cov in availableCoverages: - aggCov = cov - break - if aggCov is None: - aggCov = self.processNoAggCov(dict, wxType) - return aggCov - - def getAggCov_and_WtSum(self, parmHisto, timeRange, componentName, - wxType, wxPart, dict, totalHours, totalPoints, - subkeyPoints): - if len(dict) == 1: - for key in dict.keys(): - return key, 1 - # Compute weighted Sum - wtSum = 0.0 - maxContrib=0 - aggCov="" - for key in dict.keys(): - if type(dict[key]) == type(()): - (sum, max) = dict[key] - else: - sum = dict[key] - subkeyRank = float(sum)/(totalHours * totalPoints) * 100.0 - #print "key", key - covLowVal, covHighVal = 
self.coveragePoP_value(key) - covWt = self.aggregateCov_weight( - parmHisto, timeRange, componentName,key) - #print " covWt, subkeyRank", covWt, subkeyRank - #print " contribution", covWt * subkeyRank - contrib = covWt * subkeyRank - wtSum += contrib - if contrib > maxContrib: - aggCov = key - maxContrib=contrib - #print "weighted value", aggCov, wtSum - return aggCov, wtSum - - def getHighestWeightedAggregateCov( - self, parmHisto, timeRange, componentName, wxType, wxPart, dict, - totalHours, totalPoints, subkeyPoints): - # Return the Coverage with the highest weight in coverage_weights_dict - # Throw out Coverages that do not meet the wxkey_coverage_percentage - # EXCEPT if no Coverages meet that threshold, return the Coverage - # with the highest percentage. - # - # NOTE: In cases where the total percentages, e.g. 5% Sct, 7% Num, - # do not meet the threshold, it's ok for this method to return Num. - # because the total percentage will be checked in a later step. - # - # Handle case of only one coverage - if len(dict) == 1: - for cov in dict.keys(): - return cov - # Aggregate Coverage - aggCov = None - # Coverage Weight for aggregate Coverage - maxWt = -1 - # Aggregate Cov for those that do not meet threshold - aggCovReject = None - # Max Percentage for those Coverages that do not meet threshold - maxPercentReject = -1 - - for cov in dict.keys(): - covWt = self.aggregateCov_weight( - parmHisto, timeRange, componentName, cov) - sum, maxCov = dict[cov] - percentCov = float(sum)/(totalHours*totalPoints)*100.0 - percentCov = int(round(percentCov)) - # Check to see if it meets threshold - # Make a temporary wxKey to check wxkey_coverage_percentage - wxKey = WeatherSubKey.weatherSubKey(self._argDict["site"], cov, wxType, "", "", []) - #print "wxKey", wxKey, sum, percentCov - if percentCov >= self.wxkey_coverage_percentage( - parmHisto, timeRange, componentName, wxKey): - if covWt > maxWt: - aggCov = cov - maxWt = covWt - else: - if percentCov > maxPercentReject: - 
aggCovReject = cov - maxPercentReject = percentCov - #print "aggCov, wt",aggCov, maxWt - #print "aggCovReject, %", aggCovReject, maxPercentReject - if aggCov is None: - aggCov = aggCovReject - #print "Returning", aggCov - return aggCov - - def processNoAggCov(self, dict, wxType): - msg = "WARNING -- SampleAnalysis cannot aggregate coverages for " + wxType - log.warning(msg) - # There was no appropriate coverage for the wxType and given weight - # So take any coverage that exists in the grid - aggCov = "" - for key in dict.keys(): - aggCov = key - break - return aggCov - - def getAggregate(self, parmHisto, timeRange, componentName, - wxType, wxPart, dict, totalHours, totalPoints, subkeyPoints): - # From the entries in the dictionary, - # find the aggregate wxPart (coverage, intensity, visibility) - # Do it 2 at a time and aggregate the ranks as you go - if len(dict) == 1: - for key in dict.keys(): - return key - - firstTime = 1 - for key in dict.keys(): - sum = dict[key] - subkeyRank = float(sum)/(totalHours * totalPoints) * 100.0 - if wxPart == "coverage": - subkey = WeatherSubKey.weatherSubKey(self._argDict["site"], key, wxType, "", "", []) - elif wxPart == "intensity": - subkey = WeatherSubKey.weatherSubKey(self._argDict["site"], "", wxType, key, "", []) - elif wxPart == "visibility": - subkey = WeatherSubKey.weatherSubKey(self._argDict["site"], "", wxType,"" , key, []) - if firstTime: - aggRank = subkeyRank - curKey = subkey - firstTime = 0 - else: - curKey = self.makeAggregateSubkey(curKey, aggRank, subkey, subkeyRank) - aggRank = int((aggRank + subkeyRank)/2.0) - exec "aggValue = curKey." 
+ wxPart + "()" - return aggValue - - def getAggregateAttributes(self, parmHisto, timeRange, componentName, - wxType, dict, totalHours, totalPoints, subkeyPoints): - # Take only attributes that meet the threshold - attrList = [] - for key in dict.keys(): - sum = dict[key] - attrRank = float(sum)/(totalHours * totalPoints) * 100.0 - threshold = self.attribute_coverage_percentage( - parmHisto, timeRange, componentName, wxType, key) - if attrRank > threshold: - attrList.append(key) - return attrList - - def makeSubkeyList(self, weatherKey): - # Make sure subkeyList is a true list - length = len(weatherKey) - newList = [] - index = 0 - for subkey in weatherKey: - newList.append(subkey) - index = index + 1 - if index >= length: - break - return newList - - def weather_percentages(self, parmHisto, timeRange, componentName, args=None): - # Return a list of tuples: - # weather subkey, percentage of coverage - # All WeatherSubKeys are included - return self.getSubkey_percentages(parmHisto, timeRange, componentName, - dataType="WEATHER") - - def getSubkey_percentages(self, parmHisto, timeRange, componentName, - dataType="WEATHER", withAux=1): - # Gather all the Weather or Discrete SubKeys and percentages - numPoints = parmHisto.numberOfGridPoints() - percentageList = [] - - # Each histSample represents a grid - # To determine percentage for a weather value, we need - # to aggregate it over grids - for histSample in parmHisto.histoSamples(): - timeWeight = self.determineGridWeight(histSample, timeRange) - for histPair in histSample.histogram(): - count = float(histPair.count()) - gridPercentage = int((count/numPoints) * 100.0) - timeRangePercentage = gridPercentage * timeWeight - if dataType == "WEATHER": - subkey = WeatherSubKey.WeatherSubKey(histPair.value().weather().get(0)) - else: # DISCRETE - subkey = histPair.value().discrete().get(0) - if withAux == 0: - subkey = histPair.value().discrete().baseData(histPair.value().discrete().getSiteId(), subkey) - # See if subkey is 
already in list - found = 0 - for value, percentage in percentageList: - # If so, add it's percentage - if value == subkey: - found = 1 - index = percentageList.index((value,percentage)) - newPercentage = percentage + timeRangePercentage - percentageList[index] = (subkey, newPercentage) - if found == 0: - percentageList.append((subkey,timeRangePercentage)) - #print "percentage list", dataType, percentageList - return percentageList - - ######################################## - ## DISCRETE - - def discreteKey_coverage_percentage(self, parmHisto, timeRange, componentName, keyStr): - # Return the required coverage percentage for the given wxkey which will be - # compared to its "rank" i.e. the percentage of areal coverage over the time period. - return 1 - - def dominantDiscreteKeys_threshold(self, parmHisto, timeRange, componentName): - # This is the maximum number of discrete keys desired from the - # getDominantDiscreteKey method. - return 10 - - def dominantDiscreteValue(self, parmHisto, timeRange, componentName, args=None): - "Return the most common discrete value over the given timeRange" - primaryMethod = "self.getDominantDiscreteValue(parmHisto, timeRange, componentName, withAux=0)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def dominantDiscreteValue_withAux(self, parmHisto, timeRange, componentName, args=None): - "Return the most common discrete value over the given timeRange" - primaryMethod = "self.getDominantDiscreteValue(parmHisto, timeRange, componentName, withAux=1)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def rankedDiscreteValue(self, parmHisto, timeRange, componentName, args=None): - "Return the most common discrete value over the given timeRange" - primaryMethod = "self.getDominantDiscreteValue(parmHisto, timeRange, componentName, withAux=0, withRank=1)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def 
rankedDiscreteValue_withAux(self, parmHisto, timeRange, componentName, args=None): - "Return the most common discrete value over the given timeRange" - primaryMethod = "self.getDominantDiscreteValue(parmHisto, timeRange, componentName, withAux=1, withAux=1)" - return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) - - def getDominantDiscreteValue(self, parmHisto, timeRange, componentName, withRank=0, withAux=0): - # Return a list of dominant discrete subkeys - # If withRank, return (key, rank) pairs - # If withAux, include the auxillary field as part of the key - return self.getDominantValues(parmHisto, timeRange, componentName, - dataType="DISCRETE", withRank=withRank, withAux=withAux) - - def discrete_percentages(self, parmHisto, timeRange, componentName, args=None): - return self.getSubkey_percentages(parmHisto, timeRange, componentName, dataType="DISCRETE", - withAux=0) - - def discrete_percentages_withAux(self, parmHisto, timeRange, componentName, args=None): - return self.getSubkey_percentages(parmHisto, timeRange, componentName, dataType="DISCRETE", - withAux=1) - - def discreteTimeRangesByKey(self, parmHisto, timeRange, componentName, args=None): - return self.getDiscreteTimeRangesByKey( - parmHisto, timeRange, componentName, args=None, withAux=0) - - def discreteTimeRangesByKey_withAux(self, parmHisto, timeRange, componentName, args=None): - return self.getDiscreteTimeRangesByKey( - parmHisto, timeRange, componentName, args=None, withAux=1) - - def getDiscreteTimeRangesByKey(self, parmHisto, timeRange, componentName, args=None, - withAux=0): - # This method returns a list of (discreteSubkey, timeRange) pairs ordered in ascending - # order by timeRange and then by priority of discrete keys as defined in the - # serverConfig files. 
- - keyDict = {} - covDict = {} - totalHours = timeRange.duration()/3600 - totalPoints = parmHisto.numberOfGridPoints() - - for histSample in parmHisto.histoSamples(): - validTime = TimeRange.TimeRange(histSample.validTime()) - if self.temporalCoverage_flag( - parmHisto, timeRange, componentName, histSample) == 0: - continue - - hours = validTime.intersection(timeRange).duration()/3600 - if hours < 1: - continue - - for histPair in histSample.histogram(): - keyStr = histPair.value().discrete().get(0) # discrete value - if withAux == 0: - keyStr = histPair.value().discrete().baseData(histPair.value().discrete().getSiteId(), keyStr) - - if keyDict.has_key(keyStr): - keyDict[keyStr].append(validTime) - else: - keyDict[keyStr] = [validTime] - - # Keep a running total of the temporal and areal percentages - count = float(histPair.count()) - if covDict.has_key(keyStr): - keyRank = covDict[keyStr] - newRank = keyRank + hours*count - covDict[keyStr] = newRank - else: # new entry - covDict[keyStr] = hours*count - -## keyList = covDict.keys() -## for k in keyList: -## t, a, n = covDict[k] -## print "key:", k, "time%:", t, "area#:", a, "totalPoints:", n - - keyList = covDict.keys() - for keyStr in keyList: - # get the temporal and areal thresholds - keyRank = covDict[keyStr] - rank = int(round(float(keyRank)/(totalHours*totalPoints)*100.0)) - if rank < self.discreteKey_coverage_percentage( - parmHisto, timeRange, componentName, keyStr): - # remove the dict entry - del keyDict[keyStr] - - # glue the timeranges that share a common end/start time - keyList = [] - for k in keyDict.keys(): - trList = keyDict[k] - trList.sort() - tr = trList[0] - for i in range(1, len(trList)): - if tr.endTime() == trList[i].startTime(): - # keep extending the time, if TRs are contiguous - tr = TimeRange.TimeRange(tr.startTime(), trList[i].endTime()) - else: - # no match, append the tuple - keyList.append((k, tr)) - tr = trList[i] - - # Don't forget the last one - keyList.append((k, tr)) - - #print 
"discreteTimeRangesByKey keyList", keyList - return keyList - - def mostSignificantDiscreteValue(self, parmHisto, timeRange, componentName, withAux=0): - """Using mostSignificantDiscrete_keyOrder_dict and mostSignificantDiscrete_coveragePercentage_dict, - report the most significant discrete value for the given timeRange. If there is a tie, - report the most significant value. - """ - totalHours = 0 - totalPoints = parmHisto.numberOfGridPoints() - compositeNameUI = parmHisto.parmID().compositeNameUI() - - # Loop over all samples, which are for all grids and - # all keys on each of those grids. - # We will have just one entry per - # discrete key (with or without Aux value). - - #print "\n\nIn mostSignificantDiscreteValue: DataType, TimeRange", "DISCRETE", timeRange - #print "STEP 1 -- Aggregate per grid" - - subkeyTypeDict = {} - # STEP 1: - # For each discrete key found in the grids, - # gather its 'hours' of coverage and 'count' of points covered. - for histSample in parmHisto.histoSamples(): - validTime = TimeRange.TimeRange(histSample.validTime()) - if self.temporalCoverage_flag( - parmHisto, timeRange, componentName, histSample) == 0: - continue - # Get the number of hours inside the timeRange that this - # sample comes from (i.e., we can get a sample that lasts - # for 3 weeks - but only 1 hour of it is inside the - # timeRange - and we only want to rank it by the 1 hour - # inside the range) - # - hours = validTime.intersection(timeRange).duration()/3600 - if hours < 1: - continue - - totalHours += hours - - # Gather the subkey Types for this grid in subkeyTypeDict - # Each entry ends up being a list of tuples: (discreteKey, hours, count) - self.gatherSubkeyTypes( - parmHisto, timeRange, componentName, histSample, 'DISCRETE', hours, - subkeyTypeDict, withAux) - - # STEP 2: For each subkeyType, - # --determine an aggregate subkey and rank i.e. - # aggregate areal coverage over time percentage - # --compare the rank to coverage threshold. 
- #print "subkeyTypeDict", subkeyTypeDict - keyRankDict = {} # Holds: subkeyType: rank - for subkeyType in subkeyTypeDict.keys(): - #print "\nsubkeyType", subkeyType - subkeyList = subkeyTypeDict[subkeyType] - subkeyTypeRank = 0 - # Determine a subkeyType rank - subkeyTotalPoints = 0 - for subkey, hours, count in subkeyList: - #print " subkey, hours, count", subkey, hours, count - subkeyTotalPoints += count - subkeyTypeRank += hours * count - #print "total points =", subkeyTotalPoints - - #print "subkeyTypeRank =", subkeyTypeRank - #print "totalHours =", totalHours - #print "totalPoints =", totalPoints - # Determine rank for the subkeyType - rank = int(round(float(subkeyTypeRank)/(totalHours*totalPoints)*100.0)) - keyRankDict[subkeyType] = rank - #print "rank =", rank - - # Check to see if each subkeyType meets the required coverage percentage - keyOrderDict = self.mostSignificantDiscrete_keyOrder_dict(parmHisto, timeRange, compositeNameUI) - keyOrder = keyOrderDict[compositeNameUI] - mostSignificantSubkey = None - highestOrderIndex = None - for subkeyType in keyRankDict.keys(): - rank = keyRankDict[subkeyType] - thresholdDict = self.mostSignificantDiscrete_coveragePercentage_dict( - parmHisto, timeRange, componentName, subkeyType) - threshold = thresholdDict.get(compositeNameUI, 0) - #print "threshold =", threshold - flag = rank >= threshold - if not flag: - # Get another chance to pass - flag = self.checkPercentages( - parmHisto, timeRange, componentName, subkeyType, keyRankDict) - if flag: # This type meets the threshold criteria - if self.cleanOutEmptyValues(parmHisto, timeRange, componentName, "DISCRETE"): - # Don't save empty values - #print "Ignoring", subkeyType - continue - try: - orderIndex = keyOrder.index(subkeyType) - if highestOrderIndex is None or orderIndex > highestOrderIndex: - highestOrderIndex = orderIndex - mostSignificantSubkey = subkeyType - #print "Found higher significance key =", subkeyType - except: - pass - else: - #print "didn't make 
the cut", rank, subkeyType - pass - - #print "mostSignificantSubkey =", mostSignificantSubkey - return mostSignificantSubkey - - def mostSignificantDiscrete_coveragePercentage_dict(self, parmHisto, timeRange, componentName, keyStr): - """ Return the required coverage percentage for the given key which will be - compared to its "rank" i.e. the percentage of areal coverage over the time period. - """ - return { - "WindThreat": 5, - "FloodingRainThreat": 5, - "StormSurgeThreat": 5, - "TornadoThreat": 5, - } - - def mostSignificantDiscrete_keyOrder_dict(self, parmHisto, timeRange, componentName): - """ Returns a list of keys from least to most significant for a discrete type (componentName). """ - threatKeyOrder = [None, "None", "Elevated", "Mod", "High", "Extreme"] - return { - "WindThreat": threatKeyOrder, - "FloodingRainThreat": threatKeyOrder, - "StormSurgeThreat": threatKeyOrder, - "TornadoThreat": threatKeyOrder, - } - - - ######################################## - ## UTILITIES - - - def determineGridWeight(self, histSample, timeRange): - # Returns the ratio: histSample overlap duration / timeRange duration - validTime = TimeRange.TimeRange(histSample.validTime()) - if validTime.contains(timeRange): - gridWeight = 1.0 - # Determine time histSample intersects timeRange - else: - intersect = validTime.intersection(timeRange).duration() - try: - gridWeight = float(intersect)/timeRange.duration() - except: - gridWeight = 0.0 - return gridWeight - - def createStats(self, parmHisto, timeRange, componentName, args, primaryMethod): - # Call appropriate methods to produce statistics based on args which tell us - # how to report the statistics with respect to the time range. 
- # - if args is None: - exec "result = " + primaryMethod - return result - period = args[0] - if period == 0: - subRanges = self.getGridTimeRanges(parmHisto, timeRange) - else: - subRanges = self.divideRange(timeRange, period) - statsByRange = [] - for subRange in subRanges: - timeRange = subRange - exec "result = " + primaryMethod - # Handle no data - # If a subRange has no data continue - if result is None: - continue - statsByRange.append((result, subRange)) - return statsByRange - - def temporalCoverage_flag(self, parmHisto, timeRange, componentName, - histSample): - # Return 1 if the histSample time range is completely included in the timeRange - # OR the histSample time range sufficiently covers the timeRange - # i.e. meets BOTH the temporalCoverage_percentage and temporalCoverage_hours - # requirements. - - # Sub-methods: - # temporalCoverage_dict - # temporalCoverage_percentage - # temporalCoverage_hours - # temporalCoverage_hours_dict - # - - # njensen: I changed this to act directly on the java time ranges since they aren't - # part of the return value and it's faster to skip creating python TimeRanges - javaValidTime = histSample.validTime() - javaTimeRange = timeRange.toJavaObj() - compositeNameUI = parmHisto.getCompositeNameUI() - - # Is the histSample time range completely included in the timeRange? 
- #if timeRange.contains(validTime): - if javaTimeRange.contains(javaValidTime): - result = 1 - # Look at intersection of histSample and timeRange - else: - covDict = self.temporalCoverage_dict(parmHisto, timeRange, componentName) - if compositeNameUI in covDict.keys(): - percentage = covDict[compositeNameUI] - else: - percentage = self.temporalCoverage_percentage( - parmHisto, timeRange, componentName) - hoursDict = self.temporalCoverage_hours_dict( - parmHisto, timeRange, componentName) - if compositeNameUI in hoursDict.keys(): - coverageHours = hoursDict[compositeNameUI] - else: - coverageHours = self.temporalCoverage_hours( - parmHisto, timeRange, componentName) - #intersect = javaValidTime.intersection(javaTimeRange).getDuration() - intersect = FormatterUtil.getTimeRangeIntersectionDuration(javaValidTime, javaTimeRange) - # The intersection should be at least the percentage of the timeRange - # AND at least the number of coverageHours - fullPeriod = javaTimeRange.getDuration() - try: - if fullPeriod > 0: - percentIn = float(intersect)/fullPeriod - else: - percentIn = 0.0 - if percentIn > 0 and percentIn >= percentage/100.0: - result = 1 - else: # saying no - not enough is inside timeRange" - result = 0 - except: # saying no - could not figure percentIn" - result = 0 - # If temporal coverage percentage requirement met, - # check temporal coverage hours requirement - if result == 1: - intersectHours = intersect/3600 - trHours = fullPeriod/3600 - if intersectHours >= coverageHours: - result = 1 - elif coverageHours >= trHours and intersectHours == trHours: - result = 1 - else: - result = 0 - return result - - def getAccumSum(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): - "Return the cummulative sum over the given time period" - minVal, maxVal, sumVal = parmHisto.minMaxSum() - return sumVal - - def getAccumMinMax(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): - "Return the cummulative min/max over the given time period" - 
minVal, maxVal, sumVal = parmHisto.minMaxSum() - return minVal, maxVal - - def getModAccumSum(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): - "Return the moderated cummulative sum over the given time period" - minLimit, maxLimit = self.getModeratedLimits(parmHisto, timeRange, componentName) - minVal, maxVal, sumVal = parmHisto.moderatedMinMaxSum(minLimit, maxLimit) - return sumVal - - def getModAccumMinMax(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): - "Return the modereted cummulative min/max over the given time period" - minLimit, maxLimit = self.getModeratedLimits(parmHisto, timeRange, componentName) - minVal, maxVal, sumVal = parmHisto.moderatedMinMaxSum(minLimit, maxLimit) - return minVal, maxVal - - def getStdDevAccumSum(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): - "Return the standard deviation sum over the given time period" - minLimit, maxLimit = self.getStdDevLimits(parmHisto, timeRange, componentName) - minVal, maxVal, sumVal = parmHisto.stdDevMinMaxSum(minLimit, maxLimit) - return sumVal - - def getStdDevAccumMinMax(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): - "Return the standard deviation min/max over the given time period" - minLimit, maxLimit = self.getStdDevLimits(parmHisto, timeRange, componentName) - minVal, maxVal, sumVal = parmHisto.stdDevMinMaxSum(minLimit, maxLimit) - return minVal, maxVal - - def getAverage(self, dataType, parmHisto, timeRange, componentName, firstOnly = 0): - "Return the time weighted average values over the given time period" - totValue = 0.0 - totWeight = 0.0 - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - if histSample.numOfPoints() == 0: - return None - - avg = histSample.average(True) - - # njensen: faster to do this without wrapping java objects - validTime = histSample.validTime() - weight = 
FormatterUtil.getTimeRangeIntersectionDuration(validTime, timeRange.toJavaObj()) - - if dataType == self.SCALAR(): - value = avg.scalar() - elif dataType == self.VECTOR(): - value = avg.magnitude() - - # sum weighted averages - totValue = totValue + weight * value - totWeight = totWeight + weight - - if firstOnly == 1: - break - - if totWeight > 0.0: - result = totValue / totWeight - else: - return None - - if dataType == self.VECTOR(): - dir = self.getDominantDirection(dataType, parmHisto, timeRange, - componentName) - return result, dir - - return result - - def getMinMax(self, dataType, parmHisto, timeRange, componentName, - firstOnly = 0): - "Return the minimum and maximum values over the given time period" - firstTime = 1 - minValue = 0.0 - maxValue = 0.0 - minResult = 0.0 - maxResult = 0.0 - noData = 1 - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - if histSample.numOfPoints() == 0: - return None - noData = 0 - - min = histSample.absoluteMin() - max = histSample.absoluteMax() - if dataType == self.SCALAR(): - minValue = min.scalar() - maxValue = max.scalar() - elif dataType == self.VECTOR() or dataType == self.MAGNITUDE(): - minValue = min.magnitude() - maxValue = max.magnitude() - if firstTime == 1: - firstTime = 0 - minResult = minValue - maxResult = maxValue - else: - if minValue < minResult: - minResult = minValue - if maxValue > maxResult: - maxResult = maxValue - if firstOnly == 1: - break - - if noData == 1: - return None - if dataType == self.VECTOR(): - dir = self.getDominantDirection(dataType, parmHisto, - timeRange, componentName) - return (minResult, maxResult), dir - - return minResult, maxResult - - def getStdDevAvg(self, dataType, parmHisto, timeRange, componentName, - firstOnly = 0): - "Return the time wieghted average values over the given time period" - # get the stdDev limits from the stdDev dictionary - 
minStd, maxStd = self.getStdDevLimits(parmHisto, timeRange, componentName) - totValue = 0.0 - totWeight = 0.0 - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - if histSample.numOfPoints() == 0: - return None - - # In AWIPS1, stdDevAvg utilized a default value of True for - # separateMagDir argument - avg = histSample.stdDevAvg(minStd, maxStd, True) - validTime = TimeRange.TimeRange(histSample.validTime()) - weight = validTime.intersection(timeRange).duration() - - if dataType == self.SCALAR(): - value = avg.scalar() - elif dataType == self.VECTOR(): - value = avg.magnitude() - - # sum weighted averages - totValue = totValue + weight * value - totWeight = totWeight + weight - - if firstOnly == 1: - break - - if totWeight > 0.0: - result = totValue / totWeight - else: - return None - - if dataType == self.VECTOR(): - dir = self.getDominantDirection(dataType, parmHisto, timeRange, - componentName) - return result, dir - - return result - - def getStdDevMinMax(self, dataType, parmHisto, timeRange, componentName, - firstOnly = 0): - "Return the minimum and maximum values over the given time period" - firstTime = 1 - minValue = 0.0 - maxValue = 0.0 - minResult = 0.0 - maxResult = 0.0 - noData = 1 - # get the stdDev limits from the stdDev dictionary - minStd, maxStd = self.getStdDevLimits(parmHisto, timeRange, componentName) - - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - if histSample.numOfPoints() == 0: - return None - noData = 0 - - min = histSample.stdDevMin(minStd) - max = histSample.stdDevMax(maxStd) - if dataType == self.SCALAR(): - minValue = min.scalar() - maxValue = max.scalar() - elif dataType == self.VECTOR(): - minValue = min.magnitude() - maxValue = max.magnitude() - if firstTime == 1: 
- firstTime = 0 - minResult = minValue - maxResult = maxValue - else: - if minValue < minResult: - minResult = minValue - if maxValue > maxResult: - maxResult = maxValue - if firstOnly == 1: - break - - if noData == 1: - return None - if dataType == self.VECTOR(): - dir = self.getDominantDirection(dataType, parmHisto, - timeRange, componentName) - return (minResult, maxResult), dir - - return minResult, maxResult - - def getModeratedAvg(self, dataType, parmHisto, timeRange, componentName, - firstOnly = 0): - "Return the time weighted average values over the given time period" - # get the stdDev limits from the stdDev dictionary - minMod, maxMod = self.getModeratedLimits(parmHisto, timeRange, componentName) - totValue = 0.0 - totWeight = 0.0 - noData = 1 - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - if histSample.numOfPoints() == 0: - return None - noData = 0 - - avg = histSample.moderatedAverage(minMod, maxMod, True) - validTime = TimeRange.TimeRange(histSample.validTime()) - weight = validTime.intersection(timeRange).duration() - - if dataType == self.SCALAR(): - value = avg.scalar() - elif dataType == self.VECTOR(): - value = avg.magnitude() - - # sum weighted averages - totValue = totValue + weight * value - totWeight = totWeight + weight - - if firstOnly == 1: - break - - if noData == 1: - return None - if totWeight > 0.0: - result = totValue / totWeight - else: - return None - - if dataType == self.VECTOR(): - dir = self.getDominantDirection(dataType, parmHisto, timeRange, - componentName) - return result, dir - - return result - - def getModeratedMinMax(self, dataType, parmHisto, timeRange, componentName, - firstOnly = 0): - "Return the minimum and maximum values over the given time period" - firstTime = 1 - minValue = 0.0 - maxValue = 0.0 - minResult = 0.0 - maxResult = 0.0 - noData = 1 - # get the stdDev limits from 
the stdDev dictionary - minMod, maxMod = self.getModeratedLimits(parmHisto, timeRange, componentName) - - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - if histSample.numOfPoints() == 0: - return None - noData = 0 - - min = histSample.moderatedMin(minMod) - max = histSample.moderatedMax(maxMod) - if dataType == self.SCALAR(): - minValue = min.scalar() - maxValue = max.scalar() - elif dataType == self.VECTOR(): - minValue = min.magnitude() - maxValue = max.magnitude() - if firstTime == 1: - firstTime = 0 - minResult = minValue - maxResult = maxValue - else: - if minValue < minResult: - minResult = minValue - if maxValue > maxResult: - maxResult = maxValue - if firstOnly == 1: - break - - if noData == 1: - return None - if dataType == self.VECTOR(): - dir = self.getDominantDirection(dataType, parmHisto, timeRange, - componentName) - return (minResult, maxResult), dir - - return minResult, maxResult - - def getMaxAvg(self, dataType, parmHisto, timeRange, componentName): - "Return the maximum average value over the given time period" - firstTime = 1 - maxValue = 0.0 - maxResult = 0.0 - noData = 1 - - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - if histSample.numOfPoints() == 0: - return None - noData = 0 - - maxV = histSample.average() - if dataType == self.SCALAR(): - maxValue = maxV.scalar() - elif dataType == self.VECTOR(): - maxValue = maxV.magnitude() - if firstTime == 1: - firstTime = 0 - maxResult = maxValue - else: - if maxValue > maxResult: - maxResult = maxValue - - if noData == 1: - return None - if dataType == self.VECTOR(): - dir = self.getDominantDirection(dataType, parmHisto, timeRange, - componentName) - return (maxResult, dir) - - return maxResult - - def getStdDevMaxAvg(self, 
dataType, parmHisto, timeRange, componentName): - "Return the maximum average value filtering by standard deviation" - firstTime = 1 - maxValue = 0.0 - maxResult = 0.0 - noData = 1 - # get the stdDev limits from the stdDev dictionary - minStd, maxStd = self.getStdDevLimits(parmHisto, timeRange, componentName) - - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - if histSample.numOfPoints() == 0: - return None - noData = 0 - - maxV = histSample.stdDevAvg(minStd, maxStd, True) - if dataType == self.SCALAR(): - maxValue = maxV.scalar() - elif dataType == self.VECTOR(): - maxValue = maxV.magnitude() - if firstTime == 1: - firstTime = 0 - maxResult = maxValue - else: - if maxValue > maxResult: - maxResult = maxValue - - if noData == 1: - return None - if dataType == self.VECTOR(): - dir = self.getDominantDirection(dataType, parmHisto, timeRange, - componentName) - return (maxResult, dir) - - return maxResult - - def getModeratedMaxAvg(self, dataType, parmHisto, timeRange, componentName): - "Return the maximum average value filtering by percentage" - firstTime = 1 - maxValue = 0.0 - maxResult = 0.0 - noData = 1 - # get the moderated limits from the stdDev dictionary - minMod, maxMod = self.getModeratedLimits(parmHisto, timeRange, componentName) - - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - if histSample.numOfPoints() == 0: - return None - noData = 0 - - maxV = histSample.moderatedAverage(minMod, maxMod, True) - if dataType == self.SCALAR(): - maxValue = maxV.scalar() - elif dataType == self.VECTOR(): - maxValue = maxV.magnitude() - if firstTime == 1: - firstTime = 0 - maxResult = maxValue - else: - if maxValue > maxResult: - maxResult = maxValue - - if noData == 1: - return None - if dataType == 
self.VECTOR(): - dir = self.getDominantDirection(dataType, parmHisto, timeRange, - componentName) - return (maxResult, dir) - - return maxResult - - def getVectorAvg(self, histPairs): - # Temporary method to emulate new HistSample.average(0) - # to be supplied in next release - uSum = 0.0 - vSum = 0.0 - totCount = 0 - for histPair in histPairs: - count = histPair.count() - totCount = totCount + count - val = histPair.value() - uw, vw = self.MagDirToUV(val.magnitude(), val.direction()) - uSum = uSum + uw * count - vSum = vSum + vw * count - - # calculate the average wind vector - if totCount > 0: - u = uSum / float(totCount) - v = vSum / float(totCount) - mag, dir = self.UVToMagDir(u, v) - mag = int(mag + 0.5) - dir = int(dir + 0.5) - return HistValue(float(mag), float(dir)) - else: - return HistValue() - - def extractMinMax(self, minMaxList, minOrMax, dataType=None): - # returns the min or max value in the list depending on minOrMax - # minMaxList is a list returned from createStats - # minOrMax can have the values "Min" or "Max" and nothing else - - if dataType == self.VECTOR(): - return self.extractVectorMinMax(minMaxList, minOrMax) - - # sanity checks - must be a list or tuple - if type(minMaxList) != types.ListType and \ - type(minMaxList) != types.TupleType: - return None - - # minOrMax must be "Min" or "Max" - if not (minOrMax == "Min" or minOrMax == "Max"): - return None - - if type(minMaxList) is types.TupleType: - if minOrMax == "Min": - return minMaxList[0] # return min value - elif minOrMax == "Max": - return minMaxList[1] # return max value - else: - print "extractMinMax error - Bad min/max string:", minOrMax - print "Must be: 'Min' or 'Max'. 
" - return None - - # check for empty list - if len(minMaxList) <= 0: - return None - - newList = [] - # loop through and find the min and max - for (vMin, vMax), timeRange in minMaxList: - if minOrMax == "Min": - value = vMin - else: - value = vMax - newList.append((value, timeRange)) - return newList - - def extractVectorMinMax(self, minMaxList, minOrMax): - # returns the min or max value in the list depending on minOrMax - # minMaxList is a list returned from createStats - # minOrMax can have the values "Min" or "Max" and nothing else - - # sanity checks - must be a list or tuple - if type(minMaxList) != types.ListType and \ - type(minMaxList) != types.TupleType: - return None - - # minOrMax must be "Min" or "Max" - if not (minOrMax == "Min" or minOrMax == "Max"): - return None - - if type(minMaxList) is types.TupleType: - (minMag, maxMag), dir = minMaxList - if minOrMax == "Min": - mag = minMag # return min value - elif minOrMax == "Max": - mag = maxMag # return max value - else: - print "extractMinMax error - Bad min/max string:", minOrMax - print "Must be: 'Min' or 'Max'. 
" - return None - return (mag, dir) - - # check for empty list - if len(minMaxList) <= 0: - return None - - newList = [] - # loop through and find the min and max - for ((vMin, vMax), dir), timeRange in minMaxList: - if minOrMax == "Min": - value = vMin - else: - value = vMax - newList.append(((value, dir), timeRange)) - return newList - - def getDominantDirection(self, dataType, parmHisto, timeRange, componentName): - # returns the dominant direction according to "vectorDirection_algorithm" - # which can be "Average" or "MostFrequent" - - if not dataType == self.VECTOR(): - return None - if self.vectorDirection_algorithm(parmHisto, timeRange, componentName) == "Average": - return self.getAverageDirection(parmHisto, timeRange, componentName) - else: #Most Frequent - return self.getMostFrequentDirection(parmHisto, timeRange, componentName) - - def getAverageDirection(self, parmHisto, timeRange, componentName): - # returns the dominant direction calculated by assuming a mag of 1 always - uSum = 0.0 - vSum = 0.0 - totCount = 0 - weight = 0 - totWeight = 0 - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, histSample) == 0: - continue - # sum u and v components assigning a magnitude one 1 always - histPairs = histSample.histogram() - for histPair in histPairs: - validTime = TimeRange.TimeRange(histSample.validTime()) - weight = validTime.intersection(timeRange).duration() - totWeight = totWeight + weight - count = histPair.count() - totCount = totCount + count - uw, vw = self.MagDirToUV(1.0, histPair.value().direction()) - uSum = uSum + (uw * count) * weight - vSum = vSum + (vw * count) * weight - - # calculate the average wind vector - if totCount > 0: - u = uSum / (float(totCount) * totWeight) - v = vSum / (float(totCount) * totWeight) - mag, dir = self.UVToMagDir(u, v) - return dir - else: - return None - - def getMostFrequentDirection(self, parmHisto, timeRange, componentName): - # returns the most 
frequent direction binned to 8-point numerical direction - binDict = {} - totWeight = 0.0 - #print "\nGetting most frequent", timeRange - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, histSample) == 0: - continue - numOfPoints = histSample.numOfPoints() - if numOfPoints == 0: - return None - histPairs = histSample.histogram() - for histPair in histPairs: - validTime = TimeRange.TimeRange(histSample.validTime()) - weight = validTime.intersection(timeRange).duration() - weight = weight/float(timeRange.duration()) * 100.0 - totWeight += weight - count = float(histPair.count()) - binnedDir = self.binDir(histPair.value().direction()) - #print "dir, binnedDir", histPair.value().direction(), binnedDir - percent = count/numOfPoints * weight - if binDict.has_key(binnedDir): - binDict[binnedDir] += percent - else: - binDict[binnedDir] = percent - - if totWeight == 0.0: - return None - - # Pick the most frequent direction - maxFreq = 0 - mostFreqDir = None - for dir in binDict.keys(): - freq = binDict[dir] - #print "dir, freq", dir, freq - if freq > maxFreq: - maxFreq = freq - mostFreqDir = dir - #print "returning", mostFreqDir - return mostFreqDir - - def binDir(self, dir): - # Return the "bin" direction value for the given direction - for textDir, low, high in self.dirList(): - if dir >= low and dir < high: - # Handle N - if textDir == "N": - return 0 - else: - return int(low+high)/2.0 - - - def splitRange(self, timeRange, numPeriods=2): - "Split the timeRange into the given number of periods and return the resulting list of time ranges" - - periods = [] - duration = (timeRange.endTime()-timeRange.startTime())/numPeriods - startTime = timeRange.startTime() - for i in range(numPeriods): - endTime = startTime + duration - newRange = TimeRange.TimeRange(startTime, endTime) - periods.append(newRange) - startTime = endTime - return periods - - def getGridTimeRanges(self, parmHisto, timeRange): - # Return the set 
of timeRanges that overlap the specified timeRange - # If a histSample partially overlaps, trim the timeRange to the - # specified timeRange's startTime() or endTime() - subRanges = [] - for histSample in parmHisto.histoSamples(): - tr = TimeRange.TimeRange(histSample.validTime()) # get the histSample timeRange - overlap = timeRange.intersection(tr).duration() # calc overlap - if overlap == 0: # no overlap -> skip to next grid - continue - if overlap == tr.duration(): # the whole grid is included - subRanges.append(tr) - elif timeRange.startTime() > tr.startTime(): - newTR = TimeRange.TimeRange(timeRange.startTime(), tr.endTime()) - subRanges.append(newTR) - elif timeRange.endTime() < tr.endTime(): - newTR = TimeRange.TimeRange(tr.startTime(), timeRange.endTime()) - subRanges.append(newTR) - - return subRanges - - def getMedianHistPair(self, dataType, parmHisto, timeRange, componentName): - # Return the median HistPair over the timeRange - # Note: There could be multiple grids (histSamples) in this timeRange - # over which we are sampling. - # - # we can't figure a median if there are no samples - # - if len(parmHisto.histoSamples()) == 0: - return None - # - # we can only figure the median based on the scalar value, - # or, for vectors, the magnitude or direction. Other types - # are invalid, and we have to return None. - # - if ((dataType!=self.SCALAR()) and (dataType!=self.MAGNITUDE()) - and (dataType!=self.DIRECTION())): - return None - # - # Get the samples inside the time range, keeping track - # of the values along the way (to sort later). Since - # there may be several grids, each with the same values - # that cross the desired timeRange, we need to add to - # the saved histogram counts when we encounter such - # values. Make a key with consistent floating point - # numbers so that the sorting works right later. 
- # - - totalCount=0 - pairDict = {} - compositeNameUI = parmHisto.getCompositeNameUI() - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag( - parmHisto, timeRange, componentName, histSample) == 0: - continue - # calc the time weight - validTime = TimeRange.TimeRange(histSample.validTime()) - weight = validTime.intersection(timeRange).duration() - for histPair in histSample.histogram(): - tempCount=histPair.count() - tempValue=histPair.value() - tempKey=0.0 - if dataType == self.SCALAR(): - tempKey = tempValue.scalar() - elif dataType == self.MAGNITUDE(): - tempKey = tempValue.magnitude() - elif dataType == self.DIRECTION(): - tempKey = tempValue.direction() - valuestring="%020.10f" % float(tempKey) - totalCount = totalCount + tempCount * weight - if pairDict.has_key(valuestring): - pairDict[valuestring].incrementCount(int(tempCount * weight)) - else: - # njensen: I added the clone(), because otherwise we are incrementing - # the original histpair reference, which corrupts the statistics when - # the same method is called against a different subTimeRange from - # createStats - pairDict[valuestring] = histPair.clone() - pairDict[valuestring].incrementCount(int((tempCount * weight) - 1)) - # - # if no samples landed within the timeRange then we have - # to return a median of None - # - if totalCount == 0: - return None - # - # now we know the total number of pairs in the timeRange - # so we figure out the middle pair number and then - # go through the pairs in numerical order until we get - # to that count value - # - medianNumber=int(totalCount/2.0) - odd = 0 - if medianNumber%2 == 1: - medianNumber == medianNumber + 1 - odd = 1 - count=0 - names=pairDict.keys() - names.sort() - for valueStr in names: - addCount=pairDict[valueStr].count() - if ((count+addCount)>=medianNumber): - if odd == 1: - return pairDict[valueStr] - elif ((count+addCount)) == medianNumber: - # need to take mean of this value and the next - histPair1 = 
pairDict[valueStr] - index1 = names.index(valueStr) - if index1 < len(names)-1: - valueStr2 = names[index1+1] - histPair2 = pairDict[valueStr2] - return self.getHistPairMean(dataType, histPair1, histPair2) - else: - return histPair1 - else: - return pairDict[valueStr] - count+=addCount - return None - - def getHistPairMean(self, dataType, histPair1, histPair2): - # Return a HistPair that represents the mean of the two histPair values - # for the given dataType which can be "SCALAR", "MAGNITUDE", "DIRECTION" - if dataType == self.SCALAR(): - val1 = histPair1.value().scalar() - val2 = histPair2.value().scalar() - avg = float(val1 + val2)/2.0 - value = HistValue(avg) - return HistPair(value) - - elif dataType == self.MAGNITUDE(): - val1 = histPair1.value().magnitude() - val2 = histPair2.value().magnitude() - avg = float(val1 + val2)/2.0 - value = HistValue(avg, 0.0) - return HistPair(value) - - else: #dataType == self.DIRECTION(): - dir1 = histPair1.value().direction() - dir2 = histPair2.value().direction() - u1, v1 = self.MagDirToUV(1.0, dir1) - u2, v2 = self.MagDirToUV(1.0, dir2) - u = (u1 + u2)/2 - v = (v1 + v2)/2 - mag, dir = self.UVToMagDir(u,v) - value = HistValue(mag, dir) - return HistPair(value) - - def getModeHistPair(self, dataType, parmHisto, timeRange, componentName): - # Return the most common HistPair over the timeRange - # Note: There could be multiple grids (histSamples) in this timeRange - # over which we are sampling. 
- - if len(parmHisto.histoSamples()) == 0: - return None - maxCount = 0 - modePair = None - compositeNameUI = parmHisto.getCompositeNameUI() - - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - - # calc the time weight - validTime = TimeRange.TimeRange(histSample.validTime()) - weight = validTime.intersection(timeRange).duration() - for histPair in histSample.histogram(): - if (histPair.count() * weight) > maxCount: - maxCount = histPair.count() * weight - modePair = histPair - - return modePair - - def getMedian(self, dataType, parmHisto, timeRange, componentName): - # Return a range around the median - # For vector also return an average direction over that range. - - if len(parmHisto.histoSamples()) == 0: - return None - - if dataType == self.VECTOR(): - # For VECTOR, base the median on the magnitude - pairType = self.MAGNITUDE() - else: - pairType = dataType - - # Determine the median - medianPair = self.getMedianHistPair(pairType, parmHisto, timeRange, componentName) - if medianPair is None: - return None - if dataType == self.VECTOR(): - return (medianPair.value().magnitude(), medianPair.value().direction()) - else: - return medianPair.value().scalar() - - def getMedianRange(self, dataType, parmHisto, timeRange, componentName): - # Return a range around the median - # For vector also return an average direction over that range. 
- - if len(parmHisto.histoSamples()) == 0: - return None - - if dataType == self.VECTOR(): - # For VECTOR, base the median on the magnitude - pairType = self.MAGNITUDE() - else: - pairType = dataType - - # Determine the median - medianPair = self.getMedianHistPair(pairType, parmHisto, timeRange, componentName) - if medianPair is None: - return None -# print "\nGetting Median Range" - return self.getRange(dataType, medianPair, parmHisto, timeRange, componentName, "Median") - - def getMode(self, dataType, parmHisto, timeRange, componentName): - # Return a range around the median - # For vector also return an average direction over that range. - - if len(parmHisto.histoSamples()) == 0: - return None - - if dataType == self.VECTOR(): - # For VECTOR, base the median on the magnitude - pairType = self.MAGNITUDE() - else: - pairType = dataType - - # Determine the median - modePair = self.getModeHistPair(pairType, parmHisto, timeRange, componentName) - if modePair is None: - return None -# print "\nGetting Median Range" - if dataType == self.VECTOR(): - return modePair.value().magnitude(), modePair.value().direction() - else: - return modePair.value().scalar() - - def getModeRange(self, dataType, parmHisto, timeRange, componentName): - # Return a range around the mode. - # For vector also return an average direction over that range. 
- - if len(parmHisto.histoSamples()) == 0: - return None - - if dataType == self.VECTOR(): - pairType = self.MAGNITUDE() - else: - pairType = dataType - - # Determine the median - modePair = self.getModeHistPair(pairType, parmHisto, timeRange, componentName) - if modePair is None: - return None - return self.getRange(dataType, modePair, parmHisto, timeRange, componentName, "Mode") - - def getMaxMode(self, dataType, parmHisto, timeRange, componentName): - # Return the maximum mode over all grids - - if len(parmHisto.histoSamples()) == 0: - return None - - compositeNameUI = parmHisto.getCompositeNameUI() - incDict = self.maxMode_increment_dict(parmHisto, timeRange, componentName) - if incDict.has_key(compositeNameUI): - binRes = incDict[compositeNameUI] - else: - binRes = 10 # default value - - maxMode = -99999 - for histSample in parmHisto.histoSamples(): - # Ignore samples that are less than the temporal threshold - if self.temporalCoverage_flag( - parmHisto, timeRange, componentName, histSample) == 0: - continue - mode = histSample.mostCommonValueBinned(float(binRes)).scalar() - if mode > maxMode: - maxMode = mode - - return maxMode - - def getRange(self, dataType, basePair, parmHisto, timeRange, - componentName, rangeBase="Median"): - # Return a range around the basePair. - # For vector also return an average direction over that range. 
- - compositeNameUI = parmHisto.getCompositeNameUI() - # Determine deviation - deviation = self.getDeviation(dataType, compositeNameUI, basePair) - # Take pairs that are within deviation of basePair - # and determine min, max and number of points covered by range - if dataType == self.SCALAR(): - scalarflag = 1 - baseValue = basePair.value().scalar() - else: - scalarflag = 0 - baseValue = basePair.value().magnitude() - - min = baseValue - max = baseValue - pairs = [] - samplesInRange = 0 - totalCount = 0 - - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - samplesInRange = samplesInRange + 1 - for histPair in histSample.histogram(): - if scalarflag: - histValue = histPair.value().scalar() - else: - histValue = histPair.value().magnitude() - #print "histvalue, baseValue, deviation", \ - # histValue, baseValue, deviation - if histValue >= baseValue - deviation and \ - histValue <= baseValue + deviation: - pairs.append(histPair) - if histValue < min: - min = histValue - if histValue > max: - max = histValue - totalCount = totalCount + histPair.count() - - # If vector, find average direction for pairs in range - if dataType == self.VECTOR(): - avgValue = self.getVectorAvg(pairs) - direction = avgValue.direction() - return ((min, max), direction) - else: - return (min, max) - - def getDeviation(self, dataType, compositeNameUI, histPair): - # Returns a deviation around the median to include in range - if dataType == self.VECTOR(): - mag = histPair.value().magnitude() - if mag < 15: - return 3 - elif mag < 30: - return 5 - else: - return 8 - else: - return 10 - - def UVToMagDir(self, u, v): - # Converts u, v to magnitude, direction - RAD_TO_DEG = 57.296083 - speed = sqrt(u * u + v * v) - dir = atan2(u, v) * RAD_TO_DEG - while dir < 0.0: - dir = dir + 360.0 - while dir >= 360.0: - dir = dir - 360.0 - #print "Speed, dir ", speed, dir - return (speed, dir) - - def 
MagDirToUV(self, mag, dir): - #Converts magnitude, direction to u, v - DEG_TO_RAD = 0.017453292 - uw = sin(dir * DEG_TO_RAD) * mag - vw = cos(dir * DEG_TO_RAD) * mag - return (uw, vw) - - def convertAnalysisList(self, analysisList): - # Replace text string methods with SampleAnalysis methods - newList = [] - for entry in analysisList: - if len(entry) == 2: - element, method = entry - if type(method) == types.StringType: - exec "method = self."+method - newList.append((element,method)) - if len(entry) == 3: - element, method, args = entry - if type(method) == types.StringType: - exec "method = self."+method - newList.append((element,method, args)) - return newList - - def bin_dict(self, parmHisto, timeRange, componentName): - # Bins for binnedPercent. Bins are inclusive. - return { - "Sky": [(0,89),(90, 100)], - "PoP": [(0,4), (5,14), (15,24), (25,34), (35,44), (45,54), - (55,64), (65,74), (75,84), (85,94), (95,100)], - "LAL": [(1,1), (2,2), (3,3), (4,4), (5,5), (6,6)], - } - - def getBinnedPercent(self, dataType, parmHisto, timeRange, componentName, firstOnly = 0): - "Returns a list of tuples representing bins and corresponding percentages of values in each bin" - binsDict = self.bin_dict(parmHisto, timeRange, componentName) - try: - bins = binsDict[parmHisto.getCompositeNameUI()] - except: - return None - - # Returns a list of tuples - # Each tuple is of the form: - # (lowBin_value, highBin_value, percent) - # lowBin_value and highBin_value are the inclusive values of the bin - # percent is the percentage of data values in that bin - - percents = [] - for bin in bins: - percents.append(0.0) - numBins = len(bins) - - totWeight = 0.0 - for histSample in parmHisto.histoSamples(): - if self.temporalCoverage_flag(parmHisto, timeRange, componentName, - histSample) == 0: - continue - # return None if no histSample pairs - numOfPoints = histSample.numOfPoints() - if numOfPoints == 0: - return None - - validTime = TimeRange.TimeRange(histSample.validTime()) - weight = 
validTime.intersection(timeRange).duration() - weight = weight/float(timeRange.duration()) * 100.0 - totWeight = totWeight + weight - - for histPair in histSample.histogram(): - if dataType == self.SCALAR(): - value = histPair.value().scalar() - elif dataType == self.VECTOR(): - value = histPair.value().magnitude() - count = float(histPair.count()) - percent = count/numOfPoints * weight - - # Find the bin for this histPair value - for i in range(numBins): - low,high = bins[i] - if value >= low and value <= high: - # add to percentage for this bin - percents[i] += percent - - if totWeight == 0.0: - return None - - # Glue the bins and the percents together - newBins = [] - for i in range(numBins): - low, high = bins[i] - newBins.append((low, high, percents[i])) - #print "returning bins", newBins, timeRange - return newBins +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# SampleAnalysis.py +# Class for producing summary statistics from Sampler data. +# Typically used for Text Product generation. 
+# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import string, sys, types + +import logging +import TimeRange, AbsTime, WeatherSubKey, JUtil +from math import * +from com.raytheon.viz.gfe.sampler import HistValue, HistPair +from com.raytheon.viz.gfe.textformatter import FormatterUtil + + ## For complete documentation on the analysis methods available in this class, + ## refer to the Text Product User Guide. + + ## Utility Methods: + + ## The following methods return an integer that indicated the expected + ## return type.... + ## SCALAR + ## MAGNITUDE + ## DIRECTION + ## VECTOR + ## WEATHER + ## + ## getModeratedLimits + ## returns the min and max percentages allowed for all moderated methods + ## getStdDevLimits + ## returns the min and max standard deviation limits for all stdDev methods + ## getAccumSum + ## returns the accumulated sum for the specified time period + ## getAccumMinMax + ## return the min and max for the specified time period + ## getAverage + ## returns the absolute average + ## getMinMax + ## returns the absolute minimum and maximum + ## getStdDevAvg + ## returns the average after filtering data based on standard deviation + ## getStdDevMinMax + ## returns the min and max values after filtering based in standard deviation + ## getModeratedAvg + ## returns the average after filtering based on percentage + ## getModeratedMinMax + ## returns the min and max after filtering based on percentage + ## getVectorAvg + ## returns the absolute vector average with no filtering + ## getDominantDirection + ## returns either the Average or Most Frequent direction based on: + ## vectorDirection_algorithm + ## getAverageDirection + ## returns the average direction over the specified time period + ## getMostFrequentDirection + ## returns the most frequent direction over the specified time period + ## temporalCoverage_flag + ## returns 1 
if the specified grid sufficiently overlaps the sample time period + ## getDominantWx + ## returns the dominant weather over the sample period + ## getDominantDiscreteValue + ## returns the dominant discrete values over the sample period + ## getDominant -- handles both getDominantWx and getDominantDiscreteValue + ## getSubkey_percentages -- gather all the Weather or Discrete SubKeys and percentages + ## dirList + ## converts a numerical direction into a string (e.g., N, SE) + ## determineGridWeight + ## returns the time wieght for a particular grid ( 1.0 = full weight) + ## createStats + ## reports statistics based on the method specified + ## splitRange(timeRange, numPeriods) + ## splits a timerange into the specified number of periods and returns a + ## timeRange list + ## getGridTimeRanges + ## returns the list of timeRanges after splitting along sample time periods + ## divideRange(timeRange, hours) + ## splits a timeRange into sub periods each with the duration specified + ## getMode + ## returns a range around the median indicating the most frequent values + ## getMedian + ## returns median value over given timeRange + ## getMedianRange + ## returns 2-value range chosen around median, uses getRange + ## getMedianHistPair + ## returns median histPair over given timeRange + ## getHistPairMean + ## given two HistPairs returns a HistPair representing the mean of the two + ## getModeRange + ## range chosen around mode, uses getRange + ## getModeHistPair + ## returns most common HistPair over given timeRange + ## getRange + ## range chosen around given histPair + ## returns: scalar: (min, max) + ## vector: (minMag, maxMag, avgDir) + ## getDeviation + ## returns a deviation around median to include in range + ## getBinnedPercent + ## returns a list of tuples representing "bins" and corresponding + ## percentages of values in each bin + ## + ## Conversion methods + ## UVToMagDir + ## MagDirToUV + ## convertAnalysisList + ## Breakdown of Sampler data: + ## 
HistoSampler : Parms, TimeRanges, EditAreas + ## Contains SeqOf + ## ParmHisto: Parm, TimeRange, EditArea + ## Contains SeqOf : (one for each grid overlapping timeRange) + ## HistSample is a histogram for a Parm(implicit), Grid, Area + ## Contains SeqOf + ## HistPair : Parm(implicit), Grid(implicit), Area(implicit) + ## count, HistValue (value) + ## count = how many times that value occurred within the grid + ## HistValue value: scalar, Vector (magnitude, direction), weather + +import CommonUtils +import logging + +class SampleAnalysis(CommonUtils.CommonUtils): + def __init__(self): + CommonUtils.CommonUtils.__init__(self) + self.log = logging.getLogger("FormatterRunner.SampleAnalysis.SampleAnalysis") + + ### CONSTANTS -- Do not override + def SCALAR(self): + return 0 + def MAGNITUDE(self): + return 1 + def DIRECTION(self): + return 2 + def VECTOR(self): + return 3 + def WEATHER(self): + return 4 + def DISCRETE(self): + return 5 + + ### GLOBAL THRESHOLDS AND VARIABLES + ### To override, override the associated method in your text product class. + # To be included in the analysis, a grid must either: + # 1. Be completely contained in the time range OR + # 2. Meet BOTH the temporalCoverage_percentage and temporalCoverage_hours + # requirements. + # The temporalCoverage_percentage is: + # The percentage of the TIMERANGE covered by the + # grid in order to include it in the analysis. + # The temporalCoverage_hours is: + # The required hours of overlap of a grid with the TIMERANGE + # in order to include it in the analysis. + # In addition, if the temporalCoverage_hours is greater than or equal to the + # TIMERANGE duration and the grid covers the entire TIMERANGE, + # it will be included. + def temporalCoverage_percentage(self, parmHisto, timeRange, componentName): + # This is the percentage of the TIMERANGE covered by the + # grid in order to include it in the analysis. 
+ # Percentage of temporal coverage default value (if not found in temporalCoverage_dict) + # Used by temporalCoverage_flag + return 20 + + def temporalCoverage_dict(self, parmHisto, timeRange, componentName): + # This is temporalCoverage percentage by weather element + # Used by temporalCoverage_flag + return { + "LAL": 0, + "MinRH": 0, + "MaxRH": 0, + "MinT": 1, + "MaxT": 1, + "Haines": 0, + "PoP" : 0, + "Hazards" : 0, + } + + def temporalCoverage_hours(self, parmHisto, timeRange, componentName): + # This is the required hours of overlap of a grid with the TIMERANGE + # in order to include it in the analysis. + # In addition, if the temporalCoverage_hours is greater than or equal to the + # TIMERANGE duration and the grid covers the entire TIMERANGE, + # it will be included. + # Temporal coverage hours default value + # (if not found in temporalCoverage_hours_dict) + # Used by temporalCoverage_flag + return 0 + + def temporalCoverage_hours_dict(self, parmHisto, timeRange, componentName): + # This is the temporalCoverage_hours specified per weather element. + # Used by temporalCoverage_flag + return { + #"MinRH": 0, + #"MaxRH": 0, + "MinT": 5, + "MaxT": 5, + #"Haines":0, + #"PoP" : 0, + "pws34": 4, + "pws64": 4, + "pwsD34": 4, + "pwsN34": 4, + "pwsD64": 4, + "pwsN64": 4, + } + + def moderated_dict(self, parmHisto, timeRange, componentName): + # This dictionary defines the low and high limit at which + # outliers will be removed when calculating moderated stats. + # By convention the first value listed is the percentage + # allowed for low values and second the percentage allowed + # for high values. 
+ return { + "T" : (10, 10), + "Wind": (0, 20), + "LAL": (10, 10), + "MinRH": (10, 10), + "MaxRH": (10, 10), + "MinT": (10, 10), + "MaxT": (10, 10), + "Haines": (10, 10), + "PoP" : (10, 10), + } + + def getModeratedLimits(self, parmHisto, timeRange, componentName): + compositeNameUI = parmHisto.getCompositeNameUI() + # get the stdDict min and max values + modMin = self.moderatedDefault(parmHisto, timeRange, componentName) + modMax = self.moderatedDefault(parmHisto, timeRange, componentName) + modDict = self.moderated_dict(parmHisto, timeRange, componentName) + if compositeNameUI in modDict: + modMin, modMax = modDict[compositeNameUI] + + return modMin, modMax + + def moderatedDefault(self, parmHisto, timeRange, componentName): + "Value used by moderated functions if not explicitly defined in moderated_dict" + return 5 + + def maxMode_increment_dict(self, parmHisto, timeRange, componentName): + return { + "PoP" : 10, + } + + def stdDev_dict(self, parmHisto, timeRange, componentName): + # This dictionary defines the low and high limit at which + # outliers will be removed when calculating stdDev stats. + # These tuples represent the (low, high) number of standard + # deviations. Any values falling outside this range will + # not be included in the calculated statistic. 
+ return { + "LAL": (1.0, 1.0), + "MinRH": (1.0, 1.0), + "MaxRH": (1.0, 1.0), + "MinT": (1.0, 1.0), + "MaxT": (1.0, 1.0), + "Haines": (1.0, 1.0), + "PoP" : (1.0, 1.0), + "T" : (1.0, 1.0), + "Wind" : (1.0, 1.0), + } + + def getStdDevLimits(self, parmHisto, timeRange, componentName): + compositeNameUI = parmHisto.getCompositeNameUI() + # get the stdDict min and max values + stdDevDict = self.stdDev_dict(parmHisto, timeRange, componentName) + minStdDev = self.stdDevDefault(parmHisto, timeRange, componentName) + maxStdDev = self.stdDevDefault(parmHisto, timeRange, componentName) + if compositeNameUI in stdDevDict: + minStdDev, maxStdDev = stdDevDict[compositeNameUI] + + return minStdDev, maxStdDev + + def stdDevDefault(self, parmHisto, timeRange, componentName): + "Value used by all moderated functions if not explicitly defined in stdDev_dict" + return 1.0 + + def vectorDirection_algorithm(self, parmHisto, timeRange, componentName): + # Algorithm to use for computing vector direction for vector analysis methods. 
+ # Can be "Average" or "MostFrequent" + return "Average" + + # Variables for converting Wind Direction from degrees to letters + def dirList(self): + dirSpan = 45 # 45 degrees per direction + base = 22.5 # start with N + return [ + ('N', 360-base, 361), + ('N', 0, base), + ('NE',base , base + 1*dirSpan), + ('E', base + 1*dirSpan, base + 2*dirSpan), + ('SE',base + 2*dirSpan, base + 3*dirSpan), + ('S', base + 3*dirSpan, base + 4*dirSpan), + ('SW',base + 4*dirSpan, base + 5*dirSpan), + ('W', base + 5*dirSpan, base + 6*dirSpan), + ('NW',base + 6*dirSpan, base + 7*dirSpan) + ] + + + ######################################## + ## SCALAR + + def avg(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getAverage(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def minMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + results = self.createStats(parmHisto,timeRange, componentName, args, primaryMethod) + return results + + def minimum(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractMinMax(result, "Min") + + def maximum(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractMinMax(result, "Max") + + def accumMinMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getAccumMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def 
accumSum(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getAccumSum(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def moderatedAccumMinMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModAccumMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def moderatedAccumSum(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModAccumSum(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def stdDevAccumMinMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevAccumMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def stdDevAccumSum(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevAccumSum(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def median(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getMedian(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def medianRange(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getMedianRange(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def mode(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getMode(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, 
primaryMethod) + + def modeRange(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModeRange(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def maxMode(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getMaxMode(self.SCALAR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def stdDevAvg(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevAvg(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def stdDevMin(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractMinMax(result, "Min") + + def stdDevMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractMinMax(result, "Max") + + def stdDevMinMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def stdDevFirstAvg(self, parmHisto, timeRange, componentName, args=None): + return self.getStdDevAvg(self.SCALAR(), parmHisto, timeRange, componentName, 1) + + def stdDevFirstMinMax(self, parmHisto, timeRange, componentName, args=None): + return self.getStdDevMinMax(self.SCALAR(), parmHisto, timeRange, componentName, 1) + + def moderatedAvg(self, parmHisto, 
timeRange, componentName, args=None): + primaryMethod = "self.getModeratedAvg(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def moderatedMin(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModeratedMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractMinMax(result, "Min") + + def moderatedMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModeratedMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractMinMax(result, "Max") + + def moderatedMinMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModeratedMinMax(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def binnedPercent(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getBinnedPercent(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def moderatedFirstAvg(self, parmHisto, timeRange, componentName, args=None): + return self.getModeratedAvg(self.SCALAR(), parmHisto, timeRange, componentName, 1) + + def moderatedFirstMinMax(self, parmHisto, timeRange, componentName, args=None): + return self.getModeratedMinMax(self.SCALAR(), parmHisto, timeRange, componentName, 1) + + def minMaxAvg(self, parmHisto, timeRange, componentName, args=None): + # Find Min and Max values + minMax = self.minMax( parmHisto, timeRange, componentName) + avg = self.avg(parmHisto, timeRange, componentName) + if minMax is None or avg is None: + return None + else: + 
min, max = minMax + return (min, max, avg) + + def minMaxSum(self, parmHisto, timeRange, componentName, args=None): + values = parmHisto.minMaxSum() + if values is None: + return None + else: + minV, maxV, sumV = values + return minV, maxV, sumV + + def maxAvg(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getMaxAvg(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def stdDevMaxAvg(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevMaxAvg(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def moderatedMaxAvg(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModeratedMaxAvg(self.SCALAR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def firstAvg(self, parmHisto, timeRange, componentName, args=None): + return self.getAverage(self.SCALAR(), parmHisto, timeRange, componentName, 1) + + def firstMinMax(self, parmHisto, timeRange, componentName, args=None): + return self.getMinMax(self.SCALAR(), parmHisto, timeRange, componentName, 1) + + def hourlyTemp(self, parmHisto, timeRange, componentName, args=None): + "Create hourly temperature stats" + # Produces a list of hourly temperature values in tuples + # Each tuple has an average temperature value and + # its hour of occurrence + # Assumptions: + # If there is no data for an hour, None is + # given instead of a temp value + + if parmHisto.getSampleLen() == 0: + return None + + start = timeRange.startTime() + start = AbsTime.absTimeYMD(start.year, start.month, + start.day, start.hour, 0, 0) + stats = [] + while start < timeRange.endTime(): + # Create Time Range for current hour + end = start + 3600 # 1 hour in 
seconds + hour = start.hour + tr = TimeRange.TimeRange(start, end) + #Get the Average T for current hour + value = self.getAverage(self.SCALAR(), parmHisto, tr, componentName) + + # Append Value and Hour to Stat List + stats.append((value, hour)) + start = end + return stats + + ######################################## + ## VECTOR + + def vectorAvg(self, parmHisto, timeRange, componentName, args=None): + "Create a Vector ((minMag, maxMag), TextDir) Stats" + primaryMethod = "self.getAverage(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def vectorMinMax(self, parmHisto, timeRange, componentName, args=None): + "Create a Vector ((minMag, maxMag), TextDir) Stats" + primaryMethod = "self.getMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def vectorMin(self, parmHisto, timeRange, componentName, args=None): + "Create a Vector ((minMag, maxMag), TextDir) Stats" + primaryMethod = "self.getMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractVectorMinMax(result, "Min") + + def vectorMax(self, parmHisto, timeRange, componentName, args=None): + "Create a Vector ((minMag, maxMag), TextDir) Stats" + primaryMethod = "self.getMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractVectorMinMax(result, "Max") + + def vectorMedian(self, parmHisto, timeRange, componentName, args=None): + "Create a Vector (median, TextDir) Stats" + primaryMethod = "self.getMedian(self.VECTOR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def vectorMode(self, parmHisto, 
timeRange, componentName, args=None): + "Create a Vector (mode, TextDir) Stats" + primaryMethod = "self.getMode(self.VECTOR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def vectorMedianRange(self, parmHisto, timeRange, componentName, args=None): + "Create a Vector (medianRange, TextDir) Stats" + primaryMethod = "self.getMedianRange(self.VECTOR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def vectorModeRange(self, parmHisto, timeRange, componentName, args=None): + "Create a Vector (modeRange, TextDir) Stats" + primaryMethod = "self.getModeRange(self.VECTOR(), parmHisto, timeRange, componentName)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def vectorStdDevAvg(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevAvg(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod,) + return result + + def vectorStdDevMinMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def vectorStdDevMin(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractVectorMinMax(result, "Min") + + def vectorStdDevMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getStdDevMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return 
self.extractVectorMinMax(result, "Max") + + def vectorModeratedAvg(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModeratedAvg(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod,) + return result + + def vectorModeratedMinMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModeratedMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def vectorBinnedPercent(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getBinnedPercent(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return result + + def vectorModeratedMin(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModeratedMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractVectorMinMax(result, "Min") + + def vectorModeratedMax(self, parmHisto, timeRange, componentName, args=None): + primaryMethod = "self.getModeratedMinMax(self.VECTOR(), parmHisto, timeRange, componentName)" + result = self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + return self.extractVectorMinMax(result, "Max") + + def vectorMagMinMax(self, parmHisto, timeRange, componentName, args=None): + "Create a Vector min/max Stats" + return self.getMinMax(self.MAGNITUDE(), parmHisto, timeRange, componentName) + + def vectorMagMin(self, parmHisto, timeRange, componentName, args=None): + "Create a Vector min Stats" + minResult, maxResult = self.getMinMax(self.MAGNITUDE(), parmHisto, timeRange, + componentName) + return minResult + + def vectorMagMax(self, parmHisto, timeRange, componentName, args=None): + 
"Create a Vector max Stats" + minResult, maxResult = self.getMinMax(self.MAGNITUDE(), parmHisto, timeRange, + componentName) + return maxResult + + ## This method is being kept for "table" type products + def vectorRange(self, parmHisto, timeRange, componentName=None): + "Create a Vector Stats" + # Split Time Period in half + # For each half find average values: + # mag1 = min Period 1, mag2 = max Period 1, + # mag3 = min Period 2, mag4 = max Period 2 + periods = self.splitRange(timeRange) + period1 = periods[0] + period2 = periods[1] + result1 = self.getAverage(self.VECTOR(), parmHisto, period1, componentName) + result2 = self.getAverage(self.VECTOR(), parmHisto, period2, componentName) + + if result1 is None or result2 is None: + return None + mag1, dir1 = result1 + mag2, dir2 = result2 + return (mag1, mag2, dir1, dir2) + + ######################################## + ## WEATHER + ## + ## dominantWx + ## + ## Thresholds and variables: + + def coverage_weights_dict(self): + # Weight (between 0 and 1) for the coverage terms + return { + "": 0, + "Iso": .15, + "SChc": .15, + "Patchy": .15, + "Areas": .4, + "Chc": .4, + "Sct": .4, + "Lkly": .7, + "Num": .7, + "Brf": 1.0, + "Frq": 1.0, + "Ocnl": 1.0, + "Pds": 1.0, + "Inter": 1.0, + "Def": 1.0, + "Wide": 1.0, + } + + def wxkey_coverage_weight(self, parmHisto, timeRange, componentName, wxkey): + # Return a weight (between 0 and 1) for the wxkey coverage term + cov = wxkey.coverage() + return self.coverage_weights_dict()[cov] + + def wxkey_coverage_percentage(self, parmHisto, timeRange, componentName, wxkey): + # Return the required coverage percentage for the given wxkey which will be + # compared to its "rank" i.e. the percentage of areal coverage over the time period. 
+ wxType = wxkey.wxType() + wxCov = wxkey.coverage() + inten = wxkey.intensity() + # These rules were from the workshop + if wxType == "T" and inten == "+": + return 0 + if wxType in ["ZR", "ZL"]: + return 0 + # Large Hail + attrList = wxkey.attributes() + if "LgA" in attrList: + return 0 + # Heavy Fog + if wxType == "F" and inten == "+": + return 0 + # Low visibility + if wxType in ["F", "H", "BS", "K", "BD"]: + vis = wxkey.visibility() + if vis == "1/4SM" or vis == "0SM": + return 0 + if wxType in ["T", "R", "RW", "S", "SW", "L", "IP"]: + return 15 + # For the rest: ["F", "H", "BS", "K", "BD", "SA", "LC", "FR", "WG", "VA"] + return 15 + + def checkPercentages(self, parmHisto, timeRange, componentName, wxKey, keyRankDict): + # If a wxKey does not pass the wxkey_coverage_percentage, this method will be called + # to give another chance. + # You can use the keyRankDict: + # subkey : (rank, percent coverage) + # to allow the wxKey to pass based on other values in the grid. + # For example: If I have 10% RW 10% SW, neither RW or SW will be reported + # Using the keyRankDict, I can allow them to pass when I learn + # that 20% of my area is covered with precip. + # Here's how this might be done: + # + #precip = ["SW", "RW", "R", "S"] + #totalPrecip = 0 + #if wxKey.wxType() in precip: + # for subkey in keyRankDict.keys(): + # if subkey.wxType() in precip: + # rank, percent = keyRankDict[subkey] + # totalPrecip += percent + #if totalPrecip > 15: + # return 1 + #else: + # return 0 + return 0 + + def attribute_coverage_percentage(self, parmHisto, timeRange, componentName, wxType, attr): + # Return the required coverage percentage for the given attribute. + # May be based on the wxType and attr if desired. 
+ if attr == "Dry": + return 20 + else: + return 0 + + def dominantKeys_threshold(self, parmHisto, timeRange, componentName): + # This is the maximum number of weather keys desired from the rankedWx method + return 10 + + def cleanOutEmptyValues(self, parmHisto, timeRange, componentName, dataType): + return 0 + + def noWx_percentage(self, parmHisto, timeRange, componentName): + # If the raw rank (areal and temporal coverage) of NoWx exceeds this value, + # NoWx will be reported (all other weather keys will be ignored). + return 100 + + def dominantWx(self, parmHisto, timeRange, componentName, args=None): + "Return a list of dominant wx subkeys in order by ranking" + primaryMethod = "self.getDominantWx(parmHisto, timeRange, componentName, withRank=0)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def rankedWx(self, parmHisto, timeRange, componentName, args=None): + "Return a list of ranked (subkey, ranking) tuples" + primaryMethod = "self.getDominantWx(parmHisto, timeRange, componentName, withRank=1)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def getDominantWx(self, parmHisto, timeRange, componentName, withRank=0): + # Determine the dominant Wx considering the following: + # areal coverage over time + return self.getDominantValues(parmHisto, timeRange, componentName, + dataType="WEATHER", withRank=withRank) + + + def getDominantValues(self, parmHisto, timeRange, componentName, + dataType="WEATHER", withRank=0, withAux=0): + # Determine the dominant Wx subkey OR Discrete subkey + # considering the following: + # areal coverage over time, called the "rank" + # Sub-methods: + # temporalCoverage_flag + # wxKey_coverage_percentage and dominantKeys_threshold OR + # discreteKey_coverage_percentage and dominantDiscreteKeys_threshold + # The algorithm: + # Temporal coverage by grid: Any individual grid must + # cover enough time in the timerange to set the temporalCoverage_flag. 
+ # Loop over all samples, which are for all grids and + # all weather or discrete keys on each of those grids. + # For WEATHER, + # We aggregate weather types i.e. + # for each weather type (S, RW, R, etc.) we compute + # an aggregate subkey such that the resulting rankList + # will have just one entry per weather type. + # Aggregate Subkey: + # coverage: time-weighted average of coverages + # intensity: highest ranking OR if ranks are close, dominant + # visibility: lowest visibility + # attributes: aggregate attributes + # Rank: percentage of areal/temporal coverage over time + # For WEATHER, we weight this by the coverage term + # If a subkey does not meet the "wxkey_coverage_percentage" threshold + # or "discreteKey_coverage_percentage", it is removed. + # For WEATHER, in all cases, if a subkey has a Primary or Mention attribute, + # it automatically "wins" and is used. + # Finally, the highest ranking dominantKeys_threshold OR + # dominantDiscreteKeys_threshold number of keys are returned. + # If withRank == 1: return the ranked list of (subkey, rank) tuples + # Else: return the list of subkeys + # + totalHours = 0 + totalPoints = parmHisto.numberOfGridPoints() + compositeNameUI = parmHisto.getCompositeNameUI() + + # Loop over all samples, which are for all grids and + # all keys on each of those grids. + # In this process, we aggregate subkey types e.g. + # for each weather type (S, RW, R, etc.) we compute + # an aggregate subkey such that the resulting rankList + # will have just one entry per weather type. + # For discrete, we will have just one entry per + # discrete subkey (with or without Aux value). + + #print "\n\nIn getDominantValues: DataType, TimeRange", dataType, timeRange + #print "STEP 1 -- Aggregate per grid" + + subkeyTypeDict = {} + # STEP 1: + # For each wxType found in the grids, + # gather its 'hours' of coverage and 'count' of points covered. 
+ for histSample in parmHisto.histoSamples(): + validTime = TimeRange.TimeRange(histSample.validTime()) + if self.temporalCoverage_flag( + parmHisto, timeRange, componentName, histSample) == 0: + continue + # Get the number of hours inside the timeRange that this + # sample comes from (i.e., we can get a sample that lasts + # for 3 weeks - but only 1 hour of it is inside the + # timeRange - and we only want to rank it by the 1 hour + # inside the range) + # + hours = validTime.intersection(timeRange).duration()/3600 + if hours < 1: + continue + + totalHours += hours + + # Gather the subkey Types for this grid in subkeyTypeDict + # Each entry ends up being a list of tuples: (subkey, hours, count) + self.gatherSubkeyTypes( + parmHisto, timeRange, componentName, histSample, dataType, hours, + subkeyTypeDict, withAux) + + # STEP 2: For each subkeyType, + # --determine an aggregate subkey and rank i.e. + # aggregate areal coverage over time percentage + # --compare the rank to coverage threshold for the wxType. 
+ rankList = [] + #print "subkeyTypeDict", subkeyTypeDict + subkeyTypePointsDict = {} + noWxRawRank = 0 + keyRankDict = {} # Holds: aggregateKey: (rank, rawRank) + for subkeyType in list(subkeyTypeDict.keys()): + #print "\nsubkeyType", subkeyType + subkeyList = subkeyTypeDict[subkeyType] + if dataType == "WEATHER": + covDict = {} + intenDict = {} + visDict = {} + attrList = [] + attrDict = {} + primaryKey = None + mentionKey = None + subkeyTypeRank = 0 + # IF WEATHER: + # Gather the coverages, intensities, visibilities and attributes + # for this weather type + # If Primary or Mention, use the subkey as the aggregate key + # Determine a subkeyType rank + subkeyTotalPoints = 0 + for subkey, hours, count in subkeyList: + #print " subkey, hours, count", subkey, hours, count + subkeyTotalPoints += count + if dataType == "WEATHER": + attrs = subkey.attributes() + attrList = attrList + attrs + if "Primary" in attrs: + primaryKey = subkey + continue + if "Mention" in attrs: + mentionKey = subkey + continue + self.addToDict(covDict, subkey.coverage(), hours, count, 1) + self.addToDict(intenDict, subkey.intensity(), hours, count) + self.addToDict(visDict, subkey.visibility(), hours, count) + for attr in attrs: + self.addToDict(attrDict, attr, hours, count) + subkeyTypeRank += hours * count + subkeyTypePointsDict[subkeyType] = subkeyTotalPoints + + # Determine aggregate key + #print " subkeyTypeRank", subkeyTypeRank, subkeyTotalPoints + #print " totalHours, totalPoints", totalHours, totalPoints + subkeyPoints = subkeyTypePointsDict[subkeyType] + if dataType == "WEATHER": + aggregateKey = self.getWxAggregateKey( + parmHisto, timeRange, componentName, + primaryKey, mentionKey, subkeyType, covDict, intenDict, visDict, + attrList, attrDict, totalHours, totalPoints, subkeyPoints) + else: + aggregateKey = subkeyType + + # Determine rawRank and rank for the aggregateKey + if dataType == "WEATHER" \ + and "Primary" in aggregateKey.attributes(): + rank = 200 + rawRank = 200 + else: + 
rawRank = int(round(float(subkeyTypeRank)/(totalHours*totalPoints)*100.0)) + if dataType == "WEATHER": + # Save the raw rank for NoWx + if aggregateKey.wxType() == "": + noWxRawRank = rawRank + # Multiply by the coverage weight + rank = int(rawRank * self.wxkey_coverage_weight( + parmHisto, timeRange, componentName, aggregateKey)) + else: + rank = rawRank + #print " aggregateKey, rank", aggregateKey, rank, rawRank + keyRankDict[aggregateKey] = (rank, rawRank) + + # Check to see if each aggregateKey meets the required coverage percentage + for aggregateKey in list(keyRankDict.keys()): + rank, rawRank = keyRankDict[aggregateKey] + if dataType == "WEATHER" \ + and ("Mention" in aggregateKey.attributes() \ + or "Primary" in aggregateKey.attributes()): + rankList.append((aggregateKey, rank)) + else: + if dataType == "WEATHER": + # Use rawRank which is the percentage of areal/temporal coverage + threshold = self.wxkey_coverage_percentage( + parmHisto, timeRange, componentName, aggregateKey) + flag = rawRank >= threshold + else: + threshold = self.discreteKey_coverage_percentage( + parmHisto, timeRange, componentName, aggregateKey) + flag = rawRank >= threshold + if not flag: + # Get another chance to pass + flag = self.checkPercentages( + parmHisto, timeRange, componentName, aggregateKey, keyRankDict) + if flag: + rankList.append((aggregateKey, rank)) + else: + pass + #print "didn't make the cut", rank, aggregateKey + + #print " rankList", rankList + if len(rankList) == 0: + return None + + # Check the NoWx Threshold + if noWxRawRank > self.noWx_percentage(parmHisto, timeRange, componentName): + # Report NoWx + newList = [] + for key, rank in rankList: + if key.wxType() == "": + newList.append((key, rank)) + rankList = newList + + # Clean out NoWx and None (Discrete) + if self.cleanOutEmptyValues(parmHisto, timeRange, componentName, dataType): + newList = [] + for subkey, rank in rankList: + if dataType == "WEATHER": + if subkey.wxType() == "": + continue + else: # 
DISCRETE + if subkey == "": + continue + newList.append((subkey, rank)) + rankList = newList + + # Sort into ranked order + # Limit the number of keys returned + if dataType == "WEATHER": + rankList.sort(self.rankedSortOrder) + rankList = [ + (WeatherSubKey.weatherSubKey(self._argDict["site"], subkey.coverage(), subkey.wxType(), subkey.intensity(), + subkey.visibility(), + self.removeSimilarAttrs(subkey.attributes())), + rank) for subkey, rank in rankList + ] + dominantKeys = self.dominantKeys_threshold(parmHisto, timeRange, componentName) + else: # DISCRETE + rankList.sort() + dominantKeys = self.dominantDiscreteKeys_threshold(parmHisto, timeRange, componentName) + + if len(rankList) > dominantKeys: + rankList = rankList[0:dominantKeys] + if self._debug: + print("\nSampleAnalysis::ranked", dataType, " \n TimeRange ", timeRange) + print(" Area", parmHisto.area().getId()) + if withRank: + if self._debug: + print(" returning with rank %s" % (rankList)) + return rankList + else: + newList = [] + for subkey, rank in rankList: + newList.append(subkey) + if self._debug: + print(" returning %s" % (newList)) + return newList + + def gatherSubkeyTypes(self, parmHisto, timeRange, componentName, + histSample, dataType, hours, subkeyTypeDict, withAux): + for histPair in histSample.histogram(): + count = float(histPair.count()) + if dataType == "WEATHER": + subkey = WeatherSubKey.WeatherSubKey(histPair.value().weather().get(0)) + subkeyType = subkey.wxType() + if subkeyType == "RW" or subkeyType == "SW": + if subkey.intensity() == "--": + subkeyType = subkeyType + "--" + else: # DISCRETE + subkeyType = histPair.value().discrete().get(0) + if withAux == 0: + subkeyType = histPair.value().discrete().baseData(histPair.value().discrete().getSiteId(), subkeyType) + subkey = subkeyType + if subkeyType in subkeyTypeDict: + subkeyTypeDict[subkeyType].append((subkey, hours, count)) + # Make new entry + else: + subkeyTypeDict[subkeyType] = [(subkey, hours, count)] + + def 
getWxAggregateKey(self, parmHisto, timeRange, componentName, + primaryKey, mentionKey, subkeyType, covDict, intenDict, visDict, + attrList, attrDict, totalHours, totalPoints, subkeyPoints): + # Compute the aggregate key + # If Primary was an attribute in any subkey, take it as the aggregate + # plus all the other attributes + # Otherwise, if Mention was an attribute in any subkey, take it as the + # aggregate plus all the other attributes + # Otherwise, compute the aggregate subkey from the coverage, intensity, + # visibilities for this subkeyType weighted by temporal and areal coverages + #print "covDict", covDict + #print "intenDict", intenDict + #print "visDict", visDict + #print "attrDict", attrDict + if subkeyType in ["RW--", "SW--"]: + subkeyType = subkeyType.replace("--","") + aggregateKey = None + if primaryKey is not None: + aggregateKey = primaryKey + elif mentionKey is not None: + aggregateKey = mentionKey + + if aggregateKey is None: + algorithm = self.aggregateCov_algorithm(parmHisto, timeRange, componentName) + aggCov = algorithm(parmHisto, timeRange, componentName, + subkeyType, "coverage", covDict, totalHours, totalPoints, subkeyPoints) + aggInten = self.getAggregate(parmHisto, timeRange, componentName, + subkeyType, "intensity", intenDict, totalHours, totalPoints, subkeyPoints) + aggVis = self.getAggregate(parmHisto, timeRange, componentName, + subkeyType, "visibility", visDict, totalHours, totalPoints, subkeyPoints) + aggAttrs = self.getAggregateAttributes( + parmHisto, timeRange, componentName, + subkeyType, attrDict, totalHours, totalPoints, subkeyPoints) + else: + aggCov = aggregateKey.coverage() + aggInten = aggregateKey.intensity() + aggVis = aggregateKey.visibility() + aggAttrs = aggregateKey.attributes() + attrList = self.removeDups(attrList) + aggregateKey = WeatherSubKey.weatherSubKey(self._argDict["site"], aggCov, subkeyType, aggInten, aggVis, aggAttrs) + #print "aggregateKey", aggregateKey + return aggregateKey + + def addToDict(self, 
dict, key, hours, count, tuple=0): + # Add to a dictionary whose values are lists + + if key in dict: + + if tuple: + (curValue, curMaxCov) = dict[key] + + if count > curMaxCov: + curMaxCov = count + + dict[key] = ((curValue + hours*count), curMaxCov) + + else: + curValue = dict[key] + + dict[key] = curValue + hours * count + + # Make new entry + else: + if tuple: + dict[key] = ((hours * count), count) + else: + dict[key] = hours * count + + def aggregateCov_algorithm(self, parmHisto, timeRange, componentName): + # The algorithm for choosing the coverage term for multiple + # instances of a weather type. + # "getAggregateCov" chooses the coverage with the highest rank + # (in terms of areal and temporal coverage.) + # "getExistingWeightedAggregateCov" chooses the coverage with the + # highest WEIGHTED rank that exists in the grids. + # "getWeightedAggregateCov" computes a weighted average coverage + # If the resulting coverage is not in the grids, + # "creates" an appropriate coverage. + # "getHighestWeightedAggregateCov" returns the coverage with the + # highest weight in "coverage_weights_dict" + # + # For example, + # If you have + # Iso T(covers 50% of the zone) + # Sct T(covers 25% of the zone) + # Num T(covers 25% of the zone) + # "getAggregateCov" returns "Iso" + # "getWeightedAggregateCov" returns "Sct" + # "getExistingWeightedAggregateCov" returns "Num" + # "getHighestWeightedAggregateCov" returns "Num" + # + # If you have + # Iso T(covers 60% of the zone) + # Num T(covers 40% of the zone) + # "getAggregateCov" returns "Iso" + # "getWeightedAggregateCov" returns "Sct" + # "getExistingWeightedAggregateCov" returns "Num" + # "getHighestWeightedAggregateCov" returns "Num" + # + return self.getAggregateCov + # return self.getWeightedAggregateCov + # return self.getExistingWeightedAggregateCov + # return self.getHighestWeightedAggregateCov + + # Submitted by Matt Belk 8/04 + def getAggregateCov(self, parmHisto, timeRange, componentName, + wxType, wxPart, dict, 
totalHours, totalPoints, + subkeyPoints): + # From the entries in the dictionary, + # find the aggregate coverage + # Return coverage in one of two ways: + # + # 1) Coverage covers >= 90% of zone, or + # 2) Coverage has the highest subkey rank + + # If there is only one coverage + if len(dict) == 1: + for key in list(dict.keys()): + return key + + # Get ready to track properties of all coverage terms + maxRank = 0.0 + aggCov = None + sameRank = [] + highRank = [] + + # For each coverage + for key in list(dict.keys()): + # If this is a tuple + if type(dict[key]) == type(()): + (sum, max) = dict[key] # get the point sum and max coverage + # Otherwise, get the point sum and assume a max coverage + else: + sum = dict[key] + max = 0 + + # Compute subkey rank + subkeyRank = float(sum)/(totalHours * totalPoints) * 100.0 + # If this is the highest subkey rank we have so far + if subkeyRank > maxRank: + # Store this as the highest subkey rank + maxRank = subkeyRank + aggCov = key + sameRank = [] + sameRank.append(key) + # Otherwise, if this ties as the highest subkey rank + elif subkeyRank == maxRank: + sameRank.append(key) + + # If the areal coverage of this subkey coverage is >= 90% + arealThreshold = self.__dict__.get("_aggregateCov_arealThreshold", 90.0) + if (max * 100.0)/float(totalPoints) >= arealThreshold: + # Store this as a candidate for highest coverage key + highRank.append(key) + + # Get ready to process sameRank list (if needed) + maxVal = 0 + # If there is more than one key in the highRank list + if len(highRank) > 0: + # Use this list to find the aggregate coverage + testRank = highRank + # Otherwise, if there are items in the sameRank list + elif len(sameRank) > 0: + testRank = sameRank + + # Grab the most significant coverage + for cov in testRank: + # Compute the PoP range and a test value for this coverage + (lowVal, highVal) = self.coveragePoP_value(cov) + avgVal = (lowVal + highVal)/2.0 + # If this is the most significant value + if avgVal >= maxVal: + 
# Use this coverage + aggCov = cov + maxVal = avgVal + + # if the aggregate coverage is still not found + if aggCov is None: + aggCov = self.processNoAggCov(dict, wxType) + self.debug_print('in getAggregateCov -> returning %s' % (aggCov), 1) + return aggCov + + def aggregateCov_weights_dict(self): + # Weight (between 0 and 1) for the coverage terms + return { + "": 0, + "Iso": .1, + "SChc": .1, + "Patchy": .1, + "Areas": .4, + "Chc": .4, + "Sct": .4, + "Lkly": .7, + "Num": .7, + "Brf": .9, + "Frq": .9, + "Ocnl": .9, + "Pds": .9, + "Inter": .9, + "Def": .9, + "Wide": .9, + } + + def aggregateCov_weight(self, parmHisto, timeRange, componentName, cov): + # Return a weight (between 0 and 1) for the coverage term + return self.aggregateCov_weights_dict()[cov] + + def getExistingWeightedAggregateCov(self, parmHisto, timeRange, componentName, + wxType, wxPart, dict, totalHours, totalPoints, + subkeyPoints): + # From the entries in the dictionary, find the aggregate coverage by + # using a weighting scheme. + # If the resulting coverage is not in the grids, use the coverage + # with the greatest weight. + if len(dict) == 1: + for key in list(dict.keys()): + return key + aggCov, wtSum = self.getAggCov_and_WtSum(parmHisto, timeRange, componentName, + wxType, wxPart, dict, totalHours, totalPoints, + subkeyPoints) + if aggCov is None: + aggCov = self.processNoAggCov(dict, wxType) + return aggCov + + def getWeightedAggregateCov(self, parmHisto, timeRange, componentName, + wxType, wxPart, dict, totalHours, totalPoints, + subkeyPoints): + # From the entries in the dictionary, find the aggregate coverage by + # using a weighting scheme. + # If the resulting coverage is not in the grids, "create" an appropriate + # coverage. 
+ if len(dict) == 1: + for key in list(dict.keys()): + return key + aggCov, wtSum = self.getAggCov_and_WtSum(parmHisto, timeRange, componentName, + wxType, wxPart, dict, totalHours, totalPoints, + subkeyPoints) + # Assign the new coverage + popValue = self.coveragePoP_table() + inGrids = 0 + candidates = [] + aggCov = None + for key in list(popValue.keys()): + lowVal, highVal = popValue[key] + #print "key, low, high", key, lowVal, highVal + # Ranges are inclusive and not contiguous, + # so we have to adjust + lowVal = lowVal - 10 + if wtSum > lowVal and wtSum <= highVal: + # If this coverage was in the grids, + # choose it and we're done + #print "dict", dict + if key in list(dict.keys()): + aggCov = key + inGrids = 1 + break + else: + candidates.append(key) + + #print "inGrids", inGrids + if not inGrids: + # If the weighted average was not in the grids, + # we need to choose a coverage or prob term from + # the candidates + + # Determine coverage or probability based on + # first dictionary key + arealCovs = self.arealCoverages() + for key in list(dict.keys()): + if key in arealCovs: + areal = 1 + else: + areal = 0 + break + for cov in candidates: + if cov in arealCovs: + covAreal = 1 + else: + covAreal = 0 + if covAreal == areal: + # Make sure this cov can be used with + # the wxType + availableCoverages = WeatherSubKey.availableCoverages(self._argDict["site"], wxType) + if cov in availableCoverages: + aggCov = cov + break + if aggCov is None: + aggCov = self.processNoAggCov(dict, wxType) + return aggCov + + def getAggCov_and_WtSum(self, parmHisto, timeRange, componentName, + wxType, wxPart, dict, totalHours, totalPoints, + subkeyPoints): + if len(dict) == 1: + for key in list(dict.keys()): + return key, 1 + # Compute weighted Sum + wtSum = 0.0 + maxContrib=0 + aggCov="" + for key in list(dict.keys()): + if type(dict[key]) == type(()): + (sum, max) = dict[key] + else: + sum = dict[key] + subkeyRank = float(sum)/(totalHours * totalPoints) * 100.0 + #print "key", 
key + covLowVal, covHighVal = self.coveragePoP_value(key) + covWt = self.aggregateCov_weight( + parmHisto, timeRange, componentName,key) + #print " covWt, subkeyRank", covWt, subkeyRank + #print " contribution", covWt * subkeyRank + contrib = covWt * subkeyRank + wtSum += contrib + if contrib > maxContrib: + aggCov = key + maxContrib=contrib + #print "weighted value", aggCov, wtSum + return aggCov, wtSum + + def getHighestWeightedAggregateCov( + self, parmHisto, timeRange, componentName, wxType, wxPart, dict, + totalHours, totalPoints, subkeyPoints): + # Return the Coverage with the highest weight in coverage_weights_dict + # Throw out Coverages that do not meet the wxkey_coverage_percentage + # EXCEPT if no Coverages meet that threshold, return the Coverage + # with the highest percentage. + # + # NOTE: In cases where the total percentages, e.g. 5% Sct, 7% Num, + # do not meet the threshold, it's ok for this method to return Num. + # because the total percentage will be checked in a later step. 
+ # + # Handle case of only one coverage + if len(dict) == 1: + for cov in list(dict.keys()): + return cov + # Aggregate Coverage + aggCov = None + # Coverage Weight for aggregate Coverage + maxWt = -1 + # Aggregate Cov for those that do not meet threshold + aggCovReject = None + # Max Percentage for those Coverages that do not meet threshold + maxPercentReject = -1 + + for cov in list(dict.keys()): + covWt = self.aggregateCov_weight( + parmHisto, timeRange, componentName, cov) + sum, maxCov = dict[cov] + percentCov = float(sum)/(totalHours*totalPoints)*100.0 + percentCov = int(round(percentCov)) + # Check to see if it meets threshold + # Make a temporary wxKey to check wxkey_coverage_percentage + wxKey = WeatherSubKey.weatherSubKey(self._argDict["site"], cov, wxType, "", "", []) + #print "wxKey", wxKey, sum, percentCov + if percentCov >= self.wxkey_coverage_percentage( + parmHisto, timeRange, componentName, wxKey): + if covWt > maxWt: + aggCov = cov + maxWt = covWt + else: + if percentCov > maxPercentReject: + aggCovReject = cov + maxPercentReject = percentCov + #print "aggCov, wt",aggCov, maxWt + #print "aggCovReject, %", aggCovReject, maxPercentReject + if aggCov is None: + aggCov = aggCovReject + #print "Returning", aggCov + return aggCov + + def processNoAggCov(self, dict, wxType): + msg = "WARNING -- SampleAnalysis cannot aggregate coverages for " + wxType + log.warning(msg) + # There was no appropriate coverage for the wxType and given weight + # So take any coverage that exists in the grid + aggCov = "" + for key in list(dict.keys()): + aggCov = key + break + return aggCov + + def getAggregate(self, parmHisto, timeRange, componentName, + wxType, wxPart, dict, totalHours, totalPoints, subkeyPoints): + # From the entries in the dictionary, + # find the aggregate wxPart (coverage, intensity, visibility) + # Do it 2 at a time and aggregate the ranks as you go + if len(dict) == 1: + for key in list(dict.keys()): + return key + + firstTime = 1 + for key in 
list(dict.keys()): + sum = dict[key] + subkeyRank = float(sum)/(totalHours * totalPoints) * 100.0 + if wxPart == "coverage": + subkey = WeatherSubKey.weatherSubKey(self._argDict["site"], key, wxType, "", "", []) + elif wxPart == "intensity": + subkey = WeatherSubKey.weatherSubKey(self._argDict["site"], "", wxType, key, "", []) + elif wxPart == "visibility": + subkey = WeatherSubKey.weatherSubKey(self._argDict["site"], "", wxType,"" , key, []) + if firstTime: + aggRank = subkeyRank + curKey = subkey + firstTime = 0 + else: + curKey = self.makeAggregateSubkey(curKey, aggRank, subkey, subkeyRank) + aggRank = int((aggRank + subkeyRank)/2.0) + exec("aggValue = curKey." + wxPart + "()") + return aggValue + + def getAggregateAttributes(self, parmHisto, timeRange, componentName, + wxType, dict, totalHours, totalPoints, subkeyPoints): + # Take only attributes that meet the threshold + attrList = [] + for key in list(dict.keys()): + sum = dict[key] + attrRank = float(sum)/(totalHours * totalPoints) * 100.0 + threshold = self.attribute_coverage_percentage( + parmHisto, timeRange, componentName, wxType, key) + if attrRank > threshold: + attrList.append(key) + return attrList + + def makeSubkeyList(self, weatherKey): + # Make sure subkeyList is a true list + length = len(weatherKey) + newList = [] + index = 0 + for subkey in weatherKey: + newList.append(subkey) + index = index + 1 + if index >= length: + break + return newList + + def weather_percentages(self, parmHisto, timeRange, componentName, args=None): + # Return a list of tuples: + # weather subkey, percentage of coverage + # All WeatherSubKeys are included + return self.getSubkey_percentages(parmHisto, timeRange, componentName, + dataType="WEATHER") + + def getSubkey_percentages(self, parmHisto, timeRange, componentName, + dataType="WEATHER", withAux=1): + # Gather all the Weather or Discrete SubKeys and percentages + numPoints = parmHisto.numberOfGridPoints() + percentageList = [] + + # Each histSample represents a 
grid + # To determine percentage for a weather value, we need + # to aggregate it over grids + for histSample in parmHisto.histoSamples(): + timeWeight = self.determineGridWeight(histSample, timeRange) + for histPair in histSample.histogram(): + count = float(histPair.count()) + gridPercentage = int((count/numPoints) * 100.0) + timeRangePercentage = gridPercentage * timeWeight + if dataType == "WEATHER": + subkey = WeatherSubKey.WeatherSubKey(histPair.value().weather().get(0)) + else: # DISCRETE + subkey = histPair.value().discrete().get(0) + if withAux == 0: + subkey = histPair.value().discrete().baseData(histPair.value().discrete().getSiteId(), subkey) + # See if subkey is already in list + found = 0 + for value, percentage in percentageList: + # If so, add it's percentage + if value == subkey: + found = 1 + index = percentageList.index((value,percentage)) + newPercentage = percentage + timeRangePercentage + percentageList[index] = (subkey, newPercentage) + if found == 0: + percentageList.append((subkey,timeRangePercentage)) + #print "percentage list", dataType, percentageList + return percentageList + + ######################################## + ## DISCRETE + + def discreteKey_coverage_percentage(self, parmHisto, timeRange, componentName, keyStr): + # Return the required coverage percentage for the given wxkey which will be + # compared to its "rank" i.e. the percentage of areal coverage over the time period. + return 1 + + def dominantDiscreteKeys_threshold(self, parmHisto, timeRange, componentName): + # This is the maximum number of discrete keys desired from the + # getDominantDiscreteKey method. 
+ return 10 + + def dominantDiscreteValue(self, parmHisto, timeRange, componentName, args=None): + "Return the most common discrete value over the given timeRange" + primaryMethod = "self.getDominantDiscreteValue(parmHisto, timeRange, componentName, withAux=0)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def dominantDiscreteValue_withAux(self, parmHisto, timeRange, componentName, args=None): + "Return the most common discrete value over the given timeRange" + primaryMethod = "self.getDominantDiscreteValue(parmHisto, timeRange, componentName, withAux=1)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def rankedDiscreteValue(self, parmHisto, timeRange, componentName, args=None): + "Return the most common discrete value over the given timeRange" + primaryMethod = "self.getDominantDiscreteValue(parmHisto, timeRange, componentName, withAux=0, withRank=1)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def rankedDiscreteValue_withAux(self, parmHisto, timeRange, componentName, args=None): + "Return the most common discrete value over the given timeRange" + primaryMethod = "self.getDominantDiscreteValue(parmHisto, timeRange, componentName, withAux=1, withRank=1)" + return self.createStats(parmHisto, timeRange, componentName, args, primaryMethod) + + def getDominantDiscreteValue(self, parmHisto, timeRange, componentName, withRank=0, withAux=0): + # Return a list of dominant discrete subkeys + # If withRank, return (key, rank) pairs + # If withAux, include the auxiliary field as part of the key + return self.getDominantValues(parmHisto, timeRange, componentName, + dataType="DISCRETE", withRank=withRank, withAux=withAux) + + def discrete_percentages(self, parmHisto, timeRange, componentName, args=None): + return self.getSubkey_percentages(parmHisto, timeRange, componentName, dataType="DISCRETE", + withAux=0) + + def discrete_percentages_withAux(self,
parmHisto, timeRange, componentName, args=None): + return self.getSubkey_percentages(parmHisto, timeRange, componentName, dataType="DISCRETE", + withAux=1) + + def discreteTimeRangesByKey(self, parmHisto, timeRange, componentName, args=None): + return self.getDiscreteTimeRangesByKey( + parmHisto, timeRange, componentName, args=None, withAux=0) + + def discreteTimeRangesByKey_withAux(self, parmHisto, timeRange, componentName, args=None): + return self.getDiscreteTimeRangesByKey( + parmHisto, timeRange, componentName, args=None, withAux=1) + + def getDiscreteTimeRangesByKey(self, parmHisto, timeRange, componentName, args=None, + withAux=0): + # This method returns a list of (discreteSubkey, timeRange) pairs ordered in ascending + # order by timeRange and then by priority of discrete keys as defined in the + # serverConfig files. + + keyDict = {} + covDict = {} + totalHours = timeRange.duration()/3600 + totalPoints = parmHisto.numberOfGridPoints() + + for histSample in parmHisto.histoSamples(): + validTime = TimeRange.TimeRange(histSample.validTime()) + if self.temporalCoverage_flag( + parmHisto, timeRange, componentName, histSample) == 0: + continue + + hours = validTime.intersection(timeRange).duration()/3600 + if hours < 1: + continue + + for histPair in histSample.histogram(): + keyStr = histPair.value().discrete().get(0) # discrete value + if withAux == 0: + keyStr = histPair.value().discrete().baseData(histPair.value().discrete().getSiteId(), keyStr) + + if keyStr in keyDict: + keyDict[keyStr].append(validTime) + else: + keyDict[keyStr] = [validTime] + + # Keep a running total of the temporal and areal percentages + count = float(histPair.count()) + if keyStr in covDict: + keyRank = covDict[keyStr] + newRank = keyRank + hours*count + covDict[keyStr] = newRank + else: # new entry + covDict[keyStr] = hours*count + +## keyList = covDict.keys() +## for k in keyList: +## t, a, n = covDict[k] +## print "key:", k, "time%:", t, "area#:", a, "totalPoints:", n + + keyList 
= list(covDict.keys()) + for keyStr in keyList: + # get the temporal and areal thresholds + keyRank = covDict[keyStr] + rank = int(round(float(keyRank)/(totalHours*totalPoints)*100.0)) + if rank < self.discreteKey_coverage_percentage( + parmHisto, timeRange, componentName, keyStr): + # remove the dict entry + del keyDict[keyStr] + + # glue the timeranges that share a common end/start time + keyList = [] + for k in list(keyDict.keys()): + trList = keyDict[k] + trList.sort() + tr = trList[0] + for i in range(1, len(trList)): + if tr.endTime() == trList[i].startTime(): + # keep extending the time, if TRs are contiguous + tr = TimeRange.TimeRange(tr.startTime(), trList[i].endTime()) + else: + # no match, append the tuple + keyList.append((k, tr)) + tr = trList[i] + + # Don't forget the last one + keyList.append((k, tr)) + + #print "discreteTimeRangesByKey keyList", keyList + return keyList + + def mostSignificantDiscreteValue(self, parmHisto, timeRange, componentName, withAux=0): + """Using mostSignificantDiscrete_keyOrder_dict and mostSignificantDiscrete_coveragePercentage_dict, + report the most significant discrete value for the given timeRange. If there is a tie, + report the most significant value. + """ + totalHours = 0 + totalPoints = parmHisto.numberOfGridPoints() + compositeNameUI = parmHisto.parmID().compositeNameUI() + + # Loop over all samples, which are for all grids and + # all keys on each of those grids. + # We will have just one entry per + # discrete key (with or without Aux value). + + #print "\n\nIn mostSignificantDiscreteValue: DataType, TimeRange", "DISCRETE", timeRange + #print "STEP 1 -- Aggregate per grid" + + subkeyTypeDict = {} + # STEP 1: + # For each discrete key found in the grids, + # gather its 'hours' of coverage and 'count' of points covered. 
+ for histSample in parmHisto.histoSamples(): + validTime = TimeRange.TimeRange(histSample.validTime()) + if self.temporalCoverage_flag( + parmHisto, timeRange, componentName, histSample) == 0: + continue + # Get the number of hours inside the timeRange that this + # sample comes from (i.e., we can get a sample that lasts + # for 3 weeks - but only 1 hour of it is inside the + # timeRange - and we only want to rank it by the 1 hour + # inside the range) + # + hours = validTime.intersection(timeRange).duration()/3600 + if hours < 1: + continue + + totalHours += hours + + # Gather the subkey Types for this grid in subkeyTypeDict + # Each entry ends up being a list of tuples: (discreteKey, hours, count) + self.gatherSubkeyTypes( + parmHisto, timeRange, componentName, histSample, 'DISCRETE', hours, + subkeyTypeDict, withAux) + + # STEP 2: For each subkeyType, + # --determine an aggregate subkey and rank i.e. + # aggregate areal coverage over time percentage + # --compare the rank to coverage threshold. 
+ #print "subkeyTypeDict", subkeyTypeDict + keyRankDict = {} # Holds: subkeyType: rank + for subkeyType in list(subkeyTypeDict.keys()): + #print "\nsubkeyType", subkeyType + subkeyList = subkeyTypeDict[subkeyType] + subkeyTypeRank = 0 + # Determine a subkeyType rank + subkeyTotalPoints = 0 + for subkey, hours, count in subkeyList: + #print " subkey, hours, count", subkey, hours, count + subkeyTotalPoints += count + subkeyTypeRank += hours * count + #print "total points =", subkeyTotalPoints + + #print "subkeyTypeRank =", subkeyTypeRank + #print "totalHours =", totalHours + #print "totalPoints =", totalPoints + # Determine rank for the subkeyType + rank = int(round(float(subkeyTypeRank)/(totalHours*totalPoints)*100.0)) + keyRankDict[subkeyType] = rank + #print "rank =", rank + + # Check to see if each subkeyType meets the required coverage percentage + keyOrderDict = self.mostSignificantDiscrete_keyOrder_dict(parmHisto, timeRange, compositeNameUI) + keyOrder = keyOrderDict[compositeNameUI] + mostSignificantSubkey = None + highestOrderIndex = None + for subkeyType in list(keyRankDict.keys()): + rank = keyRankDict[subkeyType] + thresholdDict = self.mostSignificantDiscrete_coveragePercentage_dict( + parmHisto, timeRange, componentName, subkeyType) + threshold = thresholdDict.get(compositeNameUI, 0) + #print "threshold =", threshold + flag = rank >= threshold + if not flag: + # Get another chance to pass + flag = self.checkPercentages( + parmHisto, timeRange, componentName, subkeyType, keyRankDict) + if flag: # This type meets the threshold criteria + if self.cleanOutEmptyValues(parmHisto, timeRange, componentName, "DISCRETE"): + # Don't save empty values + #print "Ignoring", subkeyType + continue + try: + orderIndex = keyOrder.index(subkeyType) + if highestOrderIndex is None or orderIndex > highestOrderIndex: + highestOrderIndex = orderIndex + mostSignificantSubkey = subkeyType + #print "Found higher significance key =", subkeyType + except: + pass + else: + #print 
"didn't make the cut", rank, subkeyType + pass + + #print "mostSignificantSubkey =", mostSignificantSubkey + return mostSignificantSubkey + + def mostSignificantDiscrete_coveragePercentage_dict(self, parmHisto, timeRange, componentName, keyStr): + """ Return the required coverage percentage for the given key which will be + compared to its "rank" i.e. the percentage of areal coverage over the time period. + """ + return { + "WindThreat": 5, + "FloodingRainThreat": 5, + "StormSurgeThreat": 5, + "TornadoThreat": 5, + } + + def mostSignificantDiscrete_keyOrder_dict(self, parmHisto, timeRange, componentName): + """ Returns a list of keys from least to most significant for a discrete type (componentName). """ + threatKeyOrder = [None, "None", "Elevated", "Mod", "High", "Extreme"] + return { + "WindThreat": threatKeyOrder, + "FloodingRainThreat": threatKeyOrder, + "StormSurgeThreat": threatKeyOrder, + "TornadoThreat": threatKeyOrder, + } + + + ######################################## + ## UTILITIES + + + def determineGridWeight(self, histSample, timeRange): + # Returns the ratio: histSample overlap duration / timeRange duration + validTime = TimeRange.TimeRange(histSample.validTime()) + if validTime.contains(timeRange): + gridWeight = 1.0 + # Determine time histSample intersects timeRange + else: + intersect = validTime.intersection(timeRange).duration() + try: + gridWeight = float(intersect)/timeRange.duration() + except: + gridWeight = 0.0 + return gridWeight + + def createStats(self, parmHisto, timeRange, componentName, args, primaryMethod): + # Call appropriate methods to produce statistics based on args which tell us + # how to report the statistics with respect to the time range. 
+ # + if args is None: + result = eval(primaryMethod) + return result + period = args[0] + if period == 0: + subRanges = self.getGridTimeRanges(parmHisto, timeRange) + else: + subRanges = self.divideRange(timeRange, period) + statsByRange = [] + for subRange in subRanges: + timeRange = subRange + result = eval(primaryMethod) + # Handle no data + # If a subRange has no data continue + if result is None: + continue + statsByRange.append((result, subRange)) + return statsByRange + + def temporalCoverage_flag(self, parmHisto, timeRange, componentName, + histSample): + # Return 1 if the histSample time range is completely included in the timeRange + # OR the histSample time range sufficiently covers the timeRange + # i.e. meets BOTH the temporalCoverage_percentage and temporalCoverage_hours + # requirements. + + # Sub-methods: + # temporalCoverage_dict + # temporalCoverage_percentage + # temporalCoverage_hours + # temporalCoverage_hours_dict + # + + # njensen: I changed this to act directly on the java time ranges since they aren't + # part of the return value and it's faster to skip creating python TimeRanges + javaValidTime = histSample.validTime() + javaTimeRange = timeRange.toJavaObj() + compositeNameUI = parmHisto.getCompositeNameUI() + + # Is the histSample time range completely included in the timeRange?
+ #if timeRange.contains(validTime): + if javaTimeRange.contains(javaValidTime): + result = 1 + # Look at intersection of histSample and timeRange + else: + covDict = self.temporalCoverage_dict(parmHisto, timeRange, componentName) + if compositeNameUI in list(covDict.keys()): + percentage = covDict[compositeNameUI] + else: + percentage = self.temporalCoverage_percentage( + parmHisto, timeRange, componentName) + hoursDict = self.temporalCoverage_hours_dict( + parmHisto, timeRange, componentName) + if compositeNameUI in list(hoursDict.keys()): + coverageHours = hoursDict[compositeNameUI] + else: + coverageHours = self.temporalCoverage_hours( + parmHisto, timeRange, componentName) + #intersect = javaValidTime.intersection(javaTimeRange).getDuration() + intersect = FormatterUtil.getTimeRangeIntersectionDuration(javaValidTime, javaTimeRange) + # The intersection should be at least the percentage of the timeRange + # AND at least the number of coverageHours + fullPeriod = javaTimeRange.getDuration() + try: + if fullPeriod > 0: + percentIn = float(intersect)/fullPeriod + else: + percentIn = 0.0 + if percentIn > 0 and percentIn >= percentage/100.0: + result = 1 + else: # saying no - not enough is inside timeRange" + result = 0 + except: # saying no - could not figure percentIn" + result = 0 + # If temporal coverage percentage requirement met, + # check temporal coverage hours requirement + if result == 1: + intersectHours = intersect/3600 + trHours = fullPeriod/3600 + if intersectHours >= coverageHours: + result = 1 + elif coverageHours >= trHours and intersectHours == trHours: + result = 1 + else: + result = 0 + return result + + def getAccumSum(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): + "Return the cummulative sum over the given time period" + minVal, maxVal, sumVal = parmHisto.minMaxSum() + return sumVal + + def getAccumMinMax(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): + "Return the cummulative min/max over the given 
time period" + minVal, maxVal, sumVal = parmHisto.minMaxSum() + return minVal, maxVal + + def getModAccumSum(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): + "Return the moderated cummulative sum over the given time period" + minLimit, maxLimit = self.getModeratedLimits(parmHisto, timeRange, componentName) + minVal, maxVal, sumVal = parmHisto.moderatedMinMaxSum(minLimit, maxLimit) + return sumVal + + def getModAccumMinMax(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): + "Return the modereted cummulative min/max over the given time period" + minLimit, maxLimit = self.getModeratedLimits(parmHisto, timeRange, componentName) + minVal, maxVal, sumVal = parmHisto.moderatedMinMaxSum(minLimit, maxLimit) + return minVal, maxVal + + def getStdDevAccumSum(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): + "Return the standard deviation sum over the given time period" + minLimit, maxLimit = self.getStdDevLimits(parmHisto, timeRange, componentName) + minVal, maxVal, sumVal = parmHisto.stdDevMinMaxSum(minLimit, maxLimit) + return sumVal + + def getStdDevAccumMinMax(self, dataType, parmHisto, timeRange, componentName, firstOnly=0): + "Return the standard deviation min/max over the given time period" + minLimit, maxLimit = self.getStdDevLimits(parmHisto, timeRange, componentName) + minVal, maxVal, sumVal = parmHisto.stdDevMinMaxSum(minLimit, maxLimit) + return minVal, maxVal + + def getAverage(self, dataType, parmHisto, timeRange, componentName, firstOnly = 0): + "Return the time weighted average values over the given time period" + totValue = 0.0 + totWeight = 0.0 + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + if histSample.numOfPoints() == 0: + return None + + avg = histSample.average(True) + + # njensen: faster to do this without wrapping java objects + validTime = 
histSample.validTime() + weight = FormatterUtil.getTimeRangeIntersectionDuration(validTime, timeRange.toJavaObj()) + + if dataType == self.SCALAR(): + value = avg.scalar() + elif dataType == self.VECTOR(): + value = avg.magnitude() + + # sum weighted averages + totValue = totValue + weight * value + totWeight = totWeight + weight + + if firstOnly == 1: + break + + if totWeight > 0.0: + result = totValue / totWeight + else: + return None + + if dataType == self.VECTOR(): + dir = self.getDominantDirection(dataType, parmHisto, timeRange, + componentName) + return result, dir + + return result + + def getMinMax(self, dataType, parmHisto, timeRange, componentName, + firstOnly = 0): + "Return the minimum and maximum values over the given time period" + firstTime = 1 + minValue = 0.0 + maxValue = 0.0 + minResult = 0.0 + maxResult = 0.0 + noData = 1 + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + if histSample.numOfPoints() == 0: + return None + noData = 0 + + min = histSample.absoluteMin() + max = histSample.absoluteMax() + if dataType == self.SCALAR(): + minValue = min.scalar() + maxValue = max.scalar() + elif dataType == self.VECTOR() or dataType == self.MAGNITUDE(): + minValue = min.magnitude() + maxValue = max.magnitude() + if firstTime == 1: + firstTime = 0 + minResult = minValue + maxResult = maxValue + else: + if minValue < minResult: + minResult = minValue + if maxValue > maxResult: + maxResult = maxValue + if firstOnly == 1: + break + + if noData == 1: + return None + if dataType == self.VECTOR(): + dir = self.getDominantDirection(dataType, parmHisto, + timeRange, componentName) + return (minResult, maxResult), dir + + return minResult, maxResult + + def getStdDevAvg(self, dataType, parmHisto, timeRange, componentName, + firstOnly = 0): + "Return the time wieghted average values over the given time period" + # get the stdDev 
limits from the stdDev dictionary + minStd, maxStd = self.getStdDevLimits(parmHisto, timeRange, componentName) + totValue = 0.0 + totWeight = 0.0 + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + if histSample.numOfPoints() == 0: + return None + + # In AWIPS1, stdDevAvg utilized a default value of True for + # separateMagDir argument + avg = histSample.stdDevAvg(minStd, maxStd, True) + validTime = TimeRange.TimeRange(histSample.validTime()) + weight = validTime.intersection(timeRange).duration() + + if dataType == self.SCALAR(): + value = avg.scalar() + elif dataType == self.VECTOR(): + value = avg.magnitude() + + # sum weighted averages + totValue = totValue + weight * value + totWeight = totWeight + weight + + if firstOnly == 1: + break + + if totWeight > 0.0: + result = totValue / totWeight + else: + return None + + if dataType == self.VECTOR(): + dir = self.getDominantDirection(dataType, parmHisto, timeRange, + componentName) + return result, dir + + return result + + def getStdDevMinMax(self, dataType, parmHisto, timeRange, componentName, + firstOnly = 0): + "Return the minimum and maximum values over the given time period" + firstTime = 1 + minValue = 0.0 + maxValue = 0.0 + minResult = 0.0 + maxResult = 0.0 + noData = 1 + # get the stdDev limits from the stdDev dictionary + minStd, maxStd = self.getStdDevLimits(parmHisto, timeRange, componentName) + + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + if histSample.numOfPoints() == 0: + return None + noData = 0 + + min = histSample.stdDevMin(minStd) + max = histSample.stdDevMax(maxStd) + if dataType == self.SCALAR(): + minValue = min.scalar() + maxValue = max.scalar() + elif dataType == self.VECTOR(): + minValue = min.magnitude() + maxValue = 
max.magnitude() + if firstTime == 1: + firstTime = 0 + minResult = minValue + maxResult = maxValue + else: + if minValue < minResult: + minResult = minValue + if maxValue > maxResult: + maxResult = maxValue + if firstOnly == 1: + break + + if noData == 1: + return None + if dataType == self.VECTOR(): + dir = self.getDominantDirection(dataType, parmHisto, + timeRange, componentName) + return (minResult, maxResult), dir + + return minResult, maxResult + + def getModeratedAvg(self, dataType, parmHisto, timeRange, componentName, + firstOnly = 0): + "Return the time weighted average values over the given time period" + # get the stdDev limits from the stdDev dictionary + minMod, maxMod = self.getModeratedLimits(parmHisto, timeRange, componentName) + totValue = 0.0 + totWeight = 0.0 + noData = 1 + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + if histSample.numOfPoints() == 0: + return None + noData = 0 + + avg = histSample.moderatedAverage(minMod, maxMod, True) + validTime = TimeRange.TimeRange(histSample.validTime()) + weight = validTime.intersection(timeRange).duration() + + if dataType == self.SCALAR(): + value = avg.scalar() + elif dataType == self.VECTOR(): + value = avg.magnitude() + + # sum weighted averages + totValue = totValue + weight * value + totWeight = totWeight + weight + + if firstOnly == 1: + break + + if noData == 1: + return None + if totWeight > 0.0: + result = totValue / totWeight + else: + return None + + if dataType == self.VECTOR(): + dir = self.getDominantDirection(dataType, parmHisto, timeRange, + componentName) + return result, dir + + return result + + def getModeratedMinMax(self, dataType, parmHisto, timeRange, componentName, + firstOnly = 0): + "Return the minimum and maximum values over the given time period" + firstTime = 1 + minValue = 0.0 + maxValue = 0.0 + minResult = 0.0 + maxResult = 0.0 + noData 
= 1 + # get the stdDev limits from the stdDev dictionary + minMod, maxMod = self.getModeratedLimits(parmHisto, timeRange, componentName) + + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + if histSample.numOfPoints() == 0: + return None + noData = 0 + + min = histSample.moderatedMin(minMod) + max = histSample.moderatedMax(maxMod) + if dataType == self.SCALAR(): + minValue = min.scalar() + maxValue = max.scalar() + elif dataType == self.VECTOR(): + minValue = min.magnitude() + maxValue = max.magnitude() + if firstTime == 1: + firstTime = 0 + minResult = minValue + maxResult = maxValue + else: + if minValue < minResult: + minResult = minValue + if maxValue > maxResult: + maxResult = maxValue + if firstOnly == 1: + break + + if noData == 1: + return None + if dataType == self.VECTOR(): + dir = self.getDominantDirection(dataType, parmHisto, timeRange, + componentName) + return (minResult, maxResult), dir + + return minResult, maxResult + + def getMaxAvg(self, dataType, parmHisto, timeRange, componentName): + "Return the maximum average value over the given time period" + firstTime = 1 + maxValue = 0.0 + maxResult = 0.0 + noData = 1 + + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + if histSample.numOfPoints() == 0: + return None + noData = 0 + + maxV = histSample.average() + if dataType == self.SCALAR(): + maxValue = maxV.scalar() + elif dataType == self.VECTOR(): + maxValue = maxV.magnitude() + if firstTime == 1: + firstTime = 0 + maxResult = maxValue + else: + if maxValue > maxResult: + maxResult = maxValue + + if noData == 1: + return None + if dataType == self.VECTOR(): + dir = self.getDominantDirection(dataType, parmHisto, timeRange, + componentName) + return (maxResult, dir) + + return 
maxResult + + def getStdDevMaxAvg(self, dataType, parmHisto, timeRange, componentName): + "Return the maximum average value filtering by standard deviation" + firstTime = 1 + maxValue = 0.0 + maxResult = 0.0 + noData = 1 + # get the stdDev limits from the stdDev dictionary + minStd, maxStd = self.getStdDevLimits(parmHisto, timeRange, componentName) + + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + if histSample.numOfPoints() == 0: + return None + noData = 0 + + maxV = histSample.stdDevAvg(minStd, maxStd, True) + if dataType == self.SCALAR(): + maxValue = maxV.scalar() + elif dataType == self.VECTOR(): + maxValue = maxV.magnitude() + if firstTime == 1: + firstTime = 0 + maxResult = maxValue + else: + if maxValue > maxResult: + maxResult = maxValue + + if noData == 1: + return None + if dataType == self.VECTOR(): + dir = self.getDominantDirection(dataType, parmHisto, timeRange, + componentName) + return (maxResult, dir) + + return maxResult + + def getModeratedMaxAvg(self, dataType, parmHisto, timeRange, componentName): + "Return the maximum average value filtering by percentage" + firstTime = 1 + maxValue = 0.0 + maxResult = 0.0 + noData = 1 + # get the moderated limits from the stdDev dictionary + minMod, maxMod = self.getModeratedLimits(parmHisto, timeRange, componentName) + + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + if histSample.numOfPoints() == 0: + return None + noData = 0 + + maxV = histSample.moderatedAverage(minMod, maxMod, True) + if dataType == self.SCALAR(): + maxValue = maxV.scalar() + elif dataType == self.VECTOR(): + maxValue = maxV.magnitude() + if firstTime == 1: + firstTime = 0 + maxResult = maxValue + else: + if maxValue > maxResult: + maxResult = maxValue + + if noData 
== 1: + return None + if dataType == self.VECTOR(): + dir = self.getDominantDirection(dataType, parmHisto, timeRange, + componentName) + return (maxResult, dir) + + return maxResult + + def getVectorAvg(self, histPairs): + # Temporary method to emulate new HistSample.average(0) + # to be supplied in next release + uSum = 0.0 + vSum = 0.0 + totCount = 0 + for histPair in histPairs: + count = histPair.count() + totCount = totCount + count + val = histPair.value() + uw, vw = self.MagDirToUV(val.magnitude(), val.direction()) + uSum = uSum + uw * count + vSum = vSum + vw * count + + # calculate the average wind vector + if totCount > 0: + u = uSum / float(totCount) + v = vSum / float(totCount) + mag, dir = self.UVToMagDir(u, v) + mag = int(mag + 0.5) + dir = int(dir + 0.5) + return HistValue(float(mag), float(dir)) + else: + return HistValue() + + def extractMinMax(self, minMaxList, minOrMax, dataType=None): + # returns the min or max value in the list depending on minOrMax + # minMaxList is a list returned from createStats + # minOrMax can have the values "Min" or "Max" and nothing else + + if dataType == self.VECTOR(): + return self.extractVectorMinMax(minMaxList, minOrMax) + + # sanity checks - must be a list or tuple + if type(minMaxList) != list and \ + type(minMaxList) != tuple: + return None + + # minOrMax must be "Min" or "Max" + if not (minOrMax == "Min" or minOrMax == "Max"): + return None + + if type(minMaxList) is tuple: + if minOrMax == "Min": + return minMaxList[0] # return min value + elif minOrMax == "Max": + return minMaxList[1] # return max value + else: + print("extractMinMax error - Bad min/max string:", minOrMax) + print("Must be: 'Min' or 'Max'. 
") + return None + + # check for empty list + if len(minMaxList) <= 0: + return None + + newList = [] + # loop through and find the min and max + for (vMin, vMax), timeRange in minMaxList: + if minOrMax == "Min": + value = vMin + else: + value = vMax + newList.append((value, timeRange)) + return newList + + def extractVectorMinMax(self, minMaxList, minOrMax): + # returns the min or max value in the list depending on minOrMax + # minMaxList is a list returned from createStats + # minOrMax can have the values "Min" or "Max" and nothing else + + # sanity checks - must be a list or tuple + if type(minMaxList) != list and \ + type(minMaxList) != tuple: + return None + + # minOrMax must be "Min" or "Max" + if not (minOrMax == "Min" or minOrMax == "Max"): + return None + + if type(minMaxList) is tuple: + (minMag, maxMag), dir = minMaxList + if minOrMax == "Min": + mag = minMag # return min value + elif minOrMax == "Max": + mag = maxMag # return max value + else: + print("extractMinMax error - Bad min/max string:", minOrMax) + print("Must be: 'Min' or 'Max'. 
") + return None + return (mag, dir) + + # check for empty list + if len(minMaxList) <= 0: + return None + + newList = [] + # loop through and find the min and max + for ((vMin, vMax), dir), timeRange in minMaxList: + if minOrMax == "Min": + value = vMin + else: + value = vMax + newList.append(((value, dir), timeRange)) + return newList + + def getDominantDirection(self, dataType, parmHisto, timeRange, componentName): + # returns the dominant direction according to "vectorDirection_algorithm" + # which can be "Average" or "MostFrequent" + + if not dataType == self.VECTOR(): + return None + if self.vectorDirection_algorithm(parmHisto, timeRange, componentName) == "Average": + return self.getAverageDirection(parmHisto, timeRange, componentName) + else: #Most Frequent + return self.getMostFrequentDirection(parmHisto, timeRange, componentName) + + def getAverageDirection(self, parmHisto, timeRange, componentName): + # returns the dominant direction calculated by assuming a mag of 1 always + uSum = 0.0 + vSum = 0.0 + totCount = 0 + weight = 0 + totWeight = 0 + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, histSample) == 0: + continue + # sum u and v components assigning a magnitude one 1 always + histPairs = histSample.histogram() + for histPair in histPairs: + validTime = TimeRange.TimeRange(histSample.validTime()) + weight = validTime.intersection(timeRange).duration() + totWeight = totWeight + weight + count = histPair.count() + totCount = totCount + count + uw, vw = self.MagDirToUV(1.0, histPair.value().direction()) + uSum = uSum + (uw * count) * weight + vSum = vSum + (vw * count) * weight + + # calculate the average wind vector + if totCount > 0: + u = uSum / (float(totCount) * totWeight) + v = vSum / (float(totCount) * totWeight) + mag, dir = self.UVToMagDir(u, v) + return dir + else: + return None + + def getMostFrequentDirection(self, parmHisto, timeRange, componentName): + # returns the most 
frequent direction binned to 8-point numerical direction + binDict = {} + totWeight = 0.0 + #print "\nGetting most frequent", timeRange + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, histSample) == 0: + continue + numOfPoints = histSample.numOfPoints() + if numOfPoints == 0: + return None + histPairs = histSample.histogram() + for histPair in histPairs: + validTime = TimeRange.TimeRange(histSample.validTime()) + weight = validTime.intersection(timeRange).duration() + weight = weight/float(timeRange.duration()) * 100.0 + totWeight += weight + count = float(histPair.count()) + binnedDir = self.binDir(histPair.value().direction()) + #print "dir, binnedDir", histPair.value().direction(), binnedDir + percent = count/numOfPoints * weight + if binnedDir in binDict: + binDict[binnedDir] += percent + else: + binDict[binnedDir] = percent + + if totWeight == 0.0: + return None + + # Pick the most frequent direction + maxFreq = 0 + mostFreqDir = None + for dir in list(binDict.keys()): + freq = binDict[dir] + #print "dir, freq", dir, freq + if freq > maxFreq: + maxFreq = freq + mostFreqDir = dir + #print "returning", mostFreqDir + return mostFreqDir + + def binDir(self, dir): + # Return the "bin" direction value for the given direction + for textDir, low, high in self.dirList(): + if dir >= low and dir < high: + # Handle N + if textDir == "N": + return 0 + else: + return int(low+high)/2.0 + + + def splitRange(self, timeRange, numPeriods=2): + "Split the timeRange into the given number of periods and return the resulting list of time ranges" + + periods = [] + duration = (timeRange.endTime()-timeRange.startTime())/numPeriods + startTime = timeRange.startTime() + for i in range(numPeriods): + endTime = startTime + duration + newRange = TimeRange.TimeRange(startTime, endTime) + periods.append(newRange) + startTime = endTime + return periods + + def getGridTimeRanges(self, parmHisto, timeRange): + # Return the set 
of timeRanges that overlap the specified timeRange + # If a histSample partially overlaps, trim the timeRange to the + # specified timeRange's startTime() or endTime() + subRanges = [] + for histSample in parmHisto.histoSamples(): + tr = TimeRange.TimeRange(histSample.validTime()) # get the histSample timeRange + overlap = timeRange.intersection(tr).duration() # calc overlap + if overlap == 0: # no overlap -> skip to next grid + continue + if overlap == tr.duration(): # the whole grid is included + subRanges.append(tr) + elif timeRange.startTime() > tr.startTime(): + newTR = TimeRange.TimeRange(timeRange.startTime(), tr.endTime()) + subRanges.append(newTR) + elif timeRange.endTime() < tr.endTime(): + newTR = TimeRange.TimeRange(tr.startTime(), timeRange.endTime()) + subRanges.append(newTR) + + return subRanges + + def getMedianHistPair(self, dataType, parmHisto, timeRange, componentName): + # Return the median HistPair over the timeRange + # Note: There could be multiple grids (histSamples) in this timeRange + # over which we are sampling. + # + # we can't figure a median if there are no samples + # + if len(parmHisto.histoSamples()) == 0: + return None + # + # we can only figure the median based on the scalar value, + # or, for vectors, the magnitude or direction. Other types + # are invalid, and we have to return None. + # + if ((dataType!=self.SCALAR()) and (dataType!=self.MAGNITUDE()) + and (dataType!=self.DIRECTION())): + return None + # + # Get the samples inside the time range, keeping track + # of the values along the way (to sort later). Since + # there may be several grids, each with the same values + # that cross the desired timeRange, we need to add to + # the saved histogram counts when we encounter such + # values. Make a key with consistent floating point + # numbers so that the sorting works right later. 
+ # + + totalCount=0 + pairDict = {} + compositeNameUI = parmHisto.getCompositeNameUI() + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag( + parmHisto, timeRange, componentName, histSample) == 0: + continue + # calc the time weight + validTime = TimeRange.TimeRange(histSample.validTime()) + weight = validTime.intersection(timeRange).duration() + for histPair in histSample.histogram(): + tempCount=histPair.count() + tempValue=histPair.value() + tempKey=0.0 + if dataType == self.SCALAR(): + tempKey = tempValue.scalar() + elif dataType == self.MAGNITUDE(): + tempKey = tempValue.magnitude() + elif dataType == self.DIRECTION(): + tempKey = tempValue.direction() + valuestring="%020.10f" % float(tempKey) + totalCount = totalCount + tempCount * weight + if valuestring in pairDict: + pairDict[valuestring].incrementCount(int(tempCount * weight)) + else: + # njensen: I added the clone(), because otherwise we are incrementing + # the original histpair reference, which corrupts the statistics when + # the same method is called against a different subTimeRange from + # createStats + pairDict[valuestring] = histPair.clone() + pairDict[valuestring].incrementCount(int((tempCount * weight) - 1)) + # + # if no samples landed within the timeRange then we have + # to return a median of None + # + if totalCount == 0: + return None + # + # now we know the total number of pairs in the timeRange + # so we figure out the middle pair number and then + # go through the pairs in numerical order until we get + # to that count value + # + medianNumber=int(totalCount/2.0) + odd = 0 + if medianNumber%2 == 1: + medianNumber == medianNumber + 1 + odd = 1 + count=0 + names=list(pairDict.keys()) + names.sort() + for valueStr in names: + addCount=pairDict[valueStr].count() + if ((count+addCount)>=medianNumber): + if odd == 1: + return pairDict[valueStr] + elif ((count+addCount)) == medianNumber: + # need to take mean of this value and the next + histPair1 = 
pairDict[valueStr] + index1 = names.index(valueStr) + if index1 < len(names)-1: + valueStr2 = names[index1+1] + histPair2 = pairDict[valueStr2] + return self.getHistPairMean(dataType, histPair1, histPair2) + else: + return histPair1 + else: + return pairDict[valueStr] + count+=addCount + return None + + def getHistPairMean(self, dataType, histPair1, histPair2): + # Return a HistPair that represents the mean of the two histPair values + # for the given dataType which can be "SCALAR", "MAGNITUDE", "DIRECTION" + if dataType == self.SCALAR(): + val1 = histPair1.value().scalar() + val2 = histPair2.value().scalar() + avg = float(val1 + val2)/2.0 + value = HistValue(avg) + return HistPair(value) + + elif dataType == self.MAGNITUDE(): + val1 = histPair1.value().magnitude() + val2 = histPair2.value().magnitude() + avg = float(val1 + val2)/2.0 + value = HistValue(avg, 0.0) + return HistPair(value) + + else: #dataType == self.DIRECTION(): + dir1 = histPair1.value().direction() + dir2 = histPair2.value().direction() + u1, v1 = self.MagDirToUV(1.0, dir1) + u2, v2 = self.MagDirToUV(1.0, dir2) + u = (u1 + u2)/2 + v = (v1 + v2)/2 + mag, dir = self.UVToMagDir(u,v) + value = HistValue(mag, dir) + return HistPair(value) + + def getModeHistPair(self, dataType, parmHisto, timeRange, componentName): + # Return the most common HistPair over the timeRange + # Note: There could be multiple grids (histSamples) in this timeRange + # over which we are sampling. 
+ + if len(parmHisto.histoSamples()) == 0: + return None + maxCount = 0 + modePair = None + compositeNameUI = parmHisto.getCompositeNameUI() + + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + + # calc the time weight + validTime = TimeRange.TimeRange(histSample.validTime()) + weight = validTime.intersection(timeRange).duration() + for histPair in histSample.histogram(): + if (histPair.count() * weight) > maxCount: + maxCount = histPair.count() * weight + modePair = histPair + + return modePair + + def getMedian(self, dataType, parmHisto, timeRange, componentName): + # Return a range around the median + # For vector also return an average direction over that range. + + if len(parmHisto.histoSamples()) == 0: + return None + + if dataType == self.VECTOR(): + # For VECTOR, base the median on the magnitude + pairType = self.MAGNITUDE() + else: + pairType = dataType + + # Determine the median + medianPair = self.getMedianHistPair(pairType, parmHisto, timeRange, componentName) + if medianPair is None: + return None + if dataType == self.VECTOR(): + return (medianPair.value().magnitude(), medianPair.value().direction()) + else: + return medianPair.value().scalar() + + def getMedianRange(self, dataType, parmHisto, timeRange, componentName): + # Return a range around the median + # For vector also return an average direction over that range. 
+ + if len(parmHisto.histoSamples()) == 0: + return None + + if dataType == self.VECTOR(): + # For VECTOR, base the median on the magnitude + pairType = self.MAGNITUDE() + else: + pairType = dataType + + # Determine the median + medianPair = self.getMedianHistPair(pairType, parmHisto, timeRange, componentName) + if medianPair is None: + return None +# print "\nGetting Median Range" + return self.getRange(dataType, medianPair, parmHisto, timeRange, componentName, "Median") + + def getMode(self, dataType, parmHisto, timeRange, componentName): + # Return a range around the median + # For vector also return an average direction over that range. + + if len(parmHisto.histoSamples()) == 0: + return None + + if dataType == self.VECTOR(): + # For VECTOR, base the median on the magnitude + pairType = self.MAGNITUDE() + else: + pairType = dataType + + # Determine the median + modePair = self.getModeHistPair(pairType, parmHisto, timeRange, componentName) + if modePair is None: + return None +# print "\nGetting Median Range" + if dataType == self.VECTOR(): + return modePair.value().magnitude(), modePair.value().direction() + else: + return modePair.value().scalar() + + def getModeRange(self, dataType, parmHisto, timeRange, componentName): + # Return a range around the mode. + # For vector also return an average direction over that range. 
+ + if len(parmHisto.histoSamples()) == 0: + return None + + if dataType == self.VECTOR(): + pairType = self.MAGNITUDE() + else: + pairType = dataType + + # Determine the median + modePair = self.getModeHistPair(pairType, parmHisto, timeRange, componentName) + if modePair is None: + return None + return self.getRange(dataType, modePair, parmHisto, timeRange, componentName, "Mode") + + def getMaxMode(self, dataType, parmHisto, timeRange, componentName): + # Return the maximum mode over all grids + + if len(parmHisto.histoSamples()) == 0: + return None + + compositeNameUI = parmHisto.getCompositeNameUI() + incDict = self.maxMode_increment_dict(parmHisto, timeRange, componentName) + if compositeNameUI in incDict: + binRes = incDict[compositeNameUI] + else: + binRes = 10 # default value + + maxMode = -99999 + for histSample in parmHisto.histoSamples(): + # Ignore samples that are less than the temporal threshold + if self.temporalCoverage_flag( + parmHisto, timeRange, componentName, histSample) == 0: + continue + mode = histSample.mostCommonValueBinned(float(binRes)).scalar() + if mode > maxMode: + maxMode = mode + + return maxMode + + def getRange(self, dataType, basePair, parmHisto, timeRange, + componentName, rangeBase="Median"): + # Return a range around the basePair. + # For vector also return an average direction over that range. 
+ + compositeNameUI = parmHisto.getCompositeNameUI() + # Determine deviation + deviation = self.getDeviation(dataType, compositeNameUI, basePair) + # Take pairs that are within deviation of basePair + # and determine min, max and number of points covered by range + if dataType == self.SCALAR(): + scalarflag = 1 + baseValue = basePair.value().scalar() + else: + scalarflag = 0 + baseValue = basePair.value().magnitude() + + min = baseValue + max = baseValue + pairs = [] + samplesInRange = 0 + totalCount = 0 + + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + samplesInRange = samplesInRange + 1 + for histPair in histSample.histogram(): + if scalarflag: + histValue = histPair.value().scalar() + else: + histValue = histPair.value().magnitude() + #print "histvalue, baseValue, deviation", \ + # histValue, baseValue, deviation + if histValue >= baseValue - deviation and \ + histValue <= baseValue + deviation: + pairs.append(histPair) + if histValue < min: + min = histValue + if histValue > max: + max = histValue + totalCount = totalCount + histPair.count() + + # If vector, find average direction for pairs in range + if dataType == self.VECTOR(): + avgValue = self.getVectorAvg(pairs) + direction = avgValue.direction() + return ((min, max), direction) + else: + return (min, max) + + def getDeviation(self, dataType, compositeNameUI, histPair): + # Returns a deviation around the median to include in range + if dataType == self.VECTOR(): + mag = histPair.value().magnitude() + if mag < 15: + return 3 + elif mag < 30: + return 5 + else: + return 8 + else: + return 10 + + def UVToMagDir(self, u, v): + # Converts u, v to magnitude, direction + RAD_TO_DEG = 57.296083 + speed = sqrt(u * u + v * v) + dir = atan2(u, v) * RAD_TO_DEG + while dir < 0.0: + dir = dir + 360.0 + while dir >= 360.0: + dir = dir - 360.0 + #print "Speed, dir ", speed, dir + return (speed, dir) + + def 
MagDirToUV(self, mag, dir): + #Converts magnitude, direction to u, v + DEG_TO_RAD = 0.017453292 + uw = sin(dir * DEG_TO_RAD) * mag + vw = cos(dir * DEG_TO_RAD) * mag + return (uw, vw) + + def convertAnalysisList(self, analysisList): + # Replace text string methods with SampleAnalysis methods + newList = [] + for entry in analysisList: + if len(entry) == 2: + element, method = entry + if type(method) == bytes: + exec("method = self."+method) + newList.append((element,method)) + if len(entry) == 3: + element, method, args = entry + if type(method) == bytes: + exec("method = self."+method) + newList.append((element,method, args)) + return newList + + def bin_dict(self, parmHisto, timeRange, componentName): + # Bins for binnedPercent. Bins are inclusive. + return { + "Sky": [(0,89),(90, 100)], + "PoP": [(0,4), (5,14), (15,24), (25,34), (35,44), (45,54), + (55,64), (65,74), (75,84), (85,94), (95,100)], + "LAL": [(1,1), (2,2), (3,3), (4,4), (5,5), (6,6)], + } + + def getBinnedPercent(self, dataType, parmHisto, timeRange, componentName, firstOnly = 0): + "Returns a list of tuples representing bins and corresponding percentages of values in each bin" + binsDict = self.bin_dict(parmHisto, timeRange, componentName) + try: + bins = binsDict[parmHisto.getCompositeNameUI()] + except: + return None + + # Returns a list of tuples + # Each tuple is of the form: + # (lowBin_value, highBin_value, percent) + # lowBin_value and highBin_value are the inclusive values of the bin + # percent is the percentage of data values in that bin + + percents = [] + for bin in bins: + percents.append(0.0) + numBins = len(bins) + + totWeight = 0.0 + for histSample in parmHisto.histoSamples(): + if self.temporalCoverage_flag(parmHisto, timeRange, componentName, + histSample) == 0: + continue + # return None if no histSample pairs + numOfPoints = histSample.numOfPoints() + if numOfPoints == 0: + return None + + validTime = TimeRange.TimeRange(histSample.validTime()) + weight = 
validTime.intersection(timeRange).duration() + weight = weight/float(timeRange.duration()) * 100.0 + totWeight = totWeight + weight + + for histPair in histSample.histogram(): + if dataType == self.SCALAR(): + value = histPair.value().scalar() + elif dataType == self.VECTOR(): + value = histPair.value().magnitude() + count = float(histPair.count()) + percent = count/numOfPoints * weight + + # Find the bin for this histPair value + for i in range(numBins): + low,high = bins[i] + if value >= low and value <= high: + # add to percentage for this bin + percents[i] += percent + + if totWeight == 0.0: + return None + + # Glue the bins and the percents together + newBins = [] + for i in range(numBins): + low, high = bins[i] + newBins.append((low, high, percents[i])) + #print "returning bins", newBins, timeRange + return newBins diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ScalarPhrases.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ScalarPhrases.py index 7feee4abe0..f984860e7e 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ScalarPhrases.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/ScalarPhrases.py @@ -1,2747 +1,2747 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ScalarPhrases.py -# Methods for producing text forecast from SampleAnalysis statistics. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import PhraseBuilder -import types - -class ScalarPhrases(PhraseBuilder.PhraseBuilder): - def __init__(self): - PhraseBuilder.PhraseBuilder.__init__(self) - - ############################################ - ### PUBLIC SCALAR WEATHER ELEMENT PHRASES - ### To override, override the associated method in your text product class. - - ### T - def temp_trend_nlValue(self, tree, node): - # THRESHOLD FOR REPORTING TEMPERATURE TRENDS - return 20.0 - ### Td - ### MaxT and MinT - ### Sky - - def pop_sky_lower_threshold(self, tree, node): - """Do not include an explicit Sky forecast when PoPs are - >= 60% for the majority of the forecast period. 
- """ - # Get all the PoP stats for this component - component = node.getComponent() - compRange = component.getTimeRange() - popStats = tree.stats.get('PoP', compRange, node.getAreaLabel(), - mergeMethod="List") - - # If the PoP stats are missing - if popStats is None or popStats == []: - return 100.0 # keep sky cover as a precaution - - # Initialize a counter to keep track of the number of subperiods - # where the PoP >= 55% (rounds to 60%) - count = 0 - # Look at each PoP value - for (value, timeRange) in popStats: - # See if PoP is 'likely' or 'categorical' - if value >= 55.0: - count += 1 # count this subphrase period - - # Determine the percentage of the time PoP is 'likely' or 'categorical' - percent = 100.0 * float(count)/float(len(popStats)) - # If the majority of the period has 'likely' or 'categorical' PoPs - if percent > 50.0: - val = 59.0 # omit sky cover from the forecast - else: - val = 100.0 # sky cover required - return val - - def clearing_threshold(self, tree, node): - # Threshold for phrases such as: - # mostly cloudy in the morning then clearing - # Used by sky_phrase - return 31 - - def sky_valueList(self, tree, node): - # Phrases for sky given values. Tuples consist of: - # (threshold, dayTime phrase, nightTime phrase) - # Used by skyRange_phrase - # NOTE: If you change these words, you MUST also - # adjust the similarSkyWords_list and preferredSkyWords - # used for sub-phrase combining and reporting sky trends. - return [ - (5, "sunny", "clear"), - (25, "sunny", "mostly clear"), - (50, "mostly sunny", "partly cloudy"), - (69, "partly sunny", "mostly cloudy"), - (87, "mostly cloudy", "mostly cloudy"), - (100, "cloudy", "cloudy"), - ] - - def similarSkyWords_list(self, tree, node): - # The following pairs of sky words will be considered - # "equal" when comparing for phrase combining - # and redundancy - # - # For trends, (e.g. Sunny in the morning then partly cloudy in the afternoon.) 
- # the following transitions are not allowed: - # Day time: - # Sunny <--> mostly sunny - # Mostly sunny <--> partly sunny - # Partly cloudy <--> mostly cloudy - # Night time: - # Clear <--> mostly clear - # Mostly clear <--> partly cloudy - # Mostly cloudy <--> cloudy - # - # In other words these transitions are allowed: - # Day time: - # sunny <--> partly sunny or above - # mostly sunny <--> mostly cloudy or above - # partly sunny <--> sunny or cloudy - # mostly cloudy <--> mostly sunny - # Night time: - # clear can go to partly cloudy or above - # mostly clear <--> mostly cloudy or above - # partly cloudy <--> mostly cloudy or above - # mostly cloudy <--> partly cloudy or below - - dayNight = self.getPeriod(node.getTimeRange(), 1) - if dayNight == self.DAYTIME(): - return [ - ("sunny", "mostly sunny"), - ("mostly sunny", "partly sunny"), - ("partly sunny", "mostly cloudy"), - ("mostly cloudy", "cloudy"), - ] - else: - return [ - ("clear", "mostly clear"), - ("mostly clear", "partly cloudy"), - ("mostly cloudy", "cloudy"), - ] - - def similarSkyWords_flag(self, tree, node, words1, words2): - # Returns 1 if the pair of words is equal or similar - # according to the "similarSkyWords_list" - if words1 == words2: - return 1 - # Check for similarity - for value1, value2 in self.similarSkyWords_list(tree, node): - if (words1 == value1 and words2 == value2) or \ - (words2 == value1 and words1 == value2): - return 1 - return 0 - - def preferredSkyWords(self, tree, node, words1, words2): - # Returns the preferred words given the pair - # of words1, words2 - preferredList = ["mostly sunny", "mostly clear", "cloudy"] - if words1 in preferredList: - return words1 - if words2 in preferredList: - return words2 - return words1 - - def reportIncreasingDecreasingSky_flag(self, tree, node): - # If 1, will use "increasing clouds", "decreasing clouds" - # wording instead of "mostly cloudy becoming sunny" - - # You have 3 options: - # return 0 -- do not use increasing/decreasing 
wording - # return 1 -- use increasing/decreasing wording if applicable - # Use the code shown below to use increasing/decreasing wording - # but avoid repetitive usage. - return 0 - #return 1 - - # Use the following code to avoid redundancy e.g. - # SUNDAY...Increasing clouds. - # SUNDAY NIGHT...Increasing clouds. - # - #If the previous period had increasing or decreasing wording, return 0 - # Otherwise, return 1 - - # Check to see if previous period had increasing or decreasing wording - component = node.getComponent() - prevComp = component.getPrev() - if prevComp is not None: - # Look at the sky_phrase - skyWords = self.findWords( - tree, prevComp, "Sky", node.getAreaLabel(), - phraseList=[node.getAncestor('name')], phraseLevel=1) - if skyWords is not None: - if skyWords.find("increasing") >= 0 or \ - skyWords.find("decreasing") >= 0: - return 0 - return 1 - return 1 - - def reportClearSkyForExtendedPeriod_flag(self, tree, node): - # If 1, will report clear/mostly clear wording for periods that - # exceed 12 hours. Otherwise, will report sunny/mostly sunny. 
- return 1 - - def sky_value(self, tree, node, value, dayNight, returnIndex=0): - # Check for areal coverage term - # Otherwise, access the sky_valueList and return words corresponding to value - if value is None: - return "" - words = self.areal_sky_value(tree, node, value, dayNight) - if words is not None: - # Set to use then connector only - node.set("connector", " then ") - # Return areal wording - if returnIndex: - return words, 0 - else: - return words - sky_valueList = self.sky_valueList(tree, node) - for i in range(len(sky_valueList)): - threshold, dayWords, nightWords = sky_valueList[i] - if value <= threshold: - flag = self.reportClearSkyForExtendedPeriod_flag(tree, node) - if flag == 1: - if dayNight == self.DAYTIME(): - words = dayWords - else: - words = nightWords - else: - if dayNight == self.NIGHTTIME(): - words = nightWords - else: - words = dayWords - if returnIndex: - return words, i - else: - return words - - def areal_sky_flag(self, tree, node): - # Set to 1 if you want to use areal (e.g. patchy clouds, areas of clouds) - # vs. traditional sky wording when appropriate. - # BE SURE AND SET THE "arealSkyAnalysis" flag to 1 in the Definition section! - # You may want to base this decision on the current edit area and/or - # component e.g. 
"Period_1" - return 0 - - def areal_sky_value(self, tree, node, value, dayNight): - if not self.areal_sky_flag(tree, node): - return None - skyBins = tree.stats.get("Sky", node.getTimeRange(), - node.getAreaLabel(), - statLabel="binnedPercent", - mergeMethod="MergeBins") - #print "skyBins", skyBins, node.getTimeRange() - if skyBins is None: - return None - - # Determine percent in highest bin - length = len(skyBins) - highBin = skyBins[length-1] - low, high, highBinPercent = highBin - - # Base wording on high bin percent - words = None - #print "highBinPercent", highBinPercent - for skyPercent, skyWords in self.areal_skyPercentages(tree, node): - #print "skyPercent", skyPercent - if highBinPercent > skyPercent: - words = skyWords - break - #print "words", words - if words is None: - return None # Revert to traditional coverage - - # Check for sky-related Wx - wxStats = tree.stats.get("Wx", node.getTimeRange(), node.getAreaLabel(), - mergeMethod="Average") - if wxStats is None: - return None - # Keep track of skyRelatedWx that we have added to the wording already - # so we don't end up with "Areas of low clouds and fog and fog." - foundWx = [] - for wx in self.areal_skyRelatedWx(tree, node): - # Look for "dense" fog - dense = "" - if wx == "F": - for subkey, rank in wxStats: - if subkey.wxType() == "F" and subkey.intensity() == "+": - dense = "dense " - for subkey, rank in wxStats: - if subkey.wxType() == wx and wx not in foundWx: - foundWx.append(wx) - # Add wording - words = words + " and " + dense + subkey.wxDef().typeDesc(wx).lower() - return words - - def areal_skyPercentages(self, tree, node): - # Used IF the areal_sky_flag is 1. - # Each tuple is a (skyValue, words) pair such that if the - # sky percentage with the highest areal coverage exceeds - # the given skyValue, the associated words are used. 
- return [ - (80, "low clouds"), - (40, "areas of clouds"), - (9, "patchy clouds"), - ] - - def areal_skyRelatedWx(self, tree, node): - # Used IF the areal_sky_flag is 1. - # Weather types that are related to sky cover and will be included in the - # sky phrase if their areal coverage matches the sky areal coverage. - # For example: areas of low clouds and fog in the morning, then mostly sunny. - return ["F", "L"] - - def disableSkyRelatedWx(self, tree, node): - # Disable the areal_skyRelatedWx subkeys for the given node - wxStats = tree.stats.get("Wx", node.getTimeRange(), node.getAreaLabel(), - mergeMethod="Average") - if wxStats is None: - return - disabled = node.getAncestor("disabledSubkeys") - if disabled is None: - disabled = [] - #print "wxStats", wxStats - for wx in self.areal_skyRelatedWx(tree, node): - for subkey, rank in wxStats: - if subkey.wxType() == wx: - disabled.append(subkey) - node.set("disabledSubkeys", disabled) - - def sky_phrase(self): - return { - "setUpMethod": self.sky_setUp, - "wordMethod": self.sky_words, - "phraseMethods": [ - self.checkLocalEffects, - self.combineSky, - self.skySpecialCases, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.sky_timeDescriptorModeration, - self.assembleSubPhrases, - self.postProcessPhrase, - ] - } - def sky_setUp(self, tree, node): - sky = self.ElementInfo("Sky", "List") - elementInfoList = [sky] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def combineSky(self, tree, node): - return self.combineChildren(tree, node, self.combine_sky) - def combine_sky(self, tree, node, subPhrase1, subPhrase2): - skyValue1 = self.getScalarData(tree, subPhrase1, "Sky", "MinMax") - skyValue2 = self.getScalarData(tree, subPhrase2, "Sky", "MinMax") - if skyValue1 is None and skyValue2 is None: - return 1, None - if skyValue1 is None or skyValue2 is None: - return 0, None - timeRange = node.getTimeRange() - if timeRange.duration() > 12*3600: - 
dayNight = -1 - else: - dayNight = self.getPeriod(timeRange, 1) - words1 = self.sky_value(tree, subPhrase1, self.getValue(skyValue1), dayNight) - words2 = self.sky_value(tree, subPhrase2, self.getValue(skyValue2), dayNight) - if self.similarSkyWords_flag(tree, subPhrase1, words1, words2): - min1, max1 = skyValue1 - min2, max2 = skyValue2 - newVal = (min(min1, min2), max(max1, max2)) - elementInfoList = node.get("elementInfoList") - newSubPhrase = self.combine2SubPhrases( - tree, node, subPhrase1, subPhrase2, elementInfoList, newVal) - return 1, newSubPhrase - else: - return 0, None - - def skySpecialCases(self, tree, node): - # If phrase has exactly 2 subphrases, - # Look for clearing. - # If not, then if reportIncreasingDecreasing, - # report increasing/decreasing wording. - subPhrases = node.get("childList") - if len(subPhrases) == 2: - words = None - skyValue1 = self.getScalarData(tree, subPhrases[0], "Sky", "Average") - skyValue2 = self.getScalarData(tree, subPhrases[1], "Sky", "Average") - # Look for clearing - clearing_threshold = self.clearing_threshold(tree, node) - if skyValue1 > skyValue2 and skyValue2 <= clearing_threshold and skyValue1 > clearing_threshold: - period1Phrase = self.timePeriod_descriptor(tree, node, subPhrases[0].getTimeRange()) - period1Phrase = self.addSpace(period1Phrase, "leading") - timeRange = node.getTimeRange() - if timeRange.duration() > 12*3600: - dayNight = -1 - else: - dayNight = self.getPeriod(timeRange, 1) - words1 = self.sky_value(tree, subPhrases[0], skyValue1, dayNight) - words = words1 + period1Phrase + " then clearing" - else: - reportIncreasingDecreasing = self.reportIncreasingDecreasingSky_flag(tree, node) - if reportIncreasingDecreasing: - if skyValue2 > skyValue1: - words = "increasing clouds" - else: - words = "decreasing clouds" - if words is not None: - # End processing of the phrase; we are done - node.set("doneList", node.get("methodList")) - return self.setWords(node, words) - return self.DONE() - - def 
sky_timeDescriptorModeration(self, tree, node): - # If only two subphrases, turn off second time descriptor - # - childList = node.get("childList") - length = len(childList) - # Check for words - if length > 0: - words = childList[0].get("words") - if words is None: - return - else: - return self.DONE() - if length == 2: - words0 = childList[0].get("words") - words1 = childList[1].get("words") - if words0 != "" and words1 != "": - # Neither is null - flag0 = 1 - flag1 = 0 - else: # One is null - flag0 = 1 - flag1 = 1 - if words0 == "": # First sub-phrase is null - childList[1].set("words", "becoming " + words1) - childList[0].set("timeDescFlag", flag0) - childList[1].set("timeDescFlag", flag1) - return self.DONE() - - def sky_words(self, tree, node): - # Create sky phrase. - statDict = node.getStatDict() - sky = self.getStats(statDict, "Sky") - if sky is None: - return self.setWords(node, "") - - # Check Pop i.e. don't report sky if we can assume overcast - threshold = self.pop_sky_lower_threshold(tree, node) - if self.lowPop_flag(tree, node, threshold) == 0: - return self.setWords(node, "") - - sky = self.getValue(sky) - timeRange = node.getTimeRange() - if timeRange.duration() > 12*3600: - words = self.getSkyDiurnalWords(tree, node) - if words is not None: - return self.setWords(node, words) - dayNight = -1 - else: - dayNight = self.getPeriod(timeRange, 1) - words = self.sky_value(tree, node, sky, dayNight) - return self.setWords(node, words) - - def getSkyDiurnalWords(self, tree, node): - # Produce words such as - # xx in the night and morning otherwise yy - # where xx is the sky value for the night and morning - # and yy is the sky value otherwise - # - # If the night and morning words are the same as the - # evening and afternoon, (no diurnal pattern), - # return None - - # If we have not tested for diurnal sky and wx, return - if "DiurnalSkyWx" not in self.periodCombining_elementList(tree, node): - return None - - wordList = [] - index = 0 - trList = 
self.divideRange(node.getTimeRange(), 6) - dayNight = self.getPeriod(trList[0], 1) - # Need to save timeRange so we can re-set it for determining - # words for sub-ranges - saveTR = node.getTimeRange() - # Only need to use first 12 hours to check for similarity - for tr in trList[0:2]: - sky = tree.stats.get("Sky", tr, node.getAreaLabel(), - mergeMethod="Average") - sky = self.getValue(sky) - node.timeRange = tr - result = self.sky_value(tree, node, sky, dayNight) - wordList.append(result) - #print "\nsky, tr", sky, tr - #print "words", result - index += 1 - # Re-set timeRange - node.timeRange = saveTR - #print "\nwordList", wordList - if wordList[0] == wordList[1]: - return None - if dayNight == self.DAYTIME(): - # First period is the morning - words1 = wordList[0] - words2 = wordList[1] - descriptor = " in the morning and night" - else: - # First period is the evening - words1 = wordList[1] - words2 = wordList[0] - descriptor = " in the night and morning" - words2 = words2.replace("sunny", "clear") - words = words1 + descriptor + ", otherwise " + words2 - #print "returning", words - return words - - def simple_sky_phrase(self): - return { - "phraseMethods": [ - self.simple_sky_words, # phrase.words - ], - } - def simple_sky_words(self, tree, phrase): - # Create sky phrase. - - # If no information, do not report sky condition - timeRange = phrase.getTimeRange() - #print "Getting sky" - skyStats = tree.stats.get("Sky", timeRange, phrase.getAreaLabel(), mergeMethod="List") - #print "Sky ", skyStats - statsByRange = self.makeRangeStats(tree, self.SCALAR(), skyStats, timeRange) - #print "Sky ", statsByRange - if statsByRange is None: - return self.setWords(phrase, "") - - # Check Pop i.e. 
don't report sky if we can assume overcast - threshold = self.pop_sky_lower_threshold(tree, phrase) - if self.lowPop_flag(tree, phrase, threshold) == 0: - return self.setWords(phrase, "") - - reportIncreasingDecreasing = self.reportIncreasingDecreasingSky_flag(tree, phrase) - - # Get values for each part of time range - if len(statsByRange) == 1: - skyTime1, period1 = statsByRange[0] - skyTime2, period2 = statsByRange[0] - else: - skyTime1, period1 = statsByRange[0] - skyTime2, period2 = statsByRange[1] - - skyTime1 = self.getValue(skyTime1) - skyTime2 = self.getValue(skyTime2) - - dayNight1 = self.getPeriod(period1, 1) - dayNight2 = self.getPeriod(period2, 1) - - # Determine category and phrase for skyTime1 and skyTime2 - index = 1 - for skyValue, dayNight in [(skyTime1, dayNight1), (skyTime2, dayNight2)]: - skyPhrase, valueIndex = self.sky_value(tree, phrase, skyValue, dayNight, 1) - exec "words"+`index`+"=skyPhrase" - exec "index"+`index`+"=valueIndex" - index = index+1 - - period1Phrase = self.timePeriod_descriptor(tree, phrase, period1) - period1Phrase = self.addSpace(period1Phrase, "leading") - - # Look for clearing - clearing_threshold = self.clearing_threshold(tree, phrase) - if skyTime1 > skyTime2 and skyTime2 <= clearing_threshold and skyTime1 > clearing_threshold: - return self.setWords(phrase, words1 + period1Phrase + " then clearing") - - # See if skyTime1 is different from skyTime2 by more than - # one category of sky values - if abs(index1 - index2) > self.sky_index_difference(tree, phrase): - if reportIncreasingDecreasing == 1: - if skyTime2 > skyTime1: - return self.setWords(phrase, "increasing clouds") - else: - return self.setWords(phrase, "decreasing clouds") - else: - return self.setWords(phrase, words1 + period1Phrase + " then becoming " + words2) - # Report Average value - else: - skyValue = self.average(skyTime1, skyTime2) - if timeRange.duration() > 12*3600: - dayNight = -1 - else: - dayNight = self.getPeriod(timeRange, 1) - words = 
self.sky_value(tree, phrase, skyValue, dayNight) - return self.setWords(phrase, words) - - # PoP - def wxQualifiedPoP_flag(self, tree, node): - # If 1, PoP phrases will be qualified with the weather type - # E.g. "Chance of rain and snow 20 percent." instead of - # "Chance of precipitation 20 percent." - return 1 - - def popMax_phrase(self): - return { - "setUpMethod": self.popMax_setUp, - "wordMethod": self.popMax_words, - "phraseMethods": self.standard_phraseMethods() - } - def popMax_setUp(self, tree, node): - # NOTE: The method is set to "Average" instead of "List" so - # that the PoP phrase will always cover the full period. - # It doesn't matter what method (other than List) we choose - # since the popMax_words method gets its PoP value directly from - # the "matchToWx" method. - elementInfoList = [self.ElementInfo("PoP", "Average")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - - def popMax_words(self, tree, node) : - "Create phrase Probability of Precipitation for maximum value" - # Wait for weather phrase to complete - wxWords = "" - attrDict = {} - if self.wxQualifiedPoP_flag(tree, node) == 1: - compArea = node.getComponent().getAreaLabel() - wxWords, attrDict = self.findWords(tree, node, "Wx", [node.getAreaLabel(), compArea], - phraseList=["weather_phrase", "skyPopWx_phrase"], - attributes=['reportedRankList']) - if wxWords is None: - return - #print "wxWords", wxWords - if wxWords == "": - #print "setting popMax to Null" - return self.setWords(node, "null") - - #print "PopMax", node.getAreaLabel(), wxWords - pop = self.matchToWx(tree, node, "PoP") - #print " Pop", pop - if pop is None: - return self.setWords(node, "") - - # Check pop thresholds - pop = self.getValue(pop, "Max") - if pop < self.pop_lower_threshold(tree, node) or \ - pop > self.pop_upper_threshold(tree, node): - return self.setWords(node, "") - - popType = self.getPopType(tree, node, pop, wxWords, attrDict) - node.set("popType", popType) 
def getPopStr(self, tree, node, pop):
    """Return the PoP value worded for the phrase, capping at "near 100".

    pop may be a float; it is truncated to an int first. Values of 100
    or more are worded "near 100" rather than as an exact number.
    """
    pop = int(pop)
    if pop >= 100:
        popWords = "near 100"
    else:
        # was `pop` (Py2 backtick repr, removed in Py3); str() is
        # byte-identical for ints
        popWords = str(pop)
    return popWords
chance of rain, chance of snow - wxTypes = [] - if attrDict.has_key("reportedRankList"): - rankList = attrDict["reportedRankList"] - for subkey, rank in rankList: - wxTypes.append(subkey.wxType()) - generalTypes = ["IP", "ZL", "ZR", "ZF", "ZY"] - for general in generalTypes: - if general in wxTypes: - return "precipitation" - descriptors = { - "R": "rain", - "RW": "showers", - "S": "snow", - "SW": "snow", - "T": "thunderstorms", - } - popTypes = [] - for wxType in wxTypes: - if wxType in ["R", "S", "RW", "SW", "T"]: - desc = descriptors[wxType] - if desc not in popTypes: - popTypes.append(desc) - if len(popTypes) > 1: - popType = "precipitation" - elif len(popTypes) == 1: - popType = popTypes[0] - return popType - - # This version will report only the weather types that - # match the reported PoP -## def getPopType(self, tree, node, pop, wxWords, attrDict): -## popType = "precipitation" -## if self.wxQualifiedPoP_flag(tree, node) == 1: -## ## Need to find weather type(s) from phrase. -## ## "wxWords" is the concatenation of all weather phrases -## ## for this component. -## ## Returns "popType" e.g. 
chance of rain, chance of rain and snow -## wxTypes = [] -## if attrDict.has_key("reportedRankList"): -## rankList = attrDict["reportedRankList"] -## for subkey, rank in rankList: -## # Check the coverage against the reported PoP -## covLow, covHigh = self.coveragePoP_value(subkey.coverage()) -## if covHigh >= pop: -## wxTypes.append(subkey.wxType()) -## popType = None -## generalTypes = ["IP", "ZL", "ZR", "ZF", "ZY"] -## for general in generalTypes: -## if general in wxTypes: -## popType = "precipitation" -## if popType is None: -## rain = 0 -## snow = 0 -## thunder = 0 -## showers = 0 -## snowShowers = 0 -## rainShowers = 0 -## if "R" in wxTypes: -## rain = 1 -## if "S" in wxTypes: -## snow = 1 -## if "RW" in wxTypes: -## showers = 1 -## if "SW" in wxTypes: -## snowShowers = 1 -## if "T" in wxTypes: -## thunder = 1 -## if showers and not snowShowers: -## rainShowers = 1 -## if (rain or rainShowers or thunder) and snow: -## popType = "precipitation" -## else: -## if snow or snowShowers: -## if rain or rainShowers: -## if wxWords.find(" or ") > -1: -## popType = "rain or snow" -## else: -## popType = "rain and snow" -## else: -## popType = "snow" -## elif rain and not rainShowers: -## popType = "rain" -## elif showers: -## popType = "showers" -## if thunder: -## popType = "showers and thunderstorms" -## elif thunder: -## popType = "thunderstorms" -## else: -## popType = "precipitation" -## if popType is None: -## popType = "precipitation" -## return popType - - def areal_or_chance_pop_descriptor(self, tree, node, key, elementName): - # Stats: dominantWx - # Returns descriptor for a pop phrase based on Wx - # Returns areal coverage of precipitation OR - # chance of precipitation - # Get weather. Determine if ANY terms in the period are convective. If so, - # change the phrase to "areal coverage". This is an Amarillo WFO - # preference. 
- wxPhrase = self.findWords(tree, node, "Wx", node.getAreaLabel(), - phraseList=["weather_phrase", "skyPopWx_phrase"]) - if wxPhrase is None: - return None - if wxPhrase == "": - return "chance of" - use_areal = 0 - - if wxPhrase.find("isolated") >= 0: - use_areal = 1 - if wxPhrase.find("scattered") >= 0: - use_areal = 1 - if wxPhrase.find("numerous") >= 0: - use_areal = 1 - if wxPhrase.find("widespread") >= 0: - use_areal = 1 - - if use_areal == 1: - return "areal coverage of" - else: - return "chance of" - - def allAreal_or_chance_pop_descriptor(self, tree, node, key, elementName): - # Stats: rankedWx - # Returns descriptor for a pop phrase based on Wx - # Returns areal coverage of precipitation OR - # chance of precipitation - # Get weather. Determine if ALL terms in the period are convective. If so, - # change the phrase to "areal coverage". This is an Amarillo WFO - # preference. - statsByRange = tree.stats.get( - "Wx", node.getTimeRange(), node.getAreaLabel(), mergeMethod="List") - if statsByRange is None: - return "chance of" - use_areal = 1 - - for rankList, subRange in statsByRange: - subkeys = self.getSubkeys(rankList) - for subkey in subkeys: - if self.precip_related_flag(tree, node, subkey): - cov = subkey.coverage() - if cov not in ["Iso", "Sct", "Num", "Wide", ""]: - use_areal = 0 - break - - if use_areal == 1: - return "areal coverage of" - else: - return "chance of" - - # Temperature worded phrases: - # HIGHS IN THE MIDDLE 80S - # HIGHS IN THE MIDDLE 80S TO LOWER 90S - # using temp_phrase_threshold - - def highs_phrase(self): - return { - "setUpMethod": self.highs_setUp, - "wordMethod": self.temp_words, - "phraseMethods": self.standard_phraseMethods() - } - def highs_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("MaxT", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def lows_phrase(self): - return { - "setUpMethod": self.lows_setUp, - "wordMethod": self.temp_words, - 
def lows_setUp(self, tree, node):
    """Set up the lows phrase: MinT sub-phrases with the scalar connector."""
    infoList = [self.ElementInfo("MinT", "List")]
    self.subPhraseSetUp(tree, node, infoList, self.scalarConnector)
    return self.DONE()

def temp_words(self, tree, node):
    """Produce the worded temperature phrase for MaxT or MinT."""
    tempStats = self.getTempStats(tree, node)
    if tempStats is None:
        # No usable stats (e.g. highs at night): empty phrase.
        return self.setWords(node, "")
    element = node.getAncestor("elementName")
    phraseWords = self.getTempPhrase(tree, node, tempStats, element)
    return self.setWords(node, phraseWords)

def tempDiff_threshold(self, tree, node):
    """Max min/max spread before reporting actual values e.g. 23 to 29."""
    return 4
`maxVal` - - # set up for "lower," "mid," or "upper" wording - # Modulus (%) gets tricky below zero so have to take - # modulus of abs(temperature) - decadeMaxStr = self.getDecadeStr(maxVal) - decadeMinStr = self.getDecadeStr(minVal) - digitMax = abs(maxVal) % 10 - digitMin = abs(minVal) % 10 - boundaries = self.tempPhrase_boundary_dict(tree, node) - digitMinStr = self.getDigitStr(digitMin, boundaries) - digitMaxStr = self.getDigitStr(digitMax, boundaries) - lowerMax = boundaries["lower"][1] - upperMin = boundaries["upper"][0] - if decadeMinStr == decadeMaxStr: - # this solves the problem of returning "...IN THE LOWER 60s TO LOWER 60s..." - if digitMinStr == digitMaxStr: - return "in the " + digitMinStr + " " + decadeMinStr - - # shortens a return of "...lower to upper..." to "...in the xxS" - elif digitMin <= lowerMax and digitMax >= upperMin: - return "in the " + decadeMaxStr - - else: - return "in the " + digitMinStr + " to " + digitMaxStr + " " + decadeMaxStr - elif digitMinStr == digitMaxStr: - # return 50s TO LOWER 60s (not LOWER 50s TO LOWER 60s) - return "in the " + decadeMinStr + " to " + digitMaxStr + " " + decadeMaxStr - else: # different decade - if maxVal >= 100 and minVal < 100: # UPPER 80s to 102 - return digitMinStr + " " + decadeMinStr + " to " + str(maxVal) - # return NEAR 60 (not UPPER 50s TO LOWER 60s) - elif digitMin >= upperMin and digitMax <= lowerMax and maxVal - minVal <= 10: - roundedMax = int(self.round(maxVal, "Nearest", 10)) - return self.constructTempException("near %max", minVal, roundedMax) - # return 50s and 60s (not lower 50s to upper 60s) - elif digitMin <= lowerMax and digitMax >= upperMin: - return "in the " + decadeMinStr + " to " + decadeMaxStr - digitMinPhrase = digitMinStr + " " + decadeMinStr - digitMaxPhrase = digitMaxStr + " " + decadeMaxStr - return "in the " + digitMinPhrase + " to " + digitMaxPhrase - - def constructTempException(self, phrase, minVal, maxVal): - phrase = phrase.replace("%min", `minVal`) - phrase = 
def getDecade(self, value):
    """Return the decade containing value, signed: 87 -> 80, -15 -> -10."""
    # Take the magnitude first: Python's modulus/division of negatives
    # would otherwise decade away from zero. '//' matches Py2 int '/'
    # exactly and stays correct under Py3 (where '/' is true division).
    decade = abs(int(value)) // 10 * 10
    if value < 0:
        decade = -decade
    return decade

def getDecadeStr(self, value):
    """Word the decade of value: "single digits", "teens", "80s", etc."""
    decade = self.getDecade(value)
    if decade == 0:
        return "single digits"
    elif decade == 10:
        return "teens"
    elif decade == -10:
        return "teens below zero"
    else:
        # was `decade` (backtick repr); str() is identical for ints
        return str(decade) + "s"

def getDigitStr(self, value, boundaries):
    """Return the boundaries key ("lower"/"mid"/"upper") whose inclusive
    range contains value.

    NOTE(review): returns None if value is outside every range; callers
    appear to pass digits 0-9, which the default boundary dict fully
    covers -- confirm before passing anything else.
    """
    for key in boundaries.keys():
        lower, upper = boundaries[key]
        if value >= lower and value <= upper:
            return key
- # 5 above - - # Both 100 and above - [(100,200), (100,200), "around %min", "%min to %max"], - # Min in 90's, Max 100 and above - [(90, 99), (100,200), "", "%min to %max"], - - # Handle lower temperatures - [(1, 19), (1, 29), "around %min", "%min to %max"], - # Handle zero temperatures - [(0, 0), (0, 29), "near zero", "zero to %zeroPhraseMax"], - [(-200, 0), (0, 0), "near zero", "%zeroPhraseMin to zero"], - - # Min below zero, Max above zero - [(-200,-1), (1,200), "near zero", "%zeroPhraseMin to %zeroPhraseMax zero"], - # Both below zero - #[(-200,-1), (-200,-1), "%zeroPhraseMin","%zeroPhraseMax to %zeroPhraseMin zero"], - [(-200,-1), (-200,-1), "around %zeroPhraseMin","%zeroPhraseMax to %zeroPhraseMin zero"], - -## # Chris Gibson's version Comment out the above exception and use this instead: -## #[(-200,-1), (-200,-1), "near %zeroPhraseMax","%zeroPhraseMax to %zeroPhraseMin zero"] - - # Around phrases fix from Steve Nelson - [(20, 20), (20, 20), "around %min", "%min to %max"], - [(30, 30), (30, 30), "around %min", "%min to %max"], - [(40, 40), (40, 40), "around %min", "%min to %max"], - [(50, 50), (50, 50), "around %min", "%min to %max"], - [(60, 60), (60, 60), "around %min", "%min to %max"], - [(70, 70), (70, 70), "around %min", "%min to %max"], - [(80, 80), (80, 80), "around %min", "%min to %max"], - [(90, 90), (90, 90), "around %min", "%min to %max"], - - ] - - def tempPhrase_boundary_dict(self, tree, node): - return { - "lower": (0,3), - "mid": (4,6), - "upper": (7,9), - } - - # Temperature worded phrases: - # HIGHS 45 TO 50 - # using range_nlValue for "MinT", "MaxT" - - def highs_range_phrase(self): - return { - "setUpMethod": self.highs_setUp, - "wordMethod": self.tempRange_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def lows_range_phrase(self): - return { - "setUpMethod": self.lows_setUp, - "wordMethod": self.tempRange_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def tempRange_words(self, tree, node) : - "Create 
phrase for Min or Max Temperature" - stats = self.getTempStats(tree, node) - if stats is None: - return self.setWords(node, "") - elementName = node.getAncestor("elementName") - words = self.getTempRangePhrase(tree, node, stats, elementName) - return self.setWords(node, words) - - def getTempRangePhrase(self, tree, node, temp, elementName): - connector = self.value_connector(tree, node, elementName, elementName) - min, max = self.getValue(temp, "MinMax") - - decadeMax = self.getDecade(max) - digitMax = max % 10 - decadeMin = self.getDecade(min) - digitMin = min % 10 - diff = abs(max - min) - - # "Around" phrases - # e.g. a range of 19-21 --> "highs around 20" - around = self.addSpace(self.phrase_descriptor(tree, node, "around", elementName)) - if 0 < diff <= 3 and (digitMax == 0 or digitMax == 1): - if decadeMax <= 10: - decadeMax = self.getZeroPhrase(decadeMax) - else: - decadeMax = `decadeMax` - return around + decadeMax - - # Report the range - min = int(min) - max = int(max) - if min == max: - # Adjust descriptor e.g. 
def getZeroPhrase(self, val, addZero=0):
    """Word a temperature relative to zero.

    E.g. -12 -> "12 below", 7 -> "7 above", 0 -> "zero".
    With addZero=1 the word "zero" is appended: "7 above zero".
    """
    if val == 0:
        return "zero"
    if val < 0:
        # was `abs(val)` (backtick repr); str() is identical for ints
        phrase = str(abs(val)) + " below"
    else:
        phrase = str(val) + " above"
    if addZero == 1:
        phrase = phrase + " zero"
    return phrase
- return 10 - #return 5 - - def extended_highs_phrase(self): - return { - "setUpMethod": self.highs_setUp, - "wordMethod": self.extended_temp_words, - "phraseMethods": self.standard_phraseMethods(), - } - def extended_lows_phrase(self): - return { - "setUpMethod": self.lows_setUp, - "wordMethod": self.extended_temp_words, - "phraseMethods": self.standard_phraseMethods(), - } - - def extended_temp_words(self, tree, node) : - "Create phrase for Min or Max Temperature" - stats = self.getTempStats(tree, node) - if stats is None: - return self.setWords(node, "") - elementName = node.get("elementName") - if elementName == "MaxT": - mergeMethod = "Max" - else: - mergeMethod = "Min" - temp = int(self.getValue(stats, mergeMethod)) - words = self.getExtendedTempPhrase(tree, node, temp) - return self.setWords(node, words) - - def getExtendedTempPhrase(self, tree, node, temp): - # Temperatures above 99 - # Give exact value - if temp > 99: - if node.getIndex() == 0: - parent = node.getParent() - descriptor = parent.get("descriptor") - descriptor = descriptor.replace("s ", " ") - parent.set("descriptor", descriptor) - return `int(temp)` - - # Temperatures below 10 - # Build and return special phrases - if temp < -27: - return "25 below to 35 below" - elif temp < -22: - return "20 below to 30 below" - elif temp < -17: - return "15 below to 25 below" - elif temp < -12: - return "10 below to 20 below" - elif temp < -7: - return "5 below to 15 below" - elif temp < -2: - return "zero to 10 below" - elif temp < 3: - return "5 below zero to 5 above" - elif temp < 8: - return "zero to 10 above" - elif temp < 10: - return "5 to 15" - - # Determine modifier for temperature: around, lower, mid, upper - decade = self.getDecade(temp) - digit = temp % 10 - - range = self.extended_temp_range(tree, node) - if range == 10: - if digit >= 0 and digit <= 2: - phrase = self.getExtTemp(decade-5, decade+5) - elif digit >= 3 and digit <= 7: - if decade == 10: - phrase = "in the " + "teens" - elif 
def getExtTemp(self, val1, val2):
    """Word an extended-forecast temperature range "val1 to val2",
    appending " below" to negative endpoints.

    NOTE(review): a negative endpoint keeps its minus sign AND gets
    " below" (e.g. -5 -> "-5 below"). The visible caller
    (getExtendedTempPhrase) never passes negatives here because
    sub-10 temperatures are worded earlier, so this quirk is preserved
    as-is -- confirm before reusing with negative inputs.
    """
    # was `val1`/`val2` (backtick repr); str() is identical for ints
    v1 = str(val1)
    if val1 < 0:
        v1 = v1 + " below"
    v2 = str(val2)
    if val2 < 0:
        v2 = v2 + " below"
    return v1 + " to " + v2
- maxT = tree.stats.get("MaxT", timeRange, areaLabel, - mergeMethod="MinMax") - minT = tree.stats.get("MinT", timeRange, areaLabel, - mergeMethod="MinMax") - if maxT is None and minT is None: - return None - if maxT is None: - if dayValue == self.DAYTIME(): - return None - else: - return minT - if minT is None: - if dayValue == self.NIGHTTIME(): - return None - else: - return maxT - # Check for case of MaxT < MinT - max = self.getValue(maxT, "Max") - min = self.getValue(minT, "Max") - if max < min: - temp = maxT - maxT = minT - minT = temp - if dayValue == self.DAYTIME(): - return maxT - else: - return minT - - def temp_trends_addToPhrase_flag(self, tree, node): - # If set to 0, will report: - # "Temperatures falling in the afternoon." - # If set to 1: - # "Temperatures falling to the 50's in the afternoon." - # If set to 2: - # "Temperatures falling to the lower 50's in the afternoon." - return 2 - - def temp_trends(self): - return { - "setUpMethod": self.temp_trends_setUp, - "wordMethod": self.temp_trends_words, - "phraseMethods": self.standard_phraseMethods(), - } - def temp_trends_setUp(self, tree, node): - duration = node.getTimeRange().duration() - if duration > 12*3600: - return self.setWords(node, "") - timeRange = node.getTimeRange() - dayNight = self.getPeriod(timeRange, 1) - if dayNight == self.NIGHTTIME(): - eleInfo = self.ElementInfo("MinT", "Min") - else: - eleInfo = self.ElementInfo("MaxT", "Max") - elementInfoList = [eleInfo] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - return self.DONE() - - def temp_trends_words(self, tree, node): - "Look for sharp temperature increases or decreases" - # Determine if temps rise or fall in a non-diurnal way. 
- # MaxT/MinT temps -- min/max tuple for each - # Hourly Temp Stats: list of hourly temperature tuples - # Each tuple has: - # -- average temperature value - # -- hour of occurrence - # For a Daytime period, compare MaxT to T for the last grid - # of the period and report "temperatures falling in the afternoon" - # if the difference exceeds the temp_trend_nlValue - # For a Nighttime period, compare MinT to T for the last grid - # of the period and report "temperatures rising overnight" - # if the difference exceeds the temp_trend_threshold. - - statDict = node.getStatDict() - timeRange = node.getTimeRange() - tStats = tree.stats.get("T", timeRange, node.getAreaLabel(), - mergeMethod="List") - if tStats is None: - return self.setWords(node, "") - tStats, subRange = tStats[0] - if tStats is None: - return self.setWords(node, "") - dayNight = self.getPeriod(timeRange,1) - trend_nlValue = self.temp_trend_nlValue(tree, node) - if dayNight == self.DAYTIME(): - maxT = self.getStats(statDict, "MaxT") - if maxT is None: - return self.setWords(node, "") - maxT = self.getValue(maxT) - threshold = self.nlValue(trend_nlValue, maxT) - else: - minT = self.getStats(statDict, "MinT") - if minT is None: - return self.setWords(node, "") - minT = self.getValue(minT) - threshold = self.nlValue(trend_nlValue, minT) - halfWay = len(tStats)/2 - - index = len(tStats)-1 - while index >= halfWay: - tempValue, curHour = tStats[index] - if tempValue is None: - index = index - 1 - continue - - if dayNight == self.DAYTIME(): - if tempValue <= (maxT - threshold): - toPhrase = self.getToPhrase(tree, node, tempValue) - words = "temperatures falling" + toPhrase + " in the afternoon" - return self.setWords(node, words) - else: - if tempValue >= (minT + threshold): - toPhrase = self.getToPhrase(tree, node, tempValue) - words = "temperatures rising" + toPhrase + " after midnight" - return self.setWords(node, words) - break - return self.setWords(node, "") - - def getToPhrase(self, tree, node, 
tempValue): - flag = self.temp_trends_addToPhrase_flag(tree, node) - if flag > 0: - if flag > 1: - rangeStr = self.getDigitStr( - abs(tempValue)%10, self.tempPhrase_boundary_dict(tree, node)) - rangeStr += " " - else: - rangeStr = "" - return " into the " + rangeStr + self.getDecadeStr(tempValue) - else: - return "" - -## def temp_trends_words(self, tree, node): -## "Look for sharp temperature increases or decreases" - -## # Here is an alternative temp_trends method provided by Tom Spriggs. -## # If a 12-hour period, it looks at the 12, 3, and 5 o'clock grids -## # (both am/pm depending on time of day) and verifies the trend (either -## # going down or up) and then looks at the difference between the -## # 5 o'clock grid and the MaxT/MinT grid. It only needs to look at the -## # 5 o'clock grid since that is the last one in the 12-hour period, -## # and if it is going to trip the threshold anywhere, it will be on that -## # hour since if you have an unusual temperature trend, it will peak at -## # that grid. If less than a 12-hour period, then the 3 times that it -## # checks will be adjusted accordingly inside the smaller time range. 
-## statDict = node.getStatDict() -## timeRange = node.getTimeRange() -## tStats = tree.stats.get("T", timeRange, node.getAreaLabel(), -## mergeMethod="List") -## if tStats is None: -## return self.setWords(node, "") -## tStats, subRange = tStats[0] -## if tStats is None: -## return self.setWords(node, "") -## dayNight = self.getPeriod(timeRange,1) -## trend_nlValue = self.temp_trend_nlValue(tree, node) -## if dayNight == self.DAYTIME(): -## maxT = self.getStats(statDict, "MaxT") -## if maxT is None: -## return self.setWords(node, "") -## maxT = self.getValue(maxT) -## threshold = self.nlValue(trend_nlValue, maxT) -## else: -## minT = self.getStats(statDict, "MinT") -## if minT is None: -## return self.setWords(node, "") -## minT = self.getValue(minT) -## threshold = self.nlValue(trend_nlValue, minT) - -## if len(tStats) >= 6: -## halfWay = len(tStats) - 6 -## quarterWay = len(tStats) - 3 -## endPoint = len(tStats) - 1 -## elif len(tStats) >= 4: -## halfWay = 0 -## quarterWay = len(tStats) - 3 -## endPoint = len(tStats) - 1 -## elif len(tStats) == 1: -## halfWay = 0 -## quarterWay = 0 -## endPoint = 0 -## else: -## halfWay = 0 -## quarterWay = 1 -## endPoint = len(tStats) - 1 - -## tempValue_halfWay, curHour1 = tStats[halfWay] -## tempValue_quarterWay, curHour2 = tStats[quarterWay] -## tempValue_endPoint, curHour3 = tStats[endPoint] - -## if tempValue_halfWay is None: -## return self.setWords(node, "") -## if tempValue_quarterWay is None: -## return self.setWords(node, "") -## if tempValue_endPoint is None: -## return self.setWords(node, "") - -## words = "" -## if dayNight == self.DAYTIME(): -## if tempValue_quarterWay < tempValue_halfWay: -## if tempValue_endPoint <= tempValue_quarterWay: -## if tempValue_endPoint <= (maxT - threshold): -## # large temp fall (i.e. >= threshold) -## toPhrase = self.getToPhrase(tree, node, tempValue_endPoint) -## mxPhrase = self.getToPhrase(tree, node, maxT) -## if (toPhrase == mxPhrase): -## # avoid saying--"high in the upper 50s. 
##                            # avoid saying--"high in the upper 50s. temperature falling
##                            # into the 50s in the afternoon."
##                            # instead say--"high in the upper 50s. temperature falling
##                            # through the 50s in the afternoon."
##                            toPhrase = " through" + toPhrase[5:]
##                        if len(tStats) <= 6: #assumes already in the afternoon
##                            words = "temperature falling" + toPhrase + " by late afternoon"
##                        else:
##                            words = "temperature falling" + toPhrase + " in the afternoon"
##                    elif tempValue_endPoint < maxT:
##                        # small temp fall (i.e. < threshold)
##                        if len(tStats) <= 6: #assumes already in the afternoon
##                            words = "temperature steady or slowly falling through late afternoon"
##                        else:
##                            words = "temperature steady or slowly falling in the afternoon"
##            else:
##                if tempValue_quarterWay > tempValue_halfWay:
##                    if tempValue_endPoint >= tempValue_quarterWay:
##                        if tempValue_endPoint >= (minT + threshold):
##                            # large temp rise (i.e. >= threshold)
##                            toPhrase = self.getToPhrase(tree, node, tempValue_endPoint)
##                            mnPhrase = self.getToPhrase(tree, node, minT)
##                            if (toPhrase == mnPhrase):
##                                # avoid saying--"low in the lower 30s. temperature rising
##                                # into the 30s after midnight."
##                                # instead say--"low in the lower 30s. temperature rising
##                                # through the 30s after midnight."
##                                toPhrase = " through" + toPhrase[5:]
##                            if len(tStats) <= 6: #assumes already after midnight
##                                words = "temperature rising" + toPhrase + " through sunrise"
##                            else:
##                                words = "temperature rising" + toPhrase + " after midnight"
##                        elif tempValue_endPoint > minT:
##                            # small temp rise (i.e. < threshold)
##                            if len(tStats) <= 6: #assumes already after midnight
##                                words = "temperature steady or slowly rising through sunrise"
##                            else:
##                                words = "temperature steady or slowly rising after midnight"

##        return self.setWords(node, words)



    def reportTrends(self):
        # Phrase definition: compares today's MaxT (or tonight's MinT) to the
        # value 24 hours earlier and words the change (e.g. "warmer", "much colder").
        return {
            "setUpMethod": self.reportTrends_setUp,
            "wordMethod": self.reportTrends_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def reportTrends_setUp(self, tree, node):
        # Choose the element to trend on: MinT for night periods, MaxT for day.
        timeRange = node.getTimeRange()
        dayNight = self.getPeriod(timeRange, 1)
        if dayNight == self.NIGHTTIME():
            eleInfo = self.ElementInfo("MinT", "Min")
        else:
            eleInfo = self.ElementInfo("MaxT", "Max")
        # NOTE(review): elementName is assigned but never used in this method.
        elementName = "MaxT"
        elementInfoList = [eleInfo]
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        node.set("descriptor", "")
        return self.DONE()

    def reportTrends_words(self, tree, node):
        "Compare current analysis to previous analysis for trends"
        elementName = node.get("elementName")
        statDict = node.getStatDict()
        curStats = self.getStats(statDict, elementName)
        if curStats is None:
            return self.setWords(node, "")
        timeRange = node.getTimeRange()
        areaLabel = node.getAreaLabel()
        # Look back 24 hours for the same element to compute the day-to-day change.
        prevTimeRange = self.adjustTimeRange(timeRange, -24)
        prevStats = tree.stats.get(elementName, prevTimeRange, areaLabel,
                                   mergeMethod="Average")
        #print "Report trends", timeRange, elementName, curStats, prevStats
        if prevStats is None:
            return self.setWords(node, "")

        prevStats = self.getValue(prevStats)
        curStats = self.getValue(curStats)
        #print "stats", prevStats, curStats
        diff = curStats - prevStats
        value = self.reportTrends_valueStr(tree, node, diff, curStats)
        #print " returning ", value
        return self.setWords(node, value)

    def reportTrends_valueStr(self, tree, node, diff, temp):
        # Given a difference between current and 24-hour prior
        # MaxT or MinT grids, report a trend.
        # Thresholds: warmer if diff > 10, colder if -20 <= diff <= -10,
        # much colder if diff < -20; otherwise no phrase.
        var = self.colder_warmer_dict(tree, node)
        timeRange = node.getTimeRange()
        dayNight = self.getPeriod(timeRange, 1)
        if dayNight == self.DAYTIME():
            if diff > 10:
                return self.nlValue(var["HighWarmer"], temp)
            elif diff < -20:
                return self.nlValue(var["HighMuchColder"], temp)
            elif diff <= -10 and diff >= -20:
                return self.nlValue(var["HighColder"], temp)
            else:
                return ""
        else:
            if diff > 10:
                return self.nlValue(var["LowWarmer"], temp)
            elif diff < -20:
                return self.nlValue(var["LowMuchColder"], temp)
            elif diff <= -10 and diff >= -20:
                return self.nlValue(var["LowColder"], temp)
            else:
                return ""
        # NOTE(review): unreachable -- both branches above always return.
        return ""

    # colder_warmer_Dict
    # Dictionary of non-linear dictionaries each with
    # phrases to use instead of colder/warmer
    # based on the temperature

    def colder_warmer_dict(self, tree, node):
        # This dictionary of non-linear dictionaries controls what phrase is returned
        # for cold/much colder warmer/much warmer. It is based off
        # of the maxT or MinT
        dict = {}
        dict["LowColder"] = {
            (-80,45): "colder",
            (45,70): "cooler",
            (70,150): "not as warm",
            "default": "",
            }
        dict["LowMuchColder"] = {
            (-80,45): "much colder",
            (45,70): "much cooler",
            (70,150): "not as warm",
            "default": "",
            }
        dict["LowWarmer"] = {
            (-80,35): "not as cold",
            (35,50): "not as cool",
            (50,150): "warmer",
            "default": "",
            }
        dict["HighColder"]= {
            (-80,45): "colder",
            (45,75): "cooler",
            (75,90): "not as warm",
            (90,150): "not as hot",
            "default": "",
            }
        dict["HighMuchColder"]= {
            (-80,45): "much colder",
            (45,75): "much cooler",
            (75,90): "not as warm",
            (90,150): "not as hot",
            "default": "",
            }
        dict["HighWarmer"]= {
            (-80,45): "not as cold",
            (45,65): "not as cool",
            (65,150): "warmer",
            "default": "",
            }
        return dict


##    def reportTrends_valueStr(self, tree, node, diff):
##        # Given a difference between current and 24-hour prior
##        # MaxT or MinT grids, report a trend.
##        if diff > 15 and diff < 25:
##            return "warmer"
##        elif diff >= 25:
##            return "much warmer"
##        elif diff < -15 and diff > -25:
##            return "cooler"
##        elif diff <= -25:
##            return "much colder"
##        else:
##            return ""

    def extremeTemps_phrase(self):
        ### NEW METHOD written by Tom Spriggs
        ### ZFP_Local
        # Phrase definition: reports extreme heat/cold wording ("very hot",
        # "bitterly cold", ...) based on MaxT/MinT plus HeatIndex/WindChill.
        return {
            "setUpMethod": self.extremeTemps_setUp,
            "wordMethod": self.extremeTemps_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def extremeTemps_setUp(self, tree, node):
        # Primary element is MaxT by day, MinT by night; HeatIndex and
        # WindChill are sampled alongside for the heat/chill qualifiers.
        dayNight = self.getPeriod(node.getTimeRange(), 1)
        if dayNight == self.DAYTIME():
            elementInfoList = [
                self.ElementInfo("MaxT", "Max"),
                self.ElementInfo("MinT", "Min"),
                ]
        else:
            elementInfoList = [
                self.ElementInfo("MinT", "Min"),
                self.ElementInfo("MaxT", "Max"),
                ]
        elementInfoList.append(self.ElementInfo("HeatIndex", "Max"))
        elementInfoList.append(self.ElementInfo("WindChill", "Min"))
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        node.set("descriptor", "")
        return self.DONE()


    def extremeTemps_words(self, tree, node):
        "Compare current analysis to previous analysis for trends"
        # Must run after reportTrends: when this phrase produces words it
        # clears reportTrends' words so only one temperature comment appears.
        tempPhrases = ["reportTrends"]
        words = self.findWords(tree, node, None, node.getAreaLabel(),
                               phraseList=tempPhrases)

        if words is None:
            # If words have not yet been set, return
            # We need to wait for reportTrends to complete
            # before doing the extremeTemps_phrase
            return

        statDict = node.getStatDict()
        timeRange = node.getTimeRange()
        dayNight = self.getPeriod(timeRange, 1)
        if dayNight == self.DAYTIME():
            element = "MaxT"
        else:
            element = "MinT"
        tStats = self.getStats(statDict, element)
        if tStats is None:
            return self.setWords(node, "")
        tStats = self.getValue(tStats)

        chillStats = self.getStats(statDict, "WindChill")
        chillStats = self.getValue(chillStats, "Min")
        heatStats = self.getStats(statDict, "HeatIndex")
        heatStats = self.getValue(heatStats, "Max")

        words = ""

        if dayNight == self.DAYTIME():
            # Ladder: temperature extremes first, then heat index / wind chill.
            if tStats > 99:
                if heatStats is None:
                    words = "very hot"
                elif (heatStats - tStats) > 7:
                    words = "very hot and humid"
                else:
                    words = "very hot"
            elif tStats > 95:
                if heatStats is None:
                    words = "hot"
                elif (heatStats - tStats) > 6:
                    words = "hot and humid"
                else:
                    words = "hot"
            elif tStats < 20:
                if chillStats is None:
                    words = "very cold"
                elif chillStats < -9:
                    words = "bitterly cold"
                else:
                    words = "very cold"
            elif heatStats is None:
                words = ""
            elif heatStats >= self.heatIndex_threshold(tree, node):
                words = "hot and humid"
            elif chillStats is None:
                words = ""
            elif chillStats <= self.windChill_threshold(tree, node):
                words = "bitterly cold"
        else:
            if tStats < 5:
                if chillStats is None:
                    words = "very cold"
                elif chillStats <= self.windChill_threshold(tree, node):
                    words = "bitterly cold"
                else:
                    words = "very cold"
            elif chillStats is None:
                words = ""
            elif chillStats <= self.windChill_threshold(tree, node):
                words = "bitterly cold"

        if words == "":
            return self.setWords(node, words)

        # Clear the words in reportTrends to
        # prevent extra temperature phrases
        component = node.getComponent()
        progeny = component.getProgeny()
        for child in progeny:
            phraseName = child.get("name")
            if phraseName in tempPhrases:
                child.set("words", "")
        return self.setWords(node, words)

    ## Submitted by Brian Walawender
    ## Reviewed by Tracy Hansen
    def steady_temp_threshold(self, tree, node):
        # Diurnal ranges less than this value will
        # be reported as steady temperatures
        return 4

    def steady_temp_trends(self):
        # Phrase definition: reports "near steady temperature ..." when the
        # hourly T range over the period is below steady_temp_threshold.
        return {
            "setUpMethod": self.steady_temp_trends_setUp,
            "wordMethod": self.steady_temp_trends_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def steady_temp_trends_setUp(self, tree, node):
        # No sampled elements needed here; T is fetched directly in the word method.
        elementInfoList = []
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        node.set("descriptor", "")
        return self.DONE()

    def steady_temp_trends_words(self, tree, node):
        "Look for small diurnal changes"
        # Check Diurnal range in T. If range is
        # less than steady_temp_threshold report
        # a temperatures steady phrase
        # i.e. "temperature steady in the mid 20s"
        tempPhrases = ["highs_phrase", "lows_phrase",
                       "highs_range_phrase", "lows_range_phrase",
                       "temp_trends",
                       "extended_lows_phrase", "extended_highs_phrase"
                       ]
        words = self.findWords(tree, node, None, node.getAreaLabel(),
                               phraseList=tempPhrases)
        if words is None:
            # If words have not yet been set, return
            # We need to wait for all highs_phrases to complete
            # before doing the steady_temp_phrase
            return

        timeRange = node.getTimeRange()
        tStats = tree.stats.get("T", timeRange, node.getAreaLabel(),
                                mergeMethod="List")
        if tStats is None:
            return self.setWords(node, "")
        # tStats is a list of (hourlyTemp, subRange) tuples

        # NOTE(review): the locals below shadow the max/min/sum builtins.
        max = -999
        min = 999
        words = ""
        sum = 0
        count = 0
        for hourlyTemps, subRange in tStats:
            if hourlyTemps is None:
                return self.setWords(node, "")
            for t, hr in hourlyTemps:
                if t is None:
                    return self.setWords(node, "")
                if t < min:
                    min = t
                if t > max:
                    max = t
                sum = sum + t
                count = count + 1

        diff = max - min

        if diff >= self.steady_temp_threshold(tree,node):
            return self.setWords(node, "")

        dayNight = self.getPeriod(timeRange, 1)
        if dayNight == self.DAYTIME():
            # Round up by day, truncate by night (intentional asymmetry? --
            # TODO confirm with maintainers).
            avg = int((sum/count)+0.5)
        else:
            avg = int(sum/count)

        phrase = self.getTempPhrase(tree, node, avg, "")
        words = "near steady temperature " + phrase

        # Clear the words in high and lows phrase
        # prevent extra temperature phrases
        component = node.getComponent()
        progeny = component.getProgeny()
        for child in progeny:
            phraseName = child.get("name")
            if phraseName in tempPhrases:
                child.set("words", "")

                # Begin ER changes
                # Not sure if this is used...but set anyway
                child.set("emptyPhrase", 1)

                # Now erase subphrase words. This is what seems to fix
                # the problem of high/low phrases still appearing with
                # the steady phrase - PJ
                subphrases=child.get("childList")

                if subphrases is not None:
                    for n in subphrases:
                        n.set("words", "")

        return self.setWords(node, words)

    ### SnowAmt
    def pop_snow_lower_threshold(self, tree, node):
        # Snow accumulation will not be reported
        # if Pop is below this threshold
        return 60

    def getSnowReportEndDay(self, tree, node):
        # This is the first day we do not try to report total accumulation.
        return self.createTimeRange(96,108)

    def snow_phrase(self):
        # Phrase definition: snow accumulation for the period (e.g.
        # "snow accumulation of 2 to 4 inches").
        return {
            "setUpMethod": self.snow_setUp,
            "wordMethod": self.snow_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def snow_setUp(self, tree, node):
        elementInfoList = [self.ElementInfo("SnowAmt", "List")]
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        component = node.getComponent()
        index = component.getIndex()

        # Calculate past snow
        prodTR = tree.getTimeRange()
        pastSnowMin = 0
        pastSnowMax = 0
        pastSnowTimeRange = self.makeTimeRange(prodTR.startTime() - 12*3600,
                                               prodTR.startTime())
        stats = tree.stats.get("SnowAmt", pastSnowTimeRange,
                               node.getAreaLabel(), mergeMethod="MinMax")

        if stats is not None:
            # site is using a past SnowAmt grid
            pastSnowMin, pastSnowMax = self.getValue(stats, "MinMax")
            # If first period is less than 12 hours long, thus an "update"
            # report as "new" snow accumulation ONLY IF
            # there was some previous snow accumulation
            timeRange = node.getTimeRange()
            if index == 0 and timeRange.duration() < 12*3600 and \
               pastSnowMax > 0.0:
                node.set("newFlag", 1)
        else:
            # site is NOT using a past SnowAmt grid
            # If first period is less than 12 hours long, thus an "update"
            # report as "new" snow accumulation
            timeRange = node.getTimeRange()
            if index == 0 and timeRange.duration() < 12*3600:
                node.set("newFlag", 1)

        return self.DONE()

    def snow_words(self, tree, node):
        # First check if the pop threshold has been met
        # If not, then do not generate phrase
        threshold = self.pop_snow_lower_threshold(tree, node)
        lowPopFlag = self.lowPop_flag(tree, node, threshold)
        if lowPopFlag == 1:
            return self.setWords(node, "")

        # Second, wait for weather phrase to complete and make sure there
        # is mention of accumulating weather
        wxWords, attrDict = self.findWords(tree, node, "Wx", node.getAreaLabel(),
                                           phraseList=["weather_phrase", "skyPopWx_phrase"],
                                           attributes=["reportedRankList"])
        if wxWords is None:
            return
        accumFlag, descriptor = self.checkAccumulatingWx(tree, node, wxWords, attrDict)
        if accumFlag == 0:
            return self.setWords(node, "null")

        # Third, load in the SnowAmt statistics, check for low amounts, then round to nearest inch
        currentSnow = tree.stats.get("SnowAmt", node.getTimeRange(), node.getAreaLabel(), mergeMethod="MinMax")
        if currentSnow is None:
            return self.setWords(node, "")
        min, max = self.getValue(currentSnow, "MinMax")
        if min == 0 and max == 0:
            node.parent.set("descriptor", "")
            return self.setWords(node, "no " + descriptor)
        elif min < 0.5 and max < 0.5:
            node.parent.set("descriptor", "")
            return self.setWords(node, "little or no " + descriptor)
        min = int(min+0.5)
        max = int(max+0.5)

        # Finally, generate the snow accumulation phrase
        # Decide on singular or plural units
        if max == 1:
            units = self.units_descriptor(tree, node, "unit", "in")
        else:
            units = self.units_descriptor(tree, node, "units", "in")
        # Create worded phrase based on type of range
        # NOTE(review): backquote-repr below is Python 2-only syntax
        # (removed in Python 3) -- confirm target runtime before porting.
        if min == 0:
            upTo = self.addSpace(self.phrase_descriptor(tree, node, "up to", "SnowAmt"))
            snowPhrase = upTo + `max`
        elif min == max:
            around = self.addSpace(self.phrase_descriptor(tree, node, "around", "SnowAmt"))
            snowPhrase = around + `max`
        else:
            snowPhrase = "of " + `min` + " to " + `max`
        snowPhrase = snowPhrase + " " + units

        return self.setWords(node, snowPhrase)

    def checkAccumulatingWx(self, tree, node, wxWords, attrDict):
        # Returns (flag, descriptor): flag is 1 when the reported weather
        # contains an accumulating type (S, SW, IP, IC), along with the
        # phrase descriptor to use; (0, "") otherwise.
        accumulatingWx = [
            ('S', 'Snow'),
            ('SW', 'Snow'),
            ('IP', 'Sleet'),
            ('IC', 'IceCrystal'),
            ]
        desc = ""
        wxTypes = []
        # NOTE(review): dict.has_key is Python 2-only; "in" is the portable form.
        if attrDict.has_key("reportedRankList"):
            rankList = attrDict["reportedRankList"]
            for subkey, rank in rankList:
                # DR_18506
                if subkey.wxType() in ['SW'] and subkey.intensity() == "--":
                    pass
                elif subkey.wxType() in ['IC']:
                    pass
                else:
                    wxTypes.append(subkey.wxType())
        for wxType, wxVar in accumulatingWx:
            if wxType in wxTypes:
                desc += wxVar
        if desc == "":
            return 0, ""
        # Determine the phrase descriptor
        descriptor = self.phrase_descriptor(tree, node, desc, "SnowAmt")
        if node.getAncestor('newFlag') == 1:
            new = self.addSpace(self.phrase_descriptor(tree, node, "New", "SnowAmt"))
            if new != "":
                descriptor = new + descriptor
        node.parent.set("descriptor", descriptor)
        # The handle the case of embedded local effects, set the parent's parent as well
        node.parent.parent.set("descriptor", descriptor)
        return 1, descriptor

    ## Modifications submitted by Tom Spriggs LSX for accurately reporting total snow

    ## Since the total_snow_phrase uses the SnowAmt element exclusively for tallying
    ## up storm totals, you can use a SnowAmt grid that exists in the past
    ## (before the current hour) to tell us how much snow has already fallen from THIS STORM
    ## (this will not include already existing snow pack from other storms)
    ## and thus compensate for the diminishing/shrinking forecast totals.
    ## The method simply samples and adds the already fallen snow to what is still forecast
    ## to produce an accuate total snow amount for an ongoing event
    ## (as well as an event still yet to happen).

    ## This "past" SnowAmt grid will end at the current time and can go as far back as
    ## the user wants--but it MUST be a single grid for previously fallen snow,
    ## not a bunch of fragmented grids.
The actual values in the "past" SnowAmt grid - ## will then need to be filled with values based on already collected snow reports. - ## One method to create this "old" SnowAmt grid would be through use of the CONTOUR tool. - - ## Using this method, it is now possible to kick off a total snow phrase in the - ## first period when the total snow differs from the still yet to fall/forecasted - ## snow in the first period. - - ## If you leave the "past" SnowAmt grid as zero, the formatter will know to treat - ## it as a non-ongoing event. - - def total_snow_phrase(self): - return { - "setUpMethod": self.total_snow_setUp, - "wordMethod": self.total_snow_words, - "phraseMethods": self.standard_phraseMethods() - } - - def total_snow_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("SnowAmt", "MinMax"), - self.ElementInfo("IceAccum", "MinMax", primary=0)] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - descriptor = self.phrase_descriptor(tree, node, "TotalSnow", "SnowAmt") - node.set("descriptor", descriptor) - return self.DONE() - - def total_snow_words(self, tree, node): - # Return a total accumulation phrase if appropriate - # Example: - # TOTAL SNOW ACCUMULATION 7 INCHES. 
- - component = node.getComponent() - index = component.getIndex() - totalSnow = "" - - # See if we are prior to the snow report end day - timeRange = node.getParent().getTimeRange() - snowReportEndDay = self.getSnowReportEndDay(tree, node) - shiftedTimeRange = self.shiftedTimeRange(timeRange) - if shiftedTimeRange.startTime() < snowReportEndDay.startTime(): - ### Round up stats--need current period snow, next period snow, and past snow - # Obtain minimum PoP needed to report accumulations - threshold = self.pop_snow_lower_threshold(tree, node) - # Get snow stats for the current period - currentSnow = tree.stats.get("SnowAmt", node.getTimeRange(), node.getAreaLabel(), mergeMethod="MinMax") - if currentSnow is None: - return self.setWords(node, "") - currentMin, currentMax = self.getValue(currentSnow, "MinMax") - currentMin = int(currentMin+0.5) - currentMax = int(currentMax+0.5) - # Check PoP threshold for the current period--zero out if below threshold PoP - popStats = self.matchToWx(tree, node, "PoP", node.getTimeRange()) - if popStats < threshold: - currentMin = 0 - currentMax = 0 - # Get snow stats for the next period--does the event come to an end? 
- nextComp = component.getNext() - if nextComp is None: - return self.setWords(node, "") - nextTimeRange = nextComp.getTimeRange() - nextSnow = tree.stats.get("SnowAmt", nextTimeRange, - node.getAreaLabel(), mergeMethod="Max") - if nextSnow is None: - return self.setWords(node, "") - nextSnow = int(nextSnow+0.5) - # Check PoP threshold for the next period--zero out if below threshold PoP - threshold = self.pop_snow_lower_threshold(tree, node) - popStats = self.matchToWx(tree, node, "PoP", nextTimeRange) - if popStats < threshold: - nextSnow = 0 - # Get snow stats for both already fallen snow AND preceding forecast periods - minSum, maxSum = self.sumPrevStats(tree, component, - node.getAreaLabel(), "SnowAmt", "MinMax") - - ### Generate total snow accumulation phrase if conditions met - # We produce a total accumulation phrase for the current period IF - # the next period's snow is 0--thus snow will cease by the end of the current period AND - # there is snow accumulation expected in the current period AND - # there is snow accumulation in one or more periods immediately preceding - if nextSnow == 0 and currentMax > 0 and maxSum > 0: - # Finalize total snow amount - finalMinSum = int(currentMin + minSum) - finalMaxSum = int(currentMax + maxSum) - # Decide on singular or plural units - if finalMaxSum == 1: - units = self.units_descriptor(tree, node, "unit", "in") - else: - units = self.units_descriptor(tree, node, "units", "in") - # Create worded phrase based on type of range - if finalMinSum == 0: - upTo = self.addSpace(self.phrase_descriptor(tree, node, - "up to", "SnowAmt")) - totalSnowPhrase = upTo + `finalMaxSum` - elif finalMinSum == finalMaxSum: - around = self.addSpace(self.phrase_descriptor(tree, node, - "around", "SnowAmt")) - totalSnowPhrase = around + `finalMaxSum` - else: - totalSnowPhrase = `finalMinSum` + " to " + `finalMaxSum` - totalSnow = totalSnowPhrase + " " + units - else: - return self.setWords(node, "") - - return self.setWords(node, totalSnow) 
- - def getSnowValue(self, tree, node, areaLabel=None): - # Return min and max snow values - threshold = self.pop_snow_lower_threshold(tree, node) - lowPopFlag = self.lowPop_flag(tree, node, threshold) - if lowPopFlag == 1: - return None - if areaLabel is None: - areaLabel = node.getAreaLabel() - stats = tree.stats.get("SnowAmt", node.getTimeRange(), - areaLabel, mergeMethod="MinMax") - if stats is None: - return None - min, max = self.getValue(stats, "MinMax") - min = int(min+0.5) - max = int(max+0.5) - if min < 1 and max < 1: - return None - - return min, max - - def getTotalSnow(self, tree, node, areaLabel=None, snowValue=None): - component = node.getComponent() - # Get sum of previous periods - if areaLabel is None: - areaLabel = node.getAreaLabel() - if snowValue is None: - snowValue = self.getSnowValue(tree, node, areaLabel) - if snowValue is None: - return None - minSnowValue, maxSnowValue = snowValue - minSum, maxSum = self.sumPrevStats(tree, component, areaLabel, "SnowAmt", "MinMax") - # Add this period's value to the sum - minSum = minSum + minSnowValue - maxSum = maxSum + maxSnowValue - minIncrement = self.nlValue(self.increment_nlValue( - tree, node, "SnowAmt", "SnowAmt"), minSum) - maxIncrement = self.nlValue(self.increment_nlValue( - tree, node, "SnowAmt", "SnowAmt"), maxSum) - minSum = self.round(minSum, "Nearest", minIncrement) - maxSum = self.round(maxSum, "Nearest", maxIncrement) - return minSum, maxSum - - ## TOTAL SNOW Phrase submitted by Virgil Mittendorf - def stormTotalSnow_phrase(self): - return { - "setUpMethod": self.stormTotalSnow_setUp, - "wordMethod": self.stormTotalSnow_words, - "phraseMethods": self.standard_phraseMethods(), - } - def stormTotalSnow_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("StormTotalSnow", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def stormTotalSnow_words(self, tree, node): - "Create phrase for Storm Total Snow accumulation" - - # 
Load in the statistics for storm total snow - elementName = "StormTotalSnow" - statDict = node.getStatDict() - stats = self.getStats(statDict, elementName) - - #print "storm total snow stats", stats - - # test...if no stats then don't create phrase (i.e. grid missing) - if stats is None: - return self.setWords(node, "") - - min, max = self.getValue(stats, "MinMax") - threshold = 1 - incMin = 1 - incMax = 1 - - if min%1 == 0: - min = int(min) - minStr = `min` - else: - minStr = `int(min+0.5)` - if max%1 == 0: - max = int(max) - maxStr = `max` - else: - maxStr = `int(max+0.5)` - - #print "min, max", min, max, node.getTimeRange(), node.getAreaLabel(), "storm total accumulation" - - if min == 0 and max == 0: - return self.setWords(node,"") - elif min < 0.5 and max < 0.5: - return self.setWords(node,"") - - outUnits = self.element_outUnits(tree, node, elementName, elementName) - unit = self.units_descriptor(tree, node,"unit", outUnits) - units = self.units_descriptor(tree, node,"units", outUnits) - - min = int(min+0.5) - max = int(max+0.5) - - # Single Value input - if min == max: - # Handle case of 1 inch - if min == 1: - units = unit - value = "around " + minStr - - # Range - else: - value = "of " + minStr + " to " + maxStr - # Handle case when lower value is 0 - if min == 0: - value = "up to " + maxStr - if max == 1: - units = unit - - snowPhrase = value + " " + units - return self.setWords(node, snowPhrase) - - # New def by Scott. According to Directive 10-503, descriptive terms - # should be used in period 4 and beyond. This function returns - # a descriptive snow phrase. 
    def descriptive_snow_phrase(self):
        # Phrase definition: qualitative snow accumulation wording
        # ("light"/"moderate"/"heavy") for extended periods per Directive 10-503.
        return {
            "setUpMethod": self.descriptive_snow_setUp,
            "wordMethod": self.descriptive_snow_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def descriptive_snow_setUp(self, tree, node):
        elementInfoList = [self.ElementInfo("SnowAmt", "MinMax")]
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        # Do not want phrase descriptor
        node.set("descriptor", "")
        return self.DONE()

    def descriptive_snow_words(self, tree, node):
        "Create phrase for snow accumulation"
        # According to Directive 10-503, descriptive terms
        # should be used in period 4 and beyond. This function returns
        # a descriptive snow phrase.
        #
        # According to Directive 10-503, snow accumulation
        # should not be mentioned if PoP is under 60%.
        threshold = self.pop_snow_lower_threshold(tree, node)
        lowPopFlag = self.lowPop_flag(tree, node, threshold)
        if lowPopFlag == 1:
            return self.setWords(node, "")

        statDict = node.getStatDict()
        stats = self.getStats(statDict, "SnowAmt")
        if stats is None:
            return self.setWords(node, "")

        max = int(self.getValue(stats, "Max"))

        # Buckets: <1 none, 1-2 light, >2-5 moderate, >5 heavy.
        if max < 1:
            words = ""
        elif max >= 1 and max <= 2:
            words = "light snow accumulations"
        elif max > 2 and max <= 5:
            words = "moderate snow accumulations"
        else:
            words = "heavy snow accumulations"
        return self.setWords(node, words)

    ### SnowLevel
    def pop_snowLevel_upper_threshold(self, tree, node):
        # Snow level will be reported if Pop is above this threshold
        return 60

    def snowLevel_maximum_phrase(self, tree, node):
        # This returns the maximum snow level value to be reported and the
        # the corresponding snow level phrase. It can be set up by
        # edit area as follows:
        # editAreaList = [
        #     ("area1", 8000, "above 8000 feet"),
        #     ("area2", 6000, "above 6000 feet"),
        #     # Don't mention snow level at all in area3:
        #     ("area3", 0, ""),
        #     ]
        #maxElev = 0
        #phrase = ""
        #for area, elev, elevPhrase in editAreaList:
        #    if self.currentAreaContains(tree, [area]):
        #        if elev > maxElev:
        #            maxElev = elev
        #            phrase = elevPhrase
        #return (maxElev, phrase)
        return (8000, "above 8000 feet")

    def snowLevel_upper_topo_percentage(self, tree, node):
        # If this percentage of the edit area is above the snow level,
        # do not report snow level
        return 80

    def snowLevel_lower_topo_percentage(self, tree, node):
        # If this percentage of the edit area is below or equal to the snow level,
        # do not report snow level
        return 80

    def snowLevel_phrase(self):
        # Phrase definition: reports the snow level elevation when rain is in
        # the forecast and the level falls usefully within the area's terrain.
        return {
            "setUpMethod": self.snowLevel_setUp,
            "wordMethod": self.snowLevel_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def snowLevel_setUp(self, tree, node):
        elementInfoList = [self.ElementInfo("SnowLevel", "List")]
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        return self.DONE()

    def snowLevel_words(self, tree, node):
        "Create phrase for reporting snow level"

        # Check for low pop
        threshold = self.pop_snowLevel_upper_threshold(tree, node)
        lowPopFlag = self.lowPop_flag(tree, node, threshold)

        if lowPopFlag == 1:
            return self.setWords(node, "")

        statDict = node.getStatDict()
        snowLevel = self.getStats(statDict, "SnowLevel")
        if snowLevel is None:
            return self.setWords(node, "")
        snowLevel = self.getValue(snowLevel)
        element = "SnowLevel"
        roundingMethod = self.rounding_method(tree, node, element, element)
        increment_nlValue = self.increment_nlValue(tree, node, element, element)
        snowLevel = self.roundValue(snowLevel, roundingMethod, "Nearest", increment_nlValue, 0)

        # Check Wx for R or RW -- only mention snow level when rain is forecast
        stats = tree.stats.get("Wx", node.getTimeRange(), node.getAreaLabel(),
                               mergeMethod="List")
        if stats is None:
            return self.setWords(node, "")

        found = 0
        for rankList, subRange in stats:
            subkeys = self.getSubkeys(rankList)
            for subkey in subkeys:
                if subkey.wxType() == "R" or subkey.wxType() == "RW":
                    found = 1
                    break

        if found == 0:
            return self.setWords(node, "")

        # Check for upper and lower topo percentages
        percentage_above = self.calcTopoPercentage(tree, node, node.getAreaLabel(), snowLevel)
        percentage_below = 100 - percentage_above

        if percentage_above > self.snowLevel_upper_topo_percentage(tree, node):
            return self.setWords(node, "")
        if percentage_below > self.snowLevel_lower_topo_percentage(tree, node):
            return self.setWords(node, "")

        # Check for maximum snow level to be reported
        max, words = self.snowLevel_maximum_phrase(tree, node)

        # NOTE(review): backquote-repr below is Python 2-only syntax.
        if snowLevel < max:
            units = self.units_descriptor(tree, node, "units", "ft")
            words = `int(snowLevel)` + " " + units
        return self.setWords(node, words)

    ### IceAccum
    def ice_accumulation_threshold(self, tree, node):
        # If maximum IceAccum is greater than this threshold, it will be
        # reported instead of SnowAmt in the snow_phrase
        return .10

    def iceAccumulation_phrase(self):
        # Phrase definition: worded ice accumulation amounts in fractions of
        # an inch (e.g. "ice accumulation of one quarter of an inch").
        return {
            "setUpMethod": self.iceAccumulation_setUp,
            "wordMethod": self.iceAccumulation_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def iceAccumulation_setUp(self, tree, node):
        elementInfoList = [self.ElementInfo("IceAccum", "MinMax", primary=0)]
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        return self.DONE()

    def iceAccumulation_words(self, tree, node):
        "Create phrase for ice accumulation"

        threshold = self.pop_snow_lower_threshold(tree, node)
        lowPopFlag = self.lowPop_flag(tree, node, threshold)
        if lowPopFlag == 1:
            return self.setWords(node, "")

        # Check for IceAccum. If it is significant, report it.
        statDict = node.getStatDict()

        stats = self.getStats(statDict, "IceAccum")
        reportIceAccum = 0
        if stats is not None:
            threshold = self.ice_accumulation_threshold(tree, node)
            min, max = self.getValue(stats, "MinMax")
            if max >= threshold:
                reportIceAccum = 1
        if reportIceAccum == 1:
            component = node.getComponent()
            index = component.getIndex()
            timeRange = node.getTimeRange()
            # First, short "update" period gets the "New" descriptor
            if index == 0 and timeRange.duration() < 12*3600:
                descriptor = self.phrase_descriptor(
                    tree, node, "NewIceAccum", "IceAccum")
            else:
                descriptor = self.phrase_descriptor(tree, node, "IceAccum", "IceAccum")
            node.parent.set("descriptor", descriptor)
            elementName = "IceAccum"
        else:
            return self.setWords(node, "")

        # Map amounts onto worded fractions of an inch
        if min < 0.2:
            minStr = "less than one quarter"
        elif min >= 0.2 and min < 0.4:
            minStr = "one quarter"
        elif min >= 0.4 and min < 0.7:
            minStr = "one half"
        elif min >= 0.7 and min < 0.9:
            minStr = "three quarters"
        elif min >= 0.9 and min < 1.3:
            minStr = "one"
        elif min >= 1.3 and min < 1.8:
            minStr = "one and a half"
        elif min >= 1.8:
            minStr = `int(min+0.5)`
        if max < 0.2:
            maxStr = "less than one quarter"
        elif max >= 0.2 and max < 0.4:
            maxStr = "one quarter"
        elif max >= 0.4 and max < 0.7:
            maxStr = "one half"
        elif max >= 0.7 and max < 0.9:
            maxStr = "three quarters"
        elif max >= 0.9 and max < 1.3:
            maxStr = "one"
        elif max >= 1.3 and max < 1.8:
            maxStr = "one and a half"
        elif max >= 1.8:
            maxStr = `int(max+0.5)`
        # NOTE(review): this overrides the "one" wording chosen above for
        # 0.9 <= min < 1.3 with a numeric string -- TODO confirm intended.
        if min >= 0.9 and min < 1.3:
            minStr = `int(min+0.5)`

        #print "min, max", min, max, node.getTimeRange(), node.getAreaLabel()

        outUnits = self.element_outUnits(tree, node, elementName, elementName)
        unit = self.units_descriptor(tree, node,"unit", outUnits)
        units = self.units_descriptor(tree, node,"units", outUnits)

        # Single Value input
        if minStr == maxStr:
            if min < 0.2:
                icePhrase = "of " + minStr + " of an " + unit
            elif min >= 0.2 and min < 0.9:
                icePhrase = "around " + minStr + " of an " + unit
            elif min >= 0.9 and min < 1.3:
                icePhrase = "around " + minStr + " " + unit
            elif min >= 1.3:
                icePhrase = "around " + minStr + " " + units
            else:
                return self.setWords(node, "")
        # Range
        else:
            if min < 0.2:
                if max < 0.9:
                    icePhrase = "of up to " + maxStr + " of an " + unit
                elif max >= 0.9 and max < 1.3:
                    icePhrase = "of up to " + maxStr + " " + unit
                elif max >= 1.3:
                    icePhrase = "of up to " + maxStr + " " + units
                else:
                    return self.setWords(node, "")
            elif min >= 0.2 and min < 0.9:
                if max < 0.9:
                    icePhrase = "of " + minStr + " to " + maxStr + " of an " + unit
                elif max >= 0.9 and max < 1.3:
                    icePhrase = "of " + minStr + " of an " + unit + " to " + maxStr + " " + unit
                elif max >= 1.3:
                    icePhrase = "of " + minStr + " of an " + unit + " to " + maxStr + " " + units
                else:
                    return self.setWords(node, "")
            elif min >= 0.9:
                if max >= 1.3:
                    icePhrase = "of " + minStr + " to " + maxStr + " " + units
                else:
                    return self.setWords(node, "")
        return self.setWords(node, icePhrase)

    ### FzLevel
    ### WindChill
    def windChill_threshold(self, tree, node):
        # THRESHOLD FOR REPORTING WIND CHILL
        return 0.0

    def windChillTemp_difference(self, tree, node):
        # Difference between wind chill and temperature
        # for reporting wind chill
        return 5

    def windChill_phrase(self):
        # Phrase definition: wind chill range when sufficiently cold and
        # sufficiently below the air temperature.
        return {
            "setUpMethod": self.windChill_setUp,
            "wordMethod": self.windChill_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def windChill_setUp(self, tree, node):
        elementInfoList = [self.ElementInfo("WindChill", "List")]
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        return self.DONE()

    def windChill_words(self, tree, node):
        "Create phrase for Wind Chill"

        statDict = node.getStatDict()
        stats = self.getStats(statDict, "WindChill")
        if stats is None:
            return self.setWords(node, "")
        t = tree.stats.get("T", node.getTimeRange(),
                           node.getAreaLabel(), statLabel="minMax",
                           mergeMethod="MinMax")
        if t is None:
            return self.setWords(node, "")

        min, max = self.getValue(stats, "MinMax")

        timeRange = node.getTimeRange()
        day = self.getPeriod(timeRange, 1)
        if day == self.DAYTIME():
            # Compare to max T
            t = self.getValue(t,"Max")
        else:
            # Compare to min T
            t = self.getValue(t,"Min")

        # Report only if at/below threshold AND at least `diff` degrees below T
        diff = self.windChillTemp_difference(tree, node)
        if min <= self.windChill_threshold(tree, node) and min <= t - diff:
            words = self.getTempRangePhrase(tree, node, (min, max), "WindChill")
        else:
            words = ""
        return self.setWords(node, words)

    # Alternate phrase based on wind speed
    def windChill_wind_threshold(self, tree, node):
        # Minimum wind speed (mph) required for reporting wind chill
        return 10

    def windBased_windChill_phrase(self):
        # Alternate wind chill phrase gated on wind speed instead of the
        # chill-vs-temperature difference.
        return {
            "setUpMethod": self.windChill_setUp,
            "wordMethod": self.windBased_windChill_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def windBased_windChill_words(self, tree, node) :
        "Create phrase for Wind Chill"

        # Wait for wind phrase to complete
        windWords = self.findWords(tree, node, "Wind", node.getAreaLabel())
        if windWords is None:
            return

        statDict = node.getStatDict()
        stats = self.getStats(statDict, "WindChill")
        if stats is None:
            return self.setWords(node, "")

        if windWords == "":
            return self.setWords(node, "")

        min, max = self.getValue(stats, "MinMax")

        # Check wind speed
        # First try to re-use information from wind_phrase
        maxWind = node.getComponent().get("maxMag")
        if maxWind is None:
            # Have to access it from statistics dictionary
            timeRange = node.getTimeRange()
            wind = tree.stats.get("Wind", timeRange, node.getAreaLabel(), mergeMethod="Max")
            if wind is None:
                return self.setWords(node, "")
            maxWind, dir = wind

        if maxWind < self.windChill_wind_threshold(tree, node):
            return self.setWords(node, "")

        # WC must be less or equal to threshold
        if min <= self.windChill_threshold(tree, node):
            words = self.getTempRangePhrase(tree, node, (min, max), "WindChill")
        else:
            words = ""
        return self.setWords(node, words)

    ### HeatIndex
    def heatIndex_threshold(self, tree, node):
        # THRESHOLD FOR REPORTING HEAT INDEX
        return 108.0

    def heatIndexTemp_difference(self, tree, node):
        # Difference between heat index and temperature
        # for reporting heat index
        return 5

    def heatIndex_phrase(self):
        # Phrase definition: heat index range when at/above threshold and
        # sufficiently above the air temperature.
        return {
            "setUpMethod": self.heatIndex_setUp,
            "wordMethod": self.heatIndex_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def heatIndex_setUp(self, tree, node):
        elementInfoList = [self.ElementInfo("HeatIndex", "List")]
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        return self.DONE()

    def heatIndex_words(self, tree, node) :
        "Create phrase for Heat Index"
        statDict = node.getStatDict()
        stats = self.getStats(statDict, "HeatIndex")
        if stats is None:
            return self.setWords(node, "")
        t = tree.stats.get("T", node.getTimeRange(),
                           node.getAreaLabel(), statLabel="minMax",
                           mergeMethod="MinMax")
        if t is None:
            return self.setWords(node, "")

        min, max = self.getValue(stats, "MinMax")

        timeRange = node.getTimeRange()
        day = self.getPeriod(timeRange,1)
        if day == self.DAYTIME():
            # Compare to max T
            t = self.getValue(t,"Max")
        else:
            # Compare to min T
            t = self.getValue(t,"Min")
        # HI must be greater or equal to threshold and at least
        # heatIndexTemp_difference degrees higher than the maximum T.
        # NOTE(review): an earlier comment said "two degrees"; the code uses
        # the configurable difference (default 5).
        diff = self.heatIndexTemp_difference(tree, node)
        if max >= self.heatIndex_threshold(tree, node) and max >= t + diff:
            words = self.getTempRangePhrase(tree, node, (min, max), "HeatIndex")
        else:
            words = ""
        return self.setWords(node, words)

    # RH -- Contributed by ER 8/04
    def rh_threshold(self, tree, node):
        # Threshold for reporting RH in extended narrative. If MinRH grid is
        # lower than this threshold, an RH phrase will be formatted.
        # To turn off phrase completely, set to -1.
        # NOTE(review): dict.has_key is Python 2-only; "in" is the portable form.
        if self.__dict__.has_key("_rhPhraseThreshold"):
            # Use Definition setting if defined
            return self._rhPhraseThreshold
        else:
            # Default to no phrase
            return -1

    def rh_phrase(self):
        # Phrase definition: "minimum RH NN percent" for extended periods.
        return {
            "setUpMethod": self.rh_setUp,
            "wordMethod": self.rh_words,
            "phraseMethods": self.standard_phraseMethods(),
            }

    def rh_setUp(self, tree, node):
        elementInfoList = [self.ElementInfo("MinRH", "List")]
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        return self.DONE()

    def rh_words(self, tree, node):
        # Creates phrase for MinRH. Phrase will be generated if the MinRH
        # value is <= rh_threshold(tree, node). Also uses only MinRH
        # grids during the day part of the extended period. Requires the
        # sample analysis method to use [0] for the time duration.
        minRH = None
        words = ""
        statDict = node.getStatDict()

        rhStats = tree.stats.get("MinRH", node.getTimeRange(), node.getAreaLabel(),
                                 mergeMethod="List")

        if rhStats is None:
            return self.setWords(node, "")
        for rhValues, tr in rhStats:
            # Use data only from daytime timeranges
            if self.getPeriod(tr, 1):
                rh = self.getValue(rhValues, "Min")
                if minRH == None or rh < minRH:
                    minRH = rh
        # NOTE(review): backquote-repr below is Python 2-only syntax.
        if minRH is not None and minRH <= self.rh_threshold(tree, node):
            words = "minimum RH " + `int(minRH)` + " percent"
        return self.setWords(node, words)

    # MultipleElementTable calls
    def multipleElementTable_perPeriod_phrase(self):
        # Phrase definition: renders a per-period MultipleElementTable.
        return {
            "setUpMethod": self.multipleElementTable_perPeriod_setUp,
            "wordMethod": self.multipleElementTable_perPeriod_words,
            "phraseMethods": [],
            }

    def multipleElementTable_perPeriod_setUp(self, tree, node):
        elementInfoList = []
        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector)
        return self.DONE()

    def multipleElementTable_perPeriod_words(self, tree, node):
        # Make a MultipleElementTable for this period
        # NOTE(review): method continues beyond this chunk's visible extent.
        words = self.makeMultipleElementTable(
            node.getAreaLabel(), node.getTimeRange(), tree,
            byTimeRange=1)
return self.setWords(node.parent, words) - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ScalarPhrases.py +# Methods for producing text forecast from SampleAnalysis statistics. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import PhraseBuilder +import types + +class ScalarPhrases(PhraseBuilder.PhraseBuilder): + def __init__(self): + PhraseBuilder.PhraseBuilder.__init__(self) + + ############################################ + ### PUBLIC SCALAR WEATHER ELEMENT PHRASES + ### To override, override the associated method in your text product class. + + ### T + def temp_trend_nlValue(self, tree, node): + # THRESHOLD FOR REPORTING TEMPERATURE TRENDS + return 20.0 + ### Td + ### MaxT and MinT + ### Sky + + def pop_sky_lower_threshold(self, tree, node): + """Do not include an explicit Sky forecast when PoPs are + >= 60% for the majority of the forecast period. 
+ """ + # Get all the PoP stats for this component + component = node.getComponent() + compRange = component.getTimeRange() + popStats = tree.stats.get('PoP', compRange, node.getAreaLabel(), + mergeMethod="List") + + # If the PoP stats are missing + if popStats is None or popStats == []: + return 100.0 # keep sky cover as a precaution + + # Initialize a counter to keep track of the number of subperiods + # where the PoP >= 55% (rounds to 60%) + count = 0 + # Look at each PoP value + for (value, timeRange) in popStats: + # See if PoP is 'likely' or 'categorical' + if value >= 55.0: + count += 1 # count this subphrase period + + # Determine the percentage of the time PoP is 'likely' or 'categorical' + percent = 100.0 * float(count)/float(len(popStats)) + # If the majority of the period has 'likely' or 'categorical' PoPs + if percent > 50.0: + val = 59.0 # omit sky cover from the forecast + else: + val = 100.0 # sky cover required + return val + + def clearing_threshold(self, tree, node): + # Threshold for phrases such as: + # mostly cloudy in the morning then clearing + # Used by sky_phrase + return 31 + + def sky_valueList(self, tree, node): + # Phrases for sky given values. Tuples consist of: + # (threshold, dayTime phrase, nightTime phrase) + # Used by skyRange_phrase + # NOTE: If you change these words, you MUST also + # adjust the similarSkyWords_list and preferredSkyWords + # used for sub-phrase combining and reporting sky trends. + return [ + (5, "sunny", "clear"), + (25, "sunny", "mostly clear"), + (50, "mostly sunny", "partly cloudy"), + (69, "partly sunny", "mostly cloudy"), + (87, "mostly cloudy", "mostly cloudy"), + (100, "cloudy", "cloudy"), + ] + + def similarSkyWords_list(self, tree, node): + # The following pairs of sky words will be considered + # "equal" when comparing for phrase combining + # and redundancy + # + # For trends, (e.g. Sunny in the morning then partly cloudy in the afternoon.) 
+ # the following transitions are not allowed: + # Day time: + # Sunny <--> mostly sunny + # Mostly sunny <--> partly sunny + # Partly cloudy <--> mostly cloudy + # Night time: + # Clear <--> mostly clear + # Mostly clear <--> partly cloudy + # Mostly cloudy <--> cloudy + # + # In other words these transitions are allowed: + # Day time: + # sunny <--> partly sunny or above + # mostly sunny <--> mostly cloudy or above + # partly sunny <--> sunny or cloudy + # mostly cloudy <--> mostly sunny + # Night time: + # clear can go to partly cloudy or above + # mostly clear <--> mostly cloudy or above + # partly cloudy <--> mostly cloudy or above + # mostly cloudy <--> partly cloudy or below + + dayNight = self.getPeriod(node.getTimeRange(), 1) + if dayNight == self.DAYTIME(): + return [ + ("sunny", "mostly sunny"), + ("mostly sunny", "partly sunny"), + ("partly sunny", "mostly cloudy"), + ("mostly cloudy", "cloudy"), + ] + else: + return [ + ("clear", "mostly clear"), + ("mostly clear", "partly cloudy"), + ("mostly cloudy", "cloudy"), + ] + + def similarSkyWords_flag(self, tree, node, words1, words2): + # Returns 1 if the pair of words is equal or similar + # according to the "similarSkyWords_list" + if words1 == words2: + return 1 + # Check for similarity + for value1, value2 in self.similarSkyWords_list(tree, node): + if (words1 == value1 and words2 == value2) or \ + (words2 == value1 and words1 == value2): + return 1 + return 0 + + def preferredSkyWords(self, tree, node, words1, words2): + # Returns the preferred words given the pair + # of words1, words2 + preferredList = ["mostly sunny", "mostly clear", "cloudy"] + if words1 in preferredList: + return words1 + if words2 in preferredList: + return words2 + return words1 + + def reportIncreasingDecreasingSky_flag(self, tree, node): + # If 1, will use "increasing clouds", "decreasing clouds" + # wording instead of "mostly cloudy becoming sunny" + + # You have 3 options: + # return 0 -- do not use increasing/decreasing 
wording + # return 1 -- use increasing/decreasing wording if applicable + # Use the code shown below to use increasing/decreasing wording + # but avoid repetitive usage. + return 0 + #return 1 + + # Use the following code to avoid redundancy e.g. + # SUNDAY...Increasing clouds. + # SUNDAY NIGHT...Increasing clouds. + # + #If the previous period had increasing or decreasing wording, return 0 + # Otherwise, return 1 + + # Check to see if previous period had increasing or decreasing wording + component = node.getComponent() + prevComp = component.getPrev() + if prevComp is not None: + # Look at the sky_phrase + skyWords = self.findWords( + tree, prevComp, "Sky", node.getAreaLabel(), + phraseList=[node.getAncestor('name')], phraseLevel=1) + if skyWords is not None: + if skyWords.find("increasing") >= 0 or \ + skyWords.find("decreasing") >= 0: + return 0 + return 1 + return 1 + + def reportClearSkyForExtendedPeriod_flag(self, tree, node): + # If 1, will report clear/mostly clear wording for periods that + # exceed 12 hours. Otherwise, will report sunny/mostly sunny. 
+ return 1 + + def sky_value(self, tree, node, value, dayNight, returnIndex=0): + # Check for areal coverage term + # Otherwise, access the sky_valueList and return words corresponding to value + if value is None: + return "" + words = self.areal_sky_value(tree, node, value, dayNight) + if words is not None: + # Set to use then connector only + node.set("connector", " then ") + # Return areal wording + if returnIndex: + return words, 0 + else: + return words + sky_valueList = self.sky_valueList(tree, node) + for i in range(len(sky_valueList)): + threshold, dayWords, nightWords = sky_valueList[i] + if value <= threshold: + flag = self.reportClearSkyForExtendedPeriod_flag(tree, node) + if flag == 1: + if dayNight == self.DAYTIME(): + words = dayWords + else: + words = nightWords + else: + if dayNight == self.NIGHTTIME(): + words = nightWords + else: + words = dayWords + if returnIndex: + return words, i + else: + return words + + def areal_sky_flag(self, tree, node): + # Set to 1 if you want to use areal (e.g. patchy clouds, areas of clouds) + # vs. traditional sky wording when appropriate. + # BE SURE AND SET THE "arealSkyAnalysis" flag to 1 in the Definition section! + # You may want to base this decision on the current edit area and/or + # component e.g. 
"Period_1" + return 0 + + def areal_sky_value(self, tree, node, value, dayNight): + if not self.areal_sky_flag(tree, node): + return None + skyBins = tree.stats.get("Sky", node.getTimeRange(), + node.getAreaLabel(), + statLabel="binnedPercent", + mergeMethod="MergeBins") + #print "skyBins", skyBins, node.getTimeRange() + if skyBins is None: + return None + + # Determine percent in highest bin + length = len(skyBins) + highBin = skyBins[length-1] + low, high, highBinPercent = highBin + + # Base wording on high bin percent + words = None + #print "highBinPercent", highBinPercent + for skyPercent, skyWords in self.areal_skyPercentages(tree, node): + #print "skyPercent", skyPercent + if highBinPercent > skyPercent: + words = skyWords + break + #print "words", words + if words is None: + return None # Revert to traditional coverage + + # Check for sky-related Wx + wxStats = tree.stats.get("Wx", node.getTimeRange(), node.getAreaLabel(), + mergeMethod="Average") + if wxStats is None: + return None + # Keep track of skyRelatedWx that we have added to the wording already + # so we don't end up with "Areas of low clouds and fog and fog." + foundWx = [] + for wx in self.areal_skyRelatedWx(tree, node): + # Look for "dense" fog + dense = "" + if wx == "F": + for subkey, rank in wxStats: + if subkey.wxType() == "F" and subkey.intensity() == "+": + dense = "dense " + for subkey, rank in wxStats: + if subkey.wxType() == wx and wx not in foundWx: + foundWx.append(wx) + # Add wording + words = words + " and " + dense + subkey.wxDef().typeDesc(wx).lower() + return words + + def areal_skyPercentages(self, tree, node): + # Used IF the areal_sky_flag is 1. + # Each tuple is a (skyValue, words) pair such that if the + # sky percentage with the highest areal coverage exceeds + # the given skyValue, the associated words are used. 
+ return [ + (80, "low clouds"), + (40, "areas of clouds"), + (9, "patchy clouds"), + ] + + def areal_skyRelatedWx(self, tree, node): + # Used IF the areal_sky_flag is 1. + # Weather types that are related to sky cover and will be included in the + # sky phrase if their areal coverage matches the sky areal coverage. + # For example: areas of low clouds and fog in the morning, then mostly sunny. + return ["F", "L"] + + def disableSkyRelatedWx(self, tree, node): + # Disable the areal_skyRelatedWx subkeys for the given node + wxStats = tree.stats.get("Wx", node.getTimeRange(), node.getAreaLabel(), + mergeMethod="Average") + if wxStats is None: + return + disabled = node.getAncestor("disabledSubkeys") + if disabled is None: + disabled = [] + #print "wxStats", wxStats + for wx in self.areal_skyRelatedWx(tree, node): + for subkey, rank in wxStats: + if subkey.wxType() == wx: + disabled.append(subkey) + node.set("disabledSubkeys", disabled) + + def sky_phrase(self): + return { + "setUpMethod": self.sky_setUp, + "wordMethod": self.sky_words, + "phraseMethods": [ + self.checkLocalEffects, + self.combineSky, + self.skySpecialCases, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.sky_timeDescriptorModeration, + self.assembleSubPhrases, + self.postProcessPhrase, + ] + } + def sky_setUp(self, tree, node): + sky = self.ElementInfo("Sky", "List") + elementInfoList = [sky] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def combineSky(self, tree, node): + return self.combineChildren(tree, node, self.combine_sky) + def combine_sky(self, tree, node, subPhrase1, subPhrase2): + skyValue1 = self.getScalarData(tree, subPhrase1, "Sky", "MinMax") + skyValue2 = self.getScalarData(tree, subPhrase2, "Sky", "MinMax") + if skyValue1 is None and skyValue2 is None: + return 1, None + if skyValue1 is None or skyValue2 is None: + return 0, None + timeRange = node.getTimeRange() + if timeRange.duration() > 12*3600: + 
dayNight = -1 + else: + dayNight = self.getPeriod(timeRange, 1) + words1 = self.sky_value(tree, subPhrase1, self.getValue(skyValue1), dayNight) + words2 = self.sky_value(tree, subPhrase2, self.getValue(skyValue2), dayNight) + if self.similarSkyWords_flag(tree, subPhrase1, words1, words2): + min1, max1 = skyValue1 + min2, max2 = skyValue2 + newVal = (min(min1, min2), max(max1, max2)) + elementInfoList = node.get("elementInfoList") + newSubPhrase = self.combine2SubPhrases( + tree, node, subPhrase1, subPhrase2, elementInfoList, newVal) + return 1, newSubPhrase + else: + return 0, None + + def skySpecialCases(self, tree, node): + # If phrase has exactly 2 subphrases, + # Look for clearing. + # If not, then if reportIncreasingDecreasing, + # report increasing/decreasing wording. + subPhrases = node.get("childList") + if len(subPhrases) == 2: + words = None + skyValue1 = self.getScalarData(tree, subPhrases[0], "Sky", "Average") + skyValue2 = self.getScalarData(tree, subPhrases[1], "Sky", "Average") + # Look for clearing + clearing_threshold = self.clearing_threshold(tree, node) + if skyValue1 > skyValue2 and skyValue2 <= clearing_threshold and skyValue1 > clearing_threshold: + period1Phrase = self.timePeriod_descriptor(tree, node, subPhrases[0].getTimeRange()) + period1Phrase = self.addSpace(period1Phrase, "leading") + timeRange = node.getTimeRange() + if timeRange.duration() > 12*3600: + dayNight = -1 + else: + dayNight = self.getPeriod(timeRange, 1) + words1 = self.sky_value(tree, subPhrases[0], skyValue1, dayNight) + words = words1 + period1Phrase + " then clearing" + else: + reportIncreasingDecreasing = self.reportIncreasingDecreasingSky_flag(tree, node) + if reportIncreasingDecreasing: + if skyValue2 > skyValue1: + words = "increasing clouds" + else: + words = "decreasing clouds" + if words is not None: + # End processing of the phrase; we are done + node.set("doneList", node.get("methodList")) + return self.setWords(node, words) + return self.DONE() + + def 
sky_timeDescriptorModeration(self, tree, node): + # If only two subphrases, turn off second time descriptor + # + childList = node.get("childList") + length = len(childList) + # Check for words + if length > 0: + words = childList[0].get("words") + if words is None: + return + else: + return self.DONE() + if length == 2: + words0 = childList[0].get("words") + words1 = childList[1].get("words") + if words0 != "" and words1 != "": + # Neither is null + flag0 = 1 + flag1 = 0 + else: # One is null + flag0 = 1 + flag1 = 1 + if words0 == "": # First sub-phrase is null + childList[1].set("words", "becoming " + words1) + childList[0].set("timeDescFlag", flag0) + childList[1].set("timeDescFlag", flag1) + return self.DONE() + + def sky_words(self, tree, node): + # Create sky phrase. + statDict = node.getStatDict() + sky = self.getStats(statDict, "Sky") + if sky is None: + return self.setWords(node, "") + + # Check Pop i.e. don't report sky if we can assume overcast + threshold = self.pop_sky_lower_threshold(tree, node) + if self.lowPop_flag(tree, node, threshold) == 0: + return self.setWords(node, "") + + sky = self.getValue(sky) + timeRange = node.getTimeRange() + if timeRange.duration() > 12*3600: + words = self.getSkyDiurnalWords(tree, node) + if words is not None: + return self.setWords(node, words) + dayNight = -1 + else: + dayNight = self.getPeriod(timeRange, 1) + words = self.sky_value(tree, node, sky, dayNight) + return self.setWords(node, words) + + def getSkyDiurnalWords(self, tree, node): + # Produce words such as + # xx in the night and morning otherwise yy + # where xx is the sky value for the night and morning + # and yy is the sky value otherwise + # + # If the night and morning words are the same as the + # evening and afternoon, (no diurnal pattern), + # return None + + # If we have not tested for diurnal sky and wx, return + if "DiurnalSkyWx" not in self.periodCombining_elementList(tree, node): + return None + + wordList = [] + index = 0 + trList = 
self.divideRange(node.getTimeRange(), 6) + dayNight = self.getPeriod(trList[0], 1) + # Need to save timeRange so we can re-set it for determining + # words for sub-ranges + saveTR = node.getTimeRange() + # Only need to use first 12 hours to check for similarity + for tr in trList[0:2]: + sky = tree.stats.get("Sky", tr, node.getAreaLabel(), + mergeMethod="Average") + sky = self.getValue(sky) + node.timeRange = tr + result = self.sky_value(tree, node, sky, dayNight) + wordList.append(result) + #print "\nsky, tr", sky, tr + #print "words", result + index += 1 + # Re-set timeRange + node.timeRange = saveTR + #print "\nwordList", wordList + if wordList[0] == wordList[1]: + return None + if dayNight == self.DAYTIME(): + # First period is the morning + words1 = wordList[0] + words2 = wordList[1] + descriptor = " in the morning and night" + else: + # First period is the evening + words1 = wordList[1] + words2 = wordList[0] + descriptor = " in the night and morning" + words2 = words2.replace("sunny", "clear") + words = words1 + descriptor + ", otherwise " + words2 + #print "returning", words + return words + + def simple_sky_phrase(self): + return { + "phraseMethods": [ + self.simple_sky_words, # phrase.words + ], + } + def simple_sky_words(self, tree, phrase): + # Create sky phrase. + + # If no information, do not report sky condition + timeRange = phrase.getTimeRange() + #print "Getting sky" + skyStats = tree.stats.get("Sky", timeRange, phrase.getAreaLabel(), mergeMethod="List") + #print "Sky ", skyStats + statsByRange = self.makeRangeStats(tree, self.SCALAR(), skyStats, timeRange) + #print "Sky ", statsByRange + if statsByRange is None: + return self.setWords(phrase, "") + + # Check Pop i.e. 
don't report sky if we can assume overcast + threshold = self.pop_sky_lower_threshold(tree, phrase) + if self.lowPop_flag(tree, phrase, threshold) == 0: + return self.setWords(phrase, "") + + reportIncreasingDecreasing = self.reportIncreasingDecreasingSky_flag(tree, phrase) + + # Get values for each part of time range + if len(statsByRange) == 1: + skyTime1, period1 = statsByRange[0] + skyTime2, period2 = statsByRange[0] + else: + skyTime1, period1 = statsByRange[0] + skyTime2, period2 = statsByRange[1] + + skyTime1 = self.getValue(skyTime1) + skyTime2 = self.getValue(skyTime2) + + dayNight1 = self.getPeriod(period1, 1) + dayNight2 = self.getPeriod(period2, 1) + + # Determine category and phrase for skyTime1 and skyTime2 + index = 1 + for skyValue, dayNight in [(skyTime1, dayNight1), (skyTime2, dayNight2)]: + skyPhrase, valueIndex = self.sky_value(tree, phrase, skyValue, dayNight, 1) + exec("words"+repr(index)+"=skyPhrase") + exec("index"+repr(index)+"=valueIndex") + index = index+1 + + period1Phrase = self.timePeriod_descriptor(tree, phrase, period1) + period1Phrase = self.addSpace(period1Phrase, "leading") + + # Look for clearing + clearing_threshold = self.clearing_threshold(tree, phrase) + if skyTime1 > skyTime2 and skyTime2 <= clearing_threshold and skyTime1 > clearing_threshold: + return self.setWords(phrase, words1 + period1Phrase + " then clearing") + + # See if skyTime1 is different from skyTime2 by more than + # one category of sky values + if abs(index1 - index2) > self.sky_index_difference(tree, phrase): + if reportIncreasingDecreasing == 1: + if skyTime2 > skyTime1: + return self.setWords(phrase, "increasing clouds") + else: + return self.setWords(phrase, "decreasing clouds") + else: + return self.setWords(phrase, words1 + period1Phrase + " then becoming " + words2) + # Report Average value + else: + skyValue = self.average(skyTime1, skyTime2) + if timeRange.duration() > 12*3600: + dayNight = -1 + else: + dayNight = self.getPeriod(timeRange, 1) + words 
= self.sky_value(tree, phrase, skyValue, dayNight) + return self.setWords(phrase, words) + + # PoP + def wxQualifiedPoP_flag(self, tree, node): + # If 1, PoP phrases will be qualified with the weather type + # E.g. "Chance of rain and snow 20 percent." instead of + # "Chance of precipitation 20 percent." + return 1 + + def popMax_phrase(self): + return { + "setUpMethod": self.popMax_setUp, + "wordMethod": self.popMax_words, + "phraseMethods": self.standard_phraseMethods() + } + def popMax_setUp(self, tree, node): + # NOTE: The method is set to "Average" instead of "List" so + # that the PoP phrase will always cover the full period. + # It doesn't matter what method (other than List) we choose + # since the popMax_words method gets its PoP value directly from + # the "matchToWx" method. + elementInfoList = [self.ElementInfo("PoP", "Average")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + + def popMax_words(self, tree, node) : + "Create phrase Probability of Precipitation for maximum value" + # Wait for weather phrase to complete + wxWords = "" + attrDict = {} + if self.wxQualifiedPoP_flag(tree, node) == 1: + compArea = node.getComponent().getAreaLabel() + wxWords, attrDict = self.findWords(tree, node, "Wx", [node.getAreaLabel(), compArea], + phraseList=["weather_phrase", "skyPopWx_phrase"], + attributes=['reportedRankList']) + if wxWords is None: + return + #print "wxWords", wxWords + if wxWords == "": + #print "setting popMax to Null" + return self.setWords(node, "null") + + #print "PopMax", node.getAreaLabel(), wxWords + pop = self.matchToWx(tree, node, "PoP") + #print " Pop", pop + if pop is None: + return self.setWords(node, "") + + # Check pop thresholds + pop = self.getValue(pop, "Max") + if pop < self.pop_lower_threshold(tree, node) or \ + pop > self.pop_upper_threshold(tree, node): + return self.setWords(node, "") + + popType = self.getPopType(tree, node, pop, wxWords, attrDict) + node.set("popType", 
popType) + result = self.checkRepeatingString(tree, node, popType, "popType",0) + if result == -1: + # Wait for previous phrase to finish + return + popType = self.addSpace(result) + + unit = self.units_descriptor(tree, node, "unit", "percent") + popStr = self.getPopStr(tree, node, pop) + words = popType + popStr + " " + unit + + # Need to try and set phrase descriptor at this point since + # weather phrase was not complete during phrase set-up + phrase = node.parent + if phrase.get("descriptor") is None: + descriptor = self.phrase_descriptor(tree, phrase, "PoP", "PoP") + phrase.set("descriptor", descriptor) + + return self.setWords(node, words) + + def getPopStr(self, tree, node, pop): + pop = int(pop) + if pop >= 100: + popWords = "near 100" + else: + popWords = repr(pop) + return popWords + + def getPopType(self, tree, node, pop, wxWords, attrDict): + popType = "precipitation" + if self.wxQualifiedPoP_flag(tree, node) == 1: + # Examine reported weather type(s) from phrase. + # If there is more than one descriptor for precipitating weather + # or if they are general weather types, + # return "precipitation" + # Otherwise, describe the weather type + # e.g. 
chance of rain, chance of snow + wxTypes = [] + if "reportedRankList" in attrDict: + rankList = attrDict["reportedRankList"] + for subkey, rank in rankList: + wxTypes.append(subkey.wxType()) + generalTypes = ["IP", "ZL", "ZR", "ZF", "ZY"] + for general in generalTypes: + if general in wxTypes: + return "precipitation" + descriptors = { + "R": "rain", + "RW": "showers", + "S": "snow", + "SW": "snow", + "T": "thunderstorms", + } + popTypes = [] + for wxType in wxTypes: + if wxType in ["R", "S", "RW", "SW", "T"]: + desc = descriptors[wxType] + if desc not in popTypes: + popTypes.append(desc) + if len(popTypes) > 1: + popType = "precipitation" + elif len(popTypes) == 1: + popType = popTypes[0] + return popType + + # This version will report only the weather types that + # match the reported PoP +## def getPopType(self, tree, node, pop, wxWords, attrDict): +## popType = "precipitation" +## if self.wxQualifiedPoP_flag(tree, node) == 1: +## ## Need to find weather type(s) from phrase. +## ## "wxWords" is the concatenation of all weather phrases +## ## for this component. +## ## Returns "popType" e.g. 
chance of rain, chance of rain and snow +## wxTypes = [] +## if attrDict.has_key("reportedRankList"): +## rankList = attrDict["reportedRankList"] +## for subkey, rank in rankList: +## # Check the coverage against the reported PoP +## covLow, covHigh = self.coveragePoP_value(subkey.coverage()) +## if covHigh >= pop: +## wxTypes.append(subkey.wxType()) +## popType = None +## generalTypes = ["IP", "ZL", "ZR", "ZF", "ZY"] +## for general in generalTypes: +## if general in wxTypes: +## popType = "precipitation" +## if popType is None: +## rain = 0 +## snow = 0 +## thunder = 0 +## showers = 0 +## snowShowers = 0 +## rainShowers = 0 +## if "R" in wxTypes: +## rain = 1 +## if "S" in wxTypes: +## snow = 1 +## if "RW" in wxTypes: +## showers = 1 +## if "SW" in wxTypes: +## snowShowers = 1 +## if "T" in wxTypes: +## thunder = 1 +## if showers and not snowShowers: +## rainShowers = 1 +## if (rain or rainShowers or thunder) and snow: +## popType = "precipitation" +## else: +## if snow or snowShowers: +## if rain or rainShowers: +## if wxWords.find(" or ") > -1: +## popType = "rain or snow" +## else: +## popType = "rain and snow" +## else: +## popType = "snow" +## elif rain and not rainShowers: +## popType = "rain" +## elif showers: +## popType = "showers" +## if thunder: +## popType = "showers and thunderstorms" +## elif thunder: +## popType = "thunderstorms" +## else: +## popType = "precipitation" +## if popType is None: +## popType = "precipitation" +## return popType + + def areal_or_chance_pop_descriptor(self, tree, node, key, elementName): + # Stats: dominantWx + # Returns descriptor for a pop phrase based on Wx + # Returns areal coverage of precipitation OR + # chance of precipitation + # Get weather. Determine if ANY terms in the period are convective. If so, + # change the phrase to "areal coverage". This is an Amarillo WFO + # preference. 
+ wxPhrase = self.findWords(tree, node, "Wx", node.getAreaLabel(), + phraseList=["weather_phrase", "skyPopWx_phrase"]) + if wxPhrase is None: + return None + if wxPhrase == "": + return "chance of" + use_areal = 0 + + if wxPhrase.find("isolated") >= 0: + use_areal = 1 + if wxPhrase.find("scattered") >= 0: + use_areal = 1 + if wxPhrase.find("numerous") >= 0: + use_areal = 1 + if wxPhrase.find("widespread") >= 0: + use_areal = 1 + + if use_areal == 1: + return "areal coverage of" + else: + return "chance of" + + def allAreal_or_chance_pop_descriptor(self, tree, node, key, elementName): + # Stats: rankedWx + # Returns descriptor for a pop phrase based on Wx + # Returns areal coverage of precipitation OR + # chance of precipitation + # Get weather. Determine if ALL terms in the period are convective. If so, + # change the phrase to "areal coverage". This is an Amarillo WFO + # preference. + statsByRange = tree.stats.get( + "Wx", node.getTimeRange(), node.getAreaLabel(), mergeMethod="List") + if statsByRange is None: + return "chance of" + use_areal = 1 + + for rankList, subRange in statsByRange: + subkeys = self.getSubkeys(rankList) + for subkey in subkeys: + if self.precip_related_flag(tree, node, subkey): + cov = subkey.coverage() + if cov not in ["Iso", "Sct", "Num", "Wide", ""]: + use_areal = 0 + break + + if use_areal == 1: + return "areal coverage of" + else: + return "chance of" + + # Temperature worded phrases: + # HIGHS IN THE MIDDLE 80S + # HIGHS IN THE MIDDLE 80S TO LOWER 90S + # using temp_phrase_threshold + + def highs_phrase(self): + return { + "setUpMethod": self.highs_setUp, + "wordMethod": self.temp_words, + "phraseMethods": self.standard_phraseMethods() + } + def highs_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("MaxT", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def lows_phrase(self): + return { + "setUpMethod": self.lows_setUp, + "wordMethod": self.temp_words, + 
"phraseMethods": self.standard_phraseMethods(), + } + def lows_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("MinT", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def temp_words(self, tree, node): + stats = self.getTempStats(tree, node) + if stats is None: + return self.setWords(node, "") + elementName = node.getAncestor("elementName") + words = self.getTempPhrase(tree, node, stats, elementName) + return self.setWords(node, words) + + def tempDiff_threshold(self, tree, node): + # If the difference between the minimum and maximum temperature values + # exceeds this range, report the actual values e.g. 23 to 29. + return 4 + + def getTempPhrase(self, tree, node, temp, elementName): + minVal, maxVal = self.getValue(temp, "MinMax") + minVal = int(minVal) + maxVal = int(maxVal) + + # Chris Gibson's version +## # Handle teens +## ave = int((maxVal + minVal)/2) +## if ave < 20 and ave > 9: +## if ave > 15: +## return "15-20" +## elif ave == 15 or ave == 14: +## return "near 15" +## elif ave == 10 or ave == 11: +## return "near 10" +## else: +## return "10-15" +## if ave < 10 and ave > 0: +## if ave > 4: +## return "5 to 10 above" +## else: +## return "zero to 5 above" + +## if minVal <=0 or maxVal <=0: +## maxVal = int(self.round(maxVal, "Nearest", 5)) +## minVal = int(self.round(minVal, "Nearest", 5)) + + # End Chris Gibson's version + + # Check for exceptions + exceptions = self.tempPhrase_exceptions(tree, node) + for minBoundaries, maxBoundaries, equalityPhrase, phrase in exceptions: + if minVal >= minBoundaries[0] and minVal <= minBoundaries[1] and \ + maxVal >= maxBoundaries[0] and maxVal <= maxBoundaries[1]: + if minVal == maxVal: + resultPhrase = equalityPhrase + else: + resultPhrase = phrase + return self.constructTempException(resultPhrase, minVal, maxVal) + + # Handle actual range values + if abs(maxVal-minVal) > self.tempDiff_threshold(tree, node): + return repr(minVal) + " to " + 
repr(maxVal) + + # set up for "lower," "mid," or "upper" wording + # Modulus (%) gets tricky below zero so have to take + # modulus of abs(temperature) + decadeMaxStr = self.getDecadeStr(maxVal) + decadeMinStr = self.getDecadeStr(minVal) + digitMax = abs(maxVal) % 10 + digitMin = abs(minVal) % 10 + boundaries = self.tempPhrase_boundary_dict(tree, node) + digitMinStr = self.getDigitStr(digitMin, boundaries) + digitMaxStr = self.getDigitStr(digitMax, boundaries) + lowerMax = boundaries["lower"][1] + upperMin = boundaries["upper"][0] + if decadeMinStr == decadeMaxStr: + # this solves the problem of returning "...IN THE LOWER 60s TO LOWER 60s..." + if digitMinStr == digitMaxStr: + return "in the " + digitMinStr + " " + decadeMinStr + + # shortens a return of "...lower to upper..." to "...in the xxS" + elif digitMin <= lowerMax and digitMax >= upperMin: + return "in the " + decadeMaxStr + + else: + return "in the " + digitMinStr + " to " + digitMaxStr + " " + decadeMaxStr + elif digitMinStr == digitMaxStr: + # return 50s TO LOWER 60s (not LOWER 50s TO LOWER 60s) + return "in the " + decadeMinStr + " to " + digitMaxStr + " " + decadeMaxStr + else: # different decade + if maxVal >= 100 and minVal < 100: # UPPER 80s to 102 + return digitMinStr + " " + decadeMinStr + " to " + str(maxVal) + # return NEAR 60 (not UPPER 50s TO LOWER 60s) + elif digitMin >= upperMin and digitMax <= lowerMax and maxVal - minVal <= 10: + roundedMax = int(self.round(maxVal, "Nearest", 10)) + return self.constructTempException("near %max", minVal, roundedMax) + # return 50s and 60s (not lower 50s to upper 60s) + elif digitMin <= lowerMax and digitMax >= upperMin: + return "in the " + decadeMinStr + " to " + decadeMaxStr + digitMinPhrase = digitMinStr + " " + decadeMinStr + digitMaxPhrase = digitMaxStr + " " + decadeMaxStr + return "in the " + digitMinPhrase + " to " + digitMaxPhrase + + def constructTempException(self, phrase, minVal, maxVal): + phrase = phrase.replace("%min", repr(minVal)) + 
phrase = phrase.replace("%max", repr(maxVal)) + zeroPhraseMin = self.getZeroPhrase(minVal) + zeroPhraseMax = self.getZeroPhrase(maxVal) + phrase = phrase.replace("%zeroPhraseMin", zeroPhraseMin) + phrase = phrase.replace("%zeroPhraseMax", zeroPhraseMax) + return phrase + + def getDecade(self, value): + decade = abs(int(value)) / 10 * 10 + if value < 0: + decade = -decade + return decade + + def getDecadeStr(self, value): + decade = self.getDecade(value) + if decade == 0: + return "single digits" + elif decade == 10: + return "teens" + elif decade == -10: + return "teens below zero" + else: + return repr(decade) + "s" + + def getDigitStr(self, value, boundaries): + for key in list(boundaries.keys()): + lower, upper = boundaries[key] + if value >= lower and value <= upper: + return key + + def tempPhrase_exceptions(self, tree, node): + # These exceptions to the getTempPhrase are processed before trying to + # generate a phrase such as "in the lower 20's to upper 30's". + return [ + # Boundaries are inclusive + # Min boundaries # Max boundaries # phrase if Min == Max # phrase if Min != Max + # %min will be replaced by the minimum temperature value + # %max will be replaced by the maximum temperature value + # %zeroPhraseMin will be replaced with a zero-based phrase for the min e.g. + # 12 below + # %zeroPhraseMax will be replaced with a zero-based phrase for the min e.g. 
+ # 5 above + + # Both 100 and above + [(100,200), (100,200), "around %min", "%min to %max"], + # Min in 90's, Max 100 and above + [(90, 99), (100,200), "", "%min to %max"], + + # Handle lower temperatures + [(1, 19), (1, 29), "around %min", "%min to %max"], + # Handle zero temperatures + [(0, 0), (0, 29), "near zero", "zero to %zeroPhraseMax"], + [(-200, 0), (0, 0), "near zero", "%zeroPhraseMin to zero"], + + # Min below zero, Max above zero + [(-200,-1), (1,200), "near zero", "%zeroPhraseMin to %zeroPhraseMax zero"], + # Both below zero + #[(-200,-1), (-200,-1), "%zeroPhraseMin","%zeroPhraseMax to %zeroPhraseMin zero"], + [(-200,-1), (-200,-1), "around %zeroPhraseMin","%zeroPhraseMax to %zeroPhraseMin zero"], + +## # Chris Gibson's version Comment out the above exception and use this instead: +## #[(-200,-1), (-200,-1), "near %zeroPhraseMax","%zeroPhraseMax to %zeroPhraseMin zero"] + + # Around phrases fix from Steve Nelson + [(20, 20), (20, 20), "around %min", "%min to %max"], + [(30, 30), (30, 30), "around %min", "%min to %max"], + [(40, 40), (40, 40), "around %min", "%min to %max"], + [(50, 50), (50, 50), "around %min", "%min to %max"], + [(60, 60), (60, 60), "around %min", "%min to %max"], + [(70, 70), (70, 70), "around %min", "%min to %max"], + [(80, 80), (80, 80), "around %min", "%min to %max"], + [(90, 90), (90, 90), "around %min", "%min to %max"], + + ] + + def tempPhrase_boundary_dict(self, tree, node): + return { + "lower": (0,3), + "mid": (4,6), + "upper": (7,9), + } + + # Temperature worded phrases: + # HIGHS 45 TO 50 + # using range_nlValue for "MinT", "MaxT" + + def highs_range_phrase(self): + return { + "setUpMethod": self.highs_setUp, + "wordMethod": self.tempRange_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def lows_range_phrase(self): + return { + "setUpMethod": self.lows_setUp, + "wordMethod": self.tempRange_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def tempRange_words(self, tree, node) : + "Create 
phrase for Min or Max Temperature" + stats = self.getTempStats(tree, node) + if stats is None: + return self.setWords(node, "") + elementName = node.getAncestor("elementName") + words = self.getTempRangePhrase(tree, node, stats, elementName) + return self.setWords(node, words) + + def getTempRangePhrase(self, tree, node, temp, elementName): + connector = self.value_connector(tree, node, elementName, elementName) + min, max = self.getValue(temp, "MinMax") + + decadeMax = self.getDecade(max) + digitMax = max % 10 + decadeMin = self.getDecade(min) + digitMin = min % 10 + diff = abs(max - min) + + # "Around" phrases + # e.g. a range of 19-21 --> "highs around 20" + around = self.addSpace(self.phrase_descriptor(tree, node, "around", elementName)) + if 0 < diff <= 3 and (digitMax == 0 or digitMax == 1): + if decadeMax <= 10: + decadeMax = self.getZeroPhrase(decadeMax) + else: + decadeMax = repr(decadeMax) + return around + decadeMax + + # Report the range + min = int(min) + max = int(max) + if min == max: + # Adjust descriptor e.g. 
highs --> high + descriptor = node.parent.get("descriptor") + if descriptor is not None and around == "": + descriptor = descriptor.replace("s", "") + node.parent.set("descriptor", descriptor) + if min <= 10: + min = self.getZeroPhrase(min) + else: + min = repr(min) + return around + min + elif min > 0 and max > 0: + return repr(min) + connector + repr(max) + elif min <= 0 and max > 0: + minval = self.getZeroPhrase(min) + maxval = self.getZeroPhrase(max, 1) + return minval + " to " + maxval + else: + if min < 0 and max < 0: + firstVal = self.getZeroPhrase(max) + secondVal = self.getZeroPhrase(min, 1) + else: + firstVal = self.getZeroPhrase(min) + secondVal = self.getZeroPhrase(max, 1) + return firstVal + connector + secondVal + + def getZeroPhrase(self, val, addZero=0): + if val == 0: + return "zero" + if val < 0: + phrase = repr(abs(val)) + " below" + else: + phrase = repr(val) + " above" + if addZero == 1: + phrase = phrase + " zero" + return phrase + + # Extended Temperatures + def extended_temp_range(self, tree, node): + # Range for extended temperatures e.g. + # "Highs 45 to 55." + # This value must be 10 or 5. + # Other values are not supported for extended ranges. 
+ return 10 + #return 5 + + def extended_highs_phrase(self): + return { + "setUpMethod": self.highs_setUp, + "wordMethod": self.extended_temp_words, + "phraseMethods": self.standard_phraseMethods(), + } + def extended_lows_phrase(self): + return { + "setUpMethod": self.lows_setUp, + "wordMethod": self.extended_temp_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def extended_temp_words(self, tree, node) : + "Create phrase for Min or Max Temperature" + stats = self.getTempStats(tree, node) + if stats is None: + return self.setWords(node, "") + elementName = node.get("elementName") + if elementName == "MaxT": + mergeMethod = "Max" + else: + mergeMethod = "Min" + temp = int(self.getValue(stats, mergeMethod)) + words = self.getExtendedTempPhrase(tree, node, temp) + return self.setWords(node, words) + + def getExtendedTempPhrase(self, tree, node, temp): + # Temperatures above 99 + # Give exact value + if temp > 99: + if node.getIndex() == 0: + parent = node.getParent() + descriptor = parent.get("descriptor") + descriptor = descriptor.replace("s ", " ") + parent.set("descriptor", descriptor) + return repr(int(temp)) + + # Temperatures below 10 + # Build and return special phrases + if temp < -27: + return "25 below to 35 below" + elif temp < -22: + return "20 below to 30 below" + elif temp < -17: + return "15 below to 25 below" + elif temp < -12: + return "10 below to 20 below" + elif temp < -7: + return "5 below to 15 below" + elif temp < -2: + return "zero to 10 below" + elif temp < 3: + return "5 below zero to 5 above" + elif temp < 8: + return "zero to 10 above" + elif temp < 10: + return "5 to 15" + + # Determine modifier for temperature: around, lower, mid, upper + decade = self.getDecade(temp) + digit = temp % 10 + + range = self.extended_temp_range(tree, node) + if range == 10: + if digit >= 0 and digit <= 2: + phrase = self.getExtTemp(decade-5, decade+5) + elif digit >= 3 and digit <= 7: + if decade == 10: + phrase = "in the " + "teens" + elif 
decade <= 0 or decade >= 100: + phrase = self.getExtTemp(decade, decade+10) + else: + phrase = "in the " + repr(decade) + "s" + elif digit >= 8 and digit <=9: + phrase = self.getExtTemp(decade+5, decade+15) + else: # Assume range of 5 + if digit >= 0 and digit <= 2: + phrase = self.getExtTemp(decade, decade+5) + elif digit >= 3 and digit <= 7: + if decade == 10: + phrase = "in the " + "teens" + elif decade <= 0 or decade >= 100: + phrase = self.getExtTemp(decade, decade+5) + else: + phrase = "in the " + repr(decade) + "s" + elif digit >= 8 and digit <=9: + phrase = self.getExtTemp(decade+5, decade+10) + + return phrase + + def getExtTemp(self, val1, val2): + v1 = repr(val1) + if val1 < 0: + v1 = v1 + " below" + v2 = repr(val2) + if val2 < 0: + v2 = v2 + " below" + return v1 + " to " + v2 + + def getTempStats(self, tree, node): + "Get correct Temperature stats (MaxT or MinT) to determine temperature phrase" + elementName = node.getAncestor("elementName") + timeRange = node.getTimeRange() + areaLabel = node.getAreaLabel() + day = self.getPeriod(timeRange, 1) + # day is 1=DAYTIME or 0=NIGHTTIME or -1=DAYNIGHT (spans both day and night) + # In the normal case, MaxT is greater than MinT and: + # for highs, return MaxT + # for lows, return MinT + # If, however, MaxT is less than MinT, then MaxT and MinT have to be switched + + # Don't do highs at night or lows in the day + if elementName == "MaxT": + dayValue = self.DAYTIME() + else: + dayValue = self.NIGHTTIME() + if not day == self.DAYNIGHT() and not day == dayValue: + return None + + if timeRange.duration() <= 12*3600: + statDict = node.getStatDict() + stats = self.getStats(statDict, elementName) + return stats + else: + # If the time period spans day and night, + # get the conglomerate stats. 
+ maxT = tree.stats.get("MaxT", timeRange, areaLabel, + mergeMethod="MinMax") + minT = tree.stats.get("MinT", timeRange, areaLabel, + mergeMethod="MinMax") + if maxT is None and minT is None: + return None + if maxT is None: + if dayValue == self.DAYTIME(): + return None + else: + return minT + if minT is None: + if dayValue == self.NIGHTTIME(): + return None + else: + return maxT + # Check for case of MaxT < MinT + max = self.getValue(maxT, "Max") + min = self.getValue(minT, "Max") + if max < min: + temp = maxT + maxT = minT + minT = temp + if dayValue == self.DAYTIME(): + return maxT + else: + return minT + + def temp_trends_addToPhrase_flag(self, tree, node): + # If set to 0, will report: + # "Temperatures falling in the afternoon." + # If set to 1: + # "Temperatures falling to the 50's in the afternoon." + # If set to 2: + # "Temperatures falling to the lower 50's in the afternoon." + return 2 + + def temp_trends(self): + return { + "setUpMethod": self.temp_trends_setUp, + "wordMethod": self.temp_trends_words, + "phraseMethods": self.standard_phraseMethods(), + } + def temp_trends_setUp(self, tree, node): + duration = node.getTimeRange().duration() + if duration > 12*3600: + return self.setWords(node, "") + timeRange = node.getTimeRange() + dayNight = self.getPeriod(timeRange, 1) + if dayNight == self.NIGHTTIME(): + eleInfo = self.ElementInfo("MinT", "Min") + else: + eleInfo = self.ElementInfo("MaxT", "Max") + elementInfoList = [eleInfo] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + return self.DONE() + + def temp_trends_words(self, tree, node): + "Look for sharp temperature increases or decreases" + # Determine if temps rise or fall in a non-diurnal way. 
+ # MaxT/MinT temps -- min/max tuple for each + # Hourly Temp Stats: list of hourly temperature tuples + # Each tuple has: + # -- average temperature value + # -- hour of occurrence + # For a Daytime period, compare MaxT to T for the last grid + # of the period and report "temperatures falling in the afternoon" + # if the difference exceeds the temp_trend_nlValue + # For a Nighttime period, compare MinT to T for the last grid + # of the period and report "temperatures rising overnight" + # if the difference exceeds the temp_trend_threshold. + + statDict = node.getStatDict() + timeRange = node.getTimeRange() + tStats = tree.stats.get("T", timeRange, node.getAreaLabel(), + mergeMethod="List") + if tStats is None: + return self.setWords(node, "") + tStats, subRange = tStats[0] + if tStats is None: + return self.setWords(node, "") + dayNight = self.getPeriod(timeRange,1) + trend_nlValue = self.temp_trend_nlValue(tree, node) + if dayNight == self.DAYTIME(): + maxT = self.getStats(statDict, "MaxT") + if maxT is None: + return self.setWords(node, "") + maxT = self.getValue(maxT) + threshold = self.nlValue(trend_nlValue, maxT) + else: + minT = self.getStats(statDict, "MinT") + if minT is None: + return self.setWords(node, "") + minT = self.getValue(minT) + threshold = self.nlValue(trend_nlValue, minT) + halfWay = len(tStats)/2 + + index = len(tStats)-1 + while index >= halfWay: + tempValue, curHour = tStats[index] + if tempValue is None: + index = index - 1 + continue + + if dayNight == self.DAYTIME(): + if tempValue <= (maxT - threshold): + toPhrase = self.getToPhrase(tree, node, tempValue) + words = "temperatures falling" + toPhrase + " in the afternoon" + return self.setWords(node, words) + else: + if tempValue >= (minT + threshold): + toPhrase = self.getToPhrase(tree, node, tempValue) + words = "temperatures rising" + toPhrase + " after midnight" + return self.setWords(node, words) + break + return self.setWords(node, "") + + def getToPhrase(self, tree, node, 
tempValue): + flag = self.temp_trends_addToPhrase_flag(tree, node) + if flag > 0: + if flag > 1: + rangeStr = self.getDigitStr( + abs(tempValue)%10, self.tempPhrase_boundary_dict(tree, node)) + rangeStr += " " + else: + rangeStr = "" + return " into the " + rangeStr + self.getDecadeStr(tempValue) + else: + return "" + +## def temp_trends_words(self, tree, node): +## "Look for sharp temperature increases or decreases" + +## # Here is an alternative temp_trends method provided by Tom Spriggs. +## # If a 12-hour period, it looks at the 12, 3, and 5 o'clock grids +## # (both am/pm depending on time of day) and verifies the trend (either +## # going down or up) and then looks at the difference between the +## # 5 o'clock grid and the MaxT/MinT grid. It only needs to look at the +## # 5 o'clock grid since that is the last one in the 12-hour period, +## # and if it is going to trip the threshold anywhere, it will be on that +## # hour since if you have an unusual temperature trend, it will peak at +## # that grid. If less than a 12-hour period, then the 3 times that it +## # checks will be adjusted accordingly inside the smaller time range. 
+## statDict = node.getStatDict() +## timeRange = node.getTimeRange() +## tStats = tree.stats.get("T", timeRange, node.getAreaLabel(), +## mergeMethod="List") +## if tStats is None: +## return self.setWords(node, "") +## tStats, subRange = tStats[0] +## if tStats is None: +## return self.setWords(node, "") +## dayNight = self.getPeriod(timeRange,1) +## trend_nlValue = self.temp_trend_nlValue(tree, node) +## if dayNight == self.DAYTIME(): +## maxT = self.getStats(statDict, "MaxT") +## if maxT is None: +## return self.setWords(node, "") +## maxT = self.getValue(maxT) +## threshold = self.nlValue(trend_nlValue, maxT) +## else: +## minT = self.getStats(statDict, "MinT") +## if minT is None: +## return self.setWords(node, "") +## minT = self.getValue(minT) +## threshold = self.nlValue(trend_nlValue, minT) + +## if len(tStats) >= 6: +## halfWay = len(tStats) - 6 +## quarterWay = len(tStats) - 3 +## endPoint = len(tStats) - 1 +## elif len(tStats) >= 4: +## halfWay = 0 +## quarterWay = len(tStats) - 3 +## endPoint = len(tStats) - 1 +## elif len(tStats) == 1: +## halfWay = 0 +## quarterWay = 0 +## endPoint = 0 +## else: +## halfWay = 0 +## quarterWay = 1 +## endPoint = len(tStats) - 1 + +## tempValue_halfWay, curHour1 = tStats[halfWay] +## tempValue_quarterWay, curHour2 = tStats[quarterWay] +## tempValue_endPoint, curHour3 = tStats[endPoint] + +## if tempValue_halfWay is None: +## return self.setWords(node, "") +## if tempValue_quarterWay is None: +## return self.setWords(node, "") +## if tempValue_endPoint is None: +## return self.setWords(node, "") + +## words = "" +## if dayNight == self.DAYTIME(): +## if tempValue_quarterWay < tempValue_halfWay: +## if tempValue_endPoint <= tempValue_quarterWay: +## if tempValue_endPoint <= (maxT - threshold): +## # large temp fall (i.e. >= threshold) +## toPhrase = self.getToPhrase(tree, node, tempValue_endPoint) +## mxPhrase = self.getToPhrase(tree, node, maxT) +## if (toPhrase == mxPhrase): +## # avoid saying--"high in the upper 50s. 
temperature falling +## # into the 50s in the afternoon." +## # instead say--"high in the upper 50s. temperature falling +## # through the 50s in the afternoon." +## toPhrase = " through" + toPhrase[5:] +## if len(tStats) <= 6: #assumes already in the afternoon +## words = "temperature falling" + toPhrase + " by late afternoon" +## else: +## words = "temperature falling" + toPhrase + " in the afternoon" +## elif tempValue_endPoint < maxT: +## # small temp fall (i.e. < threshold) +## if len(tStats) <= 6: #assumes already in the afternoon +## words = "temperature steady or slowly falling through late afternoon" +## else: +## words = "temperature steady or slowly falling in the afternoon" +## else: +## if tempValue_quarterWay > tempValue_halfWay: +## if tempValue_endPoint >= tempValue_quarterWay: +## if tempValue_endPoint >= (minT + threshold): +## # large temp rise (i.e. >= threshold) +## toPhrase = self.getToPhrase(tree, node, tempValue_endPoint) +## mnPhrase = self.getToPhrase(tree, node, minT) +## if (toPhrase == mnPhrase): +## # avoid saying--"low in the lower 30s. temperature rising +## # into the 30s after midnight." +## # instead say--"low in the lower 30s. temperature rising +## # through the 30s after midnight." +## toPhrase = " through" + toPhrase[5:] +## if len(tStats) <= 6: #assumes already after midnight +## words = "temperature rising" + toPhrase + " through sunrise" +## else: +## words = "temperature rising" + toPhrase + " after midnight" +## elif tempValue_endPoint > minT: +## # small temp rise (i.e. 
< threshold) +## if len(tStats) <= 6: #assumes already after midnight +## words = "temperature steady or slowly rising through sunrise" +## else: +## words = "temperature steady or slowly rising after midnight" + +## return self.setWords(node, words) + + + + def reportTrends(self): + return { + "setUpMethod": self.reportTrends_setUp, + "wordMethod": self.reportTrends_words, + "phraseMethods": self.standard_phraseMethods(), + } + def reportTrends_setUp(self, tree, node): + timeRange = node.getTimeRange() + dayNight = self.getPeriod(timeRange, 1) + if dayNight == self.NIGHTTIME(): + eleInfo = self.ElementInfo("MinT", "Min") + else: + eleInfo = self.ElementInfo("MaxT", "Max") + elementName = "MaxT" + elementInfoList = [eleInfo] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + return self.DONE() + + def reportTrends_words(self, tree, node): + "Compare current analysis to previous analysis for trends" + elementName = node.get("elementName") + statDict = node.getStatDict() + curStats = self.getStats(statDict, elementName) + if curStats is None: + return self.setWords(node, "") + timeRange = node.getTimeRange() + areaLabel = node.getAreaLabel() + prevTimeRange = self.adjustTimeRange(timeRange, -24) + prevStats = tree.stats.get(elementName, prevTimeRange, areaLabel, + mergeMethod="Average") + #print "Report trends", timeRange, elementName, curStats, prevStats + if prevStats is None: + return self.setWords(node, "") + + prevStats = self.getValue(prevStats) + curStats = self.getValue(curStats) + #print "stats", prevStats, curStats + diff = curStats - prevStats + value = self.reportTrends_valueStr(tree, node, diff, curStats) + #print " returning ", value + return self.setWords(node, value) + + def reportTrends_valueStr(self, tree, node, diff, temp): + # Given a difference between current and 24-hour prior + # MaxT or MinT grids, report a trend. 
+ + var = self.colder_warmer_dict(tree, node) + timeRange = node.getTimeRange() + dayNight = self.getPeriod(timeRange, 1) + if dayNight == self.DAYTIME(): + if diff > 10: + return self.nlValue(var["HighWarmer"], temp) + elif diff < -20: + return self.nlValue(var["HighMuchColder"], temp) + elif diff <= -10 and diff >= -20: + return self.nlValue(var["HighColder"], temp) + else: + return "" + + else: + if diff > 10: + return self.nlValue(var["LowWarmer"], temp) + elif diff < -20: + return self.nlValue(var["LowMuchColder"], temp) + elif diff <= -10 and diff >= -20: + return self.nlValue(var["LowColder"], temp) + else: + return "" + + return "" + + # colder_warmer_Dict + # Dictionary of non-linear dictionaries each with + # phrases to use instead of colder/warmer + # based on the temperature + + def colder_warmer_dict(self, tree, node): + # This dictionary of non-linear dictionaries controls what phrase is returned + # for cold/much colder warmer/much warmer. It is based off + # of the maxT or MinT + dict = {} + dict["LowColder"] = { + (-80,45): "colder", + (45,70): "cooler", + (70,150): "not as warm", + "default": "", + } + dict["LowMuchColder"] = { + (-80,45): "much colder", + (45,70): "much cooler", + (70,150): "not as warm", + "default": "", + } + dict["LowWarmer"] = { + (-80,35): "not as cold", + (35,50): "not as cool", + (50,150): "warmer", + "default": "", + } + dict["HighColder"]= { + (-80,45): "colder", + (45,75): "cooler", + (75,90): "not as warm", + (90,150): "not as hot", + "default": "", + } + dict["HighMuchColder"]= { + (-80,45): "much colder", + (45,75): "much cooler", + (75,90): "not as warm", + (90,150): "not as hot", + "default": "", + } + dict["HighWarmer"]= { + (-80,45): "not as cold", + (45,65): "not as cool", + (65,150): "warmer", + "default": "", + } + return dict + + +## def reportTrends_valueStr(self, tree, node, diff): +## # Given a difference between current and 24-hour prior +## # MaxT or MinT grids, report a trend. 
+## if diff > 15 and diff < 25: +## return "warmer" +## elif diff >= 25: +## return "much warmer" +## elif diff < -15 and diff > -25: +## return "cooler" +## elif diff <= -25: +## return "much colder" +## else: +## return "" + + def extremeTemps_phrase(self): + ### NEW METHOD written by Tom Spriggs + ### ZFP_Local + return { + "setUpMethod": self.extremeTemps_setUp, + "wordMethod": self.extremeTemps_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def extremeTemps_setUp(self, tree, node): + dayNight = self.getPeriod(node.getTimeRange(), 1) + if dayNight == self.DAYTIME(): + elementInfoList = [ + self.ElementInfo("MaxT", "Max"), + self.ElementInfo("MinT", "Min"), + ] + else: + elementInfoList = [ + self.ElementInfo("MinT", "Min"), + self.ElementInfo("MaxT", "Max"), + ] + elementInfoList.append(self.ElementInfo("HeatIndex", "Max")) + elementInfoList.append(self.ElementInfo("WindChill", "Min")) + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + return self.DONE() + + + def extremeTemps_words(self, tree, node): + "Compare current analysis to previous analysis for trends" + + tempPhrases = ["reportTrends"] + words = self.findWords(tree, node, None, node.getAreaLabel(), + phraseList=tempPhrases) + + if words is None: + # If words have not yet been set, return + # We need to wait for reportTrends to complete + # before doing the extremeTemps_phrase + return + + statDict = node.getStatDict() + timeRange = node.getTimeRange() + dayNight = self.getPeriod(timeRange, 1) + if dayNight == self.DAYTIME(): + element = "MaxT" + else: + element = "MinT" + tStats = self.getStats(statDict, element) + if tStats is None: + return self.setWords(node, "") + tStats = self.getValue(tStats) + + chillStats = self.getStats(statDict, "WindChill") + chillStats = self.getValue(chillStats, "Min") + heatStats = self.getStats(statDict, "HeatIndex") + heatStats = self.getValue(heatStats, "Max") + + words = "" + + if dayNight == 
self.DAYTIME(): + if tStats > 99: + if heatStats is None: + words = "very hot" + elif (heatStats - tStats) > 7: + words = "very hot and humid" + else: + words = "very hot" + elif tStats > 95: + if heatStats is None: + words = "hot" + elif (heatStats - tStats) > 6: + words = "hot and humid" + else: + words = "hot" + elif tStats < 20: + if chillStats is None: + words = "very cold" + elif chillStats < -9: + words = "bitterly cold" + else: + words = "very cold" + elif heatStats is None: + words = "" + elif heatStats >= self.heatIndex_threshold(tree, node): + words = "hot and humid" + elif chillStats is None: + words = "" + elif chillStats <= self.windChill_threshold(tree, node): + words = "bitterly cold" + else: + if tStats < 5: + if chillStats is None: + words = "very cold" + elif chillStats <= self.windChill_threshold(tree, node): + words = "bitterly cold" + else: + words = "very cold" + elif chillStats is None: + words = "" + elif chillStats <= self.windChill_threshold(tree, node): + words = "bitterly cold" + + if words == "": + return self.setWords(node, words) + + # Clear the words in reportTrends to + # prevent extra temperature phrases + component = node.getComponent() + progeny = component.getProgeny() + for child in progeny: + phraseName = child.get("name") + if phraseName in tempPhrases: + child.set("words", "") + return self.setWords(node, words) + + ## Submitted by Brian Walawender + ## Reviewed by Tracy Hansen + def steady_temp_threshold(self, tree, node): + # Diurnal ranges less than this value will + # be reported as steady temperatures + return 4 + + def steady_temp_trends(self): + return { + "setUpMethod": self.steady_temp_trends_setUp, + "wordMethod": self.steady_temp_trends_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def steady_temp_trends_setUp(self, tree, node): + elementInfoList = [] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + return self.DONE() + + def 
steady_temp_trends_words(self, tree, node): + "Look for small diurnal changes" + # Check Diurnal range in T. If range is + # less than steady_temp_threshold report + # a temperatures steady phrase + # i.e. "temperature steady in the mid 20s" + tempPhrases = ["highs_phrase", "lows_phrase", + "highs_range_phrase", "lows_range_phrase", + "temp_trends", + "extended_lows_phrase", "extended_highs_phrase" + ] + words = self.findWords(tree, node, None, node.getAreaLabel(), + phraseList=tempPhrases) + if words is None: + # If words have not yet been set, return + # We need to wait for all highs_phrases to complete + # before doing the steady_temp_phrase + return + + timeRange = node.getTimeRange() + tStats = tree.stats.get("T", timeRange, node.getAreaLabel(), + mergeMethod="List") + if tStats is None: + return self.setWords(node, "") + # tStats is a list of (hourlyTemp, subRange) tuples + + max = -999 + min = 999 + words = "" + sum = 0 + count = 0 + for hourlyTemps, subRange in tStats: + if hourlyTemps is None: + return self.setWords(node, "") + for t, hr in hourlyTemps: + if t is None: + return self.setWords(node, "") + if t < min: + min = t + if t > max: + max = t + sum = sum + t + count = count + 1 + + diff = max - min + + if diff >= self.steady_temp_threshold(tree,node): + return self.setWords(node, "") + + dayNight = self.getPeriod(timeRange, 1) + if dayNight == self.DAYTIME(): + avg = int((sum/count)+0.5) + else: + avg = int(sum/count) + + phrase = self.getTempPhrase(tree, node, avg, "") + words = "near steady temperature " + phrase + + # Clear the words in high and lows phrase + # prevent extra temperature phrases + component = node.getComponent() + progeny = component.getProgeny() + for child in progeny: + phraseName = child.get("name") + if phraseName in tempPhrases: + child.set("words", "") + + # Begin ER changes + # Not sure if this is used...but set anyway + child.set("emptyPhrase", 1) + + # Now erase subphrase words. 
This is what seems to fix + # the problem of high/low phrases still appearing with + # the steady phrase - PJ + subphrases=child.get("childList") + + if subphrases is not None: + for n in subphrases: + n.set("words", "") + + return self.setWords(node, words) + + ### SnowAmt + def pop_snow_lower_threshold(self, tree, node): + # Snow accumulation will not be reported + # if Pop is below this threshold + return 60 + + def getSnowReportEndDay(self, tree, node): + # This is the first day we do not try to report total accumulation. + return self.createTimeRange(96,108) + + def snow_phrase(self): + return { + "setUpMethod": self.snow_setUp, + "wordMethod": self.snow_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def snow_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("SnowAmt", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + component = node.getComponent() + index = component.getIndex() + + # Calculate past snow + prodTR = tree.getTimeRange() + pastSnowMin = 0 + pastSnowMax = 0 + pastSnowTimeRange = self.makeTimeRange(prodTR.startTime() - 12*3600, + prodTR.startTime()) + stats = tree.stats.get("SnowAmt", pastSnowTimeRange, + node.getAreaLabel(), mergeMethod="MinMax") + + if stats is not None: + # site is using a past SnowAmt grid + pastSnowMin, pastSnowMax = self.getValue(stats, "MinMax") + # If first period is less than 12 hours long, thus an "update" + # report as "new" snow accumulation ONLY IF + # there was some previous snow accumulation + timeRange = node.getTimeRange() + if index == 0 and timeRange.duration() < 12*3600 and \ + pastSnowMax > 0.0: + node.set("newFlag", 1) + else: + # site is NOT using a past SnowAmt grid + # If first period is less than 12 hours long, thus an "update" + # report as "new" snow accumulation + timeRange = node.getTimeRange() + if index == 0 and timeRange.duration() < 12*3600: + node.set("newFlag", 1) + + return self.DONE() + + def snow_words(self, tree, node): + # 
First check if the pop threshold has been met + # If not, then do not generate phrase + threshold = self.pop_snow_lower_threshold(tree, node) + lowPopFlag = self.lowPop_flag(tree, node, threshold) + if lowPopFlag == 1: + return self.setWords(node, "") + + # Second, wait for weather phrase to complete and make sure there + # is mention of accumulating weather + wxWords, attrDict = self.findWords(tree, node, "Wx", node.getAreaLabel(), + phraseList=["weather_phrase", "skyPopWx_phrase"], + attributes=["reportedRankList"]) + if wxWords is None: + return + accumFlag, descriptor = self.checkAccumulatingWx(tree, node, wxWords, attrDict) + if accumFlag == 0: + return self.setWords(node, "null") + + # Third, load in the SnowAmt statistics, check for low amounts, then round to nearest inch + currentSnow = tree.stats.get("SnowAmt", node.getTimeRange(), node.getAreaLabel(), mergeMethod="MinMax") + if currentSnow is None: + return self.setWords(node, "") + min, max = self.getValue(currentSnow, "MinMax") + if min == 0 and max == 0: + node.parent.set("descriptor", "") + return self.setWords(node, "no " + descriptor) + elif min < 0.5 and max < 0.5: + node.parent.set("descriptor", "") + return self.setWords(node, "little or no " + descriptor) + min = int(min+0.5) + max = int(max+0.5) + + # Finally, generate the snow accumulation phrase + # Decide on singular or plural units + if max == 1: + units = self.units_descriptor(tree, node, "unit", "in") + else: + units = self.units_descriptor(tree, node, "units", "in") + # Create worded phrase based on type of range + if min == 0: + upTo = self.addSpace(self.phrase_descriptor(tree, node, "up to", "SnowAmt")) + snowPhrase = upTo + repr(max) + elif min == max: + around = self.addSpace(self.phrase_descriptor(tree, node, "around", "SnowAmt")) + snowPhrase = around + repr(max) + else: + snowPhrase = "of " + repr(min) + " to " + repr(max) + snowPhrase = snowPhrase + " " + units + + return self.setWords(node, snowPhrase) + + def 
checkAccumulatingWx(self, tree, node, wxWords, attrDict): + accumulatingWx = [ + ('S', 'Snow'), + ('SW', 'Snow'), + ('IP', 'Sleet'), + ('IC', 'IceCrystal'), + ] + desc = "" + wxTypes = [] + if "reportedRankList" in attrDict: + rankList = attrDict["reportedRankList"] + for subkey, rank in rankList: + # DR_18506 + if subkey.wxType() in ['SW'] and subkey.intensity() == "--": + pass + elif subkey.wxType() in ['IC']: + pass + else: + wxTypes.append(subkey.wxType()) + for wxType, wxVar in accumulatingWx: + if wxType in wxTypes: + desc += wxVar + if desc == "": + return 0, "" + # Determine the phrase descriptor + descriptor = self.phrase_descriptor(tree, node, desc, "SnowAmt") + if node.getAncestor('newFlag') == 1: + new = self.addSpace(self.phrase_descriptor(tree, node, "New", "SnowAmt")) + if new != "": + descriptor = new + descriptor + node.parent.set("descriptor", descriptor) + # The handle the case of embedded local effects, set the parent's parent as well + node.parent.parent.set("descriptor", descriptor) + return 1, descriptor + + ## Modifications submitted by Tom Spriggs LSX for accurately reporting total snow + + ## Since the total_snow_phrase uses the SnowAmt element exclusively for tallying + ## up storm totals, you can use a SnowAmt grid that exists in the past + ## (before the current hour) to tell us how much snow has already fallen from THIS STORM + ## (this will not include already existing snow pack from other storms) + ## and thus compensate for the diminishing/shrinking forecast totals. + ## The method simply samples and adds the already fallen snow to what is still forecast + ## to produce an accuate total snow amount for an ongoing event + ## (as well as an event still yet to happen). + + ## This "past" SnowAmt grid will end at the current time and can go as far back as + ## the user wants--but it MUST be a single grid for previously fallen snow, + ## not a bunch of fragmented grids. 
The actual values in the "past" SnowAmt grid + ## will then need to be filled with values based on already collected snow reports. + ## One method to create this "old" SnowAmt grid would be through use of the CONTOUR tool. + + ## Using this method, it is now possible to kick off a total snow phrase in the + ## first period when the total snow differs from the still yet to fall/forecasted + ## snow in the first period. + + ## If you leave the "past" SnowAmt grid as zero, the formatter will know to treat + ## it as a non-ongoing event. + + def total_snow_phrase(self): + return { + "setUpMethod": self.total_snow_setUp, + "wordMethod": self.total_snow_words, + "phraseMethods": self.standard_phraseMethods() + } + + def total_snow_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("SnowAmt", "MinMax"), + self.ElementInfo("IceAccum", "MinMax", primary=0)] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + descriptor = self.phrase_descriptor(tree, node, "TotalSnow", "SnowAmt") + node.set("descriptor", descriptor) + return self.DONE() + + def total_snow_words(self, tree, node): + # Return a total accumulation phrase if appropriate + # Example: + # TOTAL SNOW ACCUMULATION 7 INCHES. 
+ + component = node.getComponent() + index = component.getIndex() + totalSnow = "" + + # See if we are prior to the snow report end day + timeRange = node.getParent().getTimeRange() + snowReportEndDay = self.getSnowReportEndDay(tree, node) + shiftedTimeRange = self.shiftedTimeRange(timeRange) + if shiftedTimeRange.startTime() < snowReportEndDay.startTime(): + ### Round up stats--need current period snow, next period snow, and past snow + # Obtain minimum PoP needed to report accumulations + threshold = self.pop_snow_lower_threshold(tree, node) + # Get snow stats for the current period + currentSnow = tree.stats.get("SnowAmt", node.getTimeRange(), node.getAreaLabel(), mergeMethod="MinMax") + if currentSnow is None: + return self.setWords(node, "") + currentMin, currentMax = self.getValue(currentSnow, "MinMax") + currentMin = int(currentMin+0.5) + currentMax = int(currentMax+0.5) + # Check PoP threshold for the current period--zero out if below threshold PoP + popStats = self.matchToWx(tree, node, "PoP", node.getTimeRange()) + if popStats < threshold: + currentMin = 0 + currentMax = 0 + # Get snow stats for the next period--does the event come to an end? 
+ nextComp = component.getNext() + if nextComp is None: + return self.setWords(node, "") + nextTimeRange = nextComp.getTimeRange() + nextSnow = tree.stats.get("SnowAmt", nextTimeRange, + node.getAreaLabel(), mergeMethod="Max") + if nextSnow is None: + return self.setWords(node, "") + nextSnow = int(nextSnow+0.5) + # Check PoP threshold for the next period--zero out if below threshold PoP + threshold = self.pop_snow_lower_threshold(tree, node) + popStats = self.matchToWx(tree, node, "PoP", nextTimeRange) + if popStats < threshold: + nextSnow = 0 + # Get snow stats for both already fallen snow AND preceding forecast periods + minSum, maxSum = self.sumPrevStats(tree, component, + node.getAreaLabel(), "SnowAmt", "MinMax") + + ### Generate total snow accumulation phrase if conditions met + # We produce a total accumulation phrase for the current period IF + # the next period's snow is 0--thus snow will cease by the end of the current period AND + # there is snow accumulation expected in the current period AND + # there is snow accumulation in one or more periods immediately preceding + if nextSnow == 0 and currentMax > 0 and maxSum > 0: + # Finalize total snow amount + finalMinSum = int(currentMin + minSum) + finalMaxSum = int(currentMax + maxSum) + # Decide on singular or plural units + if finalMaxSum == 1: + units = self.units_descriptor(tree, node, "unit", "in") + else: + units = self.units_descriptor(tree, node, "units", "in") + # Create worded phrase based on type of range + if finalMinSum == 0: + upTo = self.addSpace(self.phrase_descriptor(tree, node, + "up to", "SnowAmt")) + totalSnowPhrase = upTo + repr(finalMaxSum) + elif finalMinSum == finalMaxSum: + around = self.addSpace(self.phrase_descriptor(tree, node, + "around", "SnowAmt")) + totalSnowPhrase = around + repr(finalMaxSum) + else: + totalSnowPhrase = repr(finalMinSum) + " to " + repr(finalMaxSum) + totalSnow = totalSnowPhrase + " " + units + else: + return self.setWords(node, "") + + return 
self.setWords(node, totalSnow) + + def getSnowValue(self, tree, node, areaLabel=None): + # Return min and max snow values + threshold = self.pop_snow_lower_threshold(tree, node) + lowPopFlag = self.lowPop_flag(tree, node, threshold) + if lowPopFlag == 1: + return None + if areaLabel is None: + areaLabel = node.getAreaLabel() + stats = tree.stats.get("SnowAmt", node.getTimeRange(), + areaLabel, mergeMethod="MinMax") + if stats is None: + return None + min, max = self.getValue(stats, "MinMax") + min = int(min+0.5) + max = int(max+0.5) + if min < 1 and max < 1: + return None + + return min, max + + def getTotalSnow(self, tree, node, areaLabel=None, snowValue=None): + component = node.getComponent() + # Get sum of previous periods + if areaLabel is None: + areaLabel = node.getAreaLabel() + if snowValue is None: + snowValue = self.getSnowValue(tree, node, areaLabel) + if snowValue is None: + return None + minSnowValue, maxSnowValue = snowValue + minSum, maxSum = self.sumPrevStats(tree, component, areaLabel, "SnowAmt", "MinMax") + # Add this period's value to the sum + minSum = minSum + minSnowValue + maxSum = maxSum + maxSnowValue + minIncrement = self.nlValue(self.increment_nlValue( + tree, node, "SnowAmt", "SnowAmt"), minSum) + maxIncrement = self.nlValue(self.increment_nlValue( + tree, node, "SnowAmt", "SnowAmt"), maxSum) + minSum = self.round(minSum, "Nearest", minIncrement) + maxSum = self.round(maxSum, "Nearest", maxIncrement) + return minSum, maxSum + + ## TOTAL SNOW Phrase submitted by Virgil Mittendorf + def stormTotalSnow_phrase(self): + return { + "setUpMethod": self.stormTotalSnow_setUp, + "wordMethod": self.stormTotalSnow_words, + "phraseMethods": self.standard_phraseMethods(), + } + def stormTotalSnow_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("StormTotalSnow", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def stormTotalSnow_words(self, tree, node): + "Create phrase for Storm 
Total Snow accumulation" + + # Load in the statistics for storm total snow + elementName = "StormTotalSnow" + statDict = node.getStatDict() + stats = self.getStats(statDict, elementName) + + #print "storm total snow stats", stats + + # test...if no stats then don't create phrase (i.e. grid missing) + if stats is None: + return self.setWords(node, "") + + min, max = self.getValue(stats, "MinMax") + threshold = 1 + incMin = 1 + incMax = 1 + + if min%1 == 0: + min = int(min) + minStr = repr(min) + else: + minStr = repr(int(min+0.5)) + if max%1 == 0: + max = int(max) + maxStr = repr(max) + else: + maxStr = repr(int(max+0.5)) + + #print "min, max", min, max, node.getTimeRange(), node.getAreaLabel(), "storm total accumulation" + + if min == 0 and max == 0: + return self.setWords(node,"") + elif min < 0.5 and max < 0.5: + return self.setWords(node,"") + + outUnits = self.element_outUnits(tree, node, elementName, elementName) + unit = self.units_descriptor(tree, node,"unit", outUnits) + units = self.units_descriptor(tree, node,"units", outUnits) + + min = int(min+0.5) + max = int(max+0.5) + + # Single Value input + if min == max: + # Handle case of 1 inch + if min == 1: + units = unit + value = "around " + minStr + + # Range + else: + value = "of " + minStr + " to " + maxStr + # Handle case when lower value is 0 + if min == 0: + value = "up to " + maxStr + if max == 1: + units = unit + + snowPhrase = value + " " + units + return self.setWords(node, snowPhrase) + + # New def by Scott. According to Directive 10-503, descriptive terms + # should be used in period 4 and beyond. This function returns + # a descriptive snow phrase. 
+ def descriptive_snow_phrase(self): + return { + "setUpMethod": self.descriptive_snow_setUp, + "wordMethod": self.descriptive_snow_words, + "phraseMethods": self.standard_phraseMethods(), + } + def descriptive_snow_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("SnowAmt", "MinMax")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + # Do not want phrase descriptor + node.set("descriptor", "") + return self.DONE() + + def descriptive_snow_words(self, tree, node): + "Create phrase for snow accumulation" + # According to Directive 10-503, descriptive terms + # should be used in period 4 and beyond. This function returns + # a descriptive snow phrase. + # + # According to Directive 10-503, snow accumulation + # should not be mentioned if PoP is under 60%. + threshold = self.pop_snow_lower_threshold(tree, node) + lowPopFlag = self.lowPop_flag(tree, node, threshold) + if lowPopFlag == 1: + return self.setWords(node, "") + + statDict = node.getStatDict() + stats = self.getStats(statDict, "SnowAmt") + if stats is None: + return self.setWords(node, "") + + max = int(self.getValue(stats, "Max")) + + if max < 1: + words = "" + elif max >= 1 and max <= 2: + words = "light snow accumulations" + elif max > 2 and max <= 5: + words = "moderate snow accumulations" + else: + words = "heavy snow accumulations" + return self.setWords(node, words) + + ### SnowLevel + def pop_snowLevel_upper_threshold(self, tree, node): + # Snow level will be reported if Pop is above this threshold + return 60 + + def snowLevel_maximum_phrase(self, tree, node): + # This returns the maximum snow level value to be reported and the + # the corresponding snow level phrase. 
It can be set up by + # edit area as follows: + # editAreaList = [ + # ("area1", 8000, "above 8000 feet"), + # ("area2", 6000, "above 6000 feet"), + # # Don't mention snow level at all in area3: + # ("area3", 0, ""), + # ] + #maxElev = 0 + #phrase = "" + #for area, elev, elevPhrase in editAreaList: + # if self.currentAreaContains(tree, [area]): + # if elev > maxElev: + # maxElev = elev + # phrase = elevPhrase + #return (maxElev, phrase) + return (8000, "above 8000 feet") + + def snowLevel_upper_topo_percentage(self, tree, node): + # If this percentage of the edit area is above the snow level, + # do not report snow level + return 80 + + def snowLevel_lower_topo_percentage(self, tree, node): + # If this percentage of the edit area is below or equal to the snow level, + # do not report snow level + return 80 + + def snowLevel_phrase(self): + return { + "setUpMethod": self.snowLevel_setUp, + "wordMethod": self.snowLevel_words, + "phraseMethods": self.standard_phraseMethods(), + } + def snowLevel_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("SnowLevel", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def snowLevel_words(self, tree, node): + "Create phrase for reporting snow level" + + # Check for low pop + threshold = self.pop_snowLevel_upper_threshold(tree, node) + lowPopFlag = self.lowPop_flag(tree, node, threshold) + + if lowPopFlag == 1: + return self.setWords(node, "") + + statDict = node.getStatDict() + snowLevel = self.getStats(statDict, "SnowLevel") + if snowLevel is None: + return self.setWords(node, "") + snowLevel = self.getValue(snowLevel) + element = "SnowLevel" + roundingMethod = self.rounding_method(tree, node, element, element) + increment_nlValue = self.increment_nlValue(tree, node, element, element) + snowLevel = self.roundValue(snowLevel, roundingMethod, "Nearest", increment_nlValue, 0) + + # Check Wx for R or RW + stats = tree.stats.get("Wx", node.getTimeRange(), 
node.getAreaLabel(), + mergeMethod="List") + if stats is None: + return self.setWords(node, "") + + found = 0 + for rankList, subRange in stats: + subkeys = self.getSubkeys(rankList) + for subkey in subkeys: + if subkey.wxType() == "R" or subkey.wxType() == "RW": + found = 1 + break + + if found == 0: + return self.setWords(node, "") + + # Check for upper and lower topo percentages + percentage_above = self.calcTopoPercentage(tree, node, node.getAreaLabel(), snowLevel) + percentage_below = 100 - percentage_above + + if percentage_above > self.snowLevel_upper_topo_percentage(tree, node): + return self.setWords(node, "") + if percentage_below > self.snowLevel_lower_topo_percentage(tree, node): + return self.setWords(node, "") + + # Check for maximum snow level to be reported + max, words = self.snowLevel_maximum_phrase(tree, node) + + if snowLevel < max: + units = self.units_descriptor(tree, node, "units", "ft") + words = repr(int(snowLevel)) + " " + units + return self.setWords(node, words) + + ### IceAccum + def ice_accumulation_threshold(self, tree, node): + # If maximum IceAccum is greater than this threshold, it will be + # reported instead of SnowAmt in the snow_phrase + return .10 + + def iceAccumulation_phrase(self): + return { + "setUpMethod": self.iceAccumulation_setUp, + "wordMethod": self.iceAccumulation_words, + "phraseMethods": self.standard_phraseMethods(), + } + def iceAccumulation_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("IceAccum", "MinMax", primary=0)] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def iceAccumulation_words(self, tree, node): + "Create phrase for ice accumulation" + + threshold = self.pop_snow_lower_threshold(tree, node) + lowPopFlag = self.lowPop_flag(tree, node, threshold) + if lowPopFlag == 1: + return self.setWords(node, "") + + # Check for IceAccum. If it is significant, report it. 
+ statDict = node.getStatDict() + + stats = self.getStats(statDict, "IceAccum") + reportIceAccum = 0 + if stats is not None: + threshold = self.ice_accumulation_threshold(tree, node) + min, max = self.getValue(stats, "MinMax") + if max >= threshold: + reportIceAccum = 1 + if reportIceAccum == 1: + component = node.getComponent() + index = component.getIndex() + timeRange = node.getTimeRange() + if index == 0 and timeRange.duration() < 12*3600: + descriptor = self.phrase_descriptor( + tree, node, "NewIceAccum", "IceAccum") + else: + descriptor = self.phrase_descriptor(tree, node, "IceAccum", "IceAccum") + node.parent.set("descriptor", descriptor) + elementName = "IceAccum" + else: + return self.setWords(node, "") + + if min < 0.2: + minStr = "less than one quarter" + elif min >= 0.2 and min < 0.4: + minStr = "one quarter" + elif min >= 0.4 and min < 0.7: + minStr = "one half" + elif min >= 0.7 and min < 0.9: + minStr = "three quarters" + elif min >= 0.9 and min < 1.3: + minStr = "one" + elif min >= 1.3 and min < 1.8: + minStr = "one and a half" + elif min >= 1.8: + minStr = repr(int(min+0.5)) + if max < 0.2: + maxStr = "less than one quarter" + elif max >= 0.2 and max < 0.4: + maxStr = "one quarter" + elif max >= 0.4 and max < 0.7: + maxStr = "one half" + elif max >= 0.7 and max < 0.9: + maxStr = "three quarters" + elif max >= 0.9 and max < 1.3: + maxStr = "one" + elif max >= 1.3 and max < 1.8: + maxStr = "one and a half" + elif max >= 1.8: + maxStr = repr(int(max+0.5)) + if min >= 0.9 and min < 1.3: + minStr = repr(int(min+0.5)) + + #print "min, max", min, max, node.getTimeRange(), node.getAreaLabel() + + outUnits = self.element_outUnits(tree, node, elementName, elementName) + unit = self.units_descriptor(tree, node,"unit", outUnits) + units = self.units_descriptor(tree, node,"units", outUnits) + + # Single Value input + if minStr == maxStr: + if min < 0.2: + icePhrase = "of " + minStr + " of an " + unit + elif min >= 0.2 and min < 0.9: + icePhrase = "around " + 
minStr + " of an " + unit + elif min >= 0.9 and min < 1.3: + icePhrase = "around " + minStr + " " + unit + elif min >= 1.3: + icePhrase = "around " + minStr + " " + units + else: + return self.setWords(node, "") + + # Range + else: + if min < 0.2: + if max < 0.9: + icePhrase = "of up to " + maxStr + " of an " + unit + elif max >= 0.9 and max < 1.3: + icePhrase = "of up to " + maxStr + " " + unit + elif max >= 1.3: + icePhrase = "of up to " + maxStr + " " + units + else: + return self.setWords(node, "") + elif min >= 0.2 and min < 0.9: + if max < 0.9: + icePhrase = "of " + minStr + " to " + maxStr + " of an " + unit + elif max >= 0.9 and max < 1.3: + icePhrase = "of " + minStr + " of an " + unit + " to " + maxStr + " " + unit + elif max >= 1.3: + icePhrase = "of " + minStr + " of an " + unit + " to " + maxStr + " " + units + else: + return self.setWords(node, "") + elif min >= 0.9: + if max >= 1.3: + icePhrase = "of " + minStr + " to " + maxStr + " " + units + else: + return self.setWords(node, "") + return self.setWords(node, icePhrase) + + ### FzLevel + ### WindChill + def windChill_threshold(self, tree, node): + # THRESHOLD FOR REPORTING WIND CHILL + return 0.0 + + def windChillTemp_difference(self, tree, node): + # Difference between wind chill and temperature + # for reporting wind chill + return 5 + + def windChill_phrase(self): + return { + "setUpMethod": self.windChill_setUp, + "wordMethod": self.windChill_words, + "phraseMethods": self.standard_phraseMethods(), + } + def windChill_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("WindChill", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def windChill_words(self, tree, node): + "Create phrase for Wind Chill" + + statDict = node.getStatDict() + stats = self.getStats(statDict, "WindChill") + if stats is None: + return self.setWords(node, "") + t = tree.stats.get("T", node.getTimeRange(), + node.getAreaLabel(), statLabel="minMax", + 
mergeMethod="MinMax") + if t is None: + return self.setWords(node, "") + + min, max = self.getValue(stats, "MinMax") + + timeRange = node.getTimeRange() + day = self.getPeriod(timeRange, 1) + if day == self.DAYTIME(): + # Compare to max T + t = self.getValue(t,"Max") + else: + # Compare to min T + t = self.getValue(t,"Min") + + diff = self.windChillTemp_difference(tree, node) + if min <= self.windChill_threshold(tree, node) and min <= t - diff: + words = self.getTempRangePhrase(tree, node, (min, max), "WindChill") + else: + words = "" + return self.setWords(node, words) + + # Alternate phrase based on wind speed + def windChill_wind_threshold(self, tree, node): + # Minimum wind speed (mph) required for reporting wind chill + return 10 + + def windBased_windChill_phrase(self): + return { + "setUpMethod": self.windChill_setUp, + "wordMethod": self.windBased_windChill_words, + "phraseMethods": self.standard_phraseMethods(), + } + + def windBased_windChill_words(self, tree, node) : + "Create phrase for Wind Chill" + + # Wait for wind phrase to complete + windWords = self.findWords(tree, node, "Wind", node.getAreaLabel()) + if windWords is None: + return + + statDict = node.getStatDict() + stats = self.getStats(statDict, "WindChill") + if stats is None: + return self.setWords(node, "") + + if windWords == "": + return self.setWords(node, "") + + min, max = self.getValue(stats, "MinMax") + + # Check wind speed + # First try to re-use information from wind_phrase + maxWind = node.getComponent().get("maxMag") + if maxWind is None: + # Have to access it from statistics dictionary + timeRange = node.getTimeRange() + wind = tree.stats.get("Wind", timeRange, node.getAreaLabel(), mergeMethod="Max") + if wind is None: + return self.setWords(node, "") + maxWind, dir = wind + + if maxWind < self.windChill_wind_threshold(tree, node): + return self.setWords(node, "") + + # WC must be less or equal to threshold + if min <= self.windChill_threshold(tree, node): + words = 
self.getTempRangePhrase(tree, node, (min, max), "WindChill") + else: + words = "" + return self.setWords(node, words) + + ### HeatIndex + def heatIndex_threshold(self, tree, node): + # THRESHOLD FOR REPORTING HEAT INDEX + return 108.0 + + def heatIndexTemp_difference(self, tree, node): + # Difference between heat index and temperature + # for reporting heat index + return 5 + + def heatIndex_phrase(self): + return { + "setUpMethod": self.heatIndex_setUp, + "wordMethod": self.heatIndex_words, + "phraseMethods": self.standard_phraseMethods(), + } + def heatIndex_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("HeatIndex", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def heatIndex_words(self, tree, node) : + "Create phrase for Heat Index" + statDict = node.getStatDict() + stats = self.getStats(statDict, "HeatIndex") + if stats is None: + return self.setWords(node, "") + t = tree.stats.get("T", node.getTimeRange(), + node.getAreaLabel(), statLabel="minMax", + mergeMethod="MinMax") + if t is None: + return self.setWords(node, "") + + min, max = self.getValue(stats, "MinMax") + + timeRange = node.getTimeRange() + day = self.getPeriod(timeRange,1) + if day == self.DAYTIME(): + # Compare to max T + t = self.getValue(t,"Max") + else: + # Compare to min T + t = self.getValue(t,"Min") + # HI must be greater or equal to threshold and at least + # two degrees higher than the maximum T. + diff = self.heatIndexTemp_difference(tree, node) + if max >= self.heatIndex_threshold(tree, node) and max >= t + diff: + words = self.getTempRangePhrase(tree, node, (min, max), "HeatIndex") + else: + words = "" + return self.setWords(node, words) + + # RH -- Contributed by ER 8/04 + def rh_threshold(self, tree, node): + # Threshold for reporting RH in extended narrative. If MinRH grid is + # lower than this threshold, an RH phrase will be formatted. + # To turn off phrase completely, set to -1. 
+ if "_rhPhraseThreshold" in self.__dict__: + # Use Definition setting if defined + return self._rhPhraseThreshold + else: + # Default to no phrase + return -1 + + def rh_phrase(self): + return { + "setUpMethod": self.rh_setUp, + "wordMethod": self.rh_words, + "phraseMethods": self.standard_phraseMethods(), + } + def rh_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("MinRH", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def rh_words(self, tree, node): + # Creates phrase for MinRH. Phrase will be generated if the MinRH + # value is <= rh_threshold(tree, node). Also uses only MinRH + # grids during the day part of the extended period. Requires the + # sample analysis method to use [0] for the time duration. + minRH = None + words = "" + statDict = node.getStatDict() + + rhStats = tree.stats.get("MinRH", node.getTimeRange(), node.getAreaLabel(), + mergeMethod="List") + + if rhStats is None: + return self.setWords(node, "") + for rhValues, tr in rhStats: + # Use data only from daytime timeranges + if self.getPeriod(tr, 1): + rh = self.getValue(rhValues, "Min") + if minRH == None or rh < minRH: + minRH = rh + if minRH is not None and minRH <= self.rh_threshold(tree, node): + words = "minimum RH " + repr(int(minRH)) + " percent" + return self.setWords(node, words) + + # MultipleElementTable calls + def multipleElementTable_perPeriod_phrase(self): + return { + "setUpMethod": self.multipleElementTable_perPeriod_setUp, + "wordMethod": self.multipleElementTable_perPeriod_words, + "phraseMethods": [], + } + def multipleElementTable_perPeriod_setUp(self, tree, node): + elementInfoList = [] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def multipleElementTable_perPeriod_words(self, tree, node): + # Make a MultipleElementTable for this period + words = self.makeMultipleElementTable( + node.getAreaLabel(), node.getTimeRange(), tree, + byTimeRange=1) + 
return self.setWords(node.parent, words) + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SimpleTableUtils.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SimpleTableUtils.py index fe2ea1eacc..c60dbe0b51 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SimpleTableUtils.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/SimpleTableUtils.py @@ -1,196 +1,196 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# SimpleTableUtils.py -# Methods for producing simple engine-driven tables. 
-# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import TextUtils -import string, types - -class SimpleTableUtils(TextUtils.TextUtils): - def __init__(self): - TextUtils.TextUtils.__init__(self) - - def table_light_winds_phrase(self): - # Phrase to use for winds under the wind_threshold when reported in a Table - return "CALM" - - def table_wind_threshold(self): - # Phrase to use for winds under the wind_threshold when reported in a Table - return 5 - - #******************** - # Table Product ReportAs Methods for formatting weather element values for tables - - def singleValue(self, element, stats): - # Takes in one value, formats, and returns it - # If vector type, stats is mag,dir where direction can - # be mag or dir - - valStr = "" - if stats is None: - return valStr - - if element.dataType() == "Vector": - mag = self.callMethod(stats[0], element.conversion()) - mag = self.round(mag,"Nearest",element.roundVal()) - if mag <= self.table_wind_threshold(): - valStr = self.table_light_winds_phrase() - else: - magStr = self.fformat(mag, element.roundVal()) - magStr = string.rjust(magStr, element.maxWidth()) - dir = stats[1] - if type(dir) is not types.StringType: - dir = self.round(dir,"Nearest",element.roundVal()) - dirStr = self.convertDirection(dir) - else: - dirStr = dir - valStr = string.rjust(dirStr,2) + magStr - else: - val = self.callMethod(stats, element.conversion()) - value = self.round(val, "Nearest", element.roundVal()) - valueStr = self.fformat(value, element.roundVal()) - valStr = string.rjust(valueStr, element.maxWidth()) - - return valStr - - def avgValue(self, element, stats): - # Takes in values, averages, formats, and returns them - # Scalar input: (val1, val2) - # Vector input: vectorRange:(mag1, mag2, dir1, dir2) - - valStr = "" - if stats is None: - return valStr - - if element.dataType() == "Vector": - mag1 = stats[0] 
- mag2 = stats[1] - #print "stats", stats - dir1 = stats[2] - dir2 = stats[3] - mag, dir = self.vectorAverage((mag1, dir1),(mag2, dir2)) - mag = self.callMethod(mag, element.conversion()) - mag = self.round(mag, "Nearest", element.roundVal()) - if mag <= self.table_wind_threshold(): - valStr = self.table_light_winds_phrase() - else: - valStr = self.fformat(mag, element.roundVal()) - valStr = string.rjust(valStr, element.maxWidth()) - # put in the direction - dir = self.convertDirection(dir) - valStr = string.rjust(dir, 2) + valStr - else: - val1 = self.average(stats[0], stats[1]) - val1 = self.callMethod(val1, element.conversion()) - val = self.round(val1, "Nearest", element.roundVal()) - valStr = self.fformat(val, element.roundVal()) - valStr = string.rjust(valStr, element.maxWidth()) - - return valStr - - def range2Value(self, element, stats): - # Takes in two values and reports range - # Vector input: vectorRange: (mag1, mag2, dir1, dir2) - - valStr = "" - if stats is None: - return valStr - - val1 = self.callMethod(stats[0], element.conversion()) - val2 = self.callMethod(stats[1], element.conversion()) - val1 = self.round(val1,"Nearest", element.roundVal()) - val2 = self.round(val2,"Nearest", element.roundVal()) - val1Str = self.fformat(val1, element.roundVal()) - val2Str = self.fformat(val2, element.roundVal()) - if element.dataType() == "Scalar": - if val1 == val2: - valStr = string.rjust(val1Str, element.maxWidth()) - else: - valStr = string.rjust(val1Str, element.maxWidth()) + "-" + \ - string.rjust(val2Str, element.maxWidth()) - else: - dir1 = self.dirToText(stats[2]) - dir2 = self.dirToText(stats[3]) - dir1 = string.rjust(dir1,2) - dir2 = string.rjust(dir2,2) - if val1 == val2 and dir1 == dir2: - valStr = string.rjust(val1Str, element.maxWidth()) + dir1 - else: - valStr = dir1 + " " + string.rjust(val1Str, element.maxWidth()) + \ - " - " + \ - dir2 + " " + string.rjust(val2Str, element.maxWidth()) - - valStr = string.replace(valStr,"- ","-") - return 
valStr - - def range4Value(self, element, stats): - # Reports range of two averages - # Scalar input: (min1, max1, min2, max2) - # Vector input: vectorText: (mag1, mag2, mag3, mag4, dir1, dir2) - - valStr = "" - if stats is None: - return valStr - - if element.dataType() == "Vector": - mag1 = self.average(stats[0], stats[1]) - mag2 = self.average(stats[2], stats[3]) - mag1 = self.callMethod(mag1, element.conversion()) - mag1 = self.round(mag1, "Nearest", element.roundVal()) - mag2 = self.callMethod(mag2, element.conversion()) - mag2 = self.round(mag2, "Nearest", element.roundVal()) - dir1 = stats[4] - dir2 = stats[5] - val1Str = self.fformat(mag1, element.roundVal()) - val1Str = string.rjust(val1Str, element.maxWidth()) - val1Str = string.rjust(dir1,2) + val1Str - val2Str = self.fformat(mag2, element.roundVal()) - val2Str = string.rjust(val2Str, element.maxWidth()) - val2Str = string.rjust(dir2,2) + val2Str - else: - val1 = self.average(stats[0], stats[1]) - val2 = self.average(stats[2], stats[3]) - val1 = self.callMethod(val1, element.conversion()) - val2 = self.callMethod(val2, element.conversion()) - val1 = string.rjust(self.round(val1,"Nearest", element.roundVal()), - element.maxWidth()) - val2 = string.rjust(self.round(val2,"Nearest", element.roundVal()), - element.maxWidth()) - val1Str = self.fformat(val1, element.roundVal()) - val2Str = self.fformat(val2, element.roundVal()) - if val1Str == val2Str: - valStr = val1Str - else : - valStr = val1Str + " - " + val2Str - - return valStr - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# SimpleTableUtils.py +# Methods for producing simple engine-driven tables. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import TextUtils +import string, types + +class SimpleTableUtils(TextUtils.TextUtils): + def __init__(self): + TextUtils.TextUtils.__init__(self) + + def table_light_winds_phrase(self): + # Phrase to use for winds under the wind_threshold when reported in a Table + return "CALM" + + def table_wind_threshold(self): + # Phrase to use for winds under the wind_threshold when reported in a Table + return 5 + + #******************** + # Table Product ReportAs Methods for formatting weather element values for tables + + def singleValue(self, element, stats): + # Takes in one value, formats, and returns it + # If vector type, stats is mag,dir where direction can + # be mag or dir + + valStr = "" + if stats is None: + return valStr + + if element.dataType() == "Vector": + mag = self.callMethod(stats[0], element.conversion()) + mag = self.round(mag,"Nearest",element.roundVal()) + if mag <= self.table_wind_threshold(): + valStr = self.table_light_winds_phrase() + else: + magStr = self.fformat(mag, element.roundVal()) + magStr = string.rjust(magStr, element.maxWidth()) + dir = stats[1] + if type(dir) is not bytes: + dir = self.round(dir,"Nearest",element.roundVal()) + dirStr = self.convertDirection(dir) + else: + dirStr = dir + valStr = string.rjust(dirStr,2) + magStr + else: + val = self.callMethod(stats, element.conversion()) + value = self.round(val, "Nearest", element.roundVal()) + valueStr = self.fformat(value, element.roundVal()) + valStr = string.rjust(valueStr, 
element.maxWidth()) + + return valStr + + def avgValue(self, element, stats): + # Takes in values, averages, formats, and returns them + # Scalar input: (val1, val2) + # Vector input: vectorRange:(mag1, mag2, dir1, dir2) + + valStr = "" + if stats is None: + return valStr + + if element.dataType() == "Vector": + mag1 = stats[0] + mag2 = stats[1] + #print "stats", stats + dir1 = stats[2] + dir2 = stats[3] + mag, dir = self.vectorAverage((mag1, dir1),(mag2, dir2)) + mag = self.callMethod(mag, element.conversion()) + mag = self.round(mag, "Nearest", element.roundVal()) + if mag <= self.table_wind_threshold(): + valStr = self.table_light_winds_phrase() + else: + valStr = self.fformat(mag, element.roundVal()) + valStr = string.rjust(valStr, element.maxWidth()) + # put in the direction + dir = self.convertDirection(dir) + valStr = string.rjust(dir, 2) + valStr + else: + val1 = self.average(stats[0], stats[1]) + val1 = self.callMethod(val1, element.conversion()) + val = self.round(val1, "Nearest", element.roundVal()) + valStr = self.fformat(val, element.roundVal()) + valStr = string.rjust(valStr, element.maxWidth()) + + return valStr + + def range2Value(self, element, stats): + # Takes in two values and reports range + # Vector input: vectorRange: (mag1, mag2, dir1, dir2) + + valStr = "" + if stats is None: + return valStr + + val1 = self.callMethod(stats[0], element.conversion()) + val2 = self.callMethod(stats[1], element.conversion()) + val1 = self.round(val1,"Nearest", element.roundVal()) + val2 = self.round(val2,"Nearest", element.roundVal()) + val1Str = self.fformat(val1, element.roundVal()) + val2Str = self.fformat(val2, element.roundVal()) + if element.dataType() == "Scalar": + if val1 == val2: + valStr = string.rjust(val1Str, element.maxWidth()) + else: + valStr = string.rjust(val1Str, element.maxWidth()) + "-" + \ + string.rjust(val2Str, element.maxWidth()) + else: + dir1 = self.dirToText(stats[2]) + dir2 = self.dirToText(stats[3]) + dir1 = string.rjust(dir1,2) + 
dir2 = string.rjust(dir2,2) + if val1 == val2 and dir1 == dir2: + valStr = string.rjust(val1Str, element.maxWidth()) + dir1 + else: + valStr = dir1 + " " + string.rjust(val1Str, element.maxWidth()) + \ + " - " + \ + dir2 + " " + string.rjust(val2Str, element.maxWidth()) + + valStr = string.replace(valStr,"- ","-") + return valStr + + def range4Value(self, element, stats): + # Reports range of two averages + # Scalar input: (min1, max1, min2, max2) + # Vector input: vectorText: (mag1, mag2, mag3, mag4, dir1, dir2) + + valStr = "" + if stats is None: + return valStr + + if element.dataType() == "Vector": + mag1 = self.average(stats[0], stats[1]) + mag2 = self.average(stats[2], stats[3]) + mag1 = self.callMethod(mag1, element.conversion()) + mag1 = self.round(mag1, "Nearest", element.roundVal()) + mag2 = self.callMethod(mag2, element.conversion()) + mag2 = self.round(mag2, "Nearest", element.roundVal()) + dir1 = stats[4] + dir2 = stats[5] + val1Str = self.fformat(mag1, element.roundVal()) + val1Str = string.rjust(val1Str, element.maxWidth()) + val1Str = string.rjust(dir1,2) + val1Str + val2Str = self.fformat(mag2, element.roundVal()) + val2Str = string.rjust(val2Str, element.maxWidth()) + val2Str = string.rjust(dir2,2) + val2Str + else: + val1 = self.average(stats[0], stats[1]) + val2 = self.average(stats[2], stats[3]) + val1 = self.callMethod(val1, element.conversion()) + val2 = self.callMethod(val2, element.conversion()) + val1 = string.rjust(self.round(val1,"Nearest", element.roundVal()), + element.maxWidth()) + val2 = string.rjust(self.round(val2,"Nearest", element.roundVal()), + element.maxWidth()) + val1Str = self.fformat(val1, element.roundVal()) + val2Str = self.fformat(val2, element.roundVal()) + if val1Str == val2Str: + valStr = val1Str + else : + valStr = val1Str + " - " + val2Str + + return valStr + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/StringUtils.py 
b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/StringUtils.py index c689ed2822..5888dc0557 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/StringUtils.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/StringUtils.py @@ -1,457 +1,457 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# StringUtils.py -# Methods for manipulating strings such as sentences, phrases, indentations. 
-# -# Author: hansen -# ---------------------------------------------------------------------------- -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/22/2015 4027 randerso Changed lowerCase default to True -# 07/15/2016 5749 randerso Added punctuateList method -# 10/27/2016 5749 randerso Changed combinePhrases to use commas -# 11/28/2016 5749 randerso Changed addTextList to use commas -# -## - -## -# This is a base file that is not intended to be overridden. -## - -import string, re - -class StringUtils: - def __init__(self): - pass - - def sentence(self, str, addPeriod=1): - "Make a sentence out of the string, s" - - # if string is entirely whitespace return empty string - if re.match(r'^\s*$', str) is not None: - return "" - - # Remove leading and trailing spaces - if addPeriod: - period = ". " - else: - period = "" - if len(str) == 1: - return string.capitalize(str[0]) + period - return string.capitalize(str[0]) + str[1:len(str)] + period - - def endline(self, phrase, linelength=66, breakStr=[" ", "..."]): - "Insert endlines into phrase" - - # Break into sub-phrases separated by \n - subPhrases = string.split(phrase,"\n") - - # Break each sub-phrase into lines - str = "" - for subPhrase in subPhrases: - if subPhrase == "": - str = str + "\n" - else: - str = str + self.linebreak(subPhrase, linelength, breakStr) - return str - -## Contribution from Jay Smith TK 3268. Old version follows. -## I discovered what I consider a bug in the linebreak method of the -## StringUtils module. Let's say you pass in breakStr=['...', ' ']. The -## method will never break on ' ' if '...' is present in the string being -## tested. I believe the linebreak method should check each element of -## breakStr, determine which element appears farthest to the right in the -## string, and use that element as the break. -## I've attached a modified linebreak method which does what I've outlined. 
-## Additionally, my linebreak method will not let a line end with a number. -## This is to prevent a number and its units from appearing on different -## lines. - -## Restructured by Hansen TK to add forceBreakStr capability when no breakStr -## is found in a given linelength of characters. - def linebreak(self, phrase, linelength, breakStr=[' ', '...'], - forceBreakStr=[" ","/"]): - # Break phrase into lines of the given linelength - # Prevents a line break on a number. - # If no breakStr is found for a given linelength of characters, - # force a break on the rightmost forceBreakStr. - text = '' - start = 0 - end = start + linelength - subPhrase = phrase[start:end] - while len(subPhrase) == linelength: - maxIndex, breakChars = self.findRightMost(subPhrase, breakStr) - if maxIndex == -1: - # Didn't find any breakStr; line is too long. - # Find the rightmost force break string, if possible. - forceIndex, breakChars = self.findRightMost(subPhrase, forceBreakStr) - if forceIndex == 0: - # space in first position: will be skipped. - pass - elif forceIndex > 0: - subPhrase = subPhrase[0:forceIndex] - text = '%s%s\n' % (text, subPhrase) - start += forceIndex - else: - # no forcebreak spot, either. - # break at linelength. - text = '%s%s\n' % (text, subPhrase) - start += linelength - elif maxIndex == 0: - pass # space in first position: will be skipped - else: - text = '%s%s\n' % (text, subPhrase[:maxIndex]) - start += maxIndex - if breakChars == " ": - # Skip the space - start +=1 - end = start + linelength - subPhrase = phrase[start:end] - if subPhrase: - return '%s%s\n' % (text, subPhrase) - else: - # It's possible for subPhrase to be [] coming out of the while - # loop. In that case, we just need to return text. - return text - - def findRightMost(self, text, breakStr=[" "], nonNumeric=1): - # Return the index of the right most break string characters - # and the break characters that were found. 
- # If nonNumeric, then make sure the index does not refer to - # a numeric character. - # If the break characters are a space, the index indicate - # the character prior to the space. - maxIndex = -1 - maxChars = '' - for breakChars in breakStr: - index = text.rfind(breakChars) - done = False - while index > 0 and not done: - # Check for a numeric at end of line - if nonNumeric and breakChars == " " and text[index-1].isdigit(): - # Try to find the next right most break char - index = text.rfind(breakChars, 0, index-1) - continue - done = True - if index > maxIndex: - maxIndex = index - maxChars = breakChars - if maxIndex == -1: - return maxIndex, maxChars - if maxChars == ' ': - index = maxIndex - else: - # We want to keep the breakChars, which are assumed not to end - # with a number - index = maxIndex + len(maxChars) - return index, maxChars - - -## def linebreak(self, phrase, linelength, breakStr=[" ", "..."]): -## # Break phrase into lines the given linelength -## start = 0 -## str = "" -## further = 0 -## while start < len(phrase): -## end = start + linelength + further -## if end >= len(phrase): -## str = str + phrase[start:len(phrase)] + "\n" -## break -## breakFound = 0 -## #search for break characters in string -## for breakChars in breakStr: -## ind = string.rfind(phrase, breakChars, start, end) -## if ind >= 0: -## breakFound = 1 -## break -## #if not found, then we need to search further, this makes the -## #line too long, but it is better than simply splitting a word -## #in the middle of it. -## if breakFound == 0: -## further = further + 1 -## continue - -## if breakChars != " ": -## # We want to preserve the break characters, not drop them -## includeInd = ind + len(breakChars) -## else: -## includeInd = ind - -## str = str + phrase[start:includeInd] + "\n" -## start = ind + len(breakChars) -## further = 0 -## return str - - - def combineSentences(self, textStr): - # Given a string of sentences, combine consecutive single word - # sentences, e.g. 
Warm. Dry. --> Warm...Dry. - if textStr == '': - return '' - newTextStr = '' - # Split the string into sentences - sentences = textStr.split('.') - singleWords = [] - for sent in sentences[:-1]: - # See if sentence consists of a single word - words = sent.split() - # If single word, append it to list - if len(words) == 1: - singleWords.append(words[0].lower()) - # Otherwise, make sentence of any previous single words - # and add untouched sentence to new string - else: - if len(singleWords) > 0: - if newTextStr != '': - newTextStr = newTextStr + ' ' - newTextStr = newTextStr + self.combinePhrases(singleWords) - singleWords = [] - newTextStr = newTextStr + sent + '.' - # Clear out remaining single words - if newTextStr[:-1] != ' ': - newTextStr = newTextStr + ' ' - if len(singleWords) > 0: - newTextStr = newTextStr + self.combinePhrases(singleWords) - newTextStr = newTextStr.replace('... ', '...') - return newTextStr - - def combinePhrases(self, phrases, separator=", ", conjunction=", "): - # Combine the list of phrases using separator and conjunction - newPhrase = "" - index = 0 - length = len(phrases) - for phrase in phrases: - newPhrase = newPhrase + phrase - - # if last one, do not add conjunction - if index == length - 1: break - - # if second to last one use conjunction - if index == length - 2: - newPhrase = newPhrase + conjunction - # otherwise - else: - newPhrase = newPhrase + separator - index = index + 1 - - newPhrase = self.sentence(newPhrase) - newPhrase = string.strip(newPhrase) - return newPhrase - - def labelIndent(self, phrase, descriptor): - indentString = "" - for i in range(len(descriptor)): - i = i # for pychecker - indentString = string.join([indentString," "], "") - result = self.indentText(descriptor + phrase, "", indentString) - #print descriptor, indentString, len(descriptor), len(indentString) - #print result, "\n" - return result - - def indentText(self, text, indentFirstString = '', indentNextString = '', - maxWidth=69, breakStrings=[" 
"]): - # indentText returns a formatted string which is at most maxWidth - # columns in width, with the first line indented by "indentFirstString" - # and subsequent lines indented by indentNextString. Any leading spaces - # in the first line are preserved. - - - out = '' # total output - line = '' # each line - - #print "text before", text - # eliminate all new lines and create a list of words - words = string.split(text, '\n') - words = self.splitIntoWords(words, breakStrings) - #print "split words", words - - # eliminate all new lines and create a list of words - #words = string.split(text, '\n') - #textData = string.join(words) - #words = string.split(textData) - if len(words) == 0: - return "" - #print "words", words - - # find out how many spaces the 1st line has been indented based on - # the input text. - additionalIndent = string.find(text, words[0]) - firstLineAdditionalIndent = '' - for i in xrange(additionalIndent): - i = i # for pychecker - firstLineAdditionalIndent = firstLineAdditionalIndent + ' ' - - # now start assembling the output - line = line + indentFirstString + firstLineAdditionalIndent - additional = indentFirstString + firstLineAdditionalIndent - for w in words: - if len(line) + len(w) + 1 > maxWidth: - out = out + line + "\n" - line = indentNextString + w - else: - if len(out) == 0 and len(line) == len(additional): - line = line + w #first line, don't add a space - else: - #line = line + ' ' + w #subsequent words, add a space - line = line + w #subsequent words, add a space - if len(line): - out = out + line - - #print "text after", out + "\n" - return out + "\n" - - def splitIntoWords(self, words, breakStrings=[" "]): - # Break the list of words further - # using the list of breakStrings. 
- for breakStr in breakStrings: - newWords = [] - # Break each word on the breakStr - for word in words: - # Split the word with breakStr - strWords = string.split(word, breakStr) - if len(strWords) > 1: - newStrWords = [] - # Add the breakStr back in except for last one - index = 0 - length = len(strWords)-1 - for strWord in strWords: - if strWord == "": - continue - if index < length: - strWord += breakStr - newStrWords.append(strWord) - index += 1 - strWords = newStrWords - # Add these words to the new words list - newWords = newWords + strWords - words = newWords - return words - - def removeLast(self, str, removeStr): - # If the str ends in removeStr, remove it - # For example, - # str = rain and - # removeStr = and - # return rain - str = str.rstrip() - removeStr = removeStr.strip() - words = string.splitfields(str, " ") - length = len(words) - if words[length-1] == removeStr: - return string.joinfields(words[0:length-1], " ") - else: - return str - - def addTextList(self, words, wordList, preposition=" with ", conjunction=", and "): - # Add a list of text phrases to the given words using the - # given preposition and conjunction. - # For example: - # words = "Some thunderstorms may be severe" - # wordList = ["damaging winds", "hail"] - # Some thunderstorms may be severe with damaging winds and hail. 
- length = len(wordList) - for index, wordStr in enumerate(wordList): - if index == 0: - words = words + preposition + wordStr - else: - # if last one, use "and" instead of "," - if index == length - 1: - words = words + conjunction + wordStr - # otherwise - else: - words = words + ", " + wordStr - return words - - - def addSpace(self, str, place="trailing"): - # Add a trailing space to str - # IF it is non-empty and the last character is not a trailing space - if str is None: - return "" - if str != "" and str[-1:] != " ": - if place == "trailing": - str = str + " " - elif place == "leading": - str = " " + str - return str - - def removeSuffixes(self, names, suffix): - newNames = [] - sIndex = -len(suffix) - #print "\nRemoving", suffix, names - for name in names: - if name[sIndex:] == suffix: - newNames.append(name.rstrip(suffix)) - else: - newNames.append(name) - #print "Returning", newNames - return newNames - - def convertToUpper(self, text): - try: - lowerCase = self._lowerCase - except: - lowerCase = True - if not lowerCase: - text = text.upper() # convert to upper case - return text - - def convertToLower(self, text, allLower=0): - if allLower: - return text.lower() - try: - lowerCase = self._lowerCase - except: - lowerCase = True - if lowerCase: - words = text.split() - new = [] - for word in words: - new.append(word.capitalize()) - return string.join(new) - else: - return text - - def replaceLast(self, str, str1, str2): - """ Replace the last occurrence of str1 in str with str2 - """ - - return str2.join(str.rsplit(str1,1)) - - def punctuateList(self, items): - """ Joins a list of strings into a comma separated list using - the Oxford comma if more than 3 items in the list - """ - - s = ", ".join(items) - - if len(items) > 2: - s = self.replaceLast(s, ", ", ", and ") - elif len(items) == 2: - s = self.replaceLast(s, ", ", " and ") - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 
402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# StringUtils.py +# Methods for manipulating strings such as sentences, phrases, indentations. +# +# Author: hansen +# ---------------------------------------------------------------------------- +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/22/2015 4027 randerso Changed lowerCase default to True +# 07/15/2016 5749 randerso Added punctuateList method +# 10/27/2016 5749 randerso Changed combinePhrases to use commas +# 11/28/2016 5749 randerso Changed addTextList to use commas +# +## + +## +# This is a base file that is not intended to be overridden. +## + +import string, re + +class StringUtils: + def __init__(self): + pass + + def sentence(self, str, addPeriod=1): + "Make a sentence out of the string, s" + + # if string is entirely whitespace return empty string + if re.match(r'^\s*$', str) is not None: + return "" + + # Remove leading and trailing spaces + if addPeriod: + period = ". " + else: + period = "" + if len(str) == 1: + return string.capitalize(str[0]) + period + return string.capitalize(str[0]) + str[1:len(str)] + period + + def endline(self, phrase, linelength=66, breakStr=[" ", "..."]): + "Insert endlines into phrase" + + # Break into sub-phrases separated by \n + subPhrases = string.split(phrase,"\n") + + # Break each sub-phrase into lines + str = "" + for subPhrase in subPhrases: + if subPhrase == "": + str = str + "\n" + else: + str = str + self.linebreak(subPhrase, linelength, breakStr) + return str + +## Contribution from Jay Smith TK 3268. Old version follows. 
+## I discovered what I consider a bug in the linebreak method of the +## StringUtils module. Let's say you pass in breakStr=['...', ' ']. The +## method will never break on ' ' if '...' is present in the string being +## tested. I believe the linebreak method should check each element of +## breakStr, determine which element appears farthest to the right in the +## string, and use that element as the break. +## I've attached a modified linebreak method which does what I've outlined. +## Additionally, my linebreak method will not let a line end with a number. +## This is to prevent a number and its units from appearing on different +## lines. + +## Restructured by Hansen TK to add forceBreakStr capability when no breakStr +## is found in a given linelength of characters. + def linebreak(self, phrase, linelength, breakStr=[' ', '...'], + forceBreakStr=[" ","/"]): + # Break phrase into lines of the given linelength + # Prevents a line break on a number. + # If no breakStr is found for a given linelength of characters, + # force a break on the rightmost forceBreakStr. + text = '' + start = 0 + end = start + linelength + subPhrase = phrase[start:end] + while len(subPhrase) == linelength: + maxIndex, breakChars = self.findRightMost(subPhrase, breakStr) + if maxIndex == -1: + # Didn't find any breakStr; line is too long. + # Find the rightmost force break string, if possible. + forceIndex, breakChars = self.findRightMost(subPhrase, forceBreakStr) + if forceIndex == 0: + # space in first position: will be skipped. + pass + elif forceIndex > 0: + subPhrase = subPhrase[0:forceIndex] + text = '%s%s\n' % (text, subPhrase) + start += forceIndex + else: + # no forcebreak spot, either. + # break at linelength. 
+ text = '%s%s\n' % (text, subPhrase) + start += linelength + elif maxIndex == 0: + pass # space in first position: will be skipped + else: + text = '%s%s\n' % (text, subPhrase[:maxIndex]) + start += maxIndex + if breakChars == " ": + # Skip the space + start +=1 + end = start + linelength + subPhrase = phrase[start:end] + if subPhrase: + return '%s%s\n' % (text, subPhrase) + else: + # It's possible for subPhrase to be [] coming out of the while + # loop. In that case, we just need to return text. + return text + + def findRightMost(self, text, breakStr=[" "], nonNumeric=1): + # Return the index of the right most break string characters + # and the break characters that were found. + # If nonNumeric, then make sure the index does not refer to + # a numeric character. + # If the break characters are a space, the index indicate + # the character prior to the space. + maxIndex = -1 + maxChars = '' + for breakChars in breakStr: + index = text.rfind(breakChars) + done = False + while index > 0 and not done: + # Check for a numeric at end of line + if nonNumeric and breakChars == " " and text[index-1].isdigit(): + # Try to find the next right most break char + index = text.rfind(breakChars, 0, index-1) + continue + done = True + if index > maxIndex: + maxIndex = index + maxChars = breakChars + if maxIndex == -1: + return maxIndex, maxChars + if maxChars == ' ': + index = maxIndex + else: + # We want to keep the breakChars, which are assumed not to end + # with a number + index = maxIndex + len(maxChars) + return index, maxChars + + +## def linebreak(self, phrase, linelength, breakStr=[" ", "..."]): +## # Break phrase into lines the given linelength +## start = 0 +## str = "" +## further = 0 +## while start < len(phrase): +## end = start + linelength + further +## if end >= len(phrase): +## str = str + phrase[start:len(phrase)] + "\n" +## break +## breakFound = 0 +## #search for break characters in string +## for breakChars in breakStr: +## ind = string.rfind(phrase, 
breakChars, start, end) +## if ind >= 0: +## breakFound = 1 +## break +## #if not found, then we need to search further, this makes the +## #line too long, but it is better than simply splitting a word +## #in the middle of it. +## if breakFound == 0: +## further = further + 1 +## continue + +## if breakChars != " ": +## # We want to preserve the break characters, not drop them +## includeInd = ind + len(breakChars) +## else: +## includeInd = ind + +## str = str + phrase[start:includeInd] + "\n" +## start = ind + len(breakChars) +## further = 0 +## return str + + + def combineSentences(self, textStr): + # Given a string of sentences, combine consecutive single word + # sentences, e.g. Warm. Dry. --> Warm...Dry. + if textStr == '': + return '' + newTextStr = '' + # Split the string into sentences + sentences = textStr.split('.') + singleWords = [] + for sent in sentences[:-1]: + # See if sentence consists of a single word + words = sent.split() + # If single word, append it to list + if len(words) == 1: + singleWords.append(words[0].lower()) + # Otherwise, make sentence of any previous single words + # and add untouched sentence to new string + else: + if len(singleWords) > 0: + if newTextStr != '': + newTextStr = newTextStr + ' ' + newTextStr = newTextStr + self.combinePhrases(singleWords) + singleWords = [] + newTextStr = newTextStr + sent + '.' + # Clear out remaining single words + if newTextStr[:-1] != ' ': + newTextStr = newTextStr + ' ' + if len(singleWords) > 0: + newTextStr = newTextStr + self.combinePhrases(singleWords) + newTextStr = newTextStr.replace('... 
', '...') + return newTextStr + + def combinePhrases(self, phrases, separator=", ", conjunction=", "): + # Combine the list of phrases using separator and conjunction + newPhrase = "" + index = 0 + length = len(phrases) + for phrase in phrases: + newPhrase = newPhrase + phrase + + # if last one, do not add conjunction + if index == length - 1: break + + # if second to last one use conjunction + if index == length - 2: + newPhrase = newPhrase + conjunction + # otherwise + else: + newPhrase = newPhrase + separator + index = index + 1 + + newPhrase = self.sentence(newPhrase) + newPhrase = string.strip(newPhrase) + return newPhrase + + def labelIndent(self, phrase, descriptor): + indentString = "" + for i in range(len(descriptor)): + i = i # for pychecker + indentString = string.join([indentString," "], "") + result = self.indentText(descriptor + phrase, "", indentString) + #print descriptor, indentString, len(descriptor), len(indentString) + #print result, "\n" + return result + + def indentText(self, text, indentFirstString = '', indentNextString = '', + maxWidth=69, breakStrings=[" "]): + # indentText returns a formatted string which is at most maxWidth + # columns in width, with the first line indented by "indentFirstString" + # and subsequent lines indented by indentNextString. Any leading spaces + # in the first line are preserved. + + + out = '' # total output + line = '' # each line + + #print "text before", text + # eliminate all new lines and create a list of words + words = string.split(text, '\n') + words = self.splitIntoWords(words, breakStrings) + #print "split words", words + + # eliminate all new lines and create a list of words + #words = string.split(text, '\n') + #textData = string.join(words) + #words = string.split(textData) + if len(words) == 0: + return "" + #print "words", words + + # find out how many spaces the 1st line has been indented based on + # the input text. 
+ additionalIndent = string.find(text, words[0]) + firstLineAdditionalIndent = '' + for i in range(additionalIndent): + i = i # for pychecker + firstLineAdditionalIndent = firstLineAdditionalIndent + ' ' + + # now start assembling the output + line = line + indentFirstString + firstLineAdditionalIndent + additional = indentFirstString + firstLineAdditionalIndent + for w in words: + if len(line) + len(w) + 1 > maxWidth: + out = out + line + "\n" + line = indentNextString + w + else: + if len(out) == 0 and len(line) == len(additional): + line = line + w #first line, don't add a space + else: + #line = line + ' ' + w #subsequent words, add a space + line = line + w #subsequent words, add a space + if len(line): + out = out + line + + #print "text after", out + "\n" + return out + "\n" + + def splitIntoWords(self, words, breakStrings=[" "]): + # Break the list of words further + # using the list of breakStrings. + for breakStr in breakStrings: + newWords = [] + # Break each word on the breakStr + for word in words: + # Split the word with breakStr + strWords = string.split(word, breakStr) + if len(strWords) > 1: + newStrWords = [] + # Add the breakStr back in except for last one + index = 0 + length = len(strWords)-1 + for strWord in strWords: + if strWord == "": + continue + if index < length: + strWord += breakStr + newStrWords.append(strWord) + index += 1 + strWords = newStrWords + # Add these words to the new words list + newWords = newWords + strWords + words = newWords + return words + + def removeLast(self, str, removeStr): + # If the str ends in removeStr, remove it + # For example, + # str = rain and + # removeStr = and + # return rain + str = str.rstrip() + removeStr = removeStr.strip() + words = string.splitfields(str, " ") + length = len(words) + if words[length-1] == removeStr: + return string.joinfields(words[0:length-1], " ") + else: + return str + + def addTextList(self, words, wordList, preposition=" with ", conjunction=", and "): + # Add a list of 
text phrases to the given words using the + # given preposition and conjunction. + # For example: + # words = "Some thunderstorms may be severe" + # wordList = ["damaging winds", "hail"] + # Some thunderstorms may be severe with damaging winds and hail. + length = len(wordList) + for index, wordStr in enumerate(wordList): + if index == 0: + words = words + preposition + wordStr + else: + # if last one, use "and" instead of "," + if index == length - 1: + words = words + conjunction + wordStr + # otherwise + else: + words = words + ", " + wordStr + return words + + + def addSpace(self, str, place="trailing"): + # Add a trailing space to str + # IF it is non-empty and the last character is not a trailing space + if str is None: + return "" + if str != "" and str[-1:] != " ": + if place == "trailing": + str = str + " " + elif place == "leading": + str = " " + str + return str + + def removeSuffixes(self, names, suffix): + newNames = [] + sIndex = -len(suffix) + #print "\nRemoving", suffix, names + for name in names: + if name[sIndex:] == suffix: + newNames.append(name.rstrip(suffix)) + else: + newNames.append(name) + #print "Returning", newNames + return newNames + + def convertToUpper(self, text): + try: + lowerCase = self._lowerCase + except: + lowerCase = True + if not lowerCase: + text = text.upper() # convert to upper case + return text + + def convertToLower(self, text, allLower=0): + if allLower: + return text.lower() + try: + lowerCase = self._lowerCase + except: + lowerCase = True + if lowerCase: + words = text.split() + new = [] + for word in words: + new.append(word.capitalize()) + return string.join(new) + else: + return text + + def replaceLast(self, str, str1, str2): + """ Replace the last occurrence of str1 in str with str2 + """ + + return str2.join(str.rsplit(str1,1)) + + def punctuateList(self, items): + """ Joins a list of strings into a comma separated list using + the Oxford comma if more than 3 items in the list + """ + + s = ", ".join(items) + + 
if len(items) > 2: + s = self.replaceLast(s, ", ", ", and ") + elif len(items) == 2: + s = self.replaceLast(s, ", ", " and ") + return s \ No newline at end of file diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TableBuilder.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TableBuilder.py index 6f1ddf6633..abd85f3c5c 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TableBuilder.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TableBuilder.py @@ -1,925 +1,925 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# TableBuilder.py -# Methods for Smart Table products. -# -# Author: hansen -# ---------------------------------------------------------------------------- -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/22/2015 4027 randerso Removed upper casing of weather and discrete phrases -# -## - -## -# This is a base file that is not intended to be overridden. -## - -import TextUtils -from WxMethods import * -import types -import TimeRange, AbsTime -from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey - -class TableBuilder(TextUtils.TextUtils): - def __init__(self): - pass - - def columnLabels(self, statDict, argDict, weList): - # Return the time labels and the column length for - # the elements in weList - - # Could be made more general, but currently gets - # grid time labels for each we listed - hours = [] - maxLen = 0 - for we in weList: - stats = statDict[we.name] - #print stats - for value, start in stats: - hourStr = `start.hour`+"Z/"+`start.day` - hours.append(hourStr) - if len(hourStr) > maxLen: - maxLen = len(hourStr) - - colLen = maxLen + 4 - colLabels = "" - for hour in hours: - colLabels = colLabels + string.center(hour,colLen) - colLabels = colLabels + "\n" - return colLabels,colLen - - def columnValues(self, statDict, argDict, weList, label): - # Return the column values as text strings - values = [] - for we in weList: - stats = statDict[we.name()] - for value, hour in stats: - values.append(fformat(value, we.roundVal())) - return label, values - - def makeRow(self, rowLabel, colWidth, timeRangeList, - statList, method, argList=None, rowLabelWidth=None, - firstValWidth=None, justify = "r"): - # Produce a row beginning with the label followed by a value - # for each period. Note that colWidth can be a number, for a - # fixed column width, or a list, for a variable column width. If - # a list, then it is a parallel list with the timeRangeList. 
- # If provided, firstValWidth overrides the colWidth (list or value) - # Justify is "r" for right justify values, "l" for left justify values - # Justify may also be a list, which then is specified for each - # column entry. - # Each value is obtained via the given method which is given arguments: - # (statDict, timeRange, argList) - # and must return a text string value (i.e. numerical values - # must be converted to text strings before being returned). - if type(colWidth) is types.ListType: - fixedColWidth = colWidth[0] - else: - fixedColWidth = colWidth - if rowLabelWidth is None: - rowLabelWidth = fixedColWidth - if firstValWidth is None: - firstValWidth = fixedColWidth - values = [] - index = 0 - for timeRange, label in timeRangeList: - statDict = statList[index] - if statDict is None or method is None: - value = "" - else: - value = method(statDict, timeRange, argList) - values.append(value) - index = index + 1 - row = string.ljust(rowLabel, rowLabelWidth) - for x in xrange(len(values)): - if x == 0: - width = firstValWidth - elif type(colWidth) is types.ListType: - width = colWidth[x] - else: - width = fixedColWidth - if type(justify) is types.ListType: - curJustify = justify[x] - else: - curJustify = justify - if curJustify == 'r': - row = row + string.rjust(string.strip(values[x]), width) - else: - row = row + string.ljust(string.strip(values[x]), width) - width = fixedColWidth - return row + "\n" - - def addColValue(self, fcst, str, width): - return fcst + string.rjust(string.strip(str), width) - - def addRowLabel(self, fcst, str, width): - return fcst + string.ljust(str, width) - - def maxVal(self, stats, timeRange, argList): - # Return a scalar text string value representing the max value - # The desired element name must be the first element of argList - element = argList[0] - value = self.getStats(stats, element) - if value is None: - return "" - min, max = value - return self.getScalarVal(max) - - def minVal(self, stats, timeRange, argList): - # 
Return a scalar text string value representing the min value - # The desired element name must be the first element of argList - element = argList[0] - value = self.getStats(stats, element) - if value is None: - return "" - min, max = value - return self.getScalarVal(min) - - def scalarVal(self, stats, timeRange, argList): - # Return a scalar text string value - # The desired element name must be the first element of argList - element = argList[0] - value = self.getStats(stats, element) - if value is None: - return "" - return self.getScalarVal(value) - - def vectorVal(self, stats, timeRange, argList): - # Return a vector text string value - # The desired element name must be the first element of argList - # E.g. SW 19 - element = argList[0] - value = self.getStats(stats, element) - if value is None: - return "" - return self.getVectorVal(value) - - def wxVal(self, stats, timeRange, argList): - # Return a weather text string value - # The desired element name must be the first element of argList - # E.g. 
SNOW - element = argList[0] - wxStats = self.getStats(stats, element) - if wxStats is None: - return "" - value = "" - #print "\nIn wxVal" - for wxValue, timeRange in wxStats: - #print wxValue, timeRange - val = self.short_weather_phrase( - element,wxValue) - val = string.replace(val,"|"," ") - val = string.replace(val,"THUNDER STORMS","THUNDERSTORMS") - val = string.replace(val, "THUNDERSTORMS", "TSTMS") - if self.wxOrder(val) < self.wxOrder(value) or value == "": - value = val - #print "value", value - if value == "": - value = "NONE" - - #print "Returning ", value - return value - - - def dayOrNightVal(self, statDict, timeRange, argList): - # Return a min or max value based on the timeRange - # as Day or Night - # The argList contains the weather element for the - # daytime value followed by the element for the nighttime - # value - # Try to report a trend as well - - day = self.getPeriod(timeRange,1) - dayElement = argList[0] - nightElement = argList[1] - dayMinMax = argList[2] - nightMinMax = argList[3] - trendElement = argList[4] - priorStatDict = argList[5] - statList = argList[6] - timeRangeList = argList[7] - - if day == self.DAYTIME(): - element = dayElement - minMax = dayMinMax - else: - element = nightElement - minMax = nightMinMax - curVal = self.getStats(statDict, element) - if curVal is not None: - curVal = self.getValue(curVal, minMax) - value = self.getScalarVal(curVal) - else: - return "" - - # Try to get trend - if trendElement is None: - return value - - # Get trend 1st or 2nd period - index = 0 - for i in range(len(timeRangeList)): - tr, label = timeRangeList[i] - if timeRange == tr: - index = i - break - if index >= 2: - return value - - # Try the trend element first - stats = self.getStats(statDict, trendElement) - rawDiff = None - if stats is not None: - rawDiff = int(self.getValue(stats)) - else: - # Get data from prior day - # Since we want 24 hours prior AND we are only - # looking at the first 2 periods for trends, - # we always look at 
the priorStatDict - val = None - val = self.getStats(priorStatDict, element) - if val is not None: - rawDiff = int(curVal) - int(self.getValue(val, minMax)) - if rawDiff is None: - return value - else: - if rawDiff > 0: - sign = "+" - else: - sign = "" - return value + " (" + sign + `rawDiff` + ")" - -#################################################################### -# Weather Codes -#################################################################### - - def getCode(self, stats, timeRange): - # Return the weather code - # Assumes analysis list: - # "analysisList": [ - # ("MinT", AnalysisMethods.avg), - # ("MaxT", AnalysisMethods.avg), - # ("PoP", AnalysisMethods.stdDevMaxAvg), - # ("Wx", AnalysisMethods.dominantWx), - # ("Sky", AnalysisMethods.avg), - # ("Wind", AnalysisMethods.vectorTextAvg) - # ], - # - - # Get the statistics - popMax = self.getStats(stats, "PoP__stdDevMaxAvg") - maxT = self.getStats(stats,"MaxT__avg") - wxKey = self.getStats(stats,"Wx__dominantWx") - sky = self.getStats(stats,"Sky__avg") - wind = self.getStats(stats,"Wind__vectorAvg") - if wind is not None: - windMag, windDir = wind - else: - windMag = None - if wxKey is not None: - wxSize = len(wxKey) - wxStr = "" - for x in range(wxSize): - wxStr += str(wxKey[x]) - if x < wxSize - 1: - wxStr += '^' - wxKey = wxStr - else: - return "?" - - # the first if code statement satisfied is used - # the order of the if statements prioritizes the code returned - # eg. 
if fzrain code = Y is higher priority than snowrain code=O - # then move block of code - # code = self.getCode_Y(popMax, maxT, wxKey, sky, windMag) - # if code is not None: - # return code - ##ahead of - # code = self.getCode_O(popMax, maxT, wxKey, sky, windMag) - # if code is not None: - # return code - - - code = self.getCode_P(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_T(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_O(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_R(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_S(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_W(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_J(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_L(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_X(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_Y(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_Z(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_M(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_Q(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_N(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_F(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_G(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_I(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_D(popMax, maxT, wxKey, sky, windMag) 
- if code is not None: - return code - code = self.getCode_H(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_K(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - code = self.getCode_Sky(popMax, maxT, wxKey, sky, windMag, timeRange) - if code is not None: - return code - code = self.getCode_A(popMax, maxT, wxKey, sky, windMag) - if code is not None: - return code - return None - - - def getCode_P(self, popMax, maxT, wxKey, sky, windMag): - if windMag: - # P -- BLZZRD - # edit conditions for Blizzard below - if (WxContains(wxKey, "Wide S + 1/4SM") or \ - WxContains(wxKey, "Wide S + 0SM")) and \ - windMag > 35/1.15: # 35 = wind speed in mph - return "P" - return None - - def getCode_T(self, popMax, maxT, wxKey, sky, windMag): - #RW and T needed - #Sct,Num,Wide,Chc,Lkly,Def needed - - if popMax is not None: - # T -- TSTRMS - # ->first block (commented out) requires RW and T Chc or - # greater be in wx to satisfy. Second block requires - # only T Chc or greater to satisfy - # both blocks require pop >= 45 to satisfy -# if (WxContains(wxKey,"Lkly RW") or WxContains(wxKey,"Wide RW") or \ -# WxContains(wxKey,"Num RW") or WxContains(wxKey,"Sct RW") or \ -# WxContains(wxKey,"Ocnl RW") or \ -# WxContains(wxKey,"Chc RW") or WxContains(wxKey,"Def RW")) and \ -# (WxContains(wxKey,"Lkly T") or WxContains(wxKey,"Wide T") or \ -# WxContains(wxKey,"Num T") or WxContains(wxKey,"Sct T") or \ -# WxContains(wxKey,"Chc T") or WxContains(wxKey,"Def T")) and \ -# popMax >= 45: - if (WxContains(wxKey,"Lkly T") or WxContains(wxKey,"Wide T") or \ - WxContains(wxKey,"Num T") or WxContains(wxKey,"Sct T") or \ - WxContains(wxKey,"Chc T") or WxContains(wxKey,"Def T") or \ - WxContains(wxKey,"Ocnl T")) and \ - popMax >= 45: - - - return "T" - - return None - - def getCode_O(self, popMax, maxT, wxKey, sky, windMag): - if popMax is not None: - # O -- RNSNOW - if WxContains(wxKey, "* R") and WxContains(wxKey, "* S") \ - and popMax 
>= 45: - return "O" - return None - - def getCode_R(self, popMax, maxT, wxKey, sky, windMag): - if popMax is not None: - # R -- RAIN - if WxContains(wxKey, "* R") and popMax >= 45: - return "R" - return None - - def getCode_S(self, popMax, maxT, wxKey, sky, windMag): - if popMax is not None: - # S -- SNOW - if WxContains(wxKey, "* S") and popMax >= 45: - return "S" - return None - - def getCode_W(self, popMax, maxT, wxKey, sky, windMag): - if popMax is not None: - # W -- SHWRS - if WxContains(wxKey, "* RW") and popMax >= 45: - return "W" - return None - - def getCode_J(self, popMax, maxT, wxKey, sky, windMag): - if popMax is not None: - # J -- SNOWSHWR - if WxContains(wxKey, "* SW") and popMax >= 45: - return "J" - return None - - def getCode_L(self, popMax, maxT, wxKey, sky, windMag): - # L -- DRZL - if WxContains(wxKey, "* L"): - return "L" - return None - - def getCode_X(self, popMax, maxT, wxKey, sky, windMag): - if popMax is not None: - # X -- SLEET - if WxContains(wxKey, "* IP") and popMax >= 45: - return "X" - return None - - def getCode_Y(self, popMax, maxT, wxKey, sky, windMag): - if popMax is not None: - # Y -- FZRAIN - if WxContains(wxKey, "* ZR") and popMax >= 45: - return "Y" - return None - - def getCode_Z(self, popMax, maxT, wxKey, sky, windMag): - # Z -- FZDRZL - if WxContains(wxKey, "* ZL"): - return "Z" - return None - - def getCode_M(self, popMax, maxT, wxKey, sky, windMag): - # M -- FLURRIES - # SW-- or S-- will return flurries as defined below - # if you want S-- to be returned as light snow then use - #if WxContains(wxKey, "* SW --"): - if WxContains(wxKey, "* SW --") or WxContains(wxKey, "* S --"): - return "M" - return None - - def getCode_Q(self, popMax, maxT, wxKey, sky, windMag): - # Q -- BLGSNO - if WxContains(wxKey, "* BS"): - return "Q" - return None - - def getCode_N(self, popMax, maxT, wxKey, sky, windMag): - # edit windthresh for threshhold for getting N windy code returned - windthresh = 25.0 # wind threshold in mph - #print "wind 
",windthresh,windMag,"\n" - if windMag is not None: - # N -- WINDY - # the uncommented assumes all your wind speeds in gfe are in knts - # if you have converted them to mph then use - #if windMag > windthresh : # - if windMag > windthresh/1.15: #mph from kts - return "N" - return None - - def getCode_F(self, popMax, maxT, wxKey, sky, windMag): - # F -- FOGGY - # if wx contains F+ any coverage return F - # you may want to change this to include F only if Areas of F - # are fcst any intensity - # if WxContains(wxKey, "Areas F *"): - if WxContains(wxKey, "* F +"): - return "F" - return None - - def getCode_G(self, popMax, maxT, wxKey, sky, windMag): - if maxT is not None: - # G -- VRYHOT edit your threshhold in degs F - threshhold = 105 - if maxT > threshhold: - return "G" - return None - - def getCode_I(self, popMax, maxT, wxKey, sky, windMag): - if maxT is not None: - # I -- VRYCOLD edit your threshhold in degs F - threshhold = 20 - if maxT < threshhold: - return "I" - return None - - def getCode_D(self, popMax, maxT, wxKey, sky, windMag): - # D -- DUST - if WxContains(wxKey, "* BD"): - return "D" - return None - - def getCode_H(self, popMax, maxT, wxKey, sky, windMag): - # H -- HAZE - if WxContains(wxKey, "* H"): - return "H" - return None - - def getCode_K(self, popMax, maxT, wxKey, sky, windMag): - # K -- SMOKE - if WxContains(wxKey, "* K"): - return "K" - return None - - def getCode_Sky(self, popMax, maxT, wxKey, sky, windMag, timeRange): - # C, E, B, U, V, depending upon sky and time of day - if sky is not None: - # C -- CLDY - if sky > 94: - return "C" - - # E -- MCLDY - elif sky > 69: - return "E" - - # B -- PTCLDY - elif sky > 31: - return "B" - - # U -- SUNNY - else: - localTimeRange = self.shiftedTimeRange(timeRange) - dayNight = self.getPeriod(localTimeRange) - if dayNight == self.DAYTIME(): - return "U" - else: - return "V" - return None - - def getCode_A(self, popMax, maxT, wxKey, sky, windMag): - # A -- FAIR - return "A" - - def getScalarVal(self, 
value): - # Return 4-digit right-justified text representation of value - - # Check for no data - if value == () or value is None: - return " " - else: - # Convert to integer string - return string.rjust(`int(value)`,4) - - def getVectorVal(self, value): - # Return text representation of vector value - # Value is a tuple of magnitude and direction - # E.g. returned value: SW 19 - - # Check for no data - if value == () or value is None: - return " " - else: - mag = value[0] - dir = value[1] - magStr = string.rjust(`int(mag)`,3) - if type(dir) is not types.StringType: - dir = self.dirToText(dir) - dirStr = string.rjust(dir,2) - return dirStr + magStr - - def getWxVal(self, value): - # Return text representation of value - return self.wx_phrase({}, {}, value) - - def wxOrder(self, value): - value = string.lower(value) - if value == "thunderstorms": - return 0 - elif value == "rain showers": - return 1 - elif value == "rain": - return 2 - elif value == "snow": - return 3 - else: - return 4 - - def short_weather_phrase(self, element, stats): - " Develop short phrase for weather in a table" - # Weather Stats: - # SubKey List : list of all subkeys mentioned in time period - - if stats is None: - return "" - subkeyList = self.makeSubkeyList(stats) - if len(subkeyList) == 0: - return "" - value = "" - #print "In short_weather_phrase" - for subKey in subkeyList: - val, cov = self.weather_value(None, None, subKey, typeOnly=1) - #print "subKey", val - if self.wxOrder(val) < self.wxOrder(value) or value == "": - value = val - #print "value", value - value = string.replace(value, " ", "|") - value = string.replace(value, "thunderstorm","thunder|storm") - #print "returning", value - return value - - def long_weather_phrase(self, element, stats): - # Stats from SampleAnalysis method: weather_percentages - words = "" - index = 0 - prevCoverage = None - conjunction = "|" - if stats is None: - return "None" - length = len(stats) - for subkey, percentage in stats: - #print "subkey, 
percent", subkey, percentage - if subkey is None or subkey.wxType() == "": - index += 1 - continue - - # If not last one, determine nextCoverage - if index < length-1: - nextSubkey, percentage = stats[index+1] - else: - nextSubkey = None - - value, prevCoverage = self.weather_value( - None, None, subkey, prevCoverage, nextSubkey) - percentage = int(self.round(percentage,"Nearest", 1)) - words = words + value + " (" + `percentage` + "%) " - #prevSubkey = subkey - - # if last one, do not add conjunction - if index == length - 1: break - words = words + conjunction - index = index + 1 - - if words == "": - words = "None" - return words - - def discrete_value(self, element, stats): - " Return string of hazards" - # Weather Stats: - # SubKey List : list of all subkeys mentioned in time period - - if stats is None: - return "" - subkeyList = self.makeSubkeyList(stats) - if len(subkeyList) == 0: - return "" - - from com.raytheon.uf.viz.core.localization import LocalizationManager - siteId = LocalizationManager.getInstance().getSite() - value = "" - #print "In discrete_value" - for subKey in subkeyList: - str = subKey.split(":")[0] - discreteWords = DiscreteKey.discreteDefinition(siteId).keyDesc( - "Hazards" + "_SFC", str) - value = value + discreteWords + " " - #print "returning", value - return value - - def long_discrete_phrase(self, element, stats): - # Stats from SampleAnalysis method: discrete_percentages - words = "" - index = 0 - prevCoverage = None - conjunction = "|" - if stats is None: - return "None" - length = len(stats) - from com.raytheon.uf.viz.core.localization import LocalizationManager - siteId = LocalizationManager.getInstance().getSite() - for subkey, percentage in stats: - #print "subkey, percent", subkey, percentage - if subkey is None or subkey == "": - index += 1 - continue - - percentage = int(self.round(percentage,"Nearest", 1)) - str = subkey.split(":")[0] - discreteWords = DiscreteKey.discreteDefinition(siteId).keyDesc( - "Hazards" + "_SFC", 
str) - words = words + discreteWords + " (" + `percentage` + "%) " - - # if last one, do not add conjunction - if index == length - 1: break - words = words + conjunction - index = index + 1 - - if words == "": - words = "None" - return words - - def cloudCover(self, element, stats): - # Return a text cloud cover given Sky average value - valStr = "" - if stats is None: - return valStr - - val = self.callMethod(stats, element.conversion()) - value = self.round(val, "Nearest", element.roundVal()) - if value < 20: - shift = self.determineShift() - period = element.getPeriod() - tr = TimeRange.TimeRange(period.startTime() + shift, period.endTime() + shift) - dayNight = self.getPeriod(tr) - if dayNight == self.NIGHTTIME(): - valStr = "Clear" - else: - valStr = "Sunny" - elif value < 55: - valStr = "Partly|Cloudy" - elif value < 85: - valStr = "Mostly|Cloudy" - else: - valStr = "Cloudy" - - return valStr - - def wxPrecipSubkey(self, subkey): - # List of wxTypes that should be counted as precipitation - # for the calculation of Wx Duration - if subkey.wxType() in ["R", "RW", "S", "SW", "ZR", "IP", "SA"]: - return 1 - else: - return 0 - - def wxCoveragePercent(self, coverage): - percents = [ - ("Def", 100), - ("Wide", 100), - ("Ocnl", 80), - ("Lkly", 75), - ("Num", 60), - ("Sct", 40), - ("Chc", 20), - ("SChc", 10), - ("Iso", 10), - ] - for cov, percent in percents: - if cov == coverage: - return percent - return 0 - - def wxDuration(self, statsByRange, timeRange): - # Used in the FWM and FWFTable for weather duration - # Weather duration is determined as follows: - # Coverages are weighted according to the - # values in wxCoveragePercent. For example, "Wide" - # gets 100% weighting while "Sct" on get 40%. - # Only precip weather types (according to the wxPrecipSubkey) - # are counted toward the duration. 
- # - # Create a total duration by adding a contribution from each grid: - # For each grid in the time range: - # Among the precip subkeys, find the maximum Coverage percent. - # Weight it's contibution according to the amount of time - # it overlaps the given time range and the maximum coverage percent. - # - total = 0.0 - for subkeyList, subRange in statsByRange: - subkeyList = self.makeSubkeyList(subkeyList) - maxPercent = 0 - for subkey in subkeyList: - if self.wxPrecipSubkey(subkey): - percent = self.wxCoveragePercent(subkey.coverage()) - #print subkey.coverage(), percent - if percent > maxPercent: - maxPercent = percent - subRange = timeRange.intersection(subRange) - value = maxPercent/100.0 * subRange.duration()/3600 - #print value - total = total + value - total = self.round(total, "Nearest", 1) - #print "wxDur", total, timeRange - return `int(total)` - - # Special interface to Multiple Element Table - def makeMultipleElementTable(self, areaLabel, timeRange, argDict, - byTimeRange=0, product="MultipleElementTable_Aux_Local"): - # For each area in the areaLabel group of Combinations, - # For each city - # Generate a MultipleElementTable - comboList = self.getCurrentAreaNames(argDict) - exec "import " + self._cityDictionary - exec "cityDict = " + self._cityDictionary + ".CityDictionary" - table = "" - - if type(argDict) is not types.DictType: - # "argDict" is really "tree" - argDict = argDict.get("argDict") - argDict["elementList"] = self._elementList - argDict["singleValueFormat"] = self._singleValueFormat - argDict["includeTitle"] = 1 - argDict["byTimeRange"] = byTimeRange - - for area in comboList: - try: - cities = cityDict[area] - except: - continue - for city, cityLabel in cities: - table = table + self.generateProduct( - product, argDict, city, - timeRange=timeRange, areaLabel=cityLabel) - # ensure table has valid data - if table == "": - continue - argDict["includeTitle"] = 0 - table = "\n" + table + "\n\n" - return table - - def 
getMultipleElementTableRanges(self, productIssuance, singleValueFormat, - timeRange=None): - if productIssuance in [ - "Morning", "Morning Update", "Afternoon Update", - "Morning with Pre-1st Period"]: - self._productIssuance = "Morning" - startHour = self.DAY() - numPeriods = 3 - else: # "Afternoon", "Afternoon with Pre-1st Period", - # "Evening Update", "Early Morning Update" - self._productIssuance = "Afternoon" - startHour = self.NIGHT() - numPeriods = 4 - labelMethod = self.getLocalWeekday - - #print "\nMET Ranges", productIssuance, timeRange - if timeRange is None: - currentLocalTime, self._shift = self.determineTimeShift() - day = currentLocalTime.day - month = currentLocalTime.month - year = currentLocalTime.year - - # Use getPeriods to set up a list of - # time periods to be sampled - # Returns a list of tuples: (timeRange, timeRangeLabel) - - # Convert to GMT time before making time range - if productIssuance == "Early Morning Update": - self._shift = self._shift + 24*3600 - labelMethod = self.getLocalWeekdayName - - startTime = AbsTime.absTimeYMD(year,month,day,startHour) - startTime = startTime - self._shift - timeRange = TimeRange.TimeRange(startTime, startTime + 12*3600) - - if singleValueFormat == 1: - numPeriods = 1 - timeRangeList = self.getPeriods(timeRange, 12, 12, numPeriods, - labelMethod=labelMethod) - - # Adjust the first time range if an update issuance - if productIssuance not in ["Morning", "Afternoon"]: - updateTime = AbsTime.absTimeYMD(year, month, day, currentLocalTime.hour) - updateTime = updateTime - self._shift - tr, label = timeRangeList[0] - updateTR = TimeRange.TimeRange(updateTime, tr.endTime()) - timeRangeList[0] = (updateTR, labelMethod(updateTR)) - - #print "Returning", timeRangeList - return timeRangeList +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further 
licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TableBuilder.py +# Methods for Smart Table products. +# +# Author: hansen +# ---------------------------------------------------------------------------- +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/22/2015 4027 randerso Removed upper casing of weather and discrete phrases +# +## + +## +# This is a base file that is not intended to be overridden. +## + +import TextUtils +from WxMethods import * +import types +import TimeRange, AbsTime +from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey + +class TableBuilder(TextUtils.TextUtils): + def __init__(self): + pass + + def columnLabels(self, statDict, argDict, weList): + # Return the time labels and the column length for + # the elements in weList + + # Could be made more general, but currently gets + # grid time labels for each we listed + hours = [] + maxLen = 0 + for we in weList: + stats = statDict[we.name] + #print stats + for value, start in stats: + hourStr = repr(start.hour)+"Z/"+repr(start.day) + hours.append(hourStr) + if len(hourStr) > maxLen: + maxLen = len(hourStr) + + colLen = maxLen + 4 + colLabels = "" + for hour in hours: + colLabels = colLabels + string.center(hour,colLen) + colLabels = colLabels + "\n" + return colLabels,colLen + + def columnValues(self, statDict, argDict, weList, label): + # Return the column values as text strings + values = [] + for we in weList: + stats = statDict[we.name()] + for value, hour in stats: + values.append(fformat(value, we.roundVal())) + return label, values + + def makeRow(self, rowLabel, colWidth, timeRangeList, + statList, method, argList=None, rowLabelWidth=None, + 
firstValWidth=None, justify = "r"): + # Produce a row beginning with the label followed by a value + # for each period. Note that colWidth can be a number, for a + # fixed column width, or a list, for a variable column width. If + # a list, then it is a parallel list with the timeRangeList. + # If provided, firstValWidth overrides the colWidth (list or value) + # Justify is "r" for right justify values, "l" for left justify values + # Justify may also be a list, which then is specified for each + # column entry. + # Each value is obtained via the given method which is given arguments: + # (statDict, timeRange, argList) + # and must return a text string value (i.e. numerical values + # must be converted to text strings before being returned). + if type(colWidth) is list: + fixedColWidth = colWidth[0] + else: + fixedColWidth = colWidth + if rowLabelWidth is None: + rowLabelWidth = fixedColWidth + if firstValWidth is None: + firstValWidth = fixedColWidth + values = [] + index = 0 + for timeRange, label in timeRangeList: + statDict = statList[index] + if statDict is None or method is None: + value = "" + else: + value = method(statDict, timeRange, argList) + values.append(value) + index = index + 1 + row = string.ljust(rowLabel, rowLabelWidth) + for x in range(len(values)): + if x == 0: + width = firstValWidth + elif type(colWidth) is list: + width = colWidth[x] + else: + width = fixedColWidth + if type(justify) is list: + curJustify = justify[x] + else: + curJustify = justify + if curJustify == 'r': + row = row + string.rjust(string.strip(values[x]), width) + else: + row = row + string.ljust(string.strip(values[x]), width) + width = fixedColWidth + return row + "\n" + + def addColValue(self, fcst, str, width): + return fcst + string.rjust(string.strip(str), width) + + def addRowLabel(self, fcst, str, width): + return fcst + string.ljust(str, width) + + def maxVal(self, stats, timeRange, argList): + # Return a scalar text string value representing the max value + # The 
desired element name must be the first element of argList + element = argList[0] + value = self.getStats(stats, element) + if value is None: + return "" + min, max = value + return self.getScalarVal(max) + + def minVal(self, stats, timeRange, argList): + # Return a scalar text string value representing the min value + # The desired element name must be the first element of argList + element = argList[0] + value = self.getStats(stats, element) + if value is None: + return "" + min, max = value + return self.getScalarVal(min) + + def scalarVal(self, stats, timeRange, argList): + # Return a scalar text string value + # The desired element name must be the first element of argList + element = argList[0] + value = self.getStats(stats, element) + if value is None: + return "" + return self.getScalarVal(value) + + def vectorVal(self, stats, timeRange, argList): + # Return a vector text string value + # The desired element name must be the first element of argList + # E.g. SW 19 + element = argList[0] + value = self.getStats(stats, element) + if value is None: + return "" + return self.getVectorVal(value) + + def wxVal(self, stats, timeRange, argList): + # Return a weather text string value + # The desired element name must be the first element of argList + # E.g. 
SNOW + element = argList[0] + wxStats = self.getStats(stats, element) + if wxStats is None: + return "" + value = "" + #print "\nIn wxVal" + for wxValue, timeRange in wxStats: + #print wxValue, timeRange + val = self.short_weather_phrase( + element,wxValue) + val = string.replace(val,"|"," ") + val = string.replace(val,"THUNDER STORMS","THUNDERSTORMS") + val = string.replace(val, "THUNDERSTORMS", "TSTMS") + if self.wxOrder(val) < self.wxOrder(value) or value == "": + value = val + #print "value", value + if value == "": + value = "NONE" + + #print "Returning ", value + return value + + + def dayOrNightVal(self, statDict, timeRange, argList): + # Return a min or max value based on the timeRange + # as Day or Night + # The argList contains the weather element for the + # daytime value followed by the element for the nighttime + # value + # Try to report a trend as well + + day = self.getPeriod(timeRange,1) + dayElement = argList[0] + nightElement = argList[1] + dayMinMax = argList[2] + nightMinMax = argList[3] + trendElement = argList[4] + priorStatDict = argList[5] + statList = argList[6] + timeRangeList = argList[7] + + if day == self.DAYTIME(): + element = dayElement + minMax = dayMinMax + else: + element = nightElement + minMax = nightMinMax + curVal = self.getStats(statDict, element) + if curVal is not None: + curVal = self.getValue(curVal, minMax) + value = self.getScalarVal(curVal) + else: + return "" + + # Try to get trend + if trendElement is None: + return value + + # Get trend 1st or 2nd period + index = 0 + for i in range(len(timeRangeList)): + tr, label = timeRangeList[i] + if timeRange == tr: + index = i + break + if index >= 2: + return value + + # Try the trend element first + stats = self.getStats(statDict, trendElement) + rawDiff = None + if stats is not None: + rawDiff = int(self.getValue(stats)) + else: + # Get data from prior day + # Since we want 24 hours prior AND we are only + # looking at the first 2 periods for trends, + # we always look at 
the priorStatDict + val = None + val = self.getStats(priorStatDict, element) + if val is not None: + rawDiff = int(curVal) - int(self.getValue(val, minMax)) + if rawDiff is None: + return value + else: + if rawDiff > 0: + sign = "+" + else: + sign = "" + return value + " (" + sign + repr(rawDiff) + ")" + +#################################################################### +# Weather Codes +#################################################################### + + def getCode(self, stats, timeRange): + # Return the weather code + # Assumes analysis list: + # "analysisList": [ + # ("MinT", AnalysisMethods.avg), + # ("MaxT", AnalysisMethods.avg), + # ("PoP", AnalysisMethods.stdDevMaxAvg), + # ("Wx", AnalysisMethods.dominantWx), + # ("Sky", AnalysisMethods.avg), + # ("Wind", AnalysisMethods.vectorTextAvg) + # ], + # + + # Get the statistics + popMax = self.getStats(stats, "PoP__stdDevMaxAvg") + maxT = self.getStats(stats,"MaxT__avg") + wxKey = self.getStats(stats,"Wx__dominantWx") + sky = self.getStats(stats,"Sky__avg") + wind = self.getStats(stats,"Wind__vectorAvg") + if wind is not None: + windMag, windDir = wind + else: + windMag = None + if wxKey is not None: + wxSize = len(wxKey) + wxStr = "" + for x in range(wxSize): + wxStr += str(wxKey[x]) + if x < wxSize - 1: + wxStr += '^' + wxKey = wxStr + else: + return "?" + + # the first if code statement satisfied is used + # the order of the if statements prioritizes the code returned + # eg. 
if fzrain code = Y is higher priority than snowrain code=O + # then move block of code + # code = self.getCode_Y(popMax, maxT, wxKey, sky, windMag) + # if code is not None: + # return code + ##ahead of + # code = self.getCode_O(popMax, maxT, wxKey, sky, windMag) + # if code is not None: + # return code + + + code = self.getCode_P(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_T(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_O(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_R(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_S(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_W(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_J(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_L(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_X(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_Y(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_Z(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_M(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_Q(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_N(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_F(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_G(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_I(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_D(popMax, maxT, wxKey, sky, windMag) 
+ if code is not None: + return code + code = self.getCode_H(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_K(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + code = self.getCode_Sky(popMax, maxT, wxKey, sky, windMag, timeRange) + if code is not None: + return code + code = self.getCode_A(popMax, maxT, wxKey, sky, windMag) + if code is not None: + return code + return None + + + def getCode_P(self, popMax, maxT, wxKey, sky, windMag): + if windMag: + # P -- BLZZRD + # edit conditions for Blizzard below + if (WxContains(wxKey, "Wide S + 1/4SM") or \ + WxContains(wxKey, "Wide S + 0SM")) and \ + windMag > 35/1.15: # 35 = wind speed in mph + return "P" + return None + + def getCode_T(self, popMax, maxT, wxKey, sky, windMag): + #RW and T needed + #Sct,Num,Wide,Chc,Lkly,Def needed + + if popMax is not None: + # T -- TSTRMS + # ->first block (commented out) requires RW and T Chc or + # greater be in wx to satisfy. Second block requires + # only T Chc or greater to satisfy + # both blocks require pop >= 45 to satisfy +# if (WxContains(wxKey,"Lkly RW") or WxContains(wxKey,"Wide RW") or \ +# WxContains(wxKey,"Num RW") or WxContains(wxKey,"Sct RW") or \ +# WxContains(wxKey,"Ocnl RW") or \ +# WxContains(wxKey,"Chc RW") or WxContains(wxKey,"Def RW")) and \ +# (WxContains(wxKey,"Lkly T") or WxContains(wxKey,"Wide T") or \ +# WxContains(wxKey,"Num T") or WxContains(wxKey,"Sct T") or \ +# WxContains(wxKey,"Chc T") or WxContains(wxKey,"Def T")) and \ +# popMax >= 45: + if (WxContains(wxKey,"Lkly T") or WxContains(wxKey,"Wide T") or \ + WxContains(wxKey,"Num T") or WxContains(wxKey,"Sct T") or \ + WxContains(wxKey,"Chc T") or WxContains(wxKey,"Def T") or \ + WxContains(wxKey,"Ocnl T")) and \ + popMax >= 45: + + + return "T" + + return None + + def getCode_O(self, popMax, maxT, wxKey, sky, windMag): + if popMax is not None: + # O -- RNSNOW + if WxContains(wxKey, "* R") and WxContains(wxKey, "* S") \ + and popMax 
>= 45: + return "O" + return None + + def getCode_R(self, popMax, maxT, wxKey, sky, windMag): + if popMax is not None: + # R -- RAIN + if WxContains(wxKey, "* R") and popMax >= 45: + return "R" + return None + + def getCode_S(self, popMax, maxT, wxKey, sky, windMag): + if popMax is not None: + # S -- SNOW + if WxContains(wxKey, "* S") and popMax >= 45: + return "S" + return None + + def getCode_W(self, popMax, maxT, wxKey, sky, windMag): + if popMax is not None: + # W -- SHWRS + if WxContains(wxKey, "* RW") and popMax >= 45: + return "W" + return None + + def getCode_J(self, popMax, maxT, wxKey, sky, windMag): + if popMax is not None: + # J -- SNOWSHWR + if WxContains(wxKey, "* SW") and popMax >= 45: + return "J" + return None + + def getCode_L(self, popMax, maxT, wxKey, sky, windMag): + # L -- DRZL + if WxContains(wxKey, "* L"): + return "L" + return None + + def getCode_X(self, popMax, maxT, wxKey, sky, windMag): + if popMax is not None: + # X -- SLEET + if WxContains(wxKey, "* IP") and popMax >= 45: + return "X" + return None + + def getCode_Y(self, popMax, maxT, wxKey, sky, windMag): + if popMax is not None: + # Y -- FZRAIN + if WxContains(wxKey, "* ZR") and popMax >= 45: + return "Y" + return None + + def getCode_Z(self, popMax, maxT, wxKey, sky, windMag): + # Z -- FZDRZL + if WxContains(wxKey, "* ZL"): + return "Z" + return None + + def getCode_M(self, popMax, maxT, wxKey, sky, windMag): + # M -- FLURRIES + # SW-- or S-- will return flurries as defined below + # if you want S-- to be returned as light snow then use + #if WxContains(wxKey, "* SW --"): + if WxContains(wxKey, "* SW --") or WxContains(wxKey, "* S --"): + return "M" + return None + + def getCode_Q(self, popMax, maxT, wxKey, sky, windMag): + # Q -- BLGSNO + if WxContains(wxKey, "* BS"): + return "Q" + return None + + def getCode_N(self, popMax, maxT, wxKey, sky, windMag): + # edit windthresh for threshhold for getting N windy code returned + windthresh = 25.0 # wind threshold in mph + #print "wind 
",windthresh,windMag,"\n" + if windMag is not None: + # N -- WINDY + # the uncommented assumes all your wind speeds in gfe are in knts + # if you have converted them to mph then use + #if windMag > windthresh : # + if windMag > windthresh/1.15: #mph from kts + return "N" + return None + + def getCode_F(self, popMax, maxT, wxKey, sky, windMag): + # F -- FOGGY + # if wx contains F+ any coverage return F + # you may want to change this to include F only if Areas of F + # are fcst any intensity + # if WxContains(wxKey, "Areas F *"): + if WxContains(wxKey, "* F +"): + return "F" + return None + + def getCode_G(self, popMax, maxT, wxKey, sky, windMag): + if maxT is not None: + # G -- VRYHOT edit your threshhold in degs F + threshhold = 105 + if maxT > threshhold: + return "G" + return None + + def getCode_I(self, popMax, maxT, wxKey, sky, windMag): + if maxT is not None: + # I -- VRYCOLD edit your threshhold in degs F + threshhold = 20 + if maxT < threshhold: + return "I" + return None + + def getCode_D(self, popMax, maxT, wxKey, sky, windMag): + # D -- DUST + if WxContains(wxKey, "* BD"): + return "D" + return None + + def getCode_H(self, popMax, maxT, wxKey, sky, windMag): + # H -- HAZE + if WxContains(wxKey, "* H"): + return "H" + return None + + def getCode_K(self, popMax, maxT, wxKey, sky, windMag): + # K -- SMOKE + if WxContains(wxKey, "* K"): + return "K" + return None + + def getCode_Sky(self, popMax, maxT, wxKey, sky, windMag, timeRange): + # C, E, B, U, V, depending upon sky and time of day + if sky is not None: + # C -- CLDY + if sky > 94: + return "C" + + # E -- MCLDY + elif sky > 69: + return "E" + + # B -- PTCLDY + elif sky > 31: + return "B" + + # U -- SUNNY + else: + localTimeRange = self.shiftedTimeRange(timeRange) + dayNight = self.getPeriod(localTimeRange) + if dayNight == self.DAYTIME(): + return "U" + else: + return "V" + return None + + def getCode_A(self, popMax, maxT, wxKey, sky, windMag): + # A -- FAIR + return "A" + + def getScalarVal(self, 
value): + # Return 4-digit right-justified text representation of value + + # Check for no data + if value == () or value is None: + return " " + else: + # Convert to integer string + return string.rjust(repr(int(value)),4) + + def getVectorVal(self, value): + # Return text representation of vector value + # Value is a tuple of magnitude and direction + # E.g. returned value: SW 19 + + # Check for no data + if value == () or value is None: + return " " + else: + mag = value[0] + dir = value[1] + magStr = string.rjust(repr(int(mag)),3) + if type(dir) is not bytes: + dir = self.dirToText(dir) + dirStr = string.rjust(dir,2) + return dirStr + magStr + + def getWxVal(self, value): + # Return text representation of value + return self.wx_phrase({}, {}, value) + + def wxOrder(self, value): + value = string.lower(value) + if value == "thunderstorms": + return 0 + elif value == "rain showers": + return 1 + elif value == "rain": + return 2 + elif value == "snow": + return 3 + else: + return 4 + + def short_weather_phrase(self, element, stats): + " Develop short phrase for weather in a table" + # Weather Stats: + # SubKey List : list of all subkeys mentioned in time period + + if stats is None: + return "" + subkeyList = self.makeSubkeyList(stats) + if len(subkeyList) == 0: + return "" + value = "" + #print "In short_weather_phrase" + for subKey in subkeyList: + val, cov = self.weather_value(None, None, subKey, typeOnly=1) + #print "subKey", val + if self.wxOrder(val) < self.wxOrder(value) or value == "": + value = val + #print "value", value + value = string.replace(value, " ", "|") + value = string.replace(value, "thunderstorm","thunder|storm") + #print "returning", value + return value + + def long_weather_phrase(self, element, stats): + # Stats from SampleAnalysis method: weather_percentages + words = "" + index = 0 + prevCoverage = None + conjunction = "|" + if stats is None: + return "None" + length = len(stats) + for subkey, percentage in stats: + #print "subkey, 
percent", subkey, percentage + if subkey is None or subkey.wxType() == "": + index += 1 + continue + + # If not last one, determine nextCoverage + if index < length-1: + nextSubkey, percentage = stats[index+1] + else: + nextSubkey = None + + value, prevCoverage = self.weather_value( + None, None, subkey, prevCoverage, nextSubkey) + percentage = int(self.round(percentage,"Nearest", 1)) + words = words + value + " (" + repr(percentage) + "%) " + #prevSubkey = subkey + + # if last one, do not add conjunction + if index == length - 1: break + words = words + conjunction + index = index + 1 + + if words == "": + words = "None" + return words + + def discrete_value(self, element, stats): + " Return string of hazards" + # Weather Stats: + # SubKey List : list of all subkeys mentioned in time period + + if stats is None: + return "" + subkeyList = self.makeSubkeyList(stats) + if len(subkeyList) == 0: + return "" + + from com.raytheon.uf.viz.core.localization import LocalizationManager + siteId = LocalizationManager.getInstance().getSite() + value = "" + #print "In discrete_value" + for subKey in subkeyList: + str = subKey.split(":")[0] + discreteWords = DiscreteKey.discreteDefinition(siteId).keyDesc( + "Hazards" + "_SFC", str) + value = value + discreteWords + " " + #print "returning", value + return value + + def long_discrete_phrase(self, element, stats): + # Stats from SampleAnalysis method: discrete_percentages + words = "" + index = 0 + prevCoverage = None + conjunction = "|" + if stats is None: + return "None" + length = len(stats) + from com.raytheon.uf.viz.core.localization import LocalizationManager + siteId = LocalizationManager.getInstance().getSite() + for subkey, percentage in stats: + #print "subkey, percent", subkey, percentage + if subkey is None or subkey == "": + index += 1 + continue + + percentage = int(self.round(percentage,"Nearest", 1)) + str = subkey.split(":")[0] + discreteWords = DiscreteKey.discreteDefinition(siteId).keyDesc( + "Hazards" + 
"_SFC", str) + words = words + discreteWords + " (" + repr(percentage) + "%) " + + # if last one, do not add conjunction + if index == length - 1: break + words = words + conjunction + index = index + 1 + + if words == "": + words = "None" + return words + + def cloudCover(self, element, stats): + # Return a text cloud cover given Sky average value + valStr = "" + if stats is None: + return valStr + + val = self.callMethod(stats, element.conversion()) + value = self.round(val, "Nearest", element.roundVal()) + if value < 20: + shift = self.determineShift() + period = element.getPeriod() + tr = TimeRange.TimeRange(period.startTime() + shift, period.endTime() + shift) + dayNight = self.getPeriod(tr) + if dayNight == self.NIGHTTIME(): + valStr = "Clear" + else: + valStr = "Sunny" + elif value < 55: + valStr = "Partly|Cloudy" + elif value < 85: + valStr = "Mostly|Cloudy" + else: + valStr = "Cloudy" + + return valStr + + def wxPrecipSubkey(self, subkey): + # List of wxTypes that should be counted as precipitation + # for the calculation of Wx Duration + if subkey.wxType() in ["R", "RW", "S", "SW", "ZR", "IP", "SA"]: + return 1 + else: + return 0 + + def wxCoveragePercent(self, coverage): + percents = [ + ("Def", 100), + ("Wide", 100), + ("Ocnl", 80), + ("Lkly", 75), + ("Num", 60), + ("Sct", 40), + ("Chc", 20), + ("SChc", 10), + ("Iso", 10), + ] + for cov, percent in percents: + if cov == coverage: + return percent + return 0 + + def wxDuration(self, statsByRange, timeRange): + # Used in the FWM and FWFTable for weather duration + # Weather duration is determined as follows: + # Coverages are weighted according to the + # values in wxCoveragePercent. For example, "Wide" + # gets 100% weighting while "Sct" on get 40%. + # Only precip weather types (according to the wxPrecipSubkey) + # are counted toward the duration. 
+ # + # Create a total duration by adding a contribution from each grid: + # For each grid in the time range: + # Among the precip subkeys, find the maximum Coverage percent. + # Weight it's contibution according to the amount of time + # it overlaps the given time range and the maximum coverage percent. + # + total = 0.0 + for subkeyList, subRange in statsByRange: + subkeyList = self.makeSubkeyList(subkeyList) + maxPercent = 0 + for subkey in subkeyList: + if self.wxPrecipSubkey(subkey): + percent = self.wxCoveragePercent(subkey.coverage()) + #print subkey.coverage(), percent + if percent > maxPercent: + maxPercent = percent + subRange = timeRange.intersection(subRange) + value = maxPercent/100.0 * subRange.duration()/3600 + #print value + total = total + value + total = self.round(total, "Nearest", 1) + #print "wxDur", total, timeRange + return repr(int(total)) + + # Special interface to Multiple Element Table + def makeMultipleElementTable(self, areaLabel, timeRange, argDict, + byTimeRange=0, product="MultipleElementTable_Aux_Local"): + # For each area in the areaLabel group of Combinations, + # For each city + # Generate a MultipleElementTable + comboList = self.getCurrentAreaNames(argDict) + exec("import " + self._cityDictionary) + exec("cityDict = " + self._cityDictionary + ".CityDictionary") + table = "" + + if type(argDict) is not dict: + # "argDict" is really "tree" + argDict = argDict.get("argDict") + argDict["elementList"] = self._elementList + argDict["singleValueFormat"] = self._singleValueFormat + argDict["includeTitle"] = 1 + argDict["byTimeRange"] = byTimeRange + + for area in comboList: + try: + cities = cityDict[area] + except: + continue + for city, cityLabel in cities: + table = table + self.generateProduct( + product, argDict, city, + timeRange=timeRange, areaLabel=cityLabel) + # ensure table has valid data + if table == "": + continue + argDict["includeTitle"] = 0 + table = "\n" + table + "\n\n" + return table + + def 
getMultipleElementTableRanges(self, productIssuance, singleValueFormat, + timeRange=None): + if productIssuance in [ + "Morning", "Morning Update", "Afternoon Update", + "Morning with Pre-1st Period"]: + self._productIssuance = "Morning" + startHour = self.DAY() + numPeriods = 3 + else: # "Afternoon", "Afternoon with Pre-1st Period", + # "Evening Update", "Early Morning Update" + self._productIssuance = "Afternoon" + startHour = self.NIGHT() + numPeriods = 4 + labelMethod = self.getLocalWeekday + + #print "\nMET Ranges", productIssuance, timeRange + if timeRange is None: + currentLocalTime, self._shift = self.determineTimeShift() + day = currentLocalTime.day + month = currentLocalTime.month + year = currentLocalTime.year + + # Use getPeriods to set up a list of + # time periods to be sampled + # Returns a list of tuples: (timeRange, timeRangeLabel) + + # Convert to GMT time before making time range + if productIssuance == "Early Morning Update": + self._shift = self._shift + 24*3600 + labelMethod = self.getLocalWeekdayName + + startTime = AbsTime.absTimeYMD(year,month,day,startHour) + startTime = startTime - self._shift + timeRange = TimeRange.TimeRange(startTime, startTime + 12*3600) + + if singleValueFormat == 1: + numPeriods = 1 + timeRangeList = self.getPeriods(timeRange, 12, 12, numPeriods, + labelMethod=labelMethod) + + # Adjust the first time range if an update issuance + if productIssuance not in ["Morning", "Afternoon"]: + updateTime = AbsTime.absTimeYMD(year, month, day, currentLocalTime.hour) + updateTime = updateTime - self._shift + tr, label = timeRangeList[0] + updateTR = TimeRange.TimeRange(updateTime, tr.endTime()) + timeRangeList[0] = (updateTR, labelMethod(updateTR)) + + #print "Returning", timeRangeList + return timeRangeList diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TextRules.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TextRules.py index 79b089bef1..a840ed1f5d 
100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TextRules.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TextRules.py @@ -1,111 +1,111 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TextRules.py -# Methods for producing text forecast from Analysis statistics. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -import ConfigurableIssuance -import Header -import SimpleTableUtils -import TableBuilder -import CombinedPhrases -import MarinePhrases -import FirePhrases -import CommonUtils - -import string, types, time, sys, re -import math -import ModuleAccessor - -class TextRules(ConfigurableIssuance.ConfigurableIssuance, - Header.Header, TableBuilder.TableBuilder, SimpleTableUtils.SimpleTableUtils, - CombinedPhrases.CombinedPhrases, - MarinePhrases.MarinePhrases, FirePhrases.FirePhrases, - CommonUtils.CommonUtils): - def __init__(self): - ConfigurableIssuance.ConfigurableIssuance.__init__(self) - Header.Header.__init__(self) - SimpleTableUtils.SimpleTableUtils.__init__(self) - TableBuilder.TableBuilder.__init__(self) - CombinedPhrases.CombinedPhrases.__init__(self) - MarinePhrases.MarinePhrases.__init__(self) - FirePhrases.FirePhrases.__init__(self) - CommonUtils.CommonUtils.__init__(self) - - ############################################ - ### GLOBAL THRESHOLDS AND VARIABLES - ### To override, override the associated method in your text product class. 
- - def IFP(self): - return 0#AFPS - - def getSiteID(self, argDict): - ifpClient = argDict["ifpClient"] - return str(ifpClient.getSiteID().get(0)) - - def getGFESuiteVersion(self): - return 0#AFPS.DBSubsystem.getBuildVersion()[9:] - - def fillSpecial(self, fcst, argDict): - # Substitute appropriate strings for special variables - fcstDef = argDict["forecastDef"] - ut = argDict["utility"] - trMethod = ut.set(fcstDef, "timePeriodMethod", self.timeRangeLabel) - if type(trMethod) == types.StringType: - exec "trMethod = self."+trMethod - labelFormat = ut.set(fcstDef,"timePeriodFormat", None) - tr = argDict["timeRange"] - if labelFormat is not None: - LTorZulu, durFmt, startFmt, endFmt = labelFormat - timeperiod = self.timeDisplay( - tr, LTorZulu, durFmt, startFmt, endFmt) - else: - timeperiod = trMethod(tr) - - try: - trName = argDict["timeRangeName"] - except: - trName = "" - try: - eaName, eaLabel = argDict["editArea"] - except: - eaName, eaLabel = "","" - try: - elementName = argDict["element"] - except: - elementName = "" - fcst = string.replace(fcst, "%TimePeriod",timeperiod) - fcst = string.replace(fcst, "%EditArea", eaLabel) - fcst = string.replace(fcst, "%WeatherElement", elementName) - fcst = string.replace(fcst, "%TimeRange", trName) - return fcst - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TextRules.py +# Methods for producing text forecast from Analysis statistics. 
+# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import ConfigurableIssuance +import Header +import SimpleTableUtils +import TableBuilder +import CombinedPhrases +import MarinePhrases +import FirePhrases +import CommonUtils + +import string, types, time, sys, re +import math +import ModuleAccessor + +class TextRules(ConfigurableIssuance.ConfigurableIssuance, + Header.Header, TableBuilder.TableBuilder, SimpleTableUtils.SimpleTableUtils, + CombinedPhrases.CombinedPhrases, + MarinePhrases.MarinePhrases, FirePhrases.FirePhrases, + CommonUtils.CommonUtils): + def __init__(self): + ConfigurableIssuance.ConfigurableIssuance.__init__(self) + Header.Header.__init__(self) + SimpleTableUtils.SimpleTableUtils.__init__(self) + TableBuilder.TableBuilder.__init__(self) + CombinedPhrases.CombinedPhrases.__init__(self) + MarinePhrases.MarinePhrases.__init__(self) + FirePhrases.FirePhrases.__init__(self) + CommonUtils.CommonUtils.__init__(self) + + ############################################ + ### GLOBAL THRESHOLDS AND VARIABLES + ### To override, override the associated method in your text product class. 
+ + def IFP(self): + return 0#AFPS + + def getSiteID(self, argDict): + ifpClient = argDict["ifpClient"] + return str(ifpClient.getSiteID().get(0)) + + def getGFESuiteVersion(self): + return 0#AFPS.DBSubsystem.getBuildVersion()[9:] + + def fillSpecial(self, fcst, argDict): + # Substitute appropriate strings for special variables + fcstDef = argDict["forecastDef"] + ut = argDict["utility"] + trMethod = ut.set(fcstDef, "timePeriodMethod", self.timeRangeLabel) + if type(trMethod) == bytes: + exec("trMethod = self."+trMethod) + labelFormat = ut.set(fcstDef,"timePeriodFormat", None) + tr = argDict["timeRange"] + if labelFormat is not None: + LTorZulu, durFmt, startFmt, endFmt = labelFormat + timeperiod = self.timeDisplay( + tr, LTorZulu, durFmt, startFmt, endFmt) + else: + timeperiod = trMethod(tr) + + try: + trName = argDict["timeRangeName"] + except: + trName = "" + try: + eaName, eaLabel = argDict["editArea"] + except: + eaName, eaLabel = "","" + try: + elementName = argDict["element"] + except: + elementName = "" + fcst = string.replace(fcst, "%TimePeriod",timeperiod) + fcst = string.replace(fcst, "%EditArea", eaLabel) + fcst = string.replace(fcst, "%WeatherElement", elementName) + fcst = string.replace(fcst, "%TimeRange", trName) + return fcst + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TextUtils.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TextUtils.py index 9e28310e23..13a45948e9 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TextUtils.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TextUtils.py @@ -1,816 +1,816 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TextUtils.py -# Utility methods for Text Products. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -from math import * -import types, string -import os, re, time -import WxMethods -import SiteInfo -from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID - -class TextUtils: - def __init__(self): - self.__percentCompleted = 0 - - def DAY(self): - return 6 - def NIGHT(self): - return 18 - - def DAYTIME(self): - return 1 - def NIGHTTIME(self): - return 0 - def DAYNIGHT(self): - return -1 - - def dirList(self): - dirSpan = 45 # 45 degrees per direction - base = 22.5 # start with N - return [ - ('N', 360-base, 361), - ('N', 0, base), - ('NE',base , base + 1*dirSpan), - ('E', base + 1*dirSpan, base + 2*dirSpan), - ('SE',base + 2*dirSpan, base + 3*dirSpan), - ('S', base + 3*dirSpan, base + 4*dirSpan), - ('SW',base + 4*dirSpan, base + 5*dirSpan), - ('W', base + 5*dirSpan, base + 6*dirSpan), - ('NW',base + 6*dirSpan, base + 7*dirSpan) - ] - - def dir16PtList(self): - dirSpan = 22.5 # 22.5 degrees per direction - base = 11.25 # start with N - return [ - ('N', 360-base, 361), - ('N', 0, base), - ('NNE', base , base + 1*dirSpan), - ('NE', base + 1*dirSpan, base + 2*dirSpan), - ('ENE', base + 2*dirSpan, base + 3*dirSpan), - ('E', base + 3*dirSpan, base + 4*dirSpan), - ('ESE', base + 4*dirSpan, base + 5*dirSpan), - ('SE', base + 5*dirSpan, base + 6*dirSpan), - ('SSE', base + 6*dirSpan, base + 7*dirSpan), - ('S', base + 7*dirSpan, base + 8*dirSpan), - ('SSW', base + 8*dirSpan, base + 9*dirSpan), - ('SW', base + 9*dirSpan, base + 10*dirSpan), - ('WSW', base + 10*dirSpan, base + 11*dirSpan), - ('W', base + 11*dirSpan, base + 12*dirSpan), - ('WNW', base + 12*dirSpan, base + 13*dirSpan), - ('NW', base + 13*dirSpan, base + 14*dirSpan), - ('NNW', base + 14*dirSpan, base + 15*dirSpan), - ] - - # Dictionary for converting Wind Direction from letters to degrees - def dirList2(self): - return { - 'N' : 0, - 'NE':45, - 'E' :90, - 'SE':135, - 'S' :180, - 'SW':225, - 'W' :270, - 'NW':315, - } - - def dirToText(self, numDir): - "Convert the numerical direction to a string: 
N, NE, E, ..." - dirList = self.dirList() - for dir in dirList: - if numDir >= dir[1] and numDir < dir[2]: - return dir[0] - print "WARNING -- illegal direction for conversion: ", numDir - return None - - def dirTo16PtText(self, numDir): - "Convert the numerical direction to a string: N, NE, E, ..." - dirList = self.dir16PtList() - for dir in dirList: - if numDir >= dir[1] and numDir < dir[2]: - return dir[0] - print "WARNING -- illegal direction for conversion: ", numDir - return None - - def vector_dir(self, dir): - if not type(dir)== types.StringType: - dir = self.dirToText(dir) - dir = string.replace(dir, "N", "north") - dir = string.replace(dir, "S", "south") - dir = string.replace(dir, "E", "east") - dir = string.replace(dir, "W", "west") - return dir - - - def getVis(self, subkeyList, outputFormat="NM"): - # Find the "lowest" visibility specified in the subkeys - conversionDict = self.visibilityConversionDict() - resultVisNM = None - resultVis = "" - for subkey in subkeyList: - vis = subkey.visibility() - if vis == "": - continue - if resultVisNM is None: - resultVisNM = conversionDict[vis] - resultVis = vis - else: - # Find lowest visibility - visNM = conversionDict[vis] - if visNM < resultVisNM: - resultVisNM = visNM - resultVis = vis - if outputFormat == "NM": - return resultVisNM - else: - return resultVis - - def visibilityConversionDict(self): - # Conversion from text to nautical miles - return { - "0SM": 0, - "1/4SM": 0.2174, - "1/2SM": 0.4348, - "3/4SM": 0.6522, - "1SM": 0.8696, - "11/2SM": 1.304, - "2SM": 1.739, - "21/2SM": 2.174, - "3SM": 2.609, - "4SM": 3.478, - "5SM": 4.348, - "6SM": 5.217, - "P6SM": 6.087, - } - - def findSubkeys(self, subkeys, searchKeys): - # Return 1 if any of the searchKeys are found in subkeys - wxSize = len(subkeys) - wxStr = "" - for x in range(wxSize): - wxStr += str(subkeys[x]) - if x < wxSize - 1: - wxStr += '^' - wx = wxStr - for searchKey in searchKeys: - if WxMethods.WxContains(wx, searchKey): - return 1 - return 0 
- - ######################################################################## - # Methods for accessing customizable dictionaries and tables - - # Dictionary access - def access_dictionary(self, tree, node, key, value, dictName, execMethods=1): - # Access the dictionary with the given name for the given key value - # The value for a key may be : - # a text string - # a method - # a dictionary. The dictionary may be of several forms: - # a non-linear value dictionary (nlValue). This - # dictionary has entries that are tuples with values - # or the keyword "default" with a value. (See the - # Text Product User Guide section on Non-linear Thresholds). - # a dictionary by weather element. In this case, - # the dictionary can have the optional entry "otherwise" - # to be used if the given element has not entry. - # - # If a method, it will be called with arguments: - # tree, node - # If there is no entry found, an empty string will be returned - # - dictionary = getattr(self, dictName)(tree, node) - #print dictionary - if dictionary.has_key(key): - entry = dictionary[key] - #print type(entry), entry - if execMethods and type(entry) is types.MethodType: - return entry(tree, node, key, value) - # For some reason, if a method is assigned within - # the Local class, it appears as a tuple instead of a - # method - if execMethods and type(entry) is types.TupleType: - try: - return entry[0](tree, node, key, value) - except: - # In case it's really a tuple - return entry - elif type(entry) is types.DictType: - # Check for nlValue dictionary - for key in entry.keys(): - if key == "default" or type(key) is types.TupleType: - return entry - # Otherwise, look for value in dictionary - try: - return entry[value] - except: - # See if there is an "otherwise" entry - try: - return entry["otherwise"] - except: - return "" - else: - return entry - else: - if "otherwise" in dictionary.keys(): - return dictionary["otherwise"] - return "" - - def calcTopoPercentage(self, tree, node, 
areaLabel, value): - # Calculate the percentage of topo points in the current edit area that are above - # the given value - parmHisto = tree.getTopoHisto(areaLabel) - totalPoints = parmHisto.numberOfGridPoints() - if totalPoints == 0: - return 0.0 - countAbove = 0 - - for histSample in parmHisto.histoSamples(): - for histPair in histSample.histogram(): - if histPair.value().scalar() > value: - countAbove = countAbove + histPair.count() - return float(countAbove)/totalPoints * 100.0 - - def callMethod(self, value, method): - "Call the given method with the value" - - if method is not None: - value = method(value) - return value - - def fformat(self, value, roundVal): - # Return a string for the floating point value - # truncated to the resolution given by roundVal - if roundVal > 1.0: - return `int(value)` - else: - # Determine how many decimal points - # e.g. if roundVal is .01, dec will be 2 - val = roundVal - dec = 0 - while val < 1: - val = val * 10 - dec = dec + 1 - dec = `dec` - - format = "%10."+dec+"f" - value = format %value - value = string.strip(value) - return value - - def convertDirection(self, numDir): - "Convert the numerical direction to a string: N, NE, E, ..." - dirList = self.dirList() - for dir in dirList: - if numDir >= dir[1] and numDir < dir[2]: - return dir[0] - return "N" # this line for pychecker - - def direction_movement(self, dir1, dir2): - # Returns -1, 0, or 1 if the change from dir1 to dir2 is - # counterclockwise, no change, or clockwise, respectively. - # Note differences of 180 degrees can return -1 or 1. - dirList2 = self.dirList2() - if type(dir1) is types.StringType: - dir1 = dirList2[dir1] - if type(dir2) is types.StringType: - dir2 = dirList2[dir2] - diff = dir2 - dir1 - absDiff = abs(diff) - if diff == 0: - return 0 - elif absDiff <= 180: - return diff / absDiff - else: - return -diff / absDiff - - def direction_difference(self, dir1, dir2): - # Returns the difference dir2 - dir2. 
Values <0 or more than - # 180 are normalized so that this function always return values - # between 0 and 180. - dirList2 = self.dirList2() - if type(dir1) is types.StringType: - dir1 = dirList2[dir1] - if type(dir2) is types.StringType: - dir2 = dirList2[dir2] - diff = dir2 - dir1 - absDiff = abs(diff) - if absDiff <= 180: - return absDiff - else: - return abs(absDiff - 360) - - def direction_between(self, dir, dir1, dir2): - # Returns 1 if dir is between dir1 and dir2, 0 otherwise - # Note if dir1 - dir2 == 180 this function always returns 1 - dirList2 = self.dirList2() - if type(dir) is types.StringType: - dir = dirList2[dir] - totalDiff = self.direction_difference(dir1, dir2) - diff1 = self.direction_difference(dir, dir1) - diff2 = self.direction_difference(dir, dir2) - # if dir is inbetween the sum of the differences will be the same - if abs(diff1 + diff2 - totalDiff) < 0.1: - return 1 - else: - return 0 - - def handleError(self, errorMsg, argDict): - ut = argDict["utility"] - ut.handleError(errorMsg) - #tkMessageBox.showwarning("Warning", errorMsg) - - def round(self, val, mode, increment): - if type(increment) is types.StringType: - return float(val) - if not (mode == "RoundUp" or mode == "RoundDown" or mode == "Nearest"): - print mode, "is an invalid mode." - return float(val) - # convert to float - value = float(val) - # check for the case where no work is needed. 
- if value % increment == 0: - return value - - sign = abs(value) / value - delta = 0 - if mode == "RoundUp" and sign > 0: - delta = sign * increment - elif mode == "RoundDown" and sign < 0: - delta = sign * increment - - if mode == "RoundUp": - value = (int(value / increment) * increment) + delta - elif mode == "RoundDown": - value = (int(value / increment) * increment) + delta - elif mode == "Nearest": - value = int((value + (sign * increment / 2.0)) / increment) * increment - return float(value) - - def average(self,v1,v2): - return (v1+v2)/2.0 - - def vectorAverage(self, v1, v2): - # v1, v2 are (mag,dir) tuples - uw1, vw1 = self.MagDirToUV(v1[0], v1[1]) - uw2, vw2 = self.MagDirToUV(v2[0], v2[1]) - u = (uw1 + uw2) / 2.0 - v = (vw1 + vw2) / 2.0 - return self.UVToMagDir(u, v) - - def MagDirToUV(self, mag, dir): - #Converts magnitude, direction to u, v - DEG_TO_RAD = 0.017453292 - uw = sin(dir * DEG_TO_RAD) * mag - vw = cos(dir * DEG_TO_RAD) * mag - return (uw, vw) - - def UVToMagDir(self, u, v): - # Converts u, v to magnitude, direction - RAD_TO_DEG = 57.296083 - speed = sqrt(u * u + v * v) - dir = atan2(u, v) * RAD_TO_DEG - while dir < 0.0: - dir = dir + 360.0 - while dir >= 360.0: - dir = dir - 360.0 - #print "Speed, dir ", speed, dir - return (speed, dir) - - def setProgressPercentage(self, percentage): - self.__percentCompleted = percentage - - def progressMessage(self, fraction, percent, message): - percent = int(fraction * percent) - self.__percentCompleted = int(self.__percentCompleted + percent) - print "Progress: " + `self.__percentCompleted` + "% " + message - - def getParmID(self, parmNameAndLevel, databaseID): - index = string.find(parmNameAndLevel, "_") - if index == -1: - name = parmNameAndLevel - level = "SFC" - parm = ParmID(name,databaseID,level) - else: - name = parmNameAndLevel[0:index] - level = parmNameAndLevel[index+1:] - parm = ParmID(name,databaseID,level) - return parm - - def nlValue(self, nlValue, lookupValue): - # Apply a non-linear 
value to the given value - # nlValue might be a dictionary to be applied to value - # OR it could be a simple constant - if type(nlValue) is types.DictType: - # Dictionary lookup - result = None - if nlValue.has_key('default'): - result = nlValue['default'] - dictkeys = nlValue.keys() - for key in dictkeys: - if((lookupValue >= key[0]) and (lookupValue < key[1])): - return nlValue[key] - if result is None: - msgString = """ILLEGAL NON-LINEAR THRESHOLD dictionary. - No dictionary entry for value: """ + `lookupValue` + """ - Make sure your non-linear threshold dictionaries do not - have "gaps" in the ranges. For example, your dictionary - should look like this: - - def maximum_range_nlValue_dict(self, tree, node): - ### ConfigVariables - # Maximum range to be reported within a phrase - # e.g. 5 to 10 mph - # Units depend on the product - dict = TextRules.TextRules.maximum_range_nlValue_dict(self,tree, node) - dict["Wind"] = { - (0, 5) : 0, - (5, 13) : 5, - (13, 28) : 10, - "default" : 15, - } - return dict - - NOT this: - - def maximum_range_nlValue_dict(self, tree, node): - ### ConfigVariables - # Maximum range to be reported within a phrase - # e.g. 
5 to 10 mph - # Units depend on the product - dict = TextRules.TextRules.maximum_range_nlValue_dict(self,tree, node) - dict["Wind"] = { - (0, 4) : 0, - (5, 12) : 5, - (13, 27) : 10, - "default" : 15, - } - return dict - """ - raise ValueError, msgString - return result - elif type(nlValue) is types.MethodType: - return nlValue(lookupValue) - else: - # Constant value - return nlValue - - def roundValue(self, value, roundingMethod, mode, increment_nlValue, maxFlag=0): - nlIncrement = self.nlValue(increment_nlValue, value) - if type(roundingMethod) is types.MethodType: - return roundingMethod(value, mode, nlIncrement, maxFlag) - else: - return self.round(value, mode, nlIncrement) - - - def getRangeInfo(self, tree, node, elementName): - rangeThreshold_nlValue = self.range_nlValue(tree, node, elementName, elementName) - rangeBias_nlValue = self.range_bias_nlValue(tree, node, elementName, elementName) - minRange_nlValue = self.minimum_range_nlValue(tree, node, elementName, elementName) - minBias_nlValue = self.minimum_range_bias_nlValue(tree, node, elementName, elementName) - maxRange_nlValue = self.maximum_range_nlValue(tree, node, elementName, elementName) - maxBias_nlValue = self.maximum_range_bias_nlValue(tree, node, elementName, elementName) - increment_nlValue = self.increment_nlValue(tree, node, elementName, elementName) - null_nlValue = self.null_nlValue(tree, node, elementName, elementName) - return self.RangeInfo(rangeThreshold_nlValue, rangeBias_nlValue, minRange_nlValue, minBias_nlValue, - maxRange_nlValue, maxBias_nlValue, increment_nlValue, null_nlValue) - - def applyRanges(self, tree, node, min, max, elementName): - rangeInfo = self.getRangeInfo(tree, node, elementName) - return self.applyRangeValues(tree, node, min, max, elementName, rangeInfo) - - def applyRangeValues(self, tree, node, min, max, elementName, rangeInfo): - avg = self.average(float(min),max) - diff = abs(max - min) - # If the range is not great enough, return as a single value - if 
rangeInfo.rangeThreshold_nlValue != "": - threshold = self.nlValue(rangeInfo.rangeThreshold_nlValue, avg) - if diff < threshold: - bias = self.nlValue(rangeInfo.rangeBias_nlValue, avg) - if bias == "Average": - avg = self.roundStatistic(tree, node, avg, elementName) - return avg, avg - elif bias == "Max": - return max, max - else: - return min, min - # Apply minimum range - if rangeInfo.minRange_nlValue != "": - minRange = self.nlValue(rangeInfo.minRange_nlValue, avg) - if diff < minRange: - min, max = self.applyBias( - tree, node, elementName, min, max, - rangeInfo.minBias_nlValue, avg, minRange, - rangeInfo.increment_nlValue) - # Apply maximum range - if rangeInfo.maxRange_nlValue != "": - maxRange = self.nlValue(rangeInfo.maxRange_nlValue, avg) - if diff > maxRange: - min, max = self.applyBias( - tree, node, elementName, min, max, - rangeInfo.maxBias_nlValue, avg, maxRange, - rangeInfo.increment_nlValue) - # Cut-off at null_nlValue if max > null_nlValue and min < null_nlValue - threshold = self.nlValue(rangeInfo.null_nlValue, max) - if min > 0 and max >= threshold and min < threshold: - #print "cut-off", min, max, threshold, elementName - roundingMethod = self.rounding_method(tree, node, elementName, elementName) - nlIncrement = self.nlValue(self.increment_nlValue( - tree, node, elementName, elementName), threshold) - min = self.roundValue(threshold, roundingMethod, "RoundUp", nlIncrement) - if min < threshold: - min = threshold - #print " new min", min - return min, max - - def applyBias(self, tree, node, elementName, min, max, bias_nlValue, - avg, rangeValue, increment_nlValue): - bias = self.nlValue(bias_nlValue, avg) - #print "applying bias", min, max, elementName, bias, rangeValue - if bias == "Average": - inc = rangeValue/2.0 - min = self.roundStatistic(tree, node, avg - inc, elementName) - max = self.roundStatistic(tree, node, avg + inc, elementName) - ## If ranges are being applied to values that - ## span zero, you can end up with a max-min being 
greater than - ## rangeValue because Python rounds away from zero. (-0.5 rounds to - ## -1.0, not 0.0.) The test below checks for this and adds - ## 1 back to the min. - if max-min > rangeValue: - min += 1 - else: - increment = self.nlValue(increment_nlValue, avg) - rangeValue = self.round(rangeValue, "Nearest", increment) - minAllowedValue, maxAllowedValue = tree.library.getLimits(elementName) - if bias == "Max": - min = max - rangeValue - else: - max = min + rangeValue - if max > maxAllowedValue: - max = maxAllowedValue - if min < minAllowedValue: - min = minAllowedValue - #print " returning new", min, max - return min, max - - ####################################################### - # Statistics manipulation - - def SCALAR(self): - return 0 - def MAGNITUDE(self): - return 1 - def DIRECTION(self): - return 2 - def VECTOR(self): - return 3 - def VECTOR_TEXT(self): - return 4 - def VECTOR_NUM(self): - return 5 - def WEATHER(self): - return 6 - def DISCRETE(self): - return 7 - - def getValue(self, stats, method="Average", dataType=None): - # "stats" is either a single value or a tuple of 2 values - # method is any mergeMethod - - if dataType == self.VECTOR(): - mag, dir = stats - mag = self.getValue(mag, method) - return (mag, dir) - - if isinstance(stats, types.TupleType): - if method == "Max": - return stats[1] - elif method == "Min": - return stats[0] - elif method == "Sum": - return stats[0] + stats[1] - elif method == "MinMax": - return stats - else: - if stats[0] is None or stats[1] is None: - return None - return self.average(stats[0], stats[1]) - else: - if method == "MinMax": - return (stats, stats) - else: - return stats - - def makeSubkeyList(self, weatherKey): - # Make sure subkeyList is a true list - length = len(weatherKey) - newList = [] - index = 0 - for subkey in weatherKey: - newList.append(subkey) - index = index + 1 - if index >= length: - break - return newList - - def storeAWIPS(self, product, AWIPSkey="", host=None): - # Stores text in string 
"product" into - # the AWIPS text database via the given host if host is defined using - # ssh technique. Otherwise uses the AWIPS textdb command directly. - # Note: for the ssh mode, you need to have an entry in the .rhosts - # file of your home directory on lx1 - # - - if AWIPSkey == "": - return # do nothing - - # use the command directly - assumes FXA environment setup - if host is None: - # (code adopted from Paul Jendrowski 9/18/03) - # set path to textdb command - cmd = "gfetextdb -w " + AWIPSkey - # issue the command - db = os.popen(cmd, 'w') - db.write(product) - db.close() - - # use ssh (or rsh) to communicate with the textdb - else: - try: - command= "ssh " + host + " 'textdb -w " + AWIPSkey +"'" - except: - command= "rsh " + host + " 'textdb -w " + AWIPSkey +"'" - saveProduct = os.popen(command,'w') - saveProduct.write(product) - saveProduct.close() - - def getPreviousProduct(self, productID, searchString="", version=0): - # gets a previous product from the AWIPS database - - from com.raytheon.viz.gfe.core import DataManagerUIFactory - from com.raytheon.viz.gfe.product import TextDBUtil - - # Redmine #17120 - return to pre-DR 15703 behavior. - opMode = DataManagerUIFactory.getCurrentInstance().getOpMode().name() == "OPERATIONAL" - previousProduct = TextDBUtil.retrieveProduct(productID, version, opMode) - previousProduct = string.strip(previousProduct) - - if searchString != "": - # extract the specified section - section = re.sub(r'^[=, A-Za-z0-9\-\n\./]+' + - searchString + r'[=, A-Za-z0-9\-\n\/]*\.\.\.\n' + - r'*([=, A-Za-z0-9\-\n\./]+)\$\$[=, A-Za-z0-9\-\n' - + r'\.\$@/]+', r'\1', previousProduct) - return section - else: - return previousProduct - - def formatTimeString(self, gmTime, format, newTimeZone=None): - # converts the specified time (in seconds) to the specified time zone - # the time returned is in local time in units of seconds. 
- # newTimeZone must be an accepted time zone identifier such as - # "EST5EDT", CST6CDT", "MST7MDT", "PST8PST", "AST9ADT" "HST10" - myTimeZone = os.environ["TZ"] # save the defined time zone - if newTimeZone is None: - newTimeZone = myTimeZone - os.environ["TZ"] = newTimeZone # set the new time zone - time.tzset() - timeZoneStr = time.strftime(format, time.localtime(gmTime)) - os.environ["TZ"] = myTimeZone # set the time zone back - time.tzset() - return timeZoneStr # return the time as a string - - # Adopted from ER 8/04 - def debug_print(self, msg="", trace=0, limit=10): - """ZFP_ER_Overrides addition of ZFP_ER_Overrides._debug_print. - - ER method for generic debug prints switched on/off by self._debug. - Automatically prints calling method's name, file, class and line - number plus an optional message string. - - (e.g. self.debug_print('Debug message') - - If the 'trace' flag is set to 1, the Python traceback info will not be - displayed. This is useful for displaying multiple formatted DEBUG - messages. - - (e.g. self.debug_print('2nd Debug message', 1) - """ - - # This method requires: import traceback - import traceback - - # If debug is set up as a dictionary, then you could turn on - # method specific printing. 
- # Definition["debug"] = {"_myMeth1":1, "_myMeth2":0} - - # Try to get traceback info - if debug flag is defined and not off - try: - if self._debug: - - # Get debug info - file, lineno, name, text = traceback.extract_stack()[-2] - else: - return # bail now - turned off - except: - return # bail now - not defined - - # Define counter to track number of times this method has been printed - count = 0 - - # See if debug counter dictionary has been defined - try: - if type(self._debugDict) == type({}): - pass - except: - self._debugDict = {} - - # See if method-specific printing is being used - if type(self._debug) == type({}): - - # If this method has a method specific flag - get it - if self._debug.has_key(name): - flag = self._debug[name] - else: - flag = 0 # not specified - don't display - else: - flag = self._debug - - # If debug flag is turned off - if not flag: - return # bail out now - - # Track number of times this method has been displayed - if self._debugDict.has_key(name): - count = self._debugDict[name] + 1 - else: - count = 1 - - # If debug message limit is not reached - if count <= limit: - - # If this is not a continuation debug message - if trace == 0: - - # Record the printing of this message - self._debugDict[name] = count - - # Print the traceback message - print "DEBUG: %s in %s at line %d" % (name, file, lineno) - print "DEBUG: Class = %s %d\n\n" % (self.__class__, count) - #print "Super classes:",self.__class__.__bases__ - - # If there is a message, print that too - if msg != "": - print '\t%s' % (msg) - - class RangeInfo: - def __init__(self,rangeThreshold_nlValue, rangeBias_nlValue, - minRange_nlValue, minBias_nlValue, - maxRange_nlValue, maxBias_nlValue, increment_nlValue, - null_nlValue): - self.rangeThreshold_nlValue = rangeThreshold_nlValue - self.rangeBias_nlValue = rangeBias_nlValue - self.minRange_nlValue = minRange_nlValue - self.minBias_nlValue = minBias_nlValue - self.maxRange_nlValue = maxRange_nlValue - self.maxBias_nlValue = 
maxBias_nlValue - self.increment_nlValue = increment_nlValue - self.null_nlValue = null_nlValue - - def getSiteInfo(self, infoType, siteID): - # Get information about an NWS site given the 3-letter site id - # infoType can be: "region", "wfoCity", "wfoCityState", "fullStationID - return SiteInfo.SiteInfo().getInfo(infoType, siteID) +# TextUtils.py +# Utility methods for Text Products. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +from math import * +import types, string +import os, re, time +import WxMethods +import SiteInfo +from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID + +class TextUtils: + def __init__(self): + self.__percentCompleted = 0 + + def DAY(self): + return 6 + def NIGHT(self): + return 18 + + def DAYTIME(self): + return 1 + def NIGHTTIME(self): + return 0 + def DAYNIGHT(self): + return -1 + + def dirList(self): + dirSpan = 45 # 45 degrees per direction + base = 22.5 # start with N + return [ + ('N', 360-base, 361), + ('N', 0, base), + ('NE',base , base + 1*dirSpan), + ('E', base + 1*dirSpan, base + 2*dirSpan), + ('SE',base + 2*dirSpan, base + 3*dirSpan), + ('S', base + 3*dirSpan, base + 4*dirSpan), + ('SW',base + 4*dirSpan, base + 5*dirSpan), + ('W', base + 5*dirSpan, base + 6*dirSpan), + ('NW',base + 6*dirSpan, base + 7*dirSpan) + ] + + def dir16PtList(self): + dirSpan = 22.5 # 22.5 degrees per direction + base = 11.25 # start with N + return [ + ('N', 360-base, 361), + ('N', 0, base), + ('NNE', base , base + 1*dirSpan), + ('NE', base + 1*dirSpan, base + 2*dirSpan), + ('ENE', base + 2*dirSpan, base + 3*dirSpan), + ('E', base + 3*dirSpan, base + 4*dirSpan), + ('ESE', base + 4*dirSpan, base + 5*dirSpan), + ('SE', base + 5*dirSpan, base + 6*dirSpan), + ('SSE', base + 6*dirSpan, base + 7*dirSpan), + ('S', base + 7*dirSpan, base + 8*dirSpan), + ('SSW', base + 8*dirSpan, base + 9*dirSpan), + ('SW', base + 
9*dirSpan, base + 10*dirSpan), + ('WSW', base + 10*dirSpan, base + 11*dirSpan), + ('W', base + 11*dirSpan, base + 12*dirSpan), + ('WNW', base + 12*dirSpan, base + 13*dirSpan), + ('NW', base + 13*dirSpan, base + 14*dirSpan), + ('NNW', base + 14*dirSpan, base + 15*dirSpan), + ] + + # Dictionary for converting Wind Direction from letters to degrees + def dirList2(self): + return { + 'N' : 0, + 'NE':45, + 'E' :90, + 'SE':135, + 'S' :180, + 'SW':225, + 'W' :270, + 'NW':315, + } + + def dirToText(self, numDir): + "Convert the numerical direction to a string: N, NE, E, ..." + dirList = self.dirList() + for dir in dirList: + if numDir >= dir[1] and numDir < dir[2]: + return dir[0] + print("WARNING -- illegal direction for conversion: ", numDir) + return None + + def dirTo16PtText(self, numDir): + "Convert the numerical direction to a string: N, NE, E, ..." + dirList = self.dir16PtList() + for dir in dirList: + if numDir >= dir[1] and numDir < dir[2]: + return dir[0] + print("WARNING -- illegal direction for conversion: ", numDir) + return None + + def vector_dir(self, dir): + if not type(dir)== bytes: + dir = self.dirToText(dir) + dir = string.replace(dir, "N", "north") + dir = string.replace(dir, "S", "south") + dir = string.replace(dir, "E", "east") + dir = string.replace(dir, "W", "west") + return dir + + + def getVis(self, subkeyList, outputFormat="NM"): + # Find the "lowest" visibility specified in the subkeys + conversionDict = self.visibilityConversionDict() + resultVisNM = None + resultVis = "" + for subkey in subkeyList: + vis = subkey.visibility() + if vis == "": + continue + if resultVisNM is None: + resultVisNM = conversionDict[vis] + resultVis = vis + else: + # Find lowest visibility + visNM = conversionDict[vis] + if visNM < resultVisNM: + resultVisNM = visNM + resultVis = vis + if outputFormat == "NM": + return resultVisNM + else: + return resultVis + + def visibilityConversionDict(self): + # Conversion from text to nautical miles + return { + "0SM": 0, + 
"1/4SM": 0.2174, + "1/2SM": 0.4348, + "3/4SM": 0.6522, + "1SM": 0.8696, + "11/2SM": 1.304, + "2SM": 1.739, + "21/2SM": 2.174, + "3SM": 2.609, + "4SM": 3.478, + "5SM": 4.348, + "6SM": 5.217, + "P6SM": 6.087, + } + + def findSubkeys(self, subkeys, searchKeys): + # Return 1 if any of the searchKeys are found in subkeys + wxSize = len(subkeys) + wxStr = "" + for x in range(wxSize): + wxStr += str(subkeys[x]) + if x < wxSize - 1: + wxStr += '^' + wx = wxStr + for searchKey in searchKeys: + if WxMethods.WxContains(wx, searchKey): + return 1 + return 0 + + ######################################################################## + # Methods for accessing customizable dictionaries and tables + + # Dictionary access + def access_dictionary(self, tree, node, key, value, dictName, execMethods=1): + # Access the dictionary with the given name for the given key value + # The value for a key may be : + # a text string + # a method + # a dictionary. The dictionary may be of several forms: + # a non-linear value dictionary (nlValue). This + # dictionary has entries that are tuples with values + # or the keyword "default" with a value. (See the + # Text Product User Guide section on Non-linear Thresholds). + # a dictionary by weather element. In this case, + # the dictionary can have the optional entry "otherwise" + # to be used if the given element has not entry. 
+ # + # If a method, it will be called with arguments: + # tree, node + # If there is no entry found, an empty string will be returned + # + dictionary = getattr(self, dictName)(tree, node) + #print dictionary + if key in dictionary: + entry = dictionary[key] + #print type(entry), entry + if execMethods and type(entry) is types.MethodType: + return entry(tree, node, key, value) + # For some reason, if a method is assigned within + # the Local class, it appears as a tuple instead of a + # method + if execMethods and type(entry) is tuple: + try: + return entry[0](tree, node, key, value) + except: + # In case it's really a tuple + return entry + elif type(entry) is dict: + # Check for nlValue dictionary + for key in list(entry.keys()): + if key == "default" or type(key) is tuple: + return entry + # Otherwise, look for value in dictionary + try: + return entry[value] + except: + # See if there is an "otherwise" entry + try: + return entry["otherwise"] + except: + return "" + else: + return entry + else: + if "otherwise" in list(dictionary.keys()): + return dictionary["otherwise"] + return "" + + def calcTopoPercentage(self, tree, node, areaLabel, value): + # Calculate the percentage of topo points in the current edit area that are above + # the given value + parmHisto = tree.getTopoHisto(areaLabel) + totalPoints = parmHisto.numberOfGridPoints() + if totalPoints == 0: + return 0.0 + countAbove = 0 + + for histSample in parmHisto.histoSamples(): + for histPair in histSample.histogram(): + if histPair.value().scalar() > value: + countAbove = countAbove + histPair.count() + return float(countAbove)/totalPoints * 100.0 + + def callMethod(self, value, method): + "Call the given method with the value" + + if method is not None: + value = method(value) + return value + + def fformat(self, value, roundVal): + # Return a string for the floating point value + # truncated to the resolution given by roundVal + if roundVal > 1.0: + return repr(int(value)) + else: + # Determine how 
many decimal points + # e.g. if roundVal is .01, dec will be 2 + val = roundVal + dec = 0 + while val < 1: + val = val * 10 + dec = dec + 1 + dec = repr(dec) + + format = "%10."+dec+"f" + value = format %value + value = string.strip(value) + return value + + def convertDirection(self, numDir): + "Convert the numerical direction to a string: N, NE, E, ..." + dirList = self.dirList() + for dir in dirList: + if numDir >= dir[1] and numDir < dir[2]: + return dir[0] + return "N" # this line for pychecker + + def direction_movement(self, dir1, dir2): + # Returns -1, 0, or 1 if the change from dir1 to dir2 is + # counterclockwise, no change, or clockwise, respectively. + # Note differences of 180 degrees can return -1 or 1. + dirList2 = self.dirList2() + if type(dir1) is bytes: + dir1 = dirList2[dir1] + if type(dir2) is bytes: + dir2 = dirList2[dir2] + diff = dir2 - dir1 + absDiff = abs(diff) + if diff == 0: + return 0 + elif absDiff <= 180: + return diff / absDiff + else: + return -diff / absDiff + + def direction_difference(self, dir1, dir2): + # Returns the difference dir2 - dir2. Values <0 or more than + # 180 are normalized so that this function always return values + # between 0 and 180. 
+ dirList2 = self.dirList2() + if type(dir1) is bytes: + dir1 = dirList2[dir1] + if type(dir2) is bytes: + dir2 = dirList2[dir2] + diff = dir2 - dir1 + absDiff = abs(diff) + if absDiff <= 180: + return absDiff + else: + return abs(absDiff - 360) + + def direction_between(self, dir, dir1, dir2): + # Returns 1 if dir is between dir1 and dir2, 0 otherwise + # Note if dir1 - dir2 == 180 this function always returns 1 + dirList2 = self.dirList2() + if type(dir) is bytes: + dir = dirList2[dir] + totalDiff = self.direction_difference(dir1, dir2) + diff1 = self.direction_difference(dir, dir1) + diff2 = self.direction_difference(dir, dir2) + # if dir is inbetween the sum of the differences will be the same + if abs(diff1 + diff2 - totalDiff) < 0.1: + return 1 + else: + return 0 + + def handleError(self, errorMsg, argDict): + ut = argDict["utility"] + ut.handleError(errorMsg) + #tkMessageBox.showwarning("Warning", errorMsg) + + def round(self, val, mode, increment): + if type(increment) is bytes: + return float(val) + if not (mode == "RoundUp" or mode == "RoundDown" or mode == "Nearest"): + print(mode, "is an invalid mode.") + return float(val) + # convert to float + value = float(val) + # check for the case where no work is needed. 
+ if value % increment == 0: + return value + + sign = abs(value) / value + delta = 0 + if mode == "RoundUp" and sign > 0: + delta = sign * increment + elif mode == "RoundDown" and sign < 0: + delta = sign * increment + + if mode == "RoundUp": + value = (int(value / increment) * increment) + delta + elif mode == "RoundDown": + value = (int(value / increment) * increment) + delta + elif mode == "Nearest": + value = int((value + (sign * increment / 2.0)) / increment) * increment + return float(value) + + def average(self,v1,v2): + return (v1+v2)/2.0 + + def vectorAverage(self, v1, v2): + # v1, v2 are (mag,dir) tuples + uw1, vw1 = self.MagDirToUV(v1[0], v1[1]) + uw2, vw2 = self.MagDirToUV(v2[0], v2[1]) + u = (uw1 + uw2) / 2.0 + v = (vw1 + vw2) / 2.0 + return self.UVToMagDir(u, v) + + def MagDirToUV(self, mag, dir): + #Converts magnitude, direction to u, v + DEG_TO_RAD = 0.017453292 + uw = sin(dir * DEG_TO_RAD) * mag + vw = cos(dir * DEG_TO_RAD) * mag + return (uw, vw) + + def UVToMagDir(self, u, v): + # Converts u, v to magnitude, direction + RAD_TO_DEG = 57.296083 + speed = sqrt(u * u + v * v) + dir = atan2(u, v) * RAD_TO_DEG + while dir < 0.0: + dir = dir + 360.0 + while dir >= 360.0: + dir = dir - 360.0 + #print "Speed, dir ", speed, dir + return (speed, dir) + + def setProgressPercentage(self, percentage): + self.__percentCompleted = percentage + + def progressMessage(self, fraction, percent, message): + percent = int(fraction * percent) + self.__percentCompleted = int(self.__percentCompleted + percent) + print("Progress: " + repr(self.__percentCompleted) + "% " + message) + + def getParmID(self, parmNameAndLevel, databaseID): + index = string.find(parmNameAndLevel, "_") + if index == -1: + name = parmNameAndLevel + level = "SFC" + parm = ParmID(name,databaseID,level) + else: + name = parmNameAndLevel[0:index] + level = parmNameAndLevel[index+1:] + parm = ParmID(name,databaseID,level) + return parm + + def nlValue(self, nlValue, lookupValue): + # Apply a 
non-linear value to the given value + # nlValue might be a dictionary to be applied to value + # OR it could be a simple constant + if type(nlValue) is dict: + # Dictionary lookup + result = None + if 'default' in nlValue: + result = nlValue['default'] + dictkeys = list(nlValue.keys()) + for key in dictkeys: + if((lookupValue >= key[0]) and (lookupValue < key[1])): + return nlValue[key] + if result is None: + msgString = """ILLEGAL NON-LINEAR THRESHOLD dictionary. + No dictionary entry for value: """ + repr(lookupValue) + """ + Make sure your non-linear threshold dictionaries do not + have "gaps" in the ranges. For example, your dictionary + should look like this: + + def maximum_range_nlValue_dict(self, tree, node): + ### ConfigVariables + # Maximum range to be reported within a phrase + # e.g. 5 to 10 mph + # Units depend on the product + dict = TextRules.TextRules.maximum_range_nlValue_dict(self,tree, node) + dict["Wind"] = { + (0, 5) : 0, + (5, 13) : 5, + (13, 28) : 10, + "default" : 15, + } + return dict + + NOT this: + + def maximum_range_nlValue_dict(self, tree, node): + ### ConfigVariables + # Maximum range to be reported within a phrase + # e.g. 
5 to 10 mph + # Units depend on the product + dict = TextRules.TextRules.maximum_range_nlValue_dict(self,tree, node) + dict["Wind"] = { + (0, 4) : 0, + (5, 12) : 5, + (13, 27) : 10, + "default" : 15, + } + return dict + """ + raise ValueError(msgString) + return result + elif type(nlValue) is types.MethodType: + return nlValue(lookupValue) + else: + # Constant value + return nlValue + + def roundValue(self, value, roundingMethod, mode, increment_nlValue, maxFlag=0): + nlIncrement = self.nlValue(increment_nlValue, value) + if type(roundingMethod) is types.MethodType: + return roundingMethod(value, mode, nlIncrement, maxFlag) + else: + return self.round(value, mode, nlIncrement) + + + def getRangeInfo(self, tree, node, elementName): + rangeThreshold_nlValue = self.range_nlValue(tree, node, elementName, elementName) + rangeBias_nlValue = self.range_bias_nlValue(tree, node, elementName, elementName) + minRange_nlValue = self.minimum_range_nlValue(tree, node, elementName, elementName) + minBias_nlValue = self.minimum_range_bias_nlValue(tree, node, elementName, elementName) + maxRange_nlValue = self.maximum_range_nlValue(tree, node, elementName, elementName) + maxBias_nlValue = self.maximum_range_bias_nlValue(tree, node, elementName, elementName) + increment_nlValue = self.increment_nlValue(tree, node, elementName, elementName) + null_nlValue = self.null_nlValue(tree, node, elementName, elementName) + return self.RangeInfo(rangeThreshold_nlValue, rangeBias_nlValue, minRange_nlValue, minBias_nlValue, + maxRange_nlValue, maxBias_nlValue, increment_nlValue, null_nlValue) + + def applyRanges(self, tree, node, min, max, elementName): + rangeInfo = self.getRangeInfo(tree, node, elementName) + return self.applyRangeValues(tree, node, min, max, elementName, rangeInfo) + + def applyRangeValues(self, tree, node, min, max, elementName, rangeInfo): + avg = self.average(float(min),max) + diff = abs(max - min) + # If the range is not great enough, return as a single value + if 
rangeInfo.rangeThreshold_nlValue != "": + threshold = self.nlValue(rangeInfo.rangeThreshold_nlValue, avg) + if diff < threshold: + bias = self.nlValue(rangeInfo.rangeBias_nlValue, avg) + if bias == "Average": + avg = self.roundStatistic(tree, node, avg, elementName) + return avg, avg + elif bias == "Max": + return max, max + else: + return min, min + # Apply minimum range + if rangeInfo.minRange_nlValue != "": + minRange = self.nlValue(rangeInfo.minRange_nlValue, avg) + if diff < minRange: + min, max = self.applyBias( + tree, node, elementName, min, max, + rangeInfo.minBias_nlValue, avg, minRange, + rangeInfo.increment_nlValue) + # Apply maximum range + if rangeInfo.maxRange_nlValue != "": + maxRange = self.nlValue(rangeInfo.maxRange_nlValue, avg) + if diff > maxRange: + min, max = self.applyBias( + tree, node, elementName, min, max, + rangeInfo.maxBias_nlValue, avg, maxRange, + rangeInfo.increment_nlValue) + # Cut-off at null_nlValue if max > null_nlValue and min < null_nlValue + threshold = self.nlValue(rangeInfo.null_nlValue, max) + if min > 0 and max >= threshold and min < threshold: + #print "cut-off", min, max, threshold, elementName + roundingMethod = self.rounding_method(tree, node, elementName, elementName) + nlIncrement = self.nlValue(self.increment_nlValue( + tree, node, elementName, elementName), threshold) + min = self.roundValue(threshold, roundingMethod, "RoundUp", nlIncrement) + if min < threshold: + min = threshold + #print " new min", min + return min, max + + def applyBias(self, tree, node, elementName, min, max, bias_nlValue, + avg, rangeValue, increment_nlValue): + bias = self.nlValue(bias_nlValue, avg) + #print "applying bias", min, max, elementName, bias, rangeValue + if bias == "Average": + inc = rangeValue/2.0 + min = self.roundStatistic(tree, node, avg - inc, elementName) + max = self.roundStatistic(tree, node, avg + inc, elementName) + ## If ranges are being applied to values that + ## span zero, you can end up with a max-min being 
greater than + ## rangeValue because Python rounds away from zero. (-0.5 rounds to + ## -1.0, not 0.0.) The test below checks for this and adds + ## 1 back to the min. + if max-min > rangeValue: + min += 1 + else: + increment = self.nlValue(increment_nlValue, avg) + rangeValue = self.round(rangeValue, "Nearest", increment) + minAllowedValue, maxAllowedValue = tree.library.getLimits(elementName) + if bias == "Max": + min = max - rangeValue + else: + max = min + rangeValue + if max > maxAllowedValue: + max = maxAllowedValue + if min < minAllowedValue: + min = minAllowedValue + #print " returning new", min, max + return min, max + + ####################################################### + # Statistics manipulation + + def SCALAR(self): + return 0 + def MAGNITUDE(self): + return 1 + def DIRECTION(self): + return 2 + def VECTOR(self): + return 3 + def VECTOR_TEXT(self): + return 4 + def VECTOR_NUM(self): + return 5 + def WEATHER(self): + return 6 + def DISCRETE(self): + return 7 + + def getValue(self, stats, method="Average", dataType=None): + # "stats" is either a single value or a tuple of 2 values + # method is any mergeMethod + + if dataType == self.VECTOR(): + mag, dir = stats + mag = self.getValue(mag, method) + return (mag, dir) + + if isinstance(stats, tuple): + if method == "Max": + return stats[1] + elif method == "Min": + return stats[0] + elif method == "Sum": + return stats[0] + stats[1] + elif method == "MinMax": + return stats + else: + if stats[0] is None or stats[1] is None: + return None + return self.average(stats[0], stats[1]) + else: + if method == "MinMax": + return (stats, stats) + else: + return stats + + def makeSubkeyList(self, weatherKey): + # Make sure subkeyList is a true list + length = len(weatherKey) + newList = [] + index = 0 + for subkey in weatherKey: + newList.append(subkey) + index = index + 1 + if index >= length: + break + return newList + + def storeAWIPS(self, product, AWIPSkey="", host=None): + # Stores text in string "product" 
into + # the AWIPS text database via the given host if host is defined using + # ssh technique. Otherwise uses the AWIPS textdb command directly. + # Note: for the ssh mode, you need to have an entry in the .rhosts + # file of your home directory on lx1 + # + + if AWIPSkey == "": + return # do nothing + + # use the command directly - assumes FXA environment setup + if host is None: + # (code adopted from Paul Jendrowski 9/18/03) + # set path to textdb command + cmd = "gfetextdb -w " + AWIPSkey + # issue the command + db = os.popen(cmd, 'w') + db.write(product) + db.close() + + # use ssh (or rsh) to communicate with the textdb + else: + try: + command= "ssh " + host + " 'textdb -w " + AWIPSkey +"'" + except: + command= "rsh " + host + " 'textdb -w " + AWIPSkey +"'" + saveProduct = os.popen(command,'w') + saveProduct.write(product) + saveProduct.close() + + def getPreviousProduct(self, productID, searchString="", version=0): + # gets a previous product from the AWIPS database + + from com.raytheon.viz.gfe.core import DataManagerUIFactory + from com.raytheon.viz.gfe.product import TextDBUtil + + # Redmine #17120 - return to pre-DR 15703 behavior. + opMode = DataManagerUIFactory.getCurrentInstance().getOpMode().name() == "OPERATIONAL" + previousProduct = TextDBUtil.retrieveProduct(productID, version, opMode) + previousProduct = string.strip(previousProduct) + + if searchString != "": + # extract the specified section + section = re.sub(r'^[=, A-Za-z0-9\-\n\./]+' + + searchString + r'[=, A-Za-z0-9\-\n\/]*\.\.\.\n' + + r'*([=, A-Za-z0-9\-\n\./]+)\$\$[=, A-Za-z0-9\-\n' + + r'\.\$@/]+', r'\1', previousProduct) + return section + else: + return previousProduct + + def formatTimeString(self, gmTime, format, newTimeZone=None): + # converts the specified time (in seconds) to the specified time zone + # the time returned is in local time in units of seconds. 
+ # newTimeZone must be an accepted time zone identifier such as + # "EST5EDT", CST6CDT", "MST7MDT", "PST8PST", "AST9ADT" "HST10" + myTimeZone = os.environ["TZ"] # save the defined time zone + if newTimeZone is None: + newTimeZone = myTimeZone + os.environ["TZ"] = newTimeZone # set the new time zone + time.tzset() + timeZoneStr = time.strftime(format, time.localtime(gmTime)) + os.environ["TZ"] = myTimeZone # set the time zone back + time.tzset() + return timeZoneStr # return the time as a string + + # Adopted from ER 8/04 + def debug_print(self, msg="", trace=0, limit=10): + """ZFP_ER_Overrides addition of ZFP_ER_Overrides._debug_print. + + ER method for generic debug prints switched on/off by self._debug. + Automatically prints calling method's name, file, class and line + number plus an optional message string. + + (e.g. self.debug_print('Debug message') + + If the 'trace' flag is set to 1, the Python traceback info will not be + displayed. This is useful for displaying multiple formatted DEBUG + messages. + + (e.g. self.debug_print('2nd Debug message', 1) + """ + + # This method requires: import traceback + import traceback + + # If debug is set up as a dictionary, then you could turn on + # method specific printing. 
+ # Definition["debug"] = {"_myMeth1":1, "_myMeth2":0} + + # Try to get traceback info - if debug flag is defined and not off + try: + if self._debug: + + # Get debug info + file, lineno, name, text = traceback.extract_stack()[-2] + else: + return # bail now - turned off + except: + return # bail now - not defined + + # Define counter to track number of times this method has been printed + count = 0 + + # See if debug counter dictionary has been defined + try: + if type(self._debugDict) == type({}): + pass + except: + self._debugDict = {} + + # See if method-specific printing is being used + if type(self._debug) == type({}): + + # If this method has a method specific flag - get it + if name in self._debug: + flag = self._debug[name] + else: + flag = 0 # not specified - don't display + else: + flag = self._debug + + # If debug flag is turned off + if not flag: + return # bail out now + + # Track number of times this method has been displayed + if name in self._debugDict: + count = self._debugDict[name] + 1 + else: + count = 1 + + # If debug message limit is not reached + if count <= limit: + + # If this is not a continuation debug message + if trace == 0: + + # Record the printing of this message + self._debugDict[name] = count + + # Print the traceback message + print("DEBUG: %s in %s at line %d" % (name, file, lineno)) + print("DEBUG: Class = %s %d\n\n" % (self.__class__, count)) + #print "Super classes:",self.__class__.__bases__ + + # If there is a message, print that too + if msg != "": + print('\t%s' % (msg)) + + class RangeInfo: + def __init__(self,rangeThreshold_nlValue, rangeBias_nlValue, + minRange_nlValue, minBias_nlValue, + maxRange_nlValue, maxBias_nlValue, increment_nlValue, + null_nlValue): + self.rangeThreshold_nlValue = rangeThreshold_nlValue + self.rangeBias_nlValue = rangeBias_nlValue + self.minRange_nlValue = minRange_nlValue + self.minBias_nlValue = minBias_nlValue + self.maxRange_nlValue = maxRange_nlValue + self.maxBias_nlValue = 
maxBias_nlValue + self.increment_nlValue = increment_nlValue + self.null_nlValue = null_nlValue + + def getSiteInfo(self, infoType, siteID): + # Get information about an NWS site given the 3-letter site id + # infoType can be: "region", "wfoCity", "wfoCityState", "fullStationID + return SiteInfo.SiteInfo().getInfo(infoType, siteID) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TimeDescriptor.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TimeDescriptor.py index b8a9b5df10..f06059ba54 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TimeDescriptor.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TimeDescriptor.py @@ -1,750 +1,750 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TimeDescriptor.py -# Methods for producing time descriptors. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import types -import time, string -import TimeRangeUtils -import Interfaces -import Holidays -import AbsTime -import TimeRange - -class TimeDescriptor(TimeRangeUtils.TimeRangeUtils, Interfaces.Interfaces): - def __init__(self): - Interfaces.Interfaces.__init__(self) - TimeRangeUtils.TimeRangeUtils.__init__(self) - -## def getHolidayLabel(self, tr): -## return "holiday" - - def getCurrentTime(self, argDict=None, format="%I%M %p %Z %a %b %d %Y", - shiftToLocal=1, upperCase=0, stripLeading=1): - # Return a text string of the current time in the given format - if argDict is not None and argDict.has_key("creationTime"): - ctime = argDict['creationTime'] - else: - ctime = time.time() - if shiftToLocal == 1: - curTime = time.localtime(ctime) - else: - curTime = time.gmtime(ctime) - localTime = time.localtime(ctime) - zoneName = time.strftime("%Z",localTime) - timeStr = time.strftime(format, curTime) - if shiftToLocal == 0: - timeStr = string.replace(timeStr, zoneName, "GMT") - if stripLeading==1 and (timeStr[0] == "0" or timeStr[0] == " "): - timeStr = timeStr[1:] - if argDict is None: - language = "english" - else: - language = argDict["language"] - timeStr = self.translateExpr(timeStr, language) - if upperCase == 1: - timeStr = string.upper(timeStr) - timeStr = string.replace(timeStr, " ", " ") - return timeStr - - def getIssueTime(self, argDict, upperCase=0): - # Return the issue time formatted and translated - # issueRange is set up as the first time range of the - # product. 
- timeRange = argDict["issueRange"] - timeStr = timeRange.startTime().string() - timeStr = self.translateExpr(timeStr,argDict["language"]) - if upperCase == 1: - timeStr = string.upper(timeStr) - return timeStr - - def getWeekday(self, timeRange, holidays=0, shiftToLocal=0, - labelType="Worded", today=0, tomorrow=0, holidayModule="Holidays", - nextDay24HourLabel=0, splitDay24HourLabel=0): - # Check for long time ranges - if timeRange.duration() > 24 * 3600: - # If splitDay24HourLabel is not set, we need to make the - # timeRanges 24 hours long so that getWeekday_descriptor - # will honor the splitDay24HourLabel setting and leave - # weekdays as simply the weekday name e.g. - # Saturday instead of Saturday and Saturday night - if splitDay24HourLabel: - # Make 12-hour start and end timeRanges - durHours1 = 12 - durHours2 = 0 - else: - # Make 24-hour start and end timeRanges - durHours1 = 24 - durHours2 = 12 - startTR = TimeRange.TimeRange(timeRange.startTime(), - timeRange.startTime() + (durHours1 * 3600)) - endTR = TimeRange.TimeRange(timeRange.endTime() - (12 * 3600), - timeRange.endTime() + (durHours2*3600)) - startDay = self.getWeekday_descriptor(startTR, holidays, shiftToLocal, - "Combo", today, tomorrow, holidayModule, nextDay24HourLabel) - endDay = self.getWeekday_descriptor(endTR, holidays, shiftToLocal, - "Capital", today, tomorrow, holidayModule, nextDay24HourLabel) - s = startDay + " through " + endDay - return s.upper() - else: # do the normal thing - return self.getWeekday_descriptor(timeRange, holidays, shiftToLocal, - labelType, today, tomorrow, holidayModule, - nextDay24HourLabel, splitDay24HourLabel) - - def getWeekday_descriptor(self, timeRange, holidays=0, shiftToLocal=0, - labelType="Worded", today=0, tomorrow=0, - holidayModule="Holidays", nextDay24HourLabel=0, - splitDay24HourLabel=0): - # Return a weekday text string - # Arguments: - # timeRange - # holidays : if 1, the holiday file will be consulted - # shiftToLocal : if 1, will shift the 
given time range to local time - # labelType : See Labels dictionary below for types - # today : if 1, Today, Tonight, - # will be returned instead of corresponding weekday name - # tomorrow: if 1, Tomorrow, Tomorrow Night - # will be returned instead of corresponding weekday name - # holidayModule: file containing holiday dates - # nextDay24HourLabel: if 1, a 24-hour time period starting - # after 1600, will be labeled as the next day. - # This is to accommodate 24 extended periods that go from - # 6pm-6pm. - # splitDay24HourLabel: if 0, a 24-hour period will be labeled with - # simply the weekday name (e.g. Saturday) - # instead of including the day and night periods - # (e.g. Saturday and Saturday night) - # - # If the time range is for today AND is less than 12 hours, - # we must accurately describe the period. - # At some point, this may need to be coordinated with TextRules - # timePeriod descriptors. - - currentLocalTime, shift = self.determineTimeShift() - if shiftToLocal == 1: - timeRange = TimeRange.TimeRange(timeRange.startTime() + shift, - timeRange.endTime() + shift) - labels = self.Labels()[labelType] - pre = labels["PrePunctuation"] - post = labels["PostPunctuation"] - todayFlag = currentLocalTime.day == timeRange.startTime().day - try: - if self._productIssuance == "Next Day": - todayFlag = currentLocalTime.day + 1 == timeRange.startTime().day - except: - pass - startHour = timeRange.startTime().hour - dayNight = self.getPeriod(timeRange) - durationHours = timeRange.duration() / 3600 - if nextDay24HourLabel: - nextDay24Hour = durationHours >= 24 and timeRange.startTime().hour > 16 - else: - nextDay24Hour = 0 - splitDay24Hour = splitDay24HourLabel and durationHours == 24 - # Do not say "Night" if: - # startHour is between midnight self.DAY (e.g. 
6 am) - nightTimeFlag = durationHours <= 12 and dayNight == self.NIGHTTIME() \ - and startHour > self.DAY() - - # Check for holiday - if not splitDay24Hour and not todayFlag and holidays == 1 and \ - (dayNight == self.DAYTIME() or dayNight == self.DAYNIGHT()): - - if nextDay24Hour == 1: - label = Holidays.getHolidayLabel(timeRange.endTime()) - else: - label = Holidays.getHolidayLabel(timeRange.startTime()) - - if label != "": - if labelType == "CapitalWithPeriod": - label = label.upper() - - return pre + label + post - - # Check for today or tonight - if today == 1: - if todayFlag: - if dayNight == self.DAYNIGHT(): - # Key off of end time - keyRange = TimeRange.TimeRange( - timeRange.endTime()-3600, timeRange.endTime()) - dayNight = self.getPeriod(keyRange) - #if durationHours == 1: - # label = labels["Now"] - if durationHours < 12 and durationHours > 1: - if dayNight == self.DAYTIME(): - label = labels["Rest of Today"] - else: - label = labels["Rest of Tonight"] - elif dayNight == self.NIGHTTIME(): - label = labels["Tonight"] - else: - label = labels["Today"] - return pre + label + post - - # Check for tomorrow or tomorrow night - if tomorrow == 1: - startTime = timeRange.startTime() - 24*3600 - if startTime.day == currentLocalTime.day: - if durationHours == 1: - label = timeRange.startTime().string() + ": " - elif nightTimeFlag: - label = labels["Tomorrow"] + " " + labels["Night"] - else: - label = labels["Tomorrow"] - try: - if self._productIssuance == "Next Day": - label = "Tonight" - except: - pass - return pre + label + post - - # Week day - weekdayName = labels["Weekday"][timeRange.startTime().weekday()] - if durationHours == 1: - label = self.timeDisplay(timeRange, "Zulu", "", "", "%I %p") - if label[0] == "0": - label = label[1:] - label = label + " " + weekdayName - # Check for Night and Evening - elif nightTimeFlag: - if durationHours <= 6: - label = weekdayName + " " + labels["Evening"] - else: - label = weekdayName + " " + labels["Night"] - elif 
nextDay24Hour == 1: - # If we have a 24 hour time period starting late in the day, - # use the next day as the label. - label = labels["Weekday"][timeRange.endTime().weekday()] - elif splitDay24Hour: - # See if we are starting with a night or day period - weekNight = weekdayName + " " + labels["Night"] - if weekdayName[-1].isupper(): - connector = " AND " - else: - connector = " and " - - if startHour < self.NIGHT(): - # Monday and Monday Night OR - # Labor Day and Monday Night - weekDayHoliday = self.getHolidayLabel(timeRange.startTime(), holidays) - if weekDayHoliday != "": - weekdayName = weekDayHoliday - label = weekdayName + connector + weekNight - else: - # Sunday Night and Monday OR - # Sunday Night and Labor Day - nextWeekdayName = self.getHolidayLabel(timeRange.endTime(), holidays) - if nextWeekdayName == "": - nextWeekdayName = labels["Weekday"][timeRange.endTime().weekday()] - label = weekNight + connector + nextWeekdayName - else: - label = weekdayName - # Check for Evening - - return pre + label + post - - def getHolidayLabel(self, absTime, holidays): - if holidays: - return Holidays.getHolidayLabel(absTime) - else: - return "" - - def getPeriodLabel(self, range, currentLocalTime, shift, labelType, - holidays=0): - # Do a worded label, e.g. Now, Today, Tonight, Tuesday... etc. 
- # Compare range (in GMT time) to current local time - - label = self.getWeekday(range, holidays, 1, labelType) - return label - - def getLocalWeekday(self, timeRange): - return self.getWeekday(timeRange, holidays=1, shiftToLocal=1, - labelType="SimpleWorded", today=1, tomorrow=0) - - def getLocalWeekdayName(self, timeRange): - return self.getWeekday(timeRange, holidays=1, shiftToLocal=1, - labelType="SimpleWorded") - - def Labels(self): - return { - "SimpleWorded": { - "PrePunctuation": "", - "PostPunctuation": "", - "Weekday" : { - 6 : "Sunday", - 0 : "Monday", - 1 : "Tuesday", - 2 : "Wednesday", - 3 : "Thursday", - 4 : "Friday", - 5 : "Saturday" - }, - "Now": "now", - "Today":"today", - "Tonight": "tonight", - "Rest of Today":"the rest of today", - "Rest of Tonight": "the rest of tonight", - "Night": "night", - "Evening": "evening", - "Afternoon": "this afternoon", - }, - "Worded": { - "PrePunctuation": "", - "PostPunctuation": ": ", - "Weekday" : { - 6 : "Sunday", - 0 : "Monday", - 1 : "Tuesday", - 2 : "Wednesday", - 3 : "Thursday", - 4 : "Friday", - 5 : "Saturday" - }, - "Now": "Now", - "Today":"Today", - "Tonight": "Tonight", - "Rest of Today":"Rest of Today", - "Rest of Tonight": "Rest of Tonight", - "Night": "Night", - "Evening": "Evening", - "Afternoon": "This Afternoon", - }, - "Capital": { - "PrePunctuation": "", - "PostPunctuation": "...", - "Weekday" : { - 6 : "Sunday", - 0 : "Monday", - 1 : "Tuesday", - 2 : "Wednesday", - 3 : "Thursday", - 4 : "Friday", - 5 : "Saturday" - }, - "Now": "Now", - "Today":"Today", - "Tonight": "Tonight", - "Rest of Today":"Rest of Today", - "Rest of Tonight": "Rest of Tonight", - "Night": "Night", - "Evening": "Evening", - "Afternoon": "This Afternoon", - }, - "CapitalWithPeriod": { - "PrePunctuation": ".", - "PostPunctuation": "...", - "Weekday" : { - 6 : "SUNDAY", - 0 : "MONDAY", - 1 : "TUESDAY", - 2 : "WEDNESDAY", - 3 : "THURSDAY", - 4 : "FRIDAY", - 5 : "SATURDAY" - }, - "Now": "NOW", - "Today":"TODAY", - "Tonight": 
"TONIGHT", - "Rest of Today":"REST OF TODAY", - "Rest of Tonight": "REST OF TONIGHT", - "Night": "NIGHT", - "Evening": "EVENING", - "Afternoon": "THIS AFTERNOON", - }, - "Abbreviated": { - "PrePunctuation": "", - "PostPunctuation": "", - "Weekday" : { - 6 : "Sun", - 0 : "Mon", - 1 : "Tue", - 2 : "Wed", - 3 : "Thu", - 4 : "Fri", - 5 : "Sat" - }, - "Now": "now", - "Today":"today", - "Tonight": "tonight", - "Rest of Today":"rest of today", - "Rest of Tonight": "rest of tonight", - "Night": "night", - "Evening": "evening", - "Afternoon": "this afternoon", - }, - "CapsAbbreviated": { - "PrePunctuation": "", - "PostPunctuation": "", - "Weekday" : { - 6 : "Sun", - 0 : "Mon", - 1 : "Tue", - 2 : "Wed", - 3 : "Thu", - 4 : "Fri", - 5 : "Sat" - }, - "Now": "Now", - "Today":"Today", - "Tonight": "Tonight", - "Rest of Today":"Rest of today", - "Rest of Tonight": "Rest of tonight", - "Night": "Night", - "Evening": "Evening", - "Afternoon": "This afternoon", - }, - "Combo": { - "PrePunctuation": ".", - "PostPunctuation": "", - "Weekday" : { - 6 : "Sunday", - 0 : "Monday", - 1 : "Tuesday", - 2 : "Wednesday", - 3 : "Thursday", - 4 : "Friday", - 5 : "Saturday" - }, - "Now": "Now", - "Today":"Today", - "Tonight": "Tonight", - "Rest of Today":"Rest of Today", - "Rest of Tonight": "Rest of Tonight", - "Night": "Night", - "Evening": "Evening", - "Afternoon": "This Afternoon", - } - } - - def timePeriod_descriptor_list(self, tree, node): - # Contains definition for localtime start/end times and phrase - # Tuples, 0=startHrLT, 1=endHrLT, 2=phrase - day = self.DAY() - return [ - (day, (day+3)%24, "early in the morning"), # 6a-9a - (day, (day+6)%24, "in the morning"), # 6a-noon - (day, (day+9)%24, "until late afternoon"), # 6a-3p - (day, (day+12)%24, ""), # 6a-6p - (day, (day+15)%24, "until early evening"), # 6a-9p - (day, (day+18)%24, "through the evening"), # 6a-midnite - - ((day+2)%24, (day+3)%24, "early in the morning"), # 8a-9a - - ((day+3)%24, (day+6)%24, "late in the morning"), # 
9a-noon - ((day+3)%24, (day+9)%24, "in the late morning and early afternoon"), # 9a-3p - ((day+3)%24, (day+12)%24, "in the late morning and afternoon"), # 9a-6p - ((day+3)%24, (day+15)%24, "until early evening"), # 9a-9p - ((day+3)%24, (day+18)%24, "through the evening"), # 9a-midnite - - ((day+5)%24, (day+6)%24, "late in the morning"), # 11a-noon - - ((day+6)%24, (day+9)%24, "early in the afternoon"), # noon-3p - ((day+6)%24, (day+12)%24, "in the afternoon"), # noon-6p - ((day+6)%24, (day+15)%24, "in the afternoon and evening"),# noon-9p - ((day+6)%24, (day+18)%24, "in the afternoon and evening"),# noon-midnite - - ((day+8)%24, (day+9)%24, "early in the afternoon"), # 2pm-3pm - - ((day+9)%24, (day+12)%24, self.lateDay_descriptor), # 3p-6p - ((day+9)%24, (day+15)%24, "early in the evening"), # 3p-9p - ((day+9)%24, (day+18)%24, "in the evening"), # 3p-midnite - ((day+9)%24, (day+21)%24, "until early morning"), # 3p-3a - ((day+9)%24, day, ""), # 3p-6a - - ((day+11)%24, (day+12)%24, self.lateDay_descriptor), # 5p-6p - - ((day+12)%24, (day+15)%24, "early in the evening"), # 6p-9p - ((day+12)%24, (day+18)%24, "in the evening"), # 6p-midnite - ((day+12)%24, (day+21)%24, "until early morning"), # 6p-3a - ((day+12)%24, day, ""), # 6p-6a - - ((day+14)%24, (day+15)%24, "early in the evening"), # 8p-9p - - ((day+15)%24, (day+18)%24, "late in the evening"), # 9p-midnite - ((day+15)%24, (day+21)%24, "in the late evening and early morning"),# 9p-3a - ((day+15)%24, day, "in the late evening and overnight"), # 9p-6a - - ((day+17)%24, (day+18)%24, "late in the evening"), # 11p-midnight - - ((day+18)%24, (day+21)%24, "after midnight"), # midnite-3a - ((day+18)%24, day, "after midnight"), # midnite-6a - ((day+18)%24, (day+6)%24, ""), # midnite-noon - - ((day+20)%24, (day+21)%24, "after midnight"), # 2a-3a - - ((day+21)%24, day, self.lateNight_descriptor), # 3a-6a - ((day+21)%24, (day+3)%24, "early in the morning"), # 3a-9a - ((day+21)%24, (day+6)%24, "early in the morning"), # 
3a-noon - ((day+21)%24, (day+9)%24, "until afternoon"), # 3a-3p - ((day+21)%24, (day+12)%24, ""), # 3a-6p - - ((day+23)%24, (day)%24, self.lateNight_descriptor), # 5a-6a - - ] - - def lateDay_descriptor(self, tree, node, timeRange): - return "late in the afternoon" - - def lateNight_descriptor(self, tree, node, timeRange): - return "early in the morning" - - def include_timePeriod_descriptor_flag( - self, tree, node, statsByRange, index, element): - # For "range" phrases, tells whether or not to include the time_descriptor - # i.e "North winds 10-15 knots becoming 20-30 knots in the afternoon." - # "index" tells which sub-phrase of the phrase we're qualifying, and the default - # is to qualify every other sub-phrase. - # - # Default only applies to Wind and reports "every other" sub-phrase - if element != "Wind": - return 1 - flag = 0 - odd = len(statsByRange)%2 - # If there is an odd number of sub-phrases, - # report on 1st sub-phrase and then every other one - if odd == 1: - if index%2 == 0: - flag = 1 - # If there is an even number of sub-phrases, - # report on 2nd sub-phrase and then every other one - elif index%2 == 1: - flag = 1 - return flag - - # Time period Table Access - def timePeriod_descriptor(self, tree, node, timeRange): - # Returns a descriptor phrase for the time range. - # Assumes the timeRange is in GMT and converts it to Local time. 
- - # more than 12 hours, return empty string - if timeRange.duration() > 12*3600: - return "" - if timeRange == node.getTimeRange(): - return "" - - # determine the local time - localTime, shift = self.determineTimeShift() - periodStart = timeRange.startTime() + shift - periodEnd = timeRange.endTime() + shift - startHour = periodStart.hour - endHour = periodEnd.hour - - # get the table - table = self.timePeriod_descriptor_list(tree, node) - - # look for the best match entry, start with the startTime match - bestIndexes = [] - bestTime = 9999 - for i in xrange(len(table)): - diff = self.hourDiff(startHour, table[i][0]) - if diff < bestTime: - bestTime = diff - for i in xrange(len(table)): - diff = self.hourDiff(startHour, table[i][0]) - if diff == bestTime: - bestIndexes.append(table[i]) - - # if nothing found, return "" string - if len(bestIndexes) == 0: - return "" - - # now find the best match for the ending time, from the ones earlier - bestTime = 9999 - returnValue = '' - for i in xrange(len(bestIndexes)): - diff = self.hourDiff(endHour, bestIndexes[i][1]) - if diff < bestTime: - returnValue = bestIndexes[i][2] - bestTime = diff - - if type(returnValue) is types.MethodType: - return returnValue(tree, node, timeRange) - else: - return returnValue - - def hourDiff(self, h1, h2): - # returns the number of hours difference - h = abs(h1-h2) - if h > 12: - h = 24 - h - return h - - def dayDict(self): - return {0:"SUN",1:"MON",2:"TUE",3:"WED",4:"THU",5:"FRI",6:"SAT"} - def monthDict(self): - return {1:"JAN",2:"FEB",3:"MAR",4:"APR",5:"MAY",6:"JUN", - 7:"JUL", 8:"AUG",9:"SEP",10:"OCT",11:"NOV",12:"DEC"} - def monthDict1(self): - return {1:"Jan",2:"Feb",3:"Mar",4:"Apr",5:"May",6:"Jun",7:"Jul", - 8:"Aug",9:"Sep",10:"Oct",11:"Nov",12:"Dec"} - - #******************** - # Methods for formatting TimeRange labels - - def timeRangeLabel(self, timeRange): - # Return the label for the given time range - start = timeRange.startTime() - end = timeRange.endTime() - return 
start.string() + " - " + end.string() - - def hourAmPm(self, hour): - # Given a military time hour, return - # Non-military time plus AM or PM - if hour == 0: - hour = 12 - ampm = " AM" - elif hour == 12: - ampm = " PM" - elif hour < 12: - ampm = " AM" - else: - hour = hour - 12 - ampm = " PM" - return hour, ampm - - def periodLabel(self, timeRange): - # Return the label for the given time range - start = timeRange.startTime() - hourStr = `start.hour`+"Z/"+`start.day` - return hourStr - - def localTimeRangeLabel(self, timeRange): - # Return the label for the given time range - localTime, shift = self.determineTimeShift() - localStart = timeRange.startTime() + shift - localEnd = timeRange.endTime() + shift - localRange = TimeRange.TimeRange(localStart, localEnd) - - # Determine name of time zone (e.g. MST, MDT...) - localTime = time.localtime(time.time()) - zoneName = time.strftime("%Z",localTime) - - # Create label - localStr = self.timeRangeLabel(localRange) - localStr = string.replace(localStr, "GMT", zoneName) - return localStr - - def localOneHourTRLabel(self, timeRange): - # Return label in form: Month Day, Year hour - hour - # E.g. Sept 30, 2000 1 AM - 2 AM - localTime, shift = self.determineTimeShift() - start = timeRange.startTime() + shift - localTime = time.localtime(time.time()) - zoneName = time.strftime("%Z",localTime) - monthDict = self.monthDict() - str = monthDict[start.month] + " " + `start.day` + ", " + `start.year` - hour1, ampm1 = self.hourAmPm(start.hour) - hour2, ampm2 = self.hourAmPm(start.hour+1) - return str + " " + `hour1` + ampm1 + " " + zoneName + " - " + \ - `hour2` + ampm2 + " " + zoneName - - def localTRLabel(self, timeRange): - # Return label in form - # E.g. 
Sept 30, 1 AM MST TO Oct 1, 2 AM MST - localTime, shift = self.determineTimeShift() - start = timeRange.startTime() + shift - end = timeRange.endTime() + shift - localTime = time.localtime(time.time()) - zoneName = time.strftime("%Z",localTime) - hour1, ampm1 = self.hourAmPm(start.hour) - hour2, ampm2 = self.hourAmPm(end.hour) - monthDict1 = self.monthDict1() - str1 = `hour1` + ampm1 + " " + zoneName + " " + monthDict1[start.month] + " " + `start.day` - str2 = `hour2` + ampm2 + " " + zoneName + " " + monthDict1[end.month] + " " + `end.day` - return str1 + " TO " + str2 - - def localPeriodLabel(self, timeRange): - # Return the label for the given time range - - # Adjust time from GMT to local - localTime, shift = self.determineTimeShift() - start = timeRange.startTime() + shift - - # Determine name of time zone (e.g. MST, MDT...) - localTime = time.localtime(time.time()) - zoneName = time.strftime("%Z",localTime) - - # Create label - hourStr = `start.hour`+ " " + zoneName + "/"+`start.day` - return hourStr - - def localTimeLabel(self, timeRange): - # Return a label of length 6 the form: hour AM/PM - # E.g. 5 PM - return self.localHourLabel(timeRange.startTime(),6) - - def localRangeLabel(self, timeRange): - # Return label of range: 5AM-6PM - label1 = self.localHourLabel(timeRange.startTime(),4) - label2 = self.localHourLabel(timeRange.endTime(),4) - str = label1 + "-" + label2 - return string.replace(str," ","") - - def localHourLabel(self, absTime, length=6): - # Convert from gmt to local time - localTime, shift = self.determineTimeShift() - start = absTime + shift - hour = start.hour - hour, ampm = self.hourAmPm(hour) - label = `hour` + ampm - return string.rjust(label,length) - - def strToGMT(self, timeStr): - # Convert a time str in the form of local time - # hours and minutes to a GMT AbsTime for the current day. - # The time string must be 6 or 7 characters long - # E.g. 
"1030 AM" - # "800 PM" - - # Get the hour and minutes - length = len(timeStr) - amPm = timeStr[length-2:length] - minutes = timeStr[length-5:length-3] - minutes = int(minutes) - if length > 6: - hour = timeStr[0:2] - else: - hour = timeStr[0] - hour = int(hour) - if hour == 12: - if amPm == "AM": - hour = 0 - else: - if amPm == "PM": - hour = hour + 12 - # Make a local AbsTime - curLocalTime, shift = self.determineTimeShift() - newTime = AbsTime.absTimeYMD(curLocalTime.year, curLocalTime.month, - curLocalTime.day, hour, minutes) - # Convert to GMT - return newTime - shift +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TimeDescriptor.py +# Methods for producing time descriptors. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. 
+## + +import types +import time, string +import TimeRangeUtils +import Interfaces +import Holidays +import AbsTime +import TimeRange + +class TimeDescriptor(TimeRangeUtils.TimeRangeUtils, Interfaces.Interfaces): + def __init__(self): + Interfaces.Interfaces.__init__(self) + TimeRangeUtils.TimeRangeUtils.__init__(self) + +## def getHolidayLabel(self, tr): +## return "holiday" + + def getCurrentTime(self, argDict=None, format="%I%M %p %Z %a %b %d %Y", + shiftToLocal=1, upperCase=0, stripLeading=1): + # Return a text string of the current time in the given format + if argDict is not None and "creationTime" in argDict: + ctime = argDict['creationTime'] + else: + ctime = time.time() + if shiftToLocal == 1: + curTime = time.localtime(ctime) + else: + curTime = time.gmtime(ctime) + localTime = time.localtime(ctime) + zoneName = time.strftime("%Z",localTime) + timeStr = time.strftime(format, curTime) + if shiftToLocal == 0: + timeStr = string.replace(timeStr, zoneName, "GMT") + if stripLeading==1 and (timeStr[0] == "0" or timeStr[0] == " "): + timeStr = timeStr[1:] + if argDict is None: + language = "english" + else: + language = argDict["language"] + timeStr = self.translateExpr(timeStr, language) + if upperCase == 1: + timeStr = string.upper(timeStr) + timeStr = string.replace(timeStr, " ", " ") + return timeStr + + def getIssueTime(self, argDict, upperCase=0): + # Return the issue time formatted and translated + # issueRange is set up as the first time range of the + # product. 
+ timeRange = argDict["issueRange"] + timeStr = timeRange.startTime().string() + timeStr = self.translateExpr(timeStr,argDict["language"]) + if upperCase == 1: + timeStr = string.upper(timeStr) + return timeStr + + def getWeekday(self, timeRange, holidays=0, shiftToLocal=0, + labelType="Worded", today=0, tomorrow=0, holidayModule="Holidays", + nextDay24HourLabel=0, splitDay24HourLabel=0): + # Check for long time ranges + if timeRange.duration() > 24 * 3600: + # If splitDay24HourLabel is not set, we need to make the + # timeRanges 24 hours long so that getWeekday_descriptor + # will honor the splitDay24HourLabel setting and leave + # weekdays as simply the weekday name e.g. + # Saturday instead of Saturday and Saturday night + if splitDay24HourLabel: + # Make 12-hour start and end timeRanges + durHours1 = 12 + durHours2 = 0 + else: + # Make 24-hour start and end timeRanges + durHours1 = 24 + durHours2 = 12 + startTR = TimeRange.TimeRange(timeRange.startTime(), + timeRange.startTime() + (durHours1 * 3600)) + endTR = TimeRange.TimeRange(timeRange.endTime() - (12 * 3600), + timeRange.endTime() + (durHours2*3600)) + startDay = self.getWeekday_descriptor(startTR, holidays, shiftToLocal, + "Combo", today, tomorrow, holidayModule, nextDay24HourLabel) + endDay = self.getWeekday_descriptor(endTR, holidays, shiftToLocal, + "Capital", today, tomorrow, holidayModule, nextDay24HourLabel) + s = startDay + " through " + endDay + return s.upper() + else: # do the normal thing + return self.getWeekday_descriptor(timeRange, holidays, shiftToLocal, + labelType, today, tomorrow, holidayModule, + nextDay24HourLabel, splitDay24HourLabel) + + def getWeekday_descriptor(self, timeRange, holidays=0, shiftToLocal=0, + labelType="Worded", today=0, tomorrow=0, + holidayModule="Holidays", nextDay24HourLabel=0, + splitDay24HourLabel=0): + # Return a weekday text string + # Arguments: + # timeRange + # holidays : if 1, the holiday file will be consulted + # shiftToLocal : if 1, will shift the 
given time range to local time + # labelType : See Labels dictionary below for types + # today : if 1, Today, Tonight, + # will be returned instead of corresponding weekday name + # tomorrow: if 1, Tomorrow, Tomorrow Night + # will be returned instead of corresponding weekday name + # holidayModule: file containing holiday dates + # nextDay24HourLabel: if 1, a 24-hour time period starting + # after 1600, will be labeled as the next day. + # This is to accommodate 24 extended periods that go from + # 6pm-6pm. + # splitDay24HourLabel: if 0, a 24-hour period will be labeled with + # simply the weekday name (e.g. Saturday) + # instead of including the day and night periods + # (e.g. Saturday and Saturday night) + # + # If the time range is for today AND is less than 12 hours, + # we must accurately describe the period. + # At some point, this may need to be coordinated with TextRules + # timePeriod descriptors. + + currentLocalTime, shift = self.determineTimeShift() + if shiftToLocal == 1: + timeRange = TimeRange.TimeRange(timeRange.startTime() + shift, + timeRange.endTime() + shift) + labels = self.Labels()[labelType] + pre = labels["PrePunctuation"] + post = labels["PostPunctuation"] + todayFlag = currentLocalTime.day == timeRange.startTime().day + try: + if self._productIssuance == "Next Day": + todayFlag = currentLocalTime.day + 1 == timeRange.startTime().day + except: + pass + startHour = timeRange.startTime().hour + dayNight = self.getPeriod(timeRange) + durationHours = timeRange.duration() / 3600 + if nextDay24HourLabel: + nextDay24Hour = durationHours >= 24 and timeRange.startTime().hour > 16 + else: + nextDay24Hour = 0 + splitDay24Hour = splitDay24HourLabel and durationHours == 24 + # Do not say "Night" if: + # startHour is between midnight self.DAY (e.g. 
6 am) + nightTimeFlag = durationHours <= 12 and dayNight == self.NIGHTTIME() \ + and startHour > self.DAY() + + # Check for holiday + if not splitDay24Hour and not todayFlag and holidays == 1 and \ + (dayNight == self.DAYTIME() or dayNight == self.DAYNIGHT()): + + if nextDay24Hour == 1: + label = Holidays.getHolidayLabel(timeRange.endTime()) + else: + label = Holidays.getHolidayLabel(timeRange.startTime()) + + if label != "": + if labelType == "CapitalWithPeriod": + label = label.upper() + + return pre + label + post + + # Check for today or tonight + if today == 1: + if todayFlag: + if dayNight == self.DAYNIGHT(): + # Key off of end time + keyRange = TimeRange.TimeRange( + timeRange.endTime()-3600, timeRange.endTime()) + dayNight = self.getPeriod(keyRange) + #if durationHours == 1: + # label = labels["Now"] + if durationHours < 12 and durationHours > 1: + if dayNight == self.DAYTIME(): + label = labels["Rest of Today"] + else: + label = labels["Rest of Tonight"] + elif dayNight == self.NIGHTTIME(): + label = labels["Tonight"] + else: + label = labels["Today"] + return pre + label + post + + # Check for tomorrow or tomorrow night + if tomorrow == 1: + startTime = timeRange.startTime() - 24*3600 + if startTime.day == currentLocalTime.day: + if durationHours == 1: + label = timeRange.startTime().string() + ": " + elif nightTimeFlag: + label = labels["Tomorrow"] + " " + labels["Night"] + else: + label = labels["Tomorrow"] + try: + if self._productIssuance == "Next Day": + label = "Tonight" + except: + pass + return pre + label + post + + # Week day + weekdayName = labels["Weekday"][timeRange.startTime().weekday()] + if durationHours == 1: + label = self.timeDisplay(timeRange, "Zulu", "", "", "%I %p") + if label[0] == "0": + label = label[1:] + label = label + " " + weekdayName + # Check for Night and Evening + elif nightTimeFlag: + if durationHours <= 6: + label = weekdayName + " " + labels["Evening"] + else: + label = weekdayName + " " + labels["Night"] + elif 
nextDay24Hour == 1: + # If we have a 24 hour time period starting late in the day, + # use the next day as the label. + label = labels["Weekday"][timeRange.endTime().weekday()] + elif splitDay24Hour: + # See if we are starting with a night or day period + weekNight = weekdayName + " " + labels["Night"] + if weekdayName[-1].isupper(): + connector = " AND " + else: + connector = " and " + + if startHour < self.NIGHT(): + # Monday and Monday Night OR + # Labor Day and Monday Night + weekDayHoliday = self.getHolidayLabel(timeRange.startTime(), holidays) + if weekDayHoliday != "": + weekdayName = weekDayHoliday + label = weekdayName + connector + weekNight + else: + # Sunday Night and Monday OR + # Sunday Night and Labor Day + nextWeekdayName = self.getHolidayLabel(timeRange.endTime(), holidays) + if nextWeekdayName == "": + nextWeekdayName = labels["Weekday"][timeRange.endTime().weekday()] + label = weekNight + connector + nextWeekdayName + else: + label = weekdayName + # Check for Evening + + return pre + label + post + + def getHolidayLabel(self, absTime, holidays): + if holidays: + return Holidays.getHolidayLabel(absTime) + else: + return "" + + def getPeriodLabel(self, range, currentLocalTime, shift, labelType, + holidays=0): + # Do a worded label, e.g. Now, Today, Tonight, Tuesday... etc. 
+ # Compare range (in GMT time) to current local time + + label = self.getWeekday(range, holidays, 1, labelType) + return label + + def getLocalWeekday(self, timeRange): + return self.getWeekday(timeRange, holidays=1, shiftToLocal=1, + labelType="SimpleWorded", today=1, tomorrow=0) + + def getLocalWeekdayName(self, timeRange): + return self.getWeekday(timeRange, holidays=1, shiftToLocal=1, + labelType="SimpleWorded") + + def Labels(self): + return { + "SimpleWorded": { + "PrePunctuation": "", + "PostPunctuation": "", + "Weekday" : { + 6 : "Sunday", + 0 : "Monday", + 1 : "Tuesday", + 2 : "Wednesday", + 3 : "Thursday", + 4 : "Friday", + 5 : "Saturday" + }, + "Now": "now", + "Today":"today", + "Tonight": "tonight", + "Rest of Today":"the rest of today", + "Rest of Tonight": "the rest of tonight", + "Night": "night", + "Evening": "evening", + "Afternoon": "this afternoon", + }, + "Worded": { + "PrePunctuation": "", + "PostPunctuation": ": ", + "Weekday" : { + 6 : "Sunday", + 0 : "Monday", + 1 : "Tuesday", + 2 : "Wednesday", + 3 : "Thursday", + 4 : "Friday", + 5 : "Saturday" + }, + "Now": "Now", + "Today":"Today", + "Tonight": "Tonight", + "Rest of Today":"Rest of Today", + "Rest of Tonight": "Rest of Tonight", + "Night": "Night", + "Evening": "Evening", + "Afternoon": "This Afternoon", + }, + "Capital": { + "PrePunctuation": "", + "PostPunctuation": "...", + "Weekday" : { + 6 : "Sunday", + 0 : "Monday", + 1 : "Tuesday", + 2 : "Wednesday", + 3 : "Thursday", + 4 : "Friday", + 5 : "Saturday" + }, + "Now": "Now", + "Today":"Today", + "Tonight": "Tonight", + "Rest of Today":"Rest of Today", + "Rest of Tonight": "Rest of Tonight", + "Night": "Night", + "Evening": "Evening", + "Afternoon": "This Afternoon", + }, + "CapitalWithPeriod": { + "PrePunctuation": ".", + "PostPunctuation": "...", + "Weekday" : { + 6 : "SUNDAY", + 0 : "MONDAY", + 1 : "TUESDAY", + 2 : "WEDNESDAY", + 3 : "THURSDAY", + 4 : "FRIDAY", + 5 : "SATURDAY" + }, + "Now": "NOW", + "Today":"TODAY", + "Tonight": 
"TONIGHT", + "Rest of Today":"REST OF TODAY", + "Rest of Tonight": "REST OF TONIGHT", + "Night": "NIGHT", + "Evening": "EVENING", + "Afternoon": "THIS AFTERNOON", + }, + "Abbreviated": { + "PrePunctuation": "", + "PostPunctuation": "", + "Weekday" : { + 6 : "Sun", + 0 : "Mon", + 1 : "Tue", + 2 : "Wed", + 3 : "Thu", + 4 : "Fri", + 5 : "Sat" + }, + "Now": "now", + "Today":"today", + "Tonight": "tonight", + "Rest of Today":"rest of today", + "Rest of Tonight": "rest of tonight", + "Night": "night", + "Evening": "evening", + "Afternoon": "this afternoon", + }, + "CapsAbbreviated": { + "PrePunctuation": "", + "PostPunctuation": "", + "Weekday" : { + 6 : "Sun", + 0 : "Mon", + 1 : "Tue", + 2 : "Wed", + 3 : "Thu", + 4 : "Fri", + 5 : "Sat" + }, + "Now": "Now", + "Today":"Today", + "Tonight": "Tonight", + "Rest of Today":"Rest of today", + "Rest of Tonight": "Rest of tonight", + "Night": "Night", + "Evening": "Evening", + "Afternoon": "This afternoon", + }, + "Combo": { + "PrePunctuation": ".", + "PostPunctuation": "", + "Weekday" : { + 6 : "Sunday", + 0 : "Monday", + 1 : "Tuesday", + 2 : "Wednesday", + 3 : "Thursday", + 4 : "Friday", + 5 : "Saturday" + }, + "Now": "Now", + "Today":"Today", + "Tonight": "Tonight", + "Rest of Today":"Rest of Today", + "Rest of Tonight": "Rest of Tonight", + "Night": "Night", + "Evening": "Evening", + "Afternoon": "This Afternoon", + } + } + + def timePeriod_descriptor_list(self, tree, node): + # Contains definition for localtime start/end times and phrase + # Tuples, 0=startHrLT, 1=endHrLT, 2=phrase + day = self.DAY() + return [ + (day, (day+3)%24, "early in the morning"), # 6a-9a + (day, (day+6)%24, "in the morning"), # 6a-noon + (day, (day+9)%24, "until late afternoon"), # 6a-3p + (day, (day+12)%24, ""), # 6a-6p + (day, (day+15)%24, "until early evening"), # 6a-9p + (day, (day+18)%24, "through the evening"), # 6a-midnite + + ((day+2)%24, (day+3)%24, "early in the morning"), # 8a-9a + + ((day+3)%24, (day+6)%24, "late in the morning"), # 
9a-noon + ((day+3)%24, (day+9)%24, "in the late morning and early afternoon"), # 9a-3p + ((day+3)%24, (day+12)%24, "in the late morning and afternoon"), # 9a-6p + ((day+3)%24, (day+15)%24, "until early evening"), # 9a-9p + ((day+3)%24, (day+18)%24, "through the evening"), # 9a-midnite + + ((day+5)%24, (day+6)%24, "late in the morning"), # 11a-noon + + ((day+6)%24, (day+9)%24, "early in the afternoon"), # noon-3p + ((day+6)%24, (day+12)%24, "in the afternoon"), # noon-6p + ((day+6)%24, (day+15)%24, "in the afternoon and evening"),# noon-9p + ((day+6)%24, (day+18)%24, "in the afternoon and evening"),# noon-midnite + + ((day+8)%24, (day+9)%24, "early in the afternoon"), # 2pm-3pm + + ((day+9)%24, (day+12)%24, self.lateDay_descriptor), # 3p-6p + ((day+9)%24, (day+15)%24, "early in the evening"), # 3p-9p + ((day+9)%24, (day+18)%24, "in the evening"), # 3p-midnite + ((day+9)%24, (day+21)%24, "until early morning"), # 3p-3a + ((day+9)%24, day, ""), # 3p-6a + + ((day+11)%24, (day+12)%24, self.lateDay_descriptor), # 5p-6p + + ((day+12)%24, (day+15)%24, "early in the evening"), # 6p-9p + ((day+12)%24, (day+18)%24, "in the evening"), # 6p-midnite + ((day+12)%24, (day+21)%24, "until early morning"), # 6p-3a + ((day+12)%24, day, ""), # 6p-6a + + ((day+14)%24, (day+15)%24, "early in the evening"), # 8p-9p + + ((day+15)%24, (day+18)%24, "late in the evening"), # 9p-midnite + ((day+15)%24, (day+21)%24, "in the late evening and early morning"),# 9p-3a + ((day+15)%24, day, "in the late evening and overnight"), # 9p-6a + + ((day+17)%24, (day+18)%24, "late in the evening"), # 11p-midnight + + ((day+18)%24, (day+21)%24, "after midnight"), # midnite-3a + ((day+18)%24, day, "after midnight"), # midnite-6a + ((day+18)%24, (day+6)%24, ""), # midnite-noon + + ((day+20)%24, (day+21)%24, "after midnight"), # 2a-3a + + ((day+21)%24, day, self.lateNight_descriptor), # 3a-6a + ((day+21)%24, (day+3)%24, "early in the morning"), # 3a-9a + ((day+21)%24, (day+6)%24, "early in the morning"), # 
3a-noon + ((day+21)%24, (day+9)%24, "until afternoon"), # 3a-3p + ((day+21)%24, (day+12)%24, ""), # 3a-6p + + ((day+23)%24, (day)%24, self.lateNight_descriptor), # 5a-6a + + ] + + def lateDay_descriptor(self, tree, node, timeRange): + return "late in the afternoon" + + def lateNight_descriptor(self, tree, node, timeRange): + return "early in the morning" + + def include_timePeriod_descriptor_flag( + self, tree, node, statsByRange, index, element): + # For "range" phrases, tells whether or not to include the time_descriptor + # i.e "North winds 10-15 knots becoming 20-30 knots in the afternoon." + # "index" tells which sub-phrase of the phrase we're qualifying, and the default + # is to qualify every other sub-phrase. + # + # Default only applies to Wind and reports "every other" sub-phrase + if element != "Wind": + return 1 + flag = 0 + odd = len(statsByRange)%2 + # If there is an odd number of sub-phrases, + # report on 1st sub-phrase and then every other one + if odd == 1: + if index%2 == 0: + flag = 1 + # If there is an even number of sub-phrases, + # report on 2nd sub-phrase and then every other one + elif index%2 == 1: + flag = 1 + return flag + + # Time period Table Access + def timePeriod_descriptor(self, tree, node, timeRange): + # Returns a descriptor phrase for the time range. + # Assumes the timeRange is in GMT and converts it to Local time. 
+ + # more than 12 hours, return empty string + if timeRange.duration() > 12*3600: + return "" + if timeRange == node.getTimeRange(): + return "" + + # determine the local time + localTime, shift = self.determineTimeShift() + periodStart = timeRange.startTime() + shift + periodEnd = timeRange.endTime() + shift + startHour = periodStart.hour + endHour = periodEnd.hour + + # get the table + table = self.timePeriod_descriptor_list(tree, node) + + # look for the best match entry, start with the startTime match + bestIndexes = [] + bestTime = 9999 + for i in range(len(table)): + diff = self.hourDiff(startHour, table[i][0]) + if diff < bestTime: + bestTime = diff + for i in range(len(table)): + diff = self.hourDiff(startHour, table[i][0]) + if diff == bestTime: + bestIndexes.append(table[i]) + + # if nothing found, return "" string + if len(bestIndexes) == 0: + return "" + + # now find the best match for the ending time, from the ones earlier + bestTime = 9999 + returnValue = '' + for i in range(len(bestIndexes)): + diff = self.hourDiff(endHour, bestIndexes[i][1]) + if diff < bestTime: + returnValue = bestIndexes[i][2] + bestTime = diff + + if type(returnValue) is types.MethodType: + return returnValue(tree, node, timeRange) + else: + return returnValue + + def hourDiff(self, h1, h2): + # returns the number of hours difference + h = abs(h1-h2) + if h > 12: + h = 24 - h + return h + + def dayDict(self): + return {0:"SUN",1:"MON",2:"TUE",3:"WED",4:"THU",5:"FRI",6:"SAT"} + def monthDict(self): + return {1:"JAN",2:"FEB",3:"MAR",4:"APR",5:"MAY",6:"JUN", + 7:"JUL", 8:"AUG",9:"SEP",10:"OCT",11:"NOV",12:"DEC"} + def monthDict1(self): + return {1:"Jan",2:"Feb",3:"Mar",4:"Apr",5:"May",6:"Jun",7:"Jul", + 8:"Aug",9:"Sep",10:"Oct",11:"Nov",12:"Dec"} + + #******************** + # Methods for formatting TimeRange labels + + def timeRangeLabel(self, timeRange): + # Return the label for the given time range + start = timeRange.startTime() + end = timeRange.endTime() + return 
start.string() + " - " + end.string() + + def hourAmPm(self, hour): + # Given a military time hour, return + # Non-military time plus AM or PM + if hour == 0: + hour = 12 + ampm = " AM" + elif hour == 12: + ampm = " PM" + elif hour < 12: + ampm = " AM" + else: + hour = hour - 12 + ampm = " PM" + return hour, ampm + + def periodLabel(self, timeRange): + # Return the label for the given time range + start = timeRange.startTime() + hourStr = repr(start.hour)+"Z/"+repr(start.day) + return hourStr + + def localTimeRangeLabel(self, timeRange): + # Return the label for the given time range + localTime, shift = self.determineTimeShift() + localStart = timeRange.startTime() + shift + localEnd = timeRange.endTime() + shift + localRange = TimeRange.TimeRange(localStart, localEnd) + + # Determine name of time zone (e.g. MST, MDT...) + localTime = time.localtime(time.time()) + zoneName = time.strftime("%Z",localTime) + + # Create label + localStr = self.timeRangeLabel(localRange) + localStr = string.replace(localStr, "GMT", zoneName) + return localStr + + def localOneHourTRLabel(self, timeRange): + # Return label in form: Month Day, Year hour - hour + # E.g. Sept 30, 2000 1 AM - 2 AM + localTime, shift = self.determineTimeShift() + start = timeRange.startTime() + shift + localTime = time.localtime(time.time()) + zoneName = time.strftime("%Z",localTime) + monthDict = self.monthDict() + str = monthDict[start.month] + " " + repr(start.day) + ", " + repr(start.year) + hour1, ampm1 = self.hourAmPm(start.hour) + hour2, ampm2 = self.hourAmPm(start.hour+1) + return str + " " + repr(hour1) + ampm1 + " " + zoneName + " - " + \ + repr(hour2) + ampm2 + " " + zoneName + + def localTRLabel(self, timeRange): + # Return label in form + # E.g. 
Sept 30, 1 AM MST TO Oct 1, 2 AM MST + localTime, shift = self.determineTimeShift() + start = timeRange.startTime() + shift + end = timeRange.endTime() + shift + localTime = time.localtime(time.time()) + zoneName = time.strftime("%Z",localTime) + hour1, ampm1 = self.hourAmPm(start.hour) + hour2, ampm2 = self.hourAmPm(end.hour) + monthDict1 = self.monthDict1() + str1 = repr(hour1) + ampm1 + " " + zoneName + " " + monthDict1[start.month] + " " + repr(start.day) + str2 = repr(hour2) + ampm2 + " " + zoneName + " " + monthDict1[end.month] + " " + repr(end.day) + return str1 + " TO " + str2 + + def localPeriodLabel(self, timeRange): + # Return the label for the given time range + + # Adjust time from GMT to local + localTime, shift = self.determineTimeShift() + start = timeRange.startTime() + shift + + # Determine name of time zone (e.g. MST, MDT...) + localTime = time.localtime(time.time()) + zoneName = time.strftime("%Z",localTime) + + # Create label + hourStr = repr(start.hour)+ " " + zoneName + "/"+repr(start.day) + return hourStr + + def localTimeLabel(self, timeRange): + # Return a label of length 6 the form: hour AM/PM + # E.g. 5 PM + return self.localHourLabel(timeRange.startTime(),6) + + def localRangeLabel(self, timeRange): + # Return label of range: 5AM-6PM + label1 = self.localHourLabel(timeRange.startTime(),4) + label2 = self.localHourLabel(timeRange.endTime(),4) + str = label1 + "-" + label2 + return string.replace(str," ","") + + def localHourLabel(self, absTime, length=6): + # Convert from gmt to local time + localTime, shift = self.determineTimeShift() + start = absTime + shift + hour = start.hour + hour, ampm = self.hourAmPm(hour) + label = repr(hour) + ampm + return string.rjust(label,length) + + def strToGMT(self, timeStr): + # Convert a time str in the form of local time + # hours and minutes to a GMT AbsTime for the current day. + # The time string must be 6 or 7 characters long + # E.g. 
"1030 AM" + # "800 PM" + + # Get the hour and minutes + length = len(timeStr) + amPm = timeStr[length-2:length] + minutes = timeStr[length-5:length-3] + minutes = int(minutes) + if length > 6: + hour = timeStr[0:2] + else: + hour = timeStr[0] + hour = int(hour) + if hour == 12: + if amPm == "AM": + hour = 0 + else: + if amPm == "PM": + hour = hour + 12 + # Make a local AbsTime + curLocalTime, shift = self.determineTimeShift() + newTime = AbsTime.absTimeYMD(curLocalTime.year, curLocalTime.month, + curLocalTime.day, hour, minutes) + # Convert to GMT + return newTime - shift diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TimeRangeUtils.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TimeRangeUtils.py index 5ebec56ef7..5a25aa0a86 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TimeRangeUtils.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TimeRangeUtils.py @@ -1,271 +1,271 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TimeRangeUtils.py -# Utilities for dealing with Time Ranges in Text Products. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import time, string, types, os -import TextUtils -import AbsTime, TimeRange - -class TimeRangeUtils(TextUtils.TextUtils): - def __init__(self): - TextUtils.TextUtils.__init__(self) - - def makeTimeRange(self, startTime, endTime): - try: - return TimeRange.TimeRange(startTime, endTime) - except: - startTime = AbsTime.AbsTime(int(startTime)) - endTime = AbsTime.AbsTime(int(endTime)) - return TimeRange.TimeRange(startTime, endTime) - - def createTimeRange(self, startHour, endHour, mode="LT"): - # Returns an TimeRange.TimeRange object given by: - # startHour, endHour - # (range is startHour up to and not including endHour) - # startHour and endHour are relative to midnight of the - # current day either in Local or Zulu time (see below) - # mode can be: - # "LT" : the startHour and endHour are relative to midnight local time - # "Zulu": relative to 0Z Zulu time - # - # E.g. 
- # timeRange = self.createTimeRange(0, 121, "Zulu") - - if mode == "LT": - localTime, shift = self.determineTimeShift() - today = AbsTime.absTimeYMD(localTime.year, localTime.month, - localTime.day, 0, 0, 0) - start = today + (startHour *3600) - shift - end = today + (endHour * 3600) - shift - return TimeRange.TimeRange(start, end) - else: - gmTime = time.gmtime(time.time()) - today = AbsTime.absTimeYMD(gmTime[0],gmTime[1],gmTime[2], 0, 0, 0) - start = today + (startHour *3600) - end = today + (endHour * 3600) - return TimeRange.TimeRange(start, end) - - ## - # Get the time range corresponding to the named time range. - # @param timeRangeName: The time range name to find, i.e., "tonight" - # @type timeRangeName: string - # @param argDict: argDict - # @return: The named time range, based on the current time. - # @rtype: Python TimeRange - def getTimeRange(self, timeRangeName, argDict=None): - # Return a timeRange corresponding to the named time range - return TimeRange.TimeRange(argDict['dataMgr'].getSelectTimeRangeManager().getRange(timeRangeName).toTimeRange()); - - def getPeriod(self, timeRange, shiftToLocal=0): - # Based on midpoint hour, - # Return 1 if day, 0 if night - # Return -1 if greater than or equal 24 hours duration - if shiftToLocal == 1: - timeRange = self.shiftedTimeRange(timeRange) - if timeRange.duration() >= 24 * 3600: - return self.DAYNIGHT() - # Find midpoint hour - startTime = timeRange.startTime() + timeRange.duration()/2 - hour = startTime.hour - day = self.DAY() - night = self.NIGHT() - if hour >= day and hour < night: - return self.DAYTIME() - else: - return self.NIGHTTIME() - - def shiftedTimeRange(self, timeRange): - # Shift the given time range to local time. - # It is assumed to be in GMT. 
- localTime, shift = self.determineTimeShift() - return TimeRange.TimeRange(timeRange.startTime() + shift, - timeRange.endTime() + shift) - - def getTimeRangeList(self, argDict, rangeNames, labelMethod=None, labelFormat=None): - # Make a list of (timeRange, label) tuples for the given - # timeRanges and labelMethod - # - #Arguments: - # rangeNames = a list of time range names or actual time ranges - # labelMethod = text method to label each time range. - # labelFormat = format for labeling time ranges. - # If included, overrides labelMethod. - # Format is of form: ( LT_OR_Zulu, durationFmt, startFmt, endFmt) - # See Text Product User Guide to see possible formats. - # e.g. ("Zulu", "", "%HZ/%d", "") - # - trList = [] - for name in rangeNames: - if isinstance(name, TimeRange.TimeRange): - range = name - else: - range = self.getTimeRange(name, argDict) - if labelFormat is not None: - LTorZulu, durFmt, startFmt, endFmt = labelFormat - label = self.timeDisplay( - range, LTorZulu, durFmt, startFmt, endFmt) - elif labelMethod is not None: - label = labelMethod(range) - else: - label = "" - trList.append((range, label)) - return trList - - def getPeriods(self, timeRange, period, span, numPeriods=None, - labelMethod=None, labelFormat=None): - # Make a list of (timeRange, label) tuples for the given - # timeRange, period and span using the labelMethod or labelFormat - # - #Arguments: - # timeRange = a TimeRange from TimeRange.py - # period = number of hours between periods beginning at - # the start of the given timeRange - # span = number of hours duration for each period. - # (Note that with the "period" and "span" arguments, you - # could have periods every 12 hours which are only - # 1-hour in duration: period=12, span=1.) - # numPeriods = Number of periods desired. - # If None, periods go to the end of the timeRange. - # labelMethod = text method to label the period. - # labelFormat = format for labeling periods. - # If included, overrides labelMethod. 
- # Format is of form: ( LT_OR_Zulu, durationFmt, startFmt, endFmt) - # See Text Product User Guide to see possible formats. - # e.g. ("Zulu", "", "%HZ/%d", "") - # - - actualTR = timeRange - if numPeriods is not None: - actualTR = TimeRange.TimeRange( - timeRange.startTime(), - timeRange.startTime() + int(numPeriods * period * 3600)) - periodList = [] - start = actualTR.startTime() - while start < actualTR.endTime(): - # Create Time Range for next period - end = start + 3600 * span # 3600 = 1 hour in seconds - tr = TimeRange.TimeRange(start, end) - if labelFormat is not None: - LTorZulu, durFmt, startFmt, endFmt = labelFormat - label = self.timeDisplay( - tr, LTorZulu, durFmt, startFmt, endFmt) - elif labelMethod is not None: - label = labelMethod(tr) - else: - label = "" - periodList.append((tr, label)) - start = start + int(3600 * period) - return periodList - - def adjustTimeRange(self, timeRange, adjustHours): - # Return a time range adjusted by the given number of hours - return TimeRange.TimeRange(timeRange.startTime() + adjustHours*3600, - timeRange.endTime() + adjustHours*3600) - - def localTime(self, startTime, hours, shift): - # Return the local time using shift and the startTime - return startTime + hours * 3600 - shift - - def hrToSec(self, hours): - "Convert hours given by period to seconds" - return hours * 60 * 60 - - - def daylight(self): - # Return 1 if local time is currently daylight savings - localtime = time.localtime(time.time()) - dayLight = localtime[8] - if dayLight > 0: - return 1 - else: - return 0 - - def determineShift(self): - # Return the difference: Local Time - GMT time - localTime, shift = self.determineTimeShift() - return shift - - def determineTimeShift(self): - # Return the current local time and the difference: - # Local Time - GMT time - curTime = time.time() - localtime = time.localtime(curTime) - currentLocalTime = AbsTime.absTimeYMD( - localtime[0],localtime[1],localtime[2],localtime[3],localtime[4]) - gmTime = 
time.gmtime(curTime) - currentGMTime = AbsTime.absTimeYMD( - gmTime[0],gmTime[1],gmTime[2],gmTime[3],gmTime[4]) - shift = currentLocalTime - currentGMTime - return currentLocalTime, shift - - def timeDisplay(self, timeRange, LTorZulu, durFmt, startFmt, endFmt): - # Return a string display for the given timeRange, assumed to be - # in GMT. - # If LTorZulu == "LT", the timeRange will be converted from GMT - # to local time. - # durationFmt, startFmt, endFmt are format strings for the - # timeRange duration, the start time and end time respectively. - # See Text Product User Guide to see possible formats. - # - # Example: - # self.timeDisplay(timeRange, "LT", "%H hours ", - # "%a %b %d, %Y %I:%M %p", - # " to %a %b %d, %Y %I:%M %p %Z") - # - # yields a string such as: - # - # 12 hours Mon Apr 23, 2001 06:00 AM to Mon Apr 23, 2001 06:00 PM MDT. - - if LTorZulu == "LT": - # Convert to local time - timeRange = self.shiftedTimeRange(timeRange) - display = "" - if durFmt != "": - duration = timeRange.duration() - durHours = duration / 3600 - durMinutes = duration / 3600 / 60 - durStr = string.replace(durFmt, "%H", `durHours`) - durStr = string.replace(durStr, "%M", `durMinutes`) - display = display + durStr - if startFmt != "": - #display = display + timeRange.startTime().stringFmt(startFmt) - display = display + timeRange.startTime().strftime(startFmt) - if endFmt != "": - #display = display + timeRange.endTime().stringFmt(endFmt) - display = display + timeRange.endTime().strftime(endFmt) - if LTorZulu == "LT": - # Adjust time zone to local time - localTime = time.localtime(time.time()) - zoneName = time.strftime("%Z",localTime) - display = string.replace(display,"GMT",zoneName) - return display +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TimeRangeUtils.py +# Utilities for dealing with Time Ranges in Text Products. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. +## + +import time, string, types, os +import TextUtils +import AbsTime, TimeRange + +class TimeRangeUtils(TextUtils.TextUtils): + def __init__(self): + TextUtils.TextUtils.__init__(self) + + def makeTimeRange(self, startTime, endTime): + try: + return TimeRange.TimeRange(startTime, endTime) + except: + startTime = AbsTime.AbsTime(int(startTime)) + endTime = AbsTime.AbsTime(int(endTime)) + return TimeRange.TimeRange(startTime, endTime) + + def createTimeRange(self, startHour, endHour, mode="LT"): + # Returns an TimeRange.TimeRange object given by: + # startHour, endHour + # (range is startHour up to and not including endHour) + # startHour and endHour are relative to midnight of the + # current day either in Local or Zulu time (see below) + # mode can be: + # "LT" : the startHour and endHour are relative to midnight local time + # "Zulu": relative to 0Z Zulu time + # + # E.g. 
+ # timeRange = self.createTimeRange(0, 121, "Zulu") + + if mode == "LT": + localTime, shift = self.determineTimeShift() + today = AbsTime.absTimeYMD(localTime.year, localTime.month, + localTime.day, 0, 0, 0) + start = today + (startHour *3600) - shift + end = today + (endHour * 3600) - shift + return TimeRange.TimeRange(start, end) + else: + gmTime = time.gmtime(time.time()) + today = AbsTime.absTimeYMD(gmTime[0],gmTime[1],gmTime[2], 0, 0, 0) + start = today + (startHour *3600) + end = today + (endHour * 3600) + return TimeRange.TimeRange(start, end) + + ## + # Get the time range corresponding to the named time range. + # @param timeRangeName: The time range name to find, i.e., "tonight" + # @type timeRangeName: string + # @param argDict: argDict + # @return: The named time range, based on the current time. + # @rtype: Python TimeRange + def getTimeRange(self, timeRangeName, argDict=None): + # Return a timeRange corresponding to the named time range + return TimeRange.TimeRange(argDict['dataMgr'].getSelectTimeRangeManager().getRange(timeRangeName).toTimeRange()); + + def getPeriod(self, timeRange, shiftToLocal=0): + # Based on midpoint hour, + # Return 1 if day, 0 if night + # Return -1 if greater than or equal 24 hours duration + if shiftToLocal == 1: + timeRange = self.shiftedTimeRange(timeRange) + if timeRange.duration() >= 24 * 3600: + return self.DAYNIGHT() + # Find midpoint hour + startTime = timeRange.startTime() + timeRange.duration()/2 + hour = startTime.hour + day = self.DAY() + night = self.NIGHT() + if hour >= day and hour < night: + return self.DAYTIME() + else: + return self.NIGHTTIME() + + def shiftedTimeRange(self, timeRange): + # Shift the given time range to local time. + # It is assumed to be in GMT. 
+ localTime, shift = self.determineTimeShift() + return TimeRange.TimeRange(timeRange.startTime() + shift, + timeRange.endTime() + shift) + + def getTimeRangeList(self, argDict, rangeNames, labelMethod=None, labelFormat=None): + # Make a list of (timeRange, label) tuples for the given + # timeRanges and labelMethod + # + #Arguments: + # rangeNames = a list of time range names or actual time ranges + # labelMethod = text method to label each time range. + # labelFormat = format for labeling time ranges. + # If included, overrides labelMethod. + # Format is of form: ( LT_OR_Zulu, durationFmt, startFmt, endFmt) + # See Text Product User Guide to see possible formats. + # e.g. ("Zulu", "", "%HZ/%d", "") + # + trList = [] + for name in rangeNames: + if isinstance(name, TimeRange.TimeRange): + range = name + else: + range = self.getTimeRange(name, argDict) + if labelFormat is not None: + LTorZulu, durFmt, startFmt, endFmt = labelFormat + label = self.timeDisplay( + range, LTorZulu, durFmt, startFmt, endFmt) + elif labelMethod is not None: + label = labelMethod(range) + else: + label = "" + trList.append((range, label)) + return trList + + def getPeriods(self, timeRange, period, span, numPeriods=None, + labelMethod=None, labelFormat=None): + # Make a list of (timeRange, label) tuples for the given + # timeRange, period and span using the labelMethod or labelFormat + # + #Arguments: + # timeRange = a TimeRange from TimeRange.py + # period = number of hours between periods beginning at + # the start of the given timeRange + # span = number of hours duration for each period. + # (Note that with the "period" and "span" arguments, you + # could have periods every 12 hours which are only + # 1-hour in duration: period=12, span=1.) + # numPeriods = Number of periods desired. + # If None, periods go to the end of the timeRange. + # labelMethod = text method to label the period. + # labelFormat = format for labeling periods. + # If included, overrides labelMethod. 
+ # Format is of form: ( LT_OR_Zulu, durationFmt, startFmt, endFmt) + # See Text Product User Guide to see possible formats. + # e.g. ("Zulu", "", "%HZ/%d", "") + # + + actualTR = timeRange + if numPeriods is not None: + actualTR = TimeRange.TimeRange( + timeRange.startTime(), + timeRange.startTime() + int(numPeriods * period * 3600)) + periodList = [] + start = actualTR.startTime() + while start < actualTR.endTime(): + # Create Time Range for next period + end = start + 3600 * span # 3600 = 1 hour in seconds + tr = TimeRange.TimeRange(start, end) + if labelFormat is not None: + LTorZulu, durFmt, startFmt, endFmt = labelFormat + label = self.timeDisplay( + tr, LTorZulu, durFmt, startFmt, endFmt) + elif labelMethod is not None: + label = labelMethod(tr) + else: + label = "" + periodList.append((tr, label)) + start = start + int(3600 * period) + return periodList + + def adjustTimeRange(self, timeRange, adjustHours): + # Return a time range adjusted by the given number of hours + return TimeRange.TimeRange(timeRange.startTime() + adjustHours*3600, + timeRange.endTime() + adjustHours*3600) + + def localTime(self, startTime, hours, shift): + # Return the local time using shift and the startTime + return startTime + hours * 3600 - shift + + def hrToSec(self, hours): + "Convert hours given by period to seconds" + return hours * 60 * 60 + + + def daylight(self): + # Return 1 if local time is currently daylight savings + localtime = time.localtime(time.time()) + dayLight = localtime[8] + if dayLight > 0: + return 1 + else: + return 0 + + def determineShift(self): + # Return the difference: Local Time - GMT time + localTime, shift = self.determineTimeShift() + return shift + + def determineTimeShift(self): + # Return the current local time and the difference: + # Local Time - GMT time + curTime = time.time() + localtime = time.localtime(curTime) + currentLocalTime = AbsTime.absTimeYMD( + localtime[0],localtime[1],localtime[2],localtime[3],localtime[4]) + gmTime = 
time.gmtime(curTime) + currentGMTime = AbsTime.absTimeYMD( + gmTime[0],gmTime[1],gmTime[2],gmTime[3],gmTime[4]) + shift = currentLocalTime - currentGMTime + return currentLocalTime, shift + + def timeDisplay(self, timeRange, LTorZulu, durFmt, startFmt, endFmt): + # Return a string display for the given timeRange, assumed to be + # in GMT. + # If LTorZulu == "LT", the timeRange will be converted from GMT + # to local time. + # durationFmt, startFmt, endFmt are format strings for the + # timeRange duration, the start time and end time respectively. + # See Text Product User Guide to see possible formats. + # + # Example: + # self.timeDisplay(timeRange, "LT", "%H hours ", + # "%a %b %d, %Y %I:%M %p", + # " to %a %b %d, %Y %I:%M %p %Z") + # + # yields a string such as: + # + # 12 hours Mon Apr 23, 2001 06:00 AM to Mon Apr 23, 2001 06:00 PM MDT. + + if LTorZulu == "LT": + # Convert to local time + timeRange = self.shiftedTimeRange(timeRange) + display = "" + if durFmt != "": + duration = timeRange.duration() + durHours = duration / 3600 + durMinutes = duration / 3600 / 60 + durStr = string.replace(durFmt, "%H", repr(durHours)) + durStr = string.replace(durStr, "%M", repr(durMinutes)) + display = display + durStr + if startFmt != "": + #display = display + timeRange.startTime().stringFmt(startFmt) + display = display + timeRange.startTime().strftime(startFmt) + if endFmt != "": + #display = display + timeRange.endTime().stringFmt(endFmt) + display = display + timeRange.endTime().strftime(endFmt) + if LTorZulu == "LT": + # Adjust time zone to local time + localTime = time.localtime(time.time()) + zoneName = time.strftime("%Z",localTime) + display = string.replace(display,"GMT",zoneName) + return display diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Translator.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Translator.py index 8baab78daa..ff7e26d076 100644 --- 
a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Translator.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Translator.py @@ -1,689 +1,689 @@ -#!/usr/bin/env python -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +#!/usr/bin/env python +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Translator.py -# Class for Translator. -# -# Author: mathwig -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -import string - -LanguageTables = { - "english" : {}, - "french" : { - "Expressions" : [ - ('mostly sunny', 'generalement ensoleille'), - ('mostly clear', 'generalement degage'), - ('mostly cloudy', 'generalement nuageux'), - ('partly cloudy', 'partiellement nuageux'), - ('sunny', 'ensoleille'), - ('clear', 'ciel degage'), - ('cloudy', 'nuageux'), - ('high winds', 'vents forts'), - ('light winds', 'vents faibles'), - ('very windy', 'tres venteux'), - ('probability of precipitation', 'probabilite de precipitations'), - ('chance of precipitation', 'risque de precipitations'), - ('areal coverage of precipitation', 'areal coverage de precipitations'), - ('slight chance of', 'faible risque de'), - ('chance of', 'risque de'), - ('snow accumulation of', 'accumulation de neige de'), - ('northeast winds at', 'vents du nord-est de'), - ('northwest winds at', 'vents du nord-ouest de'), - ('southest winds at', 'vents du sud-est de'), - ('southwest winds at', 'vents du sud-ouest de'), - ('east winds at', "vents de l'est de"), - ('west winds at', "vents de l'ouest de"), - ('north winds at', 'vents du nord de'), - ('south winds at', 'vents du sud de'), - ('northeast winds up', "vents du nord-est jusqu'"), - ('northwest winds up', "vents du nord-ouest jusqu'"), - ('southest winds up', "vents du sud-est jusqu'"), - ('southwest winds up', "vents du sud-ouest jusqu'"), - ('east winds up', "vents de l'est jusqu'"), - ('west winds up', "vents de l'ouest jusqu'"), - ('north winds up', "vents du nord jusqu'"), - ('south winds up', "vents du sud jusqu'"), - ('northeast', 'nord-est'), - ('northwest', 'nord-ouest'), - ('southeast', 'sud-est'), - ('southwest', 'sud-ouest'), - ('east', 'est'), - ('west', 'ouest'), - ('north', 'nord'), - ('south', 'sud'), - ('in the evening', 'au cours de la soiree'), - ('in the night', 'au cours de la nuit'), - ('in the morning', 'au cours de la matinee'), - ('in the afternoon', "au cours de l'apres-midi"), - ('through the evening', 'pret de la 
soiree'), - ('through the night', 'pret de la nuit'), - ('through the morning', 'pret de la matinee'), - ('through the afternoon', "pret de l'apres-midi"), - ('through', "pret"), - ('overnight', 'pendant la nuit'), - ('decreasing to', 'changeant a'), - ('increasing to', 'augmentant a'), - ('shifting to the', 'devenant du'), - ('becoming', 'devenant'), - ('much warmer', 'beaucoup plus chaud'), - ('warmer','plus chaud'), - ('cooler','plus frais'), - ('sharply colder', 'beaucoup plus froid'), - ('clearing later', 'se degageant plus tard'), - ('becoming cloudy', 'devenant nuageux'), - ('increasing cloudiness', 'augmentation de nuages'), - ('decreasing cloudiness', 'diminution de nuages'), - ('around', 'de pres de'), - ('in the lower', 'dans les bas'), - ('in the mid', 'dans les mi-'), - ('in the upper', 'dans les hauts'), - ('in the', 'dans les'), - ('highs', 'maximums'), - ('lows', 'minimums'), - ('. high', '. maximum'), - ('. low', '. minimum'), - ('rising', 'montant'), - ('falling', 'descendant'), - ('high', 'eleve'), - ('low', 'bas'), - ('temperatures', 'temperaturas'), - ('percent', 'pour cent'), - ('inches', 'pouces'), - ('inch', 'pouce'), - (' to ', ' a '), - (' at ', ' a '), - (' and ', ' et '), - ('mixed with', 'avec'), - ('with', 'avec'), - ('no swells', 'aucune houle'), - ('swell', 'houle'), - ('waves', 'mer'), - ('less than', 'de moins de'), - ('sky/weather...', 'ciel/meteo.......'), - ('lal...........', 'LAL..............'), - ('temperature...', 'temperature......'), - ('humidity......', 'humidite.........'), - ('wind - 20 ft..', 'vent - 20 pieds..'), - ('valleys...', 'vallees...'), - ('ridges....', 'aretes....'), - ('haines index..', 'indice haines....'), - ('smoke dispersal', 'dispersion de fumee'), - ('mixing height...', 'hauteur de melange..'), - ('transport wind..', 'vent de transportation..'), - ('visibility','visibilite'), - ('frequent lightning','foudre frequente'), - ('gusty winds','vents brisques'), - ('heavy rainfall','pluie abondante'), - 
('damaging winds','vents damageux'), - ('small hail','petite grele'), - ('large hail','grosse grele'), - ('then', 'alors'), - ], - "Types" : [ - ('freezing rain', 'pluie verglacante', 'FS'), - ('rain showers', 'averses de pluie', 'FP'), - ('rain', 'pluie', 'FS'), - ('freezing drizzle', 'bruine', 'FS'), - ('drizzle', 'bruine', 'FS'), - ('snow showers', 'averses de neige', 'FP'), - ('snow', 'neige', 'FS'), - ('dust', 'poussiere', 'FS'), - ('fog', 'brouillard', 'MS'), - ('haze', 'brume', 'FS'), - ('hail', 'grele', 'FS'), - ('sleet', 'verglas', 'MS'), - ('smoke', 'fumee', 'FS'), - ('thunderstorms', 'orages', 'MP'), - ('volcanic ash', 'cendre volcanique', 'FS') - ], - "Intensities" :[ - ('very light', 'tres faible', 'tres faible', 'tres faible', - 'tres faibles'), - ('light', 'faible', 'faibles', 'faible', 'faibles'), - ('moderate', 'modere', 'moderes', 'moderee', 'moderees'), - ('very heavy', 'tres abondant', 'tres abondants', - 'tres abondante', 'tres abondantes'), - ('heavy', 'abondant', 'abondants', 'abondante', 'abondantes') - ], - "Coverages" : [ - ('isolated', 'isole', 'isoles', 'islolee', 'isolees'), - ('widely scattered', 'largement disperse', - 'largement disperses', - 'largement dispersee', 'largement dispersees'), - ('scattered', 'disperse', 'disperses', 'dispersee', - 'dispersees'), - ('widespread', 'repandu', 'repandus', 'repanduee', 'repanduees'), - ('occasional', 'passage', 'passages', 'passagee', 'passagees'), - ('numerous','nombreux','nombreux','nombreuse','nombreuses') - ], - "Exceptions" : [ - ('likely', 'probable', 'probables', 'probable', 'probables') - ], - "CleanUp" : [ - ('de a', "d'a"), - ('mi- ', 'mi-'), - ("jusqu' a", "jusqu'a") - ] - }, - "spanish" : { - "Expressions" : [ - ('mostly sunny', 'mayormente soleado'), - ('mostly clear', 'mayormente claro'), - ('mostly cloudy', 'mayormente nublado'), - ('partly cloudy', 'parcialmente nublado'), - ('sunny', 'soleado'), - ('clearing','despejandose'), - ('clear', 'despejado'), - ('later','mas 
tarde'), - ('cloudy', 'nublado'), - ('snow accumulation of', 'acumulacion de nieve'), - ('probability of precipitation', 'probabilidad de lluvias'), - ('chance of precipitation', 'probabilidad de lluvias'), - ('areal coverage of precipitation', 'areal coverage de lluvias'), - ('slight chance of', 'leve probabilidad de'), - ('chance of', 'probabilidad de'), - ('in the night', 'en la noche'), - ('during the night','durante la noche'), - ('in the morning', 'en la manana'), - ('in the afternoon', 'en la tarde'), - ('in the evening', 'temprano en la noche'), - ('through the morning', 'recorriendo la manana'), - ('through the afternoon', 'recorriendo la tarde'), - ('through the evening', 'recorriendo la temprano noche'), - ('through', 'recorriendo'), - ('overnight', 'durante la noche'), - ('early evening','temprano en la noche'), - ('evening','temprano en la noche'), - ('decreasing to', 'disminuyendo'), - ('increasing to', 'aumentando'), - ('shifting to the', 'tornandose del'), - ('becoming', 'tornandose'), - ('then','luego'), - ('followed by','seguido por'), - ('much warmer', 'mucho mas caliente'), - ('warmer', 'mas caliente'), - ('sharply colder', 'marcadamente frio'), - ('cooler', 'mas fresco'), - ('clearing later', 'aclrarando tarde'), - ('becoming cloudy', 'llegando a ser nublado'), - ('increasing cloudiness', 'nubosidad aumentando'), - ('decreasing cloudiness', 'nubosidad disminuyendo'), - ('high winds', 'vientos fuertes'), - ('. highs', '. temperaturas maximas'), - ('. lows', '. 
temperaturas minimas'), - ('northeast winds', 'vientos del noreste'), - ('northwest winds', 'vientos del noroeste'), - ('southeast winds', 'vientos del sureste'), - ('southwest winds', 'vientos del suroeste'), - ('east winds', 'vientos del este'), - ('west winds', 'vientos del oeste'), - ('north winds', 'vientos del norte'), - ('south winds', 'vientos del sur'), - ('northeast winds up', 'vientos del noreste hasta de'), - ('northwest winds up', 'vientos del noroeste hasta de'), - ('southest winds up', 'vientos del sureste hasta de'), - ('southwest winds up', 'vientos del suroeste hasta de'), - ('east winds up', 'vientos del este hasta de'), - ('west winds up', 'vientos del oeste hasta de'), - ('north winds up', 'vientos del norte hasta de'), - ('south winds up', 'vientos del sur hasta de'), - ('northeast', 'noreste'), - ('northwest', 'noroeste'), - ('southeast', 'sureste'), - ('southwest', 'suroeste'), - ('small craft exercise caution','precaucion a embarcaciones pequenas'), - ('small craft advisory','advertencias a embarcaciones pequenas'), - ('knots','nudos'), - ('seas','mares de'), - ('bay and inland waters','aguas tierra adentro y de las bahias'), - ('inland waters','aguas tierra adentro'), - ('a light chop','picadas ligeramente'), - ('a moderate chop','picadas moderadamente'), - ('rough','turbulentas'), - ('very rough','bien turbulentas'), - ('almost smooth','casi llanas'), - ('smooth','llanas'), - ('choppy','picadas'), - ('extended forecast','pronostico extendido'), - ('forecast for', 'pronostico de'), - ('bay waters','aguas de la bahia'), - ('lake waters','aguas del lago'), - ('feet','pies'), - ('foot','pie'), - ('east', 'este'), - ('west', 'oeste'), - ('north', 'norte'), - ('south', 'sur'), - ('patchy dense fog','niebla densa esparcida'), - ('areas of dense fog','areas de niebla densa'), - ('widespread dense fog','niebla densa extensa'), - ('patchy fog','niebla esparcida'), - ('areas of fog','areas de niebla'), - ('widespread fog','niebla extensa'), - 
('dense fog','niebla densa'), - ('around', 'alrededor de'), - ('in the lower to mid', 'en los bajos a medios'), - ('in the mid to upper', 'en los medios a altos'), - ('in the lower', 'en los bajos'), - ('in the mid', 'en los medios'), - ('in the upper', 'en los altos'), - ('in the', 'en los'), - ('low', 'bajo'), - ('high', 'alto'), - ('no swells', 'no marejeda'), - ('swell', 'marejeda'), - ('waves', 'ondas'), - ('less than', 'menos de'), - ('percent', 'por ciento'), - ('inches', 'pulgadas'), - ('inch', 'pulgada'), - ('light winds', 'vientos ligeros'), - ('very windy', 'muy ventoso'), - ('windy', 'ventoso'), - ('breezy','brisas'), - ('and gusty','con rafagas mas altas'), - (' and ', ' y '), - (' to ', ' a '), - (' at ', ' en '), - (' with ',' con '), - ('mixed with', 'con'), - ('sky/weather...', 'cielo/tiempo.....'), - ('lal...........', 'NAE..............'), - ('temperature...', 'temperatura......'), - ('humidity......', 'humedad........'), - ('wind - 20 ft..', 'viento - 20 ft..'), - ('valleys...', 'valles...'), - ('ridges....', 'vrestas....'), - ('haines index..', 'indice de haines....'), - ('smoke dispersal', 'dispersion de humo'), - ('mixing height...', 'altura de mezcla..'), - ('transport wind..', 'viento transportador..'), - ('visibility','visibilidad'), - ('frequent lightning','rayos frequentes'), - ('gusty winds','rachas de viento'), - ('heavy rainfall','lluvia intensa'), - ('damaging winds','vientos perjudiciales'), - ('small hail','granizo pequeno'), - ('large hail','granizo grande'), - ('Sunday night','Domingo por la noche'), - ('Monday night','Lunes por la noche'), - ('Tuesday night','Martes por la noche'), - ('Wednesday night','Miercoles por la noche'), - ('Thursday night','Jueves por la noche'), - ('Friday night','Viernes por la noche'), - ('Saturday night','Sabado por la noche'), - ('Sunday','Domingo'), - ('Monday','Lunes'), - ('Tuesday','Martes'), - ('Wednesday','Miercoles'), - ('Thursday','Jueves'), - ('Friday','Viernes'), - ('Saturday','Sabado'), - 
('tonight','esta noche'), - ('today','hoy'), - ('scattered thunderstorms','tormentas dispersas'), - ('isolated thunderstorms','tormentas aisladas'), - ('near the coast','cerca de la costa'), - ('inland','tierra adentro'), - ('winds','vientos'), - ('wind','vientos'), - ('or less','o menos') - ], - "Types" : [ - ('freezing rain', 'lluvia helada', 'FS'), - ('rain showers', 'chubascos', 'MP'), - ('showers', 'chubascos', 'MP'), - ('freezing drizzle', 'llovizna helada', 'FS'), - ('rain', 'lluvia', 'FS'), - ('drizzle', 'llovizna', 'FS'), - ('snow showers', 'chuvascos de nieve', 'FS'), - ('snow', 'nieve', 'FS'), - ('fog', 'nieblas', 'MS'), - ('dust', 'polvo', 'MS'), - ('haze', 'neblina', 'FS'), - ('hail', 'granizo', 'MS'), - ('sleet', 'aguanieve', 'FS'), - ('smoke', 'humo', 'MS'), - ('thunderstorms', 'tormentas', 'MP'), - ('volcanic ash', 'ceniza volcanica', 'FS') - ], - "Intensities" : [ - ('light', 'muy ligero', 'muy ligeros', 'muy ligera', - 'muy ligeras'), - ('light', 'ligero', 'ligero', 'ligero', 'ligero'), - ('moderate', 'moderado', 'moderado', 'moderado', 'moderado'), - ('very heavy', 'muy intenso', 'muy intensos', 'muy intensa', - 'muy intensas'), - ('heavy', 'intenso', 'intensos', 'intensa', 'intensas'), - ('numerous','numeroso','numerosos','numerosa','numerosas') - ], - "Coverages" : [ - ('isolated', 'aislado', 'aislados', 'aislada', 'aisladas'), - ('widely scattered', 'extensamente disperso', - 'extensamente dispersos', 'extensamente dispersa', - 'extensamente dispersas'), - ('scattered', 'disperso', 'dispersos', 'dispersa', 'dispersas'), - ('occasional', 'ocasional', 'ocasionales', 'ocasional', - 'ocasionales'), - ('widespread', 'muy difundido', 'muy difundidos', - 'muy difundida','muy difundidas') - ], - "Exceptions" :[ - ('likely', 'probable', 'probables', 'probable', 'probables') - ], - "CleanUp" :[ ] - } - } - - -# ValueDict : This index and table contain subsitution values for the HTML -# Template pages, FastWx.html and Table.html -Index = { - "english": 
1, - "french" : 2, - "spanish": 3 -} - -ValueDictTable = [ - ("Type", "Category", "Categorie", "Categoria"), - ("Public", "Public", "Publiques", "Publico"), - ("FireWeather", "FireWeather", "Previsions Feu", "Incendios-Tiempo"), - ("Aviation", "Aviation", "Aviation", "Aviacion"), - ("Marine", "Marine", "Marine", "Marino"), - ("Language", "Language", "Langue", "Lenguaje"), - ("Audio", "Audio", "Audio", "Audio"), - ("Click", "Click", "Cliquer", "Haga Clic"), - ("ClickText", "on a location OR Select:", - "sur une location ou Choisir", - "en una localidad o Seleccionela"), - ("CityTable", "Table of Cities", "Table de Villes", "Tabla de Ciudades"), - ("CountyTable", "Table of Counties", "Table de Comtes", - "Tabla de Condados "), - ("issued", "Issued", "Emises", "Emitido"), - ("Site","Forecast Location: ","Site de Previsions: ", - "Terreno de Pronostico: "), - ("English", "English", "Anglais", "Ingles"), - ("Spanish", "Spanish", "Espagnol", "Espanol"), - ("French", "French", "Francais", "Franceses") - ] - -# General Expressions: Time, Column headings, Web page -Expression = [ - ("Tonight", "Ce soir", "Esta Noche"), - ('Today', "Aujourd'hui", "Hoy"), - ('Night', "soir", "Noche"), - ('Monday',"Lundi","Lunes"), - ('Tuesday',"Mardi", "Martes"), - ('Wednesday',"Mercredi", "Miercoles"), - ('Thursday',"Jeudi","Jueves"), - ('Friday',"Vendredi","Viernes"), - ('Saturday',"Samedi","Sabado"), - ('Sunday',"Dimanche","Domingo"), - ("Tonight", "Ce soir", "Esta noche"), - ('Today', "Aujourd'Hui", "Hoy"), - ('Night', "Soir", "Noche"), - ('Monday',"Lundi","Lunes"), - ('Tuesday',"Mardi", "Martes"), - ('Wednesday',"Mercredi", "Miercoles"), - ('Thursday',"Jeudi","Jueves"), - ('Friday',"Vendredi","Viernes"), - ('Saturday',"Samedi","Sabado"), - ('Sunday',"Dimanche","Domingo"), - ('Jan', "Jan", "Erno"), - ('Feb', "Fev", "Febrero"), - ('Mar', "Mar", "Marzo"), - ('Apr',"Avr","Abril"), - ('May',"Mai","Mayo"), - ('Jun',"Juin","Junio"), - ('Jul',"Juil","Julio"), - ('Aug',"Aout", "Agosto"), - 
('Sep',"Sep","Septiembre"), - ('Oct',"Oct","Octubre"), - ('Nov',"Nov","Noviembre"), - ('Dec',"Dec","Diciembre"), - ('Sky','Ciel','Cielo'), - ('Wind (mph)','Vent (mph)','Viento (mph)'), - ('Max Temp','Temp Max','Temp Max'), - ('Min Temp','Temp Min','Temp Min'), - ('Precip','Precip','Lluvias'), - ('Wind (kt)','Vent (kt)','Viento (kt)'), - ('Waves (ft)','Vagues (pd)','Ondas (ft)'), - ('Swells (ft)','Houles (ft)','Swells (ft)'), - ('LAL','LAL','LAL'), - ('RelHum(%)','HumRel(%)','RelHum(%)'), - ('MaxT','TMax','TMax',), - ('MinT','TMin','TMin'), - ('FreeWind(mph)','VentLibre(mph)','VientoLibre(mph)'), - ('Haines','Haines','Haines'), - ('TransWind(mph)','VentTrans(mph)','VientoTrans(mph)'), - ('MixHgt(ft AGL)','ElevMelang(ft AGL)','AltuMezcl(ft AGL)'), - ('City','Ville', 'Ciudad'), - ('County','Comte', 'Condado'), - ('Nowcast','Previsions Courantes','Pronostico Sobre Tiempo'), - ('Short Term Forecast','Previsions Court Terme',\ - 'Pronostico a Corto Plazo'), - ('Extended Forecast','Previsions Long Terme','Pronostico a Largo Plazo'), - ('Spot Forecast','Previsions Spot','Pronostico Spot'), - ('Outlook','Perspective','Panorama'), - ('Marine Nowcast','Previsions Marines Courantes', - 'Pronostico Sobre-Tiempo Maritimo'), - ('Coastal Marine Forecast','Coastal Marine Forecast', - 'Pronostico Maritimo Costero'), - ('Terminal Aerodrome Forecast',"Previsions a l'Aerodrome", - 'Pronostico Para Terminal Aerodromo'), - ('Latitude','Latitude','Latitud'), - ('Longitude','Longitude','Longitud'), - ('Area','Aire','Area'), - ('Cities','Villes','Ciudades'), - ('Counties','Comtes','Condados'), - ('in the morning','du matin','por la manana'), - ('in the afternoon',"de l'apres-midi", "por la tarde"), - ('in the evening','du soir',"por la tarde"), - ('during the night','pendant la nuit','durante la noche'), - ('followed by', 'suivi par', 'seguido poru'), - ('overnight', 'pendant la nuit', 'durante la noche'), - ] - -class Translator: - def __init__(self, language, parent=None): - - 
self._language = language - self._langDict = LanguageTables[language] - - # Function translating a forecast - def getForecast(self, forecast): - lwForecast = string.lower(forecast) - - # Convert forecast using translation tuples - transForecast = self._translateExpForecast(lwForecast) - - # Convert the exceptions - exceptForecast = self._translateExceptions(transForecast) - - # Convert forecast using type and intensity tuples - transForecast = self._translateTypeForecast(exceptForecast) - - # Clean up the translated forecast - cleanTransForecast = self._cleanUp(transForecast) - - # Capitalize the beginning of sentences - self.capTransForecast = self._capital(cleanTransForecast) - - return self.capTransForecast - - # Function converting appropriate letters of a string to capital letters - def _capital(self, str): - - if str == "": - return str - - # Find all the periods - index = [] - - for i in range(0, len(str)-1): - if str[i] == "." and str[i+1] == " ": - index.append(i+2) - elif str[i] == "." 
and str[i+1] == ".": - index.append(i+2) - - # Always capitalize the first letter - capitalStr = string.upper(str[0]) - - # Capitalize the letters following the periods and a space - for i in range(1, len(str)): - if i in index: - capitalStr = capitalStr + string.upper(str[i]) - else: - capitalStr = capitalStr + str[i] - - return capitalStr - - - # Function translating a forecast using the translation expression tuples - def _translateExpForecast(self, lwForecast): - - for expr, transExpr in self._langDict['Expressions']: - #print expr, transExpr - lwForecast = string.replace(lwForecast, expr, transExpr) - - return lwForecast - - # Function translating a forecast using the translation type and - # intensity tuples - def _translateTypeForecast(self, lwForecast): - - # translate combination of type, intensity, and coverage - for ttuple in self._langDict['Types']: - for ituple in self._langDict['Intensities']: - for ctuple in self._langDict['Coverages']: - origEx = ctuple[0] + ' ' + ituple[0] + ' ' + ttuple[0] - transEx = '' - if ttuple[2] == 'MS': - transEx = ttuple[1] + ' ' + ituple[1] + ' ' + ctuple[1] - elif ttuple[2] == 'MP': - transEx = ttuple[1] + ' ' + ituple[2] + ' ' + ctuple[2] - elif ttuple[2] == 'FS': - transEx = ttuple[1] + ' ' + ituple[3] + ' ' + ctuple[3] - elif ttuple[2] == 'FP': - transEx = ttuple[1] + ' ' + ituple[4] + ' ' + ctuple[4] - lwForecast = string.replace(lwForecast, origEx, transEx) - - # translate combination of type and intensity (no coverage) - for ttuple in self._langDict['Types']: - for ituple in self._langDict['Intensities']: - origEx = ituple[0] + ' ' + ttuple[0] - transEx = '' - if ttuple[2] == 'MS': - transEx = ttuple[1] + ' ' + ituple[1] - elif ttuple[2] == 'MP': - transEx = ttuple[1] + ' ' + ituple[2] - elif ttuple[2] == 'FS': - transEx = ttuple[1] + ' ' + ituple[3] - elif ttuple[2] == 'FP': - transEx = ttuple[1] + ' ' + ituple[4] - lwForecast = string.replace(lwForecast, origEx, transEx) - - # translate combination of type and 
coverage (no intensity) - for ttuple in self._langDict['Types']: - for ctuple in self._langDict['Coverages']: - origEx = ctuple[0] + ' ' + ttuple[0] - transEx = '' - if ttuple[2] == 'MS': - transEx = ttuple[1] + ' ' + ctuple[1] - elif ttuple[2] == 'MP': - transEx = ttuple[1] + ' ' + ctuple[2] - elif ttuple[2] == 'FS': - transEx = ttuple[1] + ' ' + ctuple[3] - elif ttuple[2] == 'FP': - transEx = ttuple[1] + ' ' + ctuple[4] - lwForecast = string.replace(lwForecast, origEx, transEx) - - # translate type (no coverage and no intensity) - for ttuple in self._langDict['Types']: - lwForecast = string.replace(lwForecast, ttuple[0], ttuple[1]) - - return lwForecast - - # Convert the exceptions - def _translateExceptions(self, transForecast): - for ttuple in self._langDict['Types']: - for etuple in self._langDict['Exceptions']: - origEx = ttuple[0] + ' ' + etuple[0] - transEx = '' - if ttuple[2] == 'MS': - transEx = ttuple[0] + ' ' + etuple[1] - elif ttuple[2] == 'MP': - transEx = ttuple[0] + ' ' + etuple[2] - elif ttuple[2] == 'FS': - transEx = ttuple[0] + ' ' + etuple[3] - elif ttuple[2] == 'FP': - transEx = ttuple[0] + ' ' + etuple[4] - transForecast = string.replace(transForecast, origEx, transEx) - return transForecast - - # Function cleaning up the translated forecast - def _cleanUp(self, lwForecast): - - for expr, transExpr in self._langDict['CleanUp']: - lwForecast = string.replace(lwForecast, expr, transExpr) - - return lwForecast - - def getExpression(self, phrase): - "Translate the phrase" - - if self._language == "english": - return phrase - index = Index[self._language] - 1 - for expr in Expression: - phrase = string.replace(phrase, expr[0], expr[index]) - return phrase - - -if __name__ == '__main__': - - forecastList = [ - "High winds in the afternoon. Partly cloudy. Very heavy rain showers likely. Snow accumulation of 1 inch. Lows in the mid 30s. East winds at 75 mph. Probability of precipitation 65 percent.", - "Mostly sunny. Widespread heavy volcanic ash. 
Snow accumulation of 1 to 20 inches. Highs around 120. Probability of precipitation 99 percent.", - "High winds. Partly cloudy. Slight chance of very heavy rain showers. Snow accumulation of 1 inch. Lows in the mid 30s. East winds at 75 mph. Probability of precipitation 1 percent. Southwest winds up to 15 mph.", - "Sky/weather...Mostly cloudy with scattered rain showers and thunderstorms\nLAL...........3-4\nTemperature...Lows in the mid 30s\nHumidity......70 pct\nWind - 20 ft..Northwest to Southeast in the evening\n Valleys...\n Ridges....\nHaines Index..4 low\nSmoke dispersal:\n Mixing height...Decreasing to 500-1,000 ft AGL\n Transport wind..Northeast to southeast 3-8 mph", - "High winds. Decreasing cloudiness. Widely scattered light sleet. Snow accumulation of 1 to 50 inches. Low 0. Northwest winds at 90 to 100 mph becoming southwest at 80 to 90 mph. Probability of precipitation 110 percent." ] - - for forecast in forecastList: - #transForecast = Translator('french') - transForecast = Translator('spanish') - print ' ' - print 'Original Forecast' - print forecast - print ' ' - print 'Translated Forecast' - print transForecast.getForecast(forecast) +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Translator.py +# Class for Translator. +# +# Author: mathwig +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +import string + +LanguageTables = { + "english" : {}, + "french" : { + "Expressions" : [ + ('mostly sunny', 'generalement ensoleille'), + ('mostly clear', 'generalement degage'), + ('mostly cloudy', 'generalement nuageux'), + ('partly cloudy', 'partiellement nuageux'), + ('sunny', 'ensoleille'), + ('clear', 'ciel degage'), + ('cloudy', 'nuageux'), + ('high winds', 'vents forts'), + ('light winds', 'vents faibles'), + ('very windy', 'tres venteux'), + ('probability of precipitation', 'probabilite de precipitations'), + ('chance of precipitation', 'risque de precipitations'), + ('areal coverage of precipitation', 'areal coverage de precipitations'), + ('slight chance of', 'faible risque de'), + ('chance of', 'risque de'), + ('snow accumulation of', 'accumulation de neige de'), + ('northeast winds at', 'vents du nord-est de'), + ('northwest winds at', 'vents du nord-ouest de'), + ('southest winds at', 'vents du sud-est de'), + ('southwest winds at', 'vents du sud-ouest de'), + ('east winds at', "vents de l'est de"), + ('west winds at', "vents de l'ouest de"), + ('north winds at', 'vents du nord de'), + ('south winds at', 'vents du sud de'), + ('northeast winds up', "vents du nord-est jusqu'"), + ('northwest winds up', "vents du nord-ouest jusqu'"), + ('southest winds up', "vents du sud-est jusqu'"), + ('southwest winds up', "vents du sud-ouest jusqu'"), + ('east winds up', "vents de l'est jusqu'"), + ('west winds up', "vents de l'ouest jusqu'"), + ('north winds up', "vents du nord jusqu'"), + ('south winds up', "vents du sud jusqu'"), + ('northeast', 'nord-est'), + ('northwest', 'nord-ouest'), + ('southeast', 'sud-est'), + ('southwest', 'sud-ouest'), + ('east', 'est'), + ('west', 'ouest'), + ('north', 'nord'), + ('south', 'sud'), + ('in the evening', 'au cours de la soiree'), + ('in the night', 'au cours de la nuit'), + ('in the morning', 'au cours de la matinee'), + ('in the afternoon', "au cours de l'apres-midi"), + ('through the evening', 'pret de la 
soiree'), + ('through the night', 'pret de la nuit'), + ('through the morning', 'pret de la matinee'), + ('through the afternoon', "pret de l'apres-midi"), + ('through', "pret"), + ('overnight', 'pendant la nuit'), + ('decreasing to', 'changeant a'), + ('increasing to', 'augmentant a'), + ('shifting to the', 'devenant du'), + ('becoming', 'devenant'), + ('much warmer', 'beaucoup plus chaud'), + ('warmer','plus chaud'), + ('cooler','plus frais'), + ('sharply colder', 'beaucoup plus froid'), + ('clearing later', 'se degageant plus tard'), + ('becoming cloudy', 'devenant nuageux'), + ('increasing cloudiness', 'augmentation de nuages'), + ('decreasing cloudiness', 'diminution de nuages'), + ('around', 'de pres de'), + ('in the lower', 'dans les bas'), + ('in the mid', 'dans les mi-'), + ('in the upper', 'dans les hauts'), + ('in the', 'dans les'), + ('highs', 'maximums'), + ('lows', 'minimums'), + ('. high', '. maximum'), + ('. low', '. minimum'), + ('rising', 'montant'), + ('falling', 'descendant'), + ('high', 'eleve'), + ('low', 'bas'), + ('temperatures', 'temperaturas'), + ('percent', 'pour cent'), + ('inches', 'pouces'), + ('inch', 'pouce'), + (' to ', ' a '), + (' at ', ' a '), + (' and ', ' et '), + ('mixed with', 'avec'), + ('with', 'avec'), + ('no swells', 'aucune houle'), + ('swell', 'houle'), + ('waves', 'mer'), + ('less than', 'de moins de'), + ('sky/weather...', 'ciel/meteo.......'), + ('lal...........', 'LAL..............'), + ('temperature...', 'temperature......'), + ('humidity......', 'humidite.........'), + ('wind - 20 ft..', 'vent - 20 pieds..'), + ('valleys...', 'vallees...'), + ('ridges....', 'aretes....'), + ('haines index..', 'indice haines....'), + ('smoke dispersal', 'dispersion de fumee'), + ('mixing height...', 'hauteur de melange..'), + ('transport wind..', 'vent de transportation..'), + ('visibility','visibilite'), + ('frequent lightning','foudre frequente'), + ('gusty winds','vents brisques'), + ('heavy rainfall','pluie abondante'), + 
('damaging winds','vents damageux'), + ('small hail','petite grele'), + ('large hail','grosse grele'), + ('then', 'alors'), + ], + "Types" : [ + ('freezing rain', 'pluie verglacante', 'FS'), + ('rain showers', 'averses de pluie', 'FP'), + ('rain', 'pluie', 'FS'), + ('freezing drizzle', 'bruine', 'FS'), + ('drizzle', 'bruine', 'FS'), + ('snow showers', 'averses de neige', 'FP'), + ('snow', 'neige', 'FS'), + ('dust', 'poussiere', 'FS'), + ('fog', 'brouillard', 'MS'), + ('haze', 'brume', 'FS'), + ('hail', 'grele', 'FS'), + ('sleet', 'verglas', 'MS'), + ('smoke', 'fumee', 'FS'), + ('thunderstorms', 'orages', 'MP'), + ('volcanic ash', 'cendre volcanique', 'FS') + ], + "Intensities" :[ + ('very light', 'tres faible', 'tres faible', 'tres faible', + 'tres faibles'), + ('light', 'faible', 'faibles', 'faible', 'faibles'), + ('moderate', 'modere', 'moderes', 'moderee', 'moderees'), + ('very heavy', 'tres abondant', 'tres abondants', + 'tres abondante', 'tres abondantes'), + ('heavy', 'abondant', 'abondants', 'abondante', 'abondantes') + ], + "Coverages" : [ + ('isolated', 'isole', 'isoles', 'islolee', 'isolees'), + ('widely scattered', 'largement disperse', + 'largement disperses', + 'largement dispersee', 'largement dispersees'), + ('scattered', 'disperse', 'disperses', 'dispersee', + 'dispersees'), + ('widespread', 'repandu', 'repandus', 'repanduee', 'repanduees'), + ('occasional', 'passage', 'passages', 'passagee', 'passagees'), + ('numerous','nombreux','nombreux','nombreuse','nombreuses') + ], + "Exceptions" : [ + ('likely', 'probable', 'probables', 'probable', 'probables') + ], + "CleanUp" : [ + ('de a', "d'a"), + ('mi- ', 'mi-'), + ("jusqu' a", "jusqu'a") + ] + }, + "spanish" : { + "Expressions" : [ + ('mostly sunny', 'mayormente soleado'), + ('mostly clear', 'mayormente claro'), + ('mostly cloudy', 'mayormente nublado'), + ('partly cloudy', 'parcialmente nublado'), + ('sunny', 'soleado'), + ('clearing','despejandose'), + ('clear', 'despejado'), + ('later','mas 
tarde'), + ('cloudy', 'nublado'), + ('snow accumulation of', 'acumulacion de nieve'), + ('probability of precipitation', 'probabilidad de lluvias'), + ('chance of precipitation', 'probabilidad de lluvias'), + ('areal coverage of precipitation', 'areal coverage de lluvias'), + ('slight chance of', 'leve probabilidad de'), + ('chance of', 'probabilidad de'), + ('in the night', 'en la noche'), + ('during the night','durante la noche'), + ('in the morning', 'en la manana'), + ('in the afternoon', 'en la tarde'), + ('in the evening', 'temprano en la noche'), + ('through the morning', 'recorriendo la manana'), + ('through the afternoon', 'recorriendo la tarde'), + ('through the evening', 'recorriendo la temprano noche'), + ('through', 'recorriendo'), + ('overnight', 'durante la noche'), + ('early evening','temprano en la noche'), + ('evening','temprano en la noche'), + ('decreasing to', 'disminuyendo'), + ('increasing to', 'aumentando'), + ('shifting to the', 'tornandose del'), + ('becoming', 'tornandose'), + ('then','luego'), + ('followed by','seguido por'), + ('much warmer', 'mucho mas caliente'), + ('warmer', 'mas caliente'), + ('sharply colder', 'marcadamente frio'), + ('cooler', 'mas fresco'), + ('clearing later', 'aclrarando tarde'), + ('becoming cloudy', 'llegando a ser nublado'), + ('increasing cloudiness', 'nubosidad aumentando'), + ('decreasing cloudiness', 'nubosidad disminuyendo'), + ('high winds', 'vientos fuertes'), + ('. highs', '. temperaturas maximas'), + ('. lows', '. 
temperaturas minimas'), + ('northeast winds', 'vientos del noreste'), + ('northwest winds', 'vientos del noroeste'), + ('southeast winds', 'vientos del sureste'), + ('southwest winds', 'vientos del suroeste'), + ('east winds', 'vientos del este'), + ('west winds', 'vientos del oeste'), + ('north winds', 'vientos del norte'), + ('south winds', 'vientos del sur'), + ('northeast winds up', 'vientos del noreste hasta de'), + ('northwest winds up', 'vientos del noroeste hasta de'), + ('southest winds up', 'vientos del sureste hasta de'), + ('southwest winds up', 'vientos del suroeste hasta de'), + ('east winds up', 'vientos del este hasta de'), + ('west winds up', 'vientos del oeste hasta de'), + ('north winds up', 'vientos del norte hasta de'), + ('south winds up', 'vientos del sur hasta de'), + ('northeast', 'noreste'), + ('northwest', 'noroeste'), + ('southeast', 'sureste'), + ('southwest', 'suroeste'), + ('small craft exercise caution','precaucion a embarcaciones pequenas'), + ('small craft advisory','advertencias a embarcaciones pequenas'), + ('knots','nudos'), + ('seas','mares de'), + ('bay and inland waters','aguas tierra adentro y de las bahias'), + ('inland waters','aguas tierra adentro'), + ('a light chop','picadas ligeramente'), + ('a moderate chop','picadas moderadamente'), + ('rough','turbulentas'), + ('very rough','bien turbulentas'), + ('almost smooth','casi llanas'), + ('smooth','llanas'), + ('choppy','picadas'), + ('extended forecast','pronostico extendido'), + ('forecast for', 'pronostico de'), + ('bay waters','aguas de la bahia'), + ('lake waters','aguas del lago'), + ('feet','pies'), + ('foot','pie'), + ('east', 'este'), + ('west', 'oeste'), + ('north', 'norte'), + ('south', 'sur'), + ('patchy dense fog','niebla densa esparcida'), + ('areas of dense fog','areas de niebla densa'), + ('widespread dense fog','niebla densa extensa'), + ('patchy fog','niebla esparcida'), + ('areas of fog','areas de niebla'), + ('widespread fog','niebla extensa'), + 
('dense fog','niebla densa'), + ('around', 'alrededor de'), + ('in the lower to mid', 'en los bajos a medios'), + ('in the mid to upper', 'en los medios a altos'), + ('in the lower', 'en los bajos'), + ('in the mid', 'en los medios'), + ('in the upper', 'en los altos'), + ('in the', 'en los'), + ('low', 'bajo'), + ('high', 'alto'), + ('no swells', 'no marejeda'), + ('swell', 'marejeda'), + ('waves', 'ondas'), + ('less than', 'menos de'), + ('percent', 'por ciento'), + ('inches', 'pulgadas'), + ('inch', 'pulgada'), + ('light winds', 'vientos ligeros'), + ('very windy', 'muy ventoso'), + ('windy', 'ventoso'), + ('breezy','brisas'), + ('and gusty','con rafagas mas altas'), + (' and ', ' y '), + (' to ', ' a '), + (' at ', ' en '), + (' with ',' con '), + ('mixed with', 'con'), + ('sky/weather...', 'cielo/tiempo.....'), + ('lal...........', 'NAE..............'), + ('temperature...', 'temperatura......'), + ('humidity......', 'humedad........'), + ('wind - 20 ft..', 'viento - 20 ft..'), + ('valleys...', 'valles...'), + ('ridges....', 'vrestas....'), + ('haines index..', 'indice de haines....'), + ('smoke dispersal', 'dispersion de humo'), + ('mixing height...', 'altura de mezcla..'), + ('transport wind..', 'viento transportador..'), + ('visibility','visibilidad'), + ('frequent lightning','rayos frequentes'), + ('gusty winds','rachas de viento'), + ('heavy rainfall','lluvia intensa'), + ('damaging winds','vientos perjudiciales'), + ('small hail','granizo pequeno'), + ('large hail','granizo grande'), + ('Sunday night','Domingo por la noche'), + ('Monday night','Lunes por la noche'), + ('Tuesday night','Martes por la noche'), + ('Wednesday night','Miercoles por la noche'), + ('Thursday night','Jueves por la noche'), + ('Friday night','Viernes por la noche'), + ('Saturday night','Sabado por la noche'), + ('Sunday','Domingo'), + ('Monday','Lunes'), + ('Tuesday','Martes'), + ('Wednesday','Miercoles'), + ('Thursday','Jueves'), + ('Friday','Viernes'), + ('Saturday','Sabado'), + 
('tonight','esta noche'), + ('today','hoy'), + ('scattered thunderstorms','tormentas dispersas'), + ('isolated thunderstorms','tormentas aisladas'), + ('near the coast','cerca de la costa'), + ('inland','tierra adentro'), + ('winds','vientos'), + ('wind','vientos'), + ('or less','o menos') + ], + "Types" : [ + ('freezing rain', 'lluvia helada', 'FS'), + ('rain showers', 'chubascos', 'MP'), + ('showers', 'chubascos', 'MP'), + ('freezing drizzle', 'llovizna helada', 'FS'), + ('rain', 'lluvia', 'FS'), + ('drizzle', 'llovizna', 'FS'), + ('snow showers', 'chuvascos de nieve', 'FS'), + ('snow', 'nieve', 'FS'), + ('fog', 'nieblas', 'MS'), + ('dust', 'polvo', 'MS'), + ('haze', 'neblina', 'FS'), + ('hail', 'granizo', 'MS'), + ('sleet', 'aguanieve', 'FS'), + ('smoke', 'humo', 'MS'), + ('thunderstorms', 'tormentas', 'MP'), + ('volcanic ash', 'ceniza volcanica', 'FS') + ], + "Intensities" : [ + ('light', 'muy ligero', 'muy ligeros', 'muy ligera', + 'muy ligeras'), + ('light', 'ligero', 'ligero', 'ligero', 'ligero'), + ('moderate', 'moderado', 'moderado', 'moderado', 'moderado'), + ('very heavy', 'muy intenso', 'muy intensos', 'muy intensa', + 'muy intensas'), + ('heavy', 'intenso', 'intensos', 'intensa', 'intensas'), + ('numerous','numeroso','numerosos','numerosa','numerosas') + ], + "Coverages" : [ + ('isolated', 'aislado', 'aislados', 'aislada', 'aisladas'), + ('widely scattered', 'extensamente disperso', + 'extensamente dispersos', 'extensamente dispersa', + 'extensamente dispersas'), + ('scattered', 'disperso', 'dispersos', 'dispersa', 'dispersas'), + ('occasional', 'ocasional', 'ocasionales', 'ocasional', + 'ocasionales'), + ('widespread', 'muy difundido', 'muy difundidos', + 'muy difundida','muy difundidas') + ], + "Exceptions" :[ + ('likely', 'probable', 'probables', 'probable', 'probables') + ], + "CleanUp" :[ ] + } + } + + +# ValueDict : This index and table contain subsitution values for the HTML +# Template pages, FastWx.html and Table.html +Index = { + "english": 
1, + "french" : 2, + "spanish": 3 +} + +ValueDictTable = [ + ("Type", "Category", "Categorie", "Categoria"), + ("Public", "Public", "Publiques", "Publico"), + ("FireWeather", "FireWeather", "Previsions Feu", "Incendios-Tiempo"), + ("Aviation", "Aviation", "Aviation", "Aviacion"), + ("Marine", "Marine", "Marine", "Marino"), + ("Language", "Language", "Langue", "Lenguaje"), + ("Audio", "Audio", "Audio", "Audio"), + ("Click", "Click", "Cliquer", "Haga Clic"), + ("ClickText", "on a location OR Select:", + "sur une location ou Choisir", + "en una localidad o Seleccionela"), + ("CityTable", "Table of Cities", "Table de Villes", "Tabla de Ciudades"), + ("CountyTable", "Table of Counties", "Table de Comtes", + "Tabla de Condados "), + ("issued", "Issued", "Emises", "Emitido"), + ("Site","Forecast Location: ","Site de Previsions: ", + "Terreno de Pronostico: "), + ("English", "English", "Anglais", "Ingles"), + ("Spanish", "Spanish", "Espagnol", "Espanol"), + ("French", "French", "Francais", "Franceses") + ] + +# General Expressions: Time, Column headings, Web page +Expression = [ + ("Tonight", "Ce soir", "Esta Noche"), + ('Today', "Aujourd'hui", "Hoy"), + ('Night', "soir", "Noche"), + ('Monday',"Lundi","Lunes"), + ('Tuesday',"Mardi", "Martes"), + ('Wednesday',"Mercredi", "Miercoles"), + ('Thursday',"Jeudi","Jueves"), + ('Friday',"Vendredi","Viernes"), + ('Saturday',"Samedi","Sabado"), + ('Sunday',"Dimanche","Domingo"), + ("Tonight", "Ce soir", "Esta noche"), + ('Today', "Aujourd'Hui", "Hoy"), + ('Night', "Soir", "Noche"), + ('Monday',"Lundi","Lunes"), + ('Tuesday',"Mardi", "Martes"), + ('Wednesday',"Mercredi", "Miercoles"), + ('Thursday',"Jeudi","Jueves"), + ('Friday',"Vendredi","Viernes"), + ('Saturday',"Samedi","Sabado"), + ('Sunday',"Dimanche","Domingo"), + ('Jan', "Jan", "Erno"), + ('Feb', "Fev", "Febrero"), + ('Mar', "Mar", "Marzo"), + ('Apr',"Avr","Abril"), + ('May',"Mai","Mayo"), + ('Jun',"Juin","Junio"), + ('Jul',"Juil","Julio"), + ('Aug',"Aout", "Agosto"), + 
('Sep',"Sep","Septiembre"), + ('Oct',"Oct","Octubre"), + ('Nov',"Nov","Noviembre"), + ('Dec',"Dec","Diciembre"), + ('Sky','Ciel','Cielo'), + ('Wind (mph)','Vent (mph)','Viento (mph)'), + ('Max Temp','Temp Max','Temp Max'), + ('Min Temp','Temp Min','Temp Min'), + ('Precip','Precip','Lluvias'), + ('Wind (kt)','Vent (kt)','Viento (kt)'), + ('Waves (ft)','Vagues (pd)','Ondas (ft)'), + ('Swells (ft)','Houles (ft)','Swells (ft)'), + ('LAL','LAL','LAL'), + ('RelHum(%)','HumRel(%)','RelHum(%)'), + ('MaxT','TMax','TMax',), + ('MinT','TMin','TMin'), + ('FreeWind(mph)','VentLibre(mph)','VientoLibre(mph)'), + ('Haines','Haines','Haines'), + ('TransWind(mph)','VentTrans(mph)','VientoTrans(mph)'), + ('MixHgt(ft AGL)','ElevMelang(ft AGL)','AltuMezcl(ft AGL)'), + ('City','Ville', 'Ciudad'), + ('County','Comte', 'Condado'), + ('Nowcast','Previsions Courantes','Pronostico Sobre Tiempo'), + ('Short Term Forecast','Previsions Court Terme',\ + 'Pronostico a Corto Plazo'), + ('Extended Forecast','Previsions Long Terme','Pronostico a Largo Plazo'), + ('Spot Forecast','Previsions Spot','Pronostico Spot'), + ('Outlook','Perspective','Panorama'), + ('Marine Nowcast','Previsions Marines Courantes', + 'Pronostico Sobre-Tiempo Maritimo'), + ('Coastal Marine Forecast','Coastal Marine Forecast', + 'Pronostico Maritimo Costero'), + ('Terminal Aerodrome Forecast',"Previsions a l'Aerodrome", + 'Pronostico Para Terminal Aerodromo'), + ('Latitude','Latitude','Latitud'), + ('Longitude','Longitude','Longitud'), + ('Area','Aire','Area'), + ('Cities','Villes','Ciudades'), + ('Counties','Comtes','Condados'), + ('in the morning','du matin','por la manana'), + ('in the afternoon',"de l'apres-midi", "por la tarde"), + ('in the evening','du soir',"por la tarde"), + ('during the night','pendant la nuit','durante la noche'), + ('followed by', 'suivi par', 'seguido poru'), + ('overnight', 'pendant la nuit', 'durante la noche'), + ] + +class Translator: + def __init__(self, language, parent=None): + + 
self._language = language + self._langDict = LanguageTables[language] + + # Function translating a forecast + def getForecast(self, forecast): + lwForecast = string.lower(forecast) + + # Convert forecast using translation tuples + transForecast = self._translateExpForecast(lwForecast) + + # Convert the exceptions + exceptForecast = self._translateExceptions(transForecast) + + # Convert forecast using type and intensity tuples + transForecast = self._translateTypeForecast(exceptForecast) + + # Clean up the translated forecast + cleanTransForecast = self._cleanUp(transForecast) + + # Capitalize the beginning of sentences + self.capTransForecast = self._capital(cleanTransForecast) + + return self.capTransForecast + + # Function converting appropriate letters of a string to capital letters + def _capital(self, str): + + if str == "": + return str + + # Find all the periods + index = [] + + for i in range(0, len(str)-1): + if str[i] == "." and str[i+1] == " ": + index.append(i+2) + elif str[i] == "." 
and str[i+1] == ".": + index.append(i+2) + + # Always capitalize the first letter + capitalStr = string.upper(str[0]) + + # Capitalize the letters following the periods and a space + for i in range(1, len(str)): + if i in index: + capitalStr = capitalStr + string.upper(str[i]) + else: + capitalStr = capitalStr + str[i] + + return capitalStr + + + # Function translating a forecast using the translation expression tuples + def _translateExpForecast(self, lwForecast): + + for expr, transExpr in self._langDict['Expressions']: + #print expr, transExpr + lwForecast = string.replace(lwForecast, expr, transExpr) + + return lwForecast + + # Function translating a forecast using the translation type and + # intensity tuples + def _translateTypeForecast(self, lwForecast): + + # translate combination of type, intensity, and coverage + for ttuple in self._langDict['Types']: + for ituple in self._langDict['Intensities']: + for ctuple in self._langDict['Coverages']: + origEx = ctuple[0] + ' ' + ituple[0] + ' ' + ttuple[0] + transEx = '' + if ttuple[2] == 'MS': + transEx = ttuple[1] + ' ' + ituple[1] + ' ' + ctuple[1] + elif ttuple[2] == 'MP': + transEx = ttuple[1] + ' ' + ituple[2] + ' ' + ctuple[2] + elif ttuple[2] == 'FS': + transEx = ttuple[1] + ' ' + ituple[3] + ' ' + ctuple[3] + elif ttuple[2] == 'FP': + transEx = ttuple[1] + ' ' + ituple[4] + ' ' + ctuple[4] + lwForecast = string.replace(lwForecast, origEx, transEx) + + # translate combination of type and intensity (no coverage) + for ttuple in self._langDict['Types']: + for ituple in self._langDict['Intensities']: + origEx = ituple[0] + ' ' + ttuple[0] + transEx = '' + if ttuple[2] == 'MS': + transEx = ttuple[1] + ' ' + ituple[1] + elif ttuple[2] == 'MP': + transEx = ttuple[1] + ' ' + ituple[2] + elif ttuple[2] == 'FS': + transEx = ttuple[1] + ' ' + ituple[3] + elif ttuple[2] == 'FP': + transEx = ttuple[1] + ' ' + ituple[4] + lwForecast = string.replace(lwForecast, origEx, transEx) + + # translate combination of type and 
coverage (no intensity) + for ttuple in self._langDict['Types']: + for ctuple in self._langDict['Coverages']: + origEx = ctuple[0] + ' ' + ttuple[0] + transEx = '' + if ttuple[2] == 'MS': + transEx = ttuple[1] + ' ' + ctuple[1] + elif ttuple[2] == 'MP': + transEx = ttuple[1] + ' ' + ctuple[2] + elif ttuple[2] == 'FS': + transEx = ttuple[1] + ' ' + ctuple[3] + elif ttuple[2] == 'FP': + transEx = ttuple[1] + ' ' + ctuple[4] + lwForecast = string.replace(lwForecast, origEx, transEx) + + # translate type (no coverage and no intensity) + for ttuple in self._langDict['Types']: + lwForecast = string.replace(lwForecast, ttuple[0], ttuple[1]) + + return lwForecast + + # Convert the exceptions + def _translateExceptions(self, transForecast): + for ttuple in self._langDict['Types']: + for etuple in self._langDict['Exceptions']: + origEx = ttuple[0] + ' ' + etuple[0] + transEx = '' + if ttuple[2] == 'MS': + transEx = ttuple[0] + ' ' + etuple[1] + elif ttuple[2] == 'MP': + transEx = ttuple[0] + ' ' + etuple[2] + elif ttuple[2] == 'FS': + transEx = ttuple[0] + ' ' + etuple[3] + elif ttuple[2] == 'FP': + transEx = ttuple[0] + ' ' + etuple[4] + transForecast = string.replace(transForecast, origEx, transEx) + return transForecast + + # Function cleaning up the translated forecast + def _cleanUp(self, lwForecast): + + for expr, transExpr in self._langDict['CleanUp']: + lwForecast = string.replace(lwForecast, expr, transExpr) + + return lwForecast + + def getExpression(self, phrase): + "Translate the phrase" + + if self._language == "english": + return phrase + index = Index[self._language] - 1 + for expr in Expression: + phrase = string.replace(phrase, expr[0], expr[index]) + return phrase + + +if __name__ == '__main__': + + forecastList = [ + "High winds in the afternoon. Partly cloudy. Very heavy rain showers likely. Snow accumulation of 1 inch. Lows in the mid 30s. East winds at 75 mph. Probability of precipitation 65 percent.", + "Mostly sunny. Widespread heavy volcanic ash. 
Snow accumulation of 1 to 20 inches. Highs around 120. Probability of precipitation 99 percent.", + "High winds. Partly cloudy. Slight chance of very heavy rain showers. Snow accumulation of 1 inch. Lows in the mid 30s. East winds at 75 mph. Probability of precipitation 1 percent. Southwest winds up to 15 mph.", + "Sky/weather...Mostly cloudy with scattered rain showers and thunderstorms\nLAL...........3-4\nTemperature...Lows in the mid 30s\nHumidity......70 pct\nWind - 20 ft..Northwest to Southeast in the evening\n Valleys...\n Ridges....\nHaines Index..4 low\nSmoke dispersal:\n Mixing height...Decreasing to 500-1,000 ft AGL\n Transport wind..Northeast to southeast 3-8 mph", + "High winds. Decreasing cloudiness. Widely scattered light sleet. Snow accumulation of 1 to 50 inches. Low 0. Northwest winds at 90 to 100 mph becoming southwest at 80 to 90 mph. Probability of precipitation 110 percent." ] + + for forecast in forecastList: + #transForecast = Translator('french') + transForecast = Translator('spanish') + print(' ') + print('Original Forecast') + print(forecast) + print(' ') + print('Translated Forecast') + print(transForecast.getForecast(forecast)) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TropicalHazards.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TropicalHazards.py index 09ff4424f3..0cb3ee660e 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TropicalHazards.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/TropicalHazards.py @@ -1,1291 +1,1289 @@ - -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# TropicalHazards -# -# Author: Matthew H. 
Belk WFO BOX -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- -------------------------------------------- -# Jun 22, 2013 mbelk Initial creation -# Jul 14, 2016 mbelk Changes for 2017 season -# Sep 19, 2016 19293 randerso Initial baseline check in -# -######################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import time, string, os, re, errno - -import Header -import JsonSupport, LocalizationSupport -import LogStream -import ModuleAccessor -import pprint as pp - - -class TropicalHazards(Header.Header): - - def __init__(self): - - Header.Header.__init__(self) - - #----------------------------------------------------------------------- - # Make a link to the TropicalAreaDictionary - - # Access the information for the breakpoint area(s) if available - self._tropicalAreaDict = \ - ModuleAccessor.ModuleAccessor().variable("AreaDictionary", - "AreaDictionary") - - # Then make a cache of sorted hash keys to this dictionary - self._tropicalAreaDictKeys = self._tropicalAreaDict.keys() - self._tropicalAreaDictKeys.sort(self._sortBreakpoints) - - -################################################################################ -# New common utility methods for tropical hazard formatters -################################################################################ - - def allowedHazards(self): - """TropicalHazards version of GenericHazards.allowedHazards. - - This method defines the allowed hazards for tropical hazard products. 
- """ - tropicalActions = ["NEW", "EXA", "CAN", "CON"] - return [ - ('HU.W',tropicalActions,'Hurricane'), - ('HU.A',tropicalActions,'Hurricane'), - ('SS.W',tropicalActions,'Surge'), - ('SS.A',tropicalActions,'Surge'), - ('TR.W',tropicalActions,'Tropical'), - ('TR.A',tropicalActions,'Tropical'), - ] - - -################################################################################ -# New common utility methods for tropical hazard formatters -################################################################################ - - #=========================================================================== - # Define a method to determine which WFOs are impacted based on a list of - # affected edit area ID's - - def _getAffectedWFOs(self, idList): - """TropicalHazards addition of _getAffectedWFOs. - - This method will produce a list of NWS WFOs impacted by tropical - hazards with the specified identifiers. - - Arguments: - idList -> list TropicalAreaDictionary keys - """ - - # Look at each breakpoint segment in this list - for id in idList: - - # Get the AreaDictionary entry for this segment - if self._tropicalAreaDict.has_key(id): - entry = self._tropicalAreaDict[id] - else: - LogStream.logProblem(\ - "AreaDictionary missing definition for [" + id + "].") - continue - - # Get the WFO responsible for this segment - always use the WFO of the - # starting point to avoid including WFOs unnecessarily - if 'wfo' in entry: - wfo = entry['wfo'].strip() - else: - LogStream.logProblem(\ - "AreaDictionary missing WFO definition for [" + id + "].") - continue - - # If we have a valid WFO identifier, and it is not already noted in the - # impacted WFO list - if len(wfo) > 0 and wfo not in self._wfoList: - - # Add this WFO to the list of impacted WFOs - self._wfoList.append(wfo) - - - #=========================================================================== - # Define a method to format a breakpoint text line within a TCV segment - - def _formatTCVline(self, entry, 
type="start"): - """TropicalHazards addition of _formatTCVline. - - This method will produce a list of NWS WFOs impacted by tropical - hazards with the specified identifiers. - - Arguments: - entry -> TropicalAreaDictionary entry for an edit area - type -> type of breakpoint to produce (optional, defaults to start) - """ - - # If this is not an ending point - if type != "end": - - # Get the information we need - point = entry["startBreakpoint"].strip() - lat = entry["startLat"].strip() - lon = entry["startLon"].strip() - state = entry["startState"].strip() - - # Otherwise - get the end point info for this segment - else: - - # Get the information we need - point = entry["endBreakpoint"].strip() - lat = entry["endLat"].strip() - lon = entry["endLon"].strip() - state = entry["endState"].strip() - - - # Clean up the state so there are no spaces or dashes - state = re.sub("[ _-]+", "", state) - - #----------------------------------------------------------------------- - # If this is not the border of a state or country - - if re.search("(?i)border", point) is None: - - # Add the state/country - point += "-" + state - - #----------------------------------------------------------------------- - # Append the appropriate hemisphere of the latitude - - if lat.find("-") != -1: - lat += "S" - lat = lat.replace("-", "") - else: - lat += "N" - - #----------------------------------------------------------------------- - # Append the appropriate hemisphere of the longitude - - if lon.find("-") != -1: - lon += "W" - lon = lon.replace("-", "") - else: - lon += "E" - - #----------------------------------------------------------------------- - # Now construct the final formatted line - - text = "%-36s%6s%7s\n" % (re.sub("[ _]+", "-", point) + " ", lat, lon) - - # Return the text - return text - - - #=========================================================================== - # Define a method to sort breakpoint record keys - - def _sortBreakpoints(self, a, b): - """TropicalHazards 
addition of _sortBreakpoints. - - This method will produce a sorted list of breakpoint segments. This - ensures the order of contiguous breakpoint segments - """ - - # Make a list of valid string identifier parts - validTypes = [ - "LN", # mainland segments - any country - "KEY", # Florida Keys - "ISL", # islands - "CUBA", # Cuba - "HISP", # Hispaniola - "NAI", # North Atlantic islands - "WTDE", # Deleware Bay - "WTTP", # Tidal Potomac - "WTCP", # Chesapeake Bay - "WTPT", # Generic water points - - # Zones used by VTEC - "GYC", # Guyana - "VEC", # Venezuela - "COC", # Colombia - "PAC", # Panama - "CRC", # Costa Rica - "NIC", # Nicaragua - "HNC", # Honduras - "GTC", # Guatemala - "BZC", # Belize - "MXC", # Mexico - "USC", # United States - "CNC", # Canada - "KEC", # Dry Tortugas - "AWC", # Aruba - "CWC", # Curacao - "TTC", # Trinidad and Tobago - "BBC", # Barbados - "LCC", # St. Lucia - "MQC", # France - Caribbean - "AGC", # Antigua and Barbuda - "BSC", # Bahamas - "BMC", # Bermuda - "JMC", # Jamaica - "KYC", # Cayman Islands - "CUC", # Cuba - "DOC", # Dominican Republic - "HTC", # Haiti - "PMC", # France - North Atlantic - "LOC", # Lake_Okeechobee - "FBC", # Florida Bay - "PSC", # Pamlico Sound - "ASC", # Albemarle Sound - "TXZ", # Texas - "LAZ", # Louisiana - "MSZ", # Mississippi - "ALZ", # Alabama - "FLZ", # Florida - "GAZ", # Georgia - "SCZ", # South Carolina - "NCZ", # North Carolina - "VAZ", # Virginia - "MDZ", # Maryland - "DCZ", # District of Columbia - "DEZ", # Deleware - "NJZ", # New Jersey - "NYZ", # New York - "CTZ", # Connecticut - "RIZ", # Rhode Island - "MAZ", # Massachusetts - "NHZ", # New Hampshire - "MEZ", # Maine - "NMZ", # New Mexico - "ARZ", # Arkansas - "OKZ", # Oklahoma - "MOZ", # Missouri - "TNZ", # Tennessee - "WVZ", # West Virginia - "PAZ", # Pennsylvania - "VTZ", # Vermont - "PRZ", # Puerto Rico - "VIZ", # U.S. 
Virgin Islands - "RE", # General edit area collection - ] - - # Get the first part of each identifier - aSeg = a.split("_")[0] - bSeg = b.split("_")[0] - - # Get ready to split these identifiers into alpha and numeric parts - aSegType = "" - bSegType = "" - aSegNum = "" - bSegNum = "" - - # Start with the alpha components - for c in aSeg: - if c in string.letters: - aSegType = aSegType + c - - for c in bSeg: - if c in string.letters: - bSegType = bSegType + c - - # Now get the numeric components - for c in aSeg: - if c in string.digits: - aSegNum = aSegNum + c - - for c in bSeg: - if c in string.digits: - bSegNum = bSegNum + c - - # Determine the order of these areas based on segment type first - aTypeIndex = validTypes.index(aSegType) - try: - bTypeIndex = validTypes.index(bSegType) - except: - bTypeIndex = aTypeIndex - bSegNum = aSegNum - - # Try to determine order based on segment type - if aTypeIndex < bTypeIndex: - return -1 - elif bTypeIndex < aTypeIndex: - return 1 - - # If the segment types are the same, use the numeric component last - if int(aSegNum) < int(bSegNum): - return -1 - elif int(bSegNum) < int(aSegNum): - return 1 - else: -## print "ERROR!!!!!!! Segment names are equal!!!!!!!" - return 0 - - - #=========================================================================== - # Define a method to organize breakpoint segment zones by government - - def _organizeZonesByGovt(self): - """TropicalHazards addition of _organizeZonesByGovt. - - This method will produce a dictionary of sorted lists where each list - contains all the zone identifiers for which that government is the - responsible entity. 
- """ - - # Create a dictionary to hold all zones managed by a government - zoneDict = {} - - # Also make a list to hold governments in the order we find them - govtList = [] - - # Initialize some lists for handling zone codes - ugcList = [] - ugcListUsa = [] - - #----------------------------------------------------------------------- - # Look at each edit area we have - - for key in self._tropicalAreaDictKeys: - - # Get the type of this zone - zoneId = key.split("_")[0] - - zoneType = "" - - # Get all the letters from this zone ID - for c in zoneId: - if not c.isdigit(): - zoneType += c - - # If this is primary representation of segment zones - # (this is to avoid duplication with the individual zones) - # Make an exception for US zones though - if zoneType in ["LN", "KEY", "CUBA", "HISP", "ISL", "NAI", "WTDE", - "WTTP", "WTCP", "WTPT", "USC"]: - - # Get the government responsible for this zone - zoneGovt = self._tropicalAreaDict[key]["hazardIssuer"].strip() - - # Handle the case of the USA, which receives no attribution - if zoneGovt.strip() == "": - zoneGovt = "US" - - # If we do not already know about this government - if zoneGovt not in govtList: - - # Add it now - govtList.append(zoneGovt) - - #--------------------------------------------------------------- - # Get the impacted zones for this segment - - ugcCode = self._tropicalAreaDict[key]["ugcCode"].strip() - - #--------------------------------------------------------------- - # Expand UGC code if there is more than one zone represented - - if len(ugcCode.split("-")) > 1 or ugcCode.find(">") != -1: - ugcList = self.expandComplexUgc(ugcCode) - - # Otherwise, just use this single zone - else: - ugcList = [ugcCode] - - #--------------------------------------------------------------- - # Expand UGC code if there is more than one zone represented - - try: - ugcCodeUsa = self._tropicalAreaDict[key]["ugcCodeUsa"].strip() - - if len(ugcCodeUsa.split("-")) > 1 or ugcCodeUsa.find(">") != -1: - ugcListUsa = 
self.expandComplexUgc(ugcCodeUsa) - - # Otherwise, just use this single zone - else: - ugcListUsa = [ugcCodeUsa] - - except: - ugcListUsa = [] - - #--------------------------------------------------------------- - # If we already have an entry for this government - - if zoneDict.has_key(zoneGovt): - - # Get the zones already associated - curZoneList = zoneDict[zoneGovt] - - # Otherwise make a new list for this governement - else: - curZoneList = [] - - #--------------------------------------------------------------- - # Now add all the new zones - - for ugc in ugcList + ugcListUsa: - - # If we don't already have this ugc - if ugc not in curZoneList and len(ugc.strip()) > 0: - - # Add it - curZoneList.append(ugc) - -## print curZoneList - - # Sort the UGC list - curZoneList.sort() - - # Store the list of zones for this government - zoneDict[zoneGovt] = curZoneList - - # Always ensure the USA comes first - if "US" in govtList: - govtList.remove("US") - finalGovtList = ["US"] + govtList - - # Return the completed dictionary - return zoneDict, finalGovtList - - - #=========================================================================== - # Define a method to filter a segment list by government - - def _filterAreaListByGovernment(self, govtList, areaList): - """TropicalHazards addition of _filterAreaListByGovernment. - - This method will produce a list of all zones managed by a particular - government contained within the specified area list. 
- - Arguments: - govtList -> list of identifiers managed by a government - areaList -> list of edit area identifiers to process - """ - - # Initialize a new list - newList = [] - - # Look through each edit area - for area in areaList: - - # If this edit area is managed by this government - if area in govtList: - - # Add it to the filtered list - newList.append(area) - - # Return the filtered list - return newList - - - #=========================================================================== - # Define a method to filter a segment list by government - - def _organizeAreasByType(self, areaList): - """TropicalHazards addition of _organizeAreasByType. - - This method will separate a list of areas into one of four types: - mainland segments, UGC zones, zones and islands. These will be stored - in the processed hazard dictionary for easier access later. - - Arguments: - areaList -> list of edit area identifiers to process - """ - - # Initialize both lists - segmentList = [] - ugcZoneList = [] - zoneList = [] - islandList = [] - waterList = [] - - # Look through each edit area - for area in areaList: - - # Assume this is a "land" area - areaType = "land" - - # If the TropicalAreaDictionary has a record for this area - if self._tropicalAreaDict.has_key(area): - - # Get the type of this area - if we can - if self._tropicalAreaDict[area].has_key("segmentType"): - areaType = self._tropicalAreaDict[area]["segmentType"] - - # Get the type of this area - try: - usaZoneList = self.expandComplexUgc( - self._tropicalAreaDict[area]["ugcCodeUsa"]) - except: - usaZoneList = [] - - - #--------------------------------------------------------------- - # If this is an island - - if areaType == "island": - - # If we do not already have a record for this area - if area not in islandList: - - # Add it to the list of islands - islandList.append(area) - - #--------------------------------------------------------------- - # Otherwise, if this is a water area - - elif areaType == "water": - 
- # If we do not already have a record for this area - if area not in waterList: - - # Add it to the list of islands - waterList.append(area) - - #--------------------------------------------------------------- - # Otherwise, organize the land-based areas - - else: - - # If this is a zone-based identifier - if len(area) == 6 and area[2] in ["Z", "C"]: - - # Place this zone into the proper list - if area[2] == "Z": - - # If this area has not already been recorded - if area not in ugcZoneList: - - # Add it to the UGC zone list - ugcZoneList.append(area) - else: - - # If this area has not already been recorded - if area not in zoneList: - - # Add it to the zone list - zoneList.append(area) - - # If there any zones associated with this segment - if len(usaZoneList) > 0: - - for usZone in usaZoneList: - - if usZone not in ugcZoneList: - - # Add it to the UGC zone list - ugcZoneList.append(usZone) - - # Otherwise, this is a breakpoint segment - elif area not in segmentList: - segmentList.append(area) - - # Get any UGC codes associated with this segment - areaUgc = self._tropicalAreaDict[area]["ugcCode"] - - # If there is more than 1 zone associated with segment - if len(areaUgc) > 7: - - # Expand the UGC codes - ugcList = self.expandComplexUgc(areaUgc) - - # Otherwise, make a simpler list so we can proceed - else: - ugcList = [areaUgc] - - #------------------------------------------------------- - # Add each zone code into the list as needed - - for ugcCode in ugcList: - - # Clean up any extra characters - ugcCode = ugcCode.replace("-", "") - - # If this is a zone-based identifier - if len(ugcCode) >= 6 and ugcCode[2] in ["Z", "C"]: - - # Place this zone into the proper list - if ugcCode[2] == "Z": - - # If this area has not already been recorded - if ugcCode not in ugcZoneList: - - # Add it to the UGC zone list - ugcZoneList.append(ugcCode) - else: - - # If this area has not already been recorded - if ugcCode not in zoneList: - - # Add it to the zone list - 
zoneList.append(ugcCode) - - - #----------------------------------------------------------------------- - # Sort all lists to keep them ordered - as needed - - if len(segmentList) > 1: - segmentList.sort(self._sortBreakpoints) - - if len(ugcZoneList) > 1: - ugcZoneList.sort(self._sortBreakpoints) - - if len(zoneList) > 1: - zoneList.sort(self._sortBreakpoints) - - if len(islandList) > 1: - islandList.sort(self._sortBreakpoints) - - if len(waterList) > 1: - waterList.sort(self._sortBreakpoints) - - # Return the compiled lists - return (segmentList, ugcZoneList, zoneList, islandList, waterList) - - - #=========================================================================== - # Define a method to construct a processed hazard dictionary - - def _constructHazardDict(self, hazardPhenSig, filterEtn): - """TropicalHazards addition of _constructHazardDict. - - This method will produce a processed dictionary of tropical hazards - for easier use later on. - - Arguments: - hazardPhenSig -> dictionary of hazards keyed by phenomenon and - significance. Values are a list of all hazards - which share that same phenomenon and significance. - filterEtn -> Event Tracking Number of interest which will be used - to filter hazards for a particular product. 
- """ - - #----------------------------------------------------------------------- - # Get ready to populate the hazard dictionary for this storm - - hazardAreaDict = {} - hazardAreaDictKeyList = [] - - # Assume this is going to be the last product we issue for this storm - self._allCAN = True - - #======================================================================= - # Look for each of the tropical hazards in order - - for phenSig in [("SS","W"), ("HU","W"), ("SS","A"), ("HU","A"), - ("TR","W"), ("TR","A")]: - - if hazardPhenSig.has_key(phenSig): - print "="*90 - print "\n\tConstructing -> %s" % (repr(phenSig)) - print len(hazardPhenSig[phenSig]), hazardPhenSig[phenSig] - - # Look through all the sampled hazards - for phen in hazardPhenSig[phenSig]: - print "-"*90 - print "phen = %s" % (phen) - - # Set aside the headline for each action in this area - NEW = [] - CAN = [] - UPG = [] - EXA = [] - CON = [] - - #----------------------------------------------------------- - # If we have items for this particular phen.sig - # combination, and this is the storm we are after - - if phen["etn"] != filterEtn: - - print "\tWrong storm!", phen - - # Move on to the next one - continue - - - # Get the full VTEC code for this phenomena -# curHazardKey = (phen["act"], phen["key"]) - curHazardKey = (phen["act"], phen["phensig"]) - print "+++++ %s" % (repr(curHazardKey)) - - # If this action is anything other than "CAN", indicate it - # so we don't delete the JSON file for this storm at end - if phen["act"] != "CAN": - self._allCAN = False - - # If we do not have the ETN of this hazard - if re.search(":\d{4}$", curHazardKey[1]) is None: - - newHazardType = curHazardKey[1] + ":%d" % \ - (phen["etn"]) - else: - newHazardType = "" - - # If we need to adjust the hazard key - if len(newHazardType) > 0: - - # Make the changes - newCurHazardKey = (curHazardKey[0], newHazardType) - curHazardKey = newCurHazardKey - - # See if there are upgrades or replacements for this area - 
areaHazardList = self._hazards.getHazardList(phen["id"]) - - #----------------------------------------------------------- - # Construct a hazard key which incorporates all hazards - # and actions for this area - - tempHazardList = [curHazardKey] - for areaHazard in areaHazardList: - - #------------------------------------------------------- - # Record headline for each action we find - - if areaHazard["act"] == "NEW" and \ - areaHazard["hdln"] not in NEW: - NEW.append(areaHazard["hdln"]) - - elif areaHazard["act"] == "CAN" and \ - areaHazard["hdln"] not in CAN: - CAN.append(areaHazard["hdln"]) - - elif areaHazard["act"] == "UPG" and \ - areaHazard["hdln"] not in UPG: - UPG.append(areaHazard["hdln"]) - - elif areaHazard["act"] == "CON" and \ - areaHazard["hdln"] not in CON: - CON.append(areaHazard["hdln"]) - - elif areaHazard["act"] == "EXA" and \ - areaHazard["hdln"] not in EXA: - EXA.append(areaHazard["hdln"]) - - #------------------------------------------------------- - # Make a key for this particular hazard/action combo - - tempHazardKey = (areaHazard["act"], areaHazard["phensig"]) - - # If we do not have the ETN of this hazard - if re.search(":\d{4}$", tempHazardKey[1]) is None: - - newHazardType = tempHazardKey[1] + ":%d" % \ - (areaHazard["etn"]) - else: - newHazardType = "" - - # If we need to adjust the hazard key - if len(newHazardType) > 0: - - # Make the changes - newTempHazardKey = (tempHazardKey[0], newHazardType) - tempHazardKey = newTempHazardKey - - # If this is not already part of the hazard key - if tempHazardKey != curHazardKey: - - # Add this hazard/action combo to the list - tempHazardList.append(tempHazardKey) - - # Sort the keys so we have some consistency in ordering - tempHazardList.sort() - - # Convert the list of hazards for this area into a tuple - # so we can use it as a dictionary key - hazardKey = tuple(tempHazardList) - - #----------------------------------------------------------- - # Ensure we only group areas associated with 
same storm - # and hazard/action combos - - # If we already have an entry for this storm and hazard - if hazardAreaDict.has_key(hazardKey): - - # Add to what is already there - tempList = hazardAreaDict[hazardKey]["AREAS"] - tempList.append(phen["id"]) - hazardAreaDict[hazardKey]["AREAS"] = tempList - - # Otherwise, make a new entry - else: -## "AREAS":[phen["id"]], "HDLN":phen["hdln"], - hazardAreaDict[hazardKey] = { - "AREAS":[phen["id"]], "NEW":NEW, "CAN":CAN, - "CON":CON, "EXA":EXA, "UPG":UPG - } - - # Add this key the list - if hazardKey not in hazardAreaDictKeyList: - hazardAreaDictKeyList.append(hazardKey) - - print "\n\n", "+"*100 - print "in the middle" - print pp.pformat(hazardAreaDictKeyList), "\n" - print pp.pformat(hazardAreaDict) - - #----------------------------------------------------------------------- - # Keep track of segments and zones. We will need segments to - # ensure proper grouping and breakpoints. We will need UGC zones - # to get the proper VTEC action - - segments = [] - ugcZones = [] - zones = [] - islands = [] - water = [] - - #======================================================================= - # Organize all the impacted areas by type - - for key in hazardAreaDictKeyList: - - #------------------------------------------------------------------- - # Organize various areas associated with this hazard by type - - (segments, ugcZones, zones, islands, water) = \ - self._organizeAreasByType(hazardAreaDict[key]["AREAS"]) - - # Add these organized zones to the dictionary for this hazard - hazardAreaDict[key]["SEGMENTS"] = segments - hazardAreaDict[key]["UGCZONES"] = ugcZones - hazardAreaDict[key]["ZONES"] = zones - hazardAreaDict[key]["ISLANDS"] = islands - hazardAreaDict[key]["WATER"] = water - - print "+"*90 + "\nFinally!" 
- print "found -> %s" % (repr(hazardAreaDictKeyList)) - print "allCAN = %s" % (self._allCAN) - for key in hazardAreaDictKeyList: - print "-"*60 - print "%s\n\t%s" % (key, pp.pformat(hazardAreaDict[key])) - - # Return the completed hazard dictionary and sorted keys - return (hazardAreaDict, hazardAreaDictKeyList) - - - #=========================================================================== - # Process the shortcut hazard dictionary and organize a list of segments - # which are organized by similar type. - - def _constructSegmentList(self, hazardAreaDict, hazardAreaDictKeyList, - UStcv=0): - """TropicalHazards addition of _constructSegmentList. - - This method will produce create all the appropriate segments which - should go into a tropical hazard product, particularly a TCV. - - Arguments: - hazardAreaDict -> processed dictionary of hazards - hazardAreaDictKeyList -> list of keys of active hazards to process. - UStcv -> (optional) toggle to group islands within a single segment - {1 = Yes (default - USA TCV) / 0 = No} - """ - - # Initialize a list to hold the final group of segments - finalSegmentList = [] - - segmentDict = {"CUC": [], "HTC":[], "DOC":[], "USC":[]} - - #----------------------------------------------------------------------- - # Look at every hazard we found - - for hazardKey in hazardAreaDictKeyList: - - segmentList = [] - zoneList = [] - - #------------------------------------------------------------------- - # Double check to be sure Cuba and Hispaniola are not part of - # the mainland segment lists - - for curId in hazardAreaDict[hazardKey]["SEGMENTS"]: - - # If this is a Cuba or Hispaniola code - if curId[:3] in segmentDict.keys(): - -## print "Found segment -> curId =", curId - # Add this identifier to the dictionary - tempList = segmentDict[curId[:3]] - - # If this identifier is not already in the list - if curId not in tempList: - tempList.append(curId) - - # Store the updated list - segmentDict[curId[:3]] = tempList - - # Otherwise, - 
elif curId not in segmentList: - - segmentList.append(curId) - - #------------------------------------------------------------------- - # Double check to be sure Cuba and Hispaniola are not part of - # the mainland zone lists - - for curId in hazardAreaDict[hazardKey]["ZONES"]: - - # If this is a Cuba or Hispaniola code - if curId[:3] in segmentDict.keys(): - -## print "Found zone -> curId =", curId - # Add this identifier to the dictionary - tempList = segmentDict[curId[:3]] - - # If this identifier is not already in the list - if curId not in tempList: - tempList.append(curId) - - # Store the updated list - segmentDict[curId[:3]] = tempList - - # Otherwise, - elif curId not in segmentList: - - zoneList.append(curId) - - #------------------------------------------------------------------- - # Make a separate segment for Cuba - - if len(segmentDict["CUC"]) > 0: - - sortedZones = segmentDict["CUC"] - sortedZones.sort() - - record = ([], # Segments - [], # UGC zones - sortedZones, # Zones - [], # Islands - []) # Water - - # If we do not already have a record for these particular zones - if record not in finalSegmentList: - - print "\n\nCuba record", record - - # Add it now - finalSegmentList.append(record) - - #------------------------------------------------------------------- - # Make a separate segment for Hispaniola - - if len(segmentDict["HTC"]) > 0 or len(segmentDict["DOC"]) > 0: - - # Get a compined list of zones - combinedZones = segmentDict["HTC"] + segmentDict["DOC"] - - # Sort these zones - combinedZones.sort(self._sortBreakpoints) - - record = ([], # Segments - [], # UGC zones - combinedZones, # Zones - [], # Islands - []) # Water - - # If we do not already have a record for these particular zones - if record not in finalSegmentList: - - print "\n\nHispaniola record", record - - # Add it now - finalSegmentList.append(record) - -## #------------------------------------------------------------------- -## # Make a separate segment for the United States -## 
-## if len(segmentDict["USC"]) > 0: -## -## sortedZones = segmentDict["USC"] -## sortedZones.sort() -## -## record = ([], # Segments -## [], # UGC zones -## sortedZones, # Zones -## [], # Islands -## []) # Water -## -## # If we do not already have a record for these particular zones -## if record not in finalSegmentList: -## -## print "\n\nCuba record", record -## -## # Add it now -## finalSegmentList.append(record) - - #------------------------------------------------------------------- - # If we have filtered segments or zones - - if len(segmentList) != 0 and \ - len(segmentList) != len(hazardAreaDict[hazardKey]["SEGMENTS"]): - - # Update the record for this segment - curSegments = segmentList - - # Otherwise, keep segments we already have - else: - curSegments = hazardAreaDict[hazardKey]["SEGMENTS"] - - if len(zoneList) != 0 and \ - len(zoneList) != len(hazardAreaDict[hazardKey]["ZONES"]): - - # Update the record for this segment - curZones = zoneList - - # Otherwise, keep segments we already have - else: - curZones = hazardAreaDict[hazardKey]["ZONES"] - - - #------------------------------------------------------------------- - # Create a new segment for each island which was found - if we - # need to - - islandList = [] - - # USA TCV - if UStcv: - - # Make a new record for this island - record = ([], # Segments - [hazardAreaDict[hazardKey]["ISLANDS"]], # UGC zones - [], # Zones - [hazardAreaDict[hazardKey]["ISLANDS"]], # Islands - []) # Water - - # If we do not already have a record for this island - if record not in finalSegmentList: - - print "\n\nUSA Island record -> %s" % (record) - - # Add it now - finalSegmentList.append(record) - - # International TCV - else: - - for island in hazardAreaDict[hazardKey]["ISLANDS"]: - - # Make a new record for this island - record = ([], # Segments - [], # UGC zones - [island], # Zones - [island], # Islands - []) # Water - - # If we do not already have a record for this island - if record not in finalSegmentList: - -# 
print "\n\nIntl Island record -> %s" % (record) - - # Add it now - finalSegmentList.append(record) - - #=================================================================== - # Make a record to group all the various areas associated with a - # particular hazard - - record = (curSegments, - hazardAreaDict[hazardKey]["UGCZONES"], - curZones, - hazardAreaDict[hazardKey]["ISLANDS"], - hazardAreaDict[hazardKey]["WATER"]) - - # If we do not already have a record for these particular zones - if record not in finalSegmentList: - - # Add it now - finalSegmentList.append(record) - - print "\n\n", "*"*90 - print "finalSegmentList =", pp.pformat(finalSegmentList) - - # Return the final organized list - return finalSegmentList - - - #=========================================================================== - # Define a method to find missing zone codes for breakpoint segments, - # islands and water. - - def _findZoneCodes(self, areaList): - - # Get ready to find the zone codes - ugcZones = [] - zones = [] - - # Look at each area in the list - for areaId in areaList: - - # If there is an entry in the TropicalAreaDictionary for this area - if self._tropicalAreaDict.has_key(areaId): - - # Get the 'ugcCode' for this area - ugcCode = self._tropicalAreaDict[areaId]["ugcCode"] - - # If this is a generic zone code - if len(ugcCode) > 3 and ugcCode[2] == "C": - - # If we do not already have it in the zone list - if ugcCode not in zones: - - # Add it now - zones.append(ugcCode) - - # Otherwise, this must be a UGC code - else: - - # See if we need to expand it - if len(ugcCode) > 7: - - # Expand the UGC code into individual zones - expandUgcList = self.expandComplexUgc(ugcCode) - - # Add each UGC code to the list - if not already there - for ugc in expandUgcList: - - if ugc not in ugcZones: - ugcZones.append(ugc) - - # Otherwise, just add this UGC zone if it is not already there - elif ugcCode not in ugcZones: - ugcZones.append(ugc) - - # Sort these lists as needed - if len(ugcZones) > 
1: - ugcZones.sort(self._sortBreakpoints) - - if len(zones) > 1: - zones.sort(self._sortBreakpoints) - - if len(ugcZones) == 0: - return "There are no areas in this TCV" - - # Return the zones we found - return (ugcZones, zones) - - -#=============================================================================== -# Code to process StormInfo files - - - # same as HLSTCV_Common - def _synchronizeAdvisories(self): - # Retrieving a directory causes synching to occur. - # This code can throw an exception but don't catch it - # so that forecasters can be made aware of the issue. - file = LocalizationSupport.getLocalizationFile( - LocalizationSupport.CAVE_STATIC, - LocalizationSupport.SITE, self._site, - self._getAdvisoryPath()).getFile() - - return file - - # same as HLSTCV_Common - def _getLocalAdvisoryDirectoryPath(self): - file = self._synchronizeAdvisories() - path = file.getPath() - print "\n\nLooking for JSON files in '%s'" % (path) - - try: - os.makedirs(path) - except OSError as exception: - if exception.errno != errno.EEXIST: - raise - - return path - - def _getStormAdvisoryNames(self): - advisoryDirectoryPath = self._getLocalAdvisoryDirectoryPath() - filenames = os.listdir(advisoryDirectoryPath) - allAdvisories = filter(lambda filename: filename[-5:] == ".json", - filenames) - - stormAdvisories = filter(lambda filename: filename[:2] == "AT", - allAdvisories) - - return stormAdvisories - - def _loadAdvisory(self, advisoryName): - self._synchronizeAdvisories() - fileName = self._getAdvisoryFilename(advisoryName) - - try: - pythonDict = JsonSupport.loadFromJson(LocalizationSupport.CAVE_STATIC, - self._site, fileName) - - statFileName = os.path.join(os.environ["HOME"], "caveData", "etc", - "site", self._site, fileName) - lastModified = os.stat(statFileName).st_mtime - pythonDict["lastModified"] = lastModified - - print "File contents for %s:" % (fileName) - print pp.pformat(pythonDict) - - return pythonDict - - except Exception, e: - print "Load Exception for %s : 
%s" % (fileName, e) - return None - - def _saveAdvisory(self, advisoryName, advisoryDict): - self._synchronizeAdvisories() - fileName = self._getAdvisoryFilename(advisoryName) - - print "Saving %s to %s" % (advisoryName, fileName) - print "advisoryDict: %s" % (pp.pformat(advisoryDict)) - - try: - JsonSupport.saveToJson(LocalizationSupport.CAVE_STATIC, - self._site, fileName, advisoryDict) -# os.system('chmod 664 %s' % (fileName)) - except Exception as e: - print "Save Exception for %s : %s" % (fileName, e) - else: # No exceptions occurred - print "Wrote file contents for: %s" % (fileName) - - # Purposely allow this to throw - self._synchronizeAdvisories() - - def _deleteAdvisory(self): - - # Sync the CAVE localization store - self._synchronizeAdvisories() - fileName = self._getAdvisoryFilename(self._advisoryFileName) - - print "\n\nDeleting -> " + fileName - - try: - LocalizationSupport.deleteFile(LocalizationSupport.CAVE_STATIC, - LocalizationSupport.SITE, self._site, - fileName) - - except Exception, e: - print "Delete Exception for %s : %s" % (fileName, e) - return None - - - # same as HLSTCV_Common - def _getAdvisoryPath(self): - dataMgr = self._argDict["dataMgr"] - gfeMode = dataMgr.getOpMode().name() - - if gfeMode == "PRACTICE": - return os.path.join("gfe", "tcvAdvisories", "practice") - else: - return os.path.join("gfe", "tcvAdvisories") - - - def _getAdvisoryFilename(self, advisoryName): - advisoryFilename = os.path.join(self._getAdvisoryPath(), advisoryName) - - if not advisoryFilename.endswith(".json"): - advisoryFilename += ".json" - - return advisoryFilename + +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# TropicalHazards +# +# Author: Matthew H. 
Belk WFO BOX +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- -------------------------------------------- +# Jun 22, 2013 mbelk Initial creation +# Jul 14, 2016 mbelk Changes for 2017 season +# Sep 19, 2016 19293 randerso Initial baseline check in +# +######################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +import time, string, os, re, errno + +import Header +import JsonSupport, LocalizationSupport +import LogStream +import ModuleAccessor +import pprint as pp + + +class TropicalHazards(Header.Header): + + def __init__(self): + + Header.Header.__init__(self) + + #----------------------------------------------------------------------- + # Make a link to the TropicalAreaDictionary + + # Access the information for the breakpoint area(s) if available + self._tropicalAreaDict = \ + ModuleAccessor.ModuleAccessor().variable("AreaDictionary", + "AreaDictionary") + + # Then make a cache of sorted hash keys to this dictionary + self._tropicalAreaDictKeys = list(self._tropicalAreaDict.keys()) + self._tropicalAreaDictKeys.sort(self._sortBreakpoints) + + +################################################################################ +# New common utility methods for tropical hazard formatters +################################################################################ + + def allowedHazards(self): + """TropicalHazards version of GenericHazards.allowedHazards. + + This method defines the allowed hazards for tropical hazard products. 
+ """ + tropicalActions = ["NEW", "EXA", "CAN", "CON"] + return [ + ('HU.W',tropicalActions,'Hurricane'), + ('HU.A',tropicalActions,'Hurricane'), + ('SS.W',tropicalActions,'Surge'), + ('SS.A',tropicalActions,'Surge'), + ('TR.W',tropicalActions,'Tropical'), + ('TR.A',tropicalActions,'Tropical'), + ] + + +################################################################################ +# New common utility methods for tropical hazard formatters +################################################################################ + + #=========================================================================== + # Define a method to determine which WFOs are impacted based on a list of + # affected edit area ID's + + def _getAffectedWFOs(self, idList): + """TropicalHazards addition of _getAffectedWFOs. + + This method will produce a list of NWS WFOs impacted by tropical + hazards with the specified identifiers. + + Arguments: + idList -> list TropicalAreaDictionary keys + """ + + # Look at each breakpoint segment in this list + for id in idList: + + # Get the AreaDictionary entry for this segment + if id in self._tropicalAreaDict: + entry = self._tropicalAreaDict[id] + else: + LogStream.logProblem(\ + "AreaDictionary missing definition for [" + id + "].") + continue + + # Get the WFO responsible for this segment - always use the WFO of the + # starting point to avoid including WFOs unnecessarily + if 'wfo' in entry: + wfo = entry['wfo'].strip() + else: + LogStream.logProblem(\ + "AreaDictionary missing WFO definition for [" + id + "].") + continue + + # If we have a valid WFO identifier, and it is not already noted in the + # impacted WFO list + if len(wfo) > 0 and wfo not in self._wfoList: + + # Add this WFO to the list of impacted WFOs + self._wfoList.append(wfo) + + + #=========================================================================== + # Define a method to format a breakpoint text line within a TCV segment + + def _formatTCVline(self, entry, type="start"): + 
"""TropicalHazards addition of _formatTCVline. + + This method will produce a list of NWS WFOs impacted by tropical + hazards with the specified identifiers. + + Arguments: + entry -> TropicalAreaDictionary entry for an edit area + type -> type of breakpoint to produce (optional, defaults to start) + """ + + # If this is not an ending point + if type != "end": + + # Get the information we need + point = entry["startBreakpoint"].strip() + lat = entry["startLat"].strip() + lon = entry["startLon"].strip() + state = entry["startState"].strip() + + # Otherwise - get the end point info for this segment + else: + + # Get the information we need + point = entry["endBreakpoint"].strip() + lat = entry["endLat"].strip() + lon = entry["endLon"].strip() + state = entry["endState"].strip() + + + # Clean up the state so there are no spaces or dashes + state = re.sub("[ _-]+", "", state) + + #----------------------------------------------------------------------- + # If this is not the border of a state or country + + if re.search("(?i)border", point) is None: + + # Add the state/country + point += "-" + state + + #----------------------------------------------------------------------- + # Append the appropriate hemisphere of the latitude + + if lat.find("-") != -1: + lat += "S" + lat = lat.replace("-", "") + else: + lat += "N" + + #----------------------------------------------------------------------- + # Append the appropriate hemisphere of the longitude + + if lon.find("-") != -1: + lon += "W" + lon = lon.replace("-", "") + else: + lon += "E" + + #----------------------------------------------------------------------- + # Now construct the final formatted line + + text = "%-36s%6s%7s\n" % (re.sub("[ _]+", "-", point) + " ", lat, lon) + + # Return the text + return text + + + #=========================================================================== + # Define a method to sort breakpoint record keys + + def _sortBreakpoints(self, a, b): + """TropicalHazards addition of 
_sortBreakpoints. + + This method will produce a sorted list of breakpoint segments. This + ensures the order of contiguous breakpoint segments + """ + + # Make a list of valid string identifier parts + validTypes = [ + "LN", # mainland segments - any country + "KEY", # Florida Keys + "ISL", # islands + "CUBA", # Cuba + "HISP", # Hispaniola + "NAI", # North Atlantic islands + "WTDE", # Deleware Bay + "WTTP", # Tidal Potomac + "WTCP", # Chesapeake Bay + "WTPT", # Generic water points + + # Zones used by VTEC + "GYC", # Guyana + "VEC", # Venezuela + "COC", # Colombia + "PAC", # Panama + "CRC", # Costa Rica + "NIC", # Nicaragua + "HNC", # Honduras + "GTC", # Guatemala + "BZC", # Belize + "MXC", # Mexico + "USC", # United States + "CNC", # Canada + "KEC", # Dry Tortugas + "AWC", # Aruba + "CWC", # Curacao + "TTC", # Trinidad and Tobago + "BBC", # Barbados + "LCC", # St. Lucia + "MQC", # France - Caribbean + "AGC", # Antigua and Barbuda + "BSC", # Bahamas + "BMC", # Bermuda + "JMC", # Jamaica + "KYC", # Cayman Islands + "CUC", # Cuba + "DOC", # Dominican Republic + "HTC", # Haiti + "PMC", # France - North Atlantic + "LOC", # Lake_Okeechobee + "FBC", # Florida Bay + "PSC", # Pamlico Sound + "ASC", # Albemarle Sound + "TXZ", # Texas + "LAZ", # Louisiana + "MSZ", # Mississippi + "ALZ", # Alabama + "FLZ", # Florida + "GAZ", # Georgia + "SCZ", # South Carolina + "NCZ", # North Carolina + "VAZ", # Virginia + "MDZ", # Maryland + "DCZ", # District of Columbia + "DEZ", # Deleware + "NJZ", # New Jersey + "NYZ", # New York + "CTZ", # Connecticut + "RIZ", # Rhode Island + "MAZ", # Massachusetts + "NHZ", # New Hampshire + "MEZ", # Maine + "NMZ", # New Mexico + "ARZ", # Arkansas + "OKZ", # Oklahoma + "MOZ", # Missouri + "TNZ", # Tennessee + "WVZ", # West Virginia + "PAZ", # Pennsylvania + "VTZ", # Vermont + "PRZ", # Puerto Rico + "VIZ", # U.S. 
Virgin Islands + "RE", # General edit area collection + ] + + # Get the first part of each identifier + aSeg = a.split("_")[0] + bSeg = b.split("_")[0] + + # Get ready to split these identifiers into alpha and numeric parts + aSegType = "" + bSegType = "" + aSegNum = "" + bSegNum = "" + + # Start with the alpha components + for c in aSeg: + if c in string.letters: + aSegType = aSegType + c + + for c in bSeg: + if c in string.letters: + bSegType = bSegType + c + + # Now get the numeric components + for c in aSeg: + if c in string.digits: + aSegNum = aSegNum + c + + for c in bSeg: + if c in string.digits: + bSegNum = bSegNum + c + + # Determine the order of these areas based on segment type first + aTypeIndex = validTypes.index(aSegType) + try: + bTypeIndex = validTypes.index(bSegType) + except: + bTypeIndex = aTypeIndex + bSegNum = aSegNum + + # Try to determine order based on segment type + if aTypeIndex < bTypeIndex: + return -1 + elif bTypeIndex < aTypeIndex: + return 1 + + # If the segment types are the same, use the numeric component last + if int(aSegNum) < int(bSegNum): + return -1 + elif int(bSegNum) < int(aSegNum): + return 1 + else: +## print "ERROR!!!!!!! Segment names are equal!!!!!!!" + return 0 + + + #=========================================================================== + # Define a method to organize breakpoint segment zones by government + + def _organizeZonesByGovt(self): + """TropicalHazards addition of _organizeZonesByGovt. + + This method will produce a dictionary of sorted lists where each list + contains all the zone identifiers for which that government is the + responsible entity. 
+ """ + + # Create a dictionary to hold all zones managed by a government + zoneDict = {} + + # Also make a list to hold governments in the order we find them + govtList = [] + + # Initialize some lists for handling zone codes + ugcList = [] + ugcListUsa = [] + + #----------------------------------------------------------------------- + # Look at each edit area we have + + for key in self._tropicalAreaDictKeys: + + # Get the type of this zone + zoneId = key.split("_")[0] + + zoneType = "" + + # Get all the letters from this zone ID + for c in zoneId: + if not c.isdigit(): + zoneType += c + + # If this is primary representation of segment zones + # (this is to avoid duplication with the individual zones) + # Make an exception for US zones though + if zoneType in ["LN", "KEY", "CUBA", "HISP", "ISL", "NAI", "WTDE", + "WTTP", "WTCP", "WTPT", "USC"]: + + # Get the government responsible for this zone + zoneGovt = self._tropicalAreaDict[key]["hazardIssuer"].strip() + + # Handle the case of the USA, which receives no attribution + if zoneGovt.strip() == "": + zoneGovt = "US" + + # If we do not already know about this government + if zoneGovt not in govtList: + + # Add it now + govtList.append(zoneGovt) + + #--------------------------------------------------------------- + # Get the impacted zones for this segment + + ugcCode = self._tropicalAreaDict[key]["ugcCode"].strip() + + #--------------------------------------------------------------- + # Expand UGC code if there is more than one zone represented + + if len(ugcCode.split("-")) > 1 or ugcCode.find(">") != -1: + ugcList = self.expandComplexUgc(ugcCode) + + # Otherwise, just use this single zone + else: + ugcList = [ugcCode] + + #--------------------------------------------------------------- + # Expand UGC code if there is more than one zone represented + + try: + ugcCodeUsa = self._tropicalAreaDict[key]["ugcCodeUsa"].strip() + + if len(ugcCodeUsa.split("-")) > 1 or ugcCodeUsa.find(">") != -1: + ugcListUsa = 
self.expandComplexUgc(ugcCodeUsa) + + # Otherwise, just use this single zone + else: + ugcListUsa = [ugcCodeUsa] + + except: + ugcListUsa = [] + + #--------------------------------------------------------------- + # If we already have an entry for this government + + if zoneGovt in zoneDict: + + # Get the zones already associated + curZoneList = zoneDict[zoneGovt] + + # Otherwise make a new list for this governement + else: + curZoneList = [] + + #--------------------------------------------------------------- + # Now add all the new zones + + for ugc in ugcList + ugcListUsa: + + # If we don't already have this ugc + if ugc not in curZoneList and len(ugc.strip()) > 0: + + # Add it + curZoneList.append(ugc) + +## print curZoneList + + # Sort the UGC list + curZoneList.sort() + + # Store the list of zones for this government + zoneDict[zoneGovt] = curZoneList + + # Always ensure the USA comes first + if "US" in govtList: + govtList.remove("US") + finalGovtList = ["US"] + govtList + + # Return the completed dictionary + return zoneDict, finalGovtList + + + #=========================================================================== + # Define a method to filter a segment list by government + + def _filterAreaListByGovernment(self, govtList, areaList): + """TropicalHazards addition of _filterAreaListByGovernment. + + This method will produce a list of all zones managed by a particular + government contained within the specified area list. 
+ + Arguments: + govtList -> list of identifiers managed by a government + areaList -> list of edit area identifiers to process + """ + + # Initialize a new list + newList = [] + + # Look through each edit area + for area in areaList: + + # If this edit area is managed by this government + if area in govtList: + + # Add it to the filtered list + newList.append(area) + + # Return the filtered list + return newList + + + #=========================================================================== + # Define a method to filter a segment list by government + + def _organizeAreasByType(self, areaList): + """TropicalHazards addition of _organizeAreasByType. + + This method will separate a list of areas into one of four types: + mainland segments, UGC zones, zones and islands. These will be stored + in the processed hazard dictionary for easier access later. + + Arguments: + areaList -> list of edit area identifiers to process + """ + + # Initialize both lists + segmentList = [] + ugcZoneList = [] + zoneList = [] + islandList = [] + waterList = [] + + # Look through each edit area + for area in areaList: + + # Assume this is a "land" area + areaType = "land" + + # If the TropicalAreaDictionary has a record for this area + if area in self._tropicalAreaDict: + + # Get the type of this area - if we can + if "segmentType" in self._tropicalAreaDict[area]: + areaType = self._tropicalAreaDict[area]["segmentType"] + + # Get the type of this area + try: + usaZoneList = self.expandComplexUgc( + self._tropicalAreaDict[area]["ugcCodeUsa"]) + except: + usaZoneList = [] + + + #--------------------------------------------------------------- + # If this is an island + + if areaType == "island": + + # If we do not already have a record for this area + if area not in islandList: + + # Add it to the list of islands + islandList.append(area) + + #--------------------------------------------------------------- + # Otherwise, if this is a water area + + elif areaType == "water": + + # If we do 
not already have a record for this area + if area not in waterList: + + # Add it to the list of islands + waterList.append(area) + + #--------------------------------------------------------------- + # Otherwise, organize the land-based areas + + else: + + # If this is a zone-based identifier + if len(area) == 6 and area[2] in ["Z", "C"]: + + # Place this zone into the proper list + if area[2] == "Z": + + # If this area has not already been recorded + if area not in ugcZoneList: + + # Add it to the UGC zone list + ugcZoneList.append(area) + else: + + # If this area has not already been recorded + if area not in zoneList: + + # Add it to the zone list + zoneList.append(area) + + # If there any zones associated with this segment + if len(usaZoneList) > 0: + + for usZone in usaZoneList: + + if usZone not in ugcZoneList: + + # Add it to the UGC zone list + ugcZoneList.append(usZone) + + # Otherwise, this is a breakpoint segment + elif area not in segmentList: + segmentList.append(area) + + # Get any UGC codes associated with this segment + areaUgc = self._tropicalAreaDict[area]["ugcCode"] + + # If there is more than 1 zone associated with segment + if len(areaUgc) > 7: + + # Expand the UGC codes + ugcList = self.expandComplexUgc(areaUgc) + + # Otherwise, make a simpler list so we can proceed + else: + ugcList = [areaUgc] + + #------------------------------------------------------- + # Add each zone code into the list as needed + + for ugcCode in ugcList: + + # Clean up any extra characters + ugcCode = ugcCode.replace("-", "") + + # If this is a zone-based identifier + if len(ugcCode) >= 6 and ugcCode[2] in ["Z", "C"]: + + # Place this zone into the proper list + if ugcCode[2] == "Z": + + # If this area has not already been recorded + if ugcCode not in ugcZoneList: + + # Add it to the UGC zone list + ugcZoneList.append(ugcCode) + else: + + # If this area has not already been recorded + if ugcCode not in zoneList: + + # Add it to the zone list + zoneList.append(ugcCode) 
+ + + #----------------------------------------------------------------------- + # Sort all lists to keep them ordered - as needed + + if len(segmentList) > 1: + segmentList.sort(self._sortBreakpoints) + + if len(ugcZoneList) > 1: + ugcZoneList.sort(self._sortBreakpoints) + + if len(zoneList) > 1: + zoneList.sort(self._sortBreakpoints) + + if len(islandList) > 1: + islandList.sort(self._sortBreakpoints) + + if len(waterList) > 1: + waterList.sort(self._sortBreakpoints) + + # Return the compiled lists + return (segmentList, ugcZoneList, zoneList, islandList, waterList) + + + #=========================================================================== + # Define a method to construct a processed hazard dictionary + + def _constructHazardDict(self, hazardPhenSig, filterEtn): + """TropicalHazards addition of _constructHazardDict. + + This method will produce a processed dictionary of tropical hazards + for easier use later on. + + Arguments: + hazardPhenSig -> dictionary of hazards keyed by phenomenon and + significance. Values are a list of all hazards + which share that same phenomenon and significance. + filterEtn -> Event Tracking Number of interest which will be used + to filter hazards for a particular product. 
+ """ + + #----------------------------------------------------------------------- + # Get ready to populate the hazard dictionary for this storm + + hazardAreaDict = {} + hazardAreaDictKeyList = [] + + # Assume this is going to be the last product we issue for this storm + self._allCAN = True + + #======================================================================= + # Look for each of the tropical hazards in order + + for phenSig in [("SS","W"), ("HU","W"), ("SS","A"), ("HU","A"), + ("TR","W"), ("TR","A")]: + + if phenSig in hazardPhenSig: + print("="*90) + print("\n\tConstructing -> %s" % (repr(phenSig))) + print(len(hazardPhenSig[phenSig]), hazardPhenSig[phenSig]) + + # Look through all the sampled hazards + for phen in hazardPhenSig[phenSig]: + print("-"*90) + print("phen = %s" % (phen)) + + # Set aside the headline for each action in this area + NEW = [] + CAN = [] + UPG = [] + EXA = [] + CON = [] + + #----------------------------------------------------------- + # If we have items for this particular phen.sig + # combination, and this is the storm we are after + + if phen["etn"] != filterEtn: + + print("\tWrong storm!", phen) + + # Move on to the next one + continue + + + # Get the full VTEC code for this phenomena +# curHazardKey = (phen["act"], phen["key"]) + curHazardKey = (phen["act"], phen["phensig"]) + print("+++++ %s" % (repr(curHazardKey))) + + # If this action is anything other than "CAN", indicate it + # so we don't delete the JSON file for this storm at end + if phen["act"] != "CAN": + self._allCAN = False + + # If we do not have the ETN of this hazard + if re.search(":\d{4}$", curHazardKey[1]) is None: + + newHazardType = curHazardKey[1] + ":%d" % \ + (phen["etn"]) + else: + newHazardType = "" + + # If we need to adjust the hazard key + if len(newHazardType) > 0: + + # Make the changes + newCurHazardKey = (curHazardKey[0], newHazardType) + curHazardKey = newCurHazardKey + + # See if there are upgrades or replacements for this area + 
areaHazardList = self._hazards.getHazardList(phen["id"]) + + #----------------------------------------------------------- + # Construct a hazard key which incorporates all hazards + # and actions for this area + + tempHazardList = [curHazardKey] + for areaHazard in areaHazardList: + + #------------------------------------------------------- + # Record headline for each action we find + + if areaHazard["act"] == "NEW" and \ + areaHazard["hdln"] not in NEW: + NEW.append(areaHazard["hdln"]) + + elif areaHazard["act"] == "CAN" and \ + areaHazard["hdln"] not in CAN: + CAN.append(areaHazard["hdln"]) + + elif areaHazard["act"] == "UPG" and \ + areaHazard["hdln"] not in UPG: + UPG.append(areaHazard["hdln"]) + + elif areaHazard["act"] == "CON" and \ + areaHazard["hdln"] not in CON: + CON.append(areaHazard["hdln"]) + + elif areaHazard["act"] == "EXA" and \ + areaHazard["hdln"] not in EXA: + EXA.append(areaHazard["hdln"]) + + #------------------------------------------------------- + # Make a key for this particular hazard/action combo + + tempHazardKey = (areaHazard["act"], areaHazard["phensig"]) + + # If we do not have the ETN of this hazard + if re.search(":\d{4}$", tempHazardKey[1]) is None: + + newHazardType = tempHazardKey[1] + ":%d" % \ + (areaHazard["etn"]) + else: + newHazardType = "" + + # If we need to adjust the hazard key + if len(newHazardType) > 0: + + # Make the changes + newTempHazardKey = (tempHazardKey[0], newHazardType) + tempHazardKey = newTempHazardKey + + # If this is not already part of the hazard key + if tempHazardKey != curHazardKey: + + # Add this hazard/action combo to the list + tempHazardList.append(tempHazardKey) + + # Sort the keys so we have some consistency in ordering + tempHazardList.sort() + + # Convert the list of hazards for this area into a tuple + # so we can use it as a dictionary key + hazardKey = tuple(tempHazardList) + + #----------------------------------------------------------- + # Ensure we only group areas associated with 
same storm + # and hazard/action combos + + # If we already have an entry for this storm and hazard + if hazardKey in hazardAreaDict: + + # Add to what is already there + tempList = hazardAreaDict[hazardKey]["AREAS"] + tempList.append(phen["id"]) + hazardAreaDict[hazardKey]["AREAS"] = tempList + + # Otherwise, make a new entry + else: +## "AREAS":[phen["id"]], "HDLN":phen["hdln"], + hazardAreaDict[hazardKey] = { + "AREAS":[phen["id"]], "NEW":NEW, "CAN":CAN, + "CON":CON, "EXA":EXA, "UPG":UPG + } + + # Add this key the list + if hazardKey not in hazardAreaDictKeyList: + hazardAreaDictKeyList.append(hazardKey) + + print("\n\n", "+"*100) + print("in the middle") + print(pp.pformat(hazardAreaDictKeyList), "\n") + print(pp.pformat(hazardAreaDict)) + + #----------------------------------------------------------------------- + # Keep track of segments and zones. We will need segments to + # ensure proper grouping and breakpoints. We will need UGC zones + # to get the proper VTEC action + + segments = [] + ugcZones = [] + zones = [] + islands = [] + water = [] + + #======================================================================= + # Organize all the impacted areas by type + + for key in hazardAreaDictKeyList: + + #------------------------------------------------------------------- + # Organize various areas associated with this hazard by type + + (segments, ugcZones, zones, islands, water) = \ + self._organizeAreasByType(hazardAreaDict[key]["AREAS"]) + + # Add these organized zones to the dictionary for this hazard + hazardAreaDict[key]["SEGMENTS"] = segments + hazardAreaDict[key]["UGCZONES"] = ugcZones + hazardAreaDict[key]["ZONES"] = zones + hazardAreaDict[key]["ISLANDS"] = islands + hazardAreaDict[key]["WATER"] = water + + print("+"*90 + "\nFinally!") + print("found -> %s" % (repr(hazardAreaDictKeyList))) + print("allCAN = %s" % (self._allCAN)) + for key in hazardAreaDictKeyList: + print("-"*60) + print("%s\n\t%s" % (key, pp.pformat(hazardAreaDict[key]))) + + # 
Return the completed hazard dictionary and sorted keys + return (hazardAreaDict, hazardAreaDictKeyList) + + + #=========================================================================== + # Process the shortcut hazard dictionary and organize a list of segments + # which are organized by similar type. + + def _constructSegmentList(self, hazardAreaDict, hazardAreaDictKeyList, + UStcv=0): + """TropicalHazards addition of _constructSegmentList. + + This method will produce create all the appropriate segments which + should go into a tropical hazard product, particularly a TCV. + + Arguments: + hazardAreaDict -> processed dictionary of hazards + hazardAreaDictKeyList -> list of keys of active hazards to process. + UStcv -> (optional) toggle to group islands within a single segment + {1 = Yes (default - USA TCV) / 0 = No} + """ + + # Initialize a list to hold the final group of segments + finalSegmentList = [] + + segmentDict = {"CUC": [], "HTC":[], "DOC":[], "USC":[]} + + #----------------------------------------------------------------------- + # Look at every hazard we found + + for hazardKey in hazardAreaDictKeyList: + + segmentList = [] + zoneList = [] + + #------------------------------------------------------------------- + # Double check to be sure Cuba and Hispaniola are not part of + # the mainland segment lists + + for curId in hazardAreaDict[hazardKey]["SEGMENTS"]: + + # If this is a Cuba or Hispaniola code + if curId[:3] in list(segmentDict.keys()): + +## print "Found segment -> curId =", curId + # Add this identifier to the dictionary + tempList = segmentDict[curId[:3]] + + # If this identifier is not already in the list + if curId not in tempList: + tempList.append(curId) + + # Store the updated list + segmentDict[curId[:3]] = tempList + + # Otherwise, + elif curId not in segmentList: + + segmentList.append(curId) + + #------------------------------------------------------------------- + # Double check to be sure Cuba and Hispaniola are not part of + # 
the mainland zone lists + + for curId in hazardAreaDict[hazardKey]["ZONES"]: + + # If this is a Cuba or Hispaniola code + if curId[:3] in list(segmentDict.keys()): + +## print "Found zone -> curId =", curId + # Add this identifier to the dictionary + tempList = segmentDict[curId[:3]] + + # If this identifier is not already in the list + if curId not in tempList: + tempList.append(curId) + + # Store the updated list + segmentDict[curId[:3]] = tempList + + # Otherwise, + elif curId not in segmentList: + + zoneList.append(curId) + + #------------------------------------------------------------------- + # Make a separate segment for Cuba + + if len(segmentDict["CUC"]) > 0: + + sortedZones = segmentDict["CUC"] + sortedZones.sort() + + record = ([], # Segments + [], # UGC zones + sortedZones, # Zones + [], # Islands + []) # Water + + # If we do not already have a record for these particular zones + if record not in finalSegmentList: + + print("\n\nCuba record", record) + + # Add it now + finalSegmentList.append(record) + + #------------------------------------------------------------------- + # Make a separate segment for Hispaniola + + if len(segmentDict["HTC"]) > 0 or len(segmentDict["DOC"]) > 0: + + # Get a compined list of zones + combinedZones = segmentDict["HTC"] + segmentDict["DOC"] + + # Sort these zones + combinedZones.sort(self._sortBreakpoints) + + record = ([], # Segments + [], # UGC zones + combinedZones, # Zones + [], # Islands + []) # Water + + # If we do not already have a record for these particular zones + if record not in finalSegmentList: + + print("\n\nHispaniola record", record) + + # Add it now + finalSegmentList.append(record) + +## #------------------------------------------------------------------- +## # Make a separate segment for the United States +## +## if len(segmentDict["USC"]) > 0: +## +## sortedZones = segmentDict["USC"] +## sortedZones.sort() +## +## record = ([], # Segments +## [], # UGC zones +## sortedZones, # Zones +## [], # Islands 
+## []) # Water +## +## # If we do not already have a record for these particular zones +## if record not in finalSegmentList: +## +## print "\n\nCuba record", record +## +## # Add it now +## finalSegmentList.append(record) + + #------------------------------------------------------------------- + # If we have filtered segments or zones + + if len(segmentList) != 0 and \ + len(segmentList) != len(hazardAreaDict[hazardKey]["SEGMENTS"]): + + # Update the record for this segment + curSegments = segmentList + + # Otherwise, keep segments we already have + else: + curSegments = hazardAreaDict[hazardKey]["SEGMENTS"] + + if len(zoneList) != 0 and \ + len(zoneList) != len(hazardAreaDict[hazardKey]["ZONES"]): + + # Update the record for this segment + curZones = zoneList + + # Otherwise, keep segments we already have + else: + curZones = hazardAreaDict[hazardKey]["ZONES"] + + + #------------------------------------------------------------------- + # Create a new segment for each island which was found - if we + # need to + + islandList = [] + + # USA TCV + if UStcv: + + # Make a new record for this island + record = ([], # Segments + [hazardAreaDict[hazardKey]["ISLANDS"]], # UGC zones + [], # Zones + [hazardAreaDict[hazardKey]["ISLANDS"]], # Islands + []) # Water + + # If we do not already have a record for this island + if record not in finalSegmentList: + + print("\n\nUSA Island record -> %s" % (record)) + + # Add it now + finalSegmentList.append(record) + + # International TCV + else: + + for island in hazardAreaDict[hazardKey]["ISLANDS"]: + + # Make a new record for this island + record = ([], # Segments + [], # UGC zones + [island], # Zones + [island], # Islands + []) # Water + + # If we do not already have a record for this island + if record not in finalSegmentList: + +# print "\n\nIntl Island record -> %s" % (record) + + # Add it now + finalSegmentList.append(record) + + #=================================================================== + # Make a record to group 
all the various areas associated with a + # particular hazard + + record = (curSegments, + hazardAreaDict[hazardKey]["UGCZONES"], + curZones, + hazardAreaDict[hazardKey]["ISLANDS"], + hazardAreaDict[hazardKey]["WATER"]) + + # If we do not already have a record for these particular zones + if record not in finalSegmentList: + + # Add it now + finalSegmentList.append(record) + + print("\n\n", "*"*90) + print("finalSegmentList =", pp.pformat(finalSegmentList)) + + # Return the final organized list + return finalSegmentList + + + #=========================================================================== + # Define a method to find missing zone codes for breakpoint segments, + # islands and water. + + def _findZoneCodes(self, areaList): + + # Get ready to find the zone codes + ugcZones = [] + zones = [] + + # Look at each area in the list + for areaId in areaList: + + # If there is an entry in the TropicalAreaDictionary for this area + if areaId in self._tropicalAreaDict: + + # Get the 'ugcCode' for this area + ugcCode = self._tropicalAreaDict[areaId]["ugcCode"] + + # If this is a generic zone code + if len(ugcCode) > 3 and ugcCode[2] == "C": + + # If we do not already have it in the zone list + if ugcCode not in zones: + + # Add it now + zones.append(ugcCode) + + # Otherwise, this must be a UGC code + else: + + # See if we need to expand it + if len(ugcCode) > 7: + + # Expand the UGC code into individual zones + expandUgcList = self.expandComplexUgc(ugcCode) + + # Add each UGC code to the list - if not already there + for ugc in expandUgcList: + + if ugc not in ugcZones: + ugcZones.append(ugc) + + # Otherwise, just add this UGC zone if it is not already there + elif ugcCode not in ugcZones: + ugcZones.append(ugc) + + # Sort these lists as needed + if len(ugcZones) > 1: + ugcZones.sort(self._sortBreakpoints) + + if len(zones) > 1: + zones.sort(self._sortBreakpoints) + + if len(ugcZones) == 0: + return "There are no areas in this TCV" + + # Return the zones we found + 
return (ugcZones, zones) + + +#=============================================================================== +# Code to process StormInfo files - + + # same as HLSTCV_Common + def _synchronizeAdvisories(self): + # Retrieving a directory causes synching to occur. + # This code can throw an exception but don't catch it + # so that forecasters can be made aware of the issue. + file = LocalizationSupport.getLocalizationFile( + LocalizationSupport.CAVE_STATIC, + LocalizationSupport.SITE, self._site, + self._getAdvisoryPath()).getFile() + + return file + + # same as HLSTCV_Common + def _getLocalAdvisoryDirectoryPath(self): + file = self._synchronizeAdvisories() + path = file.getPath() + print("\n\nLooking for JSON files in '%s'" % (path)) + + try: + os.makedirs(path) + except OSError as exception: + if exception.errno != errno.EEXIST: + raise + + return path + + def _getStormAdvisoryNames(self): + advisoryDirectoryPath = self._getLocalAdvisoryDirectoryPath() + filenames = os.listdir(advisoryDirectoryPath) + allAdvisories = [filename for filename in filenames if filename[-5:] == ".json"] + + stormAdvisories = [filename for filename in allAdvisories if filename[:2] == "AT"] + + return stormAdvisories + + def _loadAdvisory(self, advisoryName): + self._synchronizeAdvisories() + fileName = self._getAdvisoryFilename(advisoryName) + + try: + pythonDict = JsonSupport.loadFromJson(LocalizationSupport.CAVE_STATIC, + self._site, fileName) + + statFileName = os.path.join(os.environ["HOME"], "caveData", "etc", + "site", self._site, fileName) + lastModified = os.stat(statFileName).st_mtime + pythonDict["lastModified"] = lastModified + + print("File contents for %s:" % (fileName)) + print(pp.pformat(pythonDict)) + + return pythonDict + + except Exception as e: + print("Load Exception for %s : %s" % (fileName, e)) + return None + + def _saveAdvisory(self, advisoryName, advisoryDict): + self._synchronizeAdvisories() + fileName = self._getAdvisoryFilename(advisoryName) + + print("Saving 
%s to %s" % (advisoryName, fileName)) + print("advisoryDict: %s" % (pp.pformat(advisoryDict))) + + try: + JsonSupport.saveToJson(LocalizationSupport.CAVE_STATIC, + self._site, fileName, advisoryDict) +# os.system('chmod 664 %s' % (fileName)) + except Exception as e: + print("Save Exception for %s : %s" % (fileName, e)) + else: # No exceptions occurred + print("Wrote file contents for: %s" % (fileName)) + + # Purposely allow this to throw + self._synchronizeAdvisories() + + def _deleteAdvisory(self): + + # Sync the CAVE localization store + self._synchronizeAdvisories() + fileName = self._getAdvisoryFilename(self._advisoryFileName) + + print("\n\nDeleting -> " + fileName) + + try: + LocalizationSupport.deleteFile(LocalizationSupport.CAVE_STATIC, + LocalizationSupport.SITE, self._site, + fileName) + + except Exception as e: + print("Delete Exception for %s : %s" % (fileName, e)) + return None + + + # same as HLSTCV_Common + def _getAdvisoryPath(self): + dataMgr = self._argDict["dataMgr"] + gfeMode = dataMgr.getOpMode().name() + + if gfeMode == "PRACTICE": + return os.path.join("gfe", "tcvAdvisories", "practice") + else: + return os.path.join("gfe", "tcvAdvisories") + + + def _getAdvisoryFilename(self, advisoryName): + advisoryFilename = os.path.join(self._getAdvisoryPath(), advisoryName) + + if not advisoryFilename.endswith(".json"): + advisoryFilename += ".json" + + return advisoryFilename diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/UserInfo.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/UserInfo.py index e82a49629a..953791c251 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/UserInfo.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/UserInfo.py @@ -1,121 +1,121 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without 
technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# UserInfo.py -# -# getForecasterName -# Return the forecaster name that should be added to the end of the product -# -# Author: fachorn -# -# History: -# F.Achorn/OPC 03/08/13 initial creation -# F.Achorn/OPC 09/24/13 modified to get the debug level corectly from argDict -# ---------------------------------------------------------------------------- - -import TextRules -import SampleAnalysis -import time, string -import os - -class UserInfo(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def forecasterDict(self): - self.debug_print("Debug: forecasterDict in UserInfo") - return { - "clandsea": "Landsea", - "mnelson": "Nelson", - "sstripli": "Stripling", - "echrist": "Christensen", - "jlewitsk": "Lewitsky", - "alevine": "AL", - "jaguirre": "Aguirre", - "grubio": "GR", - "dmundell": "Mundell", - "cmcelroy": "McElroy", - "alatto": "Latto", - "mformosa": "Formosa", - "nramos": "Ramos", - "mtichace": "MT", - "erivera": "ERA", - "jcangial": "Cangialosi", - "sstewart": "Stewart", - "pmanougi": "Manougian", - "dfigursk": "DJF", - "Jsienkie": "Sienkiewicz", - "fachorn": "FAchorn", - "cjuckins": "Juckins", - "jclark": "Clark", - "tcollins": "Collins", - "jkells": "Kells", - "dmills": "Mills", - "dscovil": "Scovil", - "kachorn": "KAchorn", - "gbancrof": "Bancroft", - "kbell": "Bell", - "tholley": "Holley", - "mhuffman": "Huffman", - "dkosier": "Kosier", - "jkrekele": "Krekeler", - "fmusonda": "Musonda", - "jkrekele": "Krekeler", - "jnolt": "Nolt", - "breinhar": "Reinhart", - "jkrekele": "Krekeler", - "mrowland": "Rowland", - "tshaw": "Shaw", - "lsommerv": "Sommerville", - "elau": "ELau", - "cbrenchl": "Brenchley", - "rballard": "RBallard", - "jbravend": "Bravender", - "kkodama": "Kodama", - "tbirchar": "Birchard", - "cjacobso": "Jacobson", - "jjelsema": 
"Jelsema", - "jpowell": "Powell", - "dwroe": "Wroe", - "mballard": "MBallard", - "abedal": "Bedal", - "rbohlin": "Bohlin", - "bburke": "Burke", - "pdonalds": "Donaldson", - "leaton": "Eaton", - "mfoster": "Foster", - "agibbs": "Gibbs", - "shouston": "Houston", - "nhui": "Hui", - "rkinel": "Kinel", - "hlau": "Lau", - "imorriso": "Morrison", - "tstall": "Stall", - "valmanza": "Almanza", - "chevalie": "Chevalier", - "mdye": "Dye", - "jsaucier": "Saucier", - "msardi": "MAS", - "tlefebvre": "LeFebvre", - "thansen": "Hansen", - "swhite": "White", - } - - def _getForecasterName(self, argDict): - # get the debug level from the argDict, if available - try: - self._debug = argDict["debug"] - except: - self._debug = 1 - self.debug_print("Debug: _getForecasterName in UserInfo") - userName = os.environ["USER"] - if userName in self.forecasterDict().keys(): - forecaster = self.forecasterDict()[userName] - else: - forecaster = "NATIONAL HURRICANE CENTER" - self.debug_print("Forecaster = " + forecaster) - return forecaster - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+# +# UserInfo.py +# +# getForecasterName +# Return the forecaster name that should be added to the end of the product +# +# Author: fachorn +# +# History: +# F.Achorn/OPC 03/08/13 initial creation +# F.Achorn/OPC 09/24/13 modified to get the debug level corectly from argDict +# ---------------------------------------------------------------------------- + +import TextRules +import SampleAnalysis +import time, string +import os + +class UserInfo(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def forecasterDict(self): + self.debug_print("Debug: forecasterDict in UserInfo") + return { + "clandsea": "Landsea", + "mnelson": "Nelson", + "sstripli": "Stripling", + "echrist": "Christensen", + "jlewitsk": "Lewitsky", + "alevine": "AL", + "jaguirre": "Aguirre", + "grubio": "GR", + "dmundell": "Mundell", + "cmcelroy": "McElroy", + "alatto": "Latto", + "mformosa": "Formosa", + "nramos": "Ramos", + "mtichace": "MT", + "erivera": "ERA", + "jcangial": "Cangialosi", + "sstewart": "Stewart", + "pmanougi": "Manougian", + "dfigursk": "DJF", + "Jsienkie": "Sienkiewicz", + "fachorn": "FAchorn", + "cjuckins": "Juckins", + "jclark": "Clark", + "tcollins": "Collins", + "jkells": "Kells", + "dmills": "Mills", + "dscovil": "Scovil", + "kachorn": "KAchorn", + "gbancrof": "Bancroft", + "kbell": "Bell", + "tholley": "Holley", + "mhuffman": "Huffman", + "dkosier": "Kosier", + "jkrekele": "Krekeler", + "fmusonda": "Musonda", + "jkrekele": "Krekeler", + "jnolt": "Nolt", + "breinhar": "Reinhart", + "jkrekele": "Krekeler", + "mrowland": "Rowland", + "tshaw": "Shaw", + "lsommerv": "Sommerville", + "elau": "ELau", + "cbrenchl": "Brenchley", + "rballard": "RBallard", + "jbravend": "Bravender", + "kkodama": "Kodama", + "tbirchar": "Birchard", + "cjacobso": "Jacobson", + "jjelsema": "Jelsema", + "jpowell": "Powell", + "dwroe": "Wroe", + "mballard": "MBallard", + "abedal": "Bedal", + 
"rbohlin": "Bohlin", + "bburke": "Burke", + "pdonalds": "Donaldson", + "leaton": "Eaton", + "mfoster": "Foster", + "agibbs": "Gibbs", + "shouston": "Houston", + "nhui": "Hui", + "rkinel": "Kinel", + "hlau": "Lau", + "imorriso": "Morrison", + "tstall": "Stall", + "valmanza": "Almanza", + "chevalie": "Chevalier", + "mdye": "Dye", + "jsaucier": "Saucier", + "msardi": "MAS", + "tlefebvre": "LeFebvre", + "thansen": "Hansen", + "swhite": "White", + } + + def _getForecasterName(self, argDict): + # get the debug level from the argDict, if available + try: + self._debug = argDict["debug"] + except: + self._debug = 1 + self.debug_print("Debug: _getForecasterName in UserInfo") + userName = os.environ["USER"] + if userName in list(self.forecasterDict().keys()): + forecaster = self.forecasterDict()[userName] + else: + forecaster = "NATIONAL HURRICANE CENTER" + self.debug_print("Forecaster = " + forecaster) + return forecaster + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Utility.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Utility.py index dbf5418851..37b15bf225 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Utility.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/Utility.py @@ -1,141 +1,141 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Utility.py -# Class which takes a dbSubsystem or an ifpClient -# and performs various utility functions. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -import sys, traceback -import logging - -class Utility: - - def __init__(self, dbSubsystem, root, ifpClient=None): - # Utility class -- - # Provides various methods used across diverse classes - # Can work with a full dbSubsystem or just an ifpClient - - self.__dbSubsystem = dbSubsystem - self.__invMgr = self.__msgHand = None - if dbSubsystem is not None: - self.__invMgr = dbSubsystem.dataManager() - self.__msgHand = dbSubsystem.msgHandler() - elif ifpClient is not None: - self.__invMgr = ifpClient - self.__root = root - self.__ifpClient = ifpClient - self.log = logging.getLogger("FormatterRunner.Utility.Utility") - - def getTextInventory(self, category): - # Get the text products from the server - if self.__invMgr is None: - self.handleError("Utility: No Access to Text inventory") - return None - return self.__invMgr.getTextInventory(category) - - def module(self, moduleName, showError=1): - # Return the module with the given name - try: - #if sys.modules.has_key(moduleName): - # del sys.modules[moduleName] - module = __import__(moduleName) - except: - if showError: - self.handleError("Problem importing module: " + moduleName, tracebackFlag=1) - else: - self.log.error("Problem importing module: " + moduleName + " " + \ - traceback.format_exc()) - return None - return module - - def set(self, dict, value, default=None): - # Try to find the value in the dictionary - try: - val = dict[value] - except: - val = default - return val - - def handleError(self, errorMsg, tracebackFlag=0): - type, value, tb = sys.exc_info() - if type is None or tracebackFlag == 0: - message = errorMsg - else: - sys.last_type = type - sys.last_value = value - sys.last_traceback = tb - tblist = traceback.extract_tb(tb) - del tblist[:1] - list = traceback.format_list(tblist) - if list: - list.insert(0, "\nTraceback (innermost last):\n") - list[len(list):] = traceback.format_exception_only(type, value) - message = errorMsg + "--\n" - for item in list: - message = message + item - if 
self.__msgHand is not None: - AFPS.UserAlertMsg_send_mh(self.__msgHand, message, "S", "GFE") - PyErrorDialog.PyErrorDialog(message=message) - - def removeDups(self, list): - # Return a list that has removed duplicates from the original - # and preserves the order - # Sort the list - sortedList = list - sortedList.sort() - curItem = None - # Create new list without duplicates - reducedList = [] - for item in sortedList: - if curItem is None or not item == curItem: - reducedList.append(item) - curItem = item - # Go through original list eliminating duplicates, - # but preserving the order - orderedList = [] - for item in list: - if item in reducedList: - orderedList.append(item) - reducedList.remove(item) - return orderedList - - def findInImported(self, varName): - # Look for the given variable within modules that - # are already imported - for key in sys.modules.keys(): - module = sys.modules[key] - try: - var = getattr(module, varName) - return var, module - except: - continue - return None, None +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Utility.py +# Class which takes a dbSubsystem or an ifpClient +# and performs various utility functions. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +import sys, traceback +import logging + +class Utility: + + def __init__(self, dbSubsystem, root, ifpClient=None): + # Utility class -- + # Provides various methods used across diverse classes + # Can work with a full dbSubsystem or just an ifpClient + + self.__dbSubsystem = dbSubsystem + self.__invMgr = self.__msgHand = None + if dbSubsystem is not None: + self.__invMgr = dbSubsystem.dataManager() + self.__msgHand = dbSubsystem.msgHandler() + elif ifpClient is not None: + self.__invMgr = ifpClient + self.__root = root + self.__ifpClient = ifpClient + self.log = logging.getLogger("FormatterRunner.Utility.Utility") + + def getTextInventory(self, category): + # Get the text products from the server + if self.__invMgr is None: + self.handleError("Utility: No Access to Text inventory") + return None + return self.__invMgr.getTextInventory(category) + + def module(self, moduleName, showError=1): + # Return the module with the given name + try: + #if sys.modules.has_key(moduleName): + # del sys.modules[moduleName] + module = __import__(moduleName) + except: + if showError: + self.handleError("Problem importing module: " + moduleName, tracebackFlag=1) + else: + self.log.error("Problem importing module: " + moduleName + " " + \ + traceback.format_exc()) + return None + return module + + def set(self, dict, value, default=None): + # Try to find the value in the dictionary + try: + val = dict[value] + except: + val = default + return val + + def handleError(self, errorMsg, tracebackFlag=0): + type, value, tb = sys.exc_info() + if type is None or tracebackFlag == 0: + message = errorMsg + else: + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + tblist = traceback.extract_tb(tb) + del tblist[:1] + list = traceback.format_list(tblist) + if list: + list.insert(0, "\nTraceback (innermost last):\n") + list[len(list):] = traceback.format_exception_only(type, value) + message = errorMsg + "--\n" + for item in list: + message = message + item + if 
self.__msgHand is not None: + AFPS.UserAlertMsg_send_mh(self.__msgHand, message, "S", "GFE") + PyErrorDialog.PyErrorDialog(message=message) + + def removeDups(self, list): + # Return a list that has removed duplicates from the original + # and preserves the order + # Sort the list + sortedList = list + sortedList.sort() + curItem = None + # Create new list without duplicates + reducedList = [] + for item in sortedList: + if curItem is None or not item == curItem: + reducedList.append(item) + curItem = item + # Go through original list eliminating duplicates, + # but preserving the order + orderedList = [] + for item in list: + if item in reducedList: + orderedList.append(item) + reducedList.remove(item) + return orderedList + + def findInImported(self, varName): + # Look for the given variable within modules that + # are already imported + for key in list(sys.modules.keys()): + module = sys.modules[key] + try: + var = getattr(module, varName) + return var, module + except: + continue + return None, None diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VTECMessageType.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VTECMessageType.py index 7697281b89..d7168bfa2a 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VTECMessageType.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VTECMessageType.py @@ -1,57 +1,57 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. 
persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# VTECMessageType.py -# -# This module stores the VTEC message type for each product category -# -# Author: lefebvre -# ---------------------------------------------------------------------------- -#Products not listed will be considered VTEC disabled. - -## -# This is a base file that is not intended to be overridden. -## - -VTECMessageTypeDict = { - 'WSW' : 'O', - 'WCN' : 'O', - 'NPW' : 'O', - 'FFA' : 'O', - 'RFW' : 'O', - 'CFW' : 'O', - 'HLS' : 'O', - 'MWW' : 'O', - 'TCV' : 'O', - } - - - -# This method fetches the message type for the specified product Category -# If not found, return None. -def getVTECMessageType(productCategory): - if VTECMessageTypeDict.has_key(productCategory): - return VTECMessageTypeDict[productCategory] - else: - return None +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# VTECMessageType.py +# +# This module stores the VTEC message type for each product category +# +# Author: lefebvre +# ---------------------------------------------------------------------------- +#Products not listed will be considered VTEC disabled. + +## +# This is a base file that is not intended to be overridden. +## + +VTECMessageTypeDict = { + 'WSW' : 'O', + 'WCN' : 'O', + 'NPW' : 'O', + 'FFA' : 'O', + 'RFW' : 'O', + 'CFW' : 'O', + 'HLS' : 'O', + 'MWW' : 'O', + 'TCV' : 'O', + } + + + +# This method fetches the message type for the specified product Category +# If not found, return None. +def getVTECMessageType(productCategory): + if productCategory in VTECMessageTypeDict: + return VTECMessageTypeDict[productCategory] + else: + return None diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VarDictGroker.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VarDictGroker.py index c445a36931..73441ad6b2 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VarDictGroker.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VarDictGroker.py @@ -1,187 +1,187 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. 
persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import ProcessVariableList, types - -class VarDictGroker: - def __init__(self, module, definition, name, issuedBy=None, dataMgr=None): - self._module = module - self._definition = definition - self._name = name - self._issuedBy = issuedBy - self._dataMgr = dataMgr - - def getVarDict(self): - # Create and display the User Dialog for this product. 
- # - # If the product has a _processVariableList method, - # use it to get the varDict - # Otherwise, - # Create the User Dialog entries from the VariableList (if present) - # and the _issuanceList (if present) - # Return a text string command line argument for the User Responses - # Return None if user cancelled - - varDict = {} - - # Look for method _processVariableList - try: # Try to instantiate the smart text product - product = self._module.TextProduct() - processVariableList = product._processVariableList - except: # Simple table product - processVariableList = None - product = None - - if processVariableList is not None: - print "processVariableList is not None" - co = processVariableList.im_func.func_code - if co.co_argcount > 2: - argValues = [self._definition, self._dataMgr] - else: - argValues = [self._definition] - - import Tkinter,sys - sys.argv = ["FormatterRunner"] - root=Tkinter.Tk() - root.title("FormatterRunner") - root.withdraw() - - varDict = processVariableList(*argValues) - - root.destroy() - - if varDict is None: - return None - - else: - - # Get _issuanceList entries for the User Dialog - print "TextProduct()" - try: # Try to instantiate the smart text product - product = self._module.TextProduct() - product._definition = product.Definition - except: # Simple table product - product = None - - varList = [] - try: # Try to find an issuance list in the smart text product - issuanceList = product._issuance_list({}) - if len(issuanceList) > 0: - issuanceEntry = self._createVariableListEntry( - ("Product Issuance", "productIssuance"), issuanceList) - varList = [issuanceEntry] - except: - pass - - # Add in module VariableList - try: # Try to find a VariableList in the smart text product - productVarList = product.VariableList - varList += productVarList - except: - pass - - # Look for "runTime" variables in the Definition - varList = self._addRunTimeVariables(varList, self._definition) - - if varList is not None and len(varList): - # Display 
User Dialog - print "ProcessVariableList.ProcessVariableList" - processVarList = ProcessVariableList.ProcessVariableList( - self._name, varList, varDict={}, dataMgr=self._dataMgr) - self._selectionStatus = processVarList.status() - if not self._selectionStatus == "OK": - return None # User Cancelled - varDict = processVarList.varDict() - - # Add "issuedBy" to the varDict if applicable - varDict[("Issued By","issuedBy")] = self._issuedBy - - # Return a text string version of the User Responses (varDict) - return str(varDict) - - def _createVariableListEntry(self, entryName, entryList, entryType="radio", - default=""): - # Create a VariableList entry for the entryList - # and the given entryType and default value - # "entryList" is a list of items for a radio or checkbutton - # VariableList entry. If entryList items are tuples, the first - # value of the tuple is used. - if len(entryList) == 1: - # Do not need to put in the dialog - return None - entries = [] - for entry in entryList: - if type(entry) is types.TupleType: - entry = entry[0] - entries.append(entry) - result = (entryName, default, entryType, entries) - return result - - def _addRunTimeVariables(self, varList, definition): - # Add entries to varList to solicit variables designated as - # "runTime" in the product definition - - for item, default, rtItem, varType in [ - ("language","english","runTimeLanguage", ["radio", - ["english","french","spanish"]]), - ("outputFile","/tmp/forecast.out","runTimeOutputFile", - ["alphaNumeric"]), - ("appendFile",None,"runTimeAppendFile", ["alphaNumeric"]), - ("lineLength",69,"runTimeLineLength",["alphaNumeric"]), - ("timePeriod",3, "runTimePeriod", ["alphaNumeric"]), - ]: - value = definition.setdefault(item, default) - rtValue = definition.setdefault(item, default) - if rtValue == "yes": - varEntry = [item, value] + varType - varEntry = tuple(varEntry) - varList.append(varEntry) - - # Handle Edit Areas - rtEditAreas = definition.setdefault("runTimeEditAreas", "no") - if 
rtEditAreas == "yes": - # Add an entry to varList to solicit edit areas at run time - # Use the defaultEdit areas as choices - dfEditAreas = definition.setdefault("defaultEditAreas", []) - editAreaList = [] - for name, label in dfEditAreas: - if type(name) == types.TupleType: - # use label instead of lat/lon values - editAreaList.append(label) - else: - editAreaList.append(name) - varList.append( - ("Choose Edit Areas", [],"check", editAreaList)) - - # Handle Time Ranges - rtRanges = definition.setdefault("runTimeRanges", "no") - if rtRanges == "yes": - # Add an entry to varList to solicit time ranges at run time - # Use the defaultRanges as choices - dfRanges = definition.setdefault("defaultRanges", []) - varList.append( - ("Choose Time Ranges", [],"check", dfRanges)) - - return varList +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +import ProcessVariableList, types + +class VarDictGroker: + def __init__(self, module, definition, name, issuedBy=None, dataMgr=None): + self._module = module + self._definition = definition + self._name = name + self._issuedBy = issuedBy + self._dataMgr = dataMgr + + def getVarDict(self): + # Create and display the User Dialog for this product. 
+ # + # If the product has a _processVariableList method, + # use it to get the varDict + # Otherwise, + # Create the User Dialog entries from the VariableList (if present) + # and the _issuanceList (if present) + # Return a text string command line argument for the User Responses + # Return None if user cancelled + + varDict = {} + + # Look for method _processVariableList + try: # Try to instantiate the smart text product + product = self._module.TextProduct() + processVariableList = product._processVariableList + except: # Simple table product + processVariableList = None + product = None + + if processVariableList is not None: + print("processVariableList is not None") + co = processVariableList.__func__.__code__ + if co.co_argcount > 2: + argValues = [self._definition, self._dataMgr] + else: + argValues = [self._definition] + + import tkinter,sys + sys.argv = ["FormatterRunner"] + root=tkinter.Tk() + root.title("FormatterRunner") + root.withdraw() + + varDict = processVariableList(*argValues) + + root.destroy() + + if varDict is None: + return None + + else: + + # Get _issuanceList entries for the User Dialog + print("TextProduct()") + try: # Try to instantiate the smart text product + product = self._module.TextProduct() + product._definition = product.Definition + except: # Simple table product + product = None + + varList = [] + try: # Try to find an issuance list in the smart text product + issuanceList = product._issuance_list({}) + if len(issuanceList) > 0: + issuanceEntry = self._createVariableListEntry( + ("Product Issuance", "productIssuance"), issuanceList) + varList = [issuanceEntry] + except: + pass + + # Add in module VariableList + try: # Try to find a VariableList in the smart text product + productVarList = product.VariableList + varList += productVarList + except: + pass + + # Look for "runTime" variables in the Definition + varList = self._addRunTimeVariables(varList, self._definition) + + if varList is not None and len(varList): + # Display 
User Dialog + print("ProcessVariableList.ProcessVariableList") + processVarList = ProcessVariableList.ProcessVariableList( + self._name, varList, varDict={}, dataMgr=self._dataMgr) + self._selectionStatus = processVarList.status() + if not self._selectionStatus == "OK": + return None # User Cancelled + varDict = processVarList.varDict() + + # Add "issuedBy" to the varDict if applicable + varDict[("Issued By","issuedBy")] = self._issuedBy + + # Return a text string version of the User Responses (varDict) + return str(varDict) + + def _createVariableListEntry(self, entryName, entryList, entryType="radio", + default=""): + # Create a VariableList entry for the entryList + # and the given entryType and default value + # "entryList" is a list of items for a radio or checkbutton + # VariableList entry. If entryList items are tuples, the first + # value of the tuple is used. + if len(entryList) == 1: + # Do not need to put in the dialog + return None + entries = [] + for entry in entryList: + if type(entry) is tuple: + entry = entry[0] + entries.append(entry) + result = (entryName, default, entryType, entries) + return result + + def _addRunTimeVariables(self, varList, definition): + # Add entries to varList to solicit variables designated as + # "runTime" in the product definition + + for item, default, rtItem, varType in [ + ("language","english","runTimeLanguage", ["radio", + ["english","french","spanish"]]), + ("outputFile","/tmp/forecast.out","runTimeOutputFile", + ["alphaNumeric"]), + ("appendFile",None,"runTimeAppendFile", ["alphaNumeric"]), + ("lineLength",69,"runTimeLineLength",["alphaNumeric"]), + ("timePeriod",3, "runTimePeriod", ["alphaNumeric"]), + ]: + value = definition.setdefault(item, default) + rtValue = definition.setdefault(item, default) + if rtValue == "yes": + varEntry = [item, value] + varType + varEntry = tuple(varEntry) + varList.append(varEntry) + + # Handle Edit Areas + rtEditAreas = definition.setdefault("runTimeEditAreas", "no") + if 
rtEditAreas == "yes": + # Add an entry to varList to solicit edit areas at run time + # Use the defaultEdit areas as choices + dfEditAreas = definition.setdefault("defaultEditAreas", []) + editAreaList = [] + for name, label in dfEditAreas: + if type(name) == tuple: + # use label instead of lat/lon values + editAreaList.append(label) + else: + editAreaList.append(name) + varList.append( + ("Choose Edit Areas", [],"check", editAreaList)) + + # Handle Time Ranges + rtRanges = definition.setdefault("runTimeRanges", "no") + if rtRanges == "yes": + # Add an entry to varList to solicit time ranges at run time + # Use the defaultRanges as choices + dfRanges = definition.setdefault("defaultRanges", []) + varList.append( + ("Choose Time Ranges", [],"check", dfRanges)) + + return varList diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VectorRelatedPhrases.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VectorRelatedPhrases.py index 1803a231da..fd99e70b4b 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VectorRelatedPhrases.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/VectorRelatedPhrases.py @@ -1,1497 +1,1497 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# VectorRelatedPhrases.py -# Methods for producing text forecast from SampleAnalysis statistics. -# -# Author: hansen -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import PhraseBuilder - -class VectorRelatedPhrases(PhraseBuilder.PhraseBuilder): - def __init__(self): - PhraseBuilder.PhraseBuilder.__init__(self) - - ############################################ - ### VECTOR PHRASES - - def standard_vector_phraseMethods(self): - return [ - self.consolidatePhrase, - self.checkLocalEffects, - self.combinePhraseStats, - self.consolidateDirection, - self.consolidateTrends, - self.chooseMostImportant, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.embedDescriptor, - self.assembleSubPhrases, - self.postProcessPhrase, - ] - - ### Wind - def lake_wind_thresholds(self, tree, node): - # Return upper and lower lake_wind thresholds in mph. - # Only apply phrase for max wind speed of 25 to 35 mph. At 35 mph - # and higher, an advisory of some sort will be in effect and phrase - # will not be needed. 
- return 25, 35 - - def lake_wind_areaNames(self, tree, node): - # Return list of edit area names for which the lake_wind_phrase - # should be generated - # If you want the phrase potentially generated for all zones, use: - # return ["ALL"] - return [] - - def useWindsForGusts_flag(self, tree, node): - # Turn this on if you want to use the maximum Wind - # for reporting Gusts if a WindGust grid is not found. - # Note that if the difference between the maximum wind speed - # and the reported wind speed (e.g. using stdDevMinMax) is - # not greater than the gust_wind_difference_nlValue, - # no wind gust will be reported. - return 0 - - def lake_wind_phrase(self): - return { - "setUpMethod": self.lake_wind_setUp, - "wordMethod": self.lake_wind_words, - "phraseMethods": self.standard_phraseMethods() - } - def lake_wind_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Wind", "Max", self.VECTOR())] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def lake_wind_words(self, tree, node): - # Wind Statistics -- vectorAvg, vectorMinMax -- any temporal resolution - # Customization Points: - # lake_wind_areaNames - # lake_wind_thresholds - # descriptor_phrase for lakeWinds - - timeRange = node.getTimeRange() - statDict = node.getStatDict() - stats = self.getStats(statDict, "Wind") - if stats is None: - return self.setWords(node, "") - areaNames = self.getCurrentAreaNames(tree) - include_phrase = 0 - lakeWindNames = self.lake_wind_areaNames(tree, node) - if "ALL" in lakeWindNames: - include_phrase = 1 - else: - for areaName in areaNames: - if areaName in self.lake_wind_areaNames(tree, node): - include_phrase = 1 - break - if include_phrase == 0 or stats is None: - return self.setWords(node, "") - - max, dir = self.getValue(stats, "Max", self.VECTOR()) - phrase = "" - lower_threshold, upper_threshold = self.lake_wind_thresholds(tree, node) - if max >= lower_threshold and max < upper_threshold: - descriptor = 
self.phrase_descriptor(tree, node, "lakeWinds", "Wind") - phrase = descriptor - node.getParent().set("descriptor", "") - return self.setWords(node, phrase) - - # Wind Range methods - def wind_phrase(self): - return { - "setUpMethod": self.wind_setUp, - "wordMethod": self.vector_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - - def wind_withGusts_phrase(self): - return { - "setUpMethod": self.wind_withGusts_setUp, - "wordMethod": self.vector_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - - def wind_withGusts_setUp(self, tree, node): - return self.wind_setUp(tree, node, gustFlag=1) - - def wind_setUp(self, tree, node, gustFlag=0, element="Wind", connectorMethod=None): - wind = self.ElementInfo(element, "List", self.VECTOR()) - elementInfoList = [wind] - if gustFlag: - windGust = self.ElementInfo( - "WindGust", "Max", phraseDef=self.gust_phrase) - elementInfoList.append(windGust) - node.set("gustFlag", 1) - if connectorMethod is None: - connectorMethod = self.vectorConnector - self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) - return self.DONE() - - def vector_words(self, tree, node): - # Create a words for a vector element - elementInfo = node.getAncestor("firstElement") - if elementInfo is None: - return self.setWords(node, "") - words = self.simple_vector_phrase(tree, node, elementInfo) - if words == "null": - return self.setWords(node, "null") - gustPhrase = "" - if words != "": - # Add gusts - gustFlag = node.getAncestor("gustFlag") - if gustFlag == 1: - windStats = tree.stats.get("Wind", node.getTimeRange(), node.getAreaLabel(), - mergeMethod="Max") - if windStats is not None: - maxMag, dir = windStats - statDict = node.getStatDict() - gustStats = self.getStats(statDict, "WindGust") - subRange = node.get("timeRange") - gustPhrase = self.embedded_gust_phrase( - tree, node, gustStats, maxMag, subRange) - return self.setWords(node, words + gustPhrase) - - def embedded_gust_phrase(self, tree, node, 
gustStats, maxWind, subRange): - # Determine what type of gust phrase to add. Day and night are treated - # differently with gusts phrases toned down a bit for night. - try: - includeTropical = self._includeTropical - except: - includeTropical = False - if includeTropical: - statLabel = "" # Use the moderatedMinMax from the Tropical components - else: - statLabel = "vectorMinMax" - gusts = None - if gustStats is None: - # If useWindForGusts_flag is set, use max Wind for reporting gusts - if self.useWindsForGusts_flag(tree, node) == 1: - windStats = tree.stats.get( - "Wind", subRange, node.getAreaLabel(), statLabel=statLabel, - mergeMethod="Max") - if windStats is None: - return "" - else: - gusts, dir = windStats - else: - gusts = self.getValue(gustStats,"Max") - if gusts is None: - return "" - - if includeTropical: - # Round gusts and maxWind to the nearest 5 kt regardless of users' overrides - gusts = self.round(gusts, 'Nearest', 5.0) - maxWind = self.round(maxWind, 'Nearest', 5.0) - - threshold = self.nlValue(self.null_nlValue(tree, node, "WindGust", "WindGust"), gusts) - if gusts < threshold: - return "" - gustPhrase = "" - outUnits = self.element_outUnits(tree, node, "WindGust", "WindGust") - units = self.units_descriptor(tree, node, "units", outUnits) - windDifference = self.nlValue(self.gust_wind_difference_nlValue(tree, node), maxWind) - if gusts - maxWind > windDifference: - gustPhrase = " with gusts to around " + `int(gusts)` + " " + units - return gustPhrase - - def simple_vector_phrase(self, tree, node, elementInfo, checkRepeating=1): - # Create a vector subPhrase - # Do not repeat mag, dir if same as previous phrase - elementName = elementInfo.name - statDict = node.getStatDict() - stats = self.getStats(statDict, elementName) - if stats is None: - return "" - mag, dir = stats - minMag, maxMag = self.getValue(mag, "MinMax") - - # Save maxMag at component level for other methods to use. 
- # THIS IS PARTICULARLY IMPORTANT FOR USE IN THE includeOnlyPhrases_list def - # below to eliminate certainly wx elements during tropical cyclone - # situations when certain conditions are met. - component = node.getComponent() - maxMagList = component.get("maxMagList") - if maxMagList is None: - maxMagList = [maxMag] - else: - maxMagList.append(maxMag) - component.set("maxMagList", maxMagList) - - words = self.vector_mag(tree, node, minMag, maxMag, - elementInfo.outUnits, elementName) - if words == "null": - return words - magStr = words - dirStr = self.vector_dir(dir) - - if checkRepeating: - # Set for future reference - node.set("dirStr", dirStr) - node.set("magStr", magStr) - node.set("minMag", minMag) - node.set("maxMag", maxMag) - if minMag == 0.0: - minMag = maxMag - # Check for repeating mag or dir - prevNode = node.getPrev() - if prevNode is not None: - prevDirStr = prevNode.get("dirStr") - prevMagStr = prevNode.get("magStr") - prevMin = prevNode.get("minMag") - prevMax = prevNode.get("maxMag") - if prevMin == 0.0: - prevMin = prevMax - if prevMin is None or prevMax is None or \ - prevDirStr is None or prevMagStr is None: - pass - elif prevDirStr == dirStr and prevMagStr == magStr: - pass - elif prevDirStr == dirStr: - dirStr = "" - elif prevMagStr == magStr: - magStr = "" - # Prevent "around 10 becoming 5 to 10" - # "around 10 becoming 10 to 15" - elif prevMin == prevMax: - if (minMag == prevMax - 5.0) or (maxMag == prevMax + 5.0): - magStr = "" - # Prevent "5 to 10 becoming around 10" - # "10 to 15 becoming around 10" - elif minMag == maxMag: - if (prevMin == maxMag - 5.0) or (prevMax == maxMag + 5.0): - magStr = "" - words = dirStr + self.format(magStr) - return words.lstrip() - - def vector_mag(self, tree, node, minMag, maxMag, units, - elementName="Wind"): - "Create a phrase for a Range of magnitudes" - - # Check for "null" value (below threshold) - threshold = self.nlValue(self.null_nlValue( - tree, node, elementName, elementName), maxMag) - if 
maxMag < threshold: - return "null" - - # Apply max reported threshold - maxReportedMag = self.maxReported_threshold(tree, node, elementName, elementName) - if maxMag >= maxReportedMag: - maxMag = maxReportedMag - #minMag = 0 - - units = self.units_descriptor(tree, node, "units", units) - - if elementName == "Wind": - if self.marine_wind_flag(tree, node): - return self.marine_wind_mag(tree, node, minMag, maxMag, units, elementName) - - # Check for SingleValue - if maxMag == minMag: #or minMag == 0: - around = self.addSpace( - self.phrase_descriptor(tree, node, "around", elementName)) - words = around + `int(maxMag)` + " " + units - else: - if int(minMag) < threshold: - upTo = self.addSpace( - self.phrase_descriptor(tree, node, "up to", elementName)) - words = upTo + `int(maxMag)` + " " + units - else: - valueConnector = self.value_connector(tree, node, elementName, elementName) - words = `int(minMag)` + valueConnector + `int(maxMag)` + " " + units - - # This is an additional hook for customizing the magnitude wording - words = self.vector_mag_hook(tree, node, minMag, maxMag, units, elementName, words) - return words - - def vector_mag_hook(self, tree, node, minMag, maxMag, units, elementName, words): - # Further refinement and customization of the wind phrase can be done here - return words - - def marine_wind_mag(self, tree, node, minMag, maxMag, units, elementName): - # Produce marine descriptor wording such as "storm force", "gales" - specialDescriptor = 0 - prevSpecial = None - if node.getIndex() > 0 and self.marine_wind_verbose_flag(tree, node) == 0: - # Check for previous descriptor - prevSpecial = node.getPrev().get("specialDescriptor") - # Check for special descriptors - windWordList = [(64, "hurricane force winds to"), - (45, "storm force winds to"), - (34, "gales to"), - ] - for threshold, windWords in windWordList: - if maxMag >= threshold: - descriptor = self.addSpace( - self.phrase_descriptor(tree, node, windWords, elementName)) - if descriptor == 
prevSpecial: - descriptor = "" - words = descriptor + `int(maxMag)` + " " + units - specialDescriptor = 1 - break - - if not specialDescriptor: - if maxMag > 25: - descriptor = self.addSpace( - self.phrase_descriptor(tree, node, "up to", elementName)) - words = descriptor + `int(maxMag)` + " " + units - else: - if minMag == maxMag or minMag == 0: - around = self.addSpace( - self.phrase_descriptor(tree, node, "around", elementName)) - words = around + `int(maxMag)` + " " + units - else: - valueConnector = self.value_connector(tree, node, elementName, elementName) - words = `int(minMag)` + valueConnector + `int(maxMag)` + " " + units - else: - # If special marine descriptor is included in the resulting - # words for the first subPhrase, turn off the phrase descriptor - if node.getIndex() == 0: - node.getParent().set("descriptor","") - node.set("specialDescriptor", descriptor) - - return words - - - def embedDescriptor(self, tree, node): - # See if ready to process - if not self.phrase_trigger(tree, node): - return - # If appropriate, embed descriptor in first part of non-empty subPhrase - elementInfoList = node.get("elementInfoList") - if len(elementInfoList) < 1: - return self.DONE() - elementName = node.getAncestor("firstElement").name - if self.embedded_vector_descriptor_flag( - tree, node, elementName, elementName) == 0: - return self.DONE() - for node in node.get("childList"): - words = node.get("words") - if words is None: - return - # Find first non-empty phrase to embed descriptor - if words != "": - #if node.get("null"): - if self.isNull(node): - # Do not embed descriptor into null-filled words - break - dirStr = node.get("dirStr") - if dirStr is not None: - # Embed only if there is a dirStr - phrase = node.getParent() - descriptor = phrase.get("descriptor") - phrase.set("embeddedDescriptor", descriptor) - descriptor = self.format(descriptor) - words = words.replace(dirStr, dirStr + descriptor, 1) - node.set("words", words) - phrase.set("descriptor", "") - 
break - return self.DONE() - - def wind_summary(self): - return { - "setUpMethod": self.wind_summary_setUp, - "wordMethod": self.wind_summary_words, - "phraseMethods": self.standard_phraseMethods(), - } - def wind_summary_setUp(self, tree, node): - elementInfoList = [] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def wind_summary_words(self, tree, node): - # Uses vectorAvg, vectorMedian, vectorMinMax - elementName = "Wind" - words = self.vector_summary(tree, node, elementName) - return self.setWords(node, words) - - def vector_summary(self, tree, node, elementName): - "Determine summary of given element" - # Uses vectorAvg, vectorMedian, vectorMinMax - stats = tree.stats.get( - elementName, node.getTimeRange(), node.getAreaLabel(), - mergeMethod="Max") - if stats is None: - return "" - max, dir = stats - return self.vector_summary_valueStr(max, elementName) - - - def vector_summary_valueStr(self, value, elementName): - # Thresholds and corresponding phrases - # Defaults are for Winds converted to mph - words = "" - if value < 25: - words = "" - elif value < 30: - words = "breezy" - elif value < 40: - words = "windy" - elif value < 50: - words = "very windy" - elif value < 74: - words = "strong winds" - else: - words = "hurricane force winds" - return words - - ### WindGust - def gust_wind_difference_nlValue(self, tree, node): - # Difference between gust and maxWind below which gusts are not mentioned - # Units are mph - return 10 - - # WindGust - def gust_phrase(self): - return { - "setUpMethod": self.gust_setUp, - "wordMethod": self.gust_words, - "phraseMethods": self.standard_phraseMethods(), - } - def gust_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("WindGust", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def gust_words(self, tree, node): - statDict = node.getStatDict() - stats = self.getStats(statDict, "WindGust") - if stats is 
None: - return self.setWords(node, "") - gustValue = self.getValue(stats, "Max") - threshold = self.nlValue(self.null_nlValue(tree, node, "WindGust", "WindGust"), gustValue) - if gustValue < threshold: - return self.setWords(node, "null") - # Check WindGust against Wind - maxWind, dir = tree.stats.get("Wind", node.getTimeRange(), node.getAreaLabel(), - mergeMethod="Max") - windDifference = self.nlValue(self.gust_wind_difference_nlValue(tree, node), maxWind) - if gustValue - maxWind <= windDifference: - return self.setWords(node, "null") - outUnits = self.element_outUnits(tree, node, "WindGust", "WindGust") - units = self.units_descriptor(tree, node, "units", outUnits) - words = `int(gustValue)` + " " + units - return self.setWords(node, words) - - - #--------------------------------------------------------------------------- - # Tropical Phrasing - Updated for OB9.5 - #--------------------------------------------------------------------------- - - def windSpdProb_thresholds(self, tree, node): - return [ - ((45.0, 80.0), (25.0, 60.0)), # Per 1 - (35.0, 20.0), # Per 2 - (30.0, 15.0), # Per 3 - (25.0, 12.5), # Per 4 - (22.5, 10.0), # Per 5 - (20.0, 8.0), # Per 6 - (17.5, 7.0), # Per 7 - (15.0, 6.0), # Per 8 - (12.5, 5.0), # Per 9 - (10.0, 4.0), # Per 10 - ] - - def firstComponentPeriod(self, tree, node): - # Define forecast period number for first component of this product. - # This is for greater flexibility in production of a tropical SAF - # valid values 1-14 - return 1 - - def includeOnlyPhrases_list(self, tree, component): - """ - Used for Tropical phrases. - Determines which phrases to keep in each period of the product. 
- """ - # Return list of phrases to include in the component - # Return an empty list if all phrases should be included - try: - includeTropical = self._includeTropical - except: - includeTropical = False - if not includeTropical: - return [] - - # See which period we are in - compPeriod = int(component.getIndex() + self.firstComponentPeriod(tree, component)) - self.debug_print("Working in Period %d" % (compPeriod), 1) - - # See which list of periods we may want to modify - if self._pil.find("ZFP") == 0: - productType = "ZFP" - includeSomeList = [1] - else: - productType = "CWF" - includeSomeList = [6,7,8,9,10] - - # If this is not one of the periods we might want to remove phrases, - # then return - if compPeriod not in includeSomeList: - # Ensure all phrases are used - return [] - - # Grab thresholds for this period - handle the first period case - windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) - if compPeriod == 1: - (thresh34low, thresh34high) = windSpdProb_thresholds[0][0] - (thresh64low, thresh64high) = windSpdProb_thresholds[0][1] - - # Display thresholds so we know what we're using - self.debug_print("34 kt thresholds = (%.2f, %.2f)" % - (thresh34low, thresh34high), 1) - self.debug_print("64 kt thresholds = (%.2f, %.2f)" % - (thresh64low, thresh64high), 1) - - # Otherwise, handle all other periods - else: - index = int(component.getIndex()) - (thresh34, thresh64) = windSpdProb_thresholds[index] - - # Display thresholds so we know what we're using - self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % - (thresh34, thresh64), 1) - - # Get some information about this forecast period - dayNight = self.getPeriod(component.getTimeRange(), 1) - timeRange = component.getTimeRange() - areaLabel = component.getAreaLabel() - self.debug_print("dayNight = %s\ttimeRange = %s" % (dayNight, - repr(timeRange)), 1) - - # Get pws64 - if dayNight == 1: - pws64 = tree.stats.get("pwsD64", timeRange, areaLabel, mergeMethod="Max") - 
self.debug_print("USING pwsD64", 1) - else: - pws64 = tree.stats.get("pwsN64", timeRange, areaLabel, mergeMethod="Max") - self.debug_print("USING pwsN64", 1) - - self.debug_print("PWS64 = %s" % (pws64), 1) - - if pws64 is None: - return [] - - - # Get pws34 - if dayNight == 1: - pws34 = tree.stats.get("pwsD34", timeRange, areaLabel, mergeMethod="Max") - self.debug_print("USING pwsD34", 1) - else: - pws34 = tree.stats.get("pwsN34", timeRange, areaLabel, mergeMethod="Max") - self.debug_print("USING pwsN34", 1) - - self.debug_print("PWS34 = %s" % (pws34), 1) - - if pws34 is None: - return [] - - # COMMENT: Get the stored wind stats from the component level. - # IF WE WERE TO LIMIT ELEMENTS IN THE ZFP BEYOND PERIOD 5, - # THE WIND STAT LABEL ABOVE WOULD ALSO BE NEEDED. - - maxMagList = component.get("maxMagList") - if maxMagList is None: - return self.setWords(component, "") - - self.debug_print("maxMag from includeOnlyPhrases_list: %s " % (maxMagList), 1) - print "maxMagList from includeOnlyPhrases_list: ", maxMagList - - maxMag = 0.0 - for mag in maxMagList: - if mag > maxMag: - maxMag = mag - - ## maxMag, dir = wind - if productType == "ZFP": - maxMag = maxMag*0.868976242 - self.debug_print("maxMag in includeOnlyPhrases_list: %s " % (maxMag), 1) - print "maxMag in includeOnlyPhrases_list: ", maxMag - - if maxMag is None: - maxMag = 0.0 - - # Retrieve the headlineKeys stored at the component level - headlineKeys = component.get("headlineKeys") - if headlineKeys is None: - headlineKeys = [] - - # If this is the first period, and in the list of periods we might - # want to modify - if productType == "ZFP": - if compPeriod == 1 and compPeriod in includeSomeList: - if "HU.W" in headlineKeys or "HI.W" in headlineKeys: - if pws64 >= thresh64high and maxMag >= 64.0: - # Limit the phrases we'll report - return ["pws_phrase", "wind_withGusts_phrase", "weather_phrase"] - elif pws64 >= thresh64low and maxMag >= 50.0: - # Keep all phrases - return [] - elif pws34 >= thresh34high 
and maxMag >= 34.0: - # Limit the phrases we'll report - return ["pws_phrase", "wind_withGusts_phrase", "weather_phrase"] - - elif "TR.W" in headlineKeys or "TI.W" in headlineKeys: - if pws34 >= thresh34high and maxMag >= 34.0: - # Limit the phrases we'll report - return ["pws_phrase", "wind_withGusts_phrase", "weather_phrase"] - - else: - return [] # keep all phrases - - # If this period is beyond the fifth period, and in the list of - # periods we might want to modify - else: - if compPeriod >= 6 and compPeriod in includeSomeList: - if ((pws34 >= thresh34 or pws34+2.5 >= thresh34) and maxMag >= 20.0) \ - or ((pws64 >= thresh64 or pws64+1.0 >= thresh64) and maxMag >= 20.0) \ - or maxMag >= 34.0: - # Limit the phrases we'll report - return ["pws_phrase", "weather_phrase"] - else: - # Return all phrases - return [] - - - #--------------------------------------------------------------------------- - # Probabilistic Wind Phrase - #--------------------------------------------------------------------------- - - def pws_phrase(self): - """ - Added to produce the tropical probabilistic wind phrase. - """ - return { - "setUpMethod": self.pws_setUp, - "wordMethod": self.pws_words, - "phraseMethods": [ - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.assembleSubPhrases, - ], - } - - def pws_setUp(self, tree, node): - """ - Setup method for the tropical probabilistic wind phrase. - """ - elementInfoList = [] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def pws_words(self, tree, node): - """ - Words method for the tropical probabilistic wind phrase. 
- """ - # Get Wind - self.debug_print("\nBegin period***********", 1) - self.debug_print("\nNode time range -> %s" % - (repr(node.getTimeRange())), 1) - self.debug_print("Parent time range -> %s" % - (repr(node.parent.getTimeRange())), 1) - - # Get name and index of this node's component - component = node.getComponent() - compIndex = node.getComponent().getIndex() - compPeriod = int(compIndex + self.firstComponentPeriod(tree, node)) - print "COMPONENT IN pws_words", compPeriod - componentName = node.getComponentName() - - if self._pil.find("ZFP") == 0: - productType = "ZFP" - else: - productType = "CWF" - - # COMMENT: If this is one of the first 5 periods of the ZFP, or this is the CWF - if not productType == "ZFP" or compPeriod <= 5: - print "I AM IN: ", node.getTimeRange() - #!!! Wait for wind phrase to complete - # We're assuming that all the wind phrases have completed (including - # local effect phrases) if one has. - if productType == "ZFP": - phraseList = ["wind_withGusts_phrase"] - else: - phraseList = ["marine_wind_withGusts_phrase"] - windWords = self.findWords(tree, node, "Wind", phraseList = phraseList) - self.debug_print("windWords = '%s'" % (windWords), 1) - # Wait for Wind phrase - if windWords is None: - return - - # Get the stored wind stats from the component level - maxMagList = component.get("maxMagList") - if maxMagList is None: - return self.setWords(node, "") - - self.debug_print("MaxMagList from pws_words %s %s" % (maxMagList, - repr(node.getTimeRange())), 1) - # print "MaxMagList from pws_words", maxMagList, node.getTimeRange() - maxMag = 0.0 - for mag in maxMagList: - if mag > maxMag: - maxMag = mag - - if productType == "ZFP": - # print "PWS MAXMAG in MPH IS: ", maxMag - maxMag = maxMag*0.868976242 - # print "PWS MAXMAG IN KNOTS: ", maxMag - # - # COMMENT: Othwerwise Periods 6 and beyond in the ZFP. 
- # Although wind phrases are not included in extended ZFP you - # still need to do the analysis so tropical cyclone formatter - # logic can be carried out through the extended (day 5) periods. - # - else: - print "I AM IN: ", node.getTimeRange() - windStats = tree.stats.get( - "Wind", node.getTimeRange(), node.getAreaLabel(), - statLabel="vectorModeratedMinMax", mergeMethod="Max") - ## print "WINDSTATS", windStats - if windStats is None: - return self.setWords(node, "") - maxMag, dir = windStats - maxMag = maxMag*0.868976242 - - # Display maximum wind speed in MPH and KTS - self.debug_print("PWS MAXMAG in MPH IS: %s" % (maxMag), 1) - self.debug_print("PWS MAXMAG in KTS IS: %s" % (maxMag), 1) - - dayNight = self.getPeriod(node.getTimeRange(), 1) - self.debug_print("dayNight IS %s" % (dayNight), 1) - - # See which grids to use for probability of 34 and 64 kts - if dayNight == 1: - prob34 = "pwsD34" - prob64 = "pwsD64" - else: - prob34 = "pwsN34" - prob64 = "pwsN64" - self.debug_print("USING pws34 = "+prob34, 1) - self.debug_print("USING pws64 = "+prob64, 1) - pws64 = tree.stats.get(prob64, node.getTimeRange(), - node.getAreaLabel(), mergeMethod="Max") - if pws64 is None: - self.debug_print("pws64 NONE", 1) - return self.setWords(node, "") - pws34 = tree.stats.get(prob34, node.getTimeRange(), - node.getAreaLabel(), mergeMethod="Max") - if pws34 is None: - self.debug_print("pws34 NONE", 1) - return self.setWords(node, "") - - #print "check ", "check" - #################################################################### - #print "WORDS1", words - words = "" - areaLabel = tree.getAreaLabel() - print "\nBegin period***********", node.getTimeRange() - self.debug_print("\nNode time range -> %s" % - (repr(node.getTimeRange())), 1) - self.debug_print("Parent time range -> %s" % - (repr(node.parent.getTimeRange())), 1) - self.debug_print("MAXMAG IS -> %s KTS" % (maxMag), 1) - self.debug_print("\nNode time and label -> %s %s" % - (repr(node.getTimeRange()), - 
repr(node.getAreaLabel())), 1) - #tree.stats.printDictionary("Hazards") - # Get Hazards - headlines = tree.stats.get("Hazards", node.getTimeRange(), - areaLabel, mergeMethod = "List") - - self.debug_print("maxMag = %s" % (maxMag), 1) - self.debug_print("warningpws64 = %s" % (pws64), 1) - self.debug_print("warningpws34 = %s" % (pws34), 1) - self.debug_print("Headline stats for warning -> %s" % - (repr(headlines)), 1) - print "maxMag = ", maxMag - print "warningpws64 = ", pws64 - print "warningpws34 = ", pws34 - print "Headline stats for warning ", headlines - - if headlines is not None: - # Sort the headlines by startTime - temp = [] - for h, tr in headlines: - temp.append((tr.startTime(), (h, tr))) - temp.sort() - newList = [] - for t in temp: - newList.append(t[1]) - headlines = newList - - # Fetch the set of local headlines allowed for this product - allowedHazards = [] - for key, allActions, cat in self.allowedHazards(): - allowedHazards.append(key) - - # Create a list of headline keys as strings e.g. 
HU.A - headlineKeys = [] - for key, tr in headlines: # value == list of subkeys - if key not in allowedHazards: - continue - # Don't call headlinesTimeRange_descriptor function due to - # an exception which is caused - DR19483 - #timeDescriptor = self.headlinesTimeRange_descriptor( - # tree, node, key, tr, areaLabel, issuanceTime) - if key == "": - continue - if key not in headlineKeys: - headlineKeys.append(key) - self.debug_print("key: %s" % (key), 1) - - self.debug_print("headlineKeys: %s" % (repr(headlineKeys)), 1) - words = self.getTropicalDescription( - tree, node, headlineKeys, maxMag, pws64, pws34) - # Store the headlineKeys at the component node for later examination - component = node.getComponent() - component.set("headlineKeys", headlineKeys) - - elif headlines is None or headlines is NoData: - words = words + self.getTropicalDescription( - tree, node, "", maxMag, pws64, pws34) - - # COMMENT: If we have words from the pws_phrase during tropical cyclones - # the following lines of code will make sure wind_summary is - # not printed out. - if words is not None and len(words.strip()) > 0: - # Remove the wind sumamry phrase from this component and any local - # effect areas - no need to replace undesirable phrases later on - self.removeComponentPhrases(tree, node, "wind_summary", - areaLabels=[node.getAreaLabel(), - node.getComponent().getAreaLabel() - ]) - self.debug_print("\nSetting words '%s' for %s" % - (words, node.getAncestor('name')), 1) - self.debug_print("%s %s\n" % (node.getComponentName(), - repr(node.getTimeRange())), 1) - return self.setWords(node, words) - - def getTropicalDescription(self, tree, node, headlineKeys, maxMag, pws64, - pws34): - """ - Determines which tropical descriptions to use for current period. 
- """ - self.debug_print("\tgetTropicalDescription") - # Get some information about the component of this node - compName = node.getComponentName() - compIndex = node.getComponent().getIndex() - # Convert convert component index to a forecast period number - compPeriod = int(compIndex + self.firstComponentPeriod(tree, node)) - self.debug_print("-"*80, 1) - self.debug_print("Component name = %s" % (compName) + - "\tForecast Period = %d" % (compPeriod) + - "\tmaxMag = %s" % (maxMag), 1) - descMethod = None - words = "" - # If this is one of the first 4 periods of the forecast - if compPeriod <= 4: - exec "descMethod = self.getPeriod_%d_Desc" % (compPeriod) - # Otherwise, If this is one of the fifth to ninth forecast periods - elif 5 <= compPeriod <= 9: - descMethod = self.getPeriod_5_9_Desc - # Otherwise - else: - descMethod = self.getPeriod_10_14_Desc - - # Ensure the tropical boolean variables are set using current - # set of headlines - self.tropicalBooleanConditions(headlineKeys) - - # Get the description from this method - if descMethod is not None: - desc = descMethod(tree, node, maxMag, pws64, pws34) - # If we found the description - prepare it to be returned - if desc != "": - words = " " + self.phrase_descriptor(tree, node, desc, desc) - return words - - def tropicalBooleanConditions(self, headlineKeys): - ''' - This method sets the conditions needed by subsequent methods - (getPeriodX_Desc) to form the appropriate wording: - Each entry in the conditionsList is - the condition name (e.g. self._Hurricane_W) and - the Hazard keys whose presence will trigger the condition. - Note that we only need four conditions to cover all the - logic for generating wording. 
- - ''' - conditionList = [ - ("Hurricane_W", ["HU.W", "HI.W"]), - ("Hurricane_A", ["HU.A", "HI.A"]), - ("TropStorm_W", ["TR.W", "TI.W"]), - ("TropStorm_A", ["TR.A", "TI.A"]), - ] - - for varName, hazardList in conditionList: - found = False - for key in hazardList: - if key in headlineKeys: - found = True - break - exec "self._"+varName+"= found" - - -## def tropicalBooleanConditions(self, headlineKeys): -## """ -## Sets various boolean variables used by the pws_phrase logic based -## upon contents of current headlines. -## """ -## self.debug_print("\ttropicalBooleanConditions") -## # COMMENT: All boolean variables are defined globally within the tropical -## # formatter, so there is nothing to 'return' -## # These are all the tropical headline combinations accounted for. -## conditionLists = [ -## ["HI.W"], -## ["HI.A"], -## ["HU.W"], -## ['HU.A'], -## ["TI.W"], -## ["TI.A"], -## ["TR.W"], -## ["TR.A"], -## ["HU.A", "TR.W"], -## ["HI.A", "TI.W"], -## ["HU.W", "TI.W"], -## ["HI.W", "TI.W"], -## ["HI.W", "TR.W"], -## ["HI.A", "TR.A"], -## ["HU.A", "TI.A"], -## ["HI.A", "TI.A"], -## ["TI.W", "TR.A"], -## ["TI.W", "HU.A"], -## ["HI.W", "TR.A"], -## ["HI.W", "HU.A"], -## ["HI.W", "HU.W"], -## ["HI.A", "HU.A"], -## ["TI.A", "TR.A"], -## ["TI.W", "TR.W"], -## ["HI.A", "TI.W", "TR.W"], -## ["HI.A","TI.W","HU.A","TR.W"], -## ["HI.A", "TI.W", "HU.A", "TR.W"], -## ] - -## kLen = len(headlineKeys) -## for keyList in conditionLists: -## conditionName = "" -## klLen = len(keyList) -## if kLen == klLen: cond = True -## else: cond = False -## for keyStr in keyList: -## conditionName = conditionName + "_" + keyStr -## if kLen == klLen: -## if keyStr not in headlineKeys: -## cond = False -## conditionName = conditionName.replace(".", "_") -## exec "self." 
+ conditionName + "= cond" - -## self.debug_print("HU_A_TR_W %s" % (self._HU_A_TR_W), 1) -## self.debug_print("HI_A_TI_W %s" % (self._HI_A_TI_W), 1) -## self.debug_print("HI_A_TI_W_HU_A_TR_W %s" % (self._HI_A_TI_W_HU_A_TR_W), 1) -## self.debug_print("HI_A_TI_W_TR_W %s" % (self._HI_A_TI_W_TR_W), 1) -## self.debug_print("HI_A_TI_W_HU_A_TR_W %s" % (self._HI_A_TI_W_HU_A_TR_W), 1) -## self.debug_print("HU_W_TI_W %s" % (self._HU_W_TI_W), 1) -## self.debug_print("HI_W_TI_W %s" % (self._HI_W_TI_W), 1) -## self.debug_print("HI_W_TR_W %s" % (self._HI_W_TR_W), 1) -## self.debug_print("HI_A_TR_A %s" % (self._HI_A_TR_A), 1) -## self.debug_print("HU_A_TI_A %s" % (self._HU_A_TI_A), 1) -## self.debug_print("HI_A_TI_A %s" % (self._HI_A_TI_A), 1) -## self.debug_print("TI_W_TR_A %s" % (self._TI_W_TR_A), 1) -## self.debug_print("TI_W_HU_A %s" % (self._TI_W_HU_A), 1) -## self.debug_print("HI_W_TR_A %s" % (self._HI_W_TR_A), 1) -## self.debug_print("HI_W_HU_A %s" % (self._HI_W_HU_A), 1) -## self.debug_print("HI_W %s" % (self._HI_W), 1) -## self.debug_print("HI_A %s" % (self._HI_A), 1) -## self.debug_print("HU_W %s" % (self._HU_W), 1) -## self.debug_print("HU_A %s" % (self._HU_A), 1) -## self.debug_print("HI_W_HU_W %s" % (self._HI_W_HU_W), 1) -## self.debug_print("HI_A_HU_A %s" % (self._HI_A_HU_A), 1) -## self.debug_print("TI_W %s" % (self._TI_W), 1) -## self.debug_print("TI_A %s" % (self._TI_A), 1) -## self.debug_print("TR_W %s" % (self._TR_W), 1) -## self.debug_print("TR_A %s" % (self._TR_A), 1) -## self.debug_print("TI_A_TR_A %s" % (self._TI_A_TR_A), 1) -## self.debug_print("TI_W_TR_W %s" % (self._TI_W_TR_W), 1) - -# -# COMMENT: getPeriod_#_ definitions below contain the guts of the tropical -# cyclone formatter logic used to determine pws phrases or expressions of -# uncertainty. -# - def getPeriod_1_Desc(self, tree, node, maxMag, pws64, pws34): - """ - Determines contents of PWS phrase for a first period forecast. 
- """ - self.debug_print("\tgetPeriod_1_Desc") - - desc = "" - self.debug_print("Period time range = %s" % - (repr(node.getComponent().getTimeRange())), 1) - self.debug_print("PWS34_wrng = %s" % (pws34), 1) - self.debug_print("PWS64_wrng = %s" % (pws64), 1) - - # Grab thresholds for this period - special case 2 for each - component = node.getComponent() - windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) - (thresh34low, thresh34high) = windSpdProb_thresholds[0][0] - (thresh64low, thresh64high) = windSpdProb_thresholds[0][1] - - # Display thresholds so we know what we're using - self.debug_print("34 kt thresholds = (%.2f, %.2f)" % - (thresh34low, thresh34high), 1) - self.debug_print("64 kt thresholds = (%.2f, %.2f)" % - (thresh64low, thresh64high), 1) - - if self._Hurricane_A and self._TropStorm_W and not self._Hurricane_W: - #if (self._Hurricane_W or self._Hurricane_A) and (self._TropStorm_W or self._TropStorm_A): - if maxMag >= 34.0: - if pws34 >= thresh34high: - desc = "iminTSposHR" - else: - desc = "expTSposHR" - elif pws34 >= thresh34low and maxMag >= 25.0: - desc = "expTSposHR" - elif pws64 >= thresh64low: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34low or pws34+10.0 >= thresh34low or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" # or regular phrasing - self.debug_print("ifelse1!!! %s" % (maxMag)) - - elif self._Hurricane_W or self._Hurricane_A: - - if maxMag >= 64.0: - if pws64 >= thresh64high: - desc = "iminHR" - else: - desc = "expHR" - elif pws64 >= thresh64low and maxMag >= 50.0: - desc = "expHR" - elif maxMag >= 34.0: - if pws34 >= thresh34high: - desc = "iminTSposHR" - else: - desc = "expTSposHR" - elif pws34 >= thresh34low and maxMag >= 25.0: - desc = "expTSposHR" - elif pws64 >= thresh64low: - desc = "posHR" - elif pws34 >= thresh34low or pws34+10.0 >= thresh34low or maxMag >= 25.0: - desc = "posTSbcmgposHR" - else: - desc = "" # or regular phrasing - self.debug_print("ifelse2!!! 
%s" % (maxMag)) - - elif self._TropStorm_W or self._TropStorm_A: - if maxMag >= 34.0: - if pws34 >= thresh34high: - desc = "iminTS" - else: - desc = "expTS" - elif pws34 >= thresh34low and maxMag >= 25.0: - desc = "expTS" - elif pws64 >= thresh64low: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34low or pws34+10.0 >= thresh34low or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" # or regular phrasing - self.debug_print("ifelse3!!! %s" % (maxMag)) - - else: - print "check.......... ", "check" - if maxMag >= 64.0: - desc = "posHR" - elif maxMag >= 34.0: - desc = "posTS" - elif pws64 >= thresh64low or pws64 +5.0 >= thresh64low: - desc = "posHR" - elif pws34 >= thresh34low or pws34+10.0 >= thresh34low: - desc = "posTS" - else: - desc = "" - - return desc - - - def getPeriod_2_Desc(self, tree, node, maxMag, pws64, pws34): - """ - Determines contents of PWS phrase for a second period forecast. - """ - self.debug_print("\tgetPeriod_2_Desc") - - desc = "" - self.debug_print("Period time range = %s" % - (repr(node.getComponent().getTimeRange())), 1) - self.debug_print("PWS34_wrng = %s" % (pws34), 1) - self.debug_print("PWS64_wrng = %s" % (pws64), 1) - - # Grab thresholds for this period - component = node.getComponent() - windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) - (thresh34, thresh64) = windSpdProb_thresholds[1] - - # Display thresholds so we know what we're using - self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % - (thresh34, thresh64), 1) - - if self._Hurricane_A and self._TropStorm_W and not self._Hurricane_W: - #if (self._Hurricane_W or self._Hurricane_A) and (self._TropStorm_W or self._TropStorm_A): - if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): - desc = "expTSposHR" - elif pws64 >= thresh64: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34 or pws34+10.0 >= thresh34 or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" # or regular phrasing - self.debug_print("ifelse1!!! 
%s" % (maxMag)) - - elif self._Hurricane_W or self._Hurricane_A: - if maxMag >= 64.0 or (pws64 >= thresh64 and maxMag >= 50.0): - desc = "expHR" - elif maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): - desc = "expTSposHR" - elif pws64 >= thresh64: - desc = "posHR" - elif pws34 >= thresh34 or pws34+10.0 >= thresh34 or maxMag >= 25.0: - desc = "posTSbcmgposHR" - else: - desc = "" # or regular phrasing - self.debug_print("ifelse2!!! %s" % (maxMag)) - - elif self._TropStorm_W or self._TropStorm_A: - if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): - desc = "expTS" - elif pws64 >= thresh64: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34 or pws34+10.0 >= thresh34 or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" # or regular phrasing - self.debug_print("ifelse3!!! %s" % (maxMag)) - - else: - # print "check.......... ", "check" - if maxMag >= 64.0: - desc = "posHR" - elif maxMag >= 34.0: - desc = "posTS" - elif pws64 >= thresh64 or pws64 +5.0 >= thresh64: - desc = "posHR" - elif pws34 >= thresh34 or pws34+10.0 >= thresh34: - desc = "posTS" - else: - desc = "" - - return desc - - - def getPeriod_3_Desc(self, tree, node, maxMag, pws64, pws34): - """ - Determines contents of PWS phrase for a third period forecast. 
- """ - self.debug_print("\tgetPeriod_3_Desc") - - desc = "" - self.debug_print("Period time range = %s" % - (repr(node.getComponent().getTimeRange())), 1) - self.debug_print("PWS34_wrng = %s" % (pws34), 1) - self.debug_print("PWS64_wrng = %s" % (pws64), 1) - - # Grab thresholds for this period - component = node.getComponent() - windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) - (thresh34, thresh64) = windSpdProb_thresholds[2] - - # Display thresholds so we know what we're using - self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % - (thresh34, thresh64), 1) - - if self._Hurricane_A and self._TropStorm_W and not self._Hurricane_W: - - if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): - desc = "expTSposHR" - elif pws64 >= thresh64: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" - self.debug_print("ifelse1!!! %s" % (maxMag)) - - elif self._Hurricane_W: - - if maxMag >= 64.0 or (pws64 >= thresh64 and maxMag >= 50.0): - desc = "expHR" - elif maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): - desc = "expTSposHR" - elif pws64 >= thresh64: - desc = "posHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: - desc = "posTSbcmgposHR" - else: - desc = "" - self.debug_print("ifelse2!!! %s" % (maxMag)) - - elif self._TropStorm_W: - - if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): - desc = "expTS" - elif pws64 >= thresh64: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" - self.debug_print("ifelse3!!! %s" % (maxMag)) - - elif self._Hurricane_A: - - if maxMag >= 50.0 or pws64 >= thresh64: - desc = "posHR" - elif maxMag >= 25.0 or pws34 >= thresh34 or pws34+5.0 >= thresh34: - desc = "posTSbcmgposHR" - else: - desc = "" - self.debug_print("ifelse4!!! 
%s" % (maxMag)) - - elif self._TropStorm_A: - - if maxMag >= 34.0: - if pws64 >= thresh64: - desc = "posTSbcmgposHR" - else: - desc = "posTS" - elif pws64 >= thresh64: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" - self.debug_print("ifelse5!!! %s" % (maxMag)) - - else: - self.debug_print("HERE I AM") - if pws64 >= thresh64 or pws64+2.5 >= thresh64: - desc = "posHR" - elif maxMag >= 64.0: - desc = "posHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34: - desc = "posTS" - elif maxMag >= 34.0: - desc = "posTS" - else: - desc = "" - - return desc - - - def getPeriod_4_Desc(self, tree, node, maxMag, pws64, pws34): - """ - Determines contents of PWS phrase for a fourth period forecast. - """ - self.debug_print("\tgetPeriod_4_Desc") - - desc = "" - self.debug_print("Period time range = %s" % - (repr(node.getComponent().getTimeRange())), 1) - self.debug_print("PWS34_wrng = %s" % (pws34), 1) - self.debug_print("PWS64_wrng = %s" % (pws64), 1) - - # Grab thresholds for this period - component = node.getComponent() - windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) - (thresh34, thresh64) = windSpdProb_thresholds[3] - - # Display thresholds so we know what we're using - self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % - (thresh34, thresh64), 1) - - if self._Hurricane_A and self._TropStorm_W and not self._Hurricane_W: - - if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): - desc = "expTSposHR" - elif pws64 >= thresh64: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" - self.debug_print("ifelse1!!! 
%s" % (maxMag)) - - elif self._Hurricane_W: - - if maxMag >= 64.0 or (pws64 >= thresh64 and maxMag >= 50.0): - desc = "expHR" - elif maxMag >= 34 or (pws34 >= thresh34 and maxMag >= 25.0): - desc = "expTSposHR" - elif pws64 >= thresh64: - desc = "posHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: - desc = "posTSbcmgposHR" - else: - desc = "" - self.debug_print("ifelse2!!! %s" % (maxMag)) - - elif self._TropStorm_W: - - if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): - desc = "expTS" - elif pws64 >= thresh64: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" - self.debug_print("ifelse3!!! %s" % (maxMag)) - - elif self._Hurricane_A: - - if maxMag >= 50.0 or pws64 >= thresh64: - desc = "posHR" - elif maxMag >= 25.0 or pws34 >= thresh34 or pws34+5.0 >= thresh34: - desc = "posTSbcmgposHR" - else: - desc = "" - self.debug_print("ifelse4!!! %s" % (maxMag)) - - elif self._TropStorm_A: - - if maxMag >= 34.0: - if pws64 >= thresh64: - desc = "posTSbcmgposHR" - else: - desc = "posTS" - elif pws64 >= thresh64: - desc = "posTSbcmgposHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: - desc = "posTS" - else: - desc = "" - self.debug_print("ifelse5!!! %s" % (maxMag)) - - else: - self.debug_print("HERE I AM") - if pws64 >= thresh64 or pws64+2.5 >= thresh64: - desc = "posHR" - elif maxMag >= 64.0: - desc = "posHR" - elif pws34 >= thresh34 or pws34+5.0 >= thresh34: - desc = "posTS" - elif maxMag >= 34.0: - desc = "posTS" - else: - desc = "" - - return desc - - - def getPeriod_5_9_Desc(self, tree, node, maxMag, pws64, pws34): - """ - Determines contents of PWS phrase for a fifth to ninth period forecast. 
- """ - self.debug_print("\tgetPeriod_5_9_Desc") - - desc = "" - self.debug_print("Period time range = %s" % - (repr(node.getComponent().getTimeRange())), 1) - self.debug_print("PWS34_wrng = %s" % (pws34), 1) - self.debug_print("PWS64_wrng = %s" % (pws64), 1) - - # Grab thresholds for this period - component = node.getComponent() - windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) - (thresh34, thresh64) = \ - windSpdProb_thresholds[node.getComponent().getIndex()] - - # Display thresholds so we know what we're using - self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % - (thresh34, thresh64), 1) - - if (pws64 >= thresh64 or (pws64 + 1.0) >= thresh64): - desc = "posHR" - elif maxMag >= 64.0: - desc = "posHR" - elif (self._Hurricane_A or self._Hurricane_W) and maxMag >= 50: - desc = "posHR" - elif (pws34 >= thresh34 or (pws34 + 2.5) >= thresh34): - desc = "posTS" - elif maxMag >= 34.0: - desc = "posTS" - elif (self._Hurricane_A or self._Hurricane_W or self._TropStorm_A or self._TropStorm_W) and maxMag >= 25: - desc = "posTS" - else: - desc = "" - - return desc - - - def getPeriod_10_14_Desc(self, tree, node, maxMag, pws64, pws34): - """ - Determines contents of PWS phrase for a fourth period forecast. 
- """ - self.debug_print("\tgetPeriod_4_Desc") - - desc = "" - self.debug_print("Period time range = %s" % - (repr(node.getComponent().getTimeRange())), 1) - self.debug_print("PWS34_wrng = %s" % (pws34), 1) - self.debug_print("PWS64_wrng = %s" % (pws64), 1) - - # Grab thresholds for this period - component = node.getComponent() - windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) - (thresh34, thresh64) = windSpdProb_thresholds[9] - - # Display thresholds so we know what we're using - self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % - (thresh34, thresh64), 1) - - if (pws64 >= thresh64 or (pws64 + 1.0) >= thresh64): - desc = "posHR" - elif maxMag >= 64.0: - desc = "posHR" - elif (self._Hurricane_A or self._Hurricane_W) and maxMag >= 50: - desc = "posHR" - elif (pws34 >= thresh34 or (pws34 + 2.5) >= thresh34): - desc = "posTS" - elif maxMag >= 34.0: - desc = "posTS" - elif (self._Hurricane_A or self._Hurricane_W or self._TropStorm_A or self._TropStorm_W) and maxMag >= 25: - desc = "posTS" - else: - desc = "" - - return desc - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# VectorRelatedPhrases.py +# Methods for producing text forecast from SampleAnalysis statistics. +# +# Author: hansen +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. 
+## + +import PhraseBuilder + +class VectorRelatedPhrases(PhraseBuilder.PhraseBuilder): + def __init__(self): + PhraseBuilder.PhraseBuilder.__init__(self) + + ############################################ + ### VECTOR PHRASES + + def standard_vector_phraseMethods(self): + return [ + self.consolidatePhrase, + self.checkLocalEffects, + self.combinePhraseStats, + self.consolidateDirection, + self.consolidateTrends, + self.chooseMostImportant, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.embedDescriptor, + self.assembleSubPhrases, + self.postProcessPhrase, + ] + + ### Wind + def lake_wind_thresholds(self, tree, node): + # Return upper and lower lake_wind thresholds in mph. + # Only apply phrase for max wind speed of 25 to 35 mph. At 35 mph + # and higher, an advisory of some sort will be in effect and phrase + # will not be needed. + return 25, 35 + + def lake_wind_areaNames(self, tree, node): + # Return list of edit area names for which the lake_wind_phrase + # should be generated + # If you want the phrase potentially generated for all zones, use: + # return ["ALL"] + return [] + + def useWindsForGusts_flag(self, tree, node): + # Turn this on if you want to use the maximum Wind + # for reporting Gusts if a WindGust grid is not found. + # Note that if the difference between the maximum wind speed + # and the reported wind speed (e.g. using stdDevMinMax) is + # not greater than the gust_wind_difference_nlValue, + # no wind gust will be reported. 
+ return 0 + + def lake_wind_phrase(self): + return { + "setUpMethod": self.lake_wind_setUp, + "wordMethod": self.lake_wind_words, + "phraseMethods": self.standard_phraseMethods() + } + def lake_wind_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Wind", "Max", self.VECTOR())] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def lake_wind_words(self, tree, node): + # Wind Statistics -- vectorAvg, vectorMinMax -- any temporal resolution + # Customization Points: + # lake_wind_areaNames + # lake_wind_thresholds + # descriptor_phrase for lakeWinds + + timeRange = node.getTimeRange() + statDict = node.getStatDict() + stats = self.getStats(statDict, "Wind") + if stats is None: + return self.setWords(node, "") + areaNames = self.getCurrentAreaNames(tree) + include_phrase = 0 + lakeWindNames = self.lake_wind_areaNames(tree, node) + if "ALL" in lakeWindNames: + include_phrase = 1 + else: + for areaName in areaNames: + if areaName in self.lake_wind_areaNames(tree, node): + include_phrase = 1 + break + if include_phrase == 0 or stats is None: + return self.setWords(node, "") + + max, dir = self.getValue(stats, "Max", self.VECTOR()) + phrase = "" + lower_threshold, upper_threshold = self.lake_wind_thresholds(tree, node) + if max >= lower_threshold and max < upper_threshold: + descriptor = self.phrase_descriptor(tree, node, "lakeWinds", "Wind") + phrase = descriptor + node.getParent().set("descriptor", "") + return self.setWords(node, phrase) + + # Wind Range methods + def wind_phrase(self): + return { + "setUpMethod": self.wind_setUp, + "wordMethod": self.vector_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + + def wind_withGusts_phrase(self): + return { + "setUpMethod": self.wind_withGusts_setUp, + "wordMethod": self.vector_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + + def wind_withGusts_setUp(self, tree, node): + return self.wind_setUp(tree, node, gustFlag=1) + + 
def wind_setUp(self, tree, node, gustFlag=0, element="Wind", connectorMethod=None): + wind = self.ElementInfo(element, "List", self.VECTOR()) + elementInfoList = [wind] + if gustFlag: + windGust = self.ElementInfo( + "WindGust", "Max", phraseDef=self.gust_phrase) + elementInfoList.append(windGust) + node.set("gustFlag", 1) + if connectorMethod is None: + connectorMethod = self.vectorConnector + self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) + return self.DONE() + + def vector_words(self, tree, node): + # Create a words for a vector element + elementInfo = node.getAncestor("firstElement") + if elementInfo is None: + return self.setWords(node, "") + words = self.simple_vector_phrase(tree, node, elementInfo) + if words == "null": + return self.setWords(node, "null") + gustPhrase = "" + if words != "": + # Add gusts + gustFlag = node.getAncestor("gustFlag") + if gustFlag == 1: + windStats = tree.stats.get("Wind", node.getTimeRange(), node.getAreaLabel(), + mergeMethod="Max") + if windStats is not None: + maxMag, dir = windStats + statDict = node.getStatDict() + gustStats = self.getStats(statDict, "WindGust") + subRange = node.get("timeRange") + gustPhrase = self.embedded_gust_phrase( + tree, node, gustStats, maxMag, subRange) + return self.setWords(node, words + gustPhrase) + + def embedded_gust_phrase(self, tree, node, gustStats, maxWind, subRange): + # Determine what type of gust phrase to add. Day and night are treated + # differently with gusts phrases toned down a bit for night. 
+ try: + includeTropical = self._includeTropical + except: + includeTropical = False + if includeTropical: + statLabel = "" # Use the moderatedMinMax from the Tropical components + else: + statLabel = "vectorMinMax" + gusts = None + if gustStats is None: + # If useWindForGusts_flag is set, use max Wind for reporting gusts + if self.useWindsForGusts_flag(tree, node) == 1: + windStats = tree.stats.get( + "Wind", subRange, node.getAreaLabel(), statLabel=statLabel, + mergeMethod="Max") + if windStats is None: + return "" + else: + gusts, dir = windStats + else: + gusts = self.getValue(gustStats,"Max") + if gusts is None: + return "" + + if includeTropical: + # Round gusts and maxWind to the nearest 5 kt regardless of users' overrides + gusts = self.round(gusts, 'Nearest', 5.0) + maxWind = self.round(maxWind, 'Nearest', 5.0) + + threshold = self.nlValue(self.null_nlValue(tree, node, "WindGust", "WindGust"), gusts) + if gusts < threshold: + return "" + gustPhrase = "" + outUnits = self.element_outUnits(tree, node, "WindGust", "WindGust") + units = self.units_descriptor(tree, node, "units", outUnits) + windDifference = self.nlValue(self.gust_wind_difference_nlValue(tree, node), maxWind) + if gusts - maxWind > windDifference: + gustPhrase = " with gusts to around " + repr(int(gusts)) + " " + units + return gustPhrase + + def simple_vector_phrase(self, tree, node, elementInfo, checkRepeating=1): + # Create a vector subPhrase + # Do not repeat mag, dir if same as previous phrase + elementName = elementInfo.name + statDict = node.getStatDict() + stats = self.getStats(statDict, elementName) + if stats is None: + return "" + mag, dir = stats + minMag, maxMag = self.getValue(mag, "MinMax") + + # Save maxMag at component level for other methods to use. + # THIS IS PARTICULARLY IMPORTANT FOR USE IN THE includeOnlyPhrases_list def + # below to eliminate certainly wx elements during tropical cyclone + # situations when certain conditions are met. 
+ component = node.getComponent() + maxMagList = component.get("maxMagList") + if maxMagList is None: + maxMagList = [maxMag] + else: + maxMagList.append(maxMag) + component.set("maxMagList", maxMagList) + + words = self.vector_mag(tree, node, minMag, maxMag, + elementInfo.outUnits, elementName) + if words == "null": + return words + magStr = words + dirStr = self.vector_dir(dir) + + if checkRepeating: + # Set for future reference + node.set("dirStr", dirStr) + node.set("magStr", magStr) + node.set("minMag", minMag) + node.set("maxMag", maxMag) + if minMag == 0.0: + minMag = maxMag + # Check for repeating mag or dir + prevNode = node.getPrev() + if prevNode is not None: + prevDirStr = prevNode.get("dirStr") + prevMagStr = prevNode.get("magStr") + prevMin = prevNode.get("minMag") + prevMax = prevNode.get("maxMag") + if prevMin == 0.0: + prevMin = prevMax + if prevMin is None or prevMax is None or \ + prevDirStr is None or prevMagStr is None: + pass + elif prevDirStr == dirStr and prevMagStr == magStr: + pass + elif prevDirStr == dirStr: + dirStr = "" + elif prevMagStr == magStr: + magStr = "" + # Prevent "around 10 becoming 5 to 10" + # "around 10 becoming 10 to 15" + elif prevMin == prevMax: + if (minMag == prevMax - 5.0) or (maxMag == prevMax + 5.0): + magStr = "" + # Prevent "5 to 10 becoming around 10" + # "10 to 15 becoming around 10" + elif minMag == maxMag: + if (prevMin == maxMag - 5.0) or (prevMax == maxMag + 5.0): + magStr = "" + words = dirStr + self.format(magStr) + return words.lstrip() + + def vector_mag(self, tree, node, minMag, maxMag, units, + elementName="Wind"): + "Create a phrase for a Range of magnitudes" + + # Check for "null" value (below threshold) + threshold = self.nlValue(self.null_nlValue( + tree, node, elementName, elementName), maxMag) + if maxMag < threshold: + return "null" + + # Apply max reported threshold + maxReportedMag = self.maxReported_threshold(tree, node, elementName, elementName) + if maxMag >= maxReportedMag: + maxMag = 
maxReportedMag + #minMag = 0 + + units = self.units_descriptor(tree, node, "units", units) + + if elementName == "Wind": + if self.marine_wind_flag(tree, node): + return self.marine_wind_mag(tree, node, minMag, maxMag, units, elementName) + + # Check for SingleValue + if maxMag == minMag: #or minMag == 0: + around = self.addSpace( + self.phrase_descriptor(tree, node, "around", elementName)) + words = around + repr(int(maxMag)) + " " + units + else: + if int(minMag) < threshold: + upTo = self.addSpace( + self.phrase_descriptor(tree, node, "up to", elementName)) + words = upTo + repr(int(maxMag)) + " " + units + else: + valueConnector = self.value_connector(tree, node, elementName, elementName) + words = repr(int(minMag)) + valueConnector + repr(int(maxMag)) + " " + units + + # This is an additional hook for customizing the magnitude wording + words = self.vector_mag_hook(tree, node, minMag, maxMag, units, elementName, words) + return words + + def vector_mag_hook(self, tree, node, minMag, maxMag, units, elementName, words): + # Further refinement and customization of the wind phrase can be done here + return words + + def marine_wind_mag(self, tree, node, minMag, maxMag, units, elementName): + # Produce marine descriptor wording such as "storm force", "gales" + specialDescriptor = 0 + prevSpecial = None + if node.getIndex() > 0 and self.marine_wind_verbose_flag(tree, node) == 0: + # Check for previous descriptor + prevSpecial = node.getPrev().get("specialDescriptor") + # Check for special descriptors + windWordList = [(64, "hurricane force winds to"), + (45, "storm force winds to"), + (34, "gales to"), + ] + for threshold, windWords in windWordList: + if maxMag >= threshold: + descriptor = self.addSpace( + self.phrase_descriptor(tree, node, windWords, elementName)) + if descriptor == prevSpecial: + descriptor = "" + words = descriptor + repr(int(maxMag)) + " " + units + specialDescriptor = 1 + break + + if not specialDescriptor: + if maxMag > 25: + descriptor = 
self.addSpace( + self.phrase_descriptor(tree, node, "up to", elementName)) + words = descriptor + repr(int(maxMag)) + " " + units + else: + if minMag == maxMag or minMag == 0: + around = self.addSpace( + self.phrase_descriptor(tree, node, "around", elementName)) + words = around + repr(int(maxMag)) + " " + units + else: + valueConnector = self.value_connector(tree, node, elementName, elementName) + words = repr(int(minMag)) + valueConnector + repr(int(maxMag)) + " " + units + else: + # If special marine descriptor is included in the resulting + # words for the first subPhrase, turn off the phrase descriptor + if node.getIndex() == 0: + node.getParent().set("descriptor","") + node.set("specialDescriptor", descriptor) + + return words + + + def embedDescriptor(self, tree, node): + # See if ready to process + if not self.phrase_trigger(tree, node): + return + # If appropriate, embed descriptor in first part of non-empty subPhrase + elementInfoList = node.get("elementInfoList") + if len(elementInfoList) < 1: + return self.DONE() + elementName = node.getAncestor("firstElement").name + if self.embedded_vector_descriptor_flag( + tree, node, elementName, elementName) == 0: + return self.DONE() + for node in node.get("childList"): + words = node.get("words") + if words is None: + return + # Find first non-empty phrase to embed descriptor + if words != "": + #if node.get("null"): + if self.isNull(node): + # Do not embed descriptor into null-filled words + break + dirStr = node.get("dirStr") + if dirStr is not None: + # Embed only if there is a dirStr + phrase = node.getParent() + descriptor = phrase.get("descriptor") + phrase.set("embeddedDescriptor", descriptor) + descriptor = self.format(descriptor) + words = words.replace(dirStr, dirStr + descriptor, 1) + node.set("words", words) + phrase.set("descriptor", "") + break + return self.DONE() + + def wind_summary(self): + return { + "setUpMethod": self.wind_summary_setUp, + "wordMethod": self.wind_summary_words, + 
"phraseMethods": self.standard_phraseMethods(), + } + def wind_summary_setUp(self, tree, node): + elementInfoList = [] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def wind_summary_words(self, tree, node): + # Uses vectorAvg, vectorMedian, vectorMinMax + elementName = "Wind" + words = self.vector_summary(tree, node, elementName) + return self.setWords(node, words) + + def vector_summary(self, tree, node, elementName): + "Determine summary of given element" + # Uses vectorAvg, vectorMedian, vectorMinMax + stats = tree.stats.get( + elementName, node.getTimeRange(), node.getAreaLabel(), + mergeMethod="Max") + if stats is None: + return "" + max, dir = stats + return self.vector_summary_valueStr(max, elementName) + + + def vector_summary_valueStr(self, value, elementName): + # Thresholds and corresponding phrases + # Defaults are for Winds converted to mph + words = "" + if value < 25: + words = "" + elif value < 30: + words = "breezy" + elif value < 40: + words = "windy" + elif value < 50: + words = "very windy" + elif value < 74: + words = "strong winds" + else: + words = "hurricane force winds" + return words + + ### WindGust + def gust_wind_difference_nlValue(self, tree, node): + # Difference between gust and maxWind below which gusts are not mentioned + # Units are mph + return 10 + + # WindGust + def gust_phrase(self): + return { + "setUpMethod": self.gust_setUp, + "wordMethod": self.gust_words, + "phraseMethods": self.standard_phraseMethods(), + } + def gust_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("WindGust", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def gust_words(self, tree, node): + statDict = node.getStatDict() + stats = self.getStats(statDict, "WindGust") + if stats is None: + return self.setWords(node, "") + gustValue = self.getValue(stats, "Max") + threshold = self.nlValue(self.null_nlValue(tree, node, "WindGust", 
"WindGust"), gustValue) + if gustValue < threshold: + return self.setWords(node, "null") + # Check WindGust against Wind + maxWind, dir = tree.stats.get("Wind", node.getTimeRange(), node.getAreaLabel(), + mergeMethod="Max") + windDifference = self.nlValue(self.gust_wind_difference_nlValue(tree, node), maxWind) + if gustValue - maxWind <= windDifference: + return self.setWords(node, "null") + outUnits = self.element_outUnits(tree, node, "WindGust", "WindGust") + units = self.units_descriptor(tree, node, "units", outUnits) + words = repr(int(gustValue)) + " " + units + return self.setWords(node, words) + + + #--------------------------------------------------------------------------- + # Tropical Phrasing - Updated for OB9.5 + #--------------------------------------------------------------------------- + + def windSpdProb_thresholds(self, tree, node): + return [ + ((45.0, 80.0), (25.0, 60.0)), # Per 1 + (35.0, 20.0), # Per 2 + (30.0, 15.0), # Per 3 + (25.0, 12.5), # Per 4 + (22.5, 10.0), # Per 5 + (20.0, 8.0), # Per 6 + (17.5, 7.0), # Per 7 + (15.0, 6.0), # Per 8 + (12.5, 5.0), # Per 9 + (10.0, 4.0), # Per 10 + ] + + def firstComponentPeriod(self, tree, node): + # Define forecast period number for first component of this product. + # This is for greater flexibility in production of a tropical SAF + # valid values 1-14 + return 1 + + def includeOnlyPhrases_list(self, tree, component): + """ + Used for Tropical phrases. + Determines which phrases to keep in each period of the product. 
+ """ + # Return list of phrases to include in the component + # Return an empty list if all phrases should be included + try: + includeTropical = self._includeTropical + except: + includeTropical = False + if not includeTropical: + return [] + + # See which period we are in + compPeriod = int(component.getIndex() + self.firstComponentPeriod(tree, component)) + self.debug_print("Working in Period %d" % (compPeriod), 1) + + # See which list of periods we may want to modify + if self._pil.find("ZFP") == 0: + productType = "ZFP" + includeSomeList = [1] + else: + productType = "CWF" + includeSomeList = [6,7,8,9,10] + + # If this is not one of the periods we might want to remove phrases, + # then return + if compPeriod not in includeSomeList: + # Ensure all phrases are used + return [] + + # Grab thresholds for this period - handle the first period case + windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) + if compPeriod == 1: + (thresh34low, thresh34high) = windSpdProb_thresholds[0][0] + (thresh64low, thresh64high) = windSpdProb_thresholds[0][1] + + # Display thresholds so we know what we're using + self.debug_print("34 kt thresholds = (%.2f, %.2f)" % + (thresh34low, thresh34high), 1) + self.debug_print("64 kt thresholds = (%.2f, %.2f)" % + (thresh64low, thresh64high), 1) + + # Otherwise, handle all other periods + else: + index = int(component.getIndex()) + (thresh34, thresh64) = windSpdProb_thresholds[index] + + # Display thresholds so we know what we're using + self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % + (thresh34, thresh64), 1) + + # Get some information about this forecast period + dayNight = self.getPeriod(component.getTimeRange(), 1) + timeRange = component.getTimeRange() + areaLabel = component.getAreaLabel() + self.debug_print("dayNight = %s\ttimeRange = %s" % (dayNight, + repr(timeRange)), 1) + + # Get pws64 + if dayNight == 1: + pws64 = tree.stats.get("pwsD64", timeRange, areaLabel, mergeMethod="Max") + 
self.debug_print("USING pwsD64", 1) + else: + pws64 = tree.stats.get("pwsN64", timeRange, areaLabel, mergeMethod="Max") + self.debug_print("USING pwsN64", 1) + + self.debug_print("PWS64 = %s" % (pws64), 1) + + if pws64 is None: + return [] + + + # Get pws34 + if dayNight == 1: + pws34 = tree.stats.get("pwsD34", timeRange, areaLabel, mergeMethod="Max") + self.debug_print("USING pwsD34", 1) + else: + pws34 = tree.stats.get("pwsN34", timeRange, areaLabel, mergeMethod="Max") + self.debug_print("USING pwsN34", 1) + + self.debug_print("PWS34 = %s" % (pws34), 1) + + if pws34 is None: + return [] + + # COMMENT: Get the stored wind stats from the component level. + # IF WE WERE TO LIMIT ELEMENTS IN THE ZFP BEYOND PERIOD 5, + # THE WIND STAT LABEL ABOVE WOULD ALSO BE NEEDED. + + maxMagList = component.get("maxMagList") + if maxMagList is None: + return self.setWords(component, "") + + self.debug_print("maxMag from includeOnlyPhrases_list: %s " % (maxMagList), 1) + print("maxMagList from includeOnlyPhrases_list: ", maxMagList) + + maxMag = 0.0 + for mag in maxMagList: + if mag > maxMag: + maxMag = mag + + ## maxMag, dir = wind + if productType == "ZFP": + maxMag = maxMag*0.868976242 + self.debug_print("maxMag in includeOnlyPhrases_list: %s " % (maxMag), 1) + print("maxMag in includeOnlyPhrases_list: ", maxMag) + + if maxMag is None: + maxMag = 0.0 + + # Retrieve the headlineKeys stored at the component level + headlineKeys = component.get("headlineKeys") + if headlineKeys is None: + headlineKeys = [] + + # If this is the first period, and in the list of periods we might + # want to modify + if productType == "ZFP": + if compPeriod == 1 and compPeriod in includeSomeList: + if "HU.W" in headlineKeys or "HI.W" in headlineKeys: + if pws64 >= thresh64high and maxMag >= 64.0: + # Limit the phrases we'll report + return ["pws_phrase", "wind_withGusts_phrase", "weather_phrase"] + elif pws64 >= thresh64low and maxMag >= 50.0: + # Keep all phrases + return [] + elif pws34 >= 
thresh34high and maxMag >= 34.0: + # Limit the phrases we'll report + return ["pws_phrase", "wind_withGusts_phrase", "weather_phrase"] + + elif "TR.W" in headlineKeys or "TI.W" in headlineKeys: + if pws34 >= thresh34high and maxMag >= 34.0: + # Limit the phrases we'll report + return ["pws_phrase", "wind_withGusts_phrase", "weather_phrase"] + + else: + return [] # keep all phrases + + # If this period is beyond the fifth period, and in the list of + # periods we might want to modify + else: + if compPeriod >= 6 and compPeriod in includeSomeList: + if ((pws34 >= thresh34 or pws34+2.5 >= thresh34) and maxMag >= 20.0) \ + or ((pws64 >= thresh64 or pws64+1.0 >= thresh64) and maxMag >= 20.0) \ + or maxMag >= 34.0: + # Limit the phrases we'll report + return ["pws_phrase", "weather_phrase"] + else: + # Return all phrases + return [] + + + #--------------------------------------------------------------------------- + # Probabilistic Wind Phrase + #--------------------------------------------------------------------------- + + def pws_phrase(self): + """ + Added to produce the tropical probabilistic wind phrase. + """ + return { + "setUpMethod": self.pws_setUp, + "wordMethod": self.pws_words, + "phraseMethods": [ + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.assembleSubPhrases, + ], + } + + def pws_setUp(self, tree, node): + """ + Setup method for the tropical probabilistic wind phrase. + """ + elementInfoList = [] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def pws_words(self, tree, node): + """ + Words method for the tropical probabilistic wind phrase. 
+ """ + # Get Wind + self.debug_print("\nBegin period***********", 1) + self.debug_print("\nNode time range -> %s" % + (repr(node.getTimeRange())), 1) + self.debug_print("Parent time range -> %s" % + (repr(node.parent.getTimeRange())), 1) + + # Get name and index of this node's component + component = node.getComponent() + compIndex = node.getComponent().getIndex() + compPeriod = int(compIndex + self.firstComponentPeriod(tree, node)) + print("COMPONENT IN pws_words", compPeriod) + componentName = node.getComponentName() + + if self._pil.find("ZFP") == 0: + productType = "ZFP" + else: + productType = "CWF" + + # COMMENT: If this is one of the first 5 periods of the ZFP, or this is the CWF + if not productType == "ZFP" or compPeriod <= 5: + print("I AM IN: ", node.getTimeRange()) + #!!! Wait for wind phrase to complete + # We're assuming that all the wind phrases have completed (including + # local effect phrases) if one has. + if productType == "ZFP": + phraseList = ["wind_withGusts_phrase"] + else: + phraseList = ["marine_wind_withGusts_phrase"] + windWords = self.findWords(tree, node, "Wind", phraseList = phraseList) + self.debug_print("windWords = '%s'" % (windWords), 1) + # Wait for Wind phrase + if windWords is None: + return + + # Get the stored wind stats from the component level + maxMagList = component.get("maxMagList") + if maxMagList is None: + return self.setWords(node, "") + + self.debug_print("MaxMagList from pws_words %s %s" % (maxMagList, + repr(node.getTimeRange())), 1) + # print "MaxMagList from pws_words", maxMagList, node.getTimeRange() + maxMag = 0.0 + for mag in maxMagList: + if mag > maxMag: + maxMag = mag + + if productType == "ZFP": + # print "PWS MAXMAG in MPH IS: ", maxMag + maxMag = maxMag*0.868976242 + # print "PWS MAXMAG IN KNOTS: ", maxMag + # + # COMMENT: Othwerwise Periods 6 and beyond in the ZFP. 
+ # Although wind phrases are not included in extended ZFP you + # still need to do the analysis so tropical cyclone formatter + # logic can be carried out through the extended (day 5) periods. + # + else: + print("I AM IN: ", node.getTimeRange()) + windStats = tree.stats.get( + "Wind", node.getTimeRange(), node.getAreaLabel(), + statLabel="vectorModeratedMinMax", mergeMethod="Max") + ## print "WINDSTATS", windStats + if windStats is None: + return self.setWords(node, "") + maxMag, dir = windStats + maxMag = maxMag*0.868976242 + + # Display maximum wind speed in MPH and KTS + self.debug_print("PWS MAXMAG in MPH IS: %s" % (maxMag), 1) + self.debug_print("PWS MAXMAG in KTS IS: %s" % (maxMag), 1) + + dayNight = self.getPeriod(node.getTimeRange(), 1) + self.debug_print("dayNight IS %s" % (dayNight), 1) + + # See which grids to use for probability of 34 and 64 kts + if dayNight == 1: + prob34 = "pwsD34" + prob64 = "pwsD64" + else: + prob34 = "pwsN34" + prob64 = "pwsN64" + self.debug_print("USING pws34 = "+prob34, 1) + self.debug_print("USING pws64 = "+prob64, 1) + pws64 = tree.stats.get(prob64, node.getTimeRange(), + node.getAreaLabel(), mergeMethod="Max") + if pws64 is None: + self.debug_print("pws64 NONE", 1) + return self.setWords(node, "") + pws34 = tree.stats.get(prob34, node.getTimeRange(), + node.getAreaLabel(), mergeMethod="Max") + if pws34 is None: + self.debug_print("pws34 NONE", 1) + return self.setWords(node, "") + + #print "check ", "check" + #################################################################### + #print "WORDS1", words + words = "" + areaLabel = tree.getAreaLabel() + print("\nBegin period***********", node.getTimeRange()) + self.debug_print("\nNode time range -> %s" % + (repr(node.getTimeRange())), 1) + self.debug_print("Parent time range -> %s" % + (repr(node.parent.getTimeRange())), 1) + self.debug_print("MAXMAG IS -> %s KTS" % (maxMag), 1) + self.debug_print("\nNode time and label -> %s %s" % + (repr(node.getTimeRange()), + 
repr(node.getAreaLabel())), 1) + #tree.stats.printDictionary("Hazards") + # Get Hazards + headlines = tree.stats.get("Hazards", node.getTimeRange(), + areaLabel, mergeMethod = "List") + + self.debug_print("maxMag = %s" % (maxMag), 1) + self.debug_print("warningpws64 = %s" % (pws64), 1) + self.debug_print("warningpws34 = %s" % (pws34), 1) + self.debug_print("Headline stats for warning -> %s" % + (repr(headlines)), 1) + print("maxMag = ", maxMag) + print("warningpws64 = ", pws64) + print("warningpws34 = ", pws34) + print("Headline stats for warning ", headlines) + + if headlines is not None: + # Sort the headlines by startTime + temp = [] + for h, tr in headlines: + temp.append((tr.startTime(), (h, tr))) + temp.sort() + newList = [] + for t in temp: + newList.append(t[1]) + headlines = newList + + # Fetch the set of local headlines allowed for this product + allowedHazards = [] + for key, allActions, cat in self.allowedHazards(): + allowedHazards.append(key) + + # Create a list of headline keys as strings e.g. 
HU.A + headlineKeys = [] + for key, tr in headlines: # value == list of subkeys + if key not in allowedHazards: + continue + # Don't call headlinesTimeRange_descriptor function due to + # an exception which is caused - DR19483 + #timeDescriptor = self.headlinesTimeRange_descriptor( + # tree, node, key, tr, areaLabel, issuanceTime) + if key == "": + continue + if key not in headlineKeys: + headlineKeys.append(key) + self.debug_print("key: %s" % (key), 1) + + self.debug_print("headlineKeys: %s" % (repr(headlineKeys)), 1) + words = self.getTropicalDescription( + tree, node, headlineKeys, maxMag, pws64, pws34) + # Store the headlineKeys at the component node for later examination + component = node.getComponent() + component.set("headlineKeys", headlineKeys) + + elif headlines is None or headlines is NoData: + words = words + self.getTropicalDescription( + tree, node, "", maxMag, pws64, pws34) + + # COMMENT: If we have words from the pws_phrase during tropical cyclones + # the following lines of code will make sure wind_summary is + # not printed out. + if words is not None and len(words.strip()) > 0: + # Remove the wind sumamry phrase from this component and any local + # effect areas - no need to replace undesirable phrases later on + self.removeComponentPhrases(tree, node, "wind_summary", + areaLabels=[node.getAreaLabel(), + node.getComponent().getAreaLabel() + ]) + self.debug_print("\nSetting words '%s' for %s" % + (words, node.getAncestor('name')), 1) + self.debug_print("%s %s\n" % (node.getComponentName(), + repr(node.getTimeRange())), 1) + return self.setWords(node, words) + + def getTropicalDescription(self, tree, node, headlineKeys, maxMag, pws64, + pws34): + """ + Determines which tropical descriptions to use for current period. 
+ """ + self.debug_print("\tgetTropicalDescription") + # Get some information about the component of this node + compName = node.getComponentName() + compIndex = node.getComponent().getIndex() + # Convert convert component index to a forecast period number + compPeriod = int(compIndex + self.firstComponentPeriod(tree, node)) + self.debug_print("-"*80, 1) + self.debug_print("Component name = %s" % (compName) + + "\tForecast Period = %d" % (compPeriod) + + "\tmaxMag = %s" % (maxMag), 1) + descMethod = None + words = "" + # If this is one of the first 4 periods of the forecast + if compPeriod <= 4: + exec("descMethod = self.getPeriod_%d_Desc" % (compPeriod)) + # Otherwise, If this is one of the fifth to ninth forecast periods + elif 5 <= compPeriod <= 9: + descMethod = self.getPeriod_5_9_Desc + # Otherwise + else: + descMethod = self.getPeriod_10_14_Desc + + # Ensure the tropical boolean variables are set using current + # set of headlines + self.tropicalBooleanConditions(headlineKeys) + + # Get the description from this method + if descMethod is not None: + desc = descMethod(tree, node, maxMag, pws64, pws34) + # If we found the description - prepare it to be returned + if desc != "": + words = " " + self.phrase_descriptor(tree, node, desc, desc) + return words + + def tropicalBooleanConditions(self, headlineKeys): + ''' + This method sets the conditions needed by subsequent methods + (getPeriodX_Desc) to form the appropriate wording: + Each entry in the conditionsList is + the condition name (e.g. self._Hurricane_W) and + the Hazard keys whose presence will trigger the condition. + Note that we only need four conditions to cover all the + logic for generating wording. 
+ + ''' + conditionList = [ + ("Hurricane_W", ["HU.W", "HI.W"]), + ("Hurricane_A", ["HU.A", "HI.A"]), + ("TropStorm_W", ["TR.W", "TI.W"]), + ("TropStorm_A", ["TR.A", "TI.A"]), + ] + + for varName, hazardList in conditionList: + found = False + for key in hazardList: + if key in headlineKeys: + found = True + break + exec("self._"+varName+"= found") + + +## def tropicalBooleanConditions(self, headlineKeys): +## """ +## Sets various boolean variables used by the pws_phrase logic based +## upon contents of current headlines. +## """ +## self.debug_print("\ttropicalBooleanConditions") +## # COMMENT: All boolean variables are defined globally within the tropical +## # formatter, so there is nothing to 'return' +## # These are all the tropical headline combinations accounted for. +## conditionLists = [ +## ["HI.W"], +## ["HI.A"], +## ["HU.W"], +## ['HU.A'], +## ["TI.W"], +## ["TI.A"], +## ["TR.W"], +## ["TR.A"], +## ["HU.A", "TR.W"], +## ["HI.A", "TI.W"], +## ["HU.W", "TI.W"], +## ["HI.W", "TI.W"], +## ["HI.W", "TR.W"], +## ["HI.A", "TR.A"], +## ["HU.A", "TI.A"], +## ["HI.A", "TI.A"], +## ["TI.W", "TR.A"], +## ["TI.W", "HU.A"], +## ["HI.W", "TR.A"], +## ["HI.W", "HU.A"], +## ["HI.W", "HU.W"], +## ["HI.A", "HU.A"], +## ["TI.A", "TR.A"], +## ["TI.W", "TR.W"], +## ["HI.A", "TI.W", "TR.W"], +## ["HI.A","TI.W","HU.A","TR.W"], +## ["HI.A", "TI.W", "HU.A", "TR.W"], +## ] + +## kLen = len(headlineKeys) +## for keyList in conditionLists: +## conditionName = "" +## klLen = len(keyList) +## if kLen == klLen: cond = True +## else: cond = False +## for keyStr in keyList: +## conditionName = conditionName + "_" + keyStr +## if kLen == klLen: +## if keyStr not in headlineKeys: +## cond = False +## conditionName = conditionName.replace(".", "_") +## exec "self." 
+ conditionName + "= cond" + +## self.debug_print("HU_A_TR_W %s" % (self._HU_A_TR_W), 1) +## self.debug_print("HI_A_TI_W %s" % (self._HI_A_TI_W), 1) +## self.debug_print("HI_A_TI_W_HU_A_TR_W %s" % (self._HI_A_TI_W_HU_A_TR_W), 1) +## self.debug_print("HI_A_TI_W_TR_W %s" % (self._HI_A_TI_W_TR_W), 1) +## self.debug_print("HI_A_TI_W_HU_A_TR_W %s" % (self._HI_A_TI_W_HU_A_TR_W), 1) +## self.debug_print("HU_W_TI_W %s" % (self._HU_W_TI_W), 1) +## self.debug_print("HI_W_TI_W %s" % (self._HI_W_TI_W), 1) +## self.debug_print("HI_W_TR_W %s" % (self._HI_W_TR_W), 1) +## self.debug_print("HI_A_TR_A %s" % (self._HI_A_TR_A), 1) +## self.debug_print("HU_A_TI_A %s" % (self._HU_A_TI_A), 1) +## self.debug_print("HI_A_TI_A %s" % (self._HI_A_TI_A), 1) +## self.debug_print("TI_W_TR_A %s" % (self._TI_W_TR_A), 1) +## self.debug_print("TI_W_HU_A %s" % (self._TI_W_HU_A), 1) +## self.debug_print("HI_W_TR_A %s" % (self._HI_W_TR_A), 1) +## self.debug_print("HI_W_HU_A %s" % (self._HI_W_HU_A), 1) +## self.debug_print("HI_W %s" % (self._HI_W), 1) +## self.debug_print("HI_A %s" % (self._HI_A), 1) +## self.debug_print("HU_W %s" % (self._HU_W), 1) +## self.debug_print("HU_A %s" % (self._HU_A), 1) +## self.debug_print("HI_W_HU_W %s" % (self._HI_W_HU_W), 1) +## self.debug_print("HI_A_HU_A %s" % (self._HI_A_HU_A), 1) +## self.debug_print("TI_W %s" % (self._TI_W), 1) +## self.debug_print("TI_A %s" % (self._TI_A), 1) +## self.debug_print("TR_W %s" % (self._TR_W), 1) +## self.debug_print("TR_A %s" % (self._TR_A), 1) +## self.debug_print("TI_A_TR_A %s" % (self._TI_A_TR_A), 1) +## self.debug_print("TI_W_TR_W %s" % (self._TI_W_TR_W), 1) + +# +# COMMENT: getPeriod_#_ definitions below contain the guts of the tropical +# cyclone formatter logic used to determine pws phrases or expressions of +# uncertainty. +# + def getPeriod_1_Desc(self, tree, node, maxMag, pws64, pws34): + """ + Determines contents of PWS phrase for a first period forecast. 
+ """ + self.debug_print("\tgetPeriod_1_Desc") + + desc = "" + self.debug_print("Period time range = %s" % + (repr(node.getComponent().getTimeRange())), 1) + self.debug_print("PWS34_wrng = %s" % (pws34), 1) + self.debug_print("PWS64_wrng = %s" % (pws64), 1) + + # Grab thresholds for this period - special case 2 for each + component = node.getComponent() + windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) + (thresh34low, thresh34high) = windSpdProb_thresholds[0][0] + (thresh64low, thresh64high) = windSpdProb_thresholds[0][1] + + # Display thresholds so we know what we're using + self.debug_print("34 kt thresholds = (%.2f, %.2f)" % + (thresh34low, thresh34high), 1) + self.debug_print("64 kt thresholds = (%.2f, %.2f)" % + (thresh64low, thresh64high), 1) + + if self._Hurricane_A and self._TropStorm_W and not self._Hurricane_W: + #if (self._Hurricane_W or self._Hurricane_A) and (self._TropStorm_W or self._TropStorm_A): + if maxMag >= 34.0: + if pws34 >= thresh34high: + desc = "iminTSposHR" + else: + desc = "expTSposHR" + elif pws34 >= thresh34low and maxMag >= 25.0: + desc = "expTSposHR" + elif pws64 >= thresh64low: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34low or pws34+10.0 >= thresh34low or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" # or regular phrasing + self.debug_print("ifelse1!!! %s" % (maxMag)) + + elif self._Hurricane_W or self._Hurricane_A: + + if maxMag >= 64.0: + if pws64 >= thresh64high: + desc = "iminHR" + else: + desc = "expHR" + elif pws64 >= thresh64low and maxMag >= 50.0: + desc = "expHR" + elif maxMag >= 34.0: + if pws34 >= thresh34high: + desc = "iminTSposHR" + else: + desc = "expTSposHR" + elif pws34 >= thresh34low and maxMag >= 25.0: + desc = "expTSposHR" + elif pws64 >= thresh64low: + desc = "posHR" + elif pws34 >= thresh34low or pws34+10.0 >= thresh34low or maxMag >= 25.0: + desc = "posTSbcmgposHR" + else: + desc = "" # or regular phrasing + self.debug_print("ifelse2!!! 
%s" % (maxMag)) + + elif self._TropStorm_W or self._TropStorm_A: + if maxMag >= 34.0: + if pws34 >= thresh34high: + desc = "iminTS" + else: + desc = "expTS" + elif pws34 >= thresh34low and maxMag >= 25.0: + desc = "expTS" + elif pws64 >= thresh64low: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34low or pws34+10.0 >= thresh34low or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" # or regular phrasing + self.debug_print("ifelse3!!! %s" % (maxMag)) + + else: + print("check.......... ", "check") + if maxMag >= 64.0: + desc = "posHR" + elif maxMag >= 34.0: + desc = "posTS" + elif pws64 >= thresh64low or pws64 +5.0 >= thresh64low: + desc = "posHR" + elif pws34 >= thresh34low or pws34+10.0 >= thresh34low: + desc = "posTS" + else: + desc = "" + + return desc + + + def getPeriod_2_Desc(self, tree, node, maxMag, pws64, pws34): + """ + Determines contents of PWS phrase for a second period forecast. + """ + self.debug_print("\tgetPeriod_2_Desc") + + desc = "" + self.debug_print("Period time range = %s" % + (repr(node.getComponent().getTimeRange())), 1) + self.debug_print("PWS34_wrng = %s" % (pws34), 1) + self.debug_print("PWS64_wrng = %s" % (pws64), 1) + + # Grab thresholds for this period + component = node.getComponent() + windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) + (thresh34, thresh64) = windSpdProb_thresholds[1] + + # Display thresholds so we know what we're using + self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % + (thresh34, thresh64), 1) + + if self._Hurricane_A and self._TropStorm_W and not self._Hurricane_W: + #if (self._Hurricane_W or self._Hurricane_A) and (self._TropStorm_W or self._TropStorm_A): + if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): + desc = "expTSposHR" + elif pws64 >= thresh64: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34 or pws34+10.0 >= thresh34 or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" # or regular phrasing + self.debug_print("ifelse1!!! 
%s" % (maxMag)) + + elif self._Hurricane_W or self._Hurricane_A: + if maxMag >= 64.0 or (pws64 >= thresh64 and maxMag >= 50.0): + desc = "expHR" + elif maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): + desc = "expTSposHR" + elif pws64 >= thresh64: + desc = "posHR" + elif pws34 >= thresh34 or pws34+10.0 >= thresh34 or maxMag >= 25.0: + desc = "posTSbcmgposHR" + else: + desc = "" # or regular phrasing + self.debug_print("ifelse2!!! %s" % (maxMag)) + + elif self._TropStorm_W or self._TropStorm_A: + if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): + desc = "expTS" + elif pws64 >= thresh64: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34 or pws34+10.0 >= thresh34 or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" # or regular phrasing + self.debug_print("ifelse3!!! %s" % (maxMag)) + + else: + # print "check.......... ", "check" + if maxMag >= 64.0: + desc = "posHR" + elif maxMag >= 34.0: + desc = "posTS" + elif pws64 >= thresh64 or pws64 +5.0 >= thresh64: + desc = "posHR" + elif pws34 >= thresh34 or pws34+10.0 >= thresh34: + desc = "posTS" + else: + desc = "" + + return desc + + + def getPeriod_3_Desc(self, tree, node, maxMag, pws64, pws34): + """ + Determines contents of PWS phrase for a third period forecast. 
+ """ + self.debug_print("\tgetPeriod_3_Desc") + + desc = "" + self.debug_print("Period time range = %s" % + (repr(node.getComponent().getTimeRange())), 1) + self.debug_print("PWS34_wrng = %s" % (pws34), 1) + self.debug_print("PWS64_wrng = %s" % (pws64), 1) + + # Grab thresholds for this period + component = node.getComponent() + windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) + (thresh34, thresh64) = windSpdProb_thresholds[2] + + # Display thresholds so we know what we're using + self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % + (thresh34, thresh64), 1) + + if self._Hurricane_A and self._TropStorm_W and not self._Hurricane_W: + + if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): + desc = "expTSposHR" + elif pws64 >= thresh64: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" + self.debug_print("ifelse1!!! %s" % (maxMag)) + + elif self._Hurricane_W: + + if maxMag >= 64.0 or (pws64 >= thresh64 and maxMag >= 50.0): + desc = "expHR" + elif maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): + desc = "expTSposHR" + elif pws64 >= thresh64: + desc = "posHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: + desc = "posTSbcmgposHR" + else: + desc = "" + self.debug_print("ifelse2!!! %s" % (maxMag)) + + elif self._TropStorm_W: + + if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): + desc = "expTS" + elif pws64 >= thresh64: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" + self.debug_print("ifelse3!!! %s" % (maxMag)) + + elif self._Hurricane_A: + + if maxMag >= 50.0 or pws64 >= thresh64: + desc = "posHR" + elif maxMag >= 25.0 or pws34 >= thresh34 or pws34+5.0 >= thresh34: + desc = "posTSbcmgposHR" + else: + desc = "" + self.debug_print("ifelse4!!! 
%s" % (maxMag)) + + elif self._TropStorm_A: + + if maxMag >= 34.0: + if pws64 >= thresh64: + desc = "posTSbcmgposHR" + else: + desc = "posTS" + elif pws64 >= thresh64: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" + self.debug_print("ifelse5!!! %s" % (maxMag)) + + else: + self.debug_print("HERE I AM") + if pws64 >= thresh64 or pws64+2.5 >= thresh64: + desc = "posHR" + elif maxMag >= 64.0: + desc = "posHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34: + desc = "posTS" + elif maxMag >= 34.0: + desc = "posTS" + else: + desc = "" + + return desc + + + def getPeriod_4_Desc(self, tree, node, maxMag, pws64, pws34): + """ + Determines contents of PWS phrase for a fourth period forecast. + """ + self.debug_print("\tgetPeriod_4_Desc") + + desc = "" + self.debug_print("Period time range = %s" % + (repr(node.getComponent().getTimeRange())), 1) + self.debug_print("PWS34_wrng = %s" % (pws34), 1) + self.debug_print("PWS64_wrng = %s" % (pws64), 1) + + # Grab thresholds for this period + component = node.getComponent() + windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) + (thresh34, thresh64) = windSpdProb_thresholds[3] + + # Display thresholds so we know what we're using + self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % + (thresh34, thresh64), 1) + + if self._Hurricane_A and self._TropStorm_W and not self._Hurricane_W: + + if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): + desc = "expTSposHR" + elif pws64 >= thresh64: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" + self.debug_print("ifelse1!!! 
%s" % (maxMag)) + + elif self._Hurricane_W: + + if maxMag >= 64.0 or (pws64 >= thresh64 and maxMag >= 50.0): + desc = "expHR" + elif maxMag >= 34 or (pws34 >= thresh34 and maxMag >= 25.0): + desc = "expTSposHR" + elif pws64 >= thresh64: + desc = "posHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: + desc = "posTSbcmgposHR" + else: + desc = "" + self.debug_print("ifelse2!!! %s" % (maxMag)) + + elif self._TropStorm_W: + + if maxMag >= 34.0 or (pws34 >= thresh34 and maxMag >= 25.0): + desc = "expTS" + elif pws64 >= thresh64: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" + self.debug_print("ifelse3!!! %s" % (maxMag)) + + elif self._Hurricane_A: + + if maxMag >= 50.0 or pws64 >= thresh64: + desc = "posHR" + elif maxMag >= 25.0 or pws34 >= thresh34 or pws34+5.0 >= thresh34: + desc = "posTSbcmgposHR" + else: + desc = "" + self.debug_print("ifelse4!!! %s" % (maxMag)) + + elif self._TropStorm_A: + + if maxMag >= 34.0: + if pws64 >= thresh64: + desc = "posTSbcmgposHR" + else: + desc = "posTS" + elif pws64 >= thresh64: + desc = "posTSbcmgposHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34 or maxMag >= 25.0: + desc = "posTS" + else: + desc = "" + self.debug_print("ifelse5!!! %s" % (maxMag)) + + else: + self.debug_print("HERE I AM") + if pws64 >= thresh64 or pws64+2.5 >= thresh64: + desc = "posHR" + elif maxMag >= 64.0: + desc = "posHR" + elif pws34 >= thresh34 or pws34+5.0 >= thresh34: + desc = "posTS" + elif maxMag >= 34.0: + desc = "posTS" + else: + desc = "" + + return desc + + + def getPeriod_5_9_Desc(self, tree, node, maxMag, pws64, pws34): + """ + Determines contents of PWS phrase for a fifth to ninth period forecast. 
+ """ + self.debug_print("\tgetPeriod_5_9_Desc") + + desc = "" + self.debug_print("Period time range = %s" % + (repr(node.getComponent().getTimeRange())), 1) + self.debug_print("PWS34_wrng = %s" % (pws34), 1) + self.debug_print("PWS64_wrng = %s" % (pws64), 1) + + # Grab thresholds for this period + component = node.getComponent() + windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) + (thresh34, thresh64) = \ + windSpdProb_thresholds[node.getComponent().getIndex()] + + # Display thresholds so we know what we're using + self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % + (thresh34, thresh64), 1) + + if (pws64 >= thresh64 or (pws64 + 1.0) >= thresh64): + desc = "posHR" + elif maxMag >= 64.0: + desc = "posHR" + elif (self._Hurricane_A or self._Hurricane_W) and maxMag >= 50: + desc = "posHR" + elif (pws34 >= thresh34 or (pws34 + 2.5) >= thresh34): + desc = "posTS" + elif maxMag >= 34.0: + desc = "posTS" + elif (self._Hurricane_A or self._Hurricane_W or self._TropStorm_A or self._TropStorm_W) and maxMag >= 25: + desc = "posTS" + else: + desc = "" + + return desc + + + def getPeriod_10_14_Desc(self, tree, node, maxMag, pws64, pws34): + """ + Determines contents of PWS phrase for a fourth period forecast. 
+ """ + self.debug_print("\tgetPeriod_4_Desc") + + desc = "" + self.debug_print("Period time range = %s" % + (repr(node.getComponent().getTimeRange())), 1) + self.debug_print("PWS34_wrng = %s" % (pws34), 1) + self.debug_print("PWS64_wrng = %s" % (pws64), 1) + + # Grab thresholds for this period + component = node.getComponent() + windSpdProb_thresholds = self.windSpdProb_thresholds(tree, component) + (thresh34, thresh64) = windSpdProb_thresholds[9] + + # Display thresholds so we know what we're using + self.debug_print("(34 kt threshold, 64 kt threshold) = (%.2f, %.2f)" % + (thresh34, thresh64), 1) + + if (pws64 >= thresh64 or (pws64 + 1.0) >= thresh64): + desc = "posHR" + elif maxMag >= 64.0: + desc = "posHR" + elif (self._Hurricane_A or self._Hurricane_W) and maxMag >= 50: + desc = "posHR" + elif (pws34 >= thresh34 or (pws34 + 2.5) >= thresh34): + desc = "posTS" + elif maxMag >= 34.0: + desc = "posTS" + elif (self._Hurricane_A or self._Hurricane_W or self._TropStorm_A or self._TropStorm_W) and maxMag >= 25: + desc = "posTS" + else: + desc = "" + + return desc + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/WxPhrases.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/WxPhrases.py index 498ef16c3d..d0f95c309a 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/WxPhrases.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/textUtilities/regular/WxPhrases.py @@ -1,1940 +1,1940 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. 
persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# WxPhrases.py -# Methods for producing text forecast from SampleAnalysis statistics. -# -# Author: hansen -# ---------------------------------------------------------------------------- -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- ----------------------------- -# 11/28/2016 5749 randerso Changed calls to addTextList -# to use default parameters -# -## - -## -# This is a base file that is not intended to be overridden. 
-## - -import PhraseBuilder -import types, re -import TimeRange -import logging - -class WxPhrases(PhraseBuilder.PhraseBuilder): - def __init__(self): - PhraseBuilder.PhraseBuilder.__init__(self) - self.log = logging.getLogger("FormatterRunner.WxPhrases.WxPhrases") - - ############################################ - ### WEATHER PHRASES - def standard_weather_phraseMethods(self): - return [ - self.preProcessWx, - self.separateNonPrecip, - self.consolidateVisibility, - self.checkLocalEffects, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.checkResolution, - self.assembleSubPhrases, - self.postProcessPhrase, - ] - - ### WX - def pop_wx_lower_threshold(self, tree, node): - # Pop-related Wx will not be reported if Pop is below this threshold - return self.popThreshold(tree, node, self._hoursSChcEnds, 15.0, 25.0) - - def pop_related_flag(self, tree, node, subkey): - # These are weather subkeys that are related to PoP and - # should be not be reported if pop is low - wxType = subkey.wxType() - if wxType in ["ZR","R","RW","S","SW", "T", "IP"]: - if wxType == "SW" or wxType == "RW": - if subkey.intensity() == "--": - return 0 - if wxType == "T" and "Dry" in subkey.attributes(): - return 0 - return 1 - else: - return 0 - - def precip_related_flag(self, tree, node, subkey): - # These are weather types that are precip versus non-precip - # and could be separated into different phrases from the - # non-precip weather types. - if subkey.wxType() in ["ZR", "R", "RW", "S", "SW", "T", "ZL", "L", "IP"]: - return 1 - else: - return 0 - - def filter_subkeys_flag(self): - # Filtering and condensing of weather subkeys. - # If you do not want subkeys to be condensed and filtered, override this - # variable and set it to 0. - return 1 - - def wxHierarchies(self): - # This is the hierarchy of which coverage and intensity to choose if - # wxTypes are the same and to be combined into one subkey. 
- return { - "wxType":["WP", "R", "RW", "T", "L", "ZR", "ZL", "S", "SW", - "IP", "F", "ZF", "IF", "IC", "H", "BS", "BN", "K", "BD", - "FR", "ZY", "BA", "",""], - "coverage":["Def","Wide","Brf","Frq", "Ocnl","Pds", "Inter", - "Lkly","Num","Sct","Chc","Areas", - "SChc","WSct","Iso","Patchy","",""], - "intensity":["+","m","-","--","",""], - "visibility":["0SM", "1/4SM", "1/2SM", "3/4SM", "1SM", "11/2SM", "2SM", - "21/2SM", "3SM", "4SM", "5SM", "6SM", "P6SM", "",""], - } - - def similarCoverageLists(self, tree, node, subkey1, subkey2): - # Lists of coverages that should be combined or considered equal. - # Each list should be ordered from weaker to stronger - # and the stronger coverage will be kept when subkeys are - # combined. - # - # These lists are examined when combining sub-phrases and when - # determining if there is a local effect to report. - # Called by PhraseBuilder:checkWeatherSimilarity - # - return [ - ['SChc', 'Iso'], - ['Chc', 'Sct'], - ['Lkly', 'Num'], - ['Brf', 'Frq', 'Ocnl', 'Pds', 'Inter', 'Def', 'Wide'], - ] - - def wxCombinations(self): - # This is the list of which wxTypes should be combined into one - # WITHIN a sub-phrase. - # For example, if ("RW", "R") appears, then wxTypes of "RW" and "R" will - # be combined into one key and the key with the dominant coverage will - # be used as the combined key. - # You may also specify a method which will be - # -- given arguments subkey1 and subkey2 and - # -- should return - # -- a flag = 1 if they are to be combined, 0 otherwise - # -- the combined key to be used - # Note: The method will be called twice, once with (subkey1, subkey2) - # and once with (subkey2, subkey1) so you can assume one ordering. 
- # See the example below, "combine_T_RW" - # - return [ - ("RW", "R"), - ("SW", "S"), - self.combine_T_RW, - ] - - def combine_T_RW(self, subkey1, subkey2): - # Combine T and RW only if the coverage of T - # is dominant over OR equal to the coverage of RW and - # RW does not have + intensity - wxType1 = subkey1.wxType() - wxType2 = subkey2.wxType() - if wxType1 == "T" and wxType2 == "RW": - if subkey2.intensity() != "+": - order = self.dominantCoverageOrder(subkey1, subkey2) - if order == -1 or order == 0: - return 1, subkey1 - return 0, None - - # Customizing Weather Phrases - def wxCoverageDescriptors(self): - # This is the list of coverages, wxTypes, intensities, attributes for which special - # weather coverage wording is desired. Wildcards (*) can be used to match any value. - # If a weather subkey is not found in this list, default wording - # will be used from the Weather Definition in the server. - # The format of each tuple is: - # (coverage, wxType, intensity, attribute, descriptor) - # For example: - #return [ - # ("Chc", "*", "*", "*", "a chance of"), - # ] - # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments - return [] - - def wxTypeDescriptors(self): - # This is the list of coverages, wxTypes, intensities, attributes for which special - # weather type wording is desired. Wildcards (*) can be used to match any value. - # If a weather subkey is not found in this list, default wording - # will be used from the Weather Definition in the server. 
- # The format of each tuple is: - # (coverage, wxType, intensity, attribute, descriptor) - # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments - return [ - ("*", "SW", "--", "*", "flurries"), - ("*", "RW", "*", "*", self.rainShowersDescriptor), - ("*", "T", "*", "Dry", "dry thunderstorms"), - ] - - def rainShowersDescriptor(self, tree, node, subkey): - if subkey.intensity() == "--": - return "sprinkles" - if tree is None: - return "showers" - t = tree.stats.get( - "T", node.getTimeRange(), - node.getAreaLabel(), statLabel="minMax", - mergeMethod="Min") - if t is None: - return "showers" - if t < 60: - return "rain showers" - else: - return "showers" - - def wxIntensityDescriptors(self): - # This is the list of coverages, wxTypes, intensities, attribute for which special - # weather intensity wording is desired. Wildcards (*) can be used to match any value. - # If a weather subkey is not found in this list, default wording - # will be used from the Weather Definition in the server. - # The format of each tuple is: - # (coverage, wxType, intensity, attribute, descriptor) - # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments - return [ - ("*", "RW", "--", "*", ""), - ("*", "RW", "-", "*", ""), - ("*", "R", "--", "*", "light"), - ("*", "R", "-", "*", ""), - ("*", "R", "+", "*", ""), - ("*", "RW", "+", "*", ""), - ("*", "SW", "--", "*", ""), - ("*", "SW", "-", "*", ""), - ("*", "SW", "+", "*", ""), - ("*", "S", "--", "*", "very light"), - ("*", "S", "-", "*", ""), - ("*", "S", "+", "*", ""), - ("*", "T", "+", "*", ""), - ("*", "ZR", "--", "*", "light"), - ("*", "ZR", "+", "*", ""), - ("*", "L", "*", "*", ""), - ("*", "F", "+", "*", "dense"), - ("*", "IP", "+", "*", ""), - ] - - def wxAttributeDescriptors(self): - # This is the list of coverages, wxTypes, intensities, attributes, for which special - # weather attribute wording is desired. Wildcards (*) can be used to match any value. 
- # If a weather subkey is not found in this list, default wording - # will be used from the Weather Definition in the server. - # The format of each tuple is: - # (coverage, wxType, intensity, attribute, descriptor) - # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments - return [ - ("*", "T", "*", "HvyRn", ""), - ("*", "T", "*", "Dry", ""), - ("*", "T", "*", "GW", ""), - ("*", "T", "*", "DmgW", ""), - ("*", "T", "*", "FL", ""), - ("*", "T", "*", "LgA", ""), - ("*", "T", "*", "SmA", ""), - ("*", "T", "*", "TOR", ""), - ] - - def weather_phrase(self): - return { - "setUpMethod": self.weather_setUp, - "wordMethod": self.weather_words, - "phraseMethods": self.standard_weather_phraseMethods() - } - def weather_setUp(self, tree, node): - resolution = node.get("resolution") - if resolution is not None: - mergeMethod = "Average" - else: - mergeMethod = "List" - elementInfoList = [self.ElementInfo("Wx", mergeMethod, self.WEATHER())] - self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector, - resolution) - node.set("allTimeDescriptors", 1) - if self.areal_sky_flag(tree, node): - self.disableSkyRelatedWx(tree, node) - return self.DONE() - - def preProcessWx(self, tree, phrase): - #print '---------------------------------------------------------------' - #print 'in _preProcessWxStats -> ', phrase.get("name"), phrase.getTimeRange() - #time1 = time.time() - - # Create a new list to hold all our subkeys and timeRanges - newStats = [] - # Create a variable to hold the subphrase methodList - methodList = None - # Look through each subphrase of this phrase - resolution = phrase.get("resolution") - for subPhrase in phrase.childList: - # If we do not have a copy of the subPhrase methodList - make one - if methodList is None: - methodList = subPhrase.get("methodList") - # Get stats for this subphrase - if resolution is not None: - tr = subPhrase.getTimeRange() - rankList = tree.stats.get( - 'Wx', tr, subPhrase.getAreaLabel(), mergeMethod="Average") - 
statList = [(rankList, tr)] - else: - statList = tree.stats.get( - 'Wx', subPhrase.getTimeRange(),subPhrase.getAreaLabel()) - if statList is None: - return self.DONE() - # Gather the subkeys and ranks by time range - for (rankList, timeRange) in statList: - if rankList is None: - continue - for subkey, rank in rankList: - newStats.append((subkey, rank, timeRange, 0)) - # Remove this node from the tree - #subPhrase.remove() - - # Make a new list to hold combined stats - combinedStats = [] - # Define a variable to track number of changes made to new Wx stats - changes = 1 # We want at least one pass through the stats - # Keep consolidating new statistics until no changes are made - while len(newStats) > 0 and changes > 0: - # No changes made yet this pass - changes = 0 - # If we have already combined Wx statistics - if len(combinedStats) > 0: - # Update the list of Wx stats we need to consolidate - newStats = combinedStats - # Combine as many Wx stats as possible - (changes, combinedStats) = self.combineWxStats( - tree, subPhrase, newStats) - - # Make a new dictionary to filter consolidated stats by time - finalStatDict = {} - finalKeys = [] - # For each combined Wx statistic - for (curKey, curRank, curTR, curCombined_flag) in combinedStats: - # If the dictionary does not have this time range currently - if curTR not in finalStatDict.keys(): - # Add this tuple to the dictionary - finalStatDict[curTR] = [(curKey, curRank)] - finalKeys.append(curTR) - # Otherwise - else: - tempList = finalStatDict[curTR] - tempList.append((curKey, curRank)) - finalStatDict[curTR] = tempList - - #print "\nPhrase :", phrase.get("name") - #print 'finalStatDict = ', finalStatDict - #print "timeRanges", finalKeys - -## for su100bPhrase in phrase.childList: -## print '------------------------------------------------------------' -## print subPhrase.printNode(subPhrase) - - # Make a list to hold all the new subphrases - newSubPhraseList = [] - # Sort consolidated time ranges - #print 
"\nfinalKeys before sort", finalKeys - finalKeys.sort(self.orderTimeRanges) - #print "\nfinalKeys after sort", finalKeys - # Create new nodes for each of our consolidated subphrases - for timeRange in finalKeys: - # Create a new subphrase for the consolidated Wx types and times - newSubPhrase = tree.makeNode([], methodList, phrase) - statDict = {} - statDict['Wx'] = finalStatDict[timeRange] - newSubPhrase.set("elementName", 'Wx') - newSubPhrase.set("changeFlag", 0) - newSubPhrase.set("parent", phrase) - newSubPhrase.set("statDict", statDict) - newSubPhrase.set("timeRange", timeRange) - newSubPhrase.set("childList", []) - newSubPhrase.set("timeDescFlag", 1) # always include timers - # Keep track of this new node - newSubPhraseList.append(newSubPhrase) - - # Replace the old subphrases with the new subphrases - phrase.set("childList", newSubPhraseList) - # Indicate Wx stats have been preprocessed\ - #print " Time: ", time.time() - time1 - return self.DONE() - - # Define a method to combine Wx stats per my relaxed rules - def combineWxStats(self, tree, subPhrase, statList): - # Define some variables to keep track of combined Wx subkeys - combinedStats = [] - changes = 0 - combinedTypes = [] - - # Look through all our subkeys - for index in range(len(statList)-1): - - # Get the info about this key - (curKey, curRank, curTR, curCombined_flag) = statList[index] - - # Break this subkey into its components - curCov = curKey.coverage() - curType = curKey.wxType() - curInten = curKey.intensity() - curVis = curKey.visibility() - curAttr = curKey.attributes() - - # Shorten the list of keys we need to test - searchStats = statList[index+1:] -## print 'searchStats = ', searchStats - - # See if we can combine this key with any of the other keys - for testIndex in range(len(searchStats)): - - # Get the infor about this key - (testKey, testRank, testTR, testCombined_flag) = searchStats[testIndex] - - # We will only try combining if neighboring time ranges - if curTR.endTime() != 
testTR.startTime(): - continue - - # See if there is a significant difference between these - # Wx subkeys - # match is zero if there is a difference between the keys. - # is 1 or 2 if they can be combined. - match = self.checkWeatherSimilarity( - tree, subPhrase, [(curKey, curRank)],[(testKey, testRank)], - tr1=curTR, tr2=testTR) - - #print 'combineWxStats %s <=> %s -> %d' % (curKey, testKey, match) - - # If these keys could be combined, and this type has not been - # combined yet - if match > 0 and curType not in combinedTypes: - newSubkey = self.makeAggregateSubkey(curKey, curRank, testKey, testRank) - #print "newSubkey", newSubkey - # Mark both keys in current lists as being combined - statList[index] = (curKey, curRank, curTR, curCombined_flag + 1) - searchStats[testIndex] = (testKey, testTR, - testCombined_flag + 1) - # Make sure the search key is also marked as combined - # in the main stats list - statList[index+testIndex+1] = (testKey, testRank, testTR, - testCombined_flag + 1) - # Make a new time range for this combined key - newTR = TimeRange.TimeRange(curTR.startTime(), testTR.endTime()) - # Take highest rank of the two subkeys to be combined - if curRank > testRank: - newRank = curRank - else: - newRank = testRank - # Add this new subkey to the consolidated stats - combinedStats.append((newSubkey, newRank, newTR, 0)) - # Do not try to combine this Wx subkey any more - changes = changes + 1 - combinedTypes.append(curType) - - # Make sure we did not miss any subkeys that were not combined - for (curKey, curRank, curTR, curCombined_flag) in statList: - - # If this key was not previously combined - if curCombined_flag == 0: - - # Add this key to the consolidated stats - combinedStats.append((curKey, curRank, curTR, curCombined_flag)) - -## print '\n', changes, combinedTypes -## print 'combinedStats = ' -## print combinedStats -## print '**********************************************************' - - # Return number of changes and current combined Wx stats 
- return (changes, combinedStats) - - def useSimple(self, tree, node, rankList, subkeys): - # Return 1 if we want to use simple weather phrasing - # where the weather subkeys are simply connected by - # a conjunction i.e. do not use "with", "mixedWith", "possiblyMixedWith", - # "withPocketsOf". For example: - # - # Simple wording: - # Chance of rain and snow and slight chance of sleet in the evening. - # Mostly cloudy with chance of rain and a slight chance of thunderstorms. - # - # Complex wording: - # Chance of rain and snow possibly mixed with sleet in the evening. - # Chance of rain with possible thunderstorms in the evening. - # - numSubkeys = len(subkeys) - if numSubkeys <= 2: - return 1 - elif numSubkeys <= 4: - # Check for coverage groupings - # If there are only 1 or 2 coverage groupings, use simple wording - covList = [] - for subkey in subkeys: - cov = subkey.coverage() - if cov not in covList: - covList.append(cov) - if len(covList) <= 2: - return 1 - else: - return 0 - else: - return 0 - - def rankWordingFuzzFactor(self, tree, node, subkey1, subkey2): - # Used in weather wording to determine if - # subkeys have significantly different ranks. - # If so, then wording such as "possibly mixed with" or - # "with pockets of" could be used. 
- return 10 - - def wxConjunction(self, tree, node, subkey1, rank1, subkey2, rank2): - if subkey1 is not None: - attr1 = subkey1.attributes() - else: - attr1 = [] - attr2 = subkey2.attributes() - if "OR" in attr1 or "OR" in attr2: - return " or " - - # If Def is followed by Lkly, return "with" to avoid - # ambiguity - cov1 = subkey1.coverage() - cov2 = subkey2.coverage() - if cov1 == "Def" and cov2 == "Lkly": - return " with " - else: - return " and " - - def withPossible(self, tree, node, subkey1, rank1, subkey2, rank2): - # Wording to use if subkey1 has higher rank or is dominant over subkey2 - # Handle "with little or no rain" - wxType1 = subkey1.wxType() - wxType2 = subkey2.wxType() - if wxType1 in ["T"] and wxType2 in ["RW", "R"]: - self.includeValue = 0 # i.e. do not add rain or rain showers wording - return " with little or no rain" - return " with possible " - - def withPhrase(self, tree, node, subkey1, rank1, subkey2, rank2): - # Wording to use if subkey1 has similar rank and coverage to subkey2 - wxType1 = subkey1.wxType() - wxType2 = subkey2.wxType() - #print "wxType1, 2", wxType1, wxType2 - if wxType2 in ["T"]: - return " and " - else: - return " with " - - def withPocketsOf(self, tree, node, subkey1, rank1, subkey2, rank2): - # Wording to use if subkey2 has higher rank or is dominant over subkey1 - return " with pockets of " - - def possiblyMixedWith(self, tree, node, subkey1, rank1, subkey2, rank2): - # Wording to use if subkey1 has higher rank or is dominant over subkey2 - wxType1 = subkey1.wxType() - wxType2 = subkey2.wxType() - # Handle "with possible thunderstorms" - if wxType2 in ["T"]: - return " with possible " - # Handle "with little or no rain" - if wxType1 in ["T"] and wxType2 in ["RW", "R"]: - self.includeValue = 0 # i.e. 
do not add rain or rain showers wording - return " with little or no rain" - return " possibly mixed with " - - def mixedWith(self, tree, node, subkey1, rank1, subkey2, rank2): - # Wording to use if subkey1 has similar rank and coverage to subkey2 - wxType1 = subkey1.wxType() - wxType2 = subkey2.wxType() - #print "wxType1, 2", wxType1, wxType2 - if wxType2 in ["T"]: - return " and " - else: - return " mixed with " - - def weather_words(self, tree, node): - # Create a phrase to describe a list of weather sub keys for one sub-period - - # Get rankList - statDict = node.getStatDict() - rankList = self.getStats(statDict, "Wx") - if self._debug: - print "\n SubKeys in weather_words", rankList - print " TimeRange", node.getTimeRange(), node.getAreaLabel() - print " Phrase name", node.getAncestor("name") - if rankList is None or len(rankList) == 0: - return self.setWords(node, "") - - # Check against PoP - rankList = self.checkPoP(tree, node, rankList) - - # Check visibility - subkeys = self.getSubkeys(rankList) - if self.checkVisibility(tree, node, subkeys): - return self.setWords(node, "null") - - # Get the weather words - words = self.getWeatherWords(tree, node, rankList) - node.set('reportedRankList', rankList) - - # Add embedded visibility - words = self.addEmbeddedVisibility(tree, node, subkeys, words) - if words == "": - words = "null" - if self._debug: - print " Setting words", words - - # To replace multiple "and's" with ellipses - words = self.useCommas(tree, node, words) - - return self.setWords(node, words) - - def checkPoP(self, tree, node, rankList): - # Do not report pop_related subkeys if PoP is below - # pop_wx_lower_threshold - popThreshold = self.pop_wx_lower_threshold(tree, node) - lowPopFlag = self.lowPop_flag(tree, node, popThreshold) - # Force a check of all weather subkeys - newList = [] - for subkey, rank in rankList: - # If PoP threshold >= 25% and coverage is 'SChc' or 'Iso' - # don't mention the subkey - if popThreshold >= 25 and 
subkey.coverage() in ['SChc', 'Iso']: - continue - # If we have a low PoP, and this is not a precip-related subkey - # report it e.g. Fog - if lowPopFlag == 1: - if not self.pop_related_flag(tree, node, subkey): - newList.append((subkey, rank)) - # If there is no low PoP, report the subkey - else: - newList.append((subkey, rank)) - return newList - - def getWeatherWords(self, tree, node, rankList): - # For each WeatherSubKey, add it to the phrase - # Use ranking of subkeys to form wording: - # If useSimple produce simple wording e.g. - # Chance of rain and snow and slight chance of sleet and freezing rain. - # Otherwise: - # Create a phrase of the form: - # - # where: - # list1 and list2 are lists of subkeys separated by - # '...' or 'and' e.g. Snow...rain and sleet - # list1 subkeys have similar coverages and ranks - # list2 subkeys have coverages or ranks significantly - # different from those in list1 - # conjunction connects the 2 lists appropriately, e.g. - # Snow and rain with possible sleet and freezing rain. - # Rain and drizzle with pockets of snow. - - rankList.sort(self.rankedSortOrder) - length = len(rankList) - words = "" - index = 0 - # For non-simple phrasing, have we switched to the second list - # using the conjunction yet? - switchConjunction = 0 - # Begin by including coverage with weather value - includeCovInten = 1 - # Handle "Likely" specially - addLkly = 0 - subkeys = self.getSubkeys(rankList) - useSimple = self.useSimple(tree, node, rankList, subkeys) - prevCoverage = prevSubkey = prevRank = None - prevConj = "" - - for index in range(len(rankList)): - subkey, rank = rankList[index] - # If not last one, determine nextCoverage - if index < length-1: - nextSubkey, nextRank = rankList[index+1] - else: - nextSubkey = None - - # Set so that value is included UNLESS re-set by one of the - # sub-methods e.g. mixedWith, possiblyMixedWith, etc.. 
- self.includeValue = 1 - - # Add conjunction for non-simple words - if not useSimple: - words, conj, switchConjunction, includeCovInten, addLkly = \ - self.addWxConjunction( - tree, node, words, prevSubkey, prevRank, subkey, rank, - index, switchConjunction, includeCovInten, addLkly) - - # Get string for subkey checking previous and next coverage - value, prevCoverage = self.weather_value( - tree, node, subkey, prevCoverage, nextSubkey, - includeCovInten=includeCovInten) - if self.includeValue == 1: - if value == "": - # If empty value string, remove the previous conjunction - # so we don't end up with something like "rain and" - words = self.removeLast(words, prevConj) - else: - words = words + value - - # if last one, do not add conjunction - if index == length - 1: break - if useSimple: - conj = self.wxConjunction(tree, node, subkey, rank, nextSubkey, nextRank) - words = words + conj - - prevSubkey = subkey - prevConj = conj - prevRank = rank - - if addLkly: - words = words + " likely" - return words - - def addWxConjunction(self, tree, node, words, prevSubkey, prevRank, subkey, rank, - index, switchConjunction, includeCovInten, addLkly): - # Check to see if we can switch to "with" or "with pockets of" - # OR just add the regular conjunction - # NOTE:"mixed" weather will be implemented when the samplers can support it. 
- conj = "" - #print "\nin addWxConj", prevSubkey, prevRank, subkey, rank - if not switchConjunction and index > 0: - includeCovInten = 0 - similarCovs = self.similarCoverages(tree, node, prevSubkey, subkey) - rankWordingFuzzFactor = self.rankWordingFuzzFactor( - tree, node, prevSubkey, subkey) - # If the current rank is significantly less than the previous one - # or if the coverage is significantly different from the previous one - #print "prevSubkey, subkey", prevSubkey, subkey - #print "similarCovs", similarCovs - if rank <= prevRank - rankWordingFuzzFactor or similarCovs == 0: - if similarCovs == 1 or similarCovs == 0: - # Prev subkey is dominant either by coverage or rank - method = self.withPossible - else: # similarCovs == 2 i.e. Current subkey is dominant - method = self.withPocketsOf - switchConjunction = 1 - conj = method(tree, node, prevSubkey, prevRank, subkey, rank) - if conj == "": - if index > 0: - if prevSubkey.coverage() == "Lkly": - addLkly = 1 - conj = self.wxConjunction(tree, node, prevSubkey, prevRank, - subkey, rank) - words = words + conj - #print "returning", words+conj - return words, conj, switchConjunction, includeCovInten, addLkly - - - def weather_value(self, tree, node, subkey, prevCoverage=None, - nextSubkey=None, typeOnly=0, - includeCovInten=1): - "Return a phrase for the WeatherSubkey" - - # If the prevCoverage is the same, then do not repeat it. - # e.g. "Widespread rain and snow" instead of - # "Widespread rain and widespread snow" - # "Likely" is a special case because it follows the nouns, - # so we need to look at the nextCoverage to get: - # e.g. "Rain and snow likely" instead of - # "Rain likely and snow likely" - # If typeOnly is set to one, only the Type phrase is returned. 
- - wxDef = subkey.wxDef() - wxType = subkey.wxType() - if wxType == '': - wxType = "" - else: - wxType = wxDef.typeDesc(wxType).lower() - - inten = subkey.intensity() - if inten == '': - inten = "" - else: - inten = wxDef.intensityDesc(subkey.wxType(), inten).lower() - - if inten.find("moderate") != -1: - inten = "" - - attrList = subkey.attributes() - attrList = self.removeDups(attrList) - attrTextList = [] - for attr in attrList: - # Ignore non-text attributes - if attr == "MX" or attr == "OR" or attr == "Primary" or attr == "Mention": - continue - attrDesc = wxDef.attributeDesc(subkey.wxType(), attr).lower() - # Use the wxAttributeDescriptors if provided - attrDescs = self.call(self.wxAttributeDescriptors, tree, node) - for des_cov, des_type, des_inten, des_attr, desc in attrDescs: - if self.matchSubkey(subkey, des_cov, des_type, des_inten, des_attr, [attr]) == 1: - attrDesc = self.getWxDesc(tree, node, subkey, desc) - if attrDesc != "": - attrTextList.append(attrDesc) - - # Determine coverage. Check for repetition. 
- covDescs = self.call(self.wxCoverageDescriptors, tree, node) - cov = self.getCoverage(tree, node, subkey, covDescs, wxDef, attrList) - # Make a copy of this coverage for later use - copyCov = cov - #print "cov, prev", cov, prevCoverage - if cov == prevCoverage and not prevCoverage == "likely": - cov = "" - elif cov == "likely": - nextCoverage = self.getCoverage( - tree, node, nextSubkey, covDescs, wxDef, attrList) - if cov == nextCoverage: - cov = "" - #print "result", cov - - # Use wxTypeDescriptors and wxIntensityDescriptors if provided - typeDescs = self.call(self.wxTypeDescriptors, tree, node) - intenDescs = self.call(self.wxIntensityDescriptors, tree, node) - - for des_cov, des_type, des_inten, des_attr, desc in typeDescs: - if self.matchSubkey(subkey, des_cov, des_type, des_inten,des_attr, attrList) == 1: - wxType = self.getWxDesc(tree, node, subkey, desc) - for des_cov, des_type, des_inten, des_attr, desc in intenDescs: - if self.matchSubkey(subkey, des_cov, des_type, des_inten,des_attr, attrList) == 1: - inten = self.getWxDesc(tree, node, subkey, desc) - - # Handle special cases and clean up - if cov == "definite": - cov = "" - - # Hail -- "Large Hail" and "Small Hail" attributes - # get converted to adjectives instead of attributes. 
- if wxType == "hail": - hailAttr = None - if "large hail" in attrTextList: - hailAttr = "large hail" - adj = "large" - if "small hail" in attrTextList: - hailAttr = "small hail" - adj = "small" - if hailAttr is not None: - wxType = adj + " hail" - newAttrs = [] - for attr in attrTextList: - if attr != hailAttr: - newAttrs.append(attr) - attrTextList = newAttrs - - # Arrange the order of the words - if typeOnly == 1: - return wxType, cov - - if includeCovInten == 0: - cov = "" - inten = "" - - word1 = cov - word2 = inten - word3 = wxType - - # Handle special case of "likely" - if cov == "likely": - word1 = inten - word2 = wxType - word3 = cov - - # Put coverage, intensity and wxType together - if word2 == "": - phrase = word1 + " " + word3 - else: - phrase = word1 + " " + word2 + " " + word3 - phrase = phrase.strip() - - # Add attributes - phrase = self.addTextList(phrase, attrTextList) - phrase = phrase.replace("with in", "in") - - if cov == "": - cov = copyCov - return phrase, cov - - def getIndex(self, hierarchy, value): - list = self.wxHierarchies()[hierarchy] - return list.index(value) - - def getSubkeys(self, rankList): - subkeys = [] - if rankList is None: - return subkeys - for subkey, rank in rankList: - if subkey.wxType() == "": - continue - subkeys.append(subkey) - return subkeys - - def checkVisibility(self, tree, node, subkeys): - ## If no visibility threshold is set, produce weather words as normal. - ## If a visibility threshold is set: - ## --If there are significant weather keys in the weather grid, - ## produce the weather words regardless of the visibility. - ## --If there is no visibility specified in the grids, - ## produce the weather words IF there are significant - ## weather keys in the grids. - ## --If there is a visibility specified in the grids, check to see - ## if it is less than the visibility threshold. - ## If it is, produce the weather words. 
- ## --If there is a visibility specified in the grids and it is - ## greater or equal to threshold and there are no significant weather - ## keys, produce "null" weather words. - visThreshold = self.visibility_wx_threshold(tree, node) - if visThreshold is not None: - produceWords = 0 - # Check for significant keys - significantKeys = self.significant_wx_visibility_subkeys(tree, node) - sigFlag = self.findSubkeys(subkeys, significantKeys) - # If significant weather keys, produce words regardless of vis - if sigFlag: - produceWords = 1 - else: # sigFlag is 0 - lowVisNM = self.getVis(subkeys) - # If lowVisNM is None, we will not produce words - if lowVisNM is not None: - # Produce words only if lowVisNM < visThreshold - if lowVisNM < visThreshold: - produceWords = 1 - if not produceWords: - return 1 - return 0 - - def addEmbeddedVisibility(self, tree, node, subkeys, words): - # Add embedded visibility wording - if self.embedded_visibility_flag(tree, node): - # Check for visibility having consolidated to separate phrase - visFlag = node.parent.get("reportVisibility") - if visFlag != 0: - # Find low visibility for subkeys - lowVisNM = self.getVis(subkeys) - if lowVisNM is not None: - lowVisNM = self.roundStatistic(tree, node, lowVisNM, "Visibility") - # If below null_nlValue, report it - visThreshold = self.nlValue(self.null_nlValue( - tree, node, "Visibility", "Visibility"), lowVisNM) - if lowVisNM < visThreshold: - visWords = self.nlValue( - self.visibility_weather_phrase_nlValue(tree, node), lowVisNM) - significantKeys = self.significant_wx_visibility_subkeys(tree, node) - sigFlag = self.findSubkeys(subkeys, significantKeys) - # If the weather words are not null and this is a Wx - # obstruction for which we should report vsby - if words != "": # and sigFlag != 0: DR_18894 change - visWords = " with " + visWords - words = words + visWords - return words - - def separateNonPrecip_threshold(self, tree, node): - # Number of sub-phrases required to separate precip from - 
# non-precip - return 1 - - def separateNonPrecip(self, tree, node): - # See if ready to process - if not self.phrase_trigger(tree, node, setUpOnly=1): - return - # If > designated subPhrases, separate into precip/non-precip - statList = self.getSubStats(node, "Wx") - length = len(statList) - if self.__dict__.get('_leDebug', 0): - print "\n\nseparateNonPrecip disabled", node, length, \ - node.getAncestor("disabledSubkeys") - print " timerange", node.getTimeRange() - print " statList", statList - #print " doneList", node.doneList - if length >= self.separateNonPrecip_threshold(tree, node): - precip = [] - nonPrecip = [] - for rankList in statList: - subkeys = self.getSubkeys(rankList) - for subkey in subkeys: - if subkey.wxType() == "": - continue - if self.precip_related_flag(tree, node, subkey): - precip.append(subkey) - else: - nonPrecip.append(subkey) - if self.__dict__.get('_leDebug', 0): print "precip, nonPrecip", precip, nonPrecip - if len(precip) >= 1 and len(nonPrecip) >= 1: - self.splitWxPhrase(tree, node, precip, nonPrecip, [self.separateNonPrecip]) - return self.DONE() - - def consolidateVisibility(self, tree, node): - # If visibility is constant throughout subphrases and non-null, - # separate out into its own phrase - # See if ready to process - if not self.phrase_trigger(tree, node, setUpOnly=1): - return - subPhrases = node.get("childList") - if len(subPhrases) <= 1: - return self.DONE() - lowVis = None - firstTime = 1 - #print "\nconsolidating" - for subPhrase in node.childList: - statDict = subPhrase.getStatDict() - rankList = self.getStats(statDict, "Wx") - subkeys = self.getSubkeys(rankList) - subLowVis = self.getVis(subkeys) - #print "low vis", subLowVis, subPhrase.getTimeRange() - if firstTime: - lowVis = subLowVis - firstTime = 0 - elif subLowVis != lowVis: - # Visibility for this subPhrase differs from previous - # so we can't consolidate - return self.DONE() - if lowVis is None: - return self.DONE() - # Check to see if lowVis is non-null - 
visThreshold = self.nlValue(self.null_nlValue( - tree, node, "Visibility", "Visibility"), lowVis) - if lowVis <= visThreshold: - # Need to report as separate phrase - newPhrase = tree.addPhraseDef(node, self.visibility_phrase) - # Turn off visibility reporting for this weather phrase - node.set("reportVisibility", 0) - return self.DONE() - - def consolidateWx(self, tree, node): - # If any wxTypes span all subPhrases, separate into their own phrase - statList = self.getSubStats(node, "Wx") - length = len(statList) - subkeyDict = {} - if self.__dict__.get('leDebug', 0): - print "\n\nConsolidating disabled", node.getAncestor("disabledSubkeys") - print " timerange", node.getTimeRange() - print " statList", statList - #print " doneList", node.doneList - if length > 1: - # Count occurrences of each weather key - for subkeys in statList: - for subkey in subkeys: - if subkey not in subkeyDict.keys(): - subkeyDict[subkey] = 1 - else: - subkeyDict[subkey] += 1 - # Find subkeys to disable in first phrase and second phrase, respectively - list1 = [] - list2 = [] - for subkey in subkeyDict.keys(): - count = subkeyDict[subkey] - if count >= length: - list2.append(subkey) - else: - list1.append(subkey) - if self.__dict__.get('_leDebug', 0): print "list1, list2", list1, list2 - if len(list1) > 0 and len(list2) > 0: - self.splitWxPhrase( - tree, node, list1, list2, - [self.consolidateWx, self.separateNonPrecip, - self.skyPopWx_consolidateWx]) - return self.DONE() - - def subPhrase_limit(self, tree, node): - # If the number of sub-phrases is greater than this limit, the weather - # phrase will use 6-hour instead of the higher resolution to produce: - # - # Occasional snow possibly mixed with sleet and freezing - # drizzle in the morning, then a chance of rain possibly mixed wiht snow - # and sleet and freezing drizzle in the afternoon. - # - # instead of: - # Occasional snow in the morning. 
Chance of light sleet and - # slight chance of light freezing drizzle in the late morning and - # early afternoon. Chance of snow early in the afternoon. Chance of - # rain in the afternoon. - return 3 - - - def checkResolution(self, tree, node): - # Check to see if there are too many sub-phrases and we need to re-do the - # phrase in lower resolution. The limit is determined by "subPhrase_limit". - # This currently assumes we have a 3 or greater resolution and want to go to - # a 6-hour resolution. - - # See if ready to process - if not self.phrase_trigger(tree, node): - return - - # Count the number of non-empty phrases - #print "\n In check resolution", node - count = 0 - for subPhrase in node.get("childList"): - words = subPhrase.get("words") - if words == "": - continue - #print "words", subPhrase, words - count += 1 - if count > self.subPhrase_limit(tree, node): - #print "count", count - # Create a new node in it's place with a new - # resolution set - exec "newPhraseDef = self." + node.getAncestor('name') - newPhrase = tree.addPhraseDef(node, newPhraseDef) - newPhrase.set("disabledSubkeys", node.get("disabledSubkeys")) - curResolution = node.get("resolution") - if curResolution is not None: - # If we have already re-set the resolution and we are still over the - # sub-phrase limit, we'll have to decrease the resolution some more - # to try and reduce the number of sub-phrases. - # This is necessary because of the way preProcessWx works: - # For example, even if we have only 2 time periods sampled, - # they can result in 3 or more sub-phrases depending on the - # complexity of weather. 
- # Example: Hours 1-6 Chc RW Chc L - # Hours 7-12 Chc SW Chc L - # Results in 3 sub-phrases - # Hours 1-12 Chc L - # Hours 1-6 Chc RW - # Hours 7-12 Chc SW - newResolution = curResolution * 2 - else: - newResolution = 6 - newPhrase.set("resolution", newResolution) - for key in ["spawnedWxPhrases", "conjunctiveQualifier", - "embeddedQualifier", "localEffect", "localEffectsList", - "firstElement", "elementName"]: - newPhrase.set(key, node.get(key)) - #print "making newPhrase", newPhrase - #print "parent should be", node.parent - #tree.printNode(newPhrase) - # Remove this node - node.remove() - return self.DONE() - - def severeWeather_phrase(self): - return { - "setUpMethod": self.severeWeather_setUp, - "wordMethod": self.severeWeather_words, - "phraseMethods": [ - self.preProcessWx, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.assembleSubPhrases, - self.postProcessPhrase, - ], - } - - def severeWeather_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] - self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector) - # Set this flag used by the "checkWeatherSimilarity" method - node.set("noIntensityCombining", 1) - self.determineSevereTimeDescriptors(tree, node) - return self.DONE() - - def determineSevereTimeDescriptors(self, tree, node): - wxStats = tree.stats.get("Wx", node.getTimeRange(), node.getAreaLabel()) - thunderThru = 1 # T throughout the time period - severeThru = 1 # T+ throughout the time period - allThunderSevere = 1 # All T that appears in the period is + - if wxStats is not None: - for subkeys, tr in wxStats: - thunderFound = 0 - severeFound = 0 - for subkey, rank in subkeys: - #print " subkey", subkey - if subkey.wxType() == "T": - thunderFound = 1 - if subkey.intensity() == "+": - severeFound = 1 - else: - allThunderSevere = 0 - if not severeFound: - severeThru = 0 - if not thunderFound: - thunderThru = 0 - #print "thunderThru, severeThru, allThunderSevere", 
thunderThru, severeThru, allThunderSevere - if thunderThru == 1: - if severeThru == 1: - noTD = 1 - else: - noTD = 0 - elif allThunderSevere == 1: - noTD = 1 - else: - noTD = 0 - #print "noTD", noTD - if noTD: - node.set("noTimeDescriptors", 1) - #print "setting", node - return - - def severeWeather_words(self, tree, node): - "If T +, produce phrase. Report attributes of T." - # Wx Statistics: rankedWx - - statDict = node.getStatDict() - rankList = self.getStats(statDict, "Wx") - if rankList is None or len(rankList) == 0: - return self.setWords(node, "") - # Check against PoP - rankList = self.checkPoP(tree, node, rankList) - subkeyList = self.getSubkeys(rankList) - - severe = 0 - thunder = 0 - attrTextList = [] - for subkey in subkeyList: - wxType = subkey.wxType() - if wxType == "T": - thunder = 1 - intensity = subkey.intensity() - if intensity == "+": - severe = 1 - wxDef = subkey.wxDef() - for attr in subkey.attributes(): - if attr in ["Primary", "Mention", "Dry"]: - continue - attrText = wxDef.attributeDesc(subkey.wxType(), attr).lower() - if attrText not in attrTextList: - attrTextList.append(attrText) - - if thunder == 0: - return self.setWords(node, "") - if severe == 0 and attrTextList == []: - return self.setWords(node, "") - - # Add attributes to phrase - if severe == 0: - words = self.phrase_descriptor(tree, node, "thunderstorms", "Wx") - words = self.addTextList(words, attrTextList, " ") # no preposition - else: - words = self.phrase_descriptor(tree, node, "severeWeather", "Wx") - words = self.addTextList(words, attrTextList) - - return self.setWords(node, words) - - def heavyRainTypes(self, tree, node): - # Rain weather types that will trigger the heavyPrecip_phrase - return ["R", "RW"] - - def heavySnowTypes(self, tree, node): - # Snow weather types that will trigger the heavyPrecip_phrase - return ["S", "SW"] - - def heavyOtherTypes(self, tree, node): - # Weather types other than snow that will trigger the heavyPrecip_phrase - # R and RW are 
automatic triggers - return ["IP", "ZR", "L", "ZL"] - - def heavyPrecip_phrase(self): - ### NEW METHOD by Tom Spriggs/Steve Nelson/Tracy Hansen - ### ZFP_Local - return { - "setUpMethod": self.heavyPrecip_setUp, - "wordMethod": self.heavyPrecip_words, - "phraseMethods": [ - self.combineHeavyPrecip, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.assembleSubPhrases, - self.postProcessPhrase, - ], - } - def heavyPrecip_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] - self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector) - # Set this flag used by the "checkWeatherSimilarity" method - node.set("noIntensityCombining", 1) - return self.DONE() - - def combineHeavyPrecip(self, tree, phrase): - # See if ready to process - if not self.phrase_trigger(tree, phrase, setUpOnly=1): - return - return self.combineChildren(tree, phrase, self.combineHeavy) - - def combineHeavy(self, tree, phrase, subPhrase1, subPhrase2): - ### NEW METHOD TO prevent reporting redundant phrases in complex wx - - # If there is heavy precip in both subPhrase1 and subPhrase2, combine - statDict1 = subPhrase1.getStatDict() - stats1 = statDict1["Wx"] - statDict2 = subPhrase2.getStatDict() - stats2 = statDict2["Wx"] - - if stats1 is None and stats2 is None: - return 1, None - if stats1 is None or stats2 is None: - return 0, None - - newStats = [] - heavy = [0,0] - index = 0 - for wxStats in [stats1, stats2]: - for subkey, rank in wxStats: - wxType = subkey.wxType() - if subkey.intensity() == "+": - if wxType in self.heavyRainTypes(tree, phrase) or \ - wxType in self.heavySnowTypes(tree, phrase) or \ - wxType in self.heavyOtherTypes(tree, phrase): - heavy[index] = 1 - newStats.append((subkey, rank)) - index += 1 - if heavy[0] and heavy[1]: - elementInfoList = phrase.get("elementInfoList") - newSubPhrase = self.combine2SubPhrases(tree, phrase, subPhrase1, subPhrase2, - elementInfoList, newStats) - return 1, newSubPhrase 
- else: - return 0, None - - def heavyPrecip_words(self, tree, node): - ### WxPhrases - self._heavyPrecipFlag = 0 - self._rainfallFlag = 0 - self._rainFlag = 0 - self._snowFlag = 0 - self._otherFlag = 0 - statDict = node.getStatDict() - rankList = self.getStats(statDict, "Wx") - if rankList is None or len(rankList) == 0: - return self.setWords(node, "") - # Check against PoP - rankList = self.checkPoP(tree, node, rankList) - - subkeyList = self.getSubkeys(rankList) - - checkSnowTypes = self.heavySnowTypes(tree, node) - checkOtherTypes = self.heavyOtherTypes(tree, node) - - words = "" - for subkey in subkeyList: - wxType = subkey.wxType() - intensity = subkey.intensity() - - if intensity == "+": - for type in checkOtherTypes: - if wxType == type: - self._heavyPrecipFlag = 1 - self._otherFlag = 1 - - for type in checkSnowTypes: - if wxType == type: - self._heavyPrecipFlag = 1 - self._snowFlag = 1 - - if wxType == "RW": - self._heavyPrecipFlag = 1 - self._rainfallFlag = 1 - if wxType == "R": - self._heavyPrecipFlag = 1 - self._rainFlag = 1 - - if self._heavyPrecipFlag == 1: - if self._otherFlag == 1: - words = self.phrase_descriptor(tree, node, "heavyPrecip", "Wx") - elif self._snowFlag == 1 and self._rainFlag == 0 and self._rainfallFlag == 0: - words = self.phrase_descriptor(tree, node, "heavySnow", "Wx") - elif self._snowFlag == 0 and self._rainFlag == 1 and self._rainfallFlag == 0: - words = self.phrase_descriptor(tree, node, "heavyRain", "Wx") - elif self._snowFlag == 0 and self._rainFlag == 0 and self._rainfallFlag == 1: - words = self.phrase_descriptor(tree, node, "heavyRainfall", "Wx") - else: - words = self.phrase_descriptor(tree, node, "heavyPrecip", "Wx") - - return self.setWords(node, words) - - def filterSubkeys(self, tree, node, rankList): - # Filter subkeys in rankList: - # Combine using wxCombinations - - if self.filter_subkeys_flag() == 0: - return rankList - #print "rankList in filter", rankList - if rankList is None: - return rankList - if 
len(rankList) == 0: - return rankList - rankList, convertedFlag = self.convertToRankList(rankList) - rankList = self.combineSubKeys(tree, node, rankList) - if convertedFlag: - rankList = self.convertFromRankList(rankList) - return rankList - - def convertToRankList(self, rankList): - # If the list is a simple list of subkeys, - # add a dummy rank to each entry - # and return convertedFlag = 1 - if rankList == []: - return rankList, 0 - entry = rankList[0] - if type(entry) is not types.TupleType: - newList = [] - for subkey in rankList: - newList.append((subkey,0)) - rankList = newList - convertedFlag = 1 - else: - convertedFlag = 0 - return rankList, convertedFlag - - def convertFromRankList(self, rankList): - # Strip the dummy ranks off the rankList - newList = [] - for subkey, rank in rankList: - newList.append(subkey) - return newList - - def combineSubKeys(self, tree, node, rankList): - # Compare subkeys and condense if appropriate - rankList, convertedFlag = self.convertToRankList(rankList) - done = 0 - while done == 0: - combinedKey, combinedRank, index1, index2 = self.combineKeys(tree, node, rankList) - #print "combinedKey", combinedKey, index1, index2 - # If no more combinations possible, we are done - if combinedKey is None: - done = 1 - else: - # Make a new list: - # Set index1 to combinedKey - # Delete index2 - newList = [] - ind = 0 - length = len(rankList) - for subkey, rank in rankList: - if ind > length-1: - break - if ind == index1: - newList.append((combinedKey, combinedRank)) - elif ind != index2: - newList.append((subkey, rank)) - ind = ind + 1 - rankList = newList - #print "Leaving combineSubKeys", rankList - if convertedFlag: - rankList = self.convertFromRankList(rankList) - return rankList - - def combineKeys(self, tree, node, rankList): - # See if any keys can be combined - # Return when the first combination is found - length = len(rankList) - if length <= 1: - return None, None, 0, 0 - for index1 in range(0, length): - # March down 
remaining list, trying to combine - for index2 in range(index1 + 1 , length): - combinedKey, combinedRank = self.combineKey1Key2(tree, node, rankList[index1], - rankList[index2]) - if combinedKey is not None: - return combinedKey, combinedRank, index1, index2 - return None, None, 0, 0 - - def combineKey1Key2(self, tree, node, entry1, entry2): - # Combine duplicates, "near duplicates", and user-defined - # combinations from wxCombinations - subkey1, rank1 = entry1 - subkey2, rank2 = entry2 - wxType1 = subkey1.wxType() - wxType2 = subkey2.wxType() - cov1 = subkey1.coverage() - inten1 = subkey1.intensity() - cov2 = subkey2.coverage() - inten2 = subkey2.intensity() - - combinedKey = None - combinedRank = max(rank1, rank2) - if subkey1 == subkey2: - combinedKey = subkey1 - elif wxType1 == wxType2: - # In this case, we must make an aggregate - combinedKey = self.makeAggregateSubkey(subkey1, rank1, subkey2, rank2) - else: - # Try to combine using configurable wxCombinations - # May need to pick lowVis, preserve attrs here too - combinations = self.call(self.wxCombinations, tree, node) - for combination in combinations: - match, combinedKey = self.matchWxCombination( - combination, subkey1, subkey2) - if match: - break - return combinedKey, combinedRank - - def matchWxCombination(self, combination, subkey1, subkey2): - # Given a combination i.e. pair (wx1, wx2) or method, - # determine if subkey1 and subkey2 should be combined. 
- # Return flag (match) and combinedKey - keyList = [(subkey1, subkey2), (subkey2, subkey1)] - match = 0 - combinedKey = None - for key1, key2 in keyList: - if type(combination) is types.MethodType: - match, combinedKey = combination(key1, key2) - if match: - break - else: - wx1, wx2 = combination - if wx1 == key1.wxType() and wx2 == key2.wxType(): - order = self.dominantCoverageOrder(key1, key2) - if order == -1 or order == 0: - combinedKey = key1 - else: - combinedKey = key2 - match = 1 - break - return match, combinedKey - - def dominantCoverageOrder(self, val1, val2): - # Order by dominant coverage -- lower indices are dominant - # If val1 coverage is dominant over val2 coverage, - # return -1, if equal return 0, else return 1 - val1 = val1.coverage() - val2 = val2.coverage() - list = self.wxHierarchies()["coverage"] - try: - index1 = list.index(val1) - except: - index1 = len(list)-1 - try: - index2 = list.index(val2) - except: - index2 = len(list)-1 - if index1 < index2: - return -1 - if index1 == index2: - return 0 - if index1 > index2: - return 1 - - def dominantTypeOrder(self, val1, val2): - # If val1 wxType is dominant over val2 wxType - # Lower indices are dominant - # return -1, if equal return 0, else return 1 - val1 = val1.wxType() - val2 = val2.wxType() - list = self.wxHierarchies()["wxType"] - try: - index1 = list.index(val1) - except: - index1 = len(list)-1 - try: - index2 = list.index(val2) - except: - index2 = len(list)-1 - if index1 < index2: - return -1 - if index1 == index2: - return 0 - if index1 > index2: - return 1 - - def getDominant(self, hierarchy, val1, val2): - # Return the value that appears first in the given hierarchy - list = self.wxHierarchies()[hierarchy] - - index1 = list.index(val1) - index2 = list.index(val2) - if index1 < index2: - return val1 - else: - return val2 - - - def getCoverage(self, tree, node, subkey, covDescs, wxDef, attrList): - if subkey is None: - return None - cov = subkey.coverage() - if cov == "": - return "" 
- cov = wxDef.coverageDesc(subkey.wxType(), cov).lower() - for des_cov, des_type, des_inten, des_attr, desc in covDescs: - if self.matchSubkey(subkey, des_cov, des_type, des_inten, des_attr, attrList) == 1: - cov = self.getWxDesc(tree, node, subkey, desc) - return cov - - def matchSubkey(self, subkey, cov, wxType, inten, attr, attrList): - if cov != "*": - if subkey.coverage() != cov: - return 0 - if wxType != "*": - if subkey.wxType() != wxType: - return 0 - if inten != "*": - if subkey.intensity() != inten: - return 0 - if attr != "*": - if attr not in attrList: - return 0 - return 1 - - def getWxDesc(self, tree, node, subkey, desc): - if type(desc) is types.MethodType: - return desc(tree, node, subkey) - else: - return desc - - # VISIBILITY - def visibility_phrase(self): - return { - "setUpMethod": self.visibility_setUp, - "wordMethod": self.visibility_words, - "phraseMethods": [ - #self.preProcessWx, - self.combinePhraseStats, - self.combineWords, - self.fillNulls, - self.timeDescriptorModeration, - self.assembleSubPhrases, - self.postProcessPhrase, - ], - } - def visibility_setUp(self, tree, node): - elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] - self.subPhraseSetUp(tree, node, elementInfoList, self.visConnector) - node.set("combineVisibility", 1) - descriptor = self.phrase_descriptor(tree, node, "Visibility", "Visibility") - node.set("descriptor", descriptor) - return self.DONE() - - def visibility_words(self, tree, node): - # Return a phrase for the given subPhrase - - # Create a phrase to describe a list of weather subkeys for one sub-period - statDict = node.getStatDict() - rankList = self.getStats(statDict, "Wx") - if rankList is None or len(rankList) == 0: - return self.setWords(node, "") - # Filter rankList so we don't report visibility for weather subkeys - # not reported in the text (e.g. 
SChc, Iso) - rankList = self.checkPoP(tree, node, rankList) - subkeyList = self.getSubkeys(rankList) - - lowVisNM = self.getVis(subkeyList) - if lowVisNM is None: - return self.setWords(node, "null") - # If less than null_nlValue (in nautical miles) return "null" - nullVisNM = self.null_nlValue(tree, node, "Visibility", "Visibility") - lowVisNM = self.roundStatistic(tree, node, lowVisNM, "Visibility") - if lowVisNM >= self.nlValue(nullVisNM, lowVisNM): - return self.setWords(node, "null") - words = self.nlValue(self.visibility_phrase_nlValue(tree, node), lowVisNM) - # See if the Wx type is significant - significantKeys = self.significant_wx_visibility_subkeys(tree, node) - sigFlag = self.findSubkeys(subkeyList, significantKeys) - # If there are no Wx obstructions for which we should report vsby - if sigFlag == 0: - words = "" - return self.setWords(node, words) - - def visibility_phrase_nlValue(self, tree, node): - # Visibility descriptions for visibility_phrase. - # "Visibility less than 1 nautical mile then 2 NM in the afternoon." - # The numerical ranges are in nautical miles. - outUnits = self.element_outUnits(tree, node, "Visibility", "Visibility") - if outUnits == "NM": - return { - (0, 1): "1 NM or less", - (1.1, 2): "2 NM", - (2.1, 3): "3 NM", - (3.1, 4): "4 NM", - (4,1, 5): "5 NM", - (5.1, 6): "6 NM", - "default": "null", - } - else: - return { - (0, .3): "one quarter mile or less at times", - "default": "null", - } - - # Handling visibility within the weather phrase - def embedded_visibility_flag(self, tree, node): - # If 1, report visibility embedded with the - # weather phrase. Set this to 0 if you are using the - # visibility_phrase. 
- return 0 - - def visibility_wx_threshold(self, tree, node): - # Weather will be reported if the visibility is below - # this threshold (in NM) OR if it includes a - # significant_wx_visibility_subkey (see below) - return None - - def significant_wx_visibility_subkeys(self, tree, node): - # Weather values that constitute significant weather to - # be reported regardless of visibility. - # If your visibility_wx_threshold is None, you do not need - # to set up these subkeys since weather will always be - # reported. - # Set of weather key search tuples in the form: - # (cov type inten) - # Wildcards are permitted. - return [("* F "), ("* ZF "), ("* IF "), ("* H"), ("* K"), ("* BS"), ("* BD"), ("* VA")] - - def visibility_weather_phrase_nlValue(self, tree, node): - # Visibility descriptions within the weather_phrase - # "Rain showers and fog with visibility less than 1 nautical mile in the morning." - # The numerical ranges are in nautical miles. - outUnits = self.element_outUnits(tree, node, "Visibility", "Visibility") - if outUnits == "NM": - return { - (0, 1): "visibility 1 NM or less", - (1, 2.1): "2 NM visibility", - (2.1, 3.1): "3 NM visibility", - (3.1, 4.1): "4 NM visibility", - (4.1, 5.1): "5 NM visibility", - (5.1, 6.1): "6 NM visibility", - "default": "null", - } - else: - return { - (0, .3): "visibility one quarter mile or less at times", - "default": "null", - } - - - def matchToWx(self, tree, node, element, timeRange=None, areaLabel=None, - algorithm=None, increment=None): - if timeRange is None: - timeRange = node.getTimeRange() - if areaLabel is None: - areaLabel = node.getAreaLabel() - algorithm, increment, noPrecipValue, percentThreshold, wxTypes = \ - self.getMatchToWxInfo(tree, node, element, algorithm, increment) - #print "\nin matchToWx", element, timeRange, areaLabel - #print " ", algorithm, increment, noPrecipValue, wxTypes - #print " node", node.getAncestor("name") - #import traceback - #traceback.print_stack(limit=6) - - # Gather all data 
that might be necessary - analysisMethodVal = tree.stats.get( - element, timeRange, areaLabel, mergeMethod="Max") - elementBins = tree.stats.get( - element, timeRange, areaLabel, statLabel="binnedPercent", - mergeMethod="MergeBins") - - # Compute result - result = None - if algorithm == "AnalysisMethod": - #print " Returning AnalysisMethod", elementVal - result = analysisMethodVal - else: - # Determine "highKey" -- key with the highest coverage - highKey = self.getHighSubkey(tree, node, timeRange, areaLabel, wxTypes) - #print "highKey", highKey - # Handle case of no precipitating weather - if highKey is None: - result = self.getNoPrecipValue( - noPrecipValue, elementBins, analysisMethodVal) - else: - # Get the Element range of values corresponding to the - # high key coverage. - coverage = highKey.coverage() - exec "elementRange = self.coverage"+element+"_value(coverage)" - if type(elementRange) is types.MethodType: - covLowVal, covHighVal = elementRange(tree, node, highKey) - else: - covLowVal, covHighVal = elementRange - - if algorithm == "Max" or algorithm == "Mode": - # Merge the binned values over space and time - if elementBins is None: - return None - # Return the result - result = self.getBinnedResult(tree, node, elementBins, covLowVal, - covHighVal, increment, algorithm, - percentThreshold) - elif algorithm == "MaxMode": - # Get the list of bins for each grid in the time range - elementBinsList = tree.stats.get( - element, timeRange, areaLabel, statLabel="binnedPercent", - mergeMethod="List") - if elementBinsList is None or elementBinsList == []: - result = None - else: - result = self.getMaxModeResult( - tree, node, elementBinsList, covLowVal, covHighVal, - increment, algorithm, percentThreshold) - else: - log.warning( - "WARNING -- Invalid matToWxInfo algorithm for " + element + \ - " Must be 'Max', 'Mode', 'MaxMode', or 'AnalysisMethod'") - return result - - def getMatchToWxInfo(self, tree, node, element, algorithm, increment): - matchingInfo = 
self.matchToWxInfo(tree, node, element, element) - if matchingInfo == "": - log.warning( - "WARNING -- Cannot MatchToWx: Add " + element + \ - " to matchToWxInfo_dict!!") - increment = 0 - algorithm = "Max" - noPrecipValue = None - percentThreshold = 0 - wxTypes = None - else: - # Get increment and algorithm if not set in arguments - # Get other matchingInfo - inc = matchingInfo[0] - alg = matchingInfo[1] - noPrecipValue = matchingInfo[2] - try: - percentThreshold = matchingInfo[3] - except: - percentThreshold = 0 - try: - wxTypes = matchingInfo[4] - except: - wxTypes = None - if algorithm is None: - algorithm = alg - if increment is None: - increment = inc - return algorithm, increment, noPrecipValue, percentThreshold, wxTypes - - def getNoPrecipValue(self, noPrecipValue, elementBins, analysisMethodVal): - if noPrecipValue is None: - result = None - elif noPrecipValue == "Max": - # Return the maximum value with > 0% areal coverage - if elementBins is None: - result = None - else: - elementVal = None - for lowVal, highVal, percent in elementBins: - if percent > 0: - elementVal = lowVal + increment - result = elementVal - elif noPrecipValue == "AnalysisMethod": - result = analysisMethodVal - else: - result = noPrecipValue - return result - - def getBinnedResult(self, tree, node, elementBins, binLowVal, - binHighVal, increment, algorithm, percentThreshold): - # If algorithm == "Max": - # Return the MAXIMUM element value that falls within the element range - # AND has a greater than zero percentage. - # Else: (Algorithm == "Mode") - # Return the MOST FREQUENT element value that falls within the element range - # AND has a greater than zero percentage. - # We also calculate "resultValue" in case no element value falls within - # the range. - # If the algorithm is "Max", resultValue will be the Maximum element value - # overall, otherwise, it will be the Most Frequent. 
- elementVal = None - resultValue = None - # The Most Frequent value overall - maxPercent = 0 - # The Most Frequent value within the lowVal/highVal range - maxInRangePercent = 0 - #print "\nLooking for range", binLowVal, binHighVal - for lowVal, highVal, percent in elementBins: - #print "low, high, percent", lowVal, highVal, percent - #print " maxPercent, maxInRangePercent", maxPercent, maxInRangePercent - #print " elementVal, resultValue", elementVal, resultValue - if percent > percentThreshold: - # The element bins could be, for example, 55-65, - # so we add the increment - curValue = lowVal + increment - if algorithm == "Max": - # Bins are in ascending order so the - # (Maximum) will just be the last one - resultValue = curValue - if curValue >= binLowVal and curValue <= binHighVal: - elementVal = curValue - else: # "Mode" - if percent > maxPercent: - resultValue = curValue - maxPercent = percent - if curValue >= binLowVal and curValue <= binHighVal: - if percent > maxInRangePercent: - elementVal = curValue - maxInRangePercent = percent - #print "algorithm, elementVal, resultVal", algorithm, elementVal, resultValue - # If no element meets this criteria - # If the resultValue value is greater than the binHighVal, use binHighVal - # Otherwise, use the resultValue value - if elementVal is None: - if resultValue > binHighVal: - elementVal = binHighVal - else: - elementVal = resultValue - return elementVal - - def getMaxModeResult(self, tree, node, elementBinsList, covLowVal, - covHighVal, increment, algorithm, percentThreshold): - # For each grid, find the Mode i.e. 
highest percentage value - # that falls within the coverage range - valueList = [] - for elementBins in elementBinsList: - elementBins, timeRange = elementBins - elementVal = self.getBinnedResult( - tree, node, elementBins, covLowVal, covHighVal, - increment, "Mode", percentThreshold) - valueList.append(elementVal) - # Choose the maximum of these values that fall within the - # range for high key coverage. - #print "valueList", valueList - maxVal = valueList[0] # At least we'll have something ?? - for value in valueList: - if value >= covLowVal and value <=covHighVal: - if value > maxVal: - maxVal = value - return maxVal - - def getHighSubkey(self, tree, node, timeRange, areaLabel, wxTypes=None): - # Find the highest precip subkey in the ranked list - # If wxType is not None, consider only keys of that wxType - highKey = None - wxStats = tree.stats.get("Wx", timeRange, areaLabel) - #print "\nWx in getHighSubkey", wxStats - if wxStats is not None: - highKey = None - highCov = None - for subkeys, tr in wxStats: - #print "subkeys, rank", subkeys, tr - if subkeys is None: - continue - for subkey, rank in subkeys: - #print "subkey", subkey - if wxTypes is not None: - if subkey.wxType() not in wxTypes: - continue - else: - # Check for PoP-related - if self.pop_related_flag(tree, node, subkey): - # Do not consider trace events for matching - if subkey.wxType() in ['L', 'ZL'] or \ - (subkey.wxType() in ['RW', 'SW'] and - subkey.intensity() == '--'): - # Move on to next subkey - continue - else: - # If non-precipitating, skip - continue - if highKey is None or \ - self.dominantCoverageOrder( - subkey, highKey) == -1: - highKey = subkey - return highKey +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# WxPhrases.py +# Methods for producing text forecast from SampleAnalysis statistics. +# +# Author: hansen +# ---------------------------------------------------------------------------- +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- ----------------------------- +# 11/28/2016 5749 randerso Changed calls to addTextList +# to use default parameters +# +## + +## +# This is a base file that is not intended to be overridden. +## + +import PhraseBuilder +import types, re +import TimeRange +import logging + +class WxPhrases(PhraseBuilder.PhraseBuilder): + def __init__(self): + PhraseBuilder.PhraseBuilder.__init__(self) + self.log = logging.getLogger("FormatterRunner.WxPhrases.WxPhrases") + + ############################################ + ### WEATHER PHRASES + def standard_weather_phraseMethods(self): + return [ + self.preProcessWx, + self.separateNonPrecip, + self.consolidateVisibility, + self.checkLocalEffects, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.checkResolution, + self.assembleSubPhrases, + self.postProcessPhrase, + ] + + ### WX + def pop_wx_lower_threshold(self, tree, node): + # Pop-related Wx will not be reported if Pop is below this threshold + return self.popThreshold(tree, node, self._hoursSChcEnds, 15.0, 25.0) + + def pop_related_flag(self, tree, node, subkey): + # These are weather subkeys that are related to PoP and + # should be not be reported if pop is low + wxType = subkey.wxType() + if wxType in ["ZR","R","RW","S","SW", "T", "IP"]: + if wxType == "SW" or wxType == "RW": + if subkey.intensity() == "--": + return 0 + if wxType == "T" and "Dry" in subkey.attributes(): + return 0 + return 1 + else: + return 0 
+ + def precip_related_flag(self, tree, node, subkey): + # These are weather types that are precip versus non-precip + # and could be separated into different phrases from the + # non-precip weather types. + if subkey.wxType() in ["ZR", "R", "RW", "S", "SW", "T", "ZL", "L", "IP"]: + return 1 + else: + return 0 + + def filter_subkeys_flag(self): + # Filtering and condensing of weather subkeys. + # If you do not want subkeys to be condensed and filtered, override this + # variable and set it to 0. + return 1 + + def wxHierarchies(self): + # This is the hierarchy of which coverage and intensity to choose if + # wxTypes are the same and to be combined into one subkey. + return { + "wxType":["WP", "R", "RW", "T", "L", "ZR", "ZL", "S", "SW", + "IP", "F", "ZF", "IF", "IC", "H", "BS", "BN", "K", "BD", + "FR", "ZY", "BA", "",""], + "coverage":["Def","Wide","Brf","Frq", "Ocnl","Pds", "Inter", + "Lkly","Num","Sct","Chc","Areas", + "SChc","WSct","Iso","Patchy","",""], + "intensity":["+","m","-","--","",""], + "visibility":["0SM", "1/4SM", "1/2SM", "3/4SM", "1SM", "11/2SM", "2SM", + "21/2SM", "3SM", "4SM", "5SM", "6SM", "P6SM", "",""], + } + + def similarCoverageLists(self, tree, node, subkey1, subkey2): + # Lists of coverages that should be combined or considered equal. + # Each list should be ordered from weaker to stronger + # and the stronger coverage will be kept when subkeys are + # combined. + # + # These lists are examined when combining sub-phrases and when + # determining if there is a local effect to report. + # Called by PhraseBuilder:checkWeatherSimilarity + # + return [ + ['SChc', 'Iso'], + ['Chc', 'Sct'], + ['Lkly', 'Num'], + ['Brf', 'Frq', 'Ocnl', 'Pds', 'Inter', 'Def', 'Wide'], + ] + + def wxCombinations(self): + # This is the list of which wxTypes should be combined into one + # WITHIN a sub-phrase. 
+ # For example, if ("RW", "R") appears, then wxTypes of "RW" and "R" will + # be combined into one key and the key with the dominant coverage will + # be used as the combined key. + # You may also specify a method which will be + # -- given arguments subkey1 and subkey2 and + # -- should return + # -- a flag = 1 if they are to be combined, 0 otherwise + # -- the combined key to be used + # Note: The method will be called twice, once with (subkey1, subkey2) + # and once with (subkey2, subkey1) so you can assume one ordering. + # See the example below, "combine_T_RW" + # + return [ + ("RW", "R"), + ("SW", "S"), + self.combine_T_RW, + ] + + def combine_T_RW(self, subkey1, subkey2): + # Combine T and RW only if the coverage of T + # is dominant over OR equal to the coverage of RW and + # RW does not have + intensity + wxType1 = subkey1.wxType() + wxType2 = subkey2.wxType() + if wxType1 == "T" and wxType2 == "RW": + if subkey2.intensity() != "+": + order = self.dominantCoverageOrder(subkey1, subkey2) + if order == -1 or order == 0: + return 1, subkey1 + return 0, None + + # Customizing Weather Phrases + def wxCoverageDescriptors(self): + # This is the list of coverages, wxTypes, intensities, attributes for which special + # weather coverage wording is desired. Wildcards (*) can be used to match any value. + # If a weather subkey is not found in this list, default wording + # will be used from the Weather Definition in the server. + # The format of each tuple is: + # (coverage, wxType, intensity, attribute, descriptor) + # For example: + #return [ + # ("Chc", "*", "*", "*", "a chance of"), + # ] + # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments + return [] + + def wxTypeDescriptors(self): + # This is the list of coverages, wxTypes, intensities, attributes for which special + # weather type wording is desired. Wildcards (*) can be used to match any value. 
+ # If a weather subkey is not found in this list, default wording + # will be used from the Weather Definition in the server. + # The format of each tuple is: + # (coverage, wxType, intensity, attribute, descriptor) + # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments + return [ + ("*", "SW", "--", "*", "flurries"), + ("*", "RW", "*", "*", self.rainShowersDescriptor), + ("*", "T", "*", "Dry", "dry thunderstorms"), + ] + + def rainShowersDescriptor(self, tree, node, subkey): + if subkey.intensity() == "--": + return "sprinkles" + if tree is None: + return "showers" + t = tree.stats.get( + "T", node.getTimeRange(), + node.getAreaLabel(), statLabel="minMax", + mergeMethod="Min") + if t is None: + return "showers" + if t < 60: + return "rain showers" + else: + return "showers" + + def wxIntensityDescriptors(self): + # This is the list of coverages, wxTypes, intensities, attribute for which special + # weather intensity wording is desired. Wildcards (*) can be used to match any value. + # If a weather subkey is not found in this list, default wording + # will be used from the Weather Definition in the server. 
+ # The format of each tuple is: + # (coverage, wxType, intensity, attribute, descriptor) + # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments + return [ + ("*", "RW", "--", "*", ""), + ("*", "RW", "-", "*", ""), + ("*", "R", "--", "*", "light"), + ("*", "R", "-", "*", ""), + ("*", "R", "+", "*", ""), + ("*", "RW", "+", "*", ""), + ("*", "SW", "--", "*", ""), + ("*", "SW", "-", "*", ""), + ("*", "SW", "+", "*", ""), + ("*", "S", "--", "*", "very light"), + ("*", "S", "-", "*", ""), + ("*", "S", "+", "*", ""), + ("*", "T", "+", "*", ""), + ("*", "ZR", "--", "*", "light"), + ("*", "ZR", "+", "*", ""), + ("*", "L", "*", "*", ""), + ("*", "F", "+", "*", "dense"), + ("*", "IP", "+", "*", ""), + ] + + def wxAttributeDescriptors(self): + # This is the list of coverages, wxTypes, intensities, attributes, for which special + # weather attribute wording is desired. Wildcards (*) can be used to match any value. + # If a weather subkey is not found in this list, default wording + # will be used from the Weather Definition in the server. 
+ # The format of each tuple is: + # (coverage, wxType, intensity, attribute, descriptor) + # NOTE: descriptor can be a method taking (tree, node, subkey) as arguments + return [ + ("*", "T", "*", "HvyRn", ""), + ("*", "T", "*", "Dry", ""), + ("*", "T", "*", "GW", ""), + ("*", "T", "*", "DmgW", ""), + ("*", "T", "*", "FL", ""), + ("*", "T", "*", "LgA", ""), + ("*", "T", "*", "SmA", ""), + ("*", "T", "*", "TOR", ""), + ] + + def weather_phrase(self): + return { + "setUpMethod": self.weather_setUp, + "wordMethod": self.weather_words, + "phraseMethods": self.standard_weather_phraseMethods() + } + def weather_setUp(self, tree, node): + resolution = node.get("resolution") + if resolution is not None: + mergeMethod = "Average" + else: + mergeMethod = "List" + elementInfoList = [self.ElementInfo("Wx", mergeMethod, self.WEATHER())] + self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector, + resolution) + node.set("allTimeDescriptors", 1) + if self.areal_sky_flag(tree, node): + self.disableSkyRelatedWx(tree, node) + return self.DONE() + + def preProcessWx(self, tree, phrase): + #print '---------------------------------------------------------------' + #print 'in _preProcessWxStats -> ', phrase.get("name"), phrase.getTimeRange() + #time1 = time.time() + + # Create a new list to hold all our subkeys and timeRanges + newStats = [] + # Create a variable to hold the subphrase methodList + methodList = None + # Look through each subphrase of this phrase + resolution = phrase.get("resolution") + for subPhrase in phrase.childList: + # If we do not have a copy of the subPhrase methodList - make one + if methodList is None: + methodList = subPhrase.get("methodList") + # Get stats for this subphrase + if resolution is not None: + tr = subPhrase.getTimeRange() + rankList = tree.stats.get( + 'Wx', tr, subPhrase.getAreaLabel(), mergeMethod="Average") + statList = [(rankList, tr)] + else: + statList = tree.stats.get( + 'Wx', subPhrase.getTimeRange(),subPhrase.getAreaLabel()) + 
if statList is None: + return self.DONE() + # Gather the subkeys and ranks by time range + for (rankList, timeRange) in statList: + if rankList is None: + continue + for subkey, rank in rankList: + newStats.append((subkey, rank, timeRange, 0)) + # Remove this node from the tree + #subPhrase.remove() + + # Make a new list to hold combined stats + combinedStats = [] + # Define a variable to track number of changes made to new Wx stats + changes = 1 # We want at least one pass through the stats + # Keep consolidating new statistics until no changes are made + while len(newStats) > 0 and changes > 0: + # No changes made yet this pass + changes = 0 + # If we have already combined Wx statistics + if len(combinedStats) > 0: + # Update the list of Wx stats we need to consolidate + newStats = combinedStats + # Combine as many Wx stats as possible + (changes, combinedStats) = self.combineWxStats( + tree, subPhrase, newStats) + + # Make a new dictionary to filter consolidated stats by time + finalStatDict = {} + finalKeys = [] + # For each combined Wx statistic + for (curKey, curRank, curTR, curCombined_flag) in combinedStats: + # If the dictionary does not have this time range currently + if curTR not in list(finalStatDict.keys()): + # Add this tuple to the dictionary + finalStatDict[curTR] = [(curKey, curRank)] + finalKeys.append(curTR) + # Otherwise + else: + tempList = finalStatDict[curTR] + tempList.append((curKey, curRank)) + finalStatDict[curTR] = tempList + + #print "\nPhrase :", phrase.get("name") + #print 'finalStatDict = ', finalStatDict + #print "timeRanges", finalKeys + +## for su100bPhrase in phrase.childList: +## print '------------------------------------------------------------' +## print subPhrase.printNode(subPhrase) + + # Make a list to hold all the new subphrases + newSubPhraseList = [] + # Sort consolidated time ranges + #print "\nfinalKeys before sort", finalKeys + finalKeys.sort(self.orderTimeRanges) + #print "\nfinalKeys after sort", finalKeys + # 
Create new nodes for each of our consolidated subphrases + for timeRange in finalKeys: + # Create a new subphrase for the consolidated Wx types and times + newSubPhrase = tree.makeNode([], methodList, phrase) + statDict = {} + statDict['Wx'] = finalStatDict[timeRange] + newSubPhrase.set("elementName", 'Wx') + newSubPhrase.set("changeFlag", 0) + newSubPhrase.set("parent", phrase) + newSubPhrase.set("statDict", statDict) + newSubPhrase.set("timeRange", timeRange) + newSubPhrase.set("childList", []) + newSubPhrase.set("timeDescFlag", 1) # always include timers + # Keep track of this new node + newSubPhraseList.append(newSubPhrase) + + # Replace the old subphrases with the new subphrases + phrase.set("childList", newSubPhraseList) + # Indicate Wx stats have been preprocessed\ + #print " Time: ", time.time() - time1 + return self.DONE() + + # Define a method to combine Wx stats per my relaxed rules + def combineWxStats(self, tree, subPhrase, statList): + # Define some variables to keep track of combined Wx subkeys + combinedStats = [] + changes = 0 + combinedTypes = [] + + # Look through all our subkeys + for index in range(len(statList)-1): + + # Get the info about this key + (curKey, curRank, curTR, curCombined_flag) = statList[index] + + # Break this subkey into its components + curCov = curKey.coverage() + curType = curKey.wxType() + curInten = curKey.intensity() + curVis = curKey.visibility() + curAttr = curKey.attributes() + + # Shorten the list of keys we need to test + searchStats = statList[index+1:] +## print 'searchStats = ', searchStats + + # See if we can combine this key with any of the other keys + for testIndex in range(len(searchStats)): + + # Get the infor about this key + (testKey, testRank, testTR, testCombined_flag) = searchStats[testIndex] + + # We will only try combining if neighboring time ranges + if curTR.endTime() != testTR.startTime(): + continue + + # See if there is a significant difference between these + # Wx subkeys + # match is zero if 
there is a difference between the keys. + # is 1 or 2 if they can be combined. + match = self.checkWeatherSimilarity( + tree, subPhrase, [(curKey, curRank)],[(testKey, testRank)], + tr1=curTR, tr2=testTR) + + #print 'combineWxStats %s <=> %s -> %d' % (curKey, testKey, match) + + # If these keys could be combined, and this type has not been + # combined yet + if match > 0 and curType not in combinedTypes: + newSubkey = self.makeAggregateSubkey(curKey, curRank, testKey, testRank) + #print "newSubkey", newSubkey + # Mark both keys in current lists as being combined + statList[index] = (curKey, curRank, curTR, curCombined_flag + 1) + searchStats[testIndex] = (testKey, testTR, + testCombined_flag + 1) + # Make sure the search key is also marked as combined + # in the main stats list + statList[index+testIndex+1] = (testKey, testRank, testTR, + testCombined_flag + 1) + # Make a new time range for this combined key + newTR = TimeRange.TimeRange(curTR.startTime(), testTR.endTime()) + # Take highest rank of the two subkeys to be combined + if curRank > testRank: + newRank = curRank + else: + newRank = testRank + # Add this new subkey to the consolidated stats + combinedStats.append((newSubkey, newRank, newTR, 0)) + # Do not try to combine this Wx subkey any more + changes = changes + 1 + combinedTypes.append(curType) + + # Make sure we did not miss any subkeys that were not combined + for (curKey, curRank, curTR, curCombined_flag) in statList: + + # If this key was not previously combined + if curCombined_flag == 0: + + # Add this key to the consolidated stats + combinedStats.append((curKey, curRank, curTR, curCombined_flag)) + +## print '\n', changes, combinedTypes +## print 'combinedStats = ' +## print combinedStats +## print '**********************************************************' + + # Return number of changes and current combined Wx stats + return (changes, combinedStats) + + def useSimple(self, tree, node, rankList, subkeys): + # Return 1 if we want to use simple 
weather phrasing + # where the weather subkeys are simply connected by + # a conjunction i.e. do not use "with", "mixedWith", "possiblyMixedWith", + # "withPocketsOf". For example: + # + # Simple wording: + # Chance of rain and snow and slight chance of sleet in the evening. + # Mostly cloudy with chance of rain and a slight chance of thunderstorms. + # + # Complex wording: + # Chance of rain and snow possibly mixed with sleet in the evening. + # Chance of rain with possible thunderstorms in the evening. + # + numSubkeys = len(subkeys) + if numSubkeys <= 2: + return 1 + elif numSubkeys <= 4: + # Check for coverage groupings + # If there are only 1 or 2 coverage groupings, use simple wording + covList = [] + for subkey in subkeys: + cov = subkey.coverage() + if cov not in covList: + covList.append(cov) + if len(covList) <= 2: + return 1 + else: + return 0 + else: + return 0 + + def rankWordingFuzzFactor(self, tree, node, subkey1, subkey2): + # Used in weather wording to determine if + # subkeys have significantly different ranks. + # If so, then wording such as "possibly mixed with" or + # "with pockets of" could be used. + return 10 + + def wxConjunction(self, tree, node, subkey1, rank1, subkey2, rank2): + if subkey1 is not None: + attr1 = subkey1.attributes() + else: + attr1 = [] + attr2 = subkey2.attributes() + if "OR" in attr1 or "OR" in attr2: + return " or " + + # If Def is followed by Lkly, return "with" to avoid + # ambiguity + cov1 = subkey1.coverage() + cov2 = subkey2.coverage() + if cov1 == "Def" and cov2 == "Lkly": + return " with " + else: + return " and " + + def withPossible(self, tree, node, subkey1, rank1, subkey2, rank2): + # Wording to use if subkey1 has higher rank or is dominant over subkey2 + # Handle "with little or no rain" + wxType1 = subkey1.wxType() + wxType2 = subkey2.wxType() + if wxType1 in ["T"] and wxType2 in ["RW", "R"]: + self.includeValue = 0 # i.e. 
do not add rain or rain showers wording + return " with little or no rain" + return " with possible " + + def withPhrase(self, tree, node, subkey1, rank1, subkey2, rank2): + # Wording to use if subkey1 has similar rank and coverage to subkey2 + wxType1 = subkey1.wxType() + wxType2 = subkey2.wxType() + #print "wxType1, 2", wxType1, wxType2 + if wxType2 in ["T"]: + return " and " + else: + return " with " + + def withPocketsOf(self, tree, node, subkey1, rank1, subkey2, rank2): + # Wording to use if subkey2 has higher rank or is dominant over subkey1 + return " with pockets of " + + def possiblyMixedWith(self, tree, node, subkey1, rank1, subkey2, rank2): + # Wording to use if subkey1 has higher rank or is dominant over subkey2 + wxType1 = subkey1.wxType() + wxType2 = subkey2.wxType() + # Handle "with possible thunderstorms" + if wxType2 in ["T"]: + return " with possible " + # Handle "with little or no rain" + if wxType1 in ["T"] and wxType2 in ["RW", "R"]: + self.includeValue = 0 # i.e. do not add rain or rain showers wording + return " with little or no rain" + return " possibly mixed with " + + def mixedWith(self, tree, node, subkey1, rank1, subkey2, rank2): + # Wording to use if subkey1 has similar rank and coverage to subkey2 + wxType1 = subkey1.wxType() + wxType2 = subkey2.wxType() + #print "wxType1, 2", wxType1, wxType2 + if wxType2 in ["T"]: + return " and " + else: + return " mixed with " + + def weather_words(self, tree, node): + # Create a phrase to describe a list of weather sub keys for one sub-period + + # Get rankList + statDict = node.getStatDict() + rankList = self.getStats(statDict, "Wx") + if self._debug: + print("\n SubKeys in weather_words", rankList) + print(" TimeRange", node.getTimeRange(), node.getAreaLabel()) + print(" Phrase name", node.getAncestor("name")) + if rankList is None or len(rankList) == 0: + return self.setWords(node, "") + + # Check against PoP + rankList = self.checkPoP(tree, node, rankList) + + # Check visibility + subkeys = 
self.getSubkeys(rankList) + if self.checkVisibility(tree, node, subkeys): + return self.setWords(node, "null") + + # Get the weather words + words = self.getWeatherWords(tree, node, rankList) + node.set('reportedRankList', rankList) + + # Add embedded visibility + words = self.addEmbeddedVisibility(tree, node, subkeys, words) + if words == "": + words = "null" + if self._debug: + print(" Setting words", words) + + # To replace multiple "and's" with ellipses + words = self.useCommas(tree, node, words) + + return self.setWords(node, words) + + def checkPoP(self, tree, node, rankList): + # Do not report pop_related subkeys if PoP is below + # pop_wx_lower_threshold + popThreshold = self.pop_wx_lower_threshold(tree, node) + lowPopFlag = self.lowPop_flag(tree, node, popThreshold) + # Force a check of all weather subkeys + newList = [] + for subkey, rank in rankList: + # If PoP threshold >= 25% and coverage is 'SChc' or 'Iso' + # don't mention the subkey + if popThreshold >= 25 and subkey.coverage() in ['SChc', 'Iso']: + continue + # If we have a low PoP, and this is not a precip-related subkey + # report it e.g. Fog + if lowPopFlag == 1: + if not self.pop_related_flag(tree, node, subkey): + newList.append((subkey, rank)) + # If there is no low PoP, report the subkey + else: + newList.append((subkey, rank)) + return newList + + def getWeatherWords(self, tree, node, rankList): + # For each WeatherSubKey, add it to the phrase + # Use ranking of subkeys to form wording: + # If useSimple produce simple wording e.g. + # Chance of rain and snow and slight chance of sleet and freezing rain. + # Otherwise: + # Create a phrase of the form: + # + # where: + # list1 and list2 are lists of subkeys separated by + # '...' or 'and' e.g. Snow...rain and sleet + # list1 subkeys have similar coverages and ranks + # list2 subkeys have coverages or ranks significantly + # different from those in list1 + # conjunction connects the 2 lists appropriately, e.g. 
+ # Snow and rain with possible sleet and freezing rain. + # Rain and drizzle with pockets of snow. + + rankList.sort(self.rankedSortOrder) + length = len(rankList) + words = "" + index = 0 + # For non-simple phrasing, have we switched to the second list + # using the conjunction yet? + switchConjunction = 0 + # Begin by including coverage with weather value + includeCovInten = 1 + # Handle "Likely" specially + addLkly = 0 + subkeys = self.getSubkeys(rankList) + useSimple = self.useSimple(tree, node, rankList, subkeys) + prevCoverage = prevSubkey = prevRank = None + prevConj = "" + + for index in range(len(rankList)): + subkey, rank = rankList[index] + # If not last one, determine nextCoverage + if index < length-1: + nextSubkey, nextRank = rankList[index+1] + else: + nextSubkey = None + + # Set so that value is included UNLESS re-set by one of the + # sub-methods e.g. mixedWith, possiblyMixedWith, etc.. + self.includeValue = 1 + + # Add conjunction for non-simple words + if not useSimple: + words, conj, switchConjunction, includeCovInten, addLkly = \ + self.addWxConjunction( + tree, node, words, prevSubkey, prevRank, subkey, rank, + index, switchConjunction, includeCovInten, addLkly) + + # Get string for subkey checking previous and next coverage + value, prevCoverage = self.weather_value( + tree, node, subkey, prevCoverage, nextSubkey, + includeCovInten=includeCovInten) + if self.includeValue == 1: + if value == "": + # If empty value string, remove the previous conjunction + # so we don't end up with something like "rain and" + words = self.removeLast(words, prevConj) + else: + words = words + value + + # if last one, do not add conjunction + if index == length - 1: break + if useSimple: + conj = self.wxConjunction(tree, node, subkey, rank, nextSubkey, nextRank) + words = words + conj + + prevSubkey = subkey + prevConj = conj + prevRank = rank + + if addLkly: + words = words + " likely" + return words + + def addWxConjunction(self, tree, node, words, prevSubkey, 
prevRank, subkey, rank, + index, switchConjunction, includeCovInten, addLkly): + # Check to see if we can switch to "with" or "with pockets of" + # OR just add the regular conjunction + # NOTE:"mixed" weather will be implemented when the samplers can support it. + conj = "" + #print "\nin addWxConj", prevSubkey, prevRank, subkey, rank + if not switchConjunction and index > 0: + includeCovInten = 0 + similarCovs = self.similarCoverages(tree, node, prevSubkey, subkey) + rankWordingFuzzFactor = self.rankWordingFuzzFactor( + tree, node, prevSubkey, subkey) + # If the current rank is significantly less than the previous one + # or if the coverage is significantly different from the previous one + #print "prevSubkey, subkey", prevSubkey, subkey + #print "similarCovs", similarCovs + if rank <= prevRank - rankWordingFuzzFactor or similarCovs == 0: + if similarCovs == 1 or similarCovs == 0: + # Prev subkey is dominant either by coverage or rank + method = self.withPossible + else: # similarCovs == 2 i.e. Current subkey is dominant + method = self.withPocketsOf + switchConjunction = 1 + conj = method(tree, node, prevSubkey, prevRank, subkey, rank) + if conj == "": + if index > 0: + if prevSubkey.coverage() == "Lkly": + addLkly = 1 + conj = self.wxConjunction(tree, node, prevSubkey, prevRank, + subkey, rank) + words = words + conj + #print "returning", words+conj + return words, conj, switchConjunction, includeCovInten, addLkly + + + def weather_value(self, tree, node, subkey, prevCoverage=None, + nextSubkey=None, typeOnly=0, + includeCovInten=1): + "Return a phrase for the WeatherSubkey" + + # If the prevCoverage is the same, then do not repeat it. + # e.g. "Widespread rain and snow" instead of + # "Widespread rain and widespread snow" + # "Likely" is a special case because it follows the nouns, + # so we need to look at the nextCoverage to get: + # e.g. 
"Rain and snow likely" instead of + # "Rain likely and snow likely" + # If typeOnly is set to one, only the Type phrase is returned. + + wxDef = subkey.wxDef() + wxType = subkey.wxType() + if wxType == '': + wxType = "" + else: + wxType = wxDef.typeDesc(wxType).lower() + + inten = subkey.intensity() + if inten == '': + inten = "" + else: + inten = wxDef.intensityDesc(subkey.wxType(), inten).lower() + + if inten.find("moderate") != -1: + inten = "" + + attrList = subkey.attributes() + attrList = self.removeDups(attrList) + attrTextList = [] + for attr in attrList: + # Ignore non-text attributes + if attr == "MX" or attr == "OR" or attr == "Primary" or attr == "Mention": + continue + attrDesc = wxDef.attributeDesc(subkey.wxType(), attr).lower() + # Use the wxAttributeDescriptors if provided + attrDescs = self.call(self.wxAttributeDescriptors, tree, node) + for des_cov, des_type, des_inten, des_attr, desc in attrDescs: + if self.matchSubkey(subkey, des_cov, des_type, des_inten, des_attr, [attr]) == 1: + attrDesc = self.getWxDesc(tree, node, subkey, desc) + if attrDesc != "": + attrTextList.append(attrDesc) + + # Determine coverage. Check for repetition. 
+ covDescs = self.call(self.wxCoverageDescriptors, tree, node) + cov = self.getCoverage(tree, node, subkey, covDescs, wxDef, attrList) + # Make a copy of this coverage for later use + copyCov = cov + #print "cov, prev", cov, prevCoverage + if cov == prevCoverage and not prevCoverage == "likely": + cov = "" + elif cov == "likely": + nextCoverage = self.getCoverage( + tree, node, nextSubkey, covDescs, wxDef, attrList) + if cov == nextCoverage: + cov = "" + #print "result", cov + + # Use wxTypeDescriptors and wxIntensityDescriptors if provided + typeDescs = self.call(self.wxTypeDescriptors, tree, node) + intenDescs = self.call(self.wxIntensityDescriptors, tree, node) + + for des_cov, des_type, des_inten, des_attr, desc in typeDescs: + if self.matchSubkey(subkey, des_cov, des_type, des_inten,des_attr, attrList) == 1: + wxType = self.getWxDesc(tree, node, subkey, desc) + for des_cov, des_type, des_inten, des_attr, desc in intenDescs: + if self.matchSubkey(subkey, des_cov, des_type, des_inten,des_attr, attrList) == 1: + inten = self.getWxDesc(tree, node, subkey, desc) + + # Handle special cases and clean up + if cov == "definite": + cov = "" + + # Hail -- "Large Hail" and "Small Hail" attributes + # get converted to adjectives instead of attributes. 
+ if wxType == "hail": + hailAttr = None + if "large hail" in attrTextList: + hailAttr = "large hail" + adj = "large" + if "small hail" in attrTextList: + hailAttr = "small hail" + adj = "small" + if hailAttr is not None: + wxType = adj + " hail" + newAttrs = [] + for attr in attrTextList: + if attr != hailAttr: + newAttrs.append(attr) + attrTextList = newAttrs + + # Arrange the order of the words + if typeOnly == 1: + return wxType, cov + + if includeCovInten == 0: + cov = "" + inten = "" + + word1 = cov + word2 = inten + word3 = wxType + + # Handle special case of "likely" + if cov == "likely": + word1 = inten + word2 = wxType + word3 = cov + + # Put coverage, intensity and wxType together + if word2 == "": + phrase = word1 + " " + word3 + else: + phrase = word1 + " " + word2 + " " + word3 + phrase = phrase.strip() + + # Add attributes + phrase = self.addTextList(phrase, attrTextList) + phrase = phrase.replace("with in", "in") + + if cov == "": + cov = copyCov + return phrase, cov + + def getIndex(self, hierarchy, value): + list = self.wxHierarchies()[hierarchy] + return list.index(value) + + def getSubkeys(self, rankList): + subkeys = [] + if rankList is None: + return subkeys + for subkey, rank in rankList: + if subkey.wxType() == "": + continue + subkeys.append(subkey) + return subkeys + + def checkVisibility(self, tree, node, subkeys): + ## If no visibility threshold is set, produce weather words as normal. + ## If a visibility threshold is set: + ## --If there are significant weather keys in the weather grid, + ## produce the weather words regardless of the visibility. + ## --If there is no visibility specified in the grids, + ## produce the weather words IF there are significant + ## weather keys in the grids. + ## --If there is a visibility specified in the grids, check to see + ## if it is less than the visibility threshold. + ## If it is, produce the weather words. 
+ ## --If there is a visibility specified in the grids and it is + ## greater or equal to threshold and there are no significant weather + ## keys, produce "null" weather words. + visThreshold = self.visibility_wx_threshold(tree, node) + if visThreshold is not None: + produceWords = 0 + # Check for significant keys + significantKeys = self.significant_wx_visibility_subkeys(tree, node) + sigFlag = self.findSubkeys(subkeys, significantKeys) + # If significant weather keys, produce words regardless of vis + if sigFlag: + produceWords = 1 + else: # sigFlag is 0 + lowVisNM = self.getVis(subkeys) + # If lowVisNM is None, we will not produce words + if lowVisNM is not None: + # Produce words only if lowVisNM < visThreshold + if lowVisNM < visThreshold: + produceWords = 1 + if not produceWords: + return 1 + return 0 + + def addEmbeddedVisibility(self, tree, node, subkeys, words): + # Add embedded visibility wording + if self.embedded_visibility_flag(tree, node): + # Check for visibility having consolidated to separate phrase + visFlag = node.parent.get("reportVisibility") + if visFlag != 0: + # Find low visibility for subkeys + lowVisNM = self.getVis(subkeys) + if lowVisNM is not None: + lowVisNM = self.roundStatistic(tree, node, lowVisNM, "Visibility") + # If below null_nlValue, report it + visThreshold = self.nlValue(self.null_nlValue( + tree, node, "Visibility", "Visibility"), lowVisNM) + if lowVisNM < visThreshold: + visWords = self.nlValue( + self.visibility_weather_phrase_nlValue(tree, node), lowVisNM) + significantKeys = self.significant_wx_visibility_subkeys(tree, node) + sigFlag = self.findSubkeys(subkeys, significantKeys) + # If the weather words are not null and this is a Wx + # obstruction for which we should report vsby + if words != "": # and sigFlag != 0: DR_18894 change + visWords = " with " + visWords + words = words + visWords + return words + + def separateNonPrecip_threshold(self, tree, node): + # Number of sub-phrases required to separate precip from + 
# non-precip + return 1 + + def separateNonPrecip(self, tree, node): + # See if ready to process + if not self.phrase_trigger(tree, node, setUpOnly=1): + return + # If > designated subPhrases, separate into precip/non-precip + statList = self.getSubStats(node, "Wx") + length = len(statList) + if self.__dict__.get('_leDebug', 0): + print("\n\nseparateNonPrecip disabled", node, length, \ + node.getAncestor("disabledSubkeys")) + print(" timerange", node.getTimeRange()) + print(" statList", statList) + #print " doneList", node.doneList + if length >= self.separateNonPrecip_threshold(tree, node): + precip = [] + nonPrecip = [] + for rankList in statList: + subkeys = self.getSubkeys(rankList) + for subkey in subkeys: + if subkey.wxType() == "": + continue + if self.precip_related_flag(tree, node, subkey): + precip.append(subkey) + else: + nonPrecip.append(subkey) + if self.__dict__.get('_leDebug', 0): print("precip, nonPrecip", precip, nonPrecip) + if len(precip) >= 1 and len(nonPrecip) >= 1: + self.splitWxPhrase(tree, node, precip, nonPrecip, [self.separateNonPrecip]) + return self.DONE() + + def consolidateVisibility(self, tree, node): + # If visibility is constant throughout subphrases and non-null, + # separate out into its own phrase + # See if ready to process + if not self.phrase_trigger(tree, node, setUpOnly=1): + return + subPhrases = node.get("childList") + if len(subPhrases) <= 1: + return self.DONE() + lowVis = None + firstTime = 1 + #print "\nconsolidating" + for subPhrase in node.childList: + statDict = subPhrase.getStatDict() + rankList = self.getStats(statDict, "Wx") + subkeys = self.getSubkeys(rankList) + subLowVis = self.getVis(subkeys) + #print "low vis", subLowVis, subPhrase.getTimeRange() + if firstTime: + lowVis = subLowVis + firstTime = 0 + elif subLowVis != lowVis: + # Visibility for this subPhrase differs from previous + # so we can't consolidate + return self.DONE() + if lowVis is None: + return self.DONE() + # Check to see if lowVis is non-null 
+ visThreshold = self.nlValue(self.null_nlValue( + tree, node, "Visibility", "Visibility"), lowVis) + if lowVis <= visThreshold: + # Need to report as separate phrase + newPhrase = tree.addPhraseDef(node, self.visibility_phrase) + # Turn off visibility reporting for this weather phrase + node.set("reportVisibility", 0) + return self.DONE() + + def consolidateWx(self, tree, node): + # If any wxTypes span all subPhrases, separate into their own phrase + statList = self.getSubStats(node, "Wx") + length = len(statList) + subkeyDict = {} + if self.__dict__.get('leDebug', 0): + print("\n\nConsolidating disabled", node.getAncestor("disabledSubkeys")) + print(" timerange", node.getTimeRange()) + print(" statList", statList) + #print " doneList", node.doneList + if length > 1: + # Count occurrences of each weather key + for subkeys in statList: + for subkey in subkeys: + if subkey not in list(subkeyDict.keys()): + subkeyDict[subkey] = 1 + else: + subkeyDict[subkey] += 1 + # Find subkeys to disable in first phrase and second phrase, respectively + list1 = [] + list2 = [] + for subkey in list(subkeyDict.keys()): + count = subkeyDict[subkey] + if count >= length: + list2.append(subkey) + else: + list1.append(subkey) + if self.__dict__.get('_leDebug', 0): print("list1, list2", list1, list2) + if len(list1) > 0 and len(list2) > 0: + self.splitWxPhrase( + tree, node, list1, list2, + [self.consolidateWx, self.separateNonPrecip, + self.skyPopWx_consolidateWx]) + return self.DONE() + + def subPhrase_limit(self, tree, node): + # If the number of sub-phrases is greater than this limit, the weather + # phrase will use 6-hour instead of the higher resolution to produce: + # + # Occasional snow possibly mixed with sleet and freezing + # drizzle in the morning, then a chance of rain possibly mixed wiht snow + # and sleet and freezing drizzle in the afternoon. + # + # instead of: + # Occasional snow in the morning. 
Chance of light sleet and + # slight chance of light freezing drizzle in the late morning and + # early afternoon. Chance of snow early in the afternoon. Chance of + # rain in the afternoon. + return 3 + + + def checkResolution(self, tree, node): + # Check to see if there are too many sub-phrases and we need to re-do the + # phrase in lower resolution. The limit is determined by "subPhrase_limit". + # This currently assumes we have a 3 or greater resolution and want to go to + # a 6-hour resolution. + + # See if ready to process + if not self.phrase_trigger(tree, node): + return + + # Count the number of non-empty phrases + #print "\n In check resolution", node + count = 0 + for subPhrase in node.get("childList"): + words = subPhrase.get("words") + if words == "": + continue + #print "words", subPhrase, words + count += 1 + if count > self.subPhrase_limit(tree, node): + #print "count", count + # Create a new node in it's place with a new + # resolution set + exec("newPhraseDef = self." + node.getAncestor('name')) + newPhrase = tree.addPhraseDef(node, newPhraseDef) + newPhrase.set("disabledSubkeys", node.get("disabledSubkeys")) + curResolution = node.get("resolution") + if curResolution is not None: + # If we have already re-set the resolution and we are still over the + # sub-phrase limit, we'll have to decrease the resolution some more + # to try and reduce the number of sub-phrases. + # This is necessary because of the way preProcessWx works: + # For example, even if we have only 2 time periods sampled, + # they can result in 3 or more sub-phrases depending on the + # complexity of weather. 
+ # Example: Hours 1-6 Chc RW Chc L + # Hours 7-12 Chc SW Chc L + # Results in 3 sub-phrases + # Hours 1-12 Chc L + # Hours 1-6 Chc RW + # Hours 7-12 Chc SW + newResolution = curResolution * 2 + else: + newResolution = 6 + newPhrase.set("resolution", newResolution) + for key in ["spawnedWxPhrases", "conjunctiveQualifier", + "embeddedQualifier", "localEffect", "localEffectsList", + "firstElement", "elementName"]: + newPhrase.set(key, node.get(key)) + #print "making newPhrase", newPhrase + #print "parent should be", node.parent + #tree.printNode(newPhrase) + # Remove this node + node.remove() + return self.DONE() + + def severeWeather_phrase(self): + return { + "setUpMethod": self.severeWeather_setUp, + "wordMethod": self.severeWeather_words, + "phraseMethods": [ + self.preProcessWx, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.assembleSubPhrases, + self.postProcessPhrase, + ], + } + + def severeWeather_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] + self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector) + # Set this flag used by the "checkWeatherSimilarity" method + node.set("noIntensityCombining", 1) + self.determineSevereTimeDescriptors(tree, node) + return self.DONE() + + def determineSevereTimeDescriptors(self, tree, node): + wxStats = tree.stats.get("Wx", node.getTimeRange(), node.getAreaLabel()) + thunderThru = 1 # T throughout the time period + severeThru = 1 # T+ throughout the time period + allThunderSevere = 1 # All T that appears in the period is + + if wxStats is not None: + for subkeys, tr in wxStats: + thunderFound = 0 + severeFound = 0 + for subkey, rank in subkeys: + #print " subkey", subkey + if subkey.wxType() == "T": + thunderFound = 1 + if subkey.intensity() == "+": + severeFound = 1 + else: + allThunderSevere = 0 + if not severeFound: + severeThru = 0 + if not thunderFound: + thunderThru = 0 + #print "thunderThru, severeThru, allThunderSevere", 
thunderThru, severeThru, allThunderSevere + if thunderThru == 1: + if severeThru == 1: + noTD = 1 + else: + noTD = 0 + elif allThunderSevere == 1: + noTD = 1 + else: + noTD = 0 + #print "noTD", noTD + if noTD: + node.set("noTimeDescriptors", 1) + #print "setting", node + return + + def severeWeather_words(self, tree, node): + "If T +, produce phrase. Report attributes of T." + # Wx Statistics: rankedWx + + statDict = node.getStatDict() + rankList = self.getStats(statDict, "Wx") + if rankList is None or len(rankList) == 0: + return self.setWords(node, "") + # Check against PoP + rankList = self.checkPoP(tree, node, rankList) + subkeyList = self.getSubkeys(rankList) + + severe = 0 + thunder = 0 + attrTextList = [] + for subkey in subkeyList: + wxType = subkey.wxType() + if wxType == "T": + thunder = 1 + intensity = subkey.intensity() + if intensity == "+": + severe = 1 + wxDef = subkey.wxDef() + for attr in subkey.attributes(): + if attr in ["Primary", "Mention", "Dry"]: + continue + attrText = wxDef.attributeDesc(subkey.wxType(), attr).lower() + if attrText not in attrTextList: + attrTextList.append(attrText) + + if thunder == 0: + return self.setWords(node, "") + if severe == 0 and attrTextList == []: + return self.setWords(node, "") + + # Add attributes to phrase + if severe == 0: + words = self.phrase_descriptor(tree, node, "thunderstorms", "Wx") + words = self.addTextList(words, attrTextList, " ") # no preposition + else: + words = self.phrase_descriptor(tree, node, "severeWeather", "Wx") + words = self.addTextList(words, attrTextList) + + return self.setWords(node, words) + + def heavyRainTypes(self, tree, node): + # Rain weather types that will trigger the heavyPrecip_phrase + return ["R", "RW"] + + def heavySnowTypes(self, tree, node): + # Snow weather types that will trigger the heavyPrecip_phrase + return ["S", "SW"] + + def heavyOtherTypes(self, tree, node): + # Weather types other than snow that will trigger the heavyPrecip_phrase + # R and RW are 
automatic triggers + return ["IP", "ZR", "L", "ZL"] + + def heavyPrecip_phrase(self): + ### NEW METHOD by Tom Spriggs/Steve Nelson/Tracy Hansen + ### ZFP_Local + return { + "setUpMethod": self.heavyPrecip_setUp, + "wordMethod": self.heavyPrecip_words, + "phraseMethods": [ + self.combineHeavyPrecip, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.assembleSubPhrases, + self.postProcessPhrase, + ], + } + def heavyPrecip_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] + self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector) + # Set this flag used by the "checkWeatherSimilarity" method + node.set("noIntensityCombining", 1) + return self.DONE() + + def combineHeavyPrecip(self, tree, phrase): + # See if ready to process + if not self.phrase_trigger(tree, phrase, setUpOnly=1): + return + return self.combineChildren(tree, phrase, self.combineHeavy) + + def combineHeavy(self, tree, phrase, subPhrase1, subPhrase2): + ### NEW METHOD TO prevent reporting redundant phrases in complex wx + + # If there is heavy precip in both subPhrase1 and subPhrase2, combine + statDict1 = subPhrase1.getStatDict() + stats1 = statDict1["Wx"] + statDict2 = subPhrase2.getStatDict() + stats2 = statDict2["Wx"] + + if stats1 is None and stats2 is None: + return 1, None + if stats1 is None or stats2 is None: + return 0, None + + newStats = [] + heavy = [0,0] + index = 0 + for wxStats in [stats1, stats2]: + for subkey, rank in wxStats: + wxType = subkey.wxType() + if subkey.intensity() == "+": + if wxType in self.heavyRainTypes(tree, phrase) or \ + wxType in self.heavySnowTypes(tree, phrase) or \ + wxType in self.heavyOtherTypes(tree, phrase): + heavy[index] = 1 + newStats.append((subkey, rank)) + index += 1 + if heavy[0] and heavy[1]: + elementInfoList = phrase.get("elementInfoList") + newSubPhrase = self.combine2SubPhrases(tree, phrase, subPhrase1, subPhrase2, + elementInfoList, newStats) + return 1, newSubPhrase 
+ else: + return 0, None + + def heavyPrecip_words(self, tree, node): + ### WxPhrases + self._heavyPrecipFlag = 0 + self._rainfallFlag = 0 + self._rainFlag = 0 + self._snowFlag = 0 + self._otherFlag = 0 + statDict = node.getStatDict() + rankList = self.getStats(statDict, "Wx") + if rankList is None or len(rankList) == 0: + return self.setWords(node, "") + # Check against PoP + rankList = self.checkPoP(tree, node, rankList) + + subkeyList = self.getSubkeys(rankList) + + checkSnowTypes = self.heavySnowTypes(tree, node) + checkOtherTypes = self.heavyOtherTypes(tree, node) + + words = "" + for subkey in subkeyList: + wxType = subkey.wxType() + intensity = subkey.intensity() + + if intensity == "+": + for type in checkOtherTypes: + if wxType == type: + self._heavyPrecipFlag = 1 + self._otherFlag = 1 + + for type in checkSnowTypes: + if wxType == type: + self._heavyPrecipFlag = 1 + self._snowFlag = 1 + + if wxType == "RW": + self._heavyPrecipFlag = 1 + self._rainfallFlag = 1 + if wxType == "R": + self._heavyPrecipFlag = 1 + self._rainFlag = 1 + + if self._heavyPrecipFlag == 1: + if self._otherFlag == 1: + words = self.phrase_descriptor(tree, node, "heavyPrecip", "Wx") + elif self._snowFlag == 1 and self._rainFlag == 0 and self._rainfallFlag == 0: + words = self.phrase_descriptor(tree, node, "heavySnow", "Wx") + elif self._snowFlag == 0 and self._rainFlag == 1 and self._rainfallFlag == 0: + words = self.phrase_descriptor(tree, node, "heavyRain", "Wx") + elif self._snowFlag == 0 and self._rainFlag == 0 and self._rainfallFlag == 1: + words = self.phrase_descriptor(tree, node, "heavyRainfall", "Wx") + else: + words = self.phrase_descriptor(tree, node, "heavyPrecip", "Wx") + + return self.setWords(node, words) + + def filterSubkeys(self, tree, node, rankList): + # Filter subkeys in rankList: + # Combine using wxCombinations + + if self.filter_subkeys_flag() == 0: + return rankList + #print "rankList in filter", rankList + if rankList is None: + return rankList + if 
len(rankList) == 0: + return rankList + rankList, convertedFlag = self.convertToRankList(rankList) + rankList = self.combineSubKeys(tree, node, rankList) + if convertedFlag: + rankList = self.convertFromRankList(rankList) + return rankList + + def convertToRankList(self, rankList): + # If the list is a simple list of subkeys, + # add a dummy rank to each entry + # and return convertedFlag = 1 + if rankList == []: + return rankList, 0 + entry = rankList[0] + if type(entry) is not tuple: + newList = [] + for subkey in rankList: + newList.append((subkey,0)) + rankList = newList + convertedFlag = 1 + else: + convertedFlag = 0 + return rankList, convertedFlag + + def convertFromRankList(self, rankList): + # Strip the dummy ranks off the rankList + newList = [] + for subkey, rank in rankList: + newList.append(subkey) + return newList + + def combineSubKeys(self, tree, node, rankList): + # Compare subkeys and condense if appropriate + rankList, convertedFlag = self.convertToRankList(rankList) + done = 0 + while done == 0: + combinedKey, combinedRank, index1, index2 = self.combineKeys(tree, node, rankList) + #print "combinedKey", combinedKey, index1, index2 + # If no more combinations possible, we are done + if combinedKey is None: + done = 1 + else: + # Make a new list: + # Set index1 to combinedKey + # Delete index2 + newList = [] + ind = 0 + length = len(rankList) + for subkey, rank in rankList: + if ind > length-1: + break + if ind == index1: + newList.append((combinedKey, combinedRank)) + elif ind != index2: + newList.append((subkey, rank)) + ind = ind + 1 + rankList = newList + #print "Leaving combineSubKeys", rankList + if convertedFlag: + rankList = self.convertFromRankList(rankList) + return rankList + + def combineKeys(self, tree, node, rankList): + # See if any keys can be combined + # Return when the first combination is found + length = len(rankList) + if length <= 1: + return None, None, 0, 0 + for index1 in range(0, length): + # March down remaining list, 
trying to combine + for index2 in range(index1 + 1 , length): + combinedKey, combinedRank = self.combineKey1Key2(tree, node, rankList[index1], + rankList[index2]) + if combinedKey is not None: + return combinedKey, combinedRank, index1, index2 + return None, None, 0, 0 + + def combineKey1Key2(self, tree, node, entry1, entry2): + # Combine duplicates, "near duplicates", and user-defined + # combinations from wxCombinations + subkey1, rank1 = entry1 + subkey2, rank2 = entry2 + wxType1 = subkey1.wxType() + wxType2 = subkey2.wxType() + cov1 = subkey1.coverage() + inten1 = subkey1.intensity() + cov2 = subkey2.coverage() + inten2 = subkey2.intensity() + + combinedKey = None + combinedRank = max(rank1, rank2) + if subkey1 == subkey2: + combinedKey = subkey1 + elif wxType1 == wxType2: + # In this case, we must make an aggregate + combinedKey = self.makeAggregateSubkey(subkey1, rank1, subkey2, rank2) + else: + # Try to combine using configurable wxCombinations + # May need to pick lowVis, preserve attrs here too + combinations = self.call(self.wxCombinations, tree, node) + for combination in combinations: + match, combinedKey = self.matchWxCombination( + combination, subkey1, subkey2) + if match: + break + return combinedKey, combinedRank + + def matchWxCombination(self, combination, subkey1, subkey2): + # Given a combination i.e. pair (wx1, wx2) or method, + # determine if subkey1 and subkey2 should be combined. 
+ # Return flag (match) and combinedKey + keyList = [(subkey1, subkey2), (subkey2, subkey1)] + match = 0 + combinedKey = None + for key1, key2 in keyList: + if type(combination) is types.MethodType: + match, combinedKey = combination(key1, key2) + if match: + break + else: + wx1, wx2 = combination + if wx1 == key1.wxType() and wx2 == key2.wxType(): + order = self.dominantCoverageOrder(key1, key2) + if order == -1 or order == 0: + combinedKey = key1 + else: + combinedKey = key2 + match = 1 + break + return match, combinedKey + + def dominantCoverageOrder(self, val1, val2): + # Order by dominant coverage -- lower indices are dominant + # If val1 coverage is dominant over val2 coverage, + # return -1, if equal return 0, else return 1 + val1 = val1.coverage() + val2 = val2.coverage() + list = self.wxHierarchies()["coverage"] + try: + index1 = list.index(val1) + except: + index1 = len(list)-1 + try: + index2 = list.index(val2) + except: + index2 = len(list)-1 + if index1 < index2: + return -1 + if index1 == index2: + return 0 + if index1 > index2: + return 1 + + def dominantTypeOrder(self, val1, val2): + # If val1 wxType is dominant over val2 wxType + # Lower indices are dominant + # return -1, if equal return 0, else return 1 + val1 = val1.wxType() + val2 = val2.wxType() + list = self.wxHierarchies()["wxType"] + try: + index1 = list.index(val1) + except: + index1 = len(list)-1 + try: + index2 = list.index(val2) + except: + index2 = len(list)-1 + if index1 < index2: + return -1 + if index1 == index2: + return 0 + if index1 > index2: + return 1 + + def getDominant(self, hierarchy, val1, val2): + # Return the value that appears first in the given hierarchy + list = self.wxHierarchies()[hierarchy] + + index1 = list.index(val1) + index2 = list.index(val2) + if index1 < index2: + return val1 + else: + return val2 + + + def getCoverage(self, tree, node, subkey, covDescs, wxDef, attrList): + if subkey is None: + return None + cov = subkey.coverage() + if cov == "": + return "" 
+ cov = wxDef.coverageDesc(subkey.wxType(), cov).lower() + for des_cov, des_type, des_inten, des_attr, desc in covDescs: + if self.matchSubkey(subkey, des_cov, des_type, des_inten, des_attr, attrList) == 1: + cov = self.getWxDesc(tree, node, subkey, desc) + return cov + + def matchSubkey(self, subkey, cov, wxType, inten, attr, attrList): + if cov != "*": + if subkey.coverage() != cov: + return 0 + if wxType != "*": + if subkey.wxType() != wxType: + return 0 + if inten != "*": + if subkey.intensity() != inten: + return 0 + if attr != "*": + if attr not in attrList: + return 0 + return 1 + + def getWxDesc(self, tree, node, subkey, desc): + if type(desc) is types.MethodType: + return desc(tree, node, subkey) + else: + return desc + + # VISIBILITY + def visibility_phrase(self): + return { + "setUpMethod": self.visibility_setUp, + "wordMethod": self.visibility_words, + "phraseMethods": [ + #self.preProcessWx, + self.combinePhraseStats, + self.combineWords, + self.fillNulls, + self.timeDescriptorModeration, + self.assembleSubPhrases, + self.postProcessPhrase, + ], + } + def visibility_setUp(self, tree, node): + elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] + self.subPhraseSetUp(tree, node, elementInfoList, self.visConnector) + node.set("combineVisibility", 1) + descriptor = self.phrase_descriptor(tree, node, "Visibility", "Visibility") + node.set("descriptor", descriptor) + return self.DONE() + + def visibility_words(self, tree, node): + # Return a phrase for the given subPhrase + + # Create a phrase to describe a list of weather subkeys for one sub-period + statDict = node.getStatDict() + rankList = self.getStats(statDict, "Wx") + if rankList is None or len(rankList) == 0: + return self.setWords(node, "") + # Filter rankList so we don't report visibility for weather subkeys + # not reported in the text (e.g. 
SChc, Iso) + rankList = self.checkPoP(tree, node, rankList) + subkeyList = self.getSubkeys(rankList) + + lowVisNM = self.getVis(subkeyList) + if lowVisNM is None: + return self.setWords(node, "null") + # If less than null_nlValue (in nautical miles) return "null" + nullVisNM = self.null_nlValue(tree, node, "Visibility", "Visibility") + lowVisNM = self.roundStatistic(tree, node, lowVisNM, "Visibility") + if lowVisNM >= self.nlValue(nullVisNM, lowVisNM): + return self.setWords(node, "null") + words = self.nlValue(self.visibility_phrase_nlValue(tree, node), lowVisNM) + # See if the Wx type is significant + significantKeys = self.significant_wx_visibility_subkeys(tree, node) + sigFlag = self.findSubkeys(subkeyList, significantKeys) + # If there are no Wx obstructions for which we should report vsby + if sigFlag == 0: + words = "" + return self.setWords(node, words) + + def visibility_phrase_nlValue(self, tree, node): + # Visibility descriptions for visibility_phrase. + # "Visibility less than 1 nautical mile then 2 NM in the afternoon." + # The numerical ranges are in nautical miles. + outUnits = self.element_outUnits(tree, node, "Visibility", "Visibility") + if outUnits == "NM": + return { + (0, 1): "1 NM or less", + (1.1, 2): "2 NM", + (2.1, 3): "3 NM", + (3.1, 4): "4 NM", + (4,1, 5): "5 NM", + (5.1, 6): "6 NM", + "default": "null", + } + else: + return { + (0, .3): "one quarter mile or less at times", + "default": "null", + } + + # Handling visibility within the weather phrase + def embedded_visibility_flag(self, tree, node): + # If 1, report visibility embedded with the + # weather phrase. Set this to 0 if you are using the + # visibility_phrase. 
+ return 0 + + def visibility_wx_threshold(self, tree, node): + # Weather will be reported if the visibility is below + # this threshold (in NM) OR if it includes a + # significant_wx_visibility_subkey (see below) + return None + + def significant_wx_visibility_subkeys(self, tree, node): + # Weather values that constitute significant weather to + # be reported regardless of visibility. + # If your visibility_wx_threshold is None, you do not need + # to set up these subkeys since weather will always be + # reported. + # Set of weather key search tuples in the form: + # (cov type inten) + # Wildcards are permitted. + return [("* F "), ("* ZF "), ("* IF "), ("* H"), ("* K"), ("* BS"), ("* BD"), ("* VA")] + + def visibility_weather_phrase_nlValue(self, tree, node): + # Visibility descriptions within the weather_phrase + # "Rain showers and fog with visibility less than 1 nautical mile in the morning." + # The numerical ranges are in nautical miles. + outUnits = self.element_outUnits(tree, node, "Visibility", "Visibility") + if outUnits == "NM": + return { + (0, 1): "visibility 1 NM or less", + (1, 2.1): "2 NM visibility", + (2.1, 3.1): "3 NM visibility", + (3.1, 4.1): "4 NM visibility", + (4.1, 5.1): "5 NM visibility", + (5.1, 6.1): "6 NM visibility", + "default": "null", + } + else: + return { + (0, .3): "visibility one quarter mile or less at times", + "default": "null", + } + + + def matchToWx(self, tree, node, element, timeRange=None, areaLabel=None, + algorithm=None, increment=None): + if timeRange is None: + timeRange = node.getTimeRange() + if areaLabel is None: + areaLabel = node.getAreaLabel() + algorithm, increment, noPrecipValue, percentThreshold, wxTypes = \ + self.getMatchToWxInfo(tree, node, element, algorithm, increment) + #print "\nin matchToWx", element, timeRange, areaLabel + #print " ", algorithm, increment, noPrecipValue, wxTypes + #print " node", node.getAncestor("name") + #import traceback + #traceback.print_stack(limit=6) + + # Gather all data 
that might be necessary + analysisMethodVal = tree.stats.get( + element, timeRange, areaLabel, mergeMethod="Max") + elementBins = tree.stats.get( + element, timeRange, areaLabel, statLabel="binnedPercent", + mergeMethod="MergeBins") + + # Compute result + result = None + if algorithm == "AnalysisMethod": + #print " Returning AnalysisMethod", elementVal + result = analysisMethodVal + else: + # Determine "highKey" -- key with the highest coverage + highKey = self.getHighSubkey(tree, node, timeRange, areaLabel, wxTypes) + #print "highKey", highKey + # Handle case of no precipitating weather + if highKey is None: + result = self.getNoPrecipValue( + noPrecipValue, elementBins, analysisMethodVal) + else: + # Get the Element range of values corresponding to the + # high key coverage. + coverage = highKey.coverage() + exec("elementRange = self.coverage"+element+"_value(coverage)") + if type(elementRange) is types.MethodType: + covLowVal, covHighVal = elementRange(tree, node, highKey) + else: + covLowVal, covHighVal = elementRange + + if algorithm == "Max" or algorithm == "Mode": + # Merge the binned values over space and time + if elementBins is None: + return None + # Return the result + result = self.getBinnedResult(tree, node, elementBins, covLowVal, + covHighVal, increment, algorithm, + percentThreshold) + elif algorithm == "MaxMode": + # Get the list of bins for each grid in the time range + elementBinsList = tree.stats.get( + element, timeRange, areaLabel, statLabel="binnedPercent", + mergeMethod="List") + if elementBinsList is None or elementBinsList == []: + result = None + else: + result = self.getMaxModeResult( + tree, node, elementBinsList, covLowVal, covHighVal, + increment, algorithm, percentThreshold) + else: + log.warning( + "WARNING -- Invalid matToWxInfo algorithm for " + element + \ + " Must be 'Max', 'Mode', 'MaxMode', or 'AnalysisMethod'") + return result + + def getMatchToWxInfo(self, tree, node, element, algorithm, increment): + matchingInfo = 
self.matchToWxInfo(tree, node, element, element) + if matchingInfo == "": + log.warning( + "WARNING -- Cannot MatchToWx: Add " + element + \ + " to matchToWxInfo_dict!!") + increment = 0 + algorithm = "Max" + noPrecipValue = None + percentThreshold = 0 + wxTypes = None + else: + # Get increment and algorithm if not set in arguments + # Get other matchingInfo + inc = matchingInfo[0] + alg = matchingInfo[1] + noPrecipValue = matchingInfo[2] + try: + percentThreshold = matchingInfo[3] + except: + percentThreshold = 0 + try: + wxTypes = matchingInfo[4] + except: + wxTypes = None + if algorithm is None: + algorithm = alg + if increment is None: + increment = inc + return algorithm, increment, noPrecipValue, percentThreshold, wxTypes + + def getNoPrecipValue(self, noPrecipValue, elementBins, analysisMethodVal): + if noPrecipValue is None: + result = None + elif noPrecipValue == "Max": + # Return the maximum value with > 0% areal coverage + if elementBins is None: + result = None + else: + elementVal = None + for lowVal, highVal, percent in elementBins: + if percent > 0: + elementVal = lowVal + increment + result = elementVal + elif noPrecipValue == "AnalysisMethod": + result = analysisMethodVal + else: + result = noPrecipValue + return result + + def getBinnedResult(self, tree, node, elementBins, binLowVal, + binHighVal, increment, algorithm, percentThreshold): + # If algorithm == "Max": + # Return the MAXIMUM element value that falls within the element range + # AND has a greater than zero percentage. + # Else: (Algorithm == "Mode") + # Return the MOST FREQUENT element value that falls within the element range + # AND has a greater than zero percentage. + # We also calculate "resultValue" in case no element value falls within + # the range. + # If the algorithm is "Max", resultValue will be the Maximum element value + # overall, otherwise, it will be the Most Frequent. 
+ elementVal = None + resultValue = None + # The Most Frequent value overall + maxPercent = 0 + # The Most Frequent value within the lowVal/highVal range + maxInRangePercent = 0 + #print "\nLooking for range", binLowVal, binHighVal + for lowVal, highVal, percent in elementBins: + #print "low, high, percent", lowVal, highVal, percent + #print " maxPercent, maxInRangePercent", maxPercent, maxInRangePercent + #print " elementVal, resultValue", elementVal, resultValue + if percent > percentThreshold: + # The element bins could be, for example, 55-65, + # so we add the increment + curValue = lowVal + increment + if algorithm == "Max": + # Bins are in ascending order so the + # (Maximum) will just be the last one + resultValue = curValue + if curValue >= binLowVal and curValue <= binHighVal: + elementVal = curValue + else: # "Mode" + if percent > maxPercent: + resultValue = curValue + maxPercent = percent + if curValue >= binLowVal and curValue <= binHighVal: + if percent > maxInRangePercent: + elementVal = curValue + maxInRangePercent = percent + #print "algorithm, elementVal, resultVal", algorithm, elementVal, resultValue + # If no element meets this criteria + # If the resultValue value is greater than the binHighVal, use binHighVal + # Otherwise, use the resultValue value + if elementVal is None: + if resultValue > binHighVal: + elementVal = binHighVal + else: + elementVal = resultValue + return elementVal + + def getMaxModeResult(self, tree, node, elementBinsList, covLowVal, + covHighVal, increment, algorithm, percentThreshold): + # For each grid, find the Mode i.e. 
highest percentage value + # that falls within the coverage range + valueList = [] + for elementBins in elementBinsList: + elementBins, timeRange = elementBins + elementVal = self.getBinnedResult( + tree, node, elementBins, covLowVal, covHighVal, + increment, "Mode", percentThreshold) + valueList.append(elementVal) + # Choose the maximum of these values that fall within the + # range for high key coverage. + #print "valueList", valueList + maxVal = valueList[0] # At least we'll have something ?? + for value in valueList: + if value >= covLowVal and value <=covHighVal: + if value > maxVal: + maxVal = value + return maxVal + + def getHighSubkey(self, tree, node, timeRange, areaLabel, wxTypes=None): + # Find the highest precip subkey in the ranked list + # If wxType is not None, consider only keys of that wxType + highKey = None + wxStats = tree.stats.get("Wx", timeRange, areaLabel) + #print "\nWx in getHighSubkey", wxStats + if wxStats is not None: + highKey = None + highCov = None + for subkeys, tr in wxStats: + #print "subkeys, rank", subkeys, tr + if subkeys is None: + continue + for subkey, rank in subkeys: + #print "subkey", subkey + if wxTypes is not None: + if subkey.wxType() not in wxTypes: + continue + else: + # Check for PoP-related + if self.pop_related_flag(tree, node, subkey): + # Do not consider trace events for matching + if subkey.wxType() in ['L', 'ZL'] or \ + (subkey.wxType() in ['RW', 'SW'] and + subkey.intensity() == '--'): + # Move on to next subkey + continue + else: + # If non-precipitating, skip + continue + if highKey is None or \ + self.dominantCoverageOrder( + subkey, highKey) == -1: + highKey = subkey + return highKey diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/AppDialog.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/AppDialog.py index d960e2253a..c009e65c14 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/AppDialog.py +++ 
b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/AppDialog.py @@ -1,89 +1,89 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import Tkinter -# -## -# We have a system in which "short" Python scripts are invoked from a GUI -# written in another language. Some of the scripts need to pop up dialogs -# to obtain user input. tkSimpleDialog.Dialog does not work well as a base -# class for these dialogs because it assumes that a Tk root window is present, -# which is typically not the case for these embedded scripts. -# -class AppDialog(Tkinter.Tk): - "A top-level dialog with its own Tcl interpreter." - - def __init__(self, **kw): - "Constructor." 
- Tkinter.Tk.__init__(self, **kw) - body = self.body(self) - body.focus_set() - self.buttonbox() - self.protocol("WM_DELETE_WINDOW", self.cancel) - - def apply(self): - """Process the data. - -This method is called automatically to process the data, *after* -the dialog is destroyed. By default, it does nothing.""" - pass - - def body(self, master): - """Create dialog body. - -Return the widget that should have initial focus. -This method should be overridden, and is called -by the __init__ method.""" - frame = Tkinter.Frame(master) - label = Tkinter.Label(frame, text="Body") - label.pack() - frame.pack(side=Tkinter.TOP) - return frame - - def buttonbox(self): - box = Tkinter.Frame(self) - okButton = Tkinter.Button(box, text="Ok", command=self.ok) - okButton.pack(side=Tkinter.LEFT, pady=5, padx=10) - cancelButton = Tkinter.Button(box, text="Cancel", command=self.cancel) - cancelButton.pack(side=Tkinter.LEFT, pady=5, padx=10) - box.pack(side=Tkinter.BOTTOM) - - def cancel(self, event=None): - """Process the Cancel button. -This method is also invoked when the dialog is closed by the control bar.""" - self.destroy() - - def ok(self, event=None): - """Process the Ok button.""" - if self.validate(): - self.destroy() - self.apply() - - def validate(self): - """Validate the data. -This method is called automatically to process the data before -the dialog is destroyed. By default, it always returns True. -Override to perform validation.""" - return True +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +import tkinter +# +## +# We have a system in which "short" Python scripts are invoked from a GUI +# written in another language. Some of the scripts need to pop up dialogs +# to obtain user input. tkSimpleDialog.Dialog does not work well as a base +# class for these dialogs because it assumes that a Tk root window is present, +# which is typically not the case for these embedded scripts. +# +class AppDialog(tkinter.Tk): + "A top-level dialog with its own Tcl interpreter." + + def __init__(self, **kw): + "Constructor." + tkinter.Tk.__init__(self, **kw) + body = self.body(self) + body.focus_set() + self.buttonbox() + self.protocol("WM_DELETE_WINDOW", self.cancel) + + def apply(self): + """Process the data. + +This method is called automatically to process the data, *after* +the dialog is destroyed. By default, it does nothing.""" + pass + + def body(self, master): + """Create dialog body. + +Return the widget that should have initial focus. +This method should be overridden, and is called +by the __init__ method.""" + frame = tkinter.Frame(master) + label = tkinter.Label(frame, text="Body") + label.pack() + frame.pack(side=tkinter.TOP) + return frame + + def buttonbox(self): + box = tkinter.Frame(self) + okButton = tkinter.Button(box, text="Ok", command=self.ok) + okButton.pack(side=tkinter.LEFT, pady=5, padx=10) + cancelButton = tkinter.Button(box, text="Cancel", command=self.cancel) + cancelButton.pack(side=tkinter.LEFT, pady=5, padx=10) + box.pack(side=tkinter.BOTTOM) + + def cancel(self, event=None): + """Process the Cancel button. +This method is also invoked when the dialog is closed by the control bar.""" + self.destroy() + + def ok(self, event=None): + """Process the Ok button.""" + if self.validate(): + self.destroy() + self.apply() + + def validate(self): + """Validate the data. +This method is called automatically to process the data before +the dialog is destroyed. By default, it always returns True. 
+Override to perform validation.""" + return True diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/BOIVerifyUtility.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/BOIVerifyUtility.py index a4a8b5bae8..8a4bb9af83 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/BOIVerifyUtility.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/BOIVerifyUtility.py @@ -1,5099 +1,5099 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# BOIVerifyUtility - version 2.0.5 -# -# Main utility supporting maintenance of verification databases of both -# grids and stats. -# -# Author: Tim Barker - SOO BOI -# 2005/07/20 - Original Implementation -# 2005/07/29 - version 0.1 - update to grid database structure -# to add time that grid was stored -# 2006/11/06 - version 1.0 - First version with time-series graphs. Still -# lots of bugs and not what I would like for a version 1.0 but -# I've put it off way too long anyway. -# 2006/12/04 - fix bug with getStatList getting slightly wrong time periods -# 2007/02/05 - version X.X -# fixed hardcoded references to "Obs" in getObsPeriod and -# getObsList -# 2007/05/01 - version 1.1 - emergency fix to getPairList for speed -# and memory usage when large number of grids are stored -# 2007/10/23 - version 2.0 - gobs of new stuff for handling common cases -# probability parms, rate parms, new scores, etc. -# 2007/11/30 - version 2.0.1 - fix accumulations over periods more than -# twice as long as models grids (grids totally inside the -# time period were getting counted multiple times) -# 2007/12/17 - version 2.0.2 - fix list of verifying accumulative -# periods when specifying list of verifying dates (it was -# omitting the last period that fit within the time period). -# 2008/05/28 - version 2.0.5 - Added support for new statistics: -# frequency observed and frequency forecast. -# -# -# 2010/04/23 ryu Initial port to AWIPS II. 
-# -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 03/24/2016 18773 ryu Fix IOError from use of NetCDFVariables -# in numpy operations -# 05/25/2016 18743 arickert Original forecaster who edited grids was -# was included with more recent editor -# 06/10/2016 18169 ryu Fix typo in getStatModelCases method -#===================================================================== - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import TimeRange, AbsTime - -from numpy import * -from types import * -import os,string,re,sys -import time,calendar,math,types -try: - from Scientific.IO import NetCDF -except: - import NetCDF - -import SmartScript -# -# Import the Configuration Information -# -import BOIVerifyConfig -#try: -# from BOIVerifyConfig import * -#except: -# print "Could not import BOIVerifyConfig" -#print "VERDIR=",VERDIR -#print "EDITAREAS=",EDITAREAS -# -# Contants that should not be changed by the users! -# -VERSION="2.0.2" -# -# STATAREAS = max number of areas to keep stats for -# STATTYPES = max number of stats to keep -# Changing these is NOT recommended - it changes the size of the -# dimensions in the stat database. If you already have an existing -# stat database, all old information will be lost. Eventually we'll -# have a program that will convert a stat database for more areas or -# more stats -# -STATAREAS=150 # max number of areas to keep stats for -STATTYPES=15 # max number of stats to keep -# -# Do not change - will corrupt the database. Will eventually have -# a program that allows the number of forecaster numbers saved for -# a grid or stat to be changed. 
-# -MAXFCSTRS=5 -# -# Number of thresholds saved for each parm -# -NUMTHRESH=5 -# -# Forecaster number file -# -FCSTRNUMFILE="FcstrNums.dat" # maintained by software - do not change -HOURSECS=60*60 -DAYSECS=24*HOURSECS -SkipParms=[] # parms that we do not verify yet - -class BOIVerifyUtility(SmartScript.SmartScript): - def __init__(self, dbss, eaMgr, mdMode=None, toolType="numeric"): - SmartScript.SmartScript.__init__(self, dbss) - - # - # Read in Config info. - # Each entry in BOIVerifyConfig is put into a self. variable - # for fast access within this utility - # And put into a self.CFG dictionary for easy access outside - # this utility (through the getCFG method) - # (only does variables in BOIVerifyConfig that do NOT start - # with an underscore _ ) - # - names=dir(BOIVerifyConfig) - - self.CFG={} - #names=["VERDIR","EDITAREAS","PERCENT_COLOR","GRIDDAYS","STATDAYS", - # "OBSMODELS"] - for name in names: - if name[0:1]!="_": - execstr="self.CFG['"+name+"']=BOIVerifyConfig."+name - #execstr="self.CFG['"+name+"']="+name - exec execstr - execstr="self."+name+"=BOIVerifyConfig."+name - #execstr="self."+name+"="+name - exec execstr - # - # Setup the REALLY constant variables - # - self.STATAREAS=STATAREAS - self.CFG['STATAREAS']=STATAREAS - self.STATTYPES=STATTYPES - self.CFG['STATTYPES']=STATTYPES - self.MAXFCSTRS=MAXFCSTRS - self.CFG['MAXFCSTRS']=MAXFCSTRS - self.NUMTHRESH=NUMTHRESH - self.CFG['NUMTHRESH']=NUMTHRESH - self.FCSTRNUMFILE=FCSTRNUMFILE - self.CFG['FCSTRNUMFILE']=FCSTRNUMFILE - # - # Setup DEBUG level - # - self.DEBUG=0 - # - # Setup Observed file info - # - self.oncParm="" - self.oncModel="" - self.oncModify=0 - self.onumRecs=0 - # - # Setup forecast grid file info - # - self.fncParm="" - self.fncModel="" - self.fncModify=0 - self.fnumRecs=0 - # - # Setup forecast stat file info - # - self.sncParm="" - self.sncModel="" - self.sncObsModel="" - self.sncModify=0 - self.sncNumRecs=0 - # - # stup forecaster numbers, edit areas, and check config - # - 
self.setupFcstrNums() - self.setupEditAreas() - self.checkConfig() - # - # setup all the stat names. statIDs are the 'short' and - # 'correct' name to use for a stat. Various optional - # names, shortened spellings, etc. are stored in statNames - # dictionary for each ID. allStats contains all the possible - # names for all stats in the system. - # - self.statNames={"bias":["bias","error","err"], - "mae" :["mae","absolute error","abs error","abs err", - "mean abs error","mean abs err", - "mean absolute error","mean absolute err"], - "rms" :["rms","root mean squared error", - "root mean squared err","rms error","rms err"], - "mse" :["mse","mean squared error","mean squared err", - "brier score","brier"], - "peb" :["peb","percent error below","percent err below", - "% error below","% err below","percent error <", - "percent err <","% error <","% err <"], - "fc" :["fc","fraction correct"], - "afc" :["afc","areal fraction correct"], - "freqo":["freqo","frequency observed"], - "freqf":["freqf","frequency forecast"], - "freqbias":["freqbias","frequency bias"], - "afreqbias":["afreqbias","areal frequency bias"], - "pod" :["pod","probability of detection","prob detection"], - "apod":["apod","areal probability of detection", - "areal prob detection","areal pod"], - "far" :["far","false alarm ratio"], - "afar":["afar","areal false alarm ratio"], - "pofd":["pofd","probability of false detection", - "prob of false detection"], - "apofd":["apofd","areal probability of false detection", - "areal prob of false detection"], - "ts" :["ts","threat score","csi", - "critical success index"], - "ats" :["ats","areal threat score","acsi", - "areal critical success index"], - "ets" :["ets","equitable threat score","gilbert"], - "aets":["aets","areal equitable threat score", - "agilbert","areal gilbert"], - "hk" :["hk","hanssen and kuipers discriminant", - "peirce","peirces skill score", - "tss","true skill score"], - "ahk" :["ahk","areal hanssen and kuipers discriminant", - 
"apeirce","areal peirces skill score", - "atss","areal true skill score"], - "hss" :["hss","heidke skill score"], - "ahss":["ahss","areal heidke skill score"], - "oddsratio":["oddsratio","odds ratio"], - "aoddsratio":["aoddsratio","areal odds ratio"], - "hits":["hits",], - "ahits":["ahits","areal hits"], - "miss":["miss","misses"], - "amiss":["amiss","areal misses"], - "fals":["fals","false alarms"], - "afals":["afals","areal false alarms"], - "corn":["corn","correct negatives"], - "acorn":["acorn","areal correct negatives"], - "cont":["cont","contingency table"], - "acont":["acont","areal contingency table"], - } - self.statIDs=self.statNames.keys() - self.allStats=[] - for statName in self.statIDs: - names=self.statNames[statName] - for name in names: - self.allStats.append(name) - return - - #================================================================= - # checkConfig - cross check configuration data, and make log - # messages about problems. - # - # Return 0 if config is OK, return 1 if there - # are any problems - # - def checkConfig(self): - badConfig=0 - mutid=self.mutableID() - # - # Make sure parms are well defined - # - parmNames=self.VERCONFIG.keys() - for parmName in parmNames: - parmInfo=self.getParm(mutid,parmName,"SFC") - if parmInfo is None: - self.logMsg("Could not check VER config for %s"%parmName) - continue - config=self.VERCONFIG[parmName] - # - # check for horribly bad config lines - # - if type(config)!=TupleType: - self.logMsg("BOIVerify VERCONFIG of %s is not a tuple - it should be"%parm) - badConfig=1 - continue - if len(config)!=8: - self.logMsg("BOIVerify VERCONFIG of %s does not have 8 elements"%parmName) - badConfig=1 - continue - # - # check parm type - # - if type(config[0])!=IntType: - self.logMsg("BOIVerify VERCONFIG of %s has bad type:%s"%(parm,config[0])) - badConfig=1 - else: - parmType=config[0] - if ((parmType<0)or(parmType>1)): - self.logMsg("BOIVerify VERCONFIG of %s has bad type:%d"%(parm,parmType)) - badConfig=1 - # - 
# check ver type - # - if type(config[1])!=IntType: - self.logMsg("BOIVerify VERCONFIG of %s has bad verification type:%s"%(parm,config[1])) - badConfig=1 - else: - verType=config[1] - if ((verType<0)or(verType>1)): - self.logMsg("BOIVerify VERCONFIG of %s has bad verification type:%d"%(parm,verType)) - badConfig=1 - # - # check parm save interval - # - if type(config[2])!=IntType: - self.logMsg("BOIVerify VERCONFIG of %s has bad save interval:%s"%(parm,config[2])) - badConfig=1 - else: - saveInt=config[2] - if ((saveInt<0)or(saveInt>24)): - self.logMsg("BOIVerify VERCONFIG of %s has bad save interval:%d"%(parm,saveInt)) - badConfig=1 - # - # Checks are different for Scalar/Probability vs. Vectors - # - ##if parmInfo.dataType()!=1: - wxType = parmInfo.getGridInfo().getGridType().ordinal() - 1 - if wxType!=1: - # - # check for NUMTHRESH thresholds - # - if type(config[3])!=TupleType: - self.logMsg("BOIVerify VERCONFIG of %s has bad thresholds:%s"%(parm,config[3])) - badConfig=1 - else: - thresholds=config[3] - numthresh=len(thresholds) - if (numthresh!=self.NUMTHRESH): - self.logMsg("BOIVerify VERCONFIG of %s does not have %d thresholds: has %d instead"%(parm,self.NUMTHRESH,numthresh)) - badConfig=1 - # - # check for binwidth - # - return badConfig - #================================================================= - # CONVENIENCE FUNCTION FOR GETTING VER CONFIGURATION - # - def getCFG(self,name): - if name in self.CFG: - return self.CFG[name] - else: - return None - #================================================================= - # CONVENIENCE FUNCTIONS FOR GETTING VER PARM CONFIGURATION - # - # getVerParms - get list of configured verification parameters - # - def getVerParms(self): - VerParms=self.VERCONFIG.keys() - VerParms.sort() - return VerParms - #================================================================= - # getVerParmsVect - gets list of configured verification - # parameters, with any vector parms also - # listed with "Spd" appended, and "Dir" 
- # appended - # - def getVerParmsVect(self): - parmList=self.getVerParms() - VerParms=[] - for parm in parmList: - if self.getVerParmType(parm)==1: - VerParms.append(parm) - VerParms.append(parm+"Spd") - VerParms.append(parm+"Dir") - else: - VerParms.append(parm) - VerParms.sort() - return VerParms - #================================================================= - # getVerParmsVal - gets list of configured verification parms - # that are scalars (removes vectors from list) - # - def getVerParmsVal(self): - parmList=self.getVerParms() - VerParms=[] - for parm in parmList: - if self.getVerType(parm)==0: - VerParms.append(parm) - VerParms.sort() - return VerParms - #================================================================= - # getVerParmsObs - gets list of configured verification parms, - # and adds in any observed parm namesget list of - # observed parms - # - def getVerParmsObs(self): - VerParms=[] - fparms=self.VERCONFIG.keys() - fparms.sort() - for key in fparms: - if self.VERCONFIG[key][1]==1: - (obsparm,ttype,tval)=self.VERCONFIG[key][7] - else: - obsparm=self.VERCONFIG[key][7] - if obsparm not in VerParms: - VerParms.append(obsparm) - VerParms.sort() - return VerParms - #================================================================= - # getObsParm - for a particular parm, get its corresponding - # obsparm name. - # - def getObsParm(self,fcstParm): - obsparm="" - if fcstParm in self.VERCONFIG.keys(): - if self.VERCONFIG[fcstParm][1]==1: - (obsparm,ttype,tval)=self.VERCONFIG[fcstParm][7] - else: - obsparm=self.VERCONFIG[fcstParm][7] - return obsparm - #================================================================= - # getObsCondition - for a probability parm, get the condition - # ">", ">=", "<", "<=" etc. for the observed - # parm. For example, in the US, PoP is verified - # with QPE >= 0.01, so the condition is ">=" in - # this case. If the parm is not a probability parm - # it just returns an empty string. 
- # - def getObsCondition(self,fcstParm): - obsCondition="" - if fcstParm in self.VERCONFIG.keys(): - if self.VERCONFIG[fcstParm][1]==1: - (obsparm,obsCondition,obsThreshold)=self.VERCONFIG[fcstParm][7] - return obsCondition - #================================================================= - # getObsThreshold - for a probability parm, get the threshold for - # the observed parm. For example, in the US, - # PoP is verified with QPE >= 0.01. The threshold - # is 0.01 in this case. If the specified parm is - # not a probability parm, returns zero for the - # threshold. - # - def getObsThreshold(self,fcstParm): - obsThreshold=0 - if fcstParm in self.VERCONFIG.keys(): - if self.VERCONFIG[fcstParm][1]==1: - (obsparm,obsCondition,obsThreshold)=self.VERCONFIG[fcstParm][7] - return obsThreshold - #================================================================= - # getVerConfig - get the specified element of the config stuff - # for a parm, or return None if that parm is not - # configured - # - def getVerConfig(self,parmName,element): - if parmName not in self.VERCONFIG.keys(): - return None - config=self.VERCONFIG[parmName] - return config[element] - #================================================================= - # getVerParmType - get the type of parm, either 0 for SCALAR or - # 1 for VECTOR. If the parm is not configured it - # also checks to see of the observed parm of any - # configured parm matches - and returns whether - # that parm is a SCALAR or VECTOR - # - def getVerParmType(self,parmName): - parmType=self.getVerConfig(parmName,0) - if parmType is None: - parms=self.getVerParms() - for parm in parms: - oparm=self.getObsParm(parm) - if oparm==parmName: - parmType=self.getVerConfig(parm,0) - break - return parmType - #================================================================= - # getVerType - get the type of verification needed for this parm. - # Normally 0, meaning just typical value verification. 
- # A value of 1 means 'floating probability' type - # verification. A value of 2 means 'probability' type - # verification. - # - def getVerType(self,parmName): - return self.getVerConfig(parmName,1) - #================================================================= - # getVerSaveInterval - used for the saving of parms. If set to 0 - # then save all grids for this parm. If set - # to 3, then save 'snapshots' every 3 hours - # through the day. If set to 6, then save - # 'snapshots' every 6 hours through the day, - # etc. - # - def getVerSaveInterval(self,parmName): - return self.getVerConfig(parmName,2) - #================================================================= - # getVerThresholds - used in BOIVerify autocalc. calculates some - # stats for absolute value of errors less than - # these thresholds. For SCALAR parms, should - # be a tuple with 5 threshold values. For - # VECTOR parms, should be two tuples, each - # with 5 values. - # - def getVerThresholds(self,parmName): - return self.getVerConfig(parmName,3) - #================================================================= - # getVerBinWidth - used in BOIVerify histogram displays. Used - # for the width of bins in the histogram. For - # a SCALAR should be just one value. For a VECTOR - # will be a tuple with the magnitude binWidth and - # the direction binWidth - # - def getVerBinWidth(self,parmName): - return self.getVerConfig(parmName,4) - #================================================================= - # getVerBigErr - used in BOIVerify error displays. Errors bigger - # than this are sometimes not displayed. SCALARS - # will have one value. VECTORS will have a tuple - # with magnitude bigErr and direction bigErr. - # - def getVerBigErr(self,parmName): - return self.getVerConfig(parmName,5) - #================================================================= - # getVerErrColor - used in BOIVerify error displays. Color table - # name used for errors. 
This is in here so that - # dewpoint errors can have a different color - # table than, say, temperature errors. For a - # VECTOR parm, this returns a tuple of two color - # table names - # - def getVerErrColor(self,parmName): - return self.getVerConfig(parmName,6) - #================================================================= - # getVerObsInfo - used in BOIVerify for probability parms. For - # normal value parms, this is just the name of - # the parm that verifies it (so that QPF can be - # verified by QPE, if needed). For probability - # parms this is a 3-value tuple with name, - # condition, threshold) - # - def getVerObsInfo(self,parmName): - return self.getVerConfig(parmName,7) - #================================================================= - # getBaseOffset - looks through baseOffset in configuration to - # see if model is listed. If so, returns the - # offset (in hours). If not, returns zero. - # - def getBaseOffset(self,model): - if model in self.BASE_OFFSET.keys(): - offset=self.BASE_OFFSET[model] - else: - offset=0 - return offset - #================================================================= - # setupFcstrNums - sets up the Fcstrs list with the strings of - # forecaster names for each number by reading - # the FCSTRNUMFILE file from disk. 
- # FcstrNames is a dictionary with names for - # each 2-digit number string - # FcstrIDs is a dictionary with 8-character IDs - # for each 2-digit number string - # - def setupFcstrNums(self): - self.FcstrNames={} - self.FcstrIDs={} - self.FcstrNames["00"]="Unknown" - self.FcstrIDs["00"]="Unknown" - filename="%s/%s"%(self.VERDIR,self.FCSTRNUMFILE) - try: - fs=file(filename,"r") - lines=fs.readlines() - fs.close() - except: - return 0 - for line in lines: - nocomment=re.sub("#.*","",line) - pieces=nocomment.split(",") - if len(pieces)>1: - numstr=pieces[0].strip() - try: - num=int(numstr) - except: - self.logMsg("Bad forecaster number ignored:") - self.logMsg(" %s"%nocomment) - continue - idstr=pieces[1].strip() - if len(pieces)>2: - namstr=pieces[2].strip() - else: - namstr=idstr - # - # forecaster number 0 is always forced to be Unknown - # - if num==0: - namstr="Unknown" - idstr="Unknown" - # - numstr="%2.2d"%num - self.FcstrNames[numstr]=namstr - self.FcstrIDs[numstr]=idstr - # - # If debug is set...pwrint a list of forecaster number:id,name - # - if self.DEBUG>0: - self.logMsg("setupFcstrNums reading verification forecasters:") - numericList=[] - for (num,name) in self.FcstrNames.items(): - id=self.FcstrIDs[num] - numericList.append("%s:%s,%s"%(num,id,name)) - numericList.sort() - for entry in numericList: - self.logMsg(" %s"%entry) - return 1 - #================================================================= - # getFcstrNames - return dictionary of forecaster names for each - # number (number is a two-digit string). - # - def getFcstrNames(self): - return self.FcstrNames - #================================================================= - # getFcstrName - given a number (integer), find the full forecaster - # name. 
Returns empty string if number is not used - # - def getFcstrName(self,num): - name="" - numstr="%2.2d"%num - if numstr in self.FcstrNames.keys(): - name=self.FcstrNames[numstr] - return name - #================================================================= - # getFcstrIDs - return dictionary of forecast IDs for each number - # - def getFcstrIDs(self): - return self.FcstrIDs - #================================================================= - # getFcstr ID - given a number (integer), find the forecaster ID. - # Returns empty string if number is not used - # - def getFcstrID(self,num): - ID="" - numstr="%2.2d"%num - if numstr in self.FcstrIDs.keys(): - ID=self.FcstrIDs[numstr] - return ID - #================================================================= - # getFcstrNums - return list of forecaster numbers (as strings) - # - def getFcstrNums(self): - nums=self.FcstrNames.keys() - nums.sort() - return nums - #================================================================= - # setFcstrs - take a name dictionary, and an id dictionary, and - # set the self.FcstrNames and self.FcstrIDs - # dictionaries in the Utility. Used in BOIVerifyInfo - # when these dictionaries are being updated. - # - def setFcstrs(self,nameDict,idDict): - self.FcstrNames={} - for (key,value) in nameDict.items(): - self.FcstrNames[key]=value - self.FcstrIDs={} - for (key,value) in idDict.items(): - self.FcstrIDs[key]=value - #================================================================= - # findFcstrNumFromID - takes a forecaster ID string - and returns - # the associated integer forecast number. - # If the ID is not found - returns 0. 
- # - def findFcstrNumFromID(self,id): - num=0 - if id in self.FcstrIDs.values(): - for (testnum,testid) in self.FcstrIDs.items(): - if testid==id: - num=int(testnum) - break - return num - #================================================================== - # findFcstrNumFromName - takes a forecaster name string - and returns - # the associated integer forecast number. - # if the ID is not found - returns 0. - # - def findFcstrNumFromName(self,name): - num=0 - if id in self.FcstrNames.values(): - for (testnum,testname) in self.FcstrNames.items(): - if testname==name: - num=int(testnum) - break - return num - #================================================================== - # saveFcstrNum - write the FcstrNames and FcstrIDs info into the - # FCSTRNUMSFILE on disk. This called by BOIVerifyInfo - # as these are being updated and should not be - # called elsewhere. - # - def saveFcstrNums(self): - filename="%s/%s"%(self.VERDIR,self.FCSTRNUMFILE) - try: - fs=file(filename,"w") - fs.write("#\n") - fs.write("# This file maintained by the verification programs\n") - fs.write("# Please DO NOT EDIT\n") - fs.write("#\n") - numkeys=self.FcstrNames.keys() - numkeys.sort() - for numstr in numkeys: - name=self.FcstrNames[numstr] - id=self.FcstrIDs[numstr] - fs.write("%s,%s,%s\n"%(numstr,id,name)) - fs.close() - try: - os.chmod(filename,0666) - except: - self.logMsg("%s should have 666 permissions"%self.FCSTRNUMFILE) - return 0 - except: - self.logMsg("Error writing to %s"%self.FCSTRNUMFILE) - return 1 - return 0 - #================================================================= - # setupEditAreas - read the EDITAREAS file and sets up internal - # EditAreas list with the names of editareas. - # They names are in the appropriate 'slot' in - # the list via number (i.e. if the file does not - # specify an edit area #35, then slot 35 in the - # list is kept blank. 
EditAreaDescriptions keeps - # the long descriptive names in a similar way - # - def setupEditAreas(self): - self.EditAreas=[] - self.EditAreaDescriptions=[] - filename="%s/%s"%(self.VERDIR,self.EDITAREAS) - try: - fs=file(filename,"r") - lines=fs.readlines() - fs.close() - except: - return 0 - for line in lines: - nocomment=re.sub("#.*","",line) - pieces=nocomment.split(",") - if len(pieces)>2: - numstr=pieces[0].strip() - namstr=pieces[1].strip() - desstr=pieces[2].strip() - try: - num=int(numstr) - except: - continue - if num>self.STATAREAS: - continue - if num<0: - continue - if num>=len(self.EditAreas): - for j in range(len(self.EditAreas),num+1): - self.EditAreas.append("") - self.EditAreaDescriptions.append("") - self.EditAreas[num]=namstr - self.EditAreaDescriptions[num]=desstr - if self.DEBUG>0: - self.logMsg("Setting up verification edit areas:") - for i in range(len(self.EditAreas)): - if self.EditAreas[i]!="": - self.logMsg(" %d:%s"%(i,self.EditAreas[i])) - return 1 - #================================================================= - # listEditAreas - get a list with just the names of edit areas - # (in other words, just like EditAreas, but - # without the 'blank' entries) - # - def listEditAreas(self): - usedEditAreas=[] - for area in self.EditAreas: - if area!="": - usedEditAreas.append(area) - return usedEditAreas - #================================================================= - # listEditAreaDescriptions - get a list with just the descriptions - # of all the editAreas, but without - # the 'blank' entries that are in - # EditAreaDescriptions. - # - def listEditAreaDescriptions(self): - usedEditAreaDescriptions=[] - for area in self.EditAreaDescriptions: - if area!="": - usedEditAreaDescriptions.append(area) - return usedEditAreaDescriptions - #================================================================= - # getEditAreaNumberFromName - given a name, return the number of - # that edit area in the MAXEDITAREAS list. 
If the name - # does not exist, return 0 - # - def getEditAreaNumberFromName(self,name): - if name in self.EditAreas: - j=self.EditAreas.index(name) - else: - j=0 - return j - #================================================================= - # listModels - List models in the BOIVerify system by looking - # through Grids directories looking for different - # names. Does not include any models that - # are in the OBSMODELS list - # - def listModels(self): - Models=[] - pat=re.compile("(\S+)_\S+_index\.nc") - parmdirs=os.listdir("%s/Grids"%self.VERDIR) - for parmdir in parmdirs: - if parmdir[0]==".": - continue - dirname="%s/Grids/%s"%(self.VERDIR,parmdir) - if os.path.isdir(dirname): - files=os.listdir(dirname) - for file in files: - if file[0]==".": - continue - matchObject=pat.search(file) - if matchObject is None: - continue - model=matchObject.group(1) - if ((model not in Models)and(model not in self.OBSMODELS)): - Models.append(model) - Models.sort() - return Models - #================================================================= - # listParms - DEPRECATED - gets a list of parms in the system, not - # by reading the configuration (which is the right way) - # but by looping through all the directories looking for - # names of files. 
- # - def listParms(self): - Parms=[] - files=os.listdir("%s/Grids"%self.VERDIR) - for file in files: - if file[0]==".": - continue - fname="%s/Grids/%s"%(self.VERDIR,file) - if os.path.isdir(fname): - parm=file - if ((parm not in Parms)and(parm not in SkipParms)): - Parms.append(parm) - Parms.sort() - return Parms - #================================================================= - # listModelParms - given a model, get the parms that have been - # archived - by looking for data files - # - def listModelParms(self,model): - Parms=[] - files=os.listdir("%s/Grids"%self.VERDIR) - for file in files: - if file[0]==".": - continue - fname="%s/Grids/%s"%(self.VERDIR,file) - if os.path.isdir(fname): - indexExists=os.path.exists("%s/Grids/%s/%s_%s_index.nc"%(self.VERDIR,file,model,file)) - dataExists=os.path.exists("%s/Grids/%s/%s_%s_index.nc"%(self.VERDIR,file,model,file)) - if (indexExists and dataExists and (file not in Parms) and (file not in SkipParms)): - Parms.append(file) - Parms.sort() - return Parms - #================================================================= - # listStatParms - DEPRECATED - gets a list of stat parms in the - # system not by reading through the configuration - # but by looping through all the directories - # looking for names of files. - # - def listStatParms(self): - Parms=[] - files=os.listdir("%s/Stats"%self.VERDIR) - for file in files: - if file[0]==".": - continue - fname="%s/Stats/%s"%(self.VERDIR,file) - if os.path.isdir(fname): - parm=file - if ((parm not in Parms)and(parm not in SkipParms)): - Parms.append(parm) - Parms.sort() - return Parms - #================================================================= - # closeObsFile - if an Obs file is open, close it and free up - # all the structures associated with it. 
- # - def closeObsFile(self): - if (not(self.oncParm=="")): - self.oncIndex.close() - self.oncData.close() - del self.oncIndex - del self.oncData - del self.oncFcstr - del self.oncBtime - del self.oncStime - del self.oncEtime - del self.oncVtime - del self.oncScale - del self.oncAddit - del self.oncValue - del self.oncRecs - try: - del self.oncScale1 - del self.oncAddit1 - del self.oncValue1 - except: - pass - self.oncParm="" - self.oncModel="" - self.oncModify=0 - self.oncType=0 - return - #================================================================= - # closeFcstFile - if a Fcst file is open, close it and free up - # all the structures associated with it. - # - def closeFcstFile(self): - if (not(self.fncParm=="")): - self.fncIndex.close() - self.fncData.close() - del self.fncIndex - del self.fncData - del self.fncFcstr - del self.fncBtime - del self.fncStime - del self.fncEtime - del self.fncVtime - del self.fncScale - del self.fncAddit - del self.fncValue - del self.fncRecs - try: - del self.fncScale1 - del self.fncAddit1 - del self.fncValue1 - except: - pass - self.fncParm="" - self.fncModel="" - self.fncModify=0 - self.fncType=0 - return - #================================================================= - # makeGridDir - make a directory for grids for the specified - # parm. - # - def makeGridDir(self,parm,modify): - newDir="%s/Grids/%s"%(self.VERDIR,parm) - already=os.path.exists(newDir) - if ((not already)and(modify!=0)): - os.umask(0002) - os.makedirs(newDir) - return - #================================================================= - # makeStatDir - make a directory for stats for the specified - # parm. 
- # - def makeStatDir(self,parm,modify): - newDir="%s/Stats/%s"%(self.VERDIR,parm) - already=os.path.exists(newDir) - if ((not already)and(modify!=0)): - os.umask(0002) - os.makedirs(newDir) - return - #================================================================= - # checkFile - given a parm and model, see if it is open, and if - # not - open it. Takes care of figuring out if it - # is an 'observation' model or not. If modify is - # 1, then it opens it for writing - which locks it - # from writing by others. - # - # returns 0 if there is trouble opening file - # - def checkFile(self,parm,model,modify=0,datatype=-1): - if model in self.OBSMODELS: - retVal=self.checkObsFile(parm,model,modify=modify,datatype=datatype) - return retVal - else: - retVal=self.checkFcstFile(parm,model,modify=modify,datatype=datatype) - return retVal - #================================================================= - # checkObsFile - given an parm and obsmodel, see if it is open, - # and if not - open it. If modify is 1, then it - # opens the file for writing - which locks it - # from writing by others. 
- # - # returns 0 if there is trouble opening file - # - def checkObsFile(self,parm,model,modify=0,datatype=-1): - # - # If everything is the same...return right away - # - if ((parm==self.oncParm)and(model==self.oncModel)and(modify==self.oncModify)): - return 1 - # - # If a file is currently open - close it - # - if (not(self.oncParm=="")): - self.closeObsFile() - # - # Setup the file names and see if they exist - # - self.makeGridDir(parm,modify) - newIndex="%s/Grids/%s/%s_%s_index.nc"%(self.VERDIR,parm,model,parm) - newData="%s/Grids/%s/%s_%s_data.nc"%(self.VERDIR,parm,model,parm) - already=os.path.exists(newIndex) - # - # Can't read data from file that does not exist - # - if ((not already) and (modify==0)): - return 0 - # - # Figure out read-mode for file - # - if modify==0: - mode="r" - else: - mode="a" - # - # Figure data type and number of points in grid - # - if datatype<0: - datatype=self.getVerParmType(parm) - if datatype is None: - return 0 - (ypts,xpts)=self.getGridShape() - # - # Open the two obs files: the index and the data - # - self.oncIndex=NetCDF.NetCDFFile(newIndex,mode) - self.oncData=NetCDF.NetCDFFile(newData,mode) - # - # If a new file...create the variables - # - if not already: - self.oncData.createDimension("ypts",ypts) - self.oncData.createDimension("xpts",xpts) - self.oncData.createDimension("record",None) - self.oncIndex.createDimension("record",None) - self.oncIndex.createDimension("maxfcstrs",self.MAXFCSTRS) - self.oncFcstr=self.oncIndex.createVariable('fcstr','b',('record','maxfcstrs')) - self.oncBtime=self.oncIndex.createVariable('btime','i',('record',)) - self.oncStime=self.oncIndex.createVariable('stime','i',('record',)) - self.oncEtime=self.oncIndex.createVariable('etime','i',('record',)) - self.oncVtime=self.oncIndex.createVariable('vtime','i',('record',)) - self.oncScale=self.oncIndex.createVariable('scale','d',('record',)) - self.oncAddit=self.oncIndex.createVariable('addit','d',('record',)) - 
self.oncValue=self.oncData.createVariable('value','h',('record','ypts','xpts')) - if datatype==1: - self.oncScale1=self.oncIndex.createVariable('scale1','d',('record',)) - self.oncAddit1=self.oncIndex.createVariable('addit1','d',('record',)) - self.oncValue1=self.oncData.createVariable('value1','h',('record','ypts','xpts')) - self.oncIndex.sync() - self.oncData.sync() - os.chmod(newIndex,0775) - os.chmod(newData,0775) - # - # If an old file...hook up variables to the netCDF files - # - else: - ivarnames=self.oncIndex.variables.keys() - dvarnames=self.oncData.variables.keys() - for name in ('fcstr','btime','stime','etime','vtime','scale','addit'): - if name not in ivarnames: - self.logMsg("Corrupt index file for %s %s detected"%(model,parm)) - return 0 - if 'value' not in dvarnames: - self.logMsg("Corrupt data file for %s %s detected"%(model,parm)) - return 0 - self.oncFcstr=self.oncIndex.variables['fcstr'] - if len(self.oncFcstr.shape)!=2: - self.logMsg("Old index file (pre version 1.0) detected for %s %s"%(model,parm)) - return 0 - self.oncBtime=self.oncIndex.variables['btime'] - self.oncStime=self.oncIndex.variables['stime'] - self.oncEtime=self.oncIndex.variables['etime'] - self.oncVtime=self.oncIndex.variables['vtime'] - self.oncScale=self.oncIndex.variables['scale'] - self.oncAddit=self.oncIndex.variables['addit'] - self.oncValue=self.oncData.variables['value'] - if datatype==1: - if (('scale1' not in ivarnames)or('addit1' not in ivarnames)): - self.logMsg("Corrupt index file for %s %s detected"%(model,parm)) - return 0 - if 'value1' not in dvarnames: - self.logMsg("Corrupt data file for %s %s detected"%(model,parm)) - return 0 - self.oncScale1=self.oncIndex.variables['scale1'] - self.oncAddit1=self.oncIndex.variables['addit1'] - self.oncValue1=self.oncData.variables['value1'] - self.oncParm=parm - self.oncModel=model - self.oncModify=modify - self.onumRecs=self.oncStime.shape[0] - self.oncRecs=indices((self.onumRecs,))[0] - self.oncType=datatype - return 1 - 
#================================================================= - # checkFcstFile - given an parm and obsmodel, see if it is open, - # and if not - open it. If modify is 1, then it - # opens the file for writing - which locks it - # from writing by others. - # - # returns 0 if there is trouble opening file - # - def checkFcstFile(self,parm,model,modify=0,datatype=-1): - # - # If everything is the same...return right away - # - if ((parm==self.fncParm)and(model==self.fncModel)and(modify==self.fncModify)): - return 1 - # - # If a file is currently open - close it - # - if (not(self.fncParm=="")): - self.closeFcstFile() - # - # Setup the file names and see if they exist - # - self.makeGridDir(parm,modify) - newIndex="%s/Grids/%s/%s_%s_index.nc"%(self.VERDIR,parm,model,parm) - newData="%s/Grids/%s/%s_%s_data.nc"%(self.VERDIR,parm,model,parm) - already=os.path.exists(newIndex) - # - # Can't read data from file that does not exist - # - if ((not already) and (modify==0)): - return 0 - # - # Figure out read-mode for file - # - if modify==0: - mode="r" - else: - mode="a" - # - # Figure data type and number of points in grid - # - if datatype<0: - datatype=self.getVerParmType(parm) - if datatype is None: - return 0 - (ypts,xpts)=self.getGridShape() - # - # Open the two fcst files: the index and the data - # - self.fncIndex=NetCDF.NetCDFFile(newIndex,mode) - self.fncData=NetCDF.NetCDFFile(newData,mode) - # - # If a new file...create the variables - # - if not already: - self.fncData.createDimension("ypts",ypts) - self.fncData.createDimension("xpts",xpts) - self.fncData.createDimension("record",None) - self.fncIndex.createDimension("record",None) - self.fncIndex.createDimension("maxfcstrs",self.MAXFCSTRS) - self.fncFcstr=self.fncIndex.createVariable('fcstr', 'b', ('record','maxfcstrs')) - self.fncBtime=self.fncIndex.createVariable('btime', 'i', ('record',)) - self.fncStime=self.fncIndex.createVariable('stime', 'i',('record',)) - 
self.fncEtime=self.fncIndex.createVariable('etime', 'i',('record',)) - self.fncVtime=self.fncIndex.createVariable('vtime', 'i',('record',)) - self.fncScale=self.fncIndex.createVariable('scale','d',('record',)) - self.fncAddit=self.fncIndex.createVariable('addit','d',('record',)) - self.fncValue=self.fncData.createVariable('value','h',('record','ypts','xpts')) - if datatype==1: - self.fncScale1=self.fncIndex.createVariable('scale1','d',('record',)) - self.fncAddit1=self.fncIndex.createVariable('addit1','d',('record',)) - self.fncValue1=self.fncData.createVariable('value1','h',('record','ypts','xpts')) - self.fncIndex.sync() - self.fncData.sync() - os.chmod(newIndex,0775) - os.chmod(newData,0775) - # - # If an old file...hook up variables to the netCDF files - # - else: - ivarnames=self.fncIndex.variables.keys() - dvarnames=self.fncData.variables.keys() - for name in ('fcstr','btime','stime','etime','vtime','scale','addit'): - if name not in ivarnames: - self.logMsg("Corrupt index file for %s %s detected"%(model,parm)) - return 0 - if 'value' not in dvarnames: - self.logMsg("Corrupt data file for %s %s detected"%(model,parm)) - return 0 - self.fncFcstr=self.fncIndex.variables['fcstr'] - if len(self.fncFcstr.shape)!=2: - self.logMsg("Old index file (pre version 1.0) detected for %s %s"%(model,parm)) - return 0 - self.fncBtime=self.fncIndex.variables['btime'] - self.fncStime=self.fncIndex.variables['stime'] - self.fncEtime=self.fncIndex.variables['etime'] - self.fncVtime=self.fncIndex.variables['vtime'] - self.fncScale=self.fncIndex.variables['scale'] - self.fncAddit=self.fncIndex.variables['addit'] - self.fncValue=self.fncData.variables['value'] - if datatype==1: - if (('scale1' not in ivarnames)or('addit1' not in ivarnames)): - self.logMsg("Corrupt index file for %s %s detected"%(model,parm)) - return 0 - if 'value1' not in dvarnames: - self.logMsg("Corrupt data file for %s %s detected"%(model,parm)) - return 0 - self.fncScale1=self.fncIndex.variables['scale1'] - 
self.fncAddit1=self.fncIndex.variables['addit1'] - self.fncValue1=self.fncData.variables['value1'] - self.fncParm=parm - self.fncModel=model - self.fncModify=modify - self.fnumRecs=self.fncStime.shape[0] - self.fncRecs=indices((self.fnumRecs,))[0] - self.fncType=datatype - return 1 - #================================================================= - # checkStats - given an parm, model, and obsmodel, see if the - # stats file for this is open. If not, close any - # current stat file. If modify is 1, then it - # opens the file for writing - which locks it - # from writing by others. - # - # returns 0 if there is trouble opening file - # - def checkStats(self,parm,model,obsmodel,modify=0): - # - # If everything is the same...return right away - # - if ((parm==self.sncParm)and(model==self.sncModel)and - (obsmodel==self.sncObsModel)and(modify==self.sncModify)): - return 1 - # - # If a file is currently open - close it - # - if (not(self.sncParm=="")): - self.closeStatsFile() - # - # Setup the file names and see if they exist - # - self.makeStatDir(parm,modify) - newData="%s/Stats/%s/%s_%s_%s_data.nc"%(self.VERDIR,parm,model,parm,obsmodel) - newIndex="%s/Stats/%s/%s_%s_%s_index.nc"%(self.VERDIR,parm,model,parm,obsmodel) - already=os.path.exists(newIndex) - # - # Can't read data from file that does not exist - # - if ((not already) and (modify==0)): - return 0 - # - # Figure out read-mode for file - # - if modify==0: - mode="r" - else: - mode="a" - # - # Open the two fcst files: the index and the data - # - self.sncIndex=NetCDF.NetCDFFile(newIndex,mode) - self.sncData=NetCDF.NetCDFFile(newData,mode) - # - # If a new file...create the variables - # - if not already: - self.sncData.createDimension("maxareas",self.STATAREAS) - self.sncData.createDimension("maxstats",self.STATTYPES) - self.sncData.createDimension("record",None) - self.sncIndex.createDimension("record",None) - self.sncIndex.createDimension("maxfcstrs",self.MAXFCSTRS) - 
self.sncFcstr=self.sncIndex.createVariable('fcstr','b',('record','maxfcstrs')) - self.sncBtime=self.sncIndex.createVariable('btime','i',('record',)) - self.sncStime=self.sncIndex.createVariable('stime','i',('record',)) - self.sncEtime=self.sncIndex.createVariable('etime','i',('record',)) - self.sncVtime=self.sncIndex.createVariable('vtime','i',('record',)) - self.sncCycle=self.sncIndex.createVariable('cycle','b',('record',)) - self.sncFhour=self.sncIndex.createVariable('fhour','h',('record',)) - self.sncStats=self.sncData.createVariable('stats','f',('record','maxareas','maxstats')) - self.sncIndex.sync() - self.sncData.sync() - os.chmod(newIndex,0775) - os.chmod(newData,0775) - # - # If an old file...hook up variables to the netCDF files - # - else: - self.sncFcstr=self.sncIndex.variables['fcstr'] - self.sncBtime=self.sncIndex.variables['btime'] - self.sncStime=self.sncIndex.variables['stime'] - self.sncEtime=self.sncIndex.variables['etime'] - self.sncVtime=self.sncIndex.variables['vtime'] - self.sncCycle=self.sncIndex.variables['cycle'] - self.sncFhour=self.sncIndex.variables['fhour'] - self.sncStats=self.sncData.variables['stats'] - self.sncParm=parm - self.sncModel=model - self.sncObsModel=obsmodel - self.sncModify=modify - self.sncNumRecs=self.sncBtime.shape[0] - self.sncRecs=indices((self.sncNumRecs,))[0] - return 1 - #================================================================= - # closeStatsFile - if a stat file is open, close it and free up - # all the structures associated with it. 
- # - def closeStatsFile(self): - if (not(self.sncParm=="")): - self.sncIndex.close() - self.sncData.close() - del self.sncIndex - del self.sncData - del self.sncFcstr - del self.sncBtime - del self.sncStime - del self.sncEtime - del self.sncVtime - del self.sncCycle - del self.sncFhour - del self.sncStats - del self.sncRecs - self.sncParm="" - self.sncModel="" - self.sncObsModel="" - self.sncNumRecs=-1 - self.sncModify=0 - return - #================================================================= - # getBases - get sorted list of all base times (model run times) - # stored for an inputParm and model. Can be helpful - # when figuring out all possible model run times - # (though, this is really only the model run times - # we have SAVED - not that COULD exist). - # - # If model is in the OBSMODELS list, then it returns - # all the times of the saved OBSMODEL grids - which are - # the same as the grid start times. - # - # if no data file exists for inputParm and model, it - # returns an empty list. - # - def getBases(self,inputParm,model): - Bases=[] - if not self.checkFile(inputParm,model): - return Bases - if model in self.OBSMODELS: - for i in range(self.onumRecs): - if self.oncBtime[i] not in Bases: - Bases.append(self.oncBtime[i]) - else: - for i in range(self.fnumRecs): - if self.fncBtime[i] not in Bases: - Bases.append(self.fncBtime[i]) - Bases.sort() - return Bases - #================================================================= - # getStarts - get sorted list of all grid start times stored for - # an inputParm and model. Can be helpful when - # figuring out all possible times (though, this is - # really only the times we have SAVED - not all those - # that COULD exist). Some forecast ones may not be - # verified yet! - # - # if no data file exists for inputParm and model, it - # returns an empty list. 
- # - def getStarts(self,inputParm,model): - Starts=[] - if not self.checkFile(inputParm,model): - return Starts - if model in self.OBSMODELS: - for i in range(self.onumRecs): - if self.oncStime[i] not in Starts: - Starts.append(self.oncStime[i]) - else: - for i in range(self.fnumRecs): - if self.fncStime[i] not in Starts: - Starts.append(self.fncStime[i]) - Starts.sort() - return Starts - #================================================================== - # getFhours - get sorted list of all forecast hour times stored - # for inputParm and model. Can be helpful when - # figuring out all possible times, By default, gets - # forecast hours from all cycle times - but if - # cycle time is non-negative, then it figures all - # the forecast times that have the specified - # cycle-time. - # - # if model is in OBSMODELS list, then start times are - # the same as base times, and only 0 should be in the - # list. - # - # if no data file exists for inputParm and model, it - # returns an empty list. 
- # - def getFhours(self,inputParm,model,cycle=-1): - Fhours=[] - if not self.checkFile(inputParm,model): - return Fhours - if model in self.OBSMODELS: - Fhours.append(0) - return Fhours - fhr=(self.fncStime[:]-self.fncBtime[:])/HOURSECS - if cycle>=0: - cyclehrs=(self.fncBtime[:]-((self.fncBtime[:]/DAYSECS).astype(int)*DAYSECS))/HOURSECS - for i in range(self.fnumRecs): - if cyclehrs[i]==cycle: - if fhr[i] not in Fhours: - Fhours.append(fhr[i]) - else: - for i in range(self.fnumRecs): - if fhr[i] not in Fhours: - Fhours.append(fhr[i]) - Fhours.sort() - return Fhours - #================================================================= - # getFcstRecords - gets a sorted list of record numbers where the - # basetime is the same as the specified basetime - # - def getFcstRecords(self,parm,model,basetime): - Records=[] - ret=self.checkFile(parm,model) - if ret==0: - return Records - if model in self.OBSMODELS: - use=equal(self.oncBtime[:],basetime) - a=compress(use,self.oncRecs) - else: - use=equal(self.fncBtime[:],basetime) - a=compress(use,self.fncRecs) - Records=list(a) - #for i in range(a.shape[0]): - # Records.append(a[i]) - return Records - #================================================================= - # getFcstHour - take a basetime and a starttime and calculate the - # number of hours between them - # - def getFcstHour(self,Btime,Stime): - hours=int(round(((Stime-Btime)/3600),0)) - return hours - #================================================================= - # getRecFcstHour - given a record in the forecast file, calculate - # the forecast hour, based on the basetime and - # starttime - # - def getRecFcstHour(self,rec): - rec = int(rec) - btime=self.fncBtime[rec] - stime=self.fncStime[rec] - return self.getFcstHour(btime,stime) - #================================================================= - # getVerTimeRange - given a starttime and endtime, create a - # TimeRange object covering this time - # - def getVerTimeRange(self,Stime,Etime): - 
start=AbsTime.AbsTime(Stime) - end=AbsTime.AbsTime(Etime) - tr=TimeRange.TimeRange(start,end) - return tr - #================================================================= - # readRecord - read and unpack a gridded data record. handles - # opening/closing files, and whether this is an - # observed model or a forecast model. If the parm - # is a vector, returns a tuple with mag,dir. If it - # cannot read - then it returns None - # - # Note vertical flip of the grid is done so it's in the AWIPS II order. - # - def readRecord(self,parm,model,rec): - if self.checkFile(parm,model)==0: - return None - rec = int(rec) - if model in self.OBSMODELS: - if self.oncType==0: - vals=(self.oncValue[rec].astype(float)*self.oncScale[rec])+self.oncAddit[rec] - return flipud(vals) - else: - mags=(self.oncValue[rec].astype(float)*self.oncScale[rec])+self.oncAddit[rec] - dirs=(self.oncValue1[rec].astype(float)*self.oncScale1[rec])+self.oncAddit1[rec] - return (flipud(mags), flipud(dirs)) - else: - if self.fncType==0: - vals=(self.fncValue[rec].astype(float)*self.fncScale[rec])+self.fncAddit[rec] - return flipud(vals) - else: - mags=(self.fncValue[rec].astype(float)*self.fncScale[rec])+self.fncAddit[rec] - dirs=(self.fncValue1[rec].astype(float)*self.fncScale1[rec])+self.fncAddit1[rec] - return (flipud(mags), flipud(dirs)) - #================================================================= - # packIt - convert a scalar grid into packed 16-bit integer - # equivalent, with a float scale and offset that can - # be used to get back all the data exactly. - # - def packIt(self,grid): - # - # convert the grid to its packed equivalent - # - minval=minimum.reduce(minimum.reduce(grid)) - maxval=maximum.reduce(maximum.reduce(grid)) - diff=maxval-minval - diff=diff+(0.05*diff) # make range a little wider so that roundoff - # will not make packed integers larger than - # what will fit. 
- Scale=diff/65534.0 - if Scale==0.0: - Scale=1.0 - Addit=(minval+maxval)/2.0 - Valgrid=((grid-Addit)/Scale).astype('h') - return(Scale,Addit,Valgrid) - #================================================================= - # writeVals - write gridded data. Overwrite data for same time - # if it already exists, or replace old data in the - # file (if any) or append it to the file. - # - # return 0 if a problem writing. Returns -1 if it - # skipped writing it because it matches what is - # already there. - # - def writeVals(self,parm,model,fcstrID,Btime,Stime,Etime,Grid): - self.logMsg("Starting writeVals in VerifyUtility",10) - # - # get datatype - # - datatype=self.getVerParmType(parm) - if datatype is None: - return 0 - # - # Check that the correct file is open - and ready to modify - # - if not self.checkFile(parm,model,modify=1): - return 0 - # - # flip grid to AWIPS I grid-point order (as in the netCDF file) - # - if datatype != 1: - Grid = flipud(Grid) - else: - Grid = flipud(Grid[0]), flipud(Grid[1]) - # - # - # - if datatype!=1: - (Scale,Addit,Valgrid)=self.packIt(Grid) - else: - (mag,direc)=Grid - (Scale, Addit, Valgrid )=self.packIt(mag) - (Scale1,Addit1,Valgrid1)=self.packIt(direc) - # - # Get forecaster number - # - if model=="Official": - fcstrNum=self.findFcstrNumFromID(fcstrID) - else: - fcstrNum=0 - # - # - # - recnum=-1 - overrec=-1 - oldest=time.time() - veryOld=int(time.time())-self.GRIDDAYS*DAYSECS - # - # Figure if this is for the Obs grid or a Fcst grid - # - if model in self.OBSMODELS: - # - # If nothing there - just adding record 0 - # - if self.onumRecs==0: - recnum=0 - # - # If there are records...see if one with the exact - # same times exists - if so we can overwrite. 
- # - if recnum<0: - s=equal(self.oncStime[:],Stime) - b=equal(self.oncBtime[:],Btime) - e=equal(self.oncEtime[:],Etime) - use=logical_and(logical_and(b,s),e) - if sometrue(sometrue(use)): - a=compress(use,self.oncRecs) - recnum=int(a[0]) - self.logMsg("existing record %d for that time"%recnum,10) - # - # if still no record found - find if there are any old - # ones to overwrite - # - if (recnum==-1): - overrec=int(argmin(self.oncStime[:],0)) - if self.DEBUG>0: - self.logMsg(" oldest record is %s"%overrec) - oldest=self.oncStime[overrec] - if oldest0: - self.logMsg(" and it is old enough to overwrite") - recnum=overrec - else: - if self.DEBUG>0: - self.logMsg(" but not old enough to overwrite") - # - # If STILL no record found - add to the current file - # - if (recnum==-1): - recnum=self.onumRecs - # - # Change the data - # - recnum = int(recnum) - self.oncFcstr[recnum,0]=fcstrNum - for i in range(1,self.MAXFCSTRS): - self.oncFcstr[recnum,i]=0 - self.oncBtime[recnum]=Btime - self.oncStime[recnum]=Stime - self.oncEtime[recnum]=Etime - self.oncVtime[recnum]=time.time() - self.oncScale[recnum]=Scale - self.oncAddit[recnum]=Addit - self.oncValue[recnum]=Valgrid - if datatype==1: - self.oncScale1[recnum]=Scale1 - self.oncAddit1[recnum]=Addit1 - self.oncValue1[recnum]=Valgrid1 - # - # If we added a record - need to increase - # the indicies and the number of records counter - # - if recnum==self.onumRecs: - self.onumRecs=self.onumRecs+1 - self.oncRecs=indices((self.onumRecs,))[0] - #self.oncIndex.sync() - #self.oncData.sync() - self.closeObsFile() - else: - # If nothing there - just adding record 0 - # - if self.fnumRecs==0: - recnum=0 - # - # If there are records...see if one with the exact - # same times exists - if so we can overwrite. 
- # - if recnum<0: - s=equal(self.fncStime[:],Stime) - b=equal(self.fncBtime[:],Btime) - e=equal(self.fncEtime[:],Etime) - use=logical_and(logical_and(b,s),e) - if sometrue(sometrue(use)): - a=compress(use,self.fncRecs) - recnum=int(a[0]) - self.logMsg("existing record %d for that time"%recnum,10) - issame=alltrue(alltrue(equal(self.fncValue[recnum],Valgrid))) - if issame: - self.logMsg(" is exactly the same",10) - self.logMsg(" updating archive time",10) - self.fncVtime[recnum]=time.time() - return -1 - # - # if still no record found - find if there are any old - # ones to overwrite - # - if (recnum==-1): - overrec=argmin(self.fncStime[:],0) - overrec = int(overrec) - if self.DEBUG>0: - self.logMsg(" oldest record is %s"%overrec) - oldest=self.fncStime[overrec] - if oldest0: - self.logMsg(" and it is old enough to overwrite") - recnum=overrec - else: - if self.DEBUG>0: - self.logMsg(" but not old enough to overwrite") - # - # If STILL no record found - add to the current file - # - if (recnum==-1): - recnum=self.fnumRecs - # - # Change the data - # - self.fncFcstr[recnum,0]=fcstrNum - for i in range(1,self.MAXFCSTRS): - self.fncFcstr[recnum,i]=0 - self.fncBtime[recnum]=Btime - self.fncStime[recnum]=Stime - self.fncEtime[recnum]=Etime - self.fncVtime[recnum]=time.time() - self.fncScale[recnum]=Scale - self.fncAddit[recnum]=Addit - self.fncValue[recnum]=Valgrid - if datatype==1: - self.fncScale1[recnum]=Scale1 - self.fncAddit1[recnum]=Addit1 - self.fncValue1[recnum]=Valgrid1 - # - # If we added a record - need to increase - # the indicies and the number of records counter - # - if recnum==self.fnumRecs: - self.fnumRecs=self.fnumRecs+1 - self.fncRecs=indices((self.fnumRecs,))[0] - self.closeFcstFile() - - - self.logMsg("Done with writeVals in VerifyUtility",10) - return 1 - #================================================================= - # getDoAgain - given a parm,model and btime, stime, etime - # - see if a record for stats already exists. 
If so, - # then get the time that those stats were calculated - # If the time was earlier than the observed or forecast - # grid savetime (ovtime, fvtime) then return 1 to - # indicate that we need to re-calculate these stats. - # If the the record for these stats to NOT exist - then - # we have to return 1 to indicate that these stats need - # to be calculated the first time. - # - def getDoAgain(self,parm,model,obsmodel,btime,stime,etime,ovtime,fvtime): - self.logMsg("Starting getDoAgain in VerifyUtility",5) - if not self.checkStats(parm,model,obsmodel): - return 1 - # - # If nothing in current stat file - need to add a record - # - if self.sncNumRecs==0: - return 1 - # - # If there are records...see if one with the exact - # same times exists - # - s=equal(self.sncStime[:],stime) - b=equal(self.sncBtime[:],btime) - e=equal(self.sncEtime[:],etime) - use=logical_and(logical_and(b,s),e) - if sometrue(sometrue(use)): - # - # get record number - # - a=compress(use,self.sncRecs) - recnum=int(a[0]) - # - # If the time of the stat save is after BOTH grid - # were saved - then do not need to do stats again - # - savetime=self.sncVtime[recnum] - if ((savetime>ovtime)and(savetime>fvtime)): - return 0 - return 1 - #================================================================= - # writeStats - write stat data. Overwrite data for same time if it - # already exists, or replace old data in the file (if any) - # or append it to the file. 
- # - def writeStats(self,parm,model,obsmodel,fcstrNums,Btime,Stime,Etime,Cycle, - Fhour,Stats): - self.logMsg("Starting writeStats in VerifyUtility",10) - if not self.checkStats(parm,model,obsmodel,modify=1): - return 0 - oldest=time.time() - veryOld=int(time.time())-self.STATDAYS*DAYSECS - # - # set record to missing (-1) - # - recnum=-1 - overrec=-1 - # - # If nothing in current stat file - just adding record 0 - # - if self.sncNumRecs==0: - recnum=0 - # - # If there are records...see if one with the exact - # same times exists - if so we can overwrite. - # - else: - s=equal(self.sncStime[:],Stime) - b=equal(self.sncBtime[:],Btime) - e=equal(self.sncEtime[:],Etime) - use=logical_and(logical_and(b,s),e) - if sometrue(sometrue(use)): - a=compress(use,self.sncRecs) - recnum=int(a[0]) - self.logMsg("overwriting existing record:%d"%recnum,5) - # - # if still no record found - find if there are any old - # ones to overwrite - # - if (recnum==-1): - overrec=int(argmin(self.sncStime[:],0)) - self.logMsg(" oldest record is %s"%overrec,10) - oldest=self.sncStime[overrec] - if oldest=0): - obslist.append(iobrec) - pairList.append((obslist,flists[obrec])) - return pairList - #================================================================== - # getCases - gets caseInfo structure...can either be for 'common - # cases' or normal. 
- # - def getCases(self,readParm,models,obsParm,obsmodel,dateStyle, - dateType,fromDay=0,numDays=0,dayList=[], - fcstrs=-1,cycles=-1,fhrStart=-48,fhrEnd=-48, - accumHours=12,accumFreq=12, - requireObs=1,commonCases=1,basetimeOffsets=0, - callbackMethod=None): - if commonCases==1: - caseInfo=self.getCommonCases(readParm,models,obsParm,obsmodel, - dateStyle,dateType,fromDay=fromDay,numDays=numDays, - dayList=dayList,fcstrs=fcstrs, - cycles=cycles,fhrStart=fhrStart, - fhrEnd=fhrEnd,accumHours=accumHours, - accumFreq=accumFreq,requireObs=requireObs, - basetimeOffsets=basetimeOffsets, - callbackMethod=callbackMethod) - else: - caseInfo={} - for model in models: - cases=self.getCommonCases(readParm,model,obsParm,obsmodel, - dateStyle,dateType,fromDay=fromDay,numDays=numDays, - dayList=dayList,fcstrs=fcstrs,cycles=cycles, - fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, - accumFreq=accumFreq,requireObs=requireObs, - basetimeOffsets=basetimeOffsets, - callbackMethod=callbackMethod) - caseInfo[model]=cases[model] - return caseInfo - #================================================================== - # getCommonCases - obtain dictionary of records for common cases. - # Keys are each model in modelList. The value for - # each model is, itself, a dictionary, with keys - # of 'basetime,starttime,endtime' and a value of - # a two-element list. The first element is, - # itself, a list - with records that make up - # the forecast, and the second element is, - # itself, a list - with records that make up the - # observation - # - # if obsRequired is zero - it will allow - # cases without observations...in which case the - # second list will be empty. 
- # - def getCommonCases(self,parm,models,obsParm,obsModel,dateStyle, - dateType,fromDay=0,numDays=0,dayList=[], - fcstrs=-1,cycles=-1,fhrStart=-48, - fhrEnd=-48,accumHours=12, - accumFreq=12,requireObs=1,basetimeOffsets=0, - callbackMethod=None): - self.logMsg("start getCommonCases",10) - finalCases={} - self.callbackMethod=callbackMethod - # - # Get all the verifing cases - # - self.internalMessage="Observations:" - obsCases=self.getObsCases(obsParm,obsModel,accumHours,accumFreq, - dateStyle,dateType,fromDay,numDays,dayList, - self.internCB) - # - # Check to see if stopping - # - if self.callbackMethod is not None: - exit=self.callbackMethod(self.internalMessage) - if exit==1: - return finalCases - # - obskeys=obsCases.keys() - numgrids=len(obskeys) - self.logMsg("Observed cases:%d"%numgrids,5) - obskeys.sort() - if self.getDebug()>=5: - for obskey in obskeys: - (st,en)=obskey.split(",") - (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(st)) - (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(en)) - self.logMsg(" Obs for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(syea, - smon,sday,shou,eyea,emon,eday,ehou),10) - # - allCases={} - # - # See if models is a list, or a single model - putting them into - # modelList for further processing - # - modelList=[] - if ((type(models) is types.ListType) or (type(models) is types.TupleType)): - for model in models: - modelList.append(model) - else: - modelList.append(models) - # - # Loop over all the models - # - totalmodels=len(modelList) - modelcount=0 - for model in modelList: - modelcount+=1 - if totalmodels>1: - self.internalMessage="%s (%d of %d):"%(model,modelcount,totalmodels) - else: - self.internalMessage="%s:"%model - cases=self.getModelCases(parm,model,fcstrs,cycles, - fhrStart,fhrEnd,accumHours, - accumFreq,dateStyle,dateType,fromDay,numDays, - dayList,self.internCB) - if self.callbackMethod is not None: - exit=self.callbackMethod(self.internalMessage) - if exit==1: - return 
finalCases - casekeys=cases.keys() - numgrids=len(casekeys) - self.logMsg("%s has %d potential cases"%(model,numgrids),5) - if self.getDebug()>=5: - casekeys.sort() - for casekey in casekeys: - (base,st,en)=casekey.split(",") - (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(int(base)) - (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(st)) - (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(en)) - self.logMsg(" Model potential from %4.4d/%2.2d/%2.2d %2.2dZ for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(byea, - bmon,bday,bhou,syea,smon,sday,shou,eyea,emon,eday,ehou),10) - # - # If obs are required...look through cases and make sure that the - # period is available in the obsCases retreived above - # - if requireObs!=0: - noobs=0 - for key in casekeys: - (base,rest)=key.split(",",1) - if rest not in obskeys: - (st,en)=rest.split(",") - (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(int(base)) - (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(st)) - (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(en)) - self.logMsg(" deleting case for no Obs - from %4.4d/%2.2d/%2.2d %2.2dZ for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(byea, - bmon,bday,bhou,syea,smon,sday,shou,eyea,emon,eday,ehou),10) - del cases[key] - noobs=noobs+1 - self.logMsg(" %d cases deleted because they have no obs"%noobs,5) - allCases[model]=cases - # - # With only 1 model common cases are easy! 
- # - if totalmodels==1: - finalkeys=allCases[model].keys() - # - # For mulitple models...Get keys for each model...convert them to - # the offsetBasetime (if basetimeOffsets==1)...and find the - # model with the fewest keys - # - else: - if self.callbackMethod is not None: - exit=self.callbackMethod("filtering through models") - if exit==1: - return finalCases - finalkeys=[] - modkeys={} - minmod="" - minkeys=-1 - for model in modelList: - realKeys=allCases[model].keys() - if basetimeOffsets==1: - baseOffset=self.getBaseOffset(model) - else: - baseOffset=0 - if baseOffset==0: - testKeys=realKeys - else: - testKeys=[] - for key in realKeys: - (basetimeStr,starttimeStr,endtimeStr)=key.split(",") - basetime="%d"%(int(basetimeStr)+(baseOffset*HOURSECS)) - newkey="%s,%s,%s"%(basetime,starttimeStr,endtimeStr) - testKeys.append(newkey) - modkeys[model]=testKeys - numkeys=len(modkeys[model]) - if ((minkeys==-1)or(numkeys=10: - for key in finalkeys: - (base,start,end)=key.split(",") - (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(int(base)) - (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(start)) - (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(end)) - self.logMsg(" %4.4d/%2.2d/%2.2d %2.2dZ for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(byea, - bmon,bday,bhou,syea,smon,sday,shou,eyea,emon,eday,ehou),10) - # - # Make the final case structure - with list of forecast/observed - # records for each case. If basetimeOffsets==1, then the finalkeys may - # need to be converted back to real keys for each model. The keys of - # returned lists will be with the offset 'basetimes'. 
- # - finalCases={} - for model in modelList: - cases={} - modCases=allCases[model] - if ((basetimeOffsets==1)and(totalmodels!=1)): - baseOffset=self.getBaseOffset(model) - else: - baseOffset=0 - for key in finalkeys: - if baseOffset!=0: - (offsetBasetime,starttime,endtime)=key.split(",") - realkey="%d,%s,%s"%(int(offsetBasetime)-(baseOffset*HOURSECS),starttime,endtime) - else: - realkey=key - frecList=modCases[realkey] - (base,rest)=key.split(",",1) - if rest in obskeys: - orecList=obsCases[rest] - else: - orecList=[] - cases[key]=[frecList,orecList] - finalCases[model]=cases - self.logMsg("end getCommonCases",10) - return finalCases - #================================================================== - # internCB - # - def internCB(self,message): - fullmessage="%s %s"%(self.internalMessage,message) - retval=0 - if self.callbackMethod is not None: - retval=self.callbackMethod(fullmessage) - return retval - #================================================================= - # getModelCases - return a dictionary for the specified model of - # forecast records for the specified periods. - # The keys are "basetime,starttime,endtime", and - # the values in the dictionary are lists of records - # (these are lists, because it can take multiple - # records to cover long time periods for accumulative - # parms, or probability parms). - # - def getModelCases(self,parm,model,fcstrs,cycles,fhrStart,fhrEnd, - accumHours,accumFreq,dateStyle, - dateType,fromDay,numDays,dayList, - callbackMethod=None): - self.logMsg("start getModelCases",10) - verType=self.getVerType(parm) - rateFlag=self.getRateFlag(model,parm) - cases={} - dateStyleLow=dateStyle.lower() - dateTypeLow=dateType.lower() - # - # Give up right away if you cant open the model file - # - if not self.checkFile(parm,model): - return cases - # - # Setup logical array with records that contain the right Forecaster, - # the right cycle, and the right forecast hours. If none...get out - # right away. 
- # - rightRecord=self.getFcstrCycleFhr(model,fcstrs,cycles,fhrStart, - fhrEnd) - numRecs=add.reduce(rightRecord) - self.logMsg("number of records with right forecaster, cycle, hours:%d"%numRecs,10) - # - # If a probability parm, or an accumulative parm, then find - # cases where forecasts completely coverred the possible - # periods. - # - if ((verType==1)or(rateFlag==1)): - # - # Get potential verifying periods for this accumHour,accumFreq - # combination with the dateStyle/dateType/fromDay/numDays/ - # dayList combination - # - verPeriods=self.createObsPeriods(accumHours,accumFreq,dateStyle, - dateType,fromDay,numDays,dayList) - numPeriods=len(verPeriods) - self.logMsg("number of periods:%d"%numPeriods,10) - # - # Loop over potential periods...and find matching records - # - count=0 - if dateStyleLow=="forecast on": - fromPeriods=self.createFromPeriods(dateType,fromDay,numDays,dayList) - for verPer in verPeriods: - count=count+1 - if callbackMethod is not None: - exit=callbackMethod("%d of %d"%(count,numPeriods)) - if exit==1: - return cases - (stime,etime)=verPer - totalTime=etime-stime - recmatch=logical_and(rightRecord, - logical_and(greater(self.fncEtime[:],stime), - less(self.fncStime[:],etime))) - # - # When there are matching records...find each basetime that - # forecast for this period - # - if sometrue(recmatch): - recnumberList=list(compress(recmatch,self.fncRecs)) - baselist=[] - for rec in recnumberList: - rec = int(rec) - if self.fncBtime[rec] not in baselist: - baselist.append(self.fncBtime[rec]) - # - # And for each basetime...see if the period was coverred - # by forecast grids - # - for base in baselist: - # - # - # - if dateStyleLow=="forecast on": - okbase=0 - for testper in fromPeriods: - (perstart,perend)=testper - if ((base>=perstart)and(base=totalTime: - key="%d,%d,%d"%(base,stime,etime) - cases[key]=reclist - # - # Other parms get forecast periods based on the forecast grids - # that were actually made - # - else: - if 
dateStyleLow=="verifying on": - if dateTypeLow=="period length": - if callbackMethod is not None: - exit=callbackMethod("1 of 1") - if exit==1: - return cases - recList=self.getObsPeriod(model,parm,fromDay,numDays,mask=rightRecord) - else: - recList=self.getObsList(model,parm,dayList,mask=rightRecord, - callbackMethod=callbackMethod) - else: - if dateTypeLow=="period length": - if callbackMethod is not None: - exit=callbackMethod("1 of 1") - if exit==1: - return cases - starttime=fromDay-((numDays-1)*DAYSECS) - endtime=fromDay+DAYSECS-1 - recList=self.listRecords(parm,model,starttime,endtime,"forecast",rightRecord) - else: - recList=[] - count=0 - totalDays=len(dayList) - for date in dayList: - count=count+1 - if callbackMethod is not None: - exit=callbackMethod("%d of %d"%(count,totalDays)) - if exit==1: - return cases - if type(date) is types.StringType: - try: - (yea,mon,day)=date.split("/") - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) - except: - continue - else: - starttime=date - endtime=starttime+DAYSECS-1 - tmprecs=self.listRecords(parm,model,starttime,endtime,"forecast",rightRecord) - for rec in tmprecs: - recList.append(rec) - # - # Now make case entries for each of these records - # - for rec in recList: - rec = int(rec) - base=self.fncBtime[rec] - stime=self.fncStime[rec] - etime=self.fncEtime[rec] - key="%d,%d,%d"%(base,stime,etime) - cases[key]=[rec] - self.logMsg("end getModelCases",10) - return cases - #================================================================= - # getObsCases - return a dictionary for the specified obs model of - # records for the specified periods. - # The keys are "starttime,endtime", and - # the values in the dictionary are lists of records - # (these are lists, because it can take multiple - # records to cover long time periods for accumulative - # parms, or probability parms). 
- # - def getObsCases(self,parm,model, - accumHours,accumFreq,dateStyle, - dateType,fromDay,numDays,dayList, - callbackMethod=None): - self.logMsg("start getObsCases",10) - cases={} - dateStyleLow=dateStyle.lower() - dateTypeLow=dateType.lower() - # - # Give up right away if you cant open the model file - # - if not self.checkFile(parm,model): - return cases - # - # If a probability parm, or an accumulative parm, then find - # cases where observations completely coverred the possible - # periods. - # - rateFlag=self.getRateFlag(model,parm) - if (rateFlag==1): - # - # Get potential verifying periods for this accumHour,accumFreq - # combination with the dateStyle/dateType/fromDay/numDays/ - # dayList combination - # - verPeriods=self.createObsPeriods(accumHours,accumFreq,dateStyle, - dateType,fromDay,numDays,dayList) - numPeriods=len(verPeriods) - self.logMsg("number of periods:%d"%numPeriods,10) - # - # Loop over potential periods...and find matching records - # - count=0 - for verPer in verPeriods: - count=count+1 - if callbackMethod is not None: - exit=callbackMethod("%d of %d"%(count,numPeriods)) - if exit==1: - return cases - (stime,etime)=verPer - totalTime=etime-stime - recmatch=logical_and(greater(self.oncEtime[:],stime), - less(self.oncStime[:],etime)) - # - # When there are matching records...find each basetime that - # forecast for this period - # - if sometrue(recmatch): - recnumberList=list(compress(recmatch,self.oncRecs)) - totcov=0 - for rec in recnumberList: - rec = int(rec) - recstart=self.oncStime[rec] - recend=self.oncEtime[rec] - cover=min(etime-recstart,recend-stime,recend-recstart) - totcov=totcov+cover - if totcov==totalTime: - key="%d,%d"%(stime,etime) - cases[key]=recnumberList - # - # Other parms get forecast periods based on the forecast grids - # that were actually made - # - else: - if dateStyleLow=="verifying on": - if dateTypeLow=="period length": - if callbackMethod is not None: - exit=callbackMethod("1 of 1") - if exit==1: - return 
cases - recList=self.getObsPeriod(model,parm,fromDay,numDays) - else: - recList=self.getObsList(model,parm,dayList,callbackMethod=callbackMethod) - else: - if dateTypeLow=="period length": - if callbackMethod is not None: - exit=callbackMethod("1 of 1") - if exit==1: - return cases - soff=self.getStartOffset(parm) - eoff=self.getEndOffset(parm) - starttime=fromDay-((numDays-1)*DAYSECS)+soff - endtime=fromDay+DAYSECS-1+(self.MAXFORECASTHOUR*HOURSECS)+eoff - recList=self.listRecords(parm,model,starttime, - endtime,"verify") - else: - recList=[] - soff=self.getStartOffset(parm) - eoff=self.getEndOffset(parm) - count=0 - totalDays=len(dayList) - for date in dayList: - count=count+1 - if callbackMethod is not None: - exit=callbackMethod("%d of %d"%(count,totalDays)) - if exit==1: - return cases - if type(date) is types.StringType: - try: - (yea,mon,day)=date.split("/") - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1)) - except: - continue - else: - starttime=date - endtime=starttime+(self.MAXFORECASTHOUR*HOURSECS)+eoff - starttime=starttime+soff - tmprecs=self.listRecords(parm,model,starttime, - endtime,"verify") - for rec in tmprecs: - recList.append(rec) - # - # Now make case entries for each of these records - # - for rec in recList: - rec = int(rec) - stime=self.oncStime[rec] - etime=self.oncEtime[rec] - key="%d,%d"%(stime,etime) - cases[key]=[rec] - self.logMsg("end getObsCases",10) - return cases - #================================================================== - # getStatCases - gets caseInfo structure for stats...can either be - # for 'common cases' or normal. 
- # - def getStatCases(self,parm,models,obsmodel,dateStyle, - dateType,fromDay=0,numDays=0,dayList=[], - fcstrs=-1,cycles=-1,fhrStart=-48,fhrEnd=-48, - accumHours=12,accumFreq=12, - commonCases=1,basetimeOffsets=0, - callbackMethod=None): - if commonCases==1: - caseInfo=self.getStatCommonCases(parm,models,obsmodel, - dateStyle,dateType,fromDay=fromDay,numDays=numDays, - dayList=dayList,fcstrs=fcstrs, - cycles=cycles,fhrStart=fhrStart, - fhrEnd=fhrEnd,accumHours=accumHours, - accumFreq=accumFreq, - basetimeOffsets=basetimeOffsets, - callbackMethod=callbackMethod) - else: - caseInfo={} - for model in models: - cases=self.getStatCommonCases(parm,model,obsmodel, - dateStyle,dateType,fromDay=fromDay,numDays=numDays, - dayList=dayList,fcstrs=fcstrs,cycles=cycles, - fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, - accumFreq=accumFreq, - basetimeOffsets=basetimeOffsets, - callbackMethod=callbackMethod) - caseInfo[model]=cases[model] - return caseInfo - #================================================================== - # getStatCommonCases - obtain dictionary of records for common cases. - # Keys are each model in modelList. The value for - # each model is, itself, a dictionary, with keys - # of 'basetime,starttime,endtime' and a value of - # a list of stat records. 
- # - def getStatCommonCases(self,parm,models,obsModel,dateStyle, - dateType,fromDay=0,numDays=0,dayList=[], - fcstrs=-1,cycles=-1,fhrStart=-48, - fhrEnd=-48,accumHours=12, - accumFreq=12,basetimeOffsets=0, - callbackMethod=None): - self.logMsg("start getStatCommonCases",10) - self.callbackMethod=callbackMethod - allCases={} - # - # See if models is a list, or a single model - putting them into - # modelList for further processing - # - modelList=[] - if ((type(models) is types.ListType) or (type(models) is types.TupleType)): - for model in models: - modelList.append(model) - else: - modelList.append(models) - # - # Loop over all the models - # - totalmodels=len(modelList) - modelcount=0 - for model in modelList: - modelcount+=1 - if totalmodels>1: - self.internalMessage="%s (%d of %d):"%(model,modelcount,totalmodels) - else: - self.internalMessage="%s:"%model - cases=self.getStatModelCases(parm,model,obsModel,dateStyle,dateType, - cycles=cycles,fcstrs=fcstrs,fhrStart=fhrStart,fhrEnd=fhrEnd, - fromDay=fromDay,numDays=numDays,dayList=dayList, - accumHours=accumHours,accumFreq=accumFreq, - callbackMethod=self.internCB) - casekeys=cases.keys() - numgrids=len(casekeys) - self.logMsg("%s has %d pre-calculated cases"%(model,numgrids),5) - allCases[model]=cases - # - # With only 1 model common cases are easy! 
- # - if totalmodels==1: - finalkeys=allCases[model].keys() - # - # For mulitple models...Get keys for each model...convert them to - # the offsetBasetime (if basetimeOffsets==1)...and find the - # model with the fewest keys - # - else: - if self.callbackMethod is not None: - exit=self.callbackMethod("filtering through models") - if exit==1: - finalCases={} - return finalCases - finalkeys=[] - modkeys={} - minmod="" - minkeys=-1 - for model in modelList: - realKeys=allCases[model].keys() - if basetimeOffsets==1: - baseOffset=self.getBaseOffset(model)*HOURSECS - else: - baseOffset=0 - if baseOffset==0: - testKeys=realKeys - else: - testKeys=[] - for key in realKeys: - (basetimeStr,starttimeStr,endtimeStr)=key.split(",") - basetime="%d"%(int(basetimeStr)+baseOffset) - newkey="%s,%s,%s"%(basetime,starttimeStr,endtimeStr) - testKeys.append(newkey) - modkeys[model]=testKeys - numkeys=len(modkeys[model]) - if ((minkeys==-1)or(numkeys=10: - for key in finalkeys: - (base,start,end)=key.split(",") - (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(int(base)) - (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(start)) - (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(end)) - self.logMsg(" %4.4d/%2.2d/%2.2d %2.2dZ for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(byea, - bmon,bday,bhou,syea,smon,sday,shou,eyea,emon,eday,ehou),10) - # - # Make the final case structure - with list of forecast/observed - # records for each case. If basetimeOffsets==1, then the finalkeys may - # need to be converted back to real keys for each model. The keys of - # returned lists will be with the offset 'basetimes'. 
- # - finalCases={} - for model in modelList: - cases={} - modCases=allCases[model] - if ((basetimeOffsets==1)and(totalmodels!=1)): - baseOffset=self.getBaseOffset(model)*HOURSECS - else: - baseOffset=0 - for key in finalkeys: - if baseOffset!=0: - (offsetBasetime,starttime,endtime)=key.split(",") - realkey="%d,%s,%s"%(int(offsetBasetime)-baseOffset,starttime,endtime) - else: - realkey=key - cases[key]=modCases[realkey] - finalCases[model]=cases - self.logMsg("end getStatCommonCases",10) - return finalCases - #================================================================= - # getStatModelCases - return a dictionary for the specified model of - # forecast records for the specified periods. - # The keys are "basetime,starttime,endtime", and - # the values in the dictionary are lists of records - # (these are lists, because it can take multiple - # records to cover long time periods for accumulative - # parms, or probability parms). - # - def getStatModelCases(self,parm,model,obsmodel,dateStyle,dateType, - fromDay=0,numDays=0,dayList=[], - fcstrs=-1,cycles=-1,fhrStart=-48,fhrEnd=-48, - accumHours=12,accumFreq=12, - callbackMethod=None): - self.logMsg("start getStatModelCases",10) - verType=self.getVerType(parm) - rateFlag=self.getRateFlag(model,parm) - cases={} - dateStyleLow=dateStyle.lower() - dateTypeLow=dateType.lower() - # - # Give up right away if you cant open the model file - # - if not self.checkStats(parm,model,obsmodel): - return cases - # - # Setup logical array with records that contain the right Forecaster, - # the right cycle, and the right forecast hours. If none...get out - # right away. - # - rightRecord=self.getStatFcstrCycleFhr(model,fcstrs,cycles, - fhrStart,fhrEnd) - numRecs=add.reduce(rightRecord) - self.logMsg("number of records with right forecaster, cycle, hours:%d"%numRecs,10) - # - # If a probability parm, or an accumulative parm, then find - # cases where forecasts completely coverred the possible - # periods. 
- # - if ((verType==1)or(rateFlag==1)): - # - # Get potential verifying periods for this accumHour,accumFreq - # combination with the dateStyle/dateType/fromDay/numDays/ - # dayList combination - # - verPeriods=self.createObsPeriods(accumHours,accumFreq,dateStyle, - dateType,fromDay,numDays,dayList) - numPeriods=len(verPeriods) - self.logMsg("number of periods:%d"%numPeriods,10) - # - # Loop over potential periods...and find matching records - # - count=0 - for verPer in verPeriods: - count=count+1 - if callbackMethod is not None: - exit=callbackMethod("%d of %d"%(count,numPeriods)) - if exit==1: - return cases - (stime,etime)=verPer - totalTime=etime-stime - recmatch=logical_and(rightRecord, - logical_and(greater(self.sncEtime[:],stime), - less(self.sncStime[:],etime))) - # - # When there are matching records...find each basetime that - # forecast for this period - # - if sometrue(recmatch): - recnumberList=list(compress(recmatch,self.sncRecs)) - baselist=[] - for rec in recnumberList: - rec = int(rec) - if self.sncBtime[rec] not in baselist: - baselist.append(self.sncBtime[rec]) - # - # And for each basetime...see if the period was coverred - # by forecast grids - # - for base in baselist: - reclist=[] - totcov=0 - for rec in recnumberList: - rec = int(rec) - if self.sncBtime[rec]==base: - reclist.append(rec) - recstart=self.sncStime[rec] - recend=self.sncEtime[rec] - cover=min(etime-recstart,recend-stime,recend-recstart) - totcov=totcov+cover - if totcov==totalTime: - key="%d,%d,%d"%(base,stime,etime) - cases[key]=reclist - # - # Other parms get forecast periods based on the forecast grids - # that were actually made - # - else: - if dateStyleLow=="verifying on": - if dateTypeLow=="period length": - if callbackMethod is not None: - exit=callbackMethod("1 of 1") - if exit==1: - return cases - recList=self.getObsStatPeriod(model,parm,obsmodel,fromDay, - numDays,mask=rightRecord) - else: - recList=self.getObsStatList(model,parm,obsmodel,dayList, - mask=rightRecord, 
- callbackMethod=callbackMethod) - else: - if dateTypeLow=="period length": - if callbackMethod is not None: - exit=callbackMethod("1 of 1") - if exit==1: - return cases - starttime=fromDay-((numDays-1)*DAYSECS) - endtime=fromDay+DAYSECS-1+(self.MAXFORECASTHOUR*HOURSECS) - recList=self.listStatRecords(parm,model,obsmodel,starttime, - endtime,"forecast",rightRecord) - else: - recList=[] - count=0 - totalDays=len(dayList) - for date in dayList: - count=count+1 - if callbackMethod is not None: - exit=callbackMethod("%d of %d"%(count,totalDays)) - if exit==1: - return cases - if type(date) is types.StringType: - try: - (yea,mon,day)=date.split("/") - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) - except: - continue - else: - starttime=date - endtime=starttime+(self.MAXFORECASTHOUR*HOURSECS) - tmprecs=self.listStatRecords(parm,model,starttime,endtime, - "forecast",rightRecord) - for rec in tmprecs: - recList.append(rec) - # - # Now make case entries for each of these records - # - for rec in recList: - rec = int(rec) - base=self.sncBtime[rec] - stime=self.sncStime[rec] - etime=self.sncEtime[rec] - key="%d,%d,%d"%(base,stime,etime) - cases[key]=[rec] - self.logMsg("end getStatModelCases",10) - return cases - #================================================================= - # getVerifyingTimeRanges - get list of time periods (start/end) - # that match date criteria and actually have observed data - # - def getVerifyingTimeRanges(self,obsParm,obsModel,dataType, - rateFlag,accumHours,dateStyle,dateType, - numDays,fromDay,dayList): - self.logMsg("start getVerifyingTimeRanges",10) - perList=[] - if not self.checkFile(obsParm,obsModel): - self.logMsg("could not open %s file for %s"%(obsParm,obsModel),10) - return perList - dateStyleLow=dateStyle.lower() - dateTypeLow=dateType.lower() - # - # If a probability parm, or an accumulative parm, then we - # cannot use the observed records for the time range - but - # must create timePeriod blocks for the 
specified times. - # - if ((dataType==1)or(rateFlag==1)): - periods=self.createObsPeriods(accumHours,dateStyle,dateType, - fromDay,numDays,dayList) - for per in periods: - (start,end)=per - verList=self.getVerGridInfo(obsModel,start,obsParm,start,end) - if self.isCoverred(start,end,verList): - perList.append((start,end)) - # - # Other parms get verifying periods based on the observed grids - # that actually exist - # - else: - if dateStyleLow=="verifying on": - if dateTypeLow=="period length": - obrecs=self.getObsPeriod(obsModel,obsParm,fromDay,numDays) - else: - obrecs=self.getObsList(obsModel,obsParm,dayList) - else: - if dateTypeLow=="period length": - (yea,mon,day)=fromDay.split("/") - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))-((numDays-1)*DAYSECS) - endtime=starttime+(numDays*DAYSECS)+(self.MAXFORECASTHOUR*HOURSECS) - obrecs=self.listRecords(obsModel,obsParm,starttime,endtime,"verify") - else: - obrecs=[] - for date in dayList: - if type(date) is types.StringType: - try: - (yea,mon,day)=date.split("/") - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) - except: - continue - else: - starttime=date - endtime=starttime+(self.MAXFORECASTHOUR*HOURSECS)+eoff - starttime=starttime+soff - tmprecs=self.listRecords(obsModel,obsParm,starttime,endtime,"verify") - for obrec in tmprecs: - if obrec not in obrecs: - obrecs.append(obrec) - obrecs.sort(lambda x,y: cmp(self.oncStime[x],self.oncStime[y])) - # - # Add time ranges to list - # - for obrec in obrecs: - start=self.oncStime[obrec] - end=self.oncEtime[obrec] - perList.append((start,end)) - - if self.DEBUG>=10: - for per in perList: - (start,end)=per - (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(start) - (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(end) - self.logMsg(" %4.4d/%2.2d/%2.2d %2.2d --> %4.4d/%2.2d/%2.2d %2.2d"%(gyea,gmon,gday,ghou, - eyea,emon,eday,ehou),10) - self.logMsg("end getVerifyingTimeRanges",10) - return perList - 
#================================================================= - # createObsPeriods - make list of time periods that are accumHours - # width, and start with accumFreq frequency, - # and cover the time periods requested. Each - # period in the list is (start,end) times. - # time periods of accumHours width... - # coverring the time periods specified by - # dateStyle, dateType, and fromDay/numDays - # or dayList. - # - def createObsPeriods(self,accumHours,accumFreq,dateStyle,dateType, - fromDay,numDays,dayList): - self.logMsg("start createObsPeriods",10) - accumTime=accumHours*HOURSECS - accumFreqSecs=accumFreq*HOURSECS - periods=[] - dateStyleLow=dateStyle.lower() - dateTypeLow=dateType.lower() - if dateStyleLow=="verifying on": - if dateTypeLow=="period length": - endtime=fromDay+DAYSECS - starttime=fromDay-((numDays-1)*DAYSECS) - for pstart in range(starttime,endtime,accumFreqSecs): - pend=pstart+accumTime - if pend<=endtime: - periods.append((pstart,pend)) - else: - dayList.sort() - timeRanges=[] - lastend=0 - for date in dayList: - if type(date) is types.StringType: - try: - (yea,mon,day)=date.split("/") - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) - except: - continue - else: - starttime=date - endtime=starttime+DAYSECS - if starttime!=lastend: - timeRanges.append((starttime,endtime)) - lastend=endtime - else: - (sl,el)=timeRanges[-1] - timeRanges[-1]=(sl,endtime) - lastend=endtime - for timerange in timeRanges: - (starttime,endtime)=timerange - for pstart in range(starttime,endtime,accumFreqSecs): - pend=pstart+accumTime - if pend<=endtime: - periods.append((pstart,pend)) - else: - if dateTypeLow=="period length": - starttime=fromDay-((numDays-1)*DAYSECS) - endtime=starttime+(numDays*DAYSECS)+(self.MAXFORECASTHOUR*HOURSECS) - for pstart in range(starttime,endtime,accumFreqSecs): - pend=pstart+accumTime - if pend<=endtime: - periods.append((pstart,pend)) - else: - dayList.sort() - starts=[] - for date in dayList: - if type(date) is 
types.StringType: - try: - (yea,mon,day)=date.split("/") - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) - except: - continue - else: - starttime=date - endtime=starttime+(self.MAXFORECASTHOUR*HOURSECS) - for pstart in range(starttime,endtime,accumFreqSecs): - if pstart not in starts: - pend=pstart+accumTime - if pend<=endtime: - periods.append((pstart,pend)) - starts.append(pstart) - self.logMsg("end createObsPeriods",10) - return periods - #================================================================= - # - def createFromPeriods(self,dateType,fromDay,numDays,dayList): - periods=[] - dateTypeLow=dateType.lower() - if dateTypeLow=="period length": - starttime=fromDay-((numDays-1)*DAYSECS) - endtime=fromDay+DAYSECS - periods.append((starttime,endtime)) - else: - for date in dayList: - if type(date) is types.StringType: - try: - (yea,mon,day)=date.split("/") - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) - except: - continue - else: - starttime=date - endtime=starttime+DAYSECS - periods.append((starttime,endtime)) - return periods - #================================================================= - # getFcstrCycleFhr - get logical array of records that have the - # rightFcstr,rightCycle,rightFhr - # - def getFcstrCycleFhr(self,model,fcstrs,cycles,fhrStart,fhrEnd): - self.logMsg("start getFcstCycleFhr",10) - # - # Get logical array of records with right forecaster - # - fcstrList=[] - if ((type(fcstrs) is types.TupleType)or(type(fcstrs) is types.ListType)): - for fcstr in fcstrs: - fcstrList.append(fcstr) - else: - fcstrList.append(fcstrs) - - if ((model!="Official")or(-1 in fcstrList)): - rightFcstr=ones(self.fncFcstr.shape[0]) - else: - rightFcstr=zeros(self.fncFcstr.shape[0]) - for fnum in fcstrList: - rightFcstr=logical_or(sometrue(equal(self.fncFcstr[:],fnum),-1),rightFcstr) - # - # Get logical array of records with right cycle - # - cycleList=[] - if ((type(cycles) is types.TupleType)or(type(cycles) is 
types.ListType)): - for cycle in cycles: - if type(cycle) is types.StringType: - cycleList.append(int(cycle)) - else: - cycleList.append(cycle) - else: - if type(cycles) is types.StringType: - cycleList.append(int(cycles)) - else: - cycleList.append(cycles) - if (-1 in cycleList): - rightCycle=ones(self.fncBtime.shape, dtype=bool) - else: - rightCycle=zeros(self.fncBtime.shape, dtype=bool) - rem = remainder(self.fncBtime[:], DAYSECS).astype('i') - for cycle in cycleList: - cyc=cycle*HOURSECS - rightCycle[equal(rem,cyc)] = True - # - # get logical array of records with right forecast hours - # - if fhrEnd<0: - fhrEnd=self.MAXFORECASTHOUR - fhr=(self.fncStime[:]-self.fncBtime[:])/float(HOURSECS) - rightFhr=logical_and(greater_equal(fhr,fhrStart),less_equal(fhr,fhrEnd)) - # - # return combined logical array - # - rightRecord=logical_and(logical_and(rightFcstr,rightCycle),rightFhr) - self.logMsg("end getFcstCycleFhr",10) - return rightRecord - #================================================================= - # getStatFcstrCycleFhr - get logical array of statistic records - # that have the rightFcstr,rightCycle, - # rightFhr - # - def getStatFcstrCycleFhr(self,model,fcstrs,cycles,fhrStart,fhrEnd): - self.logMsg("start getStatFcstCycleFhr",10) - # - # Get logical array of records with right forecaster - # - fcstrList=[] - ftype=type(fcstrs) - if ((ftype is types.TupleType)or(ftype is types.ListType)): - for fcstr in fcstrs: - fcstrList.append(fcstr) - else: - fcstrList.append(fcstrs) - if ((model!="Official")or(-1 in fcstrList)): - rightFcstr=ones(self.sncFcstr.shape[0], dtype=bool) - else: - rightFcstr=zeros(self.sncFcstr.shape[0], dtype=bool) - for fnum in fcstrList: - rightFcstr[sometrue(equal(self.sncFcstr[:],fnum),-1)] = True - # - # Get logical array of records with right cycle - # - cycleList=[] - ctype=type(cycles) - if ((ctype is types.TupleType)or(ctype is types.ListType)): - for cycle in cycles: - if type(cycle) is types.StringType: - 
cycleList.append(int(cycle)) - else: - cycleList.append(cycle) - else: - if type(cycles) is types.StringType: - cycleList.append(int(cycles)) - else: - cycleList.append(cycles) - if (-1 in cycleList): - rightCycle=ones(self.sncBtime.shape, dtype=bool) - else: - rightCycle=zeros(self.sncBtime.shape, dtype=bool) - rem = remainder(self.sncBtime[:], DAYSECS).astype('i') - for cycle in cycleList: - cyc=cycle*HOURSECS - rightCycle[equal(rem,cyc)] = True - # - # get logical array of records with right forecast hours - # - fhr=(self.sncStime[:]-self.sncBtime[:])/float(HOURSECS) - rightFhr=logical_and(greater_equal(fhr,fhrStart),less_equal(fhr,fhrEnd)) - # - # return combined logical array - # - rightRecord=logical_and(logical_and(rightFcstr,rightCycle),rightFhr) - self.logMsg("end getFcstCycleFhr",10) - return rightRecord - #================================================================== - # getRateFlag - given a model name and parm name, return a flag - # with 1 if this is a rateParm. If parms can't be - # found - return 0, just like if a parm isn't a - # rateParm - # - def getRateFlag(self,model,parm): - parmData=self.getParm(model,parm,"SFC") - if parmData is not None: - rateFlag=parmData.getGridInfo().isRateParm() - return rateFlag - else: - return 0 - #================================================================= - # getStatID - given a stat name like "Areal POD", return a - # consistent, lowercase, unique statID used elsewhere - # in the system. 
Return None if not valid - # - def getStatID(self,statName): - # - # make statName lowercase - # - statname=statName.lower() - # - # Check that the name is somewhere in all the stat names - # - if statname not in self.allStats: - return None - # - # Find the ID that contains this statname - # - statID="" - for testID in self.statIDs: - if statname in self.statNames[testID]: - statID=testID - break - if statID=="": - return None - return statID - #================================================================== - # getVerStat - Main routine to get a statistic from BOIVerify - # for a particular model-run for a particular time - # for a particular area. Tries to get it from the - # stat database if it can - otherwise tries to - # calculate it from the grids - # - # for vectors, if vectorType is: - # -1 Calculate the stat on the magnitude of the - # vector error. - # 0 Calculate the stat on the magnitude error - # 1 Calculate the stat on the direction error - # - def getVerStat(self,model,basetime,parm,trStart,trEnd,obsmodel, - statName,statVal=0,statCond="",editArea=None, - smooth=0,vectorType=-1,forceCalc=0,srecList=None, - grecList=None): - self.logMsg("start getVerStat",10) - retVal=None - # - # Check for stats we know how to calculate - # - statID=self.getStatID(statName) - if statID is None: - self.logMsg("unknown statName:%s"%statName,2) - return retVal - # - # Stuff about the parm - # - rateFlag=self.getRateFlag(model,parm) - verType=self.getVerType(parm) - obsParm=self.getObsParm(parm) - dataType=self.getVerParmType(parm) - # - # if editArea is None - calculate for the whole grid - # - if editArea is None: - editArea = self.encodeEditArea(self.__refSetMgr.fullRefSet()) - # - # if editArea is an array - then it must be a mask of - # points to calculate over - # - if type(editArea) is ndarray: - eaGrid=editArea - # - # If editArea is a string...see if BOIVerify pre-calculates - # statistics for that editArea name - # - elif type(editArea) is StringType: - 
eas=self.listEditAreas() - if editArea in eas: - eaNum=self.getEditAreaNumberFromName(editArea) - if (forceCalc==0)and(smooth==0): - if dataType==1: - if vectorType==0: - statParm=parm+"Spd" - elif vectorType==1: - statParm=parm+"Dir" - else: - statParm=parm - else: - statParm=parm - retVal=self.readVerStat(model,basetime,statParm,trStart, - trEnd,obsmodel,eaNum, - statID,statVal,vectorType=vectorType, - srecList=srecList) - if retVal is not None: - self.logMsg("got the stat from saved stats",5) - return retVal - else: - self.logMsg("tried to get it from saved stats - but failed",5) - else: - self.logMsg("not a saved edit area",2) - # - # See if the named editArea even exists in the GFE system - # - allEditAreaNames=self.editAreaList() - if editArea not in allEditAreaNames: - self.logMsg("editArea %s does not exist"%editArea,2) - return retVal - eaGrid=self.encodeEditArea(editArea) - else: - self.logMsg("invalid type of editArea provided to getVerStat") - return retVal - # - # OK...We have to calculate the stat over the eaGrid mask - # Make sure that it has at least 1 point. 
- # - numpts=add.reduce(add.reduce(eaGrid)) - if numpts<1: - self.logMsg("No points specified - so no stats",2) - return retVal - fnum=float(numpts) - # - # If some records were provided...split them out - # - if grecList is not None: - (frecList,orecList)=grecList - else: - frecList=None - orecList=None - # - # Get obs grid - # - if rateFlag==1: - gridMode="Sum" - else: - gridMode="TimeWtAverage" - obsGrid=self.getVerGrids(obsmodel,trStart,obsParm,trStart, - trEnd,mode=gridMode,recList=orecList) - if obsGrid is None: - self.logMsg("could not read observed %s grid for %s"%(obsmodel,obsParm),2) - return retVal - if (dataType==1): - (mag,direc)=obsGrid - if mag is None: - self.logMsg("could not read observed %s grid for %s"%(obsmodel,obsParm),2) - return retVal - # - # get Fcst grid - # - if verType==1: - gridMode="Max" - else: - if rateFlag==1: - gridMode="Sum" - else: - gridMode="TimeWtAverage" - fcstGrid=self.getVerGrids(model,basetime,parm,trStart,trEnd, - mode=gridMode,recList=frecList) - if fcstGrid is None: - self.logMsg("could not read %s grid for %s"%(model,parm),2) - return retVal - if (dataType==1): - (mag,direc)=fcstGrid - if mag is None: - self.logMsg("could not read observed %s grid for %s"%(model,parm),2) - return retVal - # - # Basic point stats - # - if statID in ["bias","mae","rms","mse","peb"]: - # - # Handle various types of vector errors - # - vectorErr=0 - if dataType==1: - (omag,odirec)=obsGrid - (fmag,fdirec)=fcstGrid - if vectorType==0: - obsGrid=omag - fcstGrid=fmag - elif vectorType==1: - obsGrid=odirec - fcstGrid=fdirec - err=fcstGrid-obsGrid - err=where(greater(err,180.0),360.0-err,err) - err=where(less(err,-180.0),-(360.0+err),err) - vectorErr=1 - else: - (ou,ov)=self.MagDirToUV(omag,odirec) - (fu,fv)=self.MagDirToUV(fmag,fdirec) - (mag,direc)=self.UVToMagDir(fu-ou,fv-ov) - err=mag - vectorErr=1 - # - # If smoothing is on...smooth obs and fcst grids - # inside the edit area... 
- # but for direction errors, and magnitude of - # vector errors (vectorErr==1)...smooth the - # errors, not the obs/fcst grids. - # - if smooth>0: - ismooth=int(smooth) - if vectorErr==0: - obsGrid=self.smoothpm(obsGrid,ismooth,mask=eaGrid) - fcstGrid=self.smoothpm(fcstGrid,ismooth,mask=eaGrid) - else: - err=self.smoothpm(err,ismooth,mask=eaGrid) - # - # For probability parms - need to calculate the obs grid - # based on the observed parameter - # - if verType==1: - cond=self.getObsCondition(parm) - thresh=self.getObsThreshold(parm) - if cond==">": - obsGrid=greater(obsGrid,thresh)*100 - elif cond==">=": - obsGrid=greater_equal(obsGrid,thresh)*100 - elif cond=="<": - obsGrid=less(obsGrid,thresh)*100 - elif cond=="<=": - obsGrid=less_equal(obsGrid,thres)*100 - # - # get the error - but vector err magnitude has already - # been done...so don't do that... - # - if vectorErr==0: - err=where(eaGrid,fcstGrid-obsGrid,float32(0)) - else: - err=where(eaGrid,err,float32(0)) - # - # Now all the stat calculations - # - if statID=="bias": - retVal=add.reduce(add.reduce(err))/fnum - return retVal - if statID=="mae": - err=where(less(err,0.0),-err,err) - retVal=add.reduce(add.reduce(err))/fnum - return retVal - if statID=="rms": - err=err*err - retVal=sqrt(add.reduce(add.reduce(err))/fnum) - return retVal - if statID=="mse": - err=err*err - retVal=add.reduce(add.reduce(err))/fnum - return retVal - if statID=="peb": - err=where(less(err,0.0),-err,err) - good=logical_and(less(err,statVal),eaGrid) - retVal=add.reduce(add.reduce(good))/fnum - return retVal - elif statID in ["fc","afc","freqo","freqf","freqbias","afreqbias","pod","apod","far","afar", - "pofd","apofd","ts","ats","ets","aets","hk","ahk", - "hss","ahss","oddsratio","aoddsratio","hits","ahits", - "miss","amiss","fals","afals","corn","acorn","cont","acont"]: - # - # threshold for vectors is with magnitude - # - if dataType==1: - (omag,odirec)=obsGrid - (fmag,fdirec)=fcstGrid - obsGrid=omag - fcstGrid=fmag - # - # If 
smoothing is on...smooth obs and fcst grids - # inside the edit area - # - if statName[0:1]!="a": - if smooth>0: - ismooth=int(smooth) - obsGrid=self.smoothpm(obsGrid,ismooth,mask=eaGrid) - fcstGrid=self.smoothpm(fcstGrid,ismooth,mask=eaGrid) - # - # Get grids of yes/no forecast/occurrence - # - if statCond==">": - obsOccur=greater(obsGrid,statVal) - fcstOccur=greater(fcstGrid,statVal) - elif statCond==">=": - obsOccur=greater_equal(obsGrid,statVal) - fcstOccur=greater_equal(fcstGrid,statVal) - elif statCond=="<": - obsOccur=less(obsGrid,statVal) - fcstOccur=less(fcstGrid,statVal) - elif statCond=="<=": - obsOccur=less_equal(obsGrid,statVal) - fcstOccur=less_equal(fcstGrid,statVal) - # - # do neighborhood look here - # - if statName[0:1]=="a": - if smooth>0: - ismooth=int(smooth) - obsOccur=self.arealOccur(obsOccur,ismooth,mask=eaGrid) - fcstOccur=self.arealOccur(fcstOccur,ismooth,mask=eaGrid) - # - # Calculate hits/misses/falsealarms/correctnegatives - # - notFcst=logical_not(fcstOccur) - notObs=logical_not(obsOccur) - hits=count_nonzero(logical_and(eaGrid,logical_and(fcstOccur,obsOccur))) - miss=count_nonzero(logical_and(eaGrid,logical_and(notFcst,obsOccur))) - falr=count_nonzero(logical_and(eaGrid,logical_and(fcstOccur,notObs))) - corn=count_nonzero(logical_and(eaGrid,logical_and(notFcst,notObs))) - total=hits+miss+falr+corn - if abs(float(total)-fnum)>0.5: - self.logMsg("Number in binary histogram not the same as number of points") - return 0.0 - # - # Get the Binary stat and return it - # - ret=self.getBinaryStat(statID,hits,miss,falr,corn) - return ret - else: - self.logMsg("Have not yet implemented stat:%s"%statName,0) - return retVal - #================================================================== - # getBinaryStat - given values of hits/miss/falr/corn, and a - # correct statID (it better be right!) - to the - # calculations and return a value. In cases where - # no forecasts have been made - return the perfect - # score! 
- # - def getBinaryStat(self,statID,hits,miss,falr,corn): - total=hits+miss+falr+corn - if statID in ["hits","ahits"]: - return hits - if statID in ["miss","amiss"]: - return miss - if statID in ["fals","afals"]: - return falr - if statID in ["corn","acorn"]: - return corn - if statID in ["cont","acont"]: - return (hits,miss,falr,corn) - if statID in ["fc","afc"]: - if total<1: - return 1.0 - return float(hits+corn)/float(total) - if statID in ["freqo",]: - if total<1: - return 1.0 - return float(hits+miss)/float(total) - if statID in ["freqf",]: - if total<1: - return 1.0 - return float(hits+falr)/float(total) - if statID in ["freqbias","afreqbias"]: - denom=hits+miss - if denom<1: - return 1.0 - return float(hits+falr)/float(denom) - if statID in ["pod","apod"]: - denom=hits+miss - if denom<1: - return 1.0 - return float(hits)/float(denom) - if statID in ["far","afar"]: - denom=falr+hits - if denom<1: - return 0.0 - return float(falr)/float(denom) - if statID in ["pofd","apofd"]: - denom=falr+corn - if denom<1: - return 0.0 - return float(falr)/float(denom) - if statID in ["ts","ats"]: - denom=hits+miss+falr - if denom<1: - return 1.0 - return float(hits)/float(denom) - if statID in ["ets","aets"]: - hitsrand=float((hits+miss)*(hits+falr))/float(total) - denom=hits+miss+falr-hitsrand - if ((denom>-0.1)and(denom<0.1)): - return 1.0 - return float(hits-hitsrand)/float(denom) - if statID in ["hk","ahk"]: - denom=falr+corn - if denom<1: - pofd=0.0 - else: - pofd=float(falr)/float(denom) - denom=hits+miss - if denom<1: - pod=1.0 - else: - pod=float(hits)/float(denom) - return pod-pofd - if statID in ["hss","ahss"]: - ecrand=float(((hits+miss)*(hits+falr))+((corn+miss)*(corn+falr)))/float(total) - denom=float(total)-ecrand - if ((denom>-0.1)and(denom<0.1)): - return 1.0 - return float(hits+corn-ecrand)/float(denom) - if statID in ["oddsratio","aoddsratio"]: - if ((hits==0)or(corn==0)or(falr==0)or(miss==0)): - return 200.0 - return float(hits*corn)/float(falr*miss) - 
return None - #================================================================== - # getGridBinaryStat - given grids of hits/miss/falr/corn, and a - # correct statID (it better be right!) - do - # the calculations and return a grid of results. - # In cases where no forecasts have been made - - # return the perfect score! - # - def getGridBinaryStat(self,statID,hits,miss,falr,corn): - total=hits+miss+falr+corn - if statID in ["hits","ahits"]: - return hits/1.0 - if statID in ["miss","amiss"]: - return miss/1.0 - if statID in ["fals","afals"]: - return falr/1.0 - if statID in ["corn","acorn"]: - return corn/1.0 - if statID in ["fc","afc"]: - nofcst=less(total,1) - total[nofcst] = 1 - score=(hits+corn)/total - score[nofcst] = 1.0 - return score - if statID in ["freqo",]: - nofcst=less(total,1) - total[nofcst] = 1 - score=(hits+miss)/total - score[nofcst] = 0.0 - return score - if statID in ["freqf",]: - nofcst=less(total,1) - total[nofcst] = 1 - score=(hits+falr)/total - score[nofcst] = 0.0 - return score - if statID in ["freqbias","afreqbias"]: - denom=hits+miss - nofcst=less(denom,1) - denom[nofcst] = 1 - score=(hits+falr)/denom - score[nofcst] = 1.0 - return score - if statID in ["pod","apod"]: - denom=hits+miss - nofcst=less(denom,1) - denom[nofcst] = 1 - score=hits/denom - score[nofcst] = 1.0 - return score - if statID in ["far","afar"]: - denom=falr+hits - nofcst=less(denom,1) - denom[nofcst] = 1 - score=falr/denom - score[nofcst] = 0.0 - return score - if statID in ["pofd","apofd"]: - denom=falr+corn - nofcst=less(denom,1) - denom[nofcst] = 1 - score=falr/denom - score[nofcst] = 0.0 - return score - if statID in ["ts","ats"]: - denom=hits+miss+falr - nofcst=less(denom,1) - denom[nofcst] = 1 - score=hits/denom - score[nofcst] = 1.0 - return score - if statID in ["ets","aets"]: - total[less(total,1)] = 1 - hitsrand=((hits+miss)*(hits+falr))/total - denom=hits+miss+falr-hitsrand - nofcst=logical_and(greater(denom,-0.1),less(denom,0.1)) - denom[nofcst] = 1 - 
score=(hits-hitsrand)/denom - score[nofcst] = 1.0 - return score - if statID in ["hk","ahk"]: - #pofd - denom=falr+corn - nofcst=less(denom,1) - denom[nofcst] = 1 - pofd=falr/denom - pofd[nofcst] = 0.0 - #pod - denom=hits+miss - nofcst=less(denom,1) - denom[nofcst] = 1 - pod=hits/denom - pod[nofcst] = 1.0 - score=pod-pofd - return score - if statID in ["hss","ahss"]: - total[less(total,1)] = 1 - ecrand=(((hits+miss)*(hits+falr))+((corn+miss)*(corn+falr)))/total - denom=total-ecrand - nofcst=logical_and(greater(denom,-0.1),less(denom,0.1)) - denom[nofcst] = 1 - score=(hits+corn-ecrand)/denom - score[nofcst] = 1.0 - return score - if statID in ["oddsratio","aoddsratio"]: - no1=logical_or(less(hits,0.5),less(corn,0.5)) - no2=logical_or(less(falr,0.5),less(miss,0.5)) - nofcst=logical_or(no1,no2) - denom=falr*miss - denom[less(denom,1)] = 1 - score=(hits*corn)/denom - score[nofcst] = 200.0 - return score - #================================================================== - # getVerStatScales - Main routine to get a calculate a statistic - # for many scales. Same as getVerStat except - # that smooth is provided as a list of smooth - # numbers. This is always calculated - never - # read from stat files. 
- # - def getVerStatScales(self,model,basetime,parm,trStart,trEnd,obsmodel, - statName,statVal=0,statCond="",editArea=None, - smoothList=[0,],vectorType=-1, - grecList=None): - self.logMsg("start getVerStatScales",10) - retVal=[] - # - # Check for stats we know how to calculate - # - statID=self.getStatID(statName) - if statID is None: - self.logMsg("unknown statName:%s"%statName,2) - return retVal - # - # Stuff about the parm - # - rateFlag=self.getRateFlag(model,parm) - verType=self.getVerType(parm) - obsParm=self.getObsParm(parm) - dataType=self.getVerParmType(parm) - # - # check if editArea is one that we routinely calculate - # - if editArea is None: - editArea = self.encodeEditArea(self.__refSetMgr.fullRefSet()) - - if type(editArea) is ndarray: - eaGrid=editArea - elif type(editArea) is StringType: - # - # get list of editAreas from smartScript - # - ealist=self.editAreaList() - if editArea not in ealist: - self.logMsg("editArea %s does not exist"%editArea,2) - return retVal - eaGrid=self.encodeEditArea(editArea) - else: - self.logMsg("invalid type of editArea provided to getVerStatScales") - return retVal - # - # OK...We have to calculate the stat from the grids - # - numpts=add.reduce(add.reduce(eaGrid)) - if numpts<1: - self.logMsg("No points specified - so no stats",2) - return retVal - fnum=float(numpts) - # - # If some records were provided...split them out - # - if grecList is not None: - (frecList,orecList)=grecList - else: - frecList=None - orecList=None - # - # Get obs grid - # - if rateFlag==1: - gridMode="Sum" - else: - gridMode="TimeWtAverage" - obsGrid=self.getVerGrids(obsmodel,trStart,obsParm,trStart, - trEnd,mode=gridMode,recList=orecList) - if obsGrid is None: - self.logMsg("could not read observed %s grid for %s"%(obsmodel,obsParm),2) - return retVal - if (dataType==1): - (mag,direc)=obsGrid - if mag is None: - self.logMsg("could not read observed %s grid for %s"%(obsmodel,obsParm),2) - return retVal - # - # get Fcst grid - # - if 
verType==1: - gridMode="Max" - else: - if rateFlag==1: - gridMode="Sum" - else: - gridMode="TimeWtAverage" - fcstGrid=self.getVerGrids(model,basetime,parm,trStart,trEnd, - mode=gridMode,recList=frecList) - if fcstGrid is None: - self.logMsg("could not read %s grid for %s"%(model,parm),2) - return retVal - if (dataType==1): - (mag,direc)=fcstGrid - if mag is None: - self.logMsg("could not read observed %s grid for %s"%(model,parm),2) - return retVal - # - # Loop over scales - # - for smooth in smoothList: - self.logMsg("smooth=%d"%smooth) - # - # Basic point stats - # - if statID in ["bias","mae","rms","mse","peb"]: - # - # Handle various types of vector errors - # - vectorErr=0 - if dataType==1: - (omag,odirec)=obsGrid - (fmag,fdirec)=fcstGrid - if vectorType==0: - obsGrid=omag - fcstGrid=fmag - elif vectorType==1: - obsGrid=odirec - fcstGrid=fdirec - err=fcstGrid-obsGrid - err=where(greater(err,180.0),360.0-err,err) - err=where(less(err,-180.0),-(360.0+err),err) - vectorErr=1 - else: - (ou,ov)=self.MagDirToUV(omag,odirec) - (fu,fv)=self.MagDirToUV(fmag,fdirec) - (mag,direc)=self.UVToMagDir(fu-ou,fv-ov) - err=mag - vectorErr=1 - # - # If smoothing is on...smooth obs and fcst grids - # inside the edit area... - # but for direction errors, and magnitude of - # vector errors (vectorErr==1)...smooth the - # errors, not the obs/fcst grids. 
- # - if smooth>0: - ismooth=int(smooth) - if vectorErr==0: - obsGrid=self.smoothpm(obsGrid,ismooth,mask=eaGrid) - fcstGrid=self.smoothpm(fcstGrid,ismooth,mask=eaGrid) - else: - err=self.smoothpm(err,ismooth,mask=eaGrid) - # - # For probability parms - need to calculate the obs grid - # based on the observed parameter - # - if verType==1: - cond=self.getObsCondition(parm) - thresh=self.getObsThreshold(parm) - if cond==">": - obsGrid=greater(obsGrid,thresh)*100 - elif cond==">=": - obsGrid=greater_equal(obsGrid,thresh)*100 - elif cond=="<": - obsGrid=less(obsGrid,thresh)*100 - elif cond=="<=": - obsGrid=less_equal(obsGrid,thres)*100 - # - # get the error - but vector err magnitude has already - # been done...so don't do that... - # - if vectorErr==0: - err=where(eaGrid,fcstGrid-obsGrid,float32(0)) - else: - err=where(eaGrid,err,float32(0)) - # - # Now all the stat calculations - # - if statID=="bias": - retVal.append(add.reduce(add.reduce(err))/fnum) - elif statID=="mae": - err=where(less(err,0.0),-err,err) - retVal.append(add.reduce(add.reduce(err))/fnum) - elif statID=="rms": - err=err*err - retVal.append(sqrt(add.reduce(add.reduce(err))/fnum)) - if statID=="mse": - err=err*err - retVal.append(add.reduce(add.reduce(err))/fnum) - if statID=="peb": - err=where(less(err,0.0),-err,err) - good=logical_and(less(err,statVal),eaGrid) - retVal.append(add.reduce(add.reduce(good))/fnum) - elif statID in ["fc","afc","freqo","freqf","freqbias","afreqbias","pod","apod","far","afar", - "pofd","apofd","ts","ats","ets","aets","hk","ahk", - "hss","ahss","oddsratio","aoddsratio","hits","ahits", - "miss","amiss","fals","afals","corn","acorn","cont","acont"]: - # - # threshold for vectors is with magnitude - # - if dataType==1: - (omag,odirec)=obsGrid - (fmag,fdirec)=fcstGrid - obsGrid=omag - fcstGrid=fmag - # - # If smoothing is on...smooth obs and fcst grids - # inside the edit area - # - if statName[0:1]!="a": - if smooth>0: - ismooth=int(smooth) - 
obsGrid=self.smoothpm(obsGrid,ismooth,mask=eaGrid) - fcstGrid=self.smoothpm(fcstGrid,ismooth,mask=eaGrid) - # - # Get grids of yes/no forecast/occurrence - # - if statCond==">": - obsOccur=greater(obsGrid,statVal) - fcstOccur=greater(fcstGrid,statVal) - elif statCond==">=": - obsOccur=greater_equal(obsGrid,statVal) - fcstOccur=greater_equal(fcstGrid,statVal) - elif statCond=="<": - obsOccur=less(obsGrid,statVal) - fcstOccur=less(fcstGrid,statVal) - elif statCond=="<=": - obsOccur=less_equal(obsGrid,statVal) - fcstOccur=less_equal(fcstGrid,statVal) - # - # do neighborhood look here - # - if statName[0:1]=="a": - if smooth>0: - ismooth=int(smooth) - obsOccur=self.arealOccur(obsOccur,ismooth,mask=eaGrid) - fcstOccur=self.arealOccur(fcstOccur,ismooth,mask=eaGrid) - # - # Calculate hits/misses/falsealarms/correctnegatives - # - notFcst=logical_not(fcstOccur) - notObs=logical_not(obsOccur) - hits=count_nonzero(logical_and(eaGrid,logical_and(fcstOccur,obsOccur))) - miss=count_nonzero(logical_and(eaGrid,logical_and(notFcst,obsOccur))) - falr=count_nonzero(logical_and(eaGrid,logical_and(fcstOccur,notObs))) - corn=count_nonzero(logical_and(eaGrid,logical_and(notFcst,notObs))) - total=hits+miss+falr+corn - if abs(float(total)-fnum)>0.5: - self.logMsg("Number in binary histogram not the same as number of points") - return 0.0 - # - # Get the Binary stat and return it - # - ret=self.getBinaryStat(statID,hits,miss,falr,corn) - retVal.append(ret) - else: - self.logMsg("Have not yet implemented stat:%s"%statName,0) - return retVal - #================================================================ - # readVerStat - - # - def readVerStat(self,model,basetime,element,trStart,trEnd, - obsmodel,eaNum,statName,statVal,vectorType=-1, - srecList=None): - self.logMsg("start readVerStat",10) - retVal=None - # - # Check for stats calculated by AutoCalc - # - if statName not in ["error","err","bias","absolute error", - "abs error","mae","root mean squared error", - "rms error","rms","mean 
squared error","mse", - "brier","peb","percent error below","percent err below", - "% error below","% err below"]: - return retVal - # - # Make sure file can be openned - # - dataType=self.getVerParmType(element) - parm=element - if dataType==1: - if vectorType==0: - parm=element+"Spd" - elif vectorType==0: - parm=element+"Dir" - if not self.checkStats(parm,model,obsmodel): - self.logMsg("Could not open stat file for %s using %s observations"%(model,obsmodel),2) - return retVal - # - # make sure any threshold stats use one of the thresholds - # we routinely calculate - # - if statName in ["peb","percent error below","percent err below", - "% error below","% err below"]: - threshNum=-1 - thresholds=self.getVerThresholds(element) - if dataType==1: - (threshMag,threshDir)=thresholds - if ((vectorType==-1)or(vectorType==0)): - thresholds=threshMag - else: - thresholds=threshDir - for i in range(len(thresholds)): - check=thresholds[i] - if statVal==check: - threshNum=i - break - if threshNum==-1: - self.logMsg("Not one of the normal thresholds",2) - return retVal - # - # Find the records - the most time costly part - # - if srecList is None: - self.logMsg("finding appropriate records",10) - recbase=equal(self.sncBtime[:],basetime) - recfit=logical_and(greater(self.sncEtime[:],trStart), - less(self.sncStime[:],trEnd)) - recmatch=logical_and(recbase,recfit) - recnumbers=compress(recmatch,self.sncRecs) - recnumberList=list(recnumbers) - self.logMsg("done finding appropriate records",10) - else: - self.logMsg("used provided records",10) - recnumberList=srecList - if len(recnumberList)<1: - return retVal - if len(recnumberList)>1: - recnumberList.sort(lambda x,y: cmp(self.sncStime[x],self.sncStime[y])) - # - # Read the needed stats - # - retVal=0.0 - count=0 - for rec in recnumberList: - stats=self.sncStats[rec,eaNum,:] - if statName in ["error","err","bias"]: - retVal=retVal+stats[0] - elif statName in ["absolute error","abs error","mae"]: - retVal=retVal+stats[2] - elif 
statName in ["root mean squared error","rms error","rms", - "mean squared error","mse","brier"]: - retVal=retVal+stats[1] - elif statName in ["peb","percent error below","percent err below", - "% error below","% err below"]: - retVal=retVal+stats[8+threshNum] - # - # If we had to read multiple records...then average over those - # - if len(recnumberList)>1: - retVal=retVal/float(len(recnumberList)) - if statName in ["root mean squared error","rms error","rms"]: - retVal=sqrt(retVal) - # - # ????? still need work here. Need to multiply by number of cases - # Need to figure out if the percent stats are right...etc. - # - self.logMsg("end readVerStat",10) - return retVal - #================================================================== - # getVerGridInfo - Similar to getGridInfo of SmartScript...but read - # from the verification archive. Difference is - # that here you must specify the model and the - # BASETIME of the model, rather than just asking - # for the latest version etc. It returns a list - # of tuples with info on each grid that intersects - # the time period. The tuple contains - # (recnum,starttime,endtime) and is sorted by - # the starttime. 
- # - def getVerGridInfo(self,model,basetime,element,stime,etime): - self.logMsg("getVerGridInfo Start",10) - # - # get parm type (scalar/vector) and set default return values - # - dataType=self.getVerParmType(element) - retVal=[] - # - # Make sure file for parm/model exists - # - if not self.checkFile(element,model): - self.logMsg("Could not open file",5) - return retVal - # - # Get list of records that intersect the timeRange - # - self.logMsg("finding grids that intersect",10) - if model not in self.OBSMODELS: - recbase=equal(self.fncBtime[:],basetime) - recfit=logical_and(greater(self.fncEtime[:],stime), - less(self.fncStime[:],etime)) - recmatch=logical_and(recbase,recfit) - recnumbers=compress(recmatch,self.fncRecs) - recnumberList=list(recnumbers) - recnumberList.sort(lambda x,y: cmp(self.fncStime[x],self.fncStime[y])) - for recnum in recnumberList: - retVal.append((recnum,self.fncStime[recnum],self.fncEtime[recnum])) - else: - recmatch=logical_and(greater(self.oncEtime[:],stime), - less(self.oncStime[:],etime)) - recnumbers=compress(recmatch,self.oncRecs) - recnumberList=list(recnumbers) - recnumberList.sort(lambda x,y: cmp(self.oncStime[x],self.oncStime[y])) - for recnum in recnumberList: - retVal.append((recnum,self.oncStime[recnum],self.oncEtime[recnum])) - # - # - #for ret in retVal: - # (rec,st,en)=ret - # self.logMsg(" (%d,%d,%d)"%(rec,st,en),10) - self.logMsg("getVerGridInfo - End",10) - return retVal - #================================================================== - # isCoverred(start,end,infoList) - given a start/end time and a list - # of getVerGridInfo about grids in this time period - return - # 1 or 0 if the start/end period is completely coverred. 
- # - def isCoverred(self,start,end,infoList): - self.logMsg("isCoverred",10) - if len(infoList)<1: - return 0 - total=end-start - totcov=0 - for info in infoList: - (rec,recstart,recend)=info - totcov=totcov+min(end-recstart,recend-start) - if totcov>=total: - return 1 - return 0 - #================================================================== - # getReadMode - figure out if parm is a rateParm...and set mode - # to "Sum" if it is. - # If not...and checkProb is set...figure out if the - # parm is a probability parm and set mode to - # "Max" if it is (floating PoP). - # Otherwise set to "Average" - # - def getReadMode(self,model,parmName,checkProb=1): - rateFlag=self.getRateFlag(model,parmName) - if (rateFlag==1): - readMode="Sum" - else: - readMode="TimeWtAverage" - if checkProb==1: - verType=self.getVerType(parmName) - if verType is not None: - if verType==1: - readMode="Max" - return readMode - #================================================================== - # getVerGrids - Similar to getGrids of SmartScript...but read from - # the verification archive. Difference is that here - # you must specify the model and the BASETIME of the - # model, rather than just asking for the latest - # version etc. There are other routines to help you - # figure out the basetime - # - # mode=TimeWtAverage - # Average - # Max - # Min - # Sum - # First - # List - # - # normally stime and etime define the time period - # for which you want grids. However, if recList - # is not None - then we assume that recList has - # a list of record numbers that span the desired - # time period - and we don't search for records - # that fit the stime/etime period. This saves - # considerable time - and the records are often - # known ahead of time from other routines. - # - # Note grids are flipped vertically to AWIPS II ordering. 
- # - def getVerGrids(self,model,basetime,element,stime,etime, - mode="TimeWtAverage",recList=None): - self.logMsg("getVerGrids Start",10) - # - # get parm type (scalar/vector) and set default return values - # - dataType=self.getVerParmType(element) - if mode=="List": - retVal=[] - else: - if dataType==0: - retVal=None - else: - retVal=(None,None) - # - # Make sure file for parm/model exists - # - if not self.checkFile(element,model): - self.logMsg("Could not open file",5) - return retVal - # - rateFlag=self.getRateFlag(model,element) - # - # Get list of records that intersect the timeRange - # - if recList is None: - self.logMsg("finding grids that intersect",10) - if model not in self.OBSMODELS: - recbase=equal(self.fncBtime[:],basetime) - recfit=logical_and(greater(self.fncEtime[:],stime), - less(self.fncStime[:],etime)) - recmatch=logical_and(recbase,recfit) - recnumbers=compress(recmatch,self.fncRecs) - recList=list(recnumbers) - recList.sort(lambda x,y: cmp(self.fncStime[int(x)],self.fncStime[int(y)])) - else: - recmatch=logical_and(greater(self.oncEtime[:],stime), - less(self.oncStime[:],etime)) - recnumbers=compress(recmatch,self.oncRecs) - recList=list(recnumbers) - recList.sort(lambda x,y: cmp(self.oncStime[int(x)],self.oncStime[int(y)])) - self.logMsg("number of intersecting grids:%d"%len(recList),10) - if len(recList)<1: - return retVal - # - # Loop over grids - # - totalWeights=0 - gridtot=self.empty() - utot=self.empty() - vtot=self.empty() - for rec in recList: - rec = int(rec) - self.logMsg("reading grid",5) - # - # get total hours in grid, and amount of grid that intersects - # time range - # - if model not in self.OBSMODELS: - gstime=self.fncStime[rec] - getime=self.fncEtime[rec] - else: - gstime=self.oncStime[rec] - getime=self.oncEtime[rec] - gridHours=float(getime-gstime)/float(HOURSECS) - intersectHours=float(min(etime-gstime,getime-stime,getime-gstime))/float(HOURSECS) - if dataType!=1: - if model not in self.OBSMODELS: - 
grid=(self.fncValue[rec].astype(float)*self.fncScale[rec])+self.fncAddit[rec] - else: - grid=(self.oncValue[rec].astype(float)*self.oncScale[rec])+self.oncAddit[rec] - - # flip to AWIPS II order - grid = flipud(grid) - - # - # If a rateParm - chop grid to only the piece being used - # - if rateFlag==1: - grid=grid*(float(intersectHours)/float(gridHours)) - # - # - # - if mode in ["TimeWtAverage","Average","Sum"]: - if len(recList)>1: - weight=1.0 - if mode=="TimeWtAverage": - weight=intersectHours - gridtot+=(grid*weight) - totalWeights+=weight - else: - retVal=grid - elif mode=="Max": - if retVal is None: - retVal=grid - else: - retVal=maximum(retVal,grid) - elif mode=="Min": - if retVal is None: - retVal=grid - else: - retVal=minimum(retVal,grid) - elif mode=="First": - if retVal is None: - retVal=grid - elif mode=="List": - retVal.append(grid) - else: - if model not in self.OBSMODELS: - mag= (self.fncValue[rec].astype(float)*self.fncScale[rec])+self.fncAddit[rec] - direc=(self.fncValue1[rec].astype(float)*self.fncScale1[rec])+self.fncAddit1[rec] - else: - mag= (self.oncValue[rec].astype(float)*self.oncScale[rec])+self.oncAddit[rec] - direc=(self.oncValue1[rec].astype(float)*self.oncScale1[rec])+self.oncAddit1[rec] - - # flip to AWIPS II order - mag = flipud(mag) - direc = flipud(direc) - - if mode in ["TimeWtAverage","Average","Sum"]: - if len(recList)>1: - (u,v)=self.MagDirToUV(mag,direc) - weight=1.0 - if mode=="TimeWtAverage": - weight=intersectHours - utot+=(u*weight) - vtot+=(v*weight) - totalWeights=totalWeights+weight - else: - retVal=(mag,direc) - elif mode=="Max": - if retVal[0] is None: - retVal=(mag,direc) - else: - newdir=where(greater(mag,retVal[0]),direc,retVal[1]) - newmag=maximum(retVal[0],mag) - retVal=(newmag,newdir) - elif mode=="Min": - if retVal[0] is None: - retVal=(mag,direc) - else: - newdir=where(less(mag,retVal[0]),direc,retVal[1]) - newmag=minimum(retVal[0],mag) - retVal=(newmag,newdir) - elif mode=="First": - if retVal[0] is None: - 
retVal=(mag,direc) - elif mode=="List": - retVal.append((mag,direc)) - # - # When we had averages/sums and were adding up... - # - if len(recList)>1: - if mode in ["TimeWtAverage","Average","Sum"]: - if dataType!=1: - retVal=gridtot - if mode in ["TimeWtAverage","Average"]: - retVal=retVal/totalWeights - else: - if mode in ["TimeWtAverage","Average"]: - utot=utot/totalWeights - vtot=vtot/totalWeights - (mag,direc)=self.UVToMagDir(utot,vtot) - retVal=(mag,direc) - # - # - # - self.logMsg("getVerGrids - End",10) - return retVal - #================================================================== - # getObsPeriod - get list of observed records within a period of - # Ndays (integer) ending on endDay - # - def getObsPeriod(self,model,parm,endDay,Ndays,mask=None): - obrecs=[] - eoff=self.getEndOffset(parm) - soff=self.getStartOffset(parm) - if type(endDay) is types.StringType: - try: - (yea,mon,day)=endDay.split("/") - endtime=calendar.timegm((int(yea),int(mon),int(day),23,59,59,0,0,-1))+eoff - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))-((Ndays-1)*DAYSECS)+soff - except: - return obrecs - else: - endtime=endDay+DAYSECS-1+eoff - starttime=endDay-((Ndays-1)*DAYSECS)+soff - obrecs=self.listRecords(parm,model,starttime,endtime,"verify",mask) - return obrecs - #================================================================== - # getObsStatPeriod - get list of observed records within a period of - # Ndays (integer) ending on endDay - # - def getObsStatPeriod(self,model,parm,obsmodel,endDay,Ndays,mask=None): - obrecs=[] - eoff=self.getEndOffset(parm) - soff=self.getStartOffset(parm) - if type(endDay) is types.StringType: - try: - (yea,mon,day)=endDay.split("/") - endtime=calendar.timegm((int(yea),int(mon),int(day),23,59,59,0,0,-1))+eoff - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))-((Ndays-1)*DAYSECS)+soff - except: - return obrecs - else: - endtime=endDay+DAYSECS-1+eoff - starttime=endDay-((Ndays-1)*DAYSECS)+soff - 
obrecs=self.listStatRecords(parm,model,obsmodel,starttime,endtime,"verify",mask) - return obrecs - #================================================================== - # getEndOffset - gets the END_OFFSET_HOURS for a parm (multiplied - # by HOURSECS so that the value returned is in seconds. If no - # END_OFFSET_HOURS is specified for the parm, then it returns - # zero. - # - def getEndOffset(self,parm): - eoff=0 - keys=self.END_OFFSET_HOURS.keys() - if parm in keys: - eoff=self.END_OFFSET_HOURS[parm]*HOURSECS - return eoff - #================================================================== - # getStartOffset - gets the START_OFFSET_HOURS for a parm - # (multiplied by HOURSECS so that the value returned is in - # seconds. If no START_OFFSET_HOURS is specified for the - # parm, then it returns zero. - # - def getStartOffset(self,parm): - soff=0 - keys=self.START_OFFSET_HOURS.keys() - if parm in keys: - soff=self.START_OFFSET_HOURS[parm]*HOURSECS - return soff - #================================================================== - # getObsList - get list of observed records within days listed in - # dayList (each day is "year/mon/day") with eoff and soff - # (seconds) added to the end and beginning respectively - # - def getObsList(self,model,parm,dayList,mask=None,callbackMethod=None): - obrecs=[] - eoff=self.getEndOffset(parm) - soff=self.getStartOffset(parm) - count=0 - totalcount=len(dayList) - for date in dayList: - count+=1 - if callbackMethod is not None: - exit=callbackMethod("%d of %d"%(count,totalcount)) - if exit==1: - return obrecs - if type(date) is types.StringType: - try: - (yea,mon,day)=date.split("/") - endtime=calendar.timegm((int(yea),int(mon),int(day),23,59,59,0,0,-1))+eoff - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))+soff - except: - continue - else: - starttime=date+soff - endtime=date+DAYSECS-1+eoff - recs=self.listRecords(parm,model,starttime,endtime,"verify",mask) - for rec in recs: - if rec not in obrecs: - 
obrecs.append(rec) - return obrecs - #================================================================== - # getObsStatList - get list of observed records within days listed in - # dayList (each day is "year/mon/day") with eoff and soff - # (seconds) added to the end and beginning respectively - # - def getObsStatList(self,model,parm,obsmodel,dayList,mask=None,callbackMethod=None): - obrecs=[] - eoff=self.getEndOffset(parm) - soff=self.getStartOffset(parm) - count=0 - totalcount=len(dayList) - for date in dayList: - count+=1 - if callbackMethod is not None: - exit=callbackMethod("%d of %d"%(count,totalcount)) - if exit==1: - return obrecs - if type(date) is types.StringType: - try: - (yea,mon,day)=date.split("/") - endtime=calendar.timegm((int(yea),int(mon),int(day),23,59,59,0,0,-1))+eoff - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))+soff - except: - continue - else: - starttime=date+soff - endtime=date+DAYSECS-1+eoff - recs=self.listStatRecords(parm,model,obsmodel,starttime,endtime,"verify",mask) - for rec in recs: - if rec not in obrecs: - obrecs.append(rec) - return obrecs - #================================================================== - # getForecasters - given a model, element, and records - open the - # correct grid file and get a list of forecaster - # numbers for that record. 
Will return an empty - # list if there are problems opening the file - # - def getForecasters(self,model,element,rec): - if not self.checkFile(element,model): - self.logMsg("Could not open file",5) - return [] - retVal=self.getRecFcstrs(rec) - return retVal - #================================================================== - # getRecFcstrs - given a record, get a list of forecaster numbers - # from the currently open forecast grid file - # - def getRecFcstrs(self,rec): - return list(self.fncFcstr[rec,:]) - #================================================================== - # trimFcstRecs - get rid of forecast records that do not match - # the cycle and forecaster lists - # - def trimFcstRecs(self,fcstrecs,model,cycles,fcstrIDlist,fhrStart=0, - fhrEnd=0): - fcstTrimmed=[] - # - # Get cycleList with integer hours - # - maxCycles=len(self.getCFG('ALLCYCLES')) - cycleList=[] - if ((type(cycles) is types.TupleType)or(type(cycles) is types.ListType)): - for cycle in cycles: - if type(cycle) is types.StringType: - cycleList.append(int(cycle)) - else: - cycleList.append(cycle) - else: - if type(cycles) is types.StringType: - cycleList.append(int(cycles)) - else: - cycleList.append(cycles) - for fcstrec in fcstrecs: - # - # skip forecasts from wrong cycle - # - if -1 not in cycleList: - btime=self.fncBtime[fcstrec] - basetuple=time.gmtime(btime) - if basetuple[3] not in cycleList: - continue - # - # skip forecasts for wrong forecaster - # - if "ALL" not in fcstrlist: - if model=="Official": - foundfcstr=0 - for j in range(self.MAXFCSTRS): - fnum=int(self.fncFcstr[fcstrec,j]) - fnumstr="%2.2d"%fnum - fid=self.FcstrIDs[fnumstr] - if fid in fcstrIDlist: - foundfcstr=1 - if foundfcstr==0: - continue - # - # skip forecasts outside of matching fhrs - # - fhr=int(float(self.fncStime[fcstrec]-self.fncBtime[fcstrec])/float(HOURSECS)) - if ((fhrfhrEnd)): - continue - # - # Add records that came this far - # - fcstTrimmed.append(fcstrec) - return fcstTrimmed - 
#================================================================== - # getFcstPeriod - get list of forecast records made within a period - # of Ndays (integer) ending on endDay(either a "year/mon/day" - # string, or a time integer) by model - # - def getFcstPeriod(self,inputParm,endDay,Ndays,model): - fcstrecs=[] - if type(endDay) is types.StringType: - try: - (yea,mon,day)=endDay.split("/") - startOfEndDay=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) - except: - return fcstrecs - else: - startOfEndDay=endDay - endtime=startOfEndDay+DAYSECS-1 - starttime=startOfEndDay-((Ndays-1)*DAYSECS) - fcstrecs=self.listRecords(inputParm,model,starttime,endtime,"forecast") - return fcstrecs - #================================================================== - # getFcstList - get list of forecast records made on days listed - # in dayList (each day is "year/mon/day") by model - # - def getFcstList(self,inputParm,dayList,model): - fcstrecs=[] - for date in dayList: - if type(date) is types.StringType: - try: - (yea,mon,day)=date.split("/") - starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) - except: - continue - else: - starttime=date - endtime=starttime+DAYSECS - recs=self.listRecords(inputParm,model,starttime,endtime,"forecast") - for rec in recs: - if rec not in fcstrecs: - fcstrecs.append(rec) - return fcstrecs - #================================================================= - # logMsg - writes a message to STDOUT with a date/time stamp - # and flushes immediately - # - def logMsg(self,msg,significance=0): - if significance<=self.DEBUG: - gmt=time.gmtime() - print "%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d:%s"%(gmt[0],gmt[1], - gmt[2],gmt[3],gmt[4],gmt[5],msg) - sys.stdout.flush() - return - #================================================================= - # setQuiet - set DEBUG level to 0 - suppressing most messages - # - def setQuiet(self): - self.DEBUG=0 - return - #================================================================= - # 
setVerbose - set DEBUG level (defaults to 1) - # - def setVerbose(self,value=1): - self.DEBUG=value - return - def setDebug(self,value=1): - self.DEBUG=value - return - def getDebug(self): - return self.DEBUG - def getVerbose(self): - return self.DEBUG - #================================================================== - # Given a modelname and parmname (and optional level), return - # the GFE precision number (0 for integers, 1 for 0.1 resolution, - # 2 for 0.01 resolution, etc.). This is useful in many areas. - # - def getParmPrecision(self,modelname,parmname,level="SFC"): - precision=0 - parmInfo=self.getParm(modelname,parmname,level) - if parmInfo is not None: - precision=parmInfo.getGridInfo().getPrecision() - return precision - #================================================================= - # lastSaved - get time that the grid for the specified parm, model, - # basetime, starttime, endtime was written to the grid - # archive database. If it has not been written - return - # 0. 
- # - def lastSaved(self,parm,model,Btime,Stime,Etime): - self.logMsg("Starting lastSaved in VerifyUtility",10) - savedTime=0 - # - # Check that the correct file is open and ready to modify - # - if not self.checkFile(parm,model,modify=1): - return savedTime - # - # for models in OBSMODELS - use open Obs file - # - if model in self.OBSMODELS: - # - # If no records - it hasn't been saved - # - if self.onumRecs==0: - return savedTime - # - # See if a record with the exact same times exists - # - s=equal(self.oncStime[:],Stime) - b=equal(self.oncBtime[:],Btime) - e=equal(self.oncEtime[:],Etime) - use=logical_and(logical_and(b,s),e) - if sometrue(use): - a=compress(use,self.oncRecs) - recnum=int(a[0]) - savedTime=self.oncVtime[recnum] - # - # for non OBSMODELS models - use open Fcst file - # - else: - # - # If no records - it hasn't been saved - # - if self.fnumRecs==0: - return savedTime - # - # See if a record with the exact same times exists - # - s=equal(self.fncStime[:],Stime) - b=equal(self.fncBtime[:],Btime) - e=equal(self.fncEtime[:],Etime) - use=logical_and(logical_and(b,s),e) - if sometrue(use): - a=compress(use,self.fncRecs) - recnum=int(a[0]) - savedTime=self.fncVtime[recnum] - return savedTime - #======================================================================= - # smoothpm - smooths grid by averaging over plus and minus k - # gridpoints, which means an average over a square 2k+1 - # gridpoints on a side. If mask is specified, only - # smooth over the points that have mask=1, not any others. - # - # Near the edges it can't average over plus and minus - # - since some points would be off the grid - so it - # averages over all the points it can. For example, on - # the edge gridpoint - it can only come inside k points - - # so the average is over only k+1 points in that direction - # (though over all 2k+1 points in the other direction - - # if possible) - # - # Much faster by using the cumsum function in numeric. 
- # Total across the 2k+1 points is the cumsum at the last - # point minus the cumsum at the point before the first - # point. Only edge points need special handling - and - # cumsum is useful here too. - # - def smoothpm(self,grid,k,mask=None): - k=int(k) # has to be integer number of gridpoints - if (k<1): # has to be a positive number of gridpoints - return grid - (ny,nx)=grid.shape - k2=k*2 - # - # Remove the minimum from the grid so that cumsum over a full - # row or column of the grid doesn't get so big that precision - # might be lost. - # - fullmin=minimum.reduce(minimum.reduce(grid)) - gridmin=grid-fullmin - # - # No mask is simpler - # - if mask is None: - # - # Average over the first (y) dimension - making the 'mid' grid - # - mid=grid*0.0 - c=cumsum(gridmin,0) - nym1=ny-1 - midy=int((ny-1.0)/2.0) - ymax=min(k+1,midy+1) - for j in range(ymax): # handle edges - jk=min(j+k,nym1) - jk2=max(nym1-j-k-1,-1) - mid[j,:]=c[jk,:]/float(jk+1) - if jk2==-1: - mid[nym1-j,:]=c[nym1,:]/float(jk+1) - else: - mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])/float(jk+1) - if ((k+1)<=(ny-k)): # middle - mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])/float(k2+1) - # - # Average over the second (x) dimension - making the 'out' grid - # - c=cumsum(mid,1) - out=grid*0.0 - nxm1=nx-1 - midx=int((nx-1.0)/2.0) - xmax=min(k+1,midx+1) - for j in range(xmax): # handle edges - jk=min(j+k,nxm1) - jk2=max(nxm1-j-k-1,-1) - out[:,j]=c[:,jk]/float(jk+1) - if jk2==-1: - out[:,nxm1-j]=c[:,nxm1]/float(jk+1) - else: - out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/float(jk+1) - if ((k+1)<=(nx-k)): # middle - out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/float(k2+1) - # - # Add the minimum back in - # - out=out+fullmin - # - # Mask makes it a bit more difficult - have to find out how many - # points were in each cumsum - and have to deal with possible - # divide-by-zero errors - # - else: - # - # Average over the first (y) dimension - making the 'mid' grid - # - mask=clip(mask,0,1) - gridmin1=where(mask,gridmin,float32(0)) - 
mid=grid*0.0 - midd=grid*0.0 - c=cumsum(gridmin1,0) - d=cumsum(mask,0) - nym1=ny-1 - midy=int((ny-1.0)/2.0) - ymax=min(k+1,midy+1) - for j in range(ymax): # handle edges - jk=min(j+k,nym1) - jk2=max(nym1-j-k-1,-1) - mid[j,:]=c[jk,:] - midd[j,:]=d[jk,:] - if jk2==-1: - mid[nym1-j,:]=c[nym1,:] - midd[nym1-j,:]=d[nym1] - else: - mid[nym1-j,:]=(c[nym1,:]-c[jk2,:]) - midd[nym1-j,:]=d[nym1,:]-d[jk2,:] - if ((k+1)<=(ny-k)): # middle - mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:]) - midd[k+1:ny-k,:]=d[k2+1:,:]-d[:-k2-1,:] - # - # Average over the second (x) dimension - making the 'out' grid - # - c=cumsum(mid,1) - d=cumsum(midd,1) - out=grid*0.0 - nxm1=nx-1 - midx=int((nx-1.0)/2.0) - xmax=min(k+1,midx+1) - for j in range(xmax): # handle edges - jk=min(j+k,nxm1) - jk2=max(nxm1-j-k-1,-1) - out[:,j]=c[:,jk]/maximum(d[:,jk],1) - if jk2==-1: - out[:,nxm1-j]=c[:,nxm1]/maximum(d[:,nxm1],1) - else: - out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/maximum((d[:,nxm1]-d[:,jk2]),1) - if ((k+1)<=(nx-k)): # middle - out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/maximum((d[:,k2+1:]-d[:,:-k2-1]),1) - # - # Add the minimum back in - # - out=where(mask,out+fullmin,grid) - return out - #======================================================================= - # arealOccur - similar to smoothpm, in that it looks over a square 2k+1 - # on a side. But should be used with a logical array of 0 - # and 1, and just tells you whether an occurrence (a 1) - # occurred anywhere in the search square. If a mask is - # specified it only searches over the points that have - # mask=1, not any others. - # - # Near the edges it can't search over plus and minus - # - since some points would be off the grid - so it - # searches over all the points it can. For example, on - # the edge gridpoint - it can only come inside k points - - # so the average is over only k+1 points in that direction - # (though over all 2k+1 points in the other direction - - # if possible) - # - # Much faster by using the cumsum function in numeric. 
- # Total across the 2k+1 points is the cumsum at the last - # point minus the cumsum at the point before the first - # point. Only edge points need special handling - and - # cumsum is useful here too. - # - def arealOccur(self,grid,k,mask=None): - k=int(k) # has to be integer number of gridpoints - if (k<1): # has to be a positive number of gridpoints - return grid - (ny,nx)=grid.shape - k2=k*2 - # - # No mask is simpler - # - if mask is None: - grid1=grid - else: - mask=clip(mask,0,1) - grid1=where(mask,grid,float32(0)) - # - # Average over the first (y) dimension - making the 'mid' grid - # - mid=grid*0.0 - c=cumsum(grid1,0) - nym1=ny-1 - midy=int((ny-1.0)/2.0) - ymax=min(k+1,midy+1) - for j in range(ymax): # handle edges - jk=min(j+k,nym1) - jk2=max(nym1-j-k-1,-1) - mid[j,:]=c[jk,:] - if jk2==-1: - mid[nym1-j,:]=c[nym1,:] - else: - mid[nym1-j,:]=c[nym1,:]-c[jk2,:] - if ((k+1)<=(ny-k)): # middle - mid[k+1:ny-k,:]=c[k2+1:,:]-c[:-k2-1,:] - # - # Average over the second (x) dimension - making the 'out' grid - # - c=cumsum(mid,1) - out=grid*0.0 - nxm1=nx-1 - midx=int((nx-1.0)/2.0) - xmax=min(k+1,midx+1) - for j in range(xmax): # handle edges - jk=min(j+k,nxm1) - jk2=max(nxm1-j-k-1,-1) - out[:,j]=c[:,jk] - if jk2==-1: - out[:,nxm1-j]=c[:,nxm1] - else: - out[:,nxm1-j]=c[:,nxm1]-c[:,jk2] - if ((k+1)<=(nx-k)): # middle - out[:,k+1:nx-k]=c[:,k2+1:]-c[:,:-k2-1] - # - # Occur is where non-zero - # - out=greater(out,0.5) - return out - #----------------------------------------------------------------------- - # getGridSpacing - get 'rough grid spacing' by getting the distance - # between the corners of the GFE grid and dividing by - # the number of points. 
- # - def getGridSpacing(self): - xmax=self.getGridShape()[1] - ymax=self.getGridShape()[0] - (lat1,lon1)=self.getLatLon(0,0) - (lat2,lon2)=self.getLatLon(xmax-1,ymax-1) - hypot=math.hypot(xmax-1,ymax-1) - spacing1=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot - (lat1,lon1)=self.getLatLon(0,ymax-1) - (lat2,lon2)=self.getLatLon(xmax-1,0) - spacing2=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot - avgspacing=(spacing1+spacing2)/2.0 - return avgspacing - #----------------------------------------------------------------------- - # getCircleDistance - get the 'great circle distance' between two lat - # lon points (in km) - # - def getCircleDistance(self,lat1,lon1,lat2,lon2): - DTR=math.pi/180.0 - lat1r=lat1*DTR - lon1r=lon1*DTR - lat2r=lat2*DTR - lon2r=lon2*DTR - dl=lon2r-lon1r - a=(math.acos((math.sin(lat1r)*math.sin(lat2r))+(math.cos(lat1r)*\ - math.cos(lat2r)*math.cos(dl))))/DTR - return(a*1.852*60) +# BOIVerifyUtility - version 2.0.5 +# +# Main utility supporting maintenance of verification databases of both +# grids and stats. +# +# Author: Tim Barker - SOO BOI +# 2005/07/20 - Original Implementation +# 2005/07/29 - version 0.1 - update to grid database structure +# to add time that grid was stored +# 2006/11/06 - version 1.0 - First version with time-series graphs. Still +# lots of bugs and not what I would like for a version 1.0 but +# I've put it off way too long anyway. +# 2006/12/04 - fix bug with getStatList getting slightly wrong time periods +# 2007/02/05 - version X.X +# fixed hardcoded references to "Obs" in getObsPeriod and +# getObsList +# 2007/05/01 - version 1.1 - emergency fix to getPairList for speed +# and memory usage when large number of grids are stored +# 2007/10/23 - version 2.0 - gobs of new stuff for handling common cases +# probability parms, rate parms, new scores, etc. 
+# 2007/11/30 - version 2.0.1 - fix accumulations over periods more than +# twice as long as models grids (grids totally inside the +# time period were getting counted multiple times) +# 2007/12/17 - version 2.0.2 - fix list of verifying accumulative +# periods when specifying list of verifying dates (it was +# omitting the last period that fit within the time period). +# 2008/05/28 - version 2.0.5 - Added support for new statistics: +# frequency observed and frequency forecast. +# +# +# 2010/04/23 ryu Initial port to AWIPS II. +# +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 03/24/2016 18773 ryu Fix IOError from use of NetCDFVariables +# in numpy operations +# 05/25/2016 18743 arickert Original forecaster who edited grids was +# was included with more recent editor +# 06/10/2016 18169 ryu Fix typo in getStatModelCases method +#===================================================================== + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +import TimeRange, AbsTime + +from numpy import * +from types import * +import os,string,re,sys +import time,calendar,math,types +try: + from Scientific.IO import NetCDF +except: + import NetCDF + +import SmartScript +# +# Import the Configuration Information +# +import BOIVerifyConfig +#try: +# from BOIVerifyConfig import * +#except: +# print "Could not import BOIVerifyConfig" +#print "VERDIR=",VERDIR +#print "EDITAREAS=",EDITAREAS +# +# Contants that should not be changed by the users! +# +VERSION="2.0.2" +# +# STATAREAS = max number of areas to keep stats for +# STATTYPES = max number of stats to keep +# Changing these is NOT recommended - it changes the size of the +# dimensions in the stat database. If you already have an existing +# stat database, all old information will be lost. 
Eventually we'll +# have a program that will convert a stat database for more areas or +# more stats +# +STATAREAS=150 # max number of areas to keep stats for +STATTYPES=15 # max number of stats to keep +# +# Do not change - will corrupt the database. Will eventually have +# a program that allows the number of forecaster numbers saved for +# a grid or stat to be changed. +# +MAXFCSTRS=5 +# +# Number of thresholds saved for each parm +# +NUMTHRESH=5 +# +# Forecaster number file +# +FCSTRNUMFILE="FcstrNums.dat" # maintained by software - do not change +HOURSECS=60*60 +DAYSECS=24*HOURSECS +SkipParms=[] # parms that we do not verify yet + +class BOIVerifyUtility(SmartScript.SmartScript): + def __init__(self, dbss, eaMgr, mdMode=None, toolType="numeric"): + SmartScript.SmartScript.__init__(self, dbss) + + # + # Read in Config info. + # Each entry in BOIVerifyConfig is put into a self. variable + # for fast access within this utility + # And put into a self.CFG dictionary for easy access outside + # this utility (through the getCFG method) + # (only does variables in BOIVerifyConfig that do NOT start + # with an underscore _ ) + # + names=dir(BOIVerifyConfig) + + self.CFG={} + #names=["VERDIR","EDITAREAS","PERCENT_COLOR","GRIDDAYS","STATDAYS", + # "OBSMODELS"] + for name in names: + if name[0:1]!="_": + execstr="self.CFG['"+name+"']=BOIVerifyConfig."+name + #execstr="self.CFG['"+name+"']="+name + exec(execstr) + execstr="self."+name+"=BOIVerifyConfig."+name + #execstr="self."+name+"="+name + exec(execstr) + # + # Setup the REALLY constant variables + # + self.STATAREAS=STATAREAS + self.CFG['STATAREAS']=STATAREAS + self.STATTYPES=STATTYPES + self.CFG['STATTYPES']=STATTYPES + self.MAXFCSTRS=MAXFCSTRS + self.CFG['MAXFCSTRS']=MAXFCSTRS + self.NUMTHRESH=NUMTHRESH + self.CFG['NUMTHRESH']=NUMTHRESH + self.FCSTRNUMFILE=FCSTRNUMFILE + self.CFG['FCSTRNUMFILE']=FCSTRNUMFILE + # + # Setup DEBUG level + # + self.DEBUG=0 + # + # Setup Observed file info + # + self.oncParm="" + 
self.oncModel="" + self.oncModify=0 + self.onumRecs=0 + # + # Setup forecast grid file info + # + self.fncParm="" + self.fncModel="" + self.fncModify=0 + self.fnumRecs=0 + # + # Setup forecast stat file info + # + self.sncParm="" + self.sncModel="" + self.sncObsModel="" + self.sncModify=0 + self.sncNumRecs=0 + # + # stup forecaster numbers, edit areas, and check config + # + self.setupFcstrNums() + self.setupEditAreas() + self.checkConfig() + # + # setup all the stat names. statIDs are the 'short' and + # 'correct' name to use for a stat. Various optional + # names, shortened spellings, etc. are stored in statNames + # dictionary for each ID. allStats contains all the possible + # names for all stats in the system. + # + self.statNames={"bias":["bias","error","err"], + "mae" :["mae","absolute error","abs error","abs err", + "mean abs error","mean abs err", + "mean absolute error","mean absolute err"], + "rms" :["rms","root mean squared error", + "root mean squared err","rms error","rms err"], + "mse" :["mse","mean squared error","mean squared err", + "brier score","brier"], + "peb" :["peb","percent error below","percent err below", + "% error below","% err below","percent error <", + "percent err <","% error <","% err <"], + "fc" :["fc","fraction correct"], + "afc" :["afc","areal fraction correct"], + "freqo":["freqo","frequency observed"], + "freqf":["freqf","frequency forecast"], + "freqbias":["freqbias","frequency bias"], + "afreqbias":["afreqbias","areal frequency bias"], + "pod" :["pod","probability of detection","prob detection"], + "apod":["apod","areal probability of detection", + "areal prob detection","areal pod"], + "far" :["far","false alarm ratio"], + "afar":["afar","areal false alarm ratio"], + "pofd":["pofd","probability of false detection", + "prob of false detection"], + "apofd":["apofd","areal probability of false detection", + "areal prob of false detection"], + "ts" :["ts","threat score","csi", + "critical success index"], + "ats" :["ats","areal 
threat score","acsi", + "areal critical success index"], + "ets" :["ets","equitable threat score","gilbert"], + "aets":["aets","areal equitable threat score", + "agilbert","areal gilbert"], + "hk" :["hk","hanssen and kuipers discriminant", + "peirce","peirces skill score", + "tss","true skill score"], + "ahk" :["ahk","areal hanssen and kuipers discriminant", + "apeirce","areal peirces skill score", + "atss","areal true skill score"], + "hss" :["hss","heidke skill score"], + "ahss":["ahss","areal heidke skill score"], + "oddsratio":["oddsratio","odds ratio"], + "aoddsratio":["aoddsratio","areal odds ratio"], + "hits":["hits",], + "ahits":["ahits","areal hits"], + "miss":["miss","misses"], + "amiss":["amiss","areal misses"], + "fals":["fals","false alarms"], + "afals":["afals","areal false alarms"], + "corn":["corn","correct negatives"], + "acorn":["acorn","areal correct negatives"], + "cont":["cont","contingency table"], + "acont":["acont","areal contingency table"], + } + self.statIDs=list(self.statNames.keys()) + self.allStats=[] + for statName in self.statIDs: + names=self.statNames[statName] + for name in names: + self.allStats.append(name) + return + + #================================================================= + # checkConfig - cross check configuration data, and make log + # messages about problems. 
+ # + # Return 0 if config is OK, return 1 if there + # are any problems + # + def checkConfig(self): + badConfig=0 + mutid=self.mutableID() + # + # Make sure parms are well defined + # + parmNames=list(self.VERCONFIG.keys()) + for parmName in parmNames: + parmInfo=self.getParm(mutid,parmName,"SFC") + if parmInfo is None: + self.logMsg("Could not check VER config for %s"%parmName) + continue + config=self.VERCONFIG[parmName] + # + # check for horribly bad config lines + # + if type(config)!=TupleType: + self.logMsg("BOIVerify VERCONFIG of %s is not a tuple - it should be"%parm) + badConfig=1 + continue + if len(config)!=8: + self.logMsg("BOIVerify VERCONFIG of %s does not have 8 elements"%parmName) + badConfig=1 + continue + # + # check parm type + # + if type(config[0])!=IntType: + self.logMsg("BOIVerify VERCONFIG of %s has bad type:%s"%(parm,config[0])) + badConfig=1 + else: + parmType=config[0] + if ((parmType<0)or(parmType>1)): + self.logMsg("BOIVerify VERCONFIG of %s has bad type:%d"%(parm,parmType)) + badConfig=1 + # + # check ver type + # + if type(config[1])!=IntType: + self.logMsg("BOIVerify VERCONFIG of %s has bad verification type:%s"%(parm,config[1])) + badConfig=1 + else: + verType=config[1] + if ((verType<0)or(verType>1)): + self.logMsg("BOIVerify VERCONFIG of %s has bad verification type:%d"%(parm,verType)) + badConfig=1 + # + # check parm save interval + # + if type(config[2])!=IntType: + self.logMsg("BOIVerify VERCONFIG of %s has bad save interval:%s"%(parm,config[2])) + badConfig=1 + else: + saveInt=config[2] + if ((saveInt<0)or(saveInt>24)): + self.logMsg("BOIVerify VERCONFIG of %s has bad save interval:%d"%(parm,saveInt)) + badConfig=1 + # + # Checks are different for Scalar/Probability vs. 
Vectors + # + ##if parmInfo.dataType()!=1: + wxType = parmInfo.getGridInfo().getGridType().ordinal() - 1 + if wxType!=1: + # + # check for NUMTHRESH thresholds + # + if type(config[3])!=TupleType: + self.logMsg("BOIVerify VERCONFIG of %s has bad thresholds:%s"%(parm,config[3])) + badConfig=1 + else: + thresholds=config[3] + numthresh=len(thresholds) + if (numthresh!=self.NUMTHRESH): + self.logMsg("BOIVerify VERCONFIG of %s does not have %d thresholds: has %d instead"%(parm,self.NUMTHRESH,numthresh)) + badConfig=1 + # + # check for binwidth + # + return badConfig + #================================================================= + # CONVENIENCE FUNCTION FOR GETTING VER CONFIGURATION + # + def getCFG(self,name): + if name in self.CFG: + return self.CFG[name] + else: + return None + #================================================================= + # CONVENIENCE FUNCTIONS FOR GETTING VER PARM CONFIGURATION + # + # getVerParms - get list of configured verification parameters + # + def getVerParms(self): + VerParms=list(self.VERCONFIG.keys()) + VerParms.sort() + return VerParms + #================================================================= + # getVerParmsVect - gets list of configured verification + # parameters, with any vector parms also + # listed with "Spd" appended, and "Dir" + # appended + # + def getVerParmsVect(self): + parmList=self.getVerParms() + VerParms=[] + for parm in parmList: + if self.getVerParmType(parm)==1: + VerParms.append(parm) + VerParms.append(parm+"Spd") + VerParms.append(parm+"Dir") + else: + VerParms.append(parm) + VerParms.sort() + return VerParms + #================================================================= + # getVerParmsVal - gets list of configured verification parms + # that are scalars (removes vectors from list) + # + def getVerParmsVal(self): + parmList=self.getVerParms() + VerParms=[] + for parm in parmList: + if self.getVerType(parm)==0: + VerParms.append(parm) + VerParms.sort() + return VerParms + 
#================================================================= + # getVerParmsObs - gets list of configured verification parms, + # and adds in any observed parm namesget list of + # observed parms + # + def getVerParmsObs(self): + VerParms=[] + fparms=list(self.VERCONFIG.keys()) + fparms.sort() + for key in fparms: + if self.VERCONFIG[key][1]==1: + (obsparm,ttype,tval)=self.VERCONFIG[key][7] + else: + obsparm=self.VERCONFIG[key][7] + if obsparm not in VerParms: + VerParms.append(obsparm) + VerParms.sort() + return VerParms + #================================================================= + # getObsParm - for a particular parm, get its corresponding + # obsparm name. + # + def getObsParm(self,fcstParm): + obsparm="" + if fcstParm in list(self.VERCONFIG.keys()): + if self.VERCONFIG[fcstParm][1]==1: + (obsparm,ttype,tval)=self.VERCONFIG[fcstParm][7] + else: + obsparm=self.VERCONFIG[fcstParm][7] + return obsparm + #================================================================= + # getObsCondition - for a probability parm, get the condition + # ">", ">=", "<", "<=" etc. for the observed + # parm. For example, in the US, PoP is verified + # with QPE >= 0.01, so the condition is ">=" in + # this case. If the parm is not a probability parm + # it just returns an empty string. + # + def getObsCondition(self,fcstParm): + obsCondition="" + if fcstParm in list(self.VERCONFIG.keys()): + if self.VERCONFIG[fcstParm][1]==1: + (obsparm,obsCondition,obsThreshold)=self.VERCONFIG[fcstParm][7] + return obsCondition + #================================================================= + # getObsThreshold - for a probability parm, get the threshold for + # the observed parm. For example, in the US, + # PoP is verified with QPE >= 0.01. The threshold + # is 0.01 in this case. If the specified parm is + # not a probability parm, returns zero for the + # threshold. 
+ # + def getObsThreshold(self,fcstParm): + obsThreshold=0 + if fcstParm in list(self.VERCONFIG.keys()): + if self.VERCONFIG[fcstParm][1]==1: + (obsparm,obsCondition,obsThreshold)=self.VERCONFIG[fcstParm][7] + return obsThreshold + #================================================================= + # getVerConfig - get the specified element of the config stuff + # for a parm, or return None if that parm is not + # configured + # + def getVerConfig(self,parmName,element): + if parmName not in list(self.VERCONFIG.keys()): + return None + config=self.VERCONFIG[parmName] + return config[element] + #================================================================= + # getVerParmType - get the type of parm, either 0 for SCALAR or + # 1 for VECTOR. If the parm is not configured it + # also checks to see of the observed parm of any + # configured parm matches - and returns whether + # that parm is a SCALAR or VECTOR + # + def getVerParmType(self,parmName): + parmType=self.getVerConfig(parmName,0) + if parmType is None: + parms=self.getVerParms() + for parm in parms: + oparm=self.getObsParm(parm) + if oparm==parmName: + parmType=self.getVerConfig(parm,0) + break + return parmType + #================================================================= + # getVerType - get the type of verification needed for this parm. + # Normally 0, meaning just typical value verification. + # A value of 1 means 'floating probability' type + # verification. A value of 2 means 'probability' type + # verification. + # + def getVerType(self,parmName): + return self.getVerConfig(parmName,1) + #================================================================= + # getVerSaveInterval - used for the saving of parms. If set to 0 + # then save all grids for this parm. If set + # to 3, then save 'snapshots' every 3 hours + # through the day. If set to 6, then save + # 'snapshots' every 6 hours through the day, + # etc. 
+ # + def getVerSaveInterval(self,parmName): + return self.getVerConfig(parmName,2) + #================================================================= + # getVerThresholds - used in BOIVerify autocalc. calculates some + # stats for absolute value of errors less than + # these thresholds. For SCALAR parms, should + # be a tuple with 5 threshold values. For + # VECTOR parms, should be two tuples, each + # with 5 values. + # + def getVerThresholds(self,parmName): + return self.getVerConfig(parmName,3) + #================================================================= + # getVerBinWidth - used in BOIVerify histogram displays. Used + # for the width of bins in the histogram. For + # a SCALAR should be just one value. For a VECTOR + # will be a tuple with the magnitude binWidth and + # the direction binWidth + # + def getVerBinWidth(self,parmName): + return self.getVerConfig(parmName,4) + #================================================================= + # getVerBigErr - used in BOIVerify error displays. Errors bigger + # than this are sometimes not displayed. SCALARS + # will have one value. VECTORS will have a tuple + # with magnitude bigErr and direction bigErr. + # + def getVerBigErr(self,parmName): + return self.getVerConfig(parmName,5) + #================================================================= + # getVerErrColor - used in BOIVerify error displays. Color table + # name used for errors. This is in here so that + # dewpoint errors can have a different color + # table than, say, temperature errors. For a + # VECTOR parm, this returns a tuple of two color + # table names + # + def getVerErrColor(self,parmName): + return self.getVerConfig(parmName,6) + #================================================================= + # getVerObsInfo - used in BOIVerify for probability parms. For + # normal value parms, this is just the name of + # the parm that verifies it (so that QPF can be + # verified by QPE, if needed). 
For probability + # parms this is a 3-value tuple with name, + # condition, threshold) + # + def getVerObsInfo(self,parmName): + return self.getVerConfig(parmName,7) + #================================================================= + # getBaseOffset - looks through baseOffset in configuration to + # see if model is listed. If so, returns the + # offset (in hours). If not, returns zero. + # + def getBaseOffset(self,model): + if model in list(self.BASE_OFFSET.keys()): + offset=self.BASE_OFFSET[model] + else: + offset=0 + return offset + #================================================================= + # setupFcstrNums - sets up the Fcstrs list with the strings of + # forecaster names for each number by reading + # the FCSTRNUMFILE file from disk. + # FcstrNames is a dictionary with names for + # each 2-digit number string + # FcstrIDs is a dictionary with 8-character IDs + # for each 2-digit number string + # + def setupFcstrNums(self): + self.FcstrNames={} + self.FcstrIDs={} + self.FcstrNames["00"]="Unknown" + self.FcstrIDs["00"]="Unknown" + filename="%s/%s"%(self.VERDIR,self.FCSTRNUMFILE) + try: + fs=file(filename,"r") + lines=fs.readlines() + fs.close() + except: + return 0 + for line in lines: + nocomment=re.sub("#.*","",line) + pieces=nocomment.split(",") + if len(pieces)>1: + numstr=pieces[0].strip() + try: + num=int(numstr) + except: + self.logMsg("Bad forecaster number ignored:") + self.logMsg(" %s"%nocomment) + continue + idstr=pieces[1].strip() + if len(pieces)>2: + namstr=pieces[2].strip() + else: + namstr=idstr + # + # forecaster number 0 is always forced to be Unknown + # + if num==0: + namstr="Unknown" + idstr="Unknown" + # + numstr="%2.2d"%num + self.FcstrNames[numstr]=namstr + self.FcstrIDs[numstr]=idstr + # + # If debug is set...pwrint a list of forecaster number:id,name + # + if self.DEBUG>0: + self.logMsg("setupFcstrNums reading verification forecasters:") + numericList=[] + for (num,name) in list(self.FcstrNames.items()): + 
id=self.FcstrIDs[num] + numericList.append("%s:%s,%s"%(num,id,name)) + numericList.sort() + for entry in numericList: + self.logMsg(" %s"%entry) + return 1 + #================================================================= + # getFcstrNames - return dictionary of forecaster names for each + # number (number is a two-digit string). + # + def getFcstrNames(self): + return self.FcstrNames + #================================================================= + # getFcstrName - given a number (integer), find the full forecaster + # name. Returns empty string if number is not used + # + def getFcstrName(self,num): + name="" + numstr="%2.2d"%num + if numstr in list(self.FcstrNames.keys()): + name=self.FcstrNames[numstr] + return name + #================================================================= + # getFcstrIDs - return dictionary of forecast IDs for each number + # + def getFcstrIDs(self): + return self.FcstrIDs + #================================================================= + # getFcstr ID - given a number (integer), find the forecaster ID. + # Returns empty string if number is not used + # + def getFcstrID(self,num): + ID="" + numstr="%2.2d"%num + if numstr in list(self.FcstrIDs.keys()): + ID=self.FcstrIDs[numstr] + return ID + #================================================================= + # getFcstrNums - return list of forecaster numbers (as strings) + # + def getFcstrNums(self): + nums=list(self.FcstrNames.keys()) + nums.sort() + return nums + #================================================================= + # setFcstrs - take a name dictionary, and an id dictionary, and + # set the self.FcstrNames and self.FcstrIDs + # dictionaries in the Utility. Used in BOIVerifyInfo + # when these dictionaries are being updated. 
+    #
+    def setFcstrs(self,nameDict,idDict):
+        self.FcstrNames={}
+        for (key,value) in list(nameDict.items()):
+            self.FcstrNames[key]=value
+        self.FcstrIDs={}
+        for (key,value) in list(idDict.items()):
+            self.FcstrIDs[key]=value
+    #=================================================================
+    # findFcstrNumFromID - takes a forecaster ID string - and returns
+    #                      the associated integer forecast number.
+    #                      If the ID is not found - returns 0.
+    #
+    def findFcstrNumFromID(self,id):
+        num=0
+        if id in list(self.FcstrIDs.values()):
+            for (testnum,testid) in list(self.FcstrIDs.items()):
+                if testid==id:
+                    num=int(testnum)
+                    break
+        return num
+    #==================================================================
+    # findFcstrNumFromName - takes a forecaster name string - and returns
+    #                        the associated integer forecast number.
+    #                        if the name is not found - returns 0.
+    #
+    def findFcstrNumFromName(self,name):
+        num=0
+        # bugfix: test the 'name' parameter, not the builtin 'id'
+        if name in list(self.FcstrNames.values()):
+            for (testnum,testname) in list(self.FcstrNames.items()):
+                if testname==name:
+                    num=int(testnum)
+                    break
+        return num
+    #==================================================================
+    # saveFcstrNum - write the FcstrNames and FcstrIDs info into the
+    #                FCSTRNUMSFILE on disk. This called by BOIVerifyInfo
+    #                as these are being updated and should not be
+    #                called elsewhere.
+ # + def saveFcstrNums(self): + filename="%s/%s"%(self.VERDIR,self.FCSTRNUMFILE) + try: + fs=file(filename,"w") + fs.write("#\n") + fs.write("# This file maintained by the verification programs\n") + fs.write("# Please DO NOT EDIT\n") + fs.write("#\n") + numkeys=list(self.FcstrNames.keys()) + numkeys.sort() + for numstr in numkeys: + name=self.FcstrNames[numstr] + id=self.FcstrIDs[numstr] + fs.write("%s,%s,%s\n"%(numstr,id,name)) + fs.close() + try: + os.chmod(filename,0o666) + except: + self.logMsg("%s should have 666 permissions"%self.FCSTRNUMFILE) + return 0 + except: + self.logMsg("Error writing to %s"%self.FCSTRNUMFILE) + return 1 + return 0 + #================================================================= + # setupEditAreas - read the EDITAREAS file and sets up internal + # EditAreas list with the names of editareas. + # They names are in the appropriate 'slot' in + # the list via number (i.e. if the file does not + # specify an edit area #35, then slot 35 in the + # list is kept blank. 
EditAreaDescriptions keeps + # the long descriptive names in a similar way + # + def setupEditAreas(self): + self.EditAreas=[] + self.EditAreaDescriptions=[] + filename="%s/%s"%(self.VERDIR,self.EDITAREAS) + try: + fs=file(filename,"r") + lines=fs.readlines() + fs.close() + except: + return 0 + for line in lines: + nocomment=re.sub("#.*","",line) + pieces=nocomment.split(",") + if len(pieces)>2: + numstr=pieces[0].strip() + namstr=pieces[1].strip() + desstr=pieces[2].strip() + try: + num=int(numstr) + except: + continue + if num>self.STATAREAS: + continue + if num<0: + continue + if num>=len(self.EditAreas): + for j in range(len(self.EditAreas),num+1): + self.EditAreas.append("") + self.EditAreaDescriptions.append("") + self.EditAreas[num]=namstr + self.EditAreaDescriptions[num]=desstr + if self.DEBUG>0: + self.logMsg("Setting up verification edit areas:") + for i in range(len(self.EditAreas)): + if self.EditAreas[i]!="": + self.logMsg(" %d:%s"%(i,self.EditAreas[i])) + return 1 + #================================================================= + # listEditAreas - get a list with just the names of edit areas + # (in other words, just like EditAreas, but + # without the 'blank' entries) + # + def listEditAreas(self): + usedEditAreas=[] + for area in self.EditAreas: + if area!="": + usedEditAreas.append(area) + return usedEditAreas + #================================================================= + # listEditAreaDescriptions - get a list with just the descriptions + # of all the editAreas, but without + # the 'blank' entries that are in + # EditAreaDescriptions. + # + def listEditAreaDescriptions(self): + usedEditAreaDescriptions=[] + for area in self.EditAreaDescriptions: + if area!="": + usedEditAreaDescriptions.append(area) + return usedEditAreaDescriptions + #================================================================= + # getEditAreaNumberFromName - given a name, return the number of + # that edit area in the MAXEDITAREAS list. 
If the name + # does not exist, return 0 + # + def getEditAreaNumberFromName(self,name): + if name in self.EditAreas: + j=self.EditAreas.index(name) + else: + j=0 + return j + #================================================================= + # listModels - List models in the BOIVerify system by looking + # through Grids directories looking for different + # names. Does not include any models that + # are in the OBSMODELS list + # + def listModels(self): + Models=[] + pat=re.compile("(\S+)_\S+_index\.nc") + parmdirs=os.listdir("%s/Grids"%self.VERDIR) + for parmdir in parmdirs: + if parmdir[0]==".": + continue + dirname="%s/Grids/%s"%(self.VERDIR,parmdir) + if os.path.isdir(dirname): + files=os.listdir(dirname) + for file in files: + if file[0]==".": + continue + matchObject=pat.search(file) + if matchObject is None: + continue + model=matchObject.group(1) + if ((model not in Models)and(model not in self.OBSMODELS)): + Models.append(model) + Models.sort() + return Models + #================================================================= + # listParms - DEPRECATED - gets a list of parms in the system, not + # by reading the configuration (which is the right way) + # but by looping through all the directories looking for + # names of files. 
+    #
+    def listParms(self):
+        Parms=[]
+        files=os.listdir("%s/Grids"%self.VERDIR)
+        for file in files:
+            if file[0]==".":
+                continue
+            fname="%s/Grids/%s"%(self.VERDIR,file)
+            if os.path.isdir(fname):
+                parm=file
+                if ((parm not in Parms)and(parm not in SkipParms)):
+                    Parms.append(parm)
+        Parms.sort()
+        return Parms
+    #=================================================================
+    # listModelParms - given a model, get the parms that have been
+    #                  archived - by looking for data files
+    #
+    def listModelParms(self,model):
+        Parms=[]
+        files=os.listdir("%s/Grids"%self.VERDIR)
+        for file in files:
+            if file[0]==".":
+                continue
+            fname="%s/Grids/%s"%(self.VERDIR,file)
+            if os.path.isdir(fname):
+                indexExists=os.path.exists("%s/Grids/%s/%s_%s_index.nc"%(self.VERDIR,file,model,file))
+                # bugfix: check the _data.nc file (was duplicating the _index.nc check)
+                dataExists=os.path.exists("%s/Grids/%s/%s_%s_data.nc"%(self.VERDIR,file,model,file))
+                if (indexExists and dataExists and (file not in Parms) and (file not in SkipParms)):
+                    Parms.append(file)
+        Parms.sort()
+        return Parms
+    #=================================================================
+    # listStatParms - DEPRECATED - gets a list of stat parms in the
+    #                 system not by reading through the configuration
+    #                 but by looping through all the directories
+    #                 looking for names of files.
+    #
+    def listStatParms(self):
+        Parms=[]
+        files=os.listdir("%s/Stats"%self.VERDIR)
+        for file in files:
+            if file[0]==".":
+                continue
+            fname="%s/Stats/%s"%(self.VERDIR,file)
+            if os.path.isdir(fname):
+                parm=file
+                if ((parm not in Parms)and(parm not in SkipParms)):
+                    Parms.append(parm)
+        Parms.sort()
+        return Parms
+    #=================================================================
+    # closeObsFile - if an Obs file is open, close it and free up
+    #                all the structures associated with it.
+ # + def closeObsFile(self): + if (not(self.oncParm=="")): + self.oncIndex.close() + self.oncData.close() + del self.oncIndex + del self.oncData + del self.oncFcstr + del self.oncBtime + del self.oncStime + del self.oncEtime + del self.oncVtime + del self.oncScale + del self.oncAddit + del self.oncValue + del self.oncRecs + try: + del self.oncScale1 + del self.oncAddit1 + del self.oncValue1 + except: + pass + self.oncParm="" + self.oncModel="" + self.oncModify=0 + self.oncType=0 + return + #================================================================= + # closeFcstFile - if a Fcst file is open, close it and free up + # all the structures associated with it. + # + def closeFcstFile(self): + if (not(self.fncParm=="")): + self.fncIndex.close() + self.fncData.close() + del self.fncIndex + del self.fncData + del self.fncFcstr + del self.fncBtime + del self.fncStime + del self.fncEtime + del self.fncVtime + del self.fncScale + del self.fncAddit + del self.fncValue + del self.fncRecs + try: + del self.fncScale1 + del self.fncAddit1 + del self.fncValue1 + except: + pass + self.fncParm="" + self.fncModel="" + self.fncModify=0 + self.fncType=0 + return + #================================================================= + # makeGridDir - make a directory for grids for the specified + # parm. + # + def makeGridDir(self,parm,modify): + newDir="%s/Grids/%s"%(self.VERDIR,parm) + already=os.path.exists(newDir) + if ((not already)and(modify!=0)): + os.umask(0o002) + os.makedirs(newDir) + return + #================================================================= + # makeStatDir - make a directory for stats for the specified + # parm. 
+ # + def makeStatDir(self,parm,modify): + newDir="%s/Stats/%s"%(self.VERDIR,parm) + already=os.path.exists(newDir) + if ((not already)and(modify!=0)): + os.umask(0o002) + os.makedirs(newDir) + return + #================================================================= + # checkFile - given a parm and model, see if it is open, and if + # not - open it. Takes care of figuring out if it + # is an 'observation' model or not. If modify is + # 1, then it opens it for writing - which locks it + # from writing by others. + # + # returns 0 if there is trouble opening file + # + def checkFile(self,parm,model,modify=0,datatype=-1): + if model in self.OBSMODELS: + retVal=self.checkObsFile(parm,model,modify=modify,datatype=datatype) + return retVal + else: + retVal=self.checkFcstFile(parm,model,modify=modify,datatype=datatype) + return retVal + #================================================================= + # checkObsFile - given an parm and obsmodel, see if it is open, + # and if not - open it. If modify is 1, then it + # opens the file for writing - which locks it + # from writing by others. 
+ # + # returns 0 if there is trouble opening file + # + def checkObsFile(self,parm,model,modify=0,datatype=-1): + # + # If everything is the same...return right away + # + if ((parm==self.oncParm)and(model==self.oncModel)and(modify==self.oncModify)): + return 1 + # + # If a file is currently open - close it + # + if (not(self.oncParm=="")): + self.closeObsFile() + # + # Setup the file names and see if they exist + # + self.makeGridDir(parm,modify) + newIndex="%s/Grids/%s/%s_%s_index.nc"%(self.VERDIR,parm,model,parm) + newData="%s/Grids/%s/%s_%s_data.nc"%(self.VERDIR,parm,model,parm) + already=os.path.exists(newIndex) + # + # Can't read data from file that does not exist + # + if ((not already) and (modify==0)): + return 0 + # + # Figure out read-mode for file + # + if modify==0: + mode="r" + else: + mode="a" + # + # Figure data type and number of points in grid + # + if datatype<0: + datatype=self.getVerParmType(parm) + if datatype is None: + return 0 + (ypts,xpts)=self.getGridShape() + # + # Open the two obs files: the index and the data + # + self.oncIndex=NetCDF.NetCDFFile(newIndex,mode) + self.oncData=NetCDF.NetCDFFile(newData,mode) + # + # If a new file...create the variables + # + if not already: + self.oncData.createDimension("ypts",ypts) + self.oncData.createDimension("xpts",xpts) + self.oncData.createDimension("record",None) + self.oncIndex.createDimension("record",None) + self.oncIndex.createDimension("maxfcstrs",self.MAXFCSTRS) + self.oncFcstr=self.oncIndex.createVariable('fcstr','b',('record','maxfcstrs')) + self.oncBtime=self.oncIndex.createVariable('btime','i',('record',)) + self.oncStime=self.oncIndex.createVariable('stime','i',('record',)) + self.oncEtime=self.oncIndex.createVariable('etime','i',('record',)) + self.oncVtime=self.oncIndex.createVariable('vtime','i',('record',)) + self.oncScale=self.oncIndex.createVariable('scale','d',('record',)) + self.oncAddit=self.oncIndex.createVariable('addit','d',('record',)) + 
self.oncValue=self.oncData.createVariable('value','h',('record','ypts','xpts')) + if datatype==1: + self.oncScale1=self.oncIndex.createVariable('scale1','d',('record',)) + self.oncAddit1=self.oncIndex.createVariable('addit1','d',('record',)) + self.oncValue1=self.oncData.createVariable('value1','h',('record','ypts','xpts')) + self.oncIndex.sync() + self.oncData.sync() + os.chmod(newIndex,0o775) + os.chmod(newData,0o775) + # + # If an old file...hook up variables to the netCDF files + # + else: + ivarnames=list(self.oncIndex.variables.keys()) + dvarnames=list(self.oncData.variables.keys()) + for name in ('fcstr','btime','stime','etime','vtime','scale','addit'): + if name not in ivarnames: + self.logMsg("Corrupt index file for %s %s detected"%(model,parm)) + return 0 + if 'value' not in dvarnames: + self.logMsg("Corrupt data file for %s %s detected"%(model,parm)) + return 0 + self.oncFcstr=self.oncIndex.variables['fcstr'] + if len(self.oncFcstr.shape)!=2: + self.logMsg("Old index file (pre version 1.0) detected for %s %s"%(model,parm)) + return 0 + self.oncBtime=self.oncIndex.variables['btime'] + self.oncStime=self.oncIndex.variables['stime'] + self.oncEtime=self.oncIndex.variables['etime'] + self.oncVtime=self.oncIndex.variables['vtime'] + self.oncScale=self.oncIndex.variables['scale'] + self.oncAddit=self.oncIndex.variables['addit'] + self.oncValue=self.oncData.variables['value'] + if datatype==1: + if (('scale1' not in ivarnames)or('addit1' not in ivarnames)): + self.logMsg("Corrupt index file for %s %s detected"%(model,parm)) + return 0 + if 'value1' not in dvarnames: + self.logMsg("Corrupt data file for %s %s detected"%(model,parm)) + return 0 + self.oncScale1=self.oncIndex.variables['scale1'] + self.oncAddit1=self.oncIndex.variables['addit1'] + self.oncValue1=self.oncData.variables['value1'] + self.oncParm=parm + self.oncModel=model + self.oncModify=modify + self.onumRecs=self.oncStime.shape[0] + self.oncRecs=indices((self.onumRecs,))[0] + self.oncType=datatype 
#=================================================================
# checkFcstFile - given a parm and model, see if the forecast
#                 file pair is open, and if not - open it.  If
#                 modify is 1, then it opens the files for
#                 writing - which locks them from writing by
#                 others.
#
#                 returns 0 if there is trouble opening the files
#
def checkFcstFile(self, parm, model, modify=0, datatype=-1):
    #
    # Fast path: the requested file pair is already open in the
    # requested mode.
    #
    if parm == self.fncParm and model == self.fncModel and modify == self.fncModify:
        return 1
    #
    # A different forecast file is currently open - close it first.
    #
    if self.fncParm != "":
        self.closeFcstFile()
    #
    # Build the index/data filenames and see if they already exist.
    #
    self.makeGridDir(parm, modify)
    newIndex = "%s/Grids/%s/%s_%s_index.nc" % (self.VERDIR, parm, model, parm)
    newData = "%s/Grids/%s/%s_%s_data.nc" % (self.VERDIR, parm, model, parm)
    already = os.path.exists(newIndex)
    #
    # Cannot read data from a file that does not exist.
    #
    if (not already) and (modify == 0):
        return 0
    #
    # Read-only unless modification was requested.
    #
    mode = "r" if modify == 0 else "a"
    #
    # Figure data type and number of points in the grid.
    #
    if datatype < 0:
        datatype = self.getVerParmType(parm)
    if datatype is None:
        return 0
    (ypts, xpts) = self.getGridShape()
    #
    # Open the two forecast files: the index and the data.
    #
    self.fncIndex = NetCDF.NetCDFFile(newIndex, mode)
    self.fncData = NetCDF.NetCDFFile(newData, mode)
    if not already:
        #
        # New file pair ... create the dimensions and variables.
        #
        self.fncData.createDimension("ypts", ypts)
        self.fncData.createDimension("xpts", xpts)
        self.fncData.createDimension("record", None)
        self.fncIndex.createDimension("record", None)
        self.fncIndex.createDimension("maxfcstrs", self.MAXFCSTRS)
        self.fncFcstr = self.fncIndex.createVariable('fcstr', 'b', ('record', 'maxfcstrs'))
        self.fncBtime = self.fncIndex.createVariable('btime', 'i', ('record',))
        self.fncStime = self.fncIndex.createVariable('stime', 'i', ('record',))
        self.fncEtime = self.fncIndex.createVariable('etime', 'i', ('record',))
        self.fncVtime = self.fncIndex.createVariable('vtime', 'i', ('record',))
        self.fncScale = self.fncIndex.createVariable('scale', 'd', ('record',))
        self.fncAddit = self.fncIndex.createVariable('addit', 'd', ('record',))
        self.fncValue = self.fncData.createVariable('value', 'h', ('record', 'ypts', 'xpts'))
        if datatype == 1:
            # Vector parms carry a second packed plane (direction).
            self.fncScale1 = self.fncIndex.createVariable('scale1', 'd', ('record',))
            self.fncAddit1 = self.fncIndex.createVariable('addit1', 'd', ('record',))
            self.fncValue1 = self.fncData.createVariable('value1', 'h', ('record', 'ypts', 'xpts'))
        self.fncIndex.sync()
        self.fncData.sync()
        os.chmod(newIndex, 0o775)
        os.chmod(newData, 0o775)
    else:
        #
        # Existing file pair ... sanity-check and hook up variables.
        #
        ivarnames = list(self.fncIndex.variables.keys())
        dvarnames = list(self.fncData.variables.keys())
        for name in ('fcstr', 'btime', 'stime', 'etime', 'vtime', 'scale', 'addit'):
            if name not in ivarnames:
                self.logMsg("Corrupt index file for %s %s detected" % (model, parm))
                return 0
        if 'value' not in dvarnames:
            self.logMsg("Corrupt data file for %s %s detected" % (model, parm))
            return 0
        self.fncFcstr = self.fncIndex.variables['fcstr']
        if len(self.fncFcstr.shape) != 2:
            self.logMsg("Old index file (pre version 1.0) detected for %s %s" % (model, parm))
            return 0
        self.fncBtime = self.fncIndex.variables['btime']
        self.fncStime = self.fncIndex.variables['stime']
        self.fncEtime = self.fncIndex.variables['etime']
        self.fncVtime = self.fncIndex.variables['vtime']
        self.fncScale = self.fncIndex.variables['scale']
        self.fncAddit = self.fncIndex.variables['addit']
        self.fncValue = self.fncData.variables['value']
        if datatype == 1:
            if ('scale1' not in ivarnames) or ('addit1' not in ivarnames):
                self.logMsg("Corrupt index file for %s %s detected" % (model, parm))
                return 0
            if 'value1' not in dvarnames:
                self.logMsg("Corrupt data file for %s %s detected" % (model, parm))
                return 0
            self.fncScale1 = self.fncIndex.variables['scale1']
            self.fncAddit1 = self.fncIndex.variables['addit1']
            self.fncValue1 = self.fncData.variables['value1']
    #
    # Remember what is open and cache the record bookkeeping.
    #
    self.fncParm = parm
    self.fncModel = model
    self.fncModify = modify
    self.fnumRecs = self.fncStime.shape[0]
    self.fncRecs = indices((self.fnumRecs,))[0]
    self.fncType = datatype
    return 1
#=================================================================
# checkStats - given a parm, model, and obsmodel, see if the
#              stats file for this is open.  If not, close any
#              current stat file.  If modify is 1, then it
#              opens the file for writing - which locks it
#              from writing by others.
#
#              returns 0 if there is trouble opening file
#
def checkStats(self, parm, model, obsmodel, modify=0):
    #
    # Fast path: the requested stats files are already open.
    #
    if (parm == self.sncParm and model == self.sncModel and
            obsmodel == self.sncObsModel and modify == self.sncModify):
        return 1
    #
    # A different stats file is currently open - close it first.
    #
    if self.sncParm != "":
        self.closeStatsFile()
    #
    # Build the filenames and see if they already exist.
    #
    self.makeStatDir(parm, modify)
    newData = "%s/Stats/%s/%s_%s_%s_data.nc" % (self.VERDIR, parm, model, parm, obsmodel)
    newIndex = "%s/Stats/%s/%s_%s_%s_index.nc" % (self.VERDIR, parm, model, parm, obsmodel)
    already = os.path.exists(newIndex)
    #
    # Cannot read data from a file that does not exist.
    #
    if (not already) and (modify == 0):
        return 0
    mode = "r" if modify == 0 else "a"
    #
    # Open the two stats files: the index and the data.
    #
    self.sncIndex = NetCDF.NetCDFFile(newIndex, mode)
    self.sncData = NetCDF.NetCDFFile(newData, mode)
    if not already:
        #
        # New file pair ... create the dimensions and variables.
        #
        self.sncData.createDimension("maxareas", self.STATAREAS)
        self.sncData.createDimension("maxstats", self.STATTYPES)
        self.sncData.createDimension("record", None)
        self.sncIndex.createDimension("record", None)
        self.sncIndex.createDimension("maxfcstrs", self.MAXFCSTRS)
        self.sncFcstr = self.sncIndex.createVariable('fcstr', 'b', ('record', 'maxfcstrs'))
        self.sncBtime = self.sncIndex.createVariable('btime', 'i', ('record',))
        self.sncStime = self.sncIndex.createVariable('stime', 'i', ('record',))
        self.sncEtime = self.sncIndex.createVariable('etime', 'i', ('record',))
        self.sncVtime = self.sncIndex.createVariable('vtime', 'i', ('record',))
        self.sncCycle = self.sncIndex.createVariable('cycle', 'b', ('record',))
        self.sncFhour = self.sncIndex.createVariable('fhour', 'h', ('record',))
        self.sncStats = self.sncData.createVariable('stats', 'f', ('record', 'maxareas', 'maxstats'))
        self.sncIndex.sync()
        self.sncData.sync()
        os.chmod(newIndex, 0o775)
        os.chmod(newData, 0o775)
    else:
        #
        # Existing file pair ... hook up variables to the netCDF files.
        #
        self.sncFcstr = self.sncIndex.variables['fcstr']
        self.sncBtime = self.sncIndex.variables['btime']
        self.sncStime = self.sncIndex.variables['stime']
        self.sncEtime = self.sncIndex.variables['etime']
        self.sncVtime = self.sncIndex.variables['vtime']
        self.sncCycle = self.sncIndex.variables['cycle']
        self.sncFhour = self.sncIndex.variables['fhour']
        self.sncStats = self.sncData.variables['stats']
    #
    # Remember what is open and cache the record bookkeeping.
    #
    self.sncParm = parm
    self.sncModel = model
    self.sncObsModel = obsmodel
    self.sncModify = modify
    self.sncNumRecs = self.sncBtime.shape[0]
    self.sncRecs = indices((self.sncNumRecs,))[0]
    return 1
#=================================================================
# closeStatsFile - if a stat file is open, close it and free up
#                  all the structures associated with it.
+ # + def closeStatsFile(self): + if (not(self.sncParm=="")): + self.sncIndex.close() + self.sncData.close() + del self.sncIndex + del self.sncData + del self.sncFcstr + del self.sncBtime + del self.sncStime + del self.sncEtime + del self.sncVtime + del self.sncCycle + del self.sncFhour + del self.sncStats + del self.sncRecs + self.sncParm="" + self.sncModel="" + self.sncObsModel="" + self.sncNumRecs=-1 + self.sncModify=0 + return + #================================================================= + # getBases - get sorted list of all base times (model run times) + # stored for an inputParm and model. Can be helpful + # when figuring out all possible model run times + # (though, this is really only the model run times + # we have SAVED - not that COULD exist). + # + # If model is in the OBSMODELS list, then it returns + # all the times of the saved OBSMODEL grids - which are + # the same as the grid start times. + # + # if no data file exists for inputParm and model, it + # returns an empty list. + # + def getBases(self,inputParm,model): + Bases=[] + if not self.checkFile(inputParm,model): + return Bases + if model in self.OBSMODELS: + for i in range(self.onumRecs): + if self.oncBtime[i] not in Bases: + Bases.append(self.oncBtime[i]) + else: + for i in range(self.fnumRecs): + if self.fncBtime[i] not in Bases: + Bases.append(self.fncBtime[i]) + Bases.sort() + return Bases + #================================================================= + # getStarts - get sorted list of all grid start times stored for + # an inputParm and model. Can be helpful when + # figuring out all possible times (though, this is + # really only the times we have SAVED - not all those + # that COULD exist). Some forecast ones may not be + # verified yet! + # + # if no data file exists for inputParm and model, it + # returns an empty list. 
+ # + def getStarts(self,inputParm,model): + Starts=[] + if not self.checkFile(inputParm,model): + return Starts + if model in self.OBSMODELS: + for i in range(self.onumRecs): + if self.oncStime[i] not in Starts: + Starts.append(self.oncStime[i]) + else: + for i in range(self.fnumRecs): + if self.fncStime[i] not in Starts: + Starts.append(self.fncStime[i]) + Starts.sort() + return Starts + #================================================================== + # getFhours - get sorted list of all forecast hour times stored + # for inputParm and model. Can be helpful when + # figuring out all possible times, By default, gets + # forecast hours from all cycle times - but if + # cycle time is non-negative, then it figures all + # the forecast times that have the specified + # cycle-time. + # + # if model is in OBSMODELS list, then start times are + # the same as base times, and only 0 should be in the + # list. + # + # if no data file exists for inputParm and model, it + # returns an empty list. 
#
def getFhours(self, inputParm, model, cycle=-1):
    Fhours = []
    if not self.checkFile(inputParm, model):
        return Fhours
    if model in self.OBSMODELS:
        # Observed grids verify themselves: only forecast hour 0.
        Fhours.append(0)
        return Fhours
    # NOTE(review): true division here yields float forecast hours in
    # Python 3 (the original Python 2 code got ints) - confirm callers.
    fhr = (self.fncStime[:] - self.fncBtime[:]) / HOURSECS
    if cycle >= 0:
        # Restrict to records whose basetime falls on the requested
        # cycle hour (basetime modulo one day, in hours).
        cyclehrs = (self.fncBtime[:] - ((self.fncBtime[:] / DAYSECS).astype(int) * DAYSECS)) / HOURSECS
        for i in range(self.fnumRecs):
            if cyclehrs[i] == cycle:
                if fhr[i] not in Fhours:
                    Fhours.append(fhr[i])
    else:
        for i in range(self.fnumRecs):
            if fhr[i] not in Fhours:
                Fhours.append(fhr[i])
    Fhours.sort()
    return Fhours
#=================================================================
# getFcstRecords - gets a sorted list of record numbers where the
#                  basetime is the same as the specified basetime
#
def getFcstRecords(self, parm, model, basetime):
    Records = []
    ret = self.checkFile(parm, model)
    if ret == 0:
        return Records
    if model in self.OBSMODELS:
        use = equal(self.oncBtime[:], basetime)
        a = compress(use, self.oncRecs)
    else:
        use = equal(self.fncBtime[:], basetime)
        a = compress(use, self.fncRecs)
    Records = list(a)
    return Records
#=================================================================
# getFcstHour - take a basetime and a starttime and calculate the
#               number of hours between them
#
def getFcstHour(self, Btime, Stime):
    hours = int(round(((Stime - Btime) / 3600), 0))
    return hours
#=================================================================
# getRecFcstHour - given a record in the forecast file, calculate
#                  the forecast hour, based on the basetime and
#                  starttime
#
def getRecFcstHour(self, rec):
    rec = int(rec)
    btime = self.fncBtime[rec]
    stime = self.fncStime[rec]
    return self.getFcstHour(btime, stime)
#=================================================================
# getVerTimeRange - given a starttime and endtime, create a
#                   TimeRange object covering this time
#
def getVerTimeRange(self, Stime, Etime):
    start = AbsTime.AbsTime(Stime)
    end = AbsTime.AbsTime(Etime)
    tr = TimeRange.TimeRange(start, end)
    return tr
#=================================================================
# readRecord - read and unpack a gridded data record.  handles
#              opening/closing files, and whether this is an
#              observed model or a forecast model.  If the parm
#              is a vector, returns a tuple with mag,dir.  If it
#              cannot read - then it returns None
#
#              Note vertical flip of the grid is done so it's in
#              the AWIPS II order.
#
def readRecord(self, parm, model, rec):
    if self.checkFile(parm, model) == 0:
        return None
    rec = int(rec)
    if model in self.OBSMODELS:
        if self.oncType == 0:
            vals = (self.oncValue[rec].astype(float) * self.oncScale[rec]) + self.oncAddit[rec]
            return flipud(vals)
        else:
            mags = (self.oncValue[rec].astype(float) * self.oncScale[rec]) + self.oncAddit[rec]
            dirs = (self.oncValue1[rec].astype(float) * self.oncScale1[rec]) + self.oncAddit1[rec]
            return (flipud(mags), flipud(dirs))
    else:
        if self.fncType == 0:
            vals = (self.fncValue[rec].astype(float) * self.fncScale[rec]) + self.fncAddit[rec]
            return flipud(vals)
        else:
            mags = (self.fncValue[rec].astype(float) * self.fncScale[rec]) + self.fncAddit[rec]
            dirs = (self.fncValue1[rec].astype(float) * self.fncScale1[rec]) + self.fncAddit1[rec]
            return (flipud(mags), flipud(dirs))
#=================================================================
# packIt - convert a scalar grid into packed 16-bit integer
#          equivalent, with a float scale and offset that can
#          be used to get back all the data exactly.
#
def packIt(self, grid):
    #
    # convert the grid to its packed equivalent
    #
    minval = minimum.reduce(minimum.reduce(grid))
    maxval = maximum.reduce(maximum.reduce(grid))
    diff = maxval - minval
    diff = diff + (0.05 * diff)  # make range a little wider so that roundoff
    #                              will not make packed integers larger than
    #                              what will fit.
    Scale = diff / 65534.0
    if Scale == 0.0:
        # Constant grid: any scale works; 1.0 avoids divide-by-zero.
        Scale = 1.0
    Addit = (minval + maxval) / 2.0
    Valgrid = ((grid - Addit) / Scale).astype('h')
    return (Scale, Addit, Valgrid)
#=================================================================
# writeVals - write gridded data.  Overwrite data for same time
#             if it already exists, or replace old data in the
#             file (if any) or append it to the file.
#
#             return 0 if a problem writing.  Returns -1 if it
#             skipped writing it because it matches what is
#             already there.
#
def writeVals(self, parm, model, fcstrID, Btime, Stime, Etime, Grid):
    self.logMsg("Starting writeVals in VerifyUtility", 10)
    #
    # get datatype
    #
    datatype = self.getVerParmType(parm)
    if datatype is None:
        return 0
    #
    # Check that the correct file is open - and ready to modify
    #
    if not self.checkFile(parm, model, modify=1):
        return 0
    #
    # flip grid to AWIPS I grid-point order (as in the netCDF file)
    #
    if datatype != 1:
        Grid = flipud(Grid)
    else:
        Grid = flipud(Grid[0]), flipud(Grid[1])
    #
    # Pack scalars directly; vectors pack magnitude and direction.
    #
    if datatype != 1:
        (Scale, Addit, Valgrid) = self.packIt(Grid)
    else:
        (mag, direc) = Grid
        (Scale, Addit, Valgrid) = self.packIt(mag)
        (Scale1, Addit1, Valgrid1) = self.packIt(direc)
    #
    # Get forecaster number
    #
    if model == "Official":
        fcstrNum = self.findFcstrNumFromID(fcstrID)
    else:
        fcstrNum = 0
    #
    # Record-slot search bookkeeping.
    #
    recnum = -1
    overrec = -1
    oldest = time.time()
    veryOld = int(time.time()) - self.GRIDDAYS * DAYSECS
    #
    # Figure if this is for the Obs grid or a Fcst grid
    #
    if model in self.OBSMODELS:
        #
        # If nothing there - just adding record 0
        #
        if self.onumRecs == 0:
            recnum = 0
        #
        # If there are records...see if one with the exact
        # same times exists - if so we can overwrite.
        #
        if recnum < 0:
            s = equal(self.oncStime[:], Stime)
            b = equal(self.oncBtime[:], Btime)
            e = equal(self.oncEtime[:], Etime)
            use = logical_and(logical_and(b, s), e)
            if sometrue(sometrue(use)):
                a = compress(use, self.oncRecs)
                recnum = int(a[0])
                self.logMsg("existing record %d for that time" % recnum, 10)
        #
        # if still no record found - find if there are any old
        # ones to overwrite
        #
        if recnum == -1:
            overrec = int(argmin(self.oncStime[:], 0))
            if self.DEBUG > 0:
                self.logMsg(" oldest record is %s" % overrec)
            oldest = self.oncStime[overrec]
            # NOTE(review): this comparison was garbled in the source
            # text ("if oldest0:"); restored as oldest<veryOld per the
            # surviving else-branch and the parallel forecast branch -
            # TODO confirm against the upstream file.
            if oldest < veryOld:
                if self.DEBUG > 0:
                    self.logMsg(" and it is old enough to overwrite")
                recnum = overrec
            else:
                if self.DEBUG > 0:
                    self.logMsg(" but not old enough to overwrite")
        #
        # If STILL no record found - add to the current file
        #
        if recnum == -1:
            recnum = self.onumRecs
        #
        # Change the data
        #
        recnum = int(recnum)
        self.oncFcstr[recnum, 0] = fcstrNum
        for i in range(1, self.MAXFCSTRS):
            self.oncFcstr[recnum, i] = 0
        self.oncBtime[recnum] = Btime
        self.oncStime[recnum] = Stime
        self.oncEtime[recnum] = Etime
        self.oncVtime[recnum] = time.time()
        self.oncScale[recnum] = Scale
        self.oncAddit[recnum] = Addit
        self.oncValue[recnum] = Valgrid
        if datatype == 1:
            self.oncScale1[recnum] = Scale1
            self.oncAddit1[recnum] = Addit1
            self.oncValue1[recnum] = Valgrid1
        #
        # If we added a record - need to increase
        # the indicies and the number of records counter
        #
        if recnum == self.onumRecs:
            self.onumRecs = self.onumRecs + 1
            self.oncRecs = indices((self.onumRecs,))[0]
        # self.oncIndex.sync()
        # self.oncData.sync()
        self.closeObsFile()
    else:
        #
        # If nothing there - just adding record 0
        #
        if self.fnumRecs == 0:
            recnum = 0
        #
        # If there are records...see if one with the exact
        # same times exists - if so we can overwrite.
        #
        if recnum < 0:
            s = equal(self.fncStime[:], Stime)
            b = equal(self.fncBtime[:], Btime)
            e = equal(self.fncEtime[:], Etime)
            use = logical_and(logical_and(b, s), e)
            if sometrue(sometrue(use)):
                a = compress(use, self.fncRecs)
                recnum = int(a[0])
                self.logMsg("existing record %d for that time" % recnum, 10)
                issame = alltrue(alltrue(equal(self.fncValue[recnum], Valgrid)))
                if issame:
                    # Identical grid already stored: just refresh the
                    # archive time and report "skipped" via -1.
                    self.logMsg(" is exactly the same", 10)
                    self.logMsg(" updating archive time", 10)
                    self.fncVtime[recnum] = time.time()
                    return -1
        #
        # if still no record found - find if there are any old
        # ones to overwrite
        #
        if recnum == -1:
            overrec = int(argmin(self.fncStime[:], 0))
            if self.DEBUG > 0:
                self.logMsg(" oldest record is %s" % overrec)
            oldest = self.fncStime[overrec]
            # NOTE(review): comparison restored from garbled source
            # text ("if oldest0:") - see matching obs branch above.
            if oldest < veryOld:
                if self.DEBUG > 0:
                    self.logMsg(" and it is old enough to overwrite")
                recnum = overrec
            else:
                if self.DEBUG > 0:
                    self.logMsg(" but not old enough to overwrite")
        #
        # If STILL no record found - add to the current file
        #
        if recnum == -1:
            recnum = self.fnumRecs
        #
        # Change the data
        #
        self.fncFcstr[recnum, 0] = fcstrNum
        for i in range(1, self.MAXFCSTRS):
            self.fncFcstr[recnum, i] = 0
        self.fncBtime[recnum] = Btime
        self.fncStime[recnum] = Stime
        self.fncEtime[recnum] = Etime
        self.fncVtime[recnum] = time.time()
        self.fncScale[recnum] = Scale
        self.fncAddit[recnum] = Addit
        self.fncValue[recnum] = Valgrid
        if datatype == 1:
            self.fncScale1[recnum] = Scale1
            self.fncAddit1[recnum] = Addit1
            self.fncValue1[recnum] = Valgrid1
        #
        # If we added a record - need to increase
        # the indicies and the number of records counter
        #
        if recnum == self.fnumRecs:
            self.fnumRecs = self.fnumRecs + 1
            self.fncRecs = indices((self.fnumRecs,))[0]
        self.closeFcstFile()
    self.logMsg("Done with writeVals in VerifyUtility", 10)
    return 1
#=================================================================
# getDoAgain - given a parm,model and btime, stime, etime
#              - see if a record for stats already exists.
If so, + # then get the time that those stats were calculated + # If the time was earlier than the observed or forecast + # grid savetime (ovtime, fvtime) then return 1 to + # indicate that we need to re-calculate these stats. + # If the the record for these stats to NOT exist - then + # we have to return 1 to indicate that these stats need + # to be calculated the first time. + # + def getDoAgain(self,parm,model,obsmodel,btime,stime,etime,ovtime,fvtime): + self.logMsg("Starting getDoAgain in VerifyUtility",5) + if not self.checkStats(parm,model,obsmodel): + return 1 + # + # If nothing in current stat file - need to add a record + # + if self.sncNumRecs==0: + return 1 + # + # If there are records...see if one with the exact + # same times exists + # + s=equal(self.sncStime[:],stime) + b=equal(self.sncBtime[:],btime) + e=equal(self.sncEtime[:],etime) + use=logical_and(logical_and(b,s),e) + if sometrue(sometrue(use)): + # + # get record number + # + a=compress(use,self.sncRecs) + recnum=int(a[0]) + # + # If the time of the stat save is after BOTH grid + # were saved - then do not need to do stats again + # + savetime=self.sncVtime[recnum] + if ((savetime>ovtime)and(savetime>fvtime)): + return 0 + return 1 + #================================================================= + # writeStats - write stat data. Overwrite data for same time if it + # already exists, or replace old data in the file (if any) + # or append it to the file. 
+ # + def writeStats(self,parm,model,obsmodel,fcstrNums,Btime,Stime,Etime,Cycle, + Fhour,Stats): + self.logMsg("Starting writeStats in VerifyUtility",10) + if not self.checkStats(parm,model,obsmodel,modify=1): + return 0 + oldest=time.time() + veryOld=int(time.time())-self.STATDAYS*DAYSECS + # + # set record to missing (-1) + # + recnum=-1 + overrec=-1 + # + # If nothing in current stat file - just adding record 0 + # + if self.sncNumRecs==0: + recnum=0 + # + # If there are records...see if one with the exact + # same times exists - if so we can overwrite. + # + else: + s=equal(self.sncStime[:],Stime) + b=equal(self.sncBtime[:],Btime) + e=equal(self.sncEtime[:],Etime) + use=logical_and(logical_and(b,s),e) + if sometrue(sometrue(use)): + a=compress(use,self.sncRecs) + recnum=int(a[0]) + self.logMsg("overwriting existing record:%d"%recnum,5) + # + # if still no record found - find if there are any old + # ones to overwrite + # + if (recnum==-1): + overrec=int(argmin(self.sncStime[:],0)) + self.logMsg(" oldest record is %s"%overrec,10) + oldest=self.sncStime[overrec] + if oldest=0): + obslist.append(iobrec) + pairList.append((obslist,flists[obrec])) + return pairList + #================================================================== + # getCases - gets caseInfo structure...can either be for 'common + # cases' or normal. 
+ # + def getCases(self,readParm,models,obsParm,obsmodel,dateStyle, + dateType,fromDay=0,numDays=0,dayList=[], + fcstrs=-1,cycles=-1,fhrStart=-48,fhrEnd=-48, + accumHours=12,accumFreq=12, + requireObs=1,commonCases=1,basetimeOffsets=0, + callbackMethod=None): + if commonCases==1: + caseInfo=self.getCommonCases(readParm,models,obsParm,obsmodel, + dateStyle,dateType,fromDay=fromDay,numDays=numDays, + dayList=dayList,fcstrs=fcstrs, + cycles=cycles,fhrStart=fhrStart, + fhrEnd=fhrEnd,accumHours=accumHours, + accumFreq=accumFreq,requireObs=requireObs, + basetimeOffsets=basetimeOffsets, + callbackMethod=callbackMethod) + else: + caseInfo={} + for model in models: + cases=self.getCommonCases(readParm,model,obsParm,obsmodel, + dateStyle,dateType,fromDay=fromDay,numDays=numDays, + dayList=dayList,fcstrs=fcstrs,cycles=cycles, + fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, + accumFreq=accumFreq,requireObs=requireObs, + basetimeOffsets=basetimeOffsets, + callbackMethod=callbackMethod) + caseInfo[model]=cases[model] + return caseInfo + #================================================================== + # getCommonCases - obtain dictionary of records for common cases. + # Keys are each model in modelList. The value for + # each model is, itself, a dictionary, with keys + # of 'basetime,starttime,endtime' and a value of + # a two-element list. The first element is, + # itself, a list - with records that make up + # the forecast, and the second element is, + # itself, a list - with records that make up the + # observation + # + # if obsRequired is zero - it will allow + # cases without observations...in which case the + # second list will be empty. 
+ # + def getCommonCases(self,parm,models,obsParm,obsModel,dateStyle, + dateType,fromDay=0,numDays=0,dayList=[], + fcstrs=-1,cycles=-1,fhrStart=-48, + fhrEnd=-48,accumHours=12, + accumFreq=12,requireObs=1,basetimeOffsets=0, + callbackMethod=None): + self.logMsg("start getCommonCases",10) + finalCases={} + self.callbackMethod=callbackMethod + # + # Get all the verifing cases + # + self.internalMessage="Observations:" + obsCases=self.getObsCases(obsParm,obsModel,accumHours,accumFreq, + dateStyle,dateType,fromDay,numDays,dayList, + self.internCB) + # + # Check to see if stopping + # + if self.callbackMethod is not None: + exit=self.callbackMethod(self.internalMessage) + if exit==1: + return finalCases + # + obskeys=list(obsCases.keys()) + numgrids=len(obskeys) + self.logMsg("Observed cases:%d"%numgrids,5) + obskeys.sort() + if self.getDebug()>=5: + for obskey in obskeys: + (st,en)=obskey.split(",") + (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(st)) + (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(en)) + self.logMsg(" Obs for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(syea, + smon,sday,shou,eyea,emon,eday,ehou),10) + # + allCases={} + # + # See if models is a list, or a single model - putting them into + # modelList for further processing + # + modelList=[] + if ((type(models) is list) or (type(models) is tuple)): + for model in models: + modelList.append(model) + else: + modelList.append(models) + # + # Loop over all the models + # + totalmodels=len(modelList) + modelcount=0 + for model in modelList: + modelcount+=1 + if totalmodels>1: + self.internalMessage="%s (%d of %d):"%(model,modelcount,totalmodels) + else: + self.internalMessage="%s:"%model + cases=self.getModelCases(parm,model,fcstrs,cycles, + fhrStart,fhrEnd,accumHours, + accumFreq,dateStyle,dateType,fromDay,numDays, + dayList,self.internCB) + if self.callbackMethod is not None: + exit=self.callbackMethod(self.internalMessage) + if exit==1: + return finalCases + 
casekeys=list(cases.keys()) + numgrids=len(casekeys) + self.logMsg("%s has %d potential cases"%(model,numgrids),5) + if self.getDebug()>=5: + casekeys.sort() + for casekey in casekeys: + (base,st,en)=casekey.split(",") + (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(int(base)) + (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(st)) + (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(en)) + self.logMsg(" Model potential from %4.4d/%2.2d/%2.2d %2.2dZ for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(byea, + bmon,bday,bhou,syea,smon,sday,shou,eyea,emon,eday,ehou),10) + # + # If obs are required...look through cases and make sure that the + # period is available in the obsCases retreived above + # + if requireObs!=0: + noobs=0 + for key in casekeys: + (base,rest)=key.split(",",1) + if rest not in obskeys: + (st,en)=rest.split(",") + (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(int(base)) + (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(st)) + (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(en)) + self.logMsg(" deleting case for no Obs - from %4.4d/%2.2d/%2.2d %2.2dZ for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(byea, + bmon,bday,bhou,syea,smon,sday,shou,eyea,emon,eday,ehou),10) + del cases[key] + noobs=noobs+1 + self.logMsg(" %d cases deleted because they have no obs"%noobs,5) + allCases[model]=cases + # + # With only 1 model common cases are easy! 
+ # + if totalmodels==1: + finalkeys=list(allCases[model].keys()) + # + # For mulitple models...Get keys for each model...convert them to + # the offsetBasetime (if basetimeOffsets==1)...and find the + # model with the fewest keys + # + else: + if self.callbackMethod is not None: + exit=self.callbackMethod("filtering through models") + if exit==1: + return finalCases + finalkeys=[] + modkeys={} + minmod="" + minkeys=-1 + for model in modelList: + realKeys=list(allCases[model].keys()) + if basetimeOffsets==1: + baseOffset=self.getBaseOffset(model) + else: + baseOffset=0 + if baseOffset==0: + testKeys=realKeys + else: + testKeys=[] + for key in realKeys: + (basetimeStr,starttimeStr,endtimeStr)=key.split(",") + basetime="%d"%(int(basetimeStr)+(baseOffset*HOURSECS)) + newkey="%s,%s,%s"%(basetime,starttimeStr,endtimeStr) + testKeys.append(newkey) + modkeys[model]=testKeys + numkeys=len(modkeys[model]) + if ((minkeys==-1)or(numkeys=10: + for key in finalkeys: + (base,start,end)=key.split(",") + (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(int(base)) + (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(start)) + (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(end)) + self.logMsg(" %4.4d/%2.2d/%2.2d %2.2dZ for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(byea, + bmon,bday,bhou,syea,smon,sday,shou,eyea,emon,eday,ehou),10) + # + # Make the final case structure - with list of forecast/observed + # records for each case. If basetimeOffsets==1, then the finalkeys may + # need to be converted back to real keys for each model. The keys of + # returned lists will be with the offset 'basetimes'. 
+ # + finalCases={} + for model in modelList: + cases={} + modCases=allCases[model] + if ((basetimeOffsets==1)and(totalmodels!=1)): + baseOffset=self.getBaseOffset(model) + else: + baseOffset=0 + for key in finalkeys: + if baseOffset!=0: + (offsetBasetime,starttime,endtime)=key.split(",") + realkey="%d,%s,%s"%(int(offsetBasetime)-(baseOffset*HOURSECS),starttime,endtime) + else: + realkey=key + frecList=modCases[realkey] + (base,rest)=key.split(",",1) + if rest in obskeys: + orecList=obsCases[rest] + else: + orecList=[] + cases[key]=[frecList,orecList] + finalCases[model]=cases + self.logMsg("end getCommonCases",10) + return finalCases + #================================================================== + # internCB + # + def internCB(self,message): + fullmessage="%s %s"%(self.internalMessage,message) + retval=0 + if self.callbackMethod is not None: + retval=self.callbackMethod(fullmessage) + return retval + #================================================================= + # getModelCases - return a dictionary for the specified model of + # forecast records for the specified periods. + # The keys are "basetime,starttime,endtime", and + # the values in the dictionary are lists of records + # (these are lists, because it can take multiple + # records to cover long time periods for accumulative + # parms, or probability parms). + # + def getModelCases(self,parm,model,fcstrs,cycles,fhrStart,fhrEnd, + accumHours,accumFreq,dateStyle, + dateType,fromDay,numDays,dayList, + callbackMethod=None): + self.logMsg("start getModelCases",10) + verType=self.getVerType(parm) + rateFlag=self.getRateFlag(model,parm) + cases={} + dateStyleLow=dateStyle.lower() + dateTypeLow=dateType.lower() + # + # Give up right away if you cant open the model file + # + if not self.checkFile(parm,model): + return cases + # + # Setup logical array with records that contain the right Forecaster, + # the right cycle, and the right forecast hours. If none...get out + # right away. 
+ # + rightRecord=self.getFcstrCycleFhr(model,fcstrs,cycles,fhrStart, + fhrEnd) + numRecs=add.reduce(rightRecord) + self.logMsg("number of records with right forecaster, cycle, hours:%d"%numRecs,10) + # + # If a probability parm, or an accumulative parm, then find + # cases where forecasts completely coverred the possible + # periods. + # + if ((verType==1)or(rateFlag==1)): + # + # Get potential verifying periods for this accumHour,accumFreq + # combination with the dateStyle/dateType/fromDay/numDays/ + # dayList combination + # + verPeriods=self.createObsPeriods(accumHours,accumFreq,dateStyle, + dateType,fromDay,numDays,dayList) + numPeriods=len(verPeriods) + self.logMsg("number of periods:%d"%numPeriods,10) + # + # Loop over potential periods...and find matching records + # + count=0 + if dateStyleLow=="forecast on": + fromPeriods=self.createFromPeriods(dateType,fromDay,numDays,dayList) + for verPer in verPeriods: + count=count+1 + if callbackMethod is not None: + exit=callbackMethod("%d of %d"%(count,numPeriods)) + if exit==1: + return cases + (stime,etime)=verPer + totalTime=etime-stime + recmatch=logical_and(rightRecord, + logical_and(greater(self.fncEtime[:],stime), + less(self.fncStime[:],etime))) + # + # When there are matching records...find each basetime that + # forecast for this period + # + if sometrue(recmatch): + recnumberList=list(compress(recmatch,self.fncRecs)) + baselist=[] + for rec in recnumberList: + rec = int(rec) + if self.fncBtime[rec] not in baselist: + baselist.append(self.fncBtime[rec]) + # + # And for each basetime...see if the period was coverred + # by forecast grids + # + for base in baselist: + # + # + # + if dateStyleLow=="forecast on": + okbase=0 + for testper in fromPeriods: + (perstart,perend)=testper + if ((base>=perstart)and(base=totalTime: + key="%d,%d,%d"%(base,stime,etime) + cases[key]=reclist + # + # Other parms get forecast periods based on the forecast grids + # that were actually made + # + else: + if 
dateStyleLow=="verifying on": + if dateTypeLow=="period length": + if callbackMethod is not None: + exit=callbackMethod("1 of 1") + if exit==1: + return cases + recList=self.getObsPeriod(model,parm,fromDay,numDays,mask=rightRecord) + else: + recList=self.getObsList(model,parm,dayList,mask=rightRecord, + callbackMethod=callbackMethod) + else: + if dateTypeLow=="period length": + if callbackMethod is not None: + exit=callbackMethod("1 of 1") + if exit==1: + return cases + starttime=fromDay-((numDays-1)*DAYSECS) + endtime=fromDay+DAYSECS-1 + recList=self.listRecords(parm,model,starttime,endtime,"forecast",rightRecord) + else: + recList=[] + count=0 + totalDays=len(dayList) + for date in dayList: + count=count+1 + if callbackMethod is not None: + exit=callbackMethod("%d of %d"%(count,totalDays)) + if exit==1: + return cases + if type(date) is bytes: + try: + (yea,mon,day)=date.split("/") + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) + except: + continue + else: + starttime=date + endtime=starttime+DAYSECS-1 + tmprecs=self.listRecords(parm,model,starttime,endtime,"forecast",rightRecord) + for rec in tmprecs: + recList.append(rec) + # + # Now make case entries for each of these records + # + for rec in recList: + rec = int(rec) + base=self.fncBtime[rec] + stime=self.fncStime[rec] + etime=self.fncEtime[rec] + key="%d,%d,%d"%(base,stime,etime) + cases[key]=[rec] + self.logMsg("end getModelCases",10) + return cases + #================================================================= + # getObsCases - return a dictionary for the specified obs model of + # records for the specified periods. + # The keys are "starttime,endtime", and + # the values in the dictionary are lists of records + # (these are lists, because it can take multiple + # records to cover long time periods for accumulative + # parms, or probability parms). 
+ # + def getObsCases(self,parm,model, + accumHours,accumFreq,dateStyle, + dateType,fromDay,numDays,dayList, + callbackMethod=None): + self.logMsg("start getObsCases",10) + cases={} + dateStyleLow=dateStyle.lower() + dateTypeLow=dateType.lower() + # + # Give up right away if you cant open the model file + # + if not self.checkFile(parm,model): + return cases + # + # If a probability parm, or an accumulative parm, then find + # cases where observations completely coverred the possible + # periods. + # + rateFlag=self.getRateFlag(model,parm) + if (rateFlag==1): + # + # Get potential verifying periods for this accumHour,accumFreq + # combination with the dateStyle/dateType/fromDay/numDays/ + # dayList combination + # + verPeriods=self.createObsPeriods(accumHours,accumFreq,dateStyle, + dateType,fromDay,numDays,dayList) + numPeriods=len(verPeriods) + self.logMsg("number of periods:%d"%numPeriods,10) + # + # Loop over potential periods...and find matching records + # + count=0 + for verPer in verPeriods: + count=count+1 + if callbackMethod is not None: + exit=callbackMethod("%d of %d"%(count,numPeriods)) + if exit==1: + return cases + (stime,etime)=verPer + totalTime=etime-stime + recmatch=logical_and(greater(self.oncEtime[:],stime), + less(self.oncStime[:],etime)) + # + # When there are matching records...find each basetime that + # forecast for this period + # + if sometrue(recmatch): + recnumberList=list(compress(recmatch,self.oncRecs)) + totcov=0 + for rec in recnumberList: + rec = int(rec) + recstart=self.oncStime[rec] + recend=self.oncEtime[rec] + cover=min(etime-recstart,recend-stime,recend-recstart) + totcov=totcov+cover + if totcov==totalTime: + key="%d,%d"%(stime,etime) + cases[key]=recnumberList + # + # Other parms get forecast periods based on the forecast grids + # that were actually made + # + else: + if dateStyleLow=="verifying on": + if dateTypeLow=="period length": + if callbackMethod is not None: + exit=callbackMethod("1 of 1") + if exit==1: + return 
cases + recList=self.getObsPeriod(model,parm,fromDay,numDays) + else: + recList=self.getObsList(model,parm,dayList,callbackMethod=callbackMethod) + else: + if dateTypeLow=="period length": + if callbackMethod is not None: + exit=callbackMethod("1 of 1") + if exit==1: + return cases + soff=self.getStartOffset(parm) + eoff=self.getEndOffset(parm) + starttime=fromDay-((numDays-1)*DAYSECS)+soff + endtime=fromDay+DAYSECS-1+(self.MAXFORECASTHOUR*HOURSECS)+eoff + recList=self.listRecords(parm,model,starttime, + endtime,"verify") + else: + recList=[] + soff=self.getStartOffset(parm) + eoff=self.getEndOffset(parm) + count=0 + totalDays=len(dayList) + for date in dayList: + count=count+1 + if callbackMethod is not None: + exit=callbackMethod("%d of %d"%(count,totalDays)) + if exit==1: + return cases + if type(date) is bytes: + try: + (yea,mon,day)=date.split("/") + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1)) + except: + continue + else: + starttime=date + endtime=starttime+(self.MAXFORECASTHOUR*HOURSECS)+eoff + starttime=starttime+soff + tmprecs=self.listRecords(parm,model,starttime, + endtime,"verify") + for rec in tmprecs: + recList.append(rec) + # + # Now make case entries for each of these records + # + for rec in recList: + rec = int(rec) + stime=self.oncStime[rec] + etime=self.oncEtime[rec] + key="%d,%d"%(stime,etime) + cases[key]=[rec] + self.logMsg("end getObsCases",10) + return cases + #================================================================== + # getStatCases - gets caseInfo structure for stats...can either be + # for 'common cases' or normal. 
+ # + def getStatCases(self,parm,models,obsmodel,dateStyle, + dateType,fromDay=0,numDays=0,dayList=[], + fcstrs=-1,cycles=-1,fhrStart=-48,fhrEnd=-48, + accumHours=12,accumFreq=12, + commonCases=1,basetimeOffsets=0, + callbackMethod=None): + if commonCases==1: + caseInfo=self.getStatCommonCases(parm,models,obsmodel, + dateStyle,dateType,fromDay=fromDay,numDays=numDays, + dayList=dayList,fcstrs=fcstrs, + cycles=cycles,fhrStart=fhrStart, + fhrEnd=fhrEnd,accumHours=accumHours, + accumFreq=accumFreq, + basetimeOffsets=basetimeOffsets, + callbackMethod=callbackMethod) + else: + caseInfo={} + for model in models: + cases=self.getStatCommonCases(parm,model,obsmodel, + dateStyle,dateType,fromDay=fromDay,numDays=numDays, + dayList=dayList,fcstrs=fcstrs,cycles=cycles, + fhrStart=fhrStart,fhrEnd=fhrEnd,accumHours=accumHours, + accumFreq=accumFreq, + basetimeOffsets=basetimeOffsets, + callbackMethod=callbackMethod) + caseInfo[model]=cases[model] + return caseInfo + #================================================================== + # getStatCommonCases - obtain dictionary of records for common cases. + # Keys are each model in modelList. The value for + # each model is, itself, a dictionary, with keys + # of 'basetime,starttime,endtime' and a value of + # a list of stat records. 
+ # + def getStatCommonCases(self,parm,models,obsModel,dateStyle, + dateType,fromDay=0,numDays=0,dayList=[], + fcstrs=-1,cycles=-1,fhrStart=-48, + fhrEnd=-48,accumHours=12, + accumFreq=12,basetimeOffsets=0, + callbackMethod=None): + self.logMsg("start getStatCommonCases",10) + self.callbackMethod=callbackMethod + allCases={} + # + # See if models is a list, or a single model - putting them into + # modelList for further processing + # + modelList=[] + if ((type(models) is list) or (type(models) is tuple)): + for model in models: + modelList.append(model) + else: + modelList.append(models) + # + # Loop over all the models + # + totalmodels=len(modelList) + modelcount=0 + for model in modelList: + modelcount+=1 + if totalmodels>1: + self.internalMessage="%s (%d of %d):"%(model,modelcount,totalmodels) + else: + self.internalMessage="%s:"%model + cases=self.getStatModelCases(parm,model,obsModel,dateStyle,dateType, + cycles=cycles,fcstrs=fcstrs,fhrStart=fhrStart,fhrEnd=fhrEnd, + fromDay=fromDay,numDays=numDays,dayList=dayList, + accumHours=accumHours,accumFreq=accumFreq, + callbackMethod=self.internCB) + casekeys=list(cases.keys()) + numgrids=len(casekeys) + self.logMsg("%s has %d pre-calculated cases"%(model,numgrids),5) + allCases[model]=cases + # + # With only 1 model common cases are easy! 
+ # + if totalmodels==1: + finalkeys=list(allCases[model].keys()) + # + # For mulitple models...Get keys for each model...convert them to + # the offsetBasetime (if basetimeOffsets==1)...and find the + # model with the fewest keys + # + else: + if self.callbackMethod is not None: + exit=self.callbackMethod("filtering through models") + if exit==1: + finalCases={} + return finalCases + finalkeys=[] + modkeys={} + minmod="" + minkeys=-1 + for model in modelList: + realKeys=list(allCases[model].keys()) + if basetimeOffsets==1: + baseOffset=self.getBaseOffset(model)*HOURSECS + else: + baseOffset=0 + if baseOffset==0: + testKeys=realKeys + else: + testKeys=[] + for key in realKeys: + (basetimeStr,starttimeStr,endtimeStr)=key.split(",") + basetime="%d"%(int(basetimeStr)+baseOffset) + newkey="%s,%s,%s"%(basetime,starttimeStr,endtimeStr) + testKeys.append(newkey) + modkeys[model]=testKeys + numkeys=len(modkeys[model]) + if ((minkeys==-1)or(numkeys=10: + for key in finalkeys: + (base,start,end)=key.split(",") + (byea,bmon,bday,bhou,bmin,bsec,bwda,byda,bdst)=time.gmtime(int(base)) + (syea,smon,sday,shou,smin,ssec,swda,syda,sdst)=time.gmtime(int(start)) + (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(int(end)) + self.logMsg(" %4.4d/%2.2d/%2.2d %2.2dZ for %4.4d/%2.2d/%2.2d %2.2dZ - %4.4d/%2.2d/%2.2d %2.2dZ"%(byea, + bmon,bday,bhou,syea,smon,sday,shou,eyea,emon,eday,ehou),10) + # + # Make the final case structure - with list of forecast/observed + # records for each case. If basetimeOffsets==1, then the finalkeys may + # need to be converted back to real keys for each model. The keys of + # returned lists will be with the offset 'basetimes'. 
+ # + finalCases={} + for model in modelList: + cases={} + modCases=allCases[model] + if ((basetimeOffsets==1)and(totalmodels!=1)): + baseOffset=self.getBaseOffset(model)*HOURSECS + else: + baseOffset=0 + for key in finalkeys: + if baseOffset!=0: + (offsetBasetime,starttime,endtime)=key.split(",") + realkey="%d,%s,%s"%(int(offsetBasetime)-baseOffset,starttime,endtime) + else: + realkey=key + cases[key]=modCases[realkey] + finalCases[model]=cases + self.logMsg("end getStatCommonCases",10) + return finalCases + #================================================================= + # getStatModelCases - return a dictionary for the specified model of + # forecast records for the specified periods. + # The keys are "basetime,starttime,endtime", and + # the values in the dictionary are lists of records + # (these are lists, because it can take multiple + # records to cover long time periods for accumulative + # parms, or probability parms). + # + def getStatModelCases(self,parm,model,obsmodel,dateStyle,dateType, + fromDay=0,numDays=0,dayList=[], + fcstrs=-1,cycles=-1,fhrStart=-48,fhrEnd=-48, + accumHours=12,accumFreq=12, + callbackMethod=None): + self.logMsg("start getStatModelCases",10) + verType=self.getVerType(parm) + rateFlag=self.getRateFlag(model,parm) + cases={} + dateStyleLow=dateStyle.lower() + dateTypeLow=dateType.lower() + # + # Give up right away if you cant open the model file + # + if not self.checkStats(parm,model,obsmodel): + return cases + # + # Setup logical array with records that contain the right Forecaster, + # the right cycle, and the right forecast hours. If none...get out + # right away. + # + rightRecord=self.getStatFcstrCycleFhr(model,fcstrs,cycles, + fhrStart,fhrEnd) + numRecs=add.reduce(rightRecord) + self.logMsg("number of records with right forecaster, cycle, hours:%d"%numRecs,10) + # + # If a probability parm, or an accumulative parm, then find + # cases where forecasts completely coverred the possible + # periods. 
+ # + if ((verType==1)or(rateFlag==1)): + # + # Get potential verifying periods for this accumHour,accumFreq + # combination with the dateStyle/dateType/fromDay/numDays/ + # dayList combination + # + verPeriods=self.createObsPeriods(accumHours,accumFreq,dateStyle, + dateType,fromDay,numDays,dayList) + numPeriods=len(verPeriods) + self.logMsg("number of periods:%d"%numPeriods,10) + # + # Loop over potential periods...and find matching records + # + count=0 + for verPer in verPeriods: + count=count+1 + if callbackMethod is not None: + exit=callbackMethod("%d of %d"%(count,numPeriods)) + if exit==1: + return cases + (stime,etime)=verPer + totalTime=etime-stime + recmatch=logical_and(rightRecord, + logical_and(greater(self.sncEtime[:],stime), + less(self.sncStime[:],etime))) + # + # When there are matching records...find each basetime that + # forecast for this period + # + if sometrue(recmatch): + recnumberList=list(compress(recmatch,self.sncRecs)) + baselist=[] + for rec in recnumberList: + rec = int(rec) + if self.sncBtime[rec] not in baselist: + baselist.append(self.sncBtime[rec]) + # + # And for each basetime...see if the period was coverred + # by forecast grids + # + for base in baselist: + reclist=[] + totcov=0 + for rec in recnumberList: + rec = int(rec) + if self.sncBtime[rec]==base: + reclist.append(rec) + recstart=self.sncStime[rec] + recend=self.sncEtime[rec] + cover=min(etime-recstart,recend-stime,recend-recstart) + totcov=totcov+cover + if totcov==totalTime: + key="%d,%d,%d"%(base,stime,etime) + cases[key]=reclist + # + # Other parms get forecast periods based on the forecast grids + # that were actually made + # + else: + if dateStyleLow=="verifying on": + if dateTypeLow=="period length": + if callbackMethod is not None: + exit=callbackMethod("1 of 1") + if exit==1: + return cases + recList=self.getObsStatPeriod(model,parm,obsmodel,fromDay, + numDays,mask=rightRecord) + else: + recList=self.getObsStatList(model,parm,obsmodel,dayList, + mask=rightRecord, 
+ callbackMethod=callbackMethod) + else: + if dateTypeLow=="period length": + if callbackMethod is not None: + exit=callbackMethod("1 of 1") + if exit==1: + return cases + starttime=fromDay-((numDays-1)*DAYSECS) + endtime=fromDay+DAYSECS-1+(self.MAXFORECASTHOUR*HOURSECS) + recList=self.listStatRecords(parm,model,obsmodel,starttime, + endtime,"forecast",rightRecord) + else: + recList=[] + count=0 + totalDays=len(dayList) + for date in dayList: + count=count+1 + if callbackMethod is not None: + exit=callbackMethod("%d of %d"%(count,totalDays)) + if exit==1: + return cases + if type(date) is bytes: + try: + (yea,mon,day)=date.split("/") + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) + except: + continue + else: + starttime=date + endtime=starttime+(self.MAXFORECASTHOUR*HOURSECS) + tmprecs=self.listStatRecords(parm,model,starttime,endtime, + "forecast",rightRecord) + for rec in tmprecs: + recList.append(rec) + # + # Now make case entries for each of these records + # + for rec in recList: + rec = int(rec) + base=self.sncBtime[rec] + stime=self.sncStime[rec] + etime=self.sncEtime[rec] + key="%d,%d,%d"%(base,stime,etime) + cases[key]=[rec] + self.logMsg("end getStatModelCases",10) + return cases + #================================================================= + # getVerifyingTimeRanges - get list of time periods (start/end) + # that match date criteria and actually have observed data + # + def getVerifyingTimeRanges(self,obsParm,obsModel,dataType, + rateFlag,accumHours,dateStyle,dateType, + numDays,fromDay,dayList): + self.logMsg("start getVerifyingTimeRanges",10) + perList=[] + if not self.checkFile(obsParm,obsModel): + self.logMsg("could not open %s file for %s"%(obsParm,obsModel),10) + return perList + dateStyleLow=dateStyle.lower() + dateTypeLow=dateType.lower() + # + # If a probability parm, or an accumulative parm, then we + # cannot use the observed records for the time range - but + # must create timePeriod blocks for the specified 
times. + # + if ((dataType==1)or(rateFlag==1)): + periods=self.createObsPeriods(accumHours,dateStyle,dateType, + fromDay,numDays,dayList) + for per in periods: + (start,end)=per + verList=self.getVerGridInfo(obsModel,start,obsParm,start,end) + if self.isCoverred(start,end,verList): + perList.append((start,end)) + # + # Other parms get verifying periods based on the observed grids + # that actually exist + # + else: + if dateStyleLow=="verifying on": + if dateTypeLow=="period length": + obrecs=self.getObsPeriod(obsModel,obsParm,fromDay,numDays) + else: + obrecs=self.getObsList(obsModel,obsParm,dayList) + else: + if dateTypeLow=="period length": + (yea,mon,day)=fromDay.split("/") + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))-((numDays-1)*DAYSECS) + endtime=starttime+(numDays*DAYSECS)+(self.MAXFORECASTHOUR*HOURSECS) + obrecs=self.listRecords(obsModel,obsParm,starttime,endtime,"verify") + else: + obrecs=[] + for date in dayList: + if type(date) is bytes: + try: + (yea,mon,day)=date.split("/") + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) + except: + continue + else: + starttime=date + endtime=starttime+(self.MAXFORECASTHOUR*HOURSECS)+eoff + starttime=starttime+soff + tmprecs=self.listRecords(obsModel,obsParm,starttime,endtime,"verify") + for obrec in tmprecs: + if obrec not in obrecs: + obrecs.append(obrec) + obrecs.sort(lambda x,y: cmp(self.oncStime[x],self.oncStime[y])) + # + # Add time ranges to list + # + for obrec in obrecs: + start=self.oncStime[obrec] + end=self.oncEtime[obrec] + perList.append((start,end)) + + if self.DEBUG>=10: + for per in perList: + (start,end)=per + (gyea,gmon,gday,ghou,gmin,gsec,gwda,gyda,gdst)=time.gmtime(start) + (eyea,emon,eday,ehou,emin,esec,ewda,eyda,edst)=time.gmtime(end) + self.logMsg(" %4.4d/%2.2d/%2.2d %2.2d --> %4.4d/%2.2d/%2.2d %2.2d"%(gyea,gmon,gday,ghou, + eyea,emon,eday,ehou),10) + self.logMsg("end getVerifyingTimeRanges",10) + return perList + 
#================================================================= + # createObsPeriods - make list of time periods that are accumHours + # width, and start with accumFreq frequency, + # and cover the time periods requested. Each + # period in the list is (start,end) times. + # time periods of accumHours width... + # coverring the time periods specified by + # dateStyle, dateType, and fromDay/numDays + # or dayList. + # + def createObsPeriods(self,accumHours,accumFreq,dateStyle,dateType, + fromDay,numDays,dayList): + self.logMsg("start createObsPeriods",10) + accumTime=accumHours*HOURSECS + accumFreqSecs=accumFreq*HOURSECS + periods=[] + dateStyleLow=dateStyle.lower() + dateTypeLow=dateType.lower() + if dateStyleLow=="verifying on": + if dateTypeLow=="period length": + endtime=fromDay+DAYSECS + starttime=fromDay-((numDays-1)*DAYSECS) + for pstart in range(starttime,endtime,accumFreqSecs): + pend=pstart+accumTime + if pend<=endtime: + periods.append((pstart,pend)) + else: + dayList.sort() + timeRanges=[] + lastend=0 + for date in dayList: + if type(date) is bytes: + try: + (yea,mon,day)=date.split("/") + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) + except: + continue + else: + starttime=date + endtime=starttime+DAYSECS + if starttime!=lastend: + timeRanges.append((starttime,endtime)) + lastend=endtime + else: + (sl,el)=timeRanges[-1] + timeRanges[-1]=(sl,endtime) + lastend=endtime + for timerange in timeRanges: + (starttime,endtime)=timerange + for pstart in range(starttime,endtime,accumFreqSecs): + pend=pstart+accumTime + if pend<=endtime: + periods.append((pstart,pend)) + else: + if dateTypeLow=="period length": + starttime=fromDay-((numDays-1)*DAYSECS) + endtime=starttime+(numDays*DAYSECS)+(self.MAXFORECASTHOUR*HOURSECS) + for pstart in range(starttime,endtime,accumFreqSecs): + pend=pstart+accumTime + if pend<=endtime: + periods.append((pstart,pend)) + else: + dayList.sort() + starts=[] + for date in dayList: + if type(date) is bytes: + 
try: + (yea,mon,day)=date.split("/") + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) + except: + continue + else: + starttime=date + endtime=starttime+(self.MAXFORECASTHOUR*HOURSECS) + for pstart in range(starttime,endtime,accumFreqSecs): + if pstart not in starts: + pend=pstart+accumTime + if pend<=endtime: + periods.append((pstart,pend)) + starts.append(pstart) + self.logMsg("end createObsPeriods",10) + return periods + #================================================================= + # + def createFromPeriods(self,dateType,fromDay,numDays,dayList): + periods=[] + dateTypeLow=dateType.lower() + if dateTypeLow=="period length": + starttime=fromDay-((numDays-1)*DAYSECS) + endtime=fromDay+DAYSECS + periods.append((starttime,endtime)) + else: + for date in dayList: + if type(date) is bytes: + try: + (yea,mon,day)=date.split("/") + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) + except: + continue + else: + starttime=date + endtime=starttime+DAYSECS + periods.append((starttime,endtime)) + return periods + #================================================================= + # getFcstrCycleFhr - get logical array of records that have the + # rightFcstr,rightCycle,rightFhr + # + def getFcstrCycleFhr(self,model,fcstrs,cycles,fhrStart,fhrEnd): + self.logMsg("start getFcstCycleFhr",10) + # + # Get logical array of records with right forecaster + # + fcstrList=[] + if ((type(fcstrs) is tuple)or(type(fcstrs) is list)): + for fcstr in fcstrs: + fcstrList.append(fcstr) + else: + fcstrList.append(fcstrs) + + if ((model!="Official")or(-1 in fcstrList)): + rightFcstr=ones(self.fncFcstr.shape[0]) + else: + rightFcstr=zeros(self.fncFcstr.shape[0]) + for fnum in fcstrList: + rightFcstr=logical_or(sometrue(equal(self.fncFcstr[:],fnum),-1),rightFcstr) + # + # Get logical array of records with right cycle + # + cycleList=[] + if ((type(cycles) is tuple)or(type(cycles) is list)): + for cycle in cycles: + if type(cycle) is bytes: + 
cycleList.append(int(cycle)) + else: + cycleList.append(cycle) + else: + if type(cycles) is bytes: + cycleList.append(int(cycles)) + else: + cycleList.append(cycles) + if (-1 in cycleList): + rightCycle=ones(self.fncBtime.shape, dtype=bool) + else: + rightCycle=zeros(self.fncBtime.shape, dtype=bool) + rem = remainder(self.fncBtime[:], DAYSECS).astype('i') + for cycle in cycleList: + cyc=cycle*HOURSECS + rightCycle[equal(rem,cyc)] = True + # + # get logical array of records with right forecast hours + # + if fhrEnd<0: + fhrEnd=self.MAXFORECASTHOUR + fhr=(self.fncStime[:]-self.fncBtime[:])/float(HOURSECS) + rightFhr=logical_and(greater_equal(fhr,fhrStart),less_equal(fhr,fhrEnd)) + # + # return combined logical array + # + rightRecord=logical_and(logical_and(rightFcstr,rightCycle),rightFhr) + self.logMsg("end getFcstCycleFhr",10) + return rightRecord + #================================================================= + # getStatFcstrCycleFhr - get logical array of statistic records + # that have the rightFcstr,rightCycle, + # rightFhr + # + def getStatFcstrCycleFhr(self,model,fcstrs,cycles,fhrStart,fhrEnd): + self.logMsg("start getStatFcstCycleFhr",10) + # + # Get logical array of records with right forecaster + # + fcstrList=[] + ftype=type(fcstrs) + if ((ftype is tuple)or(ftype is list)): + for fcstr in fcstrs: + fcstrList.append(fcstr) + else: + fcstrList.append(fcstrs) + if ((model!="Official")or(-1 in fcstrList)): + rightFcstr=ones(self.sncFcstr.shape[0], dtype=bool) + else: + rightFcstr=zeros(self.sncFcstr.shape[0], dtype=bool) + for fnum in fcstrList: + rightFcstr[sometrue(equal(self.sncFcstr[:],fnum),-1)] = True + # + # Get logical array of records with right cycle + # + cycleList=[] + ctype=type(cycles) + if ((ctype is tuple)or(ctype is list)): + for cycle in cycles: + if type(cycle) is bytes: + cycleList.append(int(cycle)) + else: + cycleList.append(cycle) + else: + if type(cycles) is bytes: + cycleList.append(int(cycles)) + else: + cycleList.append(cycles) 
+ if (-1 in cycleList): + rightCycle=ones(self.sncBtime.shape, dtype=bool) + else: + rightCycle=zeros(self.sncBtime.shape, dtype=bool) + rem = remainder(self.sncBtime[:], DAYSECS).astype('i') + for cycle in cycleList: + cyc=cycle*HOURSECS + rightCycle[equal(rem,cyc)] = True + # + # get logical array of records with right forecast hours + # + fhr=(self.sncStime[:]-self.sncBtime[:])/float(HOURSECS) + rightFhr=logical_and(greater_equal(fhr,fhrStart),less_equal(fhr,fhrEnd)) + # + # return combined logical array + # + rightRecord=logical_and(logical_and(rightFcstr,rightCycle),rightFhr) + self.logMsg("end getFcstCycleFhr",10) + return rightRecord + #================================================================== + # getRateFlag - given a model name and parm name, return a flag + # with 1 if this is a rateParm. If parms can't be + # found - return 0, just like if a parm isn't a + # rateParm + # + def getRateFlag(self,model,parm): + parmData=self.getParm(model,parm,"SFC") + if parmData is not None: + rateFlag=parmData.getGridInfo().isRateParm() + return rateFlag + else: + return 0 + #================================================================= + # getStatID - given a stat name like "Areal POD", return a + # consistent, lowercase, unique statID used elsewhere + # in the system. Return None if not valid + # + def getStatID(self,statName): + # + # make statName lowercase + # + statname=statName.lower() + # + # Check that the name is somewhere in all the stat names + # + if statname not in self.allStats: + return None + # + # Find the ID that contains this statname + # + statID="" + for testID in self.statIDs: + if statname in self.statNames[testID]: + statID=testID + break + if statID=="": + return None + return statID + #================================================================== + # getVerStat - Main routine to get a statistic from BOIVerify + # for a particular model-run for a particular time + # for a particular area. 
Tries to get it from the + # stat database if it can - otherwise tries to + # calculate it from the grids + # + # for vectors, if vectorType is: + # -1 Calculate the stat on the magnitude of the + # vector error. + # 0 Calculate the stat on the magnitude error + # 1 Calculate the stat on the direction error + # + def getVerStat(self,model,basetime,parm,trStart,trEnd,obsmodel, + statName,statVal=0,statCond="",editArea=None, + smooth=0,vectorType=-1,forceCalc=0,srecList=None, + grecList=None): + self.logMsg("start getVerStat",10) + retVal=None + # + # Check for stats we know how to calculate + # + statID=self.getStatID(statName) + if statID is None: + self.logMsg("unknown statName:%s"%statName,2) + return retVal + # + # Stuff about the parm + # + rateFlag=self.getRateFlag(model,parm) + verType=self.getVerType(parm) + obsParm=self.getObsParm(parm) + dataType=self.getVerParmType(parm) + # + # if editArea is None - calculate for the whole grid + # + if editArea is None: + editArea = self.encodeEditArea(self.__refSetMgr.fullRefSet()) + # + # if editArea is an array - then it must be a mask of + # points to calculate over + # + if type(editArea) is ndarray: + eaGrid=editArea + # + # If editArea is a string...see if BOIVerify pre-calculates + # statistics for that editArea name + # + elif type(editArea) is StringType: + eas=self.listEditAreas() + if editArea in eas: + eaNum=self.getEditAreaNumberFromName(editArea) + if (forceCalc==0)and(smooth==0): + if dataType==1: + if vectorType==0: + statParm=parm+"Spd" + elif vectorType==1: + statParm=parm+"Dir" + else: + statParm=parm + else: + statParm=parm + retVal=self.readVerStat(model,basetime,statParm,trStart, + trEnd,obsmodel,eaNum, + statID,statVal,vectorType=vectorType, + srecList=srecList) + if retVal is not None: + self.logMsg("got the stat from saved stats",5) + return retVal + else: + self.logMsg("tried to get it from saved stats - but failed",5) + else: + self.logMsg("not a saved edit area",2) + # + # See if the named 
editArea even exists in the GFE system + # + allEditAreaNames=self.editAreaList() + if editArea not in allEditAreaNames: + self.logMsg("editArea %s does not exist"%editArea,2) + return retVal + eaGrid=self.encodeEditArea(editArea) + else: + self.logMsg("invalid type of editArea provided to getVerStat") + return retVal + # + # OK...We have to calculate the stat over the eaGrid mask + # Make sure that it has at least 1 point. + # + numpts=add.reduce(add.reduce(eaGrid)) + if numpts<1: + self.logMsg("No points specified - so no stats",2) + return retVal + fnum=float(numpts) + # + # If some records were provided...split them out + # + if grecList is not None: + (frecList,orecList)=grecList + else: + frecList=None + orecList=None + # + # Get obs grid + # + if rateFlag==1: + gridMode="Sum" + else: + gridMode="TimeWtAverage" + obsGrid=self.getVerGrids(obsmodel,trStart,obsParm,trStart, + trEnd,mode=gridMode,recList=orecList) + if obsGrid is None: + self.logMsg("could not read observed %s grid for %s"%(obsmodel,obsParm),2) + return retVal + if (dataType==1): + (mag,direc)=obsGrid + if mag is None: + self.logMsg("could not read observed %s grid for %s"%(obsmodel,obsParm),2) + return retVal + # + # get Fcst grid + # + if verType==1: + gridMode="Max" + else: + if rateFlag==1: + gridMode="Sum" + else: + gridMode="TimeWtAverage" + fcstGrid=self.getVerGrids(model,basetime,parm,trStart,trEnd, + mode=gridMode,recList=frecList) + if fcstGrid is None: + self.logMsg("could not read %s grid for %s"%(model,parm),2) + return retVal + if (dataType==1): + (mag,direc)=fcstGrid + if mag is None: + self.logMsg("could not read observed %s grid for %s"%(model,parm),2) + return retVal + # + # Basic point stats + # + if statID in ["bias","mae","rms","mse","peb"]: + # + # Handle various types of vector errors + # + vectorErr=0 + if dataType==1: + (omag,odirec)=obsGrid + (fmag,fdirec)=fcstGrid + if vectorType==0: + obsGrid=omag + fcstGrid=fmag + elif vectorType==1: + obsGrid=odirec + fcstGrid=fdirec 
+ err=fcstGrid-obsGrid + err=where(greater(err,180.0),360.0-err,err) + err=where(less(err,-180.0),-(360.0+err),err) + vectorErr=1 + else: + (ou,ov)=self.MagDirToUV(omag,odirec) + (fu,fv)=self.MagDirToUV(fmag,fdirec) + (mag,direc)=self.UVToMagDir(fu-ou,fv-ov) + err=mag + vectorErr=1 + # + # If smoothing is on...smooth obs and fcst grids + # inside the edit area... + # but for direction errors, and magnitude of + # vector errors (vectorErr==1)...smooth the + # errors, not the obs/fcst grids. + # + if smooth>0: + ismooth=int(smooth) + if vectorErr==0: + obsGrid=self.smoothpm(obsGrid,ismooth,mask=eaGrid) + fcstGrid=self.smoothpm(fcstGrid,ismooth,mask=eaGrid) + else: + err=self.smoothpm(err,ismooth,mask=eaGrid) + # + # For probability parms - need to calculate the obs grid + # based on the observed parameter + # + if verType==1: + cond=self.getObsCondition(parm) + thresh=self.getObsThreshold(parm) + if cond==">": + obsGrid=greater(obsGrid,thresh)*100 + elif cond==">=": + obsGrid=greater_equal(obsGrid,thresh)*100 + elif cond=="<": + obsGrid=less(obsGrid,thresh)*100 + elif cond=="<=": + obsGrid=less_equal(obsGrid,thres)*100 + # + # get the error - but vector err magnitude has already + # been done...so don't do that... 
+ # + if vectorErr==0: + err=where(eaGrid,fcstGrid-obsGrid,float32(0)) + else: + err=where(eaGrid,err,float32(0)) + # + # Now all the stat calculations + # + if statID=="bias": + retVal=add.reduce(add.reduce(err))/fnum + return retVal + if statID=="mae": + err=where(less(err,0.0),-err,err) + retVal=add.reduce(add.reduce(err))/fnum + return retVal + if statID=="rms": + err=err*err + retVal=sqrt(add.reduce(add.reduce(err))/fnum) + return retVal + if statID=="mse": + err=err*err + retVal=add.reduce(add.reduce(err))/fnum + return retVal + if statID=="peb": + err=where(less(err,0.0),-err,err) + good=logical_and(less(err,statVal),eaGrid) + retVal=add.reduce(add.reduce(good))/fnum + return retVal + elif statID in ["fc","afc","freqo","freqf","freqbias","afreqbias","pod","apod","far","afar", + "pofd","apofd","ts","ats","ets","aets","hk","ahk", + "hss","ahss","oddsratio","aoddsratio","hits","ahits", + "miss","amiss","fals","afals","corn","acorn","cont","acont"]: + # + # threshold for vectors is with magnitude + # + if dataType==1: + (omag,odirec)=obsGrid + (fmag,fdirec)=fcstGrid + obsGrid=omag + fcstGrid=fmag + # + # If smoothing is on...smooth obs and fcst grids + # inside the edit area + # + if statName[0:1]!="a": + if smooth>0: + ismooth=int(smooth) + obsGrid=self.smoothpm(obsGrid,ismooth,mask=eaGrid) + fcstGrid=self.smoothpm(fcstGrid,ismooth,mask=eaGrid) + # + # Get grids of yes/no forecast/occurrence + # + if statCond==">": + obsOccur=greater(obsGrid,statVal) + fcstOccur=greater(fcstGrid,statVal) + elif statCond==">=": + obsOccur=greater_equal(obsGrid,statVal) + fcstOccur=greater_equal(fcstGrid,statVal) + elif statCond=="<": + obsOccur=less(obsGrid,statVal) + fcstOccur=less(fcstGrid,statVal) + elif statCond=="<=": + obsOccur=less_equal(obsGrid,statVal) + fcstOccur=less_equal(fcstGrid,statVal) + # + # do neighborhood look here + # + if statName[0:1]=="a": + if smooth>0: + ismooth=int(smooth) + obsOccur=self.arealOccur(obsOccur,ismooth,mask=eaGrid) + 
fcstOccur=self.arealOccur(fcstOccur,ismooth,mask=eaGrid) + # + # Calculate hits/misses/falsealarms/correctnegatives + # + notFcst=logical_not(fcstOccur) + notObs=logical_not(obsOccur) + hits=count_nonzero(logical_and(eaGrid,logical_and(fcstOccur,obsOccur))) + miss=count_nonzero(logical_and(eaGrid,logical_and(notFcst,obsOccur))) + falr=count_nonzero(logical_and(eaGrid,logical_and(fcstOccur,notObs))) + corn=count_nonzero(logical_and(eaGrid,logical_and(notFcst,notObs))) + total=hits+miss+falr+corn + if abs(float(total)-fnum)>0.5: + self.logMsg("Number in binary histogram not the same as number of points") + return 0.0 + # + # Get the Binary stat and return it + # + ret=self.getBinaryStat(statID,hits,miss,falr,corn) + return ret + else: + self.logMsg("Have not yet implemented stat:%s"%statName,0) + return retVal + #================================================================== + # getBinaryStat - given values of hits/miss/falr/corn, and a + # correct statID (it better be right!) - to the + # calculations and return a value. In cases where + # no forecasts have been made - return the perfect + # score! 
+ # + def getBinaryStat(self,statID,hits,miss,falr,corn): + total=hits+miss+falr+corn + if statID in ["hits","ahits"]: + return hits + if statID in ["miss","amiss"]: + return miss + if statID in ["fals","afals"]: + return falr + if statID in ["corn","acorn"]: + return corn + if statID in ["cont","acont"]: + return (hits,miss,falr,corn) + if statID in ["fc","afc"]: + if total<1: + return 1.0 + return float(hits+corn)/float(total) + if statID in ["freqo",]: + if total<1: + return 1.0 + return float(hits+miss)/float(total) + if statID in ["freqf",]: + if total<1: + return 1.0 + return float(hits+falr)/float(total) + if statID in ["freqbias","afreqbias"]: + denom=hits+miss + if denom<1: + return 1.0 + return float(hits+falr)/float(denom) + if statID in ["pod","apod"]: + denom=hits+miss + if denom<1: + return 1.0 + return float(hits)/float(denom) + if statID in ["far","afar"]: + denom=falr+hits + if denom<1: + return 0.0 + return float(falr)/float(denom) + if statID in ["pofd","apofd"]: + denom=falr+corn + if denom<1: + return 0.0 + return float(falr)/float(denom) + if statID in ["ts","ats"]: + denom=hits+miss+falr + if denom<1: + return 1.0 + return float(hits)/float(denom) + if statID in ["ets","aets"]: + hitsrand=float((hits+miss)*(hits+falr))/float(total) + denom=hits+miss+falr-hitsrand + if ((denom>-0.1)and(denom<0.1)): + return 1.0 + return float(hits-hitsrand)/float(denom) + if statID in ["hk","ahk"]: + denom=falr+corn + if denom<1: + pofd=0.0 + else: + pofd=float(falr)/float(denom) + denom=hits+miss + if denom<1: + pod=1.0 + else: + pod=float(hits)/float(denom) + return pod-pofd + if statID in ["hss","ahss"]: + ecrand=float(((hits+miss)*(hits+falr))+((corn+miss)*(corn+falr)))/float(total) + denom=float(total)-ecrand + if ((denom>-0.1)and(denom<0.1)): + return 1.0 + return float(hits+corn-ecrand)/float(denom) + if statID in ["oddsratio","aoddsratio"]: + if ((hits==0)or(corn==0)or(falr==0)or(miss==0)): + return 200.0 + return float(hits*corn)/float(falr*miss) + 
return None + #================================================================== + # getGridBinaryStat - given grids of hits/miss/falr/corn, and a + # correct statID (it better be right!) - do + # the calculations and return a grid of results. + # In cases where no forecasts have been made - + # return the perfect score! + # + def getGridBinaryStat(self,statID,hits,miss,falr,corn): + total=hits+miss+falr+corn + if statID in ["hits","ahits"]: + return hits/1.0 + if statID in ["miss","amiss"]: + return miss/1.0 + if statID in ["fals","afals"]: + return falr/1.0 + if statID in ["corn","acorn"]: + return corn/1.0 + if statID in ["fc","afc"]: + nofcst=less(total,1) + total[nofcst] = 1 + score=(hits+corn)/total + score[nofcst] = 1.0 + return score + if statID in ["freqo",]: + nofcst=less(total,1) + total[nofcst] = 1 + score=(hits+miss)/total + score[nofcst] = 0.0 + return score + if statID in ["freqf",]: + nofcst=less(total,1) + total[nofcst] = 1 + score=(hits+falr)/total + score[nofcst] = 0.0 + return score + if statID in ["freqbias","afreqbias"]: + denom=hits+miss + nofcst=less(denom,1) + denom[nofcst] = 1 + score=(hits+falr)/denom + score[nofcst] = 1.0 + return score + if statID in ["pod","apod"]: + denom=hits+miss + nofcst=less(denom,1) + denom[nofcst] = 1 + score=hits/denom + score[nofcst] = 1.0 + return score + if statID in ["far","afar"]: + denom=falr+hits + nofcst=less(denom,1) + denom[nofcst] = 1 + score=falr/denom + score[nofcst] = 0.0 + return score + if statID in ["pofd","apofd"]: + denom=falr+corn + nofcst=less(denom,1) + denom[nofcst] = 1 + score=falr/denom + score[nofcst] = 0.0 + return score + if statID in ["ts","ats"]: + denom=hits+miss+falr + nofcst=less(denom,1) + denom[nofcst] = 1 + score=hits/denom + score[nofcst] = 1.0 + return score + if statID in ["ets","aets"]: + total[less(total,1)] = 1 + hitsrand=((hits+miss)*(hits+falr))/total + denom=hits+miss+falr-hitsrand + nofcst=logical_and(greater(denom,-0.1),less(denom,0.1)) + denom[nofcst] = 1 + 
score=(hits-hitsrand)/denom + score[nofcst] = 1.0 + return score + if statID in ["hk","ahk"]: + #pofd + denom=falr+corn + nofcst=less(denom,1) + denom[nofcst] = 1 + pofd=falr/denom + pofd[nofcst] = 0.0 + #pod + denom=hits+miss + nofcst=less(denom,1) + denom[nofcst] = 1 + pod=hits/denom + pod[nofcst] = 1.0 + score=pod-pofd + return score + if statID in ["hss","ahss"]: + total[less(total,1)] = 1 + ecrand=(((hits+miss)*(hits+falr))+((corn+miss)*(corn+falr)))/total + denom=total-ecrand + nofcst=logical_and(greater(denom,-0.1),less(denom,0.1)) + denom[nofcst] = 1 + score=(hits+corn-ecrand)/denom + score[nofcst] = 1.0 + return score + if statID in ["oddsratio","aoddsratio"]: + no1=logical_or(less(hits,0.5),less(corn,0.5)) + no2=logical_or(less(falr,0.5),less(miss,0.5)) + nofcst=logical_or(no1,no2) + denom=falr*miss + denom[less(denom,1)] = 1 + score=(hits*corn)/denom + score[nofcst] = 200.0 + return score + #================================================================== + # getVerStatScales - Main routine to get a calculate a statistic + # for many scales. Same as getVerStat except + # that smooth is provided as a list of smooth + # numbers. This is always calculated - never + # read from stat files. 
+ # + def getVerStatScales(self,model,basetime,parm,trStart,trEnd,obsmodel, + statName,statVal=0,statCond="",editArea=None, + smoothList=[0,],vectorType=-1, + grecList=None): + self.logMsg("start getVerStatScales",10) + retVal=[] + # + # Check for stats we know how to calculate + # + statID=self.getStatID(statName) + if statID is None: + self.logMsg("unknown statName:%s"%statName,2) + return retVal + # + # Stuff about the parm + # + rateFlag=self.getRateFlag(model,parm) + verType=self.getVerType(parm) + obsParm=self.getObsParm(parm) + dataType=self.getVerParmType(parm) + # + # check if editArea is one that we routinely calculate + # + if editArea is None: + editArea = self.encodeEditArea(self.__refSetMgr.fullRefSet()) + + if type(editArea) is ndarray: + eaGrid=editArea + elif type(editArea) is StringType: + # + # get list of editAreas from smartScript + # + ealist=self.editAreaList() + if editArea not in ealist: + self.logMsg("editArea %s does not exist"%editArea,2) + return retVal + eaGrid=self.encodeEditArea(editArea) + else: + self.logMsg("invalid type of editArea provided to getVerStatScales") + return retVal + # + # OK...We have to calculate the stat from the grids + # + numpts=add.reduce(add.reduce(eaGrid)) + if numpts<1: + self.logMsg("No points specified - so no stats",2) + return retVal + fnum=float(numpts) + # + # If some records were provided...split them out + # + if grecList is not None: + (frecList,orecList)=grecList + else: + frecList=None + orecList=None + # + # Get obs grid + # + if rateFlag==1: + gridMode="Sum" + else: + gridMode="TimeWtAverage" + obsGrid=self.getVerGrids(obsmodel,trStart,obsParm,trStart, + trEnd,mode=gridMode,recList=orecList) + if obsGrid is None: + self.logMsg("could not read observed %s grid for %s"%(obsmodel,obsParm),2) + return retVal + if (dataType==1): + (mag,direc)=obsGrid + if mag is None: + self.logMsg("could not read observed %s grid for %s"%(obsmodel,obsParm),2) + return retVal + # + # get Fcst grid + # + if 
verType==1: + gridMode="Max" + else: + if rateFlag==1: + gridMode="Sum" + else: + gridMode="TimeWtAverage" + fcstGrid=self.getVerGrids(model,basetime,parm,trStart,trEnd, + mode=gridMode,recList=frecList) + if fcstGrid is None: + self.logMsg("could not read %s grid for %s"%(model,parm),2) + return retVal + if (dataType==1): + (mag,direc)=fcstGrid + if mag is None: + self.logMsg("could not read observed %s grid for %s"%(model,parm),2) + return retVal + # + # Loop over scales + # + for smooth in smoothList: + self.logMsg("smooth=%d"%smooth) + # + # Basic point stats + # + if statID in ["bias","mae","rms","mse","peb"]: + # + # Handle various types of vector errors + # + vectorErr=0 + if dataType==1: + (omag,odirec)=obsGrid + (fmag,fdirec)=fcstGrid + if vectorType==0: + obsGrid=omag + fcstGrid=fmag + elif vectorType==1: + obsGrid=odirec + fcstGrid=fdirec + err=fcstGrid-obsGrid + err=where(greater(err,180.0),360.0-err,err) + err=where(less(err,-180.0),-(360.0+err),err) + vectorErr=1 + else: + (ou,ov)=self.MagDirToUV(omag,odirec) + (fu,fv)=self.MagDirToUV(fmag,fdirec) + (mag,direc)=self.UVToMagDir(fu-ou,fv-ov) + err=mag + vectorErr=1 + # + # If smoothing is on...smooth obs and fcst grids + # inside the edit area... + # but for direction errors, and magnitude of + # vector errors (vectorErr==1)...smooth the + # errors, not the obs/fcst grids. 
+ # + if smooth>0: + ismooth=int(smooth) + if vectorErr==0: + obsGrid=self.smoothpm(obsGrid,ismooth,mask=eaGrid) + fcstGrid=self.smoothpm(fcstGrid,ismooth,mask=eaGrid) + else: + err=self.smoothpm(err,ismooth,mask=eaGrid) + # + # For probability parms - need to calculate the obs grid + # based on the observed parameter + # + if verType==1: + cond=self.getObsCondition(parm) + thresh=self.getObsThreshold(parm) + if cond==">": + obsGrid=greater(obsGrid,thresh)*100 + elif cond==">=": + obsGrid=greater_equal(obsGrid,thresh)*100 + elif cond=="<": + obsGrid=less(obsGrid,thresh)*100 + elif cond=="<=": + obsGrid=less_equal(obsGrid,thres)*100 + # + # get the error - but vector err magnitude has already + # been done...so don't do that... + # + if vectorErr==0: + err=where(eaGrid,fcstGrid-obsGrid,float32(0)) + else: + err=where(eaGrid,err,float32(0)) + # + # Now all the stat calculations + # + if statID=="bias": + retVal.append(add.reduce(add.reduce(err))/fnum) + elif statID=="mae": + err=where(less(err,0.0),-err,err) + retVal.append(add.reduce(add.reduce(err))/fnum) + elif statID=="rms": + err=err*err + retVal.append(sqrt(add.reduce(add.reduce(err))/fnum)) + if statID=="mse": + err=err*err + retVal.append(add.reduce(add.reduce(err))/fnum) + if statID=="peb": + err=where(less(err,0.0),-err,err) + good=logical_and(less(err,statVal),eaGrid) + retVal.append(add.reduce(add.reduce(good))/fnum) + elif statID in ["fc","afc","freqo","freqf","freqbias","afreqbias","pod","apod","far","afar", + "pofd","apofd","ts","ats","ets","aets","hk","ahk", + "hss","ahss","oddsratio","aoddsratio","hits","ahits", + "miss","amiss","fals","afals","corn","acorn","cont","acont"]: + # + # threshold for vectors is with magnitude + # + if dataType==1: + (omag,odirec)=obsGrid + (fmag,fdirec)=fcstGrid + obsGrid=omag + fcstGrid=fmag + # + # If smoothing is on...smooth obs and fcst grids + # inside the edit area + # + if statName[0:1]!="a": + if smooth>0: + ismooth=int(smooth) + 
obsGrid=self.smoothpm(obsGrid,ismooth,mask=eaGrid) + fcstGrid=self.smoothpm(fcstGrid,ismooth,mask=eaGrid) + # + # Get grids of yes/no forecast/occurrence + # + if statCond==">": + obsOccur=greater(obsGrid,statVal) + fcstOccur=greater(fcstGrid,statVal) + elif statCond==">=": + obsOccur=greater_equal(obsGrid,statVal) + fcstOccur=greater_equal(fcstGrid,statVal) + elif statCond=="<": + obsOccur=less(obsGrid,statVal) + fcstOccur=less(fcstGrid,statVal) + elif statCond=="<=": + obsOccur=less_equal(obsGrid,statVal) + fcstOccur=less_equal(fcstGrid,statVal) + # + # do neighborhood look here + # + if statName[0:1]=="a": + if smooth>0: + ismooth=int(smooth) + obsOccur=self.arealOccur(obsOccur,ismooth,mask=eaGrid) + fcstOccur=self.arealOccur(fcstOccur,ismooth,mask=eaGrid) + # + # Calculate hits/misses/falsealarms/correctnegatives + # + notFcst=logical_not(fcstOccur) + notObs=logical_not(obsOccur) + hits=count_nonzero(logical_and(eaGrid,logical_and(fcstOccur,obsOccur))) + miss=count_nonzero(logical_and(eaGrid,logical_and(notFcst,obsOccur))) + falr=count_nonzero(logical_and(eaGrid,logical_and(fcstOccur,notObs))) + corn=count_nonzero(logical_and(eaGrid,logical_and(notFcst,notObs))) + total=hits+miss+falr+corn + if abs(float(total)-fnum)>0.5: + self.logMsg("Number in binary histogram not the same as number of points") + return 0.0 + # + # Get the Binary stat and return it + # + ret=self.getBinaryStat(statID,hits,miss,falr,corn) + retVal.append(ret) + else: + self.logMsg("Have not yet implemented stat:%s"%statName,0) + return retVal + #================================================================ + # readVerStat - + # + def readVerStat(self,model,basetime,element,trStart,trEnd, + obsmodel,eaNum,statName,statVal,vectorType=-1, + srecList=None): + self.logMsg("start readVerStat",10) + retVal=None + # + # Check for stats calculated by AutoCalc + # + if statName not in ["error","err","bias","absolute error", + "abs error","mae","root mean squared error", + "rms error","rms","mean 
squared error","mse", + "brier","peb","percent error below","percent err below", + "% error below","% err below"]: + return retVal + # + # Make sure file can be openned + # + dataType=self.getVerParmType(element) + parm=element + if dataType==1: + if vectorType==0: + parm=element+"Spd" + elif vectorType==0: + parm=element+"Dir" + if not self.checkStats(parm,model,obsmodel): + self.logMsg("Could not open stat file for %s using %s observations"%(model,obsmodel),2) + return retVal + # + # make sure any threshold stats use one of the thresholds + # we routinely calculate + # + if statName in ["peb","percent error below","percent err below", + "% error below","% err below"]: + threshNum=-1 + thresholds=self.getVerThresholds(element) + if dataType==1: + (threshMag,threshDir)=thresholds + if ((vectorType==-1)or(vectorType==0)): + thresholds=threshMag + else: + thresholds=threshDir + for i in range(len(thresholds)): + check=thresholds[i] + if statVal==check: + threshNum=i + break + if threshNum==-1: + self.logMsg("Not one of the normal thresholds",2) + return retVal + # + # Find the records - the most time costly part + # + if srecList is None: + self.logMsg("finding appropriate records",10) + recbase=equal(self.sncBtime[:],basetime) + recfit=logical_and(greater(self.sncEtime[:],trStart), + less(self.sncStime[:],trEnd)) + recmatch=logical_and(recbase,recfit) + recnumbers=compress(recmatch,self.sncRecs) + recnumberList=list(recnumbers) + self.logMsg("done finding appropriate records",10) + else: + self.logMsg("used provided records",10) + recnumberList=srecList + if len(recnumberList)<1: + return retVal + if len(recnumberList)>1: + recnumberList.sort(lambda x,y: cmp(self.sncStime[x],self.sncStime[y])) + # + # Read the needed stats + # + retVal=0.0 + count=0 + for rec in recnumberList: + stats=self.sncStats[rec,eaNum,:] + if statName in ["error","err","bias"]: + retVal=retVal+stats[0] + elif statName in ["absolute error","abs error","mae"]: + retVal=retVal+stats[2] + elif 
statName in ["root mean squared error","rms error","rms", + "mean squared error","mse","brier"]: + retVal=retVal+stats[1] + elif statName in ["peb","percent error below","percent err below", + "% error below","% err below"]: + retVal=retVal+stats[8+threshNum] + # + # If we had to read multiple records...then average over those + # + if len(recnumberList)>1: + retVal=retVal/float(len(recnumberList)) + if statName in ["root mean squared error","rms error","rms"]: + retVal=sqrt(retVal) + # + # ????? still need work here. Need to multiply by number of cases + # Need to figure out if the percent stats are right...etc. + # + self.logMsg("end readVerStat",10) + return retVal + #================================================================== + # getVerGridInfo - Similar to getGridInfo of SmartScript...but read + # from the verification archive. Difference is + # that here you must specify the model and the + # BASETIME of the model, rather than just asking + # for the latest version etc. It returns a list + # of tuples with info on each grid that intersects + # the time period. The tuple contains + # (recnum,starttime,endtime) and is sorted by + # the starttime. 
+ # + def getVerGridInfo(self,model,basetime,element,stime,etime): + self.logMsg("getVerGridInfo Start",10) + # + # get parm type (scalar/vector) and set default return values + # + dataType=self.getVerParmType(element) + retVal=[] + # + # Make sure file for parm/model exists + # + if not self.checkFile(element,model): + self.logMsg("Could not open file",5) + return retVal + # + # Get list of records that intersect the timeRange + # + self.logMsg("finding grids that intersect",10) + if model not in self.OBSMODELS: + recbase=equal(self.fncBtime[:],basetime) + recfit=logical_and(greater(self.fncEtime[:],stime), + less(self.fncStime[:],etime)) + recmatch=logical_and(recbase,recfit) + recnumbers=compress(recmatch,self.fncRecs) + recnumberList=list(recnumbers) + recnumberList.sort(lambda x,y: cmp(self.fncStime[x],self.fncStime[y])) + for recnum in recnumberList: + retVal.append((recnum,self.fncStime[recnum],self.fncEtime[recnum])) + else: + recmatch=logical_and(greater(self.oncEtime[:],stime), + less(self.oncStime[:],etime)) + recnumbers=compress(recmatch,self.oncRecs) + recnumberList=list(recnumbers) + recnumberList.sort(lambda x,y: cmp(self.oncStime[x],self.oncStime[y])) + for recnum in recnumberList: + retVal.append((recnum,self.oncStime[recnum],self.oncEtime[recnum])) + # + # + #for ret in retVal: + # (rec,st,en)=ret + # self.logMsg(" (%d,%d,%d)"%(rec,st,en),10) + self.logMsg("getVerGridInfo - End",10) + return retVal + #================================================================== + # isCoverred(start,end,infoList) - given a start/end time and a list + # of getVerGridInfo about grids in this time period - return + # 1 or 0 if the start/end period is completely coverred. 
+ # + def isCoverred(self,start,end,infoList): + self.logMsg("isCoverred",10) + if len(infoList)<1: + return 0 + total=end-start + totcov=0 + for info in infoList: + (rec,recstart,recend)=info + totcov=totcov+min(end-recstart,recend-start) + if totcov>=total: + return 1 + return 0 + #================================================================== + # getReadMode - figure out if parm is a rateParm...and set mode + # to "Sum" if it is. + # If not...and checkProb is set...figure out if the + # parm is a probability parm and set mode to + # "Max" if it is (floating PoP). + # Otherwise set to "Average" + # + def getReadMode(self,model,parmName,checkProb=1): + rateFlag=self.getRateFlag(model,parmName) + if (rateFlag==1): + readMode="Sum" + else: + readMode="TimeWtAverage" + if checkProb==1: + verType=self.getVerType(parmName) + if verType is not None: + if verType==1: + readMode="Max" + return readMode + #================================================================== + # getVerGrids - Similar to getGrids of SmartScript...but read from + # the verification archive. Difference is that here + # you must specify the model and the BASETIME of the + # model, rather than just asking for the latest + # version etc. There are other routines to help you + # figure out the basetime + # + # mode=TimeWtAverage + # Average + # Max + # Min + # Sum + # First + # List + # + # normally stime and etime define the time period + # for which you want grids. However, if recList + # is not None - then we assume that recList has + # a list of record numbers that span the desired + # time period - and we don't search for records + # that fit the stime/etime period. This saves + # considerable time - and the records are often + # known ahead of time from other routines. + # + # Note grids are flipped vertically to AWIPS II ordering. 
+ # + def getVerGrids(self,model,basetime,element,stime,etime, + mode="TimeWtAverage",recList=None): + self.logMsg("getVerGrids Start",10) + # + # get parm type (scalar/vector) and set default return values + # + dataType=self.getVerParmType(element) + if mode=="List": + retVal=[] + else: + if dataType==0: + retVal=None + else: + retVal=(None,None) + # + # Make sure file for parm/model exists + # + if not self.checkFile(element,model): + self.logMsg("Could not open file",5) + return retVal + # + rateFlag=self.getRateFlag(model,element) + # + # Get list of records that intersect the timeRange + # + if recList is None: + self.logMsg("finding grids that intersect",10) + if model not in self.OBSMODELS: + recbase=equal(self.fncBtime[:],basetime) + recfit=logical_and(greater(self.fncEtime[:],stime), + less(self.fncStime[:],etime)) + recmatch=logical_and(recbase,recfit) + recnumbers=compress(recmatch,self.fncRecs) + recList=list(recnumbers) + recList.sort(lambda x,y: cmp(self.fncStime[int(x)],self.fncStime[int(y)])) + else: + recmatch=logical_and(greater(self.oncEtime[:],stime), + less(self.oncStime[:],etime)) + recnumbers=compress(recmatch,self.oncRecs) + recList=list(recnumbers) + recList.sort(lambda x,y: cmp(self.oncStime[int(x)],self.oncStime[int(y)])) + self.logMsg("number of intersecting grids:%d"%len(recList),10) + if len(recList)<1: + return retVal + # + # Loop over grids + # + totalWeights=0 + gridtot=self.empty() + utot=self.empty() + vtot=self.empty() + for rec in recList: + rec = int(rec) + self.logMsg("reading grid",5) + # + # get total hours in grid, and amount of grid that intersects + # time range + # + if model not in self.OBSMODELS: + gstime=self.fncStime[rec] + getime=self.fncEtime[rec] + else: + gstime=self.oncStime[rec] + getime=self.oncEtime[rec] + gridHours=float(getime-gstime)/float(HOURSECS) + intersectHours=float(min(etime-gstime,getime-stime,getime-gstime))/float(HOURSECS) + if dataType!=1: + if model not in self.OBSMODELS: + 
grid=(self.fncValue[rec].astype(float)*self.fncScale[rec])+self.fncAddit[rec] + else: + grid=(self.oncValue[rec].astype(float)*self.oncScale[rec])+self.oncAddit[rec] + + # flip to AWIPS II order + grid = flipud(grid) + + # + # If a rateParm - chop grid to only the piece being used + # + if rateFlag==1: + grid=grid*(float(intersectHours)/float(gridHours)) + # + # + # + if mode in ["TimeWtAverage","Average","Sum"]: + if len(recList)>1: + weight=1.0 + if mode=="TimeWtAverage": + weight=intersectHours + gridtot+=(grid*weight) + totalWeights+=weight + else: + retVal=grid + elif mode=="Max": + if retVal is None: + retVal=grid + else: + retVal=maximum(retVal,grid) + elif mode=="Min": + if retVal is None: + retVal=grid + else: + retVal=minimum(retVal,grid) + elif mode=="First": + if retVal is None: + retVal=grid + elif mode=="List": + retVal.append(grid) + else: + if model not in self.OBSMODELS: + mag= (self.fncValue[rec].astype(float)*self.fncScale[rec])+self.fncAddit[rec] + direc=(self.fncValue1[rec].astype(float)*self.fncScale1[rec])+self.fncAddit1[rec] + else: + mag= (self.oncValue[rec].astype(float)*self.oncScale[rec])+self.oncAddit[rec] + direc=(self.oncValue1[rec].astype(float)*self.oncScale1[rec])+self.oncAddit1[rec] + + # flip to AWIPS II order + mag = flipud(mag) + direc = flipud(direc) + + if mode in ["TimeWtAverage","Average","Sum"]: + if len(recList)>1: + (u,v)=self.MagDirToUV(mag,direc) + weight=1.0 + if mode=="TimeWtAverage": + weight=intersectHours + utot+=(u*weight) + vtot+=(v*weight) + totalWeights=totalWeights+weight + else: + retVal=(mag,direc) + elif mode=="Max": + if retVal[0] is None: + retVal=(mag,direc) + else: + newdir=where(greater(mag,retVal[0]),direc,retVal[1]) + newmag=maximum(retVal[0],mag) + retVal=(newmag,newdir) + elif mode=="Min": + if retVal[0] is None: + retVal=(mag,direc) + else: + newdir=where(less(mag,retVal[0]),direc,retVal[1]) + newmag=minimum(retVal[0],mag) + retVal=(newmag,newdir) + elif mode=="First": + if retVal[0] is None: + 
retVal=(mag,direc) + elif mode=="List": + retVal.append((mag,direc)) + # + # When we had averages/sums and were adding up... + # + if len(recList)>1: + if mode in ["TimeWtAverage","Average","Sum"]: + if dataType!=1: + retVal=gridtot + if mode in ["TimeWtAverage","Average"]: + retVal=retVal/totalWeights + else: + if mode in ["TimeWtAverage","Average"]: + utot=utot/totalWeights + vtot=vtot/totalWeights + (mag,direc)=self.UVToMagDir(utot,vtot) + retVal=(mag,direc) + # + # + # + self.logMsg("getVerGrids - End",10) + return retVal + #================================================================== + # getObsPeriod - get list of observed records within a period of + # Ndays (integer) ending on endDay + # + def getObsPeriod(self,model,parm,endDay,Ndays,mask=None): + obrecs=[] + eoff=self.getEndOffset(parm) + soff=self.getStartOffset(parm) + if type(endDay) is bytes: + try: + (yea,mon,day)=endDay.split("/") + endtime=calendar.timegm((int(yea),int(mon),int(day),23,59,59,0,0,-1))+eoff + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))-((Ndays-1)*DAYSECS)+soff + except: + return obrecs + else: + endtime=endDay+DAYSECS-1+eoff + starttime=endDay-((Ndays-1)*DAYSECS)+soff + obrecs=self.listRecords(parm,model,starttime,endtime,"verify",mask) + return obrecs + #================================================================== + # getObsStatPeriod - get list of observed records within a period of + # Ndays (integer) ending on endDay + # + def getObsStatPeriod(self,model,parm,obsmodel,endDay,Ndays,mask=None): + obrecs=[] + eoff=self.getEndOffset(parm) + soff=self.getStartOffset(parm) + if type(endDay) is bytes: + try: + (yea,mon,day)=endDay.split("/") + endtime=calendar.timegm((int(yea),int(mon),int(day),23,59,59,0,0,-1))+eoff + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))-((Ndays-1)*DAYSECS)+soff + except: + return obrecs + else: + endtime=endDay+DAYSECS-1+eoff + starttime=endDay-((Ndays-1)*DAYSECS)+soff + 
obrecs=self.listStatRecords(parm,model,obsmodel,starttime,endtime,"verify",mask) + return obrecs + #================================================================== + # getEndOffset - gets the END_OFFSET_HOURS for a parm (multiplied + # by HOURSECS so that the value returned is in seconds. If no + # END_OFFSET_HOURS is specified for the parm, then it returns + # zero. + # + def getEndOffset(self,parm): + eoff=0 + keys=list(self.END_OFFSET_HOURS.keys()) + if parm in keys: + eoff=self.END_OFFSET_HOURS[parm]*HOURSECS + return eoff + #================================================================== + # getStartOffset - gets the START_OFFSET_HOURS for a parm + # (multiplied by HOURSECS so that the value returned is in + # seconds. If no START_OFFSET_HOURS is specified for the + # parm, then it returns zero. + # + def getStartOffset(self,parm): + soff=0 + keys=list(self.START_OFFSET_HOURS.keys()) + if parm in keys: + soff=self.START_OFFSET_HOURS[parm]*HOURSECS + return soff + #================================================================== + # getObsList - get list of observed records within days listed in + # dayList (each day is "year/mon/day") with eoff and soff + # (seconds) added to the end and beginning respectively + # + def getObsList(self,model,parm,dayList,mask=None,callbackMethod=None): + obrecs=[] + eoff=self.getEndOffset(parm) + soff=self.getStartOffset(parm) + count=0 + totalcount=len(dayList) + for date in dayList: + count+=1 + if callbackMethod is not None: + exit=callbackMethod("%d of %d"%(count,totalcount)) + if exit==1: + return obrecs + if type(date) is bytes: + try: + (yea,mon,day)=date.split("/") + endtime=calendar.timegm((int(yea),int(mon),int(day),23,59,59,0,0,-1))+eoff + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))+soff + except: + continue + else: + starttime=date+soff + endtime=date+DAYSECS-1+eoff + recs=self.listRecords(parm,model,starttime,endtime,"verify",mask) + for rec in recs: + if rec not in obrecs: + 
obrecs.append(rec) + return obrecs + #================================================================== + # getObsStatList - get list of observed records within days listed in + # dayList (each day is "year/mon/day") with eoff and soff + # (seconds) added to the end and beginning respectively + # + def getObsStatList(self,model,parm,obsmodel,dayList,mask=None,callbackMethod=None): + obrecs=[] + eoff=self.getEndOffset(parm) + soff=self.getStartOffset(parm) + count=0 + totalcount=len(dayList) + for date in dayList: + count+=1 + if callbackMethod is not None: + exit=callbackMethod("%d of %d"%(count,totalcount)) + if exit==1: + return obrecs + if type(date) is bytes: + try: + (yea,mon,day)=date.split("/") + endtime=calendar.timegm((int(yea),int(mon),int(day),23,59,59,0,0,-1))+eoff + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,-1))+soff + except: + continue + else: + starttime=date+soff + endtime=date+DAYSECS-1+eoff + recs=self.listStatRecords(parm,model,obsmodel,starttime,endtime,"verify",mask) + for rec in recs: + if rec not in obrecs: + obrecs.append(rec) + return obrecs + #================================================================== + # getForecasters - given a model, element, and records - open the + # correct grid file and get a list of forecaster + # numbers for that record. 
Will return an empty + # list if there are problems opening the file + # + def getForecasters(self,model,element,rec): + if not self.checkFile(element,model): + self.logMsg("Could not open file",5) + return [] + retVal=self.getRecFcstrs(rec) + return retVal + #================================================================== + # getRecFcstrs - given a record, get a list of forecaster numbers + # from the currently open forecast grid file + # + def getRecFcstrs(self,rec): + return list(self.fncFcstr[rec,:]) + #================================================================== + # trimFcstRecs - get rid of forecast records that do not match + # the cycle and forecaster lists + # + def trimFcstRecs(self,fcstrecs,model,cycles,fcstrIDlist,fhrStart=0, + fhrEnd=0): + fcstTrimmed=[] + # + # Get cycleList with integer hours + # + maxCycles=len(self.getCFG('ALLCYCLES')) + cycleList=[] + if ((type(cycles) is tuple)or(type(cycles) is list)): + for cycle in cycles: + if type(cycle) is bytes: + cycleList.append(int(cycle)) + else: + cycleList.append(cycle) + else: + if type(cycles) is bytes: + cycleList.append(int(cycles)) + else: + cycleList.append(cycles) + for fcstrec in fcstrecs: + # + # skip forecasts from wrong cycle + # + if -1 not in cycleList: + btime=self.fncBtime[fcstrec] + basetuple=time.gmtime(btime) + if basetuple[3] not in cycleList: + continue + # + # skip forecasts for wrong forecaster + # + if "ALL" not in fcstrlist: + if model=="Official": + foundfcstr=0 + for j in range(self.MAXFCSTRS): + fnum=int(self.fncFcstr[fcstrec,j]) + fnumstr="%2.2d"%fnum + fid=self.FcstrIDs[fnumstr] + if fid in fcstrIDlist: + foundfcstr=1 + if foundfcstr==0: + continue + # + # skip forecasts outside of matching fhrs + # + fhr=int(float(self.fncStime[fcstrec]-self.fncBtime[fcstrec])/float(HOURSECS)) + if ((fhrfhrEnd)): + continue + # + # Add records that came this far + # + fcstTrimmed.append(fcstrec) + return fcstTrimmed + 
#================================================================== + # getFcstPeriod - get list of forecast records made within a period + # of Ndays (integer) ending on endDay(either a "year/mon/day" + # string, or a time integer) by model + # + def getFcstPeriod(self,inputParm,endDay,Ndays,model): + fcstrecs=[] + if type(endDay) is bytes: + try: + (yea,mon,day)=endDay.split("/") + startOfEndDay=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) + except: + return fcstrecs + else: + startOfEndDay=endDay + endtime=startOfEndDay+DAYSECS-1 + starttime=startOfEndDay-((Ndays-1)*DAYSECS) + fcstrecs=self.listRecords(inputParm,model,starttime,endtime,"forecast") + return fcstrecs + #================================================================== + # getFcstList - get list of forecast records made on days listed + # in dayList (each day is "year/mon/day") by model + # + def getFcstList(self,inputParm,dayList,model): + fcstrecs=[] + for date in dayList: + if type(date) is bytes: + try: + (yea,mon,day)=date.split("/") + starttime=calendar.timegm((int(yea),int(mon),int(day),0,0,0,0,0,0)) + except: + continue + else: + starttime=date + endtime=starttime+DAYSECS + recs=self.listRecords(inputParm,model,starttime,endtime,"forecast") + for rec in recs: + if rec not in fcstrecs: + fcstrecs.append(rec) + return fcstrecs + #================================================================= + # logMsg - writes a message to STDOUT with a date/time stamp + # and flushes immediately + # + def logMsg(self,msg,significance=0): + if significance<=self.DEBUG: + gmt=time.gmtime() + print("%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d:%s"%(gmt[0],gmt[1], + gmt[2],gmt[3],gmt[4],gmt[5],msg)) + sys.stdout.flush() + return + #================================================================= + # setQuiet - set DEBUG level to 0 - suppressing most messages + # + def setQuiet(self): + self.DEBUG=0 + return + #================================================================= + # setVerbose - set DEBUG 
level (defaults to 1) + # + def setVerbose(self,value=1): + self.DEBUG=value + return + def setDebug(self,value=1): + self.DEBUG=value + return + def getDebug(self): + return self.DEBUG + def getVerbose(self): + return self.DEBUG + #================================================================== + # Given a modelname and parmname (and optional level), return + # the GFE precision number (0 for integers, 1 for 0.1 resolution, + # 2 for 0.01 resolution, etc.). This is useful in many areas. + # + def getParmPrecision(self,modelname,parmname,level="SFC"): + precision=0 + parmInfo=self.getParm(modelname,parmname,level) + if parmInfo is not None: + precision=parmInfo.getGridInfo().getPrecision() + return precision + #================================================================= + # lastSaved - get time that the grid for the specified parm, model, + # basetime, starttime, endtime was written to the grid + # archive database. If it has not been written - return + # 0. + # + def lastSaved(self,parm,model,Btime,Stime,Etime): + self.logMsg("Starting lastSaved in VerifyUtility",10) + savedTime=0 + # + # Check that the correct file is open and ready to modify + # + if not self.checkFile(parm,model,modify=1): + return savedTime + # + # for models in OBSMODELS - use open Obs file + # + if model in self.OBSMODELS: + # + # If no records - it hasn't been saved + # + if self.onumRecs==0: + return savedTime + # + # See if a record with the exact same times exists + # + s=equal(self.oncStime[:],Stime) + b=equal(self.oncBtime[:],Btime) + e=equal(self.oncEtime[:],Etime) + use=logical_and(logical_and(b,s),e) + if sometrue(use): + a=compress(use,self.oncRecs) + recnum=int(a[0]) + savedTime=self.oncVtime[recnum] + # + # for non OBSMODELS models - use open Fcst file + # + else: + # + # If no records - it hasn't been saved + # + if self.fnumRecs==0: + return savedTime + # + # See if a record with the exact same times exists + # + s=equal(self.fncStime[:],Stime) + 
b=equal(self.fncBtime[:],Btime) + e=equal(self.fncEtime[:],Etime) + use=logical_and(logical_and(b,s),e) + if sometrue(use): + a=compress(use,self.fncRecs) + recnum=int(a[0]) + savedTime=self.fncVtime[recnum] + return savedTime + #======================================================================= + # smoothpm - smooths grid by averaging over plus and minus k + # gridpoints, which means an average over a square 2k+1 + # gridpoints on a side. If mask is specified, only + # smooth over the points that have mask=1, not any others. + # + # Near the edges it can't average over plus and minus + # - since some points would be off the grid - so it + # averages over all the points it can. For example, on + # the edge gridpoint - it can only come inside k points - + # so the average is over only k+1 points in that direction + # (though over all 2k+1 points in the other direction - + # if possible) + # + # Much faster by using the cumsum function in numeric. + # Total across the 2k+1 points is the cumsum at the last + # point minus the cumsum at the point before the first + # point. Only edge points need special handling - and + # cumsum is useful here too. + # + def smoothpm(self,grid,k,mask=None): + k=int(k) # has to be integer number of gridpoints + if (k<1): # has to be a positive number of gridpoints + return grid + (ny,nx)=grid.shape + k2=k*2 + # + # Remove the minimum from the grid so that cumsum over a full + # row or column of the grid doesn't get so big that precision + # might be lost. 
+ # + fullmin=minimum.reduce(minimum.reduce(grid)) + gridmin=grid-fullmin + # + # No mask is simpler + # + if mask is None: + # + # Average over the first (y) dimension - making the 'mid' grid + # + mid=grid*0.0 + c=cumsum(gridmin,0) + nym1=ny-1 + midy=int((ny-1.0)/2.0) + ymax=min(k+1,midy+1) + for j in range(ymax): # handle edges + jk=min(j+k,nym1) + jk2=max(nym1-j-k-1,-1) + mid[j,:]=c[jk,:]/float(jk+1) + if jk2==-1: + mid[nym1-j,:]=c[nym1,:]/float(jk+1) + else: + mid[nym1-j,:]=(c[nym1,:]-c[jk2,:])/float(jk+1) + if ((k+1)<=(ny-k)): # middle + mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:])/float(k2+1) + # + # Average over the second (x) dimension - making the 'out' grid + # + c=cumsum(mid,1) + out=grid*0.0 + nxm1=nx-1 + midx=int((nx-1.0)/2.0) + xmax=min(k+1,midx+1) + for j in range(xmax): # handle edges + jk=min(j+k,nxm1) + jk2=max(nxm1-j-k-1,-1) + out[:,j]=c[:,jk]/float(jk+1) + if jk2==-1: + out[:,nxm1-j]=c[:,nxm1]/float(jk+1) + else: + out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/float(jk+1) + if ((k+1)<=(nx-k)): # middle + out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/float(k2+1) + # + # Add the minimum back in + # + out=out+fullmin + # + # Mask makes it a bit more difficult - have to find out how many + # points were in each cumsum - and have to deal with possible + # divide-by-zero errors + # + else: + # + # Average over the first (y) dimension - making the 'mid' grid + # + mask=clip(mask,0,1) + gridmin1=where(mask,gridmin,float32(0)) + mid=grid*0.0 + midd=grid*0.0 + c=cumsum(gridmin1,0) + d=cumsum(mask,0) + nym1=ny-1 + midy=int((ny-1.0)/2.0) + ymax=min(k+1,midy+1) + for j in range(ymax): # handle edges + jk=min(j+k,nym1) + jk2=max(nym1-j-k-1,-1) + mid[j,:]=c[jk,:] + midd[j,:]=d[jk,:] + if jk2==-1: + mid[nym1-j,:]=c[nym1,:] + midd[nym1-j,:]=d[nym1] + else: + mid[nym1-j,:]=(c[nym1,:]-c[jk2,:]) + midd[nym1-j,:]=d[nym1,:]-d[jk2,:] + if ((k+1)<=(ny-k)): # middle + mid[k+1:ny-k,:]=(c[k2+1:,:]-c[:-k2-1,:]) + midd[k+1:ny-k,:]=d[k2+1:,:]-d[:-k2-1,:] + # + # Average over the second (x) 
dimension - making the 'out' grid + # + c=cumsum(mid,1) + d=cumsum(midd,1) + out=grid*0.0 + nxm1=nx-1 + midx=int((nx-1.0)/2.0) + xmax=min(k+1,midx+1) + for j in range(xmax): # handle edges + jk=min(j+k,nxm1) + jk2=max(nxm1-j-k-1,-1) + out[:,j]=c[:,jk]/maximum(d[:,jk],1) + if jk2==-1: + out[:,nxm1-j]=c[:,nxm1]/maximum(d[:,nxm1],1) + else: + out[:,nxm1-j]=(c[:,nxm1]-c[:,jk2])/maximum((d[:,nxm1]-d[:,jk2]),1) + if ((k+1)<=(nx-k)): # middle + out[:,k+1:nx-k]=(c[:,k2+1:]-c[:,:-k2-1])/maximum((d[:,k2+1:]-d[:,:-k2-1]),1) + # + # Add the minimum back in + # + out=where(mask,out+fullmin,grid) + return out + #======================================================================= + # arealOccur - similar to smoothpm, in that it looks over a square 2k+1 + # on a side. But should be used with a logical array of 0 + # and 1, and just tells you whether an occurrence (a 1) + # occurred anywhere in the search square. If a mask is + # specified it only searches over the points that have + # mask=1, not any others. + # + # Near the edges it can't search over plus and minus + # - since some points would be off the grid - so it + # searches over all the points it can. For example, on + # the edge gridpoint - it can only come inside k points - + # so the average is over only k+1 points in that direction + # (though over all 2k+1 points in the other direction - + # if possible) + # + # Much faster by using the cumsum function in numeric. + # Total across the 2k+1 points is the cumsum at the last + # point minus the cumsum at the point before the first + # point. Only edge points need special handling - and + # cumsum is useful here too. 
+ # + def arealOccur(self,grid,k,mask=None): + k=int(k) # has to be integer number of gridpoints + if (k<1): # has to be a positive number of gridpoints + return grid + (ny,nx)=grid.shape + k2=k*2 + # + # No mask is simpler + # + if mask is None: + grid1=grid + else: + mask=clip(mask,0,1) + grid1=where(mask,grid,float32(0)) + # + # Average over the first (y) dimension - making the 'mid' grid + # + mid=grid*0.0 + c=cumsum(grid1,0) + nym1=ny-1 + midy=int((ny-1.0)/2.0) + ymax=min(k+1,midy+1) + for j in range(ymax): # handle edges + jk=min(j+k,nym1) + jk2=max(nym1-j-k-1,-1) + mid[j,:]=c[jk,:] + if jk2==-1: + mid[nym1-j,:]=c[nym1,:] + else: + mid[nym1-j,:]=c[nym1,:]-c[jk2,:] + if ((k+1)<=(ny-k)): # middle + mid[k+1:ny-k,:]=c[k2+1:,:]-c[:-k2-1,:] + # + # Average over the second (x) dimension - making the 'out' grid + # + c=cumsum(mid,1) + out=grid*0.0 + nxm1=nx-1 + midx=int((nx-1.0)/2.0) + xmax=min(k+1,midx+1) + for j in range(xmax): # handle edges + jk=min(j+k,nxm1) + jk2=max(nxm1-j-k-1,-1) + out[:,j]=c[:,jk] + if jk2==-1: + out[:,nxm1-j]=c[:,nxm1] + else: + out[:,nxm1-j]=c[:,nxm1]-c[:,jk2] + if ((k+1)<=(nx-k)): # middle + out[:,k+1:nx-k]=c[:,k2+1:]-c[:,:-k2-1] + # + # Occur is where non-zero + # + out=greater(out,0.5) + return out + #----------------------------------------------------------------------- + # getGridSpacing - get 'rough grid spacing' by getting the distance + # between the corners of the GFE grid and dividing by + # the number of points. 
+ # + def getGridSpacing(self): + xmax=self.getGridShape()[1] + ymax=self.getGridShape()[0] + (lat1,lon1)=self.getLatLon(0,0) + (lat2,lon2)=self.getLatLon(xmax-1,ymax-1) + hypot=math.hypot(xmax-1,ymax-1) + spacing1=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot + (lat1,lon1)=self.getLatLon(0,ymax-1) + (lat2,lon2)=self.getLatLon(xmax-1,0) + spacing2=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot + avgspacing=(spacing1+spacing2)/2.0 + return avgspacing + #----------------------------------------------------------------------- + # getCircleDistance - get the 'great circle distance' between two lat + # lon points (in km) + # + def getCircleDistance(self,lat1,lon1,lat2,lon2): + DTR=math.pi/180.0 + lat1r=lat1*DTR + lon1r=lon1*DTR + lat2r=lat2*DTR + lon2r=lon2*DTR + dl=lon2r-lon1r + a=(math.acos((math.sin(lat1r)*math.sin(lat2r))+(math.cos(lat1r)*\ + math.cos(lat2r)*math.cos(dl))))/DTR + return(a*1.852*60) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/Common.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/Common.py index 567cd1fcd9..4b0de4ba11 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/Common.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/Common.py @@ -1,32 +1,32 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -import SmartScript - -class Common(SmartScript.SmartScript): - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - # Include your utility methods here - def _convertFtToM(self, value): - print "Using Utility Version of convertFtToM" - return value/3.28084 - - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +import SmartScript + +class Common(SmartScript.SmartScript): + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + # Include your utility methods here + def _convertFtToM(self, value): + print("Using Utility Version of convertFtToM") + return value/3.28084 + + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/DefineMaxWindGUI.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/DefineMaxWindGUI.py index 2171501e1e..5cb2326641 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/DefineMaxWindGUI.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/DefineMaxWindGUI.py @@ -1,230 +1,230 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# DefineMaxWindGUI -# -# Author: -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -try: # See if this is the AWIPS I environment - from Numeric import * - import AFPS - AWIPS_ENVIRON = "AWIPS1" -except: # Must be the AWIPS II environment - from numpy import * - import AbsTime - import TimeRange - AWIPS_ENVIRON = "AWIPS2" - -import SmartScript -import Tkinter -import tkFont - - -class DefineMaxWindGUI(SmartScript.SmartScript): - - def __init__(self, dbss, eaMgr=None): - SmartScript.SmartScript.__init__(self, dbss) - - if AWIPS_ENVIRON == "AWIPS1": - self.setUp(eaMgr) - - def reportQuadError(self): - self.statusBarMsg("Only three quadants at a time may be reduced.\n" + \ - "...Please toggle another quadrant off first.","S") - return - - def toggleButton(self, buttonLabel): - b = self._buttonLabels.index(buttonLabel) - if self._buttonState[b]: # button was on - self._buttonState[b] = False - self._buttonList[b].configure(background="gray", activebackground="gray") - else: # button was off - if sum(self._buttonState) > 2: # only three at a time allowed - self.reportQuadError() - return - self._buttonState[b] = True - self._buttonList[b].configure(background="green", activebackground="green") - - - def NEClick(self): - self.toggleButton("NE") - return - - def SEClick(self): - self.toggleButton("SE") - return - - def SWClick(self): - self.toggleButton("SW") - return - - def NWClick(self): - self.toggleButton("NW") - return - - def makeLabel(self, frame): - - label = Tkinter.Label(frame, fg="red", font=self._boldFont, - text="Max winds will be reduced by\n 20% in selected quadrants") - label.grid(row=0) - - return - - def makeBottomButtons(self, frame): - # Create the Execute/Cancel buttons - self._doneButton = Tkinter.Button(frame, text="Done", - 
command=self.doneCommand) - self._doneButton.grid(row=3, column=0, padx=20, pady=5, sticky=Tkinter.W) - - self._cancelButton = Tkinter.Button(frame, text="Cancel", - command=self.cancelCommand) - self._cancelButton.grid(row=3, column=2, padx=20, pady=5, sticky=Tkinter.E) - - frame.grid(columnspan=3, sticky=Tkinter.EW) - - return - - def makeQuadButtons(self, frame): - # Create the quadrant buttons - commandList = [self.NWClick, self.SWClick, self.SEClick, self.NEClick] - self._buttonLabels = ["NW", "SW", "SE", "NE"] - - # define the button position in geometric order - buttonPos = [(0, 0), (1, 0), (1, 1), (0, 1)] - for b in range(len(self._buttonLabels)): - label = self._buttonLabels[b] - - self._buttonList[b] = Tkinter.Button(frame, text=label, - command=commandList[b], - font=self._bigFont, width=3) - rowPos, colPos = buttonPos[b] - self._buttonList[b].grid(row=rowPos, column=colPos, padx=30, pady=10) - - return - - def setUpUI(self): - - self._labelFrame = Tkinter.Frame(self._master) - - self._labelFrame.grid(row=0) - - - self._buttonFrame = Tkinter.Frame(self._master, borderwidth=3, - relief=Tkinter.RIDGE, bd=2, pady=5) - self._buttonFrame.grid(row=1, column=0,padx=25, - sticky=Tkinter.E+Tkinter.W, pady=5) - - - self._bottomFrame = Tkinter.Frame(self._master, borderwidth=3, - relief=Tkinter.RIDGE, bd=2) - self._bottomFrame.grid(row=2, column=0, columnspan=2, padx=25) - - self._master.title("Reduce Max Wind by Quadrant") - - self.makeLabel(self._labelFrame) - - self.makeQuadButtons(self._buttonFrame) - - self.makeBottomButtons(self._bottomFrame) - - return - - def doneCommand(self): - self._master.quit() - - quadCount = 4 - reducePct = 0.80 - - # Gather up the maxWind info to return to the main tool - self._maxWindDict = {} - for h in range(len(self._hourList)): - windList = [] - for quad in range(quadCount): - - # Reduce the value if that quadrant was selected - if self._buttonState[quad]: - windValue = self._maxWind[quad][h] * self._allTimeMaxWind * reducePct 
- else: - windValue = self._maxWind[quad][h] * self._allTimeMaxWind - - windList.append(windValue) - - windList.reverse() - self._maxWindDict[self._hourList[h]] = windList - - return - - def cancelCommand(self): - self._master.destroy() - - return None - - def displayGUI(self, windDict): - - self._windDict = windDict - self._maxWindDict = None - self._quadCount = 4 - - hourKeys = self._windDict.keys() - hourKeys.sort() - self._hourList = hourKeys - self._initialMinWind = [] - self._initialMaxWind = [] - for hour in hourKeys: - minWind, maxWind = windDict[hour] - self._initialMinWind.append(minWind) - self._initialMaxWind.append(maxWind) - - self._hourCount = len(hourKeys) - - if AWIPS_ENVIRON == "AWIPS1": - self._allTimeMaxWind = max(self._initialMaxWind) - else: # numpy uses "amax" - self._allTimeMaxWind = amax(self._initialMaxWind) - - - - # Make the topLevel window - different for A1 and A2 - if AWIPS_ENVIRON == 'AWIPS1': - self._master = Tkinter.Toplevel(self.eaMgr().root()) - self._master.transient(self.eaMgr().root()) # always draw on top of GFE - else: - self._tkmaster = Tkinter.Tk() - self._master = Tkinter.Toplevel(self._tkmaster) - self._tkmaster.withdraw() - - self._buttonLabels = ["NW", "SW", "SE", "NE"] - - self._buttonState = [False, False, False, False] - self._buttonList = [None, None, None, None] - - self._boldFont = tkFont.Font(family="Helvetica", size=12, weight="bold") - self._bigFont = tkFont.Font(family="Helvetica", size=16) - - self.setUpUI() - - self._maxWind = zeros((self._quadCount, self._hourCount)) * 1.0 - - for hour in range(len(hourKeys)): - for quad in range(self._quadCount): - minWind, maxWind = self._windDict[hourKeys[hour]] - self._maxWind[quad][hour] = maxWind / self._allTimeMaxWind - - - #self.updateDisplay() # Draws everything - - - self._master.mainloop() - - self._master.withdraw() - self._master.destroy() - - return self._maxWindDict +# ---------------------------------------------------------------------------- +# This 
software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# DefineMaxWindGUI +# +# Author: +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +try: # See if this is the AWIPS I environment + from Numeric import * + import AFPS + AWIPS_ENVIRON = "AWIPS1" +except: # Must be the AWIPS II environment + from numpy import * + import AbsTime + import TimeRange + AWIPS_ENVIRON = "AWIPS2" + +import SmartScript +import tkinter +import tkinter.font + + +class DefineMaxWindGUI(SmartScript.SmartScript): + + def __init__(self, dbss, eaMgr=None): + SmartScript.SmartScript.__init__(self, dbss) + + if AWIPS_ENVIRON == "AWIPS1": + self.setUp(eaMgr) + + def reportQuadError(self): + self.statusBarMsg("Only three quadants at a time may be reduced.\n" + \ + "...Please toggle another quadrant off first.","S") + return + + def toggleButton(self, buttonLabel): + b = self._buttonLabels.index(buttonLabel) + if self._buttonState[b]: # button was on + self._buttonState[b] = False + self._buttonList[b].configure(background="gray", activebackground="gray") + else: # button was off + if sum(self._buttonState) > 2: # only three at a time allowed + self.reportQuadError() + return + self._buttonState[b] = True + self._buttonList[b].configure(background="green", activebackground="green") + + + def NEClick(self): + self.toggleButton("NE") + return + + def SEClick(self): + self.toggleButton("SE") + return + + def SWClick(self): + self.toggleButton("SW") + return + + def NWClick(self): + self.toggleButton("NW") + return + + def makeLabel(self, frame): + + label = tkinter.Label(frame, fg="red", font=self._boldFont, + text="Max winds will be reduced by\n 20% in selected quadrants") + label.grid(row=0) 
+ + return + + def makeBottomButtons(self, frame): + # Create the Execute/Cancel buttons + self._doneButton = tkinter.Button(frame, text="Done", + command=self.doneCommand) + self._doneButton.grid(row=3, column=0, padx=20, pady=5, sticky=tkinter.W) + + self._cancelButton = tkinter.Button(frame, text="Cancel", + command=self.cancelCommand) + self._cancelButton.grid(row=3, column=2, padx=20, pady=5, sticky=tkinter.E) + + frame.grid(columnspan=3, sticky=tkinter.EW) + + return + + def makeQuadButtons(self, frame): + # Create the quadrant buttons + commandList = [self.NWClick, self.SWClick, self.SEClick, self.NEClick] + self._buttonLabels = ["NW", "SW", "SE", "NE"] + + # define the button position in geometric order + buttonPos = [(0, 0), (1, 0), (1, 1), (0, 1)] + for b in range(len(self._buttonLabels)): + label = self._buttonLabels[b] + + self._buttonList[b] = tkinter.Button(frame, text=label, + command=commandList[b], + font=self._bigFont, width=3) + rowPos, colPos = buttonPos[b] + self._buttonList[b].grid(row=rowPos, column=colPos, padx=30, pady=10) + + return + + def setUpUI(self): + + self._labelFrame = tkinter.Frame(self._master) + + self._labelFrame.grid(row=0) + + + self._buttonFrame = tkinter.Frame(self._master, borderwidth=3, + relief=tkinter.RIDGE, bd=2, pady=5) + self._buttonFrame.grid(row=1, column=0,padx=25, + sticky=tkinter.E+tkinter.W, pady=5) + + + self._bottomFrame = tkinter.Frame(self._master, borderwidth=3, + relief=tkinter.RIDGE, bd=2) + self._bottomFrame.grid(row=2, column=0, columnspan=2, padx=25) + + self._master.title("Reduce Max Wind by Quadrant") + + self.makeLabel(self._labelFrame) + + self.makeQuadButtons(self._buttonFrame) + + self.makeBottomButtons(self._bottomFrame) + + return + + def doneCommand(self): + self._master.quit() + + quadCount = 4 + reducePct = 0.80 + + # Gather up the maxWind info to return to the main tool + self._maxWindDict = {} + for h in range(len(self._hourList)): + windList = [] + for quad in range(quadCount): + + # 
Reduce the value if that quadrant was selected + if self._buttonState[quad]: + windValue = self._maxWind[quad][h] * self._allTimeMaxWind * reducePct + else: + windValue = self._maxWind[quad][h] * self._allTimeMaxWind + + windList.append(windValue) + + windList.reverse() + self._maxWindDict[self._hourList[h]] = windList + + return + + def cancelCommand(self): + self._master.destroy() + + return None + + def displayGUI(self, windDict): + + self._windDict = windDict + self._maxWindDict = None + self._quadCount = 4 + + hourKeys = list(self._windDict.keys()) + hourKeys.sort() + self._hourList = hourKeys + self._initialMinWind = [] + self._initialMaxWind = [] + for hour in hourKeys: + minWind, maxWind = windDict[hour] + self._initialMinWind.append(minWind) + self._initialMaxWind.append(maxWind) + + self._hourCount = len(hourKeys) + + if AWIPS_ENVIRON == "AWIPS1": + self._allTimeMaxWind = max(self._initialMaxWind) + else: # numpy uses "amax" + self._allTimeMaxWind = amax(self._initialMaxWind) + + + + # Make the topLevel window - different for A1 and A2 + if AWIPS_ENVIRON == 'AWIPS1': + self._master = tkinter.Toplevel(self.eaMgr().root()) + self._master.transient(self.eaMgr().root()) # always draw on top of GFE + else: + self._tkmaster = tkinter.Tk() + self._master = tkinter.Toplevel(self._tkmaster) + self._tkmaster.withdraw() + + self._buttonLabels = ["NW", "SW", "SE", "NE"] + + self._buttonState = [False, False, False, False] + self._buttonList = [None, None, None, None] + + self._boldFont = tkinter.font.Font(family="Helvetica", size=12, weight="bold") + self._bigFont = tkinter.font.Font(family="Helvetica", size=16) + + self.setUpUI() + + self._maxWind = zeros((self._quadCount, self._hourCount)) * 1.0 + + for hour in range(len(hourKeys)): + for quad in range(self._quadCount): + minWind, maxWind = self._windDict[hourKeys[hour]] + self._maxWind[quad][hour] = maxWind / self._allTimeMaxWind + + + #self.updateDisplay() # Draws everything + + + self._master.mainloop() + + 
self._master.withdraw() + self._master.destroy() + + return self._maxWindDict diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/EditAreaUtilities.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/EditAreaUtilities.py index 3f984f2725..1cf3b39647 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/EditAreaUtilities.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/EditAreaUtilities.py @@ -1,72 +1,72 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# ---------------------------------------------------------------------------- -## -# EditAreaUtilities -# -# An interface to create and maintain a set of edit areas outside of CAVE. -# The Data Access Framework does not offer edit areas as a data type, so -# this code is necessary to store and retrieve data outside of CAVE. -# -# Author: lefebvre -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ----------- ---------- ----------- -------------------------- -# 08/05/2016 - tlefebvre Better error checking when mask file not found. -# 12/02/2016 - tlefebvre Documented code. -# 12/20/2017 DCS17686 tlefebvre Initial baseline version. 
-# -## -# ---------------------------------------------------------------------------- - -import numpy as np -import os -import time - -class EditAreaUtilities(): - def __init__(self, repositoryPath): - self._repositoryPath = repositoryPath - - # Saves the specified mask under the name eaName into the edit area repository - def saveEditArea(self, eaName, mask): - - # Use the numpy method to save the mask - np.save(self._repositoryPath + eaName, mask) - - return - - # Retreives an edit area in the form of a mask from the repository - def fetchEditArea(self, name): - - fullPath = os.path.join(self._repositoryPath, name + ".npy") - if not os.path.exists(fullPath): - - print "Error!", fullPath, "not found in EditArea repository" - return None - - try: - mask = np.load(fullPath) - except: - print "Error reading edit area:", name, "from EditArea repository." - return None - - # Make very sure we're returning a good mask or None - if mask is None: - return None - - return mask # return the contents of the file rather than the file itself - - # Returns the names of all the edit areas in the repository - def allEditAreaNames(self): - - fileList = os.listdir(self._repositoryPath) - nameList = [] - for fileName in fileList: - if ".npy" in fileName: - nameList.append(fileName[:-4]) - - return nameList - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# ---------------------------------------------------------------------------- +## +# EditAreaUtilities +# +# An interface to create and maintain a set of edit areas outside of CAVE. +# The Data Access Framework does not offer edit areas as a data type, so +# this code is necessary to store and retrieve data outside of CAVE. 
+# +# Author: lefebvre +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ----------- ---------- ----------- -------------------------- +# 08/05/2016 - tlefebvre Better error checking when mask file not found. +# 12/02/2016 - tlefebvre Documented code. +# 12/20/2017 DCS17686 tlefebvre Initial baseline version. +# +## +# ---------------------------------------------------------------------------- + +import numpy as np +import os +import time + +class EditAreaUtilities(): + def __init__(self, repositoryPath): + self._repositoryPath = repositoryPath + + # Saves the specified mask under the name eaName into the edit area repository + def saveEditArea(self, eaName, mask): + + # Use the numpy method to save the mask + np.save(self._repositoryPath + eaName, mask) + + return + + # Retreives an edit area in the form of a mask from the repository + def fetchEditArea(self, name): + + fullPath = os.path.join(self._repositoryPath, name + ".npy") + if not os.path.exists(fullPath): + + print("Error!", fullPath, "not found in EditArea repository") + return None + + try: + mask = np.load(fullPath) + except: + print("Error reading edit area:", name, "from EditArea repository.") + return None + + # Make very sure we're returning a good mask or None + if mask is None: + return None + + return mask # return the contents of the file rather than the file itself + + # Returns the names of all the edit areas in the repository + def allEditAreaNames(self): + + fileList = os.listdir(self._repositoryPath) + nameList = [] + for fileName in fileList: + if ".npy" in fileName: + nameList.append(fileName[:-4]) + + return nameList + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/GridManipulation.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/GridManipulation.py index 26bda4d61e..b65379b0f7 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/GridManipulation.py +++ 
b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/GridManipulation.py @@ -1,999 +1,999 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# GridManipulation - Version 2.1 -# -# Author: Matthew H. Belk WFO Taunton MA -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 10, 2012 mbelk Initial creation -# Dec 03, 2015 mbelk ???? -# Sep 19, 2016 19293 randerso Initial baseline check in -# -######################################################################## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import types, re - -import LogStream -import SmartScript -import TimeRange, AbsTime -import numpy as np - - -class GridManipulation(SmartScript.SmartScript): - - def __init__(self, dbss): - SmartScript.SmartScript.__init__(self, dbss) - - ############################################################################ - # (originally from CheckTandTd by Tom LeFebvre). - - def GM_getWEInventory(self, WEName, dbase="Fcst", level="SFC", - timeRange=TimeRange.allTimes()): - """Return a list of time ranges with available data for a weather element from - a specific database and level. 
- Args: - string WEName: name of the weather element to inventory - string dbase: name of database to search (default = 'Fcst') - string level: level of data to inventory (default = 'SFC') - timeRange: if specified, limit inventory to grids overlapping this timeRange - Returns: - Python list of Python time range objects - """ - -# print "Getting inventory of -> '%s' from '%s' at '%s'" % \ -# (WEName, dbase, level) - - trList = [] - # getGridInfo will just die if the modelName or weName is not valid - # so wrap it in a try block and return [] if it fails - try: - gridInfo = self.getGridInfo(dbase, WEName, level, timeRange) - except: - return trList - - trList = [g.gridTime() for g in gridInfo - if timeRange.overlaps(g.gridTime())] - return trList - - - def GM_getParmLocksByOthers(self, weName, level="SFC"): - """Return a list of time ranges locked by other CAVE sessions within the - current mutable database (typically 'Fcst'). - Args: - string WEName: name of field to inventory - string level: level of data to inventory (default = 'SFC') - Returns: - Python list of Python time range objects - """ - - # returns list of time ranges locked by others for this weather element - parm = self.getParm(self.mutableID(), weName, level) - if parm is None: - return [] - - lockTable = parm.getLockTable() - locksByOthers = lockTable.lockedByOther() - trList = [] - - for lock in locksByOthers.toArray(): - print lock - - start = lock.getStart().getTime() / 1000 - end = lock.getEnd().getTime() / 1000 - tr = self.GM_makeTimeRange(start, end) - - trList.append(tr) - - return trList - - - def GM_overlappingTRs(self, timeRange, trList, closest=False): - """Return a list of time ranges of locked data within the current - mutable database (typically 'Fcst'). 
- Args: - TimeRange timeRange: a Python time range object - list trList: list of Python time range objects - boolean closest: if True, force new time range list to start and - end with the times closest to the start and end of - initial selected time range. If False (default), - only include times which overlap the initial - selected time range. - Returns: - Python list of Python time range objects - """ - - # Get ready to return updated list of times - newTRList = [] - - # Get ready to track certain times - beforeTime = None # start time closest to selected time range start - afterTime = None # time range closest to selected time range start - beforeTR = None # start time closest to selected time range end - afterTR = None # time range closest to selected time range end - - # Get start and end time of selected time range - selectStartTime = timeRange.startTime() - selectEndTime = timeRange.endTime() - - #======================================================================= - # Examine each time range in the list - - for tr in trList: - - # If this time range overlaps the selected range - if timeRange.overlaps(tr): - - # Add it to the list - newTRList.append(tr) - - # Otherwise, if we should find the closest time ranges - elif closest: - - # Get the start time of this time range - startTime = tr.startTime() - - # Compute the difference between the start of this grid and - # the start and end times of our selected time range - diffStartTime = (startTime - selectStartTime) - diffEndTime = (startTime - selectEndTime) -# print "\t", diffStartTime, diffEndTime - - # If start time of this grid is the closest to start time of - # selected time range, or it's the first one - if beforeTime is None or \ - ((diffStartTime < 0 and diffStartTime >= beforeTime) or - (diffStartTime >= 0 and diffStartTime < beforeTime)): - - # Mark this grid as the closest to the selected start time - beforeTime = diffStartTime - beforeTR = tr - -# print "beforeTime =", beforeTime, beforeTR - - 
# If start time of this grid is the closest to end time of - # selected time range, or it's the first one - if afterTime is None or \ - (diffEndTime >= 0 and diffEndTime <= abs(afterTime)): - - # Mark this grid as the closest to the selected end time - afterTime = diffEndTime - afterTR = tr - -# print "afterTime =", afterTime, afterTR - -# print "newTRList = ", newTRList, beforeTR, afterTR - - # If we don't have any grids in the list and we should determine the - # closest grid time ranges to the selected time range - if len(newTRList) == 0 and closest: - - # Add closest start and end time ranges to selected time range - newTRList = [beforeTR, afterTR] - - # Ensure time ranges are sorted when we return them - newTRList.sort(self.GM_trSortMethod) - - # Finally, return our completed list - return newTRList - - - def GM_trSortMethod(self, first, last): - """Comparison method for sorting time ranges by start time. - Args: - TimeRange first: a Python time range object - TimeRange last: a Python time range object - Returns: - An integer indicating the ascending order of the compared time - range objects. - """ - if first.startTime() < last.startTime(): - return -1 - elif first.startTime() == last.startTime(): - return 0 - else: - return 1 - - - def GM_mergeTRLists(self, TRList1, TRList2): - """Merges and sorts Python time range lists into ascending order by start time. - Args: - TimeRange TRList1: a Python time range object - TimeRange TRList2: a Python time range object - Returns: - A merged and sorted list of Python time range objects. 
- """ - - # Merge the lists - combined = set(TRList1) | set(TRList2) - - # Sort the resulting time range list in ascending order - newList = sorted(combined, self.GM_trSortMethod) - - # Return the merged and sorted list - return newList - - # - ############################################################################ - - ############################################################################ - # Other utility methods originally provided by Tom LeFebvre (GSD) - - def GM_makeMaxTimeRange(self): - """Gets the maximum possible time range - Returns: - The maximum possible Python time range. - """ - return TimeRange.allTimes() - - - def GM_logToolUse(self, string): - """Inserts an entry into the log files. - Args: - string string: message to be inserted into the log files - Returns: - Nothing - """ - - gtime = self._gmtime().timetuple() - ts="%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d"%(gtime[0], gtime[1], gtime[2], - gtime[3], gtime[4], gtime[5]) - - # Insert this message into the logs - LogStream.logEvent("%s| %s" % (ts, string)) - - - def GM_makeTimeRange(self, start, end): - """Creates a time range. - Args: - double start - start of time range in seconds since the epoch began - double end - end of time range in seconds since the epoch began - Returns: - Time range appropriate for AWIPS version - """ - - startTime = AbsTime.AbsTime(start) - endTime = AbsTime.AbsTime(end) - - return TimeRange.TimeRange(startTime, endTime) - - - def GM_makeTimeRangeList(self, executeTR, interpHours=1, duration=1): - """Creates a list of time range objects from specified time range. 
- Args: - executeTR - time range object appropriate to AWIPS version - integer interpHours - number of hours between each time step - (default = 1) - integer duration - duration of each time range in hours - (default = 1) - Returns: - Python list of time range appropriate for AWIPS version - """ - if interpHours <= 0: - raise ValueError("interpHours must be > 0") - - if duration <= 0 or duration > interpHours: - raise ValueError("duration must be > 0 and <= interpHours") - - start = executeTR.startTime().unixTime() - end = executeTR.endTime().unixTime() - - trList = [] - for t in range(start, end, 3600*interpHours): - - tr = self.GM_makeTimeRange(t, t + (duration * 3600)) - trList.append(tr) - return trList - - - def GM_getPrevNextModelTimes(self, modelInventory, targetTR): - """Searches a grid inventory for the first available time ranges before - and after the target time range and returns those objects - Args: - list modelInventory - list of available data times for a model - time range targetTR - time range to use as basis for search - Returns: - Previous and next time range objects appropriate for AWIPS version, - or None for missing data - """ - - # If we have a model inventory - if len(modelInventory) == 0: - print "Model Inventory is empty" - return None, None - - # Convert target time range object into number of seconds since epoch - targetTRsecs = targetTR.startTime().unixTime() - - #----------------------------------------------------------------------- - # Make sure we're in range - - # Target time range is before all available model data - if targetTRsecs < modelInventory[0].startTime().unixTime(): - return None, None - - # Target time range is after all available model data - if targetTRsecs > modelInventory[-1].startTime().unixTime(): - return None, None - - #----------------------------------------------------------------------- - # Search the model inventory - - for i in range(len(modelInventory)): - - # If we found the first available model time 
ranges on both sides - # of the target time range - if modelInventory[i].startTime().unixTime() < targetTRsecs and \ - modelInventory[i + 1].startTime().unixTime() > targetTRsecs: - - # Return these time range objects - return modelInventory[i], modelInventory[i+1] - - # If we made it this far, indicate we could not find appropriate - # time range objects - return None, None - - - def GM_interpolateSounding(self, model, weName, levels, timeRange, - modelInventory): - """Interpolates a sounding at the specified time range, if needed. - Otherwise, will use a cached sounding if appropriate. - within the target time range and returns those objects - Args: - string model - model to use to grab cube - string weName - weather element name to get cube data for - list levels - list of levels to use in constructing cube - TimeRange timeRange - time range to use as basis for search - list modelInventory - list of available data times for a particular model - Returns: - cube of geopotential height and cube of specified field as a - Python tuple of numpy cube data - """ - - prevTR, nextTR = self.GM_getPrevNextModelTimes(modelInventory, - timeRange) - if prevTR is None or nextTR is None: - return None - - prevGHCube, prevCube = self.makeNumericSounding(model, weName, levels, - prevTR, noDataError=0) - nextGHCube, nextCube = self.makeNumericSounding(model, weName, levels, - nextTR, noDataError=0) - # calculate weights for a time-weighted average - t1 = timeRange.startTime().unixTime() - prevTR.startTime().unixTime() - t2 = nextTR.startTime().unixTime() - timeRange.startTime().unixTime() - prevWt = float(t2) / float(t1 + t2) - nextWt = float(t1) / float(t1 + t2) - - interpGHCube = (prevGHCube * prevWt) + (nextGHCube * nextWt) - - # If this is a cube of scalars - if re.search("(?i)wind", weName) is None: - interpCube = (prevCube * prevWt) + (nextCube * nextWt) - else: - - # Break up the wind into u and v components - (prevU, prevV) = self.MagDirToUV(prevCube[0], prevCube[1]) - 
(nextU, nextV) = self.MagDirToUV(nextCube[0], nextCube[1]) - - # Interpolate the wind components - interpU = (prevU * prevWt) + (nextU * nextWt) - interpV = (prevV * prevWt) + (nextV * nextWt) - - # Now compute the final wind magnitude and direction - interpCube = self.UVToMagDir(interpU, interpV) - - return interpGHCube, interpCube - - - def GM_interpolateGrid(self, model, weName, level, timeRange, - modelInventory): - """Interpolates a grid field at the specified time range, if needed. - Otherwise, will use a cached sounding if appropriate. - within the target time range and returns those objects - Args: - string model - model to use to grab field - string weName - weather element name to get cube data for - string level - level of data to interpolate - TimeRange timeRange - time range to use as basis for search - list modelInventory - list of available data times for a particular model - Returns: - grid of specified field as numpy grid data - """ - prevTR, nextTR = self.GM_getPrevNextModelTimes(modelInventory, - timeRange) - - if prevTR is None or nextTR is None: - return None - - prevGrid = self.getGrids(model, weName, level, prevTR, noDataError=0) - nextGrid = self.getGrids(model, weName, level, nextTR, noDataError=0) - - # calculate weights for a time-weighted average - t1 = timeRange.startTime().unixTime() - prevTR.startTime().unixTime() - t2 = nextTR.startTime().unixTime() - timeRange.startTime().unixTime() - prevWt = t2 / float(t1 + t2) - nextWt = t1 / float(t1 + t2) - - # If this is a grid of scalars - if re.search("(?i)wind", weName) is None: - finalGrid = (prevGrid * prevWt) + (nextGrid * nextWt) - else: - - # Break up the wind into u and v components - (prevU, prevV) = self.MagDirToUV(prevGrid[0], prevGrid[1]) - (nextU, nextV) = self.MagDirToUV(nextGrid[0], nextGrid[1]) - - # Interpolate the wind components - interpU = (prevU * prevWt) + (nextU * nextWt) - interpV = (prevV * prevWt) + (nextV * nextWt) - - # Now compute the final wind magnitude and 
direction - finalGrid = self.UVToMagDir(interpU, interpV) - - return finalGrid - - # - ############################################################################ - - - ############################################################################ - # Define a method to manipulate grid times - ############################################################################ - - def GM_makeNewTRlist(self, dataDict, dataLocks, interpHours=3): - """Produces a list of Python time ranges. - Args: - dataDict: Python dictionary of time ranges of available data keyed by database - dataLocks: Python list of time ranges which are locked by others - interpHours: requested time step in hours - Returns: - Python list of Python time range objects - """ - - #======================================================================= - # Make a new list of time ranges to iterate over - - newTRlist = [] - - #----------------------------------------------------------------------- - # Look at all the models we have data for - - for model in dataDict.keys(): - #------------------------------------------------------------------- - # Start with all time steps from this model - - for tr in dataDict[model].keys(): - #print "TR:", dir(tr) - - pyStart = self._gmtime(tr.startTime().unixTime()) - startHour = pyStart.tm_hour - -# print "HOUR:", startHour - #--------------------------------------------------------------- - # If this time range is not already locked by someone else, and - # it is one we would want to have but do not have yet, and it - # is one we have data for from this model - -# print "newTRlist:", newTRlist, "type:", type(newTRlist) -# print "dataLocks:", dataLocks, "type:", type(dataLocks) - - if tr not in newTRlist and tr not in dataLocks and \ - (startHour % interpHours) == 0 and \ - dataDict[model][tr] is not None: - - # Add this time range to the new time range list - newTRlist.append(tr) - - #----------------------------------------------------------------------- - # Sort new 
model time range list by time - - newTRlist.sort(self.GM_trSortMethod) - - #----------------------------------------------------------------------- - # Return completed consolidated time range list - - return newTRlist - - - ############################################################################ - # Define a method to adjust time range which will be deleted - this is so - # only grids for which we have data from selected model will be deleted - ############################################################################ - - def GM_adjustDeleteTimeRange(self, timeRange, TRlist, adjustTR=0): - """Adjusts a time range for purposes of deleting grids. The intent is - to make it easier to interpolate between old and new data. - Args: - timeRange: Python time range object representing selected time - ranage - TRlist: Python list of Python time range objects where data is - available - integer adjustTR: number of hours to delete on either side of - available data to make for easier interpolation - Returns: - a TimeRange object spanning adjusted time range - """ - - #----------------------------------------------------------------------- - # Get ready to set new limits of the time range - - newStart = None - newEnd = None - - #----------------------------------------------------------------------- - # Look through the time ranges we have for model data - - for tr in TRlist: - - # If this grid is in the selected time range - if timeRange.overlaps(tr): - - # If we have not yet determined a start time - if newStart is None: - - # Define the new start time - newStart = tr.startTime().unixTime() - adjustTR*3600.0 - - # If we have not yet determined an end time - if tr.endTime().unixTime() > newEnd: - - # Define the new end time - newEnd = tr.endTime().unixTime() + adjustTR*3600.0 - -## print '+'*90 -## print newStart, newEnd -## print TimeRange.TimeRange(AbsTime.AbsTime(newStart), AbsTime.AbsTime(newEnd)) - - 
#----------------------------------------------------------------------- - # Return adjusted time range - if we did adjust it - - if newStart is not None and newEnd is not None: - - return TimeRange.TimeRange(AbsTime.AbsTime(newStart), - AbsTime.AbsTime(newEnd)) - - # Otherwise, return the original time range - else: - return timeRange - - - ############################################################################ - # Define a method to linearly interpolate data - ############################################################################ - - def GM_interpolateData(self, dataDict, TRlist, interpHours=3, - vector=[], singleLevel=[]): - """Produces an updated Python dictionary with interpolated data where needed - Args: - dict dataDict - keyed by TimeRange, data for a specific time, can be mixed (e.g. gh, t, p) - list TRList - list of times ranges - integer interpHours - ???? - list vector - ???? - list singleLevel - ???? - Returns: - dict of interpolated data ???? - """ - - #----------------------------------------------------------------------- - # Determine the structure (i.e. how many fields are present) of the - # data dictionary - - try: - numFields = len(dataDict[TRlist[0]]) - except: - print "No data to interpolate!" 
- return dataDict - - #----------------------------------------------------------------------- - # Cycle through each time period we already have - - for index in range(len(TRlist) - 1): - -# print "\tindex = ", index - - #------------------------------------------------------------------- - # Define a list to hold the times we need to create soundings for - - makeList = [] - - #------------------------------------------------------------------- - # Get the time range of the current and next soundings we have - - current = TRlist[index] - next = TRlist[index + 1] -# print '*'*80 -# print current, next - - #------------------------------------------------------------------- - # Get the starting times of each sounding time range - - currentStart = current.startTime().unixTime() - nextStart = next.startTime().unixTime() - - #------------------------------------------------------------------- - # See how far apart these soundings are in time (hours) - - diffTime = nextStart - currentStart -# print diffTime, interpHours*3600 - - #------------------------------------------------------------------- - # If gap between data time steps are more than what we need - - if int(diffTime) > interpHours*3600: - - #-------------------------------------------------------------- - # Keep track of seconds we are between data time steps - - curTime = float(interpHours*3600) - - #--------------------------------------------------------------- - # Make a new time range every three hours -# print '\t', int(currentStart + curTime), int(nextStart) - - while int(currentStart + curTime) < int(nextStart): - - #----------------------------------------------------------- - # Compute linear interpolation weight - - weight = curTime / diffTime -# print "weight = ", weight - - #----------------------------------------------------------- - # Make a new TimeRange object for this new time step - - newTR = TimeRange.TimeRange( - AbsTime.AbsTime(currentStart + curTime), - AbsTime.AbsTime(currentStart + 
curTime + 3600) - ) - - #----------------------------------------------------------- - # Define an empty string to hold all interpolated data - # which should be placed within the final data structure - # for this time - - finalData = "" - - #=========================================================== - # Interpolate data for each field at this time step - - for field in range(numFields): - - # Create a final data structure for interpolated data - exec "data%d = []" % (field) - - # If this field is a vector, make component data - # structures - if field in vector: - exec "data%dU = []" % (field) - exec "data%dV = []" % (field) - - #------------------------------------------------------- - # Get data from the current and next time steps we have - - try: - curData = dataDict[current][field] - except: - # No point in continuing with this time step - msg = "Could not get 'current' data -> %s" % \ - (repr(current)) - self.statusBarMsg(msg, "R") - continue # move on - - try: - nextData = dataDict[next][field] - except: - # No point in continuing with this time step - msg = "Could not get 'next' data -> %s" % \ - (repr(next)) - self.statusBarMsg(msg, "R") - continue # move on - - #------------------------------------------------------- - # If this field is a vector, separate it into its' - # u and v components - - if field in vector: - - (curU, curV) = self.MagDirToUV(curData[0], - curData[1]) - - (nextU, nextV) = self.MagDirToUV(nextData[0], - nextData[1]) - - #======================================================= - # If this field is a single level - - if field in singleLevel: - - if not vector: - data = (curData + (nextData - curData) * weight) - else: - u = (curU + (nextU - curU) * weight) - v = (curV + (nextV - curV) * weight) - - #--------------------------------------------------- - # Get the newly interpolated grids - - if not vector: - - if type(data) == types.ListType: - dataGrid = data[0] - else: - dataGrid = data - - else: - if type(u) == types.ListType: - 
uGrid = u[0] - else: - uGrid = u - - if type(v) == types.ListType: - vGrid = v[0] - else: - vGrid = v - - #--------------------------------------------------- - # Add current level into the new data structure - - if not vector: - exec "data%d = array(dataGrid)" % (field) - else: - exec "data%dU = array(uGrid)" % (field) - exec "data%dV = array(vGrid)" % (field) - - #======================================================= - # Otherwise, cycle through each level in the sounding - - else: - - for level in xrange(curData.shape[0]): - - #----------------------------------------------- - # Construct sounding values for this level - - if not vector: - data = (curData[level] + - (nextData[level] - curData[level]) * - weight) - else: - u = (curU[level] + - (nextU[level] - curU[level]) * weight) - - v = (curV[level] + - (nextV[level] - curV[level]) * weight) - - #----------------------------------------------- - # Get the newly interpolated grids - - if not vector: - - if type(data) == types.ListType: - dataGrid = data[0] - else: - dataGrid = data - - else: - if type(u) == types.ListType: - uGrid = u[0] - else: - uGrid = u - - if type(v) == types.ListType: - vGrid = v[0] - else: - vGrid = v - - #----------------------------------------------- - # Add current level into the new sounding - - if not vector: - exec "data%d = data%d + [dataGrid]" % \ - (field, field) - else: - exec "data%dU = data%dU + [uGrid]" % \ - (field, field) - exec "data%dV = data%dV + [vGrid]" % \ - (field, field) - - #--------------------------------------------------- - # Finish off the new cube for this time - - if not vector: - exec "data%d = array(data%d)" % (field, field) - else: - exec "data%dU = array(data%dU)" % (field, field) - exec "data%dV = array(data%dV)" % (field, field) - - #======================================================= - # If this is a vector field, reconstruct vector from - # the components - - if vector: - exec "data%d = self.UVToMagDir(data%dU, data%dV)" %\ - (field, field, 
field) - - #======================================================= - # Add current interpolated data for this time step to - # the final data structure - - exec "finalData += 'data%d'" % (field) - - if field < (numFields - 1): - finalData += ", " - - #----------------------------------------------------------- - # Add this interpolated data to data structure - - exec "dataDict[newTR] = (%s)" % (finalData) - - msg = "Created data for -> %s" % (repr(newTR)) - self.statusBarMsg(msg, "R") - - #----------------------------------------------------------- - # Move on to next desired time step - - curTime += float(interpHours)*3600.0 - - #----------------------------------------------------------------------- - # Return the completed data dictionary - - return dataDict - - - ############################################################################ - # Define a method to smooth data - ############################################################################ - - def GM_smoothGrid(self, grid, factor=3, mask=None): - """Produces a smoother version of a numpy grid. - Args: - NDArray grid - numpy grid to be smoothed - integer factor - factor to control level of smoothing - bool NDArray mask - optional mask to limit area being smoothed - Returns: - smoothed grid as NDArray - """ - k = int(factor) # has to be integer number of gridpoints - if k < 1: # has to be a positive number of gridpoints - return grid - (ny, nx) = grid.shape - k2 = k * 2 - - finalReturnType = grid.dtype - - #----------------------------------------------------------------------- - # If the input grid is an integer type, convert it to a float before - # any smoothing takes place. It will be converted back to an integer - # before it is returned - - if finalReturnType != np.float32: - grid = grid.astype(np.float32) - # - # Remove the minimum from the grid so that cumsum over a full - # row or column of the grid doesn't get so big that precision - # might be lost. 
- # - fullmin = np.amin(grid) - gridmin = grid - fullmin - # - # No mask is simpler - # - if mask is None: - # - # Average over the first (y) dimension - making the 'mid' grid - # - mid = np.zeros(grid.shape, np.float32) - c = np.cumsum(gridmin, 0) - nym1 = ny - 1 - midy = int((ny - 1.0) / 2.0) - ymax = min(k + 1, midy + 1) - for j in range(ymax): # handle edges - jk = min(j + k, nym1) - jk2 = max(nym1-j-k-1, -1) - mid[j,:] = c[jk,:]/float(jk + 1) - if jk2 == -1: - mid[nym1-j,:] = c[nym1,:] / float(jk + 1) - else: - mid[nym1-j,:] = (c[nym1,:] - c[jk2,:]) / float(jk + 1) - if (k + 1) <= (ny - k): # middle - mid[k+1:ny-k,:] = (c[k2+1:,:] - c[:-k2-1,:]) / float(k2 + 1) - # - # Average over the second (x) dimension - making the 'out' grid - # - c = np.cumsum(mid, 1) - out = np.zeros(grid.shape, np.float32) - nxm1 = nx - 1 - midx = int((nx - 1.0) / 2.0) - xmax = min(k+1, midx+1) - for j in range(xmax): # handle edges - jk = min(j+k, nxm1) - jk2 = max(nxm1-j-k-1, -1) - out[:,j] = c[:,jk] / float(jk + 1) - if jk2 == -1: - out[:,nxm1-j] = c[:,nxm1] / float(jk + 1) - else: - out[:,nxm1-j] = (c[:,nxm1] - c[:,jk2]) / float(jk + 1) - if (k + 1) <= (nx - k): # middle - out[:,k+1:nx-k] = (c[:,k2+1:] - c[:,:-k2-1]) / float(k2 + 1) - # - # Add the minimum back in - # - out += fullmin - # - # Mask makes it a bit more difficult - have to find out how many - # points were in each cumsum - and have to deal with possible - # divide-by-zero errors - # - else: - # - # Average over the first (y) dimension - making the 'mid' grid - # -## mask = np.clip(mask,0,1) # Mask should be a boolean - gridmin1 = np.where(mask, gridmin, 0.0) - mid = np.zeros(grid.shape, np.float32) - midd = np.zeros(grid.shape, np.float32) - c = np.cumsum(gridmin1, 0) - d = np.cumsum(mask, 0) - nym1 = ny - 1 - midy = int((ny - 1.0) / 2.0) - ymax = min(k+1, midy+1) - for j in range(ymax): # handle edges - jk = min(j+k, nym1) - jk2 = max(nym1-j-k-1, -1) - mid[j,:] = c[jk,:] - midd[j,:] = d[jk,:] - if jk2 == -1: - 
mid[nym1-j,:] = c[nym1,:] - midd[nym1-j,:] = d[nym1] - else: - mid[nym1-j,:] = c[nym1,:] - c[jk2,:] - midd[nym1-j,:] = d[nym1,:] - d[jk2,:] - if (k+1) <= (ny-k): # middle - mid[k+1:ny-k,:] = c[k2+1:,:] - c[:-k2-1,:] - midd[k+1:ny-k,:] = d[k2+1:,:] - d[:-k2-1,:] - # - # Average over the second (x) dimension - making the 'out' grid - # - c = np.cumsum(mid, 1) - d = np.cumsum(midd, 1) - out = np.zeros(grid.shape, np.float32) - nxm1 = nx - 1 - midx = int((nx - 1.0) / 2.0) - xmax = min(k+1, midx+1) - for j in range(xmax): # handle edges - jk = min(j+k, nxm1) - jk2 = max(nxm1-j-k-1, -1) - out[:,j] = c[:,jk] / np.maximum(d[:,jk], 1) - if jk2 == -1: - out[:,nxm1-j] = c[:,nxm1] / np.maximum(d[:,nxm1], 1) - else: - out[:,nxm1-j] = ((c[:,nxm1] - c[:,jk2]) / - np.maximum(d[:,nxm1] - d[:,jk2], 1)) - if ((k+1)<=(nx-k)): # middle - out[:,k+1:nx-k] = ((c[:,k2+1:] - c[:,:-k2-1]) / - np.maximum(d[:,k2+1:] - d[:,:-k2-1], 1)) - # - # Add the minimum back in - # - out += fullmin - out[~mask] = grid[~mask] - - # If we need to return this grid as an integer, round to the nearest - # integer before we do - if finalReturnType != np.float32: - out = np.rint(out) - - # Return the grid as either a float or integer - return out.astype(finalReturnType) +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# GridManipulation - Version 2.1 +# +# Author: Matthew H. Belk WFO Taunton MA +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 10, 2012 mbelk Initial creation +# Dec 03, 2015 mbelk ???? 
+# Sep 19, 2016 19293 randerso Initial baseline check in +# +######################################################################## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +import types, re + +import LogStream +import SmartScript +import TimeRange, AbsTime +import numpy as np + + +class GridManipulation(SmartScript.SmartScript): + + def __init__(self, dbss): + SmartScript.SmartScript.__init__(self, dbss) + + ############################################################################ + # (originally from CheckTandTd by Tom LeFebvre). + + def GM_getWEInventory(self, WEName, dbase="Fcst", level="SFC", + timeRange=TimeRange.allTimes()): + """Return a list of time ranges with available data for a weather element from + a specific database and level. + Args: + string WEName: name of the weather element to inventory + string dbase: name of database to search (default = 'Fcst') + string level: level of data to inventory (default = 'SFC') + timeRange: if specified, limit inventory to grids overlapping this timeRange + Returns: + Python list of Python time range objects + """ + +# print "Getting inventory of -> '%s' from '%s' at '%s'" % \ +# (WEName, dbase, level) + + trList = [] + # getGridInfo will just die if the modelName or weName is not valid + # so wrap it in a try block and return [] if it fails + try: + gridInfo = self.getGridInfo(dbase, WEName, level, timeRange) + except: + return trList + + trList = [g.gridTime() for g in gridInfo + if timeRange.overlaps(g.gridTime())] + return trList + + + def GM_getParmLocksByOthers(self, weName, level="SFC"): + """Return a list of time ranges locked by other CAVE sessions within the + current mutable database (typically 'Fcst'). 
+ Args: + string WEName: name of field to inventory + string level: level of data to inventory (default = 'SFC') + Returns: + Python list of Python time range objects + """ + + # returns list of time ranges locked by others for this weather element + parm = self.getParm(self.mutableID(), weName, level) + if parm is None: + return [] + + lockTable = parm.getLockTable() + locksByOthers = lockTable.lockedByOther() + trList = [] + + for lock in locksByOthers.toArray(): + print(lock) + + start = lock.getStart().getTime() / 1000 + end = lock.getEnd().getTime() / 1000 + tr = self.GM_makeTimeRange(start, end) + + trList.append(tr) + + return trList + + + def GM_overlappingTRs(self, timeRange, trList, closest=False): + """Return a list of time ranges of locked data within the current + mutable database (typically 'Fcst'). + Args: + TimeRange timeRange: a Python time range object + list trList: list of Python time range objects + boolean closest: if True, force new time range list to start and + end with the times closest to the start and end of + initial selected time range. If False (default), + only include times which overlap the initial + selected time range. 
+ Returns: + Python list of Python time range objects + """ + + # Get ready to return updated list of times + newTRList = [] + + # Get ready to track certain times + beforeTime = None # start time closest to selected time range start + afterTime = None # time range closest to selected time range start + beforeTR = None # start time closest to selected time range end + afterTR = None # time range closest to selected time range end + + # Get start and end time of selected time range + selectStartTime = timeRange.startTime() + selectEndTime = timeRange.endTime() + + #======================================================================= + # Examine each time range in the list + + for tr in trList: + + # If this time range overlaps the selected range + if timeRange.overlaps(tr): + + # Add it to the list + newTRList.append(tr) + + # Otherwise, if we should find the closest time ranges + elif closest: + + # Get the start time of this time range + startTime = tr.startTime() + + # Compute the difference between the start of this grid and + # the start and end times of our selected time range + diffStartTime = (startTime - selectStartTime) + diffEndTime = (startTime - selectEndTime) +# print "\t", diffStartTime, diffEndTime + + # If start time of this grid is the closest to start time of + # selected time range, or it's the first one + if beforeTime is None or \ + ((diffStartTime < 0 and diffStartTime >= beforeTime) or + (diffStartTime >= 0 and diffStartTime < beforeTime)): + + # Mark this grid as the closest to the selected start time + beforeTime = diffStartTime + beforeTR = tr + +# print "beforeTime =", beforeTime, beforeTR + + # If start time of this grid is the closest to end time of + # selected time range, or it's the first one + if afterTime is None or \ + (diffEndTime >= 0 and diffEndTime <= abs(afterTime)): + + # Mark this grid as the closest to the selected end time + afterTime = diffEndTime + afterTR = tr + +# print "afterTime =", afterTime, afterTR + +# print 
"newTRList = ", newTRList, beforeTR, afterTR + + # If we don't have any grids in the list and we should determine the + # closest grid time ranges to the selected time range + if len(newTRList) == 0 and closest: + + # Add closest start and end time ranges to selected time range + newTRList = [beforeTR, afterTR] + + # Ensure time ranges are sorted when we return them + newTRList.sort(self.GM_trSortMethod) + + # Finally, return our completed list + return newTRList + + + def GM_trSortMethod(self, first, last): + """Comparison method for sorting time ranges by start time. + Args: + TimeRange first: a Python time range object + TimeRange last: a Python time range object + Returns: + An integer indicating the ascending order of the compared time + range objects. + """ + if first.startTime() < last.startTime(): + return -1 + elif first.startTime() == last.startTime(): + return 0 + else: + return 1 + + + def GM_mergeTRLists(self, TRList1, TRList2): + """Merges and sorts Python time range lists into ascending order by start time. + Args: + TimeRange TRList1: a Python time range object + TimeRange TRList2: a Python time range object + Returns: + A merged and sorted list of Python time range objects. + """ + + # Merge the lists + combined = set(TRList1) | set(TRList2) + + # Sort the resulting time range list in ascending order + newList = sorted(combined, self.GM_trSortMethod) + + # Return the merged and sorted list + return newList + + # + ############################################################################ + + ############################################################################ + # Other utility methods originally provided by Tom LeFebvre (GSD) + + def GM_makeMaxTimeRange(self): + """Gets the maximum possible time range + Returns: + The maximum possible Python time range. + """ + return TimeRange.allTimes() + + + def GM_logToolUse(self, string): + """Inserts an entry into the log files. 
+ Args: + string string: message to be inserted into the log files + Returns: + Nothing + """ + + gtime = self._gmtime().timetuple() + ts="%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d"%(gtime[0], gtime[1], gtime[2], + gtime[3], gtime[4], gtime[5]) + + # Insert this message into the logs + LogStream.logEvent("%s| %s" % (ts, string)) + + + def GM_makeTimeRange(self, start, end): + """Creates a time range. + Args: + double start - start of time range in seconds since the epoch began + double end - end of time range in seconds since the epoch began + Returns: + Time range appropriate for AWIPS version + """ + + startTime = AbsTime.AbsTime(start) + endTime = AbsTime.AbsTime(end) + + return TimeRange.TimeRange(startTime, endTime) + + + def GM_makeTimeRangeList(self, executeTR, interpHours=1, duration=1): + """Creates a list of time range objects from specified time range. + Args: + executeTR - time range object appropriate to AWIPS version + integer interpHours - number of hours between each time step + (default = 1) + integer duration - duration of each time range in hours + (default = 1) + Returns: + Python list of time range appropriate for AWIPS version + """ + if interpHours <= 0: + raise ValueError("interpHours must be > 0") + + if duration <= 0 or duration > interpHours: + raise ValueError("duration must be > 0 and <= interpHours") + + start = executeTR.startTime().unixTime() + end = executeTR.endTime().unixTime() + + trList = [] + for t in range(start, end, 3600*interpHours): + + tr = self.GM_makeTimeRange(t, t + (duration * 3600)) + trList.append(tr) + return trList + + + def GM_getPrevNextModelTimes(self, modelInventory, targetTR): + """Searches a grid inventory for the first available time ranges before + and after the target time range and returns those objects + Args: + list modelInventory - list of available data times for a model + time range targetTR - time range to use as basis for search + Returns: + Previous and next time range objects appropriate for AWIPS 
version, + or None for missing data + """ + + # If we have a model inventory + if len(modelInventory) == 0: + print("Model Inventory is empty") + return None, None + + # Convert target time range object into number of seconds since epoch + targetTRsecs = targetTR.startTime().unixTime() + + #----------------------------------------------------------------------- + # Make sure we're in range + + # Target time range is before all available model data + if targetTRsecs < modelInventory[0].startTime().unixTime(): + return None, None + + # Target time range is after all available model data + if targetTRsecs > modelInventory[-1].startTime().unixTime(): + return None, None + + #----------------------------------------------------------------------- + # Search the model inventory + + for i in range(len(modelInventory)): + + # If we found the first available model time ranges on both sides + # of the target time range + if modelInventory[i].startTime().unixTime() < targetTRsecs and \ + modelInventory[i + 1].startTime().unixTime() > targetTRsecs: + + # Return these time range objects + return modelInventory[i], modelInventory[i+1] + + # If we made it this far, indicate we could not find appropriate + # time range objects + return None, None + + + def GM_interpolateSounding(self, model, weName, levels, timeRange, + modelInventory): + """Interpolates a sounding at the specified time range, if needed. + Otherwise, will use a cached sounding if appropriate. 
+ within the target time range and returns those objects + Args: + string model - model to use to grab cube + string weName - weather element name to get cube data for + list levels - list of levels to use in constructing cube + TimeRange timeRange - time range to use as basis for search + list modelInventory - list of available data times for a particular model + Returns: + cube of geopotential height and cube of specified field as a + Python tuple of numpy cube data + """ + + prevTR, nextTR = self.GM_getPrevNextModelTimes(modelInventory, + timeRange) + if prevTR is None or nextTR is None: + return None + + prevGHCube, prevCube = self.makeNumericSounding(model, weName, levels, + prevTR, noDataError=0) + nextGHCube, nextCube = self.makeNumericSounding(model, weName, levels, + nextTR, noDataError=0) + # calculate weights for a time-weighted average + t1 = timeRange.startTime().unixTime() - prevTR.startTime().unixTime() + t2 = nextTR.startTime().unixTime() - timeRange.startTime().unixTime() + prevWt = float(t2) / float(t1 + t2) + nextWt = float(t1) / float(t1 + t2) + + interpGHCube = (prevGHCube * prevWt) + (nextGHCube * nextWt) + + # If this is a cube of scalars + if re.search("(?i)wind", weName) is None: + interpCube = (prevCube * prevWt) + (nextCube * nextWt) + else: + + # Break up the wind into u and v components + (prevU, prevV) = self.MagDirToUV(prevCube[0], prevCube[1]) + (nextU, nextV) = self.MagDirToUV(nextCube[0], nextCube[1]) + + # Interpolate the wind components + interpU = (prevU * prevWt) + (nextU * nextWt) + interpV = (prevV * prevWt) + (nextV * nextWt) + + # Now compute the final wind magnitude and direction + interpCube = self.UVToMagDir(interpU, interpV) + + return interpGHCube, interpCube + + + def GM_interpolateGrid(self, model, weName, level, timeRange, + modelInventory): + """Interpolates a grid field at the specified time range, if needed. + Otherwise, will use a cached sounding if appropriate. 
+ within the target time range and returns those objects + Args: + string model - model to use to grab field + string weName - weather element name to get cube data for + string level - level of data to interpolate + TimeRange timeRange - time range to use as basis for search + list modelInventory - list of available data times for a particular model + Returns: + grid of specified field as numpy grid data + """ + prevTR, nextTR = self.GM_getPrevNextModelTimes(modelInventory, + timeRange) + + if prevTR is None or nextTR is None: + return None + + prevGrid = self.getGrids(model, weName, level, prevTR, noDataError=0) + nextGrid = self.getGrids(model, weName, level, nextTR, noDataError=0) + + # calculate weights for a time-weighted average + t1 = timeRange.startTime().unixTime() - prevTR.startTime().unixTime() + t2 = nextTR.startTime().unixTime() - timeRange.startTime().unixTime() + prevWt = t2 / float(t1 + t2) + nextWt = t1 / float(t1 + t2) + + # If this is a grid of scalars + if re.search("(?i)wind", weName) is None: + finalGrid = (prevGrid * prevWt) + (nextGrid * nextWt) + else: + + # Break up the wind into u and v components + (prevU, prevV) = self.MagDirToUV(prevGrid[0], prevGrid[1]) + (nextU, nextV) = self.MagDirToUV(nextGrid[0], nextGrid[1]) + + # Interpolate the wind components + interpU = (prevU * prevWt) + (nextU * nextWt) + interpV = (prevV * prevWt) + (nextV * nextWt) + + # Now compute the final wind magnitude and direction + finalGrid = self.UVToMagDir(interpU, interpV) + + return finalGrid + + # + ############################################################################ + + + ############################################################################ + # Define a method to manipulate grid times + ############################################################################ + + def GM_makeNewTRlist(self, dataDict, dataLocks, interpHours=3): + """Produces a list of Python time ranges. 
+ Args: + dataDict: Python dictionary of time ranges of available data keyed by database + dataLocks: Python list of time ranges which are locked by others + interpHours: requested time step in hours + Returns: + Python list of Python time range objects + """ + + #======================================================================= + # Make a new list of time ranges to iterate over + + newTRlist = [] + + #----------------------------------------------------------------------- + # Look at all the models we have data for + + for model in list(dataDict.keys()): + #------------------------------------------------------------------- + # Start with all time steps from this model + + for tr in list(dataDict[model].keys()): + #print "TR:", dir(tr) + + pyStart = self._gmtime(tr.startTime().unixTime()) + startHour = pyStart.tm_hour + +# print "HOUR:", startHour + #--------------------------------------------------------------- + # If this time range is not already locked by someone else, and + # it is one we would want to have but do not have yet, and it + # is one we have data for from this model + +# print "newTRlist:", newTRlist, "type:", type(newTRlist) +# print "dataLocks:", dataLocks, "type:", type(dataLocks) + + if tr not in newTRlist and tr not in dataLocks and \ + (startHour % interpHours) == 0 and \ + dataDict[model][tr] is not None: + + # Add this time range to the new time range list + newTRlist.append(tr) + + #----------------------------------------------------------------------- + # Sort new model time range list by time + + newTRlist.sort(self.GM_trSortMethod) + + #----------------------------------------------------------------------- + # Return completed consolidated time range list + + return newTRlist + + + ############################################################################ + # Define a method to adjust time range which will be deleted - this is so + # only grids for which we have data from selected model will be deleted + 
############################################################################ + + def GM_adjustDeleteTimeRange(self, timeRange, TRlist, adjustTR=0): + """Adjusts a time range for purposes of deleting grids. The intent is + to make it easier to interpolate between old and new data. + Args: + timeRange: Python time range object representing selected time + ranage + TRlist: Python list of Python time range objects where data is + available + integer adjustTR: number of hours to delete on either side of + available data to make for easier interpolation + Returns: + a TimeRange object spanning adjusted time range + """ + + #----------------------------------------------------------------------- + # Get ready to set new limits of the time range + + newStart = None + newEnd = None + + #----------------------------------------------------------------------- + # Look through the time ranges we have for model data + + for tr in TRlist: + + # If this grid is in the selected time range + if timeRange.overlaps(tr): + + # If we have not yet determined a start time + if newStart is None: + + # Define the new start time + newStart = tr.startTime().unixTime() - adjustTR*3600.0 + + # If we have not yet determined an end time + if tr.endTime().unixTime() > newEnd: + + # Define the new end time + newEnd = tr.endTime().unixTime() + adjustTR*3600.0 + +## print '+'*90 +## print newStart, newEnd +## print TimeRange.TimeRange(AbsTime.AbsTime(newStart), AbsTime.AbsTime(newEnd)) + + #----------------------------------------------------------------------- + # Return adjusted time range - if we did adjust it + + if newStart is not None and newEnd is not None: + + return TimeRange.TimeRange(AbsTime.AbsTime(newStart), + AbsTime.AbsTime(newEnd)) + + # Otherwise, return the original time range + else: + return timeRange + + + ############################################################################ + # Define a method to linearly interpolate data + 
############################################################################ + + def GM_interpolateData(self, dataDict, TRlist, interpHours=3, + vector=[], singleLevel=[]): + """Produces an updated Python dictionary with interpolated data where needed + Args: + dict dataDict - keyed by TimeRange, data for a specific time, can be mixed (e.g. gh, t, p) + list TRList - list of times ranges + integer interpHours - ???? + list vector - ???? + list singleLevel - ???? + Returns: + dict of interpolated data ???? + """ + + #----------------------------------------------------------------------- + # Determine the structure (i.e. how many fields are present) of the + # data dictionary + + try: + numFields = len(dataDict[TRlist[0]]) + except: + print("No data to interpolate!") + return dataDict + + #----------------------------------------------------------------------- + # Cycle through each time period we already have + + for index in range(len(TRlist) - 1): + +# print "\tindex = ", index + + #------------------------------------------------------------------- + # Define a list to hold the times we need to create soundings for + + makeList = [] + + #------------------------------------------------------------------- + # Get the time range of the current and next soundings we have + + current = TRlist[index] + next = TRlist[index + 1] +# print '*'*80 +# print current, next + + #------------------------------------------------------------------- + # Get the starting times of each sounding time range + + currentStart = current.startTime().unixTime() + nextStart = next.startTime().unixTime() + + #------------------------------------------------------------------- + # See how far apart these soundings are in time (hours) + + diffTime = nextStart - currentStart +# print diffTime, interpHours*3600 + + #------------------------------------------------------------------- + # If gap between data time steps are more than what we need + + if int(diffTime) > interpHours*3600: + + 
#-------------------------------------------------------------- + # Keep track of seconds we are between data time steps + + curTime = float(interpHours*3600) + + #--------------------------------------------------------------- + # Make a new time range every three hours +# print '\t', int(currentStart + curTime), int(nextStart) + + while int(currentStart + curTime) < int(nextStart): + + #----------------------------------------------------------- + # Compute linear interpolation weight + + weight = curTime / diffTime +# print "weight = ", weight + + #----------------------------------------------------------- + # Make a new TimeRange object for this new time step + + newTR = TimeRange.TimeRange( + AbsTime.AbsTime(currentStart + curTime), + AbsTime.AbsTime(currentStart + curTime + 3600) + ) + + #----------------------------------------------------------- + # Define an empty string to hold all interpolated data + # which should be placed within the final data structure + # for this time + + finalData = "" + + #=========================================================== + # Interpolate data for each field at this time step + + for field in range(numFields): + + # Create a final data structure for interpolated data + exec("data%d = []" % (field)) + + # If this field is a vector, make component data + # structures + if field in vector: + exec("data%dU = []" % (field)) + exec("data%dV = []" % (field)) + + #------------------------------------------------------- + # Get data from the current and next time steps we have + + try: + curData = dataDict[current][field] + except: + # No point in continuing with this time step + msg = "Could not get 'current' data -> %s" % \ + (repr(current)) + self.statusBarMsg(msg, "R") + continue # move on + + try: + nextData = dataDict[next][field] + except: + # No point in continuing with this time step + msg = "Could not get 'next' data -> %s" % \ + (repr(next)) + self.statusBarMsg(msg, "R") + continue # move on + + 
#------------------------------------------------------- + # If this field is a vector, separate it into its' + # u and v components + + if field in vector: + + (curU, curV) = self.MagDirToUV(curData[0], + curData[1]) + + (nextU, nextV) = self.MagDirToUV(nextData[0], + nextData[1]) + + #======================================================= + # If this field is a single level + + if field in singleLevel: + + if not vector: + data = (curData + (nextData - curData) * weight) + else: + u = (curU + (nextU - curU) * weight) + v = (curV + (nextV - curV) * weight) + + #--------------------------------------------------- + # Get the newly interpolated grids + + if not vector: + + if type(data) == list: + dataGrid = data[0] + else: + dataGrid = data + + else: + if type(u) == list: + uGrid = u[0] + else: + uGrid = u + + if type(v) == list: + vGrid = v[0] + else: + vGrid = v + + #--------------------------------------------------- + # Add current level into the new data structure + + if not vector: + exec("data%d = array(dataGrid)" % (field)) + else: + exec("data%dU = array(uGrid)" % (field)) + exec("data%dV = array(vGrid)" % (field)) + + #======================================================= + # Otherwise, cycle through each level in the sounding + + else: + + for level in range(curData.shape[0]): + + #----------------------------------------------- + # Construct sounding values for this level + + if not vector: + data = (curData[level] + + (nextData[level] - curData[level]) * + weight) + else: + u = (curU[level] + + (nextU[level] - curU[level]) * weight) + + v = (curV[level] + + (nextV[level] - curV[level]) * weight) + + #----------------------------------------------- + # Get the newly interpolated grids + + if not vector: + + if type(data) == list: + dataGrid = data[0] + else: + dataGrid = data + + else: + if type(u) == list: + uGrid = u[0] + else: + uGrid = u + + if type(v) == list: + vGrid = v[0] + else: + vGrid = v + + 
#----------------------------------------------- + # Add current level into the new sounding + + if not vector: + exec("data%d = data%d + [dataGrid]" % \ + (field, field)) + else: + exec("data%dU = data%dU + [uGrid]" % \ + (field, field)) + exec("data%dV = data%dV + [vGrid]" % \ + (field, field)) + + #--------------------------------------------------- + # Finish off the new cube for this time + + if not vector: + exec("data%d = array(data%d)" % (field, field)) + else: + exec("data%dU = array(data%dU)" % (field, field)) + exec("data%dV = array(data%dV)" % (field, field)) + + #======================================================= + # If this is a vector field, reconstruct vector from + # the components + + if vector: + exec("data%d = self.UVToMagDir(data%dU, data%dV)" %\ + (field, field, field)) + + #======================================================= + # Add current interpolated data for this time step to + # the final data structure + + exec("finalData += 'data%d'" % (field)) + + if field < (numFields - 1): + finalData += ", " + + #----------------------------------------------------------- + # Add this interpolated data to data structure + + exec("dataDict[newTR] = (%s)" % (finalData)) + + msg = "Created data for -> %s" % (repr(newTR)) + self.statusBarMsg(msg, "R") + + #----------------------------------------------------------- + # Move on to next desired time step + + curTime += float(interpHours)*3600.0 + + #----------------------------------------------------------------------- + # Return the completed data dictionary + + return dataDict + + + ############################################################################ + # Define a method to smooth data + ############################################################################ + + def GM_smoothGrid(self, grid, factor=3, mask=None): + """Produces a smoother version of a numpy grid. 
+ Args: + NDArray grid - numpy grid to be smoothed + integer factor - factor to control level of smoothing + bool NDArray mask - optional mask to limit area being smoothed + Returns: + smoothed grid as NDArray + """ + k = int(factor) # has to be integer number of gridpoints + if k < 1: # has to be a positive number of gridpoints + return grid + (ny, nx) = grid.shape + k2 = k * 2 + + finalReturnType = grid.dtype + + #----------------------------------------------------------------------- + # If the input grid is an integer type, convert it to a float before + # any smoothing takes place. It will be converted back to an integer + # before it is returned + + if finalReturnType != np.float32: + grid = grid.astype(np.float32) + # + # Remove the minimum from the grid so that cumsum over a full + # row or column of the grid doesn't get so big that precision + # might be lost. + # + fullmin = np.amin(grid) + gridmin = grid - fullmin + # + # No mask is simpler + # + if mask is None: + # + # Average over the first (y) dimension - making the 'mid' grid + # + mid = np.zeros(grid.shape, np.float32) + c = np.cumsum(gridmin, 0) + nym1 = ny - 1 + midy = int((ny - 1.0) / 2.0) + ymax = min(k + 1, midy + 1) + for j in range(ymax): # handle edges + jk = min(j + k, nym1) + jk2 = max(nym1-j-k-1, -1) + mid[j,:] = c[jk,:]/float(jk + 1) + if jk2 == -1: + mid[nym1-j,:] = c[nym1,:] / float(jk + 1) + else: + mid[nym1-j,:] = (c[nym1,:] - c[jk2,:]) / float(jk + 1) + if (k + 1) <= (ny - k): # middle + mid[k+1:ny-k,:] = (c[k2+1:,:] - c[:-k2-1,:]) / float(k2 + 1) + # + # Average over the second (x) dimension - making the 'out' grid + # + c = np.cumsum(mid, 1) + out = np.zeros(grid.shape, np.float32) + nxm1 = nx - 1 + midx = int((nx - 1.0) / 2.0) + xmax = min(k+1, midx+1) + for j in range(xmax): # handle edges + jk = min(j+k, nxm1) + jk2 = max(nxm1-j-k-1, -1) + out[:,j] = c[:,jk] / float(jk + 1) + if jk2 == -1: + out[:,nxm1-j] = c[:,nxm1] / float(jk + 1) + else: + out[:,nxm1-j] = (c[:,nxm1] - 
c[:,jk2]) / float(jk + 1) + if (k + 1) <= (nx - k): # middle + out[:,k+1:nx-k] = (c[:,k2+1:] - c[:,:-k2-1]) / float(k2 + 1) + # + # Add the minimum back in + # + out += fullmin + # + # Mask makes it a bit more difficult - have to find out how many + # points were in each cumsum - and have to deal with possible + # divide-by-zero errors + # + else: + # + # Average over the first (y) dimension - making the 'mid' grid + # +## mask = np.clip(mask,0,1) # Mask should be a boolean + gridmin1 = np.where(mask, gridmin, 0.0) + mid = np.zeros(grid.shape, np.float32) + midd = np.zeros(grid.shape, np.float32) + c = np.cumsum(gridmin1, 0) + d = np.cumsum(mask, 0) + nym1 = ny - 1 + midy = int((ny - 1.0) / 2.0) + ymax = min(k+1, midy+1) + for j in range(ymax): # handle edges + jk = min(j+k, nym1) + jk2 = max(nym1-j-k-1, -1) + mid[j,:] = c[jk,:] + midd[j,:] = d[jk,:] + if jk2 == -1: + mid[nym1-j,:] = c[nym1,:] + midd[nym1-j,:] = d[nym1] + else: + mid[nym1-j,:] = c[nym1,:] - c[jk2,:] + midd[nym1-j,:] = d[nym1,:] - d[jk2,:] + if (k+1) <= (ny-k): # middle + mid[k+1:ny-k,:] = c[k2+1:,:] - c[:-k2-1,:] + midd[k+1:ny-k,:] = d[k2+1:,:] - d[:-k2-1,:] + # + # Average over the second (x) dimension - making the 'out' grid + # + c = np.cumsum(mid, 1) + d = np.cumsum(midd, 1) + out = np.zeros(grid.shape, np.float32) + nxm1 = nx - 1 + midx = int((nx - 1.0) / 2.0) + xmax = min(k+1, midx+1) + for j in range(xmax): # handle edges + jk = min(j+k, nxm1) + jk2 = max(nxm1-j-k-1, -1) + out[:,j] = c[:,jk] / np.maximum(d[:,jk], 1) + if jk2 == -1: + out[:,nxm1-j] = c[:,nxm1] / np.maximum(d[:,nxm1], 1) + else: + out[:,nxm1-j] = ((c[:,nxm1] - c[:,jk2]) / + np.maximum(d[:,nxm1] - d[:,jk2], 1)) + if ((k+1)<=(nx-k)): # middle + out[:,k+1:nx-k] = ((c[:,k2+1:] - c[:,:-k2-1]) / + np.maximum(d[:,k2+1:] - d[:,:-k2-1], 1)) + # + # Add the minimum back in + # + out += fullmin + out[~mask] = grid[~mask] + + # If we need to return this grid as an integer, round to the nearest + # integer before we do + if finalReturnType 
!= np.float32: + out = np.rint(out) + + # Return the grid as either a float or integer + return out.astype(finalReturnType) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/HazardUtils.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/HazardUtils.py index 8408081caa..7ee140424e 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/HazardUtils.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/HazardUtils.py @@ -1,1110 +1,1110 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# HazardUtils -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Jan 16, 2015 4006 njensen create _getUniqueKeys() mask with dtype bool -# 06/08/16 19096 ryu Change mask to boolean data type -# -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. -## - -import SmartScript -import time, string -import VTECTable -import LogStream -import numpy -from AbsTime import AbsTime -from AbsTime import current -from TimeRange import TimeRange -from java.util import Date -from java.util import ArrayList -import jep -from JUtil import JavaWrapperClass - - -def LOCK_HOURS(): - return 192 - -def HOUR_SECONDS(): - return 3600 - -MODEL = "Fcst" -ELEMENT = "Hazards" -LEVEL = "SFC" - -# Status return codes for _separateHazardGrids -SUCCESS = 1 -FAIL_REDUNDANT = 0 -FAIL_LOCK = -1 - -class HazardUtils(SmartScript.SmartScript): - def __init__(self, dbss, eaMgr, mdMode=None, toolType="numeric"): - SmartScript.SmartScript.__init__(self, dbss) - - # self.setUp(eaMgr, mdMode, toolType) - - ## - # Get timeRanges that make up the inventory of the given weather element. - # This is normally only used for the hazards inventory, so model is "Fcst" - # and level is "SFC" in the lookup. - # - # @param WEName: The weather element whose time ranges are to be acquired. - # @type WEName: string - # @param timeRange: optional time range of the inventory. If not specified, - # the default is from 24 hours ago to ten days from now. - # @type timeRange: Java or Python TimeRange - # @param asJava: If True, the inventory is returned as a list of Java - # TimeRanges; if False, the inventory is returned as a list - # of Python TimeRanges. The default is False. - # @type asJava: boolean - # @return: The time ranges for WEName that overlap the specified or default - # time range. 
- def _getWEInventory(self, WEName, timeRange=None, asJava=False): - # set up a timeRange if it is None - if timeRange is None: - now = current() - yesterday = now - (24 * 3600) # one day ago - later = now + 10 * 24 * 3600 # 10 days from now - timeRange = self._makeTimeRange(yesterday.unixTime(), later.unixTime()) - parm = self.getParm(MODEL, WEName, LEVEL) - trList = [] - if parm is not None: - if isinstance(timeRange, JavaWrapperClass): - timeRange = timeRange.toJavaObj() - gridInventory = parm.getGridInventory(timeRange) - for g in gridInventory: - gridTimeRange = g.getGridTime() - tr = gridTimeRange.clone() - if not asJava: - tr = TimeRange(tr) - trList.append(tr) - - return trList - - # makes a TimeRange from a start and end integers - def _makeTimeRange(self, start, end): - return TimeRange(AbsTime(start), AbsTime(end)) - - ## - # Get timeRanges that correspond to gaps in the specified WEName inventory - # within the specified time ranges. - # - # @param WEName: A weather element name - # @type WEName: string - # @param trList: Time ranges of interest - # @type trList: list of Python or Java TimeRange - # @return: time ranges overlapping timeRange that are missing from the - # inventory of WEName - # @rtype: list of Python TimeRanges - def _getGaps(self, WEName, trList): - - fullHazardInv = self._getWEInventory(WEName) - gaps = [] - - for timeRange in trList: - - # Convert Java TimeRange to Python for comparisons - if not isinstance(timeRange, TimeRange): - timeRange = TimeRange(timeRange) - - hazInv = [] - for h in fullHazardInv: - if timeRange.overlaps(h): - hazInv.append(h) - - # check for empty inventory - if len(hazInv) == 0: # no grids at all - gaps.append(timeRange) - continue - - # see if we have a gap at the beginning - if timeRange.startTime() < hazInv[0].startTime(): - tr = TimeRange(timeRange.startTime(), - hazInv[0].startTime()) - gaps.append(tr) - - # Find any gaps in the middle of the inventory - for i in range(len(hazInv) - 1): - if 
hazInv[i].endTime() != hazInv[i+1].startTime(): - gapTR = TimeRange(hazInv[i].endTime(), - hazInv[i+1].startTime()) - gaps.append(gapTR) - - # see if we have a gap at the end of the inventory - if timeRange.endTime() > hazInv[-1].endTime(): - tr = TimeRange(hazInv[-1].endTime(), - timeRange.endTime()) - gaps.append(tr) - - return gaps - - ## - # Create an empty hazards-type grid with the specified - # name and timeRange - # - # @param weName: The name of the weather element to create. - # @type weName: string - # @param timeRange: The time range of the new grid. - # @type timeRange: a Java or Python TimeRange - # @raise JepException: when raised by SmartScript methods. - def _makeEmptyHazardGrid(self, weName, timeRange): - gridShape = self.getGridShape() - byteGrid = numpy.zeros(gridShape, dtype=numpy.int8) - hazKeys = self.getDiscreteKeys(ELEMENT) - currentKeys = [""] - # make the grid - if weName == ELEMENT: - self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, currentKeys), - timeRange, discreteKeys=hazKeys, - discreteAuxDataLength=4, discreteOverlap=1) - else: - hazard = self._tempWENameToKey(weName) - discreteKeys = ["", hazard] - hazKeyDesc = self._addHazardDesc(discreteKeys) - self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, currentKeys), - timeRange, discreteKeys=hazKeyDesc, - discreteAuxDataLength=4, discreteOverlap=0, - defaultColorTable="YesNo") - return - - ## - # Prepare the Hazards inventory so that it can be merged with the - # activeTable. This includes splitting grids and adding new ones where - # we have gaps. 
- # - # @param weName: Name of a weather element - # @type weName: string - # @param trList: Time ranges of interest - # @type trList: list of Python or Java TimeRanges - def _setupHazardsInventory(self, weName, trList): - # see if the element exists yet, if not, make a new grid - # This is a painful way just to see if the grid exists - # but all other techniques fail for temporary weather elements - now = current() - yesterday = now - (24 * 3600) # one day ago - later = now + 10 * 24 * 3600 # 10 days from now - timeRange = TimeRange(yesterday, later).toJavaObj() - try: - gridInfo = self.getGridInfo(MODEL, weName, LEVEL, timeRange) - except: # this means the WE does not exist, so make a grid - if len(trList) <= 0: - return - for tr in trList: - self._makeEmptyHazardGrid(weName, tr) - return - # fill any gaps in the inventory - gapList = self._getGaps(weName, trList) - for g in gapList: - self._makeEmptyHazardGrid(weName, g) - - # Split the grids at the timeRange boundaries - unix_now = now.unixTime() - for tr in trList: - # If tr is a java timerange, convert it to a python TimeRange - if not isinstance(tr, TimeRange): - tr = TimeRange(tr) - end = tr.endTime().unixTime() - if end > unix_now: - # parm.splitTR() will split timeRanges with non-zero minutes - # to the next hour. 
So, truncate start and end times to the - # previous hour and then split - start = tr.startTime().unixTime() - start = int(start / 3600) * 3600 - end = int(end / 3600) * 3600 - roundedTR = TimeRange(AbsTime(start), AbsTime(end)).toJavaObj() - parm = self.getParm(MODEL, weName, LEVEL) - self.splitCmd([weName], roundedTR) - - return - - # returns a Numeric mask where each zone in zoneList is set to 1 - def _makeMask(self, zoneList): - mask = self.empty(dtype=numpy.bool) - eaList = self.editAreaList() - for z in zoneList: - if z in eaList: - zoneArea = self.getEditArea(z) - zoneMask = self.encodeEditArea(zoneArea) - mask = numpy.logical_or(mask, zoneMask) - - return mask - - # Fetches the gridSize from the GFE and returns it as a tuple. - def _getGridSize(self): - return self.getGridShape() - - ## - # Determine whether temporary weather elements are loaded. - # - # @return: 1 if temporary weather elements are loaded; - # 0 otherwise. - def _tempWELoaded(self): - parms = self.loadedParms() - for weName, level, dbID in parms: - if string.find(weName, "haz") == 0: - return 1 - - return 0 - - ## - # Create a temporary weather element name from key. - # - # @param key: String like BZ.W:1234, or LCLKEY, or BZ.W - # @type key: string - # @return: key with 'haz' prepended and any '.' or ':' chars removed. - # @rtype: string - def _makeTempWEName(self, key): - "Create a temporary weather element name from a key string." - #key is BZ.W:1234, or LCLKEY, or BZ.W - key = string.replace(key, ".","") - key = string.replace(key, ":","") - weName = "haz" + key - return weName - - ## - # Create a key string from a temporary weather element name. - # - # @param wename: A temporary weather element name - # @type wename: string - # @return: The key string from which the temporary element was derived. - # @rtype: string - def _tempWENameToKey(self, wename): - "Make a key string from a temporary weather element name." 
- #wename is hazBZW, hazBZW1234, hazLCLK - if len(wename) > 3 and wename[0:3] == 'haz': - key = wename[3:] #eliminate "haz" - if len(key) >= 3: - vkey = key[0:2] + '.' + key[2] - if VTECTable.VTECTable.has_key(vkey): - seg = key[3:] - if len(seg): - return vkey + ':' + seg - else: - return vkey - # local key, look for segment via digits - else: - lkey = key - for i in xrange(len(key)): - if key[i:].isdigit(): - lkey = key[0:i] + ":" + key[i:] - break - return lkey - else: - # TODO: or should I fail? - return key - else: - raise Exception, "Illegal wename: " + wename - - ## - # Gets the unique list of keys over the specified mask - # if no mask is passed, the entire grid is used - # - # @param byteGrid: Grid of indices - # @type byteGrid: Numpy array of int8 - # @param keys: Keys associated with byteGrid. If byteGrid[2,2] is 3, then - # keys[3] describes its state. - # @type keys: List of strings - # @param mask: Optional mask of points to include; defaults to all ones. - # @type mask: Numpy array of boolean, same dimensions as byteGrid; - # @return: The keys referenced by the masked byteGrid, without duplicates. - # @rtype: List of strings - def _getUniqueKeys(self, byteGrid, keys, mask = None): - uniqueKeys = [] - - # if mask is None, make a mask of the whole area - if mask is None: - mask = numpy.ones(byteGrid.shape, numpy.bool) - - # get the list of values over the mask area only - valueList = numpy.compress(mask.flat, byteGrid.flat) - - # remove the duplciates to get unique values - uniqueValues = list( numpy.unique(valueList) ) - - # extract the keys that correspond to the byte values - for u in uniqueValues: - uniqueKeys.append(keys[u]) - - return uniqueKeys - - ## - # Get the phen portion of key. - # If key is not a VTEC hazard key, returns "" - # @param key: A grid key - # @type key: string - # @return: The phen portion of key. 
- # @rtype: string - def _keyPhen(self, key): - pos = string.find(key, ".") - if pos == -1: # not found - return "" - - return key[0:pos] - - ## - # Get the sig portion of key. - # If key is not a VTEC hazard key, returns "" - # - # @param key: A grid key. - # @type key: string - # @return: The sig portion of key. - # @rtype: string - def _keySig(self, key): - pos = string.find(key, ".") - if pos == -1: # not found - return "" - - return key[pos + 1] - - ## - # Combine newKey with subKeys and return a new combined key. Enforces the - # rule that keys with the same phen returns the one key with the highest - # priority sig. - # - # @param subKeys: The old key. - # @type subKeys: string - # @param newKey: The key to add. - # @type newKey: string - # @return: The key made by combining subKeys with newKey. - # @rtype: string - def _combinedKey(self, subKeys, newKey): - if newKey is None: - return subKeys - - subKeyList = string.split(subKeys, "^") - - # check for same keys - if newKey in subKeyList: - return subKeys - - defaultCombo = subKeys + "^" + newKey - - # check for non-VTEC key - if string.find(newKey, ".") == -1: - return defaultCombo - - # more exceptions - these phens are above the law - exceptions = ["TO", "SV", "FF"] - sigList = ["W", "Y", "A"] - if self._keyPhen(newKey) in exceptions: - return defaultCombo - - subKeyList = string.split(subKeys, "^") - for sk in subKeyList: - if self._keyPhen(sk) == self._keyPhen(newKey): - subSig = self._keySig(sk) - newSig = self._keySig(newKey) - if subSig == newSig: - return subKeys - - if subSig not in sigList or newSig not in sigList: - continue - - if sigList.index(subSig) > sigList.index(newSig): - subKeys = subKeys.replace(sk, newKey) - - return subKeys - - return defaultCombo - - - # Makes a new hazard given the old key oldKey and a new watch phenSig. - # @param oldKey: The old key - # @type oldKey: string - # @param phenSig: The new watch phen and sig - # @type phenSig: string - # @return: A new combined key. 
- # @rtype: string - def _makeNewKey(self, oldKey, phenSig): - # check for the dumb cases - if oldKey == "" or oldKey == phenSig: - return phenSig - - # split up the key, add the hazard, sort, and reassemble - parts = string.split(oldKey, "^") - parts.append(phenSig) - parts.sort() # makes sure the same set of subKeys look the same - - # assemble the new key - newKey = "" - for p in parts: - if newKey == "": - newKey = p - else: - newKey = self._combinedKey(newKey, p) - - # just in case - if newKey == "": - newKey = "" - - return newKey - - ## - # Get the subkeys of key - # - # @param key: A key to divide into subkeys - # @type key: String - # @return: The subkeys of key - # @rtype: List of strings - def _getSubKeys(self, key): - parts = string.split(key, "^") - if "" in parts: - parts.remove("") - return parts - - def _removeSubKey(self, key, subKey): - newKey = "" - for p in string.split(key, "^"): - if p == subKey: - continue - if newKey == "": - newKey = p - else: - newKey = newKey + "^" + p - - if newKey == "": - newKey = "" - - return newKey - - ## - # Take a sequence or set of time ranges and produce a set of time ranges by - # combining all adjacent or overlapping time ranges in the sequence. 
- # - # @param timeranges: the timeranges to merge - # @type timeranges : sequence, set or frozenset of TimeRange - # @return: the merged timeranges - # @rtype: set of TimeRange - def _mergeTimeranges(self, timeranges): - trset = set(timeranges) - # Loop until a pass doesn't merge any time ranges - moreToDo = True - while moreToDo: - moreToDo = False - merged = [] - for tr in trset: - found = False - for idx, mtr in enumerate(merged): - if tr == mtr: - found = True - break - elif tr.overlaps(mtr) or tr.isAdjacentTo(mtr): - found = True - merged[idx] = mtr.join(tr) - moreToDo = True - break - if not found: - merged.append(tr) - trset = set(merged) - return trset - - ## - # Determine whether the time ranges of any (temporary) parm in hazParms - # overlaps a locked time range of the Hazards element. If not, add the - # time ranges of the temporary parms to the locked time ranges of the - # Hazards parm. - # - # @param hazParms: Temporary hazard parm names. - # @type hazParms: sequence of string - # @return: 0 if there are not conflicting locks, 1 if there are - # @rtype: int - def _conflictingLocks(self, hazParms): - # find all the time ranges that should be locked - neededTRs = set() - - for hazParm in hazParms: - trList = self._getWEInventory(hazParm) - neededTRs = neededTRs.union(trList) - - # Find all the time ranges that are locked in Hazards - myTRs = self.lockedByMe(ELEMENT, LEVEL) - myTRs = set(myTRs) - - # Add locks we already have to the needed TRs, - # in case grids were deleted - neededTRs = neededTRs.union(myTRs) - - # Squish the TRs into contiguous blocks - neededTRs = self._mergeTimeranges(neededTRs) - - # See if there are any blocks we don't have yet - missingTRs = neededTRs - myTRs - - # If not, then there are no conflicts and we're done. 
- if len(missingTRs) == 0: - return 0 - - startTimes = jep.jarray(len(missingTRs), Date) - - midx = 0 - for missingTR in missingTRs: - startTimes[midx] = missingTR.toJavaObj().getStart() - midx += 1 - - hazardParm = self.getParm(MODEL, ELEMENT, LEVEL) - gridData = None - try: - gridData = hazardParm.startParmEdit(startTimes) - except RuntimeError, runtimeErr: - if runtimeErr.message is None: - raise - if runtimeErr.message.startswith("com.raytheon.viz.gfe.GFEOperationFailedException:"): - return 1 - else: - raise - - if gridData is not None and len(gridData) != 0: - if not hazardParm.endParmEdit(): - return 1 - - # The locks acquired in the endParmEdit() call may not have been quite right. - # However, we needed to end the parm edit. - # Negotiate the locks we _really_ need now that it's done. - locktable = hazardParm.getLockTable() - LOCK = locktable.getClass().getLockMode("LOCK"); - - from com.raytheon.uf.common.dataplugin.gfe.server.request import LockRequest - desiredLocks = ArrayList() - for missingTR in missingTRs: - newLock = LockRequest() - newLock.setParmId(hazardParm.getParmID()) - newLock.setTimeRange(missingTR.toJavaObj()) - newLock.setMode(LOCK) - desiredLocks.add(newLock) - - client = hazardParm.getDataManager().getClient() - serverResponse = client.requestLockChange(desiredLocks) - if not serverResponse.isOkay(): - hazardParm.undo() - return 1 - - return 0 - - ## - # Create a list of (key, desc) tuples from keys. - # For each key in keys, look up the key in VTECTable. - # If the key is found, use its headline value as its description; - # otherwise, use the key as its own description. - # - # @param keys: Keys to look up descriptions for. 
- # @type keys: iterable of strings - # @return: keys and descriptions for the key - # @rtype: list of 2-tuples - def _addHazardDesc(self, keys): - newKeys = [] - for k in keys: - index = string.find(k, ':') - if index != -1: - k = k[0:index] #eliminate the colon and segment # - if not VTECTable.VTECTable.has_key(k): - desc = k - else: - # get the description - desc = VTECTable.VTECTable[k]['hdln'] - - newKeys.append((k, desc)) - - return newKeys - - ## - # Determine whether the Hazards forecast weather element is loaded. - # - # @param weName: The name of the weather element. Defaults to "Hazards". - # @type wename: string - # @return: 1 if the weather element is loaded, 0 otherwise - # @rtype: int - def _hazardsLoaded(self, weName=ELEMENT): - - tupleList = self.loadedParms() - ## look for the Hazards Weather element - for element, level, databaseID in tupleList: - modelName = databaseID.modelName() - if element == weName and level == LEVEL and modelName == MODEL: - return 1 - - # if we got this far we didn't find it. - return 0 - - ## - # Remove any grids for weName whose end times are in the past - # - # @param weName: A weather element name. - # @type weName: string - # @raise JepException: if calls to Java methods fail. - def _removeOldGrids(self, weName): - # get the inventory - trList = self._getWEInventory(weName) - - for tr in trList: - if tr.endTime().unixTime() < current().unixTime(): - self.deleteCmd([weName], tr) - - return - - ## - # Remove any data grids for MODEL, ELEMENT, and LEVEL over the default - # inventory timerange (from now to 10 days in the future). The parm - # remains in the parm manager. - def _removeAllHazardsGrids(self): - - removeTRList = self._getWEInventory(ELEMENT, asJava=True) - - # Remove the real Hazards grid - for tr in removeTRList: - if not self.deleteGrid(MODEL, ELEMENT, LEVEL, tr): - return False - return True - - - ## - # Destroy all the temporary hazards (they are removed from the parm manager). 
- # - def _removeTempHazardWEs(self): - parms = self.loadedParms() - - for weName, level, dbID in parms: - if string.find(weName, "haz") == 0 and len(weName) > 3: - self.unloadWE(MODEL, weName, level) - - return - - ## - # Determine whether the indicated grids are consecutive in time and - # identical in value at every point. - # @attention: This method assumes timeRange1 begins before timeRange2. - # It will give wrong answers if their order is reversed - # - # @param weName: Weather element name - # @type weName: string - # @param timeRange1: First time range for weather element - # @type timeRange1: Python TimeRange - # @param timeRange2: Second time range for weather element - # @type timeRange2: Python TimeRange - # @return: True if the end time for timeRange1 matches the start time of - # timeRange2 and the grid for weName during timeRange1 is identical - # to the grid for weName during timeRange2, False otherwise. - # @rtype: boolean - def _consecutiveIdenticalGrids(self, weName, timeRange1, timeRange2): - if timeRange1.endTime() == timeRange2.startTime(): - # get the grids - firstGrid, key = self.getGrids(MODEL, weName, LEVEL, - timeRange1.toJavaObj(), mode="First", cache=0) - secondGrid, key = self.getGrids(MODEL, weName, LEVEL, - timeRange2.toJavaObj(), mode="First", cache=0) - if numpy.sometrue(numpy.logical_xor(firstGrid, secondGrid)): - return 0 - else: - return 1 - - return 0 - - ## - # Replace existing grids for weName with a single grid over the - # time range from groupStart to groupEnd. - # - # This function should only be used by _consolidateTimes(); it - # exists only to be sure we create the consolidated grid the same way in - # the "last timeRange" code block as we do in the "broke the string" block. 
- # @param groupStart: Starting time as seconds since the epoch - # @type groupStart: int - # @param groupEnd: Ending time as seconds since the epoch - # @type groupEnd: int - # @param weName: (temporary) weather element name - # @type weName: string - # @return: None - def _createConsolidatedGrid(self, groupStart, groupEnd, weName): - "Used internally by _consolidateTimes()" - timeRange = self._makeTimeRange(groupStart, groupEnd).toJavaObj() - byteGrid, hazKey = self.getGrids(MODEL, weName, LEVEL, - timeRange, mode="First", cache=0) - if isinstance(hazKey, str): - hazKey = eval(hazKey) - self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, hazKey), - timeRange, discreteOverlap=1, - discreteAuxDataLength=4) - - ## - # Consolidate grid times for each weather element in weNameList. - # For each weather element, find time ranges that touch whose grids are - # identical and turn them into a single grid for the combined time range. - def _consolidateTimes(self, weNameList): - for weName in weNameList: - # Get "all" the time ranges for this element - trList = self._getWEInventory(weName) - if len(trList) == 0: - return - - count = 1 - groupStart = int(trList[0].startTime().unixTime()) - groupEnd = int(trList[0].endTime().unixTime()) - - for i in range(0, len(trList) - 1): - if self._consecutiveIdenticalGrids(weName, trList[i], trList[i+1]): - # keep looking for the end - count = count + 1 - groupEnd = int(trList[i+1].endTime().unixTime()) - else: # broke the string of grids - if count > 1: # store the new time-consolidated grid - self._createConsolidatedGrid(groupStart, groupEnd, weName) - # reset the times - groupStart = int(trList[i+1].startTime().unixTime()) - groupEnd = int(trList[i+1].endTime().unixTime()) - count = 1 - - # make sure we catch the last timeRange - if count > 1: # store the new time-consolidated grid - self._createConsolidatedGrid(groupStart, groupEnd, weName) - - return - - ## - # Lock any grids in the hazards parm from now to 10 hours in the 
future. - # - # @return: the hazards parm and its igrids - # @rtype: a 2-tuple; the first item is a Parm, the second is a list of IGridDatas, - # which, for discrete grids, translate to a 2-tuple containing a numpy - # array and a key string. So, like this: - # (parm,[(arr0,key0), (arr1,key1), ...]) - # - def _lockHazards(self): - "Flag the hazards parm as being edited. Return the hazards parm and its grid." - hazParm = self.getParm(MODEL, ELEMENT, LEVEL) - startAbsTime = AbsTime(int(current().unixTime() /3600)*3600) - endAbsTime = startAbsTime + LOCK_HOURS() * HOUR_SECONDS() - timeRange = TimeRange(startAbsTime, endAbsTime) - - inventory = self._getWEInventory(ELEMENT, timeRange, asJava=True) - startTimes = jep.jarray(len(inventory), Date) - for trNum in range(len(inventory)): - startTimes[trNum] = inventory[trNum].getStart() - gridData = None - try: - # startParmEdit() refreshes the grids and sets up the times that endParmEdit() will lock. - gridData = hazParm.startParmEdit(startTimes) - except RuntimeError, runtimeErr: - if runtimeErr.message is None: - raise - if runtimeErr.message.startswith("com.raytheon.viz.gfe.GFEOperationFailedException:"): - self.statusBarMsg("There are conflicting locks. " + \ - "Please resolve these before adding any hazards", "S") - hazParm = None - else: - raise - - # endParmEdit() locks the grids. - # The locks will be released when the forecast is saved. - if hazParm is not None: - locked = True - if len(startTimes) != 0: - locked = hazParm.endParmEdit() - if locked: - locked = hazParm.forceLockTR(timeRange.toJavaObj()) - if not locked: - self.statusBarMsg("There are conflicting locks. " + \ - "Please resolve these before adding any hazards", "S") - hazParm = None - - return (hazParm, gridData) - - ## - # Let other users edit the hazards parm. - # - # @return: True for success, False otherwise. - # @raise JepException: if the hazards parm was not being edited. - def _endEdit(self): - "Let other users edit the hazards parm. 
Return True for success." - hazParm = self.getParm(MODEL, ELEMENT, LEVEL) - return hazParm.endParmEdit() - - ## - # Make temporary hazard grids for each hazard subkey. - # Hazards are "being edited" until they are merged again. - # - # @return: True if separation succeeded, false otherwise. - # - def _separateHazardGrids(self): - "Make temporary hazard grids for each hazard subkey." - - # if any temp hazard grids are loaded, don't separate again - if self._tempWELoaded(): - return FAIL_REDUNDANT #already separated - - hazParm, gridData = self._lockHazards() - if hazParm is None: - return FAIL_LOCK # unavailable - - # get a collection of distinct Java TimeRange objects - trSet = set() - for gd in gridData: - trSet.add(gd.getGridTime()) - - # Create a set of temporary weather element names - weNameSet = set() - - for tr in trSet: - # Get the index grid and key list for the real Hazards element - byteGrid, hazKey = self.getGrids(MODEL, ELEMENT, LEVEL, tr, - mode="First") - if isinstance(hazKey, str): - hazKey = eval(hazKey) - - # Only work with the keys that have points in the grid - uniqueKeys = self._getUniqueKeys(byteGrid, hazKey) - if len(uniqueKeys) > 0: - # build list of split hazKeys for use in loop below - splitHazKeys = [] - for haz in hazKey: - splitHazKeys.append(self._getSubKeys(haz)) - - for uKey in uniqueKeys: - - if uKey == "": - continue - - # split the current key into its subkeys - subKeys = self._getSubKeys(uKey) - for subKey in subKeys: - # make the temporary name - weName = self._makeTempWEName(subKey) - - # make the mask - find all areas that contain the subKey - mask = numpy.zeros(byteGrid.shape, dtype=numpy.bool) - for hazIndex in range(len(hazKey)): - if subKey in splitHazKeys[hazIndex]: - mask |= (byteGrid==hazIndex) - - # make the grid - self._addHazard(weName, tr, subKey, mask) - pytr = TimeRange(tr) - logmsg = " ".join(["Separate", weName, - self._printTime(pytr.startTime().unixTime()), - self._printTime(pytr.endTime().unixTime()), 
subKey]) - LogStream.logEvent(logmsg) - - # save the weNames for later - weNameSet.add(weName) - - # Combine time ranges for the temporary weather elements we created - self._consolidateTimes(weNameSet) - - return SUCCESS - - ## - # Add the specified hazard to weName over the specified timeRange - # and spatially over the specified mask. Combines the specified - # hazard with the existing hazards by default. For replaceMode, - # specify 0 in the combineField. - # - # @param weName: The weather element name. - # @type wename: string - # @param timeRange: Time range of the hazard. - # @type timeRange: Java or Python TimeRange - # @param addHaz: Key for the new hazard - # @type addHaz: string - # @return: None - def _addHazard(self, weName, timeRange, addHaz, mask, combine=1): - # Python TimeRanges are easy to compare. - # Java methods require Java TimeRanges. - # Make sure we have one of each. - if isinstance(timeRange, JavaWrapperClass): - pyTimeRange = timeRange - timeRange = timeRange.toJavaObj() - else: - pyTimeRange = TimeRange(timeRange) - # refuse to make new grids that are more than one hour in the past - if pyTimeRange.endTime().unixTime() < current().unixTime() - HOUR_SECONDS(): - msg = "skipped time range creation: %s < %s" % (pyTimeRange.endTime().string(), current().string()) - return - - # set up the inventory first - self._setupHazardsInventory(weName, [timeRange]) - - # get the inventory - trList = self._getWEInventory(weName, timeRange, asJava=True) - - # coerce mask into a boolean array if it isn't already - if not (isinstance(mask, numpy.ndarray) and mask.dtype==numpy.bool): - mask = numpy.array(mask, dtype=numpy.bool) - - for tr in trList: - # get the grid of index values and list of keys those indices select - byteGrid, hazKey = self.getGrids(MODEL, weName, LEVEL, tr, - mode="First", cache=0) - if isinstance(hazKey, str): - hazKey = eval(hazKey) - - # Eliminate keys that aren't in the grid from the list. 
- uniqueKeys = self._getUniqueKeys(byteGrid, hazKey, mask) - for uKey in uniqueKeys: - # Figure out what the new key is - if combine: - newKey = self._makeNewKey(uKey, addHaz) - else: #replace - newKey = addHaz - - # Find the index number for the old key - oldIndex = self.getIndex(uKey, hazKey) - # Find the index number for the new key (newKey is added if not in hazKey) - newIndex = self.getIndex(newKey, hazKey) - - # calculate the mask - intersection of mask and oldIndex values - editMask = (byteGrid==oldIndex) & mask - - # poke in the new values - byteGrid[editMask] = newIndex - - # Save the updated byteGrid and hazKey - if weName == ELEMENT: - self.createGrid(MODEL, ELEMENT, "DISCRETE", (byteGrid, hazKey), - tr, discreteOverlap=1, discreteAuxDataLength=4) - else: # it's a temporary WE - special key - hazKey = ["", addHaz] - hazKeyDesc = self._addHazardDesc(hazKey) - self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, hazKey), - tr, discreteOverlap=0, discreteAuxDataLength=4, - discreteKeys=hazKeyDesc, - defaultColorTable="YesNo") - - # remove any grids that are completely in the past - self._removeOldGrids(weName) - - return - - ## - # Removes the specified hazard from the specified grid over the mask. - # - # @param weName: Name of the weather element to remove hazards from. - # @type weName: string - # @param timeRange: Time range from which to remove hazards. - # @type timeRange: Python or Java TimeRange - # @param removeHaz: Hazard phensig to remove - # @type removeHaz: string - # @param mask: Grid that is True for points where removeHaz should be removed, - # false where it should not. Defaults to all points selected if - # omitted or passed as None. 
- # @type mask: numpy array of boolean or Nonetype - def _removeHazard(self, weName, timeRange, removeHaz, mask = None): - - # get the inventory - trList = self._getWEInventory(weName, timeRange) - - # make sure we have a real mask - if mask is None: - gridShape = self._getGridSize() - mask = numpy.ones(gridShape, bool) - - for tr in trList: - byteGrid, hazKey = self.getGrids(MODEL, weName, LEVEL, tr, - mode="First", cache=0) - uniqueKeys = self._getUniqueKeys(byteGrid, hazKey, mask) - - for uKey in uniqueKeys: - if string.find(uKey, removeHaz) >= 0: - newKey = self._removeSubKey(uKey, removeHaz) - oldIndex = self.getIndex(uKey, hazKey) - newIndex = self.getIndex(newKey, hazKey) - - # calculate the mask - intersection of mask and oldIndex values - editMask = (byteGrid == oldIndex) & mask - - # poke in the new values - byteGrid[editMask] = newIndex - - # see if there's any hazards left and if not, delete the whole grid - noneIndex = self.getIndex("", hazKey) - noneGrid = (byteGrid == noneIndex) - if noneGrid.all(): - self.deleteCmd([weName], tr) - else: - self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, hazKey), - tr, discreteOverlap= 0, - discreteAuxDataLength=4, - defaultColorTable="YesNo") - - return - - ## - # Format time as yyyymmdd_hhmm - # - # @param t: Time - # @type t: seconds since the epoch - # @return: Formatted version of t - # @rtype: string - def _printTime(self, t): - gm = time.gmtime(t) - s = time.strftime("%Y%m%d_%H%M", gm) - return s - - #print areas, from dictionary - def _printAreas(self, areas): - ara = list(areas) - ara.sort() - return ara - - #filter vtec table based on gfe operating mode, returns vtec table - def _filterVTECBasedOnGFEMode(self, vtecTable): - #get gfe mode - rawGfeMode = self.gfeOperatingMode() - gfeMode = rawGfeMode - if gfeMode is None: - gfeMode = "" - - gfeMode = gfeMode.strip().lower() - #practice mode = accept all records - if "practice" == gfeMode: - return vtecTable #allow all records - - #test mode -- only 
accept records that have "T" vtec - elif "test" == gfeMode: - fvtecTable = [] - for rec in vtecTable: - testEntry = (rec['vtecstr'].find('/T.') == 0) - if testEntry: - fvtecTable.append(rec) - return fvtecTable - - #regular/operational mode -- accept records that don't have "T" vtec - elif "standard" == gfeMode or "operational" == gfeMode: - fvtecTable = [] - for rec in vtecTable: - testEntry = (rec['vtecstr'].find('/T.') == 0) - if not testEntry: - fvtecTable.append(rec) - return fvtecTable - - else: - raise Exception, "Unknown GFE operating mode: " + rawGfeMode - - ## - # A Python access to the looseLocks() method of the Hazards parm. - def _unlockHazards(self): - hazParm = self.getParm(MODEL, ELEMENT, LEVEL) - hazParm.looseLocks() +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# HazardUtils +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jan 16, 2015 4006 njensen create _getUniqueKeys() mask with dtype bool +# 06/08/16 19096 ryu Change mask to boolean data type +# +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. 
+## + +import SmartScript +import time, string +import VTECTable +import LogStream +import numpy +from AbsTime import AbsTime +from AbsTime import current +from TimeRange import TimeRange +from java.util import Date +from java.util import ArrayList +import jep +from JUtil import JavaWrapperClass + + +def LOCK_HOURS(): + return 192 + +def HOUR_SECONDS(): + return 3600 + +MODEL = "Fcst" +ELEMENT = "Hazards" +LEVEL = "SFC" + +# Status return codes for _separateHazardGrids +SUCCESS = 1 +FAIL_REDUNDANT = 0 +FAIL_LOCK = -1 + +class HazardUtils(SmartScript.SmartScript): + def __init__(self, dbss, eaMgr, mdMode=None, toolType="numeric"): + SmartScript.SmartScript.__init__(self, dbss) + + # self.setUp(eaMgr, mdMode, toolType) + + ## + # Get timeRanges that make up the inventory of the given weather element. + # This is normally only used for the hazards inventory, so model is "Fcst" + # and level is "SFC" in the lookup. + # + # @param WEName: The weather element whose time ranges are to be acquired. + # @type WEName: string + # @param timeRange: optional time range of the inventory. If not specified, + # the default is from 24 hours ago to ten days from now. + # @type timeRange: Java or Python TimeRange + # @param asJava: If True, the inventory is returned as a list of Java + # TimeRanges; if False, the inventory is returned as a list + # of Python TimeRanges. The default is False. + # @type asJava: boolean + # @return: The time ranges for WEName that overlap the specified or default + # time range. 
    def _getWEInventory(self, WEName, timeRange=None, asJava=False):
        # set up a timeRange if it is None
        if timeRange is None:
            now = current()
            yesterday = now - (24 * 3600) # one day ago
            later = now + 10 * 24 * 3600  # 10 days from now
            timeRange = self._makeTimeRange(yesterday.unixTime(), later.unixTime())
        parm = self.getParm(MODEL, WEName, LEVEL)
        trList = []
        if parm is not None:
            if isinstance(timeRange, JavaWrapperClass):
                timeRange = timeRange.toJavaObj()
            gridInventory = parm.getGridInventory(timeRange)
            for g in gridInventory:
                gridTimeRange = g.getGridTime()
                # clone() so callers never hold the parm's own TimeRange
                # object -- NOTE(review): assumed from the clone call; confirm
                tr = gridTimeRange.clone()
                if not asJava:
                    tr = TimeRange(tr)
                trList.append(tr)

        return trList

    # makes a TimeRange from a start and end integers
    # (start/end are seconds since the epoch)
    def _makeTimeRange(self, start, end):
        return TimeRange(AbsTime(start), AbsTime(end))

    ##
    # Get timeRanges that correspond to gaps in the specified WEName inventory
    # within the specified time ranges.
    #
    # @param WEName: A weather element name
    # @type WEName: string
    # @param trList: Time ranges of interest
    # @type trList: list of Python or Java TimeRange
    # @return: time ranges overlapping timeRange that are missing from the
    #          inventory of WEName
    # @rtype: list of Python TimeRanges
    def _getGaps(self, WEName, trList):

        fullHazardInv = self._getWEInventory(WEName)
        gaps = []

        for timeRange in trList:

            # Convert Java TimeRange to Python for comparisons
            if not isinstance(timeRange, TimeRange):
                timeRange = TimeRange(timeRange)

            # restrict the full inventory to grids touching this time range
            hazInv = []
            for h in fullHazardInv:
                if timeRange.overlaps(h):
                    hazInv.append(h)

            # check for empty inventory
            if len(hazInv) == 0:   # no grids at all
                gaps.append(timeRange)
                continue

            # see if we have a gap at the beginning
            if timeRange.startTime() < hazInv[0].startTime():
                tr = TimeRange(timeRange.startTime(),
                               hazInv[0].startTime())
                gaps.append(tr)

            # Find any gaps in the middle of the inventory
            for i in range(len(hazInv) - 1):
                if hazInv[i].endTime() != hazInv[i+1].startTime():
                    gapTR = TimeRange(hazInv[i].endTime(),
                                      hazInv[i+1].startTime())
                    gaps.append(gapTR)

            # see if we have a gap at the end of the inventory
            if timeRange.endTime() > hazInv[-1].endTime():
                tr = TimeRange(hazInv[-1].endTime(),
                               timeRange.endTime())
                gaps.append(tr)

        return gaps

    ##
    # Create an empty hazards-type grid with the specified
    # name and timeRange
    #
    # @param weName: The name of the weather element to create.
    # @type weName: string
    # @param timeRange: The time range of the new grid.
    # @type timeRange: a Java or Python TimeRange
    # @raise JepException: when raised by SmartScript methods.
    def _makeEmptyHazardGrid(self, weName, timeRange):
        gridShape = self.getGridShape()
        byteGrid = numpy.zeros(gridShape, dtype=numpy.int8)
        hazKeys = self.getDiscreteKeys(ELEMENT)
        # index 0 of the discrete grid maps to the empty ("no hazard") key
        currentKeys = [""]
        # make the grid
        if weName == ELEMENT:
            # the real Hazards element allows overlapping discrete keys
            self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, currentKeys),
                            timeRange, discreteKeys=hazKeys,
                            discreteAuxDataLength=4, discreteOverlap=1)
        else:
            # a temporary single-hazard element: only "" and its one key
            hazard = self._tempWENameToKey(weName)
            discreteKeys = ["", hazard]
            hazKeyDesc = self._addHazardDesc(discreteKeys)
            self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, currentKeys),
                            timeRange, discreteKeys=hazKeyDesc,
                            discreteAuxDataLength=4, discreteOverlap=0,
                            defaultColorTable="YesNo")
        return

    ##
    # Prepare the Hazards inventory so that it can be merged with the
    # activeTable. This includes splitting grids and adding new ones where
    # we have gaps.
    #
    # @param weName: Name of a weather element
    # @type weName: string
    # @param trList: Time ranges of interest
    # @type trList: list of Python or Java TimeRanges
    def _setupHazardsInventory(self, weName, trList):
        # see if the element exists yet, if not, make a new grid
        # This is a painful way just to see if the grid exists
        # but all other techniques fail for temporary weather elements
        now = current()
        yesterday = now - (24 * 3600) # one day ago
        later = now + 10 * 24 * 3600  # 10 days from now
        timeRange = TimeRange(yesterday, later).toJavaObj()
        try:
            gridInfo = self.getGridInfo(MODEL, weName, LEVEL, timeRange)
        except:   # this means the WE does not exist, so make a grid
            # NOTE(review): deliberate best-effort probe -- the bare except is
            # the existence test itself, so it must stay broad
            if len(trList) <= 0:
                return
            for tr in trList:
                self._makeEmptyHazardGrid(weName, tr)
            return
        # fill any gaps in the inventory
        gapList = self._getGaps(weName, trList)
        for g in gapList:
            self._makeEmptyHazardGrid(weName, g)

        # Split the grids at the timeRange boundaries
        unix_now = now.unixTime()
        for tr in trList:
            # If tr is a java timerange, convert it to a python TimeRange
            if not isinstance(tr, TimeRange):
                tr = TimeRange(tr)
            end = tr.endTime().unixTime()
            if end > unix_now:
                # parm.splitTR() will split timeRanges with non-zero minutes
                # to the next hour. So, truncate start and end times to the
                # previous hour and then split
                start = tr.startTime().unixTime()
                start = int(start / 3600) * 3600
                end = int(end / 3600) * 3600
                roundedTR = TimeRange(AbsTime(start), AbsTime(end)).toJavaObj()
                parm = self.getParm(MODEL, weName, LEVEL)
                self.splitCmd([weName], roundedTR)

        return

    # returns a Numeric mask where each zone in zoneList is set to 1
    # (zones not present in the edit-area list are silently skipped)
    def _makeMask(self, zoneList):
        # NOTE(review): numpy.bool is removed in NumPy >= 1.24; bool or
        # numpy.bool_ is the modern spelling -- confirm the bundled NumPy
        mask = self.empty(dtype=numpy.bool)
        eaList = self.editAreaList()
        for z in zoneList:
            if z in eaList:
                zoneArea = self.getEditArea(z)
                zoneMask = self.encodeEditArea(zoneArea)
                mask = numpy.logical_or(mask, zoneMask)

        return mask

    # Fetches the gridSize from the GFE and returns it as a tuple.
    def _getGridSize(self):
        return self.getGridShape()

    ##
    # Determine whether temporary weather elements are loaded.
    #
    # @return: 1 if temporary weather elements are loaded;
    #          0 otherwise.
    def _tempWELoaded(self):
        parms = self.loadedParms()
        for weName, level, dbID in parms:
            # temporary elements are those whose names start with "haz"
            if string.find(weName, "haz") == 0:
                return 1

        return 0

    ##
    # Create a temporary weather element name from key.
    #
    # @param key: String like BZ.W:1234, or LCLKEY, or BZ.W
    # @type key: string
    # @return: key with 'haz' prepended and any '.' or ':' chars removed.
    # @rtype: string
    def _makeTempWEName(self, key):
        "Create a temporary weather element name from a key string."
        #key is BZ.W:1234, or LCLKEY, or BZ.W
        key = string.replace(key, ".","")
        key = string.replace(key, ":","")
        weName = "haz" + key
        return weName

    ##
    # Create a key string from a temporary weather element name.
    #
    # @param wename: A temporary weather element name
    # @type wename: string
    # @return: The key string from which the temporary element was derived.
    # @rtype: string
    # @raise Exception: if wename does not begin with "haz".
    def _tempWENameToKey(self, wename):
        "Make a key string from a temporary weather element name."
        #wename is hazBZW, hazBZW1234, hazLCLK
        if len(wename) > 3 and wename[0:3] == 'haz':
            key = wename[3:]  #eliminate "haz"
            if len(key) >= 3:
                # try phen.sig first: first two chars are the phen, third the sig
                vkey = key[0:2] + '.' + key[2]
                if vkey in VTECTable.VTECTable:
                    seg = key[3:]
                    if len(seg):
                        return vkey + ':' + seg
                    else:
                        return vkey
                # local key, look for segment via digits
                else:
                    lkey = key
                    for i in range(len(key)):
                        if key[i:].isdigit():
                            lkey = key[0:i] + ":" + key[i:]
                            break
                    return lkey
            else:
                # TODO: or should I fail?
                return key
        else:
            raise Exception("Illegal wename: " + wename)

    ##
    # Gets the unique list of keys over the specified mask
    # if no mask is passed, the entire grid is used
    #
    # @param byteGrid: Grid of indices
    # @type byteGrid: Numpy array of int8
    # @param keys: Keys associated with byteGrid. If byteGrid[2,2] is 3, then
    #              keys[3] describes its state.
    # @type keys: List of strings
    # @param mask: Optional mask of points to include; defaults to all ones.
    # @type mask: Numpy array of boolean, same dimensions as byteGrid;
    # @return: The keys referenced by the masked byteGrid, without duplicates.
    # @rtype: List of strings
    def _getUniqueKeys(self, byteGrid, keys, mask = None):
        uniqueKeys = []

        # if mask is None, make a mask of the whole area
        if mask is None:
            # NOTE(review): numpy.bool is removed in NumPy >= 1.24 -- confirm
            mask = numpy.ones(byteGrid.shape, numpy.bool)

        # get the list of values over the mask area only
        valueList = numpy.compress(mask.flat, byteGrid.flat)

        # remove the duplicates to get unique values
        uniqueValues = list( numpy.unique(valueList) )

        # extract the keys that correspond to the byte values
        for u in uniqueValues:
            uniqueKeys.append(keys[u])

        return uniqueKeys

    ##
    # Get the phen portion of key.
    # If key is not a VTEC hazard key, returns ""
    # @param key: A grid key
    # @type key: string
    # @return: The phen portion of key.
    # @rtype: string
    def _keyPhen(self, key):
        pos = string.find(key, ".")
        if pos == -1:   # not found
            return ""

        return key[0:pos]

    ##
    # Get the sig portion of key.
    # If key is not a VTEC hazard key, returns ""
    #
    # @param key: A grid key.
    # @type key: string
    # @return: The sig portion of key.
    # @rtype: string
    def _keySig(self, key):
        pos = string.find(key, ".")
        if pos == -1:   # not found
            return ""

        # sig is the single character right after the "."
        return key[pos + 1]

    ##
    # Combine newKey with subKeys and return a new combined key. Enforces the
    # rule that keys with the same phen returns the one key with the highest
    # priority sig.
    #
    # @param subKeys: The old key.
    # @type subKeys: string
    # @param newKey: The key to add.
    # @type newKey: string
    # @return: The key made by combining subKeys with newKey.
    # @rtype: string
    def _combinedKey(self, subKeys, newKey):
        if newKey is None:
            return subKeys

        subKeyList = string.split(subKeys, "^")

        # check for same keys
        if newKey in subKeyList:
            return subKeys

        # fallback result: simply append newKey as another "^" subkey
        defaultCombo = subKeys + "^" + newKey

        # check for non-VTEC key
        if string.find(newKey, ".") == -1:
            return defaultCombo

        # more exceptions - these phens are above the law
        # (convective/flash-flood hazards always coexist, never replace)
        exceptions = ["TO", "SV", "FF"]
        # sig priority, highest first: Warning > adVisory > wAtch
        sigList = ["W", "Y", "A"]
        if self._keyPhen(newKey) in exceptions:
            return defaultCombo

        subKeyList = string.split(subKeys, "^")
        for sk in subKeyList:
            if self._keyPhen(sk) == self._keyPhen(newKey):
                subSig = self._keySig(sk)
                newSig = self._keySig(newKey)
                if subSig == newSig:
                    return subKeys

                if subSig not in sigList or newSig not in sigList:
                    continue

                # existing sig is lower priority: replace it with newKey
                if sigList.index(subSig) > sigList.index(newSig):
                    subKeys = subKeys.replace(sk, newKey)

                return subKeys

        return defaultCombo


    # Makes a new hazard given the old key oldKey and a new watch phenSig.
    # @param oldKey: The old key
    # @type oldKey: string
    # @param phenSig: The new watch phen and sig
    # @type phenSig: string
    # @return: A new combined key.
+ # @rtype: string + def _makeNewKey(self, oldKey, phenSig): + # check for the dumb cases + if oldKey == "" or oldKey == phenSig: + return phenSig + + # split up the key, add the hazard, sort, and reassemble + parts = string.split(oldKey, "^") + parts.append(phenSig) + parts.sort() # makes sure the same set of subKeys look the same + + # assemble the new key + newKey = "" + for p in parts: + if newKey == "": + newKey = p + else: + newKey = self._combinedKey(newKey, p) + + # just in case + if newKey == "": + newKey = "" + + return newKey + + ## + # Get the subkeys of key + # + # @param key: A key to divide into subkeys + # @type key: String + # @return: The subkeys of key + # @rtype: List of strings + def _getSubKeys(self, key): + parts = string.split(key, "^") + if "" in parts: + parts.remove("") + return parts + + def _removeSubKey(self, key, subKey): + newKey = "" + for p in string.split(key, "^"): + if p == subKey: + continue + if newKey == "": + newKey = p + else: + newKey = newKey + "^" + p + + if newKey == "": + newKey = "" + + return newKey + + ## + # Take a sequence or set of time ranges and produce a set of time ranges by + # combining all adjacent or overlapping time ranges in the sequence. 
    #
    # @param timeranges: the timeranges to merge
    # @type timeranges : sequence, set or frozenset of TimeRange
    # @return: the merged timeranges
    # @rtype: set of TimeRange
    def _mergeTimeranges(self, timeranges):
        trset = set(timeranges)
        # Loop until a pass doesn't merge any time ranges
        # (quadratic in the worst case, but inventories here are small)
        moreToDo = True
        while moreToDo:
            moreToDo = False
            merged = []
            for tr in trset:
                found = False
                for idx, mtr in enumerate(merged):
                    if tr == mtr:
                        found = True
                        break
                    elif tr.overlaps(mtr) or tr.isAdjacentTo(mtr):
                        found = True
                        merged[idx] = mtr.join(tr)
                        moreToDo = True
                        break
                if not found:
                    merged.append(tr)
            trset = set(merged)
        return trset

    ##
    # Determine whether the time ranges of any (temporary) parm in hazParms
    # overlaps a locked time range of the Hazards element. If not, add the
    # time ranges of the temporary parms to the locked time ranges of the
    # Hazards parm.
    #
    # @param hazParms: Temporary hazard parm names.
    # @type hazParms: sequence of string
    # @return: 0 if there are not conflicting locks, 1 if there are
    # @rtype: int
    def _conflictingLocks(self, hazParms):
        # find all the time ranges that should be locked
        neededTRs = set()

        for hazParm in hazParms:
            trList = self._getWEInventory(hazParm)
            neededTRs = neededTRs.union(trList)

        # Find all the time ranges that are locked in Hazards
        myTRs = self.lockedByMe(ELEMENT, LEVEL)
        myTRs = set(myTRs)

        # Add locks we already have to the needed TRs,
        # in case grids were deleted
        neededTRs = neededTRs.union(myTRs)

        # Squish the TRs into contiguous blocks
        neededTRs = self._mergeTimeranges(neededTRs)

        # See if there are any blocks we don't have yet
        missingTRs = neededTRs - myTRs

        # If not, then there are no conflicts and we're done.
        if len(missingTRs) == 0:
            return 0

        # build a Java Date[] of the start times of the missing locks
        startTimes = jep.jarray(len(missingTRs), Date)

        midx = 0
        for missingTR in missingTRs:
            startTimes[midx] = missingTR.toJavaObj().getStart()
            midx += 1

        hazardParm = self.getParm(MODEL, ELEMENT, LEVEL)
        gridData = None
        try:
            gridData = hazardParm.startParmEdit(startTimes)
        except RuntimeError as runtimeErr:
            if runtimeErr.message is None:
                raise
            # a GFEOperationFailedException from the Java side means another
            # user holds a conflicting lock
            if runtimeErr.message.startswith("com.raytheon.viz.gfe.GFEOperationFailedException:"):
                return 1
            else:
                raise

        if gridData is not None and len(gridData) != 0:
            if not hazardParm.endParmEdit():
                return 1

        # NOTE(review): the original indentation of the section below was lost
        # in transit; it is reconstructed at method level so lock negotiation
        # also runs when startParmEdit() returned no grids -- confirm against
        # the upstream file.
        # The locks acquired in the endParmEdit() call may not have been quite right.
        # However, we needed to end the parm edit.
        # Negotiate the locks we _really_ need now that it's done.
        locktable = hazardParm.getLockTable()
        LOCK = locktable.getClass().getLockMode("LOCK");

        from com.raytheon.uf.common.dataplugin.gfe.server.request import LockRequest
        desiredLocks = ArrayList()
        for missingTR in missingTRs:
            newLock = LockRequest()
            newLock.setParmId(hazardParm.getParmID())
            newLock.setTimeRange(missingTR.toJavaObj())
            newLock.setMode(LOCK)
            desiredLocks.add(newLock)

        client = hazardParm.getDataManager().getClient()
        serverResponse = client.requestLockChange(desiredLocks)
        if not serverResponse.isOkay():
            # could not obtain the locks; roll back the edit
            hazardParm.undo()
            return 1

        return 0

    ##
    # Create a list of (key, desc) tuples from keys.
    # For each key in keys, look up the key in VTECTable.
    # If the key is found, use its headline value as its description;
    # otherwise, use the key as its own description.
    #
    # @param keys: Keys to look up descriptions for.
    # @type keys: iterable of strings
    # @return: keys and descriptions for the key
    # @rtype: list of 2-tuples
    def _addHazardDesc(self, keys):
        newKeys = []
        for k in keys:
            index = string.find(k, ':')
            if index != -1:
                k = k[0:index]   #eliminate the colon and segment #
            if k not in VTECTable.VTECTable:
                desc = k
            else:
                # get the description (VTEC headline text)
                desc = VTECTable.VTECTable[k]['hdln']

            newKeys.append((k, desc))

        return newKeys

    ##
    # Determine whether the Hazards forecast weather element is loaded.
    #
    # @param weName: The name of the weather element. Defaults to "Hazards".
    # @type wename: string
    # @return: 1 if the weather element is loaded, 0 otherwise
    # @rtype: int
    def _hazardsLoaded(self, weName=ELEMENT):

        tupleList = self.loadedParms()
        ## look for the Hazards Weather element
        for element, level, databaseID in tupleList:
            modelName = databaseID.modelName()
            if element == weName and level == LEVEL and modelName == MODEL:
                return 1

        # if we got this far we didn't find it.
        return 0

    ##
    # Remove any grids for weName whose end times are in the past
    #
    # @param weName: A weather element name.
    # @type weName: string
    # @raise JepException: if calls to Java methods fail.
    def _removeOldGrids(self, weName):
        # get the inventory
        trList = self._getWEInventory(weName)

        for tr in trList:
            if tr.endTime().unixTime() < current().unixTime():
                self.deleteCmd([weName], tr)

        return

    ##
    # Remove any data grids for MODEL, ELEMENT, and LEVEL over the default
    # inventory timerange (from now to 10 days in the future). The parm
    # remains in the parm manager.
    #
    # @return: True if every grid was deleted; False at the first failure
    #          (remaining grids are left in place).
    def _removeAllHazardsGrids(self):

        removeTRList = self._getWEInventory(ELEMENT, asJava=True)

        # Remove the real Hazards grid
        for tr in removeTRList:
            if not self.deleteGrid(MODEL, ELEMENT, LEVEL, tr):
                return False
        return True


    ##
    # Destroy all the temporary hazards (they are removed from the parm manager).
    #
    def _removeTempHazardWEs(self):
        parms = self.loadedParms()

        for weName, level, dbID in parms:
            # temporary elements are named "haz" + key; require a suffix so
            # the bare name "haz" is never unloaded
            if string.find(weName, "haz") == 0 and len(weName) > 3:
                self.unloadWE(MODEL, weName, level)

        return

    ##
    # Determine whether the indicated grids are consecutive in time and
    # identical in value at every point.
    # @attention: This method assumes timeRange1 begins before timeRange2.
    #             It will give wrong answers if their order is reversed
    #
    # @param weName: Weather element name
    # @type weName: string
    # @param timeRange1: First time range for weather element
    # @type timeRange1: Python TimeRange
    # @param timeRange2: Second time range for weather element
    # @type timeRange2: Python TimeRange
    # @return: True if the end time for timeRange1 matches the start time of
    #          timeRange2 and the grid for weName during timeRange1 is identical
    #          to the grid for weName during timeRange2, False otherwise.
    # @rtype: boolean (returned as legacy 1/0 ints)
    def _consecutiveIdenticalGrids(self, weName, timeRange1, timeRange2):
        if timeRange1.endTime() == timeRange2.startTime():
            # get the grids
            firstGrid, key = self.getGrids(MODEL, weName, LEVEL,
                                           timeRange1.toJavaObj(), mode="First", cache=0)
            secondGrid, key = self.getGrids(MODEL, weName, LEVEL,
                                            timeRange2.toJavaObj(), mode="First", cache=0)
            # any point where the index grids differ makes them non-identical
            if numpy.sometrue(numpy.logical_xor(firstGrid, secondGrid)):
                return 0
            else:
                return 1

        return 0

    ##
    # Replace existing grids for weName with a single grid over the
    # time range from groupStart to groupEnd.
    #
    # This function should only be used by _consolidateTimes(); it
    # exists only to be sure we create the consolidated grid the same way in
    # the "last timeRange" code block as we do in the "broke the string" block.
    # @param groupStart: Starting time as seconds since the epoch
    # @type groupStart: int
    # @param groupEnd: Ending time as seconds since the epoch
    # @type groupEnd: int
    # @param weName: (temporary) weather element name
    # @type weName: string
    # @return: None
    def _createConsolidatedGrid(self, groupStart, groupEnd, weName):
        "Used internally by _consolidateTimes()"
        timeRange = self._makeTimeRange(groupStart, groupEnd).toJavaObj()
        byteGrid, hazKey = self.getGrids(MODEL, weName, LEVEL,
                                         timeRange, mode="First", cache=0)
        # getGrids may hand back the key list as its string repr; rebuild it
        if isinstance(hazKey, str):
            hazKey = eval(hazKey)
        self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, hazKey),
                        timeRange, discreteOverlap=1,
                        discreteAuxDataLength=4)

    ##
    # Consolidate grid times for each weather element in weNameList.
    # For each weather element, find time ranges that touch whose grids are
    # identical and turn them into a single grid for the combined time range.
    def _consolidateTimes(self, weNameList):
        for weName in weNameList:
            # Get "all" the time ranges for this element
            trList = self._getWEInventory(weName)
            if len(trList) == 0:
                # NOTE(review): this returns from the whole method, skipping
                # any remaining elements in weNameList; 'continue' may have
                # been intended -- confirm before changing
                return

            count = 1
            groupStart = int(trList[0].startTime().unixTime())
            groupEnd = int(trList[0].endTime().unixTime())

            for i in range(0, len(trList) - 1):
                if self._consecutiveIdenticalGrids(weName, trList[i], trList[i+1]):
                    # keep looking for the end
                    count = count + 1
                    groupEnd = int(trList[i+1].endTime().unixTime())
                else:   # broke the string of grids
                    if count > 1:   # store the new time-consolidated grid
                        self._createConsolidatedGrid(groupStart, groupEnd, weName)
                    # reset the times
                    groupStart = int(trList[i+1].startTime().unixTime())
                    groupEnd = int(trList[i+1].endTime().unixTime())
                    count = 1

            # make sure we catch the last timeRange
            if count > 1:   # store the new time-consolidated grid
                self._createConsolidatedGrid(groupStart, groupEnd, weName)

        return

    ##
    # Lock any grids in the hazards parm from now to LOCK_HOURS() hours in the
future. + # + # @return: the hazards parm and its igrids + # @rtype: a 2-tuple; the first item is a Parm, the second is a list of IGridDatas, + # which, for discrete grids, translate to a 2-tuple containing a numpy + # array and a key string. So, like this: + # (parm,[(arr0,key0), (arr1,key1), ...]) + # + def _lockHazards(self): + "Flag the hazards parm as being edited. Return the hazards parm and its grid." + hazParm = self.getParm(MODEL, ELEMENT, LEVEL) + startAbsTime = AbsTime(int(current().unixTime() /3600)*3600) + endAbsTime = startAbsTime + LOCK_HOURS() * HOUR_SECONDS() + timeRange = TimeRange(startAbsTime, endAbsTime) + + inventory = self._getWEInventory(ELEMENT, timeRange, asJava=True) + startTimes = jep.jarray(len(inventory), Date) + for trNum in range(len(inventory)): + startTimes[trNum] = inventory[trNum].getStart() + gridData = None + try: + # startParmEdit() refreshes the grids and sets up the times that endParmEdit() will lock. + gridData = hazParm.startParmEdit(startTimes) + except RuntimeError as runtimeErr: + if runtimeErr.message is None: + raise + if runtimeErr.message.startswith("com.raytheon.viz.gfe.GFEOperationFailedException:"): + self.statusBarMsg("There are conflicting locks. " + \ + "Please resolve these before adding any hazards", "S") + hazParm = None + else: + raise + + # endParmEdit() locks the grids. + # The locks will be released when the forecast is saved. + if hazParm is not None: + locked = True + if len(startTimes) != 0: + locked = hazParm.endParmEdit() + if locked: + locked = hazParm.forceLockTR(timeRange.toJavaObj()) + if not locked: + self.statusBarMsg("There are conflicting locks. " + \ + "Please resolve these before adding any hazards", "S") + hazParm = None + + return (hazParm, gridData) + + ## + # Let other users edit the hazards parm. + # + # @return: True for success, False otherwise. + # @raise JepException: if the hazards parm was not being edited. + def _endEdit(self): + "Let other users edit the hazards parm. 
Return True for success." + hazParm = self.getParm(MODEL, ELEMENT, LEVEL) + return hazParm.endParmEdit() + + ## + # Make temporary hazard grids for each hazard subkey. + # Hazards are "being edited" until they are merged again. + # + # @return: True if separation succeeded, false otherwise. + # + def _separateHazardGrids(self): + "Make temporary hazard grids for each hazard subkey." + + # if any temp hazard grids are loaded, don't separate again + if self._tempWELoaded(): + return FAIL_REDUNDANT #already separated + + hazParm, gridData = self._lockHazards() + if hazParm is None: + return FAIL_LOCK # unavailable + + # get a collection of distinct Java TimeRange objects + trSet = set() + for gd in gridData: + trSet.add(gd.getGridTime()) + + # Create a set of temporary weather element names + weNameSet = set() + + for tr in trSet: + # Get the index grid and key list for the real Hazards element + byteGrid, hazKey = self.getGrids(MODEL, ELEMENT, LEVEL, tr, + mode="First") + if isinstance(hazKey, str): + hazKey = eval(hazKey) + + # Only work with the keys that have points in the grid + uniqueKeys = self._getUniqueKeys(byteGrid, hazKey) + if len(uniqueKeys) > 0: + # build list of split hazKeys for use in loop below + splitHazKeys = [] + for haz in hazKey: + splitHazKeys.append(self._getSubKeys(haz)) + + for uKey in uniqueKeys: + + if uKey == "": + continue + + # split the current key into its subkeys + subKeys = self._getSubKeys(uKey) + for subKey in subKeys: + # make the temporary name + weName = self._makeTempWEName(subKey) + + # make the mask - find all areas that contain the subKey + mask = numpy.zeros(byteGrid.shape, dtype=numpy.bool) + for hazIndex in range(len(hazKey)): + if subKey in splitHazKeys[hazIndex]: + mask |= (byteGrid==hazIndex) + + # make the grid + self._addHazard(weName, tr, subKey, mask) + pytr = TimeRange(tr) + logmsg = " ".join(["Separate", weName, + self._printTime(pytr.startTime().unixTime()), + self._printTime(pytr.endTime().unixTime()), 
subKey]) + LogStream.logEvent(logmsg) + + # save the weNames for later + weNameSet.add(weName) + + # Combine time ranges for the temporary weather elements we created + self._consolidateTimes(weNameSet) + + return SUCCESS + + ## + # Add the specified hazard to weName over the specified timeRange + # and spatially over the specified mask. Combines the specified + # hazard with the existing hazards by default. For replaceMode, + # specify 0 in the combineField. + # + # @param weName: The weather element name. + # @type wename: string + # @param timeRange: Time range of the hazard. + # @type timeRange: Java or Python TimeRange + # @param addHaz: Key for the new hazard + # @type addHaz: string + # @return: None + def _addHazard(self, weName, timeRange, addHaz, mask, combine=1): + # Python TimeRanges are easy to compare. + # Java methods require Java TimeRanges. + # Make sure we have one of each. + if isinstance(timeRange, JavaWrapperClass): + pyTimeRange = timeRange + timeRange = timeRange.toJavaObj() + else: + pyTimeRange = TimeRange(timeRange) + # refuse to make new grids that are more than one hour in the past + if pyTimeRange.endTime().unixTime() < current().unixTime() - HOUR_SECONDS(): + msg = "skipped time range creation: %s < %s" % (pyTimeRange.endTime().string(), current().string()) + return + + # set up the inventory first + self._setupHazardsInventory(weName, [timeRange]) + + # get the inventory + trList = self._getWEInventory(weName, timeRange, asJava=True) + + # coerce mask into a boolean array if it isn't already + if not (isinstance(mask, numpy.ndarray) and mask.dtype==numpy.bool): + mask = numpy.array(mask, dtype=numpy.bool) + + for tr in trList: + # get the grid of index values and list of keys those indices select + byteGrid, hazKey = self.getGrids(MODEL, weName, LEVEL, tr, + mode="First", cache=0) + if isinstance(hazKey, str): + hazKey = eval(hazKey) + + # Eliminate keys that aren't in the grid from the list. 
+ uniqueKeys = self._getUniqueKeys(byteGrid, hazKey, mask) + for uKey in uniqueKeys: + # Figure out what the new key is + if combine: + newKey = self._makeNewKey(uKey, addHaz) + else: #replace + newKey = addHaz + + # Find the index number for the old key + oldIndex = self.getIndex(uKey, hazKey) + # Find the index number for the new key (newKey is added if not in hazKey) + newIndex = self.getIndex(newKey, hazKey) + + # calculate the mask - intersection of mask and oldIndex values + editMask = (byteGrid==oldIndex) & mask + + # poke in the new values + byteGrid[editMask] = newIndex + + # Save the updated byteGrid and hazKey + if weName == ELEMENT: + self.createGrid(MODEL, ELEMENT, "DISCRETE", (byteGrid, hazKey), + tr, discreteOverlap=1, discreteAuxDataLength=4) + else: # it's a temporary WE - special key + hazKey = ["", addHaz] + hazKeyDesc = self._addHazardDesc(hazKey) + self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, hazKey), + tr, discreteOverlap=0, discreteAuxDataLength=4, + discreteKeys=hazKeyDesc, + defaultColorTable="YesNo") + + # remove any grids that are completely in the past + self._removeOldGrids(weName) + + return + + ## + # Removes the specified hazard from the specified grid over the mask. + # + # @param weName: Name of the weather element to remove hazards from. + # @type weName: string + # @param timeRange: Time range from which to remove hazards. + # @type timeRange: Python or Java TimeRange + # @param removeHaz: Hazard phensig to remove + # @type removeHaz: string + # @param mask: Grid that is True for points where removeHaz should be removed, + # false where it should not. Defaults to all points selected if + # omitted or passed as None. 
+ # @type mask: numpy array of boolean or Nonetype + def _removeHazard(self, weName, timeRange, removeHaz, mask = None): + + # get the inventory + trList = self._getWEInventory(weName, timeRange) + + # make sure we have a real mask + if mask is None: + gridShape = self._getGridSize() + mask = numpy.ones(gridShape, bool) + + for tr in trList: + byteGrid, hazKey = self.getGrids(MODEL, weName, LEVEL, tr, + mode="First", cache=0) + uniqueKeys = self._getUniqueKeys(byteGrid, hazKey, mask) + + for uKey in uniqueKeys: + if string.find(uKey, removeHaz) >= 0: + newKey = self._removeSubKey(uKey, removeHaz) + oldIndex = self.getIndex(uKey, hazKey) + newIndex = self.getIndex(newKey, hazKey) + + # calculate the mask - intersection of mask and oldIndex values + editMask = (byteGrid == oldIndex) & mask + + # poke in the new values + byteGrid[editMask] = newIndex + + # see if there's any hazards left and if not, delete the whole grid + noneIndex = self.getIndex("", hazKey) + noneGrid = (byteGrid == noneIndex) + if noneGrid.all(): + self.deleteCmd([weName], tr) + else: + self.createGrid(MODEL, weName, "DISCRETE", (byteGrid, hazKey), + tr, discreteOverlap= 0, + discreteAuxDataLength=4, + defaultColorTable="YesNo") + + return + + ## + # Format time as yyyymmdd_hhmm + # + # @param t: Time + # @type t: seconds since the epoch + # @return: Formatted version of t + # @rtype: string + def _printTime(self, t): + gm = time.gmtime(t) + s = time.strftime("%Y%m%d_%H%M", gm) + return s + + #print areas, from dictionary + def _printAreas(self, areas): + ara = list(areas) + ara.sort() + return ara + + #filter vtec table based on gfe operating mode, returns vtec table + def _filterVTECBasedOnGFEMode(self, vtecTable): + #get gfe mode + rawGfeMode = self.gfeOperatingMode() + gfeMode = rawGfeMode + if gfeMode is None: + gfeMode = "" + + gfeMode = gfeMode.strip().lower() + #practice mode = accept all records + if "practice" == gfeMode: + return vtecTable #allow all records + + #test mode -- only 
accept records that have "T" vtec + elif "test" == gfeMode: + fvtecTable = [] + for rec in vtecTable: + testEntry = (rec['vtecstr'].find('/T.') == 0) + if testEntry: + fvtecTable.append(rec) + return fvtecTable + + #regular/operational mode -- accept records that don't have "T" vtec + elif "standard" == gfeMode or "operational" == gfeMode: + fvtecTable = [] + for rec in vtecTable: + testEntry = (rec['vtecstr'].find('/T.') == 0) + if not testEntry: + fvtecTable.append(rec) + return fvtecTable + + else: + raise Exception("Unknown GFE operating mode: " + rawGfeMode) + + ## + # A Python access to the looseLocks() method of the Hazards parm. + def _unlockHazards(self): + hazParm = self.getParm(MODEL, ELEMENT, LEVEL) + hazParm.looseLocks() diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ISC_Utility.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ISC_Utility.py index 45bc3b2f3c..8a93408772 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ISC_Utility.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ISC_Utility.py @@ -1,1234 +1,1234 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ISC_Utility - Version 3.05 (Tim Barker - SOO Boise, ID) -# -# Supports new routines that mimic NDFD algorithms that were changed -# in late 2005 or early 2006. Algorithms now have potentially different -# thresholds for every 'border pair', based on topography, and the values -# of the grids themselves (i.e. large values of waveheight have more -# leniant' thresholds). Algorithms now consider a border as 'discrepant' -# if the average absolute difference along the border is larger than the -# average threshold along that border. Some tools/algorithms will also show -# which individual pairs violate their particular threshold. -# -# Author: barker -# 2008-11-19 - Barker - Version 3.05. Added code to check for 'office type' -# of editAreas, so that it checks only ISC_xxxx areas for wfos - -# not rfcs. Also added a check for ISC_xxxx editAreas without -# corresponding xxxx editArea. -# 2006-01-23 - Barker - Version 3.04. Add thresholds for PoP12hr, QPF12hr -# SnowAmt12hr (standard in ER), WindChill, HeatIndex (instead -# of AppT), and PoP6, PoP12 (common in SR). -# 2006-01-19 - Barker - Version 3.03. Another typo for non-square grids. -# 2006-01-17 - Barker - Version 3.02. Fix problem for non-square grids. -# 2006-01-13 - Barker - Version 3.01. Changed for new NDFD algorithm -# Thresholds now vary at each gridpoint - overall average -# difference along border must be less than average threshold -# along that border (a much better algorithm!). -# -# ---------------------------------------------------------------------------- - -## -# This is a base file that is not intended to be overridden. 
-## - -import numpy -import SmartScript -import time -import TimeRange -from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridParmInfo -GridType = GridParmInfo.GridType - -class ISC_Utility(SmartScript.SmartScript): - def __init__(self, dbss, eaMgr, mdMode=None, toolType="numeric"): - SmartScript.SmartScript.__init__(self, dbss) - self.setToolType(toolType) - self._dbss = dbss - # self.setUp(eaMgr, mdMode, toolType) - self.configuration() - # - # Always check to see if BorderPairs are current - # - refresh=7200 # seconds between refresh of borders - self._debug=0 # set to 1 or 5 or 10 for increasing info - self.list="" - # - # get Border Pair info - either from cache, or by calculating - # - self.pairInfo=self._getCachedPairs("BorderPairs","ISCPairs",refresh) - if self.pairInfo is None: - if self._debug>=1: - self.statusBarMsg("Calculating Pairs","R") - self.pairInfo=self._getPairInfo(self.getTopo()) - if self._debug>=1: - self.statusBarMsg("Calculating Pairs Done","R") - self._cachePairs("BorderPairs",self.pairInfo,"ISCPairs") - else: - if self._debug>=1: - self.statusBarMsg("Pair info obtained from cache","R") - - - #------------------------------------------------------------------------- - # - # C O N F I G U R A T I O N S E C T I O N F O R A L L I S C TOOLS - # - # this function is intended to be overridden and sets up the default - # configuration for the set of ISC tools. Copy and place into the - # ISC_Utility_Local_New and modify as needed. - def configuration(self): - # - # points which have an elevation difference greater than this will NOT - # be considered in ISC statistics (in feet). NDFD sets this to 1000ft. - # - self.MAXTOPODIFF=1000.0 - # - # NDFD checks are not performed when one side of a border is a land - # point and the other side is an ocean point. To do this, an EditArea - # with land/sea points needs to be calculated. 
With LANDEDITAREA set - # to None - the code will calculate the land area by a 'union' of all - # points found in the CWA editareas named XXX, where the XXX values - # are taken from all the editareas name ISC_XXX. If you have not - # overridden the ISC_XXX editarea or XXX edit areas, then this will - # work fine. If you HAVE overridden these edit area - use the - # LANDEDITAREA to specify the name of an editarea that contains just - # land points (all others are assumed to be ocean points). - # - self.LANDEDITAREA=None # or string with name of EditArea containing land - # - #-------------------------------------------------------------------- - # These configuration items for Show_ISC_Info and Show_ISC_Highlights. - # - # If you want the check for a particular parm to ACTUALLY check other - # parms, then list them here. Vector parms need not be listed - but - # the threshold for Vector parms in GFE is assumed to be the threshold - # for the magnitude part - and the threshold for the direction part is - # hard-coded below - # - self.MultiParms={"MaxRH":("MinT","TdMrn","MaxRH"), - "MinRH":("MaxT","TdAft","MinRH"), - "RH": ("T","Td","RH"), - } - - # Minimum number of points along a border before it considers a - # failing average threshold "significant" (to get rid of short borders) - # - self.MINPOINTS=10 - # - #------------------------------------------------------------------ - # - # NDFD thresholds - should not need to be modified. 
- # - # Each entry in THRESHOLDS contains a tuple ( parmnames, thresholdinfo) - # parmnames can be a tuple with many parms listed that use the - # same threshold - # thresholdinfo contains (thresholdtype,thresholdvalues, - # conditions,dirflag) where: - # thresholdtype="contant","topo" or "graduated" - # thresholdvalues= - # for "constant" type: value - # - # differences greater than value are considered - # discrepant - # - # for "topo" type: (elev,lowvalue,highvalue) - # - # if the elevation difference between points is - # less than elev, then the lowvalue is used as - # the threshold value. Otherwise the highvalue - # is used for the threshold value - # - # for "graduated" type: (bigvalue,(lessthan,value),(lessthan,value),...) - # - # bigvalue is the default threshold value. However - # if the lowest of the two pair points is less than the - # 'lessthan', then that 'value' is used for the - # threshold instead. All 'lessthan' values are checked, - # so they should be listed in decreasing order. 
- # - self.DEFAULT_THRESHOLD=("constant",5,("none",0,0),0) - self.THRESHOLDS=[ - (("T","Td","MaxT","MinT","TdAft","TdMrn"), - ("topo",(500,5,7),("none",0,0),0)), - (("HeatIndex","WindChill"), - ("topo",(500,7,9),("none",0,0),0)), - (("PoP","PoP12","PoP6","PoP12hr","PoP6hr"), - ("constant",20,("none",0,0),0)), - (("WindSpd","TransWindSpd","WindGust"), - ("graduated",(15,(20,10)),("greater_equal",12,0),0)), - (("WindDirec","TransWindDirec"), - ("topo",(500,45,90),("greater_equal",12,1),1)), - (("Sky"), - ("topo",(500,25,35),("none",0,0),0)), - (("QPF","QPF6hr"), - ("graduated",(1.0,(3.0,0.5),(1.5,0.25)),("greater",0.25,0),0)), - (("SnowAmt","SnowAmt6hr"), - ("graduated",(6,(12,4),(6,2)),("greater",2,0),0)), - (("SnowLevel","FzLevel","MixHgt"), - ("constant",1000,("none",0,0),0)), - (("RH","MaxRH","MinRH"), - ("graduated",(25,(75,20),(50,15),(25,10)),("none",0,0),0)), - (("WaveHeight"), - ("graduated",(10,(36,9),(32,8),(28,7),(24,6),(20,5),(16,4),(12,3),(6,2)),("greater",0,1),0)), - (("CWR"), - ("constant",10,("none",0,0),0)), - (("Haines"), - ("constant",1,("none",0,0),0)), - ] -# --------- E N D C O N F I G U R A T I O N S E C T I O N ---------- - - - - #================================================================= - # _getPairInfo - calculate pairInfo data structure from edit areas - # - # each entry in pairInfo is a tuple with (label,pairlist) - # where label=string with CWA neighbor name - # pairlist=list of tuples - # where each entry in pairlist is a tuple with: (insidex,insidey,outsidex,outsidey,TopoDiff) - # where insidex,insidey = pair coordinates inside CWA - # outsidex,outsidey = pair coordinates outside CWA - # absTopoDiff = topography difference (always positive) - # - def _getPairInfo(self,Topo): - pairInfo=[] - # - # Find coordinates of neighboring pairs - # first setup eah=home edit area - # - homeSite=self.getSiteID() - name="ISC_"+homeSite - eah=self.encodeEditArea(self.getEditArea(name)) - homeType=self.myOfficeType() - # - # Get grids with 
home edit area shifted 1 pixel - # each direction...and topo difference when shifted - # 1 pixel in each direction - # - xshift=( 1,-1, 0, 0) - yshift=( 0, 0, 1,-1) - homeshifts=[] - topodiffs=[] - for i in range(4): - homeshifts.append(self.offset(eah,xshift[i],yshift[i])) - topodiffs.append(Topo-self.offset(Topo,xshift[i],yshift[i])) - landGrid=self._getLandEditArea() - # - # Loop through other ISC_xxx edit areas - except the one - # for the home edit area - # - eanames=self.editAreaList() - - eaTime = time.time() - for eaname in eanames: - iterationTime = time.time() - if (len(eaname)==7)and(eaname[0:4]=="ISC_")and(eaname[4:]!=homeSite): - siteName=eaname[4:] - sType=self.officeType(siteName) - if sType is None: - continue - if sType==homeType: - ean=self.encodeEditArea(self.getEditArea(eaname)) - # - # Compare edit area to the shifted home edit areas - # looking for common points. Also check that topo - # difference is less than MAXTOPODIFF - # - pairs=[] - for k in range(4): - pair=numpy.logical_and(homeshifts[k],ean) - topodiff=topodiffs[k] - - for x in range(eah.shape[1]): - for y in range(eah.shape[0]): - if pair[y,x]==1: # common point - tdiff=numpy.abs(topodiff[y,x]) - homepointx=x-xshift[k] - homepointy=y-yshift[k] - land1=landGrid[homepointy,homepointx] - land2=landGrid[y,x] - coast=numpy.not_equal(land1,land2) # 0 if both land or both water - pairs.append((homepointx,homepointy,x,y,tdiff,coast)) - - if len(pairs)>0: - label=siteName - pairInfo.append((label,pairs)) - - - return pairInfo - #================================================================= - # _getCachedPairs - See if cached pairInfo structure is still - # valid. If so, return it, otherwise return - # None. 
- # - def _getCachedPairs(self, name, category, timeLimit): - try: - object = self.getObject(name+"_"+self._dbss.getSiteID(), category) - pairInfo, timeWritten, geoInfo = object - if timeLimit != 0 and time.time() - timeWritten > timeLimit: - return None #too old - - # validate geoinfo - geo = self._dbss.getSiteID() - if not geoInfo == geo: - return None #different geoinfo - return pairInfo - except: - return None - #================================================================= - # _cachePairs - save pairInfo structure data, along with time and - # grid location info to IFPS database - so it can be - # grabbed quickly later without re-calculating - # - def _cachePairs(self, name, pairInfo, category): - #geo = self._dbss.getParmManager().compositeGridLocation() - object = pairInfo, time.time(), self._dbss.getSiteID() - self.saveObject(name+"_"+self._dbss.getSiteID(), object, category) - #================================================================= - # _getLandEditArea - get Land editArea grid, calculating from - # cwa edit areas if not specified via - # configuration - # - # Returns grid of 0/1 for land. Everything not land is assumed - # to be water. - # - def _getLandEditArea(self): - # - # Get points that are land - either by the specified edit - # area in configuration, or by looking for ISC_xxx edit - # areas, and adding up all the corresponding xxx areas. - # - # This does NOT work if the default CWA edit areas named 'xxx' - # have been overridden with other names or different areas, or - # if new ISC_xxx edit areas have been added to the system and - # have a corresponding xxx edit area. 
- # - landGrid=None - if (self.LANDEDITAREA is not None): - landea=self.getEditArea(self.LANDEDITAREA) - if (landea is not None): - landGrid=self.encodeEditArea(landea) - if landGrid is None: - landGrid = self.empty(numpy.bool) - eanames=self.editAreaList() - for eaname in eanames: - if ((len(eaname)==7)and(eaname[0:4]=="ISC_")): - name=eaname[4:] - if name in eanames: - ea=self.getEditArea(name) - if ea is not None: - grid=self.encodeEditArea(ea) - landGrid |= grid - return landGrid - #======================================================================== - # _getThresholdInfo - return thresholdInfo structure for the - # specified parm. - # - def _getThresholdInfo(self,parmName): - thresholdInfo=self.DEFAULT_THRESHOLD - for (names,threshold) in self.THRESHOLDS: - if parmName in names: - thresholdInfo=threshold - break - return thresholdInfo - #================================================================= - # _getListing - get text detailing most recent checks - # - def _getListing(self): - return self.list - #================================================================= - # _checkParmBorders - check ISC borders for this parm - which may - # mean checking more than one grid (see the - # MultiParms configuration section) - # - def _checkParmBorders(self,WEname,GridTimeRange,listing=0): - timetext=self.makeTimeMsg(GridTimeRange) - if listing==1: - self.list="ISC Discrepancies Check for %s %s:\n\n"%(WEname,timetext) - # - # Get list of parms to check - which might be more than one - # - if (WEname in self.MultiParms.keys()): - parmlist=self.MultiParms[WEname] - else: - parmlist=(WEname,) - # - # Loop over each parm - put status messages for reading problems - # - totalviolate=0 - totalwarning=0 - totalchecked=0 - for parmname in parmlist: - results=self._checkGridBorders(parmname, GridTimeRange, listing=listing) - (status,numchecked,violate,warning)=results - if status==1: - msg="No %s GridInfo for %s"%(WEname,timetext) - self.statusBarMsg(msg,"A") - 
continue - if status==2: - msg="No %s ISC data for %s"%(WEname,timetext) - self.statusBarMsg(msg,"A") - continue - if status==3: - msg="No border checks for weather or discrete elements: %s"%WEname - self.statusBarMsg(msg,"A") - continue - totalviolate=totalviolate+violate - totalwarning=totalwarning+warning - totalchecked=totalchecked+numchecked - return(totalchecked,totalviolate,totalwarning) - #================================================================= - # _checkGridBorders - check Borders for a single grid (which - # might be a vector - so it might actually - # check two grids) - # - # returns (status,numchecked,violate,warning) - # - # if status is non-zero, then there was a problem reading the - # grid and the values are meaningless). numchecked is the total - # number of pairs checked - if zero - then no checks were done - # because no points met the conditions. That is different than - # no violations/warning when lots of points were checked. - # - def _checkGridBorders(self,parmName,GridTimeRange,listing=0): - # - # Get grid info - return status=1 if no gridInfo - # - try: - gridInfoList=self.getGridInfo(self.mutableID(), parmName, "SFC", - GridTimeRange) - except: - return(1,0,0,0) - # - # Return status=1 if gridInfo is empty - # - if (len(gridInfoList)<1): - return (1,0,0,0) - gridInfo=gridInfoList[0] - # - # get the ISC data - return status=2 if could not read ISC data - # - bits,isc=self._getBitsAndISC(parmName, gridInfo, GridTimeRange) - if ((bits is None) or (isc is None)): - return (2,0,0,0) - # - # Only know how to check for Scalar or Vector grids - # - WEtype=gridInfo.type() - if (not GridType.SCALAR.equals(WEtype)) and (not GridType.VECTOR.equals(WEtype)): - return (3,0,0,0) - - # - # If a vector - then check each part seperately - but the - # condition always depends on speed. For scalar grids the - # condition is always the same as the grid itself. 
- # - gridList=[] - if GridType.SCALAR.equals(WEtype): - mag=isc - gridList.append((parmName,mag,mag)) - else: - name1=parmName+"Spd" - (mag,direc)=isc - gridList.append((name1,mag,mag)) - name2=parmName+"Direc" - gridList.append((name2,direc,mag)) - # - # Loop over grids (usually one, but possibly two for vectors) - # - totalchecked=0 - totalviolate=0 - totalwarning=0 - for (pname,grid,condgrid) in gridList: - if self._debug>=10: - print "checking %s grid"%pname - if listing==1: - self.list=self.list+"For %s:\n"%pname - (numchecked,violate,warning)=self._checkAllBorders(pname,grid,condgrid,bits,listing=listing) - totalchecked=totalchecked+numchecked - totalviolate=totalviolate+violate - totalwarning=totalwarning+warning - if listing==1: - self.list=self.list+"\n" - return(0,totalchecked,totalviolate,totalwarning) - #================================================================= - # _checkAllBorders - check all borders for a single scalar grid - # that has a condition grid of congrid and bits - # indicates where ISC data was available. 
- # - # if listing=1, then adds text to self.list - # that shows some stats for each border - # - # returns: - # numchecked - total number of pairs checked - # over all borders - # numviolations - total number of borders that - # were in violation - # numwarnings - total number of borders that - # were in warning (violating but - # having less than MINPOINTS - # pairs) - # - def _checkAllBorders(self,parmName,grid,condgrid,bits,listing=0): - # - # get Threshold info for this parm - # - thresholdInfo=self._getThresholdInfo(parmName) - if self._debug>=10: - print "thresholdInfo=",thresholdInfo - (thresholdType,thresholdValues,conditions,dirflag)=thresholdInfo - # - # Loop over each neighbors border - # - violate=0 - warning=0 - totalchecks=0 - for (label,pairList) in self.pairInfo: - if self._debug>=5: - print "Checking borders with %s"%label - results=self._checkOneBorder(grid,condgrid,bits,pairList,thresholdInfo) - (returnvalue,totalnum,numchecked,numviolate, - avgbias,avgdiff,avgthresh)=results - if self._debug>=5: - print " totalpoints :%d"%totalnum - print " numberchecked:%d"%numchecked - print " numviolate :%d"%numviolate - if (numchecked>0): - print " bias :%f"%(avgbias) - print " diff :%f"%(avgdiff) - print " threshold :%f"%(avgthresh) - if listing==1: - self.list=self.list+" Avg Diff for %s is %7.2f (limit %7.2f) [%4d pairs - %4d failed] - "%(label,avgdiff,avgthresh,numchecked,numviolate) - if avgdiff>avgthresh: - if numcheckedconditionValue) - cond2=(value2>conditionValue) - elif conditionType=="greater_equal": - cond1=(value1>=conditionValue) - cond2=(value2>=conditionValue) - elif conditionType=="less": - cond1=(value1=10: - print "thresholdInfo=",thresholdInfo - (thresholdType,thresholdValues,conditions,dirflag)=thresholdInfo - # - # Loop over each neighbors border - # - for (label,pairList) in self.pairInfo: - if self._debug>=5: - print "Checking borders with %s"%label - for pair in pairList: - (x1,y1,x2,y2,topodiff,coast)=pair - if self._debug>=10: - 
print " point %3d,%3d-->%3d,%3d"%(x1,y1,x2,y2) - # - # get values across the border - # - value1=grid[y1,x1] - value2=grid[y2,x2] - # - # Get the difference across the border - # - diff=value1-value2 - if dirflag==1: - if diff>180.0: - diff=diff-360.0 - if diff<-180.0: - diff=diff+360.0 - absdiff=abs(diff) - # - # Get the threshold (which might depend on the values) - # - if thresholdType=="constant": - thresh=thresholdValues - elif thresholdType=="topo": - (elevation,low,high)=thresholdValues - if (topodiff<=elevation): - thresh=low - else: - thresh=high - elif thresholdType=="graduated": - minvalue=min(value1,value2) - thresh=thresholdValues[0] - for i in range(1,len(thresholdValues)): - (lessthan,newthresh)=thresholdValues[i] - if minvalue=10: - print " %f %f diff:%f threshold:%f itPasses:%1d"%(value1,value2,diff,thresh,itPasses) - # - # If too big a topodiff - or a coast - then it passes anyway - # - if ((topodiff>self.MAXTOPODIFF) or (coast==1)): - itPasses=1 - # - # If no ISC data - then it passes anyway - # available in ISC - # - if (bits[y2,x2]<0.5): - itPasses=1 - # - # Make sure conditions for checking are met - # - if (not self._meetConditions(conditions,x1,y1,x2,y2,condgrid)): - itPasses=1 - # - # Do nothing if this point passes and configuration - # indicates that nonViolators will not be displayed - # - if (maskNonViolators and itPasses): - continue - # - if absdiff>abs(diffGrid[y1,x1]): - diffGrid[y1,x1]=diff - return diffGrid - #================================================================= - # - # checkOneBorder - given a list of points in pairList, the threshold - # info, the grid and condition grid, and the bits - # that indicates where ISC data is available: - # then for each point: - # get the grid values, - # check that conditions are met, - # get difference, - # get threshold, - # check difference comapred to threshold and - # add to total differences, biases, etc. 
- # - # return: - # code = 0 if border passed (even if no points) - # 1 if border violated but had MINPOINTS - # or fewer pairs - # 2 if border violated and had more than - # MINPOINTS pairs - # totalnum = total number of pairs along the - # border - # numchecked = total pairs checked along the - # border after skipping ones without - # ISC data (bits=0), topo difference - # higher than limit, or condgrid - # values not meeting condition - # numviolate = number of individual pairs that - # violated their threshold - # avgbias = average difference among pairs - # avgdiff = average absolute difference among pairs - # avgthresh = average threshold along border - # - # - def _checkOneBorder(self,grid,condgrid,bits,pairList,thresholdInfo): - (thresholdType,thresholdValues,conditions,dirflag)=thresholdInfo - totalnum=len(pairList) - numchecked=0 - numviolate=0 - biastotal=0.0 - difftotal=0.0 - threshtotal=0.0 - - for pair in pairList: - (x1,y1,x2,y2,topodiff,coast)=pair - if self._debug>=10: - print " point %3d,%3d-->%3d,%3d"%(x1,y1,x2,y2) - # - # no tests if too big of topodiff, or if a coastline pair - # - if ((topodiff>self.MAXTOPODIFF) or (coast==1)): - if self._debug>=10: - print " skipped because of elevation or coastline" - continue - # - # no tests if bits of outside point indicate it is NOT - # available in ISC - # - if (bits[y2,x2]<0.5): - if self._debug>=10: - print " skipped because ISC data not available" - continue - # - # get values across the border - # - value1=grid[y1,x1] - value2=grid[y2,x2] - # - # Make sure conditions for checking are met - # - if (not self._meetConditions(conditions,x1,y1,x2,y2,condgrid)): - if self._debug>=10: - print " skipped because %f and %f did not meet the conditions:%s"%(value1,value2,conditions) - continue - numchecked=numchecked+1 - # - # Get the difference across the border - add it to the - # total difference (biastotal) and total of absolute - # value of differences (difftotal) - # - diff=value1-value2 - if dirflag==1: 
- if diff>180.0: - diff=diff-360.0 - if diff<-180.0: - diff=diff+360.0 - absdiff=abs(diff) - biastotal=biastotal+diff - difftotal=difftotal+absdiff - # - # Get the threshold (which might depend on the values) - # Add it to the total of thresholds - # - if thresholdType=="constant": - thresh=thresholdValues - elif thresholdType=="topo": - (elevation,low,high)=thresholdValues - if (topodiff<=elevation): - thresh=low - else: - thresh=high - elif thresholdType=="graduated": - minvalue=min(value1,value2) - thresh=thresholdValues[0] - for i in range(1,len(thresholdValues)): - (lessthan,newthresh)=thresholdValues[i] - if minvalue=10: - print " %f %f diff:%f threshold:%f itPasses:%1d"%(value1,value2,diff,thresh,itPasses) - # - # Calculate average bias, average abs difference, average threshold - # - if numchecked>0: - avgbias=biastotal/numchecked - avgdiff=difftotal/numchecked - avgthresh=threshtotal/numchecked - else: - avgbias=0.0 - avgdiff=0.0 - avgthresh=0.0 - # - # Setup return value (0 if OK, 1 if failed but only a few points, 2 if failed) - # - returnvalue=0 - if ((avgdiff>avgthresh) and (avgthresh>0.0)): - if numchecked>self.MINPOINTS: - returnvalue=2 - else: - returnvalue=1 - return(returnvalue,totalnum,numchecked,numviolate,avgbias,avgdiff,avgthresh) - #========================================================================= - # - # _getBitsAndISC - a routine to get the ISC composite - broken out - # from the neighboringPoints routine so that it need not be called - # several times when looping over different edit areas (for each - # of the neighboring CWAs). 
- # - def _getBitsAndISC(self,WEname,GridInfo,GridTimeRange): - isc = self._getBetterComposite(WEname, GridTimeRange) - if isc is None: - return None, None - # - # See if we are working with a Scalar or Vector element - # - wxType = GridInfo.type() - if GridType.SCALAR.equals(wxType): - bits, isc = isc - return bits,isc - elif GridType.VECTOR.equals(wxType): - bits, isc, direc = isc - return bits,(isc,direc) - else: - return None, None - #======================================================================== - # - # Essentially the same as the SmartScript getComposite routine - # but correctly handles multiple ISC grids within the timeRange - # of the grid you want. Can return None if no ISC or specified - # grids lie within the TimeRange specified. - # - # 2005-01-24 - Changed again because accumulative parms return - # different values from getComposite after IFPS16. - # - def _getBetterComposite(self,parmName, timeRange): - # - # Get the type, rateParm flag, and limits - # for the parameter name passed in. 
- # - mutableID=self.mutableID() - baseGrid=self.getGrids(mutableID,parmName,"SFC",timeRange,noDataError=0) - if baseGrid is None: - return None - gridInfoList=self.getGridInfo(mutableID,parmName,"SFC",timeRange) - if (len(gridInfoList)<1): - return None - for gridInfo in gridInfoList: - wxType=gridInfo.type() - rateParm=gridInfo.rateParm() - minlimit=gridInfo.minLimit() - maxlimit=gridInfo.maxLimit() - # - # Make sure ISC grids exist for this parm - # - parm=self.getParm("ISC",parmName,"SFC") - if parm is None: - return None - # - # Get list of all ISC time-blocks that fit in the - # timerange of the specified GridTimeRange grid - # - iscInfos=self.getGridInfo("ISC",parmName,"SFC",timeRange) - if (len(iscInfos)<1): - return None - alltrs=[] - for info in iscInfos: - tr=info.gridTime() - alltrs.append(tr) - # - # setup sum/counter for average - # - if ((parmName=="MaxT")or(parmName=="PoP")): - sum=self.newGrid(-150.0) - elif (parmName=="MinT"): - sum=self.newGrid(150.0) - else: - sum=self.empty() - if GridType.VECTOR.equals(wxType): - sumv=self.empty() - cnt = self.empty() - # - # foreach time range...get the ISC composite for - # that hour - # - for tr in alltrs: - comp=self.getComposite(parmName,tr) - if comp[0].shape!=sum.shape: - continue - # - # Add to sums, or min/max - # - if GridType.SCALAR.equals(wxType): - bits,isc=comp - #isc=self.getGrids("ISC",parmName,"SFC",tr) - if parmName in ["MaxT", "PoP"]: - sum[bits] = numpy.maximum(isc,sum)[bits] - cnt[bits] = 1 - elif parmName=="MinT": - sum[bits] = numpy.minimum(isc,sum)[bits] - cnt[bits] = 1 - else: - sum[bits] += isc[bits] - cnt[bits] += 1 - if GridType.VECTOR.equals(wxType): - bits,mag,direc = comp - #(mag,direc)=self.getGrids("ISC",parmName,"SFC",tr) - (u,v)=self.MagDirToUV(mag,direc) - sum[bits] += u[bits] - sumv[bits] += v[bits] - cnt[bits] += 1 - if GridType.WEATHER.equals(wxType): - bits = comp - bits,keys,strings=comp - #(keys,strings)=self.getGrids("ISC",parmName,"SFC",tr) - # - # now calculate 
average/max/min, etc. - # (count is always 1 for max/min) - # - noISC=numpy.less(cnt,0.5) - bits=numpy.greater(cnt,0.5) - if GridType.SCALAR.equals(wxType) or GridType.VECTOR.equals(wxType): - cnt[numpy.less(cnt, 1)] = 1 - if GridType.VECTOR.equals(wxType): - sum /= cnt - sum[noISC]= minlimit - - sumv /= cnt - sumv[noISC] = minlimit - - (mag,direc)=self.UVToMagDir(sum,sumv) - (baseMag,baseDir)=baseGrid - mag[noISC] = baseMag[noISC] - direc[noISC] = baseDir[noISC] - return bits,mag,direc - else: - sum /= cnt - sum[noISC] = baseGrid[noISC] - return bits,sum - else: - return bits,keys,strings - #================================================================= - # makeTimeMsg - Make short string with time of this grid, usually - # something like "Mon (6/5) 12-18Z:", but can get - # complicated like "Mon (6/5) 18Z - Tue (6/6) 03Z:" - # - def makeTimeMsg(self,tr): - DAYS=("Monday","Tuesday","Wednesday","Thursday", - "Friday","Saturday","Sunday") - swdy=DAYS[time.gmtime(tr.startTime().unixTime())[6]] - sday=tr.startTime().day - smon=tr.startTime().month - sdate="%s (%d/%d)"%(swdy,smon,sday) - shou=tr.startTime().hour - ewdy=DAYS[time.gmtime(tr.endTime().unixTime())[6]] - eday=tr.endTime().day - emon=tr.endTime().month - edate="%s (%d/%d)"%(ewdy,emon,eday) - ehou=tr.endTime().hour - if (sdate==edate): - msg="%s %2.2d-%2.2dZ:"%(sdate,shou,ehou) - else: - msg="%s %2.2dZ - %s %2.2dZ:"%(sdate,shou,edate,ehou) - return msg - #================================================================ - # _getElementList - get sorted list of currently displayed mutable - # model elements (by default it excludes nonSCALAR and non- - # VECTOR elements - but you can include them with the flag) - # - def _getElementList(self,excludeWxDiscrete=1): - mutableModel=self.mutableID().modelName() - parmList = self._dbss.getParmManager().getDisplayedParms() - elementList = [] - for parm in parmList: - name = parm.expressionName() - model = parm.getParmID().getDbId().getModelName() - if model == 
mutableModel: - wxType = parm.getGridInfo().getGridType() - if ((excludeWxDiscrete == 1) and (GridType.SCALAR.equals(wxType) or \ - GridType.VECTOR.equals(wxType))): - elementList.append(parm.expressionName()) - elementList.sort() - return elementList - #================================================================ - # _getTimeRangeList - get list of all PublishTimes, - # plus "All Grids" and "Selected Time" added at the top of - # the list - # - def _getTimeRangeList(self): - trList = ["All Grids", "Selected Time"] - publishTimes = self.getConfigItem("PublishTimes",[]) - #inv = self._dbss.dataManager().selectTRMgr().inventory() - inv = self._dbss.getSelectTimeRangeManager().inventory() - for t in publishTimes: - if t in inv: - trList.append(t) - return trList - #================================================================ - # _convertTimeRange - given a timeRangeName - return the - # timeRange (including the bogus names "All Grids" and - # "Selected Time"). - # - def _convertTimeRange(self, trName): - if trName == "All Grids": - curTime = self.gmtime() - startHour = curTime[3] - timeRange = self.createTimeRange(startHour, 204, mode="Zulu") - #timeRange = TimeRange.allTimes() - # timeRange = AFPS.TimeRange(AFPS.AbsTime(0), - # AFPS.AbsTime_maxFutureTime()) - elif trName == "Selected Time": - selectedTime = self._dbss.getParmOp().getSelectionTimeRange() - if selectedTime is None: - return None - else: - tr = self._dbss.getParmOp().getSelectionTimeRange() - return TimeRange.TimeRange(tr.getStart(), tr.getEnd()) - else: - timeRange = self.getTimeRange(trName) - return timeRange - - def _getCachedGrid(self, name, category, timeLimit): - try: - object = self.getObject(name, category) - grid, timeWritten, geoInfo = object - if timeLimit != 0 and time.time() - timeWritten > timeLimit: - return None #too old - - # validate geoinfo - geo = self._dbss.getParmManager().compositeGridLocation() - if geoInfo != `geo`: - return None #different geoinfo - return grid - 
except: - return None - - def _cacheGrid(self, name, grid, category): - geo = self._dbss.getParmManager().compositeGridLocation() - object = grid, time.time(), `geo` - self.saveObject(name, object, category) - - #======================================================================== - # - # _checkViolate makes a mask of points INSIDE the SITE_EDITAREA that - # have a difference with a neighbors gridpoints (defined by areamask) - # that have a magnitude greater than threshold). Uses bits and criteria - # just like other calcuations - # - def _checkViolate(self, bits, criteria, areamask, discGrid, threshold): - violate = self.empty(bool) - for i in range(4): # range(8) to consider diagonal neighbors - # - # make sure data exists for both points - # - bitshift=self.offset(bits,self._ishift[i],self._jshift[i]) - exist=logical_and(bitshift,bits) - # - # Make sure at least one of the points meets the criteria - # - critshift=self.offset(criteria,self._ishift[i],self._jshift[i]) - meetcrit=logical_or(critshift,criteria) - # - # Make sure it borders the specified area - # - areashift=self.offset(areamask,self._ishift[i],self._jshift[i]) - onborder=logical_and(areashift,self._siteAreaMask) - # - # Make sure it meets all criteria: exist, meetcrit, onborder - # and checkNDFD (meets topo thresholds) - # - mask=logical_and(logical_and(logical_and(exist,meetcrit),onborder),less(self._topodiff[i],self.MAXTOPODIFF)) - # - violate[logical_and(mask, greater(abs(discGrid), threshold))] = True - return violate - - - #======================================================================== - # - # _nbmask - # - # Replicate the NDFD neighboring points algorithm. It checks 'border - # pairs' where one point is 'inside' the CWA and the immediate - # neighbor (east-west-north-south) is 'outside' the CWA. A point - # inside the CWA might be used in several 'border pairs' - since it - # might have a neighbor outside the CWA to both the north and the west, - # etc. 
- # - # A 'border pair' is compared if: - # . The elevation difference between the two points is less - # than 1000 feet. - # . Both points have ISC data available. - # . The point 'outside' must be inside the area specified by - # areamask (usually the mask of the CWA you are checking against - # - # After finding the differences for each of the remaining border pairs, - # It averages them, and returns the average border pair difference, the - # average elevation difference for the pairs, the overall average of the - # values, the minimum of the average of the values 'inside' or the values - # 'outside', and the number of pairs used in this average. - # - def _nbmask(self, bits, isc, criteria, areamask, dirtype=0): - totalpts=0 - totaldiff=0 - totaltopo=0 - totalvalue=0 - totalvaluein=0 - totalvalueout=0 - for i in range(4): # range(8) to consider diagonal neighbors - # - # make sure data exists for both points - # - bitshift=self.offset(bits,self._ishift[i],self._jshift[i]) - exist=logical_and(bitshift,bits) - # - # Make sure at least one of the points meets the criteria - # - critshift=self.offset(criteria,self._ishift[i],self._jshift[i]) - meetcrit=logical_or(critshift,criteria) - # - # Make sure it borders the specified area - # - areashift=self.offset(areamask,self._ishift[i],self._jshift[i]) - onborder=logical_and(areashift,self._siteAreaMask) - # - # Make sure it meets all criteria: exist, meetcrit, onborder - # and elevation difference less than MAXTOPODIFF (meets topo thresholds) - # - mask=logical_and(logical_and(logical_and(exist,meetcrit),onborder),less(self._topodiff[i],self.MAXTOPODIFF)) - # - # Shift the data the directions - # - shift=self.offset(isc,self._ishift[i],self._jshift[i]) - # - # Get the difference (different for directions) - # - if dirtype==1: - d1=self._dirdiff(isc,shift) - else: - d1=abs(isc-shift) - # - # Get number of points - # - pts=add.reduce(add.reduce(mask)) - # - # get sum of differences for each shift direction - # - 
diff=sum(compress(mask.flat,d1.flat)) - # - # get sum of topo differences - # - topo=sum(compress(mask.flat,self._topodiff[i].flat)) - # - # get sums for points inside/outside - # - totalout=sum(compress(mask.flat,shift.flat)) - totalin=sum(compress(mask.flat,isc.flat)) - totalpts=totalpts+pts - totaldiff=totaldiff+diff - totaltopo=totaltopo+topo - totalvalue=totalvalue+totalout+totalin - totalvaluein=totalvaluein+totalin - totalvalueout=totalvalueout+totalout - # - # calculate total average - # - if (totalpts>0): - avg=totaldiff/totalpts - topoavg=totaltopo/totalpts - avgvalue=totalvalue/(totalpts*2) - avgvaluein=totalvaluein/totalpts - avgvalueout=totalvalueout/totalpts - minavgvalue=min(avgvaluein,avgvalueout) - else: - avg=0 - topoavg=0 - avgvalue=0 - minavgvalue=0 - return(avg,topoavg,avgvalue,minavgvalue,totalpts) - - #======================================================================== - # - # Get the smallest direction difference between two directions - # - def _dirdiff(self,dir1,dir2): - diff=abs(dir1-dir2) - less1=less(dir1,dir2) - diff=where(greater(diff,180.0),where(less1,dir1+360.0-dir2,dir2+360.0-dir1),diff) - return diff - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ISC_Utility - Version 3.05 (Tim Barker - SOO Boise, ID) +# +# Supports new routines that mimic NDFD algorithms that were changed +# in late 2005 or early 2006. Algorithms now have potentially different +# thresholds for every 'border pair', based on topography, and the values +# of the grids themselves (i.e. large values of waveheight have more +# leniant' thresholds). Algorithms now consider a border as 'discrepant' +# if the average absolute difference along the border is larger than the +# average threshold along that border. Some tools/algorithms will also show +# which individual pairs violate their particular threshold. +# +# Author: barker +# 2008-11-19 - Barker - Version 3.05. Added code to check for 'office type' +# of editAreas, so that it checks only ISC_xxxx areas for wfos - +# not rfcs. Also added a check for ISC_xxxx editAreas without +# corresponding xxxx editArea. +# 2006-01-23 - Barker - Version 3.04. Add thresholds for PoP12hr, QPF12hr +# SnowAmt12hr (standard in ER), WindChill, HeatIndex (instead +# of AppT), and PoP6, PoP12 (common in SR). +# 2006-01-19 - Barker - Version 3.03. Another typo for non-square grids. +# 2006-01-17 - Barker - Version 3.02. Fix problem for non-square grids. +# 2006-01-13 - Barker - Version 3.01. Changed for new NDFD algorithm +# Thresholds now vary at each gridpoint - overall average +# difference along border must be less than average threshold +# along that border (a much better algorithm!). +# +# ---------------------------------------------------------------------------- + +## +# This is a base file that is not intended to be overridden. 
+## + +import numpy +import SmartScript +import time +import TimeRange +from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridParmInfo +GridType = GridParmInfo.GridType + +class ISC_Utility(SmartScript.SmartScript): + def __init__(self, dbss, eaMgr, mdMode=None, toolType="numeric"): + SmartScript.SmartScript.__init__(self, dbss) + self.setToolType(toolType) + self._dbss = dbss + # self.setUp(eaMgr, mdMode, toolType) + self.configuration() + # + # Always check to see if BorderPairs are current + # + refresh=7200 # seconds between refresh of borders + self._debug=0 # set to 1 or 5 or 10 for increasing info + self.list="" + # + # get Border Pair info - either from cache, or by calculating + # + self.pairInfo=self._getCachedPairs("BorderPairs","ISCPairs",refresh) + if self.pairInfo is None: + if self._debug>=1: + self.statusBarMsg("Calculating Pairs","R") + self.pairInfo=self._getPairInfo(self.getTopo()) + if self._debug>=1: + self.statusBarMsg("Calculating Pairs Done","R") + self._cachePairs("BorderPairs",self.pairInfo,"ISCPairs") + else: + if self._debug>=1: + self.statusBarMsg("Pair info obtained from cache","R") + + + #------------------------------------------------------------------------- + # + # C O N F I G U R A T I O N S E C T I O N F O R A L L I S C TOOLS + # + # this function is intended to be overridden and sets up the default + # configuration for the set of ISC tools. Copy and place into the + # ISC_Utility_Local_New and modify as needed. + def configuration(self): + # + # points which have an elevation difference greater than this will NOT + # be considered in ISC statistics (in feet). NDFD sets this to 1000ft. + # + self.MAXTOPODIFF=1000.0 + # + # NDFD checks are not performed when one side of a border is a land + # point and the other side is an ocean point. To do this, an EditArea + # with land/sea points needs to be calculated. 
With LANDEDITAREA set + # to None - the code will calculate the land area by a 'union' of all + # points found in the CWA editareas named XXX, where the XXX values + # are taken from all the editareas name ISC_XXX. If you have not + # overridden the ISC_XXX editarea or XXX edit areas, then this will + # work fine. If you HAVE overridden these edit area - use the + # LANDEDITAREA to specify the name of an editarea that contains just + # land points (all others are assumed to be ocean points). + # + self.LANDEDITAREA=None # or string with name of EditArea containing land + # + #-------------------------------------------------------------------- + # These configuration items for Show_ISC_Info and Show_ISC_Highlights. + # + # If you want the check for a particular parm to ACTUALLY check other + # parms, then list them here. Vector parms need not be listed - but + # the threshold for Vector parms in GFE is assumed to be the threshold + # for the magnitude part - and the threshold for the direction part is + # hard-coded below + # + self.MultiParms={"MaxRH":("MinT","TdMrn","MaxRH"), + "MinRH":("MaxT","TdAft","MinRH"), + "RH": ("T","Td","RH"), + } + + # Minimum number of points along a border before it considers a + # failing average threshold "significant" (to get rid of short borders) + # + self.MINPOINTS=10 + # + #------------------------------------------------------------------ + # + # NDFD thresholds - should not need to be modified. 
+ # + # Each entry in THRESHOLDS contains a tuple ( parmnames, thresholdinfo) + # parmnames can be a tuple with many parms listed that use the + # same threshold + # thresholdinfo contains (thresholdtype,thresholdvalues, + # conditions,dirflag) where: + # thresholdtype="contant","topo" or "graduated" + # thresholdvalues= + # for "constant" type: value + # + # differences greater than value are considered + # discrepant + # + # for "topo" type: (elev,lowvalue,highvalue) + # + # if the elevation difference between points is + # less than elev, then the lowvalue is used as + # the threshold value. Otherwise the highvalue + # is used for the threshold value + # + # for "graduated" type: (bigvalue,(lessthan,value),(lessthan,value),...) + # + # bigvalue is the default threshold value. However + # if the lowest of the two pair points is less than the + # 'lessthan', then that 'value' is used for the + # threshold instead. All 'lessthan' values are checked, + # so they should be listed in decreasing order. 
+ # + self.DEFAULT_THRESHOLD=("constant",5,("none",0,0),0) + self.THRESHOLDS=[ + (("T","Td","MaxT","MinT","TdAft","TdMrn"), + ("topo",(500,5,7),("none",0,0),0)), + (("HeatIndex","WindChill"), + ("topo",(500,7,9),("none",0,0),0)), + (("PoP","PoP12","PoP6","PoP12hr","PoP6hr"), + ("constant",20,("none",0,0),0)), + (("WindSpd","TransWindSpd","WindGust"), + ("graduated",(15,(20,10)),("greater_equal",12,0),0)), + (("WindDirec","TransWindDirec"), + ("topo",(500,45,90),("greater_equal",12,1),1)), + (("Sky"), + ("topo",(500,25,35),("none",0,0),0)), + (("QPF","QPF6hr"), + ("graduated",(1.0,(3.0,0.5),(1.5,0.25)),("greater",0.25,0),0)), + (("SnowAmt","SnowAmt6hr"), + ("graduated",(6,(12,4),(6,2)),("greater",2,0),0)), + (("SnowLevel","FzLevel","MixHgt"), + ("constant",1000,("none",0,0),0)), + (("RH","MaxRH","MinRH"), + ("graduated",(25,(75,20),(50,15),(25,10)),("none",0,0),0)), + (("WaveHeight"), + ("graduated",(10,(36,9),(32,8),(28,7),(24,6),(20,5),(16,4),(12,3),(6,2)),("greater",0,1),0)), + (("CWR"), + ("constant",10,("none",0,0),0)), + (("Haines"), + ("constant",1,("none",0,0),0)), + ] +# --------- E N D C O N F I G U R A T I O N S E C T I O N ---------- + + + + #================================================================= + # _getPairInfo - calculate pairInfo data structure from edit areas + # + # each entry in pairInfo is a tuple with (label,pairlist) + # where label=string with CWA neighbor name + # pairlist=list of tuples + # where each entry in pairlist is a tuple with: (insidex,insidey,outsidex,outsidey,TopoDiff) + # where insidex,insidey = pair coordinates inside CWA + # outsidex,outsidey = pair coordinates outside CWA + # absTopoDiff = topography difference (always positive) + # + def _getPairInfo(self,Topo): + pairInfo=[] + # + # Find coordinates of neighboring pairs + # first setup eah=home edit area + # + homeSite=self.getSiteID() + name="ISC_"+homeSite + eah=self.encodeEditArea(self.getEditArea(name)) + homeType=self.myOfficeType() + # + # Get grids with 
home edit area shifted 1 pixel + # each direction...and topo difference when shifted + # 1 pixel in each direction + # + xshift=( 1,-1, 0, 0) + yshift=( 0, 0, 1,-1) + homeshifts=[] + topodiffs=[] + for i in range(4): + homeshifts.append(self.offset(eah,xshift[i],yshift[i])) + topodiffs.append(Topo-self.offset(Topo,xshift[i],yshift[i])) + landGrid=self._getLandEditArea() + # + # Loop through other ISC_xxx edit areas - except the one + # for the home edit area + # + eanames=self.editAreaList() + + eaTime = time.time() + for eaname in eanames: + iterationTime = time.time() + if (len(eaname)==7)and(eaname[0:4]=="ISC_")and(eaname[4:]!=homeSite): + siteName=eaname[4:] + sType=self.officeType(siteName) + if sType is None: + continue + if sType==homeType: + ean=self.encodeEditArea(self.getEditArea(eaname)) + # + # Compare edit area to the shifted home edit areas + # looking for common points. Also check that topo + # difference is less than MAXTOPODIFF + # + pairs=[] + for k in range(4): + pair=numpy.logical_and(homeshifts[k],ean) + topodiff=topodiffs[k] + + for x in range(eah.shape[1]): + for y in range(eah.shape[0]): + if pair[y,x]==1: # common point + tdiff=numpy.abs(topodiff[y,x]) + homepointx=x-xshift[k] + homepointy=y-yshift[k] + land1=landGrid[homepointy,homepointx] + land2=landGrid[y,x] + coast=numpy.not_equal(land1,land2) # 0 if both land or both water + pairs.append((homepointx,homepointy,x,y,tdiff,coast)) + + if len(pairs)>0: + label=siteName + pairInfo.append((label,pairs)) + + + return pairInfo + #================================================================= + # _getCachedPairs - See if cached pairInfo structure is still + # valid. If so, return it, otherwise return + # None. 
+ # + def _getCachedPairs(self, name, category, timeLimit): + try: + object = self.getObject(name+"_"+self._dbss.getSiteID(), category) + pairInfo, timeWritten, geoInfo = object + if timeLimit != 0 and time.time() - timeWritten > timeLimit: + return None #too old + + # validate geoinfo + geo = self._dbss.getSiteID() + if not geoInfo == geo: + return None #different geoinfo + return pairInfo + except: + return None + #================================================================= + # _cachePairs - save pairInfo structure data, along with time and + # grid location info to IFPS database - so it can be + # grabbed quickly later without re-calculating + # + def _cachePairs(self, name, pairInfo, category): + #geo = self._dbss.getParmManager().compositeGridLocation() + object = pairInfo, time.time(), self._dbss.getSiteID() + self.saveObject(name+"_"+self._dbss.getSiteID(), object, category) + #================================================================= + # _getLandEditArea - get Land editArea grid, calculating from + # cwa edit areas if not specified via + # configuration + # + # Returns grid of 0/1 for land. Everything not land is assumed + # to be water. + # + def _getLandEditArea(self): + # + # Get points that are land - either by the specified edit + # area in configuration, or by looking for ISC_xxx edit + # areas, and adding up all the corresponding xxx areas. + # + # This does NOT work if the default CWA edit areas named 'xxx' + # have been overridden with other names or different areas, or + # if new ISC_xxx edit areas have been added to the system and + # have a corresponding xxx edit area. 
+ # + landGrid=None + if (self.LANDEDITAREA is not None): + landea=self.getEditArea(self.LANDEDITAREA) + if (landea is not None): + landGrid=self.encodeEditArea(landea) + if landGrid is None: + landGrid = self.empty(numpy.bool) + eanames=self.editAreaList() + for eaname in eanames: + if ((len(eaname)==7)and(eaname[0:4]=="ISC_")): + name=eaname[4:] + if name in eanames: + ea=self.getEditArea(name) + if ea is not None: + grid=self.encodeEditArea(ea) + landGrid |= grid + return landGrid + #======================================================================== + # _getThresholdInfo - return thresholdInfo structure for the + # specified parm. + # + def _getThresholdInfo(self,parmName): + thresholdInfo=self.DEFAULT_THRESHOLD + for (names,threshold) in self.THRESHOLDS: + if parmName in names: + thresholdInfo=threshold + break + return thresholdInfo + #================================================================= + # _getListing - get text detailing most recent checks + # + def _getListing(self): + return self.list + #================================================================= + # _checkParmBorders - check ISC borders for this parm - which may + # mean checking more than one grid (see the + # MultiParms configuration section) + # + def _checkParmBorders(self,WEname,GridTimeRange,listing=0): + timetext=self.makeTimeMsg(GridTimeRange) + if listing==1: + self.list="ISC Discrepancies Check for %s %s:\n\n"%(WEname,timetext) + # + # Get list of parms to check - which might be more than one + # + if (WEname in list(self.MultiParms.keys())): + parmlist=self.MultiParms[WEname] + else: + parmlist=(WEname,) + # + # Loop over each parm - put status messages for reading problems + # + totalviolate=0 + totalwarning=0 + totalchecked=0 + for parmname in parmlist: + results=self._checkGridBorders(parmname, GridTimeRange, listing=listing) + (status,numchecked,violate,warning)=results + if status==1: + msg="No %s GridInfo for %s"%(WEname,timetext) + self.statusBarMsg(msg,"A") + 
continue + if status==2: + msg="No %s ISC data for %s"%(WEname,timetext) + self.statusBarMsg(msg,"A") + continue + if status==3: + msg="No border checks for weather or discrete elements: %s"%WEname + self.statusBarMsg(msg,"A") + continue + totalviolate=totalviolate+violate + totalwarning=totalwarning+warning + totalchecked=totalchecked+numchecked + return(totalchecked,totalviolate,totalwarning) + #================================================================= + # _checkGridBorders - check Borders for a single grid (which + # might be a vector - so it might actually + # check two grids) + # + # returns (status,numchecked,violate,warning) + # + # if status is non-zero, then there was a problem reading the + # grid and the values are meaningless). numchecked is the total + # number of pairs checked - if zero - then no checks were done + # because no points met the conditions. That is different than + # no violations/warning when lots of points were checked. + # + def _checkGridBorders(self,parmName,GridTimeRange,listing=0): + # + # Get grid info - return status=1 if no gridInfo + # + try: + gridInfoList=self.getGridInfo(self.mutableID(), parmName, "SFC", + GridTimeRange) + except: + return(1,0,0,0) + # + # Return status=1 if gridInfo is empty + # + if (len(gridInfoList)<1): + return (1,0,0,0) + gridInfo=gridInfoList[0] + # + # get the ISC data - return status=2 if could not read ISC data + # + bits,isc=self._getBitsAndISC(parmName, gridInfo, GridTimeRange) + if ((bits is None) or (isc is None)): + return (2,0,0,0) + # + # Only know how to check for Scalar or Vector grids + # + WEtype=gridInfo.type() + if (not GridType.SCALAR.equals(WEtype)) and (not GridType.VECTOR.equals(WEtype)): + return (3,0,0,0) + + # + # If a vector - then check each part seperately - but the + # condition always depends on speed. For scalar grids the + # condition is always the same as the grid itself. 
+ # + gridList=[] + if GridType.SCALAR.equals(WEtype): + mag=isc + gridList.append((parmName,mag,mag)) + else: + name1=parmName+"Spd" + (mag,direc)=isc + gridList.append((name1,mag,mag)) + name2=parmName+"Direc" + gridList.append((name2,direc,mag)) + # + # Loop over grids (usually one, but possibly two for vectors) + # + totalchecked=0 + totalviolate=0 + totalwarning=0 + for (pname,grid,condgrid) in gridList: + if self._debug>=10: + print("checking %s grid"%pname) + if listing==1: + self.list=self.list+"For %s:\n"%pname + (numchecked,violate,warning)=self._checkAllBorders(pname,grid,condgrid,bits,listing=listing) + totalchecked=totalchecked+numchecked + totalviolate=totalviolate+violate + totalwarning=totalwarning+warning + if listing==1: + self.list=self.list+"\n" + return(0,totalchecked,totalviolate,totalwarning) + #================================================================= + # _checkAllBorders - check all borders for a single scalar grid + # that has a condition grid of congrid and bits + # indicates where ISC data was available. 
+ # + # if listing=1, then adds text to self.list + # that shows some stats for each border + # + # returns: + # numchecked - total number of pairs checked + # over all borders + # numviolations - total number of borders that + # were in violation + # numwarnings - total number of borders that + # were in warning (violating but + # having less than MINPOINTS + # pairs) + # + def _checkAllBorders(self,parmName,grid,condgrid,bits,listing=0): + # + # get Threshold info for this parm + # + thresholdInfo=self._getThresholdInfo(parmName) + if self._debug>=10: + print("thresholdInfo=",thresholdInfo) + (thresholdType,thresholdValues,conditions,dirflag)=thresholdInfo + # + # Loop over each neighbors border + # + violate=0 + warning=0 + totalchecks=0 + for (label,pairList) in self.pairInfo: + if self._debug>=5: + print("Checking borders with %s"%label) + results=self._checkOneBorder(grid,condgrid,bits,pairList,thresholdInfo) + (returnvalue,totalnum,numchecked,numviolate, + avgbias,avgdiff,avgthresh)=results + if self._debug>=5: + print(" totalpoints :%d"%totalnum) + print(" numberchecked:%d"%numchecked) + print(" numviolate :%d"%numviolate) + if (numchecked>0): + print(" bias :%f"%(avgbias)) + print(" diff :%f"%(avgdiff)) + print(" threshold :%f"%(avgthresh)) + if listing==1: + self.list=self.list+" Avg Diff for %s is %7.2f (limit %7.2f) [%4d pairs - %4d failed] - "%(label,avgdiff,avgthresh,numchecked,numviolate) + if avgdiff>avgthresh: + if numcheckedconditionValue) + cond2=(value2>conditionValue) + elif conditionType=="greater_equal": + cond1=(value1>=conditionValue) + cond2=(value2>=conditionValue) + elif conditionType=="less": + cond1=(value1=10: + print("thresholdInfo=",thresholdInfo) + (thresholdType,thresholdValues,conditions,dirflag)=thresholdInfo + # + # Loop over each neighbors border + # + for (label,pairList) in self.pairInfo: + if self._debug>=5: + print("Checking borders with %s"%label) + for pair in pairList: + (x1,y1,x2,y2,topodiff,coast)=pair + if 
self._debug>=10: + print(" point %3d,%3d-->%3d,%3d"%(x1,y1,x2,y2)) + # + # get values across the border + # + value1=grid[y1,x1] + value2=grid[y2,x2] + # + # Get the difference across the border + # + diff=value1-value2 + if dirflag==1: + if diff>180.0: + diff=diff-360.0 + if diff<-180.0: + diff=diff+360.0 + absdiff=abs(diff) + # + # Get the threshold (which might depend on the values) + # + if thresholdType=="constant": + thresh=thresholdValues + elif thresholdType=="topo": + (elevation,low,high)=thresholdValues + if (topodiff<=elevation): + thresh=low + else: + thresh=high + elif thresholdType=="graduated": + minvalue=min(value1,value2) + thresh=thresholdValues[0] + for i in range(1,len(thresholdValues)): + (lessthan,newthresh)=thresholdValues[i] + if minvalue=10: + print(" %f %f diff:%f threshold:%f itPasses:%1d"%(value1,value2,diff,thresh,itPasses)) + # + # If too big a topodiff - or a coast - then it passes anyway + # + if ((topodiff>self.MAXTOPODIFF) or (coast==1)): + itPasses=1 + # + # If no ISC data - then it passes anyway + # available in ISC + # + if (bits[y2,x2]<0.5): + itPasses=1 + # + # Make sure conditions for checking are met + # + if (not self._meetConditions(conditions,x1,y1,x2,y2,condgrid)): + itPasses=1 + # + # Do nothing if this point passes and configuration + # indicates that nonViolators will not be displayed + # + if (maskNonViolators and itPasses): + continue + # + if absdiff>abs(diffGrid[y1,x1]): + diffGrid[y1,x1]=diff + return diffGrid + #================================================================= + # + # checkOneBorder - given a list of points in pairList, the threshold + # info, the grid and condition grid, and the bits + # that indicates where ISC data is available: + # then for each point: + # get the grid values, + # check that conditions are met, + # get difference, + # get threshold, + # check difference comapred to threshold and + # add to total differences, biases, etc. 
+ # + # return: + # code = 0 if border passed (even if no points) + # 1 if border violated but had MINPOINTS + # or fewer pairs + # 2 if border violated and had more than + # MINPOINTS pairs + # totalnum = total number of pairs along the + # border + # numchecked = total pairs checked along the + # border after skipping ones without + # ISC data (bits=0), topo difference + # higher than limit, or condgrid + # values not meeting condition + # numviolate = number of individual pairs that + # violated their threshold + # avgbias = average difference among pairs + # avgdiff = average absolute difference among pairs + # avgthresh = average threshold along border + # + # + def _checkOneBorder(self,grid,condgrid,bits,pairList,thresholdInfo): + (thresholdType,thresholdValues,conditions,dirflag)=thresholdInfo + totalnum=len(pairList) + numchecked=0 + numviolate=0 + biastotal=0.0 + difftotal=0.0 + threshtotal=0.0 + + for pair in pairList: + (x1,y1,x2,y2,topodiff,coast)=pair + if self._debug>=10: + print(" point %3d,%3d-->%3d,%3d"%(x1,y1,x2,y2)) + # + # no tests if too big of topodiff, or if a coastline pair + # + if ((topodiff>self.MAXTOPODIFF) or (coast==1)): + if self._debug>=10: + print(" skipped because of elevation or coastline") + continue + # + # no tests if bits of outside point indicate it is NOT + # available in ISC + # + if (bits[y2,x2]<0.5): + if self._debug>=10: + print(" skipped because ISC data not available") + continue + # + # get values across the border + # + value1=grid[y1,x1] + value2=grid[y2,x2] + # + # Make sure conditions for checking are met + # + if (not self._meetConditions(conditions,x1,y1,x2,y2,condgrid)): + if self._debug>=10: + print(" skipped because %f and %f did not meet the conditions:%s"%(value1,value2,conditions)) + continue + numchecked=numchecked+1 + # + # Get the difference across the border - add it to the + # total difference (biastotal) and total of absolute + # value of differences (difftotal) + # + diff=value1-value2 + if 
dirflag==1: + if diff>180.0: + diff=diff-360.0 + if diff<-180.0: + diff=diff+360.0 + absdiff=abs(diff) + biastotal=biastotal+diff + difftotal=difftotal+absdiff + # + # Get the threshold (which might depend on the values) + # Add it to the total of thresholds + # + if thresholdType=="constant": + thresh=thresholdValues + elif thresholdType=="topo": + (elevation,low,high)=thresholdValues + if (topodiff<=elevation): + thresh=low + else: + thresh=high + elif thresholdType=="graduated": + minvalue=min(value1,value2) + thresh=thresholdValues[0] + for i in range(1,len(thresholdValues)): + (lessthan,newthresh)=thresholdValues[i] + if minvalue=10: + print(" %f %f diff:%f threshold:%f itPasses:%1d"%(value1,value2,diff,thresh,itPasses)) + # + # Calculate average bias, average abs difference, average threshold + # + if numchecked>0: + avgbias=biastotal/numchecked + avgdiff=difftotal/numchecked + avgthresh=threshtotal/numchecked + else: + avgbias=0.0 + avgdiff=0.0 + avgthresh=0.0 + # + # Setup return value (0 if OK, 1 if failed but only a few points, 2 if failed) + # + returnvalue=0 + if ((avgdiff>avgthresh) and (avgthresh>0.0)): + if numchecked>self.MINPOINTS: + returnvalue=2 + else: + returnvalue=1 + return(returnvalue,totalnum,numchecked,numviolate,avgbias,avgdiff,avgthresh) + #========================================================================= + # + # _getBitsAndISC - a routine to get the ISC composite - broken out + # from the neighboringPoints routine so that it need not be called + # several times when looping over different edit areas (for each + # of the neighboring CWAs). 
+ # + def _getBitsAndISC(self,WEname,GridInfo,GridTimeRange): + isc = self._getBetterComposite(WEname, GridTimeRange) + if isc is None: + return None, None + # + # See if we are working with a Scalar or Vector element + # + wxType = GridInfo.type() + if GridType.SCALAR.equals(wxType): + bits, isc = isc + return bits,isc + elif GridType.VECTOR.equals(wxType): + bits, isc, direc = isc + return bits,(isc,direc) + else: + return None, None + #======================================================================== + # + # Essentially the same as the SmartScript getComposite routine + # but correctly handles multiple ISC grids within the timeRange + # of the grid you want. Can return None if no ISC or specified + # grids lie within the TimeRange specified. + # + # 2005-01-24 - Changed again because accumulative parms return + # different values from getComposite after IFPS16. + # + def _getBetterComposite(self,parmName, timeRange): + # + # Get the type, rateParm flag, and limits + # for the parameter name passed in. 
+ # + mutableID=self.mutableID() + baseGrid=self.getGrids(mutableID,parmName,"SFC",timeRange,noDataError=0) + if baseGrid is None: + return None + gridInfoList=self.getGridInfo(mutableID,parmName,"SFC",timeRange) + if (len(gridInfoList)<1): + return None + for gridInfo in gridInfoList: + wxType=gridInfo.type() + rateParm=gridInfo.rateParm() + minlimit=gridInfo.minLimit() + maxlimit=gridInfo.maxLimit() + # + # Make sure ISC grids exist for this parm + # + parm=self.getParm("ISC",parmName,"SFC") + if parm is None: + return None + # + # Get list of all ISC time-blocks that fit in the + # timerange of the specified GridTimeRange grid + # + iscInfos=self.getGridInfo("ISC",parmName,"SFC",timeRange) + if (len(iscInfos)<1): + return None + alltrs=[] + for info in iscInfos: + tr=info.gridTime() + alltrs.append(tr) + # + # setup sum/counter for average + # + if ((parmName=="MaxT")or(parmName=="PoP")): + sum=self.newGrid(-150.0) + elif (parmName=="MinT"): + sum=self.newGrid(150.0) + else: + sum=self.empty() + if GridType.VECTOR.equals(wxType): + sumv=self.empty() + cnt = self.empty() + # + # foreach time range...get the ISC composite for + # that hour + # + for tr in alltrs: + comp=self.getComposite(parmName,tr) + if comp[0].shape!=sum.shape: + continue + # + # Add to sums, or min/max + # + if GridType.SCALAR.equals(wxType): + bits,isc=comp + #isc=self.getGrids("ISC",parmName,"SFC",tr) + if parmName in ["MaxT", "PoP"]: + sum[bits] = numpy.maximum(isc,sum)[bits] + cnt[bits] = 1 + elif parmName=="MinT": + sum[bits] = numpy.minimum(isc,sum)[bits] + cnt[bits] = 1 + else: + sum[bits] += isc[bits] + cnt[bits] += 1 + if GridType.VECTOR.equals(wxType): + bits,mag,direc = comp + #(mag,direc)=self.getGrids("ISC",parmName,"SFC",tr) + (u,v)=self.MagDirToUV(mag,direc) + sum[bits] += u[bits] + sumv[bits] += v[bits] + cnt[bits] += 1 + if GridType.WEATHER.equals(wxType): + bits = comp + bits,keys,strings=comp + #(keys,strings)=self.getGrids("ISC",parmName,"SFC",tr) + # + # now calculate 
average/max/min, etc. + # (count is always 1 for max/min) + # + noISC=numpy.less(cnt,0.5) + bits=numpy.greater(cnt,0.5) + if GridType.SCALAR.equals(wxType) or GridType.VECTOR.equals(wxType): + cnt[numpy.less(cnt, 1)] = 1 + if GridType.VECTOR.equals(wxType): + sum /= cnt + sum[noISC]= minlimit + + sumv /= cnt + sumv[noISC] = minlimit + + (mag,direc)=self.UVToMagDir(sum,sumv) + (baseMag,baseDir)=baseGrid + mag[noISC] = baseMag[noISC] + direc[noISC] = baseDir[noISC] + return bits,mag,direc + else: + sum /= cnt + sum[noISC] = baseGrid[noISC] + return bits,sum + else: + return bits,keys,strings + #================================================================= + # makeTimeMsg - Make short string with time of this grid, usually + # something like "Mon (6/5) 12-18Z:", but can get + # complicated like "Mon (6/5) 18Z - Tue (6/6) 03Z:" + # + def makeTimeMsg(self,tr): + DAYS=("Monday","Tuesday","Wednesday","Thursday", + "Friday","Saturday","Sunday") + swdy=DAYS[time.gmtime(tr.startTime().unixTime())[6]] + sday=tr.startTime().day + smon=tr.startTime().month + sdate="%s (%d/%d)"%(swdy,smon,sday) + shou=tr.startTime().hour + ewdy=DAYS[time.gmtime(tr.endTime().unixTime())[6]] + eday=tr.endTime().day + emon=tr.endTime().month + edate="%s (%d/%d)"%(ewdy,emon,eday) + ehou=tr.endTime().hour + if (sdate==edate): + msg="%s %2.2d-%2.2dZ:"%(sdate,shou,ehou) + else: + msg="%s %2.2dZ - %s %2.2dZ:"%(sdate,shou,edate,ehou) + return msg + #================================================================ + # _getElementList - get sorted list of currently displayed mutable + # model elements (by default it excludes nonSCALAR and non- + # VECTOR elements - but you can include them with the flag) + # + def _getElementList(self,excludeWxDiscrete=1): + mutableModel=self.mutableID().modelName() + parmList = self._dbss.getParmManager().getDisplayedParms() + elementList = [] + for parm in parmList: + name = parm.expressionName() + model = parm.getParmID().getDbId().getModelName() + if model == 
mutableModel: + wxType = parm.getGridInfo().getGridType() + if ((excludeWxDiscrete == 1) and (GridType.SCALAR.equals(wxType) or \ + GridType.VECTOR.equals(wxType))): + elementList.append(parm.expressionName()) + elementList.sort() + return elementList + #================================================================ + # _getTimeRangeList - get list of all PublishTimes, + # plus "All Grids" and "Selected Time" added at the top of + # the list + # + def _getTimeRangeList(self): + trList = ["All Grids", "Selected Time"] + publishTimes = self.getConfigItem("PublishTimes",[]) + #inv = self._dbss.dataManager().selectTRMgr().inventory() + inv = self._dbss.getSelectTimeRangeManager().inventory() + for t in publishTimes: + if t in inv: + trList.append(t) + return trList + #================================================================ + # _convertTimeRange - given a timeRangeName - return the + # timeRange (including the bogus names "All Grids" and + # "Selected Time"). + # + def _convertTimeRange(self, trName): + if trName == "All Grids": + curTime = self.gmtime() + startHour = curTime[3] + timeRange = self.createTimeRange(startHour, 204, mode="Zulu") + #timeRange = TimeRange.allTimes() + # timeRange = AFPS.TimeRange(AFPS.AbsTime(0), + # AFPS.AbsTime_maxFutureTime()) + elif trName == "Selected Time": + selectedTime = self._dbss.getParmOp().getSelectionTimeRange() + if selectedTime is None: + return None + else: + tr = self._dbss.getParmOp().getSelectionTimeRange() + return TimeRange.TimeRange(tr.getStart(), tr.getEnd()) + else: + timeRange = self.getTimeRange(trName) + return timeRange + + def _getCachedGrid(self, name, category, timeLimit): + try: + object = self.getObject(name, category) + grid, timeWritten, geoInfo = object + if timeLimit != 0 and time.time() - timeWritten > timeLimit: + return None #too old + + # validate geoinfo + geo = self._dbss.getParmManager().compositeGridLocation() + if geoInfo != repr(geo): + return None #different geoinfo + return grid + 
except: + return None + + def _cacheGrid(self, name, grid, category): + geo = self._dbss.getParmManager().compositeGridLocation() + object = grid, time.time(), repr(geo) + self.saveObject(name, object, category) + + #======================================================================== + # + # _checkViolate makes a mask of points INSIDE the SITE_EDITAREA that + # have a difference with a neighbors gridpoints (defined by areamask) + # that have a magnitude greater than threshold). Uses bits and criteria + # just like other calcuations + # + def _checkViolate(self, bits, criteria, areamask, discGrid, threshold): + violate = self.empty(bool) + for i in range(4): # range(8) to consider diagonal neighbors + # + # make sure data exists for both points + # + bitshift=self.offset(bits,self._ishift[i],self._jshift[i]) + exist=logical_and(bitshift,bits) + # + # Make sure at least one of the points meets the criteria + # + critshift=self.offset(criteria,self._ishift[i],self._jshift[i]) + meetcrit=logical_or(critshift,criteria) + # + # Make sure it borders the specified area + # + areashift=self.offset(areamask,self._ishift[i],self._jshift[i]) + onborder=logical_and(areashift,self._siteAreaMask) + # + # Make sure it meets all criteria: exist, meetcrit, onborder + # and checkNDFD (meets topo thresholds) + # + mask=logical_and(logical_and(logical_and(exist,meetcrit),onborder),less(self._topodiff[i],self.MAXTOPODIFF)) + # + violate[logical_and(mask, greater(abs(discGrid), threshold))] = True + return violate + + + #======================================================================== + # + # _nbmask + # + # Replicate the NDFD neighboring points algorithm. It checks 'border + # pairs' where one point is 'inside' the CWA and the immediate + # neighbor (east-west-north-south) is 'outside' the CWA. A point + # inside the CWA might be used in several 'border pairs' - since it + # might have a neighbor outside the CWA to both the north and the west, + # etc. 
+ # + # A 'border pair' is compared if: + # . The elevation difference between the two points is less + # than 1000 feet. + # . Both points have ISC data available. + # . The point 'outside' must be inside the area specified by + # areamask (usually the mask of the CWA you are checking against + # + # After finding the differences for each of the remaining border pairs, + # It averages them, and returns the average border pair difference, the + # average elevation difference for the pairs, the overall average of the + # values, the minimum of the average of the values 'inside' or the values + # 'outside', and the number of pairs used in this average. + # + def _nbmask(self, bits, isc, criteria, areamask, dirtype=0): + totalpts=0 + totaldiff=0 + totaltopo=0 + totalvalue=0 + totalvaluein=0 + totalvalueout=0 + for i in range(4): # range(8) to consider diagonal neighbors + # + # make sure data exists for both points + # + bitshift=self.offset(bits,self._ishift[i],self._jshift[i]) + exist=logical_and(bitshift,bits) + # + # Make sure at least one of the points meets the criteria + # + critshift=self.offset(criteria,self._ishift[i],self._jshift[i]) + meetcrit=logical_or(critshift,criteria) + # + # Make sure it borders the specified area + # + areashift=self.offset(areamask,self._ishift[i],self._jshift[i]) + onborder=logical_and(areashift,self._siteAreaMask) + # + # Make sure it meets all criteria: exist, meetcrit, onborder + # and elevation difference less than MAXTOPODIFF (meets topo thresholds) + # + mask=logical_and(logical_and(logical_and(exist,meetcrit),onborder),less(self._topodiff[i],self.MAXTOPODIFF)) + # + # Shift the data the directions + # + shift=self.offset(isc,self._ishift[i],self._jshift[i]) + # + # Get the difference (different for directions) + # + if dirtype==1: + d1=self._dirdiff(isc,shift) + else: + d1=abs(isc-shift) + # + # Get number of points + # + pts=add.reduce(add.reduce(mask)) + # + # get sum of differences for each shift direction + # + 
diff=sum(compress(mask.flat,d1.flat)) + # + # get sum of topo differences + # + topo=sum(compress(mask.flat,self._topodiff[i].flat)) + # + # get sums for points inside/outside + # + totalout=sum(compress(mask.flat,shift.flat)) + totalin=sum(compress(mask.flat,isc.flat)) + totalpts=totalpts+pts + totaldiff=totaldiff+diff + totaltopo=totaltopo+topo + totalvalue=totalvalue+totalout+totalin + totalvaluein=totalvaluein+totalin + totalvalueout=totalvalueout+totalout + # + # calculate total average + # + if (totalpts>0): + avg=totaldiff/totalpts + topoavg=totaltopo/totalpts + avgvalue=totalvalue/(totalpts*2) + avgvaluein=totalvaluein/totalpts + avgvalueout=totalvalueout/totalpts + minavgvalue=min(avgvaluein,avgvalueout) + else: + avg=0 + topoavg=0 + avgvalue=0 + minavgvalue=0 + return(avg,topoavg,avgvalue,minavgvalue,totalpts) + + #======================================================================== + # + # Get the smallest direction difference between two directions + # + def _dirdiff(self,dir1,dir2): + diff=abs(dir1-dir2) + less1=less(dir1,dir2) + diff=where(greater(diff,180.0),where(less1,dir1+360.0-dir2,dir2+360.0-dir1),diff) + return diff + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/IToolInterface.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/IToolInterface.py index 02e1a9abd3..4f7a9b71a0 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/IToolInterface.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/IToolInterface.py @@ -1,85 +1,85 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +# +# Globally import and sets up instances of the itool scripts. +# Designed to be used as a master controller for inspecting and running +# itools from Java. +# +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 04/21/09 njensen Initial Creation. +# # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# Globally import and sets up instances of the itool scripts. -# Designed to be used as a master controller for inspecting and running -# itools from Java. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 04/21/09 njensen Initial Creation. -# -# -# - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. 
-## - -import sys -import MasterInterface -import Exceptions - -class IToolInterface(MasterInterface.MasterInterface): - - def __init__(self, scriptPath): - MasterInterface.MasterInterface.__init__(self) - self.importModules(scriptPath) - - - def getScripts(self, menu): - from java.util import HashSet - scriptList = HashSet() - for script in self.scripts: - scriptList.add(str(script)) - return scriptList - - def getStartupErrors(self): - from java.util import ArrayList - errorList = ArrayList() - for err in self.getImportErrors(): - errorList.add(str(err)) - return errorList - - def runITool(self, moduleName, className, methodName, **kwargs): - try: - print kwargs - return self.runMethod(moduleName, className, methodName, **kwargs) - except Exceptions.EditActionError, e: - msg = e.errorType() + ": " + e.errorInfo() - raise RuntimeError(msg) - - def getVariableList(self, name): - result = None - if hasattr(sys.modules[name], "VariableList"): - result = sys.modules[name].VariableList - return result - - def getVariableListInputs(self, name): - varList = self.getVariableList(name) - return self.runMethod(name, "ITool", "getVariableListInputs", VariableList=varList) - +# + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +import sys +import MasterInterface +import Exceptions + +class IToolInterface(MasterInterface.MasterInterface): + + def __init__(self, scriptPath): + MasterInterface.MasterInterface.__init__(self) + self.importModules(scriptPath) + + + def getScripts(self, menu): + from java.util import HashSet + scriptList = HashSet() + for script in self.scripts: + scriptList.add(str(script)) + return scriptList + + def getStartupErrors(self): + from java.util import ArrayList + errorList = ArrayList() + for err in self.getImportErrors(): + errorList.add(str(err)) + return errorList + + def runITool(self, moduleName, className, methodName, **kwargs): + try: + print(kwargs) + return self.runMethod(moduleName, className, methodName, **kwargs) + except Exceptions.EditActionError as e: + msg = e.errorType() + ": " + e.errorInfo() + raise RuntimeError(msg) + + def getVariableList(self, name): + result = None + if hasattr(sys.modules[name], "VariableList"): + result = sys.modules[name].VariableList + return result + + def getVariableListInputs(self, name): + varList = self.getVariableList(name) + return self.runMethod(name, "ITool", "getVariableListInputs", VariableList=varList) + \ No newline at end of file diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/MakeHazardConfig.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/MakeHazardConfig.py index 745a7a8a9c..f3f969a00e 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/MakeHazardConfig.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/MakeHazardConfig.py @@ -1,192 +1,192 @@ -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# MakeHazard.py -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Jul 10,2012 436 randerso Separated configuration data from the -# MakeHazard procedure -# Jul 29, 2015 17770 lshi Add tcmList template for WP basin -# Jun 23, 2017 6138 dgilling Changes for Winter Weather VTEC -# consolidation. -# -# Author: randerso -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -def sortHazardList(dict): - #sorts the entries in the menus in alphabetical order, returns sorted - #dictionary - import VTECTable - for ent in dict.keys(): - values = dict[ent] - # get the descriptive word for this phen/sig - items = [] - for v in values: - desc = VTECTable.VTECTable.get(v,'') - items.append((desc, v)) - items.sort() #sorts by description - #extract out the sorted phen/sig - phensig = [] - for desc, v in items: - phensig.append(v) - dict[ent] = phensig - - return dict - -# Lists of hazards organized by type in a dictionary -# Set these to value you use for your site. To minimize scrolling, -# change the order so that the most common values your site uses are -# near the front of each list. The key is the menu entry on the -# Make Hazard dialog, the values are the key values for Hazards. 
- -# Using OrderedDict allows you to control the order in which the -# Hazard Types are displayed in the dialog -# -from collections import OrderedDict -hazardDict = OrderedDict([ - ('Winter Weather', ["BZ.W", "IS.W", "LE.W", - "WC.Y", "WC.W", "WC.A", "WS.W", "WS.A", "WW.Y"]), - ('Hydrology', ["FF.A", "FA.A"]), - ('Fire Weather', ["FW.A", "FW.W"]), - ('Convective Watches', ["SV.A", "TO.A"]), - ('Coastal Flood', ["CF.S", "LS.S", "CF.Y", "CF.W", "CF.A", - "SU.Y", "SU.W", "LS.Y", "LS.W", "LS.A", "BH.S", "RP.S"]), - ('Non-Precipitation', ["AF.W", "AF.Y", "AQ.Y", "AS.O", "AS.Y", "DU.W", - "DU.Y", "EH.W", "EH.A", "EC.W", "EC.A", "FG.Y", "FZ.W", "FZ.A", - "HZ.W", "HZ.A", "ZF.Y", "FR.Y", "HT.Y", "HW.W", "HW.A", - "LW.Y", "SM.Y", "WI.Y"]), - ('Marine', ["MA.S", "MH.W", "MH.Y", "BW.Y", "UP.Y", "MF.Y", - "GL.A", "GL.W", "SE.A", "SE.W", "UP.A", "UP.W", "HF.A", "HF.W", "LO.Y", "SC.Y", "SW.Y", - "RB.Y", "SI.Y", "MS.Y", "SR.A", "SR.W"]), - ('Tropical Cyclone', ["HU.W", "HU.A", "TR.W", "TR.A"]), - ('Tsunami', ["TS.A", "TS.W", "TS.Y"]), - - # ('Local', ["TEST"]), #example of adding local hazards - # you can define your own groups of hazards by adding new categories - ]) - -# for GUM use comment out the above definition and uncomment the one below - -#hazardDict = OrderedDict([ -# ('Hydrology', ["FF.A", "FA.A"]), -# ('Fire Weather', ["FW.A", "FW.W"]), -# ('Coastal Flood', ["CF.S", "LS.S", "CF.Y", "CF.W", "CF.A", -# "SU.Y", "SU.W", "LS.Y", "LS.W", "LS.A", "RP.S", "BH.S"]), -# ('Non-Precipitation', ["AF.W", "AF.Y", "AQ.Y", "AS.O", "AS.Y", "DU.W", -# "DU.Y", "EH.W", "EH.A", "EC.W", "EC.A", "FG.Y", "FZ.W", "FZ.A", -# "HZ.W", "HZ.A", "ZF.Y", "FR.Y", "HT.Y", "HW.W", "HW.A", -# "LW.Y", "SM.Y", "WI.Y"]), -# ('Marine', ["MA.S", "MH.W", "MH.Y", "BW.Y", "UP.Y", "MF.Y", -# "GL.A", "GL.W", "SE.A", "SE.W", "UP.A", "UP.W", "HF.A", "HF.W", "LO.Y", "SC.Y", "SW.Y", -# "RB.Y", "SI.Y", "MS.Y", "SR.A", "SR.W"]), -# ('Typhoon', ["TY.A", "TY.W", "TR.A", "TR.W", "HU.S"]), -# ('Tsunami', ["TS.A", "TS.W"]), 
-# -# # ('Local', ["TEST"]), #example of adding local hazards -# # you can define your own groups of hazards by adding new categories -# ]) - - -# This function sorts the hazards in the hazardDict by description. -# Comment it out if this is not desired. -hazardDict = sortHazardList(hazardDict) - - # Dictionary of map categories and the map names. The "" is - # substituted with your site name. The names of the map must match - # those defined in the ifpServer. The keys in mapNames must match - # the keys in hazardDict. - -mapNames = { - 'Fire Weather' : ["FireWxZones_"], - 'Hydrology' : ["Zones_"], - 'Coastal Flood': ["Zones_"], - 'Convective Watches' : ["Marine_Zones_","FIPS_"], - 'Non-Precipitation' : ["Zones_"], - 'Tropical Cyclone' : ["Offshore_Marine_Zones_", - "Marine_Zones_","Zones_"], - 'Typhoon' : ["Offshore_Marine_Zones_", - "Marine_Zones_","Zones_"], - 'Tsunami' : ["Offshore_Marine_Zones_", - "Marine_Zones_","Zones_"], - 'Winter Weather' : ["Zones_"], - 'Marine' : ["Offshore_Marine_Zones_", - "Marine_Zones_"], - #'Local' : ["Zones_"], #example of adding local class - } - -# The defaultHazardType - selected when the tool is first run. This -# must be one of the categories (keys) in the mapNames and hazardDict. 
-defaultHazardType = "Non-Precipitation" - -# this is the color for the selected areas in the map -mapColor = "red" # color of selected areas - -# initial map width -defaultMapWidth = 400; - -# the percentage that an area must be covered to default to selected -areaThreshold = 0.10 - -# End time in hours of the time scales -timeScaleEndTime = 96 - -# Define the tropical product used to identify the particular storm -tcmList = [] # Comment out for HLS sites - -# Uncomment line below for Atlantic basin sites -#tcmList = ["TCMAT1", "TCMAT2", "TCMAT3", "TCMAT4", "TCMAT5"] - -# Uncomment line below for EPac basin sites -#tcmList = ["TCMEP1", "TCMEP2", "TCMEP3", "TCMEP4", "TCMEP5"] - -# Uncomment line below for CPac basin sites -#tcmList = ["TCMCP1", "TCMCP2", "TCMCP3", "TCMCP4", "TCMCP5"] - -# Uncomment line below for WPac basin sites -#tcmList = ["TCPPQ1", "TCPPQ2", "TCPPQ3", "TCPPQ4", "TCPPQ5"] - -# Dictionary mapping Hazard Types to applicable local effect areas -# that can be intersected with the zone edit areas. -# You should not define localEffectAreas entries for Tropical Cyclone -# or Convective Watches. -localEffectAreas = {} - -#localEffectAreas = { -# 'Winter Weather' : ["Below_1000","Below_1500","Below_2000","Below_2500","Below_3000","Below_3500","Below_4000", -# "Above_1000","Above_1500","Above_2000","Above_2500","Above_3000","Above_3500"], -# } - -# Dictionary associating local Effect Area names with a corresponding -# segment number, display name, and list of zones to be auto-selected -# If you do not wish to auto-select zones you should supply an empty list -# -# The display name allows you to display a "pretty" string in the UI rather -# than the edit area name. If the display name is empty ("") the edit area -# name will be used. 
-localAreaData = {} - -#localAreaData = { -# "Below_1000" : ( 999, "", []), -# "Below_1500" : (1499, "", []), -# "Below_2000" : (1999, "", []), -# "Below_2500" : (2499, "", []), -# "Below_3000" : (2999, "", []), -# "Below_3500" : (3499, "", []), -# "Below_4000" : (3999, "", []), -# "Above_1000" : (1000, "", []), -# "Above_1500" : (1500, "", []), -# "Above_2000" : (2000, "", []), -# "Above_2500" : (2500, "", []), -# "Above_3000" : (3000, "", []), -# "Above_3500" : (3500, "", []), -# } - +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# MakeHazard.py +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jul 10,2012 436 randerso Separated configuration data from the +# MakeHazard procedure +# Jul 29, 2015 17770 lshi Add tcmList template for WP basin +# Jun 23, 2017 6138 dgilling Changes for Winter Weather VTEC +# consolidation. +# +# Author: randerso +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +def sortHazardList(dict): + #sorts the entries in the menus in alphabetical order, returns sorted + #dictionary + import VTECTable + for ent in list(dict.keys()): + values = dict[ent] + # get the descriptive word for this phen/sig + items = [] + for v in values: + desc = VTECTable.VTECTable.get(v,'') + items.append((desc, v)) + items.sort() #sorts by description + #extract out the sorted phen/sig + phensig = [] + for desc, v in items: + phensig.append(v) + dict[ent] = phensig + + return dict + +# Lists of hazards organized by type in a dictionary +# Set these to value you use for your site. 
To minimize scrolling, +# change the order so that the most common values your site uses are +# near the front of each list. The key is the menu entry on the +# Make Hazard dialog, the values are the key values for Hazards. + +# Using OrderedDict allows you to control the order in which the +# Hazard Types are displayed in the dialog +# +from collections import OrderedDict +hazardDict = OrderedDict([ + ('Winter Weather', ["BZ.W", "IS.W", "LE.W", + "WC.Y", "WC.W", "WC.A", "WS.W", "WS.A", "WW.Y"]), + ('Hydrology', ["FF.A", "FA.A"]), + ('Fire Weather', ["FW.A", "FW.W"]), + ('Convective Watches', ["SV.A", "TO.A"]), + ('Coastal Flood', ["CF.S", "LS.S", "CF.Y", "CF.W", "CF.A", + "SU.Y", "SU.W", "LS.Y", "LS.W", "LS.A", "BH.S", "RP.S"]), + ('Non-Precipitation', ["AF.W", "AF.Y", "AQ.Y", "AS.O", "AS.Y", "DU.W", + "DU.Y", "EH.W", "EH.A", "EC.W", "EC.A", "FG.Y", "FZ.W", "FZ.A", + "HZ.W", "HZ.A", "ZF.Y", "FR.Y", "HT.Y", "HW.W", "HW.A", + "LW.Y", "SM.Y", "WI.Y"]), + ('Marine', ["MA.S", "MH.W", "MH.Y", "BW.Y", "UP.Y", "MF.Y", + "GL.A", "GL.W", "SE.A", "SE.W", "UP.A", "UP.W", "HF.A", "HF.W", "LO.Y", "SC.Y", "SW.Y", + "RB.Y", "SI.Y", "MS.Y", "SR.A", "SR.W"]), + ('Tropical Cyclone', ["HU.W", "HU.A", "TR.W", "TR.A"]), + ('Tsunami', ["TS.A", "TS.W", "TS.Y"]), + + # ('Local', ["TEST"]), #example of adding local hazards + # you can define your own groups of hazards by adding new categories + ]) + +# for GUM use comment out the above definition and uncomment the one below + +#hazardDict = OrderedDict([ +# ('Hydrology', ["FF.A", "FA.A"]), +# ('Fire Weather', ["FW.A", "FW.W"]), +# ('Coastal Flood', ["CF.S", "LS.S", "CF.Y", "CF.W", "CF.A", +# "SU.Y", "SU.W", "LS.Y", "LS.W", "LS.A", "RP.S", "BH.S"]), +# ('Non-Precipitation', ["AF.W", "AF.Y", "AQ.Y", "AS.O", "AS.Y", "DU.W", +# "DU.Y", "EH.W", "EH.A", "EC.W", "EC.A", "FG.Y", "FZ.W", "FZ.A", +# "HZ.W", "HZ.A", "ZF.Y", "FR.Y", "HT.Y", "HW.W", "HW.A", +# "LW.Y", "SM.Y", "WI.Y"]), +# ('Marine', ["MA.S", "MH.W", "MH.Y", "BW.Y", "UP.Y", "MF.Y", +# 
"GL.A", "GL.W", "SE.A", "SE.W", "UP.A", "UP.W", "HF.A", "HF.W", "LO.Y", "SC.Y", "SW.Y", +# "RB.Y", "SI.Y", "MS.Y", "SR.A", "SR.W"]), +# ('Typhoon', ["TY.A", "TY.W", "TR.A", "TR.W", "HU.S"]), +# ('Tsunami', ["TS.A", "TS.W"]), +# +# # ('Local', ["TEST"]), #example of adding local hazards +# # you can define your own groups of hazards by adding new categories +# ]) + + +# This function sorts the hazards in the hazardDict by description. +# Comment it out if this is not desired. +hazardDict = sortHazardList(hazardDict) + + # Dictionary of map categories and the map names. The "" is + # substituted with your site name. The names of the map must match + # those defined in the ifpServer. The keys in mapNames must match + # the keys in hazardDict. + +mapNames = { + 'Fire Weather' : ["FireWxZones_"], + 'Hydrology' : ["Zones_"], + 'Coastal Flood': ["Zones_"], + 'Convective Watches' : ["Marine_Zones_","FIPS_"], + 'Non-Precipitation' : ["Zones_"], + 'Tropical Cyclone' : ["Offshore_Marine_Zones_", + "Marine_Zones_","Zones_"], + 'Typhoon' : ["Offshore_Marine_Zones_", + "Marine_Zones_","Zones_"], + 'Tsunami' : ["Offshore_Marine_Zones_", + "Marine_Zones_","Zones_"], + 'Winter Weather' : ["Zones_"], + 'Marine' : ["Offshore_Marine_Zones_", + "Marine_Zones_"], + #'Local' : ["Zones_"], #example of adding local class + } + +# The defaultHazardType - selected when the tool is first run. This +# must be one of the categories (keys) in the mapNames and hazardDict. 
+defaultHazardType = "Non-Precipitation" + +# this is the color for the selected areas in the map +mapColor = "red" # color of selected areas + +# initial map width +defaultMapWidth = 400; + +# the percentage that an area must be covered to default to selected +areaThreshold = 0.10 + +# End time in hours of the time scales +timeScaleEndTime = 96 + +# Define the tropical product used to identify the particular storm +tcmList = [] # Comment out for HLS sites + +# Uncomment line below for Atlantic basin sites +#tcmList = ["TCMAT1", "TCMAT2", "TCMAT3", "TCMAT4", "TCMAT5"] + +# Uncomment line below for EPac basin sites +#tcmList = ["TCMEP1", "TCMEP2", "TCMEP3", "TCMEP4", "TCMEP5"] + +# Uncomment line below for CPac basin sites +#tcmList = ["TCMCP1", "TCMCP2", "TCMCP3", "TCMCP4", "TCMCP5"] + +# Uncomment line below for WPac basin sites +#tcmList = ["TCPPQ1", "TCPPQ2", "TCPPQ3", "TCPPQ4", "TCPPQ5"] + +# Dictionary mapping Hazard Types to applicable local effect areas +# that can be intersected with the zone edit areas. +# You should not define localEffectAreas entries for Tropical Cyclone +# or Convective Watches. +localEffectAreas = {} + +#localEffectAreas = { +# 'Winter Weather' : ["Below_1000","Below_1500","Below_2000","Below_2500","Below_3000","Below_3500","Below_4000", +# "Above_1000","Above_1500","Above_2000","Above_2500","Above_3000","Above_3500"], +# } + +# Dictionary associating local Effect Area names with a corresponding +# segment number, display name, and list of zones to be auto-selected +# If you do not wish to auto-select zones you should supply an empty list +# +# The display name allows you to display a "pretty" string in the UI rather +# than the edit area name. If the display name is empty ("") the edit area +# name will be used. 
+localAreaData = {} + +#localAreaData = { +# "Below_1000" : ( 999, "", []), +# "Below_1500" : (1499, "", []), +# "Below_2000" : (1999, "", []), +# "Below_2500" : (2499, "", []), +# "Below_3000" : (2999, "", []), +# "Below_3500" : (3499, "", []), +# "Below_4000" : (3999, "", []), +# "Above_1000" : (1000, "", []), +# "Above_1500" : (1500, "", []), +# "Above_2000" : (2000, "", []), +# "Above_2500" : (2500, "", []), +# "Above_3000" : (3000, "", []), +# "Above_3500" : (3500, "", []), +# } + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/MyDialog.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/MyDialog.py index 16d6bd76e1..13a939563f 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/MyDialog.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/MyDialog.py @@ -1,132 +1,132 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-# -# MyDialog.py -# Template class for creating a message or dialog -# -# Author: hansen -# ---------------------------------------------------------------------------- - -import Tkinter - -# To use this dialog: -# -- Create "MyDialog" as a Utility in the Edit Actions Dialog -# Utilities window. -# -- At the beginning of your file, include: -# import MyDialog -# -- In your procedure or smart tool (preProcessTool section): -# dialog = MyDialog.MyDialog(None, "Title", "Message") -# -- When you want to close the dialog (e.g. at end of the procedure, -# or in postProcessTool): -# dialog.destroy() -# -# For example, here is a Procedure that displays an "In Progress" -# dialog: -# -## import MyDialog -## import SmartScript - -## class Procedure (SmartScript.SmartScript): -## def __init__(self, dbss): -## SmartScript.SmartScript.__init__(self, dbss) - -## def execute(self, editArea, timeRange, varDict): -## # Put up In Progress dialog -## dialog = MyDialog.MyDialog(None,"Status","Procedure in Progress") - -## self.copy(elements, 120, 240, 'MRF') -## self.copy(elements, 61, 120, 'AVN') -## self.copy(elements, begintime, 60, 'NAM') - -## # Destroy In Progress dialog -## dialog.top().destroy() - -class MyDialog(Tkinter.Tk): - - def __init__(self, parent, title, message, callback=None): - - Tkinter.Tk.__init__(self, parent) - - self.transient(parent) - self.__parent = parent - self.withdraw() - - # Create the dialog - self.__top = Tkinter.Toplevel(self.__parent) - self.__top.title(title) - - # Center the dialog on the screen - xoff = (self.__top.winfo_screenwidth() / 2) - (self.__top.winfo_reqwidth() / 2) - yoff = (self.__top.winfo_screenheight() / 2) - (self.__top.winfo_reqheight() / 2) - self.__top.wm_geometry("+%d+%d" % (xoff, yoff)) - - # Create Label text in the dialog - labelFont = "-b&h-helvetica-bold-i-*-18-*" - self.__l = Tkinter.Label(self.__top, text=message, font=labelFont) - self.__l.pack(padx=5) - - #Include these lines if you want your dialog to have - # an 
entry field and Ok button - #The callback (set up by your calling program) will be - # called when the OK button is clicked. It can be used to - # send the results of the user entries back to your procedure - # or smart tool. - -## self.__callback = callback -## self.__e = Tkinter.Entry(top) -## self.__e.pack(padx=5) -## b = Tkinter.Button(self.__top, text="OK", command=self.ok) -## b.pack(pady=5) - - self.protocol("WM_DELETE_WINDOW", self.closeCB) - - self.__top.update_idletasks() - - def ok(self): - print "value is", self.__e.get() - # Set up a variable dictionary and notify the - # the calling program of the results - if self.__callback is not None: - varDict = {} - varDict["Entry Value"] = self.__e.get() - self.__callback(varDict) - self.destroy() - - def closeCB(self): - self.destroy() - self.quit() - -def myCB(varDict): - entry = varDict["Entry Value"] - print "Entry value", entry - -# The following is executed when testing this dialog by entering -# the following from the command line: -# python MyDialog.py - -if __name__ == "__main__": - d = MyDialog(None, "Status","Procedure In Progress", callback=myCB) - x = input("Enter 0 to Close Dialog: ") - d.destroy() - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# MyDialog.py +# Template class for creating a message or dialog +# +# Author: hansen +# ---------------------------------------------------------------------------- + +import tkinter + +# To use this dialog: +# -- Create "MyDialog" as a Utility in the Edit Actions Dialog +# Utilities window. +# -- At the beginning of your file, include: +# import MyDialog +# -- In your procedure or smart tool (preProcessTool section): +# dialog = MyDialog.MyDialog(None, "Title", "Message") +# -- When you want to close the dialog (e.g. at end of the procedure, +# or in postProcessTool): +# dialog.destroy() +# +# For example, here is a Procedure that displays an "In Progress" +# dialog: +# +## import MyDialog +## import SmartScript + +## class Procedure (SmartScript.SmartScript): +## def __init__(self, dbss): +## SmartScript.SmartScript.__init__(self, dbss) + +## def execute(self, editArea, timeRange, varDict): +## # Put up In Progress dialog +## dialog = MyDialog.MyDialog(None,"Status","Procedure in Progress") + +## self.copy(elements, 120, 240, 'MRF') +## self.copy(elements, 61, 120, 'AVN') +## self.copy(elements, begintime, 60, 'NAM') + +## # Destroy In Progress dialog +## dialog.top().destroy() + +class MyDialog(tkinter.Tk): + + def __init__(self, parent, title, message, callback=None): + + tkinter.Tk.__init__(self, parent) + + self.transient(parent) + self.__parent = parent + self.withdraw() + + # Create the dialog + self.__top = tkinter.Toplevel(self.__parent) + self.__top.title(title) + + # Center the dialog on the screen + xoff = (self.__top.winfo_screenwidth() / 2) - (self.__top.winfo_reqwidth() / 2) + yoff = (self.__top.winfo_screenheight() / 2) - (self.__top.winfo_reqheight() / 2) + self.__top.wm_geometry("+%d+%d" 
% (xoff, yoff)) + + # Create Label text in the dialog + labelFont = "-b&h-helvetica-bold-i-*-18-*" + self.__l = tkinter.Label(self.__top, text=message, font=labelFont) + self.__l.pack(padx=5) + + #Include these lines if you want your dialog to have + # an entry field and Ok button + #The callback (set up by your calling program) will be + # called when the OK button is clicked. It can be used to + # send the results of the user entries back to your procedure + # or smart tool. + +## self.__callback = callback +## self.__e = Tkinter.Entry(top) +## self.__e.pack(padx=5) +## b = Tkinter.Button(self.__top, text="OK", command=self.ok) +## b.pack(pady=5) + + self.protocol("WM_DELETE_WINDOW", self.closeCB) + + self.__top.update_idletasks() + + def ok(self): + print("value is", self.__e.get()) + # Set up a variable dictionary and notify the + # the calling program of the results + if self.__callback is not None: + varDict = {} + varDict["Entry Value"] = self.__e.get() + self.__callback(varDict) + self.destroy() + + def closeCB(self): + self.destroy() + self.quit() + +def myCB(varDict): + entry = varDict["Entry Value"] + print("Entry value", entry) + +# The following is executed when testing this dialog by entering +# the following from the command line: +# python MyDialog.py + +if __name__ == "__main__": + d = MyDialog(None, "Status","Procedure In Progress", callback=myCB) + x = eval(input("Enter 0 to Close Dialog: ")) + d.destroy() + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ObjAnal.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ObjAnal.py index 02561ffd06..f4835b2e5b 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ObjAnal.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ObjAnal.py @@ -1,1154 +1,1154 @@ -# ---------------------------------------------------------------------------- -# SVN: $Revision: 134 $ $Date: 2010-08-26 17:32:30 +0000 (Thu, 26 Aug 2010) $ -# 
-# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# ObjAnal - version 2.12 - various Objective Analysis routines -# -# Author: Tim Barker - SOO Boise, ID -# -# 2014/10/06 - Version 2.12. Fix typo with timetupe in logtime which handles -# when running in simulations. -# 2014/08/31 - Version 2.11. Get rid of debug print statement that shouldn't -# have been there in the first place. -# 2014/07/28 - Version 2.10. Fix issues when ActualElev=1 and landMask is -# used, and a control point near the edge of the landMask has -# an elevation that is wildly different than the grid elevation -# at at that location. Also introduce the concept of a 'missing' -# elevation value for the point obs. If the elevation is missing -# the code will use the grid elevation - regardless of the -# setting of ActualElev. Defaults to -500ft. Can be changed -# with new setMissingElevThreshold routine (but doubt anybody will) -# 2014/03/20 - Version 2.8. Better import of numpy. Used SmartScript for -# _gmtime instead of time module (for more effective playback) -# 2014/01/10 - Version 2.7. Fixed copy of self._empty -# 2013/12/03 - Version 2.6. Fixed a typo in the ActualElev code, and made -# using ActualElev the default. -# 2013/05/04 - Version 2.5. Tweaked the code a bit more when using Serp -# and actual_elev=1. Does a better job of estimating what -# the grid WOULD have at the ob elevation - by finding a best -# match among surrounding gridpoints, rather than a value -# vs. elevation regression. -# 2012/09/11 - Version 2.4. Fixed a bug where consecutive calls to Serp -# using different points incorrectly tried to use the cached -# point data the second time through - and could crash the -# calculations. -# 2012/08/15 - Version 2.3. Added configuration element to control size of -# cache for Serp distance grids. Trimmed memory usage in Serp -# a little more. 
Changed sense of Verbose logging. Changed to -# CamelCase for config parameters. -# 2012/06/02 - Version 2.2 - Added code to produce better analyses when -# using ActualElev=1. Now estimates what the grid "would" -# have at that elevation at that gridpoint. This makes the -# magnitude of changes needed much more reasonable. In Serp -# routine, a final step to match the point obs exactly was -# added at the end. Added some memory enhancements in Serp. -# 2011/03/11 - Handle AWIPS-2 vector grids now being LISTS, instead of Tuples. -# 2010/07/30 - AWIPS 2 port by Paul Jendrowski -# 2007/07/10 - Add code for Barnes and Inverse Distance Squared (most of -# the code came from Ken Pomeroy and Chris Gibson). -# 2007/06/17 - Add code for handling a land/sea mask. Essentially just -# makes gridpoints not on the same (land or sea) appear to -# be MUCH further apart. -# 2006/10/10 - Reduce memory in the Serp routines -# ---------------------------------------------------------------------------- - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import numpy as np -import SmartScript -import sys,types,math,os,gc -import numpy.linalg as LinearAlgebra - -class ObjAnal(SmartScript.SmartScript): - def __init__(self, dataMgr, mdMode=None, toolType="numeric"): - SmartScript.SmartScript.__init__(self,dataMgr) - self.verbose=0 - # - # speed up distance calculations with vectors of I/J coords - # - gridsize=self.getGridShape() - ij=np.indices(gridsize,dtype=np.float32) - i=ij[1] - self.Irow=i[0,:] - j=ij[0] - self.Jrow=j[:,0] - # - # Size of output grid is based on GFE gridsize - # - self.ymax=self.getGridShape()[0] - self.xmax=self.getGridShape()[1] - self.gridres=self.getGridSpacing() - # - # If ActualElev=1...then use the station elevation for elevation - # related calculations. 
- # otherwise.......use the elevation of the gridpoint that - # contains the station for elevation related - # calculations - # However...if the station elevation is lower than the missing - # elevation Threshold, then use the grid elevation - # even if ActualElev is equal to 1. - # - self.ActualElev=1 - self.MissingElevThreshold=-500 - # - # Default Serp parameters - # Cache (500 by default) (between 0 and 1000) amount of memory - # (in MB) allowed for saving distance grids between Serp - # calls. - # Elevfactor - the elevation factor used in the previous Serp - # analysis - # SerpXYgrids - the cache of distance grids saved between Serp - # runs - # - self.SerpLastPoints=0 - self.SerpCache=500 - self.SerpXYgrids={} - self.SerpElevfactor=-1.0 - # - # Default Barnes parameters - # Gamma (0.3 by default) (should be between 0.1 and 1.0) - # Spacing (calculated by default) wavelength below which - # data will be filtered. - # - self.BarnesGamma=0.3 - self.BarnesSpacing=-1 # negative value forces calculation - # - # Default DSquared parameters - # Dist --- minimum radius around a gridpoint to search for - # station data to use in the weighted average - # MaxPoints - maximum number of stations to use in the - # weighted average for a gridpoint. - # - self.DSquaredDist=-1 - self.DSquaredMaxPoints=-1 - - return - - #--------------------------------------------------------------------------- - # ObjectiveAnalysis - perform an objective analysis of the point values, - # using the specified guess grid. If the guess grid is a vector type - # then both the point values and grids are handled differently. 
- # - def ObjectiveAnalysis(self,values,guessGrid,analysisType, - elevfactor=0.0,topoGrid=None,landMask=None): - self.logtime("Performing %s analysis"%analysisType,1) - self.logtime("Mem usage at start of ObjectiveAnalysis: %d"%memory(),5) - if topoGrid is None: - topoGrid=self.getTopo() - if landMask is None: - landMask=self.newGrid(True, bool) - values=self.removeDuplicates(values) - gridType=type(guessGrid) - if ((gridType is not types.TupleType)and(gridType is not types.ListType)): - new=self.ObjectiveAnalysisScalar(values,guessGrid,analysisType, - elevfactor,topoGrid, - landMask) - self.logtime("Mem usage at end of ObjectiveAnalysis: %d"%memory(),5) - return new - else: # vector - uvalues=[] - vvalues=[] - for i in range(len(values)): - (name,x,y,elev,spd,direc)=values[i] - (u,v)=self.MagDirToUV(spd,direc) - uvalues.append((name,x,y,elev,u)) - vvalues.append((name,x,y,elev,v)) - (spdgrid,dirgrid)=guessGrid - (uguess,vguess)=self.MagDirToUV(spdgrid,dirgrid) - # - unew=self.ObjectiveAnalysisScalar(uvalues,uguess,analysisType, - elevfactor,topoGrid, - landMask=landMask) - vnew=self.ObjectiveAnalysisScalar(vvalues,vguess,analysisType, - elevfactor,topoGrid, - landMask) - (newspd,newdir)=self.UVToMagDir(unew,vnew) - self.logtime("Mem usage at end of ObjectiveAnalysis (vector): %d"%memory(),5) - self.logtime("%s analysis complete"%analysisType,1) - return(newspd,newdir) - #--------------------------------------------------------------------------- - # ObjectiveAnalysisScalar - perform an objective analysis of the point - # values, using the specified guess grid. Point values are a list of - # tuples. 
Each tuple contains: name,x,y,elev,val - # - def ObjectiveAnalysisScalar(self,values,guessGrid,analysisType, - elevfactor,topoGrid,landMask=None): - self.logtime("Mem usage at start of ObjectiveAnalysisScalar: %d"%memory(),5) - # - # Make lists of x,y,h,value-guess - and get rid of points - # that are off the grid - # - xloclist=[] - yloclist=[] - hloclist=[] - zlist=[] - if landMask is None: - newlandMask=self.newGrid(True, bool) - else: - newLandMask=landMask - self.logtime("Point values used in analysis:",4) - for i in range(len(values)): - (name,x,y,elev,val)=values[i] - if (x>(self.xmax-1))or(x<0)or(y>(self.ymax-1))or(y<0): - continue - # - # If the ob point elevation is missing - always use - # the gridpoint elevation - # - if elev>self.MissingElevThreshold: - hloclist.append(elev) - else: - hloclist.append(topoGrid[y,x]) - xloclist.append(x) - yloclist.append(y) - # - # If using the grid elevation at the point, then the - # z value (change)is simply the observed value minus the guess - # grid value. - # - if self.ActualElev!=1: - self.logtime(" %12s %3d,%3d %5d Val:%5.1f -- grid:%5.1f -- change:%5.1f"%(name,x,y,elev,val,guessGrid[y,x],val-guessGrid[y,x]),4) - zlist.append(val-guessGrid[y,x]) - - # - # If using actual elevations - then need to make the z value the - # difference between what the guess grid WOULD have at the ob elevation - # rather than the guess grid value itself. Searches outward until - # it finds a guess grid point with an elevation less than 100 feet - # from the ob's elevation. - # - else: - pt=topoGrid[y,x] - obLandMask=newLandMask[y,x] - desiredDiff=100 - bestval=guessGrid[y,x] - if elev>self.MissingElevThreshold: - bestdif=abs(elev-pt) - else: - bestdif=0 - bestele=pt - wid=1 - # - # Spiral out from the point - looking for nearby gridpoints - # that are closer to the actual observation elevation - # than the gridpoint elevation. 
When we find one within - # 100ft of the observation - stop searching and use the - # grid value at that point to determine how much we need - # to change the grid at the observation gridpoint. - # - while ((bestdif>desiredDiff)and(wid<10)): - #print " searching with wid=%d"%wid - if ((y+wid)=0): - for ii in range(max(0,x-wid),min(x+wid+1,self.xmax)): - if obLandMask==newLandMask[y-wid,ii]: - gelev=topoGrid[y-wid,ii] - dif=abs(elev-gelev) - if dif=0): - for jj in range(max(0,y-wid),min(y+wid+1,self.ymax)): - if obLandMask==newLandMask[jj,x-wid]: - gelev=topoGrid[jj,x-wid] - dif=abs(elev-gelev) - if difdesiredDiff: - wid+=1 - estval=bestval - self.logtime(" %12s %3d,%3d, est at %5d:%5.1f --- grid at %5d:%5.1f --- (%5d diff) -- Val:%5.1f -- Change:%5.1f"%(name,x,y,elev,estval,pt,guessGrid[y,x],pt-elev,val,val-estval),4) - zlist.append(val-estval) - # - # Do the requested analysis - # - if analysisType=="serp": - zval=self.Serp(zlist,xloclist,yloclist,hloclist,elevfactor, - topoGrid,landMask=landMask) - finalGrid=(guessGrid+zval).astype(np.float32) - if self.ActualElev==1: - for i in range(len(values)): - (name,x,y,elev,val)=values[i] - if (x>(self.xmax-1))or(x<0)or(y>(self.ymax-1))or(y<0): - continue - finalGrid[y,x]=val - elif analysisType=="barnes": - zval=self.Barnes(zlist,xloclist,yloclist,hloclist,elevfactor, - topoGrid,landMask=landMask) - finalGrid=(guessGrid+zval).astype(np.float32) - elif analysisType=="dsquared": - zval=self.Dsquared(zlist,xloclist,yloclist,hloclist,elevfactor, - topoGrid,landMask=landMask) - finalGrid=(guessGrid+zval).astype(np.float32) - else: - self.logtime("Unknown analysisType:%s"%analysisType) - zval=self.empty() - finalGrid=(guessGrid+zval).astype(np.float32) - self.logtime("Mem usage at end of ObjectiveAnalysisScalar: %d"%memory(),5) - return finalGrid - #--------------------------------------------------------------------------- - # removeDuplicates(stationlist) - find any stations in the same x,y gridbox - # and average the data 
for those stations, returning a new stationlist. - # The stationlist is a list of tuples. For vectors the tuples have 6 - # values: name,x,y,elev,speed,direc For scalars the tuples have 5 - # values: name,x,y,elev,value - # - def removeDuplicates(self,values): - if len(values)<1: - return values - test=values[0] - numpieces=len(test) - if len(test)==6: - type="VECTOR" - elif len(test)==5: - type="SCALAR" - else: - return values - # - newvalues=[] - hash={} - for stn in values: - x=stn[1] - y=stn[2] - key="%4.4d%4.4d"%(x,y) - if key in hash: - list=hash[key] - list.append(stn) - hash[key]=list - else: - list=[] - list.append(stn) - hash[key]=list - - hkeys=hash.keys() - hkeys.sort() - for key in hkeys: - stnlist=hash[key] - if (len(stnlist)==1): - newvalues.append(stnlist[0]) - else: - valsum=0 - usum=0 - vsum=0 - valnum=0 - avgnames="" - for stn in stnlist: - if type=="VECTOR": - (name,x,y,elev,spd,direc)=stn - (u,v)=self.MagDirToUV(spd,direc) - usum=usum+u - vsum=vsum+v - else: - (name,x,y,elev,val)=stn - valsum=valsum+val - valnum=valnum+1 - avgnames=avgnames+name+"+" - avgname=avgnames[:-1] - if type=="VECTOR": - uavg=float(usum)/float(valnum) - vavg=float(vsum)/float(valnum) - (spd,direc)=self.UVToMagDir(uavg,vavg) - stn=(avgname,x,y,elev,spd,direc) - else: - valavg=int(float(valsum)/float(valnum)) - stn=(avgname,x,y,elev,valavg) - newvalues.append(stn) - return newvalues - #--------------------------------------------------------------------------- - # Serp - Given a list of values (zlist) at points (xlist, ylist, hlist - # lists) and topography weighting factor (elevfactor) calculate a grid - # that fits the values exactly, using a curve-fitting algorithm using - # 'serpentine' curves. - # - # To save time, this routine carefully checks to see if it has been - # recently called with the same set of gridpoint locations and - # elevation factor - and then skips all the calculations based on - # location - and only applies the code based on the zlist values. 
- # - def Serp(self,zlist,xlist,ylist,hlist,elevfactor,Topo,landMask=None): - # - # Check for case of cbig array being bigger than 2GB. If so, - # likely to have memory problems. Thus, write an error message - # and return with no change. - # - mem=((self.xmax*self.ymax)*len(zlist))*8 - self.logtime("Serp memory usage estimate: %d"%mem,5) - if mem>2147000000: - self.logtime(" Combination of size of grid (%d x %d) and"%(self.xmax,self.ymax)) - self.logtime(" number of control points (%d) will take up too"%len(zlist)) - self.logtime(" much memory for Serp. Either use smaller grid, fewer") - self.logtime(" control points, or use a different analysis scheme") - chg=Topo*0.0 - return chg - self.logtime("Mem usage at start of serp: %d"%memory(),5) - # - # Determine if we need to do setup again - # first are the number of points different - # second is the elevation factor different - # third (if still OK) check that each point is in the - # distance arrays Disq - # - setup=0 - if (len(xlist)!=self.SerpLastPoints): - setup=1 - if (elevfactor!=self.SerpElevfactor): - setup=1 - if (setup==0): - for i in range(len(xlist)): - x=xlist[i] - y=ylist[i] - xy=(y*self.xmax)+x - if (xy not in self.SerpXYgrids): - setup=1 - break - # - # Now we know if we need to do the setup stuff again - # - if (setup==0): - self.logtime("Skipping SerpSetup - same points",2) - else: - self.logtime("Running SerpSetup",2) - if elevfactor!=self.SerpElevfactor: - self.SerpXYgrids={} - self.SerpElevfactor=elevfactor - # - (numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask)=self.setupScaling(xlist, - ylist,hlist,elevfactor,Topo,landMask) - # - # - # - totDistSquared=self.getTotDistSquared(xarr,yarr,harr,larr) - totDist=np.sqrt(totDistSquared) - # - newsize=(numpts,self.ymax,self.xmax) - # - # Get the "remoteness" values which modify the weights - # - self.logtime("Calculating Remoteness",3) - rem=self.getSerpRemoteness(totDist) - # - # For each control point, get the distance to the - # next nearest 
control point - # - self.logtime("Calculating MinDist",3) - dmin=self.getMinDist(totDist) - dmin2=np.square(dmin) - del dmin - del totDist - # - # make a new total distance - # - self.SerpDisq=np.zeros(newsize,np.float32) - # - # zero out the avary-array, which varies for every control point - # - avary=np.zeros((numpts,numpts),np.float32) - # - # Get maximum number of distance grids to save for quick - # recall (dont let it use more than SerpCache MB of space) - # - ngrid=self.xmax*self.ymax - maxsave=int((self.SerpCache*1000000)/(ngrid*8)) - self.logtime("calculated max points to save:%d"%maxsave,4) - # - # Get the factor that relates every control point to - # every gridpoint, as well as the sum of those factors - # - self.logtime("Calculating SerpDisq",3) - newcount=0 - dcount=0 - for k in range(numpts): - x=int(xarr[k]) - y=int(yarr[k]) - avary[k]=dmin2[k] - xy=(y*self.xmax)+x - - if xy in self.SerpXYgrids: - tempdist=self.SerpXYgrids[xy] - else: - newcount=newcount+1 - xs=np.square(self.Irow-x) - ys=np.square(self.Jrow-y) - b=np.add.outer(ys,xs) - if self.ActualElev==0: - elev=scaledtopo[y,x] - else: - elev=harr[k] - ed=scaledtopo-elev - land=newlandMask[y,x] - ld=np.square(land-newlandMask) - ed=ed+(ld*10000.0) - tempdist=b+np.square(ed) - if (len(self.SerpXYgrids)=10: - self.logtime("Points saved so far:%d"%len(self.SerpXYgrids),4) - self.logtime("Mem used so far:%d"%memory(),5) - dcount=0 - self.SerpDisq[k]=(rem[k]/(tempdist+dmin2[k])).astype(np.float32) - self.logtime("Mem after all points in:%d"%memory(),5) - self.SerpDsum=np.add.reduce(self.SerpDisq) - # - # The coefficients for each control point - # - rej=np.transpose(np.resize(rem,(numpts,numpts))) - SerpWeights=rej/(totDistSquared+avary) - del rej - del rem - del totDistSquared - del avary - self.SerpWsum=np.add.reduce(SerpWeights) - # - # Solve Matrix of weights - # - self.SerpCc=LinearAlgebra.inv(SerpWeights).astype(np.float32) - # - # Free up some memory - # - del SerpWeights - self.logtime("Mem 
before serp setup gc.collect: %d"%memory(),5) - gc.collect() - self.SerpLastPoints=numpts - self.logtime("Mem after serp setup: %d"%memory(),5) - # - # Now do the Serp calculations - # - self.logtime("Running Serp calculations",2) - numpts=len(zlist) - zarr=np.array(zlist,np.float32) - # - # - # - nearzero=np.logical_and(np.less(zarr,0.001),np.greater(zarr,-0.001)) - zarr[nearzero] = 0.001 - del nearzero - zw=zarr*self.SerpWsum - del zarr - rjt=np.resize(zw,(numpts,numpts)) - del zw - rj=np.transpose(rjt) - del rjt - self.logtime("Mem usage after rj: %d"%memory(),5) - # - # fastest way I could come up with to expand c array - # out into grids that have the same value for every - # gridpoint and every control point - # - tshape=(self.SerpDisq.shape[1],self.SerpDisq.shape[2],self.SerpDisq.shape[0]) - a1=self.SerpCc*rj - del rj - a2=np.add.reduce(a1) - del a1 - a3=np.resize(a2,tshape) - del a2 - cbig=np.transpose(a3,(2,0,1)) - del a3 - gc.collect() - self.logtime("Mem usage after cbig calculation: %d"%memory(),5) - # - # calculate change grid by multiplying each gridpoint by the - # weight of each change point (and considering the distance - # squared between each gridpoint and the change point) - # - a1=cbig*self.SerpDisq - del cbig - a2=np.add.reduce(a1) - del a1 - gc.collect() - chg=a2/self.SerpDsum - del a2 - self.logtime("Mem usage after the chg calculation: %d"%memory(),5) - self.logtime("Done with serp calculations",2) - return chg - #--------------------------------------------------------------------------- - # setSerpCache - set size of the serp distance grids cache (in MB). The - # default value of 500MB allows for a significant speedup in the serp - # routines - by saving and re-using expensive distance calculations - # between runs. However, these are kept in memory and can cause the - # calculations to fail with 'out of memory' errors. You can set this - # value to 0 to NOT use any cache - but expect the analysis to run 20% - # slower each time. 
- # - def setSerpCache(self,value): - if ((value>=0) and (value<=1000)): - self.SerpCache=value - else: - self.logtime("SerpCache must be between 0 and 1000") - return - #--------------------------------------------------------------------------- - # Dsquared - An inverse distance squared weighting scheme. - # - def Dsquared(self,zlist,xlist,ylist,hlist,elevfactor,Topo, - landMask=None): - self.logtime("Running Distance Squared Calculations",2) - # - # Setup elevation and land/sea scaling - # - (numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask)=self.setupScaling(xlist, - ylist,hlist,elevfactor,Topo,landMask) - # - # turn lists into numeric python arrays - # - zarr=np.array(zlist,np.float32) - # - nearzero=np.logical_and(np.less(zarr,0.001),np.greater(zarr,-0.001)) - zarr[nearzero] = 0.001 - - newsize=(numpts,self.ymax,self.xmax) - - dsquared=np.zeros(newsize,np.float32) - dists=np.zeros(newsize,np.float32) - - self.logtime("Getting distances",3) - for k in range(numpts): - dist=self.getDistance(xarr[k],yarr[k],harr[k],scaledtopo,newlandMask) - dist[np.less(dist,0.000001)] = 0.000001 - dsquared[k]=(dist*dist).astype(np.float32) - dists[k]=dist.astype(np.float32) - self.logtime("Done getting distances",3) - - if self.DSquaredMaxPoints>0: - usePoints = min(int(self.DSquaredMaxPoints)-1,numpts-1) - sortdists=np.sort(dists,0) - finalDist=sortdists[usePoints] - - totweight=self.empty() - totsum=self.empty() - for k in range(numpts): - w=1.0/dsquared[k] - if self.DSquaredMaxPoints>0: - if self.DSquaredDist>0: - dd=self.DSquaredDist/self.gridres - finalDist=np.where(np.greater(dd,finalDist),dd,finalDist) - w[np.greater(dists[k],finalDist)] = 0.0 - elif self.DSquaredDist>0: - w[np.greater(dists[k],self.DSquaredDist/self.gridres)] = 0.0 - totweight=totweight+w - totsum=totsum+(zarr[k]*w) - - totweight[np.less(totweight,1.0e-200)] = 1.0 - chg=totsum/totweight - self.logtime("Done with Distance Squared calculations",2) - return chg - 
#--------------------------------------------------------------------------- - # setDSquaredDist - set the minimum distance used by the Distance Squared - # weighting algorithm. Only control points within this distance of a - # gridpoint will be used in calculating the weighted average. If set - # negative then the distance is calculated such that the nearest 5 - # control points are used at each gridpoint. - # - def setDSquaredDist(self,value): - self.DSquaredDist=value - if value<0.0: - self.logtime("Distance Squared distance will be infinite",1) - else: - self.logtime("Distance Squared distance will be %f"%value,1) - return - def setDSquaredMaxPoints(self,value): - self.DSquaredMaxPoints=value - if value>0: - self.logtime("Distance Squared number of points will now be %d"%value,1) - else: - self.logtime("Distance Squared number of points will now be infinite",1) - return - #----------------------------------------------------------------------- - # Barnes - A Barnes analysis routine - # - def Barnes(self,zlist,xlist,ylist,hlist,elevfactor, - Topo,landMask=None): - self.logtime("Running barnes calculations",2) - # - # Setup elevation and land/sea scaling - # - (numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask)=self.setupScaling(xlist, - ylist,hlist,elevfactor,Topo,landMask) - totDistSquared=self.getTotDistSquared(xarr,yarr,harr,larr) - totDist=np.sqrt(totDistSquared) - # - # Get distance squared of control points to every gridpoint - # - self.logtime("Getting distance squared between control points and gridpoints",3) - dists=np.zeros((numpts,self.ymax,self.xmax),np.float32) - for k in range(numpts): - d=self.getDistance(xarr[k],yarr[k],harr[k],scaledtopo,newlandMask)*self.gridres - dists[k]=(d*d).astype(np.float32) - # - # If BarnesSpacing is negative...they want it calculated - # - if self.BarnesSpacing<0: - self.logtime("Calculating Barnes Station Spacing",3) - if len(xlist)>1: - # - # get min distance of control points to each other - # - 
minDist=self.getMinDist(totDist) - # - # If <-50...Get average distance to closest neighbor - # - if self.BarnesSpacing<-50: - self.logtime(" using average distance of 'closest neighbor'",3) - total=np.add.reduce(minDist) - c=(total/len(xlist))*self.gridres - # - # otherwise...get maximum distance to closest neighbor - # - else: - self.logtime(" using furthest 'closest neighbor' for all control points",3) - c=np.maximum.reduce(minDist)*self.gridres - else: - c=50 - self.logtime("Calculated Barnes Station Spacing = %.2f km"%c,3) - else: - c=self.BarnesSpacing - self.logtime("Using a Barnes Station Spacing of %.2f km"%c,3) - # - # The Barnes 'kappa' value depends on twice the barnes distance - # - kappa=5.052*(((2.0*c)/math.pi)**2) - self.logtime("Barnes kappa value= %f"%kappa,3) - # - # Barnes PASS 1 - # - self.logtime("Barnes Pass 1",3) - totweights=np.zeros((self.ymax,self.xmax),np.float32) - totsum=np.zeros((self.ymax,self.xmax),np.float32) - for k in range(numpts): - # - # get scaled distance squared divided by kappa - # - xx=dists[k]/kappa - # - # Barnes weight is e taken to the negative xx power - - # but make sure it isn't huge - which would return a zero weight - # - xx[np.greater(xx,200.0)] = 200.0 - w=(np.exp(xx*-1.0)).astype(np.float32) - totweights=totweights+w - # - # Calculate weight * point k value - # - z=zlist[k] - totsum = totsum + (w*z).astype(np.float32) - # - # Calculate weighted average. 
Sum of (weights * values) divided by - # the sum of weights (make sure sum of weights is non-zero) - # - totweights[np.less(totweights,1.0e-200)] = 1.0e-200 - chg=totsum/totweights - # - # Barnes PASS 2 - # - self.logtime("Barnes Pass 2",3) - totweights=np.zeros((self.ymax,self.xmax),np.float32) - totsum=np.zeros((self.ymax,self.xmax),np.float32) - for k in range(numpts): - # - # get scaled distance squared divided by gamma *kappa - # - xx=dists[k]/(self.BarnesGamma*kappa) - # - # Barnes weight is e taken to the negative xx power - - # but make sure it isn't huge - which would return a zero weight - # - xx[np.greater(xx,200.0)] = 200.0 - w=(np.exp(xx*-1.0)).astype(np.float32) - totweights=totweights+w - # - # In second pass...weighting the difference between the - # point k value, and the change calcuated in the first pass - # - x=int(xarr[k]) - y=int(yarr[k]) - zdiff=zlist[k]-chg[y,x] - totsum = totsum + (w*zdiff).astype(np.float32) - # - # Calculate weighted average. Sum of (weights * values) divided by - # the sum of weights (make sure sum of weights is non-zero) - # - totweights[np.less(totweights,1.0e-200)] = 1.0e-200 - chg2=totsum/totweights - # - # Add the adjustment from PASS 2 to PASS 1 - # - chg=chg+chg2 - # - # Return the adjustment - # - self.logtime("Done with Barnes calculations",2) - return chg - #--------------------------------------------------------------------------- - # setBarnesGamma - set the gamma values used in the second pass of Barnes - # algorithm. By default it is 0.3, but the user can set it to anything - # between 0.0 and 1.0 - # - def setBarnesGamma(self,value): - if ((value>=0.0) and (value<=1.0)): - self.BarnesGamma=value - else: - self.logtime("Barnes Gamma must be between 0.0 and 1.0") - return - #--------------------------------------------------------------------------- - # setBarnesSpacing - set the station spacing used by the Barnes algorithm. 
- # Basically data for wavelengths less than 2 times this distance are - # removed by the analysis. If set to a negative value, the Barnes - # routine will calculate this by finding the distance to the nearest - # neighbor for each control point...and then finding the maximum (the - # 'furthest closest neighbor'). If less than -50, it will take the - # average of the distances to the closest neighbors (the more - # traditional Barnes value). - # - def setBarnesSpacing(self,value): - self.BarnesSpacing=value - if value<0.0: - self.logtime("Barnes Station Spacing will be calculated",1) - return - #--------------------------------------------------------------------------- - # setupScaling - setup all the numeric arrays for the control point - # locations...based on any elevation and land/sea scaling - # - def setupScaling(self,xlist,ylist,hlist,elevfactor,Topo,landMask): - # - # Number of control points - # - numpts=len(xlist) - # - # scaling topo - # - (halist,scaledtopo)=self.setupElev(xlist,ylist,hlist,elevfactor,Topo) - # - # setup the land/water mask - # - if landMask is None: - newlandMask=(Topo*0.0)+1.0 - else: - newlandMask=landMask - llist=self.setupLandWater(xlist,ylist,newlandMask) - # - # setup arrays - # - xarr=np.array(xlist,np.float32) - yarr=np.array(ylist,np.float32) - harr=np.array(halist,np.float32) - larr=np.array(llist,np.float32) - # - # - # - return(numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask) - #--------------------------------------------------------------------------- - # getTotDistSquared - get "total" distance between each point and every - # other point. This includes the elevation distance, and the - # land/water. 
- # - def getTotDistSquared(self,xarr,yarr,harr,larr): - xd=np.square(self.getComponentDiff(xarr)) - yd=np.square(self.getComponentDiff(yarr)) - ld=np.square(self.getComponentDiff(larr)) - hd=np.square(self.getComponentDiff(harr)+(ld*10000.0)) - return(xd+yd+hd) - #--------------------------------------------------------------------------- - # useActualElev - set options so that actual station elevation will be used - # when calculating "distance" of a gridpoint from the observation. - # - def useActualElev(self): - self.ActualElev=1 - return - #-------------------------------------------------------------------------- - # useGridElev - set options so that elevation of the gridpoint that - # contains an observation will be used when calculating the "distance" - # of a gridpoint from the observation - # - def useGridElev(self): - self.ActualElev=0 - return - #--------------------------------------------------------------------------- - # getDistance - get a grid of distance from a single point with coordinates - # xval,yval and elevation hval. This distance is in terms of - # grid-spacing - not physical distance units like km. The distance - # includes difference between the hval elevation and the topography - # grid passed in via scaledtopo. Also differences in the land/water - # mask between the point and each gridpoint count strongly in the - # distance calculation). - # - def getDistance(self,xval,yval,hval,scaledtopo,landMask): - ix=int(xval) - iy=int(yval) - xs=np.square(self.Irow-ix) - ys=np.square(self.Jrow-iy) - horizdist=np.add.outer(ys,xs) - # - # - # - if self.ActualElev==0: - elev=scaledtopo[iy,ix] - else: - elev=hval - ed=scaledtopo-elev - # - # A land/water difference counts as 10000 in scaled elevation - # units. 
- # - land=landMask[iy,ix] - ld=np.square(land-landMask) - ed2=np.square(ed+(ld*10000.0)) - # - # - # - dist=np.sqrt(horizdist+ed2) - return dist - #--------------------------------------------------------------------------- - # getMinDist - the minimum distance between a control point and all other - # control points (elevation and land/water is considered) - but this is - # in terms of gridpoints - not km - # - def getMinDist(self,totDist): - d=np.where(np.less(totDist,0.001),2*self.xmax,totDist) - dmin=np.minimum.reduce(d) - return dmin - #--------------------------------------------------------------------------- - # getSerpRemoteness - a multiplier for the serp weight - such that "remote" - # points (ones without many neighbors) are weighted more strongly than - # points that are very near other points. This keeps 'clustered' - # control points from dominating the analysis - since there might be - # many clustered points giving basically the same info. - # - def getSerpRemoteness(self,totDist): - numpts=totDist.shape[0] - # - # special cases: - # only 1 point: remoteness is 1.0 - # - if (numpts==1): - ren=np.array([1.0]).astype(np.float32) - return ren - # - # two points is easy - remoteness is 0.5 - # - if (numpts==2): - ren=np.array([0.5,0.5]).astype(np.float32) - return ren - # - # sort the distances...so for each point we have the - # distances to its neighbors in sorted order - # - dsort=np.sort(totDist,0) - # - # The distance of each point to its nearest neighbor is now - # in dsort[1,:] - # - dmax=dsort[:,:] - mostremote=np.maximum.reduce(dmax) - # - # add up distances from each point to each neighbor point - # - dsums=np.add.accumulate(dsort) - dsumsflat=dsums.flat - # - # get rid of all accumulated distances greater than most remote - # that way maximum value in each column will be the one where - # distance is less or equal to mostremote distance - # - dloc=np.where(np.greater(dsums,mostremote),np.float32(0),dsums) - # - # get total distance up to 
the point where it is less than mostremote - # - dint=np.argmax(dloc,0) - dintindex=(dint*numpts)+np.arange(numpts) - valuebefore=np.take(dsumsflat,dintindex) - # - # get total distance at point where it is more than most remote - # - dnext=dint+1 - dnextindex=(dnext*numpts)+np.arange(numpts) - valueafter=np.take(dsumsflat,dnextindex) - # - # get fractional part of points - # - frac=(mostremote-valuebefore)/(valueafter-valuebefore) - # - # get total number of points to make the most remote distance - # and take reciprocal - # - npt=dint+frac - factor=1.0/npt - # - # divide by sum of all factors - so they add to 1.0 - # - factorsum=np.add.reduce(factor) - ren=(factor/factorsum).astype(np.float32) - # - # - # - return ren - #--------------------------------------------------------------------------- - # setupElev - use the elevfactor to change real Topo into a 'scaled topo', - # as well as changing actual station elevations in hlist into 'scaled - # elevations' in scaledhlist. - # - # elevfactor should be in units of feet/km. If you set it to 1, then - # 1 foot of elevation difference is equivalent to 1km of horizontal - # distance (this means that elevation is VERY important in the - # analysis). If you set it to 1000, then 1000 feet of elevation - # difference is equal to 1 km of horizontal distance (this means that - # elevation is NOT important to the analysis). To turn off elevation - # completely - set the elevfactor to zero. 
- # - def setupElev(self,xlist,ylist,hlist,elevfactor,Topo): - - scaledhlist=[] - if elevfactor>0.001: - factor=elevfactor*self.gridres - scaledtopo=Topo/factor - for i in range(len(hlist)): - h=hlist[i] - if self.ActualElev==0: - scaledhlist.append(scaledtopo[ylist[i],xlist[i]]) - else: - scaledhlist.append(h/factor) - else: - scaledtopo=Topo*0.0 - for h in hlist: - scaledhlist.append(0.0) - return(scaledhlist,scaledtopo) - #--------------------------------------------------------------------------- - # setupLandWater - setup a list that contains the value of the landMask - # grid for every point in the xlist,ylist locations. It doesn't really - # matter - but the convention is that land=1 and water=0 - # - def setupLandWater(self,xlist,ylist,landMask): - llist=[] - for i in range(len(xlist)): - x=xlist[i] - y=ylist[i] - if landMask is None: - llist.append(1) - else: - llist.append(landMask[y,x]) - return llist - #--------------------------------------------------------------------------- - # getComponentDiff - get difference between all control points - # - def getComponentDiff(self,xloc): - xd=-(np.subtract.outer(xloc,xloc)) - return xd - #--------------------------------------------------------------------------- - # getGridSpacing - get 'rough grid spacing' by getting the distance between - # the corners of the GFE grid and dividing by the number of points. 
- # - def getGridSpacing(self): - (lat1,lon1)=self.getLatLon(0.0, 0.0) - (lat2,lon2)=self.getLatLon(self.xmax-1.0, self.ymax-1.0) - hypot=math.hypot(self.xmax-1.0, self.ymax-1.0) - spacing1=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot - (lat1,lon1)=self.getLatLon(0.0, self.ymax-1.0) - (lat2,lon2)=self.getLatLon(self.xmax-1.0, 0.0) - spacing2=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot - avgspacing=(spacing1+spacing2)/2.0 - return avgspacing - #--------------------------------------------------------------------------- - # getCircleDistance - get the 'great circle distance' between two lat lon - # points (in km) - # - def getCircleDistance(self,lat1,lon1,lat2,lon2): - DTR=math.pi/180.0 - lat1r=lat1*DTR - lon1r=lon1*DTR - lat2r=lat2*DTR - lon2r=lon2*DTR - dl=lon2r-lon1r - a=(math.acos((math.sin(lat1r)*math.sin(lat2r))+(math.cos(lat1r)*\ - math.cos(lat2r)*math.cos(dl))))/DTR - return(a*1.852*60) - #--------------------------------------------------------------------------- - # setVerbose - set 'verbosity' of logging. By default sets to 1, but - # can set higher to see even more detailed messages. - # 0=no messages (only errors) - # 1=simple message saying doing analysis - # 2=add messages about pieces of analysis being done - # 3=add messages with more timing information - # 4=add listing of all point obs used in analysis - # 5=add memory usage messages - # - def setVerbose(self,value=1): - self.verbose=value - return - #--------------------------------------------------------------------------- - # setQuiet - set 'verbosity' to zero so that only required (level=0) - # log messages are output. - # - def setQuiet(self): - self.verbose=0 - return - #--------------------------------------------------------------------------- - # setMissingElevThreshold - set the MissingElevThreshold value - # Obs with elevation values less than or equal to this threshold - # will use the topo grid elevation instead, even if ActualElev is set - # to 1. 
- # - def setMissingElevThreshold(self,value): - self.MissingElevThreshold=value - return - #--------------------------------------------------------------------------- - # logtime - write a string with date/time stamp. Can dynamically control - # which get printed by using the importance and verbosity settings. - # Will only print message with importance less or equal to verbosity - # setting. (in other words, importance=0 are VERY IMPORTANT messages - # that are always printed. importance=1 are only shown when Verbose - # is 1 or greater, etc.). - # - def logtime(self,string,importance=0): - if importance<=self.verbose: - tt=self._gmtime().timetuple() - ts="%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d"%(tt[0],tt[1],tt[2],tt[3],tt[4],tt[5]) - print "%s|ObjAnal - %s" % (ts,string) - sys.stdout.flush() - return -# -# debug stuff for memory usage -# -_proc_status="/proc/%d/status"%os.getpid() -_scale={'kB':1024.0,'mB':1024.0*1024.0, - 'KB':1024.0,'MB':1024.0*1024.0} -def _VmB(VmKey): - try: - t=open(_proc_status) - v=t.read() - t.close() - except IOError: - return 0.0 - i=v.index(VmKey) - v=v[i:].split(None,3) - if len(v)<3: - return 0.0 - return float(v[1])*_scale[v[2]] -def memory(): - return _VmB('VmSize:') -def resident(): - return _VmB('VmRSS:') - +# ---------------------------------------------------------------------------- +# SVN: $Revision: 134 $ $Date: 2010-08-26 17:32:30 +0000 (Thu, 26 Aug 2010) $ +# +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# ObjAnal - version 2.12 - various Objective Analysis routines +# +# Author: Tim Barker - SOO Boise, ID +# +# 2014/10/06 - Version 2.12. Fix typo with timetupe in logtime which handles +# when running in simulations. +# 2014/08/31 - Version 2.11. Get rid of debug print statement that shouldn't +# have been there in the first place. +# 2014/07/28 - Version 2.10. 
Fix issues when ActualElev=1 and landMask is +# used, and a control point near the edge of the landMask has +# an elevation that is wildly different than the grid elevation +# at at that location. Also introduce the concept of a 'missing' +# elevation value for the point obs. If the elevation is missing +# the code will use the grid elevation - regardless of the +# setting of ActualElev. Defaults to -500ft. Can be changed +# with new setMissingElevThreshold routine (but doubt anybody will) +# 2014/03/20 - Version 2.8. Better import of numpy. Used SmartScript for +# _gmtime instead of time module (for more effective playback) +# 2014/01/10 - Version 2.7. Fixed copy of self._empty +# 2013/12/03 - Version 2.6. Fixed a typo in the ActualElev code, and made +# using ActualElev the default. +# 2013/05/04 - Version 2.5. Tweaked the code a bit more when using Serp +# and actual_elev=1. Does a better job of estimating what +# the grid WOULD have at the ob elevation - by finding a best +# match among surrounding gridpoints, rather than a value +# vs. elevation regression. +# 2012/09/11 - Version 2.4. Fixed a bug where consecutive calls to Serp +# using different points incorrectly tried to use the cached +# point data the second time through - and could crash the +# calculations. +# 2012/08/15 - Version 2.3. Added configuration element to control size of +# cache for Serp distance grids. Trimmed memory usage in Serp +# a little more. Changed sense of Verbose logging. Changed to +# CamelCase for config parameters. +# 2012/06/02 - Version 2.2 - Added code to produce better analyses when +# using ActualElev=1. Now estimates what the grid "would" +# have at that elevation at that gridpoint. This makes the +# magnitude of changes needed much more reasonable. In Serp +# routine, a final step to match the point obs exactly was +# added at the end. Added some memory enhancements in Serp. +# 2011/03/11 - Handle AWIPS-2 vector grids now being LISTS, instead of Tuples. 
+# 2010/07/30 - AWIPS 2 port by Paul Jendrowski +# 2007/07/10 - Add code for Barnes and Inverse Distance Squared (most of +# the code came from Ken Pomeroy and Chris Gibson). +# 2007/06/17 - Add code for handling a land/sea mask. Essentially just +# makes gridpoints not on the same (land or sea) appear to +# be MUCH further apart. +# 2006/10/10 - Reduce memory in the Serp routines +# ---------------------------------------------------------------------------- + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +import numpy as np +import SmartScript +import sys,types,math,os,gc +import numpy.linalg as LinearAlgebra + +class ObjAnal(SmartScript.SmartScript): + def __init__(self, dataMgr, mdMode=None, toolType="numeric"): + SmartScript.SmartScript.__init__(self,dataMgr) + self.verbose=0 + # + # speed up distance calculations with vectors of I/J coords + # + gridsize=self.getGridShape() + ij=np.indices(gridsize,dtype=np.float32) + i=ij[1] + self.Irow=i[0,:] + j=ij[0] + self.Jrow=j[:,0] + # + # Size of output grid is based on GFE gridsize + # + self.ymax=self.getGridShape()[0] + self.xmax=self.getGridShape()[1] + self.gridres=self.getGridSpacing() + # + # If ActualElev=1...then use the station elevation for elevation + # related calculations. + # otherwise.......use the elevation of the gridpoint that + # contains the station for elevation related + # calculations + # However...if the station elevation is lower than the missing + # elevation Threshold, then use the grid elevation + # even if ActualElev is equal to 1. + # + self.ActualElev=1 + self.MissingElevThreshold=-500 + # + # Default Serp parameters + # Cache (500 by default) (between 0 and 1000) amount of memory + # (in MB) allowed for saving distance grids between Serp + # calls. 
+ # Elevfactor - the elevation factor used in the previous Serp + # analysis + # SerpXYgrids - the cache of distance grids saved between Serp + # runs + # + self.SerpLastPoints=0 + self.SerpCache=500 + self.SerpXYgrids={} + self.SerpElevfactor=-1.0 + # + # Default Barnes parameters + # Gamma (0.3 by default) (should be between 0.1 and 1.0) + # Spacing (calculated by default) wavelength below which + # data will be filtered. + # + self.BarnesGamma=0.3 + self.BarnesSpacing=-1 # negative value forces calculation + # + # Default DSquared parameters + # Dist --- minimum radius around a gridpoint to search for + # station data to use in the weighted average + # MaxPoints - maximum number of stations to use in the + # weighted average for a gridpoint. + # + self.DSquaredDist=-1 + self.DSquaredMaxPoints=-1 + + return + + #--------------------------------------------------------------------------- + # ObjectiveAnalysis - perform an objective analysis of the point values, + # using the specified guess grid. If the guess grid is a vector type + # then both the point values and grids are handled differently. 
+ # + def ObjectiveAnalysis(self,values,guessGrid,analysisType, + elevfactor=0.0,topoGrid=None,landMask=None): + self.logtime("Performing %s analysis"%analysisType,1) + self.logtime("Mem usage at start of ObjectiveAnalysis: %d"%memory(),5) + if topoGrid is None: + topoGrid=self.getTopo() + if landMask is None: + landMask=self.newGrid(True, bool) + values=self.removeDuplicates(values) + gridType=type(guessGrid) + if ((gridType is not tuple)and(gridType is not list)): + new=self.ObjectiveAnalysisScalar(values,guessGrid,analysisType, + elevfactor,topoGrid, + landMask) + self.logtime("Mem usage at end of ObjectiveAnalysis: %d"%memory(),5) + return new + else: # vector + uvalues=[] + vvalues=[] + for i in range(len(values)): + (name,x,y,elev,spd,direc)=values[i] + (u,v)=self.MagDirToUV(spd,direc) + uvalues.append((name,x,y,elev,u)) + vvalues.append((name,x,y,elev,v)) + (spdgrid,dirgrid)=guessGrid + (uguess,vguess)=self.MagDirToUV(spdgrid,dirgrid) + # + unew=self.ObjectiveAnalysisScalar(uvalues,uguess,analysisType, + elevfactor,topoGrid, + landMask=landMask) + vnew=self.ObjectiveAnalysisScalar(vvalues,vguess,analysisType, + elevfactor,topoGrid, + landMask) + (newspd,newdir)=self.UVToMagDir(unew,vnew) + self.logtime("Mem usage at end of ObjectiveAnalysis (vector): %d"%memory(),5) + self.logtime("%s analysis complete"%analysisType,1) + return(newspd,newdir) + #--------------------------------------------------------------------------- + # ObjectiveAnalysisScalar - perform an objective analysis of the point + # values, using the specified guess grid. Point values are a list of + # tuples. 
Each tuple contains: name,x,y,elev,val + # + def ObjectiveAnalysisScalar(self,values,guessGrid,analysisType, + elevfactor,topoGrid,landMask=None): + self.logtime("Mem usage at start of ObjectiveAnalysisScalar: %d"%memory(),5) + # + # Make lists of x,y,h,value-guess - and get rid of points + # that are off the grid + # + xloclist=[] + yloclist=[] + hloclist=[] + zlist=[] + if landMask is None: + newlandMask=self.newGrid(True, bool) + else: + newLandMask=landMask + self.logtime("Point values used in analysis:",4) + for i in range(len(values)): + (name,x,y,elev,val)=values[i] + if (x>(self.xmax-1))or(x<0)or(y>(self.ymax-1))or(y<0): + continue + # + # If the ob point elevation is missing - always use + # the gridpoint elevation + # + if elev>self.MissingElevThreshold: + hloclist.append(elev) + else: + hloclist.append(topoGrid[y,x]) + xloclist.append(x) + yloclist.append(y) + # + # If using the grid elevation at the point, then the + # z value (change)is simply the observed value minus the guess + # grid value. + # + if self.ActualElev!=1: + self.logtime(" %12s %3d,%3d %5d Val:%5.1f -- grid:%5.1f -- change:%5.1f"%(name,x,y,elev,val,guessGrid[y,x],val-guessGrid[y,x]),4) + zlist.append(val-guessGrid[y,x]) + + # + # If using actual elevations - then need to make the z value the + # difference between what the guess grid WOULD have at the ob elevation + # rather than the guess grid value itself. Searches outward until + # it finds a guess grid point with an elevation less than 100 feet + # from the ob's elevation. + # + else: + pt=topoGrid[y,x] + obLandMask=newLandMask[y,x] + desiredDiff=100 + bestval=guessGrid[y,x] + if elev>self.MissingElevThreshold: + bestdif=abs(elev-pt) + else: + bestdif=0 + bestele=pt + wid=1 + # + # Spiral out from the point - looking for nearby gridpoints + # that are closer to the actual observation elevation + # than the gridpoint elevation. 
When we find one within + # 100ft of the observation - stop searching and use the + # grid value at that point to determine how much we need + # to change the grid at the observation gridpoint. + # + while ((bestdif>desiredDiff)and(wid<10)): + #print " searching with wid=%d"%wid + if ((y+wid)=0): + for ii in range(max(0,x-wid),min(x+wid+1,self.xmax)): + if obLandMask==newLandMask[y-wid,ii]: + gelev=topoGrid[y-wid,ii] + dif=abs(elev-gelev) + if dif=0): + for jj in range(max(0,y-wid),min(y+wid+1,self.ymax)): + if obLandMask==newLandMask[jj,x-wid]: + gelev=topoGrid[jj,x-wid] + dif=abs(elev-gelev) + if difdesiredDiff: + wid+=1 + estval=bestval + self.logtime(" %12s %3d,%3d, est at %5d:%5.1f --- grid at %5d:%5.1f --- (%5d diff) -- Val:%5.1f -- Change:%5.1f"%(name,x,y,elev,estval,pt,guessGrid[y,x],pt-elev,val,val-estval),4) + zlist.append(val-estval) + # + # Do the requested analysis + # + if analysisType=="serp": + zval=self.Serp(zlist,xloclist,yloclist,hloclist,elevfactor, + topoGrid,landMask=landMask) + finalGrid=(guessGrid+zval).astype(np.float32) + if self.ActualElev==1: + for i in range(len(values)): + (name,x,y,elev,val)=values[i] + if (x>(self.xmax-1))or(x<0)or(y>(self.ymax-1))or(y<0): + continue + finalGrid[y,x]=val + elif analysisType=="barnes": + zval=self.Barnes(zlist,xloclist,yloclist,hloclist,elevfactor, + topoGrid,landMask=landMask) + finalGrid=(guessGrid+zval).astype(np.float32) + elif analysisType=="dsquared": + zval=self.Dsquared(zlist,xloclist,yloclist,hloclist,elevfactor, + topoGrid,landMask=landMask) + finalGrid=(guessGrid+zval).astype(np.float32) + else: + self.logtime("Unknown analysisType:%s"%analysisType) + zval=self.empty() + finalGrid=(guessGrid+zval).astype(np.float32) + self.logtime("Mem usage at end of ObjectiveAnalysisScalar: %d"%memory(),5) + return finalGrid + #--------------------------------------------------------------------------- + # removeDuplicates(stationlist) - find any stations in the same x,y gridbox + # and average the data 
for those stations, returning a new stationlist. + # The stationlist is a list of tuples. For vectors the tuples have 6 + # values: name,x,y,elev,speed,direc For scalars the tuples have 5 + # values: name,x,y,elev,value + # + def removeDuplicates(self,values): + if len(values)<1: + return values + test=values[0] + numpieces=len(test) + if len(test)==6: + type="VECTOR" + elif len(test)==5: + type="SCALAR" + else: + return values + # + newvalues=[] + hash={} + for stn in values: + x=stn[1] + y=stn[2] + key="%4.4d%4.4d"%(x,y) + if key in hash: + list=hash[key] + list.append(stn) + hash[key]=list + else: + list=[] + list.append(stn) + hash[key]=list + + hkeys=list(hash.keys()) + hkeys.sort() + for key in hkeys: + stnlist=hash[key] + if (len(stnlist)==1): + newvalues.append(stnlist[0]) + else: + valsum=0 + usum=0 + vsum=0 + valnum=0 + avgnames="" + for stn in stnlist: + if type=="VECTOR": + (name,x,y,elev,spd,direc)=stn + (u,v)=self.MagDirToUV(spd,direc) + usum=usum+u + vsum=vsum+v + else: + (name,x,y,elev,val)=stn + valsum=valsum+val + valnum=valnum+1 + avgnames=avgnames+name+"+" + avgname=avgnames[:-1] + if type=="VECTOR": + uavg=float(usum)/float(valnum) + vavg=float(vsum)/float(valnum) + (spd,direc)=self.UVToMagDir(uavg,vavg) + stn=(avgname,x,y,elev,spd,direc) + else: + valavg=int(float(valsum)/float(valnum)) + stn=(avgname,x,y,elev,valavg) + newvalues.append(stn) + return newvalues + #--------------------------------------------------------------------------- + # Serp - Given a list of values (zlist) at points (xlist, ylist, hlist + # lists) and topography weighting factor (elevfactor) calculate a grid + # that fits the values exactly, using a curve-fitting algorithm using + # 'serpentine' curves. 
+ # + # To save time, this routine carefully checks to see if it has been + # recently called with the same set of gridpoint locations and + # elevation factor - and then skips all the calculations based on + # location - and only applies the code based on the zlist values. + # + def Serp(self,zlist,xlist,ylist,hlist,elevfactor,Topo,landMask=None): + # + # Check for case of cbig array being bigger than 2GB. If so, + # likely to have memory problems. Thus, write an error message + # and return with no change. + # + mem=((self.xmax*self.ymax)*len(zlist))*8 + self.logtime("Serp memory usage estimate: %d"%mem,5) + if mem>2147000000: + self.logtime(" Combination of size of grid (%d x %d) and"%(self.xmax,self.ymax)) + self.logtime(" number of control points (%d) will take up too"%len(zlist)) + self.logtime(" much memory for Serp. Either use smaller grid, fewer") + self.logtime(" control points, or use a different analysis scheme") + chg=Topo*0.0 + return chg + self.logtime("Mem usage at start of serp: %d"%memory(),5) + # + # Determine if we need to do setup again + # first are the number of points different + # second is the elevation factor different + # third (if still OK) check that each point is in the + # distance arrays Disq + # + setup=0 + if (len(xlist)!=self.SerpLastPoints): + setup=1 + if (elevfactor!=self.SerpElevfactor): + setup=1 + if (setup==0): + for i in range(len(xlist)): + x=xlist[i] + y=ylist[i] + xy=(y*self.xmax)+x + if (xy not in self.SerpXYgrids): + setup=1 + break + # + # Now we know if we need to do the setup stuff again + # + if (setup==0): + self.logtime("Skipping SerpSetup - same points",2) + else: + self.logtime("Running SerpSetup",2) + if elevfactor!=self.SerpElevfactor: + self.SerpXYgrids={} + self.SerpElevfactor=elevfactor + # + (numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask)=self.setupScaling(xlist, + ylist,hlist,elevfactor,Topo,landMask) + # + # + # + totDistSquared=self.getTotDistSquared(xarr,yarr,harr,larr) + 
totDist=np.sqrt(totDistSquared) + # + newsize=(numpts,self.ymax,self.xmax) + # + # Get the "remoteness" values which modify the weights + # + self.logtime("Calculating Remoteness",3) + rem=self.getSerpRemoteness(totDist) + # + # For each control point, get the distance to the + # next nearest control point + # + self.logtime("Calculating MinDist",3) + dmin=self.getMinDist(totDist) + dmin2=np.square(dmin) + del dmin + del totDist + # + # make a new total distance + # + self.SerpDisq=np.zeros(newsize,np.float32) + # + # zero out the avary-array, which varies for every control point + # + avary=np.zeros((numpts,numpts),np.float32) + # + # Get maximum number of distance grids to save for quick + # recall (dont let it use more than SerpCache MB of space) + # + ngrid=self.xmax*self.ymax + maxsave=int((self.SerpCache*1000000)/(ngrid*8)) + self.logtime("calculated max points to save:%d"%maxsave,4) + # + # Get the factor that relates every control point to + # every gridpoint, as well as the sum of those factors + # + self.logtime("Calculating SerpDisq",3) + newcount=0 + dcount=0 + for k in range(numpts): + x=int(xarr[k]) + y=int(yarr[k]) + avary[k]=dmin2[k] + xy=(y*self.xmax)+x + + if xy in self.SerpXYgrids: + tempdist=self.SerpXYgrids[xy] + else: + newcount=newcount+1 + xs=np.square(self.Irow-x) + ys=np.square(self.Jrow-y) + b=np.add.outer(ys,xs) + if self.ActualElev==0: + elev=scaledtopo[y,x] + else: + elev=harr[k] + ed=scaledtopo-elev + land=newlandMask[y,x] + ld=np.square(land-newlandMask) + ed=ed+(ld*10000.0) + tempdist=b+np.square(ed) + if (len(self.SerpXYgrids)=10: + self.logtime("Points saved so far:%d"%len(self.SerpXYgrids),4) + self.logtime("Mem used so far:%d"%memory(),5) + dcount=0 + self.SerpDisq[k]=(rem[k]/(tempdist+dmin2[k])).astype(np.float32) + self.logtime("Mem after all points in:%d"%memory(),5) + self.SerpDsum=np.add.reduce(self.SerpDisq) + # + # The coefficients for each control point + # + rej=np.transpose(np.resize(rem,(numpts,numpts))) + 
SerpWeights=rej/(totDistSquared+avary) + del rej + del rem + del totDistSquared + del avary + self.SerpWsum=np.add.reduce(SerpWeights) + # + # Solve Matrix of weights + # + self.SerpCc=LinearAlgebra.inv(SerpWeights).astype(np.float32) + # + # Free up some memory + # + del SerpWeights + self.logtime("Mem before serp setup gc.collect: %d"%memory(),5) + gc.collect() + self.SerpLastPoints=numpts + self.logtime("Mem after serp setup: %d"%memory(),5) + # + # Now do the Serp calculations + # + self.logtime("Running Serp calculations",2) + numpts=len(zlist) + zarr=np.array(zlist,np.float32) + # + # + # + nearzero=np.logical_and(np.less(zarr,0.001),np.greater(zarr,-0.001)) + zarr[nearzero] = 0.001 + del nearzero + zw=zarr*self.SerpWsum + del zarr + rjt=np.resize(zw,(numpts,numpts)) + del zw + rj=np.transpose(rjt) + del rjt + self.logtime("Mem usage after rj: %d"%memory(),5) + # + # fastest way I could come up with to expand c array + # out into grids that have the same value for every + # gridpoint and every control point + # + tshape=(self.SerpDisq.shape[1],self.SerpDisq.shape[2],self.SerpDisq.shape[0]) + a1=self.SerpCc*rj + del rj + a2=np.add.reduce(a1) + del a1 + a3=np.resize(a2,tshape) + del a2 + cbig=np.transpose(a3,(2,0,1)) + del a3 + gc.collect() + self.logtime("Mem usage after cbig calculation: %d"%memory(),5) + # + # calculate change grid by multiplying each gridpoint by the + # weight of each change point (and considering the distance + # squared between each gridpoint and the change point) + # + a1=cbig*self.SerpDisq + del cbig + a2=np.add.reduce(a1) + del a1 + gc.collect() + chg=a2/self.SerpDsum + del a2 + self.logtime("Mem usage after the chg calculation: %d"%memory(),5) + self.logtime("Done with serp calculations",2) + return chg + #--------------------------------------------------------------------------- + # setSerpCache - set size of the serp distance grids cache (in MB). 
The + # default value of 500MB allows for a significant speedup in the serp + # routines - by saving and re-using expensive distance calculations + # between runs. However, these are kept in memory and can cause the + # calculations to fail with 'out of memory' errors. You can set this + # value to 0 to NOT use any cache - but expect the analysis to run 20% + # slower each time. + # + def setSerpCache(self,value): + if ((value>=0) and (value<=1000)): + self.SerpCache=value + else: + self.logtime("SerpCache must be between 0 and 1000") + return + #--------------------------------------------------------------------------- + # Dsquared - An inverse distance squared weighting scheme. + # + def Dsquared(self,zlist,xlist,ylist,hlist,elevfactor,Topo, + landMask=None): + self.logtime("Running Distance Squared Calculations",2) + # + # Setup elevation and land/sea scaling + # + (numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask)=self.setupScaling(xlist, + ylist,hlist,elevfactor,Topo,landMask) + # + # turn lists into numeric python arrays + # + zarr=np.array(zlist,np.float32) + # + nearzero=np.logical_and(np.less(zarr,0.001),np.greater(zarr,-0.001)) + zarr[nearzero] = 0.001 + + newsize=(numpts,self.ymax,self.xmax) + + dsquared=np.zeros(newsize,np.float32) + dists=np.zeros(newsize,np.float32) + + self.logtime("Getting distances",3) + for k in range(numpts): + dist=self.getDistance(xarr[k],yarr[k],harr[k],scaledtopo,newlandMask) + dist[np.less(dist,0.000001)] = 0.000001 + dsquared[k]=(dist*dist).astype(np.float32) + dists[k]=dist.astype(np.float32) + self.logtime("Done getting distances",3) + + if self.DSquaredMaxPoints>0: + usePoints = min(int(self.DSquaredMaxPoints)-1,numpts-1) + sortdists=np.sort(dists,0) + finalDist=sortdists[usePoints] + + totweight=self.empty() + totsum=self.empty() + for k in range(numpts): + w=1.0/dsquared[k] + if self.DSquaredMaxPoints>0: + if self.DSquaredDist>0: + dd=self.DSquaredDist/self.gridres + 
finalDist=np.where(np.greater(dd,finalDist),dd,finalDist) + w[np.greater(dists[k],finalDist)] = 0.0 + elif self.DSquaredDist>0: + w[np.greater(dists[k],self.DSquaredDist/self.gridres)] = 0.0 + totweight=totweight+w + totsum=totsum+(zarr[k]*w) + + totweight[np.less(totweight,1.0e-200)] = 1.0 + chg=totsum/totweight + self.logtime("Done with Distance Squared calculations",2) + return chg + #--------------------------------------------------------------------------- + # setDSquaredDist - set the minimum distance used by the Distance Squared + # weighting algorithm. Only control points within this distance of a + # gridpoint will be used in calculating the weighted average. If set + # negative then the distance is calculated such that the nearest 5 + # control points are used at each gridpoint. + # + def setDSquaredDist(self,value): + self.DSquaredDist=value + if value<0.0: + self.logtime("Distance Squared distance will be infinite",1) + else: + self.logtime("Distance Squared distance will be %f"%value,1) + return + def setDSquaredMaxPoints(self,value): + self.DSquaredMaxPoints=value + if value>0: + self.logtime("Distance Squared number of points will now be %d"%value,1) + else: + self.logtime("Distance Squared number of points will now be infinite",1) + return + #----------------------------------------------------------------------- + # Barnes - A Barnes analysis routine + # + def Barnes(self,zlist,xlist,ylist,hlist,elevfactor, + Topo,landMask=None): + self.logtime("Running barnes calculations",2) + # + # Setup elevation and land/sea scaling + # + (numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask)=self.setupScaling(xlist, + ylist,hlist,elevfactor,Topo,landMask) + totDistSquared=self.getTotDistSquared(xarr,yarr,harr,larr) + totDist=np.sqrt(totDistSquared) + # + # Get distance squared of control points to every gridpoint + # + self.logtime("Getting distance squared between control points and gridpoints",3) + dists=np.zeros((numpts,self.ymax,self.xmax),np.float32) + for 
k in range(numpts): + d=self.getDistance(xarr[k],yarr[k],harr[k],scaledtopo,newlandMask)*self.gridres + dists[k]=(d*d).astype(np.float32) + # + # If BarnesSpacing is negative...they want it calculated + # + if self.BarnesSpacing<0: + self.logtime("Calculating Barnes Station Spacing",3) + if len(xlist)>1: + # + # get min distance of control points to each other + # + minDist=self.getMinDist(totDist) + # + # If <-50...Get average distance to closest neighbor + # + if self.BarnesSpacing<-50: + self.logtime(" using average distance of 'closest neighbor'",3) + total=np.add.reduce(minDist) + c=(total/len(xlist))*self.gridres + # + # otherwise...get maximum distance to closest neighbor + # + else: + self.logtime(" using furthest 'closest neighbor' for all control points",3) + c=np.maximum.reduce(minDist)*self.gridres + else: + c=50 + self.logtime("Calculated Barnes Station Spacing = %.2f km"%c,3) + else: + c=self.BarnesSpacing + self.logtime("Using a Barnes Station Spacing of %.2f km"%c,3) + # + # The Barnes 'kappa' value depends on twice the barnes distance + # + kappa=5.052*(((2.0*c)/math.pi)**2) + self.logtime("Barnes kappa value= %f"%kappa,3) + # + # Barnes PASS 1 + # + self.logtime("Barnes Pass 1",3) + totweights=np.zeros((self.ymax,self.xmax),np.float32) + totsum=np.zeros((self.ymax,self.xmax),np.float32) + for k in range(numpts): + # + # get scaled distance squared divided by kappa + # + xx=dists[k]/kappa + # + # Barnes weight is e taken to the negative xx power - + # but make sure it isn't huge - which would return a zero weight + # + xx[np.greater(xx,200.0)] = 200.0 + w=(np.exp(xx*-1.0)).astype(np.float32) + totweights=totweights+w + # + # Calculate weight * point k value + # + z=zlist[k] + totsum = totsum + (w*z).astype(np.float32) + # + # Calculate weighted average. 
Sum of (weights * values) divided by + # the sum of weights (make sure sum of weights is non-zero) + # + totweights[np.less(totweights,1.0e-200)] = 1.0e-200 + chg=totsum/totweights + # + # Barnes PASS 2 + # + self.logtime("Barnes Pass 2",3) + totweights=np.zeros((self.ymax,self.xmax),np.float32) + totsum=np.zeros((self.ymax,self.xmax),np.float32) + for k in range(numpts): + # + # get scaled distance squared divided by gamma *kappa + # + xx=dists[k]/(self.BarnesGamma*kappa) + # + # Barnes weight is e taken to the negative xx power - + # but make sure it isn't huge - which would return a zero weight + # + xx[np.greater(xx,200.0)] = 200.0 + w=(np.exp(xx*-1.0)).astype(np.float32) + totweights=totweights+w + # + # In second pass...weighting the difference between the + # point k value, and the change calcuated in the first pass + # + x=int(xarr[k]) + y=int(yarr[k]) + zdiff=zlist[k]-chg[y,x] + totsum = totsum + (w*zdiff).astype(np.float32) + # + # Calculate weighted average. Sum of (weights * values) divided by + # the sum of weights (make sure sum of weights is non-zero) + # + totweights[np.less(totweights,1.0e-200)] = 1.0e-200 + chg2=totsum/totweights + # + # Add the adjustment from PASS 2 to PASS 1 + # + chg=chg+chg2 + # + # Return the adjustment + # + self.logtime("Done with Barnes calculations",2) + return chg + #--------------------------------------------------------------------------- + # setBarnesGamma - set the gamma values used in the second pass of Barnes + # algorithm. By default it is 0.3, but the user can set it to anything + # between 0.0 and 1.0 + # + def setBarnesGamma(self,value): + if ((value>=0.0) and (value<=1.0)): + self.BarnesGamma=value + else: + self.logtime("Barnes Gamma must be between 0.0 and 1.0") + return + #--------------------------------------------------------------------------- + # setBarnesSpacing - set the station spacing used by the Barnes algorithm. 
+ # Basically data for wavelengths less than 2 times this distance are + # removed by the analysis. If set to a negative value, the Barnes + # routine will calculate this by finding the distance to the nearest + # neighbor for each control point...and then finding the maximum (the + # 'furthest closest neighbor'). If less than -50, it will take the + # average of the distances to the closest neighbors (the more + # traditional Barnes value). + # + def setBarnesSpacing(self,value): + self.BarnesSpacing=value + if value<0.0: + self.logtime("Barnes Station Spacing will be calculated",1) + return + #--------------------------------------------------------------------------- + # setupScaling - setup all the numeric arrays for the control point + # locations...based on any elevation and land/sea scaling + # + def setupScaling(self,xlist,ylist,hlist,elevfactor,Topo,landMask): + # + # Number of control points + # + numpts=len(xlist) + # + # scaling topo + # + (halist,scaledtopo)=self.setupElev(xlist,ylist,hlist,elevfactor,Topo) + # + # setup the land/water mask + # + if landMask is None: + newlandMask=(Topo*0.0)+1.0 + else: + newlandMask=landMask + llist=self.setupLandWater(xlist,ylist,newlandMask) + # + # setup arrays + # + xarr=np.array(xlist,np.float32) + yarr=np.array(ylist,np.float32) + harr=np.array(halist,np.float32) + larr=np.array(llist,np.float32) + # + # + # + return(numpts,xarr,yarr,harr,larr,scaledtopo,newlandMask) + #--------------------------------------------------------------------------- + # getTotDistSquared - get "total" distance between each point and every + # other point. This includes the elevation distance, and the + # land/water. 
+ # + def getTotDistSquared(self,xarr,yarr,harr,larr): + xd=np.square(self.getComponentDiff(xarr)) + yd=np.square(self.getComponentDiff(yarr)) + ld=np.square(self.getComponentDiff(larr)) + hd=np.square(self.getComponentDiff(harr)+(ld*10000.0)) + return(xd+yd+hd) + #--------------------------------------------------------------------------- + # useActualElev - set options so that actual station elevation will be used + # when calculating "distance" of a gridpoint from the observation. + # + def useActualElev(self): + self.ActualElev=1 + return + #-------------------------------------------------------------------------- + # useGridElev - set options so that elevation of the gridpoint that + # contains an observation will be used when calculating the "distance" + # of a gridpoint from the observation + # + def useGridElev(self): + self.ActualElev=0 + return + #--------------------------------------------------------------------------- + # getDistance - get a grid of distance from a single point with coordinates + # xval,yval and elevation hval. This distance is in terms of + # grid-spacing - not physical distance units like km. The distance + # includes difference between the hval elevation and the topography + # grid passed in via scaledtopo. Also differences in the land/water + # mask between the point and each gridpoint count strongly in the + # distance calculation). + # + def getDistance(self,xval,yval,hval,scaledtopo,landMask): + ix=int(xval) + iy=int(yval) + xs=np.square(self.Irow-ix) + ys=np.square(self.Jrow-iy) + horizdist=np.add.outer(ys,xs) + # + # + # + if self.ActualElev==0: + elev=scaledtopo[iy,ix] + else: + elev=hval + ed=scaledtopo-elev + # + # A land/water difference counts as 10000 in scaled elevation + # units. 
+ # + land=landMask[iy,ix] + ld=np.square(land-landMask) + ed2=np.square(ed+(ld*10000.0)) + # + # + # + dist=np.sqrt(horizdist+ed2) + return dist + #--------------------------------------------------------------------------- + # getMinDist - the minimum distance between a control point and all other + # control points (elevation and land/water is considered) - but this is + # in terms of gridpoints - not km + # + def getMinDist(self,totDist): + d=np.where(np.less(totDist,0.001),2*self.xmax,totDist) + dmin=np.minimum.reduce(d) + return dmin + #--------------------------------------------------------------------------- + # getSerpRemoteness - a multiplier for the serp weight - such that "remote" + # points (ones without many neighbors) are weighted more strongly than + # points that are very near other points. This keeps 'clustered' + # control points from dominating the analysis - since there might be + # many clustered points giving basically the same info. + # + def getSerpRemoteness(self,totDist): + numpts=totDist.shape[0] + # + # special cases: + # only 1 point: remoteness is 1.0 + # + if (numpts==1): + ren=np.array([1.0]).astype(np.float32) + return ren + # + # two points is easy - remoteness is 0.5 + # + if (numpts==2): + ren=np.array([0.5,0.5]).astype(np.float32) + return ren + # + # sort the distances...so for each point we have the + # distances to its neighbors in sorted order + # + dsort=np.sort(totDist,0) + # + # The distance of each point to its nearest neighbor is now + # in dsort[1,:] + # + dmax=dsort[:,:] + mostremote=np.maximum.reduce(dmax) + # + # add up distances from each point to each neighbor point + # + dsums=np.add.accumulate(dsort) + dsumsflat=dsums.flat + # + # get rid of all accumulated distances greater than most remote + # that way maximum value in each column will be the one where + # distance is less or equal to mostremote distance + # + dloc=np.where(np.greater(dsums,mostremote),np.float32(0),dsums) + # + # get total distance up to 
the point where it is less than mostremote + # + dint=np.argmax(dloc,0) + dintindex=(dint*numpts)+np.arange(numpts) + valuebefore=np.take(dsumsflat,dintindex) + # + # get total distance at point where it is more than most remote + # + dnext=dint+1 + dnextindex=(dnext*numpts)+np.arange(numpts) + valueafter=np.take(dsumsflat,dnextindex) + # + # get fractional part of points + # + frac=(mostremote-valuebefore)/(valueafter-valuebefore) + # + # get total number of points to make the most remote distance + # and take reciprocal + # + npt=dint+frac + factor=1.0/npt + # + # divide by sum of all factors - so they add to 1.0 + # + factorsum=np.add.reduce(factor) + ren=(factor/factorsum).astype(np.float32) + # + # + # + return ren + #--------------------------------------------------------------------------- + # setupElev - use the elevfactor to change real Topo into a 'scaled topo', + # as well as changing actual station elevations in hlist into 'scaled + # elevations' in scaledhlist. + # + # elevfactor should be in units of feet/km. If you set it to 1, then + # 1 foot of elevation difference is equivalent to 1km of horizontal + # distance (this means that elevation is VERY important in the + # analysis). If you set it to 1000, then 1000 feet of elevation + # difference is equal to 1 km of horizontal distance (this means that + # elevation is NOT important to the analysis). To turn off elevation + # completely - set the elevfactor to zero. 
+ # + def setupElev(self,xlist,ylist,hlist,elevfactor,Topo): + + scaledhlist=[] + if elevfactor>0.001: + factor=elevfactor*self.gridres + scaledtopo=Topo/factor + for i in range(len(hlist)): + h=hlist[i] + if self.ActualElev==0: + scaledhlist.append(scaledtopo[ylist[i],xlist[i]]) + else: + scaledhlist.append(h/factor) + else: + scaledtopo=Topo*0.0 + for h in hlist: + scaledhlist.append(0.0) + return(scaledhlist,scaledtopo) + #--------------------------------------------------------------------------- + # setupLandWater - setup a list that contains the value of the landMask + # grid for every point in the xlist,ylist locations. It doesn't really + # matter - but the convention is that land=1 and water=0 + # + def setupLandWater(self,xlist,ylist,landMask): + llist=[] + for i in range(len(xlist)): + x=xlist[i] + y=ylist[i] + if landMask is None: + llist.append(1) + else: + llist.append(landMask[y,x]) + return llist + #--------------------------------------------------------------------------- + # getComponentDiff - get difference between all control points + # + def getComponentDiff(self,xloc): + xd=-(np.subtract.outer(xloc,xloc)) + return xd + #--------------------------------------------------------------------------- + # getGridSpacing - get 'rough grid spacing' by getting the distance between + # the corners of the GFE grid and dividing by the number of points. 
+ # + def getGridSpacing(self): + (lat1,lon1)=self.getLatLon(0.0, 0.0) + (lat2,lon2)=self.getLatLon(self.xmax-1.0, self.ymax-1.0) + hypot=math.hypot(self.xmax-1.0, self.ymax-1.0) + spacing1=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot + (lat1,lon1)=self.getLatLon(0.0, self.ymax-1.0) + (lat2,lon2)=self.getLatLon(self.xmax-1.0, 0.0) + spacing2=self.getCircleDistance(lat1,lon1,lat2,lon2)/hypot + avgspacing=(spacing1+spacing2)/2.0 + return avgspacing + #--------------------------------------------------------------------------- + # getCircleDistance - get the 'great circle distance' between two lat lon + # points (in km) + # + def getCircleDistance(self,lat1,lon1,lat2,lon2): + DTR=math.pi/180.0 + lat1r=lat1*DTR + lon1r=lon1*DTR + lat2r=lat2*DTR + lon2r=lon2*DTR + dl=lon2r-lon1r + a=(math.acos((math.sin(lat1r)*math.sin(lat2r))+(math.cos(lat1r)*\ + math.cos(lat2r)*math.cos(dl))))/DTR + return(a*1.852*60) + #--------------------------------------------------------------------------- + # setVerbose - set 'verbosity' of logging. By default sets to 1, but + # can set higher to see even more detailed messages. + # 0=no messages (only errors) + # 1=simple message saying doing analysis + # 2=add messages about pieces of analysis being done + # 3=add messages with more timing information + # 4=add listing of all point obs used in analysis + # 5=add memory usage messages + # + def setVerbose(self,value=1): + self.verbose=value + return + #--------------------------------------------------------------------------- + # setQuiet - set 'verbosity' to zero so that only required (level=0) + # log messages are output. + # + def setQuiet(self): + self.verbose=0 + return + #--------------------------------------------------------------------------- + # setMissingElevThreshold - set the MissingElevThreshold value + # Obs with elevation values less than or equal to this threshold + # will use the topo grid elevation instead, even if ActualElev is set + # to 1. 
+ # + def setMissingElevThreshold(self,value): + self.MissingElevThreshold=value + return + #--------------------------------------------------------------------------- + # logtime - write a string with date/time stamp. Can dynamically control + # which get printed by using the importance and verbosity settings. + # Will only print message with importance less or equal to verbosity + # setting. (in other words, importance=0 are VERY IMPORTANT messages + # that are always printed. importance=1 are only shown when Verbose + # is 1 or greater, etc.). + # + def logtime(self,string,importance=0): + if importance<=self.verbose: + tt=self._gmtime().timetuple() + ts="%4.4d/%2.2d/%2.2d %2.2d:%2.2d:%2.2d"%(tt[0],tt[1],tt[2],tt[3],tt[4],tt[5]) + print("%s|ObjAnal - %s" % (ts,string)) + sys.stdout.flush() + return +# +# debug stuff for memory usage +# +_proc_status="/proc/%d/status"%os.getpid() +_scale={'kB':1024.0,'mB':1024.0*1024.0, + 'KB':1024.0,'MB':1024.0*1024.0} +def _VmB(VmKey): + try: + t=open(_proc_status) + v=t.read() + t.close() + except IOError: + return 0.0 + i=v.index(VmKey) + v=v[i:].split(None,3) + if len(v)<3: + return 0.0 + return float(v[1])*_scale[v[2]] +def memory(): + return _VmB('VmSize:') +def resident(): + return _VmB('VmRSS:') + diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProcedureInterface.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProcedureInterface.py index 446afd83b9..9e3753e1fc 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProcedureInterface.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProcedureInterface.py @@ -1,128 +1,128 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +# +# Globally import and sets up instances of the procedure scripts. +# Designed to be used as a master controller for inspecting and running +# procedures from Java. +# +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 11/05/08 njensen Initial Creation. +# 01/17/13 1486 dgilling Re-factor based on +# RollbackMasterInterface. +# 07/27/15 4263 dgilling Support refactored Java +# ProcedureControllers. +# 02/19/18 7222 mapeters Log canceled procedures. +# # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# Globally import and sets up instances of the procedure scripts. -# Designed to be used as a master controller for inspecting and running -# procedures from Java. -# -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 11/05/08 njensen Initial Creation. 
-# 01/17/13 1486 dgilling Re-factor based on -# RollbackMasterInterface. -# 07/27/15 4263 dgilling Support refactored Java -# ProcedureControllers. -# 02/19/18 7222 mapeters Log canceled procedures. -# -# -# - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -import logging -import sys -import Exceptions - -import JUtil -import ProcessVariableList -import RollbackMasterInterface -import UFStatusHandler - - -PLUGIN_NAME = 'com.raytheon.viz.gfe' -CATEGORY = 'GFE' - - -class ProcedureInterface(RollbackMasterInterface.RollbackMasterInterface): - - def __init__(self, scriptPath): - super(ProcedureInterface, self).__init__(scriptPath) - - logging.basicConfig(level=logging.INFO) - self.log = logging.getLogger("ProcedureInterface") - self.log.addHandler(UFStatusHandler.UFStatusHandler(PLUGIN_NAME, CATEGORY)) - - self.importModules() - - def __getProcedureInfo(self, script, dataMgr): - menus = self.getMenuName(script) - argNames = self.getMethodArgNames(script, "Procedure", "execute") - varDict = self.getVariableListInputs(script) - return menus, argNames, varDict - - def getScripts(self, dataMgr): - from java.util import HashMap - from com.raytheon.viz.gfe.procedures import ProcedureMetadata - - scriptList = HashMap() - for script in self.scripts: - try: - (menus, argNames, varDict) = self.__getProcedureInfo(script, dataMgr) - name = str(script) - if not menus: - menus = [] - menus = JUtil.pyValToJavaObj(menus) - argNames = JUtil.pyValToJavaObj(argNames) - metadata = ProcedureMetadata(name, menus, argNames, varDict) - scriptList.put(name, metadata) - except: - self.log.exception("Unable to load metadata for procedure " + script) - - return scriptList - - def addModule(self, moduleName): - super(ProcedureInterface, self).addModule(moduleName) - - def removeModule(self, moduleName): - super(ProcedureInterface, self).removeModule(moduleName) - - def 
getMethodArgNames(self, moduleName, className, methodName): - args = self.getMethodArgs(moduleName, className, methodName) - return JUtil.pyValToJavaObj(args) - - def runProcedure(self, moduleName, className, methodName, **kwargs): - try: - return self.runMethod(moduleName, className, methodName, **kwargs) - except Exceptions.EditActionError, e: - if "Cancel" == e.errorType() and "Cancel" == e.errorInfo(): - self.log.info("Procedure [" + moduleName + "] canceled") - return None - raise - - def getMenuName(self, name): - return getattr(sys.modules[name], "MenuItems", []) - - def getVariableList(self, name): - return getattr(sys.modules[name], "VariableList", []) - - def getVariableListInputs(self, name): - varList = self.getVariableList(name) - return ProcessVariableList.buildWidgetList(varList) - - def reloadModule(self, moduleName): - super(ProcedureInterface, self).reloadModule(moduleName) +# + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. 
+## + +import logging +import sys +import Exceptions + +import JUtil +import ProcessVariableList +import RollbackMasterInterface +import UFStatusHandler + + +PLUGIN_NAME = 'com.raytheon.viz.gfe' +CATEGORY = 'GFE' + + +class ProcedureInterface(RollbackMasterInterface.RollbackMasterInterface): + + def __init__(self, scriptPath): + super(ProcedureInterface, self).__init__(scriptPath) + + logging.basicConfig(level=logging.INFO) + self.log = logging.getLogger("ProcedureInterface") + self.log.addHandler(UFStatusHandler.UFStatusHandler(PLUGIN_NAME, CATEGORY)) + + self.importModules() + + def __getProcedureInfo(self, script, dataMgr): + menus = self.getMenuName(script) + argNames = self.getMethodArgNames(script, "Procedure", "execute") + varDict = self.getVariableListInputs(script) + return menus, argNames, varDict + + def getScripts(self, dataMgr): + from java.util import HashMap + from com.raytheon.viz.gfe.procedures import ProcedureMetadata + + scriptList = HashMap() + for script in self.scripts: + try: + (menus, argNames, varDict) = self.__getProcedureInfo(script, dataMgr) + name = str(script) + if not menus: + menus = [] + menus = JUtil.pyValToJavaObj(menus) + argNames = JUtil.pyValToJavaObj(argNames) + metadata = ProcedureMetadata(name, menus, argNames, varDict) + scriptList.put(name, metadata) + except: + self.log.exception("Unable to load metadata for procedure " + script) + + return scriptList + + def addModule(self, moduleName): + super(ProcedureInterface, self).addModule(moduleName) + + def removeModule(self, moduleName): + super(ProcedureInterface, self).removeModule(moduleName) + + def getMethodArgNames(self, moduleName, className, methodName): + args = self.getMethodArgs(moduleName, className, methodName) + return JUtil.pyValToJavaObj(args) + + def runProcedure(self, moduleName, className, methodName, **kwargs): + try: + return self.runMethod(moduleName, className, methodName, **kwargs) + except Exceptions.EditActionError as e: + if "Cancel" == e.errorType() 
and "Cancel" == e.errorInfo(): + self.log.info("Procedure [" + moduleName + "] canceled") + return None + raise + + def getMenuName(self, name): + return getattr(sys.modules[name], "MenuItems", []) + + def getVariableList(self, name): + return getattr(sys.modules[name], "VariableList", []) + + def getVariableListInputs(self, name): + varList = self.getVariableList(name) + return ProcessVariableList.buildWidgetList(varList) + + def reloadModule(self, moduleName): + super(ProcedureInterface, self).reloadModule(moduleName) diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProcessVariableList.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProcessVariableList.py index 7202bcbbc5..d6f3b6896b 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProcessVariableList.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProcessVariableList.py @@ -1,205 +1,205 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -## Python interface to the Java runtime UI SWT component. -## -## This interface provides a link between user created python scripts and the Java driven user interface. -## This script is to be called from within Java through a jep interface. -## -## If an invalid widget type is encountered a Label widget is generated instead with a tooltip explaining what went wrong. -## -##
-## SOFTWARE HISTORY
-## Date            Ticket#        Engineer    Description
-## ------------    ----------    -----------    --------------------------
-## Jun 11, 2008    1164           jelkins    Initial creation
-## Jun 16, 2008    1164           jelkins    Implemented Callback Handling
-## Jun 23, 2008    1164           jelkins    Support 3 argument widget tuple
-## Nov 28, 2017    6540           randerso   Set default precision to 0 to 
-##                                           match default resolution
-## 
-## 
-## -## @author jelkins -## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -from com.raytheon.viz.gfe.ui.runtimeui import ValuesDialog -from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID, ParmID -from com.raytheon.uf.common.time import TimeRange - -import JUtil, DatabaseID, ParmID, TimeRange - -import types - -class ProcessVariableList: - - ## Class constructor - ## - ## The following arguments are no longer used but remain part of the signature for compatibility reasons - ## * parent - not needed - ## * modal - similar functionality is achieved by setting or not setting a callback - ## * dataMgr - required for model and D2D_model widgets - ## * cmdLineVarDict - not needed, see TODO - ## - ## TODO dbSubsytem may be needed when implementing the model and D2D_model widgets - ## - ## @param title: str() of the title of the dialog that will appear - ## @param varList: list() of widgets to to place on the dialog - ## @param varDict: dict() onto which to add widget values - ## - ## @param runCB: function() pointer to a callback function which takes two arugments argList (see below) and varDict - ## @param argList: list() of arguments to pass to the callback function - ## - def __init__(self, title, varList, varDict=None, parent=None, - dataMgr=None, modal=1, runCB=None, argList=[], - cmdLineVarDict=None): - - self.__varDict = varDict - if self.__varDict is None: - self.__varDict = {} - self.__callbackResult = None - - # build widgetList - widgetList = buildWidgetList(varList) - - # Construct the dialog - self.__dialog = ValuesDialog.openDialog(title, widgetList, dataMgr) - #self.__dialog = ValuesDialog(title,widgetList) - - # since ValuesDialog blocks on open() we can set status and varDict here - values = JUtil.javaMapToPyDict(self.__dialog.getValues(), self.__convertJavaObjToPyWrapper) - - self.__varDict.update(values) - - from 
com.raytheon.uf.viz.python.swt import ButtonConstant - self.__selectionStatus = str(ButtonConstant.getButton(self.__dialog.getReturnCode())) - - # Handle callbacks - #if runCB is not None: - # self.__callbackLoop(runCB,argList) - #else: - # self.__dialog.open() - - ## The following methods may no longer be needed as they are mostly taken care of from within the ValuesDialog - ## def __getCmdLineSelections(self, varList, cmdLineVarDict): - ## def __getSelections(self, title, varList, varDict, parent): - ## def selectionCB(self, entry, status="Ok"): - - # display UI until the user clicks the Cancel or Run/Dismiss button - # when the user clicks the Run button evaluate the callback and keep the display - # - # TODO find a way to keep the dialog from noticably disappearing and re-appearing when the run button is selected - # - # @param callback: function() callback with signature (list(),dict()) - # @param callbackArguments: list() of arguments to pass to the callback - def __callbackLoop(self,callback,callbackArguments): - from com.raytheon.uf.viz.python.swt import ButtonConstant - - self.__dialog.setCloseAfterRun(True) - self.__dialog.open() - - # poll for button press (can't use self.status() because it only returns Cancel, Ok - button = self.status() - - # execute the callback - if button == "Run" or button == "Run/Dismiss": - self.__callbackResult = callback(callbackArguments,self.varDict()) - - # break the cycle - if button == "Cancel" or button == "Run/Dismiss": - return - - # replace the initial widget values with the updated ones - - self.__callbackLoop(callback, callbackArguments) - - def __convertJavaObjToPyWrapper(self, javaObj): - objtype = javaObj.java_name - if objtype == "com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID": - retVal = DatabaseID.DatabaseID(javaObj) - elif objtype == "com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID": - retVal = ParmID.ParmID(javaObj) - elif objtype == "com.raytheon.uf.common.time.TimeRange": - retVal = 
TimeRange.TimeRange(javaObj) - else: - retVal = None - - return retVal - - # @return: str() of the dialog button that was pressed - def status(self): - return self.__selectionStatus - - # @return: dict() of the values from the widgets - def varDict(self): - return dict(self.__varDict) - - # @return: the result of the callback - def lastOkReturn(self): - return self.__callbackResult - -# build a Java list capable of being passed to the ValuesDialog -# -# @param widgetList: list() of widgets to add to the dialog -# -# @return: Java List of widgets -def buildWidgetList(pythonWidgetList): - - from java.util import ArrayList - from com.raytheon.viz.gfe.smartscript import FieldDefinition - FieldType = FieldDefinition.FieldType - widgetList = ArrayList() - - for widget in pythonWidgetList: - - res = 1.0 # Default resolution - prec = 0 # Default precision - valueList = [] - - # unpack the tuple - if len(widget) == 3: - name,defaultValue,entType = widget - if len(widget) == 4: - name,defaultValue,entType,valueList = widget - if len(widget) == 5: - name,defaultValue,entType,valueList,res = widget - if len(widget) == 6: - name,defaultValue,entType,valueList,res,prec = widget - # Handle possibility of (label, variable) tuple - if type(name) is types.TupleType: - desc = name[0] - else: - desc = name - - w = FieldDefinition(JUtil.pyValToJavaObj(name),desc,FieldType.convertPythonType(entType), - JUtil.pyValToJavaObj(defaultValue),JUtil.pyValToJavaObj(valueList), - float(res),int(prec)) - widgetList.add(w) - - - return widgetList - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +## Python interface to the Java runtime UI SWT component. +## +## This interface provides a link between user created python scripts and the Java driven user interface. 
+## This script is to be called from within Java through a jep interface. +## +## If an invalid widget type is encountered a Label widget is generated instead with a tooltip explaining what went wrong. +## +##
+## SOFTWARE HISTORY
+## Date            Ticket#        Engineer    Description
+## ------------    ----------    -----------    --------------------------
+## Jun 11, 2008    1164           jelkins    Initial creation
+## Jun 16, 2008    1164           jelkins    Implemented Callback Handling
+## Jun 23, 2008    1164           jelkins    Support 3 argument widget tuple
+## Nov 28, 2017    6540           randerso   Set default precision to 0 to 
+##                                           match default resolution
+## 
+## 
+## +## @author jelkins +## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +from com.raytheon.viz.gfe.ui.runtimeui import ValuesDialog +from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID, ParmID +from com.raytheon.uf.common.time import TimeRange + +import JUtil, DatabaseID, ParmID, TimeRange + +import types + +class ProcessVariableList: + + ## Class constructor + ## + ## The following arguments are no longer used but remain part of the signature for compatibility reasons + ## * parent - not needed + ## * modal - similar functionality is achieved by setting or not setting a callback + ## * dataMgr - required for model and D2D_model widgets + ## * cmdLineVarDict - not needed, see TODO + ## + ## TODO dbSubsytem may be needed when implementing the model and D2D_model widgets + ## + ## @param title: str() of the title of the dialog that will appear + ## @param varList: list() of widgets to to place on the dialog + ## @param varDict: dict() onto which to add widget values + ## + ## @param runCB: function() pointer to a callback function which takes two arugments argList (see below) and varDict + ## @param argList: list() of arguments to pass to the callback function + ## + def __init__(self, title, varList, varDict=None, parent=None, + dataMgr=None, modal=1, runCB=None, argList=[], + cmdLineVarDict=None): + + self.__varDict = varDict + if self.__varDict is None: + self.__varDict = {} + self.__callbackResult = None + + # build widgetList + widgetList = buildWidgetList(varList) + + # Construct the dialog + self.__dialog = ValuesDialog.openDialog(title, widgetList, dataMgr) + #self.__dialog = ValuesDialog(title,widgetList) + + # since ValuesDialog blocks on open() we can set status and varDict here + values = JUtil.javaMapToPyDict(self.__dialog.getValues(), self.__convertJavaObjToPyWrapper) + + self.__varDict.update(values) + + from 
com.raytheon.uf.viz.python.swt import ButtonConstant + self.__selectionStatus = str(ButtonConstant.getButton(self.__dialog.getReturnCode())) + + # Handle callbacks + #if runCB is not None: + # self.__callbackLoop(runCB,argList) + #else: + # self.__dialog.open() + + ## The following methods may no longer be needed as they are mostly taken care of from within the ValuesDialog + ## def __getCmdLineSelections(self, varList, cmdLineVarDict): + ## def __getSelections(self, title, varList, varDict, parent): + ## def selectionCB(self, entry, status="Ok"): + + # display UI until the user clicks the Cancel or Run/Dismiss button + # when the user clicks the Run button evaluate the callback and keep the display + # + # TODO find a way to keep the dialog from noticably disappearing and re-appearing when the run button is selected + # + # @param callback: function() callback with signature (list(),dict()) + # @param callbackArguments: list() of arguments to pass to the callback + def __callbackLoop(self,callback,callbackArguments): + from com.raytheon.uf.viz.python.swt import ButtonConstant + + self.__dialog.setCloseAfterRun(True) + self.__dialog.open() + + # poll for button press (can't use self.status() because it only returns Cancel, Ok + button = self.status() + + # execute the callback + if button == "Run" or button == "Run/Dismiss": + self.__callbackResult = callback(callbackArguments,self.varDict()) + + # break the cycle + if button == "Cancel" or button == "Run/Dismiss": + return + + # replace the initial widget values with the updated ones + + self.__callbackLoop(callback, callbackArguments) + + def __convertJavaObjToPyWrapper(self, javaObj): + objtype = javaObj.java_name + if objtype == "com.raytheon.uf.common.dataplugin.gfe.db.objects.DatabaseID": + retVal = DatabaseID.DatabaseID(javaObj) + elif objtype == "com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID": + retVal = ParmID.ParmID(javaObj) + elif objtype == "com.raytheon.uf.common.time.TimeRange": + retVal = 
TimeRange.TimeRange(javaObj) + else: + retVal = None + + return retVal + + # @return: str() of the dialog button that was pressed + def status(self): + return self.__selectionStatus + + # @return: dict() of the values from the widgets + def varDict(self): + return dict(self.__varDict) + + # @return: the result of the callback + def lastOkReturn(self): + return self.__callbackResult + +# build a Java list capable of being passed to the ValuesDialog +# +# @param widgetList: list() of widgets to add to the dialog +# +# @return: Java List of widgets +def buildWidgetList(pythonWidgetList): + + from java.util import ArrayList + from com.raytheon.viz.gfe.smartscript import FieldDefinition + FieldType = FieldDefinition.FieldType + widgetList = ArrayList() + + for widget in pythonWidgetList: + + res = 1.0 # Default resolution + prec = 0 # Default precision + valueList = [] + + # unpack the tuple + if len(widget) == 3: + name,defaultValue,entType = widget + if len(widget) == 4: + name,defaultValue,entType,valueList = widget + if len(widget) == 5: + name,defaultValue,entType,valueList,res = widget + if len(widget) == 6: + name,defaultValue,entType,valueList,res,prec = widget + # Handle possibility of (label, variable) tuple + if type(name) is tuple: + desc = name[0] + else: + desc = name + + w = FieldDefinition(JUtil.pyValToJavaObj(name),desc,FieldType.convertPythonType(entType), + JUtil.pyValToJavaObj(defaultValue),JUtil.pyValToJavaObj(valueList), + float(res),int(prec)) + widgetList.add(w) + + + return widgetList + \ No newline at end of file diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProductParser.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProductParser.py index f2c5116ff4..1fc63d0e85 100644 --- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProductParser.py +++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/ProductParser.py @@ -1,313 +1,313 @@ -#!/usr/bin/env python -## -# 
This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +#!/usr/bin/env python +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## + +# CI block +# - wmoAbrevHeading (TTAAii CCCC DDHHMM [BBB]) +# * TTAAii (data type/location) +# * ID of issuing office +# * product issuance time (UTC) +# * "funny field" +# - awipsID (NNNXXX) (PIL) +# * NNN = specific product categogy (PIL) +# * XXX = NWS location id + +# blank line + +# MND block +# - [Broacast instruction] +# * WORD _'-'_ junk +# - Product type (one line) +# At end of line (optional) ...UPDATED/AMENDED/CORRECTED/ +# RESENT/DELAYED/TEST +# with TEST "TEST..." 
is also prepended to the line +# - Issuing office +# (NATIONAL WEATHER SERVICE CITY STATE) (one line) +# or +# (NWS NATIONALCENTER CITY STATE) +# or (special cases follow) +# ISSUED_BY_NATIONAL_WEATHER_SERVICE_CITY_SS (second line) +# or +# EXTERNAL_AGENCY_CITY/COUNTY/STATE_SS +# RELAYED_BY_NATIONAL_WEATHER_SERVICE_CITY_SS +# - Issuance data/time - local time +# HHMM (AM/PM LST or LDT) day_of_week(3 char) month(3 char) day year +# first H is not used if 0, both Ms required. +# Multiple times can be used (refer to same UTC time). These +# times are seperated by '/' characters. Line breaks can +# occur anywhere. '/' is just a seperator or maybe at end. +# blank line (optional and only if followed by: +# - (Optional) reason for action line starts with +# CORRECTED,UPDATED, or AMENDED. This line occurs after # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## - -# CI block -# - wmoAbrevHeading (TTAAii CCCC DDHHMM [BBB]) -# * TTAAii (data type/location) -# * ID of issuing office -# * product issuance time (UTC) -# * "funny field" -# - awipsID (NNNXXX) (PIL) -# * NNN = specific product categogy (PIL) -# * XXX = NWS location id - -# blank line - -# MND block -# - [Broacast instruction] -# * WORD _'-'_ junk -# - Product type (one line) -# At end of line (optional) ...UPDATED/AMENDED/CORRECTED/ -# RESENT/DELAYED/TEST -# with TEST "TEST..." 
is also prepended to the line -# - Issuing office -# (NATIONAL WEATHER SERVICE CITY STATE) (one line) -# or -# (NWS NATIONALCENTER CITY STATE) -# or (special cases follow) -# ISSUED_BY_NATIONAL_WEATHER_SERVICE_CITY_SS (second line) -# or -# EXTERNAL_AGENCY_CITY/COUNTY/STATE_SS -# RELAYED_BY_NATIONAL_WEATHER_SERVICE_CITY_SS -# - Issuance data/time - local time -# HHMM (AM/PM LST or LDT) day_of_week(3 char) month(3 char) day year -# first H is not used if 0, both Ms required. -# Multiple times can be used (refer to same UTC time). These -# times are seperated by '/' characters. Line breaks can -# occur anywhere. '/' is just a seperator or maybe at end. -# blank line (optional and only if followed by: -# - (Optional) reason for action line starts with -# CORRECTED,UPDATED, or AMENDED. This line occurs after -# - - - -# blank line - - -# Product content block - -# UGC "line" -# SSFNNN-NNN>NNN-SSFNNN-DDHHMM- -# - -# Warnings -# (optional) headlines -# -# attribution paragraph -# -# text (multiple paragraph) -# -# (optional) call to action (multiple paragraphs) - -#$$ - -import re, bisect - -sl = r'^' # start of line -el = r'\s*?\n' # end of line -id3 = r'[A-Za-z]{3}' # 3 charater word -empty = r'^\s*' + el # empty line - -wmoid = r'(?P[A-Z]{4}\d{2})' # wmoid -fsid = r'(?P[A-Z]{4})' # full station id -pit = r'(?P\d{6})' # product issuance time UTC -ff = r'(?P ' + id3 + ')?' # "funny" field - -# NWS time format -ntime = r'\d{3,4}\s+[A-Z]{2}\s+[1-Za-z]{3,4}\s+' + id3 + r'\s+' + id3 \ - + r'\s+\d{1,2}\s+\d{4}\s*?' -nwstime = sl + r'(?P' + ntime + r'(?:\s*/\s*\n?' + ntime + r'/)*\n)' -#nwstime = sl + r'(?P' + ntime + r'(?:/\s*\n?' + ntime + r')*/?\n)' - - -# CI block -ci_start = sl + wmoid + ' ' + fsid + ' ' + pit + ff + el -awipsid = r'(?P(?P[A-Z0-9]{3})(?P[A-Z0-9]{1,3}))' + el -ci_block = r'(?P' + ci_start + awipsid + '\n?)' #+ empty + r')' - -ci_re = re.compile(ci_block) - -# MND block -bi = r'(^(?P[A-Z]+) - (?P.*)\n)?' 
# broadcast instruction -pt = sl + r'(?P(?P.*)(?P\.\.\.[A-Z]+)*)' + el # product type -io = sl + r'(?P.*)\n' # issuing office -ibo = sl + r'((?P.*)\n)?' # issued by office -mnd = empty + r'(?P' + bi + pt + io + ibo + nwstime + r')' #+ empty - -mnd_re = re.compile(mnd, re.M) - -# UGC block -nnn = r'(?:[A-Z]{2}[ZC])?\d{3}' -purge = r'(?P\d{6})-' -ugc = r'\n(?P' + r'[A-Z]{2}[Z|C](?:(?:\d{3})|(?:ALL))' + r'(?:[->]\n?' + nnn + \ - r')*-\n?' + purge + el + r')' -cityh = r'(?<=-\n(?!.*-\n))(?P(?:.*\n))' - -body = r'(?P(?:^.*\n)*?)' -#body = r'.*' -term = r'(?P' + r'^\n\$\$\n)' -vtec = r'(?P(?:^/[-A-Z0-9.]+/\s+?)*)' - -updateWrds = r'(?:(?:UPDATED)|(?:CORRECTED)|(?:AMENDED))' -reason = r'(?P(?:^' + updateWrds + r'\s(?:.|\n)*?' + empty + r')?)' - -headlines = r'(?P(?:^\.\.\.(?:.|\n)*?\.\.\.\n)*)' - -#ugc_re = re.compile(r'(?P(?P
' + ugc + vtec + r'(?:^.*\n)*?' -# + '(?:' + nwstime + ')?' + r')' -# + empty + reason + headlines + body + r'^(?P\$\$))' -# + el, re.M) - -# MAFOR (funky marine product which omits the blank line -# after a ugc header) -mafor = r'(?:^MAFOR .*\n)' - -ugch_re = re.compile(ugc + vtec, re.M) -cityh_re = re.compile(cityh) -ghend_re1 = re.compile(r'(?:' + nwstime + r')', re.M) -ghend_re2 = re.compile(r'(?:' + empty + r'|' + mafor + r')', re.M) - -gend_re = re.compile(term, re.M) -headlines2 = r'(?P(?:^\.\.\.(?:.|\n)*?\.\.\.\s+?)+)' -head_re = re.compile(headlines2, re.M) - -# Single headline re -headlines3 = r'^\.\.\.(?:.|\n)*?\.\.\.\s+' -single_head_re = re.compile(headlines3, re.M) - -# Framing code -frame_re = re.compile(r'(?P\|\*(.|\n)*?\*\|)', re.M) - -# This is the list of words which will trigger an unlocked section -# of a headline -# locWords = r'((IN)(?!\s((EFFECT)|(PLACE)))|(ABOVE)|(BELOW)|(NEAR)|((FOR)(?!\s((MARINE)|(ROUGH BAR)|(TEST PURPOSES ONLY)|(WINDS)|(HAZARDOUS))))|(AROUND)|(DUE)|(ALONG)|(ACROSS)|(AWAY)|(NORTH)|(NORTHEAST)|(EAST)|(SOUTHEAST)|(SOUTH)|(SOUTHWEST)|(WEST)|(NORTHWEST))' -# local = r'(?P\s' + locWords + r'\s(.|\n)*?)?' -# headline = r'(?P
^\.\.\.(.|\n)*?)' + local + r'(?P(TEST)?\.\.\.\n)'
-# headline_re = re.compile(headline, re.M)
-
-# These words define the end of a locked section of headline
-headlineEnders = ['AFTERNOON', 'CANCELLED', 'EFFECT', 'EXPIRED',
-                  'EVENING', 'FRIDAY', 'MONDAY', 'MORNING', 'NIGHT',
-                  'SATURDAY', 'SUNDAY', 'THURSDAY', 'TODAY', 'TONIGHT',
-                  'TUESDAY', 'WEDNESDAY', 'IS FOR TEST PURPOSES ONLY']
-
-endWords = '(' + '|'.join(map(lambda x: '(' + x + ')', headlineEnders)) + ')'
-local = r'(?P(.|\n)*?)'
-headline = r'(?P
^\.\.\.((.|\n)*\s' + endWords + r')+)' + local \
-           + r'(?P(TEST)?\.\.\.\n)'
-headline_re = re.compile(headline, re.M)
-
-class ProductParser:
-    def __init__(self):
-        pass
-
-    # Convert an offset to a Tk line,col
-    def tkc(self, offset):
-        i = bisect.bisect(self._totals, offset) - 1
-        return (i+1, offset - self._totals[i])
-
-
-    def processHeadline(self, rval, m):
-        str = m.group('headlines')
-        start = m.start('headlines')
-        hdlns = []
-        #l = headline_re.finditer(str)
-        l = single_head_re.finditer(str)
-        for m in l:
-            if m is not None:
-                #print 'phl m = ', m
-                newstart = start + m.start()
-                m = headline_re.match(m.group(0))
-                if m is not None:
-                    hdlns.append(self.dumpMatch(m, newstart))
-
-        #print 'hdlns = ', hdlns
-        rval['headInfo'] = hdlns
-
-    def dumpMatch(self, m, offset=0, rval=None):
-        if rval is None:
-            rval = {}
-
-        #print 'dumpmatch m = ', m.groupdict()
-        for k in m.groupdict().keys():
-            if m.start(k) != -1 and m.start(k) != m.end(k):
-                if k == 'headlines':
-                    self.processHeadline(rval, m)
-                span = m.span(k)
-                rval[k] = (self.tkc(span[0] + offset),
-                           self.tkc(span[1] + offset))
-        #print 'dumpmatch rval = ', rval
-        return rval
-
-    def matchCoords(self, m):
-        return self.tkc(m.span(0)[0]), self.tkc(m.span(0)[1])
-
-    def parse(self):
-        rval = {}
-        m = ci_re.search(self._str)
-        if m is not None:
-            #print 'ci -- ', m.group()
-            rval['ci'] = self.dumpMatch(m)
-
-        m = mnd_re.search(self._str)
-        if m is not None:
-            #print 'mnd -- ', m.group()
-            rval['mnd'] = self.dumpMatch(m)
-
-        segs = []
-        l = ugch_re.finditer(self._str)
-
-        for m in l:
-            if m is not None:
-                m1 = cityh_re.search(self._str, m.end())
-                m21 = ghend_re1.search(self._str, m.end())
-                m22 = ghend_re2.search(self._str, m.end())
-                m3 = gend_re.search(self._str, m.end())
-                if m3 is None:
-                    continue
-                if m21 is not None and m21.start() < m3.start():
-                    m2 = m21
-                elif m22 is not None and m22.start() < m3.start():
-                    m2 = m22
-                else:
-                    continue
-                m4 = head_re.search(self._str, m.end(), m3.end())
-
-                d = self.dumpMatch(m)
-                d = self.dumpMatch(m2, rval=d)
-                d = self.dumpMatch(m3, rval=d)
-                d['header'] = (self.tkc(m.start('uhdr')),
-                               self.tkc(m2.end()))
-                if m1 is not None and m1.start('incc') < m2.start():
-                    d['city'] = (self.tkc(m1.start('incc')),
-                                 self.tkc(m2.start()))
-                    mm = frame_re.search(self._str, m1.start(), m2.start())
-                    if mm is not None:
-                        d['cframe'] = (self.tkc(mm.start()),
-                                       self.tkc(mm.end()))
-                else:
-                    d['city'] = (self.tkc(m2.start()),
-                                 self.tkc(m2.start()))
-
-                if m4 is not None:
-                    #print 'm4 = ', m4.group()
-                    d = self.dumpMatch(m4, rval=d)
-                d['ugc'] = (self.tkc(m.start() + 1),
-                            self.tkc(m3.end() - 1))
-                segs.append(d)
-        #print 'segs = ', segs
-        rval['segs'] = segs
-
-        frames = []
-        l = frame_re.finditer(self._str)
-        for m in l:
-            if m is not None:
-                frames.append(self.dumpMatch(m))
-        rval['frames'] = frames
-
-        return rval
-
-    def parseFromJava(self, text):
-        self._str = text
-        self._ci = None
-        lines = map(lambda x: len(x), text.splitlines(1))
-        count = 0
-        lc = []
-        for l in lines:
-            lc.append(count)
-            count += l
-        self._totals = lc
-
-        #print 'text START ----------------------'
-        #print text
-        #print 'text END ------------------------'
-
-        result = self.parse()
-
-        #print 'result = ', result
-
-        return result
+
+
+
+# blank line
+
+
+# Product content block
+
+# UGC "line"
+# SSFNNN-NNN>NNN-SSFNNN-DDHHMM-
+#
+
+# Warnings
+# (optional) headlines
+#
+# attribution paragraph
+#
+# text (multiple paragraph)
+#
+# (optional) call to action (multiple paragraphs)
+
+#$$
+
+import re, bisect
+
+sl = r'^'                            # start of line
+el = r'\s*?\n'                       # end of line
+id3 = r'[A-Za-z]{3}'                 # 3 charater word
+empty = r'^\s*' + el                 # empty line
+
+wmoid = r'(?P[A-Z]{4}\d{2})' # wmoid
+fsid  = r'(?P[A-Z]{4})'       # full station id
+pit   = r'(?P\d{6})'           # product issuance time UTC
+ff    = r'(?P ' + id3 + ')?'          # "funny" field
+
+# NWS time format
+ntime = r'\d{3,4}\s+[A-Z]{2}\s+[1-Za-z]{3,4}\s+' + id3 + r'\s+' + id3 \
+        + r'\s+\d{1,2}\s+\d{4}\s*?'
+nwstime = sl + r'(?P' + ntime + r'(?:\s*/\s*\n?' + ntime + r'/)*\n)'
+#nwstime = sl + r'(?P' + ntime + r'(?:/\s*\n?' + ntime + r')*/?\n)'
+
+
+# CI block
+ci_start = sl + wmoid + ' ' + fsid + ' ' + pit + ff + el
+awipsid = r'(?P(?P[A-Z0-9]{3})(?P[A-Z0-9]{1,3}))' + el
+ci_block = r'(?P' + ci_start + awipsid + '\n?)' #+ empty + r')'
+
+ci_re = re.compile(ci_block)
+
+# MND block
+bi = r'(^(?P[A-Z]+) - (?P.*)\n)?'  # broadcast instruction
+pt = sl + r'(?P(?P.*)(?P\.\.\.[A-Z]+)*)' + el # product type
+io = sl + r'(?P.*)\n'                            # issuing office
+ibo = sl + r'((?P.*)\n)?'                       # issued by office
+mnd = empty + r'(?P' + bi + pt + io + ibo + nwstime + r')' #+ empty
+
+mnd_re = re.compile(mnd, re.M)
+
+# UGC block
+nnn = r'(?:[A-Z]{2}[ZC])?\d{3}'
+purge = r'(?P\d{6})-'
+ugc = r'\n(?P' + r'[A-Z]{2}[Z|C](?:(?:\d{3})|(?:ALL))' + r'(?:[->]\n?' + nnn + \
+      r')*-\n?' + purge + el + r')'
+cityh = r'(?<=-\n(?!.*-\n))(?P(?:.*\n))'
+
+body = r'(?P(?:^.*\n)*?)'
+#body = r'.*'
+term = r'(?P' +  r'^\n\$\$\n)'
+vtec = r'(?P(?:^/[-A-Z0-9.]+/\s+?)*)'
+
+updateWrds = r'(?:(?:UPDATED)|(?:CORRECTED)|(?:AMENDED))'
+reason = r'(?P(?:^' + updateWrds + r'\s(?:.|\n)*?' + empty + r')?)'
+
+headlines = r'(?P(?:^\.\.\.(?:.|\n)*?\.\.\.\n)*)'
+
+#ugc_re = re.compile(r'(?P(?P
' + ugc + vtec + r'(?:^.*\n)*?' +# + '(?:' + nwstime + ')?' + r')' +# + empty + reason + headlines + body + r'^(?P\$\$))' +# + el, re.M) + +# MAFOR (funky marine product which omits the blank line +# after a ugc header) +mafor = r'(?:^MAFOR .*\n)' + +ugch_re = re.compile(ugc + vtec, re.M) +cityh_re = re.compile(cityh) +ghend_re1 = re.compile(r'(?:' + nwstime + r')', re.M) +ghend_re2 = re.compile(r'(?:' + empty + r'|' + mafor + r')', re.M) + +gend_re = re.compile(term, re.M) +headlines2 = r'(?P(?:^\.\.\.(?:.|\n)*?\.\.\.\s+?)+)' +head_re = re.compile(headlines2, re.M) + +# Single headline re +headlines3 = r'^\.\.\.(?:.|\n)*?\.\.\.\s+' +single_head_re = re.compile(headlines3, re.M) + +# Framing code +frame_re = re.compile(r'(?P\|\*(.|\n)*?\*\|)', re.M) + +# This is the list of words which will trigger an unlocked section +# of a headline +# locWords = r'((IN)(?!\s((EFFECT)|(PLACE)))|(ABOVE)|(BELOW)|(NEAR)|((FOR)(?!\s((MARINE)|(ROUGH BAR)|(TEST PURPOSES ONLY)|(WINDS)|(HAZARDOUS))))|(AROUND)|(DUE)|(ALONG)|(ACROSS)|(AWAY)|(NORTH)|(NORTHEAST)|(EAST)|(SOUTHEAST)|(SOUTH)|(SOUTHWEST)|(WEST)|(NORTHWEST))' +# local = r'(?P\s' + locWords + r'\s(.|\n)*?)?' +# headline = r'(?P
^\.\.\.(.|\n)*?)' + local + r'(?P(TEST)?\.\.\.\n)'
+# headline_re = re.compile(headline, re.M)
+
+# These words define the end of a locked section of headline
+headlineEnders = ['AFTERNOON', 'CANCELLED', 'EFFECT', 'EXPIRED',
+                  'EVENING', 'FRIDAY', 'MONDAY', 'MORNING', 'NIGHT',
+                  'SATURDAY', 'SUNDAY', 'THURSDAY', 'TODAY', 'TONIGHT',
+                  'TUESDAY', 'WEDNESDAY', 'IS FOR TEST PURPOSES ONLY']
+
+endWords = '(' + '|'.join(['(' + x + ')' for x in headlineEnders]) + ')'
+local = r'(?P(.|\n)*?)'
+headline = r'(?P
^\.\.\.((.|\n)*\s' + endWords + r')+)' + local \
+           + r'(?P(TEST)?\.\.\.\n)'
+headline_re = re.compile(headline, re.M)
+
+class ProductParser:
+    def __init__(self):
+        pass
+
+    # Convert an offset to a Tk line,col
+    def tkc(self, offset):
+        i = bisect.bisect(self._totals, offset) - 1
+        return (i+1, offset - self._totals[i])
+
+
+    def processHeadline(self, rval, m):
+        str = m.group('headlines')
+        start = m.start('headlines')
+        hdlns = []
+        #l = headline_re.finditer(str)
+        l = single_head_re.finditer(str)
+        for m in l:
+            if m is not None:
+                #print 'phl m = ', m
+                newstart = start + m.start()
+                m = headline_re.match(m.group(0))
+                if m is not None:
+                    hdlns.append(self.dumpMatch(m, newstart))
+
+        #print 'hdlns = ', hdlns
+        rval['headInfo'] = hdlns
+
+    def dumpMatch(self, m, offset=0, rval=None):
+        if rval is None:
+            rval = {}
+
+        #print 'dumpmatch m = ', m.groupdict()
+        for k in list(m.groupdict().keys()):
+            if m.start(k) != -1 and m.start(k) != m.end(k):
+                if k == 'headlines':
+                    self.processHeadline(rval, m)
+                span = m.span(k)
+                rval[k] = (self.tkc(span[0] + offset),
+                           self.tkc(span[1] + offset))
+        #print 'dumpmatch rval = ', rval
+        return rval
+
+    def matchCoords(self, m):
+        return self.tkc(m.span(0)[0]), self.tkc(m.span(0)[1])
+
+    def parse(self):
+        rval = {}
+        m = ci_re.search(self._str)
+        if m is not None:
+            #print 'ci -- ', m.group()
+            rval['ci'] = self.dumpMatch(m)
+
+        m = mnd_re.search(self._str)
+        if m is not None:
+            #print 'mnd -- ', m.group()
+            rval['mnd'] = self.dumpMatch(m)
+
+        segs = []
+        l = ugch_re.finditer(self._str)
+
+        for m in l:
+            if m is not None:
+                m1 = cityh_re.search(self._str, m.end())
+                m21 = ghend_re1.search(self._str, m.end())
+                m22 = ghend_re2.search(self._str, m.end())
+                m3 = gend_re.search(self._str, m.end())
+                if m3 is None:
+                    continue
+                if m21 is not None and m21.start() < m3.start():
+                    m2 = m21
+                elif m22 is not None and m22.start() < m3.start():
+                    m2 = m22
+                else:
+                    continue
+                m4 = head_re.search(self._str, m.end(), m3.end())
+
+                d = self.dumpMatch(m)
+                d = self.dumpMatch(m2, rval=d)
+                d = self.dumpMatch(m3, rval=d)
+                d['header'] = (self.tkc(m.start('uhdr')),
+                               self.tkc(m2.end()))
+                if m1 is not None and m1.start('incc') < m2.start():
+                    d['city'] = (self.tkc(m1.start('incc')),
+                                 self.tkc(m2.start()))
+                    mm = frame_re.search(self._str, m1.start(), m2.start())
+                    if mm is not None:
+                        d['cframe'] = (self.tkc(mm.start()),
+                                       self.tkc(mm.end()))
+                else:
+                    d['city'] = (self.tkc(m2.start()),
+                                 self.tkc(m2.start()))
+
+                if m4 is not None:
+                    #print 'm4 = ', m4.group()
+                    d = self.dumpMatch(m4, rval=d)
+                d['ugc'] = (self.tkc(m.start() + 1),
+                            self.tkc(m3.end() - 1))
+                segs.append(d)
+        #print 'segs = ', segs
+        rval['segs'] = segs
+
+        frames = []
+        l = frame_re.finditer(self._str)
+        for m in l:
+            if m is not None:
+                frames.append(self.dumpMatch(m))
+        rval['frames'] = frames
+
+        return rval
+
+    def parseFromJava(self, text):
+        self._str = text
+        self._ci = None
+        lines = [len(x) for x in text.splitlines(1)]
+        count = 0
+        lc = []
+        for l in lines:
+            lc.append(count)
+            count += l
+        self._totals = lc
+
+        #print 'text START ----------------------'
+        #print text
+        #print 'text END ------------------------'
+
+        result = self.parse()
+
+        #print 'result = ', result
+
+        return result
diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartScript.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartScript.py
index 93534dff25..510bd61067 100644
--- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartScript.py
+++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartScript.py
@@ -1,2782 +1,2782 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-########################################################################
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-#    SmartScript -- library of methods for Smart Tools and Procedures
-#
-# Author: hansen
-# SOFTWARE HISTORY
-#
-# Date          Ticket#  Engineer  Description
-# ------------- -------- --------- ---------------------------------------------
-# Jan 09, 2013  15626    J. Zeng   Add methods
-#                                    enableISCsend
-#                                    clientISCSendStatus
-#                                    manualSendISC_autoMode
-#                                    manualSendISC_manualMode
-# Jan 30, 2013  1559     dgilling  Fix TypeError in getGridCellSwath().
-# Mar 13, 2013  1791     bsteffen  Implement bulk getGrids to improve
-#                                  performance.
-# Mar 13, 2013  1793     bsteffen  Performance improvements for TCMWindTool
-# Apr 24, 2013  1947     randerso  Fix UVToMagDir to work with scalar arguments
-#                                  Cleaned up some constants
-# Jun 21, 2013  14983    ryu       Fixed encodeEditArea() to evaluate query
-#                                  when necessary
-# Aug 14, 2013  1571     randerso  Fixed encodeEditArea() to return
-#                                  astype(numpy.bool8) so mask can be used with
-#                                  advanced indexing (e.g. grid[mask] = value)
-# Oct 07, 2013  2424     randerso  remove use of pytz
-# Oct 29, 2013  2476     njensen   Improved getting wx/discrete keys in 
-#                                  _getGridResults
-# Oct 31, 2013  2508     randerso  Change to use DiscreteGridSlice.getKeys()
-# Nov 07, 2013  2476     dgilling  Fix _getGridsResult() for retrieving 
-#                                  Wx/Discrete in First mode.
-# Dec 23, 2013  16893    ryu       Added unloadWEs() method (created by njensen)
-# Apr 29, 2014  3097     randerso  Fixed getGrids() to return non-scalar grids
-#                                  as tuples in all cases
-# Nov 26, 2014  633      zhao      Corrected a type error in loadParm() 
-# Dec 01, 2014  3875     randerso  Added gmTime() and localTime() functions
-#                                  which are exact equivalents to those in the
-#                                  python time module. Added getTimeZoneStr and
-#                                  getTzInfo which return the site's local time
-#                                  zone as a string or as an object respectively
-#                                  Fixed createTimeRange to correctly return
-#                                  time ranges relative to local time regardless
-#                                  of setting of os.environ['TZ']
-# Jan 13, 2015    3955   randerso  Added optional parameter to availableParms to
-#                                  specify desired databases.
-#                                  Fixed createGrid to accept a DatabaseID for
-#                                  model
-# Apr 23, 2015    4259   njensen   Updated for new JEP API
-# Jul 17, 2015    4575   njensen   callSmartTool() and callProcedure() send
-#                                  HashMap for varDict
-# Aug 13, 2015    4704   randerso  Added NumpyJavaEnforcer support in
-#                                  createGrids and decodeEditArea.
-#                                  Additional code cleanup
-# Aug 26, 2015    4809   randerso  Added option group parameter to
-#                                  editAreaList()
-# Aug 26, 2015    4804   dgilling  Added callTextFormatter().
-# Aug 27, 2015    4805   dgilling  Added saveCombinationsFile().
-# Aug 27, 2015    4806   dgilling  Added transmitTextProduct().
-# Sep 16, 2015    4871   randerso  Return modified varDict from called
-#                                  Tool/Procedure
-# Sep 11, 2015    4858   dgilling  Remove notification processing from
-#                                  publishElements.
-# Jan 20, 2016    4751   randerso  Fix type of mask returned from getComposite()
-#                                  to work with numpy 1.9.2
-# Jan 28, 2016    5129   dgilling  Support changes to IFPClient.
-# Feb 22, 2016    5374   randerso  Added support for sendWFOMessage
-# Apr 05, 2016    5539   randerso  Added exception when attempting create more
-#                                  than 256 Wx keys
-# May 06, 2016    18967  ryu       Fix issue of contours plotted over
-#                                  ProposedWatches grid when ViewWCL is run.
-# Aug 22, 2016    18605  ryu       Retrieve operational text product in test
-#                                  mode.
-# Sep 28, 2016    19293  randerso  Added loadCombinationsFile method. Moved
-#                                  CombinationsFileUtil to common.
-# Oct 31, 2016    5979   njensen   Cast to primitives for compatibility
-# Feb 06, 2017    5959   randerso  Removed Java .toString() calls 
-#
-########################################################################
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-import types, string, time, sys
-from math import *
-from numpy import *
-import os
-import numpy
-import math
-import re
-import jep
-import BaseTool, Exceptions
-import DatabaseID, TimeRange, AbsTime, ParmID
-import GridInfo
-import JUtil
-import NumpyJavaEnforcer
-
-from java.util import ArrayList
-from java.util import Date
-from java.nio import FloatBuffer
-
-from com.raytheon.uf.common.time import SimulatedTime
-from com.raytheon.uf.common.time import TimeRange as javaTimeRange
-from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DByte
-from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DFloat
-from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey
-from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKeyDef
-from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteDefinition
-from com.raytheon.uf.common.dataplugin.gfe.weather import WeatherKey
-from com.raytheon.uf.common.dataplugin.gfe.db.objects import TimeConstraints
-from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridParmInfo
-GridType = GridParmInfo.GridType
-from com.raytheon.uf.common.dataplugin.gfe.server.request import SendISCRequest
-from com.raytheon.uf.common.dataplugin.gfe.textproduct import CombinationsFileUtil
-from com.raytheon.viz.gfe.dialogs.formatterlauncher import ConfigData
-ProductStateEnum = ConfigData.ProductStateEnum
-from com.raytheon.viz.gfe.textformatter import FormatterUtil
-from com.raytheon.viz.gfe.textformatter import TextProductFinishWaiter
-from com.raytheon.viz.gfe.textformatter import TextProductTransmitter
-
-
-class SmartScript(BaseTool.BaseTool):
-
-    def __init__(self, dataMgr):
-        BaseTool.BaseTool.__init__(self)
-        self.__dataMgr = dataMgr
-        self.__parmMgr = self.__dataMgr.getParmManager()
-        self.__refSetMgr = self.__dataMgr.getRefManager()
-        self.__mutableID = DatabaseID.DatabaseID(self.__parmMgr.getMutableDatabase())
-        self.__cycler = self.__dataMgr.getGridCycler()
-        self.__parmOp = self.__dataMgr.getParmOp()
-        # A cache of grids accessed by the derived class
-        #self.__pythonGrids = []
-        self.__accessTime = 0
-        self.__gridLoc = self.__parmMgr.compositeGridLocation()
-        self.__gridShape = (int(self.__gridLoc.getNy()), int(self.__gridLoc.getNx()))
-        self.__topoGrid = None
-        self.__toolType = "numeric"
-        self._empty = self.empty()
-        self._minus = self.newGrid(-1)
-        self._handlers = dict()
-
-
-    def empty(self, dtype=float32):
-        """Return a grid filled with 0"""
-        return zeros(self.getGridShape(), dtype)
-    
-    def newGrid(self, initialValue, dtype=float32):
-        """Return a grid filled with initialValue"""
-        return full(self.getGridShape(), initialValue, dtype)
-
-    ##
-    ## Call ProcessVariableList to obtain values from the user
-    ##
-    ## @param VariableList: list() of tuples describing the widgets to display
-    ##
-    ## @return dict() of values gathered from the widgets
-    ##
-    def getVariableListInputs(self, VariableList):
-        import ProcessVariableList
-        return ProcessVariableList.buildWidgetList(VariableList)
-
-
-    def mutableID(self):
-        # Returns the mutable database ID
-        return self.__mutableID
-
-    def getGridLoc(self):
-        return self.__gridLoc
-
-    def setToolType(self, toolType):
-        # Tool type is "point-based", "numeric", "parm-based"
-        # It is set when SmartScript is instantiated.
-        # For Procedures, it is set to the default of "point-based"
-        # So a procedure can override this by using this method.
-        self.__toolType = toolType
-
-    def editAreaList(self, eaGroup=None):
-        """ 
-        Returns a list of strings containing all edit areas in eaGroup.
-        If eaGroup is None, all known edit areas are returned.
-        """
-        eaList = []
-        if eaGroup is not None:
-            eans = self.__refSetMgr.getGroupData(eaGroup)
-            size = eans.size()
-            for i in range(size):
-                eaList.append(str(eans.get(i)))
-        else:
-            eans = self.__refSetMgr.getAvailableSets()
-            size = eans.size()
-            for i in range(size):
-                eaList.append(eans.get(i).getName())
-        return eaList
-
-    def getSite4ID(self, id3):
-        # Returns 4-letter site id, based on 3-letter site id
-        if id3 in ['SJU']:
-            return "TJSJ"
-        elif id3 in ['AFG', 'AJK', 'HFO', 'GUM']:
-            return "P" + id3
-        elif id3 in ['AER', 'ALU']:
-            return "PAFC"
-        else:
-            return "K" + id3
-
-
-    def loadedParms(self):
-        # Returns a list of tuples that are weather elements that are
-        # loaded.  The tuples are (element, level, model).  element and
-        # level are strings.  model is a DatabaseID.
-        allParms = self.__parmMgr.getAllParms()
-        retList = []
-        for p in allParms:
-            pid = p.getParmID()
-            dbid = DatabaseID.DatabaseID(pid.getDbId())
-            retList.append((pid.getParmName(), pid.getParmLevel(), dbid))
-        return retList
-
-    def availableParms(self, dbs=None):
-        # Returns a list of tuples that are weather elements that are
-        # available in the specified dbs.
-        # dbs may contain a list of DatabaseIDs or a single DatabaseID
-        # If dbs is None parms from all available databases are returned.   
-        # The tuples are (element, level, model).  
-        # element and level are strings, model is a DatabaseID.
-        retList = []
-
-        if dbs is None:
-            dbs = self.__parmMgr.getAvailableDbs()
-        elif type(dbs) is not list: # assume single db
-            db = dbs
-            
-            if isinstance(db, DatabaseID.DatabaseID):
-                db = db.toJavaObj()
-            else:
-                # assume java DatabaseID
-                pass
-            
-            dbs = ArrayList()
-            dbs.add(db)
-            
-        for i in range(dbs.size()):
-            d = dbs.get(i);
-            parms = self.__parmMgr.getAvailableParms(d)
-            for pid in parms:
-                dbid = DatabaseID.DatabaseID(pid.getDbId())
-                retList.append((pid.getParmName(), pid.getParmLevel(), dbid))
-        return retList
-
-    def selectedParms(self):
-        # Returns a list of tuples that are weather elements that are
-        # currently selected.  The tuples are (element, level, model).
-        # Element and level are string. model is a DatabaseID.
-        retList = []
-        parms = self.__parmMgr.getSelectedParms()
-        for p in parms:
-            parmid = p.getParmID()
-            javaDbId = parmid.getDbId()
-            dbid = None
-            if javaDbId is not None:
-                dbid = DatabaseID.DatabaseID(javaDbId)
-            retList.append((parmid.getParmName(), parmid.getParmLevel(),
-              dbid))
-
-        return retList
-
-    def loadParm(self, model, element, level, mostRecent=0):
-        # loads a parm and makes it visible.
-        parm = self.getParm(model, element, level, timeRange=None,
-          mostRecent=mostRecent)
-        if parm is not None:
-            self.__parmMgr.setParmDisplayable(parm, 1)
-        else:
-            raise TypeError("SmartScript loadParm: " + \
-              "couldn't load " + `model` + ' ' + `element` + ' ' + `level` + \
-              ' ' + str(mostRecent) + " (None is returned from getParm())" )
-    ##
-    # Get the list of timeranges locked by me in this weather element.
-    #
-    # @param weName: Weather element to look for locks on
-    # @type weName: String
-    # @param level: The level of the element to look for locks on
-    # @type level: String
-    # @return: The time ranges
-    # @rtype: Python list of Python TimeRanges
-    def lockedByMe(self, weName, level):
-        # returns list of time ranges locked by me in this weather element
-        # Uses the mutable database
-        parm = self.getParm(self.mutableID(), weName, level)
-        if parm is None:
-            return []
-        lt = parm.getLockTable()
-        jlbm = lt.lockedByMe()
-        # jlbm is a Java list of Java TimeRanges. Convert it to Python.
-        jlbmIter = jlbm.iterator()
-        lbm = []
-        while (jlbmIter.hasNext()):
-            jtr = jlbmIter.next()
-            tr = TimeRange.TimeRange(jtr)
-            lbm.append(tr)
-        return lbm
-
-    ##
-    # Get the list of timeranges locked by other users in this weather element.
-    #
-    # @param weName: Weather element to look for locks on
-    # @type weName: String
-    # @param level: The level of the element to look for locks on
-    # @type level: String
-    # @return: The time ranges
-    # @rtype: Python list of Python TimeRanges
-    def lockedByOther(self, weName, level):
-        # returns list of time ranges locked by others in this weather element
-        # Uses the mutable database
-        parm = self.getParm(self.mutableID(), weName, level)
-        if parm is None:
-            return []
-        lt = parm.getLockTable()
-        jlbo = lt.lockedByOther()
-        # jlbo is a Java list of Java TimeRanges. Convert it to Python.
-        jlboIter = jlbo.iterator()
-        lbo = []
-        while (jlboIter.hasNext()):
-            jtr = jlboIter.next()
-            tr = TimeRange.TimeRange(jtr)
-            lbo.append(tr)
-        return lbo
-
-    def forceLock(self, weName, level, startT, endT):
-        # forces locks in the given time range (startT to endT).
-        # startT, endT can either be ints/floats, or should be AbsTimes
-        # Returns 0 if not successful, 1 for okay.
-        if (type(startT) is types.IntType or type(startT) is types.FloatType) \
-          and (type(endT) is types.IntType or type(endT) is types.FloatType):
-            t1 = AbsTime.AbsTime(int(startT))
-            t2 = AbsTime.AbsTime(int(endT))
-            tr = TimeRange.TimeRange(t1, t2)
-        else:
-            tr = TimeRange.TimeRange(startT, endT)   #AbsTime
-        parm = self.getParm(self.mutableID(), weName, level)
-        if parm is None:
-            return 0
-        else:
-            return parm.forceLockTR(tr.toJavaObj())
-
-
-    def vtecActiveTable(self):
-        #returns the VTEC active table (or specified table)
-        import ActiveTableVtec
-        entries = self.__dataMgr.getActiveTable()
-        try:
-            return ActiveTableVtec.transformActiveTableToPython(entries)
-        except:
-            raise TypeError("SmartScript vtecActiveTable: could not convert to python objects.")
-
-
-    def gfeOperatingMode(self):
-        #returns the current operating mode of the GFE.
-        #Standard, PRACTICE, TEST
-        return self.__dataMgr.getOpMode().name()
-
-#------------------------------------------------------------------------
-# ISC control functions
-#------------------------------------------------------------------------
-
-    def enableISCsend(self, state):
-        #sets the overall isc send state.  If the send state is false, then
-        #no ISC grids can be transmitted.  To change the behavior
-        #when these programs (e.g., procedures) are run from the command line,
-        #you can enable/disable the send capability upon saving.  This
-        #command does not send grids, but sets the system state.  When
-        #saving grids and SendISCOnSave is set, or the manual Send ISC Dialog
-        #is used, then the grids will be sent.
-        self.__dataMgr.enableISCsend(state)
-
-    def clientISCSendStatus(self):
-        #returns the current state for sending isc from this program.  This
-        #depicts the state of whether this client has been enabled to send
-        #ISC via the SendISCOnSave or manual Send ISC Dialog.  The ifpServer
-        #still needs to be properly configured for sending to occur.
-        return self.__dataMgr.clientISCSendStatus()
-
-    def manualSendISC_autoMode(self):
-        #Simulates the use of the SendISCDialog.  Note if the ifpServer's
-        #SendISCOnSave is enabled, then this routine will fail as grids are
-        #sent when saved and the manual operation is not allowed.  The
-        #overall isc send state must also be True for this command to work.
-        req = ArrayList()
-        req.add(SendISCRequest())
-        self.__parmOp.sendISC(req)
-
-    def manualSendISC_manualMode(self, requests):
-        #simulates the use of the SendISCDialog.  Note if the ifpServers's
-        #SendISCOnSave is enabled, then this routine will fail as grids are
-        #sent when saved and the manual operation is not allowed.
-        #The requests are tuples of (parmName, parmLevel, timeRange). The
-        #TimeRange is an TimeRange() instance.  The overall isc
-        #send state must also be True for this command to work.
-        req = ArrayList()
-        for parmName, parmLevel, tr in requests:
-            pid = ParmID.ParmID(name=parmName, dbid=self.mutableID(), level=parmLevel).toJavaObj()
-            req.add(SendISCRequest(pid, tr.toJavaObj()))
-        self.__parmOp.sendISC(req)
-
-
-#########################################################################
-## Smart Tool methods                                                  ##
-#########################################################################
-
-        # Arguments
-        #   The following arguments are used throughout the
-        #   SmartScript Library methods
-        #
-        # self: When you call a method, use the "self" prefix (see
-        #       examples below)
-        # model: There are various ways to specify the database model
-        #       from which you want the values:
-        #    -- Simply "Fcst" or "Official" OR
-        #    -- siteID_type_model_modeltime
-        #       where the "type" is an empty string for Standard GFE data
-        #       and is "D2D" for D2D data.
-        #       Examples:
-        #         BOU__NAM12_Mar2912  :gets March 29 12Z NAM12 run created by GFE.
-        #         BOU_D2D_NAM12_Mar2912  :gets March 29 12Z original NAM12 run from D2D.
-        #       If you omit the "modeltime", the most recent model run will
-        #       be selected. For example:
-        #         BOU__NAM12 : gets the most recent NAM12 run created by GFE.
-        #         BOU_D2D_NAM12 : gets the most recent original NAM12 run from D2D.
-        #    -- the result of soliciting a model from the user using the
-        #       "model" or "D2D_model" type of VariableList entry. (See
-        #       examples above.)
-        #    -- you may also use a DatabaseID (see getDatabase, below)
-        #    -- simple string with no special characters (this will be
-        #       assumed to be a model created "on-the-fly"
-        # element: The element name in quotes:
-        #       e.g.  "QPF", "rh", "tp"
-        # level: The level in quotes:
-        #       e.g. "SFC", "MB350", "BL030"
-        # x, y: integer coordinates
-        # timeRange: Must be a special time range object such as
-        #   that passed in the argument list as GridTimeRange or a list of time
-        #   range objects. If it is a list than the return value will be a dict
-        #   where the time range objects are keys and the result of getGrids
-        #   for each time range is the value.
-        # mode: specifies how to handle the situation if multiple grids
-        #   are found within the given time range:
-        #   "TimeWtAverage": return time-weighted Average value
-        #   "Average" : return Average values
-        #   "Max" : return Max values
-        #   "Min" : return Min values
-        #   "Sum" : return Summed values
-        #   "First" : return values from grid with earliest time range
-        #   "List" : return list of grids (or values for getValue)
-        # noDataError: If 1, and there is no data, the Smart Tool will abort.
-        #   Otherwise, return None. None is a special variable in Python
-        #   which can be tested as follows:
-        #     PoP = self.getGrid("Fcst", "PoP", "SFC", GridTimeRange,
-        #           noDataError=0)
-        #     if PoP is None:
-        #         print "No data found for PoP"
-        # mostRecentModel: Applies only to model data. Will get the
-        #   most recent model and ignore any times (if included) in the
-        #   model argument.  (Note that if a time is not included in the
-        #   model argument, you will automatically get the most recent
-        #   model no matter how this argument is set.)
-
-    ###########################
-    ## Grid Access methods
-
-    def getGrids(self, model, element, level, timeRange,
-                 mode="TimeWtAverage",
-                 noDataError=1, mostRecentModel=0,
-                 cache=1):
-        # Get the value(s) for the given model, element, and level
-        #   at the x, y coordinate and over the given timeRange.
-        #
-        # The resulting grid values can be accessed as follows:
-        #   PoPGrid = self.getGrids("Fcst","PoP","SFC", GridTimeRange)
-        #   popValue = PoPGrid[x][y]
-        #
-        #  where x and y are integer grid coordinates.
-        #
-        # The argument descriptions are given above
-
-        if isinstance(model, DatabaseID.DatabaseID):
-            model = model.modelIdentifier()
-
-        timeRangeList = None
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        elif isinstance(timeRange, list):
-            timeRangeList = timeRange
-            timeRangeArray = jep.jarray(len(timeRangeList), javaTimeRange)
-            for i in xrange(len(timeRangeList)):
-                tr = timeRangeList[i]
-                if isinstance(tr, TimeRange.TimeRange):
-                    tr = tr.toJavaObj()
-                timeRangeArray[i] = tr
-            timeRange = timeRangeArray
-#        if cache:
-#            for cModel, cElement, cLevel, cMostRecent, cRange, cMode, cResult in \
-#                    self.__pythonGrids:
-#                if cModel == model and cElement == element and \
-#                       cLevel == level and cRange == timeRange \
-#                       and cMode == mode and cMostRecent == mostRecentModel:
-#                    return cResult
-
-        # Get the parm from parmMgr, find the corresponding result
-        exprName = self.getExprName(model, element, level, mostRecentModel)
-        parm = self.__parmMgr.getParmInExpr(exprName, 1)
-        if parm is None:
-            if noDataError == 1:
-                raise Exceptions.EditActionError(
-                    "NoData", "No Weather Element for " + exprName)
-            else:
-                return None
-        result = self.__cycler.getCorrespondingResult(parm, timeRange, mode)
-        if timeRangeList is not None:
-            retVal = {}
-            for i in xrange(len(timeRangeList)):
-                iresult = self._getGridsResult(timeRangeList[i], noDataError, mode, exprName, result[i])
-                retVal[timeRangeList[i]] = iresult
-            return retVal
-        else:
-            return self._getGridsResult(timeRange, noDataError, mode, exprName, result)
-
-    def _getGridsResult(self, timeRange, noDataError, mode, exprName, result):
-        retVal = None
-        if result is not None:
-            if len(result) == 0:
-                retVal = None
-            elif "List" == mode:
-                xlated = []
-                for rgrid in result:
-                    jxlgrid = rgrid.getGridSlice()                    
-                    xlgrid = jxlgrid.getNDArray()
-                    if type(xlgrid) is ndarray and xlgrid.dtype == numpy.int8:                    
-                        # discrete or weather
-                        keys = JUtil.javaObjToPyVal(jxlgrid.getKeyList())
-                        xlgrid = (xlgrid, keys)
-                    elif type(xlgrid) is not numpy.ndarray and len(xlgrid) == 2:
-                        # vector
-                        xlgrid = tuple(xlgrid)                    
-                    xlated.append(xlgrid)
-                retVal = xlated
-            else:
-                result = result[0];
-                slice = result.getGridSlice()
-                retVal = slice.getNDArray()
-                if type(retVal) is ndarray and retVal.dtype == numpy.int8:
-                    # discrete or weather
-                    keys = JUtil.javaObjToPyVal(slice.getKeyList())
-                    retVal = (retVal, keys)
-                elif type(retVal) is not numpy.ndarray and len(retVal) == 2:
-                    # vector
-                    retVal = tuple(retVal)
-
-        if retVal is None or retVal == []:
-            if noDataError == 1:
-                msg = "No corresponding grids for " + exprName + " " + str(timeRange)
-                raise UserWarning(msg)
-#        else:
-#            self.__pythonGrids.append((model, element, level, mostRecentModel,
-#                                       timeRange, mode, retVal))
-        return retVal
-
-
-    # Returns history info for the specified model, element, level and
-    # timerange.  ISC grids force this to be a list of lists [[]].
-    def getGridHistory(self, model, element, level, timeRange):
-        if isinstance(model, DatabaseID.DatabaseID):
-            model = model.modelIdentifier()
-        exprName = self.getExprName(model, element, level)
-        parm = self.__parmMgr.getParmInExpr(exprName, 1)
-        if parm is None:
-            raise Exceptions.EditActionError(
-                    "NoData", "getGridInfo: No Weather Element " + exprName)
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        grids = parm.getGridInventory(timeRange)
-        if len(grids) == 0:
-            return []
-        historyList = []
-        for grid in grids:
-            history = grid.getHistory()
-            histList = []
-            for h in history:
-                histList.append((str(h.getOrigin()),
-                                 ParmID.ParmID(jParmId=h.getOriginParm()),
-                                 TimeRange.TimeRange(h.getOriginTimeRange()),
-                                 AbsTime.AbsTime(h.getTimeModified()),
-                                 str(h.getWhoModified()),
-                                 AbsTime.AbsTime(h.getUpdateTime()),
-                                 AbsTime.AbsTime(h.getPublishTime())))
-
-            historyList.append(histList)
-
-        return historyList
-
-    def taperGrid(self, editArea, taperFactor=5):
-        # Returns a 2-D Grid of values between 0-1 about the
-        # given edit area.
-        # These values can be applied by smart tools to taper results.
-        # Argument:
-        #   editArea : must be of type AFPS.ReferenceData or None
-        #              (use editArea tool argument)
-        #   taperFactor: If set to zero, will do Full Taper
-        # Example:
-        #  def preProcessTool(self, editArea):
-        #     self._tGrid = self.taperGrid(editArea, 5)
-        #  def execute(self, variableElement):
-        #     return = variableElement + self._tGrid * 10.0
-        #
-        taperGrid = self.__refSetMgr.taperGrid(editArea, taperFactor)
-        taperGrid = taperGrid.getNDArray()        
-        return taperGrid
-
-    def directionTaperGrid(self, editArea, direction):
-        # Returns a 2-D Grid of values between 0-1 within the
-        # given edit area.
-        # E.g. if the Dir is W and x,y is half-way along the
-        #  W to E vector within the given edit area, the value of
-        #  directionTaperGrid at x,y will be .5
-        # These values can be applied by smart tools to show
-        #  spatial progress across an edit area.
-        # Argument:
-        #   editArea : must be of type AFPS.ReferenceData or None
-        #              (use editArea tool argument)
-        #   direction : 16 point text direction e.g. "NNW", "NW", etc.
-        # Example:
-        #  def preProcessTool(self, editArea):
-        #      self._spaceProgress = self.directionTaperGrid(editArea, "NW")
-        #  def execute(self, variableElement):
-        #      return variableElement * self._spaceProgress
-        #
-        taperGrid = self.__refSetMgr.directionTaperGrid(editArea, direction)
-        taperGrid = taperGrid.getNDArray()        
-        return taperGrid
-
-
-    def getComposite(self, WEname, GridTimeRange, exactMatch=1, onlyISC=0):
-        # Returns a composite grid consisting of the primary grid and any
-        # corresponding ISC grid, blended together based on the mask information
-        # derived from the Grid Data History. Primary grid must exist. Returns
-        # the set of points that are valid in the output grid. (Note the output
-        # grid consists of the primary grid and isc grid. Any "invalid" points,
-        # indicate those areas that have no isc data and are outside the home
-        # site's region.  The returned grid will have the primary data in
-        # the site's region.)
-        #
-        # A Python tuple is returned.
-        # For Scalar elements, the tuple contains:
-        #   a numeric grid of 1's and 0's where 1 indicates a valid point
-        #   a numeric grid of scalar values
-        # For Vector elements, the tuple contains:
-        #   a numeric grid of 1's and 0's where 1 indicates a valid point
-        #   a numeric grid of scalar values representing magnitude
-        #   a numeric grid of scalar values representing direction
-        # For Weather elements, the tuple contains:
-        #   a numeric grid of 1's and 0's where 1 indicates a valid point
-        #   a numeric grid of byte values representing the weather value
-        #   list of keys corresponding to the weather values
-        #
-        # For example:
-        #    isc = self.getComposite(WEname, GridTimeRange)
-        #    if isc is None:
-        #      self.noData()
-        #    # See if we are working with a Scalar or Vector element
-        #    wxType = variableElement_GridInfo.type()
-        #    if wxType == 0: # SCALAR
-        #         bits, values = isc
-        #    elif wxType == 1: # VECTOR
-        #         bits, mag, dir = isc
-
-
-        if onlyISC == 0:
-            exprName = self.getExprName("Fcst", WEname, "SFC")
-        else:
-            exprName = self.getExprName("ISC", WEname, "SFC")
-        parm = self.__parmMgr.getParmInExpr(exprName, 1)
-        if parm is None:
-            return None
-        seTime = AbsTime.AbsTime(self.__dataMgr.getSpatialDisplayManager().getSpatialEditorTime())
-        if GridTimeRange.contains(seTime):
-            gridTime = seTime
-        else:
-            gridTime = GridTimeRange.startTime()
-        from com.raytheon.viz.gfe.edittool import GridID
-        gid = GridID(parm, gridTime.javaDate())
-
-        wxType = self.__dataMgr.getClient().getPythonClient().getGridParmInfo(parm.getParmID()).getGridType()
-        if GridType.SCALAR.equals(wxType):
-            from com.raytheon.uf.common.dataplugin.gfe.slice import ScalarGridSlice
-            slice = ScalarGridSlice()
-            bits = self.__dataMgr.getIscDataAccess().getCompositeGrid(gid, exactMatch, slice)
-            args = (bits.getNDArray().astype(bool), slice.getScalarGrid().getNDArray())
-        elif GridType.VECTOR.equals(wxType):
-            from com.raytheon.uf.common.dataplugin.gfe.slice import VectorGridSlice
-            slice = VectorGridSlice()
-            bits = self.__dataMgr.getIscDataAccess().getVectorCompositeGrid(gid, exactMatch, slice)
-            args = (bits.getNDArray().astype(bool), slice.getMagGrid().getNDArray(), slice.getDirGrid().getNDArray())
-        elif GridType.WEATHER.equals(wxType):
-            from com.raytheon.uf.common.dataplugin.gfe.slice import WeatherGridSlice
-            slice = WeatherGridSlice()
-            bits = self.__dataMgr.getIscDataAccess().getCompositeGrid(gid, exactMatch, slice)
-            keys = []
-            for k in slice.getKeys():
-                keys.append(str(k))
-            args = (bits.getNDArray().astype(bool), slice.getWeatherGrid().getNDArray(), keys)
-        elif GridType.DISCRETE.equals(wxType):
-            from com.raytheon.uf.common.dataplugin.gfe.slice import DiscreteGridSlice
-            slice = DiscreteGridSlice()
-            bits = self.__dataMgr.getIscDataAccess().getCompositeGrid(gid, exactMatch, slice)
-            keys = []
-            for k in slice.getKeys():
-                keys.append(str(k))
-            args = (bits.getNDArray().astype(bool), slice.getDiscreteGrid().getNDArray(), keys)
-        return args
-
-    ##
-    # Return the GridInfo object for the given weather element and timeRange
-    # Example:
-    #    timeRange = self.getTimeRange("Today")
-    #    infoList = self.getGridInfo("Fcst", "T", "SFC", timeRange)
-    #    for info in infoList:
-    #        print "grid", info.gridTime()
-    #
-    # @param model: The model for which grid info is requested.
-    # @type model: DatabaseId or String
-    # @param element: The element for which grid info is requested.
-    # @type element: String
-    # @param level: The level for which grid info is requested.
-    # @type level: String
-    # @param timeRange: A time range over which grid info is requested.
-    # @type timeRange: com.raytheon.uf.common.time.TimeRange or TimeRange
-    # @param mostRecentModel: whether to use current time in request expr.
-    # @type mostRecentModel: integer or boolean
-    # @return: Java GridParmInfo object
-    def getGridInfo(self, model, element, level, timeRange,
-                    mostRecentModel=0):
-        if isinstance(model, DatabaseID.DatabaseID):
-            model = model.modelIdentifier()
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        parm = self.getParm(model, element, level, mostRecentModel)
-        if parm is None:
-            exprName = self.getExprName(model, element, level, mostRecentModel)
-            raise Exceptions.EditActionError(
-                    "NoData", "getGridInfo: No Weather Element " + exprName)
-        grids = parm.getGridInventory(timeRange)
-        if len(grids) == 0:
-            return []
-        gridParmInfo = parm.getGridInfo()
-        gridInfos = []
-        for grid in grids:
-            timeRange = grid.getGridTime()
-            gridInfo = GridInfo.GridInfo(gridParmInfo=gridParmInfo,
-                                         gridTime=timeRange)
-            gridInfos.append(gridInfo)
-        return gridInfos
-
-    ###########################
-    ## Sounding methods
-
-    # Numeric only
-    def makeNumericSounding(self, model, element, levels, timeRange,
-                     noDataError=1, mostRecentModel=0):
-        # Make a numeric sounding for the given model, element, and levels
-
-        # Example:
-        #  levels = ["MB850","MB800","MB750","MB700","MB650","MB600"]
-        #  gh_Cube, rh_Cube = self.makeNumericSounding(
-        #                         model, "rh", levels, GridTimeRange)
-        #
-        # Arguments:
-        #
-        # The "levels" argument is a Python list of levels INCREASING
-        #  in height.
-        # This method returns two numeric cubes:
-        #   ghCube of geopotential heights for the given levels
-        #   valueCube of values for the given levels
-
-        ghCube = []
-        valueCube = []
-        magCube = []
-        dirCube = []
-        for level in levels:
-
-            ghGrids = self.getGrids(model, "gh", level, timeRange,
-                                    noDataError=noDataError,
-                                    mostRecentModel=mostRecentModel)
-            if ghGrids is None:
-                return None
-
-            valueGrids = self.getGrids(model, element, level, timeRange,
-                                       noDataError=noDataError,
-                                       mostRecentModel=mostRecentModel)
-            if valueGrids is None:
-                return None
-
-            if type(ghGrids) == types.ListType:
-                ghGrid = ghGrids[0]
-            else:
-                ghGrid = ghGrids
-
-            if type(valueGrids) == types.ListType:
-                valueGrid = valueGrids[0]
-            else:
-                valueGrid = valueGrids
-
-            #jdynina ghCube = ghCube + [ghGrid]
-            ghCube.append(ghGrid)
-
-            if type(valueGrid) == types.TupleType:
-                magCube = magCube + [valueGrid[0]]
-                dirCube = dirCube + [valueGrid[1]]
-            else:
-                valueCube = valueCube + [valueGrid]
-
-        ghCube = array(ghCube)
-        if len(magCube) > 0:
-            magCube = array(magCube)
-            dirCube = array(dirCube)
-            valueCube = (magCube, dirCube)
-        else:
-            valueCube = array(valueCube)
-        return (ghCube, valueCube)
-
-    # numeric only
-    def getNumericMeanValue(self, model, element, levels, timeRange,
-                     noDataError=1):
-        # Return a numeric array of mean values for the given element
-        #  between and including the given levels
-        if len(levels) < 1:
-            return self.errorReturn(
-                noDataError,
-                "SmartScript.getNumericMeanValue:: No Levels for Mean Value.")
-        elementType = "Scalar"
-        empty = self.getTopo() * 0.0
-        totalValue = empty
-        uSum = empty
-        vSum = empty
-        for level in levels:
-            value = self.getGrids(model, element, level, timeRange,
-                             noDataError=noDataError)
-            if type(value) == types.TupleType:
-                elementType = "Vector"
-                uw, vw = self.MagDirToUV(value[0], value[1])
-                uSum = uSum + uw
-                vSum = vSum + vw
-            else:
-                totalValue = totalValue + value
-        # Compute the average
-        totCount = float(len(levels))
-        if elementType == "Scalar":
-            return totalValue / totCount
-        else:
-            u = uSum / totCount
-            v = vSum / totCount
-            mag, dir = self.UVToMagDir(u, v)
-            mag = int(mag + 0.5)
-            dir = int(dir + 0.5)
-            return (mag, dir)
-
-
-    ###########################
-    ## Conversion methods
-
-    def UVToMagDir(self, u, v):
-        RAD_TO_DEG = 180.0 / numpy.pi
-        # Sign change to make math to meteor. coords work
-        u = -u
-        v = -v
-        if type(u) is numpy.ndarray or type(v) is numpy.ndarray:
-            speed = numpy.sqrt(u * u + v * v)
-            dir = numpy.arctan2(u, v) * RAD_TO_DEG
-            dir[numpy.greater_equal(dir, 360)] -= 360
-            dir[numpy.less(dir, 0)] += 360
-        else:
-            speed = math.sqrt(u * u + v * v)
-            dir = math.atan2(u, v) * RAD_TO_DEG
-            while dir < 0.0:
-                dir = dir + 360.0
-            while dir >= 360.0:
-                dir = dir - 360.0
-        return (speed, dir)
-
-    def MagDirToUV(self, mag, dir):
-        DEG_TO_RAD = numpy.pi / 180.0
-        # Note sign change for components so math to meteor. coords works
-        uw = - sin(dir * DEG_TO_RAD) * mag
-        vw = - cos(dir * DEG_TO_RAD) * mag
-        return (uw, vw)
-
-    def convertMsecToKts(self, value_Msec):
-        # Convert from meters/sec to Kts
-        return value_Msec * 3600.0 / 1852.0
-
-    def convertKtoF(self, t_K):
-        # Convert the temperature from Kelvin to Fahrenheit
-        # Degrees Fahrenheit = (Degrees Kelvin - 273.15) / (5/9) + 32
-        t_F = (t_K - 273.15) * 9.0 / 5.0 + 32.0
-        return t_F
-
-    def KtoF(self, t_K):
-        return self.convertKtoF(t_K)
-
-    def convertFtoK(self, t_F):
-        # Convert the temperature from Kelvin to Fahrenheit
-        # Degrees Kelvin = (Degrees Fahrenheit - 32) * (5 / 9) + 273.15
-        t_K = (t_F - 32.0) * (5.0 / 9.0) + 273.15;
-        return t_K
-
-    def FtoK(self, t_F):
-        return self.convertFtoK(t_F)
-
-    def convertFtToM(self, value_Ft):
-        # Convert the value in Feet to Meters
-        return value_Ft * 0.3048
-
-#########################################################################
-## Error Handling                                                      ##
-#########################################################################
-
-    def abort(self, info):
-        # This call will send the info to the GFE status bar,
-        #  put up a dialog with the given info, and abort the
-        #  smart tool or procedure.
-        # Example:
-        #  self.abort("Error processing my tool")
-        #
-        raise TypeError, info
-
-    def noData(self, info="Insufficient Data to run Tool"):
-        # Raise the NoData exception error
-        raise Exceptions.EditActionError("NoData", info)
-
-    def cancel(self):
-        # Cancels a smart tool without displaying an error message
-        raise Exceptions.EditActionError("Cancel", "Cancel")
-
-    def errorReturn(self, noDataError, message):
-        if noDataError == 1:
-            self.abort(message)
-        else:
-            return None
-
-    ##
-    # Sends the text message to the GFE status bar with the
-    #  given status code: "R" (regular), "S" (significant), "U" (urgent),
-    #  or "A" (alert)
-    # Example:
-    #  self.statusBarMsg("Running Smart Tool", "R")
-    #
-    # @param message: The message to send.
-    # @type message: string
-    # @param status: Importance of message. "A"=Alert, "R"=Regular, "U"=Urgent;
-    #                anything else=Significant
-    # @type status: string
-    # @param category: The message category. Defaults to "GFE".
-    # @type category: string
-    # @return: None
-    def statusBarMsg(self, message, status, category="GFE"):
-        from com.raytheon.uf.common.status import UFStatus
-        Priority = UFStatus.Priority
-
-        if "A" == status:
-            importance = Priority.PROBLEM
-        elif "R" == status:
-            importance = Priority.EVENTA
-        elif "U" == status:
-            importance = Priority.CRITICAL
-        else:
-            importance = Priority.SIGNIFICANT
-
-        if category not in self._handlers:
-            self._handlers[category] = UFStatus.getHandler("GFE", category, 'GFE')
-
-        self._handlers[category].handle(importance, message);
-
-   #########################
-    ##  Smart Commands
-    ##
-    ## These commands take some similar arguments:
-    ##   editArea : must be of type AFPS.ReferenceData or None
-    ##              (See getEditArea)
-    ##              If you specify None, the system will supply
-    ##              the active edit area from the GFE or from
-    ##              the editArea argument for runProcedure.
-    ##   timeRange: must be of type AFPS.TimeRange or None
-    ##              (See getTimeRange and createTimeRange)
-    ##              If you specify None, the system will supply
-    ##              the selected Time Range from the GFE or from
-    ##              the timeRange argument for runProcedure.
-    ##   varDict  : If you supply a varDict in this call, the
-    ##              variable list dialog will not be displayed
-    ##              when the tool is run.
-    ##              If you supply a varDict from a Procedure,
-    ##              make sure that the variables
-    ##              for all the tools called by the Procedure are
-    ##              supplied in your varDict.
-    ##   missingDataMode: Can be "Stop", "Skip", or "Create". If not
-    ##              included, will be set to the current GFE default.
-    ##   modal:     If 0, VariableList dialogs will appear with the
-    ##              non-modal "Run" and "Run/Dismiss" buttons.
-    ##              Otherwise, they will appear with the "Ok" button.
-    ##
-    ##  If editValues is true, the grid values are changed.
-    ##  FOR POINT-BASED TOOLS ONLY:
-    ##     If calcArea is true, a reference area is created and saved which
-    ##       shows discrepancies greater than the DiscrepancyValue between the current
-    ##       value and new value.
-    ##     If calcGrid is true, a scalar grid is created which shows the discrepancy
-    ##       amount between the current value and new value. (Not implemented.)
-    ##
-    ## These commands all return an error which will be None if no
-    ##   errors occurred.  Otherwise, the errorType and errorInfo
-    ##   can be accessed e.g. error.errorType() and error.errorInfo()
-    ## If "noData" has been called, the errorType will be "NoData" and
-    ##   can be tested by the calling tool or script.
-
-
-    def callSmartTool(self, toolName, elementName, editArea=None,
-                      timeRange=None, varDict=None,
-                      editValues=1, calcArea=0, calcGrid=0,
-                      passErrors=[],
-                      missingDataMode="",
-                      modal=1):
-        # passErrors:  a list of errors to ignore and pass back to the
-        #  calling program.  Some errors that can be ignored are:
-        #    NoData
-        #    NoElementToEdit
-        #    ExecuteOrClassError
-        #    LockedGridError
-        #
-        # For example:
-        #  In the Procedure:
-        #     error = self.callSmartTool(
-        #        "MyTool", "MixHgt", editArea, timeRange, varDict,
-        #        passErrors= ["NoData"])
-        #     if error is not None:
-        #        print "No Data available to run tool"
-        #
-        #  In the Smart Tool:
-        #     mixHgt = self.getGrids(model, "MixHgt", "SFC", timeRange)
-        #     if mixHgt is None:
-        #        self.noData()
-
-        if editArea is None or not editArea.getGrid().isAnyBitsSet():
-            editArea = self.__refSetMgr.fullRefSet()
-            emptyEditAreaFlag = True
-        else:
-            emptyEditAreaFlag = False
-            
-        javaDict = None
-        if varDict is not None:
-            javaDict = JUtil.pyValToJavaObj(varDict)
-
-        parm = self.getParm(self.__mutableID, elementName, "SFC")
-        if timeRange is None:
-            from com.raytheon.viz.gfe.core.parm import ParmState
-            timeRange = parm.getParmState().getSelectedTimeRange()
-        else:
-            timeRange = timeRange.toJavaObj()
-
-        from com.raytheon.viz.gfe.smarttool import SmartUtil
-        result, returnedDict = SmartUtil.callFromSmartScript(self.__dataMgr, toolName, elementName, editArea,
-                                            timeRange, javaDict, emptyEditAreaFlag,
-                                            JUtil.pylistToJavaStringList(passErrors),
-                                            missingDataMode, parm)
-
-        if varDict is not None and returnedDict:
-            returnedDict = JUtil.javaObjToPyVal(returnedDict)
-            varDict.clear()
-            varDict.update(returnedDict)
-
-        if result:
-            raise Exceptions.EditActionError(errorType="Error", errorInfo=str(result))
-        return None
-
-    def callProcedure(self, name, editArea=None, timeRange=None, varDict=None,
-                      missingDataMode="Stop",
-                      modal=1):
-        if editArea is None:
-            from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData
-            editArea = ReferenceData()
-        if timeRange is None:
-            from com.raytheon.uf.common.time import TimeRange as JavaTimeRange
-            timeRange = JavaTimeRange()
-        else:
-            timeRange = timeRange.toJavaObj()
-
-        javaDict=None
-        if varDict is not None:
-            javaDict = JUtil.pyValToJavaObj(varDict)
-
-        from com.raytheon.viz.gfe.procedures import ProcedureUtil
-        result, returnedDict = ProcedureUtil.callFromSmartScript(self.__dataMgr, name, editArea, timeRange, javaDict)
-        
-        if varDict is not None and returnedDict:
-            returnedDict = JUtil.javaObjToPyVal(returnedDict)
-            varDict.clear()
-            varDict.update(returnedDict)
-
-        # callSmartTool raises the exception put here it is returned.
-        if result:
-           return Exceptions.EditActionError(errorType="Error", errorInfo=str(result))
-        return None
-
-
-    ###########################
-    ## Creating On-the-fly elements
-
-    def createGrid(self, model, element, elementType, numericGrid, timeRange,
-                   descriptiveName=None, timeConstraints=None,
-                   precision=None, minAllowedValue=None,
-                   maxAllowedValue=None, units=None, rateParm=0,
-                   discreteKeys=None, discreteOverlap=None,
-                   discreteAuxDataLength=None, defaultColorTable=None):
-
-
-        # Creates a grid for the given model and element.
-        # If the model and element do not already exist, creates them on-the-fly
-        #
-        # The descriptiveName, timeConstraints, precision, minAllowedValue,
-        # maxAllowedValue, units, rateParm, discreteKeys, discreteOverlap,
-        # and discreteAuxDataLength only need to be
-        # specified for the first grid being created.  These
-        # values are ignored for subsequent calls to createGrid() for
-        # the same weather element.
-
-        # For new parms, the defaultColorTable is the one to be used for
-        # display.  If not specified and not in the gfe configuration file,
-        # a DEFAULT color table will be used.
-
-        # DISCRETE elements require a definition for discreteKeys,
-        # discreteAuxDataLength,  and discreteOverlap. For DISCRETE, the
-        # precision, minAllowedValue, maxAllowedValue, and rateParm
-        # are ignored.
-
-        # Note that this works for numeric grids only.
-        # The arguments exampleModel, exampleElement, and exampleLevel can be
-        # supplied so that the new element will have the same characteristics
-        # (units, precision, etc.) as the example element.
-        #
-        # model -- If you are creating an "on-the-fly" element (i.e. not
-        #          in the server), this should be a simple string with
-        #          with no special characters.  The site ID and other
-        #          information will be added for you.
-        #          If you are creating a grid for a model that exists
-        #          in the server, follow the guidelines for the model
-        #          argument described for the "getValue" command.
-        # element -- This should be a simple string with no special
-        #          characters.
-        # elementType -- "SCALAR", "VECTOR", "WEATHER", or "DISCRETE"
-        # numericGrid -- a Numeric Python grid
-        # timeRange -- valid time range for the grid.  You may want
-        #          to use the "createTimeRange" command
-        #
-        # The descriptiveName, timeConstraints, precision, minAllowedValue,
-        # precision, minAllowedValue, maxAllowedValue, and units can be
-        # used to define the GridParmInfo needed. Note that timeConstraints
-        # is not the C++ version, but a (startSec, repeatSec, durSec).
-        #
-        # Example:
-        #    self.createGrid("ISCDisc", WEname+"Disc", "SCALAR", maxDisc,
-        #                   GridTimeRange, descriptiveName=WEname+"Disc")
-        #
-        if string.find(element, "_") >= 0:
-            message = "SmartScript:createGrid --" + \
-                              "Illegal element name contains underscore. " + \
-                              "No special characters allowed. "
-            self.abort(message)
-        parm = self.getParm(model, element, "SFC")
-        if parm is None:
-            # Create a parm on-the-fly
-            # Parm ID
-            siteID = self.__dataMgr.getSiteID()
-            if model == "Fcst":
-                dbi = self.__mutableID
-            elif isinstance(model, DatabaseID.DatabaseID):
-                dbi = model
-            else:
-                dbi = DatabaseID.databaseID(siteID + "_GRID__" + model + "_00000000_0000")
-            pid = ParmID.ParmID(element, dbid=dbi).toJavaObj()
-            # Grid Parm Info set up to use a default at first
-            if elementType == "VECTOR":
-                example = self.getParm("Fcst", "Wind", "SFC")
-            elif elementType == "WEATHER":
-                example = self.getParm("Fcst", "Wx", "SFC")
-            elif elementType == "SCALAR":
-                example = self.getParm("Fcst", "T", "SFC")
-            elif elementType == "DISCRETE":
-                example = self.getParm("Fcst", "Hazards", "SFC")
-            else:
-                message = "SmartScript:createGrid -- illegal element type"
-                self.abort(message)
-
-            exampleGPI = None
-            if example is not None:
-                exampleGPI = example.getGridInfo()
-
-            #look for overrides
-            if descriptiveName is None:
-                descriptiveName = element
-            
-            if timeConstraints is None:
-                if exampleGPI is None:
-                    tc = TimeConstraints(0, 60, 60)
-                else:
-                    tc = exampleGPI.getTimeConstraints()
-            elif isinstance(timeConstraints, types.TupleType):
-                # TC constructor (dur, repeat, start)
-                # TC tuple (start, repeat, dur)
-                tc = TimeConstraints(timeConstraints[2], timeConstraints[1],
-                                     timeConstraints[0])
-            else:
-                # Assume Java TimeConstraints or compatible
-                tc = TimeConstraints(
-                  timeConstraints.getDuration(), timeConstraints.getRepeatInterval(),
-                  timeConstraints.getStartTime())
-
-            if precision is None :
-                if exampleGPI is None:
-                    precision = 0
-                else:
-                    precision = exampleGPI.getPrecision()
-                    
-            if maxAllowedValue is None:
-                if exampleGPI is None:
-                    maxAllowedValue = nanmax(numericGrid)
-                else:
-                    maxAllowedValue = exampleGPI.getMaxValue()
-                    
-            if minAllowedValue is None:
-                if exampleGPI is None:
-                    minAllowedValue = nanmin(numericGrid)
-                else:
-                    minAllowedValue = exampleGPI.getMinValue()
-
-            if units is None:
-                if exampleGPI is None:
-                    units = "1" # unitless
-                else:
-                    units = exampleGPI.getUnitString()
-
-            if tc.anyConstraints() == 0:
-                timeIndependentParm = 1
-                timeRange = TimeRange.TimeRange.allTimes().toJavaObj()
-            else:
-                timeIndependentParm = 0
-
-            #create the new GridParmInfo
-            minAllowedValue = float(minAllowedValue)
-            maxAllowedValue = float(maxAllowedValue)
-            gpi = GridParmInfo(pid,
-                self.getGridLoc(), GridType.valueOf(elementType), units,
-                descriptiveName, minAllowedValue, maxAllowedValue,
-                precision, timeIndependentParm, tc, rateParm)
-
-            # if DISCRETE, deal with the key definitions
-            if elementType == "DISCRETE":
-                if discreteKeys is None or discreteOverlap is None or \
-                  discreteAuxDataLength is None:
-                    message = "SmartScript:createGrid --" + \
-                              "Discrete elements require discretekeys, " + \
-                              "discreteAuxDataLength, " + \
-                              "and discreteOverlap defined. "
-                    self.abort(message)
-                currDef = DiscreteKey.discreteDefinition(siteID)
-                keys = ArrayList()
-                for h in discreteKeys:
-                    if type(h) is types.TupleType:
-                        kname, kdesc = h
-                    elif type(h) is types.StringType:
-                        kname = h
-                        kdesc = h
-                    keys.add(DiscreteKeyDef(kname, kdesc))
-                currDef.addDefinition(pid.getCompositeName(), discreteOverlap,
-                                             discreteAuxDataLength, keys)
-                DiscreteKey.setDiscreteDefinition(siteID, currDef)
-
-            #set a default color table if specified
-            if defaultColorTable is not None:
-                from com.raytheon.viz.gfe import Activator
-                prefName = element + "_defaultColorTable"
-                Activator.getDefault().getPreferenceStore().setValue(prefName, defaultColorTable)
-
-            #create the parm
-            parm = self.__parmMgr.createVirtualParm(pid, gpi, None, 1, 1)
-
-        # Create Java objects from numericGrid.
-        # Do this here because, while numericGrid can be sent straight to Java,
-        # the keys of discrete grids arrive as a single string, which must then
-        # be parsed. It's easier to create Java objects of the proper types here.
-        javaGrid = None
-        auxJavaGrid = None
-        javaOldKeys = None
-        if elementType == "DISCRETE" or elementType == "WEATHER":
-            ngZero = NumpyJavaEnforcer.checkdTypes(numericGrid[0], int8)
-            dimx = ngZero.shape[1]
-            dimy = ngZero.shape[0]
-            # Use createGrid() to get around Jep problems with 3-arg ctor.
-            javaGrid = Grid2DByte.createGrid(dimx, dimy, ngZero)
-            oldKeys = numericGrid[1]
-            javaOldKeys = ArrayList()
-            for oldKey in oldKeys:
-                # it seems stupid that we break apart tuples for discrete keys
-                # when modifying the DiscreteDefinition, but not here when
-                # creating the actual grid. It actually prevents the grid from
-                # being created because the string representation of the tuple
-                # won't match what we added to the DiscreteDefinition.
-                # However, this is exactly what AWIPS1 does...
-                # SEE GridCycler.C, line 1131
-                # FIXME: add oldKey[0] to the ArrayList for tuple types
-                javaOldKeys.add(str(oldKey))
-        elif elementType == "SCALAR":
-            numericGrid = NumpyJavaEnforcer.checkdTypes(numericGrid, float32)
-            javaGrid = Grid2DFloat.createGrid(numericGrid.shape[1], numericGrid.shape[0], numericGrid)
-        elif elementType == "VECTOR":
-            ngZero = NumpyJavaEnforcer.checkdTypes(numericGrid[0], float32)
-            ngOne = NumpyJavaEnforcer.checkdTypes(numericGrid[1], float32)
-            javaGrid = Grid2DFloat.createGrid(ngZero.shape[1], ngZero.shape[0], ngZero)
-            auxJavaGrid = Grid2DFloat.createGrid(ngOne.shape[1], ngOne.shape[0], ngOne)
-        else:
-            raise ValueError, "Unknown elementType: %s" % elementType
-
-        # Make sure we pass a java TimeRange to Java methods
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        gridData = self.__cycler.makeGridDataFromNumeric(parm, timeRange, javaGrid, auxJavaGrid, javaOldKeys)
-        parm.replaceGriddedData(timeRange, gridData)
-
-    ##
-    #
-    # @param model: Model name
-    # @type model: string
-    # @param element: Element name
-    # @type element: string
-    # @param level: Level name
-    # @type level: string
-    # @param timeRange: Time range of grid
-    # @type timeRange: Python or Java TimeRange
-    # @return: True if grids were deleted
-    def deleteGrid(self, model, element, level, timeRange):
-        # Deletes any grids for the given model and element
-        # completely contained in the given timeRange.
-        # If the model and element do not exist or if there are no existing grids,
-        #   no action is taken.
-        #
-        parm = self.getParm(model, element, level)
-        if parm is None:
-            returnVal = False
-        else:
-            if isinstance(timeRange, TimeRange.TimeRange):
-                timeRange = timeRange.toJavaObj()
-            returnVal = parm.deleteTR(timeRange)
-        return returnVal
-
-    def highlightGrids(self, model, element, level, timeRange, color, on=1):
-        # Highlight the grids in the given time range using designated
-        # color.  If "on" is 0, turn off the highlight.
-        parm = self.getParm(model, element, level)
-        from com.raytheon.viz.gfe.core.msgs import HighlightMsg
-
-        trs = jep.jarray(1, javaTimeRange)
-        trs[0] = timeRange.toJavaObj()
-        HighlightMsg(parm, trs, on, color).send()
-
-    def makeHeadlineGrid(self, headlineTable, fcstGrid, headlineGrid=None):
-        # This method defines a headline grid based on the specified data.
-        # The headlineTable parameter must be a list of tuples each containing
-        # the threshold for each headline category and headline label
-        # Example:
-        #    headlineTable =[(15.0, 'SW.Y'),
-        #                    (21.0, 'SC.Y'),
-        #                    (34.0, 'GL.W'),
-        #                    (47.0, 'SR.W'),
-        #                    (67.0, 'HF.W'),
-        #                    ]
-        # "fsctGrid" is the grid that defines what headline category should
-        # be assigned. "headlineGrid" is the grid you wish to combine with
-        # the calculated grid.  This forces a combine even if the GFE is not
-        # in combine mode.  Omitting "headlineGrid" will cause the calculated
-        # grid to replace whatever is in the GFE, no matter what the GFE's
-        # combine mode. Note that a side effect of omitting the headline grid
-        # is that the GFE will end up in replace mode after the tool completes.
-        noneKey = ""  # define the  key
-        # set the mode to replace so the tool always behaves the same
-
-        if headlineGrid is None: # make new headline grid components
-            headValues = zeros(fcstGrid.shape, int8)
-            headKeys = [noneKey]
-            self.setCombineMode("Replace") # force a replace in GFE
-        else:
-            headValues, headKeys = headlineGrid
-
-        # make sure the headlineTable is not empty
-        if len(headlineTable) <= 0:
-            self.statusBarMsg("HeadlineTable is empty", "S")
-            return headlineGrid
-
-        # make a list of (mask, key) for the new headlines
-        newHeadlines = []
-        for value, headline in headlineTable:
-            mask = greater_equal(fcstGrid, value)
-            if sometrue(mask):
-                newHeadlines.append((mask, headline))
-        # make the same list for old headlines
-        oldHeadlines = []
-        for i in range(len(headKeys)):
-            mask = equal(headValues, i)
-            if sometrue(mask):
-                oldHeadlines.append((mask, headKeys[i]))
-
-        # make combinations at every intersection
-        for newMask, newKey in newHeadlines:
-            for oldMask, oldKey in oldHeadlines:
-                overlap = logical_and(newMask, oldMask) # intersection
-                if sometrue(overlap): #  combined key needed
-                    if oldKey == newKey:
-                        continue
-                    if oldKey == noneKey:
-                        combinedKey = newKey
-                    else:
-                        combinedKey = oldKey + "^" + newKey
-                    # make sure the key is on the list
-                    if combinedKey not in headKeys:
-                        headKeys.append(combinedKey)
-                    index = self.getIndex(combinedKey, headKeys)
-                    headValues[overlap] = index
-
-        # return the new headlines grid
-        return (headValues, headKeys)
-
-
-    ######################
-    ##  Utility Commands
-
-    def findDatabase(self, databaseName, version=0):
-        # Return an AFPS.DatabaseID object.
-        #  databaseName can have the appended type. E.g. "NAM12" or "D2D_NAM12"
-        #  version is 0 (most recent), -1 (previous), -2, etc.
-        # E.g.
-        #    databaseID = self.findDatabase("NAM12",0)
-        # returns most recent NAM12 model
-        result = self.__parmMgr.findDatabase(databaseName, version)
-        if result is not None:
-            result = DatabaseID.DatabaseID(result)
-        return result
-
-    def getDatabase(self, databaseString):
-        # Return an AFPS.DatabaseID object.
-        #  databaseString is the result of a VariableList entry of type
-        #   "model" or "D2D_model"
-        dbID = DatabaseID.databaseID(databaseString)
-        return dbID
-
-    def getTimeRange(self, timeRangeName):
-        # Returns an AFPS.TimeRange object given a time range name
-        # as defined in the GFE
-        # E.g.
-        #   timeRange = self.getTimeRange("Today")
-        tr = self.__dataMgr.getSelectTimeRangeManager().getRange(timeRangeName).toTimeRange();
-        return TimeRange.TimeRange(tr)
-
-    def createTimeRange(self, startHour, endHour, mode="LT", dbID=None):
-        # Returns an AFPS.TimeRange object given by:
-        #    startHour, endHour
-        #       (range is startHour up to and not including endHour)
-        #       startHour and endHour are relative to midnight of the
-        #          current day either in Local or Zulu time (see below)
-        #    mode can be:
-        #    "LT" : the startHour and endHour are relative to local time
-        #    "Zulu": relative to Zulu time,
-        #    "Database": relative to a database (e.g. model time.
-        #      In this case, the databaseID for the model must
-        #      be supplied (see findDatabase)
-        #
-        # E.g.
-        #    timeRange = self.createTimeRange(0,121,"Zulu")
-        #    databaseID = self.findDatabase("NAM12")
-        #    timeRange = self.createTimeRange(120,241,"Database",databaseID)
-
-        if mode == "Database" and dbID is None:
-            raise TypeError("SmartScript createTimeRange: " + \
-                      "Must specify a database ID for mode=Database")
-
-        if mode == "LT":
-            localTime = time.mktime(self.localtime())
-            gmTime = time.mktime(self.gmtime())
-            localAbsTime = AbsTime.AbsTime(localTime)
-            delta = localTime - gmTime
-
-            todayMidnight = AbsTime.absTimeYMD(localAbsTime.year, localAbsTime.month,
-                                               localAbsTime.day)
-            start = todayMidnight + (startHour * 3600) - delta
-            end = todayMidnight + (endHour * 3600) - delta
-            return TimeRange.TimeRange(start, end)
-        elif mode == "Database" and dbID.toJavaObj().getModelTime() != "00000000_0000":
-            start = dbID.modelTime() + (startHour * 3600)
-            end = dbID.modelTime() + (endHour * 3600)
-            return TimeRange.TimeRange(start, end)
-        else:
-            currentTime = self.gmtime()
-            today = AbsTime.absTimeYMD(currentTime.tm_year, currentTime.tm_mon,
-                                       currentTime.tm_mday)
-            start = today + (startHour * 3600)
-            end = today + (endHour * 3600)
-            return TimeRange.TimeRange(start, end)
-
-    def getSamplePoints(self, sampleSetName=None):
-        # Return a list of x,y tuples representing sample points
-        # sampleSet is the name of a saved sample set
-        # if sampleSet is None, the sample points will be
-        #   those currently displayed on the GFE
-        points = []
-        sampleSet = self.__dataMgr.getSampleSetManager()
-        if sampleSetName is None:
-            locations = sampleSet.getLocations()
-        else:
-            locations = sampleSet.sampleSetLocations(sampleSetName)
-        for i in range(locations.size()):
-            xy = self.getGridLoc().gridCoordinate(locations.get(i))
-            points.append((xy.x, xy.y))
-        return points
-
-    def _timeDisplay(self, timeRange, LTorZulu, durFmt, startFmt, endFmt):
-        # Return a string display for the given timeRange, assumed to be
-        #  in GMT.
-        # If LTorZulu == "LT", the timeRange will be converted from GMT
-        #  to local time.
-        # durationFmt, startFmt, endFmt are format strings for the
-        #  timeRange duration, the start time and end time respectively.
-        # See Text Product User Guide to see possible formats.
-        #
-        # Example:
-        #   self._timeDisplay(timeRange, "LT", "%H hours ",
-        #                     "%a %b %d, %Y %I:%M %p",
-        #                    " to %a %b %d, %Y %I:%M %p %Z")
-        #
-        #   yields a string such as:
-        #
-        #  12 hours Mon Apr 23, 2001 06:00 AM to Mon Apr 23, 2001 06:00 PM MDT.
-        if LTorZulu == "LT":
-            # Convert to local time
-            timeRange = self._shiftedTimeRange(timeRange)
-        display = ""
-        if durFmt != "":
-            duration = timeRange.duration()
-            durHours = duration / 3600
-            durMinutes = duration / 3600 / 60
-            durStr = string.replace(durFmt, "%H", `durHours`)
-            durStr = string.replace(durStr, "%M", `durMinutes`)
-            display = display + durStr
-        if startFmt != "":
-            display = display + timeRange.startTime().stringFmt(startFmt)
-        if endFmt != "":
-            display = display + timeRange.endTime().stringFmt(endFmt)
-        if LTorZulu == "LT":
-            # Adjust time zone to local time
-            localTime = self.localtime()
-            zoneName = time.strftime("%Z", localTime)
-            display = string.replace(display, "GMT", zoneName)
-        return display
-
-    def _shiftedTimeRange(self, timeRange):
-        localTime, shift = self._determineTimeShift()
-        return TimeRange.TimeRange(timeRange.startTime() + shift,
-                              timeRange.endTime() + shift)
-
-    def _determineTimeShift(self):
-        ''' Get the current Simulated UTC time and convert it to the
-        Site Time Zone as AbsTime return this and the number of seconds the
-        Simulated UTC time was shifted to get local time
-        '''
-        ldt = self._localtime()
-        shift = int(ldt.utcoffset().total_seconds())
-        currentTime = AbsTime.absTimeYMD(ldt.year, ldt.month, ldt.day, ldt.hour, ldt.minute)
-        return currentTime, shift
-
-    def _localtime(self, date=None, tz=None):
-        ''' Assumes date (default is current Simulate Time) is a UTC time to convert
-            to the time zone tz (default is Site Time Zone).
-            returns datetime
-        '''
-        if tz is None:
-            tz = self.getTzInfo()
-
-        gmdt = self._gmtime(date)
-        tzdt = gmdt.astimezone(tz)
-        return tzdt
-
-    def _gmtime(self, date=None):
-        ''' This takes date (default current Simulated Time) and converts it to AbsTime
-        '''
-        if date is None:
-            date = SimulatedTime.getSystemTime().getTime()
-        return AbsTime.AbsTime(date)
-    
-    def gmtime(self, date=None):
-        ''' This takes date (default current Simulated Time) and converts it to AbsTime
-
-            This should be used instead of time.gmtime()
-        '''
-        return self._gmtime(date).utctimetuple()
-
-    def localtime(self, date=None):
-        ''' Assumes date (default is current Simulated Time) is a UTC time to convert
-            to the time zone of the local site.
-
-            This should be used instead of time.localtime()
-        '''
-        return self._localtime(date).timetuple()
-    
-    def getTimeZoneStr(self):
-        ''' Returns local time zone of the current site as a string
-        '''
-        return self.__gridLoc.getTimeZone()
-    
-    def getTzInfo(self, tzname=None):
-        ''' Returns time zone object compatible with datetime for the desired time zone. 
-            Defaults to local site's time zone if tzname not specified.
-        '''
-        if tzname is None:
-            tzname = self.getTimeZoneStr()
-        import dateutil.tz
-        return dateutil.tz.gettz(tzname)
-
-    def dayTime(self, timeRange, startHour=6, endHour=18):
-        # Return 1 if start of timeRange is between the
-        #  startHour and endHour, Return 0 otherwise.
-        # Assume timeRange is GMT and convert to local time.
-        shift = self.determineTimeShift()
-        startTime = timeRange.startTime() + shift
-        localStartHour = startTime.hour
-        if localStartHour >= startHour and localStartHour < endHour:
-            return 1
-        else:
-            return 0
-
-    def determineTimeShift(self):
-        loctime, shift = self._determineTimeShift()
-        return shift
-
-    def getEditArea(self, editAreaName):
-        # Returns an AFPS.ReferenceData object given an edit area name
-        # as defined in the GFE
-
-        # Example:
-        #    myArea = self.getEditArea("BOU")
-        #    self.callSmartTool("MyTool", "T", editArea=myArea, timeRange)
-        #
-        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID
-        refID = ReferenceID(editAreaName)
-        return self.__dataMgr.getRefManager().loadRefSet(refID)
-
-    def saveEditArea(self, editAreaName, refData):
-        # Saves the AFPS.ReferenceData object with the given name
-
-        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID
-        refID = ReferenceID(editAreaName)
-        refData = ReferenceData(refData.getGloc(), refID, refData.getGrid())
-        self.__dataMgr.getRefManager().saveRefSet(refData)
-
-    def setActiveEditArea(self, area):
-        # Set the AFPS.ReferenceData area to be the active one in the GFE
-        # Note: This will not take effect until AFTER the smart tool or
-        # procedure is finished executing.
-        self.__dataMgr.getRefManager().setActiveRefSet(area)
-
-    def getActiveEditArea(self):
-        # Get the AFPS.ReferenceData area for the active one in the GFE
-        return self.__dataMgr.getRefManager().getActiveRefSet()
-
-    def clearActiveEditArea(self):
-        # Clear the active edit area in the GFE
-        #area = AFPS.ReferenceData_default()
-        #self.__dataMgr.referenceSetMgr().setActiveRefSet(area)
-        self.__dataMgr.getRefManager().clearRefSet()
-
-    def setActiveElement(self, model, element, level, timeRange,
-                         colorTable=None, minMax=None, fitToData=0):
-        # Set the given element to the active one in the GFE
-        # A colorTable name may be given.
-        # A min/max range for the colorTable may be given.
-        # If fitToData = 1, the color table is fit to the data
-        #
-        # Example:
-        #    self.setActiveElement("ISCDisc", WEname+"Disc", "SFC", GridTimeRange,
-        #                          colorTable="Discrepancy", minMax=(-20,+20),
-        #                          fitToData=1)
-        #
-        parm = self.getParm(model, element, level)
-        spatialMgr = self.__dataMgr.getSpatialDisplayManager()
-        if minMax or colorTable:
-            rsc = spatialMgr.getResourcePair(parm).getResource()
-            from com.raytheon.uf.viz.core.rsc.capabilities import ColorMapCapability
-            params = rsc.getCapability(ColorMapCapability).getColorMapParameters()
-            if colorTable:
-                from com.raytheon.uf.viz.core.drawables import ColorMapLoader
-                colorMap = ColorMapLoader.loadColorMap(colorTable)
-                elemType = str(parm.getGridInfo().getGridType())
-                if ('DISCRETE' == elemType):
-                    from com.raytheon.viz.gfe.rsc import DiscreteDisplayUtil
-                    DiscreteDisplayUtil.deleteParmColorMap(parm)
-                params.setColorMap(colorMap)
-                params.setColorMapName(colorTable)
-                rsc.issueRefresh()
-            if minMax:
-                minVal, maxVal = minMax
-                if (minVal != maxVal):
-                    params.setColorMapMax(maxVal)
-                    params.setColorMapMin(minVal)
-            parm.getListeners().fireColorTableModified(parm)
-        if fitToData:
-            from com.raytheon.viz.gfe.rsc.colorbar import FitToData
-            fitter = FitToData(self.__dataMgr, parm)
-            fitter.fitToData()
-        spatialMgr.activateParm(parm)
-        spatialMgr.makeVisible(parm, True, True)
-        spatialMgr.setSpatialEditorTime(timeRange.startTime().javaDate())
-
-
-    def getActiveElement(self):
-        return self.__dataMgr.getSpatialDisplayManager().getActivatedParm()
-
-    def getGridCellSwath(self, editArea, cells):
-        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData
-        CoordinateType = ReferenceData.CoordinateType
-        # Returns an AFPS.ReferenceData swath of the given
-        # number of cells around the given an edit area.
-        # The edit area must not be a query.
-        if type(editArea) is types.StringType:
-            editArea = self.getEditArea(editArea)
-        grid2DB = None
-        multipolygon = editArea.getPolygons(CoordinateType.valueOf("GRID"))
-        numPolygons = multipolygon.getNumGeometries()
-        for n in range(numPolygons):
-            polygon = multipolygon.getGeometryN(n)
-            grid2DBit = self.getGridLoc().gridCellSwath(
-                polygon.getCoordinates(), float(cells), False)
-            if grid2DB is not None:
-                grid2DB = grid2DB.orEquals(grid2DBit)
-            else:
-                grid2DB = grid2DBit
-        return self.getGridLoc().convertToReferenceData(grid2DB)
-
-    def getLatLon(self, x, y):
-        # Get the latitude/longitude values for the given grid point
-        from com.vividsolutions.jts.geom import Coordinate
-        coords = Coordinate(float(x), float(y))
-        cc2D = self.getGridLoc().latLonCenter(coords)
-        return cc2D.y, cc2D.x
-
-    def getLatLonGrids(self):
-        gridLoc = self.getGridLoc()
-        latLonGrid = gridLoc.getLatLonGrid()
-
-        latLonGrid = numpy.reshape(latLonGrid, (2, int(gridLoc.getNy()), int(gridLoc.getNx())), order='F')
-        return latLonGrid[1], latLonGrid[0]
-
-
-    def getGridCell(self, lat, lon):
-        # Get the corresponding x,y values for the given lat/lon
-        # Return None, None if the lat/lon is outside the grid domain
-        cc2D = self.getGridLoc().gridCell(lat, lon)
-        gridSize = self.getGridLoc().gridSize()
-        if cc2D.x < 0 or cc2D.x >= gridSize.x or \
-           cc2D.y < 0 or cc2D.y >= gridSize.y:
-            return None, None
-        else:
-            return cc2D.x, cc2D.y
-
-    def getGrid2DBit(self, editArea):
-        # Returns a Grid of on/off values indicating whether
-        # or not the grid point is in the given edit area.
-        # This could be used as follows in a Smart Tool:
-        #  def preProcessGrid(self):
-        #     editArea = self.getEditArea("Area1")
-        #     self.__area1Bits = self.getGrid2DBit(editArea)
-        #     editArea = self.getEditArea("Area2")
-        #     self.__area2Bits = self.getGrid2DBit(editArea)
-        #
-        #  def execute(self, x, y):
-        #     if self.__area1Bits.get(x,y) == 1:
-        #        
-        #     elif self.__area2Bits.get(x,y) == 1:
-        #        
-        #
-        return editArea.getGrid()
-
-    def getGridTimes(self, model, element, level, startTime, hours):
-        # Return the timeRange and gridTimes for the number of hours
-        # FOLLOWING the given startTime
-        timeRange = TimeRange.TimeRange(startTime, startTime + hours * 3600)
-        parm = self.getParm(model, element, level, timeRange)
-        gridTimes = parm.getGridInfo().getTimeConstraints().constraintTimes(timeRange.toJavaObj())
-        pyList = []
-        for t in gridTimes:
-            pyList.append(TimeRange.TimeRange(t))
-        return timeRange, pyList
-
-    def getExprName(self, model, element, level="SFC", mostRecent=0):
-        # Return an expressionName for the element
-        # This method is complicated because it is handling all the
-        # variations for the "model" argument.  For a description
-        # of the variations, see the "getValue" documentation above.
-
-        siteID = self.__mutableID.siteID()
-        if type(model) is types.StringType:
-            modelStr = model
-        else:
-            # Must be a databaseID, so get model string
-            modelStr = model.modelName()
-        if element == "Topo" or modelStr == self.__mutableID.modelName():
-            exprName = element
-        elif modelStr == "Official":
-            dbType = self.__mutableID.type()
-            modelName = "Official"
-            exprName = element + "_" + level + "_" + siteID + "_" + dbType + "_" + modelName
-        else:
-            if type(model) is types.StringType:
-                if string.count(model, "_") == 5:
-                    # String as databaseID
-                    dbID = DatabaseID.databaseID(model)
-                elif string.find(model, "_") < 0:
-                    # Assume "on-the-fly" so need to prepend site
-                    exprName = element + "_" + level + "_" + siteID + "__" + model
-                    dbID = DatabaseID.databaseID_default()
-                else:
-                    # Assume model is site_type_modelName
-                    exprName = element + "_" + level + "_" + model
-                    dbID = DatabaseID.databaseID_default()
-            else:
-                # Assume it is already a dbID
-                dbID = model
-            if dbID.siteID() is not None and dbID.siteID() != "":
-                if str(dbID) == str(self.__mutableID):
-                    exprName = element
-                else:
-                    exprName = element + "_" + level + "_" + dbID.siteID() + "_" + \
-                               dbID.type() + "_" + dbID.modelName()
-                    if mostRecent == 0:
-                        if dbID.toJavaObj().getModelDate() is None:
-                            exprName = exprName + "_00000000_0000"
-                        else:
-                            exprName = exprName + "_" + dbID.modelTime().stringFmt(
-                                "%b%d%H")
-        return exprName
-
-    def getSiteID(self):
-        return self.__dataMgr.getSiteID()
-
-    def getModelName(self, databaseString):
-        # Return the model name.
-        #  databaseString is the result of a VariableList entry of type
-        #   "model" or "D2D_model"
-        dbID = DatabaseID.databaseID(databaseString)
-        return dbID.modelName()
-
-    def getD2Dmodel(self, model):
-        # Given a GFE Surface model, return the corresponding D2D model
-        if isinstance(model, DatabaseID.DatabaseID):
-            model = model.modelIdentifier()
-        d2d = string.replace(model, "__", "_D2D_")
-        return d2d
-
-    def getParm(self, model, element, level, timeRange=None, mostRecent=0):
-        # Returns the parm object for the given model, element, and level
-        exprName = self.getExprName(model, element, level, mostRecent)
-        #print "Expression Name", exprName
-        parm = self.__parmMgr.getParmInExpr(exprName, 1)
-        return parm
-
-    def getParmByExpr(self, exprName):
-        #print "Expression Name", exprName
-        parm = self.__parmMgr.getParmInExpr(exprName, 1)
-        return parm
-
-    ##
-    # @param elementNames: ignored
-    #
-    # @deprecated: Cacheing is controlled by the system.
-    def cacheElements(self, elementNames):
-        pass
-
-    ##
-    # Cacheing is controlled by the system. Users may still call this method
-    # to delete temporary parms in the parm manager.
-    #
-    # @param elementNames: ignored
-    def unCacheElements(self, elementNames):
-        self.__parmMgr.deleteTemporaryParms()
-
-    def loadWEGroup(self, groupName):
-        parmArray = self.__parmMgr.getAllAvailableParms();
-        parmIDs = self.__dataMgr.getWEGroupManager().getParmIDs(
-              groupName, parmArray)
-        # Load the group
-        self.__parmMgr.setDisplayedParms(parmIDs)
-
-    ##
-    # @param model: Database model name
-    # @type model: String
-    # @param element: Element name, i.e., "Hazards"
-    # @type element: String
-    # @param level: Parm level, i.e., "SFC"
-    # @type level: String
-    # @return: None
-    def unloadWE(self, model, element, level, mostRecent=0):
-        # unloads the WE from the GFE
-        exprName = self.getExprName(model, element, level, mostRecent)
-        parm = self.__parmMgr.getParmInExpr(exprName, 1)
-        if parm is None:
-            return
-        parmJA = jep.jarray(1, parm)
-        parmJA[0] = parm
-        self.__parmMgr.deleteParm(parmJA)
-
-    def unloadWEs(self, model, elementLevelPairs, mostRecent=0):
-        jparms = []
-        for element, level in elementLevelPairs:
-            exprName = self.getExprName(model, element, level, mostRecent)
-            parm = self.__parmMgr.getParmInExpr(exprName, 1)
-            if parm:
-                jparms.append(parm)
-        if jparms:
-            parmJA = jep.jarray(len(jparms), jparms[0])
-            for i in xrange(len(jparms)):
-                parmJA[i] = jparms[i]
-            self.__parmMgr.deleteParm(parmJA)
-
-    def saveElements(self, elementList):
-        # Save the given Fcst elements to the server
-        # Example:
-        #    self.saveElements(["T","Td"])
-        for element in elementList:
-            parm = self.getParm(self.mutableID(), element, "SFC")
-            parm.saveParameter(True)
-
-    def publishElements(self, elementList, timeRange):
-        # Publish the given Fcst elements to the server
-        # over the given time range.
-        # NOTE: This method is design to run from a procedure
-        # NOT a SmartTool!!!
-        # Example:
-        #    self.publishElements(["T","Td"], timeRange)
-        from com.raytheon.uf.common.dataplugin.gfe.server.request import CommitGridRequest
-        requests = ArrayList()
-        for element in elementList:
-            # get the inventory for this element from the server
-            parm = self.getParm("Fcst", element, "SFC")
-            recList = self.__dataMgr.getClient().getPythonClient().getGridInventory(parm.getParmID())
-            publishTimeRange = timeRange
-            if recList is not None:
-                recSize = recList.size()
-                for x in range(recSize):
-                    tr = TimeRange.TimeRange(recList.get(x))
-                    if tr.overlaps(timeRange):
-                        publishTimeRange = publishTimeRange.combineWith(tr)
-
-            cgr = CommitGridRequest(parm.getParmID(), publishTimeRange.toJavaObj())
-            requests.add(cgr)
-            self.__parmOp.publish(requests)
-
-    def combineMode(self):
-        from com.raytheon.viz.gfe.core.parm import ParmState
-        CombineMode = ParmState.CombineMode
-        mode = ParmState.getCurrentCombineMode()
-        if mode.equals(CombineMode.valueOf("COMBINE")):
-            return True
-        else:
-            return False
-
-    def setCombineMode(self, mode):
-        from com.raytheon.viz.gfe.core.parm import ParmState
-        CombineMode = ParmState.CombineMode
-        if mode == "Combine":
-            self.__parmOp.setCombineMode(CombineMode.valueOf("COMBINE"))
-        elif mode == "Replace":
-            self.__parmOp.setCombineMode(CombineMode.valueOf("REPLACE"))
-        else:
-            self.statusBarMsg("Invalid Weather Combine mode.", "S")
-            return None
-
-    def getVectorEditMode(self):
-        # Returns Vector Edit mode in the GFE
-        # mode:
-        #    "Magnitude Only"
-        #    "Direction Only"
-        #    "Both"
-        from com.raytheon.viz.gfe.core.parm import ParmState
-        VectorMode = ParmState.VectorMode
-        mode = ParmState.getCurrentVectorMode()
-        if mode.equals(VectorMode.valueOf("MAGNITUDE")):
-            return "Magnitude Only"
-        elif mode.equals(VectorMode.valueOf("DIRECTION")):
-            return "Direction Only"
-        elif mode.equals(VectorMode.valueOf("BOTH")):
-                return "Both"
-        return "None"
-
-    def setVectorEditMode(self, mode):
-        # Sets the Vector Edit mode in the GFE
-        # mode:
-        #    "Magnitude only"
-        #    "Direction only"
-        #    "Both"
-        from com.raytheon.viz.gfe.core.parm import ParmState
-        VectorMode = ParmState.VectorMode
-        if mode == "Magnitude Only":
-            self.__parmOp.setVectorMode(VectorMode.valueOf("MAGNITUDE"))
-        elif mode == "Direction Only":
-            self.__parmOp.setVectorMode(VectorMode.valueOf("DIRECTION"))
-        else:
-            self.__parmOp.setVectorMode(VectorMode.valueOf("BOTH"))
-
-    def getConfigItem(self, itemName, default=None):
-        # Return the configuration file value for "itemName"
-        #  If not found, return the default given
-        from com.raytheon.viz.gfe import Activator
-        prefs = Activator.getDefault().getPreferenceStore()
-        if prefs.contains(itemName):
-            if prefs.isString(itemName):
-                return str(prefs.getString(itemName))
-            elif prefs.isInt(itemName):
-                return prefs.getInt(itemName)
-            elif prefs.isFloat(itemName):
-                return prefs.getFloat(itemName)
-            elif prefs.isDouble(itemName):
-                return prefs.getDouble(itemName)
-            elif prefs.isLong(itemName):
-                return prefs.getLong(itemName)
-            elif prefs.isBoolean(itemName):
-                return prefs.getBoolean(itemName)
-            elif prefs.isStringArray(itemName):
-                pa = []
-                jsa = prefs.getStringArray(itemName)
-                for i in jsa:
-                    pa.append(str(i))
-                return pa
-            elif prefs.isFloatArray(itemName):
-                pa = []
-                jsa = prefs.getFloatArray(itemName)
-                for i in jsa:
-                    pa.append(float(i))
-                return pa
-            elif prefs.isIntArray(itemName):
-                pa = []
-                jsa = prefs.getIntArray(itemName)
-                for i in jsa:
-                    pa.append(int(i))
-                return pa
-            else:
-                return default
-        else:
-            return default
-
-    def esat(self, temp):
-        return exp(26.660820 - 0.0091379024 * temp - 6106.3960 / temp)
-
-    ##
-    # Get the discrete keys for elementName.
-    #
-    # @param elementName: Name of an element.
-    # @type elementName: string
-    # @return: the keys for the element
-    # @rtype: list of strings
-    def getDiscreteKeys(self, elementName):
-        parm = self.getParm("Fcst", elementName, "SFC")
-        keyList = parm.getGridInfo().getDiscreteKeys()
-        keyList = JUtil.javaStringListToPylist(keyList)
-        return keyList
-
-#########################################################################
-## Numeric Python methods                                              ##
-#########################################################################
-
-    def getTopo(self):
-        # Return the numeric topo grid
-        if self.__topoGrid is None:
-            topo = self.__parmMgr.getParmInExpr("Topo", True)
-            self.__topoGrid = self.__cycler.getCorrespondingResult(
-                                topo, TimeRange.allTimes().toJavaObj(), "TimeWtAverage")[0]
-            self.__topoGrid = self.__topoGrid.getGridSlice().getNDArray()
-        return self.__topoGrid
-
-    def wxMask(self, wx, query, isreg=0):
-        # Returns a numeric mask i.e. a grid of 0's and 1's
-        #  where the value is 1 if the given query succeeds
-        # Arguments:
-        #  wx -- a 2-tuple:
-        #    wxValues : numerical grid of byte values
-        #    keys : list of "ugly strings" where the index of
-        #      the ugly string corresponds to the byte value in
-        #      the wxValues grid.
-        #  query -- a text string representing a query
-        #  isreg -- if 1, the query is treated as a regular expression
-        #           otherwise as a literal string
-        # Examples:
-        #  # Here we want to treat the query as a regular expression
-        #  PoP = where(self.wxMask(wxTuple, "^Chc:", 1), maximum(40, PoP), PoP)
-        #  # Here we want to treat the query as a literal
-        #  PoP = where(self.wxMask(wxTuple, ":L:") maximum(5, PoP), PoP)
-        #
-        rv = self.empty(bool)
-        if not isreg:
-            for i in xrange(len(wx[1])):
-                #if fnmatch.fnmatchcase(wx[1][i], query):
-                if string.find(wx[1][i], query) >= 0:
-                    rv[equal(wx[0], i)] = True
-        else:
-            r = re.compile(query)
-            for i in xrange(len(wx[1])):
-                m = r.search(wx[1][i])
-                if m is not None:
-                    rv[equal(wx[0], i)] = True
-        return rv
-
-        # Returns a numeric mask i.e. a grid of 0's and 1's
-        #  where the value is 1 if the given query succeeds
-        # Arguments:
-        #  Discrete -- a 2-tuple:
-        #    grid : numerical grid of byte values
-        #    keys : list of "ugly strings" where the index of
-        #      the ugly string corresponds to the byte value in
-        #      the wxValues grid.
-        #  query -- a text string representing a query
-        #  isreg -- if 1, the query is treated as a regular expression
-        #           otherwise as a literal string
-        # Examples:
-        #  # Here we want to treat the query as a regular expression
-        #  PoP = where(self.wxMask(wxTuple, "^Chc:", 1), maximum(40, PoP), PoP)
-        #  # Here we want to treat the query as a literal
-        #  PoP = where(self.wxMask(wxTuple, ":L:") maximum(5, PoP), PoP)
-    discreteMask = wxMask
-
-    ##
-    # Sort the subkeys of uglyStr alphabetically.
-    #
-    # @param uglyStr: A key with "^"s separating subkeys
-    # @type uglyStr: string
-    # @return: uglyStr with alpha sorted subkeys.
-    # @rtype: string
-    def sortUglyStr(self, uglyStr):
-        parts = uglyStr.split("^")
-        if len(parts) < 2:
-            return uglyStr
-
-        # do the sort
-        parts.sort()
-
-        sortedStr = "^".join(parts)
-
-        return sortedStr
-
-    ##
-    # Get the index of uglyStr within keys.
-    # This routine compares normalized (sorted) versions of uglyStr and
-    # keys to be sure that equivalent hazards are assigned the same grid
-    # index.
-    # When a matching key is not in keys, uglyStr will be added to keys
-    # and the index of the new entry will be returned.
-    #
-    # @param uglyStr: A hazard key
-    # @type uglyStr: string
-    # @param keys: Existing hazard keys
-    # @type keys: list
-    # @return: The index of a key equivalent to uglyStr in keys
-    # @rtype: int
-    def getIndex(self, uglyStr, keys):
-        # Returns the byte value that corresponds to the
-        #   given ugly string. It will add a new key if a new ugly
-        #   string is requested.
-        # Arguments:
-        #   uglyStr: a string representing a weather value
-        #   keys: a list of ugly strings.
-        #     A Wx argument represents a 2-tuple:
-        #       wxValues : numerical grid of byte values
-        #       keys : list of "ugly strings" where the index of
-        #        the ugly string corresponds to the byte value in the wxValues grid.
-        #     For example, if our keys are:
-        #       "Sct:RW:-::"
-        #       "Chc:T:-::"
-        #       "Chc:SW:-::"
-        #    Then, the wxValues grid will have byte values of 0 where
-        #    there is "Sct:RW:-::", 1 where there is "Chc:T:-::"
-        #    and 2 where there is "Chc:SW:-::"
-        #
-        #  The ugly strings are also used by DISCRETE.  The keys are
-        #  separated by '^' for the subkeys.
-        #  18 Nov 2005 - tl
-        #  Added sorting to ugly strings to prevent duplicate keys
-        #  Duplicate keys causes a bug when generating hazards grids.
-
-        sortedUglyStr = self.sortUglyStr(uglyStr)
-        for str in keys:
-            if sortedUglyStr == self.sortUglyStr(str):
-                return keys.index(str)
-        
-        if len(keys) >= 256:
-            raise IndexError("Attempt to create more than 256 Wx keys")
-        
-        keys.append(uglyStr)
-        return len(keys) - 1
-
-    ##
-    # Returns a Numeric Python mask for the edit area
-    # "editArea" can be a named area or a ReferenceData object
-    # @param editArea: An edit area to obtain a mask for
-    # @type editArea: String or referenceArea
-    # @return: grid for the edit area
-    # @rtype: numpy array of int8
-    def encodeEditArea(self, editArea):
-        # Returns a Numeric Python mask for the edit area
-        # "editArea" can be a named area or a referenceData object
-        if type(editArea) is types.StringType:
-            editArea = self.getEditArea(editArea)
-
-        if editArea.isQuery():
-            editArea = self.__refSetMgr.evaluateQuery(editArea.getQuery())
-
-        return editArea.getGrid().getNDArray().astype(bool)
-
-    def decodeEditArea(self, mask):
-        # Returns a refData object for the given mask
-        from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit
-        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID
-        gridLoc = self.getGridLoc()
-        nx = int(gridLoc.getNx())
-        ny = int(gridLoc.getNy())
-        
-        # force mask to boolean if it's not
-        mask = NumpyJavaEnforcer.checkdTypes(mask, bool)
-        
-        # convert boolean mask to bytes for Grid2DBit        
-        bytes = mask.astype(int8)
-        grid = Grid2DBit.createBitGrid(nx, ny, bytes)
-        return ReferenceData(gridLoc, ReferenceID("test"), grid)
-
-
-    def getindicies(self, o, l):
-        if o > 0:
-            a = slice(o, l); b = slice(0, l - o)
-        elif o < 0:
-            a = slice(0, l + o); b = slice(- o, l)
-        else:
-            a = slice(0, l); b = slice(0, l)
-        return a, b
-
-    def offset(self, a, x, y):
-        # Gives an offset grid for array, a, by x and y points
-        sy1, sy2 = self.getindicies(y, a.shape[0])
-        sx1, sx2 = self.getindicies(x, a.shape[1])        
-        b = zeros_like(a)
-        b[sy1, sx1] = a[sy2, sx2]
-        return b
-
-    def agradient(self, a):
-        # Gives offset grids in the "forward" x and "up" y directions
-        dx = a - self.offset(a, 1, 0)
-        dy = a - self.offset(a, 0, - 1)
-        return dx, dy
-
-    def diff2(self, x, n=1, axis= - 1):
-        """diff2(x,n=1,axis=-1) calculates the first-order, discrete
-        center difference approximation to the derivative along the axis
-        specified. array edges are padded with adjacent values.
-        """
-        a = asarray(x)
-        nd = len(a.shape)
-        slice1 = [slice(None)] * nd
-        slice2 = [slice(None)] * nd
-        slice1[axis] = slice(2, None)
-        slice2[axis] = slice(None, - 2)
-        tmp = a[slice1] - a[slice2]
-        rval = zeros_like(a)
-        slice3 = [slice(None)] * nd
-        slice3[axis] = slice(1, - 1)
-        rval[slice3] = tmp
-        slice4 = [slice(None)] * nd
-        slice4[axis] = slice(0, 1)
-        rval[slice4] = tmp[slice4]
-        slice5 = [slice(None)] * nd
-        slice5[axis] = slice(- 1, None)
-        rval[slice5] = tmp[slice5]
-        if n > 1:
-            return diff2(rval, n - 1)
-        else:
-            return rval
-
-    ##
-    # Get the grid shape from the GridLocation stored in the parm manager.
-    #
-    # @return: The number of data points in the X and Y directions.
-    # @rtype: 2-tuple of int
-    def getGridShape(self):
-        return self.__gridShape
-
-#########################################################################
-## Procedure methods                                                   ##
-#########################################################################
-
-    # These commands always apply to the mutable model only.
-    # NOTE:  Most of these commands are duplicated with "old" and
-    #  "recommended" versions which end in "Cmd".  For example, "copy"
-    #  is the "old" version and will eventually not be supported
-    #  while the recommended version is "copyCmd".
-
-    # Command Arguments:
-    # name1, name2, name3     is a list of the weather element names
-    # startHour     is the starting hour for the command offset from modelbase
-    # endHour       is the ending hour for the command offset from modelbase.
-    #               The ending hour is NOT included in the processing of the
-    #               command.
-    # modelbase     is the name of the model to be used to determine base times
-    #               Note that if this is "", then 0000z from today will be
-    #               used for the base time.
-    # modelsource   is the name of the model to be used in the copy command
-    # copyOnly      is 0 for move and 1 for copy only in the time shift command
-    # hoursToShift  is the number of hours to shift the data in time
-    #                shift command
-    # databaseID    must be of type AFPS.DatabaseID
-    #            Can be obtained in various ways:
-    #            --By calling findDatabase (see below)
-    #            --By calling getDatabase (see below) with the result
-    #              of a VariableList entry of type "model" or "D2D_model"
-    # timeRange    must be of type AFPS.TimeRange.
-    #            Can be obtained in various ways:
-    #            --As an argument passed into Smart Tool or Procedure,
-    #            --By calling getTimeRange (see below)
-    #            --By calling createTimeRange (see below)
-
-    # List of available commands:
-    # copyCmd(['name1', 'name2', 'name3'], databaseID, timeRange)
-    #    Copies all grids for each weather element from the given database
-    #    into the weather element in the mutable database that overlaps
-    #    the time range.
-    #    Example:
-    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model
-    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)
-    #       self.copyCmd(['T', 'Wind'], databaseID, timeRange)
-    #    will copy the Temperature and Wind fields analysis through 48 hours
-    #    from the latest NAM12 and place them into the forecast.
-    # copyToCmd([('srcName1', 'dstName1'),
-    #            ('srcName2', 'dstName2')], databaseID, timeRange)
-    #    Copies all grids for each weather element from the given database
-    #    into the weather element in the mutable database that overlaps
-    #    the time range.  The source name and destination name are both
-    #    supplied.  This allows for copying data with different names
-    #    (The units must match).
-    #    Example:
-    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model
-    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)
-    #       self.copyToCmd([('MaxT', 'T'), ('T', 'MinT')], databaseID,
-    #         timeRange)
-    #    will copy the Max Temperature into T and T into MinT.
-    #    from the latest NAM12 and place them into the forecast.
-    #
-    # deleteCmd(['name1', 'name2', 'name3'], timeRange)
-    #    Deletes all grids that overlap the input time range for element
-    #    in the mutable database.
-    #    Example:
-    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model
-    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)
-    #       self.deleteCmd(['T', 'Wind'], timeRange)
-    #    will delete the Temperature and Wind fields analysis up to
-    #    but not including 48 hours relative to the start time of
-    #    the latest NAM12 model.
-    #
-    # zeroCmd(['name1', 'name2', 'name3'], timeRange)
-    #    Assigns the minimum possible value for scalar and vector, and ""
-    #    for weather for the parameter in the mutable database for all grids
-    #    that overlap the specified time range.
-    #    Example:
-    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model
-    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)
-    #       self.zeroCmd(['T', 'Wind'], databaseID, timeRange)
-    #    will zero the Temperature and Wind grids through 48 hours
-    #    relative to the start time of the latest NAM12 model.
-    #
-    # interpolateCmd(['name1', 'name2', 'name3'], timeRange,
-    #       interpMode="GAPS", interpState="SYNC", interval=0, duration=0)
-    #    Interpolates data in the forecast for the named weather elements
-    #    for the given timeRange.
-    #    Example:
-    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model
-    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)
-    #       self.interpolateCmd(['T', 'Wind'], timeRange, "GAPS","SYNC")
-    #    will interpolate the Temperature and Wind grids up to but
-    #    but not including 48 hours relative to the start time of
-    #the latest NAM12 model.
-    #    The interpolation will run in SYNC mode i.e. completing before
-    #    continuing with the procedure.
-    #
-    # createFromScratchCmd(['name1', 'name2'], timeRange, repeat, duration)
-    #    Creates one or more grids from scratch over the given timeRange
-    #    and assigns the default (minimum possible value for scalar
-    #    and vector, "" for weather).
-    #    The repeat interval and duration (both specified in hours) are
-    #    used to control the number of grids created.  If 0 is specified for
-    #    either one, than only 1 grid is created for the given time range.  If
-    #    valid numbers for duration and repeat are given, then grids will
-    #    be created every "repeat" hours and they will have a duration
-    #    of "duration" hours.  If there is not enough room remaining to create
-    #    a grid with the full duration, then no grid will be created in the space
-    #    remaining.  If you don't get the desired results, be sure that your input
-    #    time range starts on a valid time constraint for the element.  If the
-    #    element's time constraints (not the values supplied in this routine) contains
-    #    gaps (i.e., duration != repeatInterval), then the repeat interval and
-    #    duration will be ignored and grids will be created for each possible
-    #    constraint time.
-    #    Example:
-    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model
-    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)
-    #       self.createFromScratchCmd(['T', 'Wind'], timeRange, 3, 1)
-    #    will create the 1-hour Temperature grids through 48 hours at
-    #    3 hour intervals relative to the start time of the latest NAM12 model.
-    #
-    # timeShiftCmd(['name1', 'name2'], copyOnly, shiftAmount, timeRange)
-    #    Performs a time shift by the shiftAmount for all elements that
-    #    overlap the time range.
-    #    Example:
-    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model
-    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)
-    #       self.timeShiftCmd(['T', 'Wind'], 1, 3, timeRange)
-    #
-    # splitCmd(elements, timeRange)
-    #    Splits any grid that falls on the start time or ending time of the
-    #    specified time range for the given parameter in the mutable database.
-    #
-    # fragmentCmd(elements, timeRange)
-    #    Fragments any grids that overlap the input time range for the parm
-    #    identified in the mutable database.
-    #
-    # assignValueCmd(elements, timeRange, value)
-    #    Assigns the specified value to all grids points for the grids that
-    #    overlap the specified time range, for the weather element in the mutable
-    #    database specified.
-    #   value is:
-    #    an Integer or Float for SCALAR
-    #    a magnitude-direction tuple for VECTOR:  e.g. (55,120)
-    #    a text string for Weather which can be obtained via the
-    #      WxMethods WxString method
-    #  Example:
-    #    # Scalar
-    #    value = 60
-    #    self.assignValue(["T","Td"], 0, 12, 'NAM12', value)
-    #    # Vector
-    #    value = (15, 120)
-    #    self.assignValue(["Wind"], 0, 12, 'NAM12', value)
-    #    # Weather
-    #    from WxMethods import *
-    #    value = WxString("Sct RW")
-    #    self.assignValue(["Wx"], 0, 12, 'NAM12', value)
-
-    # Example: Copy RAP40 0-12, NAM12 13-48, GFS80 49-72 for T, Wx, and Wind,
-    # and then interpolate from hours 0 - 24.
-    #
-    #
-    #       self.copy(['T','Wx', 'Wind'], 0, 12, 'RAP40')
-    #       self.copy(['T','Wx', 'Wind'], 13, 48, 'NAM12')
-    #       self.copy(['T','Wx', 'Wind'], 49, 72, 'GFS80')
-    #       self.interpolate(['T','Wx', 'Wind'], 0, 24, 'RAP40')
-
-    def copyCmd(self, elements, databaseID, timeRange):
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        if isinstance(databaseID, DatabaseID.DatabaseID):
-            databaseID = databaseID.toJavaObj()
-        for element in elements:
-            self.__parmOp.copyCmd(element, databaseID, timeRange)
-
-    def copyToCmd(self, elements, databaseID, timeRange):
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        if isinstance(databaseID, DatabaseID.DatabaseID):
-            databaseID = databaseID.toJavaObj()
-        for src, dst in elements:
-            self.__parmOp.copyToCmd(src, dst, databaseID, timeRange)
-
-    def deleteCmd(self, elements, timeRange):
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        for element in elements:
-            self.__parmOp.deleteCmd(element, timeRange)
-
-    def zeroCmd(self, elements, timeRange):
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        for element in elements:
-            self.__parmOp.zeroCmd(element, timeRange)
-
-    def interpolateCmd(self, elements, timeRange,
-                    interpMode="GAPS", interpState="SYNC", interval=0,
-                    duration=0):
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        # Convert interval to seconds
-        interval = interval * 3600
-        for element in elements:
-            self.__parmOp.interpolateCmd(element, timeRange,
-                                         interpMode, interpState, interval,
-                                         duration)
-
-    def createFromScratchCmd(self, elements, timeRange, repeat=0, duration=0):
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        # Convert repeat and duration to seconds
-        repeat = repeat * 3600
-        duration = duration * 3600
-        for element in elements:
-            self.__parmOp.createFromScratchCmd(element, timeRange, repeat, duration)
-
-    def timeShiftCmd(self, elements, copyOnly, shiftAmount, timeRange):
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        shiftAmount = shiftAmount * 3600
-        for element in elements:
-            self.__parmOp.timeShiftCmd(element, timeRange, copyOnly,
-                                       shiftAmount)
-
-    def splitCmd(self, elements, timeRange):
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        for element in elements:
-            self.__parmOp.splitCmd(element, timeRange)
-
-    def fragmentCmd(self, elements, timeRange):
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        for element in elements:
-            self.__parmOp.fragmentCmd(element, timeRange)
-
-    def assignValueCmd(self, elements, timeRange, value):
-        from com.raytheon.viz.gfe.core.wxvalue import ScalarWxValue, VectorWxValue, WeatherWxValue
-        if isinstance(timeRange, TimeRange.TimeRange):
-            timeRange = timeRange.toJavaObj()
-        for element in elements:
-            parm = self.__parmMgr.getParmInExpr(element, 1)
-            if type(value) == types.TupleType:
-                newvalue = VectorWxValue(float(value[0]), float(value[1]), parm)
-            elif type(value) == types.StringType:
-                newvalue = WeatherKey(value)
-                newvalue = WeatherWxValue(newvalue, parm)
-            else:
-                newvalue = ScalarWxValue(float(value), parm)
-            self.__parmOp.assignValueCmd(element, timeRange, newvalue)
-
-    def __getUserFile(self, name, category):
-        from com.raytheon.uf.common.localization import PathManagerFactory, LocalizationContext
-        LocalizationType = LocalizationContext.LocalizationType
-        LocalizationLevel = LocalizationContext.LocalizationLevel
-        pathMgr = PathManagerFactory.getPathManager()
-        path = 'gfe/userPython/' + category + '/' + name
-        lc = pathMgr.getContext(LocalizationType.valueOf('CAVE_STATIC'), LocalizationLevel.valueOf('USER'))
-        lf = pathMgr.getLocalizationFile(lc, path)
-        return lf
-
-
-    def saveObject(self, name, object, category):
-        import cPickle
-        # Save a Python object (e.g. a Numeric grid)
-        # in the server under the given name
-        #   Example:
-        #   self.saveObject("MyGrid", numericGrid, "DiscrepancyValueGrids")
-        #
-        lf = self.__getUserFile(name, category)
-        fullpath = lf.getFile().getPath()
-        idx = fullpath.rfind("/")
-        if not os.path.exists(fullpath[:idx]):
-            os.makedirs(fullpath[:idx])
-        openfile = open(fullpath, 'w')
-        cPickle.dump(object, openfile)
-        openfile.close()
-        lf.save()
-
-    def getObject(self, name, category):
-        import cPickle
-        # Returns the given object stored in the server
-        #   Example:
-        #   discrepancyValueGrid = self.getObject("MyGrid","DiscrepancyValueGrids")
-        #
-        lf = self.__getUserFile(name, category)
-        fullpath = lf.getFile().getPath()
-        openfile = open(fullpath, 'r')
-        obj = cPickle.load(openfile)
-        openfile.close()
-        return obj
-
-    def deleteObject(self, name, category):
-        # Delete the given object stored in the server
-        #    Example:
-        #    self.deleteObject("MyGrid", "DiscrepancyValueGrids")
-        #
-        lf = self.__getUserFile(name, category)
-        lf.delete()
-
-    def myOfficeType(self):
-        #returns my configured office type, such as "wfo" or "rfc"
-        return self.__dataMgr.getOfficeType()
-
-    def officeType(self, siteid):
-        #returns the office type for the given site identifier
-        #returns None if unknown site id
-        a = self.__dataMgr.officeType(siteid)
-        if len(a):
-            return a
-        else:
-            return None
-
-    def availableDatabases(self):
-        dbs = []
-        availDbs = self.__parmMgr.getAvailableDbs()
-        for i in range(availDbs.size()):
-            dbId = availDbs.get(i)
-            dbs.append(DatabaseID.DatabaseID(dbId))
-        return dbs
-
-    def knownOfficeTypes(self):
-        import JUtil
-        return JUtil.javaStringListToPylist(self.__dataMgr.knownOfficeTypes())
-
-    # Retrieves a text product from the text database
-    def getTextProductFromDB(self, productID):
-        from com.raytheon.viz.gfe.product import TextDBUtil
-
-        opMode = self.gfeOperatingMode() in ("OPERATIONAL", "TEST")
-        fullText = TextDBUtil.retrieveProduct(productID, opMode)
-        textList =  fullText.splitlines(True)
-        return textList
-
-    def callTextFormatter(self, productName, dbId, varDict={}, vtecMode=None):
-        """
-        Execute the requested text product formatter.
-
-        Args: 
-                productName: the display name of the formatter to run.
-                dbId: string form of the DatabaseID to use as data source.
-                varDict: optional, product varDict, use an empty dict instead
-                         of None to signify a null varDict.
-                vtecMode: optional, for VTEC products specify VTEC mode (one of
-                          'O', 'T', 'E' or 'X').
-
-        Returns:
-                The output of the formatter--the content of the requested product.
-
-        Throws:
-                TypeError: If varDict is not a dict.
-                RuntimeError: If the formatter fails during execution. 
-        """
-        if type(varDict) is not dict:
-            raise TypeError("Argument varDict must be a dict.")
-        varDict = str(varDict)
-        
-        listener = TextProductFinishWaiter()
-        FormatterUtil.callFromSmartScript(productName, dbId, varDict, vtecMode, self.__dataMgr, listener)
-        product = listener.waitAndGetProduct()
-        state = listener.getState()
-        if not state.equals(ProductStateEnum.Finished):
-            msg = "Formatter " + productName + " terminated before completion with state: " + state.name() + \
-            ". Check formatter logs from Process Monitor for more information."
-            raise RuntimeError(msg)
-        return product
-    
-    def saveCombinationsFile(self, name, combinations):
-        """
-        Save the specified zone combinations to the localization data store.
-
-        Args: 
-                name: Name for the combinations file. The ".py" extension will
-                      automatically be appended to the final file name.
-                combinations: The zone combinations. This data structure should
-                      be a list of list of zone names 
-                      (e.g. [["OAX", "GID", "LBF"], ["DMX"], ["FSD", "ABR"]]
-
-        Throws:
-                TypeError: If combinations is not in the proper format.
-        """
-        # Validate that we were passed a collection of collections, we'll convert
-        # to list of lists to satisfy the Java type checker.
-        try:
-            for item in iter(combinations):
-                iter(item)
-        except TypeError:
-            raise TypeError("combinations must be a list of list of zone names.")
-        
-        combo_list = JUtil.pyValToJavaObj([[str(zone) for zone in group] for group in combinations])
-        CombinationsFileUtil.generateAutoCombinationsFile(combo_list, str(name))
-    
-    def loadCombinationsFile(self, name):
-        """
-        Load the specified zone combinations file form the localization data store.
-        
-        Args:
-                name: Name for the combinations file. The ".py" extension will
-                      automatically be appended to the final file name.
-
-        Returns:
-                The zone combinations as a list of lists of zone names
-                (e.g. [["OAX", "GID", "LBF"], ["DMX"], ["FSD", "ABR"]]
-        """
-        return JUtil.javaObjToPyVal(CombinationsFileUtil.init(name))
-
-    def transmitTextProduct(self, product, wanPil, wmoType):
-        """
-        Transmit the specified product. Will automatically detect if GFE is 
-        operating in OPERATIONAL or PRACTICE mode and send using the appropriate
-        route.
-
-        Args: 
-                product: the text or body of the product to transmit.
-                wanPil: the AWIPS WAN PIL for the product
-                wmoType: The WMO type of the product.
-
-        Returns:
-                The status of the transmission request as a ProductStateEnum.
-        """
-        wanPil = str(wanPil)
-        product = str(product)
-        wmoType = str(wmoType)
-        
-        transmitter = TextProductTransmitter(product, wanPil, wmoType)
-        practice = self.gfeOperatingMode()=="PRACTICE"
-        status = transmitter.transmitProduct(practice)
-        return status
-
-    def sendWFOMessage(self, wfos, message):
-        '''
-        Sends a message to a list of wfos
-        
-        Args:
-            wfos: string or list, set or tuple of strings containing the destination wfo(s)
-            
-            message: string containing the message to be sent
-
-        Returns:
-            string: empty if successful or error message
-        
-        Raises:
-            TypeError: if wfos is not a string, list, tuple or set
-        '''
-        
-        if not wfos:
-            # called with empty wfo list, nothing to do
-            return ""
-        
-        javaWfos = ArrayList()
-        if type(wfos) in [list, tuple, set]:
-            for wfo in wfos:
-                javaWfos.add(wfo)
-        elif type(wfos) is str:
-            javaWfos.add(wfos)
-        else:
-            raise TypeError("Invalid type received for wfos: " + type(wfos))
-            
-        response = self.__dataMgr.getClient().sendWFOMessage(javaWfos, message)
-        return response.message()
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+#

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+#

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+#

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+########################################################################

+# This software is in the public domain, furnished "as is", without technical

+# support, and with no warranty, express or implied, as to its usefulness for

+# any purpose.

+#

+#    SmartScript -- library of methods for Smart Tools and Procedures

+#

+# Author: hansen

+# SOFTWARE HISTORY

+#

+# Date          Ticket#  Engineer  Description

+# ------------- -------- --------- ---------------------------------------------

+# Jan 09, 2013  15626    J. Zeng   Add methods

+#                                    enableISCsend

+#                                    clientISCSendStatus

+#                                    manualSendISC_autoMode

+#                                    manualSendISC_manualMode

+# Jan 30, 2013  1559     dgilling  Fix TypeError in getGridCellSwath().

+# Mar 13, 2013  1791     bsteffen  Implement bulk getGrids to improve

+#                                  performance.

+# Mar 13, 2013  1793     bsteffen  Performance improvements for TCMWindTool

+# Apr 24, 2013  1947     randerso  Fix UVToMagDir to work with scalar arguments

+#                                  Cleaned up some constants

+# Jun 21, 2013  14983    ryu       Fixed encodeEditArea() to evaluate query

+#                                  when necessary

+# Aug 14, 2013  1571     randerso  Fixed encodeEditArea() to return

+#                                  astype(numpy.bool8) so mask can be used with

+#                                  advanced indexing (e.g. grid[mask] = value)

+# Oct 07, 2013  2424     randerso  remove use of pytz

+# Oct 29, 2013  2476     njensen   Improved getting wx/discrete keys in 

+#                                  _getGridResults

+# Oct 31, 2013  2508     randerso  Change to use DiscreteGridSlice.getKeys()

+# Nov 07, 2013  2476     dgilling  Fix _getGridsResult() for retrieving 

+#                                  Wx/Discrete in First mode.

+# Dec 23, 2013  16893    ryu       Added unloadWEs() method (created by njensen)

+# Apr 29, 2014  3097     randerso  Fixed getGrids() to return non-scalar grids

+#                                  as tuples in all cases

+# Nov 26, 2014  633      zhao      Corrected a type error in loadParm() 

+# Dec 01, 2014  3875     randerso  Added gmTime() and localTime() functions

+#                                  which are exact equivalents to those in the

+#                                  python time module. Added getTimeZoneStr and

+#                                  getTzInfo which return the site's local time

+#                                  zone as a string or as an object respectively

+#                                  Fixed createTimeRange to correctly return

+#                                  time ranges relative to local time regardless

+#                                  of setting of os.environ['TZ']

+# Jan 13, 2015    3955   randerso  Added optional parameter to availableParms to

+#                                  specify desired databases.

+#                                  Fixed createGrid to accept a DatabaseID for

+#                                  model

+# Apr 23, 2015    4259   njensen   Updated for new JEP API

+# Jul 17, 2015    4575   njensen   callSmartTool() and callProcedure() send

+#                                  HashMap for varDict

+# Aug 13, 2015    4704   randerso  Added NumpyJavaEnforcer support in

+#                                  createGrids and decodeEditArea.

+#                                  Additional code cleanup

+# Aug 26, 2015    4809   randerso  Added option group parameter to

+#                                  editAreaList()

+# Aug 26, 2015    4804   dgilling  Added callTextFormatter().

+# Aug 27, 2015    4805   dgilling  Added saveCombinationsFile().

+# Aug 27, 2015    4806   dgilling  Added transmitTextProduct().

+# Sep 16, 2015    4871   randerso  Return modified varDict from called

+#                                  Tool/Procedure

+# Sep 11, 2015    4858   dgilling  Remove notification processing from

+#                                  publishElements.

+# Jan 20, 2016    4751   randerso  Fix type of mask returned from getComposite()

+#                                  to work with numpy 1.9.2

+# Jan 28, 2016    5129   dgilling  Support changes to IFPClient.

+# Feb 22, 2016    5374   randerso  Added support for sendWFOMessage

+# Apr 05, 2016    5539   randerso  Added exception when attempting create more

+#                                  than 256 Wx keys

+# May 06, 2016    18967  ryu       Fix issue of contours plotted over

+#                                  ProposedWatches grid when ViewWCL is run.

+# Aug 22, 2016    18605  ryu       Retrieve operational text product in test

+#                                  mode.

+# Sep 28, 2016    19293  randerso  Added loadCombinationsFile method. Moved

+#                                  CombinationsFileUtil to common.

+# Oct 31, 2016    5979   njensen   Cast to primitives for compatibility

+# Feb 06, 2017    5959   randerso  Removed Java .toString() calls 

+#

+########################################################################

+

+##

+# This is a base file that is not intended to be overridden.

+##

+

+import types, string, time, sys

+from math import *

+from numpy import *

+import os

+import numpy

+import math

+import re

+import jep

+import BaseTool, Exceptions

+import DatabaseID, TimeRange, AbsTime, ParmID

+import GridInfo

+import JUtil

+import NumpyJavaEnforcer

+

+from java.util import ArrayList

+from java.util import Date

+from java.nio import FloatBuffer

+

+from com.raytheon.uf.common.time import SimulatedTime

+from com.raytheon.uf.common.time import TimeRange as javaTimeRange

+from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DByte

+from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DFloat

+from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey

+from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKeyDef

+from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteDefinition

+from com.raytheon.uf.common.dataplugin.gfe.weather import WeatherKey

+from com.raytheon.uf.common.dataplugin.gfe.db.objects import TimeConstraints

+from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridParmInfo

+GridType = GridParmInfo.GridType

+from com.raytheon.uf.common.dataplugin.gfe.server.request import SendISCRequest

+from com.raytheon.uf.common.dataplugin.gfe.textproduct import CombinationsFileUtil

+from com.raytheon.viz.gfe.dialogs.formatterlauncher import ConfigData

+ProductStateEnum = ConfigData.ProductStateEnum

+from com.raytheon.viz.gfe.textformatter import FormatterUtil

+from com.raytheon.viz.gfe.textformatter import TextProductFinishWaiter

+from com.raytheon.viz.gfe.textformatter import TextProductTransmitter

+

+

+class SmartScript(BaseTool.BaseTool):

+

+    def __init__(self, dataMgr):

+        BaseTool.BaseTool.__init__(self)

+        self.__dataMgr = dataMgr

+        self.__parmMgr = self.__dataMgr.getParmManager()

+        self.__refSetMgr = self.__dataMgr.getRefManager()

+        self.__mutableID = DatabaseID.DatabaseID(self.__parmMgr.getMutableDatabase())

+        self.__cycler = self.__dataMgr.getGridCycler()

+        self.__parmOp = self.__dataMgr.getParmOp()

+        # A cache of grids accessed by the derived class

+        #self.__pythonGrids = []

+        self.__accessTime = 0

+        self.__gridLoc = self.__parmMgr.compositeGridLocation()

+        self.__gridShape = (int(self.__gridLoc.getNy()), int(self.__gridLoc.getNx()))

+        self.__topoGrid = None

+        self.__toolType = "numeric"

+        self._empty = self.empty()

+        self._minus = self.newGrid(-1)

+        self._handlers = dict()

+

+

+    def empty(self, dtype=float32):

+        """Return a grid filled with 0"""

+        return zeros(self.getGridShape(), dtype)

+    

+    def newGrid(self, initialValue, dtype=float32):

+        """Return a grid filled with initialValue"""

+        return full(self.getGridShape(), initialValue, dtype)

+

+    ##

+    ## Call ProcessVariableList to obtain values from the user

+    ##

+    ## @param VariableList: list() of tuples describing the widgets to display

+    ##

+    ## @return dict() of values gathered from the widgets

+    ##

+    def getVariableListInputs(self, VariableList):

+        import ProcessVariableList

+        return ProcessVariableList.buildWidgetList(VariableList)

+

+

+    def mutableID(self):

+        # Returns the mutable database ID

+        return self.__mutableID

+

+    def getGridLoc(self):

+        return self.__gridLoc

+

+    def setToolType(self, toolType):

+        # Tool type is "point-based", "numeric", "parm-based"

+        # It is set when SmartScript is instantiated.

+        # For Procedures, it is set to the default of "point-based"

+        # So a procedure can override this by using this method.

+        self.__toolType = toolType

+

+    def editAreaList(self, eaGroup=None):

+        """ 

+        Returns a list of strings containing all edit areas in eaGroup.

+        If eaGroup is None, all known edit areas are returned.

+        """

+        eaList = []

+        if eaGroup is not None:

+            eans = self.__refSetMgr.getGroupData(eaGroup)

+            size = eans.size()

+            for i in range(size):

+                eaList.append(str(eans.get(i)))

+        else:

+            eans = self.__refSetMgr.getAvailableSets()

+            size = eans.size()

+            for i in range(size):

+                eaList.append(eans.get(i).getName())

+        return eaList

+

+    def getSite4ID(self, id3):

+        # Returns 4-letter site id, based on 3-letter site id

+        if id3 in ['SJU']:

+            return "TJSJ"

+        elif id3 in ['AFG', 'AJK', 'HFO', 'GUM']:

+            return "P" + id3

+        elif id3 in ['AER', 'ALU']:

+            return "PAFC"

+        else:

+            return "K" + id3

+

+

+    def loadedParms(self):

+        # Returns a list of tuples that are weather elements that are

+        # loaded.  The tuples are (element, level, model).  element and

+        # level are strings.  model is a DatabaseID.

+        allParms = self.__parmMgr.getAllParms()

+        retList = []

+        for p in allParms:

+            pid = p.getParmID()

+            dbid = DatabaseID.DatabaseID(pid.getDbId())

+            retList.append((pid.getParmName(), pid.getParmLevel(), dbid))

+        return retList

+

+    def availableParms(self, dbs=None):

+        # Returns a list of tuples that are weather elements that are

+        # available in the specified dbs.

+        # dbs may contain a list of DatabaseIDs or a single DatabaseID

+        # If dbs is None parms from all available databases are returned.   

+        # The tuples are (element, level, model).  

+        # element and level are strings, model is a DatabaseID.

+        retList = []

+

+        if dbs is None:

+            dbs = self.__parmMgr.getAvailableDbs()

+        elif type(dbs) is not list: # assume single db

+            db = dbs

+            

+            if isinstance(db, DatabaseID.DatabaseID):

+                db = db.toJavaObj()

+            else:

+                # assume java DatabaseID

+                pass

+            

+            dbs = ArrayList()

+            dbs.add(db)

+            

+        for i in range(dbs.size()):

+            d = dbs.get(i);

+            parms = self.__parmMgr.getAvailableParms(d)

+            for pid in parms:

+                dbid = DatabaseID.DatabaseID(pid.getDbId())

+                retList.append((pid.getParmName(), pid.getParmLevel(), dbid))

+        return retList

+

+    def selectedParms(self):

+        # Returns a list of tuples that are weather elements that are

+        # currently selected.  The tuples are (element, level, model).

+        # Element and level are string. model is a DatabaseID.

+        retList = []

+        parms = self.__parmMgr.getSelectedParms()

+        for p in parms:

+            parmid = p.getParmID()

+            javaDbId = parmid.getDbId()

+            dbid = None

+            if javaDbId is not None:

+                dbid = DatabaseID.DatabaseID(javaDbId)

+            retList.append((parmid.getParmName(), parmid.getParmLevel(),

+              dbid))

+

+        return retList

+

+    def loadParm(self, model, element, level, mostRecent=0):

+        # loads a parm and makes it visible.

+        parm = self.getParm(model, element, level, timeRange=None,

+          mostRecent=mostRecent)

+        if parm is not None:

+            self.__parmMgr.setParmDisplayable(parm, 1)

+        else:

+            raise TypeError("SmartScript loadParm: " + \

+              "couldn't load " + repr(model) + ' ' + repr(element) + ' ' + repr(level) + \

+              ' ' + str(mostRecent) + " (None is returned from getParm())" )

+    ##

+    # Get the list of timeranges locked by me in this weather element.

+    #

+    # @param weName: Weather element to look for locks on

+    # @type weName: String

+    # @param level: The level of the element to look for locks on

+    # @type level: String

+    # @return: The time ranges

+    # @rtype: Python list of Python TimeRanges

+    def lockedByMe(self, weName, level):

+        # returns list of time ranges locked by me in this weather element

+        # Uses the mutable database

+        parm = self.getParm(self.mutableID(), weName, level)

+        if parm is None:

+            return []

+        lt = parm.getLockTable()

+        jlbm = lt.lockedByMe()

+        # jlbm is a Java list of Java TimeRanges. Convert it to Python.

+        jlbmIter = jlbm.iterator()

+        lbm = []

+        while (jlbmIter.hasNext()):

+            jtr = next(jlbmIter)

+            tr = TimeRange.TimeRange(jtr)

+            lbm.append(tr)

+        return lbm

+

+    ##

+    # Get the list of timeranges locked by other users in this weather element.

+    #

+    # @param weName: Weather element to look for locks on

+    # @type weName: String

+    # @param level: The level of the element to look for locks on

+    # @type level: String

+    # @return: The time ranges

+    # @rtype: Python list of Python TimeRanges

+    def lockedByOther(self, weName, level):

+        # returns list of time ranges locked by others in this weather element

+        # Uses the mutable database

+        parm = self.getParm(self.mutableID(), weName, level)

+        if parm is None:

+            return []

+        lt = parm.getLockTable()

+        jlbo = lt.lockedByOther()

+        # jlbo is a Java list of Java TimeRanges. Convert it to Python.

+        jlboIter = jlbo.iterator()

+        lbo = []

+        while (jlboIter.hasNext()):

+            jtr = next(jlboIter)

+            tr = TimeRange.TimeRange(jtr)

+            lbo.append(tr)

+        return lbo

+

+    def forceLock(self, weName, level, startT, endT):

+        # forces locks in the given time range (startT to endT).

+        # startT, endT can either be ints/floats, or should be AbsTimes

+        # Returns 0 if not successful, 1 for okay.

+        if (type(startT) is int or type(startT) is float) \

+          and (type(endT) is int or type(endT) is float):

+            t1 = AbsTime.AbsTime(int(startT))

+            t2 = AbsTime.AbsTime(int(endT))

+            tr = TimeRange.TimeRange(t1, t2)

+        else:

+            tr = TimeRange.TimeRange(startT, endT)   #AbsTime

+        parm = self.getParm(self.mutableID(), weName, level)

+        if parm is None:

+            return 0

+        else:

+            return parm.forceLockTR(tr.toJavaObj())

+

+

+    def vtecActiveTable(self):

+        #returns the VTEC active table (or specified table)

+        import ActiveTableVtec

+        entries = self.__dataMgr.getActiveTable()

+        try:

+            return ActiveTableVtec.transformActiveTableToPython(entries)

+        except:

+            raise TypeError("SmartScript vtecActiveTable: could not convert to python objects.")

+

+

+    def gfeOperatingMode(self):

+        #returns the current operating mode of the GFE.

+        #Standard, PRACTICE, TEST

+        return self.__dataMgr.getOpMode().name()

+

+#------------------------------------------------------------------------

+# ISC control functions

+#------------------------------------------------------------------------

+

+    def enableISCsend(self, state):

+        #sets the overall isc send state.  If the send state is false, then

+        #no ISC grids can be transmitted.  To change the behavior

+        #when these programs (e.g., procedures) are run from the command line,

+        #you can enable/disable the send capability upon saving.  This

+        #command does not send grids, but sets the system state.  When

+        #saving grids and SendISCOnSave is set, or the manual Send ISC Dialog

+        #is used, then the grids will be sent.

+        self.__dataMgr.enableISCsend(state)

+

+    def clientISCSendStatus(self):

+        #returns the current state for sending isc from this program.  This

+        #depicts the state of whether this client has been enabled to send

+        #ISC via the SendISCOnSave or manual Send ISC Dialog.  The ifpServer

+        #still needs to be properly configured for sending to occur.

+        return self.__dataMgr.clientISCSendStatus()

+

+    def manualSendISC_autoMode(self):

+        #Simulates the use of the SendISCDialog.  Note if the ifpServer's

+        #SendISCOnSave is enabled, then this routine will fail as grids are

+        #sent when saved and the manual operation is not allowed.  The

+        #overall isc send state must also be True for this command to work.

+        req = ArrayList()

+        req.add(SendISCRequest())

+        self.__parmOp.sendISC(req)

+

+    def manualSendISC_manualMode(self, requests):

+        #simulates the use of the SendISCDialog.  Note if the ifpServer's

+        #SendISCOnSave is enabled, then this routine will fail as grids are

+        #sent when saved and the manual operation is not allowed.

+        #The requests are tuples of (parmName, parmLevel, timeRange). The

+        #TimeRange is an TimeRange() instance.  The overall isc

+        #send state must also be True for this command to work.

+        req = ArrayList()

+        for parmName, parmLevel, tr in requests:

+            pid = ParmID.ParmID(name=parmName, dbid=self.mutableID(), level=parmLevel).toJavaObj()

+            req.add(SendISCRequest(pid, tr.toJavaObj()))

+        self.__parmOp.sendISC(req)

+

+

+#########################################################################

+## Smart Tool methods                                                  ##

+#########################################################################

+

+        # Arguments

+        #   The following arguments are used throughout the

+        #   SmartScript Library methods

+        #

+        # self: When you call a method, use the "self" prefix (see

+        #       examples below)

+        # model: There are various ways to specify the database model

+        #       from which you want the values:

+        #    -- Simply "Fcst" or "Official" OR

+        #    -- siteID_type_model_modeltime

+        #       where the "type" is an empty string for Standard GFE data

+        #       and is "D2D" for D2D data.

+        #       Examples:

+        #         BOU__NAM12_Mar2912  :gets March 29 12Z NAM12 run created by GFE.

+        #         BOU_D2D_NAM12_Mar2912  :gets March 29 12Z original NAM12 run from D2D.

+        #       If you omit the "modeltime", the most recent model run will

+        #       be selected. For example:

+        #         BOU__NAM12 : gets the most recent NAM12 run created by GFE.

+        #         BOU_D2D_NAM12 : gets the most recent original NAM12 run from D2D.

+        #    -- the result of soliciting a model from the user using the

+        #       "model" or "D2D_model" type of VariableList entry. (See

+        #       examples above.)

+        #    -- you may also use a DatabaseID (see getDatabase, below)

+        #    -- simple string with no special characters (this will be

+        #       assumed to be a model created "on-the-fly"

+        # element: The element name in quotes:

+        #       e.g.  "QPF", "rh", "tp"

+        # level: The level in quotes:

+        #       e.g. "SFC", "MB350", "BL030"

+        # x, y: integer coordinates

+        # timeRange: Must be a special time range object such as

+        #   that passed in the argument list as GridTimeRange or a list of time

+        #   range objects. If it is a list then the return value will be a dict

+        #   where the time range objects are keys and the result of getGrids

+        #   for each time range is the value.

+        # mode: specifies how to handle the situation if multiple grids

+        #   are found within the given time range:

+        #   "TimeWtAverage": return time-weighted Average value

+        #   "Average" : return Average values

+        #   "Max" : return Max values

+        #   "Min" : return Min values

+        #   "Sum" : return Summed values

+        #   "First" : return values from grid with earliest time range

+        #   "List" : return list of grids (or values for getValue)

+        # noDataError: If 1, and there is no data, the Smart Tool will abort.

+        #   Otherwise, return None. None is a special variable in Python

+        #   which can be tested as follows:

+        #     PoP = self.getGrid("Fcst", "PoP", "SFC", GridTimeRange,

+        #           noDataError=0)

+        #     if PoP is None:

+        #         print("No data found for PoP")

+        # mostRecentModel: Applies only to model data. Will get the

+        #   most recent model and ignore any times (if included) in the

+        #   model argument.  (Note that if a time is not included in the

+        #   model argument, you will automatically get the most recent

+        #   model no matter how this argument is set.)

+

+    ###########################

+    ## Grid Access methods

+

+    def getGrids(self, model, element, level, timeRange,

+                 mode="TimeWtAverage",

+                 noDataError=1, mostRecentModel=0,

+                 cache=1):

+        # Get the value(s) for the given model, element, and level

+        #   at the x, y coordinate and over the given timeRange.

+        #

+        # The resulting grid values can be accessed as follows:

+        #   PoPGrid = self.getGrids("Fcst","PoP","SFC", GridTimeRange)

+        #   popValue = PoPGrid[x][y]

+        #

+        #  where x and y are integer grid coordinates.

+        #

+        # The argument descriptions are given above

+

+        if isinstance(model, DatabaseID.DatabaseID):

+            model = model.modelIdentifier()

+

+        timeRangeList = None

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        elif isinstance(timeRange, list):

+            timeRangeList = timeRange

+            timeRangeArray = jep.jarray(len(timeRangeList), javaTimeRange)

+            for i in range(len(timeRangeList)):

+                tr = timeRangeList[i]

+                if isinstance(tr, TimeRange.TimeRange):

+                    tr = tr.toJavaObj()

+                timeRangeArray[i] = tr

+            timeRange = timeRangeArray

+#        if cache:

+#            for cModel, cElement, cLevel, cMostRecent, cRange, cMode, cResult in \

+#                    self.__pythonGrids:

+#                if cModel == model and cElement == element and \

+#                       cLevel == level and cRange == timeRange \

+#                       and cMode == mode and cMostRecent == mostRecentModel:

+#                    return cResult

+

+        # Get the parm from parmMgr, find the corresponding result

+        exprName = self.getExprName(model, element, level, mostRecentModel)

+        parm = self.__parmMgr.getParmInExpr(exprName, 1)

+        if parm is None:

+            if noDataError == 1:

+                raise Exceptions.EditActionError(

+                    "NoData", "No Weather Element for " + exprName)

+            else:

+                return None

+        result = self.__cycler.getCorrespondingResult(parm, timeRange, mode)

+        if timeRangeList is not None:

+            retVal = {}

+            for i in range(len(timeRangeList)):

+                iresult = self._getGridsResult(timeRangeList[i], noDataError, mode, exprName, result[i])

+                retVal[timeRangeList[i]] = iresult

+            return retVal

+        else:

+            return self._getGridsResult(timeRange, noDataError, mode, exprName, result)

+

+    def _getGridsResult(self, timeRange, noDataError, mode, exprName, result):

+        retVal = None

+        if result is not None:

+            if len(result) == 0:

+                retVal = None

+            elif "List" == mode:

+                xlated = []

+                for rgrid in result:

+                    jxlgrid = rgrid.getGridSlice()                    

+                    xlgrid = jxlgrid.getNDArray()

+                    if type(xlgrid) is ndarray and xlgrid.dtype == numpy.int8:                    

+                        # discrete or weather

+                        keys = JUtil.javaObjToPyVal(jxlgrid.getKeyList())

+                        xlgrid = (xlgrid, keys)

+                    elif type(xlgrid) is not numpy.ndarray and len(xlgrid) == 2:

+                        # vector

+                        xlgrid = tuple(xlgrid)                    

+                    xlated.append(xlgrid)

+                retVal = xlated

+            else:

+                result = result[0];

+                slice = result.getGridSlice()

+                retVal = slice.getNDArray()

+                if type(retVal) is ndarray and retVal.dtype == numpy.int8:

+                    # discrete or weather

+                    keys = JUtil.javaObjToPyVal(slice.getKeyList())

+                    retVal = (retVal, keys)

+                elif type(retVal) is not numpy.ndarray and len(retVal) == 2:

+                    # vector

+                    retVal = tuple(retVal)

+

+        if retVal is None or retVal == []:

+            if noDataError == 1:

+                msg = "No corresponding grids for " + exprName + " " + str(timeRange)

+                raise UserWarning(msg)

+#        else:

+#            self.__pythonGrids.append((model, element, level, mostRecentModel,

+#                                       timeRange, mode, retVal))

+        return retVal

+

+

+    # Returns history info for the specified model, element, level and

+    # timerange.  ISC grids force this to be a list of lists [[]].

+    def getGridHistory(self, model, element, level, timeRange):

+        if isinstance(model, DatabaseID.DatabaseID):

+            model = model.modelIdentifier()

+        exprName = self.getExprName(model, element, level)

+        parm = self.__parmMgr.getParmInExpr(exprName, 1)

+        if parm is None:

+            raise Exceptions.EditActionError(

+                    "NoData", "getGridInfo: No Weather Element " + exprName)

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        grids = parm.getGridInventory(timeRange)

+        if len(grids) == 0:

+            return []

+        historyList = []

+        for grid in grids:

+            history = grid.getHistory()

+            histList = []

+            for h in history:

+                histList.append((str(h.getOrigin()),

+                                 ParmID.ParmID(jParmId=h.getOriginParm()),

+                                 TimeRange.TimeRange(h.getOriginTimeRange()),

+                                 AbsTime.AbsTime(h.getTimeModified()),

+                                 str(h.getWhoModified()),

+                                 AbsTime.AbsTime(h.getUpdateTime()),

+                                 AbsTime.AbsTime(h.getPublishTime())))

+

+            historyList.append(histList)

+

+        return historyList

+

+    def taperGrid(self, editArea, taperFactor=5):

+        # Returns a 2-D Grid of values between 0-1 about the

+        # given edit area.

+        # These values can be applied by smart tools to taper results.

+        # Argument:

+        #   editArea : must be of type AFPS.ReferenceData or None

+        #              (use editArea tool argument)

+        #   taperFactor: If set to zero, will do Full Taper

+        # Example:

+        #  def preProcessTool(self, editArea):

+        #     self._tGrid = self.taperGrid(editArea, 5)

+        #  def execute(self, variableElement):

+        #     return variableElement + self._tGrid * 10.0

+        #

+        taperGrid = self.__refSetMgr.taperGrid(editArea, taperFactor)

+        taperGrid = taperGrid.getNDArray()        

+        return taperGrid

+

+    def directionTaperGrid(self, editArea, direction):

+        # Returns a 2-D Grid of values between 0-1 within the

+        # given edit area.

+        # E.g. if the Dir is W and x,y is half-way along the

+        #  W to E vector within the given edit area, the value of

+        #  directionTaperGrid at x,y will be .5

+        # These values can be applied by smart tools to show

+        #  spatial progress across an edit area.

+        # Argument:

+        #   editArea : must be of type AFPS.ReferenceData or None

+        #              (use editArea tool argument)

+        #   direction : 16 point text direction e.g. "NNW", "NW", etc.

+        # Example:

+        #  def preProcessTool(self, editArea):

+        #      self._spaceProgress = self.directionTaperGrid(editArea, "NW")

+        #  def execute(self, variableElement):

+        #      return variableElement * self._spaceProgress

+        #

+        taperGrid = self.__refSetMgr.directionTaperGrid(editArea, direction)

+        taperGrid = taperGrid.getNDArray()        

+        return taperGrid

+

+

+    def getComposite(self, WEname, GridTimeRange, exactMatch=1, onlyISC=0):

+        # Returns a composite grid consisting of the primary grid and any

+        # corresponding ISC grid, blended together based on the mask information

+        # derived from the Grid Data History. Primary grid must exist. Returns

+        # the set of points that are valid in the output grid. (Note the output

+        # grid consists of the primary grid and isc grid. Any "invalid" points,

+        # indicate those areas that have no isc data and are outside the home

+        # site's region.  The returned grid will have the primary data in

+        # the site's region.)

+        #

+        # A Python tuple is returned.

+        # For Scalar elements, the tuple contains:

+        #   a numeric grid of 1's and 0's where 1 indicates a valid point

+        #   a numeric grid of scalar values

+        # For Vector elements, the tuple contains:

+        #   a numeric grid of 1's and 0's where 1 indicates a valid point

+        #   a numeric grid of scalar values representing magnitude

+        #   a numeric grid of scalar values representing direction

+        # For Weather elements, the tuple contains:

+        #   a numeric grid of 1's and 0's where 1 indicates a valid point

+        #   a numeric grid of byte values representing the weather value

+        #   list of keys corresponding to the weather values

+        #

+        # For example:

+        #    isc = self.getComposite(WEname, GridTimeRange)

+        #    if isc is None:

+        #      self.noData()

+        #    # See if we are working with a Scalar or Vector element

+        #    wxType = variableElement_GridInfo.type()

+        #    if wxType == 0: # SCALAR

+        #         bits, values = isc

+        #    elif wxType == 1: # VECTOR

+        #         bits, mag, dir = isc

+

+

+        if onlyISC == 0:

+            exprName = self.getExprName("Fcst", WEname, "SFC")

+        else:

+            exprName = self.getExprName("ISC", WEname, "SFC")

+        parm = self.__parmMgr.getParmInExpr(exprName, 1)

+        if parm is None:

+            return None

+        seTime = AbsTime.AbsTime(self.__dataMgr.getSpatialDisplayManager().getSpatialEditorTime())

+        if GridTimeRange.contains(seTime):

+            gridTime = seTime

+        else:

+            gridTime = GridTimeRange.startTime()

+        from com.raytheon.viz.gfe.edittool import GridID

+        gid = GridID(parm, gridTime.javaDate())

+

+        wxType = self.__dataMgr.getClient().getPythonClient().getGridParmInfo(parm.getParmID()).getGridType()

+        if GridType.SCALAR.equals(wxType):

+            from com.raytheon.uf.common.dataplugin.gfe.slice import ScalarGridSlice

+            slice = ScalarGridSlice()

+            bits = self.__dataMgr.getIscDataAccess().getCompositeGrid(gid, exactMatch, slice)

+            args = (bits.getNDArray().astype(bool), slice.getScalarGrid().getNDArray())

+        elif GridType.VECTOR.equals(wxType):

+            from com.raytheon.uf.common.dataplugin.gfe.slice import VectorGridSlice

+            slice = VectorGridSlice()

+            bits = self.__dataMgr.getIscDataAccess().getVectorCompositeGrid(gid, exactMatch, slice)

+            args = (bits.getNDArray().astype(bool), slice.getMagGrid().getNDArray(), slice.getDirGrid().getNDArray())

+        elif GridType.WEATHER.equals(wxType):

+            from com.raytheon.uf.common.dataplugin.gfe.slice import WeatherGridSlice

+            slice = WeatherGridSlice()

+            bits = self.__dataMgr.getIscDataAccess().getCompositeGrid(gid, exactMatch, slice)

+            keys = []

+            for k in slice.getKeys():

+                keys.append(str(k))

+            args = (bits.getNDArray().astype(bool), slice.getWeatherGrid().getNDArray(), keys)

+        elif GridType.DISCRETE.equals(wxType):

+            from com.raytheon.uf.common.dataplugin.gfe.slice import DiscreteGridSlice

+            slice = DiscreteGridSlice()

+            bits = self.__dataMgr.getIscDataAccess().getCompositeGrid(gid, exactMatch, slice)

+            keys = []

+            for k in slice.getKeys():

+                keys.append(str(k))

+            args = (bits.getNDArray().astype(bool), slice.getDiscreteGrid().getNDArray(), keys)

+        return args

+

+    ##

+    # Return the GridInfo object for the given weather element and timeRange

+    # Example:

+    #    timeRange = self.getTimeRange("Today")

+    #    infoList = self.getGridInfo("Fcst", "T", "SFC", timeRange)

+    #    for info in infoList:

+    #        print "grid", info.gridTime()

+    #

+    # @param model: The model for which grid info is requested.

+    # @type model: DatabaseId or String

+    # @param element: The element for which grid info is requested.

+    # @type element: String

+    # @param level: The level for which grid info is requested.

+    # @type level: String

+    # @param timeRange: A time range over which grid info is requested.

+    # @type timeRange: com.raytheon.uf.common.time.TimeRange or TimeRange

+    # @param mostRecentModel: whether to use current time in request expr.

+    # @type mostRecentModel: integer or boolean

+    # @return: Java GridParmInfo object

+    def getGridInfo(self, model, element, level, timeRange,

+                    mostRecentModel=0):

+        if isinstance(model, DatabaseID.DatabaseID):

+            model = model.modelIdentifier()

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        parm = self.getParm(model, element, level, mostRecentModel)

+        if parm is None:

+            exprName = self.getExprName(model, element, level, mostRecentModel)

+            raise Exceptions.EditActionError(

+                    "NoData", "getGridInfo: No Weather Element " + exprName)

+        grids = parm.getGridInventory(timeRange)

+        if len(grids) == 0:

+            return []

+        gridParmInfo = parm.getGridInfo()

+        gridInfos = []

+        for grid in grids:

+            timeRange = grid.getGridTime()

+            gridInfo = GridInfo.GridInfo(gridParmInfo=gridParmInfo,

+                                         gridTime=timeRange)

+            gridInfos.append(gridInfo)

+        return gridInfos

+

+    ###########################

+    ## Sounding methods

+

+    # Numeric only

+    def makeNumericSounding(self, model, element, levels, timeRange,

+                     noDataError=1, mostRecentModel=0):

+        # Make a numeric sounding for the given model, element, and levels

+

+        # Example:

+        #  levels = ["MB850","MB800","MB750","MB700","MB650","MB600"]

+        #  gh_Cube, rh_Cube = self.makeNumericSounding(

+        #                         model, "rh", levels, GridTimeRange)

+        #

+        # Arguments:

+        #

+        # The "levels" argument is a Python list of levels INCREASING

+        #  in height.

+        # This method returns two numeric cubes:

+        #   ghCube of geopotential heights for the given levels

+        #   valueCube of values for the given levels

+

+        ghCube = []

+        valueCube = []

+        magCube = []

+        dirCube = []

+        for level in levels:

+

+            ghGrids = self.getGrids(model, "gh", level, timeRange,

+                                    noDataError=noDataError,

+                                    mostRecentModel=mostRecentModel)

+            if ghGrids is None:

+                return None

+

+            valueGrids = self.getGrids(model, element, level, timeRange,

+                                       noDataError=noDataError,

+                                       mostRecentModel=mostRecentModel)

+            if valueGrids is None:

+                return None

+

+            if type(ghGrids) == list:

+                ghGrid = ghGrids[0]

+            else:

+                ghGrid = ghGrids

+

+            if type(valueGrids) == list:

+                valueGrid = valueGrids[0]

+            else:

+                valueGrid = valueGrids

+

+            #jdynina ghCube = ghCube + [ghGrid]

+            ghCube.append(ghGrid)

+

+            if type(valueGrid) == tuple:

+                magCube = magCube + [valueGrid[0]]

+                dirCube = dirCube + [valueGrid[1]]

+            else:

+                valueCube = valueCube + [valueGrid]

+

+        ghCube = array(ghCube)

+        if len(magCube) > 0:

+            magCube = array(magCube)

+            dirCube = array(dirCube)

+            valueCube = (magCube, dirCube)

+        else:

+            valueCube = array(valueCube)

+        return (ghCube, valueCube)

+

+    # numeric only

+    def getNumericMeanValue(self, model, element, levels, timeRange,

+                     noDataError=1):

+        # Return a numeric array of mean values for the given element

+        #  between and including the given levels

+        if len(levels) < 1:

+            return self.errorReturn(

+                noDataError,

+                "SmartScript.getNumericMeanValue:: No Levels for Mean Value.")

+        elementType = "Scalar"

+        empty = self.getTopo() * 0.0

+        totalValue = empty

+        uSum = empty

+        vSum = empty

+        for level in levels:

+            value = self.getGrids(model, element, level, timeRange,

+                             noDataError=noDataError)

+            if type(value) == tuple:

+                elementType = "Vector"

+                uw, vw = self.MagDirToUV(value[0], value[1])

+                uSum = uSum + uw

+                vSum = vSum + vw

+            else:

+                totalValue = totalValue + value

+        # Compute the average

+        totCount = float(len(levels))

+        if elementType == "Scalar":

+            return totalValue / totCount

+        else:

+            u = uSum / totCount

+            v = vSum / totCount

+            mag, dir = self.UVToMagDir(u, v)

+            mag = int(mag + 0.5)

+            dir = int(dir + 0.5)

+            return (mag, dir)

+

+

+    ###########################

+    ## Conversion methods

+

+    def UVToMagDir(self, u, v):

+        RAD_TO_DEG = 180.0 / numpy.pi

+        # Sign change to make math to meteor. coords work

+        u = -u

+        v = -v

+        if type(u) is numpy.ndarray or type(v) is numpy.ndarray:

+            speed = numpy.sqrt(u * u + v * v)

+            dir = numpy.arctan2(u, v) * RAD_TO_DEG

+            dir[numpy.greater_equal(dir, 360)] -= 360

+            dir[numpy.less(dir, 0)] += 360

+        else:

+            speed = math.sqrt(u * u + v * v)

+            dir = math.atan2(u, v) * RAD_TO_DEG

+            while dir < 0.0:

+                dir = dir + 360.0

+            while dir >= 360.0:

+                dir = dir - 360.0

+        return (speed, dir)

+

+    def MagDirToUV(self, mag, dir):

+        DEG_TO_RAD = numpy.pi / 180.0

+        # Note sign change for components so math to meteor. coords works

+        uw = - sin(dir * DEG_TO_RAD) * mag

+        vw = - cos(dir * DEG_TO_RAD) * mag

+        return (uw, vw)

+

+    def convertMsecToKts(self, value_Msec):

+        # Convert from meters/sec to Kts

+        return value_Msec * 3600.0 / 1852.0

+

+    def convertKtoF(self, t_K):

+        # Convert the temperature from Kelvin to Fahrenheit

+        # Degrees Fahrenheit = (Degrees Kelvin - 273.15) / (5/9) + 32

+        t_F = (t_K - 273.15) * 9.0 / 5.0 + 32.0

+        return t_F

+

+    def KtoF(self, t_K):

+        return self.convertKtoF(t_K)

+

+    def convertFtoK(self, t_F):

+        # Convert the temperature from Fahrenheit to Kelvin

+        # Degrees Kelvin = (Degrees Fahrenheit - 32) * (5 / 9) + 273.15

+        t_K = (t_F - 32.0) * (5.0 / 9.0) + 273.15;

+        return t_K

+

+    def FtoK(self, t_F):

+        return self.convertFtoK(t_F)

+

+    def convertFtToM(self, value_Ft):

+        # Convert the value in Feet to Meters

+        return value_Ft * 0.3048

+

+#########################################################################

+## Error Handling                                                      ##

+#########################################################################

+

+    def abort(self, info):

+        # This call will send the info to the GFE status bar,

+        #  put up a dialog with the given info, and abort the

+        #  smart tool or procedure.

+        # Example:

+        #  self.abort("Error processing my tool")

+        #

+        raise TypeError(info)

+

+    def noData(self, info="Insufficient Data to run Tool"):

+        # Raise the NoData exception error

+        raise Exceptions.EditActionError("NoData", info)

+

+    def cancel(self):

+        # Cancels a smart tool without displaying an error message

+        raise Exceptions.EditActionError("Cancel", "Cancel")

+

+    def errorReturn(self, noDataError, message):

+        if noDataError == 1:

+            self.abort(message)

+        else:

+            return None

+

+    ##

+    # Sends the text message to the GFE status bar with the

+    #  given status code: "R" (regular), "S" (significant), "U" (urgent),

+    #  or "A" (alert)

+    # Example:

+    #  self.statusBarMsg("Running Smart Tool", "R")

+    #

+    # @param message: The message to send.

+    # @type message: string

+    # @param status: Importance of message. "A"=Alert, "R"=Regular, "U"=Urgent;

+    #                anything else=Significant

+    # @type status: string

+    # @param category: The message category. Defaults to "GFE".

+    # @type category: string

+    # @return: None

+    def statusBarMsg(self, message, status, category="GFE"):

+        from com.raytheon.uf.common.status import UFStatus

+        Priority = UFStatus.Priority

+

+        if "A" == status:

+            importance = Priority.PROBLEM

+        elif "R" == status:

+            importance = Priority.EVENTA

+        elif "U" == status:

+            importance = Priority.CRITICAL

+        else:

+            importance = Priority.SIGNIFICANT

+

+        if category not in self._handlers:

+            self._handlers[category] = UFStatus.getHandler("GFE", category, 'GFE')

+

+        self._handlers[category].handle(importance, message);

+

+    #########################

+    ##  Smart Commands

+    ##

+    ## These commands take some similar arguments:

+    ##   editArea : must be of type AFPS.ReferenceData or None

+    ##              (See getEditArea)

+    ##              If you specify None, the system will supply

+    ##              the active edit area from the GFE or from

+    ##              the editArea argument for runProcedure.

+    ##   timeRange: must be of type AFPS.TimeRange or None

+    ##              (See getTimeRange and createTimeRange)

+    ##              If you specify None, the system will supply

+    ##              the selected Time Range from the GFE or from

+    ##              the timeRange argument for runProcedure.

+    ##   varDict  : If you supply a varDict in this call, the

+    ##              variable list dialog will not be displayed

+    ##              when the tool is run.

+    ##              If you supply a varDict from a Procedure,

+    ##              make sure that the variables

+    ##              for all the tools called by the Procedure are

+    ##              supplied in your varDict.

+    ##   missingDataMode: Can be "Stop", "Skip", or "Create". If not

+    ##              included, will be set to the current GFE default.

+    ##   modal:     If 0, VariableList dialogs will appear with the

+    ##              non-modal "Run" and "Run/Dismiss" buttons.

+    ##              Otherwise, they will appear with the "Ok" button.

+    ##

+    ##  If editValues is true, the grid values are changed.

+    ##  FOR POINT-BASED TOOLS ONLY:

+    ##     If calcArea is true, a reference area is created and saved which

+    ##       shows discrepancies greater than the DiscrepancyValue between the current

+    ##       value and new value.

+    ##     If calcGrid is true, a scalar grid is created which shows the discrepancy

+    ##       amount between the current value and new value. (Not implemented.)

+    ##

+    ## These commands all return an error which will be None if no

+    ##   errors occurred.  Otherwise, the errorType and errorInfo

+    ##   can be accessed e.g. error.errorType() and error.errorInfo()

+    ## If "noData" has been called, the errorType will be "NoData" and

+    ##   can be tested by the calling tool or script.

+

+

+    def callSmartTool(self, toolName, elementName, editArea=None,

+                      timeRange=None, varDict=None,

+                      editValues=1, calcArea=0, calcGrid=0,

+                      passErrors=[],

+                      missingDataMode="",

+                      modal=1):

+        # passErrors:  a list of errors to ignore and pass back to the

+        #  calling program.  Some errors that can be ignored are:

+        #    NoData

+        #    NoElementToEdit

+        #    ExecuteOrClassError

+        #    LockedGridError

+        #

+        # For example:

+        #  In the Procedure:

+        #     error = self.callSmartTool(

+        #        "MyTool", "MixHgt", editArea, timeRange, varDict,

+        #        passErrors= ["NoData"])

+        #     if error is not None:

+        #        print "No Data available to run tool"

+        #

+        #  In the Smart Tool:

+        #     mixHgt = self.getGrids(model, "MixHgt", "SFC", timeRange)

+        #     if mixHgt is None:

+        #        self.noData()

+

+        if editArea is None or not editArea.getGrid().isAnyBitsSet():

+            editArea = self.__refSetMgr.fullRefSet()

+            emptyEditAreaFlag = True

+        else:

+            emptyEditAreaFlag = False

+            

+        javaDict = None

+        if varDict is not None:

+            javaDict = JUtil.pyValToJavaObj(varDict)

+

+        parm = self.getParm(self.__mutableID, elementName, "SFC")

+        if timeRange is None:

+            from com.raytheon.viz.gfe.core.parm import ParmState

+            timeRange = parm.getParmState().getSelectedTimeRange()

+        else:

+            timeRange = timeRange.toJavaObj()

+

+        from com.raytheon.viz.gfe.smarttool import SmartUtil

+        result, returnedDict = SmartUtil.callFromSmartScript(self.__dataMgr, toolName, elementName, editArea,

+                                            timeRange, javaDict, emptyEditAreaFlag,

+                                            JUtil.pylistToJavaStringList(passErrors),

+                                            missingDataMode, parm)

+

+        if varDict is not None and returnedDict:

+            returnedDict = JUtil.javaObjToPyVal(returnedDict)

+            varDict.clear()

+            varDict.update(returnedDict)

+

+        if result:

+            raise Exceptions.EditActionError(errorType="Error", errorInfo=str(result))

+        return None

+

+    def callProcedure(self, name, editArea=None, timeRange=None, varDict=None,

+                      missingDataMode="Stop",

+                      modal=1):

+        if editArea is None:

+            from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData

+            editArea = ReferenceData()

+        if timeRange is None:

+            from com.raytheon.uf.common.time import TimeRange as JavaTimeRange

+            timeRange = JavaTimeRange()

+        else:

+            timeRange = timeRange.toJavaObj()

+

+        javaDict=None

+        if varDict is not None:

+            javaDict = JUtil.pyValToJavaObj(varDict)

+

+        from com.raytheon.viz.gfe.procedures import ProcedureUtil

+        result, returnedDict = ProcedureUtil.callFromSmartScript(self.__dataMgr, name, editArea, timeRange, javaDict)

+        

+        if varDict is not None and returnedDict:

+            returnedDict = JUtil.javaObjToPyVal(returnedDict)

+            varDict.clear()

+            varDict.update(returnedDict)

+

+        # callSmartTool raises the exception, but here it is returned.

+        if result:

+           return Exceptions.EditActionError(errorType="Error", errorInfo=str(result))

+        return None

+

+

+    ###########################

+    ## Creating On-the-fly elements

+

+    def createGrid(self, model, element, elementType, numericGrid, timeRange,

+                   descriptiveName=None, timeConstraints=None,

+                   precision=None, minAllowedValue=None,

+                   maxAllowedValue=None, units=None, rateParm=0,

+                   discreteKeys=None, discreteOverlap=None,

+                   discreteAuxDataLength=None, defaultColorTable=None):

+

+

+        # Creates a grid for the given model and element.

+        # If the model and element do not already exist, creates them on-the-fly

+        #

+        # The descriptiveName, timeConstraints, precision, minAllowedValue,

+        # maxAllowedValue, units, rateParm, discreteKeys, discreteOverlap,

+        # and discreteAuxDataLength only need to be

+        # specified for the first grid being created.  These

+        # values are ignored for subsequent calls to createGrid() for

+        # the same weather element.

+

+        # For new parms, the defaultColorTable is the one to be used for

+        # display.  If not specified and not in the gfe configuration file,

+        # a DEFAULT color table will be used.

+

+        # DISCRETE elements require a definition for discreteKeys,

+        # discreteAuxDataLength,  and discreteOverlap. For DISCRETE, the

+        # precision, minAllowedValue, maxAllowedValue, and rateParm

+        # are ignored.

+

+        # Note that this works for numeric grids only.

+        # The arguments exampleModel, exampleElement, and exampleLevel can be

+        # supplied so that the new element will have the same characteristics

+        # (units, precision, etc.) as the example element.

+        #

+        # model -- If you are creating an "on-the-fly" element (i.e. not

+        #          in the server), this should be a simple string with

+        #          with no special characters.  The site ID and other

+        #          information will be added for you.

+        #          If you are creating a grid for a model that exists

+        #          in the server, follow the guidelines for the model

+        #          argument described for the "getValue" command.

+        # element -- This should be a simple string with no special

+        #          characters.

+        # elementType -- "SCALAR", "VECTOR", "WEATHER", or "DISCRETE"

+        # numericGrid -- a Numeric Python grid

+        # timeRange -- valid time range for the grid.  You may want

+        #          to use the "createTimeRange" command

+        #

+        # The descriptiveName, timeConstraints, precision, minAllowedValue,

+        # precision, minAllowedValue, maxAllowedValue, and units can be

+        # used to define the GridParmInfo needed. Note that timeConstraints

+        # is not the C++ version, but a (startSec, repeatSec, durSec).

+        #

+        # Example:

+        #    self.createGrid("ISCDisc", WEname+"Disc", "SCALAR", maxDisc,

+        #                   GridTimeRange, descriptiveName=WEname+"Disc")

+        #

+        if string.find(element, "_") >= 0:

+            message = "SmartScript:createGrid --" + \

+                              "Illegal element name contains underscore. " + \

+                              "No special characters allowed. "

+            self.abort(message)

+        parm = self.getParm(model, element, "SFC")

+        if parm is None:

+            # Create a parm on-the-fly

+            # Parm ID

+            siteID = self.__dataMgr.getSiteID()

+            if model == "Fcst":

+                dbi = self.__mutableID

+            elif isinstance(model, DatabaseID.DatabaseID):

+                dbi = model

+            else:

+                dbi = DatabaseID.databaseID(siteID + "_GRID__" + model + "_00000000_0000")

+            pid = ParmID.ParmID(element, dbid=dbi).toJavaObj()

+            # Grid Parm Info set up to use a default at first

+            if elementType == "VECTOR":

+                example = self.getParm("Fcst", "Wind", "SFC")

+            elif elementType == "WEATHER":

+                example = self.getParm("Fcst", "Wx", "SFC")

+            elif elementType == "SCALAR":

+                example = self.getParm("Fcst", "T", "SFC")

+            elif elementType == "DISCRETE":

+                example = self.getParm("Fcst", "Hazards", "SFC")

+            else:

+                message = "SmartScript:createGrid -- illegal element type"

+                self.abort(message)

+

+            exampleGPI = None

+            if example is not None:

+                exampleGPI = example.getGridInfo()

+

+            #look for overrides

+            if descriptiveName is None:

+                descriptiveName = element

+            

+            if timeConstraints is None:

+                if exampleGPI is None:

+                    tc = TimeConstraints(0, 60, 60)

+                else:

+                    tc = exampleGPI.getTimeConstraints()

+            elif isinstance(timeConstraints, tuple):

+                # TC constructor (dur, repeat, start)

+                # TC tuple (start, repeat, dur)

+                tc = TimeConstraints(timeConstraints[2], timeConstraints[1],

+                                     timeConstraints[0])

+            else:

+                # Assume Java TimeConstraints or compatible

+                tc = TimeConstraints(

+                  timeConstraints.getDuration(), timeConstraints.getRepeatInterval(),

+                  timeConstraints.getStartTime())

+

+            if precision is None :

+                if exampleGPI is None:

+                    precision = 0

+                else:

+                    precision = exampleGPI.getPrecision()

+                    

+            if maxAllowedValue is None:

+                if exampleGPI is None:

+                    maxAllowedValue = nanmax(numericGrid)

+                else:

+                    maxAllowedValue = exampleGPI.getMaxValue()

+                    

+            if minAllowedValue is None:

+                if exampleGPI is None:

+                    minAllowedValue = nanmin(numericGrid)

+                else:

+                    minAllowedValue = exampleGPI.getMinValue()

+

+            if units is None:

+                if exampleGPI is None:

+                    units = "1" # unitless

+                else:

+                    units = exampleGPI.getUnitString()

+

+            if tc.anyConstraints() == 0:

+                timeIndependentParm = 1

+                timeRange = TimeRange.TimeRange.allTimes().toJavaObj()

+            else:

+                timeIndependentParm = 0

+

+            #create the new GridParmInfo

+            minAllowedValue = float(minAllowedValue)

+            maxAllowedValue = float(maxAllowedValue)

+            gpi = GridParmInfo(pid,

+                self.getGridLoc(), GridType.valueOf(elementType), units,

+                descriptiveName, minAllowedValue, maxAllowedValue,

+                precision, timeIndependentParm, tc, rateParm)

+

+            # if DISCRETE, deal with the key definitions

+            if elementType == "DISCRETE":

+                if discreteKeys is None or discreteOverlap is None or \

+                  discreteAuxDataLength is None:

+                    message = "SmartScript:createGrid --" + \

+                              "Discrete elements require discretekeys, " + \

+                              "discreteAuxDataLength, " + \

+                              "and discreteOverlap defined. "

+                    self.abort(message)

+                currDef = DiscreteKey.discreteDefinition(siteID)

+                keys = ArrayList()

+                for h in discreteKeys:

+                    if type(h) is tuple:

+                        kname, kdesc = h

+                    elif type(h) is bytes:

+                        kname = h

+                        kdesc = h

+                    keys.add(DiscreteKeyDef(kname, kdesc))

+                currDef.addDefinition(pid.getCompositeName(), discreteOverlap,

+                                             discreteAuxDataLength, keys)

+                DiscreteKey.setDiscreteDefinition(siteID, currDef)

+

+            #set a default color table if specified

+            if defaultColorTable is not None:

+                from com.raytheon.viz.gfe import Activator

+                prefName = element + "_defaultColorTable"

+                Activator.getDefault().getPreferenceStore().setValue(prefName, defaultColorTable)

+

+            #create the parm

+            parm = self.__parmMgr.createVirtualParm(pid, gpi, None, 1, 1)

+

+        # Create Java objects from numericGrid.

+        # Do this here because, while numericGrid can be sent straight to Java,

+        # the keys of discrete grids arrive as a single string, which must then

+        # be parsed. It's easier to create Java objects of the proper types here.

+        javaGrid = None

+        auxJavaGrid = None

+        javaOldKeys = None

+        if elementType == "DISCRETE" or elementType == "WEATHER":

+            ngZero = NumpyJavaEnforcer.checkdTypes(numericGrid[0], int8)

+            dimx = ngZero.shape[1]

+            dimy = ngZero.shape[0]

+            # Use createGrid() to get around Jep problems with 3-arg ctor.

+            javaGrid = Grid2DByte.createGrid(dimx, dimy, ngZero)

+            oldKeys = numericGrid[1]

+            javaOldKeys = ArrayList()

+            for oldKey in oldKeys:

+                # it seems stupid that we break apart tuples for discrete keys

+                # when modifying the DiscreteDefinition, but not here when

+                # creating the actual grid. It actually prevents the grid from

+                # being created because the string representation of the tuple

+                # won't match what we added to the DiscreteDefinition.

+                # However, this is exactly what AWIPS1 does...

+                # SEE GridCycler.C, line 1131

+                # FIXME: add oldKey[0] to the ArrayList for tuple types

+                javaOldKeys.add(str(oldKey))

+        elif elementType == "SCALAR":

+            numericGrid = NumpyJavaEnforcer.checkdTypes(numericGrid, float32)

+            javaGrid = Grid2DFloat.createGrid(numericGrid.shape[1], numericGrid.shape[0], numericGrid)

+        elif elementType == "VECTOR":

+            ngZero = NumpyJavaEnforcer.checkdTypes(numericGrid[0], float32)

+            ngOne = NumpyJavaEnforcer.checkdTypes(numericGrid[1], float32)

+            javaGrid = Grid2DFloat.createGrid(ngZero.shape[1], ngZero.shape[0], ngZero)

+            auxJavaGrid = Grid2DFloat.createGrid(ngOne.shape[1], ngOne.shape[0], ngOne)

+        else:

+            raise ValueError("Unknown elementType: %s" % elementType)

+

+        # Make sure we pass a java TimeRange to Java methods

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        gridData = self.__cycler.makeGridDataFromNumeric(parm, timeRange, javaGrid, auxJavaGrid, javaOldKeys)

+        parm.replaceGriddedData(timeRange, gridData)

+

+    ##

+    #

+    # @param model: Model name

+    # @type model: string

+    # @param element: Element name

+    # @type element: string

+    # @param level: Level name

+    # @type level: string

+    # @param timeRange: Time range of grid

+    # @type timeRange: Python or Java TimeRange

+    # @return: True if grids were deleted

+    def deleteGrid(self, model, element, level, timeRange):

+        # Deletes any grids for the given model and element

+        # completely contained in the given timeRange.

+        # If the model and element do not exist or if there are no existing grids,

+        #   no action is taken.

+        #

+        parm = self.getParm(model, element, level)

+        if parm is None:

+            returnVal = False

+        else:

+            if isinstance(timeRange, TimeRange.TimeRange):

+                timeRange = timeRange.toJavaObj()

+            returnVal = parm.deleteTR(timeRange)

+        return returnVal

+

+    def highlightGrids(self, model, element, level, timeRange, color, on=1):

+        # Highlight the grids in the given time range using designated

+        # color.  If "on" is 0, turn off the highlight.

+        parm = self.getParm(model, element, level)

+        from com.raytheon.viz.gfe.core.msgs import HighlightMsg

+

+        trs = jep.jarray(1, javaTimeRange)

+        trs[0] = timeRange.toJavaObj()

+        HighlightMsg(parm, trs, on, color).send()

+

+    def makeHeadlineGrid(self, headlineTable, fcstGrid, headlineGrid=None):

+        # This method defines a headline grid based on the specified data.

+        # The headlineTable parameter must be a list of tuples each containing

+        # the threshold for each headline category and headline label

+        # Example:

+        #    headlineTable =[(15.0, 'SW.Y'),

+        #                    (21.0, 'SC.Y'),

+        #                    (34.0, 'GL.W'),

+        #                    (47.0, 'SR.W'),

+        #                    (67.0, 'HF.W'),

+        #                    ]

+        # "fcstGrid" is the grid that defines what headline category should

+        # be assigned. "headlineGrid" is the grid you wish to combine with

+        # the calculated grid.  This forces a combine even if the GFE is not

+        # in combine mode.  Omitting "headlineGrid" will cause the calculated

+        # grid to replace whatever is in the GFE, no matter what the GFE's

+        # combine mode. Note that a side effect of omitting the headline grid

+        # is that the GFE will end up in replace mode after the tool completes.

+        noneKey = ""  # define the <None> (empty) key

+        # set the mode to replace so the tool always behaves the same

+

+        if headlineGrid is None: # make new headline grid components

+            headValues = zeros(fcstGrid.shape, int8)

+            headKeys = [noneKey]

+            self.setCombineMode("Replace") # force a replace in GFE

+        else:

+            headValues, headKeys = headlineGrid

+

+        # make sure the headlineTable is not empty

+        if len(headlineTable) <= 0:

+            self.statusBarMsg("HeadlineTable is empty", "S")

+            return headlineGrid

+

+        # make a list of (mask, key) for the new headlines

+        newHeadlines = []

+        for value, headline in headlineTable:

+            mask = greater_equal(fcstGrid, value)

+            if sometrue(mask):

+                newHeadlines.append((mask, headline))

+        # make the same list for old headlines

+        oldHeadlines = []

+        for i in range(len(headKeys)):

+            mask = equal(headValues, i)

+            if sometrue(mask):

+                oldHeadlines.append((mask, headKeys[i]))

+

+        # make combinations at every intersection

+        for newMask, newKey in newHeadlines:

+            for oldMask, oldKey in oldHeadlines:

+                overlap = logical_and(newMask, oldMask) # intersection

+                if sometrue(overlap): #  combined key needed

+                    if oldKey == newKey:

+                        continue

+                    if oldKey == noneKey:

+                        combinedKey = newKey

+                    else:

+                        combinedKey = oldKey + "^" + newKey

+                    # make sure the key is on the list

+                    if combinedKey not in headKeys:

+                        headKeys.append(combinedKey)

+                    index = self.getIndex(combinedKey, headKeys)

+                    headValues[overlap] = index

+

+        # return the new headlines grid

+        return (headValues, headKeys)

+

+

+    ######################

+    ##  Utility Commands

+

+    def findDatabase(self, databaseName, version=0):

+        # Return an AFPS.DatabaseID object.

+        #  databaseName can have the appended type. E.g. "NAM12" or "D2D_NAM12"

+        #  version is 0 (most recent), -1 (previous), -2, etc.

+        # E.g.

+        #    databaseID = self.findDatabase("NAM12",0)

+        # returns most recent NAM12 model

+        result = self.__parmMgr.findDatabase(databaseName, version)

+        if result is not None:

+            result = DatabaseID.DatabaseID(result)

+        return result

+

+    def getDatabase(self, databaseString):

+        # Return an AFPS.DatabaseID object.

+        #  databaseString is the result of a VariableList entry of type

+        #   "model" or "D2D_model"

+        dbID = DatabaseID.databaseID(databaseString)

+        return dbID

+

+    def getTimeRange(self, timeRangeName):

+        # Returns an AFPS.TimeRange object given a time range name

+        # as defined in the GFE

+        # E.g.

+        #   timeRange = self.getTimeRange("Today")

+        tr = self.__dataMgr.getSelectTimeRangeManager().getRange(timeRangeName).toTimeRange();

+        return TimeRange.TimeRange(tr)

+

+    def createTimeRange(self, startHour, endHour, mode="LT", dbID=None):

+        # Returns an AFPS.TimeRange object given by:

+        #    startHour, endHour

+        #       (range is startHour up to and not including endHour)

+        #       startHour and endHour are relative to midnight of the

+        #          current day either in Local or Zulu time (see below)

+        #    mode can be:

+        #    "LT" : the startHour and endHour are relative to local time

+        #    "Zulu": relative to Zulu time,

+        #    "Database": relative to a database (e.g. model time.

+        #      In this case, the databaseID for the model must

+        #      be supplied (see findDatabase)

+        #

+        # E.g.

+        #    timeRange = self.createTimeRange(0,121,"Zulu")

+        #    databaseID = self.findDatabase("NAM12")

+        #    timeRange = self.createTimeRange(120,241,"Database",databaseID)

+

+        if mode == "Database" and dbID is None:

+            raise TypeError("SmartScript createTimeRange: " + \

+                      "Must specify a database ID for mode=Database")

+

+        if mode == "LT":

+            localTime = time.mktime(self.localtime())

+            gmTime = time.mktime(self.gmtime())

+            localAbsTime = AbsTime.AbsTime(localTime)

+            delta = localTime - gmTime

+

+            todayMidnight = AbsTime.absTimeYMD(localAbsTime.year, localAbsTime.month,

+                                               localAbsTime.day)

+            start = todayMidnight + (startHour * 3600) - delta

+            end = todayMidnight + (endHour * 3600) - delta

+            return TimeRange.TimeRange(start, end)

+        elif mode == "Database" and dbID.toJavaObj().getModelTime() != "00000000_0000":

+            start = dbID.modelTime() + (startHour * 3600)

+            end = dbID.modelTime() + (endHour * 3600)

+            return TimeRange.TimeRange(start, end)

+        else:

+            currentTime = self.gmtime()

+            today = AbsTime.absTimeYMD(currentTime.tm_year, currentTime.tm_mon,

+                                       currentTime.tm_mday)

+            start = today + (startHour * 3600)

+            end = today + (endHour * 3600)

+            return TimeRange.TimeRange(start, end)

+

+    def getSamplePoints(self, sampleSetName=None):

+        # Return a list of x,y tuples representing sample points

+        # sampleSet is the name of a saved sample set

+        # if sampleSet is None, the sample points will be

+        #   those currently displayed on the GFE

+        points = []

+        sampleSet = self.__dataMgr.getSampleSetManager()

+        if sampleSetName is None:

+            locations = sampleSet.getLocations()

+        else:

+            locations = sampleSet.sampleSetLocations(sampleSetName)

+        for i in range(locations.size()):

+            xy = self.getGridLoc().gridCoordinate(locations.get(i))

+            points.append((xy.x, xy.y))

+        return points

+

+    def _timeDisplay(self, timeRange, LTorZulu, durFmt, startFmt, endFmt):

+        # Return a string display for the given timeRange, assumed to be

+        #  in GMT.

+        # If LTorZulu == "LT", the timeRange will be converted from GMT

+        #  to local time.

+        # durationFmt, startFmt, endFmt are format strings for the

+        #  timeRange duration, the start time and end time respectively.

+        # See Text Product User Guide to see possible formats.

+        #

+        # Example:

+        #   self._timeDisplay(timeRange, "LT", "%H hours ",

+        #                     "%a %b %d, %Y %I:%M %p",

+        #                    " to %a %b %d, %Y %I:%M %p %Z")

+        #

+        #   yields a string such as:

+        #

+        #  12 hours Mon Apr 23, 2001 06:00 AM to Mon Apr 23, 2001 06:00 PM MDT.

+        if LTorZulu == "LT":

+            # Convert to local time

+            timeRange = self._shiftedTimeRange(timeRange)

+        display = ""

+        if durFmt != "":

+            duration = timeRange.duration()

+            durHours = duration / 3600

+            durMinutes = duration / 3600 / 60

+            durStr = string.replace(durFmt, "%H", repr(durHours))

+            durStr = string.replace(durStr, "%M", repr(durMinutes))

+            display = display + durStr

+        if startFmt != "":

+            display = display + timeRange.startTime().stringFmt(startFmt)

+        if endFmt != "":

+            display = display + timeRange.endTime().stringFmt(endFmt)

+        if LTorZulu == "LT":

+            # Adjust time zone to local time

+            localTime = self.localtime()

+            zoneName = time.strftime("%Z", localTime)

+            display = string.replace(display, "GMT", zoneName)

+        return display

+

+    def _shiftedTimeRange(self, timeRange):

+        localTime, shift = self._determineTimeShift()

+        return TimeRange.TimeRange(timeRange.startTime() + shift,

+                              timeRange.endTime() + shift)

+

+    def _determineTimeShift(self):

+        ''' Get the current Simulated UTC time and convert it to the

+        Site Time Zone as AbsTime return this and the number of seconds the

+        Simulated UTC time was shifted to get local time

+        '''

+        ldt = self._localtime()

+        shift = int(ldt.utcoffset().total_seconds())

+        currentTime = AbsTime.absTimeYMD(ldt.year, ldt.month, ldt.day, ldt.hour, ldt.minute)

+        return currentTime, shift

+

+    def _localtime(self, date=None, tz=None):

+        ''' Assumes date (default is current Simulate Time) is a UTC time to convert

+            to the time zone tz (default is Site Time Zone).

+            returns datetime

+        '''

+        if tz is None:

+            tz = self.getTzInfo()

+

+        gmdt = self._gmtime(date)

+        tzdt = gmdt.astimezone(tz)

+        return tzdt

+

+    def _gmtime(self, date=None):

+        ''' This takes date (default current Simulated Time) and converts it to AbsTime

+        '''

+        if date is None:

+            date = SimulatedTime.getSystemTime().getTime()

+        return AbsTime.AbsTime(date)

+    

+    def gmtime(self, date=None):

+        ''' This takes date (default current Simulated Time) and converts it to AbsTime

+

+            This should be used instead of time.gmtime()

+        '''

+        return self._gmtime(date).utctimetuple()

+

+    def localtime(self, date=None):

+        ''' Assumes date (default is current Simulated Time) is a UTC time to convert

+            to the time zone of the local site.

+

+            This should be used instead of time.localtime()

+        '''

+        return self._localtime(date).timetuple()

+    

+    def getTimeZoneStr(self):

+        ''' Returns local time zone of the current site as a string

+        '''

+        return self.__gridLoc.getTimeZone()

+    

+    def getTzInfo(self, tzname=None):

+        ''' Returns time zone object compatible with datetime for the desired time zone. 

+            Defaults to local site's time zone if tzname not specified.

+        '''

+        if tzname is None:

+            tzname = self.getTimeZoneStr()

+        import dateutil.tz

+        return dateutil.tz.gettz(tzname)

+

+    def dayTime(self, timeRange, startHour=6, endHour=18):

+        # Return 1 if start of timeRange is between the

+        #  startHour and endHour, Return 0 otherwise.

+        # Assume timeRange is GMT and convert to local time.

+        shift = self.determineTimeShift()

+        startTime = timeRange.startTime() + shift

+        localStartHour = startTime.hour

+        if localStartHour >= startHour and localStartHour < endHour:

+            return 1

+        else:

+            return 0

+

+    def determineTimeShift(self):

+        loctime, shift = self._determineTimeShift()

+        return shift

+

+    def getEditArea(self, editAreaName):

+        # Returns an AFPS.ReferenceData object given an edit area name

+        # as defined in the GFE

+

+        # Example:

+        #    myArea = self.getEditArea("BOU")

+        #    self.callSmartTool("MyTool", "T", editArea=myArea, timeRange)

+        #

+        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID

+        refID = ReferenceID(editAreaName)

+        return self.__dataMgr.getRefManager().loadRefSet(refID)

+

+    def saveEditArea(self, editAreaName, refData):

+        # Saves the AFPS.ReferenceData object with the given name

+

+        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID

+        refID = ReferenceID(editAreaName)

+        refData = ReferenceData(refData.getGloc(), refID, refData.getGrid())

+        self.__dataMgr.getRefManager().saveRefSet(refData)

+

+    def setActiveEditArea(self, area):

+        # Set the AFPS.ReferenceData area to be the active one in the GFE

+        # Note: This will not take effect until AFTER the smart tool or

+        # procedure is finished executing.

+        self.__dataMgr.getRefManager().setActiveRefSet(area)

+

+    def getActiveEditArea(self):

+        # Get the AFPS.ReferenceData area for the active one in the GFE

+        return self.__dataMgr.getRefManager().getActiveRefSet()

+

+    def clearActiveEditArea(self):

+        # Clear the active edit area in the GFE

+        #area = AFPS.ReferenceData_default()

+        #self.__dataMgr.referenceSetMgr().setActiveRefSet(area)

+        self.__dataMgr.getRefManager().clearRefSet()

+

+    def setActiveElement(self, model, element, level, timeRange,

+                         colorTable=None, minMax=None, fitToData=0):

+        # Set the given element to the active one in the GFE

+        # A colorTable name may be given.

+        # A min/max range for the colorTable may be given.

+        # If fitToData = 1, the color table is fit to the data

+        #

+        # Example:

+        #    self.setActiveElement("ISCDisc", WEname+"Disc", "SFC", GridTimeRange,

+        #                          colorTable="Discrepancy", minMax=(-20,+20),

+        #                          fitToData=1)

+        #

+        parm = self.getParm(model, element, level)

+        spatialMgr = self.__dataMgr.getSpatialDisplayManager()

+        if minMax or colorTable:

+            rsc = spatialMgr.getResourcePair(parm).getResource()

+            from com.raytheon.uf.viz.core.rsc.capabilities import ColorMapCapability

+            params = rsc.getCapability(ColorMapCapability).getColorMapParameters()

+            if colorTable:

+                from com.raytheon.uf.viz.core.drawables import ColorMapLoader

+                colorMap = ColorMapLoader.loadColorMap(colorTable)

+                elemType = str(parm.getGridInfo().getGridType())

+                if ('DISCRETE' == elemType):

+                    from com.raytheon.viz.gfe.rsc import DiscreteDisplayUtil

+                    DiscreteDisplayUtil.deleteParmColorMap(parm)

+                params.setColorMap(colorMap)

+                params.setColorMapName(colorTable)

+                rsc.issueRefresh()

+            if minMax:

+                minVal, maxVal = minMax

+                if (minVal != maxVal):

+                    params.setColorMapMax(maxVal)

+                    params.setColorMapMin(minVal)

+            parm.getListeners().fireColorTableModified(parm)

+        if fitToData:

+            from com.raytheon.viz.gfe.rsc.colorbar import FitToData

+            fitter = FitToData(self.__dataMgr, parm)

+            fitter.fitToData()

+        spatialMgr.activateParm(parm)

+        spatialMgr.makeVisible(parm, True, True)

+        spatialMgr.setSpatialEditorTime(timeRange.startTime().javaDate())

+

+

+    def getActiveElement(self):

+        return self.__dataMgr.getSpatialDisplayManager().getActivatedParm()

+

+    def getGridCellSwath(self, editArea, cells):

+        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData

+        CoordinateType = ReferenceData.CoordinateType

+        # Returns an AFPS.ReferenceData swath of the given

+        # number of cells around the given an edit area.

+        # The edit area must not be a query.

+        if type(editArea) is bytes:

+            editArea = self.getEditArea(editArea)

+        grid2DB = None

+        multipolygon = editArea.getPolygons(CoordinateType.valueOf("GRID"))

+        numPolygons = multipolygon.getNumGeometries()

+        for n in range(numPolygons):

+            polygon = multipolygon.getGeometryN(n)

+            grid2DBit = self.getGridLoc().gridCellSwath(

+                polygon.getCoordinates(), float(cells), False)

+            if grid2DB is not None:

+                grid2DB = grid2DB.orEquals(grid2DBit)

+            else:

+                grid2DB = grid2DBit

+        return self.getGridLoc().convertToReferenceData(grid2DB)

+

+    def getLatLon(self, x, y):

+        # Get the latitude/longitude values for the given grid point

+        from com.vividsolutions.jts.geom import Coordinate

+        coords = Coordinate(float(x), float(y))

+        cc2D = self.getGridLoc().latLonCenter(coords)

+        return cc2D.y, cc2D.x

+

+    def getLatLonGrids(self):

+        gridLoc = self.getGridLoc()

+        latLonGrid = gridLoc.getLatLonGrid()

+

+        latLonGrid = numpy.reshape(latLonGrid, (2, int(gridLoc.getNy()), int(gridLoc.getNx())), order='F')

+        return latLonGrid[1], latLonGrid[0]

+

+

+    def getGridCell(self, lat, lon):

+        # Get the corresponding x,y values for the given lat/lon

+        # Return None, None if the lat/lon is outside the grid domain

+        cc2D = self.getGridLoc().gridCell(lat, lon)

+        gridSize = self.getGridLoc().gridSize()

+        if cc2D.x < 0 or cc2D.x >= gridSize.x or \

+           cc2D.y < 0 or cc2D.y >= gridSize.y:

+            return None, None

+        else:

+            return cc2D.x, cc2D.y

+

+    def getGrid2DBit(self, editArea):

+        # Returns a Grid of on/off values indicating whether

+        # or not the grid point is in the given edit area.

+        # This could be used as follows in a Smart Tool:

+        #  def preProcessGrid(self):

+        #     editArea = self.getEditArea("Area1")

+        #     self.__area1Bits = self.getGrid2DBit(editArea)

+        #     editArea = self.getEditArea("Area2")

+        #     self.__area2Bits = self.getGrid2DBit(editArea)

+        #

+        #  def execute(self, x, y):

+        #     if self.__area1Bits.get(x,y) == 1:

+        #        

+        #     elif self.__area2Bits.get(x,y) == 1:

+        #        

+        #

+        return editArea.getGrid()

+

+    def getGridTimes(self, model, element, level, startTime, hours):

+        # Return the timeRange and gridTimes for the number of hours

+        # FOLLOWING the given startTime

+        timeRange = TimeRange.TimeRange(startTime, startTime + hours * 3600)

+        parm = self.getParm(model, element, level, timeRange)

+        gridTimes = parm.getGridInfo().getTimeConstraints().constraintTimes(timeRange.toJavaObj())

+        pyList = []

+        for t in gridTimes:

+            pyList.append(TimeRange.TimeRange(t))

+        return timeRange, pyList

+

+    def getExprName(self, model, element, level="SFC", mostRecent=0):

+        # Return an expressionName for the element

+        # This method is complicated because it is handling all the

+        # variations for the "model" argument.  For a description

+        # of the variations, see the "getValue" documentation above.

+

+        siteID = self.__mutableID.siteID()

+        if type(model) is bytes:

+            modelStr = model

+        else:

+            # Must be a databaseID, so get model string

+            modelStr = model.modelName()

+        if element == "Topo" or modelStr == self.__mutableID.modelName():

+            exprName = element

+        elif modelStr == "Official":

+            dbType = self.__mutableID.type()

+            modelName = "Official"

+            exprName = element + "_" + level + "_" + siteID + "_" + dbType + "_" + modelName

+        else:

+            if type(model) is bytes:

+                if string.count(model, "_") == 5:

+                    # String as databaseID

+                    dbID = DatabaseID.databaseID(model)

+                elif string.find(model, "_") < 0:

+                    # Assume "on-the-fly" so need to prepend site

+                    exprName = element + "_" + level + "_" + siteID + "__" + model

+                    dbID = DatabaseID.databaseID_default()

+                else:

+                    # Assume model is site_type_modelName

+                    exprName = element + "_" + level + "_" + model

+                    dbID = DatabaseID.databaseID_default()

+            else:

+                # Assume it is already a dbID

+                dbID = model

+            if dbID.siteID() is not None and dbID.siteID() != "":

+                if str(dbID) == str(self.__mutableID):

+                    exprName = element

+                else:

+                    exprName = element + "_" + level + "_" + dbID.siteID() + "_" + \

+                               dbID.type() + "_" + dbID.modelName()

+                    if mostRecent == 0:

+                        if dbID.toJavaObj().getModelDate() is None:

+                            exprName = exprName + "_00000000_0000"

+                        else:

+                            exprName = exprName + "_" + dbID.modelTime().stringFmt(

+                                "%b%d%H")

+        return exprName

+

+    def getSiteID(self):

+        return self.__dataMgr.getSiteID()

+

+    def getModelName(self, databaseString):

+        # Return the model name.

+        #  databaseString is the result of a VariableList entry of type

+        #   "model" or "D2D_model"

+        dbID = DatabaseID.databaseID(databaseString)

+        return dbID.modelName()

+

+    def getD2Dmodel(self, model):

+        # Given a GFE Surface model, return the corresponding D2D model

+        if isinstance(model, DatabaseID.DatabaseID):

+            model = model.modelIdentifier()

+        d2d = string.replace(model, "__", "_D2D_")

+        return d2d

+

+    def getParm(self, model, element, level, timeRange=None, mostRecent=0):

+        # Returns the parm object for the given model, element, and level

+        exprName = self.getExprName(model, element, level, mostRecent)

+        #print "Expression Name", exprName

+        parm = self.__parmMgr.getParmInExpr(exprName, 1)

+        return parm

+

+    def getParmByExpr(self, exprName):

+        #print "Expression Name", exprName

+        parm = self.__parmMgr.getParmInExpr(exprName, 1)

+        return parm

+

+    ##

+    # @param elementNames: ignored

+    #

+    # @deprecated: Caching is controlled by the system.

+    def cacheElements(self, elementNames):

+        pass

+

+    ##

+    # Caching is controlled by the system. Users may still call this method

+    # to delete temporary parms in the parm manager.

+    #

+    # @param elementNames: ignored

+    def unCacheElements(self, elementNames):

+        self.__parmMgr.deleteTemporaryParms()

+

+    def loadWEGroup(self, groupName):

+        parmArray = self.__parmMgr.getAllAvailableParms();

+        parmIDs = self.__dataMgr.getWEGroupManager().getParmIDs(

+              groupName, parmArray)

+        # Load the group

+        self.__parmMgr.setDisplayedParms(parmIDs)

+

+    ##

+    # @param model: Database model name

+    # @type model: String

+    # @param element: Element name, i.e., "Hazards"

+    # @type element: String

+    # @param level: Parm level, i.e., "SFC"

+    # @type level: String

+    # @return: None

+    def unloadWE(self, model, element, level, mostRecent=0):

+        # unloads the WE from the GFE

+        exprName = self.getExprName(model, element, level, mostRecent)

+        parm = self.__parmMgr.getParmInExpr(exprName, 1)

+        if parm is None:

+            return

+        parmJA = jep.jarray(1, parm)

+        parmJA[0] = parm

+        self.__parmMgr.deleteParm(parmJA)

+

+    def unloadWEs(self, model, elementLevelPairs, mostRecent=0):

+        jparms = []

+        for element, level in elementLevelPairs:

+            exprName = self.getExprName(model, element, level, mostRecent)

+            parm = self.__parmMgr.getParmInExpr(exprName, 1)

+            if parm:

+                jparms.append(parm)

+        if jparms:

+            parmJA = jep.jarray(len(jparms), jparms[0])

+            for i in range(len(jparms)):

+                parmJA[i] = jparms[i]

+            self.__parmMgr.deleteParm(parmJA)

+

+    def saveElements(self, elementList):

+        # Save the given Fcst elements to the server

+        # Example:

+        #    self.saveElements(["T","Td"])

+        for element in elementList:

+            parm = self.getParm(self.mutableID(), element, "SFC")

+            parm.saveParameter(True)

+

+    def publishElements(self, elementList, timeRange):

+        # Publish the given Fcst elements to the server

+        # over the given time range.

+        # NOTE: This method is designed to run from a procedure

+        # NOT a SmartTool!!!

+        # Example:

+        #    self.publishElements(["T","Td"], timeRange)

+        from com.raytheon.uf.common.dataplugin.gfe.server.request import CommitGridRequest

+        requests = ArrayList()

+        for element in elementList:

+            # get the inventory for this element from the server

+            parm = self.getParm("Fcst", element, "SFC")

+            recList = self.__dataMgr.getClient().getPythonClient().getGridInventory(parm.getParmID())

+            publishTimeRange = timeRange

+            if recList is not None:

+                recSize = recList.size()

+                for x in range(recSize):

+                    tr = TimeRange.TimeRange(recList.get(x))

+                    if tr.overlaps(timeRange):

+                        publishTimeRange = publishTimeRange.combineWith(tr)

+

+            cgr = CommitGridRequest(parm.getParmID(), publishTimeRange.toJavaObj())

+            requests.add(cgr)

+            self.__parmOp.publish(requests)

+

+    def combineMode(self):

+        from com.raytheon.viz.gfe.core.parm import ParmState

+        CombineMode = ParmState.CombineMode

+        mode = ParmState.getCurrentCombineMode()

+        if mode.equals(CombineMode.valueOf("COMBINE")):

+            return True

+        else:

+            return False

+

+    def setCombineMode(self, mode):

+        from com.raytheon.viz.gfe.core.parm import ParmState

+        CombineMode = ParmState.CombineMode

+        if mode == "Combine":

+            self.__parmOp.setCombineMode(CombineMode.valueOf("COMBINE"))

+        elif mode == "Replace":

+            self.__parmOp.setCombineMode(CombineMode.valueOf("REPLACE"))

+        else:

+            self.statusBarMsg("Invalid Weather Combine mode.", "S")

+            return None

+

+    def getVectorEditMode(self):

+        # Returns Vector Edit mode in the GFE

+        # mode:

+        #    "Magnitude Only"

+        #    "Direction Only"

+        #    "Both"

+        from com.raytheon.viz.gfe.core.parm import ParmState

+        VectorMode = ParmState.VectorMode

+        mode = ParmState.getCurrentVectorMode()

+        if mode.equals(VectorMode.valueOf("MAGNITUDE")):

+            return "Magnitude Only"

+        elif mode.equals(VectorMode.valueOf("DIRECTION")):

+            return "Direction Only"

+        elif mode.equals(VectorMode.valueOf("BOTH")):

+                return "Both"

+        return "None"

+

+    def setVectorEditMode(self, mode):

+        # Sets the Vector Edit mode in the GFE

+        # mode:

+        #    "Magnitude only"

+        #    "Direction only"

+        #    "Both"

+        from com.raytheon.viz.gfe.core.parm import ParmState

+        VectorMode = ParmState.VectorMode

+        if mode == "Magnitude Only":

+            self.__parmOp.setVectorMode(VectorMode.valueOf("MAGNITUDE"))

+        elif mode == "Direction Only":

+            self.__parmOp.setVectorMode(VectorMode.valueOf("DIRECTION"))

+        else:

+            self.__parmOp.setVectorMode(VectorMode.valueOf("BOTH"))

+

+    def getConfigItem(self, itemName, default=None):

+        # Return the configuration file value for "itemName"

+        #  If not found, return the default given

+        from com.raytheon.viz.gfe import Activator

+        prefs = Activator.getDefault().getPreferenceStore()

+        if prefs.contains(itemName):

+            if prefs.isString(itemName):

+                return str(prefs.getString(itemName))

+            elif prefs.isInt(itemName):

+                return prefs.getInt(itemName)

+            elif prefs.isFloat(itemName):

+                return prefs.getFloat(itemName)

+            elif prefs.isDouble(itemName):

+                return prefs.getDouble(itemName)

+            elif prefs.isLong(itemName):

+                return prefs.getLong(itemName)

+            elif prefs.isBoolean(itemName):

+                return prefs.getBoolean(itemName)

+            elif prefs.isStringArray(itemName):

+                pa = []

+                jsa = prefs.getStringArray(itemName)

+                for i in jsa:

+                    pa.append(str(i))

+                return pa

+            elif prefs.isFloatArray(itemName):

+                pa = []

+                jsa = prefs.getFloatArray(itemName)

+                for i in jsa:

+                    pa.append(float(i))

+                return pa

+            elif prefs.isIntArray(itemName):

+                pa = []

+                jsa = prefs.getIntArray(itemName)

+                for i in jsa:

+                    pa.append(int(i))

+                return pa

+            else:

+                return default

+        else:

+            return default

+

+    def esat(self, temp):

+        return exp(26.660820 - 0.0091379024 * temp - 6106.3960 / temp)

+

+    ##

+    # Get the discrete keys for elementName.

+    #

+    # @param elementName: Name of an element.

+    # @type elementName: string

+    # @return: the keys for the element

+    # @rtype: list of strings

+    def getDiscreteKeys(self, elementName):

+        parm = self.getParm("Fcst", elementName, "SFC")

+        keyList = parm.getGridInfo().getDiscreteKeys()

+        keyList = JUtil.javaStringListToPylist(keyList)

+        return keyList

+

+#########################################################################

+## Numeric Python methods                                              ##

+#########################################################################

+

+    def getTopo(self):

+        # Return the numeric topo grid

+        if self.__topoGrid is None:

+            topo = self.__parmMgr.getParmInExpr("Topo", True)

+            self.__topoGrid = self.__cycler.getCorrespondingResult(

+                                topo, TimeRange.allTimes().toJavaObj(), "TimeWtAverage")[0]

+            self.__topoGrid = self.__topoGrid.getGridSlice().getNDArray()

+        return self.__topoGrid

+

+    def wxMask(self, wx, query, isreg=0):

+        # Returns a numeric mask i.e. a grid of 0's and 1's

+        #  where the value is 1 if the given query succeeds

+        # Arguments:

+        #  wx -- a 2-tuple:

+        #    wxValues : numerical grid of byte values

+        #    keys : list of "ugly strings" where the index of

+        #      the ugly string corresponds to the byte value in

+        #      the wxValues grid.

+        #  query -- a text string representing a query

+        #  isreg -- if 1, the query is treated as a regular expression

+        #           otherwise as a literal string

+        # Examples:

+        #  # Here we want to treat the query as a regular expression

+        #  PoP = where(self.wxMask(wxTuple, "^Chc:", 1), maximum(40, PoP), PoP)

+        #  # Here we want to treat the query as a literal

+        #  PoP = where(self.wxMask(wxTuple, ":L:"), maximum(5, PoP), PoP)

+        #

+        rv = self.empty(bool)

+        if not isreg:

+            for i in range(len(wx[1])):

+                #if fnmatch.fnmatchcase(wx[1][i], query):

+                if string.find(wx[1][i], query) >= 0:

+                    rv[equal(wx[0], i)] = True

+        else:

+            r = re.compile(query)

+            for i in range(len(wx[1])):

+                m = r.search(wx[1][i])

+                if m is not None:

+                    rv[equal(wx[0], i)] = True

+        return rv

+

+        # Returns a numeric mask i.e. a grid of 0's and 1's

+        #  where the value is 1 if the given query succeeds

+        # Arguments:

+        #  Discrete -- a 2-tuple:

+        #    grid : numerical grid of byte values

+        #    keys : list of "ugly strings" where the index of

+        #      the ugly string corresponds to the byte value in

+        #      the wxValues grid.

+        #  query -- a text string representing a query

+        #  isreg -- if 1, the query is treated as a regular expression

+        #           otherwise as a literal string

+        # Examples:

+        #  # Here we want to treat the query as a regular expression

+        #  PoP = where(self.wxMask(wxTuple, "^Chc:", 1), maximum(40, PoP), PoP)

+        #  # Here we want to treat the query as a literal

+        #  PoP = where(self.wxMask(wxTuple, ":L:"), maximum(5, PoP), PoP)

+    discreteMask = wxMask

+

+    ##

+    # Sort the subkeys of uglyStr alphabetically.

+    #

+    # @param uglyStr: A key with "^"s separating subkeys

+    # @type uglyStr: string

+    # @return: uglyStr with alpha sorted subkeys.

+    # @rtype: string

+    def sortUglyStr(self, uglyStr):

+        parts = uglyStr.split("^")

+        if len(parts) < 2:

+            return uglyStr

+

+        # do the sort

+        parts.sort()

+

+        sortedStr = "^".join(parts)

+

+        return sortedStr

+

+    ##

+    # Get the index of uglyStr within keys.

+    # This routine compares normalized (sorted) versions of uglyStr and

+    # keys to be sure that equivalent hazards are assigned the same grid

+    # index.

+    # When a matching key is not in keys, uglyStr will be added to keys

+    # and the index of the new entry will be returned.

+    #

+    # @param uglyStr: A hazard key

+    # @type uglyStr: string

+    # @param keys: Existing hazard keys

+    # @type keys: list

+    # @return: The index of a key equivalent to uglyStr in keys

+    # @rtype: int

+    def getIndex(self, uglyStr, keys):

+        # Returns the byte value that corresponds to the

+        #   given ugly string. It will add a new key if a new ugly

+        #   string is requested.

+        # Arguments:

+        #   uglyStr: a string representing a weather value

+        #   keys: a list of ugly strings.

+        #     A Wx argument represents a 2-tuple:

+        #       wxValues : numerical grid of byte values

+        #       keys : list of "ugly strings" where the index of

+        #        the ugly string corresponds to the byte value in the wxValues grid.

+        #     For example, if our keys are:

+        #       "Sct:RW:-::"

+        #       "Chc:T:-::"

+        #       "Chc:SW:-::"

+        #    Then, the wxValues grid will have byte values of 0 where

+        #    there is "Sct:RW:-::", 1 where there is "Chc:T:-::"

+        #    and 2 where there is "Chc:SW:-::"

+        #

+        #  The ugly strings are also used by DISCRETE.  The keys are

+        #  separated by '^' for the subkeys.

+        #  18 Nov 2005 - tl

+        #  Added sorting to ugly strings to prevent duplicate keys

+        #  Duplicate keys cause a bug when generating hazards grids.

+

+        sortedUglyStr = self.sortUglyStr(uglyStr)

+        for str in keys:

+            if sortedUglyStr == self.sortUglyStr(str):

+                return keys.index(str)

+        

+        if len(keys) >= 256:

+            raise IndexError("Attempt to create more than 256 Wx keys")

+        

+        keys.append(uglyStr)

+        return len(keys) - 1

+

+    ##

+    # Returns a Numeric Python mask for the edit area

+    # "editArea" can be a named area or a ReferenceData object

+    # @param editArea: An edit area to obtain a mask for

+    # @type editArea: String or referenceArea

+    # @return: grid for the edit area

+    # @rtype: numpy array of int8

+    def encodeEditArea(self, editArea):

+        # Returns a Numeric Python mask for the edit area

+        # "editArea" can be a named area or a referenceData object

+        if type(editArea) is bytes:

+            editArea = self.getEditArea(editArea)

+

+        if editArea.isQuery():

+            editArea = self.__refSetMgr.evaluateQuery(editArea.getQuery())

+

+        return editArea.getGrid().getNDArray().astype(bool)

+

+    def decodeEditArea(self, mask):

+        # Returns a refData object for the given mask

+        from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit

+        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID

+        gridLoc = self.getGridLoc()

+        nx = int(gridLoc.getNx())

+        ny = int(gridLoc.getNy())

+        

+        # force mask to boolean if it's not

+        mask = NumpyJavaEnforcer.checkdTypes(mask, bool)

+        

+        # convert boolean mask to bytes for Grid2DBit        

+        bytes = mask.astype(int8)

+        grid = Grid2DBit.createBitGrid(nx, ny, bytes)

+        return ReferenceData(gridLoc, ReferenceID("test"), grid)

+

+

+    def getindicies(self, o, l):

+        if o > 0:

+            a = slice(o, l); b = slice(0, l - o)

+        elif o < 0:

+            a = slice(0, l + o); b = slice(- o, l)

+        else:

+            a = slice(0, l); b = slice(0, l)

+        return a, b

+

+    def offset(self, a, x, y):

+        # Gives an offset grid for array, a, by x and y points

+        sy1, sy2 = self.getindicies(y, a.shape[0])

+        sx1, sx2 = self.getindicies(x, a.shape[1])        

+        b = zeros_like(a)

+        b[sy1, sx1] = a[sy2, sx2]

+        return b

+

+    def agradient(self, a):

+        # Gives offset grids in the "forward" x and "up" y directions

+        dx = a - self.offset(a, 1, 0)

+        dy = a - self.offset(a, 0, - 1)

+        return dx, dy

+

+    def diff2(self, x, n=1, axis= - 1):

+        """diff2(x,n=1,axis=-1) calculates the first-order, discrete

+        center difference approximation to the derivative along the axis

+        specified. array edges are padded with adjacent values.

+        """

+        a = asarray(x)

+        nd = len(a.shape)

+        slice1 = [slice(None)] * nd

+        slice2 = [slice(None)] * nd

+        slice1[axis] = slice(2, None)

+        slice2[axis] = slice(None, - 2)

+        tmp = a[slice1] - a[slice2]

+        rval = zeros_like(a)

+        slice3 = [slice(None)] * nd

+        slice3[axis] = slice(1, - 1)

+        rval[slice3] = tmp

+        slice4 = [slice(None)] * nd

+        slice4[axis] = slice(0, 1)

+        rval[slice4] = tmp[slice4]

+        slice5 = [slice(None)] * nd

+        slice5[axis] = slice(- 1, None)

+        rval[slice5] = tmp[slice5]

+        if n > 1:

+            return diff2(rval, n - 1)

+        else:

+            return rval

+

+    ##

+    # Get the grid shape from the GridLocation stored in the parm manager.

+    #

+    # @return: The number of data points in the X and Y directions.

+    # @rtype: 2-tuple of int

+    def getGridShape(self):

+        return self.__gridShape

+

+#########################################################################

+## Procedure methods                                                   ##

+#########################################################################

+

+    # These commands always apply to the mutable model only.

+    # NOTE:  Most of these commands are duplicated with "old" and

+    #  "recommended" versions which end in "Cmd".  For example, "copy"

+    #  is the "old" version and will eventually not be supported

+    #  while the recommended version is "copyCmd".

+

+    # Command Arguments:

+    # name1, name2, name3     is a list of the weather element names

+    # startHour     is the starting hour for the command offset from modelbase

+    # endHour       is the ending hour for the command offset from modelbase.

+    #               The ending hour is NOT included in the processing of the

+    #               command.

+    # modelbase     is the name of the model to be used to determine base times

+    #               Note that if this is "", then 0000z from today will be

+    #               used for the base time.

+    # modelsource   is the name of the model to be used in the copy command

+    # copyOnly      is 0 for move and 1 for copy only in the time shift command

+    # hoursToShift  is the number of hours to shift the data in time

+    #                shift command

+    # databaseID    must be of type AFPS.DatabaseID

+    #            Can be obtained in various ways:

+    #            --By calling findDatabase (see below)

+    #            --By calling getDatabase (see below) with the result

+    #              of a VariableList entry of type "model" or "D2D_model"

+    # timeRange    must be of type AFPS.TimeRange.

+    #            Can be obtained in various ways:

+    #            --As an argument passed into Smart Tool or Procedure,

+    #            --By calling getTimeRange (see below)

+    #            --By calling createTimeRange (see below)

+

+    # List of available commands:

+    # copyCmd(['name1', 'name2', 'name3'], databaseID, timeRange)

+    #    Copies all grids for each weather element from the given database

+    #    into the weather element in the mutable database that overlaps

+    #    the time range.

+    #    Example:

+    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model

+    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)

+    #       self.copyCmd(['T', 'Wind'], databaseID, timeRange)

+    #    will copy the Temperature and Wind fields analysis through 48 hours

+    #    from the latest NAM12 and place them into the forecast.

+    # copyToCmd([('srcName1', 'dstName1'),

+    #            ('srcName2', 'dstName2')], databaseID, timeRange)

+    #    Copies all grids for each weather element from the given database

+    #    into the weather element in the mutable database that overlaps

+    #    the time range.  The source name and destination name are both

+    #    supplied.  This allows for copying data with different names

+    #    (The units must match).

+    #    Example:

+    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model

+    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)

+    #       self.copyToCmd([('MaxT', 'T'), ('T', 'MinT')], databaseID,

+    #         timeRange)

+    #    will copy the Max Temperature into T and T into MinT.

+    #    from the latest NAM12 and place them into the forecast.

+    #

+    # deleteCmd(['name1', 'name2', 'name3'], timeRange)

+    #    Deletes all grids that overlap the input time range for element

+    #    in the mutable database.

+    #    Example:

+    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model

+    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)

+    #       self.deleteCmd(['T', 'Wind'], timeRange)

+    #    will delete the Temperature and Wind fields analysis up to

+    #    but not including 48 hours relative to the start time of

+    #    the latest NAM12 model.

+    #

+    # zeroCmd(['name1', 'name2', 'name3'], timeRange)

+    #    Assigns the minimum possible value for scalar and vector, and ""

+    #    for weather for the parameter in the mutable database for all grids

+    #    that overlap the specified time range.

+    #    Example:

+    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model

+    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)

+    #       self.zeroCmd(['T', 'Wind'], timeRange)

+    #    will zero the Temperature and Wind grids through 48 hours

+    #    relative to the start time of the latest NAM12 model.

+    #

+    # interpolateCmd(['name1', 'name2', 'name3'], timeRange,

+    #       interpMode="GAPS", interpState="SYNC", interval=0, duration=0)

+    #    Interpolates data in the forecast for the named weather elements

+    #    for the given timeRange.

+    #    Example:

+    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model

+    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)

+    #       self.interpolateCmd(['T', 'Wind'], timeRange, "GAPS","SYNC")

+    #    will interpolate the Temperature and Wind grids up to

+    #    but not including 48 hours relative to the start time of

+    #    the latest NAM12 model.

+    #    The interpolation will run in SYNC mode i.e. completing before

+    #    continuing with the procedure.

+    #

+    # createFromScratchCmd(['name1', 'name2'], timeRange, repeat, duration)

+    #    Creates one or more grids from scratch over the given timeRange

+    #    and assigns the default (minimum possible value for scalar

+    #    and vector, "" for weather).

+    #    The repeat interval and duration (both specified in hours) are

+    #    used to control the number of grids created.  If 0 is specified for

+    #    either one, then only 1 grid is created for the given time range.  If

+    #    valid numbers for duration and repeat are given, then grids will

+    #    be created every "repeat" hours and they will have a duration

+    #    of "duration" hours.  If there is not enough room remaining to create

+    #    a grid with the full duration, then no grid will be created in the space

+    #    remaining.  If you don't get the desired results, be sure that your input

+    #    time range starts on a valid time constraint for the element.  If the

+    #    element's time constraints (not the values supplied in this routine) contains

+    #    gaps (i.e., duration != repeatInterval), then the repeat interval and

+    #    duration will be ignored and grids will be created for each possible

+    #    constraint time.

+    #    Example:

+    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model

+    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)

+    #       self.createFromScratchCmd(['T', 'Wind'], timeRange, 3, 1)

+    #    will create the 1-hour Temperature and Wind grids through 48 hours at

+    #    3 hour intervals relative to the start time of the latest NAM12 model.

+    #

+    # timeShiftCmd(['name1', 'name2'], copyOnly, shiftAmount, timeRange)

+    #    Performs a time shift by the shiftAmount for all elements that

+    #    overlap the time range.

+    #    Example:

+    #       databaseID = self.findDatabase("NAM12") # Most recent NAM12 model

+    #       timeRange = self.createTimeRange(0, 49, "Database", databaseID)

+    #       self.timeShiftCmd(['T', 'Wind'], 1, 3, timeRange)

+    #

+    # splitCmd(elements, timeRange)

+    #    Splits any grid that falls on the start time or ending time of the

+    #    specified time range for the given parameter in the mutable database.

+    #

+    # fragmentCmd(elements, timeRange)

+    #    Fragments any grids that overlap the input time range for the parm

+    #    identified in the mutable database.

+    #

+    # assignValueCmd(elements, timeRange, value)

+    #    Assigns the specified value to all grids points for the grids that

+    #    overlap the specified time range, for the weather element in the mutable

+    #    database specified.

+    #   value is:

+    #    an Integer or Float for SCALAR

+    #    a magnitude-direction tuple for VECTOR:  e.g. (55,120)

+    #    a text string for Weather which can be obtained via the

+    #      WxMethods WxString method

+    #  Example:

+    #    # Scalar

+    #    value = 60

+    #    self.assignValue(["T","Td"], 0, 12, 'NAM12', value)

+    #    # Vector

+    #    value = (15, 120)

+    #    self.assignValue(["Wind"], 0, 12, 'NAM12', value)

+    #    # Weather

+    #    from WxMethods import *

+    #    value = WxString("Sct RW")

+    #    self.assignValue(["Wx"], 0, 12, 'NAM12', value)

+

+    # Example: Copy RAP40 0-12, NAM12 13-48, GFS80 49-72 for T, Wx, and Wind,

+    # and then interpolate from hours 0 - 24.

+    #

+    #

+    #       self.copy(['T','Wx', 'Wind'], 0, 12, 'RAP40')

+    #       self.copy(['T','Wx', 'Wind'], 13, 48, 'NAM12')

+    #       self.copy(['T','Wx', 'Wind'], 49, 72, 'GFS80')

+    #       self.interpolate(['T','Wx', 'Wind'], 0, 24, 'RAP40')

+

+    def copyCmd(self, elements, databaseID, timeRange):

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        if isinstance(databaseID, DatabaseID.DatabaseID):

+            databaseID = databaseID.toJavaObj()

+        for element in elements:

+            self.__parmOp.copyCmd(element, databaseID, timeRange)

+

+    def copyToCmd(self, elements, databaseID, timeRange):

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        if isinstance(databaseID, DatabaseID.DatabaseID):

+            databaseID = databaseID.toJavaObj()

+        for src, dst in elements:

+            self.__parmOp.copyToCmd(src, dst, databaseID, timeRange)

+

+    def deleteCmd(self, elements, timeRange):

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        for element in elements:

+            self.__parmOp.deleteCmd(element, timeRange)

+

+    def zeroCmd(self, elements, timeRange):

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        for element in elements:

+            self.__parmOp.zeroCmd(element, timeRange)

+

+    def interpolateCmd(self, elements, timeRange,

+                    interpMode="GAPS", interpState="SYNC", interval=0,

+                    duration=0):

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        # Convert interval to seconds

+        interval = interval * 3600

+        for element in elements:

+            self.__parmOp.interpolateCmd(element, timeRange,

+                                         interpMode, interpState, interval,

+                                         duration)

+

+    def createFromScratchCmd(self, elements, timeRange, repeat=0, duration=0):

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        # Convert repeat and duration to seconds

+        repeat = repeat * 3600

+        duration = duration * 3600

+        for element in elements:

+            self.__parmOp.createFromScratchCmd(element, timeRange, repeat, duration)

+

+    def timeShiftCmd(self, elements, copyOnly, shiftAmount, timeRange):

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        shiftAmount = shiftAmount * 3600

+        for element in elements:

+            self.__parmOp.timeShiftCmd(element, timeRange, copyOnly,

+                                       shiftAmount)

+

+    def splitCmd(self, elements, timeRange):

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        for element in elements:

+            self.__parmOp.splitCmd(element, timeRange)

+

+    def fragmentCmd(self, elements, timeRange):

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        for element in elements:

+            self.__parmOp.fragmentCmd(element, timeRange)

+

+    def assignValueCmd(self, elements, timeRange, value):

+        from com.raytheon.viz.gfe.core.wxvalue import ScalarWxValue, VectorWxValue, WeatherWxValue

+        if isinstance(timeRange, TimeRange.TimeRange):

+            timeRange = timeRange.toJavaObj()

+        for element in elements:

+            parm = self.__parmMgr.getParmInExpr(element, 1)

+            if type(value) == tuple:

+                newvalue = VectorWxValue(float(value[0]), float(value[1]), parm)

+            elif type(value) == bytes:

+                newvalue = WeatherKey(value)

+                newvalue = WeatherWxValue(newvalue, parm)

+            else:

+                newvalue = ScalarWxValue(float(value), parm)

+            self.__parmOp.assignValueCmd(element, timeRange, newvalue)

+

+    def __getUserFile(self, name, category):

+        from com.raytheon.uf.common.localization import PathManagerFactory, LocalizationContext

+        LocalizationType = LocalizationContext.LocalizationType

+        LocalizationLevel = LocalizationContext.LocalizationLevel

+        pathMgr = PathManagerFactory.getPathManager()

+        path = 'gfe/userPython/' + category + '/' + name

+        lc = pathMgr.getContext(LocalizationType.valueOf('CAVE_STATIC'), LocalizationLevel.valueOf('USER'))

+        lf = pathMgr.getLocalizationFile(lc, path)

+        return lf

+

+

+    def saveObject(self, name, object, category):

+        import pickle

+        # Save a Python object (e.g. a Numeric grid)

+        # in the server under the given name

+        #   Example:

+        #   self.saveObject("MyGrid", numericGrid, "DiscrepancyValueGrids")

+        #

+        lf = self.__getUserFile(name, category)

+        fullpath = lf.getFile().getPath()

+        idx = fullpath.rfind("/")

+        if not os.path.exists(fullpath[:idx]):

+            os.makedirs(fullpath[:idx])

+        openfile = open(fullpath, 'w')

+        pickle.dump(object, openfile)

+        openfile.close()

+        lf.save()

+

+    def getObject(self, name, category):

+        import pickle

+        # Returns the given object stored in the server

+        #   Example:

+        #   discrepancyValueGrid = self.getObject("MyGrid","DiscrepancyValueGrids")

+        #

+        lf = self.__getUserFile(name, category)

+        fullpath = lf.getFile().getPath()

+        openfile = open(fullpath, 'r')

+        obj = pickle.load(openfile)

+        openfile.close()

+        return obj

+

+    def deleteObject(self, name, category):

+        # Delete the given object stored in the server

+        #    Example:

+        #    self.deleteObject("MyGrid", "DiscrepancyValueGrids")

+        #

+        lf = self.__getUserFile(name, category)

+        lf.delete()

+

+    def myOfficeType(self):

+        #returns my configured office type, such as "wfo" or "rfc"

+        return self.__dataMgr.getOfficeType()

+

+    def officeType(self, siteid):

+        #returns the office type for the given site identifier

+        #returns None if unknown site id

+        a = self.__dataMgr.officeType(siteid)

+        if len(a):

+            return a

+        else:

+            return None

+

+    def availableDatabases(self):

+        dbs = []

+        availDbs = self.__parmMgr.getAvailableDbs()

+        for i in range(availDbs.size()):

+            dbId = availDbs.get(i)

+            dbs.append(DatabaseID.DatabaseID(dbId))

+        return dbs

+

+    def knownOfficeTypes(self):

+        import JUtil

+        return JUtil.javaStringListToPylist(self.__dataMgr.knownOfficeTypes())

+

+    # Retrieves a text product from the text database

+    def getTextProductFromDB(self, productID):

+        from com.raytheon.viz.gfe.product import TextDBUtil

+

+        opMode = self.gfeOperatingMode() in ("OPERATIONAL", "TEST")

+        fullText = TextDBUtil.retrieveProduct(productID, opMode)

+        textList =  fullText.splitlines(True)

+        return textList

+

+    def callTextFormatter(self, productName, dbId, varDict={}, vtecMode=None):

+        """

+        Execute the requested text product formatter.

+

+        Args: 

+                productName: the display name of the formatter to run.

+                dbId: string form of the DatabaseID to use as data source.

+                varDict: optional, product varDict, use an empty dict instead

+                         of None to signify a null varDict.

+                vtecMode: optional, for VTEC products specify VTEC mode (one of

+                          'O', 'T', 'E' or 'X').

+

+        Returns:

+                The output of the formatter--the content of the requested product.

+

+        Throws:

+                TypeError: If varDict is not a dict.

+                RuntimeError: If the formatter fails during execution. 

+        """

+        if type(varDict) is not dict:

+            raise TypeError("Argument varDict must be a dict.")

+        varDict = str(varDict)

+        

+        listener = TextProductFinishWaiter()

+        FormatterUtil.callFromSmartScript(productName, dbId, varDict, vtecMode, self.__dataMgr, listener)

+        product = listener.waitAndGetProduct()

+        state = listener.getState()

+        if not state.equals(ProductStateEnum.Finished):

+            msg = "Formatter " + productName + " terminated before completion with state: " + state.name() + \

+            ". Check formatter logs from Process Monitor for more information."

+            raise RuntimeError(msg)

+        return product

+    

+    def saveCombinationsFile(self, name, combinations):

+        """

+        Save the specified zone combinations to the localization data store.

+

+        Args: 

+                name: Name for the combinations file. The ".py" extension will

+                      automatically be appended to the final file name.

+                combinations: The zone combinations. This data structure should

+                      be a list of list of zone names 

+    #                      (e.g. [["OAX", "GID", "LBF"], ["DMX"], ["FSD", "ABR"]])

+

+        Throws:

+                TypeError: If combinations is not in the proper format.

+        """

+        # Validate that we were passed a collection of collections, we'll convert

+        # to list of lists to satisfy the Java type checker.

+        try:

+            for item in iter(combinations):

+                iter(item)

+        except TypeError:

+            raise TypeError("combinations must be a list of list of zone names.")

+        

+        combo_list = JUtil.pyValToJavaObj([[str(zone) for zone in group] for group in combinations])

+        CombinationsFileUtil.generateAutoCombinationsFile(combo_list, str(name))

+    

+    def loadCombinationsFile(self, name):

+        """

+        Load the specified zone combinations file from the localization data store.

+        

+        Args:

+                name: Name for the combinations file. The ".py" extension will

+                      automatically be appended to the final file name.

+

+        Returns:

+                The zone combinations as a list of lists of zone names

+                (e.g. [["OAX", "GID", "LBF"], ["DMX"], ["FSD", "ABR"]])

+        """

+        return JUtil.javaObjToPyVal(CombinationsFileUtil.init(name))

+

+    def transmitTextProduct(self, product, wanPil, wmoType):

+        """

+        Transmit the specified product. Will automatically detect if GFE is 

+        operating in OPERATIONAL or PRACTICE mode and send using the appropriate

+        route.

+

+        Args: 

+                product: the text or body of the product to transmit.

+                wanPil: the AWIPS WAN PIL for the product

+                wmoType: The WMO type of the product.

+

+        Returns:

+                The status of the transmission request as a ProductStateEnum.

+        """

+        wanPil = str(wanPil)

+        product = str(product)

+        wmoType = str(wmoType)

+        

+        transmitter = TextProductTransmitter(product, wanPil, wmoType)

+        practice = self.gfeOperatingMode()=="PRACTICE"

+        status = transmitter.transmitProduct(practice)

+        return status

+

+    def sendWFOMessage(self, wfos, message):

+        '''

+        Sends a message to a list of wfos

+        

+        Args:

+            wfos: string or list, set or tuple of strings containing the destination wfo(s)

+            

+            message: string containing the message to be sent

+

+        Returns:

+            string: empty if successful or error message

+        

+        Raises:

+            TypeError: if wfos is not a string, list, tuple or set

+        '''

+        

+        if not wfos:

+            # called with empty wfo list, nothing to do

+            return ""

+        

+        javaWfos = ArrayList()

+        if type(wfos) in [list, tuple, set]:

+            for wfo in wfos:

+                javaWfos.add(wfo)

+        elif type(wfos) is str:

+            javaWfos.add(wfos)

+        else:

+            raise TypeError("Invalid type received for wfos: " + type(wfos))

+            

+        response = self.__dataMgr.getClient().sendWFOMessage(javaWfos, message)

+        return response.message()

diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartToolInterface.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartToolInterface.py
index 3b7c520560..4d9ac34930 100644
--- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartToolInterface.py
+++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/SmartToolInterface.py
@@ -1,150 +1,150 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# Globally import and sets up instances of the smart tool scripts.
-# Designed to be used as a master controller for inspecting and running
-# smart tools from Java.
-#   
-#
-#    
-# SOFTWARE HISTORY
-#
-# Date          Ticket#  Engineer  Description
-# ------------- -------- --------- --------------------------------------------
-# Oct 21, 2008           njensen   Initial Creation.
-# Jan 17, 2013  1486     dgilling  Re-factor based on RollbackMasterInterface.
-# Jul 23, 2015  4263     dgilling  Support refactored Java SmartToolControllers.
-# Apr 13, 2016  5568     dgilling  More lenient handling of ScreenList.
-# May 05, 2017  6261     randerso  Added handling for SmartScript.cancel()
-# Feb 19, 2018  7222     mapeters  Removed handling of SmartScript.cancel()
-#
-##
-# This is an absolute override file, indicating that a higher priority version
-# of the file will completely replace a lower priority version of the file.
-##
-
-import logging
-import sys
-import Exceptions
-
-import JUtil
-import ProcessVariableList
-import RollbackMasterInterface
-import UFStatusHandler
-
-
-PLUGIN_NAME = 'com.raytheon.viz.gfe'
-CATEGORY = 'GFE'
-
-
-class SmartToolInterface(RollbackMasterInterface.RollbackMasterInterface):
-    
-    def __init__(self, scriptPath):
-        super(SmartToolInterface, self).__init__(scriptPath)
-        
-        logging.basicConfig(level=logging.INFO)
-        self.log = logging.getLogger("SmartToolInterface")
-        self.log.addHandler(UFStatusHandler.UFStatusHandler(PLUGIN_NAME, CATEGORY))
-        
-        self.importModules()
-            
-    def __getToolInfo(self, script, dataMgr):
-        elementToEdit = self.getWeatherElementEdited(script)
-        screenList = self.getScreenList(script)
-        hideTool = self.getHideTool(script)
-        docString = self.getMethodInfo(script, "Tool", "execute")
-        varDict = self.getVariableListInputs(script)
-        return elementToEdit, screenList, hideTool, docString, varDict
-                    
-    def getWeatherElementEdited(self, name):
-        return getattr(sys.modules[name], "WeatherElementEdited", "None")
-
-    def getScreenList(self, name):
-        screenList = getattr(sys.modules[name], "ScreenList", None)
-        if screenList is not None:
-            try:
-                iter(screenList)
-            except TypeError:
-                screenList = [str(screenList)]
-            else:
-                if isinstance(screenList, basestring):
-                    screenList = [str(screenList)]
-                else:
-                    screenList = [str(i) for i in screenList]
-        return screenList
-
-    def getVariableList(self, name):
-        return getattr(sys.modules[name], "VariableList", [])
-    
-    def getHideTool(self, name):
-        "Determine whether a tool is hidden."
-        result = getattr(sys.modules[name], "HideTool", False)
-        result = bool(result)
-        return result
-    
-    def getVariableListInputs(self, name):
-        varList = self.getVariableList(name)
-        return ProcessVariableList.buildWidgetList(varList)
-    
-    def getScripts(self, dataMgr):
-        from java.util import HashMap
-        from com.raytheon.viz.gfe.smarttool.script import SmartToolMetadata
-        
-        scriptList = HashMap()
-        
-        for script in self.scripts:
-            try:
-                (element, screenList, hideTool, docString, varDict) = self.__getToolInfo(script, dataMgr)
-                name = str(script)
-                if screenList is not None:
-                    screenList = JUtil.pyValToJavaObj(screenList)            
-                hideTool = bool(hideTool)
-                docString = str(docString)
-                metadata = SmartToolMetadata(name, element, screenList, hideTool, docString, varDict)
-                scriptList.put(name, metadata)
-            except:
-                self.log.exception("Unable to load metadata for smart tool " + script)
-                
-        return scriptList
-    
-    def getMethodArgNames(self, moduleName, className, methodName):
-        from java.util import ArrayList        
-        args = self.getMethodArgs(moduleName, className, methodName)
-        argList = ArrayList()
-        for a in args:
-            argList.add(a)
-        return argList
-    
-    def addModule(self, moduleName):        
-        super(SmartToolInterface, self).addModule(moduleName)
-        
-    def reloadModule(self, moduleName):
-        super(SmartToolInterface, self).reloadModule(moduleName)
-        
-    def removeModule(self, moduleName):
-        super(SmartToolInterface, self).removeModule(moduleName)
-    
-    def runTool(self, moduleName, className, methodName, **kwargs):
-        try:
-             return self.runMethod(moduleName, className, methodName, **kwargs)
-        except Exceptions.EditActionError, e:
-            msg = e.errorType() + ": " + e.errorInfo()
-            raise RuntimeError(msg)
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+#
+# Globally imports and sets up instances of the smart tool scripts.
+# Designed to be used as a master controller for inspecting and running
+# smart tools from Java.
+#   
+#
+#    
+# SOFTWARE HISTORY
+#
+# Date          Ticket#  Engineer  Description
+# ------------- -------- --------- --------------------------------------------
+# Oct 21, 2008           njensen   Initial Creation.
+# Jan 17, 2013  1486     dgilling  Re-factor based on RollbackMasterInterface.
+# Jul 23, 2015  4263     dgilling  Support refactored Java SmartToolControllers.
+# Apr 13, 2016  5568     dgilling  More lenient handling of ScreenList.
+# May 05, 2017  6261     randerso  Added handling for SmartScript.cancel()
+# Feb 19, 2018  7222     mapeters  Removed handling of SmartScript.cancel()
+#
+##
+# This is an absolute override file, indicating that a higher priority version
+# of the file will completely replace a lower priority version of the file.
+##
+
+import logging
+import sys
+import Exceptions
+
+import JUtil
+import ProcessVariableList
+import RollbackMasterInterface
+import UFStatusHandler
+
+
+PLUGIN_NAME = 'com.raytheon.viz.gfe'
+CATEGORY = 'GFE'
+
+
+class SmartToolInterface(RollbackMasterInterface.RollbackMasterInterface):
+    
+    def __init__(self, scriptPath):
+        super(SmartToolInterface, self).__init__(scriptPath)
+        
+        logging.basicConfig(level=logging.INFO)
+        self.log = logging.getLogger("SmartToolInterface")
+        self.log.addHandler(UFStatusHandler.UFStatusHandler(PLUGIN_NAME, CATEGORY))
+        
+        self.importModules()
+            
+    def __getToolInfo(self, script, dataMgr):
+        elementToEdit = self.getWeatherElementEdited(script)
+        screenList = self.getScreenList(script)
+        hideTool = self.getHideTool(script)
+        docString = self.getMethodInfo(script, "Tool", "execute")
+        varDict = self.getVariableListInputs(script)
+        return elementToEdit, screenList, hideTool, docString, varDict
+                    
+    def getWeatherElementEdited(self, name):
+        return getattr(sys.modules[name], "WeatherElementEdited", "None")
+
+    def getScreenList(self, name):
+        screenList = getattr(sys.modules[name], "ScreenList", None)
+        if screenList is not None:
+            try:
+                iter(screenList)
+            except TypeError:
+                screenList = [str(screenList)]
+            else:
+                if isinstance(screenList, str):
+                    screenList = [str(screenList)]
+                else:
+                    screenList = [str(i) for i in screenList]
+        return screenList
+
+    def getVariableList(self, name):
+        return getattr(sys.modules[name], "VariableList", [])
+    
+    def getHideTool(self, name):
+        "Determine whether a tool is hidden."
+        result = getattr(sys.modules[name], "HideTool", False)
+        result = bool(result)
+        return result
+    
+    def getVariableListInputs(self, name):
+        varList = self.getVariableList(name)
+        return ProcessVariableList.buildWidgetList(varList)
+    
+    def getScripts(self, dataMgr):
+        from java.util import HashMap
+        from com.raytheon.viz.gfe.smarttool.script import SmartToolMetadata
+        
+        scriptList = HashMap()
+        
+        for script in self.scripts:
+            try:
+                (element, screenList, hideTool, docString, varDict) = self.__getToolInfo(script, dataMgr)
+                name = str(script)
+                if screenList is not None:
+                    screenList = JUtil.pyValToJavaObj(screenList)            
+                hideTool = bool(hideTool)
+                docString = str(docString)
+                metadata = SmartToolMetadata(name, element, screenList, hideTool, docString, varDict)
+                scriptList.put(name, metadata)
+            except:
+                self.log.exception("Unable to load metadata for smart tool " + script)
+                
+        return scriptList
+    
+    def getMethodArgNames(self, moduleName, className, methodName):
+        from java.util import ArrayList        
+        args = self.getMethodArgs(moduleName, className, methodName)
+        argList = ArrayList()
+        for a in args:
+            argList.add(a)
+        return argList
+    
+    def addModule(self, moduleName):        
+        super(SmartToolInterface, self).addModule(moduleName)
+        
+    def reloadModule(self, moduleName):
+        super(SmartToolInterface, self).reloadModule(moduleName)
+        
+    def removeModule(self, moduleName):
+        super(SmartToolInterface, self).removeModule(moduleName)
+    
+    def runTool(self, moduleName, className, methodName, **kwargs):
+        try:
+             return self.runMethod(moduleName, className, methodName, **kwargs)
+        except Exceptions.EditActionError as e:
+            msg = e.errorType() + ": " + e.errorInfo()
+            raise RuntimeError(msg)
diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/StartupDialog.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/StartupDialog.py
index 7569a477f4..bdaea1c64a 100644
--- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/StartupDialog.py
+++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/StartupDialog.py
@@ -1,90 +1,90 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# StartUpDialog.py
-# Class for displaying GFE startup dialog and
-#   retrying the Server when there has been a problem
-#
-# Author: romberg
-# ----------------------------------------------------------------------------
-
-##
-# This is an absolute override file, indicating that a higher priority version
-# of the file will completely replace a lower priority version of the file.
-##
-
-import Tkinter, tkSimpleDialog
-
-class Dialog(tkSimpleDialog.Dialog):
-    def __init__(self, parent, title = None, wait=1, xoffset=50, yoffset=50,
-                 **kw):
-        Tkinter.Toplevel.__init__(self, parent, **kw)
-        self.transient(parent)
-
-        if title:
-            self.title(title)
-
-        self.parent = parent
-
-        if self.parent is not None:
-            self.geometry("+%d+%d" % (parent.winfo_rootx()+xoffset,
-                                      parent.winfo_rooty()+yoffset))
-        else:
-            self.geometry("+0+0")
-
-        self.result = None
-        body = Tkinter.Frame(self)
-        self.initial_focus = self.body(body)
-        body.pack(expand=Tkinter.YES, fill=Tkinter.BOTH)
-        self.buttonbox()
-        if wait:
-            self.grab_set()
-
-        if not self.initial_focus:
-            self.initial_focus = self
-
-        self.protocol("WM_DELETE_WINDOW", self.cancel)
-
-        self.initial_focus.focus_set()
-
-        self.ctorHook()
-
-        if wait:
-            self.wait_window(self)
-        else:
-            self.update_idletasks()
-    def ctorHook(self):
-        pass
-
-class IFPDialog(Dialog):
-    def __init__(self, parent, title = None, modal=1, xoffset=50, yoffset=50,
-                 constructionHook = None, **kw):
-        self._chook = constructionHook
-        Dialog.__init__(self, parent, title, modal, xoffset,
-                                      yoffset, **kw)
-
-    def ctorHook(self):
-        if self._chook is not None:
-            self._chook()
-
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+# ----------------------------------------------------------------------------
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#
+# StartUpDialog.py
+# Class for displaying GFE startup dialog and
+#   retrying the Server when there has been a problem
+#
+# Author: romberg
+# ----------------------------------------------------------------------------
+
+##
+# This is an absolute override file, indicating that a higher priority version
+# of the file will completely replace a lower priority version of the file.
+##
+
+import tkinter, tkinter.simpledialog
+
+class Dialog(tkinter.simpledialog.Dialog):
+    def __init__(self, parent, title = None, wait=1, xoffset=50, yoffset=50,
+                 **kw):
+        tkinter.Toplevel.__init__(self, parent, **kw)
+        self.transient(parent)
+
+        if title:
+            self.title(title)
+
+        self.parent = parent
+
+        if self.parent is not None:
+            self.geometry("+%d+%d" % (parent.winfo_rootx()+xoffset,
+                                      parent.winfo_rooty()+yoffset))
+        else:
+            self.geometry("+0+0")
+
+        self.result = None
+        body = tkinter.Frame(self)
+        self.initial_focus = self.body(body)
+        body.pack(expand=tkinter.YES, fill=tkinter.BOTH)
+        self.buttonbox()
+        if wait:
+            self.grab_set()
+
+        if not self.initial_focus:
+            self.initial_focus = self
+
+        self.protocol("WM_DELETE_WINDOW", self.cancel)
+
+        self.initial_focus.focus_set()
+
+        self.ctorHook()
+
+        if wait:
+            self.wait_window(self)
+        else:
+            self.update_idletasks()
+    def ctorHook(self):
+        pass
+
+class IFPDialog(Dialog):
+    def __init__(self, parent, title = None, modal=1, xoffset=50, yoffset=50,
+                 constructionHook = None, **kw):
+        self._chook = constructionHook
+        Dialog.__init__(self, parent, title, modal, xoffset,
+                                      yoffset, **kw)
+
+    def ctorHook(self):
+        if self._chook is not None:
+            self._chook()
+
diff --git a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/TropicalUtility.py b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/TropicalUtility.py
index 57e9c8307a..f4615adfd8 100644
--- a/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/TropicalUtility.py
+++ b/cave/com.raytheon.viz.gfe/localization/gfe/userPython/utilities/TropicalUtility.py
@@ -1,1271 +1,1269 @@
-# ------------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# TropicalUtility - Version 3.0
-#
-# Authors:  Matthew H. Belk (BOX), Shannon White (AWIPS), Pablo Santos (MFL),
-# Tom LeFebvre (GSD)
-#
-# Created:  03/03/2012        Last Modified:  04/26/2016
-#
-#  04/26/2016 - Modified to add the displayProduct method supplied by Ron
-#               Anderson (Raytheon)
-# ------------------------------------------------------------------------------
-#
-# SOFTWARE HISTORY
-#
-# Date         Ticket#    Engineer    Description
-# ------------ ---------- ----------- ------------------------------------------
-# Mar 03, 2012                        Initial creation
-# Apr 26, 2016            mbelk       Modified to add the displayProduct method
-#                                     supplied by Ron Anderson (Raytheon)
-# Sep 19, 2016 19293      randerso    Initial baseline check in
-# Feb 21, 2017 29544      randerso    Fix possible RuntimeError caused by
-#                                     discarding from set while looping over it
-#
-################################################################################
-
-##
-# This is an absolute override file, indicating that a higher priority version
-# of the file will completely replace a lower priority version of the file.
-##
-
-import collections
-import errno
-import os
-import re
-import string
-
-from awips.dataaccess import DataAccessLayer
-
-import GridManipulation
-import HazardUtils
-import JsonSupport
-import LocalizationSupport
-import LogStream
-import ProcessVariableList
-import TimeRange
-import numpy as np
-import pprint as pp
-
-
-class TropicalUtility(GridManipulation.GridManipulation):
-
-    def __init__(self, dbss):
-        GridManipulation.GridManipulation.__init__(self, dbss)
-        self._dbss = dbss
-
-        #  Make an instance of the HazardUtils
-        self._hazUtils = HazardUtils.HazardUtils(dbss, None)
-
-        #  Define a base for the ETN issued by a national center
-        self._natlBaseETN = 1000    #  Not used in current tools/procedures
-
-        #  Get the current mutable database ID
-        self._mutableID = self.mutableID()
-
-        #  Make lists of all WFOs we might want to send a message to from NHC.
-        #  The offices are split into coastal offices which deal with storm
-        #  surge, and inland offices which only deal with wind hazards from NHC
-        self._surgeWfos = ["CAR", "GYX", "BOX", "OKX", "PHI", "LWX", "AKQ",
-                           "MHX", "ILM", "CHS", "JAX", "MLB", "MFL", "KEY",
-                           "TBW", "TAE", "MOB", "LIX", "LCH", "HGX", "CRP",
-                           "BRO"]
-        self._windWfos = ["ALY", "MRX", "FFC", "OHX", "HUN", "BMX", "MEG",
-                          "JAN", "LZK", "SHV", "TSA", "FWD", "OUN", "SJT",
-                          "EWX", "MAF"]
-
-        #  Toggle for debugging output
-        self._debug = False                 #  True = On / False = Off
-
-        #  Define test mode for procedure which communicate with WFOs
-##        self._testMode = True    # if True, the command is only printed (test)
-        self._testMode = False     # if False, messages get sent to WFOs (live)
-
-
-    #===========================================================================
-    #  Utility methods to create common dialog buttons and actions
-
-    ### Makes the Run button
-    def makeRunButton(self, buttonFrame):
-        Tkinter.Button(buttonFrame, text="Run", width=10,
-                       command=self.runCommand,
-                       state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5,
-                                                  padx=50, fill=Tkinter.X)
-
-
-    ### Makes the Run/Dismiss button
-    def makeRunDismissButton(self, buttonFrame):
-        Tkinter.Button(buttonFrame, text="Run/Dismiss", width=10,
-                       command=self.runDismissCommand,
-                       state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5,
-                                                  padx=50, fill=Tkinter.X)
-
-
-    ### Makes the Cancel button
-    def makeCancelButton(self, buttonFrame):
-        Tkinter.Button(buttonFrame, text="Cancel", width=10,
-                       command=self.cancelCommand,
-                       state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5,
-                                                  padx=50, fill=Tkinter.X)
-
-
-    ### Action when "Run" button is clicked
-    def runCommand(self):
-        LogStream.logUse("Run")
-        self.makeHazardGrid()
-        return
-
-
-    ### Action when "Run/Dismiss" button is clicked
-    def runDismissCommand(self):
-        LogStream.logUse("Run/Dismiss")
-        if self.makeHazardGrid() == 1:
-            self.cancelIt()
-
-
-    ### Action when "Cancel" button is clicked
-    def cancelCommand(self):
-        # unregister the maps
-        LogStream.logUse("Cancel")
-        self.cancelIt()
-
-
-    ### Actual steps required to cancel/exit
-    def cancelIt(self):
-        # unregister the maps
-        for key in self._registeredMaps:
-            self._mapManager.unregisterMapSet(self._registeredMaps[key].mapId())
-        self.__master.destroy()
-
-
-    def getSubKeys(self, key):
-        parts = key.split("^")
-        if "" in parts:
-            parts = parts.remove("")
-        return parts
-
-
-    #===========================================================================
-    #  Utility methods to manipulate Hazard grids
-
-    # Returns the phen for specified hazard key.
-    # If not a VTEC hazard, returns ""
-    def keyPhen(self, key):
-        pos = key.find(".")
-        if pos == -1:   # not found
-            return ""
-
-        return key[0:pos]
-
-
-    # Parses specified key and returns the sig field.
-    def keySig(self, key):
-        pos = key.find(".")
-        if pos == -1:   # not found
-            return ""
-
-        return key[pos + 1]
-
-
-    # Parse the specified key and return the ETN. If none found,
-    # return an empty string ("")
-    def getETN(self, key):
-
-        subKeys = key.split("^")
-        subKey = subKeys[0]
-        parts = subKey.split(":")
-        if len(parts) < 2:
-            return ""
-        else:
-            return parts[1]
-
-
-    # Checks the specified hazard and proposed keys over the selectedMask
-    # for any conflicting hazards. If found, returns True, otherwise
-    # return False.
-    def anyHazardConflicts(self, hazard, proposed, selectedMask):
-
-        # Make the list of tropical hazards
-        tropicalHazList = ["TR.W", "TR.A", "HU.W", "HU.A", "SS.A", "SS.W"]
-
-        hazGrid, hazKeys = hazard
-        propGrid, propKeys = proposed
-
-        # Make the list of hazard subKeys found in the hazard grid over the
-        # selectedMask
-        hazList = []
-        for hazKey in hazKeys:
-            if hazKey == "":      #  Ignore the  key
-                continue
-            
-            #  Identify the area where this hazard exists
-            hazIndex = self.getIndex(hazKey, hazKeys)
-            mask = hazGrid == hazIndex
-
-            #  Check for overlapping points 
-            overlap = mask & selectedMask
-            
-            #  If there is any overlap
-            if overlap.any():
-                
-                # These keys can have subKeys so separate those, too
-                subKeyList = self.getSubKeys(hazKey)
-                for subKey in subKeyList:
-                    if subKey not in hazList:
-                        hazList.append(subKey)
-
-        #  Look over the proposed hazards keys
-        for propKey in propKeys:
-            if propKey == "":     #  Ignore the  key
-                continue
-
-            # Check for overlapping points
-            propIndex = self.getIndex(propKey, propKeys)
-            propMask = propGrid == propIndex
-            overlap = propMask & selectedMask
-
-            if not overlap.any():  # no points in selectedMask
-                continue
-
-            # Parse the phen, sig, and ETN
-            propPhen = self.keyPhen(propKey)
-            propSig = self.keySig(propKey)
-            propETN = self.getETN(propKey)
-
-            for hazKey in hazList:
-                # See if this hazard overlaps with the current proposed hazard
-                hazIndex = self.getIndex(hazKey, hazKeys)
-                hazMask = hazGrid == hazIndex
-                hazOverlap = hazMask & propMask
-                if not hazOverlap.any():
-                    continue
-
-                # Parse the hazKey
-                hazETN = self.getETN(hazKey)
-                hazPhen = self.keyPhen(hazKey)
-                hazSig = self.keySig(hazKey)
-
-                # reconstruct the phen and sig
-                hazPhenSig = hazPhen + "." + hazSig
-                propPhenSig = propPhen + "." + propSig
-
-                # If the hazard keys are both tropical one the ETNs must match
-                if hazPhenSig in tropicalHazList and \
-                   propPhenSig in tropicalHazList:
-                    if hazETN != propETN:
-                        return True
-
-                # Otherwise if the phenSigs match, the ETNs must match
-                if hazPhenSig == propPhenSig:
-                    if propETN != hazETN:
-                        return True
-        return False
-
-
-    # Check for hazard conflicts on a point by point basis.  Uses the method
-    # anyHazadConflicts to do the logic for checking hazard phen, sig and ETN.
-    def anyHazardConflictsByPoint(self, hazardGrid, proposedGrid, selectedMask):
-        print "Inside MergeTool."
-        # Make the list of tropical hazards
-        tropicalHazList = ["TR.W", "TR.A", "HU.W", "HU.A", "SS.A", "SS.W"]
-
-        hazGrid, hazKeys = hazardGrid
-        propGrid, propKeys = proposedGrid
-
-        # Make the list of hazards found in the hazard grid over the
-        # selectedMask
-        hazList = []
-        for hazKey in hazKeys:
-            if hazKey == "":
-                continue
-
-            hazIndex = self.getIndex(hazKey, hazKeys)
-            hazMask = hazGrid == hazIndex
-
-            # Now check propKeys
-            for propKey in propKeys:
-                if propKey == "":
-                    continue
-                propIndex = self.getIndex(propKey, propKeys)
-                propMask = propGrid == propIndex
-                overlap = hazMask & propMask & selectedMask
-                if overlap.any():
-                    if self.anyHazardConflicts(hazardGrid, proposedGrid, overlap):
-                        start = int(self._gmtime().unixTime() / 3600) * 3600
-                        end = start + 3600
-                        timeRange = self.GM_makeTimeRange(start, end)
-                        return True
-        return False
-
-    #  Calculates a difference grid (added versus removed)
-    #  Calculates a difference grid (added versus removed)
-    def calcDiffGrid(self, initialGrid, proposedGrid, diffName, timeRange, 
-                     isWFO=False):
-
-        #  If this is a WFO
-        if isWFO:
-            print"Computing a diff grid for WFO"
-            
-            #  Filter the Hazards to only keep the Storm Surge hazards
-            ssKeys = ["SS.W", "SS.A"]
-        
-            initialGrid = self.filterHazardGrid(initialGrid, ssKeys)
-            proposedGrid = self.filterHazardGrid(proposedGrid, ssKeys)
-        
-        #  Split these grids into their components
-        initGrid, initKeys = initialGrid
-        propGrid, propKeys = proposedGrid
-
-        #  Identify where there are no hazards in both grids
-        initNone = self.getIndex("", initKeys)
-        propNone = self.getIndex("", propKeys)
-
-        #  Mask of these areas
-        initNoneMask = (initGrid == initNone)
-        propNoneMask = (propGrid == propNone)
-
-        #  Make an empty grid to hold difference indicator
-        diffGrid = np.zeros(self.getGridShape(), np.float32)
-
-        # Calculate hazards that were removed
-        diffGrid[propNoneMask & ~initNoneMask] = -1
-
-        # Calculate hazards that were added
-        diffGrid[~propNoneMask & initNoneMask] = 1
-        
-        # Find areas that had some hazard and it changed to another hazard
-        for initKey in initKeys:
-            for propKey in propKeys:
-                if initKey == "" or propKey == "":   # ignore any  cases
-                    continue
-                if initKey == propKey: # ignore cases where the keys are the same
-                    continue
-                
-                # Now we know the keys are different and neither is 
-                initIndex = self.getIndex(initKey, initKeys)
-                propIndex = self.getIndex(propKey, propKeys)
-
-                initMask = (initGrid == initIndex)
-                propMask = (propGrid == propIndex)
-                
-                # The intersection is where they changed
-                diffGrid[initMask & propMask] = 2
-
-        #  Add this temporary grid to the grid manager so it can be seen
-        self.createGrid("Fcst", diffName, "SCALAR", diffGrid, timeRange,
-                descriptiveName="Diff Between NHC and WFO",
-                precision=0, minAllowedValue=-1.0, maxAllowedValue=2.0)
-        
-        return 
-    
-
-    def filterHazardGrid(self, hazardGrid, filterKeys):
-        
-        filteredGrid = self.empty(np.int8)
-        filteredKeys = [""]
-        hazGrid, hazKeys = hazardGrid
-        
-        # Find the hazard keys that contain any filter key
-        for filterKey in filterKeys:
-            for hazKey in hazKeys:
-                
-                if filterKey not in hazKey:
-                    continue
-                hazIndex = self.getIndex(hazKey, hazKeys)
-                mask = (hazGrid == hazIndex)  # get the points that are set to this mask
-                # Cleanse the hazKey of all keys except filterKeys
-                newKey = ""
-                splitKeys = hazKey.split("^")
-                for splitKey in splitKeys:
-                    phenSig = splitKey.split(":")[0]
-                    if phenSig not in filterKeys:
-                        continue
-                    
-                    newKey = newKey + splitKey + "^"
-                    
-                if newKey[-1] == "^":
-                    newKey = newKey[0:-1]   # trim the trailing "^"
-                    
-                    
-                newIndex = self.getIndex(newKey, filteredKeys)
-               
-                filteredGrid[mask] = newIndex
-            
-        return filteredGrid, filteredKeys
-    #  Calculates a difference grid (added versus removed) for WFOs
-
- #==============================================================================
- #  Methods for sending messages to WFOs
- #==============================================================================
-
-    #  Define method to send a message to WFOs
-    def sendMessageToWfos(self, wfos, message, testMode=True):
-        SendMessageResult = collections.namedtuple('SendMessageResult',
-                                                   ('success', 'wfo', 'output'))
-
-        if len(wfos) == 0:
-            msg = "sendMessageToWfos called with empty WFO list, nothing to do."
-            self.statusBarMsg(msg, 'A')
-            return
-
-        #  Look at each WFO which needs a message
-        results = []
-        for wfo in wfos:
-
-            #  Start constructing the final message
-            final_message = "{} - {} have been sent by NHC.".format(wfo.strip(),
-                                                                message.strip())
-
-            #  If the ProposedSS grids are mentioned, send one message
-            if "ProposedSS" in message:
-                final_message += " Please join the stormsurgecollaboration"
-                final_message += " chat room in NWSChat. You should run "
-                final_message += "the Populate -> CopyNHCProposed procedure "
-                final_message += "now, and start the collaboration process."
-            
-            #  Otherwise, let the WFO's know we're finished with this round 
-            else:
-                final_message += " The collaboration process is now done. "
-                final_message += "You should run the Populate -> "
-                final_message += "MergeProposedSS procedure now, and finish "
-                final_message += "preparing the grids for the WFO TCV."
-
-            #  If we are in test mode, just display the command which
-            #  would be executed
-            if testMode:
-                msg = "Test message to WFO {}: '{}'".format(wfo, final_message)
-                LogStream.logDebug(msg)
-
-                result = ""          #  Simulate a successful transfer
-
-            #  Otherwise, actually send this message
-            else:
-                msg = "Live message to WFO {}: '{}'".format(wfo, final_message)
-                LogStream.logDebug(msg)
-
-                result = self.sendWFOMessage(wfo, final_message)
-
-            #  Keep track of which offices successfully got the message
-            results.append(SendMessageResult(result == "", wfo, result))
-
-
-        #  Comparison function to sort results, by status first, then by WFO
-        def compare(x,y):
-            result = cmp(x.success, y.success)
-
-            if result == 0:
-                result = cmp(x.wfo, y.wfo)
-
-            return result
-
-#         print "*"*80
-#         print results
-
-        total_count = 0
-        fail_count = 0
-        details = ""
-        
-        #  Construct a final status message of the message send status
-        for result in sorted(results, cmp=compare):
-            total_count += 1
-            if result.success:
-                details += "\nMessage successfully sent to site {}.".format(result.wfo)
-            else:
-                fail_count += 1
-                details += "\nCould not send message to site {}. Command output:\n{}".format(result.wfo, result.output)
-
-        if fail_count:
-            msg = "{} of {} server(s) failed to receive WFO message. Site-by-site detail: \n{}".format(fail_count, total_count, details)
-            self.statusBarMsg(msg, 'A')
-        else:
-            msg = "WFO message sent to all {} sites successfully. Site-by-site detail: \n{}".format(total_count, details)
-            self.statusBarMsg(msg, 'R')
-
-
-    #  Define method to determine WFOs which should get a message from NHC
-    def getWfosAttention(self, WEname, anyChanges=None, percentThresh=3):
-        #  anyChanges is a mask, where True means a change in hazards happened
-
-        #  Make a list of WFOs NHC might communicate with
-        searchWfos = set(self._surgeWfos + self._windWfos)
-
-        #  Make sets to track WFOs with only surge hazards, those with only
-        #  wind hazards, and those with both
-        surgeWfos = set()
-        windWfos = set()
-        bothWfos = set()
-
-        #  Make a dictionary of masks for all of these offices
-        officeMasks = {}
-        for wfo in searchWfos:
-            try:
-                officeMasks[wfo] = self.encodeEditArea("ISC_%s" % (wfo.upper()))
-                
-                #  If we are looking for any changes to the underlying field
-                if anyChanges is not None:
-                    
-                    #  See if there are any changes in hazards for this WFO
-                    overlay = (anyChanges & officeMasks[wfo])
-                
-                    if overlay.any():
-                        msg = "Adding to surge - " + wfo + " for changes"
-                        self.statusBarMsg(msg, 'R')
-                        surgeWfos.add(wfo)
-            except:
-                msg = "No edit area found. Removing " + wfo + \
-                      " from further processing."
-                self.statusBarMsg(msg, 'U')
-
-        #  Get the Hazards grid
-        hazardGridList = self.getGrids(self._mutableID, WEname, "SFC",
-                                       TimeRange.allTimes(), mode="List", 
-                                       noDataError=0)
-
-#         print "hazardGridList =", hazardGridList
-
-        #  If there are no hazard grids
-        if hazardGridList is None:
-            hazardGridList = []
-
-        #  Look at each WFO which needs a message
-        for (hazardBytes, hazardKeys) in hazardGridList:
-
-#             print "Starting to examine hazards"
-
-            #  Look at each hazard key in this grid - except the first, 
-            for (index, key) in enumerate(hazardKeys):
-
-                #  Ignore the  and  keys
-                if key in ["", ""]:
-                    continue   #  do not bother looking further
-
-#                 print "\n\nLooking at ", index, key
-
-                #  Check this key for either storm surge (SS), or wind (HU, TR)
-                #  hazards
-                if re.search("(SS|HU|TR).[AW]", key) is not None:
-
-#                     print "found a tropical hazard"
-                    hazardType = "both"         #  assume both hazards are here
-
-                    #-----------------------------------------------------------
-                    #  See if which type of hazard this is
-
-                    #  Wind hazard, no surge hazard
-                    if re.search("(HU|TR).[AW]", key) is not None and \
-                       re.search("SS.[AW]", key) is None:
-
-                        hazardType = "wind-only"
-
-                    #  Surge hazard, no wind hazard
-                    elif re.search("SS.[AW]", key) is not None and \
-                         re.search("(HU|TR).[AW]", key) is None:
-
-                        hazardType = "surge-only"
-
-                    #  See where this hazard is on the grid
-                    hazardMask = hazardBytes == index
-
-                    #  Now determine which offices we need to notify
-                    for wfo, wfoMask in officeMasks.items():
-
-                        #  See if this office has a current hazard
-                        overlay = (officeMasks[wfo] & hazardMask)
-
-                        #  If there are any points which overlap
-                        if overlay.any():
-
-#                            print "Getting zones for '%s'" % (wfo)
-
-                            #  We need to look at all the zones associated
-                            #  with this WFO, get them
-                            zoneList = self.findWfoZones(wfo)
-                            if len(zoneList) == 0:
-                                msg = "\tCould not get zones for " + wfo
-                                LogStream.logProblem(msg)
-                                continue
-
-                            #  Now, process each zone
-                            for zone in zoneList:
-
-#                                print zone,
-
-                                #  Get the mask for this zone
-                                try:
-                                    zoneMask = self.encodeEditArea(zone)
-                                except errno:
-                                    msg = "\tCould not get zone mask for " + wfo
-                                    LogStream.logProblem(msg, LogStream.exc())
-                                    continue
-
-#                                #  If we did not get this mask - move on
-#                                if zoneMask is None:
-#                                    continue
-
-                                #  See if there is an overlap with current
-                                #  hazard type
-                                zoneOverlap = zoneMask & hazardMask
-
-    #=======================================================================
-    #  This code kept in case we need to enforce the 3% area of a zone
-    #  requirement in the future. This would mimic the process of the text
-    #  formatters.
-                                #  Count all the points of the masks
-#                                 countOverlap = np.count_nonzero(zoneOverlap)
-#                                 countMask = np.count_nonzero(zoneMask)
-#
-#                                 #  See if there are enough points to justify
-#                                 #  keeping this zone in the list
-#                                 zonePercent = (
-#                                     float(countOverlap) / float(countMask)
-#                                 )
-
-#                                 print "overlap = %d\tmask = %d\tpercent =%.2f" % \
-#                                       (countOverlap, countMask, zonePercent)
-#
-                                #  If the percentage is high enough
-#                                 if int((zonePercent*100.0) + 0.5) >= percentThresh:
-    #
-    #=======================================================================
-
-                                #  For now, notify any zone which has a
-                                #  possibility for a storm surge hazard
-                                if zoneOverlap.any():
-
-                                    #  We need to notify this WFO
-                                    if hazardType == "wind-only":
-                                        msg = "Adding to wind - " + wfo
-                                        windWfos.add(wfo)
-                                    elif hazardType == "surge-only":
-                                        msg = "Adding to surge - " + wfo
-                                        surgeWfos.add(wfo)
-                                    else:
-                                        msg = "Adding to both - " + wfo
-                                        bothWfos.add(wfo)
-
-                                    self.statusBarMsg(msg, 'R')
-                                    print msg
-
-                                    #  No point in looking at further zones
-                                    break
-
-        #=======================================================================
-        #  Now ensure we do not duplicate WFOs with both hazards in the
-        #  individual hazard sets.  Use this code when we are no longer using
-        #  the text TCV to notify WFOs of tropical wind hazards.
-
-#         for wfo in bothWfos:
-#             if wfo in windWfos:
-#                 windWfos.discard(wfo)
-#             if wfo in surgeWfos:
-#                 surgeWfos.discard(wfo)
-
-        #=======================================================================
-        #  Now ensure we do not duplicate WFOs with both hazards in the
-        #  individual hazard sets - this is for the 2016 season
-
-        for wfo in bothWfos:
-            surgeWfos.add(wfo)
-
-        #  Reset the sets for "both" and "wind-only" WFOs
-        bothWfos = set()
-        windWfos = set()
-
-        #  Return the completed WFO notification list
-        return (list(bothWfos), list(windWfos), list(surgeWfos))
-
-
-    #  Define a method to find zones associated with a WFO
-    def findWfoZones(self, wfo):
-
-        #  Construct the SQL to get these attributes from the maps database
-        reqParms = {'datatype' : 'maps',
-                    'table' : 'mapdata.zone',
-                    'locationField' : 'cwa',
-                    'geomField' : 'the_geom',
-                    'locationNames' : [wfo.strip()],
-                    'parameters' : ['state', 'zone'],
-                    }
-
-        #  Create the Data Access request
-        req = DataAccessLayer.newDataRequest(**reqParms)
-
-        #  Process the response
-        result = DataAccessLayer.getGeometryData(req)
-
-        #  Check if we got a response
-        if not result:
-            # TODO need better error message
-            # What should be done in this case?
-            print "What!??!"
-
-        #  Get ready to track matching zones
-        zoneSet = set()
-
-        #  Process the response contents
-        for record in result:
-
-            #  Retrieve state and zone
-            state = record.getString('state')
-            zone = record.getString('zone')
-
-#             print "*"*(80)
-#             print "state = {}   zone = {}".format(state, zone)
-
-            #  If this is ALY - only keep CT and MA zones
-            if wfo == "ALY" and state not in ["MA", "CT"]:
-                continue
-
-            #  Construct a UGC code and store it for later
-            zoneSet.add(state + "Z" + zone)
-
-#         print "zoneSet =", repr(zoneSet)
-
-        #  Return the completed zone set
-        return zoneSet
-
-
-    #  Define a method to find zones associated with a WFO
-    def notifyWFOs(self, field, anyChanges=None, testMode=None):
-        #  anyChanges is a mask, where True means a change in hazards happened
-
-        #  Ensure the test mode status is set - one way or the other
-        if testMode is None:
-            testMode = self._testMode
-
-#         #  Get the status of each WFO's communications
-#         wfoStatus = self.getWfoStatus()
-
-        #  See which WFOs we need to notify
-        (bothWfos, windWfos, surgeWfos) = self.getWfosAttention(field, 
-                                                                anyChanges)
-
-        #  Send a message to each office
-#         message = "%s grids containing tropical, wind and storm surge hazards"%\
-#                   (field)
-#         self.sendMessageToWfos(bothWfos, message, self._testMode)
-
-#         message = "%s grids containing tropical, wind hazards" % (field)
-#         self.sendMessageToWfos(windWfos, message, self._testMode)
-
-        message = "%s grids containing tropical, storm surge hazards" % (field)
-        self.sendMessageToWfos(surgeWfos, message, testMode)
-
-
-#===============================================================================
-#  Code to process StormInfo files -
-#===============================================================================
-
-    def _synchronizeAdvisories(self):
-        # Retrieving a directory causes synching to occur.
-        # This code can throw an exception but don't catch it
-        # so that forecasters can be made aware of the issue.
-        file = LocalizationSupport.getLocalizationFile(
-                                    LocalizationSupport.CAVE_STATIC,
-                                    LocalizationSupport.SITE, self.getSiteID(),
-                                    self._getAdvisoryPath()).getFile()
-        return file
-
-    #  Constructs the absolute path to the JSON files for this site
-    def _getLocalAdvisoryDirectoryPath(self):
-        file = self._synchronizeAdvisories()
-        path = file.getPath()
-
-        try:
-             os.makedirs(path)
-        except OSError as exception:
-            if exception.errno != errno.EEXIST:
-                raise
-
-        return path
-
-    #  Retrieves the names of the active storm JSON files for further processing  
-    def _getStormAdvisoryNames(self):
-        advisoryDirectoryPath = self._getLocalAdvisoryDirectoryPath()
-        filenames = os.listdir(advisoryDirectoryPath)
-        allAdvisories = filter(lambda filename: filename[-5:] == ".json",
-                               filenames)
-
-        print "allAdvisories = %s" % (pp.pformat(allAdvisories))
-
-        stormAdvisories = filter(lambda filename: filename[:2] == "AT",
-                                 allAdvisories)
-#         stormAdvisories = map(lambda filename: filename[:-5], stormAdvisories)
-        print "stormAdvisories = %s" % (pp.pformat(stormAdvisories))
-
-        return stormAdvisories
-
-    #  Loads a JSON storm record
-    def _loadAdvisory(self, advisoryName):
-        self._synchronizeAdvisories()
-        fileName = self._getAdvisoryFilename(advisoryName)
-
-        try:
-            pythonDict = JsonSupport.loadFromJson(LocalizationSupport.CAVE_STATIC,
-                                             self.getSiteID(), fileName)
-
-            statFileName = os.path.join(os.environ["HOME"], "caveData", "etc",
-                                        "site", self.getSiteID(), fileName)
-            lastModified = os.stat(statFileName).st_mtime
-            pythonDict["lastModified"] = lastModified
-
-            print "File contents for %s:" % (fileName)
-            print pp.pformat(pythonDict)
-
-            return pythonDict
-
-        except Exception, e:
-            print "Load Exception for %s : %s" % (fileName, e)
-            return None
-
-
-    #  Saves a JSON storm record
-    def _saveAdvisory(self, advisoryName, advisoryDict):
-        self._synchronizeAdvisories()
-        fileName = self._getAdvisoryFilename(advisoryName)
-
-        print "Saving %s to %s" % (advisoryName, fileName)
-        print "advisoryDict: %s" % (pp.pformat(advisoryDict))
-
-        try:
-            JsonSupport.saveToJson(LocalizationSupport.CAVE_STATIC,
-                                   self.getSiteID(), fileName, advisoryDict)
-#             os.system('chmod 664 %s' % (fileName))
-        except Exception as e:
-            print "Save Exception for %s : %s" % (fileName, e)
-        else: # No exceptions occurred
-            print "Wrote file contents for: %s" % (fileName)
-
-            # Purposely allow this to throw
-            self._synchronizeAdvisories()
-
-    #  Helper method which identifies where the JSON records go, based on GFE
-    #  operating mode.  PRACTICE mode requires the files be placed in a
-    #  different location in the Localization store
-    def _getAdvisoryPath(self):
-        gfeMode = self.gfeOperatingMode()
-
-        if gfeMode == "PRACTICE":
-            return os.path.join("gfe", "tcvAdvisories", "practice")
-        else:
-            return os.path.join("gfe", "tcvAdvisories")
-
-    #  Helper method which constructs the absolute filename for a JSON record 
-    def _getAdvisoryFilename(self, advisoryName):
-        advisoryFilename = os.path.join(self._getAdvisoryPath(), advisoryName)
-
-        if not advisoryFilename.endswith(".json"):
-            advisoryFilename += ".json"
-        
-        return advisoryFilename
-
-    #  Helper method which coordinates the actual extraction of JSON records 
-    #  into our Python environment
-    def extractStormInfo(self):
-
-        #  Sync the CAVE localization store
-        self._synchronizeAdvisories()
-
-        #  Get the list of all available storm advisories
-        fileList = self._getStormAdvisoryNames()
-
-        #  Get the storm information from each advisory
-        stormList = []
-
-        for f in fileList:
-
-            #  Load this storm info
-            curStorm = self._loadAdvisory(f)
-
-            for key in curStorm:
-
-                #  Convert from unicode strings to a string Java will accept
-                if type(curStorm[key]) is unicode:
-                    curStorm[key] = str(curStorm[key])
-
-            #  Create a dictionary for this storm
-            stormList.append(curStorm)
-
-        return stormList
-
-
-    def determineStorm (self, stormList, bogusStormName):
-        # Decide if this is a new storm or if we need to pre-populate info from existing storm
-##        stormList = self.extractStormInfo()
-        stormNames = []
-        print "/"*100
-        print stormList
-        for sDict in stormList:
-            stormNames.append(sDict["stormName"])
-        stormNames.append("New")
-
-        # Make the variableList dynamically based on the storm info
-        variableList = []
-        variableList.append(("Choose Storm", bogusStormName, "radio",
-                             stormNames))
-
-        # Display the GUI
-        varDict = {}
-        processVarList = ProcessVariableList.ProcessVariableList(
-            "Choose Existing Storm or New Storm", variableList, varDict)
-        status = processVarList.status()
-
-        varDict = processVarList.varDict()
-
-        if status.upper() != "OK":
-            self.cancel()
-
-        # Make sure they only choose one storm
-        selectedName = varDict["Choose Storm"]
-
-        return selectedName
-
-
-#===============================================================================
-#  Miscellaneous helper methods
-#===============================================================================
-
-    # Extract just the wind hazards from the specified hazard grid.
-    def extractWindHazards(self, hazardGridList,
-                           windHazards=["TR.W", "TR.A", "HU.W", "HU.A"]):
-
-        #hazGrid, hazKeys = hazardGridList[hazWindIndex]
-        # Make new empty wind hazard grid
-        windHazGrid = self.empty(np.int8)
-        windKeys = [""]
-
-        # Find the hazardGrid that contains any windHazards.
-        # Reverse the list first so we search backwards
-        hazardGridList.reverse()
-        hazardGrid = None
-        for grid, keys in hazardGridList:
-            if hazardGrid is not None:
-                break
-            for key in keys:
-                for windHaz in windHazards:
-                    # If we find a windHazard, save that grid
-                    if key.find(windHaz):
-                        hazardGrid = (grid, keys)
-
-        # If we didn't find any wind hazards above, return the empty grid
-        if hazardGrid is None:
-            return (windHazGrid, windKeys)
-
-        # Extract just the wind hazards from the grid we found
-        hazGrid, hazKeys = hazardGrid
-        for hazKey in hazKeys:
-            phen = self.keyPhen(hazKey)
-            sig = self.keySig(hazKey)
-            phenSig = phen + "." + sig
-            if phenSig in windHazards:
-                hazIndex = self.getIndex(hazKey, hazKeys)
-                windIndex = self.getIndex(hazKey, windKeys)
-                windHazGrid[hazGrid == hazIndex] = windIndex
-
-        return (windHazGrid, windKeys)
-
-
-    # Merge the specified Discrete grid into the Hazard grid.
-    def mergeDiscreteGrid(self, mergeHazGrid, timeRange):
-
-        mergeGrid, mergeKeys = mergeHazGrid
-
-        for mergeKey in mergeKeys:
-
-            mergeIndex = self.getIndex(mergeKey, mergeKeys)
-            mask = mergeGrid == mergeIndex
-
-            self._hazUtils._addHazard("Hazards", timeRange, mergeKey, mask)
-
-        return
-
-
-    def variableExists(self, modelName, weName, weLevel):
-
-        # it turns out the the modelName will not match the dbID().model()
-        # directly, so it needs to be massaged.
-        modelPos = modelName.find("_D2D_")
-        if modelPos > -1:
-            modelName = modelName[modelPos+5:]
-
-        availParms = self.availableParms()
-        for pName, level, dbID in availParms:
-            if modelName in dbID.model():
-                if weName in pName and weLevel in level:
-                    return True
-
-        return False
-
-
-    def getAvgTopoGrid(self, topodb):
-
-        siteID = self.getSiteID()
-#         print "********************\n TOPO IS: ", topodb
-        dbName = siteID + "_D2D_" + topodb
-
-        weName = "avgTopo"
-        trList = self.GM_getWEInventory(weName, dbName)
-
-        #  Get the GFE topo
-        topoGrid = self.getGrids(dbName, weName, "SFC",
-                                 trList[0], mode="First")
-
-        #  Convert from meters to feet
-        topoGrid /= 0.3048
-
-        topoGrid[topoGrid < -16000] = -80.0
-        mask = topoGrid > 16000
-        topoGrid[mask] = self.getTopo()[mask]
-
-        return topoGrid
-
-
-    def removeEarlierTRs(self, weName):
-
-        #  Get an inventory of all the grids
-        trList = self.GM_getWEInventory(weName, self._mutableID)
-
-        #  Keep the latest grid
-        del trList[-1]
-
-        #  Remove all other grid we found
-        for tr in trList:
-            self.deleteCmd([weName], tr)
-
-        return
-
-
-    def getParmMinMaxLimits(self, modelName, weName):
-
-        #  Get the info for this parameter
-        parm = self.getParm(modelName, weName, "SFC")
-
-        #  Return the valid min and max values
-        return (parm.getGridInfo().getMinValue(),
-                parm.getGridInfo().getMaxValue())
-
-
-    #  Define a method to sort breakpoint record keys
-    def sortBreakpoints(self, a, b):
-
-        #  Make a list of valid string parts
-        validTypes = [
-            "LN",     #  mainland segments
-            "KEY",    #  Florida Keys
-            "ISL",    #  islands
-            "CUBA",   #  Cuba
-            "HISP",   #  Hispaniola
-            "NAI",    #  North Atlantic islands
-            "WTDE",   #  Deleware Bay
-            "WTTP",   #  Tidal Potomac
-            "WTCP",   #  Chesapeake Bay
-            "WTPT",   #  Generic water points
-            "GYC",    #  Guyana
-            "VEC",    #  Venezuela
-            "COC",    #  Colombia
-            "PAC",    #  Panama
-            "CRC",    #  Costa Rica
-            "NIC",    #  Nicaragua
-            "HNC",    #  Honduras
-            "GTC",    #  Guatemala
-            "BZC",    #  Belize
-            "MXC",    #  Mexico
-            "USC",    #  United States
-            "CNC",    #  Canada
-            "KEC",    #  Dry Tortugas
-            "AWC",    #  Aruba
-            "CWC",    #  Curacao
-            "TTC",    #  Trinidad and Tobago
-            "BBC",    #  Barbados
-            "LCC",    #  St. Lucia
-            "MQC",    #  France - Caribbean
-            "AGC",    #  Antigua and Barbuda
-            "BSC",    #  Bahamas
-            "BMC",    #  Bermuda
-            "JMC",    #  Jamaica
-            "KYC",    #  Cayman Islands
-            "CUC",    #  Cuba
-            "DOC",    #  Dominican Republic
-            "HTC",    #  Haiti
-            "PMC",    #  France - North Atlantic
-            "LOC",    #  Lake_Okeechobee
-            "FBC",    #  Florida Bay
-            "PSC",    #  Pamlico Sound
-            "ASC",    #  Albemarle Sound
-            "TXZ",    #  Texas
-            "LAZ",    #  Louisiana
-            "MSZ",    #  Mississippi
-            "ALZ",    #  Alabama
-            "FLZ",    #  Florida
-            "GAZ",    #  Georgia
-            "SCZ",    #  South Carolina
-            "NCZ",    #  North Carolina
-            "VAZ",    #  Virginia
-            "MDZ",    #  Maryland
-            "DCZ",    #  District of Columbia
-            "DEZ",    #  Deleware
-            "NJZ",    #  New Jersey
-            "NYZ",    #  New York
-            "CTZ",    #  Connecticut
-            "RIZ",    #  Rhode Island
-            "MAZ",    #  Massachusetts
-            "NHZ",    #  New Hampshire
-            "MEZ",    #  Maine
-            "NMZ",    #  New Mexico
-            "ARZ",    #  Arkansas
-            "OKZ",    #  Oklahoma
-            "MOZ",    #  Missouri
-            "TNZ",    #  Tennessee
-            "WVZ",    #  West Virginia
-            "PAZ",    #  Pennsylvania
-            "VTZ",    #  Vermont
-            "PRZ",    #  Puerto Rico
-            "VIZ",    #  U.S. Virgin Islands
-            "RE",     #  General edit area collection
-        ]
-
-#         print "a = '%s'    b = '%s'" % (a, b)
-
-        aSeg = a.split("_")[0]
-        bSeg = b.split("_")[0]
-
-        aSegType = ""
-        bSegType = ""
-        aSegNum = ""
-        bSegNum = ""
-
-        for c in aSeg:
-            if c in string.letters:
-                aSegType = aSegType + c
-
-        for c in bSeg:
-            if c in string.letters:
-                bSegType = bSegType + c
-
-        for c in aSeg:
-            if c in string.digits:
-                aSegNum = aSegNum + c
-        for c in bSeg:
-            if c in string.digits:
-                bSegNum = bSegNum + c
-
-        aTypeIndex = validTypes.index(aSegType)
-        bTypeIndex = validTypes.index(bSegType)
-
-        if aTypeIndex < bTypeIndex:
-            return -1
-        elif bTypeIndex < aTypeIndex:
-            return 1
-
-        if int(aSegNum) < int(bSegNum):
-            return -1
-        elif int(bSegNum) < int(aSegNum):
-            return 1
-        else:
-            print "ERROR!!!!!!! Segment names are equal!!!!!!!"
-            return 0
-
-
-#===============================================================================
-#  Hazard grid helper methods
-#===============================================================================
-
-    # Extracts the specified hazard from the hazardGrid. Returns a list of
-    # keys, mask pairs where each hazard exists.
-    def extractHazards(self, hazardGrid, hazard):
-        hazGrid, hazKeys = hazardGrid
-
-        keyMaskList = []
-        for hazIndex, hazKey in enumerate(hazKeys):
-            if hazard in hazKey:
-
-                #  See if this key covers any portion of the domain
-                mask = hazGrid == hazIndex
-                if not mask.any():
-                    continue
-
-                #  Pair this key with its mask
-                keyMaskList.append((hazKey, mask))
-
-        return keyMaskList
-
-
-    def purifyKey(self, hazKey, allowedKeys):
-
-        #  Get ready to process some subkeys
-        subKeyList = set()
-        subKeys = hazKey.split("^")
-
-        #  Process all the hazard subkeys
-        for subKey in subKeys:
-
-            #  Go over all the allowed Hazard keys
-            for allowedKey in allowedKeys:
-
-                #  If this is one of them
-                if allowedKey in subKey:
-
-                    #  Add it to the subkey list - if not already there
-                    if allowedKey not in subKeyList:
-                        subKeyList.add(subKey)
-
-        #  Return the final key
-        return "^".join(subKeyList)
-
-
-    def mergeCertainHazards(self, initalGrid, gridToMerge, hazTR,
-                            selectedHazards=["SS.W", "SS.A"]):
-
-        #  Use the Proposed grid is now the one to use for GFE hazards, for now
-        HazardUtils.ELEMENT = "ProposedSS"
-
-        #  Split the initial grid into its components
-        initialBytes, initialKeys = initalGrid
-
-        #  Look for all the hazards we wish to keep
-        for haz in selectedHazards:
-
-            #  Find all the areas in the domain where this hazard exists
-            keyMaskList = self.extractHazards(gridToMerge, haz)
-
-            #  Process all areas identified to have current tropical hazard
-            for hazKey, hazMask in keyMaskList:
-
-                #  Filter out the hazards we do not want
-                pureHazKey = self.purifyKey(hazKey, selectedHazards)
-
-                #  If there is nothing left to do, move on to next hazard
-                if pureHazKey == "":
-                    continue
-
-                #  Merge these hazards into the initial grid
-                hazIndex = self.getIndex(pureHazKey, initialKeys)
-                self._hazUtils._addHazard("ProposedSS", hazTR, pureHazKey,
-                                          hazMask, combine=1)
-
-        #  Make sure the Hazards grid is now the one to use for GFE hazards
-        HazardUtils.ELEMENT = "Hazards"
-
-        #  Return the merged grid
-        return (initialBytes, initialKeys)
-
-
-#===============================================================================
-#  Generic method to display product text via a GFE procedure/smartTool
-#===============================================================================
-
-    def displayProduct(self, product):
-        """
-        Displays the product text. Returns true if forecaster clicked OK
-        """
-        from com.raytheon.viz.gfe.ui.runtimeui import ValuesDialog
-        varList = []
-        varList.append(("Product Text:", "", "label"))
-        varList.append((product, "", "label"))
-        varList.append(("Click OK to transmit the product", "", "label"))
-        widgetList = self.getVariableListInputs(varList)
-        dialog = ValuesDialog.openDialog("Text Product", widgetList, None)
-        return dialog.getReturnCode() == 0 # 0 is OK, 1 is CANCEL
+# ------------------------------------------------------------------------------

+# This software is in the public domain, furnished "as is", without technical

+# support, and with no warranty, express or implied, as to its usefulness for

+# any purpose.

+#

+# TropicalUtility - Version 3.0

+#

+# Authors:  Matthew H. Belk (BOX), Shannon White (AWIPS), Pablo Santos (MFL),

+# Tom LeFebvre (GSD)

+#

+# Created:  03/03/2012        Last Modified:  04/26/2016

+#

+#  04/26/2016 - Modified to add the displayProduct method supplied by Ron

+#               Anderson (Raytheon)

+# ------------------------------------------------------------------------------

+#

+# SOFTWARE HISTORY

+#

+# Date         Ticket#    Engineer    Description

+# ------------ ---------- ----------- ------------------------------------------

+# Mar 03, 2012                        Initial creation

+# Apr 26, 2016            mbelk       Modified to add the displayProduct method

+#                                     supplied by Ron Anderson (Raytheon)

+# Sep 19, 2016 19293      randerso    Initial baseline check in

+# Feb 21, 2017 29544      randerso    Fix possible RuntimeError caused by

+#                                     discarding from set while looping over it

+#

+################################################################################

+

+##

+# This is an absolute override file, indicating that a higher priority version

+# of the file will completely replace a lower priority version of the file.

+##

+

+import collections

+import errno

+import os

+import re

+import string

+

+from awips.dataaccess import DataAccessLayer

+

+import GridManipulation

+import HazardUtils

+import JsonSupport

+import LocalizationSupport

+import LogStream

+import ProcessVariableList

+import TimeRange

+import numpy as np

+import pprint as pp

+

+

+class TropicalUtility(GridManipulation.GridManipulation):

+

+    def __init__(self, dbss):

+        GridManipulation.GridManipulation.__init__(self, dbss)

+        self._dbss = dbss

+

+        #  Make an instance of the HazardUtils

+        self._hazUtils = HazardUtils.HazardUtils(dbss, None)

+

+        #  Define a base for the ETN issued by a national center

+        self._natlBaseETN = 1000    #  Not used in current tools/procedures

+

+        #  Get the current mutable database ID

+        self._mutableID = self.mutableID()

+

+        #  Make lists of all WFOs we might want to send a message to from NHC.

+        #  The offices are split into coastal offices which deal with storm

+        #  surge, and inland offices which only deal with wind hazards from NHC

+        self._surgeWfos = ["CAR", "GYX", "BOX", "OKX", "PHI", "LWX", "AKQ",

+                           "MHX", "ILM", "CHS", "JAX", "MLB", "MFL", "KEY",

+                           "TBW", "TAE", "MOB", "LIX", "LCH", "HGX", "CRP",

+                           "BRO"]

+        self._windWfos = ["ALY", "MRX", "FFC", "OHX", "HUN", "BMX", "MEG",

+                          "JAN", "LZK", "SHV", "TSA", "FWD", "OUN", "SJT",

+                          "EWX", "MAF"]

+

+        #  Toggle for debugging output

+        self._debug = False                 #  True = On / False = Off

+

+        #  Define test mode for procedure which communicate with WFOs

+##        self._testMode = True    # if True, the command is only printed (test)

+        self._testMode = False     # if False, messages get sent to WFOs (live)

+

+

+    #===========================================================================

+    #  Utility methods to create common dialog buttons and actions

+

+    ### Makes the Run button

+    def makeRunButton(self, buttonFrame):

+        Tkinter.Button(buttonFrame, text="Run", width=10,

+                       command=self.runCommand,

+                       state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5,

+                                                  padx=50, fill=Tkinter.X)

+

+

+    ### Makes the Run/Dismiss button

+    def makeRunDismissButton(self, buttonFrame):

+        Tkinter.Button(buttonFrame, text="Run/Dismiss", width=10,

+                       command=self.runDismissCommand,

+                       state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5,

+                                                  padx=50, fill=Tkinter.X)

+

+

+    ### Makes the Cancel button

+    def makeCancelButton(self, buttonFrame):

+        Tkinter.Button(buttonFrame, text="Cancel", width=10,

+                       command=self.cancelCommand,

+                       state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5,

+                                                  padx=50, fill=Tkinter.X)

+

+

+    ### Action when "Run" button is clicked

+    def runCommand(self):

+        LogStream.logUse("Run")

+        self.makeHazardGrid()

+        return

+

+

+    ### Action when "Run/Dismiss" button is clicked

+    def runDismissCommand(self):

+        LogStream.logUse("Run/Dismiss")

+        if self.makeHazardGrid() == 1:

+            self.cancelIt()

+

+

+    ### Action when "Cancel" button is clicked

+    def cancelCommand(self):

+        # unregister the maps

+        LogStream.logUse("Cancel")

+        self.cancelIt()

+

+

+    ### Actual steps required to cancel/exit

+    def cancelIt(self):

+        # unregister the maps

+        for key in self._registeredMaps:

+            self._mapManager.unregisterMapSet(self._registeredMaps[key].mapId())

+        self.__master.destroy()

+

+

+    def getSubKeys(self, key):

+        parts = key.split("^")

+        if "" in parts:

+            parts = parts.remove("")

+        return parts

+

+

+    #===========================================================================

+    #  Utility methods to manipulate Hazard grids

+

+    # Returns the phen for specified hazard key.

+    # If not a VTEC hazard, returns ""

+    def keyPhen(self, key):

+        pos = key.find(".")

+        if pos == -1:   # not found

+            return ""

+

+        return key[0:pos]

+

+

+    # Parses specified key and returns the sig field.

+    def keySig(self, key):

+        pos = key.find(".")

+        if pos == -1:   # not found

+            return ""

+

+        return key[pos + 1]

+

+

+    # Parse the specified key and return the ETN. If none found,

+    # return an empty string ("")

+    def getETN(self, key):

+

+        subKeys = key.split("^")

+        subKey = subKeys[0]

+        parts = subKey.split(":")

+        if len(parts) < 2:

+            return ""

+        else:

+            return parts[1]

+

+

+    # Checks the specified hazard and proposed keys over the selectedMask

+    # for any conflicting hazards. If found, returns True, otherwise

+    # return False.

+    def anyHazardConflicts(self, hazard, proposed, selectedMask):

+

+        # Make the list of tropical hazards

+        tropicalHazList = ["TR.W", "TR.A", "HU.W", "HU.A", "SS.A", "SS.W"]

+

+        hazGrid, hazKeys = hazard

+        propGrid, propKeys = proposed

+

+        # Make the list of hazard subKeys found in the hazard grid over the

+        # selectedMask

+        hazList = []

+        for hazKey in hazKeys:

+            if hazKey == "":      #  Ignore the  key

+                continue

+            

+            #  Identify the area where this hazard exists

+            hazIndex = self.getIndex(hazKey, hazKeys)

+            mask = hazGrid == hazIndex

+

+            #  Check for overlapping points 

+            overlap = mask & selectedMask

+            

+            #  If there is any overlap

+            if overlap.any():

+                

+                # These keys can have subKeys so separate those, too

+                subKeyList = self.getSubKeys(hazKey)

+                for subKey in subKeyList:

+                    if subKey not in hazList:

+                        hazList.append(subKey)

+

+        #  Look over the proposed hazards keys

+        for propKey in propKeys:

+            if propKey == "":     #  Ignore the  key

+                continue

+

+            # Check for overlapping points

+            propIndex = self.getIndex(propKey, propKeys)

+            propMask = propGrid == propIndex

+            overlap = propMask & selectedMask

+

+            if not overlap.any():  # no points in selectedMask

+                continue

+

+            # Parse the phen, sig, and ETN

+            propPhen = self.keyPhen(propKey)

+            propSig = self.keySig(propKey)

+            propETN = self.getETN(propKey)

+

+            for hazKey in hazList:

+                # See if this hazard overlaps with the current proposed hazard

+                hazIndex = self.getIndex(hazKey, hazKeys)

+                hazMask = hazGrid == hazIndex

+                hazOverlap = hazMask & propMask

+                if not hazOverlap.any():

+                    continue

+

+                # Parse the hazKey

+                hazETN = self.getETN(hazKey)

+                hazPhen = self.keyPhen(hazKey)

+                hazSig = self.keySig(hazKey)

+

+                # reconstruct the phen and sig

+                hazPhenSig = hazPhen + "." + hazSig

+                propPhenSig = propPhen + "." + propSig

+

+                # If the hazard keys are both tropical one the ETNs must match

+                if hazPhenSig in tropicalHazList and \

+                   propPhenSig in tropicalHazList:

+                    if hazETN != propETN:

+                        return True

+

+                # Otherwise if the phenSigs match, the ETNs must match

+                if hazPhenSig == propPhenSig:

+                    if propETN != hazETN:

+                        return True

+        return False

+

+

+    # Check for hazard conflicts on a point by point basis.  Uses the method

+    # anyHazadConflicts to do the logic for checking hazard phen, sig and ETN.

+    def anyHazardConflictsByPoint(self, hazardGrid, proposedGrid, selectedMask):

+        print("Inside MergeTool.")

+        # Make the list of tropical hazards

+        tropicalHazList = ["TR.W", "TR.A", "HU.W", "HU.A", "SS.A", "SS.W"]

+

+        hazGrid, hazKeys = hazardGrid

+        propGrid, propKeys = proposedGrid

+

+        # Make the list of hazards found in the hazard grid over the

+        # selectedMask

+        hazList = []

+        for hazKey in hazKeys:

+            if hazKey == "":

+                continue

+

+            hazIndex = self.getIndex(hazKey, hazKeys)

+            hazMask = hazGrid == hazIndex

+

+            # Now check propKeys

+            for propKey in propKeys:

+                if propKey == "":

+                    continue

+                propIndex = self.getIndex(propKey, propKeys)

+                propMask = propGrid == propIndex

+                overlap = hazMask & propMask & selectedMask

+                if overlap.any():

+                    if self.anyHazardConflicts(hazardGrid, proposedGrid, overlap):

+                        start = int(self._gmtime().unixTime() / 3600) * 3600

+                        end = start + 3600

+                        timeRange = self.GM_makeTimeRange(start, end)

+                        return True

+        return False

+

+    #  Calculates a difference grid (added versus removed)

+    #  Calculates a difference grid (added versus removed)

+    def calcDiffGrid(self, initialGrid, proposedGrid, diffName, timeRange, 

+                     isWFO=False):

+

+        #  If this is a WFO

+        if isWFO:

+            print("Computing a diff grid for WFO")

+            

+            #  Filter the Hazards to only keep the Storm Surge hazards

+            ssKeys = ["SS.W", "SS.A"]

+        

+            initialGrid = self.filterHazardGrid(initialGrid, ssKeys)

+            proposedGrid = self.filterHazardGrid(proposedGrid, ssKeys)

+        

+        #  Split these grids into their components

+        initGrid, initKeys = initialGrid

+        propGrid, propKeys = proposedGrid

+

+        #  Identify where there are no hazards in both grids

+        initNone = self.getIndex("", initKeys)

+        propNone = self.getIndex("", propKeys)

+

+        #  Mask of these areas

+        initNoneMask = (initGrid == initNone)

+        propNoneMask = (propGrid == propNone)

+

+        #  Make an empty grid to hold difference indicator

+        diffGrid = np.zeros(self.getGridShape(), np.float32)

+

+        # Calculate hazards that were removed

+        diffGrid[propNoneMask & ~initNoneMask] = -1

+

+        # Calculate hazards that were added

+        diffGrid[~propNoneMask & initNoneMask] = 1

+        

+        # Find areas that had some hazard and it changed to another hazard

+        for initKey in initKeys:

+            for propKey in propKeys:

+                if initKey == "" or propKey == "":   # ignore any  cases

+                    continue

+                if initKey == propKey: # ignore cases where the keys are the same

+                    continue

+                

+                # Now we know the keys are different and neither is 

+                initIndex = self.getIndex(initKey, initKeys)

+                propIndex = self.getIndex(propKey, propKeys)

+

+                initMask = (initGrid == initIndex)

+                propMask = (propGrid == propIndex)

+                

+                # The intersection is where they changed

+                diffGrid[initMask & propMask] = 2

+

+        #  Add this temporary grid to the grid manager so it can be seen

+        self.createGrid("Fcst", diffName, "SCALAR", diffGrid, timeRange,

+                descriptiveName="Diff Between NHC and WFO",

+                precision=0, minAllowedValue=-1.0, maxAllowedValue=2.0)

+        

+        return 

+    

+

+    def filterHazardGrid(self, hazardGrid, filterKeys):

+        

+        filteredGrid = self.empty(np.int8)

+        filteredKeys = [""]

+        hazGrid, hazKeys = hazardGrid

+        

+        # Find the hazard keys that contain any filter key

+        for filterKey in filterKeys:

+            for hazKey in hazKeys:

+                

+                if filterKey not in hazKey:

+                    continue

+                hazIndex = self.getIndex(hazKey, hazKeys)

+                mask = (hazGrid == hazIndex)  # get the points that are set to this mask

+                # Cleanse the hazKey of all keys except filterKeys

+                newKey = ""

+                splitKeys = hazKey.split("^")

+                for splitKey in splitKeys:

+                    phenSig = splitKey.split(":")[0]

+                    if phenSig not in filterKeys:

+                        continue

+                    

+                    newKey = newKey + splitKey + "^"

+                    

+                if newKey[-1] == "^":

+                    newKey = newKey[0:-1]   # trim the trailing "^"

+                    

+                    

+                newIndex = self.getIndex(newKey, filteredKeys)

+               

+                filteredGrid[mask] = newIndex

+            

+        return filteredGrid, filteredKeys

+    #  Calculates a difference grid (added versus removed) for WFOs

+

+ #==============================================================================

+ #  Methods for sending messages to WFOs

+ #==============================================================================

+

+    #  Define method to send a message to WFOs

+    def sendMessageToWfos(self, wfos, message, testMode=True):

+        SendMessageResult = collections.namedtuple('SendMessageResult',

+                                                   ('success', 'wfo', 'output'))

+

+        if len(wfos) == 0:

+            msg = "sendMessageToWfos called with empty WFO list, nothing to do."

+            self.statusBarMsg(msg, 'A')

+            return

+

+        #  Look at each WFO which needs a message

+        results = []

+        for wfo in wfos:

+

+            #  Start constructing the final message

+            final_message = "{} - {} have been sent by NHC.".format(wfo.strip(),

+                                                                message.strip())

+

+            #  If the ProposedSS grids are mentioned, send one message

+            if "ProposedSS" in message:

+                final_message += " Please join the stormsurgecollaboration"

+                final_message += " chat room in NWSChat. You should run "

+                final_message += "the Populate -> CopyNHCProposed procedure "

+                final_message += "now, and start the collaboration process."

+            

+            #  Otherwise, let the WFO's know we're finished with this round 

+            else:

+                final_message += " The collaboration process is now done. "

+                final_message += "You should run the Populate -> "

+                final_message += "MergeProposedSS procedure now, and finish "

+                final_message += "preparing the grids for the WFO TCV."

+

+            #  If we are in test mode, just display the command which

+            #  would be executed

+            if testMode:

+                msg = "Test message to WFO {}: '{}'".format(wfo, final_message)

+                LogStream.logDebug(msg)

+

+                result = ""          #  Simulate a successful transfer

+

+            #  Otherwise, actually send this message

+            else:

+                msg = "Live message to WFO {}: '{}'".format(wfo, final_message)

+                LogStream.logDebug(msg)

+

+                result = self.sendWFOMessage(wfo, final_message)

+

+            #  Keep track of which offices successfully got the message

+            results.append(SendMessageResult(result == "", wfo, result))

+

+

+        #  Comparison function to sort results, by status first, then by WFO

+        def compare(x,y):

+            result = cmp(x.success, y.success)

+

+            if result == 0:

+                result = cmp(x.wfo, y.wfo)

+

+            return result

+

+#         print "*"*80

+#         print results

+

+        total_count = 0

+        fail_count = 0

+        details = ""

+        

+        #  Construct a final status message of the message send status

+        for result in sorted(results, cmp=compare):

+            total_count += 1

+            if result.success:

+                details += "\nMessage successfully sent to site {}.".format(result.wfo)

+            else:

+                fail_count += 1

+                details += "\nCould not send message to site {}. Command output:\n{}".format(result.wfo, result.output)

+

+        if fail_count:

+            msg = "{} of {} server(s) failed to receive WFO message. Site-by-site detail: \n{}".format(fail_count, total_count, details)

+            self.statusBarMsg(msg, 'A')

+        else:

+            msg = "WFO message sent to all {} sites successfully. Site-by-site detail: \n{}".format(total_count, details)

+            self.statusBarMsg(msg, 'R')

+

+

+    #  Define method to determine WFOs which should get a message from NHC

+    def getWfosAttention(self, WEname, anyChanges=None, percentThresh=3):

+        #  anyChanges is a mask, where True means a change in hazards happened

+

+        #  Make a list of WFOs NHC might communicate with

+        searchWfos = set(self._surgeWfos + self._windWfos)

+

+        #  Make sets to track WFOs with only surge hazards, those with only

+        #  wind hazards, and those with both

+        surgeWfos = set()

+        windWfos = set()

+        bothWfos = set()

+

+        #  Make a dictionary of masks for all of these offices

+        officeMasks = {}

+        for wfo in searchWfos:

+            try:

+                officeMasks[wfo] = self.encodeEditArea("ISC_%s" % (wfo.upper()))

+                

+                #  If we are looking for any changes to the underlying field

+                if anyChanges is not None:

+                    

+                    #  See if there are any changes in hazards for this WFO

+                    overlay = (anyChanges & officeMasks[wfo])

+                

+                    if overlay.any():

+                        msg = "Adding to surge - " + wfo + " for changes"

+                        self.statusBarMsg(msg, 'R')

+                        surgeWfos.add(wfo)

+            except:

+                msg = "No edit area found. Removing " + wfo + \

+                      " from further processing."

+                self.statusBarMsg(msg, 'U')

+

+        #  Get the Hazards grid

+        hazardGridList = self.getGrids(self._mutableID, WEname, "SFC",

+                                       TimeRange.allTimes(), mode="List", 

+                                       noDataError=0)

+

+#         print "hazardGridList =", hazardGridList

+

+        #  If there are no hazard grids

+        if hazardGridList is None:

+            hazardGridList = []

+

+        #  Look at each WFO which needs a message

+        for (hazardBytes, hazardKeys) in hazardGridList:

+

+#             print "Starting to examine hazards"

+

+            #  Look at each hazard key in this grid - except the first, 

+            for (index, key) in enumerate(hazardKeys):

+

+                #  Ignore the  and  keys

+                if key in ["", ""]:

+                    continue   #  do not bother looking further

+

+#                 print "\n\nLooking at ", index, key

+

+                #  Check this key for either storm surge (SS), or wind (HU, TR)

+                #  hazards

+                if re.search("(SS|HU|TR).[AW]", key) is not None:

+

+#                     print "found a tropical hazard"

+                    hazardType = "both"         #  assume both hazards are here

+

+                    #-----------------------------------------------------------

+                    #  See if which type of hazard this is

+

+                    #  Wind hazard, no surge hazard

+                    if re.search("(HU|TR).[AW]", key) is not None and \

+                       re.search("SS.[AW]", key) is None:

+

+                        hazardType = "wind-only"

+

+                    #  Surge hazard, no wind hazard

+                    elif re.search("SS.[AW]", key) is not None and \

+                         re.search("(HU|TR).[AW]", key) is None:

+

+                        hazardType = "surge-only"

+

+                    #  See where this hazard is on the grid

+                    hazardMask = hazardBytes == index

+

+                    #  Now determine which offices we need to notify

+                    for wfo, wfoMask in list(officeMasks.items()):

+

+                        #  See if this office has a current hazard

+                        overlay = (officeMasks[wfo] & hazardMask)

+

+                        #  If there are any points which overlap

+                        if overlay.any():

+

+#                            print "Getting zones for '%s'" % (wfo)

+

+                            #  We need to look at all the zones associated

+                            #  with this WFO, get them

+                            zoneList = self.findWfoZones(wfo)

+                            if len(zoneList) == 0:

+                                msg = "\tCould not get zones for " + wfo

+                                LogStream.logProblem(msg)

+                                continue

+

+                            #  Now, process each zone

+                            for zone in zoneList:

+

+#                                print zone,

+

+                                #  Get the mask for this zone

+                                try:

+                                    zoneMask = self.encodeEditArea(zone)

+                                except errno:

+                                    msg = "\tCould not get zone mask for " + wfo

+                                    LogStream.logProblem(msg, LogStream.exc())

+                                    continue

+

+#                                #  If we did not get this mask - move on

+#                                if zoneMask is None:

+#                                    continue

+

+                                #  See if there is an overlap with current

+                                #  hazard type

+                                zoneOverlap = zoneMask & hazardMask

+

+    #=======================================================================

+    #  This code kept in case we need to enforce the 3% area of a zone

+    #  requirement in the future. This would mimic the process of the text

+    #  formatters.

+                                #  Count all the points of the masks

+#                                 countOverlap = np.count_nonzero(zoneOverlap)

+#                                 countMask = np.count_nonzero(zoneMask)

+#

+#                                 #  See if there are enough points to justify

+#                                 #  keeping this zone in the list

+#                                 zonePercent = (

+#                                     float(countOverlap) / float(countMask)

+#                                 )

+

+#                                 print "overlap = %d\tmask = %d\tpercent =%.2f" % \

+#                                       (countOverlap, countMask, zonePercent)

+#

+                                #  If the percentage is high enough

+#                                 if int((zonePercent*100.0) + 0.5) >= percentThresh:

+    #

+    #=======================================================================

+

+                                #  For now, notify any zone which has a

+                                #  possibility for a storm surge hazard

+                                if zoneOverlap.any():

+

+                                    #  We need to notify this WFO

+                                    if hazardType == "wind-only":

+                                        msg = "Adding to wind - " + wfo

+                                        windWfos.add(wfo)

+                                    elif hazardType == "surge-only":

+                                        msg = "Adding to surge - " + wfo

+                                        surgeWfos.add(wfo)

+                                    else:

+                                        msg = "Adding to both - " + wfo

+                                        bothWfos.add(wfo)

+

+                                    self.statusBarMsg(msg, 'R')

+                                    print(msg)

+

+                                    #  No point in looking at further zones

+                                    break

+

+        #=======================================================================

+        #  Now ensure we do not duplicate WFOs with both hazards in the

+        #  individual hazard sets.  Use this code when we are no longer using

+        #  the text TCV to notify WFOs of tropical wind hazards.

+

+#         for wfo in bothWfos:

+#             if wfo in windWfos:

+#                 windWfos.discard(wfo)

+#             if wfo in surgeWfos:

+#                 surgeWfos.discard(wfo)

+

+        #=======================================================================

+        #  Now ensure we do not duplicate WFOs with both hazards in the

+        #  individual hazard sets - this is for the 2016 season

+

+        for wfo in bothWfos:

+            surgeWfos.add(wfo)

+

+        #  Reset the sets for "both" and "wind-only" WFOs

+        bothWfos = set()

+        windWfos = set()

+

+        #  Return the completed WFO notification list

+        return (list(bothWfos), list(windWfos), list(surgeWfos))

+

+

+    #  Define a method to find zones associated with a WFO

+    def findWfoZones(self, wfo):

+

+        #  Construct the SQL to get these attributes from the maps database

+        reqParms = {'datatype' : 'maps',

+                    'table' : 'mapdata.zone',

+                    'locationField' : 'cwa',

+                    'geomField' : 'the_geom',

+                    'locationNames' : [wfo.strip()],

+                    'parameters' : ['state', 'zone'],

+                    }

+

+        #  Create the Data Access request

+        req = DataAccessLayer.newDataRequest(**reqParms)

+

+        #  Process the response

+        result = DataAccessLayer.getGeometryData(req)

+

+        #  Check if we got a response

+        if not result:

+            # TODO need better error message

+            # What should be done in this case?

+            print("What!??!")

+

+        #  Get ready to track matching zones

+        zoneSet = set()

+

+        #  Process the response contents

+        for record in result:

+

+            #  Retrieve state and zone

+            state = record.getString('state')

+            zone = record.getString('zone')

+

+#             print "*"*(80)

+#             print "state = {}   zone = {}".format(state, zone)

+

+            #  If this is ALY - only keep CT and MA zones

+            if wfo == "ALY" and state not in ["MA", "CT"]:

+                continue

+

+            #  Construct a UGC code and store it for later

+            zoneSet.add(state + "Z" + zone)

+

+#         print "zoneSet =", repr(zoneSet)

+

+        #  Return the completed zone set

+        return zoneSet

+

+

+    #  Define a method to find zones associated with a WFO

+    def notifyWFOs(self, field, anyChanges=None, testMode=None):

+        #  anyChanges is a mask, where True means a change in hazards happened

+

+        #  Ensure the test mode status is set - one way or the other

+        if testMode is None:

+            testMode = self._testMode

+

+#         #  Get the status of each WFO's communications

+#         wfoStatus = self.getWfoStatus()

+

+        #  See which WFOs we need to notify

+        (bothWfos, windWfos, surgeWfos) = self.getWfosAttention(field, 

+                                                                anyChanges)

+

+        #  Send a message to each office

+#         message = "%s grids containing tropical, wind and storm surge hazards"%\

+#                   (field)

+#         self.sendMessageToWfos(bothWfos, message, self._testMode)

+

+#         message = "%s grids containing tropical, wind hazards" % (field)

+#         self.sendMessageToWfos(windWfos, message, self._testMode)

+

+        message = "%s grids containing tropical, storm surge hazards" % (field)

+        self.sendMessageToWfos(surgeWfos, message, testMode)

+

+

+#===============================================================================

+#  Code to process StormInfo files -

+#===============================================================================

+

+    def _synchronizeAdvisories(self):

+        # Retrieving a directory causes synching to occur.

+        # This code can throw an exception but don't catch it

+        # so that forecasters can be made aware of the issue.

+        file = LocalizationSupport.getLocalizationFile(

+                                    LocalizationSupport.CAVE_STATIC,

+                                    LocalizationSupport.SITE, self.getSiteID(),

+                                    self._getAdvisoryPath()).getFile()

+        return file

+

+    #  Constructs the absolute path to the JSON files for this site

+    def _getLocalAdvisoryDirectoryPath(self):

+        file = self._synchronizeAdvisories()

+        path = file.getPath()

+

+        try:

+             os.makedirs(path)

+        except OSError as exception:

+            if exception.errno != errno.EEXIST:

+                raise

+

+        return path

+

+    #  Retrieves the names of the active storm JSON files for further processing  

+    def _getStormAdvisoryNames(self):

+        advisoryDirectoryPath = self._getLocalAdvisoryDirectoryPath()

+        filenames = os.listdir(advisoryDirectoryPath)

+        allAdvisories = [filename for filename in filenames if filename[-5:] == ".json"]

+

+        print("allAdvisories = %s" % (pp.pformat(allAdvisories)))

+

+        stormAdvisories = [filename for filename in allAdvisories if filename[:2] == "AT"]

+#         stormAdvisories = map(lambda filename: filename[:-5], stormAdvisories)

+        print("stormAdvisories = %s" % (pp.pformat(stormAdvisories)))

+

+        return stormAdvisories

+

+    #  Loads a JSON storm record

+    def _loadAdvisory(self, advisoryName):

+        self._synchronizeAdvisories()

+        fileName = self._getAdvisoryFilename(advisoryName)

+

+        try:

+            pythonDict = JsonSupport.loadFromJson(LocalizationSupport.CAVE_STATIC,

+                                             self.getSiteID(), fileName)

+

+            statFileName = os.path.join(os.environ["HOME"], "caveData", "etc",

+                                        "site", self.getSiteID(), fileName)

+            lastModified = os.stat(statFileName).st_mtime

+            pythonDict["lastModified"] = lastModified

+

+            print("File contents for %s:" % (fileName))

+            print(pp.pformat(pythonDict))

+

+            return pythonDict

+

+        except Exception as e:

+            print("Load Exception for %s : %s" % (fileName, e))

+            return None

+

+

+    #  Saves a JSON storm record

+    def _saveAdvisory(self, advisoryName, advisoryDict):

+        self._synchronizeAdvisories()

+        fileName = self._getAdvisoryFilename(advisoryName)

+

+        print("Saving %s to %s" % (advisoryName, fileName))

+        print("advisoryDict: %s" % (pp.pformat(advisoryDict)))

+

+        try:

+            JsonSupport.saveToJson(LocalizationSupport.CAVE_STATIC,

+                                   self.getSiteID(), fileName, advisoryDict)

+#             os.system('chmod 664 %s' % (fileName))

+        except Exception as e:

+            print("Save Exception for %s : %s" % (fileName, e))

+        else: # No exceptions occurred

+            print("Wrote file contents for: %s" % (fileName))

+

+            # Purposely allow this to throw

+            self._synchronizeAdvisories()

+

+    #  Helper method which identifies where the JSON records go, based on GFE

+    #  operating mode.  PRACTICE mode requires the files be placed in a

+    #  different location in the Localization store

+    def _getAdvisoryPath(self):

+        gfeMode = self.gfeOperatingMode()

+

+        if gfeMode == "PRACTICE":

+            return os.path.join("gfe", "tcvAdvisories", "practice")

+        else:

+            return os.path.join("gfe", "tcvAdvisories")

+

+    #  Helper method which constructs the absolute filename for a JSON record 

+    def _getAdvisoryFilename(self, advisoryName):

+        advisoryFilename = os.path.join(self._getAdvisoryPath(), advisoryName)

+

+        if not advisoryFilename.endswith(".json"):

+            advisoryFilename += ".json"

+        

+        return advisoryFilename

+

+    #  Helper method which coordinates the actual extraction of JSON records 

+    #  into our Python environment

+    def extractStormInfo(self):

+

+        #  Sync the CAVE localization store

+        self._synchronizeAdvisories()

+

+        #  Get the list of all available storm advisories

+        fileList = self._getStormAdvisoryNames()

+

+        #  Get the storm information from each advisory

+        stormList = []

+

+        for f in fileList:

+

+            #  Load this storm info

+            curStorm = self._loadAdvisory(f)

+

+            for key in curStorm:

+

+                #  Convert from unicode strings to a string Java will accept

+                if type(curStorm[key]) is str:

+                    curStorm[key] = str(curStorm[key])

+

+            #  Create a dictionary for this storm

+            stormList.append(curStorm)

+

+        return stormList

+

+

+    def determineStorm (self, stormList, bogusStormName):

+        # Decide if this is a new storm or if we need to pre-populate info from existing storm

+##        stormList = self.extractStormInfo()

+        stormNames = []

+        print("/"*100)

+        print(stormList)

+        for sDict in stormList:

+            stormNames.append(sDict["stormName"])

+        stormNames.append("New")

+

+        # Make the variableList dynamically based on the storm info

+        variableList = []

+        variableList.append(("Choose Storm", bogusStormName, "radio",

+                             stormNames))

+

+        # Display the GUI

+        varDict = {}

+        processVarList = ProcessVariableList.ProcessVariableList(

+            "Choose Existing Storm or New Storm", variableList, varDict)

+        status = processVarList.status()

+

+        varDict = processVarList.varDict()

+

+        if status.upper() != "OK":

+            self.cancel()

+

+        # Make sure they only choose one storm

+        selectedName = varDict["Choose Storm"]

+

+        return selectedName

+

+

+#===============================================================================

+#  Miscellaneous helper methods

+#===============================================================================

+

+    # Extract just the wind hazards from the specified hazard grid.

+    def extractWindHazards(self, hazardGridList,

+                           windHazards=["TR.W", "TR.A", "HU.W", "HU.A"]):

+

+        #hazGrid, hazKeys = hazardGridList[hazWindIndex]

+        # Make new empty wind hazard grid

+        windHazGrid = self.empty(np.int8)

+        windKeys = [""]

+

+        # Find the hazardGrid that contains any windHazards.

+        # Reverse the list first so we search backwards

+        hazardGridList.reverse()

+        hazardGrid = None

+        for grid, keys in hazardGridList:

+            if hazardGrid is not None:

+                break

+            for key in keys:

+                for windHaz in windHazards:

+                    # If we find a windHazard, save that grid

+                    if key.find(windHaz):

+                        hazardGrid = (grid, keys)

+

+        # If we didn't find any wind hazards above, return the empty grid

+        if hazardGrid is None:

+            return (windHazGrid, windKeys)

+

+        # Extract just the wind hazards from the grid we found

+        hazGrid, hazKeys = hazardGrid

+        for hazKey in hazKeys:

+            phen = self.keyPhen(hazKey)

+            sig = self.keySig(hazKey)

+            phenSig = phen + "." + sig

+            if phenSig in windHazards:

+                hazIndex = self.getIndex(hazKey, hazKeys)

+                windIndex = self.getIndex(hazKey, windKeys)

+                windHazGrid[hazGrid == hazIndex] = windIndex

+

+        return (windHazGrid, windKeys)

+

+

+    # Merge the specified Discrete grid into the Hazard grid.

+    def mergeDiscreteGrid(self, mergeHazGrid, timeRange):

+

+        mergeGrid, mergeKeys = mergeHazGrid

+

+        for mergeKey in mergeKeys:

+

+            mergeIndex = self.getIndex(mergeKey, mergeKeys)

+            mask = mergeGrid == mergeIndex

+

+            self._hazUtils._addHazard("Hazards", timeRange, mergeKey, mask)

+

+        return

+

+

+    def variableExists(self, modelName, weName, weLevel):

+

+        # it turns out the the modelName will not match the dbID().model()

+        # directly, so it needs to be massaged.

+        modelPos = modelName.find("_D2D_")

+        if modelPos > -1:

+            modelName = modelName[modelPos+5:]

+

+        availParms = self.availableParms()

+        for pName, level, dbID in availParms:

+            if modelName in dbID.model():

+                if weName in pName and weLevel in level:

+                    return True

+

+        return False

+

+

+    def getAvgTopoGrid(self, topodb):

+

+        siteID = self.getSiteID()

+#         print "********************\n TOPO IS: ", topodb

+        dbName = siteID + "_D2D_" + topodb

+

+        weName = "avgTopo"

+        trList = self.GM_getWEInventory(weName, dbName)

+

+        #  Get the GFE topo

+        topoGrid = self.getGrids(dbName, weName, "SFC",

+                                 trList[0], mode="First")

+

+        #  Convert from meters to feet

+        topoGrid /= 0.3048

+

+        topoGrid[topoGrid < -16000] = -80.0

+        mask = topoGrid > 16000

+        topoGrid[mask] = self.getTopo()[mask]

+

+        return topoGrid

+

+

+    def removeEarlierTRs(self, weName):

+

+        #  Get an inventory of all the grids

+        trList = self.GM_getWEInventory(weName, self._mutableID)

+

+        #  Keep the latest grid

+        del trList[-1]

+

+        #  Remove all other grid we found

+        for tr in trList:

+            self.deleteCmd([weName], tr)

+

+        return

+

+

+    def getParmMinMaxLimits(self, modelName, weName):

+

+        #  Get the info for this parameter

+        parm = self.getParm(modelName, weName, "SFC")

+

+        #  Return the valid min and max values

+        return (parm.getGridInfo().getMinValue(),

+                parm.getGridInfo().getMaxValue())

+

+

+    #  Define a method to sort breakpoint record keys

+    def sortBreakpoints(self, a, b):

+

+        #  Make a list of valid string parts

+        validTypes = [

+            "LN",     #  mainland segments

+            "KEY",    #  Florida Keys

+            "ISL",    #  islands

+            "CUBA",   #  Cuba

+            "HISP",   #  Hispaniola

+            "NAI",    #  North Atlantic islands

+            "WTDE",   #  Deleware Bay

+            "WTTP",   #  Tidal Potomac

+            "WTCP",   #  Chesapeake Bay

+            "WTPT",   #  Generic water points

+            "GYC",    #  Guyana

+            "VEC",    #  Venezuela

+            "COC",    #  Colombia

+            "PAC",    #  Panama

+            "CRC",    #  Costa Rica

+            "NIC",    #  Nicaragua

+            "HNC",    #  Honduras

+            "GTC",    #  Guatemala

+            "BZC",    #  Belize

+            "MXC",    #  Mexico

+            "USC",    #  United States

+            "CNC",    #  Canada

+            "KEC",    #  Dry Tortugas

+            "AWC",    #  Aruba

+            "CWC",    #  Curacao

+            "TTC",    #  Trinidad and Tobago

+            "BBC",    #  Barbados

+            "LCC",    #  St. Lucia

+            "MQC",    #  France - Caribbean

+            "AGC",    #  Antigua and Barbuda

+            "BSC",    #  Bahamas

+            "BMC",    #  Bermuda

+            "JMC",    #  Jamaica

+            "KYC",    #  Cayman Islands

+            "CUC",    #  Cuba

+            "DOC",    #  Dominican Republic

+            "HTC",    #  Haiti

+            "PMC",    #  France - North Atlantic

+            "LOC",    #  Lake_Okeechobee

+            "FBC",    #  Florida Bay

+            "PSC",    #  Pamlico Sound

+            "ASC",    #  Albemarle Sound

+            "TXZ",    #  Texas

+            "LAZ",    #  Louisiana

+            "MSZ",    #  Mississippi

+            "ALZ",    #  Alabama

+            "FLZ",    #  Florida

+            "GAZ",    #  Georgia

+            "SCZ",    #  South Carolina

+            "NCZ",    #  North Carolina

+            "VAZ",    #  Virginia

+            "MDZ",    #  Maryland

+            "DCZ",    #  District of Columbia

+            "DEZ",    #  Delaware

+            "NJZ",    #  New Jersey

+            "NYZ",    #  New York

+            "CTZ",    #  Connecticut

+            "RIZ",    #  Rhode Island

+            "MAZ",    #  Massachusetts

+            "NHZ",    #  New Hampshire

+            "MEZ",    #  Maine

+            "NMZ",    #  New Mexico

+            "ARZ",    #  Arkansas

+            "OKZ",    #  Oklahoma

+            "MOZ",    #  Missouri

+            "TNZ",    #  Tennessee

+            "WVZ",    #  West Virginia

+            "PAZ",    #  Pennsylvania

+            "VTZ",    #  Vermont

+            "PRZ",    #  Puerto Rico

+            "VIZ",    #  U.S. Virgin Islands

+            "RE",     #  General edit area collection

+        ]

+

+#         print "a = '%s'    b = '%s'" % (a, b)

+

+        aSeg = a.split("_")[0]

+        bSeg = b.split("_")[0]

+

+        aSegType = ""

+        bSegType = ""

+        aSegNum = ""

+        bSegNum = ""

+

+        for c in aSeg:

+            if c in string.letters:

+                aSegType = aSegType + c

+

+        for c in bSeg:

+            if c in string.letters:

+                bSegType = bSegType + c

+

+        for c in aSeg:

+            if c in string.digits:

+                aSegNum = aSegNum + c

+        for c in bSeg:

+            if c in string.digits:

+                bSegNum = bSegNum + c

+

+        aTypeIndex = validTypes.index(aSegType)

+        bTypeIndex = validTypes.index(bSegType)

+

+        if aTypeIndex < bTypeIndex:

+            return -1

+        elif bTypeIndex < aTypeIndex:

+            return 1

+

+        if int(aSegNum) < int(bSegNum):

+            return -1

+        elif int(bSegNum) < int(aSegNum):

+            return 1

+        else:

+            print("ERROR!!!!!!! Segment names are equal!!!!!!!")

+            return 0

+

+

+#===============================================================================

+#  Hazard grid helper methods

+#===============================================================================

+

+    # Extracts the specified hazard from the hazardGrid. Returns a list of

+    # keys, mask pairs where each hazard exists.

+    def extractHazards(self, hazardGrid, hazard):

+        hazGrid, hazKeys = hazardGrid

+

+        keyMaskList = []

+        for hazIndex, hazKey in enumerate(hazKeys):

+            if hazard in hazKey:

+

+                #  See if this key covers any portion of the domain

+                mask = hazGrid == hazIndex

+                if not mask.any():

+                    continue

+

+                #  Pair this key with its mask

+                keyMaskList.append((hazKey, mask))

+

+        return keyMaskList

+

+

+    def purifyKey(self, hazKey, allowedKeys):

+

+        #  Get ready to process some subkeys

+        subKeyList = set()

+        subKeys = hazKey.split("^")

+

+        #  Process all the hazard subkeys

+        for subKey in subKeys:

+

+            #  Go over all the allowed Hazard keys

+            for allowedKey in allowedKeys:

+

+                #  If this is one of them

+                if allowedKey in subKey:

+

+                    #  Add it to the subkey list - if not already there

+                    if allowedKey not in subKeyList:

+                        subKeyList.add(subKey)

+

+        #  Return the final key

+        return "^".join(subKeyList)

+

+

+    def mergeCertainHazards(self, initalGrid, gridToMerge, hazTR,

+                            selectedHazards=["SS.W", "SS.A"]):

+

+        #  The Proposed grid is now the one to use for GFE hazards, for now

+        HazardUtils.ELEMENT = "ProposedSS"

+

+        #  Split the initial grid into its components

+        initialBytes, initialKeys = initalGrid

+

+        #  Look for all the hazards we wish to keep

+        for haz in selectedHazards:

+

+            #  Find all the areas in the domain where this hazard exists

+            keyMaskList = self.extractHazards(gridToMerge, haz)

+

+            #  Process all areas identified to have current tropical hazard

+            for hazKey, hazMask in keyMaskList:

+

+                #  Filter out the hazards we do not want

+                pureHazKey = self.purifyKey(hazKey, selectedHazards)

+

+                #  If there is nothing left to do, move on to next hazard

+                if pureHazKey == "":

+                    continue

+

+                #  Merge these hazards into the initial grid

+                hazIndex = self.getIndex(pureHazKey, initialKeys)

+                self._hazUtils._addHazard("ProposedSS", hazTR, pureHazKey,

+                                          hazMask, combine=1)

+

+        #  Make sure the Hazards grid is now the one to use for GFE hazards

+        HazardUtils.ELEMENT = "Hazards"

+

+        #  Return the merged grid

+        return (initialBytes, initialKeys)

+

+

+#===============================================================================

+#  Generic method to display product text via a GFE procedure/smartTool

+#===============================================================================

+

+    def displayProduct(self, product):

+        """

+        Displays the product text. Returns true if forecaster clicked OK

+        """

+        from com.raytheon.viz.gfe.ui.runtimeui import ValuesDialog

+        varList = []

+        varList.append(("Product Text:", "", "label"))

+        varList.append((product, "", "label"))

+        varList.append(("Click OK to transmit the product", "", "label"))

+        widgetList = self.getVariableListInputs(varList)

+        dialog = ValuesDialog.openDialog("Text Product", widgetList, None)

+        return dialog.getReturnCode() == 0 # 0 is OK, 1 is CANCEL

diff --git a/cave/com.raytheon.viz.gfe/python/autotest/CreateGrids.py b/cave/com.raytheon.viz.gfe/python/autotest/CreateGrids.py
index d19408b592..236ed0076e 100644
--- a/cave/com.raytheon.viz.gfe/python/autotest/CreateGrids.py
+++ b/cave/com.raytheon.viz.gfe/python/autotest/CreateGrids.py
@@ -1,757 +1,757 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# Create Grids
-#
-# Author: hansen
-# ----------------------------------------------------------------------------
-
-import TestScript
-
-Public_createGrids = [
-        ("Fcst", "MaxT", "SCALAR", "MaxTBegin -24", "MaxTEnd -24", 60, "all"),
-        ("Fcst", "MaxT", "SCALAR", "MaxTBegin", "MaxTEnd", 78, "all"),
-        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 24", "MaxTEnd + 24", 79, "all"),
-        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 48", "MaxTEnd + 48", 78, "all"),
-        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 72", "MaxTEnd + 72", 80, "all"),
-        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 96", "MaxTEnd + 96", 81, "all"),
-        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 120", "MaxTEnd + 120", 83, "all"),
-        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 144", "MaxTEnd + 144", 84, "all"),
-        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 168", "MaxTEnd + 168", 86, "all"),
-
-        ("Fcst", "MinT", "SCALAR", "MinTBegin-24", "MinTEnd-24", 40, "all"),        
-        ("Fcst", "MinT", "SCALAR", "MinTBegin", "MinTEnd", 60, "all"),
-        ("Fcst", "MinT", "SCALAR", "MinTBegin + 24", "MinTEnd + 24", 68, "all"),
-        ("Fcst", "MinT", "SCALAR", "MinTBegin + 48", "MinTEnd + 48", 65, "all"),
-        ("Fcst", "MinT", "SCALAR", "MinTBegin + 72", "MinTEnd + 72", 64, "all"),
-        ("Fcst", "MinT", "SCALAR", "MinTBegin + 96", "MinTEnd + 96", 63, "all"),
-        ("Fcst", "MinT", "SCALAR", "MinTBegin + 120", "MinTEnd + 120", 66, "all"),
-        ("Fcst", "MinT", "SCALAR", "MinTBegin + 144", "MinTEnd + 144", 68, "all"),
-        ("Fcst", "MinT", "SCALAR", "MinTBegin + 168", "MinTEnd + 168", 67, "all"),
-
-        ("Fcst", "T", "SCALAR", 0, 12,  55, "all"),
-        ("Fcst", "T", "SCALAR", 12, 24, 45, "all"),
-        ("Fcst", "T", "SCALAR", 24, 36, 75, "all"),
-        ("Fcst", "T", "SCALAR", 36, 48, 55, "all"),
-        ("Fcst", "T", "SCALAR", 48, 60, 65, "all"),
-        ("Fcst", "T", "SCALAR", 60, 72, 70, "all"),
-        ("Fcst", "T", "SCALAR", 72, 84, 80, "all"),
-        ("Fcst", "T", "SCALAR", 84, 96, 75, "all"),
-        ("Fcst", "T", "SCALAR", 96, 108, 75 , "all"),
-
-        ("Fcst", "Td", "SCALAR", 0, 12,  55, "all"),
-        ("Fcst", "Td", "SCALAR", 12, 24, 45, "all"),
-        ("Fcst", "Td", "SCALAR", 24, 36, 75, "all"),
-        ("Fcst", "Td", "SCALAR", 36, 48, 55, "all"),
-        ("Fcst", "Td", "SCALAR", 48, 60, 65, "all"),
-        ("Fcst", "Td", "SCALAR", 60, 72, 70, "all"),
-        ("Fcst", "Td", "SCALAR", 72, 84, 80, "all"),
-        ("Fcst", "Td", "SCALAR", 84, 96, 75, "all"),
-        ("Fcst", "Td", "SCALAR", 96, 108, 75 , "all"),
-
-        ("Fcst", "HeatIndex", "SCALAR", 0, 12,  95, "all"),
-        ("Fcst", "HeatIndex", "SCALAR", 12, 24, 105, "all"),
-        ("Fcst", "HeatIndex", "SCALAR", 24, 36, 103, "all"),
-        ("Fcst", "HeatIndex", "SCALAR", 36, 48, 85, "all"),
-        ("Fcst", "HeatIndex", "SCALAR", 48, 60, 75, "all"),
-        ("Fcst", "HeatIndex", "SCALAR", 60, 72, 110, "all"),
-        ("Fcst", "HeatIndex", "SCALAR", 72, 84, 120, "all"),
-        ("Fcst", "HeatIndex", "SCALAR", 84, 96, 108, "all"),
-        ("Fcst", "HeatIndex", "SCALAR", 96, 108, 75 , "all"),
-
-        ("Fcst", "WindChill", "SCALAR", 0, 12,  -20, "all"),
-        ("Fcst", "WindChill", "SCALAR", 12, 24, 10, "all"),
-        ("Fcst", "WindChill", "SCALAR", 24, 36, 20, "all"),
-        ("Fcst", "WindChill", "SCALAR", 36, 48, 55, "all"),
-        ("Fcst", "WindChill", "SCALAR", 48, 60, 65, "all"),
-        ("Fcst", "WindChill", "SCALAR", 60, 72, -30, "all"),
-        ("Fcst", "WindChill", "SCALAR", 72, 84, 30, "all"),
-        ("Fcst", "WindChill", "SCALAR", 84, 96, 20, "all"),
-        ("Fcst", "WindChill", "SCALAR", 96, 108, 10, "all"),
-
-        ("Fcst", "Wind", "VECTOR", 0, 12, (10, "SW"), "all"),
-        ("Fcst", "Wind", "VECTOR", 12, 24, (40, "SE"), "all"),
-        ("Fcst", "Wind", "VECTOR", 24, 36, (35, "NW"), "all"),
-        ("Fcst", "Wind", "VECTOR", 36, 48, (45, "W"), "all"),
-        ("Fcst", "Wind", "VECTOR", 48, 60, (50, "SW"), "all"),
-        ("Fcst", "Wind", "VECTOR", 60, 72, (45, "E"), "all"),
-        ("Fcst", "Wind", "VECTOR", 72, 84, (60, "W"), "all"),
-        ("Fcst", "Wind", "VECTOR", 84, 96,(55, "SW"), "all"),
-        ("Fcst", "Wind", "VECTOR", 96, 108,(55, "SW"), "all"),
-        ("Fcst", "Wind", "VECTOR", 108, 120, (42, "E"), "all"),
-        ("Fcst", "Wind", "VECTOR", 120, 132, (45, "E"), "all"),
-        ("Fcst", "Wind", "VECTOR", 132, 144, (46, "E"), "all"),
-        ("Fcst", "Wind", "VECTOR", 144, 156, (48, "E"), "all"),
-        ("Fcst", "Wind", "VECTOR", 156, 168, (60, "E"), "all"),
-        ("Fcst", "Wind", "VECTOR", 168, 180, (35, "E"), "all"),
-        ("Fcst", "Wind", "VECTOR", 180, 192, (50, "E"), "all"),
-
-        ("Fcst", "WindGust", "SCALAR", 0, 12,  25, "all"),
-        ("Fcst", "WindGust", "SCALAR", 12, 24, 0, "all"),
-        ("Fcst", "WindGust", "SCALAR", 24, 36, 45, "all"),
-        ("Fcst", "WindGust", "SCALAR", 36, 48, 0, "all"),
-        ("Fcst", "WindGust", "SCALAR", 48, 60, 0, "all"),
-        ("Fcst", "WindGust", "SCALAR", 60, 72, 0, "all"),
-        ("Fcst", "WindGust", "SCALAR", 72, 84, 0, "all"),
-        ("Fcst", "WindGust", "SCALAR", 84, 96, 0, "all"),
-        ("Fcst", "WindGust", "SCALAR", 96, 108, 0, "all"),
-
-        ("Fcst", "SnowAmt", "SCALAR", 0, 12,  2, "all"),
-        ("Fcst", "SnowAmt", "SCALAR", 12, 24, 0, "all"),
-        ("Fcst", "SnowAmt", "SCALAR", 24, 36, 3, "all"),
-        ("Fcst", "SnowAmt", "SCALAR", 36, 48, 5, "all"),
-        ("Fcst", "SnowAmt", "SCALAR", 48, 60, 10, "all"),
-        ("Fcst", "SnowAmt", "SCALAR", 60, 72, 0, "all"),
-        ("Fcst", "SnowAmt", "SCALAR", 72, 84, 2, "all"),
-        ("Fcst", "SnowAmt", "SCALAR", 84, 96, 4, "all"),
-        ("Fcst", "SnowAmt", "SCALAR", 96, 108, 0, "all"),
-
-        ("Fcst", "IceAccum", "SCALAR", 0, 12,  2, "all"),
-        ("Fcst", "IceAccum", "SCALAR", 12, 24, 0, "all"),
-        ("Fcst", "IceAccum", "SCALAR", 24, 36, 3, "all"),
-        ("Fcst", "IceAccum", "SCALAR", 36, 48, 5, "all"),
-        ("Fcst", "IceAccum", "SCALAR", 48, 60, 5, "all"),
-        ("Fcst", "IceAccum", "SCALAR", 60, 72, 0, "all"),
-        ("Fcst", "IceAccum", "SCALAR", 72, 84, 2, "all"),
-        ("Fcst", "IceAccum", "SCALAR", 84, 96, 4, "all"),
-        ("Fcst", "IceAccum", "SCALAR", 96, 108, 0, "all"),
-
-        ("Fcst", "SnowLevel", "SCALAR", 0, 12,  500, "all"),
-        ("Fcst", "SnowLevel", "SCALAR", 12, 24, 50, "all"),
-        ("Fcst", "SnowLevel", "SCALAR", 24, 36, 1000, "all"),
-        ("Fcst", "SnowLevel", "SCALAR", 36, 48, 500, "all"),
-        ("Fcst", "SnowLevel", "SCALAR", 48, 60, 100, "all"),
-        ("Fcst", "SnowLevel", "SCALAR", 60, 72, 1000, "all"),
-        ("Fcst", "SnowLevel", "SCALAR", 72, 84, 2000, "all"),
-        ("Fcst", "SnowLevel", "SCALAR", 84, 96, 0, "all"),
-        ("Fcst", "SnowLevel", "SCALAR", 96, 108, 0, "all"),
-        
-        ("Fcst", "FzLevel", "SCALAR", 0, 24, 5000, "all"),
-        ("Fcst", "FzLevel", "SCALAR", 24, 48, 10000, "all"),
-        ("Fcst", "FzLevel", "SCALAR", 48, 72, 4000, "all"),
-        ("Fcst", "FzLevel", "SCALAR", 72, 96, 20000, "all"),
-        ("Fcst", "FzLevel", "SCALAR", 96, 120, 3000, "all"),
-        ("Fcst", "FzLevel", "SCALAR", 120, 144, 16000, "all"),
-        ("Fcst", "FzLevel", "SCALAR", 144, 168, 18500, "all"),
-        ("Fcst", "FzLevel", "SCALAR", 168, 192, 21000, "all"),
-
-        ("Fcst", "Sky", "SCALAR", 0, 12, 100, "all"),
-        ("Fcst", "Sky", "SCALAR", 12, 24, 95, "all"),
-        ("Fcst", "Sky", "SCALAR", 24, 36, 0, "all"),
-        ("Fcst", "Sky", "SCALAR", 36, 48, 15, "all"),
-        ("Fcst", "Sky", "SCALAR", 48, 60, 30, "all"),
-        ("Fcst", "Sky", "SCALAR", 60, 72, 55, "all"),
-        ("Fcst", "Sky", "SCALAR", 72, 84, 65, "all"),
-        ("Fcst", "Sky", "SCALAR", 84, 96, 70, "all"),
-        ("Fcst", "Sky", "SCALAR", 96, 108, 30, "all"),
-        ("Fcst", "Sky", "SCALAR", 108, 120, 48, "all"),
-        ("Fcst", "Sky", "SCALAR", 120, 132, 100, "all"),
-        ("Fcst", "Sky", "SCALAR", 132, 144, 10, "all"),
-        ("Fcst", "Sky", "SCALAR", 144, 156, 75, "all"),
-        ("Fcst", "Sky", "SCALAR", 156, 168, 25, "all"),
-        ("Fcst", "Sky", "SCALAR", 168, 180, 20, "all"),
-        ("Fcst", "Sky", "SCALAR", 180, 192, 87, "all"),
-                
-        ("Fcst", "Wx", "WEATHER", 0, 12, "Patchy:F:+::", "all"),
-        ("Fcst", "Wx", "WEATHER", 12, 24, "Wide:T:::", "all"),
-        ("Fcst", "Wx", "WEATHER", 24, 36, "Chc:RW:-::", "all"),
-        ("Fcst", "Wx", "WEATHER", 36, 48, "Frq:R:--::", "all"),
-        ("Fcst", "Wx", "WEATHER", 48, 60, "Wide:ZR:-::", "all"),
-        ("Fcst", "Wx", "WEATHER", 60, 72, "Lkly:S:--::", "all"),
-        ("Fcst", "Wx", "WEATHER", 72, 84, "Wide:IP:--::", "all"),
-        ("Fcst", "Wx", "WEATHER", 84, 96, "Areas:BS:::", "all"),
-        ("Fcst", "Wx", "WEATHER", 96, 108, "Patchy:F:::", "all"),
-        ("Fcst", "Wx", "WEATHER", 108, 120, "Lkly:L:--::", "all"),
-        ("Fcst", "Wx", "WEATHER", 120, 132, "SChc:ZL:--::", "all"),
-        ("Fcst", "Wx", "WEATHER", 132, 144, "Num:T:::", "all"),
-        ("Fcst", "Wx", "WEATHER", 144, 156, "Iso:ZY:-::", "all"),
-        ("Fcst", "Wx", "WEATHER", 156, 168, "Areas:FR:::", "all"),
-        ("Fcst", "Wx", "WEATHER", 168, 180, "Chc:RW:-::", "all"),
-        ("Fcst", "Wx", "WEATHER", 180, 192, "Brf:R:m::", "all"),
-
-        ("Fcst", "PoP", "SCALAR", 0, 12, 0, "all"),
-        ("Fcst", "PoP", "SCALAR", 12, 24, 90 , "all"),
-        ("Fcst", "PoP", "SCALAR", 24, 36, 90, "all"),
-        ("Fcst", "PoP", "SCALAR", 36, 48, 90, "all"),
-        ("Fcst", "PoP", "SCALAR", 48, 60, 90, "all"),
-        ("Fcst", "PoP", "SCALAR", 60, 72, 70, "all"),
-        ("Fcst", "PoP", "SCALAR", 72, 84, 90, "all"),
-        ("Fcst", "PoP", "SCALAR", 84, 96, 0, "all"),
-        ("Fcst", "PoP", "SCALAR", 96, 108, 0, "all"),
-        ("Fcst", "PoP", "SCALAR", 108, 120, 70, "all"),
-        ("Fcst", "PoP", "SCALAR", 120, 132, 20, "all"),
-        ("Fcst", "PoP", "SCALAR", 132, 144, 70, "all"),
-        ("Fcst", "PoP", "SCALAR", 144, 156, 20, "all"),
-        ("Fcst", "PoP", "SCALAR", 156, 168, 0, "all"),
-        ("Fcst", "PoP", "SCALAR", 168, 180, 40, "all"),
-        ("Fcst", "PoP", "SCALAR", 180, 192, 90, "all"),
-
-        ]
-
-Hazard_createGrids = [
-        ("Fcst", "Hazards", "DISCRETE", 0, 96, "CF.Y^FF.A^MA.S^FW.W^BZ.W", "all"),
-        ]
-
-Fire_createGrids = [
-        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin-24", "MaxRHEnd-24", 60, "all"),
-        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin", "MaxRHEnd", 78, "all"),
-        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 24", "MaxRHEnd + 24", 80, "all"),
-        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 48", "MaxRHEnd + 48", 85, "all"),
-        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 72", "MaxRHEnd + 72", 90, "all"),
-        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 96", "MaxRHEnd + 96", 87, "all"),
-        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 120", "MaxRHEnd + 120", 88, "all"),
-        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 144", "MaxRHEnd + 144", 89, "all"),
-        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 168", "MaxRHEnd + 168", 90, "all"),
-        
-        ("Fcst", "MinRH", "SCALAR", "MinRHBegin-24", "MinRHEnd-24", 40, "all"),
-        ("Fcst", "MinRH", "SCALAR", "MinRHBegin", "MinRHEnd", 65, "all"),
-        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 24", "MinRHEnd + 24", 68, "all"),
-        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 48", "MinRHEnd + 48", 70, "all"),
-        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 72", "MinRHEnd + 72", 73, "all"), 
-        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 96", "MinRHEnd + 96", 74, "all"),
-        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 120", "MinRHEnd + 120", 72, "all"),
-        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 144", "MinRHEnd + 144", 70, "all"),
-        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 168", "MinRHEnd + 168", 71, "all"),
-        
-        ("Fcst", "RH", "SCALAR", 0, 12, 60, "all"),
-        ("Fcst", "RH", "SCALAR", 12, 24, 78, "all"),
-        ("Fcst", "RH", "SCALAR", 24, 36, 30, "all"),
-        ("Fcst", "RH", "SCALAR", 36, 48, 45, "all"),
-        ("Fcst", "RH", "SCALAR", 48, 60, 55, "all"),
-        ("Fcst", "RH", "SCALAR", 60, 72, 65, "all"),
-        ("Fcst", "RH", "SCALAR", 72, 84, 70, "all"),
-        ("Fcst", "RH", "SCALAR", 84, 96, 45, "all"),
-        ("Fcst", "RH", "SCALAR", 96, 108, 20, "all"),
-        ("Fcst", "RH", "SCALAR", 108, 120, 25, "all"),
-        ("Fcst", "RH", "SCALAR", 120, 132, 35, "all"),
-        ("Fcst", "RH", "SCALAR", 132, 144, 43, "all"),
-        ("Fcst", "RH", "SCALAR", 144, 156, 15, "all"),
-        ("Fcst", "RH", "SCALAR", 156, 168, 3, "all"),
-        ("Fcst", "RH", "SCALAR", 168, 180, 28, "all"),
-        ("Fcst", "RH", "SCALAR", 180, 192, 90, "all"),
-
-        ("Fcst", "TransWind", "VECTOR", 0, 12, (10, "SW"), "all"),
-        ("Fcst", "TransWind", "VECTOR", 12, 24, (5, "W"), "all"),
-        ("Fcst", "TransWind", "VECTOR", 24, 48, (10, "NW"), "all"),
-        ("Fcst", "TransWind", "VECTOR", 48, 72, (20, "W"), "all"),
-        ("Fcst", "TransWind", "VECTOR", 72, 96, (30, "W"), "all"),
-        ("Fcst", "TransWind", "VECTOR", 96, 120, (40, "W"), "all"),
-        ("Fcst", "TransWind", "VECTOR", 120, 144, (50, "W"), "all"),
-        ("Fcst", "TransWind", "VECTOR", 144, 168, (60, "W"), "all"),
-        ("Fcst", "TransWind", "VECTOR", 168, 192, (70, "W"), "all"),
-
-        ("Fcst", "LAL", "SCALAR", 0, 12, 1, "all"),
-        ("Fcst", "LAL", "SCALAR", 12, 24, 2, "all"),
-        ("Fcst", "LAL", "SCALAR", 24, 36, 3, "all"),
-        ("Fcst", "LAL", "SCALAR", 36, 48, 4, "all"),
-        ("Fcst", "LAL", "SCALAR", 48, 60, 5, "all"),
-        ("Fcst", "LAL", "SCALAR", 60, 72, 6, "all"),
-        ("Fcst", "LAL", "SCALAR", 72, 84, 3, "all"),
-        ("Fcst", "LAL", "SCALAR", 84, 96, 1, "all"),
-        ("Fcst", "LAL", "SCALAR", 96, 108, 2, "all"),
-        ("Fcst", "LAL", "SCALAR", 108, 120, 4, "all"),
-        ("Fcst", "LAL", "SCALAR", 120, 132, 5, "all"),
-        ("Fcst", "LAL", "SCALAR", 132, 144, 3, "all"),
-        ("Fcst", "LAL", "SCALAR", 144, 156, 2, "all"),
-        ("Fcst", "LAL", "SCALAR", 156, 168, 5, "all"),
-        ("Fcst", "LAL", "SCALAR", 168, 180, 6, "all"),
-        ("Fcst", "LAL", "SCALAR", 180, 192, 3, "all"),
-                        
-        ("Fcst", "CWR", "SCALAR", 0, 12, 0, "all"),
-        ("Fcst", "CWR", "SCALAR", 12, 24, 20, "all"),
-        ("Fcst", "CWR", "SCALAR", 24, 36, 30, "all"),
-        ("Fcst", "CWR", "SCALAR", 36, 48, 30, "all"),
-        ("Fcst", "CWR", "SCALAR", 48, 60, 45, "all"),
-        ("Fcst", "CWR", "SCALAR", 60, 72, 60, "all"),
-        ("Fcst", "CWR", "SCALAR", 72, 84, 25, "all"),
-        ("Fcst", "CWR", "SCALAR", 84, 96, 47, "all"),
-        ("Fcst", "CWR", "SCALAR", 96, 108, 34, "all"),
-        ("Fcst", "CWR", "SCALAR", 108, 120, 60, "all"),
-        ("Fcst", "CWR", "SCALAR", 120, 132, 55, "all"),
-        ("Fcst", "CWR", "SCALAR", 132, 144, 50, "all"),
-        ("Fcst", "CWR", "SCALAR", 144, 156, 20, "all"),
-        ("Fcst", "CWR", "SCALAR", 156, 168, 10, "all"),
-        ("Fcst", "CWR", "SCALAR", 168, 180, 5, "all"),
-        ("Fcst", "CWR", "SCALAR", 180, 192, 40, "all"),
-                
-        ("Fcst", "QPF", "SCALAR", 0, 12, 0, "all"),
-        ("Fcst", "QPF", "SCALAR", 12, 24, 0.05, "all"),
-        ("Fcst", "QPF", "SCALAR", 24, 36, 0.1, "all"),
-        ("Fcst", "QPF", "SCALAR", 36, 48, 0, "all"),
-        ("Fcst", "QPF", "SCALAR", 48, 60, 5, "all"),
-        ("Fcst", "QPF", "SCALAR", 60, 72, 4.5, "all"),
-        ("Fcst", "QPF", "SCALAR", 72, 84, 1.5, "all"),
-        ("Fcst", "QPF", "SCALAR", 84, 96, 2.5, "all"),
-        ("Fcst", "QPF", "SCALAR", 96, 108, 3.5, "all"),
-        ("Fcst", "QPF", "SCALAR", 108, 120, 4.0, "all"),
-        ("Fcst", "QPF", "SCALAR", 120, 132, 1.0, "all"),
-        ("Fcst", "QPF", "SCALAR", 132, 144, 2.0, "all"),
-        ("Fcst", "QPF", "SCALAR", 144, 156, 3.0, "all"),
-        ("Fcst", "QPF", "SCALAR", 156, 168, 1.3, "all"),
-        ("Fcst", "QPF", "SCALAR", 168, 180, 0.12, "all"),
-        ("Fcst", "QPF", "SCALAR", 180, 192, 0.34, "all"),
-        
-        ("Fcst", "Haines", "SCALAR", 0, 12, 2, "all"),
-        ("Fcst", "Haines", "SCALAR", 12, 24, 3, "all"),
-        ("Fcst", "Haines", "SCALAR", 24, 36, 4, "all"),
-        ("Fcst", "Haines", "SCALAR", 36, 48, 6, "all"),
-        ("Fcst", "Haines", "SCALAR", 48, 60, 2, "all"),
-        ("Fcst", "Haines", "SCALAR", 60, 72, 3, "all"),
-        ("Fcst", "Haines", "SCALAR", 72, 84, 2, "all"),
-        ("Fcst", "Haines", "SCALAR", 84, 96, 3, "all"),
-        ("Fcst", "Haines", "SCALAR", 96, 108, 5, "all"),
-        ("Fcst", "Haines", "SCALAR", 108, 120, 6, "all"),
-        ("Fcst", "Haines", "SCALAR", 120, 132, 3, "all"),
-        ("Fcst", "Haines", "SCALAR", 132, 144, 2, "all"),
-        ("Fcst", "Haines", "SCALAR", 144, 156, 3, "all"),
-        ("Fcst", "Haines", "SCALAR", 156, 168, 4, "all"),
-        ("Fcst", "Haines", "SCALAR", 168, 180, 3, "all"),
-        ("Fcst", "Haines", "SCALAR", 180, 192, 6, "all"),
-        
-        ("Fcst", "MixHgt", "SCALAR", 0, 24, 5000, "all"),
-        ("Fcst", "MixHgt", "SCALAR", 24, 48, 10000, "all"),
-        ("Fcst", "MixHgt", "SCALAR", 48, 72, 4000, "all"),
-        ("Fcst", "MixHgt", "SCALAR", 72, 96, 20000, "all"),
-        ("Fcst", "MixHgt", "SCALAR", 96, 120, 3000, "all"),
-        ("Fcst", "MixHgt", "SCALAR", 120, 144, 16000, "all"),
-        ("Fcst", "MixHgt", "SCALAR", 144, 168, 18500, "all"),
-        ("Fcst", "MixHgt", "SCALAR", 168, 192, 20000, "all"),
-
-        ("Fcst", "MarineLayer", "SCALAR", 0, 24, 1000, "all"),
-        ("Fcst", "MarineLayer", "SCALAR", 24, 48, 2000, "all"),
-        ("Fcst", "MarineLayer", "SCALAR", 48, 72, 4000, "all"),
-        ("Fcst", "MarineLayer", "SCALAR", 72, 96, 5280, "all"),
-        ("Fcst", "MarineLayer", "SCALAR", 96, 120, 6500, "all"),
-        ("Fcst", "MarineLayer", "SCALAR", 120, 144, 10000, "all"),
-        ("Fcst", "MarineLayer", "SCALAR", 144, 168, 12300, "all"),
-        ("Fcst", "MarineLayer", "SCALAR", 168, 192, 14500, "all"),
-
-        ("Fcst", "Wind20ft", "VECTOR", 0, 12, (5, "N"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 12, 24, (40, "NE"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 24, 36, (10, "NW"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 36, 48, (0, "N"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 48, 60, (125, "E"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 60, 72, (90, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 72, 84, (50, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 84, 96, (100, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 96, 108, (0, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 108, 120, (10, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 120, 132, (30, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 132, 144, (60, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 144, 156, (25, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 156, 168, (68, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 168, 180, (15, "S"), "all"),
-        ("Fcst", "Wind20ft", "VECTOR", 180, 192, (2, "S"), "all"),
-        
-        ("Fcst", "VentRate", "SCALAR", 0, 12, 160000, "all"),
-        ("Fcst", "VentRate", "SCALAR", 12, 24, 100000, "all"),
-        ("Fcst", "VentRate", "SCALAR", 24, 36, 50000, "all"),
-        ("Fcst", "VentRate", "SCALAR", 36, 48, 20000, "all"),
-        ("Fcst", "VentRate", "SCALAR", 48, 60, 70000, "all"),
-        ("Fcst", "VentRate", "SCALAR", 60, 144, 4000, "all"),
-        ("Fcst", "VentRate", "SCALAR", 144, 168, 6900, "all"),
-        ("Fcst", "VentRate", "SCALAR", 168, 192, 30000, "all"),
-        
-        ("Fcst", "Stability", "SCALAR", 0, 12, 1, "all"),
-        ("Fcst", "Stability", "SCALAR", 12, 24, 2, "all"), 
-        ("Fcst", "Stability", "SCALAR", 24, 36, 1, "all"),
-        ("Fcst", "Stability", "SCALAR", 36, 48, 3, "all"),
-        ("Fcst", "Stability", "SCALAR", 48, 60, 4, "all"),
-        ("Fcst", "Stability", "SCALAR", 60, 72, 5, "all"),
-        ("Fcst", "Stability", "SCALAR", 72, 84, 1, "all"),
-        ("Fcst", "Stability", "SCALAR", 84, 96, 2, "all"),
-        ("Fcst", "Stability", "SCALAR", 96, 108, 3, "all"),
-        ("Fcst", "Stability", "SCALAR", 108, 120, 4, "all"),
-        ("Fcst", "Stability", "SCALAR", 120, 132, 5, "all"),
-        ("Fcst", "Stability", "SCALAR", 132, 144, 4, "all"),
-        ("Fcst", "Stability", "SCALAR", 144, 156, 3, "all"),
-        ("Fcst", "Stability", "SCALAR", 156, 168, 2, "all"),
-        ("Fcst", "Stability", "SCALAR", 168, 180, 1, "all"),
-        ("Fcst", "Stability", "SCALAR", 180, 192, 3, "all"),
-        
-        ("Fcst", "HrsOfSun", "SCALAR", 0, 24, 6, "all"),
-        ("Fcst", "HrsOfSun", "SCALAR", 24, 48, 7, "all"),
-        ("Fcst", "HrsOfSun", "SCALAR", 48, 72, 5, "all"),
-        ("Fcst", "HrsOfSun", "SCALAR", 72, 96, 5, "all"),
-        ("Fcst", "HrsOfSun", "SCALAR", 96, 120, 5, "all"),
-        ("Fcst", "HrsOfSun", "SCALAR", 120, 144, 5, "all"),
-        ("Fcst", "HrsOfSun", "SCALAR", 144, 168, 5, "all"),
-        ("Fcst", "HrsOfSun", "SCALAR", 168, 192, 5, "all"),
-        
-        ("Fcst", "DSI", "SCALAR", 0, 12, 0, "all"),
-        ("Fcst", "DSI", "SCALAR", 12, 24, 2, "all"),
-        ("Fcst", "DSI", "SCALAR", 24, 36, 6, "all"),
-        ("Fcst", "DSI", "SCALAR", 36, 48, 1, "all"),
-        ("Fcst", "DSI", "SCALAR", 48, 60, 5, "all"),
-        ("Fcst", "DSI", "SCALAR", 60, 72, 4, "all"),
-        ("Fcst", "DSI", "SCALAR", 72, 84, 3, "all"),
-        ("Fcst", "DSI", "SCALAR", 84, 96, 2, "all"),
-        ("Fcst", "DSI", "SCALAR", 96, 108, 1, "all"),
-        ("Fcst", "DSI", "SCALAR", 108, 120, 0, "all"),
-        ("Fcst", "DSI", "SCALAR", 120, 132, 5, "all"),
-        ("Fcst", "DSI", "SCALAR", 132, 144, 4, "all"),
-        ("Fcst", "DSI", "SCALAR", 144, 156, 3, "all"),
-        ("Fcst", "DSI", "SCALAR", 156, 168, 2, "all"),
-        ("Fcst", "DSI", "SCALAR", 168, 180, 1, "all"),
-        ("Fcst", "DSI", "SCALAR", 180, 192, 0, "all"),
-        ]
-
-
-Marine_createGrids = [
-        ("Fcst", "Swell", "VECTOR", 0, 3, (10, "SW"), "all"),
-        ("Fcst", "Swell", "VECTOR", 3, 6, (20, "W"), "all"),
-        ("Fcst", "Swell", "VECTOR", 6, 9, (30, "W"), "all"),
-        ("Fcst", "Swell", "VECTOR", 9, 12, (20, "SW"), "all"),
-        ("Fcst", "Swell", "VECTOR", 12, 18, (40, "SE"), "all"),
-        ("Fcst", "Swell", "VECTOR", 18, 24, (40, "SW"), "all"),
-        ("Fcst", "Swell", "VECTOR", 24, 36, (35, "NW"), "all"),
-        ("Fcst", "Swell", "VECTOR", 36, 48, (45, "W"), "all"),
-        ("Fcst", "Swell", "VECTOR", 48, 60, (50, "SW"), "all"),
-        ("Fcst", "Swell", "VECTOR", 60, 72, (45, "E"), "all"),
-        ("Fcst", "Swell", "VECTOR", 72, 84, (60, "W"), "all"),
-        ("Fcst", "Swell", "VECTOR", 84, 96,(55, "SW"), "all"),
-        ("Fcst", "Swell", "VECTOR", 96, 108,(55, "SW"), "all"),
-        ("Fcst", "Swell", "VECTOR", 108, 120, (42, "E"), "all"),
-        ("Fcst", "Swell", "VECTOR", 120, 132, (45, "E"), "all"),
-        ("Fcst", "Swell", "VECTOR", 132, 144, (46, "E"), "all"),
-        ("Fcst", "Swell", "VECTOR", 144, 156, (48, "E"), "all"),
-        ("Fcst", "Swell", "VECTOR", 156, 168, (60, "E"), "all"),
-        ("Fcst", "Swell", "VECTOR", 168, 180, (35, "E"), "all"),
-        ("Fcst", "Swell", "VECTOR", 180, 192, (50, "E"), "all"),
-
-        ("Fcst", "Swell2", "VECTOR", 0, 3, (10, "NE"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 3, 6, (20, "E"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 6, 9, (30, "E"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 9, 12, (20, "SE"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 12, 18, (40, "SW"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 18, 24, (40, "SE"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 24, 36, (35, "NE"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 36, 48, (45, "E"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 48, 60, (50, "SE"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 60, 72, (45, "W"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 72, 84, (60, "E"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 84, 96,(55, "SE"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 96, 108,(55, "SE"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 108, 120, (42, "W"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 120, 132, (45, "W"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 132, 144, (46, "W"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 144, 156, (48, "W"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 156, 168, (60, "W"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 168, 180, (35, "W"), "all"),
-        ("Fcst", "Swell2", "VECTOR", 180, 192, (50, "W"), "all"),
-
-        ("Fcst", "Period", "SCALAR", 0, 3,  10, "all"),
-        ("Fcst", "Period", "SCALAR", 3, 6,  15, "all"),
-        ("Fcst", "Period", "SCALAR", 6, 9,  20, "all"),
-        ("Fcst", "Period", "SCALAR", 9, 12,  5, "all"),
-        ("Fcst", "Period", "SCALAR", 12, 24, 10, "all"),
-        ("Fcst", "Period", "SCALAR", 24, 36, 15, "all"),
-        ("Fcst", "Period", "SCALAR", 36, 48, 20, "all"),
-        ("Fcst", "Period", "SCALAR", 48, 60, 10, "all"),
-        ("Fcst", "Period", "SCALAR", 60, 72, 17, "all"),
-        ("Fcst", "Period", "SCALAR", 72, 84, 12, "all"),
-        ("Fcst", "Period", "SCALAR", 84, 96, 13, "all"),
-        ("Fcst", "Period", "SCALAR", 96, 108, 18, "all"),
-
-        ("Fcst", "Period2", "SCALAR", 0, 3,  10, "all"),
-        ("Fcst", "Period2", "SCALAR", 3, 6,  15, "all"),
-        ("Fcst", "Period2", "SCALAR", 6, 9,  8, "all"),
-        ("Fcst", "Period2", "SCALAR", 9, 12,  5, "all"),
-        ("Fcst", "Period2", "SCALAR", 12, 24, 10, "all"),
-        ("Fcst", "Period2", "SCALAR", 24, 36, 15, "all"),
-        ("Fcst", "Period2", "SCALAR", 36, 48, 20, "all"),
-        ("Fcst", "Period2", "SCALAR", 48, 60, 17, "all"),
-        ("Fcst", "Period2", "SCALAR", 60, 72, 19, "all"),
-        ("Fcst", "Period2", "SCALAR", 72, 84, 12, "all"),
-        ("Fcst", "Period2", "SCALAR", 84, 96, 7, "all"),
-        ("Fcst", "Period2", "SCALAR", 96, 108, 6, "all"),
-
-        ("Fcst", "WindWaveHgt", "SCALAR", 0, 3,  10, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 3, 6,  15, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 6, 9,  25, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 9, 12,  5, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 12, 24, 10, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 24, 36, 45, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 36, 48, 20, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 48, 60, 30, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 60, 72, 40, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 72, 84, 20, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 84, 96, 20, "all"),
-        ("Fcst", "WindWaveHgt", "SCALAR", 96, 108, 20, "all"),
-
-        ("Fcst", "WaveHeight", "SCALAR", 0, 3,  10, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 3, 6,  15, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 6, 9,  25, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 9, 12,  5, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 12, 24, 10, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 24, 36, 45, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 36, 48, 20, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 48, 60, 30, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 60, 72, 40, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 72, 84, 20, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 84, 96, 20, "all"),
-        ("Fcst", "WaveHeight", "SCALAR", 96, 108, 20, "all"),
-
-    ]
-
-Delete_grids = [
-        ("Fcst", "PoP", "SFC", "all", "all"),
-        ("Fcst", "MaxT", "SFC", "all", "all"),
-        ("Fcst", "MinT", "SFC", "all", "all"),
-        ("Fcst", "T", "SFC", "all", "all"),
-        ("Fcst", "Td", "SFC", "all", "all"),
-        ("Fcst", "WindChill", "SFC", "all", "all"),
-        ("Fcst", "HeatIndex", "SFC", "all", "all"),
-        ("Fcst", "StormTotalSnow", "SFC", "all", "all"),
-        ("Fcst", "SnowLevel", "SFC", "all", "all"),
-        ("Fcst", "FzLevel", "SFC", "all", "all"),
-        ("Fcst", "RH", "SFC", "all", "all"),
-        ("Fcst", "Wind", "SFC", "all", "all"),
-        ("Fcst", "Sky", "SFC", "all", "all"),
-        ("Fcst", "WindGust", "SFC", "all", "all"),
-        ("Fcst", "Wx", "SFC", "all", "all"),
-        ("Fcst", "QPF", "SFC", "all", "all"),
-        ("Fcst", "SnowAmt", "SFC", "all", "all"),
-        ("Fcst", "IceAccum", "SFC", "all", "all"),
-
-        ("Fcst", "MaxRH", "SFC", "all", "all"),
-        ("Fcst", "MinRH", "SFC", "all", "all"),
-        ("Fcst", "RH", "SFC", "all", "all"),
-        ("Fcst", "TransWind", "SFC", "all", "all"),
-        ("Fcst", "LAL", "SFC", "all", "all"),
-        ("Fcst", "CWR", "SFC", "all", "all"),
-        ("Fcst", "QPF", "SFC", "all", "all"),
-        ("Fcst", "Haines", "SFC", "all", "all"),
-        ("Fcst", "MixHgt", "SFC", "all", "all"),
-        ("Fcst", "MarineLayer", "SFC", "all", "all"),
-        ("Fcst", "Wind20ft", "SFC", "all", "all"),
-        ("Fcst", "VentRate", "SFC", "all", "all"),
-        ("Fcst", "Stability", "SFC", "all", "all"),
-        ("Fcst", "HrsOfSun", "SFC", "all", "all"),
-        ("Fcst", "DSI", "SFC", "all", "all"),
-
-        ("Fcst", "Swell", "SFC", "all", "all"),
-        ("Fcst", "Swell2", "SFC", "all", "all"),
-        ("Fcst", "Period", "SFC", "all", "all"),
-        ("Fcst", "Period2", "SFC", "all", "all"),
-        ("Fcst", "WaveHeight", "SFC", "all", "all"),
-        ("Fcst", "WindWaveHgt", "SFC", "all", "all"),
-        
-        ("Fcst", "Hazards", "SFC", "all", "all"),
-        ("Fcst", "pwsD34", "SFC", "all", "all"),
-        ("Fcst", "pwsN34", "SFC", "all", "all"),
-        ("Fcst", "pwsD64", "SFC", "all", "all"),
-        ("Fcst", "pwsN64", "SFC", "all", "all"),
-        ]
-
-Delete_grids_specific = [
-        ("Fcst", "PoP", "SFC", -300, 300),
-        ("Fcst", "MaxT", "SFC", -300, 300),
-        ("Fcst", "MinT", "SFC", -300, 300),
-        ("Fcst", "T", "SFC", -300, 300),
-        ("Fcst", "Td", "SFC", -300, 300),
-        ("Fcst", "WindChill", "SFC", -300, 300),
-        ("Fcst", "HeatIndex", "SFC", -300, 300),
-        ("Fcst", "StormTotalSnow", "SFC", -300, 300),
-        ("Fcst", "SnowLevel", "SFC", -300, 300),
-        ("Fcst", "FzLevel", "SFC", -300, 300),
-        ("Fcst", "RH", "SFC", -300, 300),
-        ("Fcst", "Wind", "SFC", -300, 300),
-        ("Fcst", "Sky", "SFC", -300, 300),
-        ("Fcst", "WindGust", "SFC", -300, 300),
-        ("Fcst", "Wx", "SFC", -300, 300),
-        ("Fcst", "QPF", "SFC", -300, 300),
-        ("Fcst", "SnowAmt", "SFC", -300, 300),
-        ("Fcst", "IceAccum", "SFC", -300, 300),
-
-        ("Fcst", "MaxRH", "SFC", -300, 300),
-        ("Fcst", "MinRH", "SFC", -300, 300),
-        ("Fcst", "RH", "SFC", -300, 300),
-        ("Fcst", "TransWind", "SFC", -300, 300),
-        ("Fcst", "LAL", "SFC", -300, 300),
-        ("Fcst", "CWR", "SFC", -300, 300),
-        ("Fcst", "QPF", "SFC", -300, 300),
-        ("Fcst", "Haines", "SFC", -300, 300),
-        ("Fcst", "MixHgt", "SFC", -300, 300),
-        ("Fcst", "MarineLayer", "SFC", -300, 300),
-        ("Fcst", "Wind20ft", "SFC", -300, 300),
-        ("Fcst", "VentRate", "SFC", -300, 300),
-        ("Fcst", "Stability", "SFC", -300, 300),
-        ("Fcst", "HrsOfSun", "SFC", -300, 300),
-        ("Fcst", "DSI", "SFC", -300, 300),
-
-        ("Fcst", "Swell", "SFC", -300, 300),
-        ("Fcst", "Swell2", "SFC", -300, 300),
-        ("Fcst", "Period", "SFC", -300, 300),
-        ("Fcst", "Period2", "SFC", -300, 300),
-        ("Fcst", "WaveHeight", "SFC", -300, 300),
-        ("Fcst", "WindWaveHgt", "SFC", -300, 300),
-        
-        ("Fcst", "Hazards", "SFC", -300, 300),
-        ]
-
-
-scripts = [
-    {    
-    "name":"CreateGrids_Today",
-    "commentary": "Create Grids starting Today",
-    "productType": None,
-    "createGrids": TestScript.general_createGrids,
-    "gridsStartTime": "6am Local Today",
-    "drtTime": "6am Local Today",
-    },
-    {    
-    "name":"CreatePublicGrids_Today",
-    "commentary": "Create Grids for All Products starting Today",
-    "productType": None,
-    "createGrids": Public_createGrids,
-    "gridsStartTime": "6am Local Today",
-    "drtTime": "6am Local Today",
-    },
-    {    
-    "name":"CreateFireGrids_Today",
-    "commentary": "Create Grids for All Products starting Today",
-    "productType": None,
-    "createGrids": Fire_createGrids,
-    "gridsStartTime": "6am Local Today",
-    "drtTime": "6am Local Today",
-    },
-    {    
-    "name":"CreateMarineGrids_Today",
-    "commentary": "Create Grids for All Products starting Today",
-    "productType": None,
-    "createGrids": Marine_createGrids,
-    "gridsStartTime": "6am Local Today",
-    "drtTime": "6am Local Today",
-    },
-    {    
-    "name":"CreateHazardGrids_Today",
-    "commentary": "Create Grids for All Products starting Today",
-    "productType": None,
-    "createGrids": Hazard_createGrids,
-    "gridsStartTime": "6am Local Today",
-    "drtTime": "6am Local Today",
-    },
-    {    
-    "name":"DeleteGrids",
-    "commentary": "Delete ALL Grids",
-    "productType": None,
-    "deleteGrids": Delete_grids,
-    },
-    {    
-    "name":"DeleteGrids_Today",
-    "commentary": "Delete Grids from -300 to 300",
-    "productType": None,
-    "deleteGrids": Delete_grids_specific,
-    "gridsStartTime": "6am Local Today",
-    "drtTime": "6am Local Today",
-    },
-    {    
-    "name":"DeleteGrids_2010",
-    "commentary": "Delete Grids from -300 to 300",
-    "productType": None,
-    "deleteGrids": Delete_grids_specific, 
-    "gridsStartTime": "6am Local",
-    "drtTime": "6am Local",
-    },
-    {    
-    "name":"CreateGrids_2010",
-    "commentary": "Create Grids starting in 2010",
-    "productType": None,
-    "createGrids": TestScript.general_createGrids,
-    "gridsStartTime": "6am Local",
-    "drtTime": "6am Local",
-    },
-
-    
-    {    
-    "name":"WxProb1",  
-    "commentary": """
-           To test the WxProb_QC_Tool (Spriggs, Barker)
-           SChc RW -- SChc RW Areas F -- Areas F
-           
-        Quick Summary of Rules:
-        --Probability terms stay probability after change (if any)
-        --Coverage terms stay coverage after change (if any)
-        --Non-PoP-related Wx always gets a "free pass" and is preserved as-is
-        --PoP < 15:  Any PoP-related Wx becomes 
-        --PoP > 14 AND PoP-related Wx types are not present:  create a default Wx type (RW-)
-        --Only "high" key and its equivalent values are changed unless PoP becomes
-          less than coverage index's implied value
-""",
-    "productType": None,
-    "createGrids": [
-       # No RW where PoP < 15
-       ("Fcst", "PoP", "SCALAR", 0, 12, 20, ["AboveElev"]),
-       ("Fcst", "PoP", "SCALAR", 0, 12, 10, ["BelowElev"]),
-       ("Fcst", "Wx", "WEATHER", 0, 12, "SChc:RW:-::", "all"),
-
-       # No RW where PoP < 15 -- Fog everywhere
-       ("Fcst", "PoP", "SCALAR", 12, 24, 20, ["AboveElev"]),
-       ("Fcst", "PoP", "SCALAR", 12, 24, 10, ["BelowElev"]),
-       ("Fcst", "Wx", "WEATHER", 12, 24,
-        "SChc:RW:-::^Areas:F:::", "all"),
-
-       # RW where PoP > 15Fog everywhere
-       ("Fcst", "PoP", "SCALAR", 24, 36, 20, ["AboveElev"]),
-       ("Fcst", "PoP", "SCALAR", 24, 36, 10, ["BelowElev"]),
-       ("Fcst", "Wx", "WEATHER", 24, 36,
-        "Areas:F:::", "all"),
-
-       # RW where PoP > 15
-       ("Fcst", "PoP", "SCALAR", 36, 48, 20, ["AboveElev"]),
-       ("Fcst", "PoP", "SCALAR", 36, 48, 10, ["BelowElev"]),
-       ("Fcst", "Wx", "WEATHER", 36, 48,
-        "NoWx", "all"),
-       
-       # RW where PoP > 15, retain F and BS 
-       ("Fcst", "PoP", "SCALAR", 48, 60, 20, ["AboveElev"]),
-       ("Fcst", "PoP", "SCALAR", 48, 60, 10, ["BelowElev"]),
-       ("Fcst", "Wx", "WEATHER", 48, 60,
-        "Wide:F:+:1/4SM:^Def:BS::1/4SM:", "all"),
-       
-       # RW where PoP > 15,  
-       ("Fcst", "PoP", "SCALAR", 60, 72, 20, ["AboveElev"]),
-       ("Fcst", "PoP", "SCALAR", 60, 72, 10, ["BelowElev"]),
-       ("Fcst", "Wx", "WEATHER", 60, 72, 
-        "Lkly:RW:-::^Chc:T:::^Wide:F::1SM:", "all"),
-       
-       ],
-    },
-    ]
-
-def testScript(self, dataMgr, level="Site"):
-    time_6am = self.getAbsFromLocal(2010, 1, 1, 0, 0)
-    todayTR = self.getTimeRange("Today")
-    time_6am_today = todayTR.startTime()
-    print "time", time_6am_today
-    for script in scripts:
-        for entry in ["gridsStartTime", "drtTime"]:
-            if script.get(entry, None) == "6am Local":
-                script[entry] = time_6am
-            elif script.get(entry, None) == "6am Local Today":
-                script[entry] = time_6am_today
-    return TestScript.generalTestScript(self, dataMgr, scripts, {}, level=level)
-
-
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+# ----------------------------------------------------------------------------
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#
+# Create Grids
+#
+# Author: hansen
+# ----------------------------------------------------------------------------
+
+import TestScript
+
+Public_createGrids = [
+        ("Fcst", "MaxT", "SCALAR", "MaxTBegin -24", "MaxTEnd -24", 60, "all"),
+        ("Fcst", "MaxT", "SCALAR", "MaxTBegin", "MaxTEnd", 78, "all"),
+        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 24", "MaxTEnd + 24", 79, "all"),
+        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 48", "MaxTEnd + 48", 78, "all"),
+        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 72", "MaxTEnd + 72", 80, "all"),
+        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 96", "MaxTEnd + 96", 81, "all"),
+        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 120", "MaxTEnd + 120", 83, "all"),
+        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 144", "MaxTEnd + 144", 84, "all"),
+        ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 168", "MaxTEnd + 168", 86, "all"),
+
+        ("Fcst", "MinT", "SCALAR", "MinTBegin-24", "MinTEnd-24", 40, "all"),        
+        ("Fcst", "MinT", "SCALAR", "MinTBegin", "MinTEnd", 60, "all"),
+        ("Fcst", "MinT", "SCALAR", "MinTBegin + 24", "MinTEnd + 24", 68, "all"),
+        ("Fcst", "MinT", "SCALAR", "MinTBegin + 48", "MinTEnd + 48", 65, "all"),
+        ("Fcst", "MinT", "SCALAR", "MinTBegin + 72", "MinTEnd + 72", 64, "all"),
+        ("Fcst", "MinT", "SCALAR", "MinTBegin + 96", "MinTEnd + 96", 63, "all"),
+        ("Fcst", "MinT", "SCALAR", "MinTBegin + 120", "MinTEnd + 120", 66, "all"),
+        ("Fcst", "MinT", "SCALAR", "MinTBegin + 144", "MinTEnd + 144", 68, "all"),
+        ("Fcst", "MinT", "SCALAR", "MinTBegin + 168", "MinTEnd + 168", 67, "all"),
+
+        ("Fcst", "T", "SCALAR", 0, 12,  55, "all"),
+        ("Fcst", "T", "SCALAR", 12, 24, 45, "all"),
+        ("Fcst", "T", "SCALAR", 24, 36, 75, "all"),
+        ("Fcst", "T", "SCALAR", 36, 48, 55, "all"),
+        ("Fcst", "T", "SCALAR", 48, 60, 65, "all"),
+        ("Fcst", "T", "SCALAR", 60, 72, 70, "all"),
+        ("Fcst", "T", "SCALAR", 72, 84, 80, "all"),
+        ("Fcst", "T", "SCALAR", 84, 96, 75, "all"),
+        ("Fcst", "T", "SCALAR", 96, 108, 75 , "all"),
+
+        ("Fcst", "Td", "SCALAR", 0, 12,  55, "all"),
+        ("Fcst", "Td", "SCALAR", 12, 24, 45, "all"),
+        ("Fcst", "Td", "SCALAR", 24, 36, 75, "all"),
+        ("Fcst", "Td", "SCALAR", 36, 48, 55, "all"),
+        ("Fcst", "Td", "SCALAR", 48, 60, 65, "all"),
+        ("Fcst", "Td", "SCALAR", 60, 72, 70, "all"),
+        ("Fcst", "Td", "SCALAR", 72, 84, 80, "all"),
+        ("Fcst", "Td", "SCALAR", 84, 96, 75, "all"),
+        ("Fcst", "Td", "SCALAR", 96, 108, 75 , "all"),
+
+        ("Fcst", "HeatIndex", "SCALAR", 0, 12,  95, "all"),
+        ("Fcst", "HeatIndex", "SCALAR", 12, 24, 105, "all"),
+        ("Fcst", "HeatIndex", "SCALAR", 24, 36, 103, "all"),
+        ("Fcst", "HeatIndex", "SCALAR", 36, 48, 85, "all"),
+        ("Fcst", "HeatIndex", "SCALAR", 48, 60, 75, "all"),
+        ("Fcst", "HeatIndex", "SCALAR", 60, 72, 110, "all"),
+        ("Fcst", "HeatIndex", "SCALAR", 72, 84, 120, "all"),
+        ("Fcst", "HeatIndex", "SCALAR", 84, 96, 108, "all"),
+        ("Fcst", "HeatIndex", "SCALAR", 96, 108, 75 , "all"),
+
+        ("Fcst", "WindChill", "SCALAR", 0, 12,  -20, "all"),
+        ("Fcst", "WindChill", "SCALAR", 12, 24, 10, "all"),
+        ("Fcst", "WindChill", "SCALAR", 24, 36, 20, "all"),
+        ("Fcst", "WindChill", "SCALAR", 36, 48, 55, "all"),
+        ("Fcst", "WindChill", "SCALAR", 48, 60, 65, "all"),
+        ("Fcst", "WindChill", "SCALAR", 60, 72, -30, "all"),
+        ("Fcst", "WindChill", "SCALAR", 72, 84, 30, "all"),
+        ("Fcst", "WindChill", "SCALAR", 84, 96, 20, "all"),
+        ("Fcst", "WindChill", "SCALAR", 96, 108, 10, "all"),
+
+        ("Fcst", "Wind", "VECTOR", 0, 12, (10, "SW"), "all"),
+        ("Fcst", "Wind", "VECTOR", 12, 24, (40, "SE"), "all"),
+        ("Fcst", "Wind", "VECTOR", 24, 36, (35, "NW"), "all"),
+        ("Fcst", "Wind", "VECTOR", 36, 48, (45, "W"), "all"),
+        ("Fcst", "Wind", "VECTOR", 48, 60, (50, "SW"), "all"),
+        ("Fcst", "Wind", "VECTOR", 60, 72, (45, "E"), "all"),
+        ("Fcst", "Wind", "VECTOR", 72, 84, (60, "W"), "all"),
+        ("Fcst", "Wind", "VECTOR", 84, 96,(55, "SW"), "all"),
+        ("Fcst", "Wind", "VECTOR", 96, 108,(55, "SW"), "all"),
+        ("Fcst", "Wind", "VECTOR", 108, 120, (42, "E"), "all"),
+        ("Fcst", "Wind", "VECTOR", 120, 132, (45, "E"), "all"),
+        ("Fcst", "Wind", "VECTOR", 132, 144, (46, "E"), "all"),
+        ("Fcst", "Wind", "VECTOR", 144, 156, (48, "E"), "all"),
+        ("Fcst", "Wind", "VECTOR", 156, 168, (60, "E"), "all"),
+        ("Fcst", "Wind", "VECTOR", 168, 180, (35, "E"), "all"),
+        ("Fcst", "Wind", "VECTOR", 180, 192, (50, "E"), "all"),
+
+        ("Fcst", "WindGust", "SCALAR", 0, 12,  25, "all"),
+        ("Fcst", "WindGust", "SCALAR", 12, 24, 0, "all"),
+        ("Fcst", "WindGust", "SCALAR", 24, 36, 45, "all"),
+        ("Fcst", "WindGust", "SCALAR", 36, 48, 0, "all"),
+        ("Fcst", "WindGust", "SCALAR", 48, 60, 0, "all"),
+        ("Fcst", "WindGust", "SCALAR", 60, 72, 0, "all"),
+        ("Fcst", "WindGust", "SCALAR", 72, 84, 0, "all"),
+        ("Fcst", "WindGust", "SCALAR", 84, 96, 0, "all"),
+        ("Fcst", "WindGust", "SCALAR", 96, 108, 0, "all"),
+
+        ("Fcst", "SnowAmt", "SCALAR", 0, 12,  2, "all"),
+        ("Fcst", "SnowAmt", "SCALAR", 12, 24, 0, "all"),
+        ("Fcst", "SnowAmt", "SCALAR", 24, 36, 3, "all"),
+        ("Fcst", "SnowAmt", "SCALAR", 36, 48, 5, "all"),
+        ("Fcst", "SnowAmt", "SCALAR", 48, 60, 10, "all"),
+        ("Fcst", "SnowAmt", "SCALAR", 60, 72, 0, "all"),
+        ("Fcst", "SnowAmt", "SCALAR", 72, 84, 2, "all"),
+        ("Fcst", "SnowAmt", "SCALAR", 84, 96, 4, "all"),
+        ("Fcst", "SnowAmt", "SCALAR", 96, 108, 0, "all"),
+
+        ("Fcst", "IceAccum", "SCALAR", 0, 12,  2, "all"),
+        ("Fcst", "IceAccum", "SCALAR", 12, 24, 0, "all"),
+        ("Fcst", "IceAccum", "SCALAR", 24, 36, 3, "all"),
+        ("Fcst", "IceAccum", "SCALAR", 36, 48, 5, "all"),
+        ("Fcst", "IceAccum", "SCALAR", 48, 60, 5, "all"),
+        ("Fcst", "IceAccum", "SCALAR", 60, 72, 0, "all"),
+        ("Fcst", "IceAccum", "SCALAR", 72, 84, 2, "all"),
+        ("Fcst", "IceAccum", "SCALAR", 84, 96, 4, "all"),
+        ("Fcst", "IceAccum", "SCALAR", 96, 108, 0, "all"),
+
+        ("Fcst", "SnowLevel", "SCALAR", 0, 12,  500, "all"),
+        ("Fcst", "SnowLevel", "SCALAR", 12, 24, 50, "all"),
+        ("Fcst", "SnowLevel", "SCALAR", 24, 36, 1000, "all"),
+        ("Fcst", "SnowLevel", "SCALAR", 36, 48, 500, "all"),
+        ("Fcst", "SnowLevel", "SCALAR", 48, 60, 100, "all"),
+        ("Fcst", "SnowLevel", "SCALAR", 60, 72, 1000, "all"),
+        ("Fcst", "SnowLevel", "SCALAR", 72, 84, 2000, "all"),
+        ("Fcst", "SnowLevel", "SCALAR", 84, 96, 0, "all"),
+        ("Fcst", "SnowLevel", "SCALAR", 96, 108, 0, "all"),
+        
+        ("Fcst", "FzLevel", "SCALAR", 0, 24, 5000, "all"),
+        ("Fcst", "FzLevel", "SCALAR", 24, 48, 10000, "all"),
+        ("Fcst", "FzLevel", "SCALAR", 48, 72, 4000, "all"),
+        ("Fcst", "FzLevel", "SCALAR", 72, 96, 20000, "all"),
+        ("Fcst", "FzLevel", "SCALAR", 96, 120, 3000, "all"),
+        ("Fcst", "FzLevel", "SCALAR", 120, 144, 16000, "all"),
+        ("Fcst", "FzLevel", "SCALAR", 144, 168, 18500, "all"),
+        ("Fcst", "FzLevel", "SCALAR", 168, 192, 21000, "all"),
+
+        ("Fcst", "Sky", "SCALAR", 0, 12, 100, "all"),
+        ("Fcst", "Sky", "SCALAR", 12, 24, 95, "all"),
+        ("Fcst", "Sky", "SCALAR", 24, 36, 0, "all"),
+        ("Fcst", "Sky", "SCALAR", 36, 48, 15, "all"),
+        ("Fcst", "Sky", "SCALAR", 48, 60, 30, "all"),
+        ("Fcst", "Sky", "SCALAR", 60, 72, 55, "all"),
+        ("Fcst", "Sky", "SCALAR", 72, 84, 65, "all"),
+        ("Fcst", "Sky", "SCALAR", 84, 96, 70, "all"),
+        ("Fcst", "Sky", "SCALAR", 96, 108, 30, "all"),
+        ("Fcst", "Sky", "SCALAR", 108, 120, 48, "all"),
+        ("Fcst", "Sky", "SCALAR", 120, 132, 100, "all"),
+        ("Fcst", "Sky", "SCALAR", 132, 144, 10, "all"),
+        ("Fcst", "Sky", "SCALAR", 144, 156, 75, "all"),
+        ("Fcst", "Sky", "SCALAR", 156, 168, 25, "all"),
+        ("Fcst", "Sky", "SCALAR", 168, 180, 20, "all"),
+        ("Fcst", "Sky", "SCALAR", 180, 192, 87, "all"),
+                
+        ("Fcst", "Wx", "WEATHER", 0, 12, "Patchy:F:+::", "all"),
+        ("Fcst", "Wx", "WEATHER", 12, 24, "Wide:T:::", "all"),
+        ("Fcst", "Wx", "WEATHER", 24, 36, "Chc:RW:-::", "all"),
+        ("Fcst", "Wx", "WEATHER", 36, 48, "Frq:R:--::", "all"),
+        ("Fcst", "Wx", "WEATHER", 48, 60, "Wide:ZR:-::", "all"),
+        ("Fcst", "Wx", "WEATHER", 60, 72, "Lkly:S:--::", "all"),
+        ("Fcst", "Wx", "WEATHER", 72, 84, "Wide:IP:--::", "all"),
+        ("Fcst", "Wx", "WEATHER", 84, 96, "Areas:BS:::", "all"),
+        ("Fcst", "Wx", "WEATHER", 96, 108, "Patchy:F:::", "all"),
+        ("Fcst", "Wx", "WEATHER", 108, 120, "Lkly:L:--::", "all"),
+        ("Fcst", "Wx", "WEATHER", 120, 132, "SChc:ZL:--::", "all"),
+        ("Fcst", "Wx", "WEATHER", 132, 144, "Num:T:::", "all"),
+        ("Fcst", "Wx", "WEATHER", 144, 156, "Iso:ZY:-::", "all"),
+        ("Fcst", "Wx", "WEATHER", 156, 168, "Areas:FR:::", "all"),
+        ("Fcst", "Wx", "WEATHER", 168, 180, "Chc:RW:-::", "all"),
+        ("Fcst", "Wx", "WEATHER", 180, 192, "Brf:R:m::", "all"),
+
+        ("Fcst", "PoP", "SCALAR", 0, 12, 0, "all"),
+        ("Fcst", "PoP", "SCALAR", 12, 24, 90 , "all"),
+        ("Fcst", "PoP", "SCALAR", 24, 36, 90, "all"),
+        ("Fcst", "PoP", "SCALAR", 36, 48, 90, "all"),
+        ("Fcst", "PoP", "SCALAR", 48, 60, 90, "all"),
+        ("Fcst", "PoP", "SCALAR", 60, 72, 70, "all"),
+        ("Fcst", "PoP", "SCALAR", 72, 84, 90, "all"),
+        ("Fcst", "PoP", "SCALAR", 84, 96, 0, "all"),
+        ("Fcst", "PoP", "SCALAR", 96, 108, 0, "all"),
+        ("Fcst", "PoP", "SCALAR", 108, 120, 70, "all"),
+        ("Fcst", "PoP", "SCALAR", 120, 132, 20, "all"),
+        ("Fcst", "PoP", "SCALAR", 132, 144, 70, "all"),
+        ("Fcst", "PoP", "SCALAR", 144, 156, 20, "all"),
+        ("Fcst", "PoP", "SCALAR", 156, 168, 0, "all"),
+        ("Fcst", "PoP", "SCALAR", 168, 180, 40, "all"),
+        ("Fcst", "PoP", "SCALAR", 180, 192, 90, "all"),
+
+        ]
+
+Hazard_createGrids = [
+        ("Fcst", "Hazards", "DISCRETE", 0, 96, "CF.Y^FF.A^MA.S^FW.W^BZ.W", "all"),
+        ]
+
+Fire_createGrids = [
+        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin-24", "MaxRHEnd-24", 60, "all"),
+        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin", "MaxRHEnd", 78, "all"),
+        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 24", "MaxRHEnd + 24", 80, "all"),
+        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 48", "MaxRHEnd + 48", 85, "all"),
+        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 72", "MaxRHEnd + 72", 90, "all"),
+        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 96", "MaxRHEnd + 96", 87, "all"),
+        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 120", "MaxRHEnd + 120", 88, "all"),
+        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 144", "MaxRHEnd + 144", 89, "all"),
+        ("Fcst", "MaxRH", "SCALAR", "MaxRHBegin + 168", "MaxRHEnd + 168", 90, "all"),
+        
+        ("Fcst", "MinRH", "SCALAR", "MinRHBegin-24", "MinRHEnd-24", 40, "all"),
+        ("Fcst", "MinRH", "SCALAR", "MinRHBegin", "MinRHEnd", 65, "all"),
+        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 24", "MinRHEnd + 24", 68, "all"),
+        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 48", "MinRHEnd + 48", 70, "all"),
+        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 72", "MinRHEnd + 72", 73, "all"), 
+        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 96", "MinRHEnd + 96", 74, "all"),
+        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 120", "MinRHEnd + 120", 72, "all"),
+        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 144", "MinRHEnd + 144", 70, "all"),
+        ("Fcst", "MinRH", "SCALAR", "MinRHBegin + 168", "MinRHEnd + 168", 71, "all"),
+        
+        ("Fcst", "RH", "SCALAR", 0, 12, 60, "all"),
+        ("Fcst", "RH", "SCALAR", 12, 24, 78, "all"),
+        ("Fcst", "RH", "SCALAR", 24, 36, 30, "all"),
+        ("Fcst", "RH", "SCALAR", 36, 48, 45, "all"),
+        ("Fcst", "RH", "SCALAR", 48, 60, 55, "all"),
+        ("Fcst", "RH", "SCALAR", 60, 72, 65, "all"),
+        ("Fcst", "RH", "SCALAR", 72, 84, 70, "all"),
+        ("Fcst", "RH", "SCALAR", 84, 96, 45, "all"),
+        ("Fcst", "RH", "SCALAR", 96, 108, 20, "all"),
+        ("Fcst", "RH", "SCALAR", 108, 120, 25, "all"),
+        ("Fcst", "RH", "SCALAR", 120, 132, 35, "all"),
+        ("Fcst", "RH", "SCALAR", 132, 144, 43, "all"),
+        ("Fcst", "RH", "SCALAR", 144, 156, 15, "all"),
+        ("Fcst", "RH", "SCALAR", 156, 168, 3, "all"),
+        ("Fcst", "RH", "SCALAR", 168, 180, 28, "all"),
+        ("Fcst", "RH", "SCALAR", 180, 192, 90, "all"),
+
+        ("Fcst", "TransWind", "VECTOR", 0, 12, (10, "SW"), "all"),
+        ("Fcst", "TransWind", "VECTOR", 12, 24, (5, "W"), "all"),
+        ("Fcst", "TransWind", "VECTOR", 24, 48, (10, "NW"), "all"),
+        ("Fcst", "TransWind", "VECTOR", 48, 72, (20, "W"), "all"),
+        ("Fcst", "TransWind", "VECTOR", 72, 96, (30, "W"), "all"),
+        ("Fcst", "TransWind", "VECTOR", 96, 120, (40, "W"), "all"),
+        ("Fcst", "TransWind", "VECTOR", 120, 144, (50, "W"), "all"),
+        ("Fcst", "TransWind", "VECTOR", 144, 168, (60, "W"), "all"),
+        ("Fcst", "TransWind", "VECTOR", 168, 192, (70, "W"), "all"),
+
+        ("Fcst", "LAL", "SCALAR", 0, 12, 1, "all"),
+        ("Fcst", "LAL", "SCALAR", 12, 24, 2, "all"),
+        ("Fcst", "LAL", "SCALAR", 24, 36, 3, "all"),
+        ("Fcst", "LAL", "SCALAR", 36, 48, 4, "all"),
+        ("Fcst", "LAL", "SCALAR", 48, 60, 5, "all"),
+        ("Fcst", "LAL", "SCALAR", 60, 72, 6, "all"),
+        ("Fcst", "LAL", "SCALAR", 72, 84, 3, "all"),
+        ("Fcst", "LAL", "SCALAR", 84, 96, 1, "all"),
+        ("Fcst", "LAL", "SCALAR", 96, 108, 2, "all"),
+        ("Fcst", "LAL", "SCALAR", 108, 120, 4, "all"),
+        ("Fcst", "LAL", "SCALAR", 120, 132, 5, "all"),
+        ("Fcst", "LAL", "SCALAR", 132, 144, 3, "all"),
+        ("Fcst", "LAL", "SCALAR", 144, 156, 2, "all"),
+        ("Fcst", "LAL", "SCALAR", 156, 168, 5, "all"),
+        ("Fcst", "LAL", "SCALAR", 168, 180, 6, "all"),
+        ("Fcst", "LAL", "SCALAR", 180, 192, 3, "all"),
+                        
+        ("Fcst", "CWR", "SCALAR", 0, 12, 0, "all"),
+        ("Fcst", "CWR", "SCALAR", 12, 24, 20, "all"),
+        ("Fcst", "CWR", "SCALAR", 24, 36, 30, "all"),
+        ("Fcst", "CWR", "SCALAR", 36, 48, 30, "all"),
+        ("Fcst", "CWR", "SCALAR", 48, 60, 45, "all"),
+        ("Fcst", "CWR", "SCALAR", 60, 72, 60, "all"),
+        ("Fcst", "CWR", "SCALAR", 72, 84, 25, "all"),
+        ("Fcst", "CWR", "SCALAR", 84, 96, 47, "all"),
+        ("Fcst", "CWR", "SCALAR", 96, 108, 34, "all"),
+        ("Fcst", "CWR", "SCALAR", 108, 120, 60, "all"),
+        ("Fcst", "CWR", "SCALAR", 120, 132, 55, "all"),
+        ("Fcst", "CWR", "SCALAR", 132, 144, 50, "all"),
+        ("Fcst", "CWR", "SCALAR", 144, 156, 20, "all"),
+        ("Fcst", "CWR", "SCALAR", 156, 168, 10, "all"),
+        ("Fcst", "CWR", "SCALAR", 168, 180, 5, "all"),
+        ("Fcst", "CWR", "SCALAR", 180, 192, 40, "all"),
+                
+        ("Fcst", "QPF", "SCALAR", 0, 12, 0, "all"),
+        ("Fcst", "QPF", "SCALAR", 12, 24, 0.05, "all"),
+        ("Fcst", "QPF", "SCALAR", 24, 36, 0.1, "all"),
+        ("Fcst", "QPF", "SCALAR", 36, 48, 0, "all"),
+        ("Fcst", "QPF", "SCALAR", 48, 60, 5, "all"),
+        ("Fcst", "QPF", "SCALAR", 60, 72, 4.5, "all"),
+        ("Fcst", "QPF", "SCALAR", 72, 84, 1.5, "all"),
+        ("Fcst", "QPF", "SCALAR", 84, 96, 2.5, "all"),
+        ("Fcst", "QPF", "SCALAR", 96, 108, 3.5, "all"),
+        ("Fcst", "QPF", "SCALAR", 108, 120, 4.0, "all"),
+        ("Fcst", "QPF", "SCALAR", 120, 132, 1.0, "all"),
+        ("Fcst", "QPF", "SCALAR", 132, 144, 2.0, "all"),
+        ("Fcst", "QPF", "SCALAR", 144, 156, 3.0, "all"),
+        ("Fcst", "QPF", "SCALAR", 156, 168, 1.3, "all"),
+        ("Fcst", "QPF", "SCALAR", 168, 180, 0.12, "all"),
+        ("Fcst", "QPF", "SCALAR", 180, 192, 0.34, "all"),
+        
+        ("Fcst", "Haines", "SCALAR", 0, 12, 2, "all"),
+        ("Fcst", "Haines", "SCALAR", 12, 24, 3, "all"),
+        ("Fcst", "Haines", "SCALAR", 24, 36, 4, "all"),
+        ("Fcst", "Haines", "SCALAR", 36, 48, 6, "all"),
+        ("Fcst", "Haines", "SCALAR", 48, 60, 2, "all"),
+        ("Fcst", "Haines", "SCALAR", 60, 72, 3, "all"),
+        ("Fcst", "Haines", "SCALAR", 72, 84, 2, "all"),
+        ("Fcst", "Haines", "SCALAR", 84, 96, 3, "all"),
+        ("Fcst", "Haines", "SCALAR", 96, 108, 5, "all"),
+        ("Fcst", "Haines", "SCALAR", 108, 120, 6, "all"),
+        ("Fcst", "Haines", "SCALAR", 120, 132, 3, "all"),
+        ("Fcst", "Haines", "SCALAR", 132, 144, 2, "all"),
+        ("Fcst", "Haines", "SCALAR", 144, 156, 3, "all"),
+        ("Fcst", "Haines", "SCALAR", 156, 168, 4, "all"),
+        ("Fcst", "Haines", "SCALAR", 168, 180, 3, "all"),
+        ("Fcst", "Haines", "SCALAR", 180, 192, 6, "all"),
+        
+        ("Fcst", "MixHgt", "SCALAR", 0, 24, 5000, "all"),
+        ("Fcst", "MixHgt", "SCALAR", 24, 48, 10000, "all"),
+        ("Fcst", "MixHgt", "SCALAR", 48, 72, 4000, "all"),
+        ("Fcst", "MixHgt", "SCALAR", 72, 96, 20000, "all"),
+        ("Fcst", "MixHgt", "SCALAR", 96, 120, 3000, "all"),
+        ("Fcst", "MixHgt", "SCALAR", 120, 144, 16000, "all"),
+        ("Fcst", "MixHgt", "SCALAR", 144, 168, 18500, "all"),
+        ("Fcst", "MixHgt", "SCALAR", 168, 192, 20000, "all"),
+
+        ("Fcst", "MarineLayer", "SCALAR", 0, 24, 1000, "all"),
+        ("Fcst", "MarineLayer", "SCALAR", 24, 48, 2000, "all"),
+        ("Fcst", "MarineLayer", "SCALAR", 48, 72, 4000, "all"),
+        ("Fcst", "MarineLayer", "SCALAR", 72, 96, 5280, "all"),
+        ("Fcst", "MarineLayer", "SCALAR", 96, 120, 6500, "all"),
+        ("Fcst", "MarineLayer", "SCALAR", 120, 144, 10000, "all"),
+        ("Fcst", "MarineLayer", "SCALAR", 144, 168, 12300, "all"),
+        ("Fcst", "MarineLayer", "SCALAR", 168, 192, 14500, "all"),
+
+        ("Fcst", "Wind20ft", "VECTOR", 0, 12, (5, "N"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 12, 24, (40, "NE"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 24, 36, (10, "NW"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 36, 48, (0, "N"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 48, 60, (125, "E"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 60, 72, (90, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 72, 84, (50, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 84, 96, (100, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 96, 108, (0, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 108, 120, (10, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 120, 132, (30, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 132, 144, (60, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 144, 156, (25, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 156, 168, (68, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 168, 180, (15, "S"), "all"),
+        ("Fcst", "Wind20ft", "VECTOR", 180, 192, (2, "S"), "all"),
+        
+        ("Fcst", "VentRate", "SCALAR", 0, 12, 160000, "all"),
+        ("Fcst", "VentRate", "SCALAR", 12, 24, 100000, "all"),
+        ("Fcst", "VentRate", "SCALAR", 24, 36, 50000, "all"),
+        ("Fcst", "VentRate", "SCALAR", 36, 48, 20000, "all"),
+        ("Fcst", "VentRate", "SCALAR", 48, 60, 70000, "all"),
+        ("Fcst", "VentRate", "SCALAR", 60, 144, 4000, "all"),
+        ("Fcst", "VentRate", "SCALAR", 144, 168, 6900, "all"),
+        ("Fcst", "VentRate", "SCALAR", 168, 192, 30000, "all"),
+        
+        ("Fcst", "Stability", "SCALAR", 0, 12, 1, "all"),
+        ("Fcst", "Stability", "SCALAR", 12, 24, 2, "all"), 
+        ("Fcst", "Stability", "SCALAR", 24, 36, 1, "all"),
+        ("Fcst", "Stability", "SCALAR", 36, 48, 3, "all"),
+        ("Fcst", "Stability", "SCALAR", 48, 60, 4, "all"),
+        ("Fcst", "Stability", "SCALAR", 60, 72, 5, "all"),
+        ("Fcst", "Stability", "SCALAR", 72, 84, 1, "all"),
+        ("Fcst", "Stability", "SCALAR", 84, 96, 2, "all"),
+        ("Fcst", "Stability", "SCALAR", 96, 108, 3, "all"),
+        ("Fcst", "Stability", "SCALAR", 108, 120, 4, "all"),
+        ("Fcst", "Stability", "SCALAR", 120, 132, 5, "all"),
+        ("Fcst", "Stability", "SCALAR", 132, 144, 4, "all"),
+        ("Fcst", "Stability", "SCALAR", 144, 156, 3, "all"),
+        ("Fcst", "Stability", "SCALAR", 156, 168, 2, "all"),
+        ("Fcst", "Stability", "SCALAR", 168, 180, 1, "all"),
+        ("Fcst", "Stability", "SCALAR", 180, 192, 3, "all"),
+        
+        ("Fcst", "HrsOfSun", "SCALAR", 0, 24, 6, "all"),
+        ("Fcst", "HrsOfSun", "SCALAR", 24, 48, 7, "all"),
+        ("Fcst", "HrsOfSun", "SCALAR", 48, 72, 5, "all"),
+        ("Fcst", "HrsOfSun", "SCALAR", 72, 96, 5, "all"),
+        ("Fcst", "HrsOfSun", "SCALAR", 96, 120, 5, "all"),
+        ("Fcst", "HrsOfSun", "SCALAR", 120, 144, 5, "all"),
+        ("Fcst", "HrsOfSun", "SCALAR", 144, 168, 5, "all"),
+        ("Fcst", "HrsOfSun", "SCALAR", 168, 192, 5, "all"),
+        
+        ("Fcst", "DSI", "SCALAR", 0, 12, 0, "all"),
+        ("Fcst", "DSI", "SCALAR", 12, 24, 2, "all"),
+        ("Fcst", "DSI", "SCALAR", 24, 36, 6, "all"),
+        ("Fcst", "DSI", "SCALAR", 36, 48, 1, "all"),
+        ("Fcst", "DSI", "SCALAR", 48, 60, 5, "all"),
+        ("Fcst", "DSI", "SCALAR", 60, 72, 4, "all"),
+        ("Fcst", "DSI", "SCALAR", 72, 84, 3, "all"),
+        ("Fcst", "DSI", "SCALAR", 84, 96, 2, "all"),
+        ("Fcst", "DSI", "SCALAR", 96, 108, 1, "all"),
+        ("Fcst", "DSI", "SCALAR", 108, 120, 0, "all"),
+        ("Fcst", "DSI", "SCALAR", 120, 132, 5, "all"),
+        ("Fcst", "DSI", "SCALAR", 132, 144, 4, "all"),
+        ("Fcst", "DSI", "SCALAR", 144, 156, 3, "all"),
+        ("Fcst", "DSI", "SCALAR", 156, 168, 2, "all"),
+        ("Fcst", "DSI", "SCALAR", 168, 180, 1, "all"),
+        ("Fcst", "DSI", "SCALAR", 180, 192, 0, "all"),
+        ]
+
+
+Marine_createGrids = [
+        ("Fcst", "Swell", "VECTOR", 0, 3, (10, "SW"), "all"),
+        ("Fcst", "Swell", "VECTOR", 3, 6, (20, "W"), "all"),
+        ("Fcst", "Swell", "VECTOR", 6, 9, (30, "W"), "all"),
+        ("Fcst", "Swell", "VECTOR", 9, 12, (20, "SW"), "all"),
+        ("Fcst", "Swell", "VECTOR", 12, 18, (40, "SE"), "all"),
+        ("Fcst", "Swell", "VECTOR", 18, 24, (40, "SW"), "all"),
+        ("Fcst", "Swell", "VECTOR", 24, 36, (35, "NW"), "all"),
+        ("Fcst", "Swell", "VECTOR", 36, 48, (45, "W"), "all"),
+        ("Fcst", "Swell", "VECTOR", 48, 60, (50, "SW"), "all"),
+        ("Fcst", "Swell", "VECTOR", 60, 72, (45, "E"), "all"),
+        ("Fcst", "Swell", "VECTOR", 72, 84, (60, "W"), "all"),
+        ("Fcst", "Swell", "VECTOR", 84, 96,(55, "SW"), "all"),
+        ("Fcst", "Swell", "VECTOR", 96, 108,(55, "SW"), "all"),
+        ("Fcst", "Swell", "VECTOR", 108, 120, (42, "E"), "all"),
+        ("Fcst", "Swell", "VECTOR", 120, 132, (45, "E"), "all"),
+        ("Fcst", "Swell", "VECTOR", 132, 144, (46, "E"), "all"),
+        ("Fcst", "Swell", "VECTOR", 144, 156, (48, "E"), "all"),
+        ("Fcst", "Swell", "VECTOR", 156, 168, (60, "E"), "all"),
+        ("Fcst", "Swell", "VECTOR", 168, 180, (35, "E"), "all"),
+        ("Fcst", "Swell", "VECTOR", 180, 192, (50, "E"), "all"),
+
+        ("Fcst", "Swell2", "VECTOR", 0, 3, (10, "NE"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 3, 6, (20, "E"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 6, 9, (30, "E"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 9, 12, (20, "SE"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 12, 18, (40, "SW"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 18, 24, (40, "SE"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 24, 36, (35, "NE"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 36, 48, (45, "E"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 48, 60, (50, "SE"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 60, 72, (45, "W"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 72, 84, (60, "E"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 84, 96,(55, "SE"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 96, 108,(55, "SE"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 108, 120, (42, "W"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 120, 132, (45, "W"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 132, 144, (46, "W"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 144, 156, (48, "W"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 156, 168, (60, "W"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 168, 180, (35, "W"), "all"),
+        ("Fcst", "Swell2", "VECTOR", 180, 192, (50, "W"), "all"),
+
+        ("Fcst", "Period", "SCALAR", 0, 3,  10, "all"),
+        ("Fcst", "Period", "SCALAR", 3, 6,  15, "all"),
+        ("Fcst", "Period", "SCALAR", 6, 9,  20, "all"),
+        ("Fcst", "Period", "SCALAR", 9, 12,  5, "all"),
+        ("Fcst", "Period", "SCALAR", 12, 24, 10, "all"),
+        ("Fcst", "Period", "SCALAR", 24, 36, 15, "all"),
+        ("Fcst", "Period", "SCALAR", 36, 48, 20, "all"),
+        ("Fcst", "Period", "SCALAR", 48, 60, 10, "all"),
+        ("Fcst", "Period", "SCALAR", 60, 72, 17, "all"),
+        ("Fcst", "Period", "SCALAR", 72, 84, 12, "all"),
+        ("Fcst", "Period", "SCALAR", 84, 96, 13, "all"),
+        ("Fcst", "Period", "SCALAR", 96, 108, 18, "all"),
+
+        ("Fcst", "Period2", "SCALAR", 0, 3,  10, "all"),
+        ("Fcst", "Period2", "SCALAR", 3, 6,  15, "all"),
+        ("Fcst", "Period2", "SCALAR", 6, 9,  8, "all"),
+        ("Fcst", "Period2", "SCALAR", 9, 12,  5, "all"),
+        ("Fcst", "Period2", "SCALAR", 12, 24, 10, "all"),
+        ("Fcst", "Period2", "SCALAR", 24, 36, 15, "all"),
+        ("Fcst", "Period2", "SCALAR", 36, 48, 20, "all"),
+        ("Fcst", "Period2", "SCALAR", 48, 60, 17, "all"),
+        ("Fcst", "Period2", "SCALAR", 60, 72, 19, "all"),
+        ("Fcst", "Period2", "SCALAR", 72, 84, 12, "all"),
+        ("Fcst", "Period2", "SCALAR", 84, 96, 7, "all"),
+        ("Fcst", "Period2", "SCALAR", 96, 108, 6, "all"),
+
+        ("Fcst", "WindWaveHgt", "SCALAR", 0, 3,  10, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 3, 6,  15, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 6, 9,  25, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 9, 12,  5, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 12, 24, 10, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 24, 36, 45, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 36, 48, 20, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 48, 60, 30, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 60, 72, 40, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 72, 84, 20, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 84, 96, 20, "all"),
+        ("Fcst", "WindWaveHgt", "SCALAR", 96, 108, 20, "all"),
+
+        ("Fcst", "WaveHeight", "SCALAR", 0, 3,  10, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 3, 6,  15, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 6, 9,  25, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 9, 12,  5, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 12, 24, 10, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 24, 36, 45, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 36, 48, 20, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 48, 60, 30, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 60, 72, 40, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 72, 84, 20, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 84, 96, 20, "all"),
+        ("Fcst", "WaveHeight", "SCALAR", 96, 108, 20, "all"),
+
+    ]
+
+Delete_grids = [
+        ("Fcst", "PoP", "SFC", "all", "all"),
+        ("Fcst", "MaxT", "SFC", "all", "all"),
+        ("Fcst", "MinT", "SFC", "all", "all"),
+        ("Fcst", "T", "SFC", "all", "all"),
+        ("Fcst", "Td", "SFC", "all", "all"),
+        ("Fcst", "WindChill", "SFC", "all", "all"),
+        ("Fcst", "HeatIndex", "SFC", "all", "all"),
+        ("Fcst", "StormTotalSnow", "SFC", "all", "all"),
+        ("Fcst", "SnowLevel", "SFC", "all", "all"),
+        ("Fcst", "FzLevel", "SFC", "all", "all"),
+        ("Fcst", "RH", "SFC", "all", "all"),
+        ("Fcst", "Wind", "SFC", "all", "all"),
+        ("Fcst", "Sky", "SFC", "all", "all"),
+        ("Fcst", "WindGust", "SFC", "all", "all"),
+        ("Fcst", "Wx", "SFC", "all", "all"),
+        ("Fcst", "QPF", "SFC", "all", "all"),
+        ("Fcst", "SnowAmt", "SFC", "all", "all"),
+        ("Fcst", "IceAccum", "SFC", "all", "all"),
+
+        ("Fcst", "MaxRH", "SFC", "all", "all"),
+        ("Fcst", "MinRH", "SFC", "all", "all"),
+        ("Fcst", "RH", "SFC", "all", "all"),
+        ("Fcst", "TransWind", "SFC", "all", "all"),
+        ("Fcst", "LAL", "SFC", "all", "all"),
+        ("Fcst", "CWR", "SFC", "all", "all"),
+        ("Fcst", "QPF", "SFC", "all", "all"),
+        ("Fcst", "Haines", "SFC", "all", "all"),
+        ("Fcst", "MixHgt", "SFC", "all", "all"),
+        ("Fcst", "MarineLayer", "SFC", "all", "all"),
+        ("Fcst", "Wind20ft", "SFC", "all", "all"),
+        ("Fcst", "VentRate", "SFC", "all", "all"),
+        ("Fcst", "Stability", "SFC", "all", "all"),
+        ("Fcst", "HrsOfSun", "SFC", "all", "all"),
+        ("Fcst", "DSI", "SFC", "all", "all"),
+
+        ("Fcst", "Swell", "SFC", "all", "all"),
+        ("Fcst", "Swell2", "SFC", "all", "all"),
+        ("Fcst", "Period", "SFC", "all", "all"),
+        ("Fcst", "Period2", "SFC", "all", "all"),
+        ("Fcst", "WaveHeight", "SFC", "all", "all"),
+        ("Fcst", "WindWaveHgt", "SFC", "all", "all"),
+        
+        ("Fcst", "Hazards", "SFC", "all", "all"),
+        ("Fcst", "pwsD34", "SFC", "all", "all"),
+        ("Fcst", "pwsN34", "SFC", "all", "all"),
+        ("Fcst", "pwsD64", "SFC", "all", "all"),
+        ("Fcst", "pwsN64", "SFC", "all", "all"),
+        ]
+
+Delete_grids_specific = [
+        ("Fcst", "PoP", "SFC", -300, 300),
+        ("Fcst", "MaxT", "SFC", -300, 300),
+        ("Fcst", "MinT", "SFC", -300, 300),
+        ("Fcst", "T", "SFC", -300, 300),
+        ("Fcst", "Td", "SFC", -300, 300),
+        ("Fcst", "WindChill", "SFC", -300, 300),
+        ("Fcst", "HeatIndex", "SFC", -300, 300),
+        ("Fcst", "StormTotalSnow", "SFC", -300, 300),
+        ("Fcst", "SnowLevel", "SFC", -300, 300),
+        ("Fcst", "FzLevel", "SFC", -300, 300),
+        ("Fcst", "RH", "SFC", -300, 300),
+        ("Fcst", "Wind", "SFC", -300, 300),
+        ("Fcst", "Sky", "SFC", -300, 300),
+        ("Fcst", "WindGust", "SFC", -300, 300),
+        ("Fcst", "Wx", "SFC", -300, 300),
+        ("Fcst", "QPF", "SFC", -300, 300),
+        ("Fcst", "SnowAmt", "SFC", -300, 300),
+        ("Fcst", "IceAccum", "SFC", -300, 300),
+
+        ("Fcst", "MaxRH", "SFC", -300, 300),
+        ("Fcst", "MinRH", "SFC", -300, 300),
+        ("Fcst", "RH", "SFC", -300, 300),
+        ("Fcst", "TransWind", "SFC", -300, 300),
+        ("Fcst", "LAL", "SFC", -300, 300),
+        ("Fcst", "CWR", "SFC", -300, 300),
+        ("Fcst", "QPF", "SFC", -300, 300),
+        ("Fcst", "Haines", "SFC", -300, 300),
+        ("Fcst", "MixHgt", "SFC", -300, 300),
+        ("Fcst", "MarineLayer", "SFC", -300, 300),
+        ("Fcst", "Wind20ft", "SFC", -300, 300),
+        ("Fcst", "VentRate", "SFC", -300, 300),
+        ("Fcst", "Stability", "SFC", -300, 300),
+        ("Fcst", "HrsOfSun", "SFC", -300, 300),
+        ("Fcst", "DSI", "SFC", -300, 300),
+
+        ("Fcst", "Swell", "SFC", -300, 300),
+        ("Fcst", "Swell2", "SFC", -300, 300),
+        ("Fcst", "Period", "SFC", -300, 300),
+        ("Fcst", "Period2", "SFC", -300, 300),
+        ("Fcst", "WaveHeight", "SFC", -300, 300),
+        ("Fcst", "WindWaveHgt", "SFC", -300, 300),
+        
+        ("Fcst", "Hazards", "SFC", -300, 300),
+        ]
+
+
+scripts = [
+    {    
+    "name":"CreateGrids_Today",
+    "commentary": "Create Grids starting Today",
+    "productType": None,
+    "createGrids": TestScript.general_createGrids,
+    "gridsStartTime": "6am Local Today",
+    "drtTime": "6am Local Today",
+    },
+    {    
+    "name":"CreatePublicGrids_Today",
+    "commentary": "Create Grids for All Products starting Today",
+    "productType": None,
+    "createGrids": Public_createGrids,
+    "gridsStartTime": "6am Local Today",
+    "drtTime": "6am Local Today",
+    },
+    {    
+    "name":"CreateFireGrids_Today",
+    "commentary": "Create Grids for All Products starting Today",
+    "productType": None,
+    "createGrids": Fire_createGrids,
+    "gridsStartTime": "6am Local Today",
+    "drtTime": "6am Local Today",
+    },
+    {    
+    "name":"CreateMarineGrids_Today",
+    "commentary": "Create Grids for All Products starting Today",
+    "productType": None,
+    "createGrids": Marine_createGrids,
+    "gridsStartTime": "6am Local Today",
+    "drtTime": "6am Local Today",
+    },
+    {    
+    "name":"CreateHazardGrids_Today",
+    "commentary": "Create Grids for All Products starting Today",
+    "productType": None,
+    "createGrids": Hazard_createGrids,
+    "gridsStartTime": "6am Local Today",
+    "drtTime": "6am Local Today",
+    },
+    {    
+    "name":"DeleteGrids",
+    "commentary": "Delete ALL Grids",
+    "productType": None,
+    "deleteGrids": Delete_grids,
+    },
+    {    
+    "name":"DeleteGrids_Today",
+    "commentary": "Delete Grids from -300 to 300",
+    "productType": None,
+    "deleteGrids": Delete_grids_specific,
+    "gridsStartTime": "6am Local Today",
+    "drtTime": "6am Local Today",
+    },
+    {    
+    "name":"DeleteGrids_2010",
+    "commentary": "Delete Grids from -300 to 300",
+    "productType": None,
+    "deleteGrids": Delete_grids_specific, 
+    "gridsStartTime": "6am Local",
+    "drtTime": "6am Local",
+    },
+    {    
+    "name":"CreateGrids_2010",
+    "commentary": "Create Grids starting in 2010",
+    "productType": None,
+    "createGrids": TestScript.general_createGrids,
+    "gridsStartTime": "6am Local",
+    "drtTime": "6am Local",
+    },
+
+    
+    {    
+    "name":"WxProb1",  
+    "commentary": """
+           To test the WxProb_QC_Tool (Spriggs, Barker)
+           SChc RW -- SChc RW Areas F -- Areas F
+           
+        Quick Summary of Rules:
+        --Probability terms stay probability after change (if any)
+        --Coverage terms stay coverage after change (if any)
+        --Non-PoP-related Wx always gets a "free pass" and is preserved as-is
+        --PoP < 15:  Any PoP-related Wx becomes 
+        --PoP > 14 AND PoP-related Wx types are not present:  create a default Wx type (RW-)
+        --Only "high" key and its equivalent values are changed unless PoP becomes
+          less than coverage index's implied value
+""",
+    "productType": None,
+    "createGrids": [
+       # No RW where PoP < 15
+       ("Fcst", "PoP", "SCALAR", 0, 12, 20, ["AboveElev"]),
+       ("Fcst", "PoP", "SCALAR", 0, 12, 10, ["BelowElev"]),
+       ("Fcst", "Wx", "WEATHER", 0, 12, "SChc:RW:-::", "all"),
+
+       # No RW where PoP < 15 -- Fog everywhere
+       ("Fcst", "PoP", "SCALAR", 12, 24, 20, ["AboveElev"]),
+       ("Fcst", "PoP", "SCALAR", 12, 24, 10, ["BelowElev"]),
+       ("Fcst", "Wx", "WEATHER", 12, 24,
+        "SChc:RW:-::^Areas:F:::", "all"),
+
+       # RW where PoP > 15 -- Fog everywhere
+       ("Fcst", "PoP", "SCALAR", 24, 36, 20, ["AboveElev"]),
+       ("Fcst", "PoP", "SCALAR", 24, 36, 10, ["BelowElev"]),
+       ("Fcst", "Wx", "WEATHER", 24, 36,
+        "Areas:F:::", "all"),
+
+       # RW where PoP > 15
+       ("Fcst", "PoP", "SCALAR", 36, 48, 20, ["AboveElev"]),
+       ("Fcst", "PoP", "SCALAR", 36, 48, 10, ["BelowElev"]),
+       ("Fcst", "Wx", "WEATHER", 36, 48,
+        "NoWx", "all"),
+       
+       # RW where PoP > 15, retain F and BS 
+       ("Fcst", "PoP", "SCALAR", 48, 60, 20, ["AboveElev"]),
+       ("Fcst", "PoP", "SCALAR", 48, 60, 10, ["BelowElev"]),
+       ("Fcst", "Wx", "WEATHER", 48, 60,
+        "Wide:F:+:1/4SM:^Def:BS::1/4SM:", "all"),
+       
+       # RW where PoP > 15,  
+       ("Fcst", "PoP", "SCALAR", 60, 72, 20, ["AboveElev"]),
+       ("Fcst", "PoP", "SCALAR", 60, 72, 10, ["BelowElev"]),
+       ("Fcst", "Wx", "WEATHER", 60, 72, 
+        "Lkly:RW:-::^Chc:T:::^Wide:F::1SM:", "all"),
+       
+       ],
+    },
+    ]
+
+def testScript(self, dataMgr, level="Site"):
+    time_6am = self.getAbsFromLocal(2010, 1, 1, 0, 0)
+    todayTR = self.getTimeRange("Today")
+    time_6am_today = todayTR.startTime()
+    print("time", time_6am_today)
+    for script in scripts:
+        for entry in ["gridsStartTime", "drtTime"]:
+            if script.get(entry, None) == "6am Local":
+                script[entry] = time_6am
+            elif script.get(entry, None) == "6am Local Today":
+                script[entry] = time_6am_today
+    return TestScript.generalTestScript(self, dataMgr, scripts, {}, level=level)
+
+
diff --git a/cave/com.raytheon.viz.gfe/python/autotest/Hazard_HLS_TestScript.py b/cave/com.raytheon.viz.gfe/python/autotest/Hazard_HLS_TestScript.py
index 07912b96e1..e3a48de52f 100644
--- a/cave/com.raytheon.viz.gfe/python/autotest/Hazard_HLS_TestScript.py
+++ b/cave/com.raytheon.viz.gfe/python/autotest/Hazard_HLS_TestScript.py
@@ -1,684 +1,684 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# Hazard_HLS_TestScript
-#
-# Author:
-# ----------------------------------------------------------------------------
-
-
-useTest = """
-    def _useTestTCP(self):
-        return True
-        ##return False
-
-    def _inlandAreas(self):
-        return [
-            "FLZ052", "FLZ056", "FLZ057", "FLZ061", "FLZ043"
-            ]
-    def _coastalAreas(self):
-        return [
-            "FLZ139", "FLZ142", "FLZ148", "FLZ149", "FLZ050", "FLZ151", "FLZ155", "FLZ160",
-            "FLZ162", "FLZ165"
-            ]
-    def _marineAreas(self):
-        return [
-            "GMZ830", "GMZ850", "GMZ853", "GMZ856", "GMZ870", "GMZ873", "GMZ876"
-            ]    
-    def _cwa(self):
-        return "TBW"
-
-    def _cwa_descriptor(self):
-        return "CENTRAL WEST FLORIDA"
-
-    def _maor_descriptor(self):
-        return "WEST CENTRAL FLORIDA AND THE GULF OF MEXICO"
-
-    def _cwa_maor_descriptor(self):
-        return "WEST FLORIDA AND THE GULF OF MEXICO"
-
-    def _localReferencePoints(self):
-        # Give the name and lat/lon for each local reference point
-        return [
-                ("Tampa Bay, FL", (28.01, -82.48)),
-                ("Cape Coral, FL", (26.63, -82.00)),
-                ("Lakeland, FL", (28.04, -81.95)),
-                ("Sarasota, FL", (27.37, -82.55)),
-                ]     
-
-    def _localReferencePoints_defaults(self):
-        # Give a list of the local reference point names to be
-        #  turned on by default
-        return ["Tampa Bay, FL", "Sarasota, FL"]    
-    
-
-"""
-
-## We are setting up the segments which would result from combinations.
-##  Note that GMZ876 is not include in the combos, so ends up in a segment
-##  by itself.
-
-##     Segments [['FLZ151', 'FLZ052', 'GMZ830'],
-##               ['GMZ850', 'FLZ142', 'FLZ139', 'FLZ043'],
-##               ['FLZ160', 'FLZ061', 'GMZ853'],
-##               ['FLZ148', 'FLZ149', 'FLZ050'],
-##               ['FLZ162', 'GMZ856', 'FLZ057'],
-##               ['FLZ165', 'GMZ876']]
-
-##     Combos [
-##         (['FLZ139', 'FLZ142', 'FLZ043'], 'Region01'),
-##         (['GMZ850'], 'Region02'),
-##         (['FLZ148', 'FLZ149', 'FLZ050'], 'Region03'),
-##         (['FLZ052'], 'Region04'), (['FLZ151', 'GMZ830'], 'Region05'),
-##         (['FLZ160', 'FLZ061', 'GMZ853'], 'Region06'),
-##         (['FLZ162', 'GMZ856', 'FLZ155', 'FLZ057', 'FLZ056'], 'Region07'),
-##         (['FLZ165'], 'Region08')]
-
-##     New segments [['FLZ139', 'FLZ142', 'FLZ043'],
-##                   ['GMZ850'],
-##                   ['FLZ148', 'FLZ149', 'FLZ050'],
-##                   ['FLZ052'], ['FLZ151', 'GMZ830'],
-##                   ['FLZ160', 'FLZ061', 'GMZ853'],
-##                   ['FLZ162', 'GMZ856', 'FLZ057'],
-##                   ['FLZ165'],
-##                   ['GMZ876']]
-
-segmentSetUp = [
-        (1, "HU_W", ['FLZ139', 'FLZ142', 'FLZ043']), 
-        (2, "HU_W", ['GMZ850']),
-        (3, "HU_A", ['FLZ148', 'FLZ149', 'FLZ050']),
-        (4, "HU_A_TR_W", ['FLZ052']),
-        (5, "HU_A_TR_W", ['FLZ151', 'GMZ830']),
-        (6, "TR_W", ['FLZ160', 'FLZ061', 'GMZ853']),
-        (7, "TR_A", ['FLZ162', 'GMZ856', 'FLZ057']),
-        (8, "HU_S", ['FLZ165']),
-        (9, "HU_S", ['GMZ876']),
-        ]
-        
-def makeTestCases():
-    # 9 segments with these hazards and areas
-    segments = [hazard for segNum, hazard, areas in segmentSetUp]
-    contexts = ["NonEvent","PreEvent","Abbreviated","Watch","Warning","Conditions",
-                "PostEvent","ExtraTropical"]
-    uncertainty = ['Low', 'Average', 'High']
-    sitDict = {
-        "HU_W": ['Abbreviated', 'Warning', 'Conditions', 'ExtraTropical'],
-        "TR_W": ['Abbreviated', 'Warning', 'Conditions', 'ExtraTropical'],
-        "HU_A": ['Abbreviated', 'Watch'],
-        "TR_A": ['Abbreviated', 'Watch'],
-        "HU_A_TR_W": ['Abbreviated', 'Watch', 'Warning', 'Conditions', 'ExtraTropical'],
-        "HU_S": ['NonEvent', 'PreEvent', 'PostEvent'],
-        }
-    scenDict = {
-                "NonEvent": ["NonEvent"],
-                "PreEvent": ["Advancing", "Peripheral", "InSitu"],
-                "Abbreviated": ["FirstIssuance"],
-                "Watch": ["Advancing", "Peripheral", "InSitu"],
-                "Warning": ["Advancing", "Peripheral", "InSitu"],
-                "Conditions": ["Imminent", "Ongoing", "Diminishing"],
-                "PostEvent": ["Immediate", "LongTerm"],
-                "ExtraTropical": ["InSitu", "Completed"],
-                }
-    segCaseDict = {}
-    maxCases = 0
-    for i in range(len(segments)):
-        hazard = segments[i]
-        segCases = []
-        for sit in sitDict[hazard]:
-            scenarios = scenDict[sit]
-            for scen in scenDict[sit]:
-                segCases.append((sit, scen))
-        segCaseDict[i] = segCases
-        if len(segCases) > maxCases:
-            maxCases = len(segCases)
-    #print "segCases\n", segCaseDict
-    #print "maxCases", maxCases
-            
-    ##    # A test case is :
-    ##     #   EventContext, Uncertainty, segmentInfo, checkStrs
-    ##     #   segmentInfo is (segNum, situation, scenario) --
-    ##            we will assume all sections included
-    ##     # Generate a list of tests that cycles through all the situations and scenarios
-    ##     #   for each segment
-    testCases = []
-    caseIndex = 0
-    for i in range(maxCases):
-        # Make a test case
-        EventContext = getValue(contexts, i)
-        Uncertainty = getValue(uncertainty, i)
-        segs = []
-        for segNum in range(len(segments)):
-            segCaseList = segCaseDict[segNum]
-            segCase = getValue(segCaseList, i)
-            sit, scen = segCase
-            segs.append((segNum+1, sit, scen))
-        #checkStrs = checkStrings[i+1]  # Need to fix this later
-        checkStrs = ["National Weather Service Tampa Bay Ruskin FL"]
-        testCase = (EventContext, Uncertainty, segs, checkStrs)
-        testCases.append(testCase)
-    #printTestCases(testCases)
-    return testCases
-
-def printTestCases(testCases):
-    print "Test cases: Event Context, Uncertainty, Segments"
-    ind = 1
-    for testCase in testCases:
-        ec, un, segs, checkStrs= testCase
-        print `ind`, ec, un, "Segments:"
-        for seg in segs:
-            print '   ', seg, segmentSetUp[segs.index(seg)]
-        print '\n'
-        ind+= 1
-    return testCases
-                
-def getValue(list, index):
-    # If index too big, cycle around
-    index = index%len(list)
-    return list[index]
-
-def makeScript(testCase, testNum, createData=None):
-    # Use template script to make a script for this test case
-    #  Test case is (EventContext, Uncertainty, segments)
-    name = "HLS_"+ `testNum`
-    ec, un, segs, checkStrs = testCase
-    if testNum > 2: selectedSections=True
-    else: selectedSections = False
-    varDict, commentary = cmdLineVars(ec, un, segs, includeCommentary=True,
-                                      selectedSections=selectedSections)
-    varDict = str(varDict)
-    if createData is None:
-        createData = createDataGrids()
-    script = {
-     "name":name,
-     "commentary": commentary,
-     "productType" : "Hazard_HLS", 
-     "gridsStartTime": None,     
-     "createGrids": createHazardGrids() + createData,
-     "cmdLineVars" : varDict,     
-     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],
-     "checkStrings" : checkStrs,
-     }
-    return script
-     
-def makeScripts(testNum=None, createData=None):
-    testCases = makeTestCases()
-    scripts = []
-    if createData is None:
-        createData = createDataGrids()
-    if testNum is None:
-        testNum = 0
-    for testCase in testCases:
-        testNum+=1
-        scripts.append(makeScript(testCase, testNum, createData=createData))
-    return scripts
-
-
-def cmdLineVars(EventContext='NonEvent', Uncertainty='N/A', segmentList=None,
-                StormInfo='TCPAT5', includeCommentary=False, selectedSections=False):
-    sections = [
-        ('PrecautionaryPreparednessActions', None, 0, None),
-        ('Probability', None, 0, None),
-        ('Wind', '', 0, None),
-        ('StormSurgeTide', '', 0, None),
-        ('InlandFlooding', '', 0, None),
-        ('Tornadoes', '', 0, None)
-        ]
-    selectedSections = [
-        ('PrecautionaryPreparednessActions', None, 0, None),
-        ('Probability', None, 0, None),
-        ('Wind', '', 0, None),
-        #('StormSurgeTide', '', 0, None),
-        #('InlandFlooding', '', 0, None),
-        #('Tornadoes', '', 0, None)
-        ]
-    extraInfo = {'usePrev_HU_S_Headline': None, 'userHeadline_HU_S': None}    
-    if segmentList is None:
-        segmentList = [
-            (1, 'Warning', 'Advancing'),
-            (2, 'Conditions', 'Imminent'),
-            (3, 'Watch', 'Peripheral'),
-            (4, 'ExtraTropical', 'InSitu'),
-            (5, 'Abbreviated', 'FirstIssuance'),
-            (6, 'Abbreviated', 'FirstIssuance'),
-            (7, 'Abbreviated', 'FirstIssuance'),
-            (8, 'NonEvent', 'NonEvent'),
-            (9, 'PreEvent', 'InSitu'),
-            ]
-    segTemplate = [(segNum, areas) for segNum, hazard, areas in segmentSetUp]
-    commentary = EventContext +" " + Uncertainty + "\n"
-    if selectedSections:
-        sections = selectedSections
-    segs = []
-    for i in range(len(segTemplate)):
-        segNum, areas = segTemplate[i]
-        segNum, sit, scen = segmentList[i]
-        sitStr = sit.ljust(15)
-        scenStr = scen.ljust(15)
-        commentary+= "   " +`segNum`+" "+sitStr+" "+scenStr+" "+`segmentSetUp[segNum-1]` + "\n"
-        segs.append((segNum, areas, sit, scen, sections, extraInfo))
-    varDict =  {
-       ('StormInfo_entry:', 'StormInfo_entry'): '',
-       ('OverviewEditMode:', 'OverviewEditMode'): 'CreateFromGUI',
-       ('MainHeadline_entry:', 'MainHeadline_entry'): '',
-       ('NextUpdate:', 'NextUpdate'): 'Shortly',
-       ('EventContext:', 'EventContext'): EventContext,
-       ('StormInfo:', 'StormInfo'): StormInfo,
-       ('Uncertainty:', 'Uncertainty'): Uncertainty,
-       ('Issued By', 'issuedBy'): None,
-       ('MainHeadline:', 'MainHeadline'): 'Enter',
-       ('LocalReferencePoints:', 'LocalReferencePoints'): [('Tampa Bay, FL', (28.01, -82.48)), ('Sarasota, FL', (27.37, -82.55))],    
-       ('segments:', 'segments'): segs,
-       ('NextUpdate_entry:', 'NextUpdate_entry'): ''
-       }
-    if includeCommentary:
-        return varDict, commentary
-    return varDict
-
-
-def createHazardGrids():
-    return [
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"", "all"),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["FLZ139","FLZ142","FLZ043","GMZ850"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.A",["FLZ148", "FLZ149", "FLZ050"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W^HU.A",["FLZ151", "FLZ052", "GMZ830"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W^FF.A",["FLZ061","FLZ160","GMZ853"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.A", ["FLZ162","FLZ057","GMZ856"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.S^CF.W", ["GMZ876","FLZ165"]),
-          ]
-
-def createDataGrids(maxWind=60, maxGust=75, prob34=20, prob64=10,
-                    timeMax_pws34int=(30,36), timeMax_pws64int=(42,48)):
-    return [
-          ("Fcst", "prob34",  "SCALAR", 0, 120, prob34, "all"),
-          ("Fcst", "prob64",  "SCALAR", 0, 120, prob64, "all"),
-
-          ("Fcst", "Wind", "VECTOR", -6, 0, (10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 0, 6, (10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 6, 12, (20, "W"), "all"),
-          # Trop begin
-          ("Fcst", "Wind", "VECTOR", 12, 18, (35, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 18, 24, (40, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 24, 30, (50, "W"), "all"),
-          # Hurricane begin
-          ("Fcst", "Wind", "VECTOR", 30, 36, (maxWind-10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 36, 42, (maxWind, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 42, 48, (maxWind, "W"), "all"),
-          # Hurricane end
-          ("Fcst", "Wind", "VECTOR", 48, 54, (50, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 54, 60, (35, "W"), "all"),
-          # Trop end
-          ("Fcst", "Wind", "VECTOR", 60, 66, (10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 66, 120, (10, "W"), "all"),
-
-          ("Fcst", "WindGust", "SCALAR", -6, 120, maxGust, "all"),
-          
-          ("Fcst", "pws34int", "SCALAR", 0, 6, 10, "all"),
-          ("Fcst", "pws34int", "SCALAR", 6, 12, 10, "all"),
-          ("Fcst", "pws34int", "SCALAR", 12, 18, 20, "all"),
-          ("Fcst", "pws34int", "SCALAR", 18, 24, 30, "all"),
-          ("Fcst", "pws34int", "SCALAR", 24, 30, 40, "all"),
-          ("Fcst", "pws34int", "SCALAR", 30, 36, 80, "all"),
-          ("Fcst", "pws34int", "SCALAR", 36, 42, 80, "all"),
-          ("Fcst", "pws34int", "SCALAR", 42, 48, 60, "all"),
-          ("Fcst", "pws34int", "SCALAR", 48, 54, 40, "all"),
-          ("Fcst", "pws34int", "SCALAR", 54, 60, 10, "all"),
-          ("Fcst", "pws34int", "SCALAR", 60, 66, 10, "all"),
-          ("Fcst", "pws34int", "SCALAR", 66, 120, 10, "all"),
-
-          ("Fcst", "pws64int", "SCALAR", 0, 6, 10, "all"),
-          ("Fcst", "pws64int", "SCALAR", 6, 12, 20, "all"),
-          ("Fcst", "pws64int", "SCALAR", 12, 18, 20, "all"),
-          ("Fcst", "pws64int", "SCALAR", 18, 24, 30, "all"),
-          ("Fcst", "pws64int", "SCALAR", 24, 30, 40, "all"),
-          ("Fcst", "pws64int", "SCALAR", 30, 36, 60, "all"),
-          ("Fcst", "pws64int", "SCALAR", 36, 42, 60, "all"),
-          ("Fcst", "pws64int", "SCALAR", 42, 48, 80, "all"),
-          ("Fcst", "pws64int", "SCALAR", 48, 54, 40, "all"),
-          ("Fcst", "pws64int", "SCALAR", 54, 60, 10, "all"),
-          ("Fcst", "pws64int", "SCALAR", 60, 66, 10, "all"),
-          ("Fcst", "pws64int", "SCALAR", 66, 120, 10, "all"),
-          ]
-
-
-##  Tests 1-12                         have data values:
-##         maxWind==60, maxGust=75, prob34=50, prob64=20,
-
-##  Tests 12-24 (repeat 1-12 segments) have data values:
-##         maxWind=75, maxGust=90, prob34=20, prob64=10,
-
-##     Test cases: Event Context, Uncertainty, Segments
-##     1 NonEvent Low Segments:
-##         (1, 'Abbreviated', 'FirstIssuance') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Abbreviated', 'FirstIssuance') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Abbreviated', 'FirstIssuance') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Abbreviated', 'FirstIssuance') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Abbreviated', 'FirstIssuance') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Abbreviated', 'FirstIssuance') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Abbreviated', 'FirstIssuance') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'NonEvent', 'NonEvent') (8, 'HU_S', ['FLZ165'])
-##         (9, 'NonEvent', 'NonEvent') (9, 'HU_S', ['GMZ876'])
-
-
-##     2 PreEvent Average Segments:
-##         (1, 'Warning', 'Advancing') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Warning', 'Advancing') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Watch', 'Advancing') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Watch', 'Advancing') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Watch', 'Advancing') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Warning', 'Advancing') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Watch', 'Advancing') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PreEvent', 'Advancing') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PreEvent', 'Advancing') (9, 'HU_S', ['GMZ876'])
-
-
-##     3 Abbreviated High Segments:
-##         (1, 'Warning', 'Peripheral') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Warning', 'Peripheral') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Watch', 'Peripheral') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Watch', 'Peripheral') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Watch', 'Peripheral') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Warning', 'Peripheral') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Watch', 'Peripheral') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PreEvent', 'Peripheral') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PreEvent', 'Peripheral') (9, 'HU_S', ['GMZ876'])
-
-
-##     4 Watch Low Segments:
-##         (1, 'Warning', 'InSitu') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Warning', 'InSitu') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Watch', 'InSitu') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Watch', 'InSitu') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Watch', 'InSitu') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Warning', 'InSitu') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Watch', 'InSitu') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PreEvent', 'InSitu') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PreEvent', 'InSitu') (9, 'HU_S', ['GMZ876'])
-
-
-##     5 Warning Average Segments:
-##         (1, 'Conditions', 'Imminent') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Conditions', 'Imminent') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Abbreviated', 'FirstIssuance') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Warning', 'Advancing') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Warning', 'Advancing') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Conditions', 'Imminent') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Abbreviated', 'FirstIssuance') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PostEvent', 'Immediate') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PostEvent', 'Immediate') (9, 'HU_S', ['GMZ876'])
-
-
-##     6 Conditions High Segments:
-##         (1, 'Conditions', 'Ongoing') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Conditions', 'Ongoing') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Watch', 'Advancing') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Warning', 'Peripheral') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Warning', 'Peripheral') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Conditions', 'Ongoing') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Watch', 'Advancing') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PostEvent', 'LongTerm') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PostEvent', 'LongTerm') (9, 'HU_S', ['GMZ876'])
-
-
-##     7 PostEvent Low Segments:
-##         (1, 'Conditions', 'Diminishing') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Conditions', 'Diminishing') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Watch', 'Peripheral') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Warning', 'InSitu') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Warning', 'InSitu') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Conditions', 'Diminishing') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Watch', 'Peripheral') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'NonEvent', 'NonEvent') (8, 'HU_S', ['FLZ165'])
-##         (9, 'NonEvent', 'NonEvent') (9, 'HU_S', ['GMZ876'])
-
-
-##     8 ExtraTropical Average Segments:
-##         (1, 'ExtraTropical', 'InSitu') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'ExtraTropical', 'InSitu') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Watch', 'InSitu') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Conditions', 'Imminent') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Conditions', 'Imminent') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'ExtraTropical', 'InSitu') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Watch', 'InSitu') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PreEvent', 'Advancing') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PreEvent', 'Advancing') (9, 'HU_S', ['GMZ876'])
-
-
-##     9 NonEvent High Segments:
-##         (1, 'ExtraTropical', 'Completed') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'ExtraTropical', 'Completed') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Abbreviated', 'FirstIssuance') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Conditions', 'Ongoing') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Conditions', 'Ongoing') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'ExtraTropical', 'Completed') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Abbreviated', 'FirstIssuance') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PreEvent', 'Peripheral') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PreEvent', 'Peripheral') (9, 'HU_S', ['GMZ876'])
-
-
-##     10 PreEvent Low Segments:
-##         (1, 'Abbreviated', 'FirstIssuance') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Abbreviated', 'FirstIssuance') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Watch', 'Advancing') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'Conditions', 'Diminishing') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'Conditions', 'Diminishing') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Abbreviated', 'FirstIssuance') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Watch', 'Advancing') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PreEvent', 'InSitu') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PreEvent', 'InSitu') (9, 'HU_S', ['GMZ876'])
-
-
-##     11 Abbreviated Average Segments:
-##         (1, 'Warning', 'Advancing') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Warning', 'Advancing') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Watch', 'Peripheral') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'ExtraTropical', 'InSitu') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'ExtraTropical', 'InSitu') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Warning', 'Advancing') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Watch', 'Peripheral') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PostEvent', 'Immediate') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PostEvent', 'Immediate') (9, 'HU_S', ['GMZ876'])
-
-
-##     12 Watch High Segments:
-##         (1, 'Warning', 'Peripheral') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-##         (2, 'Warning', 'Peripheral') (2, 'HU_W', ['GMZ850'])
-##         (3, 'Watch', 'InSitu') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-##         (4, 'ExtraTropical', 'Completed') (4, 'HU_A_TR_W', ['FLZ052'])
-##         (5, 'ExtraTropical', 'Completed') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-##         (6, 'Warning', 'Peripheral') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-##         (7, 'Watch', 'InSitu') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-##         (8, 'PostEvent', 'LongTerm') (8, 'HU_S', ['FLZ165'])
-##         (9, 'PostEvent', 'LongTerm') (9, 'HU_S', ['GMZ876'])
-
-
-
-
-
-scripts = [   
-     {
-     "name":"HLS_segmentation_1", 
-     "productType" : "Hazard_HLS", 
-     "commentary": """Segmentation testing of HLS product.
-NonEvent N/A
-   1 'Warning',       'Advancing'       (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])
-   2 'Conditions',    'Imminent'        (2, 'HU_W', ['GMZ850'])
-   3 'Watch',         'Peripheral'      (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])
-   4 'ExtraTropical', 'InSitu'          (4, 'HU_A_TR_W', ['FLZ052'])
-   5 'Abbreviated',   'FirstIssuance'   (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])
-   6 'Abbreviated',   'FirstIssuance'   (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])
-   7 'Abbreviated',   'FirstIssuance'   (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])
-   8 'NonEvent',      'NonEvent'        (8, 'HU_S', ['FLZ165'])
-   9 'PreEvent',      'InSitu'          (9, 'HU_S', ['GMZ876'])
-             """,
-     "gridsStartTime": None,     
-     "createGrids": createHazardGrids() + createDataGrids(),
-     "cmdLineVars" :str(cmdLineVars()),     
-     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],
-     "checkStrings" : [
-        #"HURRICANE LOCAL STATEMENT",
-        "National Weather Service Tampa Bay Ruskin FL",
-       ],
-     },
-
-     {
-     "name":"HLS_segmentation_2", 
-     "productType" : "Hazard_HLS", 
-     "commentary": """Segmentation testing of HLS product. -- ETN codes""",
-     "gridsStartTime": None,     
-     "createGrids": createDataGrids() + [
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"", "all"),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W:1001",["FLZ139"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.W",["FLZ043"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["GMZ830"]),            
-          ],
-     "cmdLineVars" :str(cmdLineVars()),     
-     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],
-     "checkStrings" : [
-        #"HURRICANE LOCAL STATEMENT",
-        "National Weather Service Tampa Bay Ruskin FL",
-       ],
-     },
-
-     {
-     "name":"HLS_generalTests_1", 
-     "productType" : "Hazard_HLS", 
-     "commentary": """Looks at Overview Watches/Warnings and Wind Timing reporting""",
-     "gridsStartTime": None,     
-     "createGrids": createDataGrids() + [
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"", "all"),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["FLZ139"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.W",["FLZ043"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W",["GMZ830"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.W",["FLZ052"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W^HU.A",["FLZ148"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.A",["FLZ050"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.W^HI.A",["FLZ056"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.A",["FLZ061"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.A^FF.A",["GMZ870"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.A^CF.W",["FLZ057"]),
-
-          ("Fcst", "Wind", "VECTOR", -6, 0, (10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 0, 6, (10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 6, 12, (20, "W"), "all"),
-          # Trop begin
-          ("Fcst", "Wind", "VECTOR", 12, 18, (35, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 18, 24, (40, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 24, 30, (50, "W"), "all"),
-          # Hurricane begin
-          ("Fcst", "Wind", "VECTOR", 30, 36, (70, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 36, 42, (75, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 42, 48, (70, "W"), "all"),
-          # Hurricane end
-          ("Fcst", "Wind", "VECTOR", 48, 54, (50, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 54, 60, (35, "W"), "all"),
-          # Trop end
-          ("Fcst", "Wind", "VECTOR", 60, 66, (10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 66, 120, (10, "W"), "all"),
-          
-          ("Fcst", "WindGust", "SCALAR", -6, 120, 90, "all"),
-          ],
-     "cmdLineVars" :str(cmdLineVars()),     
-     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],
-     "checkStrings" : [
-        #"HURRICANE LOCAL STATEMENT",
-        "National Weather Service Tampa Bay Ruskin FL",
-       ],
-     },
-
-     {
-     "name":"HLS_generalTests_2", 
-     "productType" : "Hazard_HLS", 
-     "commentary": """Looks at Overview Watches/Warnings and Wind Timing reporting""",
-     "gridsStartTime": None,     
-     "createGrids": createDataGrids() + [
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"", "all"),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["GMZ873"]),# New
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["FLZ139"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.W",["FLZ057"]), # New -- TI.A UPG
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.W",["FLZ043"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W",["GMZ830"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.W",["FLZ052"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W^HU.A",["FLZ148"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.A",["FLZ050"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.W^HI.A",["FLZ056"]),            
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.A",["FLZ061"]),
-          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.A",["GMZ870"]),
-
-          ("Fcst", "Wind", "VECTOR", -6, 0, (10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 0, 6, (10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 6, 12, (20, "W"), "all"),
-          # Trop begin
-          ("Fcst", "Wind", "VECTOR", 12, 18, (35, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 18, 24, (40, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 24, 30, (50, "W"), "all"),
-          # Hurricane begin
-          ("Fcst", "Wind", "VECTOR", 30, 36, (70, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 36, 42, (75, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 42, 48, (70, "W"), "all"),
-          # Hurricane end
-          ("Fcst", "Wind", "VECTOR", 48, 54, (50, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 54, 60, (35, "W"), "all"),
-          # Trop end
-          ("Fcst", "Wind", "VECTOR", 60, 66, (10, "W"), "all"),
-          ("Fcst", "Wind", "VECTOR", 66, 120, (10, "W"), "all"),
-          ],
-     "cmdLineVars" :str(cmdLineVars()),     
-     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],
-     "checkStrings" : [
-        #"HURRICANE LOCAL STATEMENT",
-        "National Weather Service Tampa Bay Ruskin FL",
-       ],
-     },  
-    ]
-
-# Now add the generated scripts
-firstScripts = makeScripts()
-secondScripts = makeScripts(
-    len(firstScripts),
-    createDataGrids(maxWind=75, maxGust=90, prob34=50, prob64=20))
-
-scripts = scripts + firstScripts + secondScripts
-
-
-
-
-import TestScript
-def testScript(self, dataMgr):
-    defaults = {
-        "cmdLineVars" :"{('Source', 'source'): 'Colorado Emergency Management Agency Denver Colorado', ('Issued By', 'issuedBy'): None, ('EAS Level', 'eas'): 'NONE'}",
-        "publishGrids" : 1,
-        "vtecMode" : "O",
-        "clearHazardsTable": 1,
-        "gridsStartTime": "202001201_0000",
-        "orderStrings": 1,
-        "deleteGrids" : [
-            ("Fcst", "Hazards", "SFC", -48, 240),
-            ("Fcst", "pws34int", "SFC", -48, 240),
-            ("Fcst", "pws64int", "SFC", -48, 240),
-            ("Fcst", "prob34", "SFC", -48, 240),
-            ("Fcst", "prob64", "SFC", -48, 240),
-            ],
-        }
-    return TestScript.generalTestScript(self, dataMgr, scripts, defaults)
-
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+# ----------------------------------------------------------------------------

+# This software is in the public domain, furnished "as is", without technical

+# support, and with no warranty, express or implied, as to its usefulness for

+# any purpose.

+#

+# Hazard_HLS_TestScript

+#

+# Author:

+# ----------------------------------------------------------------------------

+

+

+useTest = """

+    def _useTestTCP(self):

+        return True

+        ##return False

+

+    def _inlandAreas(self):

+        return [

+            "FLZ052", "FLZ056", "FLZ057", "FLZ061", "FLZ043"

+            ]

+    def _coastalAreas(self):

+        return [

+            "FLZ139", "FLZ142", "FLZ148", "FLZ149", "FLZ050", "FLZ151", "FLZ155", "FLZ160",

+            "FLZ162", "FLZ165"

+            ]

+    def _marineAreas(self):

+        return [

+            "GMZ830", "GMZ850", "GMZ853", "GMZ856", "GMZ870", "GMZ873", "GMZ876"

+            ]    

+    def _cwa(self):

+        return "TBW"

+

+    def _cwa_descriptor(self):

+        return "CENTRAL WEST FLORIDA"

+

+    def _maor_descriptor(self):

+        return "WEST CENTRAL FLORIDA AND THE GULF OF MEXICO"

+

+    def _cwa_maor_descriptor(self):

+        return "WEST FLORIDA AND THE GULF OF MEXICO"

+

+    def _localReferencePoints(self):

+        # Give the name and lat/lon for each local reference point

+        return [

+                ("Tampa Bay, FL", (28.01, -82.48)),

+                ("Cape Coral, FL", (26.63, -82.00)),

+                ("Lakeland, FL", (28.04, -81.95)),

+                ("Sarasota, FL", (27.37, -82.55)),

+                ]     

+

+    def _localReferencePoints_defaults(self):

+        # Give a list of the local reference point names to be

+        #  turned on by default

+        return ["Tampa Bay, FL", "Sarasota, FL"]    

+    

+

+"""

+

+## We are setting up the segments which would result from combinations.

+##  Note that GMZ876 is not include in the combos, so ends up in a segment

+##  by itself.

+

+##     Segments [['FLZ151', 'FLZ052', 'GMZ830'],

+##               ['GMZ850', 'FLZ142', 'FLZ139', 'FLZ043'],

+##               ['FLZ160', 'FLZ061', 'GMZ853'],

+##               ['FLZ148', 'FLZ149', 'FLZ050'],

+##               ['FLZ162', 'GMZ856', 'FLZ057'],

+##               ['FLZ165', 'GMZ876']]

+

+##     Combos [

+##         (['FLZ139', 'FLZ142', 'FLZ043'], 'Region01'),

+##         (['GMZ850'], 'Region02'),

+##         (['FLZ148', 'FLZ149', 'FLZ050'], 'Region03'),

+##         (['FLZ052'], 'Region04'), (['FLZ151', 'GMZ830'], 'Region05'),

+##         (['FLZ160', 'FLZ061', 'GMZ853'], 'Region06'),

+##         (['FLZ162', 'GMZ856', 'FLZ155', 'FLZ057', 'FLZ056'], 'Region07'),

+##         (['FLZ165'], 'Region08')]

+

+##     New segments [['FLZ139', 'FLZ142', 'FLZ043'],

+##                   ['GMZ850'],

+##                   ['FLZ148', 'FLZ149', 'FLZ050'],

+##                   ['FLZ052'], ['FLZ151', 'GMZ830'],

+##                   ['FLZ160', 'FLZ061', 'GMZ853'],

+##                   ['FLZ162', 'GMZ856', 'FLZ057'],

+##                   ['FLZ165'],

+##                   ['GMZ876']]

+

+segmentSetUp = [

+        (1, "HU_W", ['FLZ139', 'FLZ142', 'FLZ043']), 

+        (2, "HU_W", ['GMZ850']),

+        (3, "HU_A", ['FLZ148', 'FLZ149', 'FLZ050']),

+        (4, "HU_A_TR_W", ['FLZ052']),

+        (5, "HU_A_TR_W", ['FLZ151', 'GMZ830']),

+        (6, "TR_W", ['FLZ160', 'FLZ061', 'GMZ853']),

+        (7, "TR_A", ['FLZ162', 'GMZ856', 'FLZ057']),

+        (8, "HU_S", ['FLZ165']),

+        (9, "HU_S", ['GMZ876']),

+        ]

+        

+def makeTestCases():

+    # 9 segments with these hazards and areas

+    segments = [hazard for segNum, hazard, areas in segmentSetUp]

+    contexts = ["NonEvent","PreEvent","Abbreviated","Watch","Warning","Conditions",

+                "PostEvent","ExtraTropical"]

+    uncertainty = ['Low', 'Average', 'High']

+    sitDict = {

+        "HU_W": ['Abbreviated', 'Warning', 'Conditions', 'ExtraTropical'],

+        "TR_W": ['Abbreviated', 'Warning', 'Conditions', 'ExtraTropical'],

+        "HU_A": ['Abbreviated', 'Watch'],

+        "TR_A": ['Abbreviated', 'Watch'],

+        "HU_A_TR_W": ['Abbreviated', 'Watch', 'Warning', 'Conditions', 'ExtraTropical'],

+        "HU_S": ['NonEvent', 'PreEvent', 'PostEvent'],

+        }

+    scenDict = {

+                "NonEvent": ["NonEvent"],

+                "PreEvent": ["Advancing", "Peripheral", "InSitu"],

+                "Abbreviated": ["FirstIssuance"],

+                "Watch": ["Advancing", "Peripheral", "InSitu"],

+                "Warning": ["Advancing", "Peripheral", "InSitu"],

+                "Conditions": ["Imminent", "Ongoing", "Diminishing"],

+                "PostEvent": ["Immediate", "LongTerm"],

+                "ExtraTropical": ["InSitu", "Completed"],

+                }

+    segCaseDict = {}

+    maxCases = 0

+    for i in range(len(segments)):

+        hazard = segments[i]

+        segCases = []

+        for sit in sitDict[hazard]:

+            scenarios = scenDict[sit]

+            for scen in scenDict[sit]:

+                segCases.append((sit, scen))

+        segCaseDict[i] = segCases

+        if len(segCases) > maxCases:

+            maxCases = len(segCases)

+    #print "segCases\n", segCaseDict

+    #print "maxCases", maxCases

+            

+    ##    # A test case is :

+    ##     #   EventContext, Uncertainty, segmentInfo, checkStrs

+    ##     #   segmentInfo is (segNum, situation, scenario) --

+    ##            we will assume all sections included

+    ##     # Generate a list of tests that cycles through all the situations and scenarios

+    ##     #   for each segment

+    testCases = []

+    caseIndex = 0

+    for i in range(maxCases):

+        # Make a test case

+        EventContext = getValue(contexts, i)

+        Uncertainty = getValue(uncertainty, i)

+        segs = []

+        for segNum in range(len(segments)):

+            segCaseList = segCaseDict[segNum]

+            segCase = getValue(segCaseList, i)

+            sit, scen = segCase

+            segs.append((segNum+1, sit, scen))

+        #checkStrs = checkStrings[i+1]  # Need to fix this later

+        checkStrs = ["National Weather Service Tampa Bay Ruskin FL"]

+        testCase = (EventContext, Uncertainty, segs, checkStrs)

+        testCases.append(testCase)

+    #printTestCases(testCases)

+    return testCases

+

+def printTestCases(testCases):

+    print("Test cases: Event Context, Uncertainty, Segments")

+    ind = 1

+    for testCase in testCases:

+        ec, un, segs, checkStrs= testCase

+        print(repr(ind), ec, un, "Segments:")

+        for seg in segs:

+            print('   ', seg, segmentSetUp[segs.index(seg)])

+        print('\n')

+        ind+= 1

+    return testCases

+                

+def getValue(list, index):

+    # If index too big, cycle around

+    index = index%len(list)

+    return list[index]

+

+def makeScript(testCase, testNum, createData=None):

+    # Use template script to make a script for this test case

+    #  Test case is (EventContext, Uncertainty, segments)

+    name = "HLS_"+ repr(testNum)

+    ec, un, segs, checkStrs = testCase

+    if testNum > 2: selectedSections=True

+    else: selectedSections = False

+    varDict, commentary = cmdLineVars(ec, un, segs, includeCommentary=True,

+                                      selectedSections=selectedSections)

+    varDict = str(varDict)

+    if createData is None:

+        createData = createDataGrids()

+    script = {

+     "name":name,

+     "commentary": commentary,

+     "productType" : "Hazard_HLS", 

+     "gridsStartTime": None,     

+     "createGrids": createHazardGrids() + createData,

+     "cmdLineVars" : varDict,     

+     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],

+     "checkStrings" : checkStrs,

+     }

+    return script

+     

+def makeScripts(testNum=None, createData=None):

+    testCases = makeTestCases()

+    scripts = []

+    if createData is None:

+        createData = createDataGrids()

+    if testNum is None:

+        testNum = 0

+    for testCase in testCases:

+        testNum+=1

+        scripts.append(makeScript(testCase, testNum, createData=createData))

+    return scripts

+

+

+def cmdLineVars(EventContext='NonEvent', Uncertainty='N/A', segmentList=None,

+                StormInfo='TCPAT5', includeCommentary=False, selectedSections=False):

+    sections = [

+        ('PrecautionaryPreparednessActions', None, 0, None),

+        ('Probability', None, 0, None),

+        ('Wind', '', 0, None),

+        ('StormSurgeTide', '', 0, None),

+        ('InlandFlooding', '', 0, None),

+        ('Tornadoes', '', 0, None)

+        ]

+    selectedSections = [

+        ('PrecautionaryPreparednessActions', None, 0, None),

+        ('Probability', None, 0, None),

+        ('Wind', '', 0, None),

+        #('StormSurgeTide', '', 0, None),

+        #('InlandFlooding', '', 0, None),

+        #('Tornadoes', '', 0, None)

+        ]

+    extraInfo = {'usePrev_HU_S_Headline': None, 'userHeadline_HU_S': None}    

+    if segmentList is None:

+        segmentList = [

+            (1, 'Warning', 'Advancing'),

+            (2, 'Conditions', 'Imminent'),

+            (3, 'Watch', 'Peripheral'),

+            (4, 'ExtraTropical', 'InSitu'),

+            (5, 'Abbreviated', 'FirstIssuance'),

+            (6, 'Abbreviated', 'FirstIssuance'),

+            (7, 'Abbreviated', 'FirstIssuance'),

+            (8, 'NonEvent', 'NonEvent'),

+            (9, 'PreEvent', 'InSitu'),

+            ]

+    segTemplate = [(segNum, areas) for segNum, hazard, areas in segmentSetUp]

+    commentary = EventContext +" " + Uncertainty + "\n"

+    if selectedSections:

+        sections = selectedSections

+    segs = []

+    for i in range(len(segTemplate)):

+        segNum, areas = segTemplate[i]

+        segNum, sit, scen = segmentList[i]

+        sitStr = sit.ljust(15)

+        scenStr = scen.ljust(15)

+        commentary+= "   " +repr(segNum)+" "+sitStr+" "+scenStr+" "+repr(segmentSetUp[segNum-1]) + "\n"

+        segs.append((segNum, areas, sit, scen, sections, extraInfo))

+    varDict =  {

+       ('StormInfo_entry:', 'StormInfo_entry'): '',

+       ('OverviewEditMode:', 'OverviewEditMode'): 'CreateFromGUI',

+       ('MainHeadline_entry:', 'MainHeadline_entry'): '',

+       ('NextUpdate:', 'NextUpdate'): 'Shortly',

+       ('EventContext:', 'EventContext'): EventContext,

+       ('StormInfo:', 'StormInfo'): StormInfo,

+       ('Uncertainty:', 'Uncertainty'): Uncertainty,

+       ('Issued By', 'issuedBy'): None,

+       ('MainHeadline:', 'MainHeadline'): 'Enter',

+       ('LocalReferencePoints:', 'LocalReferencePoints'): [('Tampa Bay, FL', (28.01, -82.48)), ('Sarasota, FL', (27.37, -82.55))],    

+       ('segments:', 'segments'): segs,

+       ('NextUpdate_entry:', 'NextUpdate_entry'): ''

+       }

+    if includeCommentary:

+        return varDict, commentary

+    return varDict

+

+

+def createHazardGrids():

+    return [

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"", "all"),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["FLZ139","FLZ142","FLZ043","GMZ850"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.A",["FLZ148", "FLZ149", "FLZ050"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W^HU.A",["FLZ151", "FLZ052", "GMZ830"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W^FF.A",["FLZ061","FLZ160","GMZ853"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.A", ["FLZ162","FLZ057","GMZ856"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.S^CF.W", ["GMZ876","FLZ165"]),

+          ]

+

+def createDataGrids(maxWind=60, maxGust=75, prob34=20, prob64=10,

+                    timeMax_pws34int=(30,36), timeMax_pws64int=(42,48)):

+    return [

+          ("Fcst", "prob34",  "SCALAR", 0, 120, prob34, "all"),

+          ("Fcst", "prob64",  "SCALAR", 0, 120, prob64, "all"),

+

+          ("Fcst", "Wind", "VECTOR", -6, 0, (10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 0, 6, (10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 6, 12, (20, "W"), "all"),

+          # Trop begin

+          ("Fcst", "Wind", "VECTOR", 12, 18, (35, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 18, 24, (40, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 24, 30, (50, "W"), "all"),

+          # Hurricane begin

+          ("Fcst", "Wind", "VECTOR", 30, 36, (maxWind-10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 36, 42, (maxWind, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 42, 48, (maxWind, "W"), "all"),

+          # Hurricane end

+          ("Fcst", "Wind", "VECTOR", 48, 54, (50, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 54, 60, (35, "W"), "all"),

+          # Trop end

+          ("Fcst", "Wind", "VECTOR", 60, 66, (10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 66, 120, (10, "W"), "all"),

+

+          ("Fcst", "WindGust", "SCALAR", -6, 120, maxGust, "all"),

+          

+          ("Fcst", "pws34int", "SCALAR", 0, 6, 10, "all"),

+          ("Fcst", "pws34int", "SCALAR", 6, 12, 10, "all"),

+          ("Fcst", "pws34int", "SCALAR", 12, 18, 20, "all"),

+          ("Fcst", "pws34int", "SCALAR", 18, 24, 30, "all"),

+          ("Fcst", "pws34int", "SCALAR", 24, 30, 40, "all"),

+          ("Fcst", "pws34int", "SCALAR", 30, 36, 80, "all"),

+          ("Fcst", "pws34int", "SCALAR", 36, 42, 80, "all"),

+          ("Fcst", "pws34int", "SCALAR", 42, 48, 60, "all"),

+          ("Fcst", "pws34int", "SCALAR", 48, 54, 40, "all"),

+          ("Fcst", "pws34int", "SCALAR", 54, 60, 10, "all"),

+          ("Fcst", "pws34int", "SCALAR", 60, 66, 10, "all"),

+          ("Fcst", "pws34int", "SCALAR", 66, 120, 10, "all"),

+

+          ("Fcst", "pws64int", "SCALAR", 0, 6, 10, "all"),

+          ("Fcst", "pws64int", "SCALAR", 6, 12, 20, "all"),

+          ("Fcst", "pws64int", "SCALAR", 12, 18, 20, "all"),

+          ("Fcst", "pws64int", "SCALAR", 18, 24, 30, "all"),

+          ("Fcst", "pws64int", "SCALAR", 24, 30, 40, "all"),

+          ("Fcst", "pws64int", "SCALAR", 30, 36, 60, "all"),

+          ("Fcst", "pws64int", "SCALAR", 36, 42, 60, "all"),

+          ("Fcst", "pws64int", "SCALAR", 42, 48, 80, "all"),

+          ("Fcst", "pws64int", "SCALAR", 48, 54, 40, "all"),

+          ("Fcst", "pws64int", "SCALAR", 54, 60, 10, "all"),

+          ("Fcst", "pws64int", "SCALAR", 60, 66, 10, "all"),

+          ("Fcst", "pws64int", "SCALAR", 66, 120, 10, "all"),

+          ]

+

+

+##  Tests 1-12                         have data values:

+##         maxWind==60, maxGust=75, prob34=50, prob64=20,

+

+##  Tests 12-24 (repeat 1-12 segments) have data values:

+##         maxWind=75, maxGust=90, prob34=20, prob64=10,

+

+##     Test cases: Event Context, Uncertainty, Segments

+##     1 NonEvent Low Segments:

+##         (1, 'Abbreviated', 'FirstIssuance') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Abbreviated', 'FirstIssuance') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Abbreviated', 'FirstIssuance') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Abbreviated', 'FirstIssuance') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Abbreviated', 'FirstIssuance') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Abbreviated', 'FirstIssuance') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Abbreviated', 'FirstIssuance') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'NonEvent', 'NonEvent') (8, 'HU_S', ['FLZ165'])

+##         (9, 'NonEvent', 'NonEvent') (9, 'HU_S', ['GMZ876'])

+

+

+##     2 PreEvent Average Segments:

+##         (1, 'Warning', 'Advancing') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Warning', 'Advancing') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Watch', 'Advancing') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Watch', 'Advancing') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Watch', 'Advancing') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Warning', 'Advancing') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Watch', 'Advancing') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PreEvent', 'Advancing') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PreEvent', 'Advancing') (9, 'HU_S', ['GMZ876'])

+

+

+##     3 Abbreviated High Segments:

+##         (1, 'Warning', 'Peripheral') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Warning', 'Peripheral') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Watch', 'Peripheral') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Watch', 'Peripheral') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Watch', 'Peripheral') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Warning', 'Peripheral') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Watch', 'Peripheral') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PreEvent', 'Peripheral') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PreEvent', 'Peripheral') (9, 'HU_S', ['GMZ876'])

+

+

+##     4 Watch Low Segments:

+##         (1, 'Warning', 'InSitu') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Warning', 'InSitu') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Watch', 'InSitu') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Watch', 'InSitu') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Watch', 'InSitu') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Warning', 'InSitu') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Watch', 'InSitu') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PreEvent', 'InSitu') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PreEvent', 'InSitu') (9, 'HU_S', ['GMZ876'])

+

+

+##     5 Warning Average Segments:

+##         (1, 'Conditions', 'Imminent') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Conditions', 'Imminent') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Abbreviated', 'FirstIssuance') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Warning', 'Advancing') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Warning', 'Advancing') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Conditions', 'Imminent') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Abbreviated', 'FirstIssuance') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PostEvent', 'Immediate') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PostEvent', 'Immediate') (9, 'HU_S', ['GMZ876'])

+

+

+##     6 Conditions High Segments:

+##         (1, 'Conditions', 'Ongoing') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Conditions', 'Ongoing') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Watch', 'Advancing') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Warning', 'Peripheral') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Warning', 'Peripheral') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Conditions', 'Ongoing') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Watch', 'Advancing') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PostEvent', 'LongTerm') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PostEvent', 'LongTerm') (9, 'HU_S', ['GMZ876'])

+

+

+##     7 PostEvent Low Segments:

+##         (1, 'Conditions', 'Diminishing') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Conditions', 'Diminishing') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Watch', 'Peripheral') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Warning', 'InSitu') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Warning', 'InSitu') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Conditions', 'Diminishing') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Watch', 'Peripheral') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'NonEvent', 'NonEvent') (8, 'HU_S', ['FLZ165'])

+##         (9, 'NonEvent', 'NonEvent') (9, 'HU_S', ['GMZ876'])

+

+

+##     8 ExtraTropical Average Segments:

+##         (1, 'ExtraTropical', 'InSitu') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'ExtraTropical', 'InSitu') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Watch', 'InSitu') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Conditions', 'Imminent') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Conditions', 'Imminent') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'ExtraTropical', 'InSitu') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Watch', 'InSitu') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PreEvent', 'Advancing') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PreEvent', 'Advancing') (9, 'HU_S', ['GMZ876'])

+

+

+##     9 NonEvent High Segments:

+##         (1, 'ExtraTropical', 'Completed') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'ExtraTropical', 'Completed') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Abbreviated', 'FirstIssuance') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Conditions', 'Ongoing') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Conditions', 'Ongoing') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'ExtraTropical', 'Completed') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Abbreviated', 'FirstIssuance') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PreEvent', 'Peripheral') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PreEvent', 'Peripheral') (9, 'HU_S', ['GMZ876'])

+

+

+##     10 PreEvent Low Segments:

+##         (1, 'Abbreviated', 'FirstIssuance') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Abbreviated', 'FirstIssuance') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Watch', 'Advancing') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'Conditions', 'Diminishing') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'Conditions', 'Diminishing') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Abbreviated', 'FirstIssuance') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Watch', 'Advancing') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PreEvent', 'InSitu') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PreEvent', 'InSitu') (9, 'HU_S', ['GMZ876'])

+

+

+##     11 Abbreviated Average Segments:

+##         (1, 'Warning', 'Advancing') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Warning', 'Advancing') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Watch', 'Peripheral') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'ExtraTropical', 'InSitu') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'ExtraTropical', 'InSitu') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Warning', 'Advancing') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Watch', 'Peripheral') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PostEvent', 'Immediate') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PostEvent', 'Immediate') (9, 'HU_S', ['GMZ876'])

+

+

+##     12 Watch High Segments:

+##         (1, 'Warning', 'Peripheral') (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+##         (2, 'Warning', 'Peripheral') (2, 'HU_W', ['GMZ850'])

+##         (3, 'Watch', 'InSitu') (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+##         (4, 'ExtraTropical', 'Completed') (4, 'HU_A_TR_W', ['FLZ052'])

+##         (5, 'ExtraTropical', 'Completed') (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+##         (6, 'Warning', 'Peripheral') (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+##         (7, 'Watch', 'InSitu') (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+##         (8, 'PostEvent', 'LongTerm') (8, 'HU_S', ['FLZ165'])

+##         (9, 'PostEvent', 'LongTerm') (9, 'HU_S', ['GMZ876'])

+

+

+

+

+

+scripts = [   

+     {

+     "name":"HLS_segmentation_1", 

+     "productType" : "Hazard_HLS", 

+     "commentary": """Segmentation testing of HLS product.

+NonEvent N/A

+   1 'Warning',       'Advancing'       (1, 'HU_W', ['FLZ139', 'FLZ142', 'FLZ043'])

+   2 'Conditions',    'Imminent'        (2, 'HU_W', ['GMZ850'])

+   3 'Watch',         'Peripheral'      (3, 'HU_A', ['FLZ148', 'FLZ149', 'FLZ050'])

+   4 'ExtraTropical', 'InSitu'          (4, 'HU_A_TR_W', ['FLZ052'])

+   5 'Abbreviated',   'FirstIssuance'   (5, 'HU_A_TR_W', ['FLZ151', 'GMZ830'])

+   6 'Abbreviated',   'FirstIssuance'   (6, 'TR_W', ['FLZ160', 'FLZ061', 'GMZ853'])

+   7 'Abbreviated',   'FirstIssuance'   (7, 'TR_A', ['FLZ162', 'GMZ856', 'FLZ057'])

+   8 'NonEvent',      'NonEvent'        (8, 'HU_S', ['FLZ165'])

+   9 'PreEvent',      'InSitu'          (9, 'HU_S', ['GMZ876'])

+             """,

+     "gridsStartTime": None,     

+     "createGrids": createHazardGrids() + createDataGrids(),

+     "cmdLineVars" :str(cmdLineVars()),     

+     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],

+     "checkStrings" : [

+        #"HURRICANE LOCAL STATEMENT",

+        "National Weather Service Tampa Bay Ruskin FL",

+       ],

+     },

+

+     {

+     "name":"HLS_segmentation_2", 

+     "productType" : "Hazard_HLS", 

+     "commentary": """Segmentation testing of HLS product. -- ETN codes""",

+     "gridsStartTime": None,     

+     "createGrids": createDataGrids() + [

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"", "all"),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W:1001",["FLZ139"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.W",["FLZ043"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["GMZ830"]),            

+          ],

+     "cmdLineVars" :str(cmdLineVars()),     

+     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],

+     "checkStrings" : [

+        #"HURRICANE LOCAL STATEMENT",

+        "National Weather Service Tampa Bay Ruskin FL",

+       ],

+     },

+

+     {

+     "name":"HLS_generalTests_1", 

+     "productType" : "Hazard_HLS", 

+     "commentary": """Looks at Overview Watches/Warnings and Wind Timing reporting""",

+     "gridsStartTime": None,     

+     "createGrids": createDataGrids() + [

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"", "all"),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["FLZ139"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.W",["FLZ043"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W",["GMZ830"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.W",["FLZ052"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W^HU.A",["FLZ148"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.A",["FLZ050"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.W^HI.A",["FLZ056"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.A",["FLZ061"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.A^FF.A",["GMZ870"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.A^CF.W",["FLZ057"]),

+

+          ("Fcst", "Wind", "VECTOR", -6, 0, (10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 0, 6, (10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 6, 12, (20, "W"), "all"),

+          # Trop begin

+          ("Fcst", "Wind", "VECTOR", 12, 18, (35, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 18, 24, (40, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 24, 30, (50, "W"), "all"),

+          # Hurricane begin

+          ("Fcst", "Wind", "VECTOR", 30, 36, (70, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 36, 42, (75, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 42, 48, (70, "W"), "all"),

+          # Hurricane end

+          ("Fcst", "Wind", "VECTOR", 48, 54, (50, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 54, 60, (35, "W"), "all"),

+          # Trop end

+          ("Fcst", "Wind", "VECTOR", 60, 66, (10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 66, 120, (10, "W"), "all"),

+          

+          ("Fcst", "WindGust", "SCALAR", -6, 120, 90, "all"),

+          ],

+     "cmdLineVars" :str(cmdLineVars()),     

+     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],

+     "checkStrings" : [

+        #"HURRICANE LOCAL STATEMENT",

+        "National Weather Service Tampa Bay Ruskin FL",

+       ],

+     },

+

+     {

+     "name":"HLS_generalTests_2", 

+     "productType" : "Hazard_HLS", 

+     "commentary": """Looks at Overview Watches/Warnings and Wind Timing reporting""",

+     "gridsStartTime": None,     

+     "createGrids": createDataGrids() + [

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"", "all"),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["GMZ873"]),# New

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.W",["FLZ139"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.W",["FLZ057"]), # New -- TI.A UPG

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.W",["FLZ043"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W",["GMZ830"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.W",["FLZ052"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.W^HU.A",["FLZ148"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HU.A",["FLZ050"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TI.W^HI.A",["FLZ056"]),            

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"HI.A",["FLZ061"]),

+          ("Fcst", "Hazards", "DISCRETE", 0,24,"TR.A",["GMZ870"]),

+

+          ("Fcst", "Wind", "VECTOR", -6, 0, (10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 0, 6, (10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 6, 12, (20, "W"), "all"),

+          # Trop begin

+          ("Fcst", "Wind", "VECTOR", 12, 18, (35, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 18, 24, (40, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 24, 30, (50, "W"), "all"),

+          # Hurricane begin

+          ("Fcst", "Wind", "VECTOR", 30, 36, (70, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 36, 42, (75, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 42, 48, (70, "W"), "all"),

+          # Hurricane end

+          ("Fcst", "Wind", "VECTOR", 48, 54, (50, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 54, 60, (35, "W"), "all"),

+          # Trop end

+          ("Fcst", "Wind", "VECTOR", 60, 66, (10, "W"), "all"),

+          ("Fcst", "Wind", "VECTOR", 66, 120, (10, "W"), "all"),

+          ],

+     "cmdLineVars" :str(cmdLineVars()),     

+     "fileChanges": [("Hazard_HLS__Overrides", "TextUtility", "add", useTest, "undo")],

+     "checkStrings" : [

+        #"HURRICANE LOCAL STATEMENT",

+        "National Weather Service Tampa Bay Ruskin FL",

+       ],

+     },  

+    ]

+

+# Now add the generated scripts

+firstScripts = makeScripts()

+secondScripts = makeScripts(

+    len(firstScripts),

+    createDataGrids(maxWind=75, maxGust=90, prob34=50, prob64=20))

+

+scripts = scripts + firstScripts + secondScripts

+

+

+

+

+import TestScript

+def testScript(self, dataMgr):

+    defaults = {

+        "cmdLineVars" :"{('Source', 'source'): 'Colorado Emergency Management Agency Denver Colorado', ('Issued By', 'issuedBy'): None, ('EAS Level', 'eas'): 'NONE'}",

+        "publishGrids" : 1,

+        "vtecMode" : "O",

+        "clearHazardsTable": 1,

+        "gridsStartTime": "202001201_0000",

+        "orderStrings": 1,

+        "deleteGrids" : [

+            ("Fcst", "Hazards", "SFC", -48, 240),

+            ("Fcst", "pws34int", "SFC", -48, 240),

+            ("Fcst", "pws64int", "SFC", -48, 240),

+            ("Fcst", "prob34", "SFC", -48, 240),

+            ("Fcst", "prob64", "SFC", -48, 240),

+            ],

+        }

+    return TestScript.generalTestScript(self, dataMgr, scripts, defaults)

+

diff --git a/cave/com.raytheon.viz.gfe/python/autotest/TestScript.py b/cave/com.raytheon.viz.gfe/python/autotest/TestScript.py
index 764fbb1115..330a8ec5e4 100644
--- a/cave/com.raytheon.viz.gfe/python/autotest/TestScript.py
+++ b/cave/com.raytheon.viz.gfe/python/autotest/TestScript.py
@@ -1,546 +1,546 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# Generalized method for returning a TestScript to TextProductTest
-#
-# Author:
-# ----------------------------------------------------------------------------
-
-import SiteInfo
-
-from com.raytheon.uf.common.dataplugin.gfe.python import GfePyIncludeUtil
-
-    
-def getStandardProductType(self, dataMgr, productType, level):
-    siteID = self._dataMgr.getSiteID()
-    baseline = "_Baseline"
-    region = "_"+SiteInfo.SiteInfoDict[siteID]["region"]
-    #print "site, region", siteID, region    
-    tName = productType + "_"+ siteID
-    #print "tName", tName, level
-    ceProducts = dataMgr.getTextProductMgr().getModuleNames()
-    for ceProduct in ceProducts:
-        if ceProduct.find(tName) >= 0:
-            #print "   found in ceProduct"
-            if level == "Site":
-                if ceProduct.find(baseline) >= 0 or ceProduct.find(region) >= 0:
-                    continue
-            if level == "Baseline":
-                if ceProduct.find(baseline) < 0:
-                    continue
-            if level == "Region":
-                if ceProduct.find(region) < 0:
-                    continue
-            #print "setting product", ceProduct
-            productType = ceProduct
-            break
-    #print "returning", productType
-    return productType
-
-import cPickle
-
-def generalTestScript(self, dataMgr, scripts, defaults={}, level="Site"):
-    # "scripts" is a list test scripts each in dictionary form
-    # Each script has the fields described in TextProductTest
-    #
-    # In addition, it may have the following fields to be processed
-    # by this method to further set up the TestScript for TextProductTest
-    #  "comboFlag": if 1 will set up a combinations file for the product
-    #      The product must be a standard routine product.
-    #  "combinations": if specified, use as the combinations.
-    #      Otherwise, use simply "area3".
-    #      Can be keyword "ZONE" to use the first zone found for the site.
-    #  "level" is whether to use Site, Baseline, or Region for standard products
-    #  "defaults" is an optional dictionary of default entries that apply
-    #    to all scripts IF they do not have their own entries.  For example,
-    #
-    #    defaults = {"decodeVTEC": 1"}
-    #
-    #    Then any scripts that do not have a decodeVTEC entry will set it to 1
-    #    (instead of the normal default of 0).
-    #
-    # Can't use "area3" for standard products since ProductWorkstation
-    # examines the Combinations file and does not recognize "area3"
-    # We'll have to deal with this if we want this test script to
-    # run at any site.
-    siteID = dataMgr.getSiteID()
-    area = "area3"
-    groupList = dataMgr.getRefManager().getGroupData("Zones_"+siteID)
-    if groupList.size() > 0:
-        zoneArea = str(groupList.get(0))
-        
-    #print "zoneArea", zoneArea
-    cmdLineVars = "{('Product Issuance', 'productIssuance'): 'Morning', ('Issued By', 'issuedBy'): None}"
-    # Make Script
-    tScript = []
-    for script in scripts:
-        productType = getValue(script, "productType", defaults, "Phrase_Test_Local")
-        if productType is not None:
-            if productType != "Phrase_Test_Local":
-                productType = getStandardProductType(self, dataMgr, productType, level)
-        comboFlag = getValue(script, "comboFlag", defaults, 0)
-        if comboFlag and productType is not None:
-            productName = script["productType"]
-            for s in ["_Local", "CivilEmerg_", "Hazard_"]:
-                productName = productName.replace(s, "")
-            combinationsFileName = "Combinations_"+productName+"_"
-            combinations = getValue(script, "combinations", defaults, None)
-            if combinations is not None:
-                if combinations == "ZONE":
-                    combinations=  [([zoneArea], "")]
-            else:
-                # If not specified, use area3
-                combinations = [([area], "")]
-        else:
-            combinationsFileName = None
-            combinations = None
-        newScript =  {
-            "name": "test"+ script["name"],
-            "productType": productType,
-            "commentary": getValue(script, "commentary", defaults, None), 
-            "cmdLineVars": getValue(script, "cmdLineVars", defaults, cmdLineVars),
-            "database": getValue(script, "database", defaults, "_GRID__Fcst_00000000_0000"),
-            "checkMethod": getValue(script, "checkMethod", defaults, None),
-            "checkStrings": getValue(script, "checkStrings", defaults, None),
-            "notCheckStrings": getValue(script, "notCheckStrings", defaults, None),
-            "orderStrings": getValue(script, "orderStrings", defaults, 0),
-            "combinationsFileName": combinationsFileName,
-            "combinations": combinations,
-            "createGrids": getValue(script, "createGrids", defaults, []),
-            "fileChanges": getValue(script, "fileChanges", defaults, []),
-            "deleteGrids": getValue(script, "deleteGrids", defaults, []),
-            "publishGrids": getValue(script, "publishGrids", defaults, None),
-            "gridsStartTime": getValue(script, "gridsStartTime", defaults, None),
-            "drtTime": getValue(script, "drtTime", defaults, None),
-            "decodeVTEC": getValue(script, "decodeVTEC", defaults, 0),
-            "internalStrip": getValue(script, "internalStrip", defaults, 1),
-            "clearHazardsTable": getValue(script, "clearHazardsTable", defaults, 0),
-            "vtecMode": getValue(script, "vtecMode", defaults, "O"),
-            "writeableCopies": getValue(script, "writableCopies", defaults, None),
-            "callVariables": getValue(script, "callVariables", defaults, cmdLineVars),
-            }
-        # Fix db is test mode
-        #vtecMode = getValue(script, "vtecMode", defaults, "O")
-        #if vtecMode == "T":
-        #    newScript["database"] = "_GRID_Test_Fcst_00000000_0000"
-        #print "appending", newScript
-        tScript.append(newScript)
-        #break
-        
-    return tScript
-
-def getValue(script, key, defaults, default):
-    #print "\nkey", key, script.get(key), defaults.get(key), default
-    #print "returning", script.get(key, defaults.get(key, default))
-    return script.get(key, defaults.get(key, default))
-
-general_createGrids = [
-       ("Fcst", "MaxT", "SCALAR", "MaxTBegin", "MaxTEnd", 70, "all"),
-       ("Fcst", "MinT", "SCALAR", "MinTBegin", "MinTEnd", 43, "all"),
-       ("Fcst", "T", "SCALAR", 6, 9, 70, "all"),
-       ("Fcst", "Td", "SCALAR", 6, 9, 30, "all"),  
-       ("Fcst", "Wind", "VECTOR", 6, 9, (10, "SW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 6, 9, 0, "all"),
-       ("Fcst", "Wx", "WEATHER", 6, 9, "Iso:T:::", "all"),
-       ("Fcst", "PoP", "SCALAR", 6, 9, 0, "all"),
-       ("Fcst", "QPF", "SCALAR", 6, 9, .01, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 6, 9, .05, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 6, 9, 1, "all"),
-       ("Fcst", "T", "SCALAR", 9, 12, 65, "all"),
-       ("Fcst", "Td", "SCALAR", 9, 12, 40, "all"),  
-       ("Fcst", "Wind", "VECTOR", 9, 12, (15, "S"), "all"),
-       ("Fcst", "Sky", "SCALAR", 9, 12, 7, "all"),
-       ("Fcst", "Wx", "WEATHER", 9, 12,
-        "Sct:T:::^Num:T:::", "all"),
-       ("Fcst", "PoP", "SCALAR", 9, 12, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 9, 12, 0, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 9, 12, .01, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 9, 12, 0, "all"),
-       ("Fcst", "T", "SCALAR", 12, 15, 60, "all"),
-       ("Fcst", "Td", "SCALAR", 12, 15, 45, "all"),  
-       ("Fcst", "Wind", "VECTOR", 12, 15, (15, "SE"), "all"),
-       ("Fcst", "Sky", "SCALAR", 12, 15, 6, "all"),
-       ("Fcst", "Wx", "WEATHER", 12, 15,
-        "Wide:T:::^Ocnl:T:::", "all"),
-       ("Fcst", "PoP", "SCALAR", 12, 15, 20, "all"),
-       ("Fcst", "QPF", "SCALAR", 12, 15, 0, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 12, 15, .01, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 12, 15, 2, "all"),
-       ("Fcst", "T", "SCALAR", 15, 18, 55, "all"),
-       ("Fcst", "Td", "SCALAR", 15, 18, 50, "all"),  
-       ("Fcst", "Wind", "VECTOR", 15, 18, (25, "E"), "all"),
-       ("Fcst", "Sky", "SCALAR", 15, 18, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 15, 18,
-        "SChc:T:::^Chc:T:::", "all"),
-       ("Fcst", "PoP", "SCALAR", 15, 18, 20, "all"),
-       ("Fcst", "QPF", "SCALAR", 15, 18, .05, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 15, 18, .1, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 15, 18, 1, "all"),
-       ("Fcst", "T", "SCALAR", 18, 21, 50, "all"),
-       ("Fcst", "Td", "SCALAR", 18, 21, 50, "all"),  
-       ("Fcst", "Wind", "VECTOR", 18, 21, (30, "NE"), "all"),
-       ("Fcst", "Sky", "SCALAR", 18, 21, 15, "all"),
-       ("Fcst", "Wx", "WEATHER", 18, 21,
-        "Lkly:T:::^Def:T:::", "all"),
-       ("Fcst", "PoP", "SCALAR", 18, 21, 25, "all"),
-       ("Fcst", "QPF", "SCALAR", 18, 21, .08, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 18, 21, .2, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 18, 21, 1.5, "all"),
-       ("Fcst", "T", "SCALAR", 21, 24, 50, "all"),
-       ("Fcst", "Td", "SCALAR", 21, 24, 48, "all"),  
-       ("Fcst", "Wind", "VECTOR", 21, 24, (25, "N"), "all"),
-       ("Fcst", "Sky", "SCALAR", 21, 24, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 21, 24,
-        "Frq:T:::^Brf:T:::", "all"),
-       ("Fcst", "PoP", "SCALAR", 21, 24, 20, "all"),
-       ("Fcst", "QPF", "SCALAR", 21, 24, .1, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 21, 24, .25, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 21, 24, 1.2, "all"),
-       ("Fcst", "T", "SCALAR", 24, 27, 48, "all"),
-       ("Fcst", "Td", "SCALAR", 24, 27, 48, "all"),  
-       ("Fcst", "Wind", "VECTOR", 24, 27, (22, "NW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 24, 27, 19, "all"),
-       ("Fcst", "Wx", "WEATHER", 24, 27,
-        "Pds:T:::^Inter:T:::", "all"),
-       ("Fcst", "PoP", "SCALAR", 24, 27, 25, "all"),
-       ("Fcst", "QPF", "SCALAR", 24, 27, 1, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 24, 27, 1, "all"),
-       ("Fcst", "T", "SCALAR", 27, 30, 43, "all"),
-       ("Fcst", "Td", "SCALAR", 27, 30, 43, "all"),  
-       ("Fcst", "Wind", "VECTOR", 27, 30, (15, "W"), "all"),
-       ("Fcst", "Sky", "SCALAR", 27, 30, 30, "all"),
-       ("Fcst", "Wx", "WEATHER", 27, 30, "Wide:R:-:3SM:^Ocnl:R:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 27, 30, 50, "all"),
-       ("Fcst", "QPF", "SCALAR", 27, 30, 3, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 27, 30, 0, "all"),
-       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 24", "MaxTEnd + 24", 50, "all"),
-       ("Fcst", "MinT", "SCALAR", "MinTBegin + 24", "MinTEnd + 24", 33, "all"),
-       ("Fcst", "T", "SCALAR", 30, 33, 40, "all"),
-       ("Fcst", "Td", "SCALAR", 30, 33, 39, "all"),  
-       ("Fcst", "Wind", "VECTOR", 30, 33, (10, "SW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 30, 33, 50, "all"),
-       ("Fcst", "Wx", "WEATHER", 30, 33, "SChc:R:--:0SM:^Chc:R:-::", "all"),
-       ("Fcst", "PoP", "SCALAR", 30, 33, 60, "all"),
-       ("Fcst", "QPF", "SCALAR", 30, 33, 2.5, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 30, 33, 0, "all"),
-       ("Fcst", "T", "SCALAR", 33, 36, 41, "all"),
-       ("Fcst", "Td", "SCALAR", 33, 36, 39, "all"),  
-       ("Fcst", "Wind", "VECTOR", 33, 36, (10, "S"), "all"),
-       ("Fcst", "Sky", "SCALAR", 33, 36, 90, "all"),
-       ("Fcst", "Wx", "WEATHER", 33, 36, "Lkly:R:m:1/2SM:^Def:R:m:4SM:", "all"),
-       ("Fcst", "PoP", "SCALAR", 33, 36, 70, "all"),
-       ("Fcst", "QPF", "SCALAR", 33, 36, 4, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 33, 36, 0, "all"),
-       ("Fcst", "T", "SCALAR", 36, 39, 40, "all"),
-       ("Fcst", "Td", "SCALAR", 36, 39, 40, "all"),  
-       ("Fcst", "Wind", "VECTOR", 36, 39, (5, "SE"), "all"),
-       ("Fcst", "Sky", "SCALAR", 36, 39, 100, "all"),
-       ("Fcst", "Wx", "WEATHER", 36, 39, "Frq:R:+::^Brf:R:+:5SM:", "all"),
-       ("Fcst", "PoP", "SCALAR", 36, 39, 100, "all"),
-       ("Fcst", "QPF", "SCALAR", 36, 39, 5, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 36, 39, 0, "all"),
-       ("Fcst", "T", "SCALAR", 39, 42, 33, "all"),
-       ("Fcst", "Td", "SCALAR", 39, 42, 33, "all"),  
-       ("Fcst", "Wind", "VECTOR", 39, 42, (5, "E"), "all"),
-       ("Fcst", "Sky", "SCALAR", 39, 42, 100, "all"),
-       ("Fcst", "Wx", "WEATHER", 39, 42, "Pds:R:m::^Inter:R:m:2SM:", "all"),
-       ("Fcst", "PoP", "SCALAR", 39, 42, 100, "all"),
-       ("Fcst", "QPF", "SCALAR", 39, 42, 5, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 39, 42, 0, "all"),
-       ("Fcst", "T", "SCALAR", 42, 45, 35, "all"),
-       ("Fcst", "Td", "SCALAR", 42, 45, 33, "all"),  
-       ("Fcst", "Wind", "VECTOR", 42, 45, (2.5, "E"), "all"),
-       ("Fcst", "Sky", "SCALAR", 42, 45, 70, "all"),
-       ("Fcst", "Wx", "WEATHER", 42, 45, "Iso:RW:+::^Sct:RW:+:3/4SM:", "all"),
-       ("Fcst", "PoP", "SCALAR", 42, 45, 95, "all"),
-       ("Fcst", "QPF", "SCALAR", 42, 45, 5, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 42, 45, 0, "all"),
-       ("Fcst", "T", "SCALAR", 45, 48, 40, "all"),
-       ("Fcst", "Td", "SCALAR", 45, 48, 35, "all"),  
-       ("Fcst", "Wind", "VECTOR", 45, 48, (2, "NE"), "all"),
-       ("Fcst", "Sky", "SCALAR", 45, 48, 85, "all"),
-       ("Fcst", "Wx", "WEATHER", 45, 48, "Num:RW:m::^Wide:RW:-:1/2SM:", "all"),
-       ("Fcst", "PoP", "SCALAR", 45, 48, 65, "all"),
-       ("Fcst", "QPF", "SCALAR", 45, 48, 1, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 45, 48, 5, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 45, 48, 0, "all"),
-       ("Fcst", "T", "SCALAR", 48, 51, 45, "all"),
-       ("Fcst", "Td", "SCALAR", 48, 51, 38, "all"),  
-       ("Fcst", "Wind", "VECTOR", 48, 51, (5, "N"), "all"),
-       ("Fcst", "Sky", "SCALAR", 48, 51, 70, "all"),
-       ("Fcst", "Wx", "WEATHER", 48, 51, "Ocnl:RW:-::^SChc:RW:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 48, 51, 60, "all"),
-       ("Fcst", "QPF", "SCALAR", 48, 51, .5, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 48, 51, 3, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 48, 51, 0, "all"),
-       ("Fcst", "T", "SCALAR", 51, 54, 50, "all"),
-       ("Fcst", "Td", "SCALAR", 51, 54, 40, "all"),  
-       ("Fcst", "Wind", "VECTOR", 51, 54, (7, "NW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 51, 54, 35, "all"),
-       ("Fcst", "Wx", "WEATHER", 51, 54, "Chc:RW:--::^Lkly:RW:--:1SM:", "all"),
-       ("Fcst", "PoP", "SCALAR", 51, 54, 50, "all"),
-       ("Fcst", "QPF", "SCALAR", 51, 54, .5, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 51, 54, 2, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 51, 54, 0, "all"),
-       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 48", "MaxTEnd + 48", 70, "all"),
-       ("Fcst", "MinT", "SCALAR", "MinTBegin + 48", "MinTEnd + 48", 50, "all"),
-       ("Fcst", "T", "SCALAR", 54, 57, 50, "all"),
-       ("Fcst", "Td", "SCALAR", 54, 57, 45, "all"),  
-       ("Fcst", "Wind", "VECTOR", 54, 57, (10, "W"), "all"),
-       ("Fcst", "Sky", "SCALAR", 54, 57, 30, "all"),
-       ("Fcst", "Wx", "WEATHER", 54, 57, "Def:RW:--::^Frq:RW:-::", "all"),
-       ("Fcst", "PoP", "SCALAR", 54, 57, 55, "all"),
-       ("Fcst", "QPF", "SCALAR", 54, 57, .25, ["BelowElev"]),
-       ("Fcst", "QPF", "SCALAR", 54, 57, 1, ["AboveElev"]),
-       ("Fcst", "SnowAmt", "SCALAR", 54, 57, 0, "all"),
-       ("Fcst", "T", "SCALAR", 57, 60, 55, "all"),
-       ("Fcst", "Td", "SCALAR", 57, 60, 47, "all"),  
-       ("Fcst", "Wind", "VECTOR", 57, 60, (12, "E"), "all"),
-       ("Fcst", "Sky", "SCALAR", 57, 60, 40, "all"),
-       ("Fcst", "Wx", "WEATHER", 57, 60, "Brf:RW:-:4SM:^Pds:RW:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 57, 60, 40, "all"),
-       ("Fcst", "QPF", "SCALAR", 57, 60, .5, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 57, 60, 0, "all"),
-       ("Fcst", "T", "SCALAR", 60, 63, 65, "all"),
-       ("Fcst", "Td", "SCALAR", 60, 63, 43, "all"),  
-       ("Fcst", "Wind", "VECTOR", 60, 63, (15, "S"), "all"),
-       ("Fcst", "Sky", "SCALAR", 60, 63, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 60, 63, "Inter:RW:--::^Wide:L:-::", "all"),
-       ("Fcst", "PoP", "SCALAR", 60, 63, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 60, 63, .01, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 60, 63, 0, "all"),
-       ("Fcst", "T", "SCALAR", 63, 66, 70, "all"),
-       ("Fcst", "Td", "SCALAR", 63, 66, 40, "all"),  
-       ("Fcst", "Wind", "VECTOR", 63, 66, (15, "N"), "all"),
-       ("Fcst", "Sky", "SCALAR", 63, 66, 5, "all"),
-       ("Fcst", "Wx", "WEATHER", 63, 66, "Ocnl:L:--::^SChc:L:-::", "all"),
-       ("Fcst", "PoP", "SCALAR", 63, 66, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 63, 66, .01, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 63, 66, 0, "all"),
-       ("Fcst", "T", "SCALAR", 66, 69, 68, "all"),
-       ("Fcst", "Td", "SCALAR", 66, 69, 55, "all"),  
-       ("Fcst", "Wind", "VECTOR", 66, 69, (20, "W"), "all"),
-       ("Fcst", "Sky", "SCALAR", 66, 69, 75, "all"),
-       ("Fcst", "Wx", "WEATHER", 66, 69, "Chc:L:-:3SM:^Lkly:L:-::", "all"),
-       ("Fcst", "PoP", "SCALAR", 66, 69, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 66, 69, .01, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 66, 69, 0, "all"),
-       ("Fcst", "T", "SCALAR", 69, 75, 69, "all"),
-       ("Fcst", "Td", "SCALAR", 69, 75, 56, "all"),  
-       ("Fcst", "Wind", "VECTOR", 69, 75, (10, "S"), "all"),
-       ("Fcst", "Sky", "SCALAR", 69, 75, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 69, 75, "Def:L:--::^Areas:L:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 69, 75, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 69, 75, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 69, 75, 0, "all"),
-       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 72", "MaxTEnd + 72", 71, "all"),
-       ("Fcst", "MinT", "SCALAR", "MinTBegin + 72", "MinTEnd + 72", 65, "all"),
-       ("Fcst", "T", "SCALAR", 75, 81, 69, "all"),
-       ("Fcst", "Td", "SCALAR", 75, 81, 60, "all"),  
-       ("Fcst", "Wind", "VECTOR", 75, 81, (5, "SW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 75, 81, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 75, 81, "Patchy:L:--::^Frq:L:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 75, 81, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 75, 81, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 75, 81, 0, "all"),
-       ("Fcst", "T", "SCALAR", 81, 87, 70, "all"),
-       ("Fcst", "Td", "SCALAR", 81, 87, 61, "all"),  
-       ("Fcst", "Wind", "VECTOR", 81, 87, (20, "SE"), "all"),
-       ("Fcst", "Sky", "SCALAR", 81, 87, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 81, 87, "Brf:L:--::^Pds:L:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 81, 87, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 81, 87, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 81, 87, 0, "all"),
-       ("Fcst", "T", "SCALAR", 87, 93, 71, "all"),
-       ("Fcst", "Td", "SCALAR", 87, 93, 65, "all"),  
-       ("Fcst", "Wind", "VECTOR", 87, 93, (15, "E"), "all"),
-       ("Fcst", "Sky", "SCALAR", 87, 93, 50, "all"),
-       ("Fcst", "Wx", "WEATHER", 87, 93, "Inter:L:-::^Wide:ZL:-::", "all"),
-       ("Fcst", "PoP", "SCALAR", 87, 93, 30, "all"),
-       ("Fcst", "QPF", "SCALAR", 87, 93, .01, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 87, 93, 0, "all"),
-       ("Fcst", "T", "SCALAR", 93, 99, 65, "all"),
-       ("Fcst", "Td", "SCALAR", 93, 99, 65, "all"),  
-       ("Fcst", "Wind", "VECTOR", 93, 99, (23, "N"), "all"),
-       ("Fcst", "Sky", "SCALAR", 93, 99, 50, "all"),
-       ("Fcst", "Wx", "WEATHER", 93, 99, "Ocnl:ZL:-::^SChc:ZL:-::", "all"),
-       ("Fcst", "PoP", "SCALAR", 93, 99, 50, "all"),
-       ("Fcst", "QPF", "SCALAR", 93, 99, .01, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 93, 99, 0, "all"),
-       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 96", "MaxTEnd + 96", 75, "all"),
-       ("Fcst", "MinT", "SCALAR", "MinTBegin + 96", "MinTEnd + 96", 68, "all"),
-       ("Fcst", "T", "SCALAR", 99, 105, 68, "all"),
-       ("Fcst", "Td", "SCALAR", 99, 105, 65, "all"),  
-       ("Fcst", "Wind", "VECTOR", 99, 105, (31, "NE"), "all"),
-       ("Fcst", "Sky", "SCALAR", 99, 105, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 99, 105, "Chc:ZL:--::^Lkly:ZL:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 99, 105, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 99, 105, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 99, 105, 0, "all"),
-       ("Fcst", "T", "SCALAR", 105, 111, 70, "all"),
-       ("Fcst", "Td", "SCALAR", 105, 111, 65, "all"),  
-       ("Fcst", "Wind", "VECTOR", 105, 111, (40, "S"), "all"),
-       ("Fcst", "Sky", "SCALAR", 105, 111, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 105, 111, "Def:ZL:--::^Areas:ZL:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 105, 111, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 105, 111, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 105, 111, 0, "all"),
-       ("Fcst", "T", "SCALAR", 111, 117, 73, "all"),
-       ("Fcst", "Td", "SCALAR", 111, 117, 65, "all"),  
-       ("Fcst", "Wind", "VECTOR", 111, 117, (5, "S"), "all"),
-       ("Fcst", "Sky", "SCALAR", 111, 117, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 111, 117, "Patchy:ZL:--::^Frq:ZL:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 111, 117, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 111, 117, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 111, 117, 0, "all"),
-       ("Fcst", "T", "SCALAR", 117, 123, 75, "all"),
-       ("Fcst", "Td", "SCALAR", 117, 123, 60, "all"),  
-       ("Fcst", "Wind", "VECTOR", 117, 123, (45, "W"), "all"),
-       ("Fcst", "Sky", "SCALAR", 117, 123, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 117, 123, "Brf:ZL:--::^Pds:ZL:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 117, 123, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 117, 123, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 117, 123, 0, "all"),
-       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 120", "MaxTEnd + 120", 83, "all"),
-       ("Fcst", "MinT", "SCALAR", "MinTBegin + 120", "MinTEnd + 120", 78, "all"),
-       ("Fcst", "T", "SCALAR", 123, 129, 78, "all"),
-       ("Fcst", "Td", "SCALAR", 123, 129, 55, "all"),  
-       ("Fcst", "Wind", "VECTOR", 123, 129, (17, "SW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 123, 129, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 123, 129, "Inter:ZL:--::^Wide:ZR:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 123, 129, 15, "all"),
-       ("Fcst", "QPF", "SCALAR", 123, 129, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 123, 129, 0, "all"),
-       ("Fcst", "T", "SCALAR", 129, 135, 80, "all"),
-       ("Fcst", "Td", "SCALAR", 129, 135, 50, "all"),  
-       ("Fcst", "Wind", "VECTOR", 129, 135, (12, "SE"), "all"),
-       ("Fcst", "Sky", "SCALAR", 129, 135, 20, "all"),
-       ("Fcst", "Wx", "WEATHER", 129, 135, "Ocnl:ZR:--::^SChc:ZR:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 129, 135, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 129, 135, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 129, 135, 0, "all"),
-       ("Fcst", "T", "SCALAR", 135, 141, 81, "all"),
-       ("Fcst", "Td", "SCALAR", 135, 141, 45, "all"),  
-       ("Fcst", "Wind", "VECTOR", 135, 141, (15, "S"), "all"),
-       ("Fcst", "Sky", "SCALAR", 135, 141, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 135, 141, "Chc:ZR:--::^Lkly:ZR:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 135, 141, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 135, 141, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 135, 141, 0, "all"),
-       ("Fcst", "T", "SCALAR", 141, 147, 83, "all"),
-       ("Fcst", "Td", "SCALAR", 141, 147, 43, "all"),  
-       ("Fcst", "Wind", "VECTOR", 141, 147, (25, "NW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 141, 147, 20, "all"),
-       ("Fcst", "Wx", "WEATHER", 141, 147, "Def:ZR:--::^Frq:ZR:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 141, 147, 20, "all"),
-       ("Fcst", "QPF", "SCALAR", 141, 147, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 141, 147, 0, "all"),
-       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 144", "MaxTEnd + 144", 90, "all"),
-       ("Fcst", "MinT", "SCALAR", "MinTBegin + 144", "MinTEnd + 144", 83, "all"),
-       ("Fcst", "T", "SCALAR", 147, 153, 83, "all"),
-       ("Fcst", "Td", "SCALAR", 147, 153, 40, "all"),  
-       ("Fcst", "Wind", "VECTOR", 147, 153, (22, "N"), "all"),
-       ("Fcst", "Sky", "SCALAR", 147, 153, 20, "all"),
-       ("Fcst", "Wx", "WEATHER", 147, 153, "Brf:ZR:--::^Pds:ZR:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 147, 153, 20, "all"),
-       ("Fcst", "QPF", "SCALAR", 147, 153, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 147, 153, 0, "all"),
-       ("Fcst", "T", "SCALAR", 153, 159, 85, "all"),
-       ("Fcst", "Td", "SCALAR", 153, 159, 40, "all"),  
-       ("Fcst", "Wind", "VECTOR", 153, 159, (31, "N"), "all"),
-       ("Fcst", "Sky", "SCALAR", 153, 159, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 153, 159, "Inter:ZR:--::^Wide:S:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 153, 159, 20, "all"),
-       ("Fcst", "QPF", "SCALAR", 153, 159, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 153, 159, 0, "all"),
-       ("Fcst", "T", "SCALAR", 159, 165, 86, "all"),
-       ("Fcst", "Td", "SCALAR", 159, 165, 39, "all"),  
-       ("Fcst", "Wind", "VECTOR", 159, 165, (45, "S"), "all"),
-       ("Fcst", "Sky", "SCALAR", 159, 165, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 159, 165, "Ocnl:S:--::^SChc:S:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 159, 165, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 159, 165, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 159, 165, 0, "all"),
-       ("Fcst", "T", "SCALAR", 165, 171, 90, "all"),
-       ("Fcst", "Td", "SCALAR", 165, 171, 30, "all"),  
-       ("Fcst", "Wind", "VECTOR", 165, 171, (10, "SW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 165, 171, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 165, 171, "Chc:S:--::^Lkly:S:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 165, 171, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 165, 171, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 165, 171, 0, "all"),
-       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 168", "MaxTEnd + 168", 93, "all"),
-       ("Fcst", "MinT", "SCALAR", "MinTBegin + 168", "MinTEnd + 168", 90, "all"),
-       ("Fcst", "T", "SCALAR", 171, 177, 93, "all"),
-       ("Fcst", "Td", "SCALAR", 171, 177, 30, "all"),  
-       ("Fcst", "Wind", "VECTOR", 171, 177, (10, "SW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 171, 177, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 171, 177, "Def:S:--::^Frq:S:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 171, 177, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 171, 177, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 171, 177, 0, "all"),
-       ("Fcst", "T", "SCALAR", 177, 183, 91, "all"),
-       ("Fcst", "Td", "SCALAR", 177, 183, 40, "all"),  
-       ("Fcst", "Wind", "VECTOR", 177, 183, (15, "SW"), "all"),
-       ("Fcst", "Sky", "SCALAR", 177, 183, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 177, 183, "Brf:S:--::^Pds:S:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 177, 183, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 177, 183, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 177, 183, 0, "all"),
-       ("Fcst", "T", "SCALAR", 183, 189, 90, "all"),
-       ("Fcst", "Td", "SCALAR", 183, 189, 35, "all"),  
-       ("Fcst", "Wind", "VECTOR", 183, 189, (20, "W"), "all"),
-       ("Fcst", "Sky", "SCALAR", 183, 189, 10, "all"),
-       ("Fcst", "Wx", "WEATHER", 183, 189, "Inter:S:--::^Iso:SW:--::", "all"),
-       ("Fcst", "PoP", "SCALAR", 183, 189, 10, "all"),
-       ("Fcst", "QPF", "SCALAR", 183, 189, 0, "all"),
-       ("Fcst", "SnowAmt", "SCALAR", 183, 189, 0, "all"),
-       ]
-
-        
-general_deleteGrids = [
-        ("Fcst", "PoP", "SFC", -100,280),
-        ("Fcst", "MaxT", "SFC", -100,280),
-        ("Fcst", "MinT", "SFC", -100,280),
-        ("Fcst", "T", "SFC", -100,280),
-        ("Fcst", "Td", "SFC", -100,280),
-        ("Fcst", "WindChill", "SFC", -100,280),
-        ("Fcst", "HeatIndex", "SFC", -100,280),
-        ("Fcst", "StormTotalSnow", "SFC", -100,280),
-        ("Fcst", "SnowLevel", "SFC", -100,280),
-        ("Fcst", "FzLevel", "SFC", -100,280),
-        ("Fcst", "RH", "SFC", -100,280),
-        ("Fcst", "Wind", "SFC", -100,280),
-        ("Fcst", "Sky", "SFC", -100,280),
-        ("Fcst", "WindGust", "SFC", -100,280),
-        ("Fcst", "Wx", "SFC", -100,280),
-        ("Fcst", "QPF", "SFC", -100,280),
-        ("Fcst", "SnowAmt", "SFC", -100,280),
-        ("Fcst", "Hazards", "SFC", -100,280),
-        ("Fcst", "pwsD34", "SFC", -100,280),
-        ("Fcst", "pwsN34", "SFC", -100,280),
-        ("Fcst", "pwsD64", "SFC", -100,280),
-        ("Fcst", "pwsN64", "SFC", -100,280),
-        ]
-
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+# ----------------------------------------------------------------------------
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#
+# Generalized method for returning a TestScript to TextProductTest
+#
+# Author:
+# ----------------------------------------------------------------------------
+
+import SiteInfo
+
+from com.raytheon.uf.common.dataplugin.gfe.python import GfePyIncludeUtil
+
+    
+def getStandardProductType(self, dataMgr, productType, level):
+    siteID = self._dataMgr.getSiteID()
+    baseline = "_Baseline"
+    region = "_"+SiteInfo.SiteInfoDict[siteID]["region"]
+    #print "site, region", siteID, region    
+    tName = productType + "_"+ siteID
+    #print "tName", tName, level
+    ceProducts = dataMgr.getTextProductMgr().getModuleNames()
+    for ceProduct in ceProducts:
+        if ceProduct.find(tName) >= 0:
+            #print "   found in ceProduct"
+            if level == "Site":
+                if ceProduct.find(baseline) >= 0 or ceProduct.find(region) >= 0:
+                    continue
+            if level == "Baseline":
+                if ceProduct.find(baseline) < 0:
+                    continue
+            if level == "Region":
+                if ceProduct.find(region) < 0:
+                    continue
+            #print "setting product", ceProduct
+            productType = ceProduct
+            break
+    #print "returning", productType
+    return productType
+
+import pickle
+
+def generalTestScript(self, dataMgr, scripts, defaults={}, level="Site"):
+    # "scripts" is a list test scripts each in dictionary form
+    # Each script has the fields described in TextProductTest
+    #
+    # In addition, it may have the following fields to be processed
+    # by this method to further set up the TestScript for TextProductTest
+    #  "comboFlag": if 1 will set up a combinations file for the product
+    #      The product must be a standard routine product.
+    #  "combinations": if specified, use as the combinations.
+    #      Otherwise, use simply "area3".
+    #      Can be keyword "ZONE" to use the first zone found for the site.
+    #  "level" is whether to use Site, Baseline, or Region for standard products
+    #  "defaults" is an optional dictionary of default entries that apply
+    #    to all scripts IF they do not have their own entries.  For example,
+    #
+    #    defaults = {"decodeVTEC": 1}
+    #
+    #    Then any scripts that do not have a decodeVTEC entry will set it to 1
+    #    (instead of the normal default of 0).
+    #
+    # Can't use "area3" for standard products since ProductWorkstation
+    # examines the Combinations file and does not recognize "area3"
+    # We'll have to deal with this if we want this test script to
+    # run at any site.
+    siteID = dataMgr.getSiteID()
+    area = "area3"
+    groupList = dataMgr.getRefManager().getGroupData("Zones_"+siteID)
+    if groupList.size() > 0:
+        zoneArea = str(groupList.get(0))
+        
+    #print "zoneArea", zoneArea
+    cmdLineVars = "{('Product Issuance', 'productIssuance'): 'Morning', ('Issued By', 'issuedBy'): None}"
+    # Make Script
+    tScript = []
+    for script in scripts:
+        productType = getValue(script, "productType", defaults, "Phrase_Test_Local")
+        if productType is not None:
+            if productType != "Phrase_Test_Local":
+                productType = getStandardProductType(self, dataMgr, productType, level)
+        comboFlag = getValue(script, "comboFlag", defaults, 0)
+        if comboFlag and productType is not None:
+            productName = script["productType"]
+            for s in ["_Local", "CivilEmerg_", "Hazard_"]:
+                productName = productName.replace(s, "")
+            combinationsFileName = "Combinations_"+productName+"_"
+            combinations = getValue(script, "combinations", defaults, None)
+            if combinations is not None:
+                if combinations == "ZONE":
+                    combinations=  [([zoneArea], "")]
+            else:
+                # If not specified, use area3
+                combinations = [([area], "")]
+        else:
+            combinationsFileName = None
+            combinations = None
+        newScript =  {
+            "name": "test"+ script["name"],
+            "productType": productType,
+            "commentary": getValue(script, "commentary", defaults, None), 
+            "cmdLineVars": getValue(script, "cmdLineVars", defaults, cmdLineVars),
+            "database": getValue(script, "database", defaults, "_GRID__Fcst_00000000_0000"),
+            "checkMethod": getValue(script, "checkMethod", defaults, None),
+            "checkStrings": getValue(script, "checkStrings", defaults, None),
+            "notCheckStrings": getValue(script, "notCheckStrings", defaults, None),
+            "orderStrings": getValue(script, "orderStrings", defaults, 0),
+            "combinationsFileName": combinationsFileName,
+            "combinations": combinations,
+            "createGrids": getValue(script, "createGrids", defaults, []),
+            "fileChanges": getValue(script, "fileChanges", defaults, []),
+            "deleteGrids": getValue(script, "deleteGrids", defaults, []),
+            "publishGrids": getValue(script, "publishGrids", defaults, None),
+            "gridsStartTime": getValue(script, "gridsStartTime", defaults, None),
+            "drtTime": getValue(script, "drtTime", defaults, None),
+            "decodeVTEC": getValue(script, "decodeVTEC", defaults, 0),
+            "internalStrip": getValue(script, "internalStrip", defaults, 1),
+            "clearHazardsTable": getValue(script, "clearHazardsTable", defaults, 0),
+            "vtecMode": getValue(script, "vtecMode", defaults, "O"),
+            "writeableCopies": getValue(script, "writableCopies", defaults, None),
+            "callVariables": getValue(script, "callVariables", defaults, cmdLineVars),
+            }
+        # Fix db in test mode
+        #vtecMode = getValue(script, "vtecMode", defaults, "O")
+        #if vtecMode == "T":
+        #    newScript["database"] = "_GRID_Test_Fcst_00000000_0000"
+        #print "appending", newScript
+        tScript.append(newScript)
+        #break
+        
+    return tScript
+
+def getValue(script, key, defaults, default):
+    #print "\nkey", key, script.get(key), defaults.get(key), default
+    #print "returning", script.get(key, defaults.get(key, default))
+    return script.get(key, defaults.get(key, default))
+
+general_createGrids = [
+       ("Fcst", "MaxT", "SCALAR", "MaxTBegin", "MaxTEnd", 70, "all"),
+       ("Fcst", "MinT", "SCALAR", "MinTBegin", "MinTEnd", 43, "all"),
+       ("Fcst", "T", "SCALAR", 6, 9, 70, "all"),
+       ("Fcst", "Td", "SCALAR", 6, 9, 30, "all"),  
+       ("Fcst", "Wind", "VECTOR", 6, 9, (10, "SW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 6, 9, 0, "all"),
+       ("Fcst", "Wx", "WEATHER", 6, 9, "Iso:T:::", "all"),
+       ("Fcst", "PoP", "SCALAR", 6, 9, 0, "all"),
+       ("Fcst", "QPF", "SCALAR", 6, 9, .01, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 6, 9, .05, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 6, 9, 1, "all"),
+       ("Fcst", "T", "SCALAR", 9, 12, 65, "all"),
+       ("Fcst", "Td", "SCALAR", 9, 12, 40, "all"),  
+       ("Fcst", "Wind", "VECTOR", 9, 12, (15, "S"), "all"),
+       ("Fcst", "Sky", "SCALAR", 9, 12, 7, "all"),
+       ("Fcst", "Wx", "WEATHER", 9, 12,
+        "Sct:T:::^Num:T:::", "all"),
+       ("Fcst", "PoP", "SCALAR", 9, 12, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 9, 12, 0, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 9, 12, .01, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 9, 12, 0, "all"),
+       ("Fcst", "T", "SCALAR", 12, 15, 60, "all"),
+       ("Fcst", "Td", "SCALAR", 12, 15, 45, "all"),  
+       ("Fcst", "Wind", "VECTOR", 12, 15, (15, "SE"), "all"),
+       ("Fcst", "Sky", "SCALAR", 12, 15, 6, "all"),
+       ("Fcst", "Wx", "WEATHER", 12, 15,
+        "Wide:T:::^Ocnl:T:::", "all"),
+       ("Fcst", "PoP", "SCALAR", 12, 15, 20, "all"),
+       ("Fcst", "QPF", "SCALAR", 12, 15, 0, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 12, 15, .01, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 12, 15, 2, "all"),
+       ("Fcst", "T", "SCALAR", 15, 18, 55, "all"),
+       ("Fcst", "Td", "SCALAR", 15, 18, 50, "all"),  
+       ("Fcst", "Wind", "VECTOR", 15, 18, (25, "E"), "all"),
+       ("Fcst", "Sky", "SCALAR", 15, 18, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 15, 18,
+        "SChc:T:::^Chc:T:::", "all"),
+       ("Fcst", "PoP", "SCALAR", 15, 18, 20, "all"),
+       ("Fcst", "QPF", "SCALAR", 15, 18, .05, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 15, 18, .1, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 15, 18, 1, "all"),
+       ("Fcst", "T", "SCALAR", 18, 21, 50, "all"),
+       ("Fcst", "Td", "SCALAR", 18, 21, 50, "all"),  
+       ("Fcst", "Wind", "VECTOR", 18, 21, (30, "NE"), "all"),
+       ("Fcst", "Sky", "SCALAR", 18, 21, 15, "all"),
+       ("Fcst", "Wx", "WEATHER", 18, 21,
+        "Lkly:T:::^Def:T:::", "all"),
+       ("Fcst", "PoP", "SCALAR", 18, 21, 25, "all"),
+       ("Fcst", "QPF", "SCALAR", 18, 21, .08, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 18, 21, .2, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 18, 21, 1.5, "all"),
+       ("Fcst", "T", "SCALAR", 21, 24, 50, "all"),
+       ("Fcst", "Td", "SCALAR", 21, 24, 48, "all"),  
+       ("Fcst", "Wind", "VECTOR", 21, 24, (25, "N"), "all"),
+       ("Fcst", "Sky", "SCALAR", 21, 24, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 21, 24,
+        "Frq:T:::^Brf:T:::", "all"),
+       ("Fcst", "PoP", "SCALAR", 21, 24, 20, "all"),
+       ("Fcst", "QPF", "SCALAR", 21, 24, .1, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 21, 24, .25, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 21, 24, 1.2, "all"),
+       ("Fcst", "T", "SCALAR", 24, 27, 48, "all"),
+       ("Fcst", "Td", "SCALAR", 24, 27, 48, "all"),  
+       ("Fcst", "Wind", "VECTOR", 24, 27, (22, "NW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 24, 27, 19, "all"),
+       ("Fcst", "Wx", "WEATHER", 24, 27,
+        "Pds:T:::^Inter:T:::", "all"),
+       ("Fcst", "PoP", "SCALAR", 24, 27, 25, "all"),
+       ("Fcst", "QPF", "SCALAR", 24, 27, 1, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 24, 27, 1, "all"),
+       ("Fcst", "T", "SCALAR", 27, 30, 43, "all"),
+       ("Fcst", "Td", "SCALAR", 27, 30, 43, "all"),  
+       ("Fcst", "Wind", "VECTOR", 27, 30, (15, "W"), "all"),
+       ("Fcst", "Sky", "SCALAR", 27, 30, 30, "all"),
+       ("Fcst", "Wx", "WEATHER", 27, 30, "Wide:R:-:3SM:^Ocnl:R:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 27, 30, 50, "all"),
+       ("Fcst", "QPF", "SCALAR", 27, 30, 3, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 27, 30, 0, "all"),
+       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 24", "MaxTEnd + 24", 50, "all"),
+       ("Fcst", "MinT", "SCALAR", "MinTBegin + 24", "MinTEnd + 24", 33, "all"),
+       ("Fcst", "T", "SCALAR", 30, 33, 40, "all"),
+       ("Fcst", "Td", "SCALAR", 30, 33, 39, "all"),  
+       ("Fcst", "Wind", "VECTOR", 30, 33, (10, "SW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 30, 33, 50, "all"),
+       ("Fcst", "Wx", "WEATHER", 30, 33, "SChc:R:--:0SM:^Chc:R:-::", "all"),
+       ("Fcst", "PoP", "SCALAR", 30, 33, 60, "all"),
+       ("Fcst", "QPF", "SCALAR", 30, 33, 2.5, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 30, 33, 0, "all"),
+       ("Fcst", "T", "SCALAR", 33, 36, 41, "all"),
+       ("Fcst", "Td", "SCALAR", 33, 36, 39, "all"),  
+       ("Fcst", "Wind", "VECTOR", 33, 36, (10, "S"), "all"),
+       ("Fcst", "Sky", "SCALAR", 33, 36, 90, "all"),
+       ("Fcst", "Wx", "WEATHER", 33, 36, "Lkly:R:m:1/2SM:^Def:R:m:4SM:", "all"),
+       ("Fcst", "PoP", "SCALAR", 33, 36, 70, "all"),
+       ("Fcst", "QPF", "SCALAR", 33, 36, 4, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 33, 36, 0, "all"),
+       ("Fcst", "T", "SCALAR", 36, 39, 40, "all"),
+       ("Fcst", "Td", "SCALAR", 36, 39, 40, "all"),  
+       ("Fcst", "Wind", "VECTOR", 36, 39, (5, "SE"), "all"),
+       ("Fcst", "Sky", "SCALAR", 36, 39, 100, "all"),
+       ("Fcst", "Wx", "WEATHER", 36, 39, "Frq:R:+::^Brf:R:+:5SM:", "all"),
+       ("Fcst", "PoP", "SCALAR", 36, 39, 100, "all"),
+       ("Fcst", "QPF", "SCALAR", 36, 39, 5, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 36, 39, 0, "all"),
+       ("Fcst", "T", "SCALAR", 39, 42, 33, "all"),
+       ("Fcst", "Td", "SCALAR", 39, 42, 33, "all"),  
+       ("Fcst", "Wind", "VECTOR", 39, 42, (5, "E"), "all"),
+       ("Fcst", "Sky", "SCALAR", 39, 42, 100, "all"),
+       ("Fcst", "Wx", "WEATHER", 39, 42, "Pds:R:m::^Inter:R:m:2SM:", "all"),
+       ("Fcst", "PoP", "SCALAR", 39, 42, 100, "all"),
+       ("Fcst", "QPF", "SCALAR", 39, 42, 5, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 39, 42, 0, "all"),
+       ("Fcst", "T", "SCALAR", 42, 45, 35, "all"),
+       ("Fcst", "Td", "SCALAR", 42, 45, 33, "all"),  
+       ("Fcst", "Wind", "VECTOR", 42, 45, (2.5, "E"), "all"),
+       ("Fcst", "Sky", "SCALAR", 42, 45, 70, "all"),
+       ("Fcst", "Wx", "WEATHER", 42, 45, "Iso:RW:+::^Sct:RW:+:3/4SM:", "all"),
+       ("Fcst", "PoP", "SCALAR", 42, 45, 95, "all"),
+       ("Fcst", "QPF", "SCALAR", 42, 45, 5, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 42, 45, 0, "all"),
+       ("Fcst", "T", "SCALAR", 45, 48, 40, "all"),
+       ("Fcst", "Td", "SCALAR", 45, 48, 35, "all"),  
+       ("Fcst", "Wind", "VECTOR", 45, 48, (2, "NE"), "all"),
+       ("Fcst", "Sky", "SCALAR", 45, 48, 85, "all"),
+       ("Fcst", "Wx", "WEATHER", 45, 48, "Num:RW:m::^Wide:RW:-:1/2SM:", "all"),
+       ("Fcst", "PoP", "SCALAR", 45, 48, 65, "all"),
+       ("Fcst", "QPF", "SCALAR", 45, 48, 1, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 45, 48, 5, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 45, 48, 0, "all"),
+       ("Fcst", "T", "SCALAR", 48, 51, 45, "all"),
+       ("Fcst", "Td", "SCALAR", 48, 51, 38, "all"),  
+       ("Fcst", "Wind", "VECTOR", 48, 51, (5, "N"), "all"),
+       ("Fcst", "Sky", "SCALAR", 48, 51, 70, "all"),
+       ("Fcst", "Wx", "WEATHER", 48, 51, "Ocnl:RW:-::^SChc:RW:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 48, 51, 60, "all"),
+       ("Fcst", "QPF", "SCALAR", 48, 51, .5, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 48, 51, 3, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 48, 51, 0, "all"),
+       ("Fcst", "T", "SCALAR", 51, 54, 50, "all"),
+       ("Fcst", "Td", "SCALAR", 51, 54, 40, "all"),  
+       ("Fcst", "Wind", "VECTOR", 51, 54, (7, "NW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 51, 54, 35, "all"),
+       ("Fcst", "Wx", "WEATHER", 51, 54, "Chc:RW:--::^Lkly:RW:--:1SM:", "all"),
+       ("Fcst", "PoP", "SCALAR", 51, 54, 50, "all"),
+       ("Fcst", "QPF", "SCALAR", 51, 54, .5, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 51, 54, 2, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 51, 54, 0, "all"),
+       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 48", "MaxTEnd + 48", 70, "all"),
+       ("Fcst", "MinT", "SCALAR", "MinTBegin + 48", "MinTEnd + 48", 50, "all"),
+       ("Fcst", "T", "SCALAR", 54, 57, 50, "all"),
+       ("Fcst", "Td", "SCALAR", 54, 57, 45, "all"),  
+       ("Fcst", "Wind", "VECTOR", 54, 57, (10, "W"), "all"),
+       ("Fcst", "Sky", "SCALAR", 54, 57, 30, "all"),
+       ("Fcst", "Wx", "WEATHER", 54, 57, "Def:RW:--::^Frq:RW:-::", "all"),
+       ("Fcst", "PoP", "SCALAR", 54, 57, 55, "all"),
+       ("Fcst", "QPF", "SCALAR", 54, 57, .25, ["BelowElev"]),
+       ("Fcst", "QPF", "SCALAR", 54, 57, 1, ["AboveElev"]),
+       ("Fcst", "SnowAmt", "SCALAR", 54, 57, 0, "all"),
+       ("Fcst", "T", "SCALAR", 57, 60, 55, "all"),
+       ("Fcst", "Td", "SCALAR", 57, 60, 47, "all"),  
+       ("Fcst", "Wind", "VECTOR", 57, 60, (12, "E"), "all"),
+       ("Fcst", "Sky", "SCALAR", 57, 60, 40, "all"),
+       ("Fcst", "Wx", "WEATHER", 57, 60, "Brf:RW:-:4SM:^Pds:RW:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 57, 60, 40, "all"),
+       ("Fcst", "QPF", "SCALAR", 57, 60, .5, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 57, 60, 0, "all"),
+       ("Fcst", "T", "SCALAR", 60, 63, 65, "all"),
+       ("Fcst", "Td", "SCALAR", 60, 63, 43, "all"),  
+       ("Fcst", "Wind", "VECTOR", 60, 63, (15, "S"), "all"),
+       ("Fcst", "Sky", "SCALAR", 60, 63, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 60, 63, "Inter:RW:--::^Wide:L:-::", "all"),
+       ("Fcst", "PoP", "SCALAR", 60, 63, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 60, 63, .01, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 60, 63, 0, "all"),
+       ("Fcst", "T", "SCALAR", 63, 66, 70, "all"),
+       ("Fcst", "Td", "SCALAR", 63, 66, 40, "all"),  
+       ("Fcst", "Wind", "VECTOR", 63, 66, (15, "N"), "all"),
+       ("Fcst", "Sky", "SCALAR", 63, 66, 5, "all"),
+       ("Fcst", "Wx", "WEATHER", 63, 66, "Ocnl:L:--::^SChc:L:-::", "all"),
+       ("Fcst", "PoP", "SCALAR", 63, 66, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 63, 66, .01, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 63, 66, 0, "all"),
+       ("Fcst", "T", "SCALAR", 66, 69, 68, "all"),
+       ("Fcst", "Td", "SCALAR", 66, 69, 55, "all"),  
+       ("Fcst", "Wind", "VECTOR", 66, 69, (20, "W"), "all"),
+       ("Fcst", "Sky", "SCALAR", 66, 69, 75, "all"),
+       ("Fcst", "Wx", "WEATHER", 66, 69, "Chc:L:-:3SM:^Lkly:L:-::", "all"),
+       ("Fcst", "PoP", "SCALAR", 66, 69, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 66, 69, .01, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 66, 69, 0, "all"),
+       ("Fcst", "T", "SCALAR", 69, 75, 69, "all"),
+       ("Fcst", "Td", "SCALAR", 69, 75, 56, "all"),  
+       ("Fcst", "Wind", "VECTOR", 69, 75, (10, "S"), "all"),
+       ("Fcst", "Sky", "SCALAR", 69, 75, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 69, 75, "Def:L:--::^Areas:L:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 69, 75, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 69, 75, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 69, 75, 0, "all"),
+       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 72", "MaxTEnd + 72", 71, "all"),
+       ("Fcst", "MinT", "SCALAR", "MinTBegin + 72", "MinTEnd + 72", 65, "all"),
+       ("Fcst", "T", "SCALAR", 75, 81, 69, "all"),
+       ("Fcst", "Td", "SCALAR", 75, 81, 60, "all"),  
+       ("Fcst", "Wind", "VECTOR", 75, 81, (5, "SW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 75, 81, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 75, 81, "Patchy:L:--::^Frq:L:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 75, 81, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 75, 81, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 75, 81, 0, "all"),
+       ("Fcst", "T", "SCALAR", 81, 87, 70, "all"),
+       ("Fcst", "Td", "SCALAR", 81, 87, 61, "all"),  
+       ("Fcst", "Wind", "VECTOR", 81, 87, (20, "SE"), "all"),
+       ("Fcst", "Sky", "SCALAR", 81, 87, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 81, 87, "Brf:L:--::^Pds:L:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 81, 87, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 81, 87, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 81, 87, 0, "all"),
+       ("Fcst", "T", "SCALAR", 87, 93, 71, "all"),
+       ("Fcst", "Td", "SCALAR", 87, 93, 65, "all"),  
+       ("Fcst", "Wind", "VECTOR", 87, 93, (15, "E"), "all"),
+       ("Fcst", "Sky", "SCALAR", 87, 93, 50, "all"),
+       ("Fcst", "Wx", "WEATHER", 87, 93, "Inter:L:-::^Wide:ZL:-::", "all"),
+       ("Fcst", "PoP", "SCALAR", 87, 93, 30, "all"),
+       ("Fcst", "QPF", "SCALAR", 87, 93, .01, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 87, 93, 0, "all"),
+       ("Fcst", "T", "SCALAR", 93, 99, 65, "all"),
+       ("Fcst", "Td", "SCALAR", 93, 99, 65, "all"),  
+       ("Fcst", "Wind", "VECTOR", 93, 99, (23, "N"), "all"),
+       ("Fcst", "Sky", "SCALAR", 93, 99, 50, "all"),
+       ("Fcst", "Wx", "WEATHER", 93, 99, "Ocnl:ZL:-::^SChc:ZL:-::", "all"),
+       ("Fcst", "PoP", "SCALAR", 93, 99, 50, "all"),
+       ("Fcst", "QPF", "SCALAR", 93, 99, .01, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 93, 99, 0, "all"),
+       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 96", "MaxTEnd + 96", 75, "all"),
+       ("Fcst", "MinT", "SCALAR", "MinTBegin + 96", "MinTEnd + 96", 68, "all"),
+       ("Fcst", "T", "SCALAR", 99, 105, 68, "all"),
+       ("Fcst", "Td", "SCALAR", 99, 105, 65, "all"),  
+       ("Fcst", "Wind", "VECTOR", 99, 105, (31, "NE"), "all"),
+       ("Fcst", "Sky", "SCALAR", 99, 105, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 99, 105, "Chc:ZL:--::^Lkly:ZL:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 99, 105, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 99, 105, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 99, 105, 0, "all"),
+       ("Fcst", "T", "SCALAR", 105, 111, 70, "all"),
+       ("Fcst", "Td", "SCALAR", 105, 111, 65, "all"),  
+       ("Fcst", "Wind", "VECTOR", 105, 111, (40, "S"), "all"),
+       ("Fcst", "Sky", "SCALAR", 105, 111, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 105, 111, "Def:ZL:--::^Areas:ZL:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 105, 111, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 105, 111, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 105, 111, 0, "all"),
+       ("Fcst", "T", "SCALAR", 111, 117, 73, "all"),
+       ("Fcst", "Td", "SCALAR", 111, 117, 65, "all"),  
+       ("Fcst", "Wind", "VECTOR", 111, 117, (5, "S"), "all"),
+       ("Fcst", "Sky", "SCALAR", 111, 117, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 111, 117, "Patchy:ZL:--::^Frq:ZL:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 111, 117, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 111, 117, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 111, 117, 0, "all"),
+       ("Fcst", "T", "SCALAR", 117, 123, 75, "all"),
+       ("Fcst", "Td", "SCALAR", 117, 123, 60, "all"),  
+       ("Fcst", "Wind", "VECTOR", 117, 123, (45, "W"), "all"),
+       ("Fcst", "Sky", "SCALAR", 117, 123, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 117, 123, "Brf:ZL:--::^Pds:ZL:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 117, 123, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 117, 123, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 117, 123, 0, "all"),
+       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 120", "MaxTEnd + 120", 83, "all"),
+       ("Fcst", "MinT", "SCALAR", "MinTBegin + 120", "MinTEnd + 120", 78, "all"),
+       ("Fcst", "T", "SCALAR", 123, 129, 78, "all"),
+       ("Fcst", "Td", "SCALAR", 123, 129, 55, "all"),  
+       ("Fcst", "Wind", "VECTOR", 123, 129, (17, "SW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 123, 129, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 123, 129, "Inter:ZL:--::^Wide:ZR:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 123, 129, 15, "all"),
+       ("Fcst", "QPF", "SCALAR", 123, 129, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 123, 129, 0, "all"),
+       ("Fcst", "T", "SCALAR", 129, 135, 80, "all"),
+       ("Fcst", "Td", "SCALAR", 129, 135, 50, "all"),  
+       ("Fcst", "Wind", "VECTOR", 129, 135, (12, "SE"), "all"),
+       ("Fcst", "Sky", "SCALAR", 129, 135, 20, "all"),
+       ("Fcst", "Wx", "WEATHER", 129, 135, "Ocnl:ZR:--::^SChc:ZR:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 129, 135, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 129, 135, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 129, 135, 0, "all"),
+       ("Fcst", "T", "SCALAR", 135, 141, 81, "all"),
+       ("Fcst", "Td", "SCALAR", 135, 141, 45, "all"),  
+       ("Fcst", "Wind", "VECTOR", 135, 141, (15, "S"), "all"),
+       ("Fcst", "Sky", "SCALAR", 135, 141, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 135, 141, "Chc:ZR:--::^Lkly:ZR:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 135, 141, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 135, 141, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 135, 141, 0, "all"),
+       ("Fcst", "T", "SCALAR", 141, 147, 83, "all"),
+       ("Fcst", "Td", "SCALAR", 141, 147, 43, "all"),  
+       ("Fcst", "Wind", "VECTOR", 141, 147, (25, "NW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 141, 147, 20, "all"),
+       ("Fcst", "Wx", "WEATHER", 141, 147, "Def:ZR:--::^Frq:ZR:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 141, 147, 20, "all"),
+       ("Fcst", "QPF", "SCALAR", 141, 147, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 141, 147, 0, "all"),
+       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 144", "MaxTEnd + 144", 90, "all"),
+       ("Fcst", "MinT", "SCALAR", "MinTBegin + 144", "MinTEnd + 144", 83, "all"),
+       ("Fcst", "T", "SCALAR", 147, 153, 83, "all"),
+       ("Fcst", "Td", "SCALAR", 147, 153, 40, "all"),  
+       ("Fcst", "Wind", "VECTOR", 147, 153, (22, "N"), "all"),
+       ("Fcst", "Sky", "SCALAR", 147, 153, 20, "all"),
+       ("Fcst", "Wx", "WEATHER", 147, 153, "Brf:ZR:--::^Pds:ZR:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 147, 153, 20, "all"),
+       ("Fcst", "QPF", "SCALAR", 147, 153, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 147, 153, 0, "all"),
+       ("Fcst", "T", "SCALAR", 153, 159, 85, "all"),
+       ("Fcst", "Td", "SCALAR", 153, 159, 40, "all"),  
+       ("Fcst", "Wind", "VECTOR", 153, 159, (31, "N"), "all"),
+       ("Fcst", "Sky", "SCALAR", 153, 159, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 153, 159, "Inter:ZR:--::^Wide:S:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 153, 159, 20, "all"),
+       ("Fcst", "QPF", "SCALAR", 153, 159, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 153, 159, 0, "all"),
+       ("Fcst", "T", "SCALAR", 159, 165, 86, "all"),
+       ("Fcst", "Td", "SCALAR", 159, 165, 39, "all"),  
+       ("Fcst", "Wind", "VECTOR", 159, 165, (45, "S"), "all"),
+       ("Fcst", "Sky", "SCALAR", 159, 165, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 159, 165, "Ocnl:S:--::^SChc:S:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 159, 165, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 159, 165, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 159, 165, 0, "all"),
+       ("Fcst", "T", "SCALAR", 165, 171, 90, "all"),
+       ("Fcst", "Td", "SCALAR", 165, 171, 30, "all"),  
+       ("Fcst", "Wind", "VECTOR", 165, 171, (10, "SW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 165, 171, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 165, 171, "Chc:S:--::^Lkly:S:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 165, 171, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 165, 171, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 165, 171, 0, "all"),
+       ("Fcst", "MaxT", "SCALAR", "MaxTBegin + 168", "MaxTEnd + 168", 93, "all"),
+       ("Fcst", "MinT", "SCALAR", "MinTBegin + 168", "MinTEnd + 168", 90, "all"),
+       ("Fcst", "T", "SCALAR", 171, 177, 93, "all"),
+       ("Fcst", "Td", "SCALAR", 171, 177, 30, "all"),  
+       ("Fcst", "Wind", "VECTOR", 171, 177, (10, "SW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 171, 177, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 171, 177, "Def:S:--::^Frq:S:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 171, 177, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 171, 177, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 171, 177, 0, "all"),
+       ("Fcst", "T", "SCALAR", 177, 183, 91, "all"),
+       ("Fcst", "Td", "SCALAR", 177, 183, 40, "all"),  
+       ("Fcst", "Wind", "VECTOR", 177, 183, (15, "SW"), "all"),
+       ("Fcst", "Sky", "SCALAR", 177, 183, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 177, 183, "Brf:S:--::^Pds:S:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 177, 183, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 177, 183, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 177, 183, 0, "all"),
+       ("Fcst", "T", "SCALAR", 183, 189, 90, "all"),
+       ("Fcst", "Td", "SCALAR", 183, 189, 35, "all"),  
+       ("Fcst", "Wind", "VECTOR", 183, 189, (20, "W"), "all"),
+       ("Fcst", "Sky", "SCALAR", 183, 189, 10, "all"),
+       ("Fcst", "Wx", "WEATHER", 183, 189, "Inter:S:--::^Iso:SW:--::", "all"),
+       ("Fcst", "PoP", "SCALAR", 183, 189, 10, "all"),
+       ("Fcst", "QPF", "SCALAR", 183, 189, 0, "all"),
+       ("Fcst", "SnowAmt", "SCALAR", 183, 189, 0, "all"),
+       ]
+
+        
+general_deleteGrids = [
+        ("Fcst", "PoP", "SFC", -100,280),
+        ("Fcst", "MaxT", "SFC", -100,280),
+        ("Fcst", "MinT", "SFC", -100,280),
+        ("Fcst", "T", "SFC", -100,280),
+        ("Fcst", "Td", "SFC", -100,280),
+        ("Fcst", "WindChill", "SFC", -100,280),
+        ("Fcst", "HeatIndex", "SFC", -100,280),
+        ("Fcst", "StormTotalSnow", "SFC", -100,280),
+        ("Fcst", "SnowLevel", "SFC", -100,280),
+        ("Fcst", "FzLevel", "SFC", -100,280),
+        ("Fcst", "RH", "SFC", -100,280),
+        ("Fcst", "Wind", "SFC", -100,280),
+        ("Fcst", "Sky", "SFC", -100,280),
+        ("Fcst", "WindGust", "SFC", -100,280),
+        ("Fcst", "Wx", "SFC", -100,280),
+        ("Fcst", "QPF", "SFC", -100,280),
+        ("Fcst", "SnowAmt", "SFC", -100,280),
+        ("Fcst", "Hazards", "SFC", -100,280),
+        ("Fcst", "pwsD34", "SFC", -100,280),
+        ("Fcst", "pwsN34", "SFC", -100,280),
+        ("Fcst", "pwsD64", "SFC", -100,280),
+        ("Fcst", "pwsN64", "SFC", -100,280),
+        ]
+
diff --git a/cave/com.raytheon.viz.gfe/python/pyViz/BundlePainter.py b/cave/com.raytheon.viz.gfe/python/pyViz/BundlePainter.py
index 24a40418bb..6afdb21b1e 100644
--- a/cave/com.raytheon.viz.gfe/python/pyViz/BundlePainter.py
+++ b/cave/com.raytheon.viz.gfe/python/pyViz/BundlePainter.py
@@ -1,71 +1,71 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+import VizPainter
+
+from com.raytheon.uf.viz.core.procedures import Bundle
+from java.io import File
+
+#
+# Base class for painting Viz bundles from python
+#  
+#    
+#     SOFTWARE HISTORY
+#    
+#    Date            Ticket#       Engineer       Description
+#    ------------    ----------    -----------    --------------------------
+#    04/16/09                      njensen        Initial Creation.
+#    
 # 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-import VizPainter
-
-from com.raytheon.uf.viz.core.procedures import Bundle
-from java.io import File
-
-#
-# Base class for painting Viz bundles from python
-#  
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    04/16/09                      njensen        Initial Creation.
-#    
-# 
-#
-
-
-class BundlePainter(VizPainter.VizPainter):
-    
-    def __init__(self, bundlePath, imageWidth=400.0, imageHeight=400.0):
-        f = File(bundlePath)
-        b = Bundle.unmarshalBundle(f, None)
-        displays = b.getDisplays()
-        if len(displays) != 1:
-            raise RuntimeError, 'BundlePainter only supports bundles with one display'
-        VizPainter.VizPainter.__init__(self, displays[0], imageWidth, imageHeight)
-        
-        # time match to the first resource with times we find
-        resourceList = self.getDescriptor().getResourceList()
-        size = resourceList.size()
-        timeMatchBasis = None        
-        for x in range(size):
-            rp = resourceList.get(x)
-            rsc = rp.getResource()
-            if rsc is None:
-                rp.instantiateResource(self.getDescriptor())
-                rsc = rp.getResource()
-                rsc.init(self.getTarget())
-            if len(rsc.getDataTimes()) > 0 and timeMatchBasis is None:
-                timeMatchBasis = rsc
-        
-        if timeMatchBasis:    
-            self.getDescriptor().getTimeMatcher().changeTimeMatchBasis(timeMatchBasis)
-            self.getDescriptor().getTimeMatcher().redoTimeMatching(self.getDescriptor())                        
-    
-    def __del__(self):
-        VizPainter.VizPainter.__del__(self)
-    
+#
+
+
+class BundlePainter(VizPainter.VizPainter):
+    
+    def __init__(self, bundlePath, imageWidth=400.0, imageHeight=400.0):
+        f = File(bundlePath)
+        b = Bundle.unmarshalBundle(f, None)
+        displays = b.getDisplays()
+        if len(displays) != 1:
+            raise RuntimeError('BundlePainter only supports bundles with one display')
+        VizPainter.VizPainter.__init__(self, displays[0], imageWidth, imageHeight)
+        
+        # time match to the first resource with times we find
+        resourceList = self.getDescriptor().getResourceList()
+        size = resourceList.size()
+        timeMatchBasis = None        
+        for x in range(size):
+            rp = resourceList.get(x)
+            rsc = rp.getResource()
+            if rsc is None:
+                rp.instantiateResource(self.getDescriptor())
+                rsc = rp.getResource()
+                rsc.init(self.getTarget())
+            if len(rsc.getDataTimes()) > 0 and timeMatchBasis is None:
+                timeMatchBasis = rsc
+        
+        if timeMatchBasis:    
+            self.getDescriptor().getTimeMatcher().changeTimeMatchBasis(timeMatchBasis)
+            self.getDescriptor().getTimeMatcher().redoTimeMatching(self.getDescriptor())                        
+    
+    def __del__(self):
+        VizPainter.VizPainter.__del__(self)
+    
                     
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.gfe/python/pyViz/GFEPainter.py b/cave/com.raytheon.viz.gfe/python/pyViz/GFEPainter.py
index 6de6dcdf96..48acce2645 100644
--- a/cave/com.raytheon.viz.gfe/python/pyViz/GFEPainter.py
+++ b/cave/com.raytheon.viz.gfe/python/pyViz/GFEPainter.py
@@ -1,222 +1,222 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-#
-# GFE Painter for painting GFE data from scripts
-#
-#
-#     SOFTWARE HISTORY
-#
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    04/01/09                      njensen        Initial Creation.
-#    08/20/2012           #1077    randerso       Fixed backgroundColor setting
-#    09/13/2012           #1147    dgilling       Allow map labels to be disabled.
-#    11/06/2012           15406    ryu            Correction for computing domain from mask
-#    09/12/2013           #2033    dgilling       Change how logo files are accessed.
-#    Apr 25, 2015          4952    njensen        Updated for new JEP API
-#    Jan 19, 2017          5987    randerso       Fix after java FontAdapter class was removed
-#
-##
-
-import os
-
-from com.raytheon.uf.viz.core import RGBColors
-from com.raytheon.uf.viz.core.map import MapDescriptor
-from com.raytheon.uf.viz.core.rsc.capabilities import ColorableCapability,\
-    OutlineCapability, LabelableCapability, MagnificationCapability, ColorMapCapability
-from com.raytheon.viz.core import ColorUtil
-from com.raytheon.viz.gfe.core import DataManagerOffscreenFactory, GFEMapRenderableDisplay
-from com.raytheon.viz.gfe.ifpimage import GfeImageUtil, ImageLegendResource
-from com.raytheon.viz.gfe.rsc import GFEResource, GFESystemResource
-from com.raytheon.viz.gfe.core.parm import ParmDisplayAttributes
-EditorType = ParmDisplayAttributes.EditorType
-VisMode = ParmDisplayAttributes.VisMode
-VisualizationType = ParmDisplayAttributes.VisualizationType
-from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID
-
-from java.lang import Double
-from java.lang import Integer
-from javax.imageio import ImageIO
-from java.util import HashSet
-from java.io import File
-
-import VizPainter
-
-class GFEPainter(VizPainter.VizPainter):
-
-    def __init__(self, imageWidth=None, imageHeight=None, expandLeft=25.0, expandRight=25.0, expandTop=25.0, expandBottom=25.0, mask=None, wholeDomain=0, bgColor=None):
-        # Create empty display and data manager for display
-        display = GFEMapRenderableDisplay()
-        self.dataMgr = DataManagerOffscreenFactory.getInstance(display)
-        self.refId = None
-        envelope = None
-        gloc = self.dataMgr.getParmManager().compositeGridLocation()
-        if mask is not None:
-            from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData
-            CoordinateType = ReferenceData.CoordinateType
-            self.refId = ReferenceID(mask)
-            if wholeDomain == 0:
-                envelope = self.dataMgr.getRefManager().loadRefSet(self.refId).overallDomain(CoordinateType.GRID)
-        if imageWidth is not None:
-            imageWidth = Integer(int(imageWidth))
-        if imageHeight is not None:
-            imageHeight = Integer(int(imageHeight))
-        geom = GfeImageUtil.getLocationGeometry(gloc, envelope, imageWidth, imageHeight, expandLeft / 100.0, expandRight / 100.0, expandTop / 100.0, expandBottom / 100.0)
-        
-        # Create descriptor for display
-        desc = MapDescriptor(geom)
-        display.setDescriptor(desc)
-        VizPainter.VizPainter.__init__(self, display, backgroundColor=bgColor)
-
-        gfeSystem = GFESystemResource(self.dataMgr)
-        self.addVizResource(gfeSystem)
-        desc.getResourceList().getProperties(gfeSystem).setSystemResource(True)
-        self.primaryRsc = None
-
-
-    def __del__(self):
-        VizPainter.VizPainter.__del__(self)
-
-    def setupLegend(self, localTime=False, snapshotTime=False, snapshot='', descriptiveName='SHORT', duration='', start='', end='', override={}, lang=''):
-        legend = ImageLegendResource(self.dataMgr)
-        legend.setLocalTime(localTime)
-        legend.setSnapshotTime(snapshotTime)
-        legend.setSnapshotFormat(snapshot)
-        legend.setDescriptiveName(descriptiveName)
-        legend.setDurationFormat(duration)
-        legend.setStartFormat(start)
-        legend.setEndFormat(end)
-        legend.setLanguage(lang)
-        parms = override.keys()
-        for parm in parms:
-            legend.setColorOverride(parm, override[parm])
-        self.addVizResource(legend)
-        self.getDescriptor().getResourceList().getProperties(legend).setSystemResource(True)
-
-    def enableColorbar(self):
-        from com.raytheon.viz.gfe.rsc.colorbar import GFEColorbarResource
-        colorBar = GFEColorbarResource(self.dataMgr)
-        self.addVizResource(colorBar)
-        self.getDescriptor().getResourceList().getProperties(colorBar).setSystemResource(True)
-
-    def __makeGFEResource(self, parm):
-        parm.getParmState().setPickUpValue(None)
-        gfeRsc = GFEResource(parm, self.dataMgr)
-        self.addVizResource(gfeRsc)
-        if not parm.getDisplayAttributes().getBaseColor():
-            from com.raytheon.viz.core import ColorUtil
-            parm.getDisplayAttributes().setBaseColor(ColorUtil.getNewColor(self.getDescriptor()))
-        return gfeRsc
-
-    def addGfeResource(self, parm, colormap=None, colorMin=None, colorMax=None, smooth=False, color=None, lineWidth=None):
-        gfeRsc = self.__makeGFEResource(parm)
-#        jvisType = VisualizationType.valueOf('IMAGE')
-#        jset = HashSet()
-#        jset.add(jvisType)
-#        parm.getDisplayAttributes().setVisualizationType(EDITOR, IMAGE, jset)
-#        parm.getDisplayAttributes().setVisMode(IMAGE)
-        if self.refId is not None:
-            parm.getDisplayAttributes().setDisplayMask(self.refId)
-        self.primaryRsc = gfeRsc
-        params = gfeRsc.getCapability(ColorMapCapability).getColorMapParameters()
-        if colormap is not None:
-            from com.raytheon.uf.viz.core.drawables import ColorMapLoader
-            params.setColorMap(ColorMapLoader.loadColorMap(colormap))
-        if colorMax is not None and colorMin is not None:
-            params.setDataMin(colorMin)
-            params.setColorMapMin(colorMin)
-            params.setDataMax(colorMax)
-            params.setColorMapMax(colorMax)
-        if smooth:
-            from com.raytheon.uf.viz.core.rsc.capabilities import ImagingCapability
-            gfeRsc.getCapability(ImagingCapability).setInterpolationState(True)
-        if color is None:
-            color = ColorUtil.getNewColor(self.getDescriptor())
-        else:
-            color = RGBColors.getRGBColor(color)
-        gfeRsc.getCapability(ColorableCapability).setColor(color)
-        if lineWidth is not None:
-            gfeRsc.getCapability(OutlineCapability).setOutlineWidth(lineWidth)
-
-    def addMapBackground(self, mapName, color=None, lineWidth=None,
-                         linePattern=None, xOffset=None, yOffset=None,
-                         labelAttribute=None, fontOffset=None):
-        from com.raytheon.uf.viz.core.maps import MapManager
-        rsc = MapManager.getInstance(self.getDescriptor()).loadMapByBundleName(mapName).getResource()
-        if color is not None:
-            rsc.getCapability(ColorableCapability).setColor(RGBColors.getRGBColor(color))
-        if lineWidth is not None:
-            rsc.getCapability(OutlineCapability).setOutlineWidth(lineWidth)
-        if linePattern is not None:
-            rsc.getCapability(OutlineCapability).setLineStyle(linePattern)
-        if xOffset is not None:
-            rsc.getCapability(LabelableCapability).setxOffset(xOffset)
-        if yOffset is not None:
-            rsc.getCapability(LabelableCapability).setyOffset(yOffset)
-        rsc.getCapability(LabelableCapability).setLabelField(labelAttribute)
-        if fontOffset is not None:
-            mag = Double(1.26 ** fontOffset)
-            rsc.getCapability(MagnificationCapability).setMagnification(mag)
-
-    def getDataManager(self):
-        return self.dataMgr
-
-    def getAWTFont(self, font):
-        from java.awt import Font
-        from com.raytheon.uf.viz.core.drawables import IFont
-        
-        style = Font.PLAIN;
-        if font.getStyle() != None:
-            for s in font.getStyle():
-                if s == IFont.Style.BOLD:
-                    style = Font.BOLD | style
-                elif s == IFont.Style.ITALIC:
-                    style = Font.ITALIC | style;
-
-        awtfont = Font(font.getFontName(), style, int(font.getFontSize()))
-        return awtfont
-
-    def outputFiles(self, filename, attachLogo=False, logoText=None):
-        rendered = self.getTarget().screenshot()
-        if attachLogo:
-            from java.awt.image import BufferedImage
-            noaa = File(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'logos/noaalogo2.png'))
-            nws = File(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'logos/nwslogo.png'))
-            noaaImage = ImageIO.read(noaa)
-            nwsImage = ImageIO.read(nws)
-            height = rendered.getHeight() + noaaImage.getHeight()
-            finalBuf = BufferedImage(rendered.getWidth(), height, BufferedImage.TYPE_INT_ARGB)
-            graphics = finalBuf.createGraphics()
-            graphics.drawImage(rendered, 0, 0, None)
-            graphics.drawImage(noaaImage, 0, rendered.getHeight(), None)
-            graphics.fillRect(noaaImage.getWidth(), rendered.getHeight(), rendered.getWidth() - noaaImage.getWidth() - nwsImage.getWidth(), rendered.getHeight())
-            if logoText is not None:
-                from java.awt import Color
-                graphics.setColor(Color.BLACK)
-                graphics.setFont(self.getAWTFont(self.getTarget().getDefaultFont()))
-                fm = graphics.getFontMetrics()
-                textBounds = fm.getStringBounds(logoText, graphics)
-                graphics.drawString(logoText, int((rendered.getWidth() - textBounds.getWidth()) / 2), \
-                                    int(rendered.getHeight() + (noaaImage.getHeight() / 2) + textBounds.getHeight() / 2))
-            graphics.drawImage(nwsImage, finalBuf.getWidth() - nwsImage.getWidth(), rendered.getHeight(), None)
-            finalBuf.flush()
-            self.outputImage(finalBuf, filename)
-        else:
-            self.outputImage(rendered, filename)
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+#

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+#

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+#

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+#

+# GFE Painter for painting GFE data from scripts

+#

+#

+#     SOFTWARE HISTORY

+#

+#    Date            Ticket#       Engineer       Description

+#    ------------    ----------    -----------    --------------------------

+#    04/01/09                      njensen        Initial Creation.

+#    08/20/2012           #1077    randerso       Fixed backgroundColor setting

+#    09/13/2012           #1147    dgilling       Allow map labels to be disabled.

+#    11/06/2012           15406    ryu            Correction for computing domain from mask

+#    09/12/2013           #2033    dgilling       Change how logo files are accessed.

+#    Apr 25, 2015          4952    njensen        Updated for new JEP API

+#    Jan 19, 2017          5987    randerso       Fix after java FontAdapter class was removed

+#

+##

+

+import os

+

+from com.raytheon.uf.viz.core import RGBColors

+from com.raytheon.uf.viz.core.map import MapDescriptor

+from com.raytheon.uf.viz.core.rsc.capabilities import ColorableCapability,\

+    OutlineCapability, LabelableCapability, MagnificationCapability, ColorMapCapability

+from com.raytheon.viz.core import ColorUtil

+from com.raytheon.viz.gfe.core import DataManagerOffscreenFactory, GFEMapRenderableDisplay

+from com.raytheon.viz.gfe.ifpimage import GfeImageUtil, ImageLegendResource

+from com.raytheon.viz.gfe.rsc import GFEResource, GFESystemResource

+from com.raytheon.viz.gfe.core.parm import ParmDisplayAttributes

+EditorType = ParmDisplayAttributes.EditorType

+VisMode = ParmDisplayAttributes.VisMode

+VisualizationType = ParmDisplayAttributes.VisualizationType

+from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID

+

+from java.lang import Double

+from java.lang import Integer

+from javax.imageio import ImageIO

+from java.util import HashSet

+from java.io import File

+

+import VizPainter

+

+class GFEPainter(VizPainter.VizPainter):

+

+    def __init__(self, imageWidth=None, imageHeight=None, expandLeft=25.0, expandRight=25.0, expandTop=25.0, expandBottom=25.0, mask=None, wholeDomain=0, bgColor=None):

+        # Create empty display and data manager for display

+        display = GFEMapRenderableDisplay()

+        self.dataMgr = DataManagerOffscreenFactory.getInstance(display)

+        self.refId = None

+        envelope = None

+        gloc = self.dataMgr.getParmManager().compositeGridLocation()

+        if mask is not None:

+            from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData

+            CoordinateType = ReferenceData.CoordinateType

+            self.refId = ReferenceID(mask)

+            if wholeDomain == 0:

+                envelope = self.dataMgr.getRefManager().loadRefSet(self.refId).overallDomain(CoordinateType.GRID)

+        if imageWidth is not None:

+            imageWidth = Integer(int(imageWidth))

+        if imageHeight is not None:

+            imageHeight = Integer(int(imageHeight))

+        geom = GfeImageUtil.getLocationGeometry(gloc, envelope, imageWidth, imageHeight, expandLeft / 100.0, expandRight / 100.0, expandTop / 100.0, expandBottom / 100.0)

+        

+        # Create descriptor for display

+        desc = MapDescriptor(geom)

+        display.setDescriptor(desc)

+        VizPainter.VizPainter.__init__(self, display, backgroundColor=bgColor)

+

+        gfeSystem = GFESystemResource(self.dataMgr)

+        self.addVizResource(gfeSystem)

+        desc.getResourceList().getProperties(gfeSystem).setSystemResource(True)

+        self.primaryRsc = None

+

+

+    def __del__(self):

+        VizPainter.VizPainter.__del__(self)

+

+    def setupLegend(self, localTime=False, snapshotTime=False, snapshot='', descriptiveName='SHORT', duration='', start='', end='', override={}, lang=''):

+        legend = ImageLegendResource(self.dataMgr)

+        legend.setLocalTime(localTime)

+        legend.setSnapshotTime(snapshotTime)

+        legend.setSnapshotFormat(snapshot)

+        legend.setDescriptiveName(descriptiveName)

+        legend.setDurationFormat(duration)

+        legend.setStartFormat(start)

+        legend.setEndFormat(end)

+        legend.setLanguage(lang)

+        parms = list(override.keys())

+        for parm in parms:

+            legend.setColorOverride(parm, override[parm])

+        self.addVizResource(legend)

+        self.getDescriptor().getResourceList().getProperties(legend).setSystemResource(True)

+

+    def enableColorbar(self):

+        from com.raytheon.viz.gfe.rsc.colorbar import GFEColorbarResource

+        colorBar = GFEColorbarResource(self.dataMgr)

+        self.addVizResource(colorBar)

+        self.getDescriptor().getResourceList().getProperties(colorBar).setSystemResource(True)

+

+    def __makeGFEResource(self, parm):

+        parm.getParmState().setPickUpValue(None)

+        gfeRsc = GFEResource(parm, self.dataMgr)

+        self.addVizResource(gfeRsc)

+        if not parm.getDisplayAttributes().getBaseColor():

+            from com.raytheon.viz.core import ColorUtil

+            parm.getDisplayAttributes().setBaseColor(ColorUtil.getNewColor(self.getDescriptor()))

+        return gfeRsc

+

+    def addGfeResource(self, parm, colormap=None, colorMin=None, colorMax=None, smooth=False, color=None, lineWidth=None):

+        gfeRsc = self.__makeGFEResource(parm)

+#        jvisType = VisualizationType.valueOf('IMAGE')

+#        jset = HashSet()

+#        jset.add(jvisType)

+#        parm.getDisplayAttributes().setVisualizationType(EDITOR, IMAGE, jset)

+#        parm.getDisplayAttributes().setVisMode(IMAGE)

+        if self.refId is not None:

+            parm.getDisplayAttributes().setDisplayMask(self.refId)

+        self.primaryRsc = gfeRsc

+        params = gfeRsc.getCapability(ColorMapCapability).getColorMapParameters()

+        if colormap is not None:

+            from com.raytheon.uf.viz.core.drawables import ColorMapLoader

+            params.setColorMap(ColorMapLoader.loadColorMap(colormap))

+        if colorMax is not None and colorMin is not None:

+            params.setDataMin(colorMin)

+            params.setColorMapMin(colorMin)

+            params.setDataMax(colorMax)

+            params.setColorMapMax(colorMax)

+        if smooth:

+            from com.raytheon.uf.viz.core.rsc.capabilities import ImagingCapability

+            gfeRsc.getCapability(ImagingCapability).setInterpolationState(True)

+        if color is None:

+            color = ColorUtil.getNewColor(self.getDescriptor())

+        else:

+            color = RGBColors.getRGBColor(color)

+        gfeRsc.getCapability(ColorableCapability).setColor(color)

+        if lineWidth is not None:

+            gfeRsc.getCapability(OutlineCapability).setOutlineWidth(lineWidth)

+

+    def addMapBackground(self, mapName, color=None, lineWidth=None,

+                         linePattern=None, xOffset=None, yOffset=None,

+                         labelAttribute=None, fontOffset=None):

+        from com.raytheon.uf.viz.core.maps import MapManager

+        rsc = MapManager.getInstance(self.getDescriptor()).loadMapByBundleName(mapName).getResource()

+        if color is not None:

+            rsc.getCapability(ColorableCapability).setColor(RGBColors.getRGBColor(color))

+        if lineWidth is not None:

+            rsc.getCapability(OutlineCapability).setOutlineWidth(lineWidth)

+        if linePattern is not None:

+            rsc.getCapability(OutlineCapability).setLineStyle(linePattern)

+        if xOffset is not None:

+            rsc.getCapability(LabelableCapability).setxOffset(xOffset)

+        if yOffset is not None:

+            rsc.getCapability(LabelableCapability).setyOffset(yOffset)

+        rsc.getCapability(LabelableCapability).setLabelField(labelAttribute)

+        if fontOffset is not None:

+            mag = Double(1.26 ** fontOffset)

+            rsc.getCapability(MagnificationCapability).setMagnification(mag)

+

+    def getDataManager(self):

+        return self.dataMgr

+

+    def getAWTFont(self, font):

+        from java.awt import Font

+        from com.raytheon.uf.viz.core.drawables import IFont

+        

+        style = Font.PLAIN;

+        if font.getStyle() != None:

+            for s in font.getStyle():

+                if s == IFont.Style.BOLD:

+                    style = Font.BOLD | style

+                elif s == IFont.Style.ITALIC:

+                    style = Font.ITALIC | style;

+

+        awtfont = Font(font.getFontName(), style, int(font.getFontSize()))

+        return awtfont

+

+    def outputFiles(self, filename, attachLogo=False, logoText=None):

+        rendered = self.getTarget().screenshot()

+        if attachLogo:

+            from java.awt.image import BufferedImage

+            noaa = File(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'logos/noaalogo2.png'))

+            nws = File(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'logos/nwslogo.png'))

+            noaaImage = ImageIO.read(noaa)

+            nwsImage = ImageIO.read(nws)

+            height = rendered.getHeight() + noaaImage.getHeight()

+            finalBuf = BufferedImage(rendered.getWidth(), height, BufferedImage.TYPE_INT_ARGB)

+            graphics = finalBuf.createGraphics()

+            graphics.drawImage(rendered, 0, 0, None)

+            graphics.drawImage(noaaImage, 0, rendered.getHeight(), None)

+            graphics.fillRect(noaaImage.getWidth(), rendered.getHeight(), rendered.getWidth() - noaaImage.getWidth() - nwsImage.getWidth(), rendered.getHeight())

+            if logoText is not None:

+                from java.awt import Color

+                graphics.setColor(Color.BLACK)

+                graphics.setFont(self.getAWTFont(self.getTarget().getDefaultFont()))

+                fm = graphics.getFontMetrics()

+                textBounds = fm.getStringBounds(logoText, graphics)

+                graphics.drawString(logoText, int((rendered.getWidth() - textBounds.getWidth()) / 2), \

+                                    int(rendered.getHeight() + (noaaImage.getHeight() / 2) + textBounds.getHeight() / 2))

+            graphics.drawImage(nwsImage, finalBuf.getWidth() - nwsImage.getWidth(), rendered.getHeight(), None)

+            finalBuf.flush()

+            self.outputImage(finalBuf, filename)

+        else:

+            self.outputImage(rendered, filename)

diff --git a/cave/com.raytheon.viz.gfe/python/pyViz/testBundlePainter.py b/cave/com.raytheon.viz.gfe/python/pyViz/testBundlePainter.py
index a6db35e01d..d671bd37b6 100644
--- a/cave/com.raytheon.viz.gfe/python/pyViz/testBundlePainter.py
+++ b/cave/com.raytheon.viz.gfe/python/pyViz/testBundlePainter.py
@@ -1,58 +1,58 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-
-#
-# Test of BundlePainter
-#  
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    02/23/10                      njensen        Initial Creation.
-#    
-# 
-#
-
-import BundlePainter
-
-def main():
-    print "Starting"
-    from com.raytheon.uf.viz.core.datastructure import DataCubeManagerStarter
-    dcms = DataCubeManagerStarter()
-    dcms.earlyStartup()
-    bundle = sys.argv[1]    
-    bp = BundlePainter.BundlePainter(bundle, 800.0, 600.0)
-    times = bp.getDescriptor().getDataTimes()
-    for t in times:
-        bp.paint(t)
-        img = bp.getTarget().screenshot()
-        outname = '/tmp/images/' + str(t) + '.png' #TODO fix
-        print "Outputting ", outname
-        bp.outputImage(img, outname)        
-    print "Finished"
-    
-
-
-if __name__ == "__main__":
-    main()
-    
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+

+

+#

+# Test of BundlePainter

+#  

+#    

+#     SOFTWARE HISTORY

+#    

+#    Date            Ticket#       Engineer       Description

+#    ------------    ----------    -----------    --------------------------

+#    02/23/10                      njensen        Initial Creation.

+#    

+# 

+#

+

+import BundlePainter

+

+def main():

+    print("Starting")

+    from com.raytheon.uf.viz.core.datastructure import DataCubeManagerStarter

+    dcms = DataCubeManagerStarter()

+    dcms.earlyStartup()

+    bundle = sys.argv[1]    

+    bp = BundlePainter.BundlePainter(bundle, 800.0, 600.0)

+    times = bp.getDescriptor().getDataTimes()

+    for t in times:

+        bp.paint(t)

+        img = bp.getTarget().screenshot()

+        outname = '/tmp/images/' + str(t) + '.png' #TODO fix

+        print("Outputting ", outname)

+        bp.outputImage(img, outname)        

+    print("Finished")

+    

+

+

+if __name__ == "__main__":

+    main()

+    

                     
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.gfe/python/query/DBSSClient.py b/cave/com.raytheon.viz.gfe/python/query/DBSSClient.py
index 417e30843d..5b7fd58554 100644
--- a/cave/com.raytheon.viz.gfe/python/query/DBSSClient.py
+++ b/cave/com.raytheon.viz.gfe/python/query/DBSSClient.py
@@ -1,125 +1,124 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-#
-#     SOFTWARE HISTORY
-#
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    Sep 01, 2014    3572          randerso       Fix getTopo       
-#    Apr 23, 2015    4259          njensen        Updated for new JEP API
-#    Dec  2, 2015    18356         yteng          Fix typo in __getitem__
-#
-########################################################################
-import DatabaseID, AbsTime, JUtil
-
-from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID as JavaDatabaseID
-from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID
-from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID
-
-import numpy
-
-class DBSSWE:
-    def __init__(self, parm):
-        self._parm = parm
-        self.parmName = parm.getParmID().getCompositeName()
-
-    def keys(self):
-        #return map(lambda x : (
-        #    x.getGridTime().getStart().unixTime(),
-        #    x.getGridTime().getEnd().unixTime()),
-        #                       self._parm.getGridInventory())
-        result = []
-        times = self._parm.getGridInventory()
-        for x in times:
-            start = AbsTime.AbsTime(x.getGridTime().getStart())
-            end = AbsTime.AbsTime(x.getGridTime().getEnd())
-            encodedTime = (start.unixTime(), end.unixTime())
-            result.append(encodedTime)
-        return result
-
-    def __getitem__(self, key):
-        for t, g in map(lambda x, y: (x, y), self.keys(),
-                        self._parm.getGridInventory()):
-            if t == key:                
-                #return g.pyData()
-                g.populate()
-                slice = g.getGridSlice()
-                result = slice.getNDArray()
-                if type(result) is numpy.ndarray and result.dtype == numpy.int8:                
-                    # discrete or weather
-                    dkeys = JUtil.javaObjToPyVal(slice.getKeyList())
-                    result = [result, dkeys] 
-                return result
-        return None
-
-class DBSSDB:
-    def __init__(self, pmgr, key):        
-        javaDbId = JavaDatabaseID(key)
-        self._pmgr = pmgr
-        self._dbid = DatabaseID.DatabaseID(javaDbId)
-
-    def keys(self):
-        return map(lambda x : str(x.getCompositeName()),
-                   self._pmgr.getAvailableParms(self._dbid.toJavaObj()))
-
-    def __getitem__(self, key):
-        #pid = AFPS.ParmID_string(key + ":" + str(self._dbid))
-        pid = ParmID(key + ":" + str(self._dbid))
-        return DBSSWE(self._pmgr.getParm(pid))
-
-
-class DBSSClient:
-    def __init__(self, dataMgr):
-        self._dataMgr = dataMgr
-        self._pmgr = dataMgr.getParmManager()
-        self._refmgr = dataMgr.getRefManager()
-        self._tmgr = dataMgr.getTopoManager()
-        self.siteIDs = [dataMgr.getSiteID()]
-
-    def keys(self):
-        dbs = self._pmgr.getAvailableDbs()
-        availDbs = []
-        for i in range(dbs.size()):
-            availDbs.append(dbs.get(i))
-        return map(lambda x : str(x), availDbs)
-
-    def __getitem__(self, key):
-        return DBSSDB(self._pmgr, key)
-
-    def editAreaNames(self):
-        result = []
-        avail = self._refmgr.getAvailableSets()
-        size = avail.size()
-        for x in range(size):
-            result.append(avail.get(x).getName())        
-        #return map(lambda x : x.name(), self._refmgr.getAvailableSets())
-        return result
-
-    def getEditArea(self, name):
-        rs = self._refmgr.loadRefSet(ReferenceID(name))
-        if rs.isQuery():
-            return rs.getQuery()
-        return rs.getGrid().getNDArray()
-    
-    def getOpMode(self):
-        return self._dataMgr.getOpMode().name()
-
-    def getTopo(self):
-        return self._tmgr.getCompositeTopo().getNDArray()
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+#
+#     SOFTWARE HISTORY
+#
+#    Date            Ticket#       Engineer       Description
+#    ------------    ----------    -----------    --------------------------
+#    Sep 01, 2014    3572          randerso       Fix getTopo       
+#    Apr 23, 2015    4259          njensen        Updated for new JEP API
+#    Dec  2, 2015    18356         yteng          Fix typo in __getitem__
+#
+########################################################################
+import DatabaseID, AbsTime, JUtil
+
+from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID as JavaDatabaseID
+from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID
+from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID
+
+import numpy
+
+class DBSSWE:
+    def __init__(self, parm):
+        self._parm = parm
+        self.parmName = parm.getParmID().getCompositeName()
+
+    def keys(self):
+        #return map(lambda x : (
+        #    x.getGridTime().getStart().unixTime(),
+        #    x.getGridTime().getEnd().unixTime()),
+        #                       self._parm.getGridInventory())
+        result = []
+        times = self._parm.getGridInventory()
+        for x in times:
+            start = AbsTime.AbsTime(x.getGridTime().getStart())
+            end = AbsTime.AbsTime(x.getGridTime().getEnd())
+            encodedTime = (start.unixTime(), end.unixTime())
+            result.append(encodedTime)
+        return result
+
+    def __getitem__(self, key):
+        for t, g in map(lambda x, y: (x, y), list(self.keys()),
+                        self._parm.getGridInventory()):
+            if t == key:                
+                #return g.pyData()
+                g.populate()
+                slice = g.getGridSlice()
+                result = slice.getNDArray()
+                if type(result) is numpy.ndarray and result.dtype == numpy.int8:                
+                    # discrete or weather
+                    dkeys = JUtil.javaObjToPyVal(slice.getKeyList())
+                    result = [result, dkeys] 
+                return result
+        return None
+
+class DBSSDB:
+    def __init__(self, pmgr, key):        
+        javaDbId = JavaDatabaseID(key)
+        self._pmgr = pmgr
+        self._dbid = DatabaseID.DatabaseID(javaDbId)
+
+    def keys(self):
+        return [str(x.getCompositeName()) for x in self._pmgr.getAvailableParms(self._dbid.toJavaObj())]
+
+    def __getitem__(self, key):
+        #pid = AFPS.ParmID_string(key + ":" + str(self._dbid))
+        pid = ParmID(key + ":" + str(self._dbid))
+        return DBSSWE(self._pmgr.getParm(pid))
+
+
+class DBSSClient:
+    def __init__(self, dataMgr):
+        self._dataMgr = dataMgr
+        self._pmgr = dataMgr.getParmManager()
+        self._refmgr = dataMgr.getRefManager()
+        self._tmgr = dataMgr.getTopoManager()
+        self.siteIDs = [dataMgr.getSiteID()]
+
+    def keys(self):
+        dbs = self._pmgr.getAvailableDbs()
+        availDbs = []
+        for i in range(dbs.size()):
+            availDbs.append(dbs.get(i))
+        return [str(x) for x in availDbs]
+
+    def __getitem__(self, key):
+        return DBSSDB(self._pmgr, key)
+
+    def editAreaNames(self):
+        result = []
+        avail = self._refmgr.getAvailableSets()
+        size = avail.size()
+        for x in range(size):
+            result.append(avail.get(x).getName())        
+        #return map(lambda x : x.name(), self._refmgr.getAvailableSets())
+        return result
+
+    def getEditArea(self, name):
+        rs = self._refmgr.loadRefSet(ReferenceID(name))
+        if rs.isQuery():
+            return rs.getQuery()
+        return rs.getGrid().getNDArray()
+    
+    def getOpMode(self):
+        return self._dataMgr.getOpMode().name()
+
+    def getTopo(self):
+        return self._tmgr.getCompositeTopo().getNDArray()
diff --git a/cave/com.raytheon.viz.gfe/python/query/Evaluator.py b/cave/com.raytheon.viz.gfe/python/query/Evaluator.py
index 541b640aa1..08e3a348eb 100644
--- a/cave/com.raytheon.viz.gfe/python/query/Evaluator.py
+++ b/cave/com.raytheon.viz.gfe/python/query/Evaluator.py
@@ -1,76 +1,76 @@
-#!/usr/bin/env python
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+#!/usr/bin/env python
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# Evaluator.py
-# Class for evaluating GFE expressions
-#
-# Author: hansen
-# ----------------------------------------------------------------------------
-import Query, DBSSClient, numpy
-import AbsTime
-
-class Evaluator:
-    def __init__(self, dataMgr):
-        self.__gloc = dataMgr.getParmManager().compositeGridLocation()
-        self._query = Query.Query(DBSSClient.DBSSClient(dataMgr))
-        self.__dm = dataMgr
-        d = self.__gloc.gridSize()
-        self._shape = (d.y, d.x)
-
-    def gloc(self):
-        return self.__gloc
-
-    def evaluate(self, expression, timeInfluence=None):
-        # if no expression, return the empty ref set
-        if len(expression) == 0:
-            return self.__dm.getRefManager().emptyRefSet()
-
-        if timeInfluence is None:
-            timeInfluence = self.__dm.getSpatialDisplayManager().getSpatialEditorTime()
-        
-        if not isinstance(timeInfluence, AbsTime.AbsTime):
-            timeInfluence = AbsTime.AbsTime(timeInfluence)
-        
-        self._query.setTime(timeInfluence.unixTime())
-        grid = self._query.eval(expression)
-        if type(grid) != type(numpy.array([])) or grid.shape != self._shape:
-            raise TypeError("query did not eval to a grid of shape: "
-                            + `self._shape`)
-
-        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID
-        from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit        
-
-        if grid.dtype == 'bool':
-            grid = numpy.array(grid, 'byte') 
-        bits = Grid2DBit.createBitGrid(grid.shape[1], grid.shape[0], grid)  
-        return ReferenceData(self.gloc(),
-                             ReferenceID(expression), 
-                             expression, bits)
-    
-    def willRecurse(self, name, str):
-        return self._query.willRecurse(name, str)                                            
-
-    def cleanUp(self):
-        pass
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+# ----------------------------------------------------------------------------
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#
+# Evaluator.py
+# Class for evaluating GFE expressions
+#
+# Author: hansen
+# ----------------------------------------------------------------------------
+import Query, DBSSClient, numpy
+import AbsTime
+
+class Evaluator:
+    def __init__(self, dataMgr):
+        self.__gloc = dataMgr.getParmManager().compositeGridLocation()
+        self._query = Query.Query(DBSSClient.DBSSClient(dataMgr))
+        self.__dm = dataMgr
+        d = self.__gloc.gridSize()
+        self._shape = (d.y, d.x)
+
+    def gloc(self):
+        return self.__gloc
+
+    def evaluate(self, expression, timeInfluence=None):
+        # if no expression, return the empty ref set
+        if len(expression) == 0:
+            return self.__dm.getRefManager().emptyRefSet()
+
+        if timeInfluence is None:
+            timeInfluence = self.__dm.getSpatialDisplayManager().getSpatialEditorTime()
+        
+        if not isinstance(timeInfluence, AbsTime.AbsTime):
+            timeInfluence = AbsTime.AbsTime(timeInfluence)
+        
+        self._query.setTime(timeInfluence.unixTime())
+        grid = self._query.eval(expression)
+        if type(grid) != type(numpy.array([])) or grid.shape != self._shape:
+            raise TypeError("query did not eval to a grid of shape: "
+                            + repr(self._shape))
+
+        from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID
+        from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit        
+
+        if grid.dtype == 'bool':
+            grid = numpy.array(grid, 'byte') 
+        bits = Grid2DBit.createBitGrid(grid.shape[1], grid.shape[0], grid)  
+        return ReferenceData(self.gloc(),
+                             ReferenceID(expression), 
+                             expression, bits)
+    
+    def willRecurse(self, name, str):
+        return self._query.willRecurse(name, str)                                            
+
+    def cleanUp(self):
+        pass
diff --git a/cave/com.raytheon.viz.gfe/python/query/Query.py b/cave/com.raytheon.viz.gfe/python/query/Query.py
index a0f06c338d..5a4b42adb3 100644
--- a/cave/com.raytheon.viz.gfe/python/query/Query.py
+++ b/cave/com.raytheon.viz.gfe/python/query/Query.py
@@ -1,235 +1,232 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-##
-#
-#     SOFTWARE HISTORY
-#
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    Aug 12, 2013    2162          dgilling       Add performance logging for
-#                                                 query evaluation.
-########################################################################
-
-import numpy, copy, string, time, re, sys
-
-from com.raytheon.uf.common.status import PerformanceStatus
-from com.raytheon.uf.common.time.util import TimeUtil
-
-
-PERF_LOG = PerformanceStatus.getHandler("GFE:");
-
-
-class Query:
-    def mask(self, wx, query, isreg=0):
-        # Returns a numeric mask i.e. a grid of 0's and 1's
-        #  where the value is 1 if the given query succeeds
-        # Arguments:
-        #  wx -- a 2-tuple:
-        #    wxValues : numerical grid of byte values
-        #    keys : list of "ugly strings" where the index of
-        #      the ugly string corresponds to the byte value in
-        #      the wxValues grid.
-        #  query -- a text string representing a query
-        #  isreg -- if 1, the query is treated as a regular expression
-        #           otherwise as a literal string
-        # Examples:
-        #  # Here we want to treat the query as a regular expression
-        #  PoP = where(self.wxMask(wxTuple, "^Chc:", 1), maximum(40, PoP), PoP)
-        #  # Here we want to treat the query as a literal
-        #  PoP = where(self.wxMask(wxTuple, ":L:") maximum(5, PoP), PoP)
-        #
-        rv = numpy.zeros(wx[0].shape, dtype=bool)
-        if not isreg:
-            for i in xrange(len(wx[1])):
-                #if fnmatch.fnmatchcase(wx[1][i], query):
-                if string.find(wx[1][i],query) >=0:
-                    rv = numpy.logical_or(rv, numpy.equal(wx[0], i))
-        else:
-            r = re.compile(query)
-            for i in xrange(len(wx[1])):
-                m = r.match(wx[1][i])
-                if m is not None:
-                    rv = numpy.logical_or(rv, numpy.equal(wx[0], i))
-        return rv
-
-    # Helper For wxcontains checks a subkey field
-    def _cf(self, f, lst):
-        return len(lst) == 0 or f in lst
-
-    def wxcontains(self, wx, cov=[], type=[], inten=[], vis=[], att=[]):
-        rv = numpy.zeros(wx[0].shape, dtype=bool)
-        key = wx[1]
-        for index in xrange(len(key)):
-            skeys = string.split(key[index], '^')
-            for sk in skeys:
-                c, t, i, v, a = string.split(sk, ":")
-                if self._cf(c, cov) and self._cf(t, type) \
-                   and self._cf(i, inten) and self._cf(v, vis) \
-                   and self._cf(a, att):
-                    rv = numpy.logical_or(rv, numpy.equal(wx[0], index))
-                    break
-        return rv
-
-    def contains(self, wx, keys, delim='^'):
-        rv = numpy.zeros(wx[0].shape, dtype=bool)
-        for i in xrange(len(wx[1])):
-            for k in string.split(wx[1][i], delim):
-                if k in keys:
-                    rv = numpy.logical_or(rv, numpy.equal(wx[0], i))
-                    break
-        return rv
-
-    def __init__(self, client):
-        self._client = client
-        opmode = client.getOpMode()
-        if opmode == "PRACTICE":
-            fcst = filter(lambda x: string.find(x, "_Prac_Fcst_") != -1,
-                      self._client.keys())
-        elif opmode == "TEST":
-            fcst = filter(lambda x: string.find(x, "_Test_Fcst_") != -1,
-                      self._client.keys())
-        else:
-            fcst = filter(lambda x: string.find(x, "__Fcst_") != -1,
-                      self._client.keys())
-        if len(fcst) == 0:
-            self._fcst = {}
-        else:
-            self._fcst = self._client[fcst[0]]
-        self._time = time.time()
-
-    def eval(self, queryStr):
-        timer = TimeUtil.getTimer()
-        timer.start()
-        co, glob, loc = self.getEval(queryStr)
-        area = eval(co, glob, loc)
-        timer.stop()
-        PERF_LOG.logDuration("Executing edit area query [" + queryStr + "]", timer.getElapsedTime())
-        return area
-
-    def getTime(self):
-        return self._time
-
-    def setTime(self, time):
-        self._time = time
-
-    def getCode(self, str):
-        co = compile(str, "", "eval")
-        return co, co.co_names
-
-    def getGrid(self, we):
-        times = we.keys()
-        times = filter(lambda x,t=self._time : t >= x[0] and t < x[1], times)
-        try:
-            return we[times[0]]
-        except IndexError:
-            raise IndexError("NO GRID FOR: " + we.parmName + " at time: "
-                             + time.ctime(self.getTime()))
-
-    def getParm(self, name):
-        # Attempt to find a parm in all of the databases.
-        sid = self._client.siteIDs[0]
-        dbs = self._client.keys()
-        dbs.sort()
-        dbs.reverse()
-        for db in dbs:
-            for p in self._client[db].keys():
-                # exact match
-                ename = p + "_" + db
-                if ename == name:
-                    return self._client[db][p]
-                # SITEID_GRID_ omitted
-                gname = p + '_' + re.sub(sid + "_GRID_", "", db)
-                if gname == name:
-                    return self._client[db][p]
-                # Time specifier omited (use most recent
-                elif re.sub(r"_\d{8}_\d{4}\Z", "", ename) == name:
-                    return self._client[db][p]
-                # Both GRID and time omited
-                elif re.sub(r"_\d{8}_\d{4}\Z", "", gname) == name:
-                    return self._client[db][p]
-        return None
-
-
-    def getLocals(self, names):
-        rval = {}
-        rval['mask'] = self.mask
-        rval['wxcontains'] = self.wxcontains
-        rval['contains'] = self.contains
-        fcstParms = self._fcst.keys()
-        editAreas = self._client.editAreaNames()
-        timer = TimeUtil.getTimer()
-        for name in names:
-            timer.reset()
-            timer.start()
-            if name in fcstParms:
-                rval[name] = self.getGrid(self._fcst[name])
-                timer.stop()
-                PERF_LOG.logDuration("Retrieving grid for Parm [" + name + "]", timer.getElapsedTime())
-            elif name + "_SFC" in fcstParms:
-                rval[name] = self.getGrid(self._fcst[name + "_SFC"])
-                timer.stop()
-                PERF_LOG.logDuration("Retrieving grid for Parm [" + name + "_SFC]", timer.getElapsedTime())
-            elif name in editAreas:
-                ea = self._client.getEditArea(name)
-                if type(ea) == type(""):
-                    ea = self.eval(ea)
-                rval[name] = ea
-                timer.stop()
-                PERF_LOG.logDuration("Retrieving edit area [" + name + "]", timer.getElapsedTime())
-            elif string.lower(name) == 'topo':
-                rval[name] = self._client.getTopo()
-                timer.stop()
-                PERF_LOG.logDuration("Retrieving topo grid", timer.getElapsedTime())
-            else:
-                tmp = self.getParm(name)
-                if tmp is not None:
-                    rval[name] = self.getGrid(tmp)
-                timer.stop()
-                PERF_LOG.logDuration("Retrieving grid for Parm [" + name + "]", timer.getElapsedTime())
-        return rval
-
-    def willRecurse(self, name, str):
-        co, names = self.getCode(str)
-        editAreas = filter(lambda x,y=names: x in y,
-                           self._client.editAreaNames())
-        if name in editAreas:
-            return 1
-        return 0
-
-    def getEval(self, queryStr):
-        timer = TimeUtil.getTimer()
-        timer.start()
-        co, names = self.getCode(queryStr)
-        timer.stop()
-        PERF_LOG.logDuration("Compiling edit area query [" + queryStr + "]", timer.getElapsedTime())
-        
-        timer.reset()
-        timer.start()
-        loc = self.getLocals(names)
-        timer.stop()
-        PERF_LOG.logDuration("Retrieving local variables for edit area query [" + queryStr + "]", timer.getElapsedTime())
-        
-        timer.reset()
-        timer.start()
-        glob = copy.copy(getattr(numpy, '__dict__'))
-        timer.stop()
-        PERF_LOG.logDuration("Creating global variables for edit area query [" + queryStr + "]", timer.getElapsedTime())
-        
-        return co, glob, loc
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+##
+#
+#     SOFTWARE HISTORY
+#
+#    Date            Ticket#       Engineer       Description
+#    ------------    ----------    -----------    --------------------------
+#    Aug 12, 2013    2162          dgilling       Add performance logging for
+#                                                 query evaluation.
+########################################################################
+
+import numpy, copy, string, time, re, sys
+
+from com.raytheon.uf.common.status import PerformanceStatus
+from com.raytheon.uf.common.time.util import TimeUtil
+
+
+PERF_LOG = PerformanceStatus.getHandler("GFE:");
+
+
+class Query:
+    def mask(self, wx, query, isreg=0):
+        # Returns a numeric mask i.e. a grid of 0's and 1's
+        #  where the value is 1 if the given query succeeds
+        # Arguments:
+        #  wx -- a 2-tuple:
+        #    wxValues : numerical grid of byte values
+        #    keys : list of "ugly strings" where the index of
+        #      the ugly string corresponds to the byte value in
+        #      the wxValues grid.
+        #  query -- a text string representing a query
+        #  isreg -- if 1, the query is treated as a regular expression
+        #           otherwise as a literal string
+        # Examples:
+        #  # Here we want to treat the query as a regular expression
+        #  PoP = where(self.wxMask(wxTuple, "^Chc:", 1), maximum(40, PoP), PoP)
+        #  # Here we want to treat the query as a literal
+        #  PoP = where(self.wxMask(wxTuple, ":L:"), maximum(5, PoP), PoP)
+        #
+        rv = numpy.zeros(wx[0].shape, dtype=bool)
+        if not isreg:
+            for i in range(len(wx[1])):
+                #if fnmatch.fnmatchcase(wx[1][i], query):
+                if string.find(wx[1][i],query) >=0:
+                    rv = numpy.logical_or(rv, numpy.equal(wx[0], i))
+        else:
+            r = re.compile(query)
+            for i in range(len(wx[1])):
+                m = r.match(wx[1][i])
+                if m is not None:
+                    rv = numpy.logical_or(rv, numpy.equal(wx[0], i))
+        return rv
+
+    # Helper For wxcontains checks a subkey field
+    def _cf(self, f, lst):
+        return len(lst) == 0 or f in lst
+
+    def wxcontains(self, wx, cov=[], type=[], inten=[], vis=[], att=[]):
+        rv = numpy.zeros(wx[0].shape, dtype=bool)
+        key = wx[1]
+        for index in range(len(key)):
+            skeys = string.split(key[index], '^')
+            for sk in skeys:
+                c, t, i, v, a = string.split(sk, ":")
+                if self._cf(c, cov) and self._cf(t, type) \
+                   and self._cf(i, inten) and self._cf(v, vis) \
+                   and self._cf(a, att):
+                    rv = numpy.logical_or(rv, numpy.equal(wx[0], index))
+                    break
+        return rv
+
+    def contains(self, wx, keys, delim='^'):
+        rv = numpy.zeros(wx[0].shape, dtype=bool)
+        for i in range(len(wx[1])):
+            for k in string.split(wx[1][i], delim):
+                if k in keys:
+                    rv = numpy.logical_or(rv, numpy.equal(wx[0], i))
+                    break
+        return rv
+
+    def __init__(self, client):
+        self._client = client
+        opmode = client.getOpMode()
+        if opmode == "PRACTICE":
+            fcst = [x for x in list(self._client.keys()) if string.find(x, "_Prac_Fcst_") != -1]
+        elif opmode == "TEST":
+            fcst = [x for x in list(self._client.keys()) if string.find(x, "_Test_Fcst_") != -1]
+        else:
+            fcst = [x for x in list(self._client.keys()) if string.find(x, "__Fcst_") != -1]
+        if len(fcst) == 0:
+            self._fcst = {}
+        else:
+            self._fcst = self._client[fcst[0]]
+        self._time = time.time()
+
+    def eval(self, queryStr):
+        timer = TimeUtil.getTimer()
+        timer.start()
+        co, glob, loc = self.getEval(queryStr)
+        area = eval(co, glob, loc)
+        timer.stop()
+        PERF_LOG.logDuration("Executing edit area query [" + queryStr + "]", timer.getElapsedTime())
+        return area
+
+    def getTime(self):
+        return self._time
+
+    def setTime(self, time):
+        self._time = time
+
+    def getCode(self, str):
+        co = compile(str, "", "eval")
+        return co, co.co_names
+
+    def getGrid(self, we):
+        times = list(we.keys())
+        times = list(filter(lambda x,t=self._time : t >= x[0] and t < x[1], times))
+        try:
+            return we[times[0]]
+        except IndexError:
+            raise IndexError("NO GRID FOR: " + we.parmName + " at time: "
+                             + time.ctime(self.getTime()))
+
+    def getParm(self, name):
+        # Attempt to find a parm in all of the databases.
+        sid = self._client.siteIDs[0]
+        dbs = list(self._client.keys())
+        dbs.sort()
+        dbs.reverse()
+        for db in dbs:
+            for p in list(self._client[db].keys()):
+                # exact match
+                ename = p + "_" + db
+                if ename == name:
+                    return self._client[db][p]
+                # SITEID_GRID_ omitted
+                gname = p + '_' + re.sub(sid + "_GRID_", "", db)
+                if gname == name:
+                    return self._client[db][p]
+                # Time specifier omitted (use most recent)
+                elif re.sub(r"_\d{8}_\d{4}\Z", "", ename) == name:
+                    return self._client[db][p]
+                # Both GRID and time omitted
+                elif re.sub(r"_\d{8}_\d{4}\Z", "", gname) == name:
+                    return self._client[db][p]
+        return None
+
+
+    def getLocals(self, names):
+        rval = {}
+        rval['mask'] = self.mask
+        rval['wxcontains'] = self.wxcontains
+        rval['contains'] = self.contains
+        fcstParms = list(self._fcst.keys())
+        editAreas = self._client.editAreaNames()
+        timer = TimeUtil.getTimer()
+        for name in names:
+            timer.reset()
+            timer.start()
+            if name in fcstParms:
+                rval[name] = self.getGrid(self._fcst[name])
+                timer.stop()
+                PERF_LOG.logDuration("Retrieving grid for Parm [" + name + "]", timer.getElapsedTime())
+            elif name + "_SFC" in fcstParms:
+                rval[name] = self.getGrid(self._fcst[name + "_SFC"])
+                timer.stop()
+                PERF_LOG.logDuration("Retrieving grid for Parm [" + name + "_SFC]", timer.getElapsedTime())
+            elif name in editAreas:
+                ea = self._client.getEditArea(name)
+                if type(ea) == type(""):
+                    ea = self.eval(ea)
+                rval[name] = ea
+                timer.stop()
+                PERF_LOG.logDuration("Retrieving edit area [" + name + "]", timer.getElapsedTime())
+            elif string.lower(name) == 'topo':
+                rval[name] = self._client.getTopo()
+                timer.stop()
+                PERF_LOG.logDuration("Retrieving topo grid", timer.getElapsedTime())
+            else:
+                tmp = self.getParm(name)
+                if tmp is not None:
+                    rval[name] = self.getGrid(tmp)
+                timer.stop()
+                PERF_LOG.logDuration("Retrieving grid for Parm [" + name + "]", timer.getElapsedTime())
+        return rval
+
+    def willRecurse(self, name, str):
+        co, names = self.getCode(str)
+        editAreas = list(filter(lambda x,y=names: x in y,
+                           self._client.editAreaNames()))
+        if name in editAreas:
+            return 1
+        return 0
+
+    def getEval(self, queryStr):
+        timer = TimeUtil.getTimer()
+        timer.start()
+        co, names = self.getCode(queryStr)
+        timer.stop()
+        PERF_LOG.logDuration("Compiling edit area query [" + queryStr + "]", timer.getElapsedTime())
+        
+        timer.reset()
+        timer.start()
+        loc = self.getLocals(names)
+        timer.stop()
+        PERF_LOG.logDuration("Retrieving local variables for edit area query [" + queryStr + "]", timer.getElapsedTime())
+        
+        timer.reset()
+        timer.start()
+        glob = copy.copy(getattr(numpy, '__dict__'))
+        timer.stop()
+        PERF_LOG.logDuration("Creating global variables for edit area query [" + queryStr + "]", timer.getElapsedTime())
+        
+        return co, glob, loc
diff --git a/cave/com.raytheon.viz.gfe/python/testFormatters/RecreationFcst.py b/cave/com.raytheon.viz.gfe/python/testFormatters/RecreationFcst.py
index 6536fc3d71..609fc3af1d 100644
--- a/cave/com.raytheon.viz.gfe/python/testFormatters/RecreationFcst.py
+++ b/cave/com.raytheon.viz.gfe/python/testFormatters/RecreationFcst.py
@@ -1,568 +1,568 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-########################################################################
-# RecreationFcst
-#
-# This product creates a combination of text phrases for consecutive
-#   time periods for a list of edit areas.
-#
-#   Type: smart
-#   Local product:
-#      RecreationFcst_Local.py (type: smart)
-#   Associated Utility files:
-#     Combinations
-#   To customize this product for your site:
-#      Set up the Combinations file with Edit Areas and labels.
-#      Set up RecreationFcst_Local to override variables, definitions, thresholds, and methods
-#
-#   Component Product Definitions included as methods in this file:
-#      RecreationPhrases 
-#      Extended 
-#      Extended Label
-##
-##########################################################################
-#  Example Output:
-##    Recreation Statement
-
-##    Area 1
-
-##    .TODAY...
-##    DEW POINTS...       IN THE UPPER TEENS.
-##    Minimum humidity... 18.0 PERCENT.
-##    WIND CHILL...       35.0 BECOMING 24.0 IN THE NIGHT..
-##    WIND...             WEST WINDS 25 TO 35 MPH.
-##    PRECIPITATION...    DRY.
-##    LIGHTENING...       2.
-
-##    .WEDNESDAY...
-##    DEW POINTS...       IN THE MID TEENS.
-##    Minimum humidity... 20.0 PERCENT.
-##    WIND CHILL...       36.0 BECOMING 36.0 IN THE NIGHT..
-##    WIND...             WEST WINDS 25 TO 35 MPH.
-##    PRECIPITATION...    WIDESPREAD RAIN AND SNOW.
-##    LIGHTENING...       2.
-
-
-##    .THURSDAY...
-##    VERY WINDY. SUNNY. WIDESPREAD SNOW. LOWS IN THE UPPER 30S. HIGHS IN THE MID
-##    40S.
-##    .FRIDAY...
-##    SUNNY AND DRY. LOWS AROUND 40. HIGHS IN THE LOWER 40S.
-
-##    Area 2
-
-##    .TODAY...
-##    DEW POINTS...       IN THE UPPER TEENS.
-##    Minimum humidity... 18.0 PERCENT.
-##    WIND CHILL...       35.0 BECOMING 24.0 IN THE NIGHT..
-##    WIND...             WEST WINDS 25 TO 35 MPH.
-##    PRECIPITATION...    DRY.
-##    LIGHTENING...       2.
-
-##    .WEDNESDAY...
-##    DEW POINTS...       IN THE MID TEENS.
-##    Minimum humidity... 20.0 PERCENT.
-##    WIND CHILL...       36.0 BECOMING 36.0 IN THE NIGHT..
-##    WIND...             WEST WINDS 25 TO 35 MPH.
-##    PRECIPITATION...    WIDESPREAD RAIN AND SNOW.
-##    LIGHTENING...       2.
-
-
-##    .THURSDAY...
-##    VERY WINDY. SUNNY. WIDESPREAD SNOW. LOWS IN THE UPPER 30S. HIGHS IN THE MID
-##    40S.
-##    .FRIDAY...
-##    SUNNY AND DRY. LOWS AROUND 40. HIGHS IN THE LOWER 40S.
-
-import TextRules
-import SampleAnalysis
-import ForecastNarrative
-import time, string, types
-
-
-class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
-    VariableList = [
-         (("Product Title","title"), "Recreation Statement", "alphaNumeric"),
-         (("Choose Starting Time Range:", "timeRangeName"), "Tomorrow", "radio",
-             ["Today", "Tomorrow"]),
-         (("Number of days:", "numPeriods"), 2, "radio", [2, 3]),
-         # Comment out the following line if you do not want to include Extended as an option:
-         (("Extended", "extended"), "With Extended", "radio", ["Without Extended","With Extended"]),
-         ]
-    Definition =  {
-        "type": "smart",
-        "displayName": "None",
-        # Name of map background for creating Combinations
-        #"mapNameForCombinations": "Zones_", 
-        
-        ## Edit Areas
-        "defaultEditAreas" : [
-              ("area1","Area 1"),
-              ("area2","Area 2"),
-              ("area3","Area 3"),
-              ],      
-
-        # Product-specific variables: Can be overridden in the Local file
-        "extendedLabel": 1,
-        "lineLimit": 45,
-        "trace":0,
-        }
-    
-    def __init__(self):
-        TextRules.TextRules.__init__(self)
-        SampleAnalysis.SampleAnalysis.__init__(self)
-
-    def generateForecast(self, argDict):
-        # Generate Text Phrases for a list of edit areas
-
-        # Get variables
-        error = self._getVariables(argDict)
-        if error is not None:
-            return error
-
-        # Get the areaList -- derived from defaultEditAreas and
-        # may be solicited at run-time from user if desired
-        self._areaList = self.getAreaList(argDict)
-        if len(self._areaList) == 0:
-            return "WARNING -- No Edit Areas Specified to Generate Product."
-
-        # Determine time ranges
-        error = self._determineTimeRanges(argDict)
-        if error is not None:
-            return error
-
-        # Sample the data
-        error = self._sampleData(argDict)
-        if error is not None:
-            return error
-
-        # Initialize the output string
-        fcst = ""
-        fcst = self._preProcessProduct(fcst, argDict)
-
-        # Generate the product for each edit area in the list
-        for editArea, areaLabel in self._areaList:
-            fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict)
-            fcst  = self._makeProduct(fcst, editArea, areaLabel, argDict)
-            fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict)
-
-        fcst = self._postProcessProduct(fcst, argDict)
-        return fcst
-
-    def _getVariables(self, argDict):
-        # Make argDict accessible
-        self.__argDict = argDict
-
-        # Get Definition variables
-        self._definition = argDict["forecastDef"]
-        for key in self._definition.keys():
-            exec "self._" + key + "= self._definition[key]"
-
-        # Get VariableList and _issuance_list variables
-        varDict = argDict["varDict"]
-        for key in varDict.keys():
-            if type(key) is types.TupleType:
-                label, variable = key
-                exec "self._" + variable + "= varDict[key]"
-                            
-        self._language = argDict["language"]
-        return None
-
-    def _determineTimeRanges(self, argDict):
-        # Set up the Narrative Definition and initial Time Range
-        self._timeRange, self._narrativeDef = self._createNarrativeDef(argDict)
-        self._definition["narrativeDef"] = self._narrativeDef
-        self._definition["methodList"] = [self.assembleChildWords]
-        return None
-
-    def _sampleData(self, argDict):
-        # Sample and analyze the data for the narrative
-        # This data will be available in argDict["narrativeData"] for text rules
-        self._narrativeProcessor = ForecastNarrative.ForecastNarrative()
-        error = self._narrativeProcessor.getNarrativeData(
-            argDict, self._definition, self._timeRange, self._areaList, None)
-        if error is not None:
-            return error
-        return None
-
-    def _preProcessProduct(self, fcst, argDict):
-        return fcst
-
-    def _preProcessArea(self, fcst, editArea, areaLabel, argDict):
-        return fcst
-
-    def _makeProduct(self, fcst, editArea, areaLabel, argDict):
-        # Generate Narrative Forecast for Edit Area
-        fcst = fcst + self._narrativeProcessor.generateForecast(
-            argDict, editArea, areaLabel)
-        return fcst
-
-    def _postProcessArea(self, fcst, editArea, areaLabel, argDict):
-        return fcst
-
-    def _postProcessProduct(self, fcst, argDict):
-        return string.upper(fcst)
-
-    ########################################################################
-    # PRODUCT-SPECIFIC METHODS
-    ########################################################################
-
-    def _createNarrativeDef(self, argDict):
-        # Determine the start time for the product and a Narrative Definition
-
-        timeRange = self.getTimeRange(self._timeRangeName, argDict)
-
-        if self._numPeriods == 2:
-            recPhrases = [24, 24]
-        else:
-            recPhrases = [24,24,24]
-        extendeds = [24, 24]
-
-        # Create the NarrativeDef
-        narrativeDef = []
-        for recPhrase in recPhrases:
-            narrativeDef.append(("RecreationPhrases",recPhrase))
-        if self._extended == "With Extended":
-            if self._extendedLabel == 1:
-                narrativeDef.append(("ExtendedLabel",0))
-            for extended in extendeds:
-                narrativeDef.append(("Extended", extended))
-        return timeRange, narrativeDef
-
-    def _td_phrase(self):
-        return {
-            "setUpMethod": self._td_setUp,
-            "wordMethod": self._td_words,
-            "phraseMethods": [self.assembleSubPhrases,
-                              self.postProcessPhrase],
-            }
-    def _td_setUp(self, tree, node):
-        td = self.ElementInfo("Td", "List")
-        elementInfoList = [td]
-        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
-        node.set("descriptor", "")
-        node.set("indentLabel", "DEW POINTS...       ")
-        return self.DONE()
-
-    def _td_words(self, tree, node):
-        statDict = node.getStatDict()
-        stats = self.getStats(statDict,"Td")
-        if stats is None:
-           return self.setWords(node, "")
-        words = self.getTempPhrase(tree, node, stats,"Td")
-        return self.setWords(node, words)
-
-    def _rh_phrase(self):
-        return {
-            "setUpMethod": self._rh_setUp,
-            "wordMethod": self._rh_words,
-            "phraseMethods": [self.assembleSubPhrases,
-                              self.postProcessPhrase],
-            }
-    def _rh_setUp(self, tree, node):
-        rh = self.ElementInfo("RH", "List")
-        elementInfoList = [rh]
-        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
-        node.set("descriptor", "")
-        node.set("indentLabel", "Minimum humidity... ")
-        return self.DONE()
-
-    def _rh_words(self, tree, node):
-        statDict = node.getStatDict()
-        stats = self.getStats(statDict,"RH")
-        if stats is None:
-           return self.setWords(node, "")
-        min, max = stats
-        ten = int(min / 10) * 10
-        digit = min % 10
-        if digit <= 3:
-           RH1 = ten
-        if digit > 3 or digit <= 9:
-           RH1 = ten + 5
-        RH2 = RH1 + 10
-        words = `RH1` + " to " + `RH2` + " percent"
-        return self.setWords(node, words)
-
-    def _windChill_heatIndex_compoundPhrase(self):
-        return {
-            "phraseList": [
-                self.windChill_phrase,
-                self.heatIndex_phrase,
-                ],
-            "phraseMethods": [
-                self.assembleSentences,
-                self._windChill_heatIndex_finishUp,
-                ],
-            }
-    def _windChill_heatIndex_finishUp(self, tree, node):
-        words = node.get("words")
-        if words is None:
-            return
-        if words == "":
-            words = "not a factor"
-        node.set("descriptor", "")
-        statsWC = tree.stats.get("WindChill", node.getTimeRange(),
-                                 node.getAreaLabel(), mergeMethod="Min")
-        if statsWC is not None and \
-            statsWC < self.windChill_threshold(tree, node):
-            node.set("indentLabel", "WIND CHILL...       ")
-        else:
-           statsHI = tree.stats.get("HeatIndex", node.getTimeRange(),
-                                    node.getAreaLabel(), mergeMethod="Max")
-           if statsHI is not None and \
-              statsHI > self.heatIndex_threshold(tree, node):
-              node.set("indentLabel", "Heat index...       ")
-           else:
-              node.set("indentLabel", "")
-              words = ""
-        node.set("compound", 1)
-        return self.setWords(node, words)
- 
-    def _wind_phrase(self):
-        return {
-            "setUpMethod": self._wind_setUp,
-            "wordMethod": self._wind_words,
-            "phraseMethods": [self.assembleSubPhrases,
-                              self.postProcessPhrase],
-            }
-    def _wind_setUp(self, tree, node):
-        wind = self.ElementInfo("Wind", "List")
-        elementInfoList = [wind]
-        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
-        node.set("descriptor", "")
-        node.set("indentLabel", "WIND...             ")
-        return self.DONE()
-
-    def _wind_words(self, tree, node):
-        statDict = node.getStatDict()
-        stats = self.getStats(statDict,"Wind")
-        if stats is None:
-           return self.setWords(node, "")
-        elementInfo = node.getAncestor("firstElement")
-        if elementInfo is None:
-            return self.setWords(node, "")
-        words = self.simple_vector_phrase(tree, node, elementInfo)
-        if words == "null":
-            return self.setWords(node, "null")
-        maxWind, dir = self.getValue(stats, "Max", self.VECTOR())
-        chopphrase = ""
-        if maxWind >= 26.1:
-           chopphrase = "Heavy chop expected on area rivers and lakes"
-        elif maxWind >= 21.7:
-           chopphrase = "Moderate chop expected on area rivers and lakes"
-        elif maxWind >= 17.4:
-           chopphrase = "Light chop expected on area rivers and lakes"
-        if chopphrase != "":
-           words = words + ".  " + chopphrase 
-        return self.setWords(node, words)
-
-    def _wx_phrase(self):
-        return {
-            "setUpMethod": self._wx_setUp,
-            "wordMethod": self._wx_words,
-            "phraseMethods": [self.assembleSubPhrases,
-                              self.postProcessPhrase],
-            }
-    def _wx_setUp(self, tree, node):
-        wx = self.ElementInfo("Wx", "List")
-        qpf = self.ElementInfo("QPF", "MinMax")
-        elementInfoList = [wx]
-        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
-        node.set("descriptor", "")
-        node.set("indentLabel", "PRECIPITATION...    ")
-        return self.DONE()
-
-    def _wx_words(self, tree, node):
-        statDict = node.getStatDict()
-        stats = self.getStats(statDict,"Wx")
-        if stats is None:
-           return self.setWords(node, "")
-        self.weather_words(tree, node)
-        WXwords = node.get("words")
-        statsQ = self.getStats(statDict, "QPF")
-        if statsQ is None:
-           words = WXwords
-        else:
-           QPFrange0 = str(round(statsQ[0],2))
-           QPFrange1 = str(round(statsQ[1],2))
-           #print QPFrange1
-           if ((QPFrange0 == "0.0" and QPFrange1 == "0.0") or (string.find(WXwords, "dry") != -1)):
-              #print "Found dry weather"
-              QPFwords = "\n"
-              words =  WXwords
-           elif (QPFrange0 == "0.0"):
-              QPFwords = "Amounts up to " + QPFrange1 + " of an inch"
-              words = WXwords + ".  " + QPFwords
-           else:
-              QPFwords = "Amounts between " + QPFrange0 + " and " + QPFrange1 + " of an inch"
-              words = WXwords + ".  " + QPFwords
-        return self.setWords(node, words)
-
-    def _ltng_phrase(self):
-        return {
-            "setUpMethod": self._ltng_setUp,
-            "wordMethod": self._ltng_words,
-            "phraseMethods": [self.assembleSubPhrases,
-                              self.postProcessPhrase],
-            }
-    def _ltng_setUp(self, tree, node):
-        wx = self.ElementInfo("Wx", "List")
-        elementInfoList = [wx]
-        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
-        node.set("descriptor", "")
-        node.set("indentLabel", "LIGHTNING...        ")
-        return self.DONE()
-
-    def _ltng_words(self, tree, node):
-        statDict = node.getStatDict()
-        stats = self.getStats(statDict,"Wx")
-        if stats is None:
-           return self.setWords(node, "NONE.")
-        words = ""
-        for subkey, rank in stats:
-           wxType = subkey.wxType()
-           if wxType == "T":
-              cov = subkey.coverage()
-              if cov in ["Num", "Wide", "Ocnl", "Brf", "Frq", "Pds", "Inter", "Lkly", "Def"]:
-                 words = "likely"
-              elif cov in ["Sct", "Chc"] and words not in ["likely"]:
-                 words = "scattered"
-              elif cov in ["Iso", "SChc"] and words not in ["likely", "scattered"]:
-                 words = "isolated"
-              elif words not in ["likely", "scattered", "isolated"]:
-                 words = "possible"
-           elif words not in ["likely", "scattered", "isolated", "possible"]:
-              words = "none"
-        #print words
-        return self.setWords(node, words)
-
-    ########################################################################
-    # OVERRIDING THRESHOLDS AND VARIABLES
-    ########################################################################
-
-    # SampleAnalysis overrides
-    def temporalCoverage_percentage(self, parmHisto, timeRange, componentName):
-        return 15.0
-
-    def temporalCoverage_dict(self, parmHisto, timeRange, componentName):
-        return {
-            "LAL": 0,
-            "MinRH": 0,
-            "MaxRH": 0,
-            "MinT": 10,
-            "MaxT": 10,
-            "Haines": 0,
-            "PoP" : 50,
-            }
-
-    # TextRules overrides
-    def pop_wx_lower_threshold(self, tree, node):
-        # Pop-related Wx will not be reported if Pop is below this threshold
-        return 20
-
-    def phrase_descriptor_dict(self, tree, node):
-        # Descriptors for phrases
-        dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node)
-        dict["DEW POINTS...       "] = "DEW POINTS.........."
-        dict["Minimum humidity... "] = "Minimum humidity...."
-        dict["WIND CHILL...       "] = "WIND CHILL.........."
-        dict["Heat index...       "] = "Heat index.........."
-        dict["WIND...             "] = "WIND................"
-        dict["PRECIPITATION...    "] = "PRECIPITATION......."
-        dict["LIGHTNING...        "] = "LIGHTNING..........."
-        dict["HeatIndex"] = ""
-        dict["WindChill"] = ""
-        return dict
-    
-
-    ########################################################################
-    # OVERRIDING METHODS
-    ########################################################################
-
-    ########################################################################
-    # COMPONENT PRODUCT DEFINITIONS
-    ########################################################################
-
-    def RecreationPhrases(self):
-        return {
-            "type": "component",
-            "methodList": [
-                          self.assembleIndentedPhrases,         
-                          ], 
-            "analysisList": [
-                 ("Td", self.avg),
-                 ("RH", self.minMax),
-                 ("T", self.minMax),
-                 ("WindChill", self.minMax, [12]),
-                 ("HeatIndex", self.minMax, [12]),
-                 ("Wind", self.vectorMinMax),
-                 ("Wx", self.rankedWx),
-                 ("QPF", self.accumMinMax),
-                 ("PoP", self.binnedPercent),
-                 ],
-            "phraseList":[
-                 self._td_phrase,
-                 self._rh_phrase,
-                 self._windChill_heatIndex_compoundPhrase,
-                 self._wind_phrase,
-                 self._wx_phrase,
-                 self._ltng_phrase,
-                 ],
-            }
-
-    def ExtendedLabel(self):
-        return {
-            "type": "component",
-            "methodList": [self.setLabel],
-            "analysisList": [],
-            "phraseList":[],
-            }
-    def setLabel(self, tree, component):
-        component.set("words", "\n.EXTENDED...\n")
-        return self.DONE()
-
-    def Extended(self):
-        return {
-            "type": "component",
-            "methodList": [
-                          self.orderPhrases,
-                          self.assemblePhrases,   
-                          self.wordWrap,          
-                          ], 
-            "analysisList": [
-                       ("MinT", self.avg),
-                       ("MaxT", self.avg),
-                       ("T", self.hourlyTemp),
-                       ("Sky", self.minMax),
-                       ("Wind", self.vectorMinMax),
-                       ("Wx", self.rankedWx),
-                       ("PoP", self.binnedPercent),
-                      ],
-            "phraseList":[
-                   self.reportTrends,
-                   self.wind_summary,
-                   self.sky_phrase,
-                   self.weather_phrase,
-                   self.lows_phrase,
-                   self.highs_phrase,
-                   self.temp_trends,
-                 ],
-            }
-
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+########################################################################
+# RecreationFcst
+#
+# This product creates a combination of text phrases for consecutive
+#   time periods for a list of edit areas.
+#
+#   Type: smart
+#   Local product:
+#      RecreationFcst_Local.py (type: smart)
+#   Associated Utility files:
+#     Combinations
+#   To customize this product for your site:
+#      Set up the Combinations file with Edit Areas and labels.
+#      Set up RecreationFcst_Local to override variables, definitions, thresholds, and methods
+#
+#   Component Product Definitions included as methods in this file:
+#      RecreationPhrases 
+#      Extended 
+#      Extended Label
+##
+##########################################################################
+#  Example Output:
+##    Recreation Statement
+
+##    Area 1
+
+##    .TODAY...
+##    DEW POINTS...       IN THE UPPER TEENS.
+##    Minimum humidity... 18.0 PERCENT.
+##    WIND CHILL...       35.0 BECOMING 24.0 IN THE NIGHT..
+##    WIND...             WEST WINDS 25 TO 35 MPH.
+##    PRECIPITATION...    DRY.
+##    LIGHTNING...        2.
+
+##    .WEDNESDAY...
+##    DEW POINTS...       IN THE MID TEENS.
+##    Minimum humidity... 20.0 PERCENT.
+##    WIND CHILL...       36.0 BECOMING 36.0 IN THE NIGHT..
+##    WIND...             WEST WINDS 25 TO 35 MPH.
+##    PRECIPITATION...    WIDESPREAD RAIN AND SNOW.
+##    LIGHTNING...        2.
+
+
+##    .THURSDAY...
+##    VERY WINDY. SUNNY. WIDESPREAD SNOW. LOWS IN THE UPPER 30S. HIGHS IN THE MID
+##    40S.
+##    .FRIDAY...
+##    SUNNY AND DRY. LOWS AROUND 40. HIGHS IN THE LOWER 40S.
+
+##    Area 2
+
+##    .TODAY...
+##    DEW POINTS...       IN THE UPPER TEENS.
+##    Minimum humidity... 18.0 PERCENT.
+##    WIND CHILL...       35.0 BECOMING 24.0 IN THE NIGHT..
+##    WIND...             WEST WINDS 25 TO 35 MPH.
+##    PRECIPITATION...    DRY.
+##    LIGHTNING...        2.
+
+##    .WEDNESDAY...
+##    DEW POINTS...       IN THE MID TEENS.
+##    Minimum humidity... 20.0 PERCENT.
+##    WIND CHILL...       36.0 BECOMING 36.0 IN THE NIGHT..
+##    WIND...             WEST WINDS 25 TO 35 MPH.
+##    PRECIPITATION...    WIDESPREAD RAIN AND SNOW.
+##    LIGHTNING...        2.
+
+
+##    .THURSDAY...
+##    VERY WINDY. SUNNY. WIDESPREAD SNOW. LOWS IN THE UPPER 30S. HIGHS IN THE MID
+##    40S.
+##    .FRIDAY...
+##    SUNNY AND DRY. LOWS AROUND 40. HIGHS IN THE LOWER 40S.
+
+import TextRules
+import SampleAnalysis
+import ForecastNarrative
+import time, string, types
+
+
+class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
+    VariableList = [
+         (("Product Title","title"), "Recreation Statement", "alphaNumeric"),
+         (("Choose Starting Time Range:", "timeRangeName"), "Tomorrow", "radio",
+             ["Today", "Tomorrow"]),
+         (("Number of days:", "numPeriods"), 2, "radio", [2, 3]),
+         # Comment out the following line if you do not want to include Extended as an option:
+         (("Extended", "extended"), "With Extended", "radio", ["Without Extended","With Extended"]),
+         ]
+    Definition =  {
+        "type": "smart",
+        "displayName": "None",
+        # Name of map background for creating Combinations
+        #"mapNameForCombinations": "Zones_", 
+        
+        ## Edit Areas
+        "defaultEditAreas" : [
+              ("area1","Area 1"),
+              ("area2","Area 2"),
+              ("area3","Area 3"),
+              ],      
+
+        # Product-specific variables: Can be overridden in the Local file
+        "extendedLabel": 1,
+        "lineLimit": 45,
+        "trace":0,
+        }
+    
+    def __init__(self):
+        TextRules.TextRules.__init__(self)
+        SampleAnalysis.SampleAnalysis.__init__(self)
+
+    def generateForecast(self, argDict):
+        # Generate Text Phrases for a list of edit areas
+
+        # Get variables
+        error = self._getVariables(argDict)
+        if error is not None:
+            return error
+
+        # Get the areaList -- derived from defaultEditAreas and
+        # may be solicited at run-time from user if desired
+        self._areaList = self.getAreaList(argDict)
+        if len(self._areaList) == 0:
+            return "WARNING -- No Edit Areas Specified to Generate Product."
+
+        # Determine time ranges
+        error = self._determineTimeRanges(argDict)
+        if error is not None:
+            return error
+
+        # Sample the data
+        error = self._sampleData(argDict)
+        if error is not None:
+            return error
+
+        # Initialize the output string
+        fcst = ""
+        fcst = self._preProcessProduct(fcst, argDict)
+
+        # Generate the product for each edit area in the list
+        for editArea, areaLabel in self._areaList:
+            fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict)
+            fcst  = self._makeProduct(fcst, editArea, areaLabel, argDict)
+            fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict)
+
+        fcst = self._postProcessProduct(fcst, argDict)
+        return fcst
+
+    def _getVariables(self, argDict):
+        # Make argDict accessible
+        self.__argDict = argDict
+
+        # Get Definition variables
+        self._definition = argDict["forecastDef"]
+        for key in list(self._definition.keys()):
+            exec("self._" + key + "= self._definition[key]")
+
+        # Get VariableList and _issuance_list variables
+        varDict = argDict["varDict"]
+        for key in list(varDict.keys()):
+            if type(key) is tuple:
+                label, variable = key
+                exec("self._" + variable + "= varDict[key]")
+                            
+        self._language = argDict["language"]
+        return None
+
+    def _determineTimeRanges(self, argDict):
+        # Set up the Narrative Definition and initial Time Range
+        self._timeRange, self._narrativeDef = self._createNarrativeDef(argDict)
+        self._definition["narrativeDef"] = self._narrativeDef
+        self._definition["methodList"] = [self.assembleChildWords]
+        return None
+
+    def _sampleData(self, argDict):
+        # Sample and analyze the data for the narrative
+        # This data will be available in argDict["narrativeData"] for text rules
+        self._narrativeProcessor = ForecastNarrative.ForecastNarrative()
+        error = self._narrativeProcessor.getNarrativeData(
+            argDict, self._definition, self._timeRange, self._areaList, None)
+        if error is not None:
+            return error
+        return None
+
+    def _preProcessProduct(self, fcst, argDict):
+        return fcst
+
+    def _preProcessArea(self, fcst, editArea, areaLabel, argDict):
+        return fcst
+
+    def _makeProduct(self, fcst, editArea, areaLabel, argDict):
+        # Generate Narrative Forecast for Edit Area
+        fcst = fcst + self._narrativeProcessor.generateForecast(
+            argDict, editArea, areaLabel)
+        return fcst
+
+    def _postProcessArea(self, fcst, editArea, areaLabel, argDict):
+        return fcst
+
+    def _postProcessProduct(self, fcst, argDict):
+        return string.upper(fcst)
+
+    ########################################################################
+    # PRODUCT-SPECIFIC METHODS
+    ########################################################################
+
+    def _createNarrativeDef(self, argDict):
+        # Determine the start time for the product and a Narrative Definition
+
+        timeRange = self.getTimeRange(self._timeRangeName, argDict)
+
+        if self._numPeriods == 2:
+            recPhrases = [24, 24]
+        else:
+            recPhrases = [24,24,24]
+        extendeds = [24, 24]
+
+        # Create the NarrativeDef
+        narrativeDef = []
+        for recPhrase in recPhrases:
+            narrativeDef.append(("RecreationPhrases",recPhrase))
+        if self._extended == "With Extended":
+            if self._extendedLabel == 1:
+                narrativeDef.append(("ExtendedLabel",0))
+            for extended in extendeds:
+                narrativeDef.append(("Extended", extended))
+        return timeRange, narrativeDef
+
+    def _td_phrase(self):
+        return {
+            "setUpMethod": self._td_setUp,
+            "wordMethod": self._td_words,
+            "phraseMethods": [self.assembleSubPhrases,
+                              self.postProcessPhrase],
+            }
+    def _td_setUp(self, tree, node):
+        td = self.ElementInfo("Td", "List")
+        elementInfoList = [td]
+        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
+        node.set("descriptor", "")
+        node.set("indentLabel", "DEW POINTS...       ")
+        return self.DONE()
+
+    def _td_words(self, tree, node):
+        statDict = node.getStatDict()
+        stats = self.getStats(statDict,"Td")
+        if stats is None:
+           return self.setWords(node, "")
+        words = self.getTempPhrase(tree, node, stats,"Td")
+        return self.setWords(node, words)
+
+    def _rh_phrase(self):
+        return {
+            "setUpMethod": self._rh_setUp,
+            "wordMethod": self._rh_words,
+            "phraseMethods": [self.assembleSubPhrases,
+                              self.postProcessPhrase],
+            }
+    def _rh_setUp(self, tree, node):
+        rh = self.ElementInfo("RH", "List")
+        elementInfoList = [rh]
+        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
+        node.set("descriptor", "")
+        node.set("indentLabel", "Minimum humidity... ")
+        return self.DONE()
+
+    def _rh_words(self, tree, node):
+        statDict = node.getStatDict()
+        stats = self.getStats(statDict,"RH")
+        if stats is None:
+           return self.setWords(node, "")
+        min, max = stats
+        ten = int(min / 10) * 10
+        digit = min % 10
+        if digit <= 3:
+           RH1 = ten
+        if digit > 3 or digit <= 9:
+           RH1 = ten + 5
+        RH2 = RH1 + 10
+        words = repr(RH1) + " to " + repr(RH2) + " percent"
+        return self.setWords(node, words)
+
+    def _windChill_heatIndex_compoundPhrase(self):
+        return {
+            "phraseList": [
+                self.windChill_phrase,
+                self.heatIndex_phrase,
+                ],
+            "phraseMethods": [
+                self.assembleSentences,
+                self._windChill_heatIndex_finishUp,
+                ],
+            }
+    def _windChill_heatIndex_finishUp(self, tree, node):
+        words = node.get("words")
+        if words is None:
+            return
+        if words == "":
+            words = "not a factor"
+        node.set("descriptor", "")
+        statsWC = tree.stats.get("WindChill", node.getTimeRange(),
+                                 node.getAreaLabel(), mergeMethod="Min")
+        if statsWC is not None and \
+            statsWC < self.windChill_threshold(tree, node):
+            node.set("indentLabel", "WIND CHILL...       ")
+        else:
+           statsHI = tree.stats.get("HeatIndex", node.getTimeRange(),
+                                    node.getAreaLabel(), mergeMethod="Max")
+           if statsHI is not None and \
+              statsHI > self.heatIndex_threshold(tree, node):
+              node.set("indentLabel", "Heat index...       ")
+           else:
+              node.set("indentLabel", "")
+              words = ""
+        node.set("compound", 1)
+        return self.setWords(node, words)
+ 
+    def _wind_phrase(self):
+        return {
+            "setUpMethod": self._wind_setUp,
+            "wordMethod": self._wind_words,
+            "phraseMethods": [self.assembleSubPhrases,
+                              self.postProcessPhrase],
+            }
+    def _wind_setUp(self, tree, node):
+        wind = self.ElementInfo("Wind", "List")
+        elementInfoList = [wind]
+        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
+        node.set("descriptor", "")
+        node.set("indentLabel", "WIND...             ")
+        return self.DONE()
+
+    def _wind_words(self, tree, node):
+        statDict = node.getStatDict()
+        stats = self.getStats(statDict,"Wind")
+        if stats is None:
+           return self.setWords(node, "")
+        elementInfo = node.getAncestor("firstElement")
+        if elementInfo is None:
+            return self.setWords(node, "")
+        words = self.simple_vector_phrase(tree, node, elementInfo)
+        if words == "null":
+            return self.setWords(node, "null")
+        maxWind, dir = self.getValue(stats, "Max", self.VECTOR())
+        chopphrase = ""
+        if maxWind >= 26.1:
+           chopphrase = "Heavy chop expected on area rivers and lakes"
+        elif maxWind >= 21.7:
+           chopphrase = "Moderate chop expected on area rivers and lakes"
+        elif maxWind >= 17.4:
+           chopphrase = "Light chop expected on area rivers and lakes"
+        if chopphrase != "":
+           words = words + ".  " + chopphrase 
+        return self.setWords(node, words)
+
+    def _wx_phrase(self):
+        return {
+            "setUpMethod": self._wx_setUp,
+            "wordMethod": self._wx_words,
+            "phraseMethods": [self.assembleSubPhrases,
+                              self.postProcessPhrase],
+            }
+    def _wx_setUp(self, tree, node):
+        wx = self.ElementInfo("Wx", "List")
+        qpf = self.ElementInfo("QPF", "MinMax")
+        elementInfoList = [wx]
+        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
+        node.set("descriptor", "")
+        node.set("indentLabel", "PRECIPITATION...    ")
+        return self.DONE()
+
+    def _wx_words(self, tree, node):
+        statDict = node.getStatDict()
+        stats = self.getStats(statDict,"Wx")
+        if stats is None:
+           return self.setWords(node, "")
+        self.weather_words(tree, node)
+        WXwords = node.get("words")
+        statsQ = self.getStats(statDict, "QPF")
+        if statsQ is None:
+           words = WXwords
+        else:
+           QPFrange0 = str(round(statsQ[0],2))
+           QPFrange1 = str(round(statsQ[1],2))
+           #print QPFrange1
+           if ((QPFrange0 == "0.0" and QPFrange1 == "0.0") or (string.find(WXwords, "dry") != -1)):
+              #print "Found dry weather"
+              QPFwords = "\n"
+              words =  WXwords
+           elif (QPFrange0 == "0.0"):
+              QPFwords = "Amounts up to " + QPFrange1 + " of an inch"
+              words = WXwords + ".  " + QPFwords
+           else:
+              QPFwords = "Amounts between " + QPFrange0 + " and " + QPFrange1 + " of an inch"
+              words = WXwords + ".  " + QPFwords
+        return self.setWords(node, words)
+
+    def _ltng_phrase(self):
+        return {
+            "setUpMethod": self._ltng_setUp,
+            "wordMethod": self._ltng_words,
+            "phraseMethods": [self.assembleSubPhrases,
+                              self.postProcessPhrase],
+            }
+    def _ltng_setUp(self, tree, node):
+        wx = self.ElementInfo("Wx", "List")
+        elementInfoList = [wx]
+        self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) 
+        node.set("descriptor", "")
+        node.set("indentLabel", "LIGHTNING...        ")
+        return self.DONE()
+
+    def _ltng_words(self, tree, node):
+        statDict = node.getStatDict()
+        stats = self.getStats(statDict,"Wx")
+        if stats is None:
+           return self.setWords(node, "NONE.")
+        words = ""
+        for subkey, rank in stats:
+           wxType = subkey.wxType()
+           if wxType == "T":
+              cov = subkey.coverage()
+              if cov in ["Num", "Wide", "Ocnl", "Brf", "Frq", "Pds", "Inter", "Lkly", "Def"]:
+                 words = "likely"
+              elif cov in ["Sct", "Chc"] and words not in ["likely"]:
+                 words = "scattered"
+              elif cov in ["Iso", "SChc"] and words not in ["likely", "scattered"]:
+                 words = "isolated"
+              elif words not in ["likely", "scattered", "isolated"]:
+                 words = "possible"
+           elif words not in ["likely", "scattered", "isolated", "possible"]:
+              words = "none"
+        #print words
+        return self.setWords(node, words)
+
+    ########################################################################
+    # OVERRIDING THRESHOLDS AND VARIABLES
+    ########################################################################
+
+    # SampleAnalysis overrides
+    def temporalCoverage_percentage(self, parmHisto, timeRange, componentName):
+        return 15.0
+
+    def temporalCoverage_dict(self, parmHisto, timeRange, componentName):
+        return {
+            "LAL": 0,
+            "MinRH": 0,
+            "MaxRH": 0,
+            "MinT": 10,
+            "MaxT": 10,
+            "Haines": 0,
+            "PoP" : 50,
+            }
+
+    # TextRules overrides
+    def pop_wx_lower_threshold(self, tree, node):
+        # Pop-related Wx will not be reported if Pop is below this threshold
+        return 20
+
+    def phrase_descriptor_dict(self, tree, node):
+        # Descriptors for phrases
+        dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node)
+        dict["DEW POINTS...       "] = "DEW POINTS.........."
+        dict["Minimum humidity... "] = "Minimum humidity...."
+        dict["WIND CHILL...       "] = "WIND CHILL.........."
+        dict["Heat index...       "] = "Heat index.........."
+        dict["WIND...             "] = "WIND................"
+        dict["PRECIPITATION...    "] = "PRECIPITATION......."
+        dict["LIGHTNING...        "] = "LIGHTNING..........."
+        dict["HeatIndex"] = ""
+        dict["WindChill"] = ""
+        return dict
+    
+
+    ########################################################################
+    # OVERRIDING METHODS
+    ########################################################################
+
+    ########################################################################
+    # COMPONENT PRODUCT DEFINITIONS
+    ########################################################################
+
+    def RecreationPhrases(self):
+        return {
+            "type": "component",
+            "methodList": [
+                          self.assembleIndentedPhrases,         
+                          ], 
+            "analysisList": [
+                 ("Td", self.avg),
+                 ("RH", self.minMax),
+                 ("T", self.minMax),
+                 ("WindChill", self.minMax, [12]),
+                 ("HeatIndex", self.minMax, [12]),
+                 ("Wind", self.vectorMinMax),
+                 ("Wx", self.rankedWx),
+                 ("QPF", self.accumMinMax),
+                 ("PoP", self.binnedPercent),
+                 ],
+            "phraseList":[
+                 self._td_phrase,
+                 self._rh_phrase,
+                 self._windChill_heatIndex_compoundPhrase,
+                 self._wind_phrase,
+                 self._wx_phrase,
+                 self._ltng_phrase,
+                 ],
+            }
+
+    def ExtendedLabel(self):
+        return {
+            "type": "component",
+            "methodList": [self.setLabel],
+            "analysisList": [],
+            "phraseList":[],
+            }
+    def setLabel(self, tree, component):
+        component.set("words", "\n.EXTENDED...\n")
+        return self.DONE()
+
+    def Extended(self):
+        return {
+            "type": "component",
+            "methodList": [
+                          self.orderPhrases,
+                          self.assemblePhrases,   
+                          self.wordWrap,          
+                          ], 
+            "analysisList": [
+                       ("MinT", self.avg),
+                       ("MaxT", self.avg),
+                       ("T", self.hourlyTemp),
+                       ("Sky", self.minMax),
+                       ("Wind", self.vectorMinMax),
+                       ("Wx", self.rankedWx),
+                       ("PoP", self.binnedPercent),
+                      ],
+            "phraseList":[
+                   self.reportTrends,
+                   self.wind_summary,
+                   self.sky_phrase,
+                   self.weather_phrase,
+                   self.lows_phrase,
+                   self.highs_phrase,
+                   self.temp_trends,
+                 ],
+            }
+
diff --git a/cave/com.raytheon.viz.gfe/python/testFormatters/SmartElementTable.py b/cave/com.raytheon.viz.gfe/python/testFormatters/SmartElementTable.py
index bef58df5b8..f9e52a7077 100644
--- a/cave/com.raytheon.viz.gfe/python/testFormatters/SmartElementTable.py
+++ b/cave/com.raytheon.viz.gfe/python/testFormatters/SmartElementTable.py
@@ -1,310 +1,310 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-#-------------------------------------------------------------------------
-# Description: This product creates a Smart Element Table.
-#  The possible elements are Temperature (MaxT, MinT), Humidity (MinRH, MaxRH), and PoP
-#-------------------------------------------------------------------------
-# Copying:
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#-------------------------------------------------------------------------
-# Standard and Local file names and Locations:
-# MultipleElementTableTable, MultipleElementTable_Local, MultipleElementTable_Aux_Local
-#-------------------------------------------------------------------------
-# User Configurable Variables:
-#-------------------------------------------------------------------------
-# Weather Elements Needed:
-#-------------------------------------------------------------------------
-# Edit Areas Needed:
-#-------------------------------------------------------------------------
-# Associated Utilities Files e.g. Combinations file:
-#-------------------------------------------------------------------------
-# Component Products:
-#-------------------------------------------------------------------------
-# Programmers and Support including product team leader's email:
-#-------------------------------------------------------------------------
-# Development tasks that are identified and in progress:
-#-------------------------------------------------------------------------
-# Additional Information:
-#-------------------------------------------------------------------------
-
-import TextRules
-import SampleAnalysis
-import string, time, types
-
-class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
-    VariableList = [
-             ("Forecast Product" , "Morning", "radio",
-              ["Morning","Afternoon"]),
-            ]
-    Definition =  {
-        "type": "smart",
-        "displayName": "None",
-        "outputFile": "/awips/GFESuite/products/TEXT/SmartElementTable.txt",
-        "defaultEditAreas": [
-            ("area1","AREA 1"),
-            ("area2","AREA 2"),
-            ("area3","AREA 3"),
-            ],
-        # Product-specific variables
-        "regionList" : [
-            ("/33",["AREA 1","AREA 2"]),
-            ("/19",["AREA 3"])
-            ],
-        # Possible elements are:
-        #   "Temp"  -- lists MaxT for daytime, MinT for nighttime
-        #   "PoP"
-        #   "Humidity"  -- lists MinRH for daytime, MaxRH for nighttime
-        "elementList" : ["Temp", "PoP"],
-        # If set to 1, only one value for each element is listed
-        "includeTitle": 1,
-        "introLetters": ".<",
-        }
-
-    def __init__(self):
-        TextRules.TextRules.__init__(self)
-        SampleAnalysis.SampleAnalysis.__init__(self)
-
-    def generateForecast(self, argDict):
-        # Generate formatted product for a list of edit areas
-
-        # Get variables from varDict and Definition
-        self._getVariables(argDict)
-
-        # Get the areaList -- derived from defaultEditAreas and
-        # may be solicited at run-time from the user if desired
-        self._areaList = self.getAreaList(argDict)
-
-        # Determine time ranges for which the data will be sampled
-        self._determineTimeRanges(argDict)
-
-        # Sample the data
-        self._sampleData(argDict)
-
-        # Initialize the output string
-        fcst = ""
-        fcst = self._preProcessProduct(fcst, argDict)
-
-        # Generate the product for each edit area in the list
-        for editArea, areaLabel in self._areaList:
-            fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict)
-            fcst  = self._makeProduct(fcst, editArea, areaLabel, argDict)
-            fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict)
-
-        fcst = self._postProcessProduct(fcst, argDict)
-        return fcst
-
-    def _getVariables(self, argDict):
-        # Determine whether Morning or Afternoon product type
-        varDict = argDict["varDict"]
-        self._productType = varDict["Forecast Product"]
-
-        # Make argDict accessible
-        self.__argDict = argDict
-        
-        # Set up any other product-specific variables from the Definition
-        self._definition = argDict["forecastDef"]
-        for key in self._definition.keys():
-            exec "self._" + key + "= self._definition[key]"
-
-        self._currentRegion = None
-
-        # The analysisList tells which weather elements and statistics
-        # are desired for the product.
-        self._analysisList = self._getAnalysisList()
- 
-    def _determineTimeRanges(self, argDict):
-        # Determine time ranges for product
-        # Sets up self._timeRangeList
-
-        if self._productType == "Morning":
-            timeRange = self.getTimeRange("Today", argDict)
-            numPeriods = 3
-        else:
-            timeRange = self.getTimeRange("Tonight", argDict)
-            numPeriods = 4
-            
-        self._timeRangeList = self.getPeriods(timeRange, 12, 12, numPeriods)
-        return
-
-    def _sampleData(self, argDict):
-        # Sample the data
-        self._sampler = self.getSampler(argDict, 
-          (self._analysisList, self._timeRangeList, self._areaList))
-        return
-
-    def _preProcessProduct(self, fcst, argDict):
-        # Set up format spacing and title line spacing
-        
-        numElements = len(self._elementList)
-        if numElements > 2:
-            self._spaceStr = ""
-        else:
-            self._spaceStr = "   "
-        if self._includeTitle == 0:
-            return fcst
-            
-        self._titles = self._titleDict()
-        if numElements > 2:
-            if self._productType == "Morning":
-                self._headingLen = 15
-            else:
-                self._headingLen = 19
-        else:
-            if self._productType == "Morning":
-                self._headingLen = 21
-            else:
-                self._headingLen = 28
-                    
-        # Create title line
-        title = self._introLetters + "        "
-        index = 0
-        for element in self._elementList:
-            title = title + string.center(
-                    self._titles[element], self._headingLen)
-            if index < len(self._elementList)-1:
-                title = title + "/"
-            index += 1
-        return fcst + title + "\n"
-
-    def _preProcessArea(self, fcst, editArea, areaLabel, argDict):
-        # If we are in a new region, add region header
-        for region, areaList in self._regionList:
-            if areaLabel in areaList:
-                break
-        if region != self._currentRegion:
-            if self._currentRegion is not None:
-                # End the Region
-                fcst = fcst + "\n$$\n\n"
-            self._currentRegion = region
-            fcst = fcst + region
-
-        return fcst + "\n" + string.ljust(areaLabel, 10)
-
-    def _makeProduct(self, fcst, editArea, areaLabel, argDict):
-        # Get the Statistics
-        statList = self.getStatList(self._sampler, self._analysisList,
-                                     self._timeRangeList, editArea)
-        
-        numElements = len(self._elementList)
-        index = 0
-        for element in self._elementList:
-            exec "fcst = fcst + self._get" + element + \
-                 "Values(statList, argDict)"
-            if index < numElements-1:
-                fcst = fcst + "  /"
-            index += 1
-
-        return fcst
-
-    def _postProcessArea(self, fcst, editArea, areaLabel, argDict):
-        return fcst
-
-    def _postProcessProduct(self, fcst, argDict):
-        fcst = fcst + "\n"
-        return fcst
-
-    ########################################################################
-    # PRODUCT-SPECIFIC METHODS
-    ########################################################################
-
-    def _getAnalysisList(self):
-      return [
-          ("MinT", self.avg),
-          ("MaxT", self.avg),
-          ("MinRH", self.avg),
-          ("MaxRH", self.avg),
-          ("PoP", self.stdDevMaxAvg),
-          ]
-
-    def _titleDict(self):
-        return {
-            "Temp":    "TEMPERATURE",
-            "PoP":     "PRECIPITATION",
-            "Humidity":"HUMIDITY",
-            }
-
-    def _getTempValues(self, statList, argDict):
-       # Return a string of Temperature values given statList
-       stats1 = statList[0]
-       if self._productType == "Morning":
-           stats2 = statList[1]
-           stats3 = statList[2]
-           t1 = self.getScalarVal(stats1["MaxT"])
-           t2 = self.getScalarVal(stats2["MinT"])
-           t3 = self.getScalarVal(stats3["MaxT"])
-           str =  " " + t1+ self._spaceStr +t2+ self._spaceStr +t3
-           return str
-       else:
-           stats2 = statList[1]
-           stats3 = statList[2]
-           stats4 = statList[3]
-           t1 = self.getScalarVal(stats1["MinT"])
-           t2 = self.getScalarVal(stats2["MaxT"])
-           t3 = self.getScalarVal(stats3["MinT"])
-           t4 = self.getScalarVal(stats4["MaxT"])
-           str = " " +t1+ self._spaceStr +t2+ self._spaceStr +t3+ \
-                 self._spaceStr+t4
-           return str
- 
-    def _getHumidityValues(self, statList, argDict):
-       # Return a string of Humidity values given statList
-       stats1 = statList[0]
-       if self._productType == "Morning":
-           stats2 = statList[1]
-           stats3 = statList[2]
-           t1 = self.getScalarVal(stats1["MinRH"])
-           t2 = self.getScalarVal(stats2["MaxRH"])
-           t3 = self.getScalarVal(stats3["MinRH"])
-           return " " +t1+ self._spaceStr +t2+ self._spaceStr+t3
-       else:
-           stats2 = statList[1]
-           stats3 = statList[2]
-           stats4 = statList[3]
-           t1 = self.getScalarVal(stats1["MaxRH"])
-           t2 = self.getScalarVal(stats2["MinRH"])
-           t3 = self.getScalarVal(stats3["MaxRH"])
-           t4 = self.getScalarVal(stats4["MinRH"])
-           return " " +t1+ self._spaceStr +t2+ self._spaceStr +t3+ self._spaceStr +t4
-
-    def _getPoPValues(self, statList, argDict):
-       # Return a string of PoP values in the statList
-       pop = []
-       popStr = ""
-       index = 0
-       for stats in statList:
-           val = self._getPoPValue(stats)
-           if index < len(statList)-1:
-               popStr = popStr  + val + self._spaceStr
-           else:
-               popStr = popStr + val
-           index += 1
-       popStr = popStr + " "
-       return popStr 
-
-    def _getPoPValue(self, stats):
-       pop = self.getStats(stats,"PoP")
-       if pop is None:
-           val = "    "
-       else:
-           max = self.round(pop, "Nearest", 10)
-           val = self.getScalarVal(max)
-       return val
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+#-------------------------------------------------------------------------
+# Description: This product creates a Smart Element Table.
+#  The possible elements are Temperature (MaxT, MinT), Humidity (MinRH, MaxRH), and PoP
+#-------------------------------------------------------------------------
+# Copying:
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#-------------------------------------------------------------------------
+# Standard and Local file names and Locations:
+# MultipleElementTableTable, MultipleElementTable_Local, MultipleElementTable_Aux_Local
+#-------------------------------------------------------------------------
+# User Configurable Variables:
+#-------------------------------------------------------------------------
+# Weather Elements Needed:
+#-------------------------------------------------------------------------
+# Edit Areas Needed:
+#-------------------------------------------------------------------------
+# Associated Utilities Files e.g. Combinations file:
+#-------------------------------------------------------------------------
+# Component Products:
+#-------------------------------------------------------------------------
+# Programmers and Support including product team leader's email:
+#-------------------------------------------------------------------------
+# Development tasks that are identified and in progress:
+#-------------------------------------------------------------------------
+# Additional Information:
+#-------------------------------------------------------------------------
+
+import TextRules
+import SampleAnalysis
+import string, time, types
+
+class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis):
+    VariableList = [
+             ("Forecast Product" , "Morning", "radio",
+              ["Morning","Afternoon"]),
+            ]
+    Definition =  {
+        "type": "smart",
+        "displayName": "None",
+        "outputFile": "/awips/GFESuite/products/TEXT/SmartElementTable.txt",
+        "defaultEditAreas": [
+            ("area1","AREA 1"),
+            ("area2","AREA 2"),
+            ("area3","AREA 3"),
+            ],
+        # Product-specific variables
+        "regionList" : [
+            ("/33",["AREA 1","AREA 2"]),
+            ("/19",["AREA 3"])
+            ],
+        # Possible elements are:
+        #   "Temp"  -- lists MaxT for daytime, MinT for nighttime
+        #   "PoP"
+        #   "Humidity"  -- lists MinRH for daytime, MaxRH for nighttime
+        "elementList" : ["Temp", "PoP"],
+        # If set to 1, only one value for each element is listed
+        "includeTitle": 1,
+        "introLetters": ".<",
+        }
+
+    def __init__(self):
+        TextRules.TextRules.__init__(self)
+        SampleAnalysis.SampleAnalysis.__init__(self)
+
+    def generateForecast(self, argDict):
+        # Generate formatted product for a list of edit areas
+
+        # Get variables from varDict and Definition
+        self._getVariables(argDict)
+
+        # Get the areaList -- derived from defaultEditAreas and
+        # may be solicited at run-time from the user if desired
+        self._areaList = self.getAreaList(argDict)
+
+        # Determine time ranges for which the data will be sampled
+        self._determineTimeRanges(argDict)
+
+        # Sample the data
+        self._sampleData(argDict)
+
+        # Initialize the output string
+        fcst = ""
+        fcst = self._preProcessProduct(fcst, argDict)
+
+        # Generate the product for each edit area in the list
+        for editArea, areaLabel in self._areaList:
+            fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict)
+            fcst  = self._makeProduct(fcst, editArea, areaLabel, argDict)
+            fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict)
+
+        fcst = self._postProcessProduct(fcst, argDict)
+        return fcst
+
+    def _getVariables(self, argDict):
+        # Determine whether Morning or Afternoon product type
+        varDict = argDict["varDict"]
+        self._productType = varDict["Forecast Product"]
+
+        # Make argDict accessible
+        self.__argDict = argDict
+        
+        # Set up any other product-specific variables from the Definition
+        self._definition = argDict["forecastDef"]
+        for key in list(self._definition.keys()):
+            exec("self._" + key + "= self._definition[key]")
+
+        self._currentRegion = None
+
+        # The analysisList tells which weather elements and statistics
+        # are desired for the product.
+        self._analysisList = self._getAnalysisList()
+ 
+    def _determineTimeRanges(self, argDict):
+        # Determine time ranges for product
+        # Sets up self._timeRangeList
+
+        if self._productType == "Morning":
+            timeRange = self.getTimeRange("Today", argDict)
+            numPeriods = 3
+        else:
+            timeRange = self.getTimeRange("Tonight", argDict)
+            numPeriods = 4
+            
+        self._timeRangeList = self.getPeriods(timeRange, 12, 12, numPeriods)
+        return
+
+    def _sampleData(self, argDict):
+        # Sample the data
+        self._sampler = self.getSampler(argDict, 
+          (self._analysisList, self._timeRangeList, self._areaList))
+        return
+
+    def _preProcessProduct(self, fcst, argDict):
+        # Set up format spacing and title line spacing
+        
+        numElements = len(self._elementList)
+        if numElements > 2:
+            self._spaceStr = ""
+        else:
+            self._spaceStr = "   "
+        if self._includeTitle == 0:
+            return fcst
+            
+        self._titles = self._titleDict()
+        if numElements > 2:
+            if self._productType == "Morning":
+                self._headingLen = 15
+            else:
+                self._headingLen = 19
+        else:
+            if self._productType == "Morning":
+                self._headingLen = 21
+            else:
+                self._headingLen = 28
+                    
+        # Create title line
+        title = self._introLetters + "        "
+        index = 0
+        for element in self._elementList:
+            title = (title +
+                    self._titles[element].center(self._headingLen))
+            if index < len(self._elementList)-1:
+                title = title + "/"
+            index += 1
+        return fcst + title + "\n"
+
+    def _preProcessArea(self, fcst, editArea, areaLabel, argDict):
+        # If we are in a new region, add region header
+        for region, areaList in self._regionList:
+            if areaLabel in areaList:
+                break
+        if region != self._currentRegion:
+            if self._currentRegion is not None:
+                # End the Region
+                fcst = fcst + "\n$$\n\n"
+            self._currentRegion = region
+            fcst = fcst + region
+
+        return fcst + "\n" + areaLabel.ljust(10)
+
+    def _makeProduct(self, fcst, editArea, areaLabel, argDict):
+        # Get the Statistics
+        statList = self.getStatList(self._sampler, self._analysisList,
+                                     self._timeRangeList, editArea)
+        
+        numElements = len(self._elementList)
+        index = 0
+        for element in self._elementList:
+            fcst = fcst + getattr(
+                self, "_get" + element + "Values")(statList, argDict)
+            if index < numElements-1:
+                fcst = fcst + "  /"
+            index += 1
+
+        return fcst
+
+    def _postProcessArea(self, fcst, editArea, areaLabel, argDict):
+        return fcst
+
+    def _postProcessProduct(self, fcst, argDict):
+        fcst = fcst + "\n"
+        return fcst
+
+    ########################################################################
+    # PRODUCT-SPECIFIC METHODS
+    ########################################################################
+
+    def _getAnalysisList(self):
+      return [
+          ("MinT", self.avg),
+          ("MaxT", self.avg),
+          ("MinRH", self.avg),
+          ("MaxRH", self.avg),
+          ("PoP", self.stdDevMaxAvg),
+          ]
+
+    def _titleDict(self):
+        return {
+            "Temp":    "TEMPERATURE",
+            "PoP":     "PRECIPITATION",
+            "Humidity":"HUMIDITY",
+            }
+
+    def _getTempValues(self, statList, argDict):
+       # Return a string of Temperature values given statList
+       stats1 = statList[0]
+       if self._productType == "Morning":
+           stats2 = statList[1]
+           stats3 = statList[2]
+           t1 = self.getScalarVal(stats1["MaxT"])
+           t2 = self.getScalarVal(stats2["MinT"])
+           t3 = self.getScalarVal(stats3["MaxT"])
+           str =  " " + t1+ self._spaceStr +t2+ self._spaceStr +t3
+           return str
+       else:
+           stats2 = statList[1]
+           stats3 = statList[2]
+           stats4 = statList[3]
+           t1 = self.getScalarVal(stats1["MinT"])
+           t2 = self.getScalarVal(stats2["MaxT"])
+           t3 = self.getScalarVal(stats3["MinT"])
+           t4 = self.getScalarVal(stats4["MaxT"])
+           str = " " +t1+ self._spaceStr +t2+ self._spaceStr +t3+ \
+                 self._spaceStr+t4
+           return str
+ 
+    def _getHumidityValues(self, statList, argDict):
+       # Return a string of Humidity values given statList
+       stats1 = statList[0]
+       if self._productType == "Morning":
+           stats2 = statList[1]
+           stats3 = statList[2]
+           t1 = self.getScalarVal(stats1["MinRH"])
+           t2 = self.getScalarVal(stats2["MaxRH"])
+           t3 = self.getScalarVal(stats3["MinRH"])
+           return " " +t1+ self._spaceStr +t2+ self._spaceStr+t3
+       else:
+           stats2 = statList[1]
+           stats3 = statList[2]
+           stats4 = statList[3]
+           t1 = self.getScalarVal(stats1["MaxRH"])
+           t2 = self.getScalarVal(stats2["MinRH"])
+           t3 = self.getScalarVal(stats3["MaxRH"])
+           t4 = self.getScalarVal(stats4["MinRH"])
+           return " " +t1+ self._spaceStr +t2+ self._spaceStr +t3+ self._spaceStr +t4
+
+    def _getPoPValues(self, statList, argDict):
+       # Return a string of PoP values in the statList
+       pop = []
+       popStr = ""
+       index = 0
+       for stats in statList:
+           val = self._getPoPValue(stats)
+           if index < len(statList)-1:
+               popStr = popStr  + val + self._spaceStr
+           else:
+               popStr = popStr + val
+           index += 1
+       popStr = popStr + " "
+       return popStr 
+
+    def _getPoPValue(self, stats):
+       pop = self.getStats(stats,"PoP")
+       if pop is None:
+           val = "    "
+       else:
+           max = self.round(pop, "Nearest", 10)
+           val = self.getScalarVal(max)
+       return val
diff --git a/cave/com.raytheon.viz.gfe/python/utility/loadConfig.py b/cave/com.raytheon.viz.gfe/python/utility/loadConfig.py
index 775b9b90c8..3e096459bf 100644
--- a/cave/com.raytheon.viz.gfe/python/utility/loadConfig.py
+++ b/cave/com.raytheon.viz.gfe/python/utility/loadConfig.py
@@ -1,98 +1,98 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+#
+# Loads a gfe config file into a hashmap
+#  
+#    
+#     SOFTWARE HISTORY
+#    
+#    Date            Ticket#       Engineer       Description
+#    ------------    ----------    -----------    --------------------------
+#    12/11/09                      njensen        Initial Creation.
+#    04/02/2014        #2729       randerso       Fixed error handling in loadPreferences
 # 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# Loads a gfe config file into a hashmap
-#  
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    12/11/09                      njensen        Initial Creation.
-#    04/02/2014        #2729       randerso       Fixed error handling in loadPreferences
-# 
-#
-
-import types
-from java.util import HashMap, ArrayList
-from java.lang import String, Float, Integer, Boolean
-
-def loadPreferences(config):
-    try:
-        # import the config file
-        if type(config) is types.StringType:
-            configName = config
-            mod = __import__(config)
-        elif type(config) is types.ModuleType:
-            configName = config.__name__            
-            mod = config
-
-        globals = getGlobals(mod)
-
-        from com.raytheon.viz.gfe import Activator, PythonPreferenceStore
-        prefs = PythonPreferenceStore(globals)
-        Activator.getDefault().setPreferenceStore(prefs)
-        return prefs
-    except Exception, e:
-        import LogStream
-        import traceback
-        LogStream.logProblem("Unknown or invalid config file: %s\n%s" % (configName, traceback.format_exc()))
-        raise Exception, e
-        
-
-def loadConfig(configName):    
-    mod = __import__(configName)
-    return getGlobals(mod)
-
-def getGlobals(mod):
-    mp = HashMap()
-    for attrName in mod.__dict__:        
-        if not attrName.startswith('__'):
-            attr = mod.__getattribute__(attrName)
-            t = type(attr)
-            if t is not list:
-                if t is str:
-                    mp.put(attrName, attr)
-                elif t is int:
-                    mp.put(attrName, Integer(attr))
-                elif t is float:
-                    mp.put(attrName, Float(attr))
-                elif t is bool:
-                    mp.put(attrName, Boolean(attr))
-            else:
-                arr = None                
-                if len(attr) > 0:
-                    t = type(attr[0])
-                    if t is int:        
-                        arr = __fillArray(attr, Integer)
-                    elif t is float:
-                        arr = __fillArray(attr, Float)
-                    elif t is str:                        
-                        arr = __fillArray(attr, String)
-                mp.put(attrName, arr)
-    return mp
-
-def __fillArray(pylist, jclz):    
-    sz = len(pylist)
-    jlist = ArrayList(sz)
-    for i in range(sz):        
-        jlist.add(jclz(pylist[i]))
+#
+
+import types
+from java.util import HashMap, ArrayList
+from java.lang import String, Float, Integer, Boolean
+
+def loadPreferences(config):
+    try:
+        # import the config file
+        if type(config) is str:
+            configName = config
+            mod = __import__(config)
+        elif type(config) is types.ModuleType:
+            configName = config.__name__            
+            mod = config
+
+        globals = getGlobals(mod)
+
+        from com.raytheon.viz.gfe import Activator, PythonPreferenceStore
+        prefs = PythonPreferenceStore(globals)
+        Activator.getDefault().setPreferenceStore(prefs)
+        return prefs
+    except Exception as e:
+        import LogStream
+        import traceback
+        LogStream.logProblem("Unknown or invalid config file: %s\n%s" % (configName, traceback.format_exc()))
+        raise Exception(e)
+        
+
+def loadConfig(configName):    
+    mod = __import__(configName)
+    return getGlobals(mod)
+
+def getGlobals(mod):
+    mp = HashMap()
+    for attrName in mod.__dict__:        
+        if not attrName.startswith('__'):
+            attr = mod.__getattribute__(attrName)
+            t = type(attr)
+            if t is not list:
+                if t is str:
+                    mp.put(attrName, attr)
+                elif t is int:
+                    mp.put(attrName, Integer(attr))
+                elif t is float:
+                    mp.put(attrName, Float(attr))
+                elif t is bool:
+                    mp.put(attrName, Boolean(attr))
+            else:
+                arr = None                
+                if len(attr) > 0:
+                    t = type(attr[0])
+                    if t is int:        
+                        arr = __fillArray(attr, Integer)
+                    elif t is float:
+                        arr = __fillArray(attr, Float)
+                    elif t is str:                        
+                        arr = __fillArray(attr, String)
+                mp.put(attrName, arr)
+    return mp
+
+def __fillArray(pylist, jclz):    
+    sz = len(pylist)
+    jlist = ArrayList(sz)
+    for i in range(sz):        
+        jlist.add(jclz(pylist[i]))
     return jlist
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.pointdata/localization/pointdata/HoursRefTimePointDataRetrieve.py b/cave/com.raytheon.viz.pointdata/localization/pointdata/HoursRefTimePointDataRetrieve.py
index 8083ea4d47..cbd09b8009 100644
--- a/cave/com.raytheon.viz.pointdata/localization/pointdata/HoursRefTimePointDataRetrieve.py
+++ b/cave/com.raytheon.viz.pointdata/localization/pointdata/HoursRefTimePointDataRetrieve.py
@@ -1,76 +1,76 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-
-import PointDataView, PointDataContainer, NoDataException, RefTimePointDataRetrieve
-
-#
-# Python module to request reference time point data.  Split out of
-# PointDataContainer.py.
-#  
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    25Apr2012       14688         rferrel        Initial Creation.
-#    
-# 
-#
-    
-##
-# This is a base file that is not intended to be overridden.
-##
-
-class HoursRefTimePointDataRetrieve(RefTimePointDataRetrieve.RefTimePointDataRetrieve):    
-
-    def __init__(self, pluginName, site, parameters, keyId='forecastHr', refTime=None, constraint={}, maxSize=99):
-        super(HoursRefTimePointDataRetrieve, self).__init__(pluginName, site, parameters, keyId, refTime, constraint, maxSize)     
-
-    def _createJarray(self, availableTimes, numHours):  
-        from java.util import Date
-        from com.raytheon.uf.common.time import DataTime
-        import jep, time
-        #Get a DataTime numHours from current time
-        stTime = long(time.time()) * 1000
-        stTime -= numHours * (60 * 60 * 1000)
-        stDateTime = DataTime(Date(stTime))
-        length = len(availableTimes)
-        xdts = []
-        for i in range(length) :
-            d = DataTime(availableTimes[length-1-i])
-            if d.greaterThan(stDateTime) :
-                xdts.append(d)
-            else :
-                 break
-        sz = len(xdts)
-        dts = jep.jarray(sz, DataTime)
-        i = 0
-        for d in xdts:
-            dts[i] = d
-            i += 1
-        return dts
-   
-def retrieve(pluginName, site, parameters, keyId='forecastHr', refTime=None, constraint={}, maxSize=99):
-    ret = HoursRefTimePointDataRetrieve(pluginName, site, parameters, keyId, refTime, constraint, maxSize)
-    return ret.pdc
-
-    
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+

+

+import PointDataView, PointDataContainer, NoDataException, RefTimePointDataRetrieve

+

+#

+# Python module to request reference time point data.  Split out of

+# PointDataContainer.py.

+#  

+#    

+#     SOFTWARE HISTORY

+#    

+#    Date            Ticket#       Engineer       Description

+#    ------------    ----------    -----------    --------------------------

+#    25Apr2012       14688         rferrel        Initial Creation.

+#    

+# 

+#

+    

+##

+# This is a base file that is not intended to be overridden.

+##

+

+class HoursRefTimePointDataRetrieve(RefTimePointDataRetrieve.RefTimePointDataRetrieve):    

+

+    def __init__(self, pluginName, site, parameters, keyId='forecastHr', refTime=None, constraint={}, maxSize=99):

+        super(HoursRefTimePointDataRetrieve, self).__init__(pluginName, site, parameters, keyId, refTime, constraint, maxSize)     

+

+    def _createJarray(self, availableTimes, numHours):  

+        from java.util import Date

+        from com.raytheon.uf.common.time import DataTime

+        import jep, time

+        #Get a DataTime numHours from current time

+        stTime = int(time.time()) * 1000

+        stTime -= numHours * (60 * 60 * 1000)

+        stDateTime = DataTime(Date(stTime))

+        length = len(availableTimes)

+        xdts = []

+        for i in range(length) :

+            d = DataTime(availableTimes[length-1-i])

+            if d.greaterThan(stDateTime) :

+                xdts.append(d)

+            else :

+                 break

+        sz = len(xdts)

+        dts = jep.jarray(sz, DataTime)

+        i = 0

+        for d in xdts:

+            dts[i] = d

+            i += 1

+        return dts

+   

+def retrieve(pluginName, site, parameters, keyId='forecastHr', refTime=None, constraint={}, maxSize=99):

+    ret = HoursRefTimePointDataRetrieve(pluginName, site, parameters, keyId, refTime, constraint, maxSize)

+    return ret.pdc

+

+    

     
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataContainer.py b/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataContainer.py
index c4a809d8eb..c8a5589c5e 100644
--- a/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataContainer.py
+++ b/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataContainer.py
@@ -1,68 +1,68 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+
+import PointDataView, NoDataException
+
+#
+# Python wrapper for point data
+#  
+#    
+#     SOFTWARE HISTORY
+#    
+#    Date            Ticket#       Engineer       Description
+#    ------------    ----------    -----------    --------------------------
+#    07/20/09                      njensen       Initial Creation.
+#    05/11/11                      njensen       Split out data request to PointDataRetrieve.py
+#    
 # 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-
-import PointDataView, NoDataException
-
-#
-# Python wrapper for point data
-#  
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    07/20/09                      njensen       Initial Creation.
-#    05/11/11                      njensen       Split out data request to PointDataRetrieve.py
-#    
-# 
-#
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-class PointDataContainer:
-
-    def __init__(self, pdvDict, javaPdc, refTime):        
-        self.__pdvDict = pdvDict        
-        self.__javaPdc = javaPdc
-        self.refTime = refTime
-        
-    def __getitem__(self, key):
-        return self.__pdvDict[key]
-    
-    def has_key(self, key):
-        return self.__pdvDict.has_key(key)
-    
-    def keys(self):
-        return self.__pdvDict.keys()
-    
-    def __contains__(self, key):
-        return self.has_key(key)
-    
-    def hasParam(self, param):
-        return self.__javaPdc.getParameters().contains(param)
-    
-    def __repr__(self):
-        return "PointDataContainer: " + str(self.__pdvDict)
-
-    
+#
+
+##
+# This is a base file that is not intended to be overridden.
+##
+
+class PointDataContainer:
+
+    def __init__(self, pdvDict, javaPdc, refTime):        
+        self.__pdvDict = pdvDict        
+        self.__javaPdc = javaPdc
+        self.refTime = refTime
+        
+    def __getitem__(self, key):
+        return self.__pdvDict[key]
+    
+    def has_key(self, key):
+        return key in self.__pdvDict
+    
+    def keys(self):
+        return list(self.__pdvDict.keys())
+    
+    def __contains__(self, key):
+        return key in self
+    
+    def hasParam(self, param):
+        return self.__javaPdc.getParameters().contains(param)
+    
+    def __repr__(self):
+        return "PointDataContainer: " + str(self.__pdvDict)
+
+    
     
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataRetrieve.py b/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataRetrieve.py
index ef36cc473c..f8bf7476ca 100644
--- a/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataRetrieve.py
+++ b/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataRetrieve.py
@@ -1,116 +1,116 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-
-import PointDataView, PointDataContainer, NoDataException
-
-#
-# Python module to request point data.  Split out of PointDataContainer.py.
-#  
-#    
-#  SOFTWARE HISTORY
-#  
-#  Date            Ticket#       Engineer       Description
-#  ------------    ----------    -----------    --------------------------
-#  05/11/11                      njensen        Initial Creation.
-#  25Apr2012       14688         rferrel        Made into an abstract class.
-#  Sep 14, 2015    4880          njensen        Improved __queryNewestRefTime()
-#    
-# 
-#
-    
-##
-# This is a base file that is not intended to be overridden.
-##
-
-class PointDataRetrieve(object):
-    def __init__(self, pluginName, site, parameters, keyId='forecastHr', refTime=None, constraint={}, maxSize=99):
-        """Initializes a python PointDataContainer which wraps the Java PointDataContainer capabilities.
-            @pluginName the name of the type of data, e.g. bufrmos
-            @site the name of the station, e.g. KOMA
-            @parameters a python list of parameter names as specified in the pointdata xml
-            @keyId how to organize views into the point data, defaults to forecastHr
-            @refTime the reference time to request data for, if None will default to the newest time
-            @constraint a dictionary of extra string constraints to narrow the data type returned,
-                                e.g. {'type':'LAMP'}
-        """
-
-        self.pluginName = pluginName
-        self.site = site
-        self.constraint = constraint
-        if not refTime:
-            refTime = self.__queryNewestRefTime()
-            import time
-            if refTime < time.time() - 86400:
-                raise NoDataException.NoDataException("Newest data in system is more than 24 hours old")
-        if type(refTime) is int or type(refTime) is long:
-            from java.util import Date
-            refTime = Date(refTime * 1000)
-        self.refTime = refTime # should be a java.util.Date
-        if type(parameters) is str:
-            parameters = [parameters]
-        if not parameters.__contains__(keyId):
-            parameters.append(keyId)
-        self.__keyId = keyId
-        self._query(parameters, int(maxSize))
-    
-    # Abstract method must be implemented by sub-class.
-    def _query(self, parameters, maxSize):
-        raise NoDataException.NoDataException('_query not implemented')
-    
-    def __queryNewestRefTime(self):
-        from com.raytheon.uf.viz.core.catalog import CatalogQuery
-        constraints = self._buildConstraints()
-        results = CatalogQuery.performTimeQuery(constraints, True, None)
-        nResults = len(results)
-        if nResults != 1:
-            if nResults > 1:
-                # this should be impossible to hit unless CatalogQuery is broken
-                raise NoDataException.NoDataException("Unable to determine latest time, received multiple times")
-            elif self.site:
-                raise NoDataException.NoDataException("No data available for site " + self.site)
-            else:
-                raise NoDataException.NoDataException("No data available")
-        dt = results[0]
-        return dt.getRefTime().getTime() / 1000
-    
-    def _buildConstraints(self, refTime=None):
-        from java.util import HashMap
-        from com.raytheon.uf.common.dataquery.requests import RequestConstraint
-        queryTerms = HashMap()
-        queryTerms.put("pluginName", RequestConstraint(self.pluginName))
-        if self.site:
-            queryTerms.put("location.stationId", RequestConstraint(self.site))
-        if refTime:
-            from com.raytheon.uf.common.time.util import TimeUtil 
-            queryTerms.put('dataTime.refTime', RequestConstraint(TimeUtil.formatToSqlTimestamp(refTime)))
-        if self.constraint:
-            for k in self.constraint.keys():
-                queryTerms.put(k, RequestConstraint(self.constraint[k]))
-        return queryTerms
-    
-    def _organizeData(self, container):
-        import PointDataView
-        organizedData = {}
-        for i in range(container.getCurrentSz()):
-            pdv = PointDataView.PointDataView(container.readRandom(i))
-            fcstHr = pdv[self.__keyId]
-            organizedData[fcstHr] = pdv
-        return organizedData
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+

+

+import PointDataView, PointDataContainer, NoDataException

+

+#

+# Python module to request point data.  Split out of PointDataContainer.py.

+#  

+#    

+#  SOFTWARE HISTORY

+#  

+#  Date            Ticket#       Engineer       Description

+#  ------------    ----------    -----------    --------------------------

+#  05/11/11                      njensen        Initial Creation.

+#  25Apr2012       14688         rferrel        Made into an abstract class.

+#  Sep 14, 2015    4880          njensen        Improved __queryNewestRefTime()

+#    

+# 

+#

+    

+##

+# This is a base file that is not intended to be overridden.

+##

+

+class PointDataRetrieve(object):

+    def __init__(self, pluginName, site, parameters, keyId='forecastHr', refTime=None, constraint={}, maxSize=99):

+        """Initializes a python PointDataContainer which wraps the Java PointDataContainer capabilities.

+            @pluginName the name of the type of data, e.g. bufrmos

+            @site the name of the station, e.g. KOMA

+            @parameters a python list of parameter names as specified in the pointdata xml

+            @keyId how to organize views into the point data, defaults to forecastHr

+            @refTime the reference time to request data for, if None will default to the newest time

+            @constraint a dictionary of extra string constraints to narrow the data type returned,

+                                e.g. {'type':'LAMP'}

+        """

+

+        self.pluginName = pluginName

+        self.site = site

+        self.constraint = constraint

+        if not refTime:

+            refTime = self.__queryNewestRefTime()

+            import time

+            if refTime < time.time() - 86400:

+                raise NoDataException.NoDataException("Newest data in system is more than 24 hours old")

+        if type(refTime) is int or type(refTime) is int:

+            from java.util import Date

+            refTime = Date(refTime * 1000)

+        self.refTime = refTime # should be a java.util.Date

+        if type(parameters) is str:

+            parameters = [parameters]

+        if not parameters.__contains__(keyId):

+            parameters.append(keyId)

+        self.__keyId = keyId

+        self._query(parameters, int(maxSize))

+    

+    # Abstract method must be implemented by sub-class.

+    def _query(self, parameters, maxSize):

+        raise NoDataException.NoDataException('_query not implemented')

+    

+    def __queryNewestRefTime(self):

+        from com.raytheon.uf.viz.core.catalog import CatalogQuery

+        constraints = self._buildConstraints()

+        results = CatalogQuery.performTimeQuery(constraints, True, None)

+        nResults = len(results)

+        if nResults != 1:

+            if nResults > 1:

+                # this should be impossible to hit unless CatalogQuery is broken

+                raise NoDataException.NoDataException("Unable to determine latest time, received multiple times")

+            elif self.site:

+                raise NoDataException.NoDataException("No data available for site " + self.site)

+            else:

+                raise NoDataException.NoDataException("No data available")

+        dt = results[0]

+        return dt.getRefTime().getTime() / 1000

+    

+    def _buildConstraints(self, refTime=None):

+        from java.util import HashMap

+        from com.raytheon.uf.common.dataquery.requests import RequestConstraint

+        queryTerms = HashMap()

+        queryTerms.put("pluginName", RequestConstraint(self.pluginName))

+        if self.site:

+            queryTerms.put("location.stationId", RequestConstraint(self.site))

+        if refTime:

+            from com.raytheon.uf.common.time.util import TimeUtil 

+            queryTerms.put('dataTime.refTime', RequestConstraint(TimeUtil.formatToSqlTimestamp(refTime)))

+        if self.constraint:

+            for k in list(self.constraint.keys()):

+                queryTerms.put(k, RequestConstraint(self.constraint[k]))

+        return queryTerms

+    

+    def _organizeData(self, container):

+        import PointDataView

+        organizedData = {}

+        for i in range(container.getCurrentSz()):

+            pdv = PointDataView.PointDataView(container.readRandom(i))

+            fcstHr = pdv[self.__keyId]

+            organizedData[fcstHr] = pdv

+        return organizedData

diff --git a/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataView.py b/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataView.py
index 2f3b95b0cc..c79e1ab8c0 100644
--- a/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataView.py
+++ b/cave/com.raytheon.viz.pointdata/localization/pointdata/PointDataView.py
@@ -1,103 +1,103 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+
+#
+# Python wrapper for PointDataView
+#  
+#    
+#     SOFTWARE HISTORY
+#    
+#    Date            Ticket#       Engineer       Description
+#    ------------    ----------    -----------    --------------------------
+#    07/20/09                      njensen       Initial Creation.
+#    
 # 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-
-#
-# Python wrapper for PointDataView
-#  
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    07/20/09                      njensen       Initial Creation.
-#    
-# 
-#
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-class PointDataView:
-
-    def __init__(self, javaPointDataView):        
-        self.__javaPdv = javaPointDataView   
-        self.__keys = []
-        keyset = self.__javaPdv.getContainer().getParameters()
-        itr = keyset.iterator()
-        while itr.hasNext():
-            self.__keys.append(str(itr.next()))
-            
-    def __getitem__(self, key):
-        result = None        
-        strValType = self.getType(key)
-        if strValType == 'FLOAT':
-            result = self.__javaPdv.getFloat(key)
-        elif strValType == 'STRING':
-            result = self.__javaPdv.getString(key)
-        elif strValType == 'INT':
-            result = self.__javaPdv.getInt(key)
-        elif strValType == 'LONG':
-            result = self.__javaPdv.getLong(key)
-            
-        return result
-
-    def getType(self, key):
-        val = self.__javaPdv.getType(key)
-        if val:
-            val = str(val)
-        return val
-    
-    def has_key(self, key):
-        return self.__keys.__contains__(key)
-    
-    def keys(self):
-        return self.__keys
-    
-    def __contains__(self, key):
-        return self.has_key(key)
-    
-    def getFillValue(self, key):
-        # TODO if we get fill value support in pointdata, hook that up
-        return -9999.0
-    
-    def getNumberAllLevels(self, key):
-        strValType = self.getType(key)
-        jlevels = self.__javaPdv.getNumberAllLevels(key)
-        levels = []
-        for level in jlevels:
-            level = str(level)
-            if strValType == 'FLOAT':
-                levels.append(float(level))
-            elif strValType == 'STRING':
-                levels.append(str(level))
-            elif strValType == 'INT':
-                levels.append(int(level))
-            elif strValType == 'LONG':
-                levels.append(long(level))
-        return levels
-        
-
-    
-        
-        
-            
+#
+
+##
+# This is a base file that is not intended to be overridden.
+##
+
+class PointDataView:
+
+    def __init__(self, javaPointDataView):        
+        self.__javaPdv = javaPointDataView   
+        self.__keys = []
+        keyset = self.__javaPdv.getContainer().getParameters()
+        itr = keyset.iterator()
+        while itr.hasNext():
+            self.__keys.append(str(next(itr)))
+            
+    def __getitem__(self, key):
+        result = None        
+        strValType = self.getType(key)
+        if strValType == 'FLOAT':
+            result = self.__javaPdv.getFloat(key)
+        elif strValType == 'STRING':
+            result = self.__javaPdv.getString(key)
+        elif strValType == 'INT':
+            result = self.__javaPdv.getInt(key)
+        elif strValType == 'LONG':
+            result = self.__javaPdv.getLong(key)
+            
+        return result
+
+    def getType(self, key):
+        val = self.__javaPdv.getType(key)
+        if val:
+            val = str(val)
+        return val
+    
+    def has_key(self, key):
+        return self.__keys.__contains__(key)
+    
+    def keys(self):
+        return self.__keys
+    
+    def __contains__(self, key):
+        return key in self
+    
+    def getFillValue(self, key):
+        # TODO if we get fill value support in pointdata, hook that up
+        return -9999.0
+    
+    def getNumberAllLevels(self, key):
+        strValType = self.getType(key)
+        jlevels = self.__javaPdv.getNumberAllLevels(key)
+        levels = []
+        for level in jlevels:
+            level = str(level)
+            if strValType == 'FLOAT':
+                levels.append(float(level))
+            elif strValType == 'STRING':
+                levels.append(str(level))
+            elif strValType == 'INT':
+                levels.append(int(level))
+            elif strValType == 'LONG':
+                levels.append(int(level))
+        return levels
+        
+
+    
+        
+        
+            
     
\ No newline at end of file
diff --git a/cave/com.raytheon.viz.textworkstation/localization/textws/scripting/twsScripting.py b/cave/com.raytheon.viz.textworkstation/localization/textws/scripting/twsScripting.py
index b89d94a10f..60398d2940 100644
--- a/cave/com.raytheon.viz.textworkstation/localization/textws/scripting/twsScripting.py
+++ b/cave/com.raytheon.viz.textworkstation/localization/textws/scripting/twsScripting.py
@@ -1,180 +1,180 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-from com.raytheon.viz.texteditor.scripting.runner import TextWsCommands
-import os
-
-cmds = TextWsCommands()
-#
-# symbolic names/aliases to better match AWIPS I script syntax
-on = True
-off = False
-until = "until"
-#
-# provides a single method to initialize the 'cmds' object
-# should be called at the start to each command implementation
-def initCmds():
-    cmds.setEditor(editor)
-    cmds.setObserver(observer)
-#
-# implements a standard exit strategy for cancel script
-def cancel():
-    raise ScriptCancelled()
-
-#
-# implements the basic repeat command
-def repeat(count,body=""):
-    initCmds()
-    if body == "":
-        body = count
-        count = -1
-    if count > -1:
-        for i in range(count):
-            exec body
-            cmds.doEvents()
-            if cmds.cancelScript():
-                cancel()
-    else:
-        while True:
-            exec body
-            cmds.doEvents()
-            if cmds.cancelScript():
-                cancel()
-
-#
-# turns accumulation on/off in the text editor window
-def accum(flag):
-    initCmds()
-    if cmds.cancelScript():
-        cancel()
-    try:
-        cmds.setAccumulation(flag)
-    except:
-        raise
-#
-# clears the test work editor window
-def clear():
-    initCmds()
-    if cmds.cancelScript() :
-        cancel()
-    try:
-        cmds.clearTextDisplay()
-    except:
-        raise
-
-#
-#implements the wait command
-def wait(opt="",time=""):
-    initCmds()
-    if cmds.cancelScript() :
-        cancel()
-    # need to do a little more validation...
-    if (opt == "") and (time == ""):
-        try:
-            cmds.waitIndefinate()
-        except:
-            raise
-    elif opt == until:
-        if time == "":
-            raise AttributeError,"wait(unitl) requires minutes argument"
-        try:
-            cmds.waitUntilTime(time)
-        except:
-            raise
-    else:
-        try:
-            cmds.waitForTime(opt)
-        except:
-            raise
-    if cmds.isCanceled():
-        cancel()
-#
-# implements the load(pid) command
-def load(pid):
-    initCmds()
-    if cmds.cancelScript() :
-        cancel()
-    try:
-        cmds.loadTextProduct(pid.upper())
-    except:
-        raise
-# implements the readdb(pid,filename) command
-def readdb(pid,filename):
-    initCmds()
-    if cmds.cancelScript() :
-        cancel()
-    try:
-        cmds.saveProductToFile(pid.upper(),filename)
-    except:
-        raise
-
-# implements the writedb(pid,filename) command
-def writedb(pid,filename):
-    initCmds()
-    if cmds.cancelScript() :
-        cancel()
-    try:
-        cmds.readProductFromFile(pid.upper(),filename)
-    except:
-        raise
-
-def run(filename):
-    initCmds()
-    if cmds.cancelScript() :
-        cancel()
-    try:
-        cmds.runLocalFile(filename)
-    except:
-        raise
-
-class writer():
-    def write(self,text):
-        cmds = TextWsCommands()
-#        initCmds()
-        cmds.setEditor(editor)
-        cmds.setObserver(observer)
-        cmds.writeText(text)
-
-sys.stdout = writer()
-
-class errwriter():
-    def write(self,text):
-        cmds = TextWsCommands()
-        cmds.setEditor(editor)
-        cmds.setObserver(observer)
-        cmds.writeError(text)
-
-sys.stderr = errwriter()
-
-class ScriptCancelled(Exception):
-    def __init__(self,value='User cancelled the script',cause=None):
-      self.value = value
-      self.cause = cause
-    def __str__(self):
-        msg = 'ScriptCancelled: ' + repr(self.value)
-        if self.cause is not None:
-            msg += "\n caused by " + repr(self.cause)
-        return msg
-    def __repr__(self):
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+##
+# This is a base file that is not intended to be overridden.
+##
+
+from com.raytheon.viz.texteditor.scripting.runner import TextWsCommands
+import os
+
+cmds = TextWsCommands()
+#
+# symbolic names/aliases to better match AWIPS I script syntax
+on = True
+off = False
+until = "until"
+#
+# provides a single method to initialize the 'cmds' object
+# should be called at the start to each command implementation
+def initCmds():
+    cmds.setEditor(editor)
+    cmds.setObserver(observer)
+#
+# implements a standard exit strategy for cancel script
+def cancel():
+    raise ScriptCancelled()
+
+#
+# implements the basic repeat command
+def repeat(count,body=""):
+    initCmds()
+    if body == "":
+        body = count
+        count = -1
+    if count > -1:
+        for i in range(count):
+            exec(body)
+            cmds.doEvents()
+            if cmds.cancelScript():
+                cancel()
+    else:
+        while True:
+            exec(body)
+            cmds.doEvents()
+            if cmds.cancelScript():
+                cancel()
+
+#
+# turns accumulation on/off in the text editor window
+def accum(flag):
+    initCmds()
+    if cmds.cancelScript():
+        cancel()
+    try:
+        cmds.setAccumulation(flag)
+    except:
+        raise
+#
+# clears the test work editor window
+def clear():
+    initCmds()
+    if cmds.cancelScript() :
+        cancel()
+    try:
+        cmds.clearTextDisplay()
+    except:
+        raise
+
+#
+#implements the wait command
+def wait(opt="",time=""):
+    initCmds()
+    if cmds.cancelScript() :
+        cancel()
+    # need to do a little more validation...
+    if (opt == "") and (time == ""):
+        try:
+            cmds.waitIndefinate()
+        except:
+            raise
+    elif opt == until:
+        if time == "":
+            raise AttributeError("wait(unitl) requires minutes argument")
+        try:
+            cmds.waitUntilTime(time)
+        except:
+            raise
+    else:
+        try:
+            cmds.waitForTime(opt)
+        except:
+            raise
+    if cmds.isCanceled():
+        cancel()
+#
+# implements the load(pid) command
+def load(pid):
+    initCmds()
+    if cmds.cancelScript() :
+        cancel()
+    try:
+        cmds.loadTextProduct(pid.upper())
+    except:
+        raise
+# implements the readdb(pid,filename) command
+def readdb(pid,filename):
+    initCmds()
+    if cmds.cancelScript() :
+        cancel()
+    try:
+        cmds.saveProductToFile(pid.upper(),filename)
+    except:
+        raise
+
+# implements the writedb(pid,filename) command
+def writedb(pid,filename):
+    initCmds()
+    if cmds.cancelScript() :
+        cancel()
+    try:
+        cmds.readProductFromFile(pid.upper(),filename)
+    except:
+        raise
+
+def run(filename):
+    initCmds()
+    if cmds.cancelScript() :
+        cancel()
+    try:
+        cmds.runLocalFile(filename)
+    except:
+        raise
+
+class writer():
+    def write(self,text):
+        cmds = TextWsCommands()
+#        initCmds()
+        cmds.setEditor(editor)
+        cmds.setObserver(observer)
+        cmds.writeText(text)
+
+sys.stdout = writer()
+
+class errwriter():
+    def write(self,text):
+        cmds = TextWsCommands()
+        cmds.setEditor(editor)
+        cmds.setObserver(observer)
+        cmds.writeError(text)
+
+sys.stderr = errwriter()
+
+class ScriptCancelled(Exception):
+    def __init__(self,value='User cancelled the script',cause=None):
+      self.value = value
+      self.cause = cause
+    def __str__(self):
+        msg = 'ScriptCancelled: ' + repr(self.value)
+        if self.cause is not None:
+            msg += "\n caused by " + repr(self.cause)
+        return msg
+    def __repr__(self):
         return self.__str__()
\ No newline at end of file
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/Maps.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/Maps.py
index 83b0835245..c27a415c59 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/Maps.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/Maps.py
@@ -1,369 +1,369 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# NOTE: THIS FILE SHOULD NOT BE USER_MODIFIED.  INSTEAD, REFER TO THE
-# DOCUMENTATION ON HOW ENTRIES IN THIS FILE MAY BE OVERRIDDEN.  REFER TO
-# LOCALMAPS DOCUMENTATION.
-#
-# Maps.py - map background definitions for ifpServer
-#
-# ----------------------------------------------------------------------------
-#
-#     SOFTWARE HISTORY
-#
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    02/20/2014          #2824     randerso       Added log message when no localMaps file is found
-#    07/27/2017       DCS19921                    Removed creation of ssww from shapefile 
-#
-########################################################################
-
-##
-# This is an incremental override file, indicating that the files at different
-# localization levels will be combined. Incremental overrides are achieved by
-# creating a localMaps file at a higher priority localization level that
-# imports this base file.
-#
-# See the Configuration Guides->Server Configuration->Map Background
-# Configuration section of the GFE Online Help for more information.
-##
-
-from ShapeTable import ShapeTable
-
-import siteConfig, LogStream, JUtil
-
-BASELINE = getattr(siteConfig, 'BASELINE', 0)
-
-# Following lines extract the CWA (WFO name) from siteConfig
-CWA = siteConfig.GFESUITE_SITEID
-
-# Each map is identified by a Python variable.  The ShapeTable
-# is first identified as the data source.  The map is then filtered in
-# differing ways, or not at all if desired.  The name of the map
-# is specified.  The edit area name and edit area group
-# are specified, if it is desired to automatically generate edit areas
-# from the map. 
-#
-# NOTE: In AWIPS 2 map backgrounds are created using map bundles. 
-# Maps.py is only used for generating edit areas. 
-#
-#MapNameVariable = ShapeTable('the name of the map table')
-#MapNameVariable.filter( -- - - - - filter string - - - - - - )
-#MapNameVariable.name = 'the display name of the map' 
-#MapNameVariable.editAreaName = 'attribute in ShapeTable to be used to name
-#    'editArea'
-#MapNameVariable.groupName = 'name of the edit area group'
-#
-
-# -------------------------------------------------------------
-# Functions for determining name of edit areas
-# -------------------------------------------------------------
-# FIPS codes
-def fips(atts):
-    #make sure fips attribute exists and of proper length
-    #make sure state attribute exists and of proper length
-    if atts.has_key('fips') and len(atts['fips']) == 5 and \
-      atts.has_key('state') and len(atts['state']) == 2:
-        fips = atts['fips'][-3:]   #last 3 digits from fips code
-        s = atts['state'] + "C" + fips  #assemble COC013
-        return s
-    else:
-        return ""  #for no fips in ShapeTable
-
-# Public Zones
-def cwazones(atts):
-    if atts.has_key('zone') and len(atts['zone']) == 3 and \
-      atts.has_key('state') and len(atts['state']) == 2:
-        return atts['state'] + "Z" + atts['zone']  #assemble COZ023
-    else:
-        return ""   #bad attributes
-
-# Fire Wx Zones
-def fwxzones(atts):
-    if atts.has_key('zone') and len(atts['zone']) == 3 and \
-      atts.has_key('state') and len(atts['state']) == 2:
-        return atts['state'] + "Z" + atts['zone']  #assemble COZ023
-    else:
-        return ""
-
-# Marine Zones
-def marineZ(atts):
-    if atts.has_key('id') and len(atts['id']) == 6:
-        return atts['id']
-    else:
-        return ""
-
-# Offshore Marine Zones
-def offshoreZ(atts):
-    if atts.has_key('id') and len(atts['id']) == 6:
-        return atts['id']
-    else:
-        return ""
-
-#---------------------------------------------------------------------
-# Map Background Filters
-#---------------------------------------------------------------------
-# filter for public zones.
-def publicZoneFilter(atts):
-    if not atts.has_key('cwa'):
-        return 0
-    
-    # this CWA (all but AFC site)
-    if atts['cwa'] == CWA:
-        return 1
-
-    # AFC data - separate out AER/ALU data
-    elif atts['cwa'] == 'AFC':
-        id = cwazones(atts)
-        if CWA == 'AER':
-            return id in ['AKZ101', 'AKZ111', 'AKZ121', 'AKZ125', 'AKZ131',
-              'AKZ135', 'AKZ141', 'AKZ145', 'AKZ171']
-
-        elif CWA == 'ALU':
-            return id in  ['AKZ151', 'AKZ155', 'AKZ161', 'AKZ181',
-              'AKZ185', 'AKZ187', 'AKZ191', 'AKZ195']
-
-        elif CWA == 'AICE':
-            return 1
-
-    return 0
-
-# filter for fire weather zones.
-def firewxZoneFilter(atts):
-    if not atts.has_key('cwa'):
-        return 0
-    
-    # this CWA (all but AFC site)
-    if atts['cwa'] == CWA:
-        return 1
-
-    # AFC data - separate out AER/ALU data
-    elif atts['cwa'] == 'AFC':
-        id = fwxzones(atts)
-        if CWA == 'AER':
-            return id in ['AKZ101', 'AKZ111', 'AKZ121', 'AKZ125', 'AKZ131',
-              'AKZ135', 'AKZ141', 'AKZ145', 'AKZ171']
-
-        elif CWA == 'ALU':
-            return id in ['AKZ151', 'AKZ155', 'AKZ161', 'AKZ181',
-              'AKZ185', 'AKZ187', 'AKZ191', 'AKZ195']
-
-        elif CWA == 'AICE':
-            return 1
-
-    return 0
-
-# filter for marine zones.
-def marineZoneFilter(atts):
-    if not atts.has_key('wfo'):
-        return 0
-    
-    # this CWA (all but AFC site)
-    if atts['wfo'] == CWA:
-        return 1
-
-    # AFC data - separate out AER/ALU data
-    elif atts['wfo'] == 'AFC':
-        id = marineZ(atts)
-        if CWA == 'AER':
-            return id in ['PKZ120', 'PKZ121', 'PKZ125', 'PKZ126', 'PKZ127',
-              'PKZ128', 'PKZ129', 'PKZ130', 'PKZ132', 'PKZ136', 'PKZ137',
-              'PKZ138', 'PKZ140', 'PKZ141']
-        elif CWA == 'ALU':
-            return id in  ['PKZ150', 'PKZ155', 'PKZ160', 'PKZ165', 'PKZ170',
-              'PKZ171', 'PKZ172', 'PKZ175', 'PKZ176', 'PKZ179', 'PKZ180',
-              'PKZ185']
-        elif CWA == 'AICE':
-            return 1
-
-    return 0
-
-
-# filter for offshore marine zones.
-def offshoreZoneFilter(atts):
-    if not atts.has_key('wfo'):
-        return 0
-    
-    # this CWA (all but AFC site)
-    if atts['wfo'] == CWA:
-        return 1
-
-    # AFC data - separate out AER/ALU data
-    elif atts['wfo'] == 'AFC':
-        id = offshoreZ(atts)
-        if CWA == 'AER':
-            return id in ['PKZ350']
-        elif CWA == 'ALU':
-            return id in  ['PKZ410']
-        elif CWA == 'AICE':
-            return 1
-
-    return 0
-
-
-#---------------------------------------------------------------------
-# Map Background Definitions
-#---------------------------------------------------------------------
-
-# CWA Counties
-CWAcounties = ShapeTable('county')
-CWAcounties.filter(lambda x : x['cwa'][0:3] == CWA or x['cwa'][3:6] == CWA)
-CWAcounties.name = 'Counties_' + CWA
-CWAcounties.editAreaName = ['state','countyname']
-CWAcounties.groupName = 'Counties'
-
-# FIPS for my counties - only include first WFO indicated in CWA field
-FIPS = ShapeTable('county')
-FIPS.name = 'FIPS_' + CWA
-FIPS.filter(lambda x : x['cwa'][0:3] == CWA)
-FIPS.editAreaName = fips
-FIPS.groupName = 'FIPS_' + CWA
-
-# Unfiltered Counties
-Counties = ShapeTable('county')
-Counties.name = 'Counties'
-Counties.editAreaName = fips
-Counties.groupName = 'FIPS'
-
-# CWA Zones
-CWAzones = ShapeTable('zone')
-CWAzones.filter(publicZoneFilter)
-CWAzones.name = 'Zones_' + CWA
-CWAzones.editAreaName = cwazones
-CWAzones.groupName = 'Zones_' + CWA
-
-# Unfiltered Zones
-Zones = ShapeTable('zone')
-Zones.name = 'Zones'
-Zones.editAreaName = cwazones
-Zones.groupName = 'Zones'
-
-# Fire Wx Zones
-FWCWAzones = ShapeTable('firewxzones')
-FWCWAzones.filter(firewxZoneFilter)
-FWCWAzones.name = 'FireWxZones_' + CWA
-FWCWAzones.editAreaName = fwxzones
-FWCWAzones.groupName = 'FireWxZones_' + CWA
-
-# Unfiltered Fire Wx Zones
-FWZones = ShapeTable('firewxzones')
-FWZones.name = 'FireWxZones'
-FWZones.editAreaName = fwxzones
-FWZones.groupName = 'FireWxZones'
-
-# CWAs for all
-cwas = ShapeTable('cwa')
-cwas.name = 'CWA_all'
-cwas.editAreaName = 'wfo'
-cwas.groupName = 'WFOs'
-
-# ISC areas for all
-isc = ShapeTable('isc')
-isc.name = 'ISC_all'
-isc.editAreaName = ['ISC','wfo']
-isc.groupName = 'ISC'
-
-# Fire Wx AOR for all
-fwaor = ShapeTable('firewxaor')
-fwaor.name = 'FireWxAOR'
-fwaor.editAreaName = ['FireWxAOR', 'cwa']
-fwaor.groupName = 'FireWxAOR'
-
-# Marine Zones for CWA
-CWAmzones = ShapeTable('marinezones')
-CWAmzones.filter(marineZoneFilter)
-CWAmzones.name = 'Marine_Zones_' + CWA
-CWAmzones.editAreaName = marineZ
-CWAmzones.groupName = 'MZones_' + CWA
-
-# Marine Zones (unfiltered)
-Mzones = ShapeTable('marinezones')
-Mzones.name = "Marine_Zones"
-Mzones.editAreaName = marineZ
-Mzones.groupName = 'MZones'
-
-# States (unfiltered)
-States = ShapeTable('states')
-States.name = "States"
-States.editAreaName = 'name'
-States.groupName = 'States'
-
-# RFC maps
-rfc = ShapeTable('rfc')
-rfc.name = "RFC"
-rfc.editAreaName = ['ISC','site_id']
-rfc.groupName = 'ISC'
-
-#  NHA ISC area
-#domain = ShapeTable('nhadomain')
-#domain.name = "TropicalISC"
-#domain.groupName = "ISC"
-#domain.editAreaName = "ISC_NHA"
-
-#    Storm Surge Watch/Warning Area
-# stormsurgeww = ShapeTable('stormsurgeww')
-# stormsurgeww.name = "StormSurgeWW"
-# stormsurgeww.groupName = "SurgeCollab"
-# stormsurgeww.editAreaName = "StormSurgeWW_EditArea"
-
-# Offshore Marine Zones - unfiltered
-offshore = ShapeTable('offshore')
-offshore.name = "Offshore_Marine_Zones"
-offshore.editAreaName = offshoreZ
-offshore.groupName = 'OffShoreMZones'
-
-# Offshore Marine Zones - filtered by CWA
-offshoreCWA = ShapeTable('offshore')
-offshoreCWA.filter(offshoreZoneFilter)
-offshoreCWA.name = "Offshore_Marine_Zones_" + CWA
-offshoreCWA.editAreaName = offshoreZ
-offshoreCWA.groupName = 'OffShoreMZones_' + CWA
-
-# this is a complete listing of all maps
-maps = [ CWAcounties, FIPS, Counties, CWAzones, Zones, FWCWAzones, FWZones, cwas, isc, 
-         fwaor, CWAmzones, Mzones, States, rfc, offshore, offshoreCWA ]
-
-# import the local maps file
-if not BASELINE:
-    try:
-        from localMaps import *
-    except ImportError:
-        import LogStream
-        LogStream.logEvent("No localMaps file found, using baseline settings.");
-
-def getMaps():
-    from java.util import ArrayList
-    jmaps = ArrayList(len(maps))
-    for m in maps:
-        j = m.toJavaObj()
-        jmaps.add(j)
-        for k,v in globals().iteritems():
-            if v is m:
-                j.setInstanceName(k)
-                break
-
-    return jmaps
-
-def runFilter(instance, info):
-    info = JUtil.javaObjToPyVal(info)
-    return bool(globals()[instance].doFilter(info))
-
-def runNamer(instance, info):
-    info = JUtil.javaObjToPyVal(info)
-    return str(globals()[instance].getEAName(info))  
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+# NOTE: THIS FILE SHOULD NOT BE USER_MODIFIED.  INSTEAD, REFER TO THE

+# DOCUMENTATION ON HOW ENTRIES IN THIS FILE MAY BE OVERRIDDEN.  REFER TO

+# LOCALMAPS DOCUMENTATION.

+#

+# Maps.py - map background definitions for ifpServer

+#

+# ----------------------------------------------------------------------------

+#

+#     SOFTWARE HISTORY

+#

+#    Date            Ticket#       Engineer       Description

+#    ------------    ----------    -----------    --------------------------

+#    02/20/2014          #2824     randerso       Added log message when no localMaps file is found

+#    07/27/2017       DCS19921                    Removed creation of ssww from shapefile 

+#

+########################################################################

+

+##

+# This is an incremental override file, indicating that the files at different

+# localization levels will be combined. Incremental overrides are achieved by

+# creating a localMaps file at a higher priority localization level that

+# imports this base file.

+#

+# See the Configuration Guides->Server Configuration->Map Background

+# Configuration section of the GFE Online Help for more information.

+##

+

+from ShapeTable import ShapeTable

+

+import siteConfig, LogStream, JUtil

+

+BASELINE = getattr(siteConfig, 'BASELINE', 0)

+

+# Following lines extract the CWA (WFO name) from siteConfig

+CWA = siteConfig.GFESUITE_SITEID

+

+# Each map is identified by a Python variable.  The ShapeTable

+# is first identified as the data source.  The map is then filtered in

+# differing ways, or not at all if desired.  The name of the map

+# is specified.  The edit area name and edit area group

+# are specified, if it is desired to automatically generate edit areas

+# from the map. 

+#

+# NOTE: In AWIPS 2 map backgrounds are created using map bundles. 

+# Maps.py is only used for generating edit areas. 

+#

+#MapNameVariable = ShapeTable('the name of the map table')

+#MapNameVariable.filter( -- - - - - filter string - - - - - - )

+#MapNameVariable.name = 'the display name of the map' 

+#MapNameVariable.editAreaName = 'attribute in ShapeTable to be used to name

+#    'editArea'

+#MapNameVariable.groupName = 'name of the edit area group'

+#

+

+# -------------------------------------------------------------

+# Functions for determining name of edit areas

+# -------------------------------------------------------------

+# FIPS codes

+def fips(atts):

+    #make sure fips attribute exists and of proper length

+    #make sure state attribute exists and of proper length

+    if 'fips' in atts and len(atts['fips']) == 5 and \

+      'state' in atts and len(atts['state']) == 2:

+        fips = atts['fips'][-3:]   #last 3 digits from fips code

+        s = atts['state'] + "C" + fips  #assemble COC013

+        return s

+    else:

+        return ""  #for no fips in ShapeTable

+

+# Public Zones

+def cwazones(atts):

+    if 'zone' in atts and len(atts['zone']) == 3 and \

+      'state' in atts and len(atts['state']) == 2:

+        return atts['state'] + "Z" + atts['zone']  #assemble COZ023

+    else:

+        return ""   #bad attributes

+

+# Fire Wx Zones

+def fwxzones(atts):

+    if 'zone' in atts and len(atts['zone']) == 3 and \

+      'state' in atts and len(atts['state']) == 2:

+        return atts['state'] + "Z" + atts['zone']  #assemble COZ023

+    else:

+        return ""

+

+# Marine Zones

+def marineZ(atts):

+    if 'id' in atts and len(atts['id']) == 6:

+        return atts['id']

+    else:

+        return ""

+

+# Offshore Marine Zones

+def offshoreZ(atts):

+    if 'id' in atts and len(atts['id']) == 6:

+        return atts['id']

+    else:

+        return ""

+

+#---------------------------------------------------------------------

+# Map Background Filters

+#---------------------------------------------------------------------

+# filter for public zones.

+def publicZoneFilter(atts):

+    if 'cwa' not in atts:

+        return 0

+    

+    # this CWA (all but AFC site)

+    if atts['cwa'] == CWA:

+        return 1

+

+    # AFC data - separate out AER/ALU data

+    elif atts['cwa'] == 'AFC':

+        id = cwazones(atts)

+        if CWA == 'AER':

+            return id in ['AKZ101', 'AKZ111', 'AKZ121', 'AKZ125', 'AKZ131',

+              'AKZ135', 'AKZ141', 'AKZ145', 'AKZ171']

+

+        elif CWA == 'ALU':

+            return id in  ['AKZ151', 'AKZ155', 'AKZ161', 'AKZ181',

+              'AKZ185', 'AKZ187', 'AKZ191', 'AKZ195']

+

+        elif CWA == 'AICE':

+            return 1

+

+    return 0

+

+# filter for fire weather zones.

+def firewxZoneFilter(atts):

+    if 'cwa' not in atts:

+        return 0

+    

+    # this CWA (all but AFC site)

+    if atts['cwa'] == CWA:

+        return 1

+

+    # AFC data - separate out AER/ALU data

+    elif atts['cwa'] == 'AFC':

+        id = fwxzones(atts)

+        if CWA == 'AER':

+            return id in ['AKZ101', 'AKZ111', 'AKZ121', 'AKZ125', 'AKZ131',

+              'AKZ135', 'AKZ141', 'AKZ145', 'AKZ171']

+

+        elif CWA == 'ALU':

+            return id in ['AKZ151', 'AKZ155', 'AKZ161', 'AKZ181',

+              'AKZ185', 'AKZ187', 'AKZ191', 'AKZ195']

+

+        elif CWA == 'AICE':

+            return 1

+

+    return 0

+

+# filter for marine zones.

+def marineZoneFilter(atts):

+    if 'wfo' not in atts:

+        return 0

+    

+    # this CWA (all but AFC site)

+    if atts['wfo'] == CWA:

+        return 1

+

+    # AFC data - separate out AER/ALU data

+    elif atts['wfo'] == 'AFC':

+        id = marineZ(atts)

+        if CWA == 'AER':

+            return id in ['PKZ120', 'PKZ121', 'PKZ125', 'PKZ126', 'PKZ127',

+              'PKZ128', 'PKZ129', 'PKZ130', 'PKZ132', 'PKZ136', 'PKZ137',

+              'PKZ138', 'PKZ140', 'PKZ141']

+        elif CWA == 'ALU':

+            return id in  ['PKZ150', 'PKZ155', 'PKZ160', 'PKZ165', 'PKZ170',

+              'PKZ171', 'PKZ172', 'PKZ175', 'PKZ176', 'PKZ179', 'PKZ180',

+              'PKZ185']

+        elif CWA == 'AICE':

+            return 1

+

+    return 0

+

+

+# filter for offshore marine zones.

+def offshoreZoneFilter(atts):

+    if 'wfo' not in atts:

+        return 0

+    

+    # this CWA (all but AFC site)

+    if atts['wfo'] == CWA:

+        return 1

+

+    # AFC data - separate out AER/ALU data

+    elif atts['wfo'] == 'AFC':

+        id = offshoreZ(atts)

+        if CWA == 'AER':

+            return id in ['PKZ350']

+        elif CWA == 'ALU':

+            return id in  ['PKZ410']

+        elif CWA == 'AICE':

+            return 1

+

+    return 0

+

+

+#---------------------------------------------------------------------

+# Map Background Definitions

+#---------------------------------------------------------------------

+

+# CWA Counties

+CWAcounties = ShapeTable('county')

+CWAcounties.filter(lambda x : x['cwa'][0:3] == CWA or x['cwa'][3:6] == CWA)

+CWAcounties.name = 'Counties_' + CWA

+CWAcounties.editAreaName = ['state','countyname']

+CWAcounties.groupName = 'Counties'

+

+# FIPS for my counties - only include first WFO indicated in CWA field

+FIPS = ShapeTable('county')

+FIPS.name = 'FIPS_' + CWA

+FIPS.filter(lambda x : x['cwa'][0:3] == CWA)

+FIPS.editAreaName = fips

+FIPS.groupName = 'FIPS_' + CWA

+

+# Unfiltered Counties

+Counties = ShapeTable('county')

+Counties.name = 'Counties'

+Counties.editAreaName = fips

+Counties.groupName = 'FIPS'

+

+# CWA Zones

+CWAzones = ShapeTable('zone')

+CWAzones.filter(publicZoneFilter)

+CWAzones.name = 'Zones_' + CWA

+CWAzones.editAreaName = cwazones

+CWAzones.groupName = 'Zones_' + CWA

+

+# Unfiltered Zones

+Zones = ShapeTable('zone')

+Zones.name = 'Zones'

+Zones.editAreaName = cwazones

+Zones.groupName = 'Zones'

+

+# Fire Wx Zones

+FWCWAzones = ShapeTable('firewxzones')

+FWCWAzones.filter(firewxZoneFilter)

+FWCWAzones.name = 'FireWxZones_' + CWA

+FWCWAzones.editAreaName = fwxzones

+FWCWAzones.groupName = 'FireWxZones_' + CWA

+

+# Unfiltered Fire Wx Zones

+FWZones = ShapeTable('firewxzones')

+FWZones.name = 'FireWxZones'

+FWZones.editAreaName = fwxzones

+FWZones.groupName = 'FireWxZones'

+

+# CWAs for all

+cwas = ShapeTable('cwa')

+cwas.name = 'CWA_all'

+cwas.editAreaName = 'wfo'

+cwas.groupName = 'WFOs'

+

+# ISC areas for all

+isc = ShapeTable('isc')

+isc.name = 'ISC_all'

+isc.editAreaName = ['ISC','wfo']

+isc.groupName = 'ISC'

+

+# Fire Wx AOR for all

+fwaor = ShapeTable('firewxaor')

+fwaor.name = 'FireWxAOR'

+fwaor.editAreaName = ['FireWxAOR', 'cwa']

+fwaor.groupName = 'FireWxAOR'

+

+# Marine Zones for CWA

+CWAmzones = ShapeTable('marinezones')

+CWAmzones.filter(marineZoneFilter)

+CWAmzones.name = 'Marine_Zones_' + CWA

+CWAmzones.editAreaName = marineZ

+CWAmzones.groupName = 'MZones_' + CWA

+

+# Marine Zones (unfiltered)

+Mzones = ShapeTable('marinezones')

+Mzones.name = "Marine_Zones"

+Mzones.editAreaName = marineZ

+Mzones.groupName = 'MZones'

+

+# States (unfiltered)

+States = ShapeTable('states')

+States.name = "States"

+States.editAreaName = 'name'

+States.groupName = 'States'

+

+# RFC maps

+rfc = ShapeTable('rfc')

+rfc.name = "RFC"

+rfc.editAreaName = ['ISC','site_id']

+rfc.groupName = 'ISC'

+

+#  NHA ISC area

+#domain = ShapeTable('nhadomain')

+#domain.name = "TropicalISC"

+#domain.groupName = "ISC"

+#domain.editAreaName = "ISC_NHA"

+

+#    Storm Surge Watch/Warning Area

+# stormsurgeww = ShapeTable('stormsurgeww')

+# stormsurgeww.name = "StormSurgeWW"

+# stormsurgeww.groupName = "SurgeCollab"

+# stormsurgeww.editAreaName = "StormSurgeWW_EditArea"

+

+# Offshore Marine Zones - unfiltered

+offshore = ShapeTable('offshore')

+offshore.name = "Offshore_Marine_Zones"

+offshore.editAreaName = offshoreZ

+offshore.groupName = 'OffShoreMZones'

+

+# Offshore Marine Zones - filtered by CWA

+offshoreCWA = ShapeTable('offshore')

+offshoreCWA.filter(offshoreZoneFilter)

+offshoreCWA.name = "Offshore_Marine_Zones_" + CWA

+offshoreCWA.editAreaName = offshoreZ

+offshoreCWA.groupName = 'OffShoreMZones_' + CWA

+

+# this is a complete listing of all maps

+maps = [ CWAcounties, FIPS, Counties, CWAzones, Zones, FWCWAzones, FWZones, cwas, isc, 

+         fwaor, CWAmzones, Mzones, States, rfc, offshore, offshoreCWA ]

+

+# import the local maps file

+if not BASELINE:

+    try:

+        from localMaps import *

+    except ImportError:

+        import LogStream

+        LogStream.logEvent("No localMaps file found, using baseline settings.");

+

+def getMaps():

+    from java.util import ArrayList

+    jmaps = ArrayList(len(maps))

+    for m in maps:

+        j = m.toJavaObj()

+        jmaps.add(j)

+        for k,v in globals().items():

+            if v is m:

+                j.setInstanceName(k)

+                break

+

+    return jmaps

+

+def runFilter(instance, info):

+    info = JUtil.javaObjToPyVal(info)

+    return bool(globals()[instance].doFilter(info))

+

+def runNamer(instance, info):

+    info = JUtil.javaObjToPyVal(info)

+    return str(globals()[instance].getEAName(info))  

diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/serverConfig.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/serverConfig.py
index 224856c2f0..6ef8c6d45a 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/serverConfig.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/config/serverConfig.py
@@ -1,3479 +1,3479 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-#
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-#
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-#
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# serverConfig -- base GFE server configuration file
-#
-# NOTE: THIS FILE SHOULD NOT BE USER-MODIFIED.  INSTEAD REFER TO THE
-# LOCAL CONFIG DOCUMENTATION ON HOW TO OVERRIDE SETTINGS IN THIS FILE.
-#
-# Baseline GFE server configuration
-#
-# ----------------------------------------------------------------------------
-#
-#     SOFTWARE HISTORY
-#
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    08/09/2013          #1571     randerso       Changed projections to use the Java
-#                                                 ProjectionType enumeration
-#    10/03/2013          #2418     dgilling       Update for new pSurge 2.0 data.
-#    10/03/2013          #2424     randerso       Change localTC to use dateutil instead of pytz
-#                                                 to get correct offsets for Alaska
-#    01/17/2014          #2719     randerso       Added NHA domain
-#    02/20/2014          #2824     randerso       Added log message when local override files are not found
-#    03/11/2014          #2897     dgilling       Add new MHWM databases to default configuration.
-#    03/20/2014          #2418     dgilling       Remove unneeded D2D source PHISH.
-#    04/17/2014          #2934     dgilling       Remove alias for TPCSurgeProb D2D database.
-#    05/09/2014          #3148     randerso       Add tpHPCndfd to D2DAccumulativeElements for HPCERP
-#    06/20/2014          #3230     rferrel        Added URMA25.
-#    05/29/2014          #3224     randerso       Added "SPC":8 to D2DDBVERSIONS
-#    07/09/2014          #3146     randerso       Removed unused import
-#    07/10/2014                    swhite         Add surge and tropical threat WEs and their dependencies
-#    01/08/2015          #15035    lshi           add site WNJ
-#    12/03/2014          #3866     rferrel        Added GFS20
-#    01/13/2015          #3955     randerso       Added definitions for NewTerrain database
-#                                                 Added Topo to ISCPARMS
-#    01/19/2015          #4014     dgilling       Added ETSS.
-#    02/11/2015          #4053     rferrel        Added GLWN and moved GLERL to display only for Great Lakes sites..
-#    01/19/2015          #4014     dgilling       Added ETSS.
-#    02/24/2015          #16692    byin           Added RTMA. Removed gfsLR and WaveWatch
-#    03/19/2015          #4300     randerso       Remove GUMa as it is obsolete (per Shannon White)
-#    03/30/2015          #17288    bhunder        Added Guam-RTMA to D2D models
-#    03/30/2015          #17206    yteng          Changed some parameters that are not rate parameters
-#    03/31/2015          #17288    bhunder        Added Weather Params for RTMA
-#    04/03/2015          #4367     dgilling       Change WindGust's time constraints back to TC1
-#                                                 for Fcst/Official.
-#    04/08/2015          #4383     dgilling       Define FireWX ISC configuration parameters.
-#    04/15/2015          #17383    yteng          Change localTC to fix error that time constraints
-#                                                 being off
-#    Apr 25, 2015         4952     njensen        Updated for new JEP API
-#    04/20/2015          #4414     dgilling       Add missing NWPSTrkngCG0 weather elements.
-#    05/12/2015          #17144    bhunder        Added RTMA model
-#    05/29/2015          17496     ryu            Changed parm definitions for Wave1-10 and Period1-10.
-#
-#    05/29/2015          #17144    bhunder        Added weather Params for URMA25 and OCONUS RTMA
-#    09/02/2015          #4819     rferrel        Added HWRF.
-#    09/09/2015          16287     amoore         Additional validation of user input
-#    10/07/2015          #4958     dgilling       Added support for NationalBlend D2D data.
-#    10/13/2015          #4961     randerso       Updated NewTerrain/BaseTerrain database definitions
-#    10/30/2015          #17940    jendrowski     Responded to Code Review.  Mostly syntactical changes.
-#    11/05/2015          #18182    ryu            Change D2DDBVERSIONS value for HPCERP to 24
-#    12/22/2015          #14152    jwatson        Added Sky, Wind to GFSLAMPGrid parms
-#    1/28/2016           #13910    amoore         Wave model data should be available in 3-hrly timesteps
-#    02/09/2016          #5283     nabowle        Remove NGM support.
-#    02/22/2016          #18161    wkwock         Add NationalBlend model for AK, PR, HW
-#    02/23/2016          #14845    jwatson        Changed NamDNG5 to NamDNG for all sources and params.
-#                                                 Changed D2DModels for CONUS and Alaska to
-#                                                 namdng25 and AK-NamDNG3
-#    04/01/2016          18777     ryu            Replace NCF ip addresses.
-#    04/22/2016          #18896    wkwock         Add more nationalBlend Model
-#    06/01/2016                    JCM            removed tc3ng from officialdbs for wave/period elements;
-#                                                 removed Wave_XX and Period_XX; removed Wave10, Period10;
-#                                                 added databases for all sites to baseline
-#    08/08/2016          #5747     randerso       Support removal of wrapper.py
-#    10/05/2016          19293     randerso       Fixed units on Tropical and a few other weather elements
-#    12/12/2016          #19596    bhunder        Added "tp" to NationalBlend model D2DAccumulativeElements
-#    02/20/2017        DCS18966    mdavis/pjendr. NIC adjustment: name changes and removal of obsolete 
-#                                                 smart inits(DCS 19490). Fixed addOptionalParms.
-#    03/17/2017          19673     jmaloney       Added Rip Current Probabilities (RipProb).
-#    06/29/2017          6323      randerso       Added P-ETSS model
-#    07/19/2017        DCS19490    gpetrescu      Removed AKwave10, Wave10 and Period10.
-#    07/12/2017          6324      randerso       Added TPCWindProb_Prelim model
-#    07/12/2017          6253      randerso       Updated for Standard Terrain
-#    08/03/2017          #20054    bhunder        Added changes for ETSS model and for ETSS-HiRes model.
-#    10/03/2017        DR20432     arivera        Replace GFS40 with GFS in SnowRatioGFS and remove
-#                                                 GLOBHwave from SJU model databases.
-#    11/28/2017          6539      randerso       Made P-ETSS and TPCSurgeProb elements D2DAccumulativeElements
-#    12/06/2017        DCS20267    psantos        Add NWPS Rip Current Guidance
-#    12/20/2017          20510     ryu            changes to StormTotalSnow parameter
-#    02/23/2018          #20395    wkwock         Added NBM3.1 elements.
-#    04/03/2018        DR20656     arivera        Missing comma: "Dune Erosion Probability" in optionalParmsDict['marine']
-#    05/09/2018        DR20715     arivera        Missing comma: groups['marineSites'] after 'AVAK'
-#    06/18/2018          16729     ryu            Remove tpHPC element from RFCQPF model and the smart init for the model.
-#
-####################################################################################################
-
-##
-# This is an incremental override file, indicating that the files at different
-# localization levels will be combined. Incremental overrides are achieved by
-# creating a localConfig file at a higher priority localization level that
-# imports this base file.
-#
-# See the Configuration Guides->Server Configuration->Syntax for localConfig.py
-# section of the GFE Online Help for more information.
-##
-
-
-
-#----------------------------------------------------------------------------
-# USEFUL DEFINES
-#----------------------------------------------------------------------------
-
-import siteConfig,imp
-import pprint
-import re
-import sys
-import LogStream
-from collections import defaultdict
-BASELINE = getattr(siteConfig, 'BASELINE', 0)
-
-#D scfp=open('/localapps/logs/scdebug.log','w')
-class dbConfig(object):
-    """Class to create GFE databases from modelDict"""
-    def __init__(self,modelDict):
-        self.modelDict=modelDict
-        self.dbs=[]
-        self.D2DMODELS=[]
-        self.D2DDBVERSIONS={}
-        self.D2DAccumulativeElements={}
-        self.INITMODULES={}
-        self.INITSKIPS={}
-
-    def addConfiguredModels(self,ignoreList=[]):
-        """Setup model databases defined in dbConfigDict.
-        ignoreList can be used to filter out specific models
-        """
-        for m in self.modelDict:
-            if m in ignoreList:
-                continue
-            # Don't allow BC model if regular is in ignore list
-            if m[-2:] == 'BC' and m[:-2] in ignoreList:
-                continue
-            self.addGfeDB(m,self.modelDict[m])
-        return
-    def addGfeDB(self,modelname,dbConfigDict):
-        """Does all the work needed for adding a model to GFE from entries
-        in dbConfigDict. This populates dbs and sets various self
-        variables.
-        """
-        if "DB" in dbConfigDict and "Parms" in dbConfigDict:
-            self.dbs.append((dbConfigDict["DB"],dbConfigDict["Parms"]))
-        if "D2DAccumulativeElements" in dbConfigDict:
-            self.D2DAccumulativeElements[modelname]=dbConfigDict["D2DAccumulativeElements"]
-        if "D2DDBVERSIONS" in dbConfigDict:
-            self.D2DDBVERSIONS[modelname]=dbConfigDict["D2DDBVERSIONS"]
-        if "D2DMODELS" in dbConfigDict:
-            self.D2DMODELS.append((dbConfigDict["D2DMODELS"],modelname))
-        if "INITMODULES" in dbConfigDict:
-            if type(dbConfigDict["INITMODULES"]) is tuple:
-                self.INITMODULES[dbConfigDict["INITMODULES"][0]] = dbConfigDict["INITMODULES"][1]
-            else:
-                self.INITMODULES[dbConfigDict["INITMODULES"]]=[modelname]
-        if "INITSKIPS" in dbConfigDict:
-            self.INITSKIPS[modelname]=dbConfigDict["INITSKIPS"]
-
-#===============================================================================
-#          Utility methods to manage GFE configuration
-#===============================================================================
-def mergeModelDicts(baseDict,addDict):
-    """Combine serverConfig model dict and regional modelDict into one modelDict.
-    Settings in baseDict are maintained unless overridden in addDict. The merging
-    is done on a key by key basis of a specific model's dictionary (baseDict and
-    addDict are dictionaries of dictionaries)
-    This changes baseDict in place so the object passed in as baseDict is modified
-    in the caller's scope.
-    """
-    for m,v in addDict.iteritems():
-        if m not in baseDict:
-            baseDict[m]=v
-        else:
-            for key,val in v.iteritems():
-                baseDict[m][key]=val
-
-def updateModelDict(modelDict,model,key,value):
-    """Udates a specific entry for a model in modelDict.  model and key are dictionary
-    keys into modelDict and modelDict[model] respectively. If model is not defined
-    in modelDict, then a new entry is created. Otherwise, value replaces any existing
-    value in modelDict[model][key].
-    This changes modelDict in place so the object passed in as modelDict is modified
-    in the caller's scope.
-    """
-    if model in modelDict:
-        modelDict[model][key]=value
-    else:
-        modelDict[model]= {key : value}
-
-def alterModelDef(dbTuple, name=None, format=None, dbType=None, single=None,
-                  official=None, numver=None, purgeAge=None):
-    """Alter GFE database definition. The definition is used in the dbs setting
-    and has form:
-                     (name,  format,  type, single, official, numVer, purgeAge)
-    i.e., Practice = ("Fcst",  GRID, "Prac",   YES,       NO,      1,   24)
-
-    Won't use these exact names since some might conflict with builtins
-    Only supply what you want to change. To clone a model definition, just
-    supply name='newname'
-    """
-    n,f,t,s,o,v,p=dbTuple
-    l=[]
-    for old,new in [(n,name),(f,format),(t,dbType),(s,single),(o,official),
-                    (v,numver),(p,purgeAge)]:
-        if new is None:
-            l.append(old)
-        else:
-            l.append(new)
-    return tuple(l)
-
-def createModelDict(localsDict,dbs,D2DMODELS,D2DDBVERSIONS,D2DAccumulativeElements,
-                  INITMODULES,INITSKIPS):
-    """Convert serverConfig model configuration to a dictionary. This allows
-    legacy serverConfig settings in dbs,D2DMODELS,INITMODULES, etc. to be
-    maintained and then converted into a single dictionary where all settings
-    for a model are together.
-
-    WARNING: There can only be one version of a model in the dbs list. Fcst
-    practice and test databases have to be handled separately.  This is ok
-    because these databases are defined after any localConfig customizations
-    of the normal Fcst database.
-
-    modelDict contains the following keys. Only define what is needed, i.e.,
-    it is not required to have every key defined
-    "DB": Definition of the database, i.e., the first value in a dbs entry:
-          ("wrfems", GRID, "", NO,  NO,  3, 0). This must be a tuple. The name
-          in the DB entry must be the same as the model name used as the key
-          into the modelDict variable.
-
-    "Parms" : Definition of the weather element parameters in the database,
-          i.e., the second part of the dbs entry. This is a list of tuples.
-
-    "D2DMODELS" : D2D metadata database name for the source model.
-
-    "INITMODULES': Name of the SmartInit module. This should be just the module
-          name as a string, not a list.
-
-    "D2DAccumulativeElements" : List of parms that are accumulative
-
-    "D2DDBVERSIONS" : Number of versions of a D2D model to show in the Weather
-          Element Browser. Defaults to 2 if not supplied.
-
-    "INITSKIPS" : Used to skip model cycles.
-
-    Example for a model:
-
-    modelDict["CMCreg"]={
-         "DB": ("CMCreg", "GRID", "", NO, NO, 2, 0),
-         "Parms": [([Temp, Td, RH, Wind, WindGust, Sky, MixHgt, TransWind, QPF,
-                     PoP, SnowAmt, SnowRatio], TC3),
-                   ([PoP6, QPF6, QPF6hr, CQPF1],TC6NG),
-                   ([QPF12, PoP12],TC12NG),
-                   ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),
-                   ([MaxT], MaxTTC), ([MinT], MinTTC),
-                  ],
-         "D2DMODELS": "Canadian-Reg",
-         "INITMODULES": "Local_CMCreg",
-         "D2DAccumulativeElements": ["tpgemreg","tprun","tp3hr","tp6hr"],
-         "D2DDBVERSIONS": 3,
-    }
-    """
-    # Create self initializing dictionary via collections.defaultdict
-    modelDict=defaultdict(dict)
-    parmsDict={}
-    tcDict={}
-
-    for n,v in sorted(localsDict.items()):
-        if type(v) is tuple and type(v[0]) is str and v[1] in [DISCRETE,SCALAR,VECTOR,WEATHER]:
-            parmsDict[n]=v
-        elif type(v) is tuple and len(v)==3 and type(v[0]) is int:
-            tcDict[n]=v
-
-    # Process dbs entries, i.e., model database definition
-    tcDict={}
-    for item in sorted(dbs):
-        plist=[]
-        parmTmpDict={}
-        pDict={}
-        for pt in item[1]:
-            parmsList=[]
-            # Try to find named parm setting
-            for p in pt[0]:
-                pname=p[0]
-                pDict[pname]=p
-                parmsList.append(pname)
-
-            # Try to get a named time constraint
-            name=next((name for name,v in tcDict.iteritems() if v == pt[1]), None)
-            if name is None:
-                name = `pt[1]`
-            tcDict[name]=pt[1]
-            if name in parmTmpDict:
-                parmTmpDict[name]+=parmsList
-            else:
-                parmTmpDict[name]=parmsList
-
-        # This consolidates parms by time constraint and sorts parm names.
-        for tc in sorted(parmTmpDict.keys()):
-            theParms=[]
-            for p in sorted(parmTmpDict[tc]):
-               theParms.append(pDict[p])
-            plist.append((theParms, tcDict[tc]))
-
-        modelDict[item[0][0]]={'DB':item[0],'Parms':plist}
-
-    for si,ml in INITMODULES.items():
-        m=ml[0]
-        modelDict[m]['INITMODULES']=si
-    for m,v in D2DDBVERSIONS.items():
-        modelDict[m]['D2DDBVERSIONS']=D2DDBVERSIONS[m]
-
-    for m,v in D2DAccumulativeElements.items():
-        modelDict[m]['D2DAccumulativeElements']=v
-    for m,v in INITSKIPS.items():
-        modelDict[m]['INITSKIPS']=v
-    for item in D2DMODELS:
-        if type(item) is str:
-           m=item
-           v=item
-        else:
-           v,m=item
-        if m in modelDict:
-            modelDict[m]['D2DMODELS']=v
-        else:
-            modelDict[m]={'D2DMODELS':v}
-    return modelDict
-
-def changeParm(modelDict,pname,value,modelList=['Fcst']):
-    """Alter a parm that is defined in modelDict Parm setting.
-
-    pname: name of parm. This is a string not the parm definition
-    value: the parm definition tuple. If the None object, then the parm
-        will be deleted.
-    modelList: List of model names to check. An empty list will check all
-        models in modelDict.
-    Return: Nothing. modelDict is altered in place.
-    """
-    if not modelList:
-        modelList=modelDict.keys()
-    for m in modelList:
-        if m not in modelDict or 'Parms' not in modelDict[m] or \
-                 not checkForParm(modelDict[m]['Parms'],pname):
-            continue
-
-        newpt=[]
-        # parms is tuple (parmList,TC)
-        for pList,tc in modelDict[m]['Parms']:
-            # This makes a copy of the list of parms, not a reference
-            # this is needed because we are changing the list in place.
-            theParms= list(pList)
-            match=False
-            for matchParm in (p for p in theParms if p[0] == pname):
-                match=True
-                theParms.remove(matchParm)
-            if match and value is not None:
-                theParms.append(value)
-            if theParms:
-                newpt.append((theParms,tc))
-        if newpt != modelDict[m]['Parms']:
-            modelDict[m]['Parms'] = newpt
-
-def changeParmTC(modelDict,pname,newTC,modelList=['Fcst']):
-    """Alter a parm in that is defined in modelDict Parm setting.
-
-    pname: name of parm. This is a string not the parm definition
-    newTC: the new Time Contraint (tuple)
-    modelList: List of model names to check. An empty list will check all
-        models in modelDict.
-    Return: Nothing. modelDict is altered in place.
-    """
-    if not modelList:
-        modelList=modelDict.keys()
-    for m in sorted(modelList):
-        if m not in modelDict or 'Parms' not in modelDict[m]:
-            continue
-#d        print m,"checkForParm=",checkForParm(modelDict[m]['Parms'],pname)
-        if not checkForParm(modelDict[m]['Parms'],pname):
-            continue
-
-        newpt=[]
-        # Parms is tuple (parmList,TC)
-        for pList,tc in modelDict[m]['Parms']:
-            # This makes a copy of the list of parms, not a reference
-            # this is needed because we are changing the list in place.
-            theParms= list(pList)
-            matchParm=next((p for p in theParms if p[0] == pname),None)
-#d            print m,matchParm,tc,newTC,len(theParms)
-            if matchParm:
-                theParms.remove(matchParm)
-                newpt.append(([matchParm],newTC))
-#d                print "Added",matchParm,newTC
-            if theParms:
-#d                print "restored",theParms," to",tc
-                newpt.append((theParms,tc))
-        if newpt != modelDict[m]['Parms']:
-#d            print 'Updated model',m
-            modelDict[m]['Parms'] = newpt
-#d            print modelDict[m]['Parms'],'\n'
-
-def checkForParm(parmDef,pname):
-    """Check a model parm definition if a parm named pname is in it.
-
-    parmDef: list of tuples, each tuple is a list of parms and a time
-        contraint. Call with modelDict[modelname]['Parms].
-    pname: Name of parm (string).
-    Returns: Boolean True if found, or False
-    """
-    for item in parmDef:
-        t=next((pt for pt in item[0] if pt[0] == pname),None)
-        if t is not None:
-            return True
-    return False
-
-def getParmNames(parmsDef):
-    """Return a list of parm names in a model parm definition
-
-    parmsDef: list of tuples, each tuple is a list of parms and a time
-        constraint. Call with modelDict[modelname]['Parms].
-    Returns: List of string parameter names
-
-    Here's an example of how to remove unused parms from Fcst, this can
-    run in localConfig:
-
-    parmsToRemove=[]
-    for p in getParmNames(modelDict['Fcst']):
-        pl=p.lower()
-        for t in ['period','swell','wave','surf', 'surge']:
-            if t in pl:
-                parmsToRemove.append(p)
-                break
-    removeParms(modelDict,'Fcst',parmsToRemove)
-    """
-    result=[]
-    for pList,tc in parmsDef:
-        # p is the parmDef tuple where first item is the parm name
-        newParms=[p[0] for p in pList]
-        result+=newParms
-    return sorted(result)
-
-def printServerConfig(moduleObj,localsDict, logFile="/awips2/edex/logs/localConfig.log"):
-    """Dump out ServerConfig final settings. localsDict is a dictionary of
-    local variables in localConfig, normally locals().
-    """
-    # serverConfig log text
-    scText=""
-    try:
-        with open(logFile,"w") as fp:
-            # Print out dbs entries, i.e., model database definition
-            fp.write("Configuration for %s\n" % localsDict['SID'])
-            dbs=DATABASES
-            for item in sorted(dbs):
-                scText += "\ndbs[%s]: %s\n" % (item[0][0], str(item[0]))
-                scText += _dumpParms(item[1])
-
-            # Dump out serverConfig settings likely to be modified by localConfig
-            scvars=["D2DMODELS", "INITMODULES",
-                    "D2DDBVERSIONS", "D2DAccumulativeElements",
-                    "REQUEST_ISC", "SEND_ISC_ON_SAVE",
-                    "SEND_ISC_ON_PUBLISH", "REQUESTED_ISC_PARMS",
-                    "ExtraWEPrecision", "INITSKIPS",
-                    "HazardKeys",
-                    "MAX_USER_BACKGROUND_PROCESSES",
-                    "AdditionalISCRouting",
-                    "ignoreDatabases",
-                   ]
-
-            for item in scvars:
-                scText += "\n%s:\n" % item
-                obj=getattr(moduleObj,item,None)
-                if type(obj) is list:
-                    obj.sort()
-                scText += pprint.pformat(obj) +'\n'
-
-            # This prints out all variables named parms*, i.e., parmsNAM12
-            for k in sorted(localsDict.keys()):
-                if k == "OFFICIALDBS" or re.match("parms[A-Z]+",k) is not None or \
-                         k == "extraISCparms":
-                    scText += "\n%s:\n" % k
-                    scText += _dumpParms(localsDict[k])
-            scText += printModelDict(localsDict)
-            fp.write(scText)
-    except IOError as e:
-        LogStream.logProblem("printServerConfig open file problem "+logFile+" - log not created\n" +LogStream.exc(e))
-
-def printModelDict(localsDict):
-    """Convert serverConfig model configuration to a dictionary. This writes
-    the dictionary as text. This does not create a usable modelDict, just one to
-    use to print out the dictionary as python code."""
-
-    modelDict={}
-    parmsDict={}
-    tcDict={}
-    dbs=DATABASES
-    scText=""
-    for n,v in localsDict.items():
-        if type(v) is tuple and type(v[0]) is str and v[1] in [DISCRETE,SCALAR,VECTOR,WEATHER]:
-            parmsDict[n]=v
-        elif type(v) is tuple and len(v)==3 and type(v[0]) is int:
-            tcDict[n]=v
-
-    scText += '\n'
-    for n in sorted(parmsDict):
-        scText += 'parmVar: %s = %s\n' % (n,`parmsDict[n]`)
-    scText += '\n'
-    for n in sorted(tcDict):
-        scText += 'TC: %s = %s\n' % (n,`tcDict[n]`)
-    scText += '\n'
-
-    # Print out dbs entries, i.e., model database definition
-    for item in sorted(dbs):
-        plist=[]
-        parmTmpDict={}
-        for pt in item[1]:
-            parmsList=[]
-            # Try to find named parm setting
-            for p in pt[0]:
-                name=next((name for name,v in parmsDict.iteritems() if v == p), None)
-                if name is not None:
-                    parmsList.append(name)
-                else:
-                    parmsList.append(p[0])
-            theParms='&nlq(['+', '.join(parmsList)+'], '
-            # Try to get a named time constraint
-            name=next((name for name,v in tcDict.iteritems() if v == pt[1]), None)
-            if name is None:
-                name = `pt[1]`
-            if name in parmTmpDict:
-                parmTmpDict[name]+=parmsList
-            else:
-                parmTmpDict[name]=parmsList
-        # This consolidates parms by time constraint and sorts parm names.
-        for tc in sorted(parmTmpDict.keys()):
-            parmTmpDict[tc]=sorted(parmTmpDict[tc])
-            theParms='&nlq(['+', '.join(parmTmpDict[tc])+'], '
-            plist.append(theParms + tc +')&nrq')
-
-        modelDict[item[0][0]]={'DB':item[0],'Parms':plist}
-    for si,ml in INITMODULES.items():
-        m=ml[0]
-        entry=si
-        if len(ml) > 1:
-            # Multiple d2d models for smartinit
-            # Try to get model from si name
-            if si.find('Local_') == 0:
-                m=si[6:]
-            entry=(si,ml)
-        if m in modelDict:
-            # If a model has multiple SmartInit modules, try to best match which
-            # Smartinit module to assign to the model.
-            if 'INITMODULES' not in modelDict[m] or m in si:
-                modelDict[m]['INITMODULES']=entry
-        else:
-            modelDict[m]={'INITMODULES':entry}
-
-    for m,v in D2DDBVERSIONS.items():
-        if m in modelDict:
-            modelDict[m]['D2DDBVERSIONS']=D2DDBVERSIONS[m]
-        else:
-            modelDict[m]={'D2DDBVERSIONS':D2DDBVERSIONS[m]}
-
-    for m,v in D2DAccumulativeElements.items():
-        if m in modelDict:
-            modelDict[m]['D2DAccumulativeElements']=v
-        else:
-            modelDict[m]={'D2DAccumulativeElements':v}
-    for m,v in INITSKIPS.items():
-        if m in modelDict:
-            modelDict[m]['INITSKIPS']=v
-        else:
-            modelDict[m]={'INITSKIPS':v}
-    for item in D2DMODELS:
-        if type(item) is str:
-           m=item
-           v=item
-        else:
-           v,m=item
-        if m in modelDict:
-            modelDict[m]['D2DMODELS']=v
-        else:
-            modelDict[m]={'D2DMODELS':v}
-
-    for m in sorted(modelDict):
-        text=pprint.pformat(modelDict[m],width=80,indent=0)
-        text=text.replace("'&nlq",'')
-        text=text.replace("&nrq'",'')
-        text=text.replace('"&nlq','')
-        text=text.replace('&nrq"','')
-        text=text.replace(", 'INITMODULES':",",\n'INITMODULES':")
-        text=text.replace(')]}','),\n         ]\n}')
-        text=text.replace('\n','\n            ')
-        scText += "modelDict['%s'] = {\n            %s\n\n" % (m,text[1:])
-    return scText
-
-def _dumpParms(parms):
-    """Pretty prints parms."""
-    pDict={}
-    result=""
-    for item in parms:
-        if type(item) is not tuple:
-            # Not a parm definition!
-            return
-        pList,tc = item
-        for p in pList:
-            pDict[p[0]]=(p,tc)
-    for k in sorted(pDict.keys()):
-        result += "    %s\n" % repr(pDict[k])
-    return result
-
-def addOptionalParms(defaultTC,tcParmDict,parmDict,modelDict):
-    """Adds parms from optionalParmsDict to the Fcst database.
-    This is a convience function if most parms use the default time constraint.
-    Otherwise, its just as easy to hard code what needs to be added for a
-    optionalParmsDict entry.
-
-    defaultTC: Default time constraint to if a parameter specific TC is not
-               defined in tcParmDict.
-    tcParmDict: Dictionary with keys of time constraints. Value is a list of
-                parameter names to be added with that time constraint. Empty
-                dictionary ok if everything should use the default. Example:
-                tcParmDict={TC6NG:['IceLineAcc','IceFlatAcc',]}
-    parmDict: Parameter dictionary with keys of parameter name and value is
-              the parameter definition tuple. Keys must match keys in tcParmDict.
-    modelDict: The serverConfig modelDict dictionary. Must already have Fcst
-               defined. Changed in place.
-    Returns: The parameter definition added to Fcst
-    """
-
-    tcParms={defaultTC:[]}
-    for tc in tcParmDict:
-        tcParms[tc]=[]
-    if len(tcParmDict) == 0:
-        tcParmDict['dummyTC']=['dummyParm']
-    for pname,value in parmDict.iteritems():
-        # Find the time constrait to use for this parm
-        theTC=defaultTC
-        for tc in tcParmDict:
-            if pname in tcParmDict[tc]:
-                theTC=tc
-                break
-        tcParms[theTC].append(value)
-
-    theParms=[]
-    for tc in tcParms:
-        theParms.append((tcParms[tc],tc))
-    modelDict['Fcst']['Parms'] += theParms
-    return theParms
-
-def addPowt(modelDict):
-    """This sets up PoWT parameters for in Fcst database.
-    """
-    defaultTC=TC1
-    # Use value of time constraint and string name of parm in tcParmDict
-    tcParmDict={TC6NG:['IceLineAcc','IceFlatAcc',]
-               }
-    return addOptionalParms(defaultTC,tcParmDict,
-                            optionalParmsDict['powt'],modelDict)
-
-def addWinterWeatherProbs(modelDict):
-    """This sets up ER Winter Weather Probability parameters in the Fcst database.
-    """
-    defaultTC=TC1
-    # Use value of time constraint and string name of parm in tcParmDict
-    tcParmDict={}
-    return addOptionalParms(defaultTC,tcParmDict,
-                            optionalParmsDict['winterProbs'],modelDict)
-
-def addRainfallProbs(modelDict):
-    """This sets up WPC rainfall probability parameters in the Fcst database.
-    """
-    defaultTC=TC1
-    # Use value of time constraint and string name of parm in tcParmDict
-    tcParmDict={}
-    return addOptionalParms(defaultTC,tcParmDict,
-                            optionalParmsDict['rainfallProb'],modelDict)
-
-# Local-time based time constraints.  Does not automatically account for
-# daylight savings time.  The dst flag is 0 for standard time and manually
-# set to 1 for daylight time (if desired).  The start is specified in
-# seconds local time, e.g., 6*HOUR would indicate 6am.
-def localTC(start,repeat,duration,dst):
-    timezone = SITES[GFESUITE_SITEID][3]
-    import dateutil.tz, datetime
-    tz = dateutil.tz.gettz(timezone)
-    local = datetime.datetime.now(tz)
-    delta = tz.utcoffset(local) - tz.dst(local)
-    offset = delta.days*86400 + delta.seconds
-    start = start - offset
-    if dst == 1:
-        start = start - 3600     #daylight savings flag
-    if start >= 3600 * 24:
-        start = start - 3600 * 24
-    elif start < 0:
-        start = start + 3600 * 24
-    return (start, repeat, duration)
-
-# imports the named module.  If the module
-# does not exist, it is just ignored.  But
-# if it exists and has an error, the exception
-# is thrown.  If the module was imported returns
-# true.
-def siteImport(modName):
-    try:
-        fp, path, des = imp.find_module(modName)
-        if fp:
-            fp.close()
-    except ImportError:
-        LogStream.logEvent("No " + modName + " file found, using baseline settings.");
-        return 0
-    globals()[modName] = __import__(modName)
-    return 1
-
-def doIt():
-    # Import the local site configuration file (if it exists)
-    import doConfig
-    import VTECPartners
-    (models, projections, vis, wx, desDef, allSites, domain, siteId, timeZone,officeTypes) = \
-      doConfig.parse(GFESUITE_SITEID, DATABASES, types, visibilities, SITES,
-      allProjections)
-    IFPConfigServer.models                  = models
-    IFPConfigServer.projectionData          = projections
-    IFPConfigServer.weatherVisibilities     = vis
-    IFPConfigServer.weatherTypes            = wx
-    IFPConfigServer.discreteDefinitions     = desDef
-    IFPConfigServer.allSites                = allSites
-    IFPConfigServer.officeTypes             = officeTypes
-    IFPConfigServer.siteID                  = siteId
-    IFPConfigServer.timeZone                = timeZone
-    IFPConfigServer.d2dModels               = doConfig.d2dParse(D2DMODELS)
-    IFPConfigServer.netCDFDirs              = doConfig.netcdfParse(NETCDFDIRS)
-    IFPConfigServer.satData                 = doConfig.parseSat(SATDATA)
-    IFPConfigServer.domain                  = domain
-
-    (serverHost, mhsid, \
-    rpcPort, \
-    initMethods, accumulativeD2DElements, \
-    initSkips, d2dVersions, \
-    logFilePurgeAfter, \
-    prdDir, baseDir, \
-    extraWEPrecision, \
-    tableFetchTime, \
-    autoConfigureNotifyTextProd, \
-    iscRoutingTableAddress, \
-    requestedISCsites, requestISC, \
-    sendiscOnSave, sendiscOnPublish, \
-    requestedISCparms, \
-    transmitScript) \
-       = doConfig.otherParse(SITES.keys(), \
-      GFESUITE_SERVER, GFESUITE_MHSID, \
-      GFESUITE_PORT, INITMODULES,
-      D2DAccumulativeElements,
-      INITSKIPS, D2DDBVERSIONS, LOG_FILE_PURGE_AFTER,
-      GFESUITE_PRDDIR, GFESUITE_HOME,
-      ExtraWEPrecision, VTECPartners.VTEC_REMOTE_TABLE_FETCH_TIME,
-      AUTO_CONFIGURE_NOTIFYTEXTPROD, ISC_ROUTING_TABLE_ADDRESS,
-      REQUESTED_ISC_SITES, REQUEST_ISC, SEND_ISC_ON_SAVE, SEND_ISC_ON_PUBLISH,
-      REQUESTED_ISC_PARMS, TRANSMIT_SCRIPT)
-    IFPConfigServer.serverHost = serverHost
-    IFPConfigServer.mhsid = mhsid
-    IFPConfigServer.rpcPort = rpcPort
-    IFPConfigServer.initMethods = initMethods
-    IFPConfigServer.accumulativeD2DElements = accumulativeD2DElements
-    IFPConfigServer.initSkips = initSkips
-    IFPConfigServer.d2dVersions =  d2dVersions
-    IFPConfigServer.logFilePurgeAfter = logFilePurgeAfter
-    IFPConfigServer.prdDir = prdDir
-    IFPConfigServer.baseDir = baseDir
-    IFPConfigServer.extraWEPrecision = extraWEPrecision
-    IFPConfigServer.tableFetchTime = tableFetchTime
-    IFPConfigServer.autoConfigureNotifyTextProd =  autoConfigureNotifyTextProd
-    IFPConfigServer.iscRoutingTableAddress = iscRoutingTableAddress
-    IFPConfigServer.requestedISCsites = requestedISCsites
-    IFPConfigServer.requestISC = requestISC
-    IFPConfigServer.sendiscOnSave = sendiscOnSave
-    IFPConfigServer.sendiscOnPublish = sendiscOnPublish
-    IFPConfigServer.requestedISCparms = requestedISCparms
-    IFPConfigServer.transmitScript = transmitScript
-    IFPConfigServer.iscRoutingConfig = doConfig.parseAdditionalISCRouting(AdditionalISCRouting)
-
-def getSimpleConfig():
-    return IFPConfigServer
-
-GFESUITE_SITEID = siteConfig.GFESUITE_SITEID
-GFESUITE_MHSID = siteConfig.GFESUITE_MHSID
-GFESUITE_SERVER =  siteConfig.GFESUITE_SERVER
-GFESUITE_HOME   = siteConfig.GFESUITE_HOME
-GFESUITE_PORT   = int(siteConfig.GFESUITE_PORT)
-#GFESUITE_DATDIR = siteConfig.GFESUITE_DATDIR
-GFESUITE_LOGDIR = siteConfig.GFESUITE_LOGDIR
-GFESUITE_PRDDIR = siteConfig.GFESUITE_PRDDIR
-#GFESUITE_SHPDIR = siteConfig.GFESUITE_SHPDIR
-#GFESUITE_TOPODIR = siteConfig.GFESUITE_TOPODIR
-#GFESUITE_VTECDIR = siteConfig.GFESUITE_VTECDIR
-
-SID = GFESUITE_SITEID
-
-# modelDict is a master configuration dictionary for all GFE databases
-# Create self initializing dictionary via collections.defaultdict
-modelDict=defaultdict(dict)
-
-# ignoreDatabases is used when executing the final configuration to ignore
-# certain models. The new paradigm with modelDict is to have one master
-# modelDict and ignore datasets for specific regions or groups. Sites can
-# add to or remove from ignoreDatabases in their localConfig.
-ignoreDatabases=[]
-
-# Groups are a way of setting up groups of parms for special or optionally used
-# methodology. For example, the Probability of Weather Type methodology.
-groups={}
-groups['ALASKA_SITES'] = ['AFG', 'AJK', 'ALU', 'AER', 'ACR', 'AFC', 'VRH', 'AAWU', 'AVAK']
-groups['GreatLake_SITES'] = ['LOT', 'MKX', 'GRB', 'DLH', 'MQT', 'APX', 'GRR', 'DTX',
-                             'IWX', 'CLE', 'BUF', 'PBZ', 'ILN', 'IND', 'ILX', 'MPX', 'FGF']
-groups['CONUS_EAST_SITES'] = ['ALY', 'AKQ', 'APX', 'BGM', 'BMX', 'BOX', 'BTV', 'BUF',
-                              'CAE', 'CAR', 'CHS', 'CLE', 'CTP', 'DTX', 'FFC', 'GRR',
-                              'GSP', 'GYX', 'ILM', 'ILN', 'IND', 'JAN', 'JAX', 'JKL',
-                              'LCH', 'LMK', 'LWX', 'MEG', 'MFL', 'MHX', 'MLB', 'MOB',
-                              'MQT', 'MRX', 'OKX', 'PAH', 'PBZ', 'PHI', 'RAH', 'RLX',
-                              'RNK', 'TAE', 'TBW', 'ALR', 'RHA', 'TAR', 'TIR']
-groups['RFC_SITES'] = ["ACR", "ALR", "FWR", "KRF", "MSR", "ORN", "PTR",
-                       "RHA", "RSA", "STR", "TAR", "TIR", "TUA"]
-
-siteRegion={}
-# need to account for RFCs?
-siteRegion['AR'] = groups['ALASKA_SITES']
-siteRegion['CR'] = ['ABR','APX','ARX','BIS','BOU','CYS','DDC','DLH','DMX','DTX',
-                    'DVN','EAX','FGF','FSD','GID','GJT','GLD','GRB','GRR','ICT',
-                    'ILX','IND','IWX','JKL','LBF','LMK','LOT','LSX','MKX','MPX',
-                    'MQT','OAX','PAH','PUB','RIW','SGF','TOP','UNR']
-siteRegion['ER'] = ['AKQ','ALY','BGM','BOX','BTV','BUF','CAE','CAR','CHS','CLE',
-                    'CTP','GSP','GYX','ILM','ILN','LWX','MHX','OKX','PBZ','PHI',
-                    'RAH','RLX','RNK']
-siteRegion['PR'] = ['GUM','HFO','PBP','PPG']
-siteRegion['SR'] = ['ABQ','AMA','BMX','BRO','CRP','EPZ','EWX','FFC','FWD','HGX',
-                    'HUN','JAN','JAX','KEY','LCH','LIX','LUB','LZK','MAF','MEG',
-                    'MFL','MLB','MOB','MRX','OHX','OUN','SHV','SJT','SJU','TAE',
-                    'TBW','TSA']
-siteRegion['WR'] = ['BOI','BYZ','EKA','FGZ','GGW','HNX','LKN','LOX','MFR','MSO',
-                    'MTR','OTX','PDT','PIH','PQR','PSR','REV','SEW','SGX','SLC',
-                    'STO','TFX','TWC','VEF']
-
-groups['OCONUS_SITES'] = groups['ALASKA_SITES'] + siteRegion['PR'] + ['SJU']
-
-myRegion='ALL'
-for r in siteRegion:
-    if SID in siteRegion[r]:
-        myRegion=r
-        break
-
-groups['powt']=list(groups['OCONUS_SITES']+ siteRegion['CR'] + siteRegion['ER'] + siteRegion['SR'] + siteRegion['WR'])
-groups['marineSites']=[
-                       # CONUS WFOs
-                       "CAR","GYX","BOX","OKX","PHI","LWX","AKQ","MHX","ILM","CHS",
-                       "BRO","CRP","HGX","LCH","LIX","MOB","TAE","TBW","KEY","MFL",
-                       "MLB","JAX","SJU",
-                       "SEW","PQR","MFR","EKA","MTR","LOX","SGX",
-                       # AR sites
-                       'AFC', 'AFG', 'AJK', 'AER', 'ALU', 'VRH', 'AVAK',
-                       # OPC Atlantic and Pacific
-                       'ONA', 'ONP',
-                       # NHC/TAFB Pacific and Atlantic, Storm Surge
-                       'NH1', 'NH2', 'NHA',
-                       # HFO Marine, GUM
-                       'HFO', 'HPA', 'GUM',
-                      ]
-
-groups['winterProbs']= [
-            # ER sites
-            'AKQ','ALY','BGM','BOX','BTV','BUF','CAE','CAR','CHS','CLE',
-            'CTP','GSP','GYX','ILM','ILN','LWX','MHX','OKX','PBZ','PHI',
-            'RAH','RLX','RNK',
-            #CR sites
-            'ABR','BIS','BOU','CYS','DDC','DMX','FGF','FSD','GLD','GRB',
-            'ICT','IND','IWX','JKL','LMK','LOT','MKX','MPX','MQT','OAX',
-            'PAH','PUB','SGF','GJT',
-            #SR sites
-            'FFC','LUB','MRX','OUN','TSA',
-            #WR sites
-            'FGZ','GGW','HNX','LKN','MFR','MSO','OTX','PDT','REV','SEW',
-            'SGX','SLC','STO'
-           ]
-
-groups['rainfallProbs'] = ["BOX"]
-
-#---------------------------------------------------------------------------
-#
-#  Weather Element configuration section.
-#
-#---------------------------------------------------------------------------
-
-SCALAR  = 'Scalar'
-VECTOR  = 'Vector'
-WEATHER = 'Weather'
-DISCRETE = 'Discrete'
-YES = 1
-NO = 0
-
-#SCALAR, VECTOR
-# name/type/units/description/max/min/precision/rateParm/
-#WEATHER
-# name/WEATHER/units/description/
-#DISCRETE
-# keyDef = [(keySym, keyDesc), (keySym, keyDesc)]
-# name/DISCRETE/units/description/overlapCapable/keyDef/
-
-# Standard Public Weather Elements
-SID = GFESUITE_SITEID
-
-maxTempVal=140.0
-minTempVal=-100.0
-maxTdVal=140.0
-minTdVal=-100.0
-maxQpfVal=10.0
-maxIceVal=5.0
-Temp =    ("T", SCALAR, "F", "Surface Temperature", maxTempVal, minTempVal, 0, NO)
-Td =      ("Td", SCALAR, "F", "Dewpoint", maxTdVal, minTdVal, 0, NO)
-MaxT =    ("MaxT", SCALAR, "F", "Maximum Temperature", maxTempVal, minTempVal, 0, NO)
-MinT =    ("MinT", SCALAR, "F", "Minimum Temperature", maxTempVal, minTempVal, 0, NO)
-HeatIndex = ("HeatIndex", SCALAR, "F", "Heat Index", maxTempVal, -80.0, 0, NO)
-WindChill = ("WindChill", SCALAR, "F", "Wind Chill", 120.0, -120.0, 0, NO)
-QPF =     ("QPF", SCALAR, "in", "QPF", maxQpfVal, 0.0, 2, YES)
-Wind =    ("Wind", VECTOR, "kts", "Surface Wind", 125.0, 0.0, 0, NO)
-WindGust = ("WindGust", SCALAR, "kts", "Wind Gust", 125.0, 0.0, 0, NO)
-# special for TPC hurricane winds
-HiWind =    ("Wind", VECTOR, "kts", "Surface Wind", 200.0, 0.0, 0, NO)
-Weather = ("Wx", WEATHER, "wx", "Weather")
-IceAcc = ("IceAccum", SCALAR, "in", "Ice Accumulation", maxIceVal, 0.0, 2, YES)
-StormTotalIce = ('StormTotalIce', SCALAR, 'in', 'Storm Total Ice', maxIceVal, 0.0, 2, YES)
-SnowAmt = ("SnowAmt", SCALAR, "in", "Snowfall amount", 20.0, 0.0, 1, YES)
-StormTotalSnow = ("StormTotalSnow", SCALAR, "in","Storm Total Snow", 180.0, 0.0, 1, NO)
-PoP     = ("PoP", SCALAR, "%", "Prob of Precip", 100.0, 0.0, 0, NO)
-PoP6    = ("PoP6", SCALAR, "%", "Prob of Precip (6hr)", 100.0, 0.0, 0, NO)
-PoP12   = ("PoP12", SCALAR, "%", "Prob of Precip (12hr)", 100.0, 0.0, 0, NO)
-TstmPrb3 = ("TstmPrb3", SCALAR, "%", "Prob of Tstorm (3hr)", 100.0, 0.0, 0, NO)
-TstmPrb6 = ("TstmPrb6", SCALAR, "%", "Prob of Tstorm (6hr)", 100.0, 0.0, 0, NO)
-TstmPrb12 = ("TstmPrb12", SCALAR, "%", "Prob of Tstorm (12hr)", 100.0, 0.0, 0, NO)
-Sky     = ("Sky", SCALAR, "%", "Sky Condition", 100.0, 0.0, 0, NO)
-FzLevel = ("FzLevel", SCALAR, "ft", "Freezing level", 30000.0, 0.0, 0, NO)
-SnowLevel = ("SnowLevel", SCALAR, "ft", "Snow Level", 18000.0, 0.0, 0, NO)
-RH      = ("RH", SCALAR, "%", "Relative Humidity", 100.0, 0.0, 0, NO)
-
-# DR20541 and 20482 - add collaborate PoP, SnowAmt, QPF and ndfd QPF tools
-PoP12hr = ("PoP12hr", SCALAR, "%", "12 hr Chance of Precip", 100.0, 0.0, 0, NO)
-QPF6hr = ("QPF6hr", SCALAR, "in", "6 hr Precipitation (in)", maxQpfVal, 0.0, 2, YES)
-SnowAmt6hr = ("SnowAmt6hr", SCALAR, "in", "6 hr Snowfall", 30.0, 0.0, 1, YES)
-
-# Cobb SnowTool included.
-SnowRatio = ('SnowRatio', SCALAR, 'none', 'Snow Ratio', 40.0, 0.0, 1, NO)
-#totalVV = ('totalVV', SCALAR, 'ubar/s', 'Total VV', 400.0, 0.0, 0, YES)
-cape = ("cape", SCALAR, "1unit", "CAPE", 8000.0, 0.0, 1, NO)
-ApparentT = ("ApparentT", SCALAR, "F", "Apparent Temperature", maxTempVal, -120.0, 0, NO)
-LkSfcT = ("LkSfcT", SCALAR, "C", "Lake Surface T", 40.0, -2.0, 1, NO)
-SnowMap = ("SnowMap", SCALAR, "in", "Snowfall Map", 20.0, 0.0, 1, NO)
-StormTotalQPF = ('StormTotalQPF', SCALAR, 'in', 'Storm Total QPF (in)', 36.0, 0.0, 2, NO)
-SeasonTotalSnow = ('SeasonTotalSnow', SCALAR, 'in', 'Season Total Snow (in)', 150.0, 0.0, 2, NO)
-
-# Fire Weather Weather Elements
-LAL = ("LAL", SCALAR, "cat", "Lightning Activity Level", 6.0, 1.0, 0, NO)
-CWR = ("CWR", SCALAR, "%", "Chance of Wetting Rain", 100.0, 0.0, 0, NO)
-Haines = ("Haines", SCALAR, "cat", "Haines Index", 6.0, 2.0, 0, NO)
-MixHgt = ("MixHgt", SCALAR, "ft", "Mixing Height", 20000.0, 0.0, 0, NO)
-Wind20ft =    ("Wind20ft", VECTOR, "kts", "20ft. Wind", 125.0, 0.0, 0, NO)
-FreeWind = ("FreeWind", VECTOR, "kts", "Free Air Wind", 125.0, 0.0, 0, NO)
-TransWind = ("TransWind", VECTOR, "kts", "Transport Wind", 125.0, 0.0, 0, NO)
-Stability = ("Stability",SCALAR,"cat","Stability", 6.0,1.0,0, NO)
-HrsOfSun = ("HrsOfSun",SCALAR,"hrs","Hours of Sun",24.0,0.0,1, NO)
-MarineLayer = ("MarineLayer",SCALAR,"ft","Depth of Marine Layer", 20000.0,0.0,0,NO)
-InvBurnOffTemp = ("InvBurnOffTemp",SCALAR,"F","Inversion Burn-off Temperature", 120.0,-30.0,0, NO)
-VentRate = ("VentRate", SCALAR, "kt*ft", "VentRate", 500000.0, 0.0, 0, NO)
-DSI = ("DSI", SCALAR, "index", "DSI", 6.0, 0.0, 0, NO)
-MaxRH      = ("MaxRH", SCALAR, "%", "Maximum Relative Humidity", 100.0, 0.0, 0, NO)
-MinRH      = ("MinRH", SCALAR, "%", "Minimum Relative Humidity", 100.0, 0.0, 0, NO)
-Wetflag = ("Wetflag", SCALAR, "yn", "1300LT WetFlag", 1.0, 0.0, 0, NO)
-Ttrend = ("Ttrend", SCALAR, "F", "24hr Temperature Trend", 50.0, -50.0, 0, NO)
-RHtrend = ("RHtrend", SCALAR, "F", "24hr Relative Humidity Trend", 100.0, -100.0, 0, NO)
-
-# HPC Delta weather elements
-DeltaMinT = ('DeltaMinT', SCALAR, 'F', 'DeltaMinT', 130.0, -80.0, 0, NO)
-DeltaMaxT = ('DeltaMaxT', SCALAR, 'F', 'DeltaMaxT', 130.0, -80.0, 0, NO)
-DeltaWind = ("DeltaWind", VECTOR, "kts", "Surface Delta Wind", 125.0, 0.0, 0, NO)
-DeltaSky = ("DeltaSky", SCALAR, "%", "Delta Sky Condition", 100.0, -100.0, 0, NO)
-DeltaPoP = ("DeltaPoP", SCALAR, "%", "Delta Prob of Precip", 100.0, -100.0, 0, NO)
-
-# Special LAPS parms
-Radar = ("Radar", SCALAR, "dbz", "Radar Reflectivity", 80.0, -20.0, 0, NO)
-
-# RTMA parms
-QPE =     ("QPE", SCALAR, "in", "QPE", maxQpfVal, 0.0, 2, YES)
-#if SID in groups['ALASKA_SITES']: - not sure if this needs to be like that
-if SID in groups['OCONUS_SITES']:
-    TUnc =     ("TUnc", SCALAR, "F", "Temperature Anl Uncertainty", 20.0, 0.0, 0, NO)
-    TdUnc =    ("TdUnc", SCALAR, "F", "Dewpoint Anl Uncertainty", 25.0, 0.0, 0, NO)
-else:
-    TUnc =     ("TUnc", SCALAR, "F", "Temperature Anl Uncertainty", 10.0, 0.0, 0, NO)
-    TdUnc =    ("TdUnc", SCALAR, "F", "Dewpoint Anl Uncertainty", 15.0, 0.0, 0, NO)
-# DR17144
-SkyUnc  =  ("SkyUnc", SCALAR, "%", "Sky Condition Uncertainty", 100.0, 0.0, 0, NO)
-WSpdUnc =  ("WSpdUnc", SCALAR, "kts", "WSpd Anl Uncertainty", 12.0, 0.0, 0, NO)
-WDirUnc =  ("WDirUnc", SCALAR, "deg", "WDir Anl Uncertainty", 10.0, 0.0, 0, NO)
-VisUnc  =  ("VisUnc", SCALAR, "SM", "Vsby Anl Uncertainty", 10.0, 0.0, 2, NO)
-# DCS 17288
-PressUnc = ("PressUnc", SCALAR, "Pa", "Press Anl Uncertainty", 110000.0, 0.0, 2, NO)
-Pressure = ("Pressure", SCALAR, "Pa", "Pressure", 110000.0, 0.0, 2, NO)
-WGustUnc =  ("WGustUnc", SCALAR, "kts", "WGust Anl Uncertainty", 12.0, 0.0, 0, NO)
-
-# NamDNG parms
-QPF3 =     ("QPF3", SCALAR, "in", "3HR QPF", maxQpfVal, 0.0, 2, YES)
-QPF6 =     ("QPF6", SCALAR, "in", "6HR QPF", maxQpfVal, 0.0, 2, YES)
-QPF12 =    ("QPF12", SCALAR, "in", "12HR QPF", maxQpfVal, 0.0, 2, YES)
-Vis =      ("Vis", SCALAR, "SM", "Visibility", 10.0, 0.0, 2, NO)
-SnowAmt6 = ("SnowAmt6", SCALAR, "in", "Snowfall amount (6hr)", 20.0, 0.0, 1, YES)
-
-MaxT3 =  ("MaxT3", SCALAR, "F", "3hr Maximum Temperature", maxTempVal, minTempVal, 0, NO)
-MinT3 =  ("MinT3", SCALAR, "F", "3hr Minimum Temperature", maxTempVal, minTempVal, 0, NO)
-MaxRH3 = ("MaxRH3", SCALAR, "%", "3hr Maximum Relative Humidity", 100.0, 0.0, 0, NO)
-
-# Parms for ,'SAT',Satellite
-SatVisE  = ("VisibleE", SCALAR, "count", "Satellite Albdo %", 255.0, 0.0, 0, NO)
-SatIR11E = ("IR11E", SCALAR, "C", "11 micron temperature", 58.0, -111.0, 0, NO)
-SatIR13E = ("IR13E", SCALAR, "C", "13 micron temperature", 50.0, -111.0, 0, NO)
-SatIR39E = ("IR39E", SCALAR, "C", "3.9 micron temperature", 50.0, -111.0, 0, NO)
-SatWVE   = ("WaterVaporE", SCALAR, "C", "water vapor temperature", -11.0, -62.0, 0, NO)
-SatFogE  = ("FogE", SCALAR, "C", "ir11 - ir39", 50.0, -111.0, 0, NO)
-
-SatVisW  = ("VisibleW", SCALAR, "count", "Satellite Albdo %", 255.0, 0.0, 0, NO)
-SatIR11W = ("IR11W", SCALAR, "C", "11 micron temperature", 58.0, -111.0, 0, NO)
-SatIR13W = ("IR13W", SCALAR, "C", "13 micron temperature", 50.0, -111.0, 0, NO)
-SatIR39W = ("IR39W", SCALAR, "C", "3.9 micron temperature", 50.0, -111.0, 0, NO)
-SatWVW   = ("WaterVaporW", SCALAR, "C", "water vapor temperature", -11.0, -62.0, 0, NO)
-SatFogW  = ("FogW", SCALAR, "C", "ir11 - ir39", 50.0, -111.0, 0, NO)
-
-# TPC Wind Probability parms
-prob34 = ("prob34", SCALAR, "%", "WS34 CPROB", 100.0, 0.0, 0, NO)
-prob50 = ("prob50", SCALAR, "%", "WS50 CPROB", 100.0, 0.0, 0, NO)
-prob64 = ("prob64", SCALAR, "%", "WS64 CPROB", 100.0, 0.0, 0, NO)
-pws34 = ("pws34", SCALAR, "%", "34WSIPROB", 100.0, 0.0, 0, NO)
-pws50 = ("pws50", SCALAR, "%", "50WSIPROB", 100.0, 0.0, 0, NO)
-pws64 = ("pws64", SCALAR, "%", "64WSIPROB", 100.0, 0.0, 0, NO)
-pwsD34 = ("pwsD34", SCALAR, "%", "Day34WSIPROB", 100.0, 0.0, 0, NO)
-pwsN34 = ("pwsN34", SCALAR, "%", "Night34WSIPROB", 100.0, 0.0, 0, NO)
-pwsD64 = ("pwsD64", SCALAR, "%", "Day64WSIPROB", 100.0, 0.0, 0, NO)
-pwsN64 = ("pwsN64", SCALAR, "%", "Night64WSI PROB", 100.0, 0.0, 0, NO)
-pws34int = ("pws34int", SCALAR, "%", "34WSIntPROB", 100.0, 0.0, 0, NO)
-pws64int = ("pws64int", SCALAR, "%", "64WSIntPROB", 100.0, 0.0, 0, NO)
-
-# Surge parms for HLS/TCV
-InundationMax = ("InundationMax", SCALAR, "ft", "Max Inundation", 30.0, -100.0, 1, NO)
-InundationTiming = ("InundationTiming", SCALAR, "ft", "Incremental Inundation", 30.0, -100.0, 1, NO)
-SurgeHtPlusTideMSL = ("SurgeHtPlusTideMSL", SCALAR, "ft", "Surge above MSL", 30.0, -100.0, 1, NO)
-SurgeHtPlusTideMLLW = ("SurgeHtPlusTideMLLW", SCALAR, "ft", "Surge above MLLW", 30.0, -100.0, 1, NO)
-SurgeHtPlusTideMHHW = ("SurgeHtPlusTideMHHW", SCALAR, "ft", "Surge above MHHW", 30.0, -100.0, 1, NO)
-SurgeHtPlusTideNAVD = ("SurgeHtPlusTideNAVD", SCALAR, "ft", "Surge above NAVD88", 30.0, -100.0, 1, NO)
-
-# parms for storm surge collaboration
-SShazardKeys = [("",""), ("SS.A", "STORM SURGE WATCH"), ("SS.W", "STORM SURGE WARNING")]
-ProposedSS = ("ProposedSS", DISCRETE, "wwa", "Proposed StormSurge Hazards", YES, SShazardKeys, 7)
-tempProposedSS = ("tempProposedSS", DISCRETE, "wwa", "Temp Proposed StormSurge Hazards",
-              YES, SShazardKeys, 4)
-InitialSS = ("InitialSS", DISCRETE, "wwa", "Initial StormSurge Hazards",
-              YES, SShazardKeys, 4)
-DiffSS = ("DiffSS", SCALAR, "None", "Difference StormSurge Hazards", 2.0, -1.0, 0, NO)
-
-# parms for tropical cyclone threat graphics
-Threat4Keys = [("None","None to Little"), ("Elevated","Elevated"), ("Mod", "Moderate"), ("High", "High"), ("Extreme","Extreme"),]
-
-FloodingRainThreat = ("FloodingRainThreat", DISCRETE, "cat", "Flooding Rain Threat", NO, Threat4Keys,2)
-StormSurgeThreat = ("StormSurgeThreat", DISCRETE, "cat", "Storm Surge Threat", NO, Threat4Keys,2)
-WindThreat = ("WindThreat", DISCRETE, "cat", "Wind Threat", NO, Threat4Keys,2)
-TornadoThreat = ("TornadoThreat", DISCRETE, "cat", "Tornado Threat", NO, Threat4Keys,2)
-#    09/13/2016      JCM    changed precision of QPFtoFFGRatio to 2, max from 8 to 1000
-QPFtoFFGRatio = ("QPFtoFFGRatio", SCALAR, "1", "QPF to FFG Ratio", 1000.0, 0.0, 2, NO)
-
-# Hazards
-HazardKeys = []
-HazardKeys.append(("", ""))  #1st one must be None
-import VTECTable
-kys = VTECTable.VTECTable.keys()
-kys.sort()
-for k in kys:
-    HazardKeys.append((k, VTECTable.VTECTable[k]['hdln']))
-
-#H-VTEC keys - will someday add these back in
-#("hydroER", "Hydro - Excessive Rainfall"),
-#("hydroSM", "Hydro - Snow melt"),
-#("hydroRS", "Rain and Snow melt"),
-#("hydroDM", "Dam or Levee Failure"),
-#("hydroGO", "Glacier-Dammed Lake Outburst"),
-#("hydroIJ", "Ice Jam"),
-#("hydroIC", "Rain and/or Snow melt and/or Ice Jam"),
-
-Hazards = ("Hazards", DISCRETE, "wwa", "Hazards", YES, HazardKeys, 4)
-
-# Scalar/Vector Weather Elements that Require Extra Precision (due to their
-# use in calculations) Either form may be used.
-ExtraWEPrecision = []
-
-# Parms for ESTOFS
-AstroTide = ("AstroTide", SCALAR, "ft", "Astro Tide", 20.0, -8.0, 1, NO)
-StormSurge = ("StormSurge", SCALAR, "ft", "Storm Surge", 30.0, -5.0, 1, NO)
-
-# Parms for ETSS and ETSSHiRes
-SurgeTide = ("SurgeTide", SCALAR, "ft", "Surge Tide", 20.0, -8.0, 1, NO)
-
-# Parm for Aviation/GFSLAMPGrid
-CigHgt=("CigHgt",SCALAR,"ft","Ceiling Height",25000.0,-100.0,0,NO)
-
-# Parms for NationalBlend
-QPF1=("QPF1", SCALAR, "in", "1HR QPF", maxQpfVal, 0.0, 2, YES)
-PPI01=('PPI01', SCALAR, '%', '1-H Precip Potential Index', 100.0, 0.0, 0, NO)
-PPI06=('PPI06', SCALAR, '%', '6-H Precip Potential Index', 100.0, 0.0, 0, NO)
-PositiveEnergyAloft=("PositiveEnergyAloft" , SCALAR, "j/kg", "Positive energy aloft" , 500.0, 0.0, 1, NO)
-NegativeEnergyLowLevel=("NegativeEnergyLowLevel" , SCALAR, "j/kg", "Negative energy in the low levels" , 0.0, -500.0, 1, NO)
-SnowAmt01 = ("SnowAmt01", SCALAR, "in", "1-h Snow Accumulation", 20.0, 0.0, 1, YES)
-IceAccum01 = ("IceAccum01", SCALAR, "inch", "1-h Ice Accumulation", maxIceVal, 0.0, 3, NO)
-IceAccum = ("IceAccum", SCALAR, "inch", "6-h Ice Accumulation", 13.0, 0.0, 3, NO)
-TstmPrb1 = ("TstmPrb1", SCALAR, "%", "1-h SREF-based Prob. of a Thunderstorm", 100.0, 0.0, 0, NO)
-DryTstmPrb = ("DryTstmPrb", SCALAR, "%", "3-h SREF-based Prob. of a Dry Thunderstorm", 100.0, 0.0, 0, NO)
-WGS50pct =("WGS50pct", SCALAR, "kts", "10-m Wind Gust",125.0 , 0.0, 0, NO)
-WS50Prcntl30m =("WS50Prcntl30m", SCALAR, "kts", "30-m Wind Speed", 125.0, 0.0, 0, NO)
-WS50Prcntl80m =("WS50Prcntl80m", SCALAR, "kts", "80-m Wind Speed", 125.0, 0.0, 0, NO)
-Vis50pct =("Vis50pct", SCALAR, "SM", "Visibility",10.0 , 0.0, 3, NO)
-T50pct =("T50pct", SCALAR, "F", "Air Temperature", maxTempVal, minTempVal, 1, NO)
-PMSL10pct =("PMSL10pct", SCALAR, "mb", "10th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO)
-PMSL50pct =("PMSL50pct", SCALAR, "mb", "50th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO)
-PMSL90pct =("PMSL90pct", SCALAR, "mb", "90th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO)
-FosBerg = ("FosBerg", SCALAR, "none", "Fosberg Fire Weather Index", 100.0, 0.0, 0, NO)
-
-
-
-#---------------------------------------------------------------------------
-#
-#  Weather configuration section
-#
-#---------------------------------------------------------------------------
-
-# list of possible visibilities
-visibilities = ['', '0SM', '1/4SM', '1/2SM', '3/4SM', '1SM', '11/2SM',
-                '2SM', '21/2SM', '3SM', '4SM', '5SM', '6SM', 'P6SM']
-
-# list of possible coverages and probabilities
-NOCOV = ('', 'No Coverage')
-ISOD = ('Iso', 'Isolated')
-SCT = ('Sct', 'Scattered')
-NUM = ('Num', 'Numerous')
-WIDE = ('Wide', 'Widespread')
-OCNL = ('Ocnl', 'Occasional')
-SCHC = ('SChc', 'Slight Chance Of')
-CHC = ('Chc', 'Chance Of')
-LKLY = ('Lkly', 'Likely')
-DEFN = ('Def', 'Definite')
-PATCHY = ('Patchy', 'Patchy')
-AREAS = ('Areas', 'Areas of')
-FQT = ('Frq', 'Frequent')
-BRIEF = ('Brf', 'Brief')
-PERIODS = ('Pds', 'Periods of')
-INTM = ('Inter', 'Intermittent')
-
-# list of possible intensities
-INTEN_NONE = ('', 'No intensity')
-INTEN_VERYLIGHT = ('--', 'Very Light')
-INTEN_LIGHT = ('-', 'Light')
-INTEN_MOD = ('m', 'Moderate')
-INTEN_HEAVY = ('+', 'Heavy')
-INTEN_SEVERE = ('+', 'Severe')
-INTEN_DENSE = ('+', 'Dense')
-
-# list of optional attributes
-FQTLTG = ('FL', 'Frequent Lightning')
-GUSTS = ('GW', 'Gusty Winds')
-HVYRAFL = ('HvyRn', 'Heavy Rainfall')
-DMGWND = ('DmgW', 'Damaging Winds')
-SMALLH = ('SmA', 'Small Hail')
-LARGEH = ('LgA', 'Large Hail')
-OUTLYNG = ('OLA','in the outlying areas')
-GRASSY  = ('OGA','on grassy areas')
-OVRPASS = ('OBO','on bridges and overpasses')
-OR = ('OR', 'or')
-DRY = ('Dry', 'dry')
-PRIMARY = ('Primary', 'Highest Ranking')
-MENTION = ('Mention', 'Include Unconditionally')
-TORNADO = ('TOR', 'Tornadoes')
-
-# list of each weather types
-NOWX = ('', 'No Weather',
-          [NOCOV],
-          [INTEN_NONE],
-          [])
-THUNDER = ('T', 'Thunderstorms',
-          [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF,
-            PERIODS, INTM],
-          [INTEN_NONE, INTEN_SEVERE],
-          [PRIMARY, MENTION, FQTLTG, HVYRAFL, GUSTS, DMGWND, DRY,
-            LARGEH, SMALLH, TORNADO])
-RAIN = ('R', 'Rain',
-          [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],
-          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-          [PRIMARY, MENTION, OR])
-RAINSHOWERS = ('RW', 'Rain Showers',
-          [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF,
-            PERIODS, INTM],
-          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-          [PRIMARY, MENTION, OR])
-DRIZZLE = ('L', 'Drizzle',
-          [PATCHY, AREAS, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT,
-            BRIEF, PERIODS, INTM],
-          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-          [PRIMARY, MENTION, OR])
-FZRAIN = ('ZR', 'Freezing Rain',
-          [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],
-          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-          [PRIMARY, MENTION, OR])
-FZDRIZZLE = ('ZL', 'Freezing Drizzle',
-          [PATCHY, AREAS, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT,
-            BRIEF, PERIODS, INTM],
-          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-          [PRIMARY, MENTION, OR])
-SNOW = ('S', 'Snow',
-          [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],
-          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-          [PRIMARY, MENTION, OR])
-SNOWSHOWERS = ('SW', 'Snow Showers',
-          [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT,
-            BRIEF, PERIODS, INTM],
-          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-          [PRIMARY, MENTION, OR])
-SLEET = ('IP', 'Sleet',
-          [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],
-          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-          [PRIMARY, MENTION, OR])
-FOG = ('F', 'Fog',
-          [PATCHY, AREAS, WIDE],
-          [INTEN_NONE, INTEN_DENSE],
-          [PRIMARY, MENTION])
-FREEZEFOG = ('ZF', 'Freezing Fog',
-          [PATCHY, AREAS, WIDE],
-          [INTEN_NONE, INTEN_DENSE],
-          [PRIMARY, MENTION])
-ICEFOG = ('IF', 'Ice Fog',
-          [PATCHY, AREAS, WIDE],
-          [INTEN_NONE],
-          [PRIMARY, MENTION])
-ICECRYSTAL = ('IC', 'Ice Crystals',
-          [PATCHY, AREAS, WIDE],
-          [INTEN_NONE],
-          [PRIMARY, MENTION])
-HAZE = ('H', 'Haze',
-          [DEFN],
-          [INTEN_NONE],
-          [PRIMARY, MENTION])
-BLWGSNOW = ('BS', 'Blowing Snow',
-          [PATCHY, AREAS, DEFN],
-          [INTEN_NONE],
-          [PRIMARY, MENTION])
-BLWGSAND = ('BN', 'Blowing Sand',
-          [PATCHY, AREAS, DEFN],
-          [INTEN_NONE],
-          [PRIMARY, MENTION])
-SMOKE = ('K', 'Smoke',
-          [PATCHY, AREAS, DEFN],
-          [INTEN_NONE],
-          [PRIMARY, MENTION])
-BLWGDUST = ('BD', 'Blowing Dust',
-          [PATCHY, AREAS, DEFN],
-          [INTEN_NONE],
-          [PRIMARY, MENTION])
-FROST = ('FR','Frost',
-          [PATCHY, AREAS, WIDE],
-          [INTEN_NONE],
-          [PRIMARY, MENTION, OUTLYNG])
-FRZSPRAY = ('ZY','Freezing Spray',
-          [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL],
-          [INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],
-          [PRIMARY, MENTION])
-VOLASH = ('VA','Volcanic Ash',
-          [NOCOV],
-          [INTEN_NONE],
-          [PRIMARY, MENTION])
-WATERSPOUT = ('WP','Waterspouts',
-          [ISOD, SCHC, CHC, LKLY, DEFN],
-          [INTEN_NONE],
-          [PRIMARY, MENTION])
-
-
-types = [NOWX, THUNDER, WATERSPOUT, RAIN, RAINSHOWERS,
-         DRIZZLE, FZRAIN, FZDRIZZLE, SNOW, SNOWSHOWERS,
-         SLEET, FOG, FREEZEFOG, ICEFOG, ICECRYSTAL ,HAZE, BLWGSNOW,
-         BLWGSAND, SMOKE, BLWGDUST, FROST, FRZSPRAY, VOLASH]
-
-
-# PARMS FROM NwsInitsConfig
-#-------------------------------------------------------------------------------
-# Discrete Keys
-#-------------------------------------------------------------------------------
-#
-AirKeys = [("","None"), ("Watch","Watch"), ("Advisory","Advisory"),("Warning", "Warning"),]
-ThreatKeys=[('', 'None'), ('Very Low', 'Very Low'), ('Low', 'Low'),
-            ('Moderate', 'Moderate'), ('High', 'High'), ('Extreme','Extreme')]
-#
-SevereKeys = [('NONE', '0'), ('TSTM', '2'), ('MRGL', '3'), ('SLGT', '4'), ('ENH', '5'), ('MOD', '6'), ('HIGH', '8')]
-
-AirQuality = ('AirQuality', DISCRETE, 'cat', 'Air Quality', NO, AirKeys)
-BasinFFP = ('BasinFFP', DISCRETE, 'none', 'Basin Flash Flood Potential', NO,
-                         [('Dry', 'Dry'), ('Low', 'Low'), ('Moderate', 'Moderate'), ('High', 'High'), ('Very High', 'Very High')])
-CLRIndx = ('CLRIndx', SCALAR, 'none', 'Clearing Index', 1050.0, 0.0, 0, NO)
-CQPF1 = ('CQPF1', SCALAR, 'in', '6hr Cont QPF', maxQpfVal, 0.0, 2, NO)
-Ceiling = ('Ceiling', SCALAR, 'ft', 'Lowest Cloud Base Height', 25000.0, -30000.0, 0, NO)
-CigHgtCat = ('CigHgtCat', SCALAR, 'index', 'Cloud Ceiling Height Category', 6.0, 0.0, 0, NO)
-CloudBaseConditional = ('CloudBaseConditional', SCALAR, '100ft', 'Conditional Cloud Base Height', 250.0, 0.0, 0, NO)
-CloudBasePrimary = ('CloudBasePrimary', SCALAR, '100ft', 'Primary Cloud Base Height', 250.0, 0.0, 0, NO)
-CloudBaseSecondary = ('CloudBaseSecondary', SCALAR, '100ft', 'Secondary Cloud Base Height', 250.0, 0.0, 0, NO)
-ClimoET = ('ClimoET', SCALAR, 'in', 'ClimoET', 0.75, 0.0, 2, NO)
-ClimoETAprA = ('ClimoETAprA', SCALAR, 'in', 'ClimoET AprA', 0.75, 0.0, 2, NO)
-ClimoETAprB = ('ClimoETAprB', SCALAR, 'in', 'ClimoET AprB', 0.75, 0.0, 2, NO)
-ClimoETAugA = ('ClimoETAugA', SCALAR, 'in', 'ClimoET AugA', 0.75, 0.0, 2, NO)
-ClimoETAugB = ('ClimoETAugB', SCALAR, 'in', 'ClimoET AugB', 0.75, 0.0, 2, NO)
-ClimoETDecA = ('ClimoETDecA', SCALAR, 'in', 'ClimoET DecA', 0.75, 0.0, 2, NO)
-ClimoETDecB = ('ClimoETDecB', SCALAR, 'in', 'ClimoET DecB', 0.75, 0.0, 2, NO)
-ClimoETFebA = ('ClimoETFebA', SCALAR, 'in', 'ClimoET FebA', 0.75, 0.0, 2, NO)
-ClimoETFebB = ('ClimoETFebB', SCALAR, 'in', 'ClimoET FebB', 0.75, 0.0, 2, NO)
-ClimoETJanA = ('ClimoETJanA', SCALAR, 'in', 'ClimoET JanA', 0.75, 0.0, 2, NO)
-ClimoETJanB = ('ClimoETJanB', SCALAR, 'in', 'ClimoET JanB', 0.75, 0.0, 2, NO)
-ClimoETJulA = ('ClimoETJulA', SCALAR, 'in', 'ClimoET JulA', 0.75, 0.0, 2, NO)
-ClimoETJulB = ('ClimoETJulB', SCALAR, 'in', 'ClimoET JulB', 0.75, 0.0, 2, NO)
-ClimoETJunA = ('ClimoETJunA', SCALAR, 'in', 'ClimoET JunA', 0.75, 0.0, 2, NO)
-ClimoETJunB = ('ClimoETJunB', SCALAR, 'in', 'ClimoET JunB', 0.75, 0.0, 2, NO)
-ClimoETMarA = ('ClimoETMarA', SCALAR, 'in', 'ClimoET MarA', 0.75, 0.0, 2, NO)
-ClimoETMarB = ('ClimoETMarB', SCALAR, 'in', 'ClimoET MarB', 0.75, 0.0, 2, NO)
-ClimoETMayA = ('ClimoETMayA', SCALAR, 'in', 'ClimoET MayA', 0.75, 0.0, 2, NO)
-ClimoETMayB = ('ClimoETMayB', SCALAR, 'in', 'ClimoET MayB', 0.75, 0.0, 2, NO)
-ClimoETNovA = ('ClimoETNovA', SCALAR, 'in', 'ClimoET NovA', 0.75, 0.0, 2, NO)
-ClimoETNovB = ('ClimoETNovB', SCALAR, 'in', 'ClimoET NovB', 0.75, 0.0, 2, NO)
-ClimoETOctA = ('ClimoETOctA', SCALAR, 'in', 'ClimoET OctA', 0.75, 0.0, 2, NO)
-ClimoETOctB = ('ClimoETOctB', SCALAR, 'in', 'ClimoET OctB', 0.75, 0.0, 2, NO)
-ClimoETSepA = ('ClimoETSepA', SCALAR, 'in', 'ClimoET SepA', 0.75, 0.0, 2, NO)
-ClimoETSepB = ('ClimoETSepB', SCALAR, 'in', 'ClimoET SepB', 0.75, 0.0, 2, NO)
-ClimoPoP = ('ClimoPoP', SCALAR, '%', 'ClimoPoP', 100.0, 0.0, 0, NO)
-ClimoPoPAprA = ('ClimoPoPAprA', SCALAR, '%', 'ClimoPoP AprA', 100.0, 0.0, 0, NO)
-ClimoPoPAprB = ('ClimoPoPAprB', SCALAR, '%', 'ClimoPoP AprB', 100.0, 0.0, 0, NO)
-ClimoPoPAugA = ('ClimoPoPAugA', SCALAR, '%', 'ClimoPoP AugA', 100.0, 0.0, 0, NO)
-ClimoPoPAugB = ('ClimoPoPAugB', SCALAR, '%', 'ClimoPoP AugB', 100.0, 0.0, 0, NO)
-ClimoPoPDecA = ('ClimoPoPDecA', SCALAR, '%', 'ClimoPoP DecA', 100.0, 0.0, 0, NO)
-ClimoPoPDecB = ('ClimoPoPDecB', SCALAR, '%', 'ClimoPoP DecB', 100.0, 0.0, 0, NO)
-ClimoPoPFG = ('ClimoPoPFG', SCALAR, '%', 'ClimoPoP First Guess', 100.0, 0.0, 0, NO)
-ClimoPoPFebA = ('ClimoPoPFebA', SCALAR, '%', 'ClimoPoP FebA', 100.0, 0.0, 0, NO)
-ClimoPoPFebB = ('ClimoPoPFebB', SCALAR, '%', 'ClimoPoP FebB', 100.0, 0.0, 0, NO)
-ClimoPoPJanA = ('ClimoPoPJanA', SCALAR, '%', 'ClimoPoP JanA', 100.0, 0.0, 0, NO)
-ClimoPoPJanB = ('ClimoPoPJanB', SCALAR, '%', 'ClimoPoP JanB', 100.0, 0.0, 0, NO)
-ClimoPoPJulA = ('ClimoPoPJulA', SCALAR, '%', 'ClimoPoP JulA', 100.0, 0.0, 0, NO)
-ClimoPoPJulB = ('ClimoPoPJulB', SCALAR, '%', 'ClimoPoP JulB', 100.0, 0.0, 0, NO)
-ClimoPoPJunA = ('ClimoPoPJunA', SCALAR, '%', 'ClimoPoP JunA', 100.0, 0.0, 0, NO)
-ClimoPoPJunB = ('ClimoPoPJunB', SCALAR, '%', 'ClimoPoP JunB', 100.0, 0.0, 0, NO)
-ClimoPoPMarA = ('ClimoPoPMarA', SCALAR, '%', 'ClimoPoP MarA', 100.0, 0.0, 0, NO)
-ClimoPoPMarB = ('ClimoPoPMarB', SCALAR, '%', 'ClimoPoP MarB', 100.0, 0.0, 0, NO)
-ClimoPoPMayA = ('ClimoPoPMayA', SCALAR, '%', 'ClimoPoP MayA', 100.0, 0.0, 0, NO)
-ClimoPoPMayB = ('ClimoPoPMayB', SCALAR, '%', 'ClimoPoP MayB', 100.0, 0.0, 0, NO)
-ClimoPoPNovA = ('ClimoPoPNovA', SCALAR, '%', 'ClimoPoP NovA', 100.0, 0.0, 0, NO)
-ClimoPoPNovB = ('ClimoPoPNovB', SCALAR, '%', 'ClimoPoP NovB', 100.0, 0.0, 0, NO)
-ClimoPoPOctA = ('ClimoPoPOctA', SCALAR, '%', 'ClimoPoP OctA', 100.0, 0.0, 0, NO)
-ClimoPoPOctB = ('ClimoPoPOctB', SCALAR, '%', 'ClimoPoP OctB', 100.0, 0.0, 0, NO)
-ClimoPoPSepA = ('ClimoPoPSepA', SCALAR, '%', 'ClimoPoP SepA', 100.0, 0.0, 0, NO)
-ClimoPoPSepB = ('ClimoPoPSepB', SCALAR, '%', 'ClimoPoP SepB', 100.0, 0.0, 0, NO)
-CoastalFlood = ('CoastalFlood', DISCRETE, 'cat', 'Coastal Flood', NO, ThreatKeys)
-CondPredHgt = ('CondPredHgt', SCALAR, '100ft', 'Conditional Predominant Cloud Height', 250.0, 0.0, 0, NO)
-CondPredVsby = ('CondPredVsby', SCALAR, 'mi', 'Conditional Predominant Visibility', 10.0, 0.0, 2, NO)
-DenseFogSmoke = ('DenseFogSmoke', DISCRETE, 'cat', 'Dense Fog', NO, ThreatKeys)
-DepartNormFRET = ('DepartNormFRET', SCALAR, 'in', 'DepartNormFRET', 0.35, -0.35, 2, NO)
-Dryness = ('Dryness', DISCRETE, 'none', 'EGB Fuel Dryness', NO,
-           [('NoData', 'NoData'), ('Moist', 'Moist'), ('Dry', 'Dry'), ('VeryDry', 'VeryDry')])
-ExcessiveCold = ('ExcessiveCold', DISCRETE, 'cat', 'Extreme Cold', NO, ThreatKeys)
-ExcessiveHeat = ('ExcessiveHeat', DISCRETE, 'cat', 'Excessive Heat', NO, ThreatKeys)
-FFP = ('FFP', DISCRETE, 'none', 'Flash Flood Potential', NO,
-       [('Dry', 'Dry'), ('Low', 'Low'), ('Moderate', 'Moderate'), ('High', 'High'), ('Very High', 'Very High')])
-FFPI = ('FFPI', SCALAR, 'index', 'Flash Flood Potential Index', 10.0, 0.0, 2, NO)
-FRET = ('FRET', SCALAR, 'in', 'Forecast Reference ET', 0.75, 0.0, 2, NO)
-FRET7Day = ('FRET7Day', SCALAR, 'in/week', 'Weekly Forecast Reference ET', 5.0, 0.0, 2, NO)
-FireWeather = ('FireWeather', DISCRETE, 'cat', 'Wild Fire', NO, ThreatKeys)
-FlashFlood = ('FlashFlood', DISCRETE, 'cat', 'Flash Flood', NO, ThreatKeys)
-Flood = ('Flood', DISCRETE, 'cat', 'River Flood', NO, ThreatKeys)
-FrostFreeze = ('FrostFreeze', DISCRETE, 'cat', 'Frost/Freeze', NO, ThreatKeys)
-FuelMstr = ('FuelMstr', SCALAR, 'none', '10 Hour Fuel Moisture', 40.0, 1.0, 0, NO)
-HainesMid = ('HainesMid', SCALAR, 'cat', 'Mid Level Haines Index', 6.0, 2.0, 0, NO)
-HeatImpactLevels = ('HeatImpactLevels', SCALAR, 'none', 'HeatImpactLevels', 4.0, 0.0, 0, NO)
-HeatImpactLevelsMaxT = ('HeatImpactLevelsMaxT', SCALAR, 'none', 'HeatImpactLevelsMaxT', 4.0, 0.0, 0, NO)
-HeatImpactLevelsMinT = ('HeatImpactLevelsMinT', SCALAR, 'none', 'HeatImpactLevelsMinT', 4.0, 0.0, 0, NO)
-HeatOrangeMaxT = ('HeatOrangeMaxT', SCALAR, 'F', 'Heat Orange MaxT', maxTempVal, minTempVal, 0, NO)
-HeatOrangeMinT = ('HeatOrangeMinT', SCALAR, 'F', 'Heat Orange MinT', maxTempVal, minTempVal, 0, NO)
-HeatRedMaxT = ('HeatRedMaxT', SCALAR, 'F', 'Heat Red MaxT', maxTempVal, minTempVal, 0, NO)
-HeatRedMinT = ('HeatRedMinT', SCALAR, 'F', 'Heat Red MinT', maxTempVal, minTempVal, 0, NO)
-HeatYellowMaxT = ('HeatYellowMaxT', SCALAR, 'F', 'Heat Yellow MaxT', maxTempVal, minTempVal, 0, NO)
-HeatYellowMinT = ('HeatYellowMinT', SCALAR, 'F', 'Heat Yellow MinT', maxTempVal, minTempVal, 0, NO)
-HighWind = ('HighWind', DISCRETE, 'cat', 'High Wind', NO, ThreatKeys)
-IceAccum6hr = ('IceAccum6hr', SCALAR, 'in', '6-hr Ice Accumulation', 2.0, 0.0, 2, NO)
-LLWS = ('LLWS', VECTOR, 'kts', 'Low Level Wind Shear', 125.0, 0.0, 0, NO)
-LLWSHgt = ('LLWSHgt', SCALAR, '100 ft', 'Wind Shear Height', 20.0, 0.0, 0, NO)
-LTG = ('LTG', SCALAR, 'CNT', 'LTG', 100.0, 0.0, 0, NO)
-LTG12 = ('LTG12', SCALAR, 'CNT', 'LTG12', 100.0, 0.0, 0, NO)
-LTG24 = ('LTG24', SCALAR, 'CNT', 'LTG24', 100.0, 0.0, 0, NO)
-Lightning = ('Lightning', DISCRETE, 'cat', 'Lightning', NO, ThreatKeys)
-Max3 = ('Max3', SCALAR, 'F', '3hr Maximum Temperature', maxTempVal, minTempVal, 0, NO)
-Max6 = ('Max6', SCALAR, 'F', '6hr Maximum Temperature', maxTempVal, minTempVal, 0, NO)
-MaxApT = ('MaxApT', SCALAR, 'F', 'Max Apparent Temperature', maxTempVal, -120.0, 0, NO)
-MaxRHError = ('MaxRHError', SCALAR, '%', 'Maximum Relative Humidity Error', 100.0, -100.0, 0, NO)
-MaxRHFcst = ('MaxRHFcst', SCALAR, '%', 'Forecast Maximum Relative Humidity', 100.0, 0.0, 0, NO)
-MaxRHOb = ('MaxRHOb', SCALAR, '%', 'Observed Maximum Relative Humidity', 100.0, 0.0, 0, NO)
-MaxRHObs = ('MaxRHObs', SCALAR, '%', 'Maximum Observed RH', 100.0, 0.0, 0, NO)
-MaxT10 = ('MaxT10', SCALAR, 'F', '10th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)
-MaxT50 = ('MaxT50', SCALAR, 'F', '50th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)
-MaxT90 = ('MaxT90', SCALAR, 'F', '90th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)
-MaxTAloft = ('MaxTAloft', SCALAR, 'C', 'Max Temp in Warm Nose', 40.0, -20.0, 1, NO)
-MaxTError = ('MaxTError', SCALAR, 'F', 'Maximum Temperature Error', 120.0, -120.0, 0, NO)
-MaxTFcst = ('MaxTFcst', SCALAR, 'F', 'Observed Maximum Temperature', maxTempVal, minTempVal, 0, NO)
-MaxTOb = ('MaxTOb', SCALAR, 'F', 'Observed Maximum Temperature', maxTempVal, minTempVal, 0, NO)
-MaxTObs = ('MaxTObs', SCALAR, 'F', 'Maximum Temperature Obs', maxTempVal, minTempVal, 0, NO)
-Min3 = ('Min3', SCALAR, 'F', '3hr Minimum Temperature', maxTempVal, minTempVal, 0, NO)
-Min6 = ('Min6', SCALAR, 'F', '6hr Minimum Temperature', maxTempVal, minTempVal, 0, NO)
-MinApT = ('MinApT', SCALAR, 'F', 'Min Apparent Temperature', maxTempVal, -120.0, 0, NO)
-MinRH3 = ('MinRH3', SCALAR, '%', '3hr Minimum Relative Humidity', 100.0, 0.0, 0, NO)
-MinRHError = ('MinRHError', SCALAR, '%', 'Minimum Relative Humidity Error', 100.0, -100.0, 0, NO)
-MinRHFcst = ('MinRHFcst', SCALAR, '%', 'Forecast Minimum Relative Humidity', 100.0, 0.0, 0, NO)
-MinRHOb = ('MinRHOb', SCALAR, '%', 'Observed Minimum Relative Humidity', 100.0, 0.0, 0, NO)
-MinRHObs = ('MinRHObs', SCALAR, '%', 'Minimum Observed RH', 100.0, 0.0, 0, NO)
-MinT10 = ('MinT10', SCALAR, 'F', '10th Percentile for MinT', maxTempVal, minTempVal, 0, NO)
-MinT50 = ('MinT50', SCALAR, 'F', '50th Percentile for MinT', maxTempVal, minTempVal, 0, NO)
-MinT6 = ('MinT6', SCALAR, 'F', 'Minimum Temperature 6Hr', maxTempVal, minTempVal, 0, NO)
-MinT90 = ('MinT90', SCALAR, 'F', '90th Percentile for MinT', maxTempVal, minTempVal, 0, NO)
-MinTError = ('MinTError', SCALAR, 'F', 'Minimum Temperature Error', 120.0, -120.0, 0, NO)
-MinTFcst = ('MinTFcst', SCALAR, 'F', 'Forecast Minimum Temperature', maxTempVal, minTempVal, 0, NO)
-MinTOb = ('MinTOb', SCALAR, 'F', 'Observed Minimum Temperature', maxTempVal, minTempVal, 0, NO)
-MinTObs = ('MinTObs', SCALAR, 'F', 'Minimum Temperature Obs', maxTempVal, minTempVal, 0, NO)
-MixHgtAve = ('MixHgtAve', SCALAR, 'ft', 'Mixing Hgt Average', 20000.0, 0.0, 0, NO)
-MixHgtMSL = ('MixHgtMSL', SCALAR, 'ft', 'Mixing Height above sea level', 30000.0, 0.0, 0, NO)
-MixT1700 = ('MixT1700', SCALAR, 'F', '1700Foot MixingTemp', 110.0, -10.0, 0, NO)
-P95MaxT = ('P95MaxT', SCALAR, 'F', 'P95MaxT', maxTempVal, minTempVal, 0, NO)
-P95MinT = ('P95MinT', SCALAR, 'F', 'P95MinT', maxTempVal, minTempVal, 0, NO)
-     # EKDMOS
-PQPF06001 = ('PQPF06001', SCALAR, '%', '6hr Prob QPF > 0.01', 100.0, 0.0, 0, NO)
-PQPF06005 = ('PQPF06005', SCALAR, '%', '6hr Prob QPF > 0.05', 100.0, 0.0, 0, NO)
-PQPF06010 = ('PQPF06010', SCALAR, '%', '6hr Prob QPF > 0.10', 100.0, 0.0, 0, NO)
-PQPF06015 = ('PQPF06015', SCALAR, '%', '6hr Prob QPF > 0.15', 100.0, 0.0, 0, NO)
-PQPF06020 = ('PQPF06020', SCALAR, '%', '6hr Prob QPF > 0.20', 100.0, 0.0, 0, NO)
-PQPF06025 = ('PQPF06025', SCALAR, '%', '6hr Prob QPF > 0.25', 100.0, 0.0, 0, NO)
-PQPF06030 = ('PQPF06030', SCALAR, '%', '6hr Prob QPF > 0.30', 100.0, 0.0, 0, NO)
-PQPF06040 = ('PQPF06040', SCALAR, '%', '6hr Prob QPF > 0.40', 100.0, 0.0, 0, NO)
-PQPF06050 = ('PQPF06050', SCALAR, '%', '6hr Prob QPF > 0.50', 100.0, 0.0, 0, NO)
-PQPF06075 = ('PQPF06075', SCALAR, '%', '6hr Prob QPF > 0.75', 100.0, 0.0, 0, NO)
-PQPF06100 = ('PQPF06100', SCALAR, '%', '6hr Prob QPF > 1.00', 100.0, 0.0, 0, NO)
-PQPF06150 = ('PQPF06150', SCALAR, '%', '6hr Prob QPF > 1.50', 100.0, 0.0, 0, NO)
-PoP12Fcst = ('PoP12Fcst', SCALAR, '%', 'Forecast Prob. of Precip.', 100.0, 0.0, 0, NO)
-PoP3 = ('PoP3', SCALAR, '%', 'PoP3', 100.0, 0.0, 0, NO)
-PoPPCECMWF = ('PoPPatternClimoECMWF', SCALAR, '%', 'PatternClimoECMWF', 100.0, 0.0, 0, NO)
-PoPPCFIM = ('PoPPatternClimoFIM', SCALAR, '%', 'PatternClimoFIM', 100.0, 0.0, 0, NO)
-PoPPCGEM = ('PoPPatternClimoGEM', SCALAR, '%', 'PatternClimoGEM', 100.0, 0.0, 0, NO)
-PoPPCGFS = ('PoPPatternClimoGFS', SCALAR, '%', 'PatternClimoGFS', 100.0, 0.0, 0, NO)
-PoPPattern1 = ('PoPNortherlyFlow', SCALAR, '%', 'NortherlyFlow', 100.0, 0.0, 0, NO)
-PoPPattern10 = ('PoPRockiesRidge', SCALAR, '%', 'RockiesRidge', 100.0, 0.0, 0, NO)
-PoPPattern11 = ('PoPSouthernFirehose', SCALAR, '%', 'SouthernFirehose', 100.0, 0.0, 0, NO)
-PoPPattern12 = ('PoPNorthernFirehose', SCALAR, '%', 'NorthernFirehose', 100.0, 0.0, 0, NO)
-PoPPattern2 = ('PoPGreatBasinLow', SCALAR, '%', 'GreatBasinLow', 100.0, 0.0, 0, NO)
-PoPPattern3 = ('PoPBroadCyclonicFlow', SCALAR, '%', 'BroadCyclonicFlow', 100.0, 0.0, 0, NO)
-PoPPattern4 = ('PoPCoastalRidge', SCALAR, '%', 'CoastalRidge', 100.0, 0.0, 0, NO)
-PoPPattern5 = ('PoPNorthwestFlow', SCALAR, '%', 'NorthwestFlow', 100.0, 0.0, 0, NO)
-PoPPattern6 = ('PoPZonalFlow', SCALAR, '%', 'ZonalFlow', 100.0, 0.0, 0, NO)
-PoPPattern7 = ('PoPBroadAntiCyclonicFlow', SCALAR, '%', 'BroadAntiCyclonicFlow', 100.0, 0.0, 0, NO)
-PoPPattern8 = ('PoPDiffluentOnshoreFlow', SCALAR, '%', 'DiffluentOnshoreFlow', 100.0, 0.0, 0, NO)
-PoPPattern9 = ('PoPSouthwestFlow', SCALAR, '%', 'SouthwestFlow', 100.0, 0.0, 0, NO)
-PoPWG = ('PoPWG', SCALAR, '%', 'Climo PoP Work Grid', 30.0, -30.0, 0, NO)
-PPFFG = ("PPFFG", SCALAR, "%", "Prob of Excessive Rain in %", 100.0, 0.0 ,0, NO)
-PrecipDur = ('PrecipDur', SCALAR, 'hrs', 'Precipitation Duration', 12.0, 0.0, 1, YES)
-PredHgt = ('PredHgt', SCALAR, '100ft', 'Predominant Cloud Height', 250.0, 0.0, 0, NO)
-PredHgtCat = ('PredHgtCat', SCALAR, 'index', 'Predominant Cloud Height Category', 6.0, 0.0, 0, NO)
-PredHgtRH = ('PredHgtRH', SCALAR, '100ft', 'Pred Cloud Height from RH', 250.0, 1.0, 0, NO)
-PredHgtTempo = ('PredHgtTempo', SCALAR, '100ft', 'Predominant Cloud Height Tempo', 250.0, 0.0, 0, NO)
-PredVsby = ('PredVsby', SCALAR, 'mi', 'Predominant Visibility', 10.0, 0.0, 2, NO)
-Pres = ('Pres', SCALAR, 'mb', 'Pressure', 1100.0, 900.0, 2, NO)
-ProbDmgWind = ('ProbDmgWind', SCALAR, '%', 'Probability of Damaging Wind', 100.0, 0.0, 0, NO)
-ProbExtrmDmgWind = ('ProbExtrmDmgWind', SCALAR, '%', 'Probability of Extreme Damaging Wind', 100.0, 0.0, 0, NO)
-ProbExtrmHail = ('ProbExtrmHail', SCALAR, '%', 'Probability of Extreme Hail', 100.0, 0.0, 0, NO)
-ProbExtrmSvr = ('ProbExtrmSvr', SCALAR, '%', 'Probability of Extreme Severe', 100.0, 0.0, 0, NO)
-ProbExtrmTor = ('ProbExtrmTor', SCALAR, '%', 'Probability of Extreme Tornado', 100.0, 0.0, 0, NO)
-ProbSvrHail = ('ProbSvrHail', SCALAR, '%', 'Probability of Severe Hail', 100.0, 0.0, 0, NO)
-ProbTor = ('ProbTor', SCALAR, '%', 'Probability of Tornado', 100.0, 0.0, 0, NO)
-ProbTotSvr = ('ProbTotSvr', SCALAR, '%', 'Probability of Severe', 100.0, 0.0, 0, NO)
-ProbSnowGTT = ("ProbSnowGTT", SCALAR, "%", "Prob. snow > trace", 100.0, 0.0, 0, NO)
-ProbSnowGT1 = ("ProbSnowGT1", SCALAR, "%", "Prob. snow > 1 inch", 100.0, 0.0, 0, NO)
-ProbSnowGT2 = ("ProbSnowGT2", SCALAR, "%", "Prob. snow > 2 inches ", 100.0, 0.0, 0, NO)
-ProbSnowGT4 = ("ProbSnowGT4", SCALAR, "%", "Prob. snow > 4 inches ", 100.0, 0.0, 0, NO)
-ProbSnowGT6 = ("ProbSnowGT6", SCALAR, "%", "Prob. snow > 6 inches ", 100.0, 0.0, 0, NO)
-ProbSnowGT8 = ("ProbSnowGT8", SCALAR, "%", "Prob. snow > 8 inches", 100.0, 0.0, 0, NO)
-ProbSnowGT12 = ("ProbSnowGT12", SCALAR, "%", "Prob. snow > 12 inches", 100.0, 0.0, 0, NO)
-ProbSnowGT18 = ("ProbSnowGT18", SCALAR, "%", "Prob. snow > 18 inches", 100.0, 0.0, 0, NO)
-ProbSnowRT1 = ("ProbSnowRT1", SCALAR, "%", "Prob. snow T-1 inch", 100.0, 0.0, 0, NO)
-ProbSnowR12 = ("ProbSnowR12", SCALAR, "%", "Prob. snow 1-2 inches", 100.0, 0.0, 0, NO)
-ProbSnowR24 = ("ProbSnowR24", SCALAR, "%", "Prob. snow 2-4 inches ", 100.0, 0.0, 0, NO)
-ProbSnowR48 = ("ProbSnowR48", SCALAR, "%", "Prob. snow 4-8 inches ", 100.0, 0.0, 0, NO)
-ProbSnowR812 = ("ProbSnowR812", SCALAR, "%", "Prob. snow 8-12 inches ", 100.0, 0.0, 0, NO)
-ProbSnowR1218 = ("ProbSnowR1218", SCALAR, "%", "Prob. snow 12-18 inches", 100.0, 0.0, 0, NO)
-ProbSnowR18 = ("ProbSnowR18", SCALAR, "%", "Prob. snow > 18 inches", 100.0, 0.0, 0, NO)
-QPE06 = ('QPE06', SCALAR, 'in', 'QPE06', maxQpfVal, 0.0, 2, YES)
-QPE06Ob = ('QPE06Ob', SCALAR, 'in', 'Observed Precip', 20.0, 0.0, 2, NO)
-QPE12 = ('QPE12', SCALAR, 'in', 'QPE12', 15.0, 0.0, 2, YES)
-QPE24 = ('QPE24', SCALAR, 'in', 'QPE24', 15.0, 0.0, 2, YES)
-QPFDS = ('QPFDS', SCALAR, 'in', 'QPFDS', maxQpfVal, 0.0, 2, YES)
-QPFFcst = ('QPFFcst', SCALAR, 'in', 'Forecast Precip.', 10.0, 0.0, 2, NO)
-QPFPCECMWF = ('QPFPatternClimoECMWF', SCALAR, 'in', 'PatternClimoECMWF', maxQpfVal, 0.0, 2, NO)
-QPFPCFIM = ('QPFPatternClimoFIM', SCALAR, 'in', 'PatternClimoFIM', maxQpfVal, 0.0, 2, NO)
-QPFPCGEM = ('QPFPatternClimoGEM', SCALAR, 'in', 'PatternClimoGEM', maxQpfVal, 0.0, 2, NO)
-QPFPCGFS = ('QPFPatternClimoGFS', SCALAR, 'in', 'PatternClimoGFS', maxQpfVal, 0.0, 2, NO)
-QPFPattern1 = ('QPFNortherlyFlow', SCALAR, 'in', 'NortherlyFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern10 = ('QPFRockiesRidge', SCALAR, 'in', 'RockiesRidge', maxQpfVal, 0.0, 2, NO)
-QPFPattern11 = ('QPFSouthernFirehose', SCALAR, 'in', 'SouthernFirehose', maxQpfVal, 0.0, 2, NO)
-QPFPattern12 = ('QPFNorthernFirehose', SCALAR, 'in', 'NorthernFirehose', maxQpfVal, 0.0, 2, NO)
-QPFPattern2 = ('QPFGreatBasinLow', SCALAR, 'in', 'GreatBasinLow', maxQpfVal, 0.0, 2, NO)
-QPFPattern3 = ('QPFBroadCyclonicFlow', SCALAR, 'in', 'BroadCyclonicFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern4 = ('QPFCoastalRidge', SCALAR, 'in', 'CoastalRidge', maxQpfVal, 0.0, 2, NO)
-QPFPattern5 = ('QPFNorthwestFlow', SCALAR, 'in', 'NorthwestFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern6 = ('QPFZonalFlow', SCALAR, 'in', 'ZonalFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern7 = ('QPFBroadAntiCyclonicFlow', SCALAR, 'in', 'BroadAntiCyclonicFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern8 = ('QPFDiffluentOnshoreFlow', SCALAR, 'in', 'DiffluentOnshoreFlow', maxQpfVal, 0.0, 2, NO)
-QPFPattern9 = ('QPFSouthwestFlow', SCALAR, 'in', 'SouthwestFlow', maxQpfVal, 0.0, 2, NO)
-QPFPct = ('QPFPct', SCALAR, '%', 'QPFPct', 300.0, 0.0, 1, YES)
-QPFPctMonthlyClimo = ('QPFPctMonthlyClimo', SCALAR, '%', 'QPF Pct Monthly PRISMClimo', 200.0, 0.0, 0, NO)
-QPFRaw = ('QPFRaw', SCALAR, 'in', 'QPFRaw', maxQpfVal, 0.0, 2, YES)
-QSE06 = ('QSE06', SCALAR, 'in', 'QSE06', 100.0, 0.0, 1, YES)
-RipCurrent = ('RipCurrent', DISCRETE, 'cat', 'Rip Current', NO, ThreatKeys)
-RipCurrentIndex = ('RipCurrentIndex', SCALAR, 'ft', 'Rip Current Index', 16.0, -1.0, 1, NO)
-RipRisk = ("RipRisk", SCALAR, "none", "Rip Current Risk", 3.0, 0.0, 0, NO)
-SPC12hrLP1 = ('SPC12hrLP1', SCALAR, '%', 'SPC 12HR Lightning Probability (1)', 100.0, 0.0, 0, NO)
-SPC12hrLP10 = ('SPC12hrLP10', SCALAR, '%', 'SPC 12HR Lightning Probability (10)', 100.0, 0.0, 0, NO)
-SPC12hrLP100 = ('SPC12hrLP100', SCALAR, '%', 'SPC 12HR Lightning Probability (100)', 100.0, 0.0, 0, NO)
-SPC24hrLP1 = ('SPC24hrLP1', SCALAR, '%', 'SPC 24HR Lightning Probability (1)', 100.0, 0.0, 0, NO)
-SPC24hrLP10 = ('SPC24hrLP10', SCALAR, '%', 'SPC 24HR Lightning Probability (10)', 100.0, 0.0, 0, NO)
-SPC24hrLP100 = ('SPC24hrLP100', SCALAR, '%', 'SPC 24HR Lightning Probability (100)', 100.0, 0.0, 0, NO)
-SPC3hrLP1 = ('SPC3hrLP1', SCALAR, '%', 'SPC 3HR Lightning Probability (1)', 100.0, 0.0, 0, NO)
-SPC3hrLP10 = ('SPC3hrLP10', SCALAR, '%', 'SPC 3HR Lightning Probability (10)', 100.0, 0.0, 0, NO)
-SPC3hrLP100 = ('SPC3hrLP100', SCALAR, '%', 'SPC 3HR Lightning Probability (100)', 100.0, 0.0, 0, NO)
-SevereHail = ('SevereHail', DISCRETE, 'cat', 'Severe Hail', NO, ThreatKeys)
-SevereTstmWind = ('SevereTstmWind', DISCRETE, 'cat', 'SevereTstmWind', NO, ThreatKeys)
-SnowAmt10Prcntl = ('SnowAmt10Prcntl', SCALAR, 'in', 'min case', 50.0, 0.0, 1, NO)
-SnowAmt50Prcntl = ('SnowAmt50Prcntl', SCALAR, 'in', 'avg case', 50.0, 0.0, 1, NO)
-SnowAmt90Prcntl = ('SnowAmt90Prcntl', SCALAR, 'in', 'max case', 50.0, 0.0, 1, NO)
-SnowDepth = ('SnowDepth', SCALAR, 'in', 'Snow Depth', 50.0, 0.0, 0, NO)
-SnowRatioCLIMO = ('SnowRatioCLIMO', SCALAR, '%', 'Snow Ratio Climatology SON-DJF-MAM', 40.0, 0.0, 1, YES)
-SnowRatioGFS = ('SnowRatioGFS', SCALAR, '%', 'Snow Ratio from GFS', 40.0, 0.0, 1, YES)
-SnowRatioHPCMEAN = ('SnowRatioHPCMEAN', SCALAR, '%', 'Snow Ratio from HPC MEAN', 40.0, 0.0, 1, YES)
-SnowRatioNAM = ('SnowRatioNAM', SCALAR, '%', 'Snow Ratio from NAM40', 40.0, 0.0, 1, YES)
-T10 = ('T10', SCALAR, 'F', '10th Percentile for T', maxTempVal, minTempVal, 0, NO)
-T50 = ('T50', SCALAR, 'F', '50th Percentile for T', maxTempVal, minTempVal, 0, NO)
-T90 = ('T90', SCALAR, 'F', '90th Percentile for T', maxTempVal, minTempVal, 0, NO)
-TAloft = ('TAloft', SCALAR, 'F', 'Temperature Aloft', 120.0, -50.0, 1, NO)
-Td10 = ('Td10', SCALAR, 'F', '10th Percentile for DpT', maxTdVal, minTdVal, 0, NO)
-Td50 = ('Td50', SCALAR, 'F', '50th Percentile for DpT', maxTdVal, minTdVal, 0, NO)
-Td90 = ('Td90', SCALAR, 'F', '90th Percentile for DpT', maxTdVal, minTdVal, 0, NO)
-TdAft = ('TdAft', SCALAR, 'F', 'Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO)
-TdAftError = ('TdAftError', SCALAR, 'F', 'Afternoon Dewpoint Error', 120.0, -120.0, 0, NO)
-TdAftFcst = ('TdAftFcst', SCALAR, 'F', 'Forecast Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO)
-TdAftOb = ('TdAftOb', SCALAR, 'F', 'Observed Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO)
-TdAftObs = ('TdAftObs', SCALAR, 'F', 'Afternoon Dewpoint Obs', maxTdVal, minTdVal, 0, NO)
-TdMrn = ('TdMrn', SCALAR, 'F', 'Morning Dewpoint', maxTdVal, minTdVal, 0, NO)
-TdMrnError = ('TdMrnError', SCALAR, 'F', 'Morning Dewpoint Error', 120.0, -120.0, 0, NO)
-TdMrnFcst = ('TdMrnFcst', SCALAR, 'F', 'Forecast Morning Dewpoint', maxTdVal, minTdVal, 0, NO)
-TdMrnOb = ('TdMrnOb', SCALAR, 'F', 'Observed Morning Dewpoint', maxTdVal, minTdVal, 0, NO)
-TdMrnObs = ('TdMrnObs', SCALAR, 'F', 'Morning Dewpoint Obs', maxTdVal, minTdVal, 0, NO)
-Tornado = ('Tornado', DISCRETE, 'cat', 'Tornado', NO, ThreatKeys)
-TransWindAve = ('TransWindAve', VECTOR, 'mph', 'Transport Wind Average', 125.0, 0.0, 0, NO)
-Tw = ('Tw', SCALAR, 'F', 'Surface Wet Bulb Temp', 80.0, -50.0, 0, NO)
-VentRateAve = ('VentRateAve', SCALAR, 'mph-ft', 'Vent Rate Average', 500000.0, 0.0, 0, NO)
-Visibility = ('Visibility', SCALAR, 'SM', 'Visibility', 10.0, 0.0, 2, NO)
-VisibilityConditional = ('VisibilityConditional', SCALAR, 'SM', 'Conditional Visibility', 10.0, 0.0, 2, NO)
-Vsby = ('Vsby', SCALAR, 'mi', 'Visibility', 10.0, 0.0, 2, NO)
-WG1 = ('WG1', SCALAR, 'none', 'WorkGrid1', 100.0, -100.0, 0, NO)
-WinterWx = ('WinterWx', DISCRETE, 'cat', 'Winter Weather', NO, ThreatKeys)
-
-#** Parameter sets for specific functionality
-optionalParmsDict = {}
-
-# Marine Weather Elements
-optionalParmsDict['marine']={
-    'WaveDir' : ("WaveDir", VECTOR, "m/s", "Wave Direction", 5.0, 0.0, 2, NO),
-    'WindWaveHeight' : ("WindWaveHgt", SCALAR, "ft", "Wind Wave Height", 100.0, 0.0, 0, NO),
-    'WaveHeight' : ("WaveHeight", SCALAR, "ft", "Total Wave Height", 100.0, 0.0, 0, NO),
-    'Swell' : ("Swell", VECTOR, "ft", "Primary Swell", 100.0, 0.0, 0, NO),
-    'Swell2' : ("Swell2", VECTOR, "ft", "Secondary Swell", 100.0, 0.0, 0, NO),
-    'Period' : ("Period", SCALAR, "sec", "Primary Period", 30.0, 0.0, 0, NO),
-    'IceCoverage' : ("IceCoverage", SCALAR, "%", "Ice Coverage Amount", 100.0, 0.0, 0, NO),
-    'SurfHeight' : ("SurfHeight", SCALAR, "ft", "Total Wave Height", 100.0, 0.0, 0, NO),
-    ##########DCS3499
-    'SigWaveHgt' : ("SigWaveHgt", SCALAR, "ft",
-                    "Significant wave height of combined wind waves and swells",
-                    30.0, 0.0, 0, NO),
-    'PeakWaveDir' : ("PeakWaveDir", VECTOR, "degree", "Direction of peak wave", 100.0, 0.0, 0, NO),
-    'WindWaveHgt' : ("WindWaveHgt", SCALAR, "ft", "Significant wave height of wind waves", 30.0, 0.0, 0, NO),
-    'WindWavePeriod' : ("WindWavePeriod", SCALAR, "sec.", "Wind wave peak period", 20.0, 0.0, 0, NO),
-    'WindWaveDir' : ("WindWaveDir", VECTOR, "degree", "Direction of wind waves", 100.0, 0.0, 0, NO),
-    'NWPSwind' : ("NWPSwind", VECTOR, "kts", "NWPSwind", 150.0, 0.0, 0, NO),
-    'UWaveDir' : ("UWaveDir", SCALAR, "m/s", "U WaveDir Comp", 0.50, -0.50, 3, NO),
-    'VWaveDir' : ("VWaveDir", SCALAR, "m/s", "V WaveDir Comp", 0.50, -0.50, 3, NO),
-    'SwanSwell' : ("SwanSwell", SCALAR, "ft", "Total Significant Swell Height", 40.0, 0.0, 2, NO),
-    'SST' : ("SST", SCALAR, "F", "Sea Sfc Temp", 100.0, 25.0, 0, NO),
-    'StormTide' : ('StormTide', SCALAR, 'ft', 'Storm Tide', 30.0, -8.0, 1, NO),
-    #Fcst Grids - for partitioned wave groups
-    'Wave1' : ("Wave1", VECTOR, "ft", "WAVE1", 50.0, 0.0, 0, NO),
-    'Wave2' : ("Wave2", VECTOR, "ft", "WAVE2", 50.0, 0.0, 0, NO),
-    'Wave3' : ("Wave3", VECTOR, "ft", "WAVE3", 50.0, 0.0, 0, NO),
-    'Wave4' : ("Wave4", VECTOR, "ft", "WAVE4", 50.0, 0.0, 0, NO),
-    'Wave5' : ("Wave5", VECTOR, "ft", "WAVE5", 50.0, 0.0, 0, NO),
-    'Wave6' : ("Wave6", VECTOR, "ft", "WAVE6", 50.0, 0.0, 0, NO),
-    'Wave7' : ("Wave7", VECTOR, "ft", "Wave7", 50.0, 0.0, 0, NO),
-    'Wave8' : ("Wave8", VECTOR, "ft", "Wave8", 50.0, 0.0, 0, NO),
-    'Wave9' : ("Wave9", VECTOR, "ft", "Wave9", 50.0, 0.0, 0, NO),
-    #Fcst Grids - for partitioned wave groups
-    'Period1' : ("Period1", SCALAR, "sec", "Period1", 30.0, 0.0, 0, NO),
-    'Period2' : ("Period2", SCALAR, "sec", "Period2", 30.0, 0.0, 0, NO),
-    'Period3' : ("Period3", SCALAR, "sec", "Period3", 30.0, 0.0, 0, NO),
-    'Period4' : ("Period4", SCALAR, "sec", "Period4", 30.0, 0.0, 0, NO),
-    'Period5' : ("Period5", SCALAR, "sec", "Period5", 30.0, 0.0, 0, NO),
-    'Period6' : ("Period6", SCALAR, "sec", "Period6", 30.0, 0.0, 0, NO),
-    'Period7' : ("Period7", SCALAR, "sec", "Period7", 30.0, 0.0, 0, NO),
-    'Period8' : ("Period8", SCALAR, "sec", "Period8", 30.0, 0.0, 0, NO),
-    'Period9' : ("Period9", SCALAR, "sec", "Period9", 30.0, 0.0, 0, NO),
-    'RipProb' : ("RipProb", SCALAR, "%", "Rip Current Probability", 100.0, 0.0, 0, NO),
-    'ErosionProb' : ("ErosionProb", SCALAR, "%", "Dune Erosion Probability", 100.0, 0.0, 0, NO),
-    'OverwashProb' : ("OverwashProb", SCALAR, "%", "Dune Overwash Probability", 100.0, 0.0, 0, NO)
-}
-if SID in groups['GreatLake_SITES']:
-    #  Redefine the WaveHeight field to include a decimal point
-    optionalParmsDict['marine'].update({'WaveHeight' :
-                 ("WaveHeight", SCALAR, "ft", "Wave Height", 40.0, 0.0, 1, NO)})
-
-# Parameter set for Probability of weather type, Optional for sites.
-optionalParmsDict['powt']={
-     'PoTBD': ('PotBlowingDust', SCALAR, '%', 'Prob of Blowing Dust', 100.0, 0.0, 0, NO),
-     'PoTBN': ('PotBlowingSand', SCALAR, '%', 'Prob of Blowing Sand', 100.0, 0.0, 0, NO),
-     'PoTBS': ('PotBlowingSnow', SCALAR, '%', 'Prob of Blowing Snow', 100.0, 0.0, 0, NO),
-     'PoTF': ('PotFog', SCALAR, '%', 'Prob of Fog', 100.0, 0.0, 0, NO),
-     'PoTFR': ('PotFrost', SCALAR, '%', 'Prob of Frost', 100.0, 0.0, 0, NO),
-     'PoTFl': ('PotFlurries', SCALAR, '%', 'Prob of Flurries', 100.0, 0.0, 0, NO),
-     'PoTH': ('PotHaze', SCALAR, '%', 'Prob of Haze', 100.0, 0.0, 0, NO),
-     'PoTIC': ('PotIceCrystals', SCALAR, '%', 'Prob of Ice Crystals', 100.0, 0.0, 0, NO),
-     'PoTIF': ('PotIceFog', SCALAR, '%', 'Prob of Ice Fog', 100.0, 0.0, 0, NO),
-     'PoTIP': ('PotSleet', SCALAR, '%', 'Prob of Sleet', 100.0, 0.0, 0, NO),
-     'PoTK': ('PotSmoke', SCALAR, '%', 'Prob of Smoke', 100.0, 0.0, 0, NO),
-     'PoTL': ('PotDrizzle', SCALAR, '%', 'Prob of Drizzle', 100.0, 0.0, 0, NO),
-     'PoTR': ('PotRain', SCALAR, '%', 'Prob of Rain', 100.0, 0.0, 0, NO),
-     'PoTRW': ('PotRainShowers', SCALAR, '%', 'Prob of Rain Showers', 100.0, 0.0, 0, NO),
-     'PoTS': ('PotSnow', SCALAR, '%', 'Prob of Snow', 100.0, 0.0, 0, NO),
-     'PoTSW': ('PotSnowShowers', SCALAR, '%', 'Prob of Snow Showers', 100.0, 0.0, 0, NO),
-     'PoTSp': ('PotSprinkles', SCALAR, '%', 'Prob of Sprinkles', 100.0, 0.0, 0, NO),
-     'PoTSvr': ('PotSevere', SCALAR, '%', 'Prob of Severe Storms', 100.0, 0.0, 0, NO),
-     'PoTT': ('PotThunder', SCALAR, '%', 'Prob of Thunder', 100.0, 0.0, 0, NO),
-     'PoTVA': ('PotVolcanicAsh', SCALAR, '%', 'Prob of Volcanic Ash', 100.0, 0.0, 0, NO),
-     'PoTWP': ('PotWaterspout', SCALAR, '%', 'Prob of Waterspout', 100.0, 0.0, 0, NO),
-     'PoTZF': ('PotFreezingFog', SCALAR, '%', 'Prob of Freezing Fog', 100.0, 0.0, 0, NO),
-     'PoTZL': ('PotFreezingDrizzle', SCALAR, '%', 'Prob of Freezing Drizzle', 100.0, 0.0, 0, NO),
-     'PoTZR': ('PotFreezingRain', SCALAR, '%', 'Prob of Freezing Rain', 100.0, 0.0, 0, NO),
-     'PoTZY': ('PotFreezingSpray', SCALAR, '%', 'Prob of Freezing Spray', 100.0, 0.0, 0, NO),
-     'PoTHZY': ('PotHeavyFreezingSpray', SCALAR, '%', 'Prob of Heavy Freezing Spray', 100.0, 0.0, 0, NO),
-     'RoadTemp' : ("RoadTemp", SCALAR, "F", "Road Temperature", 120.0, -50.0, 0, NO),
-     'MaxTwAloft' : ("MaxTwAloft", SCALAR, 'C', 'Max Wet-Bulb Temp in Warm Nose', 40.0, -20.0, 1, NO),
-     'ProbIcePresent': ("ProbIcePresent", SCALAR, "%", "Prob of Ice Present", 100.0, 0.0, 0, NO),
-     'ProbRefreezeSleet': ("ProbRefreezeSleet", SCALAR, "%", "Prob of Refreeze into Sleet", 100.0, 0.0, 0, NO),
-     'SleetAmt': ("SleetAmt", SCALAR, "in", "Sleet Accumulation", 5.0, 0.0, 1, YES),
-     'IceFlatAcc': ('IceFlatAccum', SCALAR, 'in', 'Flat Ice Accumulation', maxIceVal, 0.0, 2, YES),
-     'IceLineAcc': ('IceLineAccum', SCALAR, 'in', 'Line Ice Accumulation', maxIceVal, 0.0, 2, YES),
-}
-
-# Parameter set for Winter Weather probabilities, Optional for sites.
-#****** Winter 2017 changes
-optionalParmsDict['winterProbs']={
-    # Storm Total Snow related
-    'StormTotalSnowWPC' : ("StormTotalSnowWPC", SCALAR, "in","WPC Storm Total Snow", 50.0, 0.0, 1, NO),
-
-    # Snow Percentiles
-    'SnowAmt5Prcntl' : ("SnowAmt5Prcntl", SCALAR, "in","5 percentile", 100.0, -40.0, 1, NO),
-    'SnowAmt10Prcntl' : ("SnowAmt10Prcntl", SCALAR, "in","10 percentile", 100.0, -40.0, 1, NO),
-    'SnowAmt25Prcntl' : ("SnowAmt25Prcntl", SCALAR, "in","25 percentile", 100.0, -40.0, 1, NO),
-    'SnowAmt50Prcntl' : ("SnowAmt50Prcntl", SCALAR, "in","50 percentile", 100.0, -40.0, 1, NO),
-    'SnowAmt75Prcntl' : ("SnowAmt75Prcntl", SCALAR, "in","75 percentile", 100.0, -40.0, 1, NO),
-    'SnowAmt90Prcntl' : ("SnowAmt90Prcntl", SCALAR, "in","90 percentile", 100.0, -40.0, 1, NO),
-    'SnowAmt95Prcntl' : ("SnowAmt95Prcntl", SCALAR, "in","95 percentile", 100.0, -40.0, 1, NO),
-
-    # Snow Exceedance Probabilities (Add others as needed)
-    'ProbSnowGET' : ("ProbSnowGET", SCALAR, "%", "Prob. snow >= trace", 100.0, 0.0, 0, NO),
-    'ProbSnowGE1' : ("ProbSnowGE1", SCALAR, "%", "Prob. snow >= 1 inch", 100.0, 0.0, 0, NO),
-    'ProbSnowGE2' : ("ProbSnowGE2", SCALAR, "%", "Prob. snow >= 2 inches", 100.0, 0.0, 0, NO),
-    'ProbSnowGE4' : ("ProbSnowGE4", SCALAR, "%", "Prob. snow >= 4 inches", 100.0, 0.0, 0, NO),
-    'ProbSnowGE6' : ("ProbSnowGE6", SCALAR, "%", "Prob. snow >= 6 inches", 100.0, 0.0, 0, NO),
-    'ProbSnowGE8' : ("ProbSnowGE8", SCALAR, "%", "Prob. snow >= 8 inches", 100.0, 0.0, 0, NO),
-    'ProbSnowGE12' : ("ProbSnowGE12", SCALAR, "%", "Prob. snow >= 12 inches", 100.0, 0.0, 0, NO),
-    'ProbSnowGE18' : ("ProbSnowGE18", SCALAR, "%", "Prob. snow >= 18 inches", 100.0, 0.0, 0, NO),
-
-    # Freezing Rain Percentiles
-    'IceAccum5Prcntl' : ("IceAccum5Prcntl", SCALAR, "in","5 percentile", 5.0, -4.0, 2, NO),
-    'IceAccum10Prcntl' : ("IceAccum10Prcntl", SCALAR, "in","10 percentile", 5.0, -4.0, 2, NO),
-    'IceAccum25Prcntl' : ("IceAccum25Prcntl", SCALAR, "in","25 percentile", 5.0, -4.0, 2, NO),
-    'IceAccum50Prcntl' : ("IceAccum50Prcntl", SCALAR, "in","50 percentile", 5.0, -4.0, 2, NO),
-    'IceAccum75Prcntl' : ("IceAccum75Prcntl", SCALAR, "in","75 percentile", 5.0, -4.0, 2, NO),
-    'IceAccum90Prcntl' : ("IceAccum90Prcntl", SCALAR, "in","90 percentile", 5.0, -4.0, 2, NO),
-    'IceAccum95Prcntl' : ("IceAccum95Prcntl", SCALAR, "in","95 percentile", 5.0, -4.0, 2, NO),
-
-    # Freezing rain accretion probabilities
-    'ProbIceGE001' : ("ProbIceGE001", SCALAR, "%", "Prob. ice >= 0.01", 100.0, 0.0, 0, NO),
-    'ProbIceGE010' : ("ProbIceGE010", SCALAR, "%", "Prob. ice >= 0.10", 100.0, 0.0, 0, NO),
-    'ProbIceGE025' : ("ProbIceGE025", SCALAR, "%", "Prob. ice >= 0.25", 100.0, 0.0, 0, NO),
-    'ProbIceGE050' : ("ProbIceGE050", SCALAR, "%", "Prob. ice >= 0.50", 100.0, 0.0, 0, NO),
-
-# Persist WPC snow prob grids
-    'SnowAmt5PrcntlWPC' : ("SnowAmt5PrcntlWPC", SCALAR, "in","WPC 5th percentile snow amount", 100.0, -40.0, 1, NO),
-    'SnowAmt10PrcntlWPC' : ("SnowAmt10PrcntlWPC", SCALAR, "in","WPC 10th percentile snow amount", 100.0, -40.0, 1, NO),
-    'SnowAmt25PrcntlWPC' : ("SnowAmt25PrcntlWPC", SCALAR, "in","WPC 25th percentile snow amount", 100.0, -40.0, 1, NO),
-    'SnowAmt50PrcntlWPC' : ("SnowAmt50PrcntlWPC", SCALAR, "in","WPC 50th percentile snow amount", 100.0, -40.0, 1, NO),
-    'SnowAmt75PrcntlWPC' : ("SnowAmt75PrcntlWPC", SCALAR, "in","WPC 75th percentile snow amount", 100.0, -40.0, 1, NO),
-    'SnowAmt90PrcntlWPC' : ("SnowAmt90PrcntlWPC", SCALAR, "in","WPC 90th percentile snow amount", 100.0, -40.0, 1, NO),
-    'SnowAmt95PrcntlWPC' : ("SnowAmt95PrcntlWPC", SCALAR, "in","WPC 95th percentile snow amount", 100.0, -40.0, 1, NO),
-    'ProbSnowGETWPC' : ("ProbSnowGETWPC", SCALAR, "%", "WPC Prob. snow >= trace", 100.0, 0.0, 0, NO),
-    'ProbSnowGE1WPC' : ("ProbSnowGE1WPC", SCALAR, "%", "WPC Prob. snow >= 1 in", 100.0, 0.0, 0, NO),
-    'ProbSnowGE2WPC' : ("ProbSnowGE2WPC", SCALAR, "%", "WPC Prob. snow >= 2 in", 100.0, 0.0, 0, NO),
-    'ProbSnowGE4WPC' : ("ProbSnowGE4WPC", SCALAR, "%", "WPC Prob. snow >= 4 in", 100.0, 0.0, 0, NO),
-    'ProbSnowGE6WPC' : ("ProbSnowGE6WPC", SCALAR, "%", "WPC Prob. snow >= 6 in", 100.0, 0.0, 0, NO),
-    'ProbSnowGE8WPC' : ("ProbSnowGE8WPC", SCALAR, "%", "WPC Prob. snow >= 8 in", 100.0, 0.0, 0, NO),
-    'ProbSnowGE12WPC' : ("ProbSnowGE12WPC", SCALAR, "%", "WPC Prob. snow >= 12 in", 100.0, 0.0, 0, NO),
-    'ProbSnowGE18WPC' : ("ProbSnowGE18WPC", SCALAR, "%", "WPC Prob. snow >= 18 in", 100.0, 0.0, 0, NO),
-}
-
-# Add rainfall probability definitions
-optionalParmsDict['rainfallProb']={
-    # Rain Percentiles
-    'QPF5Prcntl' : ("QPF5Prcntl", SCALAR, "in","5 percentile", 36.0, -24.0, 2, NO),
-    'QPF10Prcntl' : ("QPF10Prcntl", SCALAR, "in","10 percentile", 36.0, -24.0, 2, NO),
-    'QPF25Prcntl' : ("QPF25Prcntl", SCALAR, "in","25 percentile", 36.0, -24.0, 2, NO),
-    'QPF50Prcntl' : ("QPF50Prcntl", SCALAR, "in","50 percentile", 36.0, -24.0, 2, NO),
-    'QPF75Prcntl' : ("QPF75Prcntl", SCALAR, "in","75 percentile", 36.0, -24.0, 2, NO),
-    'QPF90Prcntl' : ("QPF90Prcntl", SCALAR, "in","90 percentile", 36.0, -24.0, 2, NO),
-    'QPF95Prcntl' : ("QPF95Prcntl", SCALAR, "in","95 percentile", 36.0, -24.0, 2, NO),
-
-    # Rain Exceedance Probabilities (Add others as needed)
-    'ProbRainGE001' : ("ProbRainGE001", SCALAR, "%", "Prob. Rain >= 0.01 in", 100.0, 0.0, 0, NO),
-    'ProbRainGE010' : ("ProbRainGE010", SCALAR, "%", "Prob. Rain >= 0.10 in", 100.0, 0.0, 0, NO),
-    'ProbRainGE025' : ("ProbRainGE025", SCALAR, "%", "Prob. Rain >= 0.25 in", 100.0, 0.0, 0, NO),
-    'ProbRainGE050' : ("ProbRainGE050", SCALAR, "%", "Prob. Rain >= 0.50 in", 100.0, 0.0, 0, NO),
-    'ProbRainGE075' : ("ProbRainGE075", SCALAR, "%", "Prob. Rain >= 0.75 in", 100.0, 0.0, 0, NO),
-    'ProbRainGE100' : ("ProbRainGE100", SCALAR, "%", "Prob. Rain >= 1.00 in", 100.0, 0.0, 0, NO),
-    'ProbRainGE150' : ("ProbRainGE150", SCALAR, "%", "Prob. Rain >= 1.50 in", 100.0, 0.0, 0, NO),
-    'ProbRainGE200' : ("ProbRainGE200", SCALAR, "%", "Prob. Rain >= 2.00 in", 100.0, 0.0, 0, NO),
-    'ProbRainGE250' : ("ProbRainGE250", SCALAR, "%", "Prob. Rain >= 2.50 in", 100.0, 0.0, 0, NO),
-    'ProbRainGE300' : ("ProbRainGE300", SCALAR, "%", "Prob. Rain >= 3.00 in", 100.0, 0.0, 0, NO),
-}
-
-
-# Make all optional parms available as variables.
-for optionalParmKey in optionalParmsDict:
-    for pname,parm in optionalParmsDict[optionalParmKey].iteritems():
-        setattr(sys.modules[__name__],pname,parm)
-
-#-----------------------------------
-# DO NOT CHANGE THE FOLLOWING SECTION
-#------------------------------------
-if not BASELINE and siteImport('localWxConfig'):
-    types = localWxConfig.types
-
-
-#---------------------------------------------------------------------------
-#
-#  Projection Configuration section.
-#
-#---------------------------------------------------------------------------
-from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData
-ProjectionType = ProjectionData.ProjectionType
-NONE = ProjectionType.NONE
-LAMBERT_CONFORMAL = ProjectionType.LAMBERT_CONFORMAL
-MERCATOR = ProjectionType.MERCATOR
-POLAR_STEREOGRAPHIC = ProjectionType.POLAR_STEREOGRAPHIC
-LATLON = ProjectionType.LATLON
-
-# projectionID / projectionType / latLonLL / latLonUR /
-# latLonOrigin / stdParallelOne / stdParallelTwo / gridPointLL / gridPointUR
-# latIntersect / lonCenter / lonOrigin
-
-Grid201 = ('Grid201',POLAR_STEREOGRAPHIC,
-      (-150.00, -20.826), (-20.90846, 30.0),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (65, 65), 0.0, 0.0, -105.0)
-
-Grid202 = ('Grid202', POLAR_STEREOGRAPHIC,
-      (-141.028, 7.838), (-18.576, 35.617),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (65, 43), 0.0, 0.0, -105.0)
-
-Grid203 = ('Grid203', POLAR_STEREOGRAPHIC,
-      (-185.837, 19.132), (-53.660, 57.634),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (45, 39), 0.0, 0.0, -150.0)
-
-Grid204 = ('Grid204', MERCATOR,
-      (-250.0, -25.0), (-109.129, 60.644),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (93, 68), 0.0, -179.564, 0.0)
-
-Grid205 = ('Grid205', POLAR_STEREOGRAPHIC,
-      (-84.904, 0.616), (-15.000, 45.620),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (45, 39), 0.0, 0.0, -60.0)
-
-Grid206 = ('Grid206', LAMBERT_CONFORMAL,
-      (-117.991, 22.289), (-73.182, 51.072),
-      (-95.0, 25.0), 25.0, 25.0, (1, 1), (51, 41), 0.0, 0.0, 0.0)
-
-Grid207 = ('Grid207', POLAR_STEREOGRAPHIC,
-      (-175.641, 42.085), (-93.689, 63.976),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (49, 35), 0.0, 0.0, -150.0)
-
-Grid208 = ('Grid208', MERCATOR,
-      (-166.219, 10.656), (-147.844, 27.917),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (25, 25), 0.0, -157.082, 0.0)
-
-Grid209 = ('Grid209', LAMBERT_CONFORMAL,
-      (-117.991, 22.289), (-73.182, 51.072),
-      (-95.0, 25.0), 25.0, 25.0, (1, 1), (101, 81), 0.0, 0.0, 0.0)
-
-Grid210 = ('Grid210', MERCATOR,
-      (-77.000, 9.000), (-58.625, 26.422),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (25, 25), 0.0, -67.812, 0.0)
-
-Grid211 = ('Grid211', LAMBERT_CONFORMAL,
-      (-133.459, 12.190), (-49.385, 57.290),
-      (-95.0, 25.0), 25.0, 25.0, (1, 1), (93, 65), 0.0, 0.0, 0.0)
-
-Grid212 = ('Grid212', LAMBERT_CONFORMAL,
-      (-133.459, 12.190), (-49.385, 57.290),
-      (-95.0, 25.0), 25.0, 25.0, (1, 1), (185, 129), 0.0, 0.0, 0.0)
-
-Grid213 = ('Grid213', POLAR_STEREOGRAPHIC,
-      (-141.028, 7.838), (-18.577, 35.617),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (129, 85), 0.0, 0.0, -105.0)
-
-Grid214 = ('Grid214', POLAR_STEREOGRAPHIC,
-      (-175.641, 42.085), (-93.689, 63.975),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (97, 69), 0.0, 0.0, -150.0)
-
-# (new alaska grid)
-Grid214AK = ('Grid214AK', POLAR_STEREOGRAPHIC,
-             (-178.571, 40.5301), (-93.689, 63.975),
-             (0.0, 0.0), 0.0, 0.0, (1,1), (104, 70), 0.0, 0.0, -150.0)
-
-Grid215 = ('Grid215', LAMBERT_CONFORMAL,
-      (-133.459, 12.190), (-49.385, 57.290),
-      (-95.0, 25.0), 25.0, 25.0, (1, 1), (369, 257), 0.0, 0.0, 0.0)
-
-Grid216 = ('Grid216', POLAR_STEREOGRAPHIC,
-      (-173.000, 30.000), (-62.850, 70.111),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (139, 107), 0.0, 0.0, -135.0)
-
-Grid217 = ('Grid217', POLAR_STEREOGRAPHIC,
-      (-173.000, 30.000), (-62.850, 70.111),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (277, 213), 0.0, 0.0, -135.0)
-
-Grid218 = ('Grid218', LAMBERT_CONFORMAL,
-      (-133.459, 12.190), (-49.385, 57.290),
-      (-95.0, 25.0), 25.0, 25.0, (1, 1), (614, 428), 0.0, 0.0, 0.0)
-
-Grid219 = ('Grid219', POLAR_STEREOGRAPHIC,
-      (-119.559, 25.008), (60.339, 24.028),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (385, 465), 0.0, 0.0, -80.0)
-
-Grid221 = ('Grid221', LAMBERT_CONFORMAL,
-      (-145.500, 1.000), (-2.566, 46.352),
-      (-107.0, 50.0), 50.0, 50.0, (1, 1), (349, 277), 0.0, 0.0, 0.0)
-
-Grid222 = ('Grid222', LAMBERT_CONFORMAL,
-      (-145.500, 1.000), (-2.566, 46.352),
-      (-107.0, 50.0), 50.0, 50.0, (1, 1), (59, 47), 0.0, 0.0, 0.0)
-
-Grid225 = ('Grid225', MERCATOR,
-      (-250.0, -25.0), (-109.129, 60.644),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (185, 135), 0.0, -179.564, 0.0)
-
-Grid226 = ('Grid226', LAMBERT_CONFORMAL,
-      (-133.459, 12.190), (-49.385, 57.290),
-      (-95.0, 25.0), 25.0, 25.0, (1, 1), (737, 513), 0.0, 0.0, 0.0)
-
-Grid227 = ('Grid227', LAMBERT_CONFORMAL,
-      (-133.459, 12.190), (-49.385, 57.290),
-      (-95.0, 25.0), 25.0, 25.0, (1, 1), (1473, 1025), 0.0, 0.0, 0.0)
-
-Grid228 = ('Grid228', LATLON,
-      (0.0, 90.0), (359.0, -90.0), (0.0, 0.0), 0.0, 0.0,
-      (1, 1), (144, 73), 0.0, 0.0, 0.0)
-
-Grid229 = ('Grid229', LATLON,
-      (0.0, 90.0), (359.0, -90.0),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (360, 181), 0.0, 0.0, 0.0)
-
-Grid230 = ('Grid230', LATLON,
-      (0.0, 90.0), (359.5, -90.0),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 361), 0.0, 0.0, 0.0)
-
-Grid231 = ('Grid231', LATLON,
-      (0.0, 0.0), (359.5, 90.0),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 181), 0.0, 0.0, 0.0)
-
-Grid232 = ('Grid232', LATLON,
-      (0.0, 0.0), (359.0, 90.0),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (360, 91), 0.0, 0.0, 0.0)
-
-Grid233 = ('Grid233', LATLON,
-      (0.0, -78.0), (358.750, 78.0),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (288, 157), 0.0, 0.0, 0.0)
-
-Grid234 = ('Grid234', LATLON,
-      (-98.000, 15.0), (-65.000, -45.0),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (133, 121), 0.0, 0.0, 0.0)
-
-Grid235 = ('Grid235', LATLON,
-      (0.250, 89.750), (359.750, -89.750),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 360), 0.0, 0.0, 0.0)
-
-HRAP = ('HRAP', POLAR_STEREOGRAPHIC,
-      (-119.036, 23.097), (-75.945396, 53.480095),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (801, 881), 0.0, 0.0, -105.0)
-
-NDFD_Oceanic_10K = ('NDFD_Oceanic_10km', MERCATOR,
-      (-230.094, -30.4192), (10.71, 80.01),
-      (0.0, 0.0), 0.0, 0.0, (1, 1), (2517, 1793), 0.0, -109.962, 0.0)
-
-#  Add a new domain for NHC purposes
-GridForNHA = ('GridForNHA', LAMBERT_CONFORMAL,
-      (-103.929, 20.164), (-50.8894, 42.9545),
-      (-95.0, 35.0), 35.0, 35.0, (1, 1), (1833,1241), 0.0, 0.0, 0.0)
-
-# list of all projections
-allProjections = [Grid201, Grid202, Grid203, Grid204, Grid205, Grid206,
- Grid207, Grid208, Grid209, Grid210, Grid211, Grid212, Grid213, Grid214,
- Grid214AK, Grid215, Grid216, Grid217, Grid218, Grid219, Grid221, Grid222,
- Grid225, Grid226, Grid227, Grid228, Grid229, Grid230, Grid231, Grid232,
- Grid233, Grid234, Grid235, HRAP, NDFD_Oceanic_10K, GridForNHA]
-
-#---------------------------------------------------------------------------
-#
-#  Grid Domain configuration section
-#
-#---------------------------------------------------------------------------
-#
-# xdim/ydim:  Defines the dimensions of the grids. (GFE grid size)
-#
-# origin:  Defines the lower-left corner of the grid (point 0,0) in
-#   world coordinates.
-#
-# extent:  Defines the "size" of the grid in world coordinates.  The upper
-#   right corner is the origin+extent.
-#
-# TimeZone: Defines the timezone used by this site in standard TZ format.
-# Refer to /usr/share/zoneinfo/zone.tab for the correct settings.
-#
-# Projection:  Defines the projection identifier to be used for this domain.
-
-# Note that all parameters for an existing database must use the same
-# projection, though not necessarily the same grid size and location.
-
-# These values are set up for AWIPS.  There is a script at the end
-# of this section that adjusts the resolution for the RPP sites.
-
-#         [xdim, ydim] / (origin) /( extent)  / TimeZone / Projection / OfficeType
-
-SITES = {
-#WFOs
-    # Experimental combined AFC site
-    'AFC' : ([1057, 449], (1.0, 19.00),  (66.0, 28.0), 'America/Anchorage', Grid214AK, "wfo"),
-    'ABQ' : ([145, 145], (36.00, 22.00), (9.0, 9.0), 'MST7MDT', Grid211,"wfo"),
-    'ABR' : ([145, 145], (45.00, 35.00), (9.0, 9.0), 'CST6CDT', Grid211,"wfo"),
-    'AER' : ([369, 337], (44.00, 23.00), (23.0, 21.0), 'America/Anchorage', Grid214AK, "wfo"),
-    'AFG' : ([641, 497], (27.0, 38.0),   (40.0, 31.0), 'America/Anchorage', Grid214AK, "wfo"),
-    'AJK' : ([337, 241], (62.0, 23.0),   (21.0, 15.0), 'America/Juneau', Grid214AK, "wfo"),
-    'AKQ' : ([145, 145], (68.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'ALU' : ([865, 449], (1.0, 19.0),    (54.0, 28.0), 'America/Anchorage', Grid214AK, "wfo"),
-    'ALY' : ([145, 145], (70.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'AMA' : ([145, 145], (41.00, 21.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'APX' : ([145, 145], (58.00, 34.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'ARX' : ([145, 145], (52.00, 33.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'BGM' : ([145, 145], (68.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'BIS' : ([145, 145], (43.00, 37.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'BMX' : ([145, 145], (58.00, 19.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'BOI' : ([177, 177], (25.00, 34.00), (11.0, 11.0), 'MST7MDT', Grid211, "wfo"),
-    'BOU' : ([145, 145], (38.00, 27.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'BOX' : ([187, 154], (75.375,34.59375), (5.8125,4.78125), "EST5EDT", Grid211, "wfo"),
-    'BRO' : ([145, 145], (44.00, 10.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'BTV' : ([193, 157], (72.00, 37.15), (6.0, 4.875), 'EST5EDT', Grid211, "wfo"),
-    'BUF' : ([145, 145], (66.00, 32.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'BYZ' : ([145, 145], (36.00, 37.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'CAE' : ([145, 145], (65.00, 20.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'CAR' : ([145, 145], (75.00, 39.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'CHS' : ([145, 145], (65.00, 18.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'CLE' : ([145, 145], (62.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'CRP' : ([145, 145], (45.00, 11.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'CTP' : ([145, 145], (67.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'CYS' : ([145, 145], (37.00, 31.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'DDC' : ([145, 145], (43.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'DLH' : ([145, 145], (50.00, 37.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'DMX' : ([145, 145], (49.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'DTX' : ([161, 161], (57.00, 34.00), (10.0, 10.0), 'EST5EDT', Grid211, "wfo"),
-    'DVN' : ([145, 145], (52.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'EAX' : ([145, 145], (50.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'EKA' : ([145, 145], (20.00, 31.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'EPZ' : ([145, 145], (36.00, 16.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'EWX' : ([145, 145], (44.00, 12.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'FFC' : ([145, 145], (61.00, 18.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'FGF' : ([145, 145], (45.00, 39.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'FGZ' : ([145, 145], (29.00, 23.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"),
-    'FSD' : ([177, 177], (43.00, 32.00), (11.0, 11.0), 'CST6CDT', Grid211, "wfo"),
-    'FWD' : ([145, 145], (45.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'GGW' : ([145, 145], (36.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'GID' : ([145, 145], (44.00, 28.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'GJT' : ([145, 145], (34.00, 27.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'GLD' : ([145, 145], (41.00, 26.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'GRB' : ([145, 145], (54.00, 35.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'GRR' : ([145, 145], (58.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'GSP' : ([145, 145], (63.00, 21.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'GUM' : ([193, 193], (23.0, 26.0), (3.0, 3.0), 'Pacific/Guam', Grid204, "wfo"),
-    'GYX' : ([193,209],  (76.00, 37.375), (6.0, 6.5), 'EST5EDT', Grid211, "wfo"),
-    'HFO' : ([321, 225], (58.78125,29.875),(5.0,3.5), 'Pacific/Honolulu', Grid204, 'wfo'),
-    'HGX' : ([145, 145], (48.00, 13.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'HNX' : ([145, 145], (22.00, 24.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'HUN' : ([161, 161], (60.0, 22.0),   (5.0, 5.0), 'CST6CDT', Grid211, "wfo"),
-    'ICT' : ([145, 145], (45.00, 25.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'ILM' : ([145, 145], (67.00, 21.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'ILN' : ([145, 145], (60.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'ILX' : ([145, 145], (55.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'IND' : ([145, 145], (58.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'IWX' : ([145, 145], (58.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'JAN' : ([145, 145], (54.00, 18.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'JAX' : ([145, 145], (64.00, 14.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'JKL' : ([145, 145], (61.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'KEY' : ([145, 145], (66.00, 8.00),  (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'LBF' : ([145, 145], (43.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'LCH' : ([145, 145], (52.00, 15.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'LIX' : ([145, 145], (54.00, 14.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'LKN' : ([145, 145], (25.00, 30.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'LMK' : ([145, 145], (59.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'LOT' : ([145, 145], (55.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'LOX' : ([145, 145], (21.00, 23.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'LSX' : ([145, 145], (52.00, 25.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'LUB' : ([145, 145], (39.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'LWX' : ([145, 145], (67.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'LZK' : ([145, 145], (51.00, 20.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'MAF' : ([205,247],  (40.375, 16.8125), (6.375, 7.6875), 'CST6CDT', Grid211, "wfo"),
-    'MEG' : ([145, 145], (54.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'MFL' : ([145, 145], (66.00, 9.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'MFR' : ([145, 145], (20.00, 34.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'MHX' : ([145, 145], (68.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'MKX' : ([145, 145], (55.00, 33.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'MLB' : ([145, 145], (66.00, 12.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'MOB' : ([145, 145], (57.00, 16.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'MPX' : ([145, 145], (50.00, 34.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'MQT' : ([145, 145], (56.00, 36.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'MRX' : ([145, 145], (61.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'MSO' : ([145, 145], (29.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'MTR' : ([145, 145], (20.00, 26.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'OAX' : ([145, 145], (45.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'OHX' : ([145, 145], (58.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'OKX' : ([145, 145], (71.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'OTX' : ([145, 145], (25.00, 40.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'OUN' : ([145, 145], (44.00, 21.00), (9.0, 9.0), 'CST6CDT',  Grid211, "wfo"),
-    'PAH' : ([145, 145], (56.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'PBZ' : ([145, 145], (65.00, 29.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'PDT' : ([145, 145], (23.00, 38.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'PHI' : ([145, 145], (70.00, 28.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'PIH' : ([145, 145], (30.00, 34.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'PQR' : ([145, 145], (19.00, 38.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'PSR' : ([145, 145], (28.00, 20.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"),
-    'PUB' : ([145, 145], (38.00, 26.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'RAH' : ([145, 145], (66.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'REV' : ([145, 145], (23.00, 29.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'RIW' : ([145, 145], (35.00, 33.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'RLX' : ([145, 145], (63.00, 26.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'RNK' : ([161, 161], (67.0,  26.00), (5.0, 5.0), 'EST5EDT', Grid211, 'wfo'),
-    'SEW' : ([145, 145], (21.00, 42.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'SGF' : ([145, 145], (51.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'SGX' : ([145, 145], (24.00, 21.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'SHV' : ([145, 145], (50.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'SJT' : ([145, 145], (43.00, 16.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'SJU' : ([32, 28], (10.0, 10.0), (8.0, 7.0), 'America/Puerto_Rico',Grid210, "wfo"),
-    'SLC' : ([161, 161], (30.00, 28.00), (10.0, 10.0), 'MST7MDT', Grid211, "wfo"),
-    'STO' : ([145, 145], (20.00, 28.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-    'TAE' : ([145, 145], (60.00, 15.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'TBW' : ([145, 145], (64.00, 11.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),
-    'TFX' : ([145, 145], (32.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'TOP' : ([145, 145], (47.00, 26.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'TSA' : ([145, 145], (48.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),
-    'TWC' : ([145, 145], (29.00, 20.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"),
-    'UNR' : ([145, 145], (40.00, 34.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),
-    'VEF' : ([145, 145], (26.00, 25.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),
-#RFCs
-    'ACR' : ([565, 415], (26.0, 19.0), (60.0, 44.0), 'America/Anchorage', Grid214AK, "rfc"),
-    'ALR' : ([299, 278], (59.0, 11.0), (17.0, 19.0), 'CST6CDT', Grid211, "rfc"),
-    'FWR' : ([362, 334], (36.0, 11.0), (20.0, 20.0), 'CST6CDT', Grid211, "rfc"),
-    'KRF' : ([408, 356], (33.0, 27.0), (26.0, 22.0), 'CST6CDT', Grid211, "rfc"),
-    'MSR' : ([381, 304], (43.0, 28.0), (24.0, 20.0), 'CST6CDT', Grid211, "rfc"),
-    'ORN' : ([303, 216], (51.0, 16.0), (18.0, 14.0), 'CST6CDT', Grid211, "rfc"),
-    'PTR' : ([218, 308], (21.0, 35.0), (17.0, 19.0), 'PST8PDT', Grid211, "rfc"),
-    'RHA' : ([132, 140], (69.0, 28.0), (7.0, 10.0), 'EST5EDT', Grid211, "rfc"),
-    'RSA' : ([140, 296], (21.0, 23.0), (12.0, 17.0), 'PST8PDT', Grid211, "rfc"),
-    'STR' : ([171, 307], (29.0, 20.0), (13.0, 18.0), 'MST7MDT', Grid211, "rfc"),
-    'TAR' : ([226, 164], (69.0, 34.0), (13.0, 13.0), 'EST5EDT', Grid211, "rfc"),
-    'TIR' : ([220, 171], (59.0, 25.0), (13.0, 12.0), 'EST5EDT', Grid211, "rfc"),
-    'TUA' : ([281, 168], (39.0, 22.0), (18.0, 10.0), 'CST6CDT', Grid211, "rfc"),
-
-#Special Sites - Added Hawaiian High Seas domain
-    'US' : ([267, 159], (18.0, 9.5), (67.0, 40.0), 'EDT5EDT', Grid211, "other"),
-    'FSL' : ([161, 145], (38.50, 27.00), (10.0, 9.0), 'MST7MDT', Grid211, "other"),
-    'NH1' : ([838, 577], (887.0, 121.0), (837.0, 576.0), 'EST5EDT', NDFD_Oceanic_10K, "wfo"),
-    'NH2' : ([1188, 363], (1328.0, 365.0), (1187.0, 362.0), 'EST5EDT', NDFD_Oceanic_10K, "wfo"),
-    'ONA' : ([244, 383], (68.9375, 19.5625), (15.1875, 23.875), 'EST5EDT', Grid211, "wfo"),
-    'ONP' : ([396, 415], (8.1875, 21.5625), (24.6875, 25.875), 'PST8PDT', Grid211, "wfo"),
-    'HPA' : ([899, 671], (284.0, 30.0), (898.0, 670.0), 'Pacific/Honolulu', NDFD_Oceanic_10K, "wfo"),
-    'WNJ' : ([301, 346], (1000.0, 475.0), (300.0, 345.0), 'CST6CDT', NDFD_Oceanic_10K, "wfo"),
-
-#Aviation Domains for AAWU
-    'AAWU' : ([705, 457], (1.0, 11.0), (88.0, 57.0), 'America/Anchorage', Grid214AK, 'nc'),
-    'AVAK' : ([465, 417], (8.0, 12.0), (29.0, 26.0), 'America/Anchorage', Grid203, 'nc'),
-
-#Regional Offices
-    'VUY' : ([337,449], (62.00, 19.00), (21.0, 28.0), 'EST5EDT', Grid211, "ro"),
-    'BCQ' : ([145,145], (50.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "ro"),
-    'EHU' : ([657,321], (36.00, 9.50), (41.0, 20.0), 'CST6CDT', Grid211, "ro"),
-    'VHW' : ([161,161], (30.00, 28.00), (10.0, 10.0), 'MST7MDT', Grid211, "ro"),
-    'PBP' : ([321,225], (7.00, 11.00), (10.0, 7.0), 'Pacific/Honolulu', Grid208, "ro"),
-    'VRH' : ([1409, 913], (1.0, 11.0), (88.0, 57.0), 'America/Anchorage', Grid214AK, 'nc'),
-
-#National Centers
-    'HAK' : ( [825,553], ( 1.0, 1.0), (103.0, 69.0), 'EST5EDT', Grid214AK, "nc"),
-    'HUS' : ([1073,689], (19.0, 8.0), ( 67.0, 43.0), 'EST5EDT', Grid211,   "nc"),
-    'NHA' : ([1873,1361], (35.5, 3.5), (58.5, 42.5), 'EST5EDT', Grid211, "nc"),
-
-}
-
-# Get list of valid office types, for validation.
-VALID_OFFICE_TYPES = []
-# List of all values of all sites.
-for siteValues in SITES.values():
-    # Office type is the 5th element of each site's values
-    officeType = siteValues[5]
-    if officeType not in VALID_OFFICE_TYPES:
-        # A new office type
-        VALID_OFFICE_TYPES.append(officeType)
-
-#---------------------------------------------------------------------------
-#
-#  Time Constraint configuration section
-#
-#---------------------------------------------------------------------------
-HOUR = 3600
-DAY  = 24 * HOUR
-
-# Start: is the number of seconds since 0000z for the first grid of the day
-# Repeat: is the number of seconds from start until the next grid starts
-# Duration: is the length of the grid in number of seconds
-
-# Examples of constraints:
-# Hourly temperatures
-#     HrTemp = (0, HOUR, HOUR)
-# QPF that is 6 hours long, aligned on 0000z, exists for every 6 hours
-#     Q = (0, HOUR*6, HOUR*6)
-#
-
-# fixed time constraints: start / repeat / duration
-TC_1M    = (0, 60, 60) # 1 minute
-TC1      = (0, HOUR, HOUR)
-TC3      = (0, 3 * HOUR, HOUR)
-TC6      = (0, 6 * HOUR, HOUR)
-TC12     = (0, 12 * HOUR, HOUR)
-TC3NG    = (0, 3 * HOUR, 3 * HOUR)
-TC6NG    = (0, 6 * HOUR, 6 * HOUR)
-TC12NG   = (0, 12 * HOUR, 12 * HOUR)
-TC24NG   = (0, 24 * HOUR, 24 * HOUR)
-TC061212 = (6 * HOUR, 12 * HOUR, 12 * HOUR)
-Persistent = (0, 0, 0)     # special time constraint
-
-
-# The following time constraints are based on local standard time.
-# Change the last parameter from 0 to 1 to force daylight savings time
-# always.
-# PWS TCs changed in OB9.3 for new 6 hour data from NHC
-MaxTTC     = localTC(7*HOUR, 24*HOUR, 13*HOUR, 0)
-MinTTC     = localTC(19*HOUR, 24*HOUR, 14*HOUR, 0)
-MaxRHTC    = localTC(15*HOUR, 24*HOUR, 18*HOUR, 0)
-MinRHTC    = localTC(3*HOUR, 24*HOUR, 18*HOUR, 0)
-LT3NG      = localTC(0*HOUR, 3*HOUR, 3*HOUR, 0)
-LT6NG      = localTC(0*HOUR, 6*HOUR, 6*HOUR, 0)
-LT12NG     = localTC(6*HOUR, 12*HOUR, 12*HOUR, 0)
-LTMOS      = localTC(6*HOUR, 12*HOUR, 12*HOUR, 0)  #special MOS local time
-MaxTTCMOS  = localTC(6*HOUR, 24*HOUR, 12*HOUR, 0)  #special MOS maxT
-MinTTCMOS  = localTC(18*HOUR, 24*HOUR, 12*HOUR, 0)  #special MOS minT
-LT24       = localTC(0*HOUR, 24*HOUR, 24*HOUR, 0)
-FireWx1300TC = localTC(13*HOUR, 24*HOUR, 1*HOUR, 0)   #special FireWx 1pm snap
-#DR3511 DeltaMaxTTC  = localTC(7*HOUR, 24*HOUR, 16*HOUR, 0)  # just for HPCdeltaMaxT
-PWSDTC     = localTC(11*HOUR, 24*HOUR, 12*HOUR, 0)
-PWSNTC     = localTC(23*HOUR, 24*HOUR, 12*HOUR, 0)
-# Alaska OCONUS
-if SID in siteRegion['AR']:
-    MaxTTC     = localTC(5*HOUR, 24*HOUR, 15*HOUR, 0)
-    MinTTC     = localTC(17*HOUR, 24*HOUR, 18*HOUR, 0)
-
-# From NwsInitsConfig
-LT24APT  = localTC(7*HOUR, 24*HOUR, 24*HOUR, 0)
-FireWxAvgTC = localTC( 12*HOUR,  24*HOUR,  6*HOUR, 0)
-LT4HH = localTC(11*HOUR, 24*HOUR, 4*HOUR, 0)
-SPC24 = (12*HOUR, 24*HOUR, 24*HOUR)
-# For WR
-TC0624NG=(6*HOUR,24*HOUR,24*HOUR)
-TC12NG6=(6*HOUR,12*HOUR,12*HOUR)
-# HIL Time Constraint
-HILTC=(6*HOUR,24*HOUR,24*HOUR)
-
-#---------------------------------------------------------------------------
-#
-#  Database/(Model) Attribute Configuration
-#
-#---------------------------------------------------------------------------
-#
-# name:  The model name of the database
-#
-# format:  Either 'GRID' or 'DFM'
-#
-# type:  Optional type of the database
-#
-# single:  YES or NO. YES if this database always exists and is not
-#   based on model-times.  NO if this database is created/destroyed and
-#   is based on model-runs.  When created, the names of these databases have
-#   time stamps.
-#
-# official:  YES or NO.  YES if this is an official database from which
-#   products can be generated.  NO if this is a conventional database.
-#
-# numVer:  Number of versions of this database to retain.
-#
-# purgeAge: Number of hours in the past before grids will be automatically
-#   purged from the database.  If 0, then purging is disabled.
-#
-
-YES = 1
-NO = 0
-GRID = 'GRID'
-# name /  format / type / single / official / numVer / purgeAge
-
-Fcst        = ('Fcst',         GRID,   '', YES, NO,  1, 24)
-Practice    = ('Fcst',         GRID,   'Prac', YES, NO,  1, 24)
-TestFcst    = ('Fcst',         GRID,   'Test', YES, NO,  1, 24)
-Restore     = ('Restore',      GRID,   '', YES, NO,  1, 24)
-Test        = ('Test',         GRID,   'test', NO, NO,  1, 0)
-Official    = ('Official',     GRID,   '', YES, YES, 1, 24)
-ISC         = ('ISC',          GRID,   '', YES, NO,  1, 12)
-
-
-#---------------------------------------------------------------------------
-#
-#  Search path for netCDF data files.
-#  NOTE: This feature was implemented only backward compatibility with existing A1 datasets.
-#        New datasets should be generated in a from that can be ingested by A2
-#        It shoudl only be used for static datasets.
-#        New files will not be recognized without a server restart.
-#
-#---------------------------------------------------------------------------
-# Alaska OCONUS
-if SID in groups['ALASKA_SITES']:
-    NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISMAK'),
-                  ('/awips2/edex/data/gfe/climo/PRISMAK800'),
-                  ]
-
-# Hawaii OCONUS
-elif SID == "HFO":
-    NETCDFDIRS = [('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'),
-                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'),
-                  ('/awips2/edex/data/gfe/topo/StdTerrain/Hawaii', 'StdTerrain'),
-                  ]
-
-# San Juan OCONUS
-elif SID == "SJU":
-    NETCDFDIRS = [('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'),
-                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'),
-                  ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'),
-                  ('/awips2/edex/data/gfe/topo/StdTerrain/PuertoRico', 'StdTerrain')
-                  ]
-
-# Guam OCONUS
-elif SID == "GUM":
-    NETCDFDIRS = []
-
-#CONUS sites
-elif SID in groups['CONUS_EAST_SITES']:
-    NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISM'),
-                  ('/awips2/edex/data/gfe/climo/NCDC'),
-                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'),
-                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'),
-                  ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'),
-                  ('/awips2/edex/data/gfe/topo/StdTerrain/CONUS', 'StdTerrain'),
-                  ]
-
-else:   #######DCS3501 WEST_CONUS
-    NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISM'),
-                  ('/awips2/edex/data/gfe/climo/NCDC'),
-                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'),
-                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'),
-                  ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'),
-                  ('/awips2/edex/data/gfe/topo/StdTerrain/CONUS', 'StdTerrain'),
-                  ]
-
-#---------------------------------------------------------------------------
-#
-# Where to find (and what to call) satellite data.
-#
-#---------------------------------------------------------------------------
-#
-
-# This table contains product ID and weather element names for satellite data
-#
-# A product ID consists of the sector ID and physical element of the
-# satellite product.
-#
-# Examples:
-#
-#   "East CONUS/Imager Visible"
-#   "East CONUS/Imager 11 micron IR"
-#   "East CONUS/Imager 13 micron (IR)"
-#   "East CONUS/Imager 3.9 micron IR"
-#
-
-# Alaska OCONUS
-if SID in groups['ALASKA_SITES']:
-    SATDATA = []
-
-# Hawaii OCONUS
-elif SID == "HFO":
-    SATDATA = []
-
-# San Juan OCONUS
-elif SID == "SJU":
-    SATDATA = [("East CONUS/Imager Visible", "visibleEast"),
-               ("East CONUS/Imager 11 micron IR", "ir11East"),
-               ("East CONUS/Imager 13 micron (IR)", "ir13East"),
-               ("East CONUS/Imager 3.9 micron IR", "ir39East"),
-               ("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
-
-# Guam OCONUS
-elif SID == "GUM":
-    SATDATA = []
-
-#CONUS sites
-else:
-    SATDATA = [("West CONUS/Imager Visible", "visibleWest"),
-               ("West CONUS/Imager 11 micron IR", "ir11West"),
-               ("West CONUS/Imager 13 micron (IR)", "ir13West"),
-               ("West CONUS/Imager 3.9 micron IR", "ir39West"),
-               ("West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),
-               ("East CONUS/Imager Visible", "visibleEast"),
-               ("East CONUS/Imager 11 micron IR", "ir11East"),
-               ("East CONUS/Imager 13 micron (IR)", "ir13East"),
-               ("East CONUS/Imager 3.9 micron IR", "ir39East"),
-               ("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]
-
-#---------------------------------------------------------------------------
-#
-#  Intersite Coordination Configurations
-#
-#---------------------------------------------------------------------------
-# base urls for the ISC Routing Table
-ISC_ROUTING_TABLE_ADDRESS = {
-    "ANCF" : "http://svcbu-ancf.er.awips.noaa.gov:8080/irt",
-    "BNCF" : "http://svcbu-bncf.er.awips.noaa.gov:8080/irt"
-    }
-
-
-# list of sites that from which you want ISC data (If None, ifpServer will
-# automatically calculate the list.)  Should always include your own site.
-REQUESTED_ISC_SITES = None
-
-# Overall ISC request flag.  Must be set to 1 in order to request and receive
-# ISC data.  Must be 1 to register with the IRT.
-REQUEST_ISC = 0
-
-# Sending control flag.  Set to 1 to send isc when data is saved.
-SEND_ISC_ON_SAVE = 0
-
-# Sending control flag.  Set to 1 to send isc when data is published.
-SEND_ISC_ON_PUBLISH = 0
-
-# List of weather elements to request for ISC.  If set to None, it defaults
-# to the list of all weather elements in the Fcst database.
-REQUESTED_ISC_PARMS = None
-
-# Transmission script for sending data.  This is the script that iscExtract
-# and other routines (e.g., vtec table sharing) will call to perform the
-# actual transmission of data.
-TRANSMIT_SCRIPT = GFESUITE_HOME + '/bin/gfe_msg_send -s %SUBJECT -a %ADDRESSES -i %WMOID -c 11 -p 0 -e %ATTACHMENTS'
-
-
-# Extra ISC parms (weather elements).  These are a list of the baseline
-# weather elements to be added as extra parms to the ISC database.  This
-# is necessary when receiving ISC grids from a site that is a different
-# office type than your own.  You never need to add weather elements
-# to the ISC database that is your own office type.  The format of this
-# entry is a list of tuples.  The tuple is a list of weather elements
-# objects (such as Temp and not "T"), and an office type, such as "rfc".
-EXTRA_ISC_PARMS = [([QPF,FloodingRainThreat], 'rfc'), ([QPF,FloodingRainThreat], 'wfo'), ([ProposedSS,Hazards,InundationMax,InundationTiming,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], 'nc'),([ProposedSS,Hazards,InundationMax,InundationTiming,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], 'wfo')]
-
-#---------------------------------------------------------------------------
-#
-#  Misc. Configurations
-#
-#---------------------------------------------------------------------------
-# defines the number of days to keep log files
-LOG_FILE_PURGE_AFTER = 28
-
-# auto configure NotifyTextProd -- set after OB6
-AUTO_CONFIGURE_NOTIFYTEXTPROD = 1   #0=off,1=on
-
-
-#-----------------------------------
-# DO NOT CHANGE THE FOLLOWING SECTION
-#------------------------------------
-# import the local config file
-
-myOfficeType = SITES[GFESUITE_SITEID][5]
-
-AdditionalISCRouting = [
-   # Configure by adding entries to this list in the form of:
-   # ([WeatherElements],  ModelName, EditAreaPrefix)
-   # Example:
-   # ([Hazards, LAL, CWR], "ISCFire", "FireWxAOR_"),
-]
-
-#---------------------------------------------------------------------------
-# Parm groups.  Combine parms with time constraints
-# list of ([parms], timeConstraints)
-#---------------------------------------------------------------------------
-
-#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
-# There is nothing special about these variables. They are just used as a
-# convienence to set up multiple models in modelDict with the same parameter
-# set.  However, model parms are no longer as generic as they once were and
-# its just as easy to set the parms explicitly in modelDict.
-
-STD6_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC6),
-             ([Haines, MixHgt, FreeWind, TransWind, VentRate], TC6),
-             ([DSI, Stability, Ttrend, RHtrend], TC6),
-             ([SnowAmt, PoP, CWR], TC6NG), ([QPF, Weather, IceAcc, LAL], TC6NG),
-             ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24),
-             ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),
-             ([MaxT], MaxTTC), ([MinT], MinTTC),
-             ([Wetflag], FireWx1300TC)]
-
-# hourly
-STD1_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC1),
-             ([Haines, MixHgt, FreeWind, TransWind], TC1),
-             ([DSI, Stability, VentRate, Ttrend, RHtrend], TC1),
-             ([SnowAmt, PoP, CWR], TC1), ([QPF, Weather, IceAcc, LAL], TC1),
-             ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24),
-             ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),
-             ([MaxT], MaxTTC), ([MinT], MinTTC),
-             ([Wetflag], FireWx1300TC)]
-
-# 3 hourly
-STD3_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC3),
-             ([Haines, MixHgt, FreeWind, TransWind], TC3),
-             ([DSI, Stability, VentRate, Ttrend, RHtrend], TC3),
-             ([SnowAmt, PoP, CWR], TC3NG), ([QPF, IceAcc, Weather, LAL], TC3NG),
-             ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24),
-             ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),
-             ([MaxT], MaxTTC), ([MinT], MinTTC),
-             ([Wetflag], FireWx1300TC)]
-
-# Fcst and official database parameter groupings
-OFFICIALDBS = [([Temp, Td, Wind, Weather, Sky, FzLevel, SnowLevel], TC1),
-    ([HeatIndex, WindChill, RH, SnowAmt, CWR, QPF], TC1),
-    ([PoP, Ttrend, RHtrend, Wind20ft, WindGust], TC1),
-    ([MinT], MinTTC), ([MaxT], MaxTTC),
-    ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),
-    ([VentRate, LAL, Haines, MixHgt, FreeWind, TransWind], TC1),
-    ([DSI, Stability, MarineLayer], TC1),
-    ([HrsOfSun, InvBurnOffTemp], LT24),
-    ([IceAcc, IceCoverage, Hazards], TC1),
-    ([Wetflag], FireWx1300TC),
-    ([StormTotalSnow], TC1),
-        # Tropical parms
-    ([prob34, prob50, prob64,pws34,pws50,pws64,], TC1),
-    ([InundationMax,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], TC1),
-    ([ProposedSS,DiffSS,tempProposedSS,InitialSS], TC1),
-    ([WindThreat,StormSurgeThreat,FloodingRainThreat,TornadoThreat], TC1),
-    ([pwsD34,pwsD64], PWSDTC),
-    ([pwsN34,pwsN64], PWSNTC),
-    ([pws34int,pws64int,InundationTiming,QPFtoFFGRatio], TC6NG),
-    # DR20541 and 20482
-    ([PoP12hr], TC12NG),
-    ([QPF6hr, SnowAmt6hr], TC6NG),
-    ([cape], LT6NG),
-    ([ApparentT, HeatIndex, WindChill, LkSfcT, SnowMap, SnowRatio, StormTotalQPF], TC1),
-    ]
-
-## JCM Change wave and period (and swanswell) to TC1 for all marine sites
-if SID in groups['marineSites'] or SID in groups['GreatLake_SITES']:
-    OFFICIALDBS.append(([WaveHeight, PeakWaveDir, WindWaveHeight, SurfHeight, Swell, Swell2, Period, Period2], TC1))
-    OFFICIALDBS.append(([SwanSwell, Wave1, Wave2, Wave3, Wave4, Wave5, Wave6, Wave7, Wave8, Wave9,
-                         Period1, Period3, Period4, Period5, Period6, Period7, Period8, Period9], TC1))
-    OFFICIALDBS.append(([NWPSwind, UWaveDir, VWaveDir, WaveDir, RipProb, ErosionProb, OverwashProb],TC1))
-
-# NWPS
-nwpsCG1_MODEL = [([SwanSwell, Period, WaveHeight, PeakWaveDir, WindWaveHeight, Wind, RipProb, ErosionProb, OverwashProb], TC1)]
-nwpsTrkngCG0_MODEL = [([Wave1, Wave2, Wave3, Wave4, Wave5, Wave6, Wave7, Wave8, Wave9, Period1, Period2, Period3, Period4, Period5, Period6,Period7, Period8, Period9], TC1)]
-
-# OPC TAF parameters (for NW, SW, and E)
-OPCTAFBPARMS = [([WindWaveHeight, WaveHeight], TC1)]
-
-# SAT database parameter groupings
-SATPARMS = [([SatVisE, SatIR11E, SatIR13E, SatIR39E, SatWVE, SatFogE], TC_1M),
-            ([SatVisW, SatIR11W, SatIR13W, SatIR39W, SatWVW, SatFogW], TC_1M)]
-
-# RTMA database parameter groupings
-# DCS17288/DR17144
-if SID in groups['OCONUS_SITES']:
-    RTMAPARMS = [([Temp,Td,RH,Wind,Vis,Pressure,WindGust],TC1),
-             ([MinT],MinTTC), ([MaxT],MaxTTC),
-             ([MinRH],MinRHTC), ([MaxRH],MaxRHTC),
-             ([TUnc,TdUnc,WSpdUnc,WDirUnc,VisUnc,PressUnc,WGustUnc],TC1)]
-else:
-    RTMAPARMS = [([Temp,Td,RH,Wind,QPE,Sky,Vis,Pressure,WindGust],TC1),
-             ([MinT],MinTTC), ([MaxT],MaxTTC),
-             ([MinRH],MinRHTC), ([MaxRH],MaxRHTC),
-             ([TUnc,TdUnc,WSpdUnc,WDirUnc,VisUnc,PressUnc,WGustUnc,SkyUnc],TC1)]
-
-#---------------------------------------------------------------------------
-# Databases for a site.
-# list of (Database, [parms])
-# Official, Practice, TestFcst, Test are all set after Fcst is defined.
-#---------------------------------------------------------------------------
-
-# Intersite coordination database parameter groupings, based on
-# OFFICIALDBS, but time constraint is always TC1
-ISCPARMS = []
-if type(officeType) != str:
-    raise TypeError, "Office type not a str: " + `officeType`
-else:
-    if officeType not in VALID_OFFICE_TYPES:
-        raise ValueError, "Office type: " + str(officeType) + " does not match any of the following: [" + (', '.join(VALID_OFFICE_TYPES)) + "]"
-
-
-#
-# new parameters for NewTerrain
-#
-NewTopo     = ("NewTopo",     SCALAR, "ft", "New Topo",      50000.0, -32000.0, 1, NO)
-PrevTopo    = ("PrevTopo",    SCALAR, "ft", "Previous Topo", 50000.0, -32000.0, 1, NO)
-StdTopo     = ("StdTopo",     SCALAR, "ft", "Standard Topo", 50000.0, -32000.0, 1, NO)
-GTOPO       = ("GTOPO",       SCALAR, "ft", "GTOPO30",       50000.0, -32000.0, 1, NO)
-Topo        = ("Topo",        SCALAR, "ft", "Topography",    50000.0, -32000.0, 1, NO)
-
-# Add Topo to ISC parms for NewTerrain
-if type(REQUESTED_ISC_PARMS) is list and not "NewTopo" in REQUESTED_ISC_PARMS:
-    REQUESTED_ISC_PARMS.append("NewTopo")
-ISCPARMS.append(([NewTopo], Persistent))
-
-
-#---------------------------------------------------------------------------
-#
-#  General server configuration section
-#
-#---------------------------------------------------------------------------
-
-#----------------------------------------------------------------------------
-# Server settings     DO NOT CHANGE THESE DEFINITIONS
-#----------------------------------------------------------------------------
-from com.raytheon.edex.plugin.gfe.config import SimpleServerConfig
-IFPConfigServer = SimpleServerConfig()
-#IFPConfigServer.allowedNodes             = []
-IFPConfigServer.allowTopoBelowZero       = 1
-
-#------------------------------------------------------------------------------
-# serverConfig model configuration is now done in the modelDict dictionary.
-# variables D2DMODELS, D2DDBVERSIONS,D2DAccumulativeElements,INITMODULES,
-# INITSKIPS, DATABASES are no longer explicitly set and are not valid
-# to be referenced in localConfig.py.
-
-# WARNING: There can only be one version of a model in modelDict. Fcst,
-# practice and test databases have to be handled separately because there
-# are databases with the same name but different types.  This is ok
-# because these databases are defined after any localConfig customizations
-# of the normal Fcst database.
-
-# modelDict contains the following keys. Only define what is needed, i.e.,
-# it is not required to have every key defined
-#   "DB": Definition of the database, i.e., the first value in a dbs entry:
-#         ("wrfems", GRID, "", NO,  NO,  3, 0). This must be a tuple. The name
-#         in the DB entry must be the same as the model name used as the key
-#         into the modelDict variable.
-#
-#   "Parms" : Definition of the weather element parameters in the database,
-#         i.e., the second part of the dbs entry. This is a list of tuples.
-#
-#   "D2DMODELS" : D2D metadata database name for the source model.
-#
-#   "INITMODULES': Name of the SmartInit module. It is usually just the
-#         name as a string. If the init requires multiple models, use a tuple
-#         of ('smartInit name',[list of model names])
-#         'INITMODULES': ('Local_WPCGuide', ["HPCGuide","HPCERP","HPCWWD"]),
-#
-#   "D2DAccumulativeElements" : List of parm names that are accumulative
-#
-#   "D2DDBVERSIONS" : Number of versions of a D2D model to show in the Weather
-#         Element Browser. Defaults to 2 if not supplied.
-#
-#   "INITSKIPS" : Used to skip specific model cycles.
-#
-# Example for a model:
-#
-#   modelDict["CMCreg"]={
-#        "DB": ("CMCreg", "GRID", "", NO, NO, 2, 0),
-#        "Parms": [([Temp, Td, RH, Wind, WindGust, Sky, MixHgt, TransWind, QPF,
-#                    PoP, SnowAmt, SnowRatio], TC3),
-#                  ([PoP6, QPF6, QPF6hr, CQPF1],TC6NG),
-#                  ([QPF12, PoP12],TC12NG),
-#                  ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),
-#                  ([MaxT], MaxTTC), ([MinT], MinTTC),
-#                 ],
-#        "D2DMODELS": "Canadian-Reg",
-#        "INITMODULES": "Local_CMCreg",
-#        "D2DAccumulativeElements": ["tpgemreg","tprun","tp3hr","tp6hr"],
-#        "D2DDBVERSIONS": 3,
-#   }
-#
-
-# Official, Practice, TestFcst, Test, Restore are all derivations of Fcst and
-# are setup after localConfig is processed.
-modelDict['Fcst'] = {'DB': Fcst, 'Parms': OFFICIALDBS}
-
-# Model Databases
-waveParms=[Period, Period2, SurfHeight, Swell, Swell2, WaveHeight,
-           Wind, WindWaveHeight, ]
-
-modelDict['BaseTerrain'] = {
-            'DB': ('BaseTerrain', 'GRID', 'EditTopo', YES, NO, 1, 0),
-            'Parms': [([StdTopo, GTOPO, PrevTopo], Persistent),
-                     ],
-            }
-
-modelDict['CRMTopo'] = {
-            'D2DDBVERSIONS': 1}
-
-modelDict['ECMWFHiRes'] = {
-            'D2DMODELS': 'ECMWF-HiRes',}
-
-modelDict['ENPwave'] = {
-            'D2DMODELS': 'ENPWAVE253',
-            'DB': ('ENPwave', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [(waveParms, TC6),
-                     ],
-            }
-
-modelDict['ESTOFS'] = {
-            'D2DMODELS': 'estofsEP',
-            'DB': ('ESTOFS', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'ESTOFS',
-            'Parms': [([AstroTide, StormSurge], TC1),
-                     ],
-            }
-
-modelDict['ETSS'] = {
-            'D2DMODELS': 'ETSS',
-            'DB': ('ETSS', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'ETSS',
-            'Parms': [([StormSurge, SurgeTide], TC1),
-                     ],
-            }
-
-modelDict['ETSSHiRes'] = {
-            'D2DMODELS': 'ETSS-HiRes',
-            'DB': ('ETSSHiRes', 'GRID', '', NO, NO, 2, 0),
-            'INITMODULES': 'ETSSHiRes',
-            'Parms': [([AstroTide, SurgeTide], TC1),
-                     ],                        
-             }
-
-for s in ['ALR', 'FWR', 'KRF', 'MSR', 'ORN', 'PTR', 'RHA', 'RSA', 'STR', 'TAR',
-          'TIR', 'TUA',]:
-    modelDict['FFG'+s] = {'D2DMODELS': 'FFG-'+s}
-
-modelDict['GFS20'] = {
-            'D2DMODELS': 'GFS20',
-            'D2DAccumulativeElements': ['tp3hr','tp6hr', 'tp', 'cp', 'crain', 'csnow', 'cfrzr', 'cicep'],
-            'DB': ('GFS20', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [([Wetflag], FireWx1300TC),
-                     ([MaxRH], MaxRHTC),
-                     ([MaxT], MaxTTC),
-                     ([MinRH], MinRHTC),
-                     ([MinT], MinTTC),
-                     ([HrsOfSun, InvBurnOffTemp, MarineLayer], LT24),
-                     ([DSI, FreeWind, FzLevel, Haines, MixHgt, RH, RHtrend, Sky,
-                       SnowLevel, Stability, Td, Temp, TransWind, Ttrend, VentRate,
-                       Wind, Wind20ft], TC6),
-                     ([CWR, IceAcc, LAL, PoP, QPF, SnowAmt, Weather], TC6NG),
-                     ],
-            }
-
-modelDict['GFS80'] = {
-            'D2DAccumulativeElements': ['tp', 'cp'],
-            'D2DMODELS': 'AVN211',
-            'DB': ('GFS80', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'GFS80',
-            'Parms': STD6_MODEL,
-            }
-
-modelDict['GFSLAMPGrid'] = {
-            'D2DMODELS': 'GFSLAMPGrid',
-            'DB': ('GFSLAMPGrid', 'GRID', '', NO,  NO, 3, 0),
-            'INITMODULES': 'GFSLAMPGrid',
-            'Parms': [([CigHgt, Sky, Td, Temp, Vis, Wind], TC1),
-                     ],
-            }
-
-modelDict['GWW'] = {
-            'DB': ('GWW', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [(waveParms, TC6),
-                     ],
-            }
-
-modelDict['WaveWatch'] = {
-            'D2DMODELS': 'WaveWatch',}
-
-modelDict['GlobalWave'] = {
-            'D2DMODELS': 'GlobalWave',
-            'DB': ('GlobalWave', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [(waveParms, TC3),
-                     ],
-            }
-
-modelDict['HIRESWarw'] = {
-            'D2DAccumulativeElements': ['tp'],
-            'D2DMODELS': 'HiResW-ARW-West',
-            'DB': ('HIRESWarw', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'HIRESWarw',
-            'Parms': STD3_MODEL,
-            }
-
-modelDict['HIRESWnmm'] = {
-            'D2DAccumulativeElements': ['tp'],
-            'D2DMODELS': 'HiResW-NMM-West',
-            'DB': ('HIRESWnmm', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'HIRESWnmm',
-            'Parms': STD3_MODEL,
-            }
-
-modelDict['HPCERP'] = {
-            'D2DAccumulativeElements': ['tpHPCndfd'],
-            'D2DDBVERSIONS': 24,
-            'D2DMODELS': 'HPCqpfNDFD',}
-
-modelDict['HPCGRID'] = {
-            'DB': ('HPCGRID', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [([PoP, SnowAmt], LTMOS),
-                     ([MaxT], MaxTTCMOS),
-                     ([MinT], MinTTCMOS),
-                     ([Sky, Td, Temp, Weather, Wind], TC1),
-                     ([QPF], TC6NG),
-                     ],
-            }
-
-modelDict['HPCGuide'] = {
-            'D2DAccumulativeElements': ['pop'],
-            'D2DMODELS': 'HPCGuide',
-            'DB': ('HPCGuide', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'HPCGuide',
-            'Parms': [([MaxT], MaxTTC),
-                     ([MinT], MinTTC),
-                     ([PoP], TC12NG),
-                     ([Sky, Td, Wind], TC6),
-                     ],
-            }
-
-modelDict['HPCQPF'] = {
-            'D2DAccumulativeElements': ['tpHPC'],
-            'D2DMODELS': 'HPCqpf',
-            'DB': ('HPCQPF', 'GRID', '', NO,  NO, 4, 0),
-            'INITMODULES': 'HPCQPF',
-            'Parms': [([QPF], TC6NG),
-                     ],
-            }
-
-modelDict['HRRR'] = {
-            'D2DAccumulativeElements': ['tp', 'crain', 'csnow', 'cfrzr', 'cicep'],
-            'D2DMODELS': 'HRRR',
-            'DB': ('HRRR', 'GRID', '', NO,  NO, 3, 0),
-            'INITMODULES': 'HRRR',
-            'Parms': [([QPF, RH, Sky, Td, Temp, Wind, WindGust], TC1),
-                     ],
-            }
-
-modelDict['HWRF'] = {
-            'D2DAccumulativeElements': ['tp', 'cp'],
-            'D2DMODELS': 'HWRF',}
-
-modelDict['LAPS'] = {
-            'D2DAccumulativeElements': ['pc'],
-            'D2DDBVERSIONS': 6,
-            'D2DMODELS': 'LAPS',
-            'DB': ('LAPS', 'GRID', '', YES, NO, 1, 30),
-            'INITMODULES': 'LAPS',
-            'Parms': [([QPF, Radar, Sky, SnowAmt, Td, Temp, Weather, Wind], TC1),
-                     ],
-            }
-
-modelDict['MOSGuide'] = {
-            'D2DAccumulativeElements': ['pop12hr', 'pop6hr', 'thp12hr', 'thp3hr',
-                                       'thp6hr', 'tcc', 'tp6hr', 'tp12hr', 'wgs'],
-            'D2DMODELS': 'MOSGuide',
-            'DB': ('MOSGuide', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'MOSGuide',
-            'Parms': [([MaxT], MaxTTC),
-                     ([MinT], MinTTC),
-                     ([RH, Td, Temp, Wind], TC1),
-                     ([PoP, PoP12, QPF, QPF12, TstmPrb12], TC12NG),
-                     ([TstmPrb3], TC3NG),
-                     ([PoP6, QPF6, Sky, TstmPrb6, WindGust], TC6NG),
-                     ],
-            }
-
-modelDict['MSAS'] = {
-            'D2DAccumulativeElements': ['tp', 'cp'],
-            'D2DDBVERSIONS': 6,
-            'D2DMODELS': 'MSAS',
-            'DB': ('MSAS', 'GRID', '', YES, NO, 1, 36),
-            'INITMODULES': 'MSAS',
-            'Parms': [([Td, Temp, Wind], TC1),
-                     ],
-            }
-
-modelDict['NAHwave4'] = {
-            'D2DMODELS': 'NAHwave4',}
-
-modelDict['NAM12'] = {
-            'D2DAccumulativeElements': ['tp', 'cp', 'crain', 'csnow', 'cfrzr', 'cicep'],
-            'D2DMODELS': 'NAM12',
-            'DB': ('NAM12', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'NAM12',
-            'Parms': STD3_MODEL,
-            }
-
-modelDict['NAM20'] = {
-            'D2DAccumulativeElements': ['tp', 'cp'],
-            'D2DMODELS': 'NAM20',}
-
-modelDict['NAM40'] = {
-            'D2DAccumulativeElements': ['tp', 'cp'],
-            'D2DMODELS': 'NAM40',
-            'DB': ('NAM40', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': STD3_MODEL,
-            }
-
-modelDict['NAM80'] = {
-            'D2DAccumulativeElements': ['tp', 'cp'],
-            'D2DMODELS': 'ETA',
-            'DB': ('NAM80', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': STD6_MODEL,
-            }
-
-modelDict['NED'] = {
-            'D2DDBVERSIONS': 1}
-
-modelDict['NamDNG'] = {
-            'D2DMODELS': 'namdng25',
-            'DB': ('NamDNG', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'NamDNG',
-            'Parms': [([MaxRH], MaxRHTC),
-                     ([MaxT], MaxTTC),
-                     ([MinRH], MinRHTC),
-                     ([MinT], MinTTC),
-                     ([PoP12, QPF12], TC12NG),
-                     ([MixHgt, RH, Sky, SnowLevel, Td, Temp, TransWind, Vis,
-                       Wind, WindGust], TC3),
-                     ([MaxRH3, MaxT3, MinT3, PoP, QPF3, SnowAmt], TC3NG),
-                     ([PoP6, QPF6, SnowAmt6], TC6NG),
-                     ],
-            }
-
-modelDict['NationalBlend'] = {
-            'D2DAccumulativeElements': ["pop12hr", "pop", "pop6hr", "tp", "ppi1hr", "ppi6hr",
-                                        "tp1hr", "tp6hr", "thp3hr", "thp6hr",
-                                        "totsn1hr", "totsn6hr", "ficeac1hr", "ficeac6hr"],
-            'D2DMODELS': 'NationalBlend',
-            'DB': ('NationalBlend', 'GRID', '', NO,  NO, 7, 0),
-            'INITMODULES': 'NationalBlend',
-            'Parms': [([Temp, Td, RH, Sky, Wind, WindGust, ApparentT], TC1),
-                     ([QPF1,PPI01,CloudBasePrimary,Ceiling,Visibility],TC1),
-                     ([PoTIP, PoTR, PoTRW, PoTS, PoTSW, PoTZR,],TC1),
-                     ([SnowLevel,MaxTwAloft,ProbIcePresent, ProbRefreezeSleet,SnowRatio],TC1),
-                     ([PositiveEnergyAloft, NegativeEnergyLowLevel],TC1),
-                     ([MixHgt, TransWind, LLWS, VentRate, LLWSHgt, Radar,
-                       SigWaveHgt, Weather, Haines, FosBerg,
-                       SnowAmt01, IceAccum01, TstmPrb1],TC1),
-                     ([TstmPrb3, DryTstmPrb],TC3NG),
-                     ([TstmPrb6, QPF, PoP6, PPI06, SnowAmt, IceAccum,
-                       QPF10Prcntl, QPF50Prcntl, QPF90Prcntl],TC6NG),
-                     ([MaxT], MaxTTC), ([MinT], MinTTC),
-                     ([MaxRH], MaxRHTC), ([MinRH], MinRHTC),([PoP, TstmPrb12],TC12NG),
-                     ],
-            }
-
-modelDict['NationalBlendOC'] = {
-            'D2DMODELS': 'NationalBlendOC',
-            'DB': ('NationalBlend', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'NationalBlendOC',
-            'Parms': [([WGS50pct, WS50Prcntl30m, WS50Prcntl80m, Vis50pct, T50pct,
-                       PMSL10pct, PMSL50pct, PMSL90pct], TC1),
-                     ],
-            }
-
-modelDict['NewTerrain'] = {
-            'DB': ('NewTerrain', 'GRID', 'EditTopo', YES, NO, 1, 0),
-            'Parms': [([NewTopo], Persistent),
-                     ],
-            }
-
-modelDict['PWPF'] = {
-            'D2DMODELS': 'PWPF',}
-
-modelDict['RFCQPF'] = {
-            'D2DMODELS': 'RFCqpf',
-            'DB': ('RFCQPF', 'GRID', '', NO,  NO, 4, 0),
-            'Parms': [([QPF], TC6NG),
-                     ],
-            }
-
-modelDict['RTMA'] = {
-            'D2DAccumulativeElements': ['tp'],
-            'D2DMODELS': 'RTMA25',
-            'DB': ('RTMA', 'GRID', '', YES, NO, 1, 36),
-            'INITMODULES': 'RTMA',
-            'Parms': RTMAPARMS,
-            }
-
-modelDict['RAP13'] = {
-            'D2DAccumulativeElements': ['tp', 'cp'],
-            'D2DMODELS': 'RAP13',
-            'DB': ('RAP13', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'RAP13',
-            'INITSKIPS': [1, 2, 4, 5, 7, 8, 10, 11, 13, 14, 16, 17, 19, 20, 22, 23],
-            'Parms': STD1_MODEL,
-            }
-
-modelDict['SAT'] = {
-            'DB': ('SAT', 'GRID', '', YES, NO, 1, 12),
-            'Parms': [([SatFogE, SatFogW, SatIR11E, SatIR11W, SatIR13E, SatIR13W,
-                        SatIR39E, SatIR39W, SatVisE, SatVisW, SatWVE, SatWVW],
-                        TC_1M),
-                     ],
-            }
-
-modelDict['SPC'] = {
-            'D2DDBVERSIONS': 8, 'D2DMODELS': 'SPCGuide',}
-
-modelDict['SREF'] = {
-            'D2DMODELS': 'SREF212',
-            'DB': ('SREF', 'GRID', '', NO,  NO, 3, 0),
-            'INITMODULES': 'SREF',
-            'Parms': [([Td, Temp, Wind], TC1),
-                     ],
-            }
-
-modelDict['Satellite'] = {
-            'D2DDBVERSIONS': 6,}
-# Turn on satellite smartInit only if SATDATA has some entries.
-if SATDATA:
-    modelDict['Satellite']['INITMODULES'] = 'SAT'
-
-modelDict['TPCProb'] = {
-            'D2DDBVERSIONS': 30,
-            'D2DMODELS': 'TPCWindProb',
-            'DB': ('TPCProb', 'GRID', '', NO,  NO, 30, 0),
-            'Parms': [([pwsD34, pwsD64], PWSDTC),
-                     ([pwsN34, pwsN64], PWSNTC),
-                     ([prob34, prob50, prob64, pws34, pws50, pws64], TC1),
-                     ],
-            }
-
-modelDict['TPCProbPrelim'] = {
-            'D2DDBVERSIONS': 30,
-            'D2DMODELS': 'TPCWindProb_Prelim',
-            'DB': ('TPCProbPrelim', 'GRID', '', NO,  NO, 30, 0),
-            'Parms': [([pwsD34, pwsD64], PWSDTC),
-                     ([pwsN34, pwsN64], PWSNTC),
-                     ([prob34, prob50, prob64, pws34, pws50, pws64], TC1),
-                     ],
-            }
-
-modelDict['TPCStormSurge'] = {
-            'D2DDBVERSIONS': 1}
-
-modelDict['TPCSurgeProb'] = {
-            'D2DMODELS': 'TPCSurgeProb',
-            'D2DAccumulativeElements': [
-                'Surge10Pct',
-                'Surge20Pct',
-                'Surge30Pct',
-                'Surge40Pct',
-                'Surge50Pct',
-                'Surge90Pct',
-                'PSurge25Ft',
-                'PSurge24Ft',
-                'PSurge23Ft',
-                'PSurge22Ft',
-                'PSurge21Ft',
-                'PSurge20Ft',
-                'PSurge19Ft',
-                'PSurge18Ft',
-                'PSurge17Ft',
-                'PSurge16Ft',
-                'PSurge15Ft',
-                'PSurge14Ft',
-                'PSurge13Ft',
-                'PSurge12Ft',
-                'PSurge11Ft',
-                'PSurge10Ft',
-                'PSurge9Ft',
-                'PSurge8Ft',
-                'PSurge7Ft',
-                'PSurge6Ft',
-                'PSurge5Ft',
-                'PSurge4Ft',
-                'PSurge3Ft',
-                'PSurge2Ft',
-                'PSurge1Ft',
-                'PSurge0Ft',
-                'Surge10Pctincr',
-                'Surge20Pctincr',
-                'Surge30Pctincr',
-                'Surge40Pctincr',
-                'Surge50Pctincr',
-                'Surge90Pctincr',
-                'PSurge20Ftincr',
-                'PSurge19Ftincr',
-                'PSurge18Ftincr',
-                'PSurge17Ftincr',
-                'PSurge16Ftincr',
-                'PSurge15Ftincr',
-                'PSurge14Ftincr',
-                'PSurge13Ftincr',
-                'PSurge12Ftincr',
-                'PSurge11Ftincr',
-                'PSurge10Ftincr',
-                'PSurge9Ftincr',
-                'PSurge8Ftincr',
-                'PSurge7Ftincr',
-                'PSurge6Ftincr',
-                'PSurge5Ftincr',
-                'PSurge4Ftincr',
-                'PSurge3Ftincr',
-                'PSurge2Ftincr',
-                'PSurge1Ftincr',
-                'PSurge0Ftincr',
-            ],
-        }
-
-modelDict['PETSS'] = {
-            'D2DMODELS': 'P-ETSS',
-            'D2DAccumulativeElements': [
-                'Surge10Pct',
-                'Surge20Pct',
-                'Surge30Pct',
-                'Surge40Pct',
-                'Surge50Pct',
-                'Surge90Pct',
-                'Surge10Pctincr',
-                'Surge20Pctincr',
-                'Surge30Pctincr',
-                'Surge40Pctincr',
-                'Surge50Pctincr',
-                'Surge90Pctincr',
-                'PSurge0Ftincr',
-                'PSurge1Ftincr',
-                'PSurge2Ftincr',
-                'PSurge3Ftincr',
-                'PSurge4Ftincr',
-                'PSurge5Ftincr',
-                'PSurge6Ftincr',
-                'PSurge7Ftincr',
-                'PSurge8Ftincr',
-                'PSurge9Ftincr',
-                'PSurge10Ftincr',
-                'PSurge13Ftincr',
-                'PSurge16Ftincr',
-                'PSurge0Ft',
-                'PSurge1Ft',
-                'PSurge2Ft',
-                'PSurge3Ft',
-                'PSurge4Ft',
-                'PSurge5Ft',
-                'PSurge6Ft',
-                'PSurge7Ft',
-                'PSurge8Ft',
-                'PSurge9Ft',
-                'PSurge10Ft',
-                'PSurge13Ft',
-                'PSurge16Ft',
-                'PSurgeMaxincr',
-                'PSurgeMeanincr',
-                'PSurgeMinincr',
-                'PSurgeMax',
-                'PSurgeMean',
-                'PSurgeMin',
-            ],
-        }
-
-modelDict['TPCtcm'] = {
-            'DB': ('TPCtcm', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [([HiWind], TC3),
-                     ],
-            }
-
-modelDict['URMA25'] = {
-            'D2DAccumulativeElements': ['tp'],
-            'D2DMODELS': 'URMA25',
-            'DB': ('URMA25', 'GRID', '', YES, NO, 1, 36),
-            'INITMODULES': 'URMA25',
-            'Parms': [([MaxRH], MaxRHTC),
-                     ([MaxT], MaxTTC),
-                     ([MinRH], MinRHTC),
-                     ([MinT], MinTTC),
-                     ([PressUnc, Pressure, QPE, RH, Sky, SkyUnc, TUnc, Td, TdUnc,
-                        Temp, Vis, VisUnc, WDirUnc, WGustUnc, WSpdUnc, Wind,
-                        WindGust], TC1),
-                     ],
-            }
-
-modelDict['WCwave10'] = {
-            'D2DMODELS': 'WCwave10',
-            'DB': ('WCwave10', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [(waveParms, TC3),
-                     ],
-            }
-
-modelDict['WCwave4'] = {
-            'D2DMODELS': 'WCwave4',
-            'DB': ('WCwave4', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [(waveParms, TC3),
-                     ],
-            }
-
-modelDict['WNAWAVE'] = {
-            'DB': ('WNAWAVE', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [(waveParms, TC6),
-                     ],
-            }
-
-modelDict['WNAWAVE238'] = {
-            'D2DMODELS': 'WNAWAVE238',}
-
-modelDict['WNAwave10'] = {
-            'D2DMODELS': 'WNAwave10',
-            'DB': ('WNAwave10', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [(waveParms, TC3),
-                     ],
-            }
-
-modelDict['WNAwave4'] = {
-            'D2DMODELS': 'WNAwave4',
-            'DB': ('WNAwave4', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [(waveParms, TC3),
-                     ],
-            }
-
-# This list will be used to set up a default ignoreDatabases list. This is shorter than
-# listing all models to ignore.
-includeOnly=[]
-if SID in groups['ALASKA_SITES']:
-    modelDict['AKwave4'] = {
-            'D2DMODELS': 'AKwave4',
-            'D2DDBVERSIONS': 2,
-            'DB': ('AKwave4', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [([Period, Period2, Swell, Swell2, WaveHeight, Wind, 
-                       WindWaveHgt, WindWavePeriod], TC3),
-                     ],
-            }
-
-    modelDict['AKwave10'] = {
-            'D2DMODELS': 'AKwave10',
-            'D2DDBVERSIONS': 2,
-            'DB': ('AKwave10', 'GRID', '', NO,  NO, 2, 0),
-            'Parms': [([Period, Period2, Swell, Swell2, WaveHeight, Wind, 
-                        WindWaveHgt, WindWavePeriod], TC3),
-                     ],
-            }
-    
-    updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsAK')
-    updateModelDict(modelDict,'ETSS','D2DMODELS', 'ETSS-AK')
-    updateModelDict(modelDict,'GFS20','D2DMODELS', 'AK-GFS22')
-    updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-AK')
-    updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-AK')
-    updateModelDict(modelDict,'MOSGuide','D2DMODELS', 'MOSGuide-AK')
-    updateModelDict(modelDict,'NAM12','D2DMODELS', 'AK-NAM11')
-    updateModelDict(modelDict,'NamDNG','D2DMODELS', 'AK-NamDNG3')
-    updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendAK')
-    updateModelDict(modelDict,'RTMA','D2DMODELS', 'AK-RTMA3')
-    updateModelDict(modelDict,'SREF','D2DMODELS', 'SREF216')
-    updateModelDict(modelDict,'URMA','D2DMODELS', 'AK-URMA')
-    updateModelDict(modelDict,'RTOFS-Alaska','D2DMODELS', 'RTOFS-Alaska')
-    updateModelDict(modelDict,'RTOFS-Alaska','D2DMODELS', 'RTOFS-Alaska')
-    updateModelDict(modelDict,'RTOFS-Arctic','D2DMODELS', 'RTOFS-Arctic')
-    updateModelDict(modelDict,'RTOFS-Bering','D2DMODELS', 'RTOFS-Bering')
-    updateModelDict(modelDict,'RTOFS-GulfAlaska','D2DMODELS', 'RTOFS-GulfAlaska')
-    updateModelDict(modelDict,'PETSS','D2DMODELS', 'P-ETSS-AK')
-    # Model databases for Alaska
-    includeOnly = ['AKwave4', 'AKwave10', 'BaseTerrain', 'CRMTopo', 'ECMWFHiRes', 'ESTOFS', 
-                   'ETSS',  'GFS20',  'GWW', 'HIRESWarw', 'HIRESWnmm', 'MOSGuide', 'NAM12', 
-                   'NamDNG', 'NationalBlend', 'NED', 'NewTerrain', 'RTMA', 'RTOFS-Alaska', 
-                   'RTOFS-Arctic', 'RTOFS-Bering', 'RTOFS-GulfAlaska', 'SAT', 'SREF', 'URMA',
-                   'nwpsCG1AER', 'nwpsCG1AFG', 'nwpsCG1AJK', 'nwpsCG1ALU', 'nwpsTrkngCG0AER', 
-                   'nwpsTrkngCG0AFG', 'nwpsTrkngCG0AJK', 'nwpsTrkngCG0ALU', 'PETSS',
-                  ]
-
-# Hawaii OCONUS
-elif SID == "HFO":
-    modelDict['GFS75'] = {
-            'D2DMODELS': 'AVN225',
-            'D2DAccumulativeElements': ['tp', 'cp'],
-            'DB': ('GFS75', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'GFS75',
-            'Parms': STD6_MODEL,
-            }
-
-    updateModelDict(modelDict,'WaveWatch','D2DMODELS', 'WaveWatch')
-    updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave')
-    updateModelDict(modelDict,'RTMA','D2DMODELS', 'HI-RTMA')
-    updateModelDict(modelDict,'NamDNG','D2DMODELS', 'HI-NamDNG5')
-    updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-HI')
-    updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-HI')
-    updateModelDict(modelDict,'SPC','D2DMODELS', 'SPCGuide')
-    updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb')
-    updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim')
-    updateModelDict(modelDict,'ECMWFHiRes','D2DMODELS', 'ECMWF-HiRes')
-    updateModelDict(modelDict,'RTOFS-Honolulu','D2DMODELS', 'RTOFS-Honolulu')
-    updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsHI')
-    updateModelDict(modelDict,'MOSGuide','D2DMODELS', 'MOSGuide-HI')
-    updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendHI')
-    # Model databases for HFO
-    includeOnly = ['ECMWFHiRes', 'ESTOFS', 'GFS75', 'WaveWatch', 'GlobalWave',
-                   'HIRESWarw', 'HIRESWnmm', 'MOSGuide', 'NamDNG', 'NationalBlend',
-                   'RTMA', 'RTOFS-Honolulu', 'SPC', 'TPCProb', 'TPCProbPrelim', 'nwpsCG1GUM',
-                   'nwpsCG1HFO', 'nwpsTrkngCG0GUM', 'nwpsTrkngCG0HFO',
-                  ]
-
-# Guam OCONUS
-elif SID == "GUM":
-    modelDict['GFS75'] = {
-            'D2DMODELS': 'AVN225',
-            'D2DAccumulativeElements': ['tp', 'cp'],
-            'DB': ('GFS75', 'GRID', '', NO,  NO, 2, 0),
-            'INITMODULES': 'GFS75',
-            'Parms': STD6_MODEL,
-            }
-
-    updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave')
-    updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb')
-    updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim')
-    updateModelDict(modelDict,'RTOFS-Guam','D2DMODELS', 'RTOFS-Guam')
-    updateModelDict(modelDict,'RTMA','D2DMODELS', 'Guam-RTMA')
-    # Model databases for GUM
-    includeOnly = ['GFS75', 'GlobalWave', 'RTMA', 'RTOFS-Guam', 'TPCProb',
-                   'TPCProbPrelim', 'nwpsCG1GUM', 'nwpsCG1HFO',
-                   'nwpsTrkngCG0GUM', 'nwpsTrkngCG0HFO',
-                  ]
-
-# San Juan OCONUS
-elif SID == "SJU":
-    updateModelDict(modelDict,'GFS80','D2DMODELS', 'AVN211')
-    updateModelDict(modelDict,'NAM80','D2DMODELS', 'ETA')
-    updateModelDict(modelDict,'WaveWatch','D2DMODELS', 'WaveWatch')
-    updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave')
-    updateModelDict(modelDict,'WNAwave10','D2DMODELS', 'WNAwave10')
-    updateModelDict(modelDict,'WNAwave4','D2DMODELS', 'WNAwave4')
-    updateModelDict(modelDict,'RTMA','D2DMODELS', 'PR-RTMA')
-    updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-SJU')
-    updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-SJU')
-    updateModelDict(modelDict,'SPC','D2DMODELS', 'SPCGuide')
-    updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb')
-    updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim')
-    updateModelDict(modelDict,'ECMWFHiRes','D2DMODELS', 'ECMWF-HiRes')
-    updateModelDict(modelDict,'RTOFS-Atlantic','D2DMODELS', 'RTOFS-Atlantic')
-    updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsPR')
-    updateModelDict(modelDict,'NAHwave4','D2DMODELS', 'NAHwave4')
-    updateModelDict(modelDict,'GFS20','D2DMODELS', 'PR-GFS')
-    updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendPR')
-    # Model databases for SJU
-    includeOnly = ['ECMWFHiRes', 'ESTOFS', 'GFS20', 'GFS80', 'WaveWatch',
-                   'GlobalWave', 'HIRESWarw', 'HIRESWnmm', 'NAHwave4', 'NAM80',
-                   'NationalBlend', 'RTMA', 'RTOFS-Atlantic', 'SPC', 'TPCProb',
-                   'TPCProbPrelim', 'WNAwave10', 'WNAwave4',
-                   'nwpsCG1JAX', 'nwpsCG1KEY', 'nwpsCG1MFL', 'nwpsCG1MLB', 'nwpsCG1SJU',
-                   'nwpsTrkngCG0JAX', 'nwpsTrkngCG0KEY', 'nwpsTrkngCG0MFL',
-                   'nwpsTrkngCG0MLB', 'nwpsTrkngCG0SJU',
-                  ]
-
-# East CONUS changes from default modelDict
-elif SID in groups['CONUS_EAST_SITES']:
-    updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsUS')
-    updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-East')
-    updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-East')
-
-if SID in groups['GreatLake_SITES']:
-    modelDict['GLERL'] = {
-            'D2DMODELS': 'GLERL',
-            'DB': ('GLERL', 'GRID', '', 0, 0, 2, 0),
-            'Parms': [([Period, Swell, WaveHeight], TC1),
-                     ]
-            }
-
-    modelDict['GLWN'] = {'D2DMODELS': 'GLWN'}
-
-# NWPS configuration.
-if SID in ['AFC', 'AER', 'AFG', 'AJK', 'ALU', 'AVAK']:
-    nwpsSites = ['AER', 'AFG', 'AJK', 'ALU',]
-elif SID in ['GUM', 'HFO',]:
-    nwpsSites = ['GUM', 'HFO',]
-elif SID == "SJU":
-    nwpsSites = ['SJU', 'MFL', 'KEY', 'MLB', 'JAX']
-elif SID in ['CAR', 'GYX', 'BOX', 'OKX', 'PHI', 'LWX', 'AKQ', 'MHX', 'ILM', 'CHS',
-             'BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'KEY', 'MFL',
-             'MLB', 'JAX',]:
-     nwpsSites = ['CAR', 'GYX', 'BOX', 'OKX', 'PHI', 'LWX', 'AKQ', 'MHX', 'ILM', 'CHS',
-                  'BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'KEY', 'MFL',
-                  'MLB', 'JAX', 'SJU',]
-elif SID in ['SEW', 'PQR', 'MFR', 'EKA', 'MTR', 'LOX', 'SGX',]:
-    nwpsSites = ['SEW', 'PQR', 'MFR', 'EKA', 'MTR', 'LOX', 'SGX',]
-else:
-    nwpsSites = []
-
-for s in nwpsSites:
-    name='nwpsCG1%s' % s
-    modelDict[name] = {
-            'DB': (name, 'GRID', '', NO,  NO, 2, 0),
-            'D2DMODELS': name,
-            'INITMODULES': name,
-            'Parms': nwpsCG1_MODEL,
-            }
-    name='nwpsTrkngCG0%s' % s
-    modelDict[name] = {
-            'DB': (name, 'GRID', '', NO,  NO, 2, 0),
-            'D2DMODELS': name,
-            'INITMODULES': name,
-            'Parms': nwpsTrkngCG0_MODEL,
-            }
-# This list will be used to set up a default ignoreDatabases list. This is shorter than
-# listing all models to ignore. Usually only set up for sites that aren't CONUS WFOs
-# includeOnly is not designed to be changed by localConfig.
-if includeOnly:
-    for m in sorted(modelDict.keys()):
-        if m not in includeOnly and 'D2DMODELS' in modelDict[m]:
-            ignoreDatabases.append(m)
-
-# END modelDict initial set up
-#------------------------------------------------------------------------------
-# Add in optional parms to Fcst parm def
-if SID in groups['powt']:
-    addPowt(modelDict)
-
-if SID in groups['winterProbs']:
-    addWinterWeatherProbs(modelDict)
-
-if SID in groups['rainfallProbs']:
-    addRainfallProbs(modelDict)
-
-D2DMODELS=[]
-D2DDBVERSIONS={}
-D2DAccumulativeElements={}
-INITMODULES={}
-INITSKIPS={}
-
-localParms = []
-localISCParms = []
-localISCExtraParms = []
-localLogFile = ''
-
-if not BASELINE and siteImport('localConfig'):
-    localParms = getattr(localConfig, 'parms', [])
-    localISCParms = getattr(localConfig, 'parmsISC', [])
-    localISCExtraParms = getattr(localConfig, 'extraISCparms', [])
-    localLogFile = getattr(localConfig, 'logFile', '')
-    modelDict['Fcst']['Parms'] += localParms
-    #ensure office type is set properly in localConfig SITES[]
-    if len(SITES[GFESUITE_SITEID]) == 5:
-        a = list(SITES[GFESUITE_SITEID])
-        a.append(myOfficeType)
-        SITES[GFESUITE_SITEID] = tuple(a)
-    else:
-        myOfficeType = SITES[GFESUITE_SITEID][5]  #probably from localConfig
-
-# Instantiate settings from modelDict
-db=dbConfig(modelDict)
-db.addConfiguredModels(ignoreDatabases)
-DATABASES = db.dbs
-D2DMODELS = db.D2DMODELS
-D2DDBVERSIONS = db.D2DDBVERSIONS
-D2DAccumulativeElements = db.D2DAccumulativeElements
-INITMODULES = db.INITMODULES
-INITSKIPS = db.INITSKIPS
-OFFICIALDBS=list(modelDict['Fcst']['Parms'])
-
-# Create Practice and test databases from Fcst
-DATABASES.append((Official, modelDict['Fcst']['Parms'])),
-DATABASES.append((Practice, modelDict['Fcst']['Parms'])),
-DATABASES.append((TestFcst, modelDict['Fcst']['Parms'])),
-DATABASES.append((Test, modelDict['Fcst']['Parms'])),
-
-for entry in AdditionalISCRouting:
-    (parmList, dbName, editAreaPrefix) = entry
-    parmList = list(parmList)
-    addedIscDbDefinition = (dbName, ) + ISC[1:]
-    addedIscParms = [(parmList, TC1)]
-    DATABASES.append((addedIscDbDefinition, addedIscParms))
-
-# Intersite coordination database parameter groupings, based on
-# OFFICIALDBS, but time constraint is always TC1
-for wes, tc in (OFFICIALDBS + localISCParms):
-    ISCPARMS.append((wes, TC1))
-
-# We also add in any extraISCparms as needed, but only for office
-# types other than our own.
-for wes, officeType in (EXTRA_ISC_PARMS + localISCExtraParms):
-    if myOfficeType == officeType:
-        continue
-    if type(officeType) != str:
-        raise TypeError, "Office type not a str: " + `officeType`
-    else:
-        if officeType not in VALID_OFFICE_TYPES:
-            raise ValueError, "Office type: " + str(officeType) + " does not match any of the following: [" + (', '.join(VALID_OFFICE_TYPES)) + "]"
-    for we in wes:
-        wecopy = list(we)
-        wecopy[0] = wecopy[0] + officeType  #rename the weather element
-        wecopy = tuple(wecopy)
-        ISCPARMS.append(([wecopy], TC1))
-
-# Restore database parameter groupings (based on OFFICIALDBS, but TC1)
-RESTOREPARMS = []
-for wes, tc in modelDict['Fcst']['Parms']:
-    RESTOREPARMS.append((wes, TC1))
-
-# Now add the ISC and Restore databases to the DATABASES groupings
-DATABASES.append((Restore, RESTOREPARMS))
-DATABASES.append((ISC, ISCPARMS))
-
-
-#D logfp=open('/localapps/logs/serverConfig2.log','w')
-#D logfp.write('DATABASE names:\n')
-#D for m in sorted(DATABASES):
-#D     logfp.write('%s\n' % m[0][0])
-#D logfp.write('\n\nDATABASES\n')
-#D pprint.pprint(sorted(DATABASES),logfp,width=130)
-#D logfp.write('\n\nINITMODULES\n')
-#D pprint.pprint(INITMODULES,logfp,width=130)
-#D logfp.write('\n\nD2DMODELS\n')
-#D pprint.pprint(D2DMODELS,logfp,width=130)
-#D logfp.close()
-
-doIt()
-
-#D logfp=open('/localapps/logs/SC_MD2.py','w')
-#D modelDict=createModelDict(locals(),DATABASES,D2DMODELS,D2DDBVERSIONS,D2DAccumulativeElements,
-#D                   INITMODULES,INITSKIPS,logfp)
-#D logfp.close()
-if localLogFile:
-    printServerConfig(sys.modules[__name__],vars(localConfig),localLogFile)
-#D scfp.close()
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+#

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+#

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+#

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+# serverConfig -- base GFE server configuration file

+#

+# NOTE: THIS FILE SHOULD NOT BE USER-MODIFIED.  INSTEAD REFER TO THE

+# LOCAL CONFIG DOCUMENTATION ON HOW TO OVERRIDE SETTINGS IN THIS FILE.

+#

+# Baseline GFE server configuration

+#

+# ----------------------------------------------------------------------------

+#

+#     SOFTWARE HISTORY

+#

+#    Date            Ticket#       Engineer       Description

+#    ------------    ----------    -----------    --------------------------

+#    08/09/2013          #1571     randerso       Changed projections to use the Java

+#                                                 ProjectionType enumeration

+#    10/03/2013          #2418     dgilling       Update for new pSurge 2.0 data.

+#    10/03/2013          #2424     randerso       Change localTC to use dateutil instead of pytz

+#                                                 to get correct offsets for Alaska

+#    01/17/2014          #2719     randerso       Added NHA domain

+#    02/20/2014          #2824     randerso       Added log message when local override files are not found

+#    03/11/2014          #2897     dgilling       Add new MHWM databases to default configuration.

+#    03/20/2014          #2418     dgilling       Remove unneeded D2D source PHISH.

+#    04/17/2014          #2934     dgilling       Remove alias for TPCSurgeProb D2D database.

+#    05/09/2014          #3148     randerso       Add tpHPCndfd to D2DAccumulativeElements for HPCERP

+#    06/20/2014          #3230     rferrel        Added URMA25.

+#    05/29/2014          #3224     randerso       Added "SPC":8 to D2DDBVERSIONS

+#    07/09/2014          #3146     randerso       Removed unused import

+#    07/10/2014                    swhite         Add surge and tropical threat WEs and their dependencies

+#    01/08/2015          #15035    lshi           add site WNJ

+#    12/03/2014          #3866     rferrel        Added GFS20

+#    01/13/2015          #3955     randerso       Added definitions for NewTerrain database

+#                                                 Added Topo to ISCPARMS

+#    01/19/2015          #4014     dgilling       Added ETSS.

+#    02/11/2015          #4053     rferrel        Added GLWN and moved GLERL to display only for Great Lakes sites..

+#    01/19/2015          #4014     dgilling       Added ETSS.

+#    02/24/2015          #16692    byin           Added RTMA. Removed gfsLR and WaveWatch

+#    03/19/2015          #4300     randerso       Remove GUMa as it is obsolete (per Shannon White)

+#    03/30/2015          #17288    bhunder        Added Guam-RTMA to D2D models

+#    03/30/2015          #17206    yteng          Changed some parameters that are not rate parameters

+#    03/31/2015          #17288    bhunder        Added Weather Params for RTMA

+#    04/03/2015          #4367     dgilling       Change WindGust's time constraints back to TC1

+#                                                 for Fcst/Official.

+#    04/08/2015          #4383     dgilling       Define FireWX ISC configuration parameters.

+#    04/15/2015          #17383    yteng          Change localTC to fix error that time constraints

+#                                                 being off

+#    Apr 25, 2015         4952     njensen        Updated for new JEP API

+#    04/20/2015          #4414     dgilling       Add missing NWPSTrkngCG0 weather elements.

+#    05/12/2015          #17144    bhunder        Added RTMA model

+#    05/29/2015          17496     ryu            Changed parm definitions for Wave1-10 and Period1-10.

+#

+#    05/29/2015          #17144    bhunder        Added weather Params for URMA25 and OCONUS RTMA

+#    09/02/2015          #4819     rferrel        Added HWRF.

+#    09/09/2015          16287     amoore         Additional validation of user input

+#    10/07/2015          #4958     dgilling       Added support for NationalBlend D2D data.

+#    10/13/2015          #4961     randerso       Updated NewTerrain/BaseTerrain database definitions

+#    10/30/2015          #17940    jendrowski     Responded to Code Review.  Mostly syntactical changes.

+#    11/05/2015          #18182    ryu            Change D2DDBVERSIONS value for HPCERP to 24

+#    12/22/2015          #14152    jwatson        Added Sky, Wind to GFSLAMPGrid parms

+#    1/28/2016           #13910    amoore         Wave model data should be available in 3-hrly timesteps

+#    02/09/2016          #5283     nabowle        Remove NGM support.

+#    02/22/2016          #18161    wkwock         Add NationalBlend model for AK, PR, HW

+#    02/23/2016          #14845    jwatson        Changed NamDNG5 to NamDNG for all sources and params.

+#                                                 Changed D2DModels for CONUS and Alaska to

+#                                                 namdng25 and AK-NamDNG3

+#    04/01/2016          18777     ryu            Replace NCF ip addresses.

+#    04/22/2016          #18896    wkwock         Add more nationalBlend Model

+#    06/01/2016                    JCM            removed tc3ng from officialdbs for wave/period elements;

+#                                                 removed Wave_XX and Period_XX; removed Wave10, Period10;

+#                                                 added databases for all sites to baseline

+#    08/08/2016          #5747     randerso       Support removal of wrapper.py

+#    10/05/2016          19293     randerso       Fixed units on Tropical and a few other weather elements

+#    12/12/2016          #19596    bhunder        Added "tp" to NationalBlend model D2DAccumulativeElements

+#    02/20/2017        DCS18966    mdavis/pjendr. NIC adjustment: name changes and removal of obsolete 

+#                                                 smart inits(DCS 19490). Fixed addOptionalParms.

+#    03/17/2017          19673     jmaloney       Added Rip Current Probabilities (RipProb).

+#    06/29/2017          6323      randerso       Added P-ETSS model

+#    07/19/2017        DCS19490    gpetrescu      Removed AKwave10, Wave10 and Period10.

+#    07/12/2017          6324      randerso       Added TPCWindProb_Prelim model

+#    07/12/2017          6253      randerso       Updated for Standard Terrain

+#    08/03/2017          #20054    bhunder        Added changes for ETSS model and for ETSS-HiRes model.

+#    10/03/2017        DR20432     arivera        Replace GFS40 with GFS in SnowRatioGFS and remove

+#                                                 GLOBHwave from SJU model databases.

+#    11/28/2017          6539      randerso       Made P-ETSS and TPCSurgeProb elements D2DAccumulativeElements

+#    12/06/2017        DCS20267    psantos        Add NWPS Rip Current Guidance

+#    12/20/2017          20510     ryu            changes to StormTotalSnow parameter

+#    02/23/2018          #20395    wkwock         Added NBM3.1 elements.

+#    04/03/2018        DR20656     arivera        Missing comma: "Dune Erosion Probability" in optionalParmsDict['marine']

+#    05/09/2018        DR20715     arivera        Missing comma: groups['marineSites'] after 'AVAK'

+#    06/18/2018          16729     ryu            Remove tpHPC element from RFCQPF model and the smart init for the model.

+#

+####################################################################################################

+

+##

+# This is an incremental override file, indicating that the files at different

+# localization levels will be combined. Incremental overrides are achieved by

+# creating a localConfig file at a higher priority localization level that

+# imports this base file.

+#

+# See the Configuration Guides->Server Configuration->Syntax for localConfig.py

+# section of the GFE Online Help for more information.

+##

+

+

+

+#----------------------------------------------------------------------------

+# USEFUL DEFINES

+#----------------------------------------------------------------------------

+

+import siteConfig,imp

+import pprint

+import re

+import sys

+import LogStream

+from collections import defaultdict

+BASELINE = getattr(siteConfig, 'BASELINE', 0)

+

+#D scfp=open('/localapps/logs/scdebug.log','w')

+class dbConfig(object):

+    """Class to create GFE databases from modelDict"""

+    def __init__(self,modelDict):

+        self.modelDict=modelDict

+        self.dbs=[]

+        self.D2DMODELS=[]

+        self.D2DDBVERSIONS={}

+        self.D2DAccumulativeElements={}

+        self.INITMODULES={}

+        self.INITSKIPS={}

+

+    def addConfiguredModels(self,ignoreList=[]):

+        """Setup model databases defined in dbConfigDict.

+        ignoreList can be used to filter out specific models

+        """

+        for m in self.modelDict:

+            if m in ignoreList:

+                continue

+            # Don't allow BC model if regular is in ignore list

+            if m[-2:] == 'BC' and m[:-2] in ignoreList:

+                continue

+            self.addGfeDB(m,self.modelDict[m])

+        return

+    def addGfeDB(self,modelname,dbConfigDict):

+        """Does all the work needed for adding a model to GFE from entries

+        in dbConfigDict. This populates dbs and sets various self

+        variables.

+        """

+        if "DB" in dbConfigDict and "Parms" in dbConfigDict:

+            self.dbs.append((dbConfigDict["DB"],dbConfigDict["Parms"]))

+        if "D2DAccumulativeElements" in dbConfigDict:

+            self.D2DAccumulativeElements[modelname]=dbConfigDict["D2DAccumulativeElements"]

+        if "D2DDBVERSIONS" in dbConfigDict:

+            self.D2DDBVERSIONS[modelname]=dbConfigDict["D2DDBVERSIONS"]

+        if "D2DMODELS" in dbConfigDict:

+            self.D2DMODELS.append((dbConfigDict["D2DMODELS"],modelname))

+        if "INITMODULES" in dbConfigDict:

+            if type(dbConfigDict["INITMODULES"]) is tuple:

+                self.INITMODULES[dbConfigDict["INITMODULES"][0]] = dbConfigDict["INITMODULES"][1]

+            else:

+                self.INITMODULES[dbConfigDict["INITMODULES"]]=[modelname]

+        if "INITSKIPS" in dbConfigDict:

+            self.INITSKIPS[modelname]=dbConfigDict["INITSKIPS"]

+

+#===============================================================================

+#          Utility methods to manage GFE configuration

+#===============================================================================

+def mergeModelDicts(baseDict,addDict):

+    """Combine serverConfig model dict and regional modelDict into one modelDict.

+    Settings in baseDict are maintained unless overridden in addDict. The merging

+    is done on a key by key basis of a specific model's dictionary (baseDict and

+    addDict are dictionaries of dictionaries)

+    This changes baseDict in place so the object passed in as baseDict is modified

+    in the caller's scope.

+    """

+    for m,v in addDict.items():

+        if m not in baseDict:

+            baseDict[m]=v

+        else:

+            for key,val in v.items():

+                baseDict[m][key]=val

+

+def updateModelDict(modelDict,model,key,value):

+    """Udates a specific entry for a model in modelDict.  model and key are dictionary

+    keys into modelDict and modelDict[model] respectively. If model is not defined

+    in modelDict, then a new entry is created. Otherwise, value replaces any existing

+    value in modelDict[model][key].

+    This changes modelDict in place so the object passed in as modelDict is modified

+    in the caller's scope.

+    """

+    if model in modelDict:

+        modelDict[model][key]=value

+    else:

+        modelDict[model]= {key : value}

+

+def alterModelDef(dbTuple, name=None, format=None, dbType=None, single=None,

+                  official=None, numver=None, purgeAge=None):

+    """Alter GFE database definition. The definition is used in the dbs setting

+    and has form:

+                     (name,  format,  type, single, official, numVer, purgeAge)

+    i.e., Practice = ("Fcst",  GRID, "Prac",   YES,       NO,      1,   24)

+

+    Won't use these exact names since some might conflict with builtins

+    Only supply what you want to change. To clone a model definition, just

+    supply name='newname'

+    """

+    n,f,t,s,o,v,p=dbTuple

+    l=[]

+    for old,new in [(n,name),(f,format),(t,dbType),(s,single),(o,official),

+                    (v,numver),(p,purgeAge)]:

+        if new is None:

+            l.append(old)

+        else:

+            l.append(new)

+    return tuple(l)

+

+def createModelDict(localsDict,dbs,D2DMODELS,D2DDBVERSIONS,D2DAccumulativeElements,

+                  INITMODULES,INITSKIPS):

+    """Convert serverConfig model configuration to a dictionary. This allows

+    legacy serverConfig settings in dbs,D2DMODELS,INITMODULES, etc. to be

+    maintained and then converted into a single dictionary where all settings

+    for a model are together.

+

+    WARNING: There can only be one version of a model in the dbs list. Fcst

+    practice and test databases have to be handled separately.  This is ok

+    because these databases are defined after any localConfig customizations

+    of the normal Fcst database.

+

+    modelDict contains the following keys. Only define what is needed, i.e.,

+    it is not required to have every key defined

+    "DB": Definition of the database, i.e., the first value in a dbs entry:

+          ("wrfems", GRID, "", NO,  NO,  3, 0). This must be a tuple. The name

+          in the DB entry must be the same as the model name used as the key

+          into the modelDict variable.

+

+    "Parms" : Definition of the weather element parameters in the database,

+          i.e., the second part of the dbs entry. This is a list of tuples.

+

+    "D2DMODELS" : D2D metadata database name for the source model.

+

+    "INITMODULES': Name of the SmartInit module. This should be just the module

+          name as a string, not a list.

+

+    "D2DAccumulativeElements" : List of parms that are accumulative

+

+    "D2DDBVERSIONS" : Number of versions of a D2D model to show in the Weather

+          Element Browser. Defaults to 2 if not supplied.

+

+    "INITSKIPS" : Used to skip model cycles.

+

+    Example for a model:

+

+    modelDict["CMCreg"]={

+         "DB": ("CMCreg", "GRID", "", NO, NO, 2, 0),

+         "Parms": [([Temp, Td, RH, Wind, WindGust, Sky, MixHgt, TransWind, QPF,

+                     PoP, SnowAmt, SnowRatio], TC3),

+                   ([PoP6, QPF6, QPF6hr, CQPF1],TC6NG),

+                   ([QPF12, PoP12],TC12NG),

+                   ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),

+                   ([MaxT], MaxTTC), ([MinT], MinTTC),

+                  ],

+         "D2DMODELS": "Canadian-Reg",

+         "INITMODULES": "Local_CMCreg",

+         "D2DAccumulativeElements": ["tpgemreg","tprun","tp3hr","tp6hr"],

+         "D2DDBVERSIONS": 3,

+    }

+    """

+    # Create self initializing dictionary via collections.defaultdict

+    modelDict=defaultdict(dict)

+    parmsDict={}

+    tcDict={}

+

+    for n,v in sorted(localsDict.items()):

+        if type(v) is tuple and type(v[0]) is str and v[1] in [DISCRETE,SCALAR,VECTOR,WEATHER]:

+            parmsDict[n]=v

+        elif type(v) is tuple and len(v)==3 and type(v[0]) is int:

+            tcDict[n]=v

+

+    # Process dbs entries, i.e., model database definition

+    tcDict={}

+    for item in sorted(dbs):

+        plist=[]

+        parmTmpDict={}

+        pDict={}

+        for pt in item[1]:

+            parmsList=[]

+            # Try to find named parm setting

+            for p in pt[0]:

+                pname=p[0]

+                pDict[pname]=p

+                parmsList.append(pname)

+

+            # Try to get a named time constraint

+            name=next((name for name,v in tcDict.items() if v == pt[1]), None)

+            if name is None:

+                name = repr(pt[1])

+            tcDict[name]=pt[1]

+            if name in parmTmpDict:

+                parmTmpDict[name]+=parmsList

+            else:

+                parmTmpDict[name]=parmsList

+

+        # This consolidates parms by time constraint and sorts parm names.

+        for tc in sorted(parmTmpDict.keys()):

+            theParms=[]

+            for p in sorted(parmTmpDict[tc]):

+               theParms.append(pDict[p])

+            plist.append((theParms, tcDict[tc]))

+

+        modelDict[item[0][0]]={'DB':item[0],'Parms':plist}

+

+    for si,ml in list(INITMODULES.items()):

+        m=ml[0]

+        modelDict[m]['INITMODULES']=si

+    for m,v in list(D2DDBVERSIONS.items()):

+        modelDict[m]['D2DDBVERSIONS']=D2DDBVERSIONS[m]

+

+    for m,v in list(D2DAccumulativeElements.items()):

+        modelDict[m]['D2DAccumulativeElements']=v

+    for m,v in list(INITSKIPS.items()):

+        modelDict[m]['INITSKIPS']=v

+    for item in D2DMODELS:

+        if type(item) is str:

+           m=item

+           v=item

+        else:

+           v,m=item

+        if m in modelDict:

+            modelDict[m]['D2DMODELS']=v

+        else:

+            modelDict[m]={'D2DMODELS':v}

+    return modelDict

+

+def changeParm(modelDict,pname,value,modelList=['Fcst']):

+    """Alter a parm that is defined in modelDict Parm setting.

+

+    pname: name of parm. This is a string not the parm definition

+    value: the parm definition tuple. If the None object, then the parm

+        will be deleted.

+    modelList: List of model names to check. An empty list will check all

+        models in modelDict.

+    Return: Nothing. modelDict is altered in place.

+    """

+    if not modelList:

+        modelList=list(modelDict.keys())

+    for m in modelList:

+        if m not in modelDict or 'Parms' not in modelDict[m] or \

+                 not checkForParm(modelDict[m]['Parms'],pname):

+            continue

+

+        newpt=[]

+        # parms is tuple (parmList,TC)

+        for pList,tc in modelDict[m]['Parms']:

+            # This makes a copy of the list of parms, not a reference

+            # this is needed because we are changing the list in place.

+            theParms= list(pList)

+            match=False

+            for matchParm in (p for p in theParms if p[0] == pname):

+                match=True

+                theParms.remove(matchParm)

+            if match and value is not None:

+                theParms.append(value)

+            if theParms:

+                newpt.append((theParms,tc))

+        if newpt != modelDict[m]['Parms']:

+            modelDict[m]['Parms'] = newpt

+

+def changeParmTC(modelDict,pname,newTC,modelList=['Fcst']):

+    """Alter a parm in that is defined in modelDict Parm setting.

+

+    pname: name of parm. This is a string not the parm definition

+    newTC: the new Time Contraint (tuple)

+    modelList: List of model names to check. An empty list will check all

+        models in modelDict.

+    Return: Nothing. modelDict is altered in place.

+    """

+    if not modelList:

+        modelList=list(modelDict.keys())

+    for m in sorted(modelList):

+        if m not in modelDict or 'Parms' not in modelDict[m]:

+            continue

+#d        print m,"checkForParm=",checkForParm(modelDict[m]['Parms'],pname)

+        if not checkForParm(modelDict[m]['Parms'],pname):

+            continue

+

+        newpt=[]

+        # Parms is tuple (parmList,TC)

+        for pList,tc in modelDict[m]['Parms']:

+            # This makes a copy of the list of parms, not a reference

+            # this is needed because we are changing the list in place.

+            theParms= list(pList)

+            matchParm=next((p for p in theParms if p[0] == pname),None)

+#d            print m,matchParm,tc,newTC,len(theParms)

+            if matchParm:

+                theParms.remove(matchParm)

+                newpt.append(([matchParm],newTC))

+#d                print "Added",matchParm,newTC

+            if theParms:

+#d                print "restored",theParms," to",tc

+                newpt.append((theParms,tc))

+        if newpt != modelDict[m]['Parms']:

+#d            print 'Updated model',m

+            modelDict[m]['Parms'] = newpt

+#d            print modelDict[m]['Parms'],'\n'

+

+def checkForParm(parmDef,pname):

+    """Check a model parm definition if a parm named pname is in it.

+

+    parmDef: list of tuples, each tuple is a list of parms and a time

+        contraint. Call with modelDict[modelname]['Parms].

+    pname: Name of parm (string).

+    Returns: Boolean True if found, or False

+    """

+    for item in parmDef:

+        t=next((pt for pt in item[0] if pt[0] == pname),None)

+        if t is not None:

+            return True

+    return False

+

+def getParmNames(parmsDef):

+    """Return a list of parm names in a model parm definition

+

+    parmsDef: list of tuples, each tuple is a list of parms and a time

+        constraint. Call with modelDict[modelname]['Parms].

+    Returns: List of string parameter names

+

+    Here's an example of how to remove unused parms from Fcst, this can

+    run in localConfig:

+

+    parmsToRemove=[]

+    for p in getParmNames(modelDict['Fcst']):

+        pl=p.lower()

+        for t in ['period','swell','wave','surf', 'surge']:

+            if t in pl:

+                parmsToRemove.append(p)

+                break

+    removeParms(modelDict,'Fcst',parmsToRemove)

+    """

+    result=[]

+    for pList,tc in parmsDef:

+        # p is the parmDef tuple where first item is the parm name

+        newParms=[p[0] for p in pList]

+        result+=newParms

+    return sorted(result)

+

+def printServerConfig(moduleObj,localsDict, logFile="/awips2/edex/logs/localConfig.log"):

+    """Dump out ServerConfig final settings. localsDict is a dictionary of

+    local variables in localConfig, normally locals().

+

+    moduleObj: object (normally the serverConfig module) whose attributes

+               named in scvars below are written to the log.

+    logFile: path of the log file; overwritten on each call.

+    If logFile cannot be opened, the problem is logged via LogStream and no

+    log file is created.

+    """

+    # serverConfig log text

+    scText=""

+    try:

+        with open(logFile,"w") as fp:

+            # Print out dbs entries, i.e., model database definition

+            fp.write("Configuration for %s\n" % localsDict['SID'])

+            dbs=DATABASES

+            for item in sorted(dbs):

+                # item[0] is the database id tuple (item[0][0] is the model

+                # name); item[1] is the parm definition list.

+                scText += "\ndbs[%s]: %s\n" % (item[0][0], str(item[0]))

+                scText += _dumpParms(item[1])

+

+            # Dump out serverConfig settings likely to be modified by localConfig

+            scvars=["D2DMODELS", "INITMODULES",

+                    "D2DDBVERSIONS", "D2DAccumulativeElements",

+                    "REQUEST_ISC", "SEND_ISC_ON_SAVE",

+                    "SEND_ISC_ON_PUBLISH", "REQUESTED_ISC_PARMS",

+                    "ExtraWEPrecision", "INITSKIPS",

+                    "HazardKeys",

+                    "MAX_USER_BACKGROUND_PROCESSES",

+                    "AdditionalISCRouting",

+                    "ignoreDatabases",

+                   ]

+

+            for item in scvars:

+                scText += "\n%s:\n" % item

+                obj=getattr(moduleObj,item,None)

+                if type(obj) is list:

+                    # NOTE(review): sorts the module's attribute in place,

+                    # a side effect on moduleObj.

+                    obj.sort()

+                scText += pprint.pformat(obj) +'\n'

+

+            # This prints out all variables named parms*, i.e., parmsNAM12

+            for k in sorted(localsDict.keys()):

+                if k == "OFFICIALDBS" or re.match("parms[A-Z]+",k) is not None or \

+                         k == "extraISCparms":

+                    scText += "\n%s:\n" % k

+                    scText += _dumpParms(localsDict[k])

+            scText += printModelDict(localsDict)

+            fp.write(scText)

+    except IOError as e:

+        LogStream.logProblem("printServerConfig open file problem "+logFile+" - log not created\n" +LogStream.exc(e))

+

+def printModelDict(localsDict):

+    """Convert serverConfig model configuration to a dictionary. This writes

+    the dictionary as text. This does not create a usable modelDict, just one to

+    use to print out the dictionary as python code."""

+

+    modelDict={}

+    parmsDict={}

+    tcDict={}

+    dbs=DATABASES

+    scText=""

+    # Classify localConfig variables by value shape: parm definitions are

+    # tuples whose first element is a string name and second is a parm type;

+    # time constraints are 3-tuples of ints.

+    for n,v in list(localsDict.items()):

+        if type(v) is tuple and type(v[0]) is str and v[1] in [DISCRETE,SCALAR,VECTOR,WEATHER]:

+            parmsDict[n]=v

+        elif type(v) is tuple and len(v)==3 and type(v[0]) is int:

+            tcDict[n]=v

+

+    scText += '\n'

+    for n in sorted(parmsDict):

+        scText += 'parmVar: %s = %s\n' % (n,repr(parmsDict[n]))

+    scText += '\n'

+    for n in sorted(tcDict):

+        scText += 'TC: %s = %s\n' % (n,repr(tcDict[n]))

+    scText += '\n'

+

+    # Print out dbs entries, i.e., model database definition

+    for item in sorted(dbs):

+        plist=[]

+        parmTmpDict={}

+        for pt in item[1]:

+            parmsList=[]

+            # Try to find named parm setting

+            for p in pt[0]:

+                # Reverse lookup: prefer the variable name over the raw tuple.

+                name=next((name for name,v in parmsDict.items() if v == p), None)

+                if name is not None:

+                    parmsList.append(name)

+                else:

+                    parmsList.append(p[0])

+            # NOTE(review): this assignment is overwritten in the

+            # consolidation loop below before it is used.

+            theParms='&nlq(['+', '.join(parmsList)+'], '

+            # Try to get a named time constraint

+            name=next((name for name,v in tcDict.items() if v == pt[1]), None)

+            if name is None:

+                name = repr(pt[1])

+            if name in parmTmpDict:

+                parmTmpDict[name]+=parmsList

+            else:

+                parmTmpDict[name]=parmsList

+        # This consolidates parms by time constraint and sorts parm names.

+        for tc in sorted(parmTmpDict.keys()):

+            parmTmpDict[tc]=sorted(parmTmpDict[tc])

+            # &nlq/&nrq are placeholder markers; the quotes around them are

+            # stripped after pformat below so the output reads as python code.

+            theParms='&nlq(['+', '.join(parmTmpDict[tc])+'], '

+            plist.append(theParms + tc +')&nrq')

+

+        modelDict[item[0][0]]={'DB':item[0],'Parms':plist}

+    # Fold smartinit module assignments into modelDict.

+    for si,ml in list(INITMODULES.items()):

+        m=ml[0]

+        entry=si

+        if len(ml) > 1:

+            # Multiple d2d models for smartinit

+            # Try to get model from si name

+            if si.find('Local_') == 0:

+                m=si[6:]

+            entry=(si,ml)

+        if m in modelDict:

+            # If a model has multiple SmartInit modules, try to best match which

+            # Smartinit module to assign to the model.

+            if 'INITMODULES' not in modelDict[m] or m in si:

+                modelDict[m]['INITMODULES']=entry

+        else:

+            modelDict[m]={'INITMODULES':entry}

+

+    # Merge the per-model override dictionaries into modelDict.

+    for m,v in list(D2DDBVERSIONS.items()):

+        if m in modelDict:

+            modelDict[m]['D2DDBVERSIONS']=D2DDBVERSIONS[m]

+        else:

+            modelDict[m]={'D2DDBVERSIONS':D2DDBVERSIONS[m]}

+

+    for m,v in list(D2DAccumulativeElements.items()):

+        if m in modelDict:

+            modelDict[m]['D2DAccumulativeElements']=v

+        else:

+            modelDict[m]={'D2DAccumulativeElements':v}

+    for m,v in list(INITSKIPS.items()):

+        if m in modelDict:

+            modelDict[m]['INITSKIPS']=v

+        else:

+            modelDict[m]={'INITSKIPS':v}

+    # D2DMODELS entries are either a bare model name or a (d2dName, gfeName)

+    # pair.

+    for item in D2DMODELS:

+        if type(item) is str:

+           m=item

+           v=item

+        else:

+           v,m=item

+        if m in modelDict:

+            modelDict[m]['D2DMODELS']=v

+        else:

+            modelDict[m]={'D2DMODELS':v}

+

+    # Format each entry with pprint, then strip the quotes around the

+    # &nlq/&nrq markers so parm lists print as code, and re-indent.

+    for m in sorted(modelDict):

+        text=pprint.pformat(modelDict[m],width=80,indent=0)

+        text=text.replace("'&nlq",'')

+        text=text.replace("&nrq'",'')

+        text=text.replace('"&nlq','')

+        text=text.replace('&nrq"','')

+        text=text.replace(", 'INITMODULES':",",\n'INITMODULES':")

+        text=text.replace(')]}','),\n         ]\n}')

+        text=text.replace('\n','\n            ')

+        scText += "modelDict['%s'] = {\n            %s\n\n" % (m,text[1:])

+    return scText

+

+def _dumpParms(parms):

+    """Pretty prints parms.

+

+    parms: list of (parmDefList, timeConstraint) tuples as found in a

+           DATABASES entry.

+    Returns a string with one "    (parmDef, tc)" line per parm, sorted by

+    parm name. Returns "" when parms is not a list of such tuples, since

+    callers concatenate the result onto a string (returning None there

+    would raise a TypeError in printServerConfig).

+    """

+    pDict={}

+    result=""

+    for item in parms:

+        if type(item) is not tuple:

+            # Not a parm definition! Return the empty string so callers

+            # doing scText += _dumpParms(...) don't crash on None.

+            return result

+        pList,tc = item

+        for p in pList:

+            # Index by parm name (first element of the parm definition).

+            pDict[p[0]]=(p,tc)

+    for k in sorted(pDict.keys()):

+        result += "    %s\n" % repr(pDict[k])

+    return result

+

+def addOptionalParms(defaultTC,tcParmDict,parmDict,modelDict):

+    """Adds parms from optionalParmsDict to the Fcst database.

+    This is a convenience function if most parms use the default time constraint.

+    Otherwise, it's just as easy to hard code what needs to be added for an

+    optionalParmsDict entry.

+

+    defaultTC: Default time constraint to use if a parameter specific TC is not

+               defined in tcParmDict.

+    tcParmDict: Dictionary with keys of time constraints. Value is a list of

+                parameter names to be added with that time constraint. Empty

+                dictionary ok if everything should use the default. Example:

+                tcParmDict={TC6NG:['IceLineAcc','IceFlatAcc',]}

+    parmDict: Parameter dictionary with keys of parameter name and value is

+              the parameter definition tuple. Keys must match keys in tcParmDict.

+    modelDict: The serverConfig modelDict dictionary. Must already have Fcst

+               defined. Changed in place.

+    Returns: The parameter definition added to Fcst

+    """

+

+    tcParms={defaultTC:[]}

+    for tc in tcParmDict:

+        tcParms[tc]=[]

+    if len(tcParmDict) == 0:

+        # NOTE(review): mutates the caller's tcParmDict. The dummy entry

+        # presumably only gives the lookup loop below something to iterate;

+        # 'dummyParm' is not expected to match a real parm name.

+        tcParmDict['dummyTC']=['dummyParm']

+    for pname,value in parmDict.items():

+        # Find the time constraint to use for this parm

+        theTC=defaultTC

+        for tc in tcParmDict:

+            if pname in tcParmDict[tc]:

+                theTC=tc

+                break

+        tcParms[theTC].append(value)

+

+    # Convert the per-TC buckets to (parmList, tc) tuples and append them

+    # onto the Fcst database's parm list in place.

+    theParms=[]

+    for tc in tcParms:

+        theParms.append((tcParms[tc],tc))

+    modelDict['Fcst']['Parms'] += theParms

+    return theParms

+

+def addPowt(modelDict):

+    """This sets up PoWT parameters for in Fcst database.

+

+    modelDict: serverConfig modelDict with 'Fcst' defined; changed in place.

+    Returns the parameter definitions added to Fcst.

+    """

+    defaultTC=TC1

+    # Use value of time constraint and string name of parm in tcParmDict

+    # Ice accretion parms use the 6-hourly TC6NG constraint; everything else

+    # in optionalParmsDict['powt'] uses the TC1 default.

+    tcParmDict={TC6NG:['IceLineAcc','IceFlatAcc',]

+               }

+    return addOptionalParms(defaultTC,tcParmDict,

+                            optionalParmsDict['powt'],modelDict)

+

+def addWinterWeatherProbs(modelDict):

+    """This sets up ER Winter Weather Probability parameters in the Fcst database.

+

+    modelDict: serverConfig modelDict with 'Fcst' defined; changed in place.

+    Returns the parameter definitions added to Fcst.

+    """

+    defaultTC=TC1

+    # Use value of time constraint and string name of parm in tcParmDict

+    # Empty dict: every parm in optionalParmsDict['winterProbs'] gets TC1.

+    tcParmDict={}

+    return addOptionalParms(defaultTC,tcParmDict,

+                            optionalParmsDict['winterProbs'],modelDict)

+

+def addRainfallProbs(modelDict):

+    """This sets up WPC rainfall probability parameters in the Fcst database.

+

+    modelDict: serverConfig modelDict with 'Fcst' defined; changed in place.

+    Returns the parameter definitions added to Fcst.

+    """

+    defaultTC=TC1

+    # Use value of time constraint and string name of parm in tcParmDict

+    # Empty dict: every parm in optionalParmsDict['rainfallProb'] gets TC1.

+    tcParmDict={}

+    return addOptionalParms(defaultTC,tcParmDict,

+                            optionalParmsDict['rainfallProb'],modelDict)

+

+# Local-time based time constraints.  Does not automatically account for

+# daylight savings time.  The dst flag is 0 for standard time and manually

+# set to 1 for daylight time (if desired).  The start is specified in

+# seconds local time, e.g., 6*HOUR would indicate 6am.

+def localTC(start,repeat,duration,dst):

+    """Build a local-time based time constraint tuple.

+

+    start: start time in seconds local time (e.g., 6*HOUR for 6am).

+    repeat: repeat interval in seconds.

+    duration: duration in seconds.

+    dst: 1 shifts the start one hour earlier for daylight time; 0 for

+         standard time. DST is not handled automatically.

+    Returns (start, repeat, duration) with start converted to UTC seconds.

+    """

+    timezone = SITES[GFESUITE_SITEID][3]

+    import dateutil.tz, datetime

+    tz = dateutil.tz.gettz(timezone)

+    local = datetime.datetime.now(tz)

+    # utcoffset() includes any active DST; subtracting dst() leaves the

+    # standard-time offset so the conversion does not depend on "now".

+    delta = tz.utcoffset(local) - tz.dst(local)

+    offset = delta.days*86400 + delta.seconds

+    start = start - offset

+    if dst == 1:

+        start = start - 3600     #daylight savings flag

+    # Normalize the shifted start back into the [0, 24h) range.

+    if start >= 3600 * 24:

+        start = start - 3600 * 24

+    elif start < 0:

+        start = start + 3600 * 24

+    return (start, repeat, duration)

+

+# imports the named module.  If the module

+# does not exist, it is just ignored.  But

+# if it exists and has an error, the exception

+# is thrown.  If the module was imported returns

+# true.

+def siteImport(modName):

+    """Import the named module into globals() if it exists.

+

+    modName: name of the module to import.

+    Returns 1 if the module was imported, 0 if it does not exist. If the

+    module exists but raises on import, the exception propagates.

+    NOTE(review): uses the deprecated ``imp`` module only as an existence

+    probe; importlib.util.find_spec would be the modern equivalent.

+    """

+    try:

+        fp, path, des = imp.find_module(modName)

+        if fp:

+            # find_module returns an open file handle for regular modules;

+            # close it since only the existence check was needed.

+            fp.close()

+    except ImportError:

+        LogStream.logEvent("No " + modName + " file found, using baseline settings.");

+        return 0

+    globals()[modName] = __import__(modName)

+    return 1

+

+def doIt():

+    """Parse the final GFE configuration and populate IFPConfigServer.

+

+    Runs doConfig.parse/otherParse over the module-level settings

+    (DATABASES, SITES, D2DMODELS, ...) and assigns every result onto the

+    IFPConfigServer object as a side effect. Returns nothing.

+    """

+    # Import the local site configuration file (if it exists)

+    import doConfig

+    import VTECPartners

+    # Phase 1: parse databases, projections, weather types/visibilities,

+    # and site information.

+    (models, projections, vis, wx, desDef, allSites, domain, siteId, timeZone,officeTypes) = \

+      doConfig.parse(GFESUITE_SITEID, DATABASES, types, visibilities, SITES,

+      allProjections)

+    IFPConfigServer.models                  = models

+    IFPConfigServer.projectionData          = projections

+    IFPConfigServer.weatherVisibilities     = vis

+    IFPConfigServer.weatherTypes            = wx

+    IFPConfigServer.discreteDefinitions     = desDef

+    IFPConfigServer.allSites                = allSites

+    IFPConfigServer.officeTypes             = officeTypes

+    IFPConfigServer.siteID                  = siteId

+    IFPConfigServer.timeZone                = timeZone

+    IFPConfigServer.d2dModels               = doConfig.d2dParse(D2DMODELS)

+    IFPConfigServer.netCDFDirs              = doConfig.netcdfParse(NETCDFDIRS)

+    IFPConfigServer.satData                 = doConfig.parseSat(SATDATA)

+    IFPConfigServer.domain                  = domain

+

+    # Phase 2: parse the remaining server settings (smartinits, ISC,

+    # purge/log options, etc.). Result order must match otherParse's

+    # return order exactly.

+    (serverHost, mhsid, \

+    rpcPort, \

+    initMethods, accumulativeD2DElements, \

+    initSkips, d2dVersions, \

+    logFilePurgeAfter, \

+    prdDir, baseDir, \

+    extraWEPrecision, \

+    tableFetchTime, \

+    autoConfigureNotifyTextProd, \

+    iscRoutingTableAddress, \

+    requestedISCsites, requestISC, \

+    sendiscOnSave, sendiscOnPublish, \

+    requestedISCparms, \

+    transmitScript) \

+       = doConfig.otherParse(list(SITES.keys()), \

+      GFESUITE_SERVER, GFESUITE_MHSID, \

+      GFESUITE_PORT, INITMODULES,

+      D2DAccumulativeElements,

+      INITSKIPS, D2DDBVERSIONS, LOG_FILE_PURGE_AFTER,

+      GFESUITE_PRDDIR, GFESUITE_HOME,

+      ExtraWEPrecision, VTECPartners.VTEC_REMOTE_TABLE_FETCH_TIME,

+      AUTO_CONFIGURE_NOTIFYTEXTPROD, ISC_ROUTING_TABLE_ADDRESS,

+      REQUESTED_ISC_SITES, REQUEST_ISC, SEND_ISC_ON_SAVE, SEND_ISC_ON_PUBLISH,

+      REQUESTED_ISC_PARMS, TRANSMIT_SCRIPT)

+    IFPConfigServer.serverHost = serverHost

+    IFPConfigServer.mhsid = mhsid

+    IFPConfigServer.rpcPort = rpcPort

+    IFPConfigServer.initMethods = initMethods

+    IFPConfigServer.accumulativeD2DElements = accumulativeD2DElements

+    IFPConfigServer.initSkips = initSkips

+    IFPConfigServer.d2dVersions =  d2dVersions

+    IFPConfigServer.logFilePurgeAfter = logFilePurgeAfter

+    IFPConfigServer.prdDir = prdDir

+    IFPConfigServer.baseDir = baseDir

+    IFPConfigServer.extraWEPrecision = extraWEPrecision

+    IFPConfigServer.tableFetchTime = tableFetchTime

+    IFPConfigServer.autoConfigureNotifyTextProd =  autoConfigureNotifyTextProd

+    IFPConfigServer.iscRoutingTableAddress = iscRoutingTableAddress

+    IFPConfigServer.requestedISCsites = requestedISCsites

+    IFPConfigServer.requestISC = requestISC

+    IFPConfigServer.sendiscOnSave = sendiscOnSave

+    IFPConfigServer.sendiscOnPublish = sendiscOnPublish

+    IFPConfigServer.requestedISCparms = requestedISCparms

+    IFPConfigServer.transmitScript = transmitScript

+    IFPConfigServer.iscRoutingConfig = doConfig.parseAdditionalISCRouting(AdditionalISCRouting)

+

+def getSimpleConfig():

+    """Return the IFPConfigServer object holding the parsed configuration."""

+    return IFPConfigServer

+

+GFESUITE_SITEID = siteConfig.GFESUITE_SITEID

+GFESUITE_MHSID = siteConfig.GFESUITE_MHSID

+GFESUITE_SERVER =  siteConfig.GFESUITE_SERVER

+GFESUITE_HOME   = siteConfig.GFESUITE_HOME

+GFESUITE_PORT   = int(siteConfig.GFESUITE_PORT)

+#GFESUITE_DATDIR = siteConfig.GFESUITE_DATDIR

+GFESUITE_LOGDIR = siteConfig.GFESUITE_LOGDIR

+GFESUITE_PRDDIR = siteConfig.GFESUITE_PRDDIR

+#GFESUITE_SHPDIR = siteConfig.GFESUITE_SHPDIR

+#GFESUITE_TOPODIR = siteConfig.GFESUITE_TOPODIR

+#GFESUITE_VTECDIR = siteConfig.GFESUITE_VTECDIR

+

+SID = GFESUITE_SITEID

+

+# modelDict is a master configuration dictionary for all GFE databases

+# Create self initializing dictionary via collections.defaultdict

+modelDict=defaultdict(dict)

+

+# ignoreDatabases is used when executing the final configuration to ignore

+# certain models. The new paradigm with modelDict is to have one master

+# modelDict and ignore datasets for specific regions or groups. Sites can

+# add to or remove from ignoreDatabases in their localConfig.

+ignoreDatabases=[]

+

+# Groups are a way of setting up groups of parms for special or optionally used

+# methodology. For example, the Probability of Weather Type methodology.

+groups={}

+groups['ALASKA_SITES'] = ['AFG', 'AJK', 'ALU', 'AER', 'ACR', 'AFC', 'VRH', 'AAWU', 'AVAK']

+groups['GreatLake_SITES'] = ['LOT', 'MKX', 'GRB', 'DLH', 'MQT', 'APX', 'GRR', 'DTX',

+                             'IWX', 'CLE', 'BUF', 'PBZ', 'ILN', 'IND', 'ILX', 'MPX', 'FGF']

+groups['CONUS_EAST_SITES'] = ['ALY', 'AKQ', 'APX', 'BGM', 'BMX', 'BOX', 'BTV', 'BUF',

+                              'CAE', 'CAR', 'CHS', 'CLE', 'CTP', 'DTX', 'FFC', 'GRR',

+                              'GSP', 'GYX', 'ILM', 'ILN', 'IND', 'JAN', 'JAX', 'JKL',

+                              'LCH', 'LMK', 'LWX', 'MEG', 'MFL', 'MHX', 'MLB', 'MOB',

+                              'MQT', 'MRX', 'OKX', 'PAH', 'PBZ', 'PHI', 'RAH', 'RLX',

+                              'RNK', 'TAE', 'TBW', 'ALR', 'RHA', 'TAR', 'TIR']

+groups['RFC_SITES'] = ["ACR", "ALR", "FWR", "KRF", "MSR", "ORN", "PTR",

+                       "RHA", "RSA", "STR", "TAR", "TIR", "TUA"]

+

+siteRegion={}

+# need to account for RFCs?

+siteRegion['AR'] = groups['ALASKA_SITES']

+siteRegion['CR'] = ['ABR','APX','ARX','BIS','BOU','CYS','DDC','DLH','DMX','DTX',

+                    'DVN','EAX','FGF','FSD','GID','GJT','GLD','GRB','GRR','ICT',

+                    'ILX','IND','IWX','JKL','LBF','LMK','LOT','LSX','MKX','MPX',

+                    'MQT','OAX','PAH','PUB','RIW','SGF','TOP','UNR']

+siteRegion['ER'] = ['AKQ','ALY','BGM','BOX','BTV','BUF','CAE','CAR','CHS','CLE',

+                    'CTP','GSP','GYX','ILM','ILN','LWX','MHX','OKX','PBZ','PHI',

+                    'RAH','RLX','RNK']

+siteRegion['PR'] = ['GUM','HFO','PBP','PPG']

+siteRegion['SR'] = ['ABQ','AMA','BMX','BRO','CRP','EPZ','EWX','FFC','FWD','HGX',

+                    'HUN','JAN','JAX','KEY','LCH','LIX','LUB','LZK','MAF','MEG',

+                    'MFL','MLB','MOB','MRX','OHX','OUN','SHV','SJT','SJU','TAE',

+                    'TBW','TSA']

+siteRegion['WR'] = ['BOI','BYZ','EKA','FGZ','GGW','HNX','LKN','LOX','MFR','MSO',

+                    'MTR','OTX','PDT','PIH','PQR','PSR','REV','SEW','SGX','SLC',

+                    'STO','TFX','TWC','VEF']

+

+groups['OCONUS_SITES'] = groups['ALASKA_SITES'] + siteRegion['PR'] + ['SJU']

+

+myRegion='ALL'

+for r in siteRegion:

+    if SID in siteRegion[r]:

+        myRegion=r

+        break

+

+groups['powt']=list(groups['OCONUS_SITES']+ siteRegion['CR'] + siteRegion['ER'] + siteRegion['SR'] + siteRegion['WR'])

+groups['marineSites']=[

+                       # CONUS WFOs

+                       "CAR","GYX","BOX","OKX","PHI","LWX","AKQ","MHX","ILM","CHS",

+                       "BRO","CRP","HGX","LCH","LIX","MOB","TAE","TBW","KEY","MFL",

+                       "MLB","JAX","SJU",

+                       "SEW","PQR","MFR","EKA","MTR","LOX","SGX",

+                       # AR sites

+                       'AFC', 'AFG', 'AJK', 'AER', 'ALU', 'VRH', 'AVAK',

+                       # OPC Atlantic and Pacific

+                       'ONA', 'ONP',

+                       # NHC/TAFB Pacific and Atlantic, Storm Surge

+                       'NH1', 'NH2', 'NHA',

+                       # HFO Marine, GUM

+                       'HFO', 'HPA', 'GUM',

+                      ]

+

+groups['winterProbs']= [

+            # ER sites

+            'AKQ','ALY','BGM','BOX','BTV','BUF','CAE','CAR','CHS','CLE',

+            'CTP','GSP','GYX','ILM','ILN','LWX','MHX','OKX','PBZ','PHI',

+            'RAH','RLX','RNK',

+            #CR sites

+            'ABR','BIS','BOU','CYS','DDC','DMX','FGF','FSD','GLD','GRB',

+            'ICT','IND','IWX','JKL','LMK','LOT','MKX','MPX','MQT','OAX',

+            'PAH','PUB','SGF','GJT',

+            #SR sites

+            'FFC','LUB','MRX','OUN','TSA',

+            #WR sites

+            'FGZ','GGW','HNX','LKN','MFR','MSO','OTX','PDT','REV','SEW',

+            'SGX','SLC','STO'

+           ]

+

+groups['rainfallProbs'] = ["BOX"]

+

+#---------------------------------------------------------------------------

+#

+#  Weather Element configuration section.

+#

+#---------------------------------------------------------------------------

+

+SCALAR  = 'Scalar'

+VECTOR  = 'Vector'

+WEATHER = 'Weather'

+DISCRETE = 'Discrete'

+YES = 1

+NO = 0

+

+#SCALAR, VECTOR

+# name/type/units/description/max/min/precision/rateParm/

+#WEATHER

+# name/WEATHER/units/description/

+#DISCRETE

+# keyDef = [(keySym, keyDesc), (keySym, keyDesc)]

+# name/DISCRETE/units/description/overlapCapable/keyDef/

+

+# Standard Public Weather Elements

+SID = GFESUITE_SITEID

+

+maxTempVal=140.0

+minTempVal=-100.0

+maxTdVal=140.0

+minTdVal=-100.0

+maxQpfVal=10.0

+maxIceVal=5.0

+Temp =    ("T", SCALAR, "F", "Surface Temperature", maxTempVal, minTempVal, 0, NO)

+Td =      ("Td", SCALAR, "F", "Dewpoint", maxTdVal, minTdVal, 0, NO)

+MaxT =    ("MaxT", SCALAR, "F", "Maximum Temperature", maxTempVal, minTempVal, 0, NO)

+MinT =    ("MinT", SCALAR, "F", "Minimum Temperature", maxTempVal, minTempVal, 0, NO)

+HeatIndex = ("HeatIndex", SCALAR, "F", "Heat Index", maxTempVal, -80.0, 0, NO)

+WindChill = ("WindChill", SCALAR, "F", "Wind Chill", 120.0, -120.0, 0, NO)

+QPF =     ("QPF", SCALAR, "in", "QPF", maxQpfVal, 0.0, 2, YES)

+Wind =    ("Wind", VECTOR, "kts", "Surface Wind", 125.0, 0.0, 0, NO)

+WindGust = ("WindGust", SCALAR, "kts", "Wind Gust", 125.0, 0.0, 0, NO)

+# special for TPC hurricane winds

+HiWind =    ("Wind", VECTOR, "kts", "Surface Wind", 200.0, 0.0, 0, NO)

+Weather = ("Wx", WEATHER, "wx", "Weather")

+IceAcc = ("IceAccum", SCALAR, "in", "Ice Accumulation", maxIceVal, 0.0, 2, YES)

+StormTotalIce = ('StormTotalIce', SCALAR, 'in', 'Storm Total Ice', maxIceVal, 0.0, 2, YES)

+SnowAmt = ("SnowAmt", SCALAR, "in", "Snowfall amount", 20.0, 0.0, 1, YES)

+StormTotalSnow = ("StormTotalSnow", SCALAR, "in","Storm Total Snow", 180.0, 0.0, 1, NO)

+PoP     = ("PoP", SCALAR, "%", "Prob of Precip", 100.0, 0.0, 0, NO)

+PoP6    = ("PoP6", SCALAR, "%", "Prob of Precip (6hr)", 100.0, 0.0, 0, NO)

+PoP12   = ("PoP12", SCALAR, "%", "Prob of Precip (12hr)", 100.0, 0.0, 0, NO)

+TstmPrb3 = ("TstmPrb3", SCALAR, "%", "Prob of Tstorm (3hr)", 100.0, 0.0, 0, NO)

+TstmPrb6 = ("TstmPrb6", SCALAR, "%", "Prob of Tstorm (6hr)", 100.0, 0.0, 0, NO)

+TstmPrb12 = ("TstmPrb12", SCALAR, "%", "Prob of Tstorm (12hr)", 100.0, 0.0, 0, NO)

+Sky     = ("Sky", SCALAR, "%", "Sky Condition", 100.0, 0.0, 0, NO)

+FzLevel = ("FzLevel", SCALAR, "ft", "Freezing level", 30000.0, 0.0, 0, NO)

+SnowLevel = ("SnowLevel", SCALAR, "ft", "Snow Level", 18000.0, 0.0, 0, NO)

+RH      = ("RH", SCALAR, "%", "Relative Humidity", 100.0, 0.0, 0, NO)

+

+# DR20541 and 20482 - add collaborate PoP, SnowAmt, QPF and ndfd QPF tools

+PoP12hr = ("PoP12hr", SCALAR, "%", "12 hr Chance of Precip", 100.0, 0.0, 0, NO)

+QPF6hr = ("QPF6hr", SCALAR, "in", "6 hr Precipitation (in)", maxQpfVal, 0.0, 2, YES)

+SnowAmt6hr = ("SnowAmt6hr", SCALAR, "in", "6 hr Snowfall", 30.0, 0.0, 1, YES)

+

+# Cobb SnowTool included.

+SnowRatio = ('SnowRatio', SCALAR, 'none', 'Snow Ratio', 40.0, 0.0, 1, NO)

+#totalVV = ('totalVV', SCALAR, 'ubar/s', 'Total VV', 400.0, 0.0, 0, YES)

+cape = ("cape", SCALAR, "1unit", "CAPE", 8000.0, 0.0, 1, NO)

+ApparentT = ("ApparentT", SCALAR, "F", "Apparent Temperature", maxTempVal, -120.0, 0, NO)

+LkSfcT = ("LkSfcT", SCALAR, "C", "Lake Surface T", 40.0, -2.0, 1, NO)

+SnowMap = ("SnowMap", SCALAR, "in", "Snowfall Map", 20.0, 0.0, 1, NO)

+StormTotalQPF = ('StormTotalQPF', SCALAR, 'in', 'Storm Total QPF (in)', 36.0, 0.0, 2, NO)

+SeasonTotalSnow = ('SeasonTotalSnow', SCALAR, 'in', 'Season Total Snow (in)', 150.0, 0.0, 2, NO)

+

+# Fire Weather Weather Elements

+LAL = ("LAL", SCALAR, "cat", "Lightning Activity Level", 6.0, 1.0, 0, NO)

+CWR = ("CWR", SCALAR, "%", "Chance of Wetting Rain", 100.0, 0.0, 0, NO)

+Haines = ("Haines", SCALAR, "cat", "Haines Index", 6.0, 2.0, 0, NO)

+MixHgt = ("MixHgt", SCALAR, "ft", "Mixing Height", 20000.0, 0.0, 0, NO)

+Wind20ft =    ("Wind20ft", VECTOR, "kts", "20ft. Wind", 125.0, 0.0, 0, NO)

+FreeWind = ("FreeWind", VECTOR, "kts", "Free Air Wind", 125.0, 0.0, 0, NO)

+TransWind = ("TransWind", VECTOR, "kts", "Transport Wind", 125.0, 0.0, 0, NO)

+Stability = ("Stability",SCALAR,"cat","Stability", 6.0,1.0,0, NO)

+HrsOfSun = ("HrsOfSun",SCALAR,"hrs","Hours of Sun",24.0,0.0,1, NO)

+MarineLayer = ("MarineLayer",SCALAR,"ft","Depth of Marine Layer", 20000.0,0.0,0,NO)

+InvBurnOffTemp = ("InvBurnOffTemp",SCALAR,"F","Inversion Burn-off Temperature", 120.0,-30.0,0, NO)

+VentRate = ("VentRate", SCALAR, "kt*ft", "VentRate", 500000.0, 0.0, 0, NO)

+DSI = ("DSI", SCALAR, "index", "DSI", 6.0, 0.0, 0, NO)

+MaxRH      = ("MaxRH", SCALAR, "%", "Maximum Relative Humidity", 100.0, 0.0, 0, NO)

+MinRH      = ("MinRH", SCALAR, "%", "Minimum Relative Humidity", 100.0, 0.0, 0, NO)

+Wetflag = ("Wetflag", SCALAR, "yn", "1300LT WetFlag", 1.0, 0.0, 0, NO)

+Ttrend = ("Ttrend", SCALAR, "F", "24hr Temperature Trend", 50.0, -50.0, 0, NO)

+RHtrend = ("RHtrend", SCALAR, "F", "24hr Relative Humidity Trend", 100.0, -100.0, 0, NO)

+

+# HPC Delta weather elements

+DeltaMinT = ('DeltaMinT', SCALAR, 'F', 'DeltaMinT', 130.0, -80.0, 0, NO)

+DeltaMaxT = ('DeltaMaxT', SCALAR, 'F', 'DeltaMaxT', 130.0, -80.0, 0, NO)

+DeltaWind = ("DeltaWind", VECTOR, "kts", "Surface Delta Wind", 125.0, 0.0, 0, NO)

+DeltaSky = ("DeltaSky", SCALAR, "%", "Delta Sky Condition", 100.0, -100.0, 0, NO)

+DeltaPoP = ("DeltaPoP", SCALAR, "%", "Delta Prob of Precip", 100.0, -100.0, 0, NO)

+

+# Special LAPS parms

+Radar = ("Radar", SCALAR, "dbz", "Radar Reflectivity", 80.0, -20.0, 0, NO)

+

+# RTMA parms

+QPE =     ("QPE", SCALAR, "in", "QPE", maxQpfVal, 0.0, 2, YES)

+#if SID in groups['ALASKA_SITES']: - not sure if this needs to be like that

+if SID in groups['OCONUS_SITES']:

+    TUnc =     ("TUnc", SCALAR, "F", "Temperature Anl Uncertainty", 20.0, 0.0, 0, NO)

+    TdUnc =    ("TdUnc", SCALAR, "F", "Dewpoint Anl Uncertainty", 25.0, 0.0, 0, NO)

+else:

+    TUnc =     ("TUnc", SCALAR, "F", "Temperature Anl Uncertainty", 10.0, 0.0, 0, NO)

+    TdUnc =    ("TdUnc", SCALAR, "F", "Dewpoint Anl Uncertainty", 15.0, 0.0, 0, NO)

+# DR17144

+SkyUnc  =  ("SkyUnc", SCALAR, "%", "Sky Condition Uncertainty", 100.0, 0.0, 0, NO)

+WSpdUnc =  ("WSpdUnc", SCALAR, "kts", "WSpd Anl Uncertainty", 12.0, 0.0, 0, NO)

+WDirUnc =  ("WDirUnc", SCALAR, "deg", "WDir Anl Uncertainty", 10.0, 0.0, 0, NO)

+VisUnc  =  ("VisUnc", SCALAR, "SM", "Vsby Anl Uncertainty", 10.0, 0.0, 2, NO)

+# DCS 17288

+PressUnc = ("PressUnc", SCALAR, "Pa", "Press Anl Uncertainty", 110000.0, 0.0, 2, NO)

+Pressure = ("Pressure", SCALAR, "Pa", "Pressure", 110000.0, 0.0, 2, NO)

+WGustUnc =  ("WGustUnc", SCALAR, "kts", "WGust Anl Uncertainty", 12.0, 0.0, 0, NO)

+

+# NamDNG parms

+QPF3 =     ("QPF3", SCALAR, "in", "3HR QPF", maxQpfVal, 0.0, 2, YES)

+QPF6 =     ("QPF6", SCALAR, "in", "6HR QPF", maxQpfVal, 0.0, 2, YES)

+QPF12 =    ("QPF12", SCALAR, "in", "12HR QPF", maxQpfVal, 0.0, 2, YES)

+Vis =      ("Vis", SCALAR, "SM", "Visibility", 10.0, 0.0, 2, NO)

+SnowAmt6 = ("SnowAmt6", SCALAR, "in", "Snowfall amount (6hr)", 20.0, 0.0, 1, YES)

+

+MaxT3 =  ("MaxT3", SCALAR, "F", "3hr Maximum Temperature", maxTempVal, minTempVal, 0, NO)

+MinT3 =  ("MinT3", SCALAR, "F", "3hr Minimum Temperature", maxTempVal, minTempVal, 0, NO)

+MaxRH3 = ("MaxRH3", SCALAR, "%", "3hr Maximum Relative Humidity", 100.0, 0.0, 0, NO)

+

+# Parms for ,'SAT',Satellite

+SatVisE  = ("VisibleE", SCALAR, "count", "Satellite Albdo %", 255.0, 0.0, 0, NO)

+SatIR11E = ("IR11E", SCALAR, "C", "11 micron temperature", 58.0, -111.0, 0, NO)

+SatIR13E = ("IR13E", SCALAR, "C", "13 micron temperature", 50.0, -111.0, 0, NO)

+SatIR39E = ("IR39E", SCALAR, "C", "3.9 micron temperature", 50.0, -111.0, 0, NO)

+SatWVE   = ("WaterVaporE", SCALAR, "C", "water vapor temperature", -11.0, -62.0, 0, NO)

+SatFogE  = ("FogE", SCALAR, "C", "ir11 - ir39", 50.0, -111.0, 0, NO)

+

+SatVisW  = ("VisibleW", SCALAR, "count", "Satellite Albdo %", 255.0, 0.0, 0, NO)

+SatIR11W = ("IR11W", SCALAR, "C", "11 micron temperature", 58.0, -111.0, 0, NO)

+SatIR13W = ("IR13W", SCALAR, "C", "13 micron temperature", 50.0, -111.0, 0, NO)

+SatIR39W = ("IR39W", SCALAR, "C", "3.9 micron temperature", 50.0, -111.0, 0, NO)

+SatWVW   = ("WaterVaporW", SCALAR, "C", "water vapor temperature", -11.0, -62.0, 0, NO)

+SatFogW  = ("FogW", SCALAR, "C", "ir11 - ir39", 50.0, -111.0, 0, NO)

+

+# TPC Wind Probability parms

+prob34 = ("prob34", SCALAR, "%", "WS34 CPROB", 100.0, 0.0, 0, NO)

+prob50 = ("prob50", SCALAR, "%", "WS50 CPROB", 100.0, 0.0, 0, NO)

+prob64 = ("prob64", SCALAR, "%", "WS64 CPROB", 100.0, 0.0, 0, NO)

+pws34 = ("pws34", SCALAR, "%", "34WSIPROB", 100.0, 0.0, 0, NO)

+pws50 = ("pws50", SCALAR, "%", "50WSIPROB", 100.0, 0.0, 0, NO)

+pws64 = ("pws64", SCALAR, "%", "64WSIPROB", 100.0, 0.0, 0, NO)

+pwsD34 = ("pwsD34", SCALAR, "%", "Day34WSIPROB", 100.0, 0.0, 0, NO)

+pwsN34 = ("pwsN34", SCALAR, "%", "Night34WSIPROB", 100.0, 0.0, 0, NO)

+pwsD64 = ("pwsD64", SCALAR, "%", "Day64WSIPROB", 100.0, 0.0, 0, NO)

+pwsN64 = ("pwsN64", SCALAR, "%", "Night64WSI PROB", 100.0, 0.0, 0, NO)

+pws34int = ("pws34int", SCALAR, "%", "34WSIntPROB", 100.0, 0.0, 0, NO)

+pws64int = ("pws64int", SCALAR, "%", "64WSIntPROB", 100.0, 0.0, 0, NO)

+

+# Surge parms for HLS/TCV

+InundationMax = ("InundationMax", SCALAR, "ft", "Max Inundation", 30.0, -100.0, 1, NO)

+InundationTiming = ("InundationTiming", SCALAR, "ft", "Incremental Inundation", 30.0, -100.0, 1, NO)

+SurgeHtPlusTideMSL = ("SurgeHtPlusTideMSL", SCALAR, "ft", "Surge above MSL", 30.0, -100.0, 1, NO)

+SurgeHtPlusTideMLLW = ("SurgeHtPlusTideMLLW", SCALAR, "ft", "Surge above MLLW", 30.0, -100.0, 1, NO)

+SurgeHtPlusTideMHHW = ("SurgeHtPlusTideMHHW", SCALAR, "ft", "Surge above MHHW", 30.0, -100.0, 1, NO)

+SurgeHtPlusTideNAVD = ("SurgeHtPlusTideNAVD", SCALAR, "ft", "Surge above NAVD88", 30.0, -100.0, 1, NO)

+

+# parms for storm surge collaboration

+SShazardKeys = [("",""), ("SS.A", "STORM SURGE WATCH"), ("SS.W", "STORM SURGE WARNING")]

+ProposedSS = ("ProposedSS", DISCRETE, "wwa", "Proposed StormSurge Hazards", YES, SShazardKeys, 7)

+tempProposedSS = ("tempProposedSS", DISCRETE, "wwa", "Temp Proposed StormSurge Hazards",

+              YES, SShazardKeys, 4)

+InitialSS = ("InitialSS", DISCRETE, "wwa", "Initial StormSurge Hazards",

+              YES, SShazardKeys, 4)

+DiffSS = ("DiffSS", SCALAR, "None", "Difference StormSurge Hazards", 2.0, -1.0, 0, NO)

+

+# parms for tropical cyclone threat graphics

+Threat4Keys = [("None","None to Little"), ("Elevated","Elevated"), ("Mod", "Moderate"), ("High", "High"), ("Extreme","Extreme"),]

+

+FloodingRainThreat = ("FloodingRainThreat", DISCRETE, "cat", "Flooding Rain Threat", NO, Threat4Keys,2)

+StormSurgeThreat = ("StormSurgeThreat", DISCRETE, "cat", "Storm Surge Threat", NO, Threat4Keys,2)

+WindThreat = ("WindThreat", DISCRETE, "cat", "Wind Threat", NO, Threat4Keys,2)

+TornadoThreat = ("TornadoThreat", DISCRETE, "cat", "Tornado Threat", NO, Threat4Keys,2)

+#    09/13/2016      JCM    changed precision of QPFtoFFGRatio to 2, max from 8 to 1000

+QPFtoFFGRatio = ("QPFtoFFGRatio", SCALAR, "1", "QPF to FFG Ratio", 1000.0, 0.0, 2, NO)

+

+# Hazards

+HazardKeys = []

+HazardKeys.append(("", ""))  #1st one must be None

+import VTECTable

+kys = list(VTECTable.VTECTable.keys())

+kys.sort()

+for k in kys:

+    HazardKeys.append((k, VTECTable.VTECTable[k]['hdln']))

+

+#H-VTEC keys - will someday add these back in

+#("hydroER", "Hydro - Excessive Rainfall"),

+#("hydroSM", "Hydro - Snow melt"),

+#("hydroRS", "Rain and Snow melt"),

+#("hydroDM", "Dam or Levee Failure"),

+#("hydroGO", "Glacier-Dammed Lake Outburst"),

+#("hydroIJ", "Ice Jam"),

+#("hydroIC", "Rain and/or Snow melt and/or Ice Jam"),

+

+Hazards = ("Hazards", DISCRETE, "wwa", "Hazards", YES, HazardKeys, 4)

+

+# Scalar/Vector Weather Elements that Require Extra Precision (due to their

+# use in calculations) Either form may be used.

+ExtraWEPrecision = []

+

+# Parms for ESTOFS

+AstroTide = ("AstroTide", SCALAR, "ft", "Astro Tide", 20.0, -8.0, 1, NO)

+StormSurge = ("StormSurge", SCALAR, "ft", "Storm Surge", 30.0, -5.0, 1, NO)

+

+# Parms for ETSS and ETSSHiRes

+SurgeTide = ("SurgeTide", SCALAR, "ft", "Surge Tide", 20.0, -8.0, 1, NO)

+

+# Parm for Aviation/GFSLAMPGrid

+CigHgt=("CigHgt",SCALAR,"ft","Ceiling Height",25000.0,-100.0,0,NO)

+

+# Parms for NationalBlend

+QPF1=("QPF1", SCALAR, "in", "1HR QPF", maxQpfVal, 0.0, 2, YES)

+PPI01=('PPI01', SCALAR, '%', '1-H Precip Potential Index', 100.0, 0.0, 0, NO)

+PPI06=('PPI06', SCALAR, '%', '6-H Precip Potential Index', 100.0, 0.0, 0, NO)

+PositiveEnergyAloft=("PositiveEnergyAloft" , SCALAR, "j/kg", "Positive energy aloft" , 500.0, 0.0, 1, NO)

+NegativeEnergyLowLevel=("NegativeEnergyLowLevel" , SCALAR, "j/kg", "Negative energy in the low levels" , 0.0, -500.0, 1, NO)

+SnowAmt01 = ("SnowAmt01", SCALAR, "in", "1-h Snow Accumulation", 20.0, 0.0, 1, YES)

+IceAccum01 = ("IceAccum01", SCALAR, "inch", "1-h Ice Accumulation", maxIceVal, 0.0, 3, NO)

+IceAccum = ("IceAccum", SCALAR, "inch", "6-h Ice Accumulation", 13.0, 0.0, 3, NO)

+TstmPrb1 = ("TstmPrb1", SCALAR, "%", "1-h SREF-based Prob. of a Thunderstorm", 100.0, 0.0, 0, NO)

+DryTstmPrb = ("DryTstmPrb", SCALAR, "%", "3-h SREF-based Prob. of a Dry Thunderstorm", 100.0, 0.0, 0, NO)

+WGS50pct =("WGS50pct", SCALAR, "kts", "10-m Wind Gust",125.0 , 0.0, 0, NO)

+WS50Prcntl30m =("WS50Prcntl30m", SCALAR, "kts", "30-m Wind Speed", 125.0, 0.0, 0, NO)

+WS50Prcntl80m =("WS50Prcntl80m", SCALAR, "kts", "80-m Wind Speed", 125.0, 0.0, 0, NO)

+Vis50pct =("Vis50pct", SCALAR, "SM", "Visibility",10.0 , 0.0, 3, NO)

+T50pct =("T50pct", SCALAR, "F", "Air Temperature", maxTempVal, minTempVal, 1, NO)

+PMSL10pct =("PMSL10pct", SCALAR, "mb", "10th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO)

+PMSL50pct =("PMSL50pct", SCALAR, "mb", "50th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO)

+PMSL90pct =("PMSL90pct", SCALAR, "mb", "90th percentile Mean Sea Level Pressure", 1100.0, 900.0, 1, NO)

+FosBerg = ("FosBerg", SCALAR, "none", "Fosberg Fire Weather Index", 100.0, 0.0, 0, NO)

+

+

+

+#---------------------------------------------------------------------------

+#

+#  Weather configuration section

+#

+#---------------------------------------------------------------------------

+

+# list of possible visibilities

+visibilities = ['', '0SM', '1/4SM', '1/2SM', '3/4SM', '1SM', '11/2SM',

+                '2SM', '21/2SM', '3SM', '4SM', '5SM', '6SM', 'P6SM']

+

+# list of possible coverages and probabilities

+NOCOV = ('', 'No Coverage')

+ISOD = ('Iso', 'Isolated')

+SCT = ('Sct', 'Scattered')

+NUM = ('Num', 'Numerous')

+WIDE = ('Wide', 'Widespread')

+OCNL = ('Ocnl', 'Occasional')

+SCHC = ('SChc', 'Slight Chance Of')

+CHC = ('Chc', 'Chance Of')

+LKLY = ('Lkly', 'Likely')

+DEFN = ('Def', 'Definite')

+PATCHY = ('Patchy', 'Patchy')

+AREAS = ('Areas', 'Areas of')

+FQT = ('Frq', 'Frequent')

+BRIEF = ('Brf', 'Brief')

+PERIODS = ('Pds', 'Periods of')

+INTM = ('Inter', 'Intermittent')

+

+# list of possible intensities

+INTEN_NONE = ('', 'No intensity')

+INTEN_VERYLIGHT = ('--', 'Very Light')

+INTEN_LIGHT = ('-', 'Light')

+INTEN_MOD = ('m', 'Moderate')

+INTEN_HEAVY = ('+', 'Heavy')

+INTEN_SEVERE = ('+', 'Severe')

+INTEN_DENSE = ('+', 'Dense')

+

+# list of optional attributes

+FQTLTG = ('FL', 'Frequent Lightning')

+GUSTS = ('GW', 'Gusty Winds')

+HVYRAFL = ('HvyRn', 'Heavy Rainfall')

+DMGWND = ('DmgW', 'Damaging Winds')

+SMALLH = ('SmA', 'Small Hail')

+LARGEH = ('LgA', 'Large Hail')

+OUTLYNG = ('OLA','in the outlying areas')

+GRASSY  = ('OGA','on grassy areas')

+OVRPASS = ('OBO','on bridges and overpasses')

+OR = ('OR', 'or')

+DRY = ('Dry', 'dry')

+PRIMARY = ('Primary', 'Highest Ranking')

+MENTION = ('Mention', 'Include Unconditionally')

+TORNADO = ('TOR', 'Tornadoes')

+

+# list of each weather types

+NOWX = ('', 'No Weather',

+          [NOCOV],

+          [INTEN_NONE],

+          [])

+THUNDER = ('T', 'Thunderstorms',

+          [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF,

+            PERIODS, INTM],

+          [INTEN_NONE, INTEN_SEVERE],

+          [PRIMARY, MENTION, FQTLTG, HVYRAFL, GUSTS, DMGWND, DRY,

+            LARGEH, SMALLH, TORNADO])

+RAIN = ('R', 'Rain',

+          [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],

+          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],

+          [PRIMARY, MENTION, OR])

+RAINSHOWERS = ('RW', 'Rain Showers',

+          [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF,

+            PERIODS, INTM],

+          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],

+          [PRIMARY, MENTION, OR])

+DRIZZLE = ('L', 'Drizzle',

+          [PATCHY, AREAS, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT,

+            BRIEF, PERIODS, INTM],

+          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],

+          [PRIMARY, MENTION, OR])

+FZRAIN = ('ZR', 'Freezing Rain',

+          [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],

+          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],

+          [PRIMARY, MENTION, OR])

+FZDRIZZLE = ('ZL', 'Freezing Drizzle',

+          [PATCHY, AREAS, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT,

+            BRIEF, PERIODS, INTM],

+          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],

+          [PRIMARY, MENTION, OR])

+SNOW = ('S', 'Snow',

+          [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],

+          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],

+          [PRIMARY, MENTION, OR])

+SNOWSHOWERS = ('SW', 'Snow Showers',

+          [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT,

+            BRIEF, PERIODS, INTM],

+          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],

+          [PRIMARY, MENTION, OR])

+SLEET = ('IP', 'Sleet',

+          [WIDE, SCHC, CHC, LKLY, DEFN, OCNL, FQT, BRIEF, PERIODS, INTM],

+          [INTEN_VERYLIGHT, INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],

+          [PRIMARY, MENTION, OR])

+FOG = ('F', 'Fog',

+          [PATCHY, AREAS, WIDE],

+          [INTEN_NONE, INTEN_DENSE],

+          [PRIMARY, MENTION])

+FREEZEFOG = ('ZF', 'Freezing Fog',

+          [PATCHY, AREAS, WIDE],

+          [INTEN_NONE, INTEN_DENSE],

+          [PRIMARY, MENTION])

+ICEFOG = ('IF', 'Ice Fog',

+          [PATCHY, AREAS, WIDE],

+          [INTEN_NONE],

+          [PRIMARY, MENTION])

+ICECRYSTAL = ('IC', 'Ice Crystals',

+          [PATCHY, AREAS, WIDE],

+          [INTEN_NONE],

+          [PRIMARY, MENTION])

+HAZE = ('H', 'Haze',

+          [DEFN],

+          [INTEN_NONE],

+          [PRIMARY, MENTION])

+BLWGSNOW = ('BS', 'Blowing Snow',

+          [PATCHY, AREAS, DEFN],

+          [INTEN_NONE],

+          [PRIMARY, MENTION])

+BLWGSAND = ('BN', 'Blowing Sand',

+          [PATCHY, AREAS, DEFN],

+          [INTEN_NONE],

+          [PRIMARY, MENTION])

+SMOKE = ('K', 'Smoke',

+          [PATCHY, AREAS, DEFN],

+          [INTEN_NONE],

+          [PRIMARY, MENTION])

+BLWGDUST = ('BD', 'Blowing Dust',

+          [PATCHY, AREAS, DEFN],

+          [INTEN_NONE],

+          [PRIMARY, MENTION])

+FROST = ('FR','Frost',

+          [PATCHY, AREAS, WIDE],

+          [INTEN_NONE],

+          [PRIMARY, MENTION, OUTLYNG])

+FRZSPRAY = ('ZY','Freezing Spray',

+          [ISOD, SCT, NUM, WIDE, SCHC, CHC, LKLY, DEFN, OCNL],

+          [INTEN_LIGHT, INTEN_MOD, INTEN_HEAVY],

+          [PRIMARY, MENTION])

+VOLASH = ('VA','Volcanic Ash',

+          [NOCOV],

+          [INTEN_NONE],

+          [PRIMARY, MENTION])

+WATERSPOUT = ('WP','Waterspouts',

+          [ISOD, SCHC, CHC, LKLY, DEFN],

+          [INTEN_NONE],

+          [PRIMARY, MENTION])

+

+

+types = [NOWX, THUNDER, WATERSPOUT, RAIN, RAINSHOWERS,

+         DRIZZLE, FZRAIN, FZDRIZZLE, SNOW, SNOWSHOWERS,

+         SLEET, FOG, FREEZEFOG, ICEFOG, ICECRYSTAL ,HAZE, BLWGSNOW,

+         BLWGSAND, SMOKE, BLWGDUST, FROST, FRZSPRAY, VOLASH]

+

+

+# PARMS FROM NwsInitsConfig

+#-------------------------------------------------------------------------------

+# Discrete Keys

+#-------------------------------------------------------------------------------

+#

+AirKeys = [("","None"), ("Watch","Watch"), ("Advisory","Advisory"),("Warning", "Warning"),]

+ThreatKeys=[('', 'None'), ('Very Low', 'Very Low'), ('Low', 'Low'),

+            ('Moderate', 'Moderate'), ('High', 'High'), ('Extreme','Extreme')]

+#

+SevereKeys = [('NONE', '0'), ('TSTM', '2'), ('MRGL', '3'), ('SLGT', '4'), ('ENH', '5'), ('MOD', '6'), ('HIGH', '8')]

+

+AirQuality = ('AirQuality', DISCRETE, 'cat', 'Air Quality', NO, AirKeys)

+BasinFFP = ('BasinFFP', DISCRETE, 'none', 'Basin Flash Flood Potential', NO,

+                         [('Dry', 'Dry'), ('Low', 'Low'), ('Moderate', 'Moderate'), ('High', 'High'), ('Very High', 'Very High')])

+CLRIndx = ('CLRIndx', SCALAR, 'none', 'Clearing Index', 1050.0, 0.0, 0, NO)

+CQPF1 = ('CQPF1', SCALAR, 'in', '6hr Cont QPF', maxQpfVal, 0.0, 2, NO)

+Ceiling = ('Ceiling', SCALAR, 'ft', 'Lowest Cloud Base Height', 25000.0, -30000.0, 0, NO)

+CigHgtCat = ('CigHgtCat', SCALAR, 'index', 'Cloud Ceiling Height Category', 6.0, 0.0, 0, NO)

+CloudBaseConditional = ('CloudBaseConditional', SCALAR, '100ft', 'Conditional Cloud Base Height', 250.0, 0.0, 0, NO)

+CloudBasePrimary = ('CloudBasePrimary', SCALAR, '100ft', 'Primary Cloud Base Height', 250.0, 0.0, 0, NO)

+CloudBaseSecondary = ('CloudBaseSecondary', SCALAR, '100ft', 'Secondary Cloud Base Height', 250.0, 0.0, 0, NO)

+ClimoET = ('ClimoET', SCALAR, 'in', 'ClimoET', 0.75, 0.0, 2, NO)

+ClimoETAprA = ('ClimoETAprA', SCALAR, 'in', 'ClimoET AprA', 0.75, 0.0, 2, NO)

+ClimoETAprB = ('ClimoETAprB', SCALAR, 'in', 'ClimoET AprB', 0.75, 0.0, 2, NO)

+ClimoETAugA = ('ClimoETAugA', SCALAR, 'in', 'ClimoET AugA', 0.75, 0.0, 2, NO)

+ClimoETAugB = ('ClimoETAugB', SCALAR, 'in', 'ClimoET AugB', 0.75, 0.0, 2, NO)

+ClimoETDecA = ('ClimoETDecA', SCALAR, 'in', 'ClimoET DecA', 0.75, 0.0, 2, NO)

+ClimoETDecB = ('ClimoETDecB', SCALAR, 'in', 'ClimoET DecB', 0.75, 0.0, 2, NO)

+ClimoETFebA = ('ClimoETFebA', SCALAR, 'in', 'ClimoET FebA', 0.75, 0.0, 2, NO)

+ClimoETFebB = ('ClimoETFebB', SCALAR, 'in', 'ClimoET FebB', 0.75, 0.0, 2, NO)

+ClimoETJanA = ('ClimoETJanA', SCALAR, 'in', 'ClimoET JanA', 0.75, 0.0, 2, NO)

+ClimoETJanB = ('ClimoETJanB', SCALAR, 'in', 'ClimoET JanB', 0.75, 0.0, 2, NO)

+ClimoETJulA = ('ClimoETJulA', SCALAR, 'in', 'ClimoET JulA', 0.75, 0.0, 2, NO)

+ClimoETJulB = ('ClimoETJulB', SCALAR, 'in', 'ClimoET JulB', 0.75, 0.0, 2, NO)

+ClimoETJunA = ('ClimoETJunA', SCALAR, 'in', 'ClimoET JunA', 0.75, 0.0, 2, NO)

+ClimoETJunB = ('ClimoETJunB', SCALAR, 'in', 'ClimoET JunB', 0.75, 0.0, 2, NO)

+ClimoETMarA = ('ClimoETMarA', SCALAR, 'in', 'ClimoET MarA', 0.75, 0.0, 2, NO)

+ClimoETMarB = ('ClimoETMarB', SCALAR, 'in', 'ClimoET MarB', 0.75, 0.0, 2, NO)

+ClimoETMayA = ('ClimoETMayA', SCALAR, 'in', 'ClimoET MayA', 0.75, 0.0, 2, NO)

+ClimoETMayB = ('ClimoETMayB', SCALAR, 'in', 'ClimoET MayB', 0.75, 0.0, 2, NO)

+ClimoETNovA = ('ClimoETNovA', SCALAR, 'in', 'ClimoET NovA', 0.75, 0.0, 2, NO)

+ClimoETNovB = ('ClimoETNovB', SCALAR, 'in', 'ClimoET NovB', 0.75, 0.0, 2, NO)

+ClimoETOctA = ('ClimoETOctA', SCALAR, 'in', 'ClimoET OctA', 0.75, 0.0, 2, NO)

+ClimoETOctB = ('ClimoETOctB', SCALAR, 'in', 'ClimoET OctB', 0.75, 0.0, 2, NO)

+ClimoETSepA = ('ClimoETSepA', SCALAR, 'in', 'ClimoET SepA', 0.75, 0.0, 2, NO)

+ClimoETSepB = ('ClimoETSepB', SCALAR, 'in', 'ClimoET SepB', 0.75, 0.0, 2, NO)

+ClimoPoP = ('ClimoPoP', SCALAR, '%', 'ClimoPoP', 100.0, 0.0, 0, NO)

+ClimoPoPAprA = ('ClimoPoPAprA', SCALAR, '%', 'ClimoPoP AprA', 100.0, 0.0, 0, NO)

+ClimoPoPAprB = ('ClimoPoPAprB', SCALAR, '%', 'ClimoPoP AprB', 100.0, 0.0, 0, NO)

+ClimoPoPAugA = ('ClimoPoPAugA', SCALAR, '%', 'ClimoPoP AugA', 100.0, 0.0, 0, NO)

+ClimoPoPAugB = ('ClimoPoPAugB', SCALAR, '%', 'ClimoPoP AugB', 100.0, 0.0, 0, NO)

+ClimoPoPDecA = ('ClimoPoPDecA', SCALAR, '%', 'ClimoPoP DecA', 100.0, 0.0, 0, NO)

+ClimoPoPDecB = ('ClimoPoPDecB', SCALAR, '%', 'ClimoPoP DecB', 100.0, 0.0, 0, NO)

+ClimoPoPFG = ('ClimoPoPFG', SCALAR, '%', 'ClimoPoP First Guess', 100.0, 0.0, 0, NO)

+ClimoPoPFebA = ('ClimoPoPFebA', SCALAR, '%', 'ClimoPoP FebA', 100.0, 0.0, 0, NO)

+ClimoPoPFebB = ('ClimoPoPFebB', SCALAR, '%', 'ClimoPoP FebB', 100.0, 0.0, 0, NO)

+ClimoPoPJanA = ('ClimoPoPJanA', SCALAR, '%', 'ClimoPoP JanA', 100.0, 0.0, 0, NO)

+ClimoPoPJanB = ('ClimoPoPJanB', SCALAR, '%', 'ClimoPoP JanB', 100.0, 0.0, 0, NO)

+ClimoPoPJulA = ('ClimoPoPJulA', SCALAR, '%', 'ClimoPoP JulA', 100.0, 0.0, 0, NO)

+ClimoPoPJulB = ('ClimoPoPJulB', SCALAR, '%', 'ClimoPoP JulB', 100.0, 0.0, 0, NO)

+ClimoPoPJunA = ('ClimoPoPJunA', SCALAR, '%', 'ClimoPoP JunA', 100.0, 0.0, 0, NO)

+ClimoPoPJunB = ('ClimoPoPJunB', SCALAR, '%', 'ClimoPoP JunB', 100.0, 0.0, 0, NO)

+ClimoPoPMarA = ('ClimoPoPMarA', SCALAR, '%', 'ClimoPoP MarA', 100.0, 0.0, 0, NO)

+ClimoPoPMarB = ('ClimoPoPMarB', SCALAR, '%', 'ClimoPoP MarB', 100.0, 0.0, 0, NO)

+ClimoPoPMayA = ('ClimoPoPMayA', SCALAR, '%', 'ClimoPoP MayA', 100.0, 0.0, 0, NO)

+ClimoPoPMayB = ('ClimoPoPMayB', SCALAR, '%', 'ClimoPoP MayB', 100.0, 0.0, 0, NO)

+ClimoPoPNovA = ('ClimoPoPNovA', SCALAR, '%', 'ClimoPoP NovA', 100.0, 0.0, 0, NO)

+ClimoPoPNovB = ('ClimoPoPNovB', SCALAR, '%', 'ClimoPoP NovB', 100.0, 0.0, 0, NO)

+ClimoPoPOctA = ('ClimoPoPOctA', SCALAR, '%', 'ClimoPoP OctA', 100.0, 0.0, 0, NO)

+ClimoPoPOctB = ('ClimoPoPOctB', SCALAR, '%', 'ClimoPoP OctB', 100.0, 0.0, 0, NO)

+ClimoPoPSepA = ('ClimoPoPSepA', SCALAR, '%', 'ClimoPoP SepA', 100.0, 0.0, 0, NO)

+ClimoPoPSepB = ('ClimoPoPSepB', SCALAR, '%', 'ClimoPoP SepB', 100.0, 0.0, 0, NO)

+CoastalFlood = ('CoastalFlood', DISCRETE, 'cat', 'Coastal Flood', NO, ThreatKeys)

+CondPredHgt = ('CondPredHgt', SCALAR, '100ft', 'Conditional Predominant Cloud Height', 250.0, 0.0, 0, NO)

+CondPredVsby = ('CondPredVsby', SCALAR, 'mi', 'Conditional Predominant Visibility', 10.0, 0.0, 2, NO)

+DenseFogSmoke = ('DenseFogSmoke', DISCRETE, 'cat', 'Dense Fog', NO, ThreatKeys)

+DepartNormFRET = ('DepartNormFRET', SCALAR, 'in', 'DepartNormFRET', 0.35, -0.35, 2, NO)

+Dryness = ('Dryness', DISCRETE, 'none', 'EGB Fuel Dryness', NO,

+           [('NoData', 'NoData'), ('Moist', 'Moist'), ('Dry', 'Dry'), ('VeryDry', 'VeryDry')])

+ExcessiveCold = ('ExcessiveCold', DISCRETE, 'cat', 'Extreme Cold', NO, ThreatKeys)

+ExcessiveHeat = ('ExcessiveHeat', DISCRETE, 'cat', 'Excessive Heat', NO, ThreatKeys)

+FFP = ('FFP', DISCRETE, 'none', 'Flash Flood Potential', NO,

+       [('Dry', 'Dry'), ('Low', 'Low'), ('Moderate', 'Moderate'), ('High', 'High'), ('Very High', 'Very High')])

+FFPI = ('FFPI', SCALAR, 'index', 'Flash Flood Potential Index', 10.0, 0.0, 2, NO)

+FRET = ('FRET', SCALAR, 'in', 'Forecast Reference ET', 0.75, 0.0, 2, NO)

+FRET7Day = ('FRET7Day', SCALAR, 'in/week', 'Weekly Forecast Reference ET', 5.0, 0.0, 2, NO)

+FireWeather = ('FireWeather', DISCRETE, 'cat', 'Wild Fire', NO, ThreatKeys)

+FlashFlood = ('FlashFlood', DISCRETE, 'cat', 'Flash Flood', NO, ThreatKeys)

+Flood = ('Flood', DISCRETE, 'cat', 'River Flood', NO, ThreatKeys)

+FrostFreeze = ('FrostFreeze', DISCRETE, 'cat', 'Frost/Freeze', NO, ThreatKeys)

+FuelMstr = ('FuelMstr', SCALAR, 'none', '10 Hour Fuel Moisture', 40.0, 1.0, 0, NO)

+HainesMid = ('HainesMid', SCALAR, 'cat', 'Mid Level Haines Index', 6.0, 2.0, 0, NO)

+HeatImpactLevels = ('HeatImpactLevels', SCALAR, 'none', 'HeatImpactLevels', 4.0, 0.0, 0, NO)

+HeatImpactLevelsMaxT = ('HeatImpactLevelsMaxT', SCALAR, 'none', 'HeatImpactLevelsMaxT', 4.0, 0.0, 0, NO)

+HeatImpactLevelsMinT = ('HeatImpactLevelsMinT', SCALAR, 'none', 'HeatImpactLevelsMinT', 4.0, 0.0, 0, NO)

+HeatOrangeMaxT = ('HeatOrangeMaxT', SCALAR, 'F', 'Heat Orange MaxT', maxTempVal, minTempVal, 0, NO)

+HeatOrangeMinT = ('HeatOrangeMinT', SCALAR, 'F', 'Heat Orange MinT', maxTempVal, minTempVal, 0, NO)

+HeatRedMaxT = ('HeatRedMaxT', SCALAR, 'F', 'Heat Red MaxT', maxTempVal, minTempVal, 0, NO)

+HeatRedMinT = ('HeatRedMinT', SCALAR, 'F', 'Heat Red MinT', maxTempVal, minTempVal, 0, NO)

+HeatYellowMaxT = ('HeatYellowMaxT', SCALAR, 'F', 'Heat Yellow MaxT', maxTempVal, minTempVal, 0, NO)

+HeatYellowMinT = ('HeatYellowMinT', SCALAR, 'F', 'Heat Yellow MinT', maxTempVal, minTempVal, 0, NO)

+HighWind = ('HighWind', DISCRETE, 'cat', 'High Wind', NO, ThreatKeys)

+IceAccum6hr = ('IceAccum6hr', SCALAR, 'in', '6-hr Ice Accumulation', 2.0, 0.0, 2, NO)

+LLWS = ('LLWS', VECTOR, 'kts', 'Low Level Wind Shear', 125.0, 0.0, 0, NO)

+LLWSHgt = ('LLWSHgt', SCALAR, '100 ft', 'Wind Shear Height', 20.0, 0.0, 0, NO)

+LTG = ('LTG', SCALAR, 'CNT', 'LTG', 100.0, 0.0, 0, NO)

+LTG12 = ('LTG12', SCALAR, 'CNT', 'LTG12', 100.0, 0.0, 0, NO)

+LTG24 = ('LTG24', SCALAR, 'CNT', 'LTG24', 100.0, 0.0, 0, NO)

+Lightning = ('Lightning', DISCRETE, 'cat', 'Lightning', NO, ThreatKeys)

+Max3 = ('Max3', SCALAR, 'F', '3hr Maximum Temperature', maxTempVal, minTempVal, 0, NO)

+Max6 = ('Max6', SCALAR, 'F', '6hr Maximum Temperature', maxTempVal, minTempVal, 0, NO)

+MaxApT = ('MaxApT', SCALAR, 'F', 'Max Apparent Temperature', maxTempVal, -120.0, 0, NO)

+MaxRHError = ('MaxRHError', SCALAR, '%', 'Maximum Relative Humidity Error', 100.0, -100.0, 0, NO)

+MaxRHFcst = ('MaxRHFcst', SCALAR, '%', 'Forecast Maximum Relative Humidity', 100.0, 0.0, 0, NO)

+MaxRHOb = ('MaxRHOb', SCALAR, '%', 'Observed Maximum Relative Humidity', 100.0, 0.0, 0, NO)

+MaxRHObs = ('MaxRHObs', SCALAR, '%', 'Maximum Observed RH', 100.0, 0.0, 0, NO)

+MaxT10 = ('MaxT10', SCALAR, 'F', '10th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)

+MaxT50 = ('MaxT50', SCALAR, 'F', '50th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)

+MaxT90 = ('MaxT90', SCALAR, 'F', '90th Percentile for MaxT', maxTempVal, minTempVal, 0, NO)

+MaxTAloft = ('MaxTAloft', SCALAR, 'C', 'Max Temp in Warm Nose', 40.0, -20.0, 1, NO)

+MaxTError = ('MaxTError', SCALAR, 'F', 'Maximum Temperature Error', 120.0, -120.0, 0, NO)

+MaxTFcst = ('MaxTFcst', SCALAR, 'F', 'Forecast Maximum Temperature', maxTempVal, minTempVal, 0, NO)

+MaxTOb = ('MaxTOb', SCALAR, 'F', 'Observed Maximum Temperature', maxTempVal, minTempVal, 0, NO)

+MaxTObs = ('MaxTObs', SCALAR, 'F', 'Maximum Temperature Obs', maxTempVal, minTempVal, 0, NO)

+Min3 = ('Min3', SCALAR, 'F', '3hr Minimum Temperature', maxTempVal, minTempVal, 0, NO)

+Min6 = ('Min6', SCALAR, 'F', '6hr Minimum Temperature', maxTempVal, minTempVal, 0, NO)

+MinApT = ('MinApT', SCALAR, 'F', 'Min Apparent Temperature', maxTempVal, -120.0, 0, NO)

+MinRH3 = ('MinRH3', SCALAR, '%', '3hr Minimum Relative Humidity', 100.0, 0.0, 0, NO)

+MinRHError = ('MinRHError', SCALAR, '%', 'Minimum Relative Humidity Error', 100.0, -100.0, 0, NO)

+MinRHFcst = ('MinRHFcst', SCALAR, '%', 'Forecast Minimum Relative Humidity', 100.0, 0.0, 0, NO)

+MinRHOb = ('MinRHOb', SCALAR, '%', 'Observed Minimum Relative Humidity', 100.0, 0.0, 0, NO)

+MinRHObs = ('MinRHObs', SCALAR, '%', 'Minimum Observed RH', 100.0, 0.0, 0, NO)

+MinT10 = ('MinT10', SCALAR, 'F', '10th Percentile for MinT', maxTempVal, minTempVal, 0, NO)

+MinT50 = ('MinT50', SCALAR, 'F', '50th Percentile for MinT', maxTempVal, minTempVal, 0, NO)

+MinT6 = ('MinT6', SCALAR, 'F', 'Minimum Temperature 6Hr', maxTempVal, minTempVal, 0, NO)

+MinT90 = ('MinT90', SCALAR, 'F', '90th Percentile for MinT', maxTempVal, minTempVal, 0, NO)

+MinTError = ('MinTError', SCALAR, 'F', 'Minimum Temperature Error', 120.0, -120.0, 0, NO)

+MinTFcst = ('MinTFcst', SCALAR, 'F', 'Forecast Minimum Temperature', maxTempVal, minTempVal, 0, NO)

+MinTOb = ('MinTOb', SCALAR, 'F', 'Observed Minimum Temperature', maxTempVal, minTempVal, 0, NO)

+MinTObs = ('MinTObs', SCALAR, 'F', 'Minimum Temperature Obs', maxTempVal, minTempVal, 0, NO)

+MixHgtAve = ('MixHgtAve', SCALAR, 'ft', 'Mixing Hgt Average', 20000.0, 0.0, 0, NO)

+MixHgtMSL = ('MixHgtMSL', SCALAR, 'ft', 'Mixing Height above sea level', 30000.0, 0.0, 0, NO)

+MixT1700 = ('MixT1700', SCALAR, 'F', '1700Foot MixingTemp', 110.0, -10.0, 0, NO)

+P95MaxT = ('P95MaxT', SCALAR, 'F', 'P95MaxT', maxTempVal, minTempVal, 0, NO)

+P95MinT = ('P95MinT', SCALAR, 'F', 'P95MinT', maxTempVal, minTempVal, 0, NO)

+     # EKDMOS

+PQPF06001 = ('PQPF06001', SCALAR, '%', '6hr Prob QPF > 0.01', 100.0, 0.0, 0, NO)

+PQPF06005 = ('PQPF06005', SCALAR, '%', '6hr Prob QPF > 0.05', 100.0, 0.0, 0, NO)

+PQPF06010 = ('PQPF06010', SCALAR, '%', '6hr Prob QPF > 0.10', 100.0, 0.0, 0, NO)

+PQPF06015 = ('PQPF06015', SCALAR, '%', '6hr Prob QPF > 0.15', 100.0, 0.0, 0, NO)

+PQPF06020 = ('PQPF06020', SCALAR, '%', '6hr Prob QPF > 0.20', 100.0, 0.0, 0, NO)

+PQPF06025 = ('PQPF06025', SCALAR, '%', '6hr Prob QPF > 0.25', 100.0, 0.0, 0, NO)

+PQPF06030 = ('PQPF06030', SCALAR, '%', '6hr Prob QPF > 0.30', 100.0, 0.0, 0, NO)

+PQPF06040 = ('PQPF06040', SCALAR, '%', '6hr Prob QPF > 0.40', 100.0, 0.0, 0, NO)

+PQPF06050 = ('PQPF06050', SCALAR, '%', '6hr Prob QPF > 0.50', 100.0, 0.0, 0, NO)

+PQPF06075 = ('PQPF06075', SCALAR, '%', '6hr Prob QPF > 0.75', 100.0, 0.0, 0, NO)

+PQPF06100 = ('PQPF06100', SCALAR, '%', '6hr Prob QPF > 1.00', 100.0, 0.0, 0, NO)

+PQPF06150 = ('PQPF06150', SCALAR, '%', '6hr Prob QPF > 1.50', 100.0, 0.0, 0, NO)

+PoP12Fcst = ('PoP12Fcst', SCALAR, '%', 'Forecast Prob. of Precip.', 100.0, 0.0, 0, NO)

+PoP3 = ('PoP3', SCALAR, '%', 'PoP3', 100.0, 0.0, 0, NO)

+PoPPCECMWF = ('PoPPatternClimoECMWF', SCALAR, '%', 'PatternClimoECMWF', 100.0, 0.0, 0, NO)

+PoPPCFIM = ('PoPPatternClimoFIM', SCALAR, '%', 'PatternClimoFIM', 100.0, 0.0, 0, NO)

+PoPPCGEM = ('PoPPatternClimoGEM', SCALAR, '%', 'PatternClimoGEM', 100.0, 0.0, 0, NO)

+PoPPCGFS = ('PoPPatternClimoGFS', SCALAR, '%', 'PatternClimoGFS', 100.0, 0.0, 0, NO)

+PoPPattern1 = ('PoPNortherlyFlow', SCALAR, '%', 'NortherlyFlow', 100.0, 0.0, 0, NO)

+PoPPattern10 = ('PoPRockiesRidge', SCALAR, '%', 'RockiesRidge', 100.0, 0.0, 0, NO)

+PoPPattern11 = ('PoPSouthernFirehose', SCALAR, '%', 'SouthernFirehose', 100.0, 0.0, 0, NO)

+PoPPattern12 = ('PoPNorthernFirehose', SCALAR, '%', 'NorthernFirehose', 100.0, 0.0, 0, NO)

+PoPPattern2 = ('PoPGreatBasinLow', SCALAR, '%', 'GreatBasinLow', 100.0, 0.0, 0, NO)

+PoPPattern3 = ('PoPBroadCyclonicFlow', SCALAR, '%', 'BroadCyclonicFlow', 100.0, 0.0, 0, NO)

+PoPPattern4 = ('PoPCoastalRidge', SCALAR, '%', 'CoastalRidge', 100.0, 0.0, 0, NO)

+PoPPattern5 = ('PoPNorthwestFlow', SCALAR, '%', 'NorthwestFlow', 100.0, 0.0, 0, NO)

+PoPPattern6 = ('PoPZonalFlow', SCALAR, '%', 'ZonalFlow', 100.0, 0.0, 0, NO)

+PoPPattern7 = ('PoPBroadAntiCyclonicFlow', SCALAR, '%', 'BroadAntiCyclonicFlow', 100.0, 0.0, 0, NO)

+PoPPattern8 = ('PoPDiffluentOnshoreFlow', SCALAR, '%', 'DiffluentOnshoreFlow', 100.0, 0.0, 0, NO)

+PoPPattern9 = ('PoPSouthwestFlow', SCALAR, '%', 'SouthwestFlow', 100.0, 0.0, 0, NO)

+PoPWG = ('PoPWG', SCALAR, '%', 'Climo PoP Work Grid', 30.0, -30.0, 0, NO)

+PPFFG = ("PPFFG", SCALAR, "%", "Prob of Excessive Rain in %", 100.0, 0.0 ,0, NO)

+PrecipDur = ('PrecipDur', SCALAR, 'hrs', 'Precipitation Duration', 12.0, 0.0, 1, YES)

+PredHgt = ('PredHgt', SCALAR, '100ft', 'Predominant Cloud Height', 250.0, 0.0, 0, NO)

+PredHgtCat = ('PredHgtCat', SCALAR, 'index', 'Predominant Cloud Height Category', 6.0, 0.0, 0, NO)

+PredHgtRH = ('PredHgtRH', SCALAR, '100ft', 'Pred Cloud Height from RH', 250.0, 1.0, 0, NO)

+PredHgtTempo = ('PredHgtTempo', SCALAR, '100ft', 'Predominant Cloud Height Tempo', 250.0, 0.0, 0, NO)

+PredVsby = ('PredVsby', SCALAR, 'mi', 'Predominant Visibility', 10.0, 0.0, 2, NO)

+Pres = ('Pres', SCALAR, 'mb', 'Pressure', 1100.0, 900.0, 2, NO)

+ProbDmgWind = ('ProbDmgWind', SCALAR, '%', 'Probability of Damaging Wind', 100.0, 0.0, 0, NO)

+ProbExtrmDmgWind = ('ProbExtrmDmgWind', SCALAR, '%', 'Probability of Extreme Damaging Wind', 100.0, 0.0, 0, NO)

+ProbExtrmHail = ('ProbExtrmHail', SCALAR, '%', 'Probability of Extreme Hail', 100.0, 0.0, 0, NO)

+ProbExtrmSvr = ('ProbExtrmSvr', SCALAR, '%', 'Probability of Extreme Severe', 100.0, 0.0, 0, NO)

+ProbExtrmTor = ('ProbExtrmTor', SCALAR, '%', 'Probability of Extreme Tornado', 100.0, 0.0, 0, NO)

+ProbSvrHail = ('ProbSvrHail', SCALAR, '%', 'Probability of Severe Hail', 100.0, 0.0, 0, NO)

+ProbTor = ('ProbTor', SCALAR, '%', 'Probability of Tornado', 100.0, 0.0, 0, NO)

+ProbTotSvr = ('ProbTotSvr', SCALAR, '%', 'Probability of Severe', 100.0, 0.0, 0, NO)

+ProbSnowGTT = ("ProbSnowGTT", SCALAR, "%", "Prob. snow > trace", 100.0, 0.0, 0, NO)

+ProbSnowGT1 = ("ProbSnowGT1", SCALAR, "%", "Prob. snow > 1 inch", 100.0, 0.0, 0, NO)

+ProbSnowGT2 = ("ProbSnowGT2", SCALAR, "%", "Prob. snow > 2 inches ", 100.0, 0.0, 0, NO)

+ProbSnowGT4 = ("ProbSnowGT4", SCALAR, "%", "Prob. snow > 4 inches ", 100.0, 0.0, 0, NO)

+ProbSnowGT6 = ("ProbSnowGT6", SCALAR, "%", "Prob. snow > 6 inches ", 100.0, 0.0, 0, NO)

+ProbSnowGT8 = ("ProbSnowGT8", SCALAR, "%", "Prob. snow > 8 inches", 100.0, 0.0, 0, NO)

+ProbSnowGT12 = ("ProbSnowGT12", SCALAR, "%", "Prob. snow > 12 inches", 100.0, 0.0, 0, NO)

+ProbSnowGT18 = ("ProbSnowGT18", SCALAR, "%", "Prob. snow > 18 inches", 100.0, 0.0, 0, NO)

+ProbSnowRT1 = ("ProbSnowRT1", SCALAR, "%", "Prob. snow T-1 inch", 100.0, 0.0, 0, NO)

+ProbSnowR12 = ("ProbSnowR12", SCALAR, "%", "Prob. snow 1-2 inches", 100.0, 0.0, 0, NO)

+ProbSnowR24 = ("ProbSnowR24", SCALAR, "%", "Prob. snow 2-4 inches ", 100.0, 0.0, 0, NO)

+ProbSnowR48 = ("ProbSnowR48", SCALAR, "%", "Prob. snow 4-8 inches ", 100.0, 0.0, 0, NO)

+ProbSnowR812 = ("ProbSnowR812", SCALAR, "%", "Prob. snow 8-12 inches ", 100.0, 0.0, 0, NO)

+ProbSnowR1218 = ("ProbSnowR1218", SCALAR, "%", "Prob. snow 12-18 inches", 100.0, 0.0, 0, NO)

+ProbSnowR18 = ("ProbSnowR18", SCALAR, "%", "Prob. snow > 18 inches", 100.0, 0.0, 0, NO)

+QPE06 = ('QPE06', SCALAR, 'in', 'QPE06', maxQpfVal, 0.0, 2, YES)

+QPE06Ob = ('QPE06Ob', SCALAR, 'in', 'Observed Precip', 20.0, 0.0, 2, NO)

+QPE12 = ('QPE12', SCALAR, 'in', 'QPE12', 15.0, 0.0, 2, YES)

+QPE24 = ('QPE24', SCALAR, 'in', 'QPE24', 15.0, 0.0, 2, YES)

+QPFDS = ('QPFDS', SCALAR, 'in', 'QPFDS', maxQpfVal, 0.0, 2, YES)

+QPFFcst = ('QPFFcst', SCALAR, 'in', 'Forecast Precip.', 10.0, 0.0, 2, NO)

+QPFPCECMWF = ('QPFPatternClimoECMWF', SCALAR, 'in', 'PatternClimoECMWF', maxQpfVal, 0.0, 2, NO)

+QPFPCFIM = ('QPFPatternClimoFIM', SCALAR, 'in', 'PatternClimoFIM', maxQpfVal, 0.0, 2, NO)

+QPFPCGEM = ('QPFPatternClimoGEM', SCALAR, 'in', 'PatternClimoGEM', maxQpfVal, 0.0, 2, NO)

+QPFPCGFS = ('QPFPatternClimoGFS', SCALAR, 'in', 'PatternClimoGFS', maxQpfVal, 0.0, 2, NO)

+QPFPattern1 = ('QPFNortherlyFlow', SCALAR, 'in', 'NortherlyFlow', maxQpfVal, 0.0, 2, NO)

+QPFPattern10 = ('QPFRockiesRidge', SCALAR, 'in', 'RockiesRidge', maxQpfVal, 0.0, 2, NO)

+QPFPattern11 = ('QPFSouthernFirehose', SCALAR, 'in', 'SouthernFirehose', maxQpfVal, 0.0, 2, NO)

+QPFPattern12 = ('QPFNorthernFirehose', SCALAR, 'in', 'NorthernFirehose', maxQpfVal, 0.0, 2, NO)

+QPFPattern2 = ('QPFGreatBasinLow', SCALAR, 'in', 'GreatBasinLow', maxQpfVal, 0.0, 2, NO)

+QPFPattern3 = ('QPFBroadCyclonicFlow', SCALAR, 'in', 'BroadCyclonicFlow', maxQpfVal, 0.0, 2, NO)

+QPFPattern4 = ('QPFCoastalRidge', SCALAR, 'in', 'CoastalRidge', maxQpfVal, 0.0, 2, NO)

+QPFPattern5 = ('QPFNorthwestFlow', SCALAR, 'in', 'NorthwestFlow', maxQpfVal, 0.0, 2, NO)

+QPFPattern6 = ('QPFZonalFlow', SCALAR, 'in', 'ZonalFlow', maxQpfVal, 0.0, 2, NO)

+QPFPattern7 = ('QPFBroadAntiCyclonicFlow', SCALAR, 'in', 'BroadAntiCyclonicFlow', maxQpfVal, 0.0, 2, NO)

+QPFPattern8 = ('QPFDiffluentOnshoreFlow', SCALAR, 'in', 'DiffluentOnshoreFlow', maxQpfVal, 0.0, 2, NO)

+QPFPattern9 = ('QPFSouthwestFlow', SCALAR, 'in', 'SouthwestFlow', maxQpfVal, 0.0, 2, NO)

+QPFPct = ('QPFPct', SCALAR, '%', 'QPFPct', 300.0, 0.0, 1, YES)

+QPFPctMonthlyClimo = ('QPFPctMonthlyClimo', SCALAR, '%', 'QPF Pct Monthly PRISMClimo', 200.0, 0.0, 0, NO)

+QPFRaw = ('QPFRaw', SCALAR, 'in', 'QPFRaw', maxQpfVal, 0.0, 2, YES)

+QSE06 = ('QSE06', SCALAR, 'in', 'QSE06', 100.0, 0.0, 1, YES)

+RipCurrent = ('RipCurrent', DISCRETE, 'cat', 'Rip Current', NO, ThreatKeys)

+RipCurrentIndex = ('RipCurrentIndex', SCALAR, 'ft', 'Rip Current Index', 16.0, -1.0, 1, NO)

+RipRisk = ("RipRisk", SCALAR, "none", "Rip Current Risk", 3.0, 0.0, 0, NO)

+SPC12hrLP1 = ('SPC12hrLP1', SCALAR, '%', 'SPC 12HR Lightning Probability (1)', 100.0, 0.0, 0, NO)

+SPC12hrLP10 = ('SPC12hrLP10', SCALAR, '%', 'SPC 12HR Lightning Probability (10)', 100.0, 0.0, 0, NO)

+SPC12hrLP100 = ('SPC12hrLP100', SCALAR, '%', 'SPC 12HR Lightning Probability (100)', 100.0, 0.0, 0, NO)

+SPC24hrLP1 = ('SPC24hrLP1', SCALAR, '%', 'SPC 24HR Lightning Probability (1)', 100.0, 0.0, 0, NO)

+SPC24hrLP10 = ('SPC24hrLP10', SCALAR, '%', 'SPC 24HR Lightning Probability (10)', 100.0, 0.0, 0, NO)

+SPC24hrLP100 = ('SPC24hrLP100', SCALAR, '%', 'SPC 24HR Lightning Probability (100)', 100.0, 0.0, 0, NO)

+SPC3hrLP1 = ('SPC3hrLP1', SCALAR, '%', 'SPC 3HR Lightning Probability (1)', 100.0, 0.0, 0, NO)

+SPC3hrLP10 = ('SPC3hrLP10', SCALAR, '%', 'SPC 3HR Lightning Probability (10)', 100.0, 0.0, 0, NO)

+SPC3hrLP100 = ('SPC3hrLP100', SCALAR, '%', 'SPC 3HR Lightning Probability (100)', 100.0, 0.0, 0, NO)

+SevereHail = ('SevereHail', DISCRETE, 'cat', 'Severe Hail', NO, ThreatKeys)

+SevereTstmWind = ('SevereTstmWind', DISCRETE, 'cat', 'SevereTstmWind', NO, ThreatKeys)

+SnowAmt10Prcntl = ('SnowAmt10Prcntl', SCALAR, 'in', 'min case', 50.0, 0.0, 1, NO)

+SnowAmt50Prcntl = ('SnowAmt50Prcntl', SCALAR, 'in', 'avg case', 50.0, 0.0, 1, NO)

+SnowAmt90Prcntl = ('SnowAmt90Prcntl', SCALAR, 'in', 'max case', 50.0, 0.0, 1, NO)

+SnowDepth = ('SnowDepth', SCALAR, 'in', 'Snow Depth', 50.0, 0.0, 0, NO)

+SnowRatioCLIMO = ('SnowRatioCLIMO', SCALAR, '%', 'Snow Ratio Climatology SON-DJF-MAM', 40.0, 0.0, 1, YES)

+SnowRatioGFS = ('SnowRatioGFS', SCALAR, '%', 'Snow Ratio from GFS', 40.0, 0.0, 1, YES)

+SnowRatioHPCMEAN = ('SnowRatioHPCMEAN', SCALAR, '%', 'Snow Ratio from HPC MEAN', 40.0, 0.0, 1, YES)

+SnowRatioNAM = ('SnowRatioNAM', SCALAR, '%', 'Snow Ratio from NAM40', 40.0, 0.0, 1, YES)

+T10 = ('T10', SCALAR, 'F', '10th Percentile for T', maxTempVal, minTempVal, 0, NO)

+T50 = ('T50', SCALAR, 'F', '50th Percentile for T', maxTempVal, minTempVal, 0, NO)

+T90 = ('T90', SCALAR, 'F', '90th Percentile for T', maxTempVal, minTempVal, 0, NO)

+TAloft = ('TAloft', SCALAR, 'F', 'Temperature Aloft', 120.0, -50.0, 1, NO)

+Td10 = ('Td10', SCALAR, 'F', '10th Percentile for DpT', maxTdVal, minTdVal, 0, NO)

+Td50 = ('Td50', SCALAR, 'F', '50th Percentile for DpT', maxTdVal, minTdVal, 0, NO)

+Td90 = ('Td90', SCALAR, 'F', '90th Percentile for DpT', maxTdVal, minTdVal, 0, NO)

+TdAft = ('TdAft', SCALAR, 'F', 'Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO)

+TdAftError = ('TdAftError', SCALAR, 'F', 'Afternoon Dewpoint Error', 120.0, -120.0, 0, NO)

+TdAftFcst = ('TdAftFcst', SCALAR, 'F', 'Forecast Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO)

+TdAftOb = ('TdAftOb', SCALAR, 'F', 'Observed Afternoon Dewpoint', maxTdVal, minTdVal, 0, NO)

+TdAftObs = ('TdAftObs', SCALAR, 'F', 'Afternoon Dewpoint Obs', maxTdVal, minTdVal, 0, NO)

+TdMrn = ('TdMrn', SCALAR, 'F', 'Morning Dewpoint', maxTdVal, minTdVal, 0, NO)

+TdMrnError = ('TdMrnError', SCALAR, 'F', 'Morning Dewpoint Error', 120.0, -120.0, 0, NO)

+TdMrnFcst = ('TdMrnFcst', SCALAR, 'F', 'Forecast Morning Dewpoint', maxTdVal, minTdVal, 0, NO)

+TdMrnOb = ('TdMrnOb', SCALAR, 'F', 'Observed Morning Dewpoint', maxTdVal, minTdVal, 0, NO)

+TdMrnObs = ('TdMrnObs', SCALAR, 'F', 'Morning Dewpoint Obs', maxTdVal, minTdVal, 0, NO)

+Tornado = ('Tornado', DISCRETE, 'cat', 'Tornado', NO, ThreatKeys)

+TransWindAve = ('TransWindAve', VECTOR, 'mph', 'Transport Wind Average', 125.0, 0.0, 0, NO)

+Tw = ('Tw', SCALAR, 'F', 'Surface Wet Bulb Temp', 80.0, -50.0, 0, NO)

+VentRateAve = ('VentRateAve', SCALAR, 'mph-ft', 'Vent Rate Average', 500000.0, 0.0, 0, NO)

+Visibility = ('Visibility', SCALAR, 'SM', 'Visibility', 10.0, 0.0, 2, NO)

+VisibilityConditional = ('VisibilityConditional', SCALAR, 'SM', 'Conditional Visibility', 10.0, 0.0, 2, NO)

+Vsby = ('Vsby', SCALAR, 'mi', 'Visibility', 10.0, 0.0, 2, NO)

+WG1 = ('WG1', SCALAR, 'none', 'WorkGrid1', 100.0, -100.0, 0, NO)

+WinterWx = ('WinterWx', DISCRETE, 'cat', 'Winter Weather', NO, ThreatKeys)

+

+#** Parameter sets for specific functionality

+optionalParmsDict = {}

+

+# Marine Weather Elements

+optionalParmsDict['marine']={

+    'WaveDir' : ("WaveDir", VECTOR, "m/s", "Wave Direction", 5.0, 0.0, 2, NO),

+    'WindWaveHeight' : ("WindWaveHgt", SCALAR, "ft", "Wind Wave Height", 100.0, 0.0, 0, NO),

+    'WaveHeight' : ("WaveHeight", SCALAR, "ft", "Total Wave Height", 100.0, 0.0, 0, NO),

+    'Swell' : ("Swell", VECTOR, "ft", "Primary Swell", 100.0, 0.0, 0, NO),

+    'Swell2' : ("Swell2", VECTOR, "ft", "Secondary Swell", 100.0, 0.0, 0, NO),

+    'Period' : ("Period", SCALAR, "sec", "Primary Period", 30.0, 0.0, 0, NO),

+    'IceCoverage' : ("IceCoverage", SCALAR, "%", "Ice Coverage Amount", 100.0, 0.0, 0, NO),

+    'SurfHeight' : ("SurfHeight", SCALAR, "ft", "Total Wave Height", 100.0, 0.0, 0, NO),

+    ##########DCS3499

+    'SigWaveHgt' : ("SigWaveHgt", SCALAR, "ft",

+                    "Significant wave height of combined wind waves and swells",

+                    30.0, 0.0, 0, NO),

+    'PeakWaveDir' : ("PeakWaveDir", VECTOR, "degree", "Direction of peak wave", 100.0, 0.0, 0, NO),

+    'WindWaveHgt' : ("WindWaveHgt", SCALAR, "ft", "Significant wave height of wind waves", 30.0, 0.0, 0, NO),

+    'WindWavePeriod' : ("WindWavePeriod", SCALAR, "sec.", "Wind wave peak period", 20.0, 0.0, 0, NO),

+    'WindWaveDir' : ("WindWaveDir", VECTOR, "degree", "Direction of wind waves", 100.0, 0.0, 0, NO),

+    'NWPSwind' : ("NWPSwind", VECTOR, "kts", "NWPSwind", 150.0, 0.0, 0, NO),

+    'UWaveDir' : ("UWaveDir", SCALAR, "m/s", "U WaveDir Comp", 0.50, -0.50, 3, NO),

+    'VWaveDir' : ("VWaveDir", SCALAR, "m/s", "V WaveDir Comp", 0.50, -0.50, 3, NO),

+    'SwanSwell' : ("SwanSwell", SCALAR, "ft", "Total Significant Swell Height", 40.0, 0.0, 2, NO),

+    'SST' : ("SST", SCALAR, "F", "Sea Sfc Temp", 100.0, 25.0, 0, NO),

+    'StormTide' : ('StormTide', SCALAR, 'ft', 'Storm Tide', 30.0, -8.0, 1, NO),

+    #Fcst Grids - for partitioned wave groups

+    'Wave1' : ("Wave1", VECTOR, "ft", "WAVE1", 50.0, 0.0, 0, NO),

+    'Wave2' : ("Wave2", VECTOR, "ft", "WAVE2", 50.0, 0.0, 0, NO),

+    'Wave3' : ("Wave3", VECTOR, "ft", "WAVE3", 50.0, 0.0, 0, NO),

+    'Wave4' : ("Wave4", VECTOR, "ft", "WAVE4", 50.0, 0.0, 0, NO),

+    'Wave5' : ("Wave5", VECTOR, "ft", "WAVE5", 50.0, 0.0, 0, NO),

+    'Wave6' : ("Wave6", VECTOR, "ft", "WAVE6", 50.0, 0.0, 0, NO),

+    'Wave7' : ("Wave7", VECTOR, "ft", "Wave7", 50.0, 0.0, 0, NO),

+    'Wave8' : ("Wave8", VECTOR, "ft", "Wave8", 50.0, 0.0, 0, NO),

+    'Wave9' : ("Wave9", VECTOR, "ft", "Wave9", 50.0, 0.0, 0, NO),

+    #Fcst Grids - for partitioned wave groups

+    'Period1' : ("Period1", SCALAR, "sec", "Period1", 30.0, 0.0, 0, NO),

+    'Period2' : ("Period2", SCALAR, "sec", "Period2", 30.0, 0.0, 0, NO),

+    'Period3' : ("Period3", SCALAR, "sec", "Period3", 30.0, 0.0, 0, NO),

+    'Period4' : ("Period4", SCALAR, "sec", "Period4", 30.0, 0.0, 0, NO),

+    'Period5' : ("Period5", SCALAR, "sec", "Period5", 30.0, 0.0, 0, NO),

+    'Period6' : ("Period6", SCALAR, "sec", "Period6", 30.0, 0.0, 0, NO),

+    'Period7' : ("Period7", SCALAR, "sec", "Period7", 30.0, 0.0, 0, NO),

+    'Period8' : ("Period8", SCALAR, "sec", "Period8", 30.0, 0.0, 0, NO),

+    'Period9' : ("Period9", SCALAR, "sec", "Period9", 30.0, 0.0, 0, NO),

+    'RipProb' : ("RipProb", SCALAR, "%", "Rip Current Probability", 100.0, 0.0, 0, NO),

+    'ErosionProb' : ("ErosionProb", SCALAR, "%", "Dune Erosion Probability", 100.0, 0.0, 0, NO),

+    'OverwashProb' : ("OverwashProb", SCALAR, "%", "Dune Overwash Probability", 100.0, 0.0, 0, NO)

+}

+if SID in groups['GreatLake_SITES']:

+    #  Redefine the WaveHeight field to include a decimal point

+    optionalParmsDict['marine'].update({'WaveHeight' :

+                 ("WaveHeight", SCALAR, "ft", "Wave Height", 40.0, 0.0, 1, NO)})

+

+# Parameter set for Probability of weather type, Optional for sites.

+optionalParmsDict['powt']={

+     'PoTBD': ('PotBlowingDust', SCALAR, '%', 'Prob of Blowing Dust', 100.0, 0.0, 0, NO),

+     'PoTBN': ('PotBlowingSand', SCALAR, '%', 'Prob of Blowing Sand', 100.0, 0.0, 0, NO),

+     'PoTBS': ('PotBlowingSnow', SCALAR, '%', 'Prob of Blowing Snow', 100.0, 0.0, 0, NO),

+     'PoTF': ('PotFog', SCALAR, '%', 'Prob of Fog', 100.0, 0.0, 0, NO),

+     'PoTFR': ('PotFrost', SCALAR, '%', 'Prob of Frost', 100.0, 0.0, 0, NO),

+     'PoTFl': ('PotFlurries', SCALAR, '%', 'Prob of Flurries', 100.0, 0.0, 0, NO),

+     'PoTH': ('PotHaze', SCALAR, '%', 'Prob of Haze', 100.0, 0.0, 0, NO),

+     'PoTIC': ('PotIceCrystals', SCALAR, '%', 'Prob of Ice Crystals', 100.0, 0.0, 0, NO),

+     'PoTIF': ('PotIceFog', SCALAR, '%', 'Prob of Ice Fog', 100.0, 0.0, 0, NO),

+     'PoTIP': ('PotSleet', SCALAR, '%', 'Prob of Sleet', 100.0, 0.0, 0, NO),

+     'PoTK': ('PotSmoke', SCALAR, '%', 'Prob of Smoke', 100.0, 0.0, 0, NO),

+     'PoTL': ('PotDrizzle', SCALAR, '%', 'Prob of Drizzle', 100.0, 0.0, 0, NO),

+     'PoTR': ('PotRain', SCALAR, '%', 'Prob of Rain', 100.0, 0.0, 0, NO),

+     'PoTRW': ('PotRainShowers', SCALAR, '%', 'Prob of Rain Showers', 100.0, 0.0, 0, NO),

+     'PoTS': ('PotSnow', SCALAR, '%', 'Prob of Snow', 100.0, 0.0, 0, NO),

+     'PoTSW': ('PotSnowShowers', SCALAR, '%', 'Prob of Snow Showers', 100.0, 0.0, 0, NO),

+     'PoTSp': ('PotSprinkles', SCALAR, '%', 'Prob of Sprinkles', 100.0, 0.0, 0, NO),

+     'PoTSvr': ('PotSevere', SCALAR, '%', 'Prob of Severe Storms', 100.0, 0.0, 0, NO),

+     'PoTT': ('PotThunder', SCALAR, '%', 'Prob of Thunder', 100.0, 0.0, 0, NO),

+     'PoTVA': ('PotVolcanicAsh', SCALAR, '%', 'Prob of Volcanic Ash', 100.0, 0.0, 0, NO),

+     'PoTWP': ('PotWaterspout', SCALAR, '%', 'Prob of Waterspout', 100.0, 0.0, 0, NO),

+     'PoTZF': ('PotFreezingFog', SCALAR, '%', 'Prob of Freezing Fog', 100.0, 0.0, 0, NO),

+     'PoTZL': ('PotFreezingDrizzle', SCALAR, '%', 'Prob of Freezing Drizzle', 100.0, 0.0, 0, NO),

+     'PoTZR': ('PotFreezingRain', SCALAR, '%', 'Prob of Freezing Rain', 100.0, 0.0, 0, NO),

+     'PoTZY': ('PotFreezingSpray', SCALAR, '%', 'Prob of Freezing Spray', 100.0, 0.0, 0, NO),

+     'PoTHZY': ('PotHeavyFreezingSpray', SCALAR, '%', 'Prob of Heavy Freezing Spray', 100.0, 0.0, 0, NO),

+     'RoadTemp' : ("RoadTemp", SCALAR, "F", "Road Temperature", 120.0, -50.0, 0, NO),

+     'MaxTwAloft' : ("MaxTwAloft", SCALAR, 'C', 'Max Wet-Bulb Temp in Warm Nose', 40.0, -20.0, 1, NO),

+     'ProbIcePresent': ("ProbIcePresent", SCALAR, "%", "Prob of Ice Present", 100.0, 0.0, 0, NO),

+     'ProbRefreezeSleet': ("ProbRefreezeSleet", SCALAR, "%", "Prob of Refreeze into Sleet", 100.0, 0.0, 0, NO),

+     'SleetAmt': ("SleetAmt", SCALAR, "in", "Sleet Accumulation", 5.0, 0.0, 1, YES),

+     'IceFlatAcc': ('IceFlatAccum', SCALAR, 'in', 'Flat Ice Accumulation', maxIceVal, 0.0, 2, YES),

+     'IceLineAcc': ('IceLineAccum', SCALAR, 'in', 'Line Ice Accumulation', maxIceVal, 0.0, 2, YES),

+}

+

+# Parameter set for Winter Weather probabilities, Optional for sites.

+#****** Winter 2017 changes

+optionalParmsDict['winterProbs']={

+    # Storm Total Snow related

+    'StormTotalSnowWPC' : ("StormTotalSnowWPC", SCALAR, "in","WPC Storm Total Snow", 50.0, 0.0, 1, NO),

+

+    # Snow Percentiles

+    'SnowAmt5Prcntl' : ("SnowAmt5Prcntl", SCALAR, "in","5 percentile", 100.0, -40.0, 1, NO),

+    'SnowAmt10Prcntl' : ("SnowAmt10Prcntl", SCALAR, "in","10 percentile", 100.0, -40.0, 1, NO),

+    'SnowAmt25Prcntl' : ("SnowAmt25Prcntl", SCALAR, "in","25 percentile", 100.0, -40.0, 1, NO),

+    'SnowAmt50Prcntl' : ("SnowAmt50Prcntl", SCALAR, "in","50 percentile", 100.0, -40.0, 1, NO),

+    'SnowAmt75Prcntl' : ("SnowAmt75Prcntl", SCALAR, "in","75 percentile", 100.0, -40.0, 1, NO),

+    'SnowAmt90Prcntl' : ("SnowAmt90Prcntl", SCALAR, "in","90 percentile", 100.0, -40.0, 1, NO),

+    'SnowAmt95Prcntl' : ("SnowAmt95Prcntl", SCALAR, "in","95 percentile", 100.0, -40.0, 1, NO),

+

+    # Snow Exceedance Probabilities (Add others as needed)

+    'ProbSnowGET' : ("ProbSnowGET", SCALAR, "%", "Prob. snow >= trace", 100.0, 0.0, 0, NO),

+    'ProbSnowGE1' : ("ProbSnowGE1", SCALAR, "%", "Prob. snow >= 1 inch", 100.0, 0.0, 0, NO),

+    'ProbSnowGE2' : ("ProbSnowGE2", SCALAR, "%", "Prob. snow >= 2 inches", 100.0, 0.0, 0, NO),

+    'ProbSnowGE4' : ("ProbSnowGE4", SCALAR, "%", "Prob. snow >= 4 inches", 100.0, 0.0, 0, NO),

+    'ProbSnowGE6' : ("ProbSnowGE6", SCALAR, "%", "Prob. snow >= 6 inches", 100.0, 0.0, 0, NO),

+    'ProbSnowGE8' : ("ProbSnowGE8", SCALAR, "%", "Prob. snow >= 8 inches", 100.0, 0.0, 0, NO),

+    'ProbSnowGE12' : ("ProbSnowGE12", SCALAR, "%", "Prob. snow >= 12 inches", 100.0, 0.0, 0, NO),

+    'ProbSnowGE18' : ("ProbSnowGE18", SCALAR, "%", "Prob. snow >= 18 inches", 100.0, 0.0, 0, NO),

+

+    # Freezing Rain Percentiles

+    'IceAccum5Prcntl' : ("IceAccum5Prcntl", SCALAR, "in","5 percentile", 5.0, -4.0, 2, NO),

+    'IceAccum10Prcntl' : ("IceAccum10Prcntl", SCALAR, "in","10 percentile", 5.0, -4.0, 2, NO),

+    'IceAccum25Prcntl' : ("IceAccum25Prcntl", SCALAR, "in","25 percentile", 5.0, -4.0, 2, NO),

+    'IceAccum50Prcntl' : ("IceAccum50Prcntl", SCALAR, "in","50 percentile", 5.0, -4.0, 2, NO),

+    'IceAccum75Prcntl' : ("IceAccum75Prcntl", SCALAR, "in","75 percentile", 5.0, -4.0, 2, NO),

+    'IceAccum90Prcntl' : ("IceAccum90Prcntl", SCALAR, "in","90 percentile", 5.0, -4.0, 2, NO),

+    'IceAccum95Prcntl' : ("IceAccum95Prcntl", SCALAR, "in","95 percentile", 5.0, -4.0, 2, NO),

+

+    # Freezing rain accretion probabilities

+    'ProbIceGE001' : ("ProbIceGE001", SCALAR, "%", "Prob. ice >= 0.01", 100.0, 0.0, 0, NO),

+    'ProbIceGE010' : ("ProbIceGE010", SCALAR, "%", "Prob. ice >= 0.10", 100.0, 0.0, 0, NO),

+    'ProbIceGE025' : ("ProbIceGE025", SCALAR, "%", "Prob. ice >= 0.25", 100.0, 0.0, 0, NO),

+    'ProbIceGE050' : ("ProbIceGE050", SCALAR, "%", "Prob. ice >= 0.50", 100.0, 0.0, 0, NO),

+

+# Persist WPC snow prob grids

+    'SnowAmt5PrcntlWPC' : ("SnowAmt5PrcntlWPC", SCALAR, "in","WPC 5th percentile snow amount", 100.0, -40.0, 1, NO),

+    'SnowAmt10PrcntlWPC' : ("SnowAmt10PrcntlWPC", SCALAR, "in","WPC 10th percentile snow amount", 100.0, -40.0, 1, NO),

+    'SnowAmt25PrcntlWPC' : ("SnowAmt25PrcntlWPC", SCALAR, "in","WPC 25th percentile snow amount", 100.0, -40.0, 1, NO),

+    'SnowAmt50PrcntlWPC' : ("SnowAmt50PrcntlWPC", SCALAR, "in","WPC 50th percentile snow amount", 100.0, -40.0, 1, NO),

+    'SnowAmt75PrcntlWPC' : ("SnowAmt75PrcntlWPC", SCALAR, "in","WPC 75th percentile snow amount", 100.0, -40.0, 1, NO),

+    'SnowAmt90PrcntlWPC' : ("SnowAmt90PrcntlWPC", SCALAR, "in","WPC 90th percentile snow amount", 100.0, -40.0, 1, NO),

+    'SnowAmt95PrcntlWPC' : ("SnowAmt95PrcntlWPC", SCALAR, "in","WPC 95th percentile snow amount", 100.0, -40.0, 1, NO),

+    'ProbSnowGETWPC' : ("ProbSnowGETWPC", SCALAR, "%", "WPC Prob. snow >= trace", 100.0, 0.0, 0, NO),

+    'ProbSnowGE1WPC' : ("ProbSnowGE1WPC", SCALAR, "%", "WPC Prob. snow >= 1 in", 100.0, 0.0, 0, NO),

+    'ProbSnowGE2WPC' : ("ProbSnowGE2WPC", SCALAR, "%", "WPC Prob. snow >= 2 in", 100.0, 0.0, 0, NO),

+    'ProbSnowGE4WPC' : ("ProbSnowGE4WPC", SCALAR, "%", "WPC Prob. snow >= 4 in", 100.0, 0.0, 0, NO),

+    'ProbSnowGE6WPC' : ("ProbSnowGE6WPC", SCALAR, "%", "WPC Prob. snow >= 6 in", 100.0, 0.0, 0, NO),

+    'ProbSnowGE8WPC' : ("ProbSnowGE8WPC", SCALAR, "%", "WPC Prob. snow >= 8 in", 100.0, 0.0, 0, NO),

+    'ProbSnowGE12WPC' : ("ProbSnowGE12WPC", SCALAR, "%", "WPC Prob. snow >= 12 in", 100.0, 0.0, 0, NO),

+    'ProbSnowGE18WPC' : ("ProbSnowGE18WPC", SCALAR, "%", "WPC Prob. snow >= 18 in", 100.0, 0.0, 0, NO),

+}

+

+# Add rainfall probability definitions

+optionalParmsDict['rainfallProb']={

+    # Rain Percentiles

+    'QPF5Prcntl' : ("QPF5Prcntl", SCALAR, "in","5 percentile", 36.0, -24.0, 2, NO),

+    'QPF10Prcntl' : ("QPF10Prcntl", SCALAR, "in","10 percentile", 36.0, -24.0, 2, NO),

+    'QPF25Prcntl' : ("QPF25Prcntl", SCALAR, "in","25 percentile", 36.0, -24.0, 2, NO),

+    'QPF50Prcntl' : ("QPF50Prcntl", SCALAR, "in","50 percentile", 36.0, -24.0, 2, NO),

+    'QPF75Prcntl' : ("QPF75Prcntl", SCALAR, "in","75 percentile", 36.0, -24.0, 2, NO),

+    'QPF90Prcntl' : ("QPF90Prcntl", SCALAR, "in","90 percentile", 36.0, -24.0, 2, NO),

+    'QPF95Prcntl' : ("QPF95Prcntl", SCALAR, "in","95 percentile", 36.0, -24.0, 2, NO),

+

+    # Rain Exceedance Probabilities (Add others as needed)

+    'ProbRainGE001' : ("ProbRainGE001", SCALAR, "%", "Prob. Rain >= 0.01 in", 100.0, 0.0, 0, NO),

+    'ProbRainGE010' : ("ProbRainGE010", SCALAR, "%", "Prob. Rain >= 0.10 in", 100.0, 0.0, 0, NO),

+    'ProbRainGE025' : ("ProbRainGE025", SCALAR, "%", "Prob. Rain >= 0.25 in", 100.0, 0.0, 0, NO),

+    'ProbRainGE050' : ("ProbRainGE050", SCALAR, "%", "Prob. Rain >= 0.50 in", 100.0, 0.0, 0, NO),

+    'ProbRainGE075' : ("ProbRainGE075", SCALAR, "%", "Prob. Rain >= 0.75 in", 100.0, 0.0, 0, NO),

+    'ProbRainGE100' : ("ProbRainGE100", SCALAR, "%", "Prob. Rain >= 1.00 in", 100.0, 0.0, 0, NO),

+    'ProbRainGE150' : ("ProbRainGE150", SCALAR, "%", "Prob. Rain >= 1.50 in", 100.0, 0.0, 0, NO),

+    'ProbRainGE200' : ("ProbRainGE200", SCALAR, "%", "Prob. Rain >= 2.00 in", 100.0, 0.0, 0, NO),

+    'ProbRainGE250' : ("ProbRainGE250", SCALAR, "%", "Prob. Rain >= 2.50 in", 100.0, 0.0, 0, NO),

+    'ProbRainGE300' : ("ProbRainGE300", SCALAR, "%", "Prob. Rain >= 3.00 in", 100.0, 0.0, 0, NO),

+}

+

+

+# Make all optional parms available as variables.

+for optionalParmKey in optionalParmsDict:

+    for pname,parm in optionalParmsDict[optionalParmKey].items():

+        setattr(sys.modules[__name__],pname,parm)

+

+#-----------------------------------

+# DO NOT CHANGE THE FOLLOWING SECTION

+#------------------------------------

+if not BASELINE and siteImport('localWxConfig'):

+    types = localWxConfig.types

+

+

+#---------------------------------------------------------------------------

+#

+#  Projection Configuration section.

+#

+#---------------------------------------------------------------------------

+from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData

+ProjectionType = ProjectionData.ProjectionType

+NONE = ProjectionType.NONE

+LAMBERT_CONFORMAL = ProjectionType.LAMBERT_CONFORMAL

+MERCATOR = ProjectionType.MERCATOR

+POLAR_STEREOGRAPHIC = ProjectionType.POLAR_STEREOGRAPHIC

+LATLON = ProjectionType.LATLON

+

+# projectionID / projectionType / latLonLL / latLonUR /

+# latLonOrigin / stdParallelOne / stdParallelTwo / gridPointLL / gridPointUR

+# latIntersect / lonCenter / lonOrigin

+

+Grid201 = ('Grid201',POLAR_STEREOGRAPHIC,

+      (-150.00, -20.826), (-20.90846, 30.0),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (65, 65), 0.0, 0.0, -105.0)

+

+Grid202 = ('Grid202', POLAR_STEREOGRAPHIC,

+      (-141.028, 7.838), (-18.576, 35.617),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (65, 43), 0.0, 0.0, -105.0)

+

+Grid203 = ('Grid203', POLAR_STEREOGRAPHIC,

+      (-185.837, 19.132), (-53.660, 57.634),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (45, 39), 0.0, 0.0, -150.0)

+

+Grid204 = ('Grid204', MERCATOR,

+      (-250.0, -25.0), (-109.129, 60.644),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (93, 68), 0.0, -179.564, 0.0)

+

+Grid205 = ('Grid205', POLAR_STEREOGRAPHIC,

+      (-84.904, 0.616), (-15.000, 45.620),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (45, 39), 0.0, 0.0, -60.0)

+

+Grid206 = ('Grid206', LAMBERT_CONFORMAL,

+      (-117.991, 22.289), (-73.182, 51.072),

+      (-95.0, 25.0), 25.0, 25.0, (1, 1), (51, 41), 0.0, 0.0, 0.0)

+

+Grid207 = ('Grid207', POLAR_STEREOGRAPHIC,

+      (-175.641, 42.085), (-93.689, 63.976),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (49, 35), 0.0, 0.0, -150.0)

+

+Grid208 = ('Grid208', MERCATOR,

+      (-166.219, 10.656), (-147.844, 27.917),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (25, 25), 0.0, -157.082, 0.0)

+

+Grid209 = ('Grid209', LAMBERT_CONFORMAL,

+      (-117.991, 22.289), (-73.182, 51.072),

+      (-95.0, 25.0), 25.0, 25.0, (1, 1), (101, 81), 0.0, 0.0, 0.0)

+

+Grid210 = ('Grid210', MERCATOR,

+      (-77.000, 9.000), (-58.625, 26.422),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (25, 25), 0.0, -67.812, 0.0)

+

+Grid211 = ('Grid211', LAMBERT_CONFORMAL,

+      (-133.459, 12.190), (-49.385, 57.290),

+      (-95.0, 25.0), 25.0, 25.0, (1, 1), (93, 65), 0.0, 0.0, 0.0)

+

+Grid212 = ('Grid212', LAMBERT_CONFORMAL,

+      (-133.459, 12.190), (-49.385, 57.290),

+      (-95.0, 25.0), 25.0, 25.0, (1, 1), (185, 129), 0.0, 0.0, 0.0)

+

+Grid213 = ('Grid213', POLAR_STEREOGRAPHIC,

+      (-141.028, 7.838), (-18.577, 35.617),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (129, 85), 0.0, 0.0, -105.0)

+

+Grid214 = ('Grid214', POLAR_STEREOGRAPHIC,

+      (-175.641, 42.085), (-93.689, 63.975),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (97, 69), 0.0, 0.0, -150.0)

+

+# (new alaska grid)

+Grid214AK = ('Grid214AK', POLAR_STEREOGRAPHIC,

+             (-178.571, 40.5301), (-93.689, 63.975),

+             (0.0, 0.0), 0.0, 0.0, (1,1), (104, 70), 0.0, 0.0, -150.0)

+

+Grid215 = ('Grid215', LAMBERT_CONFORMAL,

+      (-133.459, 12.190), (-49.385, 57.290),

+      (-95.0, 25.0), 25.0, 25.0, (1, 1), (369, 257), 0.0, 0.0, 0.0)

+

+Grid216 = ('Grid216', POLAR_STEREOGRAPHIC,

+      (-173.000, 30.000), (-62.850, 70.111),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (139, 107), 0.0, 0.0, -135.0)

+

+Grid217 = ('Grid217', POLAR_STEREOGRAPHIC,

+      (-173.000, 30.000), (-62.850, 70.111),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (277, 213), 0.0, 0.0, -135.0)

+

+Grid218 = ('Grid218', LAMBERT_CONFORMAL,

+      (-133.459, 12.190), (-49.385, 57.290),

+      (-95.0, 25.0), 25.0, 25.0, (1, 1), (614, 428), 0.0, 0.0, 0.0)

+

+Grid219 = ('Grid219', POLAR_STEREOGRAPHIC,

+      (-119.559, 25.008), (60.339, 24.028),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (385, 465), 0.0, 0.0, -80.0)

+

+Grid221 = ('Grid221', LAMBERT_CONFORMAL,

+      (-145.500, 1.000), (-2.566, 46.352),

+      (-107.0, 50.0), 50.0, 50.0, (1, 1), (349, 277), 0.0, 0.0, 0.0)

+

+Grid222 = ('Grid222', LAMBERT_CONFORMAL,

+      (-145.500, 1.000), (-2.566, 46.352),

+      (-107.0, 50.0), 50.0, 50.0, (1, 1), (59, 47), 0.0, 0.0, 0.0)

+

+Grid225 = ('Grid225', MERCATOR,

+      (-250.0, -25.0), (-109.129, 60.644),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (185, 135), 0.0, -179.564, 0.0)

+

+Grid226 = ('Grid226', LAMBERT_CONFORMAL,

+      (-133.459, 12.190), (-49.385, 57.290),

+      (-95.0, 25.0), 25.0, 25.0, (1, 1), (737, 513), 0.0, 0.0, 0.0)

+

+Grid227 = ('Grid227', LAMBERT_CONFORMAL,

+      (-133.459, 12.190), (-49.385, 57.290),

+      (-95.0, 25.0), 25.0, 25.0, (1, 1), (1473, 1025), 0.0, 0.0, 0.0)

+

+Grid228 = ('Grid228', LATLON,

+      (0.0, 90.0), (359.0, -90.0), (0.0, 0.0), 0.0, 0.0,

+      (1, 1), (144, 73), 0.0, 0.0, 0.0)

+

+Grid229 = ('Grid229', LATLON,

+      (0.0, 90.0), (359.0, -90.0),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (360, 181), 0.0, 0.0, 0.0)

+

+Grid230 = ('Grid230', LATLON,

+      (0.0, 90.0), (359.5, -90.0),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 361), 0.0, 0.0, 0.0)

+

+Grid231 = ('Grid231', LATLON,

+      (0.0, 0.0), (359.5, 90.0),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 181), 0.0, 0.0, 0.0)

+

+Grid232 = ('Grid232', LATLON,

+      (0.0, 0.0), (359.0, 90.0),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (360, 91), 0.0, 0.0, 0.0)

+

+Grid233 = ('Grid233', LATLON,

+      (0.0, -78.0), (358.750, 78.0),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (288, 157), 0.0, 0.0, 0.0)

+

+Grid234 = ('Grid234', LATLON,

+      (-98.000, 15.0), (-65.000, -45.0),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (133, 121), 0.0, 0.0, 0.0)

+

+Grid235 = ('Grid235', LATLON,

+      (0.250, 89.750), (359.750, -89.750),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (720, 360), 0.0, 0.0, 0.0)

+

+HRAP = ('HRAP', POLAR_STEREOGRAPHIC,

+      (-119.036, 23.097), (-75.945396, 53.480095),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (801, 881), 0.0, 0.0, -105.0)

+

+NDFD_Oceanic_10K = ('NDFD_Oceanic_10km', MERCATOR,

+      (-230.094, -30.4192), (10.71, 80.01),

+      (0.0, 0.0), 0.0, 0.0, (1, 1), (2517, 1793), 0.0, -109.962, 0.0)

+

+#  Add a new domain for NHC purposes

+GridForNHA = ('GridForNHA', LAMBERT_CONFORMAL,

+      (-103.929, 20.164), (-50.8894, 42.9545),

+      (-95.0, 35.0), 35.0, 35.0, (1, 1), (1833,1241), 0.0, 0.0, 0.0)

+

+# list of all projections

+allProjections = [Grid201, Grid202, Grid203, Grid204, Grid205, Grid206,

+ Grid207, Grid208, Grid209, Grid210, Grid211, Grid212, Grid213, Grid214,

+ Grid214AK, Grid215, Grid216, Grid217, Grid218, Grid219, Grid221, Grid222,

+ Grid225, Grid226, Grid227, Grid228, Grid229, Grid230, Grid231, Grid232,

+ Grid233, Grid234, Grid235, HRAP, NDFD_Oceanic_10K, GridForNHA]

+

+#---------------------------------------------------------------------------

+#

+#  Grid Domain configuration section

+#

+#---------------------------------------------------------------------------

+#

+# xdim/ydim:  Defines the dimensions of the grids. (GFE grid size)

+#

+# origin:  Defines the lower-left corner of the grid (point 0,0) in

+#   world coordinates.

+#

+# extent:  Defines the "size" of the grid in world coordinates.  The upper

+#   right corner is the origin+extent.

+#

+# TimeZone: Defines the timezone used by this site in standard TZ format.

+# Refer to /usr/share/zoneinfo/zone.tab for the correct settings.

+#

+# Projection:  Defines the projection identifier to be used for this domain.

+

+# Note that all parameters for an existing database must use the same

+# projection, though not necessarily the same grid size and location.

+

+# These values are set up for AWIPS.  There is a script at the end

+# of this section that adjusts the resolution for the RPP sites.

+

+#         [xdim, ydim] / (origin) /( extent)  / TimeZone / Projection / OfficeType

+

+SITES = {

+#WFOs

+    # Experimental combined AFC site

+    'AFC' : ([1057, 449], (1.0, 19.00),  (66.0, 28.0), 'America/Anchorage', Grid214AK, "wfo"),

+    'ABQ' : ([145, 145], (36.00, 22.00), (9.0, 9.0), 'MST7MDT', Grid211,"wfo"),

+    'ABR' : ([145, 145], (45.00, 35.00), (9.0, 9.0), 'CST6CDT', Grid211,"wfo"),

+    'AER' : ([369, 337], (44.00, 23.00), (23.0, 21.0), 'America/Anchorage', Grid214AK, "wfo"),

+    'AFG' : ([641, 497], (27.0, 38.0),   (40.0, 31.0), 'America/Anchorage', Grid214AK, "wfo"),

+    'AJK' : ([337, 241], (62.0, 23.0),   (21.0, 15.0), 'America/Juneau', Grid214AK, "wfo"),

+    'AKQ' : ([145, 145], (68.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'ALU' : ([865, 449], (1.0, 19.0),    (54.0, 28.0), 'America/Anchorage', Grid214AK, "wfo"),

+    'ALY' : ([145, 145], (70.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'AMA' : ([145, 145], (41.00, 21.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'APX' : ([145, 145], (58.00, 34.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'ARX' : ([145, 145], (52.00, 33.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'BGM' : ([145, 145], (68.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'BIS' : ([145, 145], (43.00, 37.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'BMX' : ([145, 145], (58.00, 19.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'BOI' : ([177, 177], (25.00, 34.00), (11.0, 11.0), 'MST7MDT', Grid211, "wfo"),

+    'BOU' : ([145, 145], (38.00, 27.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'BOX' : ([187, 154], (75.375,34.59375), (5.8125,4.78125), "EST5EDT", Grid211, "wfo"),

+    'BRO' : ([145, 145], (44.00, 10.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'BTV' : ([193, 157], (72.00, 37.15), (6.0, 4.875), 'EST5EDT', Grid211, "wfo"),

+    'BUF' : ([145, 145], (66.00, 32.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'BYZ' : ([145, 145], (36.00, 37.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'CAE' : ([145, 145], (65.00, 20.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'CAR' : ([145, 145], (75.00, 39.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'CHS' : ([145, 145], (65.00, 18.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'CLE' : ([145, 145], (62.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'CRP' : ([145, 145], (45.00, 11.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'CTP' : ([145, 145], (67.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'CYS' : ([145, 145], (37.00, 31.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'DDC' : ([145, 145], (43.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'DLH' : ([145, 145], (50.00, 37.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'DMX' : ([145, 145], (49.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'DTX' : ([161, 161], (57.00, 34.00), (10.0, 10.0), 'EST5EDT', Grid211, "wfo"),

+    'DVN' : ([145, 145], (52.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'EAX' : ([145, 145], (50.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'EKA' : ([145, 145], (20.00, 31.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'EPZ' : ([145, 145], (36.00, 16.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'EWX' : ([145, 145], (44.00, 12.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'FFC' : ([145, 145], (61.00, 18.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'FGF' : ([145, 145], (45.00, 39.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'FGZ' : ([145, 145], (29.00, 23.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"),

+    'FSD' : ([177, 177], (43.00, 32.00), (11.0, 11.0), 'CST6CDT', Grid211, "wfo"),

+    'FWD' : ([145, 145], (45.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'GGW' : ([145, 145], (36.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'GID' : ([145, 145], (44.00, 28.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'GJT' : ([145, 145], (34.00, 27.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'GLD' : ([145, 145], (41.00, 26.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'GRB' : ([145, 145], (54.00, 35.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'GRR' : ([145, 145], (58.00, 33.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'GSP' : ([145, 145], (63.00, 21.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'GUM' : ([193, 193], (23.0, 26.0), (3.0, 3.0), 'Pacific/Guam', Grid204, "wfo"),

+    'GYX' : ([193,209],  (76.00, 37.375), (6.0, 6.5), 'EST5EDT', Grid211, "wfo"),

+    'HFO' : ([321, 225], (58.78125,29.875),(5.0,3.5), 'Pacific/Honolulu', Grid204, 'wfo'),

+    'HGX' : ([145, 145], (48.00, 13.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'HNX' : ([145, 145], (22.00, 24.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'HUN' : ([161, 161], (60.0, 22.0),   (5.0, 5.0), 'CST6CDT', Grid211, "wfo"),

+    'ICT' : ([145, 145], (45.00, 25.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'ILM' : ([145, 145], (67.00, 21.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'ILN' : ([145, 145], (60.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'ILX' : ([145, 145], (55.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'IND' : ([145, 145], (58.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'IWX' : ([145, 145], (58.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'JAN' : ([145, 145], (54.00, 18.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'JAX' : ([145, 145], (64.00, 14.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'JKL' : ([145, 145], (61.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'KEY' : ([145, 145], (66.00, 8.00),  (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'LBF' : ([145, 145], (43.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'LCH' : ([145, 145], (52.00, 15.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'LIX' : ([145, 145], (54.00, 14.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'LKN' : ([145, 145], (25.00, 30.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'LMK' : ([145, 145], (59.00, 25.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'LOT' : ([145, 145], (55.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'LOX' : ([145, 145], (21.00, 23.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'LSX' : ([145, 145], (52.00, 25.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'LUB' : ([145, 145], (39.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'LWX' : ([145, 145], (67.00, 27.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'LZK' : ([145, 145], (51.00, 20.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'MAF' : ([205,247],  (40.375, 16.8125), (6.375, 7.6875), 'CST6CDT', Grid211, "wfo"),

+    'MEG' : ([145, 145], (54.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'MFL' : ([145, 145], (66.00, 9.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'MFR' : ([145, 145], (20.00, 34.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'MHX' : ([145, 145], (68.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'MKX' : ([145, 145], (55.00, 33.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'MLB' : ([145, 145], (66.00, 12.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'MOB' : ([145, 145], (57.00, 16.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'MPX' : ([145, 145], (50.00, 34.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'MQT' : ([145, 145], (56.00, 36.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'MRX' : ([145, 145], (61.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'MSO' : ([145, 145], (29.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'MTR' : ([145, 145], (20.00, 26.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'OAX' : ([145, 145], (45.00, 30.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'OHX' : ([145, 145], (58.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'OKX' : ([145, 145], (71.00, 30.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'OTX' : ([145, 145], (25.00, 40.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'OUN' : ([145, 145], (44.00, 21.00), (9.0, 9.0), 'CST6CDT',  Grid211, "wfo"),

+    'PAH' : ([145, 145], (56.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'PBZ' : ([145, 145], (65.00, 29.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'PDT' : ([145, 145], (23.00, 38.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'PHI' : ([145, 145], (70.00, 28.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'PIH' : ([145, 145], (30.00, 34.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'PQR' : ([145, 145], (19.00, 38.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'PSR' : ([145, 145], (28.00, 20.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"),

+    'PUB' : ([145, 145], (38.00, 26.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'RAH' : ([145, 145], (66.00, 22.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'REV' : ([145, 145], (23.00, 29.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'RIW' : ([145, 145], (35.00, 33.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'RLX' : ([145, 145], (63.00, 26.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'RNK' : ([161, 161], (67.0,  26.00), (5.0, 5.0), 'EST5EDT', Grid211, 'wfo'),

+    'SEW' : ([145, 145], (21.00, 42.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'SGF' : ([145, 145], (51.00, 24.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'SGX' : ([145, 145], (24.00, 21.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'SHV' : ([145, 145], (50.00, 17.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'SJT' : ([145, 145], (43.00, 16.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'SJU' : ([32, 28], (10.0, 10.0), (8.0, 7.0), 'America/Puerto_Rico',Grid210, "wfo"),

+    'SLC' : ([161, 161], (30.00, 28.00), (10.0, 10.0), 'MST7MDT', Grid211, "wfo"),

+    'STO' : ([145, 145], (20.00, 28.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+    'TAE' : ([145, 145], (60.00, 15.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'TBW' : ([145, 145], (64.00, 11.00), (9.0, 9.0), 'EST5EDT', Grid211, "wfo"),

+    'TFX' : ([145, 145], (32.00, 39.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'TOP' : ([145, 145], (47.00, 26.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'TSA' : ([145, 145], (48.00, 22.00), (9.0, 9.0), 'CST6CDT', Grid211, "wfo"),

+    'TWC' : ([145, 145], (29.00, 20.00), (9.0, 9.0), 'US/Arizona', Grid211, "wfo"),

+    'UNR' : ([145, 145], (40.00, 34.00), (9.0, 9.0), 'MST7MDT', Grid211, "wfo"),

+    'VEF' : ([145, 145], (26.00, 25.00), (9.0, 9.0), 'PST8PDT', Grid211, "wfo"),

+#RFCs

+    'ACR' : ([565, 415], (26.0, 19.0), (60.0, 44.0), 'America/Anchorage', Grid214AK, "rfc"),

+    'ALR' : ([299, 278], (59.0, 11.0), (17.0, 19.0), 'CST6CDT', Grid211, "rfc"),

+    'FWR' : ([362, 334], (36.0, 11.0), (20.0, 20.0), 'CST6CDT', Grid211, "rfc"),

+    'KRF' : ([408, 356], (33.0, 27.0), (26.0, 22.0), 'CST6CDT', Grid211, "rfc"),

+    'MSR' : ([381, 304], (43.0, 28.0), (24.0, 20.0), 'CST6CDT', Grid211, "rfc"),

+    'ORN' : ([303, 216], (51.0, 16.0), (18.0, 14.0), 'CST6CDT', Grid211, "rfc"),

+    'PTR' : ([218, 308], (21.0, 35.0), (17.0, 19.0), 'PST8PDT', Grid211, "rfc"),

+    'RHA' : ([132, 140], (69.0, 28.0), (7.0, 10.0), 'EST5EDT', Grid211, "rfc"),

+    'RSA' : ([140, 296], (21.0, 23.0), (12.0, 17.0), 'PST8PDT', Grid211, "rfc"),

+    'STR' : ([171, 307], (29.0, 20.0), (13.0, 18.0), 'MST7MDT', Grid211, "rfc"),

+    'TAR' : ([226, 164], (69.0, 34.0), (13.0, 13.0), 'EST5EDT', Grid211, "rfc"),

+    'TIR' : ([220, 171], (59.0, 25.0), (13.0, 12.0), 'EST5EDT', Grid211, "rfc"),

+    'TUA' : ([281, 168], (39.0, 22.0), (18.0, 10.0), 'CST6CDT', Grid211, "rfc"),

+

+#Special Sites - Added Hawaiian High Seas domain

+    'US' : ([267, 159], (18.0, 9.5), (67.0, 40.0), 'EDT5EDT', Grid211, "other"),

+    'FSL' : ([161, 145], (38.50, 27.00), (10.0, 9.0), 'MST7MDT', Grid211, "other"),

+    'NH1' : ([838, 577], (887.0, 121.0), (837.0, 576.0), 'EST5EDT', NDFD_Oceanic_10K, "wfo"),

+    'NH2' : ([1188, 363], (1328.0, 365.0), (1187.0, 362.0), 'EST5EDT', NDFD_Oceanic_10K, "wfo"),

+    'ONA' : ([244, 383], (68.9375, 19.5625), (15.1875, 23.875), 'EST5EDT', Grid211, "wfo"),

+    'ONP' : ([396, 415], (8.1875, 21.5625), (24.6875, 25.875), 'PST8PDT', Grid211, "wfo"),

+    'HPA' : ([899, 671], (284.0, 30.0), (898.0, 670.0), 'Pacific/Honolulu', NDFD_Oceanic_10K, "wfo"),

+    'WNJ' : ([301, 346], (1000.0, 475.0), (300.0, 345.0), 'CST6CDT', NDFD_Oceanic_10K, "wfo"),

+

+#Aviation Domains for AAWU

+    'AAWU' : ([705, 457], (1.0, 11.0), (88.0, 57.0), 'America/Anchorage', Grid214AK, 'nc'),

+    'AVAK' : ([465, 417], (8.0, 12.0), (29.0, 26.0), 'America/Anchorage', Grid203, 'nc'),

+

+#Regional Offices

+    'VUY' : ([337,449], (62.00, 19.00), (21.0, 28.0), 'EST5EDT', Grid211, "ro"),

+    'BCQ' : ([145,145], (50.00, 27.00), (9.0, 9.0), 'CST6CDT', Grid211, "ro"),

+    'EHU' : ([657,321], (36.00, 9.50), (41.0, 20.0), 'CST6CDT', Grid211, "ro"),

+    'VHW' : ([161,161], (30.00, 28.00), (10.0, 10.0), 'MST7MDT', Grid211, "ro"),

+    'PBP' : ([321,225], (7.00, 11.00), (10.0, 7.0), 'Pacific/Honolulu', Grid208, "ro"),

+    'VRH' : ([1409, 913], (1.0, 11.0), (88.0, 57.0), 'America/Anchorage', Grid214AK, 'nc'),

+

+#National Centers

+    'HAK' : ( [825,553], ( 1.0, 1.0), (103.0, 69.0), 'EST5EDT', Grid214AK, "nc"),

+    'HUS' : ([1073,689], (19.0, 8.0), ( 67.0, 43.0), 'EST5EDT', Grid211,   "nc"),

+    'NHA' : ([1873,1361], (35.5, 3.5), (58.5, 42.5), 'EST5EDT', Grid211, "nc"),

+

+}

+

+# Get list of valid office types, for validation.

+VALID_OFFICE_TYPES = []

+# List of all values of all sites.

+for siteValues in list(SITES.values()):

+    # Office type is the 5th element of each site's values

+    officeType = siteValues[5]

+    if officeType not in VALID_OFFICE_TYPES:

+        # A new office type

+        VALID_OFFICE_TYPES.append(officeType)

+

+#---------------------------------------------------------------------------

+#

+#  Time Constraint configuration section

+#

+#---------------------------------------------------------------------------

+HOUR = 3600

+DAY  = 24 * HOUR

+

+# Start: is the number of seconds since 0000z for the first grid of the day

+# Repeat: is the number of seconds from start until the next grid starts

+# Duration: is the length of the grid in number of seconds

+

+# Examples of constraints:

+# Hourly temperatures

+#     HrTemp = (0, HOUR, HOUR)

+# QPF that is 6 hours long, aligned on 0000z, exists for every 6 hours

+#     Q = (0, HOUR*6, HOUR*6)

+#

+

+# fixed time constraints: start / repeat / duration

+TC_1M    = (0, 60, 60) # 1 minute

+TC1      = (0, HOUR, HOUR)

+TC3      = (0, 3 * HOUR, HOUR)

+TC6      = (0, 6 * HOUR, HOUR)

+TC12     = (0, 12 * HOUR, HOUR)

+TC3NG    = (0, 3 * HOUR, 3 * HOUR)

+TC6NG    = (0, 6 * HOUR, 6 * HOUR)

+TC12NG   = (0, 12 * HOUR, 12 * HOUR)

+TC24NG   = (0, 24 * HOUR, 24 * HOUR)

+TC061212 = (6 * HOUR, 12 * HOUR, 12 * HOUR)

+Persistent = (0, 0, 0)     # special time constraint

+

+

+# The following time constraints are based on local standard time.

+# Change the last parameter from 0 to 1 to force daylight savings time

+# always.

+# PWS TCs changed in OB9.3 for new 6 hour data from NHC

+MaxTTC     = localTC(7*HOUR, 24*HOUR, 13*HOUR, 0)

+MinTTC     = localTC(19*HOUR, 24*HOUR, 14*HOUR, 0)

+MaxRHTC    = localTC(15*HOUR, 24*HOUR, 18*HOUR, 0)

+MinRHTC    = localTC(3*HOUR, 24*HOUR, 18*HOUR, 0)

+LT3NG      = localTC(0*HOUR, 3*HOUR, 3*HOUR, 0)

+LT6NG      = localTC(0*HOUR, 6*HOUR, 6*HOUR, 0)

+LT12NG     = localTC(6*HOUR, 12*HOUR, 12*HOUR, 0)

+LTMOS      = localTC(6*HOUR, 12*HOUR, 12*HOUR, 0)  #special MOS local time

+MaxTTCMOS  = localTC(6*HOUR, 24*HOUR, 12*HOUR, 0)  #special MOS maxT

+MinTTCMOS  = localTC(18*HOUR, 24*HOUR, 12*HOUR, 0)  #special MOS minT

+LT24       = localTC(0*HOUR, 24*HOUR, 24*HOUR, 0)

+FireWx1300TC = localTC(13*HOUR, 24*HOUR, 1*HOUR, 0)   #special FireWx 1pm snap

+#DR3511 DeltaMaxTTC  = localTC(7*HOUR, 24*HOUR, 16*HOUR, 0)  # just for HPCdeltaMaxT

+PWSDTC     = localTC(11*HOUR, 24*HOUR, 12*HOUR, 0)

+PWSNTC     = localTC(23*HOUR, 24*HOUR, 12*HOUR, 0)

+# Alaska OCONUS

+if SID in siteRegion['AR']:

+    MaxTTC     = localTC(5*HOUR, 24*HOUR, 15*HOUR, 0)

+    MinTTC     = localTC(17*HOUR, 24*HOUR, 18*HOUR, 0)

+

+# From NwsInitsConfig

+LT24APT  = localTC(7*HOUR, 24*HOUR, 24*HOUR, 0)

+FireWxAvgTC = localTC( 12*HOUR,  24*HOUR,  6*HOUR, 0)

+LT4HH = localTC(11*HOUR, 24*HOUR, 4*HOUR, 0)

+SPC24 = (12*HOUR, 24*HOUR, 24*HOUR)

+# For WR

+TC0624NG=(6*HOUR,24*HOUR,24*HOUR)

+TC12NG6=(6*HOUR,12*HOUR,12*HOUR)

+# HIL Time Constraint

+HILTC=(6*HOUR,24*HOUR,24*HOUR)

+

+#---------------------------------------------------------------------------

+#

+#  Database/(Model) Attribute Configuration

+#

+#---------------------------------------------------------------------------

+#

+# name:  The model name of the database

+#

+# format:  Either 'GRID' or 'DFM'

+#

+# type:  Optional type of the database

+#

+# single:  YES or NO. YES if this database always exists and is not

+#   based on model-times.  NO if this database is created/destroyed and

+#   is based on model-runs.  When created, the names of these databases have

+#   time stamps.

+#

+# official:  YES or NO.  YES if this is an official database from which

+#   products can be generated.  NO if this is a conventional database.

+#

+# numVer:  Number of versions of this database to retain.

+#

+# purgeAge: Number of hours in the past before grids will be automatically

+#   purged from the database.  If 0, then purging is disabled.

+#

+

+YES = 1

+NO = 0

+GRID = 'GRID'

+# name /  format / type / single / official / numVer / purgeAge

+

+Fcst        = ('Fcst',         GRID,   '', YES, NO,  1, 24)

+Practice    = ('Fcst',         GRID,   'Prac', YES, NO,  1, 24)

+TestFcst    = ('Fcst',         GRID,   'Test', YES, NO,  1, 24)

+Restore     = ('Restore',      GRID,   '', YES, NO,  1, 24)

+Test        = ('Test',         GRID,   'test', NO, NO,  1, 0)

+Official    = ('Official',     GRID,   '', YES, YES, 1, 24)

+ISC         = ('ISC',          GRID,   '', YES, NO,  1, 12)

+

+

+#---------------------------------------------------------------------------

+#

+#  Search path for netCDF data files.

+#  NOTE: This feature was implemented only backward compatibility with existing A1 datasets.

+#        New datasets should be generated in a from that can be ingested by A2

+#        It shoudl only be used for static datasets.

+#        New files will not be recognized without a server restart.

+#

+#---------------------------------------------------------------------------

+# Alaska OCONUS

+if SID in groups['ALASKA_SITES']:

+    NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISMAK'),

+                  ('/awips2/edex/data/gfe/climo/PRISMAK800'),

+                  ]

+

+# Hawaii OCONUS

+elif SID == "HFO":

+    NETCDFDIRS = [('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'),

+                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'),

+                  ('/awips2/edex/data/gfe/topo/StdTerrain/Hawaii', 'StdTerrain'),

+                  ]

+

+# San Juan OCONUS

+elif SID == "SJU":

+    NETCDFDIRS = [('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'),

+                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'),

+                  ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'),

+                  ('/awips2/edex/data/gfe/topo/StdTerrain/PuertoRico', 'StdTerrain')

+                  ]

+

+# Guam OCONUS

+elif SID == "GUM":

+    NETCDFDIRS = []

+

+#CONUS sites

+elif SID in groups['CONUS_EAST_SITES']:

+    NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISM'),

+                  ('/awips2/edex/data/gfe/climo/NCDC'),

+                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'),

+                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'),

+                  ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'),

+                  ('/awips2/edex/data/gfe/topo/StdTerrain/CONUS', 'StdTerrain'),

+                  ]

+

+else:   #######DCS3501 WEST_CONUS

+    NETCDFDIRS = [('/awips2/edex/data/gfe/climo/PRISM'),

+                  ('/awips2/edex/data/gfe/climo/NCDC'),

+                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPO','CRMTopo'),

+                  ('/awips2/edex/data/gfe/topo/NED3ARCSTOPONEW','NED'),

+                  ('/awips2/edex/data/gfe/topo/VDATUMS','VDATUMS'),

+                  ('/awips2/edex/data/gfe/topo/StdTerrain/CONUS', 'StdTerrain'),

+                  ]

+

+#---------------------------------------------------------------------------

+#

+# Where to find (and what to call) satellite data.

+#

+#---------------------------------------------------------------------------

+#

+

+# This table contains product ID and weather element names for satellite data

+#

+# A product ID consists of the sector ID and physical element of the

+# satellite product.

+#

+# Examples:

+#

+#   "East CONUS/Imager Visible"

+#   "East CONUS/Imager 11 micron IR"

+#   "East CONUS/Imager 13 micron (IR)"

+#   "East CONUS/Imager 3.9 micron IR"

+#

+

+# Alaska OCONUS

+if SID in groups['ALASKA_SITES']:

+    SATDATA = []

+

+# Hawaii OCONUS

+elif SID == "HFO":

+    SATDATA = []

+

+# San Juan OCONUS

+elif SID == "SJU":

+    SATDATA = [("East CONUS/Imager Visible", "visibleEast"),

+               ("East CONUS/Imager 11 micron IR", "ir11East"),

+               ("East CONUS/Imager 13 micron (IR)", "ir13East"),

+               ("East CONUS/Imager 3.9 micron IR", "ir39East"),

+               ("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]

+

+# Guam OCONUS

+elif SID == "GUM":

+    SATDATA = []

+

+#CONUS sites

+else:

+    SATDATA = [("West CONUS/Imager Visible", "visibleWest"),

+               ("West CONUS/Imager 11 micron IR", "ir11West"),

+               ("West CONUS/Imager 13 micron (IR)", "ir13West"),

+               ("West CONUS/Imager 3.9 micron IR", "ir39West"),

+               ("West CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporWest"),

+               ("East CONUS/Imager Visible", "visibleEast"),

+               ("East CONUS/Imager 11 micron IR", "ir11East"),

+               ("East CONUS/Imager 13 micron (IR)", "ir13East"),

+               ("East CONUS/Imager 3.9 micron IR", "ir39East"),

+               ("East CONUS/Imager 6.7-6.5 micron IR (WV)", "waterVaporEast")]

+

+#---------------------------------------------------------------------------

+#

+#  Intersite Coordination Configurations

+#

+#---------------------------------------------------------------------------

+# base urls for the ISC Routing Table

+ISC_ROUTING_TABLE_ADDRESS = {

+    "ANCF" : "http://svcbu-ancf.er.awips.noaa.gov:8080/irt",

+    "BNCF" : "http://svcbu-bncf.er.awips.noaa.gov:8080/irt"

+    }

+

+

+# list of sites that from which you want ISC data (If None, ifpServer will

+# automatically calculate the list.)  Should always include your own site.

+REQUESTED_ISC_SITES = None

+

+# Overall ISC request flag.  Must be set to 1 in order to request and receive

+# ISC data.  Must be 1 to register with the IRT.

+REQUEST_ISC = 0

+

+# Sending control flag.  Set to 1 to send isc when data is saved.

+SEND_ISC_ON_SAVE = 0

+

+# Sending control flag.  Set to 1 to send isc when data is published.

+SEND_ISC_ON_PUBLISH = 0

+

+# List of weather elements to request for ISC.  If set to None, it defaults

+# to the list of all weather elements in the Fcst database.

+REQUESTED_ISC_PARMS = None

+

+# Transmission script for sending data.  This is the script that iscExtract

+# and other routines (e.g., vtec table sharing) will call to perform the

+# actual transmission of data.

+TRANSMIT_SCRIPT = GFESUITE_HOME + '/bin/gfe_msg_send -s %SUBJECT -a %ADDRESSES -i %WMOID -c 11 -p 0 -e %ATTACHMENTS'

+

+

+# Extra ISC parms (weather elements).  These are a list of the baseline

+# weather elements to be added as extra parms to the ISC database.  This

+# is necessary when receiving ISC grids from a site that is a different

+# office type than your own.  You never need to add weather elements

+# to the ISC database that is your own office type.  The format of this

+# entry is a list of tuples.  The tuple is a list of weather elements

+# objects (such as Temp and not "T"), and an office type, such as "rfc".

+EXTRA_ISC_PARMS = [([QPF,FloodingRainThreat], 'rfc'), ([QPF,FloodingRainThreat], 'wfo'), ([ProposedSS,Hazards,InundationMax,InundationTiming,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], 'nc'),([ProposedSS,Hazards,InundationMax,InundationTiming,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], 'wfo')]

+

+#---------------------------------------------------------------------------

+#

+#  Misc. Configurations

+#

+#---------------------------------------------------------------------------

+# defines the number of days to keep log files

+LOG_FILE_PURGE_AFTER = 28

+

+# auto configure NotifyTextProd -- set after OB6

+AUTO_CONFIGURE_NOTIFYTEXTPROD = 1   #0=off,1=on

+

+

+#-----------------------------------

+# DO NOT CHANGE THE FOLLOWING SECTION

+#------------------------------------

+# import the local config file

+

+myOfficeType = SITES[GFESUITE_SITEID][5]

+

+AdditionalISCRouting = [

+   # Configure by adding entries to this list in the form of:

+   # ([WeatherElements],  ModelName, EditAreaPrefix)

+   # Example:

+   # ([Hazards, LAL, CWR], "ISCFire", "FireWxAOR_"),

+]

+

+#---------------------------------------------------------------------------

+# Parm groups.  Combine parms with time constraints

+# list of ([parms], timeConstraints)

+#---------------------------------------------------------------------------

+

+#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

+# There is nothing special about these variables. They are just used as a

+# convienence to set up multiple models in modelDict with the same parameter

+# set.  However, model parms are no longer as generic as they once were and

+# its just as easy to set the parms explicitly in modelDict.

+

+STD6_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC6),

+             ([Haines, MixHgt, FreeWind, TransWind, VentRate], TC6),

+             ([DSI, Stability, Ttrend, RHtrend], TC6),

+             ([SnowAmt, PoP, CWR], TC6NG), ([QPF, Weather, IceAcc, LAL], TC6NG),

+             ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24),

+             ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),

+             ([MaxT], MaxTTC), ([MinT], MinTTC),

+             ([Wetflag], FireWx1300TC)]

+

+# hourly

+STD1_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC1),

+             ([Haines, MixHgt, FreeWind, TransWind], TC1),

+             ([DSI, Stability, VentRate, Ttrend, RHtrend], TC1),

+             ([SnowAmt, PoP, CWR], TC1), ([QPF, Weather, IceAcc, LAL], TC1),

+             ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24),

+             ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),

+             ([MaxT], MaxTTC), ([MinT], MinTTC),

+             ([Wetflag], FireWx1300TC)]

+

+# 3 hourly

+STD3_MODEL = [([Temp, Td, RH, Wind, Wind20ft, Sky, FzLevel, SnowLevel], TC3),

+             ([Haines, MixHgt, FreeWind, TransWind], TC3),

+             ([DSI, Stability, VentRate, Ttrend, RHtrend], TC3),

+             ([SnowAmt, PoP, CWR], TC3NG), ([QPF, IceAcc, Weather, LAL], TC3NG),

+             ([MarineLayer, HrsOfSun, InvBurnOffTemp], LT24),

+             ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),

+             ([MaxT], MaxTTC), ([MinT], MinTTC),

+             ([Wetflag], FireWx1300TC)]

+

+# Fcst and official database parameter groupings

+OFFICIALDBS = [([Temp, Td, Wind, Weather, Sky, FzLevel, SnowLevel], TC1),

+    ([HeatIndex, WindChill, RH, SnowAmt, CWR, QPF], TC1),

+    ([PoP, Ttrend, RHtrend, Wind20ft, WindGust], TC1),

+    ([MinT], MinTTC), ([MaxT], MaxTTC),

+    ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),

+    ([VentRate, LAL, Haines, MixHgt, FreeWind, TransWind], TC1),

+    ([DSI, Stability, MarineLayer], TC1),

+    ([HrsOfSun, InvBurnOffTemp], LT24),

+    ([IceAcc, IceCoverage, Hazards], TC1),

+    ([Wetflag], FireWx1300TC),

+    ([StormTotalSnow], TC1),

+        # Tropical parms

+    ([prob34, prob50, prob64,pws34,pws50,pws64,], TC1),

+    ([InundationMax,SurgeHtPlusTideMSL,SurgeHtPlusTideMLLW,SurgeHtPlusTideMHHW,SurgeHtPlusTideNAVD], TC1),

+    ([ProposedSS,DiffSS,tempProposedSS,InitialSS], TC1),

+    ([WindThreat,StormSurgeThreat,FloodingRainThreat,TornadoThreat], TC1),

+    ([pwsD34,pwsD64], PWSDTC),

+    ([pwsN34,pwsN64], PWSNTC),

+    ([pws34int,pws64int,InundationTiming,QPFtoFFGRatio], TC6NG),

+    # DR20541 and 20482

+    ([PoP12hr], TC12NG),

+    ([QPF6hr, SnowAmt6hr], TC6NG),

+    ([cape], LT6NG),

+    ([ApparentT, HeatIndex, WindChill, LkSfcT, SnowMap, SnowRatio, StormTotalQPF], TC1),

+    ]

+

+## JCM Change wave and period (and swanswell) to TC1 for all marine sites

+if SID in groups['marineSites'] or SID in groups['GreatLake_SITES']:

+    OFFICIALDBS.append(([WaveHeight, PeakWaveDir, WindWaveHeight, SurfHeight, Swell, Swell2, Period, Period2], TC1))

+    OFFICIALDBS.append(([SwanSwell, Wave1, Wave2, Wave3, Wave4, Wave5, Wave6, Wave7, Wave8, Wave9,

+                         Period1, Period3, Period4, Period5, Period6, Period7, Period8, Period9], TC1))

+    OFFICIALDBS.append(([NWPSwind, UWaveDir, VWaveDir, WaveDir, RipProb, ErosionProb, OverwashProb],TC1))

+

+# NWPS

+nwpsCG1_MODEL = [([SwanSwell, Period, WaveHeight, PeakWaveDir, WindWaveHeight, Wind, RipProb, ErosionProb, OverwashProb], TC1)]

+nwpsTrkngCG0_MODEL = [([Wave1, Wave2, Wave3, Wave4, Wave5, Wave6, Wave7, Wave8, Wave9, Period1, Period2, Period3, Period4, Period5, Period6,Period7, Period8, Period9], TC1)]

+

+# OPC TAF parameters (for NW, SW, and E)

+OPCTAFBPARMS = [([WindWaveHeight, WaveHeight], TC1)]

+

+# SAT database parameter groupings

+SATPARMS = [([SatVisE, SatIR11E, SatIR13E, SatIR39E, SatWVE, SatFogE], TC_1M),

+            ([SatVisW, SatIR11W, SatIR13W, SatIR39W, SatWVW, SatFogW], TC_1M)]

+

+# RTMA database parameter groupings

+# DCS17288/DR17144

+if SID in groups['OCONUS_SITES']:

+    RTMAPARMS = [([Temp,Td,RH,Wind,Vis,Pressure,WindGust],TC1),

+             ([MinT],MinTTC), ([MaxT],MaxTTC),

+             ([MinRH],MinRHTC), ([MaxRH],MaxRHTC),

+             ([TUnc,TdUnc,WSpdUnc,WDirUnc,VisUnc,PressUnc,WGustUnc],TC1)]

+else:

+    RTMAPARMS = [([Temp,Td,RH,Wind,QPE,Sky,Vis,Pressure,WindGust],TC1),

+             ([MinT],MinTTC), ([MaxT],MaxTTC),

+             ([MinRH],MinRHTC), ([MaxRH],MaxRHTC),

+             ([TUnc,TdUnc,WSpdUnc,WDirUnc,VisUnc,PressUnc,WGustUnc,SkyUnc],TC1)]

+

+#---------------------------------------------------------------------------

+# Databases for a site.

+# list of (Database, [parms])

+# Official, Practice, TestFcst, Test are all set after Fcst is defined.

+#---------------------------------------------------------------------------

+

+# Intersite coordination database parameter groupings, based on

+# OFFICIALDBS, but time constraint is always TC1

+ISCPARMS = []

+if type(officeType) != str:

+    raise TypeError("Office type not a str: " + repr(officeType))

+else:

+    if officeType not in VALID_OFFICE_TYPES:

+        raise ValueError("Office type: " + str(officeType) + " does not match any of the following: [" + (', '.join(VALID_OFFICE_TYPES)) + "]")

+

+

+#

+# new parameters for NewTerrain

+#

+NewTopo     = ("NewTopo",     SCALAR, "ft", "New Topo",      50000.0, -32000.0, 1, NO)

+PrevTopo    = ("PrevTopo",    SCALAR, "ft", "Previous Topo", 50000.0, -32000.0, 1, NO)

+StdTopo     = ("StdTopo",     SCALAR, "ft", "Standard Topo", 50000.0, -32000.0, 1, NO)

+GTOPO       = ("GTOPO",       SCALAR, "ft", "GTOPO30",       50000.0, -32000.0, 1, NO)

+Topo        = ("Topo",        SCALAR, "ft", "Topography",    50000.0, -32000.0, 1, NO)

+

+# Add Topo to ISC parms for NewTerrain

+if type(REQUESTED_ISC_PARMS) is list and not "NewTopo" in REQUESTED_ISC_PARMS:

+    REQUESTED_ISC_PARMS.append("NewTopo")

+ISCPARMS.append(([NewTopo], Persistent))

+

+

+#---------------------------------------------------------------------------

+#

+#  General server configuration section

+#

+#---------------------------------------------------------------------------

+

+#----------------------------------------------------------------------------

+# Server settings     DO NOT CHANGE THESE DEFINITIONS

+#----------------------------------------------------------------------------

+from com.raytheon.edex.plugin.gfe.config import SimpleServerConfig

+IFPConfigServer = SimpleServerConfig()

+#IFPConfigServer.allowedNodes             = []

+IFPConfigServer.allowTopoBelowZero       = 1

+

+#------------------------------------------------------------------------------

+# serverConfig model configuration is now done in the modelDict dictionary.

+# variables D2DMODELS, D2DDBVERSIONS,D2DAccumulativeElements,INITMODULES,

+# INITSKIPS, DATABASES are no longer explicitly set and are not valid

+# to be referenced in localConfig.py.

+

+# WARNING: There can only be one version of a model in modelDict. Fcst,

+# practice and test databases have to be handled separately because there

+# are databases with the same name but different types.  This is ok

+# because these databases are defined after any localConfig customizations

+# of the normal Fcst database.

+

+# modelDict contains the following keys. Only define what is needed, i.e.,

+# it is not required to have every key defined

+#   "DB": Definition of the database, i.e., the first value in a dbs entry:

+#         ("wrfems", GRID, "", NO,  NO,  3, 0). This must be a tuple. The name

+#         in the DB entry must be the same as the model name used as the key

+#         into the modelDict variable.

+#

+#   "Parms" : Definition of the weather element parameters in the database,

+#         i.e., the second part of the dbs entry. This is a list of tuples.

+#

+#   "D2DMODELS" : D2D metadata database name for the source model.

+#

+#   "INITMODULES': Name of the SmartInit module. It is usually just the

+#         name as a string. If the init requires multiple models, use a tuple

+#         of ('smartInit name',[list of model names])

+#         'INITMODULES': ('Local_WPCGuide', ["HPCGuide","HPCERP","HPCWWD"]),

+#

+#   "D2DAccumulativeElements" : List of parm names that are accumulative

+#

+#   "D2DDBVERSIONS" : Number of versions of a D2D model to show in the Weather

+#         Element Browser. Defaults to 2 if not supplied.

+#

+#   "INITSKIPS" : Used to skip specific model cycles.

+#

+# Example for a model:

+#

+#   modelDict["CMCreg"]={

+#        "DB": ("CMCreg", "GRID", "", NO, NO, 2, 0),

+#        "Parms": [([Temp, Td, RH, Wind, WindGust, Sky, MixHgt, TransWind, QPF,

+#                    PoP, SnowAmt, SnowRatio], TC3),

+#                  ([PoP6, QPF6, QPF6hr, CQPF1],TC6NG),

+#                  ([QPF12, PoP12],TC12NG),

+#                  ([MinRH], MinRHTC), ([MaxRH], MaxRHTC),

+#                  ([MaxT], MaxTTC), ([MinT], MinTTC),

+#                 ],

+#        "D2DMODELS": "Canadian-Reg",

+#        "INITMODULES": "Local_CMCreg",

+#        "D2DAccumulativeElements": ["tpgemreg","tprun","tp3hr","tp6hr"],

+#        "D2DDBVERSIONS": 3,

+#   }

+#

+

+# Official, Practice, TestFcst, Test, Restore are all derivations of Fcst and

+# are setup after localConfig is processed.

+modelDict['Fcst'] = {'DB': Fcst, 'Parms': OFFICIALDBS}

+

+# Model Databases

+waveParms=[Period, Period2, SurfHeight, Swell, Swell2, WaveHeight,

+           Wind, WindWaveHeight, ]

+

+modelDict['BaseTerrain'] = {

+            'DB': ('BaseTerrain', 'GRID', 'EditTopo', YES, NO, 1, 0),

+            'Parms': [([StdTopo, GTOPO, PrevTopo], Persistent),

+                     ],

+            }

+

+modelDict['CRMTopo'] = {

+            'D2DDBVERSIONS': 1}

+

+modelDict['ECMWFHiRes'] = {

+            'D2DMODELS': 'ECMWF-HiRes',}

+

+modelDict['ENPwave'] = {

+            'D2DMODELS': 'ENPWAVE253',

+            'DB': ('ENPwave', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [(waveParms, TC6),

+                     ],

+            }

+

+modelDict['ESTOFS'] = {

+            'D2DMODELS': 'estofsEP',

+            'DB': ('ESTOFS', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'ESTOFS',

+            'Parms': [([AstroTide, StormSurge], TC1),

+                     ],

+            }

+

+modelDict['ETSS'] = {

+            'D2DMODELS': 'ETSS',

+            'DB': ('ETSS', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'ETSS',

+            'Parms': [([StormSurge, SurgeTide], TC1),

+                     ],

+            }

+

+modelDict['ETSSHiRes'] = {

+            'D2DMODELS': 'ETSS-HiRes',

+            'DB': ('ETSSHiRes', 'GRID', '', NO, NO, 2, 0),

+            'INITMODULES': 'ETSSHiRes',

+            'Parms': [([AstroTide, SurgeTide], TC1),

+                     ],                        

+             }

+

+for s in ['ALR', 'FWR', 'KRF', 'MSR', 'ORN', 'PTR', 'RHA', 'RSA', 'STR', 'TAR',

+          'TIR', 'TUA',]:

+    modelDict['FFG'+s] = {'D2DMODELS': 'FFG-'+s}

+

+modelDict['GFS20'] = {

+            'D2DMODELS': 'GFS20',

+            'D2DAccumulativeElements': ['tp3hr','tp6hr', 'tp', 'cp', 'crain', 'csnow', 'cfrzr', 'cicep'],

+            'DB': ('GFS20', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [([Wetflag], FireWx1300TC),

+                     ([MaxRH], MaxRHTC),

+                     ([MaxT], MaxTTC),

+                     ([MinRH], MinRHTC),

+                     ([MinT], MinTTC),

+                     ([HrsOfSun, InvBurnOffTemp, MarineLayer], LT24),

+                     ([DSI, FreeWind, FzLevel, Haines, MixHgt, RH, RHtrend, Sky,

+                       SnowLevel, Stability, Td, Temp, TransWind, Ttrend, VentRate,

+                       Wind, Wind20ft], TC6),

+                     ([CWR, IceAcc, LAL, PoP, QPF, SnowAmt, Weather], TC6NG),

+                     ],

+            }

+

+modelDict['GFS80'] = {

+            'D2DAccumulativeElements': ['tp', 'cp'],

+            'D2DMODELS': 'AVN211',

+            'DB': ('GFS80', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'GFS80',

+            'Parms': STD6_MODEL,

+            }

+

+modelDict['GFSLAMPGrid'] = {

+            'D2DMODELS': 'GFSLAMPGrid',

+            'DB': ('GFSLAMPGrid', 'GRID', '', NO,  NO, 3, 0),

+            'INITMODULES': 'GFSLAMPGrid',

+            'Parms': [([CigHgt, Sky, Td, Temp, Vis, Wind], TC1),

+                     ],

+            }

+

+modelDict['GWW'] = {

+            'DB': ('GWW', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [(waveParms, TC6),

+                     ],

+            }

+

+modelDict['WaveWatch'] = {

+            'D2DMODELS': 'WaveWatch',}

+

+modelDict['GlobalWave'] = {

+            'D2DMODELS': 'GlobalWave',

+            'DB': ('GlobalWave', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [(waveParms, TC3),

+                     ],

+            }

+

+modelDict['HIRESWarw'] = {

+            'D2DAccumulativeElements': ['tp'],

+            'D2DMODELS': 'HiResW-ARW-West',

+            'DB': ('HIRESWarw', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'HIRESWarw',

+            'Parms': STD3_MODEL,

+            }

+

+modelDict['HIRESWnmm'] = {

+            'D2DAccumulativeElements': ['tp'],

+            'D2DMODELS': 'HiResW-NMM-West',

+            'DB': ('HIRESWnmm', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'HIRESWnmm',

+            'Parms': STD3_MODEL,

+            }

+

+modelDict['HPCERP'] = {

+            'D2DAccumulativeElements': ['tpHPCndfd'],

+            'D2DDBVERSIONS': 24,

+            'D2DMODELS': 'HPCqpfNDFD',}

+

+modelDict['HPCGRID'] = {

+            'DB': ('HPCGRID', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [([PoP, SnowAmt], LTMOS),

+                     ([MaxT], MaxTTCMOS),

+                     ([MinT], MinTTCMOS),

+                     ([Sky, Td, Temp, Weather, Wind], TC1),

+                     ([QPF], TC6NG),

+                     ],

+            }

+

+modelDict['HPCGuide'] = {

+            'D2DAccumulativeElements': ['pop'],

+            'D2DMODELS': 'HPCGuide',

+            'DB': ('HPCGuide', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'HPCGuide',

+            'Parms': [([MaxT], MaxTTC),

+                     ([MinT], MinTTC),

+                     ([PoP], TC12NG),

+                     ([Sky, Td, Wind], TC6),

+                     ],

+            }

+

+modelDict['HPCQPF'] = {

+            'D2DAccumulativeElements': ['tpHPC'],

+            'D2DMODELS': 'HPCqpf',

+            'DB': ('HPCQPF', 'GRID', '', NO,  NO, 4, 0),

+            'INITMODULES': 'HPCQPF',

+            'Parms': [([QPF], TC6NG),

+                     ],

+            }

+

+modelDict['HRRR'] = {

+            'D2DAccumulativeElements': ['tp', 'crain', 'csnow', 'cfrzr', 'cicep'],

+            'D2DMODELS': 'HRRR',

+            'DB': ('HRRR', 'GRID', '', NO,  NO, 3, 0),

+            'INITMODULES': 'HRRR',

+            'Parms': [([QPF, RH, Sky, Td, Temp, Wind, WindGust], TC1),

+                     ],

+            }

+

+modelDict['HWRF'] = {

+            'D2DAccumulativeElements': ['tp', 'cp'],

+            'D2DMODELS': 'HWRF',}

+

+modelDict['LAPS'] = {

+            'D2DAccumulativeElements': ['pc'],

+            'D2DDBVERSIONS': 6,

+            'D2DMODELS': 'LAPS',

+            'DB': ('LAPS', 'GRID', '', YES, NO, 1, 30),

+            'INITMODULES': 'LAPS',

+            'Parms': [([QPF, Radar, Sky, SnowAmt, Td, Temp, Weather, Wind], TC1),

+                     ],

+            }

+

+modelDict['MOSGuide'] = {

+            'D2DAccumulativeElements': ['pop12hr', 'pop6hr', 'thp12hr', 'thp3hr',

+                                       'thp6hr', 'tcc', 'tp6hr', 'tp12hr', 'wgs'],

+            'D2DMODELS': 'MOSGuide',

+            'DB': ('MOSGuide', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'MOSGuide',

+            'Parms': [([MaxT], MaxTTC),

+                     ([MinT], MinTTC),

+                     ([RH, Td, Temp, Wind], TC1),

+                     ([PoP, PoP12, QPF, QPF12, TstmPrb12], TC12NG),

+                     ([TstmPrb3], TC3NG),

+                     ([PoP6, QPF6, Sky, TstmPrb6, WindGust], TC6NG),

+                     ],

+            }

+

+modelDict['MSAS'] = {

+            'D2DAccumulativeElements': ['tp', 'cp'],

+            'D2DDBVERSIONS': 6,

+            'D2DMODELS': 'MSAS',

+            'DB': ('MSAS', 'GRID', '', YES, NO, 1, 36),

+            'INITMODULES': 'MSAS',

+            'Parms': [([Td, Temp, Wind], TC1),

+                     ],

+            }

+

+modelDict['NAHwave4'] = {

+            'D2DMODELS': 'NAHwave4',}

+

+modelDict['NAM12'] = {

+            'D2DAccumulativeElements': ['tp', 'cp', 'crain', 'csnow', 'cfrzr', 'cicep'],

+            'D2DMODELS': 'NAM12',

+            'DB': ('NAM12', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'NAM12',

+            'Parms': STD3_MODEL,

+            }

+

+modelDict['NAM20'] = {

+            'D2DAccumulativeElements': ['tp', 'cp'],

+            'D2DMODELS': 'NAM20',}

+

+modelDict['NAM40'] = {

+            'D2DAccumulativeElements': ['tp', 'cp'],

+            'D2DMODELS': 'NAM40',

+            'DB': ('NAM40', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': STD3_MODEL,

+            }

+

+modelDict['NAM80'] = {

+            'D2DAccumulativeElements': ['tp', 'cp'],

+            'D2DMODELS': 'ETA',

+            'DB': ('NAM80', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': STD6_MODEL,

+            }

+

+modelDict['NED'] = {

+            'D2DDBVERSIONS': 1}

+

+modelDict['NamDNG'] = {

+            'D2DMODELS': 'namdng25',

+            'DB': ('NamDNG', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'NamDNG',

+            'Parms': [([MaxRH], MaxRHTC),

+                     ([MaxT], MaxTTC),

+                     ([MinRH], MinRHTC),

+                     ([MinT], MinTTC),

+                     ([PoP12, QPF12], TC12NG),

+                     ([MixHgt, RH, Sky, SnowLevel, Td, Temp, TransWind, Vis,

+                       Wind, WindGust], TC3),

+                     ([MaxRH3, MaxT3, MinT3, PoP, QPF3, SnowAmt], TC3NG),

+                     ([PoP6, QPF6, SnowAmt6], TC6NG),

+                     ],

+            }

+

+modelDict['NationalBlend'] = {

+            'D2DAccumulativeElements': ["pop12hr", "pop", "pop6hr", "tp", "ppi1hr", "ppi6hr",

+                                        "tp1hr", "tp6hr", "thp3hr", "thp6hr",

+                                        "totsn1hr", "totsn6hr", "ficeac1hr", "ficeac6hr"],

+            'D2DMODELS': 'NationalBlend',

+            'DB': ('NationalBlend', 'GRID', '', NO,  NO, 7, 0),

+            'INITMODULES': 'NationalBlend',

+            'Parms': [([Temp, Td, RH, Sky, Wind, WindGust, ApparentT], TC1),

+                     ([QPF1,PPI01,CloudBasePrimary,Ceiling,Visibility],TC1),

+                     ([PoTIP, PoTR, PoTRW, PoTS, PoTSW, PoTZR,],TC1),

+                     ([SnowLevel,MaxTwAloft,ProbIcePresent, ProbRefreezeSleet,SnowRatio],TC1),

+                     ([PositiveEnergyAloft, NegativeEnergyLowLevel],TC1),

+                     ([MixHgt, TransWind, LLWS, VentRate, LLWSHgt, Radar,

+                       SigWaveHgt, Weather, Haines, FosBerg,

+                       SnowAmt01, IceAccum01, TstmPrb1],TC1),

+                     ([TstmPrb3, DryTstmPrb],TC3NG),

+                     ([TstmPrb6, QPF, PoP6, PPI06, SnowAmt, IceAccum,

+                       QPF10Prcntl, QPF50Prcntl, QPF90Prcntl],TC6NG),

+                     ([MaxT], MaxTTC), ([MinT], MinTTC),

+                     ([MaxRH], MaxRHTC), ([MinRH], MinRHTC),([PoP, TstmPrb12],TC12NG),

+                     ],

+            }

+

+modelDict['NationalBlendOC'] = {

+            'D2DMODELS': 'NationalBlendOC',

+            'DB': ('NationalBlend', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'NationalBlendOC',

+            'Parms': [([WGS50pct, WS50Prcntl30m, WS50Prcntl80m, Vis50pct, T50pct,

+                       PMSL10pct, PMSL50pct, PMSL90pct], TC1),

+                     ],

+            }

+

+modelDict['NewTerrain'] = {

+            'DB': ('NewTerrain', 'GRID', 'EditTopo', YES, NO, 1, 0),

+            'Parms': [([NewTopo], Persistent),

+                     ],

+            }

+

+modelDict['PWPF'] = {

+            'D2DMODELS': 'PWPF',}

+

+modelDict['RFCQPF'] = {

+            'D2DMODELS': 'RFCqpf',

+            'DB': ('RFCQPF', 'GRID', '', NO,  NO, 4, 0),

+            'Parms': [([QPF], TC6NG),

+                     ],

+            }

+

+modelDict['RTMA'] = {

+            'D2DAccumulativeElements': ['tp'],

+            'D2DMODELS': 'RTMA25',

+            'DB': ('RTMA', 'GRID', '', YES, NO, 1, 36),

+            'INITMODULES': 'RTMA',

+            'Parms': RTMAPARMS,

+            }

+

+modelDict['RAP13'] = {

+            'D2DAccumulativeElements': ['tp', 'cp'],

+            'D2DMODELS': 'RAP13',

+            'DB': ('RAP13', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'RAP13',

+            'INITSKIPS': [1, 2, 4, 5, 7, 8, 10, 11, 13, 14, 16, 17, 19, 20, 22, 23],

+            'Parms': STD1_MODEL,

+            }

+

+modelDict['SAT'] = {

+            'DB': ('SAT', 'GRID', '', YES, NO, 1, 12),

+            'Parms': [([SatFogE, SatFogW, SatIR11E, SatIR11W, SatIR13E, SatIR13W,

+                        SatIR39E, SatIR39W, SatVisE, SatVisW, SatWVE, SatWVW],

+                        TC_1M),

+                     ],

+            }

+

+modelDict['SPC'] = {

+            'D2DDBVERSIONS': 8, 'D2DMODELS': 'SPCGuide',}

+

+modelDict['SREF'] = {

+            'D2DMODELS': 'SREF212',

+            'DB': ('SREF', 'GRID', '', NO,  NO, 3, 0),

+            'INITMODULES': 'SREF',

+            'Parms': [([Td, Temp, Wind], TC1),

+                     ],

+            }

+

+modelDict['Satellite'] = {

+            'D2DDBVERSIONS': 6,}

+# Turn on satellite smartInit only if SATDATA has some entries.

+if SATDATA:

+    modelDict['Satellite']['INITMODULES'] = 'SAT'

+

+modelDict['TPCProb'] = {

+            'D2DDBVERSIONS': 30,

+            'D2DMODELS': 'TPCWindProb',

+            'DB': ('TPCProb', 'GRID', '', NO,  NO, 30, 0),

+            'Parms': [([pwsD34, pwsD64], PWSDTC),

+                     ([pwsN34, pwsN64], PWSNTC),

+                     ([prob34, prob50, prob64, pws34, pws50, pws64], TC1),

+                     ],

+            }

+

+modelDict['TPCProbPrelim'] = {

+            'D2DDBVERSIONS': 30,

+            'D2DMODELS': 'TPCWindProb_Prelim',

+            'DB': ('TPCProbPrelim', 'GRID', '', NO,  NO, 30, 0),

+            'Parms': [([pwsD34, pwsD64], PWSDTC),

+                     ([pwsN34, pwsN64], PWSNTC),

+                     ([prob34, prob50, prob64, pws34, pws50, pws64], TC1),

+                     ],

+            }

+

+modelDict['TPCStormSurge'] = {

+            'D2DDBVERSIONS': 1}

+

+modelDict['TPCSurgeProb'] = {

+            'D2DMODELS': 'TPCSurgeProb',

+            'D2DAccumulativeElements': [

+                'Surge10Pct',

+                'Surge20Pct',

+                'Surge30Pct',

+                'Surge40Pct',

+                'Surge50Pct',

+                'Surge90Pct',

+                'PSurge25Ft',

+                'PSurge24Ft',

+                'PSurge23Ft',

+                'PSurge22Ft',

+                'PSurge21Ft',

+                'PSurge20Ft',

+                'PSurge19Ft',

+                'PSurge18Ft',

+                'PSurge17Ft',

+                'PSurge16Ft',

+                'PSurge15Ft',

+                'PSurge14Ft',

+                'PSurge13Ft',

+                'PSurge12Ft',

+                'PSurge11Ft',

+                'PSurge10Ft',

+                'PSurge9Ft',

+                'PSurge8Ft',

+                'PSurge7Ft',

+                'PSurge6Ft',

+                'PSurge5Ft',

+                'PSurge4Ft',

+                'PSurge3Ft',

+                'PSurge2Ft',

+                'PSurge1Ft',

+                'PSurge0Ft',

+                'Surge10Pctincr',

+                'Surge20Pctincr',

+                'Surge30Pctincr',

+                'Surge40Pctincr',

+                'Surge50Pctincr',

+                'Surge90Pctincr',

+                'PSurge20Ftincr',

+                'PSurge19Ftincr',

+                'PSurge18Ftincr',

+                'PSurge17Ftincr',

+                'PSurge16Ftincr',

+                'PSurge15Ftincr',

+                'PSurge14Ftincr',

+                'PSurge13Ftincr',

+                'PSurge12Ftincr',

+                'PSurge11Ftincr',

+                'PSurge10Ftincr',

+                'PSurge9Ftincr',

+                'PSurge8Ftincr',

+                'PSurge7Ftincr',

+                'PSurge6Ftincr',

+                'PSurge5Ftincr',

+                'PSurge4Ftincr',

+                'PSurge3Ftincr',

+                'PSurge2Ftincr',

+                'PSurge1Ftincr',

+                'PSurge0Ftincr',

+            ],

+        }

+

+modelDict['PETSS'] = {

+            'D2DMODELS': 'P-ETSS',

+            'D2DAccumulativeElements': [

+                'Surge10Pct',

+                'Surge20Pct',

+                'Surge30Pct',

+                'Surge40Pct',

+                'Surge50Pct',

+                'Surge90Pct',

+                'Surge10Pctincr',

+                'Surge20Pctincr',

+                'Surge30Pctincr',

+                'Surge40Pctincr',

+                'Surge50Pctincr',

+                'Surge90Pctincr',

+                'PSurge0Ftincr',

+                'PSurge1Ftincr',

+                'PSurge2Ftincr',

+                'PSurge3Ftincr',

+                'PSurge4Ftincr',

+                'PSurge5Ftincr',

+                'PSurge6Ftincr',

+                'PSurge7Ftincr',

+                'PSurge8Ftincr',

+                'PSurge9Ftincr',

+                'PSurge10Ftincr',

+                'PSurge13Ftincr',

+                'PSurge16Ftincr',

+                'PSurge0Ft',

+                'PSurge1Ft',

+                'PSurge2Ft',

+                'PSurge3Ft',

+                'PSurge4Ft',

+                'PSurge5Ft',

+                'PSurge6Ft',

+                'PSurge7Ft',

+                'PSurge8Ft',

+                'PSurge9Ft',

+                'PSurge10Ft',

+                'PSurge13Ft',

+                'PSurge16Ft',

+                'PSurgeMaxincr',

+                'PSurgeMeanincr',

+                'PSurgeMinincr',

+                'PSurgeMax',

+                'PSurgeMean',

+                'PSurgeMin',

+            ],

+        }

+

+modelDict['TPCtcm'] = {

+            'DB': ('TPCtcm', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [([HiWind], TC3),

+                     ],

+            }

+

+modelDict['URMA25'] = {

+            'D2DAccumulativeElements': ['tp'],

+            'D2DMODELS': 'URMA25',

+            'DB': ('URMA25', 'GRID', '', YES, NO, 1, 36),

+            'INITMODULES': 'URMA25',

+            'Parms': [([MaxRH], MaxRHTC),

+                     ([MaxT], MaxTTC),

+                     ([MinRH], MinRHTC),

+                     ([MinT], MinTTC),

+                     ([PressUnc, Pressure, QPE, RH, Sky, SkyUnc, TUnc, Td, TdUnc,

+                        Temp, Vis, VisUnc, WDirUnc, WGustUnc, WSpdUnc, Wind,

+                        WindGust], TC1),

+                     ],

+            }

+

+modelDict['WCwave10'] = {

+            'D2DMODELS': 'WCwave10',

+            'DB': ('WCwave10', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [(waveParms, TC3),

+                     ],

+            }

+

+modelDict['WCwave4'] = {

+            'D2DMODELS': 'WCwave4',

+            'DB': ('WCwave4', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [(waveParms, TC3),

+                     ],

+            }

+

+modelDict['WNAWAVE'] = {

+            'DB': ('WNAWAVE', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [(waveParms, TC6),

+                     ],

+            }

+

+modelDict['WNAWAVE238'] = {

+            'D2DMODELS': 'WNAWAVE238',}

+

+modelDict['WNAwave10'] = {

+            'D2DMODELS': 'WNAwave10',

+            'DB': ('WNAwave10', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [(waveParms, TC3),

+                     ],

+            }

+

+modelDict['WNAwave4'] = {

+            'D2DMODELS': 'WNAwave4',

+            'DB': ('WNAwave4', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [(waveParms, TC3),

+                     ],

+            }

+

+# This list will be used to set up a default ignoreDatabases list. This is shorter than

+# listing all models to ignore.

+includeOnly=[]

+if SID in groups['ALASKA_SITES']:

+    modelDict['AKwave4'] = {

+            'D2DMODELS': 'AKwave4',

+            'D2DDBVERSIONS': 2,

+            'DB': ('AKwave4', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [([Period, Period2, Swell, Swell2, WaveHeight, Wind, 

+                       WindWaveHgt, WindWavePeriod], TC3),

+                     ],

+            }

+

+    modelDict['AKwave10'] = {

+            'D2DMODELS': 'AKwave10',

+            'D2DDBVERSIONS': 2,

+            'DB': ('AKwave10', 'GRID', '', NO,  NO, 2, 0),

+            'Parms': [([Period, Period2, Swell, Swell2, WaveHeight, Wind, 

+                        WindWaveHgt, WindWavePeriod], TC3),

+                     ],

+            }

+    

+    updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsAK')

+    updateModelDict(modelDict,'ETSS','D2DMODELS', 'ETSS-AK')

+    updateModelDict(modelDict,'GFS20','D2DMODELS', 'AK-GFS22')

+    updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-AK')

+    updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-AK')

+    updateModelDict(modelDict,'MOSGuide','D2DMODELS', 'MOSGuide-AK')

+    updateModelDict(modelDict,'NAM12','D2DMODELS', 'AK-NAM11')

+    updateModelDict(modelDict,'NamDNG','D2DMODELS', 'AK-NamDNG3')

+    updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendAK')

+    updateModelDict(modelDict,'RTMA','D2DMODELS', 'AK-RTMA3')

+    updateModelDict(modelDict,'SREF','D2DMODELS', 'SREF216')

+    updateModelDict(modelDict,'URMA','D2DMODELS', 'AK-URMA')

+    updateModelDict(modelDict,'RTOFS-Alaska','D2DMODELS', 'RTOFS-Alaska')

+    updateModelDict(modelDict,'RTOFS-Alaska','D2DMODELS', 'RTOFS-Alaska')

+    updateModelDict(modelDict,'RTOFS-Arctic','D2DMODELS', 'RTOFS-Arctic')

+    updateModelDict(modelDict,'RTOFS-Bering','D2DMODELS', 'RTOFS-Bering')

+    updateModelDict(modelDict,'RTOFS-GulfAlaska','D2DMODELS', 'RTOFS-GulfAlaska')

+    updateModelDict(modelDict,'PETSS','D2DMODELS', 'P-ETSS-AK')

+    # Model databases for Alaska

+    includeOnly = ['AKwave4', 'AKwave10', 'BaseTerrain', 'CRMTopo', 'ECMWFHiRes', 'ESTOFS', 

+                   'ETSS',  'GFS20',  'GWW', 'HIRESWarw', 'HIRESWnmm', 'MOSGuide', 'NAM12', 

+                   'NamDNG', 'NationalBlend', 'NED', 'NewTerrain', 'RTMA', 'RTOFS-Alaska', 

+                   'RTOFS-Arctic', 'RTOFS-Bering', 'RTOFS-GulfAlaska', 'SAT', 'SREF', 'URMA',

+                   'nwpsCG1AER', 'nwpsCG1AFG', 'nwpsCG1AJK', 'nwpsCG1ALU', 'nwpsTrkngCG0AER', 

+                   'nwpsTrkngCG0AFG', 'nwpsTrkngCG0AJK', 'nwpsTrkngCG0ALU', 'PETSS',

+                  ]

+

+# Hawaii OCONUS

+elif SID == "HFO":

+    modelDict['GFS75'] = {

+            'D2DMODELS': 'AVN225',

+            'D2DAccumulativeElements': ['tp', 'cp'],

+            'DB': ('GFS75', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'GFS75',

+            'Parms': STD6_MODEL,

+            }

+

+    updateModelDict(modelDict,'WaveWatch','D2DMODELS', 'WaveWatch')

+    updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave')

+    updateModelDict(modelDict,'RTMA','D2DMODELS', 'HI-RTMA')

+    updateModelDict(modelDict,'NamDNG','D2DMODELS', 'HI-NamDNG5')

+    updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-HI')

+    updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-HI')

+    updateModelDict(modelDict,'SPC','D2DMODELS', 'SPCGuide')

+    updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb')

+    updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim')

+    updateModelDict(modelDict,'ECMWFHiRes','D2DMODELS', 'ECMWF-HiRes')

+    updateModelDict(modelDict,'RTOFS-Honolulu','D2DMODELS', 'RTOFS-Honolulu')

+    updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsHI')

+    updateModelDict(modelDict,'MOSGuide','D2DMODELS', 'MOSGuide-HI')

+    updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendHI')

+    # Model databases for HFO

+    includeOnly = ['ECMWFHiRes', 'ESTOFS', 'GFS75', 'WaveWatch', 'GlobalWave',

+                   'HIRESWarw', 'HIRESWnmm', 'MOSGuide', 'NamDNG', 'NationalBlend',

+                   'RTMA', 'RTOFS-Honolulu', 'SPC', 'TPCProb', 'TPCProbPrelim', 'nwpsCG1GUM',

+                   'nwpsCG1HFO', 'nwpsTrkngCG0GUM', 'nwpsTrkngCG0HFO',

+                  ]

+

+# Guam OCONUS

+elif SID == "GUM":

+    modelDict['GFS75'] = {

+            'D2DMODELS': 'AVN225',

+            'D2DAccumulativeElements': ['tp', 'cp'],

+            'DB': ('GFS75', 'GRID', '', NO,  NO, 2, 0),

+            'INITMODULES': 'GFS75',

+            'Parms': STD6_MODEL,

+            }

+

+    updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave')

+    updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb')

+    updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim')

+    updateModelDict(modelDict,'RTOFS-Guam','D2DMODELS', 'RTOFS-Guam')

+    updateModelDict(modelDict,'RTMA','D2DMODELS', 'Guam-RTMA')

+    # Model databases for GUM

+    includeOnly = ['GFS75', 'GlobalWave', 'RTMA', 'RTOFS-Guam', 'TPCProb',

+                   'TPCProbPrelim', 'nwpsCG1GUM', 'nwpsCG1HFO',

+                   'nwpsTrkngCG0GUM', 'nwpsTrkngCG0HFO',

+                  ]

+

+# San Juan OCONUS

+elif SID == "SJU":

+    updateModelDict(modelDict,'GFS80','D2DMODELS', 'AVN211')

+    updateModelDict(modelDict,'NAM80','D2DMODELS', 'ETA')

+    updateModelDict(modelDict,'WaveWatch','D2DMODELS', 'WaveWatch')

+    updateModelDict(modelDict,'GlobalWave','D2DMODELS', 'GlobalWave')

+    updateModelDict(modelDict,'WNAwave10','D2DMODELS', 'WNAwave10')

+    updateModelDict(modelDict,'WNAwave4','D2DMODELS', 'WNAwave4')

+    updateModelDict(modelDict,'RTMA','D2DMODELS', 'PR-RTMA')

+    updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-SJU')

+    updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-SJU')

+    updateModelDict(modelDict,'SPC','D2DMODELS', 'SPCGuide')

+    updateModelDict(modelDict,'TPCProb','D2DMODELS', 'TPCWindProb')

+    updateModelDict(modelDict,'TPCProbPrelim','D2DMODELS', 'TPCWindProb_Prelim')

+    updateModelDict(modelDict,'ECMWFHiRes','D2DMODELS', 'ECMWF-HiRes')

+    updateModelDict(modelDict,'RTOFS-Atlantic','D2DMODELS', 'RTOFS-Atlantic')

+    updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsPR')

+    updateModelDict(modelDict,'NAHwave4','D2DMODELS', 'NAHwave4')

+    updateModelDict(modelDict,'GFS20','D2DMODELS', 'PR-GFS')

+    updateModelDict(modelDict,'NationalBlend','D2DMODELS', 'NationalBlendPR')

+    # Model databases for SJU

+    includeOnly = ['ECMWFHiRes', 'ESTOFS', 'GFS20', 'GFS80', 'WaveWatch',

+                   'GlobalWave', 'HIRESWarw', 'HIRESWnmm', 'NAHwave4', 'NAM80',

+                   'NationalBlend', 'RTMA', 'RTOFS-Atlantic', 'SPC', 'TPCProb',

+                   'TPCProbPrelim', 'WNAwave10', 'WNAwave4',

+                   'nwpsCG1JAX', 'nwpsCG1KEY', 'nwpsCG1MFL', 'nwpsCG1MLB', 'nwpsCG1SJU',

+                   'nwpsTrkngCG0JAX', 'nwpsTrkngCG0KEY', 'nwpsTrkngCG0MFL',

+                   'nwpsTrkngCG0MLB', 'nwpsTrkngCG0SJU',

+                  ]

+

+# East CONUS changes from default modelDict

+elif SID in groups['CONUS_EAST_SITES']:

+    updateModelDict(modelDict,'ESTOFS','D2DMODELS', 'estofsUS')

+    updateModelDict(modelDict,'HIRESWarw','D2DMODELS', 'HiResW-ARW-East')

+    updateModelDict(modelDict,'HIRESWnmm','D2DMODELS', 'HiResW-NMM-East')

+

+if SID in groups['GreatLake_SITES']:

+    modelDict['GLERL'] = {

+            'D2DMODELS': 'GLERL',

+            'DB': ('GLERL', 'GRID', '', 0, 0, 2, 0),

+            'Parms': [([Period, Swell, WaveHeight], TC1),

+                     ]

+            }

+

+    modelDict['GLWN'] = {'D2DMODELS': 'GLWN'}

+

+# NWPS configuration.

+if SID in ['AFC', 'AER', 'AFG', 'AJK', 'ALU', 'AVAK']:

+    nwpsSites = ['AER', 'AFG', 'AJK', 'ALU',]

+elif SID in ['GUM', 'HFO',]:

+    nwpsSites = ['GUM', 'HFO',]

+elif SID == "SJU":

+    nwpsSites = ['SJU', 'MFL', 'KEY', 'MLB', 'JAX']

+elif SID in ['CAR', 'GYX', 'BOX', 'OKX', 'PHI', 'LWX', 'AKQ', 'MHX', 'ILM', 'CHS',

+             'BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'KEY', 'MFL',

+             'MLB', 'JAX',]:

+     nwpsSites = ['CAR', 'GYX', 'BOX', 'OKX', 'PHI', 'LWX', 'AKQ', 'MHX', 'ILM', 'CHS',

+                  'BRO', 'CRP', 'HGX', 'LCH', 'LIX', 'MOB', 'TAE', 'TBW', 'KEY', 'MFL',

+                  'MLB', 'JAX', 'SJU',]

+elif SID in ['SEW', 'PQR', 'MFR', 'EKA', 'MTR', 'LOX', 'SGX',]:

+    nwpsSites = ['SEW', 'PQR', 'MFR', 'EKA', 'MTR', 'LOX', 'SGX',]

+else:

+    nwpsSites = []

+

+for s in nwpsSites:

+    name='nwpsCG1%s' % s

+    modelDict[name] = {

+            'DB': (name, 'GRID', '', NO,  NO, 2, 0),

+            'D2DMODELS': name,

+            'INITMODULES': name,

+            'Parms': nwpsCG1_MODEL,

+            }

+    name='nwpsTrkngCG0%s' % s

+    modelDict[name] = {

+            'DB': (name, 'GRID', '', NO,  NO, 2, 0),

+            'D2DMODELS': name,

+            'INITMODULES': name,

+            'Parms': nwpsTrkngCG0_MODEL,

+            }

+# This list will be used to set up a default ignoreDatabases list. This is shorter than

+# listing all models to ignore. Usually only set up for sites that aren't CONUS WFOs

+# includeOnly is not designed to be changed by localConfig.

+if includeOnly:

+    for m in sorted(modelDict.keys()):

+        if m not in includeOnly and 'D2DMODELS' in modelDict[m]:

+            ignoreDatabases.append(m)

+

+# END modelDict initial set up

+#------------------------------------------------------------------------------

+# Add in optional parms to Fcst parm def

+if SID in groups['powt']:

+    addPowt(modelDict)

+

+if SID in groups['winterProbs']:

+    addWinterWeatherProbs(modelDict)

+

+if SID in groups['rainfallProbs']:

+    addRainfallProbs(modelDict)

+

+D2DMODELS=[]

+D2DDBVERSIONS={}

+D2DAccumulativeElements={}

+INITMODULES={}

+INITSKIPS={}

+

+localParms = []

+localISCParms = []

+localISCExtraParms = []

+localLogFile = ''

+

+if not BASELINE and siteImport('localConfig'):

+    localParms = getattr(localConfig, 'parms', [])

+    localISCParms = getattr(localConfig, 'parmsISC', [])

+    localISCExtraParms = getattr(localConfig, 'extraISCparms', [])

+    localLogFile = getattr(localConfig, 'logFile', '')

+    modelDict['Fcst']['Parms'] += localParms

+    #ensure office type is set properly in localConfig SITES[]

+    if len(SITES[GFESUITE_SITEID]) == 5:

+        a = list(SITES[GFESUITE_SITEID])

+        a.append(myOfficeType)

+        SITES[GFESUITE_SITEID] = tuple(a)

+    else:

+        myOfficeType = SITES[GFESUITE_SITEID][5]  #probably from localConfig

+

+# Instantiate settings from modelDict

+db=dbConfig(modelDict)

+db.addConfiguredModels(ignoreDatabases)

+DATABASES = db.dbs

+D2DMODELS = db.D2DMODELS

+D2DDBVERSIONS = db.D2DDBVERSIONS

+D2DAccumulativeElements = db.D2DAccumulativeElements

+INITMODULES = db.INITMODULES

+INITSKIPS = db.INITSKIPS

+OFFICIALDBS=list(modelDict['Fcst']['Parms'])

+

+# Create Practice and test databases from Fcst

+DATABASES.append((Official, modelDict['Fcst']['Parms'])),

+DATABASES.append((Practice, modelDict['Fcst']['Parms'])),

+DATABASES.append((TestFcst, modelDict['Fcst']['Parms'])),

+DATABASES.append((Test, modelDict['Fcst']['Parms'])),

+

+for entry in AdditionalISCRouting:

+    (parmList, dbName, editAreaPrefix) = entry

+    parmList = list(parmList)

+    addedIscDbDefinition = (dbName, ) + ISC[1:]

+    addedIscParms = [(parmList, TC1)]

+    DATABASES.append((addedIscDbDefinition, addedIscParms))

+

+# Intersite coordination database parameter groupings, based on

+# OFFICIALDBS, but time constraint is always TC1

+for wes, tc in (OFFICIALDBS + localISCParms):

+    ISCPARMS.append((wes, TC1))

+

+# We also add in any extraISCparms as needed, but only for office

+# types other than our own.

+for wes, officeType in (EXTRA_ISC_PARMS + localISCExtraParms):

+    if myOfficeType == officeType:

+        continue

+    if type(officeType) != str:

+        raise TypeError("Office type not a str: " + repr(officeType))

+    else:

+        if officeType not in VALID_OFFICE_TYPES:

+            raise ValueError("Office type: " + str(officeType) + " does not match any of the following: [" + (', '.join(VALID_OFFICE_TYPES)) + "]")

+    for we in wes:

+        wecopy = list(we)

+        wecopy[0] = wecopy[0] + officeType  #rename the weather element

+        wecopy = tuple(wecopy)

+        ISCPARMS.append(([wecopy], TC1))

+

+# Restore database parameter groupings (based on OFFICIALDBS, but TC1)

+RESTOREPARMS = []

+for wes, tc in modelDict['Fcst']['Parms']:

+    RESTOREPARMS.append((wes, TC1))

+

+# Now add the ISC and Restore databases to the DATABASES groupings

+DATABASES.append((Restore, RESTOREPARMS))

+DATABASES.append((ISC, ISCPARMS))

+

+

+#D logfp=open('/localapps/logs/serverConfig2.log','w')

+#D logfp.write('DATABASE names:\n')

+#D for m in sorted(DATABASES):

+#D     logfp.write('%s\n' % m[0][0])

+#D logfp.write('\n\nDATABASES\n')

+#D pprint.pprint(sorted(DATABASES),logfp,width=130)

+#D logfp.write('\n\nINITMODULES\n')

+#D pprint.pprint(INITMODULES,logfp,width=130)

+#D logfp.write('\n\nD2DMODELS\n')

+#D pprint.pprint(D2DMODELS,logfp,width=130)

+#D logfp.close()

+

+doIt()

+

+#D logfp=open('/localapps/logs/SC_MD2.py','w')

+#D modelDict=createModelDict(locals(),DATABASES,D2DMODELS,D2DDBVERSIONS,D2DAccumulativeElements,

+#D                   INITMODULES,INITSKIPS,logfp)

+#D logfp.close()

+if localLogFile:

+    printServerConfig(sys.modules[__name__],vars(localConfig),localLogFile)

+#D scfp.close()

diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/AbsTime.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/AbsTime.py
index c9785f9356..b32a7fb02b 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/AbsTime.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/AbsTime.py
@@ -1,118 +1,118 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+#
+# Provides a AWIPS I GFE partially-compatible wrapper to AbsTime
 # 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# Provides a AWIPS I GFE partially-compatible wrapper to AbsTime
-# 
-# This class extends standard Python dateTime, so many convenience and
-# manipulation methods are available using the standard syntax.  
-#
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    04/10/08                      chammack       Initial Creation.
-#    12/01/2014       #3875        randerso       Set time zone on underlying datetime object to GMT
-# 
-#
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-from datetime import datetime, timedelta
-import calendar
-from java.util import Date 
-import JUtil
-
-import dateutil.tz
-GMT_ZONE = dateutil.tz.gettz('GMT')
-
-
-class AbsTime(datetime, JUtil.JavaWrapperClass):
-    
-    def __new__(self, arg):
-       if arg is None:
-           return None;
-       
-       if isinstance(arg, int) or isinstance(arg, long) or isinstance(arg, float):            
-           tmp = datetime.utcfromtimestamp(arg)
-       else:
-           tmp = datetime.utcfromtimestamp(arg.getTime() / 1000)
-           
-       return datetime.__new__(self, tmp.year,tmp.month,
-              tmp.day, tmp.hour, tmp.minute, tmp.second, tzinfo=GMT_ZONE)
-       
-    def unixTime(self):
-        tuple = self.utctimetuple()        
-        return calendar.timegm(tuple)
-    
-    def _fromDateTime(self, dt):
-        return AbsTime(calendar.timegm(dt.utctimetuple()))
-    
-    def __add__(self, arg):        
-        if isinstance(arg, int) or isinstance(arg, long) or isinstance(arg, float):
-            offset = arg         
-        key = {"seconds": offset}
-        dt = datetime.__add__(self, timedelta(**key))
-        return self._fromDateTime(dt)
-        
-    def __sub__(self, arg):
-        if isinstance(arg, AbsTime):
-            return self.unixTime() - arg.unixTime()
-        elif isinstance(arg, int) or isinstance(arg, long) or isinstance(arg, float):
-            offset = arg 
-        key = {"seconds": offset}
-        dt = datetime.__sub__(self, timedelta(**key))
-        return self._fromDateTime(dt)
-        
-    def javaDate(self):
-        if not hasattr(self, '__javaDate'):
-            sec = calendar.timegm(self.utctimetuple())
-            self.__javaDate = Date(sec*1000)
-        return self.__javaDate
-    
-    def toJavaObj(self):
-        return javaDate()
-
-    def stringFmt(self, fmt):
-        return self.strftime(fmt)
-    
-    def string(self):
-        return self.stringFmt("%h %d %y %T GMT")
-
-def absTimeYMD(year, month, day, hour=0, minute=0, second=0):
-    tm = datetime(year, month, day, hour, minute, second)
-    tup = tm.utctimetuple()
-    sec = calendar.timegm(tup)
-    return AbsTime(sec)
-
-def current():
-    from com.raytheon.uf.common.time import SimulatedTime
-    return AbsTime(SimulatedTime.getSystemTime().getTime())
-
-def maxFutureTime():
-    from com.raytheon.uf.common.time import TimeRange as JavaTimeRange
-    tr = JavaTimeRange.allTimes()
-    return AbsTime(tr.getEnd())
-    
+# This class extends standard Python dateTime, so many convenience and
+# manipulation methods are available using the standard syntax.  
+#
+#    
+#     SOFTWARE HISTORY
+#    
+#    Date            Ticket#       Engineer       Description
+#    ------------    ----------    -----------    --------------------------
+#    04/10/08                      chammack       Initial Creation.
+#    12/01/2014       #3875        randerso       Set time zone on underlying datetime object to GMT
+# 
+#
+
+##
+# This is a base file that is not intended to be overridden.
+##
+
+
+
+from datetime import datetime, timedelta
+import calendar
+from java.util import Date 
+import JUtil
+
+import dateutil.tz
+GMT_ZONE = dateutil.tz.gettz('GMT')
+
+
+class AbsTime(datetime, JUtil.JavaWrapperClass):
+    # GMT-zoned datetime wrapper convertible to/from java.util.Date.
+    def __new__(cls, arg):
+       if arg is None:
+           return None
+       # Accept a unix timestamp (int/float, seconds) or a java.util.Date.
+       if isinstance(arg, (int, float)):
+           tmp = datetime.utcfromtimestamp(arg)
+       else:
+           tmp = datetime.utcfromtimestamp(arg.getTime() / 1000)
+           
+       return datetime.__new__(cls, tmp.year,tmp.month,
+              tmp.day, tmp.hour, tmp.minute, tmp.second, tzinfo=GMT_ZONE)
+       
+    def unixTime(self):
+        timeTuple = self.utctimetuple()  # avoid shadowing builtin 'tuple'
+        return calendar.timegm(timeTuple)
+    
+    def _fromDateTime(self, dt):
+        return AbsTime(calendar.timegm(dt.utctimetuple()))
+    
+    def __add__(self, arg):        
+        if isinstance(arg, (int, float)):
+            offset = arg         
+        key = {"seconds": offset}
+        dt = datetime.__add__(self, timedelta(**key))
+        return self._fromDateTime(dt)
+        
+    def __sub__(self, arg):
+        if isinstance(arg, AbsTime):
+            return self.unixTime() - arg.unixTime()
+        elif isinstance(arg, (int, float)):
+            offset = arg 
+        key = {"seconds": offset}
+        dt = datetime.__sub__(self, timedelta(**key))
+        return self._fromDateTime(dt)
+        
+    def javaDate(self):
+        # single underscore: '__javaDate' broke caching via name mangling
+        if not hasattr(self, '_javaDate'):
+            sec = calendar.timegm(self.utctimetuple())
+            self._javaDate = Date(sec*1000)
+        return self._javaDate
+    
+    def toJavaObj(self):
+        return self.javaDate()
+
+    def stringFmt(self, fmt):
+        return self.strftime(fmt)
+    
+    def string(self):
+        return self.stringFmt("%h %d %y %T GMT")
+
+def absTimeYMD(year, month, day, hour=0, minute=0, second=0):
+    tm = datetime(year, month, day, hour, minute, second)  # fields interpreted as GMT
+    tup = tm.utctimetuple()
+    sec = calendar.timegm(tup)  # seconds since the unix epoch
+    return AbsTime(sec)
+
+def current():
+    from com.raytheon.uf.common.time import SimulatedTime  # deferred: Java import
+    return AbsTime(SimulatedTime.getSystemTime().getTime())  # honors simulated clock
+
+def maxFutureTime():
+    from com.raytheon.uf.common.time import TimeRange as JavaTimeRange  # deferred: Java import
+    tr = JavaTimeRange.allTimes()
+    return AbsTime(tr.getEnd())  # latest representable time
+    
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/BaseTool.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/BaseTool.py
index c563a4f1f1..502bbb7707 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/BaseTool.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/BaseTool.py
@@ -1,504 +1,512 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-########################################################################
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-#    BaseTool -- library of methods of useful functions, originally ripped out
-#        of SmartScript since they're not specific to GFE and smart tools
-#
-# Author: njensen
-# ----------------------------------------------------------------------------
-########################################################################
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-import math
-
-import UnitConvertor
-
-class BaseTool(UnitConvertor.UnitConvertor):
-    
-    def __init__(self):    
-        UnitConvertor.UnitConvertor.__init__(self)        
-        
-        
-    def interpolateValues(self, height, (h1, v1), (h2, v2)):
-        # Interpolate between the height and values
-
-        # Determine height ratio
-        heightRatio = (height - h1)/ (h2 - h1)
-
-        # Check for vector or scalar values
-        if type(v1) == types.TupleType and type(v2) == types.TupleType:
-            heightRatio2 = (h2 - height) / (h2 - h1)
-            s1, d1 = v1
-            s2, d2 = v2
-            uv1 = self.MagDirToUV(s1, d1)
-            uv2 = self.MagDirToUV(s2, d2)
-            u = heightRatio * uv1[0] + heightRatio2 * uv2[0]
-            v = heightRatio * uv1[1] + heightRatio2 * uv2[1]
-            result = self.UVToMagDir(u,v)
-            return result
-
-        else:
-            diffV = v2 - v1
-            return v1 + heightRatio * diffV
-
-    def linear(self, xmin, xmax, ymin, ymax, we):
-        m = (ymax - ymin) / (xmax - xmin + .0000001)
-        b = ymin - m * xmin
-        return m * we + b
-
-    def extrapolate(self, height, (h1, v1),(h2, v2)):
-        # Extrapolate from the height and values
-        if type(v1) == types.TupleType:
-            # Vector -- Work with mag only
-            mag1,dir1 = v1
-            mag2,dir2 = v2
-            slope = (mag2-mag1)/(h2-h1)
-            mag = mag1 + slope * (h1-height)
-            return (mag,dir1)
-        else:
-            slope = (v2-v1)/(h2-h1)
-            return v1 + slope * (h1-height)
-
-    def interpolateScalarValues(self, height, (h1, v1), (h2, v2)):
-        # Interpolate between the height and values
-
-        # Determine height ratio
-        heightRatio = (height - h1)/ (h2 - h1)
-        diffV = v2 - v1
-        return v1 + heightRatio * diffV
-
-    def interpolateVectorValues(self, height, (h1, v1), (h2, v2)):
-        # Interpolate between the height and values
-
-        # Determine height ratio
-        heightRatio = (height - h1)/ (h2 - h1)
-
-        heightRatio2 = (h2 - height) / (h2 - h1)
-        s1, d1 = v1
-        s2, d2 = v2
-        uv1 = self.MagDirToUV(s1, d1)
-        uv2 = self.MagDirToUV(s2, d2)
-        u = heightRatio * uv1[0] + heightRatio2 * uv2[0]
-        v = heightRatio * uv1[1] + heightRatio2 * uv2[1]
-        result = self.UVToMagDir(u,v)
-        return result
-    
-    def getLevels(self, level1, level2, noDataError=1):
-        # Return a list of levels between and including level1 and level2
-        # Will do ascending or descending depending on order of arguments
-        #    levels = self.getLevels("MB900", "MB500") # descending
-        #    levels = self.getLevels("MB600", "MB1000") # ascending
-        levels = []
-        kinds = [("MB",50),("K", 5)]
-        levelKind = None
-        for kind, increment in kinds:
-            if kind in level1:
-                levelKind = kind
-                levelInc = increment
-        if levelKind is None or levelKind not in level2:
-            return errorReturn(
-                noDataError,
-                "SmartScript.getLevels:: Illegal kind of level."+\
-                "Must be MB or K.  Level1, Level2: "+level1+", "+level2)
-        l1 = level1.replace(levelKind, "")
-        l2 = level2.replace(levelKind, "")
-        try:
-            l1 = int(l1)
-            l2 = int(l2)
-        except:
-            return errorReturn(
-                noDataError,
-                "SmartScript.getLevels:: Illegal level."+\
-                "Level1, Level2: "+level1+", "+level2)
-        if l1 > l2:
-            levelInc = -levelInc
-        for i in xrange(l1, l2, levelInc):
-            levels.append(levelKind + str(i))
-        return levels
-    
-    def round(self, val, mode, increment):
-        if not (mode == "RoundUp" or mode == "RoundDown" or mode == "Nearest"):
-            raise TypeError("mode is invalid: " + `mode`)
-        # convert to float
-        value = float(val)
-        # check for the case where no work is needed.
-        if value % increment == 0:
-            return value
-
-        sign = abs(value) / value
-        delta = 0
-        if mode == "RoundUp" and sign > 0:
-            delta = sign * increment
-        elif mode == "RoundDown" and sign < 0:
-            delta = sign * increment
-
-        if mode == "RoundUp":
-            value = (int(value / increment) * increment) + delta
-        elif mode == "RoundDown":
-            value = (int(value / increment) * increment) + delta
-        elif mode == "Nearest":
-            value = int((value + (sign * increment / 2.0)) / increment) * increment
-        return float(value)    
-    
-    def fformat(self, value, roundVal):
-        # Return a string for the floating point value
-        # truncated to the resolution given by roundVal
-        if roundVal > 1.0:
-            return str(int(value))
-        else:
-            exp = abs(int(math.floor(math.log10(roundVal))))
-            formatString = "{:." + str(exp) + "f}"
-            return formatString.format(value)
-
-
-##    Taken from http://starship.python.net/crew/jhauser/NumAdd.py
-##    by Janko Hauser
-##    """
-##    Module with some additional routines for NumPy. Names partly taken
-##    from similar Matrix languages. Functions which nameclashes to
-##    standard Python builtin names have an `a' prepended (amin, amax).
-##
-##    Some of the functions can take any multidimensional array. Currently
-##    they are not save for given lists or other sequence types than arrays.
-##    """
-##    __version__ = '0.1.1'
-##    __email__ = 'jhauser@ifm.uni-kiel.de'
-##    __author__ = 'Janko Hauser'
-
-    def isinf(m):
-        """
-        Returns a condition array, which is true where m has an Inf value.
-        """
-        n = isnan(m)
-        return isnan(numpy.where(n,0,m)*0.)
-
-    def isnan(m):
-        """
-        Returns a condition array, which is true where m has a NaN value.
-        """
-        return numpy.not_equal(m,m)
-
-    def aindex(condition):
-        """
-        Show multidimensional indices where condition is true.
-        Indix-convention is c-style (fastest last).
-        """
-        lin_index = numpy.nonzero(numpy.ravel(condition))
-        sh = list(numpy.shape(condition))
-        sh.reverse()
-        new_index = numpy.zeros((len(lin_index), len(sh)))
-        mod = numpy.zeros(len(lin_index))
-        for j in numpy.arange(len(lin_index)):
-            count=len(sh)
-            for i in sh:
-                lin_index[j], mod[j] = divmod(lin_index[j], i)
-                count = count - 1
-                new_index[j, count] = mod[j]
-        return new_index
-
-    def DelAxis(m):
-        """
-        Removes all axis with length one
-        """
-        sh = m.shape
-        new_shape=[]
-        for axis_length in sh:
-            if axis_length > 1:
-                new_shape.append(axis_length)
-        return numpy.reshape(m,new_shape)
-
-    def around(m, signif=0):
-        """
-        Should round in the way Python builtin round does it. Presume
-        that this is the right way to do it.
-        """
-        m = numpy.asarray(m)
-        s = sign(m)
-        if signif:
-            m = numpy.absolute(m*10.**signif)
-        else:
-            m = numpy.absolute(m)
-        rem = m-m.astype(numpy.Int)
-        m = numpy.where(numpy.less(rem,0.5), numpy.floor(m), numpy.ceil(m))
-        # convert back
-        if signif:
-            m = m*s/(10.**signif)
-        else:
-            m = m*s
-        return m
-
-    def sign(m):
-        """
-        Gives an array with shape of m. Where array less than 0 a=-1,
-        where m greater null a=1, elsewhere a=0.
-        """
-        m = numpy.asarray(m)
-
-        if ((type(m) == type(1.4)) or (type(m) == type(1))):
-            return m-m-numpy.less(m,0)+numpy.greater(m,0)
-        else:
-            return numpy.zeros(numpy.shape(m))-numpy.less(m,0)+numpy.greater(m,0)
-
-    def diag(m, k=0):
-        """
-        Returns the diagonal of m with offset k.
-        """
-        v = numpy.asarray(m)
-        s = numpy.shape(v)
-        if len(s)==1:
-            n = s[0]+numpy.absolute(k)
-            if k > 0:
-                v = numpy.concatenate(v,numpy.zeros(k, v.typecode()))
-            elif k < 0:
-                v = numpy.concatenate(numpy.zeros(-k, v.typecode()),v)
-            return numpy.multiply(eye(n, k=k), v)
-        elif len(s)==2:
-            v = numpy.add.reduce(eye(s[0], s[1], k=k)*v)
-            if k > 0: return v[:-k]
-            elif k < 0: return v[-k:]
-            else: return v
-
-
-    def corrcoef(x, y=None):
-        """
-        The correlation coefficients of the two vectors x and y or for every
-        column of x.
-        """
-        # Handle the shape tests in cov()
-        c = cov(x, y)
-        d = diag(c)
-        return c/numpy.sqrt(numpy.multiply.outer(d,d))
-
-    def cov(x,y=None):
-        """
-        Covariance matrix of colums of x or the two vectors x,y, where
-        each vector represents one column.
-        """
-        if y:
-            x = numpy.transpose(numpy.array([x,y], x.typecode()))
-        mu = numpy.mean(x)
-        sum_cov = 0.0
-        for v in x:
-            sum_cov = sum_cov+numpy.multiply.outer(v,v)
-        return (sum_cov-len(x)*numpy.multiply.outer(mu,mu))/(len(x)-1)
-
-    def amax(m,axis=0):
-        """
-        Returns the maximum values of m along the axis axis.
-        If axis=None return the absolute maximum of m.
-        """
-        if axis == None:
-            return numpy.maximum.reduce(numpy.ravel(m))
-        else:
-            new_shape=list(m.shape)
-            del(new_shape[axis])
-            return numpy.reshape(numpy.maximum.reduce(m,axis),new_shape)
-
-    def amin(m,axis=0):
-        """
-        Returns the minimum values of m along the axis axis.
-        If axis=None return the absolute minimum of m.
-        """
-        if axis == None:
-            return numpy.minimum.reduce(numpy.ravel(m))
-        else:
-            new_shape=list(m.shape)
-            del(new_shape[axis])
-            return numpy.reshape(numpy.minimum.reduce(m,axis),new_shape)
-
-    def mean(m,axis=0):
-        """
-        Returns the mean of m along axis axis.
-        If axis=None return the overall mean of m.
-        """
-        if axis == None:
-            return numpy.add.reduce(numpy.ravel(m))/(numpy.multiply.reduce(m.shape)*1.)
-        else:
-            new_shape=list(m.shape)
-            del(new_shape[axis])
-            return numpy.reshape(numpy.add.reduce(m,axis)/(m.shape[axis]*1.),new_shape)
-
-    def var(m,axis=0):
-        """
-        Variance of m along axis axis.
-        If axis=None return the overall variance.
-        """
-        mu = mean(m,axis)
-        if axis == None:
-            return (numpy.add.reduce(numpy.power(numpy.ravel(m)-mu,2))) / (numpy.multiply.reduce(
-                m.shape)-1.)
-        else:
-            new_shape=list(m.shape)
-            del(new_shape[axis])
-            return numpy.reshape((numpy.add.reduce(numpy.power(m-mu,2),axis)) / (
-                m.shape[axis]-1.),new_shape)
-
-    def std(m,axis=0):
-        """
-        Standard deviation of m along axis axis.
-        If axis=None return the overall standard deviation.
-        """
-        return numpy.sqrt(var(m,axis))
-
-    def diff(m,axis=0):
-        """
-        Foward difference of m along axis axis.
-        """
-        if m.shape[axis] < 2:
-            raise 'Error, axis needs at least be of length 2'
-
-        l_sl=[slice(None,None,None)]*len(m.shape)
-        u_sl=l_sl[:]
-        l_sl[axis]=slice(1,None,1)
-        u_sl[axis]=slice(None,-1,1)
-
-        return m[l_sl]-m[u_sl]
-
-    def ndiff(m,n=1,axis=0):
-        """
-        N-th forward difference along axis axis.
-        """
-        if m.shape[axis] < 2:
-            raise 'Error, axis needs at least be of length 2'
-
-        l_sl=[slice(None,None,None)]*len(m.shape)
-        u_sl=l_sl[:]
-        l_sl[axis]=slice(1,None,1)
-        u_sl[axis]=slice(None,-1,1)
-
-        if n >= 1:
-            return ndiff(m[l_sl]-m[u_sl],n-1,axis)
-        else:
-            return m[l_sl]-m[u_sl]
-
-    def zonec(m,axis=0):
-        """
-        Reduce the field from the corners to the middle of the vertices
-        """
-        if m.shape[axis] < 2:
-            raise 'Error, axis needs at least be of length 2'
-        l_sl=[slice(None,None,None)]*len(m.shape)
-        u_sl=l_sl[:]
-        u_sl[axis]=slice(1,None,1)
-        l_sl[axis]=slice(None,-1,1)
-
-        return (m[l_sl]+m[u_sl])*0.5
-
-    def gradient(var, x, axis=0):
-        """
-        Calculate the partial derivative of var in x. var and x are
-        of the same shape.
-        """
-        return diff(var, axis=axis) / diff(x, axis=axis)
-
-    def maverage(x, width):
-        """maverage(x, width)
-
-        Creates running mean of width over vector x. x needs to be
-        of 1d.
-
-        e.g width = 5
-
-        x =    1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
-
-        1       5 6 7 8 9    11 12 13 14 15
-        1 2 3     6 7 8 9 10       13 14 15
-        1 2 3 4 5   7 8 9 10 11          15
-        2 3 4 5 5   8 9 10 11 12
-        3 4 5 6 7   9 10 11 12 13
-        4 5 6 7 8   10 11 12 13 14
-        """
-        # test for right shape
-        x = numpy.asarray(x)
-        if len(x.shape) > 1:
-            raise 'Error in rmean, input array x needs to be 1d'
-
-        # construct a matrix where the elements of x are repeated with
-        # a shift of one. Can this be used in general?
-        xbar = numpy.zeros(x.shape,'d')
-        w = int(numpy.floor(width/2.))
-        lx = len(x)
-        l = lx - width+1
-        A = numpy.indices((l,width))[1]+1
-        Y = numpy.indices((l,width))[0]
-        B = A+Y
-        U = numpy.reshape(numpy.take(x,numpy.ravel(numpy.transpose(B))-1), (width,l))
-        xbar[w:lx-w] = mean(U)
-
-        # do the start and end points (width/2) by hand
-        for i in numpy.arange(w)+1:
-            xbar[i-1] = mean(x[:i*2-1])[0]
-
-        for i in numpy.arange(w)+1:
-            xbar[lx-i] = mean(x[lx - (i*2) + 1:])[0]
-
-        return xbar,U
-
-    def  raverage(x, width, offset=0):
-        """raverage(x, width, offset=0)
-
-           average creates a centered mean over width data points
-
-           e.g. width = 5
-
-           x     = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
-                   --------- ---------- --------------
-           xbar  =     3         8            13
-
-           e.g. width = 4
-
-           x     = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
-                   ------- ------- ----------
-           xbar  =   2.5      6.5     10.5
-
-           e.g. width = 4, offset = 1
-
-           x     = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
-                     ------- ------- -----------
-           xbar  =     3.5      7.5     11.5
-
-        """
-        if offset:
-            x = take(x,(numpy.arange(offset, len(x))))
-
-        width = width*1.
-        l  = len(x)-width
-        ll = numpy.floor(len(x)/width);
-        i  = numpy.arange(width)
-        A  = numpy.ones((ll,width))*i[numpy.NewAxis,:]
-        j  = numpy.arange(0,width,l)
-        Y  = numpy.transpose(numpy.ones((width,1))*j[numpy.NewAxis,:])
-        B  = A + Y
-        U  = numpy.reshape(numpy.take(x,numpy.ravel(numpy.transpose(B)).astype(numpy.Int)),(width,ll))
-        xbar = mean(U)
-
-        return xbar
-
-    
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+########################################################################
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#
+#    BaseTool -- library of methods of useful functions, originally ripped out
+#        of SmartScript since they're not specific to GFE and smart tools
+#
+# Author: njensen
+# ----------------------------------------------------------------------------
+########################################################################
+
+##
+# This is a base file that is not intended to be overridden.
+##
+
+
+
+import math
+
+import UnitConvertor
+
+class BaseTool(UnitConvertor.UnitConvertor):
+    
+    def __init__(self):    
+        UnitConvertor.UnitConvertor.__init__(self)        
+        
+        
+    def interpolateValues(self, height, lowerPoint, upperPoint):
+        # Interpolate between two (height, value) pairs at the given height.
+
+        # Each point is a (height, value) pair; value may be scalar or (mag, dir)
+        (h1, v1) = lowerPoint
+        (h2, v2) = upperPoint
+        heightRatio = (height - h1)/ (h2 - h1)
+
+        # Check for vector or scalar values
+        if type(v1) == tuple and type(v2) == tuple:
+            heightRatio2 = (h2 - height) / (h2 - h1)
+            s1, d1 = v1
+            s2, d2 = v2
+            uv1 = self.MagDirToUV(s1, d1)
+            uv2 = self.MagDirToUV(s2, d2)
+            u = heightRatio * uv1[0] + heightRatio2 * uv2[0]
+            v = heightRatio * uv1[1] + heightRatio2 * uv2[1]
+            result = self.UVToMagDir(u,v)
+            return result
+
+        else:
+            diffV = v2 - v1
+            return v1 + heightRatio * diffV
+
+    def linear(self, xmin, xmax, ymin, ymax, we):
+        m = (ymax - ymin) / (xmax - xmin + .0000001)
+        b = ymin - m * xmin
+        return m * we + b
+
+    def extrapolate(self, height, lowerPoint, upperPoint):
+        # Extrapolate a value at height from two (height, value) pairs.
+        (h1, v1) = lowerPoint
+        (h2, v2) = upperPoint
+        if type(v1) == tuple:
+            # Vector -- Work with mag only
+            mag1,dir1 = v1
+            mag2,dir2 = v2
+            slope = (mag2-mag1)/(h2-h1)
+            mag = mag1 + slope * (h1-height)
+            return (mag,dir1)
+        else:
+            slope = (v2-v1)/(h2-h1)
+            return v1 + slope * (h1-height)
+
+    def interpolateScalarValues(self, height, lowerPoint, upperPoint):
+        # Linearly interpolate a scalar value between two (height, value) pairs.
+
+        # Determine height ratio
+        (h1, v1) = lowerPoint
+        (h2, v2) = upperPoint
+        heightRatio = (height - h1)/ (h2 - h1)
+        diffV = v2 - v1
+        return v1 + heightRatio * diffV
+
+    def interpolateVectorValues(self, height, lowerPoint, upperPoint):
+        # Interpolate a (mag, dir) vector between two (height, (mag, dir)) pairs.
+
+        # Determine height ratio
+        (h1, v1) = lowerPoint
+        (h2, v2) = upperPoint
+        heightRatio = (height - h1)/ (h2 - h1)
+
+        heightRatio2 = (h2 - height) / (h2 - h1)
+        s1, d1 = v1
+        s2, d2 = v2
+        uv1 = self.MagDirToUV(s1, d1)
+        uv2 = self.MagDirToUV(s2, d2)
+        u = heightRatio * uv1[0] + heightRatio2 * uv2[0]
+        v = heightRatio * uv1[1] + heightRatio2 * uv2[1]
+        result = self.UVToMagDir(u,v)
+        return result
+    
+    def getLevels(self, level1, level2, noDataError=1):
+        # Return a list of levels between and including level1 and level2
+        # Will do ascending or descending depending on order of arguments
+        #    levels = self.getLevels("MB900", "MB500") # descending
+        #    levels = self.getLevels("MB600", "MB1000") # ascending
+        levels = []
+        kinds = [("MB",50),("K", 5)]
+        levelKind = None
+        for kind, increment in kinds:
+            if kind in level1:
+                levelKind = kind
+                levelInc = increment
+        if levelKind is None or levelKind not in level2:
+            return errorReturn(
+                noDataError,
+                "SmartScript.getLevels:: Illegal kind of level."+\
+                "Must be MB or K.  Level1, Level2: "+level1+", "+level2)
+        l1 = level1.replace(levelKind, "")
+        l2 = level2.replace(levelKind, "")
+        try:
+            l1 = int(l1)
+            l2 = int(l2)
+        except:
+            return errorReturn(
+                noDataError,
+                "SmartScript.getLevels:: Illegal level."+\
+                "Level1, Level2: "+level1+", "+level2)
+        if l1 > l2:
+            levelInc = -levelInc
+        for i in range(l1, l2, levelInc):
+            levels.append(levelKind + str(i))
+        return levels
+    
+    def round(self, val, mode, increment):
+        if not (mode == "RoundUp" or mode == "RoundDown" or mode == "Nearest"):
+            raise TypeError("mode is invalid: " + repr(mode))
+        # convert to float
+        value = float(val)
+        # check for the case where no work is needed.
+        if value % increment == 0:
+            return value
+
+        sign = abs(value) / value
+        delta = 0
+        if mode == "RoundUp" and sign > 0:
+            delta = sign * increment
+        elif mode == "RoundDown" and sign < 0:
+            delta = sign * increment
+
+        if mode == "RoundUp":
+            value = (int(value / increment) * increment) + delta
+        elif mode == "RoundDown":
+            value = (int(value / increment) * increment) + delta
+        elif mode == "Nearest":
+            value = int((value + (sign * increment / 2.0)) / increment) * increment
+        return float(value)    
+    
+    def fformat(self, value, roundVal):
+        # Return a string for the floating point value
+        # truncated to the resolution given by roundVal
+        if roundVal > 1.0:
+            return str(int(value))
+        else:
+            exp = abs(int(math.floor(math.log10(roundVal))))
+            formatString = "{:." + str(exp) + "f}"
+            return formatString.format(value)
+
+
+##    Taken from http://starship.python.net/crew/jhauser/NumAdd.py
+##    by Janko Hauser
+##    """
+##    Module with some additional routines for NumPy. Names partly taken
+##    from similar Matrix languages. Functions which nameclashes to
+##    standard Python builtin names have an `a' prepended (amin, amax).
+##
+##    Some of the functions can take any multidimensional array. Currently
+##    they are not save for given lists or other sequence types than arrays.
+##    """
+##    __version__ = '0.1.1'
+##    __email__ = 'jhauser@ifm.uni-kiel.de'
+##    __author__ = 'Janko Hauser'
+
+    def isinf(m):
+        """
+        Returns a condition array, which is true where m has an Inf value.
+        """
+        n = isnan(m)
+        return isnan(numpy.where(n,0,m)*0.)
+
+    def isnan(m):
+        """
+        Returns a condition array, which is true where m has a NaN value.
+        """
+        return numpy.not_equal(m,m)
+
+    def aindex(condition):
+        """
+        Show multidimensional indices where condition is true.
+        Indix-convention is c-style (fastest last).
+        """
+        lin_index = numpy.nonzero(numpy.ravel(condition))
+        sh = list(numpy.shape(condition))
+        sh.reverse()
+        new_index = numpy.zeros((len(lin_index), len(sh)))
+        mod = numpy.zeros(len(lin_index))
+        for j in numpy.arange(len(lin_index)):
+            count=len(sh)
+            for i in sh:
+                lin_index[j], mod[j] = divmod(lin_index[j], i)
+                count = count - 1
+                new_index[j, count] = mod[j]
+        return new_index
+
+    def DelAxis(m):
+        """
+        Removes all axis with length one
+        """
+        sh = m.shape
+        new_shape=[]
+        for axis_length in sh:
+            if axis_length > 1:
+                new_shape.append(axis_length)
+        return numpy.reshape(m,new_shape)
+
+    def around(m, signif=0):
+        """
+        Should round in the way Python builtin round does it. Presume
+        that this is the right way to do it.
+        """
+        m = numpy.asarray(m)
+        s = sign(m)
+        if signif:
+            m = numpy.absolute(m*10.**signif)
+        else:
+            m = numpy.absolute(m)
+        rem = m-m.astype(int)  # numpy.Int is not a valid dtype; builtin int is
+        m = numpy.where(numpy.less(rem,0.5), numpy.floor(m), numpy.ceil(m))
+        # convert back
+        if signif:
+            m = m*s/(10.**signif)
+        else:
+            m = m*s
+        return m
+
+    def sign(m):
+        """
+        Gives an array with shape of m. Where array less than 0 a=-1,
+        where m greater null a=1, elsewhere a=0.
+        """
+        m = numpy.asarray(m)
+
+        if ((type(m) == type(1.4)) or (type(m) == type(1))):
+            return m-m-numpy.less(m,0)+numpy.greater(m,0)
+        else:
+            return numpy.zeros(numpy.shape(m))-numpy.less(m,0)+numpy.greater(m,0)
+
+    def diag(m, k=0):
+        """
+        Returns the diagonal of m with offset k.
+        """
+        v = numpy.asarray(m)
+        s = numpy.shape(v)
+        if len(s)==1:
+            n = s[0]+numpy.absolute(k)
+            if k > 0:
+                v = numpy.concatenate(v,numpy.zeros(k, v.typecode()))
+            elif k < 0:
+                v = numpy.concatenate(numpy.zeros(-k, v.typecode()),v)
+            return numpy.multiply(eye(n, k=k), v)
+        elif len(s)==2:
+            v = numpy.add.reduce(eye(s[0], s[1], k=k)*v)
+            if k > 0: return v[:-k]
+            elif k < 0: return v[-k:]
+            else: return v
+
+
+    def corrcoef(x, y=None):
+        """
+        The correlation coefficients of the two vectors x and y or for every
+        column of x.
+        """
+        # Handle the shape tests in cov()
+        c = cov(x, y)
+        d = diag(c)
+        return c/numpy.sqrt(numpy.multiply.outer(d,d))
+
+    def cov(x,y=None):
+        """
+        Covariance matrix of colums of x or the two vectors x,y, where
+        each vector represents one column.
+        """
+        if y:
+            x = numpy.transpose(numpy.array([x,y], x.typecode()))
+        mu = numpy.mean(x)
+        sum_cov = 0.0
+        for v in x:
+            sum_cov = sum_cov+numpy.multiply.outer(v,v)
+        return (sum_cov-len(x)*numpy.multiply.outer(mu,mu))/(len(x)-1)
+
+    def amax(m,axis=0):
+        """
+        Returns the maximum values of m along the axis axis.
+        If axis=None return the absolute maximum of m.
+        """
+        if axis == None:
+            return numpy.maximum.reduce(numpy.ravel(m))
+        else:
+            new_shape=list(m.shape)
+            del(new_shape[axis])
+            return numpy.reshape(numpy.maximum.reduce(m,axis),new_shape)
+
+    def amin(m,axis=0):
+        """
+        Returns the minimum values of m along the axis axis.
+        If axis=None return the absolute minimum of m.
+        """
+        if axis == None:
+            return numpy.minimum.reduce(numpy.ravel(m))
+        else:
+            new_shape=list(m.shape)
+            del(new_shape[axis])
+            return numpy.reshape(numpy.minimum.reduce(m,axis),new_shape)
+
+    def mean(m,axis=0):
+        """
+        Returns the mean of m along axis axis.
+        If axis=None return the overall mean of m.
+        """
+        if axis == None:
+            return numpy.add.reduce(numpy.ravel(m))/(numpy.multiply.reduce(m.shape)*1.)
+        else:
+            new_shape=list(m.shape)
+            del(new_shape[axis])
+            return numpy.reshape(numpy.add.reduce(m,axis)/(m.shape[axis]*1.),new_shape)
+
+    def var(m,axis=0):
+        """
+        Variance of m along axis axis.
+        If axis=None return the overall variance.
+        """
+        mu = mean(m,axis)
+        if axis == None:
+            return (numpy.add.reduce(numpy.power(numpy.ravel(m)-mu,2))) / (numpy.multiply.reduce(
+                m.shape)-1.)
+        else:
+            new_shape=list(m.shape)
+            del(new_shape[axis])
+            return numpy.reshape((numpy.add.reduce(numpy.power(m-mu,2),axis)) / (
+                m.shape[axis]-1.),new_shape)
+
+    def std(m,axis=0):
+        """
+        Standard deviation of m along axis axis.
+        If axis=None return the overall standard deviation.
+        """
+        return numpy.sqrt(var(m,axis))
+
+    def diff(m,axis=0):
+        """
+        Forward difference of m along axis axis.
+        """
+        if m.shape[axis] < 2:
+            raise ValueError('Error, axis needs at least be of length 2')
+
+        l_sl=[slice(None,None,None)]*len(m.shape)
+        u_sl=l_sl[:]
+        l_sl[axis]=slice(1,None,1)
+        u_sl[axis]=slice(None,-1,1)
+        # numpy requires tuple (not list) for multidimensional indexing
+        return m[tuple(l_sl)]-m[tuple(u_sl)]
+
+    def ndiff(m,n=1,axis=0):
+        """
+        N-th forward difference along axis axis.
+        """
+        if m.shape[axis] < 2:
+            raise ValueError('Error, axis needs at least be of length 2')
+
+        l_sl=[slice(None,None,None)]*len(m.shape)
+        u_sl=l_sl[:]
+        l_sl[axis]=slice(1,None,1)
+        u_sl[axis]=slice(None,-1,1)
+
+        if n >= 1:
+            return ndiff(m[l_sl]-m[u_sl],n-1,axis)
+        else:
+            return m[l_sl]-m[u_sl]
+
+    def zonec(m,axis=0):
+        """
+        Reduce the field from the corners to the middle of the vertices
+        """
+        if m.shape[axis] < 2:
+            raise ValueError('Error, axis needs at least be of length 2')
+        l_sl=[slice(None,None,None)]*len(m.shape)
+        u_sl=l_sl[:]
+        u_sl[axis]=slice(1,None,1)
+        l_sl[axis]=slice(None,-1,1)
+
+        return (m[l_sl]+m[u_sl])*0.5
+
+    def gradient(var, x, axis=0):
+        """
+        Calculate the partial derivative of var in x. var and x are
+        of the same shape.
+        """
+        return diff(var, axis=axis) / diff(x, axis=axis)
+
+    def maverage(x, width):
+        """maverage(x, width)
+
+        Creates running mean of width over vector x. x needs to be
+        of 1d.
+
+        e.g width = 5
+
+        x =    1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+
+        1       5 6 7 8 9    11 12 13 14 15
+        1 2 3     6 7 8 9 10       13 14 15
+        1 2 3 4 5   7 8 9 10 11          15
+        2 3 4 5 5   8 9 10 11 12
+        3 4 5 6 7   9 10 11 12 13
+        4 5 6 7 8   10 11 12 13 14
+        """
+        # test for right shape
+        x = numpy.asarray(x)
+        if len(x.shape) > 1:
+            raise ValueError('Error in rmean, input array x needs to be 1d')
+
+        # construct a matrix where the elements of x are repeated with
+        # a shift of one. Can this be used in general?
+        xbar = numpy.zeros(x.shape,'d')
+        w = int(numpy.floor(width/2.))
+        lx = len(x)
+        l = lx - width+1
+        A = numpy.indices((l,width))[1]+1
+        Y = numpy.indices((l,width))[0]
+        B = A+Y
+        U = numpy.reshape(numpy.take(x,numpy.ravel(numpy.transpose(B))-1), (width,l))
+        xbar[w:lx-w] = mean(U)
+
+        # do the start and end points (width/2) by hand
+        for i in numpy.arange(w)+1:
+            xbar[i-1] = mean(x[:i*2-1])[0]
+
+        for i in numpy.arange(w)+1:
+            xbar[lx-i] = mean(x[lx - (i*2) + 1:])[0]
+
+        return xbar,U
+
+    def  raverage(x, width, offset=0):
+        """raverage(x, width, offset=0)
+
+           average creates a centered mean over width data points
+
+           e.g. width = 5
+
+           x     = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+                   --------- ---------- --------------
+           xbar  =     3         8            13
+
+           e.g. width = 4
+
+           x     = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+                   ------- ------- ----------
+           xbar  =   2.5      6.5     10.5
+
+           e.g. width = 4, offset = 1
+
+           x     = 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
+                     ------- ------- -----------
+           xbar  =     3.5      7.5     11.5
+
+        """
+        if offset:
+            x = numpy.take(x,(numpy.arange(offset, len(x))))
+
+        width = width*1.
+        l  = len(x)-width
+        ll = int(numpy.floor(len(x)/width))
+        i  = numpy.arange(width)
+        A  = numpy.ones((ll,width))*i[numpy.newaxis,:]
+        j  = numpy.arange(0,width,l)
+        Y  = numpy.transpose(numpy.ones((width,1))*j[numpy.newaxis,:])
+        B  = A + Y
+        U  = numpy.reshape(numpy.take(x,numpy.ravel(numpy.transpose(B)).astype(int)),(width,ll))
+        xbar = mean(U)
+
+        return xbar
+
+    
         
\ No newline at end of file
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/DefaultEditAreaNaming.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/DefaultEditAreaNaming.py
index ad197e6593..674e628e85 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/DefaultEditAreaNaming.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/DefaultEditAreaNaming.py
@@ -1,71 +1,71 @@
-#!/usr/bin/env python
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-import string
-
-# DefaultEditAreaNaming
-# this function defines the default edit area naming convention
-# for use in Maps.py/LocalMaps.py and the MapManager
-
-# could be a string, which is the edit area name (attribute)
-# could be a list, which is the edit area name set of attributes
-# "ZONE"
-
-def defaultEditAreaNaming(info, eanDefinition):
-    # simple case, the edit area name definition is the attribute key
-    if type(eanDefinition) == str:
-        if info.has_key(eanDefinition):
-            return info[eanDefinition]
-        else:
-            return eanDefinition
-
-    elif type(eanDefinition) == list:
-        s = ''
-        for e in eanDefinition:
-            # valid attribute
-            if info.has_key(e):
-                if len(s) == 0:
-                    s = info[e]
-                else:
-                    s = s + "_" + info[e]
-            # not valid attribute, so use definition directly
-            else:
-                if len(s) == 0:
-                    s = e
-                else:
-                    s = s + "_" + e
-
-        return s
-
-
-    else:
-        return ''
-
-
-def getEditAreaName(info, nameAttr):
-    if callable(nameAttr):
-        return nameAttr(info)
-    return defaultEditAreaNaming(info, nameAttr)
+#!/usr/bin/env python

+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+

+##

+# This is a base file that is not intended to be overridden.

+##

+

+

+import string

+

+# DefaultEditAreaNaming

+# this function defines the default edit area naming convention

+# for use in Maps.py/LocalMaps.py and the MapManager

+

+# could be a string, which is the edit area name (attribute)

+# could be a list, which is the edit area name set of attributes

+# "ZONE"

+

+def defaultEditAreaNaming(info, eanDefinition):

+    # simple case, the edit area name definition is the attribute key

+    if type(eanDefinition) == str:

+        if eanDefinition in info:

+            return info[eanDefinition]

+        else:

+            return eanDefinition

+

+    elif type(eanDefinition) == list:

+        s = ''

+        for e in eanDefinition:

+            # valid attribute

+            if e in info:

+                if len(s) == 0:

+                    s = info[e]

+                else:

+                    s = s + "_" + info[e]

+            # not valid attribute, so use definition directly

+            else:

+                if len(s) == 0:

+                    s = e

+                else:

+                    s = s + "_" + e

+

+        return s

+

+

+    else:

+        return ''

+

+

+def getEditAreaName(info, nameAttr):

+    if callable(nameAttr):

+        return nameAttr(info)

+    return defaultEditAreaNaming(info, nameAttr)

diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/JSmartUtils.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/JSmartUtils.py
index 5c537214c9..2dace3f381 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/JSmartUtils.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/JSmartUtils.py
@@ -1,89 +1,89 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# Provides Java implementations of common smart utility functions
-# to boost performance.
-#
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    01/14/2013       #1497        njensen        Initial Creation.
-#    10/12/2015       #4967        randerso       Updated for new JEP API
-#    08/02/2016       #5792        dgilling       Remove unnecessary call to getNDArray.
-# 
-#
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-import jep
-from com.raytheon.uf.common.dataplugin.gfe.util import SmartUtils as JavaSmartUtils
-import numpy
-
-
-def __getMaskIndiciesForJava(mask):
-    flatMask = mask.flat              #flatten the array
-    flatIndicies = numpy.nonzero(flatMask)  # get the indicies of the set cells
-    ysize = mask.shape[1]
-    indexes = []
-    # convert the flat incicies to the x, y indicies
-    for i in flatIndicies:
-        indexes.append((i / ysize, i % ysize))
-
-    #  Make two new jarrays to hold the final coordinate tuples
-    size = len(indexes[0][0])
-    xcoords = jep.jarray(size, jep.JINT_ID)
-    ycoords = jep.jarray(size, jep.JINT_ID)    
-
-    #===================================================================
-    #  Convert the coordinates from a tuple of numpy arrays to a list of
-    #  coordinate tuples
-
-    for index in xrange(size):
-        try:
-            x = indexes[0][0][index]
-            y = indexes[0][1][index]
-            xcoords[index] = int(x)
-            ycoords[index] = int(y)
-        except Exception, e:
-            print e                
-
-    return xcoords, ycoords
-    
-    
-# Originally added for use by BOX SmartInitUtils.SIU_fillEditArea() to speed up their smartInits
-# Should be used by other smartInits that need similar functionality
-def fillEditArea(grid, fillMask, borderMask):    
-    editPointsX, editPointsY  = __getMaskIndiciesForJava(fillMask)
-    borderPointsX, borderPointsY = __getMaskIndiciesForJava(borderMask)
-        
-    gridObj = JavaSmartUtils.fillEditArea(grid, grid.shape[1], grid.shape[0], \
-                                              editPointsY, editPointsX, borderPointsY, borderPointsX)  
-                          
-    retObj = gridObj
-    return retObj
-
-
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+

+#

+# Provides Java implementations of common smart utility functions

+# to boost performance.

+#

+#    

+#     SOFTWARE HISTORY

+#    

+#    Date            Ticket#       Engineer       Description

+#    ------------    ----------    -----------    --------------------------

+#    01/14/2013       #1497        njensen        Initial Creation.

+#    10/12/2015       #4967        randerso       Updated for new JEP API

+#    08/02/2016       #5792        dgilling       Remove unnecessary call to getNDArray.

+# 

+#

+

+##

+# This is a base file that is not intended to be overridden.

+##

+

+

+

+import jep

+from com.raytheon.uf.common.dataplugin.gfe.util import SmartUtils as JavaSmartUtils

+import numpy

+

+

+def __getMaskIndiciesForJava(mask):

+    flatMask = mask.flat              #flatten the array

+    flatIndicies = numpy.nonzero(flatMask)  # get the indicies of the set cells

+    ysize = mask.shape[1]

+    indexes = []

+    # convert the flat incicies to the x, y indicies

+    for i in flatIndicies:

+        indexes.append((i / ysize, i % ysize))

+

+    #  Make two new jarrays to hold the final coordinate tuples

+    size = len(indexes[0][0])

+    xcoords = jep.jarray(size, jep.JINT_ID)

+    ycoords = jep.jarray(size, jep.JINT_ID)    

+

+    #===================================================================

+    #  Convert the coordinates from a tuple of numpy arrays to a list of

+    #  coordinate tuples

+

+    for index in range(size):

+        try:

+            x = indexes[0][0][index]

+            y = indexes[0][1][index]

+            xcoords[index] = int(x)

+            ycoords[index] = int(y)

+        except Exception as e:

+            print(e)                

+

+    return xcoords, ycoords

+    

+    

+# Originally added for use by BOX SmartInitUtils.SIU_fillEditArea() to speed up their smartInits

+# Should be used by other smartInits that need similar functionality

+def fillEditArea(grid, fillMask, borderMask):    

+    editPointsX, editPointsY  = __getMaskIndiciesForJava(fillMask)

+    borderPointsX, borderPointsY = __getMaskIndiciesForJava(borderMask)

+        

+    gridObj = JavaSmartUtils.fillEditArea(grid, grid.shape[1], grid.shape[0], \

+                                              editPointsY, editPointsX, borderPointsY, borderPointsX)  

+                          

+    retObj = gridObj

+    return retObj

+

+

diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/MetLib.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/MetLib.py
index 0c2d04a377..c73456064f 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/MetLib.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/MetLib.py
@@ -1,248 +1,248 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-###  MetLib - a library of meteorological methods for GFE
-###  
-###  This module contains a variety of python methods intended
-###  for making meteorological calculations on gridded data.
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-from numpy import *
-import copy
-
-# CenteredDifference - This method performs a centered difference
-# of the specificd grid.  Edges are calculated using a forward or
-# backward difference so that the grid that is returned is the same
-# size as the input grids.  In general this is a low-level method
-# intended to be used by the derivative methods d_dx, d_dy, d_dz and
-# d_dt.
-#
-# Note that this method returns the simple difference along one axis.
-# Scaling to the correct grid size is the responsibility of the caller.
-def centeredDifference(grid, axis):
-    ## Make sure we have enough dimensions as the axis
-    if axis >= 0 and axis >= len(grid.shape):
-        print "Returning None: axis = ", axis, "grid.shape=", grid.shape
-        return None
-    elif axis < 0 and abs(axis) > len(grid.shape):
-        print "Returning None: axis = ", axis, "grid.shape=", grid.shape
-        return None
-    # Make a slice list of the appropriate length
-    sliceList= []
-    for s in grid.shape:
-        sliceList.append(slice(None, None, None))
-
-    # Define the slices at the specified axis.  Terms labelled with
-    # "1" refer to the middle of the grid, terms with 2 the first edge
-    # of the grid and terms using "3' the last edge of the grid
-    t1 = copy.copy(sliceList)
-    t1[axis] = slice(1, -1, None)
-    a1 = copy.copy(sliceList)
-    a1[axis] = slice(2, None, None)
-    b1 = copy.copy(sliceList)
-    b1[axis] = slice(0, -2, None)
-    t2 = copy.copy(sliceList)
-    t2[axis] = slice(0, 1, None)
-    a2 = copy.copy(sliceList)
-    a2[axis] = slice(1, 2, None)
-    b2 = copy.copy(sliceList)
-    b2[axis] = t2[axis]
-    t3 = copy.copy(sliceList)
-    t3[axis] = slice(-1, None, None)
-    a3 = copy.copy(sliceList)
-    a3[axis] = t3[axis]
-    b3 = copy.copy(sliceList)
-    b3[axis] = slice(-2, -1, None)
-
-    diff = zeros(grid.shape, float64)
-    # Perform the centered difference
-    diff[t1] = (grid[a1] - grid[b1]) / 2.0  # middle
-    diff[t2] = grid[a2] - grid[b2]  # first edge
-    diff[t3] = grid[a3] - grid[b3]  # last edge
-    return diff
-
-# Returns the forward difference derivative
-def forwardDifference(grid, axis):
-    ## Make sure we have enough dimensions as the axis
-    if axis >= 0 and axis >= len(grid.shape):
-        print "Returning None: axis = ", axis, "grid.shape=", grid.shape
-        return None
-    elif axis < 0 and abs(axis) > len(grid.shape):
-        print "Returning None: axis = ", axis, "grid.shape=", grid.shape
-        return None
-
-    # make a list of "None' slices from which we will copy
-    sliceList= []
-    for s in grid.shape:
-        sliceList.append(slice(None, None, None))
-    
-    a = copy.copy(sliceList)   # forward cell
-    a[axis] = slice(1, None, None)
-    
-    b = copy.copy(sliceList)  # center cell
-    b[axis] = slice(0, -1, None)
-
-    t1 = copy.copy(sliceList)     # main grid target
-    t1[axis] = slice(0, -1, None)
-
-    t2 = copy.copy(sliceList)
-    t2[axis] = slice(-1, None, None)  # last edge
-
-    t3 = copy.copy(sliceList)   # second-to-last edge
-    t3[axis] = slice(-2, -1, None)
-    
-    diff = zeros(grid.shape, float64)
-
-    diff[t1] = grid[a] - grid[b]
-    diff[t2] = diff[t3]  # copy second-to-last into last edge
-
-    return diff
-
-# Returns a backward difference derivative
-def backwardDifference(grid, axis):
-    ## Make sure we have enough dimensions as the axis
-    if axis >= 0 and axis >= len(grid.shape):
-        print "Returning None: axis = ", axis, "grid.shape=", grid.shape
-        return None
-    elif axis < 0 and abs(axis) > len(grid.shape):
-        print "Returning None: axis = ", axis, "grid.shape=", grid.shape
-        return None
-
-    # make a list of "None' slices from which we will copy
-    sliceList= []
-    for s in grid.shape:
-        sliceList.append(slice(None, None, None))
-    
-    a = copy.copy(sliceList)   # center cell
-    a[axis] = slice(1, None, None)
-    
-    b = copy.copy(sliceList)  # backward cell
-    b[axis] = slice(0, -1, None)
-
-    t1 = copy.copy(sliceList)     # main grid target
-    t1[axis] = slice(1, None, None)
-
-    t2 = copy.copy(sliceList)
-    t2[axis] = slice(0, 1, None)  # first edge
-
-    t3 = copy.copy(sliceList)   # second edge
-    t3[axis] = slice(1, 2, None)
-    
-    diff = zeros(grid.shape, float64)
-
-    diff[t1] = grid[a] - grid[b]
-    diff[t2] = diff[t3]  # copy second-to-last into last edge
-
-    return diff
-
-# Returns the derivative along the innermost axis.  By convention
-# this is the x-axis.
-def d_dx(grid):
-    return centeredDifference(grid, -1)
-
-# Returns the derivative along the second innermost axis.  By convention
-# this is the y-axis.
-def d_dy(grid):
-    return -centeredDifference(grid, -2)
-
-# Returns the derivative along the third innermost axis.  By convention
-# this is the z-axis.  If a 2-dimensional grid is specified, an error
-# will be returned from centeredDifference
-def d_dz(grid):
-    return centeredDifference(grid, -3)
-
-# Returns the derivative along the outermost axis.  By convention
-# this is the time-axis.  If a grid of less than 4 dimensions is
-# specified, the centered difference method will report an error.
-def d_dt(grid):
-    return centeredDifference(grid, 0)
-
-# Returns the dot product of the specified vectors.  Both vector grids
-# are assumed to be specified in u, v components.
-def dot(vectorGrid1, vectorGrid2):
-    return vectorGrid1[0] * vectorGrid2[0] + vectorGrid1[1] * vectorGrid2[1]
-
-# Returns the vector gradient of the specified scalar grid.
-def gradient(grid):
-    return (d_dx(grid), d_dy(grid))
-
-# Returns the divergence of the specified Wind grid.  Wind is assumed
-# to be a vector grid specified in u, v components.
-def divergence(Wind):
-    u, v = Wind
-    return d_dx(u) + d_dy(v)
-
-# Returns the vorticity of the specified Wind grid.  Wind is assumed
-# to be a vector grid specified in u, v components.
-def vorticity(Wind):
-    u, v = Wind
-    return d_dx(v) - d_dy(u)
-
-# Returns the advection of the scalarGrid by the windGrid.  The
-# windGrid is assumed to be a vector specified in u, v components.
-def advection(windGrid, scalarGrid):
-    u, v = windGrid
-    return -dot(windGrid, gradient(scalarGrid))
-
-
-### Utility methods
-
-# Utility methods that uses the specified gridLocation 
-# to generate and return a grid of latitude and a grid of longitude at
-# each grid point.  The gridLoc can be obtained with a call to self.getGridLoc()
-# in any SmartTool.
-def getLatLonGrids(gridLoc):
-    # Fetch the grids
-    latLonGrid = gridLoc.getLatLonGrid()
-    latLonGrid = reshape(latLonGrid, (2, int(gridLoc.getNy()), int(gridLoc.getNx())), order='F')
-    return latLonGrid[1], latLonGrid[0]
-
-# Returns a grid of gridSpacing or the distance from one grid cell to
-# another in meters.  This scalar representation of grid spacing works
-# well for conformal projections only.  Other projections should use a
-# vector grid spacing or a different grid for x grid spacing and y
-# grid spacing.
-def makeSpacingGrid(gridLoc):
-    DEG_TO_RAD = 0.017453292
-    latGrid, lonGrid = getLatLonGrids(gridLoc)
-
-    # x and y grid spacing must be calculated using the same direction
-    # for both.
-    deltaLon = centeredDifference(lonGrid, axis = -2)
-    deltaLat = d_dy(latGrid)
-
-    dxGrid = abs(cos(latGrid * DEG_TO_RAD) * deltaLon * 111111)
-    dyGrid = deltaLat * 111111 # meters per degree
-
-    # calc the total grid spacing using square root of the sum of the squares.
-    spacing = sqrt(dxGrid * dxGrid + dyGrid * dyGrid)
-
-    return spacing
-
-# Returns a grid of coriolis acceleration based purely on latitude.
-def makeCoriolisGrid(latGrid):
-    DEG_TO_RAD = 0.017453292
-    latGrid = latGrid * DEG_TO_RAD
-    f = 2.0 * 0.00007292 * sin(latGrid)
-    return f
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+###  MetLib - a library of meteorological methods for GFE

+###  

+###  This module contains a variety of python methods intended

+###  for making meteorological calculations on gridded data.

+

+##

+# This is a base file that is not intended to be overridden.

+##

+

+

+

+from numpy import *

+import copy

+

+# CenteredDifference - This method performs a centered difference

+# of the specificd grid.  Edges are calculated using a forward or

+# backward difference so that the grid that is returned is the same

+# size as the input grids.  In general this is a low-level method

+# intended to be used by the derivative methods d_dx, d_dy, d_dz and

+# d_dt.

+#

+# Note that this method returns the simple difference along one axis.

+# Scaling to the correct grid size is the responsibility of the caller.

+def centeredDifference(grid, axis):

+    ## Make sure we have enough dimensions as the axis

+    if axis >= 0 and axis >= len(grid.shape):

+        print("Returning None: axis = ", axis, "grid.shape=", grid.shape)

+        return None

+    elif axis < 0 and abs(axis) > len(grid.shape):

+        print("Returning None: axis = ", axis, "grid.shape=", grid.shape)

+        return None

+    # Make a slice list of the appropriate length

+    sliceList= []

+    for s in grid.shape:

+        sliceList.append(slice(None, None, None))

+

+    # Define the slices at the specified axis.  Terms labelled with

+    # "1" refer to the middle of the grid, terms with 2 the first edge

+    # of the grid and terms using "3' the last edge of the grid

+    t1 = copy.copy(sliceList)

+    t1[axis] = slice(1, -1, None)

+    a1 = copy.copy(sliceList)

+    a1[axis] = slice(2, None, None)

+    b1 = copy.copy(sliceList)

+    b1[axis] = slice(0, -2, None)

+    t2 = copy.copy(sliceList)

+    t2[axis] = slice(0, 1, None)

+    a2 = copy.copy(sliceList)

+    a2[axis] = slice(1, 2, None)

+    b2 = copy.copy(sliceList)

+    b2[axis] = t2[axis]

+    t3 = copy.copy(sliceList)

+    t3[axis] = slice(-1, None, None)

+    a3 = copy.copy(sliceList)

+    a3[axis] = t3[axis]

+    b3 = copy.copy(sliceList)

+    b3[axis] = slice(-2, -1, None)

+

+    diff = zeros(grid.shape, float64)

+    # Perform the centered difference

+    diff[t1] = (grid[a1] - grid[b1]) / 2.0  # middle

+    diff[t2] = grid[a2] - grid[b2]  # first edge

+    diff[t3] = grid[a3] - grid[b3]  # last edge

+    return diff

+

+# Returns the forward difference derivative

+def forwardDifference(grid, axis):

+    ## Make sure we have enough dimensions as the axis

+    if axis >= 0 and axis >= len(grid.shape):

+        print("Returning None: axis = ", axis, "grid.shape=", grid.shape)

+        return None

+    elif axis < 0 and abs(axis) > len(grid.shape):

+        print("Returning None: axis = ", axis, "grid.shape=", grid.shape)

+        return None

+

+    # make a list of "None' slices from which we will copy

+    sliceList= []

+    for s in grid.shape:

+        sliceList.append(slice(None, None, None))

+    

+    a = copy.copy(sliceList)   # forward cell

+    a[axis] = slice(1, None, None)

+    

+    b = copy.copy(sliceList)  # center cell

+    b[axis] = slice(0, -1, None)

+

+    t1 = copy.copy(sliceList)     # main grid target

+    t1[axis] = slice(0, -1, None)

+

+    t2 = copy.copy(sliceList)

+    t2[axis] = slice(-1, None, None)  # last edge

+

+    t3 = copy.copy(sliceList)   # second-to-last edge

+    t3[axis] = slice(-2, -1, None)

+    

+    diff = zeros(grid.shape, float64)

+

+    diff[t1] = grid[a] - grid[b]

+    diff[t2] = diff[t3]  # copy second-to-last into last edge

+

+    return diff

+

+# Returns a backward difference derivative

+def backwardDifference(grid, axis):

+    ## Make sure we have enough dimensions as the axis

+    if axis >= 0 and axis >= len(grid.shape):

+        print("Returning None: axis = ", axis, "grid.shape=", grid.shape)

+        return None

+    elif axis < 0 and abs(axis) > len(grid.shape):

+        print("Returning None: axis = ", axis, "grid.shape=", grid.shape)

+        return None

+

+    # make a list of "None' slices from which we will copy

+    sliceList= []

+    for s in grid.shape:

+        sliceList.append(slice(None, None, None))

+    

+    a = copy.copy(sliceList)   # center cell

+    a[axis] = slice(1, None, None)

+    

+    b = copy.copy(sliceList)  # backward cell

+    b[axis] = slice(0, -1, None)

+

+    t1 = copy.copy(sliceList)     # main grid target

+    t1[axis] = slice(1, None, None)

+

+    t2 = copy.copy(sliceList)

+    t2[axis] = slice(0, 1, None)  # first edge

+

+    t3 = copy.copy(sliceList)   # second edge

+    t3[axis] = slice(1, 2, None)

+    

+    diff = zeros(grid.shape, float64)

+

+    diff[t1] = grid[a] - grid[b]

+    diff[t2] = diff[t3]  # copy second-to-last into last edge

+

+    return diff

+

+# Returns the derivative along the innermost axis.  By convention

+# this is the x-axis.

+def d_dx(grid):

+    return centeredDifference(grid, -1)

+

+# Returns the derivative along the second innermost axis.  By convention

+# this is the y-axis.

+def d_dy(grid):

+    return -centeredDifference(grid, -2)

+

+# Returns the derivative along the third innermost axis.  By convention

+# this is the z-axis.  If a 2-dimensional grid is specified, an error

+# will be returned from centeredDifference

+def d_dz(grid):

+    return centeredDifference(grid, -3)

+

+# Returns the derivative along the outermost axis.  By convention

+# this is the time-axis.  If a grid of less than 4 dimensions is

+# specified, the centered difference method will report an error.

+def d_dt(grid):

+    return centeredDifference(grid, 0)

+

+# Returns the dot product of the specified vectors.  Both vector grids

+# are assumed to be specified in u, v components.

+def dot(vectorGrid1, vectorGrid2):

+    return vectorGrid1[0] * vectorGrid2[0] + vectorGrid1[1] * vectorGrid2[1]

+

+# Returns the vector gradient of the specified scalar grid.

+def gradient(grid):

+    return (d_dx(grid), d_dy(grid))

+

+# Returns the divergence of the specified Wind grid.  Wind is assumed

+# to be a vector grid specified in u, v components.

+def divergence(Wind):

+    u, v = Wind

+    return d_dx(u) + d_dy(v)

+

+# Returns the vorticity of the specified Wind grid.  Wind is assumed

+# to be a vector grid specified in u, v components.

+def vorticity(Wind):

+    u, v = Wind

+    return d_dx(v) - d_dy(u)

+

+# Returns the advection of the scalarGrid by the windGrid.  The

+# windGrid is assumed to be a vector specified in u, v components.

+def advection(windGrid, scalarGrid):

+    u, v = windGrid

+    return -dot(windGrid, gradient(scalarGrid))

+

+

+### Utility methods

+

+# Utility methods that uses the specified gridLocation 

+# to generate and return a grid of latitude and a grid of longitude at

+# each grid point.  The gridLoc can be obtained with a call to self.getGridLoc()

+# in any SmartTool.

+def getLatLonGrids(gridLoc):

+    # Fetch the grids

+    latLonGrid = gridLoc.getLatLonGrid()

+    latLonGrid = reshape(latLonGrid, (2, int(gridLoc.getNy()), int(gridLoc.getNx())), order='F')

+    return latLonGrid[1], latLonGrid[0]

+

+# Returns a grid of gridSpacing or the distance from one grid cell to

+# another in meters.  This scalar representation of grid spacing works

+# well for conformal projections only.  Other projections should use a

+# vector grid spacing or a different grid for x grid spacing and y

+# grid spacing.

+def makeSpacingGrid(gridLoc):

+    DEG_TO_RAD = 0.017453292

+    latGrid, lonGrid = getLatLonGrids(gridLoc)

+

+    # x and y grid spacing must be calculated using the same direction

+    # for both.

+    deltaLon = centeredDifference(lonGrid, axis = -2)

+    deltaLat = d_dy(latGrid)

+

+    dxGrid = abs(cos(latGrid * DEG_TO_RAD) * deltaLon * 111111)

+    dyGrid = deltaLat * 111111 # meters per degree

+

+    # calc the total grid spacing using square root of the sum of the squares.

+    spacing = sqrt(dxGrid * dxGrid + dyGrid * dyGrid)

+

+    return spacing

+

+# Returns a grid of coriolis acceleration based purely on latitude.

+def makeCoriolisGrid(latGrid):

+    DEG_TO_RAD = 0.017453292

+    latGrid = latGrid * DEG_TO_RAD

+    f = 2.0 * 0.00007292 * sin(latGrid)

+    return f

diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/ParmID.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/ParmID.py
index 6134c769fd..4eb052e2d0 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/ParmID.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/ParmID.py
@@ -1,124 +1,124 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-#
-# SOFTWARE HISTORY
-#
-# Date          Ticket#  Engineer  Description
-# ------------- -------- --------- ---------------------------------------------
-# Feb 06, 2017  5959     randerso  Removed Java .toString() calls 
-#
-##
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID as JavaParmID
-import JUtil
-
-
-class ParmID(JUtil.JavaWrapperClass):
-    "Wrapper class for com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID"
-    
-    def __init__(self, name=None, model=None, dbid=None, level=None, jParmId=None):
-        if jParmId is not None:
-            self.__pid = jParmId
-            
-        elif name is None:
-            if model is None and dbid is None and level is None:
-                self.__pid = JavaParmId()
-            else:
-                raise ValueError, '"name" must be given if any arguments are supplied.'
-        else:
-            if dbid is None and model is None and level is None:
-                # name is an ident string
-                self.__pid = JavaParmID(name)
-            elif dbid is None and model is None:
-                raise ValueError, '"level" cannot be specified without "dbid" or "model".' 
-            elif dbid is not None and model is not None:
-                raise ValueError, '"model" and "dbid" cannot both be specified.'
-            elif dbid is not None:
-                # assume it is a DatabaseID.DatabaseID
-                dbid = dbid.toJavaObj()
-                if level is None:
-                    self.__pid = JavaParmID(name, dbid)
-                else:
-                    self.__pid = JavaParmID(name, dbid, level)
-            else:
-                if level is None:
-                    self.__pid = JavaParmID(name, model)
-                else:
-                    self.__pid = JavaParmID(name, model, level)
-
-    def __str__(self):
-        return str(self.__pid)
-
-    @staticmethod
-    def defaultLevel():
-        return JavaParmID.defaultLevel()
-    
-    def toJavaObj(self):
-        return self.__pid
-    
-    def compositeNameUI(self):
-        return self.__pid.compositeNameUI()
-    
-    def parmNameAndLevel(self, composite):
-        retval = self.__pid.parmNameAndLevel(composite)
-        retval = JUtil.javaStringListToPylist(retval)
-        return retval
-
-    def expressionName(self, topoID, mutableID, includeTime):
-        return self.__pid.expressionName(topoID.toJavaObj(), mutableID.toJavaObj(), includeTime)
-    
-    def isValid(self):
-        return self.__pid.isValid()
-        
-    def getParmName(self):
-        return self.__pid.getParmName()
-    
-    def getParmLevel(self):
-        return self.__pid.getParmLevel()
-    
-    def getDbId(self):
-        return DatabaseID.DatabaseID(self.__pid.getDbId())
-    
-    def getCompositeName(self):
-        return self.__pid.getCompositeName()
-    
-    def getShortParmId(self):
-        return self.__pid.getShortParmId()
-    
-    def getParmId(self):
-        return self.__pid.getParmId()
-    
-    @staticmethod
-    def shortSerializer(parmID):
-        return JavaParmID.shortSerializer(parmID.javaParmId())
-
-    @staticmethod
-    def shortDeserializer(parmIDasString):
-        return ParmID(parmIDasString)
-    
-    def getUIFormattedString(self):
-        return self.__pid.getUIFormattedString()
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+#
+# SOFTWARE HISTORY
+#
+# Date          Ticket#  Engineer  Description
+# ------------- -------- --------- ---------------------------------------------
+# Feb 06, 2017  5959     randerso  Removed Java .toString() calls 
+#
+##
+
+##
+# This is a base file that is not intended to be overridden.
+##
+
+
+
+from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID as JavaParmID
+import JUtil
+
+
+class ParmID(JUtil.JavaWrapperClass):
+    "Wrapper class for com.raytheon.uf.common.dataplugin.gfe.db.objects.ParmID"
+    
+    def __init__(self, name=None, model=None, dbid=None, level=None, jParmId=None):
+        if jParmId is not None:
+            self.__pid = jParmId
+            
+        elif name is None:
+            if model is None and dbid is None and level is None:
+                self.__pid = JavaParmId()
+            else:
+                raise ValueError('"name" must be given if any arguments are supplied.')
+        else:
+            if dbid is None and model is None and level is None:
+                # name is an ident string
+                self.__pid = JavaParmID(name)
+            elif dbid is None and model is None:
+                raise ValueError('"level" cannot be specified without "dbid" or "model".') 
+            elif dbid is not None and model is not None:
+                raise ValueError('"model" and "dbid" cannot both be specified.')
+            elif dbid is not None:
+                # assume it is a DatabaseID.DatabaseID
+                dbid = dbid.toJavaObj()
+                if level is None:
+                    self.__pid = JavaParmID(name, dbid)
+                else:
+                    self.__pid = JavaParmID(name, dbid, level)
+            else:
+                if level is None:
+                    self.__pid = JavaParmID(name, model)
+                else:
+                    self.__pid = JavaParmID(name, model, level)
+
+    def __str__(self):
+        return str(self.__pid)
+
+    @staticmethod
+    def defaultLevel():
+        return JavaParmID.defaultLevel()
+    
+    def toJavaObj(self):
+        return self.__pid
+    
+    def compositeNameUI(self):
+        return self.__pid.compositeNameUI()
+    
+    def parmNameAndLevel(self, composite):
+        retval = self.__pid.parmNameAndLevel(composite)
+        retval = JUtil.javaStringListToPylist(retval)
+        return retval
+
+    def expressionName(self, topoID, mutableID, includeTime):
+        return self.__pid.expressionName(topoID.toJavaObj(), mutableID.toJavaObj(), includeTime)
+    
+    def isValid(self):
+        return self.__pid.isValid()
+        
+    def getParmName(self):
+        return self.__pid.getParmName()
+    
+    def getParmLevel(self):
+        return self.__pid.getParmLevel()
+    
+    def getDbId(self):
+        return DatabaseID.DatabaseID(self.__pid.getDbId())
+    
+    def getCompositeName(self):
+        return self.__pid.getCompositeName()
+    
+    def getShortParmId(self):
+        return self.__pid.getShortParmId()
+    
+    def getParmId(self):
+        return self.__pid.getParmId()
+    
+    @staticmethod
+    def shortSerializer(parmID):
+        return JavaParmID.shortSerializer(parmID.javaParmId())
+
+    @staticmethod
+    def shortDeserializer(parmIDasString):
+        return ParmID(parmIDasString)
+    
+    def getUIFormattedString(self):
+        return self.__pid.getUIFormattedString()
     
\ No newline at end of file
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/ShapeTable.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/ShapeTable.py
index b69af9b9fb..3e70dfd717 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/ShapeTable.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/ShapeTable.py
@@ -1,81 +1,81 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-# Python wrapper class for PostGIS table with interface like A1 ShapeFile.py
-#  
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    09/04/12            #9441     randerso       Initial Creation.
-#    
-# 
-#
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-import JUtil
-import DefaultEditAreaNaming
-from com.raytheon.edex.plugin.gfe.reference import DbShapeSource
-
-
-class ShapeTable(JUtil.JavaWrapperClass):
-    def __init__(self, identifier):
-        self.identifier = identifier
-        self.name = None
-        self.editAreaName = None
-        self.groupName = None
-        self.javaObj = DbShapeSource(identifier)
-        pass    
-
-    def filename(self, filen):
-        raise NotImplementedError, "This method is obsolete. See comments in Maps.py"
-    
-    def filter(self, fn):
-        if callable(fn):
-            self._func = fn
-            self.javaObj.setFiltered(True)
-        else:
-            raise TypeError(self.__class__+".filter() requires a function")
-        
-    def doFilter(self, atts):
-        return self._func(atts)
-    
-    def getEAName(self, atts):
-        if self.editAreaName is not None:
-            return DefaultEditAreaNaming.getEditAreaName(atts, self.editAreaName)
-        
-        return ""
-
-    def toJavaObj(self):
-        self.javaObj.setDisplayName(self.name)
-        self.javaObj.setGroupName(self.groupName)
-        if self.editAreaName is not None:
-            self.javaObj.setHasEditAreaName(True);
-        return self.javaObj
-    
-    def __repr__(self):
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+

+#

+# Python wrapper class for PostGIS table with interface like A1 ShapeFile.py

+#  

+#    

+#     SOFTWARE HISTORY

+#    

+#    Date            Ticket#       Engineer       Description

+#    ------------    ----------    -----------    --------------------------

+#    09/04/12            #9441     randerso       Initial Creation.

+#    

+# 

+#

+

+##

+# This is a base file that is not intended to be overridden.

+##

+

+

+

+import JUtil

+import DefaultEditAreaNaming

+from com.raytheon.edex.plugin.gfe.reference import DbShapeSource

+

+

+class ShapeTable(JUtil.JavaWrapperClass):

+    def __init__(self, identifier):

+        self.identifier = identifier

+        self.name = None

+        self.editAreaName = None

+        self.groupName = None

+        self.javaObj = DbShapeSource(identifier)

+        pass    

+

+    def filename(self, filen):

+        raise NotImplementedError("This method is obsolete. See comments in Maps.py")

+    

+    def filter(self, fn):

+        if callable(fn):

+            self._func = fn

+            self.javaObj.setFiltered(True)

+        else:

+            raise TypeError(self.__class__+".filter() requires a function")

+        

+    def doFilter(self, atts):

+        return self._func(atts)

+    

+    def getEAName(self, atts):

+        if self.editAreaName is not None:

+            return DefaultEditAreaNaming.getEditAreaName(atts, self.editAreaName)

+        

+        return ""

+

+    def toJavaObj(self):

+        self.javaObj.setDisplayName(self.name)

+        self.javaObj.setGroupName(self.groupName)

+        if self.editAreaName is not None:

+            self.javaObj.setHasEditAreaName(True);

+        return self.javaObj

+    

+    def __repr__(self):

         return self.identifier
\ No newline at end of file
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/UnitConvertor.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/UnitConvertor.py
index 0813dbb6fc..ef08708364 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/UnitConvertor.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/UnitConvertor.py
@@ -1,163 +1,163 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# UnitConverter.py
-#
-# This class returns a method for converting units.
-#
-# The arguments are the input units, the output units and the "element string"
-# (i.e. "Wind")
-#
-# Author: dmiller
-# ----------------------------------------------------------------------------
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-class UnitConvertor:
-    def __init__(self):
-        pass
-
-    def getConvertMethod(self, inUnits, outUnits, elementString):
-        if inUnits == outUnits:
-            convertMethod = float
-        elif inUnits == "kts" and outUnits == "mph":
-            convertMethod = self.ktToMph
-        elif inUnits == "mph" and outUnits == "kts":
-            convertMethod = self.mphToKt
-
-        elif inUnits == "ft" and outUnits == "m":
-            convertMethod = self.ftToM
-        elif inUnits == "m" and outUnits == "ft":
-            convertMethod = self.mToFt
-
-        elif inUnits == "mm" and outUnits == "in":
-            convertMethod = self.mmToIn
-        elif inUnits == "in" and outUnits == "mm":
-            convertMethod = self.inToMm
-
-        elif inUnits == "kt" and outUnits == "m/s":
-            convertMethod = self.ktToMps
-        elif inUnits == "m/s" and outUnits == "kt":
-            convertMethod = self.mpsToKt
-        elif inUnits == "m/s" and outUnits == "mph":
-            convertMethod = self.mpsToMph
-
-        elif inUnits == "kt-ft" and outUnits == "m^2/s":
-            convertMethod = self.ktftToM2ps
-        elif inUnits == "m^2/s" and outUnits == "kt-ft":
-            convertMethod = self.m2psToKtft
-
-        elif inUnits == "F" and outUnits == "C":
-            convertMethod = self.FtoC
-        elif inUnits == "C" and outUnits == "F":
-            convertMethod = self.CtoF
-
-        elif inUnits == "K" and outUnits == "C":
-            convertMethod = self.KtoC
-        elif inUnits == "C" and outUnits == "K":
-            convertMethod = self.CtoK
-
-        elif inUnits == "K" and outUnits == "F":
-            convertMethod = self.KtoF
-        elif inUnits == "F" and outUnits == "K":
-            convertMethod = self.FtoK
-
-        else:
-            raise TypeError, "Invalid input or output Units for " + \
-            elementString+" :" + inUnits + " " + outUnits
-        return convertMethod
-
-    def ktToMph(self, value):
-        "Convert from knots to mph"
-        return value * 1.151
-
-    def mphToKt(self, value):
-        "Convert from mph to knots"
-        return value * 0.868
-
-    def ktToMps(self, value):
-        "Convert from knots to m/s"
-        return value * 0.515
-
-    def mpsToKt(self, value):
-        "Convert from m/s to knots"
-        return value * 1.944
-
-    def mpsToMph(self, value):
-        "Convert from m/s to mph"
-        return value * 2.237
-
-    def mToFt(self, value):
-        "Convert from meters to feet"
-        return value * 3.28084
-
-    def ftToM(self, value):
-        "Convert from feet to meters"
-        return value / 3.28084
-
-    def mmToIn(self, value):
-        "Convert from millimeters to inches"
-        return value * 25.4
-
-    def inToMm(self, value):
-        "Convert from inches to millimeters"
-        return value * 0.3937
-
-    def ktftToM2ps(self, value):
-        "Convert from kt-ft to m^2/sec"
-        return value * 0.157
-
-    def m2psToKtft(self, value):
-        "Convert from m^2/sec to kt-ft"
-        return value * 6.371
-
-    def FtoC(self, value):
-        "Convert from degrees Fahrenheit to degrees Celcius"
-        return (value - 32) * 0.5556
-
-    def CtoF(self, value):
-        "Convert from degrees Celcius to degrees Fahrenheit"
-        return (value * 1.8) + 32
-
-    def KtoF(self, value):
-        "Convert from Kelvin to degrees Fahrenheit"
-        #return (value * 1.8) - 459.67
-        return self.CtoF(self.KtoC(value))
-
-    def FtoK(self, value):
-        "Convert from degrees Fahrenheit to Kelvin"
-        #return (value - 32)*0.5556 + 273.15
-        return self.CtoK(self.FtoC(value))
-
-    def KtoC(self, value):
-        "Convert from Kelvin to degrees Celcius"
-        return value - 273.15
-
-    def CtoK(self, value):
-        "Convert from degrees Celcius to Kelvin"
-        return value + 273.15
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+# ----------------------------------------------------------------------------
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#
+# UnitConverter.py
+#
+# This class returns a method for converting units.
+#
+# The arguments are the input units, the output units and the "element string"
+# (i.e. "Wind")
+#
+# Author: dmiller
+# ----------------------------------------------------------------------------
+
+##
+# This is a base file that is not intended to be overridden.
+##
+
+
+
+class UnitConvertor:
+    def __init__(self):
+        pass
+
+    def getConvertMethod(self, inUnits, outUnits, elementString):
+        if inUnits == outUnits:
+            convertMethod = float
+        elif inUnits == "kts" and outUnits == "mph":
+            convertMethod = self.ktToMph
+        elif inUnits == "mph" and outUnits == "kts":
+            convertMethod = self.mphToKt
+
+        elif inUnits == "ft" and outUnits == "m":
+            convertMethod = self.ftToM
+        elif inUnits == "m" and outUnits == "ft":
+            convertMethod = self.mToFt
+
+        elif inUnits == "mm" and outUnits == "in":
+            convertMethod = self.mmToIn
+        elif inUnits == "in" and outUnits == "mm":
+            convertMethod = self.inToMm
+
+        elif inUnits == "kt" and outUnits == "m/s":
+            convertMethod = self.ktToMps
+        elif inUnits == "m/s" and outUnits == "kt":
+            convertMethod = self.mpsToKt
+        elif inUnits == "m/s" and outUnits == "mph":
+            convertMethod = self.mpsToMph
+
+        elif inUnits == "kt-ft" and outUnits == "m^2/s":
+            convertMethod = self.ktftToM2ps
+        elif inUnits == "m^2/s" and outUnits == "kt-ft":
+            convertMethod = self.m2psToKtft
+
+        elif inUnits == "F" and outUnits == "C":
+            convertMethod = self.FtoC
+        elif inUnits == "C" and outUnits == "F":
+            convertMethod = self.CtoF
+
+        elif inUnits == "K" and outUnits == "C":
+            convertMethod = self.KtoC
+        elif inUnits == "C" and outUnits == "K":
+            convertMethod = self.CtoK
+
+        elif inUnits == "K" and outUnits == "F":
+            convertMethod = self.KtoF
+        elif inUnits == "F" and outUnits == "K":
+            convertMethod = self.FtoK
+
+        else:
+            raise TypeError("Invalid input or output Units for " + \
+            elementString+" :" + inUnits + " " + outUnits)
+        return convertMethod
+
+    def ktToMph(self, value):
+        "Convert from knots to mph"
+        return value * 1.151
+
+    def mphToKt(self, value):
+        "Convert from mph to knots"
+        return value * 0.868
+
+    def ktToMps(self, value):
+        "Convert from knots to m/s"
+        return value * 0.515
+
+    def mpsToKt(self, value):
+        "Convert from m/s to knots"
+        return value * 1.944
+
+    def mpsToMph(self, value):
+        "Convert from m/s to mph"
+        return value * 2.237
+
+    def mToFt(self, value):
+        "Convert from meters to feet"
+        return value * 3.28084
+
+    def ftToM(self, value):
+        "Convert from feet to meters"
+        return value / 3.28084
+
+    def mmToIn(self, value):
+        "Convert from millimeters to inches"
+        return value * 25.4
+
+    def inToMm(self, value):
+        "Convert from inches to millimeters"
+        return value * 0.3937
+
+    def ktftToM2ps(self, value):
+        "Convert from kt-ft to m^2/sec"
+        return value * 0.157
+
+    def m2psToKtft(self, value):
+        "Convert from m^2/sec to kt-ft"
+        return value * 6.371
+
+    def FtoC(self, value):
+        "Convert from degrees Fahrenheit to degrees Celcius"
+        return (value - 32) * 0.5556
+
+    def CtoF(self, value):
+        "Convert from degrees Celcius to degrees Fahrenheit"
+        return (value * 1.8) + 32
+
+    def KtoF(self, value):
+        "Convert from Kelvin to degrees Fahrenheit"
+        #return (value * 1.8) - 459.67
+        return self.CtoF(self.KtoC(value))
+
+    def FtoK(self, value):
+        "Convert from degrees Fahrenheit to Kelvin"
+        #return (value - 32)*0.5556 + 273.15
+        return self.CtoK(self.FtoC(value))
+
+    def KtoC(self, value):
+        "Convert from Kelvin to degrees Celcius"
+        return value - 273.15
+
+    def CtoK(self, value):
+        "Convert from degrees Celcius to Kelvin"
+        return value + 273.15
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/createAreaDictionary.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/createAreaDictionary.py
index d5a8b45e5c..57d733e191 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/createAreaDictionary.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/createAreaDictionary.py
@@ -1,619 +1,619 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-#
-#
-#  Creates area dictionary specific to a site.  Somewhat ported from AWIPS-I.
-#  
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    01/08/10             #1209    randerso       Initial Creation.
-#    10/19/12             #1091    dgilling       Support localMaps.py.
-#    10/20/2014           #3685    randerso       Converted text to mixed case
-#                                                 Fixed mapDict to keep zones from different maps separate 
-#
-#    12/05/14        4953          randerso       Changed to use LocalizationSupport
-#    03/10/2015      4129          randerso       Fixed error logging
-#    07/01/2016      18114         ryu            Changed timezone designation from 'MST7' to 'US/Arizona'.
-#    07/15/2016      5749          randerso       Changed preformatted ugcCityString to ugcCities list
-#                                                 Added wfo field to support generation of national TCV
-#    09/28/2016      19293         randerso       Added exception handling for createTCVAreaDictionary
-#    11/21/2016      5959          njensen        Remove unused imports and made more pythonic
-##
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-import os, string, copy
-import tempfile, stat
-import LogStream, pprint
-
-from fips2cities import *
-from zones2cities import *
-
-import LocalizationSupport
-
-
-CityLocationDict = {}
-
-# obtain real time zone string from shapefile TIME_ZONE variable
-def getRealTimeZone(tzstring):
-    d = {'M':"MST7MDT",'m':"US/Arizona",'V':'America/Puerto_Rico',
-         'E':"EST5EDT",'e':"EST5",
-         'C':"CST6CDT",'P':"PST8PDT",'A':"America/Anchorage",
-         'H':"Pacific/Honolulu",'G':"Pacific/Guam", 
-         'J':"Pacific/Palu", 'K': "Pacific/Wake", 'F': "Pacific/Ponape"}
-    tzones = []
-    for tz in tzstring:
-        if d.has_key(tz):
-            tzones.append(d[tz])
-
-    if len(tzones) > 1:
-        return tzones
-    elif len(tzones) == 1:
-        return tzones[0]
-    else:
-        LogStream.logProblem("No time zone information decodable: ", tzstring)
-        return None
-
-# sorts the cities by decending population
-def citysort(c1, c2):
-    if c1[1] is None and c2[1] is None:
-        return 0
-    elif c1[1] is None:
-        return 1
-    elif c2[1] is None:
-        return -1
-    elif c1[1] < c2[1]:
-        return 1
-    elif c1[1] > c2[1]:
-        return -1
-    else:
-        return 0
-     
-# Creates the city list part of the area dictionary, based on population
-def makeCityList(dictRecord):
-    if dictRecord.has_key("cities"):
-        cities = copy.deepcopy(dictRecord["cities"])
-        if len(cities) == 0:
-            return None
-        cities.sort(citysort)
-        locs = {}
-        cityList = []
-        count = 0
-        maxPop = cities[0][1] #population of largest city
-        for x in xrange(len(cities)):
-
-            #limit small cities to 25% of the large city population
-            if maxPop is not None and cities[x][1] is not None and \
-              cities[x][1] * 4 < maxPop:
-                break
-            elif maxPop is not None and cities[x][1] is None:
-                break
-
-            cityList.append(cities[x][0])
-
-            # save data to cifyLocation dictionary
-            locs[cities[x][0]] = tuple(map(float, cities[x][2:4]))
-
-            #max of 6 cities in the list
-            count = count + 1
-            if count > 6:
-                break
-
-        return cityList, locs
-
-# handle marine states
-def checkMarineState(ugcCode):        
-    #returns None if unknown, description if known
-    areas = {
-      'AM': 'Atlantic coastal waters',
-      'GM': 'Gulf of Mexico',
-      'LE': 'Lake Erie',
-      'LO': 'Lake Ontario', 
-      'LH': 'Lake Huron',
-      'SC': 'Lake St Clair', 
-      'LM': 'Lake Michigan',
-      'LS': 'Lake Superior',
-      'PZ': 'Pacific coastal waters', 
-      'PK': 'Alaskan coastal waters',
-      'PH': 'Hawaiian coastal waters', 
-      'PM': 'Marianas waters',
-      'AN': 'Atlantic coastal waters', 
-      'PS': 'American Samoa coastal waters',
-      'SL': 'St Lawrence River',
-    }
-    area = ugcCode[0:2]
-    return areas.get(area, None)
-    
-        
-# Utility to create the area dictionary, based on the map background data
-def createAreaDictionary(outputDir, mapDict):
-    LogStream.logEvent("Generating AreaDictionary")
-    areadict = {}
-    mapIter = mapDict.entrySet().iterator()
-    while mapIter.hasNext():
-        mapEntry = mapIter.next() 
-        mapname = str(mapEntry.getKey())
-        attList = mapEntry.getValue()
-        attIter = attList.iterator()
-        while attIter.hasNext():
-            att = attIter.next()
-            ean = str(att.get("editarea"))
-            if len(ean):
-                try:
-                    d = {}
-                    if att.containsKey('zone') and att.containsKey('state'):
-                        d['ugcCode'] = str(att.get('state')) + "Z" + str(att.get('zone'))
-                    elif att.containsKey('id'):
-                        d['ugcCode'] = str(att.get('id'))
-                    elif att.containsKey('fips') and att.containsKey('state') and \
-                      att.containsKey('countyname'):
-                        d['ugcCode'] = str(att.get('state')) + "C" + str(att.get('fips'))[-3:]
-                        d['ugcName'] = string.strip(str(att.get('countyname')))
-                    else:
-                        continue
-    
-                    if att.containsKey('state'):
-                        d["stateAbbr"] = str(att.get('state'))
-    
-                    if att.containsKey('name'):
-                        d["ugcName"] = string.strip(str(att.get('name')))
-    
-                    if att.containsKey('time_zone'):
-                        tzvalue = getRealTimeZone(str(att.get('time_zone')))
-                        if tzvalue is not None:
-                            d["ugcTimeZone"] = tzvalue
-    
-                    if zonedata.has_key(d['ugcCode']):
-                        cityDict = zonedata[d['ugcCode']]
-                    elif fipsdata.has_key(d['ugcCode']):
-                        cityDict = fipsdata[d['ugcCode']]
-                    else:
-                        cityDict = None
-    
-                    if cityDict:
-                        cityList = makeCityList(cityDict)
-                        if cityList is not None:
-                            cityList, locs = cityList
-                            if len(cityList): 
-                                d["ugcCities"] = cityList
-                                CityLocationDict[ean] = locs
-    
-                    # partOfState codes
-                    if zonedata.has_key(d['ugcCode']):
-                        if zonedata[d['ugcCode']].has_key('partOfState'):
-                            d["partOfState"] = \
-                              zonedata[d['ugcCode']]['partOfState']
-                    elif fipsdata.has_key(d['ugcCode']):
-                        if fipsdata[d['ugcCode']].has_key('partOfState'):
-                            d["partOfState"] = \
-                              fipsdata[d['ugcCode']]['partOfState']
-                          
-                    # wfo
-                    if zonedata.has_key(d['ugcCode']):
-                        if zonedata[d['ugcCode']].has_key('wfo'):
-                            d["wfo"] = \
-                              zonedata[d['ugcCode']]['wfo']
-                    elif fipsdata.has_key(d['ugcCode']):
-                        if fipsdata[d['ugcCode']].has_key('wfo'):
-                            d["wfo"] = \
-                              fipsdata[d['ugcCode']]['wfo']
-                          
-                    # full state name
-                    if zonedata.has_key(d['ugcCode']):
-                        if zonedata[d['ugcCode']].has_key('fullStateName'):
-                            d["fullStateName"] = \
-                              zonedata[d['ugcCode']]['fullStateName']
-                    elif fipsdata.has_key(d['ugcCode']):
-                        if fipsdata[d['ugcCode']].has_key('fullStateName'):
-                            d["fullStateName"] = \
-                              fipsdata[d['ugcCode']]['fullStateName']
-                    else: 
-                        marineState = checkMarineState(d['ugcCode'])
-                        if marineState is not None:
-                            d['fullStateName'] = marineState
-                    
-                          
-                    if areadict.has_key(ean) and d != areadict[ean]:
-                        LogStream.logProblem("Mismatch of definitions in " +\
-                          "AreaDictionary creation. EditAreaName=",  ean,
-                          "AreaDict=\n", areadict[ean], "\nIgnored=\n", d)
-                    else:
-                        areadict[ean] = d
-                except:
-                    LogStream.logProblem("Problem with ", ean, LogStream.exc())
-
-    s = """
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# DefaultAreaDictionary
-#   AreaDictionary file
-#
-# Author: GFE Installation Script
-# ----------------------------------------------------------------------------
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-# Format:
-# AreaDictionary = {
-#    "editArea" : {
-#             "ugcCode": "STZxxx",
-#             "ugcName": "EditAreaName",
-#             "ugcCities": ['City1', 'City2'],
-#             "ugcTimeZone": "MST7MDT",
-#             "fullStateName": "COLORADO",
-#             "partOfState": "NORTHEAST",
-#             "stateAbbr": "CO",
-#             "independentCity": 0,
-#             "locationName": "GeneralAreaName",
-#             }
-#  ...
-#   }
-#  ugcTimeZone:  This field should be replace with the correct time zone
-#                for that zone.  If it is different from the time zone of
-#                the local WFO, it's time zone will appear in the header of
-#                some products in parentheses.  
-#                Using any other strings to define
-#                the time zone may produce undesirable results.
-#                The time zone may also be a list of time zones in case
-#                a forecast zone happens to cover an area that contains
-#                two time zones.
-#                e.g.   "ugcTimeZone" : ["MST7MDT", "PST8PDT"]
-#
-# ugcCode: This field contains the ugc coding for this area, such as COZ023
-#
-# ugcName: This field contains the descriptive name for this area.  It
-#          is used in various products, including Hazard products.  This is
-#          the official county or zone name.
-#
-# locationName: This field is optional, but provides an alternate name that
-#      is used in the text of some products to describe the area.  The
-#      FFA product uses this value if available.
-#
-# ugcCities: This field contains the list of cities for hazard and routine 
-#          products.  
-#
-# fullStateName: This field is used in hazard products to fully describe
-#          the state in which this edit area resides.
-#
-# partOfState: This field describes the location within a state (such as
-#         NORTHEAST) for this area. It is used in hazard products.
-#
-# stateAbbr:  State Abbreviation for the fullStateName.
-#
-# independentCity:  Set to 0 or 1.  Some counties (FIPS coding) are actually
-#        cities.  Setting the flag to 1 will instruct those formatters
-#        to mention independent cities, rather than include this "county"
-#        in the county list of the product.
-#
-# wfo: The wfo(s) with responsibility for the area
-#
-#  
-
-
-AreaDictionary = \
-"""
-    pp = pprint.PrettyPrinter()
-    s = s + pp.pformat(areadict)
-
-    if not os.path.isdir(outputDir):
-        os.makedirs(outputDir)
-
-    outName = os.path.join(outputDir, "DefaultAreaDictionary.py")
-    
-    fh = None
-    try:
-        fh, fpath = tempfile.mkstemp(dir=outputDir, suffix=".py")
-        os.write(fh, s)
-        os.chmod(fpath, stat.S_IRUSR | stat.S_IWUSR |
-                        stat.S_IRGRP | stat.S_IWGRP | 
-                        stat.S_IROTH)
-        os.close(fh)
-        fh = None
-        os.rename(fpath, outName)
-    except:
-        LogStream.logProblem("Error writing area dictionary", LogStream.exc())
-    finally:
-        if fh is not None:
-            os.close(fh)
-
-
-def createTCVAreaDictionary(outputDir, mapDict, siteID):
-    tcvAreaDictionaryContents = \
-"""
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# TCV_AreaDictionary
-#   TCV_AreaDictionary file
-#
-# Author: GFE Installation Script
-# ----------------------------------------------------------------------------
-
-# Here is an example TCVAreaDictionary for just a single zone and with comments
-# to talk about the structure of the dictionary.
-#
-# TCV_AreaDictionary = {
-#     # Zone
-#     'FLZ173': {
-#         # A list of location names.
-#         'locationsAffected': [
-#             "Miami Beach",
-#             "Downtown Miami",
-#         ],
-#         
-#         # Potential impacts statements can be overriden here; anything not
-#         # overriden here will use the generic potential impacts statements
-#         'potentialImpactsStatements': {
-#             # Section name: "Wind", "Storm Surge", "Flooding Rain" or "Tornado"
-#             "Wind": {
-#                 # Threat level: "None", "Low", "Mod", "High" or "Extreme"
-#                 "Extreme": [
-#                     # Each string will be on its own line
-#                     "Widespread power outages with some areas experiencing long-term outages",
-#                     "Many bridges and access routes connecting barrier islands impassable",
-#                     "Structural category to sturdy buildings with some having complete wall and roof failures",
-#                     "Complete destruction of mobile homes",
-#                     "Numerous roads impassable from large debris",
-#                     
-#                 ],
-#             },
-#         },
-#         
-#         # Additional information that will be displayed at the end of the segment
-#         # The structure is a list containing strings and/or lists. Strings in the
-#         # same list will be idented the same amount. Introducing a list, idents the
-#         # text until it ends. For example:
-#         #
-#         # 'infoSection': [
-#         #     "This will be at tab level 0",
-#         #     [
-#         #         "A new list was introduced so this is at tab level 1",
-#         #         [
-#         #             "Yet another list so this is tab level 2",
-#         #             "Still at tab level 2 here",
-#         #         ],
-#         #         "We are back at tab level 1 because we ended the list",
-#         #     ],
-#         #     "We ended the other list and are back at tab level 0 now",
-#         # ]
-#         'infoSection': [
-#             "Local evacuation and sheltering: Miami-Dade County Emergency Management",
-#             [
-#                 "http://www.miamidade.gov/emergency/",
-#             ],
-#             "Family emergency plans: Federal Emergency Management Agency",
-#             [
-#                 "http://ready.gov/",
-#             ],
-#             "Local weather conditions and forecasts: NWS Miami Florida",
-#             [
-#                 "http://www.srh.noaa.gov/mfl/",
-#             ],
-#         ],
-#     },
-# }
-
-TCV_AreaDictionary = {
-"""
-    
-    zoneSkeletonContents = {
-            'locationsAffected' : [],
-            'potentialImpactsStatements' : {},
-            'infoSection' : [],
-        }
-    
-    existingTCVAreaDictionary = {}
-    try:
-        with open(outputDir + "/TCVAreaDictionary.py", "r") as existingFile:
-            contents = existingFile.read()
-            exec(contents)
-        
-        # TCV_AreaDictionary comes from the existing TCVAreaDictionary when it is exec'ed
-        existingTCVAreaDictionary = TCV_AreaDictionary
-    except Exception:
-        pass
-    
-    for zone in _getZones(siteID):
-        tcvAreaDictionaryContents += "    '" + zone + "': {\n"
-        
-        # Don't clobber existing dictionary entries
-        if zone in existingTCVAreaDictionary:
-            # Add new entries
-            for key in zoneSkeletonContents:
-                if key not in existingTCVAreaDictionary[zone]:
-                    existingTCVAreaDictionary[zone][key] = zoneSkeletonContents[key]
-            
-            # Remove entries that are no longer needed
-            existingKeys = existingTCVAreaDictionary[zone].keys()
-            for key in existingKeys:
-                if key not in zoneSkeletonContents:
-                    existingTCVAreaDictionary[zone].pop(key)
-            
-            tcvAreaDictionaryContents += _formatDictionary(existingTCVAreaDictionary[zone], tabLevel = 2)
-        else:
-            tcvAreaDictionaryContents += _formatDictionary(zoneSkeletonContents, tabLevel = 2)
-        
-        tcvAreaDictionaryContents += "    },\n\n"
-    
-    tcvAreaDictionaryContents += "}\n"
-    
-    with open(outputDir + "/TCVAreaDictionary.py", "w") as file:
-        file.write(tcvAreaDictionaryContents)
-    
-def _getZones(siteID):
-    editAreasFilename = "gfe/combinations/EditAreas_PublicZones_" + \
-                        siteID + ".py"
-    zonesKey = "Zones_" + siteID
-    
-    try:
-        editAreasFileContents = LocalizationSupport.readFile(LocalizationSupport.CAVE_STATIC,
-                                                             LocalizationSupport.CONFIGURED,
-                                                             siteID,
-                                                             editAreasFilename)
-        
-        
-        exec(editAreasFileContents)
-        
-        # EASourceMap comes from the EditAreas file
-        return EASourceMap[zonesKey]
-    except:
-        LogStream.logProblem("Error getting zones for %s: " % siteID, LogStream.exc())
-        return []
-    
-def _formatDictionary(dictionary, tabLevel, output=""):
-    TAB = " " * 4
-    
-    for key in dictionary:
-        output += TAB*tabLevel + repr(key) + ": "
-        
-        value = dictionary[key]
-        if type(value) is dict:
-            output += "{\n"
-            output = _formatDictionary(value, tabLevel+1, output)
-            output += TAB*tabLevel + "},\n"
-        elif type(value) is list:
-            output += "[\n"
-            output = _formatList(value, tabLevel+1, output)
-            output += TAB*tabLevel + "],\n"
-        else:
-            output += repr(value) + ",\n"
-    
-    return output
-    
-def _formatList(theList, tabLevel, output=""):
-    TAB = " " * 4
-    
-    for value in theList:
-        if type(value) is dict:
-            output += TAB*tabLevel + "{\n"
-            output = _formatDictionary(value, tabLevel+1, output)
-            output += TAB*tabLevel + "},\n"
-        elif type(value) is list:
-            output += TAB*tabLevel + "[\n"
-            output = _formatList(value, tabLevel+1, output)
-            output += TAB*tabLevel + "],\n"
-        else:
-            output += TAB*tabLevel + repr(value) + ",\n"
-    
-    return output
-
-
-# Utility to create the city location dictionary
-def createCityLocation(outputDir, mapDict):
-    LogStream.logEvent("Generating CityLocation")
-
-    citydict = CityLocationDict
-
-    for mapname in mapDict:
-        if 'Cities' not in mapname:
-            continue
-         
-        attList = mapDict[mapname]
-        for att in attList:
-            #LogStream.logProblem("att:", att)
-            ean = att['name']
-            state = att['st']
-            county_FIP = att['county_fip']
-
-            if len(ean) and len(state) and len(county_FIP):
-                fip = state + 'C' + county_FIP
-                if not citydict.has_key(fip):
-                    citydict[fip] = {}
-                try:
-                    latitude = float(string.strip(att['lat']))
-                    longitude = float(string.strip(att['lon']))
-                    citydict[fip][ean.upper()] = (latitude, longitude)
-                except:
-                    LogStream.logProblem("Problem creating city location ",
-                                         ean, att, LogStream.exc())
-
-    s = """
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# DefaultCityLocation
-#   CityLocation file
-#
-# Author: GFE Installation Script 
-# ----------------------------------------------------------------------------
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-# Format:
-# CityLocation = {
-#    "editArea": {'cityName1' : (latitude, longitude),
-#                 'cityName2' : (latitude, longitude),
-#                 ...
-#                }
-#  ...
-#   }
-#
-# editArea: name of edit area as in AreaDictionary
-#
-# cityName: name of the city - should be the same as in AreaDictionary.
-#
-# latitude/longitude: city's lat/lon location.
-#  
-
-
-CityLocation = \
-"""
-    pp = pprint.PrettyPrinter()
-    s = s + pp.pformat(citydict)
-
-    if not os.path.isdir(outputDir):
-        os.makedirs(outputDir)
-
-    outName = os.path.join(outputDir, "DefaultCityLocation.py")
-    
-    fh = None
-    try:
-        fh, fpath = tempfile.mkstemp(dir=outputDir, suffix=".py")
-        os.write(fh, s)
-        os.chmod(fpath, stat.S_IRUSR | stat.S_IWUSR |
-                        stat.S_IRGRP | stat.S_IWGRP | 
-                        stat.S_IROTH)
-        os.close(fh)
-        fh = None
-        os.rename(fpath, outName)
-    except:
-        LogStream.logProblem("Error writing city location", LogStream.exc())
-    finally:
-        if fh is not None:
-            os.close(fh)
-
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+#

+#

+#  Creates area dictionary specific to a site.  Somewhat ported from AWIPS-I.

+#  

+#    

+#     SOFTWARE HISTORY

+#    

+#    Date            Ticket#       Engineer       Description

+#    ------------    ----------    -----------    --------------------------

+#    01/08/10             #1209    randerso       Initial Creation.

+#    10/19/12             #1091    dgilling       Support localMaps.py.

+#    10/20/2014           #3685    randerso       Converted text to mixed case

+#                                                 Fixed mapDict to keep zones from different maps separate 

+#

+#    12/05/14        4953          randerso       Changed to use LocalizationSupport

+#    03/10/2015      4129          randerso       Fixed error logging

+#    07/01/2016      18114         ryu            Changed timezone designation from 'MST7' to 'US/Arizona'.

+#    07/15/2016      5749          randerso       Changed preformatted ugcCityString to ugcCities list

+#                                                 Added wfo field to support generation of national TCV

+#    09/28/2016      19293         randerso       Added exception handling for createTCVAreaDictionary

+#    11/21/2016      5959          njensen        Remove unused imports and made more pythonic

+##

+

+##

+# This is a base file that is not intended to be overridden.

+##

+

+

+

+import os, string, copy

+import tempfile, stat

+import LogStream, pprint

+

+from fips2cities import *

+from zones2cities import *

+

+import LocalizationSupport

+

+

+CityLocationDict = {}

+

+# obtain real time zone string from shapefile TIME_ZONE variable

+def getRealTimeZone(tzstring):

+    d = {'M':"MST7MDT",'m':"US/Arizona",'V':'America/Puerto_Rico',

+         'E':"EST5EDT",'e':"EST5",

+         'C':"CST6CDT",'P':"PST8PDT",'A':"America/Anchorage",

+         'H':"Pacific/Honolulu",'G':"Pacific/Guam", 

+         'J':"Pacific/Palu", 'K': "Pacific/Wake", 'F': "Pacific/Ponape"}

+    tzones = []

+    for tz in tzstring:

+        if tz in d:

+            tzones.append(d[tz])

+

+    if len(tzones) > 1:

+        return tzones

+    elif len(tzones) == 1:

+        return tzones[0]

+    else:

+        LogStream.logProblem("No time zone information decodable: ", tzstring)

+        return None

+

+# sorts the cities by decending population

+def citysort(c1, c2):

+    if c1[1] is None and c2[1] is None:

+        return 0

+    elif c1[1] is None:

+        return 1

+    elif c2[1] is None:

+        return -1

+    elif c1[1] < c2[1]:

+        return 1

+    elif c1[1] > c2[1]:

+        return -1

+    else:

+        return 0

+     

+# Creates the city list part of the area dictionary, based on population

+def makeCityList(dictRecord):

+    if "cities" in dictRecord:

+        cities = copy.deepcopy(dictRecord["cities"])

+        if len(cities) == 0:

+            return None

+        cities.sort(citysort)

+        locs = {}

+        cityList = []

+        count = 0

+        maxPop = cities[0][1] #population of largest city

+        for x in range(len(cities)):

+

+            #limit small cities to 25% of the large city population

+            if maxPop is not None and cities[x][1] is not None and \

+              cities[x][1] * 4 < maxPop:

+                break

+            elif maxPop is not None and cities[x][1] is None:

+                break

+

+            cityList.append(cities[x][0])

+

+            # save data to cifyLocation dictionary

+            locs[cities[x][0]] = tuple(map(float, cities[x][2:4]))

+

+            #max of 6 cities in the list

+            count = count + 1

+            if count > 6:

+                break

+

+        return cityList, locs

+

+# handle marine states

+def checkMarineState(ugcCode):        

+    #returns None if unknown, description if known

+    areas = {

+      'AM': 'Atlantic coastal waters',

+      'GM': 'Gulf of Mexico',

+      'LE': 'Lake Erie',

+      'LO': 'Lake Ontario', 

+      'LH': 'Lake Huron',

+      'SC': 'Lake St Clair', 

+      'LM': 'Lake Michigan',

+      'LS': 'Lake Superior',

+      'PZ': 'Pacific coastal waters', 

+      'PK': 'Alaskan coastal waters',

+      'PH': 'Hawaiian coastal waters', 

+      'PM': 'Marianas waters',

+      'AN': 'Atlantic coastal waters', 

+      'PS': 'American Samoa coastal waters',

+      'SL': 'St Lawrence River',

+    }

+    area = ugcCode[0:2]

+    return areas.get(area, None)

+    

+        

+# Utility to create the area dictionary, based on the map background data

+def createAreaDictionary(outputDir, mapDict):

+    LogStream.logEvent("Generating AreaDictionary")

+    areadict = {}

+    mapIter = mapDict.entrySet().iterator()

+    while mapIter.hasNext():

+        mapEntry = next(mapIter) 

+        mapname = str(mapEntry.getKey())

+        attList = mapEntry.getValue()

+        attIter = attList.iterator()

+        while attIter.hasNext():

+            att = next(attIter)

+            ean = str(att.get("editarea"))

+            if len(ean):

+                try:

+                    d = {}

+                    if att.containsKey('zone') and att.containsKey('state'):

+                        d['ugcCode'] = str(att.get('state')) + "Z" + str(att.get('zone'))

+                    elif att.containsKey('id'):

+                        d['ugcCode'] = str(att.get('id'))

+                    elif att.containsKey('fips') and att.containsKey('state') and \

+                      att.containsKey('countyname'):

+                        d['ugcCode'] = str(att.get('state')) + "C" + str(att.get('fips'))[-3:]

+                        d['ugcName'] = string.strip(str(att.get('countyname')))

+                    else:

+                        continue

+    

+                    if att.containsKey('state'):

+                        d["stateAbbr"] = str(att.get('state'))

+    

+                    if att.containsKey('name'):

+                        d["ugcName"] = string.strip(str(att.get('name')))

+    

+                    if att.containsKey('time_zone'):

+                        tzvalue = getRealTimeZone(str(att.get('time_zone')))

+                        if tzvalue is not None:

+                            d["ugcTimeZone"] = tzvalue

+    

+                    if d['ugcCode'] in zonedata:

+                        cityDict = zonedata[d['ugcCode']]

+                    elif d['ugcCode'] in fipsdata:

+                        cityDict = fipsdata[d['ugcCode']]

+                    else:

+                        cityDict = None

+    

+                    if cityDict:

+                        cityList = makeCityList(cityDict)

+                        if cityList is not None:

+                            cityList, locs = cityList

+                            if len(cityList): 

+                                d["ugcCities"] = cityList

+                                CityLocationDict[ean] = locs

+    

+                    # partOfState codes

+                    if d['ugcCode'] in zonedata:

+                        if 'partOfState' in zonedata[d['ugcCode']]:

+                            d["partOfState"] = \

+                              zonedata[d['ugcCode']]['partOfState']

+                    elif d['ugcCode'] in fipsdata:

+                        if 'partOfState' in fipsdata[d['ugcCode']]:

+                            d["partOfState"] = \

+                              fipsdata[d['ugcCode']]['partOfState']

+                          

+                    # wfo

+                    if d['ugcCode'] in zonedata:

+                        if 'wfo' in zonedata[d['ugcCode']]:

+                            d["wfo"] = \

+                              zonedata[d['ugcCode']]['wfo']

+                    elif d['ugcCode'] in fipsdata:

+                        if 'wfo' in fipsdata[d['ugcCode']]:

+                            d["wfo"] = \

+                              fipsdata[d['ugcCode']]['wfo']

+                          

+                    # full state name

+                    if d['ugcCode'] in zonedata:

+                        if 'fullStateName' in zonedata[d['ugcCode']]:

+                            d["fullStateName"] = \

+                              zonedata[d['ugcCode']]['fullStateName']

+                    elif d['ugcCode'] in fipsdata:

+                        if 'fullStateName' in fipsdata[d['ugcCode']]:

+                            d["fullStateName"] = \

+                              fipsdata[d['ugcCode']]['fullStateName']

+                    else: 

+                        marineState = checkMarineState(d['ugcCode'])

+                        if marineState is not None:

+                            d['fullStateName'] = marineState

+                    

+                          

+                    if ean in areadict and d != areadict[ean]:

+                        LogStream.logProblem("Mismatch of definitions in " +\

+                          "AreaDictionary creation. EditAreaName=",  ean,

+                          "AreaDict=\n", areadict[ean], "\nIgnored=\n", d)

+                    else:

+                        areadict[ean] = d

+                except:

+                    LogStream.logProblem("Problem with ", ean, LogStream.exc())

+

+    s = """

+# ----------------------------------------------------------------------------

+# This software is in the public domain, furnished "as is", without technical

+# support, and with no warranty, express or implied, as to its usefulness for

+# any purpose.

+#

+# DefaultAreaDictionary

+#   AreaDictionary file

+#

+# Author: GFE Installation Script

+# ----------------------------------------------------------------------------

+

+##

+# This is a base file that is not intended to be overridden.

+##

+

+# Format:

+# AreaDictionary = {

+#    "editArea" : {

+#             "ugcCode": "STZxxx",

+#             "ugcName": "EditAreaName",

+#             "ugcCities": ['City1', 'City2'],

+#             "ugcTimeZone": "MST7MDT",

+#             "fullStateName": "COLORADO",

+#             "partOfState": "NORTHEAST",

+#             "stateAbbr": "CO",

+#             "independentCity": 0,

+#             "locationName": "GeneralAreaName",

+#             }

+#  ...

+#   }

+#  ugcTimeZone:  This field should be replace with the correct time zone

+#                for that zone.  If it is different from the time zone of

+#                the local WFO, it's time zone will appear in the header of

+#                some products in parentheses.  

+#                Using any other strings to define

+#                the time zone may produce undesirable results.

+#                The time zone may also be a list of time zones in case

+#                a forecast zone happens to cover an area that contains

+#                two time zones.

+#                e.g.   "ugcTimeZone" : ["MST7MDT", "PST8PDT"]

+#

+# ugcCode: This field contains the ugc coding for this area, such as COZ023

+#

+# ugcName: This field contains the descriptive name for this area.  It

+#          is used in various products, including Hazard products.  This is

+#          the official county or zone name.

+#

+# locationName: This field is optional, but provides an alternate name that

+#      is used in the text of some products to describe the area.  The

+#      FFA product uses this value if available.

+#

+# ugcCities: This field contains the list of cities for hazard and routine 

+#          products.  

+#

+# fullStateName: This field is used in hazard products to fully describe

+#          the state in which this edit area resides.

+#

+# partOfState: This field describes the location within a state (such as

+#         NORTHEAST) for this area. It is used in hazard products.

+#

+# stateAbbr:  State Abbreviation for the fullStateName.

+#

+# independentCity:  Set to 0 or 1.  Some counties (FIPS coding) are actually

+#        cities.  Setting the flag to 1 will instruct those formatters

+#        to mention independent cities, rather than include this "county"

+#        in the county list of the product.

+#

+# wfo: The wfo(s) with responsibility for the area

+#

+#  

+

+

+AreaDictionary = \

+"""

+    pp = pprint.PrettyPrinter()

+    s = s + pp.pformat(areadict)

+

+    if not os.path.isdir(outputDir):

+        os.makedirs(outputDir)

+

+    outName = os.path.join(outputDir, "DefaultAreaDictionary.py")

+    

+    fh = None

+    try:

+        fh, fpath = tempfile.mkstemp(dir=outputDir, suffix=".py")

+        os.write(fh, s)

+        os.chmod(fpath, stat.S_IRUSR | stat.S_IWUSR |

+                        stat.S_IRGRP | stat.S_IWGRP | 

+                        stat.S_IROTH)

+        os.close(fh)

+        fh = None

+        os.rename(fpath, outName)

+    except:

+        LogStream.logProblem("Error writing area dictionary", LogStream.exc())

+    finally:

+        if fh is not None:

+            os.close(fh)

+

+

+def createTCVAreaDictionary(outputDir, mapDict, siteID):

+    tcvAreaDictionaryContents = \

+"""

+# ----------------------------------------------------------------------------

+# This software is in the public domain, furnished "as is", without technical

+# support, and with no warranty, express or implied, as to its usefulness for

+# any purpose.

+#

+# TCV_AreaDictionary

+#   TCV_AreaDictionary file

+#

+# Author: GFE Installation Script

+# ----------------------------------------------------------------------------

+

+# Here is an example TCVAreaDictionary for just a single zone and with comments

+# to talk about the structure of the dictionary.

+#

+# TCV_AreaDictionary = {

+#     # Zone

+#     'FLZ173': {

+#         # A list of location names.

+#         'locationsAffected': [

+#             "Miami Beach",

+#             "Downtown Miami",

+#         ],

+#         

+#         # Potential impacts statements can be overridden here; anything not

+#         # overridden here will use the generic potential impacts statements

+#         'potentialImpactsStatements': {

+#             # Section name: "Wind", "Storm Surge", "Flooding Rain" or "Tornado"

+#             "Wind": {

+#                 # Threat level: "None", "Low", "Mod", "High" or "Extreme"

+#                 "Extreme": [

+#                     # Each string will be on its own line

+#                     "Widespread power outages with some areas experiencing long-term outages",

+#                     "Many bridges and access routes connecting barrier islands impassable",

+#                     "Structural damage to sturdy buildings with some having complete wall and roof failures",

+#                     "Complete destruction of mobile homes",

+#                     "Numerous roads impassable from large debris",

+#                     

+#                 ],

+#             },

+#         },

+#         

+#         # Additional information that will be displayed at the end of the segment

+#         # The structure is a list containing strings and/or lists. Strings in the

+#         # same list will be indented the same amount. Introducing a list, indents the

+#         # text until it ends. For example:

+#         #

+#         # 'infoSection': [

+#         #     "This will be at tab level 0",

+#         #     [

+#         #         "A new list was introduced so this is at tab level 1",

+#         #         [

+#         #             "Yet another list so this is tab level 2",

+#         #             "Still at tab level 2 here",

+#         #         ],

+#         #         "We are back at tab level 1 because we ended the list",

+#         #     ],

+#         #     "We ended the other list and are back at tab level 0 now",

+#         # ]

+#         'infoSection': [

+#             "Local evacuation and sheltering: Miami-Dade County Emergency Management",

+#             [

+#                 "http://www.miamidade.gov/emergency/",

+#             ],

+#             "Family emergency plans: Federal Emergency Management Agency",

+#             [

+#                 "http://ready.gov/",

+#             ],

+#             "Local weather conditions and forecasts: NWS Miami Florida",

+#             [

+#                 "http://www.srh.noaa.gov/mfl/",

+#             ],

+#         ],

+#     },

+# }

+

+TCV_AreaDictionary = {

+"""

+    

+    zoneSkeletonContents = {

+            'locationsAffected' : [],

+            'potentialImpactsStatements' : {},

+            'infoSection' : [],

+        }

+    

+    existingTCVAreaDictionary = {}

+    try:

+        with open(outputDir + "/TCVAreaDictionary.py", "r") as existingFile:

+            contents = existingFile.read()

+            exec(contents)

+        

+        # TCV_AreaDictionary comes from the existing TCVAreaDictionary when it is exec'ed

+        existingTCVAreaDictionary = TCV_AreaDictionary

+    except Exception:

+        pass

+    

+    for zone in _getZones(siteID):

+        tcvAreaDictionaryContents += "    '" + zone + "': {\n"

+        

+        # Don't clobber existing dictionary entries

+        if zone in existingTCVAreaDictionary:

+            # Add new entries

+            for key in zoneSkeletonContents:

+                if key not in existingTCVAreaDictionary[zone]:

+                    existingTCVAreaDictionary[zone][key] = zoneSkeletonContents[key]

+            

+            # Remove entries that are no longer needed

+            existingKeys = list(existingTCVAreaDictionary[zone].keys())

+            for key in existingKeys:

+                if key not in zoneSkeletonContents:

+                    existingTCVAreaDictionary[zone].pop(key)

+            

+            tcvAreaDictionaryContents += _formatDictionary(existingTCVAreaDictionary[zone], tabLevel = 2)

+        else:

+            tcvAreaDictionaryContents += _formatDictionary(zoneSkeletonContents, tabLevel = 2)

+        

+        tcvAreaDictionaryContents += "    },\n\n"

+    

+    tcvAreaDictionaryContents += "}\n"

+    

+    with open(outputDir + "/TCVAreaDictionary.py", "w") as file:

+        file.write(tcvAreaDictionaryContents)

+    

+def _getZones(siteID):

+    editAreasFilename = "gfe/combinations/EditAreas_PublicZones_" + \

+                        siteID + ".py"

+    zonesKey = "Zones_" + siteID

+    

+    try:

+        editAreasFileContents = LocalizationSupport.readFile(LocalizationSupport.CAVE_STATIC,

+                                                             LocalizationSupport.CONFIGURED,

+                                                             siteID,

+                                                             editAreasFilename)

+        

+        

+        exec(editAreasFileContents)

+        

+        # EASourceMap comes from the EditAreas file

+        return EASourceMap[zonesKey]

+    except:

+        LogStream.logProblem("Error getting zones for %s: " % siteID, LogStream.exc())

+        return []

+    

+def _formatDictionary(dictionary, tabLevel, output=""):

+    TAB = " " * 4

+    

+    for key in dictionary:

+        output += TAB*tabLevel + repr(key) + ": "

+        

+        value = dictionary[key]

+        if type(value) is dict:

+            output += "{\n"

+            output = _formatDictionary(value, tabLevel+1, output)

+            output += TAB*tabLevel + "},\n"

+        elif type(value) is list:

+            output += "[\n"

+            output = _formatList(value, tabLevel+1, output)

+            output += TAB*tabLevel + "],\n"

+        else:

+            output += repr(value) + ",\n"

+    

+    return output

+    

+def _formatList(theList, tabLevel, output=""):

+    TAB = " " * 4

+    

+    for value in theList:

+        if type(value) is dict:

+            output += TAB*tabLevel + "{\n"

+            output = _formatDictionary(value, tabLevel+1, output)

+            output += TAB*tabLevel + "},\n"

+        elif type(value) is list:

+            output += TAB*tabLevel + "[\n"

+            output = _formatList(value, tabLevel+1, output)

+            output += TAB*tabLevel + "],\n"

+        else:

+            output += TAB*tabLevel + repr(value) + ",\n"

+    

+    return output

+

+

+# Utility to create the city location dictionary

+def createCityLocation(outputDir, mapDict):

+    LogStream.logEvent("Generating CityLocation")

+

+    citydict = CityLocationDict

+

+    for mapname in mapDict:

+        if 'Cities' not in mapname:

+            continue

+         

+        attList = mapDict[mapname]

+        for att in attList:

+            #LogStream.logProblem("att:", att)

+            ean = att['name']

+            state = att['st']

+            county_FIP = att['county_fip']

+

+            if len(ean) and len(state) and len(county_FIP):

+                fip = state + 'C' + county_FIP

+                if fip not in citydict:

+                    citydict[fip] = {}

+                try:

+                    latitude = float(string.strip(att['lat']))

+                    longitude = float(string.strip(att['lon']))

+                    citydict[fip][ean.upper()] = (latitude, longitude)

+                except:

+                    LogStream.logProblem("Problem creating city location ",

+                                         ean, att, LogStream.exc())

+

+    s = """

+# ----------------------------------------------------------------------------

+# This software is in the public domain, furnished "as is", without technical

+# support, and with no warranty, express or implied, as to its usefulness for

+# any purpose.

+#

+# DefaultCityLocation

+#   CityLocation file

+#

+# Author: GFE Installation Script 

+# ----------------------------------------------------------------------------

+

+##

+# This is a base file that is not intended to be overridden.

+##

+

+# Format:

+# CityLocation = {

+#    "editArea": {'cityName1' : (latitude, longitude),

+#                 'cityName2' : (latitude, longitude),

+#                 ...

+#                }

+#  ...

+#   }

+#

+# editArea: name of edit area as in AreaDictionary

+#

+# cityName: name of the city - should be the same as in AreaDictionary.

+#

+# latitude/longitude: city's lat/lon location.

+#  

+

+

+CityLocation = \

+"""

+    pp = pprint.PrettyPrinter()

+    s = s + pp.pformat(citydict)

+

+    if not os.path.isdir(outputDir):

+        os.makedirs(outputDir)

+

+    outName = os.path.join(outputDir, "DefaultCityLocation.py")

+    

+    fh = None

+    try:

+        fh, fpath = tempfile.mkstemp(dir=outputDir, suffix=".py")

+        os.write(fh, s)

+        os.chmod(fpath, stat.S_IRUSR | stat.S_IWUSR |

+                        stat.S_IRGRP | stat.S_IWGRP | 

+                        stat.S_IROTH)

+        os.close(fh)

+        fh = None

+        os.rename(fpath, outName)

+    except:

+        LogStream.logProblem("Error writing city location", LogStream.exc())

+    finally:

+        if fh is not None:

+            os.close(fh)

+

diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/createComboFiles.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/createComboFiles.py
index 4b2b88fd85..9b61b0d8ae 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/createComboFiles.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/createComboFiles.py
@@ -1,173 +1,173 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+
+#
+#  Creates combination files specific to a site.  Somewhat ported from AWIPS-I.
+#  
+#    
+#     SOFTWARE HISTORY
+#    
+#    Date            Ticket#       Engineer       Description
+#    ------------    ----------    -----------    --------------------------
+#    10/08/08                      njensen        Initial Creation.
+#    
 # 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-
-#
-#  Creates combination files specific to a site.  Somewhat ported from AWIPS-I.
-#  
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    10/08/08                      njensen        Initial Creation.
-#    
-# 
-#
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-import os, glob, string, tempfile, stat
-import LogStream, pprint
-  
-
-# Creation of combination files
-def createComboFiles(definitionDir, outputDir, mapDict):
-    #list of definition files    
-    LogStream.logEvent("definitionDir", definitionDir)    
-    files = glob.glob(definitionDir + '/*Definition.py')
-    for f in files:
-        LogStream.logEvent("File", f)
-        # read the file
-        fd = open(f, 'r')
-        buf = fd.read()
-
-        fd.close()
-
-        LogStream.logVerbose("Definition File:", f)
-
-        # attempt to read in the Definition dictionary
-        try:
-            exec buf
-        except:
-            LogStream.logProblem("Failure on Definition: ", f)
-            continue
-
-        if Definition.has_key("mapNameForCombinations") and \
-          Definition.has_key("defaultEditAreas") and \
-          type(Definition['defaultEditAreas']) is str:
-
-            srcDict = {}   #keep track of what zones from what map
-
-            #determine if a single map or multiple maps
-            if type(Definition["mapNameForCombinations"]) is str:
-                maps = [Definition["mapNameForCombinations"]]
-            else:
-                maps = []
-                for m in Definition["mapNameForCombinations"]:
-                    maps.append(m)
-
-            LogStream.logVerbose("mapNameForCombinations=", maps)
-
-            outName = Definition["defaultEditAreas"]
-            LogStream.logVerbose("Generating Combo File: ", outName)
-
-            #See if the definition limits the zones to subdomains
-            if Definition.has_key("subDomainUGCs") and \
-              Definition["subDomainUGCs"] is not None:
-                limitZones = Definition["subDomainUGCs"]
-            else:
-                limitZones = None
-
-
-            #pull out the EDITAREA attribute from all of the maps
-            eans = []
-            for m in maps:
-                names = mapDict.get(m)
-                if names is not None:
-                    size = names.size()                    
-                    LogStream.logVerbose("processing: ", m, "#recs=",
-                      size)
-                    
-                    for n in range(size):
-                        ean = str(names.get(n))
-                        if limitZones is None or ean in limitZones:
-                            #tracking source map
-                            if len(ean):
-                                slist = srcDict.get(m, [])
-                                if ean not in slist:
-                                    slist.append(ean)
-                                    srcDict[m] = slist
-                                    
-                            #combo file
-                            if ean not in eans and len(ean):
-                                eans.append(ean)
-            eans.sort()
-            LogStream.logVerbose("eans=", eans)
-
-
-            s = """
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# Combinations
-#   
-#
-# Author: GFESuite Installation Script
-# ----------------------------------------------------------------------------
-
-# Format:
-# Combinations = [
-#    ([ list of edit areas as named in the GFE ], label),
-#    ...
-#    ]
-
-Combinations = [
-"""
-            s = string.replace(s, "",
-              Definition['defaultEditAreas'])
-            count = 1
-            for ean in eans:
-                s = s + '       (["' + ean + '"],  "Region' + `count` + \
-                  '"),\n'
-                count = count + 1
-            s = s + '       ]\n\n'
-            
-            # output 2nd half of combinations file (srcDict)
-            s = s + "#Source Maps for edit areas\nEASourceMap = \\\n"
-            pp = pprint.PrettyPrinter()
-            s = s + pp.pformat(srcDict)
-
-            if not os.path.isdir(outputDir):
-                os.makedirs(outputDir)
-
-            outName = os.path.join(outputDir, Definition["defaultEditAreas"] + ".py")
-            fh = None
-            try:
-                fh, fpath = tempfile.mkstemp(suffix=".py", dir=outputDir)
-                os.write(fh, s)
-                os.chmod(fpath, stat.S_IRUSR | stat.S_IWUSR |
-                                stat.S_IRGRP | stat.S_IWGRP | 
-                                stat.S_IROTH)
-                os.close(fh)
-                fh = None
-                os.rename(fpath, outName)
-            except:
-                LogStream.logProblem("Error writing combo files", LogStream.exc())
-            finally:
-                if fh is not None:
-                    os.close(fh)
-                
-                                        
+#
+
+##
+# This is a base file that is not intended to be overridden.
+##
+
+
+
+import os, glob, string, tempfile, stat
+import LogStream, pprint
+  
+
+# Creation of combination files
+def createComboFiles(definitionDir, outputDir, mapDict):
+    #list of definition files    
+    LogStream.logEvent("definitionDir", definitionDir)    
+    files = glob.glob(definitionDir + '/*Definition.py')
+    for f in files:
+        LogStream.logEvent("File", f)
+        # read the file
+        fd = open(f, 'r')
+        buf = fd.read()
+
+        fd.close()
+
+        LogStream.logVerbose("Definition File:", f)
+
+        # attempt to read in the Definition dictionary
+        try:
+            exec(buf)
+        except:
+            LogStream.logProblem("Failure on Definition: ", f)
+            continue
+
+        if "mapNameForCombinations" in Definition and \
+          "defaultEditAreas" in Definition and \
+          type(Definition['defaultEditAreas']) is str:
+
+            srcDict = {}   #keep track of what zones from what map
+
+            #determine if a single map or multiple maps
+            if type(Definition["mapNameForCombinations"]) is str:
+                maps = [Definition["mapNameForCombinations"]]
+            else:
+                maps = []
+                for m in Definition["mapNameForCombinations"]:
+                    maps.append(m)
+
+            LogStream.logVerbose("mapNameForCombinations=", maps)
+
+            outName = Definition["defaultEditAreas"]
+            LogStream.logVerbose("Generating Combo File: ", outName)
+
+            #See if the definition limits the zones to subdomains
+            if "subDomainUGCs" in Definition and \
+              Definition["subDomainUGCs"] is not None:
+                limitZones = Definition["subDomainUGCs"]
+            else:
+                limitZones = None
+
+
+            #pull out the EDITAREA attribute from all of the maps
+            eans = []
+            for m in maps:
+                names = mapDict.get(m)
+                if names is not None:
+                    size = names.size()                    
+                    LogStream.logVerbose("processing: ", m, "#recs=",
+                      size)
+                    
+                    for n in range(size):
+                        ean = str(names.get(n))
+                        if limitZones is None or ean in limitZones:
+                            #tracking source map
+                            if len(ean):
+                                slist = srcDict.get(m, [])
+                                if ean not in slist:
+                                    slist.append(ean)
+                                    srcDict[m] = slist
+                                    
+                            #combo file
+                            if ean not in eans and len(ean):
+                                eans.append(ean)
+            eans.sort()
+            LogStream.logVerbose("eans=", eans)
+
+
+            s = """
+# ----------------------------------------------------------------------------
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#
+# Combinations
+#   
+#
+# Author: GFESuite Installation Script
+# ----------------------------------------------------------------------------
+
+# Format:
+# Combinations = [
+#    ([ list of edit areas as named in the GFE ], label),
+#    ...
+#    ]
+
+Combinations = [
+"""
+            s = string.replace(s, "",
+              Definition['defaultEditAreas'])
+            count = 1
+            for ean in eans:
+                s = s + '       (["' + ean + '"],  "Region' + repr(count) + \
+                  '"),\n'
+                count = count + 1
+            s = s + '       ]\n\n'
+            
+            # output 2nd half of combinations file (srcDict)
+            s = s + "#Source Maps for edit areas\nEASourceMap = \\\n"
+            pp = pprint.PrettyPrinter()
+            s = s + pp.pformat(srcDict)
+
+            if not os.path.isdir(outputDir):
+                os.makedirs(outputDir)
+
+            outName = os.path.join(outputDir, Definition["defaultEditAreas"] + ".py")
+            fh = None
+            try:
+                fh, fpath = tempfile.mkstemp(suffix=".py", dir=outputDir)
+                os.write(fh, s)
+                os.chmod(fpath, stat.S_IRUSR | stat.S_IWUSR |
+                                stat.S_IRGRP | stat.S_IWGRP | 
+                                stat.S_IROTH)
+                os.close(fh)
+                fh = None
+                os.rename(fpath, outName)
+            except:
+                LogStream.logProblem("Error writing combo files", LogStream.exc())
+            finally:
+                if fh is not None:
+                    os.close(fh)
+                
+                                        
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/doConfig.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/doConfig.py
index 53aebb6412..a983fa3cb1 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/doConfig.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/doConfig.py
@@ -1,579 +1,578 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# doConfig - validate and convert serverConfig into simple Java objects
-#
-# this file was originally config.py
-# it was renamed to avoid a conflict with jep's built-in config module
-#
-# ----------------------------------------------------------------------------
-#
-#     SOFTWARE HISTORY
-#
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    08/09/2013          #1571     randerso       Changed projections to use the Java             
-#                                                 ProjectionType enumeration
-#    07/09/2014          #3146     randerso       Added check for duplicate smartInit
-#                                  rferrel        Corrected log to alertviz.
-#    11/18/2014          #4953     randerso       Added check for empty unit string
-#    04/09/2015          #4383     dgilling       Added support for FireWx ISC.       
-#    Apr 23, 2015        #4259     njensen        Updated for new JEP API       
-#    09/01/2015          16287     amoore         Additional validation of user input      
-#    05/24/2016          15633     bhunder        Modified so that a parm name could
-#                                                 contain your office type.
-#    09/12/2016          #5861     randerso       Change getSiteID() to return a single value
-#                                                 instead of a list containing only one value.
-#
-########################################################################
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-import types,re,configProps
-
-from java.util import ArrayList,LinkedHashMap
-from java.lang import Integer,Float
-from com.vividsolutions.jts.geom import Coordinate
-from java.awt import Point
-
-Databases = {}
-Projections = {}
-DiscreteDef = LinkedHashMap()   #from parseKeys()
-        
-# Check a python sequence to see that
-# it matches the format.
-# data: is a sequence of objects
-# fmt : is a parallel sequence of type objects
-# message : optional message to print on exception
-#
-# Returns data.
-# If len(data) != len(fmt)
-# or the type of each element in data does not
-# match the coresponding type in fmt, then
-# a TypeError is raised.
-# Example:  a, b = check(([1, 3], "foo"), (list, str))
-def check(data, fmt, message, allData = None):
-     if len(data) != len(fmt):
-         m = message + ": Wrong number of items found, " + \
-           "Expected " + `len(fmt)` + ", got " + `len(data)` + \
-           " Input: " + `data`
-         if allData is not None:
-             m = m + ' All: ' + `allData`
-         raise AssertionError, m
-     for i in xrange(len(data)):
-         obj = data[i]
-         if hasattr(obj, "java_name"):
-             t = obj.java_name
-         else:
-             t = type(obj)
-         
-         if t != fmt[i]:
-             m = message + ": Wrong data type found, " + \
-               "Expected " + `fmt[i]` + ", got " + `t` + \
-               " for position #" + `i+1` + " Input: " + `data`
-             if allData is not None:
-                 m = m + ' All: ' + `allData`
-             raise AssertionError, m
-     return data
-
-# dictionary check, keys are strings, values/subvalues  as specified
-def dictCheck(dictionary, value, subvalue, configName):
-    map = LinkedHashMap()
-    if type(dictionary) == dict:
-        for k in dictionary.keys():
-            if type(k) != str:
-                raise TypeError, configName + " key [" + `k` + "] not a str"
-            l = dictionary[k]
-            if type(l) != value:
-                raise TypeError, configName + " value [" + `l` + "] not a " \
-                  + `value`
-            if value == list or value == tuple:
-                n = ArrayList()
-                for m in l:
-                    if type(m) != subvalue:
-                        raise TypeError, configName + " value [" + `l` \
-                          + "] isn't a " + `subvalue` + ": " + `m`
-                    elif subvalue == int:
-                        n.add(Integer(m))
-                    elif subvalue == float:
-                        n.add(Float(m))
-                    else:
-                        n.add(m)
-                map.put(k, n)
-            else:
-                if value == int:
-                    map.put(k,Integer(l))
-                elif value == float:
-                    map.put(k,Float(l))
-                else:
-                    map.put(k, l)
-    else:
-        raise TypeError, configName + " not a dict:" + `dictionary`
-    return map
-
-def getWx(wxtypes, wxvisibilities):
-    from com.raytheon.uf.common.dataplugin.gfe.weather import WeatherCoverage, WeatherIntensity, WeatherAttribute
-    from com.raytheon.edex.plugin.gfe.config import SimpleWeatherTypeConfig
-    types = ArrayList()
-    for t in wxtypes:
-        symbol, description, cov, inten, attr = \
-          check(t, (str, str, list, list, list), "Error in WeatherType")
-        coverages = ArrayList()
-        intensities = ArrayList()
-        attributes = ArrayList()
-        for c in cov:
-            csym, cdes = check(c, (str, str), "Error in Weather Coverage", t)
-            coverages.add(WeatherCoverage(csym, cdes))
-        for i in inten:
-            isym, ides = check(i, (str, str), "Error in Weather Intensity", t)
-            intensities.add(WeatherIntensity(isym, ides))
-        for a in attr:
-            asym, ades = check(a, (str, str), "Error in Weather Attributes", t)
-            attributes.add(WeatherAttribute(asym, ades))
-
-        types.add(SimpleWeatherTypeConfig(symbol, description, coverages,
-                          intensities, attributes))
-    vis = ArrayList()
-    for v in wxvisibilities:
-        vis.add(v)
-    return (vis, types)
-
-#note that DiscreteDef is a dictionary that contains the following
-#coded strings:  ['OVERLAPS', 'AuxLength', sym1, des1, sym2, des2,  ....]
-#We do this to pass to C++ as a InitDict, which
-#is a Dict.
-def parseKeys(name, overlaps, auxLength, keys):
-    if not DiscreteDef.containsKey(name):
-        ts = ArrayList()
-        if overlaps:
-            ts.add("OVERLAPS")
-        else:
-            ts.add("MUTEXC")
-        ts.add(`auxLength`)
-        for symdes in keys:
-            sym, des = check(symdes, (str, str),
-              "Error in DiscreteKey Definition", keys)
-            ts.add(sym)
-            ts.add(des)
-        if overlaps and len(keys) > 0 and keys[0][0] != "":
-            s = "1st discrete key must be  for OVERLAP-defined " +\
-              "weather element. [" + name + "]" + `keys`
-            raise Exception, s
-        DiscreteDef.put(name, ts);
-
-def createParm(parminfo, domain, tc):
-    from com.raytheon.edex.plugin.gfe.config import SimpleGridParmConfig
-
-    m = "Format Error in Weather Element Definition"
-    if len(parminfo) < 2:
-        raise TypeError, m + ': ' +  `parminfo`
-    
-    dim, origin, extent, timezone, projection,officeType = domain
-
-    if parminfo[1] == 'Scalar' or parminfo[1] == 'Vector':
-        parmInfoFmt = (str, str, str, str, float, float, int, int)
-        name, type, units, description, max, min, precision, \
-          rateParm = check(parminfo, parmInfoFmt, m)
-
-    elif parminfo[1] == 'Weather':
-        name, type, units, description = \
-          check(parminfo, (str, str, str, str), m)
-        max = 0
-        min = 0
-        precision = 0
-        rateParm = False
-
-    elif parminfo[1] == 'Discrete':
-        if len(parminfo) == 6:
-            parmInfoFmt = (str, str, str, str, int, list)
-            name, type, units, description, overlaps, keys = \
-              check(parminfo, parmInfoFmt, m)
-            auxSize = 0
-        else:
-            parmInfoFmt = (str, str, str, str, int, list, int)
-            name, type, units, description, overlaps, keys, auxSize = \
-              check(parminfo, parmInfoFmt, m)
-        max = 0.0
-        min = 0.0
-        precision = 0
-        rateParm = False
-        parseKeys(name, overlaps, auxSize, keys)
-
-    else:
-        raise Exception, "Illegal WE type specified for " + `parminfo[0]`
-
-    #don't add parms with your own office type in the name.
-    if name.endswith(officeType):
-        return None     #skip this one
-    
-    if len(units) == 0:
-        raise Exception, 'Unit string must not be empty. For unitless quantities enter "1"'
-    
-    updateProjections(projection)
-    start, repeat, duration = tc
-    timeIndependentParm = (repeat == 0 and duration == 0)
-
-#    return WeatherElement(name, type, units, description, max, min,
-#                          precision, timeIndependentParm, dim, origin,
-#                          extent, start, repeat, duration, rateParm)
-    return SimpleGridParmConfig(name, type, units, description, 1.0*max, 1.0*min,
-                          precision, timeIndependentParm, Point(dim[0], dim[1]), Coordinate(origin[0], origin[1]),
-                          Coordinate(extent[0], extent[1]), start, repeat, duration, rateParm)
-
-def getDB(site, projID, dbinfo):
-    from com.raytheon.edex.plugin.gfe.config import SimpleModelConfig
-
-    dbinfoFmt = (str, str, str, int, int, int, int)
-    name, format, type, single, official, numVer, purgeAge = \
-      check(dbinfo, dbinfoFmt, "Error in Database Attribute Definition")
-
-    if not Databases.has_key(name+type):
-        Databases[name+type] = SimpleModelConfig(site, format, type, name, projID,
-                                        single, official, numVer, purgeAge)
-        
-    return Databases[name+type]
-
-def parseDBItm(site, domain, item):
-#    import serverConfig
-#    domain = serverConfig.SITES[site]
-    
-    dbinfo, parminfo = check(item, (tuple, list),
-      "Database Definition or Parm Group Format Error")
-    projID = domain[4][0]
-
-    db = getDB(site, projID, dbinfo)
-    grids = db.grids
-    for ptc in parminfo:
-        parms, tc = check(ptc, (list, tuple),
-          "Parm Group/Time Constraint Tuple Error")
-        check(tc, (int, int, int), "Time Constraint Format Error", ptc)
-        for parm in parms:
-            grids.add(createParm(parm, domain, tc))
-    db.grids = grids
-
-def updateProjections(projection):
-    from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData
-    # extract projection data
-    projFmt = (str, 
-      "com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData$ProjectionType", 
-      tuple, tuple, tuple, float, float, tuple, tuple, float, float, float)
-    projID, ptype, pllll, pllur, pllo, pspo, pspt, pgpll, pgpur, pli, \
-      plc, plo = check(projection, projFmt, "Format error in Projection")
-    check(pllll, (float, float),
-      "Format error lower left long/lat in Projection", projection)
-    check(pllur, (float, float),
-      "Format error upper right long/lat in Projection", projection)
-    check(pllo, (float, float),
-      "Format error long/lat origin in Projection", projection)
-    check(pgpll, (int, int),
-      "Format error lower left grid point in Projection", projection)
-    check(pgpur, (int, int),
-      "Format error upper right grid point in Projection", projection)
-
-    if not Projections.has_key(projID):
-        Projections[projID] = ProjectionData(projID, ptype, 
-                                             Coordinate(pllll[0],pllll[1]),
-                                             Coordinate(pllur[0],pllur[1]), 
-                                             Coordinate(pllo[0],pllo[1]), 
-                                             pspo, pspt,
-                                             Point(pgpll[0], pgpll[1]), 
-                                             Point(pgpur[0], pgpur[1]), 
-                                             pli, plc, plo)
-
-def parseGridLocation(domain):
-    from com.raytheon.edex.plugin.gfe.config import SimpleGridLocation
-    
-    #if office type is present:
-    if len(domain) == 6:
-        domainFmt = (list,tuple,tuple,str,tuple,str)
-        gridSize, origin, extent, tz, proj, officeType = check(domain, domainFmt, "Format error in SITES line")
-    #if office type is not present:
-    else:
-        domainFmt = (list, tuple, tuple, str, tuple)
-        gridSize, origin, extent, tz, proj = check(domain, domainFmt,
-          "Format error in SITES line")
-    check(gridSize, (int, int), "GridSize format error from SITES", domain)
-    check(origin, (float, float), "Origin format error from SITES", domain)
-    check(extent, (float, float), "Extent format error from SITES", domain)
-
-    projFmt = (str, 
-      "com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData$ProjectionType", 
-      tuple, tuple, tuple, float, float, tuple, tuple, float, float, float)
-    projID, projType, llll, llur, llo, sp1, sp2, gpll, gpur, li, lc, lo = \
-        check(proj, projFmt, "Format error in Projection")
-    check(llll, (float, float),
-      "Format error lower left long/lat in Projection", proj)
-    check(llur, (float, float),
-      "Format error upper right long/lat in Projection", proj)
-    check(llo, (float, float),
-      "Format error long/lat origin in Projection", proj)
-    check(gpll, (int, int),
-      "Format error lower left grid point in Projection", proj)
-    check(gpur, (int, int),
-      "Format error upper right grid point in Projection", proj)
-
-    gloc = SimpleGridLocation(Point(gridSize[0], gridSize[1]), 
-                              Coordinate(origin[0], origin[1]), 
-                              Coordinate(extent[0], extent[1]), 
-                              projID, projType, 
-                              Coordinate(llll[0], llll[1]), 
-                              Coordinate(llur[0], llur[1]), 
-                              Coordinate(llo[0], llo[1]), 
-                              sp1, sp2, 
-                              Point(gpll[0], gpll[1]), 
-                              Point(gpur[0], gpur[1]), 
-                              li, lc, lo)
-    return gloc
-
-def parse(siteId, databases, wxtypes, wxvisibilities, allSites, inProjections):
-    from com.raytheon.edex.plugin.gfe.config import SimpleGridLocation
-    domain = parseGridLocation(allSites[siteId])
-    for itm in databases:
-        parseDBItm(siteId, allSites[siteId], itm)
-
-    if type(wxtypes) != list:
-        raise TypeError, "Format Error in WeatherTypes,  not a list: " \
-          + `wxtypes`
-    if type(wxvisibilities) != list:
-        raise TypeError, "Format Error in visibilities,  not a list: " \
-          + `wxvisibilities`
-    vis, types = getWx(wxtypes, wxvisibilities)
-
-    models = ArrayList()
-    for key in Databases.keys():
-        models.add(Databases[key])
-
-    projections = ArrayList()
-    if type(inProjections) != list:
-        raise TypeError, "Format Error in Projections,  not a list: " \
-          + `inProjections`
-    for p in inProjections: 
-        updateProjections(p)
-    for key in Projections.keys():
-        projections.add(Projections[key])
-
-    allSiteIDs = ArrayList()
-    allOfficeTypes = ArrayList()
-    for key in allSites.keys():
-        allSiteIDs.add(key)
-        try:
-            ot = allSites[key][5]
-            if type(ot) != str:
-               raise TypeError, "Format Error in office type, not a str:",allSites[key]
-        except:
-            ot = "wfo"  #assumes wfo if not present
-        allOfficeTypes.add(ot)
-    
-    timeZone = ArrayList()
-    timeZone.add(allSites[siteId][3])
-
-    return models, projections, vis, types, DiscreteDef, allSiteIDs, domain, siteId, timeZone, allOfficeTypes
-
-def d2dParse(d2dmodels):
-    dict = LinkedHashMap()
-    for entry in d2dmodels:
-        if type(entry) is types.TupleType:
-            d2dModelName, gfeModelName = check(entry, (str, str),
-              "Format error in D2DMODELS entry", d2dmodels)
-
-            dict.put(d2dModelName, gfeModelName)
-
-        elif type(entry) is types.StringType:
-            d2dModelName = entry
-            dict.put(d2dModelName, d2dModelName)
-
-        else:
-            raise SyntaxError, "Invalid syntax for D2DMODELS" + `d2dmodels`
-
-    return dict
-
-def netcdfParse(netcdfDirs):
-    dict = LinkedHashMap()
-    for entry in netcdfDirs:
-        if type(entry) is types.TupleType:
-            direct, modelName = check(entry, (str, str),
-              "Format error in NETCDFDIRS entry", netcdfDirs)
-
-            if direct[-1] == '/':
-                direct = direct[0:-1]
-            dict.put(direct, modelName)
-
-        elif type(entry) is types.StringType:
-            direct = entry
-            if direct[-1] == '/':
-                direct = direct[0:-1]
-            dict.put(direct,  '')
-
-        else:
-            raise SyntaxError, "Invalid syntax for NETCDFDIRS" + `netcdfDirs`
-
-    return dict
-
-def parseSat(satdata):
-    rval = LinkedHashMap()
-    for e in satdata:
-        if type(e) is types.TupleType:
-            direct, name = check(e, (str, str),
-              "Format error in SATDATA entry", satdata)
-            rval.put(direct, name)
-        else:
-            raise SyntaxError, "Invalid syntax for SATDATA" + `satdata`
-    return rval
-
-def otherParse(validSites, serverhost, mhsid, port,  
-  initmodules, accumElem,
-  initskips, d2ddbver, logfilepurge, prddir, home,
-  extraWEPrec, vtecRequestTime, autoConfigureNotifyTextProd,
-  iscRoutingTableAddress, requestedISCsites, requestISC, sendiscOnSave,
-  sendiscOnPublish, requestedISCparms, transmitScript):
-    if type(serverhost) != str:
-        raise TypeError, "GFESUITE_HOST not an str: " + `serverhost`
-    if type(mhsid) != str:
-        raise TypeError, "GFESUITE_MHSID not an str: " + `mhsid`
-    if type(vtecRequestTime) != int:
-        raise TypeError, "VTECPartners: VTEC_REMOTE_TABLE_FETCH_TIME " + \
-          "not an int: " + `vtecRequestTime`
-    if type(port) != int:
-        raise TypeError, "GFESUITE_PORT not an int: " + `port`
-    initmodules = dictCheck(initmodules, list, str, "INITMODULES")
-    accumElem = dictCheck(accumElem, list, str, "D2DAccumulativeElements")
-    initskips = dictCheck(initskips, list, int, "INITSKIPS")
-    d2ddbver = dictCheck(d2ddbver, int, None, "D2DDBVERSIONS")
-    if type(logfilepurge) != int:
-        raise TypeError, "LOG_FILE_PURGE_AFTER not an int: " + `logfilepurge`
-    if type(autoConfigureNotifyTextProd) != int:
-        raise TypeError, "AUTO_CONFIGURE_NOTIFYTEXTPROD not an int: " + \
-          `logfilepurge`
-    if type(prddir) != str:
-        raise TypeError, "GFESUITE_PRDDIR not an str: " + `prddir`
-    if type(home) != str:
-        raise TypeError, "GFESUITE_HOME not an str: " + `home`
-    if type(extraWEPrec) != list:
-        raise TypeError, "ExtraWEPrec not an list: " + `extraWEPrec`
-    else:
-        extraWEPrecision = LinkedHashMap()
-        for e in extraWEPrec:
-            if type(e) == str:
-                extraWEPrecision.put(e, Integer(1))
-            elif type(e) == tuple and len(e) == 2 and type(e[0]) == str and \
-              type(e[1]) == int:
-                extraWEPrecision.put(e[0], Integer(e[1]))
-            else:
-                raise TypeError, \
-                  "Entry in ExtraWEPrec not str or (str, int): " + `e`
-
-    iscRoutingTableAddress = dictCheck(iscRoutingTableAddress,str,str,"ISC_ROUTING_TABLE_ADDRESS")
-    #if type(iscRoutingTableAddress) not in [str, types.NoneType]:
-    #    raise TypeError, "ISC_ROUTING_TABLE_ADDRESS not None or a str: " + \
-    #      `iscRoutingTableAddress`
-    #elif iscRoutingTableAddress is None:
-    #    iscRoutingTableAddress = ""
-
-    reqISCsites = ArrayList()
-    if type(requestedISCsites) not in [list, types.NoneType]:
-        raise TypeError, "REQUESTED_ISC_SITES not None or a list: " + \
-          `requestedISCsites`
-    elif type(requestedISCsites) is list:
-        for r in requestedISCsites:
-            if type(r) != str:
-                raise TypeError, "REQUESTED_ISC_SITES not list of strings: " + \
-                  `requestedISCsites`
-                #Verify requested ISC site is of desired pattern
-            elif r not in validSites:
-                raise ValueError, "Requested ISC site: " + str(r) + " could not be found in serverConfig.py."
-            else:
-                reqISCsites.add(r);
-
-    reqISCparms = ArrayList()
-    if type(requestedISCparms) not in [list, types.NoneType]:
-        raise TypeError, "REQUESTED_ISC_PARMS not None or a list: " + \
-          `requestedISCparms`
-    elif type(requestedISCparms) is list:
-        for r in requestedISCparms:
-            if type(r) != str:
-                raise TypeError, "REQUESTED_ISC_PARMS not list of strings: " + \
-                  `requestedISCparms`
-                  #Verify requested ISC parm is of desired pattern
-            elif not re.match(configProps.ISC_PARM_PATTERN, str(r)):
-                raise ValueError, "Requested ISC parm: " + str(r) + " does not match desired pattern: " + configProps.ISC_PARM_PATTERN
-            else:
-                reqISCparms.add(r)
-
-    if type(requestISC) != bool:
-        #If the type is boolean, it is already a valid value
-        #If the type is not boolean, and is not int, then it is not valid
-        if type(requestISC) != int:
-            raise TypeError, "REQUEST_ISC not an int or boolean: " + `requestISC`
-        #Verify request ISC is of valid value
-        elif not ((requestISC == 0) or (requestISC == 1)):
-            raise ValueError, "REQUEST_ISC is: " + `requestISC` + ", but expected True, False, 0 or 1"
-    
-    if type(sendiscOnSave) != bool:
-        #If the type is boolean, it is already a valid value
-        #If the type is not boolean, and is not int, then it is not valid
-        if type(sendiscOnSave) != int:
-            raise TypeError, "SEND_ISC_ON_SAVE not an int or boolean: " + `sendiscOnSave`
-        #Verify send ISC on save is of valid value
-        elif not ((sendiscOnSave == 0) or (sendiscOnSave == 1)):
-            raise ValueError, "SEND_ISC_ON_SAVE is: " + `sendiscOnSave` + ", but expected True, False, 0 or 1"
-    
-    if type(sendiscOnPublish) != bool:
-        #If the type is boolean, it is already a valid value
-        #If the type is not boolean, and is not int, then it is not valid
-        if type(sendiscOnPublish) != int:
-            raise TypeError, "SEND_ISC_ON_PUBLISH not an int or boolean: " + `sendiscOnPublish`
-        #Verify send ISC on publish is of valid value
-        elif not ((sendiscOnPublish == 0) or (sendiscOnPublish == 1)):
-            raise ValueError, "SEND_ISC_ON_PUBLISH is: " + `sendiscOnPublish` + ", but expected True, False, 0 or 1"
-
-    if type(transmitScript) not in [str, types.NoneType]:
-        raise TypeError, "TRANSMIT_SCRIPT not None or str: " + `transmitScript`
-    elif transmitScript is None:
-        transmitScript = ""
-
-    return serverhost, mhsid, \
-      port, initmodules, accumElem, \
-      initskips, d2ddbver, logfilepurge, prddir, home,\
-      extraWEPrecision, vtecRequestTime, \
-      autoConfigureNotifyTextProd, \
-      iscRoutingTableAddress, reqISCsites, requestISC, sendiscOnSave, \
-      sendiscOnPublish, reqISCparms, transmitScript
-
-def parseAdditionalISCRouting(iscRoutingData):
-    from com.raytheon.edex.plugin.gfe.config import ISCRoutingConfig
-    
-    retVal = ArrayList()
-    if iscRoutingData:
-        try:
-            iter(iscRoutingData)
-        except TypeError:
-            raise TypeError("AdditionalISCRouting should be a list or tuple.")
-        
-        for entry in iscRoutingData:
-            (pyParms, dbName, editAreaPrefix) = check(entry, (list, str, str), "AdditionalISCRouting entry not in correct format.")
-            javaParms = ArrayList()
-            for parm in pyParms:
-                javaParms.add(str(parm[0]))
-            retVal.add(ISCRoutingConfig(javaParms, dbName, editAreaPrefix))
-    
-    return retVal
+##

+# This software was developed and / or modified by Raytheon Company,

+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose

+# export/transfer/disclosure is restricted by U.S. law. Dissemination

+# to non-U.S. persons whether in the United States or abroad requires

+# an export license or other authorization.

+# 

+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for

+# further licensing information.

+##

+# doConfig - validate and convert serverConfig into simple Java objects

+#

+# this file was originally config.py

+# it was renamed to avoid a conflict with jep's built-in config module

+#

+# ----------------------------------------------------------------------------

+#

+#     SOFTWARE HISTORY

+#

+#    Date            Ticket#       Engineer       Description

+#    ------------    ----------    -----------    --------------------------

+#    08/09/2013          #1571     randerso       Changed projections to use the Java             

+#                                                 ProjectionType enumeration

+#    07/09/2014          #3146     randerso       Added check for duplicate smartInit

+#                                  rferrel        Corrected log to alertviz.

+#    11/18/2014          #4953     randerso       Added check for empty unit string

+#    04/09/2015          #4383     dgilling       Added support for FireWx ISC.       

+#    Apr 23, 2015        #4259     njensen        Updated for new JEP API       

+#    09/01/2015          16287     amoore         Additional validation of user input      

+#    05/24/2016          15633     bhunder        Modified so that a parm name could

+#                                                 contain your office type.

+#    09/12/2016          #5861     randerso       Change getSiteID() to return a single value

+#                                                 instead of a list containing only one value.

+#

+########################################################################

+

+##

+# This is a base file that is not intended to be overridden.

+##

+

+

+

+import types,re,configProps

+

+from java.util import ArrayList,LinkedHashMap

+from java.lang import Integer,Float

+from com.vividsolutions.jts.geom import Coordinate

+from java.awt import Point

+

+Databases = {}

+Projections = {}

+DiscreteDef = LinkedHashMap()   #from parseKeys()

+        

+# Check a python sequence to see that

+# it matches the format.

+# data: is a sequence of objects

+# fmt : is a parallel sequence of type objects

+# message : optional message to print on exception

+#

+# Returns data.

+# If len(data) != len(fmt)

+# or the type of each element in data does not

+# match the coresponding type in fmt, then

+# a TypeError is raised.

+# Example:  a, b = check(([1, 3], "foo"), (list, str))

+def check(data, fmt, message, allData = None):

+     if len(data) != len(fmt):

+         m = message + ": Wrong number of items found, " + \

+           "Expected " + repr(len(fmt)) + ", got " + repr(len(data)) + \

+           " Input: " + repr(data)

+         if allData is not None:

+             m = m + ' All: ' + repr(allData)

+         raise AssertionError(m)

+     for i in range(len(data)):

+         obj = data[i]

+         if hasattr(obj, "java_name"):

+             t = obj.java_name

+         else:

+             t = type(obj)

+         

+         if t != fmt[i]:

+             m = message + ": Wrong data type found, " + \

+               "Expected " + repr(fmt[i]) + ", got " + repr(t) + \

+               " for position #" + repr(i+1) + " Input: " + repr(data)

+             if allData is not None:

+                 m = m + ' All: ' + repr(allData)

+             raise AssertionError(m)

+     return data

+

+# dictionary check, keys are strings, values/subvalues  as specified

+def dictCheck(dictionary, value, subvalue, configName):

+    map = LinkedHashMap()

+    if type(dictionary) == dict:

+        for k in list(dictionary.keys()):

+            if type(k) != str:

+                raise TypeError(configName + " key [" + repr(k) + "] not a str")

+            l = dictionary[k]

+            if type(l) != value:

+                raise TypeError(configName + " value [" + repr(l) + "] not a " \

+                  + repr(value))

+            if value == list or value == tuple:

+                n = ArrayList()

+                for m in l:

+                    if type(m) != subvalue:

+                        raise TypeError(configName + " value [" + repr(l) \

+                          + "] isn't a " + repr(subvalue) + ": " + repr(m))

+                    elif subvalue == int:

+                        n.add(Integer(m))

+                    elif subvalue == float:

+                        n.add(Float(m))

+                    else:

+                        n.add(m)

+                map.put(k, n)

+            else:

+                if value == int:

+                    map.put(k,Integer(l))

+                elif value == float:

+                    map.put(k,Float(l))

+                else:

+                    map.put(k, l)

+    else:

+        raise TypeError(configName + " not a dict:" + repr(dictionary))

+    return map

+

+def getWx(wxtypes, wxvisibilities):

+    from com.raytheon.uf.common.dataplugin.gfe.weather import WeatherCoverage, WeatherIntensity, WeatherAttribute

+    from com.raytheon.edex.plugin.gfe.config import SimpleWeatherTypeConfig

+    types = ArrayList()

+    for t in wxtypes:

+        symbol, description, cov, inten, attr = \

+          check(t, (str, str, list, list, list), "Error in WeatherType")

+        coverages = ArrayList()

+        intensities = ArrayList()

+        attributes = ArrayList()

+        for c in cov:

+            csym, cdes = check(c, (str, str), "Error in Weather Coverage", t)

+            coverages.add(WeatherCoverage(csym, cdes))

+        for i in inten:

+            isym, ides = check(i, (str, str), "Error in Weather Intensity", t)

+            intensities.add(WeatherIntensity(isym, ides))

+        for a in attr:

+            asym, ades = check(a, (str, str), "Error in Weather Attributes", t)

+            attributes.add(WeatherAttribute(asym, ades))

+

+        types.add(SimpleWeatherTypeConfig(symbol, description, coverages,

+                          intensities, attributes))

+    vis = ArrayList()

+    for v in wxvisibilities:

+        vis.add(v)

+    return (vis, types)

+

+#note that DiscreteDef is a dictionary that contains the following

+#coded strings:  ['OVERLAPS', 'AuxLength', sym1, des1, sym2, des2,  ....]

+#We do this to pass to C++ as a InitDict, which

+#is a Dict.

+def parseKeys(name, overlaps, auxLength, keys):

+    if not DiscreteDef.containsKey(name):

+        ts = ArrayList()

+        if overlaps:

+            ts.add("OVERLAPS")

+        else:

+            ts.add("MUTEXC")

+        ts.add(repr(auxLength))

+        for symdes in keys:

+            sym, des = check(symdes, (str, str),

+              "Error in DiscreteKey Definition", keys)

+            ts.add(sym)

+            ts.add(des)

+        if overlaps and len(keys) > 0 and keys[0][0] != "":

+            s = "1st discrete key must be  for OVERLAP-defined " +\

+              "weather element. [" + name + "]" + repr(keys)

+            raise Exception(s)

+        DiscreteDef.put(name, ts);

+

+def createParm(parminfo, domain, tc):

+    from com.raytheon.edex.plugin.gfe.config import SimpleGridParmConfig

+

+    m = "Format Error in Weather Element Definition"

+    if len(parminfo) < 2:

+        raise TypeError(m + ': ' +  repr(parminfo))

+    

+    dim, origin, extent, timezone, projection,officeType = domain

+

+    if parminfo[1] == 'Scalar' or parminfo[1] == 'Vector':

+        parmInfoFmt = (str, str, str, str, float, float, int, int)

+        name, type, units, description, max, min, precision, \

+          rateParm = check(parminfo, parmInfoFmt, m)

+

+    elif parminfo[1] == 'Weather':

+        name, type, units, description = \

+          check(parminfo, (str, str, str, str), m)

+        max = 0

+        min = 0

+        precision = 0

+        rateParm = False

+

+    elif parminfo[1] == 'Discrete':

+        if len(parminfo) == 6:

+            parmInfoFmt = (str, str, str, str, int, list)

+            name, type, units, description, overlaps, keys = \

+              check(parminfo, parmInfoFmt, m)

+            auxSize = 0

+        else:

+            parmInfoFmt = (str, str, str, str, int, list, int)

+            name, type, units, description, overlaps, keys, auxSize = \

+              check(parminfo, parmInfoFmt, m)

+        max = 0.0

+        min = 0.0

+        precision = 0

+        rateParm = False

+        parseKeys(name, overlaps, auxSize, keys)

+

+    else:

+        raise Exception("Illegal WE type specified for " + repr(parminfo[0]))

+

+    #don't add parms with your own office type in the name.

+    if name.endswith(officeType):

+        return None     #skip this one

+    

+    if len(units) == 0:

+        raise Exception('Unit string must not be empty. For unitless quantities enter "1"')

+    

+    updateProjections(projection)

+    start, repeat, duration = tc

+    timeIndependentParm = (repeat == 0 and duration == 0)

+

+#    return WeatherElement(name, type, units, description, max, min,

+#                          precision, timeIndependentParm, dim, origin,

+#                          extent, start, repeat, duration, rateParm)

+    return SimpleGridParmConfig(name, type, units, description, 1.0*max, 1.0*min,

+                          precision, timeIndependentParm, Point(dim[0], dim[1]), Coordinate(origin[0], origin[1]),

+                          Coordinate(extent[0], extent[1]), start, repeat, duration, rateParm)

+

+def getDB(site, projID, dbinfo):

+    from com.raytheon.edex.plugin.gfe.config import SimpleModelConfig

+

+    dbinfoFmt = (str, str, str, int, int, int, int)

+    name, format, type, single, official, numVer, purgeAge = \

+      check(dbinfo, dbinfoFmt, "Error in Database Attribute Definition")

+

+    if name+type not in Databases:

+        Databases[name+type] = SimpleModelConfig(site, format, type, name, projID,

+                                        single, official, numVer, purgeAge)

+        

+    return Databases[name+type]

+

+def parseDBItm(site, domain, item):

+#    import serverConfig

+#    domain = serverConfig.SITES[site]

+    

+    dbinfo, parminfo = check(item, (tuple, list),

+      "Database Definition or Parm Group Format Error")

+    projID = domain[4][0]

+

+    db = getDB(site, projID, dbinfo)

+    grids = db.grids

+    for ptc in parminfo:

+        parms, tc = check(ptc, (list, tuple),

+          "Parm Group/Time Constraint Tuple Error")

+        check(tc, (int, int, int), "Time Constraint Format Error", ptc)

+        for parm in parms:

+            grids.add(createParm(parm, domain, tc))

+    db.grids = grids

+

+def updateProjections(projection):

+    from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData

+    # extract projection data

+    projFmt = (str, 

+      "com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData$ProjectionType", 

+      tuple, tuple, tuple, float, float, tuple, tuple, float, float, float)

+    projID, ptype, pllll, pllur, pllo, pspo, pspt, pgpll, pgpur, pli, \

+      plc, plo = check(projection, projFmt, "Format error in Projection")

+    check(pllll, (float, float),

+      "Format error lower left long/lat in Projection", projection)

+    check(pllur, (float, float),

+      "Format error upper right long/lat in Projection", projection)

+    check(pllo, (float, float),

+      "Format error long/lat origin in Projection", projection)

+    check(pgpll, (int, int),

+      "Format error lower left grid point in Projection", projection)

+    check(pgpur, (int, int),

+      "Format error upper right grid point in Projection", projection)

+

+    if projID not in Projections:

+        Projections[projID] = ProjectionData(projID, ptype, 

+                                             Coordinate(pllll[0],pllll[1]),

+                                             Coordinate(pllur[0],pllur[1]), 

+                                             Coordinate(pllo[0],pllo[1]), 

+                                             pspo, pspt,

+                                             Point(pgpll[0], pgpll[1]), 

+                                             Point(pgpur[0], pgpur[1]), 

+                                             pli, plc, plo)

+

+def parseGridLocation(domain):

+    from com.raytheon.edex.plugin.gfe.config import SimpleGridLocation

+    

+    #if office type is present:

+    if len(domain) == 6:

+        domainFmt = (list,tuple,tuple,str,tuple,str)

+        gridSize, origin, extent, tz, proj, officeType = check(domain, domainFmt, "Format error in SITES line")

+    #if office type is not present:

+    else:

+        domainFmt = (list, tuple, tuple, str, tuple)

+        gridSize, origin, extent, tz, proj = check(domain, domainFmt,

+          "Format error in SITES line")

+    check(gridSize, (int, int), "GridSize format error from SITES", domain)

+    check(origin, (float, float), "Origin format error from SITES", domain)

+    check(extent, (float, float), "Extent format error from SITES", domain)

+

+    projFmt = (str, 

+      "com.raytheon.uf.common.dataplugin.gfe.config.ProjectionData$ProjectionType", 

+      tuple, tuple, tuple, float, float, tuple, tuple, float, float, float)

+    projID, projType, llll, llur, llo, sp1, sp2, gpll, gpur, li, lc, lo = \

+        check(proj, projFmt, "Format error in Projection")

+    check(llll, (float, float),

+      "Format error lower left long/lat in Projection", proj)

+    check(llur, (float, float),

+      "Format error upper right long/lat in Projection", proj)

+    check(llo, (float, float),

+      "Format error long/lat origin in Projection", proj)

+    check(gpll, (int, int),

+      "Format error lower left grid point in Projection", proj)

+    check(gpur, (int, int),

+      "Format error upper right grid point in Projection", proj)

+

+    gloc = SimpleGridLocation(Point(gridSize[0], gridSize[1]), 

+                              Coordinate(origin[0], origin[1]), 

+                              Coordinate(extent[0], extent[1]), 

+                              projID, projType, 

+                              Coordinate(llll[0], llll[1]), 

+                              Coordinate(llur[0], llur[1]), 

+                              Coordinate(llo[0], llo[1]), 

+                              sp1, sp2, 

+                              Point(gpll[0], gpll[1]), 

+                              Point(gpur[0], gpur[1]), 

+                              li, lc, lo)

+    return gloc

+

+def parse(siteId, databases, wxtypes, wxvisibilities, allSites, inProjections):

+    from com.raytheon.edex.plugin.gfe.config import SimpleGridLocation

+    domain = parseGridLocation(allSites[siteId])

+    for itm in databases:

+        parseDBItm(siteId, allSites[siteId], itm)

+

+    if type(wxtypes) != list:

+        raise TypeError("Format Error in WeatherTypes,  not a list: " \

+          + repr(wxtypes))

+    if type(wxvisibilities) != list:

+        raise TypeError("Format Error in visibilities,  not a list: " \

+          + repr(wxvisibilities))

+    vis, types = getWx(wxtypes, wxvisibilities)

+

+    models = ArrayList()

+    for key in list(Databases.keys()):

+        models.add(Databases[key])

+

+    projections = ArrayList()

+    if type(inProjections) != list:

+        raise TypeError("Format Error in Projections,  not a list: " \

+          + repr(inProjections))

+    for p in inProjections: 

+        updateProjections(p)

+    for key in list(Projections.keys()):

+        projections.add(Projections[key])

+

+    allSiteIDs = ArrayList()

+    allOfficeTypes = ArrayList()

+    for key in list(allSites.keys()):

+        allSiteIDs.add(key)

+        try:

+            ot = allSites[key][5]

+            if type(ot) != str:

+               raise TypeError("Format Error in office type, not a str:").with_traceback(allSites[key])

+        except:

+            ot = "wfo"  #assumes wfo if not present

+        allOfficeTypes.add(ot)

+    

+    timeZone = ArrayList()

+    timeZone.add(allSites[siteId][3])

+

+    return models, projections, vis, types, DiscreteDef, allSiteIDs, domain, siteId, timeZone, allOfficeTypes

+

+def d2dParse(d2dmodels):

+    dict = LinkedHashMap()

+    for entry in d2dmodels:

+        if type(entry) is tuple:

+            d2dModelName, gfeModelName = check(entry, (str, str),

+              "Format error in D2DMODELS entry", d2dmodels)

+

+            dict.put(d2dModelName, gfeModelName)

+

+        elif type(entry) is bytes:

+            d2dModelName = entry

+            dict.put(d2dModelName, d2dModelName)

+

+        else:

+            raise SyntaxError("Invalid syntax for D2DMODELS" + repr(d2dmodels))

+

+    return dict

+

+def netcdfParse(netcdfDirs):

+    dict = LinkedHashMap()

+    for entry in netcdfDirs:

+        if type(entry) is tuple:

+            direct, modelName = check(entry, (str, str),

+              "Format error in NETCDFDIRS entry", netcdfDirs)

+

+            if direct[-1] == '/':

+                direct = direct[0:-1]

+            dict.put(direct, modelName)

+

+        elif type(entry) is bytes:

+            direct = entry

+            if direct[-1] == '/':

+                direct = direct[0:-1]

+            dict.put(direct,  '')

+

+        else:

+            raise SyntaxError("Invalid syntax for NETCDFDIRS" + repr(netcdfDirs))

+

+    return dict

+

+def parseSat(satdata):

+    rval = LinkedHashMap()

+    for e in satdata:

+        if type(e) is tuple:

+            direct, name = check(e, (str, str),

+              "Format error in SATDATA entry", satdata)

+            rval.put(direct, name)

+        else:

+            raise SyntaxError("Invalid syntax for SATDATA" + repr(satdata))

+    return rval

+

+def otherParse(validSites, serverhost, mhsid, port,  

+  initmodules, accumElem,

+  initskips, d2ddbver, logfilepurge, prddir, home,

+  extraWEPrec, vtecRequestTime, autoConfigureNotifyTextProd,

+  iscRoutingTableAddress, requestedISCsites, requestISC, sendiscOnSave,

+  sendiscOnPublish, requestedISCparms, transmitScript):

+    if type(serverhost) != str:

+        raise TypeError("GFESUITE_HOST not an str: " + repr(serverhost))

+    if type(mhsid) != str:

+        raise TypeError("GFESUITE_MHSID not an str: " + repr(mhsid))

+    if type(vtecRequestTime) != int:

+        raise TypeError("VTECPartners: VTEC_REMOTE_TABLE_FETCH_TIME " + \

+          "not an int: " + repr(vtecRequestTime))

+    if type(port) != int:

+        raise TypeError("GFESUITE_PORT not an int: " + repr(port))

+    initmodules = dictCheck(initmodules, list, str, "INITMODULES")

+    accumElem = dictCheck(accumElem, list, str, "D2DAccumulativeElements")

+    initskips = dictCheck(initskips, list, int, "INITSKIPS")

+    d2ddbver = dictCheck(d2ddbver, int, None, "D2DDBVERSIONS")

+    if type(logfilepurge) != int:

+        raise TypeError("LOG_FILE_PURGE_AFTER not an int: " + repr(logfilepurge))

+    if type(autoConfigureNotifyTextProd) != int:

+        raise TypeError("AUTO_CONFIGURE_NOTIFYTEXTPROD not an int: " + \

+          repr(logfilepurge))

+    if type(prddir) != str:

+        raise TypeError("GFESUITE_PRDDIR not an str: " + repr(prddir))

+    if type(home) != str:

+        raise TypeError("GFESUITE_HOME not an str: " + repr(home))

+    if type(extraWEPrec) != list:

+        raise TypeError("ExtraWEPrec not an list: " + repr(extraWEPrec))

+    else:

+        extraWEPrecision = LinkedHashMap()

+        for e in extraWEPrec:

+            if type(e) == str:

+                extraWEPrecision.put(e, Integer(1))

+            elif type(e) == tuple and len(e) == 2 and type(e[0]) == str and \

+              type(e[1]) == int:

+                extraWEPrecision.put(e[0], Integer(e[1]))

+            else:

+                raise TypeError("Entry in ExtraWEPrec not str or (str, int): " + repr(e))

+

+    iscRoutingTableAddress = dictCheck(iscRoutingTableAddress,str,str,"ISC_ROUTING_TABLE_ADDRESS")

+    #if type(iscRoutingTableAddress) not in [str, types.NoneType]:

+    #    raise TypeError, "ISC_ROUTING_TABLE_ADDRESS not None or a str: " + \

+    #      `iscRoutingTableAddress`

+    #elif iscRoutingTableAddress is None:

+    #    iscRoutingTableAddress = ""

+

+    reqISCsites = ArrayList()

+    if type(requestedISCsites) not in [list, type(None)]:

+        raise TypeError("REQUESTED_ISC_SITES not None or a list: " + \

+          repr(requestedISCsites))

+    elif type(requestedISCsites) is list:

+        for r in requestedISCsites:

+            if type(r) != str:

+                raise TypeError("REQUESTED_ISC_SITES not list of strings: " + \

+                  repr(requestedISCsites))

+                #Verify requested ISC site is of desired pattern

+            elif r not in validSites:

+                raise ValueError("Requested ISC site: " + str(r) + " could not be found in serverConfig.py.")

+            else:

+                reqISCsites.add(r);

+

+    reqISCparms = ArrayList()

+    if type(requestedISCparms) not in [list, type(None)]:

+        raise TypeError("REQUESTED_ISC_PARMS not None or a list: " + \

+          repr(requestedISCparms))

+    elif type(requestedISCparms) is list:

+        for r in requestedISCparms:

+            if type(r) != str:

+                raise TypeError("REQUESTED_ISC_PARMS not list of strings: " + \

+                  repr(requestedISCparms))

+                  #Verify requested ISC parm is of desired pattern

+            elif not re.match(configProps.ISC_PARM_PATTERN, str(r)):

+                raise ValueError("Requested ISC parm: " + str(r) + " does not match desired pattern: " + configProps.ISC_PARM_PATTERN)

+            else:

+                reqISCparms.add(r)

+

+    if type(requestISC) != bool:

+        #If the type is boolean, it is already a valid value

+        #If the type is not boolean, and is not int, then it is not valid

+        if type(requestISC) != int:

+            raise TypeError("REQUEST_ISC not an int or boolean: " + repr(requestISC))

+        #Verify request ISC is of valid value

+        elif not ((requestISC == 0) or (requestISC == 1)):

+            raise ValueError("REQUEST_ISC is: " + repr(requestISC) + ", but expected True, False, 0 or 1")

+    

+    if type(sendiscOnSave) != bool:

+        #If the type is boolean, it is already a valid value

+        #If the type is not boolean, and is not int, then it is not valid

+        if type(sendiscOnSave) != int:

+            raise TypeError("SEND_ISC_ON_SAVE not an int or boolean: " + repr(sendiscOnSave))

+        #Verify send ISC on save is of valid value

+        elif not ((sendiscOnSave == 0) or (sendiscOnSave == 1)):

+            raise ValueError("SEND_ISC_ON_SAVE is: " + repr(sendiscOnSave) + ", but expected True, False, 0 or 1")

+    

+    if type(sendiscOnPublish) != bool:

+        #If the type is boolean, it is already a valid value

+        #If the type is not boolean, and is not int, then it is not valid

+        if type(sendiscOnPublish) != int:

+            raise TypeError("SEND_ISC_ON_PUBLISH not an int or boolean: " + repr(sendiscOnPublish))

+        #Verify send ISC on publish is of valid value

+        elif not ((sendiscOnPublish == 0) or (sendiscOnPublish == 1)):

+            raise ValueError("SEND_ISC_ON_PUBLISH is: " + repr(sendiscOnPublish) + ", but expected True, False, 0 or 1")

+

+    if type(transmitScript) not in [str, type(None)]:

+        raise TypeError("TRANSMIT_SCRIPT not None or str: " + repr(transmitScript))

+    elif transmitScript is None:

+        transmitScript = ""

+

+    return serverhost, mhsid, \

+      port, initmodules, accumElem, \

+      initskips, d2ddbver, logfilepurge, prddir, home,\

+      extraWEPrecision, vtecRequestTime, \

+      autoConfigureNotifyTextProd, \

+      iscRoutingTableAddress, reqISCsites, requestISC, sendiscOnSave, \

+      sendiscOnPublish, reqISCparms, transmitScript

+

+def parseAdditionalISCRouting(iscRoutingData):

+    from com.raytheon.edex.plugin.gfe.config import ISCRoutingConfig

+    

+    retVal = ArrayList()

+    if iscRoutingData:

+        try:

+            iter(iscRoutingData)

+        except TypeError:

+            raise TypeError("AdditionalISCRouting should be a list or tuple.")

+        

+        for entry in iscRoutingData:

+            (pyParms, dbName, editAreaPrefix) = check(entry, (list, str, str), "AdditionalISCRouting entry not in correct format.")

+            javaParms = ArrayList()

+            for parm in pyParms:

+                javaParms.add(str(parm[0]))

+            retVal.add(ISCRoutingConfig(javaParms, dbName, editAreaPrefix))

+    

+    return retVal

diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/IrtAccess.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/IrtAccess.py
index 83db430b70..de47ffb475 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/IrtAccess.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/IrtAccess.py
@@ -1,663 +1,663 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
+##
+# This software was developed and / or modified by Raytheon Company,
+# pursuant to Contract DG133W-05-CQ-1067 with the US Government.

+# 

+# U.S. EXPORT CONTROLLED TECHNICAL DATA

+# This software product contains export-restricted data whose
+# export/transfer/disclosure is restricted by U.S. law. Dissemination
+# to non-U.S. persons whether in the United States or abroad requires
+# an export license or other authorization.
 # 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-# ----------------------------------------------------------------------------
-# This software is in the public domain, furnished "as is", without technical
-# support, and with no warranty, express or implied, as to its usefulness for
-# any purpose.
-#
-# Author: mathewson
-# ----------------------------------------------------------------------------
-##
-#
-# This class provides interfaces to the ISC Routing Table Web Service.
-#
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    12/10/14        #4953         randerso       Cleaned up imports, 
-#                                                 improved spawning of shell cmd 
-#    03/10/2015      #4129         randerso       Refactored server selection code into a reusable method
-#    07/11/2016      #5774         randerso       Change to send WFO message to all active
-#                                                 IFP servers, not just "best"
-#    10/31/2016      #5979         njensen        Cast to primitives for compatibility
-#
-##
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-from xml.etree import ElementTree
-from xml.etree.ElementTree import Element, SubElement
-import socket
-import urllib, urllib2, time, os, copy, string
-import LogStream
-import JUtil
-import iscUtil
-
-class IrtAccess():
-
-    #Constructor taking the web URL for the ISC Routing Table
-    def __init__(self, ancfURL=None, bncfURL=None, logger=None):
-        self.__addrs = {}
-        self.__addrs['ANCF'] = ancfURL
-        self.__addrs['BNCF'] = bncfURL
-        self.__registered = None   #flag to indicate whether we registered
-        if logger is not None:
-            self.__logger=logger
-        else:
-            self.__logger=iscUtil.getLogger("irtAccess","irtServer.log")
-
-    def logEvent(self,*msg):
-        self.__logger.info(iscUtil.tupleToString(*msg))
-    
-    def logProblem(self,*msg):
-        self.__logger.error(iscUtil.tupleToString(*msg))
-        
-    def logException(self,*msg):
-        self.__logger.exception(iscUtil.tupleToString(*msg))    
-    
-    def logVerbose(self,*msg):
-        self.__logger.debug(iscUtil.tupleToString(*msg))
-        
-    def logDebug(self,*msg):
-        self.logVerbose(iscUtil.tupleToString(*msg))
-
-
-    def __checkArgs(self,parmsWanted, gridDims,gridBoundBox, iscWfosWanted):
-        
-        if type(parmsWanted) is not list:
-            parmsWanted = JUtil.javaStringListToPylist(parmsWanted)
-
-        if type(gridDims) is not list:
-            pylist = []
-            size = gridDims.size() 
-            for i in range(size):
-                pylist.append(int(gridDims.get(i)))
-            gridDims = pylist
-        
-        if type(gridBoundBox) is not tuple:
-            gridBoundBox = ((float(gridBoundBox.get(0)), float(gridBoundBox.get(1))), (float(gridBoundBox.get(2)), float(gridBoundBox.get(3))))
-
-        if type(iscWfosWanted) is not list:
-            iscWfosWanted = JUtil.javaStringListToPylist(iscWfosWanted)
-        
-        
-        return parmsWanted, gridDims, gridBoundBox, iscWfosWanted
-        
-    # Registration call for the ISC Routing Table.  Returns True if okay, i.e.,
-    # you are registered.
-    def register(self, mhsid, serverHost, serverPort, serverProtocol,
-      site, parmsWanted, gridDims, gridProj, gridBoundBox, iscWfosWanted):
-        
-        parmsWanted, gridDims,gridBoundBox, iscWfosWanted = self.__checkArgs(parmsWanted, gridDims,gridBoundBox, iscWfosWanted)
-        
-        if self.__registered is not None:
-            self.unregister(self.__registered)  #unregister, then reset table
-            self.__regInfo = None
-            self.__timeToReRegister = None
-
-        # shorten parmsWanted list, i.e., don't need the "_SFC".
-        for x in xrange(len(parmsWanted)):
-            idx = parmsWanted[x].find("_SFC")
-            if idx != -1:
-                parmsWanted[x] = parmsWanted[x][0:idx]
-
-        # set up registration information
-        self.__regInfo = {'mhsid': mhsid, 'server': serverHost,
-          'port': serverPort, 'protocol': serverProtocol, 'site': site,
-          'parms': ",".join(parmsWanted), 'dims': self.__listConvert(gridDims),
-          'proj': gridProj, 'Bbox': self.__nestedTupleConvert(gridBoundBox),
-          'wfos': ",".join(iscWfosWanted)}
-
-        # set up unregistration information
-        self.__unregInfo = {'mhsid': mhsid, 'server': serverHost,
-          'port': serverPort, 'protocol': serverProtocol,
-          'wfos': ",".join(iscWfosWanted), 'site': site}
-
-        okay = self.__doRegister()   #perform registration
-        return okay
-
-
-    # Call made by ifpServer to check and perform re-registration.
-    # Returns True if no errors.
-    def checkForReregister(self):
-        if not self.__registered:
-            return True  #do nothing if not registered
-        # check for change of IRT web service
-        if self.__baseURL() != self.__registered:
-            self.unregister(self.__registered)
-            return self.__doRegister()
-        # check for time to re-register
-        if time.time() >= self.__timeToReRegister:
-            return self.__doRegister()
-        return True
-
-    # Call to unregister from the ISC Routing Table.  Returns True if
-    # successful, false otherwise. The irtAddress overrides the calculated
-    # IRT web address if present.
-    def unregister(self, irtAddress=None):
-        if not self.__registered:
-            return True   #do nothing if never registered
-
-        #now unregister
-        status, xml, transIRT = self.__callIRT('unregister', self.__unregInfo, 
-          irtAddress)
-        if status is False:
-            return False
-
-        # decode XML, read status
-        try:
-            tree = ElementTree.ElementTree(ElementTree.XML(xml))
-        except:
-            self.logProblem("Malformed XML on unregister: ", xml)
-            return False
-
-        element = tree.getroot()
-        if element is None:
-            self.logProblem("status tag missing in XML for unregister")
-            return False
-        status = None
-        for attr, value in element.items():
-            if attr == 'ok':
-                status = value
-                break
-        if status is None:
-            self.logProblem("ok attr missing in status tag for unregister")
-            return False
-        if status == "1":
-            self.__registered = None  #reset registration flag
-            self.logEvent("Unregistered from IRT")
-            return True
-        else:
-            self.logProblem("Error on unregistration", element.text)
-            return False
-
-    # routing to get the list of destination servers that are active for
-    # the given domain. Returns status flag and XML string.
-    def getSendAddrs(self, sourceDomain):
-        sourceDomainDict = {'wfoid': sourceDomain}
-        status, xml, transIRT = self.__callIRT('getaddrs', sourceDomainDict)
-        return status, xml
-
-    # routine to get the list of servers that are active for the given list
-    # of domains. Returns status flag and XML string.
-    def getServers(self, wfos):
-        if type(wfos) is not list:
-            wfos = JUtil.javaStringListToPylist(wfos)
-        wfoDict  = {'wfoids': ",".join(wfos)}
-        status, xml, transIRT = self.__callIRT('getservers', wfoDict)
-        return status, xml
-
-    # registers with the ISC routing table.  Returns True if successful,
-    # False otherwise.
-    def __doRegister(self):
-        status, xml, transIRT = self.__callIRT('register', self.__regInfo)
-        if status is False:
-            return False
-
-        # decode XML
-        try:
-            tree = ElementTree.ElementTree(ElementTree.XML(xml))
-        except:
-            self.logProblem("Malformed XML on register: ", xml)
-            return False
-
-        element = tree.getroot()   #status tag is the root tag
-        if element is None:
-            self.logProblem("status tag missing in XML for register")
-            return False
-        ok = None
-        for attr, value in element.items():
-            if attr == 'ok':
-                ok = value
-                break
-        if ok is None:
-            self.logProblem("ok field missing in status tag for register")
-            return False
-
-        # get the ok flag
-        if ok == "1":
-            isoTimeStr = element.text
-            idx = isoTimeStr.find(".")
-            if idx != -1:
-                isoTimeStr = isoTimeStr[0:idx]  #eliminate sub-seconds if any
-            try:
-                # switch to GMT0 for time conversions
-                prevTZ = os.environ.get('TZ', None)
-                os.environ['TZ'] = "GMT0"
-                time.tzset()
-                
-                # Fix to correct importing of the time.strptime method
-                importError = True
-                while importError:
-                    try:
-                        self.__timeToReRegister = time.mktime(time.strptime(isoTimeStr,"%Y-%m-%dT%H:%M:%S"))
-                        importError=False
-                    except ImportError:
-                        importError = True
-                
-                # reset TZ environment variable to previous state
-                if prevTZ:
-                    os.environ['TZ'] = prevTZ
-                    time.tzset()
-            except ValueError:
-                self.logProblem("time string has bad format", isoTimeStr)
-                return False
-            self.__registered = transIRT   #set registration flag
-            self.logEvent("IRT Registration Successful. ",
-              "Re-register time: ",
-              time.asctime(time.gmtime(float(self.__timeToReRegister))))
-            return True   #okay registration
-        else:
-            self.logProblem("Error on registration: ", element.text)
-            return False
-
-    # returns the appropriate (id, url) for the IRT web service.
-    def __baseURL(self):
-        statusFile = '/data/mhs/ncfstatus'
-        #statusFile = '/scratch/ncfstatus'
-        ncf = "ANCF"
-        try:
-            fp = open(statusFile, 'rb')
-            ncf = fp.read()
-            ncf = ncf.strip().upper()
-        except IOError, e:
-            pass
-            #self.logProblem("Can't read NCF status file: ", statusFile,
-            #  "assuming ANCF...")
-        return ncf, self.__addrs.get(ncf)
-
-    # makes call to ISC routing Table service, calling the given function,
-    # with the given attributes (dictionary). Returns the status of the
-    # call (bool), the XML returned, and the IRT address (id, url). 
-    # The optional IRTAddress is used to force the call to a specific 
-    # IRT (id, url).
-    def __callIRT(self, function, attributes, irtAddress=None):
-        retries = 0
-        tDuration = 0.000
-        startT = time.time()
-        #use normal method to calculate IRT address
-        if irtAddress is None:
-            irtid, url = self.__baseURL()
-            acturl = url + "/" + function
-        else:
-            irtid, url = irtAddress
-            acturl = url + "/" + function
-
-        data = urllib.urlencode(attributes)
-        while True:
-            try:
-                prevtimeout = socket.setdefaulttimeout(60.0)
-                #check for update of ANCF/BNCF
-                if irtAddress is None:
-                    irtid, url = self.__baseURL()
-                    acturl = url + "/" + function
-                fd = urllib2.urlopen(acturl, data)
-                xml = fd.read()
-                fd.close()
-                socket.setdefaulttimeout(prevtimeout)
-                break
-            except urllib2.URLError, e:
-                problem = "URLError"
-                problem1 = e
-            except urllib2.HTTPError, e:
-                problem = "HTTPError"
-                problem1 = e
-            except IOError, e:
-                problem = "IOError"
-                problem1 = e
-            except Exception, e:
-                problem = "Exception"
-                problem1 = e
-
-            #failed transaction
-            endT = time.time()
-            tDuration = endT - startT
-            self.logProblem("IRT access: ", problem, function,
-              "t=%-7.3f" % tDuration, "retries=%-1d" % retries, 
-              "IRT=[%s %s]" % (irtid, url), attributes, problem1)
-
-            #try again?
-            retries = retries + 1
-            if retries > 5:
-                socket.setdefaulttimeout(prevtimeout)
-                return False, "", (irtid, url)    #complete failure
-
-
-        #successful transaction
-        endT = time.time()
-        tDuration = endT - startT
-        self.logEvent("IRT access: okay", function,
-          "t=%-7.3f" % tDuration, "retries=%-1d" % retries, 
-          "IRT=[%s %s]" % (irtid, url), attributes)
-        self.logDebug("XML: ", xml)
-        return True, xml, (irtid, url)
-
-    # list convert to comma-deliminated string
-    def __listConvert(self, a):
-        s = ""
-        for x in xrange(len(a)):
-            if len(s):
-                s += "," + `a[x]`
-            else:
-                s += `a[x]`
-        return s
-
-    # domain (x,y),(xe,ye) convert to comma-deliminated string
-    def __nestedTupleConvert(self, a):
-        s = ''
-        for x in xrange(len(a)):
-            for y in xrange(len(a[x])):
-                if len(s):
-                    s += "," + `a[x][y]`
-                else:
-                    s += `a[x][y]`
-        return s
-
-    #----------------------------------------------------------------------
-    # Utility Routines ----------------------------------------------------
-    #----------------------------------------------------------------------
-    def addAddressXML(self, root, serverInfo):
-        #adds the address XML with the source server information
-        #to the root XML tree. Input server information is a dict with
-        #following keys: "mhsid",'host','port','protocol','site'
-        # Returns element for address (in case additional info is required)
-        addressE = SubElement(root, 'address')
-        mhsidE = SubElement(addressE, 'mhsid')
-        mhsidE.text = serverInfo.get('mhsid', "?")
-        serverE = SubElement(addressE, 'server')
-        serverE.text = serverInfo.get('host', "?")
-        portE = SubElement(addressE, 'port')
-        portE.text = str(serverInfo.get('port', "?"))
-        protocolE = SubElement(addressE, 'protocol')
-        protocolE.text = str(serverInfo.get('protocol', "?"))
-        siteE = SubElement(addressE, 'site')
-        siteE.text = serverInfo.get('site', "?")
-
-        #optional components "location" "area" "welist"
-        if serverInfo.has_key('domain') and serverInfo['domain'] is not None:
-            d = serverInfo['domain']
-            locationE = SubElement(addressE, 'location', proj=d['proj'],
-              origx=str(d['origx']), origy=str(d['origy']),
-              extx=str(d['extx']), exty=str(d['exty']))
-        if serverInfo.has_key('area') and serverInfo['area'] is not None:
-            d = serverInfo['area']
-            areaE = SubElement(addressE, 'area', xdim=str(d['xdim']),
-              ydim=str(d['ydim']))
-        if serverInfo.has_key('parms') and serverInfo['parms'] is not None:
-            parms = serverInfo['parms']
-            self.addWelistXML(addressE, parms)
-
-        return addressE
-
-    def addSourceXML(self, root, serverInfo):
-        #adds the source XML with the source server information to the root
-        #XML tree. Input server information is a dict with
-        #following keys: "mhsid",'host','port','protocol','site'
-        #Returns the "source" element and the "address" element.
-        sourcesE = SubElement(root, 'source')
-        addressE = self.addAddressXML(sourcesE, serverInfo)
-        return sourcesE, addressE
-
-    def addDestinationXML(self, root, serverInfos):
-        #adds the destinationXML and server information to the XML root.
-        # Input server information is a list of dicts with
-        #following keys: "mhsid",'host','port','protocol','site'
-        # Returns the destinations elment.
-        destinationsE= SubElement(root, 'destinations')
-        for serverInfo in serverInfos:
-            self.addAddressXML(destinationsE, serverInfo)
-        return destinationsE
-
-    def addWelistXML(self, root, parms):
-        #adds the welist and parms to the XML root. Returns the welist
-        #element.
-        welistE = SubElement(root, 'welist')
-        for parm in parms:
-            parmE = SubElement(welistE, 'parm')
-            parmE.text = parm
-        return welistE
-
-    def decodeXMLAddress(self, element):
-        #decodes the address element which identifies the server
-        #Returns None if not address tag, returns None as part of the
-        #tuple return if that value is not defined. Otherwise returns
-        #a dict with keys "mhsid",'host','port','protocol','site','parms',
-        #'domain'.
-        dict = {}
-        if element.tag != "address":
-            return None   #not address tag
-        parms = None
-        for attrE in element:
-            if attrE.tag == "mhsid":
-                dict['mhsid'] = attrE.text
-            elif attrE.tag == "server":
-                dict['host']  = attrE.text
-            elif attrE.tag == "port":
-                dict['port']  = attrE.text
-            elif attrE.tag == "protocol":
-                dict['protocol']  = attrE.text
-            elif attrE.tag == "site":
-                dict['site']  = attrE.text
-            elif attrE.tag == "welist":
-                parmsE = attrE.getchildren()
-                for parmE in parmsE:
-                   if parms is None:
-                       parms = []
-                   if parmE.tag not in parms:
-                       parms.append(parmE.text)
-            elif attrE.tag == "location":
-                domain = {}
-                for key, value in attrE.items():
-                    domain[key] = value
-                dict['domain'] = domain
-            elif attrE.tag == "area":
-                size = {}
-                for key, value in attrE.items():
-                    size[key] = value
-                dict['area'] = size
-        dict['parms'] = parms
-        return dict
-
-    def transmitFiles(self, subject, addresses, sourceWfo, attachments,
-      xmtScript):
-        # assembles the command and executes it.
-        # determine MHS WMO id for this message
-        wmoid = "TTAA00 "
-        if sourceWfo in ['SJU']:
-            wmoid += "TJSJ"
-        elif sourceWfo in ['AFG', 'AJK', 'HFO', 'GUM']:
-            wmoid += "P" + sourceWfo
-        elif sourceWfo in ['AER', 'ALU']:
-            wmoid += "PAFC"
-        elif len(sourceWfo) == 3:
-            wmoid += "K" + sourceWfo
-        elif len(sourceWfo) == 4:
-            wmoid += sourceWfo
-        else:
-            wmoid += "XXXX"
-        wmoid += " " + time.strftime("%d%H%M", time.gmtime(time.time()))
-
-    # Transmit files - do string substitution
-        if xmtScript is not None: 
-            cmd = copy.deepcopy(xmtScript)
-            cmd = string.replace(cmd, "%SUBJECT", subject)
-            cmd = string.replace(cmd, "%ADDRESSES", ",".join(addresses))
-            cmd = string.replace(cmd, "%WMOID", "'" + wmoid + "'")
-            cmd = string.replace(cmd, "%ATTACHMENTS", ",".join(attachments))
-
-            self.logEvent("Transmit: ", cmd) 
-            import siteConfig
-            from subprocess import Popen,PIPE
-            output,err = Popen(cmd, shell=True, stdout=PIPE,stderr=PIPE).communicate()
-            if output.find(siteConfig.GFESUITE_MHSID+"-") == -1:
-                alertMsg = "ISC Send failed transmission to : "+",".join(addresses)+" --> "+output+" "+err
-                self.logProblem(alertMsg)
-            else:
-                self.logEvent(output.rstrip())
-                if len(err) > 0:
-                    self.logProblem(err.rstrip())
-                alertMsg="ISC data successfully transmitted to: "+",".join(addresses)
-                self.logEvent(alertMsg)
-
-        for file in attachments: 
-            try:
-                os.remove(file)
-            except OSError:
-                self.logException("Error removing file: "+file)
-
-    def printServerInfo(self, serverInfo):
-        # assembles a string to print out the server information.  serverInfo
-        # is a dict with keys "mhsid",'host','port','protocol','site'.
-        # Returns the string to print. All input values are strings.
-        mhsid = serverInfo.get('mhsid', '?')
-        host = serverInfo.get('host', '?')
-        port = serverInfo.get('port', '?')
-        protocol = serverInfo.get('protocol', '?')
-        site = serverInfo.get('site', '?')
-        s = "mhs=" + mhsid + ",host=" + host + ",port=" + port +\
-          ",proto=" + protocol + ",site=" + site
-        return s
-    
-    def createDestinationXML(self, destSites, requestingServer, findBestMatch=True):
-        #--------------------------------------------------------------------
-        # Assemble XML source/destination document
-        #--------------------------------------------------------------------
-        iscE = ElementTree.Element('isc')
-        self.addSourceXML(iscE, requestingServer)
-        self.logEvent("Requesting Server:", self.printServerInfo(requestingServer))
-    
-        # who is running the domains requested?
-        status, xml = self.getServers(destSites)
-        if not status:
-            raise Exception('Failure to getServers from IRT')
-    
-        # decode the XML
-        try:
-            serverTree = ElementTree.ElementTree(ElementTree.XML(xml))
-            serversE = serverTree.getroot()
-        except:
-            self.logException("Malformed XML from getServers()")
-            raise
-    
-        if serversE.tag != "servers":
-            raise Exception("Servers packet missing from web server")
-    
-        # process each requested domain returned to us
-        msgSendDest = []
-        chosenServers = []
-        matchingServers = []
-        for domainE in serversE:
-            if domainE.tag != "domain":
-                continue
-            
-            domain = domainE.get('site')
-            servers = []  #list of servers for this domain
-    
-            # decode each server in the domain
-            for addressE in domainE.getchildren():
-                info = self.decodeXMLAddress(addressE)
-                if info is None:
-                    continue   #not address tag
-                servers.append(info)
-                matchingServers.append(info)
-    
-            # server search list in priority.  The px3 entries are used for
-            # dual domain for AFC.
-            hp = [('dx4','98000000'),('px3', '98000000'), ('dx4','98000001'),
-              ('px3', '98000001')]
-    
-            if findBestMatch:
-                # choose one server from this domain, find first dx4, 98000000
-                # try to use one with the same mhsidDest as the site, which
-                # would be the primary operational GFE. Note that the px3 entries
-                # are for AFC.
-                found = False
-                for matchServer, matchPort in hp:
-                    if found:
-                        break        
-                    for server in servers:
-                        if server['host'][0:3] == matchServer and \
-                          server['port'] == matchPort and server['mhsid'] == domain:
-                            chosenServers.append(server)
-                            if server['mhsid'] not in msgSendDest:
-                                msgSendDest.append(server['mhsid'])
-                            found = True
-                            break
-        
-                # find first dx4, 98000000, but perhaps a different mhsid
-                # this is probably not the primary operational GFE
-                for matchServer, matchPort in hp:
-                    if found:
-                        break        
-                        for server in servers:
-                            if server['host'][0:3] == matchServer and \
-                              server['port'] == matchPort:
-                                chosenServers.append(server)
-                                if server['mhsid'] not in msgSendDest:
-                                    msgSendDest.append(server['mhsid'])
-                                found = True
-                                break
-        
-                # if didn't find standard one, then take the first one, but don't
-                # take ourselves unless we are the only one.
-                if not found and servers:
-                    for server in servers:
-                        if server['mhsid'] != requestingServer['mhsid'] \
-                          and server['host'] != requestingServer['host'] \
-                          and server['port'] != requestingServer['port'] \
-                          and server['site'] != requestingServer['site']:
-                            chosenServers.append(server)
-                            if server['mhsid'] not in msgSendDest:
-                                msgSendDest.append(server['mhsid'])
-                            found = True
-                            break;
-                            
-                    if not found:
-                        chosenServers.append(servers[0])
-                        if servers[0]['mhsid'] not in msgSendDest:
-                            msgSendDest.append(servers[0]['mhsid'])
-            else:
-                for server in matchingServers:
-                    chosenServers.append(server)
-                    if server['mhsid'] not in msgSendDest:
-                        msgSendDest.append(server['mhsid'])
-    
-
-        # Display the set of matching servers
-        s = "Matching Servers:"
-        for x in matchingServers:
-            s += "\n" + self.printServerInfo(x)
-        self.logEvent(s)
-    
-        # Display the chosen set of servers
-        s = "Chosen Servers:"
-        for x in chosenServers:
-            s += "\n" + self.printServerInfo(x)
-        self.logEvent(s)
-    
-        self.addDestinationXML(iscE, chosenServers)
-    
-        return msgSendDest, iscE
+# Contractor Name:        Raytheon Company

+# Contractor Address:     6825 Pine Street, Suite 340

+#                         Mail Stop B8

+#                         Omaha, NE 68106

+#                         402.291.0100

+# 

+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
+# further licensing information.
+##
+# ----------------------------------------------------------------------------
+# This software is in the public domain, furnished "as is", without technical
+# support, and with no warranty, express or implied, as to its usefulness for
+# any purpose.
+#
+# Author: mathewson
+# ----------------------------------------------------------------------------
+##
+#
+# This class provides interfaces to the ISC Routing Table Web Service.
+#
+#    
+#     SOFTWARE HISTORY
+#    
+#    Date            Ticket#       Engineer       Description
+#    ------------    ----------    -----------    --------------------------
+#    12/10/14        #4953         randerso       Cleaned up imports, 
+#                                                 improved spawning of shell cmd 
+#    03/10/2015      #4129         randerso       Refactored server selection code into a reusable method
+#    07/11/2016      #5774         randerso       Change to send WFO message to all active
+#                                                 IFP servers, not just "best"
+#    10/31/2016      #5979         njensen        Cast to primitives for compatibility
+#
+##
+
+##
+# This is a base file that is not intended to be overridden.
+##
+
+
+
from xml.etree import ElementTree
from xml.etree.ElementTree import Element, SubElement
import socket
# collapsed duplicate urllib submodule imports (request/parse/error were
# each listed twice on the original line)
import urllib.request, urllib.parse, urllib.error
import time, os, copy, string
import LogStream
import JUtil
import iscUtil
+
+class IrtAccess():
+
+    #Constructor taking the web URL for the ISC Routing Table
+    def __init__(self, ancfURL=None, bncfURL=None, logger=None):
+        self.__addrs = {}
+        self.__addrs['ANCF'] = ancfURL
+        self.__addrs['BNCF'] = bncfURL
+        self.__registered = None   #flag to indicate whether we registered
+        if logger is not None:
+            self.__logger=logger
+        else:
+            self.__logger=iscUtil.getLogger("irtAccess","irtServer.log")
+
+    def logEvent(self,*msg):
+        self.__logger.info(iscUtil.tupleToString(*msg))
+    
+    def logProblem(self,*msg):
+        self.__logger.error(iscUtil.tupleToString(*msg))
+        
+    def logException(self,*msg):
+        self.__logger.exception(iscUtil.tupleToString(*msg))    
+    
+    def logVerbose(self,*msg):
+        self.__logger.debug(iscUtil.tupleToString(*msg))
+        
+    def logDebug(self,*msg):
+        self.logVerbose(iscUtil.tupleToString(*msg))
+
+
+    def __checkArgs(self,parmsWanted, gridDims,gridBoundBox, iscWfosWanted):
+        
+        if type(parmsWanted) is not list:
+            parmsWanted = JUtil.javaStringListToPylist(parmsWanted)
+
+        if type(gridDims) is not list:
+            pylist = []
+            size = gridDims.size() 
+            for i in range(size):
+                pylist.append(int(gridDims.get(i)))
+            gridDims = pylist
+        
+        if type(gridBoundBox) is not tuple:
+            gridBoundBox = ((float(gridBoundBox.get(0)), float(gridBoundBox.get(1))), (float(gridBoundBox.get(2)), float(gridBoundBox.get(3))))
+
+        if type(iscWfosWanted) is not list:
+            iscWfosWanted = JUtil.javaStringListToPylist(iscWfosWanted)
+        
+        
+        return parmsWanted, gridDims, gridBoundBox, iscWfosWanted
+        
    # Registration call for the ISC Routing Table.  Returns True if okay, i.e.,
    # you are registered.
    def register(self, mhsid, serverHost, serverPort, serverProtocol,
      site, parmsWanted, gridDims, gridProj, gridBoundBox, iscWfosWanted):
        # Registers this ifpServer with the IRT web service.
        #
        # mhsid/serverHost/serverPort/serverProtocol/site: identify this
        #   server to the IRT.
        # parmsWanted: list (python or Java) of weather element names.
        # gridDims: [x, y] grid dimensions (python or Java list).
        # gridProj: projection identifier.
        # gridBoundBox: ((x, y), (xe, ye)) domain extents.
        # iscWfosWanted: WFO ids from which ISC data is wanted.
        # Returns True when the registration round-trip succeeded.

        parmsWanted, gridDims,gridBoundBox, iscWfosWanted = self.__checkArgs(parmsWanted, gridDims,gridBoundBox, iscWfosWanted)

        # if already registered, drop the old registration first and clear
        # the cached registration state before building the new one
        if self.__registered is not None:
            self.unregister(self.__registered)  #unregister, then reset table
            self.__regInfo = None
            self.__timeToReRegister = None

        # shorten parmsWanted list, i.e., don't need the "_SFC".
        for x in range(len(parmsWanted)):
            idx = parmsWanted[x].find("_SFC")
            if idx != -1:
                parmsWanted[x] = parmsWanted[x][0:idx]

        # set up registration information (payload for the 'register' call)
        self.__regInfo = {'mhsid': mhsid, 'server': serverHost,
          'port': serverPort, 'protocol': serverProtocol, 'site': site,
          'parms': ",".join(parmsWanted), 'dims': self.__listConvert(gridDims),
          'proj': gridProj, 'Bbox': self.__nestedTupleConvert(gridBoundBox),
          'wfos': ",".join(iscWfosWanted)}

        # set up unregistration information (payload for a later 'unregister')
        self.__unregInfo = {'mhsid': mhsid, 'server': serverHost,
          'port': serverPort, 'protocol': serverProtocol,
          'wfos': ",".join(iscWfosWanted), 'site': site}

        okay = self.__doRegister()   #perform registration
        return okay
+
+
+    # Call made by ifpServer to check and perform re-registration.
+    # Returns True if no errors.
+    def checkForReregister(self):
+        if not self.__registered:
+            return True  #do nothing if not registered
+        # check for change of IRT web service
+        if self.__baseURL() != self.__registered:
+            self.unregister(self.__registered)
+            return self.__doRegister()
+        # check for time to re-register
+        if time.time() >= self.__timeToReRegister:
+            return self.__doRegister()
+        return True
+
+    # Call to unregister from the ISC Routing Table.  Returns True if
+    # successful, false otherwise. The irtAddress overrides the calculated
+    # IRT web address if present.
+    def unregister(self, irtAddress=None):
+        if not self.__registered:
+            return True   #do nothing if never registered
+
+        #now unregister
+        status, xml, transIRT = self.__callIRT('unregister', self.__unregInfo, 
+          irtAddress)
+        if status is False:
+            return False
+
+        # decode XML, read status
+        try:
+            tree = ElementTree.ElementTree(ElementTree.XML(xml))
+        except:
+            self.logProblem("Malformed XML on unregister: ", xml)
+            return False
+
+        element = tree.getroot()
+        if element is None:
+            self.logProblem("status tag missing in XML for unregister")
+            return False
+        status = None
+        for attr, value in list(element.items()):
+            if attr == 'ok':
+                status = value
+                break
+        if status is None:
+            self.logProblem("ok attr missing in status tag for unregister")
+            return False
+        if status == "1":
+            self.__registered = None  #reset registration flag
+            self.logEvent("Unregistered from IRT")
+            return True
+        else:
+            self.logProblem("Error on unregistration", element.text)
+            return False
+
+    # routing to get the list of destination servers that are active for
+    # the given domain. Returns status flag and XML string.
+    def getSendAddrs(self, sourceDomain):
+        sourceDomainDict = {'wfoid': sourceDomain}
+        status, xml, transIRT = self.__callIRT('getaddrs', sourceDomainDict)
+        return status, xml
+
+    # routine to get the list of servers that are active for the given list
+    # of domains. Returns status flag and XML string.
+    def getServers(self, wfos):
+        if type(wfos) is not list:
+            wfos = JUtil.javaStringListToPylist(wfos)
+        wfoDict  = {'wfoids': ",".join(wfos)}
+        status, xml, transIRT = self.__callIRT('getservers', wfoDict)
+        return status, xml
+
    # registers with the ISC routing table.  Returns True if successful,
    # False otherwise.
    def __doRegister(self):
        # Performs the actual 'register' transaction with the IRT and
        # decodes the response.  On success the response text is an ISO
        # date/time telling us when to re-register; it is parsed (in GMT)
        # into self.__timeToReRegister and self.__registered is set to the
        # (id, url) of the IRT we registered with.
        status, xml, transIRT = self.__callIRT('register', self.__regInfo)
        if status is False:
            return False

        # decode XML
        try:
            tree = ElementTree.ElementTree(ElementTree.XML(xml))
        except:
            self.logProblem("Malformed XML on register: ", xml)
            return False

        element = tree.getroot()   #status tag is the root tag
        if element is None:
            self.logProblem("status tag missing in XML for register")
            return False
        # pull the ok="0|1" attribute off the <status> element
        ok = None
        for attr, value in list(element.items()):
            if attr == 'ok':
                ok = value
                break
        if ok is None:
            self.logProblem("ok field missing in status tag for register")
            return False

        # get the ok flag
        if ok == "1":
            # element text is the re-registration time, ISO format
            isoTimeStr = element.text
            idx = isoTimeStr.find(".")
            if idx != -1:
                isoTimeStr = isoTimeStr[0:idx]  #eliminate sub-seconds if any
            try:
                # switch to GMT0 for time conversions
                prevTZ = os.environ.get('TZ', None)
                os.environ['TZ'] = "GMT0"
                time.tzset()
                
                # Fix to correct importing of the time.strptime method
                # NOTE(review): legacy workaround -- retries strptime until
                # no ImportError occurs (apparently a threaded first-import
                # quirk).  If the ImportError were permanent this would spin
                # forever; confirm whether it is still needed.
                importError = True
                while importError:
                    try:
                        self.__timeToReRegister = time.mktime(time.strptime(isoTimeStr,"%Y-%m-%dT%H:%M:%S"))
                        importError=False
                    except ImportError:
                        importError = True
                
                # reset TZ environment variable to previous state
                if prevTZ:
                    os.environ['TZ'] = prevTZ
                    time.tzset()
            except ValueError:
                self.logProblem("time string has bad format", isoTimeStr)
                return False
            self.__registered = transIRT   #set registration flag
            self.logEvent("IRT Registration Successful. ",
              "Re-register time: ",
              time.asctime(time.gmtime(float(self.__timeToReRegister))))
            return True   #okay registration
        else:
            self.logProblem("Error on registration: ", element.text)
            return False
+
+    # returns the appropriate (id, url) for the IRT web service.
+    def __baseURL(self):
+        statusFile = '/data/mhs/ncfstatus'
+        #statusFile = '/scratch/ncfstatus'
+        ncf = "ANCF"
+        try:
+            fp = open(statusFile, 'rb')
+            ncf = fp.read()
+            ncf = ncf.strip().upper()
+        except IOError as e:
+            pass
+            #self.logProblem("Can't read NCF status file: ", statusFile,
+            #  "assuming ANCF...")
+        return ncf, self.__addrs.get(ncf)
+
+    # makes call to ISC routing Table service, calling the given function,
+    # with the given attributes (dictionary). Returns the status of the
+    # call (bool), the XML returned, and the IRT address (id, url). 
+    # The optional IRTAddress is used to force the call to a specific 
+    # IRT (id, url).
+    def __callIRT(self, function, attributes, irtAddress=None):
+        retries = 0
+        tDuration = 0.000
+        startT = time.time()
+        #use normal method to calculate IRT address
+        if irtAddress is None:
+            irtid, url = self.__baseURL()
+            acturl = url + "/" + function
+        else:
+            irtid, url = irtAddress
+            acturl = url + "/" + function
+
+        data = urllib.parse.urlencode(attributes)
+        while True:
+            try:
+                prevtimeout = socket.setdefaulttimeout(60.0)
+                #check for update of ANCF/BNCF
+                if irtAddress is None:
+                    irtid, url = self.__baseURL()
+                    acturl = url + "/" + function
+                fd = urllib.request.urlopen(acturl, data)
+                xml = fd.read()
+                fd.close()
+                socket.setdefaulttimeout(prevtimeout)
+                break
+            except urllib.error.URLError as e:
+                problem = "URLError"
+                problem1 = e
+            except urllib.error.HTTPError as e:
+                problem = "HTTPError"
+                problem1 = e
+            except IOError as e:
+                problem = "IOError"
+                problem1 = e
+            except Exception as e:
+                problem = "Exception"
+                problem1 = e
+
+            #failed transaction
+            endT = time.time()
+            tDuration = endT - startT
+            self.logProblem("IRT access: ", problem, function,
+              "t=%-7.3f" % tDuration, "retries=%-1d" % retries, 
+              "IRT=[%s %s]" % (irtid, url), attributes, problem1)
+
+            #try again?
+            retries = retries + 1
+            if retries > 5:
+                socket.setdefaulttimeout(prevtimeout)
+                return False, "", (irtid, url)    #complete failure
+
+
+        #successful transaction
+        endT = time.time()
+        tDuration = endT - startT
+        self.logEvent("IRT access: okay", function,
+          "t=%-7.3f" % tDuration, "retries=%-1d" % retries, 
+          "IRT=[%s %s]" % (irtid, url), attributes)
+        self.logDebug("XML: ", xml)
+        return True, xml, (irtid, url)
+
+    # list convert to comma-deliminated string
+    def __listConvert(self, a):
+        s = ""
+        for x in range(len(a)):
+            if len(s):
+                s += "," + repr(a[x])
+            else:
+                s += repr(a[x])
+        return s
+
+    # domain (x,y),(xe,ye) convert to comma-deliminated string
+    def __nestedTupleConvert(self, a):
+        s = ''
+        for x in range(len(a)):
+            for y in range(len(a[x])):
+                if len(s):
+                    s += "," + repr(a[x][y])
+                else:
+                    s += repr(a[x][y])
+        return s
+
+    #----------------------------------------------------------------------
+    # Utility Routines ----------------------------------------------------
+    #----------------------------------------------------------------------
+    def addAddressXML(self, root, serverInfo):
+        #adds the address XML with the source server information
+        #to the root XML tree. Input server information is a dict with
+        #following keys: "mhsid",'host','port','protocol','site'
+        # Returns element for address (in case additional info is required)
+        addressE = SubElement(root, 'address')
+        mhsidE = SubElement(addressE, 'mhsid')
+        mhsidE.text = serverInfo.get('mhsid', "?")
+        serverE = SubElement(addressE, 'server')
+        serverE.text = serverInfo.get('host', "?")
+        portE = SubElement(addressE, 'port')
+        portE.text = str(serverInfo.get('port', "?"))
+        protocolE = SubElement(addressE, 'protocol')
+        protocolE.text = str(serverInfo.get('protocol', "?"))
+        siteE = SubElement(addressE, 'site')
+        siteE.text = serverInfo.get('site', "?")
+
+        #optional components "location" "area" "welist"
+        if 'domain' in serverInfo and serverInfo['domain'] is not None:
+            d = serverInfo['domain']
+            locationE = SubElement(addressE, 'location', proj=d['proj'],
+              origx=str(d['origx']), origy=str(d['origy']),
+              extx=str(d['extx']), exty=str(d['exty']))
+        if 'area' in serverInfo and serverInfo['area'] is not None:
+            d = serverInfo['area']
+            areaE = SubElement(addressE, 'area', xdim=str(d['xdim']),
+              ydim=str(d['ydim']))
+        if 'parms' in serverInfo and serverInfo['parms'] is not None:
+            parms = serverInfo['parms']
+            self.addWelistXML(addressE, parms)
+
+        return addressE
+
+    def addSourceXML(self, root, serverInfo):
+        #adds the source XML with the source server information to the root
+        #XML tree. Input server information is a dict with
+        #following keys: "mhsid",'host','port','protocol','site'
+        #Returns the "source" element and the "address" element.
+        sourcesE = SubElement(root, 'source')
+        addressE = self.addAddressXML(sourcesE, serverInfo)
+        return sourcesE, addressE
+
+    def addDestinationXML(self, root, serverInfos):
+        #adds the destinationXML and server information to the XML root.
+        # Input server information is a list of dicts with
+        #following keys: "mhsid",'host','port','protocol','site'
+        # Returns the destinations elment.
+        destinationsE= SubElement(root, 'destinations')
+        for serverInfo in serverInfos:
+            self.addAddressXML(destinationsE, serverInfo)
+        return destinationsE
+
+    def addWelistXML(self, root, parms):
+        #adds the welist and parms to the XML root. Returns the welist
+        #element.
+        welistE = SubElement(root, 'welist')
+        for parm in parms:
+            parmE = SubElement(welistE, 'parm')
+            parmE.text = parm
+        return welistE
+
+    def decodeXMLAddress(self, element):
+        #decodes the address element which identifies the server
+        #Returns None if not address tag, returns None as part of the
+        #tuple return if that value is not defined. Otherwise returns
+        #a dict with keys "mhsid",'host','port','protocol','site','parms',
+        #'domain'.
+        dict = {}
+        if element.tag != "address":
+            return None   #not address tag
+        parms = None
+        for attrE in element:
+            if attrE.tag == "mhsid":
+                dict['mhsid'] = attrE.text
+            elif attrE.tag == "server":
+                dict['host']  = attrE.text
+            elif attrE.tag == "port":
+                dict['port']  = attrE.text
+            elif attrE.tag == "protocol":
+                dict['protocol']  = attrE.text
+            elif attrE.tag == "site":
+                dict['site']  = attrE.text
+            elif attrE.tag == "welist":
+                parmsE = attrE.getchildren()
+                for parmE in parmsE:
+                   if parms is None:
+                       parms = []
+                   if parmE.tag not in parms:
+                       parms.append(parmE.text)
+            elif attrE.tag == "location":
+                domain = {}
+                for key, value in list(attrE.items()):
+                    domain[key] = value
+                dict['domain'] = domain
+            elif attrE.tag == "area":
+                size = {}
+                for key, value in list(attrE.items()):
+                    size[key] = value
+                dict['area'] = size
+        dict['parms'] = parms
+        return dict
+
+    def transmitFiles(self, subject, addresses, sourceWfo, attachments,
+      xmtScript):
+        # assembles the command and executes it.
+        # determine MHS WMO id for this message
+        wmoid = "TTAA00 "
+        if sourceWfo in ['SJU']:
+            wmoid += "TJSJ"
+        elif sourceWfo in ['AFG', 'AJK', 'HFO', 'GUM']:
+            wmoid += "P" + sourceWfo
+        elif sourceWfo in ['AER', 'ALU']:
+            wmoid += "PAFC"
+        elif len(sourceWfo) == 3:
+            wmoid += "K" + sourceWfo
+        elif len(sourceWfo) == 4:
+            wmoid += sourceWfo
+        else:
+            wmoid += "XXXX"
+        wmoid += " " + time.strftime("%d%H%M", time.gmtime(time.time()))
+
+    # Transmit files - do string substitution
+        if xmtScript is not None: 
+            cmd = copy.deepcopy(xmtScript)
+            cmd = string.replace(cmd, "%SUBJECT", subject)
+            cmd = string.replace(cmd, "%ADDRESSES", ",".join(addresses))
+            cmd = string.replace(cmd, "%WMOID", "'" + wmoid + "'")
+            cmd = string.replace(cmd, "%ATTACHMENTS", ",".join(attachments))
+
+            self.logEvent("Transmit: ", cmd) 
+            import siteConfig
+            from subprocess import Popen,PIPE
+            output,err = Popen(cmd, shell=True, stdout=PIPE,stderr=PIPE).communicate()
+            if output.find(siteConfig.GFESUITE_MHSID+"-") == -1:
+                alertMsg = "ISC Send failed transmission to : "+",".join(addresses)+" --> "+output+" "+err
+                self.logProblem(alertMsg)
+            else:
+                self.logEvent(output.rstrip())
+                if len(err) > 0:
+                    self.logProblem(err.rstrip())
+                alertMsg="ISC data successfully transmitted to: "+",".join(addresses)
+                self.logEvent(alertMsg)
+
+        for file in attachments: 
+            try:
+                os.remove(file)
+            except OSError:
+                self.logException("Error removing file: "+file)
+
+    def printServerInfo(self, serverInfo):
+        # assembles a string to print out the server information.  serverInfo
+        # is a dict with keys "mhsid",'host','port','protocol','site'.
+        # Returns the string to print. All input values are strings.
+        mhsid = serverInfo.get('mhsid', '?')
+        host = serverInfo.get('host', '?')
+        port = serverInfo.get('port', '?')
+        protocol = serverInfo.get('protocol', '?')
+        site = serverInfo.get('site', '?')
+        s = "mhs=" + mhsid + ",host=" + host + ",port=" + port +\
+          ",proto=" + protocol + ",site=" + site
+        return s
+    
+    def createDestinationXML(self, destSites, requestingServer, findBestMatch=True):
+        #--------------------------------------------------------------------
+        # Assemble XML source/destination document
+        #--------------------------------------------------------------------
+        iscE = ElementTree.Element('isc')
+        self.addSourceXML(iscE, requestingServer)
+        self.logEvent("Requesting Server:", self.printServerInfo(requestingServer))
+    
+        # who is running the domains requested?
+        status, xml = self.getServers(destSites)
+        if not status:
+            raise Exception('Failure to getServers from IRT')
+    
+        # decode the XML
+        try:
+            serverTree = ElementTree.ElementTree(ElementTree.XML(xml))
+            serversE = serverTree.getroot()
+        except:
+            self.logException("Malformed XML from getServers()")
+            raise
+    
+        if serversE.tag != "servers":
+            raise Exception("Servers packet missing from web server")
+    
+        # process each requested domain returned to us
+        msgSendDest = []
+        chosenServers = []
+        matchingServers = []
+        for domainE in serversE:
+            if domainE.tag != "domain":
+                continue
+            
+            domain = domainE.get('site')
+            servers = []  #list of servers for this domain
+    
+            # decode each server in the domain
+            for addressE in domainE.getchildren():
+                info = self.decodeXMLAddress(addressE)
+                if info is None:
+                    continue   #not address tag
+                servers.append(info)
+                matchingServers.append(info)
+    
+            # server search list in priority.  The px3 entries are used for
+            # dual domain for AFC.
+            hp = [('dx4','98000000'),('px3', '98000000'), ('dx4','98000001'),
+              ('px3', '98000001')]
+    
+            if findBestMatch:
+                # choose one server from this domain, find first dx4, 98000000
+                # try to use one with the same mhsidDest as the site, which
+                # would be the primary operational GFE. Note that the px3 entries
+                # are for AFC.
+                found = False
+                for matchServer, matchPort in hp:
+                    if found:
+                        break        
+                    for server in servers:
+                        if server['host'][0:3] == matchServer and \
+                          server['port'] == matchPort and server['mhsid'] == domain:
+                            chosenServers.append(server)
+                            if server['mhsid'] not in msgSendDest:
+                                msgSendDest.append(server['mhsid'])
+                            found = True
+                            break
+        
+                # find first dx4, 98000000, but perhaps a different mhsid
+                # this is probably not the primary operational GFE
+                for matchServer, matchPort in hp:
+                    if found:
+                        break        
+                    for server in servers:
+                        if server['host'][0:3] == matchServer and \
+                          server['port'] == matchPort:
+                            chosenServers.append(server)
+                            if server['mhsid'] not in msgSendDest:
+                                msgSendDest.append(server['mhsid'])
+                            found = True
+                            break
+        
+                # if didn't find standard one, then take the first one, but don't
+                # take ourselves unless we are the only one.
+                if not found and servers:
+                    for server in servers:
+                        if server['mhsid'] != requestingServer['mhsid'] \
+                          and server['host'] != requestingServer['host'] \
+                          and server['port'] != requestingServer['port'] \
+                          and server['site'] != requestingServer['site']:
+                            chosenServers.append(server)
+                            if server['mhsid'] not in msgSendDest:
+                                msgSendDest.append(server['mhsid'])
+                            found = True
+                            break;
+                            
+                    if not found:
+                        chosenServers.append(servers[0])
+                        if servers[0]['mhsid'] not in msgSendDest:
+                            msgSendDest.append(servers[0]['mhsid'])
+            else:
+                for server in matchingServers:
+                    chosenServers.append(server)
+                    if server['mhsid'] not in msgSendDest:
+                        msgSendDest.append(server['mhsid'])
+    
+
+        # Display the set of matching servers
+        s = "Matching Servers:"
+        for x in matchingServers:
+            s += "\n" + self.printServerInfo(x)
+        self.logEvent(s)
+    
+        # Display the chosen set of servers
+        s = "Chosen Servers:"
+        for x in chosenServers:
+            s += "\n" + self.printServerInfo(x)
+        self.logEvent(s)
+    
+        self.addDestinationXML(iscE, chosenServers)
+    
+        return msgSendDest, iscE
diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/IrtServer.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/IrtServer.py
index 095029444a..3d4784ba41 100644
--- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/IrtServer.py
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/IrtServer.py
@@ -1,590 +1,590 @@
-##
-# This software was developed and / or modified by Raytheon Company,
-# pursuant to Contract DG133W-05-CQ-1067 with the US Government.
-# 
-# U.S. EXPORT CONTROLLED TECHNICAL DATA
-# This software product contains export-restricted data whose
-# export/transfer/disclosure is restricted by U.S. law. Dissemination
-# to non-U.S. persons whether in the United States or abroad requires
-# an export license or other authorization.
-# 
-# Contractor Name:        Raytheon Company
-# Contractor Address:     6825 Pine Street, Suite 340
-#                         Mail Stop B8
-#                         Omaha, NE 68106
-#                         402.291.0100
-# 
-# See the AWIPS II Master Rights File ("Master Rights File.pdf") for
-# further licensing information.
-##
-#
-# Port of IRT functionality from legacy ifpServer
-#
-#    
-#     SOFTWARE HISTORY
-#    
-#    Date            Ticket#       Engineer       Description
-#    ------------    ----------    -----------    --------------------------
-#    07/14/09        1995          bphillip       Initial Creation.
-#    01/25/13        1447          dgilling       Implement routines needed by
-#                                                 iscDataRec for VTEC table 
-#                                                 sharing.
-#    03/13/13        1759          dgilling       Move siteConfig imports into
-#                                                 functions where module is used
-#                                                 to interact better with IscScript.
-#    05/22/13        1759          dgilling       Add missing import to 
-#                                                 makeISCrequest().
-#    10/16/13        2475          dgilling       Remove unneeded code to handle
-#                                                 registration with IRT.
-#    12/08/2014      4953          randerso       Added support for sending/receiving TCV files
-#                                                 Additional code clean up
-#    03/05/2015      4129          randerso       Fix exception handling on subprocess calls
-#                                                 Fixed error when no TCV files were found
-#    02/22/2016      5374          randerso       Added support for sendWFOMessage
-#    09/12/2016      5861          randerso       Remove references to IFPServerConfigManager
-#                                                 which was largely redundant with IFPServer.
-#    10/31/2016      5979          njensen        Cast to primitives for compatibility
-#
-##
-
-##
-# This is a base file that is not intended to be overridden.
-##
-
-
-
-import cPickle
-
-import LogStream, tempfile, os, sys, JUtil, subprocess, traceback, errno
-import time, copy, string, iscUtil
-
-from com.raytheon.edex.plugin.gfe.isc import IRTManager
-from subprocess import CalledProcessError
-
-PURGE_AGE = 30 * 24 * 60 * 60  # 30 days in seconds
-
-def getLogger():
-    import logging
-    return iscUtil.getLogger("irtServer", logLevel=logging.DEBUG)
-    
-def logEvent(*msg):
-    getLogger().info(iscUtil.tupleToString(*msg))
-
-def logProblem(*msg):
-    getLogger().error(iscUtil.tupleToString(*msg))
-    
-def logException(*msg):
-    getLogger().exception(iscUtil.tupleToString(*msg))    
-
-def logDebug(*msg):
-    getLogger().debug(iscUtil.tupleToString(*msg))
-    
-# called by iscDataRec when another site has requested the active table
-# returns the active table, filtered, pickled.
-def getVTECActiveTable(dataFile, xmlPacket):
-    import siteConfig
-    import VTECPartners
-    
-    if not VTECPartners.VTEC_RESPOND_TO_TABLE_REQUESTS:
-        return   #respond is disabled
-
-    #decode the data (pickled)
-    with open(dataFile, "rb") as fp:
-        info = cPickle.load(fp)
-        
-    (mhsSite, reqsite, filterSites, countDict, issueTime) = info
-
-    #get the active table, and write it to a temporary file
-    from com.raytheon.uf.common.site import SiteMap
-    from com.raytheon.uf.edex.activetable import ActiveTable
-    from com.raytheon.uf.common.activetable import ActiveTableMode
-    from com.raytheon.uf.common.activetable import ActiveTableUtil
-    site4Id = SiteMap.getInstance().getSite4LetterId(siteConfig.GFESUITE_SITEID)
-    javaTable = ActiveTable.getActiveTable(site4Id, ActiveTableMode.OPERATIONAL)
-    dictTable = ActiveTableUtil.convertToDict(javaTable, siteConfig.GFESUITE_SITEID)
-    
-    # we must convert this to a python hash using the A1 field naming conventions
-    # for cross-version compatibility
-    table = []
-    for i in xrange(dictTable.size()):
-        convRecord = JUtil.javaObjToPyVal(dictTable.get(i))
-        convRecord['oid'] = convRecord['officeid']
-        convRecord['vstr'] = convRecord['vtecstr']
-        convRecord['end'] = convRecord['endTime']
-        convRecord['start'] = convRecord['startTime']
-        convRecord['key'] = convRecord['phensig']
-        # remove new fields so we don't pickle two copies
-        del convRecord['officeid']
-        del convRecord['vtecstr']
-        del convRecord['endTime']
-        del convRecord['phensig']
-        del convRecord['startTime']
-        if convRecord.has_key('segText'):
-            convRecord['text'] = convRecord['segText']
-            del convRecord['segText']
-        table.append(convRecord)
-        
-    # additionally, we'll need to pickle our output to match the A1 file
-    # format
-    pickledTable = cPickle.dumps(table)
-    outDir = os.path.join(siteConfig.GFESUITE_PRDDIR, "ATBL")
-    with tempfile.NamedTemporaryFile(suffix='.ato', dir=outDir, delete=False) as fp:
-        fname = fp.name
-        fp.write(pickledTable)
-
-    #write the xmlpacket to a temporary file, if one was passed
-    if xmlPacket is not None:
-        with tempfile.NamedTemporaryFile(suffix='.xml', dir=outDir, delete=False) as fp:
-            fnameXML = fp.name
-            fp.write(xmlPacket)
-
-    from com.raytheon.edex.plugin.gfe.server import IFPServer
-    server = IFPServer.getActiveServer(siteConfig.GFESUITE_SITEID)
-    if server is None:
-        raise Exception("No active IFPServer for site: " + siteConfig.GFESUITE_SITEID)
-    
-    config = server.getConfig()
-    ServerHost = siteConfig.GFESUITE_SERVER
-    ServerPort = str(siteConfig.GFESUITE_PORT)
-    ServerProtocol = str(config.getProtocolVersion())
-    ServerMHS = siteConfig.GFESUITE_MHSID
-    ServerSite = siteConfig.GFESUITE_SITEID
-    XmtScript = config.transmitScript()
-
-    #call sendAT to send the table to the requestor
-    cmd = os.path.join(siteConfig.GFESUITE_HOME, "bin", "sendAT")
-    args = [cmd, '-s', reqsite, '-a', mhsSite, '-H', ServerHost,
-      '-P', ServerPort, '-L', ServerProtocol, '-M', ServerMHS,
-      '-S', ServerSite, '-x', XmtScript]
-    if filterSites is not None:
-        for fs in filterSites:
-            args.append('-f')
-            args.append(fs)
-    if countDict is not None:
-            args.append('-c')
-            args.append(`countDict`)
-    if issueTime is not None:
-        args.append('-t')
-        args.append(`issueTime`)
-    args.append('-v')
-    args.append(fname)
-    if xmlPacket is not None:
-        args.append('-X')
-        args.append(fnameXML)
-    try:
-        output = subprocess.check_output(args, stderr=subprocess.STDOUT)
-        logEvent("sendAT command output: ", output)
-    except subprocess.CalledProcessError as e:
-        logProblem("sendAT returned error code: ", e.returncode, e.output)
-    except:
-        logProblem("Error executing sendAT: ", traceback.format_exc())
-
-#when we receive a requested active table from another site, this function
-#is called from iscDataRec
-def putVTECActiveTable(dataFile, xmlPacket):
-    import siteConfig
-
-    with open(dataFile, "rb") as fp:
-        strTable = fp.read()
-    
-    #write the xmlpacket to a temporary file, if one was passed
-    inDir = os.path.join(siteConfig.GFESUITE_PRDDIR, "ATBL")
-    if xmlPacket is not None:
-        with tempfile.NamedTemporaryFile(suffix='.xml', dir=inDir, delete=False) as fp:
-            fnameXML = fp.name
-            fp.write(xmlPacket)
-    with tempfile.NamedTemporaryFile(suffix='.ati', dir=inDir, delete=False) as fp:
-         fname = fp.name
-         fp.write(strTable)
-    
-    cmd = os.path.join(siteConfig.GFESUITE_HOME, "bin", "ingestAT")
-    args = []
-    args.append(cmd)
-    args.append("-s")
-    args.append(siteConfig.GFESUITE_SITEID)
-    args.append("-f")
-    args.append(fname)
-    if xmlPacket is not None:
-        args.append('-X')
-        args.append(fnameXML)
-    try:
-        output = subprocess.check_output(args, stderr=subprocess.STDOUT)
-        logEvent("ingestAT command output: ", output)
-    except subprocess.CalledProcessError as e:
-        logProblem("ingestAT returned error code: ", e.returncode, e.output)
-    except:
-        logProblem("Error executing ingestAT: ", traceback.format_exc())
-
-def sendWfoMessage(siteID, msgFile):
-    with open(msgFile, 'r') as fp:
-        message = fp.read()
-
-    logEvent("Message received from site: %s\n%s" % (siteID, message))
-
-    # send to AlertViz
-    from awips import NotificationMessage
-    msg = NotificationMessage.NotificationMessage(port='9581', message=message,
-           category='GFE', priority='SIGNIFICANT', source='GFE')
-    msg.send()
-    
-
-def putTCVFiles(siteID, tarFile):
-    from . import LocalizationSupport
-    from . import glob
-    from . import TCVUtil
-    
-    logEvent("Receiving TCV files from " + siteID)
-    
-    siteDir = LocalizationSupport.getLocalizationFile(LocalizationSupport.CAVE_STATIC, 
-                                                     LocalizationSupport.SITE, 
-                                                     siteID, "gfe").getFile()
-    siteDir = siteDir.getParentFile().getParentFile().getAbsolutePath()
-    logDebug("siteDir: "+siteDir)
-
-    try:
-        tmpDir = tempfile.mkdtemp(dir="/tmp")
-        logDebug("tmpDir: "+tmpDir)
-        subprocess.check_call(["cd " + tmpDir + "; tar xvzf " + tarFile], shell=True)
-    except:
-        logException('Error untarring TCV files from site: ' + siteID)
-        raise
-        
-    TCVUtil.purgeAllCanFiles(getLogger())
-    
-    # create the new allCAN files
-    for tmpFile in glob.iglob(os.path.join(tmpDir, "*/gfe/tcvAdvisories/*.allCAN")):
-        # create tcvDir if necessary
-        tcvDir = os.path.dirname(tmpFile).replace(tmpDir, siteDir)
-        logDebug("tcvDir: "+tcvDir)
-        try:
-            os.makedirs(tcvDir, 0755)
-        except OSError, e:
-            if e.errno != errno.EEXIST:
-                logProblem("%s: '%s'" % (e.strerror,e.filename))
-        
-        basename = os.path.basename(tmpFile)
-        stormName = basename.replace(".allCAN", "")
-        allCanPath = os.path.join(tcvDir, basename)
-        logDebug("copying "+tmpFile+" to "+allCanPath)
-        try:
-            # just create the empty allCan file
-            with open(allCanPath, 'w'):
-                pass
-        except:
-            logException("Error creating: "+ allCanPath)
-
-        try:            
-            # delete all JSON files starting with stormName
-            for fn in glob.iglob(os.path.join(tcvDir, stormName + "*.json")):
-                try:
-                    site = fn.replace(siteDir,"").split("/")[1]
-                    basename = os.path.basename(fn)
-                    logDebug("removing canceled file: ", os.path.join(site, "gfe/tcvAdvisories", basename))
-                    LocalizationSupport.deleteFile(LocalizationSupport.CAVE_STATIC, 
-                                                   LocalizationSupport.SITE, site, 
-                                                   "gfe/tcvAdvisories/" + basename)
-                except:
-                    logException("Error removing " + fn)
- 
-             
-            os.remove(tmpFile)
-        except:
-            logException("Error removing JSON files for " + stormName)
-    
-    # copy in the json files
-    for tmpFile in glob.iglob(os.path.join(tmpDir, "*/gfe/tcvAdvisories/*.json")):
-        site = tmpFile.replace(tmpDir,"").split("/")[1]
-        jsonFile = "gfe/tcvAdvisories/" + os.path.basename(tmpFile)
-        logDebug("copying "+tmpFile+" to "+jsonFile)
-        try:
-            with open(tmpFile, 'r') as tf:
-                jsonData = tf.read()
-            LocalizationSupport.writeFile(LocalizationSupport.CAVE_STATIC, 
-                                          LocalizationSupport.SITE, 
-                                          site, jsonFile, jsonData)
-            os.remove(tmpFile)
-        except:
-            logException("Error copying JSON file: "+jsonFile)
-        
-    # delete tmpDir
-    try:
-        for dirpath, dirs, files in os.walk(tmpDir, topdown=False):
-            os.rmdir(dirpath)
-    except:
-        logException("Unable to remove "+ tmpDir)
-    
-    
-def getTCVFiles(ourMhsID, srcServer, destE):
-    import IrtAccess
-    import TCVUtil
-    import siteConfig
-    
-    irt = IrtAccess.IrtAccess("")
-    localSites = [srcServer['site']]
-    for addressE in destE:
-        if addressE.tag != "address":
-            continue
-    
-        destServer = irt.decodeXMLAddress(addressE)
-        if destServer['mhsid'] == ourMhsID:
-            localSites.append(destServer['site'])
-    
-    
-    logEvent("Sending TCV files for " + str(localSites) + " to " + srcServer['mhsid'])
-
-    tcvProductsDir = os.path.join(siteConfig.GFESUITE_HOME, "products", "TCV")
-    
-    # create tcvProductsDir if necessary
-    try:
-        os.makedirs(tcvProductsDir, 0755)
-    except OSError, e:
-        if e.errno != errno.EEXIST:
-            logger.warn("%s: '%s'" % (e.strerror,e.filename))
-
-    # get temporary file name for packaged TCV files
-    with tempfile.NamedTemporaryFile(suffix='.sendtcv', dir=tcvProductsDir, delete=False) as fp:
-        fname = fp.name
-        
-    try:    
-        if TCVUtil.packageTCVFiles(localSites, fname, getLogger()):
-        
-            from xml.etree import ElementTree
-            from xml.etree.ElementTree import Element, SubElement
-            iscE = ElementTree.Element('isc')
-            irt.addSourceXML(iscE, destServer)
-            irt.addDestinationXML(iscE, [srcServer])
-    
-            # create the XML file
-            with tempfile.NamedTemporaryFile(suffix='.xml', dir=tcvProductsDir, delete=False) as fd:
-                fnameXML = fd.name
-                fd.write(ElementTree.tostring(iscE))    
-    
-            # send the files to srcServer
-            sendMHSMessage("PUT_TCV_FILES", srcServer['mhsid'], [fname, fnameXML])
-        else:
-            logEvent('No TCV files to send')
-            
-    except:
-        logException('Error sending TCV files for ' + str(localSites))
-
-# get servers direct call for IRT
-def irtGetServers(ancfURL, bncfURL, iscWfosWanted):
-    import IrtAccess
-    irt = IrtAccess.IrtAccess(ancfURL, bncfURL)
-    xml = None
-    status, xml = irt.getServers(iscWfosWanted)
-    return xml
-
-# make a request for ISC (support for IFPServer.C)
-# xmlRequest is the original request from the GFE's ISCRequestDialog.
-def makeISCrequest(xmlRequest, gridDims, gridProj, gridBoundBox, mhs, host, port, protocol, site, xmtScript):
-    import IrtAccess
-    import siteConfig
-    import xml
-    from xml.etree import ElementTree
-    from xml.etree.ElementTree import Element, SubElement
-    
-    ServerMHS = mhs
-    ServerHost = host
-    ServerPort = port
-    ServerProtocol = protocol
-    ServerSite = site
-    
-    
-    if type(gridDims) != "list":
-        pylist = []
-        size = gridDims.size() 
-        for i in range(size):
-            pylist.append(int(gridDims.get(i)))
-        gridDims = pylist
-    
-    if type(gridBoundBox) != "tuple":
-        gridBoundBox = ((float(gridBoundBox.get(0)), float(gridBoundBox.get(1))), (float(gridBoundBox.get(2)), float(gridBoundBox.get(3))))
-
-    irt = IrtAccess.IrtAccess(None)
-    logEvent("ISC Request (makeISCrequest)")
-
-    # we need to modify the incoming xmlRequest and add the 
-    # and move the  into the  
- requestE = ElementTree.fromstring(xmlRequest) - ourServer = {'mhsid': ServerMHS, - 'host': ServerHost, - 'port': ServerPort, - 'protocol': ServerProtocol, - 'site': ServerSite, - 'area': {'xdim': gridDims[0], - 'ydim': gridDims[1] - }, - 'domain': {'proj': gridProj, - 'origx': gridBoundBox[0][0], - 'origy': gridBoundBox[0][1], - 'extx': gridBoundBox[1][0], - 'exty': gridBoundBox[1][1] - } - } - sourcesE, addressE = irt.addSourceXML(requestE, ourServer) - - #find the and move it - welistE = requestE.find('welist') - requestE.remove(welistE) - addressE.append(welistE) - - # we need to decode the to determine which sites should - # get this request - mhsSites = [] - for destE in requestE.getchildren(): - if destE.tag == "destinations": - for addrE in destE: - if addrE.tag != "address": - continue #not expecting something different, so ignore - serverInfo = irt.decodeXMLAddress(addrE) - logEvent("Destination:", - irt.printServerInfo(serverInfo)) - if serverInfo['mhsid'] not in mhsSites: - mhsSites.append(serverInfo['mhsid']) - - # convert XML tree to a string and write out packet to send via MHS - dir = siteConfig.GFESUITE_PRDDIR + "/ISC" - #create the directories if they don't exist - try: - os.makedirs(dir) - except: - pass - tempfile.tempdir = dir - fname = tempfile.mktemp(".iscRequest") - fp = open(fname, "wb") - buf = ElementTree.tostring(requestE) - fp.write(buf) - fp.close() - - # Transmit the request -- do string substitution - #if XmtScript is not None: - # create the required wfmoid - wmoid = "TTAA00 " - if ServerMHS in ['SJU']: - wmoid += "TJSJ" - elif ServerMHS in ['AFG', 'AJK', 'HFO', 'GUM']: - wmoid += "P" + ServerMHS - elif ServerMHS in ['AER', 'ALU']: - wmoid += "PAFC" - elif len(ServerMHS) == 3: - wmoid += "K" + ServerMHS - elif len(ServerMHS) == 4: - wmoid += ServerMHS - else: - wmoid = "XXXX" - wmoid += " " + time.strftime("%d%H%M", time.gmtime(time.time())) - - cmd = copy.deepcopy(xmtScript) - args = cmd.split(" ") #break down into separate 
entries - for s1, s2 in [("%SUBJECT", "ISCREQUEST"), - ("%ADDRESSES", ",".join(mhsSites)), ("%WMOID", wmoid), - ("%ATTACHMENTS", fname)]: - for x in xrange(len(args)): - args[x] = string.replace(args[x], s1, s2) - logEvent("ISCRequest xml: ", args) - - # start subprocess to actually make the call - pid = os.fork() - if pid == 0: - try: - os.execvp(args[0], args) - except: - pass - finally: - os.remove(fname) - os._exit(0) - - -def serviceISCRequest(dataFile): - # function called by iscDataRec with an isc request to be serviced. - # We take this information, convert it into a different format, - # and queue the request via the IFPServer to the SendISCMgr - import IrtAccess - import siteConfig - import xml - from xml.etree import ElementTree - from xml.etree.ElementTree import Element, SubElement - irt = IrtAccess.IrtAccess(None) - - logEvent("serviceISCRequest.....") - - # validate xml - inTree = ElementTree.parse(dataFile) - inE = inTree.getroot() - if inE.tag != "iscrequest": - raise Exception, "iscrequest packet missing from request" - - # prepare output XML file - emulating the IRT getSendAddr() format - iscE = Element('destinations') - - # process the input request file. Look for source, destinations. 
- sourceE = None - for sdwE in inE: - if sdwE.tag == "source": - for addrE in sdwE: - if addrE.tag == "address": #should only be 1 - info = irt.decodeXMLAddress(addrE) #requestor information - logEvent("ISC requestor:", - irt.printServerInfo(info)) - irt.addAddressXML(iscE, info) #into the destinations - elif sdwE.tag == "destinations": - continue #we don't care about these destinations since they are - #where the request went (which is our own server) - - # pass request into C++ IFPServer - xmlDestinations = ElementTree.tostring(iscE) #convert to string - from com.raytheon.edex.plugin.gfe.isc import ServiceISCRequest - ServiceISCRequest.serviceRequest(JUtil.pyValToJavaObj(info['parms']),xmlDestinations,siteConfig.GFESUITE_SITEID) - # ifpServer.serviceISCRequest(info['parms'], xmlDestinations) - -def sendMHSMessage(subject, adressees, attachments, xmtScript=None): - # Transmit the request -- do string substitution - import siteConfig - from com.raytheon.edex.plugin.gfe.server import IFPServer - server = IFPServer.getActiveServer(siteConfig.GFESUITE_SITEID) - if server is None: - raise Exception("No active IFPServer for site: " + siteConfig.GFESUITE_SITEID) - - config = server.getConfig() - ourMHS = siteConfig.GFESUITE_MHSID - - if xmtScript is None: - xmtScript = config.transmitScript() - - # create the required wmoid - wmoid = "TTAA00 " - if ourMHS in ['SJU']: - wmoid += "TJSJ" - elif ourMHS in ['AFG', 'AJK', 'HFO', 'GUM']: - wmoid += "P" + ourMHS - elif ourMHS in ['AER', 'ALU']: - wmoid += "PAFC" - elif len(ourMHS) == 3: - wmoid += "K" + ourMHS - elif len(ourMHS) == 4: - wmoid += ourMHS - else: - wmoid = "XXXX" - wmoid += " " + time.strftime("%d%H%M", time.gmtime(time.time())) - - if type(adressees) in [list, tuple]: - adressees = ",".join(addresses) - - if type(attachments) in [list, tuple]: - attachments = ",".join(attachments) - - cmd = copy.deepcopy(xmtScript) - for s1, s2 in [("%SUBJECT", subject), - ("%ADDRESSES", adressees), - ("%WMOID", wmoid), - 
("%ATTACHMENTS", attachments)]: - cmd = cmd.replace(s1, s2) - - logDebug("cmd: "+ cmd) - - # start subprocess to actually make the call - try: - subprocess.check_call([cmd], shell=True) - except: - logException("Error running cmd: " + cmd) +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# +# Port of IRT functionality from legacy ifpServer +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/14/09 1995 bphillip Initial Creation. +# 01/25/13 1447 dgilling Implement routines needed by +# iscDataRec for VTEC table +# sharing. +# 03/13/13 1759 dgilling Move siteConfig imports into +# functions where module is used +# to interact better with IscScript. +# 05/22/13 1759 dgilling Add missing import to +# makeISCrequest(). +# 10/16/13 2475 dgilling Remove unneeded code to handle +# registration with IRT. +# 12/08/2014 4953 randerso Added support for sending/receiving TCV files +# Additional code clean up +# 03/05/2015 4129 randerso Fix exception handling on subprocess calls +# Fixed error when no TCV files were found +# 02/22/2016 5374 randerso Added support for sendWFOMessage +# 09/12/2016 5861 randerso Remove references to IFPServerConfigManager +# which was largely redundant with IFPServer. 
+# 10/31/2016 5979 njensen Cast to primitives for compatibility +# +## + +## +# This is a base file that is not intended to be overridden. +## + + + +import pickle + +import LogStream, tempfile, os, sys, JUtil, subprocess, traceback, errno +import time, copy, string, iscUtil + +from com.raytheon.edex.plugin.gfe.isc import IRTManager +from subprocess import CalledProcessError + +PURGE_AGE = 30 * 24 * 60 * 60 # 30 days in seconds + +def getLogger(): + import logging + return iscUtil.getLogger("irtServer", logLevel=logging.DEBUG) + +def logEvent(*msg): + getLogger().info(iscUtil.tupleToString(*msg)) + +def logProblem(*msg): + getLogger().error(iscUtil.tupleToString(*msg)) + +def logException(*msg): + getLogger().exception(iscUtil.tupleToString(*msg)) + +def logDebug(*msg): + getLogger().debug(iscUtil.tupleToString(*msg)) + +# called by iscDataRec when another site has requested the active table +# returns the active table, filtered, pickled. +def getVTECActiveTable(dataFile, xmlPacket): + import siteConfig + import VTECPartners + + if not VTECPartners.VTEC_RESPOND_TO_TABLE_REQUESTS: + return #respond is disabled + + #decode the data (pickled) + with open(dataFile, "rb") as fp: + info = pickle.load(fp) + + (mhsSite, reqsite, filterSites, countDict, issueTime) = info + + #get the active table, and write it to a temporary file + from com.raytheon.uf.common.site import SiteMap + from com.raytheon.uf.edex.activetable import ActiveTable + from com.raytheon.uf.common.activetable import ActiveTableMode + from com.raytheon.uf.common.activetable import ActiveTableUtil + site4Id = SiteMap.getInstance().getSite4LetterId(siteConfig.GFESUITE_SITEID) + javaTable = ActiveTable.getActiveTable(site4Id, ActiveTableMode.OPERATIONAL) + dictTable = ActiveTableUtil.convertToDict(javaTable, siteConfig.GFESUITE_SITEID) + + # we must convert this to a python hash using the A1 field naming conventions + # for cross-version compatibility + table = [] + for i in range(dictTable.size()): + 
convRecord = JUtil.javaObjToPyVal(dictTable.get(i)) + convRecord['oid'] = convRecord['officeid'] + convRecord['vstr'] = convRecord['vtecstr'] + convRecord['end'] = convRecord['endTime'] + convRecord['start'] = convRecord['startTime'] + convRecord['key'] = convRecord['phensig'] + # remove new fields so we don't pickle two copies + del convRecord['officeid'] + del convRecord['vtecstr'] + del convRecord['endTime'] + del convRecord['phensig'] + del convRecord['startTime'] + if 'segText' in convRecord: + convRecord['text'] = convRecord['segText'] + del convRecord['segText'] + table.append(convRecord) + + # additionally, we'll need to pickle our output to match the A1 file + # format + pickledTable = pickle.dumps(table) + outDir = os.path.join(siteConfig.GFESUITE_PRDDIR, "ATBL") + with tempfile.NamedTemporaryFile(suffix='.ato', dir=outDir, delete=False) as fp: + fname = fp.name + fp.write(pickledTable) + + #write the xmlpacket to a temporary file, if one was passed + if xmlPacket is not None: + with tempfile.NamedTemporaryFile(suffix='.xml', dir=outDir, delete=False) as fp: + fnameXML = fp.name + fp.write(xmlPacket) + + from com.raytheon.edex.plugin.gfe.server import IFPServer + server = IFPServer.getActiveServer(siteConfig.GFESUITE_SITEID) + if server is None: + raise Exception("No active IFPServer for site: " + siteConfig.GFESUITE_SITEID) + + config = server.getConfig() + ServerHost = siteConfig.GFESUITE_SERVER + ServerPort = str(siteConfig.GFESUITE_PORT) + ServerProtocol = str(config.getProtocolVersion()) + ServerMHS = siteConfig.GFESUITE_MHSID + ServerSite = siteConfig.GFESUITE_SITEID + XmtScript = config.transmitScript() + + #call sendAT to send the table to the requestor + cmd = os.path.join(siteConfig.GFESUITE_HOME, "bin", "sendAT") + args = [cmd, '-s', reqsite, '-a', mhsSite, '-H', ServerHost, + '-P', ServerPort, '-L', ServerProtocol, '-M', ServerMHS, + '-S', ServerSite, '-x', XmtScript] + if filterSites is not None: + for fs in filterSites: + args.append('-f') + 
args.append(fs) + if countDict is not None: + args.append('-c') + args.append(repr(countDict)) + if issueTime is not None: + args.append('-t') + args.append(repr(issueTime)) + args.append('-v') + args.append(fname) + if xmlPacket is not None: + args.append('-X') + args.append(fnameXML) + try: + output = subprocess.check_output(args, stderr=subprocess.STDOUT) + logEvent("sendAT command output: ", output) + except subprocess.CalledProcessError as e: + logProblem("sendAT returned error code: ", e.returncode, e.output) + except: + logProblem("Error executing sendAT: ", traceback.format_exc()) + +#when we receive a requested active table from another site, this function +#is called from iscDataRec +def putVTECActiveTable(dataFile, xmlPacket): + import siteConfig + + with open(dataFile, "rb") as fp: + strTable = fp.read() + + #write the xmlpacket to a temporary file, if one was passed + inDir = os.path.join(siteConfig.GFESUITE_PRDDIR, "ATBL") + if xmlPacket is not None: + with tempfile.NamedTemporaryFile(suffix='.xml', dir=inDir, delete=False) as fp: + fnameXML = fp.name + fp.write(xmlPacket) + with tempfile.NamedTemporaryFile(suffix='.ati', dir=inDir, delete=False) as fp: + fname = fp.name + fp.write(strTable) + + cmd = os.path.join(siteConfig.GFESUITE_HOME, "bin", "ingestAT") + args = [] + args.append(cmd) + args.append("-s") + args.append(siteConfig.GFESUITE_SITEID) + args.append("-f") + args.append(fname) + if xmlPacket is not None: + args.append('-X') + args.append(fnameXML) + try: + output = subprocess.check_output(args, stderr=subprocess.STDOUT) + logEvent("ingestAT command output: ", output) + except subprocess.CalledProcessError as e: + logProblem("ingestAT returned error code: ", e.returncode, e.output) + except: + logProblem("Error executing ingestAT: ", traceback.format_exc()) + +def sendWfoMessage(siteID, msgFile): + with open(msgFile, 'r') as fp: + message = fp.read() + + logEvent("Message received from site: %s\n%s" % (siteID, message)) + + # send to 
AlertViz + from awips import NotificationMessage + msg = NotificationMessage.NotificationMessage(port='9581', message=message, + category='GFE', priority='SIGNIFICANT', source='GFE') + msg.send() + + +def putTCVFiles(siteID, tarFile): + from . import LocalizationSupport + from . import glob + from . import TCVUtil + + logEvent("Receiving TCV files from " + siteID) + + siteDir = LocalizationSupport.getLocalizationFile(LocalizationSupport.CAVE_STATIC, + LocalizationSupport.SITE, + siteID, "gfe").getFile() + siteDir = siteDir.getParentFile().getParentFile().getAbsolutePath() + logDebug("siteDir: "+siteDir) + + try: + tmpDir = tempfile.mkdtemp(dir="/tmp") + logDebug("tmpDir: "+tmpDir) + subprocess.check_call(["cd " + tmpDir + "; tar xvzf " + tarFile], shell=True) + except: + logException('Error untarring TCV files from site: ' + siteID) + raise + + TCVUtil.purgeAllCanFiles(getLogger()) + + # create the new allCAN files + for tmpFile in glob.iglob(os.path.join(tmpDir, "*/gfe/tcvAdvisories/*.allCAN")): + # create tcvDir if necessary + tcvDir = os.path.dirname(tmpFile).replace(tmpDir, siteDir) + logDebug("tcvDir: "+tcvDir) + try: + os.makedirs(tcvDir, 0o755) + except OSError as e: + if e.errno != errno.EEXIST: + logProblem("%s: '%s'" % (e.strerror,e.filename)) + + basename = os.path.basename(tmpFile) + stormName = basename.replace(".allCAN", "") + allCanPath = os.path.join(tcvDir, basename) + logDebug("copying "+tmpFile+" to "+allCanPath) + try: + # just create the empty allCan file + with open(allCanPath, 'w'): + pass + except: + logException("Error creating: "+ allCanPath) + + try: + # delete all JSON files starting with stormName + for fn in glob.iglob(os.path.join(tcvDir, stormName + "*.json")): + try: + site = fn.replace(siteDir,"").split("/")[1] + basename = os.path.basename(fn) + logDebug("removing canceled file: ", os.path.join(site, "gfe/tcvAdvisories", basename)) + LocalizationSupport.deleteFile(LocalizationSupport.CAVE_STATIC, + LocalizationSupport.SITE, site, 
+ "gfe/tcvAdvisories/" + basename) + except: + logException("Error removing " + fn) + + + os.remove(tmpFile) + except: + logException("Error removing JSON files for " + stormName) + + # copy in the json files + for tmpFile in glob.iglob(os.path.join(tmpDir, "*/gfe/tcvAdvisories/*.json")): + site = tmpFile.replace(tmpDir,"").split("/")[1] + jsonFile = "gfe/tcvAdvisories/" + os.path.basename(tmpFile) + logDebug("copying "+tmpFile+" to "+jsonFile) + try: + with open(tmpFile, 'r') as tf: + jsonData = tf.read() + LocalizationSupport.writeFile(LocalizationSupport.CAVE_STATIC, + LocalizationSupport.SITE, + site, jsonFile, jsonData) + os.remove(tmpFile) + except: + logException("Error copying JSON file: "+jsonFile) + + # delete tmpDir + try: + for dirpath, dirs, files in os.walk(tmpDir, topdown=False): + os.rmdir(dirpath) + except: + logException("Unable to remove "+ tmpDir) + + +def getTCVFiles(ourMhsID, srcServer, destE): + import IrtAccess + import TCVUtil + import siteConfig + + irt = IrtAccess.IrtAccess("") + localSites = [srcServer['site']] + for addressE in destE: + if addressE.tag != "address": + continue + + destServer = irt.decodeXMLAddress(addressE) + if destServer['mhsid'] == ourMhsID: + localSites.append(destServer['site']) + + + logEvent("Sending TCV files for " + str(localSites) + " to " + srcServer['mhsid']) + + tcvProductsDir = os.path.join(siteConfig.GFESUITE_HOME, "products", "TCV") + + # create tcvProductsDir if necessary + try: + os.makedirs(tcvProductsDir, 0o755) + except OSError as e: + if e.errno != errno.EEXIST: + logger.warn("%s: '%s'" % (e.strerror,e.filename)) + + # get temporary file name for packaged TCV files + with tempfile.NamedTemporaryFile(suffix='.sendtcv', dir=tcvProductsDir, delete=False) as fp: + fname = fp.name + + try: + if TCVUtil.packageTCVFiles(localSites, fname, getLogger()): + + from xml.etree import ElementTree + from xml.etree.ElementTree import Element, SubElement + iscE = ElementTree.Element('isc') + irt.addSourceXML(iscE, 
destServer) + irt.addDestinationXML(iscE, [srcServer]) + + # create the XML file + with tempfile.NamedTemporaryFile(suffix='.xml', dir=tcvProductsDir, delete=False) as fd: + fnameXML = fd.name + fd.write(ElementTree.tostring(iscE)) + + # send the files to srcServer + sendMHSMessage("PUT_TCV_FILES", srcServer['mhsid'], [fname, fnameXML]) + else: + logEvent('No TCV files to send') + + except: + logException('Error sending TCV files for ' + str(localSites)) + +# get servers direct call for IRT +def irtGetServers(ancfURL, bncfURL, iscWfosWanted): + import IrtAccess + irt = IrtAccess.IrtAccess(ancfURL, bncfURL) + xml = None + status, xml = irt.getServers(iscWfosWanted) + return xml + +# make a request for ISC (support for IFPServer.C) +# xmlRequest is the original request from the GFE's ISCRequestDialog. +def makeISCrequest(xmlRequest, gridDims, gridProj, gridBoundBox, mhs, host, port, protocol, site, xmtScript): + import IrtAccess + import siteConfig + import xml + from xml.etree import ElementTree + from xml.etree.ElementTree import Element, SubElement + + ServerMHS = mhs + ServerHost = host + ServerPort = port + ServerProtocol = protocol + ServerSite = site + + + if type(gridDims) != "list": + pylist = [] + size = gridDims.size() + for i in range(size): + pylist.append(int(gridDims.get(i))) + gridDims = pylist + + if type(gridBoundBox) != "tuple": + gridBoundBox = ((float(gridBoundBox.get(0)), float(gridBoundBox.get(1))), (float(gridBoundBox.get(2)), float(gridBoundBox.get(3)))) + + irt = IrtAccess.IrtAccess(None) + logEvent("ISC Request (makeISCrequest)") + + # we need to modify the incoming xmlRequest and add the + # and move the into the
+ requestE = ElementTree.fromstring(xmlRequest) + ourServer = {'mhsid': ServerMHS, + 'host': ServerHost, + 'port': ServerPort, + 'protocol': ServerProtocol, + 'site': ServerSite, + 'area': {'xdim': gridDims[0], + 'ydim': gridDims[1] + }, + 'domain': {'proj': gridProj, + 'origx': gridBoundBox[0][0], + 'origy': gridBoundBox[0][1], + 'extx': gridBoundBox[1][0], + 'exty': gridBoundBox[1][1] + } + } + sourcesE, addressE = irt.addSourceXML(requestE, ourServer) + + #find the and move it + welistE = requestE.find('welist') + requestE.remove(welistE) + addressE.append(welistE) + + # we need to decode the to determine which sites should + # get this request + mhsSites = [] + for destE in requestE.getchildren(): + if destE.tag == "destinations": + for addrE in destE: + if addrE.tag != "address": + continue #not expecting something different, so ignore + serverInfo = irt.decodeXMLAddress(addrE) + logEvent("Destination:", + irt.printServerInfo(serverInfo)) + if serverInfo['mhsid'] not in mhsSites: + mhsSites.append(serverInfo['mhsid']) + + # convert XML tree to a string and write out packet to send via MHS + dir = siteConfig.GFESUITE_PRDDIR + "/ISC" + #create the directories if they don't exist + try: + os.makedirs(dir) + except: + pass + tempfile.tempdir = dir + fname = tempfile.mktemp(".iscRequest") + fp = open(fname, "wb") + buf = ElementTree.tostring(requestE) + fp.write(buf) + fp.close() + + # Transmit the request -- do string substitution + #if XmtScript is not None: + # create the required wfmoid + wmoid = "TTAA00 " + if ServerMHS in ['SJU']: + wmoid += "TJSJ" + elif ServerMHS in ['AFG', 'AJK', 'HFO', 'GUM']: + wmoid += "P" + ServerMHS + elif ServerMHS in ['AER', 'ALU']: + wmoid += "PAFC" + elif len(ServerMHS) == 3: + wmoid += "K" + ServerMHS + elif len(ServerMHS) == 4: + wmoid += ServerMHS + else: + wmoid = "XXXX" + wmoid += " " + time.strftime("%d%H%M", time.gmtime(time.time())) + + cmd = copy.deepcopy(xmtScript) + args = cmd.split(" ") #break down into separate 
entries + for s1, s2 in [("%SUBJECT", "ISCREQUEST"), + ("%ADDRESSES", ",".join(mhsSites)), ("%WMOID", wmoid), + ("%ATTACHMENTS", fname)]: + for x in range(len(args)): + args[x] = string.replace(args[x], s1, s2) + logEvent("ISCRequest xml: ", args) + + # start subprocess to actually make the call + pid = os.fork() + if pid == 0: + try: + os.execvp(args[0], args) + except: + pass + finally: + os.remove(fname) + os._exit(0) + + +def serviceISCRequest(dataFile): + # function called by iscDataRec with an isc request to be serviced. + # We take this information, convert it into a different format, + # and queue the request via the IFPServer to the SendISCMgr + import IrtAccess + import siteConfig + import xml + from xml.etree import ElementTree + from xml.etree.ElementTree import Element, SubElement + irt = IrtAccess.IrtAccess(None) + + logEvent("serviceISCRequest.....") + + # validate xml + inTree = ElementTree.parse(dataFile) + inE = inTree.getroot() + if inE.tag != "iscrequest": + raise Exception("iscrequest packet missing from request") + + # prepare output XML file - emulating the IRT getSendAddr() format + iscE = Element('destinations') + + # process the input request file. Look for source, destinations. 
+ sourceE = None + for sdwE in inE: + if sdwE.tag == "source": + for addrE in sdwE: + if addrE.tag == "address": #should only be 1 + info = irt.decodeXMLAddress(addrE) #requestor information + logEvent("ISC requestor:", + irt.printServerInfo(info)) + irt.addAddressXML(iscE, info) #into the destinations + elif sdwE.tag == "destinations": + continue #we don't care about these destinations since they are + #where the request went (which is our own server) + + # pass request into C++ IFPServer + xmlDestinations = ElementTree.tostring(iscE) #convert to string + from com.raytheon.edex.plugin.gfe.isc import ServiceISCRequest + ServiceISCRequest.serviceRequest(JUtil.pyValToJavaObj(info['parms']),xmlDestinations,siteConfig.GFESUITE_SITEID) + # ifpServer.serviceISCRequest(info['parms'], xmlDestinations) + +def sendMHSMessage(subject, adressees, attachments, xmtScript=None): + # Transmit the request -- do string substitution + import siteConfig + from com.raytheon.edex.plugin.gfe.server import IFPServer + server = IFPServer.getActiveServer(siteConfig.GFESUITE_SITEID) + if server is None: + raise Exception("No active IFPServer for site: " + siteConfig.GFESUITE_SITEID) + + config = server.getConfig() + ourMHS = siteConfig.GFESUITE_MHSID + + if xmtScript is None: + xmtScript = config.transmitScript() + + # create the required wmoid + wmoid = "TTAA00 " + if ourMHS in ['SJU']: + wmoid += "TJSJ" + elif ourMHS in ['AFG', 'AJK', 'HFO', 'GUM']: + wmoid += "P" + ourMHS + elif ourMHS in ['AER', 'ALU']: + wmoid += "PAFC" + elif len(ourMHS) == 3: + wmoid += "K" + ourMHS + elif len(ourMHS) == 4: + wmoid += ourMHS + else: + wmoid = "XXXX" + wmoid += " " + time.strftime("%d%H%M", time.gmtime(time.time())) + + if type(adressees) in [list, tuple]: + adressees = ",".join(addresses) + + if type(attachments) in [list, tuple]: + attachments = ",".join(attachments) + + cmd = copy.deepcopy(xmtScript) + for s1, s2 in [("%SUBJECT", subject), + ("%ADDRESSES", adressees), + ("%WMOID", wmoid), + 
("%ATTACHMENTS", attachments)]: + cmd = cmd.replace(s1, s2) + + logDebug("cmd: "+ cmd) + + # start subprocess to actually make the call + try: + subprocess.check_call([cmd], shell=True) + except: + logException("Error running cmd: " + cmd) diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/ifpnetCDF.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/ifpnetCDF.py index bbfda0d1bb..c59c3b5c25 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/ifpnetCDF.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/ifpnetCDF.py @@ -1,1439 +1,1439 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Jul 06, 2009 1995 bphillip Initial Creation. -# Mar 11, 2013 1759 dgilling Removed unneeded methods. -# Apr 23, 2013 1937 dgilling Reimplement WECache to match -# A1, big perf improvement. -# May 23, 2013 1759 dgilling Remove unnecessary imports. 
-# Jun 13, 2013 2044 randerso Updated for changes to TopoDatabaseManager -# Jul 25, 2013 2233 randerso Improved memory utilization and performance -# Aug 09, 2013 1571 randerso Changed projections to use the Java -# ProjectionType enumeration -# Sep 20, 2013 2405 dgilling Clip grids before inserting into cache. -# Oct 22, 2013 2405 rjpeter Remove WECache and store directly to cube. -# Oct 31, 2013 2508 randerso Change to use DiscreteGridSlice.getKeys() -# Aug 14, 2014 3526 randerso Fixed to get sampling definition from -# appropriate site -# Jan 13, 2015 3955 randerso Changed to use ifpServer.getTopoData -# Feb 17, 2015 4139 randerso Removed timeFromComponents and dependent -# functions in favor of calendar.timegm -# Apr 23, 2015 4259 njensen Updated for new JEP API -# May 13, 2015 4427 dgilling Add siteIdOverride field. -# Aug 06, 2015 4718 dgilling Optimize casting when using where with -# NumPy 1.9. -# Apr 07, 2016 5539 randerso Reversed order of parameters/return value in -# collapseKey -# to match order of Wx/Discrete tuple -# May 27, 2016 19014 ryu Fix rounding issue causing Td to be greater -# than T in output netCDF file. -# Sep 12, 2016 5861 randerso Remove references to IFPServerConfigManager -# which was largely redundant with IFPServer. -# Oct 31, 2016 5979 njensen Cast to primitives for compatibility -# Nov 21, 2016 5959 njensen Removed unused imports and made more pythonic -# Feb 06, 2017 5959 randerso Removed Java .toString() calls -# Jul 31, 3017 6342 randerso Removed unused imports. Fixed long standing -# bug in extremaOfSetBits when mask does not overlap grid -# -## - -## -# This is a base file that is not intended to be overridden. 
-## - - - -import string, time, gzip, os, LogStream, stat, traceback -import calendar -from collections import OrderedDict -import numpy -#import pupynere as NetCDF -try: - # dev environment - from Scientific.IO import NetCDF -except: - # runtime we don't have the whole scientific package - import NetCDF -import JUtil -import iscUtil -import logging - -from java.util import ArrayList -from java.io import File -from com.vividsolutions.jts.geom import Coordinate -from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData -ProjectionType = ProjectionData.ProjectionType -from com.raytheon.edex.plugin.gfe.smartinit import IFPDB -from com.raytheon.edex.plugin.gfe.server import IFPServer -from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID -from com.raytheon.uf.common.localization import PathManagerFactory -from com.raytheon.uf.common.localization import LocalizationContext -LocalizationType = LocalizationContext.LocalizationType -LocalizationLevel = LocalizationContext.LocalizationLevel - - -# Original A1 BATCH WRITE COUNT was 10, we found doubling that -# lead to a significant performance increase. 
-BATCH_WRITE_COUNT = 20 -BATCH_DELAY = 0.0 -ifpNetcdfLogger=None - -## Logging methods ## -def initLogger(logFile=None): - global ifpNetcdfLogger - ifpNetcdfLogger = iscUtil.getLogger("ifpnetCDF",logFile, logLevel=logging.INFO) - -def logEvent(*msg): - ifpNetcdfLogger.info(iscUtil.tupleToString(*msg)) - -def logProblem(*msg): - ifpNetcdfLogger.error(iscUtil.tupleToString(*msg)) - -def logException(*msg): - ifpNetcdfLogger.exception(iscUtil.tupleToString(*msg)) - -def logVerbose(*msg): - ifpNetcdfLogger.debug(iscUtil.tupleToString(*msg)) - -def logDebug(*msg): - logVerbose(iscUtil.tupleToString(*msg)) - - -def retrieveData(we, inv, clipArea): - lst = list(inv) - trs=[] - histDict = OrderedDict() - cube = None - keyList = None - gridType = str(we.getGpi().getGridType()) - - # clipped size - clipSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1) - gridCount = len(inv) - - if gridType == "SCALAR": - cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32) - elif gridType == "VECTOR": - magCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32) - dirCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32) - cube = (magCube, dirCube) - elif gridType == "WEATHER" or gridType == "DISCRETE": - cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8) - keyList = [] - - cubeIdx = 0 - while len(lst): - i = lst[:BATCH_WRITE_COUNT] - javaTRs = ArrayList() - for tr in i: - javaTRs.add(iscUtil.toJavaTimeRange(tr)) - gridsAndHist = we.get(javaTRs, True) - size = gridsAndHist.size() - for idx in xrange(size): - pair = gridsAndHist.get(idx) - grid = pair.getFirst() - tr = iscUtil.transformTime(grid.getValidTime()) - encodeGridSlice(grid, gridType, clipArea, cube, cubeIdx, keyList) - cubeIdx += 1 - histDict[tr] = encodeGridHistory(pair.getSecond()) - lst = lst[BATCH_WRITE_COUNT:] - time.sleep(BATCH_DELAY) - - if len(histDict) != gridCount: - # retrieved less 
grids than originally expected, purge ran? - gridCount = len(histDict) - - if gridType == "SCALAR": - oldCube = cube - cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32) - for idx in xrange(gridCount): - cube[idx] = oldCube[idx] - elif gridType == "VECTOR": - oldMagCube = magCube - magCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32) - oldDirCube = dirCube - dirCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32) - cube = (magCube, dirCube) - for idx in xrange(gridCount): - magCube[idx] = oldMagCube[idx] - dirCube[idx] = oldDirCube[idx] - elif gridType == "WEATHER" or gridType == "DISCRETE": - oldCube = cube - cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8) - for idx in xrange(gridCount): - cube[idx] = oldCube[idx] - return (cube, histDict, keyList) - -###-------------------------------------------------------------------------### -### cube and keyList are out parameters to be filled by this method, idx is the index into cube to use -def encodeGridSlice(grid, gridType, clipArea, cube, idx, keyList): - if gridType == "SCALAR": - cube[idx] = clipToExtrema(grid.getNDArray(), clipArea) - elif gridType == "VECTOR": - vecGrids = grid.getNDArray() - cube[0][idx] = clipToExtrema(vecGrids[0], clipArea) - cube[1][idx] = clipToExtrema(vecGrids[1], clipArea) - elif gridType == "WEATHER" or gridType == "DISCRETE": - keys = grid.getKeys() - gridKeys = [] - - for theKey in keys: - gridKeys.append(str(theKey)) - keyList.append(gridKeys) - cube[idx]= clipToExtrema(grid.getNDArray(), clipArea) - -def encodeGridHistory(histories): - retVal = [] - for i in xrange(histories.size()): - retVal.append(histories.get(i).getCodedString()) - return tuple(retVal) - - -###-------------------------------------------------------------------------### -### Processes the parm list. If the list is empty, get all of the parms -### from the database. 
If any are missing _SFC add it. -def processParmList(argDict, db): - parmList = argDict['parmList'] - dbList = db.getKeys() - if len(parmList) == 0: - parmList = JUtil.javaStringListToPylist(dbList) - - # now add _SFC to all parmNames with no underscore - for i in range(0, len(parmList)): - if '_' not in parmList[i]: - parmList[i] = parmList[i] + "_SFC" - - # now verify that the parm is in the database - final = [] - for p in parmList: - if dbList.contains(p): - final.append(p) - return final - -###-------------------------------------------------------------------------### -### Returns true if the specified time is contained within the timeRange -def contains(timerange, time): - if timerange[1] - timerange[0]: - return ((time >= timerange[0]) and (time < timerange[1])) - return time == timerange[0] - -###-------------------------------------------------------------------------### -### Returns intersection time range of two time ranges, if no intersection -### then None is returned. -def intersection(tr1, tr2): - if tr1[0] < tr2[0]: - startTime = tr2[0] - else: - startTime = tr1[0] - if tr1[1] > tr2[1]: - endTime = tr2[1] - else: - endTime = tr1[1] - if startTime >= endTime: - return None # no intersection - else: - return (startTime, endTime) - -###-------------------------------------------------------------------------### -def overlaps(tr1, tr2): - "Returns true if the specified time ranges overlap" - if contains(tr2, tr1[0]) or contains(tr1, tr2[0]): - return 1 - return 0 - -###-------------------------------------------------------------------------### -### Makes an integer from the specified string in seconds since 1-Jan-1970 00Z -def getIntTime(timeStr): - "Create an Integer time from a string: YYYYMMDD_HHMM" - - try: - timeTuple = time.strptime(timeStr, "%Y%m%d_%H%M") - except: - logProblem(timeStr, \ - "is not a valid time string. Use YYYYMMDD_HHMM",traceback.format_exc()) - s = timeStr + " is not a valid time string. 
Use YYYYMMDD_HHMM" - raise SyntaxError, s - return - return calendar.timegm(timeTuple) - -###-------------------------------------------------------------------------### -### Makes a TimeRange from the input string of the form YYYYMMDD_HHMM. -def makeTimeRange(startString, endString): - "Makes a timeRange from the specified time strings." - try: - t1 = getIntTime(startString) - t2 = getIntTime(endString) - except: - raise Exception, "Can't decode YYYYMMDD_HHMM string" - - return (t1, t2) - - -###-------------------------------------------------------------------------### -def timeRangeAsString(tr): - "Prints timeRange in YYYYMMDD_HHMM format" - return time.strftime("%Y%m%d_%H%M", time.gmtime(tr[0])) + " --- " \ - + time.strftime("%Y%m%d_%H%M", time.gmtime(tr[1])) - -###-------------------------------------------------------------------------### -def extremaOfSetBits(mask): - "Returns tuple of extrema of set bits (minx,maxx, miny,maxy)" - nz = numpy.nonzero(mask) - - minx = miny = 0 - maxx = mask.shape[1] - 1 - maxy = mask.shape[0] - 1 - - if nz[1].any(): - minx = nz[1].min() - maxx = nz[1].max() - - if nz[0].any(): - miny = nz[0].min() - maxy = nz[0].max() - - return (minx, maxx, miny, maxy) - -###------------------------------------------------------------------------### -def clipToExtrema(grid, clipArea): - "Clips grid to info in clipArea: (minx, maxx, miny, maxy)" - if clipArea[0] == -1: - return grid # no clipping at all - minx = clipArea[0] - maxx = clipArea[1] - miny = clipArea[2] - maxy = clipArea[3] - return grid[miny:maxy + 1, minx:maxx + 1] - -###-------------------------------------------------------------------------### -### Returns a list of dimension names based on the tuple of integer sizes, -### as well as the names of the dimensions. -### Adds the dimension to the netCDF file, if necessary. -### Special case, if dimension of zero, use a different name. 
-def getDims(file, dimSizes, dimNames): - if len(dimSizes) != len(dimNames): - raise Exception, "dimSizes and dimNames not same size" - dimList = list(dimSizes) - dimNames = list(dimNames) - actDimNames = [] - existingDimList = file.dimensions.keys() - - for x in xrange(len(dimList)): - dimName = "DIM_" + str(dimSizes[x]) - actDimNames.append(dimName) - if dimName not in existingDimList: - file.createDimension(dimName, dimSizes[x]) - existingDimList.append(dimName) - - return tuple(actDimNames) - -###-------------------------------------------------------------------------### -def getMaskGrid(ifpServer, editAreaName, dbId): - #make a mask with all bits set (y,x) - domain = ifpServer.getConfig().dbDomain() - mask = numpy.ones((int(domain.getNy()), int(domain.getNx())), dtype=numpy.bool) - - if editAreaName == "": - return mask - - # get the edit area - try: - mask = iscUtil.getEditArea(editAreaName, DatabaseID(dbId).getSiteId()) - mask.setGloc(domain) - mask = mask.getGrid().getNDArray().astype(numpy.bool) - except: - logProblem("Edit area:", editAreaName, "not found. 
Storing entire grid.",traceback.format_exc()) - - return mask - -###-------------------------------------------------------------------------### -def storeLatLonGrids(ifpServer, file, databaseID, invMask, krunch, clipArea): - - # Get the grid location and projection information - gridLoc = ifpServer.getConfig().dbDomain() - pDict = gridLoc.getProjection() - - latLonGrid = gridLoc.getLatLonGrid() - - latLonGrid = numpy.reshape(latLonGrid, (2, int(gridLoc.getNy()), int(gridLoc.getNx())), order='F') - - # clip them - lonGrid = clipToExtrema(latLonGrid[0], clipArea) - latGrid = clipToExtrema(latLonGrid[1], clipArea) - - # recast the arrays for compatibility with netCDF - lonGrid = numpy.flipud(lonGrid) - latGrid = numpy.flipud(latGrid) - - # clipped size - clipSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1) - - newsize = (clipSize[1], clipSize[0]) #y,x - latGrid = numpy.resize(latGrid, newsize) - lonGrid = numpy.resize(lonGrid, newsize) - - dims = getDims(file, latGrid.shape, ("y", "x")) - - - # store latitude grid - if krunch: - latVar = file.createVariable("latitude", 'h', dims) - latGrid = (latGrid * 100).astype(numpy.int16) - latVar[:] = latGrid - setattr(latVar, "dataMultiplier", 0.01) - setattr(latVar, "dataOffset", 0) - else: - latVar = file.createVariable("latitude", 'f', dims) - latVar[:] = latGrid - - # make the netCDF attributes - # Descriptive Name - setattr(latVar, "descriptiveName", "latitude") - - # coordinate information - origGridSize = Coordinate(float(gridLoc.getNx()), float(gridLoc.getNy())) - origOrigin = gridLoc.getOrigin() - origExtent = gridLoc.getExtent() - - cellSize = (origExtent.x / (origGridSize.x - 1), - origExtent.y / (origGridSize.y - 1)) - clippedExtent = (cellSize[0] * (clipSize[0] - 1), - cellSize[1] * (clipSize[1] - 1)) - domainOffset = (clipArea[0] * cellSize[0], (origGridSize.y - clipArea[3]-1) * cellSize[1]) - - clippedOrigin = (origOrigin.x + domainOffset[0] , - origOrigin.y + domainOffset[1]) - - # 
gridSize, domain origin/extent - setattr(latVar, "gridSize", clipSize) - setattr(latVar, "domainOrigin", clippedOrigin) - setattr(latVar, "domainExtent", clippedExtent) - - #units - setattr(latVar, "units", "degrees") - # projection info - store whatever is in the dictionary - storeProjectionAttributes(latVar, pDict) - - # store longitude grid - if krunch: - lonVar = file.createVariable("longitude", 'h', dims) - lonGrid = (lonGrid * 100).astype(numpy.int16) - lonVar[:] = lonGrid - setattr(lonVar, "dataMultiplier", 0.01) - setattr(lonVar, "dataOffset", 0) - else: - lonVar = file.createVariable("longitude", 'f', dims) - lonVar[:] = lonGrid - - # Descriptive Name - setattr(lonVar, "descriptiveName", "longitude") - - # gridSize, domain origin/extent - setattr(lonVar, "gridSize", clipSize) - setattr(lonVar, "domainOrigin", clippedOrigin) - setattr(lonVar, "domainExtent", clippedExtent) - - #units - setattr(lonVar, "units", "degrees") - # projection info - store whatever is in the dictionary - storeProjectionAttributes(lonVar, pDict) - - logEvent("Saved Latitude/Longitude Grid") - -###-------------------------------------------------------------------------### -def storeTopoGrid(ifpServer, file, databaseID, invMask, clipArea): - "Stores the topo grid in the database" - - # Get the grid location and projection information - gridLoc = ifpServer.getConfig().dbDomain() - pDict = gridLoc.getProjection() - - # Get the topo grid - topoGrid = ifpServer.getTopoData(gridLoc).getPayload().getNDArray() - topoGrid = clipToExtrema(topoGrid, clipArea) - topoGrid = numpy.flipud(topoGrid) - - # clipped size - clipGridSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1) - - newsize = (clipGridSize[1], clipGridSize[0]) #y,x - topoGrid = numpy.resize(topoGrid, newsize) - - dims = getDims(file, topoGrid.shape, ("y", "x")) - - # create the netcdf variable - var = file.createVariable("Topo", 'h', dims) - - # round to nearest foot - topoGrid = numpy.array((topoGrid + 0.5) / 
1).astype(numpy.int16) - - var[:] = topoGrid - - # make the netCDF attributes - # Descriptive Name - setattr(var, "descriptiveName", "Topography") - - # coordinate information - origGridSize = Coordinate(float(str(gridLoc.getNx())), float(str(gridLoc.getNy()))) - origOrigin = gridLoc.getOrigin() - origExtent = gridLoc.getExtent() - - cellSize = (origExtent.x / (origGridSize.x - 1), - origExtent.y / (origGridSize.y - 1)) - clippedExtent = (cellSize[0] * (clipGridSize[0] - 1), - cellSize[1] * (clipGridSize[1] - 1)) - domainOffset = (clipArea[0] * cellSize[0], (origGridSize.y - clipArea[3]-1) * cellSize[1]) - - clippedOrigin = (origOrigin.x + domainOffset[0] , - origOrigin.y + domainOffset[1]) - - # gridSize - setattr(var, "gridSize", clipGridSize) - - # Domain origin - setattr(var, "domainOrigin", clippedOrigin) - # Domain extent - setattr(var, "domainExtent", clippedExtent) - #units - setattr(var, "units", "ft") - # projection info - store whatever is in the dictionary - storeProjectionAttributes(var, pDict) - - logEvent("Saved Topo Grid") - -###-------------------------------------------------------------------------### -### -def storeGridDataHistory(file, we, histDict): - "Stores the Grid Data history string for each grid in we." - - # get the maximum size of the history string - maxHistSize = 0 - histList = [] - for (tr, his) in histDict.items(): - hisString = '' - for i,h in enumerate(his): - hisString = hisString + str(h) - if i != len(his) - 1: - hisString = hisString + " ^" - histList.append(hisString) - maxHistSize = max(maxHistSize,len(hisString)) - - # Make the history variable and fill it - histShape = (len(histList), maxHistSize + 1) - histCube = numpy.zeros(histShape, 'c') - for slot, hisString in enumerate(histList): - histCube[slot:] = hisString - - # make the history variable anyway. iscMosaic needs it. 
- elemName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() - dimNames = ["ngrids_" + elemName, "histLen_" + elemName] - dims = getDims(file, histShape, dimNames) - varName = elemName + "_GridHistory" - - var = file.createVariable(varName, 'c', dims) - - if len(histList) > 0: - # store the cube in the netCDF file - var[:] = histCube - return - -###-------------------------------------------------------------------------### -### -def calcKrunchValues(we): - #Based on the weather element, will return information pertaining - #to the dataType, multiplier, offset, and missing value to use for this - #element. Returns (dataType, multiplier, offset, missingValue, pythonType) - - maxV = we.getGpi().getMaxValue() - minV = we.getGpi().getMinValue() - precision = pow(10, we.getGpi().getPrecision()) - - nentries = ((maxV - minV) * precision) + 1 - - # check for byte possibilities - if nentries <= pow(2, 8) - 1: - multiplier = precision - offset = 0 - minVarValue = -126 - maxVarValue = 127 - if minV * multiplier < minVarValue: - offset = minV - minVarValue / multiplier - if maxV * multiplier > maxVarValue: - offset = maxV - maxVarValue / multiplier - missingValue = -127 - format = "b" - pythonType = numpy.int8 - - # check for short possibilities - elif nentries <= pow(2, 16) - 2: - multiplier = precision - offset = 0 - maxVarValue = pow(2, 15) - 1 - minVarValue = -(pow(2, 15) - 2) - if minV * multiplier < minVarValue: - offset = minV - minVarValue / multiplier - if maxV * multiplier > maxVarValue: - offset = maxV - maxVarValue / multiplier - missingValue = minVarValue - 1 - format = "h" - pythonType = numpy.int16 - - # else full 32-bit float processing, no krunching needed - else: - multiplier = None - offset = None - format = "f" - missingValue = -30000.0 - pythonType = numpy.float32 - return (format, multiplier, offset, missingValue, pythonType) - - -###-------------------------------------------------------------------------### -def 
storeProjectionAttributes(var, projectionData): - - projectionType = projectionData.getProjectionType() - # store the attributes common to all projections - setattr(var, "latLonLL", (projectionData.getLatLonLL().x, projectionData.getLatLonLL().y)) - setattr(var, "latLonUR", (projectionData.getLatLonUR().x, projectionData.getLatLonUR().y)) - setattr(var, "gridPointLL", (projectionData.getGridPointLL().x, projectionData.getGridPointLL().y)) - setattr(var, "gridPointUR", (projectionData.getGridPointUR().x, projectionData.getGridPointUR().y)) - setattr(var, "projectionType", str(projectionType)) - - # Now store the projection specific attributes - if ProjectionType.LAMBERT_CONFORMAL.equals(projectionType): - setattr(var, "latLonOrigin", (projectionData.getLatLonOrigin().x, projectionData.getLatLonOrigin().y)) - setattr(var, "stdParallelOne", projectionData.getStdParallelOne()) - setattr(var, "stdParallelTwo", projectionData.getStdParallelTwo()) - - if ProjectionType.POLAR_STEREOGRAPHIC.equals(projectionType): - setattr(var, "lonOrigin", projectionData.getLonOrigin()) - - if ProjectionType.MERCATOR.equals(projectionType): - setattr(var, "lonCenter", projectionData.getLonCenter()) - - return - -###-------------------------------------------------------------------------### -def storeWEAttributes(var, we, timeList, databaseID, clipArea, siteIdOverride): - "Stores attributes in the netCDF file for any weather element" - - # Note that geo information is modified based on the clip info. 
- - # TimeRanges - import itertools - setattr(var, "validTimes", list(itertools.chain.from_iterable(timeList))) - - # Descriptive Name - setattr(var, "descriptiveName", we.getGpi().getDescriptiveName()) - - # gridSize - clipGridSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1) - setattr(var, "gridSize", clipGridSize) - - # Domain origin and extent - gridLoc = we.getGpi().getGridLoc() - origGridSize = Coordinate(float(str(gridLoc.getNx())), float(str(gridLoc.getNy()))) - origOrigin = gridLoc.getOrigin() - origExtent = gridLoc.getExtent() - - - cellSize = (origExtent.x / (origGridSize.x - 1), - origExtent.y / (origGridSize.y - 1)) - - clippedExtent = (cellSize[0] * (clipGridSize[0] - 1), - cellSize[1] * (clipGridSize[1] - 1)) - - domainOffset = (clipArea[0] * cellSize[0], (origGridSize.y - clipArea[3]-1) * cellSize[1]) - - clippedOrigin = (origOrigin.x + domainOffset[0] , - origOrigin.y + domainOffset[1]) - - setattr(var, "domainOrigin", clippedOrigin) - setattr(var, "domainExtent", clippedExtent) - - # Min/Max allowable values - setattr(var, "minMaxAllowedValues", (we.getGpi().getMinValue(), we.getGpi().getMaxValue())) - - # determine correct siteID to write to netCDF file - # we needed this siteIdOverride incase we're exporting grids from a subdomain - srcSiteId = we.getParmid().getDbId().getSiteId() - destSideId = srcSiteId - if siteIdOverride: - destSideId = siteIdOverride - fixedDbId = databaseID.replace(srcSiteId + "_", destSideId + "_", 1) - - # data type - setattr(var, "gridType", str(we.getGpi().getGridType())) - # database ID - setattr(var, "databaseID", fixedDbId) - # siteID - #setattr(var, "siteID", we.siteID) - setattr(var, "siteID", destSideId) - # units - setattr(var, "units", we.getGpi().getUnitString()) - # level - setattr(var, "level", we.getParmid().getParmLevel()) - # timeConstraints - setattr(var, "timeConstraints", (we.getGpi().getTimeConstraints().getStartTime(), we.getGpi().getTimeConstraints().getDuration(), 
we.getGpi().getTimeConstraints().getRepeatInterval())) - # precision - setattr(var, "precision", we.getGpi().getPrecision()) - - # rate parm - setattr(var, "rateDependent", we.getGpi().isRateParm()) - - # projection info - store whatever is in the dictionary - storeProjectionAttributes(var, gridLoc.getProjection()) - - return - - -def findOverlappingTimes(trList, timeRange): - timeList = [] - overlappingTimes = [] - for t in trList: - interTR = intersection(t, timeRange) - if interTR is not None: - overlappingTimes.append(t) - timeList.append(interTR) - - return timeList, overlappingTimes - -###-------------------------------------------------------------------------### -### Stores the specified Scalar WE in the netCDF file whose grids fall within -### the specified timeRange. -def storeScalarWE(we, trList, file, timeRange, databaseID, - invMask, trim, clipArea, krunch, siteIdOverride): - "Stores a weather element to the netCDF file" - - # get the data and store it in a Numeric array. - timeList, overlappingTimes = findOverlappingTimes(trList, timeRange) - - (cube, histDict, keyList) = retrieveData(we, overlappingTimes, clipArea) - gridCount = len(cube) - for i in xrange(len(overlappingTimes) -1, -1, -1): - ot = overlappingTimes[i] - if not ot in histDict: - del overlappingTimes[i] - del timeList[i] - elif we.getGpi().isRateParm(): - durRatio = (float(timeList[i][1]-timeList[i][0]))/float((ot[1]-ot[0])) - cube[i] *= durRatio - - ### Make sure we found some grids - # make the variable name - varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() - - if len(cube) == 0: - logVerbose("No", varName, "grids found") - - #get the dimension List - dimNames = ["ngrids_" + varName, "y", "x"] - dims = getDims(file, cube.shape, dimNames) - - # Round the values according to the precision - if trim: - if krunch: - format, multiplier, offset, fillValue, pythonType = \ - calcKrunchValues(we) - else: - format, multiplier, offset, fillValue, pythonType = \ - 
('f', None, None, -30000.0, numpy.float32) - - # krunch - if multiplier is not None: - cube -= offset - cube *= multiplier - numpy.floor(cube+0.5, out=cube) - # normal trim - else: - digits = we.getGpi().getPrecision() - numpy.around(cube, digits, cube) - cube = cube.astype(pythonType) - - else: - format, multiplier, offset, fillValue, pythonType = \ - ('f', None, None, -30000.0, numpy.float32) - - # mask the data - cube[:,invMask] = fillValue - - # create the variable - var = file.createVariable(varName, format, dims) - if multiplier is not None: - setattr(var, "dataMultiplier", 1.0 / multiplier) - setattr(var, "dataOffset", offset) - - # Save the grids to the netCDF file - for i in range(len(cube)): - var[i] = numpy.flipud(cube[i]) - - # Store the attributes - storeWEAttributes(var, we, timeList, databaseID, clipArea, siteIdOverride) - setattr(var, "fillValue", fillValue) - - ## Extract the GridDataHistory info and save it - storeGridDataHistory(file, we, histDict) - - logEvent("Saved", gridCount, varName, " grids") - - return gridCount - -###-------------------------------------------------------------------------### -### Stores the specified Vector WE in the netCDF file whose grids fall within -### the specified timeRange. -def storeVectorWE(we, trList, file, timeRange, - databaseID, invMask, trim, clipArea, krunch, siteIdOverride): - "Stores a vector weather element to the netCDF file" - - # get the data and store it in a Numeric array. 
- timeList, overlappingTimes = findOverlappingTimes(trList, timeRange) - - ((magCube, dirCube), histDict, keyList) = retrieveData(we, overlappingTimes, clipArea) - gridCount = len(magCube) - for i in xrange(len(overlappingTimes) -1, -1, -1): - ot = overlappingTimes[i] - if not ot in histDict: - del overlappingTimes[i] - del timeList[i] - elif we.getGpi().isRateParm(): - durRatio = (float(timeList[i][1]-timeList[i][0]))/float((ot[1]-ot[0])) - magCube[i] *= durRatio - - varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() - - ### Make sure we found some grids - if len(magCube) == 0: - logVerbose("No", varName, "grids found") - - # make the variable name - magVarName = we.getParmid().getParmName() + "_Mag_" + we.getParmid().getParmLevel() - dirVarName = we.getParmid().getParmName() + "_Dir_" + we.getParmid().getParmLevel() - - #get the dimension List - dimNames = ["ngrids_" + varName, "y", "x"] - dims = getDims(file, magCube.shape, dimNames) - - # Round the values according to the precision - if trim: - if krunch: - mformat, mmultiplier, moffset, mfillValue, mpythonType = \ - calcKrunchValues(we) - dformat, dmultiplier, doffset, dfillValue, dpythonType = \ - ('b', 0.1, 0.0, -127, numpy.int8) - else: - mformat, mmultiplier, moffset, mfillValue, mpythonType = \ - ('f', None, None, -30000.0, numpy.dtype(numpy.float32)) - dformat, dmultiplier, doffset, dfillValue, dpythonType = \ - ('f', None, None, -30000.0, numpy.float32) - - # krunch magnitude - if mmultiplier is not None: - magCube -= moffset - magCube *= mmultiplier - numpy.around(magCube,out=magCube) - - # normal trim for magnitude - else: - digits = we.getGpi().getPrecision() - numpy.around(magCube, digits, magCube) - magCube = magCube.astype(mpythonType) - - # krunch direction - if dmultiplier is not None: - dirCube -= doffset - dirCube *= dmultiplier - numpy.around(dirCube,out=dirCube) - - # normal trim for direction - else: - numpy.around(dirCube, -1, dirCube) - 
dirCube[numpy.greater_equal(dirCube, 360.0)] -= 360.0 - dirCube = dirCube.astype(dpythonType) - - else: - mformat, mmultiplier, moffset, mfillValue, mpythonType = \ - ('f', None, None, -30000.0, numpy.float32) - dformat, dmultiplier, doffset, dfillValue, dpythonType = \ - ('f', None, None, -30000.0, numpy.float32) - - magCube[:,invMask] = mfillValue - dirCube[:,invMask] = dfillValue - - # create the variable - magVar = file.createVariable(magVarName, mformat, dims) - dirVar = file.createVariable(dirVarName, dformat, dims) - if mmultiplier is not None: - setattr(magVar, "dataMultiplier", 1.0 / mmultiplier) - setattr(magVar, "dataOffset", moffset) - if dmultiplier is not None: - setattr(dirVar, "dataMultiplier", 1.0 / dmultiplier) - setattr(dirVar, "dataOffset", doffset) - - # Save the grid to the netCDF file - for i in range(len(magCube)): - magVar[i] = numpy.flipud(magCube[i]) - dirVar[i] = numpy.flipud(dirCube[i]) - - # Store the attributes - overwrite some for mag and dir - storeWEAttributes(magVar, we, timeList, databaseID, clipArea, siteIdOverride) - - # Change the descriptive name - setattr(magVar, "descriptiveName", we.getGpi().getDescriptiveName() + " Magnitude") - setattr(magVar, "fillValue", mfillValue) - storeWEAttributes(dirVar, we, timeList, databaseID, clipArea, siteIdOverride) - - # Special case attributes for wind direction - setattr(dirVar, "minMaxAllowedValues", (0.0, 360.0)) - setattr(dirVar, "descriptiveName", we.getGpi().getDescriptiveName() + " Direction") - setattr(dirVar, "units", "degrees") - if trim: - dirPrecision = -1 - else: - dirPrecision = 0 - setattr(dirVar, "precision", dirPrecision) - setattr(dirVar, "fillValue", dfillValue) - - ## Extract the GridDataHistory info and save it - storeGridDataHistory(file, we, histDict) - - logEvent("Saved", gridCount, varName, "grids") - - return gridCount * 2 #vector has two grids - - -###-------------------------------------------------------------------------### -# Collapse key and bytes. 
(for discrete and weather) -### Returns tuple of (updated grid, updated key) -def collapseKey(grid, keys): - #make list of unique indexes in the grid - flatGrid = grid.flat - used = numpy.zeros((len(keys)), dtype=numpy.bool) - for n in range(flatGrid.__array__().shape[0]): - used[0xFF & flatGrid[n]] = True - - #make reverse map - map = [] - newKeys = [] - j = 0 - for i in range(len(keys)): - if used[i]: - map.append(j) - newKeys.append(keys[i]) - j = j + 1 - else: - map.append(-1) - - # modify the data - newGrid = grid - for k in range(len(map)): - mask = numpy.equal(numpy.int8(k), grid) - newGrid = numpy.where(mask, numpy.int8(map[k]), newGrid).astype(numpy.int8) - - return (newGrid, newKeys) - -###-------------------------------------------------------------------------### -# Stores the specified Weather WE in the netCDF file whose grids fall within -### the specified timeRange. -def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea, siteIdOverride): - "Stores the Weather weather element to the netCDF file" - - # get the data and store it in a Numeric array. 
- timeList, overlappingTimes = findOverlappingTimes(trList, timeRange) - - (byteCube, histDict, keyList) = retrieveData(we, overlappingTimes, clipArea) - gridCount = len(histDict) - for i in xrange(len(overlappingTimes) -1, -1, -1): - ot = overlappingTimes[i] - if not ot in histDict: - del overlappingTimes[i] - del timeList[i] - - # make the variable name - varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() - - ### Make sure we found some grids - if len(byteCube) == 0: - logVerbose("No", varName, "grids found") - - #get the dimension List - dimNames = ["ngrids_" + varName, "y", "x"] - dims = getDims(file, byteCube.shape, dimNames) - - # create the netCDF variable - 'b' for byte type - var = file.createVariable(varName, 'b', dims) - - # Process the weather keys so we store only what is necessary - - for g in range(byteCube.shape[0]): - (byteCube[g], keyList[g]) = collapseKey(byteCube[g], keyList[g]) - - # Mask the values - fillValue = -127 - byteCube[:,invMask] =fillValue - - # Save the grids to the netCDF file - for i in range(len(byteCube)): - var[i] = numpy.flipud(byteCube[i]) - - # Find the max number of keys and max length for all keys - maxKeyCount = 1 - maxKeySize = 0 - - for k in keyList: - if len(k) > maxKeyCount: - maxKeyCount = len(k) - - for s in k: - if len(s) > maxKeySize: - maxKeySize = len(s) - - # create a new netCDF variable to hold the weather keys - wxShape = (gridCount, maxKeyCount, maxKeySize + 1) # zero byte at - # the end - dimNames = ["ngrids_" + varName, "nkeys_" + varName, "keylen_" + varName] - dims = getDims(file, wxShape, dimNames) - keyVarName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() + "_wxKeys" - keyVar = file.createVariable(keyVarName, 'c', dims) - - chars = numpy.zeros(wxShape, 'c') - - # now save the weather keys in the netCDF file - for g in range(0, gridCount): - for k in range(0, len(keyList[g])): - for c in range(0, len(keyList[g][k])): - chars[g][k][c] = keyList[g][k][c] - 
if len(byteCube): - keyVar[:] = chars - - # Store the attributes - storeWEAttributes(var, we, timeList, databaseID, clipArea, siteIdOverride) - setattr(var, "fillValue", fillValue) - - ## Extract the GridDataHistory info and save it - storeGridDataHistory(file, we, histDict) - - logEvent("Saved", gridCount, varName, "grids") - - return gridCount - -###-------------------------------------------------------------------------### -# Stores the specified Discrete WE in the netCDF file whose grids fall within -### the specified timeRange. -def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea, siteIdOverride): - "Stores the Weather weather element to the netCDF file" - - # get the data and store it in a Numeric array. - timeList, overlappingTimes = findOverlappingTimes(trList, timeRange) - - (byteCube, histDict, keyList) = retrieveData(we, overlappingTimes, clipArea) - gridCount = len(histDict) - for i in xrange(len(overlappingTimes) -1, -1, -1): - ot = overlappingTimes[i] - if not ot in histDict: - del overlappingTimes[i] - del timeList[i] - - # make the variable name - varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() - - ### Make sure we found some grids - if len(byteCube) == 0: - logVerbose("No", varName, "grids found") - - #get the dimension List - dimNames = ["ngrids_" + varName, "y", "x"] - dims = getDims(file, byteCube.shape, dimNames) - - # create the netCDF variable - 'b' for byte type - var = file.createVariable(varName, 'b', dims) - - # Process the discrete keys so we store only what is necessary - - for g in range(byteCube.shape[0]): - (byteCube[g], keyList[g]) = collapseKey(byteCube[g], keyList[g]) - - # Mask the values - fillValue = -127 - byteCube[:,invMask] = fillValue - - # Save the grids to the netCDF file - for i in range(len(byteCube)): - var[i] = numpy.flipud(byteCube[i]) - - # Find the max number of keys and max length for all keys - maxKeyCount = 1 - maxKeySize = 0 - for k in keyList: - if 
len(k) > maxKeyCount: - maxKeyCount = len(k) - for s in k: - if len(s) > maxKeySize: - maxKeySize = len(s) - - # create a new netCDF variable to hold the discrete keys - disShape = (gridCount, maxKeyCount, maxKeySize + 1) # zero byte at - # the end - dimNames = ["ngrids_" + varName, "nkeys_" + varName, "keylen_" + varName] - dims = getDims(file, disShape, dimNames) - keyVarName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() + "_keys" - keyVar = file.createVariable(keyVarName, 'c', dims) - - chars = numpy.zeros(disShape, 'c') - - # now save the discrete keys in the netCDF file - for g in range(0, gridCount): - for k in range(0, len(keyList[g])): - for c in range(0, len(keyList[g][k])): - chars[g][k][c] = keyList[g][k][c] - if len(byteCube): - keyVar[:] = chars - - # Store the attributes - storeWEAttributes(var, we, timeList, databaseID, clipArea, siteIdOverride) - setattr(var, "fillValue", fillValue) - - ## Extract the GridDataHistory info and save it - storeGridDataHistory(file, we, histDict) - - logEvent("Saved", gridCount, varName, "grids") - - return gridCount - -###-------------------------------------------------------------------------### -### Store some global attribute to the file -def storeGlobalAtts(file, argDict): - currentTime = int(time.time()) - asciiTime = time.asctime(time.gmtime(int(time.time()))) - setattr(file, "creationTime", currentTime) - setattr(file, "creationTimeString", asciiTime) - if argDict['krunch']: - setattr(file, "fileFormatVersion", "20030117") - else: - setattr(file, "fileFormatVersion", "20010816") - setattr(file, "startProcTime", argDict['startTime']) - setattr(file, "endProcTime", argDict['endTime']) - return - -###-------------------------------------------------------------------------### -### Compresses the file using the gzip library -def compressFile(filename, factor): - - if factor < 1: - factor = 1 - elif factor > 9: - factor = 9 - fp = open(filename, "rb") - fpout = gzip.open(filename + ".gz", 
"wb", factor) - buffer = fp.read(1024 * 16) - while buffer != "": - fpout.write(buffer) - buffer = fp.read(1024 * 16) - - fp.close() - fpout.close() - # remove the orginal file - os.remove(filename) - -###------------ -# getSamplingDefinition - accesses server to retrieve definition, -# returns None or the sampling definition as Python. -def getSamplingDefinition(configName, siteId): - if configName is None: - return None - pathManager = PathManagerFactory.getPathManager() - fileName = "isc/utilities/" + configName + ".py" - siteContext = pathManager.getContextForSite(LocalizationType.COMMON_STATIC, siteId) - file = pathManager.getFile(siteContext, fileName) - - # if site file not found, try base level - if file is None or not file.exists(): - baseContext = pathManager.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.BASE) - file = pathManager.getFile(baseContext, fileName) - - if file is None or not file.exists(): - s = "Sampling Definition " + configName + " not found, using all grids." - logProblem(s) - return None - from com.raytheon.uf.common.util import FileUtil - data = FileUtil.file2String(file) - try: - exec data - return SampleDef - except: - s = "Bad Sampling Definition found [" + configName + \ - "], using all grids." - logProblem(s,traceback.format_exc()) - return None - - -###------------ -# determineSamplingInventory based on sampling definition -# returns inventory of time ranges to include in the netCDF file. 
-def determineSamplingValues(samplingDef, parmName, inventory, currentTime): - # we're going to get inventory as a PyJObject (List, actually), - # but to best match AWIPS-1 will return a list of their tuple-based - # time range objects, regardless if we have a valid sample definition or not - - if samplingDef is None or inventory.size() == 0: - newInv = [] - for i in range(0, inventory.size()): - newInv.append(iscUtil.transformTime(inventory.get(i))) - return newInv #all grids - - basetimeDef, offsetDef = samplingDef.get(parmName, samplingDef['default']) - - lastInvT = iscUtil.transformTime(inventory.get(inventory.size()-1))[1] #ending time for last grid - firstInvT = iscUtil.transformTime(inventory.get(0))[0] #starting time for first grid - - # determine basetime - bts = [] - bt = int(currentTime / 86400) * 86400 #0z today - while bt >= firstInvT: - bt = bt - 86400 #back up a day until we are earlier than 1st grid - while bt < lastInvT: - for bval in basetimeDef: - bts.append(bt + bval) - bt = bt + 86400 - basetime = None - for bt in bts: - if currentTime >= bt: - basetime = bt - else: - break - - # now determine the set of possible times - checkTimes = [] - # lastInvT = inventory[ -1][1] #ending time for last grid - - tval = basetime #begin at the basetime - tupleNumber = 0 - while tupleNumber < len(offsetDef): - beginT, intervalT = offsetDef[tupleNumber] - tval = basetime + beginT - while tval < lastInvT: - if tupleNumber < len(offsetDef) - 1: - if tval < basetime + offsetDef[tupleNumber + 1][0]: - checkTimes.append(tval) - tval = tval + intervalT #still in this tuple - else: - break #go onto the next tuple - else: - checkTimes.append(tval) - tval = tval + intervalT #can't compare - in last tuple - tupleNumber = tupleNumber + 1 - - #match them up with the inventory to select the intersecting times - inven = [] - startIndexCheck = 0 - for i in range(0, inventory.size()): - inv = iscUtil.transformTime(inventory.get(i)) - for x in xrange(startIndexCheck, 
len(checkTimes)): - if contains(inv, checkTimes[x]): - startIndexCheck = x + 1 - if inv not in inven: - inven.append(inv) - break - return inven - - -###-------------------------------------------------------------------------### -### Main program -def main(outputFilename, parmList, databaseID, startTime, - endTime, mask, geoInfo, compressFileFlag, configFileName, - compressFileFactor, trim, krunch, userID, logFileName, siteIdOverride): - initLogger(logFileName) - - -# LogStream.ttyLogOn() - logEvent("ifpnetCDF Starting") -# LogStream.logEvent(AFPS.DBSubsystem_getBuildDate(), -# AFPS.DBSubsystem_getBuiltBy(), AFPS.DBSubsystem_getBuiltOn(), -# AFPS.DBSubsystem_getBuildVersion()) - - if hasattr(parmList, 'java_name'): - parmList = JUtil.javaObjToPyVal(parmList) - - argDict = {"outputFilename": outputFilename, - "parmList": parmList, - "databaseID": databaseID, - "startTime": startTime, - "endTime": endTime, - "mask": mask, - "geoInfo": bool(geoInfo), - "compressFile": bool(compressFileFlag), - "configFileName": configFileName, - "compressFileFactor": int(compressFileFactor), - "trim": bool(trim), - "krunch": bool(krunch), - "userID": userID, - "siteIdOverride" : siteIdOverride, } - logEvent("Command: ", argDict) - - a = os.times() - cpu0 = a[0] + a[1] - start = a[4] - siteId = DatabaseID(databaseID).getSiteId() - ifpServer = IFPServer.getActiveServer(siteId) - if ifpServer is None: - raise Exception("No active IFPServer for site: " + siteId) - - try: - timeRange = makeTimeRange(argDict['startTime'], argDict['endTime']) - except: - logException("Unable to create TimeRange from arguments: startTime= " + str(argDict['startTime']) + ", endTime= " + argDict['endTime']) - return - - # See if the databaseID is valid. 
An exception will be tossed - db = IFPDB(argDict['databaseID']) - - # Fill in the parmList with all parms if the input list is empty - argDict['parmList'] = processParmList(argDict, db) - - # Determine the mask - maskGrid = getMaskGrid(ifpServer, argDict['mask'], argDict['databaseID']) - origGridSize = maskGrid.shape - clipArea = extremaOfSetBits(maskGrid) - - maskGrid = clipToExtrema(maskGrid, clipArea) - clippedGridSize = maskGrid.shape - validPointCount = float(numpy.add.reduce(numpy.add.reduce(maskGrid))) - - #invert the mask grid - invMask = numpy.logical_not(maskGrid) - #del maskGrid - - # Determine sampling definition - siteId = DatabaseID(argDict['databaseID']).getSiteId() - samplingDef = getSamplingDefinition(argDict['configFileName'], siteId) - logVerbose("Sampling Definition:", samplingDef) - - # Open the netCDF file - file = NetCDF.NetCDFFile(argDict['outputFilename'], 'w') - - totalGrids = 0 - for p in argDict['parmList']: - - we = db.getItem(p) - - #determine inventory that we want to keep - weInv = determineSamplingValues(samplingDef, p, we.getKeys(), time.time()) - - gridType = str(we.getGpi().getGridType()) - if gridType == "SCALAR": - nGrids = storeScalarWE(we, weInv, file, timeRange, - argDict['databaseID'], invMask, argDict['trim'], clipArea, - argDict['krunch'], argDict['siteIdOverride']) - elif gridType == "VECTOR": - nGrids = storeVectorWE(we, weInv, file, timeRange, - argDict['databaseID'], invMask, argDict['trim'], clipArea, - argDict['krunch'], argDict['siteIdOverride']) - elif gridType == "WEATHER": - nGrids = storeWeatherWE(we, weInv, file, timeRange, - argDict['databaseID'], invMask, clipArea, argDict['siteIdOverride']) - elif gridType == "DISCRETE": - nGrids = storeDiscreteWE(we, weInv, file, timeRange, - argDict['databaseID'], invMask, clipArea, argDict['siteIdOverride']) - else: - s = "Grids of type: " + we.gridType + " are not supported, " + \ - "parm=" + p - logProblem(s) - raise Exception, s - - totalGrids = totalGrids + nGrids - 
- # store the topo and lat, lon grids if the -g was present - if argDict["geoInfo"]: - storeTopoGrid(ifpServer, file, argDict['databaseID'], invMask, clipArea) - storeLatLonGrids(ifpServer, file, argDict['databaseID'], invMask, - argDict['krunch'], clipArea) - totalGrids = totalGrids + 3 - - storeGlobalAtts(file, argDict) - - file.close() - - fu = os.stat(argDict['outputFilename'])[stat.ST_SIZE] - mb = fu / (1024.0 * 1024.0) - logEvent("Uncompressed Size: ", "%-.3f" % (mb), " MBytes") - a = os.times() - cpu = a[0] + a[1] - stop1 = a[4] - - # Grid statistics - logEvent("Original Grid Size:", origGridSize) - logEvent("Clipped Grid Size: ", clippedGridSize) - logEvent("Valid Points in Grid: ", validPointCount) - logEvent("Total Number of Grids: ", totalGrids) - - perClipped = 100.0 * clippedGridSize[0] * clippedGridSize[1] / \ - (origGridSize[0] * origGridSize[1]) - perValid = 100.0 * validPointCount / (origGridSize[0] * origGridSize[1]) - logEvent("Percent ClippedPts/Original: ", "%-.1f" % (perClipped), - "%") - logEvent("Percent ValidPts/Original: ", "%-.1f" % (perValid), - "%") - - kpts = totalGrids * validPointCount / 1000.0 - logEvent("Total Points: ", "%-.3f" % (kpts), "Kpoints") - - if totalGrids > 0 and validPointCount > 0: - bitsPerPointUncompressed = (fu * 8.0) / (totalGrids * validPointCount) - logEvent("Bits Per Point Uncompressed: ", - "%-.2f" % (bitsPerPointUncompressed)) - - # Finally compress the data with gzip and remove the other file - if argDict['compressFile']: - logEvent("Compressing Output") - compressFile(argDict['outputFilename'], argDict['compressFileFactor']) - f = os.stat(argDict['outputFilename'] + ".gz")[stat.ST_SIZE] - per = 100.0 - 100.0 * f / fu - mb = f / (1024.0 * 1024.0) - logEvent("Compressed Size: ", "%-.3f" % (mb), " MBytes") - logEvent("CompressionPercent=", "%-.1f" % (per), "%") - - if totalGrids > 0 and validPointCount > 0: - bitsPerPointCompressed = (f * 8.0) / (totalGrids * validPointCount) - logEvent("Bits Per Point 
Compressed: ", - "%-.2f" % (bitsPerPointCompressed)) - - a = os.times() - cpugz = a[0] + a[1] - stop = a[4] - logEvent("Elapsed/CPU time: ", - "%-.2f" % (stop1 - start), "/", "%-.2f" % (cpu - cpu0), "processing,", - "%-.2f" % (stop - stop1), "/", "%-.2f" % (cpugz - cpu), "compress,", - "%-.2f" % (stop - start), "/", "%-.2f" % (cpugz - cpu0), "total") - logEvent("ifpnetCDF Finished") - - - -#if __name__ == "__main__": -# main() - # profile stuff -# import profile, pstats -# profile.run('main()', 'pyprof.out') -# p = pstats.Stats('pyprof.out') -# p.strip_dirs() -# p.sort_stats('time', 'calls').print_stats(15) -# p.print_callers(15) +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Jul 06, 2009 1995 bphillip Initial Creation. +# Mar 11, 2013 1759 dgilling Removed unneeded methods. +# Apr 23, 2013 1937 dgilling Reimplement WECache to match +# A1, big perf improvement. +# May 23, 2013 1759 dgilling Remove unnecessary imports. 
+# Jun 13, 2013 2044 randerso Updated for changes to TopoDatabaseManager +# Jul 25, 2013 2233 randerso Improved memory utilization and performance +# Aug 09, 2013 1571 randerso Changed projections to use the Java +# ProjectionType enumeration +# Sep 20, 2013 2405 dgilling Clip grids before inserting into cache. +# Oct 22, 2013 2405 rjpeter Remove WECache and store directly to cube. +# Oct 31, 2013 2508 randerso Change to use DiscreteGridSlice.getKeys() +# Aug 14, 2014 3526 randerso Fixed to get sampling definition from +# appropriate site +# Jan 13, 2015 3955 randerso Changed to use ifpServer.getTopoData +# Feb 17, 2015 4139 randerso Removed timeFromComponents and dependent +# functions in favor of calendar.timegm +# Apr 23, 2015 4259 njensen Updated for new JEP API +# May 13, 2015 4427 dgilling Add siteIdOverride field. +# Aug 06, 2015 4718 dgilling Optimize casting when using where with +# NumPy 1.9. +# Apr 07, 2016 5539 randerso Reversed order of parameters/return value in +# collapseKey +# to match order of Wx/Discrete tuple +# May 27, 2016 19014 ryu Fix rounding issue causing Td to be greater +# than T in output netCDF file. +# Sep 12, 2016 5861 randerso Remove references to IFPServerConfigManager +# which was largely redundant with IFPServer. +# Oct 31, 2016 5979 njensen Cast to primitives for compatibility +# Nov 21, 2016 5959 njensen Removed unused imports and made more pythonic +# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# Jul 31, 3017 6342 randerso Removed unused imports. Fixed long standing +# bug in extremaOfSetBits when mask does not overlap grid +# +## + +## +# This is a base file that is not intended to be overridden. 
+## + + + +import string, time, gzip, os, LogStream, stat, traceback +import calendar +from collections import OrderedDict +import numpy +#import pupynere as NetCDF +try: + # dev environment + from Scientific.IO import NetCDF +except: + # runtime we don't have the whole scientific package + import NetCDF +import JUtil +import iscUtil +import logging + +from java.util import ArrayList +from java.io import File +from com.vividsolutions.jts.geom import Coordinate +from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData +ProjectionType = ProjectionData.ProjectionType +from com.raytheon.edex.plugin.gfe.smartinit import IFPDB +from com.raytheon.edex.plugin.gfe.server import IFPServer +from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID +from com.raytheon.uf.common.localization import PathManagerFactory +from com.raytheon.uf.common.localization import LocalizationContext +LocalizationType = LocalizationContext.LocalizationType +LocalizationLevel = LocalizationContext.LocalizationLevel + + +# Original A1 BATCH WRITE COUNT was 10, we found doubling that +# lead to a significant performance increase. 
+BATCH_WRITE_COUNT = 20 +BATCH_DELAY = 0.0 +ifpNetcdfLogger=None + +## Logging methods ## +def initLogger(logFile=None): + global ifpNetcdfLogger + ifpNetcdfLogger = iscUtil.getLogger("ifpnetCDF",logFile, logLevel=logging.INFO) + +def logEvent(*msg): + ifpNetcdfLogger.info(iscUtil.tupleToString(*msg)) + +def logProblem(*msg): + ifpNetcdfLogger.error(iscUtil.tupleToString(*msg)) + +def logException(*msg): + ifpNetcdfLogger.exception(iscUtil.tupleToString(*msg)) + +def logVerbose(*msg): + ifpNetcdfLogger.debug(iscUtil.tupleToString(*msg)) + +def logDebug(*msg): + logVerbose(iscUtil.tupleToString(*msg)) + + +def retrieveData(we, inv, clipArea): + lst = list(inv) + trs=[] + histDict = OrderedDict() + cube = None + keyList = None + gridType = str(we.getGpi().getGridType()) + + # clipped size + clipSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1) + gridCount = len(inv) + + if gridType == "SCALAR": + cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32) + elif gridType == "VECTOR": + magCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32) + dirCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32) + cube = (magCube, dirCube) + elif gridType == "WEATHER" or gridType == "DISCRETE": + cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8) + keyList = [] + + cubeIdx = 0 + while len(lst): + i = lst[:BATCH_WRITE_COUNT] + javaTRs = ArrayList() + for tr in i: + javaTRs.add(iscUtil.toJavaTimeRange(tr)) + gridsAndHist = we.get(javaTRs, True) + size = gridsAndHist.size() + for idx in range(size): + pair = gridsAndHist.get(idx) + grid = pair.getFirst() + tr = iscUtil.transformTime(grid.getValidTime()) + encodeGridSlice(grid, gridType, clipArea, cube, cubeIdx, keyList) + cubeIdx += 1 + histDict[tr] = encodeGridHistory(pair.getSecond()) + lst = lst[BATCH_WRITE_COUNT:] + time.sleep(BATCH_DELAY) + + if len(histDict) != gridCount: + # retrieved less 
grids than originally expected, purge ran? + gridCount = len(histDict) + + if gridType == "SCALAR": + oldCube = cube + cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.float32) + for idx in range(gridCount): + cube[idx] = oldCube[idx] + elif gridType == "VECTOR": + oldMagCube = magCube + magCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32) + oldDirCube = dirCube + dirCube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]),dtype=numpy.float32) + cube = (magCube, dirCube) + for idx in range(gridCount): + magCube[idx] = oldMagCube[idx] + dirCube[idx] = oldDirCube[idx] + elif gridType == "WEATHER" or gridType == "DISCRETE": + oldCube = cube + cube = numpy.empty(shape=(gridCount, clipSize[1], clipSize[0]), dtype=numpy.int8) + for idx in range(gridCount): + cube[idx] = oldCube[idx] + return (cube, histDict, keyList) + +###-------------------------------------------------------------------------### +### cube and keyList are out parameters to be filled by this method, idx is the index into cube to use +def encodeGridSlice(grid, gridType, clipArea, cube, idx, keyList): + if gridType == "SCALAR": + cube[idx] = clipToExtrema(grid.getNDArray(), clipArea) + elif gridType == "VECTOR": + vecGrids = grid.getNDArray() + cube[0][idx] = clipToExtrema(vecGrids[0], clipArea) + cube[1][idx] = clipToExtrema(vecGrids[1], clipArea) + elif gridType == "WEATHER" or gridType == "DISCRETE": + keys = grid.getKeys() + gridKeys = [] + + for theKey in keys: + gridKeys.append(str(theKey)) + keyList.append(gridKeys) + cube[idx]= clipToExtrema(grid.getNDArray(), clipArea) + +def encodeGridHistory(histories): + retVal = [] + for i in range(histories.size()): + retVal.append(histories.get(i).getCodedString()) + return tuple(retVal) + + +###-------------------------------------------------------------------------### +### Processes the parm list. If the list is empty, get all of the parms +### from the database. 
If any are missing _SFC add it. +def processParmList(argDict, db): + parmList = argDict['parmList'] + dbList = db.getKeys() + if len(parmList) == 0: + parmList = JUtil.javaStringListToPylist(dbList) + + # now add _SFC to all parmNames with no underscore + for i in range(0, len(parmList)): + if '_' not in parmList[i]: + parmList[i] = parmList[i] + "_SFC" + + # now verify that the parm is in the database + final = [] + for p in parmList: + if dbList.contains(p): + final.append(p) + return final + +###-------------------------------------------------------------------------### +### Returns true if the specified time is contained within the timeRange +def contains(timerange, time): + if timerange[1] - timerange[0]: + return ((time >= timerange[0]) and (time < timerange[1])) + return time == timerange[0] + +###-------------------------------------------------------------------------### +### Returns intersection time range of two time ranges, if no intersection +### then None is returned. +def intersection(tr1, tr2): + if tr1[0] < tr2[0]: + startTime = tr2[0] + else: + startTime = tr1[0] + if tr1[1] > tr2[1]: + endTime = tr2[1] + else: + endTime = tr1[1] + if startTime >= endTime: + return None # no intersection + else: + return (startTime, endTime) + +###-------------------------------------------------------------------------### +def overlaps(tr1, tr2): + "Returns true if the specified time ranges overlap" + if contains(tr2, tr1[0]) or contains(tr1, tr2[0]): + return 1 + return 0 + +###-------------------------------------------------------------------------### +### Makes an integer from the specified string in seconds since 1-Jan-1970 00Z +def getIntTime(timeStr): + "Create an Integer time from a string: YYYYMMDD_HHMM" + + try: + timeTuple = time.strptime(timeStr, "%Y%m%d_%H%M") + except: + logProblem(timeStr, \ + "is not a valid time string. Use YYYYMMDD_HHMM",traceback.format_exc()) + s = timeStr + " is not a valid time string. 
Use YYYYMMDD_HHMM" + raise SyntaxError(s) + return + return calendar.timegm(timeTuple) + +###-------------------------------------------------------------------------### +### Makes a TimeRange from the input string of the form YYYYMMDD_HHMM. +def makeTimeRange(startString, endString): + "Makes a timeRange from the specified time strings." + try: + t1 = getIntTime(startString) + t2 = getIntTime(endString) + except: + raise Exception("Can't decode YYYYMMDD_HHMM string") + + return (t1, t2) + + +###-------------------------------------------------------------------------### +def timeRangeAsString(tr): + "Prints timeRange in YYYYMMDD_HHMM format" + return time.strftime("%Y%m%d_%H%M", time.gmtime(tr[0])) + " --- " \ + + time.strftime("%Y%m%d_%H%M", time.gmtime(tr[1])) + +###-------------------------------------------------------------------------### +def extremaOfSetBits(mask): + "Returns tuple of extrema of set bits (minx,maxx, miny,maxy)" + nz = numpy.nonzero(mask) + + minx = miny = 0 + maxx = mask.shape[1] - 1 + maxy = mask.shape[0] - 1 + + if nz[1].any(): + minx = nz[1].min() + maxx = nz[1].max() + + if nz[0].any(): + miny = nz[0].min() + maxy = nz[0].max() + + return (minx, maxx, miny, maxy) + +###------------------------------------------------------------------------### +def clipToExtrema(grid, clipArea): + "Clips grid to info in clipArea: (minx, maxx, miny, maxy)" + if clipArea[0] == -1: + return grid # no clipping at all + minx = clipArea[0] + maxx = clipArea[1] + miny = clipArea[2] + maxy = clipArea[3] + return grid[miny:maxy + 1, minx:maxx + 1] + +###-------------------------------------------------------------------------### +### Returns a list of dimension names based on the tuple of integer sizes, +### as well as the names of the dimensions. +### Adds the dimension to the netCDF file, if necessary. +### Special case, if dimension of zero, use a different name. 
+def getDims(file, dimSizes, dimNames): + if len(dimSizes) != len(dimNames): + raise Exception("dimSizes and dimNames not same size") + dimList = list(dimSizes) + dimNames = list(dimNames) + actDimNames = [] + existingDimList = list(file.dimensions.keys()) + + for x in range(len(dimList)): + dimName = "DIM_" + str(dimSizes[x]) + actDimNames.append(dimName) + if dimName not in existingDimList: + file.createDimension(dimName, dimSizes[x]) + existingDimList.append(dimName) + + return tuple(actDimNames) + +###-------------------------------------------------------------------------### +def getMaskGrid(ifpServer, editAreaName, dbId): + #make a mask with all bits set (y,x) + domain = ifpServer.getConfig().dbDomain() + mask = numpy.ones((int(domain.getNy()), int(domain.getNx())), dtype=numpy.bool) + + if editAreaName == "": + return mask + + # get the edit area + try: + mask = iscUtil.getEditArea(editAreaName, DatabaseID(dbId).getSiteId()) + mask.setGloc(domain) + mask = mask.getGrid().getNDArray().astype(numpy.bool) + except: + logProblem("Edit area:", editAreaName, "not found. 
Storing entire grid.",traceback.format_exc()) + + return mask + +###-------------------------------------------------------------------------### +def storeLatLonGrids(ifpServer, file, databaseID, invMask, krunch, clipArea): + + # Get the grid location and projection information + gridLoc = ifpServer.getConfig().dbDomain() + pDict = gridLoc.getProjection() + + latLonGrid = gridLoc.getLatLonGrid() + + latLonGrid = numpy.reshape(latLonGrid, (2, int(gridLoc.getNy()), int(gridLoc.getNx())), order='F') + + # clip them + lonGrid = clipToExtrema(latLonGrid[0], clipArea) + latGrid = clipToExtrema(latLonGrid[1], clipArea) + + # recast the arrays for compatibility with netCDF + lonGrid = numpy.flipud(lonGrid) + latGrid = numpy.flipud(latGrid) + + # clipped size + clipSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1) + + newsize = (clipSize[1], clipSize[0]) #y,x + latGrid = numpy.resize(latGrid, newsize) + lonGrid = numpy.resize(lonGrid, newsize) + + dims = getDims(file, latGrid.shape, ("y", "x")) + + + # store latitude grid + if krunch: + latVar = file.createVariable("latitude", 'h', dims) + latGrid = (latGrid * 100).astype(numpy.int16) + latVar[:] = latGrid + setattr(latVar, "dataMultiplier", 0.01) + setattr(latVar, "dataOffset", 0) + else: + latVar = file.createVariable("latitude", 'f', dims) + latVar[:] = latGrid + + # make the netCDF attributes + # Descriptive Name + setattr(latVar, "descriptiveName", "latitude") + + # coordinate information + origGridSize = Coordinate(float(gridLoc.getNx()), float(gridLoc.getNy())) + origOrigin = gridLoc.getOrigin() + origExtent = gridLoc.getExtent() + + cellSize = (origExtent.x / (origGridSize.x - 1), + origExtent.y / (origGridSize.y - 1)) + clippedExtent = (cellSize[0] * (clipSize[0] - 1), + cellSize[1] * (clipSize[1] - 1)) + domainOffset = (clipArea[0] * cellSize[0], (origGridSize.y - clipArea[3]-1) * cellSize[1]) + + clippedOrigin = (origOrigin.x + domainOffset[0] , + origOrigin.y + domainOffset[1]) + + # 
gridSize, domain origin/extent + setattr(latVar, "gridSize", clipSize) + setattr(latVar, "domainOrigin", clippedOrigin) + setattr(latVar, "domainExtent", clippedExtent) + + #units + setattr(latVar, "units", "degrees") + # projection info - store whatever is in the dictionary + storeProjectionAttributes(latVar, pDict) + + # store longitude grid + if krunch: + lonVar = file.createVariable("longitude", 'h', dims) + lonGrid = (lonGrid * 100).astype(numpy.int16) + lonVar[:] = lonGrid + setattr(lonVar, "dataMultiplier", 0.01) + setattr(lonVar, "dataOffset", 0) + else: + lonVar = file.createVariable("longitude", 'f', dims) + lonVar[:] = lonGrid + + # Descriptive Name + setattr(lonVar, "descriptiveName", "longitude") + + # gridSize, domain origin/extent + setattr(lonVar, "gridSize", clipSize) + setattr(lonVar, "domainOrigin", clippedOrigin) + setattr(lonVar, "domainExtent", clippedExtent) + + #units + setattr(lonVar, "units", "degrees") + # projection info - store whatever is in the dictionary + storeProjectionAttributes(lonVar, pDict) + + logEvent("Saved Latitude/Longitude Grid") + +###-------------------------------------------------------------------------### +def storeTopoGrid(ifpServer, file, databaseID, invMask, clipArea): + "Stores the topo grid in the database" + + # Get the grid location and projection information + gridLoc = ifpServer.getConfig().dbDomain() + pDict = gridLoc.getProjection() + + # Get the topo grid + topoGrid = ifpServer.getTopoData(gridLoc).getPayload().getNDArray() + topoGrid = clipToExtrema(topoGrid, clipArea) + topoGrid = numpy.flipud(topoGrid) + + # clipped size + clipGridSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1) + + newsize = (clipGridSize[1], clipGridSize[0]) #y,x + topoGrid = numpy.resize(topoGrid, newsize) + + dims = getDims(file, topoGrid.shape, ("y", "x")) + + # create the netcdf variable + var = file.createVariable("Topo", 'h', dims) + + # round to nearest foot + topoGrid = numpy.array((topoGrid + 0.5) / 
1).astype(numpy.int16) + + var[:] = topoGrid + + # make the netCDF attributes + # Descriptive Name + setattr(var, "descriptiveName", "Topography") + + # coordinate information + origGridSize = Coordinate(float(str(gridLoc.getNx())), float(str(gridLoc.getNy()))) + origOrigin = gridLoc.getOrigin() + origExtent = gridLoc.getExtent() + + cellSize = (origExtent.x / (origGridSize.x - 1), + origExtent.y / (origGridSize.y - 1)) + clippedExtent = (cellSize[0] * (clipGridSize[0] - 1), + cellSize[1] * (clipGridSize[1] - 1)) + domainOffset = (clipArea[0] * cellSize[0], (origGridSize.y - clipArea[3]-1) * cellSize[1]) + + clippedOrigin = (origOrigin.x + domainOffset[0] , + origOrigin.y + domainOffset[1]) + + # gridSize + setattr(var, "gridSize", clipGridSize) + + # Domain origin + setattr(var, "domainOrigin", clippedOrigin) + # Domain extent + setattr(var, "domainExtent", clippedExtent) + #units + setattr(var, "units", "ft") + # projection info - store whatever is in the dictionary + storeProjectionAttributes(var, pDict) + + logEvent("Saved Topo Grid") + +###-------------------------------------------------------------------------### +### +def storeGridDataHistory(file, we, histDict): + "Stores the Grid Data history string for each grid in we." + + # get the maximum size of the history string + maxHistSize = 0 + histList = [] + for (tr, his) in list(histDict.items()): + hisString = '' + for i,h in enumerate(his): + hisString = hisString + str(h) + if i != len(his) - 1: + hisString = hisString + " ^" + histList.append(hisString) + maxHistSize = max(maxHistSize,len(hisString)) + + # Make the history variable and fill it + histShape = (len(histList), maxHistSize + 1) + histCube = numpy.zeros(histShape, 'c') + for slot, hisString in enumerate(histList): + histCube[slot:] = hisString + + # make the history variable anyway. iscMosaic needs it. 
+ elemName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() + dimNames = ["ngrids_" + elemName, "histLen_" + elemName] + dims = getDims(file, histShape, dimNames) + varName = elemName + "_GridHistory" + + var = file.createVariable(varName, 'c', dims) + + if len(histList) > 0: + # store the cube in the netCDF file + var[:] = histCube + return + +###-------------------------------------------------------------------------### +### +def calcKrunchValues(we): + #Based on the weather element, will return information pertaining + #to the dataType, multiplier, offset, and missing value to use for this + #element. Returns (dataType, multiplier, offset, missingValue, pythonType) + + maxV = we.getGpi().getMaxValue() + minV = we.getGpi().getMinValue() + precision = pow(10, we.getGpi().getPrecision()) + + nentries = ((maxV - minV) * precision) + 1 + + # check for byte possibilities + if nentries <= pow(2, 8) - 1: + multiplier = precision + offset = 0 + minVarValue = -126 + maxVarValue = 127 + if minV * multiplier < minVarValue: + offset = minV - minVarValue / multiplier + if maxV * multiplier > maxVarValue: + offset = maxV - maxVarValue / multiplier + missingValue = -127 + format = "b" + pythonType = numpy.int8 + + # check for short possibilities + elif nentries <= pow(2, 16) - 2: + multiplier = precision + offset = 0 + maxVarValue = pow(2, 15) - 1 + minVarValue = -(pow(2, 15) - 2) + if minV * multiplier < minVarValue: + offset = minV - minVarValue / multiplier + if maxV * multiplier > maxVarValue: + offset = maxV - maxVarValue / multiplier + missingValue = minVarValue - 1 + format = "h" + pythonType = numpy.int16 + + # else full 32-bit float processing, no krunching needed + else: + multiplier = None + offset = None + format = "f" + missingValue = -30000.0 + pythonType = numpy.float32 + return (format, multiplier, offset, missingValue, pythonType) + + +###-------------------------------------------------------------------------### +def 
storeProjectionAttributes(var, projectionData): + + projectionType = projectionData.getProjectionType() + # store the attributes common to all projections + setattr(var, "latLonLL", (projectionData.getLatLonLL().x, projectionData.getLatLonLL().y)) + setattr(var, "latLonUR", (projectionData.getLatLonUR().x, projectionData.getLatLonUR().y)) + setattr(var, "gridPointLL", (projectionData.getGridPointLL().x, projectionData.getGridPointLL().y)) + setattr(var, "gridPointUR", (projectionData.getGridPointUR().x, projectionData.getGridPointUR().y)) + setattr(var, "projectionType", str(projectionType)) + + # Now store the projection specific attributes + if ProjectionType.LAMBERT_CONFORMAL.equals(projectionType): + setattr(var, "latLonOrigin", (projectionData.getLatLonOrigin().x, projectionData.getLatLonOrigin().y)) + setattr(var, "stdParallelOne", projectionData.getStdParallelOne()) + setattr(var, "stdParallelTwo", projectionData.getStdParallelTwo()) + + if ProjectionType.POLAR_STEREOGRAPHIC.equals(projectionType): + setattr(var, "lonOrigin", projectionData.getLonOrigin()) + + if ProjectionType.MERCATOR.equals(projectionType): + setattr(var, "lonCenter", projectionData.getLonCenter()) + + return + +###-------------------------------------------------------------------------### +def storeWEAttributes(var, we, timeList, databaseID, clipArea, siteIdOverride): + "Stores attributes in the netCDF file for any weather element" + + # Note that geo information is modified based on the clip info. 
+ + # TimeRanges + import itertools + setattr(var, "validTimes", list(itertools.chain.from_iterable(timeList))) + + # Descriptive Name + setattr(var, "descriptiveName", we.getGpi().getDescriptiveName()) + + # gridSize + clipGridSize = (clipArea[1] - clipArea[0] + 1, clipArea[3] - clipArea[2] + 1) + setattr(var, "gridSize", clipGridSize) + + # Domain origin and extent + gridLoc = we.getGpi().getGridLoc() + origGridSize = Coordinate(float(str(gridLoc.getNx())), float(str(gridLoc.getNy()))) + origOrigin = gridLoc.getOrigin() + origExtent = gridLoc.getExtent() + + + cellSize = (origExtent.x / (origGridSize.x - 1), + origExtent.y / (origGridSize.y - 1)) + + clippedExtent = (cellSize[0] * (clipGridSize[0] - 1), + cellSize[1] * (clipGridSize[1] - 1)) + + domainOffset = (clipArea[0] * cellSize[0], (origGridSize.y - clipArea[3]-1) * cellSize[1]) + + clippedOrigin = (origOrigin.x + domainOffset[0] , + origOrigin.y + domainOffset[1]) + + setattr(var, "domainOrigin", clippedOrigin) + setattr(var, "domainExtent", clippedExtent) + + # Min/Max allowable values + setattr(var, "minMaxAllowedValues", (we.getGpi().getMinValue(), we.getGpi().getMaxValue())) + + # determine correct siteID to write to netCDF file + # we needed this siteIdOverride incase we're exporting grids from a subdomain + srcSiteId = we.getParmid().getDbId().getSiteId() + destSideId = srcSiteId + if siteIdOverride: + destSideId = siteIdOverride + fixedDbId = databaseID.replace(srcSiteId + "_", destSideId + "_", 1) + + # data type + setattr(var, "gridType", str(we.getGpi().getGridType())) + # database ID + setattr(var, "databaseID", fixedDbId) + # siteID + #setattr(var, "siteID", we.siteID) + setattr(var, "siteID", destSideId) + # units + setattr(var, "units", we.getGpi().getUnitString()) + # level + setattr(var, "level", we.getParmid().getParmLevel()) + # timeConstraints + setattr(var, "timeConstraints", (we.getGpi().getTimeConstraints().getStartTime(), we.getGpi().getTimeConstraints().getDuration(), 
we.getGpi().getTimeConstraints().getRepeatInterval())) + # precision + setattr(var, "precision", we.getGpi().getPrecision()) + + # rate parm + setattr(var, "rateDependent", we.getGpi().isRateParm()) + + # projection info - store whatever is in the dictionary + storeProjectionAttributes(var, gridLoc.getProjection()) + + return + + +def findOverlappingTimes(trList, timeRange): + timeList = [] + overlappingTimes = [] + for t in trList: + interTR = intersection(t, timeRange) + if interTR is not None: + overlappingTimes.append(t) + timeList.append(interTR) + + return timeList, overlappingTimes + +###-------------------------------------------------------------------------### +### Stores the specified Scalar WE in the netCDF file whose grids fall within +### the specified timeRange. +def storeScalarWE(we, trList, file, timeRange, databaseID, + invMask, trim, clipArea, krunch, siteIdOverride): + "Stores a weather element to the netCDF file" + + # get the data and store it in a Numeric array. + timeList, overlappingTimes = findOverlappingTimes(trList, timeRange) + + (cube, histDict, keyList) = retrieveData(we, overlappingTimes, clipArea) + gridCount = len(cube) + for i in range(len(overlappingTimes) -1, -1, -1): + ot = overlappingTimes[i] + if not ot in histDict: + del overlappingTimes[i] + del timeList[i] + elif we.getGpi().isRateParm(): + durRatio = (float(timeList[i][1]-timeList[i][0]))/float((ot[1]-ot[0])) + cube[i] *= durRatio + + ### Make sure we found some grids + # make the variable name + varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() + + if len(cube) == 0: + logVerbose("No", varName, "grids found") + + #get the dimension List + dimNames = ["ngrids_" + varName, "y", "x"] + dims = getDims(file, cube.shape, dimNames) + + # Round the values according to the precision + if trim: + if krunch: + format, multiplier, offset, fillValue, pythonType = \ + calcKrunchValues(we) + else: + format, multiplier, offset, fillValue, pythonType = \ + 
('f', None, None, -30000.0, numpy.float32) + + # krunch + if multiplier is not None: + cube -= offset + cube *= multiplier + numpy.floor(cube+0.5, out=cube) + # normal trim + else: + digits = we.getGpi().getPrecision() + numpy.around(cube, digits, cube) + cube = cube.astype(pythonType) + + else: + format, multiplier, offset, fillValue, pythonType = \ + ('f', None, None, -30000.0, numpy.float32) + + # mask the data + cube[:,invMask] = fillValue + + # create the variable + var = file.createVariable(varName, format, dims) + if multiplier is not None: + setattr(var, "dataMultiplier", 1.0 / multiplier) + setattr(var, "dataOffset", offset) + + # Save the grids to the netCDF file + for i in range(len(cube)): + var[i] = numpy.flipud(cube[i]) + + # Store the attributes + storeWEAttributes(var, we, timeList, databaseID, clipArea, siteIdOverride) + setattr(var, "fillValue", fillValue) + + ## Extract the GridDataHistory info and save it + storeGridDataHistory(file, we, histDict) + + logEvent("Saved", gridCount, varName, " grids") + + return gridCount + +###-------------------------------------------------------------------------### +### Stores the specified Vector WE in the netCDF file whose grids fall within +### the specified timeRange. +def storeVectorWE(we, trList, file, timeRange, + databaseID, invMask, trim, clipArea, krunch, siteIdOverride): + "Stores a vector weather element to the netCDF file" + + # get the data and store it in a Numeric array. 
+ timeList, overlappingTimes = findOverlappingTimes(trList, timeRange) + + ((magCube, dirCube), histDict, keyList) = retrieveData(we, overlappingTimes, clipArea) + gridCount = len(magCube) + for i in range(len(overlappingTimes) -1, -1, -1): + ot = overlappingTimes[i] + if not ot in histDict: + del overlappingTimes[i] + del timeList[i] + elif we.getGpi().isRateParm(): + durRatio = (float(timeList[i][1]-timeList[i][0]))/float((ot[1]-ot[0])) + magCube[i] *= durRatio + + varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() + + ### Make sure we found some grids + if len(magCube) == 0: + logVerbose("No", varName, "grids found") + + # make the variable name + magVarName = we.getParmid().getParmName() + "_Mag_" + we.getParmid().getParmLevel() + dirVarName = we.getParmid().getParmName() + "_Dir_" + we.getParmid().getParmLevel() + + #get the dimension List + dimNames = ["ngrids_" + varName, "y", "x"] + dims = getDims(file, magCube.shape, dimNames) + + # Round the values according to the precision + if trim: + if krunch: + mformat, mmultiplier, moffset, mfillValue, mpythonType = \ + calcKrunchValues(we) + dformat, dmultiplier, doffset, dfillValue, dpythonType = \ + ('b', 0.1, 0.0, -127, numpy.int8) + else: + mformat, mmultiplier, moffset, mfillValue, mpythonType = \ + ('f', None, None, -30000.0, numpy.dtype(numpy.float32)) + dformat, dmultiplier, doffset, dfillValue, dpythonType = \ + ('f', None, None, -30000.0, numpy.float32) + + # krunch magnitude + if mmultiplier is not None: + magCube -= moffset + magCube *= mmultiplier + numpy.around(magCube,out=magCube) + + # normal trim for magnitude + else: + digits = we.getGpi().getPrecision() + numpy.around(magCube, digits, magCube) + magCube = magCube.astype(mpythonType) + + # krunch direction + if dmultiplier is not None: + dirCube -= doffset + dirCube *= dmultiplier + numpy.around(dirCube,out=dirCube) + + # normal trim for direction + else: + numpy.around(dirCube, -1, dirCube) + 
dirCube[numpy.greater_equal(dirCube, 360.0)] -= 360.0 + dirCube = dirCube.astype(dpythonType) + + else: + mformat, mmultiplier, moffset, mfillValue, mpythonType = \ + ('f', None, None, -30000.0, numpy.float32) + dformat, dmultiplier, doffset, dfillValue, dpythonType = \ + ('f', None, None, -30000.0, numpy.float32) + + magCube[:,invMask] = mfillValue + dirCube[:,invMask] = dfillValue + + # create the variable + magVar = file.createVariable(magVarName, mformat, dims) + dirVar = file.createVariable(dirVarName, dformat, dims) + if mmultiplier is not None: + setattr(magVar, "dataMultiplier", 1.0 / mmultiplier) + setattr(magVar, "dataOffset", moffset) + if dmultiplier is not None: + setattr(dirVar, "dataMultiplier", 1.0 / dmultiplier) + setattr(dirVar, "dataOffset", doffset) + + # Save the grid to the netCDF file + for i in range(len(magCube)): + magVar[i] = numpy.flipud(magCube[i]) + dirVar[i] = numpy.flipud(dirCube[i]) + + # Store the attributes - overwrite some for mag and dir + storeWEAttributes(magVar, we, timeList, databaseID, clipArea, siteIdOverride) + + # Change the descriptive name + setattr(magVar, "descriptiveName", we.getGpi().getDescriptiveName() + " Magnitude") + setattr(magVar, "fillValue", mfillValue) + storeWEAttributes(dirVar, we, timeList, databaseID, clipArea, siteIdOverride) + + # Special case attributes for wind direction + setattr(dirVar, "minMaxAllowedValues", (0.0, 360.0)) + setattr(dirVar, "descriptiveName", we.getGpi().getDescriptiveName() + " Direction") + setattr(dirVar, "units", "degrees") + if trim: + dirPrecision = -1 + else: + dirPrecision = 0 + setattr(dirVar, "precision", dirPrecision) + setattr(dirVar, "fillValue", dfillValue) + + ## Extract the GridDataHistory info and save it + storeGridDataHistory(file, we, histDict) + + logEvent("Saved", gridCount, varName, "grids") + + return gridCount * 2 #vector has two grids + + +###-------------------------------------------------------------------------### +# Collapse key and bytes. 
(for discrete and weather) +### Returns tuple of (updated grid, updated key) +def collapseKey(grid, keys): + #make list of unique indexes in the grid + flatGrid = grid.flat + used = numpy.zeros((len(keys)), dtype=numpy.bool) + for n in range(flatGrid.__array__().shape[0]): + used[0xFF & flatGrid[n]] = True + + #make reverse map + map = [] + newKeys = [] + j = 0 + for i in range(len(keys)): + if used[i]: + map.append(j) + newKeys.append(keys[i]) + j = j + 1 + else: + map.append(-1) + + # modify the data + newGrid = grid + for k in range(len(map)): + mask = numpy.equal(numpy.int8(k), grid) + newGrid = numpy.where(mask, numpy.int8(map[k]), newGrid).astype(numpy.int8) + + return (newGrid, newKeys) + +###-------------------------------------------------------------------------### +# Stores the specified Weather WE in the netCDF file whose grids fall within +### the specified timeRange. +def storeWeatherWE(we, trList, file, timeRange, databaseID, invMask, clipArea, siteIdOverride): + "Stores the Weather weather element to the netCDF file" + + # get the data and store it in a Numeric array. 
+ timeList, overlappingTimes = findOverlappingTimes(trList, timeRange) + + (byteCube, histDict, keyList) = retrieveData(we, overlappingTimes, clipArea) + gridCount = len(histDict) + for i in range(len(overlappingTimes) -1, -1, -1): + ot = overlappingTimes[i] + if not ot in histDict: + del overlappingTimes[i] + del timeList[i] + + # make the variable name + varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() + + ### Make sure we found some grids + if len(byteCube) == 0: + logVerbose("No", varName, "grids found") + + #get the dimension List + dimNames = ["ngrids_" + varName, "y", "x"] + dims = getDims(file, byteCube.shape, dimNames) + + # create the netCDF variable - 'b' for byte type + var = file.createVariable(varName, 'b', dims) + + # Process the weather keys so we store only what is necessary + + for g in range(byteCube.shape[0]): + (byteCube[g], keyList[g]) = collapseKey(byteCube[g], keyList[g]) + + # Mask the values + fillValue = -127 + byteCube[:,invMask] =fillValue + + # Save the grids to the netCDF file + for i in range(len(byteCube)): + var[i] = numpy.flipud(byteCube[i]) + + # Find the max number of keys and max length for all keys + maxKeyCount = 1 + maxKeySize = 0 + + for k in keyList: + if len(k) > maxKeyCount: + maxKeyCount = len(k) + + for s in k: + if len(s) > maxKeySize: + maxKeySize = len(s) + + # create a new netCDF variable to hold the weather keys + wxShape = (gridCount, maxKeyCount, maxKeySize + 1) # zero byte at + # the end + dimNames = ["ngrids_" + varName, "nkeys_" + varName, "keylen_" + varName] + dims = getDims(file, wxShape, dimNames) + keyVarName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() + "_wxKeys" + keyVar = file.createVariable(keyVarName, 'c', dims) + + chars = numpy.zeros(wxShape, 'c') + + # now save the weather keys in the netCDF file + for g in range(0, gridCount): + for k in range(0, len(keyList[g])): + for c in range(0, len(keyList[g][k])): + chars[g][k][c] = keyList[g][k][c] + 
if len(byteCube): + keyVar[:] = chars + + # Store the attributes + storeWEAttributes(var, we, timeList, databaseID, clipArea, siteIdOverride) + setattr(var, "fillValue", fillValue) + + ## Extract the GridDataHistory info and save it + storeGridDataHistory(file, we, histDict) + + logEvent("Saved", gridCount, varName, "grids") + + return gridCount + +###-------------------------------------------------------------------------### +# Stores the specified Discrete WE in the netCDF file whose grids fall within +### the specified timeRange. +def storeDiscreteWE(we, trList, file, timeRange, databaseID, invMask, clipArea, siteIdOverride): + "Stores the Weather weather element to the netCDF file" + + # get the data and store it in a Numeric array. + timeList, overlappingTimes = findOverlappingTimes(trList, timeRange) + + (byteCube, histDict, keyList) = retrieveData(we, overlappingTimes, clipArea) + gridCount = len(histDict) + for i in range(len(overlappingTimes) -1, -1, -1): + ot = overlappingTimes[i] + if not ot in histDict: + del overlappingTimes[i] + del timeList[i] + + # make the variable name + varName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() + + ### Make sure we found some grids + if len(byteCube) == 0: + logVerbose("No", varName, "grids found") + + #get the dimension List + dimNames = ["ngrids_" + varName, "y", "x"] + dims = getDims(file, byteCube.shape, dimNames) + + # create the netCDF variable - 'b' for byte type + var = file.createVariable(varName, 'b', dims) + + # Process the discrete keys so we store only what is necessary + + for g in range(byteCube.shape[0]): + (byteCube[g], keyList[g]) = collapseKey(byteCube[g], keyList[g]) + + # Mask the values + fillValue = -127 + byteCube[:,invMask] = fillValue + + # Save the grids to the netCDF file + for i in range(len(byteCube)): + var[i] = numpy.flipud(byteCube[i]) + + # Find the max number of keys and max length for all keys + maxKeyCount = 1 + maxKeySize = 0 + for k in keyList: + if len(k) 
> maxKeyCount: + maxKeyCount = len(k) + for s in k: + if len(s) > maxKeySize: + maxKeySize = len(s) + + # create a new netCDF variable to hold the discrete keys + disShape = (gridCount, maxKeyCount, maxKeySize + 1) # zero byte at + # the end + dimNames = ["ngrids_" + varName, "nkeys_" + varName, "keylen_" + varName] + dims = getDims(file, disShape, dimNames) + keyVarName = we.getParmid().getParmName() + "_" + we.getParmid().getParmLevel() + "_keys" + keyVar = file.createVariable(keyVarName, 'c', dims) + + chars = numpy.zeros(disShape, 'c') + + # now save the discrete keys in the netCDF file + for g in range(0, gridCount): + for k in range(0, len(keyList[g])): + for c in range(0, len(keyList[g][k])): + chars[g][k][c] = keyList[g][k][c] + if len(byteCube): + keyVar[:] = chars + + # Store the attributes + storeWEAttributes(var, we, timeList, databaseID, clipArea, siteIdOverride) + setattr(var, "fillValue", fillValue) + + ## Extract the GridDataHistory info and save it + storeGridDataHistory(file, we, histDict) + + logEvent("Saved", gridCount, varName, "grids") + + return gridCount + +###-------------------------------------------------------------------------### +### Store some global attribute to the file +def storeGlobalAtts(file, argDict): + currentTime = int(time.time()) + asciiTime = time.asctime(time.gmtime(int(time.time()))) + setattr(file, "creationTime", currentTime) + setattr(file, "creationTimeString", asciiTime) + if argDict['krunch']: + setattr(file, "fileFormatVersion", "20030117") + else: + setattr(file, "fileFormatVersion", "20010816") + setattr(file, "startProcTime", argDict['startTime']) + setattr(file, "endProcTime", argDict['endTime']) + return + +###-------------------------------------------------------------------------### +### Compresses the file using the gzip library +def compressFile(filename, factor): + + if factor < 1: + factor = 1 + elif factor > 9: + factor = 9 + fp = open(filename, "rb") + fpout = gzip.open(filename + ".gz", "wb", 
factor) + buffer = fp.read(1024 * 16) + while buffer != "": + fpout.write(buffer) + buffer = fp.read(1024 * 16) + + fp.close() + fpout.close() + # remove the orginal file + os.remove(filename) + +###------------ +# getSamplingDefinition - accesses server to retrieve definition, +# returns None or the sampling definition as Python. +def getSamplingDefinition(configName, siteId): + if configName is None: + return None + pathManager = PathManagerFactory.getPathManager() + fileName = "isc/utilities/" + configName + ".py" + siteContext = pathManager.getContextForSite(LocalizationType.COMMON_STATIC, siteId) + file = pathManager.getFile(siteContext, fileName) + + # if site file not found, try base level + if file is None or not file.exists(): + baseContext = pathManager.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.BASE) + file = pathManager.getFile(baseContext, fileName) + + if file is None or not file.exists(): + s = "Sampling Definition " + configName + " not found, using all grids." + logProblem(s) + return None + from com.raytheon.uf.common.util import FileUtil + data = FileUtil.file2String(file) + try: + exec(data) + return SampleDef + except: + s = "Bad Sampling Definition found [" + configName + \ + "], using all grids." + logProblem(s,traceback.format_exc()) + return None + + +###------------ +# determineSamplingInventory based on sampling definition +# returns inventory of time ranges to include in the netCDF file. 
+def determineSamplingValues(samplingDef, parmName, inventory, currentTime): + # we're going to get inventory as a PyJObject (List, actually), + # but to best match AWIPS-1 will return a list of their tuple-based + # time range objects, regardless if we have a valid sample definition or not + + if samplingDef is None or inventory.size() == 0: + newInv = [] + for i in range(0, inventory.size()): + newInv.append(iscUtil.transformTime(inventory.get(i))) + return newInv #all grids + + basetimeDef, offsetDef = samplingDef.get(parmName, samplingDef['default']) + + lastInvT = iscUtil.transformTime(inventory.get(inventory.size()-1))[1] #ending time for last grid + firstInvT = iscUtil.transformTime(inventory.get(0))[0] #starting time for first grid + + # determine basetime + bts = [] + bt = int(currentTime / 86400) * 86400 #0z today + while bt >= firstInvT: + bt = bt - 86400 #back up a day until we are earlier than 1st grid + while bt < lastInvT: + for bval in basetimeDef: + bts.append(bt + bval) + bt = bt + 86400 + basetime = None + for bt in bts: + if currentTime >= bt: + basetime = bt + else: + break + + # now determine the set of possible times + checkTimes = [] + # lastInvT = inventory[ -1][1] #ending time for last grid + + tval = basetime #begin at the basetime + tupleNumber = 0 + while tupleNumber < len(offsetDef): + beginT, intervalT = offsetDef[tupleNumber] + tval = basetime + beginT + while tval < lastInvT: + if tupleNumber < len(offsetDef) - 1: + if tval < basetime + offsetDef[tupleNumber + 1][0]: + checkTimes.append(tval) + tval = tval + intervalT #still in this tuple + else: + break #go onto the next tuple + else: + checkTimes.append(tval) + tval = tval + intervalT #can't compare - in last tuple + tupleNumber = tupleNumber + 1 + + #match them up with the inventory to select the intersecting times + inven = [] + startIndexCheck = 0 + for i in range(0, inventory.size()): + inv = iscUtil.transformTime(inventory.get(i)) + for x in range(startIndexCheck, 
len(checkTimes)): + if contains(inv, checkTimes[x]): + startIndexCheck = x + 1 + if inv not in inven: + inven.append(inv) + break + return inven + + +###-------------------------------------------------------------------------### +### Main program +def main(outputFilename, parmList, databaseID, startTime, + endTime, mask, geoInfo, compressFileFlag, configFileName, + compressFileFactor, trim, krunch, userID, logFileName, siteIdOverride): + initLogger(logFileName) + + +# LogStream.ttyLogOn() + logEvent("ifpnetCDF Starting") +# LogStream.logEvent(AFPS.DBSubsystem_getBuildDate(), +# AFPS.DBSubsystem_getBuiltBy(), AFPS.DBSubsystem_getBuiltOn(), +# AFPS.DBSubsystem_getBuildVersion()) + + if hasattr(parmList, 'java_name'): + parmList = JUtil.javaObjToPyVal(parmList) + + argDict = {"outputFilename": outputFilename, + "parmList": parmList, + "databaseID": databaseID, + "startTime": startTime, + "endTime": endTime, + "mask": mask, + "geoInfo": bool(geoInfo), + "compressFile": bool(compressFileFlag), + "configFileName": configFileName, + "compressFileFactor": int(compressFileFactor), + "trim": bool(trim), + "krunch": bool(krunch), + "userID": userID, + "siteIdOverride" : siteIdOverride, } + logEvent("Command: ", argDict) + + a = os.times() + cpu0 = a[0] + a[1] + start = a[4] + siteId = DatabaseID(databaseID).getSiteId() + ifpServer = IFPServer.getActiveServer(siteId) + if ifpServer is None: + raise Exception("No active IFPServer for site: " + siteId) + + try: + timeRange = makeTimeRange(argDict['startTime'], argDict['endTime']) + except: + logException("Unable to create TimeRange from arguments: startTime= " + str(argDict['startTime']) + ", endTime= " + argDict['endTime']) + return + + # See if the databaseID is valid. 
An exception will be tossed + db = IFPDB(argDict['databaseID']) + + # Fill in the parmList with all parms if the input list is empty + argDict['parmList'] = processParmList(argDict, db) + + # Determine the mask + maskGrid = getMaskGrid(ifpServer, argDict['mask'], argDict['databaseID']) + origGridSize = maskGrid.shape + clipArea = extremaOfSetBits(maskGrid) + + maskGrid = clipToExtrema(maskGrid, clipArea) + clippedGridSize = maskGrid.shape + validPointCount = float(numpy.add.reduce(numpy.add.reduce(maskGrid))) + + #invert the mask grid + invMask = numpy.logical_not(maskGrid) + #del maskGrid + + # Determine sampling definition + siteId = DatabaseID(argDict['databaseID']).getSiteId() + samplingDef = getSamplingDefinition(argDict['configFileName'], siteId) + logVerbose("Sampling Definition:", samplingDef) + + # Open the netCDF file + file = NetCDF.NetCDFFile(argDict['outputFilename'], 'w') + + totalGrids = 0 + for p in argDict['parmList']: + + we = db.getItem(p) + + #determine inventory that we want to keep + weInv = determineSamplingValues(samplingDef, p, we.getKeys(), time.time()) + + gridType = str(we.getGpi().getGridType()) + if gridType == "SCALAR": + nGrids = storeScalarWE(we, weInv, file, timeRange, + argDict['databaseID'], invMask, argDict['trim'], clipArea, + argDict['krunch'], argDict['siteIdOverride']) + elif gridType == "VECTOR": + nGrids = storeVectorWE(we, weInv, file, timeRange, + argDict['databaseID'], invMask, argDict['trim'], clipArea, + argDict['krunch'], argDict['siteIdOverride']) + elif gridType == "WEATHER": + nGrids = storeWeatherWE(we, weInv, file, timeRange, + argDict['databaseID'], invMask, clipArea, argDict['siteIdOverride']) + elif gridType == "DISCRETE": + nGrids = storeDiscreteWE(we, weInv, file, timeRange, + argDict['databaseID'], invMask, clipArea, argDict['siteIdOverride']) + else: + s = "Grids of type: " + we.gridType + " are not supported, " + \ + "parm=" + p + logProblem(s) + raise Exception(s) + + totalGrids = totalGrids + nGrids + 
+ # store the topo and lat, lon grids if the -g was present + if argDict["geoInfo"]: + storeTopoGrid(ifpServer, file, argDict['databaseID'], invMask, clipArea) + storeLatLonGrids(ifpServer, file, argDict['databaseID'], invMask, + argDict['krunch'], clipArea) + totalGrids = totalGrids + 3 + + storeGlobalAtts(file, argDict) + + file.close() + + fu = os.stat(argDict['outputFilename'])[stat.ST_SIZE] + mb = fu / (1024.0 * 1024.0) + logEvent("Uncompressed Size: ", "%-.3f" % (mb), " MBytes") + a = os.times() + cpu = a[0] + a[1] + stop1 = a[4] + + # Grid statistics + logEvent("Original Grid Size:", origGridSize) + logEvent("Clipped Grid Size: ", clippedGridSize) + logEvent("Valid Points in Grid: ", validPointCount) + logEvent("Total Number of Grids: ", totalGrids) + + perClipped = 100.0 * clippedGridSize[0] * clippedGridSize[1] / \ + (origGridSize[0] * origGridSize[1]) + perValid = 100.0 * validPointCount / (origGridSize[0] * origGridSize[1]) + logEvent("Percent ClippedPts/Original: ", "%-.1f" % (perClipped), + "%") + logEvent("Percent ValidPts/Original: ", "%-.1f" % (perValid), + "%") + + kpts = totalGrids * validPointCount / 1000.0 + logEvent("Total Points: ", "%-.3f" % (kpts), "Kpoints") + + if totalGrids > 0 and validPointCount > 0: + bitsPerPointUncompressed = (fu * 8.0) / (totalGrids * validPointCount) + logEvent("Bits Per Point Uncompressed: ", + "%-.2f" % (bitsPerPointUncompressed)) + + # Finally compress the data with gzip and remove the other file + if argDict['compressFile']: + logEvent("Compressing Output") + compressFile(argDict['outputFilename'], argDict['compressFileFactor']) + f = os.stat(argDict['outputFilename'] + ".gz")[stat.ST_SIZE] + per = 100.0 - 100.0 * f / fu + mb = f / (1024.0 * 1024.0) + logEvent("Compressed Size: ", "%-.3f" % (mb), " MBytes") + logEvent("CompressionPercent=", "%-.1f" % (per), "%") + + if totalGrids > 0 and validPointCount > 0: + bitsPerPointCompressed = (f * 8.0) / (totalGrids * validPointCount) + logEvent("Bits Per Point 
Compressed: ", + "%-.2f" % (bitsPerPointCompressed)) + + a = os.times() + cpugz = a[0] + a[1] + stop = a[4] + logEvent("Elapsed/CPU time: ", + "%-.2f" % (stop1 - start), "/", "%-.2f" % (cpu - cpu0), "processing,", + "%-.2f" % (stop - stop1), "/", "%-.2f" % (cpugz - cpu), "compress,", + "%-.2f" % (stop - start), "/", "%-.2f" % (cpugz - cpu0), "total") + logEvent("ifpnetCDF Finished") + + + +#if __name__ == "__main__": +# main() + # profile stuff +# import profile, pstats +# profile.run('main()', 'pyprof.out') +# p = pstats.Stats('pyprof.out') +# p.strip_dirs() +# p.sort_stats('time', 'calls').print_stats(15) +# p.print_callers(15) diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscDataRec.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscDataRec.py index 10feb7be1f..90402b19e9 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscDataRec.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscDataRec.py @@ -1,314 +1,314 @@ -#! /bin/sh -# -*-python-*- - -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# -# Port of iscDataRec.py -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/06/09 1995 bphillip Initial Creation. -# 01/29/13 1447 dgilling Implement VTEC table sharing. -# 03/12/13 1759 dgilling Bypass command-line processing -# for iscMosaic, support changes -# to IscReceiveSrv. -# 01/24/14 2504 randerso removed obsolete A1 comments -# 12/08/2014 4953 randerso Added support for sending/receiving TCV files -# Additional code cleanup -# 04/08/2015 4383 dgilling Support FireWx ISC. -# 04/23/2015 4383 randerso Fixed exception logging -# 02/22/2016 5374 randerso Added support for sendWFOMessage -# 11/21/2016 5959 njensen Removed unused imports and made more pythonic -# 10/19/2017 6279 randerso Only process requests for our site id. -# 10/25/2017 6495 randerso Only process requests for our site id. -# 04/16/2018 7267 mapeters Correctly log filename in purge error messages -# -## - -## -# This is a base file that is not intended to be overridden. 
-## - - - -from xml.etree import ElementTree -from xml.etree.ElementTree import Element, SubElement -import os, stat, time - -import IrtAccess, IrtServer -import iscMosaic, iscUtil - - -iscDataRecLogger = None - -## Logging methods ## -def initLogger(logFile=None): - global iscDataRecLogger - import logging, siteConfig - iscDataRecLogger = iscUtil.getLogger("iscDataRec", logFile) - -def logEvent(*msg): - iscDataRecLogger.info(iscUtil.tupleToString(*msg)) - -def logProblem(*msg): - iscDataRecLogger.error(iscUtil.tupleToString(*msg)) - -def logException(*msg): - iscDataRecLogger.exception(iscUtil.tupleToString(*msg)) - -def logVerbose(*msg): - iscDataRecLogger.debug(iscUtil.tupleToString(*msg)) - -def logDebug(*msg): - logVerbose(iscUtil.tupleToString(*msg)) - -# File Purging Routing -def purgeFiles(files): - for f in files: - try: - os.remove(f) - except: - logException("iscDataRec Failed to remove file %s: " % str(f)) - - -def execIscDataRec(MSGID, SUBJECT, FILES): - import siteConfig - - try: - # logEvent('*** iscDataRec ***', sys.argv[1:]) - logEvent('SUBJECT:', SUBJECT, 'MSGID:', MSGID, "FILES:", FILES) - - time1 = time.clock() - - #get our MHS id - ourMhsID = siteConfig.GFESUITE_MHSID - ourSiteID = siteConfig.GFESUITE_SITEID - - # for most transactions, first attachment is the data file, 2nd file is the - # XML destinations. ISCREQUEST and GET_TCV_FILES have only 1 file being the - # XML destinations. We simulate two files naming them the same. - if SUBJECT in ["ISCREQUEST", "GET_TCV_FILES"]: - FILES.append(FILES[0]) - - dataFile = FILES[0] #first attachment is always the data file - if len(FILES) > 1: - xmlFile = FILES[1] #second attachment is the xml destinations file - fd = open(xmlFile, 'rb') - xmlFileBuf = fd.read() - fd.close() - try: - destTree = ElementTree.ElementTree(ElementTree.XML(xmlFileBuf)) - iscE = destTree.getroot() - except: - logException("Malformed XML received") - return - - #no XML destination information. 
Default to dx4f,px3 98000000, 98000001 - else: - # create a xml element tree to replace the missing one. This will - # occur when OB8.2 sites send ISC data to OB8.3 sites, and also when - # active table exchanges occur. We default to 98000000 and 98000001 - # on dx4 since that is where the primary and svcbu servers are located. - # This will cause log errors until everyone is on OB8.3. - iscE = Element('isc') - destinationsE = SubElement(iscE, 'destinations') - for x in xrange(98000000, 98000002): - for shost in ['dx4f', 'px3f']: - addressE = SubElement(destinationsE, 'address') - serverE = SubElement(addressE, 'server') - serverE.text = shost - portE = SubElement(addressE, 'port') - portE.text = str(x) - protocolE = SubElement(addressE, 'protocol') - protocolE.text = "20070723" #match this from IFPProtocol.C - mhsE = SubElement(addressE, 'mhsid') - mhsE.text = siteConfig.GFESUITE_MHSID - - irt = IrtAccess.IrtAccess("") - - # find source xml - found = False - for srcE in iscE.getchildren(): - if srcE.tag == "source": - for addressE in srcE: - srcServer = irt.decodeXMLAddress(addressE) - if srcServer is None: - continue - found = True - logEvent("Source:", irt.printServerInfo(srcServer)) - break - if not found: - logEvent("Source: ") - - # find destinations xml - found = False - for destE in iscE.getchildren(): - if destE.tag == "destinations": - found = True - break - if not found: - logProblem("destinations packet missing from xml") - return - - # Handle GET_TCV_FILES out side the loop as it needs to do it's own loop - if SUBJECT == 'GET_TCV_FILES': - IrtServer.getTCVFiles(ourMhsID, srcServer, destE) - else: - # decode and print the source server (if present) - for addressE in destE: - if addressE.tag != "address": - continue - - destServer = irt.decodeXMLAddress(addressE) - - # find destination server information - if destServer['mhsid'].upper() != ourMhsID.upper(): - logDebug(SUBJECT, 'Not our mhs ID of ' + ourMhsID + \ - ', so skipped:', 
irt.printServerInfo(destServer)) - continue #this destination is for someone else. - - if destServer['site'].upper() != ourSiteID.upper(): - logDebug(SUBJECT, 'Not our site ID of ' + ourSiteID + \ - ', so skipped:', irt.printServerInfo(destServer)) - continue #this destination is for someone else. - - # transmit the data to the ifpServer - time2 = time.clock() - - if SUBJECT == 'PUT_ACTIVE_TABLE': - IrtServer.putVTECActiveTable(dataFile, None) - elif SUBJECT == 'PUT_ACTIVE_TABLE2': - IrtServer.putVTECActiveTable(dataFile, xmlFileBuf) - elif SUBJECT == 'GET_ACTIVE_TABLE': - IrtServer.getVTECActiveTable(dataFile, None) - elif SUBJECT == 'GET_ACTIVE_TABLE2': - IrtServer.getVTECActiveTable(dataFile, xmlFileBuf) - elif SUBJECT in ['ISCGRIDS', 'ISCGRIDS2']: - import serverConfig - - additionalISCRouting = [] - if serverConfig.AdditionalISCRouting: - additionalISCRouting = serverConfig.AdditionalISCRouting - putISCGrids(dataFile, siteConfig.GFESUITE_SITEID, srcServer.get('site'), additionalISCRouting) - elif SUBJECT == 'ISCREQUEST': - IrtServer.serviceISCRequest(dataFile) - elif SUBJECT == 'PUT_TCV_FILES': - IrtServer.putTCVFiles(srcServer.get('site'), dataFile) - elif SUBJECT == 'SEND_WFO_MESSAGE': - IrtServer.sendWfoMessage(srcServer.get('site'), dataFile) - else: - logProblem("unknown subject: ", SUBJECT) - continue - time3 = time.clock() - delta1 = time2 - time1 - delta2 = time3 - time2 - logEvent('Sent to:', - irt.printServerInfo(destServer), "connectT=", delta1, "xmtT=", delta2) - except: - logException("iscDataRec failed!") - - finally: - # cleanup - purgeFiles(FILES) - -def putISCGrids(dataFile, destSite, srcSite, additionalISCRouting): - # iscMosaic now executes multiple times--once for the primary ISC database, - # and once more for each additional ISC database defined in the localConfig - args = {"siteID": destSite, - "userID": 'SITE', - "databaseID": destSite + "_GRID__ISC_00000000_0000", - "parmsToProcess": [], - "blankOtherPeriods": True, - "startTime": 
None, - "endTime": None, - "altMask": None, - "replaceOnly": False, - "eraseFirst": False, - "announce": "ISC: ", - "renameWE": True, - "iscSends": False, - "inFiles": [dataFile], - "ignoreMask": False, - "adjustTranslate": True, - "deleteInput": False, - "parmsToIgnore": [], - "gridDelay": 0.0, - "logFileName": None} - mosaic = iscMosaic.IscMosaic(args) - mosaic.execute() - - for entry in additionalISCRouting: - (parms, dbName, editAreaPrefix) = entry - parmNameList = [parm[0] + "_SFC" for parm in parms] - args['parmsToProcess'] = parmNameList - args['databaseID'] = destSite + "_GRID__" + dbName + "_00000000_0000" - args['altMask'] = editAreaPrefix + srcSite - mosaic = iscMosaic.IscMosaic(args) - mosaic.execute() - - -#-------------------------------------------------------------------- -# Main Routine -#-------------------------------------------------------------------- - -# The following keywords prefaced by the '%' character cause the -# value from the current message to be inserted into the -# specification string. -# -# MSGID: Unique ID of message -# MSGTYPE: Sender assigned message type -# MSGCODE: Sender assigned message code -# SUBJECT: Sender assigned message subject -# SENDER: Sending site name -# PRIORITY: Sender assigned message priority -# BODY: Path to file containing body of message. -# ENCLIST: List of paths to files for each enclosure. -# ENCLOSE(N): Path to file containing Nth enclosure. -# NUMENC: Total number of enclosures. -# X400DOC: Path to X.400 document file. 
-# -# /data/adapt/GFESuite/iscDataRec SYS /awips/adapt/GFESUITE/bin/iscDataRec -# %MSGID %SUBJECT %ENCLIST -def main(argv): - initLogger() - try: - logEvent('*** iscDataRec ***', argv) - try: - MSGID = argv[0] - SUBJECT = argv[1] - FILES = argv[2].split(',') - - logEvent('SUBJECT:', SUBJECT, 'MSGID:', MSGID) - #log the incoming files and size - for f in FILES: - filesize = os.stat(f)[stat.ST_SIZE] - logEvent('Received:', f, filesize, "bytes") - execIscDataRec(MSGID, SUBJECT, FILES) - - except: - logException('Failure:') - - except: - logException("FAIL: ") - +#! /bin/sh +# -*-python-*- + +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# +# Port of iscDataRec.py +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/06/09 1995 bphillip Initial Creation. +# 01/29/13 1447 dgilling Implement VTEC table sharing. +# 03/12/13 1759 dgilling Bypass command-line processing +# for iscMosaic, support changes +# to IscReceiveSrv. +# 01/24/14 2504 randerso removed obsolete A1 comments +# 12/08/2014 4953 randerso Added support for sending/receiving TCV files +# Additional code cleanup +# 04/08/2015 4383 dgilling Support FireWx ISC. 
+# 04/23/2015 4383 randerso Fixed exception logging +# 02/22/2016 5374 randerso Added support for sendWFOMessage +# 11/21/2016 5959 njensen Removed unused imports and made more pythonic +# 10/19/2017 6279 randerso Only process requests for our site id. +# 10/25/2017 6495 randerso Only process requests for our site id. +# 04/16/2018 7267 mapeters Correctly log filename in purge error messages +# +## + +## +# This is a base file that is not intended to be overridden. +## + + + +from xml.etree import ElementTree +from xml.etree.ElementTree import Element, SubElement +import os, stat, time + +import IrtAccess, IrtServer +import iscMosaic, iscUtil + + +iscDataRecLogger = None + +## Logging methods ## +def initLogger(logFile=None): + global iscDataRecLogger + import logging, siteConfig + iscDataRecLogger = iscUtil.getLogger("iscDataRec", logFile) + +def logEvent(*msg): + iscDataRecLogger.info(iscUtil.tupleToString(*msg)) + +def logProblem(*msg): + iscDataRecLogger.error(iscUtil.tupleToString(*msg)) + +def logException(*msg): + iscDataRecLogger.exception(iscUtil.tupleToString(*msg)) + +def logVerbose(*msg): + iscDataRecLogger.debug(iscUtil.tupleToString(*msg)) + +def logDebug(*msg): + logVerbose(iscUtil.tupleToString(*msg)) + +# File Purging Routing +def purgeFiles(files): + for f in files: + try: + os.remove(f) + except: + logException("iscDataRec Failed to remove file %s: " % str(f)) + + +def execIscDataRec(MSGID, SUBJECT, FILES): + import siteConfig + + try: + # logEvent('*** iscDataRec ***', sys.argv[1:]) + logEvent('SUBJECT:', SUBJECT, 'MSGID:', MSGID, "FILES:", FILES) + + time1 = time.clock() + + #get our MHS id + ourMhsID = siteConfig.GFESUITE_MHSID + ourSiteID = siteConfig.GFESUITE_SITEID + + # for most transactions, first attachment is the data file, 2nd file is the + # XML destinations. ISCREQUEST and GET_TCV_FILES have only 1 file being the + # XML destinations. We simulate two files naming them the same. 
+ if SUBJECT in ["ISCREQUEST", "GET_TCV_FILES"]: + FILES.append(FILES[0]) + + dataFile = FILES[0] #first attachment is always the data file + if len(FILES) > 1: + xmlFile = FILES[1] #second attachment is the xml destinations file + fd = open(xmlFile, 'rb') + xmlFileBuf = fd.read() + fd.close() + try: + destTree = ElementTree.ElementTree(ElementTree.XML(xmlFileBuf)) + iscE = destTree.getroot() + except: + logException("Malformed XML received") + return + + #no XML destination information. Default to dx4f,px3 98000000, 98000001 + else: + # create a xml element tree to replace the missing one. This will + # occur when OB8.2 sites send ISC data to OB8.3 sites, and also when + # active table exchanges occur. We default to 98000000 and 98000001 + # on dx4 since that is where the primary and svcbu servers are located. + # This will cause log errors until everyone is on OB8.3. + iscE = Element('isc') + destinationsE = SubElement(iscE, 'destinations') + for x in range(98000000, 98000002): + for shost in ['dx4f', 'px3f']: + addressE = SubElement(destinationsE, 'address') + serverE = SubElement(addressE, 'server') + serverE.text = shost + portE = SubElement(addressE, 'port') + portE.text = str(x) + protocolE = SubElement(addressE, 'protocol') + protocolE.text = "20070723" #match this from IFPProtocol.C + mhsE = SubElement(addressE, 'mhsid') + mhsE.text = siteConfig.GFESUITE_MHSID + + irt = IrtAccess.IrtAccess("") + + # find source xml + found = False + for srcE in iscE.getchildren(): + if srcE.tag == "source": + for addressE in srcE: + srcServer = irt.decodeXMLAddress(addressE) + if srcServer is None: + continue + found = True + logEvent("Source:", irt.printServerInfo(srcServer)) + break + if not found: + logEvent("Source: ") + + # find destinations xml + found = False + for destE in iscE.getchildren(): + if destE.tag == "destinations": + found = True + break + if not found: + logProblem("destinations packet missing from xml") + return + + # Handle GET_TCV_FILES out side the 
loop as it needs to do it's own loop + if SUBJECT == 'GET_TCV_FILES': + IrtServer.getTCVFiles(ourMhsID, srcServer, destE) + else: + # decode and print the source server (if present) + for addressE in destE: + if addressE.tag != "address": + continue + + destServer = irt.decodeXMLAddress(addressE) + + # find destination server information + if destServer['mhsid'].upper() != ourMhsID.upper(): + logDebug(SUBJECT, 'Not our mhs ID of ' + ourMhsID + \ + ', so skipped:', irt.printServerInfo(destServer)) + continue #this destination is for someone else. + + if destServer['site'].upper() != ourSiteID.upper(): + logDebug(SUBJECT, 'Not our site ID of ' + ourSiteID + \ + ', so skipped:', irt.printServerInfo(destServer)) + continue #this destination is for someone else. + + # transmit the data to the ifpServer + time2 = time.clock() + + if SUBJECT == 'PUT_ACTIVE_TABLE': + IrtServer.putVTECActiveTable(dataFile, None) + elif SUBJECT == 'PUT_ACTIVE_TABLE2': + IrtServer.putVTECActiveTable(dataFile, xmlFileBuf) + elif SUBJECT == 'GET_ACTIVE_TABLE': + IrtServer.getVTECActiveTable(dataFile, None) + elif SUBJECT == 'GET_ACTIVE_TABLE2': + IrtServer.getVTECActiveTable(dataFile, xmlFileBuf) + elif SUBJECT in ['ISCGRIDS', 'ISCGRIDS2']: + import serverConfig + + additionalISCRouting = [] + if serverConfig.AdditionalISCRouting: + additionalISCRouting = serverConfig.AdditionalISCRouting + putISCGrids(dataFile, siteConfig.GFESUITE_SITEID, srcServer.get('site'), additionalISCRouting) + elif SUBJECT == 'ISCREQUEST': + IrtServer.serviceISCRequest(dataFile) + elif SUBJECT == 'PUT_TCV_FILES': + IrtServer.putTCVFiles(srcServer.get('site'), dataFile) + elif SUBJECT == 'SEND_WFO_MESSAGE': + IrtServer.sendWfoMessage(srcServer.get('site'), dataFile) + else: + logProblem("unknown subject: ", SUBJECT) + continue + time3 = time.clock() + delta1 = time2 - time1 + delta2 = time3 - time2 + logEvent('Sent to:', + irt.printServerInfo(destServer), "connectT=", delta1, "xmtT=", delta2) + except: + 
logException("iscDataRec failed!") + + finally: + # cleanup + purgeFiles(FILES) + +def putISCGrids(dataFile, destSite, srcSite, additionalISCRouting): + # iscMosaic now executes multiple times--once for the primary ISC database, + # and once more for each additional ISC database defined in the localConfig + args = {"siteID": destSite, + "userID": 'SITE', + "databaseID": destSite + "_GRID__ISC_00000000_0000", + "parmsToProcess": [], + "blankOtherPeriods": True, + "startTime": None, + "endTime": None, + "altMask": None, + "replaceOnly": False, + "eraseFirst": False, + "announce": "ISC: ", + "renameWE": True, + "iscSends": False, + "inFiles": [dataFile], + "ignoreMask": False, + "adjustTranslate": True, + "deleteInput": False, + "parmsToIgnore": [], + "gridDelay": 0.0, + "logFileName": None} + mosaic = iscMosaic.IscMosaic(args) + mosaic.execute() + + for entry in additionalISCRouting: + (parms, dbName, editAreaPrefix) = entry + parmNameList = [parm[0] + "_SFC" for parm in parms] + args['parmsToProcess'] = parmNameList + args['databaseID'] = destSite + "_GRID__" + dbName + "_00000000_0000" + args['altMask'] = editAreaPrefix + srcSite + mosaic = iscMosaic.IscMosaic(args) + mosaic.execute() + + +#-------------------------------------------------------------------- +# Main Routine +#-------------------------------------------------------------------- + +# The following keywords prefaced by the '%' character cause the +# value from the current message to be inserted into the +# specification string. +# +# MSGID: Unique ID of message +# MSGTYPE: Sender assigned message type +# MSGCODE: Sender assigned message code +# SUBJECT: Sender assigned message subject +# SENDER: Sending site name +# PRIORITY: Sender assigned message priority +# BODY: Path to file containing body of message. +# ENCLIST: List of paths to files for each enclosure. +# ENCLOSE(N): Path to file containing Nth enclosure. +# NUMENC: Total number of enclosures. +# X400DOC: Path to X.400 document file. 
+# +# /data/adapt/GFESuite/iscDataRec SYS /awips/adapt/GFESUITE/bin/iscDataRec +# %MSGID %SUBJECT %ENCLIST +def main(argv): + initLogger() + try: + logEvent('*** iscDataRec ***', argv) + try: + MSGID = argv[0] + SUBJECT = argv[1] + FILES = argv[2].split(',') + + logEvent('SUBJECT:', SUBJECT, 'MSGID:', MSGID) + #log the incoming files and size + for f in FILES: + filesize = os.stat(f)[stat.ST_SIZE] + logEvent('Received:', f, filesize, "bytes") + execIscDataRec(MSGID, SUBJECT, FILES) + + except: + logException('Failure:') + + except: + logException("FAIL: ") + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscExtract.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscExtract.py index 140681dd2a..8d7b5d08b5 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscExtract.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscExtract.py @@ -1,495 +1,495 @@ -#!/common/bphillip/awips/bin/python - -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# -# Port of iscExtract.py -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- ------------------------------------------ -# Jul 06, 2009 1995 bphillip Initial Creation. 
-# Mar 12, 2013 1759 dgilling Change how ifpnetCDF is called. -# Apr 25, 2015 4952 njensen Updated for new JEP API -# May 21, 2015 4427 dgilling Add new ifpnetCDF argument. -# Sep 12, 2016 5861 randerso Remove references to IFPServerConfigManager -# which was largely redundant with IFPServer. -# Feb 06, 2017 5959 randerso Code cleanup. -# Feb 22, 2017 6143 randerso Set area to none for fewer transmissions. -# -## - -## -# This is a base file that is not intended to be overridden. -## - - - -import JUtil -import ifpnetCDF, iscUtil -import numpy -import tempfile, os, stat, getopt, sys, cPickle, siteConfig -import LogStream, time, traceback, string, IrtAccess, urllib, urllib2 -import xml, copy, string -from xml.etree import ElementTree -from xml.etree.ElementTree import Element, SubElement -from java.io import File -from java.awt import Point -from com.vividsolutions.jts.geom import Coordinate -from java.util import ArrayList -from com.raytheon.edex.plugin.gfe.util import CartDomain2D -from com.raytheon.edex.plugin.gfe.server import IFPServer -from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridLocation -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID -CoordinateType = ReferenceData.CoordinateType - -from com.vividsolutions.jts.geom import Coordinate - -parms = [] #parm names -dbid = None #database identifier -startTR = None #ifpnetCDF start time -endTR = None #ifpnetCDF start time -xmlDestinations = None #XML destinations information -ancf = None #IRT web address -bncf = None -xmtScript = None #transmit script -serverHost = None #server host -serverPort = None #server port -serverProtocol = None #serverProtocol -mhsid = None #MHS site identifier -siteID = None #our site id -startT = None - -def logEvent(*msg): - iscUtil.getLogger("iscExtract").info(iscUtil.tupleToString(*msg)) - -def logProblem(*msg): - 
iscUtil.getLogger("iscExtract").error(iscUtil.tupleToString(*msg)) - -def logException(*msg): - iscUtil.getLogger("iscExtract").exception(iscUtil.tupleToString(*msg)) - -def logVerbose(*msg): - iscUtil.getLogger("iscExtract").debug(iscUtil.tupleToString(*msg)) - -def logDebug(*msg): - logVerbose(iscUtil.tupleToString(*msg)) - - -def executeIscExtract(parmNames, databaseName, startTime, endTime, - irtTableAddressA, irtTableAddressB, transmitScript, ourServerHost, - ourServerPort, ourServerProtocol, ourMHSid, ourSiteID, destinations=None): - - startT = time.time() - parms = parmNames - dbid = databaseName - startTR = startTime - endTR = endTime - xmlDestinations = destinations - ancf = irtTableAddressA - bncf = irtTableAddressB - xmtScript = transmitScript - serverHost = ourServerHost - serverPort = ourServerPort - serverProtocol = ourServerProtocol - mhsid = ourMHSid - siteID = ourSiteID - - ifpServer = IFPServer.getActiveServer(siteID) - if ifpServer is None: - raise Exception("No active IFPServer for site: " + siteId) - - myOfficeType = ifpServer.getConfig().officeType() - - - #-------------------------------------------------------------------- - # determine the list of destination servers - #-------------------------------------------------------------------- - try: - nowT = time.time() #current time - useUntilTime = None #cached use until time - cacheFilename = "/tmp/" + serverHost + serverPort + ".iscExtract" - cachedXmlDestinations = None - #if xmlDestinations is None: #destinations not on command line - # # check the cache - # try: - # fd = open(cacheFilename, 'rb') - # buf = fd.read() - # fd.close() - # useUntilTime, cachedXmlDestinations = cPickle.loads(buf) - # nowT = time.time() #current time - # if nowT > useUntilTime: - # xmlDestinations = None #cache is too old - # useUntilTime = None - # else: - # logEvent('Using xmlDestinations cache') - # xmlDestinations = cachedXmlDestinations - # except: - # pass - - - # need to contact IRT to get destinations - 
irt = IrtAccess.IrtAccess(ancf, bncf) - if xmlDestinations is None: - logEvent('contacting IRT to get destinations') - count = 1 - while True: - status, xmlDestinations = irt.getSendAddrs(siteID) - logEvent('IRT getSendAddrs status:', status) - if status: - # if we obtained XML destinations from IRT, then decode - # the useUntilTime field - try: - d = ElementTree.ElementTree(ElementTree.XML(xmlDestinations)) - dE = d.getroot() - for e in dE: - if e.tag == "useuntil": - isoTimeStr = e.text - idx = isoTimeStr.find(".") - if idx != - 1: - isoTimeStr = isoTimeStr[0:idx] #eliminate subseconds - useUntilTime = time.mktime(time.strptime(isoTimeStr, - "%Y-%m-%dT%H:%M:%S")) - logEvent("Use Until: ", isoTimeStr) - except: - logProblem("Malformed XML on getSendAddrs()") - logProblem("XML=", xmlDestinations) - return - if useUntilTime is None: - useUntilTime = time.time() + 180.0 #3 minutes default - logEvent("Using default 180 second useUntilTime") - - # write the cache - fd = open(cacheFilename, 'wb') - buf = cPickle.dumps((useUntilTime, xmlDestinations)) - fd.write(buf) - fd.close() - break #success from the irt - else: - # try again and again for 10 minutes, then use cache - # if available and alert GFE users - if time.time() - nowT > 600.00: - logProblem("Unable to access IRT for send addrs") - if cachedXmlDestinations is None: - s = "Unable to access IRT for send addrs. Previous" + \ - " cache not available." - logProblem(s) - return - # use cached value, even if out of date - else: - xmlDestinations = cachedXmlDestinations - if useUntilTime is not None: - s = time.asctime(time.gmtime(useUntilTime)) - else: - s = "Unknown" - logProblem("Using expired cache. Date=", s) - - #determine when we issued our last GFE alert - #we alert every 30 minutes. 
- try: - fd = open(cacheFilename + "-warn", 'rb') - buf = fd.read() - fd.close() - lastAlertTime = cPickle.loads(buf) - except: - lastAlertTime = 0 #for way long ago - if time.time() - lastAlertTime > 1800.0: - logProblem("Sending GFE notification") - msg = """ - Contact NCF. ifpServer is unable to contact IRT central server. ISC - traffic routing information is old and possibly incorrect.""" - os.system("sendGfeMessage -u -c GFE -m '" + \ - msg + "'") - fd = open(cacheFilename + "-warn", 'wb') - fd.write(cPickle.dumps(time.time())) - fd.close() - break - - time.sleep(15.0) #sleep awhile and then try again - count = count + 1 - logProblem("Retrying to getSendAddrs()", count) - # qc the XML - try: - destTree = ElementTree.ElementTree(ElementTree.XML(xmlDestinations)) - destE = destTree.getroot() - except: - logProblem("Malformed XML on getSendAddrs() or provided xmlDest") - logProblem("XML=", xmlDestinations) - return - #-------------------------------------------------------------------- - # determine how many transmissions are necessary - #-------------------------------------------------------------------- - xmt = [] - - logEvent("XML dest:", xmlDestinations) - if destE.tag != "destinations": - logProblem("Destinations packet missing from web service") - return - - # create list of individual transmissions (before attempting to combine - doClip = 1 #0 to send entire domain, 1 to do destination clipping (default) - destStr = "Destination Servers:\n" - for addressE in destE: - if addressE.tag == "doclip": - for name, value in addressE.items(): - if name == "clip": - if value == "1": - doClip = 1 - elif value == "0": - doClip = 0 - - logEvent("Clipping State: ", doClip) - - for addressE in destE: - - if addressE.tag != "address": - continue - - # find destination ifpServer info and domain information - serverInfo = irt.decodeXMLAddress(addressE) - - if doClip == 0: - serverInfo['domain'] = None - serverInfo['area'] = None - keycheckfail = False - for key in 
['mhsid', 'host', 'port', 'protocol', 'site']: - if not serverInfo.has_key(key): - logProblem("Fail to decode XML. Skipping serverInfo:", - serverInfo) - keycheckfail = True - continue - if keycheckfail: - continue #skipping this destination due to insufficient info - - # get the destination office type - try: - siteIndex = ifpServer.getConfig().allSites().indexOf(serverInfo['site']) - destOfficeType = str(ifpServer.getConfig().officeTypes().get(siteIndex)) - except: - logProblem("Unknown site id to get office type. ", - "Skipping serverInfo:", serverInfo) - continue #skipping this destination due to unknown site id - - # find weather elements that remote ifpServer wants - # that is available in our server and in the -p parm switches - any = False - for parm in serverInfo['parms']: - p1 = string.replace(parm, "_SFC", "") #remove _SFC if exists - - # translation of parm name needed, also if no office type, then - # not wanted from this office. - # example: changes QPFwfo to QPF if we are wfo - # example: discards T if we are wfo and site is non-wfo - if myOfficeType != destOfficeType: - if p1.find(myOfficeType) != - 1: - p1 = string.replace(p1, myOfficeType, "") #remove type - else: - continue #no type, so not intended for our type - # see if parm was listed in the command line switches - if parms.contains(p1): - xmt.append({'serverInfo':[serverInfo], 'parms':[p1], - 'domain': serverInfo['domain'], 'area': serverInfo['area']}) - if not any: - destStr += irt.printServerInfo(serverInfo) + "\n" - any = True - - logEvent(destStr) - - # now combine transmissions - # find same domains, same parms, to combine servers/destinations - i = 0 - while i < len(xmt): - j = i + 1 - while j < len(xmt): - if xmt[i]['domain'] == xmt[j]['domain'] and \ - xmt[i]['area'] == xmt[j]['area'] and \ - xmt[i]['parms'] == xmt[j]['parms']: - for si in xmt[j]['serverInfo']: - if si not in xmt[i]['serverInfo']: - dests = xmt[i]['serverInfo'] - dests.append(si) - xmt[j]['serverInfo'] = dests - del 
xmt[j] #delete the entry - j = j - 1 #redo this entry index next loop - j = j + 1 - i = i + 1 - - # now try to combine common parm lists (same domain, same servers/destinations) - i = 0 - while i < len(xmt): - j = i + 1 - while j < len(xmt): - if xmt[i]['domain'] == xmt[j]['domain'] and \ - xmt[i]['area'] == xmt[j]['area'] and \ - xmt[i]['serverInfo'] == xmt[j]['serverInfo'] : - iparms = xmt[i]['parms'] - for p in xmt[j]['parms']: - if p not in iparms: - iparms.append(p) - xmt[i]['parms'] = iparms - del xmt[j] #delete the entry - j = j - 1 #redo this entry index for next loop - j = j + 1 - i = i + 1 - - # if doClip, gather some required information - if doClip: - #get the isc send area and grid domain from the ifpServer - iscSendAreaGrid = iscUtil.getEditArea("ISC_Send_Area",siteID) - sourceDomain = ifpServer.getConfig().dbDomain() - - iscSendAreaGrid.setGloc(sourceDomain) - iscSendAreaGrid = iscSendAreaGrid.getGrid() - - - #-------------------------------------------------------------------- - # prepare output files - #-------------------------------------------------------------------- - for dest in xmt: - s = "Processing Xmt Pass:\n" - for sv in dest['serverInfo']: - s += irt.printServerInfo(sv) + '\n' - s += "Domain:" + `dest['domain']` + '\n' - s += "Area:" + `dest['area']` + '\n' - s += "Parms:" + `dest['parms']` + '\n\n' - logEvent(s) - # extract the data using ifpnetCDF - if os.path.exists(siteConfig.GFESUITE_HOME + "/products/ISC") == False: - os.makedirs(siteConfig.GFESUITE_HOME + "/products/ISC") - - tempfile.tempdir = siteConfig.GFESUITE_HOME + "/products/ISC" - fname = tempfile.mktemp(".isc") - - # Determine domain edit area. 
- - if doClip == 1 and dest['domain'] is not None and \ - dest['domain']['proj'] == sourceDomain.getProjection().getProjectionID(): - #make a GridLocation for our domain - gridSize = Coordinate(float(str(sourceDomain.getNx())), float(str(sourceDomain.getNy()))) - origin = sourceDomain.getOrigin() - extent = sourceDomain.getExtent() - domain = CartDomain2D(origin, extent) - gloc = sourceDomain - - #make a GridLocation covering the area for the destination, expanded - #by 1/2 grid cell - dd = dest['domain'] - da = dest['area'] - cellsizeX = float(dd['extx']) / (float(da['xdim']) - 1.0) - cellsizeY = float(dd['exty']) / (float(da['ydim']) - 1.0) - originD = Coordinate(float(dd['origx']) - cellsizeX / 2.0, - float(dd['origy']) - cellsizeY / 2.0) - extentD = Coordinate(float(dd['extx']) + cellsizeX, - float(dd['exty']) + cellsizeY) - domainD = CartDomain2D(originD, extentD) - - #check for overlap - if not domainD.overlaps(domain): - logEvent("No intersection of domain box, skipping....") - continue #no bits set in the resulting mask, no intersect - - domainD.trim(domain) #trim it to just the overlapping section - - - gridSize = Point(int(da['xdim']),int(da['ydim'])) - destGridLocation = GridLocation("Dest",sourceDomain.getProjection(), - gridSize,domainD.getOrigin(),domainD.getExtent(),"GMT") - - # make a Reference Set - refid = ReferenceID("jibberish") - refSet = ReferenceData(gloc, refid, destGridLocation.getGeometry(), CoordinateType.LATLON) - - # convert destination site's domain to gridpoints - iscMask = refSet.getGrid() - - # "and" it with our ISC_Send_Area - iscMask.andEquals(iscSendAreaGrid) - - if not iscMask.isAnyBitsSet(): - logEvent("No intersection of domain points, skipping....") - continue #no bits set in the resulting mask, no intersect - - # store the grid back into the ifpServer - maskName = "iscExtract" + `time.time()` - refSet.setGrid(iscMask) - iscUtil.saveEditAreaGrid(maskName, refSet, siteID) - - else: #no clipping, or different projection - 
maskName = "ISC_Send_Area" - - # Run ifpnetCDF for the data - argv = {"outputFilename": fname, - "parmList": dest['parms'], - "databaseID": dbid, - "startTime": startTR, - "endTime": endTR, - "mask": maskName, - "geoInfo": False, - "compressFileFlag": True, - "configFileName": "iscSendSampleDef", - "compressFileFactor": 6, - "trim": True, - "krunch": True, - "userID": "iscExtract", - "logFileName": None, - "siteIdOverride": None} - ifpnetCDF.main(**argv) - - fname = fname + '.gz' - size = os.stat(fname)[stat.ST_SIZE] - endT = time.time() - logEvent('File Size: ', size) - logEvent('After ifpnetCDF, ,wctime:', "%-6.2f" % (endT - startT), - ',cputime:', "%-6.2f" % time.clock()) - - # create XML destinations file for this output - iscE = Element('isc') #create the XML tree root - sourceServer = {'mhsid': mhsid, 'host': serverHost, 'port': serverPort, - 'protocol': serverProtocol, 'site': siteID} - irt.addSourceXML(iscE, sourceServer) - irt.addDestinationXML(iscE, dest['serverInfo']) - - #get the unique list of mhs sites - mhsSites = [] - for si in dest['serverInfo']: - if si['mhsid'] not in mhsSites: - mhsSites.append(si['mhsid']) - - # create the XML file - fnameXML = tempfile.mktemp(".xml") - fd = open(fnameXML, 'wb') - fd.write(ElementTree.tostring(iscE)) - fd.close() - - # Transmit files - do string substitution - irt.transmitFiles("ISCGRIDS2", mhsSites, mhsid, [fname,fnameXML], xmtScript) - # Delete temporary files - if maskName != "ISC_Send_Area": - iscUtil.deleteEditArea(maskName,siteID) - - endT = time.time() - logEvent('After transmission pass, ,wctime:', - "%-6.2f" % (endT - startT), ',cputime:', "%-6.2f" % time.clock()) - - except: - logProblem("Failure", traceback.format_exc()) +#!/common/bphillip/awips/bin/python + +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# +# Port of iscExtract.py +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- ------------------------------------------ +# Jul 06, 2009 1995 bphillip Initial Creation. +# Mar 12, 2013 1759 dgilling Change how ifpnetCDF is called. +# Apr 25, 2015 4952 njensen Updated for new JEP API +# May 21, 2015 4427 dgilling Add new ifpnetCDF argument. +# Sep 12, 2016 5861 randerso Remove references to IFPServerConfigManager +# which was largely redundant with IFPServer. +# Feb 06, 2017 5959 randerso Code cleanup. +# Feb 22, 2017 6143 randerso Set area to none for fewer transmissions. +# +## + +## +# This is a base file that is not intended to be overridden. 
+## + + + +import JUtil +import ifpnetCDF, iscUtil +import numpy +import tempfile, os, stat, getopt, sys, pickle, siteConfig +import LogStream, time, traceback, string, IrtAccess, urllib.request, urllib.parse, urllib.error, urllib.request, urllib.error, urllib.parse +import xml, copy, string +from xml.etree import ElementTree +from xml.etree.ElementTree import Element, SubElement +from java.io import File +from java.awt import Point +from com.vividsolutions.jts.geom import Coordinate +from java.util import ArrayList +from com.raytheon.edex.plugin.gfe.util import CartDomain2D +from com.raytheon.edex.plugin.gfe.server import IFPServer +from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridLocation +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID +CoordinateType = ReferenceData.CoordinateType + +from com.vividsolutions.jts.geom import Coordinate + +parms = [] #parm names +dbid = None #database identifier +startTR = None #ifpnetCDF start time +endTR = None #ifpnetCDF start time +xmlDestinations = None #XML destinations information +ancf = None #IRT web address +bncf = None +xmtScript = None #transmit script +serverHost = None #server host +serverPort = None #server port +serverProtocol = None #serverProtocol +mhsid = None #MHS site identifier +siteID = None #our site id +startT = None + +def logEvent(*msg): + iscUtil.getLogger("iscExtract").info(iscUtil.tupleToString(*msg)) + +def logProblem(*msg): + iscUtil.getLogger("iscExtract").error(iscUtil.tupleToString(*msg)) + +def logException(*msg): + iscUtil.getLogger("iscExtract").exception(iscUtil.tupleToString(*msg)) + +def logVerbose(*msg): + iscUtil.getLogger("iscExtract").debug(iscUtil.tupleToString(*msg)) + +def logDebug(*msg): + logVerbose(iscUtil.tupleToString(*msg)) + + +def executeIscExtract(parmNames, databaseName, startTime, endTime, + irtTableAddressA, irtTableAddressB, transmitScript, ourServerHost, + 
ourServerPort, ourServerProtocol, ourMHSid, ourSiteID, destinations=None): + + startT = time.time() + parms = parmNames + dbid = databaseName + startTR = startTime + endTR = endTime + xmlDestinations = destinations + ancf = irtTableAddressA + bncf = irtTableAddressB + xmtScript = transmitScript + serverHost = ourServerHost + serverPort = ourServerPort + serverProtocol = ourServerProtocol + mhsid = ourMHSid + siteID = ourSiteID + + ifpServer = IFPServer.getActiveServer(siteID) + if ifpServer is None: + raise Exception("No active IFPServer for site: " + siteId) + + myOfficeType = ifpServer.getConfig().officeType() + + + #-------------------------------------------------------------------- + # determine the list of destination servers + #-------------------------------------------------------------------- + try: + nowT = time.time() #current time + useUntilTime = None #cached use until time + cacheFilename = "/tmp/" + serverHost + serverPort + ".iscExtract" + cachedXmlDestinations = None + #if xmlDestinations is None: #destinations not on command line + # # check the cache + # try: + # fd = open(cacheFilename, 'rb') + # buf = fd.read() + # fd.close() + # useUntilTime, cachedXmlDestinations = cPickle.loads(buf) + # nowT = time.time() #current time + # if nowT > useUntilTime: + # xmlDestinations = None #cache is too old + # useUntilTime = None + # else: + # logEvent('Using xmlDestinations cache') + # xmlDestinations = cachedXmlDestinations + # except: + # pass + + + # need to contact IRT to get destinations + irt = IrtAccess.IrtAccess(ancf, bncf) + if xmlDestinations is None: + logEvent('contacting IRT to get destinations') + count = 1 + while True: + status, xmlDestinations = irt.getSendAddrs(siteID) + logEvent('IRT getSendAddrs status:', status) + if status: + # if we obtained XML destinations from IRT, then decode + # the useUntilTime field + try: + d = ElementTree.ElementTree(ElementTree.XML(xmlDestinations)) + dE = d.getroot() + for e in dE: + if e.tag == 
"useuntil": + isoTimeStr = e.text + idx = isoTimeStr.find(".") + if idx != - 1: + isoTimeStr = isoTimeStr[0:idx] #eliminate subseconds + useUntilTime = time.mktime(time.strptime(isoTimeStr, + "%Y-%m-%dT%H:%M:%S")) + logEvent("Use Until: ", isoTimeStr) + except: + logProblem("Malformed XML on getSendAddrs()") + logProblem("XML=", xmlDestinations) + return + if useUntilTime is None: + useUntilTime = time.time() + 180.0 #3 minutes default + logEvent("Using default 180 second useUntilTime") + + # write the cache + fd = open(cacheFilename, 'wb') + buf = pickle.dumps((useUntilTime, xmlDestinations)) + fd.write(buf) + fd.close() + break #success from the irt + else: + # try again and again for 10 minutes, then use cache + # if available and alert GFE users + if time.time() - nowT > 600.00: + logProblem("Unable to access IRT for send addrs") + if cachedXmlDestinations is None: + s = "Unable to access IRT for send addrs. Previous" + \ + " cache not available." + logProblem(s) + return + # use cached value, even if out of date + else: + xmlDestinations = cachedXmlDestinations + if useUntilTime is not None: + s = time.asctime(time.gmtime(useUntilTime)) + else: + s = "Unknown" + logProblem("Using expired cache. Date=", s) + + #determine when we issued our last GFE alert + #we alert every 30 minutes. + try: + fd = open(cacheFilename + "-warn", 'rb') + buf = fd.read() + fd.close() + lastAlertTime = pickle.loads(buf) + except: + lastAlertTime = 0 #for way long ago + if time.time() - lastAlertTime > 1800.0: + logProblem("Sending GFE notification") + msg = """ + Contact NCF. ifpServer is unable to contact IRT central server. 
ISC + traffic routing information is old and possibly incorrect.""" + os.system("sendGfeMessage -u -c GFE -m '" + \ + msg + "'") + fd = open(cacheFilename + "-warn", 'wb') + fd.write(pickle.dumps(time.time())) + fd.close() + break + + time.sleep(15.0) #sleep awhile and then try again + count = count + 1 + logProblem("Retrying to getSendAddrs()", count) + # qc the XML + try: + destTree = ElementTree.ElementTree(ElementTree.XML(xmlDestinations)) + destE = destTree.getroot() + except: + logProblem("Malformed XML on getSendAddrs() or provided xmlDest") + logProblem("XML=", xmlDestinations) + return + #-------------------------------------------------------------------- + # determine how many transmissions are necessary + #-------------------------------------------------------------------- + xmt = [] + + logEvent("XML dest:", xmlDestinations) + if destE.tag != "destinations": + logProblem("Destinations packet missing from web service") + return + + # create list of individual transmissions (before attempting to combine + doClip = 1 #0 to send entire domain, 1 to do destination clipping (default) + destStr = "Destination Servers:\n" + for addressE in destE: + if addressE.tag == "doclip": + for name, value in list(addressE.items()): + if name == "clip": + if value == "1": + doClip = 1 + elif value == "0": + doClip = 0 + + logEvent("Clipping State: ", doClip) + + for addressE in destE: + + if addressE.tag != "address": + continue + + # find destination ifpServer info and domain information + serverInfo = irt.decodeXMLAddress(addressE) + + if doClip == 0: + serverInfo['domain'] = None + serverInfo['area'] = None + keycheckfail = False + for key in ['mhsid', 'host', 'port', 'protocol', 'site']: + if key not in serverInfo: + logProblem("Fail to decode XML. 
Skipping serverInfo:", + serverInfo) + keycheckfail = True + continue + if keycheckfail: + continue #skipping this destination due to insufficient info + + # get the destination office type + try: + siteIndex = ifpServer.getConfig().allSites().indexOf(serverInfo['site']) + destOfficeType = str(ifpServer.getConfig().officeTypes().get(siteIndex)) + except: + logProblem("Unknown site id to get office type. ", + "Skipping serverInfo:", serverInfo) + continue #skipping this destination due to unknown site id + + # find weather elements that remote ifpServer wants + # that is available in our server and in the -p parm switches + any = False + for parm in serverInfo['parms']: + p1 = string.replace(parm, "_SFC", "") #remove _SFC if exists + + # translation of parm name needed, also if no office type, then + # not wanted from this office. + # example: changes QPFwfo to QPF if we are wfo + # example: discards T if we are wfo and site is non-wfo + if myOfficeType != destOfficeType: + if p1.find(myOfficeType) != - 1: + p1 = string.replace(p1, myOfficeType, "") #remove type + else: + continue #no type, so not intended for our type + # see if parm was listed in the command line switches + if parms.contains(p1): + xmt.append({'serverInfo':[serverInfo], 'parms':[p1], + 'domain': serverInfo['domain'], 'area': serverInfo['area']}) + if not any: + destStr += irt.printServerInfo(serverInfo) + "\n" + any = True + + logEvent(destStr) + + # now combine transmissions + # find same domains, same parms, to combine servers/destinations + i = 0 + while i < len(xmt): + j = i + 1 + while j < len(xmt): + if xmt[i]['domain'] == xmt[j]['domain'] and \ + xmt[i]['area'] == xmt[j]['area'] and \ + xmt[i]['parms'] == xmt[j]['parms']: + for si in xmt[j]['serverInfo']: + if si not in xmt[i]['serverInfo']: + dests = xmt[i]['serverInfo'] + dests.append(si) + xmt[j]['serverInfo'] = dests + del xmt[j] #delete the entry + j = j - 1 #redo this entry index next loop + j = j + 1 + i = i + 1 + + # now try to 
combine common parm lists (same domain, same servers/destinations) + i = 0 + while i < len(xmt): + j = i + 1 + while j < len(xmt): + if xmt[i]['domain'] == xmt[j]['domain'] and \ + xmt[i]['area'] == xmt[j]['area'] and \ + xmt[i]['serverInfo'] == xmt[j]['serverInfo'] : + iparms = xmt[i]['parms'] + for p in xmt[j]['parms']: + if p not in iparms: + iparms.append(p) + xmt[i]['parms'] = iparms + del xmt[j] #delete the entry + j = j - 1 #redo this entry index for next loop + j = j + 1 + i = i + 1 + + # if doClip, gather some required information + if doClip: + #get the isc send area and grid domain from the ifpServer + iscSendAreaGrid = iscUtil.getEditArea("ISC_Send_Area",siteID) + sourceDomain = ifpServer.getConfig().dbDomain() + + iscSendAreaGrid.setGloc(sourceDomain) + iscSendAreaGrid = iscSendAreaGrid.getGrid() + + + #-------------------------------------------------------------------- + # prepare output files + #-------------------------------------------------------------------- + for dest in xmt: + s = "Processing Xmt Pass:\n" + for sv in dest['serverInfo']: + s += irt.printServerInfo(sv) + '\n' + s += "Domain:" + repr(dest['domain']) + '\n' + s += "Area:" + repr(dest['area']) + '\n' + s += "Parms:" + repr(dest['parms']) + '\n\n' + logEvent(s) + # extract the data using ifpnetCDF + if os.path.exists(siteConfig.GFESUITE_HOME + "/products/ISC") == False: + os.makedirs(siteConfig.GFESUITE_HOME + "/products/ISC") + + tempfile.tempdir = siteConfig.GFESUITE_HOME + "/products/ISC" + fname = tempfile.mktemp(".isc") + + # Determine domain edit area. 
+ + if doClip == 1 and dest['domain'] is not None and \ + dest['domain']['proj'] == sourceDomain.getProjection().getProjectionID(): + #make a GridLocation for our domain + gridSize = Coordinate(float(str(sourceDomain.getNx())), float(str(sourceDomain.getNy()))) + origin = sourceDomain.getOrigin() + extent = sourceDomain.getExtent() + domain = CartDomain2D(origin, extent) + gloc = sourceDomain + + #make a GridLocation covering the area for the destination, expanded + #by 1/2 grid cell + dd = dest['domain'] + da = dest['area'] + cellsizeX = float(dd['extx']) / (float(da['xdim']) - 1.0) + cellsizeY = float(dd['exty']) / (float(da['ydim']) - 1.0) + originD = Coordinate(float(dd['origx']) - cellsizeX / 2.0, + float(dd['origy']) - cellsizeY / 2.0) + extentD = Coordinate(float(dd['extx']) + cellsizeX, + float(dd['exty']) + cellsizeY) + domainD = CartDomain2D(originD, extentD) + + #check for overlap + if not domainD.overlaps(domain): + logEvent("No intersection of domain box, skipping....") + continue #no bits set in the resulting mask, no intersect + + domainD.trim(domain) #trim it to just the overlapping section + + + gridSize = Point(int(da['xdim']),int(da['ydim'])) + destGridLocation = GridLocation("Dest",sourceDomain.getProjection(), + gridSize,domainD.getOrigin(),domainD.getExtent(),"GMT") + + # make a Reference Set + refid = ReferenceID("jibberish") + refSet = ReferenceData(gloc, refid, destGridLocation.getGeometry(), CoordinateType.LATLON) + + # convert destination site's domain to gridpoints + iscMask = refSet.getGrid() + + # "and" it with our ISC_Send_Area + iscMask.andEquals(iscSendAreaGrid) + + if not iscMask.isAnyBitsSet(): + logEvent("No intersection of domain points, skipping....") + continue #no bits set in the resulting mask, no intersect + + # store the grid back into the ifpServer + maskName = "iscExtract" + repr(time.time()) + refSet.setGrid(iscMask) + iscUtil.saveEditAreaGrid(maskName, refSet, siteID) + + else: #no clipping, or different projection + 
maskName = "ISC_Send_Area" + + # Run ifpnetCDF for the data + argv = {"outputFilename": fname, + "parmList": dest['parms'], + "databaseID": dbid, + "startTime": startTR, + "endTime": endTR, + "mask": maskName, + "geoInfo": False, + "compressFileFlag": True, + "configFileName": "iscSendSampleDef", + "compressFileFactor": 6, + "trim": True, + "krunch": True, + "userID": "iscExtract", + "logFileName": None, + "siteIdOverride": None} + ifpnetCDF.main(**argv) + + fname = fname + '.gz' + size = os.stat(fname)[stat.ST_SIZE] + endT = time.time() + logEvent('File Size: ', size) + logEvent('After ifpnetCDF, ,wctime:', "%-6.2f" % (endT - startT), + ',cputime:', "%-6.2f" % time.clock()) + + # create XML destinations file for this output + iscE = Element('isc') #create the XML tree root + sourceServer = {'mhsid': mhsid, 'host': serverHost, 'port': serverPort, + 'protocol': serverProtocol, 'site': siteID} + irt.addSourceXML(iscE, sourceServer) + irt.addDestinationXML(iscE, dest['serverInfo']) + + #get the unique list of mhs sites + mhsSites = [] + for si in dest['serverInfo']: + if si['mhsid'] not in mhsSites: + mhsSites.append(si['mhsid']) + + # create the XML file + fnameXML = tempfile.mktemp(".xml") + fd = open(fnameXML, 'wb') + fd.write(ElementTree.tostring(iscE)) + fd.close() + + # Transmit files - do string substitution + irt.transmitFiles("ISCGRIDS2", mhsSites, mhsid, [fname,fnameXML], xmtScript) + # Delete temporary files + if maskName != "ISC_Send_Area": + iscUtil.deleteEditArea(maskName,siteID) + + endT = time.time() + logEvent('After transmission pass, ,wctime:', + "%-6.2f" % (endT - startT), ',cputime:', "%-6.2f" % time.clock()) + + except: + logProblem("Failure", traceback.format_exc()) \ No newline at end of file diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscMosaic.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscMosaic.py index c2f83789a9..b967f30f90 100644 --- 
a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscMosaic.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscMosaic.py @@ -1,1764 +1,1764 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -# ------------------------------------------------------------------------------ -# -# Port of iscMosaic.py -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- ------------------------------------------ -# Jul 06, 2009 1995 bphillip Initial Creation. -# Jan 17, 2013 15588 jdynina Fixed Publish history removal -# Mar 12, 2013 1759 dgilling Remove unnecessary command line -# processing. -# Apr 24, 2013 1941 dgilling Re-port WECache to match A1. -# May 08, 2013 1988 dgilling Fix history handling bug in__getDbGrid(). -# May 23, 2013 1759 dgilling Remove unnecessary imports. -# Jun 05, 2013 2063 dgilling Change __siteInDbGrid() to -# call IFPWE.history() like A1. -# Sep 05, 2013 2307 dgilling Fix breakage caused by #2044. 
-# Oct 31, 2013 2508 randerso Change to use DiscreteGridSlice.getKeys() -# Nov 05, 2013 2517 randerso Restructured logging so it could be used -# by WECache -# Changed WECache to limit the number of -# cached grids kept in memory -# Jan 09, 2014 16952 randerso Fix regression made in #2517 which caused -# errors with overlapping grids -# Feb 04, 2014 17042 ryu Check in changes for randerso. -# Apr 03, 2014 2737 randerso Allow iscMosaic to blankOtherPeriods even -# when no grids received -# Apr 11, 2014 17242 dgilling (code checked in by zhao) -# Jul 22, 2014 17484 randerso Update cluster lock time to prevent time -# out -# Aug 07, 2014 3517 randerso Improved memory utilization and error -# handling when unzipping input file. -# Aug 14, 2014 3526 randerso Fix bug in WECache that could incorrectly -# delete grids in the destination database -# Feb 17, 2015 4139 randerso Replaced call to iscTime.timeFromComponents -# with call to calendar.timegm -# Apr 23, 2015 4383 randerso Changed to log arguments to aid in -# troubleshooting -# Apr 23, 2015 4259 njensen Updated for new JEP API -# Apr 25, 2015 4952 njensen Updated for new JEP API -# Aug 06, 2015 4718 dgilling Optimize casting when using where with -# NumPy 1.9. -# Oct 05, 2015 4951 randerso Fixed siteInDbGrid to retrieve history from -# the cache so it sees changes that have not -# yet been written to the database -# Oct 05, 2015 4961 randerso Fix __calcBlankingTimes to handle persistent -# grids -# Sep 12, 2016 5861 randerso Remove references to IFPServerConfigManager -# which was largely redundant with IFPServer. 
-# Feb 22, 2017 6143 randerso Moved renewal of cluster lock into inner -# loop to avoid unexpected time out -# Oct 31, 2016 5979 njensen Cast to primitives for compatibility -# Feb 06, 2017 5959 randerso Removed Java .toString() calls -# Apr 05, 2017 6224 randerso Include dbID in cluster lock name -# Oct 05, 2015 4951 randerso Fixed siteInDbGrid to retrieve history -# from the cache so it sees changes that -# have not yet been written to the database -# Oct 05, 2015 4961 randerso Fix __calcBlankingTimes to handle -# persistent grids -# Feb 22, 2017 6143 randerso Moved renewal of cluster lock into inner -# loop to avoid unexpected time out -# Apr 05, 2017 6224 randerso Include dbID in cluster lock name -# -## - -## -# This is a base file that is not intended to be overridden. -## - - - -import os, stat, time, string, sys, calendar -import iscTime, iscUtil, mergeGrid -try: - # dev environment - from Scientific.IO import NetCDF -except: - # runtime we don't have the whole scientific package - import NetCDF -import numpy -import JUtil - -from java.lang import System -from java.util import ArrayList -from java.util import LinkedHashMap -from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DFloat -from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DByte -from com.raytheon.uf.common.time import TimeRange -from com.vividsolutions.jts.geom import Coordinate -from java.awt import Point - -from com.raytheon.edex.plugin.gfe.server import IFPServer -from com.raytheon.edex.plugin.gfe.smartinit import IFPDB -from com.raytheon.uf.common.dataplugin.gfe import GridDataHistory -from com.raytheon.uf.common.dataplugin.gfe import RemapGrid -OriginType = GridDataHistory.OriginType -from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData -ProjectionType = ProjectionData.ProjectionType -from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID -from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridLocation -from 
com.raytheon.uf.common.dataplugin.gfe.slice import DiscreteGridSlice -from com.raytheon.uf.common.dataplugin.gfe.slice import ScalarGridSlice -from com.raytheon.uf.common.dataplugin.gfe.slice import VectorGridSlice -from com.raytheon.uf.common.dataplugin.gfe.slice import WeatherGridSlice -from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey -from com.raytheon.uf.common.dataplugin.gfe.weather import WeatherKey -from com.raytheon.uf.common.dataplugin.gfe.server.notify import UserMessageNotification -from com.raytheon.edex.plugin.gfe.util import SendNotifications -from com.raytheon.uf.common.status import UFStatus -Priority = UFStatus.Priority -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceID -CoordinateType = ReferenceData.CoordinateType -from com.raytheon.uf.edex.database.cluster import ClusterLockUtils -from com.raytheon.uf.edex.database.cluster import ClusterTask - -BATCH_DELAY = 0.0 - -MAX_CACHE_BYTES = 64 * 1024 * 1024 # 64 MB - -ISC_USER = "isc" - -logger = None - -## Logging methods ## -def initLogger(logName): - import logging - global logger - logger = iscUtil.getLogger("iscMosaic", logName=logName, logLevel=logging.DEBUG) - -def printTR(tr): - """ - Format time range for printing (yymmdd_hhmm,yymmdd_hhmm) - Works with list or tuple - - Args: - tr: the time range to format - - Returns: - the formatted time range string - """ - if tr is not None: - format = "%Y%m%d_%H%M" - s = '(' + time.strftime(format, time.gmtime(tr[0])) + ',' + \ - time.strftime(format, time.gmtime(tr[1])) + ')' - return s - else: - return "None" - -def printShortTR(tr): - """ - Format time range for printing (dd/hh,dd/hh) - Works with list or tuple - - Args: - tr: the time range to format - - Returns: - the formatted time range string - """ - if tr is not None: - format = "%d/%H" - s = '(' + time.strftime(format, time.gmtime(tr[0])) + '->' + \ - time.strftime(format, 
time.gmtime(tr[1])) + ')' - return s - else: - return "None" - -class WECache(object): - """ - Cache representing the grids for a weather element that overlap a time range. - The cache will keep a limited number of grids in memory. This limit is determined - at initialization to be the number of grids that will fit in MAX_CACHE_BYTES (or a minimum of 2). - - This is not a general purpose cache. It's behavior is designed to match the access patterns of iscMosaic - """ - def __init__(self, we, tr=None): - self._we = we - logger.debug("WECache creating: %s", str(self._we.getParmid())) - - gridType = self._we.getGridType() - if gridType == "SCALAR": - bytesPerCell = 4 - elif gridType == "VECTOR": - bytesPerCell = 8 - elif gridType == "WEATHER": - bytesPerCell = 1 - elif gridType == "DISCRETE": - bytesPerCell = 1 - - gloc = self._we.getGpi().getGridLoc() - gridBytes = int(gloc.getNx()) * int(gloc.getNy()) * bytesPerCell - - self._maxCacheSize = max(2, MAX_CACHE_BYTES / gridBytes) - self._batchSize = self._maxCacheSize / 2 - logger.debug("WECache max grids: %d, batch grids: %d", self._maxCacheSize, self._batchSize) - - self._inv = {} # Map containing keys for all inventory - self._invCache = None # Cache of inventory sorted by start time - self._dirty = set() # Cache written to but not flushed to disk - self._populated = set() # Grid is currently in the cache - self._loaded = set() # Grid has been loaded into cache at least once - - # get only keys that overlap tr - javaInv = self._we.getKeys(iscUtil.toJavaTimeRange(tr)) - pyInv = [] - for i in xrange(javaInv.size()): - pyInv.append(iscUtil.transformTime(javaInv.get(i))) - - # create unpopulated entries for the entire inventory - for invTr in pyInv: - self._inv[invTr] = None - - # populate first BATCH_READCOUNT grids - if len(pyInv) > self._batchSize: - trList = pyInv[:self._batchSize - 1] - # add on the last tr since it is used by __splitGridsOnProcBoundary - trList.append(pyInv[-1]) - - else: - trList = pyInv - - 
self.__loadGrids(trList) - - def __loadGrids(self, trList): - javaTRs = ArrayList(len(trList)) - for tr in trList: - javaTRs.add(iscUtil.toJavaTimeRange(tr)) - - gridsAndHist = self._we.get(javaTRs, True) - for idx in range(gridsAndHist.size()): - pair = gridsAndHist.get(idx) - tr = iscUtil.transformTime(pair.getFirst().getValidTime()) - - if tr in self._loaded: - logger.debug("WECache reloading: %s", printTR(tr)) - else: - logger.debug("WECache loading: %s", printTR(tr)) - - g = self.__encodeGridSlice(pair.getFirst()) - h = self.__encodeGridHistory(pair.getSecond()) - self._inv[tr] = (g, h) - self._populated.add(tr) - self._loaded.add(tr) - - def keys(self): - if not self._invCache: - self._invCache = tuple(sorted(self._inv.keys(), key=lambda t: t[0])) - return self._invCache - - def __getitem__(self, tr): - logger.debug("WECache getting: %s", printTR(tr)) - if tr in self._populated or tr in self._dirty: - return self._inv[tr] - - if self._inv.has_key(tr): - self.__handleCacheMiss(tr) - return self._inv[tr] - else: - return None - - def __handleCacheMiss(self, tr): - """ - This function is called when a time range is requested that is not currently in the cache. - It will load the next batch of unpopulated grids in time range order starting with the time range passed in. - - If the cache does not have room for a batch of grids to be loaded without exceeding the max cache size - the earliest dirty grids (or clean if not enough dirty grids are found) are flushed to disk before reading - the next batch. 
- - Args: - tr: the missing time range - """ - logger.debug("WECache miss: %s", printTR(tr)) - # determine next batch of keys to read - toRead = self.keys() - toRead = toRead[toRead.index(tr):] - toRead = sorted(set(toRead) - self._populated, key=lambda t: t[0]) - toRead = toRead[:self._batchSize] - - # if not room to read another batch - if len(self._populated) + self._batchSize > self._maxCacheSize: - toFlush = [] - # first flush dirty grids - toFlush += self._populated & self._dirty - - # then flush clean grids if necessary - toFlush += self._populated - self._dirty - - # flush only enough to read a batch - toFlush = sorted(toFlush, key=lambda t: t[0]) - toFlush = toFlush[:self._maxCacheSize - self._batchSize] - self.__flushGrids(toFlush) - - self.__loadGrids(toRead) - - def __flushGrids(self, trList): - """ - Flush a list of time ranges from the cache. - Dirty time ranges will be written to disk. - Writes will be done in _batchSize groups - - Args: - trList: the list of time ranges to be flushed - """ - logger.debug("WECache flushing: %d grids", len(trList)) - - saveRequest = LinkedHashMap() - saveList = [] # python time ranges covered by this saveRequest - saveSize = 0 # number of grids in saveRequest - - # get full time range for flush - if (len(trList)): - sortedList = sorted(trList, key=lambda t: t[0]) - flushTR = (sortedList[0][0], sortedList[-1][1]) - else: - flushTR = (0, 2 ** 31 - 1) # all times - - timeSpan = None # time span if this contiguous batch - gridsToSave = ArrayList(self._batchSize) # grids in this contiguous batch - saveBatch = False - for tr in self.keys(): - if tr[1] <= flushTR[0]: - continue - if tr[0] >= flushTR[1]: - break - - dirty = tr in self._dirty - if dirty: - logger.debug("WECache storing: %s", printTR(tr)) - saveList.append(tr) - - pyGrid, pyHist = self._inv[tr] - if pyGrid is not None: - javaGrid = self.__buildJavaGridSlice(tr, pyGrid, pyHist) - gridsToSave.add(javaGrid) - if timeSpan is None: - timeSpan = [tr[0], tr[1]] - 
else: - timeSpan[1] = tr [1] - saveBatch = gridsToSave.size() >= self._batchSize - - else: # clean grid - # save contiguous dirty blocks - saveBatch = timeSpan is not None - - # if populated and clean just purge from cache - if tr in self._populated: - logger.debug("WECache purging: %s", printTR(tr)) - self._inv[tr] = None - self._populated.remove(tr) - else: - # skip any clean unpopulated grids - logger.debug("WECache skipping: %s", printTR(tr)) - - if saveBatch: - # add this contiguous batch to saveRequest - logger.debug("WECache saving %d grids in %s", gridsToSave.size(), printTR(timeSpan)) - gridSaveTR = iscUtil.toJavaTimeRange(timeSpan) - saveRequest.put(gridSaveTR, gridsToSave) - timeSpan = None - saveBatch = False - saveSize += gridsToSave.size() - gridsToSave = ArrayList(self._batchSize) - - # if this saveRequest has reached the batchSize save it - if saveSize >= self._batchSize: - try: - self._we.put(saveRequest) - except: - raise - else: # no exceptions on save, clear saved grids from cache - # depopulate save grids - for tr in saveList: - self._inv[tr] = None - self._populated.discard(tr) - self._dirty.remove(tr) - saveRequest.clear() - saveList = [] - saveSize = 0 - - # save partial batch if necessary - if len(saveList): - if timeSpan is not None: - logger.debug("WECache saving %d grids in %s", gridsToSave.size(), printTR(timeSpan)) - gridSaveTR = iscUtil.toJavaTimeRange(timeSpan) - saveRequest.put(gridSaveTR, gridsToSave) - - try: - self._we.put(saveRequest) - except: - raise - else: # no exceptions on save, clear saved grids from cache - # depopulate save grids - for tr in saveList: - self._inv[tr] = None - self._populated.discard(tr) - self._dirty.remove(tr) - - return - - def __setitem__(self, tr, value): - if value is None: - logger.debug("WECache erasing: %s", printTR(tr)) - grid = hist = None - else: - logger.debug("WECache setting: %s", printTR(tr)) - grid, hist = value - - # Remove any overlapping grids - tokill = [] - for itr in self._inv: - 
if self.overlaps(tr, itr): - tokill.append(itr) - for i in tokill: - del self._inv[i] - self._dirty.discard(i) - self._populated.discard(i) - self._loaded.discard(i) - self._invCache = None - - # if cache full flush some grids to disk - if len(self._populated) >= self._maxCacheSize: - toFlush = [] - # first flush dirty grids - toFlush += self._populated & self._dirty - - # then flush clean grids if necessary - toFlush += self._populated - self._dirty - - # flush a full batch is possible - toFlush = sorted(toFlush, key=lambda t: t[0]) - toFlush = toFlush[:self._batchSize] - self.__flushGrids(toFlush) - - # Now add the new grid - self._inv[tr] = (grid, hist) - self._dirty.add(tr) - self._loaded.add(tr) - self._invCache = None - if grid is not None: - self._populated.add(tr) - - def flush(self): - """Writes all dirty time ranges in the WECache to HDF5/DB""" - # flush entire inventory - self.__flushGrids(self.keys()) - - def overlaps(self, tr1, tr2): - if (tr1[0] >= tr2[0] and tr1[0] < tr2[1]) or \ - (tr2[0] >= tr1[0] and tr2[0] < tr1[1]): - return True - return False - - def __encodeGridSlice(self, grid): - gridType = self._we.getGridType() - if gridType == "SCALAR": - return grid.getNDArray() - elif gridType == "VECTOR": - vecGrids = grid.getNDArray() - return (vecGrids[0], vecGrids[1]) - elif gridType == "WEATHER" or gridType == "DISCRETE": - keys = grid.getKeys() - keyList = [] - for theKey in keys: - keyList.append(str(theKey)) - return (grid.getNDArray(), keyList) - - def __encodeGridHistory(self, histories): - retVal = [] - for i in xrange(histories.size()): - retVal.append(histories.get(i).getCodedString()) - return tuple(retVal) - - def __buildJavaGridSlice(self, tr, grid, history): - javaTR = iscUtil.toJavaTimeRange(tr) - javaHist = self.__buildJavaGridHistory(history) - - gridType = self._we.getGridType() - if gridType == "SCALAR": - return self._we.buildScalarSlice(javaTR, grid.astype(numpy.float32), javaHist) - elif gridType == "VECTOR": - return 
self._we.buildVectorSlice(javaTR, grid[0].astype(numpy.float32), grid[1].astype(numpy.float32), javaHist) - elif gridType == "WEATHER": - return self._we.buildWeatherSlice(javaTR, grid[0].astype(numpy.byte), str(grid[1]), javaHist) - elif gridType == "DISCRETE": - return self._we.buildDiscreteSlice(javaTR, grid[0].astype(numpy.byte), str(grid[1]), javaHist) - - def __buildJavaGridHistory(self, histories): - retVal = ArrayList() - blankPubTime = "Fcst" in str(self._we.getParmid().getDbId()) - for histEntry in histories: - javaHist = GridDataHistory(histEntry) - # strip out publish time to allow for publishing correctly - # when merging Fcst out of A1 - if blankPubTime: - javaHist.setPublishTime(None) - retVal.add(javaHist) - return retVal - - -class IscMosaic: - def __init__(self, args): - self.__mysite = args['siteID'] - ifpServer = IFPServer.getActiveServer(self.__mysite) - if ifpServer is None: - raise Exception("No active IFPServer for site: " + siteId) - - self.__config = ifpServer.getConfig() - - self.__userID = args['userID'] - self.__db = None # ifpServer database object - self.__dbGrid = None - self.__parmsToProcess = args['parmsToProcess'] - self.__blankOtherPeriods = args['blankOtherPeriods'] - self.__altMask = args['altMask'] - self.__replaceOnly = args['replaceOnly'] - self.__eraseFirst = args['eraseFirst'] - self.__announce = args['announce'] - self.__renameWE = args['renameWE'] - self.__iscSends = args['iscSends'] - if args['databaseID'] is not None: - self.__databaseID = args['databaseID'] - else: - self.__databaseID = self.__mysite + "_GRID__ISC_00000000_0000" - self.__inFiles = args['inFiles'] - self.__ignoreMask = args['ignoreMask'] - self.__adjustTranslate = args['adjustTranslate'] - self.__deleteInput = args['deleteInput'] - self.__parmsToIgnore = args['parmsToIgnore'] - self.__gridDelay = args['gridDelay'] - self.__logFile = args['logFileName'] - - startTime = 0 - if args['startTime'] is not None: - startTime = 
self.__decodeTimeString(args['startTime']) - endTime = int(2 ** 30 - 1 + 2 ** 30) - if args['endTime'] is not None: - endTime = self.__decodeTimeString(args['endTime']) - self.__processTimePeriod = (startTime, endTime) - - initLogger(self.__logFile) - logger.info("iscMosaic Starting args: %s", str(args)) - - def execute(self): - - # get the WxDefinition and DiscreteDefinition - self.__wxDef = self.__config.getWxDefinition() - self.__disDef = self.__config.getDiscreteDefinition() - - self.__db = IFPDB(self.__databaseID) - - # parms in database - if len(self.__parmsToProcess) == 0: - parmsInDb = self.__db.getKeys() - for i in range(0, parmsInDb.size()): - self.__parmsToProcess.append(str(parmsInDb.get(i))) - - # get office type information - self.__myOfficeType = self.__config.officeType() - - #process each input file - for file in self.__inFiles: - self.__areaMask = None - self.__processInputFile(file) - - if self.__deleteInput: - os.remove(file) - - logger.info("iscMosaic Finished") - - def __processInputFile(self, filename): - - a = os.times() - cpu0 = a[0] + a[1] - start = a[4] - - logger.info("Processing file=%s", filename) - fsize = os.stat(filename)[stat.ST_SIZE] - logger.info("Input file size: %d", fsize) - - gzipFile = None - unzippedFile = None - gzipped = True - try: - import gzip - gzipFile = gzip.open(filename, 'rb') - unzippedFile = open(filename + ".unzipped", 'w') - while True: - buffer = gzipFile.read(65536) - if len(buffer) == 0: - break - unzippedFile.write(buffer) - except IOError as e: - if e.message == "Not a gzipped file": - gzipped = False - else: - raise - else: - # no errors, close and rename the file - unzippedFile.close() - gzipFile.close() - os.rename(unzippedFile.name, gzipFile.filename) - gzipFile = unzippedFile = None - finally: - # close the files in case of error - if gzipFile is not None: - gzipFile.close() - if unzippedFile is not None: - unzippedFile.close() - if not gzipped: - os.remove(unzippedFile.name) - - a = os.times() - 
cpugz = a[0] + a[1] - stopgz = a[4] - - file = NetCDF.NetCDFFile(filename, "r") - - # check version - fileV = getattr(file, 'fileFormatVersion') - if fileV != "20010816" and fileV != "20030117": - logger.error("Incompatible file format found") - raise Exception, "Incompatible file format" - - # get creation time - self.__creTime = getattr(file, 'creationTime') - creTimeString = time.asctime(time.gmtime(self.__creTime)) - logger.info("CreationTime: %s" , creTimeString) - - # get file start/end processing times - self.__modProcTime = self.__getFileProcessingTimes(file) - if self.__modProcTime is None: - return None - logger.info("Process TR: %s", printTR(self.__modProcTime)) - - # prepare for the notification message - totalTimeRange = None - totalGrids = 0 - totalFails = 0 - pParms = [] - self.__adjDataMsg = [] - - # process each parm in the netCDF file - # Only use one area mask for all parms. This will break - # if we ever use parms with differing dims in a database. - areaMask = None - inFileVars = file.variables.keys() #parm names - - for parm in inFileVars: - tup = self.__getNetCDFInputVariables(file, parm) - - if tup is None: - continue - else: - parmName = tup[0] - vars = tup[1] - remapHistory = tup[2] - - # rename weather element - if self.__renameWE: - siteID = str(getattr(vars[0], "siteID")) - incomingOfficeType = self.__config.getOfficeType(siteID) - if incomingOfficeType != self.__myOfficeType: - idx = parmName.rfind("_") - parmName = parmName[0:idx] + incomingOfficeType + \ - parmName[idx:] - logger.info("Renamed to: %s data from %s", parmName, siteID) - - # ignore this parm? - if parmName in self.__parmsToIgnore: - logger.info("Ignoring %s", parmName) - continue - - # match in ifp database? 
- if not parmName in self.__parmsToProcess and \ - len(self.__parmsToProcess) != 0: - logger.info("Skipping %s", parmName) - continue - - (pName, pTR, nGrids, nFail) = self.__processParm(parmName, vars, remapHistory, filename) - - # save info for the notification message - pParms.append(pName) - if pTR is not None: - if totalTimeRange is None: - totalTimeRange = pTR - else: - if totalTimeRange[0] > pTR[0]: - totalTimeRange = (pTR[0], totalTimeRange[1]) - if totalTimeRange[1] < pTR[1]: - totalTimeRange = (totalTimeRange[0], pTR[1]) - totalGrids = totalGrids + nGrids - totalFails = totalFails + nFail - - file.close() - - #announce storage - if len(self.__announce) and totalGrids > 0: - msg = self.__announce + self.__siteID + ' ' + `pParms` + ' ' + printShortTR(totalTimeRange) + ' #Grids=' + `totalGrids` - if totalFails: - msg = msg + '[' + `totalFails` + ' FAILED]' - notification = UserMessageNotification(msg, Priority.CRITICAL, "ISC", self.__mysite) - else: - notification = UserMessageNotification(msg, Priority.EVENTA, "ISC", self.__mysite) - logger.info(msg) - SendNotifications.send(notification) - - # announce "modified/adjusted" data - if len(self.__announce) and len(self.__adjDataMsg): - msg = "ISC Data Modified:\n" + "\n".join(self.__adjDataMsg) - notification = UserMessageNotification(msg, Priority.EVENTA, "ISC", self.__mysite) - SendNotifications.send(notification) - - a = os.times() - cpu = a[0] + a[1] - stop = a[4] - logger.info("Elapsed/CPU time: " - "%-.2f / %-.2f decompress, " - "%-.2f / %-.2f processing, " - "%-.2f / %-.2f total", - stopgz - start, cpugz - cpu0, - stop - stopgz, cpu - cpugz, - stop - start, cpu - cpu0) - - - def __processParm(self, parmName, vars, history, filename): - - retries = 5 - retryAttempt = 0 - pName = string.replace(parmName, "_SFC", "") - totalTimeRange = None - inTimesProc = [] - numFailed = 0 - - self.__siteID = str(getattr(vars[0], "siteID")) - inTimes = self.__getIncomingValidTimes(vars[0]) - logger.info("Processing %s 
#Grids=%d Site=%s", parmName, len(inTimes), self.__siteID) - - if self.__blankOtherPeriods or self.__eraseFirst or len(inTimes) > 0: - while retryAttempt != retries: - lockName = parmName + ":" + self.__databaseID - logger.debug("iscMosaic: Attempting to acquire cluster lock for: %s", lockName) - startTime = time.time() - clusterLock = ClusterLockUtils.lock("ISC Write Lock", lockName , 120000, True) - elapsedTime = (time.time() - startTime) * 1000 - logger.debug("iscMosaic: Request for %s took %d ms", lockName, elapsedTime) - if str(clusterLock.getLockState()) == "SUCCESSFUL": - logger.debug("iscMosaic: Successfully acquired cluster lock for: %s", lockName) - try: - # open up the ifpServer weather element - self.__dbwe = self.__db.getItem(parmName, ISC_USER) - self._wec = WECache(self.__dbwe, tr=self.__modProcTime) - self.__rateParm = self.__dbwe.getGpi().isRateParm() - self.__parmName = parmName - - # get general info for the parm from the input file and output db - inGeoDict = self.__getInputGeoInfo(vars[0]) - inFillV = self.__determineFillValue(vars[0]) - - gridType = getattr(vars[0], "gridType") - minV = self.__dbwe.getGpi().getMinValue() - # compute the site mask - - if self.__areaMask is None: - self.__areaMask = self.__computeAreaMask().getGrid().getNDArray().astype(numpy.bool) - - # create the mergeGrid class - mGrid = mergeGrid.MergeGrid(self.__creTime, self.__siteID, inFillV, - minV, self.__areaMask, gridType, self.__dbwe.getDiscreteKeys()) - - # erase all existing grids first? - self.__dbinv = self._wec.keys() - try: - self.__splitGridsOnProcBoundary(self.__modProcTime) - except: - logger.exception('Failure to splitGridsOnProcBoundary Parm=%s Time=%s', - parmName, printTR(self.__modProcTime)) - - if self.__eraseFirst: - self.__eraseAllGrids(self.__modProcTime) - - # process each incoming grid - inTimesProc = [] - numFailed = 0 - - # process incoming grids - for i in xrange(len(inTimes)): - # Put in a delay so we don't hammer the server so hard. 
- if self.__gridDelay > 0.0: - time.sleep(self.__gridDelay) - tr = iscTime.intersection(inTimes[i], self.__modProcTime) - if tr is not None: - inTimesProc.append(tr) - try: - logger.debug("Processing Grid: %s TR=%s", parmName, printTR(tr)) - - # get the grid and remap it - grid = self.__getGridFromNetCDF(gridType, vars, i) - - # if WEATHER or DISCRETE, then validate and adjust keys - if self.__adjustTranslate: - if gridType == "DISCRETE": - grid = self.__validateAdjustDiscreteKeys(grid, - self.__parmName, tr) - elif gridType == "WEATHER": - grid = self.__validateAdjustWeatherKeys(grid, - self.__parmName, tr) - - grid = self.__remap(self.__dbwe, grid, inGeoDict, inFillV) - - # if rate parm, then may need to adjust the values - if self.__rateParm and inTimes[i] != tr: - grid = self.__adjustForTime(inTimes[i], tr, grid, - inFillV) - - # merge the grids - self.__processIncomingGrid(lockName, grid, history[i], - mGrid, tr, inFillV) - - except: - logger.exception('Failure to process grid in file [%s] Parm=%s Time=%s', - filename, parmName, printTR(tr)) - numFailed = numFailed + 1 - - else: - logger.debug("Skipping Grid: %s TR=%s outside start/end range", - parmName, printTR(tr)) - - - # blank out any gaps - if self.__blankOtherPeriods == 1: - blankTimes = self.__calcBlankingTimes(inTimesProc) - # get updated inventory - - for i in xrange(len(blankTimes)): - tr = iscTime.intersection(blankTimes[i], self.__modProcTime) - if tr is not None: - try: - logger.debug("Processing Blank: %s TR=%s", - parmName, printTR(tr)) - self.__processBlankTime(mGrid, tr) - except: - logger.exception('Failure to process grid blanking Parm=%s Time=%s', - parmName, printTR(tr)) - - - - # Returns tuple of (parmName, TR, #grids, #fails) - if len(inTimesProc): - totalTimeRange = (inTimesProc[0][0], inTimesProc[ -1][ -1] - 3600) - self._wec.flush() - - retryAttempt = retries - except: - retryAttempt = retryAttempt + 1 - logger.exception("Error saving ISC data. 
Retrying ( %d / %d )", retryAttempt, retries) - time.sleep(1) - finally: - logger.debug("iscMosaic: Attempting to release cluster lock for: %s", lockName) - ClusterLockUtils.unlock(clusterLock, False) - logger.debug("iscMosaic: Successfully released cluster lock for: %s", lockName) - elif str(clusterLock.getLockState()) == "OLD": - retryAttempt = retryAttempt + 1 - # Clear old lock to retry - logger.debug("Old lock retrieved for ISC write. Attempting to renew lock") - ClusterLockUtils.unlock(clusterLock, False) - elif str(clusterLock.getLockState()) == "FAILED": - retryAttempt = retryAttempt + 1 - if retryAttempt == retries: - logger.error("Cluster lock could not be established for %s at time range %s Data was not saved.", - self._we.getParmid(), TimeRange(tr[0], tr[1])) - else: - logger.error("Cluster lock request failed for ISC write. Retrying ( %d / %d )", - retryAttempt, retries) - time.sleep(1) - - return (pName, totalTimeRange, len(inTimesProc), numFailed) - - def __processIncomingGrid(self, lockName, remappedGrid, remappedHistory, mGrid, tr, inFillV): - # calculate merge - merge = iscTime.mergeTR(tr, self.__dbinv) - - # get the associated db grids, merge, and store - for m in merge: - # update cluster lock time to avoid time out - ClusterLockUtils.updateLockTime("ISC Write Lock", lockName , System.currentTimeMillis()) - - logger.debug("Merge: %s %s %s", printTR(m[0]), - printTR(m[1]), m[2]) - gotGrid = self.__getDbGrid(m[0]) - - if gotGrid is not None: - destGrid = gotGrid[0] - oldHist = gotGrid[1] - else: - destGrid = None - oldHist = None - - # non-rate parms -- keep the data values the same - if not self.__rateParm: - - # merge the grids, but only if the overlaps flag is set, - # we use the minimum value for the fill value since we don't - # support sparse populated grids - if m[2] == 1 or (m[2] == 0 and m[0] == None): - if self.__replaceOnly: - mergedGrid = mGrid.mergeGrid( - (remappedGrid, remappedHistory), None) - else: - mergedGrid = mGrid.mergeGrid ( 
- (remappedGrid, remappedHistory), (destGrid, oldHist)) - - else: - mergedGrid = (destGrid, oldHist) - - # rate parms -- adjust data values based on times - else: - # merge the grids, but only if the overlaps flag is set, - # we use the minimum value for the fill value since we don't - # support sparse populated grids - if m[2] == 1 or (m[2] == 0 and m[0] == None): - if self.__replaceOnly: - adjGrid = self.__adjustForTime(tr, m[1], remappedGrid, - inFillV) - mergedGrid = mGrid.mergeGrid( - (adjGrid, remappedHistory), None) - else: - adjGridIn = self.__adjustForTime(tr, m[1], - remappedGrid, inFillV) - adjGridDb = self.__adjustForTime(m[0], m[1], destGrid, - 0.0) - mergedGrid = mGrid.mergeGrid(\ - (adjGridIn, remappedHistory), - (adjGridDb, oldHist)) - - else: - adjGrid = self.__adjustForTime(m[0], m[1], destGrid, 0.0) - mergedGrid = (adjGrid, oldHist) - - # store merged grid - self.__storeGrid(m[1], mergedGrid) - - def __storeGrid(self, tr, grid): - if grid is not None and grid[1] is not None and grid[0] is not None: - logger.debug("Store: %s", printTR(tr)) - self._wec[tr] = grid - - if tr not in self.__dbinv: - self.__dbinv = self._wec.keys() - else: - logger.debug("Erase: %s", printTR(tr)) - self._wec[tr] = None - self.__dbinv = self._wec.keys() - - #--------------------------------------------------------------------- - # get db grid - # Gets the needed database grid - # tr = desired grid, identified by time range - # Returns tuple of (grid, history) (or None if unknown) - #--------------------------------------------------------------------- - def __getDbGrid(self, tr): - if tr is None: - return None - - if self.__dbGrid is None or tr != self.__dbGrid[2]: - self.__dbGrid = None - grid = self._wec[tr] - if grid is not None: - destGrid, history = grid - self.__dbGrid = (destGrid, history, tr) - else: - logger.error("Unable to access grid for %s for %s", printTR(tr), self.__parmName) - return None - - return (self.__dbGrid[0], self.__dbGrid[1]) - - 
#--------------------------------------------------------------------- - # calculate file start/end processing times - # Returns (startTime, endTime) or None for processing - # file= netcdf file descriptor object - #--------------------------------------------------------------------- - def __getFileProcessingTimes(self, file): - - # try: - startTime = self.__decodeTimeString(getattr(file, 'startProcTime')) - endTime = self.__decodeTimeString(getattr(file, 'endProcTime')) - modProcTime = iscTime.intersection((startTime, endTime), - self.__processTimePeriod) - - if modProcTime is None: - logger.error("Skipping file due to non overlapping periods") - return modProcTime - - def __decodeTimeString(self, timeStr): - "Create an Integer time from a string: YYYYMMDD_HHMM" - - importError = True - while importError: - try: - timeTuple = time.strptime(timeStr, "%Y%m%d_%H%M") - importError = False - except ImportError: - importError = True - except: - logger.exception("%s is not a valid time string. Use YYYYMMDD_HHMM", timeStr) - raise Exception, "Bad date format YYYYMMDD_HHMM" - - return calendar.timegm(timeTuple) - - #--------------------------------------------------------------------- - # get netcdf input variables - # Gets the input variables from the netCDF file based on the parm name. - # The netCDF file is opened on file. - # Returns them as three tuples: (dbParmName, vars, history[]) - # The vars is an array depending upon the data type: - # scalar [0], vector [0=mag,1=dir], wx [0=grid,1=key]. - # The history is a list of history strings. 
- #--------------------------------------------------------------------- - def __getNetCDFInputVariables(self, file, parmName): - - var = file.variables[parmName] - - # make sure it is a weather element variable - if not hasattr(var, "validTimes"): - return None - - gridType = getattr(var, "gridType") - - # get the history info - if gridType == 'SCALAR': - pn = parmName + "_GridHistory" - elif gridType == 'VECTOR': - indx = string.find(parmName, "_Mag_") - if indx == -1: - return None - pn = parmName[0:indx + 1] + parmName[indx + 5:] + "_GridHistory" - elif gridType == 'WEATHER': - pn = parmName + "_GridHistory" - elif gridType == 'DISCRETE': - pn = parmName + "_GridHistory" - hvar = file.variables[pn] - history = [] - for i in xrange(0, hvar.shape[0]): - h = string.strip(hvar[i].tostring()) - history.append(string.split(h, '^')) - - # handle special cases for Vector and Wx, need to use a second - # variable for wind and weather - gridType = getattr(var, "gridType") - - if gridType == 'SCALAR': - return (parmName, [var], history) - - elif gridType == 'VECTOR': - indx = string.find(parmName, "_Mag_") - if indx != -1: - dirparm = parmName[0:indx] + "_Dir_" + parmName[indx + 5:] - varDir = file.variables[dirparm] - dbparmName = parmName[0:indx] + parmName[indx + 4:] - return (dbparmName, [var, varDir], history) - else: - return None - - elif gridType == 'WEATHER': - varKey = file.variables[parmName + "_wxKeys"] - return (parmName, [var, varKey], history) - - elif gridType == 'DISCRETE': - varKey = file.variables[parmName + "_keys"] - return (parmName, [var, varKey], history) - - else: - return None - - #------------------------------------------------------------------------- - # Get Geographical Input Information - # var is the netCDF variable - #------------------------------------------------------------------------- - def __getInputGeoInfo(self, var): - - # define minimum standard - inProjData = { - 'latIntersect': 0.0, - 'latLonOrigin': (0.0, 0.0), - 
'stdParallelTwo': 0.0, - 'stdParallelOne': 0.0, - 'lonCenter': 0.0, - 'lonOrigin': 0.0, - 'latIntersect': 0.0, - 'projectionID': 'hi' - } - - # all projections have this information - data = getattr(var, "latLonLL") - inProjData['latLonLL'] = (float(data[0]), float(data[1])) - data = getattr(var, "latLonUR") - inProjData['latLonUR'] = (float(data[0]), float(data[1])) - inProjData['projectionType'] = str(getattr(var, "projectionType")) - data = getattr(var, "gridPointLL") - inProjData['gridPointLL'] = (int(data[0]), int(data[1])) - data = getattr(var, "gridPointUR") - inProjData['gridPointUR'] = (int(data[0]), int(data[1])) - - # lambert conformal specific information - if inProjData['projectionType'] == 'LAMBERT_CONFORMAL': - data = getattr(var, "latLonOrigin") - inProjData['latLonOrigin'] = (float(data[0]), float(data[1])) - data = getattr(var, "stdParallelOne") - inProjData['stdParallelOne'] = float(data) - data = getattr(var, "stdParallelTwo") - inProjData['stdParallelTwo'] = float(data) - - # polar stereographic - elif inProjData['projectionType'] == 'POLAR_STEREOGRAPHIC': - data = getattr(var, "lonOrigin") - inProjData['lonOrigin'] = float(data) - - # mercator - elif inProjData['projectionType'] == 'MERCATOR': - data = getattr(var, "lonCenter") - inProjData['lonCenter'] = float(data) - - # get specific grid sizes and domains - data = getattr(var, "gridSize") - inProjData['gridSize'] = (int(data[0]), int(data[1])) - origin = getattr(var, "domainOrigin") - extent = getattr(var, "domainExtent") - inProjData['gridDomain'] = \ - ((float(origin[0]), float(origin[1])), (float(extent[0]), float(extent[1]))) - - return inProjData - - #--------------------------------------------------------------------- - # determine fill value for input - # vars = netCDF variables - # Returns fill value to use - # Note: file fill value may be different from processing fill value - # since data may have to be multiplied and offset. 
- #--------------------------------------------------------------------- - def __determineFillValue(self, var): - gridType = getattr(var, "gridType") - if gridType == 'SCALAR' or gridType == 'VECTOR': - return -30000.0 - else: - return -127 - - #--------------------------------------------------------------------- - # compute the area mask - # Returns areaMask to use based on the siteID. for ISC data, - # the edit area is normally ISC_xxx where xxx is the WFO. - #--------------------------------------------------------------------- - def __computeAreaMask(self): - if self.__ignoreMask: - domain = self.__dbwe.getGpi().getGridLoc() - #maskDims = (int(domain.getNy()), int(domain.getNx())) - #areaMask = numpy.ones(maskDims) - #areaMask.setGloc(domain) - - - areaMask = ReferenceData(domain, ReferenceID("full"), None, CoordinateType.GRID) - areaMask.getGrid() - areaMask.invert() - - elif self.__altMask is not None: - try: - areaMask = iscUtil.getEditArea(self.__altMask, self.__mysite) - areaMask.setGloc(self.__dbwe.getGpi().getGridLoc()) - except: - logger.exception("Unable to access edit mask [%s]", - self.__altMask) - raise Exception("Unknown edit area mask [%s]" % self.__altMask) - else: - maskName = "ISC_" + self.__siteID - try: - areaMask = iscUtil.getEditArea(maskName, self.__mysite) - areaMask.setGloc(self.__dbwe.getGpi().getGridLoc()) - except: - logger.exception("Unable to access edit mask [%s]", maskName) - raise Exception("Unknown edit area mask [%s]" % maskName) - - return areaMask - - #--------------------------------------------------------------------- - # split grids on processing time, so to preserve all grids that - # overlap partially the processing time - # processTimePeriod = time range to process grids for splits - #--------------------------------------------------------------------- - def __splitGridsOnProcBoundary(self, processTimePeriod): - dbinv = self.__dbinv - mergeInfo = iscTime.mergeTR(processTimePeriod, dbinv) - oldGrid = None - oldTR = 
None - for m in mergeInfo: - if m[0] != m[1]: #split grid needed - if m[0] != oldTR: - oldGrid = self.__getDbGrid(m[0]) - oldTR = m[0] - if oldGrid is not None: - if self.__rateParm: - adjGrid = self.__adjustForTime(m[0], m[1], oldGrid[0], - 0.0) #old db grids don't have missing value flags - self.__storeGrid(m[1], (adjGrid, oldGrid[1])) - else: - self.__storeGrid(m[1], oldGrid) - self.__dbGrid = None - - #------------------------------------------------------------------------- - # Get Incoming netCDF file grid valid times - # netCDFfile, var is the netCDF variable - #------------------------------------------------------------------------- - def __getIncomingValidTimes(self, var): - inTimesA = getattr(var, "validTimes") - ntimes = len(inTimesA) / 2 - times = [] - for t in xrange(ntimes): - times.append((inTimesA[t * 2], inTimesA[t * 2 + 1])) - return times - - #--------------------------------------------------------------------- - # get grid from netCDF file. - # gridType = type of grid: scalar, vector, weather - # vars = netCDF variables - # index = grid index - # Returns grid as: - # scalar = grid - # vector = (magGrid, dirGrid) - # weather = (grid, key) - # Note: the values in the grid may need to be changed if their is - # a dataMultiplier or dataOffset attributes present. This will - # also change the fill Value. 
- #--------------------------------------------------------------------- - def __getGridFromNetCDF(self, gridType, vars, index): - if gridType == 'SCALAR': - grid = numpy.flipud(vars[0][index]) - return self.__scaleGrid(vars[0], grid) - - elif gridType == 'VECTOR': - magGrid = numpy.flipud(vars[0][index]) - dirGrid = numpy.flipud(vars[1][index]) - return (self.__scaleGrid(vars[0], magGrid), - self.__scaleGrid(vars[1], dirGrid)) - - elif gridType == 'WEATHER': - compKey = self.__compressKey(vars[1][index, :, :]) - grid = (numpy.flipud(vars[0][index]), compKey) - - elif gridType == 'DISCRETE': - compKey = self.__compressKey(vars[1][index, :, :]) - grid = (numpy.flipud(vars[0][index]), compKey) - - return grid - - #--------------------------------------------------------------------- - # scaling changes for incoming grids - # var = netCDF variable - # grid = input grid - # only should be called for SCALAR/VECTOR - #--------------------------------------------------------------------- - def __scaleGrid(self, var, grid): - #scaling changes - inFillV = getattr(var, "fillValue") - - # any scaling needed? 
- try: - multiplier = getattr(var, "dataMultiplier") - offset = getattr(var, "dataOffset") - except: - multiplier = None - offset = None - - outFillV = self.__determineFillValue(var) - if outFillV == inFillV and multiplier is None: - return grid # no changes needed - outFillV = numpy.float32(outFillV) - - # get mask of valid points - goodDataMask = numpy.not_equal(grid, inFillV) - - # apply the scaling, make a float32 grid - if multiplier is not None: - tempGrid = (grid.astype(numpy.float32) * multiplier) + offset - grid = numpy.where(goodDataMask, tempGrid, outFillV) - # no scaling needed, but the fill value needs changing - else: - grid = numpy.where(goodDataMask, grid, outFillV) - - return grid.astype(numpy.float32) - - def __remap(self, we, grid, inGeoDict, inFillV): - gpi = we.getGpi() - - gridType = str(gpi.getGridType()) - - gs = self.__decodeGridSlice(we, grid, TimeRange()) - - pd = self.__decodeProj(inGeoDict) - fill = inFillV - ifill = int(inFillV) - - origin = Coordinate(float(str(inGeoDict['gridDomain'][0][0])), float(str(inGeoDict['gridDomain'][0][1]))) - extent = Coordinate(float(str(inGeoDict['gridDomain'][1][0])), float(str(inGeoDict['gridDomain'][1][1]))) - - gl = GridLocation("iscMosaic", pd, self.__getSize(gs), origin, extent, "GMT") - mapper = RemapGrid(gl, gpi.getGridLoc()) - - if gridType == 'SCALAR': - newGrid = mapper.remap(gs.getScalarGrid(), fill, gpi.getMaxValue(), gpi.getMinValue(), fill) - return newGrid.getNDArray() - - elif gridType == 'VECTOR': - magGrid = Grid2DFloat(int(gs.getGridParmInfo().getGridLoc().getNx()), int(gs.getGridParmInfo().getGridLoc().getNy())) - dirGrid = Grid2DFloat(int(gs.getGridParmInfo().getGridLoc().getNx()), int(gs.getGridParmInfo().getGridLoc().getNy())) - mapper.remap(gs.getMagGrid(), gs.getDirGrid(), fill, gpi.getMaxValue(), gpi.getMinValue(), fill, magGrid, dirGrid) - return (magGrid.getNDArray(), dirGrid.getNDArray()) - - elif gridType == 'WEATHER': - newGrid = mapper.remap(gs.getWeatherGrid(), fill, 
fill) - return (newGrid.getNDArray(), grid[1]) - - elif gridType == 'DISCRETE': - newGrid = mapper.remap(gs.getDiscreteGrid(), fill, fill) - return (newGrid.getNDArray(), grid[1]) - - def __decodeGridSlice(self, we, value, tr, history=None): - pid = we.getParmid() - gpi = we.getGpi() - - gridType = str(gpi.getGridType()) - - hist = ArrayList() - - if history is None: - hist.add(GridDataHistory(OriginType.INITIALIZED, pid, tr)) - else: - #FIXME - for i in range(0, len(history)): - hist.add(history[i]) - - if gridType == 'SCALAR': - data = Grid2DFloat.createGrid(value.shape[1], value.shape[0], value) - slice = ScalarGridSlice(tr, gpi, hist, data) - elif gridType == 'VECTOR': - - magVal = value[0] - dirVal = value[1] - - magGrid = Grid2DFloat.createGrid(magVal.shape[1], magVal.shape[0], magVal) - dirGrid = Grid2DFloat.createGrid(dirVal.shape[1], dirVal.shape[0], dirVal) - slice = VectorGridSlice(tr, gpi, hist, magGrid, dirGrid) - elif gridType == 'WEATHER': - data = Grid2DByte.createGrid(value[0].shape[1], value[0].shape[0], value[0]) - keyList = ArrayList() - for key in value[1]: - keyList.add(WeatherKey()) - slice = WeatherGridSlice() - slice.setValidTime(tr) - slice.setGridParmInfo(gpi) - slice.setGridDataHistory(hist) - slice.setWeatherGrid(data) - slice.setKey(keyList) - elif gridType == 'DISCRETE': - data = Grid2DByte.createGrid(value[0].shape[1], value[0].shape[0], value[0]) - keyList = ArrayList() - for key in value[1]: - keyList.add(DiscreteKey()) - slice = DiscreteGridSlice() - slice.setValidTime(tr) - slice.setGridParmInfo(gpi) - slice.setGridDataHistory(hist) - slice.setDiscreteGrid(data) - slice.setKey(keyList) - return slice - - def __decodeProj(self, pdata): - - pid = "GrandUnifiedRemappingProj" - type = ProjectionType.valueOf(pdata["projectionType"]) - llLL = Coordinate(pdata["latLonLL"][0], pdata["latLonLL"][1]) - llUR = Coordinate(pdata["latLonUR"][0], pdata["latLonUR"][1]) - llo = Coordinate(pdata["latLonOrigin"][0], pdata["latLonOrigin"][1]) - sp1 
= pdata["stdParallelOne"] - sp2 = pdata["stdParallelTwo"] - gpll = Point(pdata["gridPointLL"][0], pdata["gridPointLL"][1]) - gpur = Point(pdata["gridPointUR"][0], pdata["gridPointUR"][1]) - lati = pdata["latIntersect"] - lonc = pdata["lonCenter"] - lono = pdata["lonOrigin"] - - return ProjectionData(pid, type, llLL, llUR, llo, sp1, sp2, gpll, gpur, lati, lonc, lono) - - def __getSize(self, gs): - gridType = str(gs.getGridParmInfo().getGridType()) - - if gridType == "SCALAR" or gridType == "VECTOR": - return Point(gs.getScalarGrid().getXdim(), gs.getScalarGrid().getYdim()) - elif gridType == "WEATHER": - return Point(gs.getWeatherGrid().getXdim(), gs.getWeatherGrid().getYdim()) - elif gridType == "DISCRETE": - return Point(gs.getDiscreteGrid().getXdim(), gs.getDiscreteGrid().getYdim()) - else: - return None - - #--------------------------------------------------------------------- - # compress key (weather or discrete) - # eliminates the "blank" keys that may exist in the input key - # from the netCDF file. - #--------------------------------------------------------------------- - def __compressKey(self, keys): - outKeys = [] - shape = keys.shape - for k in xrange(shape[0]): - s = "" - for i in xrange(shape[1]): - c = str(keys[k][i]) - if c != '\0': - s = s + c - s = string.strip(s) - if len(s) > 0: - outKeys.append(s) - return outKeys - - #--------------------------------------------------------------------- - # adjust for time - # Adjusts a rate dependent grid based on time durations. No processing - # occurs if the grid, or the times are not valid. - # Returns the new grid. - # trOrg = original grid time range - # trNew = new grid time range - # grid = old grid (NOT HISTORY) - # fillValue = grid fill value - # where the grid is a scalar, vector pair, or weather grid/key pair) - # Returns new grid, adjusted by duration changes. 
- #--------------------------------------------------------------------- - def __adjustForTime(self, trOrg, trNew, grid, fillValue): - if not self.__rateParm or grid is None: - return grid - newDuration = float(trNew[1] - trNew[0]) - orgDuration = float(trOrg[1] - trOrg[0]) - durationRatio = newDuration / orgDuration - - dataType = str(self.__dbwe.getGpi().getGridType()) - if dataType == 'SCALAR': - fillMask = numpy.equal(grid, fillValue) - return numpy.where(fillMask, grid, grid * durationRatio) - elif dataType == 'VECTOR': - fillMask = numpy.equal(grid[0], fillValue) - newMagGrid = numpy.where(fillMask, grid[0], (grid[0] * durationRatio)) - return (grid[0], grid[1]) - else: - return grid - - def __calcBlankingTimes(self, inTimes): - out = [] - if len(inTimes) == 0: - out.append((0, 2 ** 30 - 1 + 2 ** 30)) - else: - for t in range(len(inTimes)): - if t == 0 and inTimes[t][0] != 0: - out.append((0, inTimes[t][0])) - elif t != 0 : - tr = (inTimes[t - 1][1], inTimes[t][0]) - if tr[0] != tr[1]: - out.append(tr) - - endIndex = len(inTimes) - 1 - tr = (inTimes[endIndex][1], 2 ** 30 - 1 + 2 ** 30) - if tr[0] != tr[1]: - out.append(tr) - - # now limit to the modProcTime - outLimit = [] - for t in out: - inter = iscTime.intersection(t, self.__modProcTime) - if inter is not None: - outLimit.append(inter) - - return outLimit - - def __processBlankTime(self, mGrid, tr): - - # calculate the merge - merge = iscTime.mergeTR(tr, self.__dbinv) - - for m in merge: - # blank the grids, but only if the overlaps flag is clear, - if m[0] != None and m[2] == 1: - if self.__siteInDbGrid(m[0]): - try: - (destGrid, oldHist) = self.__getDbGrid(m[0]) - except: - destGrid = None - oldHist = None - - if self.__rateParm: - adjGrid = self.__adjustForTime(m[0], m[1], destGrid, - 0.0) #old db grids don't have missing data flags - mergedGrid = mGrid.mergeGrid(None, \ - (adjGrid, oldHist)) - else: - mergedGrid = mGrid.mergeGrid(None, (destGrid, oldHist)) - self.__storeGrid(m[1], mergedGrid) - - def 
__siteInDbGrid(self, tr): - if tr is None: - return None - - grid, history = self._wec[tr] - - if history: - for h in history: - if self.__siteID + "_GRID" in h: - return True - - return False - - #--------------------------------------------------------------------- - # validateAdjustDiscreteKeys() - # grid = incoming grid (grid, key) - # parmName = parm name - # tr = time range of grid - # - # returns 'adjusted grid' with a potentially modified key. The key - # is guaranteed to be compatible with the current ifpServer definition. - #--------------------------------------------------------------------- - def __validateAdjustDiscreteKeys(self, grid, parmName, tr): - - (g, key) = grid #separate out for processing - - if parmName.find("_") == -1: - parmName = parmName + "_SFC" #need parmName_level for dict - - # set up error message - smsg = "Adjusting DiscreteKey for Compatibility: " + parmName + \ - ' tr=' + printTR(tr) - - # get the list of discrete keys for this parameter that are allowed - dd = self.__disDef.keys(parmName) - if dd.size() == 0: - logger.error("Unable to validate keys for %s - no def in DiscreteDefinition", - parmName) - return grid - - #now go through the incoming grid's keys and validate each one - for idx in xrange(len(key)): #each index of the discrete key - keyentry = key[idx] #each discrete key entry - oldEntry = keyentry #save an unchanged copy for reporting - changedReasons = [] - - #overlap check - ci = keyentry.find("^") - if ci != -1 and not self.__disDef.overlaps(parmName): - keyentry = keyentry[0:ci] #reset it to only 1st portion - changedReasons.append("No Overlap Allowed") - - eachKey = keyentry.split("^") - for idx1 in xrange(len(eachKey)): - ke = eachKey[idx1] #each discretesubkey in a discrete key - ai = ke.find(":") #look for aux data - if ai != -1: - aux = ke[ai + 1:] - base = ke[0:ai] - - #too long of aux data check - if len(aux) > self.__disDef.auxDataLength(parmName): - ke = base #eliminate the aux data - 
changedReasons.append("AuxData Length Exceeded") - - else: - aux = None - base = ke #with no aux data - - #valid key specified check - validKey = False - for i in xrange(dd.size()): - if dd.get(i).getSymbol() == base: - validKey = True - break - if not validKey: - if aux: - ke = dd.get(0).getSymbol() + ":" + aux - else: - ke = dd.get(0).getSymbol() #use 1st one - - changedReasons.append("Unknown Key") - - eachKey[idx1] = ke #store back into list - - keyentry = "^".join(eachKey) #join back to string - if len(changedReasons): - logger.error("%s from [%s] to [%s] (%s)", - smsg, oldEntry, keyentry, ",".join(changedReasons)) - - msg = "%s %s %s [%s] -> [%s] (%s)" % \ - (self.__siteID, parmName, printShortTR(tr), oldEntry, keyentry, ",".join(changedReasons)) - self.__adjDataMsg.append(msg) - - key[idx] = keyentry #store back into list - - - return (g, key) - - #--------------------------------------------------------------------- - # validateAdjustWeatherKeys() - # grid = incoming grid (grid, key) - # parmName = parm name - # tr = time range of grid - # - # returns 'adjusted grid' - #--------------------------------------------------------------------- - def __validateAdjustWeatherKeys(self, grid, parmName, tr): - (g, key) = grid #separate out for processing - - if parmName.find("_") == -1: - parmName = parmName + "_SFC" #need parmName_level for output - - # set up error message - smsg = "Adjusting WeatherKey for Compatibility: " + parmName + \ - ' tr=' + printTR(tr) - - #now go through the incoming grid's keys and validate each one - for idx in xrange(len(key)): #each index of the weather key - changedReasons = [] - keyentry = key[idx] #each weather key entry - oldEntry = keyentry #save an unchanged copy for reporting - ikeys = keyentry.split("^") #split into individual subkeys - for idx1 in xrange(len(ikeys)): - cov, typ, inten, vis, attrs = ikeys[idx1].split(":") - - # check the visibility - visibilities = self.__wxDef.getWeatherVisibilities() - validViz = False - for 
i in xrange(visibilities.size()): - if visibilities.get(i).getSymbol() == vis: - validViz = True - break - if not validViz: - vis = visibilities.get(0).getSymbol() #assign 1st one - changedReasons.append("Unknown Visibility") - - # check the type - types = self.__wxDef.getWeatherTypes() - validType = False - for i in xrange(types.size()): - if types.get(i).getSymbol() == typ: - validType = True - break - if not validType: - oldEntry = keyentry - typ = "" #no choice but to set to no weather - de = self.__wxDef.typeIndex(typ) - cov = self.__wxDef.coverageSymbol(de, 0) - inten = self.__wxDef.intensitySymbol(de, 0) - vis = self.__wxDef.visibilitySymbol(0) - attrs = "" - changedReasons.append("Unknown Weather Type") - - # type is known, validate other components - else: - de = self.__wxDef.typeIndex(typ) - - # validate coverage - if self.__wxDef.coverageIndex(typ, cov) == -1: - cov = self.__wxDef.coverageSymbol(de, 0) - changedReasons.append("Unknown Coverage") - - # validate intensity - if self.__wxDef.intensityIndex(typ, inten) == -1: - inten = self.__wxDef.intensitySymbol(de, 0) - changedReasons.append("Unknown Intensity") - - # validate attributes - if len(attrs): - atts = attrs.split(",") #get individual attributes - #determine the attributes that are valid - keepAttr = [] - for a in atts: - if self.__wxDef.attributeIndex(typ, a) != -1: - keepAttr.append(a) - if len(atts) != len(keepAttr): - attrs = ",".join(keepAttr) - changedReasons.append("Unknown Attribute") - - # update record - ikeys[idx1] = cov + ":" + typ + ":" + inten + ":" + vis + ":" + attrs - keyentry = "^".join(ikeys) #assemble subkeys - key[idx] = keyentry #put back to original format - - # report any changes - if len(changedReasons): - logger.error("%s from [%s] to [%s] (%s)", - smsg, oldEntry, keyentry, ",".join(changedReasons)) - - msg = "%s %s %s [%s] -> [%s] (%s)" % \ - (self.__siteID, parmName, printShortTR(tr), oldEntry, keyentry, ",".join(changedReasons)) - - self.__adjDataMsg.append(msg) - - 
return (g, key) - - #--------------------------------------------------------------------- - # erase all grids from the given weather element over the - # processTimePeriod procesTimePeriod = time range to remove grids - #--------------------------------------------------------------------- - def __eraseAllGrids(self, processTimePeriod): - self.__storeGrid(processTimePeriod, None) - self.__dbGrid = None - - -def convertList(unknownList): - retVal = unknownList - if hasattr(unknownList, 'java_name'): - retVal = JUtil.javaObjToPyVal(unknownList) - return retVal - -def main(siteID, userID, databaseID, parmsToProcess, blankOtherPeriods, - startTime, endTime, altMask, replaceOnly, eraseFirst, - announce, renameWE, iscSends, inFiles, ignoreMask, - adjustTranslate, deleteInput, parmsToIgnore, gridDelay, logFileName): - # convert Java types to python and send to IscMosaic for execution - parmsToProcess = convertList(parmsToProcess) - inFiles = convertList(inFiles) - parmsToIgnore = convertList(parmsToIgnore) - argv = {"siteID": siteID, - "userID": userID, - "databaseID": databaseID, - "parmsToProcess": parmsToProcess, - "blankOtherPeriods": bool(blankOtherPeriods), - "startTime": startTime, - "endTime": endTime, - "altMask": altMask, - "replaceOnly": bool(replaceOnly), - "eraseFirst": bool(eraseFirst), - "announce": announce, - "renameWE": bool(renameWE), - "iscSends": bool(iscSends), - "inFiles": inFiles, - "ignoreMask": bool(ignoreMask), - "adjustTranslate": bool(adjustTranslate), - "deleteInput": bool(deleteInput), - "parmsToIgnore": parmsToIgnore, - "gridDelay": float(gridDelay), - "logFileName": logFileName} - mosaic = IscMosaic(argv) - mosaic.execute() - mosaic = None +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. 
Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +# ------------------------------------------------------------------------------ +# +# Port of iscMosaic.py +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- ------------------------------------------ +# Jul 06, 2009 1995 bphillip Initial Creation. +# Jan 17, 2013 15588 jdynina Fixed Publish history removal +# Mar 12, 2013 1759 dgilling Remove unnecessary command line +# processing. +# Apr 24, 2013 1941 dgilling Re-port WECache to match A1. +# May 08, 2013 1988 dgilling Fix history handling bug in__getDbGrid(). +# May 23, 2013 1759 dgilling Remove unnecessary imports. +# Jun 05, 2013 2063 dgilling Change __siteInDbGrid() to +# call IFPWE.history() like A1. +# Sep 05, 2013 2307 dgilling Fix breakage caused by #2044. +# Oct 31, 2013 2508 randerso Change to use DiscreteGridSlice.getKeys() +# Nov 05, 2013 2517 randerso Restructured logging so it could be used +# by WECache +# Changed WECache to limit the number of +# cached grids kept in memory +# Jan 09, 2014 16952 randerso Fix regression made in #2517 which caused +# errors with overlapping grids +# Feb 04, 2014 17042 ryu Check in changes for randerso. +# Apr 03, 2014 2737 randerso Allow iscMosaic to blankOtherPeriods even +# when no grids received +# Apr 11, 2014 17242 dgilling (code checked in by zhao) +# Jul 22, 2014 17484 randerso Update cluster lock time to prevent time +# out +# Aug 07, 2014 3517 randerso Improved memory utilization and error +# handling when unzipping input file. 
+# Aug 14, 2014 3526 randerso Fix bug in WECache that could incorrectly +# delete grids in the destination database +# Feb 17, 2015 4139 randerso Replaced call to iscTime.timeFromComponents +# with call to calendar.timegm +# Apr 23, 2015 4383 randerso Changed to log arguments to aid in +# troubleshooting +# Apr 23, 2015 4259 njensen Updated for new JEP API +# Apr 25, 2015 4952 njensen Updated for new JEP API +# Aug 06, 2015 4718 dgilling Optimize casting when using where with +# NumPy 1.9. +# Oct 05, 2015 4951 randerso Fixed siteInDbGrid to retrieve history from +# the cache so it sees changes that have not +# yet been written to the database +# Oct 05, 2015 4961 randerso Fix __calcBlankingTimes to handle persistent +# grids +# Sep 12, 2016 5861 randerso Remove references to IFPServerConfigManager +# which was largely redundant with IFPServer. +# Feb 22, 2017 6143 randerso Moved renewal of cluster lock into inner +# loop to avoid unexpected time out +# Oct 31, 2016 5979 njensen Cast to primitives for compatibility +# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# Apr 05, 2017 6224 randerso Include dbID in cluster lock name +# Oct 05, 2015 4951 randerso Fixed siteInDbGrid to retrieve history +# from the cache so it sees changes that +# have not yet been written to the database +# Oct 05, 2015 4961 randerso Fix __calcBlankingTimes to handle +# persistent grids +# Feb 22, 2017 6143 randerso Moved renewal of cluster lock into inner +# loop to avoid unexpected time out +# Apr 05, 2017 6224 randerso Include dbID in cluster lock name +# +## + +## +# This is a base file that is not intended to be overridden. 
+## + + + +import os, stat, time, string, sys, calendar +import iscTime, iscUtil, mergeGrid +try: + # dev environment + from Scientific.IO import NetCDF +except: + # runtime we don't have the whole scientific package + import NetCDF +import numpy +import JUtil + +from java.lang import System +from java.util import ArrayList +from java.util import LinkedHashMap +from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DFloat +from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DByte +from com.raytheon.uf.common.time import TimeRange +from com.vividsolutions.jts.geom import Coordinate +from java.awt import Point + +from com.raytheon.edex.plugin.gfe.server import IFPServer +from com.raytheon.edex.plugin.gfe.smartinit import IFPDB +from com.raytheon.uf.common.dataplugin.gfe import GridDataHistory +from com.raytheon.uf.common.dataplugin.gfe import RemapGrid +OriginType = GridDataHistory.OriginType +from com.raytheon.uf.common.dataplugin.gfe.config import ProjectionData +ProjectionType = ProjectionData.ProjectionType +from com.raytheon.uf.common.dataplugin.gfe.db.objects import DatabaseID +from com.raytheon.uf.common.dataplugin.gfe.db.objects import GridLocation +from com.raytheon.uf.common.dataplugin.gfe.slice import DiscreteGridSlice +from com.raytheon.uf.common.dataplugin.gfe.slice import ScalarGridSlice +from com.raytheon.uf.common.dataplugin.gfe.slice import VectorGridSlice +from com.raytheon.uf.common.dataplugin.gfe.slice import WeatherGridSlice +from com.raytheon.uf.common.dataplugin.gfe.discrete import DiscreteKey +from com.raytheon.uf.common.dataplugin.gfe.weather import WeatherKey +from com.raytheon.uf.common.dataplugin.gfe.server.notify import UserMessageNotification +from com.raytheon.edex.plugin.gfe.util import SendNotifications +from com.raytheon.uf.common.status import UFStatus +Priority = UFStatus.Priority +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData +from com.raytheon.uf.common.dataplugin.gfe.reference import 
ReferenceID +CoordinateType = ReferenceData.CoordinateType +from com.raytheon.uf.edex.database.cluster import ClusterLockUtils +from com.raytheon.uf.edex.database.cluster import ClusterTask + +BATCH_DELAY = 0.0 + +MAX_CACHE_BYTES = 64 * 1024 * 1024 # 64 MB + +ISC_USER = "isc" + +logger = None + +## Logging methods ## +def initLogger(logName): + import logging + global logger + logger = iscUtil.getLogger("iscMosaic", logName=logName, logLevel=logging.DEBUG) + +def printTR(tr): + """ + Format time range for printing (yymmdd_hhmm,yymmdd_hhmm) + Works with list or tuple + + Args: + tr: the time range to format + + Returns: + the formatted time range string + """ + if tr is not None: + format = "%Y%m%d_%H%M" + s = '(' + time.strftime(format, time.gmtime(tr[0])) + ',' + \ + time.strftime(format, time.gmtime(tr[1])) + ')' + return s + else: + return "None" + +def printShortTR(tr): + """ + Format time range for printing (dd/hh,dd/hh) + Works with list or tuple + + Args: + tr: the time range to format + + Returns: + the formatted time range string + """ + if tr is not None: + format = "%d/%H" + s = '(' + time.strftime(format, time.gmtime(tr[0])) + '->' + \ + time.strftime(format, time.gmtime(tr[1])) + ')' + return s + else: + return "None" + +class WECache(object): + """ + Cache representing the grids for a weather element that overlap a time range. + The cache will keep a limited number of grids in memory. This limit is determined + at initialization to be the number of grids that will fit in MAX_CACHE_BYTES (or a minimum of 2). + + This is not a general purpose cache. 
It's behavior is designed to match the access patterns of iscMosaic + """ + def __init__(self, we, tr=None): + self._we = we + logger.debug("WECache creating: %s", str(self._we.getParmid())) + + gridType = self._we.getGridType() + if gridType == "SCALAR": + bytesPerCell = 4 + elif gridType == "VECTOR": + bytesPerCell = 8 + elif gridType == "WEATHER": + bytesPerCell = 1 + elif gridType == "DISCRETE": + bytesPerCell = 1 + + gloc = self._we.getGpi().getGridLoc() + gridBytes = int(gloc.getNx()) * int(gloc.getNy()) * bytesPerCell + + self._maxCacheSize = max(2, MAX_CACHE_BYTES / gridBytes) + self._batchSize = self._maxCacheSize / 2 + logger.debug("WECache max grids: %d, batch grids: %d", self._maxCacheSize, self._batchSize) + + self._inv = {} # Map containing keys for all inventory + self._invCache = None # Cache of inventory sorted by start time + self._dirty = set() # Cache written to but not flushed to disk + self._populated = set() # Grid is currently in the cache + self._loaded = set() # Grid has been loaded into cache at least once + + # get only keys that overlap tr + javaInv = self._we.getKeys(iscUtil.toJavaTimeRange(tr)) + pyInv = [] + for i in range(javaInv.size()): + pyInv.append(iscUtil.transformTime(javaInv.get(i))) + + # create unpopulated entries for the entire inventory + for invTr in pyInv: + self._inv[invTr] = None + + # populate first BATCH_READCOUNT grids + if len(pyInv) > self._batchSize: + trList = pyInv[:self._batchSize - 1] + # add on the last tr since it is used by __splitGridsOnProcBoundary + trList.append(pyInv[-1]) + + else: + trList = pyInv + + self.__loadGrids(trList) + + def __loadGrids(self, trList): + javaTRs = ArrayList(len(trList)) + for tr in trList: + javaTRs.add(iscUtil.toJavaTimeRange(tr)) + + gridsAndHist = self._we.get(javaTRs, True) + for idx in range(gridsAndHist.size()): + pair = gridsAndHist.get(idx) + tr = iscUtil.transformTime(pair.getFirst().getValidTime()) + + if tr in self._loaded: + logger.debug("WECache reloading: %s", 
printTR(tr)) + else: + logger.debug("WECache loading: %s", printTR(tr)) + + g = self.__encodeGridSlice(pair.getFirst()) + h = self.__encodeGridHistory(pair.getSecond()) + self._inv[tr] = (g, h) + self._populated.add(tr) + self._loaded.add(tr) + + def keys(self): + if not self._invCache: + self._invCache = tuple(sorted(list(self._inv.keys()), key=lambda t: t[0])) + return self._invCache + + def __getitem__(self, tr): + logger.debug("WECache getting: %s", printTR(tr)) + if tr in self._populated or tr in self._dirty: + return self._inv[tr] + + if tr in self._inv: + self.__handleCacheMiss(tr) + return self._inv[tr] + else: + return None + + def __handleCacheMiss(self, tr): + """ + This function is called when a time range is requested that is not currently in the cache. + It will load the next batch of unpopulated grids in time range order starting with the time range passed in. + + If the cache does not have room for a batch of grids to be loaded without exceeding the max cache size + the earliest dirty grids (or clean if not enough dirty grids are found) are flushed to disk before reading + the next batch. + + Args: + tr: the missing time range + """ + logger.debug("WECache miss: %s", printTR(tr)) + # determine next batch of keys to read + toRead = list(self.keys()) + toRead = toRead[toRead.index(tr):] + toRead = sorted(set(toRead) - self._populated, key=lambda t: t[0]) + toRead = toRead[:self._batchSize] + + # if not room to read another batch + if len(self._populated) + self._batchSize > self._maxCacheSize: + toFlush = [] + # first flush dirty grids + toFlush += self._populated & self._dirty + + # then flush clean grids if necessary + toFlush += self._populated - self._dirty + + # flush only enough to read a batch + toFlush = sorted(toFlush, key=lambda t: t[0]) + toFlush = toFlush[:self._maxCacheSize - self._batchSize] + self.__flushGrids(toFlush) + + self.__loadGrids(toRead) + + def __flushGrids(self, trList): + """ + Flush a list of time ranges from the cache. 
+ Dirty time ranges will be written to disk. + Writes will be done in _batchSize groups + + Args: + trList: the list of time ranges to be flushed + """ + logger.debug("WECache flushing: %d grids", len(trList)) + + saveRequest = LinkedHashMap() + saveList = [] # python time ranges covered by this saveRequest + saveSize = 0 # number of grids in saveRequest + + # get full time range for flush + if (len(trList)): + sortedList = sorted(trList, key=lambda t: t[0]) + flushTR = (sortedList[0][0], sortedList[-1][1]) + else: + flushTR = (0, 2 ** 31 - 1) # all times + + timeSpan = None # time span if this contiguous batch + gridsToSave = ArrayList(self._batchSize) # grids in this contiguous batch + saveBatch = False + for tr in list(self.keys()): + if tr[1] <= flushTR[0]: + continue + if tr[0] >= flushTR[1]: + break + + dirty = tr in self._dirty + if dirty: + logger.debug("WECache storing: %s", printTR(tr)) + saveList.append(tr) + + pyGrid, pyHist = self._inv[tr] + if pyGrid is not None: + javaGrid = self.__buildJavaGridSlice(tr, pyGrid, pyHist) + gridsToSave.add(javaGrid) + if timeSpan is None: + timeSpan = [tr[0], tr[1]] + else: + timeSpan[1] = tr [1] + saveBatch = gridsToSave.size() >= self._batchSize + + else: # clean grid + # save contiguous dirty blocks + saveBatch = timeSpan is not None + + # if populated and clean just purge from cache + if tr in self._populated: + logger.debug("WECache purging: %s", printTR(tr)) + self._inv[tr] = None + self._populated.remove(tr) + else: + # skip any clean unpopulated grids + logger.debug("WECache skipping: %s", printTR(tr)) + + if saveBatch: + # add this contiguous batch to saveRequest + logger.debug("WECache saving %d grids in %s", gridsToSave.size(), printTR(timeSpan)) + gridSaveTR = iscUtil.toJavaTimeRange(timeSpan) + saveRequest.put(gridSaveTR, gridsToSave) + timeSpan = None + saveBatch = False + saveSize += gridsToSave.size() + gridsToSave = ArrayList(self._batchSize) + + # if this saveRequest has reached the batchSize save it 
+ if saveSize >= self._batchSize: + try: + self._we.put(saveRequest) + except: + raise + else: # no exceptions on save, clear saved grids from cache + # depopulate save grids + for tr in saveList: + self._inv[tr] = None + self._populated.discard(tr) + self._dirty.remove(tr) + saveRequest.clear() + saveList = [] + saveSize = 0 + + # save partial batch if necessary + if len(saveList): + if timeSpan is not None: + logger.debug("WECache saving %d grids in %s", gridsToSave.size(), printTR(timeSpan)) + gridSaveTR = iscUtil.toJavaTimeRange(timeSpan) + saveRequest.put(gridSaveTR, gridsToSave) + + try: + self._we.put(saveRequest) + except: + raise + else: # no exceptions on save, clear saved grids from cache + # depopulate save grids + for tr in saveList: + self._inv[tr] = None + self._populated.discard(tr) + self._dirty.remove(tr) + + return + + def __setitem__(self, tr, value): + if value is None: + logger.debug("WECache erasing: %s", printTR(tr)) + grid = hist = None + else: + logger.debug("WECache setting: %s", printTR(tr)) + grid, hist = value + + # Remove any overlapping grids + tokill = [] + for itr in self._inv: + if self.overlaps(tr, itr): + tokill.append(itr) + for i in tokill: + del self._inv[i] + self._dirty.discard(i) + self._populated.discard(i) + self._loaded.discard(i) + self._invCache = None + + # if cache full flush some grids to disk + if len(self._populated) >= self._maxCacheSize: + toFlush = [] + # first flush dirty grids + toFlush += self._populated & self._dirty + + # then flush clean grids if necessary + toFlush += self._populated - self._dirty + + # flush a full batch is possible + toFlush = sorted(toFlush, key=lambda t: t[0]) + toFlush = toFlush[:self._batchSize] + self.__flushGrids(toFlush) + + # Now add the new grid + self._inv[tr] = (grid, hist) + self._dirty.add(tr) + self._loaded.add(tr) + self._invCache = None + if grid is not None: + self._populated.add(tr) + + def flush(self): + """Writes all dirty time ranges in the WECache to HDF5/DB""" + 
# flush entire inventory + self.__flushGrids(list(self.keys())) + + def overlaps(self, tr1, tr2): + if (tr1[0] >= tr2[0] and tr1[0] < tr2[1]) or \ + (tr2[0] >= tr1[0] and tr2[0] < tr1[1]): + return True + return False + + def __encodeGridSlice(self, grid): + gridType = self._we.getGridType() + if gridType == "SCALAR": + return grid.getNDArray() + elif gridType == "VECTOR": + vecGrids = grid.getNDArray() + return (vecGrids[0], vecGrids[1]) + elif gridType == "WEATHER" or gridType == "DISCRETE": + keys = grid.getKeys() + keyList = [] + for theKey in keys: + keyList.append(str(theKey)) + return (grid.getNDArray(), keyList) + + def __encodeGridHistory(self, histories): + retVal = [] + for i in range(histories.size()): + retVal.append(histories.get(i).getCodedString()) + return tuple(retVal) + + def __buildJavaGridSlice(self, tr, grid, history): + javaTR = iscUtil.toJavaTimeRange(tr) + javaHist = self.__buildJavaGridHistory(history) + + gridType = self._we.getGridType() + if gridType == "SCALAR": + return self._we.buildScalarSlice(javaTR, grid.astype(numpy.float32), javaHist) + elif gridType == "VECTOR": + return self._we.buildVectorSlice(javaTR, grid[0].astype(numpy.float32), grid[1].astype(numpy.float32), javaHist) + elif gridType == "WEATHER": + return self._we.buildWeatherSlice(javaTR, grid[0].astype(numpy.byte), str(grid[1]), javaHist) + elif gridType == "DISCRETE": + return self._we.buildDiscreteSlice(javaTR, grid[0].astype(numpy.byte), str(grid[1]), javaHist) + + def __buildJavaGridHistory(self, histories): + retVal = ArrayList() + blankPubTime = "Fcst" in str(self._we.getParmid().getDbId()) + for histEntry in histories: + javaHist = GridDataHistory(histEntry) + # strip out publish time to allow for publishing correctly + # when merging Fcst out of A1 + if blankPubTime: + javaHist.setPublishTime(None) + retVal.add(javaHist) + return retVal + + +class IscMosaic: + def __init__(self, args): + self.__mysite = args['siteID'] + ifpServer = 
IFPServer.getActiveServer(self.__mysite) + if ifpServer is None: + raise Exception("No active IFPServer for site: " + siteId) + + self.__config = ifpServer.getConfig() + + self.__userID = args['userID'] + self.__db = None # ifpServer database object + self.__dbGrid = None + self.__parmsToProcess = args['parmsToProcess'] + self.__blankOtherPeriods = args['blankOtherPeriods'] + self.__altMask = args['altMask'] + self.__replaceOnly = args['replaceOnly'] + self.__eraseFirst = args['eraseFirst'] + self.__announce = args['announce'] + self.__renameWE = args['renameWE'] + self.__iscSends = args['iscSends'] + if args['databaseID'] is not None: + self.__databaseID = args['databaseID'] + else: + self.__databaseID = self.__mysite + "_GRID__ISC_00000000_0000" + self.__inFiles = args['inFiles'] + self.__ignoreMask = args['ignoreMask'] + self.__adjustTranslate = args['adjustTranslate'] + self.__deleteInput = args['deleteInput'] + self.__parmsToIgnore = args['parmsToIgnore'] + self.__gridDelay = args['gridDelay'] + self.__logFile = args['logFileName'] + + startTime = 0 + if args['startTime'] is not None: + startTime = self.__decodeTimeString(args['startTime']) + endTime = int(2 ** 30 - 1 + 2 ** 30) + if args['endTime'] is not None: + endTime = self.__decodeTimeString(args['endTime']) + self.__processTimePeriod = (startTime, endTime) + + initLogger(self.__logFile) + logger.info("iscMosaic Starting args: %s", str(args)) + + def execute(self): + + # get the WxDefinition and DiscreteDefinition + self.__wxDef = self.__config.getWxDefinition() + self.__disDef = self.__config.getDiscreteDefinition() + + self.__db = IFPDB(self.__databaseID) + + # parms in database + if len(self.__parmsToProcess) == 0: + parmsInDb = self.__db.getKeys() + for i in range(0, parmsInDb.size()): + self.__parmsToProcess.append(str(parmsInDb.get(i))) + + # get office type information + self.__myOfficeType = self.__config.officeType() + + #process each input file + for file in self.__inFiles: + self.__areaMask = 
None + self.__processInputFile(file) + + if self.__deleteInput: + os.remove(file) + + logger.info("iscMosaic Finished") + + def __processInputFile(self, filename): + + a = os.times() + cpu0 = a[0] + a[1] + start = a[4] + + logger.info("Processing file=%s", filename) + fsize = os.stat(filename)[stat.ST_SIZE] + logger.info("Input file size: %d", fsize) + + gzipFile = None + unzippedFile = None + gzipped = True + try: + import gzip + gzipFile = gzip.open(filename, 'rb') + unzippedFile = open(filename + ".unzipped", 'w') + while True: + buffer = gzipFile.read(65536) + if len(buffer) == 0: + break + unzippedFile.write(buffer) + except IOError as e: + if e.message == "Not a gzipped file": + gzipped = False + else: + raise + else: + # no errors, close and rename the file + unzippedFile.close() + gzipFile.close() + os.rename(unzippedFile.name, gzipFile.filename) + gzipFile = unzippedFile = None + finally: + # close the files in case of error + if gzipFile is not None: + gzipFile.close() + if unzippedFile is not None: + unzippedFile.close() + if not gzipped: + os.remove(unzippedFile.name) + + a = os.times() + cpugz = a[0] + a[1] + stopgz = a[4] + + file = NetCDF.NetCDFFile(filename, "r") + + # check version + fileV = getattr(file, 'fileFormatVersion') + if fileV != "20010816" and fileV != "20030117": + logger.error("Incompatible file format found") + raise Exception("Incompatible file format") + + # get creation time + self.__creTime = getattr(file, 'creationTime') + creTimeString = time.asctime(time.gmtime(self.__creTime)) + logger.info("CreationTime: %s" , creTimeString) + + # get file start/end processing times + self.__modProcTime = self.__getFileProcessingTimes(file) + if self.__modProcTime is None: + return None + logger.info("Process TR: %s", printTR(self.__modProcTime)) + + # prepare for the notification message + totalTimeRange = None + totalGrids = 0 + totalFails = 0 + pParms = [] + self.__adjDataMsg = [] + + # process each parm in the netCDF file + # Only use one 
area mask for all parms. This will break + # if we ever use parms with differing dims in a database. + areaMask = None + inFileVars = list(file.variables.keys()) #parm names + + for parm in inFileVars: + tup = self.__getNetCDFInputVariables(file, parm) + + if tup is None: + continue + else: + parmName = tup[0] + vars = tup[1] + remapHistory = tup[2] + + # rename weather element + if self.__renameWE: + siteID = str(getattr(vars[0], "siteID")) + incomingOfficeType = self.__config.getOfficeType(siteID) + if incomingOfficeType != self.__myOfficeType: + idx = parmName.rfind("_") + parmName = parmName[0:idx] + incomingOfficeType + \ + parmName[idx:] + logger.info("Renamed to: %s data from %s", parmName, siteID) + + # ignore this parm? + if parmName in self.__parmsToIgnore: + logger.info("Ignoring %s", parmName) + continue + + # match in ifp database? + if not parmName in self.__parmsToProcess and \ + len(self.__parmsToProcess) != 0: + logger.info("Skipping %s", parmName) + continue + + (pName, pTR, nGrids, nFail) = self.__processParm(parmName, vars, remapHistory, filename) + + # save info for the notification message + pParms.append(pName) + if pTR is not None: + if totalTimeRange is None: + totalTimeRange = pTR + else: + if totalTimeRange[0] > pTR[0]: + totalTimeRange = (pTR[0], totalTimeRange[1]) + if totalTimeRange[1] < pTR[1]: + totalTimeRange = (totalTimeRange[0], pTR[1]) + totalGrids = totalGrids + nGrids + totalFails = totalFails + nFail + + file.close() + + #announce storage + if len(self.__announce) and totalGrids > 0: + msg = self.__announce + self.__siteID + ' ' + repr(pParms) + ' ' + printShortTR(totalTimeRange) + ' #Grids=' + repr(totalGrids) + if totalFails: + msg = msg + '[' + repr(totalFails) + ' FAILED]' + notification = UserMessageNotification(msg, Priority.CRITICAL, "ISC", self.__mysite) + else: + notification = UserMessageNotification(msg, Priority.EVENTA, "ISC", self.__mysite) + logger.info(msg) + SendNotifications.send(notification) + + # announce 
"modified/adjusted" data + if len(self.__announce) and len(self.__adjDataMsg): + msg = "ISC Data Modified:\n" + "\n".join(self.__adjDataMsg) + notification = UserMessageNotification(msg, Priority.EVENTA, "ISC", self.__mysite) + SendNotifications.send(notification) + + a = os.times() + cpu = a[0] + a[1] + stop = a[4] + logger.info("Elapsed/CPU time: " + "%-.2f / %-.2f decompress, " + "%-.2f / %-.2f processing, " + "%-.2f / %-.2f total", + stopgz - start, cpugz - cpu0, + stop - stopgz, cpu - cpugz, + stop - start, cpu - cpu0) + + + def __processParm(self, parmName, vars, history, filename): + + retries = 5 + retryAttempt = 0 + pName = string.replace(parmName, "_SFC", "") + totalTimeRange = None + inTimesProc = [] + numFailed = 0 + + self.__siteID = str(getattr(vars[0], "siteID")) + inTimes = self.__getIncomingValidTimes(vars[0]) + logger.info("Processing %s #Grids=%d Site=%s", parmName, len(inTimes), self.__siteID) + + if self.__blankOtherPeriods or self.__eraseFirst or len(inTimes) > 0: + while retryAttempt != retries: + lockName = parmName + ":" + self.__databaseID + logger.debug("iscMosaic: Attempting to acquire cluster lock for: %s", lockName) + startTime = time.time() + clusterLock = ClusterLockUtils.lock("ISC Write Lock", lockName , 120000, True) + elapsedTime = (time.time() - startTime) * 1000 + logger.debug("iscMosaic: Request for %s took %d ms", lockName, elapsedTime) + if str(clusterLock.getLockState()) == "SUCCESSFUL": + logger.debug("iscMosaic: Successfully acquired cluster lock for: %s", lockName) + try: + # open up the ifpServer weather element + self.__dbwe = self.__db.getItem(parmName, ISC_USER) + self._wec = WECache(self.__dbwe, tr=self.__modProcTime) + self.__rateParm = self.__dbwe.getGpi().isRateParm() + self.__parmName = parmName + + # get general info for the parm from the input file and output db + inGeoDict = self.__getInputGeoInfo(vars[0]) + inFillV = self.__determineFillValue(vars[0]) + + gridType = getattr(vars[0], "gridType") + minV = 
self.__dbwe.getGpi().getMinValue() + # compute the site mask + + if self.__areaMask is None: + self.__areaMask = self.__computeAreaMask().getGrid().getNDArray().astype(numpy.bool) + + # create the mergeGrid class + mGrid = mergeGrid.MergeGrid(self.__creTime, self.__siteID, inFillV, + minV, self.__areaMask, gridType, self.__dbwe.getDiscreteKeys()) + + # erase all existing grids first? + self.__dbinv = list(self._wec.keys()) + try: + self.__splitGridsOnProcBoundary(self.__modProcTime) + except: + logger.exception('Failure to splitGridsOnProcBoundary Parm=%s Time=%s', + parmName, printTR(self.__modProcTime)) + + if self.__eraseFirst: + self.__eraseAllGrids(self.__modProcTime) + + # process each incoming grid + inTimesProc = [] + numFailed = 0 + + # process incoming grids + for i in range(len(inTimes)): + # Put in a delay so we don't hammer the server so hard. + if self.__gridDelay > 0.0: + time.sleep(self.__gridDelay) + tr = iscTime.intersection(inTimes[i], self.__modProcTime) + if tr is not None: + inTimesProc.append(tr) + try: + logger.debug("Processing Grid: %s TR=%s", parmName, printTR(tr)) + + # get the grid and remap it + grid = self.__getGridFromNetCDF(gridType, vars, i) + + # if WEATHER or DISCRETE, then validate and adjust keys + if self.__adjustTranslate: + if gridType == "DISCRETE": + grid = self.__validateAdjustDiscreteKeys(grid, + self.__parmName, tr) + elif gridType == "WEATHER": + grid = self.__validateAdjustWeatherKeys(grid, + self.__parmName, tr) + + grid = self.__remap(self.__dbwe, grid, inGeoDict, inFillV) + + # if rate parm, then may need to adjust the values + if self.__rateParm and inTimes[i] != tr: + grid = self.__adjustForTime(inTimes[i], tr, grid, + inFillV) + + # merge the grids + self.__processIncomingGrid(lockName, grid, history[i], + mGrid, tr, inFillV) + + except: + logger.exception('Failure to process grid in file [%s] Parm=%s Time=%s', + filename, parmName, printTR(tr)) + numFailed = numFailed + 1 + + else: + logger.debug("Skipping 
Grid: %s TR=%s outside start/end range", + parmName, printTR(tr)) + + + # blank out any gaps + if self.__blankOtherPeriods == 1: + blankTimes = self.__calcBlankingTimes(inTimesProc) + # get updated inventory + + for i in range(len(blankTimes)): + tr = iscTime.intersection(blankTimes[i], self.__modProcTime) + if tr is not None: + try: + logger.debug("Processing Blank: %s TR=%s", + parmName, printTR(tr)) + self.__processBlankTime(mGrid, tr) + except: + logger.exception('Failure to process grid blanking Parm=%s Time=%s', + parmName, printTR(tr)) + + + + # Returns tuple of (parmName, TR, #grids, #fails) + if len(inTimesProc): + totalTimeRange = (inTimesProc[0][0], inTimesProc[ -1][ -1] - 3600) + self._wec.flush() + + retryAttempt = retries + except: + retryAttempt = retryAttempt + 1 + logger.exception("Error saving ISC data. Retrying ( %d / %d )", retryAttempt, retries) + time.sleep(1) + finally: + logger.debug("iscMosaic: Attempting to release cluster lock for: %s", lockName) + ClusterLockUtils.unlock(clusterLock, False) + logger.debug("iscMosaic: Successfully released cluster lock for: %s", lockName) + elif str(clusterLock.getLockState()) == "OLD": + retryAttempt = retryAttempt + 1 + # Clear old lock to retry + logger.debug("Old lock retrieved for ISC write. Attempting to renew lock") + ClusterLockUtils.unlock(clusterLock, False) + elif str(clusterLock.getLockState()) == "FAILED": + retryAttempt = retryAttempt + 1 + if retryAttempt == retries: + logger.error("Cluster lock could not be established for %s at time range %s Data was not saved.", + self._we.getParmid(), TimeRange(tr[0], tr[1])) + else: + logger.error("Cluster lock request failed for ISC write. 
Retrying ( %d / %d )", + retryAttempt, retries) + time.sleep(1) + + return (pName, totalTimeRange, len(inTimesProc), numFailed) + + def __processIncomingGrid(self, lockName, remappedGrid, remappedHistory, mGrid, tr, inFillV): + # calculate merge + merge = iscTime.mergeTR(tr, self.__dbinv) + + # get the associated db grids, merge, and store + for m in merge: + # update cluster lock time to avoid time out + ClusterLockUtils.updateLockTime("ISC Write Lock", lockName , System.currentTimeMillis()) + + logger.debug("Merge: %s %s %s", printTR(m[0]), + printTR(m[1]), m[2]) + gotGrid = self.__getDbGrid(m[0]) + + if gotGrid is not None: + destGrid = gotGrid[0] + oldHist = gotGrid[1] + else: + destGrid = None + oldHist = None + + # non-rate parms -- keep the data values the same + if not self.__rateParm: + + # merge the grids, but only if the overlaps flag is set, + # we use the minimum value for the fill value since we don't + # support sparse populated grids + if m[2] == 1 or (m[2] == 0 and m[0] == None): + if self.__replaceOnly: + mergedGrid = mGrid.mergeGrid( + (remappedGrid, remappedHistory), None) + else: + mergedGrid = mGrid.mergeGrid ( + (remappedGrid, remappedHistory), (destGrid, oldHist)) + + else: + mergedGrid = (destGrid, oldHist) + + # rate parms -- adjust data values based on times + else: + # merge the grids, but only if the overlaps flag is set, + # we use the minimum value for the fill value since we don't + # support sparse populated grids + if m[2] == 1 or (m[2] == 0 and m[0] == None): + if self.__replaceOnly: + adjGrid = self.__adjustForTime(tr, m[1], remappedGrid, + inFillV) + mergedGrid = mGrid.mergeGrid( + (adjGrid, remappedHistory), None) + else: + adjGridIn = self.__adjustForTime(tr, m[1], + remappedGrid, inFillV) + adjGridDb = self.__adjustForTime(m[0], m[1], destGrid, + 0.0) + mergedGrid = mGrid.mergeGrid(\ + (adjGridIn, remappedHistory), + (adjGridDb, oldHist)) + + else: + adjGrid = self.__adjustForTime(m[0], m[1], destGrid, 0.0) + mergedGrid = 
(adjGrid, oldHist) + + # store merged grid + self.__storeGrid(m[1], mergedGrid) + + def __storeGrid(self, tr, grid): + if grid is not None and grid[1] is not None and grid[0] is not None: + logger.debug("Store: %s", printTR(tr)) + self._wec[tr] = grid + + if tr not in self.__dbinv: + self.__dbinv = list(self._wec.keys()) + else: + logger.debug("Erase: %s", printTR(tr)) + self._wec[tr] = None + self.__dbinv = list(self._wec.keys()) + + #--------------------------------------------------------------------- + # get db grid + # Gets the needed database grid + # tr = desired grid, identified by time range + # Returns tuple of (grid, history) (or None if unknown) + #--------------------------------------------------------------------- + def __getDbGrid(self, tr): + if tr is None: + return None + + if self.__dbGrid is None or tr != self.__dbGrid[2]: + self.__dbGrid = None + grid = self._wec[tr] + if grid is not None: + destGrid, history = grid + self.__dbGrid = (destGrid, history, tr) + else: + logger.error("Unable to access grid for %s for %s", printTR(tr), self.__parmName) + return None + + return (self.__dbGrid[0], self.__dbGrid[1]) + + #--------------------------------------------------------------------- + # calculate file start/end processing times + # Returns (startTime, endTime) or None for processing + # file= netcdf file descriptor object + #--------------------------------------------------------------------- + def __getFileProcessingTimes(self, file): + + # try: + startTime = self.__decodeTimeString(getattr(file, 'startProcTime')) + endTime = self.__decodeTimeString(getattr(file, 'endProcTime')) + modProcTime = iscTime.intersection((startTime, endTime), + self.__processTimePeriod) + + if modProcTime is None: + logger.error("Skipping file due to non overlapping periods") + return modProcTime + + def __decodeTimeString(self, timeStr): + "Create an Integer time from a string: YYYYMMDD_HHMM" + + importError = True + while importError: + try: + timeTuple = 
time.strptime(timeStr, "%Y%m%d_%H%M") + importError = False + except ImportError: + importError = True + except: + logger.exception("%s is not a valid time string. Use YYYYMMDD_HHMM", timeStr) + raise Exception("Bad date format YYYYMMDD_HHMM") + + return calendar.timegm(timeTuple) + + #--------------------------------------------------------------------- + # get netcdf input variables + # Gets the input variables from the netCDF file based on the parm name. + # The netCDF file is opened on file. + # Returns them as three tuples: (dbParmName, vars, history[]) + # The vars is an array depending upon the data type: + # scalar [0], vector [0=mag,1=dir], wx [0=grid,1=key]. + # The history is a list of history strings. + #--------------------------------------------------------------------- + def __getNetCDFInputVariables(self, file, parmName): + + var = file.variables[parmName] + + # make sure it is a weather element variable + if not hasattr(var, "validTimes"): + return None + + gridType = getattr(var, "gridType") + + # get the history info + if gridType == 'SCALAR': + pn = parmName + "_GridHistory" + elif gridType == 'VECTOR': + indx = string.find(parmName, "_Mag_") + if indx == -1: + return None + pn = parmName[0:indx + 1] + parmName[indx + 5:] + "_GridHistory" + elif gridType == 'WEATHER': + pn = parmName + "_GridHistory" + elif gridType == 'DISCRETE': + pn = parmName + "_GridHistory" + hvar = file.variables[pn] + history = [] + for i in range(0, hvar.shape[0]): + h = string.strip(hvar[i].tostring()) + history.append(string.split(h, '^')) + + # handle special cases for Vector and Wx, need to use a second + # variable for wind and weather + gridType = getattr(var, "gridType") + + if gridType == 'SCALAR': + return (parmName, [var], history) + + elif gridType == 'VECTOR': + indx = string.find(parmName, "_Mag_") + if indx != -1: + dirparm = parmName[0:indx] + "_Dir_" + parmName[indx + 5:] + varDir = file.variables[dirparm] + dbparmName = parmName[0:indx] + parmName[indx 
+ 4:] + return (dbparmName, [var, varDir], history) + else: + return None + + elif gridType == 'WEATHER': + varKey = file.variables[parmName + "_wxKeys"] + return (parmName, [var, varKey], history) + + elif gridType == 'DISCRETE': + varKey = file.variables[parmName + "_keys"] + return (parmName, [var, varKey], history) + + else: + return None + + #------------------------------------------------------------------------- + # Get Geographical Input Information + # var is the netCDF variable + #------------------------------------------------------------------------- + def __getInputGeoInfo(self, var): + + # define minimum standard + inProjData = { + 'latIntersect': 0.0, + 'latLonOrigin': (0.0, 0.0), + 'stdParallelTwo': 0.0, + 'stdParallelOne': 0.0, + 'lonCenter': 0.0, + 'lonOrigin': 0.0, + 'latIntersect': 0.0, + 'projectionID': 'hi' + } + + # all projections have this information + data = getattr(var, "latLonLL") + inProjData['latLonLL'] = (float(data[0]), float(data[1])) + data = getattr(var, "latLonUR") + inProjData['latLonUR'] = (float(data[0]), float(data[1])) + inProjData['projectionType'] = str(getattr(var, "projectionType")) + data = getattr(var, "gridPointLL") + inProjData['gridPointLL'] = (int(data[0]), int(data[1])) + data = getattr(var, "gridPointUR") + inProjData['gridPointUR'] = (int(data[0]), int(data[1])) + + # lambert conformal specific information + if inProjData['projectionType'] == 'LAMBERT_CONFORMAL': + data = getattr(var, "latLonOrigin") + inProjData['latLonOrigin'] = (float(data[0]), float(data[1])) + data = getattr(var, "stdParallelOne") + inProjData['stdParallelOne'] = float(data) + data = getattr(var, "stdParallelTwo") + inProjData['stdParallelTwo'] = float(data) + + # polar stereographic + elif inProjData['projectionType'] == 'POLAR_STEREOGRAPHIC': + data = getattr(var, "lonOrigin") + inProjData['lonOrigin'] = float(data) + + # mercator + elif inProjData['projectionType'] == 'MERCATOR': + data = getattr(var, "lonCenter") + 
inProjData['lonCenter'] = float(data) + + # get specific grid sizes and domains + data = getattr(var, "gridSize") + inProjData['gridSize'] = (int(data[0]), int(data[1])) + origin = getattr(var, "domainOrigin") + extent = getattr(var, "domainExtent") + inProjData['gridDomain'] = \ + ((float(origin[0]), float(origin[1])), (float(extent[0]), float(extent[1]))) + + return inProjData + + #--------------------------------------------------------------------- + # determine fill value for input + # vars = netCDF variables + # Returns fill value to use + # Note: file fill value may be different from processing fill value + # since data may have to be multiplied and offset. + #--------------------------------------------------------------------- + def __determineFillValue(self, var): + gridType = getattr(var, "gridType") + if gridType == 'SCALAR' or gridType == 'VECTOR': + return -30000.0 + else: + return -127 + + #--------------------------------------------------------------------- + # compute the area mask + # Returns areaMask to use based on the siteID. for ISC data, + # the edit area is normally ISC_xxx where xxx is the WFO. 
+ #--------------------------------------------------------------------- + def __computeAreaMask(self): + if self.__ignoreMask: + domain = self.__dbwe.getGpi().getGridLoc() + #maskDims = (int(domain.getNy()), int(domain.getNx())) + #areaMask = numpy.ones(maskDims) + #areaMask.setGloc(domain) + + + areaMask = ReferenceData(domain, ReferenceID("full"), None, CoordinateType.GRID) + areaMask.getGrid() + areaMask.invert() + + elif self.__altMask is not None: + try: + areaMask = iscUtil.getEditArea(self.__altMask, self.__mysite) + areaMask.setGloc(self.__dbwe.getGpi().getGridLoc()) + except: + logger.exception("Unable to access edit mask [%s]", + self.__altMask) + raise Exception("Unknown edit area mask [%s]" % self.__altMask) + else: + maskName = "ISC_" + self.__siteID + try: + areaMask = iscUtil.getEditArea(maskName, self.__mysite) + areaMask.setGloc(self.__dbwe.getGpi().getGridLoc()) + except: + logger.exception("Unable to access edit mask [%s]", maskName) + raise Exception("Unknown edit area mask [%s]" % maskName) + + return areaMask + + #--------------------------------------------------------------------- + # split grids on processing time, so to preserve all grids that + # overlap partially the processing time + # processTimePeriod = time range to process grids for splits + #--------------------------------------------------------------------- + def __splitGridsOnProcBoundary(self, processTimePeriod): + dbinv = self.__dbinv + mergeInfo = iscTime.mergeTR(processTimePeriod, dbinv) + oldGrid = None + oldTR = None + for m in mergeInfo: + if m[0] != m[1]: #split grid needed + if m[0] != oldTR: + oldGrid = self.__getDbGrid(m[0]) + oldTR = m[0] + if oldGrid is not None: + if self.__rateParm: + adjGrid = self.__adjustForTime(m[0], m[1], oldGrid[0], + 0.0) #old db grids don't have missing value flags + self.__storeGrid(m[1], (adjGrid, oldGrid[1])) + else: + self.__storeGrid(m[1], oldGrid) + self.__dbGrid = None + + 
#------------------------------------------------------------------------- + # Get Incoming netCDF file grid valid times + # netCDFfile, var is the netCDF variable + #------------------------------------------------------------------------- + def __getIncomingValidTimes(self, var): + inTimesA = getattr(var, "validTimes") + ntimes = len(inTimesA) / 2 + times = [] + for t in range(ntimes): + times.append((inTimesA[t * 2], inTimesA[t * 2 + 1])) + return times + + #--------------------------------------------------------------------- + # get grid from netCDF file. + # gridType = type of grid: scalar, vector, weather + # vars = netCDF variables + # index = grid index + # Returns grid as: + # scalar = grid + # vector = (magGrid, dirGrid) + # weather = (grid, key) + # Note: the values in the grid may need to be changed if their is + # a dataMultiplier or dataOffset attributes present. This will + # also change the fill Value. + #--------------------------------------------------------------------- + def __getGridFromNetCDF(self, gridType, vars, index): + if gridType == 'SCALAR': + grid = numpy.flipud(vars[0][index]) + return self.__scaleGrid(vars[0], grid) + + elif gridType == 'VECTOR': + magGrid = numpy.flipud(vars[0][index]) + dirGrid = numpy.flipud(vars[1][index]) + return (self.__scaleGrid(vars[0], magGrid), + self.__scaleGrid(vars[1], dirGrid)) + + elif gridType == 'WEATHER': + compKey = self.__compressKey(vars[1][index, :, :]) + grid = (numpy.flipud(vars[0][index]), compKey) + + elif gridType == 'DISCRETE': + compKey = self.__compressKey(vars[1][index, :, :]) + grid = (numpy.flipud(vars[0][index]), compKey) + + return grid + + #--------------------------------------------------------------------- + # scaling changes for incoming grids + # var = netCDF variable + # grid = input grid + # only should be called for SCALAR/VECTOR + #--------------------------------------------------------------------- + def __scaleGrid(self, var, grid): + #scaling changes + inFillV = 
getattr(var, "fillValue") + + # any scaling needed? + try: + multiplier = getattr(var, "dataMultiplier") + offset = getattr(var, "dataOffset") + except: + multiplier = None + offset = None + + outFillV = self.__determineFillValue(var) + if outFillV == inFillV and multiplier is None: + return grid # no changes needed + outFillV = numpy.float32(outFillV) + + # get mask of valid points + goodDataMask = numpy.not_equal(grid, inFillV) + + # apply the scaling, make a float32 grid + if multiplier is not None: + tempGrid = (grid.astype(numpy.float32) * multiplier) + offset + grid = numpy.where(goodDataMask, tempGrid, outFillV) + # no scaling needed, but the fill value needs changing + else: + grid = numpy.where(goodDataMask, grid, outFillV) + + return grid.astype(numpy.float32) + + def __remap(self, we, grid, inGeoDict, inFillV): + gpi = we.getGpi() + + gridType = str(gpi.getGridType()) + + gs = self.__decodeGridSlice(we, grid, TimeRange()) + + pd = self.__decodeProj(inGeoDict) + fill = inFillV + ifill = int(inFillV) + + origin = Coordinate(float(str(inGeoDict['gridDomain'][0][0])), float(str(inGeoDict['gridDomain'][0][1]))) + extent = Coordinate(float(str(inGeoDict['gridDomain'][1][0])), float(str(inGeoDict['gridDomain'][1][1]))) + + gl = GridLocation("iscMosaic", pd, self.__getSize(gs), origin, extent, "GMT") + mapper = RemapGrid(gl, gpi.getGridLoc()) + + if gridType == 'SCALAR': + newGrid = mapper.remap(gs.getScalarGrid(), fill, gpi.getMaxValue(), gpi.getMinValue(), fill) + return newGrid.getNDArray() + + elif gridType == 'VECTOR': + magGrid = Grid2DFloat(int(gs.getGridParmInfo().getGridLoc().getNx()), int(gs.getGridParmInfo().getGridLoc().getNy())) + dirGrid = Grid2DFloat(int(gs.getGridParmInfo().getGridLoc().getNx()), int(gs.getGridParmInfo().getGridLoc().getNy())) + mapper.remap(gs.getMagGrid(), gs.getDirGrid(), fill, gpi.getMaxValue(), gpi.getMinValue(), fill, magGrid, dirGrid) + return (magGrid.getNDArray(), dirGrid.getNDArray()) + + elif gridType == 'WEATHER': + 
newGrid = mapper.remap(gs.getWeatherGrid(), fill, fill) + return (newGrid.getNDArray(), grid[1]) + + elif gridType == 'DISCRETE': + newGrid = mapper.remap(gs.getDiscreteGrid(), fill, fill) + return (newGrid.getNDArray(), grid[1]) + + def __decodeGridSlice(self, we, value, tr, history=None): + pid = we.getParmid() + gpi = we.getGpi() + + gridType = str(gpi.getGridType()) + + hist = ArrayList() + + if history is None: + hist.add(GridDataHistory(OriginType.INITIALIZED, pid, tr)) + else: + #FIXME + for i in range(0, len(history)): + hist.add(history[i]) + + if gridType == 'SCALAR': + data = Grid2DFloat.createGrid(value.shape[1], value.shape[0], value) + slice = ScalarGridSlice(tr, gpi, hist, data) + elif gridType == 'VECTOR': + + magVal = value[0] + dirVal = value[1] + + magGrid = Grid2DFloat.createGrid(magVal.shape[1], magVal.shape[0], magVal) + dirGrid = Grid2DFloat.createGrid(dirVal.shape[1], dirVal.shape[0], dirVal) + slice = VectorGridSlice(tr, gpi, hist, magGrid, dirGrid) + elif gridType == 'WEATHER': + data = Grid2DByte.createGrid(value[0].shape[1], value[0].shape[0], value[0]) + keyList = ArrayList() + for key in value[1]: + keyList.add(WeatherKey()) + slice = WeatherGridSlice() + slice.setValidTime(tr) + slice.setGridParmInfo(gpi) + slice.setGridDataHistory(hist) + slice.setWeatherGrid(data) + slice.setKey(keyList) + elif gridType == 'DISCRETE': + data = Grid2DByte.createGrid(value[0].shape[1], value[0].shape[0], value[0]) + keyList = ArrayList() + for key in value[1]: + keyList.add(DiscreteKey()) + slice = DiscreteGridSlice() + slice.setValidTime(tr) + slice.setGridParmInfo(gpi) + slice.setGridDataHistory(hist) + slice.setDiscreteGrid(data) + slice.setKey(keyList) + return slice + + def __decodeProj(self, pdata): + + pid = "GrandUnifiedRemappingProj" + type = ProjectionType.valueOf(pdata["projectionType"]) + llLL = Coordinate(pdata["latLonLL"][0], pdata["latLonLL"][1]) + llUR = Coordinate(pdata["latLonUR"][0], pdata["latLonUR"][1]) + llo = 
Coordinate(pdata["latLonOrigin"][0], pdata["latLonOrigin"][1]) + sp1 = pdata["stdParallelOne"] + sp2 = pdata["stdParallelTwo"] + gpll = Point(pdata["gridPointLL"][0], pdata["gridPointLL"][1]) + gpur = Point(pdata["gridPointUR"][0], pdata["gridPointUR"][1]) + lati = pdata["latIntersect"] + lonc = pdata["lonCenter"] + lono = pdata["lonOrigin"] + + return ProjectionData(pid, type, llLL, llUR, llo, sp1, sp2, gpll, gpur, lati, lonc, lono) + + def __getSize(self, gs): + gridType = str(gs.getGridParmInfo().getGridType()) + + if gridType == "SCALAR" or gridType == "VECTOR": + return Point(gs.getScalarGrid().getXdim(), gs.getScalarGrid().getYdim()) + elif gridType == "WEATHER": + return Point(gs.getWeatherGrid().getXdim(), gs.getWeatherGrid().getYdim()) + elif gridType == "DISCRETE": + return Point(gs.getDiscreteGrid().getXdim(), gs.getDiscreteGrid().getYdim()) + else: + return None + + #--------------------------------------------------------------------- + # compress key (weather or discrete) + # eliminates the "blank" keys that may exist in the input key + # from the netCDF file. + #--------------------------------------------------------------------- + def __compressKey(self, keys): + outKeys = [] + shape = keys.shape + for k in range(shape[0]): + s = "" + for i in range(shape[1]): + c = str(keys[k][i]) + if c != '\0': + s = s + c + s = string.strip(s) + if len(s) > 0: + outKeys.append(s) + return outKeys + + #--------------------------------------------------------------------- + # adjust for time + # Adjusts a rate dependent grid based on time durations. No processing + # occurs if the grid, or the times are not valid. + # Returns the new grid. + # trOrg = original grid time range + # trNew = new grid time range + # grid = old grid (NOT HISTORY) + # fillValue = grid fill value + # where the grid is a scalar, vector pair, or weather grid/key pair) + # Returns new grid, adjusted by duration changes. 
+ #--------------------------------------------------------------------- + def __adjustForTime(self, trOrg, trNew, grid, fillValue): + if not self.__rateParm or grid is None: + return grid + newDuration = float(trNew[1] - trNew[0]) + orgDuration = float(trOrg[1] - trOrg[0]) + durationRatio = newDuration / orgDuration + + dataType = str(self.__dbwe.getGpi().getGridType()) + if dataType == 'SCALAR': + fillMask = numpy.equal(grid, fillValue) + return numpy.where(fillMask, grid, grid * durationRatio) + elif dataType == 'VECTOR': + fillMask = numpy.equal(grid[0], fillValue) + newMagGrid = numpy.where(fillMask, grid[0], (grid[0] * durationRatio)) + return (grid[0], grid[1]) + else: + return grid + + def __calcBlankingTimes(self, inTimes): + out = [] + if len(inTimes) == 0: + out.append((0, 2 ** 30 - 1 + 2 ** 30)) + else: + for t in range(len(inTimes)): + if t == 0 and inTimes[t][0] != 0: + out.append((0, inTimes[t][0])) + elif t != 0 : + tr = (inTimes[t - 1][1], inTimes[t][0]) + if tr[0] != tr[1]: + out.append(tr) + + endIndex = len(inTimes) - 1 + tr = (inTimes[endIndex][1], 2 ** 30 - 1 + 2 ** 30) + if tr[0] != tr[1]: + out.append(tr) + + # now limit to the modProcTime + outLimit = [] + for t in out: + inter = iscTime.intersection(t, self.__modProcTime) + if inter is not None: + outLimit.append(inter) + + return outLimit + + def __processBlankTime(self, mGrid, tr): + + # calculate the merge + merge = iscTime.mergeTR(tr, self.__dbinv) + + for m in merge: + # blank the grids, but only if the overlaps flag is clear, + if m[0] != None and m[2] == 1: + if self.__siteInDbGrid(m[0]): + try: + (destGrid, oldHist) = self.__getDbGrid(m[0]) + except: + destGrid = None + oldHist = None + + if self.__rateParm: + adjGrid = self.__adjustForTime(m[0], m[1], destGrid, + 0.0) #old db grids don't have missing data flags + mergedGrid = mGrid.mergeGrid(None, \ + (adjGrid, oldHist)) + else: + mergedGrid = mGrid.mergeGrid(None, (destGrid, oldHist)) + self.__storeGrid(m[1], mergedGrid) + + def 
__siteInDbGrid(self, tr): + if tr is None: + return None + + grid, history = self._wec[tr] + + if history: + for h in history: + if self.__siteID + "_GRID" in h: + return True + + return False + + #--------------------------------------------------------------------- + # validateAdjustDiscreteKeys() + # grid = incoming grid (grid, key) + # parmName = parm name + # tr = time range of grid + # + # returns 'adjusted grid' with a potentially modified key. The key + # is guaranteed to be compatible with the current ifpServer definition. + #--------------------------------------------------------------------- + def __validateAdjustDiscreteKeys(self, grid, parmName, tr): + + (g, key) = grid #separate out for processing + + if parmName.find("_") == -1: + parmName = parmName + "_SFC" #need parmName_level for dict + + # set up error message + smsg = "Adjusting DiscreteKey for Compatibility: " + parmName + \ + ' tr=' + printTR(tr) + + # get the list of discrete keys for this parameter that are allowed + dd = self.__disDef.keys(parmName) + if dd.size() == 0: + logger.error("Unable to validate keys for %s - no def in DiscreteDefinition", + parmName) + return grid + + #now go through the incoming grid's keys and validate each one + for idx in range(len(key)): #each index of the discrete key + keyentry = key[idx] #each discrete key entry + oldEntry = keyentry #save an unchanged copy for reporting + changedReasons = [] + + #overlap check + ci = keyentry.find("^") + if ci != -1 and not self.__disDef.overlaps(parmName): + keyentry = keyentry[0:ci] #reset it to only 1st portion + changedReasons.append("No Overlap Allowed") + + eachKey = keyentry.split("^") + for idx1 in range(len(eachKey)): + ke = eachKey[idx1] #each discretesubkey in a discrete key + ai = ke.find(":") #look for aux data + if ai != -1: + aux = ke[ai + 1:] + base = ke[0:ai] + + #too long of aux data check + if len(aux) > self.__disDef.auxDataLength(parmName): + ke = base #eliminate the aux data + 
changedReasons.append("AuxData Length Exceeded") + + else: + aux = None + base = ke #with no aux data + + #valid key specified check + validKey = False + for i in range(dd.size()): + if dd.get(i).getSymbol() == base: + validKey = True + break + if not validKey: + if aux: + ke = dd.get(0).getSymbol() + ":" + aux + else: + ke = dd.get(0).getSymbol() #use 1st one + + changedReasons.append("Unknown Key") + + eachKey[idx1] = ke #store back into list + + keyentry = "^".join(eachKey) #join back to string + if len(changedReasons): + logger.error("%s from [%s] to [%s] (%s)", + smsg, oldEntry, keyentry, ",".join(changedReasons)) + + msg = "%s %s %s [%s] -> [%s] (%s)" % \ + (self.__siteID, parmName, printShortTR(tr), oldEntry, keyentry, ",".join(changedReasons)) + self.__adjDataMsg.append(msg) + + key[idx] = keyentry #store back into list + + + return (g, key) + + #--------------------------------------------------------------------- + # validateAdjustWeatherKeys() + # grid = incoming grid (grid, key) + # parmName = parm name + # tr = time range of grid + # + # returns 'adjusted grid' + #--------------------------------------------------------------------- + def __validateAdjustWeatherKeys(self, grid, parmName, tr): + (g, key) = grid #separate out for processing + + if parmName.find("_") == -1: + parmName = parmName + "_SFC" #need parmName_level for output + + # set up error message + smsg = "Adjusting WeatherKey for Compatibility: " + parmName + \ + ' tr=' + printTR(tr) + + #now go through the incoming grid's keys and validate each one + for idx in range(len(key)): #each index of the weather key + changedReasons = [] + keyentry = key[idx] #each weather key entry + oldEntry = keyentry #save an unchanged copy for reporting + ikeys = keyentry.split("^") #split into individual subkeys + for idx1 in range(len(ikeys)): + cov, typ, inten, vis, attrs = ikeys[idx1].split(":") + + # check the visibility + visibilities = self.__wxDef.getWeatherVisibilities() + validViz = False + for i 
in range(visibilities.size()): + if visibilities.get(i).getSymbol() == vis: + validViz = True + break + if not validViz: + vis = visibilities.get(0).getSymbol() #assign 1st one + changedReasons.append("Unknown Visibility") + + # check the type + types = self.__wxDef.getWeatherTypes() + validType = False + for i in range(types.size()): + if types.get(i).getSymbol() == typ: + validType = True + break + if not validType: + oldEntry = keyentry + typ = "" #no choice but to set to no weather + de = self.__wxDef.typeIndex(typ) + cov = self.__wxDef.coverageSymbol(de, 0) + inten = self.__wxDef.intensitySymbol(de, 0) + vis = self.__wxDef.visibilitySymbol(0) + attrs = "" + changedReasons.append("Unknown Weather Type") + + # type is known, validate other components + else: + de = self.__wxDef.typeIndex(typ) + + # validate coverage + if self.__wxDef.coverageIndex(typ, cov) == -1: + cov = self.__wxDef.coverageSymbol(de, 0) + changedReasons.append("Unknown Coverage") + + # validate intensity + if self.__wxDef.intensityIndex(typ, inten) == -1: + inten = self.__wxDef.intensitySymbol(de, 0) + changedReasons.append("Unknown Intensity") + + # validate attributes + if len(attrs): + atts = attrs.split(",") #get individual attributes + #determine the attributes that are valid + keepAttr = [] + for a in atts: + if self.__wxDef.attributeIndex(typ, a) != -1: + keepAttr.append(a) + if len(atts) != len(keepAttr): + attrs = ",".join(keepAttr) + changedReasons.append("Unknown Attribute") + + # update record + ikeys[idx1] = cov + ":" + typ + ":" + inten + ":" + vis + ":" + attrs + keyentry = "^".join(ikeys) #assemble subkeys + key[idx] = keyentry #put back to original format + + # report any changes + if len(changedReasons): + logger.error("%s from [%s] to [%s] (%s)", + smsg, oldEntry, keyentry, ",".join(changedReasons)) + + msg = "%s %s %s [%s] -> [%s] (%s)" % \ + (self.__siteID, parmName, printShortTR(tr), oldEntry, keyentry, ",".join(changedReasons)) + + self.__adjDataMsg.append(msg) + + 
return (g, key) + + #--------------------------------------------------------------------- + # erase all grids from the given weather element over the + # processTimePeriod procesTimePeriod = time range to remove grids + #--------------------------------------------------------------------- + def __eraseAllGrids(self, processTimePeriod): + self.__storeGrid(processTimePeriod, None) + self.__dbGrid = None + + +def convertList(unknownList): + retVal = unknownList + if hasattr(unknownList, 'java_name'): + retVal = JUtil.javaObjToPyVal(unknownList) + return retVal + +def main(siteID, userID, databaseID, parmsToProcess, blankOtherPeriods, + startTime, endTime, altMask, replaceOnly, eraseFirst, + announce, renameWE, iscSends, inFiles, ignoreMask, + adjustTranslate, deleteInput, parmsToIgnore, gridDelay, logFileName): + # convert Java types to python and send to IscMosaic for execution + parmsToProcess = convertList(parmsToProcess) + inFiles = convertList(inFiles) + parmsToIgnore = convertList(parmsToIgnore) + argv = {"siteID": siteID, + "userID": userID, + "databaseID": databaseID, + "parmsToProcess": parmsToProcess, + "blankOtherPeriods": bool(blankOtherPeriods), + "startTime": startTime, + "endTime": endTime, + "altMask": altMask, + "replaceOnly": bool(replaceOnly), + "eraseFirst": bool(eraseFirst), + "announce": announce, + "renameWE": bool(renameWE), + "iscSends": bool(iscSends), + "inFiles": inFiles, + "ignoreMask": bool(ignoreMask), + "adjustTranslate": bool(adjustTranslate), + "deleteInput": bool(deleteInput), + "parmsToIgnore": parmsToIgnore, + "gridDelay": float(gridDelay), + "logFileName": logFileName} + mosaic = IscMosaic(argv) + mosaic.execute() + mosaic = None diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscUtil.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscUtil.py index 6b96eceff0..45872bdf35 100644 --- 
a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscUtil.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/python/isc/iscUtil.py @@ -1,286 +1,286 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# -# Utility module of isc functions -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 07/06/09 1995 bphillip Initial Creation. -# 02/19/13 1637 randerso Removed unused import -# 03/11/13 1759 dgilling Move siteConfig import into -# methods where it's needed. -# 11/07/13 2517 randerso Allow getLogger to override logLevel -# 01/22/14/ 2504 randerso Added hostname to log path -# 04/10/2014 17241 dgilling (code checked in by zhao) -# 04/25/2015 4952 njensen Updated for new JEP API -# 08/14/15 4750 dgilling Stop pickling ISC domainDicts. -# 09/12/2016 5861 randerso Remove references to IFPServerConfigManager -# which was largely redundant with IFPServer. -# 06/06/2017 19967 bwhunder Correct deleteEditArea() to delete file via Localization -# 07/31/2017 6342 randerso Use ReferenceMgr to load/save/delete edit areas -# -# - -## -# This is a base file that is not intended to be overridden. 
-## - - - -import string, IrtAccess, JUtil, logging -import xml, pickle, tempfile, os, socket -from xml.etree import ElementTree -from xml.etree.ElementTree import Element, SubElement -import LogStream -from time import gmtime, strftime -from com.raytheon.uf.common.time import TimeRange -from com.raytheon.edex.plugin.gfe.server import IFPServer - - -def getEditArea(name, siteID): - ifpServer = IFPServer.getActiveServer(siteID) - if ifpServer is None: - raise Exception("No active IFPServer for site: " + siteId) - referenceMgr = ifpServer.getReferenceMgr() - - refData = None - sr = referenceMgr.getEditArea(name) - - if sr.isOkay(): - refData = sr.getPayload() - return refData - else: - raise KeyError(" ".join(["EDIT AREA NOT FOUND:", name, "for site", siteID])) - - -def saveEditAreaGrid(name, refData, siteID): - ifpServer = IFPServer.getActiveServer(siteID) - if ifpServer is None: - raise Exception("No active IFPServer for site: " + siteId) - referenceMgr = ifpServer.getReferenceMgr() - - referenceMgr.saveEditArea(name, refData) - -def deleteEditArea(name, siteID): - ifpServer = IFPServer.getActiveServer(siteID) - if ifpServer is None: - raise Exception("No active IFPServer for site: " + siteId) - referenceMgr = ifpServer.getReferenceMgr() - - referenceMgr.deleteEditArea(name) - -def transformTime(tr): - return (int(tr.getStart().getTime() / 1000), int(tr.getEnd().getTime() / 1000)) - -def toJavaTimeRange(tr): - return TimeRange(long(tr[0]) * long(1000), long(tr[1]) * long(1000)) - -def swapCoord(coord): - temp = coord.y - coord.y = coord.x - coord.x = temp - return coord - -def serverBoxText(server): - #returns text based on the server dictionary that should be placed - #into the dialog - hostport = None - if server['host'][0:3] in ['dx4', 'px3'] and server['port'] in \ - ['98000000', '98000001']: - if server['port'] == "98000000": - hostport = server['host'] + "-primary" - elif server['port'] == "98000001": - hostport = server['host'] + "-svcbu" - - if hostport is 
None: - hostport = server['host'] + "/" + server['port'] - - return server['site'] + "-> " + hostport + "@" + \ - server['mhsid'].lower() - -def sortServers(a, b): -# sort function for the list of servers. Sorts in priority order for -# most likely to have the data. Order is: -# dx4 or px3 98000000 site==mhsid -# dx4 or px3 98000001 site==mhsid -# dx4 or px3 98000000 site!=mhsid -# dx4 or px3 98000001 site!=mhsid -# all others in random order. - sameSiteA = (a['mhsid'] == a['site']) - sameSiteB = (b['mhsid'] == b['site']) - if sameSiteA and not sameSiteB: - return -1 - elif not sameSiteA and sameSiteB: - return 1 - #both are same sites, check for host next - else: - regHostA = (a['host'][0:3] in ['dx4', 'px3']) - regHostB = (b['host'][0:3] in ['dx4', 'px3']) - if regHostA and not regHostB: - return -1 - elif not regHostA and regHostB: - return 1 - # same host, but not preferred host - else: - regPortA = (a['port'] == "98000000") - regPortB = (b['port'] == "98000000") - if regPortA and not regPortB: - return -1 - elif not regPortA and regPortB: - return 1 - return 1 #must be non-standard, put at end of list - -def createDomainDict(xml): - irt = IrtAccess.IrtAccess("") - #decodes the packet of information from the ISC_REQUEST_QUERY call - #to the ifpServer. This info will be used for creating the dialog. - # Returns the domainDict, which is keyed by domain, and contains - # a list of servers (each server in a dictionary with keys of - # mhsid, host, port, protocol, site. 
- try: - serverTree = ElementTree.ElementTree(ElementTree.XML(xml)) - serversE = serverTree.getroot() - except: - LogStream.logProblem('Malformed XML in createDomainDict') - return None - if serversE.tag != "servers": - LogStream.logEvent('servers tag not found in createDomainDict') - return None #invalid xml - - #decode XML and create dictionary and parms list - domains = {} - welist = [] - serverDictS2T = {} #key=serverinfo, value=text on GUI - serverDictT2S = {} #key=text on GUI, value=serverinfo - for domainE in serversE: - if domainE.tag == "domain": - site = None - for name, value in domainE.items(): - if name == "site": - site = value - break - if site is None: - LogStream.logProblem('Malformed domain site XML') - continue - for addressE in domainE.getchildren(): - info = irt.decodeXMLAddress(addressE) - if not domains.has_key(site): - domains[site] = [] - list = domains[site] - list.append(info) - guiText = serverBoxText(info) - serverDictT2S[guiText] = info - serverDictS2T[str(info)] = guiText - list.sort(sortServers) - domains[site] = list - - elif domainE.tag == "welist": - for parmE in domainE.getchildren(): - welist.append(parmE.text) - welist.sort() - - retVal = {} - retVal['serverDictS2T'] = serverDictS2T - retVal['serverDictT2S'] = serverDictT2S - retVal['domains'] = domains - return retVal - -def getRequestXML(xml, selectedServers, selectedWEList): - irt = IrtAccess.IrtAccess("") - selectedServers = JUtil.javaStringListToPylist(selectedServers) - selectedWElist = JUtil.javaStringListToPylist(selectedWEList) - - response = createDomainDict(xml) - serverDictT2S = response['serverDictT2S'] - domainDict = response['domains'] - - iscReqE = Element('iscrequest') - servers = [] - for serverT in selectedServers: - server = serverDictT2S[serverT] - servers.append(server) - irt.addDestinationXML(iscReqE, servers) - welistE = SubElement(iscReqE, 'welist') - for we in selectedWElist: - weE = SubElement(welistE, 'parm') - weE.text = we - - # output the list of 
servers and their priority - s = '\n' - for key in domainDict.keys(): - s += "DOMAIN=" + key + '\n' - servers = selectedServers - for serverT in servers: - server = serverDictT2S[serverT] - if server['site'] == key: - s += " mhs=" + server['mhsid'] + " host=" + \ - server['host'] + " port=" + server['port'] + "\n" - #LogStream.logEvent("Chosen request servers:", s) - - # send to ifpServer - xmlreq = ElementTree.tostring(iscReqE) - - return xmlreq; - -def getLogger(scriptName, logName=None, logLevel=logging.INFO): - # be relocating this import here we allow - # com.raytheon.edex.plugin.gfe.isc.IscScript to dynamically - # modify its include path with the proper siteConfig just before - # execution time - import siteConfig - hostname = socket.gethostname().split('.')[0] - logPath = os.path.join(siteConfig.GFESUITE_LOGDIR, strftime("%Y%m%d", gmtime()), hostname) - if logName is None: - logName = scriptName + ".log" - else: - logDir = os.path.dirname(logName) - if len(logDir) > 0: - logPath = logDir - logName = os.path.basename(logName) - - logFile = os.path.join(logPath, logName) - - try: - os.makedirs(logPath) - except OSError as e: - import errno - if e.errno != errno.EEXIST: - raise e - - theLog = logging.getLogger(scriptName) - theLog.setLevel(logLevel) - ch = logging.FileHandler(logFile) - - ch.setLevel(logLevel) - formatter = logging.Formatter("%(levelname)s %(asctime)s [%(process)d:%(thread)d] %(filename)s: %(message)s") - ch.setFormatter(formatter) - for h in theLog.handlers: - theLog.removeHandler(h) - theLog.addHandler(ch) - return theLog - -def tupleToString(*msg): - concatMsg = "" - for m in msg: - concatMsg = concatMsg + " " + str(m) - return concatMsg +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. 
Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +# +# Utility module of isc functions +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 07/06/09 1995 bphillip Initial Creation. +# 02/19/13 1637 randerso Removed unused import +# 03/11/13 1759 dgilling Move siteConfig import into +# methods where it's needed. +# 11/07/13 2517 randerso Allow getLogger to override logLevel +# 01/22/14/ 2504 randerso Added hostname to log path +# 04/10/2014 17241 dgilling (code checked in by zhao) +# 04/25/2015 4952 njensen Updated for new JEP API +# 08/14/15 4750 dgilling Stop pickling ISC domainDicts. +# 09/12/2016 5861 randerso Remove references to IFPServerConfigManager +# which was largely redundant with IFPServer. +# 06/06/2017 19967 bwhunder Correct deleteEditArea() to delete file via Localization +# 07/31/2017 6342 randerso Use ReferenceMgr to load/save/delete edit areas +# +# + +## +# This is a base file that is not intended to be overridden. 
+## + + + +import string, IrtAccess, JUtil, logging +import xml, pickle, tempfile, os, socket +from xml.etree import ElementTree +from xml.etree.ElementTree import Element, SubElement +import LogStream +from time import gmtime, strftime +from com.raytheon.uf.common.time import TimeRange +from com.raytheon.edex.plugin.gfe.server import IFPServer + + +def getEditArea(name, siteID): + ifpServer = IFPServer.getActiveServer(siteID) + if ifpServer is None: + raise Exception("No active IFPServer for site: " + siteId) + referenceMgr = ifpServer.getReferenceMgr() + + refData = None + sr = referenceMgr.getEditArea(name) + + if sr.isOkay(): + refData = sr.getPayload() + return refData + else: + raise KeyError(" ".join(["EDIT AREA NOT FOUND:", name, "for site", siteID])) + + +def saveEditAreaGrid(name, refData, siteID): + ifpServer = IFPServer.getActiveServer(siteID) + if ifpServer is None: + raise Exception("No active IFPServer for site: " + siteId) + referenceMgr = ifpServer.getReferenceMgr() + + referenceMgr.saveEditArea(name, refData) + +def deleteEditArea(name, siteID): + ifpServer = IFPServer.getActiveServer(siteID) + if ifpServer is None: + raise Exception("No active IFPServer for site: " + siteId) + referenceMgr = ifpServer.getReferenceMgr() + + referenceMgr.deleteEditArea(name) + +def transformTime(tr): + return (int(tr.getStart().getTime() / 1000), int(tr.getEnd().getTime() / 1000)) + +def toJavaTimeRange(tr): + return TimeRange(int(tr[0]) * int(1000), int(tr[1]) * int(1000)) + +def swapCoord(coord): + temp = coord.y + coord.y = coord.x + coord.x = temp + return coord + +def serverBoxText(server): + #returns text based on the server dictionary that should be placed + #into the dialog + hostport = None + if server['host'][0:3] in ['dx4', 'px3'] and server['port'] in \ + ['98000000', '98000001']: + if server['port'] == "98000000": + hostport = server['host'] + "-primary" + elif server['port'] == "98000001": + hostport = server['host'] + "-svcbu" + + if hostport is 
None: + hostport = server['host'] + "/" + server['port'] + + return server['site'] + "-> " + hostport + "@" + \ + server['mhsid'].lower() + +def sortServers(a, b): +# sort function for the list of servers. Sorts in priority order for +# most likely to have the data. Order is: +# dx4 or px3 98000000 site==mhsid +# dx4 or px3 98000001 site==mhsid +# dx4 or px3 98000000 site!=mhsid +# dx4 or px3 98000001 site!=mhsid +# all others in random order. + sameSiteA = (a['mhsid'] == a['site']) + sameSiteB = (b['mhsid'] == b['site']) + if sameSiteA and not sameSiteB: + return -1 + elif not sameSiteA and sameSiteB: + return 1 + #both are same sites, check for host next + else: + regHostA = (a['host'][0:3] in ['dx4', 'px3']) + regHostB = (b['host'][0:3] in ['dx4', 'px3']) + if regHostA and not regHostB: + return -1 + elif not regHostA and regHostB: + return 1 + # same host, but not preferred host + else: + regPortA = (a['port'] == "98000000") + regPortB = (b['port'] == "98000000") + if regPortA and not regPortB: + return -1 + elif not regPortA and regPortB: + return 1 + return 1 #must be non-standard, put at end of list + +def createDomainDict(xml): + irt = IrtAccess.IrtAccess("") + #decodes the packet of information from the ISC_REQUEST_QUERY call + #to the ifpServer. This info will be used for creating the dialog. + # Returns the domainDict, which is keyed by domain, and contains + # a list of servers (each server in a dictionary with keys of + # mhsid, host, port, protocol, site. 
+ try: + serverTree = ElementTree.ElementTree(ElementTree.XML(xml)) + serversE = serverTree.getroot() + except: + LogStream.logProblem('Malformed XML in createDomainDict') + return None + if serversE.tag != "servers": + LogStream.logEvent('servers tag not found in createDomainDict') + return None #invalid xml + + #decode XML and create dictionary and parms list + domains = {} + welist = [] + serverDictS2T = {} #key=serverinfo, value=text on GUI + serverDictT2S = {} #key=text on GUI, value=serverinfo + for domainE in serversE: + if domainE.tag == "domain": + site = None + for name, value in list(domainE.items()): + if name == "site": + site = value + break + if site is None: + LogStream.logProblem('Malformed domain site XML') + continue + for addressE in domainE.getchildren(): + info = irt.decodeXMLAddress(addressE) + if site not in domains: + domains[site] = [] + list = domains[site] + list.append(info) + guiText = serverBoxText(info) + serverDictT2S[guiText] = info + serverDictS2T[str(info)] = guiText + list.sort(sortServers) + domains[site] = list + + elif domainE.tag == "welist": + for parmE in domainE.getchildren(): + welist.append(parmE.text) + welist.sort() + + retVal = {} + retVal['serverDictS2T'] = serverDictS2T + retVal['serverDictT2S'] = serverDictT2S + retVal['domains'] = domains + return retVal + +def getRequestXML(xml, selectedServers, selectedWEList): + irt = IrtAccess.IrtAccess("") + selectedServers = JUtil.javaStringListToPylist(selectedServers) + selectedWElist = JUtil.javaStringListToPylist(selectedWEList) + + response = createDomainDict(xml) + serverDictT2S = response['serverDictT2S'] + domainDict = response['domains'] + + iscReqE = Element('iscrequest') + servers = [] + for serverT in selectedServers: + server = serverDictT2S[serverT] + servers.append(server) + irt.addDestinationXML(iscReqE, servers) + welistE = SubElement(iscReqE, 'welist') + for we in selectedWElist: + weE = SubElement(welistE, 'parm') + weE.text = we + + # output the list of 
servers and their priority + s = '\n' + for key in list(domainDict.keys()): + s += "DOMAIN=" + key + '\n' + servers = selectedServers + for serverT in servers: + server = serverDictT2S[serverT] + if server['site'] == key: + s += " mhs=" + server['mhsid'] + " host=" + \ + server['host'] + " port=" + server['port'] + "\n" + #LogStream.logEvent("Chosen request servers:", s) + + # send to ifpServer + xmlreq = ElementTree.tostring(iscReqE) + + return xmlreq; + +def getLogger(scriptName, logName=None, logLevel=logging.INFO): + # be relocating this import here we allow + # com.raytheon.edex.plugin.gfe.isc.IscScript to dynamically + # modify its include path with the proper siteConfig just before + # execution time + import siteConfig + hostname = socket.gethostname().split('.')[0] + logPath = os.path.join(siteConfig.GFESUITE_LOGDIR, strftime("%Y%m%d", gmtime()), hostname) + if logName is None: + logName = scriptName + ".log" + else: + logDir = os.path.dirname(logName) + if len(logDir) > 0: + logPath = logDir + logName = os.path.basename(logName) + + logFile = os.path.join(logPath, logName) + + try: + os.makedirs(logPath) + except OSError as e: + import errno + if e.errno != errno.EEXIST: + raise e + + theLog = logging.getLogger(scriptName) + theLog.setLevel(logLevel) + ch = logging.FileHandler(logFile) + + ch.setLevel(logLevel) + formatter = logging.Formatter("%(levelname)s %(asctime)s [%(process)d:%(thread)d] %(filename)s: %(message)s") + ch.setFormatter(formatter) + for h in theLog.handlers: + theLog.removeHandler(h) + theLog.addHandler(ch) + return theLog + +def tupleToString(*msg): + concatMsg = "" + for m in msg: + concatMsg = concatMsg + " " + str(m) + return concatMsg diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS190.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS190.py index 3ab5582fb2..a03967441c 100644 --- 
a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS190.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS190.py @@ -1,541 +1,541 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from GFS190 model -## output. -## -##-------------------------------------------------------------------------- -class GFS190Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "GFS190") - -##-------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. 
-##-------------------------------------------------------------------------- - def levels(self): - return ["MB850", "MB700", "MB500", "MB400", "MB300"] - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - - -##------------------------------------------------------------------------- -## Internal function that returns the surface temperature based on -## the specified geoPot. height and temperature cube. This function -## interpolates the temperature at elevation specified by the topo -## grid. -##-------------------------------------------------------------------------- - def getSFCt(self, gh_c, t_c, topo): - sp = self.newGrid(-1) # a grid of -1 everywhere - for i in xrange(1, gh_c.shape[0]): # for each level - ## get the points where the gh level is above the topo value - mask = logical_and(equal(sp, -1), greater_equal(gh_c[i], topo)) - ## interpolate between this level and the one below it - sp[mask] = self.linear(gh_c[i], gh_c[i - 1], - t_c[i], t_c[i - 1], topo) - - return sp - -##------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool simply interpolates the temperature value from -## model's isobaric temperature cube. 
-##------------------------------------------------------------------------- - def calcT(self, gh_c, t_c, topo): - tmb = self.newGrid(-1) - # calc sfc_temp at topo - for i in xrange(1, gh_c.shape[0]): - # interp temp in this layer - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - # assign temp to points in this layer - m = logical_and(equal(tmb, -1), greater(gh_c[i], topo)) - tmb[m] = tval1[m] - ## convert from kelvin to fahrenheit - return self.KtoF(tmb) - -##------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. -##------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_BL030): - rh_FHAG2 = rh_BL030 - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar - # at the true surface - tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. - ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio - rh = w / ws # calc relative humidity - - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -##------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model -##------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - def calcSky(self, rh_c, gh_c, topo, p_SFC): - return self.skyFromRH(rh_c, gh_c, topo, p_SFC) - -##------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. 
Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. -##------------------------------------------------------------------------- - def calcPoP(self, gh_c, rh_c, QPF, topo): - rhavg = where(less(gh_c, topo), float32(-1), rh_c) - rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 - count = not_equal(rhavg, -1) - rhavg[equal(rhavg, -1)] = 0 - count = add.reduce(count, 0, dtype=float32) - rhavg = add.reduce(rhavg, 0) - ## add this much based on humidity only - dpop = where(count, rhavg / (count + .001), 0) - 70.0 - dpop[less(dpop, -30)] = -30 - ## calculate the base PoP - pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) - pop += dpop # add the adjustment based on humidity - pop.clip(0, 100, pop) # clip to 100% - return pop - -##------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. -##------------------------------------------------------------------------- - def calcFzLevel(self, gh_c, t_c, topo): - fzl = self.newGrid(-1) - - # for each level in the height cube, find the freezing level - for i in xrange(gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ - * (273.15 - t_c[i - 1]) - except: - val = gh_c[i] - - ## save the height value in fzl - m = logical_and(equal(fzl, -1), less_equal(t_c[i], 273.15)) - fzl[m] = val[m] - fzl *= 3.28 # convert to feet - return fzl - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. 
-##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb = clip(pmb, 1, 1050) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc = clip(tc, -120, 60) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - - m = logical_and(equal(snow, -1), less_equal(wetb[i], 0)) - snow[m] = val[m] - # - # convert to feet - # - snow *= 3.28 - - return snow - -##------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - snowr = T * -0.5 + 22.5 - snowr[less(T, 9)] = 20 - snowr[greater_equal(T, 30)] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), - snowr * QPF, float32(0)) - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) -# snowamt = where(snowmask, snowamt, 0) - snowamt[logical_not(snowmask)] = 0 - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". 
-##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - - -##------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - # set up masks -# pt = where(mask, pt, 0) - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) # calc. running avg - diffpt = pt[i] - runavg # calc. difference - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - m = logical_and(logical_and(mask[i], equal(mh, -1)), greater(diffpt, 3)) - mh[m] = tmh[m] - - mh -= topo - mh *= 3.28 # convert to feet - return mh - -##------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##------------------------------------------------------------------------- - def calcWind(self, wind_BL030): - mag = wind_BL030[0] * 1.94 # get the wind speed and convert - dir = wind_BL030[1] # get wind dir - return (mag, dir) # assemble speed and dir into a tuple - -##------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. -##------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - m = logical_and(equal(famag, -1), mask[i]) - famag[m] = wm[i][m] - - m = logical_and(equal(fadir, -1), mask[i]) - fadir[m] = wd[i][m] - fadir.clip(0, 360, fadir) # clip the value to 0, 360 - famag *= 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. 
This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tmag = tmag * 1.94 # convert to knots - tmag = clip(tmag, 0, 125) # clip speed to 125 knots - return (tmag, tdir) - -##------------------------------------------------------------------------- -## Uses a derivation of the Bourgouin allgorithm to calculate precipitation -## type, and other algorithms to determine the coverage and intensity. -## The Bourgoin technique figures out precip type from calculating how -## long a hydrometer is exposed to alternating layers of above zero (C) and -## below zero temperature layers. This tool calculates at each grid point -## which of the four Bourgouin cases apply. Then the appropriate algorithm -## is applied to that case that further refines the precip. type. Once the -## type is determined, other algorithms are used to determine the coverage -## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, -## "A Method to Determine Precipitation Types", by Pierre Bourgouin -##------------------------------------------------------------------------- - def calcWx(self, QPF, T, t_c, gh_c, p_SFC, topo, sli_SFC): - gh_c = gh_c[:13, :, :] - t_c = t_c[:13, :, :] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) - tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a11, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a11, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a11, a3) - topomask = logical_and(topomask, cross) - aindex = where(topomask, aindex + 1, aindex) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a22, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a22, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a22, a3) - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - # Case d (snow) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] = 2 - 
wx[logical_and(srmask, - logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) -# convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) -# wx = where(logical_and(not_equal(wx, 0), convecMask), wx + 6, wx) - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "Chc" or tcov == "": - tcov = "Sct" - key.append(key[i] + "^" + tcov - + ":T:::") - wx[less_equal(sli_SFC, -3)] += 13 - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -##------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip., lifted index -## and 3-D relative humidity. -##------------------------------------------------------------------------- - def calcLAL(self, tp_SFC, sli_SFC, rh_c, rh_BL030): - bli = sli_SFC # surface lifted index - ttp = self.newGrid(0.00001) # nearly zero grid - lal = self.newGrid(1) # initialize the return grid to 1 - # Add one to lal if QPF > 0.5 - lal[logical_and(greater(ttp, 0), greater(tp_SFC / ttp, 0.5))] += 1 - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli, -3)] += 1 - lal[less(bli, -5)] += 1 - return lal - -def main(): - GFS190Forecaster().run() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. 
persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from GFS190 model +## output. +## +##-------------------------------------------------------------------------- +class GFS190Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "GFS190") + +##-------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. 
+##-------------------------------------------------------------------------- + def levels(self): + return ["MB850", "MB700", "MB500", "MB400", "MB300"] + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + + +##------------------------------------------------------------------------- +## Internal function that returns the surface temperature based on +## the specified geoPot. height and temperature cube. This function +## interpolates the temperature at elevation specified by the topo +## grid. +##-------------------------------------------------------------------------- + def getSFCt(self, gh_c, t_c, topo): + sp = self.newGrid(-1) # a grid of -1 everywhere + for i in range(1, gh_c.shape[0]): # for each level + ## get the points where the gh level is above the topo value + mask = logical_and(equal(sp, -1), greater_equal(gh_c[i], topo)) + ## interpolate between this level and the one below it + sp[mask] = self.linear(gh_c[i], gh_c[i - 1], + t_c[i], t_c[i - 1], topo) + + return sp + +##------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. This tool simply interpolates the temperature value from +## model's isobaric temperature cube. 
+##------------------------------------------------------------------------- + def calcT(self, gh_c, t_c, topo): + tmb = self.newGrid(-1) + # calc sfc_temp at topo + for i in range(1, gh_c.shape[0]): + # interp temp in this layer + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + # assign temp to points in this layer + m = logical_and(equal(tmb, -1), greater(gh_c[i], topo)) + tmb[m] = tval1[m] + ## convert from kelvin to fahrenheit + return self.KtoF(tmb) + +##------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. +##------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_BL030): + rh_FHAG2 = rh_BL030 + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar + # at the true surface + tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. + ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio + rh = w / ws # calc relative humidity + + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +##------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model +##------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + def calcSky(self, rh_c, gh_c, topo, p_SFC): + return self.skyFromRH(rh_c, gh_c, topo, p_SFC) + +##------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. 
Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. +##------------------------------------------------------------------------- + def calcPoP(self, gh_c, rh_c, QPF, topo): + rhavg = where(less(gh_c, topo), float32(-1), rh_c) + rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 + count = not_equal(rhavg, -1) + rhavg[equal(rhavg, -1)] = 0 + count = add.reduce(count, 0, dtype=float32) + rhavg = add.reduce(rhavg, 0) + ## add this much based on humidity only + dpop = where(count, rhavg / (count + .001), 0) - 70.0 + dpop[less(dpop, -30)] = -30 + ## calculate the base PoP + pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) + pop += dpop # add the adjustment based on humidity + pop.clip(0, 100, pop) # clip to 100% + return pop + +##------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. +##------------------------------------------------------------------------- + def calcFzLevel(self, gh_c, t_c, topo): + fzl = self.newGrid(-1) + + # for each level in the height cube, find the freezing level + for i in range(gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ + * (273.15 - t_c[i - 1]) + except: + val = gh_c[i] + + ## save the height value in fzl + m = logical_and(equal(fzl, -1), less_equal(t_c[i], 273.15)) + fzl[m] = val[m] + fzl *= 3.28 # convert to feet + return fzl + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. 
+##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb = clip(pmb, 1, 1050) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc = clip(tc, -120, 60) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + + m = logical_and(equal(snow, -1), less_equal(wetb[i], 0)) + snow[m] = val[m] + # + # convert to feet + # + snow *= 3.28 + + return snow + +##------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + snowr = T * -0.5 + 22.5 + snowr[less(T, 9)] = 20 + snowr[greater_equal(T, 30)] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), + snowr * QPF, float32(0)) + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) +# snowamt = where(snowmask, snowamt, 0) + snowamt[logical_not(snowmask)] = 0 + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". 
+##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + + +##------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + # set up masks +# pt = where(mask, pt, 0) + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) # calc. running avg + diffpt = pt[i] - runavg # calc. difference + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + m = logical_and(logical_and(mask[i], equal(mh, -1)), greater(diffpt, 3)) + mh[m] = tmh[m] + + mh -= topo + mh *= 3.28 # convert to feet + return mh + +##------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##------------------------------------------------------------------------- + def calcWind(self, wind_BL030): + mag = wind_BL030[0] * 1.94 # get the wind speed and convert + dir = wind_BL030[1] # get wind dir + return (mag, dir) # assemble speed and dir into a tuple + +##------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. +##------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + m = logical_and(equal(famag, -1), mask[i]) + famag[m] = wm[i][m] + + m = logical_and(equal(fadir, -1), mask[i]) + fadir[m] = wd[i][m] + fadir.clip(0, 360, fadir) # clip the value to 0, 360 + famag *= 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. 
This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. +##------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tmag = tmag * 1.94 # convert to knots + tmag = clip(tmag, 0, 125) # clip speed to 125 knots + return (tmag, tdir) + +##------------------------------------------------------------------------- +## Uses a derivation of the Bourgouin allgorithm to calculate precipitation +## type, and other algorithms to determine the coverage and intensity. +## The Bourgoin technique figures out precip type from calculating how +## long a hydrometer is exposed to alternating layers of above zero (C) and +## below zero temperature layers. This tool calculates at each grid point +## which of the four Bourgouin cases apply. Then the appropriate algorithm +## is applied to that case that further refines the precip. type. Once the +## type is determined, other algorithms are used to determine the coverage +## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, +## "A Method to Determine Precipitation Types", by Pierre Bourgouin +##------------------------------------------------------------------------- + def calcWx(self, QPF, T, t_c, gh_c, p_SFC, topo, sli_SFC): + gh_c = gh_c[:13, :, :] + t_c = t_c[:13, :, :] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) + tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a11, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a11, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a11, a3) + topomask = logical_and(topomask, cross) + aindex = where(topomask, aindex + 1, aindex) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a22, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a22, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a22, a3) + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + # Case d (snow) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 2 + 
wx[logical_and(srmask, + logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) +# convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) +# wx = where(logical_and(not_equal(wx, 0), convecMask), wx + 6, wx) + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "Chc" or tcov == "": + tcov = "Sct" + key.append(key[i] + "^" + tcov + + ":T:::") + wx[less_equal(sli_SFC, -3)] += 13 + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +##------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip., lifted index +## and 3-D relative humidity. +##------------------------------------------------------------------------- + def calcLAL(self, tp_SFC, sli_SFC, rh_c, rh_BL030): + bli = sli_SFC # surface lifted index + ttp = self.newGrid(0.00001) # nearly zero grid + lal = self.newGrid(1) # initialize the return grid to 1 + # Add one to lal if QPF > 0.5 + lal[logical_and(greater(ttp, 0), greater(tp_SFC / ttp, 0.5))] += 1 + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli, -3)] += 1 + lal[less(bli, -5)] += 1 + return lal + +def main(): + GFS190Forecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS40.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS40.py index 32846ffa44..53cdd4c395 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS40.py +++ 
b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS40.py @@ -1,1097 +1,1097 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from GFS80 model -## output. 
-## -##-------------------------------------------------------------------------- -class GFS40Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "GFS40", "GFS40") - -####-------------------------------------------------------------------------- -#### These levels will be used to create vertical soundings. These are -#### defined here since they are model dependent. -####-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB975", "MB950", "MB925", "MB900", "MB875", "MB850", - "MB825", "MB800", "MB775", "MB750", "MB725", "MB700", - "MB675", "MB650", "MB625", "MB600", "MB575", "MB550", - "MB525", "MB500", "MB450", "MB400", "MB350", "MB300"] - -####------------------------------------------------------------------------- -#### Returns the maximum of the specified MaxT and the T grids -####-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -####------------------------------------------------------------------------- -#### Returns the minimum of the specified MinT and T grids -####-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - - -####------------------------------------------------------------------------- -#### Calculates the temperature at the elevation indicated in the topo -#### grid. This tool simply interpolates the temperature value from -#### model's isobaric temperature cube. -####------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, t_BL150180, p_SFC, topo, stopo, gh_c, t_c): - p_SFC = p_SFC / 100 # get the surface pres. 
in mb - pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, - p_SFC - 105, p_SFC - 135] - temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, t_BL150180] - return self._calcT(temps, pres, topo, stopo, gh_c, t_c) - - def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) # identify points > topo - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i - 1], - log(self.pres[i]), log(self.pres[i - 1]), topo) - val[greater(val, 500)] = 500 - val.clip(-.00001, 10, val) - - m = logical_and(equal(p, -1), higher) - p[m]= exp(val)[m] - - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - - m = logical_and(equal(tmb, -1), higher) - tmb[m] = tval1[m] - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) - - m = logical_and(equal(tms, -1), greater(gh_c[i], stopo)) - tms[m] = tval2[m] - - - # define the pres. 
of each of the boundary layers - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) - gm = greater(pres[i - 1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - - m = logical_and(equal(st, -1), mask) - st[m] = val[m] - - # where topo level is above highest level in BL fields...use tmb - m = logical_and(equal(st,-1),less(p, pres[-1])) - st[m] = tmb[m] - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - m = equal(st, -1) - st[m] = (tmb - tms + temps[0])[m] - - return self.KtoF(st) - -####------------------------------------------------------------------------- -#### Calculates dew point from the specified pressure, temp and rh -#### fields. -####------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar - # at the true surface - tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. - ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio - rh = w / ws # calc relative humidity - - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -####------------------------------------------------------------------------- -#### Calculates RH from the T and Td grids -####------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -####------------------------------------------------------------------------- -#### Returns the maximum of the specified MaxRH and the RH grids -####-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -####------------------------------------------------------------------------- -#### Returns the minimum of the specified MinRH and RH grids -####-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -####------------------------------------------------------------------------- -#### Calculates QPF from the total precip field out of the model -####------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - -####------------------------------------------------------------------------- -#### Calculates sky (0-100) from the total precip field out of the model -####------------------------------------------------------------------------- - - 
########################################################################### - # Calculates Sky condition (fractional cloud cover) from model RH at - # specific pressure levels. Uses reduced equations from Walcek, MWR June - # 1994. Adds up the amount of fractional clouds calculated at each layer - # based on topography (i.e. no clouds below ground) then divides by a - # suggested number of layers to produce an average sky. - ########################################################################### - def calcSky(self, rh_c, gh_c, topo, p_SFC, rh_BL030, rh_BL3060, rh_BL6090, - rh_BL90120, rh_BL120150, rh_BL150180): - - tmpP_SFC = p_SFC.copy() - tmpP_SFC /= 100.0 # convert surfp to millibars - x = 560.0 # delta x (85km - 850km) - - # Define a percentage of f100 to use as a filter (0.0 - 1.0) - # Remember f100 is an exponential function, so changes will be more - # pronounced in the 0.5-1.0 range than the 0.0-0.5 range. - percent = 0.37 - - # Define a list of boundary layer levels to include - BL_levels = ['BL030', 'BL3060', 'BL6090', 'BL90120', 'BL120150', - 'BL150180'] - - # Construct a boundary layer pressure and RH cube - bl_Pcube = [] - bl_RHcube = [] - - # Place all BL RH levels into a cube - bl_RHcube += [rh_BL030] - bl_RHcube += [rh_BL3060] - bl_RHcube += [rh_BL6090] - bl_RHcube += [rh_BL90120] - bl_RHcube += [rh_BL120150] - bl_RHcube += [rh_BL150180] - - bl_RHcube = array(bl_RHcube) - - - # Make a cube of boundary layer pressures - for lvl in BL_levels: - if lvl == 'BL030': - tmpP = tmpP_SFC - 15.0 - elif lvl == 'BL3060': - tmpP = tmpP_SFC - 45.0 - elif lvl == 'BL6090': - tmpP = tmpP_SFC - 75.0 - elif lvl == 'BL90120': - tmpP = tmpP_SFC - 105.0 - elif lvl == 'BL120150': - tmpP = tmpP_SFC - 135.0 - elif lvl == 'BL150180': - tmpP = tmpP_SFC - 165.0 - bl_Pcube += [tmpP] - bl_Pcube = array(bl_Pcube) - - - # Make a model level pressure cube - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - - - # Convert BL pressures to sigma 
levels - BL_sigma = bl_Pcube / tmpP_SFC - del bl_Pcube - BL_sigma = clip(BL_sigma, 0.1, 1.0) - - # Convert model level pressure cube to sigma surfaces - pp = pmb / tmpP_SFC - del tmpP_SFC - pp = clip(pp, 0.1, 1.0) - - - # Account for topography in the model cube, don't need to worry about - # this with the BL cube since those are guaranteed to be above ground - tmpRH_c = where(less(gh_c, topo), float32(0.0), rh_c) - - #======================================================================= - # Create new RH and sigma cubes - - newRH_c = [] - newSigma_c = [] - - # See which boundary layer levels have pressures > lowest "signficant" - # model level pressure - for bl_i in xrange(BL_sigma.shape[0]): - - # Make a mask to identify which points from the boundary - # layer level have greater pressure than lowest "significant" - # model level - BL_mask = greater(BL_sigma[bl_i], pp[0]) - - # See how many points we've found - count = sum(sum(BL_mask, 1)) - - # If there are no points - don't include this BL level - if count == 0: - continue - - # Compute a temporary RH grid where it is lower than the lowest - # "significant" model level data - tmpRH = where(BL_mask, bl_RHcube[bl_i], float32(0.0)) - - - # Compute a temporary sigma grid for this boundary layer level - # where it is lower than the lowest "significant" model level - tmpSigma = where(BL_mask, BL_sigma[bl_i], float32(0.0)) - - # Insert this level into the new RH and sigma cubes - newRH_c += [tmpRH] - newSigma_c += [tmpSigma] - - - # Add lowest "significant" model level to RH and sigma cubes - newRH_c += [tmpRH_c[0]] - newSigma_c += [pp[0]] - - - # Insert boundary layer RH into RH cube where appropriate - for lvl in xrange(1, len(self.levels())): - - # Look at each boundary layer level - for bl_i in xrange(BL_sigma.shape[0]): - - # Make a mask to identify which points from the boundary - # layer level fall between the surrounding "significant" - # model levels - BL_mask = logical_and(greater(BL_sigma[bl_i], pp[lvl]), 
- less(BL_sigma[bl_i], pp[lvl - 1])) - - # See how many points we've found - count = sum(sum(BL_mask, 1)) - - # If there are no points - don't include this BL level - if count == 0: - continue - - # Compute a temporary RH grid where it is between the two - # "significant" model level data - tmpRH = where(BL_mask, bl_RHcube[bl_i], float32(0.0)) - - - # Compute a temporary sigma grid for this boundary layer level - # where it is between the two "significant" model levels - tmpSigma = where(BL_mask, BL_sigma[bl_i], float32(0.0)) - - # Insert this level into the new RH and sigma cubes - newRH_c += [tmpRH] - newSigma_c += [tmpSigma] - - - # Add top of layer we just examined to RH and sigma cube - newRH_c += [tmpRH_c[lvl]] - newSigma_c += [pp[lvl]] - - del bl_RHcube - del BL_sigma - del tmpRH_c - - # Finish off the new cubes - newRH_c = array(newRH_c) - newSigma_c = array(newSigma_c) - - # Determine maximum possible sky fraction - fmax = 78.0 + x / 15.5 - - # Compute sky fraction for both pressure cubes - f100 = where(less(newSigma_c, 0.7), - fmax * (newSigma_c - 0.1) / 0.6, - 30.0 + (1.0 - newSigma_c) * (fmax - 30.0) / 0.3) - - # Compute RH depression at 37% f100 [ (1-RHe) in Walcek ] - c = 0.196 + (0.76 - x / 2834.0) * (1.0 - newSigma_c) - - del newSigma_c - - # Compute critical RH threshold to use as a filter - # Note (percent * f100)/f100 = percent - try: - rhCrit = log(percent) * c + 1.0 - except: - rhCrit = 0.0 - - # Ensure "critical RH" is valid - rhCrit = clip(rhCrit, 0.0, 1.0) - - # Compute sky fraction for the model cube - c = (newRH_c / 100.0 - 1.0) / c - c = exp(c) - f = minimum(f100 * c, 100.0) - - # Where RH is less than the critical value, set it to 0 contribution - f[less(newRH_c / 100.0, rhCrit)] = 0.0 - - del newRH_c - - # Compress cubes vertically - f = self.squishZ(f, (f.shape[0] / 5) - 1) # was 5 - - # Convert sky fractions to an actual percentage - f[4] *= 0.25 - f /= 100.0 - - sky = f[0] - for i in xrange(1, f.shape[0]): - sky = sky + f[i] - sky * 
f[i] - - grid = sky * 100.0 - - return grid - -####------------------------------------------------------------------------- -#### Calculates Prob. of Precip. based on QPF and RH cube. Where there -#### is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -#### of QPF < 0.2 raise the PoP if it's very humid. -####------------------------------------------------------------------------- - def calcPoP(self, gh_c, rh_c, QPF, topo): - rhavg = where(less(gh_c, topo), float32(-1), rh_c) - rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 - count = not_equal(rhavg, -1) - rhavg[equal(rhavg, -1)] = 0 - count = add.reduce(count, 0, dtype=float32) - rhavg = add.reduce(rhavg, 0) - ## add this much based on humidity only - dpop = where(count, rhavg / (count + .001), 0) - 70.0 - dpop[less(dpop, -30)] = -30 - ## calculate the base PoP - pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) - pop += dpop # add the adjustment based on humidity - pop = clip(pop, 0, 100) # clip to 100% - return pop - -####------------------------------------------------------------------------- -#### Calculates the Freezing level based on height and temperature -#### cubes. Finds the height at which freezing occurs. -####------------------------------------------------------------------------- - def calcFzLevel(self, gh_FRZ): - return gh_FRZ * 3.28 - -####------------------------------------------------------------------------- -#### Calculates the Snow level based on wet-bulb zero height. 
-####------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb = clip(pmb, 1, 1050) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc = clip(tc, -120, 60) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - - m = logical_and(equal(snow, -1), less_equal(wetb[i], 0)) - snow[m] = val[m] - # - # convert to feet - # - snow *= 3.28 - - return snow - -####------------------------------------------------------------------------- -#### Calculates Snow amount based on the Temp, Freezing level, QPF, -#### topo and Weather grid -####------------------------------------------------------------------------- -# def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): -# # figure out the snow to liquid ratio -# m1 = less(T, 9) -# m2 = greater_equal(T, 30) -# snowr = T * -0.5 + 22.5 -# snowr[m1] = float32(20)) -# snowr[m2] = float32(0) -# # calc. snow amount based on the QPF and the ratio -# snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), -# snowr * QPF, float32(0)) -# # Only make snow at points where the weather is snow -# snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) -# snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), -# equal(Wx[0], 9))) -# snowamt[logical_not(snowmask)] = 0 -# return snowamt - -########################################################### -## GRR Snow Init - begin -## This Routine Does SnowAmt - Not SnowRatio -########################################################### - def calcSnowAmt(self, T, QPF, gh_c, t_c, rh_c, pvv_c, gh_MB925, gh_MB800, gh_MB850, gh_MB700, gh_MB750, gh_MB650, gh_MB600, gh_MB550): - - #t_c is tCube, rh_c is rhCube, etc. 
- #we do not want the lowest 4 levels in the cubes - gh_c = gh_c[4:, :, :] - t_c = t_c[4:, :, :] - rh_c = rh_c[4:, :, :] - pvv_c = pvv_c[4:, :, :] - - - print "Got", len(t_c), "t grids and", len(rh_c), "rh grids" - - # Some thresholds used throughout the tool - dryRH = 75.0 # dry atm below this value - lrMin = 10.0 # lapse rate minimum - lrMax = 6.5 # laspe rate maximum - lrMaxAdj = 0.3 # max lapse rate adjustment value - - # extract the shapes and make some variables - #cubeShape = (len(t_c) - 1, t_c.shape[1], t_c.shape[2]) - cubeShape = (len(t_c), t_c.shape[1], t_c.shape[2]) - gridShape = (t_c.shape[1], t_c.shape[2]) - layerSR = zeros(cubeShape, dtype = float32) - pvvAvg = zeros(cubeShape, dtype = float32) - pvvSum = zeros(gridShape, dtype = float32) - - #print "cubeShape = ", cubeShape - - for i in range(len(gh_c) - 1): - #for i in range(len(gh_c)): - #print "processing layer", gh_c[i] - # calculate the average temp and rh in the layer - avgTemp = t_c[i] - 273.15 # Convert to C - avgRH = rh_c[i] - - # get the base snowRatio based on the avgTemp - layerSR[i] = self.baseSnowRatio(avgTemp) - - # adjust snowRatio based on lapseRate - #lr = -(t_c[i+1] - t_c[i]) - #lrAdj = where(greater_equal(lr,6.5), 1.0 + ((lr - lrMin) / (lrMax - lrMin)) * lrMaxAdj, float32(1.0)) - #layerSR[i] = layerSR[i] * lrAdj - - # Calc avg pressure vertical velocity, scale based on RH and sum - # reverse the pvvAvg sign so up is positive - pvvAvg[i] = -10 * (pvv_c[i]) - # clip downward vertical velocities - pvvAvg[i][less(pvvAvg[i], 0.0)] = 0.0 - # Scale vertical velocity as a function of the square of RH. - # This scaling will efectively negate a snowratio contribution in - # layers that are dry. 
- pvvAvg[i] = where(less(avgRH, 80.0), pvvAvg[i] * ((avgRH * avgRH) / 6400.0), pvvAvg[i]) - pvvSum = pvvSum + pvvAvg[i] - - # Normalize the layerSnowRatio based on the pvv fraction of the total - totalSnowRatio = zeros(gridShape, dtype = float32) - #tweak the pvvSum grid to avoid division by zero - pvvSum[less_equal(pvvSum, 0.0)] = .0001 - for i in range(len(layerSR)): - srGrid = layerSR[i] * pvvAvg[i] / pvvSum - totalSnowRatio = totalSnowRatio + srGrid - - # Finally clip the snowRatio to zero under two conditions - # cube where min colum temp > -8.0C and rh > 75% - # This is basically Baumgardt - Top Down Approach - No ice No dice! - mask = logical_and(less(t_c, 265.15), greater_equal(rh_c, 50.0)) - mask = sum(mask) # reduce to single level by adding bits verically - totalSnowRatio[equal(mask, 0)] = 0.0 - - thicknessSnowRatio = zeros(gridShape, dtype = float32) - -######################################################### -# Pick an applicable thickness scheme for your area - - myThickness = "850-700" - #myThickness = "925-700" - #myThickness = "850-650" - #myThickness = "800-600" - #myThickness = "750-550" - -########################################################## - - if myThickness == "850-700": - thicknessSnowRatio = 20.0 - pow(((gh_MB700 - gh_MB850) - 1437.0) / 29.0 , 2) - elif myThickness == "925-700": - thicknessSnowRatio = 20.0 - pow(((gh_MB700 - gh_MB925) - 2063.0) / 41.0 , 2) - elif myThickness == "850-650": - thicknessSnowRatio = 20.0 - pow(((gh_MB650 - gh_MB850) - 1986.0) / 39.0 , 2) - elif myThickness == "800-600": - thicknessSnowRatio = 20.0 - pow(((gh_MB600 - gh_MB800) - 2130.0) / 42.0 , 2) - else: # "750-500" - thicknessSnowRatio = 20.0 - pow(((gh_MB550 - gh_MB750) - 2296.0) / 45.0 , 2) - - thicknessSnowRatio[less(thicknessSnowRatio, 0.0)] = 0.0 - - totalSnowRatio = (totalSnowRatio * 0.50) + (thicknessSnowRatio * 0.50) - totalSnowRatio = where(less_equal(pvvSum, 100.0), (totalSnowRatio * 0.01 * pvvSum) + (thicknessSnowRatio * (1.0 - pvvSum * 
0.01)), totalSnowRatio) - totalSnowRatio = where(less(pvvSum, 1.0), thicknessSnowRatio, totalSnowRatio) - - # If there's any layer above 0.0C, snowRatio gets 0 - mask = greater(t_c, 272.65) - mask = sum(mask) # reduce to single level by adding bits vertically - # if mask == 0, nowhere in the column is temp < 0.5C - totalSnowRatio[not_equal(mask, 0)] = 0.0 - - #Calculate Snowfall - taper to zero from 31 to 34 F. - snowfall = QPF * totalSnowRatio - snowfall = where(greater(T, 31.0), pow(35.0 - T, 2) / 16.0 * snowfall , snowfall) - snowfall[greater(T, 35.0)] = 0.0 - - # Return the new value - return snowfall - - - ### Given a grid of temperature in Celcius, this method computes - ### the base snowRatio based on the spline curve as defined by the - ### coefficients. - def baseSnowRatio(self, tGrid): - # set up the spline coefficients - tThresh = [-30.0, -21.0, -18.0, -15.0, -12.0, -10.0, -8.0, -5.0, -3.0, 2.0] - a = [9.0, 21.0, 31.0, 35.0, 26.0, 15.0, 9.0, 5.0, 4.0] - b = [0.4441, 3.1119, 2.8870, -0.6599, -5.2475, -4.5685, -1.9786, -0.7544, -0.3329] - c = [0.0, 0.2964, -0.3714, -0.8109, -0.7183, 1.0578, 0.2372, 0.1709, 0.0399] - d = [0.0110, -0.0742, -0.0488, 0.0103, 0.2960, -0.1368, -0.0074, -0.0218, -0.0027] - - # Initialize the coeficient grids - aGrid = self.newGrid(a[-1]) #last value in list - bGrid = self.newGrid(b[-1]) - cGrid = self.newGrid(c[-1]) - dGrid = self.newGrid(d[-1]) - tDiff = zeros(tGrid.shape, dtype = float) - - # define grids of coefficients based on tGrid - for i in range(len(tThresh) - 1): - mask1 = greater_equal(tGrid, tThresh[i]) - mask2 = less(tGrid, tThresh[i + 1]) - mask = logical_and(mask1, mask2) # area b/w threshold - tDiff = where(mask, tGrid - tThresh[i], tDiff) - aGrid = where(mask, a[i], aGrid) - bGrid = where(mask, b[i], bGrid) - cGrid = where(mask, c[i], cGrid) - dGrid = where(mask, d[i], dGrid) - - # Do the calcuation using the grids of spline coefficients - baseRatio = aGrid + bGrid * tDiff + cGrid * tDiff * tDiff \ - + dGrid * 
pow(tDiff, 3) - - # Clip the snowRatio grid to 10.0 where tGrid is outside limits - #baseRatio[greater(tGrid, 1.0)] = 0.0 - #baseRatio[less(tGrid, tThresh[0])] = 10.0 - - return baseRatio - - -############################################################################### -## END-- GRR Snow Init -############################################################################### - -########################################################### -## GRR SnowRatio Init - begin -## This routine does SnowRatio - Not SnowAmt! -########################################################### - def calcSnowRatio(self, gh_c, t_c, rh_c, pvv_c, gh_MB925, gh_MB800, gh_MB850, gh_MB750, gh_MB700, gh_MB650, gh_MB600, gh_MB550): - - #t_c is tCube, rh_c is rhCube, etc. - #we do not want the lowest 4 levels in the cubes - gh_c = gh_c[4:, :, :] - t_c = t_c[4:, :, :] - rh_c = rh_c[4:, :, :] - pvv_c = pvv_c[4:, :, :] - - - print "Got", len(t_c), "t grids and", len(rh_c), "rh grids" - - # Some thresholds used throughout the tool - dryRH = 75.0 # dry atm below this value - lrMin = 10.0 # lapse rate minimum - lrMax = 6.5 # laspe rate maximum - lrMaxAdj = 0.3 # max lapse rate adjustment value - - # extract the shapes and make some variables - #cubeShape = (len(t_c) - 1, t_c.shape[1], t_c.shape[2]) - cubeShape = (len(t_c), t_c.shape[1], t_c.shape[2]) - gridShape = (t_c.shape[1], t_c.shape[2]) - layerSR = zeros(cubeShape, dtype = float) - pvvAvg = zeros(cubeShape, dtype = float) - pvvSum = zeros(gridShape, dtype = float) - - #print "cubeShape = ", cubeShape - - for i in range(len(gh_c) - 1): - #for i in range(len(gh_c)): - #print "processing layer", gh_c[i] - # calculate the average temp and rh in the layer - avgTemp = t_c[i] - 273.15 # Convert to C - avgRH = rh_c[i] - - # get the base snowRatio based on the avgTemp - layerSR[i] = self.baseSnowRatio(avgTemp) - - # adjust snowRatio based on lapseRate - #lr = -(t_c[i+1] - t_c[i]) - #lrAdj = where(greater_equal(lr,6.5), 1.0 + ((lr - lrMin) / (lrMax - 
lrMin)) * lrMaxAdj, float32(1.0)) - #layerSR[i] = layerSR[i] * lrAdj - - # Calc avg pressure vertical velocity, scale based on RH and sum - # reverse the pvvAvg sign so up is positive - pvvAvg[i] = -10 * (pvv_c[i]) - # clip downward vertical velocities - pvvAvg[i][less(pvvAvg[i], 0.0)] = 0.0 - # Scale vertical velocity as a function of the square of RH. - # This scaling will efectively negate a snowratio contribution in - # layers that are dry. - pvvAvg[i] = where(less(avgRH, 80.0), pvvAvg[i] * ((avgRH * avgRH) / 6400.0), pvvAvg[i]) - pvvSum = pvvSum + pvvAvg[i] - - # Normalize the layerSnowRatio based on the pvv fraction of the total - totalSnowRatio = zeros(gridShape, dtype = float) - #tweak the pvvSum grid to avoid division by zero - pvvSum[less_equal(pvvSum, 0.0)] = .0001 - - for i in range(len(layerSR)): - srGrid = layerSR[i] * pvvAvg[i] / pvvSum - totalSnowRatio = totalSnowRatio + srGrid - - # Finally clip the snowRatio to zero under two conditions - # cube where min colum temp > -8.0C and rh > 75% - # This is basically Baumgardt - Top Down Approach - No ice No dice! 
- mask = logical_and(less(t_c, 265.15), greater_equal(rh_c, 50.0)) - mask = sum(mask) # reduce to single level by adding bits verically - totalSnowRatio[equal(mask, 0)] = 0.0 - - thicknessSnowRatio = zeros(gridShape, dtype=float) - -######################################################### -# Pick an applicable thickness scheme for your area - - myThickness = "850-700" - #myThickness = "925-700" - #myThickness = "850-650" - #myThickness = "800-600" - #myThickness = "750-550" - -########################################################## - - if myThickness == "850-700": - thicknessSnowRatio = 20.0 - pow(((gh_MB700 - gh_MB850) - 1437.0) / 29.0 , 2) - elif myThickness == "925-700": - thicknessSnowRatio = 20.0 - pow(((gh_MB700 - gh_MB925) - 2063.0) / 41.0 , 2) - elif myThickness == "850-650": - thicknessSnowRatio = 20.0 - pow(((gh_MB650 - gh_MB850) - 1986.0) / 39.0 , 2) - elif myThickness == "800-600": - thicknessSnowRatio = 20.0 - pow(((gh_MB600 - gh_MB800) - 2130.0) / 42.0 , 2) - else: # "750-500" - thicknessSnowRatio = 20.0 - pow(((gh_MB550 - gh_MB750) - 2296.0) / 45.0 , 2) - - - - thicknessSnowRatio[less(thicknessSnowRatio, 0.0)] = 0.0 - totalSnowRatio = (totalSnowRatio * 0.50) + (thicknessSnowRatio * 0.50) - totalSnowRatio = where(less_equal(pvvSum, 100.0), (totalSnowRatio * 0.01 * pvvSum) + (thicknessSnowRatio * (1.0 - pvvSum * 0.01)), totalSnowRatio) - totalSnowRatio = where(less(pvvSum, 1.0), thicknessSnowRatio, totalSnowRatio) - - # If there's any layer above 0.0C, snowRatio gets 0 - mask = greater(t_c, 272.65) - mask = sum(mask) # reduce to single level by adding bits vertically - # if mask == 0, nowhere in the column is temp < 0.5C - totalSnowRatio[not_equal(mask, 0)] = 0.0 - - # Return the new value - return totalSnowRatio - - - ### Given a grid of temperature in Celcius, this method computes - ### the base snowRatio based on the spline curve as defined by the - ### coefficients. 
- def baseSnowRatio(self, tGrid): - # set up the spline coefficients - tThresh = [-30.0, -21.0, -18.0, -15.0, -12.0, -10.0, -8.0, -5.0, -3.0, 2.0] - a = [9.0, 21.0, 31.0, 35.0, 26.0, 15.0, 9.0, 5.0, 4.0] - b = [0.4441, 3.1119, 2.8870, -0.6599, -5.2475, -4.5685, -1.9786, -0.7544, -0.3329] - c = [0.0, 0.2964, -0.3714, -0.8109, -0.7183, 1.0578, 0.2372, 0.1709, 0.0399] - d = [0.0110, -0.0742, -0.0488, 0.0103, 0.2960, -0.1368, -0.0074, -0.0218, -0.0027] - - # Initialize the coeficient grids - aGrid = self.newGrid(a[-1]) #last value in list - bGrid = self.newGrid(b[-1]) - cGrid = self.newGrid(c[-1]) - dGrid = self.newGrid(d[-1]) - tDiff = zeros(tGrid.shape, dtype = float) - - # define grids of coefficients based on tGrid - for i in range(len(tThresh) - 1): - mask1 = greater_equal(tGrid, tThresh[i]) - mask2 = less(tGrid, tThresh[i + 1]) - mask = logical_and(mask1, mask2) # area b/w threshold - tDiff = where(mask, tGrid - tThresh[i], tDiff) - aGrid = where(mask, a[i], aGrid) - bGrid = where(mask, b[i], bGrid) - cGrid = where(mask, c[i], cGrid) - dGrid = where(mask, d[i], dGrid) - - # Do the calcuation using the grids of spline coefficients - baseRatio = aGrid + bGrid * tDiff + cGrid * tDiff * tDiff \ - + dGrid * pow(tDiff, 3) - - # Clip the snowRatio grid to 10.0 where tGrid is outside limits - #baseRatio[greater(tGrid, 1.0)] = 0.0 - #baseRatio[less(tGrid, tThresh[0])] = 10.0 - - return baseRatio - - -############################################################################### -## END-- GRR SnowRatio Init -############################################################################### -####-------------------------------------------------------------------------- -#### Calculate the Haines index based on the temp and RH cubes -#### Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -#### Default is "HIGH". 
-####-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - - -####------------------------------------------------------------------------- -#### Calculates the mixing height for the given sfc temperature, -#### temperature cube, height cube and topo -####------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - # set up masks - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) # calc. running avg - diffpt = pt[i] - runavg # calc. difference - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - m = logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)) - mh[m] = tmh[m] - - mh -= topo - mh *= 3.28 # convert to feet - return mh - -####------------------------------------------------------------------------- -#### Converts the lowest available wind level from m/s to knots -####------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert - dir = wind_FHAG10[1] # get wind dir - return (mag, dir) # assemble speed and dir into a tuple - -####------------------------------------------------------------------------- -#### Calculates the wind at 3000 feet AGL. -####------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - m = logical_and(equal(famag, -1), mask[i]) - famag[m] = wm[i][m] - - m = logical_and(equal(fadir, -1), mask[i]) - fadir[m] = wd[i][m] - fadir.clip(0, 360, fadir) # clip the value to 0, 360 - famag *= 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -####------------------------------------------------------------------------- -#### Calculates the average wind vector in the mixed layer as defined -#### by the mixing height. 
This function creates a mask that identifies -#### all grid points between the ground and the mixing height and calculates -#### a vector average of the wind field in that layer. -####------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - #tdir.clip(0, 359.5, tdir) #should this be added? - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tmag) # clip speed to 125 knots - return (tmag, tdir) - -####------------------------------------------------------------------------- -#### Uses a derivation of the Bourgouin allgorithm to calculate precipitation -#### type, and other algorithms to determine the coverage and intensity. -#### The Bourgoin technique figures out precip type from calculating how -#### long a hydrometer is exposed to alternating layers of above zero (C) and -#### below zero temperature layers. This tool calculates at each grid point -#### which of the four Bourgouin cases apply. Then the appropriate algorithm -#### is applied to that case that further refines the precip. type. Once the -#### type is determined, other algorithms are used to determine the coverage -#### and intensity. See the Weather and Forecasting Journal article Oct. 
2000, -#### "A Method to Determine Precipitation Types", by Pierre Bourgouin -####------------------------------------------------------------------------- - def calcWx(self, QPF, T, t_c, gh_c, p_SFC, topo, sli_SFC): - gh_c = gh_c[:13, :, :] - t_c = t_c[:13, :, :] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) - tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a11, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a11, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a11, a3) - topomask = logical_and(topomask, cross) - aindex[topomask] += 1 - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a22, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a22, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a22, a3) - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - # Case d (snow) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] = 2 - wx[logical_and(srmask, - 
logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, - logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) -# convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) -# wx = where(logical_and(not_equal(wx, 0), convecMask), wx + 6, wx) - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "Chc" or tcov == "": - tcov = "Sct" - key.append(key[i] + "^" + tcov - + ":T:::") - wx[less_equal(sli_SFC, -3)] += 13 - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - - return(wx, key) - -####------------------------------------------------------------------------- -#### Calculates chance of wetting rain based on QPF. 
-####------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -####------------------------------------------------------------------------- -#### Calculates Lightning Activity Level based on total precip., lifted index -#### and 3-D relative humidity. -####------------------------------------------------------------------------- - def calcLAL(self, tp_SFC, sli_SFC, rh_c, rh_BL030): - bli = sli_SFC # surface lifted index - ttp = self.newGrid(0.00001) # nearly zero grid - lal = self.newGrid(1) # initialize the return grid to 1 - # Add one to lal if QPF > 0.5 - lal[logical_and(greater(ttp, 0), greater(tp_SFC / ttp, 0.5))] += 1 - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli, -3)] += 1 - lal[less(bli, -5)] += 1 - return lal - - -def main(): - GFS40Forecaster().run() +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. 
Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from GFS80 model +## output. +## +##-------------------------------------------------------------------------- +class GFS40Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "GFS40", "GFS40") + +####-------------------------------------------------------------------------- +#### These levels will be used to create vertical soundings. These are +#### defined here since they are model dependent. +####-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB975", "MB950", "MB925", "MB900", "MB875", "MB850", + "MB825", "MB800", "MB775", "MB750", "MB725", "MB700", + "MB675", "MB650", "MB625", "MB600", "MB575", "MB550", + "MB525", "MB500", "MB450", "MB400", "MB350", "MB300"] + +####------------------------------------------------------------------------- +#### Returns the maximum of the specified MaxT and the T grids +####-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +####------------------------------------------------------------------------- +#### Returns the minimum of the specified MinT and T grids +####-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + + +####------------------------------------------------------------------------- +#### Calculates the temperature at the elevation indicated in the topo +#### grid. This tool simply interpolates the temperature value from +#### model's isobaric temperature cube. 
+####------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, t_BL150180, p_SFC, topo, stopo, gh_c, t_c): + p_SFC = p_SFC / 100 # get the surface pres. in mb + pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, + p_SFC - 105, p_SFC - 135] + temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, t_BL150180] + return self._calcT(temps, pres, topo, stopo, gh_c, t_c) + + def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) # identify points > topo + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i - 1], + log(self.pres[i]), log(self.pres[i - 1]), topo) + val[greater(val, 500)] = 500 + val.clip(-.00001, 10, val) + + m = logical_and(equal(p, -1), higher) + p[m]= exp(val)[m] + + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + + m = logical_and(equal(tmb, -1), higher) + tmb[m] = tval1[m] + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) + + m = logical_and(equal(tms, -1), greater(gh_c[i], stopo)) + tms[m] = tval2[m] + + + # define the pres. 
of each of the boundary layers + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) + gm = greater(pres[i - 1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + + m = logical_and(equal(st, -1), mask) + st[m] = val[m] + + # where topo level is above highest level in BL fields...use tmb + m = logical_and(equal(st,-1),less(p, pres[-1])) + st[m] = tmb[m] + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + m = equal(st, -1) + st[m] = (tmb - tms + temps[0])[m] + + return self.KtoF(st) + +####------------------------------------------------------------------------- +#### Calculates dew point from the specified pressure, temp and rh +#### fields. +####------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar + # at the true surface + tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. + ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio + rh = w / ws # calc relative humidity + + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +####------------------------------------------------------------------------- +#### Calculates RH from the T and Td grids +####------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +####------------------------------------------------------------------------- +#### Returns the maximum of the specified MaxRH and the RH grids +####-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +####------------------------------------------------------------------------- +#### Returns the minimum of the specified MinRH and RH grids +####-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +####------------------------------------------------------------------------- +#### Calculates QPF from the total precip field out of the model +####------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + +####------------------------------------------------------------------------- +#### Calculates sky (0-100) from the total precip field out of the model +####------------------------------------------------------------------------- + + 
########################################################################### + # Calculates Sky condition (fractional cloud cover) from model RH at + # specific pressure levels. Uses reduced equations from Walcek, MWR June + # 1994. Adds up the amount of fractional clouds calculated at each layer + # based on topography (i.e. no clouds below ground) then divides by a + # suggested number of layers to produce an average sky. + ########################################################################### + def calcSky(self, rh_c, gh_c, topo, p_SFC, rh_BL030, rh_BL3060, rh_BL6090, + rh_BL90120, rh_BL120150, rh_BL150180): + + tmpP_SFC = p_SFC.copy() + tmpP_SFC /= 100.0 # convert surfp to millibars + x = 560.0 # delta x (85km - 850km) + + # Define a percentage of f100 to use as a filter (0.0 - 1.0) + # Remember f100 is an exponential function, so changes will be more + # pronounced in the 0.5-1.0 range than the 0.0-0.5 range. + percent = 0.37 + + # Define a list of boundary layer levels to include + BL_levels = ['BL030', 'BL3060', 'BL6090', 'BL90120', 'BL120150', + 'BL150180'] + + # Construct a boundary layer pressure and RH cube + bl_Pcube = [] + bl_RHcube = [] + + # Place all BL RH levels into a cube + bl_RHcube += [rh_BL030] + bl_RHcube += [rh_BL3060] + bl_RHcube += [rh_BL6090] + bl_RHcube += [rh_BL90120] + bl_RHcube += [rh_BL120150] + bl_RHcube += [rh_BL150180] + + bl_RHcube = array(bl_RHcube) + + + # Make a cube of boundary layer pressures + for lvl in BL_levels: + if lvl == 'BL030': + tmpP = tmpP_SFC - 15.0 + elif lvl == 'BL3060': + tmpP = tmpP_SFC - 45.0 + elif lvl == 'BL6090': + tmpP = tmpP_SFC - 75.0 + elif lvl == 'BL90120': + tmpP = tmpP_SFC - 105.0 + elif lvl == 'BL120150': + tmpP = tmpP_SFC - 135.0 + elif lvl == 'BL150180': + tmpP = tmpP_SFC - 165.0 + bl_Pcube += [tmpP] + bl_Pcube = array(bl_Pcube) + + + # Make a model level pressure cube + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + + + # Convert BL pressures to sigma 
levels + BL_sigma = bl_Pcube / tmpP_SFC + del bl_Pcube + BL_sigma = clip(BL_sigma, 0.1, 1.0) + + # Convert model level pressure cube to sigma surfaces + pp = pmb / tmpP_SFC + del tmpP_SFC + pp = clip(pp, 0.1, 1.0) + + + # Account for topography in the model cube, don't need to worry about + # this with the BL cube since those are guaranteed to be above ground + tmpRH_c = where(less(gh_c, topo), float32(0.0), rh_c) + + #======================================================================= + # Create new RH and sigma cubes + + newRH_c = [] + newSigma_c = [] + + # See which boundary layer levels have pressures > lowest "signficant" + # model level pressure + for bl_i in range(BL_sigma.shape[0]): + + # Make a mask to identify which points from the boundary + # layer level have greater pressure than lowest "significant" + # model level + BL_mask = greater(BL_sigma[bl_i], pp[0]) + + # See how many points we've found + count = sum(sum(BL_mask, 1)) + + # If there are no points - don't include this BL level + if count == 0: + continue + + # Compute a temporary RH grid where it is lower than the lowest + # "significant" model level data + tmpRH = where(BL_mask, bl_RHcube[bl_i], float32(0.0)) + + + # Compute a temporary sigma grid for this boundary layer level + # where it is lower than the lowest "significant" model level + tmpSigma = where(BL_mask, BL_sigma[bl_i], float32(0.0)) + + # Insert this level into the new RH and sigma cubes + newRH_c += [tmpRH] + newSigma_c += [tmpSigma] + + + # Add lowest "significant" model level to RH and sigma cubes + newRH_c += [tmpRH_c[0]] + newSigma_c += [pp[0]] + + + # Insert boundary layer RH into RH cube where appropriate + for lvl in range(1, len(self.levels())): + + # Look at each boundary layer level + for bl_i in range(BL_sigma.shape[0]): + + # Make a mask to identify which points from the boundary + # layer level fall between the surrounding "significant" + # model levels + BL_mask = logical_and(greater(BL_sigma[bl_i], pp[lvl]), + 
less(BL_sigma[bl_i], pp[lvl - 1])) + + # See how many points we've found + count = sum(sum(BL_mask, 1)) + + # If there are no points - don't include this BL level + if count == 0: + continue + + # Compute a temporary RH grid where it is between the two + # "significant" model level data + tmpRH = where(BL_mask, bl_RHcube[bl_i], float32(0.0)) + + + # Compute a temporary sigma grid for this boundary layer level + # where it is between the two "significant" model levels + tmpSigma = where(BL_mask, BL_sigma[bl_i], float32(0.0)) + + # Insert this level into the new RH and sigma cubes + newRH_c += [tmpRH] + newSigma_c += [tmpSigma] + + + # Add top of layer we just examined to RH and sigma cube + newRH_c += [tmpRH_c[lvl]] + newSigma_c += [pp[lvl]] + + del bl_RHcube + del BL_sigma + del tmpRH_c + + # Finish off the new cubes + newRH_c = array(newRH_c) + newSigma_c = array(newSigma_c) + + # Determine maximum possible sky fraction + fmax = 78.0 + x / 15.5 + + # Compute sky fraction for both pressure cubes + f100 = where(less(newSigma_c, 0.7), + fmax * (newSigma_c - 0.1) / 0.6, + 30.0 + (1.0 - newSigma_c) * (fmax - 30.0) / 0.3) + + # Compute RH depression at 37% f100 [ (1-RHe) in Walcek ] + c = 0.196 + (0.76 - x / 2834.0) * (1.0 - newSigma_c) + + del newSigma_c + + # Compute critical RH threshold to use as a filter + # Note (percent * f100)/f100 = percent + try: + rhCrit = log(percent) * c + 1.0 + except: + rhCrit = 0.0 + + # Ensure "critical RH" is valid + rhCrit = clip(rhCrit, 0.0, 1.0) + + # Compute sky fraction for the model cube + c = (newRH_c / 100.0 - 1.0) / c + c = exp(c) + f = minimum(f100 * c, 100.0) + + # Where RH is less than the critical value, set it to 0 contribution + f[less(newRH_c / 100.0, rhCrit)] = 0.0 + + del newRH_c + + # Compress cubes vertically + f = self.squishZ(f, (f.shape[0] / 5) - 1) # was 5 + + # Convert sky fractions to an actual percentage + f[4] *= 0.25 + f /= 100.0 + + sky = f[0] + for i in range(1, f.shape[0]): + sky = sky + f[i] - sky * 
f[i] + + grid = sky * 100.0 + + return grid + +####------------------------------------------------------------------------- +#### Calculates Prob. of Precip. based on QPF and RH cube. Where there +#### is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +#### of QPF < 0.2 raise the PoP if it's very humid. +####------------------------------------------------------------------------- + def calcPoP(self, gh_c, rh_c, QPF, topo): + rhavg = where(less(gh_c, topo), float32(-1), rh_c) + rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 + count = not_equal(rhavg, -1) + rhavg[equal(rhavg, -1)] = 0 + count = add.reduce(count, 0, dtype=float32) + rhavg = add.reduce(rhavg, 0) + ## add this much based on humidity only + dpop = where(count, rhavg / (count + .001), 0) - 70.0 + dpop[less(dpop, -30)] = -30 + ## calculate the base PoP + pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) + pop += dpop # add the adjustment based on humidity + pop = clip(pop, 0, 100) # clip to 100% + return pop + +####------------------------------------------------------------------------- +#### Calculates the Freezing level based on height and temperature +#### cubes. Finds the height at which freezing occurs. +####------------------------------------------------------------------------- + def calcFzLevel(self, gh_FRZ): + return gh_FRZ * 3.28 + +####------------------------------------------------------------------------- +#### Calculates the Snow level based on wet-bulb zero height. 
+####------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb = clip(pmb, 1, 1050) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc = clip(tc, -120, 60) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + + m = logical_and(equal(snow, -1), less_equal(wetb[i], 0)) + snow[m] = val[m] + # + # convert to feet + # + snow *= 3.28 + + return snow + +####------------------------------------------------------------------------- +#### Calculates Snow amount based on the Temp, Freezing level, QPF, +#### topo and Weather grid +####------------------------------------------------------------------------- +# def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): +# # figure out the snow to liquid ratio +# m1 = less(T, 9) +# m2 = greater_equal(T, 30) +# snowr = T * -0.5 + 22.5 +# snowr[m1] = float32(20)) +# snowr[m2] = float32(0) +# # calc. snow amount based on the QPF and the ratio +# snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), +# snowr * QPF, float32(0)) +# # Only make snow at points where the weather is snow +# snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) +# snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), +# equal(Wx[0], 9))) +# snowamt[logical_not(snowmask)] = 0 +# return snowamt + +########################################################### +## GRR Snow Init - begin +## This Routine Does SnowAmt - Not SnowRatio +########################################################### + def calcSnowAmt(self, T, QPF, gh_c, t_c, rh_c, pvv_c, gh_MB925, gh_MB800, gh_MB850, gh_MB700, gh_MB750, gh_MB650, gh_MB600, gh_MB550): + + #t_c is tCube, rh_c is rhCube, etc. 
+ #we do not want the lowest 4 levels in the cubes + gh_c = gh_c[4:, :, :] + t_c = t_c[4:, :, :] + rh_c = rh_c[4:, :, :] + pvv_c = pvv_c[4:, :, :] + + + print("Got", len(t_c), "t grids and", len(rh_c), "rh grids") + + # Some thresholds used throughout the tool + dryRH = 75.0 # dry atm below this value + lrMin = 10.0 # lapse rate minimum + lrMax = 6.5 # laspe rate maximum + lrMaxAdj = 0.3 # max lapse rate adjustment value + + # extract the shapes and make some variables + #cubeShape = (len(t_c) - 1, t_c.shape[1], t_c.shape[2]) + cubeShape = (len(t_c), t_c.shape[1], t_c.shape[2]) + gridShape = (t_c.shape[1], t_c.shape[2]) + layerSR = zeros(cubeShape, dtype = float32) + pvvAvg = zeros(cubeShape, dtype = float32) + pvvSum = zeros(gridShape, dtype = float32) + + #print "cubeShape = ", cubeShape + + for i in range(len(gh_c) - 1): + #for i in range(len(gh_c)): + #print "processing layer", gh_c[i] + # calculate the average temp and rh in the layer + avgTemp = t_c[i] - 273.15 # Convert to C + avgRH = rh_c[i] + + # get the base snowRatio based on the avgTemp + layerSR[i] = self.baseSnowRatio(avgTemp) + + # adjust snowRatio based on lapseRate + #lr = -(t_c[i+1] - t_c[i]) + #lrAdj = where(greater_equal(lr,6.5), 1.0 + ((lr - lrMin) / (lrMax - lrMin)) * lrMaxAdj, float32(1.0)) + #layerSR[i] = layerSR[i] * lrAdj + + # Calc avg pressure vertical velocity, scale based on RH and sum + # reverse the pvvAvg sign so up is positive + pvvAvg[i] = -10 * (pvv_c[i]) + # clip downward vertical velocities + pvvAvg[i][less(pvvAvg[i], 0.0)] = 0.0 + # Scale vertical velocity as a function of the square of RH. + # This scaling will efectively negate a snowratio contribution in + # layers that are dry. 
+ pvvAvg[i] = where(less(avgRH, 80.0), pvvAvg[i] * ((avgRH * avgRH) / 6400.0), pvvAvg[i]) + pvvSum = pvvSum + pvvAvg[i] + + # Normalize the layerSnowRatio based on the pvv fraction of the total + totalSnowRatio = zeros(gridShape, dtype = float32) + #tweak the pvvSum grid to avoid division by zero + pvvSum[less_equal(pvvSum, 0.0)] = .0001 + for i in range(len(layerSR)): + srGrid = layerSR[i] * pvvAvg[i] / pvvSum + totalSnowRatio = totalSnowRatio + srGrid + + # Finally clip the snowRatio to zero under two conditions + # cube where min colum temp > -8.0C and rh > 75% + # This is basically Baumgardt - Top Down Approach - No ice No dice! + mask = logical_and(less(t_c, 265.15), greater_equal(rh_c, 50.0)) + mask = sum(mask) # reduce to single level by adding bits verically + totalSnowRatio[equal(mask, 0)] = 0.0 + + thicknessSnowRatio = zeros(gridShape, dtype = float32) + +######################################################### +# Pick an applicable thickness scheme for your area + + myThickness = "850-700" + #myThickness = "925-700" + #myThickness = "850-650" + #myThickness = "800-600" + #myThickness = "750-550" + +########################################################## + + if myThickness == "850-700": + thicknessSnowRatio = 20.0 - pow(((gh_MB700 - gh_MB850) - 1437.0) / 29.0 , 2) + elif myThickness == "925-700": + thicknessSnowRatio = 20.0 - pow(((gh_MB700 - gh_MB925) - 2063.0) / 41.0 , 2) + elif myThickness == "850-650": + thicknessSnowRatio = 20.0 - pow(((gh_MB650 - gh_MB850) - 1986.0) / 39.0 , 2) + elif myThickness == "800-600": + thicknessSnowRatio = 20.0 - pow(((gh_MB600 - gh_MB800) - 2130.0) / 42.0 , 2) + else: # "750-500" + thicknessSnowRatio = 20.0 - pow(((gh_MB550 - gh_MB750) - 2296.0) / 45.0 , 2) + + thicknessSnowRatio[less(thicknessSnowRatio, 0.0)] = 0.0 + + totalSnowRatio = (totalSnowRatio * 0.50) + (thicknessSnowRatio * 0.50) + totalSnowRatio = where(less_equal(pvvSum, 100.0), (totalSnowRatio * 0.01 * pvvSum) + (thicknessSnowRatio * (1.0 - pvvSum * 
0.01)), totalSnowRatio) + totalSnowRatio = where(less(pvvSum, 1.0), thicknessSnowRatio, totalSnowRatio) + + # If there's any layer above 0.0C, snowRatio gets 0 + mask = greater(t_c, 272.65) + mask = sum(mask) # reduce to single level by adding bits vertically + # if mask == 0, nowhere in the column is temp < 0.5C + totalSnowRatio[not_equal(mask, 0)] = 0.0 + + #Calculate Snowfall - taper to zero from 31 to 34 F. + snowfall = QPF * totalSnowRatio + snowfall = where(greater(T, 31.0), pow(35.0 - T, 2) / 16.0 * snowfall , snowfall) + snowfall[greater(T, 35.0)] = 0.0 + + # Return the new value + return snowfall + + + ### Given a grid of temperature in Celcius, this method computes + ### the base snowRatio based on the spline curve as defined by the + ### coefficients. + def baseSnowRatio(self, tGrid): + # set up the spline coefficients + tThresh = [-30.0, -21.0, -18.0, -15.0, -12.0, -10.0, -8.0, -5.0, -3.0, 2.0] + a = [9.0, 21.0, 31.0, 35.0, 26.0, 15.0, 9.0, 5.0, 4.0] + b = [0.4441, 3.1119, 2.8870, -0.6599, -5.2475, -4.5685, -1.9786, -0.7544, -0.3329] + c = [0.0, 0.2964, -0.3714, -0.8109, -0.7183, 1.0578, 0.2372, 0.1709, 0.0399] + d = [0.0110, -0.0742, -0.0488, 0.0103, 0.2960, -0.1368, -0.0074, -0.0218, -0.0027] + + # Initialize the coeficient grids + aGrid = self.newGrid(a[-1]) #last value in list + bGrid = self.newGrid(b[-1]) + cGrid = self.newGrid(c[-1]) + dGrid = self.newGrid(d[-1]) + tDiff = zeros(tGrid.shape, dtype = float) + + # define grids of coefficients based on tGrid + for i in range(len(tThresh) - 1): + mask1 = greater_equal(tGrid, tThresh[i]) + mask2 = less(tGrid, tThresh[i + 1]) + mask = logical_and(mask1, mask2) # area b/w threshold + tDiff = where(mask, tGrid - tThresh[i], tDiff) + aGrid = where(mask, a[i], aGrid) + bGrid = where(mask, b[i], bGrid) + cGrid = where(mask, c[i], cGrid) + dGrid = where(mask, d[i], dGrid) + + # Do the calcuation using the grids of spline coefficients + baseRatio = aGrid + bGrid * tDiff + cGrid * tDiff * tDiff \ + + dGrid * 
pow(tDiff, 3) + + # Clip the snowRatio grid to 10.0 where tGrid is outside limits + #baseRatio[greater(tGrid, 1.0)] = 0.0 + #baseRatio[less(tGrid, tThresh[0])] = 10.0 + + return baseRatio + + +############################################################################### +## END-- GRR Snow Init +############################################################################### + +########################################################### +## GRR SnowRatio Init - begin +## This routine does SnowRatio - Not SnowAmt! +########################################################### + def calcSnowRatio(self, gh_c, t_c, rh_c, pvv_c, gh_MB925, gh_MB800, gh_MB850, gh_MB750, gh_MB700, gh_MB650, gh_MB600, gh_MB550): + + #t_c is tCube, rh_c is rhCube, etc. + #we do not want the lowest 4 levels in the cubes + gh_c = gh_c[4:, :, :] + t_c = t_c[4:, :, :] + rh_c = rh_c[4:, :, :] + pvv_c = pvv_c[4:, :, :] + + + print("Got", len(t_c), "t grids and", len(rh_c), "rh grids") + + # Some thresholds used throughout the tool + dryRH = 75.0 # dry atm below this value + lrMin = 10.0 # lapse rate minimum + lrMax = 6.5 # laspe rate maximum + lrMaxAdj = 0.3 # max lapse rate adjustment value + + # extract the shapes and make some variables + #cubeShape = (len(t_c) - 1, t_c.shape[1], t_c.shape[2]) + cubeShape = (len(t_c), t_c.shape[1], t_c.shape[2]) + gridShape = (t_c.shape[1], t_c.shape[2]) + layerSR = zeros(cubeShape, dtype = float) + pvvAvg = zeros(cubeShape, dtype = float) + pvvSum = zeros(gridShape, dtype = float) + + #print "cubeShape = ", cubeShape + + for i in range(len(gh_c) - 1): + #for i in range(len(gh_c)): + #print "processing layer", gh_c[i] + # calculate the average temp and rh in the layer + avgTemp = t_c[i] - 273.15 # Convert to C + avgRH = rh_c[i] + + # get the base snowRatio based on the avgTemp + layerSR[i] = self.baseSnowRatio(avgTemp) + + # adjust snowRatio based on lapseRate + #lr = -(t_c[i+1] - t_c[i]) + #lrAdj = where(greater_equal(lr,6.5), 1.0 + ((lr - lrMin) / (lrMax - 
lrMin)) * lrMaxAdj, float32(1.0)) + #layerSR[i] = layerSR[i] * lrAdj + + # Calc avg pressure vertical velocity, scale based on RH and sum + # reverse the pvvAvg sign so up is positive + pvvAvg[i] = -10 * (pvv_c[i]) + # clip downward vertical velocities + pvvAvg[i][less(pvvAvg[i], 0.0)] = 0.0 + # Scale vertical velocity as a function of the square of RH. + # This scaling will efectively negate a snowratio contribution in + # layers that are dry. + pvvAvg[i] = where(less(avgRH, 80.0), pvvAvg[i] * ((avgRH * avgRH) / 6400.0), pvvAvg[i]) + pvvSum = pvvSum + pvvAvg[i] + + # Normalize the layerSnowRatio based on the pvv fraction of the total + totalSnowRatio = zeros(gridShape, dtype = float) + #tweak the pvvSum grid to avoid division by zero + pvvSum[less_equal(pvvSum, 0.0)] = .0001 + + for i in range(len(layerSR)): + srGrid = layerSR[i] * pvvAvg[i] / pvvSum + totalSnowRatio = totalSnowRatio + srGrid + + # Finally clip the snowRatio to zero under two conditions + # cube where min colum temp > -8.0C and rh > 75% + # This is basically Baumgardt - Top Down Approach - No ice No dice! 
+ mask = logical_and(less(t_c, 265.15), greater_equal(rh_c, 50.0)) + mask = sum(mask) # reduce to single level by adding bits verically + totalSnowRatio[equal(mask, 0)] = 0.0 + + thicknessSnowRatio = zeros(gridShape, dtype=float) + +######################################################### +# Pick an applicable thickness scheme for your area + + myThickness = "850-700" + #myThickness = "925-700" + #myThickness = "850-650" + #myThickness = "800-600" + #myThickness = "750-550" + +########################################################## + + if myThickness == "850-700": + thicknessSnowRatio = 20.0 - pow(((gh_MB700 - gh_MB850) - 1437.0) / 29.0 , 2) + elif myThickness == "925-700": + thicknessSnowRatio = 20.0 - pow(((gh_MB700 - gh_MB925) - 2063.0) / 41.0 , 2) + elif myThickness == "850-650": + thicknessSnowRatio = 20.0 - pow(((gh_MB650 - gh_MB850) - 1986.0) / 39.0 , 2) + elif myThickness == "800-600": + thicknessSnowRatio = 20.0 - pow(((gh_MB600 - gh_MB800) - 2130.0) / 42.0 , 2) + else: # "750-500" + thicknessSnowRatio = 20.0 - pow(((gh_MB550 - gh_MB750) - 2296.0) / 45.0 , 2) + + + + thicknessSnowRatio[less(thicknessSnowRatio, 0.0)] = 0.0 + totalSnowRatio = (totalSnowRatio * 0.50) + (thicknessSnowRatio * 0.50) + totalSnowRatio = where(less_equal(pvvSum, 100.0), (totalSnowRatio * 0.01 * pvvSum) + (thicknessSnowRatio * (1.0 - pvvSum * 0.01)), totalSnowRatio) + totalSnowRatio = where(less(pvvSum, 1.0), thicknessSnowRatio, totalSnowRatio) + + # If there's any layer above 0.0C, snowRatio gets 0 + mask = greater(t_c, 272.65) + mask = sum(mask) # reduce to single level by adding bits vertically + # if mask == 0, nowhere in the column is temp < 0.5C + totalSnowRatio[not_equal(mask, 0)] = 0.0 + + # Return the new value + return totalSnowRatio + + + ### Given a grid of temperature in Celcius, this method computes + ### the base snowRatio based on the spline curve as defined by the + ### coefficients. 
+ def baseSnowRatio(self, tGrid): + # set up the spline coefficients + tThresh = [-30.0, -21.0, -18.0, -15.0, -12.0, -10.0, -8.0, -5.0, -3.0, 2.0] + a = [9.0, 21.0, 31.0, 35.0, 26.0, 15.0, 9.0, 5.0, 4.0] + b = [0.4441, 3.1119, 2.8870, -0.6599, -5.2475, -4.5685, -1.9786, -0.7544, -0.3329] + c = [0.0, 0.2964, -0.3714, -0.8109, -0.7183, 1.0578, 0.2372, 0.1709, 0.0399] + d = [0.0110, -0.0742, -0.0488, 0.0103, 0.2960, -0.1368, -0.0074, -0.0218, -0.0027] + + # Initialize the coeficient grids + aGrid = self.newGrid(a[-1]) #last value in list + bGrid = self.newGrid(b[-1]) + cGrid = self.newGrid(c[-1]) + dGrid = self.newGrid(d[-1]) + tDiff = zeros(tGrid.shape, dtype = float) + + # define grids of coefficients based on tGrid + for i in range(len(tThresh) - 1): + mask1 = greater_equal(tGrid, tThresh[i]) + mask2 = less(tGrid, tThresh[i + 1]) + mask = logical_and(mask1, mask2) # area b/w threshold + tDiff = where(mask, tGrid - tThresh[i], tDiff) + aGrid = where(mask, a[i], aGrid) + bGrid = where(mask, b[i], bGrid) + cGrid = where(mask, c[i], cGrid) + dGrid = where(mask, d[i], dGrid) + + # Do the calcuation using the grids of spline coefficients + baseRatio = aGrid + bGrid * tDiff + cGrid * tDiff * tDiff \ + + dGrid * pow(tDiff, 3) + + # Clip the snowRatio grid to 10.0 where tGrid is outside limits + #baseRatio[greater(tGrid, 1.0)] = 0.0 + #baseRatio[less(tGrid, tThresh[0])] = 10.0 + + return baseRatio + + +############################################################################### +## END-- GRR SnowRatio Init +############################################################################### +####-------------------------------------------------------------------------- +#### Calculate the Haines index based on the temp and RH cubes +#### Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +#### Default is "HIGH". 
+####-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + + +####------------------------------------------------------------------------- +#### Calculates the mixing height for the given sfc temperature, +#### temperature cube, height cube and topo +####------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + # set up masks + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) # calc. running avg + diffpt = pt[i] - runavg # calc. difference + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + m = logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)) + mh[m] = tmh[m] + + mh -= topo + mh *= 3.28 # convert to feet + return mh + +####------------------------------------------------------------------------- +#### Converts the lowest available wind level from m/s to knots +####------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert + dir = wind_FHAG10[1] # get wind dir + return (mag, dir) # assemble speed and dir into a tuple + +####------------------------------------------------------------------------- +#### Calculates the wind at 3000 feet AGL. +####------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + m = logical_and(equal(famag, -1), mask[i]) + famag[m] = wm[i][m] + + m = logical_and(equal(fadir, -1), mask[i]) + fadir[m] = wd[i][m] + fadir.clip(0, 360, fadir) # clip the value to 0, 360 + famag *= 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +####------------------------------------------------------------------------- +#### Calculates the average wind vector in the mixed layer as defined +#### by the mixing height. 
This function creates a mask that identifies +#### all grid points between the ground and the mixing height and calculates +#### a vector average of the wind field in that layer. +####------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + #tdir.clip(0, 359.5, tdir) #should this be added? + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tmag) # clip speed to 125 knots + return (tmag, tdir) + +####------------------------------------------------------------------------- +#### Uses a derivation of the Bourgouin allgorithm to calculate precipitation +#### type, and other algorithms to determine the coverage and intensity. +#### The Bourgoin technique figures out precip type from calculating how +#### long a hydrometer is exposed to alternating layers of above zero (C) and +#### below zero temperature layers. This tool calculates at each grid point +#### which of the four Bourgouin cases apply. Then the appropriate algorithm +#### is applied to that case that further refines the precip. type. Once the +#### type is determined, other algorithms are used to determine the coverage +#### and intensity. See the Weather and Forecasting Journal article Oct. 
2000, +#### "A Method to Determine Precipitation Types", by Pierre Bourgouin +####------------------------------------------------------------------------- + def calcWx(self, QPF, T, t_c, gh_c, p_SFC, topo, sli_SFC): + gh_c = gh_c[:13, :, :] + t_c = t_c[:13, :, :] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) + tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a11, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a11, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a11, a3) + topomask = logical_and(topomask, cross) + aindex[topomask] += 1 + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a22, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a22, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a22, a3) + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + # Case d (snow) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 2 + wx[logical_and(srmask, + 
logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, + logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) +# convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) +# wx = where(logical_and(not_equal(wx, 0), convecMask), wx + 6, wx) + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "Chc" or tcov == "": + tcov = "Sct" + key.append(key[i] + "^" + tcov + + ":T:::") + wx[less_equal(sli_SFC, -3)] += 13 + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + + return(wx, key) + +####------------------------------------------------------------------------- +#### Calculates chance of wetting rain based on QPF. 
+####------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +####------------------------------------------------------------------------- +#### Calculates Lightning Activity Level based on total precip., lifted index +#### and 3-D relative humidity. +####------------------------------------------------------------------------- + def calcLAL(self, tp_SFC, sli_SFC, rh_c, rh_BL030): + bli = sli_SFC # surface lifted index + ttp = self.newGrid(0.00001) # nearly zero grid + lal = self.newGrid(1) # initialize the return grid to 1 + # Add one to lal if QPF > 0.5 + lal[logical_and(greater(ttp, 0), greater(tp_SFC / ttp, 0.5))] += 1 + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli, -3)] += 1 + lal[less(bli, -5)] += 1 + return lal + + +def main(): + GFS40Forecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS75.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS75.py index 398409ffc5..1424098e09 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS75.py +++ 
b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS75.py @@ -1,539 +1,539 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from GFS75 model -## output. -## -##-------------------------------------------------------------------------- -class GFS75Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "GFS75") - -##-------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. 
-##-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB950", "MB900", "MB850", - "MB800", "MB750", "MB700", - "MB650", "MB600", "MB500", "MB450", "MB400", "MB350", - "MB300"] - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - - -##------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool simply interpolates the temperature value from -## model's isobaric temperature cube. -##------------------------------------------------------------------------- - def calcT(self, t_FHAG2, stopo, topo): -# Temperature drops by .0074 C/meter per Hans' computations - elevationDiff = topo - stopo # in m. - tcorr = elevationDiff * 0.00714 #K - return self.KtoF(t_FHAG2 - tcorr) - -##------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. 
-##------------------------------------------------------------------------- - def calcTd(self, gh_c, t_c, rh_BL030, rh_MB850, rh_MB800, rh_MB750, rh_MB700, rh_MB650, - t_MB1000, t_MB900, t_MB850, t_MB800, t_MB750, t_MB700, t_MB650, topo): - - tmb = self.newGrid(-1) - - #calc sfc_temp at topo - for i in xrange(1, gh_c.shape[0]): - - #interpolate temp in this layer - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - tmb = where(logical_and(equal(tmb, -1), greater(gh_c[i], topo)), tval1, tmb) - - temp = self.KtoF(tmb) - - rh = rh_BL030 - rh = where(logical_and(greater_equal(topo, 1327),less(topo, 1828)),rh_MB850, rh) - rh = where(logical_and(greater_equal(topo, 1828),less(topo, 2438)),rh_MB800, rh) - rh = where(logical_and(greater_equal(topo, 2438),less(topo, 3048)),rh_MB750, rh) - rh = where(logical_and(greater_equal(topo, 3048),less(topo, 3657)),rh_MB700, rh) - rh = where(logical_and(greater_equal(topo, 3657),less(topo, 4267)),rh_MB650, rh) - - rh_linear = self.linear(0, 4400, 1.0, 1.001, topo) - rh = rh * rh_linear - rh = clip(rh, 3.0, 100.0) - - - rh = rh + .01 - Tc = .556 * (temp - 32.0) - - ret = .9 * Tc - ret1 = 112 + ret - ret2 = ret1 * (rh ** .125) / 1.7783 - Tdc = ret2 - 112 + .1 * Tc - Td = (1.8 * Tdc) + 32 - - return Td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, rh_BL030, rh_MB850, rh_MB800, rh_MB750, rh_MB700, rh_MB650, topo): - rh = rh_BL030 - - rh = where(logical_and(greater_equal(topo, 1327),less(topo, 1828)),rh_MB850, rh) - rh = where(logical_and(greater_equal(topo, 1828),less(topo, 2438)),rh_MB800, rh) - rh = where(logical_and(greater_equal(topo, 2438),less(topo, 3048)),rh_MB750, rh) - rh = where(logical_and(greater_equal(topo, 3048),less(topo, 3657)),rh_MB700, rh) - rh = where(logical_and(greater_equal(topo, 3657),less(topo, 4267)),rh_MB650, rh) - - 
rh_linear = self.linear(0, 4400, 1.0, 1.001, topo) - - rh = rh * rh_linear - rh = clip(rh, 3.0, 100.0) - - return rh - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -##------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model -##------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - def calcSky(self, rh_c, topo): - rh_c = rh_c[:8, :, :] - rh_c[less_equal(rh_c, 25.0)] = 25.0 - rh_c[less_equal(rh_c, 100.0)] = 100.0 - - rh900 = rh_c[2] - rh850 = rh_c[3] - rh800 = rh_c[4] - rh750 = rh_c[5] - rh700 = rh_c[6] - - index900 = self.linear(25, 100, 0, 100, rh900) - index850 = self.linear(25, 100, 0, 100, rh850) - index800 = self.linear(25, 100, 0, 100, rh800) - index750 = self.linear(25, 100, 0, 125, rh750) - index700 = self.linear(25, 100, 0, 150, rh700) - - skyindex = index900 + index850 + index800 + index750 + index700 - skyindex = clip(skyindex, 0, 500) - - sky = self.linear(0, 500, 0, 100, skyindex) - - return sky - -##------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. 
-##------------------------------------------------------------------------- - def calcPoP(self, rh_c, pvv_MB700, topo): - # use only the first 11 levels (up to 500MB) - rh_c = rh_c[:8, :, :] - rh_c[less_equal(rh_c, 43.0)] = 43.0 - rh_c[greater_equal(rh_c, 100.0)] = 100.0 - - rh900 = rh_c[2] - rh850 = rh_c[3] - rh800 = rh_c[4] - rh750 = rh_c[5] - rh700 = rh_c[6] - omega700 = pvv_MB700 - - popindex900 = self.linear(43, 100, 0, 60, rh900) - popindex850 = self.linear(43, 100, 0, 70, rh850) - popindex800 = self.linear(43, 100, 0, 80, rh800) - popindex750 = self.linear(43, 100, 0, 90, rh750) - popindex700 = self.linear(43, 100, 0, 100, rh700) - - omegacorpos = self.linear(0, -2, 0, 50, omega700) - omegacorneg = self.linear(0, 1, 0, -50, omega700) - - popindex = popindex900 + popindex850 + popindex800 + popindex750 + popindex700 + omegacorpos + omegacorneg - - popindex = clip(popindex, 0, 500) - - pop = self.linear(0, 500, 0, 100, popindex) - - return pop - -##------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. -##------------------------------------------------------------------------- - def calcFzLevel(self, gh_c, t_c, topo): - fzl = self.newGrid(-1) - - # for each level in the height cube, find the freezing level - for i in xrange(gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ - * (273.15 - t_c[i - 1]) - except: - val = gh_c[i] - - ## save the height value in fzl - fzl = where(logical_and(equal(fzl, -1), - less_equal(t_c[i], 273.15)), val, fzl) - - - return fzl * 3.28 # convert to feet - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. 
-##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb = clip(pmb, 1, 1050) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc = clip(tc, -120, 60) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), - val, snow) - # - # convert to feet - # - snow = snow * 3.28 - - return snow - -##------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - snowr = T * -0.5 + 22.5 - snowr[less(T, 9)] = 20 - snowr[greater_equal(T, 30)] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), - snowr * QPF, float32(0)) - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) - snowamt[logical_not(snowmask)] = 0 - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". 
-##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - - -##------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - # set up masks - pt[logical_not(mask)] = 0 - - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) # calc. running avg - diffpt = pt[i] - runavg # calc. difference - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)), tmh, mh) - return (mh - topo) * 3.28 # convert to feet - -##------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10, wind_MB850, wind_MB800, wind_MB750, wind_MB700, wind_MB650, topo): - mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert - dir = wind_FHAG10[1] # get the wind direction - - mag = where(logical_and(greater_equal(topo, 1327), less(topo, 1828)), wind_MB850[0] * 1.94, mag) - dir = where(logical_and(greater_equal(topo, 1327), less(topo, 1828)), wind_MB850[1], dir) - - mag = where(logical_and(greater_equal(topo, 1828), less(topo, 2438)), wind_MB800[0] * 1.94, mag) - dir = where(logical_and(greater_equal(topo, 1828), less(topo, 2438)), wind_MB800[1], dir) - - mag = where(logical_and(greater_equal(topo, 2438), less(topo, 3048)), wind_MB750[0] * 1.94, mag) - dir = where(logical_and(greater_equal(topo, 2438), less(topo, 3048)), wind_MB750[1], dir) - - mag = where(logical_and(greater_equal(topo, 3048), less(topo, 3657)), wind_MB700[0] * 1.94, mag) - dir = where(logical_and(greater_equal(topo, 3048), less(topo, 3657)), wind_MB700[1], dir) - - mag = where(logical_and(greater_equal(topo, 3657), less(topo, 4267)), wind_MB650[0] * 1.94, mag) - dir = where(logical_and(greater_equal(topo, 3657), less(topo, 4267)), wind_MB650[1], dir) - - mag_cor_up = self.linear(1327, 4267, 1, 1.3, topo) - - mag_cor_lo = self.linear(1327, 50, 1, 0.7, topo) - - mag = where(logical_and(greater_equal(topo, 1327),less(topo, 4267)), mag*mag_cor_up, mag) - mag = where(logical_and(greater_equal(topo, 50),less(topo, 1327)), mag*mag_cor_lo, mag) - - dir = 
clip(dir, 0, 359.5) - - return (mag, dir) - -##------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. -##------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) - fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) - fadir = clip(fadir, 0, 360) # clip the value to 0, 360 - famag = famag * 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. 
- mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tmag) # clip speed to 125 knots - return (tmag, tdir) - - - def calcWx(self, t_MB850, t_MB700, t_MB500, rh_MB850, rh_MB700, cape_SFC, PoP): - # first calculate K-index to establish heavy rain threat - t850 = t_MB850 - 273.2 - t700 = t_MB700 - 273.2 - t500 = t_MB500 - 273.2 - rh850 = rh_MB850 - rh700 = rh_MB700 - - ratio1 = ((log10(rh850 / 100.0) / 7.5) + (t850 / (t850 + 237.3))) - td850 = ((237.3 * ratio1) / (1.0 - ratio1)) - ratio2 = ((log10(rh700 / 100.0) / 7.5) + (t700 / (t700 + 237.3))) - td700 = ((237.3 * ratio2) / (1.0 - ratio2)) - kindex = ((t850 - t500) + td850 - (t700 - td700)) - - # now on to the weather - - key = ['::::', - "Iso:RW:m::", - "Sct:RW:m::", - "Lkly:RW:m::", - "Ocnl:RW:m::", - "Iso:RW:+::", - "Sct:RW:+::", - "Lkly:RW:+::", - "Ocnl:RW:+::"] - - wx = self.empty(int8) - wx[less_equal(PoP, 14.4)] = 0 - - hvymask = greater_equal(kindex, 35) - wx[logical_and(hvymask, logical_and(greater(PoP, 14.4), less(PoP, 24.4)))] = 5 - wx[logical_and(hvymask, logical_and(greater(PoP, 24.4), less(PoP, 54.4)))] = 6 - wx[logical_and(hvymask, logical_and(greater(PoP, 54.4), less(PoP, 74.4)))] = 7 - wx[logical_and(hvymask, greater(PoP, 74.4))] = 8 - - lgtmask = less(kindex, 35) - wx[logical_and(lgtmask, logical_and(greater(PoP, 14.4), less(PoP, 24.4)))] = 1 - wx[logical_and(lgtmask, logical_and(greater(PoP, 24.4), less(PoP, 54.4)))] = 2 - wx[logical_and(lgtmask, logical_and(greater(PoP, 54.4), less(PoP, 74.4)))] = 3 - wx[logical_and(lgtmask, greater(PoP, 74.4))] = 4 - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "": - tcov = "Iso" - key.append(key[i] + "^" + tcov + ":T:::") - wx[logical_and(greater(PoP, 14.4), 
greater_equal(cape_SFC, 1000))] += 9 - - return(wx, key) - -##------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. -##------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip., lifted index -## and 3-D relative humidity. -##------------------------------------------------------------------------- - def calcLAL(self, tp_SFC, sli_SFC, rh_c, rh_BL030): - bli = sli_SFC # surface lifted index - ttp = self.newGrid(0.00001) # nearly zero grid - lal = self.newGrid(1) # initialize the return grid to 1 - # Add one to lal if QPF > 0.5 - lal[logical_and(greater(ttp, 0), greater(tp_SFC / ttp, 0.5))] += 1 - - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli, -3)] += 1 - lal[less(bli, -5)] += 1 - - return lal - -def main(): - GFS75Forecaster().run() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from GFS75 model +## output. +## +##-------------------------------------------------------------------------- +class GFS75Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "GFS75") + +##-------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. 
+##-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB950", "MB900", "MB850", + "MB800", "MB750", "MB700", + "MB650", "MB600", "MB500", "MB450", "MB400", "MB350", + "MB300"] + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + + +##------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. This tool simply interpolates the temperature value from +## model's isobaric temperature cube. +##------------------------------------------------------------------------- + def calcT(self, t_FHAG2, stopo, topo): +# Temperature drops by .0074 C/meter per Hans' computations + elevationDiff = topo - stopo # in m. + tcorr = elevationDiff * 0.00714 #K + return self.KtoF(t_FHAG2 - tcorr) + +##------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. 
+##------------------------------------------------------------------------- + def calcTd(self, gh_c, t_c, rh_BL030, rh_MB850, rh_MB800, rh_MB750, rh_MB700, rh_MB650, + t_MB1000, t_MB900, t_MB850, t_MB800, t_MB750, t_MB700, t_MB650, topo): + + tmb = self.newGrid(-1) + + #calc sfc_temp at topo + for i in range(1, gh_c.shape[0]): + + #interpolate temp in this layer + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + tmb = where(logical_and(equal(tmb, -1), greater(gh_c[i], topo)), tval1, tmb) + + temp = self.KtoF(tmb) + + rh = rh_BL030 + rh = where(logical_and(greater_equal(topo, 1327),less(topo, 1828)),rh_MB850, rh) + rh = where(logical_and(greater_equal(topo, 1828),less(topo, 2438)),rh_MB800, rh) + rh = where(logical_and(greater_equal(topo, 2438),less(topo, 3048)),rh_MB750, rh) + rh = where(logical_and(greater_equal(topo, 3048),less(topo, 3657)),rh_MB700, rh) + rh = where(logical_and(greater_equal(topo, 3657),less(topo, 4267)),rh_MB650, rh) + + rh_linear = self.linear(0, 4400, 1.0, 1.001, topo) + rh = rh * rh_linear + rh = clip(rh, 3.0, 100.0) + + + rh = rh + .01 + Tc = .556 * (temp - 32.0) + + ret = .9 * Tc + ret1 = 112 + ret + ret2 = ret1 * (rh ** .125) / 1.7783 + Tdc = ret2 - 112 + .1 * Tc + Td = (1.8 * Tdc) + 32 + + return Td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, rh_BL030, rh_MB850, rh_MB800, rh_MB750, rh_MB700, rh_MB650, topo): + rh = rh_BL030 + + rh = where(logical_and(greater_equal(topo, 1327),less(topo, 1828)),rh_MB850, rh) + rh = where(logical_and(greater_equal(topo, 1828),less(topo, 2438)),rh_MB800, rh) + rh = where(logical_and(greater_equal(topo, 2438),less(topo, 3048)),rh_MB750, rh) + rh = where(logical_and(greater_equal(topo, 3048),less(topo, 3657)),rh_MB700, rh) + rh = where(logical_and(greater_equal(topo, 3657),less(topo, 4267)),rh_MB650, rh) + + rh_linear 
= self.linear(0, 4400, 1.0, 1.001, topo) + + rh = rh * rh_linear + rh = clip(rh, 3.0, 100.0) + + return rh + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +##------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model +##------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + def calcSky(self, rh_c, topo): + rh_c = rh_c[:8, :, :] + rh_c[less_equal(rh_c, 25.0)] = 25.0 + rh_c[less_equal(rh_c, 100.0)] = 100.0 + + rh900 = rh_c[2] + rh850 = rh_c[3] + rh800 = rh_c[4] + rh750 = rh_c[5] + rh700 = rh_c[6] + + index900 = self.linear(25, 100, 0, 100, rh900) + index850 = self.linear(25, 100, 0, 100, rh850) + index800 = self.linear(25, 100, 0, 100, rh800) + index750 = self.linear(25, 100, 0, 125, rh750) + index700 = self.linear(25, 100, 0, 150, rh700) + + skyindex = index900 + index850 + index800 + index750 + index700 + skyindex = clip(skyindex, 0, 500) + + sky = self.linear(0, 500, 0, 100, skyindex) + + return sky + +##------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. 
+##------------------------------------------------------------------------- + def calcPoP(self, rh_c, pvv_MB700, topo): + # use only the first 11 levels (up to 500MB) + rh_c = rh_c[:8, :, :] + rh_c[less_equal(rh_c, 43.0)] = 43.0 + rh_c[greater_equal(rh_c, 100.0)] = 100.0 + + rh900 = rh_c[2] + rh850 = rh_c[3] + rh800 = rh_c[4] + rh750 = rh_c[5] + rh700 = rh_c[6] + omega700 = pvv_MB700 + + popindex900 = self.linear(43, 100, 0, 60, rh900) + popindex850 = self.linear(43, 100, 0, 70, rh850) + popindex800 = self.linear(43, 100, 0, 80, rh800) + popindex750 = self.linear(43, 100, 0, 90, rh750) + popindex700 = self.linear(43, 100, 0, 100, rh700) + + omegacorpos = self.linear(0, -2, 0, 50, omega700) + omegacorneg = self.linear(0, 1, 0, -50, omega700) + + popindex = popindex900 + popindex850 + popindex800 + popindex750 + popindex700 + omegacorpos + omegacorneg + + popindex = clip(popindex, 0, 500) + + pop = self.linear(0, 500, 0, 100, popindex) + + return pop + +##------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. +##------------------------------------------------------------------------- + def calcFzLevel(self, gh_c, t_c, topo): + fzl = self.newGrid(-1) + + # for each level in the height cube, find the freezing level + for i in range(gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ + * (273.15 - t_c[i - 1]) + except: + val = gh_c[i] + + ## save the height value in fzl + fzl = where(logical_and(equal(fzl, -1), + less_equal(t_c[i], 273.15)), val, fzl) + + + return fzl * 3.28 # convert to feet + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. 
+##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb = clip(pmb, 1, 1050) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc = clip(tc, -120, 60) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), + val, snow) + # + # convert to feet + # + snow = snow * 3.28 + + return snow + +##------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + snowr = T * -0.5 + 22.5 + snowr[less(T, 9)] = 20 + snowr[greater_equal(T, 30)] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), + snowr * QPF, float32(0)) + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) + snowamt[logical_not(snowmask)] = 0 + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". 
+##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + + +##------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + # set up masks + pt[logical_not(mask)] = 0 + + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) # calc. running avg + diffpt = pt[i] - runavg # calc. difference + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)), tmh, mh) + return (mh - topo) * 3.28 # convert to feet + +##------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10, wind_MB850, wind_MB800, wind_MB750, wind_MB700, wind_MB650, topo): + mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert + dir = wind_FHAG10[1] # get the wind direction + + mag = where(logical_and(greater_equal(topo, 1327), less(topo, 1828)), wind_MB850[0] * 1.94, mag) + dir = where(logical_and(greater_equal(topo, 1327), less(topo, 1828)), wind_MB850[1], dir) + + mag = where(logical_and(greater_equal(topo, 1828), less(topo, 2438)), wind_MB800[0] * 1.94, mag) + dir = where(logical_and(greater_equal(topo, 1828), less(topo, 2438)), wind_MB800[1], dir) + + mag = where(logical_and(greater_equal(topo, 2438), less(topo, 3048)), wind_MB750[0] * 1.94, mag) + dir = where(logical_and(greater_equal(topo, 2438), less(topo, 3048)), wind_MB750[1], dir) + + mag = where(logical_and(greater_equal(topo, 3048), less(topo, 3657)), wind_MB700[0] * 1.94, mag) + dir = where(logical_and(greater_equal(topo, 3048), less(topo, 3657)), wind_MB700[1], dir) + + mag = where(logical_and(greater_equal(topo, 3657), less(topo, 4267)), wind_MB650[0] * 1.94, mag) + dir = where(logical_and(greater_equal(topo, 3657), less(topo, 4267)), wind_MB650[1], dir) + + mag_cor_up = self.linear(1327, 4267, 1, 1.3, topo) + + mag_cor_lo = self.linear(1327, 50, 1, 0.7, topo) + + mag = where(logical_and(greater_equal(topo, 1327),less(topo, 4267)), mag*mag_cor_up, mag) + mag = where(logical_and(greater_equal(topo, 50),less(topo, 1327)), mag*mag_cor_lo, mag) + + dir = 
clip(dir, 0, 359.5) + + return (mag, dir) + +##------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. +##------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) + fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) + fadir = clip(fadir, 0, 360) # clip the value to 0, 360 + famag = famag * 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. +##------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. 
+ mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tmag) # clip speed to 125 knots + return (tmag, tdir) + + + def calcWx(self, t_MB850, t_MB700, t_MB500, rh_MB850, rh_MB700, cape_SFC, PoP): + # first calculate K-index to establish heavy rain threat + t850 = t_MB850 - 273.2 + t700 = t_MB700 - 273.2 + t500 = t_MB500 - 273.2 + rh850 = rh_MB850 + rh700 = rh_MB700 + + ratio1 = ((log10(rh850 / 100.0) / 7.5) + (t850 / (t850 + 237.3))) + td850 = ((237.3 * ratio1) / (1.0 - ratio1)) + ratio2 = ((log10(rh700 / 100.0) / 7.5) + (t700 / (t700 + 237.3))) + td700 = ((237.3 * ratio2) / (1.0 - ratio2)) + kindex = ((t850 - t500) + td850 - (t700 - td700)) + + # now on to the weather + + key = ['::::', + "Iso:RW:m::", + "Sct:RW:m::", + "Lkly:RW:m::", + "Ocnl:RW:m::", + "Iso:RW:+::", + "Sct:RW:+::", + "Lkly:RW:+::", + "Ocnl:RW:+::"] + + wx = self.empty(int8) + wx[less_equal(PoP, 14.4)] = 0 + + hvymask = greater_equal(kindex, 35) + wx[logical_and(hvymask, logical_and(greater(PoP, 14.4), less(PoP, 24.4)))] = 5 + wx[logical_and(hvymask, logical_and(greater(PoP, 24.4), less(PoP, 54.4)))] = 6 + wx[logical_and(hvymask, logical_and(greater(PoP, 54.4), less(PoP, 74.4)))] = 7 + wx[logical_and(hvymask, greater(PoP, 74.4))] = 8 + + lgtmask = less(kindex, 35) + wx[logical_and(lgtmask, logical_and(greater(PoP, 14.4), less(PoP, 24.4)))] = 1 + wx[logical_and(lgtmask, logical_and(greater(PoP, 24.4), less(PoP, 54.4)))] = 2 + wx[logical_and(lgtmask, logical_and(greater(PoP, 54.4), less(PoP, 74.4)))] = 3 + wx[logical_and(lgtmask, greater(PoP, 74.4))] = 4 + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "": + tcov = "Iso" + key.append(key[i] + "^" + tcov + ":T:::") + wx[logical_and(greater(PoP, 14.4), 
greater_equal(cape_SFC, 1000))] += 9 + + return(wx, key) + +##------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. +##------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip., lifted index +## and 3-D relative humidity. 
+##------------------------------------------------------------------------- + def calcLAL(self, tp_SFC, sli_SFC, rh_c, rh_BL030): + bli = sli_SFC # surface lifted index + ttp = self.newGrid(0.00001) # nearly zero grid + lal = self.newGrid(1) # initialize the return grid to 1 + # Add one to lal if QPF > 0.5 + lal[logical_and(greater(ttp, 0), greater(tp_SFC / ttp, 0.5))] += 1 + + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli, -3)] += 1 + lal[less(bli, -5)] += 1 + + return lal + +def main(): + GFS75Forecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS80.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS80.py index 3313b29c61..96c81172ba 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS80.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/GFS80.py @@ -1,563 +1,563 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * -import LogStream - -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from GFS80 model -## output. -## -##-------------------------------------------------------------------------- -class GFS80Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "GFS80", "GFS80") - -##-------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. 
-##-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB975", "MB950", "MB925", "MB900", "MB875", "MB850", - "MB825", "MB800", "MB775", "MB750", "MB725", "MB700", - "MB675", "MB650", "MB625", "MB600", "MB575", "MB550", - "MB525", "MB500", "MB450", "MB400", "MB350", "MB300"] - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - - -##------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool simply interpolates the temperature value from -## model's isobaric temperature cube. -##------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, t_BL150180, p_SFC, topo, stopo, gh_c, t_c): - p_SFC = p_SFC / 100 # get the surface pres. 
in mb - pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, - p_SFC - 105, p_SFC - 135] - temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, t_BL150180] - return self._calcT(temps, pres, topo, stopo, gh_c, t_c) - - def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) # identify points > topo - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i - 1], - log(self.pres[i]), log(self.pres[i - 1]), topo) - val[greater(val, 500)] = 500 - val = clip(val, -.00001, 10) - p = where(logical_and(equal(p, -1), higher), - exp(val), p) - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - tmb = where(logical_and(equal(tmb, -1), higher), tval1, tmb) - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) - tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), - tval2, tms) - - - # define the pres. 
of each of the boundary layers - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) - gm = greater(pres[i - 1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - st = where(logical_and(equal(st, -1), mask), - val, st) - - # where topo level is above highest level in BL fields...use tmb - st = where(logical_and(equal(st,-1),less(p, pres[-1])), tmb, st) - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - st = where(equal(st, -1), tmb - tms + temps[0], st) - return self.KtoF(st) - -##------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. -##------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar - # at the true surface - tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. - ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio - rh = w / ws # calc relative humidity - - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -##------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model -##------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - -##------------------------------------------------------------------------- -## Calculates sky (0-100) from the total precip field out of the model -##------------------------------------------------------------------------- - def calcSky(self, rh_c, gh_c, topo, p_SFC): - return self.skyFromRH(rh_c, 
gh_c, topo, p_SFC) - -##------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. -##------------------------------------------------------------------------- - def calcPoP(self, gh_c, rh_c, QPF, topo): - rhavg = where(less(gh_c, topo), float32(-1), rh_c) - rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 - count = not_equal(rhavg, -1) - rhavg[equal(rhavg, -1)] = 0 - count = add.reduce(count, 0, dtype=float32) - rhavg = add.reduce(rhavg, 0) - ## add this much based on humidity only - dpop = where(count, rhavg / (count + .001), 0) - 70.0 - dpop[less(dpop, -30)] = -30 - ## calculate the base PoP - pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) - pop = pop + dpop # add the adjustment based on humidity - pop = clip(pop, 0, 100) # clip to 100% - return pop - -##------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. -##------------------------------------------------------------------------- - def calcFzLevel(self, gh_c, t_c, topo): - fzl = self.newGrid(-1) - - # for each level in the height cube, find the freezing level - for i in xrange(gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ - * (273.15 - t_c[i - 1]) - except: - val = gh_c[i] - - ## save the height value in fzl - fzl = where(logical_and(equal(fzl, -1), - less_equal(t_c[i], 273.15)), val, fzl) - - return fzl * 3.28 # convert to feet - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. 
-##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb = clip(pmb, 1, 1050) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc = clip(tc, -120, 60) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), - val, snow) - # - # convert to feet - # - snow = snow * 3.28 - - return snow - -##------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - snowr = T * -0.5 + 22.5 - snowr[less(T, 9)] = 20 - snowr[greater_equal(T, 30)] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), - snowr * QPF, float32(0)) - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) - snowamt[logical_not(snowmask)] = 0 - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". 
-##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - - -##------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - # set up masks - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0).astype(float32) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) # calc. running avg - diffpt = pt[i] - runavg # calc. difference - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)), tmh, mh) - mh -= topo - mh *= 3.28 # convert to feet - return mh - -##------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert - dir = wind_FHAG10[1] # get wind dir - return (mag, dir) # assemble speed and dir into a tuple - -##------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. -##------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) - fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) - fadir = clip(fadir, 0, 360) # clip the value to 0, 360 - famag = famag * 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. 
This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tmag = tmag * 1.94 # convert to knots - tmag = clip(tmag, 0, 125) # clip speed to 125 knots - return (tmag, tdir) - -##------------------------------------------------------------------------- -## Uses a derivation of the Bourgouin algorithm to calculate precipitation -## type, and other algorithms to determine the coverage and intensity. -## The Bourgoin technique figures out precip type from calculating how -## long a hydrometer is exposed to alternating layers of above zero (C) and -## below zero temperature layers. This tool calculates at each grid point -## which of the four Bourgouin cases apply. Then the appropriate algorithm -## is applied to that case that further refines the precip. type. Once the -## type is determined, other algorithms are used to determine the coverage -## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, -## "A Method to Determine Precipitation Types", by Pierre Bourgouin -##------------------------------------------------------------------------- - def calcWx(self, QPF, T, t_c, gh_c, p_SFC, topo, sli_SFC): - gh_c = gh_c[:13, :, :] - t_c = t_c[:13, :, :] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) - tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a11, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a11, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a11, a3) - topomask = logical_and(topomask, cross) - aindex = where(topomask, aindex + 1, aindex) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a22, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a22, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a22, a3) - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - # Case d (snow) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] = 2 - 
wx[logical_and(srmask, - logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) -# convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) -# wx = where(logical_and(not_equal(wx, 0), convecMask), wx + 6, wx) - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "Chc" or tcov == "": - tcov = "Sct" - key.append(key[i] + "^" + tcov - + ":T:::") - wx[less_equal(sli_SFC, -3)] += 13 - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -##------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip., lifted index -## and 3-D relative humidity. -##------------------------------------------------------------------------- - def calcLAL(self, tp_SFC, sli_SFC, rh_c, rh_BL030): - bli = sli_SFC # surface lifted index - ttp = self.newGrid(0.00001) # nearly zero grid - lal = self.newGrid(1) # initialize the return grid to 1 - # Add one to lal if QPF > 0.5 - lal[logical_and(greater(ttp, 0), greater(tp_SFC / ttp, 0.5))] += 1 - - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli, -3)] += 1 - lal[less(bli, -5)] += 1 - return lal - - -def main(): - GFS80Forecaster().run() +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. 
Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * +import LogStream + +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from GFS80 model +## output. +## +##-------------------------------------------------------------------------- +class GFS80Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "GFS80", "GFS80") + +##-------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. +##-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB975", "MB950", "MB925", "MB900", "MB875", "MB850", + "MB825", "MB800", "MB775", "MB750", "MB725", "MB700", + "MB675", "MB650", "MB625", "MB600", "MB575", "MB550", + "MB525", "MB500", "MB450", "MB400", "MB350", "MB300"] + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + + +##------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. This tool simply interpolates the temperature value from +## model's isobaric temperature cube. 
+##------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, t_BL150180, p_SFC, topo, stopo, gh_c, t_c): + p_SFC = p_SFC / 100 # get the surface pres. in mb + pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, + p_SFC - 105, p_SFC - 135] + temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, t_BL150180] + return self._calcT(temps, pres, topo, stopo, gh_c, t_c) + + def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) # identify points > topo + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i - 1], + log(self.pres[i]), log(self.pres[i - 1]), topo) + val[greater(val, 500)] = 500 + val = clip(val, -.00001, 10) + p = where(logical_and(equal(p, -1), higher), + exp(val), p) + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + tmb = where(logical_and(equal(tmb, -1), higher), tval1, tmb) + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) + tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), + tval2, tms) + + + # define the pres. 
of each of the boundary layers + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) + gm = greater(pres[i - 1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + st = where(logical_and(equal(st, -1), mask), + val, st) + + # where topo level is above highest level in BL fields...use tmb + st = where(logical_and(equal(st,-1),less(p, pres[-1])), tmb, st) + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + st = where(equal(st, -1), tmb - tms + temps[0], st) + return self.KtoF(st) + +##------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. +##------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar + # at the true surface + tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. + ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio + rh = w / ws # calc relative humidity + + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +##------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model +##------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + +##------------------------------------------------------------------------- +## Calculates sky (0-100) from the total precip field out of the model +##------------------------------------------------------------------------- + def calcSky(self, rh_c, gh_c, topo, p_SFC): + return self.skyFromRH(rh_c, 
gh_c, topo, p_SFC) + +##------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. +##------------------------------------------------------------------------- + def calcPoP(self, gh_c, rh_c, QPF, topo): + rhavg = where(less(gh_c, topo), float32(-1), rh_c) + rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 + count = not_equal(rhavg, -1) + rhavg[equal(rhavg, -1)] = 0 + count = add.reduce(count, 0, dtype=float32) + rhavg = add.reduce(rhavg, 0) + ## add this much based on humidity only + dpop = where(count, rhavg / (count + .001), 0) - 70.0 + dpop[less(dpop, -30)] = -30 + ## calculate the base PoP + pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) + pop = pop + dpop # add the adjustment based on humidity + pop = clip(pop, 0, 100) # clip to 100% + return pop + +##------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. +##------------------------------------------------------------------------- + def calcFzLevel(self, gh_c, t_c, topo): + fzl = self.newGrid(-1) + + # for each level in the height cube, find the freezing level + for i in range(gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ + * (273.15 - t_c[i - 1]) + except: + val = gh_c[i] + + ## save the height value in fzl + fzl = where(logical_and(equal(fzl, -1), + less_equal(t_c[i], 273.15)), val, fzl) + + return fzl * 3.28 # convert to feet + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. 
+##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb = clip(pmb, 1, 1050) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc = clip(tc, -120, 60) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), + val, snow) + # + # convert to feet + # + snow = snow * 3.28 + + return snow + +##------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + snowr = T * -0.5 + 22.5 + snowr[less(T, 9)] = 20 + snowr[greater_equal(T, 30)] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), + snowr * QPF, float32(0)) + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) + snowamt[logical_not(snowmask)] = 0 + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". 
+##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + + +##------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + # set up masks + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0).astype(float32) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) # calc. running avg + diffpt = pt[i] - runavg # calc. difference + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)), tmh, mh) + mh -= topo + mh *= 3.28 # convert to feet + return mh + +##------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert + dir = wind_FHAG10[1] # get wind dir + return (mag, dir) # assemble speed and dir into a tuple + +##------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. +##------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) + fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) + fadir = clip(fadir, 0, 360) # clip the value to 0, 360 + famag = famag * 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. 
This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. +##------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tmag = tmag * 1.94 # convert to knots + tmag = clip(tmag, 0, 125) # clip speed to 125 knots + return (tmag, tdir) + +##------------------------------------------------------------------------- +## Uses a derivation of the Bourgouin algorithm to calculate precipitation +## type, and other algorithms to determine the coverage and intensity. +## The Bourgoin technique figures out precip type from calculating how +## long a hydrometer is exposed to alternating layers of above zero (C) and +## below zero temperature layers. This tool calculates at each grid point +## which of the four Bourgouin cases apply. Then the appropriate algorithm +## is applied to that case that further refines the precip. type. Once the +## type is determined, other algorithms are used to determine the coverage +## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, +## "A Method to Determine Precipitation Types", by Pierre Bourgouin +##------------------------------------------------------------------------- + def calcWx(self, QPF, T, t_c, gh_c, p_SFC, topo, sli_SFC): + gh_c = gh_c[:13, :, :] + t_c = t_c[:13, :, :] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) + tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a11, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a11, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a11, a3) + topomask = logical_and(topomask, cross) + aindex = where(topomask, aindex + 1, aindex) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a22, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a22, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a22, a3) + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + # Case d (snow) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 2 + 
wx[logical_and(srmask, + logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) +# convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) +# wx = where(logical_and(not_equal(wx, 0), convecMask), wx + 6, wx) + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "Chc" or tcov == "": + tcov = "Sct" + key.append(key[i] + "^" + tcov + + ":T:::") + wx[less_equal(sli_SFC, -3)] += 13 + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +##------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip., lifted index +## and 3-D relative humidity. +##------------------------------------------------------------------------- + def calcLAL(self, tp_SFC, sli_SFC, rh_c, rh_BL030): + bli = sli_SFC # surface lifted index + ttp = self.newGrid(0.00001) # nearly zero grid + lal = self.newGrid(1) # initialize the return grid to 1 + # Add one to lal if QPF > 0.5 + lal[logical_and(greater(ttp, 0), greater(tp_SFC / ttp, 0.5))] += 1 + + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli, -3)] += 1 + lal[less(bli, -5)] += 1 + return lal + + +def main(): + GFS80Forecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HIRESWarw.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HIRESWarw.py index 258b7f536d..d4d363226f 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HIRESWarw.py +++ 
b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HIRESWarw.py @@ -1,324 +1,324 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -##------------------------------------------------------------------------- -## Model that produces surface weather elements from model -## output. -## -class HIRESWarwForecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "HIRESWarw") - self.oldqpf = 0.0 #initializes a temporary grid to zero - -####-------------------------------------------------------------------------- -#### These levels will be used to create vertical soundings. These are -#### defined here since they are model dependent. 
-####-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB925", "MB850", "MB700", "MB500", "MB400", "MB300"] - -####------------------------------------------------------------------------- -#### Returns the maximum of the specified MaxT and the T grids -####-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -####------------------------------------------------------------------------- -#### Returns the minimum of the specified MinT and T grids -####-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - - -####------------------------------------------------------------------------- -#### Calculates the temperature at the elevation indicated in the topo -#### grid. This tool simply interpolates the temperature value from -#### model's isobaric temperature cube. -####------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, p_SFC, topo, stopo, gh_c, t_c): - p_SFC = p_SFC / 100 # get the surface pres. 
in mb - pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, - p_SFC - 105, p_SFC - 135] - temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150] - return self._calcT(temps, pres, topo, stopo, gh_c, t_c) - - def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) # identify points > topo - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i - 1], - log(self.pres[i]), log(self.pres[i - 1]), topo) - val[greater(val, 500)] = 500 - val.clip(-.00001, 10, val) - - m = logical_and(equal(p, -1), higher) - p[m]= exp(val)[m] - - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - - m = logical_and(equal(tmb, -1), higher) - tmb[m] = tval1[m] - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) - - m = logical_and(equal(tms, -1), greater(gh_c[i], stopo)) - tms[m] = tval2[m] - - - # define the pres. 
of each of the boundary layers - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) - gm = greater(pres[i - 1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - - m = logical_and(equal(st, -1), mask) - st[m] = val[m] - - # where topo level is above highest level in BL fields...use tmb - m = logical_and(equal(st,-1),less(p, pres[-1])) - st[m] = tmb[m] - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - m = equal(st, -1) - st[m] = (tmb - tms + temps[0])[m] - - return self.KtoF(st) - -####------------------------------------------------------------------------- -#### Calculates dew point from the specified pressure, temp and rh -#### fields. -####------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar - # at the true surface - tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. - ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio - rh = w / ws # calc relative humidity - - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -####------------------------------------------------------------------------- -#### Calculates RH from the T and Td grids -####------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -####------------------------------------------------------------------------- -#### Returns the maximum of the specified MaxRH and the RH grids -####-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -####------------------------------------------------------------------------- -#### Returns the minimum of the specified MinRH and RH grids -####-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -####------------------------------------------------------------------------- -#### Calculates QPF from the total precip field out of the model -####------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - -##-------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. 
-##-------------------------------------------------------------------------- -# def calcFzLevel(self, gh_c, t_c, topo): -# fzl = self.newGrid(-1) -# -# # for each level in the height cube, find the freezing level -# for i in xrange(gh_c.shape[0]): -# try: -# val = gh_c[i-1] + (gh_c[i] - gh_c[i-1]) / (t_c[i] - t_c[i-1])\ -# * (273.15 - t_c[i-1]) -# except: -# val = gh_c[i] -# ## save the height value in fzl -# m =logical_and(equal(fzl, -1), less_equal(t_c[i], 273.15)) -# fzl[m] = val[m] -# -# fzl *= 3.28 # convert to feet -# return fzl -# -##-------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##-------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - # set up masks - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) # calc. running avg - diffpt = pt[i] - runavg # calc. difference - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - m = logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)) - mh[m] = tmh[m] - - mh -= topo - mh *= 3.28 # convert to feet - return mh - -####------------------------------------------------------------------------- -#### Converts the lowest available wind level from m/s to knots -####------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert - dir = wind_FHAG10[1] # get wind dir - return (mag, dir) # assemble speed and dir into a tuple - -####------------------------------------------------------------------------- -#### Calculates the wind at 3000 feet AGL. -####------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - m = logical_and(equal(famag, -1), mask[i]) - famag[m] = wm[i][m] - - m = logical_and(equal(fadir, -1), mask[i]) - fadir[m] = wd[i][m] - fadir.clip(0, 360, fadir) # clip the value to 0, 360 - famag *= 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##-------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. 
This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##-------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tdir.clip(0, 359.5, tdir) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tmag) # clip speed to 125 knots - return (tmag, tdir) - -##-------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##-------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##-------------------------------------------------------------------------- -def main(): - HIRESWarwForecaster().run() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +##------------------------------------------------------------------------- +## Model that produces surface weather elements from model +## output. 
+## +class HIRESWarwForecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "HIRESWarw") + self.oldqpf = 0.0 #initializes a temporary grid to zero + +####-------------------------------------------------------------------------- +#### These levels will be used to create vertical soundings. These are +#### defined here since they are model dependent. +####-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB925", "MB850", "MB700", "MB500", "MB400", "MB300"] + +####------------------------------------------------------------------------- +#### Returns the maximum of the specified MaxT and the T grids +####-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +####------------------------------------------------------------------------- +#### Returns the minimum of the specified MinT and T grids +####-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + + +####------------------------------------------------------------------------- +#### Calculates the temperature at the elevation indicated in the topo +#### grid. This tool simply interpolates the temperature value from +#### model's isobaric temperature cube. +####------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, p_SFC, topo, stopo, gh_c, t_c): + p_SFC = p_SFC / 100 # get the surface pres. 
in mb + pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, + p_SFC - 105, p_SFC - 135] + temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150] + return self._calcT(temps, pres, topo, stopo, gh_c, t_c) + + def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) # identify points > topo + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i - 1], + log(self.pres[i]), log(self.pres[i - 1]), topo) + val[greater(val, 500)] = 500 + val.clip(-.00001, 10, val) + + m = logical_and(equal(p, -1), higher) + p[m]= exp(val)[m] + + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + + m = logical_and(equal(tmb, -1), higher) + tmb[m] = tval1[m] + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) + + m = logical_and(equal(tms, -1), greater(gh_c[i], stopo)) + tms[m] = tval2[m] + + + # define the pres. 
of each of the boundary layers + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) + gm = greater(pres[i - 1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + + m = logical_and(equal(st, -1), mask) + st[m] = val[m] + + # where topo level is above highest level in BL fields...use tmb + m = logical_and(equal(st,-1),less(p, pres[-1])) + st[m] = tmb[m] + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + m = equal(st, -1) + st[m] = (tmb - tms + temps[0])[m] + + return self.KtoF(st) + +####------------------------------------------------------------------------- +#### Calculates dew point from the specified pressure, temp and rh +#### fields. +####------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar + # at the true surface + tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. + ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio + rh = w / ws # calc relative humidity + + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +####------------------------------------------------------------------------- +#### Calculates RH from the T and Td grids +####------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +####------------------------------------------------------------------------- +#### Returns the maximum of the specified MaxRH and the RH grids +####-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +####------------------------------------------------------------------------- +#### Returns the minimum of the specified MinRH and RH grids +####-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +####------------------------------------------------------------------------- +#### Calculates QPF from the total precip field out of the model +####------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + +##-------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. 
+##-------------------------------------------------------------------------- +# def calcFzLevel(self, gh_c, t_c, topo): +# fzl = self.newGrid(-1) +# +# # for each level in the height cube, find the freezing level +# for i in xrange(gh_c.shape[0]): +# try: +# val = gh_c[i-1] + (gh_c[i] - gh_c[i-1]) / (t_c[i] - t_c[i-1])\ +# * (273.15 - t_c[i-1]) +# except: +# val = gh_c[i] +# ## save the height value in fzl +# m =logical_and(equal(fzl, -1), less_equal(t_c[i], 273.15)) +# fzl[m] = val[m] +# +# fzl *= 3.28 # convert to feet +# return fzl +# +##-------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##-------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + # set up masks + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) # calc. running avg + diffpt = pt[i] - runavg # calc. difference + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + m = logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)) + mh[m] = tmh[m] + + mh -= topo + mh *= 3.28 # convert to feet + return mh + +####------------------------------------------------------------------------- +#### Converts the lowest available wind level from m/s to knots +####------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert + dir = wind_FHAG10[1] # get wind dir + return (mag, dir) # assemble speed and dir into a tuple + +####------------------------------------------------------------------------- +#### Calculates the wind at 3000 feet AGL. +####------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + m = logical_and(equal(famag, -1), mask[i]) + famag[m] = wm[i][m] + + m = logical_and(equal(fadir, -1), mask[i]) + fadir[m] = wd[i][m] + fadir.clip(0, 360, fadir) # clip the value to 0, 360 + famag *= 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##-------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. 
This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. +##-------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tdir.clip(0, 359.5, tdir) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tmag) # clip speed to 125 knots + return (tmag, tdir) + +##-------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##-------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##-------------------------------------------------------------------------- +def main(): + HIRESWarwForecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HIRESWnmm.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HIRESWnmm.py index 7ed564b937..4f759123f0 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HIRESWnmm.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HIRESWnmm.py @@ -1,324 +1,324 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -##------------------------------------------------------------------------- -## Model that produces surface weather elements from model -## output. -## -class HIRESWnmmForecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "HIRESWnmm") - self.oldqpf = 0.0 #initializes a temporary grid to zero - -####-------------------------------------------------------------------------- -#### These levels will be used to create vertical soundings. These are -#### defined here since they are model dependent. -####-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB925", "MB850", "MB700", "MB500", "MB400", "MB300"] - -####------------------------------------------------------------------------- -#### Returns the maximum of the specified MaxT and the T grids -####-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -####------------------------------------------------------------------------- -#### Returns the minimum of the specified MinT and T grids -####-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - - -####------------------------------------------------------------------------- -#### Calculates the temperature at the elevation indicated in the topo -#### grid. This tool simply interpolates the temperature value from -#### model's isobaric temperature cube. 
-####------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, p_SFC, topo, stopo, gh_c, t_c): - p_SFC = p_SFC / 100 # get the surface pres. in mb - pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, - p_SFC - 105, p_SFC - 135] - temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150] - return self._calcT(temps, pres, topo, stopo, gh_c, t_c) - - def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) # identify points > topo - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i - 1], - log(self.pres[i]), log(self.pres[i - 1]), topo) - val[greater(val, 500)] = 500 - val.clip(-.00001, 10, val) - - m = logical_and(equal(p, -1), higher) - p[m]= exp(val)[m] - - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - - m = logical_and(equal(tmb, -1), higher) - tmb[m] = tval1[m] - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) - - m = logical_and(equal(tms, -1), greater(gh_c[i], stopo)) - tms[m] = tval2[m] - - - # define the pres. 
of each of the boundary layers - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) - gm = greater(pres[i - 1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - - m = logical_and(equal(st, -1), mask) - st[m] = val[m] - - # where topo level is above highest level in BL fields...use tmb - m = logical_and(equal(st,-1),less(p, pres[-1])) - st[m] = tmb[m] - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - m = equal(st, -1) - st[m] = (tmb - tms + temps[0])[m] - - return self.KtoF(st) - -####------------------------------------------------------------------------- -#### Calculates dew point from the specified pressure, temp and rh -#### fields. -####------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar - # at the true surface - tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. - ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio - rh = w / ws # calc relative humidity - - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -####------------------------------------------------------------------------- -#### Calculates RH from the T and Td grids -####------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -####------------------------------------------------------------------------- -#### Returns the maximum of the specified MaxRH and the RH grids -####-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -####------------------------------------------------------------------------- -#### Returns the minimum of the specified MinRH and RH grids -####-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -####------------------------------------------------------------------------- -#### Calculates QPF from the total precip field out of the model -####------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - -##-------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. 
-##-------------------------------------------------------------------------- -# def calcFzLevel(self, gh_c, t_c, topo): -# fzl = self.newGrid(-1) -# -# # for each level in the height cube, find the freezing level -# for i in xrange(gh_c.shape[0]): -# try: -# val = gh_c[i-1] + (gh_c[i] - gh_c[i-1]) / (t_c[i] - t_c[i-1])\ -# * (273.15 - t_c[i-1]) -# except: -# val = gh_c[i] -# ## save the height value in fzl -# m =logical_and(equal(fzl, -1), less_equal(t_c[i], 273.15)) -# fzl[m] = val[m] -# -# fzl *= 3.28 # convert to feet -# return fzl -# -##-------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##-------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - # set up masks - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) # calc. running avg - diffpt = pt[i] - runavg # calc. difference - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - m = logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)) - mh[m] = tmh[m] - - mh -= topo - mh *= 3.28 # convert to feet - return mh - -####------------------------------------------------------------------------- -#### Converts the lowest available wind level from m/s to knots -####------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert - dir = wind_FHAG10[1] # get wind dir - return (mag, dir) # assemble speed and dir into a tuple - -####------------------------------------------------------------------------- -#### Calculates the wind at 3000 feet AGL. -####------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - m = logical_and(equal(famag, -1), mask[i]) - famag[m] = wm[i][m] - - m = logical_and(equal(fadir, -1), mask[i]) - fadir[m] = wd[i][m] - fadir.clip(0, 360, fadir) # clip the value to 0, 360 - famag *= 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##-------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. 
This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##-------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tdir.clip(0, 359.5, tdir) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tmag) # clip speed to 125 knots - return (tmag, tdir) - -##-------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##-------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##-------------------------------------------------------------------------- -def main(): - HIRESWnmmForecaster().run() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +##------------------------------------------------------------------------- +## Model that produces surface weather elements from model +## output. 
+## +class HIRESWnmmForecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "HIRESWnmm") + self.oldqpf = 0.0 #initializes a temporary grid to zero + +####-------------------------------------------------------------------------- +#### These levels will be used to create vertical soundings. These are +#### defined here since they are model dependent. +####-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB925", "MB850", "MB700", "MB500", "MB400", "MB300"] + +####------------------------------------------------------------------------- +#### Returns the maximum of the specified MaxT and the T grids +####-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +####------------------------------------------------------------------------- +#### Returns the minimum of the specified MinT and T grids +####-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + + +####------------------------------------------------------------------------- +#### Calculates the temperature at the elevation indicated in the topo +#### grid. This tool simply interpolates the temperature value from +#### model's isobaric temperature cube. +####------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, p_SFC, topo, stopo, gh_c, t_c): + p_SFC = p_SFC / 100 # get the surface pres. 
in mb + pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, + p_SFC - 105, p_SFC - 135] + temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150] + return self._calcT(temps, pres, topo, stopo, gh_c, t_c) + + def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) # identify points > topo + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i - 1], + log(self.pres[i]), log(self.pres[i - 1]), topo) + val[greater(val, 500)] = 500 + val.clip(-.00001, 10, val) + + m = logical_and(equal(p, -1), higher) + p[m]= exp(val)[m] + + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + + m = logical_and(equal(tmb, -1), higher) + tmb[m] = tval1[m] + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) + + m = logical_and(equal(tms, -1), greater(gh_c[i], stopo)) + tms[m] = tval2[m] + + + # define the pres. 
of each of the boundary layers + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) + gm = greater(pres[i - 1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + + m = logical_and(equal(st, -1), mask) + st[m] = val[m] + + # where topo level is above highest level in BL fields...use tmb + m = logical_and(equal(st,-1),less(p, pres[-1])) + st[m] = tmb[m] + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + m = equal(st, -1) + st[m] = (tmb - tms + temps[0])[m] + + return self.KtoF(st) + +####------------------------------------------------------------------------- +#### Calculates dew point from the specified pressure, temp and rh +#### fields. +####------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar + # at the true surface + tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. + ws = (0.622 * tsfce) / (newp - tsfce) # sat. 
mixing ratio + rh = w / ws # calc relative humidity + + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +####------------------------------------------------------------------------- +#### Calculates RH from the T and Td grids +####------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +####------------------------------------------------------------------------- +#### Returns the maximum of the specified MaxRH and the RH grids +####-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +####------------------------------------------------------------------------- +#### Returns the minimum of the specified MinRH and RH grids +####-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +####------------------------------------------------------------------------- +#### Calculates QPF from the total precip field out of the model +####------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + +##-------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. 
+##-------------------------------------------------------------------------- +# def calcFzLevel(self, gh_c, t_c, topo): +# fzl = self.newGrid(-1) +# +# # for each level in the height cube, find the freezing level +# for i in xrange(gh_c.shape[0]): +# try: +# val = gh_c[i-1] + (gh_c[i] - gh_c[i-1]) / (t_c[i] - t_c[i-1])\ +# * (273.15 - t_c[i-1]) +# except: +# val = gh_c[i] +# ## save the height value in fzl +# m =logical_and(equal(fzl, -1), less_equal(t_c[i], 273.15)) +# fzl[m] = val[m] +# +# fzl *= 3.28 # convert to feet +# return fzl +# +##-------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##-------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + # set up masks + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) # calc. running avg + diffpt = pt[i] - runavg # calc. difference + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + m = logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)) + mh[m] = tmh[m] + + mh -= topo + mh *= 3.28 # convert to feet + return mh + +####------------------------------------------------------------------------- +#### Converts the lowest available wind level from m/s to knots +####------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] * 1.94 # get the wind speed and convert + dir = wind_FHAG10[1] # get wind dir + return (mag, dir) # assemble speed and dir into a tuple + +####------------------------------------------------------------------------- +#### Calculates the wind at 3000 feet AGL. +####------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + m = logical_and(equal(famag, -1), mask[i]) + famag[m] = wm[i][m] + + m = logical_and(equal(fadir, -1), mask[i]) + fadir[m] = wd[i][m] + fadir.clip(0, 360, fadir) # clip the value to 0, 360 + famag *= 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##-------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. 
This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. +##-------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tdir.clip(0, 359.5, tdir) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tmag) # clip speed to 125 knots + return (tmag, tdir) + +##-------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##-------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##-------------------------------------------------------------------------- +def main(): + HIRESWnmmForecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HRRR.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HRRR.py index 3fbbb4c6a2..d91311b288 100755 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HRRR.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/HRRR.py @@ -1,212 +1,212 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - - -from Init import * -##-------------------------------------------------------------------------- -class HRRRForecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "HRRR","HRRR") - -# def calcClgHgt(self, cc_CCL): -# ceil = cc_CCL * .03280839 -# ceil[less_equal(ceil, 0.0)] = 250.0 -# return ceil - - def calcVis(self, vis_SFC): - return (vis_SFC * 3.2808) / 5279.85564 - - def calcT(self, t_FHAG2): - return self.KtoF(t_FHAG2) - - def calcTd(self, dpt_FHAG2): - return self.KtoF(dpt_FHAG2) - -##-------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- -## def calcMaxT(self, T, MaxT): -## if MaxT is None: -## return T -## return maximum(MaxT, T) - -##-------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- -## def calcMinT(self, T, MinT): -## if MinT is None: -## return T -## return minimum(MinT, T) - - - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # return the new value - return RH - - def dewFromTandRH(self,T,RH): - tc=(T-32.0)*(5.0/9.0) - rh=clip(RH,0.001,99.999)/100.0 - x=(log(rh)/17.67)+(tc/(tc+243.5)) - tdc=(243.5*x)/(1.0-x) - td=(tdc*9.0/5.0)+32.0 - return td - -##-------------------------------------------------------------------------- -# Calculates QPF from the total precip field out of the model 
-##-------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - return tp_SFC / 25.4 # convert from millimeters to inches - - def calcQPF6(self, QPF, QPF6): - if QPF6 is None: - QPF6=QPF - else: - QPF6=QPF6+QPF - return QPF6 - - def calcQPF12(self, QPF6, QPF12): - if QPF12 is None: - QPF12=QPF6 - else: - QPF12=QPF12+QPF6 - return QPF12 - -##-------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##-------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - return (wind_FHAG10[0] * 1.94,clip(wind_FHAG10[1], 0, 359.5)) -# -# Return the max of the max wind or wind gust -# - def calcWindGust(self, wgs_FHAG10, wgs1hr_FHAG10): - return (maximum(wgs_FHAG10,wgs1hr_FHAG10) * 1.94) -#========================================================================= -# SnowAmt - simple snow ratio based on surface temperature - multiplied -# times the model QPF amount -#------------------------------------------------------------------------- - - def calcSnowAmt(self,T,QPF): - m1=less(T,9.0) - m2=greater_equal(T,30.0) - snowr=(T*-0.5)+22.5 - snowr[m1] = 20 - snowr[m2] = 0 - snowamt=QPF*snowr - return snowamt - -##-------------------------------------------------------------------------- -## Use cloud base and cloud top to get sky cover -##-------------------------------------------------------------------------- - -# def calcSky(self, gh_CBL, gh_CTL): -# depth=gh_CTL-gh_CBL -# c100=greater_equal(depth, 1000) -# partialcloudy=depth/10 -# sky=0 -# sky=where(depth, c100, sky) -## sky=where(depth, partialcloudy, sky) -# return sky - - - def calcSky(self,tcc_EA): - return tcc_EA - -#-------------------------------------------------------------------------- -# PoP - based strongly on QPF (since when model has one inch of precip the -# chance of getting 0.01 is pretty high). 
However, there is a big -# difference between a place that model has 0.00 precip and is very -# close to precipitating - and those where model has 0.00 and is a -# thousand miles from the nearest cloud. Thus, uses the average -# -# Uses hyperbolic tangent of QPF, so that it rises quickly as model -# QPF increases - but tapers out to nearly 100% as QPF gets high. -# -# Adjustable parameters: -# topQPF is QPF amount that would give 75% PoP if nothing else -# considered at half this amount, PoP is 45%, at double this -# amount PoP is 96%. Default set at 0.40. -# -#-------------------------------------------------------------------------- -# def calcPoP(self, QPF12): -# -# topQPF=0.40 # QPF value where raw PoP would be 75% -# factor=tanh(QPF12*(1.0/topQPF)) -# factor2=tanh(QPF12*(2.0/topQPF)) -# pop=(factor*100.0)+(factor2*100.0) -# pop=clip(pop,0,100) -# return pop - -##-------------------------------------------------------------------------- -## Use sky, reflecivity, qpf, vis, categoricals to get weather -##-------------------------------------------------------------------------- - - def calcWx(self, T, QPF, Vsby, crain_SFC, csnow_SFC, cicep_SFC, bli_BL0180, cfrzr_SFC, refc_EA): - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - wx[logical_and(greater(QPF, 0.02), greater(T, 35))] = 2 - wx[equal(crain_SFC, 1)] = 2 - wx[equal(cfrzr_SFC, 1)] = 4 - wx[equal(cicep_SFC, 1)] = 5 - wx[equal(csnow_SFC, 1)] = 1 - - # Make showers (scattered/Chc) - convecMask = less(refc_EA, 35) - wx[logical_and(not_equal(wx, 0), convecMask)] += 6 - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "Chc" or tcov == "": - tcov = "Sct" - key.append(key[i] + "^" + tcov - + ":T:::") 
- wx[logical_and(greater_equal(bli_BL0180, -3), greater_equal(refc_EA, 35))] += 13 - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -def main(): - HRRRForecaster().run() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. 
Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + + +from Init import * +##-------------------------------------------------------------------------- +class HRRRForecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "HRRR","HRRR") + +# def calcClgHgt(self, cc_CCL): +# ceil = cc_CCL * .03280839 +# ceil[less_equal(ceil, 0.0)] = 250.0 +# return ceil + + def calcVis(self, vis_SFC): + return (vis_SFC * 3.2808) / 5279.85564 + + def calcT(self, t_FHAG2): + return self.KtoF(t_FHAG2) + + def calcTd(self, dpt_FHAG2): + return self.KtoF(dpt_FHAG2) + +##-------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- +## def calcMaxT(self, T, MaxT): +## if MaxT is None: +## return T +## return maximum(MaxT, T) + +##-------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- +## def calcMinT(self, T, MinT): +## if MinT is None: +## return T +## return minimum(MinT, T) + + + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # return the new value + return RH + + def dewFromTandRH(self,T,RH): + tc=(T-32.0)*(5.0/9.0) + rh=clip(RH,0.001,99.999)/100.0 + x=(log(rh)/17.67)+(tc/(tc+243.5)) + tdc=(243.5*x)/(1.0-x) + td=(tdc*9.0/5.0)+32.0 + return td + +##-------------------------------------------------------------------------- +# Calculates QPF from the total precip field out of the model +##-------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + return tp_SFC / 25.4 
# convert from millimeters to inches + + def calcQPF6(self, QPF, QPF6): + if QPF6 is None: + QPF6=QPF + else: + QPF6=QPF6+QPF + return QPF6 + + def calcQPF12(self, QPF6, QPF12): + if QPF12 is None: + QPF12=QPF6 + else: + QPF12=QPF12+QPF6 + return QPF12 + +##-------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##-------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + return (wind_FHAG10[0] * 1.94,clip(wind_FHAG10[1], 0, 359.5)) +# +# Return the max of the max wind or wind gust +# + def calcWindGust(self, wgs_FHAG10, wgs1hr_FHAG10): + return (maximum(wgs_FHAG10,wgs1hr_FHAG10) * 1.94) +#========================================================================= +# SnowAmt - simple snow ratio based on surface temperature - multiplied +# times the model QPF amount +#------------------------------------------------------------------------- + + def calcSnowAmt(self,T,QPF): + m1=less(T,9.0) + m2=greater_equal(T,30.0) + snowr=(T*-0.5)+22.5 + snowr[m1] = 20 + snowr[m2] = 0 + snowamt=QPF*snowr + return snowamt + +##-------------------------------------------------------------------------- +## Use cloud base and cloud top to get sky cover +##-------------------------------------------------------------------------- + +# def calcSky(self, gh_CBL, gh_CTL): +# depth=gh_CTL-gh_CBL +# c100=greater_equal(depth, 1000) +# partialcloudy=depth/10 +# sky=0 +# sky=where(depth, c100, sky) +## sky=where(depth, partialcloudy, sky) +# return sky + + + def calcSky(self,tcc_EA): + return tcc_EA + +#-------------------------------------------------------------------------- +# PoP - based strongly on QPF (since when model has one inch of precip the +# chance of getting 0.01 is pretty high). 
However, there is a big +# difference between a place that model has 0.00 precip and is very +# close to precipitating - and those where model has 0.00 and is a +# thousand miles from the nearest cloud. Thus, uses the average +# +# Uses hyperbolic tangent of QPF, so that it rises quickly as model +# QPF increases - but tapers out to nearly 100% as QPF gets high. +# +# Adjustable parameters: +# topQPF is QPF amount that would give 75% PoP if nothing else +# considered at half this amount, PoP is 45%, at double this +# amount PoP is 96%. Default set at 0.40. +# +#-------------------------------------------------------------------------- +# def calcPoP(self, QPF12): +# +# topQPF=0.40 # QPF value where raw PoP would be 75% +# factor=tanh(QPF12*(1.0/topQPF)) +# factor2=tanh(QPF12*(2.0/topQPF)) +# pop=(factor*100.0)+(factor2*100.0) +# pop=clip(pop,0,100) +# return pop + +##-------------------------------------------------------------------------- +## Use sky, reflecivity, qpf, vis, categoricals to get weather +##-------------------------------------------------------------------------- + + def calcWx(self, T, QPF, Vsby, crain_SFC, csnow_SFC, cicep_SFC, bli_BL0180, cfrzr_SFC, refc_EA): + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + wx[logical_and(greater(QPF, 0.02), greater(T, 35))] = 2 + wx[equal(crain_SFC, 1)] = 2 + wx[equal(cfrzr_SFC, 1)] = 4 + wx[equal(cicep_SFC, 1)] = 5 + wx[equal(csnow_SFC, 1)] = 1 + + # Make showers (scattered/Chc) + convecMask = less(refc_EA, 35) + wx[logical_and(not_equal(wx, 0), convecMask)] += 6 + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "Chc" or tcov == "": + tcov = "Sct" + key.append(key[i] + "^" + tcov + + ":T:::") 
+ wx[logical_and(greater_equal(bli_BL0180, -3), greater_equal(refc_EA, 35))] += 13 + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +def main(): + HRRRForecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/Init.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/Init.py index 6bad508506..6dd82b208d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/Init.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/Init.py @@ -1,1410 +1,1410 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- --------------------------------------------- -# Feb 16, 2012 14439 jdynina modified haines thresholds -# Feb 16, 2012 13917 jdynina merged in changes from TRAC ticket 11391 -# Jul 25, 2012 957 dgilling implement edit areas as args to calc methods. 
-# Oct 05, 2012 15158 ryu add Forecaster.getDb() -# Apr 04, 2013 1787 randerso fix validTime check to work with accumulative -# parms fix logging so you can actually -# determine why a smartInit is not calculating -# a parameter -# Oct 29, 2013 2476 njensen Improved getting wx/discrete keys when -# retrieving data -# Oct 27, 2014 3766 randerso Changed _getLatest to include error text -# returned from InitClient.createDB() -# Apr 23, 2015 4259 njensen Updated for new JEP API -# Aug 06, 2015 4718 dgilling Prevent numpy 1.9 from wasting memory by -# upcasting scalars too high when using where. -# Aug 13, 2015 4704 randerso Added NumpyJavaEnforcer support for smartInits -# additional code cleanup -# Dec 03, 2015 5168 randerso Fixed problems running calc methods with both -# accumulative and non-accumulative weather -# elements as inputs -# Feb 06, 2017 5959 randerso Removed Java .toString() calls -# -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. 
Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - - - -import string, sys, re, time, types, getopt, fnmatch, LogStream, DatabaseID, JUtil, AbsTime, TimeRange -import SmartInitParams -import NumpyJavaEnforcer - -from numpy import * -pytime = time - -import RollBackImporter -rollbackImporter = RollBackImporter.RollBackImporter() - - -MAX_TIME = 2**31-1 - -def printTR(tr): - if tr is None: - return "None" - - if hasattr(tr, 'java_name'): - tr = TimeRange.encodeJavaTimeRange(tr) - - msg = '(' - stime = time.gmtime(tr[0]) - etime = time.gmtime(tr[1]) - stime = time.strftime('%Y%m%d_%H%M', stime) - etime = time.strftime('%Y%m%d_%H%M', etime) - msg += stime + ", " + etime - msg += ')' - return msg - -def printTRs(trList): - msg = '[' - for tr in trList: - s = printTR(tr) - msg += s - msg += ', ' - - msg += ']' - return msg - -#-------------------------------------------------------------------------- -# Main program that calls model-specific algorithms to generate ifp grids. -#-------------------------------------------------------------------------- - -#-------------------------------------------------------------------------- -# Definition for model database class. 
-#-------------------------------------------------------------------------- -class MDB: - def __init__(self, dblist): - self.__dbs = dblist - - def keys(self): - rval = [] - for db in self.__dbs: - rval = rval + db.keys() - return rval - - def __getitem__(self, key): - for db in self.__dbs: - if key in db.keys(): - return db[key] - raise KeyError(key + " not found") - - def __getattr__(self, name): - for db in self.__dbs: - if hasattr(db, name): - return getattr(db, name) - raise AttributeError(name + " not found") - - def getKeys(self): - rval = [] - for db in self.__dbs: - rval = rval + JUtil.javaStringListToPylist(db.getKeys()) - return rval - - def getItem(self, key): - for db in self.__dbs: - keyLow = key.lower() - for k in JUtil.javaStringListToPylist(db.getKeys()): - if k.lower() == keyLow: - return db.getItem(key) - - #for db in self.__dbs: - # try: - # return db.getItem(key) - # except Exception, e: - # # njensen: changed this to log an error to try and determine a deeper issue - # # pass - # LogStream.logProblem(key + " not found: " + str(e)) - - raise KeyError(key + " not found") - - def getModelTime(self): - if len(self.__dbs) > 0: - return self.__dbs[0].getModelTime() - return 0 - -#------------------------------------------------------------------------- -# Utilities -#------------------------------------------------------------------------- -class GridUtilities: - pres = [] - - def squishZ(self, cube, levels): - buckets = cube[0:levels] - inc = cube.shape[0] / float(levels) - start = 0 - end = int(inc) - for i in xrange(int(levels)): - buckets[i] = add.reduce(cube[start:end]) / (end - start) - start = end - end = int(inc * (i + 2)) - return buckets - - def skyFromRH(self, rh_c, gh_c, topo, p_SFC): - tmpP_SFC = p_SFC.copy() - tmpP_SFC /= 100.0 # convert surfp to milibars - x = 78 # delta x (85km - 850km) - - # Make a pressure cube - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - - pp = pmb / tmpP_SFC - pp = clip(pp, 
0.1, 1.0) - fmax = 78.0 + x / 15.5 - f100 = where(pp < 0.7, fmax * (pp - 0.1)/0.6, - 30.0 + (1.0 - pp) * (fmax - 30.0) / 0.3) - c = 0.196 + (.76 - x / 2834.0) * (1.0 - pp) - c = (rh_c/100.0 - 1.0) / c - c = exp(c) - f = minimum(f100 * c, 100.0) - f[less(gh_c, topo)] = 0.0 - f = self.squishZ(f, 5) - f[4] *= 0.25 - f /= 100.0 - sky = f[0] - for i in xrange(1, f.shape[0]): - sky = sky + f[i] - sky * f[i] - return sky * 100.0 - - #======================================================================= - # - # Calculate Wetbulb (C) based on temperature (C) and RH (%) - # (all algorithms straight out of GEMPAK - converted to numeric python) - # - def Wetbulb(self, tc, rh, pres): - dpc=self.RHDP(tc,rh) - thte=self.THTE(pres,tc,dpc) - dpc = None - wetbk=self.TMST(thte,pres,0) - return wetbk-273.15 - #======================================================================= - # - # Calculate parcel temp (K) given thetae (K) pressure (mb) and guess - # temperature (K) - # - def TMST(self, thte, pres, tguess): - tg=full_like(thte, tguess) - teclip=clip(thte-270.0,0.0,5000.0) - # - # if guess temp is 0 - make a more reasonable guess - # - tgnu=where(less(tg,1),(thte-0.5*teclip**1.05)*(pres/1000.0)**0.2,tg)\ - -273.15 - tg = teclip = None - epsi=0.01 - # - # Correct the temp up to 100 times. Typically this takes - # less than 5 iterations - # - for i in range(1,100): - i = i # eliminate warning in pychecker - tgnup=tgnu+1.0 - tenu=self.THTE(pres,tgnu,tgnu) - tenup=self.THTE(pres,tgnup,tgnup) - cor=(thte-tenu)/(tenup-tenu) - tenu = tenup = tgnup = None - tgnu=tgnu+cor - # - # get the maximum correction we made this time - # and if it is less than epsi - then we are close - # enough to stop. 
- # - mcor=maximum.reduce(maximum.reduce(maximum.reduce(abs(cor)))) - cor = None - if (mcor" - format = "%Y%m%d_%H%M" - return "(" + pytime.strftime(format, pytime.gmtime(tr[0])) + ", " +\ - pytime.strftime(format, pytime.gmtime(tr[1])) + ")" - - #-------------------------------------------------------------------------- - # Interpolates a new value given two x, two y, and a new x value. - #-------------------------------------------------------------------------- - def linear(self, xmin, xmax, ymin, ymax, we): - m = (ymax - ymin) / (xmax - xmin + .0000001) - b = ymin - m * xmin - return m * we + b - - #-------------------------------------------------------------------------- - # Converts the value from meters per second to knots. - #-------------------------------------------------------------------------- - def convertMsecToKts(self, value_Msec): - # Convert from meters/sec to Kts - return value_Msec * 1.944 - - #-------------------------------------------------------------------------- - # Converts the value from feet to meters - #-------------------------------------------------------------------------- - def convertFtToM(self, value_Ft): - # Convert the value in Feet to Meters - return value_Ft/3.28084 - - #-------------------------------------------------------------------------- - # Converts the value from Fahrenheit to Kelvin - #-------------------------------------------------------------------------- - def FtoK(self, t): - return (t + 459.67) / 1.8 - #-------------------------------------------------------------------------- - # Converts the value from Fahrenheit to Kelvin - #-------------------------------------------------------------------------- - def convertFtoK(self, t_F): - return self.FtoK(t_F) - - #-------------------------------------------------------------------------- - # Converts the value from Kelvin to Fahrenheit - #-------------------------------------------------------------------------- - def KtoF(self, t): - return t * 1.8 - 459.67 - 
#-------------------------------------------------------------------------- - # Converts the value from Kelvin to Fahrenheit - #-------------------------------------------------------------------------- - def convertKtoF(self, t): - return self.KtoF(t) - #-------------------------------------------------------------------------- - # Converts the value from meters to statute miles - #-------------------------------------------------------------------------- - def convertMtoSM(self, d): - return d * 0.00062 - #-------------------------------------------------------------------------- - # Calculates the saturation vapor pressure give the temperature in K - #-------------------------------------------------------------------------- - def esat(self, temp): - val = 26.660820 - 0.0091379024 * temp - 6106.3960 / temp - val[greater(val, 100)] = 100 - return exp(val) - - #-------------------------------------------------------------------------- - # Calculates the potential temp. given the temperature and pressure - # potential temp (p = milibars, t = kelvin) - #-------------------------------------------------------------------------- - def ptemp(self, t, p): - p = clip(p, .00001, 1500) - return t * pow((1000 / p), 0.286) - - # Returns the "area" (JKg-1) - def getArea(self, pbot, tbot, ptop, ttop): - logV = self.ptemp(ttop, ptop) / (self.ptemp(tbot, pbot) + 0.00001) - logV = clip(logV, 0.0001, logV) - tavg = (ttop + tbot) / 2.0 - #area = abs(1003.5 * tavg * log(logV)) - area = abs(1.0035 * tavg * log(logV)) - return where(less(tavg, 273.15), area * -1, area) - - # Returns two areas (from getArea). The - # second area is valid if we cross the freezing layer - # and is indicated by the cross mask. 
- def getAreas(self, pbot, tbot, ptop, ttop): - maxm = maximum(tbot, ttop) - minm = minimum(tbot, ttop) - freeze = self.newGrid(273.15) - crosses = logical_and(less(minm, freeze), greater(maxm, freeze)) - crossp = self.linear(pbot, ptop, tbot, ttop, freeze) - crosst = freeze - crossp = where(crosses, crossp, ptop) - crosst = where(crosses, crosst, ttop) - a1 = self.getArea(pbot, tbot, crossp, crosst) - a2 = self.getArea(crossp, crosst, ptop, ttop) - return a1, a2, crosses - - #-------------------------------------------------------------------------- - # Returns a numeric mask i.e. a grid of 0's and 1's - # where the value is 1 if the given query succeeds - # Arguments: - # wx -- a 2-tuple: - # wxValues : numerical grid of byte values - # keys : list of "ugly strings" where the index of - # the ugly string corresponds to the byte value in - # the wxValues grid. - # query -- a text string representing a query - # isreg -- if 1, the query is treated as a regular expression - # otherwise as a literal string - # Examples: - # # Here we want to treat the query as a regular expression - # PoP = where(self.wxMask(wxTuple, "^Chc:", 1), maximum(40, PoP), PoP) - # # Here we want to treat the query as a literal - # PoP = where(self.wxMask(wxTuple, ":L:") maximum(5, PoP), PoP) - # - #-------------------------------------------------------------------------- - def wxMask(self, wx, query, isreg=0): - rv = self.empty(bool) - if not isreg: - for i in xrange(len(wx[1])): - #if fnmatch.fnmatchcase(wx[1][i], query): - if query in wx[1][i]: - rv[equal(wx[0], i)] = True - else: - r = re.compile(query) - for i in xrange(len(wx[1])): - m = r.match(wx[1][i]) - if m is not None: - rv[equal(wx[0], i)] = True - return rv - - #-------------------------------------------------------------------------- - # Returns the byte value that corresponds to the - # given ugly string. It will add a new key if a new ugly - # string is requested. 
- # Arguments: - # uglyStr: a string representing a weather value - # keys: a list of ugly strings. - # A Wx argument represents a 2-tuple: - # wxValues : numerical grid of byte values - # keys : list of "ugly strings" where the index of - # the ugly string corresponds to the byte value in the wxValues grid. - # For example, if our keys are: - # "Sct:RW:-::" - # "Chc:T:-::" - # "Chc:SW:-::" - # Then, the wxValues grid will have byte values of 0 where - # there is "Sct:RW:-::", 1 where there is "Chc:T:-::" - # and 2 where there is "Chc:SW:-::" - # - #-------------------------------------------------------------------------- - def getIndex(self, uglyStr, keys): - if uglyStr == "": - uglyStr = "::::" - for str in keys: - if uglyStr == str: - return keys.index(uglyStr) - keys.append(uglyStr) - return len(keys)-1 - - #-------------------------------------------------------------------------- - # Place holder for levels function to be implemented by derived classes. - #-------------------------------------------------------------------------- - def levels(self): - return [] - - #-------------------------------------------------------------------------- - # Returns the topography grid. - #-------------------------------------------------------------------------- - def getTopo(self): - topo = self._client.getTopo() - topo = topo.getNDArray() - return topo - - #-------------------------------------------------------------------------- - # Returns a dictionary of magical values that will be used in other - # functions. 
- #-------------------------------------------------------------------------- - def magicArgs(self): - rval = { "topo" : (self.__topo, (0, MAX_TIME)), - "stopo" : (self.__stopo, (0, MAX_TIME)), - "ctime" : (None, (0, MAX_TIME)), - "stime" : (None, (0, MAX_TIME)), - "mtime" : (None, (0, MAX_TIME))} - for i in self._editAreas: - rval[i] = (None, (0, MAX_TIME)) - return rval - - #-------------------------------------------------------------------------- - # Runs the main program - #-------------------------------------------------------------------------- - def run(self): - dbName = SmartInitParams.params['dbName'] - validTime = SmartInitParams.params['validTime'] - - dbInfo = dbName.split(':') - self.__dbName = dbInfo[0] - - start = time.time() - self.__init() - if self.newdb() is None: - return - - msgDest = "Destination database:" + self.newdb().getModelIdentifier() - - if validTime is not None: - msgDest = msgDest + ", validTime " + pytime.strftime("%Y%m%d_%H%M", pytime.gmtime(validTime.getTime() / 1000)) - - LogStream.logEvent(msgDest) - self.__newwes = {} - self.__srcwes = {} - self._ifpio = IFPIO(self.srcdb(), self.newdb()) - self._ifpio.setLevels(self.levels()) - methods = self.__getMethods() - times = self.__sortTimes(methods, validTime) - tr, numGrids = self.__process(methods, times, int(dbInfo[1])) - stop = time.time() - msgTime = "%s: Elapsed time: %-.1f sec." % (self.newdb().getModelIdentifier(), (stop - start)) - - LogStream.logEvent(msgTime) - #LogStream.logEvent("Network stats: ", self._client.getStats()) - self._announce(self.newdb(), tr, numGrids) - - #-------------------------------------------------------------------------- - # Sends a message to the GFE stating that a database is complete. 
- #-------------------------------------------------------------------------- - def _announce(self, db, tr, numGrids): - if numGrids == 0: - return - dbTime = db.getModelTime() - if dbTime is not None: - modelTime = AbsTime.AbsTime(db.getModelTime()) - modelTime = modelTime.unixTime() - else: - modelTime = 0 - modelIdentifier = db.getShortModelIdentifier() - - if modelTime != 0: - trRel = ((tr[0]-modelTime)/3600, ((tr[1]-modelTime)/3600)-1) - msg = "Model: " + modelIdentifier + ' ' +\ - `trRel[0]` + '-' + `trRel[1]` + 'h #Grids=' + `numGrids` - else: - msg = "Model: " + modelIdentifier \ - + ' #Grids=' + `numGrids` - self._client.sendUserMessage(msg, "SMARTINIT") - - #-------------------------------------------------------------------------- - # Returns the IFPDB object for the given db - #-------------------------------------------------------------------------- - def getDb(self, dbString): - from com.raytheon.edex.plugin.gfe.smartinit import IFPDB - return IFPDB(dbString) - - #-------------------------------------------------------------------------- - # Returns the source and destination databases, given the srcName. - #-------------------------------------------------------------------------- - def _getLatest(self, client, srcNames, fcstName=None): - # ryu: Added/modified code to allow multiple sources. The srcdb is - # now an MDB. This is needed for (AK)NAM40 init, which sources - # from both NAM40 and NAM20. - - srcdbs = [] - modelName = DatabaseID.databaseID(self.__dbName).modelName() - - if len(srcNames) == 0: - srcNames = [modelName] - - for src in srcNames: - # source model at same model time - fullDBName = self.__dbName.replace(modelName, src) - db = self.getDb(fullDBName) - if db.getKeys().size() == 0: - LogStream.logEvent("Source database " + fullDBName + \ - " is empty.") - else: - srcdbs.append(db) - - srcdb = MDB(srcdbs) - - # I (njensen) removed most of what was here. 
It was looking at - # the available D2D netcdf data, and then forming a GFE db id - # from that for the target. Instead I'm just passing in - # the target from Java. - - newdb = self.__dbName.replace("D2D", "") - if fcstName and fcstName != modelName: - newdb = newdb.replace(modelName, fcstName) - - # create db if not singleton db - singletons = client.getSingletonIDs() - singletonNeeded = 0 - singletonsize = singletons.size() - for i in range(singletonsize): - d = singletons.get(i) - s = DatabaseID.DatabaseID(d) - if newdb[:-13] == s.modelIdentifier()[:-13]: - singletonNeeded = 1 - break - if singletonNeeded: - newdb = newdb[:-13] + '00000000_0000' - newdb = self.getDb(newdb) - else: - sr = client.createDB(newdb) - if sr.isOkay(): - newdb = self.getDb(newdb) - else: - msg = "Unable to create database for " + str(newdb) + ":\n" + \ - str(sr.message()) - LogStream.logProblem(msg) - newdb = None - - return srcdb, newdb - - #-------------------------------------------------------------------------- - # Returns the target database - #-------------------------------------------------------------------------- - def newdb(self): - return self.__newdb - - #-------------------------------------------------------------------------- - # Returns the source database. - #-------------------------------------------------------------------------- - def srcdb(self): - return self.__srcdb - - #-------------------------------------------------------------------------- - # Convert magnitude and direction to u and v components. - #-------------------------------------------------------------------------- - def _getUV(self, mag, dir): - rad = dir * 0.0174 - u = mag * sin(rad) - v = mag * cos(rad) - return (u, v) - - #-------------------------------------------------------------------------- - # Returns a mask where points are set when the specified query is true. 
- #-------------------------------------------------------------------------- - def _wxMask(self, wx, query, isreg=0): - rv = self.empty(bool) - if not isreg: - for i in xrange(len(wx[1])): - if fnmatch.fnmatchcase(wx[1][i], query): - rv[equal(wx[0], i)] = True - else: - r = re.compile(query) - for i in xrange(len(wx[1])): - m = r.match(wx[1][i]) - if m is not None: - rv[equal(wx[0], i)] = True - return rv - - #-------------------------------------------------------------------------- - # Returns the magnitude and direction from u and v components. - #-------------------------------------------------------------------------- - def _getMD(self, u, v): - mag = hypot(u, v) - dir = degrees(arctan2(u, v)) - dir[less(dir, 0)] += 360 - return (mag, dir) - - #-------------------------------------------------------------------------- - # Returns true if the specified time is found within the specified - # timeRange. - #-------------------------------------------------------------------------- - def _contains(self, timerange, time): - if timerange[1] - timerange[0]: - return ((time >= timerange[0]) and (time < timerange[1])) - return time == timerange[0] - - #-------------------------------------------------------------------------- - # Returns true if the two timeRanges overlap (share a common time period). - #-------------------------------------------------------------------------- - def _overlaps(self, tr1, tr2): - if self._contains(tr2, tr1[0]) or self._contains(tr1, tr2[0]): - return 1 - return 0 - - #-------------------------------------------------------------------------- - # Returns the overlapping timeRange between two timeRanges (if any). 
- #-------------------------------------------------------------------------- - def _intersect(self, t1, t2): - if self._overlaps(t1, t2): - return (max(t1[0], t2[0]), min(t1[1], t2[1])) - return None - - #-------------------------------------------------------------------------- - # Returns the "calc" methods for the specified Forecaster object. - #-------------------------------------------------------------------------- - def __getObjMethods(self, obj): - rval = [] - for o in obj.__bases__: - rval += self.__getObjMethods(o) - - magicArgs = self.magicArgs().keys() - for fn in filter(lambda x : x[:4] == "calc", dir(obj)): - mthd = eval("self." + fn) - co = mthd.im_func.func_code - args = co.co_varnames[1:co.co_argcount] - fargs = [] - for a in args: - if a not in magicArgs and string.find(a, '_') == -1: - a += "_SFC" - fargs.append(a) - wename = fn[4:] - if string.find(wename, "_") != -1: - wenameLevel = wename - else: - wenameLevel = wename + "_SFC" - #if wenameLevel not in self.newdb().keys(): - if wenameLevel not in JUtil.javaStringListToPylist(self.newdb().getKeys()): - msg = wenameLevel + " not in " + \ - self.newdb().getModelIdentifier() + " " + "SKIPPING" - LogStream.logEvent(msg) - continue - rval = filter(lambda x,y=wenameLevel : x[0] != y, rval) - rval.append((wenameLevel, mthd, fargs)) - return rval - - #-------------------------------------------------------------------------- - # Gets and returns a list of dependencies. 
- #-------------------------------------------------------------------------- - def __getdeps(self, m, lst): - rval = [] - for i in m[2]: - if i != m[0]: - for j in lst: - if j[0] == i: - rval = rval + self.__getdeps(j, lst) - else: - if len(m[2]) == 1: - raise ValueError("calc" + i - + " must depend on more than itself") - lst.remove(m) - return rval + [m] - - #-------------------------------------------------------------------------- - # Returns this objects methods - #-------------------------------------------------------------------------- - def __getMethods(self): - rval = [] - methods = self.__getObjMethods(self.__class__) - while len(methods): - rval += self.__getdeps(methods[0], methods) - return rval - - def __request(self, db, pname, time): - if pname[-2:] == "_c": - time = self.__getSrcWE( - pname[:-2] + "_MB500", 0).getTimeRange(time[0]) - rval = (pname[:-2], time, 1) - else: - time = self.__getSrcWE(pname, 0).getTimeRange(time[0]) - rval = (pname, time) - return rval - - def __unpackParm(self, parm): - if parm[-2:] == "_c": - list = [] - base = parm[:-2] - if len(self.levels()) == 0: - raise Exception("Request for " + parm - + " and levels() is empty!") - for l in self._ifpio.levels(): - list.append(base + "_" + l) - return list - return [parm] - - #-------------------------------------------------------------------------- - # Internal function that returns the time periods shared by tr and times. 
- #-------------------------------------------------------------------------- - def __compTimes(self, tr, times): - # TODO: surely there's a better way to do this - - for time in times: - if len(time) == 0: - return [] - - rval = [] - if len(times) == 1: - if tr is None: - return times[0] - for time in times[0]: - inter = self._intersect(tr, time) - if inter is not None: - rval.append(inter) - return rval - else: - mtimes = times[-1:][0] - ntimes = times[:-1] - for time in mtimes: - if tr is not None: - time = self._intersect(time, tr) - if time is not None: - trv = self.__compTimes(time, ntimes) - for r in trv: - if r not in rval: - rval.append(r) - return rval - - #-------------------------------------------------------------------------- - # Internal function that sorts times. - #-------------------------------------------------------------------------- - def __sortTimes(self, methods, validTime): - rval = [] - calced = [] - for we, mthd, args in methods: -# LogStream.logEvent("Evaluating times for", mthd.func_name) - calced.append(we) - args = filter(lambda x, ma=self.magicArgs().keys() + [we]: - x not in ma, args) - nargs = [] - for a in args: - nargs = nargs + self.__unpackParm(a) - - ttimes = [] - for p in nargs: - # p is an arg, e.g. 
gh_MB900 - try: - ttimes.append(rval[calced.index(p)]) - except: - # get all available times for that param - try: - ranges = self.__getSrcWE(p).getKeys() - size = ranges.size() - except: - size = 0 - - pylist = [] - for i in range(size): - jtr = ranges.get(i) - valid = False - - if validTime is None: - valid = True - else: - # need check to be inclusive on both ends for methods that - # need both accumulative and non-accumulative parms - valid = validTime.getTime() >= jtr.getStart().getTime() and \ - validTime.getTime() <= jtr.getEnd().getTime() - - if valid: - timelist = TimeRange.encodeJavaTimeRange(jtr) - pylist.append(timelist) - - ttimes.append(pylist) - -# msg = "Times available for " + p + " " + str(validTime) + ":\n" -# timeList = ttimes[-1] -# msg += printTRs(timeList) -# LogStream.logEvent(msg) - - # compare the times of each parm and find where they match up - times = self.__compTimes(None, ttimes) -# LogStream.logEvent("times:", printTRs(times)) - - hadDataButSkipped = {} - for i in range(len(ttimes)): - timeList = ttimes[i] - parmName = nargs[i] - for xtime in timeList: - if xtime not in times: - if hadDataButSkipped.has_key(xtime): - hadDataButSkipped[xtime].append(parmName) - else: - hadDataButSkipped[xtime] = [parmName] -# msg = "hadDataButSkipped: {" -# for tr in hadDataButSkipped: -# msg += printTR(tr) -# msg += ": " -# msg += str(hadDataButSkipped[tr]) -# msg += ", " -# msg += "}" -# LogStream.logEvent(msg) - - hadNoData = [] - for i in range(len(nargs)): - timeList = ttimes[i] - parmName = nargs[i] - if len(timeList) == 0: - hadNoData.append(parmName) -# LogStream.logEvent("hadNoData:",hadNoData) - - missing = {} - for xtime in hadDataButSkipped: - msg = printTR(xtime) - missing[msg] = [] - - for parmName in nargs: - if not hadDataButSkipped[xtime].__contains__(parmName): - missing[msg].append(parmName) - - if len(missing) == 0 and len(hadNoData) > 0: - msg = '' - if (validTime is not None): - vtime = validTime.getTime()/1000 - vtime = 
time.gmtime(vtime) - msg = time.strftime('%Y%m%d_%H%M', vtime) - missing[msg] = hadNoData -# LogStream.logEvent("missing:",missing) - - if len(missing): - LogStream.logEvent(self.newdb().getModelIdentifier() + ": Skipping calc" + we + " for some times due to the following " + - "missing data:", missing) - # these become the times to run the method for - rval.append(times) - - return rval - - #-------------------------------------------------------------------------- - # Returns a WeatherElement object given it's name. - #-------------------------------------------------------------------------- - def __getSrcWE(self, wename, lock=1): - return self._ifpio.getSrcWE(wename, lock) - - #-------------------------------------------------------------------------- - # Returns a new weather element given the weName. - #-------------------------------------------------------------------------- - def __getNewWE(self, wename, lock=1): - return self._ifpio.getNewWE(wename, lock) - - def __recursiveArg(self, cache, arg, time): - p = self.newdb().getItem(arg) - -# tr = p.getTimeRange(time[0]) - tr = TimeRange.TimeRange(AbsTime.AbsTime(time[0]), AbsTime.AbsTime(time[1])).toJavaObj() - times = p.getKeys(tr) - if times: - tr = times[0] - LogStream.logEvent("retrieving", arg, printTR(tr)) - - pytr = TimeRange.encodeJavaTimeRange(tr) - jslice = p.getItem(tr) - slice = jslice.getNDArray() - if type(slice) is ndarray and slice.dtype == int8: - # discrete or weather - keys = JUtil.javaObjToPyVal(jslice.getKeyList()) - slice = [slice, keys] - cache[arg] = (slice, pytr) - else: - LogStream.logEvent("no data for", arg, printTR(tr)) - cache[arg] = (None, time) - - def __argFill(self, cache, method, time): - we, mthd, args = method - LogStream.logEvent("getting arguments for", mthd.func_name, printTR(time)) - - gargs = [] - if self._ndbkeys is None: - self._ndbkeys = JUtil.javaStringListToPylist(self.newdb().getKeys()) - ndbkeys = self._ndbkeys - for arg in args: - if arg in self._editAreas: - 
if cache[arg][0] is None: - p = self.newdb().getItem(we) - ea = p.getEditArea(arg).getNDArray() - cache[arg] = (ea, (0, MAX_TIME)) - gargs.append(cache[arg][0]) - continue - if not cache.has_key(arg): - if arg in ndbkeys: - self.__recursiveArg(cache, arg, time) - else: - req = self.__request(self.srcdb(), arg, time) - val = self._ifpio.get(req) - if arg[-2:] == "_c": - self.pres = val[0] - val = val[1] - cache[arg] = (val, TimeRange.encodeJavaTimeRange(req[1])) - else: - if cache[arg][1] is not None and \ - not self._overlaps(time, cache[arg][1]): - if arg in ndbkeys: - self.__recursiveArg(cache, arg, time) - else: - req = self.__request(self.srcdb(), arg, time) - val = self._ifpio.get(req) - if arg[-2:] == "_c": - self.pres = val[0] - val = val[1] - cache[arg] = (val, TimeRange.encodeJavaTimeRange(req[1])) - else: - LogStream.logEvent("using cached", arg, printTR(cache[arg][1])) - - gargs.append(cache[arg][0]) - return gargs - - def __runMethod(self, method, time, cache): - we, mthd, args = method - - if self.mostRecentCacheClear != time: - self.mostRecentCacheClear = time - for key in cache.keys(): - cacheValue = cache[key] - if len(cacheValue) == 2: - # if they are for time ranges that we've completed calculations for, - # immediately set them to None to free up the memory - if not self._overlaps(cacheValue[1],time): - LogStream.logEvent("Clearing", key, printTR(cacheValue[1])) - cache[key] = (None, cacheValue[1]) - - gargs = self.__argFill(cache, method, time) - - doStore = False - if mthd.im_func is Forecaster.__exists.im_func: - msg = self.newdb().getModelIdentifier() + ": Get : " + we + " " + self._timeRangeStr(time) - LogStream.logEvent(msg) - else: - doStore = True - msg = self.newdb().getModelIdentifier() + ": Calc : " + we + " " + self._timeRangeStr(time) - LogStream.logEvent(msg) - - try: - rval = apply(mthd, tuple(gargs)) - - if rval is not None: - if type(rval) is not ndarray and rval is not None: - if type(rval) is not tuple: - jrval = rval - rval = 
rval.getNDArray() - if type(rval) is ndarray and rval.dtype == int8: - # discrete or weather - keys = JUtil.javaObjToPyVal(jrval.getKeyList()) - rval = [rval, keys] - else: - LogStream.logEvent("No value returned from calc"+str(we)) - - s = 'grid' - if rval is None: - s = 'None' - LogStream.logEvent("Caching", we, s, printTR(time)) - cache[we] = (rval, time) - - if rval is not None and cache['mtime'][0] is not None and doStore: - parm = self.__getNewWE(we) - LogStream.logEvent("Storing", we, printTR(cache['mtime'][0])) - self._ifpio.store(parm, cache['mtime'][0], cache[we][0]) - except: - LogStream.logProblem(self.newdb().getModelIdentifier() + ": Error while running method " + str(we) + - "\n" + LogStream.exc()) - cache[we] = (None, time) - - def __tsort(self, x, y): - if x[1][0] < y[1][0]: - return -1 - if x[1][0] > y[1][0]: - return 1 - return x[2] - y[2] - - def __flattenTimes(self, methods, times): - lst = [] - for i in xrange(len(methods)): - for t in times[i]: - lst.append((methods[i], t, i)) - lst.sort(self.__tsort) - return lst - - def __exists(self, mtime, wename): - #parm = self.__getNewWE(wename + "_SFC") - parm = self.__getNewWE(wename) - return parm.getItem(mtime) - - def __prune(self, lst): - rval = [] - for m, t, i in lst: - # m is (parmname_level, bound method calcParmname, argument list) - # t is a time range (list of two times) - # i is order? 
- parm = self.__getNewWE(m[0]) - #parm = self.__getNewWE(m[0] + "_SFC") - tr = TimeRange.encodeJavaTimeRange(parm.getTimeRange(t[0])) - if tr is None: - continue - parmtr = TimeRange.javaTimeRangeListToPyList(parm.getKeys()) - if tr in parmtr: - # Skip (maybe) - for m2, t2, i2 in lst: - if m2[0] == m[0]: - continue - if m2[0] in m[2] and self._overlaps(t2, t): - parm2 = self.__getNewWE(m2[0]) - #parm2 = self.__getNewWE(m2[0] + "_SFC") - tr2 = parm2.getTimeRange(t2[0]) - tr2 = TimeRange.encodeJavaTimeRange(tr2) - parm2tr = TimeRange.javaTimeRangeListToPyList(parm2.getKeys()) - if tr2 is not None and tr2 not in parm2tr: - f = ((m[0], self.__exists, - ('mtime', 'wename')), t, i) - if f not in rval: - rval.append(f) - rval.append((m, t, i)) - continue - if m[0] in m2[2] and self._overlaps(t2, tr): - parm2 = self.__getNewWE(m2[0]) - #parm2 = self.__getNewWE(m2[0] + "_SFC") - tr2 = parm2.getTimeRange(t2[0]) - tr2 = TimeRange.encodeJavaTimeRange(tr2) - parm2tr = TimeRange.javaTimeRangeListToPyList(parm2.getKeys()) - if tr2 is not None and tr2 not in parm2tr: - # Have to fetch the already calced one. 
- f = ((m[0], self.__exists, - ('mtime', 'wename')), t, i) - if f not in rval: - rval.append(f) - else: - # Need to calc this one - rval.append((m, t, i)) - return rval - - def sourceBaseTime(self): - modelTime = self.srcdb().getModelTime() - if modelTime is None: - modelTime = 0 - t = AbsTime.AbsTime(modelTime) - return t.unixTime() - - # JULIYA MODIFY HERE - def __process(self, methods, times, mode): - numGrids = 0 - trSpan = None - cache = self.magicArgs() - all = mode#Options['all'] manual=1 automatic=0 - list = self.__flattenTimes(methods, times) - if not all: - list = self.__prune(list) - - self.mostRecentCacheClear = None - for m, t, i in list: - cache['ctime'] = (t, (0, MAX_TIME)) - parm = self.__getNewWE(m[0]) - tr = parm.getTimeRange(t[0]) - - # A valid time range was not found so the parameter - # cannot be calculated, so continue - if not tr.isValid(): - continue - - cache['mtime'] = (tr, (0, MAX_TIME)) - cache['wename'] = (m[0], (0, MAX_TIME)) - cache['stime'] = (t[0] - self.sourceBaseTime(), (0, MAX_TIME)) - - try: - self.__runMethod(m, t, cache) - numGrids = numGrids + 1 - if trSpan is None: - trSpan = t - else: - trSpan = (min(trSpan[0],t[0]), max(trSpan[1], t[1])) - except: - LogStream.logProblem("Error in method setup for " + str(m[0]) - + "\n" + LogStream.exc()) - - return (trSpan, numGrids) - -#------------------------------------------------------------------------- -# Weather Element calculations -#------------------------------------------------------------------------- -class IFPIO: - def __init__(self, indb, outdb): - self.__srcwes = {} - self.__newwes = {} - self.eta = indb - self.newdb = outdb - - def getSrcWE(self, wename, lock=1): - rval = None - try: - rval = self.__srcwes[wename] - except: - rval = self.eta.getItem(wename) - self.__srcwes[wename] = rval - return rval - - def getNewWE(self, wename, lock=1): - try: - rval = self.__newwes[wename] - except: - rval = self.newdb.getItem(wename) - self.__newwes[wename] = rval - return rval - 
- def get(self, qv): - if len(qv) == 2: - name, time = qv - docube = 0 - else: - name, time, docube = qv - if not docube: - p = self.getSrcWE(name, 0) - times = p.getKeys(time) - if times: - time = times[0] - LogStream.logEvent("retrieving", name, printTR(time)) - - slice = p.getItem(time) - out = slice.getNDArray() - if type(out) is ndarray and out.dtype == int8: - # discrete or weather - keys = JUtil.javaObjToPyVal(slice.getKeyList()) - out = [out, keys] - else: - out = self._getcube(name, time) - return out - - #-------------------------------------------------------------------------- - # Sets the grid levels used for many algorithms. - #-------------------------------------------------------------------------- - def setLevels(self, levels): - self._levels = levels - - #-------------------------------------------------------------------------- - # Returns the current list of levels - #-------------------------------------------------------------------------- - def levels(self): - return self._levels - - #-------------------------------------------------------------------------- - # Returns the data cube for the specified parm, and time. 
- #-------------------------------------------------------------------------- - def _getcube(self, parm, time): - lvls = self.levels() - lst = [] - pres = [] - for l in lvls: - slice = self.get((parm + "_" + l, time)) - lst.append(slice) - pres.append(int(l[2:])) - # only scalars will be ndarray, otherwise it was vector, discrete, or wx - if type(lst[0]) is not ndarray: - ml = [] - dl = [] - for i in lst: - ml.append(i[0]) - dl.append(i[1]) - rval = (array(ml), array(dl)) - else: - rval = array(lst) - return (pres, rval) - - #-------------------------------------------------------------------------- - # Stores the specified grid in the element specified by newwe at the - # specified time - #-------------------------------------------------------------------------- - def store(self, newwe, time, grid): - gridType = newwe.getGridType() - if gridType == "SCALAR": - grid = clip(grid, newwe.getMinAllowedValue(), newwe.getMaxAllowedValue()) - elif gridType == "VECTOR": - mag = clip(grid[0], newwe.getMinAllowedValue(), newwe.getMaxAllowedValue()) - dir = clip(grid[1], 0, 359.5) - grid = (mag, dir) - tr = TimeRange.encodeJavaTimeRange(time) - # safety checks - wrongType = None - saved = False - if type(grid) is ndarray: - grid = NumpyJavaEnforcer.checkdTypes(grid, float32) - # scalar save - newwe.setItemScalar(newwe.getTimeRange(tr[0]), grid) - saved = True - elif (type(grid) is list or type(grid) is tuple) and len(grid) == 2: - if type(grid[0]) is ndarray and type(grid[1]) is ndarray: - magGrid = NumpyJavaEnforcer.checkdTypes(grid[0], float32) - dirGrid = NumpyJavaEnforcer.checkdTypes(grid[1], float32) - # vector save - newwe.setItemVector(newwe.getTimeRange(tr[0]), magGrid, dirGrid) - saved = True - elif type(grid[0]) is ndarray and type(grid[1]) is list: - bgrid = NumpyJavaEnforcer.checkdTypes(grid[0], int8) - - if gridType == "DISCRETE": - newwe.setItemDiscrete(newwe.getTimeRange(tr[0]), bgrid, str(grid[1])) - elif gridType == "WEATHER": - 
newwe.setItemWeather(newwe.getTimeRange(tr[0]), bgrid, str(grid[1])) - - saved = True - if not saved: - if wrongType is None: - wrongType = type(grid) - msg = str(wrongType) + " type returned from calcMethod is not safe to store for " + str(newwe) - raise TypeError(msg) - -#-------------------------------------------------------------------------- -# Main program -#-------------------------------------------------------------------------- -def runFromJava(dbName, model, validTime): - SmartInitParams.params['dbName'] = dbName - SmartInitParams.params['validTime'] = validTime - - mod = __import__(model) - mod.main() - rollbackImporter.rollback() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- --------------------------------------------- +# Feb 16, 2012 14439 jdynina modified haines thresholds +# Feb 16, 2012 13917 jdynina merged in changes from TRAC ticket 11391 +# Jul 25, 2012 957 dgilling implement edit areas as args to calc methods. 
+# Oct 05, 2012 15158 ryu add Forecaster.getDb() +# Apr 04, 2013 1787 randerso fix validTime check to work with accumulative +# parms fix logging so you can actually +# determine why a smartInit is not calculating +# a parameter +# Oct 29, 2013 2476 njensen Improved getting wx/discrete keys when +# retrieving data +# Oct 27, 2014 3766 randerso Changed _getLatest to include error text +# returned from InitClient.createDB() +# Apr 23, 2015 4259 njensen Updated for new JEP API +# Aug 06, 2015 4718 dgilling Prevent numpy 1.9 from wasting memory by +# upcasting scalars too high when using where. +# Aug 13, 2015 4704 randerso Added NumpyJavaEnforcer support for smartInits +# additional code cleanup +# Dec 03, 2015 5168 randerso Fixed problems running calc methods with both +# accumulative and non-accumulative weather +# elements as inputs +# Feb 06, 2017 5959 randerso Removed Java .toString() calls +# +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. 
Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + + + +import string, sys, re, time, types, getopt, fnmatch, LogStream, DatabaseID, JUtil, AbsTime, TimeRange +import SmartInitParams +import NumpyJavaEnforcer + +from numpy import * +pytime = time + +import RollBackImporter +rollbackImporter = RollBackImporter.RollBackImporter() + + +MAX_TIME = 2**31-1 + +def printTR(tr): + if tr is None: + return "None" + + if hasattr(tr, 'java_name'): + tr = TimeRange.encodeJavaTimeRange(tr) + + msg = '(' + stime = time.gmtime(tr[0]) + etime = time.gmtime(tr[1]) + stime = time.strftime('%Y%m%d_%H%M', stime) + etime = time.strftime('%Y%m%d_%H%M', etime) + msg += stime + ", " + etime + msg += ')' + return msg + +def printTRs(trList): + msg = '[' + for tr in trList: + s = printTR(tr) + msg += s + msg += ', ' + + msg += ']' + return msg + +#-------------------------------------------------------------------------- +# Main program that calls model-specific algorithms to generate ifp grids. +#-------------------------------------------------------------------------- + +#-------------------------------------------------------------------------- +# Definition for model database class. 
+#-------------------------------------------------------------------------- +class MDB: + def __init__(self, dblist): + self.__dbs = dblist + + def keys(self): + rval = [] + for db in self.__dbs: + rval = rval + list(db.keys()) + return rval + + def __getitem__(self, key): + for db in self.__dbs: + if key in list(db.keys()): + return db[key] + raise KeyError(key + " not found") + + def __getattr__(self, name): + for db in self.__dbs: + if hasattr(db, name): + return getattr(db, name) + raise AttributeError(name + " not found") + + def getKeys(self): + rval = [] + for db in self.__dbs: + rval = rval + JUtil.javaStringListToPylist(db.getKeys()) + return rval + + def getItem(self, key): + for db in self.__dbs: + keyLow = key.lower() + for k in JUtil.javaStringListToPylist(db.getKeys()): + if k.lower() == keyLow: + return db.getItem(key) + + #for db in self.__dbs: + # try: + # return db.getItem(key) + # except Exception, e: + # # njensen: changed this to log an error to try and determine a deeper issue + # # pass + # LogStream.logProblem(key + " not found: " + str(e)) + + raise KeyError(key + " not found") + + def getModelTime(self): + if len(self.__dbs) > 0: + return self.__dbs[0].getModelTime() + return 0 + +#------------------------------------------------------------------------- +# Utilities +#------------------------------------------------------------------------- +class GridUtilities: + pres = [] + + def squishZ(self, cube, levels): + buckets = cube[0:levels] + inc = cube.shape[0] / float(levels) + start = 0 + end = int(inc) + for i in range(int(levels)): + buckets[i] = add.reduce(cube[start:end]) / (end - start) + start = end + end = int(inc * (i + 2)) + return buckets + + def skyFromRH(self, rh_c, gh_c, topo, p_SFC): + tmpP_SFC = p_SFC.copy() + tmpP_SFC /= 100.0 # convert surfp to milibars + x = 78 # delta x (85km - 850km) + + # Make a pressure cube + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + + pp = pmb / tmpP_SFC + pp 
= clip(pp, 0.1, 1.0) + fmax = 78.0 + x / 15.5 + f100 = where(pp < 0.7, fmax * (pp - 0.1)/0.6, + 30.0 + (1.0 - pp) * (fmax - 30.0) / 0.3) + c = 0.196 + (.76 - x / 2834.0) * (1.0 - pp) + c = (rh_c/100.0 - 1.0) / c + c = exp(c) + f = minimum(f100 * c, 100.0) + f[less(gh_c, topo)] = 0.0 + f = self.squishZ(f, 5) + f[4] *= 0.25 + f /= 100.0 + sky = f[0] + for i in range(1, f.shape[0]): + sky = sky + f[i] - sky * f[i] + return sky * 100.0 + + #======================================================================= + # + # Calculate Wetbulb (C) based on temperature (C) and RH (%) + # (all algorithms straight out of GEMPAK - converted to numeric python) + # + def Wetbulb(self, tc, rh, pres): + dpc=self.RHDP(tc,rh) + thte=self.THTE(pres,tc,dpc) + dpc = None + wetbk=self.TMST(thte,pres,0) + return wetbk-273.15 + #======================================================================= + # + # Calculate parcel temp (K) given thetae (K) pressure (mb) and guess + # temperature (K) + # + def TMST(self, thte, pres, tguess): + tg=full_like(thte, tguess) + teclip=clip(thte-270.0,0.0,5000.0) + # + # if guess temp is 0 - make a more reasonable guess + # + tgnu=where(less(tg,1),(thte-0.5*teclip**1.05)*(pres/1000.0)**0.2,tg)\ + -273.15 + tg = teclip = None + epsi=0.01 + # + # Correct the temp up to 100 times. Typically this takes + # less than 5 iterations + # + for i in range(1,100): + i = i # eliminate warning in pychecker + tgnup=tgnu+1.0 + tenu=self.THTE(pres,tgnu,tgnu) + tenup=self.THTE(pres,tgnup,tgnup) + cor=(thte-tenu)/(tenup-tenu) + tenu = tenup = tgnup = None + tgnu=tgnu+cor + # + # get the maximum correction we made this time + # and if it is less than epsi - then we are close + # enough to stop. 
+ # + mcor=maximum.reduce(maximum.reduce(maximum.reduce(abs(cor)))) + cor = None + if (mcor" + format = "%Y%m%d_%H%M" + return "(" + pytime.strftime(format, pytime.gmtime(tr[0])) + ", " +\ + pytime.strftime(format, pytime.gmtime(tr[1])) + ")" + + #-------------------------------------------------------------------------- + # Interpolates a new value given two x, two y, and a new x value. + #-------------------------------------------------------------------------- + def linear(self, xmin, xmax, ymin, ymax, we): + m = (ymax - ymin) / (xmax - xmin + .0000001) + b = ymin - m * xmin + return m * we + b + + #-------------------------------------------------------------------------- + # Converts the value from meters per second to knots. + #-------------------------------------------------------------------------- + def convertMsecToKts(self, value_Msec): + # Convert from meters/sec to Kts + return value_Msec * 1.944 + + #-------------------------------------------------------------------------- + # Converts the value from feet to meters + #-------------------------------------------------------------------------- + def convertFtToM(self, value_Ft): + # Convert the value in Feet to Meters + return value_Ft/3.28084 + + #-------------------------------------------------------------------------- + # Converts the value from Fahrenheit to Kelvin + #-------------------------------------------------------------------------- + def FtoK(self, t): + return (t + 459.67) / 1.8 + #-------------------------------------------------------------------------- + # Converts the value from Fahrenheit to Kelvin + #-------------------------------------------------------------------------- + def convertFtoK(self, t_F): + return self.FtoK(t_F) + + #-------------------------------------------------------------------------- + # Converts the value from Kelvin to Fahrenheit + #-------------------------------------------------------------------------- + def KtoF(self, t): + return t * 1.8 - 459.67 + 
#-------------------------------------------------------------------------- + # Converts the value from Kelvin to Fahrenheit + #-------------------------------------------------------------------------- + def convertKtoF(self, t): + return self.KtoF(t) + #-------------------------------------------------------------------------- + # Converts the value from meters to statute miles + #-------------------------------------------------------------------------- + def convertMtoSM(self, d): + return d * 0.00062 + #-------------------------------------------------------------------------- + # Calculates the saturation vapor pressure give the temperature in K + #-------------------------------------------------------------------------- + def esat(self, temp): + val = 26.660820 - 0.0091379024 * temp - 6106.3960 / temp + val[greater(val, 100)] = 100 + return exp(val) + + #-------------------------------------------------------------------------- + # Calculates the potential temp. given the temperature and pressure + # potential temp (p = milibars, t = kelvin) + #-------------------------------------------------------------------------- + def ptemp(self, t, p): + p = clip(p, .00001, 1500) + return t * pow((1000 / p), 0.286) + + # Returns the "area" (JKg-1) + def getArea(self, pbot, tbot, ptop, ttop): + logV = self.ptemp(ttop, ptop) / (self.ptemp(tbot, pbot) + 0.00001) + logV = clip(logV, 0.0001, logV) + tavg = (ttop + tbot) / 2.0 + #area = abs(1003.5 * tavg * log(logV)) + area = abs(1.0035 * tavg * log(logV)) + return where(less(tavg, 273.15), area * -1, area) + + # Returns two areas (from getArea). The + # second area is valid if we cross the freezing layer + # and is indicated by the cross mask. 
+ def getAreas(self, pbot, tbot, ptop, ttop): + maxm = maximum(tbot, ttop) + minm = minimum(tbot, ttop) + freeze = self.newGrid(273.15) + crosses = logical_and(less(minm, freeze), greater(maxm, freeze)) + crossp = self.linear(pbot, ptop, tbot, ttop, freeze) + crosst = freeze + crossp = where(crosses, crossp, ptop) + crosst = where(crosses, crosst, ttop) + a1 = self.getArea(pbot, tbot, crossp, crosst) + a2 = self.getArea(crossp, crosst, ptop, ttop) + return a1, a2, crosses + + #-------------------------------------------------------------------------- + # Returns a numeric mask i.e. a grid of 0's and 1's + # where the value is 1 if the given query succeeds + # Arguments: + # wx -- a 2-tuple: + # wxValues : numerical grid of byte values + # keys : list of "ugly strings" where the index of + # the ugly string corresponds to the byte value in + # the wxValues grid. + # query -- a text string representing a query + # isreg -- if 1, the query is treated as a regular expression + # otherwise as a literal string + # Examples: + # # Here we want to treat the query as a regular expression + # PoP = where(self.wxMask(wxTuple, "^Chc:", 1), maximum(40, PoP), PoP) + # # Here we want to treat the query as a literal + # PoP = where(self.wxMask(wxTuple, ":L:") maximum(5, PoP), PoP) + # + #-------------------------------------------------------------------------- + def wxMask(self, wx, query, isreg=0): + rv = self.empty(bool) + if not isreg: + for i in range(len(wx[1])): + #if fnmatch.fnmatchcase(wx[1][i], query): + if query in wx[1][i]: + rv[equal(wx[0], i)] = True + else: + r = re.compile(query) + for i in range(len(wx[1])): + m = r.match(wx[1][i]) + if m is not None: + rv[equal(wx[0], i)] = True + return rv + + #-------------------------------------------------------------------------- + # Returns the byte value that corresponds to the + # given ugly string. It will add a new key if a new ugly + # string is requested. 
+ # Arguments: + # uglyStr: a string representing a weather value + # keys: a list of ugly strings. + # A Wx argument represents a 2-tuple: + # wxValues : numerical grid of byte values + # keys : list of "ugly strings" where the index of + # the ugly string corresponds to the byte value in the wxValues grid. + # For example, if our keys are: + # "Sct:RW:-::" + # "Chc:T:-::" + # "Chc:SW:-::" + # Then, the wxValues grid will have byte values of 0 where + # there is "Sct:RW:-::", 1 where there is "Chc:T:-::" + # and 2 where there is "Chc:SW:-::" + # + #-------------------------------------------------------------------------- + def getIndex(self, uglyStr, keys): + if uglyStr == "": + uglyStr = "::::" + for str in keys: + if uglyStr == str: + return keys.index(uglyStr) + keys.append(uglyStr) + return len(keys)-1 + + #-------------------------------------------------------------------------- + # Place holder for levels function to be implemented by derived classes. + #-------------------------------------------------------------------------- + def levels(self): + return [] + + #-------------------------------------------------------------------------- + # Returns the topography grid. + #-------------------------------------------------------------------------- + def getTopo(self): + topo = self._client.getTopo() + topo = topo.getNDArray() + return topo + + #-------------------------------------------------------------------------- + # Returns a dictionary of magical values that will be used in other + # functions. 
+ #-------------------------------------------------------------------------- + def magicArgs(self): + rval = { "topo" : (self.__topo, (0, MAX_TIME)), + "stopo" : (self.__stopo, (0, MAX_TIME)), + "ctime" : (None, (0, MAX_TIME)), + "stime" : (None, (0, MAX_TIME)), + "mtime" : (None, (0, MAX_TIME))} + for i in self._editAreas: + rval[i] = (None, (0, MAX_TIME)) + return rval + + #-------------------------------------------------------------------------- + # Runs the main program + #-------------------------------------------------------------------------- + def run(self): + dbName = SmartInitParams.params['dbName'] + validTime = SmartInitParams.params['validTime'] + + dbInfo = dbName.split(':') + self.__dbName = dbInfo[0] + + start = time.time() + self.__init() + if self.newdb() is None: + return + + msgDest = "Destination database:" + self.newdb().getModelIdentifier() + + if validTime is not None: + msgDest = msgDest + ", validTime " + pytime.strftime("%Y%m%d_%H%M", pytime.gmtime(validTime.getTime() / 1000)) + + LogStream.logEvent(msgDest) + self.__newwes = {} + self.__srcwes = {} + self._ifpio = IFPIO(self.srcdb(), self.newdb()) + self._ifpio.setLevels(self.levels()) + methods = self.__getMethods() + times = self.__sortTimes(methods, validTime) + tr, numGrids = self.__process(methods, times, int(dbInfo[1])) + stop = time.time() + msgTime = "%s: Elapsed time: %-.1f sec." % (self.newdb().getModelIdentifier(), (stop - start)) + + LogStream.logEvent(msgTime) + #LogStream.logEvent("Network stats: ", self._client.getStats()) + self._announce(self.newdb(), tr, numGrids) + + #-------------------------------------------------------------------------- + # Sends a message to the GFE stating that a database is complete. 
+ #-------------------------------------------------------------------------- + def _announce(self, db, tr, numGrids): + if numGrids == 0: + return + dbTime = db.getModelTime() + if dbTime is not None: + modelTime = AbsTime.AbsTime(db.getModelTime()) + modelTime = modelTime.unixTime() + else: + modelTime = 0 + modelIdentifier = db.getShortModelIdentifier() + + if modelTime != 0: + trRel = ((tr[0]-modelTime)/3600, ((tr[1]-modelTime)/3600)-1) + msg = "Model: " + modelIdentifier + ' ' +\ + repr(trRel[0]) + '-' + repr(trRel[1]) + 'h #Grids=' + repr(numGrids) + else: + msg = "Model: " + modelIdentifier \ + + ' #Grids=' + repr(numGrids) + self._client.sendUserMessage(msg, "SMARTINIT") + + #-------------------------------------------------------------------------- + # Returns the IFPDB object for the given db + #-------------------------------------------------------------------------- + def getDb(self, dbString): + from com.raytheon.edex.plugin.gfe.smartinit import IFPDB + return IFPDB(dbString) + + #-------------------------------------------------------------------------- + # Returns the source and destination databases, given the srcName. + #-------------------------------------------------------------------------- + def _getLatest(self, client, srcNames, fcstName=None): + # ryu: Added/modified code to allow multiple sources. The srcdb is + # now an MDB. This is needed for (AK)NAM40 init, which sources + # from both NAM40 and NAM20. + + srcdbs = [] + modelName = DatabaseID.databaseID(self.__dbName).modelName() + + if len(srcNames) == 0: + srcNames = [modelName] + + for src in srcNames: + # source model at same model time + fullDBName = self.__dbName.replace(modelName, src) + db = self.getDb(fullDBName) + if db.getKeys().size() == 0: + LogStream.logEvent("Source database " + fullDBName + \ + " is empty.") + else: + srcdbs.append(db) + + srcdb = MDB(srcdbs) + + # I (njensen) removed most of what was here. 
It was looking at + # the available D2D netcdf data, and then forming a GFE db id + # from that for the target. Instead I'm just passing in + # the target from Java. + + newdb = self.__dbName.replace("D2D", "") + if fcstName and fcstName != modelName: + newdb = newdb.replace(modelName, fcstName) + + # create db if not singleton db + singletons = client.getSingletonIDs() + singletonNeeded = 0 + singletonsize = singletons.size() + for i in range(singletonsize): + d = singletons.get(i) + s = DatabaseID.DatabaseID(d) + if newdb[:-13] == s.modelIdentifier()[:-13]: + singletonNeeded = 1 + break + if singletonNeeded: + newdb = newdb[:-13] + '00000000_0000' + newdb = self.getDb(newdb) + else: + sr = client.createDB(newdb) + if sr.isOkay(): + newdb = self.getDb(newdb) + else: + msg = "Unable to create database for " + str(newdb) + ":\n" + \ + str(sr.message()) + LogStream.logProblem(msg) + newdb = None + + return srcdb, newdb + + #-------------------------------------------------------------------------- + # Returns the target database + #-------------------------------------------------------------------------- + def newdb(self): + return self.__newdb + + #-------------------------------------------------------------------------- + # Returns the source database. + #-------------------------------------------------------------------------- + def srcdb(self): + return self.__srcdb + + #-------------------------------------------------------------------------- + # Convert magnitude and direction to u and v components. + #-------------------------------------------------------------------------- + def _getUV(self, mag, dir): + rad = dir * 0.0174 + u = mag * sin(rad) + v = mag * cos(rad) + return (u, v) + + #-------------------------------------------------------------------------- + # Returns a mask where points are set when the specified query is true. 
+ #-------------------------------------------------------------------------- + def _wxMask(self, wx, query, isreg=0): + rv = self.empty(bool) + if not isreg: + for i in range(len(wx[1])): + if fnmatch.fnmatchcase(wx[1][i], query): + rv[equal(wx[0], i)] = True + else: + r = re.compile(query) + for i in range(len(wx[1])): + m = r.match(wx[1][i]) + if m is not None: + rv[equal(wx[0], i)] = True + return rv + + #-------------------------------------------------------------------------- + # Returns the magnitude and direction from u and v components. + #-------------------------------------------------------------------------- + def _getMD(self, u, v): + mag = hypot(u, v) + dir = degrees(arctan2(u, v)) + dir[less(dir, 0)] += 360 + return (mag, dir) + + #-------------------------------------------------------------------------- + # Returns true if the specified time is found within the specified + # timeRange. + #-------------------------------------------------------------------------- + def _contains(self, timerange, time): + if timerange[1] - timerange[0]: + return ((time >= timerange[0]) and (time < timerange[1])) + return time == timerange[0] + + #-------------------------------------------------------------------------- + # Returns true if the two timeRanges overlap (share a common time period). + #-------------------------------------------------------------------------- + def _overlaps(self, tr1, tr2): + if self._contains(tr2, tr1[0]) or self._contains(tr1, tr2[0]): + return 1 + return 0 + + #-------------------------------------------------------------------------- + # Returns the overlapping timeRange between two timeRanges (if any). 
+ #-------------------------------------------------------------------------- + def _intersect(self, t1, t2): + if self._overlaps(t1, t2): + return (max(t1[0], t2[0]), min(t1[1], t2[1])) + return None + + #-------------------------------------------------------------------------- + # Returns the "calc" methods for the specified Forecaster object. + #-------------------------------------------------------------------------- + def __getObjMethods(self, obj): + rval = [] + for o in obj.__bases__: + rval += self.__getObjMethods(o) + + magicArgs = list(self.magicArgs().keys()) + for fn in [x for x in dir(obj) if x[:4] == "calc"]: + mthd = eval("self." + fn) + co = mthd.__func__.__code__ + args = co.co_varnames[1:co.co_argcount] + fargs = [] + for a in args: + if a not in magicArgs and string.find(a, '_') == -1: + a += "_SFC" + fargs.append(a) + wename = fn[4:] + if string.find(wename, "_") != -1: + wenameLevel = wename + else: + wenameLevel = wename + "_SFC" + #if wenameLevel not in self.newdb().keys(): + if wenameLevel not in JUtil.javaStringListToPylist(self.newdb().getKeys()): + msg = wenameLevel + " not in " + \ + self.newdb().getModelIdentifier() + " " + "SKIPPING" + LogStream.logEvent(msg) + continue + rval = list(filter(lambda x,y=wenameLevel : x[0] != y, rval)) + rval.append((wenameLevel, mthd, fargs)) + return rval + + #-------------------------------------------------------------------------- + # Gets and returns a list of dependencies. 
+ #-------------------------------------------------------------------------- + def __getdeps(self, m, lst): + rval = [] + for i in m[2]: + if i != m[0]: + for j in lst: + if j[0] == i: + rval = rval + self.__getdeps(j, lst) + else: + if len(m[2]) == 1: + raise ValueError("calc" + i + + " must depend on more than itself") + lst.remove(m) + return rval + [m] + + #-------------------------------------------------------------------------- + # Returns this objects methods + #-------------------------------------------------------------------------- + def __getMethods(self): + rval = [] + methods = self.__getObjMethods(self.__class__) + while len(methods): + rval += self.__getdeps(methods[0], methods) + return rval + + def __request(self, db, pname, time): + if pname[-2:] == "_c": + time = self.__getSrcWE( + pname[:-2] + "_MB500", 0).getTimeRange(time[0]) + rval = (pname[:-2], time, 1) + else: + time = self.__getSrcWE(pname, 0).getTimeRange(time[0]) + rval = (pname, time) + return rval + + def __unpackParm(self, parm): + if parm[-2:] == "_c": + list = [] + base = parm[:-2] + if len(self.levels()) == 0: + raise Exception("Request for " + parm + + " and levels() is empty!") + for l in self._ifpio.levels(): + list.append(base + "_" + l) + return list + return [parm] + + #-------------------------------------------------------------------------- + # Internal function that returns the time periods shared by tr and times. 
+ #-------------------------------------------------------------------------- + def __compTimes(self, tr, times): + # TODO: surely there's a better way to do this + + for time in times: + if len(time) == 0: + return [] + + rval = [] + if len(times) == 1: + if tr is None: + return times[0] + for time in times[0]: + inter = self._intersect(tr, time) + if inter is not None: + rval.append(inter) + return rval + else: + mtimes = times[-1:][0] + ntimes = times[:-1] + for time in mtimes: + if tr is not None: + time = self._intersect(time, tr) + if time is not None: + trv = self.__compTimes(time, ntimes) + for r in trv: + if r not in rval: + rval.append(r) + return rval + + #-------------------------------------------------------------------------- + # Internal function that sorts times. + #-------------------------------------------------------------------------- + def __sortTimes(self, methods, validTime): + rval = [] + calced = [] + for we, mthd, args in methods: +# LogStream.logEvent("Evaluating times for", mthd.func_name) + calced.append(we) + args = list(filter(lambda x, ma=list(self.magicArgs().keys()) + [we]: + x not in ma, args)) + nargs = [] + for a in args: + nargs = nargs + self.__unpackParm(a) + + ttimes = [] + for p in nargs: + # p is an arg, e.g. 
gh_MB900 + try: + ttimes.append(rval[calced.index(p)]) + except: + # get all available times for that param + try: + ranges = self.__getSrcWE(p).getKeys() + size = ranges.size() + except: + size = 0 + + pylist = [] + for i in range(size): + jtr = ranges.get(i) + valid = False + + if validTime is None: + valid = True + else: + # need check to be inclusive on both ends for methods that + # need both accumulative and non-accumulative parms + valid = validTime.getTime() >= jtr.getStart().getTime() and \ + validTime.getTime() <= jtr.getEnd().getTime() + + if valid: + timelist = TimeRange.encodeJavaTimeRange(jtr) + pylist.append(timelist) + + ttimes.append(pylist) + +# msg = "Times available for " + p + " " + str(validTime) + ":\n" +# timeList = ttimes[-1] +# msg += printTRs(timeList) +# LogStream.logEvent(msg) + + # compare the times of each parm and find where they match up + times = self.__compTimes(None, ttimes) +# LogStream.logEvent("times:", printTRs(times)) + + hadDataButSkipped = {} + for i in range(len(ttimes)): + timeList = ttimes[i] + parmName = nargs[i] + for xtime in timeList: + if xtime not in times: + if xtime in hadDataButSkipped: + hadDataButSkipped[xtime].append(parmName) + else: + hadDataButSkipped[xtime] = [parmName] +# msg = "hadDataButSkipped: {" +# for tr in hadDataButSkipped: +# msg += printTR(tr) +# msg += ": " +# msg += str(hadDataButSkipped[tr]) +# msg += ", " +# msg += "}" +# LogStream.logEvent(msg) + + hadNoData = [] + for i in range(len(nargs)): + timeList = ttimes[i] + parmName = nargs[i] + if len(timeList) == 0: + hadNoData.append(parmName) +# LogStream.logEvent("hadNoData:",hadNoData) + + missing = {} + for xtime in hadDataButSkipped: + msg = printTR(xtime) + missing[msg] = [] + + for parmName in nargs: + if not hadDataButSkipped[xtime].__contains__(parmName): + missing[msg].append(parmName) + + if len(missing) == 0 and len(hadNoData) > 0: + msg = '' + if (validTime is not None): + vtime = validTime.getTime()/1000 + vtime = 
time.gmtime(vtime) + msg = time.strftime('%Y%m%d_%H%M', vtime) + missing[msg] = hadNoData +# LogStream.logEvent("missing:",missing) + + if len(missing): + LogStream.logEvent(self.newdb().getModelIdentifier() + ": Skipping calc" + we + " for some times due to the following " + + "missing data:", missing) + # these become the times to run the method for + rval.append(times) + + return rval + + #-------------------------------------------------------------------------- + # Returns a WeatherElement object given it's name. + #-------------------------------------------------------------------------- + def __getSrcWE(self, wename, lock=1): + return self._ifpio.getSrcWE(wename, lock) + + #-------------------------------------------------------------------------- + # Returns a new weather element given the weName. + #-------------------------------------------------------------------------- + def __getNewWE(self, wename, lock=1): + return self._ifpio.getNewWE(wename, lock) + + def __recursiveArg(self, cache, arg, time): + p = self.newdb().getItem(arg) + +# tr = p.getTimeRange(time[0]) + tr = TimeRange.TimeRange(AbsTime.AbsTime(time[0]), AbsTime.AbsTime(time[1])).toJavaObj() + times = p.getKeys(tr) + if times: + tr = times[0] + LogStream.logEvent("retrieving", arg, printTR(tr)) + + pytr = TimeRange.encodeJavaTimeRange(tr) + jslice = p.getItem(tr) + slice = jslice.getNDArray() + if type(slice) is ndarray and slice.dtype == int8: + # discrete or weather + keys = JUtil.javaObjToPyVal(jslice.getKeyList()) + slice = [slice, keys] + cache[arg] = (slice, pytr) + else: + LogStream.logEvent("no data for", arg, printTR(tr)) + cache[arg] = (None, time) + + def __argFill(self, cache, method, time): + we, mthd, args = method + LogStream.logEvent("getting arguments for", mthd.__name__, printTR(time)) + + gargs = [] + if self._ndbkeys is None: + self._ndbkeys = JUtil.javaStringListToPylist(self.newdb().getKeys()) + ndbkeys = self._ndbkeys + for arg in args: + if arg in self._editAreas: + 
if cache[arg][0] is None: + p = self.newdb().getItem(we) + ea = p.getEditArea(arg).getNDArray() + cache[arg] = (ea, (0, MAX_TIME)) + gargs.append(cache[arg][0]) + continue + if arg not in cache: + if arg in ndbkeys: + self.__recursiveArg(cache, arg, time) + else: + req = self.__request(self.srcdb(), arg, time) + val = self._ifpio.get(req) + if arg[-2:] == "_c": + self.pres = val[0] + val = val[1] + cache[arg] = (val, TimeRange.encodeJavaTimeRange(req[1])) + else: + if cache[arg][1] is not None and \ + not self._overlaps(time, cache[arg][1]): + if arg in ndbkeys: + self.__recursiveArg(cache, arg, time) + else: + req = self.__request(self.srcdb(), arg, time) + val = self._ifpio.get(req) + if arg[-2:] == "_c": + self.pres = val[0] + val = val[1] + cache[arg] = (val, TimeRange.encodeJavaTimeRange(req[1])) + else: + LogStream.logEvent("using cached", arg, printTR(cache[arg][1])) + + gargs.append(cache[arg][0]) + return gargs + + def __runMethod(self, method, time, cache): + we, mthd, args = method + + if self.mostRecentCacheClear != time: + self.mostRecentCacheClear = time + for key in list(cache.keys()): + cacheValue = cache[key] + if len(cacheValue) == 2: + # if they are for time ranges that we've completed calculations for, + # immediately set them to None to free up the memory + if not self._overlaps(cacheValue[1],time): + LogStream.logEvent("Clearing", key, printTR(cacheValue[1])) + cache[key] = (None, cacheValue[1]) + + gargs = self.__argFill(cache, method, time) + + doStore = False + if mthd.__func__ is Forecaster.__exists.__func__: + msg = self.newdb().getModelIdentifier() + ": Get : " + we + " " + self._timeRangeStr(time) + LogStream.logEvent(msg) + else: + doStore = True + msg = self.newdb().getModelIdentifier() + ": Calc : " + we + " " + self._timeRangeStr(time) + LogStream.logEvent(msg) + + try: + rval = mthd(*tuple(gargs)) + + if rval is not None: + if type(rval) is not ndarray and rval is not None: + if type(rval) is not tuple: + jrval = rval + rval = 
rval.getNDArray() + if type(rval) is ndarray and rval.dtype == int8: + # discrete or weather + keys = JUtil.javaObjToPyVal(jrval.getKeyList()) + rval = [rval, keys] + else: + LogStream.logEvent("No value returned from calc"+str(we)) + + s = 'grid' + if rval is None: + s = 'None' + LogStream.logEvent("Caching", we, s, printTR(time)) + cache[we] = (rval, time) + + if rval is not None and cache['mtime'][0] is not None and doStore: + parm = self.__getNewWE(we) + LogStream.logEvent("Storing", we, printTR(cache['mtime'][0])) + self._ifpio.store(parm, cache['mtime'][0], cache[we][0]) + except: + LogStream.logProblem(self.newdb().getModelIdentifier() + ": Error while running method " + str(we) + + "\n" + LogStream.exc()) + cache[we] = (None, time) + + def __tsort(self, x, y): + if x[1][0] < y[1][0]: + return -1 + if x[1][0] > y[1][0]: + return 1 + return x[2] - y[2] + + def __flattenTimes(self, methods, times): + lst = [] + for i in range(len(methods)): + for t in times[i]: + lst.append((methods[i], t, i)) + lst.sort(self.__tsort) + return lst + + def __exists(self, mtime, wename): + #parm = self.__getNewWE(wename + "_SFC") + parm = self.__getNewWE(wename) + return parm.getItem(mtime) + + def __prune(self, lst): + rval = [] + for m, t, i in lst: + # m is (parmname_level, bound method calcParmname, argument list) + # t is a time range (list of two times) + # i is order? 
+ parm = self.__getNewWE(m[0]) + #parm = self.__getNewWE(m[0] + "_SFC") + tr = TimeRange.encodeJavaTimeRange(parm.getTimeRange(t[0])) + if tr is None: + continue + parmtr = TimeRange.javaTimeRangeListToPyList(parm.getKeys()) + if tr in parmtr: + # Skip (maybe) + for m2, t2, i2 in lst: + if m2[0] == m[0]: + continue + if m2[0] in m[2] and self._overlaps(t2, t): + parm2 = self.__getNewWE(m2[0]) + #parm2 = self.__getNewWE(m2[0] + "_SFC") + tr2 = parm2.getTimeRange(t2[0]) + tr2 = TimeRange.encodeJavaTimeRange(tr2) + parm2tr = TimeRange.javaTimeRangeListToPyList(parm2.getKeys()) + if tr2 is not None and tr2 not in parm2tr: + f = ((m[0], self.__exists, + ('mtime', 'wename')), t, i) + if f not in rval: + rval.append(f) + rval.append((m, t, i)) + continue + if m[0] in m2[2] and self._overlaps(t2, tr): + parm2 = self.__getNewWE(m2[0]) + #parm2 = self.__getNewWE(m2[0] + "_SFC") + tr2 = parm2.getTimeRange(t2[0]) + tr2 = TimeRange.encodeJavaTimeRange(tr2) + parm2tr = TimeRange.javaTimeRangeListToPyList(parm2.getKeys()) + if tr2 is not None and tr2 not in parm2tr: + # Have to fetch the already calced one. 
+ f = ((m[0], self.__exists, + ('mtime', 'wename')), t, i) + if f not in rval: + rval.append(f) + else: + # Need to calc this one + rval.append((m, t, i)) + return rval + + def sourceBaseTime(self): + modelTime = self.srcdb().getModelTime() + if modelTime is None: + modelTime = 0 + t = AbsTime.AbsTime(modelTime) + return t.unixTime() + + # JULIYA MODIFY HERE + def __process(self, methods, times, mode): + numGrids = 0 + trSpan = None + cache = self.magicArgs() + all = mode#Options['all'] manual=1 automatic=0 + list = self.__flattenTimes(methods, times) + if not all: + list = self.__prune(list) + + self.mostRecentCacheClear = None + for m, t, i in list: + cache['ctime'] = (t, (0, MAX_TIME)) + parm = self.__getNewWE(m[0]) + tr = parm.getTimeRange(t[0]) + + # A valid time range was not found so the parameter + # cannot be calculated, so continue + if not tr.isValid(): + continue + + cache['mtime'] = (tr, (0, MAX_TIME)) + cache['wename'] = (m[0], (0, MAX_TIME)) + cache['stime'] = (t[0] - self.sourceBaseTime(), (0, MAX_TIME)) + + try: + self.__runMethod(m, t, cache) + numGrids = numGrids + 1 + if trSpan is None: + trSpan = t + else: + trSpan = (min(trSpan[0],t[0]), max(trSpan[1], t[1])) + except: + LogStream.logProblem("Error in method setup for " + str(m[0]) + + "\n" + LogStream.exc()) + + return (trSpan, numGrids) + +#------------------------------------------------------------------------- +# Weather Element calculations +#------------------------------------------------------------------------- +class IFPIO: + def __init__(self, indb, outdb): + self.__srcwes = {} + self.__newwes = {} + self.eta = indb + self.newdb = outdb + + def getSrcWE(self, wename, lock=1): + rval = None + try: + rval = self.__srcwes[wename] + except: + rval = self.eta.getItem(wename) + self.__srcwes[wename] = rval + return rval + + def getNewWE(self, wename, lock=1): + try: + rval = self.__newwes[wename] + except: + rval = self.newdb.getItem(wename) + self.__newwes[wename] = rval + return rval + 
+ def get(self, qv): + if len(qv) == 2: + name, time = qv + docube = 0 + else: + name, time, docube = qv + if not docube: + p = self.getSrcWE(name, 0) + times = p.getKeys(time) + if times: + time = times[0] + LogStream.logEvent("retrieving", name, printTR(time)) + + slice = p.getItem(time) + out = slice.getNDArray() + if type(out) is ndarray and out.dtype == int8: + # discrete or weather + keys = JUtil.javaObjToPyVal(slice.getKeyList()) + out = [out, keys] + else: + out = self._getcube(name, time) + return out + + #-------------------------------------------------------------------------- + # Sets the grid levels used for many algorithms. + #-------------------------------------------------------------------------- + def setLevels(self, levels): + self._levels = levels + + #-------------------------------------------------------------------------- + # Returns the current list of levels + #-------------------------------------------------------------------------- + def levels(self): + return self._levels + + #-------------------------------------------------------------------------- + # Returns the data cube for the specified parm, and time. 
+ #-------------------------------------------------------------------------- + def _getcube(self, parm, time): + lvls = self.levels() + lst = [] + pres = [] + for l in lvls: + slice = self.get((parm + "_" + l, time)) + lst.append(slice) + pres.append(int(l[2:])) + # only scalars will be ndarray, otherwise it was vector, discrete, or wx + if type(lst[0]) is not ndarray: + ml = [] + dl = [] + for i in lst: + ml.append(i[0]) + dl.append(i[1]) + rval = (array(ml), array(dl)) + else: + rval = array(lst) + return (pres, rval) + + #-------------------------------------------------------------------------- + # Stores the specified grid in the element specified by newwe at the + # specified time + #-------------------------------------------------------------------------- + def store(self, newwe, time, grid): + gridType = newwe.getGridType() + if gridType == "SCALAR": + grid = clip(grid, newwe.getMinAllowedValue(), newwe.getMaxAllowedValue()) + elif gridType == "VECTOR": + mag = clip(grid[0], newwe.getMinAllowedValue(), newwe.getMaxAllowedValue()) + dir = clip(grid[1], 0, 359.5) + grid = (mag, dir) + tr = TimeRange.encodeJavaTimeRange(time) + # safety checks + wrongType = None + saved = False + if type(grid) is ndarray: + grid = NumpyJavaEnforcer.checkdTypes(grid, float32) + # scalar save + newwe.setItemScalar(newwe.getTimeRange(tr[0]), grid) + saved = True + elif (type(grid) is list or type(grid) is tuple) and len(grid) == 2: + if type(grid[0]) is ndarray and type(grid[1]) is ndarray: + magGrid = NumpyJavaEnforcer.checkdTypes(grid[0], float32) + dirGrid = NumpyJavaEnforcer.checkdTypes(grid[1], float32) + # vector save + newwe.setItemVector(newwe.getTimeRange(tr[0]), magGrid, dirGrid) + saved = True + elif type(grid[0]) is ndarray and type(grid[1]) is list: + bgrid = NumpyJavaEnforcer.checkdTypes(grid[0], int8) + + if gridType == "DISCRETE": + newwe.setItemDiscrete(newwe.getTimeRange(tr[0]), bgrid, str(grid[1])) + elif gridType == "WEATHER": + 
newwe.setItemWeather(newwe.getTimeRange(tr[0]), bgrid, str(grid[1])) + + saved = True + if not saved: + if wrongType is None: + wrongType = type(grid) + msg = str(wrongType) + " type returned from calcMethod is not safe to store for " + str(newwe) + raise TypeError(msg) + +#-------------------------------------------------------------------------- +# Main program +#-------------------------------------------------------------------------- +def runFromJava(dbName, model, validTime): + SmartInitParams.params['dbName'] = dbName + SmartInitParams.params['validTime'] = validTime + + mod = __import__(model) + mod.main() + rollbackImporter.rollback() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM12.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM12.py index 9ba5bc5464..ac3784347c 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM12.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM12.py @@ -1,1688 +1,1688 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. 
Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -# -# NAM12 SmartInit -# -# Comments below for each algorithm. -# -# Author: Tim Barker - SOO Boise, ID -# -#============================================================================= -# -# C O N F I G U R A T I O N S E C T I O N -# -#============================================================================= -# -# USE_WETBULB=1 (Yes) or 0 (No). Using wetbulb for calculating snow level -# and precipitation type is probably more accurate - since it would be the -# temperature that the atmosphere would be if you evaporated water into it -# until saturation. Thus...when the model is dry at low layers and you think -# it might precip...then the temperature would likely be much cooler than what -# the model says. However...the wetbulb calculation is VERY slow and you can -# save yourself a lot of time by not doing it. You could argue that if the -# model isn't making precip - then you shouldn't be changing its temps, but -# it really seesm to work well in areas of terrain and 'showery' precip - -# where the model sounding is representative of the large-scale, but inside -# the showers it is cooler. 
-# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 02/16/12 14439 jdynina modified Haines calculation -# 03/04/13 15585 jzeng Modified wxtype range in calcWx() -# -## -USE_WETBULB = 1 -# -# -#============================================================================ -# -# E N D C O N F I G U R A T I O N S E C T I O N -# -#============================================================================ -from Init import * -class NAM12Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "NAM12", "NAM12") - self.BLcubeTime = (None, None) - - def levels(self): - return ["MB1000", "MB975", "MB950", "MB925", - "MB900", "MB875", "MB850", "MB825", - "MB800", "MB775", "MB750", "MB725", - "MB700", "MB675", "MB650", "MB625", - "MB600", "MB575", "MB550", "MB525", - "MB500", "MB450", "MB400", "MB350"] - - #--------------------------------------------------------------------------- - # T - use model sounding to get temperature at real topography instead of - # model topography - # - # Where the topo is above the model topo - use the boundary - # layer temperature to interpolate a temperature...but in radiational - # inversions this is typically too warm because the free air - # temperature from the model is warmer than air near the ground on - # a mountain that sticks up higher than the model mountains. So... - # if there is an inversion (i.e. the boundary layer temp at the - # desired height is warmer than the model surface temp) it only goes - # 1/2 as warm as the raw inversion in the free model atmosphere would - # be. Not sure if this is good for strong and persistent inversions - # like marine inversions - but works well for persistent radiational - # inversions in the intermountain west during the winter - and works - # well for nocturnal inversions all times of the year. 
- # Where the topo is below the model topo - it uses the lapse rate between - # the two lowest boundary layer levels and extrapolates this downward - - # with the restriction that the lapse rate cannot be more than dry - # adiabatic and inversions are extrapolated at only 1/2 that lapse rate - # and also limited to no more than 1.5C decrease per km. The 1.5C per km - # restriction is arbirary - further research may restrict it more or - # less. The dry adiabatic restriction seems to work fine. - #-------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150, - rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, rh_BL120150, - wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, wind_BL90120, - wind_BL120150, p_SFC, stopo, topo, gh_c, t_c, rh_c, wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - - BLT = self.BLT - #self.printval("temp:",self.BLT,65,65) - BLH = self.BLH - - st = self.newGrid(-1) - for i in range(1, BLH.shape[0]): - tval = self.linear(BLH[i], BLH[i - 1], BLT[i], BLT[i - 1], topo) - # - # restrict the increase in areas where inversions present - # - m = greater(tval,BLT[0]) - tval[m] = (BLT[0]+((tval-BLT[0])/2.0))[m] - - between = logical_and(greater_equal(topo, BLH[i - 1]), less(topo, BLH[i])) - m = logical_and(less(st,0.0),between) - st[m] = tval[m] - # - # restrict the lapse rates below the model surface - # - lapse = (BLT[1] - BLT[0]) / (BLH[1] - BLH[0]) - lapse[greater(lapse, 0.0)] /= 2.0 - maxinvert = 1.5 / 1000.0 - lapse[greater(lapse, maxinvert)] = maxinvert - drylapse = -9.8 / 1000.0 - lapse[less(lapse, drylapse)] = drylapse - tst = BLT[0] + ((topo - stopo) * lapse) - - m = less(st,0.0) - st[m] = tst[m] - # - 
#diff=t_FHAG2-st - #maxdiff=maximum.reduce(maximum.reduce(diff)) - #mindiff=minimum.reduce(minimum.reduce(diff)) - #print "max/min temp change: %6.2f %6.2f"%(maxdiff,mindiff) - # - # change to Fahrenheit - # - return self.KtoF(st) - - #-------------------------------------------------------------------------- - # Td - where topo is above the model topo - it interpolates the dewpoint - # from the model sounding. This allows mountains sticking up into dry - # dry air during nighttime inversions to reflect the dry air aloft. - # Where the topo is below the model topo - it uses the model surface - # mixing ratio, and assumes that is constant to the real topo - and - # uses the temperature at the real topo calculated in calcT - #--------------------------------------------------------------------------- - def calcTd(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, T, stopo, topo, gh_c, t_c, - rh_c, wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - BLD = self.BLD - BLH = self.BLH - # - # for real topo above model topo - interpolate dewpoint from the - # model dewpoint sounding - # - sd = self.newGrid(-1) - for i in range(1, BLH.shape[0]): - dval = self.linear(BLH[i], BLH[i - 1], BLD[i], BLD[i - 1], topo) - between = logical_and(greater_equal(topo, BLH[i - 1]), less(topo, BLH[i])) - - m = logical_and(less(sd,0.0),between) - sd[m] = dval[m] - - # - # for real topo below model topo - use model surface mixing ratio - # and use that mixing ratio with the surface temperature which - # was derived from the low-level lapse rate. 
- # - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) - tsfce = self.esat(self.FtoK(T)) - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz - ws = (0.622 * tsfce) / (newp - tsfce) - rh = w / ws - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - sd=where(less(sd,0.0),td,sd) - # - # change to Fahrenheit and make sure it is less than temp - # - td = self.KtoF(sd) - td=where(greater(td,T),T,td) - - return td - - #------------------------------------------------------------------------- - # RH - simply calculate RH based on Temp and Dewpoint (both in degrees F) - #------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - return RH - - def dewFromTandRH(self, T, RH): - tc = (T - 32.0) * (5.0 / 9.0) - rh = clip(RH, 0.001, 99.999) / 100.0 - x = (log(rh) / 17.67) + (tc / (tc + 243.5)) - tdc = (243.5 * x) / (1.0 - x) - td = (tdc * 9.0 / 5.0) + 32.0 - return td - - #-------------------------------------------------------------------------- - # Wx - uses a derivation of the Bourgouin algorithm to determin - # precip type. - # - # Uses a sounding of wetbulb temperature (the temperature that it would - # be in the model sounding if precip were falling) and finds areas above - # and below freezing and figures precip phase based on empirical values. - # - # Makes a simple guess at where it will be showers based on the model LI - # - making it showers when LI is less than 2. - # - # Makes a simple guess at where there will be thunderstorms based on the - # model LI less than -1 (SChc), <-3 (Chc), <-5 (Lkly), <-8 (Def). 
- # - # After determining precip type, it matches the probability part of the Wx - # grid to the existing PoP grid, removing Wx where the PoP grid is below - # 15%. Would love to only calculate the Wx Type here - and not have the - # PoP involved - but this is not the way most people understand it. - #-------------------------------------------------------------------------- - def calcWx(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, PoP, T, RH, bli_BL0180, stopo, - topo, gh_c, t_c, rh_c, wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - BLH = self.BLH - # - # use temp or wetbulb - # - if USE_WETBULB == 1: - TT = self.BLE - else: - TT = self.BLT - # - # get temperatures (or wetbulb) at levels above the real topo - # not model topo - # - (BLH, TT) = self.getTopoE(topo, stopo, p_SFC, T, RH, BLH, TT) - # - # calculate number of zero crossings, and areas above/below - # freezing of the wetbulb sounding - # - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - for i in xrange(1, BLH.shape[0]): - a11, a22, cross = self.getAreas(BLH[i - 1], TT[i - 1], BLH[i], TT[i]) - topomask = greater(BLH[i], topo) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a11, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a11, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a11, a3) - topomask = logical_and(topomask, cross) - aindex = where(topomask, aindex + 1, aindex) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a22, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - 
a2 + a22, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a22, a3) - # - # The basic types we are choosing between - # - wxtypes = ['::::', - "Def:S:-::", - "Def:R:-::", - "Def:S:-::^Def:R:-::", - 'Def:ZR:-::', - 'Def:IP:-::', - 'Def:ZR:-::^Def:IP:-::', - "Def:SW:-::", - "Def:RW:-::", - "Def:SW:-::^Def:RW:-::", - "Def:ZR:-::", - 'Def:IP:-::', - 'Def:ZR:-::^Def:IP:-::'] - - wx = self.empty(int8) - # - # Case d - no zero crossings. All snow or all rain - # - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # - # Case c - one crossing. Snow if little area above freezing. - # Rain if lots of area above freezing. - # Mix if between - # - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] = 2 - wx[logical_and(srmask, - logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - # - # Case a - two crossings. Either freezing rain or ice pellets - # ice pellets when surface cold area is big - # freezing rain when surface cold area is small - # mix when between - # - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - # - # Case b - three crossings. If not much in the top warm layer - # then it acts like case c. - # If enough to melt in that layer - then - # see if cold layer is enough to re-freeze - # and be ice pellets - or just remain rain. 
- # - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - # - # Where LI<2, make showers - # - bli_BL0180 = where(less(bli_BL0180, -18.0), 10.0, bli_BL0180) - convecMask = less(bli_BL0180, 2) - wx[convecMask] += 6 - # - # off the grid need no weather - # - wxgrid = self.empty(int8) - keys = ['::::', ] - wxgrid[less(bli_BL0180, -18.0)] = 0 - # - # Match PoP, and remove non-occurring wx - # - poplimits = [15.0, 25.0, 55.0, 75.0, 101.0] - popprobs = ["None", "SChc", "Chc", "Lkly", "Def"] - for popcat in range(5): - if popcat > 0: - lowlimit = poplimits[popcat - 1] - else: - lowlimit = -1 - ispopcat = logical_and(greater(PoP, lowlimit), - less(PoP, poplimits[popcat])) - # - # If this pop category doesn't exist anywhere - then - # we don't have to worry about it. 
- # - some = logical_or.reduce(logical_or.reduce(ispopcat)) - if not some: - continue - # - # the no-pop case is easy - make it no weather - # - if popcat == 0: - wxgrid[ispopcat] = 0 - continue - # - # for all others...see if any weather combinations exist - # and add those - # - prob = popprobs[popcat] - for iwx in range(1, 13): - wxstring = wxtypes[iwx] - ispopwx = logical_and(ispopcat, equal(wx, iwx)) - some = any(ispopwx) - if not some: - continue - types = [] - types = string.split(wxstring, "^") - for i in range(len(types)): - type = types[i] - pieces = string.split(type, ":") - pieces[0] = prob - types[i] = string.join(pieces, ":") - wxstring = string.join(types, "^") - keys.append(wxstring) - keynum = len(keys) - 1 - wxgrid[ispopwx] = keynum - # - # thunder is totally separate from PoP, only related to - # the instability. SChc for LI <-1, Chc for LI<-3, - # Lkly for LI<-5, Def for LI<-8 - # - thunder = less_equal(bli_BL0180, -1).astype(int8) - thunder[less_equal(bli_BL0180, -3)] = 2 - thunder[less_equal(bli_BL0180, -5)] = 3 - thunder[less_equal(bli_BL0180, -8)] = 4 - - tprobs = ["None", "SChc", "Chc", "Lkly", "Def"] - for ith in range(1, 5): - tprob = equal(thunder, ith) - some = any(tprob) - if not some: - continue - needadd = where(tprob, wxgrid, int8(0)) - numkeys = len(keys) - for i in range(1, numkeys): - add = equal(needadd, i) - some = any(add) - if not some: - continue - wxstring = keys[i] - addstring = wxstring + "^" + tprobs[ith] + ":T:::" -# print "added thunder:",addstring - keys.append(addstring) - keynum = len(keys) - 1 - wxgrid[add] = keynum - return(wxgrid, keys) - - #-------------------------------------------------------------------------- - # QPF - simply take model QPF and change units to inches - #---------------------------------------- ---------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - 
#-------------------------------------------------------------------------- - # PoP - based strongly on QPF (since when model has one inch of precip the - # chance of getting 0.01 is pretty high). However, there is a big - # difference between a place that model has 0.00 precip and is very - # close to precipitating - and those where model has 0.00 and is a - # thousand miles from the nearest cloud. Thus, uses the average - # boundary layer RH to make an adjustment on the low end - adding - # to PoP where RH is high. Ignores surface RH to try to ignore fog - # cases. Would also like to consider omega. - # - # Uses hyperbolic tangent of QPF, so that it rises quickly as model - # QPF increases - but tapers out to nearly 100% as QPF gets high. - # Also uses hyperbolic tangent of QPF to reduce the impact of high RH - # as QPF gets higher (since avg RH will always be high when QPF is high) - # - # Adjustable parameters: - # topQPF is QPF amount that would give 75% PoP if nothing else - # considered at half this amount, PoP is 45%, at double this - # amount PoP is 96%. Default set at 0.40. - # RHexcess is amount of average BL RH above which PoP is adjusted - # upward. Default set to 60% - # adjAmount is maximum amount of adjustment if BL RH is - # totally saturated. 
Default set to 30% - # - #-------------------------------------------------------------------------- - def calcPoP(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, QPF, stopo, topo, gh_c, t_c, - rh_c, wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - BLR = self.BLR - - topQPF = 0.40 # QPF value where raw PoP would be 75% - RHexcess = 60.0 # RH above this can add to PoP and below will subtract - adjAmount = 30.0 # amount of adjustment allowed - # - factor = tanh(QPF * (1.0 / topQPF)) - factor2 = tanh(QPF * (2.0 / topQPF)) - # - # - # - rhcube = BLR[1:5] - rhavg = add.reduce(rhcube) / 4.0 - rhmax = 100 - RHexcess - dpop = rhavg - RHexcess - dpop[less(dpop, 0.0)] = 0.0 - dpop = (dpop / rhmax) * (1.0 - factor2) * adjAmount - # - pop = (factor * 100.0) + dpop - pop = clip(pop, 0, 100) - # - return pop - - #-------------------------------------------------------------------------- - # Chance of Wetting Rain (0.1 inch). Same algorithm as PoP, but requires - # more model QPF to get same chances, and higher boundary layer RH - # to get the adjustment (and maximum adjustment is less). - # - # Adjustable parameters: - # topQPF should be higher than PoP topQPF - # Default set at 0.60. 
- # RHexcess should be higher than PoP RHexcess - # Default set to 80% - # adjAmount should be smaller than PoP adjAmount - # Default set to 10% - # - #-------------------------------------------------------------------------- - def calcCWR(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, PoP, QPF, stopo, topo, gh_c, - t_c, rh_c, wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - BLR = self.BLR - # - topQPF = 0.60 # QPF value where raw PoP would be 75% - RHexcess = 70.0 # RH above this can add to PoP and below will subtract - adjAmount = 15.0 # amount of adjustment allowed - # - factor = tanh(QPF * (1.0 / topQPF)) - factor2 = tanh(QPF * (2.0 / topQPF)) - # - # - # - rhcube = BLR[1:5] - rhavg = add.reduce(rhcube) / 4.0 - rhmax = 100 - RHexcess - dpop = rhavg - RHexcess - dpop[less(dpop, 0.0)] = 0.0 - dpop = (dpop / rhmax) * (1.0 - factor2) * adjAmount - # - cwr = (factor * 100.0) + dpop - cwr = clip(cwr, 0, 100) - cwr=where(greater(cwr,PoP),PoP,cwr) - return cwr - - #---------------------------------------------------------------- - # Sky - Calculates cloud percentage in each layer based on - # RH in that layer. Then adds up the percentages in - # the layers. Model clouds seem too 'binary', and so - # they are not used. - # - # We guess that it takes higher RH near the surface (say - # 97%) to get a cloud, but less RH up high (say only 90% - # to get cirrus). Transition width is wider up high, than - # it is near the surface. - # - # Also weight high clouds less in the coverage than - # low clouds. 
- # - # No downscaling is attempted since an observer can usually - # see MANY gridpoints - and judges clouds based on all of - # them - not just whether there is a cloud in the small - # gridpoint directly overhead. Thus, cloud fields are - # rather smooth. - #---------------------------------------------------------------- - #def calcSky(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - # t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - # rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - # wind_BL90120, wind_BL120150, p_SFC, stopo, topo, gh_c, t_c, rh_c, - # wind_c, ctime): - - # self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - # t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - # rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - # wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - # wind_c, ctime) - # BLP=self.BLP - # BLR=self.BLR - # # - # # create a 'sigma' pressure field - # # - # pp = BLP / BLP[0] - # pp = clip(pp, 0.1, 1.0) - # # - # # remove surface level - so surface Fog does not count - # # - # pp=pp[1:] - # BLR=BLR[1:] - # # - # # get weight based on pressure - high levels get counted little - # # maxes out at 700mb, low levels count a little less - # # - # ftop=50 # max coverage at top - # maxlev=0.7 # sigma leve of max allowed coverage - # fmax=100 # max coverage at max level - # fbot=90 # max coverage at surface - # f100 = where(less(pp,maxlev), ((fmax-ftop)*(pp/maxlev))+ftop, - # fbot+(1.0-pp)*(fmax-fbot)/(1.0-maxlev)) - # # - # # ramp-up functions from RH to coverage based on pressure - # # - # midbot=90.0 - # midtop=80.0 - # mid=(pp*(midbot-midtop))+midtop - # widbot=10.0 - # widtop=20.0 - # wid=(pp*(widbot-widtop))+widtop - # c=(0.5*tanh(((BLR-mid)*2.0)/wid))+0.5 - # # - # # coverage for each level based on RH - # # - # f = minimum(f100 * c, 100.0)/100.0 - # # - # # When level 1 has 50% coverage, then 50% coverage - # # at level 2 covers 50% 
of the remaining clear sky, - # # (so now 75%) and 50% coverage at level 3 covers - # # 50% of the remaining clear sky (now 87%), etc. - # # -# if f.shape[0]: - # sky = f[0] -# else: -# sky = resize(f, f.shape[1:]) - # for i in xrange(1, f.shape[0]): - # sky = sky + f[i] - sky * f[i] - # # - # # Smooth it a little - # # - # pSFCmb=p_SFC/100.0 - # sky=where(less(pSFCmb,500),-9999.0,sky) - # sky=self.smoothpm(sky,2) - # sky=clip(sky*100.0,0.0,100.0) - # return sky - - ##-------------------------------------------------------------------------- - ## Calculates Sky condition (fractional cloud cover) from model RH at specific - ## pressure levels. Uses reduced equations from Walcek, MWR June 1994. - ## Adds up the amount of fractional clouds calculated at each layer based on - ## topography (i.e. no clouds below ground) then divides by a suggested number - ## of layers to produce an average sky. - ##----------------------------------------------------------------------------- - def calcSky(self, rh_c, gh_c, topo, p_SFC, rh_BL030, rh_BL3060, rh_BL6090, - rh_BL90120, rh_BL120150): - - tmpP_SFC = p_SFC.copy() - tmpP_SFC /= 100.0 # convert surfp to millibars - x = 560.0 # delta x (85km - 850km) - - # Define a percentage of f100 to use as a filter (0.0 - 1.0) - # Remember f100 is an exponential function, so changes will be more - # pronounced in the 0.5-1.0 range than the 0.0-0.5 range. 
- percent = 0.37 - - # Define a list of boundary layer levels to include - BL_levels = ['BL030', 'BL3060', 'BL6090', 'BL90120', 'BL120150'] - - # Construct a boundary layer pressure and RH cube - bl_Pcube = [] - bl_RHcube = [] - - # Place all BL RH levels into a cube - bl_RHcube += [rh_BL030] - bl_RHcube += [rh_BL3060] - bl_RHcube += [rh_BL6090] - bl_RHcube += [rh_BL90120] - bl_RHcube += [rh_BL120150] - bl_RHcube = array(bl_RHcube) - - - # Make a cube of boundary layer pressures - for lvl in BL_levels: - if lvl == 'BL030': - tmpP = tmpP_SFC - 15.0 - elif lvl == 'BL3060': - tmpP = tmpP_SFC - 45.0 - elif lvl == 'BL6090': - tmpP = tmpP_SFC - 75.0 - elif lvl == 'BL90120': - tmpP = tmpP_SFC - 105.0 - elif lvl == 'BL120150': - tmpP = tmpP_SFC - 135.0 - elif lvl == 'BL150180': - tmpP = tmpP_SFC - 165.0 - bl_Pcube += [tmpP] - bl_Pcube = array(bl_Pcube) - - - # Make a model level pressure cube - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - - - # Convert BL pressures to sigma levels - BL_sigma = bl_Pcube / tmpP_SFC - del bl_Pcube - BL_sigma = clip(BL_sigma, 0.1, 1.0) - - # Convert model level pressure cube to sigma surfaces - pp = pmb / tmpP_SFC - del tmpP_SFC - pp = clip(pp, 0.1, 1.0) - - - # Account for topography in the model cube, don't need to worry about - # this with the BL cube since those are guaranteed to be above ground - tmpRH_c = where(less(gh_c, topo), float32(0.0), rh_c) - - #======================================================================= - # Create new RH and sigma cubes - - newRH_c = [] - newSigma_c = [] - - # See which boundary layer levels have pressures > lowest "signficant" - # model level pressure - for bl_i in xrange(BL_sigma.shape[0]): - - # Make a mask to identify which points from the boundary - # layer level have greater pressure than lowest "significant" - # model level - BL_mask = greater(BL_sigma[bl_i], pp[0]) - - # See how many points we've found - count = sum(sum(BL_mask, 1)) - - # If there are no 
points - don't include this BL level - if count == 0: - continue - - # Compute a temporary RH grid where it is lower than the lowest - # "significant" model level data - tmpRH = where(BL_mask, bl_RHcube[bl_i], float32(0.0)) - - - # Compute a temporary sigma grid for this boundary layer level - # where it is lower than the lowest "significant" model level - tmpSigma = where(BL_mask, BL_sigma[bl_i], float32(0.0)) - - # Insert this level into the new RH and sigma cubes - newRH_c += [tmpRH] - newSigma_c += [tmpSigma] - - - # Add lowest "significant" model level to RH and sigma cubes - newRH_c += [tmpRH_c[0]] - newSigma_c += [pp[0]] - - - # Insert boundary layer RH into RH cube where appropriate - for lvl in xrange(1, len(self.levels())): - - # Look at each boundary layer level - for bl_i in xrange(BL_sigma.shape[0]): - - # Make a mask to identify which points from the boundary - # layer level fall between the surrounding "significant" - # model levels - BL_mask = logical_and(greater(BL_sigma[bl_i], pp[lvl]), - less(BL_sigma[bl_i], pp[lvl - 1])) - - # See how many points we've found - count = sum(sum(BL_mask, 1)) - - # If there are no points - don't include this BL level - if count == 0: - continue - - # Compute a temporary RH grid where it is between the two - # "significant" model level data - tmpRH = where(BL_mask, bl_RHcube[bl_i], float32(0.0)) - - - # Compute a temporary sigma grid for this boundary layer level - # where it is between the two "significant" model levels - tmpSigma = where(BL_mask, BL_sigma[bl_i], float32(0.0)) - - # Insert this level into the new RH and sigma cubes - newRH_c += [tmpRH] - newSigma_c += [tmpSigma] - - - # Add top of layer we just examined to RH and sigma cube - newRH_c += [tmpRH_c[lvl]] - newSigma_c += [pp[lvl]] - - del bl_RHcube - del BL_sigma - del tmpRH_c - - # Finish off the new cubes - newRH_c = array(newRH_c) - newSigma_c = array(newSigma_c) - - # Determine maximum possible sky fraction - fmax = 78.0 + x / 15.5 - - # Compute sky 
fraction for both pressure cubes - f100 = where(less(newSigma_c, 0.7), - fmax * (newSigma_c - 0.1) / 0.6, - 30.0 + (1.0 - newSigma_c) * (fmax - 30.0) / 0.3) - - # Compute RH depression at 37% f100 [ (1-RHe) in Walcek ] - c = 0.196 + (0.76 - x / 2834.0) * (1.0 - newSigma_c) - - del newSigma_c - - # Compute critical RH threshold to use as a filter - # Note (percent * f100)/f100 = percent - try: - rhCrit = log(percent) * c + 1.0 - except: - rhCrit = 0.0 - - # Ensure "critical RH" is valid - rhCrit = clip(rhCrit, 0.0, 1.0) - - # Compute sky fraction for the model cube - c = (newRH_c / 100.0 - 1.0) / c - c = exp(c) - f = minimum(f100 * c, 100.0) - - # Where RH is less than the critical value, set it to 0 contribution - f[less(newRH_c / 100.0, rhCrit)] = 0.0 - - del newRH_c - - # Compress cubes vertically - f = self.squishZ(f, (f.shape[0] / 5) - 1) # was 5 - - # Convert sky fractions to an actual percentage - if len(f) >= 5: - f[4] *= 0.25 - else: - LogStream.logEvent("WARNING: Sky data is missing some levels - calculation will be incomplete") - ind = len(f) - 1 - f[ind] *= 0.25 - - f /= 100.0 - - sky = f[0] - for i in xrange(1, f.shape[0]): - sky = sky + f[i] - sky * f[i] - - grid = sky * 100.0 - - return grid - #========================================================================= - # Wind - uses boundary layer wind 'sounding' to get the wind at the - # real elevation rather than the model elevation. When real topo - # is below model topo, just uses the lowest boundary layer wind field. - # - # This typically gives ridgetops a bit too much wind speed - so if speed - # is above the model surface wind speed - it only uses 1/2 of the - # difference. Direction is allowed to reflect the direction at the - # higher level. This gives the wind a 'topography' influenced look - - # with sharp mountains sticking up into 'stronger' wind speeds and - # different wind directions. 
- #---------------------------------------------------------------- - def calcWind(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, topo, gh_c, t_c, rh_c, - wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - BLH = self.BLH - BLW = self.BLW - BLMAG = BLW[0] - BLDIR = BLW[1] - - smag = self.newGrid(-1) - sdir = self.newGrid(-1) - - m = less(topo,BLH[0]) - smag[m] = BLMAG[0][m] - sdir[m] = BLDIR[0][m] - - for i in range(1, BLH.shape[0]): - mval = self.linear(BLH[i], BLH[i - 1], BLMAG[i], BLMAG[i - 1], topo) - dval = self.dirlinear(BLH[i], BLH[i - 1], BLDIR[i], BLDIR[i - 1], topo) - # - # limit winds to be half as strong as wind in - # free atmosphere above the model surface would indicate - # - m = greater(mval,BLMAG[0]) - mval[m] = (BLMAG[0]+((mval-BLMAG[0])/2.0))[m] - - between = logical_and(greater_equal(topo, BLH[i - 1]), less(topo, BLH[i])) - m = logical_and(less(smag,0.0),between) - smag[m] = mval[m] - sdir[m] = dval[m] - # - # Change to knots - # - smag *= 1.94 - smag[less(p_SFC/100.0, 500.0)] = 0.0 - sdir.clip(0, 359.5, sdir) - return(smag, sdir) - - #======================================================================== - # MixHgt - the height to which a parcel above a 'fire' would rise - # (in height) above ground level (in feet). - # - # Calculated by assuming a parcel above a fire is VERY hot - but the fire - # is very small - so that entrainment quickly makes it only a few degrees - # warmer than the environment. Ideally would want to consider moisture - # and entrainment - but this is a very simple first guess. 
- # - # This does NO downscaling - and even smooths the field a little at the - # end. We have no observations of this - other than at sounding - # locations - so we have no idea what the spatial patterns should look - # like. - #---------------------------------------------------------------- - def calcMixHgt(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, topo, gh_c, t_c, rh_c, - wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - BLT = self.BLT - BLP = self.BLP - BLTheta = self.ptemp(BLT, BLP) - BLH = self.BLH - # - # Potential temp of fire 2 degrees warmer than surface parcel - # - fireHeat = 2.0 - pSFCmb = p_SFC / 100 - fireTheta = self.ptemp(t_FHAG2 + fireHeat, pSFCmb) - # - # find height the fireTheta crosses the sounding theta - # - mixhgt = self.newGrid(-1) - for i in range(1, BLH.shape[0]): - hcross = self.linear(BLTheta[i], BLTheta[i - 1], BLH[i], BLH[i - 1], fireTheta) - cross = logical_and(greater(BLTheta[i], fireTheta), less(mixhgt, 0.0)) - mixhgt[cross] = hcross[cross] - - m = less(mixhgt,0.0) - mixhgt[m] = BLH[-1][m] - # - # Change to height above the model topo (in feet) - # and smooth a little - # - mixhgt -= stopo - mixhgt *= 3.28 - mixhgt[less(pSFCmb, 500)] = -9999.0 - mixhgt = self.smoothpm(mixhgt, 2) - mixhgt.clip(0.0, 50000.0, mixhgt) - return mixhgt - - #=========================================================================== - # SnowAmt - simple snow ratio based on surface temperature - multiplied - # times the model QPF amount - #--------------------------------------------------------------------------- - def 
calcSnowAmt(self, T, QPF): - snowr = (T * -0.5) + 22.5 - snowr[less(T, 9.0)] = 20 - snowr[greater_equal(T, 30.0)] = 0 - snowamt = QPF * snowr - return snowamt - - #========================================================================== - # Many of the models have had a freezing level in the gh field. - #========================================================================== - def calcFzLevel(self, gh_FRZ): - return gh_FRZ * 3.28 - - #======================================================================== - # calcSnowLevel - takes sounding of the wetbulb temperature and finds the - # lowest elevation (above ground) where wetbulb crosses from - # above freezing to below freezing. When top wetbulb is above - # freezing - puts in height of top level. When surface - # wetbulb is below freezing - assumes a simple dry-adiabtic - # lapse rate below ground (which is ludicrous for a wetbulb - # lapse rate - but who really cares what the snow level is - # when it is below ground anyway?). - # - # This is almost always too noisy so we smooth it with a - # +/- 4 gridpoint average. Note that this means that there - # may be gridpoints where the surface wetbulb is below - # freezing - but the snow level ends up being above - # ground. If this bothers you - remove the smoothing. 
- #------------------------------------------------------------------------ - def calcSnowLevel(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, T, RH, stopo, topo, gh_c, t_c, - rh_c, wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - BLH = self.BLH - # - # - # - if USE_WETBULB == 1: - TT = self.BLE - else: - TT = self.BLT - # - # get wetbulb temperatures above topography - # - (BLH, TT) = self.getTopoE(topo, stopo, p_SFC, T, RH, BLH, TT) - snowlvl = self.newGrid(-1) - # - # find the ones below ground - # - tk = TT[0] - below = less(tk, 273.15) - lapse = 9.8 / 1000.0 - tmsl = tk + (lapse * topo) - hbot = topo * 0.0 - hcross = self.linear(tk, tmsl, topo, hbot, 273.15) - hcross[less(hcross, 0.0)] = 0.0 - - snowlvl[below] = hcross[below] - # - # find the ones above the topo surface - # - tbot = tk - hbot = topo - for i in range(1, BLH.shape[0]): - hcross = self.linear(TT[i], TT[i - 1], BLH[i], BLH[i - 1], 273.15) - cross = logical_and(less_equal(TT[i], 273.15), greater(TT[i - 1], 273.15)) - add = logical_and(cross, less(snowlvl, -0.5)) - snowlvl[add] = hcross[add] - # - # when still above freezing at the top of the BL layer - just - # put in that height (best we can do without more data) - # - m = less(snowlvl,-0.5) - snowlvl[m] = BLH[-1][m] - # - # Change to feet and subtract 500 feet if not using the wetbulb method - # - snowlvl *= 3.28 - if USE_WETBULB != 1: - snowlvl -= 500.0 - # - # Take care of any missing data points - # - pSFCmb = p_SFC / 100.0 - snowlvl[less(pSFCmb, 500.0)] = -9999.0 - # - # Smooth a little to reduce noise - # - snowlvl = 
self.smoothpm(snowlvl, 4) - snowlvl.clip(0.0, 50000.0, snowlvl) - return snowlvl - - #========================================================================== - # TransWind - the average winds in the layer between the surface - # and the mixing height. - #-------------------------------------------------------------------------- - def calcTransWind(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, MixHgt, stopo, topo, gh_c, t_c, - rh_c, wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - BLW = self.BLW - BLH = self.BLH - BLM = BLW[0] - BLD = BLW[1] - nmh = stopo + (MixHgt * 0.3048) # convert MixHt from feet -> meters - - pSFCmb = p_SFC / 100.0 - (utot, vtot) = self._getUV(BLM[0], BLD[0]) - numl = self.newGrid(1) - - for i in range(1, BLH.shape[0]): - use = less(BLH[i], nmh) - (u, v) = self._getUV(BLM[i], BLD[i]) - utot[use] += u[use] - vtot[use] += v[use] - numl[use] += 1 - - # - # calculate average - # - utot /= numl - vtot /= numl - # - # Smooth a little - # - utot[less(pSFCmb, 500.0)] = -9999.0 - vtot[less(pSFCmb, 500.0)] = -9999.0 - utot = self.smoothpm(utot, 1) - vtot = self.smoothpm(vtot, 1) - # - # convert utot, vtot to mag, dir - # - (tmag, tdir) = self._getMD(utot, vtot) - tdir.clip(0, 359.5, tdir) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tmag) # clip speed to 125 knots - return(tmag, tdir) - - #-------------------------------------------------------------------------- - # LAL - Based mainly on lifted index. Adds more when RH at top of BL is - # high, but RH at bottom of BL is low. 
- #-------------------------------------------------------------------------- - def calcLAL(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, tp_SFC, bli_BL0180, stopo, topo, - gh_c, t_c, rh_c, wind_c, ctime): - - self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, - rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, - wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, - wind_c, ctime) - lal = self.newGrid(1) - BLR = self.BLR - # - # only thing we have is boundary layer lifted index - # set LAL to 2 if LI<0, 3 if LI<-3, 4 if LI<-5 - # - lal[less(bli_BL0180, 0)] += 1 - lal[less(bli_BL0180, -3)] += 1 - lal[less(bli_BL0180, -5)] += 1 - - # - # Add more when RH at top of BL is greater than - # than 70% and RH at bottom of BL is less than 30 - # - V = logical_and(greater(BLR[5], 70), less(BLR[0], 30)) - lal[V] += 1 - # - # Add even more where RH at top of BL is greater than - # 80% and RH at bottom of BL is less than 20% - # - V = logical_and(greater(BLR[5], 80), less(BLR[0], 20)) - lal[V] += 1 - lal[less(bli_BL0180, -18.0)] = 1 - return lal - - ##-------------------------------------------------------------------------- - ## Calculate the Haines index based on the temp and RH cubes - ## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". - ## Default is "HIGH". 
- ##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - - #======================================================================= - # - # Calculate Haines Index - # type is "LOW", "MEDIUM", "HIGH" - # NOTE, the default haines index calcaulation is defined by: - # self.whichHainesIndex, which can be set to "LOW", "MEDIUM", "HIGH". - # Commented out below calc for DR14439 (A1 DR21354) - #======================================================================= - ## def hainesIndex(self, type, t_c, rh_c): - ## dict = {} - ## dict['LOW'] = {'t1Level': 950, 't2Level': 850, 'mLevel': 850, - ## 'stabThresh': [3, 8], 'moiThresh': [5, 10]} - ## dict['MEDIUM'] = {'t1Level': 850, 't2Level': 700, 'mLevel': 850, - ## 'stabThresh': [5, 11], 'moiThresh': [5, 13]} - ## dict['HIGH'] = {'t1Level': 700, 't2Level': 500, 'mLevel': 700, - ## 'stabThresh': [17, 22], 'moiThresh': [14, 21]} - ## dd = dict[type] # proper dictionary for the level - ## - ## # get the needed data, calc dewpoint - ## pres = self.pres - ## t1 = t_c[pres.index(dd['t1Level'])] # t1 level - ## t2 = t_c[pres.index(dd['t2Level'])] # t2 level - ## tMois = t_c[pres.index(dd['mLevel'])] - 273.15 # mLevel t , in C. 
- ## rhMois = rh_c[pres.index(dd['mLevel'])] / 100.0 # mLevel rh - ## rhMois = where(less_equal(rhMois, 0), 0.00001, rhMois) - ## - ## a = log10(rhMois) / 7.5 + (tMois / (tMois + 237.3)) - ## dpMois = (a * 237.3) / (1.0 - a) - ## - ## hainesT = t1 - t2 - ## hainesM = tMois - dpMois - ## - ## # now make the categories - ## slope = 1.0 / (dd['stabThresh'][1] - dd['stabThresh'][0]) - ## intercept = 1.5 - ((dd['stabThresh'][0] + 0.5) * slope) - ## hainesTi = (slope * hainesT) + intercept - ## hainesT = clip(hainesTi, 1.0, 3.0) - ## - ## slope = 1.0 / (dd['moiThresh'][1] - dd['moiThresh'][0]) - ## intercept = 1.5 - ((dd['moiThresh'][0] + 0.5) * slope) - ## hainesMi = (slope * hainesM) + intercept - ## hainesM = clip(hainesMi, 1.0, 3.0) - ## - ## return hainesT + hainesM - - #--------------------------------------------------------------------------- - # MaxT simply maximum of any T grids during the period - #-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - - #--------------------------------------------------------------------------- - # MinT simply minimum of any T grids during the period - #-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - - #------------------------------------------------------------------------- - # MaxRH is simply maximum of all RH grids during period - #-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - - #------------------------------------------------------------------------- - # MinRH is simply minimum of all RH grids during period - #-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) 
- - #========================================================================== - # Calculate Wetbulb (K) based on temperature (C) and RH (%) - # (all algorithms straight out of GEMPAK - converted to numeric python) - # - def Wetbulb(self, tc, rh, pres): - dpc = self.RHDP(tc, rh) - thte = self.THTE(pres, tc, dpc) - wetbk = self.TMST(thte, pres, 0) - return wetbk - - #======================================================================= - # Calculate parcel temp (K) given thetae (K) pressure (mb) and guess - # temperature (K) (must be 3d cubes) - # - def TMST(self, thte, pres, tguess): - tg = full_like(thte, tguess) - teclip = clip(thte - 270.0, 0.0, 5000.0) - # - # if guess temp is 0 - make a more reasonable guess - # - m = less(tg,1) - tg[m] = ((thte-0.5*teclip**1.05)*(pres/1000.0)**0.2)[m] - - epsi = 0.01 - tgnu = tg - 273.15 - # - # Correct the temp up to 100 times. Typically this takes - # less than 5 iterations - # - for i in range(1, 100): - tgnup = tgnu + 1.0 - tenu = self.THTE(pres, tgnu, tgnu) - tenup = self.THTE(pres, tgnup, tgnup) - cor = (thte - tenu) / (tenup - tenu) - tgnu += cor - # - # get the maximum correction we made this time - # and if it is less than epsi - then we are close - # enough to stop. 
- # - acor = abs(cor) - mcor = maximum.reduce(maximum.reduce(maximum.reduce(acor))) - if (mcor < epsi): - #print "parcel temp in %d iterations"%i - break - tgnu += 273.15 - return tgnu - - #======================================================================= - # Calculate Dewpoint (C) based on Temperature (C) and RH (%) - # - def RHDP(self, tc, rh): - log1 = log(6.112) - vaps = self.VAPR(tc) - lvapr = log(rh * vaps / 100.0 + 0.0001) - dpc = ((243.5 * (log1 - lvapr)) / (lvapr - log1 - 17.67)) - return dpc - - #======================================================================= - # Calculate Theta-E given Pressure (mb) Temperature (C) and Dewpoint (C) - # - def THTE(self, pres, tc, dpc): - rmix = self.MIXR(dpc, pres) - tk = tc + 273.15 - e = (2.0 / 7.0) * (1.0 - (0.00028 * rmix)) - thtam = tk * (1000.0 / pres) ** e - tlcl = self.TLCL(tc, dpc) - e = ((3.376 / tlcl) - 0.00254) * (rmix * (1.0 + 0.00081 * rmix)) - return (thtam * exp(e)) - - #======================================================================= - # Calculate temperature at LCL (K) given Temperature (C) and Dewpoint (C) - # - def TLCL(self, tc, dpc): - tk = tc + 273.15 - dk = dpc + 273.15 - return((1.0 / (1.0 / (dk - 56.0) + log(tk / dk) / 800.0)) + 56.0) - - #======================================================================= - # Calculate Mixing Ratio (g/kg) given Dewpoint (C) and pressure (mb) - # - def MIXR(self, dpc, pres): - vapr = self.VAPR(dpc) - corr = (1.001 + ((pres - 100.) / 900.) 
* 0.0034) - e = corr * vapr - mixr = 0.62197 * (e / (pres - e)) * 1000.0 - return mixr - - #======================================================================= - # Calculate Vapor Pressure (mb) from Dewpoint (C) - # or Saturation Vapor Pressure (mb) from Temperature (C) - # - def VAPR(self, tc): - vapr = 6.112 * (exp((17.67 * tc) / (tc + 243.5))) - return vapr - - #========================================================================== - # Get boundary layer cube - cube of values above model surface - # adds in pressure level data above the boundary layer fields - # creates: - # BLT - temperatures (K) - # BLR - relative humidity (% 0-100) - # BLH - height (m) - # BLP - pressure (mb) - # BLW - wind (magnitude kts, direction) - # BLD - dewpoint (K) - # BLE - wetbulb (K) [if desired] - # - def setupBLCube(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, - rh_BL90120, rh_BL120150, wind_FHAG10, wind_BL030, - wind_BL3060, wind_BL6090, wind_BL90120, wind_BL120150, - p_SFC, stopo, gh_c, t_c, rh_c, wind_c, ctime): - # - # check to see if already set up for this time - # - if self.BLcubeTime == ctime: - return - # - # split pressure level wind cube into magnitude and direction - # - mag_c = wind_c[0] - dir_c = wind_c[1] - dew_c = self.RHDP(t_c - 273.15, rh_c) + 273.15 - # - tbl = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150] - rbl = [rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, rh_BL120150] - wbl = [wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, wind_BL90120, - wind_BL120150] - pdiff = [0, 30, 60, 90, 120, 150] - - pSFCmb = p_SFC / 100.0 - pSFCmb[less(pSFCmb, 500.0)] = 1013.0 - - p_list = [pSFCmb] - hbot = stopo - h_list = [hbot] - t_list = [t_FHAG2] - r_list = [clip(rh_FHAG2, 0.0001, 99.999)] - m_list = [wind_FHAG10[0]] - d_list = [wind_FHAG10[1]] - w_list = [self.RHDP(t_FHAG2 - 273.15, r_list[0]) + 273.15] - - for i in range(1, len(tbl)): - tavg = tbl[i] - tavgc = tavg - 
273.15 - ravg = clip(rbl[i], 0.0001, 99.999) - davgc = self.RHDP(tavgc, ravg) - ptop = clip(pSFCmb - pdiff[i], 1.0, 1050.0) - pbot = clip(pSFCmb - pdiff[i - 1], 1.0, 1050.0) - htop = self.MHGT(tavgc, davgc, ptop, pbot, hbot) - - t_list.append(tavg) - h_list.append((hbot + htop) / 2.0) - wind = wbl[i] - m_list.append(wind[0]) - d_list.append(wind[1]) - p_list.append((pbot + ptop) / 2.0) - r_list.append(ravg) - w_list.append(davgc + 273.15) - - hbot = htop - # - # above the boundary layer...add levels in pressure - # cube - # - numplevs = gh_c.shape[0] - levstoadd = zeros_like(stopo) - for i in range(numplevs): - levstoadd[greater(gh_c[i], hbot)] += 1 - - maxtoadd = maximum.reduce(maximum.reduce(levstoadd)) - for j in range(int(maxtoadd)): - found = zeros_like(stopo) - hlev = zeros_like(stopo) - tlev = zeros_like(stopo) - mlev = zeros_like(stopo) - dlev = zeros_like(stopo) - plev = zeros_like(stopo) - rlev = zeros_like(stopo) - wlev = zeros_like(stopo) - for i in range(numplevs): - usethislev = logical_and(less(found, 0.5), greater(gh_c[i], hbot)) - hlev[usethislev] = gh_c[i][usethislev] - plev[usethislev] = self.pres[i] - tlev[usethislev] = t_c[i][usethislev] - mlev[usethislev] = mag_c[i][usethislev] - dlev[usethislev] = dir_c[i][usethislev] - rlev[usethislev] = rh_c[i][usethislev] - wlev[usethislev] = dew_c[i][usethislev] - found[usethislev] = 1.0 - - numNotFound = count_nonzero(less(found, 0.5)) - if numNotFound < 1: - break - if numNotFound > 0: - notFoundMask = less(found, 0.5) - hlev[notFoundMask] = gh_c[numplevs-1][notFoundMask] - plev[notFoundMask] = self.pres[numplevs-1] - tlev[notFoundMask] = t_c[numplevs-1][notFoundMask] - mlev[notFoundMask] = mag_c[numplevs-1][notFoundMask] - dlev[notFoundMask] = dir_c[numplevs-1][notFoundMask] - rlev[notFoundMask] = rh_c[numplevs-1][notFoundMask] - wlev[notFoundMask] = dew_c[numplevs-1][notFoundMask] - - h_list.append(hlev) - t_list.append(tlev) - p_list.append(plev) - m_list.append(mlev) - d_list.append(dlev) - 
r_list.append(rlev) - w_list.append(wlev) - hbot = hlev - - self.BLH = array(h_list) - self.BLP = array(p_list) - self.BLT = array(t_list) - self.BLR = array(r_list) - #mags=array(m_list) - #dirs=array(d_list) - #self.BLW=(mags,dirs) - self.BLW = (m_list, d_list) - self.BLD = array(w_list) - if USE_WETBULB == 1: - self.BLE = self.Wetbulb(self.BLT - 273.15, self.BLR, self.BLP) - self.BLcubeTime = ctime - return - - #--------------------------------------------------------------------------- - # Calculate the hydrostatic height (m) at the top of the layer, given an - # average temp (C) and average dewpoint (C) in the layer, the pressure (mb) - # at the top and bottom of the layer, and the height (m) at the bottom - # of the layer. Intended to be used in an integration of hydrostatic - # heights given a starting surface height and temp/dewpoint values in - # pressure levels above - # - def MHGT(self, tmpc, dwpc, ptop, pbot, hbot): - pavg = (ptop + pbot) / 2.0 - scale = self.SCLH(tmpc, dwpc, pavg) - mhgt = hbot + (scale * log(pbot / ptop)) - return mhgt - - #--------------------------------------------------------------------------- - # Calculate Virtual temperature (C) given temp(C), dewpoint (C) - # and pressure(mb) - # - def TVRT(self, tmpc, dwpc, pres): - mixrscale = self.MIXR(dwpc, pres) * 0.001 - tmpk = tmpc + 273.15 - tvrk = tmpk * (1.0 + (mixrscale / 0.62197)) / (1.0 + mixrscale) - tvrt = tvrk - 273.15 - return tvrt - - #--------------------------------------------------------------------------- - # Calculate Scale Height (m) given temp(C), dewpoint(C) and pressure(mb) - # - def SCLH(self, tmpc, dwpc, pres): - rdgas = 287.04 - gravty = 9.80616 - sclh = (rdgas / gravty) * (self.TVRT(tmpc, dwpc, pres) + 273.15) - return sclh - - #-------------------------------------------------------------------------- - # calculate area above/below freezing in J/kg (m2/s2) - # - def getArea(self, hbot, tbot, htop, ttop): - tavg = (ttop + tbot) / 2.0 - e1 = (ttop - 273.15) / 
273.15 - e2 = (tbot - 273.15) / 273.15 - area = 9.8 * ((e1 + e2) / 2.0) * (htop - hbot) - return area - - #-------------------------------------------------------------------------- - # calculate areas above/below freezing, and include a flag if it crosses - # in this layer - # - def getAreas(self, hbot, tbot, htop, ttop): - maxm = maximum(tbot, ttop) - minm = minimum(tbot, ttop) - freeze = self.newGrid(273.15) - crosses = logical_and(less(minm, freeze), greater(maxm, freeze)) - crossh = self.linear(tbot, ttop, hbot, htop, freeze) - crosst = freeze - m = logical_not(crosses) - crossh[m] = htop[m] - crosst[m] = ttop[m] - - a1 = self.getArea(hbot, tbot, crossh, crosst) - a2 = self.getArea(crossh, crosst, htop, ttop) - return a1, a2, crosses - - #======================================================================== - # Get a cube of wetbulb temperatures above the real topo - not above the - # model topo. Returns the wetbulb temps and heights - # - def getTopoE(self, topo, stopo, p_SFC, T, RH, BLH, BLE): - - pSFCmb = p_SFC / 100.0 - pSFCmb[less(pSFCmb, 500.0)] = 1013.0 - - tmpc = self.FtoK(T) - 273.15 - hlist = [topo] - if USE_WETBULB == 1: - dwpc = self.RHDP(tmpc, RH) - scale = self.SCLH(tmpc, dwpc, pSFCmb) - ptopo = pSFCmb * exp((stopo - topo) / scale) - ptopo[less(ptopo, 500.0)] = 1013.0 - at = array([tmpc]) - ar = array([RH]) - ap = array([ptopo]) - te = self.Wetbulb(at, ar, ap) - te_SFC = te[0] - tlist = [te_SFC] - else: - tlist = [tmpc + 273.15] - - - numplevs = BLH.shape[0] - levstoadd = zeros_like(topo) - for i in range(numplevs): - levstoadd[greater(BLH[i],topo)] += 1 - maxtoadd = maximum.reduce(maximum.reduce(levstoadd)) - - hbot = topo - for j in range(int(maxtoadd)): - tlev = zeros_like(topo) - hlev = full_like(topo, -5000) - use = zeros_like(topo) - for i in range(BLH.shape[0]): - thislev = logical_and(less(use, 0.5), greater(BLH[i], hbot)) - tlev[thislev] = BLE[i][thislev] - hlev[thislev] = BLH[i][thislev] - use[thislev] = 1.0 - - tlev[less(tlev,0.5)] 
= BLE[-1][less(tlev,0.5)] - hlev[less(hlev,-2500)] = BLH[-1][less(hlev,-2500)] - - tlist.append(tlev) - hlist.append(hlev) - hbot = hlev - newH = array(hlist) - newE = array(tlist) - - return(newH, newE) - - #=============================================================== - # smooths array by averaging over +/- k gridpoints in each - # direction. At the edges, only averages over the points that - # fit within this "averaging area". If k is zero or negative - # it just returns the original array - # - def smoothpm(self, array, k): - if k > 0: - a = zeros_like(array) - n = zeros_like(array) - for x in range(-k, k + 1): - for y in range(-k, k + 1): - array1 = self.offset(array, x, y) - ok = greater(array1, -9000) - a[ok] += array1[ok] - n[ok] += 1 - - m = less(n,1) - a[m] = array[m] - n[m] = 1 - a /= n - arraysmooth = a - else: - arraysmooth = array - return arraysmooth - - #======================================================================= - # Gets a copy of array that is shifted x,y gridpoints. The edge - # points that are unknown are set to -9999.0. Used in smoothing - # - def offset(self, a, x, y): - sy1, sy2 = self.getindicies(y, a.shape[0]) - sx1, sx2 = self.getindicies(x, a.shape[1]) - b = full_like(a, -9999.0) - b[sy1, sx1] = a[sy2, sx2] - return b - - #============================================================== - # getindicies - used in slicing array - # - def getindicies(self, o, l): - if o > 0: - a = slice(o, l); b = slice(0, l - o) - elif o < 0: - a = slice(0, l + o); b = slice(-o, l) - else: - a = slice(0, l); b = slice(0, l) - return a, b - - #========================================================================== - # A linear interpolation that can be used for directions, where the - # values should never get higher than 360 degrees. 
We want - # interpolations that cross this 360 degree barrier to "go the - # right way" rather than flip back in the opposite direction - # - def dirlinear(self, xmax, xmin, ymax, ymin, we): - ydif = abs(ymax - ymin) - rotate = greater(ydif, 180.0) - upper = greater(ymin, 180.0) - lower = less(ymin, 180.0) - ymax[logical_and(rotate,upper)] += 360.0 - ymax[logical_and(rotate,lower)] -= 360.0 - slope = (ymax - ymin) / (xmax - xmin + .0000001) - intercept = ymin - slope * xmin - value = slope * we + intercept - value[greater(value, 360)] -= 360 - value[less(value, 0.0)] += 360 - return value - -def main(): - NAM12Forecaster().run() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +# +# NAM12 SmartInit +# +# Comments below for each algorithm. +# +# Author: Tim Barker - SOO Boise, ID +# +#============================================================================= +# +# C O N F I G U R A T I O N S E C T I O N +# +#============================================================================= +# +# USE_WETBULB=1 (Yes) or 0 (No). 
Using wetbulb for calculating snow level +# and precipitation type is probably more accurate - since it would be the +# temperature that the atmosphere would be if you evaporated water into it +# until saturation. Thus...when the model is dry at low layers and you think +# it might precip...then the temperature would likely be much cooler than what +# the model says. However...the wetbulb calculation is VERY slow and you can +# save yourself a lot of time by not doing it. You could argue that if the +# model isn't making precip - then you shouldn't be changing its temps, but +# it really seesm to work well in areas of terrain and 'showery' precip - +# where the model sounding is representative of the large-scale, but inside +# the showers it is cooler. +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 02/16/12 14439 jdynina modified Haines calculation +# 03/04/13 15585 jzeng Modified wxtype range in calcWx() +# +## +USE_WETBULB = 1 +# +# +#============================================================================ +# +# E N D C O N F I G U R A T I O N S E C T I O N +# +#============================================================================ +from Init import * +class NAM12Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "NAM12", "NAM12") + self.BLcubeTime = (None, None) + + def levels(self): + return ["MB1000", "MB975", "MB950", "MB925", + "MB900", "MB875", "MB850", "MB825", + "MB800", "MB775", "MB750", "MB725", + "MB700", "MB675", "MB650", "MB625", + "MB600", "MB575", "MB550", "MB525", + "MB500", "MB450", "MB400", "MB350"] + + #--------------------------------------------------------------------------- + # T - use model sounding to get temperature at real topography instead of + # model topography + # + # Where the topo is above the model topo - use the boundary + # layer temperature to interpolate a temperature...but in radiational + # inversions this 
is typically too warm because the free air + # temperature from the model is warmer than air near the ground on + # a mountain that sticks up higher than the model mountains. So... + # if there is an inversion (i.e. the boundary layer temp at the + # desired height is warmer than the model surface temp) it only goes + # 1/2 as warm as the raw inversion in the free model atmosphere would + # be. Not sure if this is good for strong and persistent inversions + # like marine inversions - but works well for persistent radiational + # inversions in the intermountain west during the winter - and works + # well for nocturnal inversions all times of the year. + # Where the topo is below the model topo - it uses the lapse rate between + # the two lowest boundary layer levels and extrapolates this downward - + # with the restriction that the lapse rate cannot be more than dry + # adiabatic and inversions are extrapolated at only 1/2 that lapse rate + # and also limited to no more than 1.5C decrease per km. The 1.5C per km + # restriction is arbirary - further research may restrict it more or + # less. The dry adiabatic restriction seems to work fine. 
+ #-------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150, + rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, rh_BL120150, + wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, wind_BL90120, + wind_BL120150, p_SFC, stopo, topo, gh_c, t_c, rh_c, wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + + BLT = self.BLT + #self.printval("temp:",self.BLT,65,65) + BLH = self.BLH + + st = self.newGrid(-1) + for i in range(1, BLH.shape[0]): + tval = self.linear(BLH[i], BLH[i - 1], BLT[i], BLT[i - 1], topo) + # + # restrict the increase in areas where inversions present + # + m = greater(tval,BLT[0]) + tval[m] = (BLT[0]+((tval-BLT[0])/2.0))[m] + + between = logical_and(greater_equal(topo, BLH[i - 1]), less(topo, BLH[i])) + m = logical_and(less(st,0.0),between) + st[m] = tval[m] + # + # restrict the lapse rates below the model surface + # + lapse = (BLT[1] - BLT[0]) / (BLH[1] - BLH[0]) + lapse[greater(lapse, 0.0)] /= 2.0 + maxinvert = 1.5 / 1000.0 + lapse[greater(lapse, maxinvert)] = maxinvert + drylapse = -9.8 / 1000.0 + lapse[less(lapse, drylapse)] = drylapse + tst = BLT[0] + ((topo - stopo) * lapse) + + m = less(st,0.0) + st[m] = tst[m] + # + #diff=t_FHAG2-st + #maxdiff=maximum.reduce(maximum.reduce(diff)) + #mindiff=minimum.reduce(minimum.reduce(diff)) + #print "max/min temp change: %6.2f %6.2f"%(maxdiff,mindiff) + # + # change to Fahrenheit + # + return self.KtoF(st) + + #-------------------------------------------------------------------------- + # Td - where topo is above the model topo - it interpolates the dewpoint + # from the model sounding. 
This allows mountains sticking up into dry + # dry air during nighttime inversions to reflect the dry air aloft. + # Where the topo is below the model topo - it uses the model surface + # mixing ratio, and assumes that is constant to the real topo - and + # uses the temperature at the real topo calculated in calcT + #--------------------------------------------------------------------------- + def calcTd(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, T, stopo, topo, gh_c, t_c, + rh_c, wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + BLD = self.BLD + BLH = self.BLH + # + # for real topo above model topo - interpolate dewpoint from the + # model dewpoint sounding + # + sd = self.newGrid(-1) + for i in range(1, BLH.shape[0]): + dval = self.linear(BLH[i], BLH[i - 1], BLD[i], BLD[i - 1], topo) + between = logical_and(greater_equal(topo, BLH[i - 1]), less(topo, BLH[i])) + + m = logical_and(less(sd,0.0),between) + sd[m] = dval[m] + + # + # for real topo below model topo - use model surface mixing ratio + # and use that mixing ratio with the surface temperature which + # was derived from the low-level lapse rate. 
+ # + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) + tsfce = self.esat(self.FtoK(T)) + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz + ws = (0.622 * tsfce) / (newp - tsfce) + rh = w / ws + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + sd=where(less(sd,0.0),td,sd) + # + # change to Fahrenheit and make sure it is less than temp + # + td = self.KtoF(sd) + td=where(greater(td,T),T,td) + + return td + + #------------------------------------------------------------------------- + # RH - simply calculate RH based on Temp and Dewpoint (both in degrees F) + #------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + return RH + + def dewFromTandRH(self, T, RH): + tc = (T - 32.0) * (5.0 / 9.0) + rh = clip(RH, 0.001, 99.999) / 100.0 + x = (log(rh) / 17.67) + (tc / (tc + 243.5)) + tdc = (243.5 * x) / (1.0 - x) + td = (tdc * 9.0 / 5.0) + 32.0 + return td + + #-------------------------------------------------------------------------- + # Wx - uses a derivation of the Bourgouin algorithm to determin + # precip type. + # + # Uses a sounding of wetbulb temperature (the temperature that it would + # be in the model sounding if precip were falling) and finds areas above + # and below freezing and figures precip phase based on empirical values. + # + # Makes a simple guess at where it will be showers based on the model LI + # - making it showers when LI is less than 2. + # + # Makes a simple guess at where there will be thunderstorms based on the + # model LI less than -1 (SChc), <-3 (Chc), <-5 (Lkly), <-8 (Def). 
+ # + # After determining precip type, it matches the probability part of the Wx + # grid to the existing PoP grid, removing Wx where the PoP grid is below + # 15%. Would love to only calculate the Wx Type here - and not have the + # PoP involved - but this is not the way most people understand it. + #-------------------------------------------------------------------------- + def calcWx(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, PoP, T, RH, bli_BL0180, stopo, + topo, gh_c, t_c, rh_c, wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + BLH = self.BLH + # + # use temp or wetbulb + # + if USE_WETBULB == 1: + TT = self.BLE + else: + TT = self.BLT + # + # get temperatures (or wetbulb) at levels above the real topo + # not model topo + # + (BLH, TT) = self.getTopoE(topo, stopo, p_SFC, T, RH, BLH, TT) + # + # calculate number of zero crossings, and areas above/below + # freezing of the wetbulb sounding + # + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + for i in range(1, BLH.shape[0]): + a11, a22, cross = self.getAreas(BLH[i - 1], TT[i - 1], BLH[i], TT[i]) + topomask = greater(BLH[i], topo) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a11, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a11, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a11, a3) + topomask = logical_and(topomask, cross) + aindex = where(topomask, aindex + 1, aindex) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a22, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + 
a2 + a22, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a22, a3) + # + # The basic types we are choosing between + # + wxtypes = ['::::', + "Def:S:-::", + "Def:R:-::", + "Def:S:-::^Def:R:-::", + 'Def:ZR:-::', + 'Def:IP:-::', + 'Def:ZR:-::^Def:IP:-::', + "Def:SW:-::", + "Def:RW:-::", + "Def:SW:-::^Def:RW:-::", + "Def:ZR:-::", + 'Def:IP:-::', + 'Def:ZR:-::^Def:IP:-::'] + + wx = self.empty(int8) + # + # Case d - no zero crossings. All snow or all rain + # + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # + # Case c - one crossing. Snow if little area above freezing. + # Rain if lots of area above freezing. + # Mix if between + # + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 2 + wx[logical_and(srmask, + logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + # + # Case a - two crossings. Either freezing rain or ice pellets + # ice pellets when surface cold area is big + # freezing rain when surface cold area is small + # mix when between + # + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + # + # Case b - three crossings. If not much in the top warm layer + # then it acts like case c. + # If enough to melt in that layer - then + # see if cold layer is enough to re-freeze + # and be ice pellets - or just remain rain. 
+ # + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + # + # Where LI<2, make showers + # + bli_BL0180 = where(less(bli_BL0180, -18.0), 10.0, bli_BL0180) + convecMask = less(bli_BL0180, 2) + wx[convecMask] += 6 + # + # off the grid need no weather + # + wxgrid = self.empty(int8) + keys = ['::::', ] + wxgrid[less(bli_BL0180, -18.0)] = 0 + # + # Match PoP, and remove non-occurring wx + # + poplimits = [15.0, 25.0, 55.0, 75.0, 101.0] + popprobs = ["None", "SChc", "Chc", "Lkly", "Def"] + for popcat in range(5): + if popcat > 0: + lowlimit = poplimits[popcat - 1] + else: + lowlimit = -1 + ispopcat = logical_and(greater(PoP, lowlimit), + less(PoP, poplimits[popcat])) + # + # If this pop category doesn't exist anywhere - then + # we don't have to worry about it. 
+ # + some = logical_or.reduce(logical_or.reduce(ispopcat)) + if not some: + continue + # + # the no-pop case is easy - make it no weather + # + if popcat == 0: + wxgrid[ispopcat] = 0 + continue + # + # for all others...see if any weather combinations exist + # and add those + # + prob = popprobs[popcat] + for iwx in range(1, 13): + wxstring = wxtypes[iwx] + ispopwx = logical_and(ispopcat, equal(wx, iwx)) + some = any(ispopwx) + if not some: + continue + types = [] + types = string.split(wxstring, "^") + for i in range(len(types)): + type = types[i] + pieces = string.split(type, ":") + pieces[0] = prob + types[i] = string.join(pieces, ":") + wxstring = string.join(types, "^") + keys.append(wxstring) + keynum = len(keys) - 1 + wxgrid[ispopwx] = keynum + # + # thunder is totally separate from PoP, only related to + # the instability. SChc for LI <-1, Chc for LI<-3, + # Lkly for LI<-5, Def for LI<-8 + # + thunder = less_equal(bli_BL0180, -1).astype(int8) + thunder[less_equal(bli_BL0180, -3)] = 2 + thunder[less_equal(bli_BL0180, -5)] = 3 + thunder[less_equal(bli_BL0180, -8)] = 4 + + tprobs = ["None", "SChc", "Chc", "Lkly", "Def"] + for ith in range(1, 5): + tprob = equal(thunder, ith) + some = any(tprob) + if not some: + continue + needadd = where(tprob, wxgrid, int8(0)) + numkeys = len(keys) + for i in range(1, numkeys): + add = equal(needadd, i) + some = any(add) + if not some: + continue + wxstring = keys[i] + addstring = wxstring + "^" + tprobs[ith] + ":T:::" +# print "added thunder:",addstring + keys.append(addstring) + keynum = len(keys) - 1 + wxgrid[add] = keynum + return(wxgrid, keys) + + #-------------------------------------------------------------------------- + # QPF - simply take model QPF and change units to inches + #---------------------------------------- ---------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + 
#-------------------------------------------------------------------------- + # PoP - based strongly on QPF (since when model has one inch of precip the + # chance of getting 0.01 is pretty high). However, there is a big + # difference between a place that model has 0.00 precip and is very + # close to precipitating - and those where model has 0.00 and is a + # thousand miles from the nearest cloud. Thus, uses the average + # boundary layer RH to make an adjustment on the low end - adding + # to PoP where RH is high. Ignores surface RH to try to ignore fog + # cases. Would also like to consider omega. + # + # Uses hyperbolic tangent of QPF, so that it rises quickly as model + # QPF increases - but tapers out to nearly 100% as QPF gets high. + # Also uses hyperbolic tangent of QPF to reduce the impact of high RH + # as QPF gets higher (since avg RH will always be high when QPF is high) + # + # Adjustable parameters: + # topQPF is QPF amount that would give 75% PoP if nothing else + # considered at half this amount, PoP is 45%, at double this + # amount PoP is 96%. Default set at 0.40. + # RHexcess is amount of average BL RH above which PoP is adjusted + # upward. Default set to 60% + # adjAmount is maximum amount of adjustment if BL RH is + # totally saturated. 
Default set to 30% + # + #-------------------------------------------------------------------------- + def calcPoP(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, QPF, stopo, topo, gh_c, t_c, + rh_c, wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + BLR = self.BLR + + topQPF = 0.40 # QPF value where raw PoP would be 75% + RHexcess = 60.0 # RH above this can add to PoP and below will subtract + adjAmount = 30.0 # amount of adjustment allowed + # + factor = tanh(QPF * (1.0 / topQPF)) + factor2 = tanh(QPF * (2.0 / topQPF)) + # + # + # + rhcube = BLR[1:5] + rhavg = add.reduce(rhcube) / 4.0 + rhmax = 100 - RHexcess + dpop = rhavg - RHexcess + dpop[less(dpop, 0.0)] = 0.0 + dpop = (dpop / rhmax) * (1.0 - factor2) * adjAmount + # + pop = (factor * 100.0) + dpop + pop = clip(pop, 0, 100) + # + return pop + + #-------------------------------------------------------------------------- + # Chance of Wetting Rain (0.1 inch). Same algorithm as PoP, but requires + # more model QPF to get same chances, and higher boundary layer RH + # to get the adjustment (and maximum adjustment is less). + # + # Adjustable parameters: + # topQPF should be higher than PoP topQPF + # Default set at 0.60. 
+ # RHexcess should be higher than PoP RHexcess + # Default set to 80% + # adjAmount should be smaller than PoP adjAmount + # Default set to 10% + # + #-------------------------------------------------------------------------- + def calcCWR(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, PoP, QPF, stopo, topo, gh_c, + t_c, rh_c, wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + BLR = self.BLR + # + topQPF = 0.60 # QPF value where raw PoP would be 75% + RHexcess = 70.0 # RH above this can add to PoP and below will subtract + adjAmount = 15.0 # amount of adjustment allowed + # + factor = tanh(QPF * (1.0 / topQPF)) + factor2 = tanh(QPF * (2.0 / topQPF)) + # + # + # + rhcube = BLR[1:5] + rhavg = add.reduce(rhcube) / 4.0 + rhmax = 100 - RHexcess + dpop = rhavg - RHexcess + dpop[less(dpop, 0.0)] = 0.0 + dpop = (dpop / rhmax) * (1.0 - factor2) * adjAmount + # + cwr = (factor * 100.0) + dpop + cwr = clip(cwr, 0, 100) + cwr=where(greater(cwr,PoP),PoP,cwr) + return cwr + + #---------------------------------------------------------------- + # Sky - Calculates cloud percentage in each layer based on + # RH in that layer. Then adds up the percentages in + # the layers. Model clouds seem too 'binary', and so + # they are not used. + # + # We guess that it takes higher RH near the surface (say + # 97%) to get a cloud, but less RH up high (say only 90% + # to get cirrus). Transition width is wider up high, than + # it is near the surface. + # + # Also weight high clouds less in the coverage than + # low clouds. 
+ # + # No downscaling is attempted since an observer can usually + # see MANY gridpoints - and judges clouds based on all of + # them - not just whether there is a cloud in the small + # gridpoint directly overhead. Thus, cloud fields are + # rather smooth. + #---------------------------------------------------------------- + #def calcSky(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + # t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + # rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + # wind_BL90120, wind_BL120150, p_SFC, stopo, topo, gh_c, t_c, rh_c, + # wind_c, ctime): + + # self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + # t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + # rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + # wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + # wind_c, ctime) + # BLP=self.BLP + # BLR=self.BLR + # # + # # create a 'sigma' pressure field + # # + # pp = BLP / BLP[0] + # pp = clip(pp, 0.1, 1.0) + # # + # # remove surface level - so surface Fog does not count + # # + # pp=pp[1:] + # BLR=BLR[1:] + # # + # # get weight based on pressure - high levels get counted little + # # maxes out at 700mb, low levels count a little less + # # + # ftop=50 # max coverage at top + # maxlev=0.7 # sigma leve of max allowed coverage + # fmax=100 # max coverage at max level + # fbot=90 # max coverage at surface + # f100 = where(less(pp,maxlev), ((fmax-ftop)*(pp/maxlev))+ftop, + # fbot+(1.0-pp)*(fmax-fbot)/(1.0-maxlev)) + # # + # # ramp-up functions from RH to coverage based on pressure + # # + # midbot=90.0 + # midtop=80.0 + # mid=(pp*(midbot-midtop))+midtop + # widbot=10.0 + # widtop=20.0 + # wid=(pp*(widbot-widtop))+widtop + # c=(0.5*tanh(((BLR-mid)*2.0)/wid))+0.5 + # # + # # coverage for each level based on RH + # # + # f = minimum(f100 * c, 100.0)/100.0 + # # + # # When level 1 has 50% coverage, then 50% coverage + # # at level 2 covers 50% 
of the remaining clear sky, + # # (so now 75%) and 50% coverage at level 3 covers + # # 50% of the remaining clear sky (now 87%), etc. + # # +# if f.shape[0]: + # sky = f[0] +# else: +# sky = resize(f, f.shape[1:]) + # for i in xrange(1, f.shape[0]): + # sky = sky + f[i] - sky * f[i] + # # + # # Smooth it a little + # # + # pSFCmb=p_SFC/100.0 + # sky=where(less(pSFCmb,500),-9999.0,sky) + # sky=self.smoothpm(sky,2) + # sky=clip(sky*100.0,0.0,100.0) + # return sky + + ##-------------------------------------------------------------------------- + ## Calculates Sky condition (fractional cloud cover) from model RH at specific + ## pressure levels. Uses reduced equations from Walcek, MWR June 1994. + ## Adds up the amount of fractional clouds calculated at each layer based on + ## topography (i.e. no clouds below ground) then divides by a suggested number + ## of layers to produce an average sky. + ##----------------------------------------------------------------------------- + def calcSky(self, rh_c, gh_c, topo, p_SFC, rh_BL030, rh_BL3060, rh_BL6090, + rh_BL90120, rh_BL120150): + + tmpP_SFC = p_SFC.copy() + tmpP_SFC /= 100.0 # convert surfp to millibars + x = 560.0 # delta x (85km - 850km) + + # Define a percentage of f100 to use as a filter (0.0 - 1.0) + # Remember f100 is an exponential function, so changes will be more + # pronounced in the 0.5-1.0 range than the 0.0-0.5 range. 
+ percent = 0.37 + + # Define a list of boundary layer levels to include + BL_levels = ['BL030', 'BL3060', 'BL6090', 'BL90120', 'BL120150'] + + # Construct a boundary layer pressure and RH cube + bl_Pcube = [] + bl_RHcube = [] + + # Place all BL RH levels into a cube + bl_RHcube += [rh_BL030] + bl_RHcube += [rh_BL3060] + bl_RHcube += [rh_BL6090] + bl_RHcube += [rh_BL90120] + bl_RHcube += [rh_BL120150] + bl_RHcube = array(bl_RHcube) + + + # Make a cube of boundary layer pressures + for lvl in BL_levels: + if lvl == 'BL030': + tmpP = tmpP_SFC - 15.0 + elif lvl == 'BL3060': + tmpP = tmpP_SFC - 45.0 + elif lvl == 'BL6090': + tmpP = tmpP_SFC - 75.0 + elif lvl == 'BL90120': + tmpP = tmpP_SFC - 105.0 + elif lvl == 'BL120150': + tmpP = tmpP_SFC - 135.0 + elif lvl == 'BL150180': + tmpP = tmpP_SFC - 165.0 + bl_Pcube += [tmpP] + bl_Pcube = array(bl_Pcube) + + + # Make a model level pressure cube + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + + + # Convert BL pressures to sigma levels + BL_sigma = bl_Pcube / tmpP_SFC + del bl_Pcube + BL_sigma = clip(BL_sigma, 0.1, 1.0) + + # Convert model level pressure cube to sigma surfaces + pp = pmb / tmpP_SFC + del tmpP_SFC + pp = clip(pp, 0.1, 1.0) + + + # Account for topography in the model cube, don't need to worry about + # this with the BL cube since those are guaranteed to be above ground + tmpRH_c = where(less(gh_c, topo), float32(0.0), rh_c) + + #======================================================================= + # Create new RH and sigma cubes + + newRH_c = [] + newSigma_c = [] + + # See which boundary layer levels have pressures > lowest "signficant" + # model level pressure + for bl_i in range(BL_sigma.shape[0]): + + # Make a mask to identify which points from the boundary + # layer level have greater pressure than lowest "significant" + # model level + BL_mask = greater(BL_sigma[bl_i], pp[0]) + + # See how many points we've found + count = sum(sum(BL_mask, 1)) + + # If there are no 
points - don't include this BL level + if count == 0: + continue + + # Compute a temporary RH grid where it is lower than the lowest + # "significant" model level data + tmpRH = where(BL_mask, bl_RHcube[bl_i], float32(0.0)) + + + # Compute a temporary sigma grid for this boundary layer level + # where it is lower than the lowest "significant" model level + tmpSigma = where(BL_mask, BL_sigma[bl_i], float32(0.0)) + + # Insert this level into the new RH and sigma cubes + newRH_c += [tmpRH] + newSigma_c += [tmpSigma] + + + # Add lowest "significant" model level to RH and sigma cubes + newRH_c += [tmpRH_c[0]] + newSigma_c += [pp[0]] + + + # Insert boundary layer RH into RH cube where appropriate + for lvl in range(1, len(self.levels())): + + # Look at each boundary layer level + for bl_i in range(BL_sigma.shape[0]): + + # Make a mask to identify which points from the boundary + # layer level fall between the surrounding "significant" + # model levels + BL_mask = logical_and(greater(BL_sigma[bl_i], pp[lvl]), + less(BL_sigma[bl_i], pp[lvl - 1])) + + # See how many points we've found + count = sum(sum(BL_mask, 1)) + + # If there are no points - don't include this BL level + if count == 0: + continue + + # Compute a temporary RH grid where it is between the two + # "significant" model level data + tmpRH = where(BL_mask, bl_RHcube[bl_i], float32(0.0)) + + + # Compute a temporary sigma grid for this boundary layer level + # where it is between the two "significant" model levels + tmpSigma = where(BL_mask, BL_sigma[bl_i], float32(0.0)) + + # Insert this level into the new RH and sigma cubes + newRH_c += [tmpRH] + newSigma_c += [tmpSigma] + + + # Add top of layer we just examined to RH and sigma cube + newRH_c += [tmpRH_c[lvl]] + newSigma_c += [pp[lvl]] + + del bl_RHcube + del BL_sigma + del tmpRH_c + + # Finish off the new cubes + newRH_c = array(newRH_c) + newSigma_c = array(newSigma_c) + + # Determine maximum possible sky fraction + fmax = 78.0 + x / 15.5 + + # Compute sky 
fraction for both pressure cubes + f100 = where(less(newSigma_c, 0.7), + fmax * (newSigma_c - 0.1) / 0.6, + 30.0 + (1.0 - newSigma_c) * (fmax - 30.0) / 0.3) + + # Compute RH depression at 37% f100 [ (1-RHe) in Walcek ] + c = 0.196 + (0.76 - x / 2834.0) * (1.0 - newSigma_c) + + del newSigma_c + + # Compute critical RH threshold to use as a filter + # Note (percent * f100)/f100 = percent + try: + rhCrit = log(percent) * c + 1.0 + except: + rhCrit = 0.0 + + # Ensure "critical RH" is valid + rhCrit = clip(rhCrit, 0.0, 1.0) + + # Compute sky fraction for the model cube + c = (newRH_c / 100.0 - 1.0) / c + c = exp(c) + f = minimum(f100 * c, 100.0) + + # Where RH is less than the critical value, set it to 0 contribution + f[less(newRH_c / 100.0, rhCrit)] = 0.0 + + del newRH_c + + # Compress cubes vertically + f = self.squishZ(f, (f.shape[0] / 5) - 1) # was 5 + + # Convert sky fractions to an actual percentage + if len(f) >= 5: + f[4] *= 0.25 + else: + LogStream.logEvent("WARNING: Sky data is missing some levels - calculation will be incomplete") + ind = len(f) - 1 + f[ind] *= 0.25 + + f /= 100.0 + + sky = f[0] + for i in range(1, f.shape[0]): + sky = sky + f[i] - sky * f[i] + + grid = sky * 100.0 + + return grid + #========================================================================= + # Wind - uses boundary layer wind 'sounding' to get the wind at the + # real elevation rather than the model elevation. When real topo + # is below model topo, just uses the lowest boundary layer wind field. + # + # This typically gives ridgetops a bit too much wind speed - so if speed + # is above the model surface wind speed - it only uses 1/2 of the + # difference. Direction is allowed to reflect the direction at the + # higher level. This gives the wind a 'topography' influenced look - + # with sharp mountains sticking up into 'stronger' wind speeds and + # different wind directions. 
+ #---------------------------------------------------------------- + def calcWind(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, topo, gh_c, t_c, rh_c, + wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + BLH = self.BLH + BLW = self.BLW + BLMAG = BLW[0] + BLDIR = BLW[1] + + smag = self.newGrid(-1) + sdir = self.newGrid(-1) + + m = less(topo,BLH[0]) + smag[m] = BLMAG[0][m] + sdir[m] = BLDIR[0][m] + + for i in range(1, BLH.shape[0]): + mval = self.linear(BLH[i], BLH[i - 1], BLMAG[i], BLMAG[i - 1], topo) + dval = self.dirlinear(BLH[i], BLH[i - 1], BLDIR[i], BLDIR[i - 1], topo) + # + # limit winds to be half as strong as wind in + # free atmosphere above the model surface would indicate + # + m = greater(mval,BLMAG[0]) + mval[m] = (BLMAG[0]+((mval-BLMAG[0])/2.0))[m] + + between = logical_and(greater_equal(topo, BLH[i - 1]), less(topo, BLH[i])) + m = logical_and(less(smag,0.0),between) + smag[m] = mval[m] + sdir[m] = dval[m] + # + # Change to knots + # + smag *= 1.94 + smag[less(p_SFC/100.0, 500.0)] = 0.0 + sdir.clip(0, 359.5, sdir) + return(smag, sdir) + + #======================================================================== + # MixHgt - the height to which a parcel above a 'fire' would rise + # (in height) above ground level (in feet). + # + # Calculated by assuming a parcel above a fire is VERY hot - but the fire + # is very small - so that entrainment quickly makes it only a few degrees + # warmer than the environment. Ideally would want to consider moisture + # and entrainment - but this is a very simple first guess. 
+ # + # This does NO downscaling - and even smooths the field a little at the + # end. We have no observations of this - other than at sounding + # locations - so we have no idea what the spatial patterns should look + # like. + #---------------------------------------------------------------- + def calcMixHgt(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, topo, gh_c, t_c, rh_c, + wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + BLT = self.BLT + BLP = self.BLP + BLTheta = self.ptemp(BLT, BLP) + BLH = self.BLH + # + # Potential temp of fire 2 degrees warmer than surface parcel + # + fireHeat = 2.0 + pSFCmb = p_SFC / 100 + fireTheta = self.ptemp(t_FHAG2 + fireHeat, pSFCmb) + # + # find height the fireTheta crosses the sounding theta + # + mixhgt = self.newGrid(-1) + for i in range(1, BLH.shape[0]): + hcross = self.linear(BLTheta[i], BLTheta[i - 1], BLH[i], BLH[i - 1], fireTheta) + cross = logical_and(greater(BLTheta[i], fireTheta), less(mixhgt, 0.0)) + mixhgt[cross] = hcross[cross] + + m = less(mixhgt,0.0) + mixhgt[m] = BLH[-1][m] + # + # Change to height above the model topo (in feet) + # and smooth a little + # + mixhgt -= stopo + mixhgt *= 3.28 + mixhgt[less(pSFCmb, 500)] = -9999.0 + mixhgt = self.smoothpm(mixhgt, 2) + mixhgt.clip(0.0, 50000.0, mixhgt) + return mixhgt + + #=========================================================================== + # SnowAmt - simple snow ratio based on surface temperature - multiplied + # times the model QPF amount + #--------------------------------------------------------------------------- + def 
calcSnowAmt(self, T, QPF): + snowr = (T * -0.5) + 22.5 + snowr[less(T, 9.0)] = 20 + snowr[greater_equal(T, 30.0)] = 0 + snowamt = QPF * snowr + return snowamt + + #========================================================================== + # Many of the models have had a freezing level in the gh field. + #========================================================================== + def calcFzLevel(self, gh_FRZ): + return gh_FRZ * 3.28 + + #======================================================================== + # calcSnowLevel - takes sounding of the wetbulb temperature and finds the + # lowest elevation (above ground) where wetbulb crosses from + # above freezing to below freezing. When top wetbulb is above + # freezing - puts in height of top level. When surface + # wetbulb is below freezing - assumes a simple dry-adiabtic + # lapse rate below ground (which is ludicrous for a wetbulb + # lapse rate - but who really cares what the snow level is + # when it is below ground anyway?). + # + # This is almost always too noisy so we smooth it with a + # +/- 4 gridpoint average. Note that this means that there + # may be gridpoints where the surface wetbulb is below + # freezing - but the snow level ends up being above + # ground. If this bothers you - remove the smoothing. 
+ #------------------------------------------------------------------------ + def calcSnowLevel(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, T, RH, stopo, topo, gh_c, t_c, + rh_c, wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + BLH = self.BLH + # + # + # + if USE_WETBULB == 1: + TT = self.BLE + else: + TT = self.BLT + # + # get wetbulb temperatures above topography + # + (BLH, TT) = self.getTopoE(topo, stopo, p_SFC, T, RH, BLH, TT) + snowlvl = self.newGrid(-1) + # + # find the ones below ground + # + tk = TT[0] + below = less(tk, 273.15) + lapse = 9.8 / 1000.0 + tmsl = tk + (lapse * topo) + hbot = topo * 0.0 + hcross = self.linear(tk, tmsl, topo, hbot, 273.15) + hcross[less(hcross, 0.0)] = 0.0 + + snowlvl[below] = hcross[below] + # + # find the ones above the topo surface + # + tbot = tk + hbot = topo + for i in range(1, BLH.shape[0]): + hcross = self.linear(TT[i], TT[i - 1], BLH[i], BLH[i - 1], 273.15) + cross = logical_and(less_equal(TT[i], 273.15), greater(TT[i - 1], 273.15)) + add = logical_and(cross, less(snowlvl, -0.5)) + snowlvl[add] = hcross[add] + # + # when still above freezing at the top of the BL layer - just + # put in that height (best we can do without more data) + # + m = less(snowlvl,-0.5) + snowlvl[m] = BLH[-1][m] + # + # Change to feet and subtract 500 feet if not using the wetbulb method + # + snowlvl *= 3.28 + if USE_WETBULB != 1: + snowlvl -= 500.0 + # + # Take care of any missing data points + # + pSFCmb = p_SFC / 100.0 + snowlvl[less(pSFCmb, 500.0)] = -9999.0 + # + # Smooth a little to reduce noise + # + snowlvl = 
self.smoothpm(snowlvl, 4) + snowlvl.clip(0.0, 50000.0, snowlvl) + return snowlvl + + #========================================================================== + # TransWind - the average winds in the layer between the surface + # and the mixing height. + #-------------------------------------------------------------------------- + def calcTransWind(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, MixHgt, stopo, topo, gh_c, t_c, + rh_c, wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + BLW = self.BLW + BLH = self.BLH + BLM = BLW[0] + BLD = BLW[1] + nmh = stopo + (MixHgt * 0.3048) # convert MixHt from feet -> meters + + pSFCmb = p_SFC / 100.0 + (utot, vtot) = self._getUV(BLM[0], BLD[0]) + numl = self.newGrid(1) + + for i in range(1, BLH.shape[0]): + use = less(BLH[i], nmh) + (u, v) = self._getUV(BLM[i], BLD[i]) + utot[use] += u[use] + vtot[use] += v[use] + numl[use] += 1 + + # + # calculate average + # + utot /= numl + vtot /= numl + # + # Smooth a little + # + utot[less(pSFCmb, 500.0)] = -9999.0 + vtot[less(pSFCmb, 500.0)] = -9999.0 + utot = self.smoothpm(utot, 1) + vtot = self.smoothpm(vtot, 1) + # + # convert utot, vtot to mag, dir + # + (tmag, tdir) = self._getMD(utot, vtot) + tdir.clip(0, 359.5, tdir) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tmag) # clip speed to 125 knots + return(tmag, tdir) + + #-------------------------------------------------------------------------- + # LAL - Based mainly on lifted index. Adds more when RH at top of BL is + # high, but RH at bottom of BL is low. 
+ #-------------------------------------------------------------------------- + def calcLAL(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, tp_SFC, bli_BL0180, stopo, topo, + gh_c, t_c, rh_c, wind_c, ctime): + + self.setupBLCube(t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, + rh_BL120150, wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, + wind_BL90120, wind_BL120150, p_SFC, stopo, gh_c, t_c, rh_c, + wind_c, ctime) + lal = self.newGrid(1) + BLR = self.BLR + # + # only thing we have is boundary layer lifted index + # set LAL to 2 if LI<0, 3 if LI<-3, 4 if LI<-5 + # + lal[less(bli_BL0180, 0)] += 1 + lal[less(bli_BL0180, -3)] += 1 + lal[less(bli_BL0180, -5)] += 1 + + # + # Add more when RH at top of BL is greater than + # than 70% and RH at bottom of BL is less than 30 + # + V = logical_and(greater(BLR[5], 70), less(BLR[0], 30)) + lal[V] += 1 + # + # Add even more where RH at top of BL is greater than + # 80% and RH at bottom of BL is less than 20% + # + V = logical_and(greater(BLR[5], 80), less(BLR[0], 20)) + lal[V] += 1 + lal[less(bli_BL0180, -18.0)] = 1 + return lal + + ##-------------------------------------------------------------------------- + ## Calculate the Haines index based on the temp and RH cubes + ## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". + ## Default is "HIGH". 
+ ##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + + #======================================================================= + # + # Calculate Haines Index + # type is "LOW", "MEDIUM", "HIGH" + # NOTE, the default haines index calcaulation is defined by: + # self.whichHainesIndex, which can be set to "LOW", "MEDIUM", "HIGH". + # Commented out below calc for DR14439 (A1 DR21354) + #======================================================================= + ## def hainesIndex(self, type, t_c, rh_c): + ## dict = {} + ## dict['LOW'] = {'t1Level': 950, 't2Level': 850, 'mLevel': 850, + ## 'stabThresh': [3, 8], 'moiThresh': [5, 10]} + ## dict['MEDIUM'] = {'t1Level': 850, 't2Level': 700, 'mLevel': 850, + ## 'stabThresh': [5, 11], 'moiThresh': [5, 13]} + ## dict['HIGH'] = {'t1Level': 700, 't2Level': 500, 'mLevel': 700, + ## 'stabThresh': [17, 22], 'moiThresh': [14, 21]} + ## dd = dict[type] # proper dictionary for the level + ## + ## # get the needed data, calc dewpoint + ## pres = self.pres + ## t1 = t_c[pres.index(dd['t1Level'])] # t1 level + ## t2 = t_c[pres.index(dd['t2Level'])] # t2 level + ## tMois = t_c[pres.index(dd['mLevel'])] - 273.15 # mLevel t , in C. 
+ ## rhMois = rh_c[pres.index(dd['mLevel'])] / 100.0 # mLevel rh + ## rhMois = where(less_equal(rhMois, 0), 0.00001, rhMois) + ## + ## a = log10(rhMois) / 7.5 + (tMois / (tMois + 237.3)) + ## dpMois = (a * 237.3) / (1.0 - a) + ## + ## hainesT = t1 - t2 + ## hainesM = tMois - dpMois + ## + ## # now make the categories + ## slope = 1.0 / (dd['stabThresh'][1] - dd['stabThresh'][0]) + ## intercept = 1.5 - ((dd['stabThresh'][0] + 0.5) * slope) + ## hainesTi = (slope * hainesT) + intercept + ## hainesT = clip(hainesTi, 1.0, 3.0) + ## + ## slope = 1.0 / (dd['moiThresh'][1] - dd['moiThresh'][0]) + ## intercept = 1.5 - ((dd['moiThresh'][0] + 0.5) * slope) + ## hainesMi = (slope * hainesM) + intercept + ## hainesM = clip(hainesMi, 1.0, 3.0) + ## + ## return hainesT + hainesM + + #--------------------------------------------------------------------------- + # MaxT simply maximum of any T grids during the period + #-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + + #--------------------------------------------------------------------------- + # MinT simply minimum of any T grids during the period + #-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + + #------------------------------------------------------------------------- + # MaxRH is simply maximum of all RH grids during period + #-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + + #------------------------------------------------------------------------- + # MinRH is simply minimum of all RH grids during period + #-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) 
+ + #========================================================================== + # Calculate Wetbulb (K) based on temperature (C) and RH (%) + # (all algorithms straight out of GEMPAK - converted to numeric python) + # + def Wetbulb(self, tc, rh, pres): + dpc = self.RHDP(tc, rh) + thte = self.THTE(pres, tc, dpc) + wetbk = self.TMST(thte, pres, 0) + return wetbk + + #======================================================================= + # Calculate parcel temp (K) given thetae (K) pressure (mb) and guess + # temperature (K) (must be 3d cubes) + # + def TMST(self, thte, pres, tguess): + tg = full_like(thte, tguess) + teclip = clip(thte - 270.0, 0.0, 5000.0) + # + # if guess temp is 0 - make a more reasonable guess + # + m = less(tg,1) + tg[m] = ((thte-0.5*teclip**1.05)*(pres/1000.0)**0.2)[m] + + epsi = 0.01 + tgnu = tg - 273.15 + # + # Correct the temp up to 100 times. Typically this takes + # less than 5 iterations + # + for i in range(1, 100): + tgnup = tgnu + 1.0 + tenu = self.THTE(pres, tgnu, tgnu) + tenup = self.THTE(pres, tgnup, tgnup) + cor = (thte - tenu) / (tenup - tenu) + tgnu += cor + # + # get the maximum correction we made this time + # and if it is less than epsi - then we are close + # enough to stop. 
+ # + acor = abs(cor) + mcor = maximum.reduce(maximum.reduce(maximum.reduce(acor))) + if (mcor < epsi): + #print "parcel temp in %d iterations"%i + break + tgnu += 273.15 + return tgnu + + #======================================================================= + # Calculate Dewpoint (C) based on Temperature (C) and RH (%) + # + def RHDP(self, tc, rh): + log1 = log(6.112) + vaps = self.VAPR(tc) + lvapr = log(rh * vaps / 100.0 + 0.0001) + dpc = ((243.5 * (log1 - lvapr)) / (lvapr - log1 - 17.67)) + return dpc + + #======================================================================= + # Calculate Theta-E given Pressure (mb) Temperature (C) and Dewpoint (C) + # + def THTE(self, pres, tc, dpc): + rmix = self.MIXR(dpc, pres) + tk = tc + 273.15 + e = (2.0 / 7.0) * (1.0 - (0.00028 * rmix)) + thtam = tk * (1000.0 / pres) ** e + tlcl = self.TLCL(tc, dpc) + e = ((3.376 / tlcl) - 0.00254) * (rmix * (1.0 + 0.00081 * rmix)) + return (thtam * exp(e)) + + #======================================================================= + # Calculate temperature at LCL (K) given Temperature (C) and Dewpoint (C) + # + def TLCL(self, tc, dpc): + tk = tc + 273.15 + dk = dpc + 273.15 + return((1.0 / (1.0 / (dk - 56.0) + log(tk / dk) / 800.0)) + 56.0) + + #======================================================================= + # Calculate Mixing Ratio (g/kg) given Dewpoint (C) and pressure (mb) + # + def MIXR(self, dpc, pres): + vapr = self.VAPR(dpc) + corr = (1.001 + ((pres - 100.) / 900.) 
* 0.0034) + e = corr * vapr + mixr = 0.62197 * (e / (pres - e)) * 1000.0 + return mixr + + #======================================================================= + # Calculate Vapor Pressure (mb) from Dewpoint (C) + # or Saturation Vapor Pressure (mb) from Temperature (C) + # + def VAPR(self, tc): + vapr = 6.112 * (exp((17.67 * tc) / (tc + 243.5))) + return vapr + + #========================================================================== + # Get boundary layer cube - cube of values above model surface + # adds in pressure level data above the boundary layer fields + # creates: + # BLT - temperatures (K) + # BLR - relative humidity (% 0-100) + # BLH - height (m) + # BLP - pressure (mb) + # BLW - wind (magnitude kts, direction) + # BLD - dewpoint (K) + # BLE - wetbulb (K) [if desired] + # + def setupBLCube(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, + rh_BL90120, rh_BL120150, wind_FHAG10, wind_BL030, + wind_BL3060, wind_BL6090, wind_BL90120, wind_BL120150, + p_SFC, stopo, gh_c, t_c, rh_c, wind_c, ctime): + # + # check to see if already set up for this time + # + if self.BLcubeTime == ctime: + return + # + # split pressure level wind cube into magnitude and direction + # + mag_c = wind_c[0] + dir_c = wind_c[1] + dew_c = self.RHDP(t_c - 273.15, rh_c) + 273.15 + # + tbl = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150] + rbl = [rh_FHAG2, rh_BL030, rh_BL3060, rh_BL6090, rh_BL90120, rh_BL120150] + wbl = [wind_FHAG10, wind_BL030, wind_BL3060, wind_BL6090, wind_BL90120, + wind_BL120150] + pdiff = [0, 30, 60, 90, 120, 150] + + pSFCmb = p_SFC / 100.0 + pSFCmb[less(pSFCmb, 500.0)] = 1013.0 + + p_list = [pSFCmb] + hbot = stopo + h_list = [hbot] + t_list = [t_FHAG2] + r_list = [clip(rh_FHAG2, 0.0001, 99.999)] + m_list = [wind_FHAG10[0]] + d_list = [wind_FHAG10[1]] + w_list = [self.RHDP(t_FHAG2 - 273.15, r_list[0]) + 273.15] + + for i in range(1, len(tbl)): + tavg = tbl[i] + tavgc = tavg - 
273.15 + ravg = clip(rbl[i], 0.0001, 99.999) + davgc = self.RHDP(tavgc, ravg) + ptop = clip(pSFCmb - pdiff[i], 1.0, 1050.0) + pbot = clip(pSFCmb - pdiff[i - 1], 1.0, 1050.0) + htop = self.MHGT(tavgc, davgc, ptop, pbot, hbot) + + t_list.append(tavg) + h_list.append((hbot + htop) / 2.0) + wind = wbl[i] + m_list.append(wind[0]) + d_list.append(wind[1]) + p_list.append((pbot + ptop) / 2.0) + r_list.append(ravg) + w_list.append(davgc + 273.15) + + hbot = htop + # + # above the boundary layer...add levels in pressure + # cube + # + numplevs = gh_c.shape[0] + levstoadd = zeros_like(stopo) + for i in range(numplevs): + levstoadd[greater(gh_c[i], hbot)] += 1 + + maxtoadd = maximum.reduce(maximum.reduce(levstoadd)) + for j in range(int(maxtoadd)): + found = zeros_like(stopo) + hlev = zeros_like(stopo) + tlev = zeros_like(stopo) + mlev = zeros_like(stopo) + dlev = zeros_like(stopo) + plev = zeros_like(stopo) + rlev = zeros_like(stopo) + wlev = zeros_like(stopo) + for i in range(numplevs): + usethislev = logical_and(less(found, 0.5), greater(gh_c[i], hbot)) + hlev[usethislev] = gh_c[i][usethislev] + plev[usethislev] = self.pres[i] + tlev[usethislev] = t_c[i][usethislev] + mlev[usethislev] = mag_c[i][usethislev] + dlev[usethislev] = dir_c[i][usethislev] + rlev[usethislev] = rh_c[i][usethislev] + wlev[usethislev] = dew_c[i][usethislev] + found[usethislev] = 1.0 + + numNotFound = count_nonzero(less(found, 0.5)) + if numNotFound < 1: + break + if numNotFound > 0: + notFoundMask = less(found, 0.5) + hlev[notFoundMask] = gh_c[numplevs-1][notFoundMask] + plev[notFoundMask] = self.pres[numplevs-1] + tlev[notFoundMask] = t_c[numplevs-1][notFoundMask] + mlev[notFoundMask] = mag_c[numplevs-1][notFoundMask] + dlev[notFoundMask] = dir_c[numplevs-1][notFoundMask] + rlev[notFoundMask] = rh_c[numplevs-1][notFoundMask] + wlev[notFoundMask] = dew_c[numplevs-1][notFoundMask] + + h_list.append(hlev) + t_list.append(tlev) + p_list.append(plev) + m_list.append(mlev) + d_list.append(dlev) + 
r_list.append(rlev) + w_list.append(wlev) + hbot = hlev + + self.BLH = array(h_list) + self.BLP = array(p_list) + self.BLT = array(t_list) + self.BLR = array(r_list) + #mags=array(m_list) + #dirs=array(d_list) + #self.BLW=(mags,dirs) + self.BLW = (m_list, d_list) + self.BLD = array(w_list) + if USE_WETBULB == 1: + self.BLE = self.Wetbulb(self.BLT - 273.15, self.BLR, self.BLP) + self.BLcubeTime = ctime + return + + #--------------------------------------------------------------------------- + # Calculate the hydrostatic height (m) at the top of the layer, given an + # average temp (C) and average dewpoint (C) in the layer, the pressure (mb) + # at the top and bottom of the layer, and the height (m) at the bottom + # of the layer. Intended to be used in an integration of hydrostatic + # heights given a starting surface height and temp/dewpoint values in + # pressure levels above + # + def MHGT(self, tmpc, dwpc, ptop, pbot, hbot): + pavg = (ptop + pbot) / 2.0 + scale = self.SCLH(tmpc, dwpc, pavg) + mhgt = hbot + (scale * log(pbot / ptop)) + return mhgt + + #--------------------------------------------------------------------------- + # Calculate Virtual temperature (C) given temp(C), dewpoint (C) + # and pressure(mb) + # + def TVRT(self, tmpc, dwpc, pres): + mixrscale = self.MIXR(dwpc, pres) * 0.001 + tmpk = tmpc + 273.15 + tvrk = tmpk * (1.0 + (mixrscale / 0.62197)) / (1.0 + mixrscale) + tvrt = tvrk - 273.15 + return tvrt + + #--------------------------------------------------------------------------- + # Calculate Scale Height (m) given temp(C), dewpoint(C) and pressure(mb) + # + def SCLH(self, tmpc, dwpc, pres): + rdgas = 287.04 + gravty = 9.80616 + sclh = (rdgas / gravty) * (self.TVRT(tmpc, dwpc, pres) + 273.15) + return sclh + + #-------------------------------------------------------------------------- + # calculate area above/below freezing in J/kg (m2/s2) + # + def getArea(self, hbot, tbot, htop, ttop): + tavg = (ttop + tbot) / 2.0 + e1 = (ttop - 273.15) / 
273.15 + e2 = (tbot - 273.15) / 273.15 + area = 9.8 * ((e1 + e2) / 2.0) * (htop - hbot) + return area + + #-------------------------------------------------------------------------- + # calculate areas above/below freezing, and include a flag if it crosses + # in this layer + # + def getAreas(self, hbot, tbot, htop, ttop): + maxm = maximum(tbot, ttop) + minm = minimum(tbot, ttop) + freeze = self.newGrid(273.15) + crosses = logical_and(less(minm, freeze), greater(maxm, freeze)) + crossh = self.linear(tbot, ttop, hbot, htop, freeze) + crosst = freeze + m = logical_not(crosses) + crossh[m] = htop[m] + crosst[m] = ttop[m] + + a1 = self.getArea(hbot, tbot, crossh, crosst) + a2 = self.getArea(crossh, crosst, htop, ttop) + return a1, a2, crosses + + #======================================================================== + # Get a cube of wetbulb temperatures above the real topo - not above the + # model topo. Returns the wetbulb temps and heights + # + def getTopoE(self, topo, stopo, p_SFC, T, RH, BLH, BLE): + + pSFCmb = p_SFC / 100.0 + pSFCmb[less(pSFCmb, 500.0)] = 1013.0 + + tmpc = self.FtoK(T) - 273.15 + hlist = [topo] + if USE_WETBULB == 1: + dwpc = self.RHDP(tmpc, RH) + scale = self.SCLH(tmpc, dwpc, pSFCmb) + ptopo = pSFCmb * exp((stopo - topo) / scale) + ptopo[less(ptopo, 500.0)] = 1013.0 + at = array([tmpc]) + ar = array([RH]) + ap = array([ptopo]) + te = self.Wetbulb(at, ar, ap) + te_SFC = te[0] + tlist = [te_SFC] + else: + tlist = [tmpc + 273.15] + + + numplevs = BLH.shape[0] + levstoadd = zeros_like(topo) + for i in range(numplevs): + levstoadd[greater(BLH[i],topo)] += 1 + maxtoadd = maximum.reduce(maximum.reduce(levstoadd)) + + hbot = topo + for j in range(int(maxtoadd)): + tlev = zeros_like(topo) + hlev = full_like(topo, -5000) + use = zeros_like(topo) + for i in range(BLH.shape[0]): + thislev = logical_and(less(use, 0.5), greater(BLH[i], hbot)) + tlev[thislev] = BLE[i][thislev] + hlev[thislev] = BLH[i][thislev] + use[thislev] = 1.0 + + tlev[less(tlev,0.5)] 
= BLE[-1][less(tlev,0.5)] + hlev[less(hlev,-2500)] = BLH[-1][less(hlev,-2500)] + + tlist.append(tlev) + hlist.append(hlev) + hbot = hlev + newH = array(hlist) + newE = array(tlist) + + return(newH, newE) + + #=============================================================== + # smooths array by averaging over +/- k gridpoints in each + # direction. At the edges, only averages over the points that + # fit within this "averaging area". If k is zero or negative + # it just returns the original array + # + def smoothpm(self, array, k): + if k > 0: + a = zeros_like(array) + n = zeros_like(array) + for x in range(-k, k + 1): + for y in range(-k, k + 1): + array1 = self.offset(array, x, y) + ok = greater(array1, -9000) + a[ok] += array1[ok] + n[ok] += 1 + + m = less(n,1) + a[m] = array[m] + n[m] = 1 + a /= n + arraysmooth = a + else: + arraysmooth = array + return arraysmooth + + #======================================================================= + # Gets a copy of array that is shifted x,y gridpoints. The edge + # points that are unknown are set to -9999.0. Used in smoothing + # + def offset(self, a, x, y): + sy1, sy2 = self.getindicies(y, a.shape[0]) + sx1, sx2 = self.getindicies(x, a.shape[1]) + b = full_like(a, -9999.0) + b[sy1, sx1] = a[sy2, sx2] + return b + + #============================================================== + # getindicies - used in slicing array + # + def getindicies(self, o, l): + if o > 0: + a = slice(o, l); b = slice(0, l - o) + elif o < 0: + a = slice(0, l + o); b = slice(-o, l) + else: + a = slice(0, l); b = slice(0, l) + return a, b + + #========================================================================== + # A linear interpolation that can be used for directions, where the + # values should never get higher than 360 degrees. 
We want + # interpolations that cross this 360 degree barrier to "go the + # right way" rather than flip back in the opposite direction + # + def dirlinear(self, xmax, xmin, ymax, ymin, we): + ydif = abs(ymax - ymin) + rotate = greater(ydif, 180.0) + upper = greater(ymin, 180.0) + lower = less(ymin, 180.0) + ymax[logical_and(rotate,upper)] += 360.0 + ymax[logical_and(rotate,lower)] -= 360.0 + slope = (ymax - ymin) / (xmax - xmin + .0000001) + intercept = ymin - slope * xmin + value = slope * we + intercept + value[greater(value, 360)] -= 360 + value[less(value, 0.0)] += 360 + return value + +def main(): + NAM12Forecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM40.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM40.py index 6d8606dad3..820f43ceb6 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM40.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM40.py @@ -1,578 +1,578 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from NAM40 model -## output. -## -##-------------------------------------------------------------------------- -class NAM40Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "NAM40", "NAM40") - # this model is found in two different files, just to be interesting - self.addSources(["NAM20"]) - -##-------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. 
-##-------------------------------------------------------------------------- - def levels(self): - return ["MB975", "MB950", "MB925", "MB900", "MB875", "MB850", - "MB825", "MB800", "MB775", "MB750", "MB725", - "MB700", "MB675", "MB650", "MB625", "MB600", - "MB550", "MB500", "MB450", "MB400", "MB350", "MB300"] - -##-------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##-------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - -##-------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool uses the model's boundary layers to calculate a lapse -## rate and then applies that lapse rate to the difference between the -## model topography and the true topography. This algorithm calculates -## the surface temperature for three different sets of points: those that -## fall above the boundary layer, in the boundary layer, and below the -## boundary layer. -##-------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, p_SFC, topo, stopo, gh_c, t_c): - p_SFC = p_SFC / 100 # get the surface pres. 
in mb - pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, - p_SFC - 105, p_SFC - 135] - temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150] - return self._calcT(temps, pres, topo, stopo, gh_c, t_c) - - def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) # identify points > topo - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i - 1], - log(self.pres[i]), log(self.pres[i - 1]), topo) - val[greater(val, 500)] = 500 - val = clip(val, -.00001, 10) - p = where(logical_and(equal(p, -1), higher), - exp(val), p) - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - tmb = where(logical_and(equal(tmb, -1), higher), tval1, tmb) - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) - tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), - tval2, tms) - - - # define the pres. 
of each of the boundary layers - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) - gm = greater(pres[i - 1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - st = where(logical_and(equal(st, -1), mask), - val, st) - - # where topo level is above highest level in BL fields...use tmb - st = where(logical_and(equal(st,-1),less(p, pres[-1])), tmb, st) - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - st = where(equal(st, -1), tmb - tms + temps[0], st) - return self.KtoF(st) - -##-------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. -##-------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) - w = (0.622 * sfce) / ((p_SFC / 100 - sfce) + .00001) - # at the true surface - tsfce = self.esat(self.FtoK(T)) - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz - ws = (0.622 * tsfce) / (newp - tsfce) - rh = w / ws - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - m = w > ws - td[m] = T[m] - return td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - 
# Return the new value - return RH - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -##-------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model -##-------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - def calcSky(self, rh_c, gh_c, topo, p_SFC): - return self.skyFromRH(rh_c, gh_c, topo, p_SFC) - -##-------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. 
-##-------------------------------------------------------------------------- - def calcPoP(self, pop_SFC): - return pop_SFC -# def calcPoP(self, gh_c, rh_c, QPF, topo): -# rhavg = where(less(gh_c, topo), -1, rh_c) -# rhavg = where(greater(gh_c, topo + 5000 * 0.3048), -# -1, rhavg) -# count = where(not_equal(rhavg, -1), 1, 0) -# rhavg = where(equal(rhavg, -1), 0, rhavg) -# count = add.reduce(count, 0) -# rhavg = add.reduce(rhavg, 0) -# ## add this much based on humidity only -# dpop = where(count, rhavg / (count + .001), 0) - 70.0 -# dpop = where(less(dpop, -30), -30, dpop) -# ## calculate the base PoP -# pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) -# pop = pop + dpop # add the adjustment based on humidity -# pop = clip(pop, 0, 100) # clip to 100% -# return pop - -##-------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. -##-------------------------------------------------------------------------- - def calcFzLevel(self, gh_c, t_c, topo): - fzl = self.newGrid(-1) - - # for each level in the height cube, find the freezing level - for i in xrange(gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ - * (273.15 - t_c[i - 1]) - except: - val = gh_c[i] - ## save the height value in fzl - m = logical_and(equal(fzl, -1), less_equal(t_c[i], 273.15)) - fzl[m] = val[m] - fzl *= 3.28 # convert to feet - return fzl - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. 
-##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb = clip(pmb, 1, 1050) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc.clip(-120, 60, tc) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - - m = logical_and(equal(snow, -1), less_equal(wetb[i], 0)) - snow[m] = val[m] - - # - # convert to feet - # - snow *= 3.28 - - return snow - -##-------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##-------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - snowr = T * -0.5 + 22.5 - snowr[less(T, 9)] = 20 - snowr[greater_equal(T, 30)] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), - snowr * QPF, float32(0)) - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) - snowamt[logical_not(snowmask)] = 0 - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". 
-##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - - -##-------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##-------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) - diffpt = pt[i] - runavg - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - m = logical_and(logical_and(mask[i], equal(mh, -1)), greater(diffpt, 3)) - mh[m] = tmh[m] - mh -= topo - mh *= 3.28 # convert to feet - return mh - - -##-------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##-------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] # get the wind grids - dir = wind_FHAG10[1] # get wind dir - mag *= 1.94 # convert to knots - dir.clip(0, 359.5, dir) - return (mag, dir) # assemble speed and dir into a tuple - -##-------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. -##-------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - m = logical_and(equal(famag, -1), mask[i]) - famag[m] = wm[i][m] - - m = logical_and(equal(fadir, -1), mask[i]) - fadir[m] = wd[i][m] - - fadir.clip(0, 359.5, fadir) # clip the value to 0, 360 - famag *= 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##-------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. 
This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##-------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - - tdir.clip(0, 359.5, tdir) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tdir) # clip speed to 125 knots - return (tmag, tdir) - -##-------------------------------------------------------------------------- -## Uses a derivation of the Bourgouin algorithm to calculate precipitation -## type, and other algorithms to determine the coverage and intensity. -## The Bourgouin technique figures out precip type from calculating how -## long a hydrometer is exposed to alternating layers of above zero (C) and -## below zero temperature layers. This tool calculates at each grid point -## which of the four Bourgouin cases apply. Then the appropriate algorithm -## is applied to that case that further refines the precip. type. Once the -## type is determined, other algorithms are used to determine the coverage -## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, -## "A Method to Determine Precipitation Types", by Pierre Bourgouin -##-------------------------------------------------------------------------- - def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC, - bli_BL0180): - gh_c = gh_c[:13, :, :] - t_c = t_c[:13, :, :] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) - tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - m = logical_and(equal(aindex, 0), topomask) - a1[m] += a11[m] - - m = logical_and(equal(aindex, 1), topomask) - a2[m] += a11[m] - - m = logical_and(equal(aindex, 2), topomask) - a3[m] += a11[m] - - topomask = logical_and(topomask, cross) - aindex = where(topomask, aindex + 1, aindex) - - m = logical_and(equal(aindex, 0), topomask) - a1[m] += a22[m] - - m = logical_and(equal(aindex, 1), topomask) - a2[m] += a22 - - m = logical_and(equal(aindex, 2), topomask) - a3[m] += a22[m] - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - # Case d (snow) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] = 2 - 
wx[logical_and(srmask, logical_and(greater_equal(a1, 5.6), less(a1, 13.2)))] = 3 - - - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) - convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) - wx[logical_and(not_equal(wx, 0), convecMask)] += 6 - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "Chc" or tcov == "": - tcov = "Sct" - key.append(key[i] + "^" + tcov - + ":T:::") - wx[less_equal(bli_BL0180, -3)] += 13 - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -##-------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##-------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##-------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip., lifted index -## and 3-D relative humidity. -##-------------------------------------------------------------------------- - def calcLAL(self, bli_BL0180, tp_SFC, cp_SFC, rh_c, rh_FHAG2): - lal = self.newGrid(1) - # Add one to lal if we have 0.5 mm of precip. - lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 - - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli_BL0180, -3)] += 1 - lal[less(bli_BL0180, -5)] += 1 - return lal - -def main(): - NAM40Forecaster().run() +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. 
Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from NAM40 model +## output. +## +##-------------------------------------------------------------------------- +class NAM40Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "NAM40", "NAM40") + # this model is found in two different files, just to be interesting + self.addSources(["NAM20"]) + +##-------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. +##-------------------------------------------------------------------------- + def levels(self): + return ["MB975", "MB950", "MB925", "MB900", "MB875", "MB850", + "MB825", "MB800", "MB775", "MB750", "MB725", + "MB700", "MB675", "MB650", "MB625", "MB600", + "MB550", "MB500", "MB450", "MB400", "MB350", "MB300"] + +##-------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##-------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + +##-------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. 
This tool uses the model's boundary layers to calculate a lapse +## rate and then applies that lapse rate to the difference between the +## model topography and the true topography. This algorithm calculates +## the surface temperature for three different sets of points: those that +## fall above the boundary layer, in the boundary layer, and below the +## boundary layer. +##-------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, p_SFC, topo, stopo, gh_c, t_c): + p_SFC = p_SFC / 100 # get the surface pres. in mb + pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, + p_SFC - 105, p_SFC - 135] + temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150] + return self._calcT(temps, pres, topo, stopo, gh_c, t_c) + + def _calcT(self, temps, pres, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) # identify points > topo + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i - 1], + log(self.pres[i]), log(self.pres[i - 1]), topo) + val[greater(val, 500)] = 500 + val = clip(val, -.00001, 10) + p = where(logical_and(equal(p, -1), higher), + exp(val), p) + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + tmb = where(logical_and(equal(tmb, -1), higher), tval1, tmb) + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) + tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), + tval2, tms) + + + # define the pres. 
of each of the boundary layers + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) + gm = greater(pres[i - 1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + st = where(logical_and(equal(st, -1), mask), + val, st) + + # where topo level is above highest level in BL fields...use tmb + st = where(logical_and(equal(st,-1),less(p, pres[-1])), tmb, st) + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + st = where(equal(st, -1), tmb - tms + temps[0], st) + return self.KtoF(st) + +##-------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. +##-------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) + w = (0.622 * sfce) / ((p_SFC / 100 - sfce) + .00001) + # at the true surface + tsfce = self.esat(self.FtoK(T)) + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz + ws = (0.622 * tsfce) / (newp - tsfce) + rh = w / ws + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + m = w > ws + td[m] = T[m] + return td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # 
Return the new value + return RH + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +##-------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model +##-------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + def calcSky(self, rh_c, gh_c, topo, p_SFC): + return self.skyFromRH(rh_c, gh_c, topo, p_SFC) + +##-------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. 
+##-------------------------------------------------------------------------- + def calcPoP(self, pop_SFC): + return pop_SFC +# def calcPoP(self, gh_c, rh_c, QPF, topo): +# rhavg = where(less(gh_c, topo), -1, rh_c) +# rhavg = where(greater(gh_c, topo + 5000 * 0.3048), +# -1, rhavg) +# count = where(not_equal(rhavg, -1), 1, 0) +# rhavg = where(equal(rhavg, -1), 0, rhavg) +# count = add.reduce(count, 0) +# rhavg = add.reduce(rhavg, 0) +# ## add this much based on humidity only +# dpop = where(count, rhavg / (count + .001), 0) - 70.0 +# dpop = where(less(dpop, -30), -30, dpop) +# ## calculate the base PoP +# pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) +# pop = pop + dpop # add the adjustment based on humidity +# pop = clip(pop, 0, 100) # clip to 100% +# return pop + +##-------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. +##-------------------------------------------------------------------------- + def calcFzLevel(self, gh_c, t_c, topo): + fzl = self.newGrid(-1) + + # for each level in the height cube, find the freezing level + for i in range(gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ + * (273.15 - t_c[i - 1]) + except: + val = gh_c[i] + ## save the height value in fzl + m = logical_and(equal(fzl, -1), less_equal(t_c[i], 273.15)) + fzl[m] = val[m] + fzl *= 3.28 # convert to feet + return fzl + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. 
+##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb = clip(pmb, 1, 1050) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc.clip(-120, 60, tc) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + + m = logical_and(equal(snow, -1), less_equal(wetb[i], 0)) + snow[m] = val[m] + + # + # convert to feet + # + snow *= 3.28 + + return snow + +##-------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##-------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + snowr = T * -0.5 + 22.5 + snowr[less(T, 9)] = 20 + snowr[greater_equal(T, 30)] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), + snowr * QPF, float32(0)) + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) + snowamt[logical_not(snowmask)] = 0 + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". 
+##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + + +##-------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##-------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) + diffpt = pt[i] - runavg + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + m = logical_and(logical_and(mask[i], equal(mh, -1)), greater(diffpt, 3)) + mh[m] = tmh[m] + mh -= topo + mh *= 3.28 # convert to feet + return mh + + +##-------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##-------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] # get the wind grids + dir = wind_FHAG10[1] # get wind dir + mag *= 1.94 # convert to knots + dir.clip(0, 359.5, dir) + return (mag, dir) # assemble speed and dir into a tuple + +##-------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. +##-------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + m = logical_and(equal(famag, -1), mask[i]) + famag[m] = wm[i][m] + + m = logical_and(equal(fadir, -1), mask[i]) + fadir[m] = wd[i][m] + + fadir.clip(0, 359.5, fadir) # clip the value to 0, 360 + famag *= 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##-------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. 
This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. +##-------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + + tdir.clip(0, 359.5, tdir) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tdir) # clip speed to 125 knots + return (tmag, tdir) + +##-------------------------------------------------------------------------- +## Uses a derivation of the Bourgouin algorithm to calculate precipitation +## type, and other algorithms to determine the coverage and intensity. +## The Bourgouin technique figures out precip type from calculating how +## long a hydrometer is exposed to alternating layers of above zero (C) and +## below zero temperature layers. This tool calculates at each grid point +## which of the four Bourgouin cases apply. Then the appropriate algorithm +## is applied to that case that further refines the precip. type. Once the +## type is determined, other algorithms are used to determine the coverage +## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, +## "A Method to Determine Precipitation Types", by Pierre Bourgouin +##-------------------------------------------------------------------------- + def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC, + bli_BL0180): + gh_c = gh_c[:13, :, :] + t_c = t_c[:13, :, :] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) + tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + m = logical_and(equal(aindex, 0), topomask) + a1[m] += a11[m] + + m = logical_and(equal(aindex, 1), topomask) + a2[m] += a11[m] + + m = logical_and(equal(aindex, 2), topomask) + a3[m] += a11[m] + + topomask = logical_and(topomask, cross) + aindex = where(topomask, aindex + 1, aindex) + + m = logical_and(equal(aindex, 0), topomask) + a1[m] += a22[m] + + m = logical_and(equal(aindex, 1), topomask) + a2[m] += a22 + + m = logical_and(equal(aindex, 2), topomask) + a3[m] += a22[m] + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + # Case d (snow) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 2 + wx[logical_and(srmask, 
logical_and(greater_equal(a1, 5.6), less(a1, 13.2)))] = 3 + + + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) + convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) + wx[logical_and(not_equal(wx, 0), convecMask)] += 6 + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "Chc" or tcov == "": + tcov = "Sct" + key.append(key[i] + "^" + tcov + + ":T:::") + wx[less_equal(bli_BL0180, -3)] += 13 + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +##-------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##-------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##-------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip., lifted index +## and 3-D relative humidity. +##-------------------------------------------------------------------------- + def calcLAL(self, bli_BL0180, tp_SFC, cp_SFC, rh_c, rh_FHAG2): + lal = self.newGrid(1) + # Add one to lal if we have 0.5 mm of precip. 
+ lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 + + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli_BL0180, -3)] += 1 + lal[less(bli_BL0180, -5)] += 1 + return lal + +def main(): + NAM40Forecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM80.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM80.py index 5750acfd51..f76860a1d8 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM80.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM80.py @@ -1,551 +1,551 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from NAM80 model -## output. -## -##-------------------------------------------------------------------------- -class NAM80Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "NAM80", "NAM80") - -##-------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. 
-##-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB950", "MB900", "MB850", "MB800", "MB750", - "MB700", "MB650", "MB600", "MB550", "MB500", - "MB450", "MB400", "MB350"] - -##-------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##-------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - -##-------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool uses the model's boundary layers to calculate a lapse -## rate and then applies that lapse rate to the difference between the -## model topography and the true topography. This algorithm calculates -## the surface temperature for three different sets of points: those that -## fall above the boundary layer, in the boundary layer, and below the -## boundary layer. 
-##-------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, p_SFC, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i - 1], - log(self.pres[i]), log(self.pres[i - 1]), topo) - val = clip(val, -.00001, 10) - p = where(logical_and(equal(p, -1), higher), - exp(val), p) - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - tmb = where(logical_and(equal(tmb, -1), higher), - tval1, tmb) - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) - tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), - tval2, tms) - - p_SFC = p_SFC / 100 # get te surface pres. in mb - # define the pres. 
of each of the boundary layers - pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, p_SFC - 105, - p_SFC - 135] - # list of temperature grids - temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150] - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) - gm = greater(pres[i - 1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - st = where(logical_and(equal(st, -1), mask), - val, st) - - # where topo level is above highest level in BL fields...use tmb - st = where(logical_and(equal(st,-1),less(p,p_SFC-135)),tmb,st) - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - st = where(equal(st, -1), tmb - tms + t_FHAG2, st) - return self.KtoF(st) - -##-------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. 
-##-------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) - # at the true surface - tsfce = self.esat(self.FtoK(T)) - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz - ws = (0.622 * tsfce) / (newp - tsfce) - rh = w / ws - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -##-------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model 
-##-------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - def calcSky(self, rh_c, gh_c, topo, p_SFC): - return self.skyFromRH(rh_c, gh_c, topo, p_SFC) - -##-------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. -##-------------------------------------------------------------------------- - def calcPoP(self, gh_c, rh_c, QPF, topo): - rhavg = where(less(gh_c, topo), float32(-1), rh_c) - rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 - count = not_equal(rhavg, -1) - rhavg[equal(rhavg, -1)] = 0 - count = add.reduce(count, 0, dtype=float32) - rhavg = add.reduce(rhavg, 0) - ## add this much based on humidity only - dpop = where(count, rhavg / (count + .001), 0) - 70.0 - dpop[less(dpop, -30)] = -30 - ## calculate the base PoP - pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) - pop += dpop # add the adjustment based on humidity - pop = clip(pop, 0, 100) # clip to 100% - return pop - -##-------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. 
-##-------------------------------------------------------------------------- - def calcFzLevel(self, gh_c, t_c, topo): - fzl = self.newGrid(-1) - # for each level in the height cube, find the freezing level - for i in xrange(gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ - * (273.15 - t_c[i - 1]) - except: - val = gh_c[i] - ## save the height value in fzl - fzl = where(logical_and(equal(fzl, -1), - less_equal(t_c[i], 273.15)), val, fzl) - return fzl * 3.28 # convert to feet - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. -##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb = clip(pmb, 1, 1050) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc = clip(tc, -120, 60) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), - val, snow) - # convert to feet - # - snow = snow * 3.28 - - return snow - -##-------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##-------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - snowr = T * -0.5 + 22.5 - snowr[less(T, 9)] = 20 - snowr[greater_equal(T, 30)] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), - snowr * QPF, float32(0)) - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) - snowamt[logical_not(snowmask)] = 0 - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". 
-##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - -##-------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##-------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) - diffpt = pt[i] - runavg - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)), tmh, mh) - return (mh - topo) * 3.28 - -##-------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##-------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] # get the wind grids - dir = wind_FHAG10[1] # get wind dir - mag = mag * 1.94 # convert to knots - dir = clip(dir, 0, 359.5) - return (mag, dir) # assemble speed and dir into a tuple - -##-------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. -##-------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 # 3000 feet - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - # Interpolate (maybe) - famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) - fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) - fadir = clip(fadir, 0, 359.5) # clip the value to 0, 360 - famag = famag * 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##-------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. 
This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##-------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.00001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tdir = clip(tdir, 0, 359.5) - tmag = tmag * 1.94 # convert to knots - tmag = clip(tmag, 0, 125) # clip speed to 125 knots - return (tmag, tdir) - - -##-------------------------------------------------------------------------- -## Uses a derivation of the Bourgouin allgorithm to calculate precipitation -## type, and other algorithms to determine the coverage and intensity. -## The Bourgoin technique figures out precip type from calculating how -## long a hydrometer is exposed to alternating layers of above zero (C) and -## below zero temperature layers. This tool calculates at each grid point -## which of the four Bourgouin cases apply. Then the appropriate algorithm -## is applied to that case that further refines the precip. type. Once the -## type is determined, other algorithms are used to determine the coverage -## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, -## "A Method to Determine Precipitation Types", by Pierre Bourgouin -##-------------------------------------------------------------------------- - def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC, - bli_BL0180): - gh_c = gh_c[:13, :, :] - t_c = t_c[:13, :, :] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) - tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a11, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a11, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a11, a3) - topomask = logical_and(topomask, cross) - aindex = where(topomask, aindex + 1, aindex) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a22, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a22, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a22, a3) - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - # Case d (snow) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] 
= 2 - wx[logical_and(srmask, - logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) - convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) - wx[logical_and(not_equal(wx, 0), convecMask)] += 6 - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "Chc" or tcov == "": - tcov = "Sct" - key.append(key[i] + "^" + tcov - + ":T:::") - wx[less_equal(bli_BL0180, -3)] += 13 - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -##-------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##-------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##-------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip., lifted index -## and 3-D relative humidity. -##-------------------------------------------------------------------------- - def calcLAL(self, bli_BL0180, tp_SFC, cp_SFC, rh_c, rh_FHAG2): - lal = self.newGrid(1) - # Add one to lal if we have 0.5 mm of precip. - lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 - - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli_BL0180, -3)] += 1 - lal[less(bli_BL0180, -5)] += 1 - return lal - - -def main(): - NAM80Forecaster().run() +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. 
Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from NAM80 model +## output. +## +##-------------------------------------------------------------------------- +class NAM80Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "NAM80", "NAM80") + +##-------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. +##-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB950", "MB900", "MB850", "MB800", "MB750", + "MB700", "MB650", "MB600", "MB550", "MB500", + "MB450", "MB400", "MB350"] + +##-------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##-------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + +##-------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. This tool uses the model's boundary layers to calculate a lapse +## rate and then applies that lapse rate to the difference between the +## model topography and the true topography. 
This algorithm calculates +## the surface temperature for three different sets of points: those that +## fall above the boundary layer, in the boundary layer, and below the +## boundary layer. +##-------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, p_SFC, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i - 1], + log(self.pres[i]), log(self.pres[i - 1]), topo) + val = clip(val, -.00001, 10) + p = where(logical_and(equal(p, -1), higher), + exp(val), p) + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + tmb = where(logical_and(equal(tmb, -1), higher), + tval1, tmb) + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) + tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), + tval2, tms) + + p_SFC = p_SFC / 100 # get te surface pres. in mb + # define the pres. 
of each of the boundary layers + pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, p_SFC - 105, + p_SFC - 135] + # list of temperature grids + temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150] + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) + gm = greater(pres[i - 1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + st = where(logical_and(equal(st, -1), mask), + val, st) + + # where topo level is above highest level in BL fields...use tmb + st = where(logical_and(equal(st,-1),less(p,p_SFC-135)),tmb,st) + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + st = where(equal(st, -1), tmb - tms + t_FHAG2, st) + return self.KtoF(st) + +##-------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. 
+##-------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) + # at the true surface + tsfce = self.esat(self.FtoK(T)) + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz + ws = (0.622 * tsfce) / (newp - tsfce) + rh = w / ws + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +##-------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model 
+##-------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + def calcSky(self, rh_c, gh_c, topo, p_SFC): + return self.skyFromRH(rh_c, gh_c, topo, p_SFC) + +##-------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. +##-------------------------------------------------------------------------- + def calcPoP(self, gh_c, rh_c, QPF, topo): + rhavg = where(less(gh_c, topo), float32(-1), rh_c) + rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 + count = not_equal(rhavg, -1) + rhavg[equal(rhavg, -1)] = 0 + count = add.reduce(count, 0, dtype=float32) + rhavg = add.reduce(rhavg, 0) + ## add this much based on humidity only + dpop = where(count, rhavg / (count + .001), 0) - 70.0 + dpop[less(dpop, -30)] = -30 + ## calculate the base PoP + pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) + pop += dpop # add the adjustment based on humidity + pop = clip(pop, 0, 100) # clip to 100% + return pop + +##-------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. 
+##-------------------------------------------------------------------------- + def calcFzLevel(self, gh_c, t_c, topo): + fzl = self.newGrid(-1) + # for each level in the height cube, find the freezing level + for i in range(gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ + * (273.15 - t_c[i - 1]) + except: + val = gh_c[i] + ## save the height value in fzl + fzl = where(logical_and(equal(fzl, -1), + less_equal(t_c[i], 273.15)), val, fzl) + return fzl * 3.28 # convert to feet + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. +##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb = clip(pmb, 1, 1050) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc = clip(tc, -120, 60) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), + val, snow) + # convert to feet + # + snow = snow * 3.28 + + return snow + +##-------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##-------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + snowr = T * -0.5 + 22.5 + snowr[less(T, 9)] = 20 + snowr[greater_equal(T, 30)] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), + snowr * QPF, float32(0)) + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) + snowamt[logical_not(snowmask)] = 0 + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". 
+##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + +##-------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##-------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) + diffpt = pt[i] - runavg + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)), tmh, mh) + return (mh - topo) * 3.28 + +##-------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##-------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] # get the wind grids + dir = wind_FHAG10[1] # get wind dir + mag = mag * 1.94 # convert to knots + dir = clip(dir, 0, 359.5) + return (mag, dir) # assemble speed and dir into a tuple + +##-------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. +##-------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 # 3000 feet + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + # Interpolate (maybe) + famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) + fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) + fadir = clip(fadir, 0, 359.5) # clip the value to 0, 360 + famag = famag * 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##-------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. 
This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. +##-------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.00001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tdir = clip(tdir, 0, 359.5) + tmag = tmag * 1.94 # convert to knots + tmag = clip(tmag, 0, 125) # clip speed to 125 knots + return (tmag, tdir) + + +##-------------------------------------------------------------------------- +## Uses a derivation of the Bourgouin allgorithm to calculate precipitation +## type, and other algorithms to determine the coverage and intensity. +## The Bourgoin technique figures out precip type from calculating how +## long a hydrometer is exposed to alternating layers of above zero (C) and +## below zero temperature layers. This tool calculates at each grid point +## which of the four Bourgouin cases apply. Then the appropriate algorithm +## is applied to that case that further refines the precip. type. Once the +## type is determined, other algorithms are used to determine the coverage +## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, +## "A Method to Determine Precipitation Types", by Pierre Bourgouin +##-------------------------------------------------------------------------- + def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC, + bli_BL0180): + gh_c = gh_c[:13, :, :] + t_c = t_c[:13, :, :] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) + tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a11, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a11, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a11, a3) + topomask = logical_and(topomask, cross) + aindex = where(topomask, aindex + 1, aindex) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a22, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a22, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a22, a3) + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + # Case d (snow) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 
2 + wx[logical_and(srmask, + logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) + convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) + wx[logical_and(not_equal(wx, 0), convecMask)] += 6 + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "Chc" or tcov == "": + tcov = "Sct" + key.append(key[i] + "^" + tcov + + ":T:::") + wx[less_equal(bli_BL0180, -3)] += 13 + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +##-------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##-------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##-------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip., lifted index +## and 3-D relative humidity. +##-------------------------------------------------------------------------- + def calcLAL(self, bli_BL0180, tp_SFC, cp_SFC, rh_c, rh_FHAG2): + lal = self.newGrid(1) + # Add one to lal if we have 0.5 mm of precip. 
+ lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 + + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli_BL0180, -3)] += 1 + lal[less(bli_BL0180, -5)] += 1 + return lal + + +def main(): + NAM80Forecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM95.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM95.py index 06cbc78d92..8af4bd9cdf 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM95.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/NAM95.py @@ -1,553 +1,553 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from NAM95 model -## output. (Alaska) -## -##-------------------------------------------------------------------------- -class NAM95Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "NAM95", "NAM95") - -##-------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. 
-##-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB950", "MB900", "MB850", "MB800", "MB750", - "MB700", "MB650", "MB600", "MB550", "MB500", - "MB450", "MB400", "MB350"] - -##-------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##-------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - -##-------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool uses the model's boundary layers to calculate a lapse -## rate and then applies that lapse rate to the difference between the -## model topography and the true topography. This algorithm calculates -## the surface temperature for three different sets of points: those that -## fall above the boundary layer, in the boundary layer, and below the -## boundary layer. 
-##-------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, - t_BL120150, p_SFC, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i - 1], - log(self.pres[i]), log(self.pres[i - 1]), topo) - val = clip(val, -.00001, 10) - p = where(logical_and(equal(p, -1), higher), - exp(val), p) - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - tmb = where(logical_and(equal(tmb, -1), higher), - tval1, tmb) - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) - tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), - tval2, tms) - - p_SFC = p_SFC / 100 # get te surface pres. in mb - # define the pres. 
of each of the boundary layers - pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, p_SFC - 105, - p_SFC - 135] - # list of temperature grids - temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150] - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) - gm = greater(pres[i - 1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - st = where(logical_and(equal(st, -1), mask), - val, st) - - # where topo level is above highest level in BL fields...use tmb - st = where(logical_and(equal(st,-1),less(p,p_SFC-135)),tmb,st) - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - st = where(equal(st, -1), tmb - tms + t_FHAG2, st) - return self.KtoF(st) - -##-------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. 
-##-------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) - # at the true surface - tsfce = self.esat(self.FtoK(T)) - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz - ws = (0.622 * tsfce) / (newp - tsfce) - rh = w / ws - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -##-------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model 
-##-------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - def calcSky(self, rh_c, gh_c, topo, p_SFC): - return self.skyFromRH(rh_c, gh_c, topo, p_SFC) - -##-------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. -##-------------------------------------------------------------------------- - def calcPoP(self, gh_c, rh_c, QPF, topo): - rhavg = where(less(gh_c, topo), float32(-1), rh_c) - rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 - count = not_equal(rhavg, -1) - rhavg[equal(rhavg, -1)] = 0 - count = add.reduce(count, 0, dtype=float32) - rhavg = add.reduce(rhavg, 0) - ## add this much based on humidity only - dpop = where(count, rhavg / (count + .001), 0) - 70.0 - dpop[less(dpop, -30)] = -30 - ## calculate the base PoP - pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) - pop += dpop # add the adjustment based on humidity - pop = clip(pop, 0, 100) # clip to 100% - return pop - -##-------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. 
-##-------------------------------------------------------------------------- - def calcFzLevel(self, gh_c, t_c, topo): - fzl = self.newGrid(-1) - # for each level in the height cube, find the freezing level - for i in xrange(gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ - * (273.15 - t_c[i - 1]) - except: - val = gh_c[i] - ## save the height value in fzl - fzl = where(logical_and(equal(fzl, -1), - less_equal(t_c[i], 273.15)), val, fzl) - return fzl * 3.28 # convert to feet - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. -##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb = clip(pmb, 1, 1050) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc = clip(tc, -120, 60) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), - val, snow) - # - # convert to feet - # - snow = snow * 3.28 - - return snow - -##-------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##-------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - snowr = T * -0.5 + 22.5 - snowr[less(T, 9)] = 20 - snowr[greater_equal(T, 30)] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), - snowr * QPF, float32(0)) - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) - snowamt[logical_not(snowmask)] = 0 - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". 
-##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - -##-------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##-------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) - diffpt = pt[i] - runavg - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)), tmh, mh) - return (mh - topo) * 3.28 - -##-------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##-------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] # get the wind grids - dir = wind_FHAG10[1] # get wind dir - mag = mag * 1.94 # convert to knots - dir = clip(dir, 0, 359.5) - return (mag, dir) # assemble speed and dir into a tuple - -##-------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. -##-------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 # 3000 feet - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - # Interpolate (maybe) - famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) - fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) - fadir = clip(fadir, 0, 359.5) # clip the value to 0, 360 - famag = famag * 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##-------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. 
This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##-------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.00001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - - tdir.clip(0, 359.5, tdir) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tdir) # clip speed to 125 knots - return (tmag, tdir) - - -##-------------------------------------------------------------------------- -## Uses a derivation of the Bourgouin allgorithm to calculate precipitation -## type, and other algorithms to determine the coverage and intensity. -## The Bourgoin technique figures out precip type from calculating how -## long a hydrometer is exposed to alternating layers of above zero (C) and -## below zero temperature layers. This tool calculates at each grid point -## which of the four Bourgouin cases apply. Then the appropriate algorithm -## is applied to that case that further refines the precip. type. Once the -## type is determined, other algorithms are used to determine the coverage -## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, -## "A Method to Determine Precipitation Types", by Pierre Bourgouin -##-------------------------------------------------------------------------- - def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC, - bli_BL0180): - gh_c = gh_c[:13, :, :] - t_c = t_c[:13, :, :] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) - tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a11, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a11, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a11, a3) - topomask = logical_and(topomask, cross) - aindex = where(topomask, aindex + 1, aindex) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a22, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a22, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a22, a3) - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - # Case d (snow) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] 
= 2 - wx[logical_and(srmask, - logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) - convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) - wx[logical_and(not_equal(wx, 0), convecMask)] += 6 - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "Chc" or tcov == "": - tcov = "Sct" - key.append(key[i] + "^" + tcov - + ":T:::") - wx[less_equal(bli_BL0180, -3)] += 13 - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -##-------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##-------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##-------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip., lifted index -## and 3-D relative humidity. -##-------------------------------------------------------------------------- - def calcLAL(self, bli_BL0180, tp_SFC, cp_SFC, rh_c, rh_FHAG2): - lal = self.newGrid(1) - # Add one to lal if we have 0.5 mm of precip. - lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 - - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli_BL0180, -3)] += 1 - lal[less(bli_BL0180, -5)] += 1 - return lal - - -def main(): - NAM95Forecaster().run() +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. 
Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from NAM95 model +## output. (Alaska) +## +##-------------------------------------------------------------------------- +class NAM95Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "NAM95", "NAM95") + +##-------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. +##-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB950", "MB900", "MB850", "MB800", "MB750", + "MB700", "MB650", "MB600", "MB550", "MB500", + "MB450", "MB400", "MB350"] + +##-------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##-------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + +##-------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. This tool uses the model's boundary layers to calculate a lapse +## rate and then applies that lapse rate to the difference between the +## model topography and the true topography. 
This algorithm calculates +## the surface temperature for three different sets of points: those that +## fall above the boundary layer, in the boundary layer, and below the +## boundary layer. +##-------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, + t_BL120150, p_SFC, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i - 1], + log(self.pres[i]), log(self.pres[i - 1]), topo) + val = clip(val, -.00001, 10) + p = where(logical_and(equal(p, -1), higher), + exp(val), p) + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + tmb = where(logical_and(equal(tmb, -1), higher), + tval1, tmb) + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) + tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), + tval2, tms) + + p_SFC = p_SFC / 100 # get te surface pres. in mb + # define the pres. 
of each of the boundary layers + pres = [p_SFC, p_SFC - 15, p_SFC - 45, p_SFC - 75, p_SFC - 105, + p_SFC - 135] + # list of temperature grids + temps = [t_FHAG2, t_BL030, t_BL3060, t_BL6090, t_BL90120, t_BL120150] + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) + gm = greater(pres[i - 1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + st = where(logical_and(equal(st, -1), mask), + val, st) + + # where topo level is above highest level in BL fields...use tmb + st = where(logical_and(equal(st,-1),less(p,p_SFC-135)),tmb,st) + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + st = where(equal(st, -1), tmb - tms + t_FHAG2, st) + return self.KtoF(st) + +##-------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. 
+##-------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) + # at the true surface + tsfce = self.esat(self.FtoK(T)) + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz + ws = (0.622 * tsfce) / (newp - tsfce) + rh = w / ws + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +##-------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model 
+##-------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + def calcSky(self, rh_c, gh_c, topo, p_SFC): + return self.skyFromRH(rh_c, gh_c, topo, p_SFC) + +##-------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. +##-------------------------------------------------------------------------- + def calcPoP(self, gh_c, rh_c, QPF, topo): + rhavg = where(less(gh_c, topo), float32(-1), rh_c) + rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 + count = not_equal(rhavg, -1) + rhavg[equal(rhavg, -1)] = 0 + count = add.reduce(count, 0, dtype=float32) + rhavg = add.reduce(rhavg, 0) + ## add this much based on humidity only + dpop = where(count, rhavg / (count + .001), 0) - 70.0 + dpop[less(dpop, -30)] = -30 + ## calculate the base PoP + pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) + pop += dpop # add the adjustment based on humidity + pop = clip(pop, 0, 100) # clip to 100% + return pop + +##-------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. 
+##-------------------------------------------------------------------------- + def calcFzLevel(self, gh_c, t_c, topo): + fzl = self.newGrid(-1) + # for each level in the height cube, find the freezing level + for i in range(gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ + * (273.15 - t_c[i - 1]) + except: + val = gh_c[i] + ## save the height value in fzl + fzl = where(logical_and(equal(fzl, -1), + less_equal(t_c[i], 273.15)), val, fzl) + return fzl * 3.28 # convert to feet + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. +##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb = clip(pmb, 1, 1050) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc = clip(tc, -120, 60) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), + val, snow) + # + # convert to feet + # + snow = snow * 3.28 + + return snow + +##-------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##-------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + snowr = T * -0.5 + 22.5 + snowr[less(T, 9)] = 20 + snowr[greater_equal(T, 30)] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), + snowr * QPF, float32(0)) + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) + snowamt[logical_not(snowmask)] = 0 + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". 
+##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + +##-------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##-------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) + diffpt = pt[i] - runavg + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)), tmh, mh) + return (mh - topo) * 3.28 + +##-------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##-------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] # get the wind grids + dir = wind_FHAG10[1] # get wind dir + mag = mag * 1.94 # convert to knots + dir = clip(dir, 0, 359.5) + return (mag, dir) # assemble speed and dir into a tuple + +##-------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. +##-------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 # 3000 feet + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + # Interpolate (maybe) + famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) + fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) + fadir = clip(fadir, 0, 359.5) # clip the value to 0, 360 + famag = famag * 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##-------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. 
This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. +##-------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.00001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + + tdir.clip(0, 359.5, tdir) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tdir) # clip speed to 125 knots + return (tmag, tdir) + + +##-------------------------------------------------------------------------- +## Uses a derivation of the Bourgouin allgorithm to calculate precipitation +## type, and other algorithms to determine the coverage and intensity. +## The Bourgoin technique figures out precip type from calculating how +## long a hydrometer is exposed to alternating layers of above zero (C) and +## below zero temperature layers. This tool calculates at each grid point +## which of the four Bourgouin cases apply. Then the appropriate algorithm +## is applied to that case that further refines the precip. type. Once the +## type is determined, other algorithms are used to determine the coverage +## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, +## "A Method to Determine Precipitation Types", by Pierre Bourgouin +##-------------------------------------------------------------------------- + def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC, + bli_BL0180): + gh_c = gh_c[:13, :, :] + t_c = t_c[:13, :, :] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) + tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a11, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a11, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a11, a3) + topomask = logical_and(topomask, cross) + aindex = where(topomask, aindex + 1, aindex) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a22, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a22, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a22, a3) + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + # Case d (snow) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 
2 + wx[logical_and(srmask, + logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) + convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) + wx[logical_and(not_equal(wx, 0), convecMask)] += 6 + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "Chc" or tcov == "": + tcov = "Sct" + key.append(key[i] + "^" + tcov + + ":T:::") + wx[less_equal(bli_BL0180, -3)] += 13 + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +##-------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##-------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##-------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip., lifted index +## and 3-D relative humidity. +##-------------------------------------------------------------------------- + def calcLAL(self, bli_BL0180, tp_SFC, cp_SFC, rh_c, rh_FHAG2): + lal = self.newGrid(1) + # Add one to lal if we have 0.5 mm of precip. 
+ lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 + + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli_BL0180, -3)] += 1 + lal[less(bli_BL0180, -5)] += 1 + return lal + + +def main(): + NAM95Forecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/RAP13.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/RAP13.py index 044d2ca7f7..4c268a5ace 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/RAP13.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/RAP13.py @@ -1,559 +1,559 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. 
Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from RAP13 model -## output. -## -##-------------------------------------------------------------------------- -class RAP13Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "RAP13") - -##-------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. -##-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB950", "MB900", "MB850", "MB800", "MB750", "MB700", - "MB650", "MB600", "MB550", "MB500", "MB450", "MB400", - "MB350", "MB300"] - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - -##------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool simply interpolates the temperature value from -## model's isobaric temperature cube. 
-##------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL6090, t_BL150180, - p_SFC, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i - 1], - log(self.pres[i]), log(self.pres[i - 1]), topo) - val.clip(-.00001, 10, val) - - m = logical_and(equal(p, -1), higher) - p[m] = exp(val)[m] - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - - m = logical_and(equal(tmb, -1), higher) - tmb[m] = tval1[m] - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) - - m = logical_and(equal(tms, -1), greater(gh_c[i], stopo)) - tms[m] = tval2[m] - - p_SFC /= 100 - # define the pres. 
of each of the boundary layers - pres = [p_SFC, p_SFC - 15, p_SFC - 75, p_SFC - 168] - - # list of temperature grids - temps = [t_FHAG2, t_BL030, t_BL6090, t_BL150180] - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) - gm = greater(pres[i - 1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - - m = logical_and(equal(st, -1), mask) - st[m] = val[m] - - # where topo level is above highest level in BL fields...use tmb - m = logical_and(equal(st,-1),less(p,p_SFC-135)) - st[m] = tmb[m] - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - m = equal(st, -1) - st[m] = (tmb - tms + t_FHAG2)[m] - return self.KtoF(st) - - -##------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. -##------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) - # at the true surface - tsfce = self.esat(self.FtoK(T)) - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz - ws = (0.622 * tsfce) / (newp - tsfce) - rh = w / ws - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat.clip(0.00001, tsfcesat, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - - m = w > ws - td[m] = T[m] - return td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * 
(Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -##------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model -##------------------------------------------------------------------------- - def calcQPF(self, cp_SFC, lgsp_SFC): - tp_SFC = cp_SFC + lgsp_SFC - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - def calcSky(self, rh_c, gh_c, topo, p_SFC): - return self.skyFromRH(rh_c, gh_c, topo, p_SFC) - -##------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. 
-##------------------------------------------------------------------------- - def calcPoP(self, gh_c, rh_c, QPF, topo): - rhavg = where(less(gh_c, topo), float32(-1), rh_c) - rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 - count = not_equal(rhavg, -1) - rhavg[equal(rhavg, -1)] = 0 - count = add.reduce(count, 0, dtype=float32) - rhavg = add.reduce(rhavg, 0) - ## add this much based on humidity only - dpop = where(count, rhavg / (count + .001), 0) - 70.0 - dpop[less(dpop, -30)] = -30 - ## calculate the base PoP - pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) - pop += dpop # add the adjustment based on humidity - pop.clip(0, 100, pop) # clip to 100% - return pop - -##------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. -##------------------------------------------------------------------------- - def calcFzLevel(self, gh_FRZ): - return gh_FRZ / 0.3048 - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. 
-##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb.clip(1, 1050, pmb) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc.clip(-120, 60, tc) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - m = logical_and(equal(snow, -1), less_equal(wetb[i], 0)) - snow[m] = val[m] - # - # convert to feet - # - snow /= 0.3048 - - return snow - -##------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - snowr = T * -0.5 + 22.5 - snowr[less(T, 9)] = 20 - snowr[greater_equal(T, 30)] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = where(less_equal(FzLevel - 1000, topo / 0.3048), - snowr * QPF, float32(0)) - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) - snowamt[logical_not(snowmask)] = 0 - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". 
-##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - -##------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) - diffpt = pt[i] - runavg - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - m = logical_and(logical_and(mask[i], equal(mh, -1)), greater(diffpt, 3)) - mh[m] = tmh[m] - mh -= topo - mh /= 0.3048 # convert to feet - return mh - -##------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] # get the wind grids - dir = wind_FHAG10[1] - mag += 1.94 # convert m/s to knots - dir.clip(0, 359.5, dir) - return (mag, dir) - -##------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. -##------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - m = logical_and(equal(famag, -1), mask[i]) - famag[m] = wm[i][m] - - m = logical_and(equal(fadir, -1), mask[i]) - fadir[m] = wd[i][m] - - fadir.clip(0, 359.5, fadir) # clip the value to 0, 360 - famag *= 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. 
This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. -##------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - mask = add.reduce(mask).astype(float32) - mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tdir.clip(0, 359.5, tdir) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tmag) # clip speed to 125 knots - return (tmag, tdir) - -##------------------------------------------------------------------------- -## Uses a derivation of the Bourgouin allgorithm to calculate precipitation -## type, and other algorithms to determine the coverage and intensity. -## The Bourgoin technique figures out precip type from calculating how -## long a hydrometer is exposed to alternating layers of above zero (C) and -## below zero temperature layers. This tool calculates at each grid point -## which of the four Bourgouin cases apply. Then the appropriate algorithm -## is applied to that case that further refines the precip. type. Once the -## type is determined, other algorithms are used to determine the coverage -## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, -## "A Method to Determine Precipitation Types", by Pierre Bourgouin -##------------------------------------------------------------------------- - def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, lgsp_SFC, cp_SFC): - tp_SFC = cp_SFC + lgsp_SFC - gh_c = gh_c[:13, :, :] - t_c = t_c[:13, :, :] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) - tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - - m = logical_and(equal(aindex, 0), topomask) - a1[m] += a11[m] - - m = logical_and(equal(aindex, 1), topomask) - a2[m] += a11[m] - - m = logical_and(equal(aindex, 2), topomask) - a3[m] += a11[m] - - topomask = logical_and(topomask, cross) - aindex = where(topomask, aindex + 1, aindex) - - m = logical_and(equal(aindex, 0), topomask) - a1[m] += a22[m] - - m = logical_and(equal(aindex, 1), topomask) - a2[m] += a22[m] - - m = logical_and(equal(aindex, 2), topomask) - a3[m] += a22[m] - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - # Case d (snow) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] = 2 - 
wx[logical_and(srmask, - logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) - convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) - wx[logical_and(not_equal(wx, 0), convecMask)] += 6 - - # This section commented out since bli is no longer available for RUC. - # Thunder - #for i in xrange(len(key)): - #tcov = string.split(key[i], ":")[0] - #if tcov == "Chc" or tcov == "": - #tcov = "Sct" - #key.append(key[i] + "^" + tcov - #+ ":T:::") - #wx = where(less_equal(bli_SFC, -3), wx + 13, wx) - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -##------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip., lifted index -## and 3-D relative humidity. -##------------------------------------------------------------------------- - def calcLAL(self, bli_SFC, lgsp_SFC, cp_SFC, rh_c, rh_FHAG2): - tp_SFC = cp_SFC + lgsp_SFC - lal = self.newGrid(1) - # Add one to lal if we have 0.5 mm of precip. - lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 - - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli_SFC, -3)] += 1 - lal[less(bli_SFC, -5)] += 1 - return lal - -def main(): +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. 
+# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from RAP13 model +## output. +## +##-------------------------------------------------------------------------- +class RAP13Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "RAP13") + +##-------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. 
+##-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB950", "MB900", "MB850", "MB800", "MB750", "MB700", + "MB650", "MB600", "MB550", "MB500", "MB450", "MB400", + "MB350", "MB300"] + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + +##------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. This tool simply interpolates the temperature value from +## model's isobaric temperature cube. 
+##------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL6090, t_BL150180, + p_SFC, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i - 1], + log(self.pres[i]), log(self.pres[i - 1]), topo) + val.clip(-.00001, 10, val) + + m = logical_and(equal(p, -1), higher) + p[m] = exp(val)[m] + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + + m = logical_and(equal(tmb, -1), higher) + tmb[m] = tval1[m] + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) + + m = logical_and(equal(tms, -1), greater(gh_c[i], stopo)) + tms[m] = tval2[m] + + p_SFC /= 100 + # define the pres. 
of each of the boundary layers + pres = [p_SFC, p_SFC - 15, p_SFC - 75, p_SFC - 168] + + # list of temperature grids + temps = [t_FHAG2, t_BL030, t_BL6090, t_BL150180] + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) + gm = greater(pres[i - 1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + + m = logical_and(equal(st, -1), mask) + st[m] = val[m] + + # where topo level is above highest level in BL fields...use tmb + m = logical_and(equal(st,-1),less(p,p_SFC-135)) + st[m] = tmb[m] + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + m = equal(st, -1) + st[m] = (tmb - tms + t_FHAG2)[m] + return self.KtoF(st) + + +##------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. +##------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) + # at the true surface + tsfce = self.esat(self.FtoK(T)) + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz + ws = (0.622 * tsfce) / (newp - tsfce) + rh = w / ws + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat.clip(0.00001, tsfcesat, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + + m = w > ws + td[m] = T[m] + return td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * 
(Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +##------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model +##------------------------------------------------------------------------- + def calcQPF(self, cp_SFC, lgsp_SFC): + tp_SFC = cp_SFC + lgsp_SFC + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + def calcSky(self, rh_c, gh_c, topo, p_SFC): + return self.skyFromRH(rh_c, gh_c, topo, p_SFC) + +##------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. 
+##------------------------------------------------------------------------- + def calcPoP(self, gh_c, rh_c, QPF, topo): + rhavg = where(less(gh_c, topo), float32(-1), rh_c) + rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 + count = not_equal(rhavg, -1) + rhavg[equal(rhavg, -1)] = 0 + count = add.reduce(count, 0, dtype=float32) + rhavg = add.reduce(rhavg, 0) + ## add this much based on humidity only + dpop = where(count, rhavg / (count + .001), 0) - 70.0 + dpop[less(dpop, -30)] = -30 + ## calculate the base PoP + pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) + pop += dpop # add the adjustment based on humidity + pop.clip(0, 100, pop) # clip to 100% + return pop + +##------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. +##------------------------------------------------------------------------- + def calcFzLevel(self, gh_FRZ): + return gh_FRZ / 0.3048 + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. 
+##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb.clip(1, 1050, pmb) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc.clip(-120, 60, tc) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + m = logical_and(equal(snow, -1), less_equal(wetb[i], 0)) + snow[m] = val[m] + # + # convert to feet + # + snow /= 0.3048 + + return snow + +##------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + snowr = T * -0.5 + 22.5 + snowr[less(T, 9)] = 20 + snowr[greater_equal(T, 30)] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = where(less_equal(FzLevel - 1000, topo / 0.3048), + snowr * QPF, float32(0)) + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) + snowamt[logical_not(snowmask)] = 0 + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". 
+##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + +##------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) + diffpt = pt[i] - runavg + # calc. the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + m = logical_and(logical_and(mask[i], equal(mh, -1)), greater(diffpt, 3)) + mh[m] = tmh[m] + mh -= topo + mh /= 0.3048 # convert to feet + return mh + +##------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] # get the wind grids + dir = wind_FHAG10[1] + mag += 1.94 # convert m/s to knots + dir.clip(0, 359.5, dir) + return (mag, dir) + +##------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. 
+##------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + m = logical_and(equal(famag, -1), mask[i]) + famag[m] = wm[i][m] + + m = logical_and(equal(fadir, -1), mask[i]) + fadir[m] = wd[i][m] + + fadir.clip(0, 359.5, fadir) # clip the value to 0, 360 + famag *= 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. 
+##------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + mask = add.reduce(mask).astype(float32) + mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tdir.clip(0, 359.5, tdir) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tmag) # clip speed to 125 knots + return (tmag, tdir) + +##------------------------------------------------------------------------- +## Uses a derivation of the Bourgouin allgorithm to calculate precipitation +## type, and other algorithms to determine the coverage and intensity. +## The Bourgoin technique figures out precip type from calculating how +## long a hydrometer is exposed to alternating layers of above zero (C) and +## below zero temperature layers. This tool calculates at each grid point +## which of the four Bourgouin cases apply. Then the appropriate algorithm +## is applied to that case that further refines the precip. type. Once the +## type is determined, other algorithms are used to determine the coverage +## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, +## "A Method to Determine Precipitation Types", by Pierre Bourgouin +##------------------------------------------------------------------------- + def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, lgsp_SFC, cp_SFC): + tp_SFC = cp_SFC + lgsp_SFC + gh_c = gh_c[:13, :, :] + t_c = t_c[:13, :, :] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) + tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + + m = logical_and(equal(aindex, 0), topomask) + a1[m] += a11[m] + + m = logical_and(equal(aindex, 1), topomask) + a2[m] += a11[m] + + m = logical_and(equal(aindex, 2), topomask) + a3[m] += a11[m] + + topomask = logical_and(topomask, cross) + aindex = where(topomask, aindex + 1, aindex) + + m = logical_and(equal(aindex, 0), topomask) + a1[m] += a22[m] + + m = logical_and(equal(aindex, 1), topomask) + a2[m] += a22[m] + + m = logical_and(equal(aindex, 2), topomask) + a3[m] += a22[m] + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + # Case d (snow) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 2 + 
wx[logical_and(srmask, + logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) + convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) + wx[logical_and(not_equal(wx, 0), convecMask)] += 6 + + # This section commented out since bli is no longer available for RUC. + # Thunder + #for i in xrange(len(key)): + #tcov = string.split(key[i], ":")[0] + #if tcov == "Chc" or tcov == "": + #tcov = "Sct" + #key.append(key[i] + "^" + tcov + #+ ":T:::") + #wx = where(less_equal(bli_SFC, -3), wx + 13, wx) + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +##------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip., lifted index +## and 3-D relative humidity. +##------------------------------------------------------------------------- + def calcLAL(self, bli_SFC, lgsp_SFC, cp_SFC, rh_c, rh_FHAG2): + tp_SFC = cp_SFC + lgsp_SFC + lal = self.newGrid(1) + # Add one to lal if we have 0.5 mm of precip. 
+ lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 + + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli_SFC, -3)] += 1 + lal[less(bli_SFC, -5)] += 1 + return lal + +def main(): RAP13Forecaster().run() \ No newline at end of file diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/RAP40.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/RAP40.py index 1c812d02ef..1449b0bbad 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/RAP40.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/RAP40.py @@ -1,534 +1,534 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * -##-------------------------------------------------------------------------- -## Module that calculates surface weather elements from RAP40 model -## output. -## -##-------------------------------------------------------------------------- -class RAP40Forecaster(Forecaster): - def __init__(self): - Forecaster.__init__(self, "RAP40") - -##-------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. 
-##-------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB950", "MB900", "MB850", "MB800", "MB750", "MB700", - "MB650", "MB600", "MB550", "MB500", "MB450", "MB400", - "MB350", "MB300"] - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##-------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##-------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - -##------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool simply interpolates the temperature value from -## model's isobaric temperature cube. 
-##------------------------------------------------------------------------- - def calcT(self, t_FHAG2, t_BL030, t_BL6090, t_BL150180, - p_SFC, topo, stopo, gh_c, t_c): - p = self.newGrid(-1) - tmb = self.newGrid(-1) - tms = self.newGrid(-1) - # go up the column to figure out the surface pressure - for i in xrange(1, gh_c.shape[0]): - higher = greater(gh_c[i], topo) - # interpolate the pressure at topo height - val = self.linear(gh_c[i], gh_c[i - 1], - log(self.pres[i]), log(self.pres[i - 1]), topo) - val = clip(val, -.00001, 10) - p = where(logical_and(equal(p, -1), higher), - exp(val), p) - # interpolate the temperature at true elevation - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - tmb = where(logical_and(equal(tmb, -1), higher), - tval1, tmb) - # interpolate the temperature at model elevation - tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) - tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), - tval2, tms) - - p_SFC = p_SFC / 100 - # define the pres. 
of each of the boundary layers - pres = [p_SFC, p_SFC - 15, p_SFC - 75, p_SFC - 168] - - # list of temperature grids - temps = [t_FHAG2, t_BL030, t_BL6090, t_BL150180] - st = self.newGrid(-1) - # Calculate the lapse rate in units of pressure - for i in xrange(1, len(pres)): - val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) - gm = greater(pres[i - 1], p) - lm = less_equal(pres[i], p) - mask = logical_and(gm, lm) - st = where(logical_and(equal(st, -1), mask), - val, st) - - # where topo level is above highest level in BL fields...use tmb - st = where(logical_and(equal(st,-1),less(p,p_SFC-135)),tmb,st) - - # where topo level is below model surface...use difference - # of t at pressure of surface and tFHAG2 and subtract from tmb - st = where(equal(st, -1), tmb - tms + t_FHAG2, st) - return self.KtoF(st) - - -##------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. -##------------------------------------------------------------------------- - def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) - # at the true surface - tsfce = self.esat(self.FtoK(T)) - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz - ws = (0.622 * tsfce) / (newp - tsfce) - rh = w / ws - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 
32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH - return minimum(MinRH, RH) - -##------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model -##------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - def calcSky(self, rh_c, gh_c, topo, p_SFC): - return self.skyFromRH(rh_c, gh_c, topo, p_SFC) - -##------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. 
-##------------------------------------------------------------------------- - def calcPoP(self, gh_c, rh_c, QPF, topo): - rhavg = where(less(gh_c, topo), float32(-1), rh_c) - rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 - count = not_equal(rhavg, -1) - rhavg[equal(rhavg, -1)] = 0 - count = add.reduce(count, 0, dtype=float32) - rhavg = add.reduce(rhavg, 0) - ## add this much based on humidity only - dpop = where(count, rhavg / (count + .001), 0) - 70.0 - dpop[less(dpop, -30)] = -30 - ## calculate the base PoP - pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) - pop += dpop # add the adjustment based on humidity - pop = clip(pop, 0, 100) # clip to 100% - return pop - -##------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. -##------------------------------------------------------------------------- - def calcFzLevel(self, gh_FRZ): - return gh_FRZ * 3.28 - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. 
-##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb = clip(pmb, 1, 1050) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc = clip(tc, -120, 60) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), - val, snow) - # - # convert to feet - # - snow = snow * 3.28 - - return snow - -##------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - snowr = T * -0.5 + 22.5 - snowr[less(T, 9)] = 20 - snowr[greater_equal(T, 30)] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), - snowr * QPF, float32(0)) - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) - snowamt[logical_not(snowmask)] = 0 - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". 
-##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - -##------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) - diffpt = pt[i] - runavg - # calc. the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)), tmh, mh) - return (mh - topo) * 3.28 # convert to feet - -##------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##------------------------------------------------------------------------- - def calcWind(self, wind_FHAG10): - mag = wind_FHAG10[0] # get the wind grids - dir = wind_FHAG10[1] - mag = mag * 1.94 # convert m/s to knots - dir = clip(dir, 0, 359.5) - return (mag, dir) - -##------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. 
-##------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - # find the points that are above the 3000 foot level - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) - fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) - fadir = clip(fadir, 0, 359.5) # clip the value to 0, 360 - famag = famag * 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. 
-##------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - mask = add.reduce(mask).astype(float32) - mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tdir.clip(0, 359.5, tdir) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tmag) # clip speed to 125 knots - return (tmag, tdir) - -##------------------------------------------------------------------------- -## Uses a derivation of the Bourgouin allgorithm to calculate precipitation -## type, and other algorithms to determine the coverage and intensity. -## The Bourgoin technique figures out precip type from calculating how -## long a hydrometer is exposed to alternating layers of above zero (C) and -## below zero temperature layers. This tool calculates at each grid point -## which of the four Bourgouin cases apply. Then the appropriate algorithm -## is applied to that case that further refines the precip. type. Once the -## type is determined, other algorithms are used to determine the coverage -## and intensity. See the Weather and Forecasting Journal article Oct. 2000, -## "A Method to Determine Precipitation Types", by Pierre Bourgouin -##------------------------------------------------------------------------- - def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC): - gh_c = gh_c[:13, :, :] - t_c = t_c[:13, :, :] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. 
in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) - tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a11, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a11, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a11, a3) - topomask = logical_and(topomask, cross) - aindex = where(topomask, aindex + 1, aindex) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a22, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a22, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a22, a3) - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - wx = self.empty(int8) - # Case d (snow) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, greater(a1, 13.2))] = 2 - wx[logical_and(srmask, - logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = 
logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) - convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) - wx[logical_and(not_equal(wx, 0), convecMask)] += 6 - - # This section commented out since bli is no longer available for RUC. - # Thunder - #for i in xrange(len(key)): - #tcov = string.split(key[i], ":")[0] - #if tcov == "Chc" or tcov == "": - #tcov = "Sct" - #key.append(key[i] + "^" + tcov - #+ ":T:::") - #wx = where(less_equal(bli_SFC, -3), wx + 13, wx) - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -##------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip., lifted index -## and 3-D relative humidity. -##------------------------------------------------------------------------- - def calcLAL(self, bli_SFC, tp_SFC, cp_SFC, rh_c, rh_FHAG2): - lal = self.newGrid(1) - # Add one to lal if we have 0.5 mm of precip. - lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 - - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - # Add one to lal if mid-level rh high and low level rh low - lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[less(bli_SFC, -3)] += 1 - lal[less(bli_SFC, -5)] += 1 - return lal - -def main(): - RAP40Forecaster().run() +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. 
Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * +##-------------------------------------------------------------------------- +## Module that calculates surface weather elements from RAP40 model +## output. +## +##-------------------------------------------------------------------------- +class RAP40Forecaster(Forecaster): + def __init__(self): + Forecaster.__init__(self, "RAP40") + +##-------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. +##-------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB950", "MB900", "MB850", "MB800", "MB750", "MB700", + "MB650", "MB600", "MB550", "MB500", "MB450", "MB400", + "MB350", "MB300"] + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##-------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##-------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + +##------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. This tool simply interpolates the temperature value from +## model's isobaric temperature cube. 
+##------------------------------------------------------------------------- + def calcT(self, t_FHAG2, t_BL030, t_BL6090, t_BL150180, + p_SFC, topo, stopo, gh_c, t_c): + p = self.newGrid(-1) + tmb = self.newGrid(-1) + tms = self.newGrid(-1) + # go up the column to figure out the surface pressure + for i in range(1, gh_c.shape[0]): + higher = greater(gh_c[i], topo) + # interpolate the pressure at topo height + val = self.linear(gh_c[i], gh_c[i - 1], + log(self.pres[i]), log(self.pres[i - 1]), topo) + val = clip(val, -.00001, 10) + p = where(logical_and(equal(p, -1), higher), + exp(val), p) + # interpolate the temperature at true elevation + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + tmb = where(logical_and(equal(tmb, -1), higher), + tval1, tmb) + # interpolate the temperature at model elevation + tval2 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], stopo) + tms = where(logical_and(equal(tms, -1), greater(gh_c[i], stopo)), + tval2, tms) + + p_SFC = p_SFC / 100 + # define the pres. 
of each of the boundary layers + pres = [p_SFC, p_SFC - 15, p_SFC - 75, p_SFC - 168] + + # list of temperature grids + temps = [t_FHAG2, t_BL030, t_BL6090, t_BL150180] + st = self.newGrid(-1) + # Calculate the lapse rate in units of pressure + for i in range(1, len(pres)): + val = self.linear(pres[i], pres[i - 1], temps[i], temps[i - 1], p) + gm = greater(pres[i - 1], p) + lm = less_equal(pres[i], p) + mask = logical_and(gm, lm) + st = where(logical_and(equal(st, -1), mask), + val, st) + + # where topo level is above highest level in BL fields...use tmb + st = where(logical_and(equal(st,-1),less(p,p_SFC-135)),tmb,st) + + # where topo level is below model surface...use difference + # of t at pressure of surface and tFHAG2 and subtract from tmb + st = where(equal(st, -1), tmb - tms + t_FHAG2, st) + return self.KtoF(st) + + +##------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. +##------------------------------------------------------------------------- + def calcTd(self, p_SFC, T, t_FHAG2, stopo, topo, rh_FHAG2): + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce) + # at the true surface + tsfce = self.esat(self.FtoK(T)) + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz + ws = (0.622 * tsfce) / (newp - tsfce) + rh = w / ws + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 
32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH + return minimum(MinRH, RH) + +##------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model +##------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + def calcSky(self, rh_c, gh_c, topo, p_SFC): + return self.skyFromRH(rh_c, gh_c, topo, p_SFC) + +##------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. 
+##------------------------------------------------------------------------- + def calcPoP(self, gh_c, rh_c, QPF, topo): + rhavg = where(less(gh_c, topo), float32(-1), rh_c) + rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 + count = not_equal(rhavg, -1) + rhavg[equal(rhavg, -1)] = 0 + count = add.reduce(count, 0, dtype=float32) + rhavg = add.reduce(rhavg, 0) + ## add this much based on humidity only + dpop = where(count, rhavg / (count + .001), 0) - 70.0 + dpop[less(dpop, -30)] = -30 + ## calculate the base PoP + pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) + pop += dpop # add the adjustment based on humidity + pop = clip(pop, 0, 100) # clip to 100% + return pop + +##------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. +##------------------------------------------------------------------------- + def calcFzLevel(self, gh_FRZ): + return gh_FRZ * 3.28 + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. 
+##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb = clip(pmb, 1, 1050) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc = clip(tc, -120, 60) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), + val, snow) + # + # convert to feet + # + snow = snow * 3.28 + + return snow + +##------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + snowr = T * -0.5 + 22.5 + snowr[less(T, 9)] = 20 + snowr[greater_equal(T, 30)] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), + snowr * QPF, float32(0)) + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) + snowamt[logical_not(snowmask)] = 0 + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". 
+##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + +##------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) + diffpt = pt[i] - runavg + # calc. the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)), tmh, mh) + return (mh - topo) * 3.28 # convert to feet + +##------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##------------------------------------------------------------------------- + def calcWind(self, wind_FHAG10): + mag = wind_FHAG10[0] # get the wind grids + dir = wind_FHAG10[1] + mag = mag * 1.94 # convert m/s to knots + dir = clip(dir, 0, 359.5) + return (mag, dir) + +##------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. 
+##------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + # find the points that are above the 3000 foot level + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) + fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) + fadir = clip(fadir, 0, 359.5) # clip the value to 0, 360 + famag = famag * 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. 
+##------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + mask = add.reduce(mask).astype(float32) + mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tdir.clip(0, 359.5, tdir) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tmag) # clip speed to 125 knots + return (tmag, tdir) + +##------------------------------------------------------------------------- +## Uses a derivation of the Bourgouin allgorithm to calculate precipitation +## type, and other algorithms to determine the coverage and intensity. +## The Bourgoin technique figures out precip type from calculating how +## long a hydrometer is exposed to alternating layers of above zero (C) and +## below zero temperature layers. This tool calculates at each grid point +## which of the four Bourgouin cases apply. Then the appropriate algorithm +## is applied to that case that further refines the precip. type. Once the +## type is determined, other algorithms are used to determine the coverage +## and intensity. See the Weather and Forecasting Journal article Oct. 2000, +## "A Method to Determine Precipitation Types", by Pierre Bourgouin +##------------------------------------------------------------------------- + def calcWx(self, QPF, T, p_SFC, t_c, gh_c, topo, tp_SFC, cp_SFC): + gh_c = gh_c[:13, :, :] + t_c = t_c[:13, :, :] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. 
in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) + tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a11, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a11, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a11, a3) + topomask = logical_and(topomask, cross) + aindex = where(topomask, aindex + 1, aindex) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a22, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a22, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a22, a3) + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + wx = self.empty(int8) + # Case d (snow) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, greater(a1, 13.2))] = 2 + wx[logical_and(srmask, + logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = 
logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) + convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) + wx[logical_and(not_equal(wx, 0), convecMask)] += 6 + + # This section commented out since bli is no longer available for RUC. + # Thunder + #for i in xrange(len(key)): + #tcov = string.split(key[i], ":")[0] + #if tcov == "Chc" or tcov == "": + #tcov = "Sct" + #key.append(key[i] + "^" + tcov + #+ ":T:::") + #wx = where(less_equal(bli_SFC, -3), wx + 13, wx) + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +##------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip., lifted index +## and 3-D relative humidity. +##------------------------------------------------------------------------- + def calcLAL(self, bli_SFC, tp_SFC, cp_SFC, rh_c, rh_FHAG2): + lal = self.newGrid(1) + # Add one to lal if we have 0.5 mm of precip. 
+ lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 + + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + # Add one to lal if mid-level rh high and low level rh low + lal[logical_and(greater(midrh, 70), less(rh_FHAG2, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[less(bli_SFC, -3)] += 1 + lal[less(bli_SFC, -5)] += 1 + return lal + +def main(): + RAP40Forecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/gfsLR.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/gfsLR.py index 9b549f8200..0ade454104 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/gfsLR.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/smartinit/gfsLR.py @@ -1,545 +1,545 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. 
Please see the -# Configuration Guides->Smart Initialization Configuration section of the GFE -# Online Help for guidance on creating a new smart init -## - -from Init import * - -class gfsLRForecaster(Forecaster): -##------------------------------------------------------------------------- -## Module that calculates surface weather elements from gfsLR model -## output. -##------------------------------------------------------------------------- - def __init__(self): - Forecaster.__init__(self, "gfsLR") - -##------------------------------------------------------------------------- -## These levels will be used to create vertical soundings. These are -## defined here since they are model dependent. -##------------------------------------------------------------------------- - def levels(self): - return ["MB1000", "MB850", "MB700", "MB500", "MB300"] - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxT and the T grids -##------------------------------------------------------------------------- - def calcMaxT(self, T, MaxT): - if MaxT is None: - return T - return maximum(MaxT, T) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinT and T grids -##------------------------------------------------------------------------- - def calcMinT(self, T, MinT): - if MinT is None: - return T - return minimum(MinT, T) - -##------------------------------------------------------------------------- -## Calculates dew point from the specified pressure, temp and rh -## fields. 
-##------------------------------------------------------------------------- - def calcTd(self, gh_c, T, t_BL030, stopo, topo, rh_BL030): - p_SFC = self.getSFCP(gh_c, stopo) - 15 * 100 - t_FHAG2 = t_BL030 - rh_FHAG2 = rh_BL030 - # at the model surface - sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O - w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar - # at the true surface - tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc - dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar - newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. - ws = (0.622 * tsfce) / (newp - tsfce) # sat. mixing ratio - rh = w / ws # calc relative humidity - # Finally, calculate the dew point - tsfcesat = rh * tsfce - tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) - b = 26.66082 - log(tsfcesat) - td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 - td = self.KtoF(td) - td = where(w > ws, T, td) - return td - -##------------------------------------------------------------------------- -## Calculates RH from the T and Td grids -##------------------------------------------------------------------------- - def calcRH(self, T, Td): - Tc = .556 * (T - 32.0) - Tdc = .556 * (Td - 32.0) - Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - # Return the new value - return RH - -##------------------------------------------------------------------------- -## Returns the maximum of the specified MaxRH and the RH grids -##-------------------------------------------------------------------------- - def calcMaxRH(self, RH, MaxRH): - if MaxRH is None: - return RH - return maximum(MaxRH, RH) - -##------------------------------------------------------------------------- -## Returns the minimum of the specified MinRH and RH grids -##-------------------------------------------------------------------------- - def calcMinRH(self, RH, MinRH): - if MinRH is None: - return RH 
- return minimum(MinRH, RH) - -##------------------------------------------------------------------------- -## Internal function that returns the surface temperature based on -## the specified geoPot. height and temperature cube. This function -## interpolates the temperature at elevation specified by the topo -## grid. -##------------------------------------------------------------------------- - def getSFCt(self, gh_c, t_c, topo): - sp = self.newGrid(-1) - for i in xrange(1, gh_c.shape[0]): - mask = logical_and(equal(sp, -1), greater_equal(gh_c[i], topo)) - sp[mask] = self.linear(gh_c[i], gh_c[i - 1], - t_c[i], t_c[i - 1], topo) - return sp - -##------------------------------------------------------------------------- -## Calculates the temperature at the elevation indicated in the topo -## grid. This tool simply interpolates the temperature value from -## model's isobaric temperature cube. -##------------------------------------------------------------------------- - def calcT(self, gh_c, t_c, topo): - tmb = self.newGrid(-1) - # calc sfc_temp at topo - for i in xrange(1, gh_c.shape[0]): - # interp temp in this layer - tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) - # assign temp to points in this layer - tmb = where(logical_and(equal(tmb, -1), greater(gh_c[i], topo)), - tval1, tmb) - ## convert from kelvin to fahrenheit - return self.KtoF(tmb) - -##------------------------------------------------------------------------- -## Calculates QPF from the total precip field out of the model -##------------------------------------------------------------------------- - def calcQPF(self, tp_SFC): - qpf = tp_SFC / 25.4 # convert from millimeters to inches - return qpf - - def calcSky(self, rh_c, gh_c, topo, pmsl_MSL): - return self.skyFromRH(rh_c, gh_c, topo, pmsl_MSL) - -##------------------------------------------------------------------------- -## Calculates Prob. of Precip. based on QPF and RH cube. 
Where there -## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas -## of QPF < 0.2 raise the PoP if it's very humid. -##------------------------------------------------------------------------- - def calcPoP(self, gh_c, rh_c, QPF, topo): - rhavg = where(less(gh_c, topo), float32(-1), rh_c) - rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 - count = not_equal(rhavg, -1) - rhavg[equal(rhavg, -1)] = 0 - count = add.reduce(count, 0, dtype=float32) - rhavg = add.reduce(rhavg, 0) - ## add this much based on humidity only - dpop = where(count, rhavg / (count + .001), 0) - 70.0 - dpop[less(dpop, -30)] = -30 - ## calculate the base PoP - pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) - pop += dpop # add the adjustment based on humidity - pop = clip(pop, 0, 100) # clip to 100% - return pop - -##------------------------------------------------------------------------- -## Calculates the Freezing level based on height and temperature -## cubes. Finds the height at which freezing occurs. -##------------------------------------------------------------------------- - def calcFzLevel(self, gh_c, t_c, topo): - fzl = self.newGrid(-1) - # for each level in the height cube, find the freezing level - for i in xrange(gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ - * (273.15 - t_c[i - 1]) - except: - val = gh_c[i] - ## save the height value in fzl - fzl = where(logical_and(equal(fzl, -1), - less_equal(t_c[i], 273.15)), val, fzl) - - return fzl * 3.28 # convert to feet - -##------------------------------------------------------------------------- -## Calculates the Snow level based on wet-bulb zero height. 
-##------------------------------------------------------------------------- - def calcSnowLevel(self, gh_c, t_c, rh_c): - # Only use the levels that are >= freezind (plus one level) - # This is a performance and memory optimization - clipindex = 2 - for i in xrange(t_c.shape[0] - 1, -1, -1): - if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: - clipindex = i + 1 - break - gh_c = gh_c[:clipindex, :, :] - t_c = t_c[:clipindex, :, :] - rh_c = rh_c[:clipindex, :, :] - - snow = self.newGrid(-1) - # - # make pressure cube - # - pmb = ones_like(gh_c) - for i in xrange(gh_c.shape[0]): - pmb[i] = self.pres[i] - pmb = clip(pmb, 1, 1050) - # - # convert temps to C and limit to reasonable values - # - tc = t_c - 273.15 - tc = clip(tc, -120, 60) - # - # limit RH to reasonable values - # - rh = clip(rh_c, 0.5, 99.5) - # - # calculate the wetbulb temperatures - # (this is expensive - even in numeric python - and somewhat - # wasteful, since you do not need to calculate the wetbulb - # temp for all levels when it may cross zero way down toward - # the bottom. Nevertheless - all the gridpoints will cross - # zero at different levels - so you cannot know ahead of time - # how high up to calculate them. In the end - this was the - # most expedient way to code it - and it works - so I stuck - # with it. 
- # - wetb = self.Wetbulb(tc, rh, pmb) - tc = rh = pmb = None - # - # find the zero level - # - for i in xrange(1, gh_c.shape[0]): - try: - val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ - * (-wetb[i - 1]) - except: - val = gh_c[i] - snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), - val, snow) - - # - # convert to feet - # - snow = snow * 3.28 - - return snow - -##------------------------------------------------------------------------- -## Calculates Snow amount based on the Temp, Freezing level, QPF, -## topo and Weather grid -##------------------------------------------------------------------------- - def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): - # figure out the snow to liquid ratio - snowr = T * -0.5 + 22.5 - snowr[less(T, 9)] = 20 - snowr[greater_equal(T, 30)] = 0 - # calc. snow amount based on the QPF and the ratio - snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), - snowr * QPF, float32(0)) - # Only make snow at points where the weather is snow - snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) - snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), - equal(Wx[0], 9))) - snowamt[logical_not(snowmask)] = 0 - - return snowamt - -##-------------------------------------------------------------------------- -## Calculate the Haines index based on the temp and RH cubes -## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". -## Default is "HIGH". -##-------------------------------------------------------------------------- - def calcHaines(self, t_c, rh_c): - return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) - - -##------------------------------------------------------------------------- -## Internal function that returns a pressure from the geopotential -## height cube at an elevation specified by the topo grid. 
-##------------------------------------------------------------------------- - def getSFCP(self, gh_c, topo): - pc = [] - for i in self.pres: - pc = pc + [self.newGrid(i)] - pc = log(array(pc)) - sp = self.newGrid(-1) - for i in xrange(1, gh_c.shape[0]): - mask = logical_and(equal(sp, -1), greater_equal(gh_c[i], topo)) - sp = where(mask, self.linear(gh_c[i], gh_c[i - 1], - pc[i], pc[i - 1], topo), sp) - - sp = exp(sp) - return sp * 100 - -##------------------------------------------------------------------------- -## Calculates the mixing height for the given sfc temperature, -## temperature cube, height cube and topo -##------------------------------------------------------------------------- - def calcMixHgt(self, T, topo, t_c, gh_c): - mask = greater_equal(gh_c, topo) # points where height > topo - pt = [] - for i in xrange(len(self.pres)): # for each pres. level - p = self.newGrid(self.pres[i]) # get the pres. value in mb - tmp = self.ptemp(t_c[i], p) # calculate the pot. temp - pt = pt + [tmp] # add to the list - pt = array(pt) - pt[logical_not(mask)] = 0 - avg = add.accumulate(pt, 0) - count = add.accumulate(mask, 0) - mh = self.newGrid(-1) - # for each pres. level, calculate a running avg. of pot temp. - # As soon as the next point deviates from the running avg by - # more than 3 deg. C, interpolate to get the mixing height. - for i in xrange(1, avg.shape[0]): - runavg = avg[i] / (count[i] + .0001) # calc. running avg - diffpt = pt[i] - runavg - # calc. 
the interpolated mixing height - tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) - # assign new values if the difference is greater than 3 - mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), - greater(diffpt, 3)), tmh, mh) - return (mh - topo) * 3.28 # convert to feet - -##------------------------------------------------------------------------- -## Converts the lowest available wind level from m/s to knots -##------------------------------------------------------------------------- - def calcWind(self, wind_BL030): - mag = wind_BL030[0] * 1.94 # get the wind speed and convert - dir = wind_BL030[1] # get wind dir - return (mag, dir) # assemble speed and dir into a tuple - -##------------------------------------------------------------------------- -## Calculates the wind at 3000 feet AGL. -##------------------------------------------------------------------------- - def calcFreeWind(self, gh_c, wind_c, topo): - wm = wind_c[0] - wd = wind_c[1] - # Make a grid that's topo + 3000 feet (914 meters) - fatopo = topo + 914.4 - mask = greater_equal(gh_c, fatopo) - # initialize the grids into which the value are stored - famag = self.newGrid(-1) - fadir = self.newGrid(-1) - # start at the bottom and store the first point we find that's - # above the topo + 3000 feet level. - for i in xrange(wind_c[0].shape[0]): - famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) - fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) - fadir = clip(fadir, 0, 359.5) # clip the value to 0, 360 - famag = famag * 1.94 # convert to knots - return (famag, fadir) # return the tuple of grids - -##------------------------------------------------------------------------- -## Calculates the average wind vector in the mixed layer as defined -## by the mixing height. This function creates a mask that identifies -## all grid points between the ground and the mixing height and calculates -## a vector average of the wind field in that layer. 
-##------------------------------------------------------------------------- - def calcTransWind(self, MixHgt, wind_c, gh_c, topo): - nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters - u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids - # set a mask at points between the topo and topo + MixHt - mask = logical_and(greater_equal(gh_c, topo), - less_equal(gh_c, nmh + topo)) - # set the points outside the layer to zero - u[logical_not(mask)] = 0 - v[logical_not(mask)] = 0 - - mask = add.reduce(mask).astype(float32) # add up the number of set points vert. - mmask = mask + 0.0001 - # calculate the average value in the mixed layerlayer - u = where(mask, add.reduce(u) / mmask, float32(0)) - v = where(mask, add.reduce(v) / mmask, float32(0)) - # convert u, v to mag, dir - tmag, tdir = self._getMD(u, v) - tdir = clip(tdir, 0, 359.5) - tmag *= 1.94 # convert to knots - tmag.clip(0, 125, tmag) # clip speed to 125 knots - return (tmag, tdir) - -##------------------------------------------------------------------------- -## Uses a derivation of the Bourgouin allgorithm to calculate precipitation -## type, and other algorithms to determine the coverage and intensity. -## The Bourgoin technique figures out precip type from calculating how -## long a hydrometer is exposed to alternating layers of above zero (C) and -## below zero temperature layers. This tool calculates at each grid point -## which of the four Bourgouin cases apply. Then the appropriate algorithm -## is applied to that case that further refines the precip. type. Once the -## type is determined, other algorithms are used to determine the coverage -## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, -## "A Method to Determine Precipitation Types", by Pierre Bourgouin -##------------------------------------------------------------------------- - def calcWx(self, QPF, T, t_c, gh_c, stopo, topo, tp_SFC, cp_SFC): - p_SFC = self.getSFCP(gh_c, stopo) - gh_c = gh_c[:13, :, :] - t_c = t_c[:13, :, :] - T = self.FtoK(T) - p_SFC = p_SFC / 100 # sfc pres. in mb - pres = self.pres - a1 = self.empty() - a2 = self.empty() - a3 = self.empty() - aindex = self.empty() - # Go through the levels to identify each case type 0-3 - for i in xrange(1, gh_c.shape[0] - 1): - # get the sfc pres. and temp. - pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) - tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) - # Calculate the area of this layer in Temp/pres coordinates - a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) - topomask = greater(gh_c[i], topo) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a11, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a11, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a11, a3) - topomask = logical_and(topomask, cross) - aindex = where(topomask, aindex + 1, aindex) - a1 = where(logical_and(equal(aindex, 0), topomask), - a1 + a22, a1) - a2 = where(logical_and(equal(aindex, 1), topomask), - a2 + a22, a2) - a3 = where(logical_and(equal(aindex, 2), topomask), - a3 + a22, a3) - - # Now apply a different algorithm for each type - key = ['::::', - "Wide:S:-::", "Wide:R:-::", - "Wide:S:-::^Wide:R:-::", - 'Wide:ZR:-::', 'Wide:IP:-::', - 'Wide:ZR:-::^Wide:IP:-::', - "Sct:SW:-::", "Sct:RW:-::", - "Sct:SW:-::^Sct:RW:-::", - "Chc:ZR:-::", 'Chc:IP:-::', - 'Chc:ZR:-::^Chc:IP:-::'] - - # Case d (snow) - wx = self.empty(int8) - snowmask = equal(aindex, 0) - wx[logical_and(snowmask, greater(a1, 0))] = 2 - wx[logical_and(snowmask, less_equal(a1, 0))] = 1 - - # Case c (rain / snow / rainSnowMix) - srmask = equal(aindex, 1) - wx[logical_and(srmask, less(a1, 5.6))] = 1 - wx[logical_and(srmask, 
greater(a1, 13.2))] = 2 - wx[logical_and(srmask, - logical_and(greater_equal(a1, 5.6), - less(a1, 13.2)))] = 3 - - # Case a (Freezing Rain / Ice Pellets) - ipmask = equal(aindex, 2) - ipm = greater(a1, a2 * 0.66 + 66) - wx[logical_and(ipmask, ipm)] = 5 - zrm = less(a1, a2 * 0.66 + 46) - wx[logical_and(ipmask, zrm)] = 4 - zrm = logical_not(zrm) - ipm = logical_not(ipm) - wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 - - # Case b (Ice pellets / rain) - cmask = greater_equal(aindex, 3) - ipmask = logical_and(less(a3, 2), cmask) - wx[logical_and(ipmask, less(a1, 5.6))] = 1 - wx[logical_and(ipmask, greater(a1, 13.2))] = 2 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), - less_equal(a1, 13.2)))] = 3 - - ipmask = logical_and(greater_equal(a3, 2), cmask) - wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 - wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 - wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), - less_equal(a1, 66 + 0.66 * a2)))] = 6 - - # Make showers (scattered/Chc) - convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) - wx[logical_and(not_equal(wx, 0), convecMask)] += 6 - - # Thunder - for i in xrange(len(key)): - tcov = string.split(key[i], ":")[0] - if tcov == "Chc" or tcov == "": - tcov = "Sct" - key.append(key[i] + "^" + tcov - + ":T:::") -# wx = where(less_equal(bli_BL0180, -3), wx + 13, wx) - - # No wx where no qpf - wx[less(QPF, 0.01)] = 0 - return(wx, key) - -##------------------------------------------------------------------------- -## Calculates chance of wetting rain based on QPF. 
-##------------------------------------------------------------------------- - def calcCWR(self, QPF): - m1 = less(QPF, 0.01) # all the places that are dry - m2 = greater_equal(QPF, 0.3) # all the places that are wet - # all the places that are 0.01 to 0.10 - m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) - # all the places that are 0.1 to 0.3 - m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) - # assign 0 to the dry grid point, 100 to the wet grid points, - # and a ramping function to all point in between - cwr = where(m1, float32(0), where(m2, float32(100), - where(m3, 444.4 * (QPF - 0.01) + 10, - where(m4, 250 * (QPF - 0.1) + 50, - QPF)))) - return cwr - -##------------------------------------------------------------------------- -## Calculates Lightning Activity Level based on total precip. -## convenctivePrecip, and 3-D relative humidity. -##------------------------------------------------------------------------- - def calcLAL(self, tp_SFC, cp_SFC, rh_c, rh_BL030): - cp_SFC = where(equal(cp_SFC, 0), float32(0.00001), cp_SFC) - lal = self.newGrid(1) - # Add one to lal if QPF > 0.5 - lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 - # make an average rh field - midrh = add.reduce(rh_c[6:9], 0) / 3 - lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 - - # Add on to lal if lifted index is <-3 and another if <-5 - lal[greater(cp_SFC, 2.0)] += 1 - lal[greater(cp_SFC, 8.0)] += 1 - - return lal - - -def main(): - gfsLRForecaster().run() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. 
+# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# Configuration Guides->Smart Initialization Configuration section of the GFE +# Online Help for guidance on creating a new smart init +## + +from Init import * + +class gfsLRForecaster(Forecaster): +##------------------------------------------------------------------------- +## Module that calculates surface weather elements from gfsLR model +## output. +##------------------------------------------------------------------------- + def __init__(self): + Forecaster.__init__(self, "gfsLR") + +##------------------------------------------------------------------------- +## These levels will be used to create vertical soundings. These are +## defined here since they are model dependent. +##------------------------------------------------------------------------- + def levels(self): + return ["MB1000", "MB850", "MB700", "MB500", "MB300"] + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxT and the T grids +##------------------------------------------------------------------------- + def calcMaxT(self, T, MaxT): + if MaxT is None: + return T + return maximum(MaxT, T) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinT and T grids +##------------------------------------------------------------------------- + def calcMinT(self, T, MinT): + if MinT is None: + return T + return minimum(MinT, T) + +##------------------------------------------------------------------------- +## Calculates dew point from the specified pressure, temp and rh +## fields. 
+##------------------------------------------------------------------------- + def calcTd(self, gh_c, T, t_BL030, stopo, topo, rh_BL030): + p_SFC = self.getSFCP(gh_c, stopo) - 15 * 100 + t_FHAG2 = t_BL030 + rh_FHAG2 = rh_BL030 + # at the model surface + sfce = rh_FHAG2 / 100 * self.esat(t_FHAG2) # partial pres of H2O + w = (0.622 * sfce) / ((p_SFC + 0.0001) / 100 - sfce)# meters / millibar + # at the true surface + tsfce = self.esat(self.FtoK(T)) # saturation vap.pres. at sfc + dpdz = 287.04 * t_FHAG2 / (p_SFC / 100 * 9.8) # meters / millibar + newp = p_SFC / 100 + (stopo - topo) / dpdz # adj. sfc press. + ws = (0.622 * tsfce) / (newp - tsfce) # sat. mixing ratio + rh = w / ws # calc relative humidity + # Finally, calculate the dew point + tsfcesat = rh * tsfce + tsfcesat = clip(tsfcesat, 0.00001, tsfcesat) + b = 26.66082 - log(tsfcesat) + td = (b - sqrt(b * b - 223.1986)) / 0.0182758048 + td = self.KtoF(td) + td = where(w > ws, T, td) + return td + +##------------------------------------------------------------------------- +## Calculates RH from the T and Td grids +##------------------------------------------------------------------------- + def calcRH(self, T, Td): + Tc = .556 * (T - 32.0) + Tdc = .556 * (Td - 32.0) + Vt = 6.11 * pow(10, (Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10, (Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + # Return the new value + return RH + +##------------------------------------------------------------------------- +## Returns the maximum of the specified MaxRH and the RH grids +##-------------------------------------------------------------------------- + def calcMaxRH(self, RH, MaxRH): + if MaxRH is None: + return RH + return maximum(MaxRH, RH) + +##------------------------------------------------------------------------- +## Returns the minimum of the specified MinRH and RH grids +##-------------------------------------------------------------------------- + def calcMinRH(self, RH, MinRH): + if MinRH is None: + return RH 
+ return minimum(MinRH, RH) + +##------------------------------------------------------------------------- +## Internal function that returns the surface temperature based on +## the specified geoPot. height and temperature cube. This function +## interpolates the temperature at elevation specified by the topo +## grid. +##------------------------------------------------------------------------- + def getSFCt(self, gh_c, t_c, topo): + sp = self.newGrid(-1) + for i in range(1, gh_c.shape[0]): + mask = logical_and(equal(sp, -1), greater_equal(gh_c[i], topo)) + sp[mask] = self.linear(gh_c[i], gh_c[i - 1], + t_c[i], t_c[i - 1], topo) + return sp + +##------------------------------------------------------------------------- +## Calculates the temperature at the elevation indicated in the topo +## grid. This tool simply interpolates the temperature value from +## model's isobaric temperature cube. +##------------------------------------------------------------------------- + def calcT(self, gh_c, t_c, topo): + tmb = self.newGrid(-1) + # calc sfc_temp at topo + for i in range(1, gh_c.shape[0]): + # interp temp in this layer + tval1 = self.linear(gh_c[i], gh_c[i - 1], t_c[i], t_c[i - 1], topo) + # assign temp to points in this layer + tmb = where(logical_and(equal(tmb, -1), greater(gh_c[i], topo)), + tval1, tmb) + ## convert from kelvin to fahrenheit + return self.KtoF(tmb) + +##------------------------------------------------------------------------- +## Calculates QPF from the total precip field out of the model +##------------------------------------------------------------------------- + def calcQPF(self, tp_SFC): + qpf = tp_SFC / 25.4 # convert from millimeters to inches + return qpf + + def calcSky(self, rh_c, gh_c, topo, pmsl_MSL): + return self.skyFromRH(rh_c, gh_c, topo, pmsl_MSL) + +##------------------------------------------------------------------------- +## Calculates Prob. of Precip. based on QPF and RH cube. 
Where there +## is QPF > 0 ramp the PoP from (0.01, 35%) to 100%. Then in areas +## of QPF < 0.2 raise the PoP if it's very humid. +##------------------------------------------------------------------------- + def calcPoP(self, gh_c, rh_c, QPF, topo): + rhavg = where(less(gh_c, topo), float32(-1), rh_c) + rhavg[greater(gh_c, topo + 5000 * 0.3048)] = -1 + count = not_equal(rhavg, -1) + rhavg[equal(rhavg, -1)] = 0 + count = add.reduce(count, 0, dtype=float32) + rhavg = add.reduce(rhavg, 0) + ## add this much based on humidity only + dpop = where(count, rhavg / (count + .001), 0) - 70.0 + dpop[less(dpop, -30)] = -30 + ## calculate the base PoP + pop = where(less(QPF, 0.02), QPF * 1000, QPF * 350 + 13) + pop += dpop # add the adjustment based on humidity + pop = clip(pop, 0, 100) # clip to 100% + return pop + +##------------------------------------------------------------------------- +## Calculates the Freezing level based on height and temperature +## cubes. Finds the height at which freezing occurs. +##------------------------------------------------------------------------- + def calcFzLevel(self, gh_c, t_c, topo): + fzl = self.newGrid(-1) + # for each level in the height cube, find the freezing level + for i in range(gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (t_c[i] - t_c[i - 1])\ + * (273.15 - t_c[i - 1]) + except: + val = gh_c[i] + ## save the height value in fzl + fzl = where(logical_and(equal(fzl, -1), + less_equal(t_c[i], 273.15)), val, fzl) + + return fzl * 3.28 # convert to feet + +##------------------------------------------------------------------------- +## Calculates the Snow level based on wet-bulb zero height. 
+##------------------------------------------------------------------------- + def calcSnowLevel(self, gh_c, t_c, rh_c): + # Only use the levels that are >= freezind (plus one level) + # This is a performance and memory optimization + clipindex = 2 + for i in range(t_c.shape[0] - 1, -1, -1): + if maximum.reduce(maximum.reduce(t_c[i])) >= 273.15: + clipindex = i + 1 + break + gh_c = gh_c[:clipindex, :, :] + t_c = t_c[:clipindex, :, :] + rh_c = rh_c[:clipindex, :, :] + + snow = self.newGrid(-1) + # + # make pressure cube + # + pmb = ones_like(gh_c) + for i in range(gh_c.shape[0]): + pmb[i] = self.pres[i] + pmb = clip(pmb, 1, 1050) + # + # convert temps to C and limit to reasonable values + # + tc = t_c - 273.15 + tc = clip(tc, -120, 60) + # + # limit RH to reasonable values + # + rh = clip(rh_c, 0.5, 99.5) + # + # calculate the wetbulb temperatures + # (this is expensive - even in numeric python - and somewhat + # wasteful, since you do not need to calculate the wetbulb + # temp for all levels when it may cross zero way down toward + # the bottom. Nevertheless - all the gridpoints will cross + # zero at different levels - so you cannot know ahead of time + # how high up to calculate them. In the end - this was the + # most expedient way to code it - and it works - so I stuck + # with it. 
+ # + wetb = self.Wetbulb(tc, rh, pmb) + tc = rh = pmb = None + # + # find the zero level + # + for i in range(1, gh_c.shape[0]): + try: + val = gh_c[i - 1] + (gh_c[i] - gh_c[i - 1]) / (wetb[i] - wetb[i - 1])\ + * (-wetb[i - 1]) + except: + val = gh_c[i] + snow = where(logical_and(equal(snow, -1), less_equal(wetb[i], 0)), + val, snow) + + # + # convert to feet + # + snow = snow * 3.28 + + return snow + +##------------------------------------------------------------------------- +## Calculates Snow amount based on the Temp, Freezing level, QPF, +## topo and Weather grid +##------------------------------------------------------------------------- + def calcSnowAmt(self, T, FzLevel, QPF, topo, Wx): + # figure out the snow to liquid ratio + snowr = T * -0.5 + 22.5 + snowr[less(T, 9)] = 20 + snowr[greater_equal(T, 30)] = 0 + # calc. snow amount based on the QPF and the ratio + snowamt = where(less_equal(FzLevel - 1000, topo * 3.28), + snowr * QPF, float32(0)) + # Only make snow at points where the weather is snow + snowmask = logical_or(equal(Wx[0], 1), equal(Wx[0], 3)) + snowmask = logical_or(snowmask, logical_or(equal(Wx[0], 7), + equal(Wx[0], 9))) + snowamt[logical_not(snowmask)] = 0 + + return snowamt + +##-------------------------------------------------------------------------- +## Calculate the Haines index based on the temp and RH cubes +## Define self.whichHainesIndex to be "HIGH", "MEDIUM", or "LOW". +## Default is "HIGH". +##-------------------------------------------------------------------------- + def calcHaines(self, t_c, rh_c): + return self.hainesIndex(self.whichHainesIndex, t_c, rh_c) + + +##------------------------------------------------------------------------- +## Internal function that returns a pressure from the geopotential +## height cube at an elevation specified by the topo grid. 
+##------------------------------------------------------------------------- + def getSFCP(self, gh_c, topo): + pc = [] + for i in self.pres: + pc = pc + [self.newGrid(i)] + pc = log(array(pc)) + sp = self.newGrid(-1) + for i in range(1, gh_c.shape[0]): + mask = logical_and(equal(sp, -1), greater_equal(gh_c[i], topo)) + sp = where(mask, self.linear(gh_c[i], gh_c[i - 1], + pc[i], pc[i - 1], topo), sp) + + sp = exp(sp) + return sp * 100 + +##------------------------------------------------------------------------- +## Calculates the mixing height for the given sfc temperature, +## temperature cube, height cube and topo +##------------------------------------------------------------------------- + def calcMixHgt(self, T, topo, t_c, gh_c): + mask = greater_equal(gh_c, topo) # points where height > topo + pt = [] + for i in range(len(self.pres)): # for each pres. level + p = self.newGrid(self.pres[i]) # get the pres. value in mb + tmp = self.ptemp(t_c[i], p) # calculate the pot. temp + pt = pt + [tmp] # add to the list + pt = array(pt) + pt[logical_not(mask)] = 0 + avg = add.accumulate(pt, 0) + count = add.accumulate(mask, 0) + mh = self.newGrid(-1) + # for each pres. level, calculate a running avg. of pot temp. + # As soon as the next point deviates from the running avg by + # more than 3 deg. C, interpolate to get the mixing height. + for i in range(1, avg.shape[0]): + runavg = avg[i] / (count[i] + .0001) # calc. running avg + diffpt = pt[i] - runavg + # calc. 
the interpolated mixing height + tmh = self.linear(pt[i], pt[i - 1], gh_c[i], gh_c[i - 1], runavg) + # assign new values if the difference is greater than 3 + mh = where(logical_and(logical_and(mask[i], equal(mh, -1)), + greater(diffpt, 3)), tmh, mh) + return (mh - topo) * 3.28 # convert to feet + +##------------------------------------------------------------------------- +## Converts the lowest available wind level from m/s to knots +##------------------------------------------------------------------------- + def calcWind(self, wind_BL030): + mag = wind_BL030[0] * 1.94 # get the wind speed and convert + dir = wind_BL030[1] # get wind dir + return (mag, dir) # assemble speed and dir into a tuple + +##------------------------------------------------------------------------- +## Calculates the wind at 3000 feet AGL. +##------------------------------------------------------------------------- + def calcFreeWind(self, gh_c, wind_c, topo): + wm = wind_c[0] + wd = wind_c[1] + # Make a grid that's topo + 3000 feet (914 meters) + fatopo = topo + 914.4 + mask = greater_equal(gh_c, fatopo) + # initialize the grids into which the value are stored + famag = self.newGrid(-1) + fadir = self.newGrid(-1) + # start at the bottom and store the first point we find that's + # above the topo + 3000 feet level. + for i in range(wind_c[0].shape[0]): + famag = where(logical_and(equal(famag, -1), mask[i]), wm[i], famag) + fadir = where(logical_and(equal(fadir, -1), mask[i]), wd[i], fadir) + fadir = clip(fadir, 0, 359.5) # clip the value to 0, 360 + famag = famag * 1.94 # convert to knots + return (famag, fadir) # return the tuple of grids + +##------------------------------------------------------------------------- +## Calculates the average wind vector in the mixed layer as defined +## by the mixing height. This function creates a mask that identifies +## all grid points between the ground and the mixing height and calculates +## a vector average of the wind field in that layer. 
+##------------------------------------------------------------------------- + def calcTransWind(self, MixHgt, wind_c, gh_c, topo): + nmh = MixHgt * 0.3048 # convert MixHt from feet -> meters + u, v = self._getUV(wind_c[0], wind_c[1]) # get the wind grids + # set a mask at points between the topo and topo + MixHt + mask = logical_and(greater_equal(gh_c, topo), + less_equal(gh_c, nmh + topo)) + # set the points outside the layer to zero + u[logical_not(mask)] = 0 + v[logical_not(mask)] = 0 + + mask = add.reduce(mask).astype(float32) # add up the number of set points vert. + mmask = mask + 0.0001 + # calculate the average value in the mixed layerlayer + u = where(mask, add.reduce(u) / mmask, float32(0)) + v = where(mask, add.reduce(v) / mmask, float32(0)) + # convert u, v to mag, dir + tmag, tdir = self._getMD(u, v) + tdir = clip(tdir, 0, 359.5) + tmag *= 1.94 # convert to knots + tmag.clip(0, 125, tmag) # clip speed to 125 knots + return (tmag, tdir) + +##------------------------------------------------------------------------- +## Uses a derivation of the Bourgouin allgorithm to calculate precipitation +## type, and other algorithms to determine the coverage and intensity. +## The Bourgoin technique figures out precip type from calculating how +## long a hydrometer is exposed to alternating layers of above zero (C) and +## below zero temperature layers. This tool calculates at each grid point +## which of the four Bourgouin cases apply. Then the appropriate algorithm +## is applied to that case that further refines the precip. type. Once the +## type is determined, other algorithms are used to determine the coverage +## and intensity. See the Weather and Forecasting Journal article Oct. 
2000, +## "A Method to Determine Precipitation Types", by Pierre Bourgouin +##------------------------------------------------------------------------- + def calcWx(self, QPF, T, t_c, gh_c, stopo, topo, tp_SFC, cp_SFC): + p_SFC = self.getSFCP(gh_c, stopo) + gh_c = gh_c[:13, :, :] + t_c = t_c[:13, :, :] + T = self.FtoK(T) + p_SFC = p_SFC / 100 # sfc pres. in mb + pres = self.pres + a1 = self.empty() + a2 = self.empty() + a3 = self.empty() + aindex = self.empty() + # Go through the levels to identify each case type 0-3 + for i in range(1, gh_c.shape[0] - 1): + # get the sfc pres. and temp. + pbot = where(greater(gh_c[i - 1], topo), pres[i - 1], p_SFC) + tbot = where(greater(gh_c[i - 1], topo), t_c[i - 1], T) + # Calculate the area of this layer in Temp/pres coordinates + a11, a22, cross = self.getAreas(pbot, tbot, pres[i], t_c[i]) + topomask = greater(gh_c[i], topo) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a11, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a11, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a11, a3) + topomask = logical_and(topomask, cross) + aindex = where(topomask, aindex + 1, aindex) + a1 = where(logical_and(equal(aindex, 0), topomask), + a1 + a22, a1) + a2 = where(logical_and(equal(aindex, 1), topomask), + a2 + a22, a2) + a3 = where(logical_and(equal(aindex, 2), topomask), + a3 + a22, a3) + + # Now apply a different algorithm for each type + key = ['::::', + "Wide:S:-::", "Wide:R:-::", + "Wide:S:-::^Wide:R:-::", + 'Wide:ZR:-::', 'Wide:IP:-::', + 'Wide:ZR:-::^Wide:IP:-::', + "Sct:SW:-::", "Sct:RW:-::", + "Sct:SW:-::^Sct:RW:-::", + "Chc:ZR:-::", 'Chc:IP:-::', + 'Chc:ZR:-::^Chc:IP:-::'] + + # Case d (snow) + wx = self.empty(int8) + snowmask = equal(aindex, 0) + wx[logical_and(snowmask, greater(a1, 0))] = 2 + wx[logical_and(snowmask, less_equal(a1, 0))] = 1 + + # Case c (rain / snow / rainSnowMix) + srmask = equal(aindex, 1) + wx[logical_and(srmask, less(a1, 5.6))] = 1 + wx[logical_and(srmask, 
greater(a1, 13.2))] = 2 + wx[logical_and(srmask, + logical_and(greater_equal(a1, 5.6), + less(a1, 13.2)))] = 3 + + # Case a (Freezing Rain / Ice Pellets) + ipmask = equal(aindex, 2) + ipm = greater(a1, a2 * 0.66 + 66) + wx[logical_and(ipmask, ipm)] = 5 + zrm = less(a1, a2 * 0.66 + 46) + wx[logical_and(ipmask, zrm)] = 4 + zrm = logical_not(zrm) + ipm = logical_not(ipm) + wx[logical_and(ipmask, logical_and(zrm, ipm))] = 6 + + # Case b (Ice pellets / rain) + cmask = greater_equal(aindex, 3) + ipmask = logical_and(less(a3, 2), cmask) + wx[logical_and(ipmask, less(a1, 5.6))] = 1 + wx[logical_and(ipmask, greater(a1, 13.2))] = 2 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 5.6), + less_equal(a1, 13.2)))] = 3 + + ipmask = logical_and(greater_equal(a3, 2), cmask) + wx[logical_and(ipmask, greater(a1, 66 + 0.66 * a2))] = 5 + wx[logical_and(ipmask, less(a1, 46 + 0.66 * a2))] = 4 + wx[logical_and(ipmask, logical_and(greater_equal(a1, 46 + 0.66 * a2), + less_equal(a1, 66 + 0.66 * a2)))] = 6 + + # Make showers (scattered/Chc) + convecMask = greater(cp_SFC / (tp_SFC + .001), 0.5) + wx[logical_and(not_equal(wx, 0), convecMask)] += 6 + + # Thunder + for i in range(len(key)): + tcov = string.split(key[i], ":")[0] + if tcov == "Chc" or tcov == "": + tcov = "Sct" + key.append(key[i] + "^" + tcov + + ":T:::") +# wx = where(less_equal(bli_BL0180, -3), wx + 13, wx) + + # No wx where no qpf + wx[less(QPF, 0.01)] = 0 + return(wx, key) + +##------------------------------------------------------------------------- +## Calculates chance of wetting rain based on QPF. 
+##------------------------------------------------------------------------- + def calcCWR(self, QPF): + m1 = less(QPF, 0.01) # all the places that are dry + m2 = greater_equal(QPF, 0.3) # all the places that are wet + # all the places that are 0.01 to 0.10 + m3 = logical_and(greater_equal(QPF, 0.01), less_equal(QPF, 0.1)) + # all the places that are 0.1 to 0.3 + m4 = logical_and(greater(QPF, 0.1), less(QPF, 0.3)) + # assign 0 to the dry grid point, 100 to the wet grid points, + # and a ramping function to all point in between + cwr = where(m1, float32(0), where(m2, float32(100), + where(m3, 444.4 * (QPF - 0.01) + 10, + where(m4, 250 * (QPF - 0.1) + 50, + QPF)))) + return cwr + +##------------------------------------------------------------------------- +## Calculates Lightning Activity Level based on total precip. +## convenctivePrecip, and 3-D relative humidity. +##------------------------------------------------------------------------- + def calcLAL(self, tp_SFC, cp_SFC, rh_c, rh_BL030): + cp_SFC = where(equal(cp_SFC, 0), float32(0.00001), cp_SFC) + lal = self.newGrid(1) + # Add one to lal if QPF > 0.5 + lal[logical_and(greater(cp_SFC, 0), greater(tp_SFC / cp_SFC, 0.5))] += 1 + # make an average rh field + midrh = add.reduce(rh_c[6:9], 0) / 3 + lal[logical_and(greater(midrh, 70), less(rh_BL030, 30))] += 1 + + # Add on to lal if lifted index is <-3 and another if <-5 + lal[greater(cp_SFC, 2.0)] += 1 + lal[greater(cp_SFC, 8.0)] += 1 + + return lal + + +def main(): + gfsLRForecaster().run() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/Generator.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/Generator.py index 9bc7219d77..c68323349c 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/Generator.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/Generator.py @@ -1,818 +1,820 @@ -## -# This 
software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# -# Generate site specific text products. -# -# This script is run at install time to customize a set of the text products -# for a given site. -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Jun 23, 2008 1180 jelkins Initial creation -# Jul 08, 2008 1222 jelkins Modified for use within Java -# Jul 09, 2008 1222 jelkins Split command line loader from class -# Jul 24, 2012 #944 dgilling Refactored to support separate -# generation of products and utilities. -# Sep 07, 2012 #1150 dgilling Ensure all necessary dirs get created. -# May 12, 2014 2536 bclement renamed text plugin to include uf in name -# Oct 20, 2014 #3685 randerso Changed how SiteInfo is loaded. -# Fixed logging to log to a file -# Cleaned up how protected file updates are returned -# Jan 23, 2015 #4027 randerso Cleaned up import of SiteInfo -# Apr 25, 2015 4952 njensen Updated for new JEP API -# Jul 18, 2016 5747 dgilling Move edex_static to common_static. -# Aug 09, 2016 5801 tgurney Use AfosToAwipsLookup -# Sep 28, 2016 19293 randerso Added filtering by productsPerDomain -# Nov 21, 2016 5959 njensen Removed unused imports and made more pythonic -# Nov 28, 2016 5939 dgilling Add override behavior comment to -# generated files. 
-# Mar 17, 2017 5935 mapeters Updated override behavior comments -# in generated files -# Aug 08, 2017 6379 njensen Use ProtectedFileLookup -# -# @author: jelkins -# -## - -## -# This is a base file that is not intended to be overridden. -## - - - -__version__ = "1.0" - -import errno -import os -from os.path import basename -from os.path import join -from os.path import dirname -from com.raytheon.uf.common.localization import PathManagerFactory -from com.raytheon.uf.common.localization import LocalizationContext -from com.raytheon.uf.common.protectedfiles import ProtectedFileLookup -LocalizationType = LocalizationContext.LocalizationType -LocalizationLevel = LocalizationContext.LocalizationLevel - - -# SCRIPT_DIR passed in from Java - -# ---- Standard Paths ---------------------------------------------- - -LIBRARY_DIR = join(SCRIPT_DIR , "library") -TEMPLATE_DIR = join(SCRIPT_DIR , "templates") -PREFERENCE_DIR = join(SCRIPT_DIR , "preferences") - -# ---- "Import" Preferences and Library Files ------------------------------- -from sys import path -path.append(join(LIBRARY_DIR,"../")) -path.append(join(PREFERENCE_DIR,"../")) - -from preferences.configureTextProducts import NWSProducts as NWS_PRODUCTS - -from os.path import basename -from os.path import dirname -from os.path import abspath -from os.path import join - -# ---- Setup Logging ---------------------------------------------------------- -import logging -from time import strftime, gmtime -timeStamp = strftime("%Y%m%d", gmtime()) -logFile = '/awips2/edex/logs/configureTextProducts-'+timeStamp+'.log' - -LOG = logging.getLogger("configureTextProducts") -LOG.setLevel(logging.DEBUG) -handler = logging.FileHandler(logFile) -handler.setLevel(logging.DEBUG) -formatter = logging.Formatter("%(levelname)-5s %(asctime)s [%(process)d:%(thread)d] %(filename)s: %(message)s") -handler.setFormatter(formatter) -for h in LOG.handlers: - LOG.removeHandler(h) -LOG.addHandler(handler) - - -# List of protected files -fileList = [] 
- -#Installation information for product formatters. -#Directories to Process, src/dest -ProcessDirectories = [ - { - 'src': "gfe/textproducts/templates/product", - 'dest': "textProducts" - }, - { - 'src': "gfe/textproducts/templates/utility", - 'dest': "textUtilities/regular" - }, - ] - -# The following three constants are used by the __Substitutor class to add -# override behavior comments to the generated files. The first comment appears -# in every file because we protect the base templates from override. For -# generated template text product files, we replace the first comment with the -# second. For generated files that we do not protect, we replace the first -# comment with the third. -BASE_TEMPLATE_OVERRIDE_COMMENT =\ -"""## -# This is a base file that is not intended to be overridden. -## -""" - -GENERATED_TEMPLATE_OVERRIDE_COMMENT =\ -"""## -# This is a base file that is not intended to be overridden. -# -# This file can be subclassed to override behavior. Please see the -# GFE Training Guide->GFE Text Products User Guide section of the GFE Online -# Help for guidance on creating a new text product. -## -""" - -GENERATED_LOCAL_OVERRIDE_COMMENT =\ -"""## -# This is an absolute override file, indicating that a higher priority version -# of the file will completely replace a lower priority version of the file. -## -""" - -from SiteCFG import SiteInfo - -class Generator(): - """Generates site specific text products from base template files. - - This class handles the substituting and renaming of template files. - """ - - def __init__(self): - """Class constructor""" - self.__destination = None - self.__siteId = None - - def setSiteId(self,siteId): - """Set the site ID - - Checks if the given site ID is valid and sets the site ID. 
- - @param siteId: site ID - @type siteId: string - - @raise LookupError: when the site ID is invalid - """ - if siteId in SiteInfo.keys(): - self.__siteId = siteId - else: - raise LookupError, ' unknown WFO: ' + siteId - - def setDestination(self, value): - """Set this generator's output directory - - Verifies the directory exists and is writable - - @param value: this value should be a fully qualified path - @type value: string - - @raise IOError: when the directory does not exist or is not writable - """ - - try: - os.makedirs(value, 0755) - except OSError, e: - if e.errno != errno.EEXIST: - LOG.warn("%s: '%s'" % (e.strerror,e.filename)) - - self.__destination = value - - - def getSiteId(self): - """Get the site ID - - @return: the site ID - @rtype: string - """ - return self.__siteId - - def getDestination(self): - """The directory into which the generated files are placed - - @return: the directory in which the generated files are placed - @rtype: string - """ - return self.__destination - - def create(self, siteId, destinationDir): - """Create text products""" - LOG.debug("Configuring Text Products for %s........" % siteId) - - try: - self.setSiteId(siteId) - except LookupError: - LOG.warning(siteId + "is not a known WFO. 
Skipping text formatter configuration.") - return - - self.setDestination(destinationDir) - - self.__delete() - self.__createPilDictionary(self.__siteId) - - created = 0 - for dirInfo in ProcessDirectories: - created += self.__create(dirInfo['src'], dirInfo['dest']) - LOG.info("%d text products created" % created) - LOG.debug("Configuration of Text Products for %s finished" % siteId) - - return self.getProtectedFiles() - - def delete(self): - """Delete text products""" - - LOG.debug("Deleting Text Products Begin........") - deleted = self.__delete() - - LOG.info("%d text products deleted" % deleted) - - # ---- Delete Empty Directory ----------------------------------------- - - for dirInfo in ProcessDirectories: - try: - os.rmdir(os.path.join(self.getDestination(), dirInfo['dest'])) - except OSError, description: - LOG.warn("unable to remove directory (%s)" % description) - pass - - def info(self): - """Text product information for this site""" - - LOG.debug("PIL Information for %s Begin......" 
% self.__siteId) - pils = self.__createPilDictionary(self.__siteId) - - self.__printPilDictionary(pils) - - LOG.info("%d total PILs found" % len(pils)) - - LOG.debug("PIL Information for %s End" % self.__siteId) - - def allinfo(self): - """Text product information for all sites in this generator's a2a file - """ - found = 0 - - LOG.debug("PIL Information for all sites Begin.......") - - for site in SiteInfo.keys(): - LOG.info("--------------------------------------------") - LOG.info("%s %s %s" % (site, - SiteInfo[site]['fullStationID'], - SiteInfo[site]['wfoCityState'])) - pils = self.__createPilDictionary(site) - self.__printPilDictionary(pils) - found += len(pils) - - LOG.info("%d total PILs found" % found) - LOG.debug("PIL Information for all sites End") - - # ---- Private Methods -------------------------------------------------- - - class __Substitutor(): - """Substitute values inside file contents and filenames - - This class contains all methods capable of performing the substitution. - """ - - def __init__(self, product, pilInfo, multiPilFlag, siteID, allowsOverride, isTextProduct): - """Class constructor - - Initialize the class - - @param product: product in which substitution is performed - @type product: string - - @param pilInfo: PIL information for the product - @type pilInfo: dictionary - - @param multiPilFlag: indicates there is more than one Pil - @type multiPilFlag: boolean - - @type siteID: string - - @param allowsOverride: indicates whether the generated file can be overridden. 
- @type allowsOverride: boolean - - @param isTextProduct: indicates whether the file is a text product or utility - @type isTextProduct: boolean - """ - self.__product = product - self.__pilInfo = pilInfo - self.__multiPilFlag = multiPilFlag - self.__siteID = siteID - self.__allowsOverride = bool(allowsOverride) - self.__isTextProduct = isTextProduct - - - def substituteKeywords(self, subDict, textData): - """Replace all instances of the find: replace pairs in the text - - Given a dictionary of keywords and their values, along with text - data, performs the string substitution and returns the updated text. - - @param subDict: words to find (keys) and what to replace (value) - them with - @type subDict: dictionary - - @param textData: text in which to find and replace values - @type textData: string - - @return: the updated text - @rtype: string - """ - from copy import deepcopy - from string import replace - - txt = deepcopy(textData) - for k in subDict.keys(): - txt = replace(txt, k, subDict[k]) - return txt - - def replaceText(self,contents): - """Replace the contents of the template with the correct values - - Build the substitution dictionary and perform substitution - - @param contents: the template contents - @type contents: string - - @return: the contents with all template values substituted - @rtype: string - """ - - siteid = self.__siteID - product = self.__product - multiPilFlag = self.__multiPilFlag - pilInfo = self.__pilInfo - - from preferences.configureTextProducts import ProductToStandardMapping - - subDict = {} - subDict[''] = siteid.strip() - subDict[''] = SiteInfo[siteid]['region'].strip() - subDict[''] = SiteInfo[siteid]['wfoCityState'].strip() - subDict[''] = SiteInfo[siteid]['wfoCity'].strip() - subDict[''] = SiteInfo[siteid]['fullStationID'].strip() - subDict[''] = SiteInfo[siteid]['state'].strip() - if product is not None: - subDict[''] = product.strip() - if ProductToStandardMapping.has_key(product): - subDict[''] = 
ProductToStandardMapping[product].strip() - else: - subDict[''] = product.strip() - if pilInfo is not None: - for k in pilInfo.keys(): - subDict['<' + k + '>'] = pilInfo[k].strip() - if pilInfo is not None and pilInfo.has_key("pil") and multiPilFlag: - subDict[''] = pilInfo["pil"][3:6].strip() #pil=nnnxxx, want xxx - else: - subDict['_'] = "" #no multiple pils - if self.__allowsOverride: - subDict[BASE_TEMPLATE_OVERRIDE_COMMENT] = GENERATED_LOCAL_OVERRIDE_COMMENT - elif self.__isTextProduct: - #include subclass comment for text products, not utilities - subDict[BASE_TEMPLATE_OVERRIDE_COMMENT] = GENERATED_TEMPLATE_OVERRIDE_COMMENT - - return self.substituteKeywords(subDict, contents) - - def replaceFilename(self,fileName): - """Replace any template variables in the filename with correct - values - - @param fileName: the filename for this product - @type fileName: string - - @return: the updated filename - @rtype: string - """ - - siteid = self.__siteID - product = self.__product - multiPilFlag = self.__multiPilFlag - pilInfo = self.__pilInfo - - subDict = {} - subDict['Site'] = siteid.strip() - subDict['Region'] = SiteInfo[siteid]['region'].strip() - if product is not None: - subDict['Product'] = product.strip() - if pilInfo is not None and pilInfo.has_key("pil") and multiPilFlag: - subDict['MultiPil'] = pilInfo["pil"][3:6].strip() #xxx of nnnxxx - else: - subDict['_MultiPil'] = "" #no pil information, remove entry - - return self.substituteKeywords(subDict,fileName) - - def __getTemplateFiles(self, srcDir): - """Get a list of all template files - - @return: a list of all template files - @rtype: list - """ - - pathMgr = PathManagerFactory.getPathManager() - baseCtx = pathMgr.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.BASE) - templateFiles = pathMgr.listFiles(baseCtx, srcDir, None, False, True) - return templateFiles - - def __printPilDictionary(self, pillist): - """Prints the PIL dictionary information - - @param pillist: site specific PIL 
information - @type pillist: dictionary - """ - - pils = pillist.keys() - pils.sort() - LOG.info("PIL List: (product) (pil, wmoid, site)") - for p in pils: - LOG.info("%s %s" % (p,pillist[p])) - - def __createPilDictionary(self, siteid): - """Update the SiteInfo with a PIL dictionary - - Read the a2a data from the database, create PIL information, and add the information - to the SiteInfo dictionary. - - @param site: the site for which PIL information is created - @type site: string - - @return: the PILS found for the site - @rtype: dictionary - """ - - siteD = SiteInfo[siteid] - stationID4 = siteD['fullStationID'] - - from com.raytheon.uf.edex.plugin.text import AfosToAwipsLookup - - response = AfosToAwipsLookup.lookupAfosId(stationID4, None, None) - errMsg = response.getErrorMessage() - if errMsg is not None and "" != errMsg.strip(): - raise Exception(errMsg) - - siteTable = [] - # idList is a Java list of non-String objects. - # use the Java iterator - idListIt = response.getIdList().iterator() - while( idListIt.hasNext() ): - item = idListIt.next() - pil = item.getAfosid() - wmoid = item.getWmottaaii() - site4 = item.getWmocccc() - if pil[6:] == "AER" and siteid == "ALU" or \ - pil[6:] == "ALU" and siteid == "AER": #ignore other AFC domain - continue - if pil[3:6] == "OFF" and pil[6:] in ['N11','N12','N13','N14']: - continue - siteTable.append((pil, wmoid, site4)) - - # get the pil list for each product - pillist = {} - for nnn in NWS_PRODUCTS: - found = 0 - pilEntries = [] - for pil, wmoid, site4 in siteTable: - if pil[3:6] == nnn: - pilEntries.append((pil, wmoid, site4)) - - pillist[nnn] = pilEntries - d = [] #list of entries for this particular pil - for pil, wmoid, site4 in pilEntries: - e = {} - e['wmoID'] = wmoid - e['pil'] = pil[3:] - e['textdbPil'] = pil - e['awipsWANPil'] = site4 + pil[3:] - d.append(e) - siteD[nnn] = d #store the pil dictionary back into the SiteInfo - - return pillist - - def __getProductFromFilename(self,regularNameBase): - """get 
product from filename - - Looks for exact matches in the DirectFileToProductMapping dictionary, - which contains only those names that you can't determine the - product id from the name. If name begins with Product, then returns - the set of products based on the TemplatedProducts dictionary. - - @param regularNameBase: the filename without the extension - @type regularNameBase: string - - @return a set of products based on the TemplatedProducts dictionary - @rtype: list of strings - """ - import types, string - from preferences.configureTextProducts import DirectFileToProductMapping - from preferences.configureTextProducts import TemplatedProducts - - if regularNameBase == "NONE": - return ["NONE"] - - #in the direct mapping dictionary? - if DirectFileToProductMapping.has_key(regularNameBase): - v = DirectFileToProductMapping[regularNameBase] - if type(v) is types.StringType: - return [v] - else: - return v - - #look for Product in the filename - index = string.find(regularNameBase, "Product") - if index == 0: - if TemplatedProducts.has_key(regularNameBase): - return TemplatedProducts[regularNameBase] - else: - return None - - #attempt to extract out the PIL based on the filename. Look for - #3 uppercase letters, separated by an underscore. This entry must - #exist in the NWSProducts directory. - sections = string.split(regularNameBase, "_") - for sec in sections: - if len(sec) == 3 and sec in NWS_PRODUCTS: - return [sec] - - #got here, don't know what this product is - LOG.error("Unknown Product: %s" % regularNameBase) - return None - - def __create(self, srcDir, destDir): - """Build the formatters for this site - - Substitute the appropriate values into the templates. Name and place - the generated file into the correct destination. 
- - @return the number of products that were created - @rtype: number - """ - LOG.debug("Processing Formatter Templates.......") - - from preferences.configureTextProducts import productsPerDomain - - try: - os.makedirs(join(self.getDestination(), destDir)) - except OSError, e: - if e.errno != errno.EEXIST: - LOG.error("%s: '%s'" % (e.strerror,e.filename)) - return 0 - - isTextProduct = (destDir == "textProducts") - - siteid = self.__siteId - productsWritten = 0 - - # ---- Gather a list of all template Files -------------------------- - - templateFiles = self.__getTemplateFiles(srcDir) - - # ---- Process the files --------------------------------------------- - - import stat, string - for lf in templateFiles: - - fileName = str(lf.getFile().getAbsolutePath()) - isProtected = ProtectedFileLookup.isProtected(lf) - - #skip svn files - if string.find(fileName,".svn-base") != -1: - continue - - #decode the filename - regularName = basename(fileName) - index = string.find(regularName, '.') - if index != -1: - regNameExt = regularName[index:] - regNameBase = regularName[0:index] - else: - regNameExt = "" - regNameBase = regularName - - #read the source - try: - fd = open(fileName) - fileContents = fd.read() - fd.close() - except IOError, (num,info): - LOG.warn("Unable to read template (%s): %s" % (info, - basename(fileName))) - continue - - #get source permissions - perms = os.stat(fileName)[stat.ST_MODE] & 0777 - - LOG.debug("=========================================" + \ - "=======================") - LOG.debug("file: %s" % fileName[len(SCRIPT_DIR):]) - LOG.debug("regularNameBase: %s" % regNameBase) - LOG.debug("RegNameExt: %s" % regNameExt) - #determine products, for templated could be more than 1 - products = self.__getProductFromFilename(regNameBase) - if products is None: - continue - LOG.debug("PRODUCTS: %s" % products) - # ---- Process the products ------------------------------------ - - for product in products: - LOG.debug(" --------------") - LOG.debug(" 
PRODUCT: %s" % product) - - # get the pil information - pilNames = self.__getProductFromFilename(product) #indirect - LOG.debug(" PRODUCT PIL: %s" % pilNames) - - if pilNames is None or len(pilNames) == 0: - continue - - # extract out the pil information from the dictionary - if SiteInfo[siteid].has_key(pilNames[0]): - pils = SiteInfo[siteid][pilNames[0]] - else: - #set pils to empty list if none defined - pils = [{'awipsWANPil': 'kssscccnnn', - 'textdbPil': 'cccnnnxxx', - 'pil': 'nnnxxx', 'wmoID': 'wmoidd'}] - LOG.debug(" PILS: %s" % pils) - - # ---- Process each PIL --------------------------------------- - - for pilInfo in pils: - - if product != 'NONE' and \ - productsPerDomain.has_key(siteid) and \ - not pilInfo['pil'] in productsPerDomain[siteid]: - LOG.debug("Skipping %s, not in productsPerDomain[%s]", pilInfo['pil'], siteid) - continue - - multiple = len(pils) > 1 - - # ---- Perform Substitutions ----------------------------- - - substitutor = self.__Substitutor(product,pilInfo,multiple, - self.__siteId, not isProtected, isTextProduct) - - newFileContents = substitutor.replaceText(fileContents) - destName = substitutor.replaceFilename(regNameBase) - - # ---- Output to File ----------------------------------- - - destFilename = join(self.getDestination(), destDir, destName) + regNameExt - LOG.debug(" ---> %s" % destFilename) - - try: - os.chmod(destFilename, 0644) - os.remove(destFilename) - LOG.debug(""" Replacing the existing file: %s - - The file may already exist due to any of the following - reasons: - - 1. Multiple PILS exist but the template filename does - not have 'MultiPil' in its name. - 2. Multiple Products exist but the template filename - does not have 'Product' in its name. - 3. The destination directory was not previously cleared. 
- """ % destName) - productsWritten = productsWritten - 1 - except: - pass - - try: - fd = open(destFilename, 'w', 0644) - fd.write(newFileContents) - fd.close() - os.chmod(destFilename, perms) - productsWritten = productsWritten + 1 - - # Set protected if file is read only - isWritable = os.access(destFilename, os.W_OK) - - if isProtected: - self.addProtection(destFilename) - - except IOError, (number, description): - LOG.warn(" %s" % description) - - # write out the protected file - - - return productsWritten - - def addProtection(self, filename): - # cleanup the filename - parts = filename.split("configured/"+self.__siteId) - newName = "CONFIGURED:cave_static" + parts[1] - - fileList.append(newName) - - def getProtectedFiles(self): - return fileList - - def __delete(self): - """Delete all pre-existing text templates - - @return the number of pre-existing products that were deleted - @rtype: number - """ - - productsRemoved = 0 - - import string - import glob - import os - - LOG.debug(" Deleting Existing Baseline Templates Begin........") - - from preferences.configureTextProducts import templateProds - - #make a list of all known products - allProducts = [] - for p in NWS_PRODUCTS: - allProducts.append(p) - for p in templateProds: - allProducts.append(p) - - for dirInfo in ProcessDirectories: - templateFiles = self.__getTemplateFiles(dirInfo['src']) - - #determine potential files, based on the template files - for lf in templateFiles: - tf = str(lf.getFile().getAbsolutePath()) - - LOG.debug("Template= %s" % basename(tf)) - mname = basename(tf) - globExpressions = [] - - #wildcard the Site - mname = string.replace(mname, "Site", "???") #always 3 chars - - #wildcard the Region - mname = string.replace(mname, "Region", "??") #always 2 chars - - #wildcard the _MultiPil - wcards = [] - if string.find(mname, "_MultiPil") != -1: - wcards.append(string.replace(mname, "_MultiPil", "")) - wcards.append(string.replace(mname, "_MultiPil", "_?")) - 
wcards.append(string.replace(mname, "_MultiPil", "_??")) - wcards.append(string.replace(mname, "_MultiPil", "_???")) - else: - wcards.append(mname) - - #wildcard the Product - if string.find(mname, "Product") == 0: - for wc in wcards: - for prd in allProducts: - ge = prd + wc[7:] #Product is first 7 characters - globExpressions.append(ge) - - #simple case - Product does not need to be expanded - else: - for wc in wcards: - globExpressions.append(wc) - - for g in globExpressions: - searchString = join(self.getDestination(), dirInfo['dest'], g) - delfiles = glob.glob(searchString) - - for fn in delfiles: - #delete any existing file - try: - os.chmod(fn, 0644) - os.remove(fn) - LOG.debug(" DEL---> %s" % fn) - productsRemoved += 1 - except: - pass - - LOG.debug(" Deleting Existing Baseline Templates Finished........") - - return productsRemoved - -def runFromJava(siteId, destinationDir): - generator = Generator() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# +# Generate site specific text products. +# +# This script is run at install time to customize a set of the text products +# for a given site. 
+# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jun 23, 2008 1180 jelkins Initial creation +# Jul 08, 2008 1222 jelkins Modified for use within Java +# Jul 09, 2008 1222 jelkins Split command line loader from class +# Jul 24, 2012 #944 dgilling Refactored to support separate +# generation of products and utilities. +# Sep 07, 2012 #1150 dgilling Ensure all necessary dirs get created. +# May 12, 2014 2536 bclement renamed text plugin to include uf in name +# Oct 20, 2014 #3685 randerso Changed how SiteInfo is loaded. +# Fixed logging to log to a file +# Cleaned up how protected file updates are returned +# Jan 23, 2015 #4027 randerso Cleaned up import of SiteInfo +# Apr 25, 2015 4952 njensen Updated for new JEP API +# Jul 18, 2016 5747 dgilling Move edex_static to common_static. +# Aug 09, 2016 5801 tgurney Use AfosToAwipsLookup +# Sep 28, 2016 19293 randerso Added filtering by productsPerDomain +# Nov 21, 2016 5959 njensen Removed unused imports and made more pythonic +# Nov 28, 2016 5939 dgilling Add override behavior comment to +# generated files. +# Mar 17, 2017 5935 mapeters Updated override behavior comments +# in generated files +# Aug 08, 2017 6379 njensen Use ProtectedFileLookup +# +# @author: jelkins +# +## + +## +# This is a base file that is not intended to be overridden. 
+## + + + +__version__ = "1.0" + +import errno +import os +from os.path import basename +from os.path import join +from os.path import dirname +from com.raytheon.uf.common.localization import PathManagerFactory +from com.raytheon.uf.common.localization import LocalizationContext +from com.raytheon.uf.common.protectedfiles import ProtectedFileLookup +LocalizationType = LocalizationContext.LocalizationType +LocalizationLevel = LocalizationContext.LocalizationLevel + + +# SCRIPT_DIR passed in from Java + +# ---- Standard Paths ---------------------------------------------- + +LIBRARY_DIR = join(SCRIPT_DIR , "library") +TEMPLATE_DIR = join(SCRIPT_DIR , "templates") +PREFERENCE_DIR = join(SCRIPT_DIR , "preferences") + +# ---- "Import" Preferences and Library Files ------------------------------- +from sys import path +path.append(join(LIBRARY_DIR,"../")) +path.append(join(PREFERENCE_DIR,"../")) + +from .preferences.configureTextProducts import NWSProducts as NWS_PRODUCTS + +from os.path import basename +from os.path import dirname +from os.path import abspath +from os.path import join + +# ---- Setup Logging ---------------------------------------------------------- +import logging +from time import strftime, gmtime +timeStamp = strftime("%Y%m%d", gmtime()) +logFile = '/awips2/edex/logs/configureTextProducts-'+timeStamp+'.log' + +LOG = logging.getLogger("configureTextProducts") +LOG.setLevel(logging.DEBUG) +handler = logging.FileHandler(logFile) +handler.setLevel(logging.DEBUG) +formatter = logging.Formatter("%(levelname)-5s %(asctime)s [%(process)d:%(thread)d] %(filename)s: %(message)s") +handler.setFormatter(formatter) +for h in LOG.handlers: + LOG.removeHandler(h) +LOG.addHandler(handler) + + +# List of protected files +fileList = [] + +#Installation information for product formatters. 
+#Directories to Process, src/dest +ProcessDirectories = [ + { + 'src': "gfe/textproducts/templates/product", + 'dest': "textProducts" + }, + { + 'src': "gfe/textproducts/templates/utility", + 'dest': "textUtilities/regular" + }, + ] + +# The following three constants are used by the __Substitutor class to add +# override behavior comments to the generated files. The first comment appears +# in every file because we protect the base templates from override. For +# generated template text product files, we replace the first comment with the +# second. For generated files that we do not protect, we replace the first +# comment with the third. +BASE_TEMPLATE_OVERRIDE_COMMENT =\ +"""## +# This is a base file that is not intended to be overridden. +## +""" + +GENERATED_TEMPLATE_OVERRIDE_COMMENT =\ +"""## +# This is a base file that is not intended to be overridden. +# +# This file can be subclassed to override behavior. Please see the +# GFE Training Guide->GFE Text Products User Guide section of the GFE Online +# Help for guidance on creating a new text product. +## +""" + +GENERATED_LOCAL_OVERRIDE_COMMENT =\ +"""## +# This is an absolute override file, indicating that a higher priority version +# of the file will completely replace a lower priority version of the file. +## +""" + +from SiteCFG import SiteInfo + +class Generator(): + """Generates site specific text products from base template files. + + This class handles the substituting and renaming of template files. + """ + + def __init__(self): + """Class constructor""" + self.__destination = None + self.__siteId = None + + def setSiteId(self,siteId): + """Set the site ID + + Checks if the given site ID is valid and sets the site ID. 
+ + @param siteId: site ID + @type siteId: string + + @raise LookupError: when the site ID is invalid + """ + if siteId in list(SiteInfo.keys()): + self.__siteId = siteId + else: + raise LookupError(' unknown WFO: ' + siteId) + + def setDestination(self, value): + """Set this generator's output directory + + Verifies the directory exists and is writable + + @param value: this value should be a fully qualified path + @type value: string + + @raise IOError: when the directory does not exist or is not writable + """ + + try: + os.makedirs(value, 0o755) + except OSError as e: + if e.errno != errno.EEXIST: + LOG.warn("%s: '%s'" % (e.strerror,e.filename)) + + self.__destination = value + + + def getSiteId(self): + """Get the site ID + + @return: the site ID + @rtype: string + """ + return self.__siteId + + def getDestination(self): + """The directory into which the generated files are placed + + @return: the directory in which the generated files are placed + @rtype: string + """ + return self.__destination + + def create(self, siteId, destinationDir): + """Create text products""" + LOG.debug("Configuring Text Products for %s........" % siteId) + + try: + self.setSiteId(siteId) + except LookupError: + LOG.warning(siteId + "is not a known WFO. 
Skipping text formatter configuration.") + return + + self.setDestination(destinationDir) + + self.__delete() + self.__createPilDictionary(self.__siteId) + + created = 0 + for dirInfo in ProcessDirectories: + created += self.__create(dirInfo['src'], dirInfo['dest']) + LOG.info("%d text products created" % created) + LOG.debug("Configuration of Text Products for %s finished" % siteId) + + return self.getProtectedFiles() + + def delete(self): + """Delete text products""" + + LOG.debug("Deleting Text Products Begin........") + deleted = self.__delete() + + LOG.info("%d text products deleted" % deleted) + + # ---- Delete Empty Directory ----------------------------------------- + + for dirInfo in ProcessDirectories: + try: + os.rmdir(os.path.join(self.getDestination(), dirInfo['dest'])) + except OSError as description: + LOG.warn("unable to remove directory (%s)" % description) + pass + + def info(self): + """Text product information for this site""" + + LOG.debug("PIL Information for %s Begin......" 
% self.__siteId) + pils = self.__createPilDictionary(self.__siteId) + + self.__printPilDictionary(pils) + + LOG.info("%d total PILs found" % len(pils)) + + LOG.debug("PIL Information for %s End" % self.__siteId) + + def allinfo(self): + """Text product information for all sites in this generator's a2a file + """ + found = 0 + + LOG.debug("PIL Information for all sites Begin.......") + + for site in list(SiteInfo.keys()): + LOG.info("--------------------------------------------") + LOG.info("%s %s %s" % (site, + SiteInfo[site]['fullStationID'], + SiteInfo[site]['wfoCityState'])) + pils = self.__createPilDictionary(site) + self.__printPilDictionary(pils) + found += len(pils) + + LOG.info("%d total PILs found" % found) + LOG.debug("PIL Information for all sites End") + + # ---- Private Methods -------------------------------------------------- + + class __Substitutor(): + """Substitute values inside file contents and filenames + + This class contains all methods capable of performing the substitution. + """ + + def __init__(self, product, pilInfo, multiPilFlag, siteID, allowsOverride, isTextProduct): + """Class constructor + + Initialize the class + + @param product: product in which substitution is performed + @type product: string + + @param pilInfo: PIL information for the product + @type pilInfo: dictionary + + @param multiPilFlag: indicates there is more than one Pil + @type multiPilFlag: boolean + + @type siteID: string + + @param allowsOverride: indicates whether the generated file can be overridden. 
+ @type allowsOverride: boolean + + @param isTextProduct: indicates whether the file is a text product or utility + @type isTextProduct: boolean + """ + self.__product = product + self.__pilInfo = pilInfo + self.__multiPilFlag = multiPilFlag + self.__siteID = siteID + self.__allowsOverride = bool(allowsOverride) + self.__isTextProduct = isTextProduct + + + def substituteKeywords(self, subDict, textData): + """Replace all instances of the find: replace pairs in the text + + Given a dictionary of keywords and their values, along with text + data, performs the string substitution and returns the updated text. + + @param subDict: words to find (keys) and what to replace (value) + them with + @type subDict: dictionary + + @param textData: text in which to find and replace values + @type textData: string + + @return: the updated text + @rtype: string + """ + from copy import deepcopy + from string import replace + + txt = deepcopy(textData) + for k in list(subDict.keys()): + txt = replace(txt, k, subDict[k]) + return txt + + def replaceText(self,contents): + """Replace the contents of the template with the correct values + + Build the substitution dictionary and perform substitution + + @param contents: the template contents + @type contents: string + + @return: the contents with all template values substituted + @rtype: string + """ + + siteid = self.__siteID + product = self.__product + multiPilFlag = self.__multiPilFlag + pilInfo = self.__pilInfo + + from .preferences.configureTextProducts import ProductToStandardMapping + + subDict = {} + subDict[''] = siteid.strip() + subDict[''] = SiteInfo[siteid]['region'].strip() + subDict[''] = SiteInfo[siteid]['wfoCityState'].strip() + subDict[''] = SiteInfo[siteid]['wfoCity'].strip() + subDict[''] = SiteInfo[siteid]['fullStationID'].strip() + subDict[''] = SiteInfo[siteid]['state'].strip() + if product is not None: + subDict[''] = product.strip() + if product in ProductToStandardMapping: + subDict[''] = 
ProductToStandardMapping[product].strip() + else: + subDict[''] = product.strip() + if pilInfo is not None: + for k in list(pilInfo.keys()): + subDict['<' + k + '>'] = pilInfo[k].strip() + if pilInfo is not None and "pil" in pilInfo and multiPilFlag: + subDict[''] = pilInfo["pil"][3:6].strip() #pil=nnnxxx, want xxx + else: + subDict['_'] = "" #no multiple pils + if self.__allowsOverride: + subDict[BASE_TEMPLATE_OVERRIDE_COMMENT] = GENERATED_LOCAL_OVERRIDE_COMMENT + elif self.__isTextProduct: + #include subclass comment for text products, not utilities + subDict[BASE_TEMPLATE_OVERRIDE_COMMENT] = GENERATED_TEMPLATE_OVERRIDE_COMMENT + + return self.substituteKeywords(subDict, contents) + + def replaceFilename(self,fileName): + """Replace any template variables in the filename with correct + values + + @param fileName: the filename for this product + @type fileName: string + + @return: the updated filename + @rtype: string + """ + + siteid = self.__siteID + product = self.__product + multiPilFlag = self.__multiPilFlag + pilInfo = self.__pilInfo + + subDict = {} + subDict['Site'] = siteid.strip() + subDict['Region'] = SiteInfo[siteid]['region'].strip() + if product is not None: + subDict['Product'] = product.strip() + if pilInfo is not None and "pil" in pilInfo and multiPilFlag: + subDict['MultiPil'] = pilInfo["pil"][3:6].strip() #xxx of nnnxxx + else: + subDict['_MultiPil'] = "" #no pil information, remove entry + + return self.substituteKeywords(subDict,fileName) + + def __getTemplateFiles(self, srcDir): + """Get a list of all template files + + @return: a list of all template files + @rtype: list + """ + + pathMgr = PathManagerFactory.getPathManager() + baseCtx = pathMgr.getContext(LocalizationType.COMMON_STATIC, LocalizationLevel.BASE) + templateFiles = pathMgr.listFiles(baseCtx, srcDir, None, False, True) + return templateFiles + + def __printPilDictionary(self, pillist): + """Prints the PIL dictionary information + + @param pillist: site specific PIL information + 
@type pillist: dictionary + """ + + pils = list(pillist.keys()) + pils.sort() + LOG.info("PIL List: (product) (pil, wmoid, site)") + for p in pils: + LOG.info("%s %s" % (p,pillist[p])) + + def __createPilDictionary(self, siteid): + """Update the SiteInfo with a PIL dictionary + + Read the a2a data from the database, create PIL information, and add the information + to the SiteInfo dictionary. + + @param site: the site for which PIL information is created + @type site: string + + @return: the PILS found for the site + @rtype: dictionary + """ + + siteD = SiteInfo[siteid] + stationID4 = siteD['fullStationID'] + + from com.raytheon.uf.edex.plugin.text import AfosToAwipsLookup + + response = AfosToAwipsLookup.lookupAfosId(stationID4, None, None) + errMsg = response.getErrorMessage() + if errMsg is not None and "" != errMsg.strip(): + raise Exception(errMsg) + + siteTable = [] + # idList is a Java list of non-String objects. + # use the Java iterator + idListIt = response.getIdList().iterator() + while( idListIt.hasNext() ): + item = next(idListIt) + pil = item.getAfosid() + wmoid = item.getWmottaaii() + site4 = item.getWmocccc() + if pil[6:] == "AER" and siteid == "ALU" or \ + pil[6:] == "ALU" and siteid == "AER": #ignore other AFC domain + continue + if pil[3:6] == "OFF" and pil[6:] in ['N11','N12','N13','N14']: + continue + siteTable.append((pil, wmoid, site4)) + + # get the pil list for each product + pillist = {} + for nnn in NWS_PRODUCTS: + found = 0 + pilEntries = [] + for pil, wmoid, site4 in siteTable: + if pil[3:6] == nnn: + pilEntries.append((pil, wmoid, site4)) + + pillist[nnn] = pilEntries + d = [] #list of entries for this particular pil + for pil, wmoid, site4 in pilEntries: + e = {} + e['wmoID'] = wmoid + e['pil'] = pil[3:] + e['textdbPil'] = pil + e['awipsWANPil'] = site4 + pil[3:] + d.append(e) + siteD[nnn] = d #store the pil dictionary back into the SiteInfo + + return pillist + + def __getProductFromFilename(self,regularNameBase): + """get product 
from filename + + Looks for exact matches in the DirectFileToProductMapping dictionary, + which contains only those names that you can't determine the + product id from the name. If name begins with Product, then returns + the set of products based on the TemplatedProducts dictionary. + + @param regularNameBase: the filename without the extension + @type regularNameBase: string + + @return a set of products based on the TemplatedProducts dictionary + @rtype: list of strings + """ + import types, string + from .preferences.configureTextProducts import DirectFileToProductMapping + from .preferences.configureTextProducts import TemplatedProducts + + if regularNameBase == "NONE": + return ["NONE"] + + #in the direct mapping dictionary? + if regularNameBase in DirectFileToProductMapping: + v = DirectFileToProductMapping[regularNameBase] + if type(v) is bytes: + return [v] + else: + return v + + #look for Product in the filename + index = string.find(regularNameBase, "Product") + if index == 0: + if regularNameBase in TemplatedProducts: + return TemplatedProducts[regularNameBase] + else: + return None + + #attempt to extract out the PIL based on the filename. Look for + #3 uppercase letters, separated by an underscore. This entry must + #exist in the NWSProducts directory. + sections = string.split(regularNameBase, "_") + for sec in sections: + if len(sec) == 3 and sec in NWS_PRODUCTS: + return [sec] + + #got here, don't know what this product is + LOG.error("Unknown Product: %s" % regularNameBase) + return None + + def __create(self, srcDir, destDir): + """Build the formatters for this site + + Substitute the appropriate values into the templates. Name and place + the generated file into the correct destination. 
+ + @return the number of products that were created + @rtype: number + """ + LOG.debug("Processing Formatter Templates.......") + + from .preferences.configureTextProducts import productsPerDomain + + try: + os.makedirs(join(self.getDestination(), destDir)) + except OSError as e: + if e.errno != errno.EEXIST: + LOG.error("%s: '%s'" % (e.strerror,e.filename)) + return 0 + + isTextProduct = (destDir == "textProducts") + + siteid = self.__siteId + productsWritten = 0 + + # ---- Gather a list of all template Files -------------------------- + + templateFiles = self.__getTemplateFiles(srcDir) + + # ---- Process the files --------------------------------------------- + + import stat, string + for lf in templateFiles: + + fileName = str(lf.getFile().getAbsolutePath()) + isProtected = ProtectedFileLookup.isProtected(lf) + + #skip svn files + if string.find(fileName,".svn-base") != -1: + continue + + #decode the filename + regularName = basename(fileName) + index = string.find(regularName, '.') + if index != -1: + regNameExt = regularName[index:] + regNameBase = regularName[0:index] + else: + regNameExt = "" + regNameBase = regularName + + #read the source + try: + fd = open(fileName) + fileContents = fd.read() + fd.close() + except IOError as xxx_todo_changeme1: + (num,info) = xxx_todo_changeme1.args + LOG.warn("Unable to read template (%s): %s" % (info, + basename(fileName))) + continue + + #get source permissions + perms = os.stat(fileName)[stat.ST_MODE] & 0o777 + + LOG.debug("=========================================" + \ + "=======================") + LOG.debug("file: %s" % fileName[len(SCRIPT_DIR):]) + LOG.debug("regularNameBase: %s" % regNameBase) + LOG.debug("RegNameExt: %s" % regNameExt) + #determine products, for templated could be more than 1 + products = self.__getProductFromFilename(regNameBase) + if products is None: + continue + LOG.debug("PRODUCTS: %s" % products) + # ---- Process the products ------------------------------------ + + for product in 
products: + LOG.debug(" --------------") + LOG.debug(" PRODUCT: %s" % product) + + # get the pil information + pilNames = self.__getProductFromFilename(product) #indirect + LOG.debug(" PRODUCT PIL: %s" % pilNames) + + if pilNames is None or len(pilNames) == 0: + continue + + # extract out the pil information from the dictionary + if pilNames[0] in SiteInfo[siteid]: + pils = SiteInfo[siteid][pilNames[0]] + else: + #set pils to empty list if none defined + pils = [{'awipsWANPil': 'kssscccnnn', + 'textdbPil': 'cccnnnxxx', + 'pil': 'nnnxxx', 'wmoID': 'wmoidd'}] + LOG.debug(" PILS: %s" % pils) + + # ---- Process each PIL --------------------------------------- + + for pilInfo in pils: + + if product != 'NONE' and \ + siteid in productsPerDomain and \ + not pilInfo['pil'] in productsPerDomain[siteid]: + LOG.debug("Skipping %s, not in productsPerDomain[%s]", pilInfo['pil'], siteid) + continue + + multiple = len(pils) > 1 + + # ---- Perform Substitutions ----------------------------- + + substitutor = self.__Substitutor(product,pilInfo,multiple, + self.__siteId, not isProtected, isTextProduct) + + newFileContents = substitutor.replaceText(fileContents) + destName = substitutor.replaceFilename(regNameBase) + + # ---- Output to File ----------------------------------- + + destFilename = join(self.getDestination(), destDir, destName) + regNameExt + LOG.debug(" ---> %s" % destFilename) + + try: + os.chmod(destFilename, 0o644) + os.remove(destFilename) + LOG.debug(""" Replacing the existing file: %s + + The file may already exist due to any of the following + reasons: + + 1. Multiple PILS exist but the template filename does + not have 'MultiPil' in its name. + 2. Multiple Products exist but the template filename + does not have 'Product' in its name. + 3. The destination directory was not previously cleared. 
+ """ % destName) + productsWritten = productsWritten - 1 + except: + pass + + try: + fd = open(destFilename, 'w', 0o644) + fd.write(newFileContents) + fd.close() + os.chmod(destFilename, perms) + productsWritten = productsWritten + 1 + + # Set protected if file is read only + isWritable = os.access(destFilename, os.W_OK) + + if isProtected: + self.addProtection(destFilename) + + except IOError as xxx_todo_changeme: + (number, description) = xxx_todo_changeme.args + LOG.warn(" %s" % description) + + # write out the protected file + + + return productsWritten + + def addProtection(self, filename): + # cleanup the filename + parts = filename.split("configured/"+self.__siteId) + newName = "CONFIGURED:cave_static" + parts[1] + + fileList.append(newName) + + def getProtectedFiles(self): + return fileList + + def __delete(self): + """Delete all pre-existing text templates + + @return the number of pre-existing products that were deleted + @rtype: number + """ + + productsRemoved = 0 + + import string + import glob + import os + + LOG.debug(" Deleting Existing Baseline Templates Begin........") + + from .preferences.configureTextProducts import templateProds + + #make a list of all known products + allProducts = [] + for p in NWS_PRODUCTS: + allProducts.append(p) + for p in templateProds: + allProducts.append(p) + + for dirInfo in ProcessDirectories: + templateFiles = self.__getTemplateFiles(dirInfo['src']) + + #determine potential files, based on the template files + for lf in templateFiles: + tf = str(lf.getFile().getAbsolutePath()) + + LOG.debug("Template= %s" % basename(tf)) + mname = basename(tf) + globExpressions = [] + + #wildcard the Site + mname = string.replace(mname, "Site", "???") #always 3 chars + + #wildcard the Region + mname = string.replace(mname, "Region", "??") #always 2 chars + + #wildcard the _MultiPil + wcards = [] + if string.find(mname, "_MultiPil") != -1: + wcards.append(string.replace(mname, "_MultiPil", "")) + wcards.append(string.replace(mname, 
"_MultiPil", "_?")) + wcards.append(string.replace(mname, "_MultiPil", "_??")) + wcards.append(string.replace(mname, "_MultiPil", "_???")) + else: + wcards.append(mname) + + #wildcard the Product + if string.find(mname, "Product") == 0: + for wc in wcards: + for prd in allProducts: + ge = prd + wc[7:] #Product is first 7 characters + globExpressions.append(ge) + + #simple case - Product does not need to be expanded + else: + for wc in wcards: + globExpressions.append(wc) + + for g in globExpressions: + searchString = join(self.getDestination(), dirInfo['dest'], g) + delfiles = glob.glob(searchString) + + for fn in delfiles: + #delete any existing file + try: + os.chmod(fn, 0o644) + os.remove(fn) + LOG.debug(" DEL---> %s" % fn) + productsRemoved += 1 + except: + pass + + LOG.debug(" Deleting Existing Baseline Templates Finished........") + + return productsRemoved + +def runFromJava(siteId, destinationDir): + generator = Generator() return generator.create(siteId, destinationDir) \ No newline at end of file diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/configureTextProducts.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/configureTextProducts.py index c23a63bb06..106b2f842e 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/configureTextProducts.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/configureTextProducts.py @@ -1,199 +1,199 @@ -#!/usr/bin/env python - -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# -# Generate site specific text products. -# -# This script is run at install time to customize a set of the text products -# for a given site. -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Jul 09, 2008 1222 jelkins Split command line loader from class -# Jan 26, 2015 4033 randerso Fix logging broken by #3685 -# Sep 28, 2016 19293 randerso Removed commented out code -# -# @author: jelkins -# -## - -## -# This is a base file that is not intended to be overridden. -## - -__version__ = "1.0" - -from sys import argv -from os.path import basename -from os.path import dirname -from os.path import abspath -from os.path import join - -from awips import ThriftClient -from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import ConfigureTextProductsRequest - -SCRIPT_DIR = abspath(dirname(argv[0])) - -# ---- Setup Logging ---------------------------------------------------------- -from . import logging -from time import strftime, gmtime -timeStamp = strftime("%Y%m%d", gmtime()) -logFile = '/awips2/edex/logs/configureTextProducts-'+timeStamp+'.log' - -LOG = logging.getLogger("configureTextProducts") -LOG.setLevel(logging.DEBUG) -handler = logging.FileHandler(logFile) -handler.setLevel(logging.DEBUG) -formatter = logging.Formatter("%(levelname)-5s %(asctime)s [%(process)d:%(thread)d] %(filename)s: %(message)s") -handler.setFormatter(formatter) -for h in LOG.handlers: - LOG.removeHandler(h) -LOG.addHandler(handler) - -class ConfigureTextProducts: - """Command Line Interface for the TextProductsGenerator - """ - - USAGE = """Usage: %prog [OPTIONS...] 
SITE_ID [DESTINATION_DIR] - - This script automatically configures the GFESuite set of text formatters - based on an afos2awips PIL/WMOID table and a given SITE_ID. Text formatters - are placed into the given DESTINATION_DIR. - - For example: %prog OAX ~/awips/edex/opt/data/utility/cave_static/site/OAX/gfe/userPython/textProducts""" - - def __init__(self): - """Class constructor - - This constructor initializes the OptionParser - """ - from optparse import OptionParser - - self.__optionParser = OptionParser(ConfigureTextProducts.USAGE) - - self.programName = self.__optionParser.get_prog_name() - - def main(self): - """System entry point. - - Executes this script from the command line. - - @type args: list - @param args: contains the commands used to launch the script - """ - - - # get the command line options - (option, arg) = self.__parseOptions() - - request = ConfigureTextProductsRequest() - request.mode = option.mode.lower() - request.template = option.template - request.site = arg[0] - if (len(arg) > 1): - request.destinationDir = arg[1] - else: - request.destinationDir = None - - response = None - try: - thriftClient = ThriftClient.ThriftClient(option.host) - response = thriftClient.sendRequest(request) - except Exception, e: - self.__error(e, 2) - - if response is not None and \ - response.msg is not None and \ - not "" == response.msg: - self.__error(response.msg, 2) - - def __error(self,message,status): - """Output an error message and exit to the system - - Generates non-usage type errors. 
For usage errors use the - __optionParser.error() method - - @param message: a message to output before exiting to the system - @type message: string - - @param status: a number to return to the system on exit - @type status: number - """ - - from sys import exit - - LOG.error(message) - exit(status) - - def __parseOptions(self): - """Parse command line options and arguments - - Setup usage, options, and check for required number of arguments - - @return: a tuple of options and arguments - @rtype: tuple - """ - - self.__optionParser.add_option("--template",metavar="DIR", - help="base template DIR") - - self.__optionParser.add_option("-m", "--mode", - default="CREATE", - help="create, delete, or display a PIL/WMOID table" - ) - - self.__optionParser.add_option("--host", - default="localhost", - help="the machine to configure (defaults to localhost)") - - from optparse import OptionGroup - - modeGroup = OptionGroup(self.__optionParser, "MODE is one of the following") - - modeGroup.add_option("-- CREATE", action="store_true", - help="creates the formatter templates [default]") - - modeGroup.add_option("-- DELETE", action="store_true", - help="deletes the formatter templates") - - modeGroup.add_option("-- INFO", action="store_true", - help="list the PIL/WMOID information for this SITE") - - modeGroup.add_option("-- ALLINFO", action="store_true", - help="list the PIL/WMOID information for all sites") - - self.__optionParser.add_option_group(modeGroup) - - (options, args) = self.__optionParser.parse_args() - - progName = self.__optionParser.get_prog_name() - - if len(args) < 1: - self.__optionParser.error("incorrect number of arguments\n"+ - "Try `"+progName+" --help' for more information.") - - return (options, args) - -# check if run from the command line -if __name__ == "__main__": - - ConfigureTextProducts().main() +#!/usr/bin/env python + +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US 
Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# +# Generate site specific text products. +# +# This script is run at install time to customize a set of the text products +# for a given site. +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Jul 09, 2008 1222 jelkins Split command line loader from class +# Jan 26, 2015 4033 randerso Fix logging broken by #3685 +# Sep 28, 2016 19293 randerso Removed commented out code +# +# @author: jelkins +# +## + +## +# This is a base file that is not intended to be overridden. +## + +__version__ = "1.0" + +from sys import argv +from os.path import basename +from os.path import dirname +from os.path import abspath +from os.path import join + +from awips import ThriftClient +from dynamicserialize.dstypes.com.raytheon.uf.common.dataplugin.gfe.request import ConfigureTextProductsRequest + +SCRIPT_DIR = abspath(dirname(argv[0])) + +# ---- Setup Logging ---------------------------------------------------------- +from . 
import logging +from time import strftime, gmtime +timeStamp = strftime("%Y%m%d", gmtime()) +logFile = '/awips2/edex/logs/configureTextProducts-'+timeStamp+'.log' + +LOG = logging.getLogger("configureTextProducts") +LOG.setLevel(logging.DEBUG) +handler = logging.FileHandler(logFile) +handler.setLevel(logging.DEBUG) +formatter = logging.Formatter("%(levelname)-5s %(asctime)s [%(process)d:%(thread)d] %(filename)s: %(message)s") +handler.setFormatter(formatter) +for h in LOG.handlers: + LOG.removeHandler(h) +LOG.addHandler(handler) + +class ConfigureTextProducts: + """Command Line Interface for the TextProductsGenerator + """ + + USAGE = """Usage: %prog [OPTIONS...] SITE_ID [DESTINATION_DIR] + + This script automatically configures the GFESuite set of text formatters + based on an afos2awips PIL/WMOID table and a given SITE_ID. Text formatters + are placed into the given DESTINATION_DIR. + + For example: %prog OAX ~/awips/edex/opt/data/utility/cave_static/site/OAX/gfe/userPython/textProducts""" + + def __init__(self): + """Class constructor + + This constructor initializes the OptionParser + """ + from optparse import OptionParser + + self.__optionParser = OptionParser(ConfigureTextProducts.USAGE) + + self.programName = self.__optionParser.get_prog_name() + + def main(self): + """System entry point. + + Executes this script from the command line. 
+ + @type args: list + @param args: contains the commands used to launch the script + """ + + + # get the command line options + (option, arg) = self.__parseOptions() + + request = ConfigureTextProductsRequest() + request.mode = option.mode.lower() + request.template = option.template + request.site = arg[0] + if (len(arg) > 1): + request.destinationDir = arg[1] + else: + request.destinationDir = None + + response = None + try: + thriftClient = ThriftClient.ThriftClient(option.host) + response = thriftClient.sendRequest(request) + except Exception as e: + self.__error(e, 2) + + if response is not None and \ + response.msg is not None and \ + not "" == response.msg: + self.__error(response.msg, 2) + + def __error(self,message,status): + """Output an error message and exit to the system + + Generates non-usage type errors. For usage errors use the + __optionParser.error() method + + @param message: a message to output before exiting to the system + @type message: string + + @param status: a number to return to the system on exit + @type status: number + """ + + from sys import exit + + LOG.error(message) + exit(status) + + def __parseOptions(self): + """Parse command line options and arguments + + Setup usage, options, and check for required number of arguments + + @return: a tuple of options and arguments + @rtype: tuple + """ + + self.__optionParser.add_option("--template",metavar="DIR", + help="base template DIR") + + self.__optionParser.add_option("-m", "--mode", + default="CREATE", + help="create, delete, or display a PIL/WMOID table" + ) + + self.__optionParser.add_option("--host", + default="localhost", + help="the machine to configure (defaults to localhost)") + + from optparse import OptionGroup + + modeGroup = OptionGroup(self.__optionParser, "MODE is one of the following") + + modeGroup.add_option("-- CREATE", action="store_true", + help="creates the formatter templates [default]") + + modeGroup.add_option("-- DELETE", action="store_true", + help="deletes 
the formatter templates") + + modeGroup.add_option("-- INFO", action="store_true", + help="list the PIL/WMOID information for this SITE") + + modeGroup.add_option("-- ALLINFO", action="store_true", + help="list the PIL/WMOID information for all sites") + + self.__optionParser.add_option_group(modeGroup) + + (options, args) = self.__optionParser.parse_args() + + progName = self.__optionParser.get_prog_name() + + if len(args) < 1: + self.__optionParser.error("incorrect number of arguments\n"+ + "Try `"+progName+" --help' for more information.") + + return (options, args) + +# check if run from the command line +if __name__ == "__main__": + + ConfigureTextProducts().main() diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/library/SimpleLog.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/library/SimpleLog.py index 88852b46d5..20166cbc2f 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/library/SimpleLog.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/library/SimpleLog.py @@ -1,58 +1,58 @@ - -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-## - - - -__version__ = "1.0" - -class SimpleLog(): - """ - Log messages to standard out - - SOFTWARE HISTORY - Date Ticket# Engineer Description - ------------ ---------- ----------- -------------------------- - Jul 09, 2008 1222 jelkins Initial version - - @author: jelkins - - Class with a default method handler idea from: - http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/307618 - """ - def __init__(self,logName): - self.logName = logName - - def handlerFunctionClosure(self,name): - def handlerFunction(message): - - from time import strftime - dateTime = strftime("%Y-%m-%d %H:%M:%S,000") - - from string import upper - print "%s %s %s: %s" % (upper(name),dateTime,self.logName,message) - - return handlerFunction - def __getattr__(self,name): + +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. 
+## + + + +__version__ = "1.0" + +class SimpleLog(): + """ + Log messages to standard out + + SOFTWARE HISTORY + Date Ticket# Engineer Description + ------------ ---------- ----------- -------------------------- + Jul 09, 2008 1222 jelkins Initial version + + @author: jelkins + + Class with a default method handler idea from: + http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/307618 + """ + def __init__(self,logName): + self.logName = logName + + def handlerFunctionClosure(self,name): + def handlerFunction(message): + + from time import strftime + dateTime = strftime("%Y-%m-%d %H:%M:%S,000") + + from string import upper + print(("%s %s %s: %s" % (upper(name),dateTime,self.logName,message))) + + return handlerFunction + def __getattr__(self,name): return self.handlerFunctionClosure(name) \ No newline at end of file diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/moduleTest.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/moduleTest.py index 0428cfead2..6354c00b92 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/moduleTest.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/moduleTest.py @@ -1,31 +1,31 @@ -#!/usr/bin/env python -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +#!/usr/bin/env python +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - - - -from Generator import Generator - -generator = Generator("OAX",".") - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + + + +from .Generator import Generator + +generator = Generator("OAX",".") + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/AFD.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/AFD.py index 8d7bf6f979..19994caa2d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/AFD.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/AFD.py @@ -1,1113 +1,1113 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 20, 2014 #3685 randerso Changed to support mixed case -# -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# File Name: AFD.py -# Description: This product creates a Area Forecast Discussion product. -# Contributed by Eastern Region (Jim Noel, Rob Radzanowski) and -# Southern Region (Brian Curran) -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# AFD, AFD___Definition, AFD__Overrides -#------------------------------------------------------------------------- -# Weather Elements Needed: MinT, MaxT, PoP -#------------------------------------------------------------------------- -# Edit Areas Needed: Individual edit areas are required, one for each -# preliminary temp/PoP forecast point. -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: None -#------------------------------------------------------------------------- -# Component Products: None -#------------------------------------------------------------------------- -# User Configurable Variables: -# Definition Section: -# displayName If not None, defines how product appears in GFE GUI -# -# defaultEditAreas defines edit area names and station IDs for edit areas -# expected in the form of (editAreaName, 3letterStationID) -# editAreaSuffix default None. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. -# productName defines name of product e.g. "Zone Forecast Product" -# fullStationID full station identifier (4letter, KSLC) -# -# wmoID WMO ID for product header, such as FOUS45 -# -# pil Product pil, such as CCFBOX -# -# debug If on, debug_print statements will appear. -# database Source database for product. 
Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from FormatterLauncher. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. - -# topicDividers List of tuples describing the various topic dividers: -# (topicName, topicDivider, alwaysInclude, includeInGUI) where -# --topicName is "Synopsis", "Update", etc. -# --topicDivider will appear in the product ".SYNOPSIS..." -# --alwaysInclude: if 1, the topic divider will always -# appear in the product. -# Otherwise, the user can choose at run-time whether -# to include the topic divider. -# --If 1, and alwaysInclude == 0, this item will appear -# in the GUI to be selected at run time. -# Some options, like PrevDisc should not appear in the -# GUI since they are tied to other user input e.g. -# getPreviousAFD. -# state_IDs The state_ID definitions below are for the W/W/A portion of -# your AFD. Multiple state IDs (including MARINE) are separated -# by commas. -# tieUpdateToPreviousAFD If 1, then when "Include Previous AFD" is chosen in the GUI, -# the UPDATE topic divider will automatically be included. -# fcstrNumberFormat The fcstrNumberFormat can take three values: -# Brief - short term forecaster and long term forecaster -# numbers separated by a slash. -# Verbose - "SHORT TERM...xx" and "LONG TERM...yy". -# None - no forecaster numbers added to the end of the AFD. -# NOTE: names or numbers may be used. 
-# shortTermForecasters : List of identifiers (number strings or names) for the -# short term forecasters -# longTerm Forecasters : List of identifiers (number strings or names) for the -# long term forecasters -# aviationForecasters : List of identifiers (number strings or names) for the -# aviation forecasters -# pointEditAreas If non-empty list, a point temp/pop table will be produced. -# popStartZ_AM start time for PoP for AM issuance in Zulu, (12 for 12z) -# Usually changed only for OCONUS sites. -# useZoneNames If 1, will use zone names instead of ugc codes in the W/W/A portion. -# abbreviateUGCs If 1, will abbreviate ugc string. Instead of: FLZ042-FLZ043-FLZ044 -# produce: FLZ042>FLZ044 -# WWA_Nil The WWA_Nil definition will be used for nil watches, warnings, -# or advisories. -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# None -# -# To look up additional tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". -#------------------------------------------------------------------------- -# Additional Information: -# The grids are sampled according to the following rules: -# MaxT/MinT: Four periods, 12 hours apart, daily, set up to take the -# MaxT grid overlapping noon LT and MinT grid overlapping midnight LT -# PoP: Four periods, 12 hours apart, 12z-00z, and 00z-12z. Periods can -# be overridden using the popStartZ_AM field for OCONUS sites. -# -# Missing data will be shown with MMM for temperatures and PoPs. 
-#------------------------------------------------------------------------- -# Example Output: -# -## -##FXUS64 KMAF 041309 -##AFDMAF -## -##AREA FORECAST DISCUSSION -##NATIONAL WEATHER SERVICE MIDLAND/ODESSA TX -##809 AM CDT SAT OCT 4 2003 -## -##.SHORT TERM... -## -## -##&& -## -##.LONG TERM... -## -## -##&& -## -##.PRELIMINARY POINT TEMPS/POPS... -##MAF 84 60 84 60 / 100 30 20 10 -##CNM 83 56 85 57 / 100 20 10 0 -##MRF 79 53 79 53 / 100 30 10 0 -##FST 86 62 87 62 / 100 30 10 10 -## -##&& -## -##.MAF WATCHES/WARNINGS/ADVISORIES... -##TX...NONE. -##NM...NONE. -## -##&& -## -##$$ -## -##99/99 -######################################################################## - -import TextRules -import SampleAnalysis -import string, time, types, os, re, copy -import ModuleAccessor, ProcessVariableList -import AbsTime - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - - Definition = { - "type": "smart", - - "displayName": "None", # for Product Generation Menu - "database" : "Official", # Source database. "Official", "Fcst", or "ISC" - - "defaultEditAreas" : "EditAreas_PublicMarineFireWx__", - "editAreaSuffix": None, - - # Edit Areas for creating the optional Preliminary Point Temp/PoPs - "pointEditAreas": [], - "outputFile": "{prddir}/TEXT/AFD_.txt", - "debug": 0, - - "productName": "Area Forecast Discussion", - "fullStationID" : "", # 4 letter station ID - "wmoID" : "", # WMO code - "wfoCityState" : "", # Location of WFO - "pil" : "", # product pil - "textdbPil" : "", # Product ID for storing to AWIPS text database. - "awipsWANPil" : "", # Product ID for transmitting to AWIPS WAN. 
- "wfoSiteID": "", - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - # Language - "language": "english", - "lineLength": 66, #Maximum line length - - "state_IDs": ["ST"], - "tieUpdateToPreviousAFD": 0, - "fcstrNumberFormat" : "Brief", # Brief, Verbose, or None - "shortTermForecasters": ["99","01","02","03"], - "longTermForecasters": ["99","01","02","03"], - "aviationForecasters": ["99","01","02","03"], - "useZoneNames": 0, - "abbreviateUGCs": 1, - - "topicDividers" : [ - # topicName, topicDivider, alwaysInclude, includeInGUI - - ("Update", ".UPDATE...", 0, 1), - ("Synopsis", ".SYNOPSIS...", 0, 1), - - # EITHER Discussion OR ShortTerm/LongTerm should always be included. - ("Discussion", ".DISCUSSION...", 0, 0), - ("ShortTerm", ".SHORT TERM...", 1, 0), - ("LongTerm", ".LONG TERM...", 1, 0), - - # Optional dividers - ("Aviation", ".AVIATION...", 0, 1), - ("Marine", ".MARINE...", 0, 1), - ("FireWeather",".FIRE WEATHER...", 0, 1), - ("Hydro", ".HYDROLOGY...", 0, 1), - ("Climate", ".CLIMATE...", 0, 1), - - # Controlled by "includePreviousAFD" - ("PrevDisc", ".PREV DISCUSSION...", 0, 0), - - # Controlled by "pointEditAreas" - ("Prelim", ".PRELIMINARY POINT TEMPS/POPS...", 0, 0), - ], - - "popStartZ_AM": 12, #hour UTC - "WWA_Nil" : "None.", - - "hazardSamplingThreshold": (10, None), #(%cov, #points) - } - - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - -#################################################################### -# generateForecast: -# AFD formatter engine. 
-#################################################################### - def generateForecast(self, argDict): - - # Get variables from varDict and Definition: - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the segments - hazardsC = argDict['hazards'] - self._segmentList = self.organizeHazards(hazardsC.rawAnalyzedTable()) - - # Determine time ranges: - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Initialize the output string: - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - fcst = self._makeProduct(fcst, argDict) - - # Append the $$ delimiter and the forecaster numbers: - fcst = self._postProcessProduct(fcst, argDict) - return fcst - -#################################################################### -# _processVariableList -# Displays user dialog. -#################################################################### - - def _processVariableList(self, definition): - # Get Definition variables - for key in definition.keys(): - exec "self._" + key + "= definition[key]" - - # Create the list of optional topic dividers to appear in GUI - self._options = [] - for topicName, topicDivider, alwaysInclude, includeInGUI in self._topicDividers: - if topicName == "Update" and self._tieUpdateToPreviousAFD: - continue - if alwaysInclude: - continue - if includeInGUI: - self._options.append(topicDivider) - - varList = [] - varList.append((("Product Issuance", "productIssuance"), "Morning", "radio", - ["Morning","Afternoon"])) - varList.append((("Optional\nTopics", "optionalTopics"), [], "check", - self._options)) - varList.append((("Include\nPrevious AFD?", "includePreviousAFD"), "NO", "radio", - ["NO", "YES"])) - varList.append((("Short Term\nForecaster", "shortTermFcstrNumber") , "99", "radio", - self._shortTermForecasters)) - varList.append((("Long Term\nForecaster", "longTermFcstrNumber"), "99", "radio", - self._longTermForecasters)) - 
varList.append((("Aviation\nForecaster","aviationFcstrNumber"), "", "radio", - self._aviationForecasters)) - return self._callProcessVariableList("AFD Values", varList, varDict={}) - - def _callProcessVariableList(self, title, varList, varDict): - processVarList = ProcessVariableList.ProcessVariableList( - title, varList, varDict={}) - self._selectionStatus = processVarList.status() - if not self._selectionStatus == "OK": - return None # User Cancelled - return processVarList.varDict() - -#################################################################### -# _getVariables: -# Retrieves variables and definitions. -#################################################################### - def _getVariables(self, argDict): - # Make variable assignments - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - self._longTermFcstrNumber = self._getForecasterNumber(self._longTermFcstrNumber) - self._shortTermFcstrNumber = self._getForecasterNumber(self._shortTermFcstrNumber) - self._aviationFcstrNumber = self._getForecasterNumber(self._aviationFcstrNumber) - - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Set up information for Hazards product - # TODO uncomment following line? - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - # Check for state id: ST indicating that the user needs to - # set up the list of state id's - if len(self._state_IDs) == 1 and self._state_IDs[0] == "ST": - return "WARNING:You must set up 'state_IDs' in Definition section before running the AFD." - return - -#################################################################### -# _determineTimeRanges: -# Determine time ranges for product. 
Returns popPeriods and -# tempPeriods which are a list of tuples (timeRange, label). -# Also determines the timeLabel string for the MND header. -# Adapted from previous _determineTimeRanges found in CCF -# formatter. -#################################################################### - def _determineTimeRanges(self, argDict): - - # Calculate ddhhmm string value: - self._timeRange = self.createTimeRange(0, 240) - self._currentTime = argDict['creationTime'] #ZULU - self._ddhhmmTime = time.strftime("%d%H%M",time.gmtime( - self._currentTime)) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - self._issueTime = AbsTime.current() - - # If generating temp/pop table, determine time ranges - if len(self._pointEditAreas) > 0: - # numPeriods is the number of forecast periods used for the - # preliminary temp/PoP block: - numPeriods = 4 - - # PoP Time ranges: four periods - # If AM, begin at 12z of issue day (default), may be overridden - # by the popStartZ_AM flag. - # If PM, begin at 00z of next day (default), may be overridden - # by the popStartZ_AM flag. 
- if self._productIssuance == "Morning": - startT = self._popStartZ_AM - else: - startT = self._popStartZ_AM + 12 # account for PM start later - - # rollover - different days from gmtime and local time - # so we need to sample the PoP from "yesterday" - # for MDT, rollover occurs from 5pm-midnight LST - if time.gmtime(self._currentTime)[2] != \ - time.localtime(self._currentTime)[2]: - startT = startT - 24 - - popStartTR = self.createTimeRange(startT, startT + 1, mode="Zulu") - timePeriod = 12 - timeSpan = 12 - self._popPeriods = self.getPeriods(popStartTR, timePeriod, - timeSpan, numPeriods) - - # Temp Time ranges: four periods, 12 hours apart, 5 hour span - # This is to catch the correct Max/Min temp grid - # If AM, begin with noon LT of issue day to catch MaxT - # If PM, begin with midnight LT of issue day to get MinT - if self._productIssuance == "Morning": - tempStartTR = self.createTimeRange(10, 15) - else: - tempStartTR = self.createTimeRange(22, 27) - timePeriod = 12 - timeSpan = 5 - self._tempPeriods = self.getPeriods(tempStartTR, timePeriod, timeSpan, - numPeriods) - return - -#################################################################### -# _addTopicDividers: -# Puts in the required and optional topic dividers per NWSI 10-503. -# Check for Update tied to getPreviousAFD. -# Get the previous discussion if requested. -#################################################################### - - def _addTopicDividers(self, fcst, argDict): - # Flag for adding the aviation forecaster number later - self._addedAviation = 0 - pad = "\n\n\n&&\n\n" - #print "\naddTopicDividers: user options", self._optionalTopics - - for topicName, topicDivider, alwaysInclude, includeInGUI in self._topicDividers: - - # Handle PrevDisc and Prelim in the order specified in the topicDividers list. 
- if topicName == "PrevDisc" and self._includePreviousAFD == "YES": - fcst = self._getPreviousAFD(fcst, argDict, divider=1) + "&&\n\n" - continue - if topicName == "Prelim": - fcst = self._makePrelimNumberBlock(fcst, argDict) - continue - - # See if we need to add this divider - addDivider = 0 - # If alwaysInclude OR the user chose this divider from the GUI - # add the topic divider - if alwaysInclude or topicDivider in self._optionalTopics: - addDivider = 1 - # Handle Update if it's tied to previous AFD and the - # user chose to include the previous AFD - if topicName == "Update" and self._tieUpdateToPreviousAFD \ - and self._includePreviousAFD == "YES": - addDivider = 1 - if not addDivider: - continue - - # Add padding - #print "Adding divider", topicName, topicDivider - # Check for Aviation so we can later add the aviationFcstrNumber - if topicName == "Aviation": - self._addedAviation = 1 - if topicDivider == "": - continue - if topicName == "ShortTerm": - fcst += topicDivider + "\n\n" - else: - fcst += topicDivider + pad - return fcst - - def _getTopicDivider(self, topic): - for topicName, topicDivider, alwaysInclude, includeInGUI in self._topicDividers: - if topicName == topic: - return topicDivider - return "" - -#################################################################### -# _getPreviousAFD: -# Gets the previous AFD. Strips the leading MND header and the -# trailing W/W/A block. Also lops off the prelim number block if -# present.. -#################################################################### - def _getPreviousAFD(self, fcst, argDict, divider=0): - # Initialize strings and lists: - WWABlockString = "." 
+ self._wfoSiteID + " WATCHES/W" - newAFD = [] - - # Retrieve the previous AFD and store the list in AFD_old: - prevAFD = self.getPreviousProduct(self._textdbPil) - prevAFD = string.split(prevAFD, "\n") # ADDED newline delimeter 12/7/04 bc - - # Initialize starting and ending indices: - start_index = 0 - end_index = len(prevAFD) - if end_index == 0: - print "WARNING -- Previous AFD has zero length." - - # Place newlines back at the end of each element in list prevAFD: - # ADDED 12/7/04 bc - for index in xrange(start_index, end_index): - prevAFD[index] = prevAFD[index] + "\n" - - # Make a copy of prevAFD to modify - oldAFD = prevAFD - # Loop through the list to find the first dot delimeter. Once - # found, then set start_index to the index in AFDlist. This will - # effectively strip off the MND header. Will also handle headlines - # too! - body_start_index = start_index - for index in xrange(start_index, end_index): - if oldAFD[index][:1] == ".": # first dot - body_start_index = index - break - - # Loop through the list to find the beginning of the W/W/A block. - # Once found, then set end_index to the index in AFDlist. This will - # strip off everything below the W/W/A block including this block: - body_end_index = end_index - for index in xrange(body_start_index, end_index): - if re.match(WWABlockString, oldAFD[index]) != None: - body_end_index = index - break - - # Make another pass to lop off the preliminary number block if it - # is present and reset end_index. - prelim_divider = self._getTopicDivider("Prelim") - if prelim_divider: - for index in xrange(body_start_index, body_end_index): - if re.match(prelim_divider, oldAFD[index]) != None: - body_end_index = index - break - - # Suggested by Rob R. @ CTP and from ER supplement to 10-503... - # Strip out the ampersands and the leading dot on the - # topic divider and place in newAFD. 
- for index in xrange(body_start_index, body_end_index): - if (oldAFD[index][:1] == "."): - newAFD.append(oldAFD[index][1:]) - elif (oldAFD[index][:2] == "&&"): - index = index + 1 - else: - newAFD.append(oldAFD[index]) - - if divider: - # If previous issuance time is desired, append it to the - # _PrevDisc_Divider string: - issuance_dateTime = "" - # Loop through the list to find the issuance time string. - for index in xrange(start_index, end_index-1): - if prevAFD[index][:8] == "National": # next line has date time stamp - issuance_dateTime = str(prevAFD[index+1]) - break - # Build issuance_DateTime string: - # Strip off trailing newline... - issuance_dateTime = " /issued " + issuance_dateTime[:-1] + "/ \n" - # Eliminate double whitespace characters if present: - issuance_dateTime = re.sub(r" ", r" ", issuance_dateTime) # PATCH 12/7/04 bc - fcst = fcst + self._getTopicDivider("PrevDisc") + issuance_dateTime + "\n" # PATCH 12/7/04 bc - - # Now test for multiple newlines. If this isn't a newline, write to fcst. - # If it is a newline, test the next one down. If it's also a newline, - # write the first newline and skip to the next line. - for index in xrange(0, len(newAFD)-1): - if newAFD[index] != "\n": - fcst = fcst + newAFD[index] - else: - if newAFD[index+1] != "\n": - fcst = fcst + newAFD[index] - index = index + 1 - fcst = fcst + "\n" - return fcst - -#################################################################### -# _makePrelimNumberBlock: -# Creates the prelim number block for the AFD. 
-#################################################################### - def _makePrelimNumberBlock(self, fcst, argDict): - # Get the areaList: - if len(self._pointEditAreas) == 0: - return fcst - - # Convert (lat, lon, dim) entries to editAreas - self._areaList = [] - for editArea, areaLabel in self._pointEditAreas: - if type(editArea) is types.TupleType: - lat, lon, dim = editArea - editArea = self.createLatLonArea(lat, lon, dim) - self._areaList.append((editArea, areaLabel)) - - # Append the prelim numbers divider: - fcst = fcst + self._getTopicDivider("Prelim") + "\n" - - # Sample the temps and PoPs: - self._sampleData(argDict) - - # Generate the preliminary numbers: - for editArea, areaLabel in self._areaList: - fcst = fcst + areaLabel + " " - fcst = self._makeFirstGuess(fcst, editArea, areaLabel, argDict) - fcst = fcst + "\n&&\n\n" - return fcst - -#################################################################### -# _preProcessProduct: -# Creates the MND header for the AFD. Checks to see if this is -# a routine, corrected, or updated issuance and appends this -# to the MND. 
-#################################################################### - def _preProcessProduct(self, fcst, argDict): - # Add product heading to fcst string - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + \ - self._pil + "\n\n" - - fcst = fcst + s.upper(); - - issuedByString = self.getIssuedByString() - - productName = self.checkTestMode(argDict, self._productName) - - s = productName + "\n" + \ - "National Weather Service " + \ - self._wfoCityState +"\n" + \ - issuedByString + \ - self._timeLabel + "\n\n" - fcst = fcst + s - return fcst - -#################################################################### -# _makeProduct: -# Formats the product -#################################################################### - def _makeProduct(self, fcst, argDict): - - # Insert topic dividers: - fcst = self._addTopicDividers(fcst, argDict) - - # Make the Hazard block: - fcst = self._makeHazardBlock(fcst,argDict) - return fcst - -#################################################################### -# _makeFirstGuess: -# Creates the "first guess" temp and PoP forecasts for each edit -# area. Note the format is not strictly NWSI 10-503 compliant -# as the directive makes no allowances for temperatures above 100 -# or below zero, nor does it allow for 100% PoPs. But I got -# permission to do it this way from SRH, so... 
-#################################################################### - def _makeFirstGuess(self, fcst, editArea, areaLabel, argDict): - # Produce temp forecast substring: - separators = [" ", " ", " ", " / ",] - for index in xrange(0, 4): - timeRange, label = self._tempPeriods[index] - fcst = fcst + self._getMinOrMax(self._analysisListTemp(), - editArea, timeRange) + separators[index] - - # Produce PoP forecast substring - separators = [" ", " ", " ", " "] - for index in xrange(0, 4): - timeRange, label = self._popPeriods[index] - fcst = fcst + self._getPoP(self._analysisListPoP(), editArea, - timeRange) + separators[index] - fcst = fcst + "\n" - return fcst - - -#################################################################### -# marineNameDict -# Used in the makeHazardBlock to determine the defined names -# for the marine zones. This function can be overridden to change -# the names of the marine areas. -#################################################################### - def marineNameDict(self): - # dictionary for marine zone identifiers - return {} #use the two-letter ids for the marine areas - - # if you want descriptive names for the marine areas - #return {'AM': 'Atlantic coastal Waters', 'GM': 'Gulf of Mexico', - # 'LE': 'Lake Erie', 'LO': 'Lake Ontario', 'LH': 'Lake Huron', - # 'LC': 'Lake St Clair', 'LM': 'Lake Michigan', 'LS': 'Lake Superior', - # 'PZ': 'Pacific coastal waters', 'PK': 'Alaskan coastal waters', - # 'PH': 'Hawaiian coastal waters', 'PM': 'Marianas waters', - # 'AN': 'Atlantic coastal waters', - # 'PS': 'American Samoa coastal waters', 'SL': 'St Lawrence River'} - -#################################################################### -# _makeHazardBlock: -# Cycles through the list of segments and reports the Hazards -#################################################################### - - def _makeHazardBlock(self, fcst, argDict): - - fcst = fcst + "." 
+ self._wfoSiteID + \ - " WATCHES/WARNINGS/ADVISORIES...\n" - - accessor = ModuleAccessor.ModuleAccessor() - areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") - - # get combinations file used, which contains extra info which will - # tell us which zones are marine, firewx and public - combo = self._defaultEditAreas - fireWxPhenSig = [("FW","W"), ("FW","A")] - fireWxZones = [] - otherZones = [] - if type(combo) is types.StringType: - try: - m = __import__(combo) - for map in m.EASourceMap.keys(): - if map.find("FireWx") != -1: - fireWxZones = m.EASourceMap[map] - else: - for idz in m.EASourceMap[map]: - if idz not in otherZones: - otherZones.append(idz) - except: - otherZones = None - - - marine = self.marineNameDict() - # - # Get every hazard and hazard combination in effect, separate them - # into records by state - # - hazardsRaw = argDict['hazards'].rawAnalyzedTable() - hazards = self._combineHazardRecords(hazardsRaw, argDict) - - stateDict = {} - for h in hazards: - #determine the states in this record - sd = {} - ids = h['id'] - for id in ids: - stateid = id[0:2] - if sd.has_key(stateid): - locs = sd[stateid] - locs.append(id) - sd[stateid] = locs - else: - sd[stateid] = [id] - #add the record to the appropriate "state" in stateDict - for state in sd.keys(): - hcopy = copy.deepcopy(h) - if stateDict.has_key(state): - recs = stateDict[state] - hcopy['id'] = sd[state] - recs.append(hcopy) - stateDict[state] = recs - else: - hcopy['id'] = sd[state] - stateDict[state] = [hcopy] - - - - # - # For every state we are responsible for, check for hazards - # - - for eachState in self._state_IDs: - if stateDict.has_key(eachState): - stateHazardList = stateDict[eachState] - else: - stateHazardList = [] - - # add the state identifier (only if multiple states) - if len(self._state_IDs) > 1: - #marine zone - if eachState in marine.keys(): - fcst = fcst + marine[eachState] + "..." - else: - fcst = fcst + eachState + "..." 
- - # If no hazards are found, append the null phrase - - if len(stateHazardList) == 0: - fcst = fcst + self._WWA_Nil + "\n" - continue - - # If hazards are found, then build the hazard phrases - for i in xrange(len(stateHazardList)): - eachHazard = stateHazardList[i] - - # special check code for firewx - if (eachHazard['phen'],eachHazard['sig']) in fireWxPhenSig: - firezones = [] - for id in eachHazard['id']: - if id in fireWxZones and id not in firezones: - firezones.append(id) - eachHazard['id'] = firezones #eliminated public - stateHazardList[i] = eachHazard - else: - otherzones = [] - for id in eachHazard['id']: - if (otherZones is None or id in otherZones) and id not in otherzones: - otherzones.append(id) - eachHazard['id'] = otherzones #eliminated firewx - stateHazardList[i] = eachHazard - - - - # hazard name - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - - # timing phrase - timing = self.getTimingPhrase(eachHazard, argDict['creationTime']) - - # ids - ids = eachHazard['id'] - if len(ids) == 0: - continue #skip hazard string if no zones - if self._useZoneNames == 1: - zoneNames = [] - for id in ids: - zoneNames.append(areaDict[id]['ugcName']) - ids = zoneNames - ids.sort() - idString = "-".join(ids) - if self._useZoneNames == 0 and self._abbreviateUGCs == 1: - idString = self.makeUGCString(ids) - - # hazard phrase - phrase = hazName + ' ' + timing + ' for ' + idString + '.' 
- - # Indent if there is a state list associated - if len(self._state_IDs) > 1: - phrase = self.indentText(phrase, indentFirstString = '', - indentNextString = ' ', - maxWidth=self._lineLength, - breakStrings=[" ", "-"]) - else: - phrase = self.indentText(phrase, indentFirstString = '', - indentNextString = '', - maxWidth=self._lineLength, - breakStrings=[" ", "-"]) - - # Apply the hazard phrases - if len(self._state_IDs) > 1: - #don't indent 1st one - if i == 0: - fcst = fcst + phrase + '\n' - #ident the remainder - else: - fcst = fcst + " " + phrase + '\n' - else: - fcst = fcst + phrase + '\n' #never ident - only 1 state - - fcst = fcst + "&&\n\n" - return fcst - -#################################################################### -# _combineHazardRecords -# Consolidate the hazard records for the hazard block. Combines -# "like" records by "id". Like records are those with the -# same phen, sig, start, and ending times. -# by the fcstrNumberFormat variable. -#################################################################### - def _combineHazardRecords(self, hazrecs, argDict): - ptable = copy.deepcopy(hazrecs) - import VTECTableUtil - vtu = VTECTableUtil.VTECTableUtil(activeTableFileName = None) - compare = ['phen','sig','startTime','endTime'] - acts = ['NEW','CON','EXT','EXB','EXA'] #live event - ctable = [] - for a in ptable: - if a['act'] not in acts: - continue #ignore non-live events - #ensure we combine records currently active, but may have diff - #start times - if a['startTime'] < argDict['creationTime']: - a['startTime'] = argDict['creationTime'] - found = 0 - for c in ctable: - if vtu.hazardCompare(a, c, compare): - found = 1 - zones = [a['id']] - - allzones = c['id'] - for z in zones: - allzones.append(z) - c['id'] = allzones - break - if found == 0: - newc = copy.deepcopy(a) - if newc['id'] is not list: - newc['id'] = [newc['id']] - ctable.append(newc) - - return ctable - - -#################################################################### -# 
_postProcessProduct: -# Appends the $$ delimeter followed by the short and long term -# forecaster numbers. Display of forecaster numbers is governed -# by the fcstrNumberFormat variable. -#################################################################### - def _postProcessProduct(self, fcst, argDict): - # Put in the $$ delimeter: - fcst = fcst + "$$\n\n" - # Add the forecaster numbers to the fcst string: - if self._fcstrNumberFormat == "Brief": - fcst = fcst + self._shortTermFcstrNumber - if self._longTermFcstrNumber != "": - fcst = fcst + "/" + self._longTermFcstrNumber - if self._addedAviation: - fcst = fcst + "/" +self._aviationFcstrNumber - - elif self._fcstrNumberFormat == "Verbose": - fcst = fcst + "SHORT TERM..." + self._shortTermFcstrNumber - if self._longTermFcstrNumber != "": - fcst = fcst + "\nLONG TERM...." + self._longTermFcstrNumber - if self._addedAviation: - fcst = fcst + "\nAVIATION..." + self._aviationFcstrNumber - - return fcst - -##################################################################### -# _sampleData, _analysisListPop, _analysisListTemp: -# Sample the temp and PoP grids. Returns the samplers for temp and -# PoP. -##################################################################### - def _sampleData(self, argDict): - # Sample the data. Returns the samplers for pop and temp - sampleList = [] - sampleList.append((self._analysisListPoP(), self._popPeriods)) - sampleList.append((self._analysisListTemp(), self._tempPeriods)) - sampleInfo = [] - for analList, periods in sampleList: - sampleInfo.append((analList, periods, self._areaList)) - - self._sampler = self.getSampler(argDict, sampleInfo) - return - - def _analysisListPoP(self): - return [ - ("PoP", self.stdDevMaxAvg), - ] - - def _analysisListTemp(self): - return [ - ("MinT", self.avg), - ("MaxT", self.avg), - ] - -#################################################################### -# _getMinOrMax: -# Returns a Max or Min value depending on availability. 
-#################################################################### - def _getMinOrMax(self, analysisList, area, timeRange): - - statDict = self.getStatDict(self._sampler, analysisList, - timeRange, area) - dayNight = self.getPeriod(timeRange,shiftToLocal=1) - if dayNight == self.DAYTIME(): - maxV = self.getStats(statDict, "MaxT") - return self._getTemp(maxV) - else: - minV = self.getStats(statDict, "MinT") - return self._getTemp(minV) - -#################################################################### -# _getTemp: -# Returns a three character string containing the temperature. -# For positive values less than 100, the leading 0 is replaced by -# a space. If no grid is found, "MMM" is returned. -#################################################################### - def _getTemp(self, value): - if value is None: - return "MMM" #for missing - value = int(round(value)) - valStr = string.rjust(`value`, 3) - return valStr - -#################################################################### -# _getPoP: -# Returns a three character string containing the PoP to the -# nearest 10 percent. -#################################################################### - def _getPoP(self,analysisList,area,timeRange): - statDict = self.getStatDict( - self._sampler, analysisList, timeRange, area) - pop = self.getStats(statDict, "PoP") - if pop is None: - return "MMM" - value = int(self.round(pop,"Nearest",10)) - valStr = string.rjust(`value`, 3) - return valStr - -#################################################################### -# _getForecasterNumber: -# Takes a number string or name and returns a string. -# Removes leading zeros from numbers. 
-#################################################################### - def _getForecasterNumber(self, numberString): - try: - result = "" # set result to null - num = int(numberString) # convert numberString to integer - if num > 99 or num < 0: # if outside [0,99] assign 99 string - result = `99` - elif num < 10: # if less than 10 pad leading 0: - result = string.zfill(`num`, 2) - else: # convert back to string - result = `num` - return result - except: - return numberString - -#################################################################### - -### Removed inland tropical hazards in OB9.3 - def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", - "CON", "EXP"] - marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] - return [ - ('HU.W', tropicalActions, 'Tropical'), # HURRICANE WARNING - ('TY.W', tropicalActions, 'Tropical1'), # TYPHOON WARNING - ('TR.W', tropicalActions, 'Tropical2'), # TROPICAL STORM WARNING - ('HU.A', tropicalActions, 'Tropical3'), # HURRICANE WATCH - ('TY.A', tropicalActions, 'Tropical4'), # TYPHOON WATCH - ('TR.A', tropicalActions, 'Tropical5'), # TROPICAL STORM WATCH - ('HF.A', allActions, 'Marine'), # HURRICANE FORCE WIND WATCH - ('HF.W', allActions, 'Marine1'), # HURRICANE FORCE WIND WARNING - ('SR.A', allActions, 'Marine2'), # STORM WATCH - ('SR.W', allActions, 'Marine3'), # STORM WARNING - ('GL.A', allActions, 'Marine4'), # GALE WATCH - ('GL.W', allActions, 'Marine5'), # GALE WARNING - ('SE.A', allActions, 'Marine6'), # HAZARDOUS SEAS WATCH - ('SE.W', allActions, 'Marine7'), # HAZARDOUS SEAS WARNING - ('UP.A', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH - ('UP.W', allActions, 'IceAccr1'), # HEAVY FREEZING SPRAY WARNING - ('UP.Y', allActions, 'IceAccr2'), # FREEZING SPRAY ADVISORY - ('SC.Y', allActions, 'Marine8'), # SMALL CRAFT ADVISORY - ('SW.Y', allActions, 'Marine9'), # SMALL CRAFT ADVISORY - ('RB.Y', allActions, 'Marine10'), 
# SMALL CRAFT ADVISORY - ('SI.Y', allActions, 'Marine11'), # SMALL CRAFT ADVISORY - ('BW.Y', allActions, 'Marine12'), # BRISK WIND ADVISORY - ('MH.W', allActions, 'Marine16'), # VOLCANIC ASHFALL WARNING - ('MF.Y', allActions, 'Marine13'), # DENSE FOG ADVISORY - ('MS.Y', allActions, 'Marine14'), # DENSE SMOKE ADVISORY - ('MH.Y', allActions, 'Marine15'), # VOLCANIC ASHFALL ADVISORY - ('BZ.W', allActions, 'WinterWx'), # BLIZZARD WARNING - ('IS.W', allActions, 'WinterWx1'), # ICE STORM WARNING - ('LE.W', allActions, 'WinterWx2'), # LAKE EFFECT SNOW WARNING - ('WS.W', allActions, 'WinterWx3'), # WINTER STORM WARNING - ('WW.Y', allActions, 'WinterWx4'), # WINTER WEATHER ADVISORY - ('WS.A', allActions, 'WinterWx5'), # WINTER STORM WATCH - ('WC.W', allActions, 'WindChill'), # WIND CHILL WARNING - ('WC.Y', allActions, 'WindChill1'), # WIND CHILL ADVISORY - ('WC.A', allActions, 'WindChill2'), # WIND CHILL WATCH - ('DU.W', allActions, 'Dust'), # BLOWING DUST WARNING - ('DU.Y', allActions, 'Dust1'), # BLOWING DUST ADVISORY - ('EC.W', allActions, 'Cold'), # EXTREME COLD WARNING - ('EC.A', allActions, 'Cold2'), # EXTREME COLD WATCH - ('EH.W', allActions, 'Heat'), # EXCESSIVE HEAT WARNING - ('EH.A', allActions, 'Heat1'), # EXCESSIVE HEAT WATCH - ('HT.Y', allActions, 'Heat2'), # HEAT ADVISORY - ('FG.Y', allActions, 'Fog'), # DENSE FOG ADVISORY - ('ZF.Y', allActions, 'Fog2'), # FREEZING FOG ADVISORY - ('HZ.W', allActions, 'FrostFreeze'), # HARD FREEZE WARNING - ('FZ.W', allActions, 'FrostFreeze1'), # FREEZE WARNING - ('FR.Y', allActions, 'FrostFreeze2'), # FROST ADVISORY - ('HZ.A', allActions, 'FrostFreeze3'), # HARD FREEZE WATCH - ('FZ.A', allActions, 'FrostFreeze4'), # FREEZE WATCH - ('HW.W', allActions, 'Wind'), # HIGH WIND WARNING - ('WI.Y', allActions, 'Wind1'), # WIND ADVISORY - ('LW.Y', allActions, 'Wind2'), # LAKE WIND ADVISORY - ('HW.A', allActions, 'Wind3'), # HIGH WIND WATCH - ('SM.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY - ('FF.A', allActions, 'Flood'), # FLASH 
FLOOD WATCH - ('FA.A', allActions, 'Flood1'), # FLOOD WATCH - ('CF.W', allActions, 'CoastalFlood'), # COASTAL FLOOD WARNING - ('CF.Y', allActions, 'CoastalFlood3'), # COASTAL FLOOD ADVISORY - ('CF.A', allActions, 'CoastalFlood1'), # COASTAL FLOOD WATCH - ('LS.W', allActions, 'CoastalFlood5'), # LAKESHORE FLOOD WARNING - ('LS.A', allActions, 'CoastalFlood2'), # LAKESHORE FLOOD WATCH - ('LS.Y', allActions, 'CoastalFlood4'), # LAKESHORE FLOOD ADVISORY - ('AS.Y', allActions, 'AirStag'), # AIR STAGNATION ADVISORY - ('AS.O', allActions, 'AirStag1'), # AIR STAGNATION OUTLOOK - ('SU.W', allActions, 'HighSurf'), # HIGH SURF WARNING - ('SU.Y', allActions, 'HighSurf1'), # HIGH SURF ADVISORY - ('RP.S', allActions, 'Rip'), # HIGH RIP CURRENT RISK - ('BH.S', allActions, 'BeachHaz'), # BEACH HAZARDS STATEMENT - ('AF.W', allActions, 'Ashfall2'), # VOLCANIC ASHFALL WARNING - ('AF.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY - ('TS.W', allActions, 'Tsunami'), # TSUNAMI WARNING - ('TS.A', allActions, 'Tsunami1'), # TSUNAMI WATCH - ('FW.W', allActions, 'FireWx'), # RED FLAG WARNING - ('FW.A', allActions, 'FireWx1'), # FIRE WEATHER WATCH - ('LO.Y', marineActions, 'LowWater'), # LOW WATER ADVISORY - ('TS.W', allActions, 'Tsunami'), # TSUNAMI WARNING - ('TS.Y', allActions, 'Tsunami'), # TSUNAMI ADVISORY - ('TS.A', allActions, 'Tsunami'), # TSUNAMI WATCH - ] +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 20, 2014 #3685 randerso Changed to support mixed case +# +## + +## +# This is a base file that is not intended to be overridden. 
+## + +#------------------------------------------------------------------------- +# File Name: AFD.py +# Description: This product creates a Area Forecast Discussion product. +# Contributed by Eastern Region (Jim Noel, Rob Radzanowski) and +# Southern Region (Brian Curran) +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# AFD, AFD___Definition, AFD__Overrides +#------------------------------------------------------------------------- +# Weather Elements Needed: MinT, MaxT, PoP +#------------------------------------------------------------------------- +# Edit Areas Needed: Individual edit areas are required, one for each +# preliminary temp/PoP forecast point. +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: None +#------------------------------------------------------------------------- +# Component Products: None +#------------------------------------------------------------------------- +# User Configurable Variables: +# Definition Section: +# displayName If not None, defines how product appears in GFE GUI +# +# defaultEditAreas defines edit area names and station IDs for edit areas +# expected in the form of (editAreaName, 3letterStationID) +# editAreaSuffix default None. Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". 
If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# productName defines name of product e.g. "Zone Forecast Product" +# fullStationID full station identifier (4letter, KSLC) +# +# wmoID WMO ID for product header, such as FOUS45 +# +# pil Product pil, such as CCFBOX +# +# debug If on, debug_print statements will appear. +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from FormatterLauncher. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. + +# topicDividers List of tuples describing the various topic dividers: +# (topicName, topicDivider, alwaysInclude, includeInGUI) where +# --topicName is "Synopsis", "Update", etc. +# --topicDivider will appear in the product ".SYNOPSIS..." +# --alwaysInclude: if 1, the topic divider will always +# appear in the product. +# Otherwise, the user can choose at run-time whether +# to include the topic divider. +# --If 1, and alwaysInclude == 0, this item will appear +# in the GUI to be selected at run time. +# Some options, like PrevDisc should not appear in the +# GUI since they are tied to other user input e.g. +# getPreviousAFD. +# state_IDs The state_ID definitions below are for the W/W/A portion of +# your AFD. Multiple state IDs (including MARINE) are separated +# by commas. +# tieUpdateToPreviousAFD If 1, then when "Include Previous AFD" is chosen in the GUI, +# the UPDATE topic divider will automatically be included. 
+# fcstrNumberFormat The fcstrNumberFormat can take three values: +# Brief - short term forecaster and long term forecaster +# numbers separated by a slash. +# Verbose - "SHORT TERM...xx" and "LONG TERM...yy". +# None - no forecaster numbers added to the end of the AFD. +# NOTE: names or numbers may be used. +# shortTermForecasters : List of identifiers (number strings or names) for the +# short term forecasters +# longTerm Forecasters : List of identifiers (number strings or names) for the +# long term forecasters +# aviationForecasters : List of identifiers (number strings or names) for the +# aviation forecasters +# pointEditAreas If non-empty list, a point temp/pop table will be produced. +# popStartZ_AM start time for PoP for AM issuance in Zulu, (12 for 12z) +# Usually changed only for OCONUS sites. +# useZoneNames If 1, will use zone names instead of ugc codes in the W/W/A portion. +# abbreviateUGCs If 1, will abbreviate ugc string. Instead of: FLZ042-FLZ043-FLZ044 +# produce: FLZ042>FLZ044 +# WWA_Nil The WWA_Nil definition will be used for nil watches, warnings, +# or advisories. +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# None +# +# To look up additional tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Additional Information: +# The grids are sampled according to the following rules: +# MaxT/MinT: Four periods, 12 hours apart, daily, set up to take the +# MaxT grid overlapping noon LT and MinT grid overlapping midnight LT +# PoP: Four periods, 12 hours apart, 12z-00z, and 00z-12z. 
Periods can +# be overridden using the popStartZ_AM field for OCONUS sites. +# +# Missing data will be shown with MMM for temperatures and PoPs. +#------------------------------------------------------------------------- +# Example Output: +# +## +##FXUS64 KMAF 041309 +##AFDMAF +## +##AREA FORECAST DISCUSSION +##NATIONAL WEATHER SERVICE MIDLAND/ODESSA TX +##809 AM CDT SAT OCT 4 2003 +## +##.SHORT TERM... +## +## +##&& +## +##.LONG TERM... +## +## +##&& +## +##.PRELIMINARY POINT TEMPS/POPS... +##MAF 84 60 84 60 / 100 30 20 10 +##CNM 83 56 85 57 / 100 20 10 0 +##MRF 79 53 79 53 / 100 30 10 0 +##FST 86 62 87 62 / 100 30 10 10 +## +##&& +## +##.MAF WATCHES/WARNINGS/ADVISORIES... +##TX...NONE. +##NM...NONE. +## +##&& +## +##$$ +## +##99/99 +######################################################################## + +import TextRules +import SampleAnalysis +import string, time, types, os, re, copy +import ModuleAccessor, ProcessVariableList +import AbsTime + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + + Definition = { + "type": "smart", + + "displayName": "None", # for Product Generation Menu + "database" : "Official", # Source database. "Official", "Fcst", or "ISC" + + "defaultEditAreas" : "EditAreas_PublicMarineFireWx__", + "editAreaSuffix": None, + + # Edit Areas for creating the optional Preliminary Point Temp/PoPs + "pointEditAreas": [], + "outputFile": "{prddir}/TEXT/AFD_.txt", + "debug": 0, + + "productName": "Area Forecast Discussion", + "fullStationID" : "", # 4 letter station ID + "wmoID" : "", # WMO code + "wfoCityState" : "", # Location of WFO + "pil" : "", # product pil + "textdbPil" : "", # Product ID for storing to AWIPS text database. + "awipsWANPil" : "", # Product ID for transmitting to AWIPS WAN. 
+ "wfoSiteID": "", + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + # Language + "language": "english", + "lineLength": 66, #Maximum line length + + "state_IDs": ["ST"], + "tieUpdateToPreviousAFD": 0, + "fcstrNumberFormat" : "Brief", # Brief, Verbose, or None + "shortTermForecasters": ["99","01","02","03"], + "longTermForecasters": ["99","01","02","03"], + "aviationForecasters": ["99","01","02","03"], + "useZoneNames": 0, + "abbreviateUGCs": 1, + + "topicDividers" : [ + # topicName, topicDivider, alwaysInclude, includeInGUI + + ("Update", ".UPDATE...", 0, 1), + ("Synopsis", ".SYNOPSIS...", 0, 1), + + # EITHER Discussion OR ShortTerm/LongTerm should always be included. + ("Discussion", ".DISCUSSION...", 0, 0), + ("ShortTerm", ".SHORT TERM...", 1, 0), + ("LongTerm", ".LONG TERM...", 1, 0), + + # Optional dividers + ("Aviation", ".AVIATION...", 0, 1), + ("Marine", ".MARINE...", 0, 1), + ("FireWeather",".FIRE WEATHER...", 0, 1), + ("Hydro", ".HYDROLOGY...", 0, 1), + ("Climate", ".CLIMATE...", 0, 1), + + # Controlled by "includePreviousAFD" + ("PrevDisc", ".PREV DISCUSSION...", 0, 0), + + # Controlled by "pointEditAreas" + ("Prelim", ".PRELIMINARY POINT TEMPS/POPS...", 0, 0), + ], + + "popStartZ_AM": 12, #hour UTC + "WWA_Nil" : "None.", + + "hazardSamplingThreshold": (10, None), #(%cov, #points) + } + + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + +#################################################################### +# generateForecast: +# AFD formatter engine. 
+#################################################################### + def generateForecast(self, argDict): + + # Get variables from varDict and Definition: + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the segments + hazardsC = argDict['hazards'] + self._segmentList = self.organizeHazards(hazardsC.rawAnalyzedTable()) + + # Determine time ranges: + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Initialize the output string: + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + fcst = self._makeProduct(fcst, argDict) + + # Append the $$ delimiter and the forecaster numbers: + fcst = self._postProcessProduct(fcst, argDict) + return fcst + +#################################################################### +# _processVariableList +# Displays user dialog. +#################################################################### + + def _processVariableList(self, definition): + # Get Definition variables + for key in list(definition.keys()): + exec("self._" + key + "= definition[key]") + + # Create the list of optional topic dividers to appear in GUI + self._options = [] + for topicName, topicDivider, alwaysInclude, includeInGUI in self._topicDividers: + if topicName == "Update" and self._tieUpdateToPreviousAFD: + continue + if alwaysInclude: + continue + if includeInGUI: + self._options.append(topicDivider) + + varList = [] + varList.append((("Product Issuance", "productIssuance"), "Morning", "radio", + ["Morning","Afternoon"])) + varList.append((("Optional\nTopics", "optionalTopics"), [], "check", + self._options)) + varList.append((("Include\nPrevious AFD?", "includePreviousAFD"), "NO", "radio", + ["NO", "YES"])) + varList.append((("Short Term\nForecaster", "shortTermFcstrNumber") , "99", "radio", + self._shortTermForecasters)) + varList.append((("Long Term\nForecaster", "longTermFcstrNumber"), "99", "radio", + self._longTermForecasters)) + 
varList.append((("Aviation\nForecaster","aviationFcstrNumber"), "", "radio", + self._aviationForecasters)) + return self._callProcessVariableList("AFD Values", varList, varDict={}) + + def _callProcessVariableList(self, title, varList, varDict): + processVarList = ProcessVariableList.ProcessVariableList( + title, varList, varDict={}) + self._selectionStatus = processVarList.status() + if not self._selectionStatus == "OK": + return None # User Cancelled + return processVarList.varDict() + +#################################################################### +# _getVariables: +# Retrieves variables and definitions. +#################################################################### + def _getVariables(self, argDict): + # Make variable assignments + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + self._longTermFcstrNumber = self._getForecasterNumber(self._longTermFcstrNumber) + self._shortTermFcstrNumber = self._getForecasterNumber(self._shortTermFcstrNumber) + self._aviationFcstrNumber = self._getForecasterNumber(self._aviationFcstrNumber) + + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Set up information for Hazards product + # TODO uncomment following line? + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + # Check for state id: ST indicating that the user needs to + # set up the list of state id's + if len(self._state_IDs) == 1 and self._state_IDs[0] == "ST": + return "WARNING:You must set up 'state_IDs' in Definition section before running the AFD." + return + +#################################################################### +# _determineTimeRanges: +# Determine time ranges for product. 
Returns popPeriods and +# tempPeriods which are a list of tuples (timeRange, label). +# Also determines the timeLabel string for the MND header. +# Adapted from previous _determineTimeRanges found in CCF +# formatter. +#################################################################### + def _determineTimeRanges(self, argDict): + + # Calculate ddhhmm string value: + self._timeRange = self.createTimeRange(0, 240) + self._currentTime = argDict['creationTime'] #ZULU + self._ddhhmmTime = time.strftime("%d%H%M",time.gmtime( + self._currentTime)) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + self._issueTime = AbsTime.current() + + # If generating temp/pop table, determine time ranges + if len(self._pointEditAreas) > 0: + # numPeriods is the number of forecast periods used for the + # preliminary temp/PoP block: + numPeriods = 4 + + # PoP Time ranges: four periods + # If AM, begin at 12z of issue day (default), may be overridden + # by the popStartZ_AM flag. + # If PM, begin at 00z of next day (default), may be overridden + # by the popStartZ_AM flag. 
+ if self._productIssuance == "Morning": + startT = self._popStartZ_AM + else: + startT = self._popStartZ_AM + 12 # account for PM start later + + # rollover - different days from gmtime and local time + # so we need to sample the PoP from "yesterday" + # for MDT, rollover occurs from 5pm-midnight LST + if time.gmtime(self._currentTime)[2] != \ + time.localtime(self._currentTime)[2]: + startT = startT - 24 + + popStartTR = self.createTimeRange(startT, startT + 1, mode="Zulu") + timePeriod = 12 + timeSpan = 12 + self._popPeriods = self.getPeriods(popStartTR, timePeriod, + timeSpan, numPeriods) + + # Temp Time ranges: four periods, 12 hours apart, 5 hour span + # This is to catch the correct Max/Min temp grid + # If AM, begin with noon LT of issue day to catch MaxT + # If PM, begin with midnight LT of issue day to get MinT + if self._productIssuance == "Morning": + tempStartTR = self.createTimeRange(10, 15) + else: + tempStartTR = self.createTimeRange(22, 27) + timePeriod = 12 + timeSpan = 5 + self._tempPeriods = self.getPeriods(tempStartTR, timePeriod, timeSpan, + numPeriods) + return + +#################################################################### +# _addTopicDividers: +# Puts in the required and optional topic dividers per NWSI 10-503. +# Check for Update tied to getPreviousAFD. +# Get the previous discussion if requested. +#################################################################### + + def _addTopicDividers(self, fcst, argDict): + # Flag for adding the aviation forecaster number later + self._addedAviation = 0 + pad = "\n\n\n&&\n\n" + #print "\naddTopicDividers: user options", self._optionalTopics + + for topicName, topicDivider, alwaysInclude, includeInGUI in self._topicDividers: + + # Handle PrevDisc and Prelim in the order specified in the topicDividers list. 
+ if topicName == "PrevDisc" and self._includePreviousAFD == "YES": + fcst = self._getPreviousAFD(fcst, argDict, divider=1) + "&&\n\n" + continue + if topicName == "Prelim": + fcst = self._makePrelimNumberBlock(fcst, argDict) + continue + + # See if we need to add this divider + addDivider = 0 + # If alwaysInclude OR the user chose this divider from the GUI + # add the topic divider + if alwaysInclude or topicDivider in self._optionalTopics: + addDivider = 1 + # Handle Update if it's tied to previous AFD and the + # user chose to include the previous AFD + if topicName == "Update" and self._tieUpdateToPreviousAFD \ + and self._includePreviousAFD == "YES": + addDivider = 1 + if not addDivider: + continue + + # Add padding + #print "Adding divider", topicName, topicDivider + # Check for Aviation so we can later add the aviationFcstrNumber + if topicName == "Aviation": + self._addedAviation = 1 + if topicDivider == "": + continue + if topicName == "ShortTerm": + fcst += topicDivider + "\n\n" + else: + fcst += topicDivider + pad + return fcst + + def _getTopicDivider(self, topic): + for topicName, topicDivider, alwaysInclude, includeInGUI in self._topicDividers: + if topicName == topic: + return topicDivider + return "" + +#################################################################### +# _getPreviousAFD: +# Gets the previous AFD. Strips the leading MND header and the +# trailing W/W/A block. Also lops off the prelim number block if +# present.. +#################################################################### + def _getPreviousAFD(self, fcst, argDict, divider=0): + # Initialize strings and lists: + WWABlockString = "." 
+ self._wfoSiteID + " WATCHES/W" + newAFD = [] + + # Retrieve the previous AFD and store the list in AFD_old: + prevAFD = self.getPreviousProduct(self._textdbPil) + prevAFD = string.split(prevAFD, "\n") # ADDED newline delimeter 12/7/04 bc + + # Initialize starting and ending indices: + start_index = 0 + end_index = len(prevAFD) + if end_index == 0: + print("WARNING -- Previous AFD has zero length.") + + # Place newlines back at the end of each element in list prevAFD: + # ADDED 12/7/04 bc + for index in range(start_index, end_index): + prevAFD[index] = prevAFD[index] + "\n" + + # Make a copy of prevAFD to modify + oldAFD = prevAFD + # Loop through the list to find the first dot delimeter. Once + # found, then set start_index to the index in AFDlist. This will + # effectively strip off the MND header. Will also handle headlines + # too! + body_start_index = start_index + for index in range(start_index, end_index): + if oldAFD[index][:1] == ".": # first dot + body_start_index = index + break + + # Loop through the list to find the beginning of the W/W/A block. + # Once found, then set end_index to the index in AFDlist. This will + # strip off everything below the W/W/A block including this block: + body_end_index = end_index + for index in range(body_start_index, end_index): + if re.match(WWABlockString, oldAFD[index]) != None: + body_end_index = index + break + + # Make another pass to lop off the preliminary number block if it + # is present and reset end_index. + prelim_divider = self._getTopicDivider("Prelim") + if prelim_divider: + for index in range(body_start_index, body_end_index): + if re.match(prelim_divider, oldAFD[index]) != None: + body_end_index = index + break + + # Suggested by Rob R. @ CTP and from ER supplement to 10-503... + # Strip out the ampersands and the leading dot on the + # topic divider and place in newAFD. 
+ for index in range(body_start_index, body_end_index): + if (oldAFD[index][:1] == "."): + newAFD.append(oldAFD[index][1:]) + elif (oldAFD[index][:2] == "&&"): + index = index + 1 + else: + newAFD.append(oldAFD[index]) + + if divider: + # If previous issuance time is desired, append it to the + # _PrevDisc_Divider string: + issuance_dateTime = "" + # Loop through the list to find the issuance time string. + for index in range(start_index, end_index-1): + if prevAFD[index][:8] == "National": # next line has date time stamp + issuance_dateTime = str(prevAFD[index+1]) + break + # Build issuance_DateTime string: + # Strip off trailing newline... + issuance_dateTime = " /issued " + issuance_dateTime[:-1] + "/ \n" + # Eliminate double whitespace characters if present: + issuance_dateTime = re.sub(r" ", r" ", issuance_dateTime) # PATCH 12/7/04 bc + fcst = fcst + self._getTopicDivider("PrevDisc") + issuance_dateTime + "\n" # PATCH 12/7/04 bc + + # Now test for multiple newlines. If this isn't a newline, write to fcst. + # If it is a newline, test the next one down. If it's also a newline, + # write the first newline and skip to the next line. + for index in range(0, len(newAFD)-1): + if newAFD[index] != "\n": + fcst = fcst + newAFD[index] + else: + if newAFD[index+1] != "\n": + fcst = fcst + newAFD[index] + index = index + 1 + fcst = fcst + "\n" + return fcst + +#################################################################### +# _makePrelimNumberBlock: +# Creates the prelim number block for the AFD. 
+#################################################################### + def _makePrelimNumberBlock(self, fcst, argDict): + # Get the areaList: + if len(self._pointEditAreas) == 0: + return fcst + + # Convert (lat, lon, dim) entries to editAreas + self._areaList = [] + for editArea, areaLabel in self._pointEditAreas: + if type(editArea) is tuple: + lat, lon, dim = editArea + editArea = self.createLatLonArea(lat, lon, dim) + self._areaList.append((editArea, areaLabel)) + + # Append the prelim numbers divider: + fcst = fcst + self._getTopicDivider("Prelim") + "\n" + + # Sample the temps and PoPs: + self._sampleData(argDict) + + # Generate the preliminary numbers: + for editArea, areaLabel in self._areaList: + fcst = fcst + areaLabel + " " + fcst = self._makeFirstGuess(fcst, editArea, areaLabel, argDict) + fcst = fcst + "\n&&\n\n" + return fcst + +#################################################################### +# _preProcessProduct: +# Creates the MND header for the AFD. Checks to see if this is +# a routine, corrected, or updated issuance and appends this +# to the MND. 
+#################################################################### + def _preProcessProduct(self, fcst, argDict): + # Add product heading to fcst string + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + \ + self._pil + "\n\n" + + fcst = fcst + s.upper(); + + issuedByString = self.getIssuedByString() + + productName = self.checkTestMode(argDict, self._productName) + + s = productName + "\n" + \ + "National Weather Service " + \ + self._wfoCityState +"\n" + \ + issuedByString + \ + self._timeLabel + "\n\n" + fcst = fcst + s + return fcst + +#################################################################### +# _makeProduct: +# Formats the product +#################################################################### + def _makeProduct(self, fcst, argDict): + + # Insert topic dividers: + fcst = self._addTopicDividers(fcst, argDict) + + # Make the Hazard block: + fcst = self._makeHazardBlock(fcst,argDict) + return fcst + +#################################################################### +# _makeFirstGuess: +# Creates the "first guess" temp and PoP forecasts for each edit +# area. Note the format is not strictly NWSI 10-503 compliant +# as the directive makes no allowances for temperatures above 100 +# or below zero, nor does it allow for 100% PoPs. But I got +# permission to do it this way from SRH, so... 
+#################################################################### + def _makeFirstGuess(self, fcst, editArea, areaLabel, argDict): + # Produce temp forecast substring: + separators = [" ", " ", " ", " / ",] + for index in range(0, 4): + timeRange, label = self._tempPeriods[index] + fcst = fcst + self._getMinOrMax(self._analysisListTemp(), + editArea, timeRange) + separators[index] + + # Produce PoP forecast substring + separators = [" ", " ", " ", " "] + for index in range(0, 4): + timeRange, label = self._popPeriods[index] + fcst = fcst + self._getPoP(self._analysisListPoP(), editArea, + timeRange) + separators[index] + fcst = fcst + "\n" + return fcst + + +#################################################################### +# marineNameDict +# Used in the makeHazardBlock to determine the defined names +# for the marine zones. This function can be overridden to change +# the names of the marine areas. +#################################################################### + def marineNameDict(self): + # dictionary for marine zone identifiers + return {} #use the two-letter ids for the marine areas + + # if you want descriptive names for the marine areas + #return {'AM': 'Atlantic coastal Waters', 'GM': 'Gulf of Mexico', + # 'LE': 'Lake Erie', 'LO': 'Lake Ontario', 'LH': 'Lake Huron', + # 'LC': 'Lake St Clair', 'LM': 'Lake Michigan', 'LS': 'Lake Superior', + # 'PZ': 'Pacific coastal waters', 'PK': 'Alaskan coastal waters', + # 'PH': 'Hawaiian coastal waters', 'PM': 'Marianas waters', + # 'AN': 'Atlantic coastal waters', + # 'PS': 'American Samoa coastal waters', 'SL': 'St Lawrence River'} + +#################################################################### +# _makeHazardBlock: +# Cycles through the list of segments and reports the Hazards +#################################################################### + + def _makeHazardBlock(self, fcst, argDict): + + fcst = fcst + "." 
+ self._wfoSiteID + \ + " WATCHES/WARNINGS/ADVISORIES...\n" + + accessor = ModuleAccessor.ModuleAccessor() + areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") + + # get combinations file used, which contains extra info which will + # tell us which zones are marine, firewx and public + combo = self._defaultEditAreas + fireWxPhenSig = [("FW","W"), ("FW","A")] + fireWxZones = [] + otherZones = [] + if type(combo) is bytes: + try: + m = __import__(combo) + for map in list(m.EASourceMap.keys()): + if map.find("FireWx") != -1: + fireWxZones = m.EASourceMap[map] + else: + for idz in m.EASourceMap[map]: + if idz not in otherZones: + otherZones.append(idz) + except: + otherZones = None + + + marine = self.marineNameDict() + # + # Get every hazard and hazard combination in effect, separate them + # into records by state + # + hazardsRaw = argDict['hazards'].rawAnalyzedTable() + hazards = self._combineHazardRecords(hazardsRaw, argDict) + + stateDict = {} + for h in hazards: + #determine the states in this record + sd = {} + ids = h['id'] + for id in ids: + stateid = id[0:2] + if stateid in sd: + locs = sd[stateid] + locs.append(id) + sd[stateid] = locs + else: + sd[stateid] = [id] + #add the record to the appropriate "state" in stateDict + for state in list(sd.keys()): + hcopy = copy.deepcopy(h) + if state in stateDict: + recs = stateDict[state] + hcopy['id'] = sd[state] + recs.append(hcopy) + stateDict[state] = recs + else: + hcopy['id'] = sd[state] + stateDict[state] = [hcopy] + + + + # + # For every state we are responsible for, check for hazards + # + + for eachState in self._state_IDs: + if eachState in stateDict: + stateHazardList = stateDict[eachState] + else: + stateHazardList = [] + + # add the state identifier (only if multiple states) + if len(self._state_IDs) > 1: + #marine zone + if eachState in list(marine.keys()): + fcst = fcst + marine[eachState] + "..." + else: + fcst = fcst + eachState + "..." 
+ + # If no hazards are found, append the null phrase + + if len(stateHazardList) == 0: + fcst = fcst + self._WWA_Nil + "\n" + continue + + # If hazards are found, then build the hazard phrases + for i in range(len(stateHazardList)): + eachHazard = stateHazardList[i] + + # special check code for firewx + if (eachHazard['phen'],eachHazard['sig']) in fireWxPhenSig: + firezones = [] + for id in eachHazard['id']: + if id in fireWxZones and id not in firezones: + firezones.append(id) + eachHazard['id'] = firezones #eliminated public + stateHazardList[i] = eachHazard + else: + otherzones = [] + for id in eachHazard['id']: + if (otherZones is None or id in otherZones) and id not in otherzones: + otherzones.append(id) + eachHazard['id'] = otherzones #eliminated firewx + stateHazardList[i] = eachHazard + + + + # hazard name + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + + # timing phrase + timing = self.getTimingPhrase(eachHazard, argDict['creationTime']) + + # ids + ids = eachHazard['id'] + if len(ids) == 0: + continue #skip hazard string if no zones + if self._useZoneNames == 1: + zoneNames = [] + for id in ids: + zoneNames.append(areaDict[id]['ugcName']) + ids = zoneNames + ids.sort() + idString = "-".join(ids) + if self._useZoneNames == 0 and self._abbreviateUGCs == 1: + idString = self.makeUGCString(ids) + + # hazard phrase + phrase = hazName + ' ' + timing + ' for ' + idString + '.' 
+ + # Indent if there is a state list associated + if len(self._state_IDs) > 1: + phrase = self.indentText(phrase, indentFirstString = '', + indentNextString = ' ', + maxWidth=self._lineLength, + breakStrings=[" ", "-"]) + else: + phrase = self.indentText(phrase, indentFirstString = '', + indentNextString = '', + maxWidth=self._lineLength, + breakStrings=[" ", "-"]) + + # Apply the hazard phrases + if len(self._state_IDs) > 1: + #don't indent 1st one + if i == 0: + fcst = fcst + phrase + '\n' + #ident the remainder + else: + fcst = fcst + " " + phrase + '\n' + else: + fcst = fcst + phrase + '\n' #never ident - only 1 state + + fcst = fcst + "&&\n\n" + return fcst + +#################################################################### +# _combineHazardRecords +# Consolidate the hazard records for the hazard block. Combines +# "like" records by "id". Like records are those with the +# same phen, sig, start, and ending times. +# by the fcstrNumberFormat variable. +#################################################################### + def _combineHazardRecords(self, hazrecs, argDict): + ptable = copy.deepcopy(hazrecs) + import VTECTableUtil + vtu = VTECTableUtil.VTECTableUtil(activeTableFileName = None) + compare = ['phen','sig','startTime','endTime'] + acts = ['NEW','CON','EXT','EXB','EXA'] #live event + ctable = [] + for a in ptable: + if a['act'] not in acts: + continue #ignore non-live events + #ensure we combine records currently active, but may have diff + #start times + if a['startTime'] < argDict['creationTime']: + a['startTime'] = argDict['creationTime'] + found = 0 + for c in ctable: + if vtu.hazardCompare(a, c, compare): + found = 1 + zones = [a['id']] + + allzones = c['id'] + for z in zones: + allzones.append(z) + c['id'] = allzones + break + if found == 0: + newc = copy.deepcopy(a) + if newc['id'] is not list: + newc['id'] = [newc['id']] + ctable.append(newc) + + return ctable + + +#################################################################### +# 
_postProcessProduct: +# Appends the $$ delimeter followed by the short and long term +# forecaster numbers. Display of forecaster numbers is governed +# by the fcstrNumberFormat variable. +#################################################################### + def _postProcessProduct(self, fcst, argDict): + # Put in the $$ delimeter: + fcst = fcst + "$$\n\n" + # Add the forecaster numbers to the fcst string: + if self._fcstrNumberFormat == "Brief": + fcst = fcst + self._shortTermFcstrNumber + if self._longTermFcstrNumber != "": + fcst = fcst + "/" + self._longTermFcstrNumber + if self._addedAviation: + fcst = fcst + "/" +self._aviationFcstrNumber + + elif self._fcstrNumberFormat == "Verbose": + fcst = fcst + "SHORT TERM..." + self._shortTermFcstrNumber + if self._longTermFcstrNumber != "": + fcst = fcst + "\nLONG TERM...." + self._longTermFcstrNumber + if self._addedAviation: + fcst = fcst + "\nAVIATION..." + self._aviationFcstrNumber + + return fcst + +##################################################################### +# _sampleData, _analysisListPop, _analysisListTemp: +# Sample the temp and PoP grids. Returns the samplers for temp and +# PoP. +##################################################################### + def _sampleData(self, argDict): + # Sample the data. Returns the samplers for pop and temp + sampleList = [] + sampleList.append((self._analysisListPoP(), self._popPeriods)) + sampleList.append((self._analysisListTemp(), self._tempPeriods)) + sampleInfo = [] + for analList, periods in sampleList: + sampleInfo.append((analList, periods, self._areaList)) + + self._sampler = self.getSampler(argDict, sampleInfo) + return + + def _analysisListPoP(self): + return [ + ("PoP", self.stdDevMaxAvg), + ] + + def _analysisListTemp(self): + return [ + ("MinT", self.avg), + ("MaxT", self.avg), + ] + +#################################################################### +# _getMinOrMax: +# Returns a Max or Min value depending on availability. 
+#################################################################### + def _getMinOrMax(self, analysisList, area, timeRange): + + statDict = self.getStatDict(self._sampler, analysisList, + timeRange, area) + dayNight = self.getPeriod(timeRange,shiftToLocal=1) + if dayNight == self.DAYTIME(): + maxV = self.getStats(statDict, "MaxT") + return self._getTemp(maxV) + else: + minV = self.getStats(statDict, "MinT") + return self._getTemp(minV) + +#################################################################### +# _getTemp: +# Returns a three character string containing the temperature. +# For positive values less than 100, the leading 0 is replaced by +# a space. If no grid is found, "MMM" is returned. +#################################################################### + def _getTemp(self, value): + if value is None: + return "MMM" #for missing + value = int(round(value)) + valStr = string.rjust(repr(value), 3) + return valStr + +#################################################################### +# _getPoP: +# Returns a three character string containing the PoP to the +# nearest 10 percent. +#################################################################### + def _getPoP(self,analysisList,area,timeRange): + statDict = self.getStatDict( + self._sampler, analysisList, timeRange, area) + pop = self.getStats(statDict, "PoP") + if pop is None: + return "MMM" + value = int(self.round(pop,"Nearest",10)) + valStr = string.rjust(repr(value), 3) + return valStr + +#################################################################### +# _getForecasterNumber: +# Takes a number string or name and returns a string. +# Removes leading zeros from numbers. 
+#################################################################### + def _getForecasterNumber(self, numberString): + try: + result = "" # set result to null + num = int(numberString) # convert numberString to integer + if num > 99 or num < 0: # if outside [0,99] assign 99 string + result = repr(99) + elif num < 10: # if less than 10 pad leading 0: + result = string.zfill(repr(num), 2) + else: # convert back to string + result = repr(num) + return result + except: + return numberString + +#################################################################### + +### Removed inland tropical hazards in OB9.3 + def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", + "CON", "EXP"] + marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] + return [ + ('HU.W', tropicalActions, 'Tropical'), # HURRICANE WARNING + ('TY.W', tropicalActions, 'Tropical1'), # TYPHOON WARNING + ('TR.W', tropicalActions, 'Tropical2'), # TROPICAL STORM WARNING + ('HU.A', tropicalActions, 'Tropical3'), # HURRICANE WATCH + ('TY.A', tropicalActions, 'Tropical4'), # TYPHOON WATCH + ('TR.A', tropicalActions, 'Tropical5'), # TROPICAL STORM WATCH + ('HF.A', allActions, 'Marine'), # HURRICANE FORCE WIND WATCH + ('HF.W', allActions, 'Marine1'), # HURRICANE FORCE WIND WARNING + ('SR.A', allActions, 'Marine2'), # STORM WATCH + ('SR.W', allActions, 'Marine3'), # STORM WARNING + ('GL.A', allActions, 'Marine4'), # GALE WATCH + ('GL.W', allActions, 'Marine5'), # GALE WARNING + ('SE.A', allActions, 'Marine6'), # HAZARDOUS SEAS WATCH + ('SE.W', allActions, 'Marine7'), # HAZARDOUS SEAS WARNING + ('UP.A', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH + ('UP.W', allActions, 'IceAccr1'), # HEAVY FREEZING SPRAY WARNING + ('UP.Y', allActions, 'IceAccr2'), # FREEZING SPRAY ADVISORY + ('SC.Y', allActions, 'Marine8'), # SMALL CRAFT ADVISORY + ('SW.Y', allActions, 'Marine9'), # SMALL CRAFT ADVISORY + ('RB.Y', allActions, 
'Marine10'), # SMALL CRAFT ADVISORY + ('SI.Y', allActions, 'Marine11'), # SMALL CRAFT ADVISORY + ('BW.Y', allActions, 'Marine12'), # BRISK WIND ADVISORY + ('MH.W', allActions, 'Marine16'), # VOLCANIC ASHFALL WARNING + ('MF.Y', allActions, 'Marine13'), # DENSE FOG ADVISORY + ('MS.Y', allActions, 'Marine14'), # DENSE SMOKE ADVISORY + ('MH.Y', allActions, 'Marine15'), # VOLCANIC ASHFALL ADVISORY + ('BZ.W', allActions, 'WinterWx'), # BLIZZARD WARNING + ('IS.W', allActions, 'WinterWx1'), # ICE STORM WARNING + ('LE.W', allActions, 'WinterWx2'), # LAKE EFFECT SNOW WARNING + ('WS.W', allActions, 'WinterWx3'), # WINTER STORM WARNING + ('WW.Y', allActions, 'WinterWx4'), # WINTER WEATHER ADVISORY + ('WS.A', allActions, 'WinterWx5'), # WINTER STORM WATCH + ('WC.W', allActions, 'WindChill'), # WIND CHILL WARNING + ('WC.Y', allActions, 'WindChill1'), # WIND CHILL ADVISORY + ('WC.A', allActions, 'WindChill2'), # WIND CHILL WATCH + ('DU.W', allActions, 'Dust'), # BLOWING DUST WARNING + ('DU.Y', allActions, 'Dust1'), # BLOWING DUST ADVISORY + ('EC.W', allActions, 'Cold'), # EXTREME COLD WARNING + ('EC.A', allActions, 'Cold2'), # EXTREME COLD WATCH + ('EH.W', allActions, 'Heat'), # EXCESSIVE HEAT WARNING + ('EH.A', allActions, 'Heat1'), # EXCESSIVE HEAT WATCH + ('HT.Y', allActions, 'Heat2'), # HEAT ADVISORY + ('FG.Y', allActions, 'Fog'), # DENSE FOG ADVISORY + ('ZF.Y', allActions, 'Fog2'), # FREEZING FOG ADVISORY + ('HZ.W', allActions, 'FrostFreeze'), # HARD FREEZE WARNING + ('FZ.W', allActions, 'FrostFreeze1'), # FREEZE WARNING + ('FR.Y', allActions, 'FrostFreeze2'), # FROST ADVISORY + ('HZ.A', allActions, 'FrostFreeze3'), # HARD FREEZE WATCH + ('FZ.A', allActions, 'FrostFreeze4'), # FREEZE WATCH + ('HW.W', allActions, 'Wind'), # HIGH WIND WARNING + ('WI.Y', allActions, 'Wind1'), # WIND ADVISORY + ('LW.Y', allActions, 'Wind2'), # LAKE WIND ADVISORY + ('HW.A', allActions, 'Wind3'), # HIGH WIND WATCH + ('SM.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY + ('FF.A', allActions, 
'Flood'), # FLASH FLOOD WATCH + ('FA.A', allActions, 'Flood1'), # FLOOD WATCH + ('CF.W', allActions, 'CoastalFlood'), # COASTAL FLOOD WARNING + ('CF.Y', allActions, 'CoastalFlood3'), # COASTAL FLOOD ADVISORY + ('CF.A', allActions, 'CoastalFlood1'), # COASTAL FLOOD WATCH + ('LS.W', allActions, 'CoastalFlood5'), # LAKESHORE FLOOD WARNING + ('LS.A', allActions, 'CoastalFlood2'), # LAKESHORE FLOOD WATCH + ('LS.Y', allActions, 'CoastalFlood4'), # LAKESHORE FLOOD ADVISORY + ('AS.Y', allActions, 'AirStag'), # AIR STAGNATION ADVISORY + ('AS.O', allActions, 'AirStag1'), # AIR STAGNATION OUTLOOK + ('SU.W', allActions, 'HighSurf'), # HIGH SURF WARNING + ('SU.Y', allActions, 'HighSurf1'), # HIGH SURF ADVISORY + ('RP.S', allActions, 'Rip'), # HIGH RIP CURRENT RISK + ('BH.S', allActions, 'BeachHaz'), # BEACH HAZARDS STATEMENT + ('AF.W', allActions, 'Ashfall2'), # VOLCANIC ASHFALL WARNING + ('AF.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY + ('TS.W', allActions, 'Tsunami'), # TSUNAMI WARNING + ('TS.A', allActions, 'Tsunami1'), # TSUNAMI WATCH + ('FW.W', allActions, 'FireWx'), # RED FLAG WARNING + ('FW.A', allActions, 'FireWx1'), # FIRE WEATHER WATCH + ('LO.Y', marineActions, 'LowWater'), # LOW WATER ADVISORY + ('TS.W', allActions, 'Tsunami'), # TSUNAMI WARNING + ('TS.Y', allActions, 'Tsunami'), # TSUNAMI ADVISORY + ('TS.A', allActions, 'Tsunami'), # TSUNAMI WATCH + ] diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/AreaFcst.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/AreaFcst.py index cd35b14902..a7f17fae62 100755 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/AreaFcst.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/AreaFcst.py @@ -1,1519 +1,1519 @@ -## -# This software was developed and / or modified by Raytheon Company, -# 
pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# May 01, 2015 17421 ryu Changed analysis methods for StormTotalSnow +# +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: This product creates a ZFP-type series of text phrases +# for consecutive time periods for a list of edit areas. It can be +# used to create a ZFP or an SAF. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# AreaFcst, ZFP___Definition, ZFP__Overrides, +# SAF_Overrides, SAF___Definition, SAF__Overrides +# Optional: MultipleElement_Aux_Local (to create a MultipleElementTable) +#------------------------------------------------------------------------- +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# +# You must set the following: +# +# productName defines name of product e.g. "Zone Forecast Product" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "SFTBOS" +# areaName (opt.) Area name for product header, such as "Western New York" +# wfoCityState City,state that the WFO is located in, such as "Buffalo NY" +# +# Optional Configuration Items +# +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined or the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from FormatterLauncher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. This value is also used for +# the default GUI entry for storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# lineLength Desired maximum length of each line. 
+# +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) + +# periodCombining If 1, an attempt will be made to combine components +# or time periods into one. Otherwise no period +# combining will be done. +# defaultEditAreas defines edit areas, default is Combinations +# editAreaSuffix default None. Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# directiveType 10-503 or C11 +# arealSkyAnalysis Set to 1 to include analysis for Sky for +# areal vs. traditional sky phrasing. This is made +# optional since there is a performance cost to +# include this analysis. +# useStormTotalSnow Set to 1 to use StormTotalSnow grids for reporting total snow +# includeExtended (applies to C11 only) Include the extended forecast +# extendedLabel (applies to C11 only) Includes label before extended forecast +# includeEveningPeriod (applies to C11 only) Include a 6 hour Evening period on the 3rd day +# +# includeMultipleElementTable +# cityDictionary +# To include a MultipleElementTable (e.g. 
Temp Pop Table) +# for each area in the current Combination: +# Set "includeMultipleElement" to 1 +# Set the "elementList" and "singleValueFormat" flag if desired +# "elementList" may include "Temp", "PoP", and/or "Humidity" +# "singleValueFormat" lists only one value per element +# Make sure you are using a Combinations file +# Modify the CityDictionary TextUtility file or create a new one +# areaDictionary Modify the AreaDictionary utility with UGC information about zones. +# +# Weather-related flags +# hoursSChcEnds - specifies hours past the beginning of the first +# first period of the product to stop including 'Slight +# Chance' or 'Isolated' weather types (ERH policy +# allows values of 1-5 * 12 hour periods) +# +# Trouble-shooting items +# passLimit -- Limit on passes allowed through Narrative Tree +# trace -- Set to 1 to turn on trace through Narrative Tree +# +# NARRATIVE CUSTOMIZATION POINTS +# The phrases in this product can be customized in many ways by overriding +# infrastructure methods in the Local file. +# You will see common overrides in the Local file and you may change them +# in that there. +# For further customization, you can determine which phrases your product is +# using by examining the Component Product Definitions below. +# Then, you can look up the phrase in the Text Product User Guide which will +# describe the all the relevant override methods associated with the phrase. +# Refer to the Customization section of the Text Product User Guide +# for step-by-step information. +# +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Out to 7 days: +# MinT, MaxT, T, +# Sky, PoP, Wind, Wx -- every 6 hours +# Optional: SnowAmt, IceAccum, SnowLevel, WindChill, HeatIndex, +# StormTotalSnow +# Out to 3 days: +# WindGust (opt.) (every 6 hours) # -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# May 01, 2015 17421 ryu Changed analysis methods for StormTotalSnow -# -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: This product creates a ZFP-type series of text phrases -# for consecutive time periods for a list of edit areas. It can be -# used to create a ZFP or an SAF. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# AreaFcst, ZFP___Definition, ZFP__Overrides, -# SAF_Overrides, SAF___Definition, SAF__Overrides -# Optional: MultipleElement_Aux_Local (to create a MultipleElementTable) -#------------------------------------------------------------------------- -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# -# You must set the following: -# -# productName defines name of product e.g. "Zone Forecast Product" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "SFTBOS" -# areaName (opt.) Area name for product header, such as "Western New York" -# wfoCityState City,state that the WFO is located in, such as "Buffalo NY" -# -# Optional Configuration Items -# -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined or the GFE zone combiner -# database Source database for product. 
Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from FormatterLauncher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. This value is also used for -# the default GUI entry for storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# lineLength Desired maximum length of each line. -# -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) - -# periodCombining If 1, an attempt will be made to combine components -# or time periods into one. Otherwise no period -# combining will be done. -# defaultEditAreas defines edit areas, default is Combinations -# editAreaSuffix default None. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. -# directiveType 10-503 or C11 -# arealSkyAnalysis Set to 1 to include analysis for Sky for -# areal vs. traditional sky phrasing. This is made -# optional since there is a performance cost to -# include this analysis. 
-# useStormTotalSnow Set to 1 to use StormTotalSnow grids for reporting total snow -# includeExtended (applies to C11 only) Include the extended forecast -# extendedLabel (applies to C11 only) Includes label before extended forecast -# includeEveningPeriod (applies to C11 only) Include a 6 hour Evening period on the 3rd day -# -# includeMultipleElementTable -# cityDictionary -# To include a MultipleElementTable (e.g. Temp Pop Table) -# for each area in the current Combination: -# Set "includeMultipleElement" to 1 -# Set the "elementList" and "singleValueFormat" flag if desired -# "elementList" may include "Temp", "PoP", and/or "Humidity" -# "singleValueFormat" lists only one value per element -# Make sure you are using a Combinations file -# Modify the CityDictionary TextUtility file or create a new one -# areaDictionary Modify the AreaDictionary utility with UGC information about zones. -# -# Weather-related flags -# hoursSChcEnds - specifies hours past the beginning of the first -# first period of the product to stop including 'Slight -# Chance' or 'Isolated' weather types (ERH policy -# allows values of 1-5 * 12 hour periods) -# -# Trouble-shooting items -# passLimit -- Limit on passes allowed through Narrative Tree -# trace -- Set to 1 to turn on trace through Narrative Tree -# -# NARRATIVE CUSTOMIZATION POINTS -# The phrases in this product can be customized in many ways by overriding -# infrastructure methods in the Local file. -# You will see common overrides in the Local file and you may change them -# in that there. -# For further customization, you can determine which phrases your product is -# using by examining the Component Product Definitions below. -# Then, you can look up the phrase in the Text Product User Guide which will -# describe the all the relevant override methods associated with the phrase. -# Refer to the Customization section of the Text Product User Guide -# for step-by-step information. 
-# -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Out to 7 days: -# MinT, MaxT, T, -# Sky, PoP, Wind, Wx -- every 6 hours -# Optional: SnowAmt, IceAccum, SnowLevel, WindChill, HeatIndex, -# StormTotalSnow -# Out to 3 days: -# WindGust (opt.) (every 6 hours) -# -# NOTE: If you choose to include a "pre-1st period" for the -# Morning or Afternoon issuance, you must have grids in the -# pre-1st period or you will not see any data for that element. -#------------------------------------------------------------------------- -# Edit Areas Needed: None -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -# Combinations file -#------------------------------------------------------------------------- -# Component Products: -# 10-503: -# Period_1 -# Period_2_3 -# Period_4_5 -# Period_6_14 -# -# C11: -# FirstFcstPeriod -# AreaFcstPeriod -# Evening -# LaterPeriod -# LaterPeriodWithoutLows -# C11Extended -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". 
-#------------------------------------------------------------------------- -# Additional Information: -# -# COMMON OVERRIDES -# from ConfigVariables: -# maximum_range_nlValue_dict -# minimum_range_nlValue_dict -# range_nlValue_dict -# phrase_descriptor_dict -# pop_lower_threshold -# pop_upper_threshold -# null_nlValue_dict -# first_null_phrase_dict -# null_phrase_dict -# increment_nlValue_dict -# vector_mag_difference_nlValue_dict -# scalar_difference_nlValue_dict -# value_connector_dict -# from WxPhrases: -# pop_wx_lower_threshold -# wxCoverageDescriptors -# wxTypeDescriptors -# wxAttributeDescriptors -# wxIntensityDescriptors -# wxCombinations -# combine_T_RW -# from ScalarPhrases: -# pop_sky_lower_threshold -# pop_snow_lower_threshold -# pop_snowLevel_upper_threshold -# snowLevel_maximum_phrase -# temp_trend_nlValue -# temp_trends_addToPhrase_flag -# windChillTemp_difference -# heatIndexTemp_difference -# areal_sky_flag -# from CombinedPhrases: -# useCombinedSkyPopWx -# from VectorRelatedPhrases: -# lake_wind_areaNames -# useWindsForGusts_flag -# from SampleAnalysis: -# stdDev_dict -# -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS C11 and 10-503 Directives for Public Weather Services. -#------------------------------------------------------------------------- - -import TextRules -import SampleAnalysis -import ForecastNarrative -import time, string, types, copy -import TimeRange - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - # If Flooding is "Yes", the system will insert a statement in the - # product when FFA, FLW, or FLS is in effect. - #(("Flooding?", "flooding") , "No", "radio", ["Yes","No"]), - #(("Include Tropical?", "includeTropical") , "No", "radio", ["Yes","No"]), - ] - - Definition = { - "type": "smart", - "displayName": "None", - - # Source database for product. 
Can be "Official", "Fcst" or "ISC" - "database": "Official", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/ZFP_.txt", - "debug": 0, - # Name of map background for creating Combinations - "mapNameForCombinations": "Zones_", - - ## Edit Areas: Create Combinations file with edit area combinations. - "showZoneCombiner" : 1, # 1 to cause zone combiner to display - "defaultEditAreas" : "Combinations_ZFP__", - "editAreaSuffix": None, - - # product identifiers - "productName": "Zone Forecast Product", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "Georgia" -- optional - "wfoCityState": "", # Location of WFO - city,state - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - "periodCombining" : 0, # If 1, combine periods, if possible - "lineLength": 66, #product line length - "fixedExpire": 1, #ensure VTEC actions don't affect segment time - - # Choices are "10-503" or "C11" - "directiveType": "10-503", - # Set to 1 to include analysis for Sky for areal vs. traditional - # sky phrasing. - "arealSkyAnalysis":0, - # Set to 1 to use StormTotalSnow grid - "useStormTotalSnow": 0, - #"directiveType": "C11", - # Applies to C11 directive only: - "includeExtended": 1, - # Set to one if you want an extended label - "extendedLabel": 1, - # Set to one if you want a 6-hour evening period instead of - # 18-hour period without lows - "includeEveningPeriod": 1, - - # Product-specific variables: - # To include a MultipleElementTable (e.g. 
Temp Pop Table) - # for each area in the current Combination: - # Set "includeMultipleElement" to 1 - # Set the "elementList" and "singleValueFormat" flag if desired - # "elementList" may include "Temp", "PoP", and/or "Humidity" - # "singleValueFormat" lists only one value per element - # Make sure you are using a Combinations file - # Modify the CityDictionary TextUtility file or create a new one - "includeMultipleElementTable": 0, - "elementList" : ["Temp", "PoP"], - "singleValueFormat": 0, - "cityDictionary": "CityDictionary", - - # Weather-related flags - "hoursSChcEnds": 24, - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - # Language - "language": "english", - - # Trouble-shooting items - "passLimit": 20, # Limit on passes allowed through - # Narrative Tree - "trace": 0, # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - - - - "hazardSamplingThreshold": (10, None), #(%cov, #points) - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - ######################################################################## - # OVERRIDING THRESHOLDS AND VARIABLES - ######################################################################## - - def lake_wind_areaNames(self, tree, node): - # Return list of edit area names for which the lake_wind_phrase - # should be generated - # If you want the phrase potentially generated for all zones, use: - # return ["ALL"] - return [] - - def lake_wind_thresholds(self, tree, node): - # Return upper and lower lake_wind thresholds in mph. - # Only apply phrase for max wind speed of 25 to 35 mph. At 35 mph - # and higher, an advisory of some sort will be in effect and phrase - # will not be needed. 
- return 25, 35 - - def element_outUnits_dict(self, tree, node): - dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) - dict["Wind"] = "mph" - dict["WindGust"] = "mph" - return dict - - def range_nlValue_dict(self, tree, node): - # If the range of values is less than this threshold, - # the data will be reported as a single value - # e.g HIGHS AROUND 80 - dict = TextRules.TextRules.range_nlValue_dict(self, tree, node) - dict["MaxT"] = 5 - dict["MinT"] = 5 - dict["MinRH"] = 5 - dict["MaxRH"] = 5 - return dict - - # If you are using the C11 format with periodCombining on, - # set this variable to zero for proper labeling. - def splitDay24HourLabel_flag(self, tree, node): - # Return 0 to have the TimeDescriptor module label 24 hour periods - # with simply the weekday name (e.g. Saturday) - # instead of including the day and night periods - # (e.g. Saturday and Saturday night) - return 1 - - def gust_wind_difference_nlValue(self, tree, node): - # Difference between gust and maxWind below which gusts are not - # mentioned. Units are MPH - if self._includeTropical: - return 5 - else: - return 10 - - def temporalCoverage_hours(self, parmHisto, timeRange, componentName): - # COMMENT: At WFO MFL we use 3 hrly wind grids. If you use 1 hrly wind grids - # and this parameter is 2 or higher, tropical cyclone winds affecting the very - # early or latter part of a forecast period might be neglected. 1 assures - # maximum sensitivity. - if self._includeTropical: - return 1 - else: - return 0 - - def moderated_dict(self, parmHisto, timeRange, componentName): - """ - Modifed to lower the high end filter threshold from 20 MPH to - 15 MPH for Tropical. - """ - # COMMENT: This dictionary defines the low and high limit at which - # outliers will be removed when calculating moderated stats. - # By convention the first value listed is the percentage - # allowed for low values and second the percentage allowed - # for high values. 
The thresholds chosen below gave best results - # during testing with 2004 and 2005 tropical cyclones. This dict - # is used with the moderatedMinMax analysis method specified in the - # TropicalPeriod definitions specified further down for use with - # tropical cyclones with wind parameters. - - # Get Baseline thresholds - dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, - timeRange, componentName) - - # Change thresholds for Wind, WindGust, WaveHeight and Swell - if self._includeTropical: - dict["Wind"] = (0, 15) - dict["WindGust"] = (0, 15) - dict["WaveHeight"] = (0, 15) - dict["Swell"] = (0, 15) - return dict - - def maximum_range_nlValue_dict(self, tree, node): - # Maximum range to be reported within a phrase - # e.g. 5 to 10 mph - # Units depend on the product - dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) - #----------------------------------------------------------------------- - # COMMENT: Override max ranges for certain fields - # This dict specifications allows for wind speed ranges of up to 20 mph - # during tropical cyclone situations allowing for far better wind speed - # phrases. - #----------------------------------------------------------------------- - if self._includeTropical: - dict["Wind"] = {'default': 5, - (0.0, 4.0): 0, - (4.0, 33.0): 5, - (33.0, 52.0): 10, - (52.0, 200.0): 20, - } - return dict - - ######################################################################## - # COMPONENT PRODUCT DEFINITIONS - ######################################################################## - - def _PoP_analysisMethod(self, componentName): - # Alternative PoP analysis methods for consistency between PoP and Wx - return self.stdDevMaxAvg - #return self.maxMode - #return self.maximum - - # These Components are named to indicate which period of the forecast - # they define. 
- - # Special alterations to analysisList and phraseList for Tropical formatting - def addTropical(self, analysisList, phraseList, includeHazards=True): - newAnalysisList = [] - for entry in analysisList: - # Sampling defined as a tuple (field, statistic, temporal rate) - # If this is NOT a Wind or WindGust statistic - if entry[0] not in ["Hazards", "Wind", "WindGust"]: - # Add this statistic to the new analysisList - newAnalysisList.append(entry) - newAnalysisList += [ - ("Wind", self.vectorModeratedMinMax, [6]), - ("WindGust", self.moderatedMinMax, [6]), - ("pws34", self.maximum), - ("pws64", self.maximum), - ("pwsN34", self.maximum), - ("pwsN64", self.maximum), - ("pwsD34", self.maximum), - ("pwsD64", self.maximum), - ] - if includeHazards: - newAnalysisList.append(("Hazards", self.discreteTimeRangesByKey)) - - phraseList.insert(0, self.pws_phrase) - return newAnalysisList, phraseList - - def Period_1(self): - analysisList = [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [3]), - ("PoP", self._PoP_analysisMethod("Period_1"), [3]), - ("PoP", self.binnedPercent, [3]), - ("SnowAmt", self.accumMinMax), - ("StormTotalSnow", self.minMax), - ("IceAccum", self.accumMinMax), - ("SnowLevel", self.avg), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("WindGust", self.maximum, [6]), - ("Wx", self.rankedWx, [3]), - ("WindChill", self.minMax, [6]), - ("HeatIndex", self.minMax, [6]), - ] - phraseList = [ - self.wind_summary, - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.severeWeather_phrase, - self.heavyPrecip_phrase, - self.visibility_phrase, - self.snow_phrase, - self.total_snow_phrase, - self.snowLevel_phrase, - self.iceAccumulation_phrase, - self.highs_phrase, - self.lows_phrase, - #self.highs_range_phrase, - #self.lows_range_phrase, - self.steady_temp_trends, - 
self.temp_trends, - self.wind_withGusts_phrase, - self.lake_wind_phrase, - self.popMax_phrase, - self.windChill_phrase, - # Alternative - #self.windBased_windChill_phrase, - self.heatIndex_phrase, - ] - - if self._includeTropical: - analysisList, phraseList = self.addTropical(analysisList, phraseList) - - component = { - "type": "component", - "methodList": [ - self.orderPhrases, - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": analysisList, - "phraseList": phraseList, -## "additionalAreas": [ -## # Areas listed by weather element that will be -## # sampled and analysed. -## # E.g. used for reporting population centers for temperatures. -## ("MaxT", ["City1", "City2"]), -## ("MinT", ["City1", "City2"]), -## ], -## "intersectAreas": [ -## # Areas listed by weather element that will be -## # intersected with the current area then -## # sampled and analysed. -## # E.g. used in local effects methods. -## ("MaxT", ["Mountains"]), -## ("MinT", ["Valleys"]), -## ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [3])) - if self._useStormTotalSnow: - phraseList = component["phraseList"] - index = phraseList.index(self.total_snow_phrase) - phraseList[index] = self.stormTotalSnow_phrase - component["phraseList"] = phraseList - return component - - def Period_2_3(self): - # No Lake Wind phrase - analysisList = [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [6]), - ("PoP", self._PoP_analysisMethod("Period_2_3"), [6]), - ("PoP", self.binnedPercent, [6]), - ("SnowAmt", self.accumMinMax), - ("StormTotalSnow", self.minMax), - ("IceAccum", self.accumMinMax), - ("SnowLevel", self.avg), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("WindGust", self.maximum, [6]), - ("Wx", self.rankedWx, [6]), - ("WindChill", self.minMax, [6]), 
- ("HeatIndex", self.minMax, [6]), - ] - phraseList = [ - self.wind_summary, - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.severeWeather_phrase, - self.heavyPrecip_phrase, - self.visibility_phrase, - self.snow_phrase, - self.total_snow_phrase, - self.snowLevel_phrase, - self.iceAccumulation_phrase, - self.highs_phrase, - self.lows_phrase, - #self.highs_range_phrase, - #self.lows_range_phrase, - self.steady_temp_trends, - self.temp_trends, - self.wind_withGusts_phrase, -# self.lake_wind_phrase, - self.popMax_phrase, - self.windChill_phrase, - self.heatIndex_phrase, - ] - - if self._includeTropical: - analysisList, phraseList = self.addTropical(analysisList, phraseList) - - component = { - "type": "component", - "methodList": [ - self.orderPhrases, - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": analysisList, - "phraseList": phraseList, - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [6])) - if self._useStormTotalSnow: - phraseList = component["phraseList"] - index = phraseList.index(self.total_snow_phrase) - phraseList[index] = self.stormTotalSnow_phrase - component["phraseList"] = phraseList - return component - - def Period_4_5(self): - # Descriptive snow phrase - - analysisList = [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [6]), - ("PoP", self._PoP_analysisMethod("Period_4_5"), [6]), - ("PoP", self.binnedPercent, [6]), - ("SnowAmt", self.accumMinMax), - ("IceAccum", self.accumMinMax), - ("SnowLevel", self.avg), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("WindGust", self.maximum, [6]), - ("Wx", self.rankedWx, [6]), - ("WindChill", self.minMax, [6]), - ("HeatIndex", self.minMax, [6]), - ] - phraseList = [ - self.wind_summary, - self.reportTrends, - 
self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.severeWeather_phrase, - self.heavyPrecip_phrase, - self.visibility_phrase, - self.descriptive_snow_phrase, - self.snowLevel_phrase, - self.iceAccumulation_phrase, - self.highs_phrase, - self.lows_phrase, - #self.highs_range_phrase, - #self.lows_range_phrase, - self.steady_temp_trends, - self.temp_trends, - self.wind_withGusts_phrase, - self.popMax_phrase, - self.windChill_phrase, - self.heatIndex_phrase, - ] - - if self._includeTropical: - analysisList, phraseList = self.addTropical(analysisList, phraseList) - - component = { - "type": "component", - "methodList": [ - self.orderPhrases, - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": analysisList, - "phraseList": phraseList, - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [6])) - return component - - def Period_6_14(self): - analysisList = [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [6]), - ("PoP", self._PoP_analysisMethod("Period_6_14"), [6]), - ("PoP", self.binnedPercent, [6]), - ("Wind", self.vectorMedianRange, [12]), - ("SnowAmt", self.accumMinMax), - ("Wx", self.rankedWx, [6]), - ("WindChill", self.minMax, [6]), - ("HeatIndex", self.minMax, [6]), - ] - phraseList = [ - self.wind_summary, - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.severeWeather_phrase, - self.heavyPrecip_phrase, - self.visibility_phrase, - self.descriptive_snow_phrase, - self.highs_phrase, - self.lows_phrase, - #self.highs_range_phrase, - #self.lows_range_phrase, - self.steady_temp_trends, - self.temp_trends, - self.popMax_phrase, - self.windChill_phrase, - self.heatIndex_phrase, - ] - - if self._includeTropical: - analysisList, phraseList = self.addTropical( - analysisList, phraseList, 
includeHazards=False) - - component = { - "type": "component", - "methodList": [ - self.orderPhrases, - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": analysisList, - "phraseList": phraseList, - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [6])) - return component - - def ExtraSampling(self): - # sampling for temp trends - return { - "type": "component", - "methodList": [self.noWords], - "analysisList": [ - ("MaxT", self.stdDevMinMax, [0]), - ("MinT", self.stdDevMinMax, [0]), - ], - "phraseList": [], - "intersectAreas": [], - } - - def TotalSnowSampling(self): - return { - "type": "component", - "methodList": [self.noWords], - "analysisList": [ - ("SnowAmt", self.moderatedMinMax), - ], - "phraseList":[], - } - - ########################################################################### - ### C11 Definitions - ########################################################################### - - def FirstFcstPeriod(self): - component = { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [6]), - ("PoP", self._PoP_analysisMethod("FirstFcstPeriod"), [6]), - ("PoP", self.binnedPercent, [6]), - ("SnowAmt", self.accumMinMax), - ("StormTotalSnow", self.minMax), - ("IceAccum", self.accumMinMax), - ("SnowLevel", self.avg), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("WindGust", self.maximum, [6]), - ("Wx", self.rankedWx, [6]), - ("WindChill", self.minMax, [6]), - ("HeatIndex", self.minMax, [6]), - ], - "phraseList":[ - self.wind_summary, - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.severeWeather_phrase, - self.snow_phrase, - 
self.total_snow_phrase, - self.snowLevel_phrase, - self.highs_phrase, - self.lows_phrase, - #self.highs_range_phrase, - #self.lows_range_phrase, - self.steady_temp_trends, - self.temp_trends, - self.wind_withGusts_phrase, - self.lake_wind_phrase, - self.popMax_phrase, - self.windChill_phrase, - self.heatIndex_phrase, - ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [6])) - if self._useStormTotalSnow: - phraseList = component["phraseList"] - index = phraseList.index(self.total_snow_phrase) - phraseList[index] = self.stormTotalSnow_phrase - component["phraseList"] = phraseList - return component - - def AreaFcstPeriod(self): - component = { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("Sky", self.median, [6]), - ("PoP", self._PoP_analysisMethod("AreaFcstPeriod"), [6]), - ("PoP", self.binnedPercent, [6]), - ("SnowAmt", self.accumMinMax), - ("StormTotalSnow", self.minMax), - ("IceAccum", self.accumMinMax), - ("SnowLevel", self.avg), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("Wx", self.rankedWx, [6]), - ("WindChill", self.minMax, [6]), - ("HeatIndex", self.minMax, [6]), - ], - "phraseList":[ - self.wind_summary, - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.severeWeather_phrase, - self.snow_phrase, - self.total_snow_phrase, - self.snowLevel_phrase, - self.highs_phrase, - self.lows_phrase, - #self.highs_range_phrase, - #self.lows_range_phrase, - self.steady_temp_trends, - self.temp_trends, - self.wind_withGusts_phrase, - self.popMax_phrase, - self.windChill_phrase, - self.heatIndex_phrase, - ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, 
[6])) - if self._useStormTotalSnow: - phraseList = component["phraseList"] - index = phraseList.index(self.total_snow_phrase) - phraseList[index] = self.stormTotalSnow_phrase - component["phraseList"] = phraseList - return component - - def Evening(self): - component = { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("Sky", self.median, [6]), - ("PoP", self._PoP_analysisMethod("Evening"), [6]), - ("PoP", self.binnedPercent, [6]), - ("Wind", self.vectorMedianRange), - ("Wind", self.vectorMinMax), - ("Wx", self.rankedWx, [6]) - ], - "phraseList":[ - self.wind_summary, - self.sky_phrase, - self.weather_phrase, - #self.popMax_phrase - ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [6])) - return component - - def LaterPeriod(self): - component = { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - #("MinT", self.avg), - #("MaxT", self.avg), - ("MinT", self.stdDevMinMax), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("Sky", self.median, [6]), - ("PoP", self._PoP_analysisMethod("LaterPeriod"), [6]), - ("PoP", self.binnedPercent, [6]), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("Wx", self.rankedWx, [6]) - ], - "phraseList":[ - self.wind_summary, - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - # Uncomment if you want detailed winds included: - #self.wind_withGusts_phrase, - self.highs_phrase, - self.lows_phrase, - self.temp_trends, - self.popMax_phrase - ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [6])) - return component - - def LaterPeriodWithoutLows(self): - component = { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - 
"analysisList": [ - #("MaxT", self.avg), - ("MaxT", self.stdDevMinMax), - ("T", self.hourlyTemp), - ("Sky", self.median, [6]), - ("PoP", self._PoP_analysisMethod("LaterPeriodWithoutLows"), [6]), - ("PoP", self.binnedPercent, [6]), - ("Wind", self.vectorMedianRange, [6]), - ("Wind", self.vectorMinMax, [6]), - ("Wx", self.rankedWx, [6]) - ], - "phraseList":[ - self.wind_summary, - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.highs_phrase, - self.popMax_phrase - ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [6])) - return component - - def C11Extended(self): - component = { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("MinT", self.firstAvg), - ("MaxT", self.avg), - ("T", self.hourlyTemp), - ("Sky", self.median, [12]), - ("PoP", self._PoP_analysisMethod("C11Extended"), [12]), - ("PoP", self.binnedPercent, [12]), - ("Wind", self.vectorMedianRange), - ("Wind", self.vectorMinMax), - ("Wx", self.rankedWx, [12]), - ], - "phraseList":[ - self.wind_summary, - #self.reportTrends, - self.sky_phrase, - self.weather_phrase, - self.extended_lows_phrase, - self.extended_highs_phrase, - ], - } - if self._arealSkyAnalysis: - component["analysisList"].append(("Sky", self.binnedPercent, [6])) - return component - - def ExtendedLabel(self): - return { - "type": "component", - "methodList": [ - self.setLabel, - ], - "analysisList": [], - "phraseList":[], - } - def setLabel(self, tree, component): - component.set("words", "\n.EXTENDED...\n") - return self.DONE() - - def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = 
self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." - - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(50) - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - #Tropical exceptions - try: - self._includeTropical = self._includeTropical == "Yes" - except: - self._includeTropical = False - # VERY IMP: Period combination will break the Tropical Formatters. - # So regardless of what the user enters through the Definition file - # the variable is set to 0 here if running the Tropical Formatters. - # The same comment applies to the specification of the directive type. - # Tropical Formatters will only work with that specified to be 10-503 - # so it is forced like so here. 
- if self._includeTropical: - self._periodCombining = 0 - self._directiveType = "10-503" - if self._productIssuance == "Morning with Pre-1st Period": - self._productIssuance = "Morning" - if self._productIssuance == "Afternoon with Pre-1st Period": - self._productIssuance = "Afternoon" - - self._language = argDict["language"] - return None - - def _determineTimeRanges(self, argDict): - # Set up the Narrative Definition and initial Time Range - self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict)) - - if not self._useStormTotalSnow: - # Add a "custom" component to sample SnowAmt for 12 hours - # prior to product start - productStart = self._issuanceInfo.timeRange().startTime() - tr = TimeRange.TimeRange(productStart - 12*3600, productStart) - self._issuanceInfo.narrativeDef().append(\ - ("Custom", ("TotalSnowSampling", tr))) - - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._expireTime = self._issuanceInfo.expireTime() - self._issueTime = self._issuanceInfo.issueTime() - self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - if self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) - self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") - - return None - - def _sampleData(self, argDict): - # Sample and analyze the data for the narrative - self._narrativeProcessor = ForecastNarrative.ForecastNarrative() - error = self._narrativeProcessor.getNarrativeData( - argDict, self._definition, self._timeRange, 
self._areaList, self._issuanceInfo) - if error is not None: - return error - return None - - def _preProcessProduct(self, fcst, argDict): - # Product header - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - - productName = self.checkTestMode(argDict, productName) - - issuedByString = self.getIssuedByString() - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - # The following lines insert a statement - # at the top of the forecast that describes the time periods - # of the temp/pop table. Comment out the lines if you - # do not want this statement. - if self._includeMultipleElementTable == 1: - fcst = self._temp_pop_statement(fcst, argDict) - - # Set the "includeFloodingQuestion" flag if you want the possibility of - # a flood statement. 
- fcst = self._flood_statement(fcst, argDict) - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area combination - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas) - fcst = fcst + areaHeader - - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - argDict["language"] = self._language - # Generate Narrative Forecast for Edit Area - fcst = fcst + self._narrativeProcessor.generateForecast( - argDict, editArea, areaLabel) - if self._includeMultipleElementTable == 1: - fcst = fcst + self.makeMultipleElementTable(areaLabel, self._timeRange, argDict) - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst + "\n$$\n\n" - - def _postProcessProduct(self, fcst, argDict): - fcst = string.replace(fcst, "%expireTime", self._expireTimeStr) - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _issuance_list(self, argDict): - # This method sets up configurable issuance times with associated - # narrative definitions. See the Text Product User Guide for documentation. 
- if self._definition["directiveType"] == "C11": - return self._C11_issuance_list(argDict) - else: - return self._10_503_issuance_list(argDict) - - def _C11_issuance_list(self, argDict): - try: - if self._definition["includeEveningPeriod"] == 1: - narrativeDefAM = [ - ("FirstFcstPeriod", "period1"), - ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), - ("LaterPeriod", 12), ("LaterPeriod", 12), - ("Evening", 6), - ("Custom", ("ExtraSampling", (-24, 12))), - ] - narrativeDefPM = [ - ("FirstFcstPeriod", "period1"), - ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), - ("LaterPeriod", 12), ("LaterPeriod", 12), ("LaterPeriod", 12), - ("Evening", 6), - ("Custom", ("ExtraSampling", (-24, 24))), - ] - else: - narrativeDefAM = [ - ("FirstFcstPeriod", "period1"), - ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), - ("LaterPeriod", 12), ("LaterPeriodWithoutLows", 18), - ("Custom", ("ExtraSampling", (-24, 12))), - ] - narrativeDefPM = [ - ("FirstFcstPeriod", "period1"), - ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), - ("LaterPeriod", 12), ("LaterPeriod", 12), ("LaterPeriodWithoutLows", 18), - ("Custom", ("ExtraSampling", (-24, 24))), - ] - extended = [("C11Extended", 24),("C11Extended", 24), ("C11Extended", 24),("C11Extended", 24)] - if self._includeExtended == 1: - if self._extendedLabel == 1: - narrativeDefAM.append(("ExtendedLabel",0)) - narrativeDefPM.append(("ExtendedLabel",0)) - narrativeDefAM = narrativeDefAM + extended - narrativeDefPM = narrativeDefPM + extended - except: - narrativeDefAM = None - narrativeDefPM = None - - #(description, startHour, endHour, expireHour, - # period1 label, period1 lateNight lateDay phrase, todayFlag, narrative definition) - return [ - ("Morning", self.DAY(), self.NIGHT(), 16, - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), 16, - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning Update", 
"issuanceHour", self.NIGHT(), 16, - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), 16, - ".REST OF TODAY...", "early in the morning","late in the afternoon", - 1, narrativeDefAM), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + 4, - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + 4, - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + 4, - ".REST OF TONIGHT...", "late in the night","early in the evening", - 1, narrativeDefPM), - ("Early Morning Update", "issuanceHour", self.DAY(), 4, - ".REST OF TONIGHT...", "early in the morning","late in the afternoon", - 0, narrativeDefPM), - ] - - def _10_503_issuance_list(self, argDict): - narrativeDefAM = [ - ("Period_1", "period1"), - ("Period_2_3", 12), ("Period_2_3", 12), - ("Period_4_5", 12), ("Period_4_5", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Custom", ("ExtraSampling", (-24, 12))), - ] - narrativeDefPM = [ - ("Period_1", "period1"), - ("Period_2_3", 12), ("Period_2_3", 12), - ("Period_4_5", 12), ("Period_4_5", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), - ("Period_6_14", 12), - ("Custom", ("ExtraSampling", (-24, 24))), - ] - - return [ - ("Morning", self.DAY(), self.NIGHT(), 16, - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), 16, - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning 
Update", "issuanceHour", self.NIGHT(), 16, - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), 16, - ".REST OF TODAY...", "early in the morning","late in the afternoon", - 1, narrativeDefAM), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + 4, - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + 4, - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + 4, - ".REST OF TONIGHT...", "early in the morning","early in the evening", - 1, narrativeDefPM), - # For the early morning update, this produces: - # Rest of Tonight: - # Monday - # Monday night - ("Early Morning Update", "issuanceHour", self.DAY(), 4, - ".REST OF TONIGHT...", "early in the morning","late in the afternoon", - 0, narrativeDefPM), - # Alternative - # For the early morning update, this produces: - # Early this morning: - # Today - # Tonight - #("Evening Update", "issuanceHour", 24 + self.DAY(), 4, - # ".REST OF TONIGHT...", "late in the night", "early in the evening", - # 1, narrativeDefPM), - #("Early Morning Update", "issuanceHour", self.DAY(), 4, - # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", - # 1, narrativeDefPM), - ] - - def lateDay_descriptor(self, tree, node, timeRange): - # If time range is in the first period, return period1 descriptor for - # late day -- default 3pm-6pm - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateDayPhrase() - else: - return "late in the afternoon" - - def lateNight_descriptor(self, tree, node, timeRange): - # If time range is in the first period, return period1 descriptor for - # late night -- default 3am-6am - if self._issuanceInfo.period1TimeRange().contains(timeRange): 
- return self._issuanceInfo.period1LateNightPhrase() - else: - return "early in the morning" - - # Some Western Region offices insert a special statement - # at the top of the ZFP for the temp/PoP table time periods. - # This function creates the statement. Usually this is called - # by preProcessProduct. This code was contributed by Ron Miller, - # NWS Spokane - def _temp_pop_statement(self, fcst, argDict): - # Determine the number of periods based on what package we're issuing. - myTimeRanges = self.getMultipleElementTableRanges( - self._productIssuance, self._singleValueFormat) - - header = self._temp_pop_statement_header(argDict) - - # Loop through each period - labels = [] - for myTimeRange, label in myTimeRanges: - labels.append(label) - - header += self.punctuateList(labels) + "." - - header = self.endline(header,argDict["lineLength"]) - fcst = fcst + header + "\n\n" - return fcst - - def _temp_pop_statement_header(self, argDict): - return "Spot temperatures and probabilities of measurable precipitation are for " - - # Western Region offices insert a special statement - # at the top of the ZFP if there are any active flood watches or - # warnings. Usually this is called by preProcessProduct. - # This code was contributed by Ron Miller, NWS Spokane - def _flood_statement(self, fcst, argDict): - # Now add in a hydro statement if flooding is occurring - varDict = argDict["varDict"] - try: - self._flooding = varDict["Flooding?"] - except: - self._flooding = "No" - if self._flooding == "Yes": - fcst = fcst + self._flood_statement_wording(argDict) - return fcst - - def _flood_statement_wording(self, argDict): - return "...Flood watches and/or warnings have been issued for portions of\n" \ - + "the zone forecast area. Please refer to the latest flood bulletin\n" \ - + "for details...\n\n" - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. 
- def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - tropicalActions = ["NEW", "EXA","UPG", "CAN", "CON"] - return [ - ('HU.W', tropicalActions, 'Tropical'), # HURRICANE WARNING - ('SS.W', tropicalActions, 'Surge'), # Storm Surge WARNING - ('TY.W', tropicalActions, 'Tropical'), # TYPHOON WARNING - ('TR.W', tropicalActions, 'Tropical1'), # TROPICAL STORM WARNING - ('HU.A', tropicalActions, 'Tropical'), # HURRICANE WATCH - ('SS.A', tropicalActions, 'Surge'), # Storm Surge WATCH - ('TY.A', tropicalActions, 'Tropical'), # TYPHOON WATCH - ('TR.A', tropicalActions, 'Tropical1'), # TROPICAL STORM WATCH - ('HF.W', allActions, 'Marine'), # HURRICANE FORCE WIND WARNING - ('BZ.W', allActions, 'WinterWx'), # BLIZZARD WARNING - ('IS.W', allActions, 'WinterWx'), # ICE STORM WARNING - ('LE.W', allActions, 'WinterWx'), # LAKE EFFECT SNOW WARNING - ('WS.W', allActions, 'WinterWx'), # WINTER STORM WARNING - ('WW.Y', allActions, 'WinterWx'), # WINTER WEATHER ADVISORY - ('WS.A', allActions, 'WinterWx'), # WINTER STORM WATCH - ('WC.W', allActions, 'WindChill'), # WIND CHILL WARNING - ('WC.Y', allActions, 'WindChill'), # WIND CHILL ADVISORY - ('WC.A', allActions, 'WindChill'), # WIND CHILL WATCH - ('DU.W', allActions, 'Dust'), # BLOWING DUST WARNING - ('DU.Y', allActions, 'Dust'), # BLOWING DUST ADVISORY - ('EC.W', allActions, 'Cold'), # EXTREME COLD WARNING - ('EC.A', allActions, 'Cold'), # EXTREME COLD WATCH - ('EH.W', allActions, 'Heat'), # EXCESSIVE HEAT WARNING - ('EH.A', allActions, 'Heat'), # EXCESSIVE HEAT WATCH - ('HT.Y', allActions, 'Heat'), # HEAT ADVISORY - ('FG.Y', allActions, 'Fog'), # DENSE FOG ADVISORY - ('HZ.W', allActions, 'FrostFreeze'), # HARD FREEZE WARNING - ('FZ.W', allActions, 'FrostFreeze'), # FREEZE WARNING - ('FR.Y', allActions, 'FrostFreeze'), # FROST ADVISORY - ('HZ.A', allActions, 'FrostFreeze'), # HARD FREEZE WATCH - ('FZ.A', allActions, 'FrostFreeze'), # FREEZE WATCH - ('HW.W', allActions, 'Wind'), # HIGH WIND 
WARNING - ('WI.Y', allActions, 'Wind'), # WIND ADVISORY - ('LW.Y', allActions, 'Wind'), # LAKE WIND ADVISORY - ('HW.A', allActions, 'Wind'), # HIGH WIND WATCH - ('SM.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY - ('ZF.Y', allActions, 'FreezeFog'), # FREEZING FOG ADVISORY - ('FF.A', allActions, 'Flood'), # FLASH FLOOD WATCH - ('FA.A', allActions, 'Flood'), # FLOOD WATCH - ('FA.W', allActions, 'Flood'), # FLOOD WARNING - ('FA.Y', allActions, 'Flood'), # FLOOD ADVISORY - ('CF.W', allActions, 'CoastalFlood'), # COASTAL FLOOD WARNING - ('LS.W', allActions, 'CoastalFlood'), # LAKESHORE FLOOD WARNING - ('CF.Y', allActions, 'CoastalFlood'), # COASTAL FLOOD ADVISORY - ('LS.Y', allActions, 'CoastalFlood'), # LAKESHORE FLOOD ADVISORY - ('CF.A', allActions, 'CoastalFlood'), # COASTAL FLOOD WATCH - ('LS.A', allActions, 'CoastalFlood'), # LAKESHORE FLOOD WATCH - ('UP.W', allActions, 'IceAccr'), # ICE ACCRETION WARNING - ('UP.Y', allActions, 'IceAccr'), # ICE ACCRETION ADVISORY - ('AS.Y', allActions, 'AirStag'), # AIR STAGNATION ADVISORY - ('AS.O', allActions, 'AirStag'), # AIR STAGNATION OUTLOOK - ('SU.W', allActions, 'HighSurf'), # HIGH SURF WARNING - ('SU.Y', allActions, 'HighSurf'), # HIGH SURF ADVISORY - ('RP.S', allActions, 'Rip'), # HIGH RIP CURRENT RISK - ('BH.S', allActions, 'BeachHaz'), # BEACH HAZARDS STATEMENT - ('AF.W', allActions, 'Ashfall'), # VOLCANIC ASHFALL WARNING - ('AF.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY - ('LO.Y', allActions, 'Drought'), # LOW WATER ADVISORY - ('TO.A', allActions, 'Convective'), # TORNADO WATCH - ('SV.A', allActions, 'Convective'), # SEVERE THUNDERSTORM WATCH - ('TS.W', allActions, 'Tsunami'), # TSUNAMI WARNING - ('TS.Y', allActions, 'Tsunami'), # TSUNAMI ADVISORY - ('TS.A', allActions, 'Tsunami'), # TSUNAMI WATCH - ] +# NOTE: If you choose to include a "pre-1st period" for the +# Morning or Afternoon issuance, you must have grids in the +# pre-1st period or you will not see any data for that element. 
+#------------------------------------------------------------------------- +# Edit Areas Needed: None +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +# Combinations file +#------------------------------------------------------------------------- +# Component Products: +# 10-503: +# Period_1 +# Period_2_3 +# Period_4_5 +# Period_6_14 +# +# C11: +# FirstFcstPeriod +# AreaFcstPeriod +# Evening +# LaterPeriod +# LaterPeriodWithoutLows +# C11Extended +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Additional Information: +# +# COMMON OVERRIDES +# from ConfigVariables: +# maximum_range_nlValue_dict +# minimum_range_nlValue_dict +# range_nlValue_dict +# phrase_descriptor_dict +# pop_lower_threshold +# pop_upper_threshold +# null_nlValue_dict +# first_null_phrase_dict +# null_phrase_dict +# increment_nlValue_dict +# vector_mag_difference_nlValue_dict +# scalar_difference_nlValue_dict +# value_connector_dict +# from WxPhrases: +# pop_wx_lower_threshold +# wxCoverageDescriptors +# wxTypeDescriptors +# wxAttributeDescriptors +# wxIntensityDescriptors +# wxCombinations +# combine_T_RW +# from ScalarPhrases: +# pop_sky_lower_threshold +# pop_snow_lower_threshold +# pop_snowLevel_upper_threshold +# snowLevel_maximum_phrase +# temp_trend_nlValue +# temp_trends_addToPhrase_flag +# windChillTemp_difference +# heatIndexTemp_difference +# areal_sky_flag +# from CombinedPhrases: +# useCombinedSkyPopWx +# from VectorRelatedPhrases: +# lake_wind_areaNames +# useWindsForGusts_flag +# from SampleAnalysis: +# stdDev_dict +# 
+#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS C11 and 10-503 Directives for Public Weather Services. +#------------------------------------------------------------------------- + +import TextRules +import SampleAnalysis +import ForecastNarrative +import time, string, types, copy +import TimeRange + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + # If Flooding is "Yes", the system will insert a statement in the + # product when FFA, FLW, or FLS is in effect. + #(("Flooding?", "flooding") , "No", "radio", ["Yes","No"]), + #(("Include Tropical?", "includeTropical") , "No", "radio", ["Yes","No"]), + ] + + Definition = { + "type": "smart", + "displayName": "None", + + # Source database for product. Can be "Official", "Fcst" or "ISC" + "database": "Official", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/ZFP_.txt", + "debug": 0, + # Name of map background for creating Combinations + "mapNameForCombinations": "Zones_", + + ## Edit Areas: Create Combinations file with edit area combinations. + "showZoneCombiner" : 1, # 1 to cause zone combiner to display + "defaultEditAreas" : "Combinations_ZFP__", + "editAreaSuffix": None, + + # product identifiers + "productName": "Zone Forecast Product", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "Georgia" -- optional + "wfoCityState": "", # Location of WFO - city,state + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. 
+ "periodCombining" : 0, # If 1, combine periods, if possible + "lineLength": 66, #product line length + "fixedExpire": 1, #ensure VTEC actions don't affect segment time + + # Choices are "10-503" or "C11" + "directiveType": "10-503", + # Set to 1 to include analysis for Sky for areal vs. traditional + # sky phrasing. + "arealSkyAnalysis":0, + # Set to 1 to use StormTotalSnow grid + "useStormTotalSnow": 0, + #"directiveType": "C11", + # Applies to C11 directive only: + "includeExtended": 1, + # Set to one if you want an extended label + "extendedLabel": 1, + # Set to one if you want a 6-hour evening period instead of + # 18-hour period without lows + "includeEveningPeriod": 1, + + # Product-specific variables: + # To include a MultipleElementTable (e.g. Temp Pop Table) + # for each area in the current Combination: + # Set "includeMultipleElement" to 1 + # Set the "elementList" and "singleValueFormat" flag if desired + # "elementList" may include "Temp", "PoP", and/or "Humidity" + # "singleValueFormat" lists only one value per element + # Make sure you are using a Combinations file + # Modify the CityDictionary TextUtility file or create a new one + "includeMultipleElementTable": 0, + "elementList" : ["Temp", "PoP"], + "singleValueFormat": 0, + "cityDictionary": "CityDictionary", + + # Weather-related flags + "hoursSChcEnds": 24, + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + # Language + "language": "english", + + # Trouble-shooting items + "passLimit": 20, # Limit on passes allowed through + # Narrative Tree + "trace": 0, # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + + + + "hazardSamplingThreshold": (10, None), #(%cov, #points) + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + ######################################################################## + # OVERRIDING THRESHOLDS AND VARIABLES + 
######################################################################## + + def lake_wind_areaNames(self, tree, node): + # Return list of edit area names for which the lake_wind_phrase + # should be generated + # If you want the phrase potentially generated for all zones, use: + # return ["ALL"] + return [] + + def lake_wind_thresholds(self, tree, node): + # Return upper and lower lake_wind thresholds in mph. + # Only apply phrase for max wind speed of 25 to 35 mph. At 35 mph + # and higher, an advisory of some sort will be in effect and phrase + # will not be needed. + return 25, 35 + + def element_outUnits_dict(self, tree, node): + dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) + dict["Wind"] = "mph" + dict["WindGust"] = "mph" + return dict + + def range_nlValue_dict(self, tree, node): + # If the range of values is less than this threshold, + # the data will be reported as a single value + # e.g HIGHS AROUND 80 + dict = TextRules.TextRules.range_nlValue_dict(self, tree, node) + dict["MaxT"] = 5 + dict["MinT"] = 5 + dict["MinRH"] = 5 + dict["MaxRH"] = 5 + return dict + + # If you are using the C11 format with periodCombining on, + # set this variable to zero for proper labeling. + def splitDay24HourLabel_flag(self, tree, node): + # Return 0 to have the TimeDescriptor module label 24 hour periods + # with simply the weekday name (e.g. Saturday) + # instead of including the day and night periods + # (e.g. Saturday and Saturday night) + return 1 + + def gust_wind_difference_nlValue(self, tree, node): + # Difference between gust and maxWind below which gusts are not + # mentioned. Units are MPH + if self._includeTropical: + return 5 + else: + return 10 + + def temporalCoverage_hours(self, parmHisto, timeRange, componentName): + # COMMENT: At WFO MFL we use 3 hrly wind grids. If you use 1 hrly wind grids + # and this parameter is 2 or higher, tropical cyclone winds affecting the very + # early or latter part of a forecast period might be neglected. 
1 assures + # maximum sensitivity. + if self._includeTropical: + return 1 + else: + return 0 + + def moderated_dict(self, parmHisto, timeRange, componentName): + """ + Modifed to lower the high end filter threshold from 20 MPH to + 15 MPH for Tropical. + """ + # COMMENT: This dictionary defines the low and high limit at which + # outliers will be removed when calculating moderated stats. + # By convention the first value listed is the percentage + # allowed for low values and second the percentage allowed + # for high values. The thresholds chosen below gave best results + # during testing with 2004 and 2005 tropical cyclones. This dict + # is used with the moderatedMinMax analysis method specified in the + # TropicalPeriod definitions specified further down for use with + # tropical cyclones with wind parameters. + + # Get Baseline thresholds + dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, + timeRange, componentName) + + # Change thresholds for Wind, WindGust, WaveHeight and Swell + if self._includeTropical: + dict["Wind"] = (0, 15) + dict["WindGust"] = (0, 15) + dict["WaveHeight"] = (0, 15) + dict["Swell"] = (0, 15) + return dict + + def maximum_range_nlValue_dict(self, tree, node): + # Maximum range to be reported within a phrase + # e.g. 5 to 10 mph + # Units depend on the product + dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) + #----------------------------------------------------------------------- + # COMMENT: Override max ranges for certain fields + # This dict specifications allows for wind speed ranges of up to 20 mph + # during tropical cyclone situations allowing for far better wind speed + # phrases. 
+ #----------------------------------------------------------------------- + if self._includeTropical: + dict["Wind"] = {'default': 5, + (0.0, 4.0): 0, + (4.0, 33.0): 5, + (33.0, 52.0): 10, + (52.0, 200.0): 20, + } + return dict + + ######################################################################## + # COMPONENT PRODUCT DEFINITIONS + ######################################################################## + + def _PoP_analysisMethod(self, componentName): + # Alternative PoP analysis methods for consistency between PoP and Wx + return self.stdDevMaxAvg + #return self.maxMode + #return self.maximum + + # These Components are named to indicate which period of the forecast + # they define. + + # Special alterations to analysisList and phraseList for Tropical formatting + def addTropical(self, analysisList, phraseList, includeHazards=True): + newAnalysisList = [] + for entry in analysisList: + # Sampling defined as a tuple (field, statistic, temporal rate) + # If this is NOT a Wind or WindGust statistic + if entry[0] not in ["Hazards", "Wind", "WindGust"]: + # Add this statistic to the new analysisList + newAnalysisList.append(entry) + newAnalysisList += [ + ("Wind", self.vectorModeratedMinMax, [6]), + ("WindGust", self.moderatedMinMax, [6]), + ("pws34", self.maximum), + ("pws64", self.maximum), + ("pwsN34", self.maximum), + ("pwsN64", self.maximum), + ("pwsD34", self.maximum), + ("pwsD64", self.maximum), + ] + if includeHazards: + newAnalysisList.append(("Hazards", self.discreteTimeRangesByKey)) + + phraseList.insert(0, self.pws_phrase) + return newAnalysisList, phraseList + + def Period_1(self): + analysisList = [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MinT", self.stdDevMinMax), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [3]), + ("PoP", self._PoP_analysisMethod("Period_1"), [3]), + ("PoP", self.binnedPercent, [3]), + ("SnowAmt", self.accumMinMax), + ("StormTotalSnow", self.minMax), + 
("IceAccum", self.accumMinMax), + ("SnowLevel", self.avg), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("WindGust", self.maximum, [6]), + ("Wx", self.rankedWx, [3]), + ("WindChill", self.minMax, [6]), + ("HeatIndex", self.minMax, [6]), + ] + phraseList = [ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.severeWeather_phrase, + self.heavyPrecip_phrase, + self.visibility_phrase, + self.snow_phrase, + self.total_snow_phrase, + self.snowLevel_phrase, + self.iceAccumulation_phrase, + self.highs_phrase, + self.lows_phrase, + #self.highs_range_phrase, + #self.lows_range_phrase, + self.steady_temp_trends, + self.temp_trends, + self.wind_withGusts_phrase, + self.lake_wind_phrase, + self.popMax_phrase, + self.windChill_phrase, + # Alternative + #self.windBased_windChill_phrase, + self.heatIndex_phrase, + ] + + if self._includeTropical: + analysisList, phraseList = self.addTropical(analysisList, phraseList) + + component = { + "type": "component", + "methodList": [ + self.orderPhrases, + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": analysisList, + "phraseList": phraseList, +## "additionalAreas": [ +## # Areas listed by weather element that will be +## # sampled and analysed. +## # E.g. used for reporting population centers for temperatures. +## ("MaxT", ["City1", "City2"]), +## ("MinT", ["City1", "City2"]), +## ], +## "intersectAreas": [ +## # Areas listed by weather element that will be +## # intersected with the current area then +## # sampled and analysed. +## # E.g. used in local effects methods. 
+## ("MaxT", ["Mountains"]), +## ("MinT", ["Valleys"]), +## ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [3])) + if self._useStormTotalSnow: + phraseList = component["phraseList"] + index = phraseList.index(self.total_snow_phrase) + phraseList[index] = self.stormTotalSnow_phrase + component["phraseList"] = phraseList + return component + + def Period_2_3(self): + # No Lake Wind phrase + analysisList = [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MinT", self.stdDevMinMax), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("Period_2_3"), [6]), + ("PoP", self.binnedPercent, [6]), + ("SnowAmt", self.accumMinMax), + ("StormTotalSnow", self.minMax), + ("IceAccum", self.accumMinMax), + ("SnowLevel", self.avg), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("WindGust", self.maximum, [6]), + ("Wx", self.rankedWx, [6]), + ("WindChill", self.minMax, [6]), + ("HeatIndex", self.minMax, [6]), + ] + phraseList = [ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.severeWeather_phrase, + self.heavyPrecip_phrase, + self.visibility_phrase, + self.snow_phrase, + self.total_snow_phrase, + self.snowLevel_phrase, + self.iceAccumulation_phrase, + self.highs_phrase, + self.lows_phrase, + #self.highs_range_phrase, + #self.lows_range_phrase, + self.steady_temp_trends, + self.temp_trends, + self.wind_withGusts_phrase, +# self.lake_wind_phrase, + self.popMax_phrase, + self.windChill_phrase, + self.heatIndex_phrase, + ] + + if self._includeTropical: + analysisList, phraseList = self.addTropical(analysisList, phraseList) + + component = { + "type": "component", + "methodList": [ + self.orderPhrases, + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": analysisList, + "phraseList": phraseList, + } + if 
self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + if self._useStormTotalSnow: + phraseList = component["phraseList"] + index = phraseList.index(self.total_snow_phrase) + phraseList[index] = self.stormTotalSnow_phrase + component["phraseList"] = phraseList + return component + + def Period_4_5(self): + # Descriptive snow phrase + + analysisList = [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MinT", self.stdDevMinMax), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("Period_4_5"), [6]), + ("PoP", self.binnedPercent, [6]), + ("SnowAmt", self.accumMinMax), + ("IceAccum", self.accumMinMax), + ("SnowLevel", self.avg), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("WindGust", self.maximum, [6]), + ("Wx", self.rankedWx, [6]), + ("WindChill", self.minMax, [6]), + ("HeatIndex", self.minMax, [6]), + ] + phraseList = [ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.severeWeather_phrase, + self.heavyPrecip_phrase, + self.visibility_phrase, + self.descriptive_snow_phrase, + self.snowLevel_phrase, + self.iceAccumulation_phrase, + self.highs_phrase, + self.lows_phrase, + #self.highs_range_phrase, + #self.lows_range_phrase, + self.steady_temp_trends, + self.temp_trends, + self.wind_withGusts_phrase, + self.popMax_phrase, + self.windChill_phrase, + self.heatIndex_phrase, + ] + + if self._includeTropical: + analysisList, phraseList = self.addTropical(analysisList, phraseList) + + component = { + "type": "component", + "methodList": [ + self.orderPhrases, + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": analysisList, + "phraseList": phraseList, + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + return component + + def Period_6_14(self): + 
analysisList = [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MinT", self.stdDevMinMax), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("Period_6_14"), [6]), + ("PoP", self.binnedPercent, [6]), + ("Wind", self.vectorMedianRange, [12]), + ("SnowAmt", self.accumMinMax), + ("Wx", self.rankedWx, [6]), + ("WindChill", self.minMax, [6]), + ("HeatIndex", self.minMax, [6]), + ] + phraseList = [ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.severeWeather_phrase, + self.heavyPrecip_phrase, + self.visibility_phrase, + self.descriptive_snow_phrase, + self.highs_phrase, + self.lows_phrase, + #self.highs_range_phrase, + #self.lows_range_phrase, + self.steady_temp_trends, + self.temp_trends, + self.popMax_phrase, + self.windChill_phrase, + self.heatIndex_phrase, + ] + + if self._includeTropical: + analysisList, phraseList = self.addTropical( + analysisList, phraseList, includeHazards=False) + + component = { + "type": "component", + "methodList": [ + self.orderPhrases, + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": analysisList, + "phraseList": phraseList, + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + return component + + def ExtraSampling(self): + # sampling for temp trends + return { + "type": "component", + "methodList": [self.noWords], + "analysisList": [ + ("MaxT", self.stdDevMinMax, [0]), + ("MinT", self.stdDevMinMax, [0]), + ], + "phraseList": [], + "intersectAreas": [], + } + + def TotalSnowSampling(self): + return { + "type": "component", + "methodList": [self.noWords], + "analysisList": [ + ("SnowAmt", self.moderatedMinMax), + ], + "phraseList":[], + } + + ########################################################################### + ### C11 Definitions + 
########################################################################### + + def FirstFcstPeriod(self): + component = { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MinT", self.stdDevMinMax), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("FirstFcstPeriod"), [6]), + ("PoP", self.binnedPercent, [6]), + ("SnowAmt", self.accumMinMax), + ("StormTotalSnow", self.minMax), + ("IceAccum", self.accumMinMax), + ("SnowLevel", self.avg), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("WindGust", self.maximum, [6]), + ("Wx", self.rankedWx, [6]), + ("WindChill", self.minMax, [6]), + ("HeatIndex", self.minMax, [6]), + ], + "phraseList":[ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.severeWeather_phrase, + self.snow_phrase, + self.total_snow_phrase, + self.snowLevel_phrase, + self.highs_phrase, + self.lows_phrase, + #self.highs_range_phrase, + #self.lows_range_phrase, + self.steady_temp_trends, + self.temp_trends, + self.wind_withGusts_phrase, + self.lake_wind_phrase, + self.popMax_phrase, + self.windChill_phrase, + self.heatIndex_phrase, + ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + if self._useStormTotalSnow: + phraseList = component["phraseList"] + index = phraseList.index(self.total_snow_phrase) + phraseList[index] = self.stormTotalSnow_phrase + component["phraseList"] = phraseList + return component + + def AreaFcstPeriod(self): + component = { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MinT", self.stdDevMinMax), + ("MaxT", 
self.stdDevMinMax), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("AreaFcstPeriod"), [6]), + ("PoP", self.binnedPercent, [6]), + ("SnowAmt", self.accumMinMax), + ("StormTotalSnow", self.minMax), + ("IceAccum", self.accumMinMax), + ("SnowLevel", self.avg), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("Wx", self.rankedWx, [6]), + ("WindChill", self.minMax, [6]), + ("HeatIndex", self.minMax, [6]), + ], + "phraseList":[ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.severeWeather_phrase, + self.snow_phrase, + self.total_snow_phrase, + self.snowLevel_phrase, + self.highs_phrase, + self.lows_phrase, + #self.highs_range_phrase, + #self.lows_range_phrase, + self.steady_temp_trends, + self.temp_trends, + self.wind_withGusts_phrase, + self.popMax_phrase, + self.windChill_phrase, + self.heatIndex_phrase, + ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + if self._useStormTotalSnow: + phraseList = component["phraseList"] + index = phraseList.index(self.total_snow_phrase) + phraseList[index] = self.stormTotalSnow_phrase + component["phraseList"] = phraseList + return component + + def Evening(self): + component = { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("Evening"), [6]), + ("PoP", self.binnedPercent, [6]), + ("Wind", self.vectorMedianRange), + ("Wind", self.vectorMinMax), + ("Wx", self.rankedWx, [6]) + ], + "phraseList":[ + self.wind_summary, + self.sky_phrase, + self.weather_phrase, + #self.popMax_phrase + ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + return component + + def LaterPeriod(self): + component = { + "type": "component", + 
"methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + #("MinT", self.avg), + #("MaxT", self.avg), + ("MinT", self.stdDevMinMax), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("LaterPeriod"), [6]), + ("PoP", self.binnedPercent, [6]), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("Wx", self.rankedWx, [6]) + ], + "phraseList":[ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + # Uncomment if you want detailed winds included: + #self.wind_withGusts_phrase, + self.highs_phrase, + self.lows_phrase, + self.temp_trends, + self.popMax_phrase + ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + return component + + def LaterPeriodWithoutLows(self): + component = { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + #("MaxT", self.avg), + ("MaxT", self.stdDevMinMax), + ("T", self.hourlyTemp), + ("Sky", self.median, [6]), + ("PoP", self._PoP_analysisMethod("LaterPeriodWithoutLows"), [6]), + ("PoP", self.binnedPercent, [6]), + ("Wind", self.vectorMedianRange, [6]), + ("Wind", self.vectorMinMax, [6]), + ("Wx", self.rankedWx, [6]) + ], + "phraseList":[ + self.wind_summary, + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.highs_phrase, + self.popMax_phrase + ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + return component + + def C11Extended(self): + component = { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("MinT", self.firstAvg), + ("MaxT", self.avg), + ("T", self.hourlyTemp), + ("Sky", self.median, [12]), + ("PoP", 
self._PoP_analysisMethod("C11Extended"), [12]), + ("PoP", self.binnedPercent, [12]), + ("Wind", self.vectorMedianRange), + ("Wind", self.vectorMinMax), + ("Wx", self.rankedWx, [12]), + ], + "phraseList":[ + self.wind_summary, + #self.reportTrends, + self.sky_phrase, + self.weather_phrase, + self.extended_lows_phrase, + self.extended_highs_phrase, + ], + } + if self._arealSkyAnalysis: + component["analysisList"].append(("Sky", self.binnedPercent, [6])) + return component + + def ExtendedLabel(self): + return { + "type": "component", + "methodList": [ + self.setLabel, + ], + "analysisList": [], + "phraseList":[], + } + def setLabel(self, tree, component): + component.set("words", "\n.EXTENDED...\n") + return self.DONE() + + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(50) + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + #Tropical exceptions + try: + self._includeTropical = self._includeTropical == "Yes" + except: + self._includeTropical = False + # VERY IMP: Period combination will break the Tropical Formatters. + # So regardless of what the user enters through the Definition file + # the variable is set to 0 here if running the Tropical Formatters. + # The same comment applies to the specification of the directive type. + # Tropical Formatters will only work with that specified to be 10-503 + # so it is forced like so here. 
+ if self._includeTropical: + self._periodCombining = 0 + self._directiveType = "10-503" + if self._productIssuance == "Morning with Pre-1st Period": + self._productIssuance = "Morning" + if self._productIssuance == "Afternoon with Pre-1st Period": + self._productIssuance = "Afternoon" + + self._language = argDict["language"] + return None + + def _determineTimeRanges(self, argDict): + # Set up the Narrative Definition and initial Time Range + self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict)) + + if not self._useStormTotalSnow: + # Add a "custom" component to sample SnowAmt for 12 hours + # prior to product start + productStart = self._issuanceInfo.timeRange().startTime() + tr = TimeRange.TimeRange(productStart - 12*3600, productStart) + self._issuanceInfo.narrativeDef().append(\ + ("Custom", ("TotalSnowSampling", tr))) + + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._expireTime = self._issuanceInfo.expireTime() + self._issueTime = self._issuanceInfo.issueTime() + self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + if self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) + self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") + + return None + + def _sampleData(self, argDict): + # Sample and analyze the data for the narrative + self._narrativeProcessor = ForecastNarrative.ForecastNarrative() + error = self._narrativeProcessor.getNarrativeData( + argDict, self._definition, self._timeRange, 
self._areaList, self._issuanceInfo) + if error is not None: + return error + return None + + def _preProcessProduct(self, fcst, argDict): + # Product header + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + + productName = self.checkTestMode(argDict, productName) + + issuedByString = self.getIssuedByString() + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + # The following lines insert a statement + # at the top of the forecast that describes the time periods + # of the temp/pop table. Comment out the lines if you + # do not want this statement. + if self._includeMultipleElementTable == 1: + fcst = self._temp_pop_statement(fcst, argDict) + + # Set the "includeFloodingQuestion" flag if you want the possibility of + # a flood statement. 
+ fcst = self._flood_statement(fcst, argDict) + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area combination + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas) + fcst = fcst + areaHeader + + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + argDict["language"] = self._language + # Generate Narrative Forecast for Edit Area + fcst = fcst + self._narrativeProcessor.generateForecast( + argDict, editArea, areaLabel) + if self._includeMultipleElementTable == 1: + fcst = fcst + self.makeMultipleElementTable(areaLabel, self._timeRange, argDict) + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + "\n$$\n\n" + + def _postProcessProduct(self, fcst, argDict): + fcst = string.replace(fcst, "%expireTime", self._expireTimeStr) + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _issuance_list(self, argDict): + # This method sets up configurable issuance times with associated + # narrative definitions. See the Text Product User Guide for documentation. 
+ if self._definition["directiveType"] == "C11": + return self._C11_issuance_list(argDict) + else: + return self._10_503_issuance_list(argDict) + + def _C11_issuance_list(self, argDict): + try: + if self._definition["includeEveningPeriod"] == 1: + narrativeDefAM = [ + ("FirstFcstPeriod", "period1"), + ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), + ("LaterPeriod", 12), ("LaterPeriod", 12), + ("Evening", 6), + ("Custom", ("ExtraSampling", (-24, 12))), + ] + narrativeDefPM = [ + ("FirstFcstPeriod", "period1"), + ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), + ("LaterPeriod", 12), ("LaterPeriod", 12), ("LaterPeriod", 12), + ("Evening", 6), + ("Custom", ("ExtraSampling", (-24, 24))), + ] + else: + narrativeDefAM = [ + ("FirstFcstPeriod", "period1"), + ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), + ("LaterPeriod", 12), ("LaterPeriodWithoutLows", 18), + ("Custom", ("ExtraSampling", (-24, 12))), + ] + narrativeDefPM = [ + ("FirstFcstPeriod", "period1"), + ("AreaFcstPeriod", 12), ("AreaFcstPeriod", 12), + ("LaterPeriod", 12), ("LaterPeriod", 12), ("LaterPeriodWithoutLows", 18), + ("Custom", ("ExtraSampling", (-24, 24))), + ] + extended = [("C11Extended", 24),("C11Extended", 24), ("C11Extended", 24),("C11Extended", 24)] + if self._includeExtended == 1: + if self._extendedLabel == 1: + narrativeDefAM.append(("ExtendedLabel",0)) + narrativeDefPM.append(("ExtendedLabel",0)) + narrativeDefAM = narrativeDefAM + extended + narrativeDefPM = narrativeDefPM + extended + except: + narrativeDefAM = None + narrativeDefPM = None + + #(description, startHour, endHour, expireHour, + # period1 label, period1 lateNight lateDay phrase, todayFlag, narrative definition) + return [ + ("Morning", self.DAY(), self.NIGHT(), 16, + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), 16, + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning Update", 
"issuanceHour", self.NIGHT(), 16, + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), 16, + ".REST OF TODAY...", "early in the morning","late in the afternoon", + 1, narrativeDefAM), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + 4, + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + 4, + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + 4, + ".REST OF TONIGHT...", "late in the night","early in the evening", + 1, narrativeDefPM), + ("Early Morning Update", "issuanceHour", self.DAY(), 4, + ".REST OF TONIGHT...", "early in the morning","late in the afternoon", + 0, narrativeDefPM), + ] + + def _10_503_issuance_list(self, argDict): + narrativeDefAM = [ + ("Period_1", "period1"), + ("Period_2_3", 12), ("Period_2_3", 12), + ("Period_4_5", 12), ("Period_4_5", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Custom", ("ExtraSampling", (-24, 12))), + ] + narrativeDefPM = [ + ("Period_1", "period1"), + ("Period_2_3", 12), ("Period_2_3", 12), + ("Period_4_5", 12), ("Period_4_5", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), ("Period_6_14", 12), + ("Period_6_14", 12), + ("Custom", ("ExtraSampling", (-24, 24))), + ] + + return [ + ("Morning", self.DAY(), self.NIGHT(), 16, + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning with Pre-1st Period", self.DAY()-2, self.NIGHT(), 16, + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning 
Update", "issuanceHour", self.NIGHT(), 16, + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), 16, + ".REST OF TODAY...", "early in the morning","late in the afternoon", + 1, narrativeDefAM), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + self.DAY(), 24 + 4, + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Afternoon with Pre-1st Period", self.NIGHT()-2, 24 + self.DAY(), 24 + 4, + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + 4, + ".REST OF TONIGHT...", "early in the morning","early in the evening", + 1, narrativeDefPM), + # For the early morning update, this produces: + # Rest of Tonight: + # Monday + # Monday night + ("Early Morning Update", "issuanceHour", self.DAY(), 4, + ".REST OF TONIGHT...", "early in the morning","late in the afternoon", + 0, narrativeDefPM), + # Alternative + # For the early morning update, this produces: + # Early this morning: + # Today + # Tonight + #("Evening Update", "issuanceHour", 24 + self.DAY(), 4, + # ".REST OF TONIGHT...", "late in the night", "early in the evening", + # 1, narrativeDefPM), + #("Early Morning Update", "issuanceHour", self.DAY(), 4, + # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", + # 1, narrativeDefPM), + ] + + def lateDay_descriptor(self, tree, node, timeRange): + # If time range is in the first period, return period1 descriptor for + # late day -- default 3pm-6pm + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateDayPhrase() + else: + return "late in the afternoon" + + def lateNight_descriptor(self, tree, node, timeRange): + # If time range is in the first period, return period1 descriptor for + # late night -- default 3am-6am + if self._issuanceInfo.period1TimeRange().contains(timeRange): 
+ return self._issuanceInfo.period1LateNightPhrase() + else: + return "early in the morning" + + # Some Western Region offices insert a special statement + # at the top of the ZFP for the temp/PoP table time periods. + # This function creates the statement. Usually this is called + # by preProcessProduct. This code was contributed by Ron Miller, + # NWS Spokane + def _temp_pop_statement(self, fcst, argDict): + # Determine the number of periods based on what package we're issuing. + myTimeRanges = self.getMultipleElementTableRanges( + self._productIssuance, self._singleValueFormat) + + header = self._temp_pop_statement_header(argDict) + + # Loop through each period + labels = [] + for myTimeRange, label in myTimeRanges: + labels.append(label) + + header += self.punctuateList(labels) + "." + + header = self.endline(header,argDict["lineLength"]) + fcst = fcst + header + "\n\n" + return fcst + + def _temp_pop_statement_header(self, argDict): + return "Spot temperatures and probabilities of measurable precipitation are for " + + # Western Region offices insert a special statement + # at the top of the ZFP if there are any active flood watches or + # warnings. Usually this is called by preProcessProduct. + # This code was contributed by Ron Miller, NWS Spokane + def _flood_statement(self, fcst, argDict): + # Now add in a hydro statement if flooding is occurring + varDict = argDict["varDict"] + try: + self._flooding = varDict["Flooding?"] + except: + self._flooding = "No" + if self._flooding == "Yes": + fcst = fcst + self._flood_statement_wording(argDict) + return fcst + + def _flood_statement_wording(self, argDict): + return "...Flood watches and/or warnings have been issued for portions of\n" \ + + "the zone forecast area. Please refer to the latest flood bulletin\n" \ + + "for details...\n\n" + + # Returns a list of the Hazards allowed for this product in VTEC format. + # These are sorted in priority order - most important first. 
+ def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + tropicalActions = ["NEW", "EXA","UPG", "CAN", "CON"] + return [ + ('HU.W', tropicalActions, 'Tropical'), # HURRICANE WARNING + ('SS.W', tropicalActions, 'Surge'), # Storm Surge WARNING + ('TY.W', tropicalActions, 'Tropical'), # TYPHOON WARNING + ('TR.W', tropicalActions, 'Tropical1'), # TROPICAL STORM WARNING + ('HU.A', tropicalActions, 'Tropical'), # HURRICANE WATCH + ('SS.A', tropicalActions, 'Surge'), # Storm Surge WATCH + ('TY.A', tropicalActions, 'Tropical'), # TYPHOON WATCH + ('TR.A', tropicalActions, 'Tropical1'), # TROPICAL STORM WATCH + ('HF.W', allActions, 'Marine'), # HURRICANE FORCE WIND WARNING + ('BZ.W', allActions, 'WinterWx'), # BLIZZARD WARNING + ('IS.W', allActions, 'WinterWx'), # ICE STORM WARNING + ('LE.W', allActions, 'WinterWx'), # LAKE EFFECT SNOW WARNING + ('WS.W', allActions, 'WinterWx'), # WINTER STORM WARNING + ('WW.Y', allActions, 'WinterWx'), # WINTER WEATHER ADVISORY + ('WS.A', allActions, 'WinterWx'), # WINTER STORM WATCH + ('WC.W', allActions, 'WindChill'), # WIND CHILL WARNING + ('WC.Y', allActions, 'WindChill'), # WIND CHILL ADVISORY + ('WC.A', allActions, 'WindChill'), # WIND CHILL WATCH + ('DU.W', allActions, 'Dust'), # BLOWING DUST WARNING + ('DU.Y', allActions, 'Dust'), # BLOWING DUST ADVISORY + ('EC.W', allActions, 'Cold'), # EXTREME COLD WARNING + ('EC.A', allActions, 'Cold'), # EXTREME COLD WATCH + ('EH.W', allActions, 'Heat'), # EXCESSIVE HEAT WARNING + ('EH.A', allActions, 'Heat'), # EXCESSIVE HEAT WATCH + ('HT.Y', allActions, 'Heat'), # HEAT ADVISORY + ('FG.Y', allActions, 'Fog'), # DENSE FOG ADVISORY + ('HZ.W', allActions, 'FrostFreeze'), # HARD FREEZE WARNING + ('FZ.W', allActions, 'FrostFreeze'), # FREEZE WARNING + ('FR.Y', allActions, 'FrostFreeze'), # FROST ADVISORY + ('HZ.A', allActions, 'FrostFreeze'), # HARD FREEZE WATCH + ('FZ.A', allActions, 'FrostFreeze'), # FREEZE WATCH + ('HW.W', allActions, 'Wind'), # HIGH WIND 
WARNING + ('WI.Y', allActions, 'Wind'), # WIND ADVISORY + ('LW.Y', allActions, 'Wind'), # LAKE WIND ADVISORY + ('HW.A', allActions, 'Wind'), # HIGH WIND WATCH + ('SM.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY + ('ZF.Y', allActions, 'FreezeFog'), # FREEZING FOG ADVISORY + ('FF.A', allActions, 'Flood'), # FLASH FLOOD WATCH + ('FA.A', allActions, 'Flood'), # FLOOD WATCH + ('FA.W', allActions, 'Flood'), # FLOOD WARNING + ('FA.Y', allActions, 'Flood'), # FLOOD ADVISORY + ('CF.W', allActions, 'CoastalFlood'), # COASTAL FLOOD WARNING + ('LS.W', allActions, 'CoastalFlood'), # LAKESHORE FLOOD WARNING + ('CF.Y', allActions, 'CoastalFlood'), # COASTAL FLOOD ADVISORY + ('LS.Y', allActions, 'CoastalFlood'), # LAKESHORE FLOOD ADVISORY + ('CF.A', allActions, 'CoastalFlood'), # COASTAL FLOOD WATCH + ('LS.A', allActions, 'CoastalFlood'), # LAKESHORE FLOOD WATCH + ('UP.W', allActions, 'IceAccr'), # ICE ACCRETION WARNING + ('UP.Y', allActions, 'IceAccr'), # ICE ACCRETION ADVISORY + ('AS.Y', allActions, 'AirStag'), # AIR STAGNATION ADVISORY + ('AS.O', allActions, 'AirStag'), # AIR STAGNATION OUTLOOK + ('SU.W', allActions, 'HighSurf'), # HIGH SURF WARNING + ('SU.Y', allActions, 'HighSurf'), # HIGH SURF ADVISORY + ('RP.S', allActions, 'Rip'), # HIGH RIP CURRENT RISK + ('BH.S', allActions, 'BeachHaz'), # BEACH HAZARDS STATEMENT + ('AF.W', allActions, 'Ashfall'), # VOLCANIC ASHFALL WARNING + ('AF.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY + ('LO.Y', allActions, 'Drought'), # LOW WATER ADVISORY + ('TO.A', allActions, 'Convective'), # TORNADO WATCH + ('SV.A', allActions, 'Convective'), # SEVERE THUNDERSTORM WATCH + ('TS.W', allActions, 'Tsunami'), # TSUNAMI WARNING + ('TS.Y', allActions, 'Tsunami'), # TSUNAMI ADVISORY + ('TS.A', allActions, 'Tsunami'), # TSUNAMI WATCH + ] diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CCF.py 
b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CCF.py index d0db7101dc..4b2a42e6d6 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CCF.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CCF.py @@ -1,571 +1,571 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# File Name: CCF.py -# Description: This product creates a Coded Cities Forecast table -# containing 7-days of forecast data for PoP, Weather, Snow, and Max -# and Min Temps. 
-#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# CCF, CCF___Definition, CCF__Override -#------------------------------------------------------------------------- -# User Configurable Variables: -# Definition Section: -# displayName If not None, defines how product appears in GFE GUI -## -# defaultEditAreas defines edit area names and station IDs for edit areas -# expected in the form of (editAreaName, 3letterStationID) -# -# fullStationID full station identifier (4letter, KSLC) -# -# wmoID WMO ID for product header, such as FOUS45 -# -# pil Product pil, such as CCFBOX -# -# debug If on, debug_print statements will appear. -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# alwaysIncludeSnow include snow in product (1=yes,0=no) -# -# popStartZ_AM start time for PoP for AM issuance in Zulu, (12 for 12z) -# Usually changed only for OCONUS sites. 
-# wxStartLT_AM start time for Wx for AM issuance in LT, (6 for 6am) -# wxStopLT_AM stop time for Wx for AM issuance in LT, (18 for 6pm) -# -# -#------------------------------------------------------------------------- -# Weather Elements Needed: MinT, MaxT, PoP, Wx, -# Sky, Wind, and SnowAmt (optional). The Sky, Wx, -# and Max/MinT are used to determine the weather character code. All -# weather elements out to 14 periods (7 days) except for SnowAmt which -# is needed for the first 3 periods only. -#------------------------------------------------------------------------- -# Edit Areas Needed: Typically point edit areas are required for each -# entry to you wish to generate the product for. -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: None -#------------------------------------------------------------------------- -# Component Products: None -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# None -# -# To look up additional tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". -#------------------------------------------------------------------------- -# Additional Information: -# The grids are sampled according to the following rules: -# MaxT/MinT: 14 periods, 12 hours apart, daily, set up to take the -# MaxT grid overlapping noon LT and MinT grid overlapping midnight LT -# PoP: 14 periods, 12 hours apart, 12z-00z, and 00z-12z. Periods can -# be overridden using the popStartZ_AM field for OCONUS sites. -# Snow: same as PoP, but only 3 periods. -# Weather: 7 days, 12 hours, Daylight periods usually 6am-6pm LT, but -# can be overridden using the wxStartLT_AM and wxStopLT_AM field. -# -# Missing data will be shown with MMM for temperatures and snow amounts, '?' 
-# for Wx, and '/' for PoPs. -#------------------------------------------------------------------------- -# Example Output: -# -##FOUS45 KSLC 091329 -##CCFSLC -## -##SLC UU 071/057 078/062 088 99000 0000/0000/0000 -## UBBBB 061/075 059/087 071/079 058/075 0000000-00/ -##PVU BU 068/061 077/064 088 99400 0000/0000/0000 -## UBEUU 066/071 061/084 075/085 065/078 010-150032/ -######################################################################## - - -import TextRules -import SampleAnalysis -import string, time, types - - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - (("Product Issuance", "productIssuance"), "Morning", "radio", - ["Morning","Afternoon"]), - (("Forecaster Number", "forecasterNumber"), "99", "alphaNumeric"), - ] - Definition = { - "type": "smart", - "displayName": "None", # for Product Generation Menu - # Source database for product. Can be "Official", "Fcst" or "ISC" - "database": "Official", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/CCF_.txt", - "debug": 0, - - "defaultEditAreas": [("area1", "AREA1"), - ("area2", "AREA2"), - ], - - # product identifiers - "fullStationID": "", # full station identifier (4letter, KSLC) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. 
- "awipsTEXTDBhost": None, # textdb cmd host, or None for local - - # options - "alwaysIncludeSnow": 1, # include snow always (1=yes,0=no) - "popStartZ_AM": 12, # start time for PoP for AM issuance in Zulu - "wxStartLT_AM" : 6, # start time for Wx for AM issuance in LT - "wxStopLT_AM" : 18, # stop time for Wx for AM issuance in LT - "AMnumPeriods": 13, # set to 14 if using the C-20 directive - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def generateForecast(self, argDict): - # Generate formatted product for a list of edit areas - - # Get variables from varDict and Definition - self._getVariables(argDict) - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." - - # Determine time ranges - self._determineTimeRanges(argDict) - - # Sample the data - self._sampleData(argDict) - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, - "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Determine Forecaster Number and issuance time - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - self._forecasterNumber = 
self._getForecasterNumber(self._forecasterNumber) - - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - def _determineTimeRanges(self, argDict): - # Determine time ranges for product - # Returns popPeriods, snowPeriods, tempPeriods, codePeriods which - # are a list of tuples (timeRange, label). - - # Calculate ddhhmm string value - self._currentTime = argDict['creationTime'] #ZULU - self._ddhhmmTime = time.strftime("%d%H%M",time.gmtime( - self._currentTime)) - - # PoP Time ranges : - # 13 or 14 12-hour periods - # If AM, begin at 12z of issue day (default), may be overridden - # by the popStartZ_AM flag. - # If PM, begin at 00z of next day (default), may be overridden - # by the popStartZ_AM flag. - if self._productIssuance == "Morning": - startT = self._popStartZ_AM - else: - startT = self._popStartZ_AM + 12 # account for PM start later - - # rollover - different days from gmtime and local time - # so we need to sample the PoP from "yesterday" - # for MDT, rollover occurs from 5pm-midnight LST - if time.gmtime(self._currentTime)[2] != \ - time.localtime(self._currentTime)[2]: - startT = startT - 24 - - popStartTR = self.createTimeRange(startT, startT + 1, mode="Zulu") - timePeriod = 12 - timeSpan = 12 - if self._productIssuance == "Morning": - numPeriods = self._AMnumPeriods - else: - numPeriods = 14 - self._popPeriods = self.getPeriods(popStartTR, timePeriod, - timeSpan, numPeriods) - - # Snow Time Ranges, same as PoP, but not as many - self._snowPeriods = self._popPeriods[0:3] - - # Temp Time ranges : 13 or 14 periods, 12 hours apart, 5 hour span - # This is to catch the correct Max/Min temp grid - # If AM, begin with noon LT of issue day to catch MaxT - # If PM, begin with midnight LT of issue day to get MinT - if self._productIssuance == "Morning": - tempStartTR = self.createTimeRange(10, 15) 
- else: - tempStartTR = self.createTimeRange(22, 27) - timePeriod = 12 - timeSpan = 5 - if self._productIssuance == "Morning": - numPeriods = self._AMnumPeriods - else: - numPeriods = 14 - self._tempPeriods = self.getPeriods(tempStartTR, timePeriod, timeSpan, - numPeriods) - - # Code Time ranges : - # 7 non-consecutive DAYLIGHT 12 hour periods - # If AM, begin at "wxStartLT_AM" of issue day - # If PM, begin at "wxStartLT_AM" of next day - if self._productIssuance == "Morning": - codeStartTR = self.createTimeRange(self._wxStartLT_AM, - self._wxStartLT_AM + 1) - else: - codeStartTR = self.createTimeRange(self._wxStartLT_AM + 24, - self._wxStartLT_AM + 25) - timePeriod = 24 - timeSpan = self._wxStopLT_AM - self._wxStartLT_AM - numPeriods = 7 - self._codePeriods = self.getPeriods(codeStartTR, timePeriod, timeSpan, - numPeriods) - - return - - def _sampleData(self, argDict): - # Sample the data. Returns the samplers for pop, snow, temp, and code - sampleList = [] - sampleList.append((self._analysisListPoP(), self._popPeriods)) - sampleList.append((self._analysisListSnow(), self._snowPeriods)) - sampleList.append((self._analysisListTemp(), self._tempPeriods)) - sampleList.append((self._analysisListCode(), self._codePeriods)) - sampleInfo = [] - for analList, periods in sampleList: - sampleInfo.append((analList, periods, self._areaList)) - - self._sampler = self.getSampler(argDict, sampleInfo) - return - - def _preProcessProduct(self, fcst, argDict): - # Add product heading to fcst string - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst + areaLabel + " " - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - # creates the product for each edit area - - ######################## - # first line of product - ######################## - - # wx codes - for index in xrange(0, 2): - timeRange, 
label = self._codePeriods[index] - fcst = fcst + self._getCCFCode(self._analysisListCode(), - editArea, timeRange) - fcst = fcst + " " - - # max/min temp codes - separators = ["/", " ", "/", " ", " "] - for index in xrange(0, 5): - timeRange, label = self._tempPeriods[index] - fcst = fcst + self._getMinOrMax(self._analysisListTemp(), - editArea, timeRange) + separators[index] - - # forecaster number - fcst = fcst + self._forecasterNumber - - # Pop fields - for index in xrange(0, 3): - timeRange, label = self._popPeriods[index] - fcst = fcst + self._getPoP(self._analysisListPoP(), editArea, - timeRange) - - # Snow fields - if self._alwaysIncludeSnow: - fcst = fcst + self._addSnowEntries(self._analysisListSnow(), - self._snowPeriods, editArea) - fcst = fcst + "\n" - - ######################## - # second line of product - ######################## - - fcst = fcst + " " # ident 4 spaces on the 2nd line - - # wx codes - startIndex = 2 - for index in xrange(startIndex, len(self._codePeriods)): - timeRange, label = self._codePeriods[index] - fcst = fcst + self._getCCFCode(self._analysisListCode(), - editArea, timeRange) - fcst = fcst + " " - - # max/min temp codes - startIndex = 5 - separators = ["/", " ", "/", " ", "/", " ", "/", " ", " "] - for index in xrange(startIndex, len(self._tempPeriods)): - timeRange, label = self._tempPeriods[index] - fcst = fcst + self._getMinOrMax(self._analysisListTemp(), - editArea, timeRange) + separators[index-startIndex] - - # Pop fields - startIndex = 3 - for index in xrange(startIndex, len(self._popPeriods)): - timeRange, label = self._popPeriods[index] - fcst = fcst + self._getPoP(self._analysisListPoP(), editArea, - timeRange) - fcst = fcst + "\n" - - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst - - def _postProcessProduct(self, fcst, argDict): - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - #fcst = fcst + "\n$$\n\n" - return fcst - - 
######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _analysisListPoP(self): - return [ - ("PoP", self.stdDevMaxAvg), - ] - - def _analysisListSnow(self): - return [ - ("SnowAmt", self.minMaxSum), - ] - - def _analysisListTemp(self): - return [ - ("MinT", self.avg), - ("MaxT", self.avg), - ] - - def _analysisListCode(self): - return [ - ("MinT", self.avg), - ("MaxT", self.avg), - ("PoP", self.stdDevMaxAvg), - ("Wx", self.dominantWx), - ("Sky", self.avg), - ("Wind", self.vectorAvg) - ] - - def _getMinOrMax(self, analysisList, area, timeRange): - # Return a Max or Min value depending on availability - # Examples: 076 for positive, 915 for negative, MMM for missing - statDict = self.getStatDict(self._sampler, analysisList, - timeRange, area) - dayNight = self.getPeriod(timeRange,shiftToLocal=1) - if dayNight == self.DAYTIME(): - maxV = self.getStats(statDict, "MaxT") - return self._temperatureFormat(maxV) - else: - minV = self.getStats(statDict, "MinT") - return self._temperatureFormat(minV) - - def _temperatureFormat(self, value): - # Return a 3-digit string with leading zeroes given the value - # Examples: 076 for positive, 915 for negative, MMM for missing - if value is None: - return "MMM" #for missing - value = int(round(value)) - if value < 0: - value = abs(value) + 900 - valStr = `value` - while len(valStr) < 3: - valStr = "0" + valStr - return valStr - - def _getPoP(self,analysisList,area,timeRange): - # Return a one-digit string value representing the 10's place of - # rounded PoP value. Values are: 0-123456789+ and "/" for missing. 
- statDict = self.getStatDict( - self._sampler, analysisList, timeRange, area) - pop = self.getStats(statDict, "PoP__stdDevMaxAvg") - if pop is None: - return "/" #Missing symbol - popMax5=int(self.round(pop,"Nearest",5)) - if popMax5 == 5: - return "-" - popMax10=int(self.round(pop,"Nearest",10)) - if popMax10 >90: - return "+" - val="%1.1d" % int(popMax10/10) - return val - - def _getCCFCode(self, analysisList, area, timeRange): - # Return the CCF code (single character) which depicts the sky, - # wind, weather, obstructions. Example: "B". "?" is returned - # for missing data. - statDict = self.getStatDict( - self._sampler, analysisList, timeRange, area) - code = self.getCode(statDict, timeRange) - if code is None: - return "?" - else: - return code - - def _getForecasterNumber(self, fnString): - # Returns a string representing the specified forecaster number. - # Reformats the string to ensure it is 2 digits. - try: - forecasterNumber = int(fnString) - if forecasterNumber > 99 or forecasterNumber < 0: - return "99" - elif forecasterNumber < 10: - return "0" + `forecasterNumber` - else: - return `forecasterNumber` - except: - return "99" - - def _addSnowEntries(self, analysisList, timePeriods, editArea): - # Snow entry processing. Returns ranges of snow values in the - # edit area for each of the periods. - # Example: 0102/0202/0000. Will return "MMMM" for missing periods. - # This function will "calculate" a snow range if a single value - # is provided to it, to make the output more realistic. 
- returnString = " " - for period, label in timePeriods: - statDict = self.getStatDict(self._sampler, analysisList, - period, editArea) - stats = self.getStats(statDict, "SnowAmt__minMaxSum") - - if stats is None: - returnString = returnString + "MMMM/" #Missing Data - else: - minV, maxV, sumV = stats - minAdj, maxAdj = self._adjustSnowAmounts(minV, maxV, sumV) - minString = string.rjust(`int(round(minAdj))`, 2) - maxString = string.rjust(`int(round(maxAdj))`, 2) - if minString[0] == " ": # fill in leading zero - minString = "0" + minString[1:] - if maxString[0] == " ": # fill in leading zero - maxString = "0" + maxString[1:] - returnString = returnString + minString + maxString + "/" - - # strip off the final "/" - if returnString[-1] == "/": - returnString = returnString[:-1] - - return returnString - - def _adjustSnowAmounts(self, minV, maxV, sumV): - # Snow Amount Adjustments. Since the CCF is typically a point - # forecast, the minV and maxV are identical, but we would prefer - # to report a "real" range. This method compares the min and maxV - # and if the range isn't sufficient, then puts a range into the - # data. - - # rangeThreshold - min/max must differ less than this in order - # to use the snow table, otherwise, the min/max values are used. - rangeThreshold = 2 - if maxV - minV > rangeThreshold: - return minV, maxV - - # use snow table, simply based on the average (sumV) found. However, - # the maximum returned is never less than the actual maximum found, - # and the minimum returned is never more than the actual minimum found. 
- # the tuples are (snowLessThanValue, returnedMinVal, returnedMaxVal) - table = [\ - (0.2, 0, 0), #less than 0.2", then report no snow at all - (1, 0, 1), - (2, 1, 2), - (3, 1, 3), - (4, 2, 4), - (5, 3, 5), - (6, 4, 6), - (7, 4, 8), - (10, 6, 10), - (10000, int(round(sumV)), int(round(sumV))+5) - ] - - # calculate the entry in the table that is appropriate - for threshold, tableMin, tableMax in table: - if sumV < threshold: - # found a match in the table - if tableMin > minV: - reportedMin = minV #actual min is less than table min - else: - reportedMin = tableMin # use the table min - if tableMax < maxV: - reportedMax = maxV #actual max is greater than table max - else: - reportedMax = tableMax # use the table max - return reportedMin, reportedMax - - #should never get here since the table last entry is HUGE. - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# File Name: CCF.py +# Description: This product creates a Coded Cities Forecast table +# containing 7-days of forecast data for PoP, Weather, Snow, and Max +# and Min Temps. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# CCF, CCF___Definition, CCF__Override +#------------------------------------------------------------------------- +# User Configurable Variables: +# Definition Section: +# displayName If not None, defines how product appears in GFE GUI +## +# defaultEditAreas defines edit area names and station IDs for edit areas +# expected in the form of (editAreaName, 3letterStationID) +# +# fullStationID full station identifier (4letter, KSLC) +# +# wmoID WMO ID for product header, such as FOUS45 +# +# pil Product pil, such as CCFBOX +# +# debug If on, debug_print statements will appear. +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# alwaysIncludeSnow include snow in product (1=yes,0=no) +# +# popStartZ_AM start time for PoP for AM issuance in Zulu, (12 for 12z) +# Usually changed only for OCONUS sites. +# wxStartLT_AM start time for Wx for AM issuance in LT, (6 for 6am) +# wxStopLT_AM stop time for Wx for AM issuance in LT, (18 for 6pm) +# +# +#------------------------------------------------------------------------- +# Weather Elements Needed: MinT, MaxT, PoP, Wx, +# Sky, Wind, and SnowAmt (optional). The Sky, Wx, +# and Max/MinT are used to determine the weather character code. All +# weather elements out to 14 periods (7 days) except for SnowAmt which +# is needed for the first 3 periods only. 
+#------------------------------------------------------------------------- +# Edit Areas Needed: Typically point edit areas are required for each +# entry to you wish to generate the product for. +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: None +#------------------------------------------------------------------------- +# Component Products: None +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# None +# +# To look up additional tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Additional Information: +# The grids are sampled according to the following rules: +# MaxT/MinT: 14 periods, 12 hours apart, daily, set up to take the +# MaxT grid overlapping noon LT and MinT grid overlapping midnight LT +# PoP: 14 periods, 12 hours apart, 12z-00z, and 00z-12z. Periods can +# be overridden using the popStartZ_AM field for OCONUS sites. +# Snow: same as PoP, but only 3 periods. +# Weather: 7 days, 12 hours, Daylight periods usually 6am-6pm LT, but +# can be overridden using the wxStartLT_AM and wxStopLT_AM field. +# +# Missing data will be shown with MMM for temperatures and snow amounts, '?' +# for Wx, and '/' for PoPs. 
+#------------------------------------------------------------------------- +# Example Output: +# +##FOUS45 KSLC 091329 +##CCFSLC +## +##SLC UU 071/057 078/062 088 99000 0000/0000/0000 +## UBBBB 061/075 059/087 071/079 058/075 0000000-00/ +##PVU BU 068/061 077/064 088 99400 0000/0000/0000 +## UBEUU 066/071 061/084 075/085 065/078 010-150032/ +######################################################################## + + +import TextRules +import SampleAnalysis +import string, time, types + + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + (("Product Issuance", "productIssuance"), "Morning", "radio", + ["Morning","Afternoon"]), + (("Forecaster Number", "forecasterNumber"), "99", "alphaNumeric"), + ] + Definition = { + "type": "smart", + "displayName": "None", # for Product Generation Menu + # Source database for product. Can be "Official", "Fcst" or "ISC" + "database": "Official", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/CCF_.txt", + "debug": 0, + + "defaultEditAreas": [("area1", "AREA1"), + ("area2", "AREA2"), + ], + + # product identifiers + "fullStationID": "", # full station identifier (4letter, KSLC) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. 
+ "awipsTEXTDBhost": None, # textdb cmd host, or None for local + + # options + "alwaysIncludeSnow": 1, # include snow always (1=yes,0=no) + "popStartZ_AM": 12, # start time for PoP for AM issuance in Zulu + "wxStartLT_AM" : 6, # start time for Wx for AM issuance in LT + "wxStopLT_AM" : 18, # stop time for Wx for AM issuance in LT + "AMnumPeriods": 13, # set to 14 if using the C-20 directive + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def generateForecast(self, argDict): + # Generate formatted product for a list of edit areas + + # Get variables from varDict and Definition + self._getVariables(argDict) + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." + + # Determine time ranges + self._determineTimeRanges(argDict) + + # Sample the data + self._sampleData(argDict) + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, + "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Determine Forecaster Number and issuance time + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + self._forecasterNumber = 
self._getForecasterNumber(self._forecasterNumber) + + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + def _determineTimeRanges(self, argDict): + # Determine time ranges for product + # Returns popPeriods, snowPeriods, tempPeriods, codePeriods which + # are a list of tuples (timeRange, label). + + # Calculate ddhhmm string value + self._currentTime = argDict['creationTime'] #ZULU + self._ddhhmmTime = time.strftime("%d%H%M",time.gmtime( + self._currentTime)) + + # PoP Time ranges : + # 13 or 14 12-hour periods + # If AM, begin at 12z of issue day (default), may be overridden + # by the popStartZ_AM flag. + # If PM, begin at 00z of next day (default), may be overridden + # by the popStartZ_AM flag. + if self._productIssuance == "Morning": + startT = self._popStartZ_AM + else: + startT = self._popStartZ_AM + 12 # account for PM start later + + # rollover - different days from gmtime and local time + # so we need to sample the PoP from "yesterday" + # for MDT, rollover occurs from 5pm-midnight LST + if time.gmtime(self._currentTime)[2] != \ + time.localtime(self._currentTime)[2]: + startT = startT - 24 + + popStartTR = self.createTimeRange(startT, startT + 1, mode="Zulu") + timePeriod = 12 + timeSpan = 12 + if self._productIssuance == "Morning": + numPeriods = self._AMnumPeriods + else: + numPeriods = 14 + self._popPeriods = self.getPeriods(popStartTR, timePeriod, + timeSpan, numPeriods) + + # Snow Time Ranges, same as PoP, but not as many + self._snowPeriods = self._popPeriods[0:3] + + # Temp Time ranges : 13 or 14 periods, 12 hours apart, 5 hour span + # This is to catch the correct Max/Min temp grid + # If AM, begin with noon LT of issue day to catch MaxT + # If PM, begin with midnight LT of issue day to get MinT + if self._productIssuance == "Morning": + tempStartTR = 
self.createTimeRange(10, 15) + else: + tempStartTR = self.createTimeRange(22, 27) + timePeriod = 12 + timeSpan = 5 + if self._productIssuance == "Morning": + numPeriods = self._AMnumPeriods + else: + numPeriods = 14 + self._tempPeriods = self.getPeriods(tempStartTR, timePeriod, timeSpan, + numPeriods) + + # Code Time ranges : + # 7 non-consecutive DAYLIGHT 12 hour periods + # If AM, begin at "wxStartLT_AM" of issue day + # If PM, begin at "wxStartLT_AM" of next day + if self._productIssuance == "Morning": + codeStartTR = self.createTimeRange(self._wxStartLT_AM, + self._wxStartLT_AM + 1) + else: + codeStartTR = self.createTimeRange(self._wxStartLT_AM + 24, + self._wxStartLT_AM + 25) + timePeriod = 24 + timeSpan = self._wxStopLT_AM - self._wxStartLT_AM + numPeriods = 7 + self._codePeriods = self.getPeriods(codeStartTR, timePeriod, timeSpan, + numPeriods) + + return + + def _sampleData(self, argDict): + # Sample the data. Returns the samplers for pop, snow, temp, and code + sampleList = [] + sampleList.append((self._analysisListPoP(), self._popPeriods)) + sampleList.append((self._analysisListSnow(), self._snowPeriods)) + sampleList.append((self._analysisListTemp(), self._tempPeriods)) + sampleList.append((self._analysisListCode(), self._codePeriods)) + sampleInfo = [] + for analList, periods in sampleList: + sampleInfo.append((analList, periods, self._areaList)) + + self._sampler = self.getSampler(argDict, sampleInfo) + return + + def _preProcessProduct(self, fcst, argDict): + # Add product heading to fcst string + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + areaLabel + " " + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + # creates the product for each edit area + + ######################## + # first line of product + ######################## + + # wx codes + for index in 
range(0, 2): + timeRange, label = self._codePeriods[index] + fcst = fcst + self._getCCFCode(self._analysisListCode(), + editArea, timeRange) + fcst = fcst + " " + + # max/min temp codes + separators = ["/", " ", "/", " ", " "] + for index in range(0, 5): + timeRange, label = self._tempPeriods[index] + fcst = fcst + self._getMinOrMax(self._analysisListTemp(), + editArea, timeRange) + separators[index] + + # forecaster number + fcst = fcst + self._forecasterNumber + + # Pop fields + for index in range(0, 3): + timeRange, label = self._popPeriods[index] + fcst = fcst + self._getPoP(self._analysisListPoP(), editArea, + timeRange) + + # Snow fields + if self._alwaysIncludeSnow: + fcst = fcst + self._addSnowEntries(self._analysisListSnow(), + self._snowPeriods, editArea) + fcst = fcst + "\n" + + ######################## + # second line of product + ######################## + + fcst = fcst + " " # ident 4 spaces on the 2nd line + + # wx codes + startIndex = 2 + for index in range(startIndex, len(self._codePeriods)): + timeRange, label = self._codePeriods[index] + fcst = fcst + self._getCCFCode(self._analysisListCode(), + editArea, timeRange) + fcst = fcst + " " + + # max/min temp codes + startIndex = 5 + separators = ["/", " ", "/", " ", "/", " ", "/", " ", " "] + for index in range(startIndex, len(self._tempPeriods)): + timeRange, label = self._tempPeriods[index] + fcst = fcst + self._getMinOrMax(self._analysisListTemp(), + editArea, timeRange) + separators[index-startIndex] + + # Pop fields + startIndex = 3 + for index in range(startIndex, len(self._popPeriods)): + timeRange, label = self._popPeriods[index] + fcst = fcst + self._getPoP(self._analysisListPoP(), editArea, + timeRange) + fcst = fcst + "\n" + + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + + def _postProcessProduct(self, fcst, argDict): + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + #fcst = fcst + 
"\n$$\n\n" + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _analysisListPoP(self): + return [ + ("PoP", self.stdDevMaxAvg), + ] + + def _analysisListSnow(self): + return [ + ("SnowAmt", self.minMaxSum), + ] + + def _analysisListTemp(self): + return [ + ("MinT", self.avg), + ("MaxT", self.avg), + ] + + def _analysisListCode(self): + return [ + ("MinT", self.avg), + ("MaxT", self.avg), + ("PoP", self.stdDevMaxAvg), + ("Wx", self.dominantWx), + ("Sky", self.avg), + ("Wind", self.vectorAvg) + ] + + def _getMinOrMax(self, analysisList, area, timeRange): + # Return a Max or Min value depending on availability + # Examples: 076 for positive, 915 for negative, MMM for missing + statDict = self.getStatDict(self._sampler, analysisList, + timeRange, area) + dayNight = self.getPeriod(timeRange,shiftToLocal=1) + if dayNight == self.DAYTIME(): + maxV = self.getStats(statDict, "MaxT") + return self._temperatureFormat(maxV) + else: + minV = self.getStats(statDict, "MinT") + return self._temperatureFormat(minV) + + def _temperatureFormat(self, value): + # Return a 3-digit string with leading zeroes given the value + # Examples: 076 for positive, 915 for negative, MMM for missing + if value is None: + return "MMM" #for missing + value = int(round(value)) + if value < 0: + value = abs(value) + 900 + valStr = repr(value) + while len(valStr) < 3: + valStr = "0" + valStr + return valStr + + def _getPoP(self,analysisList,area,timeRange): + # Return a one-digit string value representing the 10's place of + # rounded PoP value. Values are: 0-123456789+ and "/" for missing. 
+ statDict = self.getStatDict( + self._sampler, analysisList, timeRange, area) + pop = self.getStats(statDict, "PoP__stdDevMaxAvg") + if pop is None: + return "/" #Missing symbol + popMax5=int(self.round(pop,"Nearest",5)) + if popMax5 == 5: + return "-" + popMax10=int(self.round(pop,"Nearest",10)) + if popMax10 >90: + return "+" + val="%1.1d" % int(popMax10/10) + return val + + def _getCCFCode(self, analysisList, area, timeRange): + # Return the CCF code (single character) which depicts the sky, + # wind, weather, obstructions. Example: "B". "?" is returned + # for missing data. + statDict = self.getStatDict( + self._sampler, analysisList, timeRange, area) + code = self.getCode(statDict, timeRange) + if code is None: + return "?" + else: + return code + + def _getForecasterNumber(self, fnString): + # Returns a string representing the specified forecaster number. + # Reformats the string to ensure it is 2 digits. + try: + forecasterNumber = int(fnString) + if forecasterNumber > 99 or forecasterNumber < 0: + return "99" + elif forecasterNumber < 10: + return "0" + repr(forecasterNumber) + else: + return repr(forecasterNumber) + except: + return "99" + + def _addSnowEntries(self, analysisList, timePeriods, editArea): + # Snow entry processing. Returns ranges of snow values in the + # edit area for each of the periods. + # Example: 0102/0202/0000. Will return "MMMM" for missing periods. + # This function will "calculate" a snow range if a single value + # is provided to it, to make the output more realistic. 
+ returnString = " " + for period, label in timePeriods: + statDict = self.getStatDict(self._sampler, analysisList, + period, editArea) + stats = self.getStats(statDict, "SnowAmt__minMaxSum") + + if stats is None: + returnString = returnString + "MMMM/" #Missing Data + else: + minV, maxV, sumV = stats + minAdj, maxAdj = self._adjustSnowAmounts(minV, maxV, sumV) + minString = string.rjust(repr(int(round(minAdj))), 2) + maxString = string.rjust(repr(int(round(maxAdj))), 2) + if minString[0] == " ": # fill in leading zero + minString = "0" + minString[1:] + if maxString[0] == " ": # fill in leading zero + maxString = "0" + maxString[1:] + returnString = returnString + minString + maxString + "/" + + # strip off the final "/" + if returnString[-1] == "/": + returnString = returnString[:-1] + + return returnString + + def _adjustSnowAmounts(self, minV, maxV, sumV): + # Snow Amount Adjustments. Since the CCF is typically a point + # forecast, the minV and maxV are identical, but we would prefer + # to report a "real" range. This method compares the min and maxV + # and if the range isn't sufficient, then puts a range into the + # data. + + # rangeThreshold - min/max must differ less than this in order + # to use the snow table, otherwise, the min/max values are used. + rangeThreshold = 2 + if maxV - minV > rangeThreshold: + return minV, maxV + + # use snow table, simply based on the average (sumV) found. However, + # the maximum returned is never less than the actual maximum found, + # and the minimum returned is never more than the actual minimum found. 
+ # the tuples are (snowLessThanValue, returnedMinVal, returnedMaxVal) + table = [\ + (0.2, 0, 0), #less than 0.2", then report no snow at all + (1, 0, 1), + (2, 1, 2), + (3, 1, 3), + (4, 2, 4), + (5, 3, 5), + (6, 4, 6), + (7, 4, 8), + (10, 6, 10), + (10000, int(round(sumV)), int(round(sumV))+5) + ] + + # calculate the entry in the table that is appropriate + for threshold, tableMin, tableMax in table: + if sumV < threshold: + # found a match in the table + if tableMin > minV: + reportedMin = minV #actual min is less than table min + else: + reportedMin = tableMin # use the table min + if tableMax < maxV: + reportedMax = maxV #actual max is greater than table max + else: + reportedMax = tableMax # use the table max + return reportedMin, reportedMax + + #should never get here since the table last entry is HUGE. + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CWF.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CWF.py index 1c1ee8eebb..d80b0f77b8 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CWF.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CWF.py @@ -1,1327 +1,1327 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: CWF (Coastal Waters Forecast) -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# CWF.py, CWF___Definition, CWF__Overrides -#------------------------------------------------------------------------- -# Customization Points in Local File: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas, default is Combinations -# -# productName defines name of product e.g. "Coastal Waters Forecast" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "CWFBOS" -# areaName (opt.) Area name for product header, such as "Western New York" -# wfoCityState WFO location, such as "Buffalo NY" -# -# Optional Configuration Items -# -# editAreaSuffix default None. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". 
If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined or the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the FormatterLauncher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -# -# periodCombining If 1, an attempt will be made to combine components -# or time periods into one. Otherwise no period -# combining will will be done. -# includeEveningPeriod Include a 6 hour Evening period on the 3rd day -# useAbbreviations -# If 1, use marine abbreviations e.g. 
TSTM instead of THUNDERSTORM, -# NW instead of NORTHWEST -# (See marine_abbreviateText in the TextRules module) -# -# Weather-related flags -# hoursSChcEnds - specifies hours past the beginning of the first -# first period of the product to stop including 'Slight -# Chance' or 'Isolated' weather types (ERH policy -# allows values of 1-5 * 12 hour periods) -# -# areaDictionary Modify the AreaDictionary utility with UGC -# information about zones -# useHolidays Set to 1 to use holidays in the time period labels -# -# Trouble-shooting items -# passLimit -- Limit on passes allowed through Narrative Tree -# trace -- Set to 1 to turn on trace through Narrative Tree -# -# OVERRIDES -# -# Required Overrides -# -# _Text1(), _Text2() Descriptive text for header -# -# NARRATIVE CUSTOMIZATION POINTS -# The phrases in this product can be customized in many ways by overriding -# infrastructure methods in the Local file. -# You will see common overrides in the Local file and you may change them -# in that there. -# For further customization, you can determine which phrases your product is -# using by examining the Component Product Definitions below. -# Then, you can look up the phrase in the Text Product User Guide which will -# describe the all the relevant override methods associated with the phrase. -# Refer to the Customization section of the Text Product User Guide -# for step-by-step information. 
-# -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Wind (every 3 hours to 3 days, then every 6 hours to 7 days) -# WaveHeight and/or WindWaveHgt -# (every 6 hours to 3 days, then every 12 hours to 7 days) -# Wx (every 6 hours to 3 days, then every 12 hours to 7 days) -# Optional: -# WindGust (every 3 hours to 7 days) -# Swell, Swell2, Period, Period2 (every 6 hours to 7 days) -#------------------------------------------------------------------------- -# Edit Areas Needed: None -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -# Combinations -#------------------------------------------------------------------------- -# Component Products: -# CWFPeriod (component) -# CWFPeriodMid (component) -# ExtendedLabel(component) -# CWFExtended (component) -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". 
-#------------------------------------------------------------------------- -# Additional Information: -# -# COMMON OVERRIDES -# from CWF: -# _Text1 -# _Text2 -# _issuance_list -# from MarinePhrases -# inlandWatersAreas -# inlandWatersWave_element -# seasWaveHeight_element -# seasWindWave_element -# waveHeight_wind_threshold -# marine_wind_flag -# marine_wind_combining_flag -# marine_wind_verbose_flag -# from ConfigVariables -# phrase_descriptor_dict -# null_nlValue_dict -# first_null_phrase_dict -# null_phrase_dict -# maximum_range_nlValue_dict -# from WxPhrases: -# embedded_visibility_flag -# visibility_wx_threshold -# significant_wx_visibility_subkeys -# wxCoverageDescriptors -# wxTypeDescriptors -# wxAttributeDescriptors -# wxIntensityDescriptors -# wxCombinations -# combine_T_RW -# from SampleAnalysis -# moderated_dict -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Marine Services. -#------------------------------------------------------------------------- - -import TextRules -import SampleAnalysis -import ForecastNarrative -import time, string, re, types -import TimeRange - - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - #(("Include Tropical?", "includeTropical") , "No", "radio", ["Yes","No"]), - ] - Definition = { - "type": "smart", - "displayName": "None", - "database": "Official", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/CWF_.txt", - "debug": 0, - # Name of map background for creating Combinations - "mapNameForCombinations": "Marine_Zones_", - - "lineLength": 66, - ## Edit Areas: Create Combinations file with edit area combinations. 
- "showZoneCombiner" : 1, # 1 to cause zone combiner to display - "defaultEditAreas" : "Combinations_CWF__", - "editAreaSuffix": None, - # product identifiers - "productName": "Coastal Waters Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "Georgia" -- optional - "wfoCityState": "", # Location of WFO - city,state - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - - "hazardSamplingThreshold": (10, None), #(%cov, #points) - - "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time - - "periodCombining" : 0, # If 1, combine periods, if possible - # Product-specific variables: - # Set to one if you want a 6-hour evening period instead of - # 18-hour period without lows - "includeEveningPeriod": 1, - "useAbbreviations": 0, - - # Weather-related flags - "hoursSChcEnds": 24, - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - "useHolidays": 0, # Set to 1 to use holidays in the time period labels - # Language - "language": "english", - - # Trouble-shooting items - "passLimit": 20, # Limit on passes allowed through - # Narrative Tree - "trace": 0, # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - ######################################################################## - # OVERRIDING METHODS, THRESHOLDS AND VARIABLES - ######################################################################## - # MUST BE OVERRIDDEN IN LOCAL FILE - def _Text1(self): - return "" - - def _Text2(self, argDict): - synopsis = "" - - # Try to get Synopsis from previous CWF - #productID = "BOSCWFBOS" - #synopsis = self.getPreviousProduct(productID, "SYNOPSIS") - # Clean up the 
previous synopsis - #synopsis = re.sub(r'\n', r' ', synopsis) - #synopsis = self.endline(synopsis, linelength=66, breakStr=" ") - - # Convert absolute time pointer to a tuple of values like that - # returned by time.gmtime() - #expTuple = time.strptime('%s' % (self._expireTime), - # '%b %d %y %H:%M:%S GMT') - expTuple = self._expireTime.utctimetuple() - - # Format expiration time for inclusion in synopsis header - expTime = time.strftime('%d%H%M', expTuple) - siteID = self.getSiteID(argDict) - - if len("_") == 0: - ugc = self.synopsisUGC(siteID) - heading = self.synopsisHeading(siteID) - else: - ugc = self.synopsisUGC(siteID, self._pil[-3:]) - heading = self.synopsisHeading(siteID, self._pil[-3:]) - - return "%s-%s-\n" % (ugc, expTime) + self._timeLabel + "\n\n" + \ - heading + "\n" + synopsis + "\n$$\n\n" - - ######################################################################## - - def pop_wx_lower_threshold(self, tree, node): - # Always report weather - return 0 - - def rounding_method_dict(self, tree, node): - # Special rounding methods - # - return { - "Wind": self.marineRounding, - } - - def element_outUnits_dict(self, tree, node): - dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) - dict["Visibility"] = "NM" - return dict - - def gust_wind_difference_nlValue(self, tree, node): - # Difference between gust and maxWind below which gusts are not - # mentioned. Units are MPH - if self._includeTropical: - return 5 - else: - return 10 - - def temporalCoverage_hours(self, parmHisto, timeRange, componentName): - # COMMENT: At WFO MFL we use 3 hrly wind grids. If you use 1 hrly wind grids - # and this parameter is 2 or higher, tropical cyclone winds affecting the very - # early or latter part of a forecast period might be neglected. 1 assures - # maximum sensitivity. 
- if self._includeTropical: - return 1 - else: - return 0 - - - def moderated_dict(self, parmHisto, timeRange, componentName): - """ - Modifed to lower the high end filter threshold from 20 MPH to - 15 MPH for Tropical. - """ - # COMMENT: This dictionary defines the low and high limit at which - # outliers will be removed when calculating moderated stats. - # By convention the first value listed is the percentage - # allowed for low values and second the percentage allowed - # for high values. The thresholds chosen below gave best results - # during testing with 2004 and 2005 tropical cyclones. This dict - # is used with the moderatedMinMax analysis method specified in the - # TropicalPeriod definitions specified further down for use with - # tropical cyclones with wind parameters. - - # Get Baseline thresholds - dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, - timeRange, componentName) - - # Change thresholds for Wind, WindGust, WaveHeight and Swell - if self._includeTropical: - dict["Wind"] = (0, 15) - dict["WindGust"] = (0, 15) - dict["WaveHeight"] = (0, 15) - dict["Swell"] = (0, 15) - return dict - - - - ######################################################################## - # COMPONENT PRODUCT DEFINITIONS - ######################################################################## - - def _PoP_analysisMethod(self, componentName): - # Alternative PoP analysis methods for consistency between PoP and Wx - #return self.maxMode - #return self.maximum - return self.stdDevMaxAvg - - - def addTropical(self, analysisList, phraseList, includeHazards=True): - newAnalysisList = [] - for entry in analysisList: - # Sampling defined as a tuple (field, statistic, temporal rate) - # If this is NOT a Wind or WindGust statistic - if entry[0] not in ["Hazards", "Wind", "WindGust", "WaveHeight", "Swell"]: - # Add this statistic to the new analysisList - newAnalysisList.append(entry) - newAnalysisList += [ - ("Wind", self.vectorModeratedMinMax, [6]), - 
("WindGust", self.moderatedMinMax, [6]), - ("WaveHeight", self.moderatedMax, [6]), - ("Swell", self.vectorModeratedMinMax, [6]), - ("pws34", self.maximum), - ("pws64", self.maximum), - ("pwsN34", self.maximum), - ("pwsN64", self.maximum), - ("pwsD34", self.maximum), - ("pwsD64", self.maximum), - ] - if includeHazards: - newAnalysisList.append(("Hazards", self.discreteTimeRangesByKey)) - - phraseList.insert(0, self.pws_phrase) - return newAnalysisList, phraseList - - def CWFPeriod(self): - analysisList = [ - # NOTE: Choose from the following analysis options. - # Do not remove the "vectorMinMax" analysis for - # "Wind". This is necessary to get an absolute max if - # the useWindsForGusts flag is on. - - # Use the following if you want moderated ranges - # (e.g. N WIND 10 to 20 KT) - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - ("Wind", self.vectorModeratedMinMax, [3]), - ("Wind", self.vectorMinMax, [12]), - ("WindGust", self.moderatedMax, [3]), - ("WaveHeight", self.moderatedMinMax, [6]), - ("WindWaveHgt", self.moderatedMinMax, [6]), - ("Swell", self.vectorModeratedMinMax, [6]), - ("Swell2", self.vectorModeratedMinMax, [6]), - ("Period", self.moderatedMinMax, [6]), - ("Period2", self.moderatedMinMax, [6]), - ("Wx", self.rankedWx, [6]), - ("T", self.minMax), - ("PoP", self._PoP_analysisMethod("CWFPeriod"), [6]), - ("PoP", self.binnedPercent, [6]), - - # Use the following if you want moderated - # single values (e.g. N WIND 20 KT). - # Set the moderating percentage in the "moderated_dict" - # dictionary module. 
- # NOTE: If you use these methods, include and uncomment - # the "combine_singleValues_flag_dict" in your Local file (see below) - #("Wind", self.vectorModeratedMax, [3]), - #("Wind", self.vectorMinMax, [12]), - #("WindGust", self.moderatedMax, [3]), - #("WaveHeight", self.moderatedMax, [6]), - #("WindWaveHgt", self.moderatedMax, [6]), - #("Swell", self.vectorModeratedMax, [6]), - #("Swell2", self.vectorModeratedMax, [6]), - #("Period", self.moderatedMax, [6]), - #("Period2", self.moderatedMax, [6]), - #("Wx", self.rankedWx, [6]), - #("T", self.minMax), - #("PoP", self._PoP_analysisMethod("CWFPeriod")), - #("PoP", self.binnedPercent, [6]), - - # Use the following if you want absolute ranges. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - # Split time range in quarters for Wind and WindGust - #("Wind", self.vectorMinMax, [3]), - #("Wind", self.vectorMinMax, [12]), - #("WindGust", self.maximum, [3]), - #("WaveHeight", self.minMax, [6]), - #("WindWaveHgt", self.minMax, [6]), - # Split time range in half for Wx and Swell - #("Swell", self.vectorMinMax, [6]), - #("Swell2", self.vectorMinMax, [6]), - #("Period", self.avg, [6]), - #("Period2", self.avg, [6]), - #("Wx", self.rankedWx, [6]), - #("T", self.minMax), - #("PoP", self._PoP_analysisMethod("CWFPeriod")), - #("PoP", self.binnedPercent, [6]), - ] - phraseList = [ - # WINDS - self.marine_wind_withGusts_phrase, - # Alternative: - #self.marine_wind_phrase, - #self.gust_phrase, - # WAVES - self.wave_withPeriods_phrase, - # Alternative: - #self.wave_phrase, - # Optional: - self.chop_phrase, - # SWELLS AND PERIODS - self.swell_withPeriods_phrase, - # Alternative: - #self.swell_phrase, - #self.period_phrase, - # WEATHER - self.weather_phrase, - self.visibility_phrase, - ] - if self._includeTropical: - analysisList, phraseList = self.addTropical(analysisList, phraseList) - - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, 
- self.wordWrap, - ], - - "analysisList": analysisList, - "phraseList": phraseList, - - } - - def CWFPeriodMid(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - - "analysisList": [ - # NOTE: Choose from the following analysis options. - # Do not remove the "vectorMinMax" analysis for - # "Wind". This is necessary to get an absolute max if - # the useWindsForGusts flag is on. - - # Use the following if you want moderated ranges - # (e.g. N WIND 10 to 20 KT) - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - ("Wind", self.vectorModeratedMinMax, [6]), - ("Wind", self.vectorMinMax, [12]), - ("WindGust", self.moderatedMax, [6]), - ("WaveHeight", self.moderatedMinMax, [6]), - ("WindWaveHgt", self.moderatedMinMax, [6]), - ("Swell", self.vectorModeratedMinMax, [6]), - ("Swell2", self.vectorModeratedMinMax, [6]), - ("Wx", self.rankedWx, [6]), - ("PoP", self._PoP_analysisMethod("CWFPeriodMid"), [6]), - ("PoP", self.binnedPercent, [6]), - - # Use the following if you want moderated - # single values (e.g. N WIND 20 KT). - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # NOTE: If you use these methods, include and uncomment - # the "combine_singleValues_flag_dict" in your Local file (see below) - #("Wind", self.vectorModeratedMax, [6]), - #("Wind", self.vectorMinMax, [12]), - #("WindGust", self.moderatedMax, [6]), - #("WaveHeight", self.moderatedMax, [6]), - #("WindWaveHgt", self.moderatedMax, [6]), - #("Swell", self.vectorModeratedMax, [6]), - #("Swell2", self.vectorModeratedMax, [6]), - #("Wx", self.rankedWx, [6]), - #("PoP", self._PoP_analysisMethod("CWFPeriodMid"), [6]), - #("PoP", self.binnedPercent, [6]), - - # Use the following if you want absolute ranges. 
- # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - # Split time range in quarters for Wind and WindGust - #("Wind", self.vectorMinMax, [6]), - #("Wind", self.vectorMinMax, [12]), - #("WindGust", self.maximum, [3]), - #("WaveHeight", self.minMax, [6]), - #("WindWaveHgt", self.minMax, [6]), - # Split time range in half for Wx and Swell - #("Swell", self.vectorMinMax, [6]), - #("Swell2", self.vectorMinMax, [6]), - #("Wx", self.rankedWx, [6]), - #("PoP", self._PoP_analysisMethod("CWFPeriodMid"), [6]), - #("PoP", self.binnedPercent, [6]), - ], - - "phraseList":[ - # WINDS - self.marine_wind_withGusts_phrase, - # Alternative: - #self.marine_wind_phrase, - #self.gust_phrase, - # WAVES - #self.wave_withPeriods_phrase, - # Alternative: - self.wave_phrase, - # Optional: - self.chop_phrase, - # SWELLS AND PERIODS - self.swell_phrase, - # WEATHER - self.weather_phrase, - self.visibility_phrase, - ], - } - - def combine_singleValues_flag_dict(self, tree, node): - # Dictionary of weather elements to combine using single values - # rather than ranges. If you are using single value statistics - # for a weather element, you will want to set this flag to 1. - # If there is no entry for an element, min/max combining will - # be done. 
- # The value for an element may be a phrase or a method - # If a method, it will be called with arguments: - # tree, node - dict = TextRules.TextRules.combine_singleValues_flag_dict(self, tree, node) - #dict["Wind"] = 1 - #dict["WindGust"] = 1 - #dict["Swell"] = 1 - #dict["Swell2"] = 1 - #dict["WindWaveHgt"] = 1 - #dict["WaveHeight"] = 1 - return dict - - def ExtendedLabel(self): - return { - "type": "component", - "methodList": [self.setLabel], - "analysisList": [], - "phraseList":[], - } - def setLabel(self, tree, component): - component.set("words", "\n.Extended forecast...\n") - return self.DONE() - - def CWFExtended(self): - return { "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - # NOTE: Choose from the following analysis options. - # Do not remove the "vectorMinMax" analysis for - # "Wind". This is necessary to get an absolute max if - # the useWindsForGusts flag is on. - - # Use the following if you want moderated ranges - # (e.g. N WIND 10 to 20 KT) - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - ("Wind", self.vectorModeratedMinMax, [6]), - ("WindGust", self.moderatedMinMax, [12]), - ("WaveHeight", self.moderatedMinMax, [12]), - ("WindWaveHgt", self.moderatedMinMax, [12]), - #("Wx", self.rankedWx), - #("T", self.minMax), # needed for weather_phrase - #("PoP", self._PoP_analysisMethod("CWFExtended")), - #("PoP", self.binnedPercent), - #("Swell", self.vectorModeratedMinMax, [12]), - #("Swell2", self.vectorModeratedMinMax, [12]), - - # Use the following if you want moderated - # single values (e.g. N WIND 20 KT). - # Set the moderating percentage in the "moderated_dict" - # dictionary module. 
- # NOTE: If you use these methods, include and uncomment - # the "combine_singleValues_flag_dict" in your Local file (see below) - #("Wind", self.vectorModeratedMax, [6]), - #("WindGust", self.moderatedMax, [12]), - #("WaveHeight", self.moderatedMax, [12]), - #("WindWaveHgt", self.moderatedMax, [12]), - #("Wx", self.rankedWx), - #("T", self.minMax), - #("PoP", self._PoP_analysisMethod("CWFExtended")), - #("PoP", self.binnedPercent), - #("Swell", self.vectorModeratedMax, [12]), - #("Swell2", self.vectorModeratedMax, [12]), - - # Use the following if you want absolute ranges. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - # dictionary module. - #("Wind", self.vectorMinMax, [6]), - #("WindGust", self.minMax, [12]), - #("WaveHeight", self.minMax, [12]), - #("WindWaveHgt", self.minMax, [12]), - #("Wx", self.rankedWx), - #("T", self.minMax), - #("PoP", self._PoP_analysisMethod("CWFExtended")), - #("PoP", self.binnedPercent), - #("Swell", self.vectorMinMax, [12]), - #("Swell2", self.vectorMinMax, [12]), - ], - "phraseList":[ - # WIND - self.marine_wind_phrase, - # WAVEHEIGHT - #self.wave_withPeriods_phrase, - # Alternative: - self.wave_phrase, - # Optional: - self.chop_phrase, - # SWELLS AND PERIODS - #self.swell_withPeriods_phrase, - # Alternative: - #self.swell_phrase, - #self.period_phrase, - # WEATHER - #self.weather_phrase, - #self.visibility_phrase, - ], - } - - def generateForecast(self, argDict): - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." 
- - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - skipAreas = self._skipAreas(argDict) - argDict["editArea"] = (editArea, areaLabel) - if self.currentAreaContains(argDict, skipAreas): - continue - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._language = argDict["language"] - - # Tropical exceptions - try: - self._includeTropical = self._includeTropical == "Yes" - except: - self._includeTropical = False - if self._includeTropical: - self._periodCombining = 0 - if self._productIssuance == "Morning with Pre-1st Period": - self._productIssuance = "Morning" - if self._productIssuance == "Afternoon with Pre-1st Period": - self._productIssuance = "Afternoon" - return None - - def _determineTimeRanges(self, argDict): - # Set 
up the Narrative Definition and initial Time Range - self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict)) - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._expireTime = self._issuanceInfo.expireTime() - self._issueTime = self._issuanceInfo.issueTime() - self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - if self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - # Re-calculate issueTime - expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) - self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") - return None - - def _sampleData(self, argDict): - # Sample and analyze the data for the narrative - self._narrativeProcessor = ForecastNarrative.ForecastNarrative() - error = self._narrativeProcessor.getNarrativeData( - argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) - if error is not None: - return error - return None - - def _preProcessProduct(self, fcst, argDict): - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - - issuedByString = self.getIssuedByString() - - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s =productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - fcst = fcst + 
self._Text1() - try: - text2 = self._Text2(argDict) - except: - import LogStream - LogStream.logProblem(LogStream.exc()) - text2 = self._Text2() - fcst = fcst + text2 - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area combination - print "Generating Forecast for", areaLabel - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas) - fcst = fcst + areaHeader - - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - - argDict["language"] = self._language - # Generate Narrative Forecast for Edit Area - fcstSegment = self._narrativeProcessor.generateForecast( - argDict, editArea, areaLabel) - - # Handle abbreviations - if self._useAbbreviations == 1: - fcstSegment = self.marine_abbreviateText(fcstSegment) - fcstSegment = re.sub(r'\n', r' ',fcstSegment) - fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) - fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) - fcst = fcst + fcstSegment - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst + "\n$$\n\n" - - def _postProcessProduct(self, fcst, argDict): - #fcst = fcst + """NNNN """ - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - def _issuance_list(self, argDict): - # This method sets up configurable issuance times with associated - # narrative definitions. 
See the Text Product User Guide for documentation. - try: - includeTropical = self._includeTropical - except: - includeTropical = False - - if includeTropical: - narrativeDefAM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ] - narrativeDefPM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ] - else: - if self._definition["includeEveningPeriod"] == 1: - narrativeDefAM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFExtended", 24), - ("CWFExtended", 24) - ] - narrativeDefPM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFExtended", 24), - ("CWFExtended", 24) - ] - else: - narrativeDefAM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFExtended", 24), - ("CWFExtended", 24), - ("CWFExtended", 24) - ] - narrativeDefPM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFExtended", 24), - ("CWFExtended", 24), - ("CWFExtended", 24) - ] - - return [ - ("Morning", self.DAY(), self.NIGHT(), "issuanceHour + 13", - ".TODAY...", "early", "late", 1, narrativeDefAM), - ("Morning with Pre-1st Period", "issuanceHour", self.NIGHT(), - "issuanceHour + 13", ".TODAY...", "early", "late", 1, - narrativeDefAM), - ("Morning Update", "issuanceHour", self.NIGHT(), - "issuanceHour + 13", ".REST OF TODAY...", "early in the morning", - "late in the afternoon", 1, narrativeDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), 
"issuanceHour + 13", - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + self.DAY(), "issuanceHour + 13", - ".TONIGHT...", "late in the night", "early in the evening", 1, narrativeDefPM), - ("Afternoon with Pre-1st Period", "issuanceHour", 24 + self.DAY(), - "issuanceHour + 13", ".TONIGHT...", "late in the night", "early in the evening", 1, - narrativeDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), "issuanceHour + 13", - ".REST OF TONIGHT...", "early in the morning", "early in the evening", 1, - narrativeDefPM), - # For the early morning update, this produces: - # REST OF TONIGHT: - # MONDAY - # MONDAY NIGHT - ("Early Morning Update", "issuanceHour", self.DAY(), "issuanceHour + 13", - ".REST OF TONIGHT...", "early in the morning", "late in the afternoon", - 0, narrativeDefPM), - # Alternative - # For the early morning update, this produces: - # EARLY THIS MORNING: - # TODAY - # TONIGHT - #("Evening Update", "issuanceHour", 24 + self.DAY(), "issuanceHour + 13", - # ".REST OF TONIGHT...", "late in the night", "early in the evening", - # 1, narrativeDefPM), - #("Early Morning Update", "issuanceHour", self.DAY(), "issuanceHour + 13", - # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", - # 1, narrativeDefPM), - ] - -## def _issuance_list(self, argDict): -## # This method sets up configurable issuance times with associated -## # narrative definitions. See the Text Product User Guide for documentation. 
-## if self._definition["includeEveningPeriod"] == 1: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), -## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriod", 12), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"), -## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## else: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), -## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"), -## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFExtended", 24), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] - -## return [ -## ("430 AM", self.DAY(), self.NIGHT(), 17, -## ".TODAY...", "early in the morning", "late in the afternoon", -## 1, narrativeDefAM), -## ("1030 AM", "issuanceHour", self.NIGHT(), 17, -## ".TODAY...", "early in the morning", "late in the afternoon", -## 1, narrativeDefAM), -## # End times are tomorrow: -## ("430 PM", self.NIGHT(), 24 + self.DAY(), 24 + 5, -## ".TONIGHT...", "late in the night", "early in the evening", -## 1, narrativeDefPM), -## ("1030 PM", "issuanceHour", 24 + self.DAY(), 24 + 5, -## ".TONIGHT...", "late in the night", "early in the evening", -## 1, narrativeDefPM), -## ] - - # Alternative issuance list using CWFPeriodMid -## def _issuance_list(self, argDict): -## # This method sets up configurable issuance times with associated -## # narrative definitions. See the Text Product User Guide for documentation. 
-## if self._definition["includeEveningPeriod"] == 1: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), -## ("CWFPeriodMid", 6), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"),("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), -## ("CWFPeriodMid", 6), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## else: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 18), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 18), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] - -## return [ -## # WRS modified the "label" and issuance starthour and expiration hours -## # early phrases -## # note: the start, end times and expiration times are local time -## # -## # note: self.DAY = 0600 Local time and self.NIGHT = 1800 Local time -## # -## # description -- text to appear in the startUp dialog for the product (e.g. 330 AM). -## # startHour -- start hour (in local time) for the first period. -## # These times are relative to self.DAY() and -## # self.NIGHT() which default to 6 and 18, respectively. -## # endHour -- end hour (in local time) for the first period. -## # These times are relative to self.DAY() and -## # self.NIGHT() which default to 6 and 18, respectively. The start -## # expirationHour -- hour when the product expires (in local time) -## # This is relative to midnight local time of the -## # current day. -## # period1 Label -- the label for the first period. e.g. ".TODAY...", ".REST OF Today..." -## # period1 lateNight phrase -- phrase to use if the hours of 3am to 6am must be qualified -## # e.g. 
"Partly cloudy in the early morning." -## # period1 lateDay phrase -- phrase to use if the hours of 3pm to 6pm must be qualified -## # e.g. "Partly cloudy in the early evening." -## # todayFlag -- if 1, "Today" and "Tonight" phrasing will be used in subsequent periods, -## # otherwise, weekday wording will apply. -## # narrative definition -- component and time period pairs - -## # 330 AM Early morning issuance starts at 1200Z or when product is actually -## # is actually issued. Ends -## ("230 AM", self.DAY()-4, self.NIGHT(), 17, -## ".TODAY...", "before sunrise", "late afternoon", -## 1, narrativeDefAM), -## ("830 AM", self.DAY()+2, self.NIGHT(), 17, -## ".TODAY...", "early this morning", "late afternoon", -## 1, narrativeDefAM), -## # End times are tomorrow: -## ("230 PM", self.DAY()+8, self.NIGHT()+12, 24+5, -## ".TONIGHT...", "late tonight", "before dark", -## 1, narrativeDefPM), -## ("830 PM", self.NIGHT()+2, 24 + self.DAY(), 24+5, -## ".TONIGHT...", "late tonight", "before dark", -## 1, narrativeDefPM), -## ] - - def lateDay_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late day -- default 3pm-6pm - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateDayPhrase() - else: - return "late in the afternoon" - - def lateNight_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late night -- default 3am-6am - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateNightPhrase() - else: - return "early in the morning" - - def splitDay24HourLabel_flag(self, tree, node): - # Return 0 to have the TimeDescriptor module label 24 hour periods - # with simply the weekday name (e.g. Saturday) - # instead of including the day and night periods - # (e.g. 
Saturday and Saturday night) - # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" - # is set to zero. - # NOTE: This applied only to periods that are exactly 24-hours in length. - # Periods longer than that will always be split into day and night labels - # (e.g. Sunday through Monday night) - compName = node.getComponentName() - if compName == "CWFExtended": - return 0 - else: - return 1 - - def significant_wx_visibility_subkeys(self, tree, node): - # Weather values that constitute significant weather to - # be reported regardless of visibility. - # If your visibility_wx_threshold is None, you do not need - # to set up these subkeys since weather will always be - # reported. - # Set of tuples of weather key search tuples in the form: - # (cov type inten) - # Wildcards are permitted. - return [("* *")] - - def _skipAreas(self, argDict): - # These are edit areas that the formatter will skip - return [] - - def inlandWatersAreas(self, tree, node): - # List of edit area names that are inland or bay waters - # as opposed to "seas" - # The phrasing for these areas will be treated differently - # (see the waveRange_phrase) - # - # e.g. 
- # return ["TampaBayWaters"] - return ["area3"] - - def phrase_descriptor_dict(self, tree, node): - # Descriptors for phrases - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - dict["Wind"] = "winds" - dict["seas"] = "combined seas" - dict["inland waters"] = "bay and inland waters" - dict["chop"] = "bay and inland waters" - dict["mixed swell"] = "mixed swell" - dict["waves"] = "wind waves" - dict["dominant period"] = "dominant period" - # Apply only if marine_wind_flag (see above) is set to 1: - dict["hurricane force winds to"] = "hurricane force winds to" - dict["storm force winds to"] = "storm force winds to" - dict["gales to"] = "gales to" - dict["up to"] = "up to" - return dict - - def null_nlValue_dict(self, tree, node): - # Threshold below which values are considered "null" and not reported. - # Units depend on the element and product - dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) - dict["WaveHeight"] = 3 - dict["WindWaveHgt"] = 3 - dict["Wind"] = 5 - dict["WindGust"] = 33 - dict["Swell"] = 5 - dict["Visibility"] = 3 # in nautical miles. Report if less than this value. - return dict - - def first_null_phrase_dict(self, tree, node): - # Phrase to use if values THROUGHOUT the period or - # in the first period are Null (i.e. below threshold OR NoWx) - # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. - dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) - dict["WaveHeight"] = "waves 2 feet or less" - dict["WindWaveHgt"] = "waves 2 feet or less" - dict["Wind"] = "wind variable less than 5 knots" - dict["Swell"] = "" - return dict - - def null_phrase_dict(self, tree, node): - # Phrase to use for null values in subPhrases other than the first - # Can be an empty string - # E.g. 
"NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" - dict = TextRules.TextRules.null_phrase_dict(self, tree, node) - dict["WaveHeight"] = "2 feet or less" - dict["WindWaveHgt"] = "2 feet or less" - dict["Wind"] = "variable less than 5 knots" - dict["Wx"] = "" - dict["Swell"] = "light" - return dict - - def maximum_range_nlValue_dict(self, tree, node): - # Maximum range to be reported within a phrase - # e.g. 5 to 10 mph - # Units depend on the product - dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) - #----------------------------------------------------------------------- - # COMMENT: Override max ranges for certain fields - # This dict specifications allows for wind speed ranges of up to 20 mph - # during tropical cyclone situations allowing for far better wind speed - # phrases. - #----------------------------------------------------------------------- - if self._includeTropical: - dict["Wind"] = {'default': 5, - (0.0, 4.0): 0, - (4.0, 33.0): 5, - (33.0, 52.0): 10, - (52.0, 200.0): 20, - } - else: - dict["Wind"] = 10 - dict["Swell"] = 5 - dict["Swell2"] = 5 - dict["WaveHeight"] = 2 - dict["WindWaveHgt"] = 2 - return dict - - def vector_mag_hook(self, tree, node, minMag, maxMag, units, elementName, words): - # Further refinement and customization of the wind phrase can be done here - # Get winds to match the headlines. - # - # Get the maxWind for the entire period which is the value used to determine the - # headlines such as "small craft advisory". - if elementName != "Wind": - return words - timeRange = node.parent.getTimeRange() - maxWind, dir = tree.stats.get("Wind", timeRange, mergeMethod="Max") - around = self.phrase_descriptor(tree, node, "around", elementName) - if around != "" and around.find(" ") < 0: - around = around + " " - - # New for around 10 knots. - if maxWind >=9 and maxWind <= 11 and maxMag == 10: - words = around + "10 " + units - - # New for around 15 knots. 
- if maxWind >=14 and maxWind < 17 and maxMag == 15: - words = around + "15 " + units - - # New to match small craft headline. - if maxWind >=20 and maxMag == 20: - words = around + "20 " + units - - # New to match gale headline. - if maxWind >=30 and maxWind <= 34 and maxMag == 30: - words = around + "30 " + units - - # New to match gale headline. - if maxWind >=45 and maxWind < 50 and maxMag == 50: - words = around + "45 " + units - - return words - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. - def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] - tropicalActions = ["NEW", "EXA", "EXB", "EXT", "CON", 'CAN', 'UPG', - 'EXP'] - return [ - ('HF.A', marineActions, 'Marine'), # HURRICANE FORCE WIND WATCH - ('SR.A', marineActions, 'Marine'), # STORM WATCH - ('GL.A', marineActions, 'Marine'), # GALE WATCH - ('SE.A', marineActions, 'Marine'), # HAZARDOUS SEAS - ('UP.A', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH - ('HU.W', allActions, 'Tropical'), # HURRICANE WARNING - ('TY.W', allActions, 'Tropical'), # TYPHOON WARNING - ('TR.W', allActions, 'Tropical1'), # TROPICAL STORM WARNING - ('HU.A', allActions, 'Tropical'), # HURRICANE WATCH - ('TY.A', allActions, 'Tropical'), # TYPHOON WATCH - ('TR.A', allActions, 'Tropical1'), # TROPICAL STORM WATCH - ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING - ('SR.W', marineActions, 'Marine'), # STORM WARNING - ('GL.W', marineActions, 'Marine'), # GALE WARNING - ('SE.W', marineActions, 'Marine'), # HAZARDOUS SEAS - ('UP.W', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING - ('RB.Y', marineActions, 'Marine'), # ROUGH BAR - ('SI.Y', marineActions, 'Marine'), # SMALL CRAFT ADVISORY - ('SC.Y', marineActions, 'Marine'), # SMALL CRAFT ADVISORY - ('SW.Y', marineActions, 'Marine'), # SMALL CRAFT ADVISORY - ('BW.Y', 
marineActions, 'Marine'), # BRISK WIND ADVISORY - ('MF.Y', marineActions, 'Fog'), # MARINE DENSE FOG ADVISORY - ('MS.Y', marineActions, 'Smoke'), # MARINE DENSE SMOKE ADVISORY - ('UP.Y', marineActions, 'IceAccr'), # FREEZING SPRAY ADVISORY - ('MH.W', marineActions, 'Ashfall'), # MARINE VOLCANIC ASHFALL WARNING - ('MH.Y', marineActions, 'Ashfall'), # MARINE VOLCANIC ASHFALL ADVISORY - ('TO.A', marineActions, 'Convective'), # TORNADO WATCH - ('SV.A', marineActions, 'Convective'), # SEVERE THUNDERSTORM WATCH - ('LO.Y', marineActions, 'LowWater'), # LOW WATER ADVISORY - ] +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: CWF (Coastal Waters Forecast) +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# CWF.py, CWF___Definition, CWF__Overrides +#------------------------------------------------------------------------- +# Customization Points in Local File: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas, default is Combinations +# +# productName defines name of product e.g. "Coastal Waters Forecast" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "CWFBOS" +# areaName (opt.) Area name for product header, such as "Western New York" +# wfoCityState WFO location, such as "Buffalo NY" +# +# Optional Configuration Items +# +# editAreaSuffix default None. Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined or the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the FormatterLauncher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. 
+# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +# +# periodCombining If 1, an attempt will be made to combine components +# or time periods into one. Otherwise no period +# combining will will be done. +# includeEveningPeriod Include a 6 hour Evening period on the 3rd day +# useAbbreviations +# If 1, use marine abbreviations e.g. TSTM instead of THUNDERSTORM, +# NW instead of NORTHWEST +# (See marine_abbreviateText in the TextRules module) +# +# Weather-related flags +# hoursSChcEnds - specifies hours past the beginning of the first +# first period of the product to stop including 'Slight +# Chance' or 'Isolated' weather types (ERH policy +# allows values of 1-5 * 12 hour periods) +# +# areaDictionary Modify the AreaDictionary utility with UGC +# information about zones +# useHolidays Set to 1 to use holidays in the time period labels +# +# Trouble-shooting items +# passLimit -- Limit on passes allowed through Narrative Tree +# trace -- Set to 1 to turn on trace through Narrative Tree +# +# OVERRIDES +# +# Required Overrides +# +# _Text1(), _Text2() Descriptive text for header +# +# NARRATIVE CUSTOMIZATION POINTS +# The phrases in this product can be customized in many ways by overriding +# infrastructure methods in the Local file. +# You will see common overrides in the Local file and you may change them +# in that there. +# For further customization, you can determine which phrases your product is +# using by examining the Component Product Definitions below. 
+# Then, you can look up the phrase in the Text Product User Guide which will +# describe the all the relevant override methods associated with the phrase. +# Refer to the Customization section of the Text Product User Guide +# for step-by-step information. +# +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Wind (every 3 hours to 3 days, then every 6 hours to 7 days) +# WaveHeight and/or WindWaveHgt +# (every 6 hours to 3 days, then every 12 hours to 7 days) +# Wx (every 6 hours to 3 days, then every 12 hours to 7 days) +# Optional: +# WindGust (every 3 hours to 7 days) +# Swell, Swell2, Period, Period2 (every 6 hours to 7 days) +#------------------------------------------------------------------------- +# Edit Areas Needed: None +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +# Combinations +#------------------------------------------------------------------------- +# Component Products: +# CWFPeriod (component) +# CWFPeriodMid (component) +# ExtendedLabel(component) +# CWFExtended (component) +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". 
+#------------------------------------------------------------------------- +# Additional Information: +# +# COMMON OVERRIDES +# from CWF: +# _Text1 +# _Text2 +# _issuance_list +# from MarinePhrases +# inlandWatersAreas +# inlandWatersWave_element +# seasWaveHeight_element +# seasWindWave_element +# waveHeight_wind_threshold +# marine_wind_flag +# marine_wind_combining_flag +# marine_wind_verbose_flag +# from ConfigVariables +# phrase_descriptor_dict +# null_nlValue_dict +# first_null_phrase_dict +# null_phrase_dict +# maximum_range_nlValue_dict +# from WxPhrases: +# embedded_visibility_flag +# visibility_wx_threshold +# significant_wx_visibility_subkeys +# wxCoverageDescriptors +# wxTypeDescriptors +# wxAttributeDescriptors +# wxIntensityDescriptors +# wxCombinations +# combine_T_RW +# from SampleAnalysis +# moderated_dict +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Marine Services. +#------------------------------------------------------------------------- + +import TextRules +import SampleAnalysis +import ForecastNarrative +import time, string, re, types +import TimeRange + + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + #(("Include Tropical?", "includeTropical") , "No", "radio", ["Yes","No"]), + ] + Definition = { + "type": "smart", + "displayName": "None", + "database": "Official", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/CWF_.txt", + "debug": 0, + # Name of map background for creating Combinations + "mapNameForCombinations": "Marine_Zones_", + + "lineLength": 66, + ## Edit Areas: Create Combinations file with edit area combinations. 
+ "showZoneCombiner" : 1, # 1 to cause zone combiner to display + "defaultEditAreas" : "Combinations_CWF__", + "editAreaSuffix": None, + # product identifiers + "productName": "Coastal Waters Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "Georgia" -- optional + "wfoCityState": "", # Location of WFO - city,state + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + + "hazardSamplingThreshold": (10, None), #(%cov, #points) + + "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time + + "periodCombining" : 0, # If 1, combine periods, if possible + # Product-specific variables: + # Set to one if you want a 6-hour evening period instead of + # 18-hour period without lows + "includeEveningPeriod": 1, + "useAbbreviations": 0, + + # Weather-related flags + "hoursSChcEnds": 24, + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + "useHolidays": 0, # Set to 1 to use holidays in the time period labels + # Language + "language": "english", + + # Trouble-shooting items + "passLimit": 20, # Limit on passes allowed through + # Narrative Tree + "trace": 0, # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + ######################################################################## + # OVERRIDING METHODS, THRESHOLDS AND VARIABLES + ######################################################################## + # MUST BE OVERRIDDEN IN LOCAL FILE + def _Text1(self): + return "" + + def _Text2(self, argDict): + synopsis = "" + + # Try to get Synopsis from previous CWF + #productID = "BOSCWFBOS" + #synopsis = self.getPreviousProduct(productID, "SYNOPSIS") + # Clean up the 
previous synopsis + #synopsis = re.sub(r'\n', r' ', synopsis) + #synopsis = self.endline(synopsis, linelength=66, breakStr=" ") + + # Convert absolute time pointer to a tuple of values like that + # returned by time.gmtime() + #expTuple = time.strptime('%s' % (self._expireTime), + # '%b %d %y %H:%M:%S GMT') + expTuple = self._expireTime.utctimetuple() + + # Format expiration time for inclusion in synopsis header + expTime = time.strftime('%d%H%M', expTuple) + siteID = self.getSiteID(argDict) + + if len("_") == 0: + ugc = self.synopsisUGC(siteID) + heading = self.synopsisHeading(siteID) + else: + ugc = self.synopsisUGC(siteID, self._pil[-3:]) + heading = self.synopsisHeading(siteID, self._pil[-3:]) + + return "%s-%s-\n" % (ugc, expTime) + self._timeLabel + "\n\n" + \ + heading + "\n" + synopsis + "\n$$\n\n" + + ######################################################################## + + def pop_wx_lower_threshold(self, tree, node): + # Always report weather + return 0 + + def rounding_method_dict(self, tree, node): + # Special rounding methods + # + return { + "Wind": self.marineRounding, + } + + def element_outUnits_dict(self, tree, node): + dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) + dict["Visibility"] = "NM" + return dict + + def gust_wind_difference_nlValue(self, tree, node): + # Difference between gust and maxWind below which gusts are not + # mentioned. Units are MPH + if self._includeTropical: + return 5 + else: + return 10 + + def temporalCoverage_hours(self, parmHisto, timeRange, componentName): + # COMMENT: At WFO MFL we use 3 hrly wind grids. If you use 1 hrly wind grids + # and this parameter is 2 or higher, tropical cyclone winds affecting the very + # early or latter part of a forecast period might be neglected. 1 assures + # maximum sensitivity. 
+ if self._includeTropical: + return 1 + else: + return 0 + + + def moderated_dict(self, parmHisto, timeRange, componentName): + """ + Modifed to lower the high end filter threshold from 20 MPH to + 15 MPH for Tropical. + """ + # COMMENT: This dictionary defines the low and high limit at which + # outliers will be removed when calculating moderated stats. + # By convention the first value listed is the percentage + # allowed for low values and second the percentage allowed + # for high values. The thresholds chosen below gave best results + # during testing with 2004 and 2005 tropical cyclones. This dict + # is used with the moderatedMinMax analysis method specified in the + # TropicalPeriod definitions specified further down for use with + # tropical cyclones with wind parameters. + + # Get Baseline thresholds + dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, + timeRange, componentName) + + # Change thresholds for Wind, WindGust, WaveHeight and Swell + if self._includeTropical: + dict["Wind"] = (0, 15) + dict["WindGust"] = (0, 15) + dict["WaveHeight"] = (0, 15) + dict["Swell"] = (0, 15) + return dict + + + + ######################################################################## + # COMPONENT PRODUCT DEFINITIONS + ######################################################################## + + def _PoP_analysisMethod(self, componentName): + # Alternative PoP analysis methods for consistency between PoP and Wx + #return self.maxMode + #return self.maximum + return self.stdDevMaxAvg + + + def addTropical(self, analysisList, phraseList, includeHazards=True): + newAnalysisList = [] + for entry in analysisList: + # Sampling defined as a tuple (field, statistic, temporal rate) + # If this is NOT a Wind or WindGust statistic + if entry[0] not in ["Hazards", "Wind", "WindGust", "WaveHeight", "Swell"]: + # Add this statistic to the new analysisList + newAnalysisList.append(entry) + newAnalysisList += [ + ("Wind", self.vectorModeratedMinMax, [6]), + 
("WindGust", self.moderatedMinMax, [6]), + ("WaveHeight", self.moderatedMax, [6]), + ("Swell", self.vectorModeratedMinMax, [6]), + ("pws34", self.maximum), + ("pws64", self.maximum), + ("pwsN34", self.maximum), + ("pwsN64", self.maximum), + ("pwsD34", self.maximum), + ("pwsD64", self.maximum), + ] + if includeHazards: + newAnalysisList.append(("Hazards", self.discreteTimeRangesByKey)) + + phraseList.insert(0, self.pws_phrase) + return newAnalysisList, phraseList + + def CWFPeriod(self): + analysisList = [ + # NOTE: Choose from the following analysis options. + # Do not remove the "vectorMinMax" analysis for + # "Wind". This is necessary to get an absolute max if + # the useWindsForGusts flag is on. + + # Use the following if you want moderated ranges + # (e.g. N WIND 10 to 20 KT) + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + ("Wind", self.vectorModeratedMinMax, [3]), + ("Wind", self.vectorMinMax, [12]), + ("WindGust", self.moderatedMax, [3]), + ("WaveHeight", self.moderatedMinMax, [6]), + ("WindWaveHgt", self.moderatedMinMax, [6]), + ("Swell", self.vectorModeratedMinMax, [6]), + ("Swell2", self.vectorModeratedMinMax, [6]), + ("Period", self.moderatedMinMax, [6]), + ("Period2", self.moderatedMinMax, [6]), + ("Wx", self.rankedWx, [6]), + ("T", self.minMax), + ("PoP", self._PoP_analysisMethod("CWFPeriod"), [6]), + ("PoP", self.binnedPercent, [6]), + + # Use the following if you want moderated + # single values (e.g. N WIND 20 KT). + # Set the moderating percentage in the "moderated_dict" + # dictionary module. 
+ # NOTE: If you use these methods, include and uncomment + # the "combine_singleValues_flag_dict" in your Local file (see below) + #("Wind", self.vectorModeratedMax, [3]), + #("Wind", self.vectorMinMax, [12]), + #("WindGust", self.moderatedMax, [3]), + #("WaveHeight", self.moderatedMax, [6]), + #("WindWaveHgt", self.moderatedMax, [6]), + #("Swell", self.vectorModeratedMax, [6]), + #("Swell2", self.vectorModeratedMax, [6]), + #("Period", self.moderatedMax, [6]), + #("Period2", self.moderatedMax, [6]), + #("Wx", self.rankedWx, [6]), + #("T", self.minMax), + #("PoP", self._PoP_analysisMethod("CWFPeriod")), + #("PoP", self.binnedPercent, [6]), + + # Use the following if you want absolute ranges. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + # Split time range in quarters for Wind and WindGust + #("Wind", self.vectorMinMax, [3]), + #("Wind", self.vectorMinMax, [12]), + #("WindGust", self.maximum, [3]), + #("WaveHeight", self.minMax, [6]), + #("WindWaveHgt", self.minMax, [6]), + # Split time range in half for Wx and Swell + #("Swell", self.vectorMinMax, [6]), + #("Swell2", self.vectorMinMax, [6]), + #("Period", self.avg, [6]), + #("Period2", self.avg, [6]), + #("Wx", self.rankedWx, [6]), + #("T", self.minMax), + #("PoP", self._PoP_analysisMethod("CWFPeriod")), + #("PoP", self.binnedPercent, [6]), + ] + phraseList = [ + # WINDS + self.marine_wind_withGusts_phrase, + # Alternative: + #self.marine_wind_phrase, + #self.gust_phrase, + # WAVES + self.wave_withPeriods_phrase, + # Alternative: + #self.wave_phrase, + # Optional: + self.chop_phrase, + # SWELLS AND PERIODS + self.swell_withPeriods_phrase, + # Alternative: + #self.swell_phrase, + #self.period_phrase, + # WEATHER + self.weather_phrase, + self.visibility_phrase, + ] + if self._includeTropical: + analysisList, phraseList = self.addTropical(analysisList, phraseList) + + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, 
+ self.wordWrap, + ], + + "analysisList": analysisList, + "phraseList": phraseList, + + } + + def CWFPeriodMid(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + + "analysisList": [ + # NOTE: Choose from the following analysis options. + # Do not remove the "vectorMinMax" analysis for + # "Wind". This is necessary to get an absolute max if + # the useWindsForGusts flag is on. + + # Use the following if you want moderated ranges + # (e.g. N WIND 10 to 20 KT) + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + ("Wind", self.vectorModeratedMinMax, [6]), + ("Wind", self.vectorMinMax, [12]), + ("WindGust", self.moderatedMax, [6]), + ("WaveHeight", self.moderatedMinMax, [6]), + ("WindWaveHgt", self.moderatedMinMax, [6]), + ("Swell", self.vectorModeratedMinMax, [6]), + ("Swell2", self.vectorModeratedMinMax, [6]), + ("Wx", self.rankedWx, [6]), + ("PoP", self._PoP_analysisMethod("CWFPeriodMid"), [6]), + ("PoP", self.binnedPercent, [6]), + + # Use the following if you want moderated + # single values (e.g. N WIND 20 KT). + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # NOTE: If you use these methods, include and uncomment + # the "combine_singleValues_flag_dict" in your Local file (see below) + #("Wind", self.vectorModeratedMax, [6]), + #("Wind", self.vectorMinMax, [12]), + #("WindGust", self.moderatedMax, [6]), + #("WaveHeight", self.moderatedMax, [6]), + #("WindWaveHgt", self.moderatedMax, [6]), + #("Swell", self.vectorModeratedMax, [6]), + #("Swell2", self.vectorModeratedMax, [6]), + #("Wx", self.rankedWx, [6]), + #("PoP", self._PoP_analysisMethod("CWFPeriodMid"), [6]), + #("PoP", self.binnedPercent, [6]), + + # Use the following if you want absolute ranges. 
+ # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + # Split time range in quarters for Wind and WindGust + #("Wind", self.vectorMinMax, [6]), + #("Wind", self.vectorMinMax, [12]), + #("WindGust", self.maximum, [3]), + #("WaveHeight", self.minMax, [6]), + #("WindWaveHgt", self.minMax, [6]), + # Split time range in half for Wx and Swell + #("Swell", self.vectorMinMax, [6]), + #("Swell2", self.vectorMinMax, [6]), + #("Wx", self.rankedWx, [6]), + #("PoP", self._PoP_analysisMethod("CWFPeriodMid"), [6]), + #("PoP", self.binnedPercent, [6]), + ], + + "phraseList":[ + # WINDS + self.marine_wind_withGusts_phrase, + # Alternative: + #self.marine_wind_phrase, + #self.gust_phrase, + # WAVES + #self.wave_withPeriods_phrase, + # Alternative: + self.wave_phrase, + # Optional: + self.chop_phrase, + # SWELLS AND PERIODS + self.swell_phrase, + # WEATHER + self.weather_phrase, + self.visibility_phrase, + ], + } + + def combine_singleValues_flag_dict(self, tree, node): + # Dictionary of weather elements to combine using single values + # rather than ranges. If you are using single value statistics + # for a weather element, you will want to set this flag to 1. + # If there is no entry for an element, min/max combining will + # be done. 
+ # The value for an element may be a phrase or a method + # If a method, it will be called with arguments: + # tree, node + dict = TextRules.TextRules.combine_singleValues_flag_dict(self, tree, node) + #dict["Wind"] = 1 + #dict["WindGust"] = 1 + #dict["Swell"] = 1 + #dict["Swell2"] = 1 + #dict["WindWaveHgt"] = 1 + #dict["WaveHeight"] = 1 + return dict + + def ExtendedLabel(self): + return { + "type": "component", + "methodList": [self.setLabel], + "analysisList": [], + "phraseList":[], + } + def setLabel(self, tree, component): + component.set("words", "\n.Extended forecast...\n") + return self.DONE() + + def CWFExtended(self): + return { "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + # NOTE: Choose from the following analysis options. + # Do not remove the "vectorMinMax" analysis for + # "Wind". This is necessary to get an absolute max if + # the useWindsForGusts flag is on. + + # Use the following if you want moderated ranges + # (e.g. N WIND 10 to 20 KT) + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + ("Wind", self.vectorModeratedMinMax, [6]), + ("WindGust", self.moderatedMinMax, [12]), + ("WaveHeight", self.moderatedMinMax, [12]), + ("WindWaveHgt", self.moderatedMinMax, [12]), + #("Wx", self.rankedWx), + #("T", self.minMax), # needed for weather_phrase + #("PoP", self._PoP_analysisMethod("CWFExtended")), + #("PoP", self.binnedPercent), + #("Swell", self.vectorModeratedMinMax, [12]), + #("Swell2", self.vectorModeratedMinMax, [12]), + + # Use the following if you want moderated + # single values (e.g. N WIND 20 KT). + # Set the moderating percentage in the "moderated_dict" + # dictionary module. 
+ # NOTE: If you use these methods, include and uncomment + # the "combine_singleValues_flag_dict" in your Local file (see below) + #("Wind", self.vectorModeratedMax, [6]), + #("WindGust", self.moderatedMax, [12]), + #("WaveHeight", self.moderatedMax, [12]), + #("WindWaveHgt", self.moderatedMax, [12]), + #("Wx", self.rankedWx), + #("T", self.minMax), + #("PoP", self._PoP_analysisMethod("CWFExtended")), + #("PoP", self.binnedPercent), + #("Swell", self.vectorModeratedMax, [12]), + #("Swell2", self.vectorModeratedMax, [12]), + + # Use the following if you want absolute ranges. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + # dictionary module. + #("Wind", self.vectorMinMax, [6]), + #("WindGust", self.minMax, [12]), + #("WaveHeight", self.minMax, [12]), + #("WindWaveHgt", self.minMax, [12]), + #("Wx", self.rankedWx), + #("T", self.minMax), + #("PoP", self._PoP_analysisMethod("CWFExtended")), + #("PoP", self.binnedPercent), + #("Swell", self.vectorMinMax, [12]), + #("Swell2", self.vectorMinMax, [12]), + ], + "phraseList":[ + # WIND + self.marine_wind_phrase, + # WAVEHEIGHT + #self.wave_withPeriods_phrase, + # Alternative: + self.wave_phrase, + # Optional: + self.chop_phrase, + # SWELLS AND PERIODS + #self.swell_withPeriods_phrase, + # Alternative: + #self.swell_phrase, + #self.period_phrase, + # WEATHER + #self.weather_phrase, + #self.visibility_phrase, + ], + } + + def generateForecast(self, argDict): + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + skipAreas = self._skipAreas(argDict) + argDict["editArea"] = (editArea, areaLabel) + if self.currentAreaContains(argDict, skipAreas): + continue + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._language = argDict["language"] + + # Tropical exceptions + try: + self._includeTropical = self._includeTropical == "Yes" + except: + self._includeTropical = False + if self._includeTropical: + self._periodCombining = 0 + if self._productIssuance == "Morning with Pre-1st Period": + self._productIssuance = "Morning" + if self._productIssuance == "Afternoon with Pre-1st Period": + self._productIssuance = "Afternoon" + return None + + def _determineTimeRanges(self, argDict): + # 
Set up the Narrative Definition and initial Time Range + self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict)) + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._expireTime = self._issuanceInfo.expireTime() + self._issueTime = self._issuanceInfo.issueTime() + self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + if self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + # Re-calculate issueTime + expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) + self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") + return None + + def _sampleData(self, argDict): + # Sample and analyze the data for the narrative + self._narrativeProcessor = ForecastNarrative.ForecastNarrative() + error = self._narrativeProcessor.getNarrativeData( + argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) + if error is not None: + return error + return None + + def _preProcessProduct(self, fcst, argDict): + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + + issuedByString = self.getIssuedByString() + + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s =productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + fcst = fcst + 
self._Text1() + try: + text2 = self._Text2(argDict) + except: + import LogStream + LogStream.logProblem(LogStream.exc()) + text2 = self._Text2() + fcst = fcst + text2 + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area combination + print(("Generating Forecast for", areaLabel)) + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas) + fcst = fcst + areaHeader + + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + + argDict["language"] = self._language + # Generate Narrative Forecast for Edit Area + fcstSegment = self._narrativeProcessor.generateForecast( + argDict, editArea, areaLabel) + + # Handle abbreviations + if self._useAbbreviations == 1: + fcstSegment = self.marine_abbreviateText(fcstSegment) + fcstSegment = re.sub(r'\n', r' ',fcstSegment) + fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) + fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) + fcst = fcst + fcstSegment + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + "\n$$\n\n" + + def _postProcessProduct(self, fcst, argDict): + #fcst = fcst + """NNNN """ + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + def _issuance_list(self, argDict): + # This method sets up configurable issuance times with associated + # narrative definitions. 
See the Text Product User Guide for documentation. + try: + includeTropical = self._includeTropical + except: + includeTropical = False + + if includeTropical: + narrativeDefAM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ] + narrativeDefPM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ] + else: + if self._definition["includeEveningPeriod"] == 1: + narrativeDefAM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFExtended", 24), + ("CWFExtended", 24) + ] + narrativeDefPM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFExtended", 24), + ("CWFExtended", 24) + ] + else: + narrativeDefAM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFExtended", 24), + ("CWFExtended", 24), + ("CWFExtended", 24) + ] + narrativeDefPM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFExtended", 24), + ("CWFExtended", 24), + ("CWFExtended", 24) + ] + + return [ + ("Morning", self.DAY(), self.NIGHT(), "issuanceHour + 13", + ".TODAY...", "early", "late", 1, narrativeDefAM), + ("Morning with Pre-1st Period", "issuanceHour", self.NIGHT(), + "issuanceHour + 13", ".TODAY...", "early", "late", 1, + narrativeDefAM), + ("Morning Update", "issuanceHour", self.NIGHT(), + "issuanceHour + 13", ".REST OF TODAY...", "early in the morning", + "late in the afternoon", 1, narrativeDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), 
"issuanceHour + 13", + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + self.DAY(), "issuanceHour + 13", + ".TONIGHT...", "late in the night", "early in the evening", 1, narrativeDefPM), + ("Afternoon with Pre-1st Period", "issuanceHour", 24 + self.DAY(), + "issuanceHour + 13", ".TONIGHT...", "late in the night", "early in the evening", 1, + narrativeDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), "issuanceHour + 13", + ".REST OF TONIGHT...", "early in the morning", "early in the evening", 1, + narrativeDefPM), + # For the early morning update, this produces: + # REST OF TONIGHT: + # MONDAY + # MONDAY NIGHT + ("Early Morning Update", "issuanceHour", self.DAY(), "issuanceHour + 13", + ".REST OF TONIGHT...", "early in the morning", "late in the afternoon", + 0, narrativeDefPM), + # Alternative + # For the early morning update, this produces: + # EARLY THIS MORNING: + # TODAY + # TONIGHT + #("Evening Update", "issuanceHour", 24 + self.DAY(), "issuanceHour + 13", + # ".REST OF TONIGHT...", "late in the night", "early in the evening", + # 1, narrativeDefPM), + #("Early Morning Update", "issuanceHour", self.DAY(), "issuanceHour + 13", + # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", + # 1, narrativeDefPM), + ] + +## def _issuance_list(self, argDict): +## # This method sets up configurable issuance times with associated +## # narrative definitions. See the Text Product User Guide for documentation. 
+## if self._definition["includeEveningPeriod"] == 1: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), +## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriod", 12), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"), +## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## else: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), +## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"), +## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFExtended", 24), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] + +## return [ +## ("430 AM", self.DAY(), self.NIGHT(), 17, +## ".TODAY...", "early in the morning", "late in the afternoon", +## 1, narrativeDefAM), +## ("1030 AM", "issuanceHour", self.NIGHT(), 17, +## ".TODAY...", "early in the morning", "late in the afternoon", +## 1, narrativeDefAM), +## # End times are tomorrow: +## ("430 PM", self.NIGHT(), 24 + self.DAY(), 24 + 5, +## ".TONIGHT...", "late in the night", "early in the evening", +## 1, narrativeDefPM), +## ("1030 PM", "issuanceHour", 24 + self.DAY(), 24 + 5, +## ".TONIGHT...", "late in the night", "early in the evening", +## 1, narrativeDefPM), +## ] + + # Alternative issuance list using CWFPeriodMid +## def _issuance_list(self, argDict): +## # This method sets up configurable issuance times with associated +## # narrative definitions. See the Text Product User Guide for documentation. 
+## if self._definition["includeEveningPeriod"] == 1: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), +## ("CWFPeriodMid", 6), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"),("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), +## ("CWFPeriodMid", 6), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## else: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 18), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 18), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] + +## return [ +## # WRS modified the "label" and issuance starthour and expiration hours +## # early phrases +## # note: the start, end times and expiration times are local time +## # +## # note: self.DAY = 0600 Local time and self.NIGHT = 1800 Local time +## # +## # description -- text to appear in the startUp dialog for the product (e.g. 330 AM). +## # startHour -- start hour (in local time) for the first period. +## # These times are relative to self.DAY() and +## # self.NIGHT() which default to 6 and 18, respectively. +## # endHour -- end hour (in local time) for the first period. +## # These times are relative to self.DAY() and +## # self.NIGHT() which default to 6 and 18, respectively. The start +## # expirationHour -- hour when the product expires (in local time) +## # This is relative to midnight local time of the +## # current day. +## # period1 Label -- the label for the first period. e.g. ".TODAY...", ".REST OF Today..." +## # period1 lateNight phrase -- phrase to use if the hours of 3am to 6am must be qualified +## # e.g. 
"Partly cloudy in the early morning." +## # period1 lateDay phrase -- phrase to use if the hours of 3pm to 6pm must be qualified +## # e.g. "Partly cloudy in the early evening." +## # todayFlag -- if 1, "Today" and "Tonight" phrasing will be used in subsequent periods, +## # otherwise, weekday wording will apply. +## # narrative definition -- component and time period pairs + +## # 330 AM Early morning issuance starts at 1200Z or when product is actually +## # is actually issued. Ends +## ("230 AM", self.DAY()-4, self.NIGHT(), 17, +## ".TODAY...", "before sunrise", "late afternoon", +## 1, narrativeDefAM), +## ("830 AM", self.DAY()+2, self.NIGHT(), 17, +## ".TODAY...", "early this morning", "late afternoon", +## 1, narrativeDefAM), +## # End times are tomorrow: +## ("230 PM", self.DAY()+8, self.NIGHT()+12, 24+5, +## ".TONIGHT...", "late tonight", "before dark", +## 1, narrativeDefPM), +## ("830 PM", self.NIGHT()+2, 24 + self.DAY(), 24+5, +## ".TONIGHT...", "late tonight", "before dark", +## 1, narrativeDefPM), +## ] + + def lateDay_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late day -- default 3pm-6pm + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateDayPhrase() + else: + return "late in the afternoon" + + def lateNight_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late night -- default 3am-6am + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateNightPhrase() + else: + return "early in the morning" + + def splitDay24HourLabel_flag(self, tree, node): + # Return 0 to have the TimeDescriptor module label 24 hour periods + # with simply the weekday name (e.g. Saturday) + # instead of including the day and night periods + # (e.g. 
Saturday and Saturday night) + # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" + # is set to zero. + # NOTE: This applied only to periods that are exactly 24-hours in length. + # Periods longer than that will always be split into day and night labels + # (e.g. Sunday through Monday night) + compName = node.getComponentName() + if compName == "CWFExtended": + return 0 + else: + return 1 + + def significant_wx_visibility_subkeys(self, tree, node): + # Weather values that constitute significant weather to + # be reported regardless of visibility. + # If your visibility_wx_threshold is None, you do not need + # to set up these subkeys since weather will always be + # reported. + # Set of tuples of weather key search tuples in the form: + # (cov type inten) + # Wildcards are permitted. + return [("* *")] + + def _skipAreas(self, argDict): + # These are edit areas that the formatter will skip + return [] + + def inlandWatersAreas(self, tree, node): + # List of edit area names that are inland or bay waters + # as opposed to "seas" + # The phrasing for these areas will be treated differently + # (see the waveRange_phrase) + # + # e.g. 
+ # return ["TampaBayWaters"] + return ["area3"] + + def phrase_descriptor_dict(self, tree, node): + # Descriptors for phrases + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + dict["Wind"] = "winds" + dict["seas"] = "combined seas" + dict["inland waters"] = "bay and inland waters" + dict["chop"] = "bay and inland waters" + dict["mixed swell"] = "mixed swell" + dict["waves"] = "wind waves" + dict["dominant period"] = "dominant period" + # Apply only if marine_wind_flag (see above) is set to 1: + dict["hurricane force winds to"] = "hurricane force winds to" + dict["storm force winds to"] = "storm force winds to" + dict["gales to"] = "gales to" + dict["up to"] = "up to" + return dict + + def null_nlValue_dict(self, tree, node): + # Threshold below which values are considered "null" and not reported. + # Units depend on the element and product + dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) + dict["WaveHeight"] = 3 + dict["WindWaveHgt"] = 3 + dict["Wind"] = 5 + dict["WindGust"] = 33 + dict["Swell"] = 5 + dict["Visibility"] = 3 # in nautical miles. Report if less than this value. + return dict + + def first_null_phrase_dict(self, tree, node): + # Phrase to use if values THROUGHOUT the period or + # in the first period are Null (i.e. below threshold OR NoWx) + # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. + dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) + dict["WaveHeight"] = "waves 2 feet or less" + dict["WindWaveHgt"] = "waves 2 feet or less" + dict["Wind"] = "wind variable less than 5 knots" + dict["Swell"] = "" + return dict + + def null_phrase_dict(self, tree, node): + # Phrase to use for null values in subPhrases other than the first + # Can be an empty string + # E.g. 
"NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" + dict = TextRules.TextRules.null_phrase_dict(self, tree, node) + dict["WaveHeight"] = "2 feet or less" + dict["WindWaveHgt"] = "2 feet or less" + dict["Wind"] = "variable less than 5 knots" + dict["Wx"] = "" + dict["Swell"] = "light" + return dict + + def maximum_range_nlValue_dict(self, tree, node): + # Maximum range to be reported within a phrase + # e.g. 5 to 10 mph + # Units depend on the product + dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) + #----------------------------------------------------------------------- + # COMMENT: Override max ranges for certain fields + # This dict specifications allows for wind speed ranges of up to 20 mph + # during tropical cyclone situations allowing for far better wind speed + # phrases. + #----------------------------------------------------------------------- + if self._includeTropical: + dict["Wind"] = {'default': 5, + (0.0, 4.0): 0, + (4.0, 33.0): 5, + (33.0, 52.0): 10, + (52.0, 200.0): 20, + } + else: + dict["Wind"] = 10 + dict["Swell"] = 5 + dict["Swell2"] = 5 + dict["WaveHeight"] = 2 + dict["WindWaveHgt"] = 2 + return dict + + def vector_mag_hook(self, tree, node, minMag, maxMag, units, elementName, words): + # Further refinement and customization of the wind phrase can be done here + # Get winds to match the headlines. + # + # Get the maxWind for the entire period which is the value used to determine the + # headlines such as "small craft advisory". + if elementName != "Wind": + return words + timeRange = node.parent.getTimeRange() + maxWind, dir = tree.stats.get("Wind", timeRange, mergeMethod="Max") + around = self.phrase_descriptor(tree, node, "around", elementName) + if around != "" and around.find(" ") < 0: + around = around + " " + + # New for around 10 knots. + if maxWind >=9 and maxWind <= 11 and maxMag == 10: + words = around + "10 " + units + + # New for around 15 knots. 
+ if maxWind >=14 and maxWind < 17 and maxMag == 15: + words = around + "15 " + units + + # New to match small craft headline. + if maxWind >=20 and maxMag == 20: + words = around + "20 " + units + + # New to match gale headline. + if maxWind >=30 and maxWind <= 34 and maxMag == 30: + words = around + "30 " + units + + # New to match gale headline. + if maxWind >=45 and maxWind < 50 and maxMag == 50: + words = around + "45 " + units + + return words + + # Returns a list of the Hazards allowed for this product in VTEC format. + # These are sorted in priority order - most important first. + def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] + tropicalActions = ["NEW", "EXA", "EXB", "EXT", "CON", 'CAN', 'UPG', + 'EXP'] + return [ + ('HF.A', marineActions, 'Marine'), # HURRICANE FORCE WIND WATCH + ('SR.A', marineActions, 'Marine'), # STORM WATCH + ('GL.A', marineActions, 'Marine'), # GALE WATCH + ('SE.A', marineActions, 'Marine'), # HAZARDOUS SEAS + ('UP.A', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH + ('HU.W', allActions, 'Tropical'), # HURRICANE WARNING + ('TY.W', allActions, 'Tropical'), # TYPHOON WARNING + ('TR.W', allActions, 'Tropical1'), # TROPICAL STORM WARNING + ('HU.A', allActions, 'Tropical'), # HURRICANE WATCH + ('TY.A', allActions, 'Tropical'), # TYPHOON WATCH + ('TR.A', allActions, 'Tropical1'), # TROPICAL STORM WATCH + ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING + ('SR.W', marineActions, 'Marine'), # STORM WARNING + ('GL.W', marineActions, 'Marine'), # GALE WARNING + ('SE.W', marineActions, 'Marine'), # HAZARDOUS SEAS + ('UP.W', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING + ('RB.Y', marineActions, 'Marine'), # ROUGH BAR + ('SI.Y', marineActions, 'Marine'), # SMALL CRAFT ADVISORY + ('SC.Y', marineActions, 'Marine'), # SMALL CRAFT ADVISORY + ('SW.Y', marineActions, 'Marine'), # SMALL CRAFT ADVISORY + ('BW.Y', 
marineActions, 'Marine'), # BRISK WIND ADVISORY + ('MF.Y', marineActions, 'Fog'), # MARINE DENSE FOG ADVISORY + ('MS.Y', marineActions, 'Smoke'), # MARINE DENSE SMOKE ADVISORY + ('UP.Y', marineActions, 'IceAccr'), # FREEZING SPRAY ADVISORY + ('MH.W', marineActions, 'Ashfall'), # MARINE VOLCANIC ASHFALL WARNING + ('MH.Y', marineActions, 'Ashfall'), # MARINE VOLCANIC ASHFALL ADVISORY + ('TO.A', marineActions, 'Convective'), # TORNADO WATCH + ('SV.A', marineActions, 'Convective'), # SEVERE THUNDERSTORM WATCH + ('LO.Y', marineActions, 'LowWater'), # LOW WATER ADVISORY + ] diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CWF_Pacific.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CWF_Pacific.py index 53fc9954ec..0c28278623 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CWF_Pacific.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CWF_Pacific.py @@ -1,1380 +1,1380 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-## - -#------------------------------------------------------------------------- -# Description: CWF_Pacific (Coastal Waters Forecast) -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# CWF_Pacific.py, CWF_Pacific___Definition, -# CWF_Pacific__Overrides -#------------------------------------------------------------------------- -# Customization Points in Local File: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas, default is Combinations -# productName defines name of product e.g. "Coastal Waters Forecast" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "CWFBOS" -# areaName (opt.) Area name for product header, such as "Western New York" -# wfoCityState City,state that the WFO is located in, such as "Buffalo, NY" -# -# Optional Configuration Items -# -# editAreaSuffix default None. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. 
This must -# be defined or the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -# -# periodCombining If 1, an attempt will be made to combine components -# or time periods into one. Otherwise no period -# combining will will be done. -# includeEveningPeriod Include a 6 hour Evening period on the 3rd day -# useAbbreviations -# If 1, use marine abbreviations e.g. 
TSTM instead of THUNDERSTORM, -# NW instead of NORTHWEST -# (See marine_abbreviateText in the TextRules module) -# -# Weather-related flags -# hoursSChcEnds - specifies hours past the beginning of the first -# first period of the product to stop including 'Slight -# Chance' or 'Isolated' weather types (ERH policy -# allows values of 1-5 * 12 hour periods) -# River Bar Zones -# riverBarZones - list to designate any river bars in the forecast -# -# -# areaDictionary Modify the AreaDictionary utility with UGC -# information about zones -# -# useHolidays Set to 1 to use holidays in the time period labels -# -# Trouble-shooting items -# passLimit -- Limit on passes allowed through Narrative Tree -# trace -- Set to 1 to turn on trace through Narrative Tree -# -# OVERRIDES -# -# Required Overrides -# -# _Text1(), _Text2() Descriptive text for header -# -# NARRATIVE CUSTOMIZATION POINTS -# The phrases in this product can be customized in many ways by overriding -# infrastructure methods in the Local file. -# You will see common overrides in the Local file and you may change them -# in that there. -# For further customization, you can determine which phrases your product is -# using by examining the Component Product Definitions below. -# Then, you can look up the phrase in the Text Product User Guide which will -# describe the all the relevant override methods associated with the phrase. -# Refer to the Customization section of the Text Product User Guide -# for step-by-step information. 
-# -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Wind (every 3 hours to 3 days, then every 6 hours to 7 days) -# WaveHeight and/or WindWaveHgt -# (every 6 hours to 3 days, then every 12 hours to 7 days) -# Wx (every 6 hours to 3 days, then every 12 hours to 7 days) -# Optional: -# WindGust (every 3 hours to 7 days) -# Swell, Swell2, Period, Period2 (every 6 hours to 7 days) -#------------------------------------------------------------------------- -# Edit Areas Needed: None -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -# Combinations -#------------------------------------------------------------------------- -# Component Products: -# CWFPeriod (component) -# CWFPeriodMid (component) -# ExtendedLabel(component) -# CWFExtended (component) -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". 
-#------------------------------------------------------------------------- -# Additional Information: -# -# COMMON OVERRIDES -# from CWF_Pacific: -# _Text1 -# _Text2 -# _issuance_list -# from MarinePhrases -# inlandWatersAreas -# inlandWatersWave_element -# seasWaveHeight_element -# seasWindWave_element -# waveHeight_wind_threshold -# marine_wind_flag -# marine_wind_combining_flag -# marine_wind_verbose_flag -# from ConfigVariables -# phrase_descriptor_dict -# phrase_connector_dict -# null_nlValue_dict -# first_null_phrase_dict -# null_phrase_dict -# maximum_range_nlValue_dict -# combine_singleValues_flag_dict -# from WxPhrases: -# embedded_visibility_flag -# visibility_wx_threshold -# significant_wx_visibility_subkeys -# wxCoverageDescriptors -# wxTypeDescriptors -# wxAttributeDescriptors -# wxIntensityDescriptors -# wxCombinations -# combine_T_RW -# from SampleAnalysis -# moderated_dict -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Marine Services. -#------------------------------------------------------------------------- - -import TextRules -import SampleAnalysis -import ForecastNarrative -import ModuleAccessor -import time, string, re, types -import TimeRange - - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - #(("Include Tropical?", "includeTropical") , "No", "radio", ["Yes","No"]), - ] - Definition = { - "type": "smart", - "displayName": "None", - "database": "Official", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/CWF_.txt", - "debug": 0, - # Name of map background for creating Combinations - "mapNameForCombinations": "Marine_Zones_", - - "lineLength": 66, - ## Edit Areas: Create Combinations file with edit area combinations. 
- "showZoneCombiner" : 1, # 1 to cause zone combiner to display - "defaultEditAreas" : "Combinations_CWF__", - "editAreaSuffix": None, - # product identifiers - "productName": "Coastal Waters Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "Georgia" -- optional - "wfoCityState": "", # Location of WFO - city state - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "previousProductID": "", # Product ID for river bar forecast. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - - "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time - - "hazardSamplingThreshold": (10, None), #(%cov, #points) - - "periodCombining" : 0, # If 1, combine periods, if possible - # Product-specific variables: - # Set to one if you want a 6-hour evening period instead of - # 18-hour period without lows - "includeEveningPeriod": 1, - "useAbbreviations": 1, - - # Weather-related flags - "hoursSChcEnds": 2, - - # riverBarZones[] list to designate any river bars in the forecast - "riverBarZones" : [], - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - "useHolidays": 0, # Set to 1 to use holidays in the time period labels - # Language - "language": "english", - - # Trouble-shooting items - "passLimit": 20, # Limit on passes allowed through - # Narrative Tree - "trace": 0, # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - ######################################################################## - # OVERRIDING METHODS, THRESHOLDS AND VARIABLES - ######################################################################## - # MUST BE OVERRIDDEN IN LOCAL FILE - def _Text1(self): - return "" - - def _Text2(self, argDict): - synopsis 
= "" - - # Try to get Synopsis from previous CWF - #productID = "BOSCWFBOS" - #synopsis = self.getPreviousProduct(productID, "SYNOPSIS") - # Clean up the previous synopsis - #synopsis = re.sub(r'\n', r' ', synopsis) - #synopsis = self.endline(synopsis, linelength=66, breakStr=" ") - - # Convert absolute time pointer to a tuple of values like that - # returned by time.gmtime() - #expTuple = time.strptime('%s' % (self._expireTime), - # '%b %d %y %H:%M:%S GMT') - expTuple = self._expireTime.utctimetuple() - - # Format expiration time for inclusion in synopsis header - expTime = time.strftime('%d%H%M', expTuple) - siteID = self.getSiteID(argDict) - - if len("_") == 0: - ugc = self.synopsisUGC(siteID) - heading = self.synopsisHeading(siteID) - else: - ugc = self.synopsisUGC(siteID, self._pil[-3:]) - heading = self.synopsisHeading(siteID, self._pil[-3:]) - - return "%s-%s-\n" % (ugc, expTime) + self._timeLabel + "\n\n" + \ - heading + "\n" + synopsis + "\n$$\n\n" - - - ######################################################################## - def _getBarForecast(self, editArea, areaLabel, argDict): - # IF current area contains a river bar zone - # return the riverBarForecast - for barZone in self._riverBarZones: - if self.currentAreaContains(argDict["tree"], [barZone]): - barName = self._getAreaLabel(barZone) - return " " + self.getPreviousProduct(self._previousProductID, barName) - return "" - - def _getAreaLabel(self, areaName): - # Get label from AreaDictionary - accessor = ModuleAccessor.ModuleAccessor() - areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") - if areaName in areaDict.keys(): - entry = areaDict[areaName] - if entry.has_key("ugcName"): - return entry["ugcName"] - return "" - - ######################################################################## - - def pop_wx_lower_threshold(self, tree, node): - # Always report weather - return 0 - - def rounding_method_dict(self, tree, node): - # Special rounding methods - # - return { - "Wind": 
self.marineRounding, - } - - def element_outUnits_dict(self, tree, node): - dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) - dict["Visibility"] = "NM" - return dict - - def gust_wind_difference_nlValue(self, tree, node): - # Difference between gust and maxWind below which gusts are not - # mentioned. Units are MPH - if self._includeTropical: - return 5 - else: - return 10 - - - - # SampleAnalysis overrides - def temporalCoverage_hours(self, parmHisto, timeRange, componentName): - # COMMENT: At WFO MFL we use 3 hrly wind grids. If you use 1 hrly wind grids - # and this parameter is 2 or higher, tropical cyclone winds affecting the very - # early or latter part of a forecast period might be neglected. 1 assures - # maximum sensitivity. - if self._includeTropical: - return 1 - else: - return 0 - - def moderated_dict(self, parmHisto, timeRange, componentName): - # This dictionary defines the low and high limit at which - # outliers will be removed when calculating moderated stats. - # By convention the first value listed is the percentage - # allowed for low values and second the percentage allowed - # for high values. - dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, timeRange, componentName) - dict["Wind"] = (0, 20) - if self._includeTropical: - dict["Wind"] = (0, 15) - dict["WindGust"] = (0, 15) - dict["WaveHeight"] = (0, 15) - dict["Swell"] = (0, 15) - return dict - - # Text Rules overrides - def _skipAreas(self, argDict): - # These are edit areas that the formatter will skip - return [] - - def inlandWatersAreas(self, tree, node): - # List of edit area names that are inland or bay waters - # as opposed to "seas" - # The phrasing for these areas will be treated differently - # (see the waveRange_phrase) - # - # e.g. - # return ["TampaBayWaters"] - return ["area3"] - - def inlandWatersWave_element(self, tree, node): - # Weather element first and second choice to use for reporting inland waters waves - # "Waves 1 to 2 feet." 
- # If there is incomplete or no data for the first element, the second will be used. - return ("WindWaveHgt", "WaveHeight") - - def seasWaveHeight_element(self, tree, node): - # Weather element to use for reporting seas - # "Combined seas 10 to 15 feet." - # IF above wind or swell thresholds - return "WaveHeight" - - def seasWindWave_element(self, tree, node): - # Weather element to use for reporting seas waves - # "Wind waves 3 to 4 feet." - # IF above wind or swell thresholds - return "WindWaveHgt" - - def waveHeight_wind_threshold(self, tree, node): - # Wind value above which waveHeight (combined seas) - # is reported vs. wind waves. - # Also, the Swell phrase is omitted if this threshold is exceeded. - # Unit is knots - return 34 - - def combinedSeas_threshold(self, tree, node): - # See wave_phrase - # If waves and swells are above this threshold, - # combined seas will be reported and no Swell phrase will be reported. - # Units: feet - return 7 - - def marine_wind_flag(self, tree, node): - # If 1, Wind wording will reflect the - # crossing of significant thresholds such as gales. - # E.g. "West gales to 35 knots." instead of "West winds 35 knots." - return 0 - - def marine_wind_combining_flag(self, tree, node): - # If 1, Wind combining will reflect the - # crossing of significant thresholds such as gales. - # E.g. "North hurricane force winds to 100 knots." instead of - # "North hurricane force winds to 100 knots easing to - # to 80 knots in the afternoon." 
- return 0 - - def phrase_descriptor_dict(self, tree, node): - # Descriptors for phrases - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - dict["Wind"] = "wind" - dict["seas"] = "combined seas" - dict["inland waters"] = "bay and inland waters" - dict["chop"] = "bay and inland waters" - dict["mixed swell"] = "mixed swell" - dict["waves"] = "wind waves" - dict["dominant period"] = "dominant period" - # Apply only if marine_wind_flag (see above) is set to 1: - dict["hurricane force winds to"] = "hurricane force winds to" - dict["storm force winds to"] = "storm force winds to" - dict["gales to"] = "gales to" - dict["up to"] = "up to" - dict["around"] = "" - return dict - - def phrase_connector_dict(self, tree, node): - # Dictionary of connecting phrases for various - # weather element phrases - # The value for an element may be a phrase or a method - # If a method, it will be called with arguments: - # tree, node - dict = TextRules.TextRules.phrase_connector_dict(self, tree, node) - dict["rising to"] = { - "Wind": ", rising to ", - "Swell": ", building to ", - "Swell2": ", building to ", - "WaveHeight": ", building to ", - "WindWaveHgt": ", building to ", - } - - dict["easing to"] = { - "Wind": ", easing to ", - "Swell": ", subsiding to ", - "Swell2": ", subsiding to ", - "WaveHeight": ", subsiding to ", - "WindWaveHgt": ", subsiding to ", - } - dict["backing"] = { - "Wind": ", backing to ", - "Swell": ", becoming ", - "Swell2": ", becoming ", - "WaveHeight": ", becoming ", - "WindWaveHgt": ", becoming ", - } - - dict["veering"] = { - "Wind": ", veering to ", - "Swell": ", becoming ", - "Swell2": ", becoming ", - "WaveHeight": ", becoming ", - "WindWaveHgt": ", becoming ", - } - - dict["becoming"] = ", becoming " - dict["increasing to"] = { - "Wind": ", rising to ", - "Swell": ", building to ", - "Swell2": ", building to ", - "WaveHeight": ", building to ", - "WindWaveHgt": ", building to ", - } - dict["decreasing to"] = { - "Wind": ", easing to 
", - "Swell": ", subsiding to ", - "Swell2": ", subsiding to ", - "WaveHeight": ", subsiding to ", - "WindWaveHgt": ", subsiding to ", - } - dict["shifting to the"] = ", shifting to the " - dict["becoming onshore"] = " becoming onshore " - dict["then"] = {"Wx": ". ", - "Vector": ", becoming ", - "Scalar": ", becoming ", - "otherwise": ", becoming ", - } - return dict - - def null_nlValue_dict(self, tree, node): - # Threshold below which values are considered "null" and not reported. - # Units depend on the element and product - dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) - dict["WaveHeight"] = 3 - dict["WindWaveHgt"] = 3 - dict["Wind"] = 5 - dict["WindGust"] = 33 - dict["Swell"] = 5 - dict["Visibility"] = 3 # in nautical miles. Report if less than this value. - return dict - - def first_null_phrase_dict(self, tree, node): - # Phrase to use if values THROUGHOUT the period or - # in the first period are Null (i.e. below threshold OR NoWx) - # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. - dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) - dict["WaveHeight"] = "waves 2 feet or less" - dict["WindWaveHgt"] = "wind waves 2 feet or less" - dict["Wind"] = "wind variable less than 5 knots" - dict["Swell"] = "" - return dict - - def null_phrase_dict(self, tree, node): - # Phrase to use for null values in subPhrases other than the first - # Can be an empty string - # E.g. "NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" - dict = TextRules.TextRules.null_phrase_dict(self, tree, node) - dict["WaveHeight"] = "2 feet or less" - dict["WindWaveHgt"] = "2 feet or less" - dict["Wind"] = "variable less than 5 knots" - dict["Wx"] = "" - dict["Swell"] = "light" - return dict - - def maximum_range_nlValue_dict(self, tree, node): - # Maximum range to be reported within a phrase - # e.g. 
5 to 10 mph - # Units depend on the product - dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) - if self._includeTropical: - dict["Wind"] = {'default': 5, - (0.0, 4.0): 0, - (4.0, 33.0): 5, - (33.0, 52.0): 10, - (52.0, 200.0): 20, - } - else: - dict["Wind"] = 10 - dict["Swell"] = 5 - dict["Swell2"] = 5 - dict["WaveHeight"] = 2 - dict["WindWaveHgt"] = 2 - return dict - - def combine_singleValues_flag_dict(self, tree, node): - # Dictionary of weather elements to combine using single values - # rather than ranges. If you are using single value statistics - # for a weather element, you will want to set this flag to 1. - # If there is no entry for an element, min/max combining will - # be done. - # The value for an element may be a phrase or a method - # If a method, it will be called with arguments: - # tree, node - dict = TextRules.TextRules.increment_nlValue_dict(self, tree, node) - dict["Wind"] = 1 - dict["WindGust"] = 1 - dict["Swell"] = 1 - dict["Swell2"] = 1 - dict["WindWaveHgt"] = 1 - dict["WaveHeight"] = 1 - return dict - - ######################################################################## - # COMPONENT PRODUCT DEFINITIONS - ######################################################################## - - def _PoP_analysisMethod(self, componentName): - # Alternative PoP analysis methods for consistency between PoP and Wx - return self.stdDevMaxAvg - #return self.maxMode - #return self.maximum - - - def addTropical(self, analysisList, phraseList, includeHazards=True): - newAnalysisList = [] - for entry in analysisList: - # Sampling defined as a tuple (field, statistic, temporal rate) - # If this is NOT a Wind or WindGust statistic - if entry[0] not in ["Hazards", "Wind", "WindGust", "WaveHeight", "Swell"]: - # Add this statistic to the new analysisList - newAnalysisList.append(entry) - newAnalysisList += [ - ("Wind", self.vectorModeratedMinMax, [6]), - ("WindGust", self.moderatedMinMax, [6]), - ("WaveHeight", self.moderatedMax, [6]), - 
("Swell", self.vectorModeratedMinMax, [6]), - ("pws34", self.maximum), - ("pws64", self.maximum), - ("pwsN34", self.maximum), - ("pwsN64", self.maximum), - ("pwsD34", self.maximum), - ("pwsD64", self.maximum), - ] - if includeHazards: - newAnalysisList.append(("Hazards", self.discreteTimeRangesByKey)) - phraseList.insert(0, self.pws_phrase) - return newAnalysisList, phraseList - - def CWFPeriod(self): - analysisList = [ - # Use the following if you want moderated - # single values (e.g. N WIND 20 KT). - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # Set the combine_singleValues_flag_dict in the Local file. - ("Wind", self.vectorModeratedMax, [6]), - ("Wind", self.vectorMinMax, [12]), - ("WindGust", self.moderatedMax, [6]), - ("WaveHeight", self.moderatedMax, [6]), - ("WindWaveHgt", self.moderatedMax, [6]), - ("Swell", self.vectorModeratedMax, [6]), - ("Swell2", self.vectorModeratedMax, [6]), - ("Period", self.moderatedMax, [6]), - ("Period2", self.moderatedMax, [6]), - ("Wx", self.rankedWx, [6]), - ("T", self.minMax), - ("PoP", self._PoP_analysisMethod("CWFPeriod"), [6]), - ("PoP", self.binnedPercent, [6]), - ] - phraseList = [ - # WINDS - self.marine_wind_withGusts_phrase, - # Alternative: - #self.marine_wind_phrase, - #self.gust_phrase, - # WAVES - self.wave_withPeriods_phrase, - # Alternative: - #self.wave_phrase, - # Optional: - self.chop_phrase, - # SWELLS AND PERIODS - self.swell_withPeriods_phrase, - # Alternative: - #self.swell_phrase, - #self.period_phrase, - # WEATHER - self.weather_phrase, - self.visibility_phrase, - ] - if self._includeTropical: - analysisList, phraseList = self.addTropical(analysisList, phraseList) - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": analysisList, - "phraseList": phraseList, - } - - def CWFPeriodMid(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - 
self.assemblePhrases, - self.wordWrap, - ], - - "analysisList": [ - # Use the following if you want moderated - # single values (e.g. N WIND 20 KT). - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - ("Wind", self.vectorModeratedMax, [6]), - ("Wind", self.vectorMinMax, [12]), - ("WindGust", self.moderatedMax, [6]), - ("WaveHeight", self.moderatedMax, [6]), - ("WindWaveHgt", self.moderatedMax, [6]), - ("Swell", self.vectorModeratedMax, [6]), - ("Swell2", self.vectorModeratedMax, [6]), - ("Wx", self.rankedWx, [6]), - ("PoP", self._PoP_analysisMethod("CWFPeriodMid"), [6]), - ("PoP", self.binnedPercent, [6]), - ], - - "phraseList":[ - # WINDS - self.marine_wind_withGusts_phrase, - # Alternative: - #self.marine_wind_phrase, - #self.gust_phrase, - # WAVES - #self.wave_withPeriods_phrase, - # Alternative: - self.wave_phrase, - # Optional: - self.chop_phrase, - # SWELLS AND PERIODS - self.swell_withPeriods_phrase, - # Alternative: - #self.swell_phrase, - #self.period_phrase, - # WEATHER - self.weather_phrase, - self.visibility_phrase, - ], - } - - def CWFExtended(self): - return { "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("Wind", self.vectorModeratedMax, [6]), - ("WindGust", self.moderatedMax, [12]), - ("WaveHeight", self.moderatedMax, [12]), - ("WindWaveHgt", self.moderatedMax, [12]), - ("Wx", self.rankedWx), - ("T", self.minMax), - ("PoP", self._PoP_analysisMethod("CWFExtended")), - ("PoP", self.binnedPercent), - ("Swell", self.vectorModeratedMax, [12]), - ("Swell2", self.vectorModeratedMax, [12]), - ], - "phraseList":[ - # WIND - self.marine_wind_phrase, - # WAVEHEIGHT - #self.wave_withPeriods_phrase, - # Alternative: - self.wave_phrase, - # Optional: - self.chop_phrase, - # SWELLS AND PERIODS - #self.swell_withPeriods_phrase, - # Alternative: - self.swell_phrase, - #self.period_phrase, - # WEATHER - #self.weather_phrase, - 
#self.visibility_phrase, - ], - } - -## def _issuance_list(self, argDict): -## # This method sets up configurable issuance times with associated -## # narrative definitions. See the Text Product User Guide for documentation. -## if self._definition["includeEveningPeriod"] == 1: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"),("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), -## ("CWFPeriodMid", 12), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## else: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 24), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFExtended", 24), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] - -## return [ -## # WRS modified the "label" and issuance starthour and expiration hours -## # early phrases -## # note: the start, end times and expiration times are local time -## # -## # note: self.DAY = 0600 Local time and self.NIGHT = 1800 Local time -## # -## # description -- text to appear in the startUp dialog for the product (e.g. 330 AM). -## # startHour -- start hour (in local time) for the first period. -## # These times are relative to self.DAY() and -## # self.NIGHT() which default to 6 and 18, respectively. -## # endHour -- end hour (in local time) for the first period. -## # These times are relative to self.DAY() and -## # self.NIGHT() which default to 6 and 18, respectively. The start -## # expirationHour -- hour when the product expires (in local time) -## # This is relitive to midnight local time of the -## # current day. 
-## # period1 Label -- the label for the first period. e.g. ".TODAY...", ".REST OF TODAY..." -## # period1 lateNight phrase -- phrase to use if the hours of 3am to 6am must be qualified -## # e.g. "Partly cloudy in the early morning." -## # period1 lateDay phrase -- phrase to use if the hours of 3pm to 6pm must be qualified -## # e.g. "Partly cloudy in the early evening." -## # todayFlag -- if 1, "Today" and "Tonight" phrasing will be used in subsequent periods, -## # otherwise, weekday wording will apply. -## # narrative definition -- component and time period pairs - -## # 330 AM Early morning issuance starts at 1200Z or when product is actually -## # is actually issued. Ends -## ("230 AM", self.DAY()-4, self.NIGHT(), self.NIGHT(), # WR 8.5 -## ".TODAY...", "before sunrise", "late afternoon", -## 1, narrativeDefAM), -## ("830 AM", self.DAY()+2, self.NIGHT(), self.NIGHT(), # WR 14.5, -## ".TODAY...", "early this morning", "late afternoon", -## 1, narrativeDefAM), -## # End times are tomorrow: -## ("230 PM", self.DAY()+8, self.NIGHT()+12, self.NIGHT()+12, # WR 20.5, -## ".TONIGHT...", "late tonight", "before dark", -## 1, narrativeDefPM), -## ("830 PM", self.NIGHT()+2, 24 + self.DAY(), 24 + self.DAY(), # WR 26.5, -## ".TONIGHT...", "late tonight", "before dark", -## 1, narrativeDefPM), -## ] - - def _issuance_list(self, argDict): - # This method sets up configurable issuance times with associated - # narrative definitions. See the Text Product User Guide for documentation. 
- if self._definition["includeEveningPeriod"] == 1: - narrativeDefAM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFExtended", 24), ("CWFExtended", 24) - ] - narrativeDefPM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), - ("CWFPeriod", 12), - ("CWFExtended", 24), ("CWFExtended", 24) - ] - else: - narrativeDefAM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), - ("CWFExtended", 24), ("CWFExtended", 24) - ] - narrativeDefPM = [ - ("CWFPeriod", "period1"), - ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), - ("CWFExtended", 24), ("CWFExtended", 24) - ] - - return [ - ("Morning", self.DAY(), self.NIGHT(), "issuanceHour + 13", - ".TODAY...", "early", "late", 1, narrativeDefAM), - ("Morning with Pre-1st Period", "issuanceHour", self.NIGHT(), - "issuanceHour + 13", ".TODAY...", "early", "late", 1, - narrativeDefAM), - ("Morning Update", "issuanceHour", self.NIGHT(), - "issuanceHour + 13", ".REST OF TODAY...", "early in the morning", - "late in the afternoon", 1, narrativeDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), "issuanceHour + 13", - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + self.DAY(), "issuanceHour + 13", - ".TONIGHT...", "late in the night", "early in the evening", 1, narrativeDefPM), - ("Afternoon with Pre-1st Period", "issuanceHour", 24 + self.DAY(), - "issuanceHour + 13", ".TONIGHT...", "late in the night", "early in the evening", 1, - narrativeDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), "issuanceHour + 13", - ".REST OF TONIGHT...", "early in the morning", "early in the evening", 1, - narrativeDefPM), - # For the early morning update, this 
produces: - # REST OF TONIGHT: - # MONDAY - # MONDAY NIGHT - ("Early Morning Update", "issuanceHour", self.DAY(), "issuanceHour + 13", - ".REST OF TONIGHT...", "early in the morning", "late in the afternoon", - 0, narrativeDefPM), - # Alternative - # For the early morning update, this produces: - # EARLY THIS MORNING: - # TODAY - # TONIGHT - #("Evening Update", "issuanceHour", 24 + self.DAY(), "issuanceHour + 13", - # ".REST OF TONIGHT...", "late in the night", "early in the evening", - # 1, narrativeDefPM), - #("Early Morning Update", "issuanceHour", self.DAY(), "issuanceHour + 13", - # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", - # 1, narrativeDefPM), - ] - - - - - # Handling visibility within the weather phrase - def visibility_wx_threshold(self, tree, node): - # Weather will be reported if the visibility is below - # this threshold (in NM) OR if it includes a - # significant_wx_visibility_subkey (see below) - return None - - def significant_wx_visibility_subkeys(self, tree, node): - # Weather values that constitute significant weather to - # be reported regardless of visibility. - # If your visibility_wx_threshold is None, you do not need - # to set up these subkeys since weather will always be - # reported. - # Set of tuples of weather key search tuples in the form: - # (cov type inten) - # Wildcards are permitted. 
- return [("* T"), ("* FS")] - - # Configurable Weather Values - def wxCoverageDescriptors(self): - list = TextRules.TextRules.wxCoverageDescriptors(self) - #list.append(("Chc", "*", "*", "*", "a chance")) - return list - - def wxTypeDescriptors(self): - list = TextRules.TextRules.wxTypeDescriptors(self) - #list.append( ("*", "T", "*", "Dry", "dry thunderstorms") ) - #list.append( ("*", "RW", "*", "*", "rain showers") ) - return list - - def wxAttributeDescriptors(self): - list = TextRules.TextRules.wxAttributeDescriptors(self) - #list.append( ("*", "T", "*", "Dry", "") ) - return list - - def wxIntensityDescriptors(self): - list = TextRules.TextRules.wxIntensityDescriptors(self) - #list.append(("*", "RW", "--", "*", "light")) - return list - - def wxCombinations(self): - # This is the list of which wxTypes should be combined into one. - # For example, if ("RW", "R") appears, then wxTypes of "RW" and "R" will - # be combined into one key and the key with the dominant coverage will - # be used as the combined key. - # You may also specify a method which will be - # -- given arguments subkey1 and subkey2 and - # -- should return - # -- a flag = 1 if they are to be combined, 0 otherwise - # -- the combined key to be used - # Note: The method will be called twice, once with (subkey1, subkey2) - # and once with (subkey2, subkey1) so you can assume one ordering. 
- # See the example below, "combine_T_RW" - # - return [ - ("RW", "R"), - ("SW", "S"), - self.combine_T_RW, - ] - - def combine_T_RW(self, subkey1, subkey2): - # Combine T and RW only if the coverage of T - # is dominant over the coverage of RW - wxType1 = subkey1.wxType() - wxType2 = subkey2.wxType() - if wxType1 == "T" and wxType2 == "RW": - order = self.dominantCoverageOrder(subkey1, subkey2) - if order == -1 or order == 0: - return 1, subkey1 - return 0, None - - def ExtendedLabel(self): - return { - "type": "component", - "methodList": [self.setLabel], - "analysisList": [], - "phraseList":[], - } - def setLabel(self, tree, component): - component.set("words", "\n.Extended forecast...\n") - return self.DONE() - - def generateForecast(self, argDict): - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." 
- - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - skipAreas = self._skipAreas(argDict) - argDict["editArea"] = (editArea, areaLabel) - if self.currentAreaContains(argDict, skipAreas): - continue - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._language = argDict["language"] - - # Tropical exceptions - try: - self._includeTropical = self._includeTropical == "Yes" - except: - self._includeTropical = False - if self._includeTropical: - self._periodCombining = 0 - if self._productIssuance == "Morning with Pre-1st Period": - self._productIssuance = "Morning" - if self._productIssuance == "Afternoon with Pre-1st Period": - self._productIssuance = "Afternoon" - - return None - - def _determineTimeRanges(self, argDict): - # 
Set up the Narrative Definition and initial Time Range - self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict)) - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._expireTime = self._issuanceInfo.expireTime() - self._issueTime = self._issuanceInfo.issueTime() - self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - if self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - staticIssueTime=re.sub(r'(\d{3,4} [AP]M).*',r'\1',self._productIssuance) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - # Re-calculate issueTime - #self._issueTime = self.strToGMT(staticIssueTime) - expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) - self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") - return None - - def _sampleData(self, argDict): - # Sample and analyze the data for the narrative - self._narrativeProcessor = ForecastNarrative.ForecastNarrative() - error = self._narrativeProcessor.getNarrativeData( - argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) - if error is not None: - return error - return None - - def _preProcessProduct(self, fcst, argDict): - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" +\ - "National 
Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - fcst = fcst + self._Text1() - try: - text2 = self._Text2(argDict) - except: - text2 = self._Text2() - fcst = fcst + text2 - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area combination - print "Generating Forecast for", areaLabel - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas) - fcst = fcst + areaHeader - - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - - # Handle River BarForecast - barFcst = self._getBarForecast(editArea, areaLabel, argDict) - if barFcst != "": - fcst = fcst + barFcst - - # Handle regular zone forecast - else: - argDict["language"] = self._language - # Generate Narrative Forecast for Edit Area - fcstSegment = self._narrativeProcessor.generateForecast( - argDict, editArea, areaLabel) - - # Handle abbreviations - if self._useAbbreviations == 1: - fcstSegment = self.marine_abbreviateText(fcstSegment) - fcstSegment = re.sub(r'\n', r' ',fcstSegment) - fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) - fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) - fcst = fcst + fcstSegment - - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst + "\n$$\n\n" - - def _postProcessProduct(self, fcst, argDict): - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - 
######################################################################## -## def _issuance_list(self, argDict): -## # This method sets up configurable issuance times with associated -## # narrative definitions. See the Text Product User Guide for documentation. -## if self._definition["includeEveningPeriod"] == 1: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), -## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriod", 12), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"), -## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriod", 12), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## else: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), -## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"), -## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] - -## return [ -## ("430 AM", self.DAY(), self.NIGHT(), 17, -## ".TODAY...", "early in the morning", "late in the afternoon", -## 1, narrativeDefAM), -## ("1030 AM", "issuanceHour", self.NIGHT(), 17, -## ".TODAY...", "early in the morning", "late in the afternoon", -## 1, narrativeDefAM), -## # End times are tomorrow: -## ("430 PM", self.NIGHT(), 24 + self.DAY(), 24 + 5, -## ".TONIGHT...", "late in the night", "early in the evening", -## 1, narrativeDefPM), -## ("1030 PM", "issuanceHour", 24 + self.DAY(), 24 + 5, -## ".TONIGHT...", "late in the night", "early in the evening", -## 1, narrativeDefPM), -## ] - - # Alternative issuance list using CWFPeriodMid -## def _issuance_list(self, argDict): -## # This method sets up configurable issuance times with associated -## # narrative definitions. See the Text Product User Guide for documentation. 
-## if self._definition["includeEveningPeriod"] == 1: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), -## ("CWFPeriodMid", 6), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"),("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), -## ("CWFPeriodMid", 6), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## else: -## narrativeDefAM = [ -## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 18), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] -## narrativeDefPM = [ -## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), -## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 18), -## ("CWFExtended", 24), ("CWFExtended", 24) -## ] - -## return [ -## # WRS modified the "label" and issuance starthour and expiration hours -## # early phrases -## # note: the start, end times and expiration times are local time -## # -## # note: self.DAY = 0600 Local time and self.NIGHT = 1800 Local time -## # -## # description -- text to appear in the startUp dialog for the product (e.g. 330 AM). -## # startHour -- start hour (in local time) for the first period. -## # These times are relative to self.DAY() and -## # self.NIGHT() which default to 6 and 18, respectively. -## # endHour -- end hour (in local time) for the first period. -## # These times are relative to self.DAY() and -## # self.NIGHT() which default to 6 and 18, respectively. The start -## # expirationHour -- hour when the product expires (in local time) -## # This is relitive to midnight local time of the -## # current day. -## # period1 Label -- the label for the first period. e.g. ".TODAY...", ".REST OF TODAY..." -## # period1 lateNight phrase -- phrase to use if the hours of 3am to 6am must be qualified -## # e.g. 
"Partly cloudy in the early morning." -## # period1 lateDay phrase -- phrase to use if the hours of 3pm to 6pm must be qualified -## # e.g. "Partly cloudy in the early evening." -## # todayFlag -- if 1, "Today" and "Tonight" phrasing will be used in subsequent periods, -## # otherwise, weekday wording will apply. -## # narrative definition -- component and time period pairs - -## # 330 AM Early morning issuance starts at 1200Z or when product is actually -## # is actually issued. Ends -## ("230 AM", self.DAY()-4, self.NIGHT(), 17, -## ".TODAY...", "before sunrise", "late afternoon", -## 1, narrativeDefAM), -## ("830 AM", self.DAY()+2, self.NIGHT(), 17, -## ".TODAY...", "early this morning", "late afternoon", -## 1, narrativeDefAM), -## # End times are tomorrow: -## ("230 PM", self.DAY()+8, self.NIGHT()+12, 24+5, -## ".TONIGHT...", "late tonight", "before dark", -## 1, narrativeDefPM), -## ("830 PM", self.NIGHT()+2, 24 + self.DAY(), 24+5, -## ".TONIGHT...", "late tonight", "before dark", -## 1, narrativeDefPM), -## ] - - def lateDay_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late day -- default 3pm-6pm - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateDayPhrase() - else: - return "late in the afternoon" - - def lateNight_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late night -- default 3am-6am - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateNightPhrase() - else: - return "early in the morning" - - def splitDay24HourLabel_flag(self, tree, node): - # Return 0 to have the TimeDescriptor module label 24 hour periods - # with simply the weekday name (e.g. Saturday) - # instead of including the day and night periods - # (e.g. 
Saturday and Saturday night) - # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" - # is set to zero. - # NOTE: This applied only to periods that are exactly 24-hours in length. - # Periods longer than that will always be split into day and night labels - # (e.g. SUNDAY THROUGH MONDAY NIGHT) - compName = node.getComponentName() - if compName == "CWFExtended": - return 0 - else: - return 1 - - def _skipAreas(self, argDict): - # These are edit areas that the formatter will skip - return [] - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. - def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] - tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CON", 'CAN', - 'EXP'] - return [ - ('HF.A', marineActions, 'Marine'), # HURRICANE FORCE WIND WATCH - ('SR.A', marineActions, 'Marine'), # STORM WATCH - ('GL.A', marineActions, 'Marine2'), # GALE WATCH - ('SE.A', marineActions, 'Marine3'), # HAZARDOUS SEAS - ('UP.A', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH - ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING - ('SR.W', marineActions, 'Marine'), # STORM WARNING - ('GL.W', marineActions, 'Marine2'), # GALE WARNING - ('SE.W', marineActions, 'Marine3'), # HAZARDOUS SEAS - ('UP.W', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING - ('HU.W', allActions, 'Hurricane'), # HURRICANE WARNING - ('TY.W', allActions, 'Typhoon'), # TYPHOON WARNING - ('TR.W', allActions, 'Tropical'), # TROPICAL STORM WARNING - ('HU.A', allActions, 'Hurricane'), # HURRICANE WATCH - ('TY.A', allActions, 'Typhoon'), # TYPHOON WATCH - ('TR.A', allActions, 'Tropical'), # TROPICAL STORM WATCH - ('UP.Y', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY ADVISORY - ('MF.Y', marineActions, 'Fog'), # MARINE DENSE FOG ADVISORY - ('MS.Y', marineActions, 'Smoke'), # MARINE 
DENSE SMOKE ADVISORY - ('MH.W', marineActions, 'Ashfall'), # MARINE VOLCANIC ASHFALL WARNING - ('MH.Y', marineActions, 'Ashfall'), # MARINE VOLCANIC ASHFALL ADVISORY - ('RB.Y', marineActions, 'Marine'), # ROUGH BAR - ('SI.Y', marineActions, 'Marine1'), # SMALL CRAFT ADVISORY - ('SC.Y', marineActions, 'Marine2'), # SMALL CRAFT ADVISORY - ('SW.Y', marineActions, 'Marine3'), # SMALL CRAFT ADVISORY - ('BW.Y', marineActions, 'Marine4'), # BRISK WIND ADVISORY - ('TO.A', marineActions, 'Convective'), # TORNADO WATCH - ('SV.A', marineActions, 'Convective'), # SEVERE THUNDERSTORM WATCH - ('LO.Y', marineActions, 'LowWater'), # LOW WATER ADVISORY - ] +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: CWF_Pacific (Coastal Waters Forecast) +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# CWF_Pacific.py, CWF_Pacific___Definition, +# CWF_Pacific__Overrides +#------------------------------------------------------------------------- +# Customization Points in Local File: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas, default is Combinations +# productName defines name of product e.g. "Coastal Waters Forecast" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "CWFBOS" +# areaName (opt.) Area name for product header, such as "Western New York" +# wfoCityState City,state that the WFO is located in, such as "Buffalo, NY" +# +# Optional Configuration Items +# +# editAreaSuffix default None. Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined or the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. 
+# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +# +# periodCombining If 1, an attempt will be made to combine components +# or time periods into one. Otherwise no period +# combining will will be done. +# includeEveningPeriod Include a 6 hour Evening period on the 3rd day +# useAbbreviations +# If 1, use marine abbreviations e.g. TSTM instead of THUNDERSTORM, +# NW instead of NORTHWEST +# (See marine_abbreviateText in the TextRules module) +# +# Weather-related flags +# hoursSChcEnds - specifies hours past the beginning of the first +# first period of the product to stop including 'Slight +# Chance' or 'Isolated' weather types (ERH policy +# allows values of 1-5 * 12 hour periods) +# River Bar Zones +# riverBarZones - list to designate any river bars in the forecast +# +# +# areaDictionary Modify the AreaDictionary utility with UGC +# information about zones +# +# useHolidays Set to 1 to use holidays in the time period labels +# +# Trouble-shooting items +# passLimit -- Limit on passes allowed through Narrative Tree +# trace -- Set to 1 to turn on trace through Narrative Tree +# +# OVERRIDES +# +# Required Overrides +# +# _Text1(), _Text2() Descriptive text for header +# +# NARRATIVE CUSTOMIZATION POINTS +# The phrases in this product can be customized in many ways by overriding +# infrastructure methods in the Local file. +# You will see common overrides in the Local file and you may change them +# in that there. +# For further customization, you can determine which phrases your product is +# using by examining the Component Product Definitions below. 
+# Then, you can look up the phrase in the Text Product User Guide which will +# describe the all the relevant override methods associated with the phrase. +# Refer to the Customization section of the Text Product User Guide +# for step-by-step information. +# +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Wind (every 3 hours to 3 days, then every 6 hours to 7 days) +# WaveHeight and/or WindWaveHgt +# (every 6 hours to 3 days, then every 12 hours to 7 days) +# Wx (every 6 hours to 3 days, then every 12 hours to 7 days) +# Optional: +# WindGust (every 3 hours to 7 days) +# Swell, Swell2, Period, Period2 (every 6 hours to 7 days) +#------------------------------------------------------------------------- +# Edit Areas Needed: None +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +# Combinations +#------------------------------------------------------------------------- +# Component Products: +# CWFPeriod (component) +# CWFPeriodMid (component) +# ExtendedLabel(component) +# CWFExtended (component) +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". 
+#------------------------------------------------------------------------- +# Additional Information: +# +# COMMON OVERRIDES +# from CWF_Pacific: +# _Text1 +# _Text2 +# _issuance_list +# from MarinePhrases +# inlandWatersAreas +# inlandWatersWave_element +# seasWaveHeight_element +# seasWindWave_element +# waveHeight_wind_threshold +# marine_wind_flag +# marine_wind_combining_flag +# marine_wind_verbose_flag +# from ConfigVariables +# phrase_descriptor_dict +# phrase_connector_dict +# null_nlValue_dict +# first_null_phrase_dict +# null_phrase_dict +# maximum_range_nlValue_dict +# combine_singleValues_flag_dict +# from WxPhrases: +# embedded_visibility_flag +# visibility_wx_threshold +# significant_wx_visibility_subkeys +# wxCoverageDescriptors +# wxTypeDescriptors +# wxAttributeDescriptors +# wxIntensityDescriptors +# wxCombinations +# combine_T_RW +# from SampleAnalysis +# moderated_dict +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Marine Services. +#------------------------------------------------------------------------- + +import TextRules +import SampleAnalysis +import ForecastNarrative +import ModuleAccessor +import time, string, re, types +import TimeRange + + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + #(("Include Tropical?", "includeTropical") , "No", "radio", ["Yes","No"]), + ] + Definition = { + "type": "smart", + "displayName": "None", + "database": "Official", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/CWF_.txt", + "debug": 0, + # Name of map background for creating Combinations + "mapNameForCombinations": "Marine_Zones_", + + "lineLength": 66, + ## Edit Areas: Create Combinations file with edit area combinations. 
+ "showZoneCombiner" : 1, # 1 to cause zone combiner to display + "defaultEditAreas" : "Combinations_CWF__", + "editAreaSuffix": None, + # product identifiers + "productName": "Coastal Waters Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "Georgia" -- optional + "wfoCityState": "", # Location of WFO - city state + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "previousProductID": "", # Product ID for river bar forecast. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + + "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time + + "hazardSamplingThreshold": (10, None), #(%cov, #points) + + "periodCombining" : 0, # If 1, combine periods, if possible + # Product-specific variables: + # Set to one if you want a 6-hour evening period instead of + # 18-hour period without lows + "includeEveningPeriod": 1, + "useAbbreviations": 1, + + # Weather-related flags + "hoursSChcEnds": 2, + + # riverBarZones[] list to designate any river bars in the forecast + "riverBarZones" : [], + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + "useHolidays": 0, # Set to 1 to use holidays in the time period labels + # Language + "language": "english", + + # Trouble-shooting items + "passLimit": 20, # Limit on passes allowed through + # Narrative Tree + "trace": 0, # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + ######################################################################## + # OVERRIDING METHODS, THRESHOLDS AND VARIABLES + ######################################################################## + # MUST BE OVERRIDDEN IN LOCAL FILE + def _Text1(self): + return "" + + def _Text2(self, argDict): + synopsis 
= "" + + # Try to get Synopsis from previous CWF + #productID = "BOSCWFBOS" + #synopsis = self.getPreviousProduct(productID, "SYNOPSIS") + # Clean up the previous synopsis + #synopsis = re.sub(r'\n', r' ', synopsis) + #synopsis = self.endline(synopsis, linelength=66, breakStr=" ") + + # Convert absolute time pointer to a tuple of values like that + # returned by time.gmtime() + #expTuple = time.strptime('%s' % (self._expireTime), + # '%b %d %y %H:%M:%S GMT') + expTuple = self._expireTime.utctimetuple() + + # Format expiration time for inclusion in synopsis header + expTime = time.strftime('%d%H%M', expTuple) + siteID = self.getSiteID(argDict) + + if len("_") == 0: + ugc = self.synopsisUGC(siteID) + heading = self.synopsisHeading(siteID) + else: + ugc = self.synopsisUGC(siteID, self._pil[-3:]) + heading = self.synopsisHeading(siteID, self._pil[-3:]) + + return "%s-%s-\n" % (ugc, expTime) + self._timeLabel + "\n\n" + \ + heading + "\n" + synopsis + "\n$$\n\n" + + + ######################################################################## + def _getBarForecast(self, editArea, areaLabel, argDict): + # IF current area contains a river bar zone + # return the riverBarForecast + for barZone in self._riverBarZones: + if self.currentAreaContains(argDict["tree"], [barZone]): + barName = self._getAreaLabel(barZone) + return " " + self.getPreviousProduct(self._previousProductID, barName) + return "" + + def _getAreaLabel(self, areaName): + # Get label from AreaDictionary + accessor = ModuleAccessor.ModuleAccessor() + areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") + if areaName in list(areaDict.keys()): + entry = areaDict[areaName] + if "ugcName" in entry: + return entry["ugcName"] + return "" + + ######################################################################## + + def pop_wx_lower_threshold(self, tree, node): + # Always report weather + return 0 + + def rounding_method_dict(self, tree, node): + # Special rounding methods + # + return { + "Wind": 
self.marineRounding, + } + + def element_outUnits_dict(self, tree, node): + dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) + dict["Visibility"] = "NM" + return dict + + def gust_wind_difference_nlValue(self, tree, node): + # Difference between gust and maxWind below which gusts are not + # mentioned. Units are MPH + if self._includeTropical: + return 5 + else: + return 10 + + + + # SampleAnalysis overrides + def temporalCoverage_hours(self, parmHisto, timeRange, componentName): + # COMMENT: At WFO MFL we use 3 hrly wind grids. If you use 1 hrly wind grids + # and this parameter is 2 or higher, tropical cyclone winds affecting the very + # early or latter part of a forecast period might be neglected. 1 assures + # maximum sensitivity. + if self._includeTropical: + return 1 + else: + return 0 + + def moderated_dict(self, parmHisto, timeRange, componentName): + # This dictionary defines the low and high limit at which + # outliers will be removed when calculating moderated stats. + # By convention the first value listed is the percentage + # allowed for low values and second the percentage allowed + # for high values. + dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, timeRange, componentName) + dict["Wind"] = (0, 20) + if self._includeTropical: + dict["Wind"] = (0, 15) + dict["WindGust"] = (0, 15) + dict["WaveHeight"] = (0, 15) + dict["Swell"] = (0, 15) + return dict + + # Text Rules overrides + def _skipAreas(self, argDict): + # These are edit areas that the formatter will skip + return [] + + def inlandWatersAreas(self, tree, node): + # List of edit area names that are inland or bay waters + # as opposed to "seas" + # The phrasing for these areas will be treated differently + # (see the waveRange_phrase) + # + # e.g. + # return ["TampaBayWaters"] + return ["area3"] + + def inlandWatersWave_element(self, tree, node): + # Weather element first and second choice to use for reporting inland waters waves + # "Waves 1 to 2 feet." 
+ # If there is incomplete or no data for the first element, the second will be used. + return ("WindWaveHgt", "WaveHeight") + + def seasWaveHeight_element(self, tree, node): + # Weather element to use for reporting seas + # "Combined seas 10 to 15 feet." + # IF above wind or swell thresholds + return "WaveHeight" + + def seasWindWave_element(self, tree, node): + # Weather element to use for reporting seas waves + # "Wind waves 3 to 4 feet." + # IF above wind or swell thresholds + return "WindWaveHgt" + + def waveHeight_wind_threshold(self, tree, node): + # Wind value above which waveHeight (combined seas) + # is reported vs. wind waves. + # Also, the Swell phrase is omitted if this threshold is exceeded. + # Unit is knots + return 34 + + def combinedSeas_threshold(self, tree, node): + # See wave_phrase + # If waves and swells are above this threshold, + # combined seas will be reported and no Swell phrase will be reported. + # Units: feet + return 7 + + def marine_wind_flag(self, tree, node): + # If 1, Wind wording will reflect the + # crossing of significant thresholds such as gales. + # E.g. "West gales to 35 knots." instead of "West winds 35 knots." + return 0 + + def marine_wind_combining_flag(self, tree, node): + # If 1, Wind combining will reflect the + # crossing of significant thresholds such as gales. + # E.g. "North hurricane force winds to 100 knots." instead of + # "North hurricane force winds to 100 knots easing to + # to 80 knots in the afternoon." 
+ return 0 + + def phrase_descriptor_dict(self, tree, node): + # Descriptors for phrases + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + dict["Wind"] = "wind" + dict["seas"] = "combined seas" + dict["inland waters"] = "bay and inland waters" + dict["chop"] = "bay and inland waters" + dict["mixed swell"] = "mixed swell" + dict["waves"] = "wind waves" + dict["dominant period"] = "dominant period" + # Apply only if marine_wind_flag (see above) is set to 1: + dict["hurricane force winds to"] = "hurricane force winds to" + dict["storm force winds to"] = "storm force winds to" + dict["gales to"] = "gales to" + dict["up to"] = "up to" + dict["around"] = "" + return dict + + def phrase_connector_dict(self, tree, node): + # Dictionary of connecting phrases for various + # weather element phrases + # The value for an element may be a phrase or a method + # If a method, it will be called with arguments: + # tree, node + dict = TextRules.TextRules.phrase_connector_dict(self, tree, node) + dict["rising to"] = { + "Wind": ", rising to ", + "Swell": ", building to ", + "Swell2": ", building to ", + "WaveHeight": ", building to ", + "WindWaveHgt": ", building to ", + } + + dict["easing to"] = { + "Wind": ", easing to ", + "Swell": ", subsiding to ", + "Swell2": ", subsiding to ", + "WaveHeight": ", subsiding to ", + "WindWaveHgt": ", subsiding to ", + } + dict["backing"] = { + "Wind": ", backing to ", + "Swell": ", becoming ", + "Swell2": ", becoming ", + "WaveHeight": ", becoming ", + "WindWaveHgt": ", becoming ", + } + + dict["veering"] = { + "Wind": ", veering to ", + "Swell": ", becoming ", + "Swell2": ", becoming ", + "WaveHeight": ", becoming ", + "WindWaveHgt": ", becoming ", + } + + dict["becoming"] = ", becoming " + dict["increasing to"] = { + "Wind": ", rising to ", + "Swell": ", building to ", + "Swell2": ", building to ", + "WaveHeight": ", building to ", + "WindWaveHgt": ", building to ", + } + dict["decreasing to"] = { + "Wind": ", easing to 
", + "Swell": ", subsiding to ", + "Swell2": ", subsiding to ", + "WaveHeight": ", subsiding to ", + "WindWaveHgt": ", subsiding to ", + } + dict["shifting to the"] = ", shifting to the " + dict["becoming onshore"] = " becoming onshore " + dict["then"] = {"Wx": ". ", + "Vector": ", becoming ", + "Scalar": ", becoming ", + "otherwise": ", becoming ", + } + return dict + + def null_nlValue_dict(self, tree, node): + # Threshold below which values are considered "null" and not reported. + # Units depend on the element and product + dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) + dict["WaveHeight"] = 3 + dict["WindWaveHgt"] = 3 + dict["Wind"] = 5 + dict["WindGust"] = 33 + dict["Swell"] = 5 + dict["Visibility"] = 3 # in nautical miles. Report if less than this value. + return dict + + def first_null_phrase_dict(self, tree, node): + # Phrase to use if values THROUGHOUT the period or + # in the first period are Null (i.e. below threshold OR NoWx) + # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. + dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) + dict["WaveHeight"] = "waves 2 feet or less" + dict["WindWaveHgt"] = "wind waves 2 feet or less" + dict["Wind"] = "wind variable less than 5 knots" + dict["Swell"] = "" + return dict + + def null_phrase_dict(self, tree, node): + # Phrase to use for null values in subPhrases other than the first + # Can be an empty string + # E.g. "NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" + dict = TextRules.TextRules.null_phrase_dict(self, tree, node) + dict["WaveHeight"] = "2 feet or less" + dict["WindWaveHgt"] = "2 feet or less" + dict["Wind"] = "variable less than 5 knots" + dict["Wx"] = "" + dict["Swell"] = "light" + return dict + + def maximum_range_nlValue_dict(self, tree, node): + # Maximum range to be reported within a phrase + # e.g. 
5 to 10 mph + # Units depend on the product + dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) + if self._includeTropical: + dict["Wind"] = {'default': 5, + (0.0, 4.0): 0, + (4.0, 33.0): 5, + (33.0, 52.0): 10, + (52.0, 200.0): 20, + } + else: + dict["Wind"] = 10 + dict["Swell"] = 5 + dict["Swell2"] = 5 + dict["WaveHeight"] = 2 + dict["WindWaveHgt"] = 2 + return dict + + def combine_singleValues_flag_dict(self, tree, node): + # Dictionary of weather elements to combine using single values + # rather than ranges. If you are using single value statistics + # for a weather element, you will want to set this flag to 1. + # If there is no entry for an element, min/max combining will + # be done. + # The value for an element may be a phrase or a method + # If a method, it will be called with arguments: + # tree, node + dict = TextRules.TextRules.increment_nlValue_dict(self, tree, node) + dict["Wind"] = 1 + dict["WindGust"] = 1 + dict["Swell"] = 1 + dict["Swell2"] = 1 + dict["WindWaveHgt"] = 1 + dict["WaveHeight"] = 1 + return dict + + ######################################################################## + # COMPONENT PRODUCT DEFINITIONS + ######################################################################## + + def _PoP_analysisMethod(self, componentName): + # Alternative PoP analysis methods for consistency between PoP and Wx + return self.stdDevMaxAvg + #return self.maxMode + #return self.maximum + + + def addTropical(self, analysisList, phraseList, includeHazards=True): + newAnalysisList = [] + for entry in analysisList: + # Sampling defined as a tuple (field, statistic, temporal rate) + # If this is NOT a Wind or WindGust statistic + if entry[0] not in ["Hazards", "Wind", "WindGust", "WaveHeight", "Swell"]: + # Add this statistic to the new analysisList + newAnalysisList.append(entry) + newAnalysisList += [ + ("Wind", self.vectorModeratedMinMax, [6]), + ("WindGust", self.moderatedMinMax, [6]), + ("WaveHeight", self.moderatedMax, [6]), + 
("Swell", self.vectorModeratedMinMax, [6]), + ("pws34", self.maximum), + ("pws64", self.maximum), + ("pwsN34", self.maximum), + ("pwsN64", self.maximum), + ("pwsD34", self.maximum), + ("pwsD64", self.maximum), + ] + if includeHazards: + newAnalysisList.append(("Hazards", self.discreteTimeRangesByKey)) + phraseList.insert(0, self.pws_phrase) + return newAnalysisList, phraseList + + def CWFPeriod(self): + analysisList = [ + # Use the following if you want moderated + # single values (e.g. N WIND 20 KT). + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # Set the combine_singleValues_flag_dict in the Local file. + ("Wind", self.vectorModeratedMax, [6]), + ("Wind", self.vectorMinMax, [12]), + ("WindGust", self.moderatedMax, [6]), + ("WaveHeight", self.moderatedMax, [6]), + ("WindWaveHgt", self.moderatedMax, [6]), + ("Swell", self.vectorModeratedMax, [6]), + ("Swell2", self.vectorModeratedMax, [6]), + ("Period", self.moderatedMax, [6]), + ("Period2", self.moderatedMax, [6]), + ("Wx", self.rankedWx, [6]), + ("T", self.minMax), + ("PoP", self._PoP_analysisMethod("CWFPeriod"), [6]), + ("PoP", self.binnedPercent, [6]), + ] + phraseList = [ + # WINDS + self.marine_wind_withGusts_phrase, + # Alternative: + #self.marine_wind_phrase, + #self.gust_phrase, + # WAVES + self.wave_withPeriods_phrase, + # Alternative: + #self.wave_phrase, + # Optional: + self.chop_phrase, + # SWELLS AND PERIODS + self.swell_withPeriods_phrase, + # Alternative: + #self.swell_phrase, + #self.period_phrase, + # WEATHER + self.weather_phrase, + self.visibility_phrase, + ] + if self._includeTropical: + analysisList, phraseList = self.addTropical(analysisList, phraseList) + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": analysisList, + "phraseList": phraseList, + } + + def CWFPeriodMid(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + 
self.assemblePhrases, + self.wordWrap, + ], + + "analysisList": [ + # Use the following if you want moderated + # single values (e.g. N WIND 20 KT). + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + ("Wind", self.vectorModeratedMax, [6]), + ("Wind", self.vectorMinMax, [12]), + ("WindGust", self.moderatedMax, [6]), + ("WaveHeight", self.moderatedMax, [6]), + ("WindWaveHgt", self.moderatedMax, [6]), + ("Swell", self.vectorModeratedMax, [6]), + ("Swell2", self.vectorModeratedMax, [6]), + ("Wx", self.rankedWx, [6]), + ("PoP", self._PoP_analysisMethod("CWFPeriodMid"), [6]), + ("PoP", self.binnedPercent, [6]), + ], + + "phraseList":[ + # WINDS + self.marine_wind_withGusts_phrase, + # Alternative: + #self.marine_wind_phrase, + #self.gust_phrase, + # WAVES + #self.wave_withPeriods_phrase, + # Alternative: + self.wave_phrase, + # Optional: + self.chop_phrase, + # SWELLS AND PERIODS + self.swell_withPeriods_phrase, + # Alternative: + #self.swell_phrase, + #self.period_phrase, + # WEATHER + self.weather_phrase, + self.visibility_phrase, + ], + } + + def CWFExtended(self): + return { "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("Wind", self.vectorModeratedMax, [6]), + ("WindGust", self.moderatedMax, [12]), + ("WaveHeight", self.moderatedMax, [12]), + ("WindWaveHgt", self.moderatedMax, [12]), + ("Wx", self.rankedWx), + ("T", self.minMax), + ("PoP", self._PoP_analysisMethod("CWFExtended")), + ("PoP", self.binnedPercent), + ("Swell", self.vectorModeratedMax, [12]), + ("Swell2", self.vectorModeratedMax, [12]), + ], + "phraseList":[ + # WIND + self.marine_wind_phrase, + # WAVEHEIGHT + #self.wave_withPeriods_phrase, + # Alternative: + self.wave_phrase, + # Optional: + self.chop_phrase, + # SWELLS AND PERIODS + #self.swell_withPeriods_phrase, + # Alternative: + self.swell_phrase, + #self.period_phrase, + # WEATHER + #self.weather_phrase, + 
#self.visibility_phrase, + ], + } + +## def _issuance_list(self, argDict): +## # This method sets up configurable issuance times with associated +## # narrative definitions. See the Text Product User Guide for documentation. +## if self._definition["includeEveningPeriod"] == 1: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"),("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), +## ("CWFPeriodMid", 12), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## else: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 24), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFExtended", 24), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] + +## return [ +## # WRS modified the "label" and issuance starthour and expiration hours +## # early phrases +## # note: the start, end times and expiration times are local time +## # +## # note: self.DAY = 0600 Local time and self.NIGHT = 1800 Local time +## # +## # description -- text to appear in the startUp dialog for the product (e.g. 330 AM). +## # startHour -- start hour (in local time) for the first period. +## # These times are relative to self.DAY() and +## # self.NIGHT() which default to 6 and 18, respectively. +## # endHour -- end hour (in local time) for the first period. +## # These times are relative to self.DAY() and +## # self.NIGHT() which default to 6 and 18, respectively. The start +## # expirationHour -- hour when the product expires (in local time) +## # This is relitive to midnight local time of the +## # current day. 
+## # period1 Label -- the label for the first period. e.g. ".TODAY...", ".REST OF TODAY..." +## # period1 lateNight phrase -- phrase to use if the hours of 3am to 6am must be qualified +## # e.g. "Partly cloudy in the early morning." +## # period1 lateDay phrase -- phrase to use if the hours of 3pm to 6pm must be qualified +## # e.g. "Partly cloudy in the early evening." +## # todayFlag -- if 1, "Today" and "Tonight" phrasing will be used in subsequent periods, +## # otherwise, weekday wording will apply. +## # narrative definition -- component and time period pairs + +## # 330 AM Early morning issuance starts at 1200Z or when product is actually +## # is actually issued. Ends +## ("230 AM", self.DAY()-4, self.NIGHT(), self.NIGHT(), # WR 8.5 +## ".TODAY...", "before sunrise", "late afternoon", +## 1, narrativeDefAM), +## ("830 AM", self.DAY()+2, self.NIGHT(), self.NIGHT(), # WR 14.5, +## ".TODAY...", "early this morning", "late afternoon", +## 1, narrativeDefAM), +## # End times are tomorrow: +## ("230 PM", self.DAY()+8, self.NIGHT()+12, self.NIGHT()+12, # WR 20.5, +## ".TONIGHT...", "late tonight", "before dark", +## 1, narrativeDefPM), +## ("830 PM", self.NIGHT()+2, 24 + self.DAY(), 24 + self.DAY(), # WR 26.5, +## ".TONIGHT...", "late tonight", "before dark", +## 1, narrativeDefPM), +## ] + + def _issuance_list(self, argDict): + # This method sets up configurable issuance times with associated + # narrative definitions. See the Text Product User Guide for documentation. 
+ if self._definition["includeEveningPeriod"] == 1: + narrativeDefAM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFExtended", 24), ("CWFExtended", 24) + ] + narrativeDefPM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), + ("CWFPeriod", 12), + ("CWFExtended", 24), ("CWFExtended", 24) + ] + else: + narrativeDefAM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), + ("CWFExtended", 24), ("CWFExtended", 24) + ] + narrativeDefPM = [ + ("CWFPeriod", "period1"), + ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), + ("CWFExtended", 24), ("CWFExtended", 24) + ] + + return [ + ("Morning", self.DAY(), self.NIGHT(), "issuanceHour + 13", + ".TODAY...", "early", "late", 1, narrativeDefAM), + ("Morning with Pre-1st Period", "issuanceHour", self.NIGHT(), + "issuanceHour + 13", ".TODAY...", "early", "late", 1, + narrativeDefAM), + ("Morning Update", "issuanceHour", self.NIGHT(), + "issuanceHour + 13", ".REST OF TODAY...", "early in the morning", + "late in the afternoon", 1, narrativeDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), "issuanceHour + 13", + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + self.DAY(), "issuanceHour + 13", + ".TONIGHT...", "late in the night", "early in the evening", 1, narrativeDefPM), + ("Afternoon with Pre-1st Period", "issuanceHour", 24 + self.DAY(), + "issuanceHour + 13", ".TONIGHT...", "late in the night", "early in the evening", 1, + narrativeDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), "issuanceHour + 13", + ".REST OF TONIGHT...", "early in the morning", "early in the evening", 1, + narrativeDefPM), + # For the early morning update, this 
produces: + # REST OF TONIGHT: + # MONDAY + # MONDAY NIGHT + ("Early Morning Update", "issuanceHour", self.DAY(), "issuanceHour + 13", + ".REST OF TONIGHT...", "early in the morning", "late in the afternoon", + 0, narrativeDefPM), + # Alternative + # For the early morning update, this produces: + # EARLY THIS MORNING: + # TODAY + # TONIGHT + #("Evening Update", "issuanceHour", 24 + self.DAY(), "issuanceHour + 13", + # ".REST OF TONIGHT...", "late in the night", "early in the evening", + # 1, narrativeDefPM), + #("Early Morning Update", "issuanceHour", self.DAY(), "issuanceHour + 13", + # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", + # 1, narrativeDefPM), + ] + + + + + # Handling visibility within the weather phrase + def visibility_wx_threshold(self, tree, node): + # Weather will be reported if the visibility is below + # this threshold (in NM) OR if it includes a + # significant_wx_visibility_subkey (see below) + return None + + def significant_wx_visibility_subkeys(self, tree, node): + # Weather values that constitute significant weather to + # be reported regardless of visibility. + # If your visibility_wx_threshold is None, you do not need + # to set up these subkeys since weather will always be + # reported. + # Set of tuples of weather key search tuples in the form: + # (cov type inten) + # Wildcards are permitted. 
+ return [("* T"), ("* FS")] + + # Configurable Weather Values + def wxCoverageDescriptors(self): + list = TextRules.TextRules.wxCoverageDescriptors(self) + #list.append(("Chc", "*", "*", "*", "a chance")) + return list + + def wxTypeDescriptors(self): + list = TextRules.TextRules.wxTypeDescriptors(self) + #list.append( ("*", "T", "*", "Dry", "dry thunderstorms") ) + #list.append( ("*", "RW", "*", "*", "rain showers") ) + return list + + def wxAttributeDescriptors(self): + list = TextRules.TextRules.wxAttributeDescriptors(self) + #list.append( ("*", "T", "*", "Dry", "") ) + return list + + def wxIntensityDescriptors(self): + list = TextRules.TextRules.wxIntensityDescriptors(self) + #list.append(("*", "RW", "--", "*", "light")) + return list + + def wxCombinations(self): + # This is the list of which wxTypes should be combined into one. + # For example, if ("RW", "R") appears, then wxTypes of "RW" and "R" will + # be combined into one key and the key with the dominant coverage will + # be used as the combined key. + # You may also specify a method which will be + # -- given arguments subkey1 and subkey2 and + # -- should return + # -- a flag = 1 if they are to be combined, 0 otherwise + # -- the combined key to be used + # Note: The method will be called twice, once with (subkey1, subkey2) + # and once with (subkey2, subkey1) so you can assume one ordering. 
+ # See the example below, "combine_T_RW" + # + return [ + ("RW", "R"), + ("SW", "S"), + self.combine_T_RW, + ] + + def combine_T_RW(self, subkey1, subkey2): + # Combine T and RW only if the coverage of T + # is dominant over the coverage of RW + wxType1 = subkey1.wxType() + wxType2 = subkey2.wxType() + if wxType1 == "T" and wxType2 == "RW": + order = self.dominantCoverageOrder(subkey1, subkey2) + if order == -1 or order == 0: + return 1, subkey1 + return 0, None + + def ExtendedLabel(self): + return { + "type": "component", + "methodList": [self.setLabel], + "analysisList": [], + "phraseList":[], + } + def setLabel(self, tree, component): + component.set("words", "\n.Extended forecast...\n") + return self.DONE() + + def generateForecast(self, argDict): + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + skipAreas = self._skipAreas(argDict) + argDict["editArea"] = (editArea, areaLabel) + if self.currentAreaContains(argDict, skipAreas): + continue + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._language = argDict["language"] + + # Tropical exceptions + try: + self._includeTropical = self._includeTropical == "Yes" + except: + self._includeTropical = False + if self._includeTropical: + self._periodCombining = 0 + if self._productIssuance == "Morning with Pre-1st Period": + self._productIssuance = "Morning" + if self._productIssuance == "Afternoon with Pre-1st Period": + self._productIssuance = "Afternoon" + + return None + + def _determineTimeRanges(self, argDict): + 
# Set up the Narrative Definition and initial Time Range + self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict)) + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._expireTime = self._issuanceInfo.expireTime() + self._issueTime = self._issuanceInfo.issueTime() + self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + if self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + staticIssueTime=re.sub(r'(\d{3,4} [AP]M).*',r'\1',self._productIssuance) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + # Re-calculate issueTime + #self._issueTime = self.strToGMT(staticIssueTime) + expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) + self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") + return None + + def _sampleData(self, argDict): + # Sample and analyze the data for the narrative + self._narrativeProcessor = ForecastNarrative.ForecastNarrative() + error = self._narrativeProcessor.getNarrativeData( + argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) + if error is not None: + return error + return None + + def _preProcessProduct(self, fcst, argDict): + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" +\ + "National 
Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + fcst = fcst + self._Text1() + try: + text2 = self._Text2(argDict) + except: + text2 = self._Text2() + fcst = fcst + text2 + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area combination + print(("Generating Forecast for", areaLabel)) + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas) + fcst = fcst + areaHeader + + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + + # Handle River BarForecast + barFcst = self._getBarForecast(editArea, areaLabel, argDict) + if barFcst != "": + fcst = fcst + barFcst + + # Handle regular zone forecast + else: + argDict["language"] = self._language + # Generate Narrative Forecast for Edit Area + fcstSegment = self._narrativeProcessor.generateForecast( + argDict, editArea, areaLabel) + + # Handle abbreviations + if self._useAbbreviations == 1: + fcstSegment = self.marine_abbreviateText(fcstSegment) + fcstSegment = re.sub(r'\n', r' ',fcstSegment) + fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) + fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) + fcst = fcst + fcstSegment + + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + "\n$$\n\n" + + def _postProcessProduct(self, fcst, argDict): + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + 
######################################################################## +## def _issuance_list(self, argDict): +## # This method sets up configurable issuance times with associated +## # narrative definitions. See the Text Product User Guide for documentation. +## if self._definition["includeEveningPeriod"] == 1: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), +## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriod", 12), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"), +## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriod", 12), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## else: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), +## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"), +## ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 12), ("CWFPeriod", 24), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] + +## return [ +## ("430 AM", self.DAY(), self.NIGHT(), 17, +## ".TODAY...", "early in the morning", "late in the afternoon", +## 1, narrativeDefAM), +## ("1030 AM", "issuanceHour", self.NIGHT(), 17, +## ".TODAY...", "early in the morning", "late in the afternoon", +## 1, narrativeDefAM), +## # End times are tomorrow: +## ("430 PM", self.NIGHT(), 24 + self.DAY(), 24 + 5, +## ".TONIGHT...", "late in the night", "early in the evening", +## 1, narrativeDefPM), +## ("1030 PM", "issuanceHour", 24 + self.DAY(), 24 + 5, +## ".TONIGHT...", "late in the night", "early in the evening", +## 1, narrativeDefPM), +## ] + + # Alternative issuance list using CWFPeriodMid +## def _issuance_list(self, argDict): +## # This method sets up configurable issuance times with associated +## # narrative definitions. See the Text Product User Guide for documentation. 
+## if self._definition["includeEveningPeriod"] == 1: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), +## ("CWFPeriodMid", 6), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"),("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), +## ("CWFPeriodMid", 6), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## else: +## narrativeDefAM = [ +## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 18), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] +## narrativeDefPM = [ +## ("CWFPeriod", "period1"), ("CWFPeriod", 12), ("CWFPeriod", 12), +## ("CWFPeriodMid", 12), ("CWFPeriodMid", 12), ("CWFPeriodMid", 18), +## ("CWFExtended", 24), ("CWFExtended", 24) +## ] + +## return [ +## # WRS modified the "label" and issuance starthour and expiration hours +## # early phrases +## # note: the start, end times and expiration times are local time +## # +## # note: self.DAY = 0600 Local time and self.NIGHT = 1800 Local time +## # +## # description -- text to appear in the startUp dialog for the product (e.g. 330 AM). +## # startHour -- start hour (in local time) for the first period. +## # These times are relative to self.DAY() and +## # self.NIGHT() which default to 6 and 18, respectively. +## # endHour -- end hour (in local time) for the first period. +## # These times are relative to self.DAY() and +## # self.NIGHT() which default to 6 and 18, respectively. The start +## # expirationHour -- hour when the product expires (in local time) +## # This is relitive to midnight local time of the +## # current day. +## # period1 Label -- the label for the first period. e.g. ".TODAY...", ".REST OF TODAY..." +## # period1 lateNight phrase -- phrase to use if the hours of 3am to 6am must be qualified +## # e.g. 
"Partly cloudy in the early morning." +## # period1 lateDay phrase -- phrase to use if the hours of 3pm to 6pm must be qualified +## # e.g. "Partly cloudy in the early evening." +## # todayFlag -- if 1, "Today" and "Tonight" phrasing will be used in subsequent periods, +## # otherwise, weekday wording will apply. +## # narrative definition -- component and time period pairs + +## # 330 AM Early morning issuance starts at 1200Z or when product is actually +## # is actually issued. Ends +## ("230 AM", self.DAY()-4, self.NIGHT(), 17, +## ".TODAY...", "before sunrise", "late afternoon", +## 1, narrativeDefAM), +## ("830 AM", self.DAY()+2, self.NIGHT(), 17, +## ".TODAY...", "early this morning", "late afternoon", +## 1, narrativeDefAM), +## # End times are tomorrow: +## ("230 PM", self.DAY()+8, self.NIGHT()+12, 24+5, +## ".TONIGHT...", "late tonight", "before dark", +## 1, narrativeDefPM), +## ("830 PM", self.NIGHT()+2, 24 + self.DAY(), 24+5, +## ".TONIGHT...", "late tonight", "before dark", +## 1, narrativeDefPM), +## ] + + def lateDay_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late day -- default 3pm-6pm + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateDayPhrase() + else: + return "late in the afternoon" + + def lateNight_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late night -- default 3am-6am + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateNightPhrase() + else: + return "early in the morning" + + def splitDay24HourLabel_flag(self, tree, node): + # Return 0 to have the TimeDescriptor module label 24 hour periods + # with simply the weekday name (e.g. Saturday) + # instead of including the day and night periods + # (e.g. 
Saturday and Saturday night) + # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" + # is set to zero. + # NOTE: This applied only to periods that are exactly 24-hours in length. + # Periods longer than that will always be split into day and night labels + # (e.g. SUNDAY THROUGH MONDAY NIGHT) + compName = node.getComponentName() + if compName == "CWFExtended": + return 0 + else: + return 1 + + def _skipAreas(self, argDict): + # These are edit areas that the formatter will skip + return [] + + # Returns a list of the Hazards allowed for this product in VTEC format. + # These are sorted in priority order - most important first. + def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] + tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CON", 'CAN', + 'EXP'] + return [ + ('HF.A', marineActions, 'Marine'), # HURRICANE FORCE WIND WATCH + ('SR.A', marineActions, 'Marine'), # STORM WATCH + ('GL.A', marineActions, 'Marine2'), # GALE WATCH + ('SE.A', marineActions, 'Marine3'), # HAZARDOUS SEAS + ('UP.A', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH + ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING + ('SR.W', marineActions, 'Marine'), # STORM WARNING + ('GL.W', marineActions, 'Marine2'), # GALE WARNING + ('SE.W', marineActions, 'Marine3'), # HAZARDOUS SEAS + ('UP.W', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING + ('HU.W', allActions, 'Hurricane'), # HURRICANE WARNING + ('TY.W', allActions, 'Typhoon'), # TYPHOON WARNING + ('TR.W', allActions, 'Tropical'), # TROPICAL STORM WARNING + ('HU.A', allActions, 'Hurricane'), # HURRICANE WATCH + ('TY.A', allActions, 'Typhoon'), # TYPHOON WATCH + ('TR.A', allActions, 'Tropical'), # TROPICAL STORM WATCH + ('UP.Y', marineActions, 'IceAccr'), # HEAVY FREEZING SPRAY ADVISORY + ('MF.Y', marineActions, 'Fog'), # MARINE DENSE FOG ADVISORY + ('MS.Y', marineActions, 'Smoke'), # MARINE 
DENSE SMOKE ADVISORY + ('MH.W', marineActions, 'Ashfall'), # MARINE VOLCANIC ASHFALL WARNING + ('MH.Y', marineActions, 'Ashfall'), # MARINE VOLCANIC ASHFALL ADVISORY + ('RB.Y', marineActions, 'Marine'), # ROUGH BAR + ('SI.Y', marineActions, 'Marine1'), # SMALL CRAFT ADVISORY + ('SC.Y', marineActions, 'Marine2'), # SMALL CRAFT ADVISORY + ('SW.Y', marineActions, 'Marine3'), # SMALL CRAFT ADVISORY + ('BW.Y', marineActions, 'Marine4'), # BRISK WIND ADVISORY + ('TO.A', marineActions, 'Convective'), # TORNADO WATCH + ('SV.A', marineActions, 'Convective'), # SEVERE THUNDERSTORM WATCH + ('LO.Y', marineActions, 'LowWater'), # LOW WATER ADVISORY + ] diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CivilEmerg.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CivilEmerg.py index b6381f8274..7151eb4fd4 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CivilEmerg.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CivilEmerg.py @@ -1,202 +1,202 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# CivilEmerg.py -# Standard File for Civil Emergencies -# -# Author: Matt Davis -# ---------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS 10-518 Directive for further information. -#------------------------------------------------------------------------- - -import GenericReport -import TextRules -import string, time, re, os, types, copy - - -class TextProduct(GenericReport.TextProduct): - - VariableList = [ - - # - # Local Agency configuration - # - - (("Source", "source") , - " EMERGENCY MANAGEMENT AGENCY ", - "alphaNumeric"), - - # - # Do not change these - # - - (("EAS Level", "eas") , "NONE", "radio", - ["NONE", - "URGENT - IMMEDIATE BROADCAST REQUESTED", - "BULLETIN - IMMEDIATE BROADCAST REQUESTED", - "BULLETIN - EAS ACTIVATION REQUESTED" - ]) - ] - - - Definition = { - "type": "smart", - "displayName": None, # for Product Generation Menu - "database" : "Official", # Source database. "Official", "Fcst", or "ISC" - - "outputFile": "{prddir}/TEXT/ADR_.txt", - "debug": 0, - - # Name of map background for creating Combinations - "mapNameForCombinations": "FIPS_", - - ## Edit Areas: Create Combinations file with edit area combinations. 
- "showZoneCombiner" : 1, # 1 to cause zone combiner to display - "defaultEditAreas" : "Combinations_ADR_", - - # product identifiers - "productName": "Administrative Message", # product name - "fullStationID" : "", # 4 letter station ID - "wmoID" : "", # WMO code - "wfoCityState" : "", # Location of WFO - "pil" : "", # product pil - "textdbPil" : "", # Product ID for storing to AWIPS text database. - "awipsWANPil" : "", # Product ID for transmitting to AWIPS WAN. - "wfoSiteID": "", - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - # Language - "language": "english", - "lineLength": 66, #Maximum line length - "includeCities" : 0, # Cities included in area header - "cityDescriptor" : "Including the cities of", - "includeZoneNames" : 0, # Zone names will be included in the area header - "includeIssueTime" : 0, # This should be set to zero - "singleComboOnly" : 1, # Used for non-segmented products - "purgeTime" : 3, # Expiration in hours - "callToAction" : 0, # disable call to action markers - } - - #Use Zone codes for PR and AR regions - if "" in ['PR','AR']: - Definition["mapNameForCombinations"] = "Zones_" - - def __init__(self): - GenericReport.TextProduct.__init__(self) - - def _preProcessProduct(self, fcst, argDict): - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - - # - # First, generate WMO lines - # - - fcst = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n" - - # - # Next, add the non-segmented UGC data - # - - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas, cityDescriptor=self._cityDescriptor, - includeCities=self._includeCities, includeZoneNames = self._includeZoneNames, - 
includeIssueTime = self._includeIssueTime) - - fcst = fcst + areaHeader + "\n" - - # - # Last, add the product name/time lines - # - - try: - if self._eas == "None": - self._eas = "" - else: - self._eas = self._eas + "\n" - except: - self._eas = "" - - try: - source = self._source + '\n' - except: - source = "" - - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, self._productName) - fcst = fcst + self._eas + productName + "\n" +\ - source +\ - "Relayed by National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - fcst = fcst + "...Administrative Message...\n\n" - fcst = fcst + "The following message is transmitted" + \ - " at the request of the " + self._source + "." - - if self._callToAction: - fcst = self._makeCallToAction(fcst, editArea, areaLabel, argDict) - - return fcst - - def _makeCallToAction(self, fcst, editArea, areaLabel, argDict): - ctaBodyPhrase = "" - if self._callToAction: - ctaBodyPhrase = "\n\nPRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n" + \ - ctaBodyPhrase + \ - "|* Call to action goes here *|\n\n" + \ - "\n\n&&\n\n" - fcst = fcst + ctaBodyPhrase - return fcst - - def _postProcessProduct(self, fcst, argDict): - # - # Clean up multiple line feeds - # - - fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) - fcst = fixMultiLF.sub(r'\1', fcst) - - - fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "...", "-"]) - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. 
+## + +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# CivilEmerg.py +# Standard File for Civil Emergencies +# +# Author: Matt Davis +# ---------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS 10-518 Directive for further information. +#------------------------------------------------------------------------- + +import GenericReport +import TextRules +import string, time, re, os, types, copy + + +class TextProduct(GenericReport.TextProduct): + + VariableList = [ + + # + # Local Agency configuration + # + + (("Source", "source") , + " EMERGENCY MANAGEMENT AGENCY ", + "alphaNumeric"), + + # + # Do not change these + # + + (("EAS Level", "eas") , "NONE", "radio", + ["NONE", + "URGENT - IMMEDIATE BROADCAST REQUESTED", + "BULLETIN - IMMEDIATE BROADCAST REQUESTED", + "BULLETIN - EAS ACTIVATION REQUESTED" + ]) + ] + + + Definition = { + "type": "smart", + "displayName": None, # for Product Generation Menu + "database" : "Official", # Source database. "Official", "Fcst", or "ISC" + + "outputFile": "{prddir}/TEXT/ADR_.txt", + "debug": 0, + + # Name of map background for creating Combinations + "mapNameForCombinations": "FIPS_", + + ## Edit Areas: Create Combinations file with edit area combinations. + "showZoneCombiner" : 1, # 1 to cause zone combiner to display + "defaultEditAreas" : "Combinations_ADR_", + + # product identifiers + "productName": "Administrative Message", # product name + "fullStationID" : "", # 4 letter station ID + "wmoID" : "", # WMO code + "wfoCityState" : "", # Location of WFO + "pil" : "", # product pil + "textdbPil" : "", # Product ID for storing to AWIPS text database. 
+ "awipsWANPil" : "", # Product ID for transmitting to AWIPS WAN. + "wfoSiteID": "", + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + # Language + "language": "english", + "lineLength": 66, #Maximum line length + "includeCities" : 0, # Cities included in area header + "cityDescriptor" : "Including the cities of", + "includeZoneNames" : 0, # Zone names will be included in the area header + "includeIssueTime" : 0, # This should be set to zero + "singleComboOnly" : 1, # Used for non-segmented products + "purgeTime" : 3, # Expiration in hours + "callToAction" : 0, # disable call to action markers + } + + #Use Zone codes for PR and AR regions + if "" in ['PR','AR']: + Definition["mapNameForCombinations"] = "Zones_" + + def __init__(self): + GenericReport.TextProduct.__init__(self) + + def _preProcessProduct(self, fcst, argDict): + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + + # + # First, generate WMO lines + # + + fcst = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n" + + # + # Next, add the non-segmented UGC data + # + + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas, cityDescriptor=self._cityDescriptor, + includeCities=self._includeCities, includeZoneNames = self._includeZoneNames, + includeIssueTime = self._includeIssueTime) + + fcst = fcst + areaHeader + "\n" + + # + # Last, add the product name/time lines + # + + try: + if self._eas == "None": + self._eas = "" + else: + self._eas = self._eas + "\n" + except: + self._eas = "" + + try: + source = self._source + '\n' + except: + source = "" + + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, 
self._productName) + fcst = fcst + self._eas + productName + "\n" +\ + source +\ + "Relayed by National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + fcst = fcst + "...Administrative Message...\n\n" + fcst = fcst + "The following message is transmitted" + \ + " at the request of the " + self._source + "." + + if self._callToAction: + fcst = self._makeCallToAction(fcst, editArea, areaLabel, argDict) + + return fcst + + def _makeCallToAction(self, fcst, editArea, areaLabel, argDict): + ctaBodyPhrase = "" + if self._callToAction: + ctaBodyPhrase = "\n\nPRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n" + \ + ctaBodyPhrase + \ + "|* Call to action goes here *|\n\n" + \ + "\n\n&&\n\n" + fcst = fcst + ctaBodyPhrase + return fcst + + def _postProcessProduct(self, fcst, argDict): + # + # Clean up multiple line feeds + # + + fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) + fcst = fixMultiLF.sub(r'\1', fcst) + + + fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "...", "-"]) + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CivilEmerg_EQR_MultiPil_Local.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CivilEmerg_EQR_MultiPil_Local.py index 7f582cd024..fd4f42e65d 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CivilEmerg_EQR_MultiPil_Local.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/CivilEmerg_EQR_MultiPil_Local.py @@ -1,129 +1,129 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. 
+## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# CivilEmerg_EQR_Local -# This product produces an earthquake report. -# -# Author: davis -# ---------------------------------------------------------------------------- -## -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 20, 2014 #3685 randerso Changed to support mixed case -# Oct 10, 2016 #5749 randerso Converted ellipses to commas -# -## - -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS 10-518 Directive for further information. 
-#------------------------------------------------------------------------- - -import CivilEmerg -import TextRules -import string, time, re, os, types, copy - -class TextProduct(CivilEmerg.TextProduct): - Definition = copy.deepcopy(CivilEmerg.TextProduct.Definition) - Definition["displayName"] = "CivilEmergency_EQR_ (Earthquake Report)" - # for Product Generation Menu - Definition["outputFile"] = "{prddir}/TEXT/EQR_.txt" - ## Edit Areas: Create Combinations file with edit area combinations. - Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display - Definition["defaultEditAreas"] = "Combinations_EQR_" - Definition["productName"] = "Earthquake Report" # product name - Definition["wmoID"] = "" # WMO code - Definition["pil"] = "" # product pil - Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. - Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. - - Definition["mapNameForCombinations"] = "Zones_" - - VariableList = [ - (("Issuance Type", "issuanceType") , "Preliminary", "radio", - ["Preliminary", "Update"]), - (("Official Earthquake Info Source:", "eqInfo") , "Golden", "radio", - ["Golden", "WC/ATWC", "PTWC"]), - (("Felt:", "felt") , "weakly", "radio", - ["weakly", "moderately", "strongly", "very strongly"]), - (("How Many Reports:", "extent") , "a single person", "radio", - ["a single person", "a few people", "many people", "numerous persons"]), - (("Damage", "damage") , "No", "radio", - ["No", "Slight", "Moderate", "Considerable", "Extensive"]), - (("Damage Type", "damageType") , [], "check", - ["objects falling from shelves", "dishes rattled or broken", - "cracked chimneys", "communications towers fallen", - "collapsed bridges", "collapsed overpasses", "train rails bent", - "fissures have opened in the ground", "gas mains broken", - "complete destruction of structures", "some casualties"]), - ] - - def __init__(self): - CivilEmerg.TextProduct.__init__(self) - - def _makeProduct(self, fcst, 
editArea, areaLabel, argDict): - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - fcst = fcst + "An earthquake has been felt " + self._felt + " by " +\ - self._extent + " " + "in the |*enter area*| area. " + \ - self._damage + " damage has been reported. " - - if self._damage != "No": - fcst = fcst + " Damage reports so far: " - fcst = fcst + self.punctuateList(self._damageType) - fcst = fcst + "." - - fcst = fcst + "\n\n" - - if self._eqInfo == "Golden": - eqOffice = "National Earthquake Information Center in Golden Colorado" - elif self._eqInfo == "WC/ATWC": - eqOffice = "West Coast/Alaska Tsunami Warning Center" - else: - eqOffice = "Pacific Tsunami Warning Center" - - - if self._issuanceType == "Preliminary": - fcst = fcst + "Information released in this statement is preliminary. Updates, including Richter scale magnitude, will be provided as more information becomes available from the " + eqOffice + "." - else: - fcst = fcst + "The " + eqOffice + " has indicated that an earthquake magnitude *mag* on the Richter scale was centered at *lat*/*lon* or about *miles* *direction* of *city*, *state*.\n\nAny further information will be made available when it is received from the " + eqOffice + "." - - return fcst - - def _postProcessProduct(self, fcst, argDict): - fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "...", "-"]) - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. 
+## + +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# CivilEmerg_EQR_Local +# This product produces an earthquake report. +# +# Author: davis +# ---------------------------------------------------------------------------- +## +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 20, 2014 #3685 randerso Changed to support mixed case +# Oct 10, 2016 #5749 randerso Converted ellipses to commas +# +## + +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS 10-518 Directive for further information. +#------------------------------------------------------------------------- + +import CivilEmerg +import TextRules +import string, time, re, os, types, copy + +class TextProduct(CivilEmerg.TextProduct): + Definition = copy.deepcopy(CivilEmerg.TextProduct.Definition) + Definition["displayName"] = "CivilEmergency_EQR_ (Earthquake Report)" + # for Product Generation Menu + Definition["outputFile"] = "{prddir}/TEXT/EQR_.txt" + ## Edit Areas: Create Combinations file with edit area combinations. + Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display + Definition["defaultEditAreas"] = "Combinations_EQR_" + Definition["productName"] = "Earthquake Report" # product name + Definition["wmoID"] = "" # WMO code + Definition["pil"] = "" # product pil + Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. + Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. 
+ + Definition["mapNameForCombinations"] = "Zones_" + + VariableList = [ + (("Issuance Type", "issuanceType") , "Preliminary", "radio", + ["Preliminary", "Update"]), + (("Official Earthquake Info Source:", "eqInfo") , "Golden", "radio", + ["Golden", "WC/ATWC", "PTWC"]), + (("Felt:", "felt") , "weakly", "radio", + ["weakly", "moderately", "strongly", "very strongly"]), + (("How Many Reports:", "extent") , "a single person", "radio", + ["a single person", "a few people", "many people", "numerous persons"]), + (("Damage", "damage") , "No", "radio", + ["No", "Slight", "Moderate", "Considerable", "Extensive"]), + (("Damage Type", "damageType") , [], "check", + ["objects falling from shelves", "dishes rattled or broken", + "cracked chimneys", "communications towers fallen", + "collapsed bridges", "collapsed overpasses", "train rails bent", + "fissures have opened in the ground", "gas mains broken", + "complete destruction of structures", "some casualties"]), + ] + + def __init__(self): + CivilEmerg.TextProduct.__init__(self) + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + fcst = fcst + "An earthquake has been felt " + self._felt + " by " +\ + self._extent + " " + "in the |*enter area*| area. " + \ + self._damage + " damage has been reported. " + + if self._damage != "No": + fcst = fcst + " Damage reports so far: " + fcst = fcst + self.punctuateList(self._damageType) + fcst = fcst + "." + + fcst = fcst + "\n\n" + + if self._eqInfo == "Golden": + eqOffice = "National Earthquake Information Center in Golden Colorado" + elif self._eqInfo == "WC/ATWC": + eqOffice = "West Coast/Alaska Tsunami Warning Center" + else: + eqOffice = "Pacific Tsunami Warning Center" + + + if self._issuanceType == "Preliminary": + fcst = fcst + "Information released in this statement is preliminary. 
Updates, including Richter scale magnitude, will be provided as more information becomes available from the " + eqOffice + "." + else: + fcst = fcst + "The " + eqOffice + " has indicated that an earthquake magnitude *mag* on the Richter scale was centered at *lat*/*lon* or about *miles* *direction* of *city*, *state*.\n\nAny further information will be made available when it is received from the " + eqOffice + "." + + return fcst + + def _postProcessProduct(self, fcst, argDict): + fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "...", "-"]) + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWF.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWF.py index 0b3587ea26..881733c074 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWF.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWF.py @@ -1,1045 +1,1045 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: This produces a narrative Fire Weather Forecast. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# FWF, FWF___Definition, FWF__Overrides -#------------------------------------------------------------------------- -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas, default is Combinations -# -# productName defines name of product e.g. "Zone Forecast Product" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "SFTBOS" -# areaName (opt.) Area name for product header, such as "Western New York" -# wfoCityState WFO location, such as "Buffalo NY" -# -# Optional Configuration Items -# editAreaSuffix default None. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. 
So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined for the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# periodCombining If 1, an attempt will be made to combine components -# or time periods into one. Otherwise no period -# combining will will be done. -# defaultEditAreas defines edit areas, default is Combinations -# areaDictionary Modify the AreaDictionary utility with UGC information -# about zones -# useRH If 1, use RH grids instead of MaxRH, MinRH -# summaryExtended -# summaryArea If summaryExtended == 1, then a summary extended forecast will -# be generated for the given summaryArea -# individualExtended If individualExtended == 1, an extended forecast will be -# generated for each individual area -# extendedLabel If extendedLabel== 1, a label will be included for each -# individual extended -# lightningPhrases Set this to 1 if you want Lightning Activity -# reported with phrases like "1-8 strikes", -# "9-15 strikes", etc. 
-# windAdjustmentFactor Winds are reported from the Wind20ft grid -# if available. Otherwise, the Wind grid is -# used with the magnitude multiplied -# by this wind adjustment factor. Winds reported -# by RAWS sites are frequently lower than ASOS winds -# due to the fact that they use a 10-min average. -# A common adjustment factor is 80% (0.80). -# If you want no adjustment to the winds -# then set this variable to 1.00. -# NOTE: This value can optionally be specified as an -# nlValue dictionary. -# -# tempLocalEffects Set to 1 to after defining edit areas AboveElev -# and BelowElev to enable local effects for temperature -# and RH -# windLocalEffects Set to 1 after defining edit areas Ridges and Valleys -# to enable local effects for wind -# -# lineLength defaults to 66, specifies length of lines in output -# -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -# -# Multiple Element Table -# To include a MultipleElementTable (e.g. Temp Pop Values) -# for each area in the current Combination: -# Set "includeMultipleElementTable" to 1 -# Set the "elementList" and "singleValueFormat" flag if desired -# "elementList" may include "Temp", "PoP", and/or "Humidity" -# "singleValueFormat" lists only one value per element -# Make sure you are using a Combinations file -# Modify the CityDictionary TextUtility file or create a new one -# To include a MultipleElementTable (e.g. 
Temp Pop Values) -# for each period for each area in the current Combination: -# Set "includeMultipleElementTable_perPeriod" to 1 -# Set the "elementList" and -# Set the "singleValueFormat" flag to 1 -# "elementList" may include "Temp", "PoP", and/or "Humidity" -# Make sure you are using a Combinations file -# Modify the CityDictionary TextUtility file or create a new one -# -# includeMultipleElementTable -# includeMultipleElementTable_perPeriod -# elementList -# singleValueFormat -# cityDictionary -# -# Weather-related flags -# hoursSChcEnds - specifies hours past the beginning of the first -# first period of the product to stop including 'Slight -# Chance' or 'Isolated' weather types (ERH policy -# allows values of 1-5 * 12 hour periods) -# -# useHolidays Set to 1 to use holidays in the time period labels -# includeTrends Set to 1 to include Temp and RH trends -# -# Trouble-shooting items -# passLimit -- Limit on passes allowed through Narrative Tree -# trace -- Set to 1 to turn on trace through Narrative Tree -# -# NARRATIVE CUSTOMIZATION POINTS -# The phrases in this product can be customized in many ways by overriding -# infrastructure methods in the Local file. -# You will see common overrides in the Local file and you may change them -# in that there. -# For further customization, you can determine which phrases your product is -# using by examining the Component Product Definitions below. -# Then, you can look up the phrase in the Text Product User Guide which will -# describe the all the relevant override methods associated with the phrase. -# Refer to the Customization section of the Text Product User Guide -# for step-by-step information. -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Hazards (optional): If provided, headlines will be generated. 
-# Sky, LAL, Wind (6 hourly), MaxRH, MinRH (out 2 days), PoP, MaxT, -# MinT, T, Wx (out to 7 days) -# Optional: -# Ttrend, RHtrend, Haines, TransWind, MixHgt, VentRate, CWR -# MarineLayer: If used, uncomment MarineLayer lines in: -# getFirePeriod_analysisList -# getFirePeriod_phraseList -#------------------------------------------------------------------------- -# Edit Areas Needed: -# Optional: -# For local effects: AboveElev, BelowElev -# NOTE: Set the Definition["tempLocalEffects"] = 1 in the Site Definition File -# Define edit areas with an appropriate elevation -# (e.g. in CO, it is 11000 for timberline). -# This will be used to report local effects for temperature and RH. -# For Valley/Ridge Winds: Valleys, Ridges -# NOTE: Set the Definition["windLocalEffects"] = 1 in the Site Definition File -# Define edit areas for Valleys and Ridges to be intersected with the current -# area for reporting as separate Valley and Ridge winds. -# Be sure and set up the list of areas for which you want separate -# Valley and Ridge winds in the "ridgeValleyAreas" list in the Local file. -# IF you do NOT want to distinguish between Valley and Ridge winds, -# you can comment out these edit area entries in the "intersectArea" section -# of the Local file. -# Fire Area for the extended summary. This defaults to your CWA, but you -# can change it in the Definition overrides if you like. -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. 
Combinations file: -# Combinations -#------------------------------------------------------------------------- -# Component Products: -# FirePeriod (component) -# ExtendedLabel(component) -# FireExtendedShortTerm (component) -# FireExtended (component) -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". -#------------------------------------------------------------------------- -# Additional Information: -# Modify the _tempLocalEffects and _rhLocalEffects " above timberline " -# descriptor (in the FWF_Local file) to appropriate wording for your area. -# -# COMMON OVERRIDES -# from FWF -# getFirePeriod_intersectAreas -# _tempLocalEffects_list -# _rhLocalEffects_list -# from ConfigVariables: -# units_descriptor_dict -# untilPhrasing_flag_dict -# from FirePhrases: -# includeSkyRanges_flag -# ridgeValleyAreas -# from WxPhrases: -# pop_wx_lower_threshold -# wxCoverageDescriptors -# wxTypeDescriptors -# wxAttributeDescriptors -# wxIntensityDescriptors -# wxCombinations -# combine_T_RW -# -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Fire Weather Services. -#------------------------------------------------------------------------- - - -import TextRules -import SampleAnalysis -import ForecastNarrative -import time, string, types -import TimeRange - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [] - Definition = { - "type": "smart", - "displayName": "None", - # Source database for product. Can be "Official", "Fcst" or "ISC" - "database": "Official", - # Defines output location of finished product. 
- "outputFile": "{prddir}/TEXT/FWF_.txt", - "debug": 0, - # Name of map background for creating Combinations - "mapNameForCombinations": "FireWxZones_", - - ## Edit Areas: Create Combinations file with edit area combinations. - "showZoneCombiner" : 1, # 1 to cause zone combiner to display - "defaultEditAreas" : "Combinations_FWF__", - "editAreaSuffix": None, - - # product identifiers - "productName": "Fire Weather Planning Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "Georgia" -- optional - "wfoCityState": "", # Location of WFO - city state - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - "periodCombining" : 0, # If 1, combine periods, if possible - "lineLength" : 66, # line length - - "hazardSamplingThreshold": (10, None), #(%cov, #points) - - # Product-specific variables: - "lightningPhrases": 0, # Set to 1 to report lightning as phrases (e.g. 1-8 STRIKES) - "windAdjustmentFactor": 0.80, # Adjustment for Wind if no Wind20ft grid available - - # To include a MultipleElementTable (e.g. 
Temp Pop Table) - # for each area in the current Combination: - # Set "includeMultipleElement" to 1 - # Set the "elementList" and "singleValueFormat" flag if desired - # "elementList" may include "Temp", "PoP", and/or "Humidity" - # "singleValueFormat" lists only one value per element - # Make sure you are using a Combinations file - # Modify the CityDictionary TextUtility file or create a new one - "includeMultipleElementTable": 0, - "includeMultipleElementTable_perPeriod": 0, - "elementList" : ["Temp", "Humidity", "PoP"], - "singleValueFormat": 0, - "cityDictionary": "CityDictionary", - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - # Language - "language": "english", - - "useRH": 0, # Use RH grids instead of MaxRH, MinRH - # If summaryExtended == 1, then a summary extended forecast will - # be generated for the given summaryArea - "summaryExtended": 1, - "summaryArea":"FireWxAOR__", - # If individualExtended == 1, an extended forecast will be - # generated for each individual area - # If extendedLabel == 1, a label will be included for each - # individual extended - "individualExtended": 0, - "extendedLabel": 0, - - # Weather-related flags - "hoursSChcEnds": 24, - - "useHolidays": 1, # Set to 1 to use holidays in the time period labels - "includeTrends": 1, # Set to 1 to include Temp and RH trends - "tempLocalEffects": 0, # Set to 1 to enable Temp and RH local effects AFTER - # creating AboveElev and BelowElev edit areas - "windLocalEffects": 0, # Set to 1 to enable wind local effects AFTER - # creating Ridges and Valleys edit areas - "fixedExpire": 1, #ensure VTEC actions don't affect expiration time - - - # Trouble-shooting items - "passLimit": 20, # Limit on passes allowed through - # Narrative Tree - "trace": 0, # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - 
######################################################################## - # OVERRIDING THRESHOLDS AND VARIABLES - ######################################################################## - - # Uncomment any combinations you wish to collapse. - # For example, if the first entry is uncommented, - # the phrase: scattered rain showers and widespread rain - # will collapse to: scattered rain showers. - def wxCombinations(self): - return [ - ("RW", "R"), - ("SW", "S"), - ## ("T","RW"), - ] - - def minMax_std_deviation(self, parmHisto, timeRange, componentName): - # Replaces MINMAX_STD_DEVIATION - # Number of standard deviations to compute around the weighted - # average for a stdDev_MinMax - return 1.4 - - def element_outUnits_dict(self, tree, node): - dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) - dict["Wind"] = "mph" - dict["Wind20ft"] = "mph" - dict["TransWind"] = "mph" - dict["FreeWind"] = "mph" - dict["WindGust"] = "mph" - return dict - - ############################# - # Overrides to take care of Wind in the Extended forecast - # Use Wind20ft if available, else use adjusted Wind - # - def adjust_method_dict(self, tree, node): - # Special adjustment methods - # - return { - "Wind": self._adjustWind, - } - - def _adjustWind(self, value): - # adjustment for winds - factor = self.nlValue(self._windAdjustmentFactor, value) - value = value * factor - return value - - def wind_summary_words(self, tree, node): - # See if there's data for Wind20ft - elementName = "Wind" - args = node.get("args") - if args is not None: - elementName = args[0] - if elementName == "Wind": - elementName = self.chooseElement(tree, node, ["Wind20ft", "Wind"]) - words = self.vector_summary(tree, node, elementName) - return self.setWords(node, words) - - def wind_setUp(self, tree, node, gustFlag=0, element="Wind", connectorMethod=None): - args = node.get("args") - if args is not None: - element = args[0] - if element == "Wind": - # See if there's data for Wind20ft - element = 
self.chooseElement(tree, node, ["Wind20ft", "Wind"]) - wind = self.ElementInfo(element, "List", self.VECTOR()) - elementInfoList = [wind] - if gustFlag: - windGust = self.ElementInfo( - "WindGust", "Max", phraseDef=self.gust_phrase) - elementInfoList.append(windGust) - node.set("gustFlag", 1) - if connectorMethod is None: - connectorMethod = self.vectorConnector - self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) - return self.DONE() - - def phrase_descriptor_dict(self, tree, node): - # Descriptors for phrases - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - # If not extended, make descriptor empty - componentName = node.getComponent().get("name") - if componentName == "FirePeriod": - dict["Wind"] = "" - dict["Wind20ft"] = "" - return dict - - def nextDay24HourLabel_flag(self, tree, node): - # Return 1 to have the TimeDescriptor module label 24 hour periods starting - # after 1600 as the next day. - # This is needed for the Fire Weather Extended product, - # but not for other products when period combining. - return 1 - - def untilPhrasing_flag_dict(self, tree, node): - # If set to 1, "until" time descriptor phrasing will be used. - # E.g. "North winds 20 MPH until 10 AM, then 35 MPH" - # - # NOTE: Be sure to increase the temporal resolution by - # overriding "getFirePeriod_analysisList" from the FWF standard file. - # E.g. 
("MixHgt", self.minMax, [0]), - - dict = TextRules.TextRules.untilPhrasing_flag_dict(self, tree, node) - dict["LAL"] = 1 - componentName = node.getComponent().get("name") - if componentName == "FirePeriod": - dict["Sky"] = 1 - dict["Wx"] = 1 - return dict - - ######################################################################## - # COMPONENT PRODUCT DEFINITIONS - ######################################################################## - - def _PoP_analysisMethod(self, componentName): - # Alternative PoP analysis methods for consistency between PoP and Wx - #return self.maxMode - #return self.maximum - return self.stdDevMaxAvg - - def FirePeriod(self): - phraseList = self.getFirePeriod_phraseList() - analysisList = self.getFirePeriod_analysisList() - intersectAreas = self.getFirePeriod_intersectAreas() - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assembleIndentedPhrases, - ], - "analysisList": analysisList, - "phraseList": phraseList, - "intersectAreas": intersectAreas, - } - - def ExtraSampling(self): - analysisList = [ - ("MaxT", self.mode, [0]), - ("MinT", self.mode, [0]), - ] - if self._useRH: - analysisList += [("RH", self.mode, [0])] - else: - analysisList += [("MaxRH", self.mode, [0]), - ("MinRH", self.mode, [0])] - return { - "type": "component", - "methodList": [self.noWords], - "analysisList": analysisList, - "phraseList": [], - "intersectAreas": self.getFirePeriod_intersectAreas(), - } - - def getFirePeriod_analysisList(self): - analysisList = [ - ("Sky", self.minMax, [0]), - ("PoP", self._PoP_analysisMethod("FirePeriod"), [3]), - ("PoP", self.binnedPercent, [3]), - ("Wx", self.rankedWx, [0]), - ("LAL", self.maximum, [0]), - ("LAL", self.binnedPercent, [0]), - ("MaxT", self.minMax), - ("MinT", self.minMax), - ("T", self.minMax), - ("MaxRH", self.minMax), - ("MinRH", self.minMax), - ("RH", self.minMax), - ("MaxT", self.mode), # for trends - ("MinT", self.mode), # for trends - ("MaxRH", self.mode), # for trends 
- ("MinRH", self.mode), # for trends - ("RH", self.mode), # for trends - ("Ttrend", self.avg), # for trends - ("RHtrend", self.avg), # for trends - ("Wind", self.vectorAvg, [6]), - ("Wind20ft", self.vectorAvg, [6]), - #("RidgeWind", self.vectorAvg, [6]), - ("Haines", self.avg), - ("TransWind", self.vectorAvg, [6]), - ("FreeWind", self.vectorAvg, [6]), - ("MixHgt", self.minMax), - ("VentRate", self.minMax), - ("CWR", self.avg), - #("MarineLayer", self.minMax), - ] - return analysisList - - def getFirePeriod_phraseList(self): - if self._useRH: - dayRH = "RH" - nightRH = "RH" - else: - dayRH = "MinRH" - nightRH = "MaxRH" - phraseList = [ - self.skyWeather_byTimeRange_compoundPhrase, - self.lal_phrase, - (self.dayOrNight_phrase, ["MaxT", "MinT", 1, 1], - self._tempLocalEffects_list()), - (self.trend_DayOrNight_phrase, ["MaxT", "MinT", "Ttrend", 1, 1], - self._tempTrendLocalEffects_list()), - (self.dayOrNight_phrase, [dayRH, nightRH, 1, 1], self._rhLocalEffects_list()), - (self.trend_DayOrNight_phrase, [dayRH, nightRH, "RHtrend", 1, 1], - self._rhTrendLocalEffects_list()), - self.humidityRecovery_phrase, - self.fireWind_compoundPhrase, - self.fireWind_label_phrase, - self.fireValleyWind_compoundPhrase, - self.fireRidgeWind_compoundPhrase, - self.haines_phrase, - self.smokeDispersal_phrase, - self.mixingHgt_phrase, - self.transportWind_phrase, - #self.freeWind_phrase, - self.cwr_phrase, - #self.marineLayer_phrase, - ] - # Remove trend methods - if self._includeTrends != 1: - newList = [] - for phrase in phraseList: - if type(phrase) is types.TupleType: - phraseMethod = phrase[0] - if phraseMethod == self.trend_DayOrNight_phrase: - continue - newList.append(phrase) - phraseList = newList - # Add multipleElementTable - if self._includeMultipleElementTable_perPeriod: - phraseList.append(self.multipleElementTable_perPeriod_phrase) - return phraseList - - def getFirePeriod_intersectAreas(self): - tempList = [] - windList = [] - if self._tempLocalEffects: - tempList = [ - 
("MinT", ["BelowElev", "AboveElev"]), - ("MaxT", ["BelowElev", "AboveElev"]), - ("MinRH", ["BelowElev", "AboveElev"]), - ("MaxRH", ["BelowElev", "AboveElev"]), - ("RH", ["BelowElev", "AboveElev"]), - ("Ttrend", ["BelowElev", "AboveElev"]), - ("RHtrend", ["BelowElev", "AboveElev"]), - ] - if self._windLocalEffects: - windList = [ - ("Wind", ["Valleys", "Ridges"]), - ("Wind20ft", ["Valleys", "Ridges"]), - ("WindGust", ["Valleys", "Ridges"]), - ] - return tempList + windList - - def _tempLocalEffects_list(self): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", " above timberline") - return [self.LocalEffect([leArea1, leArea2], 8, ", except ")] - - def _rhLocalEffects_list(self): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", " above timberline") - return [self.LocalEffect([leArea1, leArea2], self._rhTrigger, ", except ")] - - def _tempTrendLocalEffects_list(self): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", "above timberline") - return [self.LocalEffect([leArea1, leArea2], self._trendTrigger, ", except ")] - - def _rhTrendLocalEffects_list(self): - leArea1 = self.LocalEffectArea("BelowElev", "") - leArea2 = self.LocalEffectArea("AboveElev", "above timberline") - return [self.LocalEffect([leArea1, leArea2], self._trendTrigger, ", except ")] - - def _rhTrigger(self, tree, node, localEffect, leArea1Label, leArea2Label): - first = node.getAncestor("firstElement") - element = first.name - dataType = first.dataType - timeRange = node.getTimeRange() - mergeMethod="MinMax" - if element == "RH": - day = self.getPeriod(timeRange, 1) - if day: - mergeMethod="Min" - else: - mergeMethod="Max" - - area1Stats = tree.stats.get(element, timeRange, leArea1Label, - mergeMethod=mergeMethod) - area2Stats = tree.stats.get(element, timeRange, leArea2Label, - mergeMethod=mergeMethod) - #print "\nLocal effects", element, timeRange - #print 
leArea1Label, area1Stats - #print leArea2Label, area2Stats - if area1Stats is None or area2Stats is None: - return 0 - flag = self.checkLocalEffectDifference( - tree, node, dataType, 8, area1Stats, area2Stats, - leArea1Label, leArea2Label) - return flag - - def _trendTrigger(self, tree, node, localEffect, leArea1Label, leArea2Label): - #print "*** Inside _trendTrigger ***" - first = node.getAncestor("firstElement") - element = first.name - #print "element", element - dataType = first.dataType - timeRange = node.getTimeRange() - - if element.find("T") >= 0: - trendElement = "Ttrend" - else: - trendElement="RHtrend" - - # trend stats - area1AbsDiff, area1RawDiff = self.getTrendStats( - tree, node, element, timeRange, leArea1Label, trendElement) - area2AbsDiff, area2RawDiff = self.getTrendStats( - tree, node, element, timeRange, leArea2Label, trendElement) - - #print "\nTrend Local effects", element, timeRange - #print "\t\tBelow", area1RawDiff - #print "\t\tAbove", area2RawDiff - - if area1AbsDiff is None or area2AbsDiff is None: - return 0 - - # Use rawDiff because sign is important e.g. warmer vs. 
cooler - flag = self.checkLocalEffectDifference( - tree, node, dataType, 4, area1RawDiff, area2RawDiff, - leArea1Label, leArea2Label) - return flag - - def skyPopWx_excludePoP_flag(self, tree, node): - # If set to 1, PoP will not be included in the skyPopWx_phrase - return 1 - - def ExtendedLabel(self): - return { - "type": "component", - "methodList": [self.setLabel], - "analysisList": [], - "phraseList":[], - } - def setLabel(self, tree, component): - component.set("words", "\n.Forecast days 3 through 7......\n") - return self.DONE() - - def FireExtendedShortTerm(self): - return { - "type": "component", - "methodList": [ - self.orderPhrases, - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("MinT", self.firstAvg), - ("MaxT", self.avg), - ("T", self.hourlyTemp), - ("Sky", self.avg, [12]), - ("Wind", self.vectorAvg), - ("Wind20ft", self.vectorAvg), - ("Wx", self.rankedWx, [6]), - ("PoP", self._PoP_analysisMethod("FireExtendedShortTerm"), [6]), - ("PoP", self.binnedPercent, [6]), - ], - "phraseList":[ - self.reportTrends, - self.wind_summary, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.lows_phrase, - self.highs_phrase, - self.wind_phrase, - ], - } - - def FireExtended(self): - return { - "type": "component", - "methodList": [ - self.orderPhrases, - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("MinT", self.firstAvg), - ("MaxT", self.avg), - ("T", self.hourlyTemp), - ("Sky", self.avg, [12]), - ("Wx", self.rankedWx, [6]), - ("PoP", self._PoP_analysisMethod("FireExtended"), [6]), - ("PoP", self.binnedPercent, [6]), - ], - "phraseList":[ - self.reportTrends, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.lows_phrase, - self.highs_phrase, - ], - } - - def ExtendedNarrative(self): - return { - "type": "narrative", - "methodList": [self.assembleChildWords], - ## Components - "narrativeDef": [ - 
("FireExtendedShortTerm",24),("FireExtendedShortTerm",24), - ("FireExtendedShortTerm",24), - ("FireExtended",24),("FireExtended",24) - ], - } - - def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." - - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._language = argDict["language"] - return 
None - - def _determineTimeRanges(self, argDict): - # Set up the Narrative Definition and initial Time Range - self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict), argDict["creationTime"]) - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._expireTime = self._issuanceInfo.expireTime() - self._issueTime = self._issuanceInfo.issueTime() - self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - if self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - # Determine the extended range - if self._individualExtended == 1: - self._extendedStart = self._timeRange.endTime() - 24*5*3600 - else: - self._extendedStart = self._timeRange.endTime() - self._extendedRange = TimeRange.TimeRange( - self._extendedStart, self._extendedStart + 3600) - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - return None - - def _sampleData(self, argDict): - # Sample and analyze the data for the narrative - self._narrativeProcessor = ForecastNarrative.ForecastNarrative() - error = self._narrativeProcessor.getNarrativeData( - argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) - if error is not None: - return error - return None - - def _preProcessProduct(self, fcst, argDict): - # Product header - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = 
fcst + s.upper() - - s = productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - # Put in a place holder for the headlines to be substituted in - # "postProcessProduct" - fcst = fcst + "" - self._prodHeadlines = [] - - fcst = fcst + ".DISCUSSION..." + "\n\n\n\n\n" - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area combination - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas) - fcst = fcst + areaHeader - - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - - self._addHeadlines(headlines) - fcst = fcst + headlines - - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - argDict["language"] = self._language - # Generate Narrative Forecast for Edit Area - fcst = fcst + self._narrativeProcessor.generateForecast( - argDict, editArea, areaLabel) - if self._includeMultipleElementTable == 1: - fcst = fcst + self.makeMultipleElementTable(areaLabel, self._timeRange, argDict) - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - fcst = fcst + "\n$$\n\n" - return fcst - - def _postProcessProduct(self, fcst, argDict): - # Add one extended - if self._summaryExtended == 1: - fcst = fcst + "\n.FORECAST DAYS 3 THROUGH 7...\n\n" - extended = self.generateProduct("ExtendedNarrative", - argDict, area=self._summaryArea, - timeRange=self._extendedRange) - fcst = fcst + extended - fcst = fcst + "\n.OUTLOOK\n\n\n$$\n" - - # Make summary headline string and substitute for "" placeholder - headlineStr = "" - for h in self._prodHeadlines: - headlineStr = headlineStr + "..." 
+ h + "...\n" - if len(self._prodHeadlines): - headlineStr = headlineStr + "\n" - fcst = fcst.replace("", headlineStr) - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _addHeadlines(self, headlines): - # Add the headlines to the list of product headlines - headlines = headlines.split("...") - for headline in headlines: - if len(headline) == 0 or headline[0] == '\n': - continue - if headline not in self._prodHeadlines: - self._prodHeadlines.append(headline) - - def _issuance_list(self, argDict): - narrativeDefAM = [ - ("FirePeriod", "period1"), - ("FirePeriod", 12), ("FirePeriod", 12), - ("Custom", ("ExtraSampling", (-24, 12))), - ] - narrativeDefPM = [ - ("FirePeriod", "period1"), - ("FirePeriod", 12), ("FirePeriod", 12), ("FirePeriod", 12), - ("Custom", ("ExtraSampling", (-24, 24))), - ] - extended = [ - ("FireExtendedShortTerm", 24), ("FireExtendedShortTerm", 24), - ("FireExtendedShortTerm", 24), - ("FireExtended", 24), ("FireExtended", 24), - ] - try: - if self._individualExtended == 1: - if self._extendedLabel == 1: - narrativeDefAM.append(("ExtendedLabel",0)) - narrativeDefPM.append(("ExtendedLabel",0)) - narrativeDefAM = narrativeDefAM + extended - narrativeDefPM = narrativeDefPM + extended - except: - pass - - return [ - ("Morning", self.DAY(), self.NIGHT(), 16, - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning Update", "issuanceHour", self.NIGHT(), 16, - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), 16, - ".REST OF TODAY...", "early in the morning","late in the afternoon", - 1, narrativeDefAM), - # End times are tomorrow: - ("Afternoon", self.NIGHT(), 24 + 
self.DAY(), 24 + 4, - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + 4, - ".REST OF TONIGHT...", "late in the night","early in the evening", - 1, narrativeDefPM), - # For the early morning update, this produces: - # Rest of Tonight: - # MONDAY - # MONDAY NIGHT - ("Early Morning Update", "issuanceHour", self.DAY(), 4, - ".REST OF TONIGHT...", "early in the morning","late in the afternoon", - 0, narrativeDefPM), - # Alternative - # For the early morning update, this produces: - # EARLY THIS MORNING: - # Today - # Tonight - #("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + 4, - # ".REST OF TONIGHT...", "late in the night", "early in the evening", - # 1, narrativeDefPM), - #("Early Morning Update", "issuanceHour", self.DAY(), 4, - # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", - # 1, narrativeDefPM), - ] - - def lateDay_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late day -- default 3pm-6pm - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateDayPhrase() - else: - return "late in the afternoon" - - def lateNight_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late night -- default 3am-6am - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateNightPhrase() - else: - return "early in the morning" - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. 
- def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - return [ - ('FW.W', allActions, 'FireWx'), # RED FLAG WARNING - ('FW.A', allActions, 'FireWx'), # FIRE WEATHER WATCH - ('HW.W', allActions, 'Wind'), # HIGH WIND WARNING - ('WI.Y', allActions, 'Wind'), # WIND ADVISORY - ('HW.A', allActions, 'Wind'), # HIGH WIND WATCH - ('EH.W', allActions, 'Heat'), # EXCESSIVE HEAT WARNING - ('HT.Y', allActions, 'Heat'), # HEAT ADVISORY - ] - - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: This produces a narrative Fire Weather Forecast. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# FWF, FWF___Definition, FWF__Overrides +#------------------------------------------------------------------------- +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas, default is Combinations +# +# productName defines name of product e.g. "Zone Forecast Product" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "SFTBOS" +# areaName (opt.) 
Area name for product header, such as "Western New York" +# wfoCityState WFO location, such as "Buffalo NY" +# +# Optional Configuration Items +# editAreaSuffix default None. Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined for the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# periodCombining If 1, an attempt will be made to combine components +# or time periods into one. Otherwise no period +# combining will will be done. 
+# defaultEditAreas defines edit areas, default is Combinations +# areaDictionary Modify the AreaDictionary utility with UGC information +# about zones +# useRH If 1, use RH grids instead of MaxRH, MinRH +# summaryExtended +# summaryArea If summaryExtended == 1, then a summary extended forecast will +# be generated for the given summaryArea +# individualExtended If individualExtended == 1, an extended forecast will be +# generated for each individual area +# extendedLabel If extendedLabel== 1, a label will be included for each +# individual extended +# lightningPhrases Set this to 1 if you want Lightning Activity +# reported with phrases like "1-8 strikes", +# "9-15 strikes", etc. +# windAdjustmentFactor Winds are reported from the Wind20ft grid +# if available. Otherwise, the Wind grid is +# used with the magnitude multiplied +# by this wind adjustment factor. Winds reported +# by RAWS sites are frequently lower than ASOS winds +# due to the fact that they use a 10-min average. +# A common adjustment factor is 80% (0.80). +# If you want no adjustment to the winds +# then set this variable to 1.00. +# NOTE: This value can optionally be specified as an +# nlValue dictionary. +# +# tempLocalEffects Set to 1 to after defining edit areas AboveElev +# and BelowElev to enable local effects for temperature +# and RH +# windLocalEffects Set to 1 after defining edit areas Ridges and Valleys +# to enable local effects for wind +# +# lineLength defaults to 66, specifies length of lines in output +# +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +# +# Multiple Element Table +# To include a MultipleElementTable (e.g. 
Temp Pop Values) +# for each area in the current Combination: +# Set "includeMultipleElementTable" to 1 +# Set the "elementList" and "singleValueFormat" flag if desired +# "elementList" may include "Temp", "PoP", and/or "Humidity" +# "singleValueFormat" lists only one value per element +# Make sure you are using a Combinations file +# Modify the CityDictionary TextUtility file or create a new one +# To include a MultipleElementTable (e.g. Temp Pop Values) +# for each period for each area in the current Combination: +# Set "includeMultipleElementTable_perPeriod" to 1 +# Set the "elementList" and +# Set the "singleValueFormat" flag to 1 +# "elementList" may include "Temp", "PoP", and/or "Humidity" +# Make sure you are using a Combinations file +# Modify the CityDictionary TextUtility file or create a new one +# +# includeMultipleElementTable +# includeMultipleElementTable_perPeriod +# elementList +# singleValueFormat +# cityDictionary +# +# Weather-related flags +# hoursSChcEnds - specifies hours past the beginning of the first +# first period of the product to stop including 'Slight +# Chance' or 'Isolated' weather types (ERH policy +# allows values of 1-5 * 12 hour periods) +# +# useHolidays Set to 1 to use holidays in the time period labels +# includeTrends Set to 1 to include Temp and RH trends +# +# Trouble-shooting items +# passLimit -- Limit on passes allowed through Narrative Tree +# trace -- Set to 1 to turn on trace through Narrative Tree +# +# NARRATIVE CUSTOMIZATION POINTS +# The phrases in this product can be customized in many ways by overriding +# infrastructure methods in the Local file. +# You will see common overrides in the Local file and you may change them +# in that there. +# For further customization, you can determine which phrases your product is +# using by examining the Component Product Definitions below. 
+# Then, you can look up the phrase in the Text Product User Guide which will +# describe the all the relevant override methods associated with the phrase. +# Refer to the Customization section of the Text Product User Guide +# for step-by-step information. +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Hazards (optional): If provided, headlines will be generated. +# Sky, LAL, Wind (6 hourly), MaxRH, MinRH (out 2 days), PoP, MaxT, +# MinT, T, Wx (out to 7 days) +# Optional: +# Ttrend, RHtrend, Haines, TransWind, MixHgt, VentRate, CWR +# MarineLayer: If used, uncomment MarineLayer lines in: +# getFirePeriod_analysisList +# getFirePeriod_phraseList +#------------------------------------------------------------------------- +# Edit Areas Needed: +# Optional: +# For local effects: AboveElev, BelowElev +# NOTE: Set the Definition["tempLocalEffects"] = 1 in the Site Definition File +# Define edit areas with an appropriate elevation +# (e.g. in CO, it is 11000 for timberline). +# This will be used to report local effects for temperature and RH. +# For Valley/Ridge Winds: Valleys, Ridges +# NOTE: Set the Definition["windLocalEffects"] = 1 in the Site Definition File +# Define edit areas for Valleys and Ridges to be intersected with the current +# area for reporting as separate Valley and Ridge winds. +# Be sure and set up the list of areas for which you want separate +# Valley and Ridge winds in the "ridgeValleyAreas" list in the Local file. +# IF you do NOT want to distinguish between Valley and Ridge winds, +# you can comment out these edit area entries in the "intersectArea" section +# of the Local file. +# Fire Area for the extended summary. This defaults to your CWA, but you +# can change it in the Definition overrides if you like. +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. 
Combinations file: +# Combinations +#------------------------------------------------------------------------- +# Component Products: +# FirePeriod (component) +# ExtendedLabel(component) +# FireExtendedShortTerm (component) +# FireExtended (component) +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Additional Information: +# Modify the _tempLocalEffects and _rhLocalEffects " above timberline " +# descriptor (in the FWF_Local file) to appropriate wording for your area. +# +# COMMON OVERRIDES +# from FWF +# getFirePeriod_intersectAreas +# _tempLocalEffects_list +# _rhLocalEffects_list +# from ConfigVariables: +# units_descriptor_dict +# untilPhrasing_flag_dict +# from FirePhrases: +# includeSkyRanges_flag +# ridgeValleyAreas +# from WxPhrases: +# pop_wx_lower_threshold +# wxCoverageDescriptors +# wxTypeDescriptors +# wxAttributeDescriptors +# wxIntensityDescriptors +# wxCombinations +# combine_T_RW +# +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Fire Weather Services. +#------------------------------------------------------------------------- + + +import TextRules +import SampleAnalysis +import ForecastNarrative +import time, string, types +import TimeRange + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [] + Definition = { + "type": "smart", + "displayName": "None", + # Source database for product. Can be "Official", "Fcst" or "ISC" + "database": "Official", + # Defines output location of finished product. 
+ "outputFile": "{prddir}/TEXT/FWF_.txt", + "debug": 0, + # Name of map background for creating Combinations + "mapNameForCombinations": "FireWxZones_", + + ## Edit Areas: Create Combinations file with edit area combinations. + "showZoneCombiner" : 1, # 1 to cause zone combiner to display + "defaultEditAreas" : "Combinations_FWF__", + "editAreaSuffix": None, + + # product identifiers + "productName": "Fire Weather Planning Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "Georgia" -- optional + "wfoCityState": "", # Location of WFO - city state + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + "periodCombining" : 0, # If 1, combine periods, if possible + "lineLength" : 66, # line length + + "hazardSamplingThreshold": (10, None), #(%cov, #points) + + # Product-specific variables: + "lightningPhrases": 0, # Set to 1 to report lightning as phrases (e.g. 1-8 STRIKES) + "windAdjustmentFactor": 0.80, # Adjustment for Wind if no Wind20ft grid available + + # To include a MultipleElementTable (e.g. 
Temp Pop Table) + # for each area in the current Combination: + # Set "includeMultipleElement" to 1 + # Set the "elementList" and "singleValueFormat" flag if desired + # "elementList" may include "Temp", "PoP", and/or "Humidity" + # "singleValueFormat" lists only one value per element + # Make sure you are using a Combinations file + # Modify the CityDictionary TextUtility file or create a new one + "includeMultipleElementTable": 0, + "includeMultipleElementTable_perPeriod": 0, + "elementList" : ["Temp", "Humidity", "PoP"], + "singleValueFormat": 0, + "cityDictionary": "CityDictionary", + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + # Language + "language": "english", + + "useRH": 0, # Use RH grids instead of MaxRH, MinRH + # If summaryExtended == 1, then a summary extended forecast will + # be generated for the given summaryArea + "summaryExtended": 1, + "summaryArea":"FireWxAOR__", + # If individualExtended == 1, an extended forecast will be + # generated for each individual area + # If extendedLabel == 1, a label will be included for each + # individual extended + "individualExtended": 0, + "extendedLabel": 0, + + # Weather-related flags + "hoursSChcEnds": 24, + + "useHolidays": 1, # Set to 1 to use holidays in the time period labels + "includeTrends": 1, # Set to 1 to include Temp and RH trends + "tempLocalEffects": 0, # Set to 1 to enable Temp and RH local effects AFTER + # creating AboveElev and BelowElev edit areas + "windLocalEffects": 0, # Set to 1 to enable wind local effects AFTER + # creating Ridges and Valleys edit areas + "fixedExpire": 1, #ensure VTEC actions don't affect expiration time + + + # Trouble-shooting items + "passLimit": 20, # Limit on passes allowed through + # Narrative Tree + "trace": 0, # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + 
######################################################################## + # OVERRIDING THRESHOLDS AND VARIABLES + ######################################################################## + + # Uncomment any combinations you wish to collapse. + # For example, if the first entry is uncommented, + # the phrase: scattered rain showers and widespread rain + # will collapse to: scattered rain showers. + def wxCombinations(self): + return [ + ("RW", "R"), + ("SW", "S"), + ## ("T","RW"), + ] + + def minMax_std_deviation(self, parmHisto, timeRange, componentName): + # Replaces MINMAX_STD_DEVIATION + # Number of standard deviations to compute around the weighted + # average for a stdDev_MinMax + return 1.4 + + def element_outUnits_dict(self, tree, node): + dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) + dict["Wind"] = "mph" + dict["Wind20ft"] = "mph" + dict["TransWind"] = "mph" + dict["FreeWind"] = "mph" + dict["WindGust"] = "mph" + return dict + + ############################# + # Overrides to take care of Wind in the Extended forecast + # Use Wind20ft if available, else use adjusted Wind + # + def adjust_method_dict(self, tree, node): + # Special adjustment methods + # + return { + "Wind": self._adjustWind, + } + + def _adjustWind(self, value): + # adjustment for winds + factor = self.nlValue(self._windAdjustmentFactor, value) + value = value * factor + return value + + def wind_summary_words(self, tree, node): + # See if there's data for Wind20ft + elementName = "Wind" + args = node.get("args") + if args is not None: + elementName = args[0] + if elementName == "Wind": + elementName = self.chooseElement(tree, node, ["Wind20ft", "Wind"]) + words = self.vector_summary(tree, node, elementName) + return self.setWords(node, words) + + def wind_setUp(self, tree, node, gustFlag=0, element="Wind", connectorMethod=None): + args = node.get("args") + if args is not None: + element = args[0] + if element == "Wind": + # See if there's data for Wind20ft + element = 
self.chooseElement(tree, node, ["Wind20ft", "Wind"]) + wind = self.ElementInfo(element, "List", self.VECTOR()) + elementInfoList = [wind] + if gustFlag: + windGust = self.ElementInfo( + "WindGust", "Max", phraseDef=self.gust_phrase) + elementInfoList.append(windGust) + node.set("gustFlag", 1) + if connectorMethod is None: + connectorMethod = self.vectorConnector + self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) + return self.DONE() + + def phrase_descriptor_dict(self, tree, node): + # Descriptors for phrases + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + # If not extended, make descriptor empty + componentName = node.getComponent().get("name") + if componentName == "FirePeriod": + dict["Wind"] = "" + dict["Wind20ft"] = "" + return dict + + def nextDay24HourLabel_flag(self, tree, node): + # Return 1 to have the TimeDescriptor module label 24 hour periods starting + # after 1600 as the next day. + # This is needed for the Fire Weather Extended product, + # but not for other products when period combining. + return 1 + + def untilPhrasing_flag_dict(self, tree, node): + # If set to 1, "until" time descriptor phrasing will be used. + # E.g. "North winds 20 MPH until 10 AM, then 35 MPH" + # + # NOTE: Be sure to increase the temporal resolution by + # overriding "getFirePeriod_analysisList" from the FWF standard file. + # E.g. 
("MixHgt", self.minMax, [0]), + + dict = TextRules.TextRules.untilPhrasing_flag_dict(self, tree, node) + dict["LAL"] = 1 + componentName = node.getComponent().get("name") + if componentName == "FirePeriod": + dict["Sky"] = 1 + dict["Wx"] = 1 + return dict + + ######################################################################## + # COMPONENT PRODUCT DEFINITIONS + ######################################################################## + + def _PoP_analysisMethod(self, componentName): + # Alternative PoP analysis methods for consistency between PoP and Wx + #return self.maxMode + #return self.maximum + return self.stdDevMaxAvg + + def FirePeriod(self): + phraseList = self.getFirePeriod_phraseList() + analysisList = self.getFirePeriod_analysisList() + intersectAreas = self.getFirePeriod_intersectAreas() + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assembleIndentedPhrases, + ], + "analysisList": analysisList, + "phraseList": phraseList, + "intersectAreas": intersectAreas, + } + + def ExtraSampling(self): + analysisList = [ + ("MaxT", self.mode, [0]), + ("MinT", self.mode, [0]), + ] + if self._useRH: + analysisList += [("RH", self.mode, [0])] + else: + analysisList += [("MaxRH", self.mode, [0]), + ("MinRH", self.mode, [0])] + return { + "type": "component", + "methodList": [self.noWords], + "analysisList": analysisList, + "phraseList": [], + "intersectAreas": self.getFirePeriod_intersectAreas(), + } + + def getFirePeriod_analysisList(self): + analysisList = [ + ("Sky", self.minMax, [0]), + ("PoP", self._PoP_analysisMethod("FirePeriod"), [3]), + ("PoP", self.binnedPercent, [3]), + ("Wx", self.rankedWx, [0]), + ("LAL", self.maximum, [0]), + ("LAL", self.binnedPercent, [0]), + ("MaxT", self.minMax), + ("MinT", self.minMax), + ("T", self.minMax), + ("MaxRH", self.minMax), + ("MinRH", self.minMax), + ("RH", self.minMax), + ("MaxT", self.mode), # for trends + ("MinT", self.mode), # for trends + ("MaxRH", self.mode), # for trends 
+ ("MinRH", self.mode), # for trends + ("RH", self.mode), # for trends + ("Ttrend", self.avg), # for trends + ("RHtrend", self.avg), # for trends + ("Wind", self.vectorAvg, [6]), + ("Wind20ft", self.vectorAvg, [6]), + #("RidgeWind", self.vectorAvg, [6]), + ("Haines", self.avg), + ("TransWind", self.vectorAvg, [6]), + ("FreeWind", self.vectorAvg, [6]), + ("MixHgt", self.minMax), + ("VentRate", self.minMax), + ("CWR", self.avg), + #("MarineLayer", self.minMax), + ] + return analysisList + + def getFirePeriod_phraseList(self): + if self._useRH: + dayRH = "RH" + nightRH = "RH" + else: + dayRH = "MinRH" + nightRH = "MaxRH" + phraseList = [ + self.skyWeather_byTimeRange_compoundPhrase, + self.lal_phrase, + (self.dayOrNight_phrase, ["MaxT", "MinT", 1, 1], + self._tempLocalEffects_list()), + (self.trend_DayOrNight_phrase, ["MaxT", "MinT", "Ttrend", 1, 1], + self._tempTrendLocalEffects_list()), + (self.dayOrNight_phrase, [dayRH, nightRH, 1, 1], self._rhLocalEffects_list()), + (self.trend_DayOrNight_phrase, [dayRH, nightRH, "RHtrend", 1, 1], + self._rhTrendLocalEffects_list()), + self.humidityRecovery_phrase, + self.fireWind_compoundPhrase, + self.fireWind_label_phrase, + self.fireValleyWind_compoundPhrase, + self.fireRidgeWind_compoundPhrase, + self.haines_phrase, + self.smokeDispersal_phrase, + self.mixingHgt_phrase, + self.transportWind_phrase, + #self.freeWind_phrase, + self.cwr_phrase, + #self.marineLayer_phrase, + ] + # Remove trend methods + if self._includeTrends != 1: + newList = [] + for phrase in phraseList: + if type(phrase) is tuple: + phraseMethod = phrase[0] + if phraseMethod == self.trend_DayOrNight_phrase: + continue + newList.append(phrase) + phraseList = newList + # Add multipleElementTable + if self._includeMultipleElementTable_perPeriod: + phraseList.append(self.multipleElementTable_perPeriod_phrase) + return phraseList + + def getFirePeriod_intersectAreas(self): + tempList = [] + windList = [] + if self._tempLocalEffects: + tempList = [ + ("MinT", 
["BelowElev", "AboveElev"]), + ("MaxT", ["BelowElev", "AboveElev"]), + ("MinRH", ["BelowElev", "AboveElev"]), + ("MaxRH", ["BelowElev", "AboveElev"]), + ("RH", ["BelowElev", "AboveElev"]), + ("Ttrend", ["BelowElev", "AboveElev"]), + ("RHtrend", ["BelowElev", "AboveElev"]), + ] + if self._windLocalEffects: + windList = [ + ("Wind", ["Valleys", "Ridges"]), + ("Wind20ft", ["Valleys", "Ridges"]), + ("WindGust", ["Valleys", "Ridges"]), + ] + return tempList + windList + + def _tempLocalEffects_list(self): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", " above timberline") + return [self.LocalEffect([leArea1, leArea2], 8, ", except ")] + + def _rhLocalEffects_list(self): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", " above timberline") + return [self.LocalEffect([leArea1, leArea2], self._rhTrigger, ", except ")] + + def _tempTrendLocalEffects_list(self): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", "above timberline") + return [self.LocalEffect([leArea1, leArea2], self._trendTrigger, ", except ")] + + def _rhTrendLocalEffects_list(self): + leArea1 = self.LocalEffectArea("BelowElev", "") + leArea2 = self.LocalEffectArea("AboveElev", "above timberline") + return [self.LocalEffect([leArea1, leArea2], self._trendTrigger, ", except ")] + + def _rhTrigger(self, tree, node, localEffect, leArea1Label, leArea2Label): + first = node.getAncestor("firstElement") + element = first.name + dataType = first.dataType + timeRange = node.getTimeRange() + mergeMethod="MinMax" + if element == "RH": + day = self.getPeriod(timeRange, 1) + if day: + mergeMethod="Min" + else: + mergeMethod="Max" + + area1Stats = tree.stats.get(element, timeRange, leArea1Label, + mergeMethod=mergeMethod) + area2Stats = tree.stats.get(element, timeRange, leArea2Label, + mergeMethod=mergeMethod) + #print "\nLocal effects", element, timeRange + #print leArea1Label, 
area1Stats + #print leArea2Label, area2Stats + if area1Stats is None or area2Stats is None: + return 0 + flag = self.checkLocalEffectDifference( + tree, node, dataType, 8, area1Stats, area2Stats, + leArea1Label, leArea2Label) + return flag + + def _trendTrigger(self, tree, node, localEffect, leArea1Label, leArea2Label): + #print "*** Inside _trendTrigger ***" + first = node.getAncestor("firstElement") + element = first.name + #print "element", element + dataType = first.dataType + timeRange = node.getTimeRange() + + if element.find("T") >= 0: + trendElement = "Ttrend" + else: + trendElement="RHtrend" + + # trend stats + area1AbsDiff, area1RawDiff = self.getTrendStats( + tree, node, element, timeRange, leArea1Label, trendElement) + area2AbsDiff, area2RawDiff = self.getTrendStats( + tree, node, element, timeRange, leArea2Label, trendElement) + + #print "\nTrend Local effects", element, timeRange + #print "\t\tBelow", area1RawDiff + #print "\t\tAbove", area2RawDiff + + if area1AbsDiff is None or area2AbsDiff is None: + return 0 + + # Use rawDiff because sign is important e.g. warmer vs. 
cooler + flag = self.checkLocalEffectDifference( + tree, node, dataType, 4, area1RawDiff, area2RawDiff, + leArea1Label, leArea2Label) + return flag + + def skyPopWx_excludePoP_flag(self, tree, node): + # If set to 1, PoP will not be included in the skyPopWx_phrase + return 1 + + def ExtendedLabel(self): + return { + "type": "component", + "methodList": [self.setLabel], + "analysisList": [], + "phraseList":[], + } + def setLabel(self, tree, component): + component.set("words", "\n.Forecast days 3 through 7......\n") + return self.DONE() + + def FireExtendedShortTerm(self): + return { + "type": "component", + "methodList": [ + self.orderPhrases, + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("MinT", self.firstAvg), + ("MaxT", self.avg), + ("T", self.hourlyTemp), + ("Sky", self.avg, [12]), + ("Wind", self.vectorAvg), + ("Wind20ft", self.vectorAvg), + ("Wx", self.rankedWx, [6]), + ("PoP", self._PoP_analysisMethod("FireExtendedShortTerm"), [6]), + ("PoP", self.binnedPercent, [6]), + ], + "phraseList":[ + self.reportTrends, + self.wind_summary, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.lows_phrase, + self.highs_phrase, + self.wind_phrase, + ], + } + + def FireExtended(self): + return { + "type": "component", + "methodList": [ + self.orderPhrases, + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("MinT", self.firstAvg), + ("MaxT", self.avg), + ("T", self.hourlyTemp), + ("Sky", self.avg, [12]), + ("Wx", self.rankedWx, [6]), + ("PoP", self._PoP_analysisMethod("FireExtended"), [6]), + ("PoP", self.binnedPercent, [6]), + ], + "phraseList":[ + self.reportTrends, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.lows_phrase, + self.highs_phrase, + ], + } + + def ExtendedNarrative(self): + return { + "type": "narrative", + "methodList": [self.assembleChildWords], + ## Components + "narrativeDef": [ + 
("FireExtendedShortTerm",24),("FireExtendedShortTerm",24), + ("FireExtendedShortTerm",24), + ("FireExtended",24),("FireExtended",24) + ], + } + + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." + + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._language = argDict["language"] + return 
None + + def _determineTimeRanges(self, argDict): + # Set up the Narrative Definition and initial Time Range + self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict), argDict["creationTime"]) + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._expireTime = self._issuanceInfo.expireTime() + self._issueTime = self._issuanceInfo.issueTime() + self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + if self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + # Determine the extended range + if self._individualExtended == 1: + self._extendedStart = self._timeRange.endTime() - 24*5*3600 + else: + self._extendedStart = self._timeRange.endTime() + self._extendedRange = TimeRange.TimeRange( + self._extendedStart, self._extendedStart + 3600) + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + return None + + def _sampleData(self, argDict): + # Sample and analyze the data for the narrative + self._narrativeProcessor = ForecastNarrative.ForecastNarrative() + error = self._narrativeProcessor.getNarrativeData( + argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) + if error is not None: + return error + return None + + def _preProcessProduct(self, fcst, argDict): + # Product header + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = 
fcst + s.upper() + + s = productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + # Put in a place holder for the headlines to be substituted in + # "postProcessProduct" + fcst = fcst + "" + self._prodHeadlines = [] + + fcst = fcst + ".DISCUSSION..." + "\n\n\n\n\n" + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area combination + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas) + fcst = fcst + areaHeader + + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + + self._addHeadlines(headlines) + fcst = fcst + headlines + + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + argDict["language"] = self._language + # Generate Narrative Forecast for Edit Area + fcst = fcst + self._narrativeProcessor.generateForecast( + argDict, editArea, areaLabel) + if self._includeMultipleElementTable == 1: + fcst = fcst + self.makeMultipleElementTable(areaLabel, self._timeRange, argDict) + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + fcst = fcst + "\n$$\n\n" + return fcst + + def _postProcessProduct(self, fcst, argDict): + # Add one extended + if self._summaryExtended == 1: + fcst = fcst + "\n.FORECAST DAYS 3 THROUGH 7...\n\n" + extended = self.generateProduct("ExtendedNarrative", + argDict, area=self._summaryArea, + timeRange=self._extendedRange) + fcst = fcst + extended + fcst = fcst + "\n.OUTLOOK\n\n\n$$\n" + + # Make summary headline string and substitute for "" placeholder + headlineStr = "" + for h in self._prodHeadlines: + headlineStr = headlineStr + "..." 
+ h + "...\n" + if len(self._prodHeadlines): + headlineStr = headlineStr + "\n" + fcst = fcst.replace("", headlineStr) + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _addHeadlines(self, headlines): + # Add the headlines to the list of product headlines + headlines = headlines.split("...") + for headline in headlines: + if len(headline) == 0 or headline[0] == '\n': + continue + if headline not in self._prodHeadlines: + self._prodHeadlines.append(headline) + + def _issuance_list(self, argDict): + narrativeDefAM = [ + ("FirePeriod", "period1"), + ("FirePeriod", 12), ("FirePeriod", 12), + ("Custom", ("ExtraSampling", (-24, 12))), + ] + narrativeDefPM = [ + ("FirePeriod", "period1"), + ("FirePeriod", 12), ("FirePeriod", 12), ("FirePeriod", 12), + ("Custom", ("ExtraSampling", (-24, 24))), + ] + extended = [ + ("FireExtendedShortTerm", 24), ("FireExtendedShortTerm", 24), + ("FireExtendedShortTerm", 24), + ("FireExtended", 24), ("FireExtended", 24), + ] + try: + if self._individualExtended == 1: + if self._extendedLabel == 1: + narrativeDefAM.append(("ExtendedLabel",0)) + narrativeDefPM.append(("ExtendedLabel",0)) + narrativeDefAM = narrativeDefAM + extended + narrativeDefPM = narrativeDefPM + extended + except: + pass + + return [ + ("Morning", self.DAY(), self.NIGHT(), 16, + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning Update", "issuanceHour", self.NIGHT(), 16, + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), 16, + ".REST OF TODAY...", "early in the morning","late in the afternoon", + 1, narrativeDefAM), + # End times are tomorrow: + ("Afternoon", self.NIGHT(), 24 + 
self.DAY(), 24 + 4, + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + 4, + ".REST OF TONIGHT...", "late in the night","early in the evening", + 1, narrativeDefPM), + # For the early morning update, this produces: + # Rest of Tonight: + # MONDAY + # MONDAY NIGHT + ("Early Morning Update", "issuanceHour", self.DAY(), 4, + ".REST OF TONIGHT...", "early in the morning","late in the afternoon", + 0, narrativeDefPM), + # Alternative + # For the early morning update, this produces: + # EARLY THIS MORNING: + # Today + # Tonight + #("Evening Update", "issuanceHour", 24 + self.DAY(), 24 + 4, + # ".REST OF TONIGHT...", "late in the night", "early in the evening", + # 1, narrativeDefPM), + #("Early Morning Update", "issuanceHour", self.DAY(), 4, + # ".EARLY THIS MORNING...", "early in the morning", "late in the afternoon", + # 1, narrativeDefPM), + ] + + def lateDay_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late day -- default 3pm-6pm + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateDayPhrase() + else: + return "late in the afternoon" + + def lateNight_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late night -- default 3am-6am + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateNightPhrase() + else: + return "early in the morning" + + # Returns a list of the Hazards allowed for this product in VTEC format. + # These are sorted in priority order - most important first. 
+ def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + return [ + ('FW.W', allActions, 'FireWx'), # RED FLAG WARNING + ('FW.A', allActions, 'FireWx'), # FIRE WEATHER WATCH + ('HW.W', allActions, 'Wind'), # HIGH WIND WARNING + ('WI.Y', allActions, 'Wind'), # WIND ADVISORY + ('HW.A', allActions, 'Wind'), # HIGH WIND WATCH + ('EH.W', allActions, 'Heat'), # EXCESSIVE HEAT WARNING + ('HT.Y', allActions, 'Heat'), # HEAT ADVISORY + ] + + + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWFTable.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWFTable.py index 0a33a2d480..d3554e10a9 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWFTable.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWFTable.py @@ -1,1910 +1,1910 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. 
-# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: This produces a Fire Weather Forecast in tabular format. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# FWFTable, FWFTable___Definition, FWFTable__Overrides -#------------------------------------------------------------------------- -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas, default is Combinations -# -# productName defines name of product e.g. "Fire Weather Table" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "FWFBOS" -# areaName (opt.) Area name for product header, such as "Western New York" -# wfoCityState WFO location, such as "Buffalo NY" -# -# Optional Configuration Items -# editAreaSuffix default None. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". 
If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined or the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -# -# periodCombining If 1, an attempt will be made to combine components -# or time periods into one. Otherwise no period -# combining will be done. -# columnJustification # Left (l) or right (r) justification for columns -# areaDictionary Modify the AreaDictionary utility with UGC information -# about zones. -# useRH If 1, use RH grids instead of MaxRH, MinRH -# summaryExtended -# summaryArea If summaryExtended == 1, then a summary extended forecast will -# be generated for the given summaryArea -# individualExtended If individualExtended == 1, an extended forecast will be -# generated for each individual area -# extendedLabel If extendedLabel== 1, a label will be included for each -# individual extended -# mixingParmsDayAndNight Set this to 1 if you want Mixing Height, -# Transport Wind and Vent Index reported in -# night periods. 
-# mixHgtMethod Can be "Max" or "Avg" for mixHgt analysis method -# lightningPhrases Set this to 1 if you want Lightning Activity -# reported with phrases like "1-8 strikes", -# "9-15 strikes", etc. -# windAdjustmentFactor Winds are reported from the Wind20ft grid -# if available. Otherwise, the Wind grid is used -# with the magnitude multiplied by this wind -# adjustment factor. Winds reported by RAWS sites -# are frequently lower than ASOS winds -# due to the fact that use a 10-min average. -# A common adjustment factor is 80% (0.80). If -# you want no adjustment to the winds then set -# this variable to 1.00. -# NOTE: This value can optionally be specified as an -# nlValue dictionary. -# -# tableLightWindThreshold Threshold for a light wind string in the table -# tableLightWindPhrase Light wind string in the table -# tableLightWindThreshold Threshold for a light wind string in the table -# minGustMph Gusts will not be reported below this value -# windGustDiffMph Gusts will be reported only if the difference between -# gust and max wind exceeds this amount. -# -# humidityRecovery_percentage If max humidity is above this percentage, -# humidity recovery will be Excellent. -# rhPhraseThreshold The MinRH phrase will be included in the extended, IF -# the MinRH is less than this threshold. -# The default (-1) will not produce a MinRH phrase. -# includeOutlooks Set this to 1 to include long-range outlook -# placeholders at the end of the product. These -# are appended by _postProcessProduct. 
-# useHolidays Set to 1 to use holidays in the time period labels -# -# Weather-related flags -# hoursSChcEnds - specifies hours past the beginning of the -# first period of the product to stop including 'Slight -# Chance' or 'Isolated' weather types -# popWxThreshold -Affects the values in the following rows: -# -## * CHANCE PRECIP -## * Precip amount -## * Precip duration -## * Precip begin -## * Precip end - -## We will put values in these rows according to the following assumptions: -## --If there is no weather, then all the above fields will be blank, zero or None. -## --If the PoP falls below a the popWxThreshold, value (default 1), -## then all of the above fields are blank, zero, or None. -## --If QPF is 0.00, then CHANCE PRECIP, Precip duration, Precip begin and -## Precip end will represent no precip. -## --Thus, if Wx is non-accumulating i.e. drizzle or snow flurries, the Precip Type -## could be non-empty, but since QPF would be 0.0, -## then no CHANCE PRECIP, Precip duration, Precip begin and Precip end will be reported. - -# areaDictionary Modify the AreaDictionary utility with UGC -# information about zones. -# -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Sky, PoP, Wx, MaxT, MinT, T, Wind, Wind20ft, QPF, MaxRH, MinRH, -# MixHgt, TransWind, VentRate, HrsOfSun, CWR, Haines, LAL -# RH (optional -- can be used in place of MinRH, MaxRH. Set the "useRH" flag) -# Ttrend (optional -- if not included, prior day's data is used) -# RHtrend (optional -- if not included, prior day's data is used) -#------------------------------------------------------------------------- -# Edit Areas Needed: area1, area2 -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. 
Combinations file: -#------------------------------------------------------------------------- -# Component Products: -# Extended -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". -#------------------------------------------------------------------------- -# Additional Information: -# -# COMMON OVERRIDES -# from FWF_Table -# _rowList -# from WxPhrases: -# wxCoverageDescriptors -# wxTypeDescriptors -# wxAttributeDescriptors -# wxIntensityDescriptors -# wxCombinations -# combine_T_RW -# -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Fire Weather Services. -####################################################################### - -import TextRules -import SampleAnalysis -import time, string, types -import TimeRange, AbsTime - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - (("Product Issuance", "productIssuance") , "Morning", "radio", - ["Morning", "Afternoon"]), - ] - Definition = { - "type": "smart", - "displayName": "None", - "database": "Official", - # Defines output location of finished product. 
- "outputFile": "{prddir}/TEXT/FWFTable_.txt", - "debug": 0, - # Name of map background for creating Combinations - "mapNameForCombinations": "FireWxZones_", - - "lineLength": 66, - ## Edit Areas: - "showZoneCombiner" : 1, # 1 to cause zone combiner to display - "defaultEditAreas" : "Combinations_FWFTable__", - - # product identifiers - "productName": "Fire Weather Planning Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "Georgia" -- optional - "wfoCityState": "", # Location of WFO - city state - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - "periodCombining" : 0, # If 1, combine periods, if possible - - "fixedExpire": 1, #ensure VTEC actions don't affect segment time - - - "hazardSamplingThreshold": (10, None), #(%cov, #points) - - # Optional Configuration Items - "columnJustification": "l", # Left (l) or right (r) justification for columns - "useRH": 0, # Use RH grids instead of MaxRH, MinRH - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - - # If summaryExtended == 1, then a summary extended forecast will - # be generated for the given summaryArea - "summaryExtended": 1, - "summaryArea":"FireWxAOR__", - # If individualExtended == 1, an extended forecast will be - # generated for each individual area - # If extendedLabel == 1, a label will be included for each - # individual extended - "individualExtended": 0, - "extendedLabel": 0, - # Set the following variable to 1 if you want Mixing Height, - # Transport Wind and Vent Index reported in night periods. - "mixingParmsDayAndNight": 0, - "mixHgtMethod": "Max", # Can be Max of Avg - # Set the following variable to 1 if you want Lightning Activity - # reported with phrases like "1-8 strikes", "9-15 strikes", etc. 
- "lightningPhrases": 0, - # Winds are reported from the Wind20ft grid if available. - # Otherwise, the Wind grid is used with the magnitude multiplied - # by this wind adjustment factor. - # Winds reported by RAWS sites are frequently lower than ASOS winds - # due to the fact that use a 10-min average. A common adjustment - # factor is 80% (0.80). If you want no adjust ment to the winds - # then set this variable to 1.00 - "windAdjustmentFactor": 0.80, - # Threshold for a light wind string in the table - "tableLightWindThreshold" : 5, - # Light wind string in the table - "tableLightWindPhrase" : "Lgt/Var", - # Use a range for the winds in the table 1=yes - "tableWindRanges" : 0, - # Gusts will not be reported below this value. - "minGustMph": 17, - # Gust - wind must exceed this threshold to be reported. - "windGustDiffMph": 7, - # If max humidity is above this percentage, humidity recovery - # will be Excellent. - "humidityRecovery_percentage": 50, - # Set to MinRH value below which you would like a MinRH phrase in the Extended. - # Default (-1) is no MinRH phrase. - "rhPhraseThreshold": -1, - # Set the following variable to 1 to include long-range outlook - # placeholders at the end of the product. These are appended by - # _postProcessProduct - "includeOutlooks": 0, - - # Weather-related flags - "hoursSChcEnds": 36, - "popWxThreshold": 1, - - "language": "english", - - # Set to 1 to use holidays in the time period labels - "useHolidays": 0, - - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def _rowList(self): - # The rowList is controls what parameters go into the table. - # The list is a set of (label:method) pairs. - # You may change the label if you like. - # The order of the list determines the order of the rows in the table - # so you may re-arrange the order if you like. 
- return [ - # Directive requirements - ("Cloud cover", self._cloudCover_row), - ("Precip type", self._precipType_row), - ("Chance precip (%)", self._chancePrecip_row), - ("Temp (24h trend)", self._tempWithTrend_row), - ("RH % (24h trend)",self._rhWithTrend_row), - # Use these if you do not want trends - #("TEMP", self._temp_row), - #("RH %", self._rh_row), - ("20ftWnd-val/AM(mph)", self._windValleyMph_row), - ("20ftWnd-rdg/PM(mph)", self._windRidgeMph_row), - # Directive optional products - ("Precip amount", self._precipAmount_row), - ("Precip duration", self._precipDuration_row), - ("Precip begin", self._precipBegin_row), - ("Precip end", self._precipEnd_row), - ("Mixing hgt(m-AGL/MSL)", self._mixHgtM_row), - ("Mixing hgt(ft-AGL/MSL)", self._mixHgtFt_row), - ("Transport wnd (kts)", self._transWindKts_row), - ("Transport wnd (m/s)", self._transWindMS_row), - ("Transport wnd (mph)", self._transWindMph_row), - ("Vent rate (kt-ft)", self._ventRateKtFt_row), - ("Vent rate (m/s-m)", self._ventRate_row), - ("Vent rate (mph-ft)", self._ventRateMphFt_row), - ("Dispersion", self._dispersion_row), - ("DSI", self._dsi_row), - ("Sunshine hours", self._sunHours_row), -## # If you need Ceiling, uncomment the Ceiling line in _getAnalysisList -## #("CEILING", self._ceiling_row), - ("CWR", self._cwr_row), - ("LAL", self._lal_row), - ("Haines Index", self._haines_row), - ("RH recovery", self._rhRecovery_row), -## # If you need 500m Mix Hgt Temp, uncomment the MixHgt500 -## # line in _getAnalysisList -## #("MIX HGT 500", self._mixHgt500_row), - ("Stability class", self._stability_row), - ] - - ######################################################################## - # COMPONENT PRODUCTS - ######################################################################## - - def ExtendedNarrative(self): - # check for period combining first - if self._periodCombining: - methodList = [self.combineComponentStats, self.assembleChildWords] - else: - methodList = [self.assembleChildWords] - - 
return { - "type": "narrative", - "displayName": None, - "timePeriodMethod ": self.timeRangeLabel, - ## Components - "methodList": methodList, - "narrativeDef": [ - ("Extended",24),("Extended",24),("Extended",24), - ("Extended",24), ("Extended",24)], - } - - def Extended(self): - return { - "type": "component", - "methodList": [self.orderPhrases, self.consolidateSubPhrases, - self.assemblePhrases, self.wordWrap], - "analysisList": [ - ("MinT", self.firstAvg), - ("MaxT", self.avg), - ("MinRH", self.avg, [0]), - ("T", self.hourlyTemp), - ("T", self.minMax), - ("PoP", self.stdDevMaxAvg, [12]), - ("PoP", self.binnedPercent, [12]), - ("Sky", self.median, [12]), - #("Wind", self.vectorMedian), - ("Wind", self.vectorMinMax, [6]), - ("Wind20ft", self.vectorMedian), - ("Wind20ft", self.vectorMinMax), - ("Wx", self.rankedWx, [12]), - ("Wx", self.rankedWx, [0]), - ], - "phraseList":[ - self.wind_summary, - self.sky_phrase, - self.skyPopWx_phrase, - self.weather_phrase, - self.reportTrends, - self.lows_phrase, - self.highs_phrase, - self.rh_phrase, - self.wind_withGusts_phrase, - ], - } - - - ############################# - # Overrides to take care of Wind in the Extended forecast - # Use Wind20ft if available, else use adjusted Wind - # - def rounding_method_dict(self, tree, node): - # Special rounding methods - # - return { - "Wind": self._adjustWind, - } - def _adjustWind(self, value, mode, increment, maxFlag): - # Rounding for marine winds - factor = self.nlValue(self._windAdjustmentFactor, value) - value = value * factor - return self.round(value, mode, increment) - - def wind_summary_words(self, tree, node): - # Uses vectorAvg, vectorMedian, vectorMinMax - # See if there's data for Wind20ft - elementName = self.chooseElement(tree, node, ["Wind20ft", "Wind"]) - words = self.vector_summary(tree, node, elementName) - return self.setWords(node, words) - - def wind_setUp(self, tree, node, gustFlag=0, element="Wind", connectorMethod=None): - # See if there's data for Wind20ft 
- elementName = self.chooseElement(tree, node, ["Wind20ft", "Wind"]) - wind = self.ElementInfo(elementName, "List", self.VECTOR()) - elementInfoList = [wind] - if gustFlag: - windGust = self.ElementInfo( - "WindGust", "Max", phraseDef=self.gust_phrase) - elementInfoList.append(windGust) - node.set("gustFlag", 1) - if connectorMethod is None: - connectorMethod = self.vectorConnector - self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) - return self.DONE() - - def nextDay24HourLabel_flag(self, tree, node): - # Return 1 to have the TimeDescriptor module label 24 hour periods starting - # after 1600 as the next day. - # This is needed for the Fire Weather Extended product, - # but not for other products when period combining. - return 1 - - ######################################################################## - # OVERRIDING THRESHOLDS AND VARIABLES - ######################################################################## - - # Uncomment any combinations you wish to collapse. - # For example, if the first entry is uncommented, - # the phrase: scattered rain showers and widespread rain - # will collapse to: scattered rain showers. - def wxCombinations(self): - return [ - ("RW", "R"), - ("SW", "S"), - ## ("T","RW"), - ] - - def temporalCoverage_hours_dict(self, parmHisto, timeRange, componentName): - # This is the hours of overlap of a grid with the TIMERANGE - # in order to include it in the analysis. In addition, if a grid - # is completely contained within the time range, it will be included. 
- # Used by temporalCoverage_flag - return { - "MinRH": 5, - "MaxRH": 5, - "MinT": 4, - "MaxT": 4, - #"Haines":0, - #"PoP" : 0, - } - - def minMax_std_deviation(self, parmHisto, timeRange, componentName): - # Replaces MINMAX_STD_DEVIATION - # Number of standard deviations to compute around the weighted - # average for a stdDev_MinMax - return 1.4 - - def element_outUnits_dict(self, tree, node): - dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) - dict["Wind"] = "mph" - dict["Wind20ft"] = "mph" - dict["WindGust"] = "mph" - return dict - - def generateForecast(self, argDict): - # Generate formatted product for a list of edit areas - - # Get variables from varDict and Definition - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." 
- - # Determine time ranges - self._determineTimeRanges(argDict) - - # Sample the data - self._sampleData(argDict) - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get variables from Definition and VariableList - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - # Set up product-specific variables - self._colWidth = 13 - if self._columnJustification == "l": - self._rowLabelWidth = 22 - else: - self._rowLabelWidth = 24 - self._fixedValueWidth = 13 - self._analysisList = self._getAnalysisList() - - return None - - def _determineTimeRanges(self, argDict): - # Determine the time ranges which need to be samplePM - # Set up self._timeRangeList, self._extendedRange - # Create a list (or lists) of tuples: (timeRange, timeRangeLabel) - self._currentTime = argDict['creationTime'] - self._isDST = time.localtime(self._currentTime)[8] - self._currentHour = time.gmtime(self._currentTime)[3] - - if self._productIssuance == "Morning": - rangeNames = ["Today", "Tonight", "Tomorrow"] - - else: - dayTime3 = self.createTimeRange(54, 66, "LT") - rangeNames = 
["Tonight", "Tomorrow", "Tomorrow Night", dayTime3] - - self._timeRangeList = self.getTimeRangeList( - argDict, rangeNames, self._getLabel) - - # Determine time range to BEGIN the extended forecast - length = len(self._timeRangeList) - lastPeriod = self._timeRangeList[length-1][0] - - self._extendedRange = TimeRange.TimeRange( - lastPeriod.endTime(), lastPeriod.endTime() + 3600) - - # Determine prior time range - firstPeriod, label = self._timeRangeList[0] - self._priorTimeRange = TimeRange.TimeRange( - firstPeriod.startTime() - 24*3600, firstPeriod.startTime()) - - # Get entire timeRange of table for Headlines - # Tom says: I'm very unsure about removing this line........... - self._timeRange = TimeRange.TimeRange( - firstPeriod.startTime(), lastPeriod.endTime()) - argDict["productTimeRange"] = self._timeRange - - # Determine issue time - self._issueTime = AbsTime.current() - - # Sets up the expiration time - self._expireTime, self._ddhhmmTimeExpire = \ - self.getExpireTimeFromLToffset(self._currentTime, - self.expireOffset(), "") - - # Calculate current times - self._ddhhmmTime = time.strftime( - "%d%H%M", time.gmtime(self._currentTime)) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - - return - - def _sampleData(self, argDict): - # Get a sampler (or set of samplers) - samplerList = [] - samplerList.append((self._getAnalysisList(), self._timeRangeList)) - samplerList.append((self._priorAnalysisList(), - [(self._priorTimeRange, "")])) - - sampleInfo = [] - for analysisList, periods in samplerList: - sampleInfo.append((analysisList, periods, self._areaList)) - self._sampler = self.getSampler(argDict, sampleInfo) - # Get sampler for first 12 hours of extended period - extTimeRange = self._extendedRange - self._extTR = TimeRange.TimeRange(extTimeRange.startTime(), - extTimeRange.startTime() + 43200) - extTrTuple = [(self._extTR, "Extended")] - self._extAnalList = self._getExtAnalysisList() - extSampleInfo = 
(self._extAnalList, extTrTuple, self._areaList) - self._extSampler = self.getSampler(argDict, extSampleInfo) - return self._sampler - - def _preProcessProduct(self, fcst, argDict): - # Add product heading to fcst string - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - # Put in a place holder for the headlines to be substituted in - # "postProcessProduct" - fcst = fcst + "" - self._prodHeadlines = [] - - fcst = fcst + ".DISCUSSION..." + "\n\n\n\n\n" - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas) - fcst = fcst + areaHeader - - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - self._addHeadlines(headlines) - fcst = fcst + headlines - - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - # Return a text string representing the product - # for the given edit area - # Column headings - fcst = fcst + string.center("", self._rowLabelWidth) - for period, label in self._timeRangeList: - if label == "Rest of Today": - label = "Today" - if label == "Rest of Tonight": - label = "Tonight" - if self._columnJustification == "l": - fcst = fcst + string.ljust(label, self._colWidth) - else: - fcst = fcst + 
string.rjust(label, self._colWidth) - fcst = fcst + "\n\n" - - # Get the statistics for this edit area and all time ranges - statList = self.getStatList( - self._sampler, self._analysisList, self._timeRangeList, editArea) - - # Get the prior statistics for this edit area and all time ranges - priorStatDict = self.getStatDict( - self._sampler, self._priorAnalysisList(), self._priorTimeRange, - editArea) - - # Get a statDict for the first 12 hours of the extended - self._extStatDict = self.getStatDict(self._extSampler, - self._extAnalList, self._extTR, - editArea) - - # Format each row of table - for label, method in self._rowList(): - fcst = method(fcst, label, statList, priorStatDict) - fcst = fcst + "\n" - fcst = fcst + "Remarks...None.\n\n" - - # Produce Individual Extended Forecast - if self._individualExtended == 1: - if self._extendedLabel == 1: - fcst = fcst + ".FORECAST FOR DAYS 3 THROUGH 7...\n\n" - extended = self.generateProduct("ExtendedNarrative", argDict, - area = editArea, timeRange=self._extendedRange) - fcst = fcst + extended - return fcst - - def _cloudCover_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._sky, None, self._rowLabelWidth, self._fixedValueWidth, - self._columnJustification) - return fcst - - def _precipType_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self.wxVal, ["Wx"], self._rowLabelWidth, self._fixedValueWidth, - self._columnJustification) - return fcst - - def _chancePrecip_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._popVal, None, self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _tempWithTrend_row(self, fcst, label, statList, priorStatDict): - dayElement = "MaxT" - nightElement = "MinT" - dayMinMax = 
"Max" - nightMinMax = "Min" - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self.dayOrNightVal, [dayElement, nightElement, dayMinMax, - nightMinMax, "Ttrend", priorStatDict, statList, - self._timeRangeList], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _temp_row(self, fcst, label, statList, priorStatDict): - dayElement = "MaxT" - nightElement = "MinT" - dayMinMax = "Max" - nightMinMax = "Min" - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self.dayOrNightVal, [dayElement, nightElement, dayMinMax, - nightMinMax, None, priorStatDict, statList, - self._timeRangeList], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _rhWithTrend_row(self, fcst, label, statList, priorStatDict): - if self._useRH == 1: - dayElement = "RH" - nightElement = "RH" - else: - dayElement = "MinRH" - nightElement = "MaxRH" - dayMinMax = "Min" - nightMinMax = "Max" - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, self.dayOrNightVal, - [dayElement, nightElement, dayMinMax, nightMinMax, "RHtrend", - priorStatDict, statList, self._timeRangeList], - self._rowLabelWidth, self._fixedValueWidth, self._columnJustification) - return fcst - - def _rh_row(self, fcst, label, statList, priorStatDict): - if self._useRH == 1: - dayElement = "RH" - nightElement = "RH" - else: - dayElement = "MinRH" - nightElement = "MaxRH" - dayMinMax = "Min" - nightMinMax = "Max" - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, self.dayOrNightVal, - [dayElement, nightElement, dayMinMax, nightMinMax, None, - priorStatDict, statList, self._timeRangeList], - self._rowLabelWidth, self._fixedValueWidth, self._columnJustification) - return fcst - - def _windValleyMph_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, 
self._timeRangeList, statList, - self._wind,["AM"], self._rowLabelWidth, self._fixedValueWidth, - self._columnJustification) - return fcst - - def _windRidgeMph_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._wind,["PM"], self._rowLabelWidth, self._fixedValueWidth, - self._columnJustification) - return fcst - - def _precipAmount_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._qpfVal, None, self._rowLabelWidth, self._fixedValueWidth, - self._columnJustification) - return fcst - - def _precipDuration_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._duration, ["Wx"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _precipBegin_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._begin, ["Wx__dominantWx_0", self._timeRangeList, statList], - self._rowLabelWidth, self._fixedValueWidth, - self._columnJustification) - return fcst - - def _precipEnd_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._end, ["Wx__dominantWx_0", self._timeRangeList, statList], - self._rowLabelWidth, self._fixedValueWidth, - self._columnJustification) - return fcst - - def _mixHgtFt_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._mixHgt, ["ft"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _mixHgtM_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._mixHgt, 
["m"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _transWindKts_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._transWind, ["kts"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _transWindMS_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._transWind, ["ms"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _transWindMph_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._transWind, ["mph"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _ventRate_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._ventRate, ["m/s-m"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _ventRateKtFt_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._ventRate, ["kt-ft"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _ventRateMphFt_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._ventRate, ["mph-ft"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _dispersion_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._dispersion, None, self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - 
def _dsi_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._dsi, None, self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _sunHours_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._sunHours, ["HrsOfSun"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _ceiling_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._cigHeight, None, self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _cwr_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._cwr, None, self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _lal_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._getLightning, ["LAL"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _haines_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._haines, ["Haines"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _rhRecovery_row(self, fcst, label, statList, priorStatDict): - if self._useRH == 1: - element = "RH" - priorElement = "RH" - else: - element = "MaxRH" - priorElement = "MinRH" - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._rhRecovery, [element, priorElement, priorStatDict], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def 
_mixHgt500_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._mixHgt500, None, self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _stability_row(self, fcst, label, statList, priorStatDict): - fcst = fcst + self.makeRow( - label, self._colWidth, self._timeRangeList, statList, - self._stability, ["Stability"], self._rowLabelWidth, - self._fixedValueWidth, self._columnJustification) - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - fcst = fcst + "\n$$\n\n" - return fcst - - def _postProcessProduct(self, fcst, argDict): - if self._summaryExtended == 1: - fcst = fcst + "\n.FORECAST FOR DAYS 3 THROUGH 7...\n\n" - extended = self.generateProduct("ExtendedNarrative", - argDict, area=self._summaryArea, - timeRange=self._extendedRange) - fcst = fcst + extended - if self._includeOutlooks: - fcst = fcst + "\n.OUTLOOK 6 TO 10 DAYS... \n\n.OUTLOOK 8 TO 14 DAYS...\n\n\n" - fcst = fcst + "\n.OUTLOOK\n\n" - - # Make summary headline string and substitute for "" placeholder - headlineStr = "" - for h in self._prodHeadlines: - headlineStr = headlineStr + "..." + h + "...\n" - if len(self._prodHeadlines): - headlineStr = headlineStr + "\n" - fcst = fcst.replace("", headlineStr) - - # Uncomment to include = sign - #fcst = fcst + "=\n" - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - # provides expiration time offset from today's midnight based on issuance. 
- def expireOffset(self): - if self._productIssuance == "Morning": - # 6pm today - return 16 - else: - # 6am tomorrow - return 24+4 - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _getAnalysisList(self): - return[ - ("Sky", self.avg), - ("PoP", self.stdDevMaxAvg), - ("Wx", self.dominantWx, [12]), - ("Wx", self.dominantWx, [0]), - ("MaxT", self.stdDevMinMax), - ("MinT", self.stdDevMinMax), - ("T", self.minMax), - ("Wind", self.vectorAvg, [6]), - ("Wind20ft", self.vectorAvg, [6]), - ("WindGust", self.stdDevMax, [6]), - ("QPF", self.minMaxSum), - ("MaxRH", self.stdDevMinMax), - ("MinRH", self.stdDevMinMax), - ("RH", self.minMax), - ("MixHgt", self.minMax, [0]), - ("MixHgt", self.avg, [0]), - ("TransWind", self.vectorAvg, [0]), - ("VentRate", self.minMax, [0]), # aka "Dispersion" prior to RPP20 - ("DSI", self.avg), - ("HrsOfSun", self.avg), - # Uncomment the next line if you're carrying Cig Height - #("Ceiling", self.minMax), - ("CWR", self.stdDevMaxAvg), - ("Haines", self.minMaxAvg), - ("LAL", self.maximum), - ("Ttrend", self.minMax), - ("RHtrend", self.minMax), - ("Stability", self.avg), - # Uncomment the next line if you're carrying 500m mix height temp - #("MixHgt500", self.avg), - ] - - def _priorAnalysisList(self): - return[ - ("MaxT", self.avg), - ("MinT", self.avg), - ("MaxRH", self.avg), - ("MinRH", self.avg), - ("RH", self.minMax), - ("T", self.minMax), - ] - - def _getExtAnalysisList(self): - # Need to get the same items in the table's regular analysis - # list for "Wx", "PoP", "QPF" in the extended analysis list - # to work with _checkPrecip - analList = [] - for item in self._getAnalysisList(): - if item[0] in ["Wx", "PoP", "QPF"]: - analList.append(item) - #print analList - return analList - - def _getLabel(self, range): - "Produce a column label given the timeRange" - return self.getWeekday( - range, 
holidays=self._useHolidays, shiftToLocal=1, labelType="CapsAbbreviated", - today=1, tomorrow=0) - - def _sky(self, statDict, timeRange, argList): - # Return a sky value - sky = self.getStats(statDict, "Sky") - if sky is None: - value = "" - elif sky <= 6: - value = "Clear" - elif sky <= 31: - value = "MClear" - elif sky <= 69: - value = "PCldy" - elif sky <= 94: - value = "MCldy" - else: - value = "Cloudy" - return value - - def _wind(self, statDict, timeRange, argList): - # Use Wind20ft if available, - # otherwise adjust Wind - # Stats vectorAvg - - # Get Wind stats - adjust = 0 - windStats = self.getStats(statDict,"Wind20ft") - if windStats is None: - windStats = self.getStats(statDict,"Wind") - if windStats is None: - return "" - adjust = 1 - - (mag1, dir1), subRange = windStats[0] - if len(windStats) > 1: - (mag2, dir2), subRange = windStats[1] - else: - (mag2, dir2), subRange = windStats[0] - # Convert to mph - mag1 = self.ktToMph(mag1) - mag2 = self.ktToMph(mag2) - # Adjust if using Wind - if adjust: - mag1 = mag1*self._windAdjustmentFactor - mag2 = mag2*self._windAdjustmentFactor - - # Get Gust Stats - gustStats = self.getStats(statDict,"WindGust") - if gustStats is None: - gust1 = 0 - gust2 = 0 - else: - gust1,subRange = gustStats[0] - if len(gustStats) > 1: - gust2,subRange = gustStats[1] - else: - gust2 = gust1 - gust1 = self.ktToMph(gust1)*self._windAdjustmentFactor - gust2 = self.ktToMph(gust2)*self._windAdjustmentFactor - - # This method is called twice for each time period, - # once for AM winds and once for PM winds - # For the AM winds: - # If the time period is daytime, report the morning winds - # For the PM winds: - # If the time period is daytime, report the afternoon winds - # Otherwise, report the evening winds - amPm = argList[0] - day = self.getPeriod(timeRange,1) - if amPm == "AM": - if day == 1: - windMag = mag1 - windDir = dir1 - windGustVal = gust1 - else: - windMag = None - windGustVal = 0 - else: - if day == 1: - windMag = mag2 - 
windDir = dir2 - windGustVal = gust2 - else: - windMag = mag1 - windDir = dir1 - windGustVal = gust1 - - # Determine wind string - windString = "" - if windMag is not None: - if windMag < self._tableLightWindThreshold: - windString = self._tableLightWindPhrase - elif self._tableWindRanges: - windString = self._getVectorRange(((windMag-2, windMag+2), windDir)) - else: - windString = self.getVectorVal((windMag, windDir)) - - # Determine gust string - gustString = '' - if windString != "" and windString != self._tableLightWindPhrase: - if windGustVal >= self._minGustMph and (windGustVal - windMag) >= self._windGustDiffMph: - gustString = ' G'+`int(windGustVal)` - return windString + gustString - - def _qpfVal(self, statDict, timeRange, argList): - qpf = self.getStats(statDict, "QPF") - if qpf is None: - return "" - precipFlag, wx, qpf, pop = \ - self._checkPrecip(statDict, timeRange, argList) - if precipFlag: - return self.fformat(qpf, .01) - else: - return "0.00" - - def _duration(self, statDict, timeRange, argList): - precipFlag, wx, qpf, pop = \ - self._checkPrecip(statDict, timeRange, argList) - if wx == "NONE": - return "" - if not precipFlag: - return "" - statsByRange = self.getStats(statDict,"Wx__dominantWx_0") - if statsByRange is None: - return "" - # Found in TableBuilder: - return self.wxDuration(statsByRange, timeRange) - - def _begin(self, statDict, timeRange, argList): - # Check if this period should have precip based on Wx, QPF, PoP - precipFlag, wx, qpf, pop = \ - self._checkPrecip(statDict, timeRange, argList) - #print "_begin:",timeRange,precipFlag, wx, qpf, pop - if not precipFlag: - return "" - - durationRange = self._getTR(statDict, timeRange, argList) - if durationRange is None: - return "" - durStart = durationRange.startTime() - if durStart < timeRange.startTime(): - return "Continuing" - value = string.strip(self.localHourLabel(durStart)) - return value - - def _end(self, statDict, timeRange, argList): - # Check if this period should have 
precip based on Wx, QPF, PoP - precipFlag, wx, qpf, pop = \ - self._checkPrecip(statDict, timeRange, argList) - #print "_end:",timeRange,precipFlag, wx, qpf, pop - if not precipFlag: - return "" - - durationRange = self._getTR(statDict, timeRange, argList, ending=1) - if durationRange is None: - return "" - durEnd = durationRange.endTime() - if durEnd > timeRange.endTime(): - return "Continuing" - value = string.strip(self.localHourLabel(durEnd)) - return value - - def _getTR(self, statDict, timeRange, argList, ending=0): - # Get a beginning or ending timeRange for weather occurring. - #print "_getTR:",timeRange,ending - - # Parse the argList - element = argList[0] - trList = argList[1] - statList = argList[2] - - # Get the length of the statList, so we know how many periods we have - statLength = len(statList) - - # Get index for current time range - currentIndex = self._getIndex(timeRange, trList) - #print "_getTR: currentIndex = ", currentIndex, "tr = ", timeRange, "Ending = ", ending - # Use the index to access the previous and next time range in the - # statList. - if currentIndex is None: - #print "_getTR: no currentIndex returning none" - return None - nextIndex = currentIndex + 1 - - # Set prevIndex to one less than the current, unless this is the first - # period, then just use the current index. - if currentIndex > 0: - prevIndex = currentIndex - 1 - else: - prevIndex = currentIndex - prevTR = trList[prevIndex] - # If we're on the last period of the table, we need to access the stats - # from the extended portion. 
- if currentIndex < statLength - 1: - nextStatDict = statList[nextIndex] - nextTR = trList[nextIndex] - else: - nextStatDict = self._extStatDict - nextTR = self._extTR - - if prevIndex != currentIndex: - prevStatDict = statList[prevIndex] - prevStats = self.getStats(prevStatDict, element) - nextStats = self.getStats(nextStatDict, element) - eStats = self.getStats(statDict, element) - #print "_getTR: element, estats=",element,eStats - if eStats is None: - #print "_getTR: no eStats returning none" - return None - - # if looking for ending time, reverse so last time range is first - if ending == 1: - eStats.reverse() - - # "range" will be first time range found where there is "" - range = None - - for values, tr in eStats: - #print "_getTR: values, tr=",values, tr - for subKey in values: - #print "_getTR: subKey=",subKey - if self.precip_related(subKey): - range = tr - break - if range is not None: - break - - if currentIndex > 0 and not ending: - # If the precip startTime found in the previous for-loop equals the - # startTime for the current timeRange, then we need to look at the - # previous timeRange to see if precip is "Continuing". - if range is not None and range.startTime() == timeRange.startTime(): - #PJ Make sure previous period has Wx/QPF/PoP - precipFlag, wx, qpf, pop = \ - self._checkPrecip(prevStatDict, prevTR, argList) - #print "getTR beg _checkPrecip:",prevTR,precipFlag, wx, qpf, pop - if precipFlag: - prevRange = range - prevLength = len(prevStats) - lastIndex = prevLength - 1 - val, tr = prevStats[lastIndex] - for subKey in val: - if self.precip_related(subKey): - prevRange = tr - break - range = prevRange - - if ending == 1: - # If range has not been set OR the precip endTime equals the - # endTime for the current timeRange, then we need to look at the - # next timeRange to determine if precip is "Continuing". 
- if range is not None and range.endTime() == timeRange.endTime(): - #PJ Make sure next period has Wx/QPF/PoP - precipFlag, wx, qpf, pop = \ - self._checkPrecip(nextStatDict, nextTR, argList) - #print "getTR end _checkPrecip:",nextTR,precipFlag, wx, qpf, pop - if precipFlag: - nextRange = None - val, tr = nextStats[0] - if len(val) == 0: - return range - for subKey in val: - if self.precip_related(subKey): - nextRange = tr - break - - if nextRange is None: - return range - - range = nextRange - #print "_getTR: returning:",range - return range - - def _getIndex(self, timeRange, trList): - index = 0 - for tr, label in trList: - if timeRange == tr: - return index - index = index + 1 - return - - def _mixHgt(self, statDict, timeRange, argList): - # Report MixHgt - mixHgt = self._mixHgtValue(statDict, timeRange, argList) - if mixHgt is None: - return "" - else: - return self.getScalarVal(mixHgt) - - def _mixHgtValue(self, statDict, timeRange, argList): - # Report MixHgt - units = argList[0] - day = self.getPeriod(timeRange, 1) - method = self._mixHgtMethod - if day: - minMax = "Max" - else: - minMax = "Min" - if self._mixingParmsDayAndNight: - day = 1 - if day == 1: - mixHgt = self._getMixHgt(statDict, minMax, method) - if mixHgt != None: - if units == "m": - mixHgt = mixHgt/3.2808 - mixHgt = self.round(mixHgt,"Nearest",1) - return mixHgt - else: - return None - - def _getMixHgt(self, statDict, minMax, method): - # Returns min and max MixHgt with associated time ranges - avgMixStats = self.getStats(statDict, "MixHgt__avg") - minMaxMixStats = self.getStats(statDict, "MixHgt") - if avgMixStats is None or minMaxMixStats is None: - return None - #print "mixStats = ", mixStats, "\n\n" - mixMax = -1 - mixAvg = -1 - mixMin = 200000 - # Find the highest average mix height and subRange - method = self._mixHgtMethod - for mixStats, subRange in avgMixStats: - avg = mixStats - if avg > mixAvg: - mixAvg = avg - avgRange = subRange - # Find the absolute min and max mixHgt and 
subRanges - for mixStats, subRange in minMaxMixStats: - min, max = self.getValue(mixStats, "MinMax") - if max > mixMax: - mixMax = max - maxRange = subRange - if min < mixMin: - mixMin = min - minRange = subRange - if minMax == "Min": - return mixMin - elif minMax == "Max": - if method == "Avg": - return mixAvg - else: - return mixMax - elif minMax == "MinMax": - if method == "Avg": - return mixMin, mixAvg - else: - return mixMin, mixMax - else: - return (mixMin, minRange), (mixMax, maxRange), (mixAvg, avgRange) - - def _getCorrelatedStats(self, statDict, element, minMax, dataType=None): - # Return the value (min or max) for the given element - # that correlates with the min or max Mixing Height - statsByRange = self.getStats(statDict, element) - method = self._mixHgtMethod - if statsByRange is None: - return None - # Find the subRange that has the min or max mixing height' - mixHgt = self._getMixHgt(statDict, "All", method) - if mixHgt is None: - return None - mixMin, mixMax, mixAvg = mixHgt - if minMax == "Min": - mix, subRange = mixMin - else: - if method == "Avg": - mix, subRange = mixAvg - else: - mix, subRange = mixMax - # Find the first stats that overlap the mixing height subRange - for stats, statRange in statsByRange: - if statRange.overlaps(subRange): - break - stats = self.getValue(stats, minMax, dataType) - return stats - - - def _transWind(self, statDict, timeRange, argList): - # Return the transport wind as a string - day = self.getPeriod(timeRange, 1) - if day: - minMax = "Max" - else: - minMax = "Min" - if self._mixingParmsDayAndNight: - day = 1 - if day == 1: - transWind = self._transWindValue(statDict, timeRange, argList) - if transWind is None: - return "N/A" - mag, dir = transWind - return self.getVectorVal((mag,dir)) - else: - return "" - - def _transWindValue(self, statDict, timeRange, argList): - # Return the transport wind as a tuple of magnitude and direction - - units = argList[0] - day = self.getPeriod(timeRange, 1) - if day: - minMax = 
"Max" - else: - minMax = "Min" - if self._mixingParmsDayAndNight: - day = 1 - if day == 1: - transWind = self._getCorrelatedStats( - statDict, "TransWind", minMax, self.VECTOR()) - if transWind != None: - mag, dir = transWind - if units == "ms": - mag = mag/1.94 - elif units == "mph": - mag = mag * 1.15 - mag = self.round(mag, "Nearest", 1) - return mag, dir - else: - return None - - def _getVectorRange(self, value): - # Return text representation of vector value - # Value is a tuple of magnitude and direction - # E.g. returned value: SW 19 - - # Check for no data - if value == () or value is None: - return " " - else: - mag1, mag2 = value[0] - dir = value[1] - mag1 = `int(mag1)` - mag2 = `int(mag2)` - magStr = mag1 + "-" + mag2 - magStrLen = len(magStr) - rjustLen = magStrLen + 1 - magStr = string.rjust(magStr, rjustLen) - if type(dir) is not types.StringType: - dir = self.dirToText(dir) - dirStr = string.rjust(dir,2) - return dirStr + magStr - - def wxVal(self, stats, timeRange, argList): - # Return a weather text string value - # The desired element name muxt be the first element of argList - # e.g., SNOW - element = argList[0] - wxStats = self.getStats(stats, element) - if wxStats is None: - return "" - value = "" - #print "\nIn wxVal" - for wxValue, timeRange in wxStats: - #print wxValue, timeRange - val = self.short_weather_phrase(element, wxValue) - val = val.replace("|", " ") - val = string.replace(val,"THUNDER STORMS","THUNDERSTORMS") - val = string.replace(val,"THUNDERSTORMS","TSTMS") - val = string.replace(val,"FREEZING RAIN","FRZ RAIN") - val = string.replace(val,"FREEZING DRIZZLE","FRZ DRZL") - val = string.replace(val,"RAIN SHOWERS","SHOWERS") - val = string.replace(val,"thunder storms","thunderstorms") - val = string.replace(val,"thunderstorms","tstms") - val = string.replace(val,"freezing rain","frz rain") - val = string.replace(val,"freezing drizzle","frz drzl ") - val = string.replace(val,"rain showers ","showers") - if self.wxOrder(val)= 0: - 
isFreezing = 1 - if val == "rain": - isLiquid = 1 - if val == "snow": - isSnow = 1 - if val == "sleet": - isSleet = 1 - if self.wxOrder(val) 1: - value = "1-8 STRIKES" - if lal > 2: - value = "9-15 STRIKES" - if lal > 3: - value = "16-25 STRIKES" - if lal > 4: - value = ">25 STRIKES" - if lal > 5: - value = "DRY LIGHTNING" - else: - if lal is None: - value = "N/A" - else: - value = self.getScalarVal(lal) - return value - - def _haines(self,statDict, timeRange, argList): - haines = self.getStats(statDict, "Haines") - if haines is None: - return "" - min, max, avg = haines - return string.strip(`int(avg)`) - - def _rhRecovery(self, statDict, timeRange, argList): - element = argList[0] - priorElement = argList[1] - priorStatDict = argList[2] - rh = self.getStats(statDict, element) - if rh is None: - return "" - rh = self.getValue(rh, "Max") - if rh > self._humidityRecovery_percentage: - return "Excellent" - priorRH = self.getStats(priorStatDict, priorElement) - if priorRH is None: - return "" - priorRH = self.getValue(priorRH, "Max") - diff = rh - priorRH - words = "" - for threshold, label in self._humidityRecovery_valueList(): - if diff <= threshold: - words = label - break - return words - - def _humidityRecovery_valueList(self): - "Used to convert percent difference to corresponding category" - return [ - (25, "Poor"), - (55, "Moderate"), - (70, "Good"), - (100,"Excellent"), - ] - - def _calcVentRate(self, statDict, timeRange, argList): - units=argList[0] - if units == "kt-ft": - mixHgtArgList=["ft"] - transWindArgList=["kts"] - elif units == "mph-ft": - mixHgtArgList=["ft"] - transWindArgList=["mph"] - elif units == "m/s-m": - mixHgtArgList=["m"] - transWindArgList=["ms"] - else: - # Unknown configuration - return None - - mixHgt = self._mixHgtValue(statDict, timeRange, mixHgtArgList) - if mixHgt is None: - return None - transWind = self._transWindValue(statDict, timeRange, transWindArgList) - if transWind is None: - return None - mag,dir = transWind - return 
mixHgt*mag - - def _ventRate(self, statDict, timeRange, argList): - day = self.getPeriod(timeRange, 1) - if day: - minMax = "Max" - else: - minMax = "Min" - if self._mixingParmsDayAndNight: - day = 1 - if day == 1: - vr = self._getCorrelatedStats(statDict, "VentRate", minMax) - if vr is None: - vr = self._calcVentRate(statDict, timeRange, argList) - if vr is None: - return "" - else: - units = argList[0] - if units == "m/s-m": - vr = vr/1.94/3.2808 - elif units == "mph-ft": - vr = vr*1.15 - return `int(self.round(vr, "Nearest", 1))` - else: - return "" - - def _dispersion(self, statDict, timeRange, argList): - day = self.getPeriod(timeRange, 1) - if day: - minMax = "Max" - else: - minMax = "Min" - if self._mixingParmsDayAndNight: - day = 1 - if day == 1: - vr = self._getCorrelatedStats(statDict, "VentRate", minMax) - if vr is None: - argList = ["kt-ft"] - vr = self._calcVentRate(statDict, timeRange, argList) - if vr is None: - return "" - elif vr < 2000: - return "1" - elif vr < 4000: - return "2" - elif vr < 8000: - return "3" - elif vr < 16000: - return "4" - else: - return "5" - else: - return "" - - def _dsi(self, statDict, timeRange, argList): - # Return dsi - dsi = self.getStats(statDict, "DSI") - if dsi is None: - return "N/A" - else: - return self.getScalarVal(dsi) - - def _sunHours(self, statDict, timeRange, argList): - # Return sunshine hours...in day periods only! 
- sunStats = self.getStats(statDict, "HrsOfSun") - if sunStats is None: - return "N/A" - else: - day = self.getPeriod(timeRange, 1) - if day == 1: - return self.getScalarVal(sunStats) - else: - return "" - - def _popVal(self, statDict, timeRange, argList): - # Return the max PoP if weather in this period - pop = self.getStats(statDict, "PoP") - if pop is None: - return " " - precipFlag, wx, qpf, pop = \ - self._checkPrecip(statDict, timeRange, argList) - if precipFlag: - return self.getScalarVal(pop) - else: - return "0" - - def _checkPrecip(self, statDict, timeRange, argList): - """This sets a flag to indicate precip or no precip - in the time range. Checks Wx, PoP and QPF to allow - different rows with precip related info to be consistent. - Also provides the value for the Wx, QPF and PoP rows. This is - so the same rounded value is used for all checks. So if you - need to change rounding for PoP or QPF, it is all in one - place. Finally, new Definition['popWxThreshold'] is required - (Default value should be 1). PoP < popWxThreshold indicates - no precip.""" - precipFlag = 1 - weather = self.wxVal(statDict, timeRange, ["Wx"]) - if weather == "NONE": - precipFlag = 0 - # Weather could be non-precipitating so next check QPF - qpfStats = self.getStats(statDict, "QPF") - if qpfStats is None: - precipFlag = 0 - qpf = None - else: - min, max, sum = qpfStats - qpf = self.round(sum, "Nearest", .01) - if qpf <= 0.0: - precipFlag = 0 - # Next check pop: - pop = self.getStats(statDict, "PoP") - if pop is None: - precipFlag = 0 - else: - pop = self.round(pop, "Nearest", 10) - if pop < self._popWxThreshold: - precipFlag = 0 - return precipFlag, weather, qpf, pop - - def _cwr(self, statDict, timeRange, argList): - # Return the max CWR. 
Make sure that weather is not "NONE" - weather = self.wxVal(statDict, timeRange, ["Wx"]) - if weather == "NONE": - return "0" - else: - val = self.getStats(statDict, "CWR") - if val is None: - return " " - val = self.round(val, "Nearest", 10) - return self.getScalarVal(val) - - def _mixHgt500(self, statDict, timeRange, argList): - # Return 500m MixHgt for daytime only - mixTempStats = self.getStats(statDict, "MixHgt500") - day = self.getPeriod(timeRange, 1) - if day == 1: - if mixTempStats is None: - return "N/A" - else: - return self.getScalarVal(mixTempStats) - else: - return "" - - def _cigHeight(self, statDict, timeRange, argList): - # Return ceiling height in feet - cigStats = self.getStats(statDict, "Ceiling") - if cigStats is None: - return "N/A" - else: - min, max = cigStats - min = self.round(min, "Nearest", 1) - if min <= 3000: - value = self.round(min, "Nearest", 100) - elif min <= 6500: - value = self.round(min, "Nearest", 500) - elif min <= 12000: - value = self.round(min, "Nearest", 1000) - else: - return "NO CIG" - return `value` - - def _addHeadlines(self, headlines): - # Add the headlines to the list of product headlines - headlines = headlines.split("...") - for headline in headlines: - if len(headline) == 0 or headline[0] == '\n': - continue - if headline not in self._prodHeadlines: - self._prodHeadlines.append(headline) - - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. - def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - return [ - ('FW.W', allActions, 'FireWx'), # RED FLAG WARNING - ('FW.A', allActions, 'FireWx'), # FIRE WEATHER WATCH - ] - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. 
+## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: This produces a Fire Weather Forecast in tabular format. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# FWFTable, FWFTable___Definition, FWFTable__Overrides +#------------------------------------------------------------------------- +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas, default is Combinations +# +# productName defines name of product e.g. "Fire Weather Table" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "FWFBOS" +# areaName (opt.) Area name for product header, such as "Western New York" +# wfoCityState WFO location, such as "Buffalo NY" +# +# Optional Configuration Items +# editAreaSuffix default None. Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. 
This must +# be defined or the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +# +# periodCombining If 1, an attempt will be made to combine components +# or time periods into one. Otherwise no period +# combining will be done. +# columnJustification # Left (l) or right (r) justification for columns +# areaDictionary Modify the AreaDictionary utility with UGC information +# about zones. +# useRH If 1, use RH grids instead of MaxRH, MinRH +# summaryExtended +# summaryArea If summaryExtended == 1, then a summary extended forecast will +# be generated for the given summaryArea +# individualExtended If individualExtended == 1, an extended forecast will be +# generated for each individual area +# extendedLabel If extendedLabel== 1, a label will be included for each +# individual extended +# mixingParmsDayAndNight Set this to 1 if you want Mixing Height, +# Transport Wind and Vent Index reported in +# night periods. +# mixHgtMethod Can be "Max" or "Avg" for mixHgt analysis method +# lightningPhrases Set this to 1 if you want Lightning Activity +# reported with phrases like "1-8 strikes", +# "9-15 strikes", etc. 
+# windAdjustmentFactor Winds are reported from the Wind20ft grid +# if available. Otherwise, the Wind grid is used +# with the magnitude multiplied by this wind +# adjustment factor. Winds reported by RAWS sites +# are frequently lower than ASOS winds +# due to the fact that use a 10-min average. +# A common adjustment factor is 80% (0.80). If +# you want no adjustment to the winds then set +# this variable to 1.00. +# NOTE: This value can optionally be specified as an +# nlValue dictionary. +# +# tableLightWindThreshold Threshold for a light wind string in the table +# tableLightWindPhrase Light wind string in the table +# tableLightWindThreshold Threshold for a light wind string in the table +# minGustMph Gusts will not be reported below this value +# windGustDiffMph Gusts will be reported only if the difference between +# gust and max wind exceeds this amount. +# +# humidityRecovery_percentage If max humidity is above this percentage, +# humidity recovery will be Excellent. +# rhPhraseThreshold The MinRH phrase will be included in the extended, IF +# the MinRH is less than this threshold. +# The default (-1) will not produce a MinRH phrase. +# includeOutlooks Set this to 1 to include long-range outlook +# placeholders at the end of the product. These +# are appended by _postProcessProduct. +# useHolidays Set to 1 to use holidays in the time period labels +# +# Weather-related flags +# hoursSChcEnds - specifies hours past the beginning of the +# first period of the product to stop including 'Slight +# Chance' or 'Isolated' weather types +# popWxThreshold -Affects the values in the following rows: +# +## * CHANCE PRECIP +## * Precip amount +## * Precip duration +## * Precip begin +## * Precip end + +## We will put values in these rows according to the following assumptions: +## --If there is no weather, then all the above fields will be blank, zero or None. 
+## --If the PoP falls below a the popWxThreshold, value (default 1), +## then all of the above fields are blank, zero, or None. +## --If QPF is 0.00, then CHANCE PRECIP, Precip duration, Precip begin and +## Precip end will represent no precip. +## --Thus, if Wx is non-accumulating i.e. drizzle or snow flurries, the Precip Type +## could be non-empty, but since QPF would be 0.0, +## then no CHANCE PRECIP, Precip duration, Precip begin and Precip end will be reported. + +# areaDictionary Modify the AreaDictionary utility with UGC +# information about zones. +# +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Sky, PoP, Wx, MaxT, MinT, T, Wind, Wind20ft, QPF, MaxRH, MinRH, +# MixHgt, TransWind, VentRate, HrsOfSun, CWR, Haines, LAL +# RH (optional -- can be used in place of MinRH, MaxRH. Set the "useRH" flag) +# Ttrend (optional -- if not included, prior day's data is used) +# RHtrend (optional -- if not included, prior day's data is used) +#------------------------------------------------------------------------- +# Edit Areas Needed: area1, area2 +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +#------------------------------------------------------------------------- +# Component Products: +# Extended +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". 
+#------------------------------------------------------------------------- +# Additional Information: +# +# COMMON OVERRIDES +# from FWF_Table +# _rowList +# from WxPhrases: +# wxCoverageDescriptors +# wxTypeDescriptors +# wxAttributeDescriptors +# wxIntensityDescriptors +# wxCombinations +# combine_T_RW +# +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Fire Weather Services. +####################################################################### + +import TextRules +import SampleAnalysis +import time, string, types +import TimeRange, AbsTime + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + (("Product Issuance", "productIssuance") , "Morning", "radio", + ["Morning", "Afternoon"]), + ] + Definition = { + "type": "smart", + "displayName": "None", + "database": "Official", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/FWFTable_.txt", + "debug": 0, + # Name of map background for creating Combinations + "mapNameForCombinations": "FireWxZones_", + + "lineLength": 66, + ## Edit Areas: + "showZoneCombiner" : 1, # 1 to cause zone combiner to display + "defaultEditAreas" : "Combinations_FWFTable__", + + # product identifiers + "productName": "Fire Weather Planning Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "Georgia" -- optional + "wfoCityState": "", # Location of WFO - city state + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. 
+ "periodCombining" : 0, # If 1, combine periods, if possible + + "fixedExpire": 1, #ensure VTEC actions don't affect segment time + + + "hazardSamplingThreshold": (10, None), #(%cov, #points) + + # Optional Configuration Items + "columnJustification": "l", # Left (l) or right (r) justification for columns + "useRH": 0, # Use RH grids instead of MaxRH, MinRH + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + + # If summaryExtended == 1, then a summary extended forecast will + # be generated for the given summaryArea + "summaryExtended": 1, + "summaryArea":"FireWxAOR__", + # If individualExtended == 1, an extended forecast will be + # generated for each individual area + # If extendedLabel == 1, a label will be included for each + # individual extended + "individualExtended": 0, + "extendedLabel": 0, + # Set the following variable to 1 if you want Mixing Height, + # Transport Wind and Vent Index reported in night periods. + "mixingParmsDayAndNight": 0, + "mixHgtMethod": "Max", # Can be Max of Avg + # Set the following variable to 1 if you want Lightning Activity + # reported with phrases like "1-8 strikes", "9-15 strikes", etc. + "lightningPhrases": 0, + # Winds are reported from the Wind20ft grid if available. + # Otherwise, the Wind grid is used with the magnitude multiplied + # by this wind adjustment factor. + # Winds reported by RAWS sites are frequently lower than ASOS winds + # due to the fact that use a 10-min average. A common adjustment + # factor is 80% (0.80). If you want no adjust ment to the winds + # then set this variable to 1.00 + "windAdjustmentFactor": 0.80, + # Threshold for a light wind string in the table + "tableLightWindThreshold" : 5, + # Light wind string in the table + "tableLightWindPhrase" : "Lgt/Var", + # Use a range for the winds in the table 1=yes + "tableWindRanges" : 0, + # Gusts will not be reported below this value. 
+ "minGustMph": 17, + # Gust - wind must exceed this threshold to be reported. + "windGustDiffMph": 7, + # If max humidity is above this percentage, humidity recovery + # will be Excellent. + "humidityRecovery_percentage": 50, + # Set to MinRH value below which you would like a MinRH phrase in the Extended. + # Default (-1) is no MinRH phrase. + "rhPhraseThreshold": -1, + # Set the following variable to 1 to include long-range outlook + # placeholders at the end of the product. These are appended by + # _postProcessProduct + "includeOutlooks": 0, + + # Weather-related flags + "hoursSChcEnds": 36, + "popWxThreshold": 1, + + "language": "english", + + # Set to 1 to use holidays in the time period labels + "useHolidays": 0, + + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def _rowList(self): + # The rowList is controls what parameters go into the table. + # The list is a set of (label:method) pairs. + # You may change the label if you like. + # The order of the list determines the order of the rows in the table + # so you may re-arrange the order if you like. 
+ return [ + # Directive requirements + ("Cloud cover", self._cloudCover_row), + ("Precip type", self._precipType_row), + ("Chance precip (%)", self._chancePrecip_row), + ("Temp (24h trend)", self._tempWithTrend_row), + ("RH % (24h trend)",self._rhWithTrend_row), + # Use these if you do not want trends + #("TEMP", self._temp_row), + #("RH %", self._rh_row), + ("20ftWnd-val/AM(mph)", self._windValleyMph_row), + ("20ftWnd-rdg/PM(mph)", self._windRidgeMph_row), + # Directive optional products + ("Precip amount", self._precipAmount_row), + ("Precip duration", self._precipDuration_row), + ("Precip begin", self._precipBegin_row), + ("Precip end", self._precipEnd_row), + ("Mixing hgt(m-AGL/MSL)", self._mixHgtM_row), + ("Mixing hgt(ft-AGL/MSL)", self._mixHgtFt_row), + ("Transport wnd (kts)", self._transWindKts_row), + ("Transport wnd (m/s)", self._transWindMS_row), + ("Transport wnd (mph)", self._transWindMph_row), + ("Vent rate (kt-ft)", self._ventRateKtFt_row), + ("Vent rate (m/s-m)", self._ventRate_row), + ("Vent rate (mph-ft)", self._ventRateMphFt_row), + ("Dispersion", self._dispersion_row), + ("DSI", self._dsi_row), + ("Sunshine hours", self._sunHours_row), +## # If you need Ceiling, uncomment the Ceiling line in _getAnalysisList +## #("CEILING", self._ceiling_row), + ("CWR", self._cwr_row), + ("LAL", self._lal_row), + ("Haines Index", self._haines_row), + ("RH recovery", self._rhRecovery_row), +## # If you need 500m Mix Hgt Temp, uncomment the MixHgt500 +## # line in _getAnalysisList +## #("MIX HGT 500", self._mixHgt500_row), + ("Stability class", self._stability_row), + ] + + ######################################################################## + # COMPONENT PRODUCTS + ######################################################################## + + def ExtendedNarrative(self): + # check for period combining first + if self._periodCombining: + methodList = [self.combineComponentStats, self.assembleChildWords] + else: + methodList = [self.assembleChildWords] + + 
return { + "type": "narrative", + "displayName": None, + "timePeriodMethod ": self.timeRangeLabel, + ## Components + "methodList": methodList, + "narrativeDef": [ + ("Extended",24),("Extended",24),("Extended",24), + ("Extended",24), ("Extended",24)], + } + + def Extended(self): + return { + "type": "component", + "methodList": [self.orderPhrases, self.consolidateSubPhrases, + self.assemblePhrases, self.wordWrap], + "analysisList": [ + ("MinT", self.firstAvg), + ("MaxT", self.avg), + ("MinRH", self.avg, [0]), + ("T", self.hourlyTemp), + ("T", self.minMax), + ("PoP", self.stdDevMaxAvg, [12]), + ("PoP", self.binnedPercent, [12]), + ("Sky", self.median, [12]), + #("Wind", self.vectorMedian), + ("Wind", self.vectorMinMax, [6]), + ("Wind20ft", self.vectorMedian), + ("Wind20ft", self.vectorMinMax), + ("Wx", self.rankedWx, [12]), + ("Wx", self.rankedWx, [0]), + ], + "phraseList":[ + self.wind_summary, + self.sky_phrase, + self.skyPopWx_phrase, + self.weather_phrase, + self.reportTrends, + self.lows_phrase, + self.highs_phrase, + self.rh_phrase, + self.wind_withGusts_phrase, + ], + } + + + ############################# + # Overrides to take care of Wind in the Extended forecast + # Use Wind20ft if available, else use adjusted Wind + # + def rounding_method_dict(self, tree, node): + # Special rounding methods + # + return { + "Wind": self._adjustWind, + } + def _adjustWind(self, value, mode, increment, maxFlag): + # Rounding for marine winds + factor = self.nlValue(self._windAdjustmentFactor, value) + value = value * factor + return self.round(value, mode, increment) + + def wind_summary_words(self, tree, node): + # Uses vectorAvg, vectorMedian, vectorMinMax + # See if there's data for Wind20ft + elementName = self.chooseElement(tree, node, ["Wind20ft", "Wind"]) + words = self.vector_summary(tree, node, elementName) + return self.setWords(node, words) + + def wind_setUp(self, tree, node, gustFlag=0, element="Wind", connectorMethod=None): + # See if there's data for Wind20ft 
+ elementName = self.chooseElement(tree, node, ["Wind20ft", "Wind"]) + wind = self.ElementInfo(elementName, "List", self.VECTOR()) + elementInfoList = [wind] + if gustFlag: + windGust = self.ElementInfo( + "WindGust", "Max", phraseDef=self.gust_phrase) + elementInfoList.append(windGust) + node.set("gustFlag", 1) + if connectorMethod is None: + connectorMethod = self.vectorConnector + self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) + return self.DONE() + + def nextDay24HourLabel_flag(self, tree, node): + # Return 1 to have the TimeDescriptor module label 24 hour periods starting + # after 1600 as the next day. + # This is needed for the Fire Weather Extended product, + # but not for other products when period combining. + return 1 + + ######################################################################## + # OVERRIDING THRESHOLDS AND VARIABLES + ######################################################################## + + # Uncomment any combinations you wish to collapse. + # For example, if the first entry is uncommented, + # the phrase: scattered rain showers and widespread rain + # will collapse to: scattered rain showers. + def wxCombinations(self): + return [ + ("RW", "R"), + ("SW", "S"), + ## ("T","RW"), + ] + + def temporalCoverage_hours_dict(self, parmHisto, timeRange, componentName): + # This is the hours of overlap of a grid with the TIMERANGE + # in order to include it in the analysis. In addition, if a grid + # is completely contained within the time range, it will be included. 
+ # Used by temporalCoverage_flag + return { + "MinRH": 5, + "MaxRH": 5, + "MinT": 4, + "MaxT": 4, + #"Haines":0, + #"PoP" : 0, + } + + def minMax_std_deviation(self, parmHisto, timeRange, componentName): + # Replaces MINMAX_STD_DEVIATION + # Number of standard deviations to compute around the weighted + # average for a stdDev_MinMax + return 1.4 + + def element_outUnits_dict(self, tree, node): + dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) + dict["Wind"] = "mph" + dict["Wind20ft"] = "mph" + dict["WindGust"] = "mph" + return dict + + def generateForecast(self, argDict): + # Generate formatted product for a list of edit areas + + # Get variables from varDict and Definition + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + self._determineTimeRanges(argDict) + + # Sample the data + self._sampleData(argDict) + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get variables from Definition and VariableList + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + # Set up product-specific variables + self._colWidth = 13 + if self._columnJustification == "l": + self._rowLabelWidth = 22 + else: + self._rowLabelWidth = 24 + self._fixedValueWidth = 13 + self._analysisList = self._getAnalysisList() + + return None + + def _determineTimeRanges(self, argDict): + # Determine the time ranges which need to be samplePM + # Set up self._timeRangeList, self._extendedRange + # Create a list (or lists) of tuples: (timeRange, timeRangeLabel) + self._currentTime = argDict['creationTime'] + self._isDST = time.localtime(self._currentTime)[8] + self._currentHour = time.gmtime(self._currentTime)[3] + + if self._productIssuance == "Morning": + rangeNames = ["Today", "Tonight", "Tomorrow"] + + else: + dayTime3 = self.createTimeRange(54, 66, "LT") + rangeNames = 
["Tonight", "Tomorrow", "Tomorrow Night", dayTime3] + + self._timeRangeList = self.getTimeRangeList( + argDict, rangeNames, self._getLabel) + + # Determine time range to BEGIN the extended forecast + length = len(self._timeRangeList) + lastPeriod = self._timeRangeList[length-1][0] + + self._extendedRange = TimeRange.TimeRange( + lastPeriod.endTime(), lastPeriod.endTime() + 3600) + + # Determine prior time range + firstPeriod, label = self._timeRangeList[0] + self._priorTimeRange = TimeRange.TimeRange( + firstPeriod.startTime() - 24*3600, firstPeriod.startTime()) + + # Get entire timeRange of table for Headlines + # Tom says: I'm very unsure about removing this line........... + self._timeRange = TimeRange.TimeRange( + firstPeriod.startTime(), lastPeriod.endTime()) + argDict["productTimeRange"] = self._timeRange + + # Determine issue time + self._issueTime = AbsTime.current() + + # Sets up the expiration time + self._expireTime, self._ddhhmmTimeExpire = \ + self.getExpireTimeFromLToffset(self._currentTime, + self.expireOffset(), "") + + # Calculate current times + self._ddhhmmTime = time.strftime( + "%d%H%M", time.gmtime(self._currentTime)) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + + return + + def _sampleData(self, argDict): + # Get a sampler (or set of samplers) + samplerList = [] + samplerList.append((self._getAnalysisList(), self._timeRangeList)) + samplerList.append((self._priorAnalysisList(), + [(self._priorTimeRange, "")])) + + sampleInfo = [] + for analysisList, periods in samplerList: + sampleInfo.append((analysisList, periods, self._areaList)) + self._sampler = self.getSampler(argDict, sampleInfo) + # Get sampler for first 12 hours of extended period + extTimeRange = self._extendedRange + self._extTR = TimeRange.TimeRange(extTimeRange.startTime(), + extTimeRange.startTime() + 43200) + extTrTuple = [(self._extTR, "Extended")] + self._extAnalList = self._getExtAnalysisList() + extSampleInfo = 
(self._extAnalList, extTrTuple, self._areaList) + self._extSampler = self.getSampler(argDict, extSampleInfo) + return self._sampler + + def _preProcessProduct(self, fcst, argDict): + # Add product heading to fcst string + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + # Put in a place holder for the headlines to be substituted in + # "postProcessProduct" + fcst = fcst + "" + self._prodHeadlines = [] + + fcst = fcst + ".DISCUSSION..." + "\n\n\n\n\n" + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas) + fcst = fcst + areaHeader + + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + self._addHeadlines(headlines) + fcst = fcst + headlines + + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + # Return a text string representing the product + # for the given edit area + # Column headings + fcst = fcst + string.center("", self._rowLabelWidth) + for period, label in self._timeRangeList: + if label == "Rest of Today": + label = "Today" + if label == "Rest of Tonight": + label = "Tonight" + if self._columnJustification == "l": + fcst = fcst + string.ljust(label, self._colWidth) + else: + fcst = fcst + 
string.rjust(label, self._colWidth) + fcst = fcst + "\n\n" + + # Get the statistics for this edit area and all time ranges + statList = self.getStatList( + self._sampler, self._analysisList, self._timeRangeList, editArea) + + # Get the prior statistics for this edit area and all time ranges + priorStatDict = self.getStatDict( + self._sampler, self._priorAnalysisList(), self._priorTimeRange, + editArea) + + # Get a statDict for the first 12 hours of the extended + self._extStatDict = self.getStatDict(self._extSampler, + self._extAnalList, self._extTR, + editArea) + + # Format each row of table + for label, method in self._rowList(): + fcst = method(fcst, label, statList, priorStatDict) + fcst = fcst + "\n" + fcst = fcst + "Remarks...None.\n\n" + + # Produce Individual Extended Forecast + if self._individualExtended == 1: + if self._extendedLabel == 1: + fcst = fcst + ".FORECAST FOR DAYS 3 THROUGH 7...\n\n" + extended = self.generateProduct("ExtendedNarrative", argDict, + area = editArea, timeRange=self._extendedRange) + fcst = fcst + extended + return fcst + + def _cloudCover_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._sky, None, self._rowLabelWidth, self._fixedValueWidth, + self._columnJustification) + return fcst + + def _precipType_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self.wxVal, ["Wx"], self._rowLabelWidth, self._fixedValueWidth, + self._columnJustification) + return fcst + + def _chancePrecip_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._popVal, None, self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _tempWithTrend_row(self, fcst, label, statList, priorStatDict): + dayElement = "MaxT" + nightElement = "MinT" + dayMinMax = 
"Max" + nightMinMax = "Min" + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self.dayOrNightVal, [dayElement, nightElement, dayMinMax, + nightMinMax, "Ttrend", priorStatDict, statList, + self._timeRangeList], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _temp_row(self, fcst, label, statList, priorStatDict): + dayElement = "MaxT" + nightElement = "MinT" + dayMinMax = "Max" + nightMinMax = "Min" + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self.dayOrNightVal, [dayElement, nightElement, dayMinMax, + nightMinMax, None, priorStatDict, statList, + self._timeRangeList], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _rhWithTrend_row(self, fcst, label, statList, priorStatDict): + if self._useRH == 1: + dayElement = "RH" + nightElement = "RH" + else: + dayElement = "MinRH" + nightElement = "MaxRH" + dayMinMax = "Min" + nightMinMax = "Max" + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, self.dayOrNightVal, + [dayElement, nightElement, dayMinMax, nightMinMax, "RHtrend", + priorStatDict, statList, self._timeRangeList], + self._rowLabelWidth, self._fixedValueWidth, self._columnJustification) + return fcst + + def _rh_row(self, fcst, label, statList, priorStatDict): + if self._useRH == 1: + dayElement = "RH" + nightElement = "RH" + else: + dayElement = "MinRH" + nightElement = "MaxRH" + dayMinMax = "Min" + nightMinMax = "Max" + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, self.dayOrNightVal, + [dayElement, nightElement, dayMinMax, nightMinMax, None, + priorStatDict, statList, self._timeRangeList], + self._rowLabelWidth, self._fixedValueWidth, self._columnJustification) + return fcst + + def _windValleyMph_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, 
self._timeRangeList, statList, + self._wind,["AM"], self._rowLabelWidth, self._fixedValueWidth, + self._columnJustification) + return fcst + + def _windRidgeMph_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._wind,["PM"], self._rowLabelWidth, self._fixedValueWidth, + self._columnJustification) + return fcst + + def _precipAmount_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._qpfVal, None, self._rowLabelWidth, self._fixedValueWidth, + self._columnJustification) + return fcst + + def _precipDuration_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._duration, ["Wx"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _precipBegin_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._begin, ["Wx__dominantWx_0", self._timeRangeList, statList], + self._rowLabelWidth, self._fixedValueWidth, + self._columnJustification) + return fcst + + def _precipEnd_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._end, ["Wx__dominantWx_0", self._timeRangeList, statList], + self._rowLabelWidth, self._fixedValueWidth, + self._columnJustification) + return fcst + + def _mixHgtFt_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._mixHgt, ["ft"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _mixHgtM_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._mixHgt, 
["m"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _transWindKts_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._transWind, ["kts"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _transWindMS_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._transWind, ["ms"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _transWindMph_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._transWind, ["mph"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _ventRate_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._ventRate, ["m/s-m"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _ventRateKtFt_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._ventRate, ["kt-ft"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _ventRateMphFt_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._ventRate, ["mph-ft"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _dispersion_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._dispersion, None, self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + 
def _dsi_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._dsi, None, self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _sunHours_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._sunHours, ["HrsOfSun"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _ceiling_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._cigHeight, None, self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _cwr_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._cwr, None, self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _lal_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._getLightning, ["LAL"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _haines_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._haines, ["Haines"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _rhRecovery_row(self, fcst, label, statList, priorStatDict): + if self._useRH == 1: + element = "RH" + priorElement = "RH" + else: + element = "MaxRH" + priorElement = "MinRH" + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._rhRecovery, [element, priorElement, priorStatDict], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def 
_mixHgt500_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._mixHgt500, None, self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _stability_row(self, fcst, label, statList, priorStatDict): + fcst = fcst + self.makeRow( + label, self._colWidth, self._timeRangeList, statList, + self._stability, ["Stability"], self._rowLabelWidth, + self._fixedValueWidth, self._columnJustification) + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + fcst = fcst + "\n$$\n\n" + return fcst + + def _postProcessProduct(self, fcst, argDict): + if self._summaryExtended == 1: + fcst = fcst + "\n.FORECAST FOR DAYS 3 THROUGH 7...\n\n" + extended = self.generateProduct("ExtendedNarrative", + argDict, area=self._summaryArea, + timeRange=self._extendedRange) + fcst = fcst + extended + if self._includeOutlooks: + fcst = fcst + "\n.OUTLOOK 6 TO 10 DAYS... \n\n.OUTLOOK 8 TO 14 DAYS...\n\n\n" + fcst = fcst + "\n.OUTLOOK\n\n" + + # Make summary headline string and substitute for "" placeholder + headlineStr = "" + for h in self._prodHeadlines: + headlineStr = headlineStr + "..." + h + "...\n" + if len(self._prodHeadlines): + headlineStr = headlineStr + "\n" + fcst = fcst.replace("", headlineStr) + + # Uncomment to include = sign + #fcst = fcst + "=\n" + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + # provides expiration time offset from today's midnight based on issuance. 
+ def expireOffset(self): + if self._productIssuance == "Morning": + # 6pm today + return 16 + else: + # 6am tomorrow + return 24+4 + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _getAnalysisList(self): + return[ + ("Sky", self.avg), + ("PoP", self.stdDevMaxAvg), + ("Wx", self.dominantWx, [12]), + ("Wx", self.dominantWx, [0]), + ("MaxT", self.stdDevMinMax), + ("MinT", self.stdDevMinMax), + ("T", self.minMax), + ("Wind", self.vectorAvg, [6]), + ("Wind20ft", self.vectorAvg, [6]), + ("WindGust", self.stdDevMax, [6]), + ("QPF", self.minMaxSum), + ("MaxRH", self.stdDevMinMax), + ("MinRH", self.stdDevMinMax), + ("RH", self.minMax), + ("MixHgt", self.minMax, [0]), + ("MixHgt", self.avg, [0]), + ("TransWind", self.vectorAvg, [0]), + ("VentRate", self.minMax, [0]), # aka "Dispersion" prior to RPP20 + ("DSI", self.avg), + ("HrsOfSun", self.avg), + # Uncomment the next line if you're carrying Cig Height + #("Ceiling", self.minMax), + ("CWR", self.stdDevMaxAvg), + ("Haines", self.minMaxAvg), + ("LAL", self.maximum), + ("Ttrend", self.minMax), + ("RHtrend", self.minMax), + ("Stability", self.avg), + # Uncomment the next line if you're carrying 500m mix height temp + #("MixHgt500", self.avg), + ] + + def _priorAnalysisList(self): + return[ + ("MaxT", self.avg), + ("MinT", self.avg), + ("MaxRH", self.avg), + ("MinRH", self.avg), + ("RH", self.minMax), + ("T", self.minMax), + ] + + def _getExtAnalysisList(self): + # Need to get the same items in the table's regular analysis + # list for "Wx", "PoP", "QPF" in the extended analysis list + # to work with _checkPrecip + analList = [] + for item in self._getAnalysisList(): + if item[0] in ["Wx", "PoP", "QPF"]: + analList.append(item) + #print analList + return analList + + def _getLabel(self, range): + "Produce a column label given the timeRange" + return self.getWeekday( + range, 
holidays=self._useHolidays, shiftToLocal=1, labelType="CapsAbbreviated", + today=1, tomorrow=0) + + def _sky(self, statDict, timeRange, argList): + # Return a sky value + sky = self.getStats(statDict, "Sky") + if sky is None: + value = "" + elif sky <= 6: + value = "Clear" + elif sky <= 31: + value = "MClear" + elif sky <= 69: + value = "PCldy" + elif sky <= 94: + value = "MCldy" + else: + value = "Cloudy" + return value + + def _wind(self, statDict, timeRange, argList): + # Use Wind20ft if available, + # otherwise adjust Wind + # Stats vectorAvg + + # Get Wind stats + adjust = 0 + windStats = self.getStats(statDict,"Wind20ft") + if windStats is None: + windStats = self.getStats(statDict,"Wind") + if windStats is None: + return "" + adjust = 1 + + (mag1, dir1), subRange = windStats[0] + if len(windStats) > 1: + (mag2, dir2), subRange = windStats[1] + else: + (mag2, dir2), subRange = windStats[0] + # Convert to mph + mag1 = self.ktToMph(mag1) + mag2 = self.ktToMph(mag2) + # Adjust if using Wind + if adjust: + mag1 = mag1*self._windAdjustmentFactor + mag2 = mag2*self._windAdjustmentFactor + + # Get Gust Stats + gustStats = self.getStats(statDict,"WindGust") + if gustStats is None: + gust1 = 0 + gust2 = 0 + else: + gust1,subRange = gustStats[0] + if len(gustStats) > 1: + gust2,subRange = gustStats[1] + else: + gust2 = gust1 + gust1 = self.ktToMph(gust1)*self._windAdjustmentFactor + gust2 = self.ktToMph(gust2)*self._windAdjustmentFactor + + # This method is called twice for each time period, + # once for AM winds and once for PM winds + # For the AM winds: + # If the time period is daytime, report the morning winds + # For the PM winds: + # If the time period is daytime, report the afternoon winds + # Otherwise, report the evening winds + amPm = argList[0] + day = self.getPeriod(timeRange,1) + if amPm == "AM": + if day == 1: + windMag = mag1 + windDir = dir1 + windGustVal = gust1 + else: + windMag = None + windGustVal = 0 + else: + if day == 1: + windMag = mag2 + 
windDir = dir2 + windGustVal = gust2 + else: + windMag = mag1 + windDir = dir1 + windGustVal = gust1 + + # Determine wind string + windString = "" + if windMag is not None: + if windMag < self._tableLightWindThreshold: + windString = self._tableLightWindPhrase + elif self._tableWindRanges: + windString = self._getVectorRange(((windMag-2, windMag+2), windDir)) + else: + windString = self.getVectorVal((windMag, windDir)) + + # Determine gust string + gustString = '' + if windString != "" and windString != self._tableLightWindPhrase: + if windGustVal >= self._minGustMph and (windGustVal - windMag) >= self._windGustDiffMph: + gustString = ' G'+repr(int(windGustVal)) + return windString + gustString + + def _qpfVal(self, statDict, timeRange, argList): + qpf = self.getStats(statDict, "QPF") + if qpf is None: + return "" + precipFlag, wx, qpf, pop = \ + self._checkPrecip(statDict, timeRange, argList) + if precipFlag: + return self.fformat(qpf, .01) + else: + return "0.00" + + def _duration(self, statDict, timeRange, argList): + precipFlag, wx, qpf, pop = \ + self._checkPrecip(statDict, timeRange, argList) + if wx == "NONE": + return "" + if not precipFlag: + return "" + statsByRange = self.getStats(statDict,"Wx__dominantWx_0") + if statsByRange is None: + return "" + # Found in TableBuilder: + return self.wxDuration(statsByRange, timeRange) + + def _begin(self, statDict, timeRange, argList): + # Check if this period should have precip based on Wx, QPF, PoP + precipFlag, wx, qpf, pop = \ + self._checkPrecip(statDict, timeRange, argList) + #print "_begin:",timeRange,precipFlag, wx, qpf, pop + if not precipFlag: + return "" + + durationRange = self._getTR(statDict, timeRange, argList) + if durationRange is None: + return "" + durStart = durationRange.startTime() + if durStart < timeRange.startTime(): + return "Continuing" + value = string.strip(self.localHourLabel(durStart)) + return value + + def _end(self, statDict, timeRange, argList): + # Check if this period should have 
precip based on Wx, QPF, PoP + precipFlag, wx, qpf, pop = \ + self._checkPrecip(statDict, timeRange, argList) + #print "_end:",timeRange,precipFlag, wx, qpf, pop + if not precipFlag: + return "" + + durationRange = self._getTR(statDict, timeRange, argList, ending=1) + if durationRange is None: + return "" + durEnd = durationRange.endTime() + if durEnd > timeRange.endTime(): + return "Continuing" + value = string.strip(self.localHourLabel(durEnd)) + return value + + def _getTR(self, statDict, timeRange, argList, ending=0): + # Get a beginning or ending timeRange for weather occurring. + #print "_getTR:",timeRange,ending + + # Parse the argList + element = argList[0] + trList = argList[1] + statList = argList[2] + + # Get the length of the statList, so we know how many periods we have + statLength = len(statList) + + # Get index for current time range + currentIndex = self._getIndex(timeRange, trList) + #print "_getTR: currentIndex = ", currentIndex, "tr = ", timeRange, "Ending = ", ending + # Use the index to access the previous and next time range in the + # statList. + if currentIndex is None: + #print "_getTR: no currentIndex returning none" + return None + nextIndex = currentIndex + 1 + + # Set prevIndex to one less than the current, unless this is the first + # period, then just use the current index. + if currentIndex > 0: + prevIndex = currentIndex - 1 + else: + prevIndex = currentIndex + prevTR = trList[prevIndex] + # If we're on the last period of the table, we need to access the stats + # from the extended portion. 
+ if currentIndex < statLength - 1: + nextStatDict = statList[nextIndex] + nextTR = trList[nextIndex] + else: + nextStatDict = self._extStatDict + nextTR = self._extTR + + if prevIndex != currentIndex: + prevStatDict = statList[prevIndex] + prevStats = self.getStats(prevStatDict, element) + nextStats = self.getStats(nextStatDict, element) + eStats = self.getStats(statDict, element) + #print "_getTR: element, estats=",element,eStats + if eStats is None: + #print "_getTR: no eStats returning none" + return None + + # if looking for ending time, reverse so last time range is first + if ending == 1: + eStats.reverse() + + # "range" will be first time range found where there is "" + range = None + + for values, tr in eStats: + #print "_getTR: values, tr=",values, tr + for subKey in values: + #print "_getTR: subKey=",subKey + if self.precip_related(subKey): + range = tr + break + if range is not None: + break + + if currentIndex > 0 and not ending: + # If the precip startTime found in the previous for-loop equals the + # startTime for the current timeRange, then we need to look at the + # previous timeRange to see if precip is "Continuing". + if range is not None and range.startTime() == timeRange.startTime(): + #PJ Make sure previous period has Wx/QPF/PoP + precipFlag, wx, qpf, pop = \ + self._checkPrecip(prevStatDict, prevTR, argList) + #print "getTR beg _checkPrecip:",prevTR,precipFlag, wx, qpf, pop + if precipFlag: + prevRange = range + prevLength = len(prevStats) + lastIndex = prevLength - 1 + val, tr = prevStats[lastIndex] + for subKey in val: + if self.precip_related(subKey): + prevRange = tr + break + range = prevRange + + if ending == 1: + # If range has not been set OR the precip endTime equals the + # endTime for the current timeRange, then we need to look at the + # next timeRange to determine if precip is "Continuing". 
+ if range is not None and range.endTime() == timeRange.endTime(): + #PJ Make sure next period has Wx/QPF/PoP + precipFlag, wx, qpf, pop = \ + self._checkPrecip(nextStatDict, nextTR, argList) + #print "getTR end _checkPrecip:",nextTR,precipFlag, wx, qpf, pop + if precipFlag: + nextRange = None + val, tr = nextStats[0] + if len(val) == 0: + return range + for subKey in val: + if self.precip_related(subKey): + nextRange = tr + break + + if nextRange is None: + return range + + range = nextRange + #print "_getTR: returning:",range + return range + + def _getIndex(self, timeRange, trList): + index = 0 + for tr, label in trList: + if timeRange == tr: + return index + index = index + 1 + return + + def _mixHgt(self, statDict, timeRange, argList): + # Report MixHgt + mixHgt = self._mixHgtValue(statDict, timeRange, argList) + if mixHgt is None: + return "" + else: + return self.getScalarVal(mixHgt) + + def _mixHgtValue(self, statDict, timeRange, argList): + # Report MixHgt + units = argList[0] + day = self.getPeriod(timeRange, 1) + method = self._mixHgtMethod + if day: + minMax = "Max" + else: + minMax = "Min" + if self._mixingParmsDayAndNight: + day = 1 + if day == 1: + mixHgt = self._getMixHgt(statDict, minMax, method) + if mixHgt != None: + if units == "m": + mixHgt = mixHgt/3.2808 + mixHgt = self.round(mixHgt,"Nearest",1) + return mixHgt + else: + return None + + def _getMixHgt(self, statDict, minMax, method): + # Returns min and max MixHgt with associated time ranges + avgMixStats = self.getStats(statDict, "MixHgt__avg") + minMaxMixStats = self.getStats(statDict, "MixHgt") + if avgMixStats is None or minMaxMixStats is None: + return None + #print "mixStats = ", mixStats, "\n\n" + mixMax = -1 + mixAvg = -1 + mixMin = 200000 + # Find the highest average mix height and subRange + method = self._mixHgtMethod + for mixStats, subRange in avgMixStats: + avg = mixStats + if avg > mixAvg: + mixAvg = avg + avgRange = subRange + # Find the absolute min and max mixHgt and 
subRanges + for mixStats, subRange in minMaxMixStats: + min, max = self.getValue(mixStats, "MinMax") + if max > mixMax: + mixMax = max + maxRange = subRange + if min < mixMin: + mixMin = min + minRange = subRange + if minMax == "Min": + return mixMin + elif minMax == "Max": + if method == "Avg": + return mixAvg + else: + return mixMax + elif minMax == "MinMax": + if method == "Avg": + return mixMin, mixAvg + else: + return mixMin, mixMax + else: + return (mixMin, minRange), (mixMax, maxRange), (mixAvg, avgRange) + + def _getCorrelatedStats(self, statDict, element, minMax, dataType=None): + # Return the value (min or max) for the given element + # that correlates with the min or max Mixing Height + statsByRange = self.getStats(statDict, element) + method = self._mixHgtMethod + if statsByRange is None: + return None + # Find the subRange that has the min or max mixing height' + mixHgt = self._getMixHgt(statDict, "All", method) + if mixHgt is None: + return None + mixMin, mixMax, mixAvg = mixHgt + if minMax == "Min": + mix, subRange = mixMin + else: + if method == "Avg": + mix, subRange = mixAvg + else: + mix, subRange = mixMax + # Find the first stats that overlap the mixing height subRange + for stats, statRange in statsByRange: + if statRange.overlaps(subRange): + break + stats = self.getValue(stats, minMax, dataType) + return stats + + + def _transWind(self, statDict, timeRange, argList): + # Return the transport wind as a string + day = self.getPeriod(timeRange, 1) + if day: + minMax = "Max" + else: + minMax = "Min" + if self._mixingParmsDayAndNight: + day = 1 + if day == 1: + transWind = self._transWindValue(statDict, timeRange, argList) + if transWind is None: + return "N/A" + mag, dir = transWind + return self.getVectorVal((mag,dir)) + else: + return "" + + def _transWindValue(self, statDict, timeRange, argList): + # Return the transport wind as a tuple of magnitude and direction + + units = argList[0] + day = self.getPeriod(timeRange, 1) + if day: + minMax = 
"Max" + else: + minMax = "Min" + if self._mixingParmsDayAndNight: + day = 1 + if day == 1: + transWind = self._getCorrelatedStats( + statDict, "TransWind", minMax, self.VECTOR()) + if transWind != None: + mag, dir = transWind + if units == "ms": + mag = mag/1.94 + elif units == "mph": + mag = mag * 1.15 + mag = self.round(mag, "Nearest", 1) + return mag, dir + else: + return None + + def _getVectorRange(self, value): + # Return text representation of vector value + # Value is a tuple of magnitude and direction + # E.g. returned value: SW 19 + + # Check for no data + if value == () or value is None: + return " " + else: + mag1, mag2 = value[0] + dir = value[1] + mag1 = repr(int(mag1)) + mag2 = repr(int(mag2)) + magStr = mag1 + "-" + mag2 + magStrLen = len(magStr) + rjustLen = magStrLen + 1 + magStr = string.rjust(magStr, rjustLen) + if type(dir) is not bytes: + dir = self.dirToText(dir) + dirStr = string.rjust(dir,2) + return dirStr + magStr + + def wxVal(self, stats, timeRange, argList): + # Return a weather text string value + # The desired element name muxt be the first element of argList + # e.g., SNOW + element = argList[0] + wxStats = self.getStats(stats, element) + if wxStats is None: + return "" + value = "" + #print "\nIn wxVal" + for wxValue, timeRange in wxStats: + #print wxValue, timeRange + val = self.short_weather_phrase(element, wxValue) + val = val.replace("|", " ") + val = string.replace(val,"THUNDER STORMS","THUNDERSTORMS") + val = string.replace(val,"THUNDERSTORMS","TSTMS") + val = string.replace(val,"FREEZING RAIN","FRZ RAIN") + val = string.replace(val,"FREEZING DRIZZLE","FRZ DRZL") + val = string.replace(val,"RAIN SHOWERS","SHOWERS") + val = string.replace(val,"thunder storms","thunderstorms") + val = string.replace(val,"thunderstorms","tstms") + val = string.replace(val,"freezing rain","frz rain") + val = string.replace(val,"freezing drizzle","frz drzl ") + val = string.replace(val,"rain showers ","showers") + if self.wxOrder(val)= 0: + 
isFreezing = 1 + if val == "rain": + isLiquid = 1 + if val == "snow": + isSnow = 1 + if val == "sleet": + isSleet = 1 + if self.wxOrder(val) 1: + value = "1-8 STRIKES" + if lal > 2: + value = "9-15 STRIKES" + if lal > 3: + value = "16-25 STRIKES" + if lal > 4: + value = ">25 STRIKES" + if lal > 5: + value = "DRY LIGHTNING" + else: + if lal is None: + value = "N/A" + else: + value = self.getScalarVal(lal) + return value + + def _haines(self,statDict, timeRange, argList): + haines = self.getStats(statDict, "Haines") + if haines is None: + return "" + min, max, avg = haines + return string.strip(repr(int(avg))) + + def _rhRecovery(self, statDict, timeRange, argList): + element = argList[0] + priorElement = argList[1] + priorStatDict = argList[2] + rh = self.getStats(statDict, element) + if rh is None: + return "" + rh = self.getValue(rh, "Max") + if rh > self._humidityRecovery_percentage: + return "Excellent" + priorRH = self.getStats(priorStatDict, priorElement) + if priorRH is None: + return "" + priorRH = self.getValue(priorRH, "Max") + diff = rh - priorRH + words = "" + for threshold, label in self._humidityRecovery_valueList(): + if diff <= threshold: + words = label + break + return words + + def _humidityRecovery_valueList(self): + "Used to convert percent difference to corresponding category" + return [ + (25, "Poor"), + (55, "Moderate"), + (70, "Good"), + (100,"Excellent"), + ] + + def _calcVentRate(self, statDict, timeRange, argList): + units=argList[0] + if units == "kt-ft": + mixHgtArgList=["ft"] + transWindArgList=["kts"] + elif units == "mph-ft": + mixHgtArgList=["ft"] + transWindArgList=["mph"] + elif units == "m/s-m": + mixHgtArgList=["m"] + transWindArgList=["ms"] + else: + # Unknown configuration + return None + + mixHgt = self._mixHgtValue(statDict, timeRange, mixHgtArgList) + if mixHgt is None: + return None + transWind = self._transWindValue(statDict, timeRange, transWindArgList) + if transWind is None: + return None + mag,dir = transWind + return 
mixHgt*mag + + def _ventRate(self, statDict, timeRange, argList): + day = self.getPeriod(timeRange, 1) + if day: + minMax = "Max" + else: + minMax = "Min" + if self._mixingParmsDayAndNight: + day = 1 + if day == 1: + vr = self._getCorrelatedStats(statDict, "VentRate", minMax) + if vr is None: + vr = self._calcVentRate(statDict, timeRange, argList) + if vr is None: + return "" + else: + units = argList[0] + if units == "m/s-m": + vr = vr/1.94/3.2808 + elif units == "mph-ft": + vr = vr*1.15 + return repr(int(self.round(vr, "Nearest", 1))) + else: + return "" + + def _dispersion(self, statDict, timeRange, argList): + day = self.getPeriod(timeRange, 1) + if day: + minMax = "Max" + else: + minMax = "Min" + if self._mixingParmsDayAndNight: + day = 1 + if day == 1: + vr = self._getCorrelatedStats(statDict, "VentRate", minMax) + if vr is None: + argList = ["kt-ft"] + vr = self._calcVentRate(statDict, timeRange, argList) + if vr is None: + return "" + elif vr < 2000: + return "1" + elif vr < 4000: + return "2" + elif vr < 8000: + return "3" + elif vr < 16000: + return "4" + else: + return "5" + else: + return "" + + def _dsi(self, statDict, timeRange, argList): + # Return dsi + dsi = self.getStats(statDict, "DSI") + if dsi is None: + return "N/A" + else: + return self.getScalarVal(dsi) + + def _sunHours(self, statDict, timeRange, argList): + # Return sunshine hours...in day periods only! 
+ sunStats = self.getStats(statDict, "HrsOfSun") + if sunStats is None: + return "N/A" + else: + day = self.getPeriod(timeRange, 1) + if day == 1: + return self.getScalarVal(sunStats) + else: + return "" + + def _popVal(self, statDict, timeRange, argList): + # Return the max PoP if weather in this period + pop = self.getStats(statDict, "PoP") + if pop is None: + return " " + precipFlag, wx, qpf, pop = \ + self._checkPrecip(statDict, timeRange, argList) + if precipFlag: + return self.getScalarVal(pop) + else: + return "0" + + def _checkPrecip(self, statDict, timeRange, argList): + """This sets a flag to indicate precip or no precip + in the time range. Checks Wx, PoP and QPF to allow + different rows with precip related info to be consistent. + Also provides the value for the Wx, QPF and PoP rows. This is + so the same rounded value is used for all checks. So if you + need to change rounding for PoP or QPF, it is all in one + place. Finally, new Definition['popWxThreshold'] is required + (Default value should be 1). PoP < popWxThreshold indicates + no precip.""" + precipFlag = 1 + weather = self.wxVal(statDict, timeRange, ["Wx"]) + if weather == "NONE": + precipFlag = 0 + # Weather could be non-precipitating so next check QPF + qpfStats = self.getStats(statDict, "QPF") + if qpfStats is None: + precipFlag = 0 + qpf = None + else: + min, max, sum = qpfStats + qpf = self.round(sum, "Nearest", .01) + if qpf <= 0.0: + precipFlag = 0 + # Next check pop: + pop = self.getStats(statDict, "PoP") + if pop is None: + precipFlag = 0 + else: + pop = self.round(pop, "Nearest", 10) + if pop < self._popWxThreshold: + precipFlag = 0 + return precipFlag, weather, qpf, pop + + def _cwr(self, statDict, timeRange, argList): + # Return the max CWR. 
Make sure that weather is not "NONE" + weather = self.wxVal(statDict, timeRange, ["Wx"]) + if weather == "NONE": + return "0" + else: + val = self.getStats(statDict, "CWR") + if val is None: + return " " + val = self.round(val, "Nearest", 10) + return self.getScalarVal(val) + + def _mixHgt500(self, statDict, timeRange, argList): + # Return 500m MixHgt for daytime only + mixTempStats = self.getStats(statDict, "MixHgt500") + day = self.getPeriod(timeRange, 1) + if day == 1: + if mixTempStats is None: + return "N/A" + else: + return self.getScalarVal(mixTempStats) + else: + return "" + + def _cigHeight(self, statDict, timeRange, argList): + # Return ceiling height in feet + cigStats = self.getStats(statDict, "Ceiling") + if cigStats is None: + return "N/A" + else: + min, max = cigStats + min = self.round(min, "Nearest", 1) + if min <= 3000: + value = self.round(min, "Nearest", 100) + elif min <= 6500: + value = self.round(min, "Nearest", 500) + elif min <= 12000: + value = self.round(min, "Nearest", 1000) + else: + return "NO CIG" + return repr(value) + + def _addHeadlines(self, headlines): + # Add the headlines to the list of product headlines + headlines = headlines.split("...") + for headline in headlines: + if len(headline) == 0 or headline[0] == '\n': + continue + if headline not in self._prodHeadlines: + self._prodHeadlines.append(headline) + + + # Returns a list of the Hazards allowed for this product in VTEC format. + # These are sorted in priority order - most important first. 
+ def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + return [ + ('FW.W', allActions, 'FireWx'), # RED FLAG WARNING + ('FW.A', allActions, 'FireWx'), # FIRE WEATHER WATCH + ] + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWM.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWM.py index 9396b44931..774c549eaf 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWM.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWM.py @@ -1,591 +1,591 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-## - -#------------------------------------------------------------------------- -# Description: This product creates a table of Fire Weather values for a list of edit areas. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# FWM, FWM___Definition, FWM__Overrides -#------------------------------------------------------------------------- -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas, default is Combinations -# -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "FWMBOS" -# -# Optional Configuration Items -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# reportAsTrendsForZONE If 1, Report data values as trends rather -# than actual values for ZONE. -# reportAsTrendsForFCST If 1, Report data values as trends rather -# than actual values for FCST. 
-# reportTandRHforZONE If 1, Report T and RH values for ZONE -# reportTandRHforFCST If 1, Report T and RH values for FCST -# reportWindDirectionForZONE If 1, Inserts comma placeholder for wind -# direction for ZONE. -# reportWindDirectionForFCST If 1, Reports wind direction for FCST. -# If reporting as trends, inserts placeholder -# comma. -# textDirection If 1, the wind direction (if included) -# is 16-point compass instead of numeric. -# fuelMoisturePlaceHolder String to hold the place for fuel moisture -# windAdjustmentFactor Winds are reported from the Wind20ft grid if -# available.Otherwise, the Wind grid is used -# with the magnitude multiplied by this wind -# adjustment factor. Winds reported by RAWS sites -# are frequently lower than ASOS winds due to the -# fact that use a 10-min average. A common -# adjustment factor is 80% (0.80). If you want -# no adjustment to the winds then set this variable -# to 1.00. -# wxDurPopThreshold Pop threshold for reporting wx duration -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Sky -# Wx -# T -# Wind -# LAL -# RH -# PoP -# Wetflag -# Ttrend (optional -- if not provided prior day's data is used) -# RHtrend (optinal -- if not provided prior day's data is used) -#------------------------------------------------------------------------- -# Edit Areas Needed: area1, area2 -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -#------------------------------------------------------------------------- -# Component Products: None -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". 
-#------------------------------------------------------------------------- -# Additional Information: -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Fire Weather Services. -####################################################################### - - -import TextRules -import SampleAnalysis -import string, time, types -import math - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - Definition = { - "type": "smart", - "displayName": "None", # for Product Generation Menu - # Source database for product. Can be "Official", "Fcst" or "ISC" - "database": "Official", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/FWM_.txt", - "debug": 0, - - "defaultEditAreas": [("area1", "086401"), - ("area2", "086402"), - ("area3", "668"), - ], - # product identifiers - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - - # optional variables - "reportAsTrendsForZONE": 1, # Report data values as trends for ZONE - "reportAsTrendsForFCST": 1, # Report data values as trends for FCST - "reportTandRHforZONE": 1, # Report T and RH values for ZONE - "reportTandRHforFCST": 1, # Report T and RH values for FCST - # wind direction - "reportWindDirectionForZONE": 0, # If 1, Inserts comma placeholder - # for wind direction for ZONE - "reportWindDirectionForFCST": 1, # Reports wind direction for FCST. - # If reporting as trends, inserts - # comma. - "textDirection": 1, # Report wind direction as 16-point compass - "fuelMoisturePlaceHolder": "", # String to hold the place for - # fuel moisture. - "windAdjustmentFactor": 0.80, # Adjustment for Wind if - # no Wind20ft grid. 
- "wxDurPopThreshold": 70, # Pop threshold for reporting wx duration - } - - ######################################################################## - # PRODUCT-SPECIFIC THRESHOLDS AND VARIABLES - ######################################################################## - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def generateForecast(self, argDict): - # Generate formatted product for a list of edit areas - - # Get variables from varDict and Definition - self._getVariables(argDict) - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." - - # Determine time ranges - self._determineTimeRanges(argDict) - - # Sample the data - self._sampleData(argDict) - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - self._issueTime = self.getCurrentTime(argDict, "%H%M%S", upperCase=1) - self._WIMSTime = self._getWIMSTime(argDict['creationTime']) - 
self._definition = argDict["forecastDef"] - - # Get variables from VariableList - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - return None - - - def _determineTimeRanges(self, argDict): - # Set up self._timeRangeList - self._timeRangeList = [] - ranges = [ - (37,38,"sky_range"), # Tomorrow 13L_14L - (37,38,"temp_range"), # Tomorrow 13L_14L - (37,38,"rh_range"), # Tomorrow 13L_14L - (14,23,"lal1_range"), # Today 13L-23L - (23,47,"lal2_range"), # Today 23L-Tomorrow 23L - (37,38,"wind_range"), # Tomorrow 13L_14L - (13,38,"maxT_range"), # Today 13L Tomorrow 13L - (13,38,"minT_range"), # Today_13L_Tomorrow_13L - (13,38,"maxRH_range"), # Today_13L_Tomorrow_13L - (13,38,"minRH_range"), # Today_13L_Tomorrow_13L - (13,29,"wxDur1_range"), # Today 13L Tomorrow_05L - (29,37,"wxDur2_range"), # Tomorrow_05L_13L - (37,38,"wetflag_range"), # Tomorrow 13L_14L - (13,14,"prior_range"), # Today 13L_14L - ] - daylight = self.daylight() - # If daylight savings in effect, adjust so that times are - # local standard times (LST) - for startHour, endHour, name in ranges: - if daylight: - startHour = startHour+1 - endHour = endHour+1 - range = self.createTimeRange(startHour, endHour) - self._timeRangeList.append((range, name)) - return - - def _sampleData(self, argDict): - # Sample the data - self._sampler = self.getSampler(argDict, - (self._getAnalysisList(), self._timeRangeList, self._areaList)) - return - - def _preProcessProduct(self, fcst, argDict): - # This is the header for the overall product - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - return fcst - - def 
_preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area - if len(areaLabel) == 6: - label = "FCST," - self._productType = "FCST" - else: - label = "ZONE," - self._productType = "ZONE" - fcst = fcst + label + areaLabel + "," + self._WIMSTime + ",13," - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - self._statList = self.getStatList( - self._sampler, self._getAnalysisList(), self._timeRangeList, editArea) - #print "\n\n", areaLabel, self._statList - - # State of Sky - str = self._sky("sky_range") - fcst = fcst + str + "," - - # Temp - str = self._getStrValue("T", "temp_range", - trendElement="Ttrend", priorRangeName="prior_range") - fcst = fcst + str + "," - - # RH - str = self._getStrValue("RH", "rh_range", - trendElement="RHtrend", priorRangeName="prior_range") - fcst = fcst + str + "," - - # LAL1 - str = self._getStrValue("LAL", "lal1_range", stats="max") - fcst = fcst + str + "," - - # LAL2 - str = self._getStrValue("LAL", "lal2_range", stats="max") - fcst = fcst + str + "," - - # Wind Dir and Speed - str = self._getWind("wind_range", priorRangeName="prior_range") - fcst = fcst + str + "," - - # Fuel Moisture - Place holder - fcst = fcst + self._fuelMoisturePlaceHolder + "," - - # T and RH - reportTandRH = 0 - if self._productType == "ZONE" and self._reportTandRHforZONE == 1: - reportTandRH = 1 - if self._productType == "FCST" and self._reportTandRHforFCST == 1: - reportTandRH = 1 - - if reportTandRH == 0: - fcst = fcst + ",,,," - else: - # MaxT - str = self._getStrValue("T", "maxT_range", stats="max") - fcst = fcst + str + "," - - # MinT - str = self._getStrValue("T", "minT_range", stats="min") - fcst = fcst + str + "," - - # MaxRH - str = self._getStrValue("RH", "maxRH_range", stats="max") - fcst = fcst + str + "," - - # MinRH - str = self._getStrValue("RH", "minRH_range", stats="min") - fcst = fcst + str + "," - - # WxDur1 - percent = self._getValue("PoP", "wxDur1_range") - if percent is 
None or percent <= self._wxDurPopThreshold: - str = "0" - else: - str = self._getWxDur("wxDur1_range") - fcst = fcst + str + "," - - # WxDur2 - percent = self._getValue("PoP", "wxDur2_range") - if percent is None or percent <= self._wxDurPopThreshold: - str = "0" - else: - str = self._getWxDur("wxDur2_range") - fcst = fcst + str + "," - - # Wet Flag - str = self._getWetflag("wetflag_range") - fcst = fcst + str - - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the footer for an edit area - return fcst + "\n" - - def _postProcessProduct(self, fcst, argDict): - fcst = string.replace( - fcst,"%IssueTime", - self.getCurrentTime(argDict,"%d%H%M", shiftToLocal=0, upperCase=1)) - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _getAnalysisList(self): - return [ - ("Sky", self.avg), - ("Wx", self.dominantWx), - ("Wx", self.dominantWx, [0]), - ("T", self.avg), - ("T", self.minMax), - ("Wind", self.vectorAvg), - ("Wind20ft", self.vectorAvg), - ("LAL", self.minMax), - ("RH", self.avg), - ("RH", self.minMax), - ("PoP", self.stdDevMaxAvg), - ("Wetflag", self.avg), - ("Ttrend", self.avg), # for trends - ("RHtrend", self.avg), # for trends - ] - - def _getWIMSTime(self, currentTime): - t_shift = 24 # Number of hours from current time - return time.strftime("%y%m%d",time.localtime(currentTime+t_shift*60*60)) - - def _sky(self, rangeName): - # Return a sky value - sky = self._getValue("Sky", rangeName) - pop = self._getValue("PoP", rangeName) - if sky is None or pop is None: - return "" - if pop < 70: - if sky <= 10: - value = "0" - elif sky <= 50: - value = "1" - elif sky <= 90: - value = "2" - elif sky > 90: - value = "3" - return value - else: - value = self._getWxCode(rangeName) - return 
value - - def _getWxCode(self, rangeName): - i=0 - for period, label in self._timeRangeList: - if label == rangeName: - break - i=i+1 - subKeyList = self.getStats(self._statList[i], "Wx") - if subKeyList is None or len(subKeyList) == 0: - return "" - # Take the first weather key of a combination - # There is a possible problem if RW shows up before T - # So we want to catch T if it occurs? - wxKey = subKeyList[0] - wxType = wxKey.wxType() - -## if wxType == "F": -## return "4" -## if wxType == "L": -## return "5" - if wxType == "R": - return "6" - if wxType == "S" or wxType == "IP": - return "7" - if wxType == "RW": - return "8" - if wxType == "T": - return "9" - return "" - - def _getWind(self, rangeName, priorRangeName=None): - # Use Wind20ft if available, otherwise adjust Wind - adjust = 0 - value = self._getValue("Wind20ft", rangeName) - if value is None: - value = self._getValue("Wind", rangeName) - if value is None: - return "" - adjust = 1 - mag, dir = value - mag_mph = self.ktToMph(mag) - if adjust: - mag_mph = mag_mph * self._windAdjustmentFactor - mag_mph = int(mag_mph) - # Report as trend OR by value - if self._reportAsTrends() == 1 and priorRangeName is not None: - adjustPrior = 0 - priorValue = self._getValue("Wind20ft", priorRangeName) - if priorValue is None: - priorValue = self._getValue("Wind", priorRangeName) - if priorValue is None: - pass - else: - adjust = 1 - if priorValue is None: - diff = 0 - else: - priorMag, dir = priorValue - priorMag_mph = int(self.ktToMph(priorMag)) - if adjust: - priorMag_mph = priorMag_mph * self._windAdjustmentFactor - diff = int(mag_mph - priorMag_mph) - if diff >= 0: - magStr = "+"+`diff` - else: - magStr = `diff` - dir = None - # By Value - else: - mag_mph = int(mag_mph) - if mag_mph < 10: - magStr = "0" +`mag_mph` - else: - magStr = `mag_mph` - - # Handle direction - dirStr = "" - if self._productType == "ZONE" and self._reportWindDirectionForZONE == 1: - dirStr = "," - elif self._productType == "FCST" and 
self._reportWindDirectionForFCST == 1: - if dir is None: - dirStr = "," - elif self._textDirection == 1: - dirStr = self.dirTo16PtText(dir) + "," - else: - dirStr = `int(dir)` + "," - return dirStr + magStr - - def _getWxDur(self, rangeName): - statsByRange = self._getValue("Wx__dominantWx_0", rangeName) - if statsByRange is None: - return "" - for range, label in self._timeRangeList: - if label == rangeName: - timeRange = range - break - return self.wxDuration(statsByRange, timeRange) - - def _getStrValue(self, element, rangeName, trendElement=None, priorRangeName=None, stats="avg"): - # Return a scalar text string value - if stats == "min" or stats == "max": - element = element + "__minMax" - value = self._getValue(element, rangeName, stats) - if value is None: - return "" - value = int(value) - if self._reportAsTrends() == 1: - if trendElement is not None: - # Try to get data from trendElement - trendValue = self._getValue(trendElement, rangeName, stats) - if trendValue is not None: - return `int(trendValue)` - if priorRangeName is not None: - # Get data from prior day's data - priorValue = self._getValue(element, priorRangeName, stats) - if priorValue is not None: - priorValue = int(priorValue) - diff = value - priorValue - if diff >= 0: - return "+"+`diff` - else: - return `diff` - return `value` - - def _getValue(self, element, rangeName, stats="avg"): - # Return a scalar value - i=0 - for period, label in self._timeRangeList: - if label == rangeName: - break - i=i+1 - if stats == "avg": - return self.getStats(self._statList[i], element) - if stats == "max": - value = self.getStats(self._statList[i], element) - if value is not None: - value = value[1] - return value - if stats == "min": - value = self.getStats(self._statList[i], element) - if value is not None: - value = value[0] - return value - - def _getWetflag(self, rangeName): - value = self._getValue("Wetflag", rangeName) - if value is None: - return "N" - if value == 0: - return "N" - else: - return 
"Y" - - def _reportAsTrends(self): - if self._productType == "FCST" and self._reportAsTrendsForFCST == 1: - return 1 - if self._productType == "ZONE" and self._reportAsTrendsForZONE == 1: - return 1 - return 0 - - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: This product creates a table of Fire Weather values for a list of edit areas. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# FWM, FWM___Definition, FWM__Overrides +#------------------------------------------------------------------------- +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas, default is Combinations +# +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "FWMBOS" +# +# Optional Configuration Items +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. 
+# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# reportAsTrendsForZONE If 1, Report data values as trends rather +# than actual values for ZONE. +# reportAsTrendsForFCST If 1, Report data values as trends rather +# than actual values for FCST. +# reportTandRHforZONE If 1, Report T and RH values for ZONE +# reportTandRHforFCST If 1, Report T and RH values for FCST +# reportWindDirectionForZONE If 1, Inserts comma placeholder for wind +# direction for ZONE. +# reportWindDirectionForFCST If 1, Reports wind direction for FCST. +# If reporting as trends, inserts placeholder +# comma. +# textDirection If 1, the wind direction (if included) +# is 16-point compass instead of numeric. +# fuelMoisturePlaceHolder String to hold the place for fuel moisture +# windAdjustmentFactor Winds are reported from the Wind20ft grid if +# available.Otherwise, the Wind grid is used +# with the magnitude multiplied by this wind +# adjustment factor. Winds reported by RAWS sites +# are frequently lower than ASOS winds due to the +# fact that use a 10-min average. A common +# adjustment factor is 80% (0.80). If you want +# no adjustment to the winds then set this variable +# to 1.00. 
+# wxDurPopThreshold Pop threshold for reporting wx duration +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Sky +# Wx +# T +# Wind +# LAL +# RH +# PoP +# Wetflag +# Ttrend (optional -- if not provided prior day's data is used) +# RHtrend (optinal -- if not provided prior day's data is used) +#------------------------------------------------------------------------- +# Edit Areas Needed: area1, area2 +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +#------------------------------------------------------------------------- +# Component Products: None +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Additional Information: +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Fire Weather Services. +####################################################################### + + +import TextRules +import SampleAnalysis +import string, time, types +import math + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + Definition = { + "type": "smart", + "displayName": "None", # for Product Generation Menu + # Source database for product. Can be "Official", "Fcst" or "ISC" + "database": "Official", + # Defines output location of finished product. 
+ "outputFile": "{prddir}/TEXT/FWM_.txt", + "debug": 0, + + "defaultEditAreas": [("area1", "086401"), + ("area2", "086402"), + ("area3", "668"), + ], + # product identifiers + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + + # optional variables + "reportAsTrendsForZONE": 1, # Report data values as trends for ZONE + "reportAsTrendsForFCST": 1, # Report data values as trends for FCST + "reportTandRHforZONE": 1, # Report T and RH values for ZONE + "reportTandRHforFCST": 1, # Report T and RH values for FCST + # wind direction + "reportWindDirectionForZONE": 0, # If 1, Inserts comma placeholder + # for wind direction for ZONE + "reportWindDirectionForFCST": 1, # Reports wind direction for FCST. + # If reporting as trends, inserts + # comma. + "textDirection": 1, # Report wind direction as 16-point compass + "fuelMoisturePlaceHolder": "", # String to hold the place for + # fuel moisture. + "windAdjustmentFactor": 0.80, # Adjustment for Wind if + # no Wind20ft grid. + "wxDurPopThreshold": 70, # Pop threshold for reporting wx duration + } + + ######################################################################## + # PRODUCT-SPECIFIC THRESHOLDS AND VARIABLES + ######################################################################## + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def generateForecast(self, argDict): + # Generate formatted product for a list of edit areas + + # Get variables from varDict and Definition + self._getVariables(argDict) + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + self._determineTimeRanges(argDict) + + # Sample the data + self._sampleData(argDict) + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + self._issueTime = self.getCurrentTime(argDict, "%H%M%S", upperCase=1) + self._WIMSTime = self._getWIMSTime(argDict['creationTime']) + self._definition = argDict["forecastDef"] + + # Get variables from VariableList + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + return None + + + def _determineTimeRanges(self, argDict): + # Set up self._timeRangeList + self._timeRangeList = [] + ranges = [ + (37,38,"sky_range"), # Tomorrow 13L_14L + (37,38,"temp_range"), # Tomorrow 13L_14L + (37,38,"rh_range"), # Tomorrow 13L_14L + (14,23,"lal1_range"), # Today 13L-23L + (23,47,"lal2_range"), # Today 23L-Tomorrow 23L 
+ (37,38,"wind_range"), # Tomorrow 13L_14L + (13,38,"maxT_range"), # Today 13L Tomorrow 13L + (13,38,"minT_range"), # Today_13L_Tomorrow_13L + (13,38,"maxRH_range"), # Today_13L_Tomorrow_13L + (13,38,"minRH_range"), # Today_13L_Tomorrow_13L + (13,29,"wxDur1_range"), # Today 13L Tomorrow_05L + (29,37,"wxDur2_range"), # Tomorrow_05L_13L + (37,38,"wetflag_range"), # Tomorrow 13L_14L + (13,14,"prior_range"), # Today 13L_14L + ] + daylight = self.daylight() + # If daylight savings in effect, adjust so that times are + # local standard times (LST) + for startHour, endHour, name in ranges: + if daylight: + startHour = startHour+1 + endHour = endHour+1 + range = self.createTimeRange(startHour, endHour) + self._timeRangeList.append((range, name)) + return + + def _sampleData(self, argDict): + # Sample the data + self._sampler = self.getSampler(argDict, + (self._getAnalysisList(), self._timeRangeList, self._areaList)) + return + + def _preProcessProduct(self, fcst, argDict): + # This is the header for the overall product + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area + if len(areaLabel) == 6: + label = "FCST," + self._productType = "FCST" + else: + label = "ZONE," + self._productType = "ZONE" + fcst = fcst + label + areaLabel + "," + self._WIMSTime + ",13," + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + self._statList = self.getStatList( + self._sampler, self._getAnalysisList(), self._timeRangeList, editArea) + #print "\n\n", areaLabel, self._statList + + # State of Sky + str = self._sky("sky_range") + fcst = fcst + str + "," + + # Temp + str = self._getStrValue("T", "temp_range", + trendElement="Ttrend", priorRangeName="prior_range") + fcst = fcst + str + "," + + # RH + str = self._getStrValue("RH", "rh_range", + trendElement="RHtrend", 
priorRangeName="prior_range") + fcst = fcst + str + "," + + # LAL1 + str = self._getStrValue("LAL", "lal1_range", stats="max") + fcst = fcst + str + "," + + # LAL2 + str = self._getStrValue("LAL", "lal2_range", stats="max") + fcst = fcst + str + "," + + # Wind Dir and Speed + str = self._getWind("wind_range", priorRangeName="prior_range") + fcst = fcst + str + "," + + # Fuel Moisture - Place holder + fcst = fcst + self._fuelMoisturePlaceHolder + "," + + # T and RH + reportTandRH = 0 + if self._productType == "ZONE" and self._reportTandRHforZONE == 1: + reportTandRH = 1 + if self._productType == "FCST" and self._reportTandRHforFCST == 1: + reportTandRH = 1 + + if reportTandRH == 0: + fcst = fcst + ",,,," + else: + # MaxT + str = self._getStrValue("T", "maxT_range", stats="max") + fcst = fcst + str + "," + + # MinT + str = self._getStrValue("T", "minT_range", stats="min") + fcst = fcst + str + "," + + # MaxRH + str = self._getStrValue("RH", "maxRH_range", stats="max") + fcst = fcst + str + "," + + # MinRH + str = self._getStrValue("RH", "minRH_range", stats="min") + fcst = fcst + str + "," + + # WxDur1 + percent = self._getValue("PoP", "wxDur1_range") + if percent is None or percent <= self._wxDurPopThreshold: + str = "0" + else: + str = self._getWxDur("wxDur1_range") + fcst = fcst + str + "," + + # WxDur2 + percent = self._getValue("PoP", "wxDur2_range") + if percent is None or percent <= self._wxDurPopThreshold: + str = "0" + else: + str = self._getWxDur("wxDur2_range") + fcst = fcst + str + "," + + # Wet Flag + str = self._getWetflag("wetflag_range") + fcst = fcst + str + + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the footer for an edit area + return fcst + "\n" + + def _postProcessProduct(self, fcst, argDict): + fcst = string.replace( + fcst,"%IssueTime", + self.getCurrentTime(argDict,"%d%H%M", shiftToLocal=0, upperCase=1)) + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " 
Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _getAnalysisList(self): + return [ + ("Sky", self.avg), + ("Wx", self.dominantWx), + ("Wx", self.dominantWx, [0]), + ("T", self.avg), + ("T", self.minMax), + ("Wind", self.vectorAvg), + ("Wind20ft", self.vectorAvg), + ("LAL", self.minMax), + ("RH", self.avg), + ("RH", self.minMax), + ("PoP", self.stdDevMaxAvg), + ("Wetflag", self.avg), + ("Ttrend", self.avg), # for trends + ("RHtrend", self.avg), # for trends + ] + + def _getWIMSTime(self, currentTime): + t_shift = 24 # Number of hours from current time + return time.strftime("%y%m%d",time.localtime(currentTime+t_shift*60*60)) + + def _sky(self, rangeName): + # Return a sky value + sky = self._getValue("Sky", rangeName) + pop = self._getValue("PoP", rangeName) + if sky is None or pop is None: + return "" + if pop < 70: + if sky <= 10: + value = "0" + elif sky <= 50: + value = "1" + elif sky <= 90: + value = "2" + elif sky > 90: + value = "3" + return value + else: + value = self._getWxCode(rangeName) + return value + + def _getWxCode(self, rangeName): + i=0 + for period, label in self._timeRangeList: + if label == rangeName: + break + i=i+1 + subKeyList = self.getStats(self._statList[i], "Wx") + if subKeyList is None or len(subKeyList) == 0: + return "" + # Take the first weather key of a combination + # There is a possible problem if RW shows up before T + # So we want to catch T if it occurs? 
+ wxKey = subKeyList[0] + wxType = wxKey.wxType() + +## if wxType == "F": +## return "4" +## if wxType == "L": +## return "5" + if wxType == "R": + return "6" + if wxType == "S" or wxType == "IP": + return "7" + if wxType == "RW": + return "8" + if wxType == "T": + return "9" + return "" + + def _getWind(self, rangeName, priorRangeName=None): + # Use Wind20ft if available, otherwise adjust Wind + adjust = 0 + value = self._getValue("Wind20ft", rangeName) + if value is None: + value = self._getValue("Wind", rangeName) + if value is None: + return "" + adjust = 1 + mag, dir = value + mag_mph = self.ktToMph(mag) + if adjust: + mag_mph = mag_mph * self._windAdjustmentFactor + mag_mph = int(mag_mph) + # Report as trend OR by value + if self._reportAsTrends() == 1 and priorRangeName is not None: + adjustPrior = 0 + priorValue = self._getValue("Wind20ft", priorRangeName) + if priorValue is None: + priorValue = self._getValue("Wind", priorRangeName) + if priorValue is None: + pass + else: + adjust = 1 + if priorValue is None: + diff = 0 + else: + priorMag, dir = priorValue + priorMag_mph = int(self.ktToMph(priorMag)) + if adjust: + priorMag_mph = priorMag_mph * self._windAdjustmentFactor + diff = int(mag_mph - priorMag_mph) + if diff >= 0: + magStr = "+"+repr(diff) + else: + magStr = repr(diff) + dir = None + # By Value + else: + mag_mph = int(mag_mph) + if mag_mph < 10: + magStr = "0" +repr(mag_mph) + else: + magStr = repr(mag_mph) + + # Handle direction + dirStr = "" + if self._productType == "ZONE" and self._reportWindDirectionForZONE == 1: + dirStr = "," + elif self._productType == "FCST" and self._reportWindDirectionForFCST == 1: + if dir is None: + dirStr = "," + elif self._textDirection == 1: + dirStr = self.dirTo16PtText(dir) + "," + else: + dirStr = repr(int(dir)) + "," + return dirStr + magStr + + def _getWxDur(self, rangeName): + statsByRange = self._getValue("Wx__dominantWx_0", rangeName) + if statsByRange is None: + return "" + for range, label in 
self._timeRangeList: + if label == rangeName: + timeRange = range + break + return self.wxDuration(statsByRange, timeRange) + + def _getStrValue(self, element, rangeName, trendElement=None, priorRangeName=None, stats="avg"): + # Return a scalar text string value + if stats == "min" or stats == "max": + element = element + "__minMax" + value = self._getValue(element, rangeName, stats) + if value is None: + return "" + value = int(value) + if self._reportAsTrends() == 1: + if trendElement is not None: + # Try to get data from trendElement + trendValue = self._getValue(trendElement, rangeName, stats) + if trendValue is not None: + return repr(int(trendValue)) + if priorRangeName is not None: + # Get data from prior day's data + priorValue = self._getValue(element, priorRangeName, stats) + if priorValue is not None: + priorValue = int(priorValue) + diff = value - priorValue + if diff >= 0: + return "+"+repr(diff) + else: + return repr(diff) + return repr(value) + + def _getValue(self, element, rangeName, stats="avg"): + # Return a scalar value + i=0 + for period, label in self._timeRangeList: + if label == rangeName: + break + i=i+1 + if stats == "avg": + return self.getStats(self._statList[i], element) + if stats == "max": + value = self.getStats(self._statList[i], element) + if value is not None: + value = value[1] + return value + if stats == "min": + value = self.getStats(self._statList[i], element) + if value is not None: + value = value[0] + return value + + def _getWetflag(self, rangeName): + value = self._getValue("Wetflag", rangeName) + if value is None: + return "N" + if value == 0: + return "N" + else: + return "Y" + + def _reportAsTrends(self): + if self._productType == "FCST" and self._reportAsTrendsForFCST == 1: + return 1 + if self._productType == "ZONE" and self._reportAsTrendsForZONE == 1: + return 1 + return 0 + + + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWS_Site_MultiPil.py 
b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWS_Site_MultiPil.py index a6b36ffe6e..e95850ab3c 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWS_Site_MultiPil.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWS_Site_MultiPil.py @@ -1,137 +1,137 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -# --------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without -# technical support, and with no warranty, express or implied, as to -# its usefulness for any purpose. 
-# --------------------------------------------------------------------- -# -# FWS__ -# -# This file should not be edited by the site. -# Site changes should go in FWS__Overrides for methods and -# FWS___Definition to set up Product Definition Settings -# -# -# --------------------------------------------------------------------- - -import FWF -import sys, copy, types - - -# Construct the names of the definition and override TextUtilities -siteDefinition = "FWS___Definition" -siteOverrides = "FWS__Overrides" -regionOverrides = "FWS__Overrides" -FWF_siteOverrides = "FWF__Overrides" -FWF_regionOverrides = "FWF__Overrides" - -# Import the local site's Product Definition specifications -exec "import "+siteDefinition - -# Import the local site's Overrides -exec "import "+siteOverrides -exec "import "+FWF_siteOverrides - -# Import Regional Overrides -exec "import "+regionOverrides -exec "import "+FWF_regionOverrides - -# Patches -import Patch_Overrides -# Special FWS overrides -import FWS_Overrides - -# These statements get the class object for the region and site overrides class -# The class and the module name (the file name) must be the same! 
-regionOverrides_object=sys.modules[regionOverrides].__dict__[regionOverrides] -siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] -FWF_regionOverrides_object=sys.modules[FWF_regionOverrides].__dict__[FWF_regionOverrides] -FWF_siteOverrides_object=sys.modules[FWF_siteOverrides].__dict__[FWF_siteOverrides] - -# Get the region and site definitions into a known variable name -exec "localDefinition = " + siteDefinition + ".Definition" -exec "regionDefinition = " + regionOverrides + ".Definition" -exec "FWF_regionDefinition = " + FWF_regionOverrides + ".Definition" -exec "FWS_Definition = FWS_Overrides.Definition" - -class TextProduct( - siteOverrides_object, - regionOverrides_object, - FWS_Overrides.FWS_Overrides, - FWF_siteOverrides_object, - FWF_regionOverrides_object, - Patch_Overrides.Patch_Overrides, - FWF.TextProduct - ): - Definition = copy.deepcopy(FWF.TextProduct.Definition) - - # Get FWF Regional Definition settings - for key in FWF_regionDefinition.keys(): - Definition[key] = FWF_regionDefinition[key] - - # Get FWS Definition settings - for key in FWS_Definition.keys(): - Definition[key] = FWS_Definition[key] - - # Get Regional Definition settings - for key in regionDefinition.keys(): - Definition[key] = regionDefinition[key] - - # Get the Site Definition Settings - for key in localDefinition.keys(): - Definition[key] = localDefinition[key] - - # Get the VariableList if overridden in FWF Region - try: - exec "VariableList = "+FWF_regionOverrides+".VariableList" - except: - pass - - # Get the VariableList if overridden in FWF Region - try: - exec "VariableList = "+FWS_Overrides+".VariableList" - except: - pass - - # Get the VariableList if overridden in Region - try: - exec "VariableList = "+regionOverrides+".VariableList" - except: - pass - - # Get the VariableList if overridden in Site - try: - exec "VariableList = "+siteDefinition+".VariableList" - except: - pass - - # Definition overrides should really go in FWS___Definition - # but 
may be put here for testing. - # Most common would be need to set unique display name - ##Definition["displayName"] = "Test_FWS_" - - def __init__(self): - FWF.TextProduct.__init__(self) +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +# --------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without +# technical support, and with no warranty, express or implied, as to +# its usefulness for any purpose. +# --------------------------------------------------------------------- +# +# FWS__ +# +# This file should not be edited by the site. +# Site changes should go in FWS__Overrides for methods and +# FWS___Definition to set up Product Definition Settings +# +# +# --------------------------------------------------------------------- + +import FWF +import sys, copy, types + + +# Construct the names of the definition and override TextUtilities +siteDefinition = "FWS___Definition" +siteOverrides = "FWS__Overrides" +regionOverrides = "FWS__Overrides" +FWF_siteOverrides = "FWF__Overrides" +FWF_regionOverrides = "FWF__Overrides" + +# Import the local site's Product Definition specifications +exec("import "+siteDefinition) + +# Import the local site's Overrides +exec("import "+siteOverrides) +exec("import "+FWF_siteOverrides) + +# Import Regional Overrides +exec("import "+regionOverrides) +exec("import "+FWF_regionOverrides) + +# Patches +import Patch_Overrides +# Special FWS overrides +import FWS_Overrides + +# These statements get the class object for the region and site overrides class +# The class and the module name (the file name) must be the same! 
+regionOverrides_object=sys.modules[regionOverrides].__dict__[regionOverrides] +siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] +FWF_regionOverrides_object=sys.modules[FWF_regionOverrides].__dict__[FWF_regionOverrides] +FWF_siteOverrides_object=sys.modules[FWF_siteOverrides].__dict__[FWF_siteOverrides] + +# Get the region and site definitions into a known variable name +exec("localDefinition = " + siteDefinition + ".Definition") +exec("regionDefinition = " + regionOverrides + ".Definition") +exec("FWF_regionDefinition = " + FWF_regionOverrides + ".Definition") +exec("FWS_Definition = FWS_Overrides.Definition") + +class TextProduct( + siteOverrides_object, + regionOverrides_object, + FWS_Overrides.FWS_Overrides, + FWF_siteOverrides_object, + FWF_regionOverrides_object, + Patch_Overrides.Patch_Overrides, + FWF.TextProduct + ): + Definition = copy.deepcopy(FWF.TextProduct.Definition) + + # Get FWF Regional Definition settings + for key in list(FWF_regionDefinition.keys()): + Definition[key] = FWF_regionDefinition[key] + + # Get FWS Definition settings + for key in list(FWS_Definition.keys()): + Definition[key] = FWS_Definition[key] + + # Get Regional Definition settings + for key in list(regionDefinition.keys()): + Definition[key] = regionDefinition[key] + + # Get the Site Definition Settings + for key in list(localDefinition.keys()): + Definition[key] = localDefinition[key] + + # Get the VariableList if overridden in FWF Region + try: + exec("VariableList = "+FWF_regionOverrides+".VariableList") + except: + pass + + # Get the VariableList if overridden in FWF Region + try: + exec("VariableList = "+FWS_Overrides+".VariableList") + except: + pass + + # Get the VariableList if overridden in Region + try: + exec("VariableList = "+regionOverrides+".VariableList") + except: + pass + + # Get the VariableList if overridden in Site + try: + exec("VariableList = "+siteDefinition+".VariableList") + except: + pass + + # Definition overrides should really 
go in FWS___Definition + # but may be put here for testing. + # Most common would be need to set unique display name + ##Definition["displayName"] = "Test_FWS_" + + def __init__(self): + FWF.TextProduct.__init__(self) diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWS_Site_MultiPil_Baseline.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWS_Site_MultiPil_Baseline.py index b63f86458b..ae1766d301 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWS_Site_MultiPil_Baseline.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/FWS_Site_MultiPil_Baseline.py @@ -1,136 +1,136 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -## -# This is a base file that is not intended to be overridden. -## - -# --------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without -# technical support, and with no warranty, express or implied, as to -# its usefulness for any purpose. -# --------------------------------------------------------------------- -# -# FWS___Baseline -# -# This file should not be edited by the site. -# Site changes should go in FWS__Overrides for methods and -# FWS__ Definition to set up Product Definition Settings -# -# -# --------------------------------------------------------------------- - -import FWF -import sys, copy, types - - -# Construct the names of the definition and override TextUtilities -siteDefinition = "FWS___Definition" -siteOverrides = "FWS__Overrides" -regionOverrides = "FWS__Overrides" -FWF_siteOverrides = "FWF__Overrides" -FWF_regionOverrides = "FWF__Overrides" - -# Import the local site's Product Definition specifications -exec "import "+siteDefinition - -# Import the local site's Overrides -exec "import "+siteOverrides -exec "import "+FWF_siteOverrides - -# Import Regional Overrides -exec "import "+regionOverrides -exec "import "+FWF_regionOverrides - -# Patches -import Patch_Overrides -# Special FWS overrides -import FWS_Overrides - -# These statements get the class object for the region and site overrides class -# The class and the module name (the file name) must be the same! 
-#regionOverrides_object=sys.modules[regionOverrides].__dict__[regionOverrides] -#siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] -#FWF_regionOverrides_object=sys.modules[FWF_regionOverrides].__dict__[FWF_regionOverrides] -#FWF_siteOverrides_object=sys.modules[FWF_siteOverrides].__dict__[FWF_siteOverrides] - -# Get the region and site definitions into a known variable name -exec "localDefinition = " + siteDefinition + ".Definition" -exec "regionDefinition = " + regionOverrides + ".Definition" -exec "FWF_regionDefinition = " + FWF_regionOverrides + ".Definition" -exec "FWS_Definition = FWS_Overrides.Definition" - -class TextProduct( - #siteOverrides_object, - #regionOverrides_object, - FWS_Overrides.FWS_Overrides, - #FWF_siteOverrides_object, - #FWF_regionOverrides_object, - Patch_Overrides.Patch_Overrides, - FWF.TextProduct - ): - Definition = copy.deepcopy(FWF.TextProduct.Definition) - - # Get FWF Regional Definition settings - #for key in FWF_regionDefinition.keys(): - # Definition[key] = FWF_regionDefinition[key] - - # Get FWS Definition settings - for key in FWS_Definition.keys(): - Definition[key] = FWS_Definition[key] - - # Get Regional Definition settings - #for key in regionDefinition.keys(): - # Definition[key] = regionDefinition[key] - - # Get the Site Definition Settings - for key in localDefinition.keys(): - Definition[key] = localDefinition[key] - - # Get the VariableList if overridden in FWF Region - #try: - # exec "VariableList = "+FWF_regionOverrides+".VariableList" - #except: - # pass - - # Get the VariableList if overridden in FWF Region - try: - exec "VariableList = FWS_Overrides.VariableList" - except: - pass - - # Get the VariableList if overridden in Region - #try: - # exec "VariableList = "+regionOverrides+".VariableList" - #except: - # pass - - # Get the VariableList if overridden in Site - try: - exec "VariableList = "+siteDefinition+".VariableList" - except: - pass - - # To turn on this product for testing, - # set 
the display name. - Definition["displayName"] = "Baseline__FWS" - - def __init__(self): - FWF.TextProduct.__init__(self) +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +# --------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without +# technical support, and with no warranty, express or implied, as to +# its usefulness for any purpose. +# --------------------------------------------------------------------- +# +# FWS___Baseline +# +# This file should not be edited by the site. +# Site changes should go in FWS__Overrides for methods and +# FWS__ Definition to set up Product Definition Settings +# +# +# --------------------------------------------------------------------- + +import FWF +import sys, copy, types + + +# Construct the names of the definition and override TextUtilities +siteDefinition = "FWS___Definition" +siteOverrides = "FWS__Overrides" +regionOverrides = "FWS__Overrides" +FWF_siteOverrides = "FWF__Overrides" +FWF_regionOverrides = "FWF__Overrides" + +# Import the local site's Product Definition specifications +exec("import "+siteDefinition) + +# Import the local site's Overrides +exec("import "+siteOverrides) +exec("import "+FWF_siteOverrides) + +# Import Regional Overrides +exec("import "+regionOverrides) +exec("import "+FWF_regionOverrides) + +# Patches +import Patch_Overrides +# Special FWS overrides +import FWS_Overrides + +# These statements get the class object for the region and site overrides class +# The class and the module name (the file name) must be the same! 
+#regionOverrides_object=sys.modules[regionOverrides].__dict__[regionOverrides] +#siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] +#FWF_regionOverrides_object=sys.modules[FWF_regionOverrides].__dict__[FWF_regionOverrides] +#FWF_siteOverrides_object=sys.modules[FWF_siteOverrides].__dict__[FWF_siteOverrides] + +# Get the region and site definitions into a known variable name +exec("localDefinition = " + siteDefinition + ".Definition") +exec("regionDefinition = " + regionOverrides + ".Definition") +exec("FWF_regionDefinition = " + FWF_regionOverrides + ".Definition") +exec("FWS_Definition = FWS_Overrides.Definition") + +class TextProduct( + #siteOverrides_object, + #regionOverrides_object, + FWS_Overrides.FWS_Overrides, + #FWF_siteOverrides_object, + #FWF_regionOverrides_object, + Patch_Overrides.Patch_Overrides, + FWF.TextProduct + ): + Definition = copy.deepcopy(FWF.TextProduct.Definition) + + # Get FWF Regional Definition settings + #for key in FWF_regionDefinition.keys(): + # Definition[key] = FWF_regionDefinition[key] + + # Get FWS Definition settings + for key in list(FWS_Definition.keys()): + Definition[key] = FWS_Definition[key] + + # Get Regional Definition settings + #for key in regionDefinition.keys(): + # Definition[key] = regionDefinition[key] + + # Get the Site Definition Settings + for key in list(localDefinition.keys()): + Definition[key] = localDefinition[key] + + # Get the VariableList if overridden in FWF Region + #try: + # exec "VariableList = "+FWF_regionOverrides+".VariableList" + #except: + # pass + + # Get the VariableList if overridden in FWF Region + try: + exec("VariableList = FWS_Overrides.VariableList") + except: + pass + + # Get the VariableList if overridden in Region + #try: + # exec "VariableList = "+regionOverrides+".VariableList" + #except: + # pass + + # Get the VariableList if overridden in Site + try: + exec("VariableList = "+siteDefinition+".VariableList") + except: + pass + + # To turn on this product for 
testing, + # set the display name. + Definition["displayName"] = "Baseline__FWS" + + def __init__(self): + FWF.TextProduct.__init__(self) diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GLF.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GLF.py index 94ca87d0ac..8450c12181 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GLF.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GLF.py @@ -1,1684 +1,1684 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: This product creates a Great Lakes Forecast product. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# GLF, GLF___Definition, GLF__Overrides -#------------------------------------------------------------------------- -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas...see "Edit Areas Needed" section below -# for all edit areas needed. Just uncomment the lines below the -# lake the formatter will be run for...and comment -# out the lines below the lakes not used. -# productName defines name of product e.g. "Open Lakes Forecast" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "GLFLS" -# areaName (opt.) Area name for product header, such as "Lake Superior" -# wfoCityState WFO location, such as "Buffalo NY" -# lake_name Name of lake...not including the word "Lake" e.g "Superior" -# lakezone Zone code for the text portion of the forecast e.g. "LSZ260" -# maforzone Zone code for the mafor portion of the forecast e.g. "LSZ261" -# headerphrase Phrase for the header portion of forecast immediately above -# the SYNOPSIS section. -# Optional Configuration Items -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. 
-# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -# useAbbreviations (default == 1) -# If 1, use marine abbreviations e.g. TSTM instead of THUNDERSTORM, NW instead of NORTHWEST -# (See marine_abbreviateText in the TextRules module) -# processMafor (default == 1) -# 1 --> The MAFOR code will be processed -# 0 --> The MAFOR code will not be processed -# useHolidays Set to 1 to use holidays in the time period labels -# -# Weather-related flags -# hoursSChcEnds - specifies hours past the beginning of the first -# first period of the product to stop including 'Slight -# Chance' or 'Isolated' weather types (ERH policy -# allows values of 1-5 * 12 hour periods) -# -# Trouble-shooting items -# passLimit -- Limit on passes allowed through Narrative Tree -# trace -- Set to 1 to turn on trace through Narrative Tree -## -# NARRATIVE CUSTOMIZATION POINTS -# The phrases in this product can be customized in many ways by overriding -# infrastructure methods in the Local file. -# You will see common overrides in the Local file and you may change them -# in that there. -# For further customization, you can determine which phrases your product is -# using by examining the Component Product Definitions below. -# Then, you can look up the phrase in the Text Product User Guide which will -# describe the all the relevant override methods associated with the phrase. -# Refer to the Customization section of the Text Product User Guide -# for step-by-step information. 
-#------------------------------------------------------------------------- -# Weather Elements Needed: -# To 5 days: -# Grids need to be continuous: -# Wind (5 days) -# WaveHeight (5 days) -# Wx (5 days) -# T (36 hours) -# Optional: -# WindGust (5 days) -#------------------------------------------------------------------------- -# Edit Areas Needed: left side for E-W oriented lakes: right side for -# N-S oriented lakes: "whole lake" edit area needed -# for all lakes. -# west_half (north_half) -# east_half (south_half) -# east_one_third (south_one_third) -# west_one_third (north_one_third) -# east_two_thirds (south_two_thirds) -# west_two_thirds (north_two_thirds) -# east_one_quarter (south_one_quarter) -# west_one_quarter (north_one_quarter) -# east_three_quarters (south_three_quarters) -# west_three_quarters (north_three_quarters) -# "whole lake" -- name the edit area the name of the entire lake (e.g. SUPERIOR, ST_CLAIR, HURON, etc) -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Component Products: -# GLFFcstFirst -# GLFFcstShort -# GLFFcstExt -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". 
-#------------------------------------------------------------------------- -# Component Products: -# GLFFcstShort -# GLFFcstExt -#------------------------------------------------------------------------- -# Additional Information: -# -# from ConfigVariables: -# maximum_range_nlValue_dict -# minimum_range_nlValue_dict -# phrase_descriptor_dict -# scalar_difference_nlValue_dict -#------------------------------------------------------------------------- -# Example Output: -# -## FOUS43 KMQT 301825 -## GLFLS -## LSZ260-310300 - -## OPEN LAKE FORECAST FOR LAKE SUPERIOR -## NATIONAL WEATHER SERVICE MARQUETTE MI -## 125 PM EST WED OCT 30 2002 - -## LAKE SUPERIOR FORECAST BEYOND FIVE NM FROM SHORE - -## .SYNOPSIS... - -## WEST HALF - -## ...GALE WARNING IN EFFECT... - -## .TONIGHT...NW GALES TO 45 KT EASING TO GALES TO 40 -## KT LATE IN THE NIGHT. CHANCE OF SNOW SHOWERS. WAVES 7 TO 10 FT -## SUBSIDING TO 6 TO 9 FT. -## .THU...W WIND 15 TO 25 KT. SNOW SHOWERS LIKELY. WAVES 4 -## TO 7 FT SUBSIDING TO 4 TO 6 FT. -## .THU NIGHT...W WIND 15 TO 25 KT VEERING NW WIND TO -## 30 KT AFTER MIDNIGHT. SNOW SHOWERS LIKELY. WAVES 4 TO 6 FT. -## .FRI...NW WIND TO 30 KT. RAIN AND SNOW LIKELY. -## WAVES 4 TO 7 FT. -## .FRI NIGHT...NW WIND 15 TO 25 KT BACKING W WIND TO -## 30 KT IN THE LATE EVENING AND OVERNIGHT. RAIN AND SNOW LIKELY. -## WAVES 5 TO 8 FT. -## .SAT...W WIND TO 30 KT VEERING NW. WAVES 4 TO -## 7 FT SUBSIDING TO 3 TO 5 FT. -## .SUN...NW WIND 10 TO 20 KT BACKING W 5 TO 15 KT -## IN THE EVENING. WAVES 3 TO 5 FT. -## .MON...W WIND 5 TO 15 KT AFTER MIDNIGHT VEERING NW -## BACKING W UP TO 10 KT IN THE LATE MORNING AND EARLY AFTERNOON -## BACKING SW BACKING S 5 TO 15 KT IN THE EVENING. WAVES -## 2 TO 4 FT BUILDING TO 3 TO 5 FT. - -## EAST HALF - -## ...GALE WARNING IN EFFECT... - -## .TONIGHT...NW GALES TO 45 KT EASING TO GALES TO 40 -## KT LATE IN THE NIGHT. CHANCE OF SNOW SHOWERS. WAVES 7 TO 10 FT -## SUBSIDING TO 6 TO 9 FT. -## .THU...W WIND 15 TO 25 KT. SNOW SHOWERS LIKELY. 
WAVES 4 -## TO 7 FT SUBSIDING TO 4 TO 6 FT. -## .THU NIGHT...W WIND 10 TO 20 KT VEERING NW 15 TO -## 25 KT IN THE LATE EVENING AND OVERNIGHT. SNOW SHOWERS LIKELY. -## WAVES 4 TO 6 FT BUILDING TO 6 TO 9 FT. -## .FRI...NW WIND TO 30 KT. RAIN AND SNOW LIKELY. -## WAVES 6 TO 9 FT BUILDING TO 7 TO 10 FT. -## .FRI NIGHT...NW WIND 15 TO 25 KT BACKING W WIND TO -## 30 KT AFTER MIDNIGHT. RAIN AND SNOW LIKELY. WAVES 7 TO 10 FT -## SUBSIDING TO 6 TO 9 FT. -## .SAT...W WIND TO 30 KT VEERING NW. WAVES 6 TO -## 9 FT SUBSIDING TO 5 TO 8 FT. -## .SUN...N WIND 15 TO 25 KT BACKING NW 10 TO 20 -## KT. WAVES 4 TO 7 FT. -## .MON...NW WIND 10 TO 20 KT EASING TO UP TO 10 KT -## EARLY IN THE AFTERNOON BACKING SE VEERING S 5 TO 15 KT -## LATE IN THE EVENING. WAVES 4 TO 7 FT. - -## $$ - -## LSZ261-310300- -## MAFOR 3022/ -## SUPERIOR WEST 1/2...GALE WARNING IN EFFECT...13760 11640 11620 13630 -## 220610 - -## SUPERIOR EAST 1/2...GALE WARNING IN EFFECT...13760 11740 12620 12630 -## 220610 - -## $$ - -# - -import TextRules -import SampleAnalysis -import ForecastNarrative -import time, string -import os, re, types -import TimeRange, AbsTime - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - # W-E orientation - (("Groupings", "groupings"), "West 1/2:East 1/2", "radio", - ["West 1/2:East 1/2", "West 1/3:East 2/3", "West 2/3:East 1/3", - "West 1/4:East 3/4", "West 3/4:East 1/4", "Entire Lake"]), - # N-S orientation - #(("Groupings", "groupings") , "North 1/2:South 1/2", "radio", - # ["North 1/2:South 1/2", "North 1/3:South 2/3", "North 2/3:South 1/3", - # "North 1/4:South 3/4", "North 3/4:South 1/4", "Entire Lake"]), - ] - - Definition = { - "type": "smart", - "displayName": "None", - # Source database for product. Can be "Official", "Fcst" or "ISC" - "database": "Official", - # Defines output location of finished product. 
- "outputFile": "{prddir}/TEXT/GLF_.txt", - "debug": 0, - - "lineLength": 66, - ## Edit Areas - "defaultEditAreas" : [("west_half", "WEST HALF\n\n"), - ("east_half", "EAST HALF\n\n")], - # product identifiers - "lake_name": "Superior", # use -- Superior, Huron, Erie, Ontario, Michigan, St_Clair - "productName": "Open Lakes Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "Statename", # Name of state, such as "Georgia" - "wfoCityState": "", # Location of WFO - city state - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - - "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time - - "hazardSamplingThreshold": (10, None), #(%cov, #points) - - "headerphrase": "Lake Superior forecast beyond five nautical miles from shore", # header phrase - "lakezone": "LSZ260", # Zone code for the Lake - "maforzone": "LSZ261", # Mafor zone code - "processmafor" : 1, # process mafor data: 1=yes, 0=no - - "periodCombining" : 0, # If 1, combine periods, if possible - - "useAbbreviations": 1, # Use marine abbreviations - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - "useHolidays": 0, # Set to 1 to use holidays in the time period labels - - # Weather-related flags - "hoursSChcEnds": 24, - - # Language - "language": "english", - # Trouble-shooting items - "passLimit": 20, # Limit on passes allowed through - # Narrative Tree - "trace": 0, # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - ######################################################################## - # OVERRIDING THRESHOLDS AND VARIABLES - ######################################################################## - - ### THRESHOLDS AND 
VARIABLES - ### Analysis Class - ### To override, override the associated method in your text product class. - def temporalCoverage_threshold(self, parmHisto, timeRange, componentName): - # Replaces IN_RANGE_THRESHOLD -- Note that this threshold is now used - # differently i.e. it is the percentage of the TIMERANGE covered by the - # grid in order to include it in the analysis - # Percentage of temporal coverage default value (if not found in temporalCoverage_dict) - # Used by temporalCoverage_flag - return 5.0 - - def temporalCoverage_dict(self, parmHisto, timeRange, componentName): - # Replaces IN_RANGE_DICT -- Note that this these thresholds are now used - return { - "LAL": 0, - "MinRH": 0, - "MaxRH": 0, - "MinT": 50, - "MaxT": 10, - "Haines": 0, - "Wx": 15, - "PoP" : 50, - } - - # Uncomment any combinations you wish to collapse. - # For example, if the first entry is uncommented, - # the phrase: scattered rain showers and widespread rain - # will collapse to: scattered rain showers. - def wxCombinations(self): - return [ - ("RW", "R"), - ("SW", "S"), - ## ("T","RW"), - ] - - def vector_mag_difference_nlValue_dict(self, tree, node): - # Replaces WIND_THRESHOLD - # Magnitude difference. If the difference between magnitudes - # for sub-ranges is greater than this value, - # the different magnitudes will be noted in the phrase. - # Units can vary depending on the element and product - return { - "Wind": 10, - "Wind20ft": 10, - "TransWind": 10, - "FreeWind": 10, - "Swell": 1, # ft - "Swell2": 1, # ft - } - - - def vector_dir_difference_dict(self, tree, node): - # Direction difference. If the difference between directions - # for 2 sub-periods is greater than this value, - # the different directions will be noted in the phrase. 
- # Units are degrees - return { - "Wind": 50, # degrees - "TransWind": 60, # mph - "FreeWind": 60, # mph - "Swell":60, # degrees - "Swell2":60, # degrees - } - - def maxReported_threshold_dict(self, tree, node): - # Winds will not be reported above this value: - # For example, if set to 30, all winds above 30 will - # be reported as: - # "Winds up to 30 knots." - return { - "Wind": 200, # knots or mph depending on product - } - - def null_nlValue_dict(self, tree, node): - # Threshold below which values are considered "null" and not reported. - # Units depend on the element and product - dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) - dict["Wind"] = 7 - return dict - - def first_null_phrase_dict(self, tree, node): - # Phrase to use if values THROUGHOUT the period or - # in the first period are Null (i.e. below threshold OR NoWx) - # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. - dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) - dict["Wave"] = "variable winds less than 10 knots" - return dict - - def null_phrase_dict(self, tree, node): - # Phrase to use for null values in subPhrases other than the first - # Can be an empty string - # E.g. "NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" - dict = TextRules.TextRules.null_phrase_dict(self, tree, node) - dict["Wind"] = "variable less than 10 knots" - dict["Wx"] = "" - return dict - - def gust_wind_difference_nlValue(self, tree, node): - # Difference between gust and maxWind below which gusts are not mentioned - # Units are mph - return 15 - - def maximum_range_nlValue_dict(self, tree, node): - # Maximum range to be reported within a phrase - # e.g. 
5 to 10 mph - # Units depend on the product -# return {} - return { - "Wind": 10, - } - - def marine_wind_flag(self, tree, node): - # If 1, Wind combining and wording will reflect the - # crossing of significant thresholds such as gales - return 1 - - def marine_wind_combining_flag(self, tree, node): - # If 1, Wind combining will reflect the - # crossing of significant thresholds such as gales. - # E.g. "Hurricane forece winds to 00 knots." instead of - # "North hurricane force winds to 100 knots easing to - # hurricane force winds to 80 knots in the afternoon." - return 1 - - def marine_abbreviateText(self, fcst): - fcst = re.sub(r'(?i)(\W|^)NORTH(?!WARD|ERN|WESTWARD|EASTWARD|WESTERN|EASTERN)(?=\W|$)', r'\1N', fcst) - fcst = re.sub(r'(?i)(\W|^)SOUTH(?!WARD|ERN|WESTWARD|EASTWARD|WESTERN|EASTERN)(?=\W|$)', r'\1S', fcst) - fcst = re.sub(r'(?i)(\W|^)EAST(?!WARD|ERN)(?=\W|$)', r'\1E', fcst) - fcst = re.sub(r'(?i)(\W|^)WEST(?!WARD|ERN)(?=\W|$)', r'\1W', fcst) - fcst = re.sub(r'(?i)(\W|^)KNOTS?(?=\W|$)', r'\1kt', fcst) - fcst = re.sub(r'(?i)(\W|^)MILLIBARS?(?=\W|$)', r'\1mb', fcst) - fcst = re.sub(r'(?i)(\W|^)FATHOMS?(?=\W|$)', r'\1fm', fcst) - fcst = re.sub(r'(?i)(\W|^)NAUTICAL MILES?(?=\W|$)', r'\1nm', fcst) - fcst = re.sub(r'(?i)(\W|^)(?:FOOT|FEET)(?=\W|$)', r'\1ft', fcst) - fcst = re.sub(r'(?i)(\W|^)POSITION(?=\W|$)', r'\1PSN', fcst) - fcst = re.sub(r'(?i)(\W|^)VISIBILITY(?=\W|$)', r'\1VSBY', fcst) - fcst = re.sub(r'(?i)(\W|^)THUNDERSTORM(?=\W|$)', r'\1TSTM', fcst) - fcst = re.sub(r'(?i)(\W|^)AVERAGE(?=\W|$)', r'\1AVG', fcst) - fcst = re.sub(r'(?i)(\W|^)ATLANTIC(?=\W|$)', r'\1ATLC', fcst) - fcst = re.sub(r'(?i)(\W|^)LONGITUDE(?=\W|$)', r'\1LONG', fcst) - fcst = re.sub(r'(?i)(\W|^)PACIFIC(?=\W|$)', r'\1PAC', fcst) - fcst = re.sub(r'(?i)(\W|^)DEGREE(?=\W|$)', r'\1deg', fcst) - fcst = re.sub(r'(?i)(\W|^)PRESSURE(?=\W|$)', r'\1PRES', fcst) - fcst = re.sub(r'(?i)(\W|^)(SUN)DAY(?=\W|$)', r'\1\2', fcst) - fcst = re.sub(r'(?i)(\W|^)(MON)DAY(?=\W|$)', r'\1\2', fcst) - fcst = 
re.sub(r'(?i)(\W|^)(TUE)SDAY(?=\W|$)', r'\1\2', fcst) - fcst = re.sub(r'(?i)(\W|^)(WED)NESDAY(?=\W|$)', r'\1\2', fcst) - fcst = re.sub(r'(?i)(\W|^)(THU)RSDAY(?=\W|$)', r'\1\2', fcst) - fcst = re.sub(r'(?i)(\W|^)(FRI)DAY(?=\W|$)', r'\1\2', fcst) - fcst = re.sub(r'(?i)(\W|^)(SAT)URDAY(?=\W|$)', r'\1\2', fcst) - fcst = re.sub(r'(?i)(\W|^)W HALF(?=\W|$)', r'\1West half', fcst) - fcst = re.sub(r'(?i)(\W|^)E HALF(?=\W|$)', r'\1east half', fcst) - fcst = re.sub(r'(?i)(\W|^)N HALF(?=\W|$)', r'\1north half', fcst) - fcst = re.sub(r'(?i)(\W|^)S HALF(?=\W|$)', r'\1soutH half', fcst) - fcst = re.sub(r'(?i)(\W|^)W THIRD(?=\W|$)', r'\1west third', fcst) - fcst = re.sub(r'(?i)(\W|^)E THIRD(?=\W|$)', r'\1east third', fcst) - fcst = re.sub(r'(?i)(\W|^)N THIRD(?=\W|$)', r'\1north third', fcst) - fcst = re.sub(r'(?i)(\W|^)S THIRD(?=\W|$)', r'\1south third', fcst) - fcst = re.sub(r'(?i)(\W|^)W TWO(?=\W|$)', r'\1west two', fcst) - fcst = re.sub(r'(?i)(\W|^)E TWO(?=\W|$)', r'\1east two', fcst) - fcst = re.sub(r'(?i)(\W|^)N TWO(?=\W|$)', r'\1north two', fcst) - fcst = re.sub(r'(?i)(\W|^)S TWO(?=\W|$)', r'\1south two', fcst) - return fcst - - def rounding_method_dict(self, tree, node): - # Special rounding methods - # - return { - "Wind": self.marineRounding, - } - - def waveHeight_words(self, tree, node): - "Create phrase for waves" - statDict = node.getStatDict() - stats = self.getStats(statDict, "WaveHeight") - if stats is None: - nodataPhrase = self.noWaveHeight_phrase( - tree, node, "WaveHeight", "WaveHeight") - return self.setWords(node.parent, nodataPhrase) - - min, max = self.getValue(stats, "MinMax") - #avg = (min + max)/2 - words = self.wave_range(max) - return self.setWords(node, words) - - def wave_range(self, avg): - # Make wave ranges based off the average wave value - table = ((2, "2 feet or less"), (3, "1 to 3 feet"), - (4, "2 to 4 feet"), (5, "3 to 5 feet"), - (6, "4 to 6 feet"), (8, "5 to 8 feet"), - (9, "6 to 9 feet"), (10, "7 to 10 feet"), - (11, "8 to 11 feet"), (12, 
"9 to 12 feet"), - (14, "10 to 14 feet"), (17, "12 to 17 feet"), - (20, "15 to 20 feet"), (25, "20 to 25 feet"), - (30, "25 to 30 feet"), (100, "over 30 feet")) - range = "" - for max, str in table: - if avg <= max: - range = str - break - return range - - ######################################################################## - # COMPONENT PRODUCT DEFINITIONS - ######################################################################## - - def _PoP_analysisMethod(self, componentName): - # Alternative PoP analysis methods for consistency between PoP and Wx - return self.stdDevMaxAvg - #return self.maxMode - #return self.maximum - - def GLFFcstFirst(self): - - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("Wind", self.vectorMinMax, [3]), - ("WindGust", self.minMax, [3]), - ("Wx", self.rankedWx, [6]), - ("T", self.minMax), - ("WaveHeight", self.minMax, [3]), - ("PoP", self._PoP_analysisMethod("GLFFcstFirst"), [6]), - ("PoP", self.binnedPercent, [6]), - ], - "phraseList":[ - self.marine_wind_withGusts_phrase, - self.weather_orSky_phrase, - self.visibility_phrase, - self.severeWeather_phrase, - self.waveHeight_phrase, - ], - } - - - def GLFFcstShort(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("Wind", self.vectorMinMax, [3]), - ("WindGust", self.minMax, [3]), - ("Wx", self.rankedWx, [6]), - ("T", self.minMax), - ("WaveHeight", self.minMax, [3]), - ("PoP", self._PoP_analysisMethod("GLFFcstShort"), [6]), - ("PoP", self.binnedPercent, [6]), - ], - "phraseList":[ - self.marine_wind_withGusts_phrase, - self.weather_orSky_phrase, - self.visibility_phrase, - self.severeWeather_phrase, - self.waveHeight_phrase, - self._warnOutlook_phrase, - ], - } - - def GLFFcstExt(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - 
self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - ("Wind", self.vectorMinMax, [3]), - ("Wx", self.rankedWx, [6]), - ("T", self.minMax), - ("WaveHeight", self.minMax, [6]), - ("PoP", self._PoP_analysisMethod("GLFFcstExt"), [6]), - ("PoP", self.binnedPercent, [6]), - ], - "phraseList":[ - self.marine_wind_phrase, - self.weather_phrase, - self.visibility_phrase, - self.waveHeight_phrase, - ], - } - - def generateForecast(self, argDict): - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the edit areas - try: - if self._groupings == "West 1/3:east 2/3": - self._areaList = [("west_one_third", "WEST THIRD"), - ("east_two_thirds", "EAST TWO THIRDS")] - elif self._groupings == "West 2/3:East 1/3": - self._areaList = [("west_two_thirds", "WEST TWO THIRDS"), - ("east_one_third", "EAST ONE THIRD")] - elif self._groupings == "West 1/4:East 3/4": - self._areaList = [("west_one_quarter", "WEST QUARTER"), - ("east_three_quarters", "EAST THREE QUARTERS")] - elif self._groupings == "West 3/4:East 1/4": - self._areaList = [("west_three_quarters", "WEST THREE QUARTERS"), - ("east_one_quarter", "EAST ONE QUARTER")] - elif self._groupings == "Entire Lake": - self._areaList = [(self._lake_name, "")] - elif self._groupings == "West 1/2:East 1/2": - self._areaList = [("west_half", "WEST HALF"), ("east_half", "EAST HALF")] - elif self._groupings == "North 1/3:South 2/3": - self._areaList = [("north_one_third", "NORTH THIRD"), - ("south_two_thirds", "SOUTH TWO THIRDS")] - elif self._groupings == "North 2/3:South 1/3": - self._areaList = [("north_two_thirds", "NORTH TWO THIRDS"), - ("south_one_third", "SOUTH ONE THIRD")] - elif self._groupings == "North 1/4:South 3/4": - self._areaList = [("north_one_quarter", "NORTH QUARTER"), - ("south_three_quarters", "SOUTH THREE QUARTERS")] - elif self._groupings == "North 3/4:South 1/4": - self._areaList = [("north_three_quarters", "NORTH THREE QUARTERS"), - ("south_one_quarter", 
"SOUTH ONE QUARTER")] - elif self._groupings == "Entire Lake": - self._areaList = [(self._lake_name, "")] - elif self._groupings == "North 1/2:South 1/2": - self._areaList = [("north_half", "NORTH HALF"), ("south_half", "SOUTH HALF")] - else: - self._areaList = [(self._lake_name, "")] - except: - self._areaList = [(self._lake_name, "")] - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." - - # determine time ranges for MAFOR - self._determineTimeRanges(argDict) - - # Sample the data - self._sampleData(argDict) - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - - # make sure outlook flag is set to 0 - self._outlookflag = 0 - - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList - self._groupings = None - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._language = argDict["language"] - - # Initialize mafor list - self._mafors = [] - return None - - def _determineTimeRanges(self, argDict): - # Set up the Narrative Definition and initial Time Range - 
self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict)) - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._issueTime = self._issuanceInfo.issueTime() - self._expireTime = self._issuanceInfo.expireTime() - self._expireTimeDDHHMM = time.strftime("%d%H%M", - time.gmtime(self._expireTime.unixTime())) - self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - if self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - self._definition["priorPeriod"] = 24 - - # Set up self._headlineRange - self._headlineRange = TimeRange.TimeRange(self._timeRange.startTime(), - self._timeRange.startTime() + 24*3600) - - - trDict = {} - trList = [] - currentLocalTime, shift = self.determineTimeShift() - day = currentLocalTime.day - month = currentLocalTime.month - year = currentLocalTime.year - startTime = AbsTime.absTimeYMD(year, month, day) - - if self._productIssuance == "400 AM": - start = self.localTime(startTime, 8, shift) - wxstart = self.localTime(startTime, 6, shift) - wxend = self.localTime(startTime, 18, shift) - if self._productIssuance == "1000 AM": - start = self.localTime(startTime, 14, shift) - wxstart = self.localTime(startTime, 10, shift) - wxend = self.localTime(startTime, 18, shift) - if self._productIssuance == "400 PM": - start = self.localTime(startTime, 20, shift) - wxstart = self.localTime(startTime, 18, shift) - wxend = self.localTime(startTime, 30, shift) - if self._productIssuance == "1000 PM": - start = self.localTime(startTime, 26, shift) - wxstart = self.localTime(startTime, 22, shift) - wxend = self.localTime(startTime, 30, shift) - # MAFOR config - timeRange = TimeRange.TimeRange(start, start + 3600) - periods = self.getPeriods(timeRange, 3, 1, 8) - # coded winds and waves - for i in range(0,8): - 
trList.append(periods[i]) - trDict["MAFOR"+`i`] = periods[i][0] - - self._trDict = trDict - self._trList = trList - - # worded weather times in mafor - self._wxtimerange1 = TimeRange.TimeRange(wxstart, wxend) - self._wxtimerange2 = TimeRange.TimeRange(wxend, wxend+(3600*12)) - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - return None - - def _sampleData(self, argDict): - # Sample and analyze the data for the narrative - self._narrativeProcessor = ForecastNarrative.ForecastNarrative() - error = self._narrativeProcessor.getNarrativeData( - argDict, self._definition, self._timeRange, - self._areaList, self._issuanceInfo) - if error is not None: - return error - print "AREA LIST: ", self._areaList - self._sampler = self.getSampler(argDict, - (self._MAFORAnalysisList(), self._trList, self._areaList)) - return None - - def _preProcessProduct(self, fcst, argDict): - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + '\n' - - fcst = fcst + s.upper() - - s = self._lakezone + "-" + self._expireTimeDDHHMM + "-\n\n" +\ - productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - fcst = fcst + self._headerphrase + "\n\n" + ".SYNOPSIS..." 
+ "\n\n" - - # Set up hazards - self.getHazards(argDict, self._areaList) - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area combination - fcst = fcst + areaLabel+ "\n\n" - - # Headlines - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - - - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - - argDict["language"] = self._language - - # Generate Narrative Forecast for Edit Area - fcstSegment = self._narrativeProcessor.generateForecast( - argDict, editArea, areaLabel) - - # Handle abbreviations - if self._useAbbreviations == 1: - fcstSegment = self.marine_abbreviateText(fcstSegment) - fcstSegment = re.sub(r'\n', r' ',fcstSegment) - fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) - fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) - fcst = fcst + fcstSegment - - # mafor stuff - - # grab headline in mafor - maforheadline = self.generateProduct("MaforHeadline", argDict, - area = editArea, areaLabel=areaLabel, - timeRange = self._headlineRange) - - maforheadline = string.replace(maforheadline, "\n", "") - - if self._processmafor == 1: - mafor = self._makeMAFOR(editArea, areaLabel, self._trDict, - self._trList, self._MAFORAnalysisList(), maforheadline, argDict, areaLabel) - self._mafors.append(mafor) - ## - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - - # Adjust some phrases to local requirements - # ======================================== - fcst = string.replace(fcst,"widespread rain", "occasional rain") - fcst = string.replace(fcst,"widespread showers", "showers") - fcst = string.replace(fcst,"widespread thunderstorms", "thunderstorms") - - fcst = string.replace(fcst, "rain showers", "showers") - 
fcst = string.replace(fcst, "thunderstorms and showers", "showers and thunderstorms") - #phrase = string.replace(phrase, "widespread", "") - - # This is the footer for an edit area combination - return fcst + "\n" - - def _postProcessProduct(self, fcst, argDict): - - fcst = fcst + "$$\n\n" - - if string.find(fcst, "storm force") > 0 or\ - string.find(fcst, "storm warning") > 0 or\ - string.find(fcst, "hurricane") > 0: - fcst = fcst + "&&STORM\n\n" - - if self._processmafor == 1: - maforzone = self._maforzone+"-"+ self._expireTimeDDHHMM + "-\n" - maforissue = "MAFOR " + self._getMaforTime(argDict) + "/" + "\n" - - fcst = fcst + maforzone + maforissue - for mafor in self._mafors: - fcst = fcst + mafor + "\n" - fcst = fcst + "$$\n\n" - - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _warnOutlook_phrase(self): - return { - "phraseMethods": [ - self._warnOutlook_words, # phrase.words - ], - } - def _warnOutlook_words(self, tree, phrase): - # will put an outlook phrase in the text - - timeRange = phrase.getTimeRange() - windStats = tree.stats.get("Wind", timeRange, mergeMethod="Max") - if windStats is None: - return self.setWords(phrase, "") - - max, dir = windStats - words = "" - if max >= 34 and (self._outlookflag == 0): - words = "a gale warning may be needed" - self._outlookflag = 1 - if max >= 48 and (self._outlookflag == 0 or self._outlookflag == 1): - words = "a storm warning may be needed" - self._outlookflag = 2 - if max < 34: - words = "" - self._outlookflag = 0 - return self.setWords(phrase, words) - - def _issuance_list(self, argDict): - # This method sets up configurable issuance times with associated - # narrative definitions. See the Text Product User Guide for documentation. 
- narrativeDefAM = [ - ("GLFFcstFirst", "period1"), ("GLFFcstFirst", 12), ("GLFFcstShort", 12), ("GLFFcstShort", 12), - ("GLFFcstExt", 18), ("GLFFcstExt", 24), ("GLFFcstExt", 24), - ] - narrativeDefPM = [ - ("GLFFcstFirst", "period1"), ("GLFFcstFirst", 12), ("GLFFcstShort", 12), ("GLFFcstShort", 12), - ("GLFFcstShort", 12), - ("GLFFcstExt", 18), ("GLFFcstExt", 24), ("GLFFcstExt", 24), - ] - return [ - ("400 AM", self.DAY(), self.NIGHT(), 16, - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("1000 AM", "issuanceHour", self.NIGHT(), 16, - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - # End times are tomorrow: - ("400 PM", self.NIGHT(), 24 + self.DAY(), 24 + 4, - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("1000 PM", "issuanceHour", 24 + self.DAY(), 24 + 4, - ".REST OF TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDefPM), - ] - - def lateDay_descriptor(self, tree, node, timeRange): - # If time range is in the first period, return period1 descriptor for - # late day -- default 3pm-6pm - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateDayPhrase() - else: - return "late in the afternoon" - - def lateNight_descriptor(self, tree, node, timeRange): - # If time range is in the first period, return period1 descriptor for - # late night -- default 3am-6am - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateNightPhrase() - else: - return "early in the morning" - - def splitDay24HourLabel_flag(self, tree, node): - # Return 0 to have the TimeDescriptor module label 24 hour periods - # with simply the weekday name (e.g. Saturday) - # instead of including the day and night periods - # (e.g. Saturday and Saturday night) - # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" - # is set to zero. 
- # NOTE: This applied only to periods that are exactly 24-hours in length. - # Periods longer than that will always be split into day and night labels - # (e.g. SUNDAY THROUGH MONDAY NIGHT) - compName = node.getComponentName() - if compName == "GLFFcstExt": - return 0 - else: - return 1 - -# def _getExpTime(self, argDict): -# # get exp time for EST -# currentTime = argDict['creationTime'] -# curday = time.strftime("%d", time.localtime(currentTime)) -# nextday = time.strftime("%d",time.localtime(currentTime + 86400)) -# timezone = time.strftime("%Z", time.localtime(currentTime)) -# -# if self._productIssuance == "400 AM": -# if timezone == "EST" or timezone == "CST": -# return curday + "1500" -# else: -# return curday + "1400" -# elif self._productIssuance == "1000 AM": -# if timezone == "EST" or timezone == "CST": -# return curday + "2100" -# else: -# return curday + "2000" -# elif self._productIssuance == "400 PM": -# if timezone == "EST" or timezone == "CST": -# return nextday + "0300" -# else: -# return nextday + "0200" -# elif self._productIssuance == "1000 PM": -# if timezone == "EST" or timezone == "CST": -# return nextday + "0900" -# else: -# return nextday + "0800" -# - def significant_wx_visibility_subkeys(self, tree, node): - # Weather values that constitute significant weather to - # be reported regardless of visibility. - # If your visibility_wx_threshold is None, you do not need - # to set up these subkeys since weather will always be - # reported. - # Set of tuples of weather key search tuples in the form: - # (cov type inten) - # Wildcards are permitted. 
- return [("* *")] - - ################################# - # MAFOR code - - def WxMAFOR(self): - return { - "type":"component", - "methodList": [ - self.assemblemaforPhrases, - self.wordWrap, - ], - "analysisList": [ - #("Wind", self.vectorMinMax), - #("WaveHeight", self.minMax), - ("Wx", self.rankedWx, [6]), - ("T", self.minMax), - ("PoP", self._PoP_analysisMethod("WxMAFOR"), [6]), - ("PoP", self.binnedPercent, [6]), - ], - "phraseList":[ - self.weather_phrase, - ], - } - - def WaveMAFOR(self): - return { - "type":"component", - "methodList": [ - self.assemblemaforPhrases, - self.wordWrap, - ], - "analysisList": [ - #("Wind", self.vectorMinMax), - #("WaveHeight", self.minMax), - ("WaveHeight", self.minMax, [6]), - ], - "phraseList":[ - self.waveHeight_phrase, - ], - } - - def assemblemaforPhrases(self, tree, component): - # Assemble component phrases and add Label - # Qualify the phrases with local effect qualifiers - # if present. - # e.g. "near the coast" - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - fcst = "" - lastQualifier = None - lastPhrase = None - for phrase in component.get("childList"): - words = phrase.get("words") - if words is None: - return - words, lastQualifier = self.qualifyWords( - phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase) - lastPhrase = phrase - fcst = fcst + words - # Add label - curLocalTime, shift = self.determineTimeShift() - issuanceInfo = tree.get("issuanceInfo") - index = component.getIndex() - label = self.createmaforLabel(component.get("timeRange"), - issuanceInfo, curLocalTime, shift, index) - if fcst == "": - label = "" - return self.setWords(component, fcst + label) - - def createmaforLabel(self, timeRange, issuanceInfo, currentLocalTime, shift, index=0): - # Make a label given the timeRange in GMT and the shift to - # convert it to local time. currentLocalTime can be used to - # compare to current day. 
- - if timeRange.duration() <= 3600: - return "" - if index == 0: - try: - label = issuanceInfo.period1Label() - if label != "": - return label - except: - pass - try: - today = issuanceInfo.todayFlag() - except: - today = 1 - label = self.getWeekday(timeRange, holidays=0, shiftToLocal=1, - labelType="Combo", today=today, - tomorrow=0) - return label - - def maforheadline_phrase(self): - return { - "phraseMethods": [ - self.maforheadline_words, # phrase.words - ], - } - def maforheadline_words(self, tree, phrase): - timeRange = phrase.getTimeRange() - waveHeightStats = tree.stats.get("WaveHeight", timeRange, mergeMethod="Max") - windStats = tree.stats.get("Wind", timeRange, mergeMethod="Max") - if waveHeightStats is None or windStats is None: - return self.setWords(phrase,"@") - - # Look at max waveHeight - waveheightMax = int(waveHeightStats) - - # Look at the max wind for the period - windMax, dir = windStats - #print "wind", windStats, windMax, argDict["timeRange"] - - words = "" - if windMax >= 64: - words = "...hurricane force wind warning in effect..." - elif windMax >= 48: - words = "...storm warning in effect..." - elif windMax >= 34: - words = "...gale warning in effect..." -# elif windMax >= 21: -# words = "\n\n...small craft advisory in effect.." -# elif waveheightMax >= 5: -# words = "\n\n...small craft advisory in effect.." 
- else: - words = "" - return self.setWords(phrase, words) - - def _MAFORAnalysisList(self): - return [ - ("Wind", self.vectorMinMax), - ("WindGust", self.minMax), - ("WaveHeight", self.minMax), - ("Wx", self.dominantWx), - ("T", self.minMax), - ] - - def MaforHeadline(self): - return { - "type":"component", - "methodList": [ - self.assembleChildWords, - self.wordWrap, - ], - "analysisList": [ - ("Wind", self.vectorMinMax), - ("WaveHeight", self.minMax), - ], - "phraseList":[ - self.maforheadline_phrase - ], - } - - def _makeMAFOR(self, editArea, areaList, trDict, trList, analysis, maforheadline, argDict, areaLabel): - - MAFOR = [] - - for i in range (0,8): - statDict = self.getStatDict(self._sampler, analysis, - trDict["MAFOR"+ `i`], editArea) - windstats = self.getStats(statDict, "Wind") - - statDict2 = self.getStatDict(self._sampler, analysis, - trDict["MAFOR"+ `i`], editArea) - wxstats = self.getStats(statDict2, "Wx") - - statDict3 = self.getStatDict(self._sampler, analysis, - trDict["MAFOR"+ `i`], editArea) - Tstats = self.getStats(statDict3, "T") - - statDict4 = self.getStatDict(self._sampler, analysis, - trDict["MAFOR"+ `i`], editArea) - Guststats = self.getStats(statDict4, "WindGust") - - if Guststats is None: - Gustmax = 0 - else: - Gustmax = self.getValue(Guststats, "Max") - - if Tstats is None: - Tmin = 32 - else: - Tmin, Tmax = Tstats - - # wind mafor code - - if windstats == "None" or windstats is None: - windcode = "MMMM" - maxspd = 0 - else: - windspd, winddir = windstats - minspd, maxspd = windspd - windcode = self._findwind(winddir, maxspd) - - # weather mafor code # - - if wxstats is None or wxstats == []: - wxcode = "M" - else: - subkeyList = wxstats - wxcode = self._wxcode(maxspd, Gustmax, Tmin, subkeyList) - - code = windcode + wxcode - MAFOR.append(code) - - code1 = code2 = code3 = code4 = code5 = code6 = code7 = code8 = '' - - first = 0 - next = 1 - time = 1 - mafor = "" - ok = 1 - # sort the code # - while first < 8: - while ok == 1: - if 
next > 7: - final = first - first = next - ok = 0 - break - if MAFOR[first] == MAFOR[next]: - first = first - next = next + 1 - time = time + 1 - ok = 1 - else: - final = first - first = next - next = next + 1 - ok = 0 - break - - mafor = self._makeCode(mafor, time, MAFOR[final]) - ok = 1 - time = 1 - - mafor = mafor + "." - mafor = string.replace(mafor, " .", ". ") - - # wx wording # - - wxmafor1 = self.generateProduct("WxMAFOR", argDict, - area = editArea, areaLabel=areaLabel, - timeRange = self._wxtimerange1) - wxmafor1 = string.replace(wxmafor1, "\n", "") - wxmafor1 = string.replace(wxmafor1, ". .", " ") - wxmafor1 = string.replace(wxmafor1, "..", " ") - if wxmafor1 != "": - wxmafor1 = wxmafor1 + ". " - - wxmafor2 = self.generateProduct("WxMAFOR", argDict, - area = editArea, areaLabel=areaLabel, - timeRange = self._wxtimerange2) - wxmafor2 = string.replace(wxmafor2, "\n", "") - wxmafor2 = string.replace(wxmafor2, ". .", " ") - wxmafor2 = string.replace(wxmafor2, "..", " ") - if wxmafor2 != "": - wxmafor2 = wxmafor2 + ". " - - wxmafor = wxmafor1 + wxmafor2 - - mafor = mafor + wxmafor - - # wave wording # - - wavemafor1 = self.generateProduct("WaveMAFOR", argDict, - area = editArea, areaLabel=areaLabel, - timeRange = self._wxtimerange1) - wavemafor1 = string.replace(wavemafor1, "\n", "") - wavemafor1 = string.replace(wavemafor1, "..", " ") - wavemafor1 = string.replace(wavemafor1, ". .", " ") - wavemafor1 = wavemafor1 + ". " - - wavemafor2 = self.generateProduct("WaveMAFOR", argDict, - area = editArea, areaLabel=areaLabel, - timeRange = self._wxtimerange2) - wavemafor2 = string.replace(wavemafor2, "\n", "") - wavemafor2 = string.replace(wavemafor2, ". .", " ") - wavemafor2 = string.replace(wavemafor2, "..", " ") - wavemafor2 = wavemafor2 + ". 
" - - wavemafor = wavemafor1 + wavemafor2 - - mafor = mafor + wavemafor - - # waves mafor code # - waveMAFOR = [] - for j in range (0,2): - statDict2 = self.getStatDict(self._sampler, analysis, - trDict["MAFOR"+ `j`], editArea) - wavestats = self.getStats(statDict2, "WaveHeight") - if wavestats is "None" or wavestats is None: - #mafor = mafor + " " + "MMMMM" - wavecode = "MMMM" - waveMAFOR.append(wavecode) - else: - min, max = wavestats - wavecode, minimum, maximum = self._findwave(max) - waveMAFOR.append(wavecode) - - if waveMAFOR[0] == waveMAFOR[1]: - mafor = mafor + "22" + waveMAFOR[0] + ". " - else: - mafor = mafor + "21" + waveMAFOR[0] + " " +\ - "21" + waveMAFOR[1] + ". " - - if maforheadline == "": - connector = " " - else: - connector = "" - - mafor = self._lake_name + " " + areaList + connector + maforheadline + mafor - mafor = string.replace(mafor, "_", " ") - mafor = string.replace(mafor, "HALF", "1/2") - mafor = string.replace(mafor, "THREE QUARTERS", "3/4") - mafor = string.replace(mafor, "QUARTER", "1/4") - mafor = string.replace(mafor, "TWO THIRDS", "2/3") - mafor = string.replace(mafor, "THIRD", "1/3") - mafor = string.replace(mafor, "LAKE SUPERIOR", "") - - mafor = self.linebreak(mafor, 69) - - return mafor - def _findwind(self, winddir, windspd): - - winddir = self.dirToText(winddir) - minspd = 7 #min wind speed that direction is included in mafor - - if windspd<=minspd: - dir = `9` - - if winddir == "N": - dir = `8` - elif winddir == "NE": - dir = `1` - elif winddir == "E": - dir = `2` - elif winddir == "SE": - dir = `3` - elif winddir == "S": - dir = `4` - elif winddir == "SW": - dir = `5` - elif winddir == "W": - dir = `6` - elif winddir == "NW": - dir = `7` - else: - dir = `9` - - if windspd > 60: - spd = `9` - if windspd <= 60: - spd = `8` - if windspd <= 55: - spd = `7` - if windspd <= 48: - spd = `6` - if windspd <= 35: - spd = `5` - if windspd <= 33: - spd = `4` - if windspd <= 25: - spd = `3` - if windspd <= 20: - spd = `2` - if windspd 
<= 15: - spd = `1` - if windspd <= 10: - spd = `0` - - return dir + spd - -############ -# weather mafor code -############ - - def _wxcode(self, windspd, Gust, T, subkeyList): - - index = 0 - - length = len(subkeyList) - for wxKey in subkeyList: - wxType = wxKey.wxType() - cov = wxKey.coverage() - vis = wxKey.visibility() - - number = "0" - - if wxType == "ZY" and T >= 23 and T < 32: - number = "1" - - if wxType == "ZY" and T < 23: - number = "2" - - if wxType == "WG": - if cov == "Def" or cov == "Wide" or cov == "Areas": - number = "3" - - if wxType == "WG": - if vis == "1/4SM" or vis == "0SM": - if cov == "Def" or cov == "Wide" or cov == "Areas": - number = "4" - - if wxType == "L": - if cov == "Like" or cov == "Ocnl" or cov == "Def" or cov == "Wide" or cov == "Num" or cov == "Areas": - number = "5" - - if wxType == "R" or wxType == "RW": - if cov == "Like" or cov == "Ocnl" or cov == "Def" or cov == "Wide" or cov == "Num" or cov == "Areas": - number = "6" - - if wxType == "S" or wxType == "SW": - if cov == "Like" or cov == "Ocnl" or cov == "Def" or cov == "Wide" or cov == "Num" or cov == "Areas": - number = "7" - - if (Gust - windspd) > 16: - if wxType == "SW" or wxType == "RW" or wxType == "T": - number = "8" - - if wxType == "T": - if cov == "Like" or cov == "Ocnl" or cov == "Def" or cov == "Wide" or cov == "Num" or cov == "Areas": - number = "9" - - index = index + 1 - if index <= length: - break - - return number - - - def _makeCode(self, mafor, time, wind): - - if time <=4: - code = "1" + `time` + wind - if time == 5: - code = "14" + wind + " 11" + wind - if time == 6: - code = "15" + wind - if time == 7: - code = "15" + wind + " 11" + wind - if time == 8: - code = "16" + wind - -# mafor = mafor + " " + code - mafor = mafor + code + " " - - return mafor - -############ -# wave mafor code -############ - - def _findwave(self, avg): - - if avg == 0: - range = "0002" - min=0 - max=2 - elif avg <= 2: - range = "0002" - min=0 - max=2 - elif avg > 2 and avg <= 3: 
- range = "0103" - min=1 - max=3 - elif avg > 3 and avg <= 4: - range = "0204" - min=2 - max=4 - elif avg > 4 and avg <= 5: - range = "0305" - min=3 - max=5 - elif avg > 5 and avg <= 6: - range = "0306" - min=3 - max=6 - elif avg > 6 and avg <= 7: - range = "0407" - min=4 - max=7 - elif avg > 7 and avg <= 8: - range = "0508" - min=5 - max=8 - elif avg > 8 and avg <= 10: - range = "0610" - min=6 - max=10 - elif avg > 10 and avg <= 12: - range = "0812" - min=8 - max=12 - elif avg > 12 and avg <= 14: - range = "1014" - min=10 - max=14 - elif avg > 14 and avg <= 16: - range = "1216" - min=12 - max=16 - elif avg > 16 and avg <= 18: - range = "1418" - min=14 - max=18 - elif avg > 18 and avg <= 20: - range = "1520" - min=15 - max=20 - elif avg > 20 and avg <= 23: - range = "1823" - min=18 - max=23 - elif avg > 23 and avg <= 25: - range = "2025" - min=20 - max=25 - elif avg > 25: - range = "2530" - min=25 - max=30 - else: - range = "MMMM" - min=0 - max=0 - - return range, min, max - - - def _getMaforTime(self, argDict): - # get mafor time - currentTime = argDict['creationTime'] - curday = time.strftime("%d", time.localtime(currentTime)) - nextday = time.strftime("%d",time.localtime(currentTime + 86400)) - timezone = time.strftime("%Z", time.localtime(currentTime)) - - if self._productIssuance == "400 AM": - if timezone == "EST" or timezone == "CST": - return curday + "10" - else: - return curday + "09" - elif self._productIssuance == "1000 AM": - if timezone == "EST" or timezone == "CST": - return curday + "16" - else: - return curday + "15" - elif self._productIssuance == "400 PM": - if timezone == "EST" or timezone == "CST": - return curday + "22" - else: - return curday + "21" - elif self._productIssuance == "1000 PM": - if timezone == "EST" or timezone == "CST": - return curday + "14" - else: - return nextday + "03" - - def timePeriod_descriptorTable(self, tree, node): - # Contains definition for localtime start/end times and phrase - # Tuples, 0=startHrLT, 1=endHrLT, 
2=phrase - day = self.DAY() - return [ - (day, (day+3)%24, "early in the morning"), # 6-9 - (day, (day+6)%24, "in the morning"), # 6-12 - (day, (day+9)%24, "until late afternoon"), # 6-15 - (day, (day+12)%24, ""), # 6-18 - ((day+3)%24, (day+6)%24, "late in the morning"), # 9-12 - ((day+3)%24, (day+9)%24, "around midday"), # 9-15 - ((day+3)%24, (day+12)%24, "by noon"), # 9-18 - ((day+6)%24, (day+9)%24, "early in the afternoon"), # 12-15 - ((day+6)%24, (day+12)%24, "in the afternoon"), # 12-18 - ((day+9)%24, (day+12)%24, "late in the afternoon"), # 15-18 - ((day+12)%24, (day+15)%24, "early in the evening"), # 18-21 - ((day+12)%24, (day+18)%24, "in the evening"), # 18-0 - ((day+12)%24, (day+21)%24, "through early morning"), # 18-3 - ((day+12)%24, day, ""), # 18-6 - ((day+15)%24, (day+18)%24, "late in the evening"), # 21-0 - ((day+15)%24, (day+21)%24, "around midnight"), # 21-3 - ((day+15)%24, day, "overnight"), # 21-6 - ((day+18)%24, (day+21)%24, "after midnight"), # 0-3 - ((day+18)%24, day, "after midnight"), # 0-6 - ((day+18)%24, (day+6)%24, ""), # 0-12 - ((day+21)%24, day, "early in the morning"), # 3-6 - ] - - def phrase_descriptor_dict(self, tree, node): - # Dictionary of descriptors for various weather elements in phrases - # The value for an element may be a phrase or a method - # If a method, it will be called with arguments: - # tree, node, key, element - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - dict["Wind"] = "winds" - dict["WindGust"] = "gusts up to" - dict["WaveHeight"] = "waves" - dict["hurricane force winds to"]= "hurricane force winds to" - dict["storm force winds to"] = "storm force winds to" - dict["gales to"] = "gales to" - dict["up to"] = "" - dict["around"] = "" - # Used for Headlines - dict["EXPECTED"] = "EXPECTED" - dict["IN EFFECT"] = "IN EFFECT" - return dict - - def scalar_difference_nlValue_dict(self, tree, node): - # Scalar difference. 
If the difference between scalar values - # for 2 sub-periods is greater than or equal to this value, - # the different values will be noted in the phrase. - dict = TextRules.TextRules.scalar_difference_nlValue_dict(self, tree, node) - dict["WaveHeight"] = { - (0, 6) : 1, - (6, 20) : 5, - 'default': 10, - } - return dict - - def minimum_range_nlValue_dict(self, tree, node): - # This threshold is the "smallest" min/max difference allowed between values reported. - # For example, if threshold is set to 5 for "MaxT", and the min value is 45 - # and the max value is 46, the range will be adjusted to at least a 5 degree - # range e.g. 43-48. These are the values that are then submitted for phrasing - # such as: - # HIGHS IN THE MID 40S - dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) - dict["Wind"] = { - (0, 5) : 0, # will be reported as "null" - (5, 8) : 5, - "default" : 10, - } - return dict - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. 
- def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] - return [ - ('HF.A', marineActions, 'Marine'), # HURRICANE FORCE WIND WATCH - ('SR.A', marineActions, 'Marine'), # STORM WATCH - ('GL.A', marineActions, 'Marine'), # GALE WATCH - ('SE.A', marineActions, 'Marine'), # HAZARDOUS SEAS WATCH - ('UP.A', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH - ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING - ('SR.W', marineActions, 'Marine'), # STORM WARNING - ('GL.W', marineActions, 'Marine'), # GALE WARNING - ('SE.W', marineActions, 'Marine'), # HAZARDOUS SEAS WARNING - ('UP.W', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING - ('TO.A', allActions, 'Convective'), # TORNADO WATCH - ('SV.A', allActions, 'Convective'), # SEVERE THUNDERSTORM WATCH - ('MH.W', allActions, 'Ashfall'), # VOLCANIC ASHFALL WARNING - ('MH.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY - ('MF.Y', allActions, 'Fog'), # DENSE FOG ADVISORY - ('MS.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY - ('LO.Y', allActions, 'LowWater'), # LOW WATER ADVISORY - ] +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. 
+## + +#------------------------------------------------------------------------- +# Description: This product creates a Great Lakes Forecast product. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# GLF, GLF___Definition, GLF__Overrides +#------------------------------------------------------------------------- +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas...see "Edit Areas Needed" section below +# for all edit areas needed. Just uncomment the lines below the +# lake the formatter will be run for...and comment +# out the lines below the lakes not used. +# productName defines name of product e.g. "Open Lakes Forecast" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "GLFLS" +# areaName (opt.) Area name for product header, such as "Lake Superior" +# wfoCityState WFO location, such as "Buffalo NY" +# lake_name Name of lake...not including the word "Lake" e.g "Superior" +# lakezone Zone code for the text portion of the forecast e.g. "LSZ260" +# maforzone Zone code for the mafor portion of the forecast e.g. "LSZ261" +# headerphrase Phrase for the header portion of forecast immediately above +# the SYNOPSIS section. +# Optional Configuration Items +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from Formatter Launcher. +# debug If on, debug_print statements will appear. 
+# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +# useAbbreviations (default == 1) +# If 1, use marine abbreviations e.g. TSTM instead of THUNDERSTORM, NW instead of NORTHWEST +# (See marine_abbreviateText in the TextRules module) +# processMafor (default == 1) +# 1 --> The MAFOR code will be processed +# 0 --> The MAFOR code will not be processed +# useHolidays Set to 1 to use holidays in the time period labels +# +# Weather-related flags +# hoursSChcEnds - specifies hours past the beginning of the first +# first period of the product to stop including 'Slight +# Chance' or 'Isolated' weather types (ERH policy +# allows values of 1-5 * 12 hour periods) +# +# Trouble-shooting items +# passLimit -- Limit on passes allowed through Narrative Tree +# trace -- Set to 1 to turn on trace through Narrative Tree +## +# NARRATIVE CUSTOMIZATION POINTS +# The phrases in this product can be customized in many ways by overriding +# infrastructure methods in the Local file. +# You will see common overrides in the Local file and you may change them +# in that there. +# For further customization, you can determine which phrases your product is +# using by examining the Component Product Definitions below. +# Then, you can look up the phrase in the Text Product User Guide which will +# describe the all the relevant override methods associated with the phrase. 
+# Refer to the Customization section of the Text Product User Guide +# for step-by-step information. +#------------------------------------------------------------------------- +# Weather Elements Needed: +# To 5 days: +# Grids need to be continuous: +# Wind (5 days) +# WaveHeight (5 days) +# Wx (5 days) +# T (36 hours) +# Optional: +# WindGust (5 days) +#------------------------------------------------------------------------- +# Edit Areas Needed: left side for E-W oriented lakes: right side for +# N-S oriented lakes: "whole lake" edit area needed +# for all lakes. +# west_half (north_half) +# east_half (south_half) +# east_one_third (south_one_third) +# west_one_third (north_one_third) +# east_two_thirds (south_two_thirds) +# west_two_thirds (north_two_thirds) +# east_one_quarter (south_one_quarter) +# west_one_quarter (north_one_quarter) +# east_three_quarters (south_three_quarters) +# west_three_quarters (north_three_quarters) +# "whole lake" -- name the edit area the name of the entire lake (e.g. SUPERIOR, ST_CLAIR, HURON, etc) +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Component Products: +# GLFFcstFirst +# GLFFcstShort +# GLFFcstExt +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". 
+#------------------------------------------------------------------------- +# Component Products: +# GLFFcstShort +# GLFFcstExt +#------------------------------------------------------------------------- +# Additional Information: +# +# from ConfigVariables: +# maximum_range_nlValue_dict +# minimum_range_nlValue_dict +# phrase_descriptor_dict +# scalar_difference_nlValue_dict +#------------------------------------------------------------------------- +# Example Output: +# +## FOUS43 KMQT 301825 +## GLFLS +## LSZ260-310300 + +## OPEN LAKE FORECAST FOR LAKE SUPERIOR +## NATIONAL WEATHER SERVICE MARQUETTE MI +## 125 PM EST WED OCT 30 2002 + +## LAKE SUPERIOR FORECAST BEYOND FIVE NM FROM SHORE + +## .SYNOPSIS... + +## WEST HALF + +## ...GALE WARNING IN EFFECT... + +## .TONIGHT...NW GALES TO 45 KT EASING TO GALES TO 40 +## KT LATE IN THE NIGHT. CHANCE OF SNOW SHOWERS. WAVES 7 TO 10 FT +## SUBSIDING TO 6 TO 9 FT. +## .THU...W WIND 15 TO 25 KT. SNOW SHOWERS LIKELY. WAVES 4 +## TO 7 FT SUBSIDING TO 4 TO 6 FT. +## .THU NIGHT...W WIND 15 TO 25 KT VEERING NW WIND TO +## 30 KT AFTER MIDNIGHT. SNOW SHOWERS LIKELY. WAVES 4 TO 6 FT. +## .FRI...NW WIND TO 30 KT. RAIN AND SNOW LIKELY. +## WAVES 4 TO 7 FT. +## .FRI NIGHT...NW WIND 15 TO 25 KT BACKING W WIND TO +## 30 KT IN THE LATE EVENING AND OVERNIGHT. RAIN AND SNOW LIKELY. +## WAVES 5 TO 8 FT. +## .SAT...W WIND TO 30 KT VEERING NW. WAVES 4 TO +## 7 FT SUBSIDING TO 3 TO 5 FT. +## .SUN...NW WIND 10 TO 20 KT BACKING W 5 TO 15 KT +## IN THE EVENING. WAVES 3 TO 5 FT. +## .MON...W WIND 5 TO 15 KT AFTER MIDNIGHT VEERING NW +## BACKING W UP TO 10 KT IN THE LATE MORNING AND EARLY AFTERNOON +## BACKING SW BACKING S 5 TO 15 KT IN THE EVENING. WAVES +## 2 TO 4 FT BUILDING TO 3 TO 5 FT. + +## EAST HALF + +## ...GALE WARNING IN EFFECT... + +## .TONIGHT...NW GALES TO 45 KT EASING TO GALES TO 40 +## KT LATE IN THE NIGHT. CHANCE OF SNOW SHOWERS. WAVES 7 TO 10 FT +## SUBSIDING TO 6 TO 9 FT. +## .THU...W WIND 15 TO 25 KT. SNOW SHOWERS LIKELY. 
WAVES 4 +## TO 7 FT SUBSIDING TO 4 TO 6 FT. +## .THU NIGHT...W WIND 10 TO 20 KT VEERING NW 15 TO +## 25 KT IN THE LATE EVENING AND OVERNIGHT. SNOW SHOWERS LIKELY. +## WAVES 4 TO 6 FT BUILDING TO 6 TO 9 FT. +## .FRI...NW WIND TO 30 KT. RAIN AND SNOW LIKELY. +## WAVES 6 TO 9 FT BUILDING TO 7 TO 10 FT. +## .FRI NIGHT...NW WIND 15 TO 25 KT BACKING W WIND TO +## 30 KT AFTER MIDNIGHT. RAIN AND SNOW LIKELY. WAVES 7 TO 10 FT +## SUBSIDING TO 6 TO 9 FT. +## .SAT...W WIND TO 30 KT VEERING NW. WAVES 6 TO +## 9 FT SUBSIDING TO 5 TO 8 FT. +## .SUN...N WIND 15 TO 25 KT BACKING NW 10 TO 20 +## KT. WAVES 4 TO 7 FT. +## .MON...NW WIND 10 TO 20 KT EASING TO UP TO 10 KT +## EARLY IN THE AFTERNOON BACKING SE VEERING S 5 TO 15 KT +## LATE IN THE EVENING. WAVES 4 TO 7 FT. + +## $$ + +## LSZ261-310300- +## MAFOR 3022/ +## SUPERIOR WEST 1/2...GALE WARNING IN EFFECT...13760 11640 11620 13630 +## 220610 + +## SUPERIOR EAST 1/2...GALE WARNING IN EFFECT...13760 11740 12620 12630 +## 220610 + +## $$ + +# + +import TextRules +import SampleAnalysis +import ForecastNarrative +import time, string +import os, re, types +import TimeRange, AbsTime + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + # W-E orientation + (("Groupings", "groupings"), "West 1/2:East 1/2", "radio", + ["West 1/2:East 1/2", "West 1/3:East 2/3", "West 2/3:East 1/3", + "West 1/4:East 3/4", "West 3/4:East 1/4", "Entire Lake"]), + # N-S orientation + #(("Groupings", "groupings") , "North 1/2:South 1/2", "radio", + # ["North 1/2:South 1/2", "North 1/3:South 2/3", "North 2/3:South 1/3", + # "North 1/4:South 3/4", "North 3/4:South 1/4", "Entire Lake"]), + ] + + Definition = { + "type": "smart", + "displayName": "None", + # Source database for product. Can be "Official", "Fcst" or "ISC" + "database": "Official", + # Defines output location of finished product. 
+ "outputFile": "{prddir}/TEXT/GLF_.txt", + "debug": 0, + + "lineLength": 66, + ## Edit Areas + "defaultEditAreas" : [("west_half", "WEST HALF\n\n"), + ("east_half", "EAST HALF\n\n")], + # product identifiers + "lake_name": "Superior", # use -- Superior, Huron, Erie, Ontario, Michigan, St_Clair + "productName": "Open Lakes Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "Statename", # Name of state, such as "Georgia" + "wfoCityState": "", # Location of WFO - city state + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + + "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time + + "hazardSamplingThreshold": (10, None), #(%cov, #points) + + "headerphrase": "Lake Superior forecast beyond five nautical miles from shore", # header phrase + "lakezone": "LSZ260", # Zone code for the Lake + "maforzone": "LSZ261", # Mafor zone code + "processmafor" : 1, # process mafor data: 1=yes, 0=no + + "periodCombining" : 0, # If 1, combine periods, if possible + + "useAbbreviations": 1, # Use marine abbreviations + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + "useHolidays": 0, # Set to 1 to use holidays in the time period labels + + # Weather-related flags + "hoursSChcEnds": 24, + + # Language + "language": "english", + # Trouble-shooting items + "passLimit": 20, # Limit on passes allowed through + # Narrative Tree + "trace": 0, # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + ######################################################################## + # OVERRIDING THRESHOLDS AND VARIABLES + ######################################################################## + + ### THRESHOLDS AND 
VARIABLES + ### Analysis Class + ### To override, override the associated method in your text product class. + def temporalCoverage_threshold(self, parmHisto, timeRange, componentName): + # Replaces IN_RANGE_THRESHOLD -- Note that this threshold is now used + # differently i.e. it is the percentage of the TIMERANGE covered by the + # grid in order to include it in the analysis + # Percentage of temporal coverage default value (if not found in temporalCoverage_dict) + # Used by temporalCoverage_flag + return 5.0 + + def temporalCoverage_dict(self, parmHisto, timeRange, componentName): + # Replaces IN_RANGE_DICT -- Note that this these thresholds are now used + return { + "LAL": 0, + "MinRH": 0, + "MaxRH": 0, + "MinT": 50, + "MaxT": 10, + "Haines": 0, + "Wx": 15, + "PoP" : 50, + } + + # Uncomment any combinations you wish to collapse. + # For example, if the first entry is uncommented, + # the phrase: scattered rain showers and widespread rain + # will collapse to: scattered rain showers. + def wxCombinations(self): + return [ + ("RW", "R"), + ("SW", "S"), + ## ("T","RW"), + ] + + def vector_mag_difference_nlValue_dict(self, tree, node): + # Replaces WIND_THRESHOLD + # Magnitude difference. If the difference between magnitudes + # for sub-ranges is greater than this value, + # the different magnitudes will be noted in the phrase. + # Units can vary depending on the element and product + return { + "Wind": 10, + "Wind20ft": 10, + "TransWind": 10, + "FreeWind": 10, + "Swell": 1, # ft + "Swell2": 1, # ft + } + + + def vector_dir_difference_dict(self, tree, node): + # Direction difference. If the difference between directions + # for 2 sub-periods is greater than this value, + # the different directions will be noted in the phrase. 
+ # Units are degrees + return { + "Wind": 50, # degrees + "TransWind": 60, # mph + "FreeWind": 60, # mph + "Swell":60, # degrees + "Swell2":60, # degrees + } + + def maxReported_threshold_dict(self, tree, node): + # Winds will not be reported above this value: + # For example, if set to 30, all winds above 30 will + # be reported as: + # "Winds up to 30 knots." + return { + "Wind": 200, # knots or mph depending on product + } + + def null_nlValue_dict(self, tree, node): + # Threshold below which values are considered "null" and not reported. + # Units depend on the element and product + dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) + dict["Wind"] = 7 + return dict + + def first_null_phrase_dict(self, tree, node): + # Phrase to use if values THROUGHOUT the period or + # in the first period are Null (i.e. below threshold OR NoWx) + # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. + dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) + dict["Wave"] = "variable winds less than 10 knots" + return dict + + def null_phrase_dict(self, tree, node): + # Phrase to use for null values in subPhrases other than the first + # Can be an empty string + # E.g. "NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" + dict = TextRules.TextRules.null_phrase_dict(self, tree, node) + dict["Wind"] = "variable less than 10 knots" + dict["Wx"] = "" + return dict + + def gust_wind_difference_nlValue(self, tree, node): + # Difference between gust and maxWind below which gusts are not mentioned + # Units are mph + return 15 + + def maximum_range_nlValue_dict(self, tree, node): + # Maximum range to be reported within a phrase + # e.g. 
5 to 10 mph + # Units depend on the product +# return {} + return { + "Wind": 10, + } + + def marine_wind_flag(self, tree, node): + # If 1, Wind combining and wording will reflect the + # crossing of significant thresholds such as gales + return 1 + + def marine_wind_combining_flag(self, tree, node): + # If 1, Wind combining will reflect the + # crossing of significant thresholds such as gales. + # E.g. "Hurricane forece winds to 00 knots." instead of + # "North hurricane force winds to 100 knots easing to + # hurricane force winds to 80 knots in the afternoon." + return 1 + + def marine_abbreviateText(self, fcst): + fcst = re.sub(r'(?i)(\W|^)NORTH(?!WARD|ERN|WESTWARD|EASTWARD|WESTERN|EASTERN)(?=\W|$)', r'\1N', fcst) + fcst = re.sub(r'(?i)(\W|^)SOUTH(?!WARD|ERN|WESTWARD|EASTWARD|WESTERN|EASTERN)(?=\W|$)', r'\1S', fcst) + fcst = re.sub(r'(?i)(\W|^)EAST(?!WARD|ERN)(?=\W|$)', r'\1E', fcst) + fcst = re.sub(r'(?i)(\W|^)WEST(?!WARD|ERN)(?=\W|$)', r'\1W', fcst) + fcst = re.sub(r'(?i)(\W|^)KNOTS?(?=\W|$)', r'\1kt', fcst) + fcst = re.sub(r'(?i)(\W|^)MILLIBARS?(?=\W|$)', r'\1mb', fcst) + fcst = re.sub(r'(?i)(\W|^)FATHOMS?(?=\W|$)', r'\1fm', fcst) + fcst = re.sub(r'(?i)(\W|^)NAUTICAL MILES?(?=\W|$)', r'\1nm', fcst) + fcst = re.sub(r'(?i)(\W|^)(?:FOOT|FEET)(?=\W|$)', r'\1ft', fcst) + fcst = re.sub(r'(?i)(\W|^)POSITION(?=\W|$)', r'\1PSN', fcst) + fcst = re.sub(r'(?i)(\W|^)VISIBILITY(?=\W|$)', r'\1VSBY', fcst) + fcst = re.sub(r'(?i)(\W|^)THUNDERSTORM(?=\W|$)', r'\1TSTM', fcst) + fcst = re.sub(r'(?i)(\W|^)AVERAGE(?=\W|$)', r'\1AVG', fcst) + fcst = re.sub(r'(?i)(\W|^)ATLANTIC(?=\W|$)', r'\1ATLC', fcst) + fcst = re.sub(r'(?i)(\W|^)LONGITUDE(?=\W|$)', r'\1LONG', fcst) + fcst = re.sub(r'(?i)(\W|^)PACIFIC(?=\W|$)', r'\1PAC', fcst) + fcst = re.sub(r'(?i)(\W|^)DEGREE(?=\W|$)', r'\1deg', fcst) + fcst = re.sub(r'(?i)(\W|^)PRESSURE(?=\W|$)', r'\1PRES', fcst) + fcst = re.sub(r'(?i)(\W|^)(SUN)DAY(?=\W|$)', r'\1\2', fcst) + fcst = re.sub(r'(?i)(\W|^)(MON)DAY(?=\W|$)', r'\1\2', fcst) + fcst = 
re.sub(r'(?i)(\W|^)(TUE)SDAY(?=\W|$)', r'\1\2', fcst) + fcst = re.sub(r'(?i)(\W|^)(WED)NESDAY(?=\W|$)', r'\1\2', fcst) + fcst = re.sub(r'(?i)(\W|^)(THU)RSDAY(?=\W|$)', r'\1\2', fcst) + fcst = re.sub(r'(?i)(\W|^)(FRI)DAY(?=\W|$)', r'\1\2', fcst) + fcst = re.sub(r'(?i)(\W|^)(SAT)URDAY(?=\W|$)', r'\1\2', fcst) + fcst = re.sub(r'(?i)(\W|^)W HALF(?=\W|$)', r'\1West half', fcst) + fcst = re.sub(r'(?i)(\W|^)E HALF(?=\W|$)', r'\1east half', fcst) + fcst = re.sub(r'(?i)(\W|^)N HALF(?=\W|$)', r'\1north half', fcst) + fcst = re.sub(r'(?i)(\W|^)S HALF(?=\W|$)', r'\1soutH half', fcst) + fcst = re.sub(r'(?i)(\W|^)W THIRD(?=\W|$)', r'\1west third', fcst) + fcst = re.sub(r'(?i)(\W|^)E THIRD(?=\W|$)', r'\1east third', fcst) + fcst = re.sub(r'(?i)(\W|^)N THIRD(?=\W|$)', r'\1north third', fcst) + fcst = re.sub(r'(?i)(\W|^)S THIRD(?=\W|$)', r'\1south third', fcst) + fcst = re.sub(r'(?i)(\W|^)W TWO(?=\W|$)', r'\1west two', fcst) + fcst = re.sub(r'(?i)(\W|^)E TWO(?=\W|$)', r'\1east two', fcst) + fcst = re.sub(r'(?i)(\W|^)N TWO(?=\W|$)', r'\1north two', fcst) + fcst = re.sub(r'(?i)(\W|^)S TWO(?=\W|$)', r'\1south two', fcst) + return fcst + + def rounding_method_dict(self, tree, node): + # Special rounding methods + # + return { + "Wind": self.marineRounding, + } + + def waveHeight_words(self, tree, node): + "Create phrase for waves" + statDict = node.getStatDict() + stats = self.getStats(statDict, "WaveHeight") + if stats is None: + nodataPhrase = self.noWaveHeight_phrase( + tree, node, "WaveHeight", "WaveHeight") + return self.setWords(node.parent, nodataPhrase) + + min, max = self.getValue(stats, "MinMax") + #avg = (min + max)/2 + words = self.wave_range(max) + return self.setWords(node, words) + + def wave_range(self, avg): + # Make wave ranges based off the average wave value + table = ((2, "2 feet or less"), (3, "1 to 3 feet"), + (4, "2 to 4 feet"), (5, "3 to 5 feet"), + (6, "4 to 6 feet"), (8, "5 to 8 feet"), + (9, "6 to 9 feet"), (10, "7 to 10 feet"), + (11, "8 to 11 feet"), (12, 
"9 to 12 feet"), + (14, "10 to 14 feet"), (17, "12 to 17 feet"), + (20, "15 to 20 feet"), (25, "20 to 25 feet"), + (30, "25 to 30 feet"), (100, "over 30 feet")) + range = "" + for max, str in table: + if avg <= max: + range = str + break + return range + + ######################################################################## + # COMPONENT PRODUCT DEFINITIONS + ######################################################################## + + def _PoP_analysisMethod(self, componentName): + # Alternative PoP analysis methods for consistency between PoP and Wx + return self.stdDevMaxAvg + #return self.maxMode + #return self.maximum + + def GLFFcstFirst(self): + + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("Wind", self.vectorMinMax, [3]), + ("WindGust", self.minMax, [3]), + ("Wx", self.rankedWx, [6]), + ("T", self.minMax), + ("WaveHeight", self.minMax, [3]), + ("PoP", self._PoP_analysisMethod("GLFFcstFirst"), [6]), + ("PoP", self.binnedPercent, [6]), + ], + "phraseList":[ + self.marine_wind_withGusts_phrase, + self.weather_orSky_phrase, + self.visibility_phrase, + self.severeWeather_phrase, + self.waveHeight_phrase, + ], + } + + + def GLFFcstShort(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("Wind", self.vectorMinMax, [3]), + ("WindGust", self.minMax, [3]), + ("Wx", self.rankedWx, [6]), + ("T", self.minMax), + ("WaveHeight", self.minMax, [3]), + ("PoP", self._PoP_analysisMethod("GLFFcstShort"), [6]), + ("PoP", self.binnedPercent, [6]), + ], + "phraseList":[ + self.marine_wind_withGusts_phrase, + self.weather_orSky_phrase, + self.visibility_phrase, + self.severeWeather_phrase, + self.waveHeight_phrase, + self._warnOutlook_phrase, + ], + } + + def GLFFcstExt(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + 
self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + ("Wind", self.vectorMinMax, [3]), + ("Wx", self.rankedWx, [6]), + ("T", self.minMax), + ("WaveHeight", self.minMax, [6]), + ("PoP", self._PoP_analysisMethod("GLFFcstExt"), [6]), + ("PoP", self.binnedPercent, [6]), + ], + "phraseList":[ + self.marine_wind_phrase, + self.weather_phrase, + self.visibility_phrase, + self.waveHeight_phrase, + ], + } + + def generateForecast(self, argDict): + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the edit areas + try: + if self._groupings == "West 1/3:east 2/3": + self._areaList = [("west_one_third", "WEST THIRD"), + ("east_two_thirds", "EAST TWO THIRDS")] + elif self._groupings == "West 2/3:East 1/3": + self._areaList = [("west_two_thirds", "WEST TWO THIRDS"), + ("east_one_third", "EAST ONE THIRD")] + elif self._groupings == "West 1/4:East 3/4": + self._areaList = [("west_one_quarter", "WEST QUARTER"), + ("east_three_quarters", "EAST THREE QUARTERS")] + elif self._groupings == "West 3/4:East 1/4": + self._areaList = [("west_three_quarters", "WEST THREE QUARTERS"), + ("east_one_quarter", "EAST ONE QUARTER")] + elif self._groupings == "Entire Lake": + self._areaList = [(self._lake_name, "")] + elif self._groupings == "West 1/2:East 1/2": + self._areaList = [("west_half", "WEST HALF"), ("east_half", "EAST HALF")] + elif self._groupings == "North 1/3:South 2/3": + self._areaList = [("north_one_third", "NORTH THIRD"), + ("south_two_thirds", "SOUTH TWO THIRDS")] + elif self._groupings == "North 2/3:South 1/3": + self._areaList = [("north_two_thirds", "NORTH TWO THIRDS"), + ("south_one_third", "SOUTH ONE THIRD")] + elif self._groupings == "North 1/4:South 3/4": + self._areaList = [("north_one_quarter", "NORTH QUARTER"), + ("south_three_quarters", "SOUTH THREE QUARTERS")] + elif self._groupings == "North 3/4:South 1/4": + self._areaList = [("north_three_quarters", "NORTH THREE QUARTERS"), + ("south_one_quarter", 
"SOUTH ONE QUARTER")] + elif self._groupings == "Entire Lake": + self._areaList = [(self._lake_name, "")] + elif self._groupings == "North 1/2:South 1/2": + self._areaList = [("north_half", "NORTH HALF"), ("south_half", "SOUTH HALF")] + else: + self._areaList = [(self._lake_name, "")] + except: + self._areaList = [(self._lake_name, "")] + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." + + # determine time ranges for MAFOR + self._determineTimeRanges(argDict) + + # Sample the data + self._sampleData(argDict) + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + + # make sure outlook flag is set to 0 + self._outlookflag = 0 + + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList + self._groupings = None + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._language = argDict["language"] + + # Initialize mafor list + self._mafors = [] + return None + + def _determineTimeRanges(self, argDict): + # Set up the Narrative Definition and initial Time Range + 
self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict)) + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._issueTime = self._issuanceInfo.issueTime() + self._expireTime = self._issuanceInfo.expireTime() + self._expireTimeDDHHMM = time.strftime("%d%H%M", + time.gmtime(self._expireTime.unixTime())) + self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + if self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + self._definition["priorPeriod"] = 24 + + # Set up self._headlineRange + self._headlineRange = TimeRange.TimeRange(self._timeRange.startTime(), + self._timeRange.startTime() + 24*3600) + + + trDict = {} + trList = [] + currentLocalTime, shift = self.determineTimeShift() + day = currentLocalTime.day + month = currentLocalTime.month + year = currentLocalTime.year + startTime = AbsTime.absTimeYMD(year, month, day) + + if self._productIssuance == "400 AM": + start = self.localTime(startTime, 8, shift) + wxstart = self.localTime(startTime, 6, shift) + wxend = self.localTime(startTime, 18, shift) + if self._productIssuance == "1000 AM": + start = self.localTime(startTime, 14, shift) + wxstart = self.localTime(startTime, 10, shift) + wxend = self.localTime(startTime, 18, shift) + if self._productIssuance == "400 PM": + start = self.localTime(startTime, 20, shift) + wxstart = self.localTime(startTime, 18, shift) + wxend = self.localTime(startTime, 30, shift) + if self._productIssuance == "1000 PM": + start = self.localTime(startTime, 26, shift) + wxstart = self.localTime(startTime, 22, shift) + wxend = self.localTime(startTime, 30, shift) + # MAFOR config + timeRange = TimeRange.TimeRange(start, start + 3600) + periods = self.getPeriods(timeRange, 3, 1, 8) + # coded winds and waves + for i in range(0,8): + 
trList.append(periods[i]) + trDict["MAFOR"+repr(i)] = periods[i][0] + + self._trDict = trDict + self._trList = trList + + # worded weather times in mafor + self._wxtimerange1 = TimeRange.TimeRange(wxstart, wxend) + self._wxtimerange2 = TimeRange.TimeRange(wxend, wxend+(3600*12)) + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + return None + + def _sampleData(self, argDict): + # Sample and analyze the data for the narrative + self._narrativeProcessor = ForecastNarrative.ForecastNarrative() + error = self._narrativeProcessor.getNarrativeData( + argDict, self._definition, self._timeRange, + self._areaList, self._issuanceInfo) + if error is not None: + return error + print(("AREA LIST: ", self._areaList)) + self._sampler = self.getSampler(argDict, + (self._MAFORAnalysisList(), self._trList, self._areaList)) + return None + + def _preProcessProduct(self, fcst, argDict): + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + '\n' + + fcst = fcst + s.upper() + + s = self._lakezone + "-" + self._expireTimeDDHHMM + "-\n\n" +\ + productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + fcst = fcst + self._headerphrase + "\n\n" + ".SYNOPSIS..." 
+ "\n\n" + + # Set up hazards + self.getHazards(argDict, self._areaList) + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area combination + fcst = fcst + areaLabel+ "\n\n" + + # Headlines + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + + + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + + argDict["language"] = self._language + + # Generate Narrative Forecast for Edit Area + fcstSegment = self._narrativeProcessor.generateForecast( + argDict, editArea, areaLabel) + + # Handle abbreviations + if self._useAbbreviations == 1: + fcstSegment = self.marine_abbreviateText(fcstSegment) + fcstSegment = re.sub(r'\n', r' ',fcstSegment) + fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) + fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) + fcst = fcst + fcstSegment + + # mafor stuff + + # grab headline in mafor + maforheadline = self.generateProduct("MaforHeadline", argDict, + area = editArea, areaLabel=areaLabel, + timeRange = self._headlineRange) + + maforheadline = string.replace(maforheadline, "\n", "") + + if self._processmafor == 1: + mafor = self._makeMAFOR(editArea, areaLabel, self._trDict, + self._trList, self._MAFORAnalysisList(), maforheadline, argDict, areaLabel) + self._mafors.append(mafor) + ## + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + + # Adjust some phrases to local requirements + # ======================================== + fcst = string.replace(fcst,"widespread rain", "occasional rain") + fcst = string.replace(fcst,"widespread showers", "showers") + fcst = string.replace(fcst,"widespread thunderstorms", "thunderstorms") + + fcst = string.replace(fcst, "rain showers", "showers") + 
fcst = string.replace(fcst, "thunderstorms and showers", "showers and thunderstorms") + #phrase = string.replace(phrase, "widespread", "") + + # This is the footer for an edit area combination + return fcst + "\n" + + def _postProcessProduct(self, fcst, argDict): + + fcst = fcst + "$$\n\n" + + if string.find(fcst, "storm force") > 0 or\ + string.find(fcst, "storm warning") > 0 or\ + string.find(fcst, "hurricane") > 0: + fcst = fcst + "&&STORM\n\n" + + if self._processmafor == 1: + maforzone = self._maforzone+"-"+ self._expireTimeDDHHMM + "-\n" + maforissue = "MAFOR " + self._getMaforTime(argDict) + "/" + "\n" + + fcst = fcst + maforzone + maforissue + for mafor in self._mafors: + fcst = fcst + mafor + "\n" + fcst = fcst + "$$\n\n" + + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _warnOutlook_phrase(self): + return { + "phraseMethods": [ + self._warnOutlook_words, # phrase.words + ], + } + def _warnOutlook_words(self, tree, phrase): + # will put an outlook phrase in the text + + timeRange = phrase.getTimeRange() + windStats = tree.stats.get("Wind", timeRange, mergeMethod="Max") + if windStats is None: + return self.setWords(phrase, "") + + max, dir = windStats + words = "" + if max >= 34 and (self._outlookflag == 0): + words = "a gale warning may be needed" + self._outlookflag = 1 + if max >= 48 and (self._outlookflag == 0 or self._outlookflag == 1): + words = "a storm warning may be needed" + self._outlookflag = 2 + if max < 34: + words = "" + self._outlookflag = 0 + return self.setWords(phrase, words) + + def _issuance_list(self, argDict): + # This method sets up configurable issuance times with associated + # narrative definitions. See the Text Product User Guide for documentation. 
+ narrativeDefAM = [ + ("GLFFcstFirst", "period1"), ("GLFFcstFirst", 12), ("GLFFcstShort", 12), ("GLFFcstShort", 12), + ("GLFFcstExt", 18), ("GLFFcstExt", 24), ("GLFFcstExt", 24), + ] + narrativeDefPM = [ + ("GLFFcstFirst", "period1"), ("GLFFcstFirst", 12), ("GLFFcstShort", 12), ("GLFFcstShort", 12), + ("GLFFcstShort", 12), + ("GLFFcstExt", 18), ("GLFFcstExt", 24), ("GLFFcstExt", 24), + ] + return [ + ("400 AM", self.DAY(), self.NIGHT(), 16, + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("1000 AM", "issuanceHour", self.NIGHT(), 16, + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + # End times are tomorrow: + ("400 PM", self.NIGHT(), 24 + self.DAY(), 24 + 4, + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("1000 PM", "issuanceHour", 24 + self.DAY(), 24 + 4, + ".REST OF TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDefPM), + ] + + def lateDay_descriptor(self, tree, node, timeRange): + # If time range is in the first period, return period1 descriptor for + # late day -- default 3pm-6pm + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateDayPhrase() + else: + return "late in the afternoon" + + def lateNight_descriptor(self, tree, node, timeRange): + # If time range is in the first period, return period1 descriptor for + # late night -- default 3am-6am + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateNightPhrase() + else: + return "early in the morning" + + def splitDay24HourLabel_flag(self, tree, node): + # Return 0 to have the TimeDescriptor module label 24 hour periods + # with simply the weekday name (e.g. Saturday) + # instead of including the day and night periods + # (e.g. Saturday and Saturday night) + # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" + # is set to zero. 
+ # NOTE: This applied only to periods that are exactly 24-hours in length. + # Periods longer than that will always be split into day and night labels + # (e.g. SUNDAY THROUGH MONDAY NIGHT) + compName = node.getComponentName() + if compName == "GLFFcstExt": + return 0 + else: + return 1 + +# def _getExpTime(self, argDict): +# # get exp time for EST +# currentTime = argDict['creationTime'] +# curday = time.strftime("%d", time.localtime(currentTime)) +# nextday = time.strftime("%d",time.localtime(currentTime + 86400)) +# timezone = time.strftime("%Z", time.localtime(currentTime)) +# +# if self._productIssuance == "400 AM": +# if timezone == "EST" or timezone == "CST": +# return curday + "1500" +# else: +# return curday + "1400" +# elif self._productIssuance == "1000 AM": +# if timezone == "EST" or timezone == "CST": +# return curday + "2100" +# else: +# return curday + "2000" +# elif self._productIssuance == "400 PM": +# if timezone == "EST" or timezone == "CST": +# return nextday + "0300" +# else: +# return nextday + "0200" +# elif self._productIssuance == "1000 PM": +# if timezone == "EST" or timezone == "CST": +# return nextday + "0900" +# else: +# return nextday + "0800" +# + def significant_wx_visibility_subkeys(self, tree, node): + # Weather values that constitute significant weather to + # be reported regardless of visibility. + # If your visibility_wx_threshold is None, you do not need + # to set up these subkeys since weather will always be + # reported. + # Set of tuples of weather key search tuples in the form: + # (cov type inten) + # Wildcards are permitted. 
+ return [("* *")] + + ################################# + # MAFOR code + + def WxMAFOR(self): + return { + "type":"component", + "methodList": [ + self.assemblemaforPhrases, + self.wordWrap, + ], + "analysisList": [ + #("Wind", self.vectorMinMax), + #("WaveHeight", self.minMax), + ("Wx", self.rankedWx, [6]), + ("T", self.minMax), + ("PoP", self._PoP_analysisMethod("WxMAFOR"), [6]), + ("PoP", self.binnedPercent, [6]), + ], + "phraseList":[ + self.weather_phrase, + ], + } + + def WaveMAFOR(self): + return { + "type":"component", + "methodList": [ + self.assemblemaforPhrases, + self.wordWrap, + ], + "analysisList": [ + #("Wind", self.vectorMinMax), + #("WaveHeight", self.minMax), + ("WaveHeight", self.minMax, [6]), + ], + "phraseList":[ + self.waveHeight_phrase, + ], + } + + def assemblemaforPhrases(self, tree, component): + # Assemble component phrases and add Label + # Qualify the phrases with local effect qualifiers + # if present. + # e.g. "near the coast" + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + fcst = "" + lastQualifier = None + lastPhrase = None + for phrase in component.get("childList"): + words = phrase.get("words") + if words is None: + return + words, lastQualifier = self.qualifyWords( + phrase, words, "conjunctiveQualifier", lastQualifier, lastPhrase) + lastPhrase = phrase + fcst = fcst + words + # Add label + curLocalTime, shift = self.determineTimeShift() + issuanceInfo = tree.get("issuanceInfo") + index = component.getIndex() + label = self.createmaforLabel(component.get("timeRange"), + issuanceInfo, curLocalTime, shift, index) + if fcst == "": + label = "" + return self.setWords(component, fcst + label) + + def createmaforLabel(self, timeRange, issuanceInfo, currentLocalTime, shift, index=0): + # Make a label given the timeRange in GMT and the shift to + # convert it to local time. currentLocalTime can be used to + # compare to current day. 
+ + if timeRange.duration() <= 3600: + return "" + if index == 0: + try: + label = issuanceInfo.period1Label() + if label != "": + return label + except: + pass + try: + today = issuanceInfo.todayFlag() + except: + today = 1 + label = self.getWeekday(timeRange, holidays=0, shiftToLocal=1, + labelType="Combo", today=today, + tomorrow=0) + return label + + def maforheadline_phrase(self): + return { + "phraseMethods": [ + self.maforheadline_words, # phrase.words + ], + } + def maforheadline_words(self, tree, phrase): + timeRange = phrase.getTimeRange() + waveHeightStats = tree.stats.get("WaveHeight", timeRange, mergeMethod="Max") + windStats = tree.stats.get("Wind", timeRange, mergeMethod="Max") + if waveHeightStats is None or windStats is None: + return self.setWords(phrase,"@") + + # Look at max waveHeight + waveheightMax = int(waveHeightStats) + + # Look at the max wind for the period + windMax, dir = windStats + #print "wind", windStats, windMax, argDict["timeRange"] + + words = "" + if windMax >= 64: + words = "...hurricane force wind warning in effect..." + elif windMax >= 48: + words = "...storm warning in effect..." + elif windMax >= 34: + words = "...gale warning in effect..." +# elif windMax >= 21: +# words = "\n\n...small craft advisory in effect.." +# elif waveheightMax >= 5: +# words = "\n\n...small craft advisory in effect.." 
+ else: + words = "" + return self.setWords(phrase, words) + + def _MAFORAnalysisList(self): + return [ + ("Wind", self.vectorMinMax), + ("WindGust", self.minMax), + ("WaveHeight", self.minMax), + ("Wx", self.dominantWx), + ("T", self.minMax), + ] + + def MaforHeadline(self): + return { + "type":"component", + "methodList": [ + self.assembleChildWords, + self.wordWrap, + ], + "analysisList": [ + ("Wind", self.vectorMinMax), + ("WaveHeight", self.minMax), + ], + "phraseList":[ + self.maforheadline_phrase + ], + } + + def _makeMAFOR(self, editArea, areaList, trDict, trList, analysis, maforheadline, argDict, areaLabel): + + MAFOR = [] + + for i in range (0,8): + statDict = self.getStatDict(self._sampler, analysis, + trDict["MAFOR"+ repr(i)], editArea) + windstats = self.getStats(statDict, "Wind") + + statDict2 = self.getStatDict(self._sampler, analysis, + trDict["MAFOR"+ repr(i)], editArea) + wxstats = self.getStats(statDict2, "Wx") + + statDict3 = self.getStatDict(self._sampler, analysis, + trDict["MAFOR"+ repr(i)], editArea) + Tstats = self.getStats(statDict3, "T") + + statDict4 = self.getStatDict(self._sampler, analysis, + trDict["MAFOR"+ repr(i)], editArea) + Guststats = self.getStats(statDict4, "WindGust") + + if Guststats is None: + Gustmax = 0 + else: + Gustmax = self.getValue(Guststats, "Max") + + if Tstats is None: + Tmin = 32 + else: + Tmin, Tmax = Tstats + + # wind mafor code + + if windstats == "None" or windstats is None: + windcode = "MMMM" + maxspd = 0 + else: + windspd, winddir = windstats + minspd, maxspd = windspd + windcode = self._findwind(winddir, maxspd) + + # weather mafor code # + + if wxstats is None or wxstats == []: + wxcode = "M" + else: + subkeyList = wxstats + wxcode = self._wxcode(maxspd, Gustmax, Tmin, subkeyList) + + code = windcode + wxcode + MAFOR.append(code) + + code1 = code2 = code3 = code4 = code5 = code6 = code7 = code8 = '' + + first = 0 + next = 1 + time = 1 + mafor = "" + ok = 1 + # sort the code # + while first < 8: + while 
ok == 1: + if next > 7: + final = first + first = next + ok = 0 + break + if MAFOR[first] == MAFOR[next]: + first = first + next = next + 1 + time = time + 1 + ok = 1 + else: + final = first + first = next + next = next + 1 + ok = 0 + break + + mafor = self._makeCode(mafor, time, MAFOR[final]) + ok = 1 + time = 1 + + mafor = mafor + "." + mafor = string.replace(mafor, " .", ". ") + + # wx wording # + + wxmafor1 = self.generateProduct("WxMAFOR", argDict, + area = editArea, areaLabel=areaLabel, + timeRange = self._wxtimerange1) + wxmafor1 = string.replace(wxmafor1, "\n", "") + wxmafor1 = string.replace(wxmafor1, ". .", " ") + wxmafor1 = string.replace(wxmafor1, "..", " ") + if wxmafor1 != "": + wxmafor1 = wxmafor1 + ". " + + wxmafor2 = self.generateProduct("WxMAFOR", argDict, + area = editArea, areaLabel=areaLabel, + timeRange = self._wxtimerange2) + wxmafor2 = string.replace(wxmafor2, "\n", "") + wxmafor2 = string.replace(wxmafor2, ". .", " ") + wxmafor2 = string.replace(wxmafor2, "..", " ") + if wxmafor2 != "": + wxmafor2 = wxmafor2 + ". " + + wxmafor = wxmafor1 + wxmafor2 + + mafor = mafor + wxmafor + + # wave wording # + + wavemafor1 = self.generateProduct("WaveMAFOR", argDict, + area = editArea, areaLabel=areaLabel, + timeRange = self._wxtimerange1) + wavemafor1 = string.replace(wavemafor1, "\n", "") + wavemafor1 = string.replace(wavemafor1, "..", " ") + wavemafor1 = string.replace(wavemafor1, ". .", " ") + wavemafor1 = wavemafor1 + ". " + + wavemafor2 = self.generateProduct("WaveMAFOR", argDict, + area = editArea, areaLabel=areaLabel, + timeRange = self._wxtimerange2) + wavemafor2 = string.replace(wavemafor2, "\n", "") + wavemafor2 = string.replace(wavemafor2, ". .", " ") + wavemafor2 = string.replace(wavemafor2, "..", " ") + wavemafor2 = wavemafor2 + ". 
" + + wavemafor = wavemafor1 + wavemafor2 + + mafor = mafor + wavemafor + + # waves mafor code # + waveMAFOR = [] + for j in range (0,2): + statDict2 = self.getStatDict(self._sampler, analysis, + trDict["MAFOR"+ repr(j)], editArea) + wavestats = self.getStats(statDict2, "WaveHeight") + if wavestats is "None" or wavestats is None: + #mafor = mafor + " " + "MMMMM" + wavecode = "MMMM" + waveMAFOR.append(wavecode) + else: + min, max = wavestats + wavecode, minimum, maximum = self._findwave(max) + waveMAFOR.append(wavecode) + + if waveMAFOR[0] == waveMAFOR[1]: + mafor = mafor + "22" + waveMAFOR[0] + ". " + else: + mafor = mafor + "21" + waveMAFOR[0] + " " +\ + "21" + waveMAFOR[1] + ". " + + if maforheadline == "": + connector = " " + else: + connector = "" + + mafor = self._lake_name + " " + areaList + connector + maforheadline + mafor + mafor = string.replace(mafor, "_", " ") + mafor = string.replace(mafor, "HALF", "1/2") + mafor = string.replace(mafor, "THREE QUARTERS", "3/4") + mafor = string.replace(mafor, "QUARTER", "1/4") + mafor = string.replace(mafor, "TWO THIRDS", "2/3") + mafor = string.replace(mafor, "THIRD", "1/3") + mafor = string.replace(mafor, "LAKE SUPERIOR", "") + + mafor = self.linebreak(mafor, 69) + + return mafor + def _findwind(self, winddir, windspd): + + winddir = self.dirToText(winddir) + minspd = 7 #min wind speed that direction is included in mafor + + if windspd<=minspd: + dir = repr(9) + + if winddir == "N": + dir = repr(8) + elif winddir == "NE": + dir = repr(1) + elif winddir == "E": + dir = repr(2) + elif winddir == "SE": + dir = repr(3) + elif winddir == "S": + dir = repr(4) + elif winddir == "SW": + dir = repr(5) + elif winddir == "W": + dir = repr(6) + elif winddir == "NW": + dir = repr(7) + else: + dir = repr(9) + + if windspd > 60: + spd = repr(9) + if windspd <= 60: + spd = repr(8) + if windspd <= 55: + spd = repr(7) + if windspd <= 48: + spd = repr(6) + if windspd <= 35: + spd = repr(5) + if windspd <= 33: + spd = repr(4) + if 
windspd <= 25: + spd = repr(3) + if windspd <= 20: + spd = repr(2) + if windspd <= 15: + spd = repr(1) + if windspd <= 10: + spd = repr(0) + + return dir + spd + +############ +# weather mafor code +############ + + def _wxcode(self, windspd, Gust, T, subkeyList): + + index = 0 + + length = len(subkeyList) + for wxKey in subkeyList: + wxType = wxKey.wxType() + cov = wxKey.coverage() + vis = wxKey.visibility() + + number = "0" + + if wxType == "ZY" and T >= 23 and T < 32: + number = "1" + + if wxType == "ZY" and T < 23: + number = "2" + + if wxType == "WG": + if cov == "Def" or cov == "Wide" or cov == "Areas": + number = "3" + + if wxType == "WG": + if vis == "1/4SM" or vis == "0SM": + if cov == "Def" or cov == "Wide" or cov == "Areas": + number = "4" + + if wxType == "L": + if cov == "Like" or cov == "Ocnl" or cov == "Def" or cov == "Wide" or cov == "Num" or cov == "Areas": + number = "5" + + if wxType == "R" or wxType == "RW": + if cov == "Like" or cov == "Ocnl" or cov == "Def" or cov == "Wide" or cov == "Num" or cov == "Areas": + number = "6" + + if wxType == "S" or wxType == "SW": + if cov == "Like" or cov == "Ocnl" or cov == "Def" or cov == "Wide" or cov == "Num" or cov == "Areas": + number = "7" + + if (Gust - windspd) > 16: + if wxType == "SW" or wxType == "RW" or wxType == "T": + number = "8" + + if wxType == "T": + if cov == "Like" or cov == "Ocnl" or cov == "Def" or cov == "Wide" or cov == "Num" or cov == "Areas": + number = "9" + + index = index + 1 + if index <= length: + break + + return number + + + def _makeCode(self, mafor, time, wind): + + if time <=4: + code = "1" + repr(time) + wind + if time == 5: + code = "14" + wind + " 11" + wind + if time == 6: + code = "15" + wind + if time == 7: + code = "15" + wind + " 11" + wind + if time == 8: + code = "16" + wind + +# mafor = mafor + " " + code + mafor = mafor + code + " " + + return mafor + +############ +# wave mafor code +############ + + def _findwave(self, avg): + + if avg == 0: + range = "0002" + 
min=0 + max=2 + elif avg <= 2: + range = "0002" + min=0 + max=2 + elif avg > 2 and avg <= 3: + range = "0103" + min=1 + max=3 + elif avg > 3 and avg <= 4: + range = "0204" + min=2 + max=4 + elif avg > 4 and avg <= 5: + range = "0305" + min=3 + max=5 + elif avg > 5 and avg <= 6: + range = "0306" + min=3 + max=6 + elif avg > 6 and avg <= 7: + range = "0407" + min=4 + max=7 + elif avg > 7 and avg <= 8: + range = "0508" + min=5 + max=8 + elif avg > 8 and avg <= 10: + range = "0610" + min=6 + max=10 + elif avg > 10 and avg <= 12: + range = "0812" + min=8 + max=12 + elif avg > 12 and avg <= 14: + range = "1014" + min=10 + max=14 + elif avg > 14 and avg <= 16: + range = "1216" + min=12 + max=16 + elif avg > 16 and avg <= 18: + range = "1418" + min=14 + max=18 + elif avg > 18 and avg <= 20: + range = "1520" + min=15 + max=20 + elif avg > 20 and avg <= 23: + range = "1823" + min=18 + max=23 + elif avg > 23 and avg <= 25: + range = "2025" + min=20 + max=25 + elif avg > 25: + range = "2530" + min=25 + max=30 + else: + range = "MMMM" + min=0 + max=0 + + return range, min, max + + + def _getMaforTime(self, argDict): + # get mafor time + currentTime = argDict['creationTime'] + curday = time.strftime("%d", time.localtime(currentTime)) + nextday = time.strftime("%d",time.localtime(currentTime + 86400)) + timezone = time.strftime("%Z", time.localtime(currentTime)) + + if self._productIssuance == "400 AM": + if timezone == "EST" or timezone == "CST": + return curday + "10" + else: + return curday + "09" + elif self._productIssuance == "1000 AM": + if timezone == "EST" or timezone == "CST": + return curday + "16" + else: + return curday + "15" + elif self._productIssuance == "400 PM": + if timezone == "EST" or timezone == "CST": + return curday + "22" + else: + return curday + "21" + elif self._productIssuance == "1000 PM": + if timezone == "EST" or timezone == "CST": + return curday + "14" + else: + return nextday + "03" + + def timePeriod_descriptorTable(self, tree, node): + # 
Contains definition for localtime start/end times and phrase + # Tuples, 0=startHrLT, 1=endHrLT, 2=phrase + day = self.DAY() + return [ + (day, (day+3)%24, "early in the morning"), # 6-9 + (day, (day+6)%24, "in the morning"), # 6-12 + (day, (day+9)%24, "until late afternoon"), # 6-15 + (day, (day+12)%24, ""), # 6-18 + ((day+3)%24, (day+6)%24, "late in the morning"), # 9-12 + ((day+3)%24, (day+9)%24, "around midday"), # 9-15 + ((day+3)%24, (day+12)%24, "by noon"), # 9-18 + ((day+6)%24, (day+9)%24, "early in the afternoon"), # 12-15 + ((day+6)%24, (day+12)%24, "in the afternoon"), # 12-18 + ((day+9)%24, (day+12)%24, "late in the afternoon"), # 15-18 + ((day+12)%24, (day+15)%24, "early in the evening"), # 18-21 + ((day+12)%24, (day+18)%24, "in the evening"), # 18-0 + ((day+12)%24, (day+21)%24, "through early morning"), # 18-3 + ((day+12)%24, day, ""), # 18-6 + ((day+15)%24, (day+18)%24, "late in the evening"), # 21-0 + ((day+15)%24, (day+21)%24, "around midnight"), # 21-3 + ((day+15)%24, day, "overnight"), # 21-6 + ((day+18)%24, (day+21)%24, "after midnight"), # 0-3 + ((day+18)%24, day, "after midnight"), # 0-6 + ((day+18)%24, (day+6)%24, ""), # 0-12 + ((day+21)%24, day, "early in the morning"), # 3-6 + ] + + def phrase_descriptor_dict(self, tree, node): + # Dictionary of descriptors for various weather elements in phrases + # The value for an element may be a phrase or a method + # If a method, it will be called with arguments: + # tree, node, key, element + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + dict["Wind"] = "winds" + dict["WindGust"] = "gusts up to" + dict["WaveHeight"] = "waves" + dict["hurricane force winds to"]= "hurricane force winds to" + dict["storm force winds to"] = "storm force winds to" + dict["gales to"] = "gales to" + dict["up to"] = "" + dict["around"] = "" + # Used for Headlines + dict["EXPECTED"] = "EXPECTED" + dict["IN EFFECT"] = "IN EFFECT" + return dict + + def scalar_difference_nlValue_dict(self, tree, node): + # 
Scalar difference. If the difference between scalar values + # for 2 sub-periods is greater than or equal to this value, + # the different values will be noted in the phrase. + dict = TextRules.TextRules.scalar_difference_nlValue_dict(self, tree, node) + dict["WaveHeight"] = { + (0, 6) : 1, + (6, 20) : 5, + 'default': 10, + } + return dict + + def minimum_range_nlValue_dict(self, tree, node): + # This threshold is the "smallest" min/max difference allowed between values reported. + # For example, if threshold is set to 5 for "MaxT", and the min value is 45 + # and the max value is 46, the range will be adjusted to at least a 5 degree + # range e.g. 43-48. These are the values that are then submitted for phrasing + # such as: + # HIGHS IN THE MID 40S + dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) + dict["Wind"] = { + (0, 5) : 0, # will be reported as "null" + (5, 8) : 5, + "default" : 10, + } + return dict + + # Returns a list of the Hazards allowed for this product in VTEC format. + # These are sorted in priority order - most important first. 
+ def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] + return [ + ('HF.A', marineActions, 'Marine'), # HURRICANE FORCE WIND WATCH + ('SR.A', marineActions, 'Marine'), # STORM WATCH + ('GL.A', marineActions, 'Marine'), # GALE WATCH + ('SE.A', marineActions, 'Marine'), # HAZARDOUS SEAS WATCH + ('UP.A', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH + ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING + ('SR.W', marineActions, 'Marine'), # STORM WARNING + ('GL.W', marineActions, 'Marine'), # GALE WARNING + ('SE.W', marineActions, 'Marine'), # HAZARDOUS SEAS WARNING + ('UP.W', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING + ('TO.A', allActions, 'Convective'), # TORNADO WATCH + ('SV.A', allActions, 'Convective'), # SEVERE THUNDERSTORM WATCH + ('MH.W', allActions, 'Ashfall'), # VOLCANIC ASHFALL WARNING + ('MH.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY + ('MF.Y', allActions, 'Fog'), # DENSE FOG ADVISORY + ('MS.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY + ('LO.Y', allActions, 'LowWater'), # LOW WATER ADVISORY + ] diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericHazards.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericHazards.py index 8a82d93e4b..660edff477 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericHazards.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericHazards.py @@ -1,1168 +1,1168 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -# ---------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 05/07/2015 4027 randerso Migrated A1 OB9.16 code to A2 -# 06/17/2015 4027 dgilling Perform case-insensitive -# comparisons in foundCTAs. -# 07/13/2015 4648 randerso Fix bullets in follow up products -# 02/24/2016 5411 randerso Make bullet headers upper case -# 07/15/2016 5749 randerso Replaced ellipses with commas in hazardBodyText -# - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: This product is a template for creating Hazard Products. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# GenericHazards -#------------------------------------------------------------------------- -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# -# You must set the following: -# -# productName defines name of product e.g. "Zone Forecast Product" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "SFTBOS" -# areaName (opt.) Area name for product header, such as "Western New York" -# wfoCityState City,state that the WFO is located in, such as "Buffalo NY" -# -# Optional Configuration Items -# -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined or the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product. -# Product is saved if autoWrite is 1. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. The product is not -# automatically stored unless autoStore is 1. This -# value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. The product is not -# automatically transmitted unless autoSend is 1. -# This value is also used for the default GUI -# entry for storage. -# autoSend If set to 1, then the product will be automatically -# sent on the AWIPS WAN to the "autoSendAddress" with -# the "awipsWANPil after product creation. 
-# autoStore If set to 1, then the product will be automatically -# stored into the text database using the "textdbPil" -# after product creation. -# autoWrite If set to 1, then the product will be automatically -# written to the "output" named disk file after -# product creation. -# -# lineLength max length of each line -# -# defaultEditAreas defines edit areas, default is Combinations -# -# purgeTime Maximum number of hours past issuance time for the -# expire time. -# includeCities If 1, cities will be included in the area header -# accurateCities If 1, cities are determined from grids -# citiesPhrase "Including the cities of" phrase used when including -# cities -# includeZoneNames If 1, zone names will be included in the area header -# easPhrase Optional EAS phrase to be include in product header -# -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -# includeOverviewHeadline If 1, the overview header is templated -# includeOverview If 1, the overview section is templated -# bulletProd If 1, the product will use a bullet format -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Hazards -#------------------------------------------------------------------------- -# Edit Areas Needed: None -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -# Combinations file -#------------------------------------------------------------------------- -# Component Products: -# Hazards -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". 
-#------------------------------------------------------------------------- -# Additional Information: -#------------------------------------------------------------------------- -# Example Output: -#------------------------------------------------------------------------- - -import LogStream -import TextRules -import SampleAnalysis -import time, string, types, copy, re -import CallToActions -import AbsTime - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis, - CallToActions.CallToActions): - Definition = { - "type": "smart", - "displayName": None, - - # Source database for product. Can be "Official", "Fcst" or "ISC" - "database": "Official", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/genHaz.txt", - "debug": 0, - # Name of map background for creating Combinations - # Can be: - # Zones_BOU - # FireWxZones_BOU - # Counties - # Marine_Zones_BOU - "mapNameForCombinations": "Zones_", - - ## Edit Areas: Create Combinations file with edit area combinations. - ## Can be: - ## EditAreas_PublicZones_BOU - ## EditAreas_FireWx_BOU - ## EditAreas_FIPS_BOU - ## EditAreas_MarineZones_BOU - "defaultEditAreas" : "EditAreas_PublicZones__", - - # product identifiers - "productName": "Generic Hazard Product", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "Georgia" -- optional - "wfoCityState": "", # Location of WFO - city,state - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. 
- "periodCombining" : 0, # If 1, combine periods, if possible - - # automatic functions - "autoSend": 0, #set to 1 to automatically transmit product - "autoSendAddress": "000", #transmission address - "autoStore": 0, #set to 1 to automatically store product in textDB - "autoWrite": 0, #set to 1 to automatically write product to file - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - # Language - "language": "english", - - "lineLength": 66, #Maximum line length - - "purgeTime": 8, # Maximum hours for expireTime - "includeCities": 1 , # Cities included in area header - "accurateCities": 0, # Include all cities in area header - "cityLocation": "CityLocation", # City lat/lon dictionary to use - "cityDescriptor":"Including the cities of", - "includeZoneNames":1, # Zone names will be included in the area header - "easPhrase" :"", # Optional EAS phrase to be include in product header - - "includeOverviewHeadline": 1, #include overview header - "includeOverview": 1, #include overview section - "bulletProd": 0, # do not default to bullets - "hazardSamplingThreshold": (10, None), #(%cov, #points) - "callToAction": 1, - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - self.__overviewText = "" - self.__procCTA = None - - def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the segments - hazardsC = argDict['hazards'] - segmentList = self.organizeHazards(hazardsC.rawAnalyzedTable()) - if len(segmentList) == 0: - return "No hazards to report" - - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each segment in the segmentList - fraction = 0 - 
fractionOne = 1.0/float(len(segmentList)) - percent = 50.0 - self.setProgressPercentage(50) - for segmentAreas in segmentList: - self.progressMessage(fraction, percent, "Making Product for Segment") - fcst = self._preProcessArea(fcst, segmentAreas, self._expireTime, argDict) - fcst = self._makeProduct(fcst, segmentAreas, argDict) - fcst = self._postProcessArea(fcst, segmentAreas, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._language = argDict["language"] - - # Set up information for Hazards product - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - return None - - def _determineTimeRanges(self, argDict): - # Set up the time range for 0-240 hours - self._timeRange = self.createTimeRange(0, 240) - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._issueTime = AbsTime.AbsTime(argDict['creationTime']) - self._currentTime = argDict['creationTime'] - self._expireTime = self._issueTime + self._purgeTime*3600 - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - return None - - def _preProcessProduct(self, fcst, argDict): - # Product header - if self._areaName != "": - self._areaName = " for " + self._areaName - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, - self._productName + self._areaName) - - if len(self._easPhrase) != 0: - eas = self._easPhrase + '\n' - else: - eas = '' - - s = self._wmoID + " " + 
self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = eas + productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - fcst = fcst + "Default overview section\n" - return fcst - - def _preProcessArea(self, fcst, segmentAreas, expireTime, argDict): - # This is the header for an edit area combination - areaHeader = self.makeAreaHeader( - argDict, "", self._issueTime, expireTime, - self._areaDictionary, None, cityDescriptor=self._cityDescriptor, - areaList=segmentAreas, includeCities=self._includeCities, - includeZoneNames = self._includeZoneNames, - accurateCities = self._accurateCities) - fcst = fcst + areaHeader - return fcst - - def _makeProduct(self, fcst, segmentAreas, argDict): - argDict["language"] = self._language - # Generate Narrative Forecast for Edit Area - # get the hazards text - - # We only need to get headlines for the first edit area - # in the segment since all areas in the segment have - # the same headlines - editArea = segmentAreas[0] - areaLabel = editArea - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - return fcst - - def _postProcessArea(self, fcst, segmentAreas, argDict): - return fcst + "\n\n$$\n\n" - - def _postProcessProduct(self, fcst, argDict): - # - # If an overview exists for this product, insert it - # - overview = self.finalOverviewText() - overviewSearch = re.compile(r'Default overview section', re.DOTALL) - fcst = overviewSearch.sub(overview, fcst) - # - # Added to place line feeds in the CAP tags to keep separate from CTAs - - fcst = string.replace(fcst, \ - r"PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", \ - r"\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n") - fcst = string.replace(fcst, "\n ","\n") - fcst = string.replace(fcst, "&&", "\n&&\n") - - # Prevent empty Call to Action Tags 
- fcst = re.sub(r'\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\s*&&\n', \ - "", fcst) - - fcst = self._indentBulletText(fcst) - - # - # Clean up multiple line feeds - # - - fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) - fcst = fixMultiLF.sub(r'\1', fcst) - - # finish progress meter - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - def allowedHazards(self): - return [] - - # Added for DR 21194 - def _bulletDict(self): - return [] - - # Added for DR 21309 - def _bulletOrder(self): - return [] - -## Replaced by 21309 code -## def _getBullets(self, newBulletList, argDict): -## -## ### get the bullet dictionary and split the bullets -## bDict = self._bulletDict() -## bLine = bDict.get(eachHazard['phen']) -## print 20* "*" + (eachHazard['phen']) -## bList = newBulletList.split(",") -## -## ### initialize the bullet output -## bullets = "" -## -## ### loop through the bullets and format the output -## for b in bList: -## bullets = bullets + "* " + b + "...|* Enter bullet text *|\n\n" -## # bullets = bullets + "\n" -## return bullets - - def _indentBulletText(self, prevText): - - print prevText - ### if previous text is empty, return nothing - if prevText is None: - return prevText - - ### - ### split the text - ### - bullets = [] - bullets = string.split(prevText, '\n\n') - if len(bullets) <= 1: - return prevText - - ### - ### process the text - ### - outText = "" - for b in bullets: - ### if first character is a * we found a bullet - if re.match("\*", b): - ### remove line feeds - removeLF = re.compile(r'(s*[^\n])\n([^\n])', re.DOTALL) - bullet = removeLF.sub(r'\1 \2',b) - ### indent code - bullet = self.indentText(bullet, indentFirstString = '', - indentNextString = ' ', maxWidth=self._lineLength, - breakStrings=[" ", "..."]) - ### - ### the "-" in the breakStrings line above is causing issues with - ### offices that use "-20 degrees" in the text. 
- ### - outText = outText + bullet + "\n\n" - else: ### not a bullet, CTA text - outText = outText + b + "\n\n" - - ### that's it - print outText - return outText - - # The _hazardTimePhrases method is passed a hazard key, and returns - # time phrase wording consistent with that generated by the headline - # algorithms in DiscretePhrases. - # - def hazardTimePhrases(self, hazard, argDict, prefixSpace=True): - timeWords = self.getTimingPhrase(hazard, argDict['creationTime']) - if prefixSpace and len(timeWords): - timeWords = " " + timeWords #add a leading space - return timeWords - - # - # The method hazardBodyText creates an attribution phrase - # - - def hazardBodyText(self, hazardList, argDict): - - bulletProd = self._bulletProd - hazardBodyPhrase = '' - - # - # First, sort the hazards for this segment by importance - # - - sortedHazardList = [] - for each in ['W', 'Y', 'A', 'O', 'S']: - for eachHazard in hazardList: - if eachHazard['sig'] == each: - if eachHazard not in sortedHazardList: - sortedHazardList.append(eachHazard) - - # - # Next, break them into individual lists based on action - # - - newList = [] - canList = [] - expList = [] - extList = [] - conList = [] - upgList = [] - statementList = [] - - for eachHazard in sortedHazardList: - if eachHazard['sig'] in ['S']and eachHazard['phen'] in ['CF', 'LS']: - statementList.append(eachHazard) - elif eachHazard['act'] in ['NEW', 'EXA', 'EXB']: - newList.append(eachHazard) - elif eachHazard['act'] in ['CAN']: - canList.append(eachHazard) - elif eachHazard['act'] in ['EXP']: - expList.append(eachHazard) - elif eachHazard['act'] in ['EXT']: - extList.append(eachHazard) - elif eachHazard['act'] in ['UPG']: - upgList.append(eachHazard) - else: - conList.append(eachHazard) - - # - # Now, go through each list and build the phrases - # - - nwsIntroUsed = 0 - - # - # This is for the new hazards - # - - phraseCount = 0 - lastHdln = None - for eachHazard in newList: - hdln = eachHazard['hdln'] - if 
len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) - hazNameA = self.hazardName(eachHazard['hdln'], argDict, True) - hazNameACap = self.sentence(hazNameA, addPeriod=False) - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - - if hazName in ["Winter Weather Advisory", "Winter Storm Warning", "Beach Hazards Statement"]: - forPhrase = " for |* Enter hazard type *|" - else: - forPhrase ="" - - if nwsIntroUsed == 0: - hazardBodyPhrase = "The National Weather Service in " + self._wfoCity - nwsIntroUsed = 1 - if phraseCount == 0: - phraseCount = 1 - if eachHazard['phen'] in ['HU', 'TR', 'TY']: - hazardBodyPhrase = hazardBodyPhrase + " has issued " + \ - hazNameA + ". " - else: - hazardBodyPhrase += " has issued " + hazNameA + forPhrase + \ - ", which is in effect" + endTimePhrase + ". " - elif phraseCount == 1: - phraseCount = 2 - if hdln != lastHdln: - if eachHazard['phen'] in ['HU', 'TR', 'TY']: - hazardBodyPhrase = hazardBodyPhrase + hazNameACap + \ - " has also been issued." - else: - hazardBodyPhrase = hazardBodyPhrase + hazNameACap + \ - " has also been issued. This " + hazName + forPhrase + \ - " is in effect" + endTimePhrase + ". " - else: - if eachHazard['phen'] in ['HU', 'TR', 'TY']: - hazardBodyPhrase = hazardBodyPhrase + hazNameACap + \ - " has also been issued." - else: - hazardBodyPhrase = hazardBodyPhrase + hazNameACap + forPhrase + \ - " has also been issued" + endTimePhrase + ". " - else: - if eachHazard['phen'] in ['HU', 'TR', 'TY']: - hazardBodyPhrase += "In addition, " + \ - hazNameA + " has been issued." - else: - hazardBodyPhrase += "In addition, " + \ - hazNameA + forPhrase + " has been issued. This " + hazName + \ - " is in effect" + endTimePhrase + ". 
" - lastHdln = hdln - # - # This is for the can hazards - # - - for eachHazard in canList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - if nwsIntroUsed == 0: - hazardBodyPhrase = "The National Weather Service in " +\ - self._wfoCity - nwsIntroUsed = 1 - hazardBodyPhrase = hazardBodyPhrase + \ - " has cancelled the " + hazName + ". " - else: - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " has been cancelled. " - - # - # This is for the exp hazards - # - - phraseCount = 0 - for eachHazard in expList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - if self._bulletProd: - continue # No attribution for this case if it is a bullet product - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - if eachHazard['endTime'] <= argDict['creationTime']: - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " is no longer in effect. " - else: - expTimeCurrent = argDict['creationTime'] - timeWords = self.getTimingPhrase(eachHazard, expTimeCurrent) - - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " will expire " + timeWords + ". " - - # - # This is for ext hazards - # - - for eachHazard in extList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - if self._bulletProd: - continue # No attribution for this case if it is a bullet product - endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " is now in effect" + endTimePhrase + ". " - - # - # This is for upgrade hazards - # - - for eachHazard in upgList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " is no longer in effect. 
" - - # - # This is for con hazards - # - - for eachHazard in conList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - if self._bulletProd: - continue # No attribution for this case if it is a bullet product - endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) - hazNameA = self.hazardName(eachHazard['hdln'], argDict, True) - hazardBodyPhrase = hazardBodyPhrase + hazNameA + \ - " remains in effect" + endTimePhrase + ". " - - # - # This is for statement hazards - # - - for eachHazard in statementList: - hazardBodyPhrase = "...|* Add statement headline *|...\n\n" - - # - # This adds segment text - # - - segmentText = '' - - # - # Check that this segment codes to determine capture or not, - # and frame captured text or not - # - incTextFlag, incFramingCodes, skipCTAs, forceCTAList = \ - self.useCaptureText(sortedHazardList) - - # - # - # Check that the previous text exists - # - - - foundCTAs = [] - for eachHazard in sortedHazardList: - if eachHazard.has_key('prevText'): - prevText = eachHazard['prevText'] - if eachHazard['pil'] == 'MWS': - startPara = 0 - else: - startPara = 1 - segmentText, foundCTAs = self.cleanCapturedText(prevText, - startPara, addFramingCodes = False, - skipCTAs = skipCTAs) - tester = segmentText[0] - if tester == '*': - startPara = 1 - else: - startPara = 2 - - segmentText, foundCTAs = self.cleanCapturedText(prevText, - startPara, addFramingCodes = False, - skipCTAs = skipCTAs) - - # - # Check that the segment text isn't very short or blank - # - - if len(segmentText) < 6: - incTextFlag = 0 - - # DR 21309 code addition from Middendorf (BYZ) - # - # Now if there is a new hazard and previous segment Text, then - # we may have to add bullets. 
- # - if incTextFlag and bulletProd: - for eachHazard in sortedHazardList: - if not eachHazard.has_key('prevText'): - newBullets = string.split(self._bulletDict().get(eachHazard['phen']),",") - print "newBullets = ", newBullets - print "segment text is: ", segmentText - for bullet in newBullets: - if re.search("\* " + bullet + "\.\.\.", segmentText, flags=re.IGNORECASE) is None: - print bullet + " not in segmentText" - start = self._bulletOrder().index(bullet) + 1 - end = len(self._bulletOrder()) - bulletFlag = 1 - for i in range(start,end): - if (re.search("\* " + self._bulletOrder()[i] + "\.\.\.", segmentText, flags=re.IGNORECASE) is not None) and bulletFlag: - print "* " + self._bulletOrder()[i] + "... found!" - segmentTextSplit = re.split("\* " + self._bulletOrder()[i] + "\.\.\.", segmentText, flags=re.IGNORECASE) - segmentText = string.join(segmentTextSplit,"* " + bullet.upper() + \ - "...|* Enter bullet text *|\n\n* " + self._bulletOrder()[i] + "...") - bulletFlag = 0 - if bulletFlag: - print "appending to bottom list of bullets!" - segmentTextSplit = re.split("PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", segmentText, flags=re.IGNORECASE) - segmentText = "\n" + string.join(segmentTextSplit,"* " + bullet.upper() + \ - "...|* Enter bullet text *|\n\nPRECAUTIONARY/PREPAREDNESS ACTIONS...") - bulletFlag = 0 - # - # Now if there is a can/exp hazard and previous segment Text, then - # we may have to remove bullets. - # - - if incTextFlag and bulletProd: - # First make list of bullets that we need to keep. - keepBulletList = [] - for eachHazard in sortedHazardList: - if eachHazard['act'] not in ["CAN","EXP"]: - saveBullets = string.split(self._bulletDict().get(eachHazard['phen']),",") - for saveBullet in saveBullets: - if saveBullet not in keepBulletList: - keepBulletList.append(saveBullet) - # Now determine which bullets we have to remove. 
- removeBulletList = [] - for eachHazard in sortedHazardList: - if eachHazard['act'] in ["CAN","EXP"]: - canBullets = string.split(self._bulletDict().get(eachHazard['phen']),",") - for canBullet in canBullets: - if canBullet not in keepBulletList and canBullet not in removeBulletList: - removeBulletList.append(canBullet) - print "hazardBodyText info: keepBulletList: ",keepBulletList - print "hazardBodyText info: removeBulletList: ",removeBulletList - # Finally remove the bullets no longer needed. - for bullet in removeBulletList: - if re.search("\* "+ bullet + "\.\.\.", segmentText, flags=re.IGNORECASE) is not None: - segmentTextSplit = re.split("\* " + bullet + "\.\.\.", segmentText, flags=re.IGNORECASE) - print "segmentTextSplit is ", segmentTextSplit - segmentTextSplit2 = string.split(segmentTextSplit[1],"*",1) - if len(segmentTextSplit2) == 2: - segmentTextSplit[1] = "*" + segmentTextSplit2[1] - else: - segmentTextSplit2 = re.split("PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", segmentTextSplit[1], 1, flags=re.IGNORECASE) - if len(segmentTextSplit2) == 2: - segmentTextSplit[1] = "PRECAUTIONARY/PREPAREDNESS ACTIONS..." 
+ segmentTextSplit2[1] - segmentText = string.join(segmentTextSplit,"") - if removeBulletList != []: - segmentText = "|*\n" + segmentText + "*|" - else: - segmentText = segmentText - # - # If segment passes the above checks, add the text - # - - print "hazardBodyText info: incTextFlag: ",incTextFlag - if incTextFlag: - print "hazardBodyText info: segmentText: ",segmentText - hazardBodyPhrase = hazardBodyPhrase + "\n\n" + \ - segmentText + '\n\n' - - elif bulletProd: - bulletFlag = 0 - if eachHazard['act'] == 'CAN': - hazardBodyPhrase = hazardBodyPhrase + \ - "\n\n|* Wrap-up text goes here *|.\n" - elif eachHazard['act'] == 'EXP': - hazardBodyPhrase = hazardBodyPhrase + \ - "\n\n|* Wrap-up text goes here *|.\n" - else: - bulletFlag = 1 -## print "bulletFlag is: ",bulletFlag - if bulletFlag: - newBulletList = [] - bullets = "" - for eachHazard in sortedHazardList: - ### get the default bullets for all hazards from the bullet diction - newBullets = string.split(self._bulletDict().get(eachHazard['phen']),",") - for newBullet in newBullets: - if newBullet not in newBulletList: - newBulletList.append(newBullet) - print "my bullets are: ", newBulletList - ### Determine the correct order for all bullets - bulletOrder = self._bulletOrder() - staticBulletOrder = self._bulletOrder() - for bullet in staticBulletOrder: - print "correct bullet order should be: ", bulletOrder - if bullet not in newBulletList: - bulletOrder.remove(bullet) - print "reordered bullets are: ", bulletOrder - for b in bulletOrder: - bullets = bullets + "* " + b.upper() + "...|* Enter bullet text *|\n\n" - - hazardBodyPhrase = hazardBodyPhrase + "\n\n" + bullets - - # If segment doesn't pass the checks, put in framing codes - else: - hazardBodyPhrase = hazardBodyPhrase + \ - "\n\n|* Statement text goes here *|.\n\n" - - # End code for DR 21310 - - # - # This adds the call to action statements. 
This is only performed - # if the segment is 'NEW' or if the previous text has been discarded - # due to a CAN/EXP/UPG segment - # - - # remove items from forceCTAList if they exist in foundCTAs. Note - # that the formats of these lists are different, thus this code - # is more complicated - for ent in foundCTAs: - #only process CTAs that are vtec phen/sig based - if ent.find('.') == 2: - phensig = (ent[0:2], ent[3]) #phen.sig - if phensig in forceCTAList: - del forceCTAList[forceCTAList.index(phensig)] - - hazardBodyPhrase = hazardBodyPhrase + '\n\n' - ctas = [] - for (phen,sig) in forceCTAList: - hazardPhenSig = phen + "." + sig - cta = self.defaultCTA(hazardPhenSig) - if cta not in ctas: - ctas.append(cta) - - if len(ctas) > 0: - hazardBodyPhrase = hazardBodyPhrase + \ - 'PRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n' - for c in ctas: - hazardBodyPhrase = hazardBodyPhrase + c + '\n\n' - hazardBodyPhrase = hazardBodyPhrase + '&&\n\n' - - # Make sure there is only one CAP tag pairs - hazardBodyPhrase = re.sub(r'&&\s*PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n', \ - "", hazardBodyPhrase) - - return hazardBodyPhrase - - def finalOverviewText(self): - #if didn't calculate any, use the default - if len(self.__overviewText) == 0: - - if self._includeOverviewHeadline: - overviewHeadline = "...|*Overview headline (must edit)*|...\n\n" - else: - overviewHeadline = "" - - if self._includeOverview: - overviewBody = ".|*Overview (must edit)*|.\n\n" - else: - overviewBody = "" - - #assemble the lines - overview = overviewHeadline + overviewBody - return overview - - else: - return self.__overviewText - - def overviewText(self, hazardList, pil): - - # - # This method finds an overview in the previous product - # - - overview = "" - for each in hazardList: - if (each.has_key('prevOverviewText') and - each.has_key('pil') and - each.has_key('endTime') and - each.has_key('act')): - if (each['pil'] == pil and - each['endTime'] > self._currentTime and - each['act'] not in ['CAN', 
'EXP']): - overview = each['prevOverviewText'] - self.__overviewText, dummy = self.cleanCapturedText( - overview, 0) - break - - def useCaptureText(self, hazardList): - #Based on the hazardlist, returns a tuple indicating: - # (inc capture text, inc framing codes, skip CTAs, forceCTAList) - # - # For the values to be considered, the 'hdln' value must be - # present in the list, or it needs to be a Statement (sig="S") - cans = ['CAN','UPG','EXP'] - acts = ['NEW','EXT','EXA','EXB','CON'] - foundACTS = 0 - foundCANS = 0 - foundSig = [] - for eh in hazardList: - if eh['act'] in acts and (len(eh['hdln']) or eh['sig'] == 'S'): - foundACTS = 1 - if eh['act'] in cans and (len(eh['hdln']) or eh['sig'] == 'S'): - foundCANS = 1 - if eh['sig'] not in foundSig: - foundSig.append(eh['sig']) - - includeFrameCodes = 0 - includeText = 1 - skipCTAs = 0 - forceCTAList = [] - - # all actions are in CAN, UPG, EXP only (don't include text) - if foundCANS and not foundACTS: - if 'S' in foundSig and len(foundSig) == 1: #only S - includeFrameCodes = 1 #capture text, but frame it - else: - includeText = 0 #end of non statement - - # something in CANS and something in acts (frame it, include text) - elif foundCANS and foundACTS: - includeFrameCodes = 1 - skipCTAs = 1 - for eh in hazardList: - if eh['act'] in acts and \ - (eh['phen'], eh['sig']) not in forceCTAList and \ - len(eh['hdln']): - forceCTAList.append((eh['phen'], eh['sig'])) - - #everything in active entries, captured text is used, but still - # need to handle the "NEW" entries. - else: - for eh in hazardList: - if eh['act'] in ['NEW'] and len(eh['hdln']): - forceCTAList.append((eh['phen'], eh['sig'])) - - return (includeText, includeFrameCodes, skipCTAs, forceCTAList) - - - - def cleanCapturedText(self, text, paragraphs, addFramingCodes = False, - skipCTAs = False): - # - # This method takes a block of text, wraps it preserving blank lines, - # then returns the part after 'paragraphs'. 
So, if paragraphs is 0, it - # returns the whole thing, if it's 2, it returns paragraphs 2 -> end, etc. - # Headlines are always removed. - # Framing codes are added if specified. - # - paras = self.convertSingleParas(text) #single paragraphs - - # keep track of any call to actions found - foundCTAs = [] - - # Process the paragraphs, keep only the interested ones - paraCount = 0 - processedText = '' - for eachPara in paras: - if paraCount >= paragraphs: - found = self.ctasFound(eachPara) #get list of ctas found - if skipCTAs and len(found): - pass - else: - processedText = processedText + eachPara + '\n\n' - #keep track of remaining CTAs in processed text - for f in found: - if f not in foundCTAs: - foundCTAs.append(f) - if eachPara.find('...') == 0: - pass #ignore headlines - paraCount = paraCount + 1 - - # Add framing codes - if addFramingCodes: - processedText = processedText.rstrip() - processedText = "|*\n" + processedText + "*|\n" - - # Wrap - processedText = self.endline(processedText, - linelength=self._lineLength, breakStr=[" ", "-", "..."]) - - - return processedText, foundCTAs - - def decodeBulletedText(self, prevText): - # returns the bullet paragraph text or None, returns the - # regular text after the bullets. The afterText is text up to - # the next bullet or up to "The National Weather Service". Note - # that this only correctly handles the 1st set of entries in - # a segment, thus double events will only decode the first set - # of bullets and text. The multipleRecords is set to 1 in the - # event that there are multiple sets of bullets. In this case - # only the 1st set was captured/decoded. 
- # (hazard, time, basis, impact, afterText, multipleRecords) - if prevText is None: - return (None, None, None, None, None, None) - - # find the bullets - bullets = [] - buf = prevText.split('\n\n* ') - if len(buf) <= 1: - return (None, None, None, None, None, None) - - multRecords = 0 #indicator of multiple sets of bullets - - for x in xrange(len(buf)): - if x == 0: - continue #headlines and text before the bullets - bullets.append(buf[x]) - - # find only the bulleted text, defined by the double line feed term. - # of the text - regText = "" #regular text after bullets - for x in xrange(1, len(bullets)): - index = bullets[x].find('\n\n') - if index != -1: - regText = bullets[x][index+2:] - bullets[x] = bullets[x][0:index] #eliminate after bullet text - if len(bullets) > x+2: #more bullets are present - multRecords = 1 - bullets = bullets[0:x+1] #only interested in these bullets - break - - # regular text is the remainder of the text. However we only - # want text from the last in the series of bullets to the - # beginning of any next NWS phrase. 
- lines = regText.split('\n') - for x in xrange(len(lines)): - if lines[x].find('The National Weather Service') == 0: - lines = lines[0:x] #eliminate following lines - break - regText = ("\n").join(lines) - - # now clean up the text - for x in xrange(len(bullets)): - bullets[x] = string.replace(bullets[x],'\n',' ') - removeLF = re.compile(r'(s*[^\n])\n([^\n])', re.DOTALL) - regText = removeLF.sub(r'\1 \2',regText) - - # extract out each section for returning the values - if len(bullets) >= 1: - hazard = bullets[0] - else: - hazard = None - if len(bullets) >= 2: - time = bullets[1] - else: - time = None - if len(bullets) >= 3: - basis = bullets[2] - else: - basis = None - if len(bullets) >= 4: - impact = bullets[3] - else: - impact = None - if len(regText) == 0: - regText = None #no regular text after bullets - - return (hazard, time, basis, impact, regText, multRecords) - - def substituteBulletedText(self, capText, defaultText, frameit="Never"): - #returns a properly formatted bulleted text based on - #the capText variable. If capText is None or 0 length, then - #the default text is used. frameit can be "Never", in which - #nothing is wrapped in framing codes, "Always" in which the - #text (default or cap) is wrapped in framing codes, or - #DefaultOnly" in which just the default text is wrapped. - if capText is not None and len(capText): - textToUse = capText[0].upper()+capText[1:] - if frameit == "Always": - textToUse = "|* " + textToUse + " *|" - else: - textToUse = defaultText - if frameit == "Always" or frameit == "DefaultOnly": - textToUse = "|* " + textToUse + " *|" - - # add bullet codes - textToUse = "* " + textToUse - - # format it - return self.indentText(textToUse, indentFirstString = '', - indentNextString = ' ', maxWidth=self._lineLength, - breakStrings=[" ", "-", "..."]) - - - def convertSingleParas(self, text): - #returns a list of paragraphs based on the input text. 
- lf = re.compile(r'(s*[^\n])\n([^\n])', re.DOTALL) - ptext = lf.sub(r'\1 \2', text) - ptext = ptext.replace('\n\n', '\n') - paragraphs = ptext.split('\n') - return paragraphs - - def ctasFound(self, text): - #returns types of ctas found. The identifier is the pil (e.g., ZFP), - #phen/sig (e.g., DU.Y), or GENERIC. Uses the CallToAction definitions. - - #convert text to single paragraphs - paragraphs = self.convertSingleParas(text) - for x in xrange(len(paragraphs)): - paragraphs[x] = string.replace(paragraphs[x],' ','') - - #make list of call to actions (type, cta text) - if self.__procCTA is None: - self.__procCTA = [] - ctao = CallToActions.CallToActions() - d = ctao.ctaDict() - for k in d.keys(): - func = d[k] - items = func() - for it in items: - if type(it) == types.TupleType: - it = it[1] #get second string which is the CTA - ctaParas = self.convertSingleParas(it) - for cta in ctaParas: - self.__procCTA.append((k,string.replace(cta,' ',''))) - d = ctao.ctaPilDict() - for k in d.keys(): - func = d[k] - items = func() - for it in items: - if type(it) == types.TupleType: - it = it[1] #get second string which is the CTA - ctaParas = self.convertSingleParas(it) - for cta in ctaParas: - self.__procCTA.append((k,string.replace(cta,' ',''))) - - ctas = ctao.genericCTAs() - for it in ctas: - if type(it) == types.TupleType: - it = it[1] #get second string which is the CTA - ctaParas = self.convertSingleParas(it) - for cta in ctaParas: - self.__procCTA.append(("GENERIC", - string.replace(cta,' ',''))) - - #compare - found = [] - for para in paragraphs: - for (ctaType, cta) in self.__procCTA: - ## Added following line to account for framing code issues in CTA - cta = re.sub("\|\*.*\*\|","",cta) - # We want this comparison to be case-insensitive just in case - # the site is not transmitting in mixed case yet. 
- if para.upper() == cta.upper() and ctaType not in found: - found.append(ctaType) - return found - - - - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +# ---------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 05/07/2015 4027 randerso Migrated A1 OB9.16 code to A2 +# 06/17/2015 4027 dgilling Perform case-insensitive +# comparisons in foundCTAs. +# 07/13/2015 4648 randerso Fix bullets in follow up products +# 02/24/2016 5411 randerso Make bullet headers upper case +# 07/15/2016 5749 randerso Replaced ellipses with commas in hazardBodyText +# + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: This product is a template for creating Hazard Products. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# GenericHazards +#------------------------------------------------------------------------- +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# +# You must set the following: +# +# productName defines name of product e.g. "Zone Forecast Product" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "SFTBOS" +# areaName (opt.) Area name for product header, such as "Western New York" +# wfoCityState City,state that the WFO is located in, such as "Buffalo NY" +# +# Optional Configuration Items +# +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined or the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product. +# Product is saved if autoWrite is 1. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. The product is not +# automatically stored unless autoStore is 1. This +# value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. The product is not +# automatically transmitted unless autoSend is 1. +# This value is also used for the default GUI +# entry for storage. +# autoSend If set to 1, then the product will be automatically +# sent on the AWIPS WAN to the "autoSendAddress" with +# the "awipsWANPil after product creation. 
+# autoStore If set to 1, then the product will be automatically +# stored into the text database using the "textdbPil" +# after product creation. +# autoWrite If set to 1, then the product will be automatically +# written to the "output" named disk file after +# product creation. +# +# lineLength max length of each line +# +# defaultEditAreas defines edit areas, default is Combinations +# +# purgeTime Maximum number of hours past issuance time for the +# expire time. +# includeCities If 1, cities will be included in the area header +# accurateCities If 1, cities are determined from grids +# citiesPhrase "Including the cities of" phrase used when including +# cities +# includeZoneNames If 1, zone names will be included in the area header +# easPhrase Optional EAS phrase to be include in product header +# +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +# includeOverviewHeadline If 1, the overview header is templated +# includeOverview If 1, the overview section is templated +# bulletProd If 1, the product will use a bullet format +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Hazards +#------------------------------------------------------------------------- +# Edit Areas Needed: None +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +# Combinations file +#------------------------------------------------------------------------- +# Component Products: +# Hazards +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". 
+#------------------------------------------------------------------------- +# Additional Information: +#------------------------------------------------------------------------- +# Example Output: +#------------------------------------------------------------------------- + +import LogStream +import TextRules +import SampleAnalysis +import time, string, types, copy, re +import CallToActions +import AbsTime + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis, + CallToActions.CallToActions): + Definition = { + "type": "smart", + "displayName": None, + + # Source database for product. Can be "Official", "Fcst" or "ISC" + "database": "Official", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/genHaz.txt", + "debug": 0, + # Name of map background for creating Combinations + # Can be: + # Zones_BOU + # FireWxZones_BOU + # Counties + # Marine_Zones_BOU + "mapNameForCombinations": "Zones_", + + ## Edit Areas: Create Combinations file with edit area combinations. + ## Can be: + ## EditAreas_PublicZones_BOU + ## EditAreas_FireWx_BOU + ## EditAreas_FIPS_BOU + ## EditAreas_MarineZones_BOU + "defaultEditAreas" : "EditAreas_PublicZones__", + + # product identifiers + "productName": "Generic Hazard Product", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "Georgia" -- optional + "wfoCityState": "", # Location of WFO - city,state + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. 
+ "periodCombining" : 0, # If 1, combine periods, if possible + + # automatic functions + "autoSend": 0, #set to 1 to automatically transmit product + "autoSendAddress": "000", #transmission address + "autoStore": 0, #set to 1 to automatically store product in textDB + "autoWrite": 0, #set to 1 to automatically write product to file + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + # Language + "language": "english", + + "lineLength": 66, #Maximum line length + + "purgeTime": 8, # Maximum hours for expireTime + "includeCities": 1 , # Cities included in area header + "accurateCities": 0, # Include all cities in area header + "cityLocation": "CityLocation", # City lat/lon dictionary to use + "cityDescriptor":"Including the cities of", + "includeZoneNames":1, # Zone names will be included in the area header + "easPhrase" :"", # Optional EAS phrase to be include in product header + + "includeOverviewHeadline": 1, #include overview header + "includeOverview": 1, #include overview section + "bulletProd": 0, # do not default to bullets + "hazardSamplingThreshold": (10, None), #(%cov, #points) + "callToAction": 1, + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + self.__overviewText = "" + self.__procCTA = None + + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the segments + hazardsC = argDict['hazards'] + segmentList = self.organizeHazards(hazardsC.rawAnalyzedTable()) + if len(segmentList) == 0: + return "No hazards to report" + + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each segment in the segmentList + fraction = 0 + 
fractionOne = 1.0/float(len(segmentList)) + percent = 50.0 + self.setProgressPercentage(50) + for segmentAreas in segmentList: + self.progressMessage(fraction, percent, "Making Product for Segment") + fcst = self._preProcessArea(fcst, segmentAreas, self._expireTime, argDict) + fcst = self._makeProduct(fcst, segmentAreas, argDict) + fcst = self._postProcessArea(fcst, segmentAreas, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._language = argDict["language"] + + # Set up information for Hazards product + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + return None + + def _determineTimeRanges(self, argDict): + # Set up the time range for 0-240 hours + self._timeRange = self.createTimeRange(0, 240) + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._issueTime = AbsTime.AbsTime(argDict['creationTime']) + self._currentTime = argDict['creationTime'] + self._expireTime = self._issueTime + self._purgeTime*3600 + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + return None + + def _preProcessProduct(self, fcst, argDict): + # Product header + if self._areaName != "": + self._areaName = " for " + self._areaName + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, + self._productName + self._areaName) + + if len(self._easPhrase) != 0: + eas = self._easPhrase + '\n' + else: + eas = '' + + s = self._wmoID + " " + 
self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = eas + productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + fcst = fcst + "Default overview section\n" + return fcst + + def _preProcessArea(self, fcst, segmentAreas, expireTime, argDict): + # This is the header for an edit area combination + areaHeader = self.makeAreaHeader( + argDict, "", self._issueTime, expireTime, + self._areaDictionary, None, cityDescriptor=self._cityDescriptor, + areaList=segmentAreas, includeCities=self._includeCities, + includeZoneNames = self._includeZoneNames, + accurateCities = self._accurateCities) + fcst = fcst + areaHeader + return fcst + + def _makeProduct(self, fcst, segmentAreas, argDict): + argDict["language"] = self._language + # Generate Narrative Forecast for Edit Area + # get the hazards text + + # We only need to get headlines for the first edit area + # in the segment since all areas in the segment have + # the same headlines + editArea = segmentAreas[0] + areaLabel = editArea + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + return fcst + + def _postProcessArea(self, fcst, segmentAreas, argDict): + return fcst + "\n\n$$\n\n" + + def _postProcessProduct(self, fcst, argDict): + # + # If an overview exists for this product, insert it + # + overview = self.finalOverviewText() + overviewSearch = re.compile(r'Default overview section', re.DOTALL) + fcst = overviewSearch.sub(overview, fcst) + # + # Added to place line feeds in the CAP tags to keep separate from CTAs + + fcst = string.replace(fcst, \ + r"PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", \ + r"\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n") + fcst = string.replace(fcst, "\n ","\n") + fcst = string.replace(fcst, "&&", "\n&&\n") + + # Prevent empty Call to Action Tags 
+ fcst = re.sub(r'\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\s*&&\n', \ + "", fcst) + + fcst = self._indentBulletText(fcst) + + # + # Clean up multiple line feeds + # + + fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) + fcst = fixMultiLF.sub(r'\1', fcst) + + # finish progress meter + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + def allowedHazards(self): + return [] + + # Added for DR 21194 + def _bulletDict(self): + return [] + + # Added for DR 21309 + def _bulletOrder(self): + return [] + +## Replaced by 21309 code +## def _getBullets(self, newBulletList, argDict): +## +## ### get the bullet dictionary and split the bullets +## bDict = self._bulletDict() +## bLine = bDict.get(eachHazard['phen']) +## print 20* "*" + (eachHazard['phen']) +## bList = newBulletList.split(",") +## +## ### initialize the bullet output +## bullets = "" +## +## ### loop through the bullets and format the output +## for b in bList: +## bullets = bullets + "* " + b + "...|* Enter bullet text *|\n\n" +## # bullets = bullets + "\n" +## return bullets + + def _indentBulletText(self, prevText): + + print(prevText) + ### if previous text is empty, return nothing + if prevText is None: + return prevText + + ### + ### split the text + ### + bullets = [] + bullets = string.split(prevText, '\n\n') + if len(bullets) <= 1: + return prevText + + ### + ### process the text + ### + outText = "" + for b in bullets: + ### if first character is a * we found a bullet + if re.match("\*", b): + ### remove line feeds + removeLF = re.compile(r'(s*[^\n])\n([^\n])', re.DOTALL) + bullet = removeLF.sub(r'\1 \2',b) + ### indent code + bullet = self.indentText(bullet, indentFirstString = '', + indentNextString = ' ', maxWidth=self._lineLength, + breakStrings=[" ", "..."]) + ### + ### the "-" in the breakStrings line above is causing issues with + ### offices that use "-20 degrees" in the text. 
+ ### + outText = outText + bullet + "\n\n" + else: ### not a bullet, CTA text + outText = outText + b + "\n\n" + + ### that's it + print(outText) + return outText + + # The _hazardTimePhrases method is passed a hazard key, and returns + # time phrase wording consistent with that generated by the headline + # algorithms in DiscretePhrases. + # + def hazardTimePhrases(self, hazard, argDict, prefixSpace=True): + timeWords = self.getTimingPhrase(hazard, argDict['creationTime']) + if prefixSpace and len(timeWords): + timeWords = " " + timeWords #add a leading space + return timeWords + + # + # The method hazardBodyText creates an attribution phrase + # + + def hazardBodyText(self, hazardList, argDict): + + bulletProd = self._bulletProd + hazardBodyPhrase = '' + + # + # First, sort the hazards for this segment by importance + # + + sortedHazardList = [] + for each in ['W', 'Y', 'A', 'O', 'S']: + for eachHazard in hazardList: + if eachHazard['sig'] == each: + if eachHazard not in sortedHazardList: + sortedHazardList.append(eachHazard) + + # + # Next, break them into individual lists based on action + # + + newList = [] + canList = [] + expList = [] + extList = [] + conList = [] + upgList = [] + statementList = [] + + for eachHazard in sortedHazardList: + if eachHazard['sig'] in ['S']and eachHazard['phen'] in ['CF', 'LS']: + statementList.append(eachHazard) + elif eachHazard['act'] in ['NEW', 'EXA', 'EXB']: + newList.append(eachHazard) + elif eachHazard['act'] in ['CAN']: + canList.append(eachHazard) + elif eachHazard['act'] in ['EXP']: + expList.append(eachHazard) + elif eachHazard['act'] in ['EXT']: + extList.append(eachHazard) + elif eachHazard['act'] in ['UPG']: + upgList.append(eachHazard) + else: + conList.append(eachHazard) + + # + # Now, go through each list and build the phrases + # + + nwsIntroUsed = 0 + + # + # This is for the new hazards + # + + phraseCount = 0 + lastHdln = None + for eachHazard in newList: + hdln = eachHazard['hdln'] + if 
len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) + hazNameA = self.hazardName(eachHazard['hdln'], argDict, True) + hazNameACap = self.sentence(hazNameA, addPeriod=False) + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + + if hazName in ["Winter Weather Advisory", "Winter Storm Warning", "Beach Hazards Statement"]: + forPhrase = " for |* Enter hazard type *|" + else: + forPhrase ="" + + if nwsIntroUsed == 0: + hazardBodyPhrase = "The National Weather Service in " + self._wfoCity + nwsIntroUsed = 1 + if phraseCount == 0: + phraseCount = 1 + if eachHazard['phen'] in ['HU', 'TR', 'TY']: + hazardBodyPhrase = hazardBodyPhrase + " has issued " + \ + hazNameA + ". " + else: + hazardBodyPhrase += " has issued " + hazNameA + forPhrase + \ + ", which is in effect" + endTimePhrase + ". " + elif phraseCount == 1: + phraseCount = 2 + if hdln != lastHdln: + if eachHazard['phen'] in ['HU', 'TR', 'TY']: + hazardBodyPhrase = hazardBodyPhrase + hazNameACap + \ + " has also been issued." + else: + hazardBodyPhrase = hazardBodyPhrase + hazNameACap + \ + " has also been issued. This " + hazName + forPhrase + \ + " is in effect" + endTimePhrase + ". " + else: + if eachHazard['phen'] in ['HU', 'TR', 'TY']: + hazardBodyPhrase = hazardBodyPhrase + hazNameACap + \ + " has also been issued." + else: + hazardBodyPhrase = hazardBodyPhrase + hazNameACap + forPhrase + \ + " has also been issued" + endTimePhrase + ". " + else: + if eachHazard['phen'] in ['HU', 'TR', 'TY']: + hazardBodyPhrase += "In addition, " + \ + hazNameA + " has been issued." + else: + hazardBodyPhrase += "In addition, " + \ + hazNameA + forPhrase + " has been issued. This " + hazName + \ + " is in effect" + endTimePhrase + ". 
" + lastHdln = hdln + # + # This is for the can hazards + # + + for eachHazard in canList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + if nwsIntroUsed == 0: + hazardBodyPhrase = "The National Weather Service in " +\ + self._wfoCity + nwsIntroUsed = 1 + hazardBodyPhrase = hazardBodyPhrase + \ + " has cancelled the " + hazName + ". " + else: + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " has been cancelled. " + + # + # This is for the exp hazards + # + + phraseCount = 0 + for eachHazard in expList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + if self._bulletProd: + continue # No attribution for this case if it is a bullet product + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + if eachHazard['endTime'] <= argDict['creationTime']: + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " is no longer in effect. " + else: + expTimeCurrent = argDict['creationTime'] + timeWords = self.getTimingPhrase(eachHazard, expTimeCurrent) + + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " will expire " + timeWords + ". " + + # + # This is for ext hazards + # + + for eachHazard in extList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + if self._bulletProd: + continue # No attribution for this case if it is a bullet product + endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " is now in effect" + endTimePhrase + ". " + + # + # This is for upgrade hazards + # + + for eachHazard in upgList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " is no longer in effect. 
" + + # + # This is for con hazards + # + + for eachHazard in conList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + if self._bulletProd: + continue # No attribution for this case if it is a bullet product + endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) + hazNameA = self.hazardName(eachHazard['hdln'], argDict, True) + hazardBodyPhrase = hazardBodyPhrase + hazNameA + \ + " remains in effect" + endTimePhrase + ". " + + # + # This is for statement hazards + # + + for eachHazard in statementList: + hazardBodyPhrase = "...|* Add statement headline *|...\n\n" + + # + # This adds segment text + # + + segmentText = '' + + # + # Check that this segment codes to determine capture or not, + # and frame captured text or not + # + incTextFlag, incFramingCodes, skipCTAs, forceCTAList = \ + self.useCaptureText(sortedHazardList) + + # + # + # Check that the previous text exists + # + + + foundCTAs = [] + for eachHazard in sortedHazardList: + if 'prevText' in eachHazard: + prevText = eachHazard['prevText'] + if eachHazard['pil'] == 'MWS': + startPara = 0 + else: + startPara = 1 + segmentText, foundCTAs = self.cleanCapturedText(prevText, + startPara, addFramingCodes = False, + skipCTAs = skipCTAs) + tester = segmentText[0] + if tester == '*': + startPara = 1 + else: + startPara = 2 + + segmentText, foundCTAs = self.cleanCapturedText(prevText, + startPara, addFramingCodes = False, + skipCTAs = skipCTAs) + + # + # Check that the segment text isn't very short or blank + # + + if len(segmentText) < 6: + incTextFlag = 0 + + # DR 21309 code addition from Middendorf (BYZ) + # + # Now if there is a new hazard and previous segment Text, then + # we may have to add bullets. 
+ # + if incTextFlag and bulletProd: + for eachHazard in sortedHazardList: + if 'prevText' not in eachHazard: + newBullets = string.split(self._bulletDict().get(eachHazard['phen']),",") + print("newBullets = ", newBullets) + print("segment text is: ", segmentText) + for bullet in newBullets: + if re.search("\* " + bullet + "\.\.\.", segmentText, flags=re.IGNORECASE) is None: + print(bullet + " not in segmentText") + start = self._bulletOrder().index(bullet) + 1 + end = len(self._bulletOrder()) + bulletFlag = 1 + for i in range(start,end): + if (re.search("\* " + self._bulletOrder()[i] + "\.\.\.", segmentText, flags=re.IGNORECASE) is not None) and bulletFlag: + print("* " + self._bulletOrder()[i] + "... found!") + segmentTextSplit = re.split("\* " + self._bulletOrder()[i] + "\.\.\.", segmentText, flags=re.IGNORECASE) + segmentText = string.join(segmentTextSplit,"* " + bullet.upper() + \ + "...|* Enter bullet text *|\n\n* " + self._bulletOrder()[i] + "...") + bulletFlag = 0 + if bulletFlag: + print("appending to bottom list of bullets!") + segmentTextSplit = re.split("PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", segmentText, flags=re.IGNORECASE) + segmentText = "\n" + string.join(segmentTextSplit,"* " + bullet.upper() + \ + "...|* Enter bullet text *|\n\nPRECAUTIONARY/PREPAREDNESS ACTIONS...") + bulletFlag = 0 + # + # Now if there is a can/exp hazard and previous segment Text, then + # we may have to remove bullets. + # + + if incTextFlag and bulletProd: + # First make list of bullets that we need to keep. + keepBulletList = [] + for eachHazard in sortedHazardList: + if eachHazard['act'] not in ["CAN","EXP"]: + saveBullets = string.split(self._bulletDict().get(eachHazard['phen']),",") + for saveBullet in saveBullets: + if saveBullet not in keepBulletList: + keepBulletList.append(saveBullet) + # Now determine which bullets we have to remove. 
+ removeBulletList = [] + for eachHazard in sortedHazardList: + if eachHazard['act'] in ["CAN","EXP"]: + canBullets = string.split(self._bulletDict().get(eachHazard['phen']),",") + for canBullet in canBullets: + if canBullet not in keepBulletList and canBullet not in removeBulletList: + removeBulletList.append(canBullet) + print("hazardBodyText info: keepBulletList: ",keepBulletList) + print("hazardBodyText info: removeBulletList: ",removeBulletList) + # Finally remove the bullets no longer needed. + for bullet in removeBulletList: + if re.search("\* "+ bullet + "\.\.\.", segmentText, flags=re.IGNORECASE) is not None: + segmentTextSplit = re.split("\* " + bullet + "\.\.\.", segmentText, flags=re.IGNORECASE) + print("segmentTextSplit is ", segmentTextSplit) + segmentTextSplit2 = string.split(segmentTextSplit[1],"*",1) + if len(segmentTextSplit2) == 2: + segmentTextSplit[1] = "*" + segmentTextSplit2[1] + else: + segmentTextSplit2 = re.split("PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", segmentTextSplit[1], 1, flags=re.IGNORECASE) + if len(segmentTextSplit2) == 2: + segmentTextSplit[1] = "PRECAUTIONARY/PREPAREDNESS ACTIONS..." 
+ segmentTextSplit2[1] + segmentText = string.join(segmentTextSplit,"") + if removeBulletList != []: + segmentText = "|*\n" + segmentText + "*|" + else: + segmentText = segmentText + # + # If segment passes the above checks, add the text + # + + print("hazardBodyText info: incTextFlag: ",incTextFlag) + if incTextFlag: + print("hazardBodyText info: segmentText: ",segmentText) + hazardBodyPhrase = hazardBodyPhrase + "\n\n" + \ + segmentText + '\n\n' + + elif bulletProd: + bulletFlag = 0 + if eachHazard['act'] == 'CAN': + hazardBodyPhrase = hazardBodyPhrase + \ + "\n\n|* Wrap-up text goes here *|.\n" + elif eachHazard['act'] == 'EXP': + hazardBodyPhrase = hazardBodyPhrase + \ + "\n\n|* Wrap-up text goes here *|.\n" + else: + bulletFlag = 1 +## print "bulletFlag is: ",bulletFlag + if bulletFlag: + newBulletList = [] + bullets = "" + for eachHazard in sortedHazardList: + ### get the default bullets for all hazards from the bullet diction + newBullets = string.split(self._bulletDict().get(eachHazard['phen']),",") + for newBullet in newBullets: + if newBullet not in newBulletList: + newBulletList.append(newBullet) + print("my bullets are: ", newBulletList) + ### Determine the correct order for all bullets + bulletOrder = self._bulletOrder() + staticBulletOrder = self._bulletOrder() + for bullet in staticBulletOrder: + print("correct bullet order should be: ", bulletOrder) + if bullet not in newBulletList: + bulletOrder.remove(bullet) + print("reordered bullets are: ", bulletOrder) + for b in bulletOrder: + bullets = bullets + "* " + b.upper() + "...|* Enter bullet text *|\n\n" + + hazardBodyPhrase = hazardBodyPhrase + "\n\n" + bullets + + # If segment doesn't pass the checks, put in framing codes + else: + hazardBodyPhrase = hazardBodyPhrase + \ + "\n\n|* Statement text goes here *|.\n\n" + + # End code for DR 21310 + + # + # This adds the call to action statements. 
This is only performed + # if the segment is 'NEW' or if the previous text has been discarded + # due to a CAN/EXP/UPG segment + # + + # remove items from forceCTAList if they exist in foundCTAs. Note + # that the formats of these lists are different, thus this code + # is more complicated + for ent in foundCTAs: + #only process CTAs that are vtec phen/sig based + if ent.find('.') == 2: + phensig = (ent[0:2], ent[3]) #phen.sig + if phensig in forceCTAList: + del forceCTAList[forceCTAList.index(phensig)] + + hazardBodyPhrase = hazardBodyPhrase + '\n\n' + ctas = [] + for (phen,sig) in forceCTAList: + hazardPhenSig = phen + "." + sig + cta = self.defaultCTA(hazardPhenSig) + if cta not in ctas: + ctas.append(cta) + + if len(ctas) > 0: + hazardBodyPhrase = hazardBodyPhrase + \ + 'PRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n' + for c in ctas: + hazardBodyPhrase = hazardBodyPhrase + c + '\n\n' + hazardBodyPhrase = hazardBodyPhrase + '&&\n\n' + + # Make sure there is only one CAP tag pairs + hazardBodyPhrase = re.sub(r'&&\s*PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n', \ + "", hazardBodyPhrase) + + return hazardBodyPhrase + + def finalOverviewText(self): + #if didn't calculate any, use the default + if len(self.__overviewText) == 0: + + if self._includeOverviewHeadline: + overviewHeadline = "...|*Overview headline (must edit)*|...\n\n" + else: + overviewHeadline = "" + + if self._includeOverview: + overviewBody = ".|*Overview (must edit)*|.\n\n" + else: + overviewBody = "" + + #assemble the lines + overview = overviewHeadline + overviewBody + return overview + + else: + return self.__overviewText + + def overviewText(self, hazardList, pil): + + # + # This method finds an overview in the previous product + # + + overview = "" + for each in hazardList: + if ('prevOverviewText' in each and + 'pil' in each and + 'endTime' in each and + 'act' in each): + if (each['pil'] == pil and + each['endTime'] > self._currentTime and + each['act'] not in ['CAN', 'EXP']): + overview = 
each['prevOverviewText'] + self.__overviewText, dummy = self.cleanCapturedText( + overview, 0) + break + + def useCaptureText(self, hazardList): + #Based on the hazardlist, returns a tuple indicating: + # (inc capture text, inc framing codes, skip CTAs, forceCTAList) + # + # For the values to be considered, the 'hdln' value must be + # present in the list, or it needs to be a Statement (sig="S") + cans = ['CAN','UPG','EXP'] + acts = ['NEW','EXT','EXA','EXB','CON'] + foundACTS = 0 + foundCANS = 0 + foundSig = [] + for eh in hazardList: + if eh['act'] in acts and (len(eh['hdln']) or eh['sig'] == 'S'): + foundACTS = 1 + if eh['act'] in cans and (len(eh['hdln']) or eh['sig'] == 'S'): + foundCANS = 1 + if eh['sig'] not in foundSig: + foundSig.append(eh['sig']) + + includeFrameCodes = 0 + includeText = 1 + skipCTAs = 0 + forceCTAList = [] + + # all actions are in CAN, UPG, EXP only (don't include text) + if foundCANS and not foundACTS: + if 'S' in foundSig and len(foundSig) == 1: #only S + includeFrameCodes = 1 #capture text, but frame it + else: + includeText = 0 #end of non statement + + # something in CANS and something in acts (frame it, include text) + elif foundCANS and foundACTS: + includeFrameCodes = 1 + skipCTAs = 1 + for eh in hazardList: + if eh['act'] in acts and \ + (eh['phen'], eh['sig']) not in forceCTAList and \ + len(eh['hdln']): + forceCTAList.append((eh['phen'], eh['sig'])) + + #everything in active entries, captured text is used, but still + # need to handle the "NEW" entries. + else: + for eh in hazardList: + if eh['act'] in ['NEW'] and len(eh['hdln']): + forceCTAList.append((eh['phen'], eh['sig'])) + + return (includeText, includeFrameCodes, skipCTAs, forceCTAList) + + + + def cleanCapturedText(self, text, paragraphs, addFramingCodes = False, + skipCTAs = False): + # + # This method takes a block of text, wraps it preserving blank lines, + # then returns the part after 'paragraphs'. 
So, if paragraphs is 0, it + # returns the whole thing, if it's 2, it returns paragraphs 2 -> end, etc. + # Headlines are always removed. + # Framing codes are added if specified. + # + paras = self.convertSingleParas(text) #single paragraphs + + # keep track of any call to actions found + foundCTAs = [] + + # Process the paragraphs, keep only the interested ones + paraCount = 0 + processedText = '' + for eachPara in paras: + if paraCount >= paragraphs: + found = self.ctasFound(eachPara) #get list of ctas found + if skipCTAs and len(found): + pass + else: + processedText = processedText + eachPara + '\n\n' + #keep track of remaining CTAs in processed text + for f in found: + if f not in foundCTAs: + foundCTAs.append(f) + if eachPara.find('...') == 0: + pass #ignore headlines + paraCount = paraCount + 1 + + # Add framing codes + if addFramingCodes: + processedText = processedText.rstrip() + processedText = "|*\n" + processedText + "*|\n" + + # Wrap + processedText = self.endline(processedText, + linelength=self._lineLength, breakStr=[" ", "-", "..."]) + + + return processedText, foundCTAs + + def decodeBulletedText(self, prevText): + # returns the bullet paragraph text or None, returns the + # regular text after the bullets. The afterText is text up to + # the next bullet or up to "The National Weather Service". Note + # that this only correctly handles the 1st set of entries in + # a segment, thus double events will only decode the first set + # of bullets and text. The multipleRecords is set to 1 in the + # event that there are multiple sets of bullets. In this case + # only the 1st set was captured/decoded. 
+ # (hazard, time, basis, impact, afterText, multipleRecords) + if prevText is None: + return (None, None, None, None, None, None) + + # find the bullets + bullets = [] + buf = prevText.split('\n\n* ') + if len(buf) <= 1: + return (None, None, None, None, None, None) + + multRecords = 0 #indicator of multiple sets of bullets + + for x in range(len(buf)): + if x == 0: + continue #headlines and text before the bullets + bullets.append(buf[x]) + + # find only the bulleted text, defined by the double line feed term. + # of the text + regText = "" #regular text after bullets + for x in range(1, len(bullets)): + index = bullets[x].find('\n\n') + if index != -1: + regText = bullets[x][index+2:] + bullets[x] = bullets[x][0:index] #eliminate after bullet text + if len(bullets) > x+2: #more bullets are present + multRecords = 1 + bullets = bullets[0:x+1] #only interested in these bullets + break + + # regular text is the remainder of the text. However we only + # want text from the last in the series of bullets to the + # beginning of any next NWS phrase. 
+ lines = regText.split('\n') + for x in range(len(lines)): + if lines[x].find('The National Weather Service') == 0: + lines = lines[0:x] #eliminate following lines + break + regText = ("\n").join(lines) + + # now clean up the text + for x in range(len(bullets)): + bullets[x] = string.replace(bullets[x],'\n',' ') + removeLF = re.compile(r'(s*[^\n])\n([^\n])', re.DOTALL) + regText = removeLF.sub(r'\1 \2',regText) + + # extract out each section for returning the values + if len(bullets) >= 1: + hazard = bullets[0] + else: + hazard = None + if len(bullets) >= 2: + time = bullets[1] + else: + time = None + if len(bullets) >= 3: + basis = bullets[2] + else: + basis = None + if len(bullets) >= 4: + impact = bullets[3] + else: + impact = None + if len(regText) == 0: + regText = None #no regular text after bullets + + return (hazard, time, basis, impact, regText, multRecords) + + def substituteBulletedText(self, capText, defaultText, frameit="Never"): + #returns a properly formatted bulleted text based on + #the capText variable. If capText is None or 0 length, then + #the default text is used. frameit can be "Never", in which + #nothing is wrapped in framing codes, "Always" in which the + #text (default or cap) is wrapped in framing codes, or + #DefaultOnly" in which just the default text is wrapped. + if capText is not None and len(capText): + textToUse = capText[0].upper()+capText[1:] + if frameit == "Always": + textToUse = "|* " + textToUse + " *|" + else: + textToUse = defaultText + if frameit == "Always" or frameit == "DefaultOnly": + textToUse = "|* " + textToUse + " *|" + + # add bullet codes + textToUse = "* " + textToUse + + # format it + return self.indentText(textToUse, indentFirstString = '', + indentNextString = ' ', maxWidth=self._lineLength, + breakStrings=[" ", "-", "..."]) + + + def convertSingleParas(self, text): + #returns a list of paragraphs based on the input text. 
+ lf = re.compile(r'(s*[^\n])\n([^\n])', re.DOTALL) + ptext = lf.sub(r'\1 \2', text) + ptext = ptext.replace('\n\n', '\n') + paragraphs = ptext.split('\n') + return paragraphs + + def ctasFound(self, text): + #returns types of ctas found. The identifier is the pil (e.g., ZFP), + #phen/sig (e.g., DU.Y), or GENERIC. Uses the CallToAction definitions. + + #convert text to single paragraphs + paragraphs = self.convertSingleParas(text) + for x in range(len(paragraphs)): + paragraphs[x] = string.replace(paragraphs[x],' ','') + + #make list of call to actions (type, cta text) + if self.__procCTA is None: + self.__procCTA = [] + ctao = CallToActions.CallToActions() + d = ctao.ctaDict() + for k in list(d.keys()): + func = d[k] + items = func() + for it in items: + if type(it) == tuple: + it = it[1] #get second string which is the CTA + ctaParas = self.convertSingleParas(it) + for cta in ctaParas: + self.__procCTA.append((k,string.replace(cta,' ',''))) + d = ctao.ctaPilDict() + for k in list(d.keys()): + func = d[k] + items = func() + for it in items: + if type(it) == tuple: + it = it[1] #get second string which is the CTA + ctaParas = self.convertSingleParas(it) + for cta in ctaParas: + self.__procCTA.append((k,string.replace(cta,' ',''))) + + ctas = ctao.genericCTAs() + for it in ctas: + if type(it) == tuple: + it = it[1] #get second string which is the CTA + ctaParas = self.convertSingleParas(it) + for cta in ctaParas: + self.__procCTA.append(("GENERIC", + string.replace(cta,' ',''))) + + #compare + found = [] + for para in paragraphs: + for (ctaType, cta) in self.__procCTA: + ## Added following line to account for framing code issues in CTA + cta = re.sub("\|\*.*\*\|","",cta) + # We want this comparison to be case-insensitive just in case + # the site is not transmitting in mixed case yet. 
+ if para.upper() == cta.upper() and ctaType not in found: + found.append(ctaType) + return found + + + + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericReport.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericReport.py index 1f1bf55f34..c19d725afb 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericReport.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/GenericReport.py @@ -1,304 +1,304 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: This is a generic product template. 
-#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. - -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# -# You must set the following: -# -# productName defines name of product e.g. "Zone Forecast Product" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "SFTBOS" -# areaName (opt.) Area name for product header, such as "Western New York" -# wfoCityState City,state that the WFO is located in, such as "Buffalo NY" -# -# Optional Configuration Items -# -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined or the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# lineLength Desired maximum length of each line. 
-# -# defaultEditAreas Defines edit areas, default is Combinations -# includeCities If 1, cities will be included in the area header -# cityDescriptor "Including the cities of" phrase used when including -# cities -# includeZoneNames If 1, zone names will be included in the area header -# -# areaDictionary Modify the AreaDictionary utility with UGC information about zones. -# -# singleComboOnly If set to 1, this indicates that the zone combiner should only use one -# combination. This is used for non-segmented products. -# -# purgeTime Default expiration in hours -# -# -# Trouble-shooting items -# passLimit -- Limit on passes allowed through Narrative Tree -# trace -- Set to 1 to turn on trace through Narrative Tree -# -# -#------------------------------------------------------------------------- -# Weather Elements Needed: -#------------------------------------------------------------------------- -# Edit Areas Needed: None -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -# Combinations file -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". -#------------------------------------------------------------------------- -# Additional Information: -# -# -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS C11 and 10-503 Directives for Public Weather Services. 
-#------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 20, 2014 #3685 randerso Removed upper case conversions -# -## - -import TextRules -import SampleAnalysis -import ForecastNarrative -import time, string, types, copy -import ProcessVariableList, AbsTime - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - - Definition = { - "type": "smart", - "displayName": None, # for Product Generation Menu - "database" : "Official", # Source database. "Official", "Fcst", or "ISC" - - "outputFile": "", - "debug": 0, - - # Name of map background for creating Combinations - "mapNameForCombinations": "", - - ## Edit Areas: Create Combinations file with edit area combinations. - "defaultEditAreas" : "", - "includeCities": 1 , # Cities included in area header - "cityDescriptor":"Including the cities of", - "includeZoneNames":1, # Zone names will be included in the area header - - # product identifiers - "productName": "", # product name - "fullStationID" : "", # 4 letter station ID - "wmoID" : "", # WMO code - "wfoCityState" : "", # Location of WFO - "pil" : "", # product pil - "textdbPil" : "", # Product ID for storing to AWIPS text database. - "awipsWANPil" : "", # Product ID for transmitting to AWIPS WAN. 
- "wfoSiteID": "", - "areaName": "", #optional area name for header - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - - # Use only a single zone combination (1 for non-segmented product, 0 - segmented) - "singleComboOnly": 0, - - # Language - "language": "english", - "lineLength": 66, # Maximum line length - - # Expiration - "purgeTime" : 2, # Default Expiration in hours - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." - - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(50) - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - 
self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._language = argDict["language"] - return None - - def _determineTimeRanges(self, argDict): - # Set up the Narrative Definition and initial Time Range - # Calculate ddhhmm string value: - self._timeRange = self.createTimeRange(0, 240) - self._currentTime = argDict['creationTime'] #ZULU - self._ddhhmmTime = time.strftime("%d%H%M",time.gmtime( - self._currentTime)) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - self._issueTime = AbsTime.current() - - self._expireTime = self._issueTime + self._purgeTime*3600 - - return None - - def _sampleData(self, argDict): - # Sample and analyze the data for the narrative - return None - - def _preProcessProduct(self, fcst, argDict): - # Product header - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area combination - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas, - cityDescriptor=self._cityDescriptor, includeCities=self._includeCities, - 
includeZoneNames = self._includeZoneNames) - fcst = fcst + areaHeader - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst + "\n\n$$\n" - - def _postProcessProduct(self, fcst, argDict): - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: This is a generic product template. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. + +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# +# You must set the following: +# +# productName defines name of product e.g. "Zone Forecast Product" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "SFTBOS" +# areaName (opt.) 
Area name for product header, such as "Western New York" +# wfoCityState City,state that the WFO is located in, such as "Buffalo NY" +# +# Optional Configuration Items +# +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined or the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# lineLength Desired maximum length of each line. +# +# defaultEditAreas Defines edit areas, default is Combinations +# includeCities If 1, cities will be included in the area header +# cityDescriptor "Including the cities of" phrase used when including +# cities +# includeZoneNames If 1, zone names will be included in the area header +# +# areaDictionary Modify the AreaDictionary utility with UGC information about zones. +# +# singleComboOnly If set to 1, this indicates that the zone combiner should only use one +# combination. This is used for non-segmented products. 
+# +# purgeTime Default expiration in hours +# +# +# Trouble-shooting items +# passLimit -- Limit on passes allowed through Narrative Tree +# trace -- Set to 1 to turn on trace through Narrative Tree +# +# +#------------------------------------------------------------------------- +# Weather Elements Needed: +#------------------------------------------------------------------------- +# Edit Areas Needed: None +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +# Combinations file +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Additional Information: +# +# +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS C11 and 10-503 Directives for Public Weather Services. +#------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 20, 2014 #3685 randerso Removed upper case conversions +# +## + +import TextRules +import SampleAnalysis +import ForecastNarrative +import time, string, types, copy +import ProcessVariableList, AbsTime + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + + Definition = { + "type": "smart", + "displayName": None, # for Product Generation Menu + "database" : "Official", # Source database. 
"Official", "Fcst", or "ISC" + + "outputFile": "", + "debug": 0, + + # Name of map background for creating Combinations + "mapNameForCombinations": "", + + ## Edit Areas: Create Combinations file with edit area combinations. + "defaultEditAreas" : "", + "includeCities": 1 , # Cities included in area header + "cityDescriptor":"Including the cities of", + "includeZoneNames":1, # Zone names will be included in the area header + + # product identifiers + "productName": "", # product name + "fullStationID" : "", # 4 letter station ID + "wmoID" : "", # WMO code + "wfoCityState" : "", # Location of WFO + "pil" : "", # product pil + "textdbPil" : "", # Product ID for storing to AWIPS text database. + "awipsWANPil" : "", # Product ID for transmitting to AWIPS WAN. + "wfoSiteID": "", + "areaName": "", #optional area name for header + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + + # Use only a single zone combination (1 for non-segmented product, 0 - segmented) + "singleComboOnly": 0, + + # Language + "language": "english", + "lineLength": 66, # Maximum line length + + # Expiration + "purgeTime" : 2, # Default Expiration in hours + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(50) + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._language = argDict["language"] + return None + + def _determineTimeRanges(self, argDict): + # Set up the Narrative Definition and initial Time Range + # Calculate ddhhmm string value: + self._timeRange = self.createTimeRange(0, 240) + self._currentTime = argDict['creationTime'] #ZULU + self._ddhhmmTime = time.strftime("%d%H%M",time.gmtime( + self._currentTime)) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + self._issueTime = AbsTime.current() + + self._expireTime = self._issueTime + self._purgeTime*3600 + + return None + + def _sampleData(self, argDict): + # Sample and 
analyze the data for the narrative + return None + + def _preProcessProduct(self, fcst, argDict): + # Product header + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area combination + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas, + cityDescriptor=self._cityDescriptor, includeCities=self._includeCities, + includeZoneNames = self._includeZoneNames) + fcst = fcst + areaHeader + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + "\n\n$$\n" + + def _postProcessProduct(self, fcst, argDict): + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + + + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/HLS.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/HLS.py index 5200938983..95c3d8b5c3 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/HLS.py 
+++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/HLS.py @@ -1,3170 +1,3172 @@ -# Version 2018.06.05 - -import GenericHazards -import string, time, os, re, types, copy, LogStream, collections -import ModuleAccessor, SampleAnalysis, EditAreaUtils, VTECTable -import math -import Tkinter -import LocalizationSupport - -from AbsTime import * -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID -from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit as JavaGrid2DBit -AWIPS_ENVIRON = "AWIPS2" - -import HLSTCV_Common - -class TextProduct(HLSTCV_Common.TextProduct): - Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) - - Definition["displayName"] = "None" - Definition["outputFile"] = "{prddir}/TEXT/HLS.txt" - Definition["database"] = "Official" # Source database - Definition["debug"] = 1 - Definition["mapNameForCombinations"] = "Zones_" - Definition["defaultEditAreas"] = "Combinations_HLS_" - Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display - - Definition["productName"] = "Local Statement" - - Definition["fullStationID" ] = "" - Definition["wmoID" ] = "" - Definition["wfoCityState" ] = "" - Definition["pil" ] = "" - Definition["textdbPil" ] = "" - Definition["awipsWANPil" ] = "" - Definition["site"] = "" - Definition["wfoCity"] = "" - - Definition["areaName"] = "" #optional area name for product - Definition["areaDictionary"] = "AreaDictionary" - Definition["language"] = "english" - Definition["lineLength"] = 71 #Maximum line length - Definition["tabLength"] = 4 - - Definition["purgeTime"] = 8 # Default Expiration in hours if - Definition["includeZoneNames"] = 1 # Zone names will be included in the area header - Definition["includeIssueTime"] = 0 # Issue Time will be included in the area header - Definition["easPhrase"] = \ - "URGENT - IMMEDIATE BROADCAST REQUESTED" # Optional EAS phrase to be include in product header - 
Definition["callToAction"] = 1 - - # Add options for debugging - Definition["debug"] = { - #TextProduct - "__init__": 0, - "_inlandAreas": 0, - "_coastalAreas": 0, - "_cwa": 0, - "_cwa_descriptor": 0, - "_localReferencePoints": 0, - "_localReferencePoints_defaults": 0, - "_referencePointLimit": 0, - "_productParts_HLS": 0, - "_analysisList_HLS": 0, - "_analysisList_HLS_WholeDomain": 0, - "_intersectAnalysisList_HLS": 0, - "generateForecast": 0, - "_initializeVariables": 0, - "_initializeHeadlines": 0, - "_initializeSamplingDict": 0, - "_noOpParts": 0, - "_areaList": 0, - "_summaryHeadlines": 0, - "_changesHazards": 0, - "_currentHazards": 0, - "_stormInformation": 0, - "_situationOverview": 0, - "_windSection": 0, - "_surgeSection": 0, - "_floodingRainSection": 0, - "_tornadoSection": 0, - "_coastalHazardsSection": 0, - "_preparednessSection": 0, - "_evacuationStatements": 0, - "_otherPreparednessActions": 0, - "_additionalSourcesInfo": 0, - "_nextUpdate": 0, - "_impactsKeyFunction": 0, - "_getPotentialImpactsStatements": 0, - "_impactCategoryToThreatLevel": 0, - "_determineHazardStates": 0, - "_sampleHLSData": 0, - "_sampleTCVAdvisory": 0, - "_sampleRankedDiscreteValue": 0, - "_sampleMostSignificantDiscreteValue": 0, - "_getDominantThreatLevel": 0, - "_getHighestThreat": 0, - "_getLowestThreat": 0, - "_setHazardImpactCategories": 0, - "_createWholeDomainEditArea": 0, - "_determineHazards": 0, - "_formatLocalTime": 0, - "_getTimeZoneList": 0, - "_grabHeadline": 0, - "_getStormInfo": 0, - "_grabStormInfo": 0, - "_decodeStormInfo": 0, - "_expandBearings": 0, - "_removeKM": 0, - "_cleanText": 0, - "_calcLocalReferences": 0, - "_calcReference": 0, - "_distanceFromLatLon": 0, - "_bearing": 0, - "_dirInEnglish": 0, - "_overview_list": 0, - "_displayGUI": 0, - "_frame": 0, - - #HLSTCV_Common - "allowedHazards": 0, - "allowedHeadlines": 0, - "_initializeVariables": 0, - "moderated_dict": 0, - "_wmoHeader": 0, - "_productHeader": 0, - "_ugcHeader": 0, - 
"_processProductParts": 0, - "_createProductDictionary": 0, - "_initializeProductDictionary": 0, - "_formatProductDictionary": 0, - "_getStatValue": 0, - "_allAreas": 0, - "_groupSegments": 0, - "_getSegmentVTECRecordsTuples": 0, - "_computeIntersectAreas": 0, - "_initializeHazardsTable": 0, - "_getHazardsTable": 0, - "_ignoreActions": 0, - "_setVTECActiveTable": 0, - "_getVtecRecords": 0, - "_getAllowedHazardList": 0, - "_altFilterMethod": 0, - "_filterHazards": 0, - "_getAdditionalHazards": 0, - "_checkHazard": 0, - "_initializeTimeVariables": 0, - "_determineTimeRanges": 0, - "_createPeriodList": 0, - "_calculateStartTime": 0, - "_formatPeriod": 0, - "_getTimeDesc": 0, - "_getPartOfDay": 0, - "_initializeStormInformation": 0, - "_parseTCP": 0, - "_getStormTypeFromTCP": 0, - "_getStormNameFromTCP": 0, - "_getAdvisoryTypeFromTCP": 0, - "_getAdvisoryNumberStringFromTCP": 0, - "_getStormNumberStringFromTCP": 0, - "_getStormIDStringFromTCP": 0, - "_useTestTCP": 0, - "_testTCP": 0, - "_initializeAdvisories": 0, - "_synchronizeAdvisories": 0, - "_getLocalAdvisoryDirectoryPath": 0, - "_getStormAdvisoryNames": 0, - "_loadLastTwoAdvisories": 0, - "_loadAdvisory": 0, - "_getAdvisoryPath": 0, - "_getAdvisoryFilename": 0, - "_processVariableList": 0, - "_GUI_sizing_dict": 0, - "_GUI1_configDict": 0, - "_font_GUI_dict": 0, - - #Overview_Dialog - "body": 0, - "_makeStep3": 0, - "_makeButtons": 0, - "okCB": 0, - - #Common_Dialog - "getVarDict": 0, - "_makeRadioOrCheckList": 0, - "_makeEntry": 0, - "cancelCB": 0, - "_entryName": 0, - "_makeTuple": 0, - "_setVarDict": 0, - "status": 0, - "buttonbox": 0, - - #LegacyFormatter - "execute": 0, - "_processProductParts": 0, - "processWmoHeader": 0, - "processProductHeader": 0, - "processSummaryHeadlines": 0, - "processHazards": 1, - "_addToGroupedHazards": 1, - "_sortHazardsType": 0, - "_consolidateGroupedHazards": 1, - "_createHazardTextParts": 0, - "_areaWords": 0, - "processStormInformation": 0, - "processSituationOverview": 0, - 
"processHazardsSection": 0, - "processSubParts": 0, - - #TextProductCommon - "setUp": 0, - "hazardTimeZones": 0, - "getExpireTime": 0, - "getHeadlinesAndSections": 0, - "formatUGCs": 0, - "getFormattedTime": 0, - "formatUGC_names": 0, - "formatNameString": 0, - "getVal": 0, - "formatDatetime": 0, - "flush": 0, - "makeUGCString": 0, - "checkLastArrow": 0, - } - -# Definition["debug"] = 1 # turn on ALL debug messages - Definition["debug"] = 0 # turn off ALL debug messages - - def __init__(self): - HLSTCV_Common.TextProduct.__init__(self) - - ##################################################################### - ##################################################################### - ### Organization of Formatter Code - - ############################################################### - ### MUST OVERRIDE DEFINITIONS !!! - ### _inlandAreas, _coastalAreas, _cwa, _cwa_descriptor, - ### _localReferencePoints, _localReferencePoints_defaults - ############################################################### - - ############################################################### - ### Optional Overrides - ### _referencePointLimit - ############################################################### - - ############################################################### - ### HLS Product and Segment Parts Definition - ############################################################### - - ############################################################### - ### Analysis Lists, SampleAnalysis Overrides and other - ### analysis related methods - ############################################################### - - ############################################################### - # CODE - ############################################################### - ### High level flow of formatter - ### generateForecast, _initializeVariables, - ### _loadLastTwoAdvisories, _determineTimeRanges, - ### _initializeSamplingDict, _sampleTCVAdvisory, - ### _sampleHLSData, _determineHazardStates, - ### 
_setHazardImpactCategories, _createProductDictionary, - ### _formatProductDictionary - ############################################################### - - ############################################################### - ### Product Parts Implementation - ############################################################### - - ############################################################### - ### Sampling and Statistics related methods - ############################################################### - - ############################################################### - ### Area, Zone and Segment related methods - ############################################################### - - ############################################################### - ### Hazards related methods - ############################################################### - - ############################################################### - ### Time related methods - ############################################################### - - ############################################################### - ### Storm Information and TCP related methods - ############################################################### - - ############################################################### - ### GUI related methods - ############################################################### - - - ############################################################### - ### MUST OVERRIDE DEFINITIONS !!! 
- - def _inlandAreas(self): - return [ - #"FLZ063", "FLZ066", "FLZ067", "FLZ068", "FLZ070", - #"FLZ071", "FLZ072", "FLZ073", "FLZ074", - ] - - def _coastalAreas(self): - return [ - #"FLZ069", "FLZ075", "FLZ168", "FLZ172", "FLZ173", "FLZ174", - ] - - def _cwa(self): - return "" #"MFL" - - def _cwa_descriptor(self): - return "" #"South Florida" - - def _localReferencePoints(self): - # Give the name and lat/lon for each local reference point - return [ - #("West Palm Beach, FL", (26.71, -80.06)), - #("Fort Lauderdale, FL", (26.12, -80.15)), - #("Miami, FL", (25.77, -80.20)), - #("Miami Beach, FL", (25.81, -80.13)), - #("Naples, FL", (26.14, -81.80)), - #("Marco Island, FL", (25.94, -81.73)), - ] - - def _localReferencePoints_defaults(self): - # Give a list of the local reference point names to be - # turned on by default - return [] #["Miami, FL", "Naples, FL"] - - ############################################################### - ### Optional Overrides - - def _referencePointLimit(self): - # Give the number of reference points allowed to be chosen - # Also give a label (e.g. 
"two") for the GUI - return (2, "two") - - ############################################################### - ### HLS Product and Segment Parts Definition - - def _productParts_HLS(self, segment_vtecRecords_tuples): - partsList = [ - 'wmoHeader', - 'ugcHeader', - 'productHeader', - 'areaList', - 'summaryHeadlines', - 'newInformationHeader', - 'changesHazards', - 'currentHazards', - 'stormInformation', - 'situationOverview', - 'sigPotentialImpacts', - ] - - if self._ImpactsAnticipated: - includedImpacts = sorted(self._IncludedImpacts, key=self._impactsKeyFunction) - for ((_, sectionName), _) in includedImpacts: - self.debug_print("adding section = '%s'" % (sectionName), 1) - partsList.append(sectionName) - - partsList.append('preparednessSection') - - if self._ImpactsAnticipated: - partsList.append('evacuationStatements') - partsList.append('otherPreparednessActions') - partsList.append('additionalSourcesInfo') - - partsList.append('nextUpdate') - partsList.append('endProduct') - - self.debug_print("Product Parts partsList =\n\n%s\n" % (self._pp.pformat(partsList)), 1) - - return { - 'partsList': partsList - } - - ############################################################### - ### Analysis Lists, SampleAnalysis Overrides and other - ### analysis related methods - - def _analysisList_HLS(self): - # Sample over 120 hours beginning at current time - analysisList = [ - # Wind Section - ("WindThreat", self.rankedDiscreteValue), - ("WindThreat", self.mostSignificantDiscreteValue), - - # Flooding Rain Section - ("QPFtoFFGRatio", self.moderatedMax, [6]), - ("FloodingRainThreat", self.rankedDiscreteValue), - ("FloodingRainThreat", self.mostSignificantDiscreteValue), - - # Tornado Section - ("TornadoThreat", self.rankedDiscreteValue), - ("TornadoThreat", self.mostSignificantDiscreteValue), - ] - - return analysisList - - def _analysisList_HLS_WholeDomain(self): - # Sample over 120 hours beginning at current time - analysisList = [ - # Wind Section - ("Wind", 
self.vectorModeratedMax, [6]), - ] - - return analysisList - - def _intersectAnalysisList_HLS(self): - # The grids for the Surge Section will be intersected with a special edit area - analysisList = [ - ("InundationMax", self.moderatedMax, [6]), - ("StormSurgeThreat", self.rankedDiscreteValue), - ("StormSurgeThreat", self.mostSignificantDiscreteValue), - ] - - return analysisList - - ############################################################### - ### High level flow of formatter - - def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - error = self._initializeVariables(argDict) - if error is not None: - return error - - if self._stormName is None or self._stormName == "": - return "Could not determine the storm name" - - - if self._ImpactsAnticipated: - self._loadLastTwoAdvisories() - if (self._previousAdvisory is None or \ - not self._previousAdvisoryMatchesNumber): - return "A TCV must be transmitted before an HLS can be run" - - if len(self._IncludedImpacts) == 0: - return "At least one potential impact section needs to be included." 
- - # Determine time ranges - self._determineTimeRanges(argDict) - - if self._ImpactsAnticipated: - - # Sample the data - self._initializeSamplingDict() - self._sampleTCVAdvisory(self._previousAdvisory) - self._sampleHLSData(argDict) - - self._determineHazardStates() - - for threatName in ['WindThreat', 'StormSurgeThreat', 'FloodingRainThreat', 'TornadoThreat']: - self._setHazardImpactCategories(threatName) - - # Create the product dictionary and format it to create the output - productDict = self._createProductDictionary(self._productParts_HLS, - self._allAreas(), - areProductPartsSegmented=False) - productOutput = self._formatProductDictionary(LegacyFormatter, productDict) - - return productOutput - - def _initializeVariables(self, argDict): - error = HLSTCV_Common.TextProduct._initializeVariables(self, argDict) - if error is not None: - return error - - self._getStormInfo(argDict) - - self._initializeHeadlines() - - #======================================================================= - # Now produce a UGC header using only the WFO selected zones - #======================================================================= - - # Get the Combinations file for the HLS - accessor = ModuleAccessor.ModuleAccessor() - self.debug_print("self._defaultEditAreas = %s" % (self._pp.pformat(self._defaultEditAreas)), 1) - - # combos is a list of tuples. Each tuple is a grouping of zones - # (a list of zones, combo name). 
- combos = accessor.variable(self._defaultEditAreas, "Combinations") - - # If we could not find a Combinations file for the HLS - if combos is None: - LogStream.logVerbose("Combination file not found: " + self._pp.pformat(self._defaultEditAreas)) - - # Default to using the entire CWA - self._ugcs = sorted(self._allAreas()) - - # Otherwise, construct the final list of WFO selected zones - else: - self.debug_print("Segments from Zone Combiner = %s" % (self._pp.pformat(combos)), 1) - - # Create a list containing all zones from all combination groups - selectedZones = reduce(lambda zones, combo: zones + combo[0], - combos, - []) - - # Use the selected zones for the UGC header - self._ugcs = sorted(selectedZones) - - self.debug_print("Final Zones for UGC header = %s" % (self._pp.pformat(self._ugcs)), 1) - - return None - - def _initializeHeadlines(self): - if self._MainHeadline == "Enter": - self._headlines = [self._MainHeadline_entry] - elif self._MainHeadline == "UsePrev": - self._prevHLS = self.getPreviousProduct(self._textdbPil) - self._headlines = [self._grabHeadline(self._prevHLS)] - elif self._MainHeadline == "UseTCP": - try: - self._headlines = [self._grabHeadline(self._TCP)] - except: - self._headlines = [] - - def _initializeSamplingDict(self): - self._samplingDict = dict() - statsDict = dict() - statsDict['catastrophicThreshold'] = None - statsDict['decidingField'] = None - statsDict['inputThreatLow'] = None - statsDict['inputThreatHigh'] = None - statsDict['inputThreatDominant'] = None - statsDict['impactMin'] = None - statsDict['impactMax'] = None - statsDict['impactRange'] = None - statsDict['impactRangeMax'] = None - - self._samplingDict['WindThreat'] = copy.copy(statsDict) - self._samplingDict['StormSurgeThreat'] = copy.copy(statsDict) - self._samplingDict['FloodingRainThreat'] = copy.copy(statsDict) - self._samplingDict['TornadoThreat'] = copy.copy(statsDict) - - self._samplingDict['WindThreat']['catastrophicThreshold'] = 137 # knots - 
self._samplingDict['StormSurgeThreat']['catastrophicThreshold'] = 14 # feet - self._samplingDict['FloodingRainThreat']['catastrophicThreshold'] = 3 # percent - - ############################################################### - ### Product Parts Implementation - - def _noOpParts(self): - ''' - These represent product parts that should be skipped when calling product part methods. - They will be handled automatically by the formatters. - ''' - return ['CR', 'endProduct', 'endSegment', 'doubleAmpersand', 'newInformationHeader', 'sigPotentialImpacts'] - - ################# Product Level - - def _areaList(self, productDict, productSegmentGroup, productSegment): - productDict['areaList'] = "This product covers " + self._cwa_descriptor() - - def _summaryHeadlines(self, productDict, productSegmentGroup, productSegment): - productDict['summaryHeadlines'] = self._headlines - - def _changesHazards(self, productDict, productSegmentGroup, productSegment): - if (not self._ImpactsAnticipated) or \ - (self._ImpactsAnticipated and self._GeneralOnsetTime == "recovery"): - productDict['changesHazards'] = [] - else: - productDict['changesHazards'] = self._changesHazardsList - - def _currentHazards(self, productDict, productSegmentGroup, productSegment): - if (not self._ImpactsAnticipated) or \ - (self._ImpactsAnticipated and self._GeneralOnsetTime == "recovery"): - productDict['currentHazards'] = [] - else: - productDict['currentHazards'] = self._currentHazardsList - - def _stormInformation(self, productDict, productSegmentGroup, productSegment): - stormInfoDict = dict() - if self._ImpactsAnticipated: - stormInfoDict['references'] = self._stormLocalReferences - stormInfoDict['location'] = self._stormLocation - stormInfoDict['intensity'] = self._stormIntensityTrend - stormInfoDict['movement'] = self._stormMovementTrend - productDict['stormInformation'] = stormInfoDict - - def _situationOverview(self, productDict, productSegmentGroup, productSegment): - # Use generic text for the 
situation overview - productDict['situationOverview'] = self._frame("Succinctly describe the expected evolution of the event for the cwa; which hazards are of greater (or lesser) concern, forecast focus, etc.") - - # Get the WRKHLS product minus header that has the situation overview we want - wrkhlsProduct = self.getPreviousProduct("WRKHLS")[40:] - - # If we found the overview - if len(wrkhlsProduct) > 0: - # Clean and frame the imported overview and use it instead of the generic text - productDict['situationOverview'] = self._frame(self._cleanText(wrkhlsProduct.strip())) - - def _windSection(self, productDict, productSegmentGroup, productSegment): - sectionDict = dict() - sectionDict['title'] = "Wind" - sectionDict['impactRange'] = "" - sectionDict['impactLib'] = [] - sectionDict['additionalImpactRange'] = [] - - impactMin = self._samplingDict['WindThreat']['impactMin'] - impactMax = self._samplingDict['WindThreat']['impactMax'] - impactRange = self._samplingDict['WindThreat']['impactRange'] - impactRangeMax = self._samplingDict['WindThreat']['impactRangeMax'] - inputThreatDominant = self._samplingDict['WindThreat']['inputThreatDominant'] - - # Test the simplest case first - if impactMin == "none" and impactMax == "none": - sectionDict['impactRange'] = impactRange - productDict['windSection'] = sectionDict - return - - qualifier = self._getImpactsQualifier(impactMax) - - # If there is only one impact across the entire CWA, and it is the max - if impactMax != "none" and impactMin == impactMax and inputThreatDominant != "None": - if self._GeneralOnsetTime == "check plans": - sectionDict['impactRange'] = "Prepare for " + qualifier + "wind having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". Potential impacts include:" - elif self._GeneralOnsetTime == "complete preparations": - sectionDict['impactRange'] = "Protect against " + qualifier + "wind having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". 
Potential impacts include:" - elif self._GeneralOnsetTime == "hunker down": - sectionDict['impactRange'] = "Potential impacts from the main wind event are now unfolding across " + self._cwa_descriptor() + ". Remain well sheltered from " + qualifier + "wind having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" - else: - sectionDict['impactRange'] = "Little to no additional wind impacts expected." - # Handle the case where the impacts are not the same across the entire CWA - else: - if self._GeneralOnsetTime == "check plans": - sectionDict['impactRange'] = "Prepare for " + qualifier + "wind having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts in this area include:" - elif self._GeneralOnsetTime == "complete preparations": - sectionDict['impactRange'] = "Protect against " + qualifier + "wind having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts in this area include:" - elif self._GeneralOnsetTime == "hunker down": - sectionDict['impactRange'] = "Potential impacts from the main wind event are now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well sheltered from " + qualifier + "wind having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" - else: - sectionDict['impactRange'] = "Little to no additional wind impacts expected." 
- - if self._GeneralOnsetTime != "recovery": - sectionDict['impactLib'] = self._getPotentialImpactsStatements("Wind", self._impactCategoryToThreatLevel(impactMax)) - else: - sectionDict['impactLib'] = ["Community officials are now assessing the extent of actual wind impacts accordingly.", - "Emergency response teams are attending to casualty situations as needed.", - "Emergency work crews are restoring essential community infrastructure as necessary.", - "If you have an emergency dial 9 1 1.", - ] - - # If there are additional areas - if impactRange != impactMax: - qualifier = self._getImpactsQualifier(impactRangeMax) - - if self._GeneralOnsetTime == "check plans": - curPhrase = "Also, prepare for " + qualifier + "wind having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." - elif self._GeneralOnsetTime == "complete preparations": - curPhrase = "Also, protect against " + qualifier + "wind having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." - elif self._GeneralOnsetTime == "hunker down": - curPhrase = "Potential impacts from the main wind event are also now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well sheltered from " + qualifier + "wind having " + self._frame("possible | additional") + " " + impactRange + " impacts." - else: - curPhrase = "Little to no additional wind impacts expected." - - # If this phrase is not already part of the additional impacts - if curPhrase not in sectionDict['additionalImpactRange']: - - # Add it now - sectionDict['additionalImpactRange'].append(curPhrase) - - # If there is no impact across more than one half the area, include a statement for that as well - if inputThreatDominant == "None": - - curPhrase = "Elsewhere across " + self._cwa_descriptor() + \ - ", little to no impact is anticipated." 
- - # If this phrase is not already part of the additional impacts - if curPhrase not in sectionDict['additionalImpactRange']: - - # Add it now - sectionDict['additionalImpactRange'].append(curPhrase) - - productDict['windSection'] = sectionDict - - def _surgeSection(self, productDict, productSegmentGroup, productSegment): - sectionDict = dict() - sectionDict['title'] = "Surge" - sectionDict['impactRange'] = "" - sectionDict['impactLib'] = [] - sectionDict['additionalImpactRange'] = [] - sectionDict['variedImpacts'] = True - - impactMin = self._samplingDict['StormSurgeThreat']['impactMin'] - impactMax = self._samplingDict['StormSurgeThreat']['impactMax'] - impactRange = self._samplingDict['StormSurgeThreat']['impactRange'] - impactRangeMax = self._samplingDict['StormSurgeThreat']['impactRangeMax'] - inputThreatDominant = self._samplingDict['StormSurgeThreat']['inputThreatDominant'] - - self.debug_print("DEBUG: B4 %s" % - (self._pp.pformat(self._samplingDict['StormSurgeThreat'])), 1) - - # Test the simplest case first - if impactMin == "none" and impactMax == "none": - sectionDict['impactRange'] = impactRange - productDict['surgeSection'] = sectionDict - return - - # See if we need to include the term "life-threatening" surge - # This corresponds to threat levels of Moderate, High and Extreme - lifeThreatening = "" - - if impactMax in ["significant", "extensive", "devastating", "catastrophic"]: - lifeThreatening = "life-threatening " - elif impactMax == "limited": - lifeThreatening = "locally hazardous " - - if self._GeneralOnsetTime == "check plans": - sectionDict['impactRange'] = "Prepare for " + lifeThreatening + "surge having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". 
Potential impacts in this area include:" - elif self._GeneralOnsetTime == "complete preparations": - sectionDict['impactRange'] = "Protect against " + lifeThreatening + "surge having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts in this area include:" - elif self._GeneralOnsetTime == "hunker down": - sectionDict['impactRange'] = "Potential impacts from the main surge event are now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well away from " + lifeThreatening + "surge having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" - else: - sectionDict['impactRange'] = "Little to no additional surge impacts expected." - - if self._GeneralOnsetTime != "recovery": - sectionDict['impactLib'] = self._getPotentialImpactsStatements("Storm Surge", self._impactCategoryToThreatLevel(impactMax)) - else: - sectionDict['impactLib'] = ["Community officials are now assessing the extent of actual surge impacts accordingly.", - "Emergency response teams are attending to casualty situations as needed.", - "Emergency work crews are restoring essential community infrastructure as necessary.", - "If you have an emergency dial 9 1 1.", - ] - - # Reexamine the impact range - we need to separate out "life-threatening" surge categories into a separate statement - impactParts = impactRange.split(" ") - - # Initialize a variable to keep the proper scope. 
This will hold any leftover surge categories - impactRangeRest = "" - - # Look at the high end of the range - if len(impactParts) == 3 and impactParts[2] in ["significant", "extensive", "devastating", "catastrophic"]: - # We have some "life-threatening" categories we need to split out - check the low end - if impactParts[0] in ["limited", "none"]: - # Make a new range to report - impactRange = "significant" - impactRangeMax = impactRange - - if impactParts[2] != "significant": - impactRange += " to " + impactParts[2] - impactRangeMax = impactParts[2] - - impactRangeRest = impactParts[0] - - # Ensure the leftover impact range is set - just in case we need it - # This should only ever be "limited" in the case of surge under current policy - elif len(impactParts) == 1: - impactRangeRest = impactParts[0] - - self.debug_print("DEBUG: impactRange = '%s' impactMax = '%s' impactMin = '%s'" % - (impactRange, impactMax, impactMin), 1) - # If there are additional life-threatening surge areas - if impactRange != impactMax and impactRange != impactMin: - - lifeThreatening = "" - - if impactRangeMax in ["significant", "extensive", "devastating", "catastrophic"]: - lifeThreatening = "life-threatening " - elif impactRangeMax == "limited": - lifeThreatening = "locally hazardous " - - if self._GeneralOnsetTime == "check plans": - curPhrase = "Also, prepare for " + lifeThreatening + "surge having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." - elif self._GeneralOnsetTime == "complete preparations": - curPhrase = "Also, protect against " + lifeThreatening + "surge having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." - elif self._GeneralOnsetTime == "hunker down": - curPhrase = "Potential impacts from the main surge event are also now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". 
Remain well away from " + lifeThreatening + "surge having " + self._frame("possible | additional") + " " + impactRange + " impacts." - else: - curPhrase = "Little to no additional surge impacts expected." - - self.debug_print("DEBUG: curPhrase = '%s'" % (curPhrase), 1) - self.debug_print("DEBUG: sectionDict['additionalImpactRange'] = \n'%s'" % - (sectionDict['additionalImpactRange']), 1) - # If this phrase is not already part of the additional impacts - if curPhrase not in sectionDict['additionalImpactRange']: - - # Add it now - sectionDict['additionalImpactRange'].append(curPhrase) - - # If there are additional areas - if impactRangeRest != impactMax: - - lifeThreatening = "locally hazardous " - - if self._GeneralOnsetTime == "check plans": - curPhrase = "Also, prepare for " + lifeThreatening + "surge having possible " + impactRangeRest + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." - elif self._GeneralOnsetTime == "complete preparations": - curPhrase = "Also, protect against " + lifeThreatening + "surge having possible " + impactRangeRest + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." - elif self._GeneralOnsetTime == "hunker down": - curPhrase = "Potential impacts from the main surge event are also now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well away from " + lifeThreatening + "surge having " + self._frame("possible | additional") + " " + impactRangeRest + " impacts." - else: - curPhrase = "Little to no additional surge impacts expected." - - # If this phrase is not already part of the additional impacts - if curPhrase not in sectionDict['additionalImpactRange']: - - # Add it now - sectionDict['additionalImpactRange'].append(curPhrase) - - # If there is no impact across more than one half the area, include a statement for that as well - if inputThreatDominant == "None": - - curPhrase = "Elsewhere across " + self._cwa_descriptor() + \ - ", little to no impact is anticipated." 
- - # If this phrase is not already part of the additional impacts - if curPhrase not in sectionDict['additionalImpactRange']: - - # Add it now - sectionDict['additionalImpactRange'].append(curPhrase) - - self.debug_print("Final Surge sectionDict['additionalImpactRange'] = '%s'" % - (sectionDict['additionalImpactRange']), 1) - productDict['surgeSection'] = sectionDict - - def _floodingRainSection(self, productDict, productSegmentGroup, productSegment): - sectionDict = dict() - sectionDict['title'] = "Flooding Rain" - sectionDict['impactRange'] = "" - sectionDict['impactLib'] = [] - sectionDict['additionalImpactRange'] = [] - sectionDict['variedImpacts'] = False - - impactMin = self._samplingDict['FloodingRainThreat']['impactMin'] - impactMax = self._samplingDict['FloodingRainThreat']['impactMax'] - impactRange = self._samplingDict['FloodingRainThreat']['impactRange'] - impactRangeMax = self._samplingDict['FloodingRainThreat']['impactRangeMax'] - inputThreatDominant = self._samplingDict['FloodingRainThreat']['inputThreatDominant'] - - self.debug_print("In _floodingRainSection", 1) - self.debug_print("_samplingDict = \n\n%s\n" % (self._pp.pformat(self._samplingDict['FloodingRainThreat'])), 1) - - # Test the simplest case first - if impactMin == "none" and impactMax == "none": - sectionDict['impactRange'] = impactRange - productDict['floodingRainSection'] = sectionDict - return - - qualifier = "" - if impactMax in ["extensive", "devastating", "catastrophic"]: - qualifier = "life-threatening " - elif impactMax == "significant": - qualifier = "dangerous " - elif impactMax == "limited": - qualifier = "locally hazardous " - - # If there is only one impact across the entire CWA, and it is the max - if impactMax != "none" and impactMin == impactMax and inputThreatDominant != "None": - if self._GeneralOnsetTime == "check plans": - sectionDict['impactRange'] = "Prepare for " + qualifier + "rainfall flooding having possible " + impactMax + " impacts across " + 
self._cwa_descriptor() + ". Potential impacts include:" - elif self._GeneralOnsetTime == "complete preparations": - sectionDict['impactRange'] = "Protect against " + qualifier + "rainfall flooding having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". Potential impacts include:" - elif self._GeneralOnsetTime == "hunker down": - sectionDict['impactRange'] = "Potential impacts from the flooding rain are still unfolding across " + self._cwa_descriptor() + ". Remain well guarded against " + qualifier + "flood waters having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" - else: - sectionDict['impactRange'] = "Additional impacts from flooding rain are still a concern across " + self._cwa_descriptor() + ". Remain well guarded against " + qualifier + "flood waters having further impacts of " + impactMax + " potential." - # Handle the case where the impacts are not the same across the entire CWA - else: - if self._GeneralOnsetTime == "check plans": - sectionDict['impactRange'] = "Prepare for " + qualifier + "rainfall flooding having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts include:" - elif self._GeneralOnsetTime == "complete preparations": - sectionDict['impactRange'] = "Protect against " + qualifier + "rainfall flooding having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts include:" - elif self._GeneralOnsetTime == "hunker down": - sectionDict['impactRange'] = "Potential impacts from the flooding rain are still unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well guarded against " + qualifier + "flood waters having " + self._frame("possible | additional") + " " + impactMax + " impacts. 
If realized, these impacts include:" - else: - if impactMax != "none": - sectionDict['impactRange'] = "Additional impacts from flooding rain are still a concern across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well guarded against " + qualifier + "flood waters having further impacts of " + impactMax + " potential." - else: - sectionDict['impactRange'] = "Little to no additional impacts expected from flooding rain." - - if self._GeneralOnsetTime != "recovery": - sectionDict['impactLib'] = self._getPotentialImpactsStatements("Flooding Rain", self._impactCategoryToThreatLevel(impactMax)) - else: - sectionDict['impactLib'] = [] - - # If there are additional areas - if impactRange != impactMax: - - qualifier = "" - if impactRangeMax in ["extensive", "devastating", "catastrophic"]: - qualifier = "life-threatening " - elif impactRangeMax == "significant": - qualifier = "dangerous " - elif impactRangeMax == "limited": - qualifier = "locally hazardous " - - if self._GeneralOnsetTime == "check plans": - curPhrase = "Prepare for " + qualifier + "rainfall flooding having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." - elif self._GeneralOnsetTime == "complete preparations": - curPhrase = "Protect against " + qualifier + "rainfall flooding having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." - elif self._GeneralOnsetTime == "hunker down": - curPhrase = "Potential impacts from the flooding rain are still unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well guarded against " + qualifier + "flood waters having " + self._frame("possible | additional") + " " + impactRange + " impacts." - else: - if impactMax != "none": - curPhrase = "Additional impacts from flooding rain are still a concern across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well guarded against " + qualifier + "flood waters having further impacts of " + impactRange + " potential." 
- else: - curPhrase = "Little to no additional impacts expected from flooding rain." - - # If this phrase is not already part of the additional impacts - if curPhrase not in sectionDict['additionalImpactRange']: - - # Add it now - sectionDict['additionalImpactRange'].append(curPhrase) - - # If there is no impact across more than one half the area, include a statement for that as well - if inputThreatDominant == "None": - - curPhrase = "Elsewhere across " + self._cwa_descriptor() + \ - ", little to no impact is anticipated." - - # If this phrase is not already part of the additional impacts - if curPhrase not in sectionDict['additionalImpactRange']: - - # Add it now - sectionDict['additionalImpactRange'].append(curPhrase) - - productDict['floodingRainSection'] = sectionDict - - def _tornadoSection(self, productDict, productSegmentGroup, productSegment): - sectionDict = dict() - sectionDict['title'] = "Tornadoes" - sectionDict['impactRange'] = "" - sectionDict['impactLib'] = [] - sectionDict['additionalImpactRange'] = [] - sectionDict['variedImpacts'] = False - - impactMin = self._samplingDict['TornadoThreat']['impactMin'] - impactMax = self._samplingDict['TornadoThreat']['impactMax'] - impactRange = self._samplingDict['TornadoThreat']['impactRange'] - impactRangeMax = self._samplingDict['TornadoThreat']['impactRangeMax'] - inputThreatDominant = self._samplingDict['TornadoThreat']['inputThreatDominant'] - - # Test the simplest case first - if impactMin == "none" and impactMax == "none": - sectionDict['impactRange'] = impactRange - productDict['tornadoSection'] = sectionDict - return - - # For tornadoes only, Cap at devastating - if impactMax in ["devastating", "catastrophic"]: - impactMax = "devastating" - if impactMin in ["devastating", "catastrophic"]: - impactMin = "devastating" - if impactRange in ["devastating", "catastrophic"]: - impactRange = "devastating" - impactRangeMax = impactRange - - # If the max impact category is "catastrophic", and we lumped 
"devastating" in with it, ensure "devastating" is not - # leftover as the high end of the range - impactParts = impactRange.split(" ") # split up the impact range - - # If "devastating" is the high end of the range - if len(impactParts) == 3 and impactParts[2] == "devastating": - # If the first part is not "extensive" - if impactParts[0] != "extensive": - # Force the upper end to be 1 category lower - impactRange.replace("devastating", "extensive") - impactRangeMax = "extensive" - # Otherwise, the impact is just "extensive" - else: - impactRange = "extensive" - impactRangeMax = "extensive" - - qualifier = "" - if impactMax in ["extensive", "devastating"]: - qualifier = "particularly dangerous " - elif impactMax == "significant": - qualifier = "dangerous " - - # If there is only one impact across the entire CWA, and it is the max - if impactMax != "none" and impactMin == impactMax and inputThreatDominant != "None": - if self._GeneralOnsetTime == "check plans": - sectionDict['impactRange'] = "Prepare for a " + qualifier + "tornado event having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". Potential impacts include:" - elif self._GeneralOnsetTime == "complete preparations": - sectionDict['impactRange'] = "Protect against a " + qualifier + "tornado event having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". Potential impacts include:" - elif self._GeneralOnsetTime == "hunker down": - sectionDict['impactRange'] = "Potential impacts from tornadoes are still unfolding across " + self._cwa_descriptor() + ". Remain well braced against a " + qualifier + "tornado event having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" - else: - sectionDict['impactRange'] = "Additional impacts from tornadoes are still a concern across " + self._cwa_descriptor() + ". Remain well braced against " + qualifier + "tornado event having further " + impactMax + " impact potential." 
- # Handle the case where the impacts are not the same across the entire CWA - else: - if self._GeneralOnsetTime == "check plans": - sectionDict['impactRange'] = "Prepare for a " + qualifier + "tornado event having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts include:" - elif self._GeneralOnsetTime == "complete preparations": - sectionDict['impactRange'] = "Protect against a " + qualifier + "tornado event having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts include:" - elif self._GeneralOnsetTime == "hunker down": - sectionDict['impactRange'] = "Potential impacts from tornadoes are still unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well braced against a " + qualifier + "tornado event having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" - else: - if impactMax != "none": - sectionDict['impactRange'] = "Additional impacts from tornadoes are still a concern across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well braced against " + qualifier + "tornado event having further " + impactMax + " impact potential." - else: - sectionDict['impactRange'] = "Little to no additional impacts expected from tornadoes." - - if self._GeneralOnsetTime != "recovery": - sectionDict['impactLib'] = self._getPotentialImpactsStatements("Tornado", self._impactCategoryToThreatLevel(impactMax)) - else: - sectionDict['impactLib'] = [] - - # If there are additional areas - if impactRange != impactMax: - - qualifier = "" - if impactRangeMax in ["extensive", "devastating"]: - qualifier = "particularly dangerous " - elif impactRangeMax == "significant": - qualifier = "dangerous " - - if self._GeneralOnsetTime == "check plans": - curPhrase = "Prepare for a " + qualifier + "tornado event having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." 
- elif self._GeneralOnsetTime == "complete preparations": - curPhrase = "Protect against a " + qualifier + "tornado event having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." - elif self._GeneralOnsetTime == "hunker down": - curPhrase = "Potential impacts from tornadoes are still unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well braced against a " + qualifier + "tornado event having " + self._frame("possible | additional") + " " + impactRange + " impacts." - else: - if impactMax != "none": - curPhrase = "Additional impacts from tornadoes are still a concern across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well braced against " + qualifier + "tornado event having further " + impactRange + " impact potential." - else: - curPhrase = "Little to no additional impacts expected from tornadoes." - - # If this phrase is not already part of the additional impacts - if curPhrase not in sectionDict['additionalImpactRange']: - - # Add it now - sectionDict['additionalImpactRange'].append(curPhrase) - - # If there is no impact across more than one half the area, include a statement for that as well - if inputThreatDominant == "None": - - curPhrase = "Elsewhere across " + self._cwa_descriptor() + \ - ", little to no impact is anticipated." 
- - # If this phrase is not already part of the additional impacts - if curPhrase not in sectionDict['additionalImpactRange']: - - # Add it now - sectionDict['additionalImpactRange'].append(curPhrase) - - productDict['tornadoSection'] = sectionDict - - def _getImpactsQualifier(self, impact): - qualifier = "" - if impact in ["extensive", "devastating", "catastrophic"]: - qualifier = "life-threatening " - elif impact == "significant": - qualifier = "dangerous " - elif impact == "limited": - qualifier = "hazardous " - - return qualifier - - def _coastalHazardsSection(self, productDict, productSegmentGroup, productSegment): - productDict['coastalHazardsSection'] = self._frame("Enter here a statement of any additional hazards of concern along the coast such as rip currents, high waves, concerns for beach erosion etc etc if not already done in the surge section.") - - def _preparednessSection(self, productDict, productSegmentGroup, productSegment): - sectionDict = dict() - sectionDict['title'] = "PRECAUTIONARY/PREPAREDNESS ACTIONS" - - sectionDict['genericAction'] = None - if not self._ImpactsAnticipated: - sectionDict['genericAction'] = "It is always a good idea to check your preparedness plans so when and if the time comes during hurricane season, you are ready to execute them. A good resource is ready.gov." 
- - productDict['preparednessSection'] = sectionDict - - def _evacuationStatements(self, productDict, productSegmentGroup, productSegment): - evacuationDict = dict() - evacuationDict['title'] = "Evacuations" - - import TCVDictionary - evacuationDict['statements'] = TCVDictionary.EvacuationStatements - - productDict['evacuationStatements'] = evacuationDict - - def _otherPreparednessActions(self, productDict, productSegmentGroup, productSegment): - actionsDict = dict() - actionsDict['title'] = "Other Preparedness Information" - - import TCVDictionary - actionsDict['actions'] = TCVDictionary.OtherPreparednessActions[self._GeneralOnsetTime] - - productDict['otherPreparednessActions'] = actionsDict - - def _additionalSourcesInfo(self, productDict, productSegmentGroup, productSegment): - infoDict = dict() - infoDict['title'] = "Additional Sources of Information" - - import TCVDictionary - infoDict['sources'] = TCVDictionary.AdditionalSources - - productDict['additionalSourcesInfo'] = infoDict - - def _nextUpdate(self, productDict, productSegmentGroup, productSegment): - - if not self._ImpactsAnticipated: - productDict['nextUpdate'] = "At this time...additional local statements are not anticipated unless conditions warrant." - elif self._NextUpdate == "LastIssuance": # or not self._ImpactsAnticipated: - productDict['nextUpdate'] = "As it pertains to this event...this will be the last local statement issued by the National Weather Service in " + \ - self._wfoCityState + \ - " regarding the effects of tropical cyclone hazards upon the area." - elif self._NextUpdate == "Conditions": - productDict['nextUpdate'] = "The next local statement will be issued by the National Weather Service in " + \ - self._wfoCityState + \ - " as conditions warrant." 
- elif self._NextUpdate == "Enter": - productDict['nextUpdate'] = "The next local statement will be issued by the National Weather Service in " + \ - self._wfoCityState + \ - " around " + self._NextUpdate_entry.strip() + ", or sooner if conditions warrant." - - ################# Product Parts Helper Methods - - def _impactsKeyFunction(self, optionIndexTuple): - ((_, _), indexStr) = optionIndexTuple - indexStr = indexStr.strip() - if len(indexStr) == 0: - return 9999 - else: - return int(indexStr) - - def _getPotentialImpactsStatements(self, elementName, maxThreat): - import TCVDictionary - potentialImpactStatements = TCVDictionary.PotentialImpactStatements - statements = potentialImpactStatements[elementName][maxThreat] - - return statements - - def _impactCategoryToThreatLevel(self, impactCategory): - if impactCategory == "catastrophic" or impactCategory == "devastating": - return "Extreme" - elif impactCategory == "extensive": - return "High" - elif impactCategory == "significant": - return "Mod" - elif impactCategory == "limited": - return "Elevated" - else: - return "None" - - def _determineHazardStates(self): - self._currentHazardsList = [] - self._changesHazardsList = [] - - self.debug_print("*"*80) - keys = self._previousAdvisory.keys() - keys.sort() - for key in keys: - self.debug_print("%s : %s" % (key, self._previousAdvisory[key]), 1) - for hazard in self._previousAdvisory["HazardsForHLS"]: - self.debug_print("DEBUG Hazard: %s" % (self._pp.pformat(hazard)), 1) - if hazard['act'] != 'CON': - self._changesHazardsList.append(hazard) - if hazard['act'] not in ['CAN', "UPG"]: - self._currentHazardsList.append(hazard) - - self.debug_print("-"*80, 1) - self.debug_print("self._changesHazardsList = %s" % (self._changesHazardsList), 1) - self.debug_print("self._currentHazardsList = %s" % (self._currentHazardsList), 1) - - ############################################################### - ### Sampling and Statistics related methods - - def _sampleHLSData(self, 
argDict): - editAreas = [(self._cwa(), self._cwa())] - - cwaSampler = self.getSampler(argDict, - (self._analysisList_HLS(), self._timeRangeList3Hour, editAreas)) - - statList = self.getStatList(cwaSampler, - self._analysisList_HLS(), - self._timeRangeList3Hour, - self._cwa()) - - for period in range(len(statList)): - - self.debug_print("=" * 100, 1) - self.debug_print("In _sampleHLSData for period %s (%s)" % \ - (period, self._timeRangeList3Hour[period][0]), 1) - - statDict = statList[period] - for threatName in ['WindThreat', 'FloodingRainThreat', 'TornadoThreat']: - self._sampleRankedDiscreteValue(threatName, statDict) - # TODO: Investigate if this sampling method is still really needed. The JSON files may - # have all the needed information now - self._sampleMostSignificantDiscreteValue(threatName, statDict) - - qpfToFfgRatio = self._getStatValue(statDict, "QPFtoFFGRatio", "Max") - decidingField = self._samplingDict['FloodingRainThreat']['decidingField'] - if decidingField is None or qpfToFfgRatio > decidingField: - self._samplingDict['FloodingRainThreat']['decidingField'] = qpfToFfgRatio - - self.debug_print("WindThreat = %s" % (self._samplingDict['WindThreat']['inputThreatDominant']), 1) - self.debug_print("FloodingRainThreat = %s" % (self._samplingDict['FloodingRainThreat']['inputThreatDominant']), 1) - self.debug_print("TornadoThreat = %s" % (self._samplingDict['TornadoThreat']['inputThreatDominant']), 1) - - - - self._createWholeDomainEditArea(argDict) - editAreas = [("WholeDomain", "WholeDomain")] - wholeDomainSampler = self.getSampler(argDict, - (self._analysisList_HLS_WholeDomain(), self._timeRangeList3Hour, editAreas)) - - statList = self.getStatList(wholeDomainSampler, - self._analysisList_HLS_WholeDomain(), - self._timeRangeList3Hour, - "WholeDomain") - - for period in range(len(statList)): - statDict = statList[period] - maxWind = self._getStatValue(statDict, "Wind", "Max", self.VECTOR()) - decidingField = 
self._samplingDict['WindThreat']['decidingField'] - if decidingField is None or maxWind > decidingField: - self._samplingDict['WindThreat']['decidingField'] = maxWind - - - - editAreas = [(self._cwa(), self._cwa())] - intersectAreas = self._computeIntersectAreas(editAreas, argDict) - if len(intersectAreas) != 0: - self.debug_print("Sampling StormSurgeThreat, now") - intersectSampler = self.getSampler(argDict, - (self._intersectAnalysisList_HLS(), self._timeRangeList3Hour, intersectAreas)) - - statList = self.getStatList(intersectSampler, - self._intersectAnalysisList_HLS(), - self._timeRangeList3Hour, - "intersect_" + self._cwa()) - - for period in range(len(statList)): - statDict = statList[period] - self.debug_print("current stormSurge statDict = %s" % (self._pp.pformat(statDict)), 1) - self._sampleRankedDiscreteValue('StormSurgeThreat', statDict) - - inundationMax = self._getStatValue(statDict, "InundationMax", "Max") - decidingField = self._samplingDict['StormSurgeThreat']['decidingField'] - if decidingField is None or inundationMax > decidingField: - self._samplingDict['StormSurgeThreat']['decidingField'] = inundationMax - - self.debug_print("StormSurgeThreat = %s" % (self._samplingDict['StormSurgeThreat']['inputThreatDominant']), 1) - - def _sampleTCVAdvisory(self, advisory): - self.debug_print("sampling TCV advisory!", 1) - seenValidThreatLevel = {} - for zone in advisory["ZoneData"]: - self.debug_print("-" * 60, 1) - self.debug_print("Looking at zone %s" % (zone), 1) - for key in advisory["ZoneData"][zone]: - if "Threat" not in key or "highestHunkerDown" in key: - continue - - if key not in seenValidThreatLevel: - seenValidThreatLevel[key] = False - - self.debug_print("Looking at key '%s'" % (key), 1) - - threatLevel = advisory["ZoneData"][zone][key] - self.debug_print(" Threat level = %s" % (threatLevel), 1) - - if (self._samplingDict[key]['inputThreatLow'] is None) and (not seenValidThreatLevel[key]): - self._samplingDict[key]['inputThreatLow'] = 
threatLevel - if (self._samplingDict[key]['inputThreatHigh'] is None) and (not seenValidThreatLevel[key]): - self._samplingDict[key]['inputThreatHigh'] = threatLevel - - if threatLevel != None: - seenValidThreatLevel[key] = True - - lowThreat = self._samplingDict[key]['inputThreatLow'] - highThreat = self._samplingDict[key]['inputThreatHigh'] - threatOrder = self.mostSignificantDiscrete_keyOrder_dict(None, None, None)[key] - - self.debug_print("***** threatOrder = %s" % (repr(threatOrder)), 1) - - if threatOrder.index(threatLevel) < threatOrder.index(lowThreat): - lowThreat = threatLevel - if threatOrder.index(threatLevel) > threatOrder.index(highThreat): - highThreat = threatLevel - - if lowThreat is None: - self.debug_print(" low threat = Python None", 1) - else: - self.debug_print(" low threat = %s" % (lowThreat), 1) - self.debug_print(" high threat = %s" % (highThreat), 1) - - self._samplingDict[key]['inputThreatLow'] = lowThreat - self._samplingDict[key]['inputThreatHigh'] = highThreat - - self.debug_print("Sampling dict =\n\n%s\n" % (self._pp.pformat(self._samplingDict)), 1) - - def _sampleRankedDiscreteValue(self, threatName, statDict): - self.debug_print("-" * 60, 1) - self.debug_print("_sampleRankedDiscreteValue statDict =\n\n%s\n" % (self._pp.pformat(statDict)), 1) - rankedThreatLevels = self.getStats(statDict, threatName + "__rankedDiscreteValue") - self.debug_print("sampling %s" % (threatName), 1) - self.debug_print("sampleData: rankedThreatLevels =\n\n%s\n" % (self._pp.pformat(rankedThreatLevels)), 1) - if rankedThreatLevels is not None: - dominantThreatLevel = self._getDominantThreatLevel(threatName, rankedThreatLevels) - self.debug_print("dominantThreatLevel = %s" % (dominantThreatLevel), 1) - - currentDominantThreatLevel = self._samplingDict[threatName]['inputThreatDominant'] - self.debug_print("currentDominantThreatLevel = %s" % (currentDominantThreatLevel), 1) - self._samplingDict[threatName]['inputThreatDominant'] = 
self._getHighestThreat(threatName, - dominantThreatLevel, - currentDominantThreatLevel) - self.debug_print("new dominant = %s" % (self._samplingDict[threatName]['inputThreatDominant']), 1) - - def _sampleMostSignificantDiscreteValue(self, threatName, statDict): - self.debug_print("_sampleMostSignificantDiscreteValue for %s" % (threatName), 1) - threatLevel = self.getStats(statDict, threatName + "__mostSignificantDiscreteValue") - self.debug_print("threatLevel = %s" % (threatLevel), 1) - if threatLevel is not None: - inputThreatLow = self._samplingDict[threatName]['inputThreatLow'] - self.debug_print("current inputThreatLow = %s" % (inputThreatLow), 1) - if inputThreatLow is None: - self._samplingDict[threatName]['inputThreatLow'] = threatLevel - else: - self._samplingDict[threatName]['inputThreatLow'] = self._getLowestThreat(threatName, - threatLevel, - inputThreatLow) - self.debug_print("new inputThreatLow = %s" % (self._samplingDict[threatName]['inputThreatLow']), 1) - - inputThreatHigh = self._samplingDict[threatName]['inputThreatHigh'] - self.debug_print("current inputThreatHigh = %s" % (inputThreatHigh), 1) - self._samplingDict[threatName]['inputThreatHigh'] = self._getHighestThreat(threatName, - threatLevel, - inputThreatHigh) - self.debug_print("new inputThreatHigh = %s" % (self._samplingDict[threatName]['inputThreatHigh']), 1) - - def _getDominantThreatLevel(self, threatName, rankedThreatLevels): - dominantLevelWithHighestRank = None - highestRank = None - - for (level, rank) in rankedThreatLevels: - if highestRank is None or rank > highestRank: - highestRank = rank - dominantLevelWithHighestRank = level - elif rank == highestRank: - dominantLevelWithHighestRank = self._getHighestThreat(threatName, - dominantLevelWithHighestRank, - level) - - return dominantLevelWithHighestRank - - def _getHighestThreat(self, threatName, threatLevel1, threatLevel2): - keyOrderDict = self.mostSignificantDiscrete_keyOrder_dict(None, None, None) - keyOrder = 
keyOrderDict[threatName] - - level1Index = keyOrder.index(threatLevel1) - level2Index = keyOrder.index(threatLevel2) - - if level1Index < level2Index: - return threatLevel2 - elif level1Index == level2Index: - return threatLevel1 - else: - return threatLevel1 - - def _getLowestThreat(self, threatName, threatLevel1, threatLevel2): - keyOrderDict = self.mostSignificantDiscrete_keyOrder_dict(None, None, None) - keyOrder = keyOrderDict[threatName] - - level1Index = keyOrder.index(threatLevel1) - level2Index = keyOrder.index(threatLevel2) - - if level1Index < level2Index: - return threatLevel1 - elif level1Index == level2Index: - return threatLevel1 - else: - return threatLevel2 - - def _setHazardImpactCategories(self, threatName): - inputThreatLow = self._samplingDict[threatName]['inputThreatLow'] - inputThreatHigh = self._samplingDict[threatName]['inputThreatHigh'] - inputThreatDominant = self._samplingDict[threatName]['inputThreatDominant'] - decidingField = self._samplingDict[threatName]['decidingField'] - catastrophicThreshold = self._samplingDict[threatName]['catastrophicThreshold'] - - self.debug_print("-" * 60, 1) - self.debug_print("DEBUG: _setHazardImpactCategories for %s" % (threatName), 1) - - impactMin = None - impactMax = None - impactRange = None - impactRangeMax = None - - # Determine lowest impact category - if inputThreatLow == "Extreme": - if threatName != "TornadoThreat" and decidingField >= catastrophicThreshold: - impactMin = "catastrophic" - else: - impactMin = "devastating" - elif inputThreatLow == "High": - impactMin = "extensive" - elif inputThreatLow == "Mod": - impactMin = "significant" - elif inputThreatLow == "Elevated": - impactMin = "limited" - else: - impactMin = "none" - - # Determine highest impact category - if inputThreatHigh == "Extreme": - if threatName != "TornadoThreat" and decidingField >= catastrophicThreshold: - impactMax = "catastrophic" - impactRangeMax = "devastating" - else: - impactMax = "devastating" - impactRangeMax = 
"extensive" - elif inputThreatHigh == "High": - impactMax = "extensive" - impactRangeMax = "significant" - elif inputThreatHigh == "Mod": - impactMax = "significant" - impactRangeMax = "limited" - elif inputThreatHigh == "Elevated": - impactMax = "limited" - impactRangeMax = "none" - else: - impactMax = "none" - impactRangeMax = "none" - - self.debug_print( - "DEBUG: impactMin = '%s' impactMax = '%s' impactRangeMax = '%s'" % \ - (impactMin, impactMax, impactRangeMax), 1) - - # Determine dominant impact category for rest of CWA - No impact - if impactMin == "none" and impactMax == "none": - impactRange = "Little to no " + self._frame("additional") + " impacts are anticipated at this time across " + self._cwa_descriptor() + "." - # Otherwise, at least some impact will be experienced across the CWA - else: - # Do not permit the lowest category to be "None", if the highest category is also not "None" - # This is to avoid poor impact range wording in situations of tight gradients across a CWA - # (e.g. 
"None to High") - if impactMin == "none" and impactMax != "none": - impactMin = "limited" - - if impactMin == impactMax: - impactRange = impactMax - impactRangeMax = impactMax - elif impactMin == impactRangeMax: - impactRange = impactRangeMax - else: - impactRange = impactMin + " to " + impactRangeMax - - self._samplingDict[threatName]['impactMin'] = impactMin - self._samplingDict[threatName]['impactMax'] = impactMax - self._samplingDict[threatName]['impactRange'] = impactRange - self._samplingDict[threatName]['impactRangeMax'] = impactRangeMax - - ############################################################### - ### Area, Zone and Segment related methods - - def _createWholeDomainEditArea(self, argDict): - editAreaUtils = EditAreaUtils.EditAreaUtils() - editAreaUtils.setUp(None, argDict) - - gridLoc = editAreaUtils.getGridLoc() - grid2Dbit = JavaGrid2DBit( gridLoc.gridSize().x, gridLoc.gridSize().y ) - grid2Dbit.setAllValues(1) - - refID = ReferenceID("WholeDomain") - refData = ReferenceData(gridLoc, refID, grid2Dbit) - editAreaUtils.saveEditAreas([refData]) - - ############################################################### - ### Hazards related methods - - def _determineHazards(self, segments): - # Return a list of hazards from the given segments in the form: - # (key, landList, marineList, coastalList, inlandList) - # where key is (hdln, act, phen, sig) and the lists show which areas - # contain the hazard separated by category - hazAreaList = [] - for segment in segments: - hazardTable = self._argDict["hazards"] - hazards = hazardTable.getHazardList(segment) - for hazard in hazards: - action = hazard['act'] - hazAreaList.append((hazard, segment)) - # Consolidate hazards (there could be multiple segments with the same phen/sig/act) - hazardDict = {} - hazardList = [] - for hazard, segment in hazAreaList: - key = (hazard['hdln'], hazard['act'], hazard['phen'], hazard['sig']) - if key not in hazardDict.keys(): - hazardDict[key] = segment - hazardList.append(key) 
- else: - hazardDict[key] = hazardDict[key]+segment - - self.debug_print("hazardList =\n\n%s\n" % (self._pp.pformat(hazardList)), 1) - - return hazardList - - ############################################################### - ### Time related methods - - def _formatLocalTime(self, para, areas): - # Create a time string in local time - # e.g. 2 AM EDT - # Get the Z time hour - timeSearch = re.compile("...([0-9]+) *(Z|UTC)...") - timeStr = timeSearch.search(para) - -## gmtStr = para[timeStr.start():timeStr.end()] -## gmt = gmtStr.strip("...").replace("Z","") -## gmtHour = int(gmt)/100 - - # This code could bomb in the unlikely event we don't find a UTC - # time. We should probably add some kind of default hour here, - # keyed off the current hour, to prevent this. (MHB) - try: - # Convert the hour portion of the time string to an integer - gmtHour = int(timeStr.group(1)[:2]) - except: - gmtHour = time.gmtime().tm_hour - - gmtTR = self.createTimeRange(gmtHour, gmtHour+1, "Zulu") - gmtTime = gmtTR.startTime().unixTime() - - # Now make a string for each time zone - zoneList = self._getTimeZoneList(areas) - timeStrs = [] - timeDesc = "" - for timeZone in zoneList: - timeStr = self.formatTimeString(gmtTime, "%I %p %Z ", timeZone) - timeStr = string.replace(timeStr, " ", " ") - timeStr = string.strip(timeStr) - timeStr = timeStr.lstrip("0") - if timeStr not in timeStrs: - if len(timeStrs) > 0: - timeDesc += "...OR " - timeStrs.append(timeStr) - timeDesc += timeStr - return timeDesc - - def _getTimeZoneList(self, areaList): - # NOTE -- this code was taken from the middle of getAreaHeader - # in Header.py -- it really should be put back in and used - # in Header.py, but to avoid confusion, I'm repeating it here - # get this time zone - thisTimeZone = os.environ["TZ"] - zoneList = [] - # check to see if we have any areas outside our time zone - for areaName in areaList: - if areaName in self._areaDict.keys(): - entry = self._areaDict[areaName] - if "ugcTimeZone" not in entry: 
#add your site tz - if thisTimeZone not in zoneList: - zoneList.append(thisTimeZone) - continue # skip this entry - timeZoneList = entry["ugcTimeZone"] - if type(timeZoneList) is types.StringType: # a single value - timeZoneList = [timeZoneList] # make it into a list - for timeZone in timeZoneList: - if timeZone not in zoneList: - zoneList.append(timeZone) - # if the resulting zoneList is empty, put in our time zone - if len(zoneList) == 0: - zoneList.append(thisTimeZone) - # if the resulting zoneList has our time zone in it, be sure it - # is the first one in the list - try: - index = zoneList.index(thisTimeZone) - if index != 0: - del zoneList[index] - zoneList.insert(0, thisTimeZone) - except: - pass - return zoneList - - ############################################################### - ### Storm Information and TCP related methods - - def _grabHeadline(self, text=''): - # Get first headline found in text and return it as a string - - self.debug_print("_grabHeadline text = '%s'" % (text)) - - # Fixed pattern to grab headline (MHB 04/08/2009) - # See if there is a headline in this text - headlineSearch = re.findall("(?ism)^(\.{3}.+?\.{3}) *\n", text) - - self.debug_print("old headlineSearch = %s" % (headlineSearch), 1) - - # If we could not find original headlines, try to use 'new' HLS style - if headlineSearch is None or headlineSearch == []: - headlineSearch = re.findall("(?ism)^\*\*.+?\*\* *\n", text) - - self.debug_print("now headlineSearch = %s" % (headlineSearch), 1) - - # If we found a headline - if len(headlineSearch) > 0: - - # Remove the first and last ellipses - if they exist - headlineSearch[0] = re.sub("^\.\.\.", "", headlineSearch[0]) - headlineSearch[0] = re.sub("\.\.\.$", "", headlineSearch[0]) - -# # Remove the first and last '**' - if they exist - headlineSearch[0] = headlineSearch[0].replace("**", "").strip() - - # Return the first cleaned-up headline string we found - return self._cleanText(headlineSearch[0]) - - # Otherwise, return an 
indicator there is no headline in this text - else: - return "" # Changed to an null string instead of None - # (MHB 04/08/2009) - - def _getStormInfo(self, argDict): - # Get the Storm information - self._stormType = "Tropical" - self._stormName = "Cyclone" - self._stormTypeName = self._stormType + " " +self._stormName - - - stormDict = self._grabStormInfo(self._TCP) - self._stormName = stormDict.get("StormName", "") - self._stormType = stormDict.get("StormType", "") - self._stormTypeName = self._stormType + " " + self._stormName - self._decodeStormInfo(stormDict) - # Storm movement in mph and the stated movement trend - self._stormMovementTrend = self._expandBearings("Movement " + stormDict.get("StormMotion","")) - # Storm intensity in mph and the stated intensity trend. - self._stormIntensityTrend = "Storm Intensity " + stormDict.get("StormIntensity","") - - self.debug_print("Begin storm information", 1) - self.debug_print("storm dict = %s" % (stormDict), 1) - self.debug_print("storm name = %s" % (self._stormName), 1) - self.debug_print("type = %s" % (self._stormType), 1) - self.debug_print("type name = %s" % (self._stormTypeName), 1) - self.debug_print("time = %s" % (self._stormTime), 1) - self.debug_print("lat = %s" % (self._stormLat), 1) - self.debug_print("lon = %s" % (self._stormLon), 1) - self.debug_print("location = %s" % (str(self._stormLocation)), 1) - self.debug_print("reference = %s" % (self._stormReference), 1) - self.debug_print("references = %s" % (self._stormLocalReferences), 1) - self.debug_print("movement trend = %s" % (self._stormMovementTrend), 1) - self.debug_print("intensity trend = %s" % (self._stormIntensityTrend), 1) - self.debug_print("End storm information", 1) - - def _grabStormInfo(self, tcp): - # Get the storm information from the selected TCP - # return a dictionary - # Initialize a dictionary to hold the information we want - dict = {"StormType" : "|* fill in storm type here *|", - "StormName" : "|* fill in storm name here *|", - 
"StormTime" : "|* Enter storm time *| ", - "StormLat": "", - "StormLon": "", - "StormReference": "", - "StormIntensity": "", - "StormMotion": "", - "StormInfo": "", - "StormCenter": "", - } - #======================================================================= - # If we got the latest public advisory - - if tcp is not None and len(tcp) > 0: - - #=================================================================== - # Try to determine the storm type and name automatically - - # Updated version to handle WFO GUM advisories. This pattern will - # handle multiple word names (including certain special characters) - # This is for the NHC format. - mndSearch = re.search("(?im)^.*?(HURRICANE|(POTENTIAL|SUB|POST.?)" + - "?TROPICAL (STORM|DEPRESSION|CYCLONE)|" + - "(SUPER )?TYPHOON|REMNANTS OF) " + - "([A-Z0-9\-\(\) ]+?)" + - "(SPECIAL |INTERMEDIATE )?ADVISORY", tcp) - - # Display some debug info - if flag is set - self.debug_print("mndSearch = '%s'" % (mndSearch), 1) - - # If we found the storm type and name in the MND header - if mndSearch is not None: - - # Pick off the storm type and name - dict["StormType"] = mndSearch.group(1).strip() - dict["StormName"] = mndSearch.group(5).strip() - - #################################################################### - #################################################################### - # 12/15/2010 (MHB) - we should not need this anymore, but will - # leave it for the 2011 season as a fail-safe. 
- - # Look for the HPC format instead - else: - - mndSearch = re.search("(?im)^PUBLIC ADVISORY.+?FOR REMNANTS " + - "OF ([A-Z0-9\-\(\) ]+)", tcp) - - # If we found the storm type and name in the MND header - if mndSearch is not None: - - # Pick off the storm type and name - dict["StormType"] = "Remnants of" - dict["StormName"] = mndSearch.group(1).strip() - - # end possible removal - 12/15/2010 (MHB) - #################################################################### - #################################################################### - - #=================================================================== - # Clean up the product for easier parsing - - tcp = self._cleanText(tcp) - - #=================================================================== - # Now try to grab the latest storm information - - # Look for the new NHC format first - summarySearch = re.search("(?is)SUMMARY OF (.+?)\.{3}.+?" + - "LOCATION\.{3}(.+?[NS]) +(.+?[WE]).+?" + - "(ABOUT .+?)MAXIMUM SUSTAINED WIND.+?" + - "(\d+ MPH).+?", tcp) - - #-------------------------------------------------------------------- - # If we found the NHC summary section - - if summarySearch is not None: - - # Set aside some information we'll need later on - dict["StormTime"] = summarySearch.group(1).strip() - dict["StormLat"] = summarySearch.group(2).strip() - dict["StormLon"] = summarySearch.group(3).strip() - dict["StormReference"] = summarySearch.group(4).strip() - dict["StormIntensity"] = summarySearch.group(5).strip().lower() - - haveStormMotion = True - if tcp.find("PRESENT MOVEMENT...STATIONARY") != -1: - dict["StormMotion"] = "Stationary" - else: - summarySearch = re.search("PRESENT MOVEMENT\.{3}(.+?)\.{3}", tcp) - - if summarySearch is not None: - dict["StormMotion"] = summarySearch.group(1).strip().lower() - else: - haveStormMotion = False - - #================================================================ - # Use the remaining summary groups to contruct a paragraph - # similar to the "old" TCP 
format, and save that for later use - - # Start the paragraph with the advisory time - dict["StormCenter"] = "AT %s...THE CENTER OF " % \ - (dict["StormTime"]) - - # Now add some phrasing to maintain proper grammar, if needed - if dict["StormType"] == "Remnants of": - dict["StormCenter"] = "%s THE" % (dict["StormCenter"]) - - # Now add the storm type and storm name - dict["StormCenter"] = "%s %s %s " % (dict["StormCenter"], - dict["StormType"], - dict["StormName"]) - - # Now add the storm position - dict["StormCenter"] = \ - "%s WAS LOCATED AT LATITUDE %s...LONGITUDE %s." % \ - (dict["StormCenter"], dict["StormLat"], dict["StormLon"]) - - #---------------------------------------------------------------- - # Now add the primary NHC geographic reference - - # Get all the NHC references - starting with the word 'About' - # after the first one - referenceIndex = dict["StormReference"][4:].find('About') - - # Assume we only have one NHC reference point by default - nhcReference = dict["StormReference"] - - self.debug_print("referenceIndex = %s" % (referenceIndex), 1) - - # If we have more than one NHC reference point - if referenceIndex != -1: - - # Adjust this index to account for the first 'About' - referenceIndex += 4 - - # Only keep the first NHC reference location - nhcReference = dict["StormReference"][:referenceIndex] - - # Convert any abbreviated bearings to full words - nhcReference = self._expandBearings(nhcReference) - - # Add only first one to the summary paragraph for brevity - dict["StormCenter"] = "%s THIS WAS %s. " % \ - (dict["StormCenter"], - self._removeKM(nhcReference.strip())) - - #---------------------------------------------------------------- - # Add the maximum sustained wind speed phrase - - dict["StormCenter"] = "%s MAXIMUM SUSTAINED WINDS WERE %s." 
% \ - (dict["StormCenter"], - self._removeKM(dict["StormIntensity"])) - - #---------------------------------------------------------------- - # Now add the storm motion - - if haveStormMotion: - dict["StormCenter"] = "%s THE STORM MOTION WAS %s." % \ - (dict["StormCenter"], - self._removeKM(dict["StormMotion"])) - - #################################################################### - #################################################################### - # 12/15/2010 (MHB) - we should not need this anymore, but will - # leave it for the 2011 season as a fail-safe. - #-------------------------------------------------------------------- - # Search the product for the legacy storm info section - in case - # the new NHC style was not found - - stormInfoSearch = \ - re.search('(?is)(AT +(\d+ +[AP]M [AECMPH][DS]T)' + - '\.{3}\d+ *(Z|UTC)\.{3}THE (CENTER|REMNANTS|EYE) .+)', - tcp) - - # Display some debug info - if flag is set - self.debug_print("storminfoSearch = '%s'" % (stormInfoSearch)) - if stormInfoSearch is not None: - self.debug_print("\n\n%s\n" % - (self._pp.pformat(stormInfoSearch.groups())), 1) - - # If we found the storm info section of the product - if stormInfoSearch is not None: - for group in stormInfoSearch.groups(): - self.debug_print('-'*50, 1) - self.debug_print("%s\n" % (group), 1) - - # Clean this section up a bit. Keep each paragraph separate - # by a single , but remove all others as well as extra - # spaces. 
Then store this text in the TCP dictionary - dict["StormInfo"] = stormInfoSearch.group(1).strip() - - # Set aside the first paragraph of the storm info since it - # contains the TPC-provided reference point - if we haven't - # already found this information - if len(dict["StormCenter"].strip()) == 0: - dict["StormCenter"] = dict["StormInfo"].split('\n')[0] - - # If we have not already found the advisory time - get it from - # the legacy format - if dict["StormTime"] == "|* Enter storm time *| ": - dict["StormTime"] = stormInfoSearch.group(2).strip() - - # Set aside the first paragraph of the storm info since it - # contains the TPC-provided reference point - if we haven't - # already found this information - if len(dict["StormCenter"].strip()) == 0: - dict["StormCenter"] = dict["StormInfo"].split('\n')[0] - - #=================================================================== - # Now try to grab the repeated storm information summary - - repeatInfo = re.search("(?is)(\.{3}SUMMARY.+?\.)\n *\n", - tcp) - # If we cannot find the summary, try to find a "repeating" section - if repeatInfo is None: - repeatInfo = re.search("(?is)(REPEATING.+?\.)\n *\n", tcp) - self.debug_print(self._pp.pformat(repeatInfo), 1) - - # If we found the repeated storm information summary - if repeatInfo is not None: - - # Clean up this paragraph - summary = repeatInfo.group(1).strip() - - #=============================================================== - # Now try to grab the latest storm location - if we need it - - if dict["StormLat"] == "" or dict["StormLon"] == "": - - # Search the product for the storm location section - locationSearch = \ - re.search('(?is).+LOCATION.*?(\d+\.\d+ *N).+?' 
+ - '(\d+\.\d+ *[EW])', summary) - - # Display some debug info - if flag is set - self.debug_print("locationSearch = '%s'" % (locationSearch), 1) - if locationSearch is not None: - self.debug_print("\n\n%s\n" % (self._pp.pformat(locationSearch.groups())), 1) - - # If we found the storm location section of the product - if locationSearch is not None: - - # Pick off the storm latitude and longitude - dict["StormLat"] = locationSearch.group(1).strip() - dict["StormLon"] = locationSearch.group(2).strip() - - #=============================================================== - # Now try to grab the latest storm intensity - if we need it - - if dict["StormIntensity"] == "": - - # Search the product for the storm intensity section - intensitySearch = \ - re.search('(?i).+MAXIMUM SUST.+?(\d+ *MPH)', summary) - - # Display some debug info - if flag is set - self.debug_print("intensitySearch = '%s'" % - (intensitySearch), 1) - - # If we found the storm intensity section of the product - if intensitySearch is not None: - - # Pick off the storm intensity - dict["StormIntensity"] = intensitySearch.group(1).strip() - - #=============================================================== - # Now try to grab the latest storm motion - if we need it - - if dict["StormMotion"] == "": - - # Search the product for the storm motion section - motionSearch = re.search('(?i).+MOVEMENT\.{3}(.+?\d+ MPH)', - summary) - if motionSearch is None: - motionSearch = re.search('(?i).+MOVEMENT(.+?\d+.+?)\.', - summary) - - # Display some debug info - if flag is set - self.debug_print("motionSearch = '%s'" % (motionSearch), 1) - - # If we found the storm motion section of the product - if motionSearch is not None: - - # Pick off the storm motion - motion = motionSearch.group(1).strip() - - # Fix the motion (i.e no '...') - dict["StormMotion"] = re.sub('(?i)\.{3}', ' the ', motion) - - # end possible removal - 12/15/2010 (MHB) - #################################################################### - 
#################################################################### - - #======================================================================== - # Display final decoded information from TCP - - self.debug_print("*" *80, 1) - self.debug_print("Final TCP Info...\n", 1) - self.debug_print('dict["StormType"] = %s' % (dict["StormType"]), 1) - self.debug_print('dict["StormName"] = %s' % (dict["StormName"]), 1) - self.debug_print('dict["StormTime"] = %s' % (dict["StormTime"]), 1) - self.debug_print('dict["StormLat"] = %s' % (dict["StormLat"]), 1) - self.debug_print('dict["StormLon"] = %s' % (dict["StormLon"]), 1) - self.debug_print('dict["StormReference"] = %s' % (dict["StormReference"]), 1) - self.debug_print('dict["StormIntensity"] = %s' % (dict["StormIntensity"]), 1) - self.debug_print('dict["StormMotion"] = %s' % (dict["StormMotion"]), 1) - self.debug_print('dict["StormInfo"] = %s' % (dict["StormInfo"]), 1) - self.debug_print('dict["StormCenter"] = %s' % (dict["StormCenter"]), 1) - - # Return the dictionary will all the information we found in the TCP - return dict - - def _decodeStormInfo(self, stormDict): - self._stormTime = "|* Enter Storm Time *| " - self._stormLat = "|* Enter Storm Lat *| " - self._stormLon = "|* Enter Storm Lon *| " - self._stormLocation = "|* Enter Storm Location *| " - self._stormReference = "" - self._stormLocalReferences = "" - para = stormDict.get("StormCenter", "") - self.debug_print("para %d %s" % (len(para), para), 1) - if len(para)<= 0: - return - - # Create the time string - self._stormTime = self._formatLocalTime(para, self._allAreas()) - - # Find stormLat, stormLon and stormLocation - # e.g. LATITUDE 15.7 NORTH...LONGITUDE 80.0 WEST - stormLocation ="" - stormLat = None - stormLon = None - - # Make a pattern to find the latest storm location - coordPtn = re.compile("(?i)(LATITUDE ([\d\.]+) ?((N|S)(O[RU]TH)?))..." 
+ - "(AND )?(LONGITUDE ([\d\.]+) ?((W|E)([AE]ST)?)).+?") -## + "OR ((ABOUT )?.+)") - - # Make a pattern to find the NHC reference location - refPtn = re.compile("(?i)(WAS|OR) ((ABOUT )?\d+ MILES.+?" + - "(NORTH|SOUTH|EAST|WEST).+?)\.") - - # Try to find these patterns in the text - coordPtnMatch = coordPtn.search(para) - self.debug_print("+" * 90, 1) - self.debug_print("coordinate search...", 1) - if coordPtnMatch is not None: - self.debug_print("\n\n%s|n" % (self._pp.pformat(coordPtnMatch.groups())), 1) - - refPtnMatch = refPtn.search(para) - self.debug_print("reference search...", 1) - if refPtnMatch is not None: - self.debug_print("\n\n%s|n" % (self._pp.pformat(refPtnMatch.groups())), 1) - - # If we found the coordinates we were after - if coordPtnMatch is not None: - - # If we have the correct paragraph, set aside the latitude and - # longitude info as numbers - self._stormLat = float(coordPtnMatch.group(2)) - self._stormLon = float(coordPtnMatch.group(8)) # was 7 - - # Adjust latitude and longitude as need for "other" hemispheres - if coordPtnMatch.group(4) in ["S", "s"]: - self._stormLat *= -1.0 - - if coordPtnMatch.group(10) in ["W", "w"]: - self._stormLon *= -1.0 - - # Construct the storm location pair and remove the "Latitude " and "Longitude " text - self._stormLocation = (coordPtnMatch.group(1)[9:], coordPtnMatch.group(7)[10:]) - - # If we found the primary NHC reference we were after - if refPtnMatch is not None: - - # Set aside all the geographic reference text -## stormReference = coordPtnMatch.group(11) - stormReference = refPtnMatch.group(2) - - # Watch out for some grammar gotchas with this reference - stormReference = re.sub("(?i)^(WAS|OR) ", "", stormReference) - - # See if there are multiple geographic references - if re.search('(?i) and ', stormReference) is not None: - - # Yes there are multiple references, so only keep the - # first one - stormReference = re.sub("(?i) AND .+", "", stormReference) - - # Also remove any metric distances - 
self._stormReference = self._removeKM(stormReference) - - # Miles/km from chosen local reference - self._stormLocalReferences = self._calcLocalReferences( - self._stormLat, self._stormLon) - - self.debug_print("stormLocalRefs = %s" % (self._stormLocalReferences), 1) - - # Compare the NHC reference to the local references - for localRef in self._stormLocalReferences: - - self.debug_print("self._stormReference = '%s', localRef = '%s'" % (self._stormReference, localRef), 1) - - # Get the locations from these statements - nhcRef = re.search('(?i)(north|south|east|west) of (.+)', - self._stormReference) - testRef = re.search('(?i)(north|south|east|west) of (.+)', - localRef) - - if nhcRef is not None: - self.debug_print("nhcRef = '%s'" % (nhcRef.group(2)), 1) - - if testRef is not None: - self.debug_print("testRef = '%s'" % (testRef.group(2)), 1) - - # If we have a local reference that matches the national - # center reference - if testRef is not None and nhcRef is not None and \ - re.search("(?i)%s" % (testRef.group(2).strip()), - nhcRef.group(2)) is not None: - - # Do not include the national reference - self._stormReference = "" - - def _expandBearings(self, text): - # Convert any abbreviated bearings to full words - text = text.replace(' n ', ' North ') - text = text.replace(' nne ', ' North-northeast ') - text = text.replace(' ne ', ' Northeast ') - text = text.replace(' ene ', ' East-northeast ') - text = text.replace(' e ', ' East ') - text = text.replace(' ese ', ' East-southeast ') - text = text.replace(' se ', ' Southeast ') - text = text.replace(' sse ', ' South-southeast ') - text = text.replace(' s ', ' South ') - text = text.replace(' ssw ', ' South-southwest ') - text = text.replace(' sw ', ' Southwest ') - text = text.replace(' wsw ', ' West-southwest ') - text = text.replace(' w ', ' West ') - text = text.replace(' wnw ', ' West-northwest ') - text = text.replace(' nw ', ' Northwest ') - text = text.replace(' nnw ', ' North-northwest ') - - return text 
- - # Modified 12/15/2010 (MHB) - modified to recognize the new way NHC will - # present metric speeds. Will continue to recognize the "old" way for - # testing purposes as well. - def _removeKM(self, words): - # Remove references to KM e.g. - # 420 KM... 100 KM/HR... - - self.debug_print("words = '%s'" % (words), 1) - - kmSearch = re.compile("\.\.\. *[0-9]+ +(KM|KM/HR?) *\.?\.?\.?") - - # Replace metric reference with a space to keep words from mashing - # together. - words = kmSearch.sub(" ", words) - - # Make sure we don't have any double space issues with this text - doubleSpaces = re.findall(' +', words) - for doubleSpace in doubleSpaces: - words = re.sub(doubleSpace, ' ', words) - - self.debug_print("\tfinal words = '%s'" % (words), 1) - return words - - def _cleanText(self, text=''): - # Cleans up text for easier string searches, but retains paragraphs - - # Replace all single characters with a space - text = re.sub("\n(?! *\n)", " ", text) - - # Ensure all text is only single-spaced - text = re.sub(" +", " ", text) - - # Remove all spaces at the start of a new paragraph - text = re.sub("(?m)^ +", "", text) - - # Do not allow any spaces after an ellipsis - text = re.sub("\.{3} +", "...", text) - - # Finally, ensure the paragraphs are put back - text = re.sub("\n", "\n\n", text) - - # Return the cleaned-up text - return text - - def _calcLocalReferences(self, lat0, lon0): - localRefs = [] - refList = self._LocalReferencePoints - #refList.append(("Grand Cayman", (19.2, -81.4))) - # Limit reference points - refLimit = self._referencePointLimit() - if len(refList) > refLimit: - refList = refList[0:refLimit] - for label, latLon in refList: - lat, lon = latLon - localRef = self._calcReference(lat0, lon0, lat, lon) - localRef = localRef + " of " + label - localRef = localRef.replace(",","") - localRefs.append(localRef) - return localRefs - - def _calcReference(self, lat0, lon0, lat1, lon1): - #return self._oldCalcReference(lat0, lon0, lat1, lon1) - distKm = 
self._distanceFromLatLon(lat0, lon0, lat1, lon1) - distMph = distKm * 0.62 - # Round to nearest 10 - distMph = self.round(distMph, "Nearest", 10) - distMph_str = `int((distMph/10)*10)` - #distKm_str = `int((distKm/10)*10)` - direction = self._bearing(lat1, lon1, lat0, lon0) - direction = self._dirInEnglish(direction) - localRef ="About "+distMph_str+" miles "+direction - self.debug_print("localRef = %s" % (localRef), 1) - return localRef - - # Returns the distance from lat0, lon0 to lat1, lon1 in kilometers - def _distanceFromLatLon(self, lat0, lon0, lat1, lon1): - R = 6371.0 - lat0 = math.radians(lat0) - lon0 = math.radians(lon0) - lat1 = math.radians(lat1) - lon1 = math.radians(lon1) - dist = math.acos(math.sin(lat0) * math.sin(lat1) + math.cos(lat0) * math.cos(lat1) * math.cos(lon1 - lon0)) * R - return dist - - def _bearing(self, lat0, lon0, lat1, lon1): - - dlat = math.radians((lat0 - lat1)) - dlon = math.radians((lon0 - lon1)) - - y = math.sin(dlon) * math.cos(math.radians(lat1)) - x = math.cos(math.radians(lat0)) * math.sin(math.radians(lat1)) - \ - (math.sin(math.radians(lat0)) * math.cos(math.radians(lat1)) * math.cos(dlon)) - - direction = math.degrees(math.atan2(x, y)) - 90.0 - if direction < 0.0: - direction = direction + 360.0 - direction = direction % 360 - - return direction - - def _dirInEnglish(self, direction): - dirList = ["north", "north-northeast", "northeast", "east-northeast", - "east", "east-southeast", "southeast", "south-southeast", - "south", "south-southwest", "southwest", "west-southwest", - "west", "west-northwest", "northwest", "north-northwest"] - dirIndex = int((direction + 11.25) / 22.5) - if dirIndex > 15: - dirIndex = dirIndex - 16 - return dirList[dirIndex] - - ############################################################### - ### GUI related methods - - def _overview_list(self): - if self._site == "HFO": - stormInfoOptions = ["TCPCP1", "TCPCP2", "TCPCP3", "TCPCP4", "TCPCP5"] - else: - stormInfoOptions = ["TCPAT1", "TCPAT2", 
"TCPAT3", "TCPAT4", "TCPAT5"] - - stormInfoOptions.append("Enter PIL below (e.g. WRKTCP):") - - return [ - { - "name": "ImpactsAnticipated", - "label": "Step 1. Potential Impacts Anticipated?", - "options": [ - ("Yes (NOTE: Any case other than dispel rumors must\n" - "have current TCP for storm in question)", True), - ("No (Dispel Rumors)", False), - ], - "default": "Yes (NOTE: Any case other than dispel rumors must\n" - "have current TCP for storm in question)", - }, - { - "name": "StormInfo", - "label": "Step 2. Obtain Storm Type/Name/Info", - "options": stormInfoOptions, - "entryField": " ", - }, - { - "name":"IncludedImpacts", - "label": "Step 3. Potential Impacts to Include and Order", - "optionType": "check", - "options": [ - ("Wind", 'windSection'), - ("Surge", 'surgeSection'), - ("Flooding Rain", 'floodingRainSection'), - ("Tornadoes", 'tornadoSection'), - ("Other Coastal Hazards", 'coastalHazardsSection') - ], - "default": ["Wind", "Surge", "Flooding Rain", "Tornadoes"], - }, - { - "name":"LocalReferencePoints", - "label": "Step 4. Locate Storm Relative to Local Reference Points\n(choose at most "\ - +self._referencePointLimit()[1]+")", - "optionType": "check", - "options": self._localReferencePoints(), - "default": self._localReferencePoints_defaults(), - }, - { - "name":"GeneralOnsetTime", - "label": "Step 5. General Time to Onset", - "options": [ - ("Watch", 'check plans'), - ("Warning", 'complete preparations'), - ("Conditions/Ongoing", 'hunker down'), - ("Recovery (After last TCV)", 'recovery'), - ], - }, - { - "name": "NextUpdate", - "label": "Step 6. Indicate Next Update Time", - "options": [ - ("As Conditions Warrant", "Conditions"), - ("Last Issuance", "LastIssuance"), - ("Enter Approximate Time (below)", "Enter") - ], - "default": "Enter Approximate Time (below)", - "entryField": " e.g. 6 AM EDT", - }, - { - "name": "MainHeadline", - "label": "Step 7. 
Input Main Headline (required)", - "options": [ - ("Enter Unique Headline (to right)", "Enter"), - ("Use Previous HLS Headline", "UsePrev"), - ("Use Latest TCP Headline", "UseTCP"), - ], - "entryField": " ", - }, - ] - - def _displayGUI(self, infoDict=None): - dialog = Overview_Dialog(self, "HLS", infoDict) - status = dialog.status() - LogStream.logVerbose("status="+status) - if status == "Cancel": - return None - else: - return dialog.getVarDict() - - def _frame(self, text): - return "|* " + text + " *|" - - -class Overview_Dialog(HLSTCV_Common.Common_Dialog): - def __init__(self, parent, title, infoDict=None): - HLSTCV_Common.Common_Dialog.__init__(self, parent, title, infoDict) - - def body(self, master): - # build the main display dialog - tkObject_dict = self._tkObject_dict - overviewList = self._parent._overview_list() - fontDict = self._parent._font_GUI_dict() - - # OVERVIEW header - headerFG, headerFont = fontDict["headers"] - frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - frame.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) - - numBoxes = 3 - - boxes = [] - for i in range(numBoxes): - newBox = Tkinter.Frame(master) - newBox.pack(side=Tkinter.TOP, expand=Tkinter.NO, - fill=Tkinter.Y, anchor=Tkinter.W) - boxes.append(newBox) - - for infoDict in overviewList: - name = infoDict["name"] - label = infoDict["label"] - options = infoDict.get("options", []) - entryField = infoDict.get("entryField", None) - default = infoDict.get("default", None) - optionType = infoDict.get("optionType", "radio") - - index = overviewList.index(infoDict) - if index in [0,1,2]: - boxNum = 0 - buttonSide=Tkinter.TOP - frameSide = Tkinter.LEFT - elif index in [3,4,5]: - boxNum = 1 -# buttonSide=Tkinter.LEFT -# frameSide=Tkinter.TOP - buttonSide=Tkinter.TOP - frameSide=Tkinter.LEFT - else: - boxNum = 2 - buttonSide=Tkinter.TOP - frameSide=Tkinter.LEFT - - box = boxes[boxNum] - - if name == "MainHeadline": entryField = None - - if name == 
"IncludedImpacts": - tkObject_dict[name] = self._makeStep3( - box, label, options, default, buttonSide=buttonSide, frameSide=frameSide, - entryField=entryField, headerFG=headerFG, - headerFont=headerFont) - else: - tkObject_dict[name], entryObject = self._makeRadioOrCheckList( - box, label, options, default, buttonSide=buttonSide, frameSide=frameSide, - entryField=entryField, headerFG=headerFG, - headerFont=headerFont, boxType=optionType) - if entryObject is not None: - tkObject_dict[self._entryName(name)] = entryObject - - if name == "MainHeadline": - frame = Tkinter.Frame(box, relief=Tkinter.GROOVE, borderwidth=1) - tkObject_dict[self._entryName(name)] = self._makeEntry(frame, "", 80) - frame.pack(fill=Tkinter.X, expand=Tkinter.YES) - - # Buttons - frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - self._makeButtons(frame) - frame.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) - - def _makeStep3(self, master, label, elementList, default=None, - buttonSide=Tkinter.TOP, frameSide=Tkinter.LEFT, entryField=None, - headerFG=None, headerFont=None, - listFrameRelief=Tkinter.GROOVE): - listFrame = Tkinter.Frame(master, relief=listFrameRelief, borderwidth=1) - - if label != "": - listLabel = Tkinter.Label(listFrame, text=label, fg=headerFG, font=headerFont) - listLabel.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO, padx=10) - - ivar = Tkinter.IntVar() - ivarEntryPairList = [] - for element in elementList: - index = elementList.index(element) - if type(element) is types.TupleType: - element, key = element - - ivar = Tkinter.IntVar() - if default is not None and element in default: ivar.set(1) - else: ivar.set(0) - - buttonFrame = Tkinter.Frame(listFrame) - - button= Tkinter.Checkbutton(buttonFrame, variable=ivar, text=element) - button.grid(row=0, column=0, sticky=Tkinter.W+Tkinter.E) - button.grid_columnconfigure(0, weight=1) - - svar = Tkinter.StringVar() - entry = Tkinter.Entry(buttonFrame, textvariable=svar, 
relief=Tkinter.SUNKEN, width=3) - entry.grid(row=0, column=1, sticky=Tkinter.E) - - ivarEntryPairList.append((ivar, svar)) - - buttonFrame.pack(side=buttonSide, fill=Tkinter.X, expand=Tkinter.YES, padx=4) - - noteLabel = Tkinter.Label(listFrame, text="Note: Check Hazards to include (left) and order number (right)") - noteLabel.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO, padx=10) - - # packing - listFrame.pack(side=frameSide, expand=Tkinter.NO, fill=Tkinter.Y) #, anchor=Tkinter.N) - - return ivarEntryPairList - - def _makeButtons(self, master): - frame = Tkinter.Frame(master) - buttonList = self._parent._GUI1_configDict().get("buttonList", []) - for button, label in buttonList: - if button == "Run": - command = self.okCB - else: # Cancel - command = self.cancelCB - Tkinter.Button(frame, text=label, command=command, width=10, - state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5, padx=10) - frame.pack() - - def okCB(self): - # pull the data from the tkObject_dict before they get toasted - tkObject_dict = self._tkObject_dict - overviewList = self._parent._overview_list() - print("in okCB!") - for infoDict in overviewList: - name = infoDict["name"] - label = infoDict["label"] - options = infoDict.get("options", []) - entryField = infoDict.get("entryField", None) - default = infoDict.get("default", None) - optionType = infoDict.get("optionType", "radio") - - if optionType == "check": - checkList = [] - ivarList = tkObject_dict[name] - for i in range(len(options)): - if name == "IncludedImpacts": - ivar, svar = ivarList[i] - if ivar.get(): - checkList.append((options[i], svar.get())) - else: - if ivarList[i].get(): - print("adding option = %s" % (self._pp.pformat(options[i]))) - checkList.append(options[i]) - value = checkList - self._setVarDict(name, value) - else: - value = tkObject_dict[name].get() - self._setVarDict(name, value, options) - - if entryField is not None: - entryName = self._entryName(name) - self._setVarDict(entryName, 
tkObject_dict[entryName].get()) - # close window and set status "Ok" - self._status = "Ok" - self.withdraw() - self.ok() - - -class LegacyFormatter(): - def __init__(self, textProduct): - self._textProduct = textProduct - self.TAB = " "*self._textProduct._tabLength - self._tpc = HLSTCV_Common.TextProductCommon() - - - def execute(self, productDict): - self.productDict = productDict - productParts = self._tpc.getVal(productDict, 'productParts', []) - text = self._processProductParts(productDict, productParts.get('partsList')) - return text - - def _processProductParts(self, productDict, productParts, skipParts=[]): - ''' - Adds the product parts to the product - @param productDict -- dictionary of information -- could be the product dictionary or a sub-part such as a segment - @param skipParts -- necessary to avoid repetition when calling this method recursively - @param productParts -- list of instances of the ProductPart class with information about how to format each product part - @return text -- product string - ''' - text = "" - self._textProduct.debug_print("productParts = %s" % (productParts)) - for part in productParts: - valtype = type(part) - if valtype is str: - name = part - elif valtype is tuple: - name = part[0] - infoDicts = part[1] - self._textProduct.debug_print("name = %s" % (name), 1) - self._textProduct.debug_print("infoDicts =\n\n%s\n" % (self._pp.pformat(infoDicts)), 1) - newtext = self.processSubParts(productDict.get(name), infoDicts) - self._textProduct.debug_print("newtext type = %s" % (type(newtext)), 1) - self._textProduct.debug_print("newtext =\n\n%s\b" % (self._pp.pformat(newtext)), 1) - text += newtext - continue - elif valtype is list: - self._textProduct.debug_print('GOT HERE -- found list', 1) - self._tpc.flush() - # TODO THIS SHOULD BE REMOVED AFTER THE REFACTOR OF HazardServicesProductGenerationHandler.JAVA - tup = (part[0], part[1]) - part = tup - name = part[0] - - - if name == 'wmoHeader': - text += 
self.processWmoHeader(productDict['wmoHeader']) - elif name == 'ugcHeader': - text += productDict['ugcHeader'] + "\n\n" - elif name == 'productHeader': - text += self.processProductHeader(productDict['productHeader']) - elif name == 'vtecRecords': - for vtecString in productDict['vtecRecords']: - text += vtecString + '\n' - elif name == 'areaList': - text += productDict['areaList'] + "\n\n" - elif name == 'issuanceTimeDate': - text += productDict['issuanceTimeDate'] + '\n\n' - elif name == 'summaryHeadlines': - text += self.processSummaryHeadlines(productDict['summaryHeadlines']) - elif name == "newInformationHeader": - header = "NEW INFORMATION" - text += header + "\n" + "-"*len(header) + "\n\n" - elif name == "changesHazards": - text += "* CHANGES TO WATCHES AND WARNINGS:\n" + \ - self.processHazards(productDict['changesHazards'], isChangesHazards=True) - elif name == "currentHazards": - text += "* CURRENT WATCHES AND WARNINGS:\n" + \ - self.processHazards(productDict['currentHazards'], isChangesHazards=False) - elif name == "stormInformation": - text += self.processStormInformation(productDict['stormInformation']) - elif name == "situationOverview": - text += self.processSituationOverview(productDict['situationOverview']) - elif name == "sigPotentialImpacts": - header = "POTENTIAL IMPACTS" - text += header + "\n" + "-"*len(header) + "\n\n" - if not self._textProduct._ImpactsAnticipated: - text += "None\n\n" - elif name in ['windSection', 'surgeSection', 'floodingRainSection', 'tornadoSection']: - text += self.processHazardsSection(productDict[name]) - elif name == "coastalHazardsSection": - text += "* OTHER COASTAL HAZARDS:\n" - text += self._textProduct.indentText(productDict[name], maxWidth=self._textProduct._lineLength) + "\n" - elif name == "preparednessSection": - header = productDict[name]['title'] - text += header + "\n" + "-"*len(header) + "\n\n" - if productDict[name]['genericAction'] is not None: - text += 
self._textProduct.indentText(productDict[name]['genericAction'], maxWidth=self._textProduct._lineLength) + "\n" - elif name == "evacuationStatements": - text += "* " + productDict[name]['title'].upper() + ":\n|* " - for statement in productDict[name]['statements']: - text += self._textProduct.indentText(statement, maxWidth=self._textProduct._lineLength) + "\n" - text += "*|\n" - elif name == "otherPreparednessActions": - text += "* " + productDict[name]['title'].upper() + ":\n|* " - for action in productDict[name]['actions']: - text += self._textProduct.indentText(action, maxWidth=self._textProduct._lineLength) + "\n" - text += "*|\n" - elif name == "additionalSourcesInfo": - text += "* " + productDict[name]['title'].upper() + ":\n" - for source in productDict[name]['sources']: - text += self._textProduct.indentText(source, maxWidth=self._textProduct._lineLength) - text += "\n" - elif name == "nextUpdate": - header = "NEXT UPDATE" - text += header + "\n" + "-"*len(header) + "\n\n" - text += self._textProduct.indentText(productDict[name], maxWidth=self._textProduct._lineLength) + "\n" - elif 'sectionHeader' in name: - text += "* " + productDict[name].upper() + "\n" - elif 'Subsection' in name: - text += self.processSubsection(productDict[name]) - elif name == 'infoSection': - text += self.processInfoSection(productDict['infoSection']) - elif name == 'endProduct': - text += '$$\n' - elif name == 'CR': - text += '\n' - elif name == 'doubleAmpersand': - text += '&&\n' - elif name not in self._noOpParts(): - textStr = productDict.get(name) - self._textProduct.debug_print("name = %s" % (name), 1) - self._textProduct.debug_print("textStr = '%s'" % (textStr), 1) - if textStr: - text += textStr + '\n' - return text - - def _noOpParts(self): - ''' - These represent product parts that should be skipped when calling product part methods. - They will be handled automatically by the formatters. 
- ''' - return ["setup_segment"] #['CR', 'endProduct', 'endSegment', 'issuanceDateTime', 'doubleAmpersand'] - - def processWmoHeader(self, wmoHeader): - text = wmoHeader['TTAAii'] + ' ' + wmoHeader['fullStationID'] + ' ' + wmoHeader['ddhhmmTime'] + '\n' - text += wmoHeader['productID'] + wmoHeader['siteID'] + '\n' - return text - - def processProductHeader(self, headerDict): - if not self._textProduct._ImpactsAnticipated: - text = "Tropical Local Statement\n" - text += "National Weather Service " + headerDict['cityState'] + '\n' - text += headerDict['issuanceTimeDate'] + '\n\n' - - else: - text = headerDict['stormType'] + ' ' + headerDict['stormName'] + ' ' + headerDict['productName'] - - advisoryText = '' - if headerDict['advisoryType'] is not None and \ - headerDict['advisoryType'].lower() in ["intermediate", "special"]: - advisoryText = headerDict['advisoryType'] + ' ' - - if headerDict['advisoryNumber'] is not None: - advisoryText += 'Advisory Number ' + headerDict['advisoryNumber'] - - if len(advisoryText) > 0: - if len(text + " " + advisoryText) > self._textProduct._lineLength: - text += '\n' - else: - text += ' ' - - text += advisoryText + '\n' - else: - text += '\n' - - text += "National Weather Service " + headerDict['cityState'] + " " + headerDict['stormNumber'] + '\n' - text += headerDict['issuanceTimeDate'] + '\n\n' - - return text - - def processSummaryHeadlines(self, headlinesList): - if headlinesList in [[], [""]]: - text = "**" + self._textProduct._frame("Enter headline here") + "**\n\n" - else: - text = "" - for headline in headlinesList: - text += self._textProduct.indentText("**" + headline + "** ", - maxWidth=self._textProduct._lineLength) - - text = self._textProduct._frame(text) + "\n\n" - - return text - - def processHazards(self, hazardsList, isChangesHazards): - text = "" - - if len(hazardsList) == 0: - if isChangesHazards and \ - self._textProduct._ImpactsAnticipated and \ - self._textProduct._GeneralOnsetTime == "recovery": - text = 
self.TAB + "- All watches and warnings have been canceled\n" - else: - text = self.TAB + "- None\n" - else: - # Group the hazards together by status, areas and headlines - groupedHazards = dict() - - # Grab the appropriate headlines for this section - actions = ['NEW', 'EXA'] # always want these - if isChangesHazards: - actions.append('CAN') # only want these for 'changes' - else: - actions.append('CON') # only want these for 'current' - - for hazard in hazardsList: - # If this is an action we want, update the active headline - if hazard['act'] in actions: - headline = hazard['phensig'][:4] - - # Group areas together which have identical hazards - groupedHazards = self._addToGroupedHazards(hazard, headline, groupedHazards) - - self._textProduct.debug_print("groupedHazards = %s" - % self._textProduct._pp.pformat(groupedHazards), 1) - - groupedHazards = self._consolidateGroupedHazards(groupedHazards, isChangesHazards) - - self._textProduct.debug_print("consolidated groupedHazards = %s" - % self._textProduct._pp.pformat(groupedHazards), 1) - - self._textProduct.debug_print("\n\nCreating text bullets for %s section..." 
- % ("CHANGES" if isChangesHazards else "CURRENT"), 1) - - # Create the hazard text using the grouped hazards - for (sortedAreas, hazards) in groupedHazards.items(): - self._textProduct.debug_print("Creating text for hazards covering %s (%s)" - % (sortedAreas, self._areaWords(sortedAreas)), 1) - - if isChangesHazards: - hazardTextParts = self._createChangesTextParts(hazards) - else: - hazardTextParts = self._createCurrentTextParts(hazards) - - self._textProduct.debug_print("hazardTextParts = %s" - % self._textProduct._pp.pformat(hazardTextParts), 1) - - groupedHazardText = self._textProduct.punctuateList(hazardTextParts) - groupedHazardText = groupedHazardText.replace(" and The ", " and the ") - groupedHazardText = groupedHazardText.replace(" and A ", " and a ") - groupedHazardText = groupedHazardText.replace(", The ", ", the ") - groupedHazardText = groupedHazardText.replace(", A ", ", a ") - - groupedHazardText += " for " + self._areaWords(sortedAreas) - - self._textProduct.debug_print("groupedHazardText = '%s'" % groupedHazardText, 1) - - text += self._textProduct.indentText(groupedHazardText, - indentFirstString = self.TAB + "- ", - indentNextString = self.TAB + " ", - maxWidth=self._textProduct._lineLength) - - self._textProduct.debug_print("text = '%s'" % text, 1) - - text += "\n" - - return text - - def _addToGroupedHazards(self, hazard, headline, groupedHazards): - # Only consider certain hazard statuses - status = hazard['act'] - if status not in ["CON", "NEW", "EXA", "CAN", "UPG"]: - return groupedHazards - - self._textProduct.debug_print("\n\nIn _addToGroupedHazardsList, adding a '%s' hazard" - % status, 1) - - areas = hazard['id'] - sortedAreas = tuple(sorted(areas)) - previousPhenSig = None - upgrades = dict() - - if status in ["NEW", "EXA"] and hazard.get('upgradeFrom', None) is not None: - - # The phensig of the hazard that got upgraded to this headline - previousPhenSig = hazard['upgradeFrom']['phensig'] - upgrades[headline] = set([previousPhenSig]) 
- - - self._textProduct.debug_print("Areas affected %s (%s)" - % (sortedAreas, self._areaWords(sortedAreas)), 1) - self._textProduct.debug_print("Headline = %s" % headline, 1) - self._textProduct.debug_print("previousPhenSig = %s" % previousPhenSig, 1) - self._textProduct.debug_print("upgrades = %s" % upgrades, 1) - - self._textProduct.debug_print("Trying to find where to put the hazard info...", 1) - - #======================================================================= - - if sortedAreas not in groupedHazards: - - self._textProduct.debug_print("Creating a new areas entry...", 1) - - groupedHazards[sortedAreas] = { - status: ([headline], upgrades), - } - - # If we already have a record for this area - else: - - self._textProduct.debug_print("Adding to an existing areas entry...", 1) - - if status == "UPG": - - # Reset the "previous" phensig - previousPhenSig = hazard['phensig'][:4] - self._textProduct.debug_print("Now previousPhenSig = %s" % \ - previousPhenSig, 1) - - # Determine the upgrade - for action in groupedHazards[sortedAreas]: - if action not in ['NEW', 'EXA']: - continue - - sortedHeadlines, upgrades = groupedHazards[sortedAreas][action] - - foundUpg = False - for hl in sortedHeadlines: - if VTECTable.upgradeHazardsDict.has_key(hl) and \ - previousPhenSig in VTECTable.upgradeHazardsDict[hl]: - headline = hl - foundUpg = True - break - - if foundUpg: - # add to upgrades and we are done - upgrades.setdefault(headline, set()).add(previousPhenSig) - break - - else: - (sortedHeadlines, upgrades) = groupedHazards[sortedAreas]\ - .setdefault(status, ([], {})) - - sortedHeadlines.append(headline) - sortedHeadlines.sort(self._sortHazardsType) - - if (previousPhenSig is not None and - headline in VTECTable.upgradeHazardsDict and - previousPhenSig in VTECTable.upgradeHazardsDict[headline]): - - # add to upgrades - upgrades.setdefault(headline, set()).add(previousPhenSig) - - - return groupedHazards - - # Method to sort tropical headlines by priority, then type 
- def _sortHazardsType(self, a, b): - - # Warnings always go first - if ".W" in a and ".W" not in b: - return -1 - elif ".W" not in a and ".W" in b: - return 1 - else: - - # Both have the same priority of hazard, now sort by type - - # Storm Surge headlines first - # just need to look for Warning vs. Watch) - if "SS." in a and "SS." not in b: - return -1 - elif "SS." not in a and "SS." in b: - return 1 - elif "SS." in a and "SS." in b: - return 0 - - # Hurricane headlines next - # (Surge already accounted for, just need to check for Tropical) - if "HU." in a and "TR." in b: - return -1 - elif "TR." in a and "HU." in b: - return 1 - elif "HU." in a and "HU." in b: - return 0 - - # Tropical Storm headlines last - if "TR." not in a and "TR." in b: - return -1 - elif "TR." in a and "TR." not in b: - return 1 - elif "TR." in a and "TR." in b: - return 0 - - - def _consolidateGroupedHazards(self, groupedHazards, isChangesHazards): - """Combine areas that share the same headlines and status""" - - self._textProduct.debug_print("Trying to consolidate grouped hazards...", 1) - - newGroupedHazards = dict() - - for sortedAreas1, hazards1 in groupedHazards.items(): - self._textProduct.debug_print("Working on areas %s..." 
- % self._textProduct._pp.pformat(sortedAreas1), 1) - - # Clean up use of EXA - if not isChangesHazards: - newHazards1 = dict() - - # Current section, we don't care about upgrades - newUpgrades = dict() - - for (action, (hazard, upgrades)) in hazards1.items(): - - if action in ["NEW", "EXA"]: - action = "CON" - - if action in newHazards1: - newHazards1[action][0].extend(hazard) - newHazards1[action][0].sort(self._sortHazardsType) - -# newHazards1[action] = (newHazards1[action][0], dict()) - -# for phensig, upgrade in upgrades.items(): -# -# curUpgrade = newHazard1[action][1].get(phensig, set()) -# newHazard1[action][1][phensig] = curUpgrade | upgrade -# newHazards1[action][1] = newUpgrades - else: - newHazards1[action] = (hazard, newUpgrades) - - - # Reset the dictionary - hazards1 = newHazards1 - -# print "Done with merging EXA and CON" - - foundMatchingInfo = False - for sortedAreas2, hazards2 in newGroupedHazards.items(): - - if hazards1 == hazards2: - self._textProduct.debug_print("Combining areas %s and %s..." 
- % (self._textProduct._pp.pformat(sortedAreas1), - self._textProduct._pp.pformat(sortedAreas2)), - 1) - - # Both areas contain the same information, combine the areas and remove duplicates - combinedAreas = tuple(sorted(set(sortedAreas1 + sortedAreas2))) - newGroupedHazards[combinedAreas] = hazards1 - - # Remove the old, uncombined areas - del newGroupedHazards[sortedAreas2] - - foundMatchingInfo = True - break - - if not foundMatchingInfo: - self._textProduct.debug_print("Adding new area entry...", 1) - newGroupedHazards[sortedAreas1] = hazards1 - - return newGroupedHazards - - def _createChangesTextParts(self, hazards): - self._textProduct.debug_print("hazards = %s" - % self._textProduct._pp.pformat(hazards), 1) - - hazardTextParts = [] - for status, (sortedPhensigs, upgrades) in hazards.items(): - - self._textProduct.debug_print("status = %s" % status, 1) - self._textProduct.debug_print("upgrades = %s" % upgrades, 1) - self._textProduct.debug_print("sortedPhensigs = %s" % sortedPhensigs, 1) - - # Convert the headlines from VTEC into text - sortedHeadlines = [] - for phensig in sortedPhensigs: - if VTECTable.VTECTable[phensig]['hdln'] not in sortedHeadlines: - sortedHeadlines.append(VTECTable.VTECTable[phensig]['hdln']) - - self._textProduct.debug_print("sortedHeadlines = %s" % sortedHeadlines, 1) - - hasText = " has " - # If there is more than one hazard - if len(sortedHeadlines) > 1: - hasText = " have " - - if status == "CAN": - - canHeadlines = self._textProduct.punctuateList(sortedHeadlines) - self._textProduct.debug_print("canHeadlines = '%s'" % canHeadlines, 1) - print("canHeadlines = '%s'" % canHeadlines) - - canText = "The " + canHeadlines + hasText + "been cancelled" - self._textProduct.debug_print("Result: '%s'" % canText, 1) - hazardTextParts.append(canText) - - elif status in ["NEW", "EXA"]: - if len(upgrades) > 0: - upgradeTextParts = [] - for phensig, previousPhenSig in upgrades.items(): - headline = VTECTable.VTECTable[phensig]['hdln'] - 
upgradeList = [] - - # Convert the list of VTEC codes to a headline - for prevPhenSig in previousPhenSig: - curHeadline = VTECTable.VTECTable[prevPhenSig]['hdln'] - if curHeadline not in upgradeList: - upgradeList.append(curHeadline) - - upgradedHeadlines = self._textProduct.punctuateList(upgradeList) - - upgHasText = " has " - # If there are no 'and's - if len(upgradeList) > 1: - upgHasText = " have " - - upgradeTextParts.append("The " + upgradedHeadlines + - upgHasText + "been upgraded to a " + headline) - - # Make sure we don't repeat information multiple times in the same section - try: - sortedHeadlines.remove(headline) - except: - print "*** Warning: attempt to remove \"%s\" from %s"\ - % (headline, sortedHeadlines) - - upgradesText = self._textProduct.punctuateList(upgradeTextParts) - - self._textProduct.debug_print("Changes Result: '%s'" % upgradesText, 1) - hazardTextParts.append(upgradesText) - - # NEW and EXA hazards can have both upgrades and headlines associated with them - if len(sortedHeadlines) > 0: - headlines = self._textProduct.punctuateList(sortedHeadlines) - self._textProduct.debug_print("headlines = '%s'" % headlines, 1) - - newExaChangesText = "A " + headlines + hasText + "been issued" - self._textProduct.debug_print("Changes Result: '%s'" % newExaChangesText, 1) - hazardTextParts.append(newExaChangesText) - - return hazardTextParts - - def _createCurrentTextParts(self, hazards): - self._textProduct.debug_print("hazards = %s" - % self._textProduct._pp.pformat(hazards), 1) - - hazardTextParts = [] - sortedHeadlines = [] - - for status, (sortedPhensigs, upgrades) in hazards.items(): - - self._textProduct.debug_print("status = %s" % status, 1) - self._textProduct.debug_print("upgrades = %s" % upgrades, 1) - self._textProduct.debug_print("sortedPhensigs = %s" % sortedPhensigs, 1) - - # Convert the headlines from VTEC into text - for phensig in sortedPhensigs: - if VTECTable.VTECTable[phensig]['hdln'] not in sortedHeadlines: - 
sortedHeadlines.append(VTECTable.VTECTable[phensig]['hdln']) - - self._textProduct.debug_print("sortedHeadlines = %s" % sortedHeadlines, 1) - - headlines = self._textProduct.punctuateList(sortedHeadlines) - self._textProduct.debug_print("headlines = '%s'" % headlines, 1) - - isText = " is " - # If there is more than one hazard - if len(sortedHeadlines) > 1: - isText = " are " - - if status in ["NEW", "EXA", "CON"]: - # NEW and EXA hazards can have both upgrades and headlines associated with them - if len(sortedHeadlines) > 0: - - # If we are all done processing this group - numKeys = len(hazards.keys()) - numHeadlines = len(sortedHeadlines) - numPhensigs = len(sortedPhensigs) -# print "Current test keys = %d headlines = %d phensigs = %d" % (numKeys, numHeadlines, numPhensigs) - - if (numKeys == numHeadlines) or (numKeys == 1 and numHeadlines == numPhensigs): - newExaCurrentText = "A " + headlines + isText + "in effect" - self._textProduct.debug_print("Current Result: '%s'" % newExaCurrentText, 1) - hazardTextParts.append(newExaCurrentText) - - return hazardTextParts - - def _areaWords(self, sortedAreas): - if sortedAreas == tuple(): - return "" - names = [] - areaDict = self._textProduct._areaDict - for area in sortedAreas: - name = areaDict[area].get('altName', areaDict[area].get('ugcName', '')) - names.append(name) - names.sort() - areaWords = self._textProduct.formatCountyString("", names)[1:] - return areaWords - - def processStormInformation(self, stormInfoDict): - text = "* STORM INFORMATION:\n" - - if len(stormInfoDict) == 0: - text += self.TAB + "- None\n\n" - else: - referenceText = " or ".join(stormInfoDict['references']) + "\n" - referenceText = referenceText.replace(" or About", " or about" ) - - text += self._textProduct.indentText(referenceText, - indentFirstString = self.TAB + "- ", - indentNextString = self.TAB + " ", - maxWidth=self._textProduct._lineLength) - - (lat, lon) = stormInfoDict['location'] - text += self.TAB + "- " + lat + " " + lon + 
"\n" - - text += self.TAB + "- " + stormInfoDict['intensity'] + "\n" - - text += self.TAB + "- " + stormInfoDict['movement'] + "\n\n" - - return text - - def processSituationOverview(self, overviewText): - title = "SITUATION OVERVIEW" - text = title + "\n" + "-"*len(title) + "\n\n" - - text += self._textProduct.endline(overviewText, linelength=self._textProduct._lineLength) - text += "\n" - - return text - - def processHazardsSection(self, sectionDict): - text = "* " + sectionDict['title'].upper() + ":\n" - - impactRangeText = sectionDict['impactRange'] - text += self._textProduct.indentText(impactRangeText, maxWidth=self._textProduct._lineLength) - - if self._textProduct._GeneralOnsetTime == "recovery" and len(sectionDict['impactLib']) != 0: - text += "|*\n" - - for impact in sectionDict['impactLib']: - text += self._textProduct.indentText(impact, - indentFirstString = self.TAB + "- ", - indentNextString = self.TAB + " ", - maxWidth=self._textProduct._lineLength) - - if self._textProduct._GeneralOnsetTime == "recovery" and len(sectionDict['impactLib']) != 0: - text += "*|\n" - - if len(sectionDict['additionalImpactRange']) != 0: - text += "\n" - - additionalImpactRangeText = "" - curAdditionalImpactText = "" - count = 1 - - self._textProduct.debug_print("DEBUG: %d sectionDict['additionalImpactRange'] = '%s'" % (len(sectionDict['additionalImpactRange']), sectionDict['additionalImpactRange'])) - for additionalImpact in sectionDict['additionalImpactRange']: - - self._textProduct.debug_print("additionalImpact = '%s'" % (additionalImpact)) - self._textProduct.debug_print("count = %d" % (count)) - - curAdditionalImpactText += \ - self._textProduct.indentText(additionalImpact, - maxWidth=self._textProduct._lineLength) - - if count != len(sectionDict['additionalImpactRange']) and \ - len(curAdditionalImpactText) > 0: - curAdditionalImpactText += "\n" - - self._textProduct.debug_print("DEBUG: curAdditionalImpactText ='%s'" % (curAdditionalImpactText)) - - count += 1 - - # 
If this additional impact is not already included in the output - if additionalImpactRangeText.find(curAdditionalImpactText) == -1: - - # Add this additional impact text - self._textProduct.debug_print("Adding current impact. '%s'" % (curAdditionalImpactText)) - additionalImpactRangeText += curAdditionalImpactText - - text += additionalImpactRangeText - - text += "\n" - return text - - def processSubParts(self, subParts, infoDicts): - """ - Generates Legacy text from a list of subParts e.g. segments or sections - @param subParts: a list of dictionaries for each subPart - @param partsLists: a list of Product Parts for each segment - @return: Returns the legacy text of the subParts - """ - text = '' - for i in range(len(subParts)): - self._textProduct.debug_print("subpart subParts[i] = %s" % (subParts[i])) - self._textProduct.debug_print("subpart infoDicts[i] = %s" % (infoDicts[i])) - newtext = self._processProductParts(subParts[i], infoDicts[i].get('partsList')) - self._textProduct.debug_print("subpart newtext type = %s" % (type(newtext))) - self._textProduct.debug_print("subpart newtext = '%s'" % (self._pp.pformat(newtext))) - text += newtext - - return text +# Version 2018.06.05 + +import GenericHazards +import string, time, os, re, types, copy, LogStream, collections +import ModuleAccessor, SampleAnalysis, EditAreaUtils, VTECTable +import math +import tkinter +import LocalizationSupport + +from AbsTime import * +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID +from com.raytheon.uf.common.dataplugin.gfe.grid import Grid2DBit as JavaGrid2DBit +from six.moves import reduce + +AWIPS_ENVIRON = "AWIPS2" + +import HLSTCV_Common + +class TextProduct(HLSTCV_Common.TextProduct): + Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) + + Definition["displayName"] = "None" + Definition["outputFile"] = "{prddir}/TEXT/HLS.txt" + Definition["database"] = "Official" # Source database + Definition["debug"] = 1 + 
Definition["mapNameForCombinations"] = "Zones_" + Definition["defaultEditAreas"] = "Combinations_HLS_" + Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display + + Definition["productName"] = "Local Statement" + + Definition["fullStationID" ] = "" + Definition["wmoID" ] = "" + Definition["wfoCityState" ] = "" + Definition["pil" ] = "" + Definition["textdbPil" ] = "" + Definition["awipsWANPil" ] = "" + Definition["site"] = "" + Definition["wfoCity"] = "" + + Definition["areaName"] = "" #optional area name for product + Definition["areaDictionary"] = "AreaDictionary" + Definition["language"] = "english" + Definition["lineLength"] = 71 #Maximum line length + Definition["tabLength"] = 4 + + Definition["purgeTime"] = 8 # Default Expiration in hours if + Definition["includeZoneNames"] = 1 # Zone names will be included in the area header + Definition["includeIssueTime"] = 0 # Issue Time will be included in the area header + Definition["easPhrase"] = \ + "URGENT - IMMEDIATE BROADCAST REQUESTED" # Optional EAS phrase to be include in product header + Definition["callToAction"] = 1 + + # Add options for debugging + Definition["debug"] = { + #TextProduct + "__init__": 0, + "_inlandAreas": 0, + "_coastalAreas": 0, + "_cwa": 0, + "_cwa_descriptor": 0, + "_localReferencePoints": 0, + "_localReferencePoints_defaults": 0, + "_referencePointLimit": 0, + "_productParts_HLS": 0, + "_analysisList_HLS": 0, + "_analysisList_HLS_WholeDomain": 0, + "_intersectAnalysisList_HLS": 0, + "generateForecast": 0, + "_initializeVariables": 0, + "_initializeHeadlines": 0, + "_initializeSamplingDict": 0, + "_noOpParts": 0, + "_areaList": 0, + "_summaryHeadlines": 0, + "_changesHazards": 0, + "_currentHazards": 0, + "_stormInformation": 0, + "_situationOverview": 0, + "_windSection": 0, + "_surgeSection": 0, + "_floodingRainSection": 0, + "_tornadoSection": 0, + "_coastalHazardsSection": 0, + "_preparednessSection": 0, + "_evacuationStatements": 0, + "_otherPreparednessActions": 0, 
+ "_additionalSourcesInfo": 0, + "_nextUpdate": 0, + "_impactsKeyFunction": 0, + "_getPotentialImpactsStatements": 0, + "_impactCategoryToThreatLevel": 0, + "_determineHazardStates": 0, + "_sampleHLSData": 0, + "_sampleTCVAdvisory": 0, + "_sampleRankedDiscreteValue": 0, + "_sampleMostSignificantDiscreteValue": 0, + "_getDominantThreatLevel": 0, + "_getHighestThreat": 0, + "_getLowestThreat": 0, + "_setHazardImpactCategories": 0, + "_createWholeDomainEditArea": 0, + "_determineHazards": 0, + "_formatLocalTime": 0, + "_getTimeZoneList": 0, + "_grabHeadline": 0, + "_getStormInfo": 0, + "_grabStormInfo": 0, + "_decodeStormInfo": 0, + "_expandBearings": 0, + "_removeKM": 0, + "_cleanText": 0, + "_calcLocalReferences": 0, + "_calcReference": 0, + "_distanceFromLatLon": 0, + "_bearing": 0, + "_dirInEnglish": 0, + "_overview_list": 0, + "_displayGUI": 0, + "_frame": 0, + + #HLSTCV_Common + "allowedHazards": 0, + "allowedHeadlines": 0, + "_initializeVariables": 0, + "moderated_dict": 0, + "_wmoHeader": 0, + "_productHeader": 0, + "_ugcHeader": 0, + "_processProductParts": 0, + "_createProductDictionary": 0, + "_initializeProductDictionary": 0, + "_formatProductDictionary": 0, + "_getStatValue": 0, + "_allAreas": 0, + "_groupSegments": 0, + "_getSegmentVTECRecordsTuples": 0, + "_computeIntersectAreas": 0, + "_initializeHazardsTable": 0, + "_getHazardsTable": 0, + "_ignoreActions": 0, + "_setVTECActiveTable": 0, + "_getVtecRecords": 0, + "_getAllowedHazardList": 0, + "_altFilterMethod": 0, + "_filterHazards": 0, + "_getAdditionalHazards": 0, + "_checkHazard": 0, + "_initializeTimeVariables": 0, + "_determineTimeRanges": 0, + "_createPeriodList": 0, + "_calculateStartTime": 0, + "_formatPeriod": 0, + "_getTimeDesc": 0, + "_getPartOfDay": 0, + "_initializeStormInformation": 0, + "_parseTCP": 0, + "_getStormTypeFromTCP": 0, + "_getStormNameFromTCP": 0, + "_getAdvisoryTypeFromTCP": 0, + "_getAdvisoryNumberStringFromTCP": 0, + "_getStormNumberStringFromTCP": 0, + 
"_getStormIDStringFromTCP": 0, + "_useTestTCP": 0, + "_testTCP": 0, + "_initializeAdvisories": 0, + "_synchronizeAdvisories": 0, + "_getLocalAdvisoryDirectoryPath": 0, + "_getStormAdvisoryNames": 0, + "_loadLastTwoAdvisories": 0, + "_loadAdvisory": 0, + "_getAdvisoryPath": 0, + "_getAdvisoryFilename": 0, + "_processVariableList": 0, + "_GUI_sizing_dict": 0, + "_GUI1_configDict": 0, + "_font_GUI_dict": 0, + + #Overview_Dialog + "body": 0, + "_makeStep3": 0, + "_makeButtons": 0, + "okCB": 0, + + #Common_Dialog + "getVarDict": 0, + "_makeRadioOrCheckList": 0, + "_makeEntry": 0, + "cancelCB": 0, + "_entryName": 0, + "_makeTuple": 0, + "_setVarDict": 0, + "status": 0, + "buttonbox": 0, + + #LegacyFormatter + "execute": 0, + "_processProductParts": 0, + "processWmoHeader": 0, + "processProductHeader": 0, + "processSummaryHeadlines": 0, + "processHazards": 1, + "_addToGroupedHazards": 1, + "_sortHazardsType": 0, + "_consolidateGroupedHazards": 1, + "_createHazardTextParts": 0, + "_areaWords": 0, + "processStormInformation": 0, + "processSituationOverview": 0, + "processHazardsSection": 0, + "processSubParts": 0, + + #TextProductCommon + "setUp": 0, + "hazardTimeZones": 0, + "getExpireTime": 0, + "getHeadlinesAndSections": 0, + "formatUGCs": 0, + "getFormattedTime": 0, + "formatUGC_names": 0, + "formatNameString": 0, + "getVal": 0, + "formatDatetime": 0, + "flush": 0, + "makeUGCString": 0, + "checkLastArrow": 0, + } + +# Definition["debug"] = 1 # turn on ALL debug messages + Definition["debug"] = 0 # turn off ALL debug messages + + def __init__(self): + HLSTCV_Common.TextProduct.__init__(self) + + ##################################################################### + ##################################################################### + ### Organization of Formatter Code + + ############################################################### + ### MUST OVERRIDE DEFINITIONS !!! 
+ ### _inlandAreas, _coastalAreas, _cwa, _cwa_descriptor, + ### _localReferencePoints, _localReferencePoints_defaults + ############################################################### + + ############################################################### + ### Optional Overrides + ### _referencePointLimit + ############################################################### + + ############################################################### + ### HLS Product and Segment Parts Definition + ############################################################### + + ############################################################### + ### Analysis Lists, SampleAnalysis Overrides and other + ### analysis related methods + ############################################################### + + ############################################################### + # CODE + ############################################################### + ### High level flow of formatter + ### generateForecast, _initializeVariables, + ### _loadLastTwoAdvisories, _determineTimeRanges, + ### _initializeSamplingDict, _sampleTCVAdvisory, + ### _sampleHLSData, _determineHazardStates, + ### _setHazardImpactCategories, _createProductDictionary, + ### _formatProductDictionary + ############################################################### + + ############################################################### + ### Product Parts Implementation + ############################################################### + + ############################################################### + ### Sampling and Statistics related methods + ############################################################### + + ############################################################### + ### Area, Zone and Segment related methods + ############################################################### + + ############################################################### + ### Hazards related methods + ############################################################### + + 
############################################################### + ### Time related methods + ############################################################### + + ############################################################### + ### Storm Information and TCP related methods + ############################################################### + + ############################################################### + ### GUI related methods + ############################################################### + + + ############################################################### + ### MUST OVERRIDE DEFINITIONS !!! + + def _inlandAreas(self): + return [ + #"FLZ063", "FLZ066", "FLZ067", "FLZ068", "FLZ070", + #"FLZ071", "FLZ072", "FLZ073", "FLZ074", + ] + + def _coastalAreas(self): + return [ + #"FLZ069", "FLZ075", "FLZ168", "FLZ172", "FLZ173", "FLZ174", + ] + + def _cwa(self): + return "" #"MFL" + + def _cwa_descriptor(self): + return "" #"South Florida" + + def _localReferencePoints(self): + # Give the name and lat/lon for each local reference point + return [ + #("West Palm Beach, FL", (26.71, -80.06)), + #("Fort Lauderdale, FL", (26.12, -80.15)), + #("Miami, FL", (25.77, -80.20)), + #("Miami Beach, FL", (25.81, -80.13)), + #("Naples, FL", (26.14, -81.80)), + #("Marco Island, FL", (25.94, -81.73)), + ] + + def _localReferencePoints_defaults(self): + # Give a list of the local reference point names to be + # turned on by default + return [] #["Miami, FL", "Naples, FL"] + + ############################################################### + ### Optional Overrides + + def _referencePointLimit(self): + # Give the number of reference points allowed to be chosen + # Also give a label (e.g. 
"two") for the GUI + return (2, "two") + + ############################################################### + ### HLS Product and Segment Parts Definition + + def _productParts_HLS(self, segment_vtecRecords_tuples): + partsList = [ + 'wmoHeader', + 'ugcHeader', + 'productHeader', + 'areaList', + 'summaryHeadlines', + 'newInformationHeader', + 'changesHazards', + 'currentHazards', + 'stormInformation', + 'situationOverview', + 'sigPotentialImpacts', + ] + + if self._ImpactsAnticipated: + includedImpacts = sorted(self._IncludedImpacts, key=self._impactsKeyFunction) + for ((_, sectionName), _) in includedImpacts: + self.debug_print("adding section = '%s'" % (sectionName), 1) + partsList.append(sectionName) + + partsList.append('preparednessSection') + + if self._ImpactsAnticipated: + partsList.append('evacuationStatements') + partsList.append('otherPreparednessActions') + partsList.append('additionalSourcesInfo') + + partsList.append('nextUpdate') + partsList.append('endProduct') + + self.debug_print("Product Parts partsList =\n\n%s\n" % (self._pp.pformat(partsList)), 1) + + return { + 'partsList': partsList + } + + ############################################################### + ### Analysis Lists, SampleAnalysis Overrides and other + ### analysis related methods + + def _analysisList_HLS(self): + # Sample over 120 hours beginning at current time + analysisList = [ + # Wind Section + ("WindThreat", self.rankedDiscreteValue), + ("WindThreat", self.mostSignificantDiscreteValue), + + # Flooding Rain Section + ("QPFtoFFGRatio", self.moderatedMax, [6]), + ("FloodingRainThreat", self.rankedDiscreteValue), + ("FloodingRainThreat", self.mostSignificantDiscreteValue), + + # Tornado Section + ("TornadoThreat", self.rankedDiscreteValue), + ("TornadoThreat", self.mostSignificantDiscreteValue), + ] + + return analysisList + + def _analysisList_HLS_WholeDomain(self): + # Sample over 120 hours beginning at current time + analysisList = [ + # Wind Section + ("Wind", 
self.vectorModeratedMax, [6]), + ] + + return analysisList + + def _intersectAnalysisList_HLS(self): + # The grids for the Surge Section will be intersected with a special edit area + analysisList = [ + ("InundationMax", self.moderatedMax, [6]), + ("StormSurgeThreat", self.rankedDiscreteValue), + ("StormSurgeThreat", self.mostSignificantDiscreteValue), + ] + + return analysisList + + ############################################################### + ### High level flow of formatter + + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + error = self._initializeVariables(argDict) + if error is not None: + return error + + if self._stormName is None or self._stormName == "": + return "Could not determine the storm name" + + + if self._ImpactsAnticipated: + self._loadLastTwoAdvisories() + if (self._previousAdvisory is None or \ + not self._previousAdvisoryMatchesNumber): + return "A TCV must be transmitted before an HLS can be run" + + if len(self._IncludedImpacts) == 0: + return "At least one potential impact section needs to be included." 
+ + # Determine time ranges + self._determineTimeRanges(argDict) + + if self._ImpactsAnticipated: + + # Sample the data + self._initializeSamplingDict() + self._sampleTCVAdvisory(self._previousAdvisory) + self._sampleHLSData(argDict) + + self._determineHazardStates() + + for threatName in ['WindThreat', 'StormSurgeThreat', 'FloodingRainThreat', 'TornadoThreat']: + self._setHazardImpactCategories(threatName) + + # Create the product dictionary and format it to create the output + productDict = self._createProductDictionary(self._productParts_HLS, + self._allAreas(), + areProductPartsSegmented=False) + productOutput = self._formatProductDictionary(LegacyFormatter, productDict) + + return productOutput + + def _initializeVariables(self, argDict): + error = HLSTCV_Common.TextProduct._initializeVariables(self, argDict) + if error is not None: + return error + + self._getStormInfo(argDict) + + self._initializeHeadlines() + + #======================================================================= + # Now produce a UGC header using only the WFO selected zones + #======================================================================= + + # Get the Combinations file for the HLS + accessor = ModuleAccessor.ModuleAccessor() + self.debug_print("self._defaultEditAreas = %s" % (self._pp.pformat(self._defaultEditAreas)), 1) + + # combos is a list of tuples. Each tuple is a grouping of zones + # (a list of zones, combo name). 
+ combos = accessor.variable(self._defaultEditAreas, "Combinations") + + # If we could not find a Combinations file for the HLS + if combos is None: + LogStream.logVerbose("Combination file not found: " + self._pp.pformat(self._defaultEditAreas)) + + # Default to using the entire CWA + self._ugcs = sorted(self._allAreas()) + + # Otherwise, construct the final list of WFO selected zones + else: + self.debug_print("Segments from Zone Combiner = %s" % (self._pp.pformat(combos)), 1) + + # Create a list containing all zones from all combination groups + selectedZones = reduce(lambda zones, combo: zones + combo[0], + combos, + []) + + # Use the selected zones for the UGC header + self._ugcs = sorted(selectedZones) + + self.debug_print("Final Zones for UGC header = %s" % (self._pp.pformat(self._ugcs)), 1) + + return None + + def _initializeHeadlines(self): + if self._MainHeadline == "Enter": + self._headlines = [self._MainHeadline_entry] + elif self._MainHeadline == "UsePrev": + self._prevHLS = self.getPreviousProduct(self._textdbPil) + self._headlines = [self._grabHeadline(self._prevHLS)] + elif self._MainHeadline == "UseTCP": + try: + self._headlines = [self._grabHeadline(self._TCP)] + except: + self._headlines = [] + + def _initializeSamplingDict(self): + self._samplingDict = dict() + statsDict = dict() + statsDict['catastrophicThreshold'] = None + statsDict['decidingField'] = None + statsDict['inputThreatLow'] = None + statsDict['inputThreatHigh'] = None + statsDict['inputThreatDominant'] = None + statsDict['impactMin'] = None + statsDict['impactMax'] = None + statsDict['impactRange'] = None + statsDict['impactRangeMax'] = None + + self._samplingDict['WindThreat'] = copy.copy(statsDict) + self._samplingDict['StormSurgeThreat'] = copy.copy(statsDict) + self._samplingDict['FloodingRainThreat'] = copy.copy(statsDict) + self._samplingDict['TornadoThreat'] = copy.copy(statsDict) + + self._samplingDict['WindThreat']['catastrophicThreshold'] = 137 # knots + 
self._samplingDict['StormSurgeThreat']['catastrophicThreshold'] = 14 # feet + self._samplingDict['FloodingRainThreat']['catastrophicThreshold'] = 3 # percent + + ############################################################### + ### Product Parts Implementation + + def _noOpParts(self): + ''' + These represent product parts that should be skipped when calling product part methods. + They will be handled automatically by the formatters. + ''' + return ['CR', 'endProduct', 'endSegment', 'doubleAmpersand', 'newInformationHeader', 'sigPotentialImpacts'] + + ################# Product Level + + def _areaList(self, productDict, productSegmentGroup, productSegment): + productDict['areaList'] = "This product covers " + self._cwa_descriptor() + + def _summaryHeadlines(self, productDict, productSegmentGroup, productSegment): + productDict['summaryHeadlines'] = self._headlines + + def _changesHazards(self, productDict, productSegmentGroup, productSegment): + if (not self._ImpactsAnticipated) or \ + (self._ImpactsAnticipated and self._GeneralOnsetTime == "recovery"): + productDict['changesHazards'] = [] + else: + productDict['changesHazards'] = self._changesHazardsList + + def _currentHazards(self, productDict, productSegmentGroup, productSegment): + if (not self._ImpactsAnticipated) or \ + (self._ImpactsAnticipated and self._GeneralOnsetTime == "recovery"): + productDict['currentHazards'] = [] + else: + productDict['currentHazards'] = self._currentHazardsList + + def _stormInformation(self, productDict, productSegmentGroup, productSegment): + stormInfoDict = dict() + if self._ImpactsAnticipated: + stormInfoDict['references'] = self._stormLocalReferences + stormInfoDict['location'] = self._stormLocation + stormInfoDict['intensity'] = self._stormIntensityTrend + stormInfoDict['movement'] = self._stormMovementTrend + productDict['stormInformation'] = stormInfoDict + + def _situationOverview(self, productDict, productSegmentGroup, productSegment): + # Use generic text for the 
situation overview + productDict['situationOverview'] = self._frame("Succinctly describe the expected evolution of the event for the cwa; which hazards are of greater (or lesser) concern, forecast focus, etc.") + + # Get the WRKHLS product minus header that has the situation overview we want + wrkhlsProduct = self.getPreviousProduct("WRKHLS")[40:] + + # If we found the overview + if len(wrkhlsProduct) > 0: + # Clean and frame the imported overview and use it instead of the generic text + productDict['situationOverview'] = self._frame(self._cleanText(wrkhlsProduct.strip())) + + def _windSection(self, productDict, productSegmentGroup, productSegment): + sectionDict = dict() + sectionDict['title'] = "Wind" + sectionDict['impactRange'] = "" + sectionDict['impactLib'] = [] + sectionDict['additionalImpactRange'] = [] + + impactMin = self._samplingDict['WindThreat']['impactMin'] + impactMax = self._samplingDict['WindThreat']['impactMax'] + impactRange = self._samplingDict['WindThreat']['impactRange'] + impactRangeMax = self._samplingDict['WindThreat']['impactRangeMax'] + inputThreatDominant = self._samplingDict['WindThreat']['inputThreatDominant'] + + # Test the simplest case first + if impactMin == "none" and impactMax == "none": + sectionDict['impactRange'] = impactRange + productDict['windSection'] = sectionDict + return + + qualifier = self._getImpactsQualifier(impactMax) + + # If there is only one impact across the entire CWA, and it is the max + if impactMax != "none" and impactMin == impactMax and inputThreatDominant != "None": + if self._GeneralOnsetTime == "check plans": + sectionDict['impactRange'] = "Prepare for " + qualifier + "wind having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". Potential impacts include:" + elif self._GeneralOnsetTime == "complete preparations": + sectionDict['impactRange'] = "Protect against " + qualifier + "wind having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". 
Potential impacts include:" + elif self._GeneralOnsetTime == "hunker down": + sectionDict['impactRange'] = "Potential impacts from the main wind event are now unfolding across " + self._cwa_descriptor() + ". Remain well sheltered from " + qualifier + "wind having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" + else: + sectionDict['impactRange'] = "Little to no additional wind impacts expected." + # Handle the case where the impacts are not the same across the entire CWA + else: + if self._GeneralOnsetTime == "check plans": + sectionDict['impactRange'] = "Prepare for " + qualifier + "wind having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts in this area include:" + elif self._GeneralOnsetTime == "complete preparations": + sectionDict['impactRange'] = "Protect against " + qualifier + "wind having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts in this area include:" + elif self._GeneralOnsetTime == "hunker down": + sectionDict['impactRange'] = "Potential impacts from the main wind event are now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well sheltered from " + qualifier + "wind having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" + else: + sectionDict['impactRange'] = "Little to no additional wind impacts expected." 
+ + if self._GeneralOnsetTime != "recovery": + sectionDict['impactLib'] = self._getPotentialImpactsStatements("Wind", self._impactCategoryToThreatLevel(impactMax)) + else: + sectionDict['impactLib'] = ["Community officials are now assessing the extent of actual wind impacts accordingly.", + "Emergency response teams are attending to casualty situations as needed.", + "Emergency work crews are restoring essential community infrastructure as necessary.", + "If you have an emergency dial 9 1 1.", + ] + + # If there are additional areas + if impactRange != impactMax: + qualifier = self._getImpactsQualifier(impactRangeMax) + + if self._GeneralOnsetTime == "check plans": + curPhrase = "Also, prepare for " + qualifier + "wind having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." + elif self._GeneralOnsetTime == "complete preparations": + curPhrase = "Also, protect against " + qualifier + "wind having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." + elif self._GeneralOnsetTime == "hunker down": + curPhrase = "Potential impacts from the main wind event are also now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well sheltered from " + qualifier + "wind having " + self._frame("possible | additional") + " " + impactRange + " impacts." + else: + curPhrase = "Little to no additional wind impacts expected." + + # If this phrase is not already part of the additional impacts + if curPhrase not in sectionDict['additionalImpactRange']: + + # Add it now + sectionDict['additionalImpactRange'].append(curPhrase) + + # If there is no impact across more than one half the area, include a statement for that as well + if inputThreatDominant == "None": + + curPhrase = "Elsewhere across " + self._cwa_descriptor() + \ + ", little to no impact is anticipated." 
+ + # If this phrase is not already part of the additional impacts + if curPhrase not in sectionDict['additionalImpactRange']: + + # Add it now + sectionDict['additionalImpactRange'].append(curPhrase) + + productDict['windSection'] = sectionDict + + def _surgeSection(self, productDict, productSegmentGroup, productSegment): + sectionDict = dict() + sectionDict['title'] = "Surge" + sectionDict['impactRange'] = "" + sectionDict['impactLib'] = [] + sectionDict['additionalImpactRange'] = [] + sectionDict['variedImpacts'] = True + + impactMin = self._samplingDict['StormSurgeThreat']['impactMin'] + impactMax = self._samplingDict['StormSurgeThreat']['impactMax'] + impactRange = self._samplingDict['StormSurgeThreat']['impactRange'] + impactRangeMax = self._samplingDict['StormSurgeThreat']['impactRangeMax'] + inputThreatDominant = self._samplingDict['StormSurgeThreat']['inputThreatDominant'] + + self.debug_print("DEBUG: B4 %s" % + (self._pp.pformat(self._samplingDict['StormSurgeThreat'])), 1) + + # Test the simplest case first + if impactMin == "none" and impactMax == "none": + sectionDict['impactRange'] = impactRange + productDict['surgeSection'] = sectionDict + return + + # See if we need to include the term "life-threatening" surge + # This corresponds to threat levels of Moderate, High and Extreme + lifeThreatening = "" + + if impactMax in ["significant", "extensive", "devastating", "catastrophic"]: + lifeThreatening = "life-threatening " + elif impactMax == "limited": + lifeThreatening = "locally hazardous " + + if self._GeneralOnsetTime == "check plans": + sectionDict['impactRange'] = "Prepare for " + lifeThreatening + "surge having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". 
Potential impacts in this area include:" + elif self._GeneralOnsetTime == "complete preparations": + sectionDict['impactRange'] = "Protect against " + lifeThreatening + "surge having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts in this area include:" + elif self._GeneralOnsetTime == "hunker down": + sectionDict['impactRange'] = "Potential impacts from the main surge event are now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well away from " + lifeThreatening + "surge having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" + else: + sectionDict['impactRange'] = "Little to no additional surge impacts expected." + + if self._GeneralOnsetTime != "recovery": + sectionDict['impactLib'] = self._getPotentialImpactsStatements("Storm Surge", self._impactCategoryToThreatLevel(impactMax)) + else: + sectionDict['impactLib'] = ["Community officials are now assessing the extent of actual surge impacts accordingly.", + "Emergency response teams are attending to casualty situations as needed.", + "Emergency work crews are restoring essential community infrastructure as necessary.", + "If you have an emergency dial 9 1 1.", + ] + + # Reexamine the impact range - we need to separate out "life-threatening" surge categories into a separate statement + impactParts = impactRange.split(" ") + + # Initialize a variable to keep the proper scope. 
This will hold any leftover surge categories + impactRangeRest = "" + + # Look at the high end of the range + if len(impactParts) == 3 and impactParts[2] in ["significant", "extensive", "devastating", "catastrophic"]: + # We have some "life-threatening" categories we need to split out - check the low end + if impactParts[0] in ["limited", "none"]: + # Make a new range to report + impactRange = "significant" + impactRangeMax = impactRange + + if impactParts[2] != "significant": + impactRange += " to " + impactParts[2] + impactRangeMax = impactParts[2] + + impactRangeRest = impactParts[0] + + # Ensure the leftover impact range is set - just in case we need it + # This should only ever be "limited" in the case of surge under current policy + elif len(impactParts) == 1: + impactRangeRest = impactParts[0] + + self.debug_print("DEBUG: impactRange = '%s' impactMax = '%s' impactMin = '%s'" % + (impactRange, impactMax, impactMin), 1) + # If there are additional life-threatening surge areas + if impactRange != impactMax and impactRange != impactMin: + + lifeThreatening = "" + + if impactRangeMax in ["significant", "extensive", "devastating", "catastrophic"]: + lifeThreatening = "life-threatening " + elif impactRangeMax == "limited": + lifeThreatening = "locally hazardous " + + if self._GeneralOnsetTime == "check plans": + curPhrase = "Also, prepare for " + lifeThreatening + "surge having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." + elif self._GeneralOnsetTime == "complete preparations": + curPhrase = "Also, protect against " + lifeThreatening + "surge having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." + elif self._GeneralOnsetTime == "hunker down": + curPhrase = "Potential impacts from the main surge event are also now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". 
Remain well away from " + lifeThreatening + "surge having " + self._frame("possible | additional") + " " + impactRange + " impacts." + else: + curPhrase = "Little to no additional surge impacts expected." + + self.debug_print("DEBUG: curPhrase = '%s'" % (curPhrase), 1) + self.debug_print("DEBUG: sectionDict['additionalImpactRange'] = \n'%s'" % + (sectionDict['additionalImpactRange']), 1) + # If this phrase is not already part of the additional impacts + if curPhrase not in sectionDict['additionalImpactRange']: + + # Add it now + sectionDict['additionalImpactRange'].append(curPhrase) + + # If there are additional areas + if impactRangeRest != impactMax: + + lifeThreatening = "locally hazardous " + + if self._GeneralOnsetTime == "check plans": + curPhrase = "Also, prepare for " + lifeThreatening + "surge having possible " + impactRangeRest + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." + elif self._GeneralOnsetTime == "complete preparations": + curPhrase = "Also, protect against " + lifeThreatening + "surge having possible " + impactRangeRest + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." + elif self._GeneralOnsetTime == "hunker down": + curPhrase = "Potential impacts from the main surge event are also now unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well away from " + lifeThreatening + "surge having " + self._frame("possible | additional") + " " + impactRangeRest + " impacts." + else: + curPhrase = "Little to no additional surge impacts expected." + + # If this phrase is not already part of the additional impacts + if curPhrase not in sectionDict['additionalImpactRange']: + + # Add it now + sectionDict['additionalImpactRange'].append(curPhrase) + + # If there is no impact across more than one half the area, include a statement for that as well + if inputThreatDominant == "None": + + curPhrase = "Elsewhere across " + self._cwa_descriptor() + \ + ", little to no impact is anticipated." 
+ + # If this phrase is not already part of the additional impacts + if curPhrase not in sectionDict['additionalImpactRange']: + + # Add it now + sectionDict['additionalImpactRange'].append(curPhrase) + + self.debug_print("Final Surge sectionDict['additionalImpactRange'] = '%s'" % + (sectionDict['additionalImpactRange']), 1) + productDict['surgeSection'] = sectionDict + + def _floodingRainSection(self, productDict, productSegmentGroup, productSegment): + sectionDict = dict() + sectionDict['title'] = "Flooding Rain" + sectionDict['impactRange'] = "" + sectionDict['impactLib'] = [] + sectionDict['additionalImpactRange'] = [] + sectionDict['variedImpacts'] = False + + impactMin = self._samplingDict['FloodingRainThreat']['impactMin'] + impactMax = self._samplingDict['FloodingRainThreat']['impactMax'] + impactRange = self._samplingDict['FloodingRainThreat']['impactRange'] + impactRangeMax = self._samplingDict['FloodingRainThreat']['impactRangeMax'] + inputThreatDominant = self._samplingDict['FloodingRainThreat']['inputThreatDominant'] + + self.debug_print("In _floodingRainSection", 1) + self.debug_print("_samplingDict = \n\n%s\n" % (self._pp.pformat(self._samplingDict['FloodingRainThreat'])), 1) + + # Test the simplest case first + if impactMin == "none" and impactMax == "none": + sectionDict['impactRange'] = impactRange + productDict['floodingRainSection'] = sectionDict + return + + qualifier = "" + if impactMax in ["extensive", "devastating", "catastrophic"]: + qualifier = "life-threatening " + elif impactMax == "significant": + qualifier = "dangerous " + elif impactMax == "limited": + qualifier = "locally hazardous " + + # If there is only one impact across the entire CWA, and it is the max + if impactMax != "none" and impactMin == impactMax and inputThreatDominant != "None": + if self._GeneralOnsetTime == "check plans": + sectionDict['impactRange'] = "Prepare for " + qualifier + "rainfall flooding having possible " + impactMax + " impacts across " + 
self._cwa_descriptor() + ". Potential impacts include:" + elif self._GeneralOnsetTime == "complete preparations": + sectionDict['impactRange'] = "Protect against " + qualifier + "rainfall flooding having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". Potential impacts include:" + elif self._GeneralOnsetTime == "hunker down": + sectionDict['impactRange'] = "Potential impacts from the flooding rain are still unfolding across " + self._cwa_descriptor() + ". Remain well guarded against " + qualifier + "flood waters having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" + else: + sectionDict['impactRange'] = "Additional impacts from flooding rain are still a concern across " + self._cwa_descriptor() + ". Remain well guarded against " + qualifier + "flood waters having further impacts of " + impactMax + " potential." + # Handle the case where the impacts are not the same across the entire CWA + else: + if self._GeneralOnsetTime == "check plans": + sectionDict['impactRange'] = "Prepare for " + qualifier + "rainfall flooding having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts include:" + elif self._GeneralOnsetTime == "complete preparations": + sectionDict['impactRange'] = "Protect against " + qualifier + "rainfall flooding having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts include:" + elif self._GeneralOnsetTime == "hunker down": + sectionDict['impactRange'] = "Potential impacts from the flooding rain are still unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well guarded against " + qualifier + "flood waters having " + self._frame("possible | additional") + " " + impactMax + " impacts. 
If realized, these impacts include:" + else: + if impactMax != "none": + sectionDict['impactRange'] = "Additional impacts from flooding rain are still a concern across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well guarded against " + qualifier + "flood waters having further impacts of " + impactMax + " potential." + else: + sectionDict['impactRange'] = "Little to no additional impacts expected from flooding rain." + + if self._GeneralOnsetTime != "recovery": + sectionDict['impactLib'] = self._getPotentialImpactsStatements("Flooding Rain", self._impactCategoryToThreatLevel(impactMax)) + else: + sectionDict['impactLib'] = [] + + # If there are additional areas + if impactRange != impactMax: + + qualifier = "" + if impactRangeMax in ["extensive", "devastating", "catastrophic"]: + qualifier = "life-threatening " + elif impactRangeMax == "significant": + qualifier = "dangerous " + elif impactRangeMax == "limited": + qualifier = "locally hazardous " + + if self._GeneralOnsetTime == "check plans": + curPhrase = "Prepare for " + qualifier + "rainfall flooding having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." + elif self._GeneralOnsetTime == "complete preparations": + curPhrase = "Protect against " + qualifier + "rainfall flooding having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." + elif self._GeneralOnsetTime == "hunker down": + curPhrase = "Potential impacts from the flooding rain are still unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well guarded against " + qualifier + "flood waters having " + self._frame("possible | additional") + " " + impactRange + " impacts." + else: + if impactMax != "none": + curPhrase = "Additional impacts from flooding rain are still a concern across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well guarded against " + qualifier + "flood waters having further impacts of " + impactRange + " potential." 
+ else: + curPhrase = "Little to no additional impacts expected from flooding rain." + + # If this phrase is not already part of the additional impacts + if curPhrase not in sectionDict['additionalImpactRange']: + + # Add it now + sectionDict['additionalImpactRange'].append(curPhrase) + + # If there is no impact across more than one half the area, include a statement for that as well + if inputThreatDominant == "None": + + curPhrase = "Elsewhere across " + self._cwa_descriptor() + \ + ", little to no impact is anticipated." + + # If this phrase is not already part of the additional impacts + if curPhrase not in sectionDict['additionalImpactRange']: + + # Add it now + sectionDict['additionalImpactRange'].append(curPhrase) + + productDict['floodingRainSection'] = sectionDict + + def _tornadoSection(self, productDict, productSegmentGroup, productSegment): + sectionDict = dict() + sectionDict['title'] = "Tornadoes" + sectionDict['impactRange'] = "" + sectionDict['impactLib'] = [] + sectionDict['additionalImpactRange'] = [] + sectionDict['variedImpacts'] = False + + impactMin = self._samplingDict['TornadoThreat']['impactMin'] + impactMax = self._samplingDict['TornadoThreat']['impactMax'] + impactRange = self._samplingDict['TornadoThreat']['impactRange'] + impactRangeMax = self._samplingDict['TornadoThreat']['impactRangeMax'] + inputThreatDominant = self._samplingDict['TornadoThreat']['inputThreatDominant'] + + # Test the simplest case first + if impactMin == "none" and impactMax == "none": + sectionDict['impactRange'] = impactRange + productDict['tornadoSection'] = sectionDict + return + + # For tornadoes only, Cap at devastating + if impactMax in ["devastating", "catastrophic"]: + impactMax = "devastating" + if impactMin in ["devastating", "catastrophic"]: + impactMin = "devastating" + if impactRange in ["devastating", "catastrophic"]: + impactRange = "devastating" + impactRangeMax = impactRange + + # If the max impact category is "catastrophic", and we lumped 
"devastating" in with it, ensure "devastating" is not + # leftover as the high end of the range + impactParts = impactRange.split(" ") # split up the impact range + + # If "devastating" is the high end of the range + if len(impactParts) == 3 and impactParts[2] == "devastating": + # If the first part is not "extensive" + if impactParts[0] != "extensive": + # Force the upper end to be 1 category lower + impactRange.replace("devastating", "extensive") + impactRangeMax = "extensive" + # Otherwise, the impact is just "extensive" + else: + impactRange = "extensive" + impactRangeMax = "extensive" + + qualifier = "" + if impactMax in ["extensive", "devastating"]: + qualifier = "particularly dangerous " + elif impactMax == "significant": + qualifier = "dangerous " + + # If there is only one impact across the entire CWA, and it is the max + if impactMax != "none" and impactMin == impactMax and inputThreatDominant != "None": + if self._GeneralOnsetTime == "check plans": + sectionDict['impactRange'] = "Prepare for a " + qualifier + "tornado event having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". Potential impacts include:" + elif self._GeneralOnsetTime == "complete preparations": + sectionDict['impactRange'] = "Protect against a " + qualifier + "tornado event having possible " + impactMax + " impacts across " + self._cwa_descriptor() + ". Potential impacts include:" + elif self._GeneralOnsetTime == "hunker down": + sectionDict['impactRange'] = "Potential impacts from tornadoes are still unfolding across " + self._cwa_descriptor() + ". Remain well braced against a " + qualifier + "tornado event having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" + else: + sectionDict['impactRange'] = "Additional impacts from tornadoes are still a concern across " + self._cwa_descriptor() + ". Remain well braced against " + qualifier + "tornado event having further " + impactMax + " impact potential." 
+ # Handle the case where the impacts are not the same across the entire CWA + else: + if self._GeneralOnsetTime == "check plans": + sectionDict['impactRange'] = "Prepare for a " + qualifier + "tornado event having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts include:" + elif self._GeneralOnsetTime == "complete preparations": + sectionDict['impactRange'] = "Protect against a " + qualifier + "tornado event having possible " + impactMax + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + ". Potential impacts include:" + elif self._GeneralOnsetTime == "hunker down": + sectionDict['impactRange'] = "Potential impacts from tornadoes are still unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well braced against a " + qualifier + "tornado event having " + self._frame("possible | additional") + " " + impactMax + " impacts. If realized, these impacts include:" + else: + if impactMax != "none": + sectionDict['impactRange'] = "Additional impacts from tornadoes are still a concern across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well braced against " + qualifier + "tornado event having further " + impactMax + " impact potential." + else: + sectionDict['impactRange'] = "Little to no additional impacts expected from tornadoes." + + if self._GeneralOnsetTime != "recovery": + sectionDict['impactLib'] = self._getPotentialImpactsStatements("Tornado", self._impactCategoryToThreatLevel(impactMax)) + else: + sectionDict['impactLib'] = [] + + # If there are additional areas + if impactRange != impactMax: + + qualifier = "" + if impactRangeMax in ["extensive", "devastating"]: + qualifier = "particularly dangerous " + elif impactRangeMax == "significant": + qualifier = "dangerous " + + if self._GeneralOnsetTime == "check plans": + curPhrase = "Prepare for a " + qualifier + "tornado event having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." 
+ elif self._GeneralOnsetTime == "complete preparations": + curPhrase = "Protect against a " + qualifier + "tornado event having possible " + impactRange + " impacts across " + self._frame("ENTER AREA DESCRIPTION") + "." + elif self._GeneralOnsetTime == "hunker down": + curPhrase = "Potential impacts from tornadoes are still unfolding across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well braced against a " + qualifier + "tornado event having " + self._frame("possible | additional") + " " + impactRange + " impacts." + else: + if impactMax != "none": + curPhrase = "Additional impacts from tornadoes are still a concern across " + self._frame("ENTER AREA DESCRIPTION") + ". Remain well braced against " + qualifier + "tornado event having further " + impactRange + " impact potential." + else: + curPhrase = "Little to no additional impacts expected from tornadoes." + + # If this phrase is not already part of the additional impacts + if curPhrase not in sectionDict['additionalImpactRange']: + + # Add it now + sectionDict['additionalImpactRange'].append(curPhrase) + + # If there is no impact across more than one half the area, include a statement for that as well + if inputThreatDominant == "None": + + curPhrase = "Elsewhere across " + self._cwa_descriptor() + \ + ", little to no impact is anticipated." 
+ + # If this phrase is not already part of the additional impacts + if curPhrase not in sectionDict['additionalImpactRange']: + + # Add it now + sectionDict['additionalImpactRange'].append(curPhrase) + + productDict['tornadoSection'] = sectionDict + + def _getImpactsQualifier(self, impact): + qualifier = "" + if impact in ["extensive", "devastating", "catastrophic"]: + qualifier = "life-threatening " + elif impact == "significant": + qualifier = "dangerous " + elif impact == "limited": + qualifier = "hazardous " + + return qualifier + + def _coastalHazardsSection(self, productDict, productSegmentGroup, productSegment): + productDict['coastalHazardsSection'] = self._frame("Enter here a statement of any additional hazards of concern along the coast such as rip currents, high waves, concerns for beach erosion etc etc if not already done in the surge section.") + + def _preparednessSection(self, productDict, productSegmentGroup, productSegment): + sectionDict = dict() + sectionDict['title'] = "PRECAUTIONARY/PREPAREDNESS ACTIONS" + + sectionDict['genericAction'] = None + if not self._ImpactsAnticipated: + sectionDict['genericAction'] = "It is always a good idea to check your preparedness plans so when and if the time comes during hurricane season, you are ready to execute them. A good resource is ready.gov." 
+ + productDict['preparednessSection'] = sectionDict + + def _evacuationStatements(self, productDict, productSegmentGroup, productSegment): + evacuationDict = dict() + evacuationDict['title'] = "Evacuations" + + import TCVDictionary + evacuationDict['statements'] = TCVDictionary.EvacuationStatements + + productDict['evacuationStatements'] = evacuationDict + + def _otherPreparednessActions(self, productDict, productSegmentGroup, productSegment): + actionsDict = dict() + actionsDict['title'] = "Other Preparedness Information" + + import TCVDictionary + actionsDict['actions'] = TCVDictionary.OtherPreparednessActions[self._GeneralOnsetTime] + + productDict['otherPreparednessActions'] = actionsDict + + def _additionalSourcesInfo(self, productDict, productSegmentGroup, productSegment): + infoDict = dict() + infoDict['title'] = "Additional Sources of Information" + + import TCVDictionary + infoDict['sources'] = TCVDictionary.AdditionalSources + + productDict['additionalSourcesInfo'] = infoDict + + def _nextUpdate(self, productDict, productSegmentGroup, productSegment): + + if not self._ImpactsAnticipated: + productDict['nextUpdate'] = "At this time...additional local statements are not anticipated unless conditions warrant." + elif self._NextUpdate == "LastIssuance": # or not self._ImpactsAnticipated: + productDict['nextUpdate'] = "As it pertains to this event...this will be the last local statement issued by the National Weather Service in " + \ + self._wfoCityState + \ + " regarding the effects of tropical cyclone hazards upon the area." + elif self._NextUpdate == "Conditions": + productDict['nextUpdate'] = "The next local statement will be issued by the National Weather Service in " + \ + self._wfoCityState + \ + " as conditions warrant." 
+ elif self._NextUpdate == "Enter": + productDict['nextUpdate'] = "The next local statement will be issued by the National Weather Service in " + \ + self._wfoCityState + \ + " around " + self._NextUpdate_entry.strip() + ", or sooner if conditions warrant." + + ################# Product Parts Helper Methods + + def _impactsKeyFunction(self, optionIndexTuple): + ((_, _), indexStr) = optionIndexTuple + indexStr = indexStr.strip() + if len(indexStr) == 0: + return 9999 + else: + return int(indexStr) + + def _getPotentialImpactsStatements(self, elementName, maxThreat): + import TCVDictionary + potentialImpactStatements = TCVDictionary.PotentialImpactStatements + statements = potentialImpactStatements[elementName][maxThreat] + + return statements + + def _impactCategoryToThreatLevel(self, impactCategory): + if impactCategory == "catastrophic" or impactCategory == "devastating": + return "Extreme" + elif impactCategory == "extensive": + return "High" + elif impactCategory == "significant": + return "Mod" + elif impactCategory == "limited": + return "Elevated" + else: + return "None" + + def _determineHazardStates(self): + self._currentHazardsList = [] + self._changesHazardsList = [] + + self.debug_print("*"*80) + keys = list(self._previousAdvisory.keys()) + keys.sort() + for key in keys: + self.debug_print("%s : %s" % (key, self._previousAdvisory[key]), 1) + for hazard in self._previousAdvisory["HazardsForHLS"]: + self.debug_print("DEBUG Hazard: %s" % (self._pp.pformat(hazard)), 1) + if hazard['act'] != 'CON': + self._changesHazardsList.append(hazard) + if hazard['act'] not in ['CAN', "UPG"]: + self._currentHazardsList.append(hazard) + + self.debug_print("-"*80, 1) + self.debug_print("self._changesHazardsList = %s" % (self._changesHazardsList), 1) + self.debug_print("self._currentHazardsList = %s" % (self._currentHazardsList), 1) + + ############################################################### + ### Sampling and Statistics related methods + + def _sampleHLSData(self, 
argDict): + editAreas = [(self._cwa(), self._cwa())] + + cwaSampler = self.getSampler(argDict, + (self._analysisList_HLS(), self._timeRangeList3Hour, editAreas)) + + statList = self.getStatList(cwaSampler, + self._analysisList_HLS(), + self._timeRangeList3Hour, + self._cwa()) + + for period in range(len(statList)): + + self.debug_print("=" * 100, 1) + self.debug_print("In _sampleHLSData for period %s (%s)" % \ + (period, self._timeRangeList3Hour[period][0]), 1) + + statDict = statList[period] + for threatName in ['WindThreat', 'FloodingRainThreat', 'TornadoThreat']: + self._sampleRankedDiscreteValue(threatName, statDict) + # TODO: Investigate if this sampling method is still really needed. The JSON files may + # have all the needed information now + self._sampleMostSignificantDiscreteValue(threatName, statDict) + + qpfToFfgRatio = self._getStatValue(statDict, "QPFtoFFGRatio", "Max") + decidingField = self._samplingDict['FloodingRainThreat']['decidingField'] + if decidingField is None or qpfToFfgRatio > decidingField: + self._samplingDict['FloodingRainThreat']['decidingField'] = qpfToFfgRatio + + self.debug_print("WindThreat = %s" % (self._samplingDict['WindThreat']['inputThreatDominant']), 1) + self.debug_print("FloodingRainThreat = %s" % (self._samplingDict['FloodingRainThreat']['inputThreatDominant']), 1) + self.debug_print("TornadoThreat = %s" % (self._samplingDict['TornadoThreat']['inputThreatDominant']), 1) + + + + self._createWholeDomainEditArea(argDict) + editAreas = [("WholeDomain", "WholeDomain")] + wholeDomainSampler = self.getSampler(argDict, + (self._analysisList_HLS_WholeDomain(), self._timeRangeList3Hour, editAreas)) + + statList = self.getStatList(wholeDomainSampler, + self._analysisList_HLS_WholeDomain(), + self._timeRangeList3Hour, + "WholeDomain") + + for period in range(len(statList)): + statDict = statList[period] + maxWind = self._getStatValue(statDict, "Wind", "Max", self.VECTOR()) + decidingField = 
self._samplingDict['WindThreat']['decidingField'] + if decidingField is None or maxWind > decidingField: + self._samplingDict['WindThreat']['decidingField'] = maxWind + + + + editAreas = [(self._cwa(), self._cwa())] + intersectAreas = self._computeIntersectAreas(editAreas, argDict) + if len(intersectAreas) != 0: + self.debug_print("Sampling StormSurgeThreat, now") + intersectSampler = self.getSampler(argDict, + (self._intersectAnalysisList_HLS(), self._timeRangeList3Hour, intersectAreas)) + + statList = self.getStatList(intersectSampler, + self._intersectAnalysisList_HLS(), + self._timeRangeList3Hour, + "intersect_" + self._cwa()) + + for period in range(len(statList)): + statDict = statList[period] + self.debug_print("current stormSurge statDict = %s" % (self._pp.pformat(statDict)), 1) + self._sampleRankedDiscreteValue('StormSurgeThreat', statDict) + + inundationMax = self._getStatValue(statDict, "InundationMax", "Max") + decidingField = self._samplingDict['StormSurgeThreat']['decidingField'] + if decidingField is None or inundationMax > decidingField: + self._samplingDict['StormSurgeThreat']['decidingField'] = inundationMax + + self.debug_print("StormSurgeThreat = %s" % (self._samplingDict['StormSurgeThreat']['inputThreatDominant']), 1) + + def _sampleTCVAdvisory(self, advisory): + self.debug_print("sampling TCV advisory!", 1) + seenValidThreatLevel = {} + for zone in advisory["ZoneData"]: + self.debug_print("-" * 60, 1) + self.debug_print("Looking at zone %s" % (zone), 1) + for key in advisory["ZoneData"][zone]: + if "Threat" not in key or "highestHunkerDown" in key: + continue + + if key not in seenValidThreatLevel: + seenValidThreatLevel[key] = False + + self.debug_print("Looking at key '%s'" % (key), 1) + + threatLevel = advisory["ZoneData"][zone][key] + self.debug_print(" Threat level = %s" % (threatLevel), 1) + + if (self._samplingDict[key]['inputThreatLow'] is None) and (not seenValidThreatLevel[key]): + self._samplingDict[key]['inputThreatLow'] = 
threatLevel + if (self._samplingDict[key]['inputThreatHigh'] is None) and (not seenValidThreatLevel[key]): + self._samplingDict[key]['inputThreatHigh'] = threatLevel + + if threatLevel != None: + seenValidThreatLevel[key] = True + + lowThreat = self._samplingDict[key]['inputThreatLow'] + highThreat = self._samplingDict[key]['inputThreatHigh'] + threatOrder = self.mostSignificantDiscrete_keyOrder_dict(None, None, None)[key] + + self.debug_print("***** threatOrder = %s" % (repr(threatOrder)), 1) + + if threatOrder.index(threatLevel) < threatOrder.index(lowThreat): + lowThreat = threatLevel + if threatOrder.index(threatLevel) > threatOrder.index(highThreat): + highThreat = threatLevel + + if lowThreat is None: + self.debug_print(" low threat = Python None", 1) + else: + self.debug_print(" low threat = %s" % (lowThreat), 1) + self.debug_print(" high threat = %s" % (highThreat), 1) + + self._samplingDict[key]['inputThreatLow'] = lowThreat + self._samplingDict[key]['inputThreatHigh'] = highThreat + + self.debug_print("Sampling dict =\n\n%s\n" % (self._pp.pformat(self._samplingDict)), 1) + + def _sampleRankedDiscreteValue(self, threatName, statDict): + self.debug_print("-" * 60, 1) + self.debug_print("_sampleRankedDiscreteValue statDict =\n\n%s\n" % (self._pp.pformat(statDict)), 1) + rankedThreatLevels = self.getStats(statDict, threatName + "__rankedDiscreteValue") + self.debug_print("sampling %s" % (threatName), 1) + self.debug_print("sampleData: rankedThreatLevels =\n\n%s\n" % (self._pp.pformat(rankedThreatLevels)), 1) + if rankedThreatLevels is not None: + dominantThreatLevel = self._getDominantThreatLevel(threatName, rankedThreatLevels) + self.debug_print("dominantThreatLevel = %s" % (dominantThreatLevel), 1) + + currentDominantThreatLevel = self._samplingDict[threatName]['inputThreatDominant'] + self.debug_print("currentDominantThreatLevel = %s" % (currentDominantThreatLevel), 1) + self._samplingDict[threatName]['inputThreatDominant'] = 
self._getHighestThreat(threatName, + dominantThreatLevel, + currentDominantThreatLevel) + self.debug_print("new dominant = %s" % (self._samplingDict[threatName]['inputThreatDominant']), 1) + + def _sampleMostSignificantDiscreteValue(self, threatName, statDict): + self.debug_print("_sampleMostSignificantDiscreteValue for %s" % (threatName), 1) + threatLevel = self.getStats(statDict, threatName + "__mostSignificantDiscreteValue") + self.debug_print("threatLevel = %s" % (threatLevel), 1) + if threatLevel is not None: + inputThreatLow = self._samplingDict[threatName]['inputThreatLow'] + self.debug_print("current inputThreatLow = %s" % (inputThreatLow), 1) + if inputThreatLow is None: + self._samplingDict[threatName]['inputThreatLow'] = threatLevel + else: + self._samplingDict[threatName]['inputThreatLow'] = self._getLowestThreat(threatName, + threatLevel, + inputThreatLow) + self.debug_print("new inputThreatLow = %s" % (self._samplingDict[threatName]['inputThreatLow']), 1) + + inputThreatHigh = self._samplingDict[threatName]['inputThreatHigh'] + self.debug_print("current inputThreatHigh = %s" % (inputThreatHigh), 1) + self._samplingDict[threatName]['inputThreatHigh'] = self._getHighestThreat(threatName, + threatLevel, + inputThreatHigh) + self.debug_print("new inputThreatHigh = %s" % (self._samplingDict[threatName]['inputThreatHigh']), 1) + + def _getDominantThreatLevel(self, threatName, rankedThreatLevels): + dominantLevelWithHighestRank = None + highestRank = None + + for (level, rank) in rankedThreatLevels: + if highestRank is None or rank > highestRank: + highestRank = rank + dominantLevelWithHighestRank = level + elif rank == highestRank: + dominantLevelWithHighestRank = self._getHighestThreat(threatName, + dominantLevelWithHighestRank, + level) + + return dominantLevelWithHighestRank + + def _getHighestThreat(self, threatName, threatLevel1, threatLevel2): + keyOrderDict = self.mostSignificantDiscrete_keyOrder_dict(None, None, None) + keyOrder = 
keyOrderDict[threatName] + + level1Index = keyOrder.index(threatLevel1) + level2Index = keyOrder.index(threatLevel2) + + if level1Index < level2Index: + return threatLevel2 + elif level1Index == level2Index: + return threatLevel1 + else: + return threatLevel1 + + def _getLowestThreat(self, threatName, threatLevel1, threatLevel2): + keyOrderDict = self.mostSignificantDiscrete_keyOrder_dict(None, None, None) + keyOrder = keyOrderDict[threatName] + + level1Index = keyOrder.index(threatLevel1) + level2Index = keyOrder.index(threatLevel2) + + if level1Index < level2Index: + return threatLevel1 + elif level1Index == level2Index: + return threatLevel1 + else: + return threatLevel2 + + def _setHazardImpactCategories(self, threatName): + inputThreatLow = self._samplingDict[threatName]['inputThreatLow'] + inputThreatHigh = self._samplingDict[threatName]['inputThreatHigh'] + inputThreatDominant = self._samplingDict[threatName]['inputThreatDominant'] + decidingField = self._samplingDict[threatName]['decidingField'] + catastrophicThreshold = self._samplingDict[threatName]['catastrophicThreshold'] + + self.debug_print("-" * 60, 1) + self.debug_print("DEBUG: _setHazardImpactCategories for %s" % (threatName), 1) + + impactMin = None + impactMax = None + impactRange = None + impactRangeMax = None + + # Determine lowest impact category + if inputThreatLow == "Extreme": + if threatName != "TornadoThreat" and decidingField >= catastrophicThreshold: + impactMin = "catastrophic" + else: + impactMin = "devastating" + elif inputThreatLow == "High": + impactMin = "extensive" + elif inputThreatLow == "Mod": + impactMin = "significant" + elif inputThreatLow == "Elevated": + impactMin = "limited" + else: + impactMin = "none" + + # Determine highest impact category + if inputThreatHigh == "Extreme": + if threatName != "TornadoThreat" and decidingField >= catastrophicThreshold: + impactMax = "catastrophic" + impactRangeMax = "devastating" + else: + impactMax = "devastating" + impactRangeMax = 
"extensive" + elif inputThreatHigh == "High": + impactMax = "extensive" + impactRangeMax = "significant" + elif inputThreatHigh == "Mod": + impactMax = "significant" + impactRangeMax = "limited" + elif inputThreatHigh == "Elevated": + impactMax = "limited" + impactRangeMax = "none" + else: + impactMax = "none" + impactRangeMax = "none" + + self.debug_print( + "DEBUG: impactMin = '%s' impactMax = '%s' impactRangeMax = '%s'" % \ + (impactMin, impactMax, impactRangeMax), 1) + + # Determine dominant impact category for rest of CWA - No impact + if impactMin == "none" and impactMax == "none": + impactRange = "Little to no " + self._frame("additional") + " impacts are anticipated at this time across " + self._cwa_descriptor() + "." + # Otherwise, at least some impact will be experienced across the CWA + else: + # Do not permit the lowest category to be "None", if the highest category is also not "None" + # This is to avoid poor impact range wording in situations of tight gradients across a CWA + # (e.g. 
"None to High") + if impactMin == "none" and impactMax != "none": + impactMin = "limited" + + if impactMin == impactMax: + impactRange = impactMax + impactRangeMax = impactMax + elif impactMin == impactRangeMax: + impactRange = impactRangeMax + else: + impactRange = impactMin + " to " + impactRangeMax + + self._samplingDict[threatName]['impactMin'] = impactMin + self._samplingDict[threatName]['impactMax'] = impactMax + self._samplingDict[threatName]['impactRange'] = impactRange + self._samplingDict[threatName]['impactRangeMax'] = impactRangeMax + + ############################################################### + ### Area, Zone and Segment related methods + + def _createWholeDomainEditArea(self, argDict): + editAreaUtils = EditAreaUtils.EditAreaUtils() + editAreaUtils.setUp(None, argDict) + + gridLoc = editAreaUtils.getGridLoc() + grid2Dbit = JavaGrid2DBit( gridLoc.gridSize().x, gridLoc.gridSize().y ) + grid2Dbit.setAllValues(1) + + refID = ReferenceID("WholeDomain") + refData = ReferenceData(gridLoc, refID, grid2Dbit) + editAreaUtils.saveEditAreas([refData]) + + ############################################################### + ### Hazards related methods + + def _determineHazards(self, segments): + # Return a list of hazards from the given segments in the form: + # (key, landList, marineList, coastalList, inlandList) + # where key is (hdln, act, phen, sig) and the lists show which areas + # contain the hazard separated by category + hazAreaList = [] + for segment in segments: + hazardTable = self._argDict["hazards"] + hazards = hazardTable.getHazardList(segment) + for hazard in hazards: + action = hazard['act'] + hazAreaList.append((hazard, segment)) + # Consolidate hazards (there could be multiple segments with the same phen/sig/act) + hazardDict = {} + hazardList = [] + for hazard, segment in hazAreaList: + key = (hazard['hdln'], hazard['act'], hazard['phen'], hazard['sig']) + if key not in list(hazardDict.keys()): + hazardDict[key] = segment + 
hazardList.append(key) + else: + hazardDict[key] = hazardDict[key]+segment + + self.debug_print("hazardList =\n\n%s\n" % (self._pp.pformat(hazardList)), 1) + + return hazardList + + ############################################################### + ### Time related methods + + def _formatLocalTime(self, para, areas): + # Create a time string in local time + # e.g. 2 AM EDT + # Get the Z time hour + timeSearch = re.compile("...([0-9]+) *(Z|UTC)...") + timeStr = timeSearch.search(para) + +## gmtStr = para[timeStr.start():timeStr.end()] +## gmt = gmtStr.strip("...").replace("Z","") +## gmtHour = int(gmt)/100 + + # This code could bomb in the unlikely event we don't find a UTC + # time. We should probably add some kind of default hour here, + # keyed off the current hour, to prevent this. (MHB) + try: + # Convert the hour portion of the time string to an integer + gmtHour = int(timeStr.group(1)[:2]) + except: + gmtHour = time.gmtime().tm_hour + + gmtTR = self.createTimeRange(gmtHour, gmtHour+1, "Zulu") + gmtTime = gmtTR.startTime().unixTime() + + # Now make a string for each time zone + zoneList = self._getTimeZoneList(areas) + timeStrs = [] + timeDesc = "" + for timeZone in zoneList: + timeStr = self.formatTimeString(gmtTime, "%I %p %Z ", timeZone) + timeStr = string.replace(timeStr, " ", " ") + timeStr = string.strip(timeStr) + timeStr = timeStr.lstrip("0") + if timeStr not in timeStrs: + if len(timeStrs) > 0: + timeDesc += "...OR " + timeStrs.append(timeStr) + timeDesc += timeStr + return timeDesc + + def _getTimeZoneList(self, areaList): + # NOTE -- this code was taken from the middle of getAreaHeader + # in Header.py -- it really should be put back in and used + # in Header.py, but to avoid confusion, I'm repeating it here + # get this time zone + thisTimeZone = os.environ["TZ"] + zoneList = [] + # check to see if we have any areas outside our time zone + for areaName in areaList: + if areaName in list(self._areaDict.keys()): + entry = self._areaDict[areaName] + if 
"ugcTimeZone" not in entry: #add your site tz + if thisTimeZone not in zoneList: + zoneList.append(thisTimeZone) + continue # skip this entry + timeZoneList = entry["ugcTimeZone"] + if type(timeZoneList) is bytes: # a single value + timeZoneList = [timeZoneList] # make it into a list + for timeZone in timeZoneList: + if timeZone not in zoneList: + zoneList.append(timeZone) + # if the resulting zoneList is empty, put in our time zone + if len(zoneList) == 0: + zoneList.append(thisTimeZone) + # if the resulting zoneList has our time zone in it, be sure it + # is the first one in the list + try: + index = zoneList.index(thisTimeZone) + if index != 0: + del zoneList[index] + zoneList.insert(0, thisTimeZone) + except: + pass + return zoneList + + ############################################################### + ### Storm Information and TCP related methods + + def _grabHeadline(self, text=''): + # Get first headline found in text and return it as a string + + self.debug_print("_grabHeadline text = '%s'" % (text)) + + # Fixed pattern to grab headline (MHB 04/08/2009) + # See if there is a headline in this text + headlineSearch = re.findall("(?ism)^(\.{3}.+?\.{3}) *\n", text) + + self.debug_print("old headlineSearch = %s" % (headlineSearch), 1) + + # If we could not find original headlines, try to use 'new' HLS style + if headlineSearch is None or headlineSearch == []: + headlineSearch = re.findall("(?ism)^\*\*.+?\*\* *\n", text) + + self.debug_print("now headlineSearch = %s" % (headlineSearch), 1) + + # If we found a headline + if len(headlineSearch) > 0: + + # Remove the first and last ellipses - if they exist + headlineSearch[0] = re.sub("^\.\.\.", "", headlineSearch[0]) + headlineSearch[0] = re.sub("\.\.\.$", "", headlineSearch[0]) + +# # Remove the first and last '**' - if they exist + headlineSearch[0] = headlineSearch[0].replace("**", "").strip() + + # Return the first cleaned-up headline string we found + return self._cleanText(headlineSearch[0]) + + # Otherwise, 
return an indicator there is no headline in this text + else: + return "" # Changed to an null string instead of None + # (MHB 04/08/2009) + + def _getStormInfo(self, argDict): + # Get the Storm information + self._stormType = "Tropical" + self._stormName = "Cyclone" + self._stormTypeName = self._stormType + " " +self._stormName + + + stormDict = self._grabStormInfo(self._TCP) + self._stormName = stormDict.get("StormName", "") + self._stormType = stormDict.get("StormType", "") + self._stormTypeName = self._stormType + " " + self._stormName + self._decodeStormInfo(stormDict) + # Storm movement in mph and the stated movement trend + self._stormMovementTrend = self._expandBearings("Movement " + stormDict.get("StormMotion","")) + # Storm intensity in mph and the stated intensity trend. + self._stormIntensityTrend = "Storm Intensity " + stormDict.get("StormIntensity","") + + self.debug_print("Begin storm information", 1) + self.debug_print("storm dict = %s" % (stormDict), 1) + self.debug_print("storm name = %s" % (self._stormName), 1) + self.debug_print("type = %s" % (self._stormType), 1) + self.debug_print("type name = %s" % (self._stormTypeName), 1) + self.debug_print("time = %s" % (self._stormTime), 1) + self.debug_print("lat = %s" % (self._stormLat), 1) + self.debug_print("lon = %s" % (self._stormLon), 1) + self.debug_print("location = %s" % (str(self._stormLocation)), 1) + self.debug_print("reference = %s" % (self._stormReference), 1) + self.debug_print("references = %s" % (self._stormLocalReferences), 1) + self.debug_print("movement trend = %s" % (self._stormMovementTrend), 1) + self.debug_print("intensity trend = %s" % (self._stormIntensityTrend), 1) + self.debug_print("End storm information", 1) + + def _grabStormInfo(self, tcp): + # Get the storm information from the selected TCP + # return a dictionary + # Initialize a dictionary to hold the information we want + dict = {"StormType" : "|* fill in storm type here *|", + "StormName" : "|* fill in storm name here 
*|", + "StormTime" : "|* Enter storm time *| ", + "StormLat": "", + "StormLon": "", + "StormReference": "", + "StormIntensity": "", + "StormMotion": "", + "StormInfo": "", + "StormCenter": "", + } + #======================================================================= + # If we got the latest public advisory + + if tcp is not None and len(tcp) > 0: + + #=================================================================== + # Try to determine the storm type and name automatically + + # Updated version to handle WFO GUM advisories. This pattern will + # handle multiple word names (including certain special characters) + # This is for the NHC format. + mndSearch = re.search("(?im)^.*?(HURRICANE|(POTENTIAL|SUB|POST.?)" + + "?TROPICAL (STORM|DEPRESSION|CYCLONE)|" + + "(SUPER )?TYPHOON|REMNANTS OF) " + + "([A-Z0-9\-\(\) ]+?)" + + "(SPECIAL |INTERMEDIATE )?ADVISORY", tcp) + + # Display some debug info - if flag is set + self.debug_print("mndSearch = '%s'" % (mndSearch), 1) + + # If we found the storm type and name in the MND header + if mndSearch is not None: + + # Pick off the storm type and name + dict["StormType"] = mndSearch.group(1).strip() + dict["StormName"] = mndSearch.group(5).strip() + + #################################################################### + #################################################################### + # 12/15/2010 (MHB) - we should not need this anymore, but will + # leave it for the 2011 season as a fail-safe. 
+ + # Look for the HPC format instead + else: + + mndSearch = re.search("(?im)^PUBLIC ADVISORY.+?FOR REMNANTS " + + "OF ([A-Z0-9\-\(\) ]+)", tcp) + + # If we found the storm type and name in the MND header + if mndSearch is not None: + + # Pick off the storm type and name + dict["StormType"] = "Remnants of" + dict["StormName"] = mndSearch.group(1).strip() + + # end possible removal - 12/15/2010 (MHB) + #################################################################### + #################################################################### + + #=================================================================== + # Clean up the product for easier parsing + + tcp = self._cleanText(tcp) + + #=================================================================== + # Now try to grab the latest storm information + + # Look for the new NHC format first + summarySearch = re.search("(?is)SUMMARY OF (.+?)\.{3}.+?" + + "LOCATION\.{3}(.+?[NS]) +(.+?[WE]).+?" + + "(ABOUT .+?)MAXIMUM SUSTAINED WIND.+?" + + "(\d+ MPH).+?", tcp) + + #-------------------------------------------------------------------- + # If we found the NHC summary section + + if summarySearch is not None: + + # Set aside some information we'll need later on + dict["StormTime"] = summarySearch.group(1).strip() + dict["StormLat"] = summarySearch.group(2).strip() + dict["StormLon"] = summarySearch.group(3).strip() + dict["StormReference"] = summarySearch.group(4).strip() + dict["StormIntensity"] = summarySearch.group(5).strip().lower() + + haveStormMotion = True + if tcp.find("PRESENT MOVEMENT...STATIONARY") != -1: + dict["StormMotion"] = "Stationary" + else: + summarySearch = re.search("PRESENT MOVEMENT\.{3}(.+?)\.{3}", tcp) + + if summarySearch is not None: + dict["StormMotion"] = summarySearch.group(1).strip().lower() + else: + haveStormMotion = False + + #================================================================ + # Use the remaining summary groups to contruct a paragraph + # similar to the "old" TCP 
format, and save that for later use + + # Start the paragraph with the advisory time + dict["StormCenter"] = "AT %s...THE CENTER OF " % \ + (dict["StormTime"]) + + # Now add some phrasing to maintain proper grammar, if needed + if dict["StormType"] == "Remnants of": + dict["StormCenter"] = "%s THE" % (dict["StormCenter"]) + + # Now add the storm type and storm name + dict["StormCenter"] = "%s %s %s " % (dict["StormCenter"], + dict["StormType"], + dict["StormName"]) + + # Now add the storm position + dict["StormCenter"] = \ + "%s WAS LOCATED AT LATITUDE %s...LONGITUDE %s." % \ + (dict["StormCenter"], dict["StormLat"], dict["StormLon"]) + + #---------------------------------------------------------------- + # Now add the primary NHC geographic reference + + # Get all the NHC references - starting with the word 'About' + # after the first one + referenceIndex = dict["StormReference"][4:].find('About') + + # Assume we only have one NHC reference point by default + nhcReference = dict["StormReference"] + + self.debug_print("referenceIndex = %s" % (referenceIndex), 1) + + # If we have more than one NHC reference point + if referenceIndex != -1: + + # Adjust this index to account for the first 'About' + referenceIndex += 4 + + # Only keep the first NHC reference location + nhcReference = dict["StormReference"][:referenceIndex] + + # Convert any abbreviated bearings to full words + nhcReference = self._expandBearings(nhcReference) + + # Add only first one to the summary paragraph for brevity + dict["StormCenter"] = "%s THIS WAS %s. " % \ + (dict["StormCenter"], + self._removeKM(nhcReference.strip())) + + #---------------------------------------------------------------- + # Add the maximum sustained wind speed phrase + + dict["StormCenter"] = "%s MAXIMUM SUSTAINED WINDS WERE %s." 
% \ + (dict["StormCenter"], + self._removeKM(dict["StormIntensity"])) + + #---------------------------------------------------------------- + # Now add the storm motion + + if haveStormMotion: + dict["StormCenter"] = "%s THE STORM MOTION WAS %s." % \ + (dict["StormCenter"], + self._removeKM(dict["StormMotion"])) + + #################################################################### + #################################################################### + # 12/15/2010 (MHB) - we should not need this anymore, but will + # leave it for the 2011 season as a fail-safe. + #-------------------------------------------------------------------- + # Search the product for the legacy storm info section - in case + # the new NHC style was not found + + stormInfoSearch = \ + re.search('(?is)(AT +(\d+ +[AP]M [AECMPH][DS]T)' + + '\.{3}\d+ *(Z|UTC)\.{3}THE (CENTER|REMNANTS|EYE) .+)', + tcp) + + # Display some debug info - if flag is set + self.debug_print("storminfoSearch = '%s'" % (stormInfoSearch)) + if stormInfoSearch is not None: + self.debug_print("\n\n%s\n" % + (self._pp.pformat(stormInfoSearch.groups())), 1) + + # If we found the storm info section of the product + if stormInfoSearch is not None: + for group in stormInfoSearch.groups(): + self.debug_print('-'*50, 1) + self.debug_print("%s\n" % (group), 1) + + # Clean this section up a bit. Keep each paragraph separate + # by a single , but remove all others as well as extra + # spaces. 
Then store this text in the TCP dictionary + dict["StormInfo"] = stormInfoSearch.group(1).strip() + + # Set aside the first paragraph of the storm info since it + # contains the TPC-provided reference point - if we haven't + # already found this information + if len(dict["StormCenter"].strip()) == 0: + dict["StormCenter"] = dict["StormInfo"].split('\n')[0] + + # If we have not already found the advisory time - get it from + # the legacy format + if dict["StormTime"] == "|* Enter storm time *| ": + dict["StormTime"] = stormInfoSearch.group(2).strip() + + # Set aside the first paragraph of the storm info since it + # contains the TPC-provided reference point - if we haven't + # already found this information + if len(dict["StormCenter"].strip()) == 0: + dict["StormCenter"] = dict["StormInfo"].split('\n')[0] + + #=================================================================== + # Now try to grab the repeated storm information summary + + repeatInfo = re.search("(?is)(\.{3}SUMMARY.+?\.)\n *\n", + tcp) + # If we cannot find the summary, try to find a "repeating" section + if repeatInfo is None: + repeatInfo = re.search("(?is)(REPEATING.+?\.)\n *\n", tcp) + self.debug_print(self._pp.pformat(repeatInfo), 1) + + # If we found the repeated storm information summary + if repeatInfo is not None: + + # Clean up this paragraph + summary = repeatInfo.group(1).strip() + + #=============================================================== + # Now try to grab the latest storm location - if we need it + + if dict["StormLat"] == "" or dict["StormLon"] == "": + + # Search the product for the storm location section + locationSearch = \ + re.search('(?is).+LOCATION.*?(\d+\.\d+ *N).+?' 
+ + '(\d+\.\d+ *[EW])', summary) + + # Display some debug info - if flag is set + self.debug_print("locationSearch = '%s'" % (locationSearch), 1) + if locationSearch is not None: + self.debug_print("\n\n%s\n" % (self._pp.pformat(locationSearch.groups())), 1) + + # If we found the storm location section of the product + if locationSearch is not None: + + # Pick off the storm latitude and longitude + dict["StormLat"] = locationSearch.group(1).strip() + dict["StormLon"] = locationSearch.group(2).strip() + + #=============================================================== + # Now try to grab the latest storm intensity - if we need it + + if dict["StormIntensity"] == "": + + # Search the product for the storm intensity section + intensitySearch = \ + re.search('(?i).+MAXIMUM SUST.+?(\d+ *MPH)', summary) + + # Display some debug info - if flag is set + self.debug_print("intensitySearch = '%s'" % + (intensitySearch), 1) + + # If we found the storm intensity section of the product + if intensitySearch is not None: + + # Pick off the storm intensity + dict["StormIntensity"] = intensitySearch.group(1).strip() + + #=============================================================== + # Now try to grab the latest storm motion - if we need it + + if dict["StormMotion"] == "": + + # Search the product for the storm motion section + motionSearch = re.search('(?i).+MOVEMENT\.{3}(.+?\d+ MPH)', + summary) + if motionSearch is None: + motionSearch = re.search('(?i).+MOVEMENT(.+?\d+.+?)\.', + summary) + + # Display some debug info - if flag is set + self.debug_print("motionSearch = '%s'" % (motionSearch), 1) + + # If we found the storm motion section of the product + if motionSearch is not None: + + # Pick off the storm motion + motion = motionSearch.group(1).strip() + + # Fix the motion (i.e no '...') + dict["StormMotion"] = re.sub('(?i)\.{3}', ' the ', motion) + + # end possible removal - 12/15/2010 (MHB) + #################################################################### + 
#################################################################### + + #======================================================================== + # Display final decoded information from TCP + + self.debug_print("*" *80, 1) + self.debug_print("Final TCP Info...\n", 1) + self.debug_print('dict["StormType"] = %s' % (dict["StormType"]), 1) + self.debug_print('dict["StormName"] = %s' % (dict["StormName"]), 1) + self.debug_print('dict["StormTime"] = %s' % (dict["StormTime"]), 1) + self.debug_print('dict["StormLat"] = %s' % (dict["StormLat"]), 1) + self.debug_print('dict["StormLon"] = %s' % (dict["StormLon"]), 1) + self.debug_print('dict["StormReference"] = %s' % (dict["StormReference"]), 1) + self.debug_print('dict["StormIntensity"] = %s' % (dict["StormIntensity"]), 1) + self.debug_print('dict["StormMotion"] = %s' % (dict["StormMotion"]), 1) + self.debug_print('dict["StormInfo"] = %s' % (dict["StormInfo"]), 1) + self.debug_print('dict["StormCenter"] = %s' % (dict["StormCenter"]), 1) + + # Return the dictionary will all the information we found in the TCP + return dict + + def _decodeStormInfo(self, stormDict): + self._stormTime = "|* Enter Storm Time *| " + self._stormLat = "|* Enter Storm Lat *| " + self._stormLon = "|* Enter Storm Lon *| " + self._stormLocation = "|* Enter Storm Location *| " + self._stormReference = "" + self._stormLocalReferences = "" + para = stormDict.get("StormCenter", "") + self.debug_print("para %d %s" % (len(para), para), 1) + if len(para)<= 0: + return + + # Create the time string + self._stormTime = self._formatLocalTime(para, self._allAreas()) + + # Find stormLat, stormLon and stormLocation + # e.g. LATITUDE 15.7 NORTH...LONGITUDE 80.0 WEST + stormLocation ="" + stormLat = None + stormLon = None + + # Make a pattern to find the latest storm location + coordPtn = re.compile("(?i)(LATITUDE ([\d\.]+) ?((N|S)(O[RU]TH)?))..." 
+ + "(AND )?(LONGITUDE ([\d\.]+) ?((W|E)([AE]ST)?)).+?") +## + "OR ((ABOUT )?.+)") + + # Make a pattern to find the NHC reference location + refPtn = re.compile("(?i)(WAS|OR) ((ABOUT )?\d+ MILES.+?" + + "(NORTH|SOUTH|EAST|WEST).+?)\.") + + # Try to find these patterns in the text + coordPtnMatch = coordPtn.search(para) + self.debug_print("+" * 90, 1) + self.debug_print("coordinate search...", 1) + if coordPtnMatch is not None: + self.debug_print("\n\n%s|n" % (self._pp.pformat(coordPtnMatch.groups())), 1) + + refPtnMatch = refPtn.search(para) + self.debug_print("reference search...", 1) + if refPtnMatch is not None: + self.debug_print("\n\n%s|n" % (self._pp.pformat(refPtnMatch.groups())), 1) + + # If we found the coordinates we were after + if coordPtnMatch is not None: + + # If we have the correct paragraph, set aside the latitude and + # longitude info as numbers + self._stormLat = float(coordPtnMatch.group(2)) + self._stormLon = float(coordPtnMatch.group(8)) # was 7 + + # Adjust latitude and longitude as need for "other" hemispheres + if coordPtnMatch.group(4) in ["S", "s"]: + self._stormLat *= -1.0 + + if coordPtnMatch.group(10) in ["W", "w"]: + self._stormLon *= -1.0 + + # Construct the storm location pair and remove the "Latitude " and "Longitude " text + self._stormLocation = (coordPtnMatch.group(1)[9:], coordPtnMatch.group(7)[10:]) + + # If we found the primary NHC reference we were after + if refPtnMatch is not None: + + # Set aside all the geographic reference text +## stormReference = coordPtnMatch.group(11) + stormReference = refPtnMatch.group(2) + + # Watch out for some grammar gotchas with this reference + stormReference = re.sub("(?i)^(WAS|OR) ", "", stormReference) + + # See if there are multiple geographic references + if re.search('(?i) and ', stormReference) is not None: + + # Yes there are multiple references, so only keep the + # first one + stormReference = re.sub("(?i) AND .+", "", stormReference) + + # Also remove any metric distances + 
self._stormReference = self._removeKM(stormReference) + + # Miles/km from chosen local reference + self._stormLocalReferences = self._calcLocalReferences( + self._stormLat, self._stormLon) + + self.debug_print("stormLocalRefs = %s" % (self._stormLocalReferences), 1) + + # Compare the NHC reference to the local references + for localRef in self._stormLocalReferences: + + self.debug_print("self._stormReference = '%s', localRef = '%s'" % (self._stormReference, localRef), 1) + + # Get the locations from these statements + nhcRef = re.search('(?i)(north|south|east|west) of (.+)', + self._stormReference) + testRef = re.search('(?i)(north|south|east|west) of (.+)', + localRef) + + if nhcRef is not None: + self.debug_print("nhcRef = '%s'" % (nhcRef.group(2)), 1) + + if testRef is not None: + self.debug_print("testRef = '%s'" % (testRef.group(2)), 1) + + # If we have a local reference that matches the national + # center reference + if testRef is not None and nhcRef is not None and \ + re.search("(?i)%s" % (testRef.group(2).strip()), + nhcRef.group(2)) is not None: + + # Do not include the national reference + self._stormReference = "" + + def _expandBearings(self, text): + # Convert any abbreviated bearings to full words + text = text.replace(' n ', ' North ') + text = text.replace(' nne ', ' North-northeast ') + text = text.replace(' ne ', ' Northeast ') + text = text.replace(' ene ', ' East-northeast ') + text = text.replace(' e ', ' East ') + text = text.replace(' ese ', ' East-southeast ') + text = text.replace(' se ', ' Southeast ') + text = text.replace(' sse ', ' South-southeast ') + text = text.replace(' s ', ' South ') + text = text.replace(' ssw ', ' South-southwest ') + text = text.replace(' sw ', ' Southwest ') + text = text.replace(' wsw ', ' West-southwest ') + text = text.replace(' w ', ' West ') + text = text.replace(' wnw ', ' West-northwest ') + text = text.replace(' nw ', ' Northwest ') + text = text.replace(' nnw ', ' North-northwest ') + + return text 
+ + # Modified 12/15/2010 (MHB) - modified to recognize the new way NHC will + # present metric speeds. Will continue to recognize the "old" way for + # testing purposes as well. + def _removeKM(self, words): + # Remove references to KM e.g. + # 420 KM... 100 KM/HR... + + self.debug_print("words = '%s'" % (words), 1) + + kmSearch = re.compile("\.\.\. *[0-9]+ +(KM|KM/HR?) *\.?\.?\.?") + + # Replace metric reference with a space to keep words from mashing + # together. + words = kmSearch.sub(" ", words) + + # Make sure we don't have any double space issues with this text + doubleSpaces = re.findall(' +', words) + for doubleSpace in doubleSpaces: + words = re.sub(doubleSpace, ' ', words) + + self.debug_print("\tfinal words = '%s'" % (words), 1) + return words + + def _cleanText(self, text=''): + # Cleans up text for easier string searches, but retains paragraphs + + # Replace all single characters with a space + text = re.sub("\n(?! *\n)", " ", text) + + # Ensure all text is only single-spaced + text = re.sub(" +", " ", text) + + # Remove all spaces at the start of a new paragraph + text = re.sub("(?m)^ +", "", text) + + # Do not allow any spaces after an ellipsis + text = re.sub("\.{3} +", "...", text) + + # Finally, ensure the paragraphs are put back + text = re.sub("\n", "\n\n", text) + + # Return the cleaned-up text + return text + + def _calcLocalReferences(self, lat0, lon0): + localRefs = [] + refList = self._LocalReferencePoints + #refList.append(("Grand Cayman", (19.2, -81.4))) + # Limit reference points + refLimit = self._referencePointLimit() + if len(refList) > refLimit: + refList = refList[0:refLimit] + for label, latLon in refList: + lat, lon = latLon + localRef = self._calcReference(lat0, lon0, lat, lon) + localRef = localRef + " of " + label + localRef = localRef.replace(",","") + localRefs.append(localRef) + return localRefs + + def _calcReference(self, lat0, lon0, lat1, lon1): + #return self._oldCalcReference(lat0, lon0, lat1, lon1) + distKm = 
self._distanceFromLatLon(lat0, lon0, lat1, lon1) + distMph = distKm * 0.62 + # Round to nearest 10 + distMph = self.round(distMph, "Nearest", 10) + distMph_str = repr(int((distMph/10)*10)) + #distKm_str = `int((distKm/10)*10)` + direction = self._bearing(lat1, lon1, lat0, lon0) + direction = self._dirInEnglish(direction) + localRef ="About "+distMph_str+" miles "+direction + self.debug_print("localRef = %s" % (localRef), 1) + return localRef + + # Returns the distance from lat0, lon0 to lat1, lon1 in kilometers + def _distanceFromLatLon(self, lat0, lon0, lat1, lon1): + R = 6371.0 + lat0 = math.radians(lat0) + lon0 = math.radians(lon0) + lat1 = math.radians(lat1) + lon1 = math.radians(lon1) + dist = math.acos(math.sin(lat0) * math.sin(lat1) + math.cos(lat0) * math.cos(lat1) * math.cos(lon1 - lon0)) * R + return dist + + def _bearing(self, lat0, lon0, lat1, lon1): + + dlat = math.radians((lat0 - lat1)) + dlon = math.radians((lon0 - lon1)) + + y = math.sin(dlon) * math.cos(math.radians(lat1)) + x = math.cos(math.radians(lat0)) * math.sin(math.radians(lat1)) - \ + (math.sin(math.radians(lat0)) * math.cos(math.radians(lat1)) * math.cos(dlon)) + + direction = math.degrees(math.atan2(x, y)) - 90.0 + if direction < 0.0: + direction = direction + 360.0 + direction = direction % 360 + + return direction + + def _dirInEnglish(self, direction): + dirList = ["north", "north-northeast", "northeast", "east-northeast", + "east", "east-southeast", "southeast", "south-southeast", + "south", "south-southwest", "southwest", "west-southwest", + "west", "west-northwest", "northwest", "north-northwest"] + dirIndex = int((direction + 11.25) / 22.5) + if dirIndex > 15: + dirIndex = dirIndex - 16 + return dirList[dirIndex] + + ############################################################### + ### GUI related methods + + def _overview_list(self): + if self._site == "HFO": + stormInfoOptions = ["TCPCP1", "TCPCP2", "TCPCP3", "TCPCP4", "TCPCP5"] + else: + stormInfoOptions = ["TCPAT1", "TCPAT2", 
"TCPAT3", "TCPAT4", "TCPAT5"] + + stormInfoOptions.append("Enter PIL below (e.g. WRKTCP):") + + return [ + { + "name": "ImpactsAnticipated", + "label": "Step 1. Potential Impacts Anticipated?", + "options": [ + ("Yes (NOTE: Any case other than dispel rumors must\n" + "have current TCP for storm in question)", True), + ("No (Dispel Rumors)", False), + ], + "default": "Yes (NOTE: Any case other than dispel rumors must\n" + "have current TCP for storm in question)", + }, + { + "name": "StormInfo", + "label": "Step 2. Obtain Storm Type/Name/Info", + "options": stormInfoOptions, + "entryField": " ", + }, + { + "name":"IncludedImpacts", + "label": "Step 3. Potential Impacts to Include and Order", + "optionType": "check", + "options": [ + ("Wind", 'windSection'), + ("Surge", 'surgeSection'), + ("Flooding Rain", 'floodingRainSection'), + ("Tornadoes", 'tornadoSection'), + ("Other Coastal Hazards", 'coastalHazardsSection') + ], + "default": ["Wind", "Surge", "Flooding Rain", "Tornadoes"], + }, + { + "name":"LocalReferencePoints", + "label": "Step 4. Locate Storm Relative to Local Reference Points\n(choose at most "\ + +self._referencePointLimit()[1]+")", + "optionType": "check", + "options": self._localReferencePoints(), + "default": self._localReferencePoints_defaults(), + }, + { + "name":"GeneralOnsetTime", + "label": "Step 5. General Time to Onset", + "options": [ + ("Watch", 'check plans'), + ("Warning", 'complete preparations'), + ("Conditions/Ongoing", 'hunker down'), + ("Recovery (After last TCV)", 'recovery'), + ], + }, + { + "name": "NextUpdate", + "label": "Step 6. Indicate Next Update Time", + "options": [ + ("As Conditions Warrant", "Conditions"), + ("Last Issuance", "LastIssuance"), + ("Enter Approximate Time (below)", "Enter") + ], + "default": "Enter Approximate Time (below)", + "entryField": " e.g. 6 AM EDT", + }, + { + "name": "MainHeadline", + "label": "Step 7. 
Input Main Headline (required)", + "options": [ + ("Enter Unique Headline (to right)", "Enter"), + ("Use Previous HLS Headline", "UsePrev"), + ("Use Latest TCP Headline", "UseTCP"), + ], + "entryField": " ", + }, + ] + + def _displayGUI(self, infoDict=None): + dialog = Overview_Dialog(self, "HLS", infoDict) + status = dialog.status() + LogStream.logVerbose("status="+status) + if status == "Cancel": + return None + else: + return dialog.getVarDict() + + def _frame(self, text): + return "|* " + text + " *|" + + +class Overview_Dialog(HLSTCV_Common.Common_Dialog): + def __init__(self, parent, title, infoDict=None): + HLSTCV_Common.Common_Dialog.__init__(self, parent, title, infoDict) + + def body(self, master): + # build the main display dialog + tkObject_dict = self._tkObject_dict + overviewList = self._parent._overview_list() + fontDict = self._parent._font_GUI_dict() + + # OVERVIEW header + headerFG, headerFont = fontDict["headers"] + frame = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=1) + frame.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) + + numBoxes = 3 + + boxes = [] + for i in range(numBoxes): + newBox = tkinter.Frame(master) + newBox.pack(side=tkinter.TOP, expand=tkinter.NO, + fill=tkinter.Y, anchor=tkinter.W) + boxes.append(newBox) + + for infoDict in overviewList: + name = infoDict["name"] + label = infoDict["label"] + options = infoDict.get("options", []) + entryField = infoDict.get("entryField", None) + default = infoDict.get("default", None) + optionType = infoDict.get("optionType", "radio") + + index = overviewList.index(infoDict) + if index in [0,1,2]: + boxNum = 0 + buttonSide=tkinter.TOP + frameSide = tkinter.LEFT + elif index in [3,4,5]: + boxNum = 1 +# buttonSide=Tkinter.LEFT +# frameSide=Tkinter.TOP + buttonSide=tkinter.TOP + frameSide=tkinter.LEFT + else: + boxNum = 2 + buttonSide=tkinter.TOP + frameSide=tkinter.LEFT + + box = boxes[boxNum] + + if name == "MainHeadline": entryField = None + + if name == 
"IncludedImpacts": + tkObject_dict[name] = self._makeStep3( + box, label, options, default, buttonSide=buttonSide, frameSide=frameSide, + entryField=entryField, headerFG=headerFG, + headerFont=headerFont) + else: + tkObject_dict[name], entryObject = self._makeRadioOrCheckList( + box, label, options, default, buttonSide=buttonSide, frameSide=frameSide, + entryField=entryField, headerFG=headerFG, + headerFont=headerFont, boxType=optionType) + if entryObject is not None: + tkObject_dict[self._entryName(name)] = entryObject + + if name == "MainHeadline": + frame = tkinter.Frame(box, relief=tkinter.GROOVE, borderwidth=1) + tkObject_dict[self._entryName(name)] = self._makeEntry(frame, "", 80) + frame.pack(fill=tkinter.X, expand=tkinter.YES) + + # Buttons + frame = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=1) + self._makeButtons(frame) + frame.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) + + def _makeStep3(self, master, label, elementList, default=None, + buttonSide=tkinter.TOP, frameSide=tkinter.LEFT, entryField=None, + headerFG=None, headerFont=None, + listFrameRelief=tkinter.GROOVE): + listFrame = tkinter.Frame(master, relief=listFrameRelief, borderwidth=1) + + if label != "": + listLabel = tkinter.Label(listFrame, text=label, fg=headerFG, font=headerFont) + listLabel.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO, padx=10) + + ivar = tkinter.IntVar() + ivarEntryPairList = [] + for element in elementList: + index = elementList.index(element) + if type(element) is tuple: + element, key = element + + ivar = tkinter.IntVar() + if default is not None and element in default: ivar.set(1) + else: ivar.set(0) + + buttonFrame = tkinter.Frame(listFrame) + + button= tkinter.Checkbutton(buttonFrame, variable=ivar, text=element) + button.grid(row=0, column=0, sticky=tkinter.W+tkinter.E) + button.grid_columnconfigure(0, weight=1) + + svar = tkinter.StringVar() + entry = tkinter.Entry(buttonFrame, textvariable=svar, relief=tkinter.SUNKEN, 
width=3) + entry.grid(row=0, column=1, sticky=tkinter.E) + + ivarEntryPairList.append((ivar, svar)) + + buttonFrame.pack(side=buttonSide, fill=tkinter.X, expand=tkinter.YES, padx=4) + + noteLabel = tkinter.Label(listFrame, text="Note: Check Hazards to include (left) and order number (right)") + noteLabel.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO, padx=10) + + # packing + listFrame.pack(side=frameSide, expand=tkinter.NO, fill=tkinter.Y) #, anchor=Tkinter.N) + + return ivarEntryPairList + + def _makeButtons(self, master): + frame = tkinter.Frame(master) + buttonList = self._parent._GUI1_configDict().get("buttonList", []) + for button, label in buttonList: + if button == "Run": + command = self.okCB + else: # Cancel + command = self.cancelCB + tkinter.Button(frame, text=label, command=command, width=10, + state=tkinter.NORMAL).pack(side=tkinter.LEFT, pady=5, padx=10) + frame.pack() + + def okCB(self): + # pull the data from the tkObject_dict before they get toasted + tkObject_dict = self._tkObject_dict + overviewList = self._parent._overview_list() + print("in okCB!") + for infoDict in overviewList: + name = infoDict["name"] + label = infoDict["label"] + options = infoDict.get("options", []) + entryField = infoDict.get("entryField", None) + default = infoDict.get("default", None) + optionType = infoDict.get("optionType", "radio") + + if optionType == "check": + checkList = [] + ivarList = tkObject_dict[name] + for i in range(len(options)): + if name == "IncludedImpacts": + ivar, svar = ivarList[i] + if ivar.get(): + checkList.append((options[i], svar.get())) + else: + if ivarList[i].get(): + print(("adding option = %s" % (self._pp.pformat(options[i])))) + checkList.append(options[i]) + value = checkList + self._setVarDict(name, value) + else: + value = tkObject_dict[name].get() + self._setVarDict(name, value, options) + + if entryField is not None: + entryName = self._entryName(name) + self._setVarDict(entryName, tkObject_dict[entryName].get()) + # 
close window and set status "Ok" + self._status = "Ok" + self.withdraw() + self.ok() + + +class LegacyFormatter(): + def __init__(self, textProduct): + self._textProduct = textProduct + self.TAB = " "*self._textProduct._tabLength + self._tpc = HLSTCV_Common.TextProductCommon() + + + def execute(self, productDict): + self.productDict = productDict + productParts = self._tpc.getVal(productDict, 'productParts', []) + text = self._processProductParts(productDict, productParts.get('partsList')) + return text + + def _processProductParts(self, productDict, productParts, skipParts=[]): + ''' + Adds the product parts to the product + @param productDict -- dictionary of information -- could be the product dictionary or a sub-part such as a segment + @param skipParts -- necessary to avoid repetition when calling this method recursively + @param productParts -- list of instances of the ProductPart class with information about how to format each product part + @return text -- product string + ''' + text = "" + self._textProduct.debug_print("productParts = %s" % (productParts)) + for part in productParts: + valtype = type(part) + if valtype is str: + name = part + elif valtype is tuple: + name = part[0] + infoDicts = part[1] + self._textProduct.debug_print("name = %s" % (name), 1) + self._textProduct.debug_print("infoDicts =\n\n%s\n" % (self._pp.pformat(infoDicts)), 1) + newtext = self.processSubParts(productDict.get(name), infoDicts) + self._textProduct.debug_print("newtext type = %s" % (type(newtext)), 1) + self._textProduct.debug_print("newtext =\n\n%s\b" % (self._pp.pformat(newtext)), 1) + text += newtext + continue + elif valtype is list: + self._textProduct.debug_print('GOT HERE -- found list', 1) + self._tpc.flush() + # TODO THIS SHOULD BE REMOVED AFTER THE REFACTOR OF HazardServicesProductGenerationHandler.JAVA + tup = (part[0], part[1]) + part = tup + name = part[0] + + + if name == 'wmoHeader': + text += self.processWmoHeader(productDict['wmoHeader']) + elif name == 
'ugcHeader': + text += productDict['ugcHeader'] + "\n\n" + elif name == 'productHeader': + text += self.processProductHeader(productDict['productHeader']) + elif name == 'vtecRecords': + for vtecString in productDict['vtecRecords']: + text += vtecString + '\n' + elif name == 'areaList': + text += productDict['areaList'] + "\n\n" + elif name == 'issuanceTimeDate': + text += productDict['issuanceTimeDate'] + '\n\n' + elif name == 'summaryHeadlines': + text += self.processSummaryHeadlines(productDict['summaryHeadlines']) + elif name == "newInformationHeader": + header = "NEW INFORMATION" + text += header + "\n" + "-"*len(header) + "\n\n" + elif name == "changesHazards": + text += "* CHANGES TO WATCHES AND WARNINGS:\n" + \ + self.processHazards(productDict['changesHazards'], isChangesHazards=True) + elif name == "currentHazards": + text += "* CURRENT WATCHES AND WARNINGS:\n" + \ + self.processHazards(productDict['currentHazards'], isChangesHazards=False) + elif name == "stormInformation": + text += self.processStormInformation(productDict['stormInformation']) + elif name == "situationOverview": + text += self.processSituationOverview(productDict['situationOverview']) + elif name == "sigPotentialImpacts": + header = "POTENTIAL IMPACTS" + text += header + "\n" + "-"*len(header) + "\n\n" + if not self._textProduct._ImpactsAnticipated: + text += "None\n\n" + elif name in ['windSection', 'surgeSection', 'floodingRainSection', 'tornadoSection']: + text += self.processHazardsSection(productDict[name]) + elif name == "coastalHazardsSection": + text += "* OTHER COASTAL HAZARDS:\n" + text += self._textProduct.indentText(productDict[name], maxWidth=self._textProduct._lineLength) + "\n" + elif name == "preparednessSection": + header = productDict[name]['title'] + text += header + "\n" + "-"*len(header) + "\n\n" + if productDict[name]['genericAction'] is not None: + text += self._textProduct.indentText(productDict[name]['genericAction'], maxWidth=self._textProduct._lineLength) + 
"\n" + elif name == "evacuationStatements": + text += "* " + productDict[name]['title'].upper() + ":\n|* " + for statement in productDict[name]['statements']: + text += self._textProduct.indentText(statement, maxWidth=self._textProduct._lineLength) + "\n" + text += "*|\n" + elif name == "otherPreparednessActions": + text += "* " + productDict[name]['title'].upper() + ":\n|* " + for action in productDict[name]['actions']: + text += self._textProduct.indentText(action, maxWidth=self._textProduct._lineLength) + "\n" + text += "*|\n" + elif name == "additionalSourcesInfo": + text += "* " + productDict[name]['title'].upper() + ":\n" + for source in productDict[name]['sources']: + text += self._textProduct.indentText(source, maxWidth=self._textProduct._lineLength) + text += "\n" + elif name == "nextUpdate": + header = "NEXT UPDATE" + text += header + "\n" + "-"*len(header) + "\n\n" + text += self._textProduct.indentText(productDict[name], maxWidth=self._textProduct._lineLength) + "\n" + elif 'sectionHeader' in name: + text += "* " + productDict[name].upper() + "\n" + elif 'Subsection' in name: + text += self.processSubsection(productDict[name]) + elif name == 'infoSection': + text += self.processInfoSection(productDict['infoSection']) + elif name == 'endProduct': + text += '$$\n' + elif name == 'CR': + text += '\n' + elif name == 'doubleAmpersand': + text += '&&\n' + elif name not in self._noOpParts(): + textStr = productDict.get(name) + self._textProduct.debug_print("name = %s" % (name), 1) + self._textProduct.debug_print("textStr = '%s'" % (textStr), 1) + if textStr: + text += textStr + '\n' + return text + + def _noOpParts(self): + ''' + These represent product parts that should be skipped when calling product part methods. + They will be handled automatically by the formatters. 
+ ''' + return ["setup_segment"] #['CR', 'endProduct', 'endSegment', 'issuanceDateTime', 'doubleAmpersand'] + + def processWmoHeader(self, wmoHeader): + text = wmoHeader['TTAAii'] + ' ' + wmoHeader['fullStationID'] + ' ' + wmoHeader['ddhhmmTime'] + '\n' + text += wmoHeader['productID'] + wmoHeader['siteID'] + '\n' + return text + + def processProductHeader(self, headerDict): + if not self._textProduct._ImpactsAnticipated: + text = "Tropical Local Statement\n" + text += "National Weather Service " + headerDict['cityState'] + '\n' + text += headerDict['issuanceTimeDate'] + '\n\n' + + else: + text = headerDict['stormType'] + ' ' + headerDict['stormName'] + ' ' + headerDict['productName'] + + advisoryText = '' + if headerDict['advisoryType'] is not None and \ + headerDict['advisoryType'].lower() in ["intermediate", "special"]: + advisoryText = headerDict['advisoryType'] + ' ' + + if headerDict['advisoryNumber'] is not None: + advisoryText += 'Advisory Number ' + headerDict['advisoryNumber'] + + if len(advisoryText) > 0: + if len(text + " " + advisoryText) > self._textProduct._lineLength: + text += '\n' + else: + text += ' ' + + text += advisoryText + '\n' + else: + text += '\n' + + text += "National Weather Service " + headerDict['cityState'] + " " + headerDict['stormNumber'] + '\n' + text += headerDict['issuanceTimeDate'] + '\n\n' + + return text + + def processSummaryHeadlines(self, headlinesList): + if headlinesList in [[], [""]]: + text = "**" + self._textProduct._frame("Enter headline here") + "**\n\n" + else: + text = "" + for headline in headlinesList: + text += self._textProduct.indentText("**" + headline + "** ", + maxWidth=self._textProduct._lineLength) + + text = self._textProduct._frame(text) + "\n\n" + + return text + + def processHazards(self, hazardsList, isChangesHazards): + text = "" + + if len(hazardsList) == 0: + if isChangesHazards and \ + self._textProduct._ImpactsAnticipated and \ + self._textProduct._GeneralOnsetTime == "recovery": + text = 
self.TAB + "- All watches and warnings have been canceled\n" + else: + text = self.TAB + "- None\n" + else: + # Group the hazards together by status, areas and headlines + groupedHazards = dict() + + # Grab the appropriate headlines for this section + actions = ['NEW', 'EXA'] # always want these + if isChangesHazards: + actions.append('CAN') # only want these for 'changes' + else: + actions.append('CON') # only want these for 'current' + + for hazard in hazardsList: + # If this is an action we want, update the active headline + if hazard['act'] in actions: + headline = hazard['phensig'][:4] + + # Group areas together which have identical hazards + groupedHazards = self._addToGroupedHazards(hazard, headline, groupedHazards) + + self._textProduct.debug_print("groupedHazards = %s" + % self._textProduct._pp.pformat(groupedHazards), 1) + + groupedHazards = self._consolidateGroupedHazards(groupedHazards, isChangesHazards) + + self._textProduct.debug_print("consolidated groupedHazards = %s" + % self._textProduct._pp.pformat(groupedHazards), 1) + + self._textProduct.debug_print("\n\nCreating text bullets for %s section..." 
+ % ("CHANGES" if isChangesHazards else "CURRENT"), 1) + + # Create the hazard text using the grouped hazards + for (sortedAreas, hazards) in list(groupedHazards.items()): + self._textProduct.debug_print("Creating text for hazards covering %s (%s)" + % (sortedAreas, self._areaWords(sortedAreas)), 1) + + if isChangesHazards: + hazardTextParts = self._createChangesTextParts(hazards) + else: + hazardTextParts = self._createCurrentTextParts(hazards) + + self._textProduct.debug_print("hazardTextParts = %s" + % self._textProduct._pp.pformat(hazardTextParts), 1) + + groupedHazardText = self._textProduct.punctuateList(hazardTextParts) + groupedHazardText = groupedHazardText.replace(" and The ", " and the ") + groupedHazardText = groupedHazardText.replace(" and A ", " and a ") + groupedHazardText = groupedHazardText.replace(", The ", ", the ") + groupedHazardText = groupedHazardText.replace(", A ", ", a ") + + groupedHazardText += " for " + self._areaWords(sortedAreas) + + self._textProduct.debug_print("groupedHazardText = '%s'" % groupedHazardText, 1) + + text += self._textProduct.indentText(groupedHazardText, + indentFirstString = self.TAB + "- ", + indentNextString = self.TAB + " ", + maxWidth=self._textProduct._lineLength) + + self._textProduct.debug_print("text = '%s'" % text, 1) + + text += "\n" + + return text + + def _addToGroupedHazards(self, hazard, headline, groupedHazards): + # Only consider certain hazard statuses + status = hazard['act'] + if status not in ["CON", "NEW", "EXA", "CAN", "UPG"]: + return groupedHazards + + self._textProduct.debug_print("\n\nIn _addToGroupedHazardsList, adding a '%s' hazard" + % status, 1) + + areas = hazard['id'] + sortedAreas = tuple(sorted(areas)) + previousPhenSig = None + upgrades = dict() + + if status in ["NEW", "EXA"] and hazard.get('upgradeFrom', None) is not None: + + # The phensig of the hazard that got upgraded to this headline + previousPhenSig = hazard['upgradeFrom']['phensig'] + upgrades[headline] = 
set([previousPhenSig]) + + + self._textProduct.debug_print("Areas affected %s (%s)" + % (sortedAreas, self._areaWords(sortedAreas)), 1) + self._textProduct.debug_print("Headline = %s" % headline, 1) + self._textProduct.debug_print("previousPhenSig = %s" % previousPhenSig, 1) + self._textProduct.debug_print("upgrades = %s" % upgrades, 1) + + self._textProduct.debug_print("Trying to find where to put the hazard info...", 1) + + #======================================================================= + + if sortedAreas not in groupedHazards: + + self._textProduct.debug_print("Creating a new areas entry...", 1) + + groupedHazards[sortedAreas] = { + status: ([headline], upgrades), + } + + # If we already have a record for this area + else: + + self._textProduct.debug_print("Adding to an existing areas entry...", 1) + + if status == "UPG": + + # Reset the "previous" phensig + previousPhenSig = hazard['phensig'][:4] + self._textProduct.debug_print("Now previousPhenSig = %s" % \ + previousPhenSig, 1) + + # Determine the upgrade + for action in groupedHazards[sortedAreas]: + if action not in ['NEW', 'EXA']: + continue + + sortedHeadlines, upgrades = groupedHazards[sortedAreas][action] + + foundUpg = False + for hl in sortedHeadlines: + if hl in VTECTable.upgradeHazardsDict and \ + previousPhenSig in VTECTable.upgradeHazardsDict[hl]: + headline = hl + foundUpg = True + break + + if foundUpg: + # add to upgrades and we are done + upgrades.setdefault(headline, set()).add(previousPhenSig) + break + + else: + (sortedHeadlines, upgrades) = groupedHazards[sortedAreas]\ + .setdefault(status, ([], {})) + + sortedHeadlines.append(headline) + sortedHeadlines.sort(self._sortHazardsType) + + if (previousPhenSig is not None and + headline in VTECTable.upgradeHazardsDict and + previousPhenSig in VTECTable.upgradeHazardsDict[headline]): + + # add to upgrades + upgrades.setdefault(headline, set()).add(previousPhenSig) + + + return groupedHazards + + # Method to sort tropical headlines by 
priority, then type + def _sortHazardsType(self, a, b): + + # Warnings always go first + if ".W" in a and ".W" not in b: + return -1 + elif ".W" not in a and ".W" in b: + return 1 + else: + + # Both have the same priority of hazard, now sort by type + + # Storm Surge headlines first + # just need to look for Warning vs. Watch) + if "SS." in a and "SS." not in b: + return -1 + elif "SS." not in a and "SS." in b: + return 1 + elif "SS." in a and "SS." in b: + return 0 + + # Hurricane headlines next + # (Surge already accounted for, just need to check for Tropical) + if "HU." in a and "TR." in b: + return -1 + elif "TR." in a and "HU." in b: + return 1 + elif "HU." in a and "HU." in b: + return 0 + + # Tropical Storm headlines last + if "TR." not in a and "TR." in b: + return -1 + elif "TR." in a and "TR." not in b: + return 1 + elif "TR." in a and "TR." in b: + return 0 + + + def _consolidateGroupedHazards(self, groupedHazards, isChangesHazards): + """Combine areas that share the same headlines and status""" + + self._textProduct.debug_print("Trying to consolidate grouped hazards...", 1) + + newGroupedHazards = dict() + + for sortedAreas1, hazards1 in list(groupedHazards.items()): + self._textProduct.debug_print("Working on areas %s..." 
+ % self._textProduct._pp.pformat(sortedAreas1), 1) + + # Clean up use of EXA + if not isChangesHazards: + newHazards1 = dict() + + # Current section, we don't care about upgrades + newUpgrades = dict() + + for (action, (hazard, upgrades)) in list(hazards1.items()): + + if action in ["NEW", "EXA"]: + action = "CON" + + if action in newHazards1: + newHazards1[action][0].extend(hazard) + newHazards1[action][0].sort(self._sortHazardsType) + +# newHazards1[action] = (newHazards1[action][0], dict()) + +# for phensig, upgrade in upgrades.items(): +# +# curUpgrade = newHazard1[action][1].get(phensig, set()) +# newHazard1[action][1][phensig] = curUpgrade | upgrade +# newHazards1[action][1] = newUpgrades + else: + newHazards1[action] = (hazard, newUpgrades) + + + # Reset the dictionary + hazards1 = newHazards1 + +# print "Done with merging EXA and CON" + + foundMatchingInfo = False + for sortedAreas2, hazards2 in list(newGroupedHazards.items()): + + if hazards1 == hazards2: + self._textProduct.debug_print("Combining areas %s and %s..." 
+ % (self._textProduct._pp.pformat(sortedAreas1), + self._textProduct._pp.pformat(sortedAreas2)), + 1) + + # Both areas contain the same information, combine the areas and remove duplicates + combinedAreas = tuple(sorted(set(sortedAreas1 + sortedAreas2))) + newGroupedHazards[combinedAreas] = hazards1 + + # Remove the old, uncombined areas + del newGroupedHazards[sortedAreas2] + + foundMatchingInfo = True + break + + if not foundMatchingInfo: + self._textProduct.debug_print("Adding new area entry...", 1) + newGroupedHazards[sortedAreas1] = hazards1 + + return newGroupedHazards + + def _createChangesTextParts(self, hazards): + self._textProduct.debug_print("hazards = %s" + % self._textProduct._pp.pformat(hazards), 1) + + hazardTextParts = [] + for status, (sortedPhensigs, upgrades) in list(hazards.items()): + + self._textProduct.debug_print("status = %s" % status, 1) + self._textProduct.debug_print("upgrades = %s" % upgrades, 1) + self._textProduct.debug_print("sortedPhensigs = %s" % sortedPhensigs, 1) + + # Convert the headlines from VTEC into text + sortedHeadlines = [] + for phensig in sortedPhensigs: + if VTECTable.VTECTable[phensig]['hdln'] not in sortedHeadlines: + sortedHeadlines.append(VTECTable.VTECTable[phensig]['hdln']) + + self._textProduct.debug_print("sortedHeadlines = %s" % sortedHeadlines, 1) + + hasText = " has " + # If there is more than one hazard + if len(sortedHeadlines) > 1: + hasText = " have " + + if status == "CAN": + + canHeadlines = self._textProduct.punctuateList(sortedHeadlines) + self._textProduct.debug_print("canHeadlines = '%s'" % canHeadlines, 1) + print(("canHeadlines = '%s'" % canHeadlines)) + + canText = "The " + canHeadlines + hasText + "been cancelled" + self._textProduct.debug_print("Result: '%s'" % canText, 1) + hazardTextParts.append(canText) + + elif status in ["NEW", "EXA"]: + if len(upgrades) > 0: + upgradeTextParts = [] + for phensig, previousPhenSig in list(upgrades.items()): + headline = 
VTECTable.VTECTable[phensig]['hdln'] + upgradeList = [] + + # Convert the list of VTEC codes to a headline + for prevPhenSig in previousPhenSig: + curHeadline = VTECTable.VTECTable[prevPhenSig]['hdln'] + if curHeadline not in upgradeList: + upgradeList.append(curHeadline) + + upgradedHeadlines = self._textProduct.punctuateList(upgradeList) + + upgHasText = " has " + # If there are no 'and's + if len(upgradeList) > 1: + upgHasText = " have " + + upgradeTextParts.append("The " + upgradedHeadlines + + upgHasText + "been upgraded to a " + headline) + + # Make sure we don't repeat information multiple times in the same section + try: + sortedHeadlines.remove(headline) + except: + print("*** Warning: attempt to remove \"%s\" from %s"\ + % (headline, sortedHeadlines)) + + upgradesText = self._textProduct.punctuateList(upgradeTextParts) + + self._textProduct.debug_print("Changes Result: '%s'" % upgradesText, 1) + hazardTextParts.append(upgradesText) + + # NEW and EXA hazards can have both upgrades and headlines associated with them + if len(sortedHeadlines) > 0: + headlines = self._textProduct.punctuateList(sortedHeadlines) + self._textProduct.debug_print("headlines = '%s'" % headlines, 1) + + newExaChangesText = "A " + headlines + hasText + "been issued" + self._textProduct.debug_print("Changes Result: '%s'" % newExaChangesText, 1) + hazardTextParts.append(newExaChangesText) + + return hazardTextParts + + def _createCurrentTextParts(self, hazards): + self._textProduct.debug_print("hazards = %s" + % self._textProduct._pp.pformat(hazards), 1) + + hazardTextParts = [] + sortedHeadlines = [] + + for status, (sortedPhensigs, upgrades) in list(hazards.items()): + + self._textProduct.debug_print("status = %s" % status, 1) + self._textProduct.debug_print("upgrades = %s" % upgrades, 1) + self._textProduct.debug_print("sortedPhensigs = %s" % sortedPhensigs, 1) + + # Convert the headlines from VTEC into text + for phensig in sortedPhensigs: + if VTECTable.VTECTable[phensig]['hdln'] 
not in sortedHeadlines: + sortedHeadlines.append(VTECTable.VTECTable[phensig]['hdln']) + + self._textProduct.debug_print("sortedHeadlines = %s" % sortedHeadlines, 1) + + headlines = self._textProduct.punctuateList(sortedHeadlines) + self._textProduct.debug_print("headlines = '%s'" % headlines, 1) + + isText = " is " + # If there is more than one hazard + if len(sortedHeadlines) > 1: + isText = " are " + + if status in ["NEW", "EXA", "CON"]: + # NEW and EXA hazards can have both upgrades and headlines associated with them + if len(sortedHeadlines) > 0: + + # If we are all done processing this group + numKeys = len(list(hazards.keys())) + numHeadlines = len(sortedHeadlines) + numPhensigs = len(sortedPhensigs) +# print "Current test keys = %d headlines = %d phensigs = %d" % (numKeys, numHeadlines, numPhensigs) + + if (numKeys == numHeadlines) or (numKeys == 1 and numHeadlines == numPhensigs): + newExaCurrentText = "A " + headlines + isText + "in effect" + self._textProduct.debug_print("Current Result: '%s'" % newExaCurrentText, 1) + hazardTextParts.append(newExaCurrentText) + + return hazardTextParts + + def _areaWords(self, sortedAreas): + if sortedAreas == tuple(): + return "" + names = [] + areaDict = self._textProduct._areaDict + for area in sortedAreas: + name = areaDict[area].get('altName', areaDict[area].get('ugcName', '')) + names.append(name) + names.sort() + areaWords = self._textProduct.formatCountyString("", names)[1:] + return areaWords + + def processStormInformation(self, stormInfoDict): + text = "* STORM INFORMATION:\n" + + if len(stormInfoDict) == 0: + text += self.TAB + "- None\n\n" + else: + referenceText = " or ".join(stormInfoDict['references']) + "\n" + referenceText = referenceText.replace(" or About", " or about" ) + + text += self._textProduct.indentText(referenceText, + indentFirstString = self.TAB + "- ", + indentNextString = self.TAB + " ", + maxWidth=self._textProduct._lineLength) + + (lat, lon) = stormInfoDict['location'] + text += 
self.TAB + "- " + lat + " " + lon + "\n" + + text += self.TAB + "- " + stormInfoDict['intensity'] + "\n" + + text += self.TAB + "- " + stormInfoDict['movement'] + "\n\n" + + return text + + def processSituationOverview(self, overviewText): + title = "SITUATION OVERVIEW" + text = title + "\n" + "-"*len(title) + "\n\n" + + text += self._textProduct.endline(overviewText, linelength=self._textProduct._lineLength) + text += "\n" + + return text + + def processHazardsSection(self, sectionDict): + text = "* " + sectionDict['title'].upper() + ":\n" + + impactRangeText = sectionDict['impactRange'] + text += self._textProduct.indentText(impactRangeText, maxWidth=self._textProduct._lineLength) + + if self._textProduct._GeneralOnsetTime == "recovery" and len(sectionDict['impactLib']) != 0: + text += "|*\n" + + for impact in sectionDict['impactLib']: + text += self._textProduct.indentText(impact, + indentFirstString = self.TAB + "- ", + indentNextString = self.TAB + " ", + maxWidth=self._textProduct._lineLength) + + if self._textProduct._GeneralOnsetTime == "recovery" and len(sectionDict['impactLib']) != 0: + text += "*|\n" + + if len(sectionDict['additionalImpactRange']) != 0: + text += "\n" + + additionalImpactRangeText = "" + curAdditionalImpactText = "" + count = 1 + + self._textProduct.debug_print("DEBUG: %d sectionDict['additionalImpactRange'] = '%s'" % (len(sectionDict['additionalImpactRange']), sectionDict['additionalImpactRange'])) + for additionalImpact in sectionDict['additionalImpactRange']: + + self._textProduct.debug_print("additionalImpact = '%s'" % (additionalImpact)) + self._textProduct.debug_print("count = %d" % (count)) + + curAdditionalImpactText += \ + self._textProduct.indentText(additionalImpact, + maxWidth=self._textProduct._lineLength) + + if count != len(sectionDict['additionalImpactRange']) and \ + len(curAdditionalImpactText) > 0: + curAdditionalImpactText += "\n" + + self._textProduct.debug_print("DEBUG: curAdditionalImpactText ='%s'" % 
(curAdditionalImpactText)) + + count += 1 + + # If this additional impact is not already included in the output + if additionalImpactRangeText.find(curAdditionalImpactText) == -1: + + # Add this additional impact text + self._textProduct.debug_print("Adding current impact. '%s'" % (curAdditionalImpactText)) + additionalImpactRangeText += curAdditionalImpactText + + text += additionalImpactRangeText + + text += "\n" + return text + + def processSubParts(self, subParts, infoDicts): + """ + Generates Legacy text from a list of subParts e.g. segments or sections + @param subParts: a list of dictionaries for each subPart + @param partsLists: a list of Product Parts for each segment + @return: Returns the legacy text of the subParts + """ + text = '' + for i in range(len(subParts)): + self._textProduct.debug_print("subpart subParts[i] = %s" % (subParts[i])) + self._textProduct.debug_print("subpart infoDicts[i] = %s" % (infoDicts[i])) + newtext = self._processProductParts(subParts[i], infoDicts[i].get('partsList')) + self._textProduct.debug_print("subpart newtext type = %s" % (type(newtext))) + self._textProduct.debug_print("subpart newtext = '%s'" % (self._pp.pformat(newtext))) + text += newtext + + return text diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_AQA_MultiPil.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_AQA_MultiPil.py index 663b846367..6191473094 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_AQA_MultiPil.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_AQA_MultiPil.py @@ -1,347 +1,347 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## -######################################################################## -# Hazard_AQA.py -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 20, 2014 #3685 randerso Changed to support mixed case -# Apr 28, 2015 #4027 randerso Additional changes for mixed case -# Jul 15, 2016 #5749 randerso Replaced ellipses with commas -# -## - -## -# This is a base file that is not intended to be overridden. 
-## - - - -import GenericHazards -import string, time, re, os, types, copy -import ProcessVariableList - -class TextProduct(GenericHazards.TextProduct): - Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) - - Definition['displayName'] = None - Definition['displayName'] = "BaselineHazard_AQA_ (Air Quality Alert)" - - Definition["defaultEditAreas"] = "EditAreas_PublicZones__" - Definition["mapNameForCombinations"] = "Zones_" # Map background for creating Combinations - - #Special multiple product domains for certain sites: - if "" == "AFG": - if "_" == "_AFG": - Definition["subDomainUGCs"] = ["AKZ218","AKZ219","AKZ220","AKZ221", - "AKZ222","AKZ223","AKZ224","AKZ225", - "AKZ226"] - elif "_" == "_NSB": - Definition["subDomainUGCs"] = ["AKZ201","AKZ202","AKZ203","AKZ204", - "AKZ205","AKZ206"] - elif "_" == "_WCZ": - Definition["subDomainUGCs"] = ["AKZ207","AKZ208","AKZ209","AKZ210", - "AKZ211","AKZ212","AKZ213","AKZ214", - "AKZ215","AKZ216","AKZ217","AKZ227"] - - # Header configuration items - Definition["productName"] = "Air Quality Alert" # name of product - Definition["fullStationID"] = "" # full station identifier (4letter) - Definition["wmoID"] = "" # WMO ID - Definition["pil"] = "" # product pil - #Definition["areaName"] = "Statename" # Name of state, such as "Georgia" - Definition["wfoCityState"] = "" # Location of WFO - city state - Definition["wfoCity"] = "" # WFO Name as it should appear in a text product - Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. - Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. - Definition["outputFile"] = "{prddir}/TEXT/AQA_.txt" - - # OPTIONAL CONFIGURATION ITEMS - #Definition["database"] = "Official" # Source database. 
"Official", "Fcst", or "ISC" - #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished - #Definition["debug"] = 1 - Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines - - Definition["purgeTime"] = 24 # Maximum hours for expireTime from issueTime - Definition["includeCities"] = 1 # Cities included in area header - Definition["cityDescriptor"] = "Including the cities of" - Definition["includeZoneNames"] = 1 # Zone names will be included in the area header - #Definition["easPhrase"] = "" # Optional EAS phrase to be include in product header - Definition["lineLength"] = 69 - Definition["includeOverviewHeadline"] = 1 #If 1, the overview header is templated - Definition["includeOverview"] = 1 #If 1, the overview section is templated - ######################################################### - # Be sure to override the agencyDict # - # in your Definition file # - ######################################################### - Definition["agencyDict"] = { - - "Forsyth": { - "name": "Forsyth County Environmental Affairs Department Winston-Salem NC", - "declaration": "The Forsyth County Environmental Affairs Department has issued an Air Quality Action Day, ", - "zones": ["FLZ039"], - "text": "A Code @ALERTCODE Air Quality Alert for Ozone has been issued. Ground level ozone concentrations within the region may approach or exceed unhealthy standards. @ALERTCTA For additional information, please visit the Forsyth County Environmental Affairs Department web site at http://www.co.forsyth.nc.us/envaffairs.", - }, - - "NC": { - "name": "North Carolina Department of Environmental and Natural Resources Raleigh NC", - "declaration": "The North Carolina Department of Environmental and Natural Resources has issued an Air Quality Action Day, ", - "zones" : ["FLZ042", "FLZ043","FLZ048"], - "text": "A Code @ALERTCODE Air Quality Alert for Ozone has been issued. 
Ground level ozone concentrations within the region may approach or exceed unhealthy standards. @ALERTCTA For additional information, please visit the North Carolina Division of Air Quality web site at http://daq.state.nc.us/airaware/forecast/.", - }, - } - - ############################################################ - # Override the alertCodes and alertCTAsDict for your Site. # - # If you do not want to use alertCodes, # - # set alertCodes to [] # - # If you want alertCodes and/or alertCTA messages to # - # appear in your product, put @ALERTCODE and @ALERTCTA # - # in the "text" for each agency in the agencyDict # - ############################################################ - Definition["alertCodes"] = ["Orange", "Red", "Purple"] - Definition["alertCTAsDict"] = { - "Orange": "Members of sensitive groups may experience health effects. The general public is not likely to be affected.", - "Red" : "Everyone may experience health effects. Members of sensitive groups May experience more serious health effects.", - "Purple" : "Health alert: everyone may experience serious health effects.", - } - - - #Definition["hazardSamplingThreshold"] = (10, None) #(%cov, #points) - - def __init__(self): - GenericHazards.TextProduct.__init__(self) - - def _processVariableList(self, definition): - # Get Definition variables - for key in definition.keys(): - exec "self._" + key + "= definition[key]" - alertCodes = self._alertCodes - if alertCodes != []: - varList = [(("Alert Level", "alertCode"), alertCodes[0], - "radio", alertCodes)] - return self._callProcessVariableList("Input Info", varList, varDict={}) - else: - return {} - - def _callProcessVariableList(self, title, varList, varDict): - processVarList = ProcessVariableList.ProcessVariableList( - title, varList, varDict={}) - self._selectionStatus = processVarList.status() - if not self._selectionStatus.upper() == "OK": - return None # User Cancelled - return processVarList.varDict() - - def _preProcessProduct(self, fcst, 
argDict): - """ - Modified to allow a multiple MND list of agencies and to insert - Relayed By wording. - """ - - # Product header - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " +\ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" - - # Placeholder for Agency Names to be filled in in _postProcessProduct - #s = s + "@AGENCYNAMES" + "\n" - s = s + "Relayed by National Weather Service " + self._wfoCityState + "\n" +\ - issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - return fcst - - def headlinesTiming(self, tree, node, key, timeRange, areaLabel, issuanceTime): - """ - Modified to change start and end PhraseType to EXPLICIT. - This will provide exact start and stop times for the AQA hazard.""" - - # Return - # "startPhraseType" and "endPhraseType" - # Each can be one of these phraseTypes: - # "EXPLICIT" will return words such as "5 PM" - # "FUZZY4" will return words such as "THIS EVENING" - # "DAY_NIGHT_ONLY" use only weekday or weekday "Night" e.g. - # "Sunday" or "Sunday night" or "Today" or "Tonight" - # Note: You will probably want to set both the - # startPhraseType and endPhraseType to DAY_NIGHT_ONLY to - # have this work correctly. - # "None" will result in no words - # OR a method which takes arguments: - # issueTime, eventTime, timeZone, and timeType - # and returns: - # phraseType, (hourStr, hourTZstr, description) - # You can use "timingWordTableFUZZY8" as an example to - # write your own method. - # - # If you simply return None, no timing words will be used. - - # Note that you can use the information given to determine which - # timing phrases to use. 
In particular, the "key" is the Hazard - # key so different local headlines can use different timing. - # - startPhraseType = "EXPLICIT" - endPhraseType = "EXPLICIT" - - #Example code - #startTime = timeRange.startTime().unixTime() - #if startTime <= issuanceTime + 12 * 3600: # 12 hours past issuance - #startPhraseType = "EXPLICIT" - #endTime = timeRange.endTime().unixTime() - #if endTime <= issuanceTime + 12 * 3600: # 12 hours past issuance - #endPhraseType = "EXPLICIT" - - return startPhraseType, endPhraseType - - # Returns a formatted string announcing the hazards that are valid with - # timing phrases - def getHazardString(self, tree, node, fcstArea): - if len(fcstArea) <= 0: - return "" - hazardTable = self._hazards.getHazardList(fcstArea) - returnStr = "" - issuanceTime = self._issueTime.unixTime() - - returnStr = self.makeHeadlinePhrases(tree, node, hazardTable, - issuanceTime) - #Test mode? - returnStr = self.headlinePhraseTESTcheck(tree.get("argDict"), - returnStr) - - return returnStr - - def _makeProduct(self, fcst, segmentAreas, argDict): - """ - Modified to allow headlines. 
Segments will be automatically - generated based on hazards grid, if not already broken up.""" - editArea = segmentAreas[0] - areaLabel = editArea - - # Get combinations to be used for the segment - - combinations = argDict["combinations"] - if combinations is not None: - areaList = self.getCurrentAreaNames(argDict, areaLabel) - print "using combinations, areaList=", areaList - usingCombo = 1 - else: - for editArea, label in defaultEditAreas: - if label == areaLabel: - areaList = [editArea] - print "not using combinations, areaList=", areaList - - # Generate the standard headline for the segment - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - self._hazardTR = self.createTimeRange(0,240) - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._hazardTR) - - self._agencyNames = "" - - # If no valid AQA hazard grid, just return a placeholder - if headlines == "": - return fcst + "|* Statement text *|" - - # If valid hazard grid, insert headline, agency attribution, and any default text - else: - - # Make sure main headline is in upper case. - upperhead = string.upper(headlines) - fcst = fcst + upperhead - #strip out the line feed within headlines - headlines = string.split(headlines,'\n') - headlines = string.join(headlines) - - #create an attribution phrase containing headline info - HeadIssue1 = "" - HeadIssue2 = "" - HeadIssue = "" - - # Determine the list of agencies associated with the segmentAreas - agencies = [] - for areaLabel in segmentAreas: - # Find the agency for this areaLabel - for agency in self._agencyDict.keys(): - if agency not in agencies: - zones = self._agencyDict[agency]['zones'] - if areaLabel in zones: - agencies.append(agency) - name = self._agencyDict[agency]['name'] - self._agencyNames = self._agencyNames + "\n" + name - - # Make the headline using the first agency only - if agencies == []: - print "\n\nCheck set up of agencyDict!! 
-- no agencyDict entry for "+`segmentAreas`+"\n\n" - agency = agencies[0] - HeadIssue1 = self._agencyDict[agency]['declaration'] - HeadIssue2 = headlines - if "remains" in headlines: # This is an update - HeadIssue2 = Headissue2[29:len(HeadIssue2)-4] - else: # This is the first issuance - HeadIssue2 = HeadIssue2[21:len(HeadIssue2)-4] - HeadIssue = HeadIssue1 + HeadIssue2 + "\n\n" + self._agencyDict[agency]['text'] - fcst = fcst + HeadIssue + "\n\n" - - return fcst - - def _postProcessProduct(self, fcst, argDict): - """ - Handles word-wrapping, line feeds, color-code text replacements - and lower case.""" - - # Replace the string '@AGENCYNAMES' with the agency names - #fcst = fcst.replace('@AGENCYNAMES', self._agencyNames) - if self._alertCodes != []: - # Replace the string '@ALERTCODE' with the appropriate alertcode - fcst = fcst.replace('@ALERTCODE', self._alertCode) - - # Insert specific CTA based on alertCode. - if re.search('@ALERTCTA',fcst) != None: - fcst = re.sub('@ALERTCTA', self._alertCTAsDict[self._alertCode],fcst) - - fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "-", "..."]) - - fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) - fcst = fixMultiLF.sub(r'\1', fcst) - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", "EXP"] - return [ - ('AQ.Y', allActions, 'AirQual'), # AIR QUALITY ALERT - ] - - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. 
+# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## +######################################################################## +# Hazard_AQA.py +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 20, 2014 #3685 randerso Changed to support mixed case +# Apr 28, 2015 #4027 randerso Additional changes for mixed case +# Jul 15, 2016 #5749 randerso Replaced ellipses with commas +# +## + +## +# This is a base file that is not intended to be overridden. +## + + + +import GenericHazards +import string, time, re, os, types, copy +import ProcessVariableList + +class TextProduct(GenericHazards.TextProduct): + Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) + + Definition['displayName'] = None + Definition['displayName'] = "BaselineHazard_AQA_ (Air Quality Alert)" + + Definition["defaultEditAreas"] = "EditAreas_PublicZones__" + Definition["mapNameForCombinations"] = "Zones_" # Map background for creating Combinations + + #Special multiple product domains for certain sites: + if "" == "AFG": + if "_" == "_AFG": + Definition["subDomainUGCs"] = ["AKZ218","AKZ219","AKZ220","AKZ221", + "AKZ222","AKZ223","AKZ224","AKZ225", + "AKZ226"] + elif "_" == "_NSB": + Definition["subDomainUGCs"] = ["AKZ201","AKZ202","AKZ203","AKZ204", + "AKZ205","AKZ206"] + elif "_" == "_WCZ": + Definition["subDomainUGCs"] = ["AKZ207","AKZ208","AKZ209","AKZ210", + "AKZ211","AKZ212","AKZ213","AKZ214", + "AKZ215","AKZ216","AKZ217","AKZ227"] + + # Header configuration items + Definition["productName"] = "Air Quality Alert" # name of product + Definition["fullStationID"] = "" # full station identifier (4letter) + Definition["wmoID"] = "" # WMO ID + Definition["pil"] = "" # product pil + #Definition["areaName"] = 
"Statename" # Name of state, such as "Georgia" + Definition["wfoCityState"] = "" # Location of WFO - city state + Definition["wfoCity"] = "" # WFO Name as it should appear in a text product + Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. + Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. + Definition["outputFile"] = "{prddir}/TEXT/AQA_.txt" + + # OPTIONAL CONFIGURATION ITEMS + #Definition["database"] = "Official" # Source database. "Official", "Fcst", or "ISC" + #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished + #Definition["debug"] = 1 + Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines + + Definition["purgeTime"] = 24 # Maximum hours for expireTime from issueTime + Definition["includeCities"] = 1 # Cities included in area header + Definition["cityDescriptor"] = "Including the cities of" + Definition["includeZoneNames"] = 1 # Zone names will be included in the area header + #Definition["easPhrase"] = "" # Optional EAS phrase to be include in product header + Definition["lineLength"] = 69 + Definition["includeOverviewHeadline"] = 1 #If 1, the overview header is templated + Definition["includeOverview"] = 1 #If 1, the overview section is templated + ######################################################### + # Be sure to override the agencyDict # + # in your Definition file # + ######################################################### + Definition["agencyDict"] = { + + "Forsyth": { + "name": "Forsyth County Environmental Affairs Department Winston-Salem NC", + "declaration": "The Forsyth County Environmental Affairs Department has issued an Air Quality Action Day, ", + "zones": ["FLZ039"], + "text": "A Code @ALERTCODE Air Quality Alert for Ozone has been issued. Ground level ozone concentrations within the region may approach or exceed unhealthy standards. 
@ALERTCTA For additional information, please visit the Forsyth County Environmental Affairs Department web site at http://www.co.forsyth.nc.us/envaffairs.", + }, + + "NC": { + "name": "North Carolina Department of Environmental and Natural Resources Raleigh NC", + "declaration": "The North Carolina Department of Environmental and Natural Resources has issued an Air Quality Action Day, ", + "zones" : ["FLZ042", "FLZ043","FLZ048"], + "text": "A Code @ALERTCODE Air Quality Alert for Ozone has been issued. Ground level ozone concentrations within the region may approach or exceed unhealthy standards. @ALERTCTA For additional information, please visit the North Carolina Division of Air Quality web site at http://daq.state.nc.us/airaware/forecast/.", + }, + } + + ############################################################ + # Override the alertCodes and alertCTAsDict for your Site. # + # If you do not want to use alertCodes, # + # set alertCodes to [] # + # If you want alertCodes and/or alertCTA messages to # + # appear in your product, put @ALERTCODE and @ALERTCTA # + # in the "text" for each agency in the agencyDict # + ############################################################ + Definition["alertCodes"] = ["Orange", "Red", "Purple"] + Definition["alertCTAsDict"] = { + "Orange": "Members of sensitive groups may experience health effects. The general public is not likely to be affected.", + "Red" : "Everyone may experience health effects. 
Members of sensitive groups May experience more serious health effects.", + "Purple" : "Health alert: everyone may experience serious health effects.", + } + + + #Definition["hazardSamplingThreshold"] = (10, None) #(%cov, #points) + + def __init__(self): + GenericHazards.TextProduct.__init__(self) + + def _processVariableList(self, definition): + # Get Definition variables + for key in list(definition.keys()): + exec("self._" + key + "= definition[key]") + alertCodes = self._alertCodes + if alertCodes != []: + varList = [(("Alert Level", "alertCode"), alertCodes[0], + "radio", alertCodes)] + return self._callProcessVariableList("Input Info", varList, varDict={}) + else: + return {} + + def _callProcessVariableList(self, title, varList, varDict): + processVarList = ProcessVariableList.ProcessVariableList( + title, varList, varDict={}) + self._selectionStatus = processVarList.status() + if not self._selectionStatus.upper() == "OK": + return None # User Cancelled + return processVarList.varDict() + + def _preProcessProduct(self, fcst, argDict): + """ + Modified to allow a multiple MND list of agencies and to insert + Relayed By wording. 
+ """ + + # Product header + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " +\ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" + + # Placeholder for Agency Names to be filled in in _postProcessProduct + #s = s + "@AGENCYNAMES" + "\n" + s = s + "Relayed by National Weather Service " + self._wfoCityState + "\n" +\ + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + return fcst + + def headlinesTiming(self, tree, node, key, timeRange, areaLabel, issuanceTime): + """ + Modified to change start and end PhraseType to EXPLICIT. + This will provide exact start and stop times for the AQA hazard.""" + + # Return + # "startPhraseType" and "endPhraseType" + # Each can be one of these phraseTypes: + # "EXPLICIT" will return words such as "5 PM" + # "FUZZY4" will return words such as "THIS EVENING" + # "DAY_NIGHT_ONLY" use only weekday or weekday "Night" e.g. + # "Sunday" or "Sunday night" or "Today" or "Tonight" + # Note: You will probably want to set both the + # startPhraseType and endPhraseType to DAY_NIGHT_ONLY to + # have this work correctly. + # "None" will result in no words + # OR a method which takes arguments: + # issueTime, eventTime, timeZone, and timeType + # and returns: + # phraseType, (hourStr, hourTZstr, description) + # You can use "timingWordTableFUZZY8" as an example to + # write your own method. + # + # If you simply return None, no timing words will be used. + + # Note that you can use the information given to determine which + # timing phrases to use. In particular, the "key" is the Hazard + # key so different local headlines can use different timing. 
+ # + startPhraseType = "EXPLICIT" + endPhraseType = "EXPLICIT" + + #Example code + #startTime = timeRange.startTime().unixTime() + #if startTime <= issuanceTime + 12 * 3600: # 12 hours past issuance + #startPhraseType = "EXPLICIT" + #endTime = timeRange.endTime().unixTime() + #if endTime <= issuanceTime + 12 * 3600: # 12 hours past issuance + #endPhraseType = "EXPLICIT" + + return startPhraseType, endPhraseType + + # Returns a formatted string announcing the hazards that are valid with + # timing phrases + def getHazardString(self, tree, node, fcstArea): + if len(fcstArea) <= 0: + return "" + hazardTable = self._hazards.getHazardList(fcstArea) + returnStr = "" + issuanceTime = self._issueTime.unixTime() + + returnStr = self.makeHeadlinePhrases(tree, node, hazardTable, + issuanceTime) + #Test mode? + returnStr = self.headlinePhraseTESTcheck(tree.get("argDict"), + returnStr) + + return returnStr + + def _makeProduct(self, fcst, segmentAreas, argDict): + """ + Modified to allow headlines. 
Segments will be automatically + generated based on hazards grid, if not already broken up.""" + editArea = segmentAreas[0] + areaLabel = editArea + + # Get combinations to be used for the segment + + combinations = argDict["combinations"] + if combinations is not None: + areaList = self.getCurrentAreaNames(argDict, areaLabel) + print(("using combinations, areaList=", areaList)) + usingCombo = 1 + else: + for editArea, label in defaultEditAreas: + if label == areaLabel: + areaList = [editArea] + print(("not using combinations, areaList=", areaList)) + + # Generate the standard headline for the segment + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + self._hazardTR = self.createTimeRange(0,240) + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._hazardTR) + + self._agencyNames = "" + + # If no valid AQA hazard grid, just return a placeholder + if headlines == "": + return fcst + "|* Statement text *|" + + # If valid hazard grid, insert headline, agency attribution, and any default text + else: + + # Make sure main headline is in upper case. + upperhead = string.upper(headlines) + fcst = fcst + upperhead + #strip out the line feed within headlines + headlines = string.split(headlines,'\n') + headlines = string.join(headlines) + + #create an attribution phrase containing headline info + HeadIssue1 = "" + HeadIssue2 = "" + HeadIssue = "" + + # Determine the list of agencies associated with the segmentAreas + agencies = [] + for areaLabel in segmentAreas: + # Find the agency for this areaLabel + for agency in list(self._agencyDict.keys()): + if agency not in agencies: + zones = self._agencyDict[agency]['zones'] + if areaLabel in zones: + agencies.append(agency) + name = self._agencyDict[agency]['name'] + self._agencyNames = self._agencyNames + "\n" + name + + # Make the headline using the first agency only + if agencies == []: + print(("\n\nCheck set up of agencyDict!! 
-- no agencyDict entry for "+repr(segmentAreas)+"\n\n")) + agency = agencies[0] + HeadIssue1 = self._agencyDict[agency]['declaration'] + HeadIssue2 = headlines + if "remains" in headlines: # This is an update + HeadIssue2 = Headissue2[29:len(HeadIssue2)-4] + else: # This is the first issuance + HeadIssue2 = HeadIssue2[21:len(HeadIssue2)-4] + HeadIssue = HeadIssue1 + HeadIssue2 + "\n\n" + self._agencyDict[agency]['text'] + fcst = fcst + HeadIssue + "\n\n" + + return fcst + + def _postProcessProduct(self, fcst, argDict): + """ + Handles word-wrapping, line feeds, color-code text replacements + and lower case.""" + + # Replace the string '@AGENCYNAMES' with the agency names + #fcst = fcst.replace('@AGENCYNAMES', self._agencyNames) + if self._alertCodes != []: + # Replace the string '@ALERTCODE' with the appropriate alertcode + fcst = fcst.replace('@ALERTCODE', self._alertCode) + + # Insert specific CTA based on alertCode. + if re.search('@ALERTCTA',fcst) != None: + fcst = re.sub('@ALERTCTA', self._alertCTAsDict[self._alertCode],fcst) + + fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "-", "..."]) + + fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) + fcst = fixMultiLF.sub(r'\1', fcst) + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", "EXP"] + return [ + ('AQ.Y', allActions, 'AirQual'), # AIR QUALITY ALERT + ] + + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_FFA_MultiPil.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_FFA_MultiPil.py index 791e0bdfd4..2a12a38553 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_FFA_MultiPil.py +++ 
b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_FFA_MultiPil.py @@ -1,551 +1,551 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-## - -######################################################################## -# Hazard_FFA.py -# -## -########################################################################## -import GenericHazards - -import string, time, re, os, types, copy - -class TextProduct(GenericHazards.TextProduct): - VariableList = [ - (("Flood Reason (HVTEC)", "floodReason"), - "ER (Excessive Rainfall)", "radio", - ["ER (Excessive Rainfall)", - "SM (Snow Melt)", - "RS (Rain and Snow Melt)", - "DM (Dam or Levee Failure)", - "DR (Upstream Dam Release)", - "GO (Glacier-Dammed Lake Outburst)", - "IJ (Ice Jam)", - "IC (Rain and/or Snow melt and/or Ice Jam)", - "FS (Upstream Flooding plus Storm Surge)", - "FT (Upstream Flooding plus Tidal Effects)", - "ET (Elevated Upstream Flow plus Tidal Effects)", - "WT (Wind and/or Tidal Effects)", - "OT (Other Effects)", - "UU (Unknown)" - ]), - ] - - Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) - - Definition['displayName'] = None - Definition['displayName'] = "BaselineHazard_FFA_ (Flood Watch)" # Flood Watch - - Definition["areaType"] = "ZONES" # OR "FIPS" - if Definition["areaType"] == "FIPS": - Definition["defaultEditAreas"] = "EditAreas_FIPS__" #Where XXX = site id - Definition["mapNameForCombinations"] = "FIPS_" - else: - Definition["defaultEditAreas"] = "EditAreas_PublicZones__" #Where XXX = site id - Definition["mapNameForCombinations"] = "Zones_" - - # Header configuration items - Definition["productName"] = "Flood Watch" # name of product - Definition["fullStationID"] = "" # full station identifier (4letter) - Definition["wmoID"] = "" # WMO ID - Definition["pil"] = "" # product pil - #Definition["areaName"] = "Statename" # Name of state, such as "Georgia" - Definition["wfoCityState"] = "" # Location of WFO - city state - Definition["wfoCity"] = "" # WFO Name as it should appear in a text product - Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. 
- Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. - Definition["outputFile"] = "{prddir}/TEXT/FFA_.txt" - - # OPTIONAL CONFIGURATION ITEMS - #Definition["database"] = "Official" # Source database. "Official", "Fcst", or "ISC" - #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished - #Definition["debug"] = 1 - #Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines - - Definition["purgeTime"] = 8 # Maximum hours for expireTime from issueTime - Definition["includeCities"] = 1 # Cities included in area header - Definition["accurateCities"] = 0 # If 1, cities are based on grids; - # otherwise full list is included - Definition["cityLocation"] = "CityLocation" # City lat/lon dictionary to use - Definition["cityDescriptor"] = "Including the cities of" - Definition["includeZoneNames"] = 1 # Zone names will be included in the area header - Definition["includeIssueTime"] = 1 # This should be set to zero for products - # that do not include a time lime below the UGC - #Definition["easPhrase"] = "" # Optional EAS phrase to be include in product header - Definition["lineLength"] = 66 - - Definition["includeOverviewHeadline"] = 1 #If 1, the overview header is templated - Definition["includeOverview"] = 1 #If 1, the overview section is templated - #Definition["hazardSamplingThreshold"] = (10, None) #(%cov, #points) - Definition["callToAction"] = 1 - - def __init__(self): - GenericHazards.TextProduct.__init__(self) - - def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - return [ - ('FF.A', allActions, 'FlashFlood'), - ('FA.A', allActions, 'Flood'), - ] - - def _preProcessProduct(self, fcst, argDict): - - # - # The code below determines if a NEW, EXA, or EXB is being created. - # This will determine if the EAS phrase is needed. 
- # - - hazards = argDict['hazards'] - segmentList = self.organizeHazards(hazards.rawAnalyzedTable()) - timeRange = self.createTimeRange(0, 240) - listOfHazards = [] - useEAS = 0 - for each_watch_area in segmentList: - for each_area in each_watch_area: - areaWatchList = self._hazards.getHazardList(each_area) - for eachHazard in areaWatchList: - if (eachHazard['act'] in ['NEW', 'EXA', 'EXB', 'EXT'] and - (eachHazard['phen'] == 'FF' or eachHazard['phen'] == 'FA')): - useEAS = 1 - break - - - - # - # Special Product header code to add EAS Phrase if needed - # - - if self._areaName != "": - self._areaName = " for " + self._areaName - if useEAS == 1: - easPhrase = "URGENT - IMMEDIATE BROADCAST REQUESTED\n" - else: - easPhrase = "" - - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, self._productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = easPhrase +\ - productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n" + \ - self._easPhrase + "\n\n" - fcst = fcst + s - - fcst = fcst + "Default overview section\n" - return fcst - - - def _preProcessArea(self, fcst, segmentAreas, expireTime, argDict): - - # - # This is the header for an edit area combination - # - - editArea = segmentAreas[0] - areaLabel = editArea - HVTEC = "/00000.0." 
+ self._floodReason[0:2] + \ - ".000000T0000Z.000000T0000Z.000000T0000Z.OO/" - - areaHeader = self.makeAreaHeader( - argDict, "", self._issueTime, expireTime, - self._areaDictionary, None, cityDescriptor=self._cityDescriptor, - areaList=segmentAreas, includeCities=self._includeCities, - includeZoneNames = self._includeZoneNames, includeIssueTime = self._includeIssueTime, - hVTECString=HVTEC, - accurateCities = self._accurateCities) - - fcst = fcst + areaHeader + '\n' - return fcst - - - def _makeProduct(self, fcst, segmentAreas, argDict): - argDict["language"] = self._language - - # - # This section generates the headline on the segment - # - - # stuff argDict with the segmentAreas for DiscretePhrases - argDict['segmentAreas'] = segmentAreas - - editArea = segmentAreas[0] - areaLabel = editArea - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - - # - # This section generates the attribution statements and calls-to-action - # - - hazards = argDict['hazards'] - timeRange = self.createTimeRange(0, 240) - listOfHazards = hazards.getHazardList(segmentAreas) - - attrPhrase = "" - - # - # Check for special case where a CAN/EXP is paired with a - # NEW/EXA/EXB/EXT - # - includeText, includeFrameCodes, skipCTAs, forceCTAList = \ - self.useCaptureText(listOfHazards) - - # sort the sections within the product - listOfHazards.sort(self.sortSection) - - # find any "CAN" with non-CAN for reasons of text capture - canHazard = None - for eh in listOfHazards: - if eh['act'] in ['CAN','EXP','UPG']: - canHazard = eh - break #take the first one - - # Make Area Phrase - areas = self.getGeneralAreaList(listOfHazards[0]['id'], - areaDictName = self._areaDictionary) - areas = self.simplifyAreas(areas) - areaPhrase = self.makeAreaPhrase(areas, listOfHazards[0]['id']) - areaPhraseShort = self.makeAreaPhrase(areas, listOfHazards[0]['id'], - True) - - #process each part of the section - for 
hazard in listOfHazards: - if hazard['act'] in ['CAN','EXP','UPG']: - phrase = self.makeSection(hazard, None, areaPhraseShort, - argDict) - else: - phrase = self.makeSection(hazard, canHazard, areaPhrase, - argDict) - fcst = fcst + phrase + "\n\n" - - - self.overviewText(listOfHazards, "FFA") - - fcst = self.endline(fcst, linelength=self._lineLength, - breakStr=[" ", "...", "-"]) - return fcst - - - def simplifyAreas(self, areas): - #simplifies the area phrases by combining subareas, returns the - #areas. - - # rules: 1) multiple states and multiple direction terms in a state, - # only mention the state. 2) Multiple states but single directional - # term in a state, include the directional term. 3) Single state, - # include the directional terms. - - #determine how many states, and how many areas within each state - stateDict = {} #key is state, value is count of portions of state - for state, partOfState, names in areas: - if stateDict.has_key(state): - stateDict[state] = stateDict[state] + 1 - else: - stateDict[state] = 1 - - # if single state, include all directional terms - if len(stateDict.keys()) < 2: - return areas #unchanged - - # multiple states - multiple direction terms in a state - # keep states sorted in same order as present. - out = [] - for state, partOfState, names in areas: - if stateDict[state] == 1: - names.sort() - out.append((state, partOfState, names)) - elif len(out) == 0 or state != out[-1][0]: #new state - out.append((state, "", names)) #leave out partOfState - else: #same state as before - nmeList = out[-1][2] - for n in names: - nmeList.append(n) - nmeList.sort() - - return out - - - - - def makeAreaPhrase(self, areaGroups, areas, generalOnly=False): - #creates the area phrase based on the groups of areas (areaGroups, - #such as NE Pennsylvania), and the areas (areas), individual zones. - #returns the area phrase. Area phrase does not have a terminating - #period. 
- areaGroupLen = len(areaGroups) - if areaGroupLen == 1: - areaPhrase = "a portion of " - else: - areaPhrase = "portions of " - - #parts of the states - areaList = [] - for state, partOfState, names in areaGroups: - if partOfState == '' or partOfState == ' ': - areaList.append(state) - else: - areaList.append(partOfState + " " + state) - - areaPhrase += self.punctuateList(areaList) - - #including phrase, have to count what we have - d = {'Independent city': ("Independent city", "Independent cities"), - 'Parish': ("Parish", "Parishes"), - 'County': ("County", "Counties"), - 'Zone': ("Area", "Areas") } - icCnt = 0 - parishCnt = 0 - zoneCnt = 0 - countyCnt = 0 - for state, partOfState, names in areaGroups: - for name,nameType in names: - if nameType == "zone": - zoneCnt = zoneCnt + 1 - elif nameType == "county": - countyCnt = countyCnt + 1 - elif nameType == "independent city": - icCnt = icCnt + 1 - elif nameType == "parish": - parishCnt = parishCnt + 1 - - incPhrases = [] - if zoneCnt == 1: - incPhrases.append("area") - elif zoneCnt > 1: - incPhrases.append("areas") - if countyCnt == 1: - incPhrases.append("county") - elif countyCnt > 1: - incPhrases.append("counties") - if icCnt == 1: - incPhrases.append("independent city") - elif icCnt > 1: - incPhrases.append("independent cities") - if parishCnt == 1: - incPhrases.append("parish") - elif parishCnt > 1: - incPhrases.append("parishes") - incPhrase = self.punctuateList(incPhrases) - - if generalOnly: - return areaPhrase - - - areaPhrase += ", including the following " + incPhrase + ", " - - #list of the specific areas - for i in xrange(len(areaGroups)): - state, partOfState, names = areaGroups[i] - if state == "The District of Columbia": - areaPhrase += state - else: - # extract out the names - snames = [] - for name,nameType in names: - snames.append(name) - - # single (don't mention state, partOfState again) - if len(areaGroups) == 1: - phrase = "" - # complex phrasing (state, partOfState, and names) - else: - if i 
== 0: - phrase = "in " - else: - phrase = "In " - if partOfState != '' and partOfState != ' ': - phrase += partOfState + ' ' - phrase += state + ", " - - phrase += self.punctuateList(snames) - - areaPhrase += phrase - if i != len(areaGroups) - 1: - areaPhrase += '. ' #another one coming, add period - - return areaPhrase - - def sortSection(self, r1, r2): - #sorts the hazards in a particular order for the sections within - #each segment. We try to keep this in the same order as the - #headlines order for clarity. - return self.regularSortHazardAlg(r1, r2) - - def makeSection(self, hazard, canHazard, areaPhrase, argDict): - #creates a section of the FFA product. The hazard record is passed - #in. canHazard is any associated CAN/EXP/UPG hazard, areaPhrase is - #the area description for the segment. - - nwsPhrase = "The National Weather Service in " + self._wfoCity + " has " - - # - # Attribution and 1st bullet (headPhrase) - # - headPhrase = None - attribution = '' - - hazName = self.hazardName(hazard['hdln'], argDict, False) - - if hazard['act'] == 'NEW' and len(hazard['hdln']): - attribution = nwsPhrase + "issued a" - headPhrase = self.substituteBulletedText(hazName + " for " + areaPhrase + ".", None, "Never") - - elif hazard['act'] == 'CON' and len(hazard['hdln']): - attribution = "The " + hazName + " continues for" - headPhrase = self.substituteBulletedText(areaPhrase + ".", None, "Never") - - elif hazard['act'] == 'EXA' and len(hazard['hdln']): - attribution = nwsPhrase + "expanded the" - headPhrase = self.substituteBulletedText(hazName + " to include " + areaPhrase + ".", None, "Never") - - elif hazard['act'] == 'EXT' and len(hazard['hdln']): - attribution = 'The ' + hazName + " is now in effect for" - headPhrase = self.substituteBulletedText(areaPhrase + ".", None, "Never") - - elif hazard['act'] == 'EXB' and len(hazard['hdln']): - attribution = nwsPhrase + "expanded the" - headPhrase = self.substituteBulletedText(hazName + " to include " + areaPhrase + ".", 
None, "Never") - - elif hazard['act'] == 'CAN' and len(hazard['hdln']): - attribution = "The " + hazName + \ - " for " + areaPhrase + " has been cancelled. " + \ - "|* brief post-synopsis/summary of hydromet activity *|\n\n" - - elif hazard['act'] == 'EXP' and len(hazard['hdln']): - expTimeCurrent = argDict['creationTime'] - if hazard['endTime'] <= expTimeCurrent: - attribution = "The " + hazName + \ - " for " + areaPhrase + " has expired. " + \ - "|* brief post-synopsis/summary of hydromet activity *|" - else: - timeWords = self.getTimingPhrase(hazard, expTimeCurrent) - attribution = "The " + hazName + \ - " for " + areaPhrase + " will expire " + timeWords + \ - ". " + \ - "|* brief post-synopsis/summary of hydromet activity *|" - - #wrap it, if headPhrase, then we have bullets - if headPhrase is not None: -# headPhrase = self.indentText(headPhrase, indentFirstString = '', -# indentNextString = ' ', maxWidth=self._lineLength, -# breakStrings=[" ", "-", "..."]) - - endTimePhrase = self.hazardTimePhrases(hazard, argDict, - prefixSpace=False) - endTimePhrase = self.substituteBulletedText(endTimePhrase, - "Time is missing", "DefaultOnly") - - # 3rd bullet (basis), 4th bullet (impacts) - if hazard['act'] == "NEW" and canHazard: - capText = canHazard.get('prevText', None) - else: - capText = hazard.get('prevText', None) - (haz, timeB, basis, impact, remainder, multRecords) = \ - self.decodeBulletedText(capText) - - defaultBasis = { - 'NEW': ("Basis for the watch", "Always"), - 'CON': ("Describe current situation", "DefaultOnly"), - 'EXT': ("Basis for extending the watch", "DefaultOnly"), - 'EXB': ("Basis for expansion of the watch", "DefaultOnly"), - 'EXA': ("Basis for expansion of the watch", "DefaultOnly"), - } - b = defaultBasis[hazard['act']] - if multRecords == 0: - basisPhrase = self.substituteBulletedText(basis, b[0], b[1]) - else: - basisPhrase = self.substituteBulletedText(basis, b[0], "Always") - - if (hazard['act'] == "NEW" and canHazard) or multRecords: - 
framing = "Always" - else: - framing = "DefaultOnly" - impactsPhrase = self.substituteBulletedText(impact, - "(optional) potential impacts of flooding", framing) - - #remainder of text - general = '' - addCTA = False - ctaBodyPhrase = '' - if remainder is not None and \ - (canHazard or hazard['act'] != "NEW"): - if hazard['act'] == "CON" or hazard['act'] == "EXT" or \ - hazard['act'] == "EXA" or hazard['act'] == "EXB": - paras = self.convertSingleParas(remainder) - pattern = re.compile("THE FLASH FLOOD WATCH FOR.*HAS BEEN CANCELLED.*", re.IGNORECASE) - general = '\n\n'.join([p for p in paras if not pattern.match(p)]) - elif canHazard is None: - general = remainder #use all - else: - #frame the text, without the ctas - addCTA = True - paras = self.convertSingleParas(remainder) - for p in paras: - found = self.ctasFound(p) - if len(found) == 0: - general = general + p + '\n\n' - if len(general): - general = "|* " + general[:-2] + " *|\n\n" - else: - addCTA = True - - # add in call to actions - if addCTA: - key = hazard['phen'] + '.' 
+ hazard['sig'] - cta = self.defaultCTA(key) - else: - cta = '' - - if len(cta) > 1: - ctaBodyPhrase ="\n\nPRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n" + \ - cta + \ - "\n\n&&\n\n" - else: - ctaBodyPhrase = cta - - if ctaBodyPhrase.find('PRECAUTIONARY/PREPAREDNESS ACTIONS...') != -1 and \ - attribution.find('&&') != -1: - attribution = attribution.replace('&&','') - ctaBodyPhrase = ctaBodyPhrase.replace('PRECAUTIONARY/PREPAREDNESS ACTIONS...','') - - attrPhrase = attribution + '\n\n' + headPhrase + '\n' + \ - endTimePhrase + '\n' + basisPhrase + '\n' + impactsPhrase + \ - '\n' + general + ctaBodyPhrase + '\n' - - #no headPhrase (EXP or CAN alone) - else: - attrPhrase = attribution - - return attrPhrase - - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. 
+## + +######################################################################## +# Hazard_FFA.py +# +## +########################################################################## +import GenericHazards + +import string, time, re, os, types, copy + +class TextProduct(GenericHazards.TextProduct): + VariableList = [ + (("Flood Reason (HVTEC)", "floodReason"), + "ER (Excessive Rainfall)", "radio", + ["ER (Excessive Rainfall)", + "SM (Snow Melt)", + "RS (Rain and Snow Melt)", + "DM (Dam or Levee Failure)", + "DR (Upstream Dam Release)", + "GO (Glacier-Dammed Lake Outburst)", + "IJ (Ice Jam)", + "IC (Rain and/or Snow melt and/or Ice Jam)", + "FS (Upstream Flooding plus Storm Surge)", + "FT (Upstream Flooding plus Tidal Effects)", + "ET (Elevated Upstream Flow plus Tidal Effects)", + "WT (Wind and/or Tidal Effects)", + "OT (Other Effects)", + "UU (Unknown)" + ]), + ] + + Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) + + Definition['displayName'] = None + Definition['displayName'] = "BaselineHazard_FFA_ (Flood Watch)" # Flood Watch + + Definition["areaType"] = "ZONES" # OR "FIPS" + if Definition["areaType"] == "FIPS": + Definition["defaultEditAreas"] = "EditAreas_FIPS__" #Where XXX = site id + Definition["mapNameForCombinations"] = "FIPS_" + else: + Definition["defaultEditAreas"] = "EditAreas_PublicZones__" #Where XXX = site id + Definition["mapNameForCombinations"] = "Zones_" + + # Header configuration items + Definition["productName"] = "Flood Watch" # name of product + Definition["fullStationID"] = "" # full station identifier (4letter) + Definition["wmoID"] = "" # WMO ID + Definition["pil"] = "" # product pil + #Definition["areaName"] = "Statename" # Name of state, such as "Georgia" + Definition["wfoCityState"] = "" # Location of WFO - city state + Definition["wfoCity"] = "" # WFO Name as it should appear in a text product + Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. 
+ Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. + Definition["outputFile"] = "{prddir}/TEXT/FFA_.txt" + + # OPTIONAL CONFIGURATION ITEMS + #Definition["database"] = "Official" # Source database. "Official", "Fcst", or "ISC" + #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished + #Definition["debug"] = 1 + #Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines + + Definition["purgeTime"] = 8 # Maximum hours for expireTime from issueTime + Definition["includeCities"] = 1 # Cities included in area header + Definition["accurateCities"] = 0 # If 1, cities are based on grids; + # otherwise full list is included + Definition["cityLocation"] = "CityLocation" # City lat/lon dictionary to use + Definition["cityDescriptor"] = "Including the cities of" + Definition["includeZoneNames"] = 1 # Zone names will be included in the area header + Definition["includeIssueTime"] = 1 # This should be set to zero for products + # that do not include a time lime below the UGC + #Definition["easPhrase"] = "" # Optional EAS phrase to be include in product header + Definition["lineLength"] = 66 + + Definition["includeOverviewHeadline"] = 1 #If 1, the overview header is templated + Definition["includeOverview"] = 1 #If 1, the overview section is templated + #Definition["hazardSamplingThreshold"] = (10, None) #(%cov, #points) + Definition["callToAction"] = 1 + + def __init__(self): + GenericHazards.TextProduct.__init__(self) + + def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + return [ + ('FF.A', allActions, 'FlashFlood'), + ('FA.A', allActions, 'Flood'), + ] + + def _preProcessProduct(self, fcst, argDict): + + # + # The code below determines if a NEW, EXA, or EXB is being created. + # This will determine if the EAS phrase is needed. 
+ # + + hazards = argDict['hazards'] + segmentList = self.organizeHazards(hazards.rawAnalyzedTable()) + timeRange = self.createTimeRange(0, 240) + listOfHazards = [] + useEAS = 0 + for each_watch_area in segmentList: + for each_area in each_watch_area: + areaWatchList = self._hazards.getHazardList(each_area) + for eachHazard in areaWatchList: + if (eachHazard['act'] in ['NEW', 'EXA', 'EXB', 'EXT'] and + (eachHazard['phen'] == 'FF' or eachHazard['phen'] == 'FA')): + useEAS = 1 + break + + + + # + # Special Product header code to add EAS Phrase if needed + # + + if self._areaName != "": + self._areaName = " for " + self._areaName + if useEAS == 1: + easPhrase = "URGENT - IMMEDIATE BROADCAST REQUESTED\n" + else: + easPhrase = "" + + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, self._productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = easPhrase +\ + productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n" + \ + self._easPhrase + "\n\n" + fcst = fcst + s + + fcst = fcst + "Default overview section\n" + return fcst + + + def _preProcessArea(self, fcst, segmentAreas, expireTime, argDict): + + # + # This is the header for an edit area combination + # + + editArea = segmentAreas[0] + areaLabel = editArea + HVTEC = "/00000.0." 
+ self._floodReason[0:2] + \ + ".000000T0000Z.000000T0000Z.000000T0000Z.OO/" + + areaHeader = self.makeAreaHeader( + argDict, "", self._issueTime, expireTime, + self._areaDictionary, None, cityDescriptor=self._cityDescriptor, + areaList=segmentAreas, includeCities=self._includeCities, + includeZoneNames = self._includeZoneNames, includeIssueTime = self._includeIssueTime, + hVTECString=HVTEC, + accurateCities = self._accurateCities) + + fcst = fcst + areaHeader + '\n' + return fcst + + + def _makeProduct(self, fcst, segmentAreas, argDict): + argDict["language"] = self._language + + # + # This section generates the headline on the segment + # + + # stuff argDict with the segmentAreas for DiscretePhrases + argDict['segmentAreas'] = segmentAreas + + editArea = segmentAreas[0] + areaLabel = editArea + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + + # + # This section generates the attribution statements and calls-to-action + # + + hazards = argDict['hazards'] + timeRange = self.createTimeRange(0, 240) + listOfHazards = hazards.getHazardList(segmentAreas) + + attrPhrase = "" + + # + # Check for special case where a CAN/EXP is paired with a + # NEW/EXA/EXB/EXT + # + includeText, includeFrameCodes, skipCTAs, forceCTAList = \ + self.useCaptureText(listOfHazards) + + # sort the sections within the product + listOfHazards.sort(self.sortSection) + + # find any "CAN" with non-CAN for reasons of text capture + canHazard = None + for eh in listOfHazards: + if eh['act'] in ['CAN','EXP','UPG']: + canHazard = eh + break #take the first one + + # Make Area Phrase + areas = self.getGeneralAreaList(listOfHazards[0]['id'], + areaDictName = self._areaDictionary) + areas = self.simplifyAreas(areas) + areaPhrase = self.makeAreaPhrase(areas, listOfHazards[0]['id']) + areaPhraseShort = self.makeAreaPhrase(areas, listOfHazards[0]['id'], + True) + + #process each part of the section + for 
hazard in listOfHazards: + if hazard['act'] in ['CAN','EXP','UPG']: + phrase = self.makeSection(hazard, None, areaPhraseShort, + argDict) + else: + phrase = self.makeSection(hazard, canHazard, areaPhrase, + argDict) + fcst = fcst + phrase + "\n\n" + + + self.overviewText(listOfHazards, "FFA") + + fcst = self.endline(fcst, linelength=self._lineLength, + breakStr=[" ", "...", "-"]) + return fcst + + + def simplifyAreas(self, areas): + #simplifies the area phrases by combining subareas, returns the + #areas. + + # rules: 1) multiple states and multiple direction terms in a state, + # only mention the state. 2) Multiple states but single directional + # term in a state, include the directional term. 3) Single state, + # include the directional terms. + + #determine how many states, and how many areas within each state + stateDict = {} #key is state, value is count of portions of state + for state, partOfState, names in areas: + if state in stateDict: + stateDict[state] = stateDict[state] + 1 + else: + stateDict[state] = 1 + + # if single state, include all directional terms + if len(list(stateDict.keys())) < 2: + return areas #unchanged + + # multiple states - multiple direction terms in a state + # keep states sorted in same order as present. + out = [] + for state, partOfState, names in areas: + if stateDict[state] == 1: + names.sort() + out.append((state, partOfState, names)) + elif len(out) == 0 or state != out[-1][0]: #new state + out.append((state, "", names)) #leave out partOfState + else: #same state as before + nmeList = out[-1][2] + for n in names: + nmeList.append(n) + nmeList.sort() + + return out + + + + + def makeAreaPhrase(self, areaGroups, areas, generalOnly=False): + #creates the area phrase based on the groups of areas (areaGroups, + #such as NE Pennsylvania), and the areas (areas), individual zones. + #returns the area phrase. Area phrase does not have a terminating + #period. 
+ areaGroupLen = len(areaGroups) + if areaGroupLen == 1: + areaPhrase = "a portion of " + else: + areaPhrase = "portions of " + + #parts of the states + areaList = [] + for state, partOfState, names in areaGroups: + if partOfState == '' or partOfState == ' ': + areaList.append(state) + else: + areaList.append(partOfState + " " + state) + + areaPhrase += self.punctuateList(areaList) + + #including phrase, have to count what we have + d = {'Independent city': ("Independent city", "Independent cities"), + 'Parish': ("Parish", "Parishes"), + 'County': ("County", "Counties"), + 'Zone': ("Area", "Areas") } + icCnt = 0 + parishCnt = 0 + zoneCnt = 0 + countyCnt = 0 + for state, partOfState, names in areaGroups: + for name,nameType in names: + if nameType == "zone": + zoneCnt = zoneCnt + 1 + elif nameType == "county": + countyCnt = countyCnt + 1 + elif nameType == "independent city": + icCnt = icCnt + 1 + elif nameType == "parish": + parishCnt = parishCnt + 1 + + incPhrases = [] + if zoneCnt == 1: + incPhrases.append("area") + elif zoneCnt > 1: + incPhrases.append("areas") + if countyCnt == 1: + incPhrases.append("county") + elif countyCnt > 1: + incPhrases.append("counties") + if icCnt == 1: + incPhrases.append("independent city") + elif icCnt > 1: + incPhrases.append("independent cities") + if parishCnt == 1: + incPhrases.append("parish") + elif parishCnt > 1: + incPhrases.append("parishes") + incPhrase = self.punctuateList(incPhrases) + + if generalOnly: + return areaPhrase + + + areaPhrase += ", including the following " + incPhrase + ", " + + #list of the specific areas + for i in range(len(areaGroups)): + state, partOfState, names = areaGroups[i] + if state == "The District of Columbia": + areaPhrase += state + else: + # extract out the names + snames = [] + for name,nameType in names: + snames.append(name) + + # single (don't mention state, partOfState again) + if len(areaGroups) == 1: + phrase = "" + # complex phrasing (state, partOfState, and names) + else: + if i 
== 0: + phrase = "in " + else: + phrase = "In " + if partOfState != '' and partOfState != ' ': + phrase += partOfState + ' ' + phrase += state + ", " + + phrase += self.punctuateList(snames) + + areaPhrase += phrase + if i != len(areaGroups) - 1: + areaPhrase += '. ' #another one coming, add period + + return areaPhrase + + def sortSection(self, r1, r2): + #sorts the hazards in a particular order for the sections within + #each segment. We try to keep this in the same order as the + #headlines order for clarity. + return self.regularSortHazardAlg(r1, r2) + + def makeSection(self, hazard, canHazard, areaPhrase, argDict): + #creates a section of the FFA product. The hazard record is passed + #in. canHazard is any associated CAN/EXP/UPG hazard, areaPhrase is + #the area description for the segment. + + nwsPhrase = "The National Weather Service in " + self._wfoCity + " has " + + # + # Attribution and 1st bullet (headPhrase) + # + headPhrase = None + attribution = '' + + hazName = self.hazardName(hazard['hdln'], argDict, False) + + if hazard['act'] == 'NEW' and len(hazard['hdln']): + attribution = nwsPhrase + "issued a" + headPhrase = self.substituteBulletedText(hazName + " for " + areaPhrase + ".", None, "Never") + + elif hazard['act'] == 'CON' and len(hazard['hdln']): + attribution = "The " + hazName + " continues for" + headPhrase = self.substituteBulletedText(areaPhrase + ".", None, "Never") + + elif hazard['act'] == 'EXA' and len(hazard['hdln']): + attribution = nwsPhrase + "expanded the" + headPhrase = self.substituteBulletedText(hazName + " to include " + areaPhrase + ".", None, "Never") + + elif hazard['act'] == 'EXT' and len(hazard['hdln']): + attribution = 'The ' + hazName + " is now in effect for" + headPhrase = self.substituteBulletedText(areaPhrase + ".", None, "Never") + + elif hazard['act'] == 'EXB' and len(hazard['hdln']): + attribution = nwsPhrase + "expanded the" + headPhrase = self.substituteBulletedText(hazName + " to include " + areaPhrase + ".", 
None, "Never") + + elif hazard['act'] == 'CAN' and len(hazard['hdln']): + attribution = "The " + hazName + \ + " for " + areaPhrase + " has been cancelled. " + \ + "|* brief post-synopsis/summary of hydromet activity *|\n\n" + + elif hazard['act'] == 'EXP' and len(hazard['hdln']): + expTimeCurrent = argDict['creationTime'] + if hazard['endTime'] <= expTimeCurrent: + attribution = "The " + hazName + \ + " for " + areaPhrase + " has expired. " + \ + "|* brief post-synopsis/summary of hydromet activity *|" + else: + timeWords = self.getTimingPhrase(hazard, expTimeCurrent) + attribution = "The " + hazName + \ + " for " + areaPhrase + " will expire " + timeWords + \ + ". " + \ + "|* brief post-synopsis/summary of hydromet activity *|" + + #wrap it, if headPhrase, then we have bullets + if headPhrase is not None: +# headPhrase = self.indentText(headPhrase, indentFirstString = '', +# indentNextString = ' ', maxWidth=self._lineLength, +# breakStrings=[" ", "-", "..."]) + + endTimePhrase = self.hazardTimePhrases(hazard, argDict, + prefixSpace=False) + endTimePhrase = self.substituteBulletedText(endTimePhrase, + "Time is missing", "DefaultOnly") + + # 3rd bullet (basis), 4th bullet (impacts) + if hazard['act'] == "NEW" and canHazard: + capText = canHazard.get('prevText', None) + else: + capText = hazard.get('prevText', None) + (haz, timeB, basis, impact, remainder, multRecords) = \ + self.decodeBulletedText(capText) + + defaultBasis = { + 'NEW': ("Basis for the watch", "Always"), + 'CON': ("Describe current situation", "DefaultOnly"), + 'EXT': ("Basis for extending the watch", "DefaultOnly"), + 'EXB': ("Basis for expansion of the watch", "DefaultOnly"), + 'EXA': ("Basis for expansion of the watch", "DefaultOnly"), + } + b = defaultBasis[hazard['act']] + if multRecords == 0: + basisPhrase = self.substituteBulletedText(basis, b[0], b[1]) + else: + basisPhrase = self.substituteBulletedText(basis, b[0], "Always") + + if (hazard['act'] == "NEW" and canHazard) or multRecords: + 
framing = "Always" + else: + framing = "DefaultOnly" + impactsPhrase = self.substituteBulletedText(impact, + "(optional) potential impacts of flooding", framing) + + #remainder of text + general = '' + addCTA = False + ctaBodyPhrase = '' + if remainder is not None and \ + (canHazard or hazard['act'] != "NEW"): + if hazard['act'] == "CON" or hazard['act'] == "EXT" or \ + hazard['act'] == "EXA" or hazard['act'] == "EXB": + paras = self.convertSingleParas(remainder) + pattern = re.compile("THE FLASH FLOOD WATCH FOR.*HAS BEEN CANCELLED.*", re.IGNORECASE) + general = '\n\n'.join([p for p in paras if not pattern.match(p)]) + elif canHazard is None: + general = remainder #use all + else: + #frame the text, without the ctas + addCTA = True + paras = self.convertSingleParas(remainder) + for p in paras: + found = self.ctasFound(p) + if len(found) == 0: + general = general + p + '\n\n' + if len(general): + general = "|* " + general[:-2] + " *|\n\n" + else: + addCTA = True + + # add in call to actions + if addCTA: + key = hazard['phen'] + '.' 
+ hazard['sig'] + cta = self.defaultCTA(key) + else: + cta = '' + + if len(cta) > 1: + ctaBodyPhrase ="\n\nPRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n" + \ + cta + \ + "\n\n&&\n\n" + else: + ctaBodyPhrase = cta + + if ctaBodyPhrase.find('PRECAUTIONARY/PREPAREDNESS ACTIONS...') != -1 and \ + attribution.find('&&') != -1: + attribution = attribution.replace('&&','') + ctaBodyPhrase = ctaBodyPhrase.replace('PRECAUTIONARY/PREPAREDNESS ACTIONS...','') + + attrPhrase = attribution + '\n\n' + headPhrase + '\n' + \ + endTimePhrase + '\n' + basisPhrase + '\n' + impactsPhrase + \ + '\n' + general + ctaBodyPhrase + '\n' + + #no headPhrase (EXP or CAN alone) + else: + attrPhrase = attribution + + return attrPhrase + + + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_HLS.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_HLS.py index 709fbae9c0..9759b15139 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_HLS.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_HLS.py @@ -1,7148 +1,7148 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -## -# This is a base file that is not intended to be overridden. -## - -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Hazard_HLS -# Produces HLS product. -# -# Author: (Initial) Matt Davis/ARX -# OB9.2 Tracy Hansen -# OB9.3 Shannon White/Tracy Hansen/Matt Belk -# OB17.3.1 Shannon White (updated to use InundationMax and remove all references to MSL) -# -# -# Version 6/8/2017 -# ---------------------------------------------------------------------------- - -import GenericHazards -import string, time, re, os, glob, types, copy, LogStream -import ModuleAccessor, SampleAnalysis -from math import * -import AbsTime, DatabaseID, StartupDialog -from com.raytheon.uf.viz.core import VizApp -from com.raytheon.uf.common.gfe.ifpclient import PyFPClient - - -DEG_TO_RAD = 0.017453292 - -from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID -CoordinateType = ReferenceData.CoordinateType - - -import sys, types -sys.argv = [__name__] - -class TextProduct(GenericHazards.TextProduct): - Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) - - Definition["displayName"] = "None" - Definition["outputFile"] = "{prddir}/TEXT/Hazard_HLS.txt" - Definition["database"] = "Official" # Source database - Definition["debug"] = 1 - Definition["mapNameForCombinations"] = ["Zones_"] - #Definition["mapNameForCombinations"] = ["Zones_","Marine_Zones_"] - Definition["defaultEditAreas"] = "EditAreas_PublicZones_" - Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display - - Definition["productName"] = "Tropical Cyclone Local Statement" - - Definition["fullStationID" ] = "" - Definition["wmoID" ] = "" - Definition["wfoCityState" ] = "" - Definition["pil" ] = "" - Definition["textdbPil" ] = "" - Definition["awipsWANPil" ] = "" - 
Definition["site"] = "" - Definition["wfoCity"] = "" - - Definition["areaName"] = "" #optional area name for product - Definition["areaDictionary"] = "AreaDictionary" - Definition["language"] = "english" - Definition["lineLength"] = 66 #Maximum line length - - Definition["purgeTime"] = 8 # Default Expiration in hours if - Definition["includeCities"] = 0 # Cities not included in area header - Definition["cityDescriptor"] = "Including the cities of" - Definition["includeZoneNames"] = 1 # Zone names will be included in the area header - Definition["includeIssueTime"] = 0 # Issue Time will be included in the area header - Definition["easPhrase"] = \ - "URGENT - IMMEDIATE BROADCAST REQUESTED" # Optional EAS phrase to be include in product header - Definition["callToAction"] = 1 - - def __init__(self): - GenericHazards.TextProduct.__init__(self) - - ##################################################################### - ##################################################################### - ### Organization of Formatter Code - - ############################################################### - ### MUST OVERRIDE ZONE DEFINITIONS !!! 
- ### _inlandAreas, _coastalAreas, _marineAreas, _cwa - ############################################################### - - - ############################################################### - ### Optional Overrides, HLS GUI options and Configuration for - ### Situations and Scenarios - # - # _areaDisplayType_land and _marine -- how zones are displayed in GUI's - # _GUI_labels -- wording for GUI titles - # _font_GUI_dict -- font for GUI titles - # - # _overview_list -- list of Overview GUI frames (GUI 1) - # _overviewEndInstructions - # _overviewSections - # - # _situation_list -- list of situations (each is a dictionary) - # _segmentSections -- list of segment sections (each is a dictionary) - ############################################################### - - ############################################################### - ### Hazards and Additional Hazards - ### allowedHazards is used for segmentation e.g. HU.W, TR.W... - ### allowedHeadlines are additional hazards reported in overview - ### e.g. CF.W, FA.A, TO.A... - ############################################################### - - ############################################################### - # CODE - ############################################################### - ### High level flow of formatter - ### generateForecast, determineTimeRanges, sampleData, - ### preProcessProduct, makeProduct, postProcessProduct... 
- ############################################################### - - ############################################################### - ### Helper methods -- Getting statistics from grids, - ### summarizing hazards found, determining inland/coastal/marine - ### _getSegmentInfo, _checkHazard, _orderSections, - ### _findInDictList, _accessDict - ### _analysisList_HLS - ###################################################################### - ### Previous Product Helper methods - ###################################################################### - - ###################################################################### - ### OVERVIEW Sections - ###################################################################### - ### SEGMENT Sections - #################################################### - ### Precautionary Preparedness Statement Dictionaries - ###################################################################### - ### Wind Situation/Scenario methods - ###################################################################### - ### Segment statements and thresholds e.g. Wind Statements - ##################################################### - - ############################################################### - ### Example TCP product for automated testing - ############################################################### - - ##################################################### - ### HLS GUI Processing - # - ##################################################################### - ## TK GUI Classes - ##################################################################### - ##################################################################### - - ############################################################### - ### MUST OVERRIDE these methods! 
- - def _inlandAreas(self): - return [ - #"FLZ052", "FLZ056", "FLZ057", "FLZ061", "FLZ043", - ] - - def _coastalAreas(self): - return [ - #"FLZ039", "FLZ042", "FLZ048", "FLZ049", "FLZ050", "FLZ051", "FLZ055", "FLZ060", - #"FLZ062", "FLZ065", - ] - def _marineAreas(self): - return [ - #"GMZ830", "GMZ850", "GMZ853", "GMZ856", "GMZ856", "GMZ870","GMZ873","GMZ876" - ] - - def _cwa(self): - return "" #"TBW" - - def _cwa_descriptor(self): - return "" # "central west Florida" - - def _maor_descriptor(self): - return "" # "west central Florida waters and the Gulf of Mexico" - - def _cwa_maor_descriptor(self): - return "" #"west Florida waters and the Gulf of Mexico" - - def _localReferencePoints(self): - # Give the name and lat/lon for each local reference point - return [ - #("Tampa Bay, FL", (28.01, -82.48)), - #("Cape Coral, FL", (26.63, -82.00)), - #("Lakeland, FL", (28.04, -81.95)), - #("Sarasota, FL", (27.37, -82.55)), - ] - - def _localReferencePoints_defaults(self): - # Give a list of the local reference point names to be - # turned on by default - return [] # ["Tampa Bay, FL", "Sarasota, FL"] - - ##################################################################################### - ##################################################################################### - ### Optional Overrides, HLS GUI options and Configuration for - ### Situations and Scenarios - - def _overviewFormat(self): - # For overview headlines specify "listAreas" if you want specific - # locations listed. - # Otherwise, specify "generic" for a general "All" or "Portions of" - # the CWA - return { - "land": "listAreas", - "marine": "generic", - } - - def _areaDisplayType_land(self): - # You can set this to any key within the AreaDictionary. - # e.g. 
'ugcName', 'altName', ' - # Also include the width of the display window - #return ('ugcCode', 10) - return ('ugcName', 15) - - def _referencePointLimit(self): - # Give the number of reference points allowed to be chosen - # Also give a label (e.g. "two") for the GUI - return (2, "two") - - def _areaDisplayType_marine(self): - # You can set this to any key within the AreaDictionary. - # e.g. 'ugcName', 'altName', ' - # Also include the width of the display window - return ('ugcCode', 10) - #return ('ugcName', 15) - - ################# - - # 02/28/2011 (SW/MHB) - Modified the GUI behavior so that the ECs are limited to the - # appropriate options. - # - def _overview_list(self, argDict): - allCON = argDict.get("allCON", False) - forceAbbrev = argDict.get("forceAbbrev", False) - allHUS = argDict.get("allHUS", False) - watchEC = argDict.get("watchEC", False) - allCAN = argDict.get("allCAN", False) - step1Options = [] - step6Options = [] - step7Options = [] - if allCON: - step1Options = [ - ("Use This GUI to Create Overview Text", "CreateFromGUI"), - ("Use Previous Situation Overview Text", "UsePrev"), - ] - else: - step1Options = [ - ("Use This GUI to Create Overview Text", "CreateFromGUI"), - ] - if forceAbbrev: - step6Options = [ - ("Abbreviated Issuance (WWA First Issuance Everywhere at the same time)", "Abbreviated"), - ] - step7Options = [ - ("Shortly (for Abbreviated Issuances)", "Shortly"), - ] - elif allCAN: - step6Options = [ - ("Post-Event (All hazards over everywhere)", "PostEvent"), - ("Post-Tropical", "PostTropical"), - ] - step7Options = [ - ("Last Issuance", "LastIssuance"), - ] - elif allHUS: - step6Options = [ - ("Non-Event (WWA Not Expected)", "NonEvent"), - ("Pre-Event (WWA Possible Soon; Early Evacuations)", "PreEvent"), - ("Post-Event (WWA Over, Statements Still Needed)", "PostEvent"), - ] - step7Options = [ - ("As Conditions Warrant", "Conditions"), - ("Enter Approximate Time (below)", "Enter"), - ] - elif watchEC: - step6Options = [ - ("Watches 
(No Warnings)", "Watch"), - ] - step7Options = [ - ("As Conditions Warrant", "Conditions"), - ("Enter Approximate Time (below)", "Enter"), - ] - else: - step6Options = [ - ("Warnings (With or Without Watches)", "Warning"), - ("Conditions Occurring (With Warnings)", "Conditions"), - ("Post-Event (WWA Ended and replaced by HU.S)", "PostEvent"), - ] - step7Options = [ - ("As Conditions Warrant", "Conditions"), - ("Enter Approximate Time (below)", "Enter"), - ] - - return [ - { - "name": "OverviewEditMode", - "label":"Step 1. Choose Overview Edit Mode", - "options": step1Options, - }, - { - "name": "StormInfo", - "label": "Step 2. Obtain Storm Type/Name/Info", - "options": [ - "TCPAT1", "TCPAT2", "TCPAT3", "TCPAT4", "TCPAT5", - "Enter PIL below (e.g. TCPEP1):", - ], - "entryField": " ", - }, - { - "name": "Uncertainty", - "label": "Step 3. Declare Degree of Uncertainty", - "options": [ - ("Smaller Degree", "Low"), - ("Average Degree", "Average"), - ("Larger Degree", "High"), - ("N/A", "N/A"), - ], - "default": "N/A", - }, - { - "name":"LocalReferencePoints", - "label": "Step 4. Locate Storm Relative to Local Reference Points (choose at most "\ - +self._referencePointLimit()[1]+")", - "optionType": "check", - "options": self._localReferencePoints(), - "default": self._localReferencePoints_defaults(), - }, - { - "name": "MainHeadline", - "label": "Step 5. Input Main Headline (required) ", - "options": [ - ("Enter Unique Headline (below)", "Enter"), - ("Use Previous HLS Headline", "UsePrev"), - ("Use Latest TCP Headline", "UseTCP"), - ], - "entryField": "", - }, - { - "name":"EventContext", - "label": "Step 6. Establish Event Context for CWA/MAOR (related to TC WWAs only)", - "options": step6Options, - }, - { - "name": "NextUpdate", - "label": "Step 7. Indicate Next Update Time", - "options": step7Options, - "default": "Enter Approximate Time (below)", - "entryField": " e.g. 
6 AM EDT", - }, - ] - - def _overviewEndInstructions(self): - return """Note: Please enter the necessary Overview (CWA/MAOR) information \n above before continuing to the Segmented (Zone Group) information. """ - - def _overviewSections(self): - # A list of dictionaries -- each dictionary represents a section. - # The order of the list is the order the sections will appear in the GUI. - # Fields in the dictionary can be: - # name -- name of section -- THIS should not be changed by the user since - # the code logic keys off this name - # label -- label for the section to appear in the GUI - # title -- text to appear in the product for the section - # endStr -- text to appear at the end of the section. - # NOTE: We are assuming the endStr is UNIQUE within the section and will - # not appear except at the end of the section!! - return [ - { - "name": "Overview_NewInformation", - "label": "New Information", - "title": ".NEW INFORMATION...\n", - }, - { - "name": "AreasAffected", - "label": "Areas Affected", - "title": ".AREAS AFFECTED...\n", - }, - { - "name":"WatchesWarnings", - "label":"Watches/Warnings", - "title":".Watches/Warnings...\n", - }, - { - "name":"StormInformation", - "label":"Storm Information", - "title": ".STORM INFORMATION...\n", - }, - { - "name":"SituationOverview", - "label":"Situation Overview", - "title": ".SITUATION OVERVIEW...\n" - }, - { - "name": "Overview_PrecautionaryPreparednessActions", - "label": "PRECAUTIONARY/PREPAREDNESS ACTIONS", - "title": ".PRECAUTIONARY/PREPAREDNESS ACTIONS...\nPRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n", - "endStr": "\n&&" - }, - { - "name": "NextUpdate", - "label": "Next Update", - "title": ".NEXT UPDATE...\n", - }, - ] - - def _situation_list(self): - # List of dictionaries where each dictionary represents a situation - # Entries in the dictionary can be: - # name -- name of situation -- THIS should not be changed by the user since - # the code logic keys off this name - # label -- label for the situation to 
appear in the GUI - # hazPairings -- list of action/phen/sig combo that need to exist to trigger an option - # ec -- list of a event contexts which will need to overlap with hazPairings - # scenarios -- list of possible scenarios for this - # situation and conditions in (label, name) form. - # MODIFIED 3/7 to add wrap-up scenarios for non-events to Non-Event and Post-Event - # MODIFIED 8/22 to add UPGTR.W to Abbreviated - # NOTE: You can change the scenario labels, but - # The scenario names should not be changed as - # they are tied directly to the code! - # - - return [ - { - "name": "NonEvent", - "label":"Non-Event", -## "action": ["NEW", "CON", "EXA"], -## "hazards": ["HU.S"], - "hazPairings": ["NEWHU.S", "CONHU.S", "EXAHU.S", "CANHU.S"], - "ec": ["NonEvent", "PreEvent", "Watch", "Warning", - "Conditions", "PostTropical"], - "scenarios": [ - ("Non-Event Wind Threat", "ActiveNonEvent"), - ("Cancel Non-Event", "EndNonEvent"), - ], - }, - { - "name": "PreEvent", - "label":"Pre-Event", -## "action": ["NEW", "CON", "EXA"], -## "hazards": ["HU.S"], - "hazPairings": ["NEWHU.S", "CONHU.S", "EXAHU.S"], - "ec": ["PreEvent", "Watch", "Warning", "Conditions"], - "scenarios": [ - ("Advancing Wind Threat", "Advancing"), - ("Peripheral Wind Threat", "Peripheral"), - ("In Situ Developing Wind Threat", "InSitu"), - ], - }, - { - "name": "Abbreviated", - "label": "Abbreviated", -## "action": ["NEW", "EXA", "UPG", "CAN"], -## "hazards": ["TR.A", "HU.A", "TR.W", "HU.W", "TY.A", "TY.W", "HU.S"], - "hazPairings": ["NEWHU.S","EXAHU.S","NEWHU.A","EXAHU.A","NEWTY.A","EXATY.A", - "NEWTR.A","EXATR.A","NEWHU.W","EXAHU.W","NEWTY.W","EXATY.W", - "NEWTR.W","EXATR.W","CANHU.S","UPGHU.A","UPGTY.A","UPGTR.A", - "UPGTR.W","CANHU.W","CANTY.W","CANTR.W", - "CANTR.A","CANHU.A","CANTY.A"], - "ec": ["Abbreviated", "Watch", "Warning", "Conditions"], - "scenarios": [ - ("First Issuance", "FirstIssuance"), - ], - }, - { - "name": "Watch", - "label": "Watch", -## "action": ["NEW","CON", "UPG"], -## 
"hazards": ["TR.A", "HU.A", "TY.A"], - "hazPairings": ["CONHU.A","CONTY.A","CONTR.A"], - "ec": ["Watch", "Warning", "Conditions"], - "scenarios": [ - ("Advancing Wind Threat", "Advancing"), - ("Peripheral Wind Threat", "Peripheral"), - ("In Situ Developing Wind Threat", "InSitu"), - ], - }, - { - "name": "Warning", - "label": "Warning", -## "action": ["NEW","CON", "UPG", "CAN"], -## "hazards": ["TR.W", "HU.W", "TY.W"], - "hazPairings": ["CONHU.W", "CONTY.W", "CONTR.W", "CANHU.A", "CANTY.A"], - "ec": ["Warning", "Conditions"], - "scenarios": [ - ("Advancing Wind Threat", "Advancing"), - ("Peripheral Wind Threat", "Peripheral"), - ("In Situ Developing Wind Threat", "InSitu"), - ], - }, - { - "name": "Conditions", - "label": "Conditions", -## "action": ["NEW","CON", "UPG", "CAN"], -## "hazards": ["TR.W", "HU.W", "TY.W"], - "hazPairings": ["CONHU.W", "CONTY.W", "CONTR.W"], - "ec": ["Conditions"], - "scenarios": [ - ("Imminent Wind Threat", "Imminent"), - ("Ongoing Wind Threat", "Ongoing"), - ("Diminishing Wind Threat", "Diminishing"), - ], - }, - { - "name": "PostEvent", - "label": "Post-Event", -## "action": ["NEW", "EXA", "CON", "CAN"], -## "hazards": ["TR.A", "HU.A", "TR.W", "HU.W", "TY.A", "TY.W", "HU.S"], - "hazPairings": ["CANHU.W", "CANTY.W", "CANTR.W", "NEWHU.S", "CONHU.S", - "EXAHU.S","CANHU.S", "CANHU.A", "CANTY.A", "CANTR.A"], - "ec": ["Warning", "Conditions", "PostEvent"], - "scenarios": [ - ("Immediate Rescue/Recovery", "Immediate"), - ("Minor/No Impact", "NoImpact"), - ("Longer-term Rescue/Recovery","LongTerm"), - ], - }, - { - "name": "PostTropical", - "label": "Post-Tropical", -## "action": ["CAN"], -## "hazards": ["TR.W", "HU.W"], - "hazPairings": ["CANHU.W", "CANTR.W"], - "ec": ["PostTropical"], - "scenarios": [ - ("In Progress", "InProgress"), - ("Completed Transition", "Completed"), - ], - }, - ] - - def importMethod(self, argDict, segment): - # This is a dummy method for importing text - # Enter code here to get text from a flat file or previous 
product if desired - # Then specify this in the "importMethod" entry for the desired segment - # in the _segmentSections set up - return "" - - def _segmentSections(self): - # A list of dictionaries -- each dictionary represents a section. - # The order of the list is the order the sections will appear in the GUI. - # Fields in the dictionary can be: - # name -- name of section -- THIS should not be changed by the user since - # the code logic keys off this name - # label -- label for the section to appear in the GUI - # inSegments -- "always", "optional" or situation-specific - # excludeFromSituations -- list of situations for which this section is NOT - # to be included - # includeFor -- can be list of areas for which to include this section as an option such that - # if ANY segment area meets the criteria, the section will be included as an - # option OR - # a method to be called with 2 arguments: (name, segmentAreas) which - # should return True/False for the section to be included as an option. - # defaultOn -- IF included as an option in the GUI, specify whether it should be defaulted ON - # Set to True/False OR specify a method to be called. - # The method will be called with 2 arguments: (name, segmentAreas) - # usePrev -- if True, include a "Use Previous" check box on GUI - # importPIL -- specify a product PIL from which to get section information - # If present, a check box will appear on the GUI for importing - # importMethod -- optional method for importing information from an external source. - # Specify a method (see example above) for getting text from - # an external source. - # If present, a check box will appear on the GUI for importing - # NOTE: If both importPIL and importMethod are present, the importPIL will be used. - # orderBox -- if True, include a text box on GUI to enter an order number - # title -- This is the section title that will appear in the product. 
- # It can be a tuple consisting of: - # (Title for Public zones, Title for Marine Zones) - # For example: - # ("...Winds...\n","...Winds and Seas...\n") - # - # To ADD a new section, you must - # --Add a dictionary for the section in this list - # --Provide a method for producing the contents of the section. - # The name of the method must match the "name" field for the new - # section. (Look at the "Tornadoes" method for an example.) - # For example, if you add a section to this list: - # - # { - # "name": "CoastalHazards", - # "label": "Coastal Hazards", - # "defaultOn": True, - # "includeFor": ["coastal"], - # "orderBox": True, - # "usePrev": True, - # "inSegments": "optional", - # "importMethod": None, - # "title": "...Coastal Hazards...\n", - # }, - - # Then you must have a method which returns a text string: - # - # def CoastalHazards(self, title, argDict, segment, section, info): - # - return [ - { - "name": "NewInformation", - "label": "New Information", - "defaultOn": False, - "includeFor": self._allAreas(), - "usePrev": False, - "inSegments": "optional", - "importMethod": None, - "importPIL": None, - "title": "...New Information...\n", - }, - { - "name": "PrecautionaryPreparednessActions", - "label": "PRECAUTIONARY/PREPAREDNESS ACTIONS", - "defaultOn": True, - "includeFor": self._allAreas(), - "usePrev": True, - "inSegments": "optional", - "importMethod": None, - "importPIL": None, - "title": "...PRECAUTIONARY/PREPAREDNESS ACTIONS...\nPRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n", - "endStr":"\n&&", - }, - { - "name": "Probability", - "label": "Probability of Tropical Storm/Hurricane Conditions", - "defaultOn": False, - "includeFor": self._allAreas(), - "inSegments": "optional", - "excludeFromSituations": ["Conditions", "PostEvent", "PostTropical"], - "importMethod": None, - "importPIL": None, - "title": "...Probability of tropical storm/hurricane conditions...\n", - }, - { - "name": "Wind", - "label": "Winds and Seas", - "defaultOn": True, - "includeFor": 
self._allAreas(), - "orderBox": True, - "usePrev": True, - "inSegments": "optional", - "importMethod": None, - "importPIL": None, - "title": ("...Winds...\n","...Winds and Seas...\n"), - }, - { - "name": "StormSurgeTide", - "label": "Storm Surge and Storm Tide", - "defaultOn": True, - "includeFor": self._coastalAreas(), - "orderBox": True, - "usePrev": True, - "inSegments": "optional", - "importMethod": None, - "importPIL": None, - "title":"...Storm surge and storm tide...\n" , - }, - { - "name": "InlandFlooding", - "label": "Inland Flooding", - "defaultOn": True, - "includeFor": self._inlandAreas()+self._coastalAreas(), - "orderBox": True, - "usePrev": True, - "inSegments": "optional", - "importMethod": None, - "importPIL": None, - "title": "...Inland flooding...\n", - }, - { - "name": "Tornadoes", - "label": "Tornadoes and Waterspouts", - "defaultOn": False, - "includeFor": self._allAreas(), - "orderBox": True, - "usePrev": True, - "inSegments": "optional", - "importMethod": None, - "importPIL": None, - "title": ("...Tornadoes...\n","...Tornadoes and Waterspouts...\n") - }, -# { -# "name": "Marine", -# "label": "Marine", -# "defaultOn": True, -# "includeFor": self._marineAreas(), -# "orderBox": True, -# "usePrev": True, -# "inSegments": "optional", -# "importMethod": None, -# "importPIL": None, -# "title":"...Marine...\n" , -# }, - ] - - def _defaultOn_StormSurgeTide(self, name, segmentAreas): - # Default logic will set StormSurgeTide to ON if there are any coastal zones. - # Local offices can add to the list of accepted areas (e.g. if some inland - # zones should have the Storm Surge Tide section defaulted on) - # OR change the logic as in any way desired. 
- defaultOn = False - for area in segmentAreas: - if area in self._coastalAreas(): - defaultOn = True - return defaultOn - - def _allAreas(self): - return self._inlandAreas() + self._coastalAreas() + self._marineAreas() - - ########## GUI Configuration - - def _GUI_sizing_dict(self): - # This contains values that adjust the GUI sizing. - return { - "GUI_height_limit": 800, # limit to GUI height in canvas pixels - #"GUI_2_width": 820, # width for GUI 2 - "GUI_2_width": 1200, # width for GUI 2 - #"GUI_3_width": 970, # width for GUI 3 - "GUI_3_width": 1200, # width for GUI 3 - "zoneLines": 10, # number of zones to display without scrolling - "charSize": 9, - } - - def _GUI1_configDict(self): - return { - # Order and inclusion of GUI1 buttons - # Each entry is (name of button in GUI code, desired label on GUI) - "buttonList":[ - ("PreviousHLS","PreviousHLS"), - ("Reset","Reset"), - ("Next","Next"), - ("Cancel","Cancel"), - ], - } - - def _GUI2_configDict(self): - return { - # Order and inclusion of GUI1 buttons - # Each entry is (name of button in GUI code, desired label on GUI) - "buttonList":[ - ("Next", "Next"), - ("Cancel", "Cancel"), - ], - } - - def _GUI3_configDict(self): - return { - # Order and inclusion of GUI1 buttons - # Each entry is (name of button in GUI code, desired label on GUI) - "buttonList":[ - ("Ok", "Ok"), - ("Cancel","Cancel"), - ], - } - - def _GUI_labels(self): - return { - 'GUI_2': "Step 8. Choose Situation Per Zone Group", - 'GUI_3a': "Step 9a. Choose Scenario Per Zone Group", - 'GUI_3b':"Step 9b. 
Identify & Order Sections", - } - - def _font_GUI_dict(self): - return { - "headers": ("blue", ("Helvetica", 14, "bold")), - "instructions": (None, ("Helvetica", 12, "italic")), - } - - - ##################################################################################### - - - ############################################################### - ### Hazards and Additional Hazards - # allowedHazards is used for segmentation - # allowedHeadlines are additional hazards reported in overview - - def allowedHazards(self): - tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", "EXP"] - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] - return [ - ('HU.A',allActions,'Hurricane'), - ('HU.W',allActions,'Hurricane'), - ('HU.S',allActions,'Hurricane'), - ('TY.A',allActions,'Typhoon'), - ('TY.W',allActions,'Typhoon'), - ('TR.A',allActions,'Tropical'), - ('TR.W',allActions,'Tropical'), - ] - - def allowedHeadlines(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - return [ - ('FF.A', allActions, 'Flood'), # FLASH FLOOD WATCH - ('FA.A', allActions, 'Flood'), # FLOOD WATCH - ('CF.W', allActions, 'CoastalFlood'), # COASTAL FLOOD WARNING - ('CF.Y', allActions, 'CoastalFlood'), # COASTAL FLOOD ADVISORY - ('CF.A', allActions, 'CoastalFlood'), # COASTAL FLOOD WATCH - ('SU.W', allActions, 'HighSurf'), # HIGH SURF WARNING - ('SU.Y', allActions, 'HighSurf'), # HIGH SURF ADVISORY - ('RP.S', allActions, 'Rip'), # HIGH RIP CURRENT RISK - ('TO.A', allActions, 'Convective'), # TORNADO WATCH - ('SR.W', allActions, 'Marine'), - ('SR.A', allActions, 'Marine'), - ('GL.W', allActions, 'Marine'), - ('GL.A', allActions, 'Marine'), - ('SC.Y', allActions, 'Marine'), - ('SI.Y', allActions, 'Marine'), - ('SW.Y', allActions, 'Marine'), - ('RB.Y', allActions, 'Marine'), - ('HF.W', allActions, 'Marine'), - ('HF.A', allActions, 'Marine'), - ] - - def _ignoreActions(self): - # Ignore hazards with these 
action codes in the overview headlines - # NOTE: the VTEC and segments will still include them correctly. - return ['CAN', 'UPG'] - - ############################################################### - ### NOTES - - ## HANDLING HLS SEGMENTATION - ## - ## Problem: - ## The system is set up to sample hazards using the combinations file - ## edit areas i.e. do not sample zones not in the combinations - ## segmenting strictly according to the hazards i.e. all zones - ## in a combination with the same hazard will be in a - ## segment. - ## - ## The HLS formatter uses the combinations file differently. - ## Segmenting is initially done according to the hazards as - ## above, but IF a forecaster wants to further split the - ## segment, he/she can set up a combination in the zone - ## combiner to do so. This however, is optional and all - ## areas (land and marine) in the WFO need to be sampled for - ## hazards regardless of what's in the combinations file. - ## - ## Solution: - ## The HLS code has several relatively independent pieces, and - ## each has to sample the hazards correctly: - ## - ## --GUI code: Sampled by "_determineSegments" and stored in - ## argDict['hazards'] - ## - ## --Formatter Logic code: Sampled by "_getProductInfo" and - ## stored in argDict['hazards']. Note: we can't re-use the - ## hazards set by the GUI code because between the time the - ## GUI is called and the formatter code is invoked, the - ## TextFormatter infrastructure has re-sampled the hazards - ## using the combinations file as above and reset the - ## argDict['hazards'] entry. - ## - ## --Override DiscretePhrases "getHazardList" to use the hazards - ## stored in argDict rather than using the one automatically - ## generated by TextFormatter infrastructure which uses the - ## combinations file as above. 
- - ############################################################### - ### High level flow of formatter - - def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Use previous for entire product - try: - if self._UsePrev: - return self.getPreviousProduct(self._textdbPil) - except: - pass - - #print "\n\nvarDict", argDict["varDict"] - segmentList = [areas for segNum, areas, situation, scenario, - sections, extraInfo in self._segments] - #print "\n\nSegment Information", self._segments, "\n\n" - if len(segmentList) == 0: - return "No hazards to report" - - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - self._sampleData(argDict) - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each segment in the segmentList - fraction = 0 - fractionOne = 1.0/float(len(segmentList)) - percent = 50.0 - self.setProgressPercentage(50) - for segment in self._segments: - self.progressMessage(fraction, percent, "Making Product for Segment") - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - fcst = self._preProcessArea(fcst, segmentAreas, self._expireTime, argDict) - fcst = self._makeProduct(fcst, segment, argDict) - fcst = self._postProcessArea(fcst, segmentAreas, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - ######### Time ranges - - def _resolution(self): - return 3 - - def _determineTimeRanges(self, argDict): - # Set up the time range for 0-120 hours - self._issueTime = AbsTime.AbsTime(argDict['creationTime']) - - # Create a time range from the issuanceHour out 120 hours - # First get the current local time - localTime = time.localtime(argDict['creationTime']) - year = localTime[0] - month = localTime[1] - day = 
localTime[2] - hour = localTime[3] - # Now "truncate" to a 6-hourly boundary and compute startTime in local Time. - hour = int (int(hour/6) * 6) - startTime = AbsTime.absTimeYMD(year, month, day, hour) - # Finally, convert back to GMT - localTime, shift = self.determineTimeShift() - startTime = startTime - shift - self._timeRange = self.makeTimeRange(startTime, startTime+120*3600) - - # Determine the time range list, making sure they are on hour boundaries - # w.r.t. midnight today according to the resolution - subRanges = self.divideRange(self._timeRange, self._resolution()) - trList = [] - for tr in subRanges: - # print tr - trList.append((tr, "Label")) - self._timeRangeList = trList - - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._currentTime = argDict['creationTime'] - self._expireTime = self._issueTime + self._purgeTime*3600 - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - return None - - ######### Sample Data - ## Since the segments are determined by user input, - ## we need to determine combinations now (usually done automatically - ## by TextFormatter infrastructure.) - - def _sampleData(self, argDict): - # Sample the data - editAreas = self._makeSegmentEditAreas(argDict) - areas = self._marineAreas() - areas.append(self._cwa()) - cwa_maor = self._makeCombination(argDict, areas) - editAreas.append(cwa_maor) - self._cwaMaorArea, self._cwaMaorLabel = cwa_maor - self._sampler = self.getSampler(argDict, - (self._analysisList_HLS(), self._timeRangeList, editAreas)) - - def _makeSegmentEditAreas(self, argDict): - areasList = [segmentAreas - for segmentNum, segmentAreas, situation, scenario, - sections, extraInfo in self._segments] - #print "areaList", areasList - editAreas = [] - self._editAreaDict = {} - for areas in areasList: - if len(areas)>1: - # Make a combination on the fly - editArea, label = self._makeCombination(argDict, areas) - # e.g. 
editArea, Combo1 - self._editAreaDict[tuple(areas)] = editArea - editAreas.append((editArea, label)) - else: - area = areas[0] - self._editAreaDict[tuple(areas)] = area - editAreas.append((area, area)) - return editAreas - - def _getComboNumber(self): - try: - self.__comboNumber = self.__comboNumber + 1 - except: - self.__comboNumber = 1 - return self.__comboNumber - - def _makeCombination(self, argDict, areaNames): - # Given a list of area names, return a combination edit area - gridLoc = argDict["ifpClient"].getDBGridLocation() - comboList = [] - for areaName in areaNames: - newArea = self.getEditArea(areaName, argDict) - if areaNames.index(areaName) == 0: - comboNumber = self._getComboNumber() - label = "Combo"+`comboNumber` - refId = ReferenceID(label) - #area = AFPS.ReferenceData( - # gridLoc, refId, newArea.polygons(), - # AFPS.ReferenceData.LATLON) - #area.convertToAWIPS() - area = ReferenceData(gridLoc, refId, newArea.getPolygons(CoordinateType.LATLON), CoordinateType.LATLON) - comboList.append(newArea.getId().getName()) - area = self.unionAreas(label, area, newArea) - return area, label - - ###### Generate headers and Overview sections - - def _preProcessProduct(self, fcst, argDict): - - self._prevHLS = self.getPreviousProduct(self._textdbPil) - - info = self._getProductInfo(argDict) - self._getStormInfo(argDict, info) - if self._stormTypeName.find("|*")>=0: sn = "Tropical Cyclone" - else: sn = self._stormTypeName - actualProductName = sn + " Local Statement" - actualProductName = self.checkTestMode(argDict, actualProductName) - - # Product header - if self._areaName != "": - self._areaName = " for " + self._areaName - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, actualProductName + self._areaName) - - if len(self._easPhrase) != 0: - eas = self._easPhrase.upper() + '\n' - else: - eas = '' - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + 
s.upper() - - s = eas + productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - # Main Headline - mh = self._MainHeadline - if mh == "Enter": - hl = self._MainHeadline_entry - elif mh == "UsePrev": - hl = self._grabHeadline(self._prevHLS) - elif mh == "UseTCP": - try: # If unnamed or downgraded, we won't have a TCP product - hl = self._grabHeadline(self._TCP) - except: - hl = "" - - if hl == "": - hl = self._frame("Enter headline here") - hl = self._addEllipses(hl) - fcst = fcst + hl + "\n\n" + self._overview(argDict, info) - return fcst - - # Modified 4/21/09 (MHB) - Fixed a problem with the construction of the - # overview when using previous text. This will fix the problem with - # getting multiple copies of the first zone segment header. The - # _grabSection method is not capable of recognizing the end of the - # "Next Update" overview section on its own. Implemented the - # _grabOverview method (already defined in the baseline) to parse out the - # entire overview, with which _grabSection will work correctly. - # - # Modified 12/24/10 (MHB) - Added capability to specify which sections - # of the overview can use previous text. All other sections will be - # forced to update. This involves a new call to the _grabSection method. 
- - def _overview(self, argDict, info): - - # Establish previous HLS text for testing - if needed - #if len(self._prevHLS.strip()) == 0: - # self._prevHLS = self._testPrevHLS() - - overview = "" - if self._OverviewEditMode == "FormatFree": - return self._frame("Enter Overview Information") + "\n\n" - - if self._OverviewEditMode == "UsePrev": - usePrev = True - - # Set aside to overview text, so we don't have to search the - # entire HLS product - prevOverview = self._grabOverview(self._prevHLS) - else: - usePrev = False - -## print "prev = '%s'" % (prevOverview) - - # Get the list of sections which must be present, in order - sections = self._overviewSections() - for sectionDict in sections: - title = sectionDict.get("title", '') - - # Start out with a blank text for this section - sectionText = "" - - # If we are requested to use previous text, and this is a section - # we can use it for - if usePrev and \ - title.strip() not in self._noPrevTextOverviewSections(): -# print "Looking for previous '%s'" % (title) - - # Get the previous text for this section - sectionText = self._grabSection(prevOverview, title, True) - -# print usePrev, len(sectionText.strip()) -# print "'%s'" % (sectionText.strip()) - - # If we are not using the previous text, or we could not find - # the previous section text - if not usePrev or len(sectionText.strip()) == 0: - exec "sectionText = self." + sectionDict["name"] + "(title, sectionDict, info)" - - # Ensure the grabbed text is wrapped to the correct product length - sectionText = self.endline(sectionText, self._lineLength) - - # Add this section text to the current overview - overview = overview + sectionText + "\n\n" - - # Return completed overview - return overview - - ########## Produce Segment Sections - -## # Modified 12/24/10 (MHB) - Added capability to specify which sections -## # of the segment can use previous text. All other sections will be -## # forced to update. This involves a new call to the _grabSection method. 
-## - def _makeProduct(self, fcst, segment, argDict): - argDict["language"] = self._language - self._stormPrevHLS = False - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - info = self._getSegmentInfo([segment]) - self._setStats(info, argDict, segmentAreas, self._editAreaDict, - self._sampler, self._analysisList_HLS(), self._timeRangeList) - - # - # This section generates the segment sections - # - hazardsC = argDict['hazards'] - listOfHazards = hazardsC.getHazardList(segmentAreas) - if listOfHazards == []: - return fcst - -# LogStream.logProblem("=== VARS ===", -# "\nhazardsC=",hazardsC, -# "\nlistOfHazards=",listOfHazards, -# "\nsegmentAreas=",segmentAreas, -# "\nphen.sig=",listOfHazards[0]['phensig'], -# ) - LogStream.logEvent("=== VARS ===", - "\nhazardsC=",hazardsC, - "\nlistOfHazards=",listOfHazards, - "\nsegmentAreas=",segmentAreas, - "\nphen.sig=",listOfHazards[0]['phensig'], - ) - - prevHLS = self._getPrevText(listOfHazards) - - # This section generates the headline on the segment - # - # stuff argDict with the segmentAreas for DiscretePhrases - argDict['segmentAreas'] = segmentAreas - editArea = segmentAreas[0] - # Stuff the headline for HU.S in extraInfo - self._setUp_HU_S_Headline(extraInfo, prevHLS) - argDict["extraInfo"] = extraInfo - areaLabel = editArea - - fcst = fcst + self.generateProduct("Hazards", argDict, area=editArea, - areaLabel=areaLabel, timeRange=self._timeRange) - - # self._segments = - # (segNum, areas, chosen situationName, chosen scenarioName, - # sections, extraInfo) - # For example: -## (1, ['FLZ052'],'Warning', 'Advancing', -## # list of sections: name, order, usePrev, useImport -## [ -## ('PrecautionaryPreparednessActions', None, 0, None), -## ('Probability', '', 0, None), -## ('Winds', '', 0, None), -## ('StormSurgeTide', '', 0, None), -## ('InlandFlooding', '', 0, None), -## ('Tornadoes', '', 0, None), -## ('Marine', '', 0, None) -## ] -## # Extra information for HU.S headlines -## 
{'userHeadline_HU_S': 'Headline for HU.S', -## 'usePrev_HU_S_Headline':0}, -## ), - - sections = self._orderSections(sections) - # iterate over the sections for this segment - for section in sections: - - # Initialize text for this section - sectionText = '' - - # Get info about this section - sectionName, order, usePrev, useImport = section - #print "section", sectionName, order, usePrev, useImport - title = self._findInDictList( - self._segmentSections(), "name", sectionName, "title") - title = self._extractTitle(info, title) - - # If we should use previous section text, and this is a section - # permitted to use previous text - if usePrev and \ - title.strip() not in self._noPrevTextSegmentSections(): - - # If we will also be importing text for this section - if useImport: - # Frame the previous text to force forecaster to review it - sectionText = self._grabSection(prevHLS, title, True) - else: - - # Just get the previous section text without framing codes - sectionText = self._grabSection(prevHLS, title) - - # If we are not using previous text, or could not find it - if not usePrev or len(sectionText.strip()) == 0: - # Make a blank shell section as a place-holder - exec "sectionText = self." 
+ sectionName + "(title, argDict, segment, section, info)" - - # If we should also import text - if useImport: - importText = self._getImportText(argDict, segment, sectionName, - title) - if importText.strip() != "": - - print "\n\n" + "*"*80 - print "sectionText = '%s'" % (sectionText) - - # Look to see if there are any section headers and framing - # codes with dummy text in the existing section text - sectionMatch = re.search("(?is)^(\.{3}.+\.{3}.+?)\|\*" + - " *(additional free|enter|add)", - sectionText) - - - # If we are not using the previous text, and the text - # contains both a section header and dummy text in framing - # codes - if sectionMatch is not None: - - # Keep the section header then add the imported text - sectionText = sectionMatch.group(1).strip() + "\n" + \ - importText - - # Otherwise, append imported text to the end of section - else: - sectionText = sectionText + "\n\n" + importText - - # Add endStr - endStr = self._findInDictList( - self._segmentSections(), "name", sectionName, "endStr") - #print "\n***********endStr", endStr, sectionName - if endStr is not None: - # Remove first in case an endStr was added from previous or imported text - # Note that we're assuming endStr's are unique within the section!! - sectionText = sectionText.replace(endStr, "") - # Now put the endStr at the end - sectionText = sectionText + endStr - - # Add this section to the segment - sectionText = sectionText + "\n\n" - fcst = fcst + sectionText - - # Word wrap this segment - fcst = self.endline(fcst, linelength=self._lineLength, - breakStr=[" ", "-", "..."]) - return fcst - - # Added 12/24/10 (MHB) - Define a method to specify which overview - # sections are not permitted to use text from a previous HLS. This means - # these sections will always have new text created/imported for it. The - # section titles must match those as defined in the _overviewSections - # method (although any final "\n" can be ignored. 
- def _noPrevTextOverviewSections(self): - return [".NEW INFORMATION...", ".AREAS AFFECTED...", - ".WATCHES/WARNINGS...", ".STORM INFORMATION...", - ".NEXT UPDATE...", - ".PRECAUTIONARY/PREPAREDNESS ACTIONS...\n" + - "PRECAUTIONARY/PREPAREDNESS ACTIONS..." - ] - - # Added 12/24/10 (MHB) - Define a method to specify which segment - # sections are not permitted to use text from a previous HLS. This means - # these sections will always have new text created/imported for it. The - # section titles must match those as defined in the _segmentSections - # method (although any final "\n" can be ignored. - def _noPrevTextSegmentSections(self): - return ["...New Information...", - "...Probability of tropical storm/hurricane conditions...", - ] - - def _getImportText(self, argDict, segment, sectionName, title): - importText = "" - # Look for importPIL - importPil = self._findInDictList( - self._segmentSections(), "name", sectionName, "importPIL") - if importPil is not None: - importProduct = self.getPreviousProduct(importPil) - importText = self._grabSection(importProduct, title) - # Remove the title - importText = importText.replace(title, "") - else: # Try importMethod - importMethod = self._findInDictList( - self._segmentSections(), "name", sectionName, "importMethod") - if importMethod is not None: - importText = importMethod(argDict, segment) - if len(importText.strip()) > 0: - # Clean up and word-wrap imported text - importText = self._cleanText(importText.strip()) - # Add the imported text to this section - importText = self._frame(importText) + "\n\n" - return importText - - # Modified 4/22/09 (MHB) - fixed logging options of search as it could - # lead to debugging confusion. Only want log info after the loop has - # completed. 
- def _getPrevText(self, listOfHazards): - #======================================================================= - # Set aside the previous text for this segment - if we can - - prevHLS = '' - - # Look through all hazards for this segment - for hazIndex in range(len(listOfHazards)): - - # See if this hazard has a previous text text key - if listOfHazards[hazIndex].has_key('prevText'): - - # Try to get the previous text from this hazard - prevHLS = listOfHazards[hazIndex]['prevText'] - - # If there is actually something there - if len(prevHLS) > 0: - - # No point in continuing - we found the previous text - break - - # If there is still not something there - if prevHLS == '': -# LogStream.logProblem("No 'prevText' found for this segment") - LogStream.logEvent("No 'prevText' found for this segment") - else: -# LogStream.logProblem("\nprevText=", prevHLS) - LogStream.logEvent("\nprevText=", prevHLS) - - return prevHLS - - ############ Clean up - - def _postProcessProduct(self, fcst, argDict): - fcst = self.endline(fcst, linelength=self._lineLength, - breakStr=[" ", "-", "..."]) - fcst = fcst.replace("\n ","\n") - fcst = fcst.replace("&&", "\n&&\n") - - # Prevent empty Call to Action Tags - fcst = re.sub(r'\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\s*&&\n', \ - "", fcst) - # - # Clean up multiple line feeds - # - fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) - fcst = fixMultiLF.sub(r'\1', fcst) - # finish progress meter - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ############################################################### - ### Helper methods for getting information about the segments - - - ## From the tropical formatters -- want to use the same configuration - - def moderated_dict(self, parmHisto, timeRange, componentName): - """ - Modifed to lower the high end filter threshold from 20 MPH to - 15 MPH for Tropical. 
- """ - # COMMENT: This dictionary defines the low and high limit at which - # outliers will be removed when calculating moderated stats. - # By convention the first value listed is the percentage - # allowed for low values and second the percentage allowed - # for high values. The thresholds chosen below gave best results - # during testing with 2004 and 2005 tropical cyclones. This dict - # is used with the moderatedMinMax analysis method specified in the - # TropicalPeriod definitions specified further down for use with - # tropical cyclones with wind parameters. - - # Get Baseline thresholds - dict = SampleAnalysis.SampleAnalysis.moderated_dict( - self, parmHisto, timeRange, componentName) - - # Change thresholds for Wind, WindGust, WaveHeight and Swell - # For entire area, we want the actual maximum - if parmHisto.area().getId().getName() == self._cwaMaorLabel: - dict["Wind"] = (0,0) - else: - dict["Wind"] = (0, 15) - dict["WindGust"] = (0, 15) - dict["WaveHeight"] = (0, 15) - dict["Swell"] = (0, 15) - dict["InundationMax"] = (0,2) - return dict - - # This is a very simple way to round values -- if we need - # something more sophisticated, we'll add it later. 
- def _increment(self, element): - dict = { - "Wind": 5, - "WindGust": 5, - "InundationMax": 1, - } - return dict.get(element, 0) - - def _ktToMph(self, value, element): - newVal = self.ktToMph(value) - newVal = self.round(newVal, "Nearest", self._increment(element)) - return newVal - - class SegInfo: - def __init__(self): - pass - - def _getSegmentInfo(self, segments): - # Used to handle all the information required for - # both overview and segment sections - # Determines - # kinds of areas included (land, marine, coastal, inland) - # hazards included - # and for segments, the wind and probability data from the grids - # - # All of this can then be passed in the "info" object - # to the section methods for reporting - # - if type(segments) is not types.ListType: - segments = [segments] - allAreas = [] - for segment in segments: - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - allAreas = allAreas + segmentAreas - - info = self.SegInfo() - - # Determine the types of areas included in segments - # anyXX means that there is at least one area in the segment that is XX - # allXX means that all the XX areas are in the segment - info.anyInland, info.allInland, info.inlandAreas = self._checkAreas( - allAreas, self._inlandAreas()) - info.anyCoastal, info.allCoastal, info.coastalAreas = self._checkAreas( - allAreas, self._coastalAreas()) - info.anyMarine, info.allMarine, info.marineAreas = self._checkAreas( - allAreas, self._marineAreas()) - info.anyLand = info.anyInland or info.anyCoastal - info.allLand = info.allInland and info.allCoastal - info.landAreas = info.inlandAreas + info.coastalAreas - info.allAreas = info.inlandAreas + info.coastalAreas + info.marineAreas - info.cwa= self._generalAreas(info.landAreas + info.marineAreas) - info.cwaShort = info.cwa.rstrip(".\n") - info.all_cwa_maor_areas = self._inlandAreas() + self._coastalAreas() + self._marineAreas() - self._determineHazards(info, segments) - return info - - def 
_determineHazards(self, info, segments): - # Return a list of hazards from the given segments in the form: - # (key, landList, marineList, coastalList, inlandList) - # where key is (hdln, act, phen, sig) and the lists show which areas - # contain the hazard separated by category - info.hazardHdlns = [] - hazAreaList = [] - for segment in segments: - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - hazardTable = self._argDict["hazards"] - hazards = hazardTable.getHazardList(segmentAreas) - for hazard in hazards: - action = hazard['act'] - hazAreaList.append((hazard, segmentAreas)) - # Consolidate hazards (there could be multiple segments with the same phen/sig/act) - hazardDict = {} - hazardList = [] - for hazard, segmentAreas in hazAreaList: - key = (hazard['hdln'], hazard['act'], hazard['phen'], hazard['sig']) - if key not in hazardDict.keys(): - hazardDict[key] = segmentAreas - hazardList.append(key) - else: - hazardDict[key] = hazardDict[key]+segmentAreas - # Now we have areas that have the same headline and must split them into land/marine - for key in hazardList: - landAreas = [] - marineAreas = [] - coastalAreas = [] - inlandAreas = [] - hdln, act, phen, sig = key - hazAreas = hazardDict[key] - for area in hazAreas: - if area in info.landAreas: landAreas.append(area) - if area in info.marineAreas: marineAreas.append(area) - if area in info.coastalAreas: coastalAreas.append(area) - if area in info.inlandAreas: inlandAreas.append(area) - info.hazardHdlns.append(( - key, landAreas, marineAreas, coastalAreas, inlandAreas)) - #print "\nhazardList", info.hazardHdlns - - def _getWindStats(self, argDict, sampler, analysisList, timeRangeList, editArea): - statList = self.getStatList(sampler, analysisList, timeRangeList, editArea) - maxWind = 0 - print "\nGetting CWA MAOR wind stats" - for i in range(len(statList)): - tr, label = timeRangeList[i] - statDict = statList[i] - wind = self._getStatValue(statDict, "Wind", "Max", self.VECTOR()) - 
print " wind value", wind, tr - if wind > maxWind: - maxWind = wind - print " returning maxWind", maxWind - return maxWind - - def _setStats(self, info, argDict, segmentAreas, editAreaDict, - sampler, analysisList, timeRangeList): - # Get statistics for this segment and attach them to the info object - editArea = editAreaDict[tuple(segmentAreas)] - statList = self.getStatList(sampler, analysisList, timeRangeList, editArea) - #print "statList", statList - - # Determine the MaxWind and MaxWindGust values and duration of maxWind - info.maxWind = None - info.minWind = None - info.avgWind = None - info.maxGust = None - - # Determine first grid that has Wind >= 34 - info.wind34Time = None - - # Determine min and max probabilities for 34, 64 - info.minProb34 = None - info.maxProb34 = None - info.minProb64 = None - info.maxProb64 = None - - # Time of maximum pws34int and pws64int - info.maxINTprob34 = None - info.maxINTprob64 = None - - pws34Max = None - pws64Max = None - info.pwstrend = None - - # These need to be initialized to None so we'll know if NO grids are present - info.inundationMax = None - - # Pass 1: Determine maximum values - for i in range(len(statList)): - tr, label = timeRangeList[i] - statDict = statList[i] - #print "\ntr", tr - - stats = self.getStats(statDict, "prob34") - if stats is not None: - min34, max34 = stats - if info.maxProb34 is None: info.maxProb34 = max34 - if info.minProb34 is None: info.minProb34 = min34 - if min34 < info.minProb34: - info.minProb34 = min34 - if max34 > info.maxProb34: - info.maxProb34 = max34 - - stats = self.getStats(statDict, "prob64") - if stats is not None: - min64, max64 = stats - if info.minProb64 is None: info.minProb64 = min64 - if info.maxProb64 is None: info.maxProb64 = max64 - if min64 < info.minProb64: - info.minProb64 = min64 - if max64 > info.maxProb64: - info.maxProb64 = max64 - - pws34int = self._getStatValue(statDict, "pws34int", "Max") - if pws34int is not None: - if pws34int > pws34Max: - 
info.maxINTprob34 = tr - pws34Max = pws34int - pws64int = self._getStatValue(statDict, "pws64int", "Max") - if pws64int is not None: - if pws64int > pws64Max: - info.maxINTprob64 = tr - pws64Max = pws64int - - # Get wind and gust values -- - wind = self._getStatValue(statDict, "Wind", "MinMax", self.VECTOR()) - if wind is not None: - minWind, maxWind = wind - #print "minWind, maxWind", minWind, maxWind - if info.maxWind is None: - info.minWind = minWind - info.maxWind = maxWind - else: # Check for maxWind increasing or decreasing - if maxWind > info.maxWind: - info.maxWind = maxWind - if minWind < info.minWind: - info.minWind = minWind - if info.wind34Time is None and info.maxWind >= 34: - info.wind34Time = tr - windGust = self._getStatValue(statDict, "WindGust", "Max") - if windGust is not None: - if info.maxGust is None: - info.maxGust = windGust - if windGust > info.maxGust: - info.maxGust = windGust - - info.inundationMax = self._pickupMaxStats( - statDict, info.inundationMax, "InundationMax") - - # Round to increment - if info.maxWind is not None and info.minWind is not None: - info.avgWind = (info.maxWind + info.minWind)/2.0 - info.maxWind = int(self.round(info.maxWind, "Nearest", self._increment("Wind"))) - info.minWind = int(self.round(info.minWind, "Nearest", self._increment("Wind"))) - info.avgWind = self.round(info.avgWind, "Nearest", self._increment("Wind")) - if info.maxGust is not None: - info.maxGust = int(self.round(info.maxGust, "Nearest", self._increment("WindGust"))) - if info.inundationMax is not None: - info.inundationMax = int(self.round( - info.inundationMax,"Nearest", self._increment("InundationMax"))) - if info.inundationMax > 10: - info.deltaSurge = info.inundationMax - 4 - elif info.inundationMax > 6: - info.deltaSurge = info.inundationMax - 3 - elif info.inundationMax > 2: - info.deltaSurge = info.inundationMax - 2 - - print "\n\nStats for segment", segmentAreas - print " maxWind, maxWindGust", info.maxWind, info.maxGust - print " 
minWind, avgWind", info.minWind, info.avgWind - print - print " 34 Info" - print " min, max prob34", info.minProb34, info.maxProb34 - print " maxINTprob34", info.maxINTprob34 - print " 64 Info" - print " min, max, prob64", info.minProb64, info.maxProb64 - print " maxINTprob64", info.maxINTprob64 - print " InundationMax", info.inundationMax - - # Make additional passes to determine durations - # These are values for which we need to calculate durations - # In addition, we'll calculate for the maxWind value - windDurValues = [34, 50, 64] - info.windDur = {} - print "Durations" - for durVal in windDurValues + [info.maxWind]: - info.windDur[durVal] = self._determineDuration(durVal, statList, timeRangeList) - print " ", durVal, info.windDur[durVal] - print - - def _pickupMaxStats(self, statDict, curValue, element): - # Given an element and curValue, pick up the stats from the statDict - # If stats are greater than the curValue, replace and return curValue - # Assumes that curValue is initialized to None - stats = self.getStats(statDict, element) - if stats is not None: - if curValue is None: - curValue = 0 - if stats > curValue: - curValue = stats - return curValue - - def _determineDuration(self, durValue, statList, timeRangeList): - # Determine maxWind, wind34, and wind64 durations, and end time of 34, 64 - # This will be the first time range that a value goes from above 34, 64 to below - #print "\n Determine Duration for", durValue - hit = False - beg = None - end = None - for i in range(len(statList)): - tr, label = timeRangeList[i] - statDict = statList[i] - # Get wind stats - wind = self._getStatValue(statDict, "Wind", "MinMax", self.VECTOR()) - if wind is not None: - minWind, maxWind = wind - #print " minWind, maxWind", minWind, maxWind - maxWind = int(self.round(maxWind, "Nearest", self._increment("Wind"))) - #print " new maxWind", maxWind - if end is None: - if hit and maxWind < durValue: - end = tr - elif not hit and maxWind >= durValue: - hit = True - beg = 
tr - #print "beg, end", beg, end, "\n" - if beg is not None: - if end is None: - end = tr - newTR = self.makeTimeRange(beg.startTime(), end.startTime()) - return newTR - return None - - def _getStatValue(self, statDict, element,method=None, dataType=None): - stats = statDict.get(element, None) - if stats is None: return None - if type(stats) is types.ListType: - stats = stats[0] - stats, tr = stats - if dataType==self.VECTOR(): - stats, dir = stats - return self.getValue(stats, method) - - def _determineDescriptor(self, info, areas): - # Return the descriptor for the type of areas given plus - # the comparison list to determine if the areas cover all or portions - # of the descriptor - # If all marine, return maor_descriptor and marineAreas - # If all land, return cwa_descriptor and landAreas - # else return cwa_maor_descriptor and all land and marine areas - any, all, areas = self._checkAreas(areas, self._marineAreas()) - if all: - return self._maor_descriptor(), self._marineAreas() - landAreas = self._inlandAreas()+self._coastalAreas() - any, all, areas = self._checkAreas(areas, landAreas) - if all: - return self._cwa_descriptor(), landAreas - return self._cwa_maor_descriptor(), info.all_cwa_maor_areas - - def _checkAreas(self, segmentAreas, checkAreas): - # all is True if ALL checkAreas are in the segmentAreas - # e.g. 
all land areas are in the segment areas - all = True - any = False - areas = [] - for area in segmentAreas: - if area in checkAreas: - any = True - areas.append(area) - else: - all = False - return any, all, areas - - def _areaType(self, areas): - inland = False - coastal = False - marine = False - for area in areas: - if area in self._inlandAreas(): - inland = True - break - for area in areas: - if area in self._coastalAreas(): - coastal=True - break - for area in areas: - if area in self._marineAreas(): - marine=True - break - return inland, coastal, marine - - def _checkHazard(self, hazardHdlns, phenSigList, checkAreaTypes=None, - checkAreas=None, returnList=False, mode="any", includeCAN=False): - # Given a list of hazards in the form - # (key, landList, marineList, coastalList, inlandList) - # where key is (hdln, act, phen, sig) and the lists show which areas - # contain the hazard - # If mode == "any": - # Check to see if any of the given phenSigList = [(phen, sig), (phen, sig)] - # are found - # If mode == "all": - # Check to see if all of the given phenSigList are found - # IF checkAreaTypes is given, then check against that particular area type(s) i.e. - # "land", "marine", etc. - # IF checkAreas is given, only return areas that are in that list - # IF returnList=True, returns a list of (key, areas) that meet the criteria - # IF includeCAN is True then CAN hazards will be included as well. - # Otherwise, they are ignored. - # - # E.g. 
hdlnList = self._checkHazard(hazardHdlns, [("FA","W")], returnList=True) -# print "phenSigList is ", phenSigList - chosen = [] - for key, landList, marineList, coastalList, inlandList in hazardHdlns: -# print "what is mode?", mode - hazAreas = landList+marineList - hazValue = (key, hazAreas) -# print "hazValue is ", hazValue - hdln, act, phen, sig = key - if not includeCAN and act == "CAN": - continue - for checkPhen, checkSig in phenSigList: -# print "checkPhen is ", checkPhen -# print "checkSig is ", checkSig - if phen == checkPhen and sig == checkSig: - if checkAreaTypes is not None: - # Check for land, marine, etc. - for checkAreaType in checkAreaTypes: - exec "testList = " + checkAreaType + "List" -# print "testList is", testList - if testList != []: - chosen.append(hazValue) -# print "chosen is ", chosen - elif checkAreas is not None: - acceptedAreas=[] - for hazArea in hazAreas: - if hazArea in checkAreas: - acceptedAreas.append(hazArea) - if acceptedAreas!=[]: - chosen.append((key, acceptedAreas)) - else: - chosen.append(hazValue) - if not returnList and chosen!=[]: break - if not returnList: - return chosen!=[] - return chosen - - #### Handling of HU.S headlines per segment - def _setUp_HU_S_Headline(self, extraInfo, prevHLS): - # Stuff the headline for HU.S in extraInfo - usePrev = extraInfo.get("usePrev_HU_S_Headline") - if usePrev: - headline = self._grabHeadline(prevHLS) - else: - headline = extraInfo.get("userHeadline_HU_S") - if headline == "": - headline = self._frame("Enter headline here") - extraInfo["headline_HU_S"] = headline - - # OVERRIDE from DiscretePhrases -- Must use argDict hazards set up - # by the HLS rather than the one generated by the Text Formatter - # infrastructure which uses the combinations file differently than - # we want for the HLS. See "Handling HLS segmentation" note - # above. 
- - # Returns a formatted string announcing the hazards that are valid with - # timing phrases - def getHazardString(self, tree, node, fcstArea): - if len(fcstArea) <= 0: - return "" - #hazardTable = self._hazards.getHazardList(fcstArea) - argDict = tree.get("argDict") - hazardList = argDict["hazards"].getHazardList(fcstArea) - returnStr = "" - issuanceTime = self._issueTime.unixTime() - returnStr = self.makeHeadlinePhrases(tree, node, hazardList, - issuanceTime) - #Test mode? - returnStr = self.headlinePhraseTESTcheck(tree.get("argDict"), - returnStr) - return returnStr - - # OVERRIDE from DiscretePhrases - # USES the HU_S headline stuffed into argDict["extraInfo"] - # Makes multiple headlines based on the hazards list and returns - # the lot. - def makeHeadlinePhrases(self, tree, node, hazardList, issuanceTime, - testMode=0): - returnStr = "" - # make a deepcopy since we plan to mess with it. - hList = copy.deepcopy(hazardList) - - # sort headlines in appropriate order - if len(hList): - if hList[0]['pil'] in ['CWF','NSH','OFF','GLF']: - hList.sort(self.marineSortHazardAlg) - else: - hList.sort(self.regularSortHazardAlg) - numHdlns = len(hList) - - while len(hList) > 0: - hazard = hList[0] - - # Check for HU.S headline - # Only report cancelled HU.S if it is a singleton hazard - hazStr = None - if hazard['hdln'] == "" and hazard['phen']=="HU" and hazard['sig']=="S": - if hazard['act'] != "CAN" or numHdlns == 1: - argDict= tree.get("argDict") - extraInfo = argDict.get("extraInfo", None) - if extraInfo is not None: - hdln = extraInfo.get("headline_HU_S", None) - if hdln is not None: - hazard['hdln'] = hdln - hazStr = hazard['hdln'] - # Strip ellipses since they will be added later - hazStr = hazStr.rstrip("...").lstrip("...") - - # Can't make phrases with hazards with no 'hdln' entry - if hazard['hdln'] == "": - hList.remove(hazard) - continue - - phenSig = hazard['phen'] + "." 
+ hazard['sig'] - actionCodeList = self.getAllowedActionCodes(phenSig) - - # if the action is not in the actionCodeList, skip it - if hazard['sig'] != "": # it's not locally defined - if not hazard['act'] in actionCodeList: - print "...Ignoring action code:", hazard['act'], \ - hazard['hdln'] - hList.remove(hazard) - continue - - # get the headline phrase - if hazStr is None: - hazStr = self.makeStandardPhrase(hazard, issuanceTime) - if len(hazStr): - # Call user hook - localStr = self.addSpace(self.hazard_hook( - tree, node, hazard['phen'], hazard['sig'], hazard['act'], - hazard['startTime'], hazard['endTime']), "leading") - returnStr = returnStr + "..." + hazStr + localStr + "...\n" - - # always remove the main hazard from the list - hList.remove(hazard) - - return returnStr - #### END Handling of HU.S headlines per segment - - def _frame(self, text): - return "|* " + text + " *| " - - def _orderSections(self, sections): - # We are assuming that IF someone orders one section, then - # ALL must be ordered, order numbers are correct, and proper integers 1-(xxx) - # AND the ordered sections follow the sections that cannot be ordered - # Otherwise, we revert to default ordering. 
- - # If order is None, it means that the section should be in the order it appears in the list - # If order is a number, then order it - - #print "\nOrdering sections" - # Gather the sections that need to be ordered - ordered = [] - unordered = [] - orderError = False - for section in sections: - sectionName, order, usePrev, useImport = section - #print sectionName - if order is not None: - try: - index = int(order)-1 - except: - orderError=True - # We will punt ordering - break - ordered.append((section, index)) - else: - unordered.append(section) - - #print "ordered", ordered - #print "unordered", unordered - - # Order the sections correctly - #print "orderError", orderError - if not orderError: - orderedSections = [] - for i in range(len(ordered)): orderedSections.append(None) - error = False - for section, index in ordered: - try: - if orderedSections[index] != None: - error = True - except: error = True - if error: break - orderedSections[index] = section - # Add them back to the end of the unordered sections - if error: newSections = sections - else: newSections = unordered + orderedSections - else: - newSections = sections - - #print "\nreordered", newSections - - # Now add in required sections that did not show up in the GUI - # Assume that the required sections will appear before the optional ones. 
- requiredSections = [] - for sectionEntry in self._segmentSections(): - if sectionEntry["inSegments"] == "always": - sectionName = sectionEntry.get('name') - requiredSections.append((sectionName, None, None, None)) - finalSections = requiredSections + newSections - #print "\nfinalSections", finalSections, "\n" - return finalSections - -# def headlineRegExpr(self): -# # modify this to change how "previous HLS" catches the 2nd headline -# # the first headline will be defined by the required headline -# headlineRegEx = r'^((?:.\.\.[^\n]+?\.\.\.)|(?:\$\$))$' -# return headlineRegEx - - def _findInDictList(self, dictList, identifier, value, field): - for dictionary in dictList: - if dictionary[identifier] == value: - return dictionary.get(field, None) - return None - - def _accessDict(self, dictionary, keys): - value = dictionary - for key in keys: value = value[key] - return value - - def _addEllipses(self, string): - # Add beginning and ending ellipses to non-null string - # (if not there already) - if string != "": - string = string.rstrip("...") - string = string.lstrip("...") - string = "..." + string + "..." - return string - - def _analysisList_HLS(self): - # 120 hours = time period of prob34, 64 grids - # prob34 and prob64 are 120 hour grids, so just sample one value (maximum) for the - # whole time period - # MaxINTProb34 = time period when the 6 hourly pws34int is maximum - # MaxINTProb64 = time period when the 6 hourly pws64int is maximum - # XXX : if <=32: 5, 32-42: 10, 50: 20? 
(same as Tropical formatters) - # - - # Sample over 120 hours beginning at current time (OR time of prob34/prob64) - return [ - ("Wind", self.vectorModeratedMinMax, [6]), - ("WindGust", self.moderatedMinMax, [6]), - ("prob34", self.minMax), # 120 hour value - ("prob64", self.minMax), # 120 hour value - ("pws34int", self.maximum, [6]), - ("pws64int", self.maximum, [6]), - ("InundationMax", self.moderatedMax), - ] - - ##################################################################################### - ##################################################################################### - ### Previous Product Helper methods - - def _grabStormInfo(self, tcp): - # Get the storm information from the selected TCP - # return a dictionary - # Initialize a dictionary to hold the information we want - dict = {"StormType" : "|* fill in storm type here *|", - "StormName" : "|* fill in storm name here *|", - "StormTime" : "|* Enter storm time *| ", - "StormLat": "", - "StormLon": "", - "StormReference": "", - "StormIntensity": "", - "StormMotion": "", - "StormInfo": "", - "StormCenter": "", - } - #======================================================================= - # If we got the latest public advisory - - if tcp is not None and len(tcp) > 0: - - #=================================================================== - # Try to determine the storm type and name automatically - - # Updated version to handle WFO GUM advisories. This pattern will - # handle multiple word names (including certain special characters) - # This is for the NHC format. 
- mndSearch = re.search("(?im)^.*?(HURRICANE|(SUB|POST.?)?TROPICAL " + - "(STORM|DEPRESSION)|(SUPER )?TYPHOON|" + - "REMNANTS OF) ([A-Z0-9\-\(\) ]+?)" + - "(SPECIAL |INTERMEDIATE )?ADVISORY", tcp) - - # Display some debug info - if flag is set - self.debug_print("mndSearch = '%s'" % (mndSearch)) - - # If we found the storm type and name in the MND header - if mndSearch is not None: - - # Pick off the storm type and name - dict["StormType"] = mndSearch.group(1).strip() - dict["StormName"] = mndSearch.group(5).strip() - - #################################################################### - #################################################################### - # 12/15/2010 (MHB) - we should not need this anymore, but will - # leave it for the 2011 season as a fail-safe. - - # Look for the HPC format instead - else: - - mndSearch = re.search("(?im)^PUBLIC ADVISORY.+?FOR REMNANTS " + - "OF ([A-Z0-9\-\(\) ]+)", tcp) - - # If we found the storm type and name in the MND header - if mndSearch is not None: - - # Pick off the storm type and name - dict["StormType"] = "Remnants of" - dict["StormName"] = mndSearch.group(1).strip() - - # end possible removal - 12/15/2010 (MHB) - #################################################################### - #################################################################### - - #=================================================================== - # Clean up the product for easier parsing - - tcp = self._cleanText(tcp) - - #=================================================================== - # Now try to grab the latest storm information - - # Look for the new NHC format first - summarySearch = re.search("(?is)SUMMARY OF (.+?)\.{3}.+?" + - "LOCATION\.{3}(.+?[NS]) +(.+?[WE]).+?" + - "(ABOUT .+?)MAXIMUM SUSTAINED WIND.+?" 
+ - "(\d+ MPH).+?PRESENT MOVEMENT\.{3}" + - "(.+?)\.{3}", tcp) - - #-------------------------------------------------------------------- - # If we found the NHC summary section - - if summarySearch is not None: - - # Set aside some information we'll need later on - dict["StormTime"] = summarySearch.group(1).strip() - dict["StormLat"] = summarySearch.group(2).strip() - dict["StormLon"] = summarySearch.group(3).strip() - dict["StormReference"] = summarySearch.group(4).strip() - dict["StormIntensity"] = summarySearch.group(5).strip() - dict["StormMotion"] = summarySearch.group(6).strip() - - #================================================================ - # Use the remaining summary groups to contruct a paragraph - # similar to the "old" TCP format, and save that for later use - - # Start the paragraph with the advisory time - dict["StormCenter"] = "AT %s...THE CENTER OF " % \ - (dict["StormTime"]) - - # Now add some phrasing to maintain proper grammar, if needed - if dict["StormType"] == "Remnants of": - dict["StormCenter"] = "%s THE" % (dict["StormCenter"]) - - # Now add the storm type and storm name - dict["StormCenter"] = "%s %s %s " % (dict["StormCenter"], - dict["StormType"], - dict["StormName"]) - - # Now add the storm position - dict["StormCenter"] = \ - "%s WAS LOCATED AT LATITUDE %s...LONGITUDE %s." 
% \ - (dict["StormCenter"], dict["StormLat"], dict["StormLon"]) - - #---------------------------------------------------------------- - # Now add the primary NHC geographic reference - - # Get all the NHC references - starting with the word 'about' - # after the first one - referenceIndex = dict["StormReference"][4:].find('about') - - # Assume we only have one NHC reference point by default - nhcReference = dict["StormReference"] - -## print "referenceIndex = ", referenceIndex - - # If we have more than one NHC reference point - if referenceIndex != -1: - - # Adjust this index to account for the first 'about' - referenceIndex += 4 - - # Only keep the first NHC reference location - nhcReference = dict["StormReference"][:referenceIndex] - - # Convert any abbreviated bearings to full words - nhcReference = nhcReference.replace(' N ', ' north ') - nhcReference = nhcReference.replace(' NNE ', ' north-northeast ') - nhcReference = nhcReference.replace(' NE ', ' northeast ') - nhcReference = nhcReference.replace(' ENE ', ' east-northeast ') - nhcReference = nhcReference.replace(' E ', ' east ') - nhcReference = nhcReference.replace(' ESE ', ' east-southeast ') - nhcReference = nhcReference.replace(' SE ', ' southeast ') - nhcReference = nhcReference.replace(' SSE ', ' south-southeast ') - nhcReference = nhcReference.replace(' S ', ' south ') - nhcReference = nhcReference.replace(' SSW ', ' south-southwest ') - nhcReference = nhcReference.replace(' SW ', ' southwest ') - nhcReference = nhcReference.replace(' WSW ', ' west-southwest ') - nhcReference = nhcReference.replace(' W ', ' west ') - nhcReference = nhcReference.replace(' WNW ', ' west-northwest ') - nhcReference = nhcReference.replace(' NW ', ' northwest ') - nhcReference = nhcReference.replace(' NNW ', ' north-northwest ') - - # Add only first one to the summary paragraph for brevity - dict["StormCenter"] = "%s this was %s. 
" % \ - (dict["StormCenter"], - self._removeKM(nhcReference.strip())) - - #---------------------------------------------------------------- - # Add the maximum sustained wind speed phrase - - dict["StormCenter"] = "%s maximum sustained winds were %s." % \ - (dict["StormCenter"], - self._removeKM(dict["StormIntensity"])) - - #---------------------------------------------------------------- - # Now add the storm motion - - dict["StormCenter"] = "%s the storm motion was %s." % \ - (dict["StormCenter"], - self._removeKM(dict["StormMotion"])) - - #################################################################### - #################################################################### - # 12/15/2010 (MHB) - we should not need this anymore, but will - # leave it for the 2011 season as a fail-safe. - #-------------------------------------------------------------------- - # Search the product for the legacy storm info section - in case - # the new NHC style was not found - - stormInfoSearch = \ - re.search('(?is)(AT +(\d+ +[AP]M [AECMPH][DS]T)' + - '\.{3}\d+ *(Z|UTC)\.{3}THE (CENTER|REMNANTS|EYE) .+)', - tcp) - - # Display some debug info - if flag is set - self.debug_print("storminfoSearch = '%s'" % (stormInfoSearch)) -## print stormInfoSearch.groups() - - # If we found the storm info section of the product - if stormInfoSearch is not None: -# for group in stormInfoSearch.groups(): -# print '\t' + '-'*50 -# print "%s\n" % (group) - - # Clean this section up a bit. Keep each paragraph separate - # by a single , but remove all others as well as extra - # spaces. 
Then store this text in the TCP dictionary - dict["StormInfo"] = stormInfoSearch.group(1).strip() - - # Set aside the first paragraph of the storm info since it - # contains the TPC-provided reference point - if we haven't - # already found this information - if len(dict["StormCenter"].strip()) == 0: - dict["StormCenter"] = dict["StormInfo"].split('\n')[0] - - # If we have not already found the advisory time - get it from - # the legacy format - if dict["StormTime"] == "|* Enter storm time *| ": - dict["StormTime"] = stormInfoSearch.group(2).strip() - - # Set aside the first paragraph of the storm info since it - # contains the TPC-provided reference point - if we haven't - # already found this information - if len(dict["StormCenter"].strip()) == 0: - dict["StormCenter"] = dict["StormInfo"].split('\n')[0] - - #=================================================================== - # Now try to grab the repeated storm information summary - - repeatInfo = re.search("(?is)(\.{3}SUMMARY.+?\.)\n *\n", - tcp) - # If we cannot find the summary, try to find a "repeating" section - if repeatInfo is None: - repeatInfo = re.search("(?is)(REPEATING.+?\.)\n *\n", tcp) -## print repeatInfo - - # If we found the repeated storm information summary - if repeatInfo is not None: - - # Clean up this paragraph - summary = repeatInfo.group(1).strip() - - #=============================================================== - # Now try to grab the latest storm location - if we need it - - if dict["StormLat"] == "" or dict["StormLon"] == "": - - # Search the product for the storm location section - locationSearch = \ - re.search('(?is).+LOCATION.*?(\d+\.\d+ *N).+?' 
+ - '(\d+\.\d+ *[EW])', summary) - - # Display some debug info - if flag is set - self.debug_print("locationSearch = '%s'" % (locationSearch)) -## print locationSearch.groups() - - # If we found the storm location section of the product - if locationSearch is not None: - - # Pick off the storm latitude and longitude - dict["StormLat"] = locationSearch.group(1).strip() - dict["StormLon"] = locationSearch.group(2).strip() - - #=============================================================== - # Now try to grab the latest storm intensity - if we need it - - if dict["StormIntensity"] == "": - - # Search the product for the storm intensity section - intensitySearch = \ - re.search('(?i).+MAXIMUM SUST.+?(\d+ *MPH)', summary) - - # Display some debug info - if flag is set - self.debug_print("intensitySearch = '%s'" % - (intensitySearch)) - - # If we found the storm intensity section of the product - if intensitySearch is not None: - - # Pick off the storm intensity - dict["StormIntensity"] = intensitySearch.group(1).strip() - - #=============================================================== - # Now try to grab the latest storm motion - if we need it - - if dict["StormMotion"] == "": - - # Search the product for the storm motion section - motionSearch = re.search('(?i).+MOVEMENT\.{3}(.+?\d+ MPH)', - summary) - if motionSearch is None: - motionSearch = re.search('(?i).+MOVEMENT(.+?\d+.+?)\.', - summary) - - # Display some debug info - if flag is set - self.debug_print("motionSearch = '%s'" % (motionSearch)) - - # If we found the storm motion section of the product - if motionSearch is not None: - - # Pick off the storm motion - motion = motionSearch.group(1).strip() - - # Fix the motion (i.e no '...') - dict["StormMotion"] = re.sub('(?i)\.{3}', ' the ', - motion) - - # end possible removal - 12/15/2010 (MHB) - #################################################################### - #################################################################### - - 
#======================================================================== - # Display final decoded information from TCP - -## print "\n\n" + "*" *80 -## print "Final TCP Info...\n" -## print 'dict["StormType"] = ', dict["StormType"] -## print 'dict["StormName"] = ', dict["StormName"] -## print 'dict["StormTime"] = ', dict["StormTime"] -## print 'dict["StormLat"] = ', dict["StormLat"] -## print 'dict["StormLon"] = ', dict["StormLon"] -## print 'dict["StormReference"] = ', dict["StormReference"] -## print 'dict["StormIntensity"] = ', dict["StormIntensity"] -## print 'dict["StormMotion"] = ', dict["StormMotion"] -## print 'dict["StormInfo"] = ', dict["StormInfo"] -## print 'dict["StormCenter"] = ', dict["StormCenter"] - - # Return the dictionary will all the information we found in the TCP - return dict - - def _cleanText(self, text=''): - # Cleans up text for easier string searches, but retains paragraphs - - # Replace all single characters with a space - text = re.sub("\n(?! *\n)", " ", text) - - # Ensure all text is only single-spaced - text = re.sub(" +", " ", text) - - # Remove all spaces at the start of a new paragraph - text = re.sub("(?m)^ +", "", text) - - # Do not allow any spaces after an ellipsis - text = re.sub("\.{3} +", "...", text) - - # Finally, ensure the paragraphs are put back - text = re.sub("\n", "\n\n", text) - - # Return the cleaned-up text - return text - - def _grabHeadline(self, text=''): - # Get first headline found in text and return it as a string - - # Fixed pattern to grab headline (MHB 04/08/2009) - # See if there is a headline in this text - headlineSearch = re.findall("(?ism)^(\.{3}.+?\.{3}) *\n", text) - - self.debug_print("headlineSearch = %s" % (headlineSearch)) - - # If we found a headline - if len(headlineSearch) > 0: - - # Return the first cleaned-up headline string we found - return self._cleanText(headlineSearch[0]) - - # Otherwise, return an indicator there is no headline in this text - else: - return '' # Changed to an 
null string instead of None - # (MHB 04/08/2009) - - # Modified 4/22/09 (MHB) - fixed pattern to grab entire synopsis. - # Previous version only seemed to grab the last section of the overview. - def _grabOverview(self, text=''): - # Grab the overview section of a previous HLS from the overall - # overall headline to the start of the first zone segment - - # See if there is an overwiew in this text - overviewSearch = re.search("(?is)(\.+.+)?[A-Z]{2}Z\d{3}", text) - - # If we found a headline - if overviewSearch is not None: - - # Remove any zone blocks we may have grabbed by accident - overview = re.sub("(?is)[A-Z]{2}Z\d{3}.+", "", - overviewSearch.group(1).strip()) - - # Return the cleaned-up overview string - return overview - - # Otherwise, return an indicator there is no overview in this text - else: - return '' - - # Modified 12/15/2010 (MHB) - added a new flag which will cause the - # grabbed section text to be wrapped in framing codes if set to True. - def _grabSection(self, text='', section='', useFrameCodes=False): - # Grab the specified subsection of text from the overall text - - #print "\n\nGRABBING SECTION", section - #print "'%s'" % (text) - - # If a subsection header was defined - if section != '': - - # Add ending text so that last section can be found. - text = text + "\n..." - - # See if we can find it - sectionSearch = re.search("(?ism).*^%s(.+?)^\." 
% (section), text) - - # If we found the specified subsection - if sectionSearch is not None: - -# print sectionSearch.groups() -# print "'%s'" % (self._cleanText(sectionSearch.group(1).strip())) - # Clean it up - sectionText = self._cleanText(sectionSearch.group(1).strip()) - - # If we should wrap framing codes around this text - if useFrameCodes: - sectionText = self._frame(sectionText) # do it - - # Return the cleaned-up subsection - return section + sectionText - - # If we made it this far, return a null string - return '' - - def _getProductInfo(self, argDict): - # The current argDict['hazards'] is set automatically by TextFormatter.py - # to use the zone combinations. We need to re-do it to have all the - # hazards from the entire area - #print "\n***************getProductInfo calling getHazardsTable" - allAreas = self._inlandAreas()+self._marineAreas()+self._coastalAreas() - argDict["combinations"]= [(allAreas,"Region1")] - argDict['definition'] = self._definition - hazards = self._getHazardsTable(argDict, self.filterMethod) - argDict['hazards'] = hazards - - # Set up the areaDictionary for all to use - accessor = ModuleAccessor.ModuleAccessor() - self._areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") - # Get the statistics and general information for the segments - # to be used for the Overview sections - info = self._getSegmentInfo(self._segments) - info.maxWind_CWA_MAOR = self._getWindStats( - argDict, self._sampler, self._analysisList_HLS(), - self._timeRangeList, self._cwaMaorLabel) - return info - - # Modified 12/15/2010 (MHB) - fixed a potential problem with the - # _stormTypeName variable. If used as a failsafe it would have come out - # as "Cyclone Tropical" instead of "Tropical Cyclone". Also disabled - # the "Unnamed" option. 
- def _getStormInfo(self, argDict, info): - # Get the Storm information - st = self._StormInfo - self._stormType = "Tropical" - self._stormName = "Cyclone" - self._stormTypeName = self._stormType + " " +self._stormName - - # Get the name -# if st.find("N/A") >=0: -# self._stormTypeName = self._StormInfo_entry -# if len(self._stormTypeName.strip()) == 0: -# self._stormTypeName = self._frame("Enter Storm Name") -# return - - # Get the product - if st == "Enter PIL below (e.g. TCPEP1):": - productID = self._StormInfo_entry - else: productID = self._StormInfo - if self._useTestTCP(): - self._TCP = self._TCP_Product() - else: - self._TCP = self.getPreviousProduct(productID) - stormDict = self._grabStormInfo(self._TCP) - self._stormName = stormDict.get("StormName", "") - self._stormType = stormDict.get("StormType", "") - self._stormTypeName = self._stormType + " " + self._stormName - self._decodeStormInfo(stormDict, info) - # Storm movement in mph and the stated movement trend - self._stormMovementTrend = "Storm Motion was " + stormDict.get("StormMotion","") - # Storm intensity in mph and the stated intensity trend. - self._stormIntensityTrend = "Storm Intensity was " + stormDict.get("StormIntensity","") - - ## New version from MHB 1/13/10 - def _decodeStormInfo(self, stormDict, info): - self._stormTime = "|* Enter Storm Time *| " - self._stormLat = "|* Enter Storm Lat *| " - self._stormLon = "|* Enter Storm Lon *| " - self._stormLocation = "|* Enter Storm Location *| " - self._stormReference = "" - self._stormLocalReferences = "" - para = stormDict.get("StormCenter", "") - # print "\npara", len(para), para - if len(para)<= 0: - return - - # Create the time string - self._stormTime = self._formatLocalTime(para, info.allAreas) - - # Find stormLat, stormLon and stormLocation - # e.g. 
LATITUDE 15.7 NORTH...LONGITUDE 80.0 WEST - stormLocation ="" - stormLat = None - stormLon = None - - # Make a pattern to find the latest storm location - coordPtn = re.compile("(?i)(LATITUDE ([\d\.]+) ?((N|S)(O[RU]TH)?))..." + - "(AND )?(LONGITUDE ([\d\.]+) ?((W|E)([AE]ST)?)).+?") -## + "OR ((ABOUT )?.+)") - - # Make a pattern to find the NHC reference location - refPtn = re.compile("(?i)(WAS|OR) ((ABOUT )?\d+ MILES.+?" + - "(NORTH|SOUTH|EAST|WEST).+?)\.") - - # Try to find these patterns in the text - coordPtnMatch = coordPtn.search(para) -## print "+" * 90 -## print "\ncoordinate search..." -## print coordPtnMatch.groups() - - refPtnMatch = refPtn.search(para) -## print "\nreference search..." -## print refPtnMatch.groups() - - # If we found the coordinates we were after - if coordPtnMatch is not None: - - # If we have the correct paragraph, set aside the latitude and - # longitude info as numbers - self._stormLat = float(coordPtnMatch.group(2)) - self._stormLon = float(coordPtnMatch.group(8)) # was 7 - - # Adjust latitude and longitude as need for "other" hemispheres - if coordPtnMatch.group(4) in ["S", "s"]: - self._stormLat *= -1.0 - - if coordPtnMatch.group(10) in ["W", "w"]: - self._stormLon *= -1.0 - - # Construct the storm location subphrase - self._stormLocation = "%s...%s" % (coordPtnMatch.group(1), - coordPtnMatch.group(7)) # was 6 - - # If we found the primary NHC reference we were after - if refPtnMatch is not None: - - # Set aside all the geographic reference text -## stormReference = coordPtnMatch.group(11) - stormReference = refPtnMatch.group(2) - - # Watch out for some grammar gotchas with this reference - stormReference = re.sub("(?i)^(WAS|OR) ", "", stormReference) - - # See if there are multiple geographic references - if re.search('(?i) and ', stormReference) is not None: - - # Yes there are multiple references, so only keep the - # first one - stormReference = re.sub("(?i) AND .+", "", stormReference) - - # Also remove any metric distances - 
self._stormReference = self._removeKM(stormReference) - - # Miles/km from chosen local reference - self._stormLocalReferences = self._calcLocalReferences( - self._stormLat, self._stormLon) - -## print "stormLocalRefs = ", self._stormLocalReferences - - # Compare the NHC reference to the local references - for localRef in self._stormLocalReferences: - -## print self._stormReference, localRef - - # Get the locations from these statements - nhcRef = re.search('(?i)(north|south|east|west) of (.+)', - self._stormReference) - testRef = re.search('(?i)(north|south|east|west) of (.+)', - localRef) - -## print "nhcRef = '%s'\ttestRef = '%s'" % (nhcRef.group(2), testRef.group(2)) - - # If we have a local reference that matches the national - # center reference - if testRef is not None and nhcRef is not None and \ - re.search("(?i)%s" % (testRef.group(2).strip()), - nhcRef.group(2)) is not None: - - # Do not include the national reference - self._stormReference = "" - - # Modified 12/15/2010 (MHB) - modified to recognize the new way NHC will - # present metric speeds. Will continue to recognize the "old" way for - # testing purposes as well. - def _removeKM(self, words): - # Remove references to KM e.g. - # 420 KM... 100 KM/HR... - -# print "words = '%s'" % (words) - - kmSearch = re.compile("\.\.\. *[0-9]+ +(KM|KM/HR?) *\.?\.?\.?") - - # Replace metric reference with a space to keep words from mashing - # together. - words = kmSearch.sub(" ", words) - - # Make sure we don't have any double space issues with this text - doubleSpaces = re.findall(' +', words) - for doubleSpace in doubleSpaces: - words = re.sub(doubleSpace, ' ', words) - -# print "\tfinal words = '%s'" % (words) - return words - - - def _formatLocalTime(self, para, areas): - # Create a time string in local time - # e.g. 
2 AM EDT - # Get the Z time hour - timeSearch = re.compile("...([0-9]+) *(Z|UTC)...") - timeStr = timeSearch.search(para) - -## gmtStr = para[timeStr.start():timeStr.end()] -## gmt = gmtStr.strip("...").replace("Z","") -## gmtHour = int(gmt)/100 - - # This code could bomb in the unlikely event we don't find a UTC - # time. We should probably add some kind of default hour here, - # keyed off the current hour, to prevent this. (MHB) - try: - # Convert the hour portion of the time string to an integer - gmtHour = int(timeStr.group(1)[:2]) - except: - gmtHour = time.gmtime().tm_hour - - gmtTR = self.createTimeRange(gmtHour, gmtHour+1, "Zulu") - gmtTime = gmtTR.startTime().unixTime() - - # Now make a string for each time zone - zoneList = self._getTimeZoneList(areas) - timeStrs = [] - timeDesc = "" - for timeZone in zoneList: - timeStr = self.formatTimeString(gmtTime, "%I %p %Z ", timeZone) - timeStr = string.replace(timeStr, " ", " ") - timeStr = string.strip(timeStr) - timeStr = timeStr.lstrip("0") - if timeStr not in timeStrs: - if len(timeStrs) > 0: - timeDesc += "...OR " - timeStrs.append(timeStr) - timeDesc += timeStr - return timeDesc - - def _getTimeZoneList(self, areaList): - # NOTE -- this code was taken from the middle of getAreaHeader - # in Header.py -- it really should be put back in and used - # in Header.py, but to avoid confusion, I'm repeating it here - # get this time zone - thisTimeZone = os.environ["TZ"] - zoneList = [] - # check to see if we have any areas outside our time zone - for areaName in areaList: - if areaName in self._areaDict.keys(): - entry = self._areaDict[areaName] - if not entry.has_key("ugcTimeZone"): #add your site tz - if thisTimeZone not in zoneList: - zoneList.append(thisTimeZone) - continue # skip this entry - timeZoneList = entry["ugcTimeZone"] - if type(timeZoneList) is types.StringType: # a single value - timeZoneList = [timeZoneList] # make it into a list - for timeZone in timeZoneList: - if timeZone not in zoneList: - 
zoneList.append(timeZone) - # if the resulting zoneList is empty, put in our time zone - if len(zoneList) == 0: - zoneList.append(thisTimeZone) - # if the resulting zoneList has our time zone in it, be sure it - # is the first one in the list - try: - index = zoneList.index(thisTimeZone) - if index != 0: - del zoneList[index] - zoneList.insert(0, thisTimeZone) - except: - pass - return zoneList - - def _calcLocalReferences(self, lat0, lon0): - localRefs = [] - refList = self._LocalReferencePoints - #refList.append(("Grand Cayman", (19.2, -81.4))) - # Limit reference points - refLimit = self._referencePointLimit() - if len(refList) > refLimit: - refList = refList[0:refLimit] - for label, latLon in refList: - lat, lon = latLon - localRef = self._calcReference(lat0, lon0, lat, lon) - localRef = localRef + " OF " + label - localRef = localRef.replace(",","") - localRefs.append(localRef) - return localRefs - - def _oldCalcReference(self, lat0, lon0, lat1, lon1): - RAD_TO_DEG = 57.296083 - #print "\ncalcReference", lat0, lon0, lat1, lon1 - #lat1 = lat0 + 1.0 - #lon1 = lon0 + 1.0 - latDist = (lat0-lat1) * 111.0 - avgLat = abs(lat0+lat1) / 2.0 - lonDist = (lon0-lon1) * 111.0 * cos(avgLat/RAD_TO_DEG) - #lonDist = 111.0 - #latDist = 111.0 - distKm = sqrt((latDist*latDist)+(lonDist*lonDist)) - distMph = distKm * 0.62 - # Round to nearest 10 - distMph = self.round(distMph, "Nearest", 10) - distMph_str = `int((distMph/10)*10)` - distKm_str = `int((distKm/10)*10)` - direct = atan2(lon0-lon1, lat0-lat1) * RAD_TO_DEG - direction = self._dirInEnglish(direct) - localRef ="About "+distMph_str+" miles "+direction - print "localRef", localRef - return localRef - - def _calcReference(self, lat0, lon0, lat1, lon1): - #return self._oldCalcReference(lat0, lon0, lat1, lon1) - distKm = self._distanceFromLatLon(lat0, lon0, lat1, lon1) - distMph = distKm * 0.62 - # Round to nearest 10 - distMph = self.round(distMph, "Nearest", 10) - distMph_str = `int((distMph/10)*10)` - #distKm_str = 
`int((distKm/10)*10)` - direction = self._bearing(lat1, lon1, lat0, lon0) - direction = self._dirInEnglish(direction) - localRef ="About "+distMph_str+" miles "+direction - #print "localRef", localRef - return localRef - - # Returns the distance from lat0, lon0 to lat1, lon1 in kilometers - def _distanceFromLatLon(self, lat0, lon0, lat1, lon1): - R = 6371.0 - lat0 = lat0 * DEG_TO_RAD - lon0 = lon0 * DEG_TO_RAD - lat1 = lat1 * DEG_TO_RAD - lon1 = lon1 * DEG_TO_RAD - dist = acos(sin(lat0) * sin(lat1) + cos(lat0) * cos(lat1) * cos(lon1 - lon0)) * R - return dist - - def _bearing(self, lat0, lon0, lat1, lon1): - - dlat = (lat0 - lat1) * DEG_TO_RAD - dlon = (lon0 - lon1) * DEG_TO_RAD - - y = sin(dlon) * cos(lat1 * DEG_TO_RAD) - x = cos(lat0 * DEG_TO_RAD) * sin(lat1 * DEG_TO_RAD) - \ - (sin(lat0 * DEG_TO_RAD) * cos(lat1 * DEG_TO_RAD) * cos(dlon)) - - direction = (atan2(x, y) / DEG_TO_RAD) - 90.0 - if direction < 0.0: - direction = direction + 360.0 - direction = direction % 360 - - return direction - -## lat0 = 30.0 -## lat1 = 20.0 -## lon0 = -80.0 -## lon1 = -90.0 - -## print "complex dist:", distComplex(lat0, lon0, lat1, lon1) -## print "bearing:", bearing(lat0, lon0, lat1, lon1) - - - def _dirInEnglish(self, direction): - dirList = ["north", "north-northeast", "northeast", "east-northeast", - "east", "east-southeast", "southeast", "south-southeast", - "south", "south-southwest", "southwest", "west-southwest", - "west", "west-northwest", "northwest", "north-northwest"] - dirIndex = int((direction + 11.25) / 22.5) - if dirIndex > 15: - dirIndex = dirIndex - 16 - return dirList[dirIndex] - - ##################################################################################### - ##################################################################################### - ####### OVERVIEW Sections - -## def Overview_NewInformation(self, title, sectionDict, info): -## t="" -## ec = self._EventContext -## if ec =="Abbreviated": -## t+="New watches and or warnings have been issued. 
\n" -## else: -## t+= self._frame("Please enter new information here. Keep it concise.") + "\n" -## return title + t - - def Overview_NewInformation(self, title, sectionDict, info): - t="" - ec = self._EventContext - print "info.hazardHdlns = ", info.hazardHdlns - - if ec =="Abbreviated": - hdlns = info.hazardHdlns - #print "\n Headlines" - reported = 0 - for hazardHdln in hdlns: - key, landList, marineList, coastalList, inlandList = hazardHdln - #print "hazard", hazardHdln - hdln, act, phen, sig = key - if phen == "HU" and sig == "S": - continue - if act in self._ignoreActions(): - continue - if hdlns.index(hazardHdln) > 0: - t+= " and " - t+= "A " + hdln - reported += 1 - if reported > 0: - if reported > 1: t+= " have " - else: t+= " has " - t+="now been issued. " - elif ec == "PostEvent": - t+="Warnings have been discontinued.\n" - - else: - t+= self._frame("Please enter new information here. Keep it concise.") + "\n" - return title + t - -############################################################################################ - - def AreasAffected(self, title, sectionDict, info): - t = title - - if info.anyLand and info.anyMarine: - t+= "This local statement provides important information and recommended actions for people and marine interests in " - t+=self._all_select(info.allLand and info.allMarine) - t+= " locations and coastal water of "+self._cwa_maor_descriptor()+ ". " - - else: - if info.anyLand: - t+= "This local statement provides important information and recommended actions for people in " - t+=self._all_select(info.allLand) - t+= " locations within " + self._cwa_descriptor() + ". " - - elif info.anyMarine: - t+= "This local statement offers guidance and recommendations for mariners...as well as other marine interests...along " - t+= self._all_select(info.allMarine) - t+= " coastal water of " + self._maor_descriptor() + ". 
" - return t + "\n" - - def _all_select(self, value): - if value: return "All" - else: return "Select" - - def _generalAreas(self, segmentAreas): - """This method formats the general area description given the list of segmentAreas. - """ - # This method could grab information from a file formatted elsewhere. - # To use this capability, call this method with the appropriate - # argument: - # - # text = self._ingestExternalFile("") - - text = '' - - # Make the general area Phrase - similar to HWO - generalAreas = self.getGeneralAreaList(segmentAreas, areaDictName=self._areaDictionary) - - # Make a list of all general areas we found - #parts of the states - areaList = [] - for state, partOfState, names in generalAreas: - if partOfState == '' or partOfState == ' ': - areaList.append(state) - else: - areaList.append(partOfState + " " + state) - - # Add this general area to the text - areaPhrase = self.punctuateList(areaList) - - # If we found any text - finish it up - if len(areaPhrase.strip()) > 0: - text = "%s.\n\n" % (areaPhrase) - - # Return the completed text - return text - - ##################################################################################### - def WatchesWarnings(self, title, sectionDict, info): - t= title - ec = self._EventContext - fmtDict = self._overviewFormat() - - # Any WW will be False if there are no Watches or Warnings in the CWA or MAOR - anyWW = self._checkHazard( - info.hazardHdlns, [("HU","W"),("TY", "W"),("TR","W"), ("HU","A"),("TY", "A"),("TR","A")]) - - # Find HU_S headlines and separate into "land" and "marine" - # There will only be ONE HU S entry in hazardHdlns since they are - # consolidated across segments - HU_S_Hdlns = [] - HU_S_landList = [] - HU_S_marineList = [] - for key, landList, marineList, coastalList, inlandList in info.hazardHdlns: - hdln, act, phen, sig = key - if act in self._ignoreActions(): - continue - if phen == "HU" and sig == "S": - HU_S_Hdlns.append((key, coastalList + inlandList, "land")) - 
HU_S_Hdlns.append((key, marineList, "marine")) - if len(coastalList + inlandList) > 0: - HU_S_landList = HU_S_landList + coastalList + inlandList - if len(marineList) > 0: - HU_S_marineList = HU_S_marineList + marineList - - if ec == "NonEvent" and not anyWW and len(HU_S_Hdlns)>0: - t+="Tropical cyclone watches and warnings are not in effect anywhere across " - t+=self._cwa_maor_descriptor() + ".\n" - - elif ec == "PreEvent" and not anyWW: - if len(HU_S_landList) > 0: - t+="Although tropical cyclone watches or warnings are not in effect anywhere across " - t+=self._cwa_descriptor() - t+="...possible impacts from related hazards are becoming a concern for " - if fmtDict["land"] == "listAreas": - t+= self._describeLocations(info, HU_S_landList, end="...")+ ".\n" - else: - t+="portions of the area.\n" - - if len(HU_S_landList) > 0 and len(HU_S_marineList)>0: t+="\n" - - if len(HU_S_marineList)>0: - t+="For marine interests...although tropical cyclone watches or warnings are not in effect anywhere across " - t+=self._maor_descriptor() - t+="...possible impacts from related hazards are becoming a concern for " - if fmtDict["marine"] == "listAreas": - t+= self._describeLocations(info, HU_S_marineList, end="...")+ ".\n" - else: - t+="portions of the "+ self._maor_descriptor() + ".\n" - - elif ec == "PostEvent" and not anyWW: # and (len(HU_S_landList)>0 or len(HU_S_marineList)>0): - t+="Tropical cyclone watches and warnings are no longer in effect anywhere across " - t+=self._cwa_maor_descriptor() + ".\n" - - elif ec == "PostTropical" and not anyWW: # and (len(HU_S_landList)>0 or len(HU_S_marineList)>0): - t+="Tropical cyclone watches and warnings are no longer in effect anywhere across " - t+=self._cwa_maor_descriptor() - t+=". 
The issuance of tropical cyclone watches and warnings is being transitioned over to watches and warnings traditionally issued for non-tropical cyclone events.\n" - else: - t+=self._overview_headlines(info) - if ec == "Abbreviated": - t+=self._definition_stmt(info) - if anyWW: t+=self._overview_HU_S_headlines(info, HU_S_Hdlns) - t+=self._additional_headlines(info) - return t - - def _definition_stmt(self, info): - t = "" - foundwatch = False - foundwarning = False - desc = " means that " - descwatch = " conditions are possible within the next 48 hours somewhere within the specified " + \ - "areas.\n\n" - descwarning = " conditions are expected within the next 36 hours somewhere within the specified " + \ - "areas.\n\n" - ppwatch = " All persons in the watch areas should review their preparedness plan and be ready to " + \ - "implement it should a warning be issued for their area.\n\n" - ppwarning = " All persons in the warning areas should already have preparations underway to protect " + \ - "life and property.\n\n" - - # Initialize a new dictionary to pair phenSig codes with their action - hazardDict = {} - - # Iterate over all of the hazards - for hazardTuple in info.hazardHdlns: - print "\n\n" + "*"*80 - print "hazardTuple is:", hazardTuple - - # Grab the phenomena code - hazard = hazardTuple[0] - print "hazard is:", hazard - - # Split up the phenomena code - (title, action, phen, sig) = hazard - - # Store the action for this phenomena - hazardDict["%s.%s" % (phen, sig)] = action - - #----------------------------------------------------------------------- - # Look at each of the hazards - if self._checkHazard(info.hazardHdlns, [("HU","W")]) and \ - hazardDict["HU.W"] not in ["CAN", "UPG"]: - hazardPhen = "Hurricane" - hazardSig = "Warning" - hazardPhenSig = hazardPhen+" "+hazardSig - foundwarning = True - t+= "A "+hazardPhenSig + desc + hazardPhen + descwarning - - if self._checkHazard(info.hazardHdlns, [("TY", "W")]) and \ - hazardDict["TY.W"] not in ["CAN", 
"UPG"]: - hazardPhen = "Typhoon" - hazardSig = "Warning" - hazardPhenSig = hazardPhen+" "+hazardSig - foundwarning = True - t+= "A "+hazardPhenSig + desc + hazardPhen + descwarning - - if self._checkHazard(info.hazardHdlns, [("TR","W")]) and \ - hazardDict["TR.W"] not in ["CAN", "UPG"]: - hazardPhen = "Tropical Storm" - hazardSig = "Warning" - hazardPhenSig = hazardPhen+" "+hazardSig - foundwarning = True - t+= "A "+hazardPhenSig + desc + hazardPhen + descwarning - - if foundwarning: - t+= ppwarning - - if self._checkHazard(info.hazardHdlns, [("HU","A")]) and \ - hazardDict["HU.A"] not in ["CAN", "UPG"]: - hazardPhen = "Hurricane" - hazardSig = "Watch" - hazardPhenSig = hazardPhen+" "+hazardSig - foundwatch = True - t+= "A "+hazardPhenSig + desc + hazardPhen + descwatch - - if self._checkHazard(info.hazardHdlns, [("TY", "A")]) and \ - hazardDict["TY.A"] not in ["CAN", "UPG"]: - hazardPhen = "Typhoon" - hazardSig = "Watch" - hazardPhenSig = hazardPhen+" "+hazardSig - foundwatch = True - t+= "A "+hazardPhenSig + desc + hazardPhen + descwatch - - if self._checkHazard(info.hazardHdlns, [("TR","A")]) and \ - hazardDict["TR.A"] not in ["CAN", "UPG"]: - hazardPhen = "Tropical Storm" - hazardSig = "Watch" - hazardPhenSig = hazardPhen+" "+hazardSig - foundwatch = True - t+= "A "+hazardPhenSig + desc + hazardPhen + descwatch - - if foundwatch: - t+= ppwatch - - t+= "In order to make the best decisions...be sure that you understand the terminology and " + \ - "definitions associated with tropical cyclone events.\n\n" - - - return t - - # In order to have the HazardsTable use the allowedHeadlines list, - # we need to supply a filterMethod that uses allowedHeadlines instead of allowedHazards - def _getAllowedHazardList(self, allowedHazardList=None): - if allowedHazardList is None: - allowedHazardList = self.allowedHazards() - hazardList = [] - for h in allowedHazardList: - if type(h) is types.TupleType: - hazardList.append(h[0]) - else: - hazardList.append(h) - return 
hazardList - - def _altFilterMethod(self, hazardTable, allowedHazardsOnly=False): - # Remove hazards not in allowedHeadlines list - allowedHazardList = self._getAllowedHazardList(self.allowedHeadlines()) - return self._filterHazards(hazardTable, allowedHazardList, - allowedHazardsOnly) - - def _filterHazards(self, hazardTable, allowedHazardList, - allowedHazardsOnly=False): - newTable = [] - hazStr = "" - for i in range(len(hazardTable)): - if hazardTable[i]['sig'] != "": # VTEC - hazStr = hazardTable[i]['phen'] + "." + hazardTable[i]['sig'] - else: #non-VTEC - hazStr = hazardTable[i]['phen'] - - if hazStr in allowedHazardList: - newTable.append(hazardTable[i]) - if allowedHazardsOnly: - return newTable - # get a raw list of unique edit areas - zoneList = [] - for t in newTable: - if t['id'] not in zoneList: - zoneList.append(t['id']) - for zone in zoneList: - # Remove lower priority hazards of the same type - self.filterZoneHazards(zone, newTable) - return newTable - - def _overview_headline_groups(self): - landAreas = self._inlandAreas()+ self._coastalAreas() - return [ - (["HU.W"], landAreas, "Hurricane Warning"), - (["TY.W"], landAreas, "Typhoon Warning"), - (["HU.W"], self._marineAreas(), "For marine interests...a Hurricane Warning"), - (["TY.W"], self._marineAreas(), "For marine interests...a Typhoon Warning"), - - (["TR.W", "HU.A"], landAreas, "Tropical storm warning and a Hurricane Watch"), - (["TR.W", "TY.A"], landAreas, "Tropical storm warning and a Typhoon Watch"), - (["TR.W", "HU.A"], self._marineAreas(), - "For marine interests...a tropical storm warning and a Hurricane Watch"), - (["TR.W", "TY.A"], self._marineAreas(), - "For marine interests...a tropical storm warning and a Typhoon Watch"), - - (["TR.W"], landAreas, "Tropical storm warning"), - (["TR.W"], self._marineAreas(), "For marine interests...a Tropical Storm Warning"), - - (["HU.A"], landAreas, "Hurricane watch"), - (["TY.A"], landAreas, "Typhoon watch"), - (["HU.A"], self._marineAreas(), 
"For marine interests...a Hurricane Watch"), - (["TY.A"], self._marineAreas(), "For marine interests...a Typhoon Watch"), - - (["TR.A"], landAreas, "Tropical storm watch"), - (["TR.A"], self._marineAreas(), "For marine interests...a Tropical Storm Watch"), - - ] - - def _overview_headlines(self, info): - # Put together Watches and Warnings - # Need to group hazards - hazardGroups = self._overview_headline_groups() - hdlns = [] - for hazards, areas, hdln in hazardGroups: - hdlns = hdlns + self._getHazardHdlns(info, hazards, hdln, areas) - t="" - t+=self._headlines(info, hdlns, qualifier=True) - return t - - def _getHazardHdlns(self, info, hazards, hdln, areas): - # For the overview -- - # Return a list of (key, areaList) tuples for the given hazards - # where key is (hdln, act, phen, sig) - # Use ignoreActions and then separate w.r.t. NEW, etc versus CON - hazardTable = self._argDict["hazards"] - newAreas = [] - conAreas = [] - sortedHazards = copy.deepcopy(hazards) - sortedHazards.sort() - if len(hazards) > 1: - # If we are testing for more than one hazard (e.g. TR.W and HU.A) - # If an area has both hazards with the same action, then the normal - # algorithm will catch it. - # However, if one is CON and the other NEW, then we will allow - # mixing of CON and NEW - tryMixingConNew_flag = True - else: - tryMixingConNew_flag = False - #print "\n Checking for ", hazards - for area in areas: - #print " Area ", area - # For each area determine the set of newHazards and conHazards - areaHazards = hazardTable.getHazardList([area]) - #print " hazards", areaHazards - newHazards = [] - conHazards = [] - for areaHaz in areaHazards: - act = areaHaz['act'] - if act in self._ignoreActions(): - continue - phenSig = areaHaz['phen'] + "." 
+ areaHaz['sig'] - if phenSig == "HU.S": - continue - if act == "CON": conHazards.append(phenSig) - else: newHazards.append(phenSig) - newHazards.sort() - conHazards.sort() - if newHazards == sortedHazards: - newAreas.append(area) - elif conHazards == sortedHazards: - conAreas.append(area) - elif tryMixingConNew_flag: - newHazards = newHazards + conHazards - newHazards.sort() - if newHazards == sortedHazards: - newAreas.append(area) - #print "new con Areas", newAreas, conAreas - # Compose hdln lists - new = [] - con = [] - phen, sig = hazards[0].split('.') - if len(newAreas) > 0: - key = (hdln, "NEW", phen, sig) - new = [(key, newAreas)] - if len(conAreas) > 0: - key = (hdln, "CON", phen, sig) - con = [(key, conAreas)] - #print "new, con", new, con - return new + con - - def _overview_HU_S_headlines(self, info, HU_S_Hdlns): - # Gather and report the HU_S headlines - t = "" - fmtDict = self._overviewFormat() - if fmtDict["land"]=="generic" and fmtDict["marine"]=="generic": - return t - - if len(HU_S_Hdlns) == 0: - return t - - for key, areaList, areaType in HU_S_Hdlns: - # Report only if there is a non-empty areaList and - # overview format is "listAreas" i.e. specific - if len(areaList) == 0 or fmtDict[areaType] == "generic": - continue - if areaType == "marine": t+="\nFor marine interests..." 
- else: t+="\n" - t+="Although tropical cyclone watches or warnings are not in effect for " - t+= self._describeLocations(info, areaList, end="...") - t+= "possible impacts from related hazards are still a concern.\n" - return t - - def _additional_headlines(self, info): - # Report additional headlines - t="" - self._hazardHdlns, self._huAreas = self._getAdditionalHazards(info) - t+=self._getAdditionalHeadlines(self._hazardHdlns, self._huAreas, info) - return t - - def _getAdditionalHazards(self, info): - argDict = self._argDict - argDict['definition'] = self._definition - altHazards = self._getHazardsTable(argDict, self._altFilterMethod) - conTable = altHazards.consolidatedTableByID() - - # Consolidate across action codes - hazDict = {} - for hazard in conTable: - hdln=hazard['hdln'] - phen=hazard['phen'] - sig=hazard['sig'] - act=hazard['act'] - if act in self._ignoreActions(): - continue - for area in hazard['id']: - hazDict.setdefault((hdln, phen, sig), []).append(area) - - #print "hazDict", hazDict - hazardHdlns=[] - huAreas = [] -# print "\nAdditional Hazard Headlines" - for key in hazDict.keys(): - hdln, phen, sig = key - huAreas = huAreas + hazDict[key] - hazardHdln = ((hdln, "NEW", phen,sig), hazDict[key], [],[],[]) - #print " ", hazardHdln, hazDict[key] - hazardHdlns.append(hazardHdln) - return hazardHdlns, huAreas - - def _getAdditionalHeadlines(self, hazardHdlns, huAreas, info): - # We have a list of hazardHdlns and can use checkHazards - # Additional Hazards - t="" - hdlnList = self._checkHazard(hazardHdlns, [("FA","A"),("FF","A")], returnList=True) - print "hdlnList", hdlnList - if len(hdlnList) > 0: - t+="\n" - t+=self._headlines(info, hdlnList, self._allPortions, ending=". 
") - t+="Please listen closely for any Flood Warnings that might be in effect for your area.\n" - - hdlnList = self._checkHazard(hazardHdlns, [("TO","A")], returnList=True) - print "hdlnList", hdlnList - if len(hdlnList) > 0: - t+="\n" - t+=self._headlines(info, hdlnList, self._allPortions, ending=". ") - t+="Please listen closely for any Tornado Warnings that might be in effect for your area.\n" - - # Check additional hazards - checkHazards = [("CF","W"), ("CF","A"),("CF","Y"),("RP","S"),("SU","W"),("SU","A"),("SU","Y"), - ("SR","W"),("SR","A"),("GL","W"),("GL","A"), - ("SC","Y"),("SI","Y"),("SW","Y"), ("RB","Y")] - hazList = self._checkHazard(hazardHdlns, checkHazards, returnList=True) - if len(hazList) > 0: - t+= "\nPlease check the latest public and marine forecasts for detailed information about additional hazards.\n" - return t - - def _allPortions(self, info, hazAreas, prefix="", suffix=""): - # Used for overview headlines - descriptor, checkAreas = self._determineDescriptor(info, hazAreas) - portions = prefix + "portions of " + suffix - allPortions = self._checkAreaInclusion(checkAreas, hazAreas, "all of ", portions) - return allPortions + descriptor - - def _allParts(self, info, hazAreas): - # Used for overview additional headlines - descriptor, checkAreas = self._determineDescriptor(info, hazAreas) - allParts = self._checkAreaInclusion(checkAreas, hazAreas, "all ", "part ") - return allParts + "OF " + descriptor + ". " - - def _entirePortions(self, info, hazAreas): - # Used by the optional template for optional sections - return self._checkAreaInclusion( - info.allAreas, hazAreas, "the entire area. ", "portions of the area. 
") - - def _checkAreaInclusion(self, compareAreas, hazAreas, allWords, partWords): - words = allWords - for area in compareAreas: - if area not in hazAreas: - words = partWords - break - return words - - def _headlines(self, info, headlineList, areaWordMethod=None, - ending="\n\n", qualifier=False): - # Create the headlines from list of (key, hazAreas) - # where key is (hdln, act, phen, sig) - t = "" - for key, hazAreas in headlineList: - hdln, act, phen, sig = key - if act == "CON": actWords = " continues for " - else: actWords = " is in effect for " - # Skip HU.S headines - if (phen =='HU' and sig =='S'): - continue - if hdln[0] in ["A","I"]:a='an ' - elif hdln.find("for") == 0: a = ' ' - else: a ='a ' - - #print "\n Headline", hdln, phen - if areaWordMethod is not None: - areaWords=areaWordMethod(info, hazAreas) - else: - areaWords=self._describeLocations(info, hazAreas, qualifier=qualifier) - t+= a+hdln + actWords + areaWords + ending - return t - - def _areaWords(self, areas): - if areas == []: - return "" - names = [] - areaDict = self._areaDict - areas.sort() - for area in areas: - name = areaDict[area].get('altName', areaDict[area].get('ugcName', '')) - names.append(name) - areaTypeWords = "" - areaWords = self.formatCountyString("", names)[1:] - return areaWords - - def _describeLocations(self, info, areaList, end=". 
", prefix="", - suffix="", qualifier=False): - t = "" - fmtDict = self._overviewFormat() - inland, coastal, marine = self._areaType(areaList) - #print "inland, coastal, marine", inland, coastal, marine, areaList - if inland or coastal: fmt = fmtDict["land"] - else: fmt = fmtDict["marine"] - - if fmt == "generic": - suffix = "" - if qualifier: - if inland: suffix = "inland " - elif coastal: suffix = "coastal " - t+= self._allPortions(info, areaList, prefix, suffix) - else: - t+= "The following locations, " + self._areaWords(areaList) - t+= end - return t - - ##################################################################################### - def StormInformation(self, title, sectionDict, info): - t = title - - st = self._StormInfo -# if st.find("N/A (unnamed)") >= 0: -# t+="Although the system of concern has not been named..." -# t+="it is being actively monitored for signs of tropical cyclone development. " -# -# elif st.find("N/A (downgraded)")>= 0: -# t+=self._stormTypeName+" has been downgraded to below tropical storm strength..." -# t+="but will continue to be monitored until it no longer threatens the area. " -# -# else: - t+="At "+ self._stormTime + "...the center of " - - # Fix the grammar if dealing with "remnants" - if re.search("(?i)remnants", self._stormTypeName) is not None: - t+="The " - - t+=self._stormTypeName + " was located near " - t+=self._stormLocation - - # if we kept the national reference - if self._stormReference.strip() != "": - t+= "...OR " + self._stormReference - - # Finish off the storm location sentence - t+= ". " - - # Now add the local references - localRefs = self._stormLocalReferences - if len(localRefs) > 0: - t+= "This was " - for localRef in self._stormLocalReferences: - if localRefs.index(localRef) > 0: - orStr = "...or " - else: - orStr = "" - t+= orStr + localRef - t+= ". 
" - - # Do not place storm motion and intensity on separate lines of text -# t+="\n" - t = t.replace("miles...", "miles ") - sm = self._stormMovementTrend - si = self._stormIntensityTrend - - # Combine the storm motion and intensity before we frame them - - smi = "" - if sm != "": smi += sm + '.' - if si != "": smi += ' ' + si + '.' -## t+= self._frame(smi) - t += smi - return t - - ##################################################################################### - def SituationOverview(self, title, sectionDict, info): - t = title - un = self._Uncertainty - ec = self._EventContext - if ec == "Abbreviated": - hdlns = info.hazardHdlns - #print "\n Headlines" - reported = 0 - for hazardHdln in hdlns: - key, landList, marineList, coastalList, inlandList = hazardHdln - #print "hazard", hazardHdln - hdln, act, phen, sig = key - if phen == "HU" and sig == "S": - continue - if act in self._ignoreActions(): - continue - if hdlns.index(hazardHdln) > 0: - t+= " and " - t+= "A " + hdln - reported += 1 - if reported > 0: - if reported > 1: t+= " have " - else: t+= " has " - t+="now been issued. " - t+="A more detailed statement will follow shortly.\n" - - if ec in ["PreEvent","Watch","Warning"]: - if un=="High": - t+="It is vital that you do not focus on the exact forecast track. " - t+="To do so could result in bad decisions and place you or those you are " - t+="responsible for at greater risk. " - elif un == "Average": - t+="When making decisions...do not focus on the exact forecast track. " - - if ec != "Abbreviated": t+=self._frame("Succinctly describe the expected evolution of the event for the CWA & MAOR; which hazards are of greater (or lesser) concern, forecast focus, etc.")+ "\n" - - if ec in ["PreEvent", "Watch"]: - if info.anyLand: - t+="It is too early to provide exact wind and surge forecast values for specific locations. 
" - damage = self._getCategoryDamage(info.maxWind_CWA_MAOR) - if damage.strip() != "": - t+="A general concern should be for the possibility of "+damage+" somewhere within "\ - + self._cwa_descriptor() + ". " - - return t - ##################################################################################### - def Overview_PrecautionaryPreparednessActions(self, title, sectionDict, info): - t = title - ec = self._EventContext - if ec == "NonEvent": t+=self.overview_pp_nonEvent(info) - elif ec == "PreEvent": t+= self.overview_pp_preEvent(info) - elif ec == "Abbreviated": t+=self._overview_pp_abbrev(info) - elif ec == "Watch": t+=self._overview_pp_watch(info) - elif ec == "Warning": t+=self._overview_pp_warning(info) - elif ec == "Conditions": t+=self._overview_pp_conditions(info) - elif ec == "PostEvent": t+=self._overview_pp_postEvent(info) - elif ec == "PostTropical": t+=self._overview_pp_postTropical(info) - endStr = sectionDict.get("endStr", "") - return t + endStr - - def overview_pp_nonEvent(self, info): - t="" - if info.anyInland or info.anyCoastal: - t+= """ -People are urged to remain informed and listen for any -significant changes to the forecast. Do not listen to rumors or -uninformed opinions. Rather...seek authoritative information from -your local National Weather Service office and emergency -management. - -""" - - if info.anyCoastal or info.anyMarine: - t+= """ -Mariners should keep informed of the latest coastal waters -forecast. -""" - return self._frame(t.strip()) - - def overview_pp_preEvent(self, info): - t = "" - if info.anyInland or info.anyCoastal: - t+= """ -Even before the issuance of watches or warnings...it may become -necessary for local authorities to render evacuation orders. If -told to leave...do so as soon as possible. - -This is a good time for residents to go over their hurricane -disaster plan. Visitors are encouraged to check with hotel -management or with local officials regarding any actions they -should take. 
- -The following are suggested actions that can be taken at this -time... -- check batteries for radios and flashlights. -- stock up on drinking water and canned or dried food. -- ensure you have a manual can opener. -- have enough for at least three to five days per person. -- gather medicines...toiletries...and first aid supplies. -- have a sufficient amount of cash on hand since credit cards and - automated cash machines do not work without power. -- check fuel levels on automobiles...generators...and chain saws. -- if you need to make a trip to the hardware store...the grocery - store...or the gas station...do so as early as possible. -- determine where you should seek shelter if the storm approaches - your area. -- consider whether you live in a potential evacuation zone. If - so...identify prescribed evacuation routes which lead out of the - threatened areas. -- learn the locations of official shelters. - -Please visit www.ready.gov for a more complete list of items to -include in an emergency preparedness kit. - -In all cases...heed the advice of local officials and comply with -any orders that are issued. - -""" - if info.anyCoastal or info.anyMarine: - t+= """ -Mariners should monitor the coastal waters forecast for unsafe -conditions. Consider early steps for securing your craft. If -small craft must go out and current conditions allow...do not -venture far from port and do not stay out very long. Return to -port quickly if a watch or warning is issued. 
- -""" - return self._frame(t.strip()) - - def _overview_pp_abbrev(self, info): - t="" - -## print "\n\n" + "*"*80 -## -## print info.hazardHdlns - - #----------------------------------------------------------------------- - # Determine if this is a downgrade - - downgradeWarning = 0 # flag to track any downgrades of a warning - upgradeWarning = 0 # flag to track any downgrades of a warning - - # If we have more than one hazard - if len(info.hazardHdlns) > 1: - - # Set aside the hazard info for comparison - (baseTitle, baseAction, basePhen, baseSig) = info.hazardHdlns[0][0] - baseAreas =[] - - # Combine all the areas affected by this hazard into one list - for areaList in info.hazardHdlns[0][1:]: - baseAreas = baseAreas + areaList - - # Look through all the hazards we have - after the first one - for hazard in xrange(1, len(info.hazardHdlns)): - - print "\nworking on hazard index -> ", hazard - print info.hazardHdlns[hazard], "\n" - - # Split up the hazard info for this hazard - (title, action, phen, sig) = info.hazardHdlns[hazard][0] - areas =[] - - # Combine all the areas affected by this hazard into one list - for areaList in info.hazardHdlns[hazard][1:]: - areas = areas + areaList - - print "baseAreas = ", baseAreas - print "areas = ", areas - print "basePhen = ", basePhen, " baseSig = ", baseSig, " baseAction = ", baseAction - print " phen = ", phen, " sig = ", sig, " action = ", action - - # Look specifically for the case where we are downgrading from - # a hurricane/typhoon warning to a tropical storm warning - if ((basePhen in ["HU", "TY"] and baseSig == "W" and - baseAction == "CAN" and phen == "TR" and sig == "W" and - action in ["NEW", "EXA"]) or - (basePhen == "TR" and baseSig == "W" and - baseAction in ["NEW", "EXA"] and phen in ["HU", "TY"] and - sig == "W" and action == "CAN")): - - print "\nWorking on an downgrade here." 
- - # See if the current zone combination is part of downgrade - for area in areas: - - # If this zone segment is part of the downgrade - if area in baseAreas: - - # Indicate the downgrade and move on - downgradeWarning = 1 - break - - # Look specifically for the case where we are upgrading from - # a tropical storm warning to a hurricane warning - if ((basePhen == "TR" and baseSig == "W" and - baseAction == "UPG" and phen in ["HU", "TY"] and - sig == "W" and action in ["NEW", "EXA"]) or - (basePhen in ["HU", "TY"] and baseSig == "W" and - baseAction in ["NEW", "EXA"] and phen== "TR" and - sig == "W" and action == "UPG")): - - print "\nWorking on an upgrade here." - - # See if the current zone combination is part of downgrade - for area in areas: - - # If this zone segment is part of the downgrade - if area in baseAreas: - - # Indicate the downgrade and move on - upgradeWarning = 1 - break - - print "upgrade = ", upgradeWarning, "\tdowngrade = ", downgradeWarning - # If there are and land or coastal sites - if info.anyInland or info.anyCoastal: - - # If there are no upgrades or downgrades - if not upgradeWarning and not downgradeWarning: - - # Completely new watches/warnings - t+=""" -For those under a watch or warning...now is the time to initiate -preparations according to your hurricane disaster plan specific -to your home or business. - -For those nearby...review your hurricane disaster plan and -become ready to act if a watch or a warning is later issued for -your area. - -It is important to actively listen for forthcoming information -from your local National Weather Service office and emergency -management agency. - -""" - - # If this is an upgraded warning - if upgradeWarning and not downgradeWarning: - - # upgraded warning - t+=""" -For those now under the new warning...now is the time to -initiate preparations according to your hurricane disaster plan -specific to your home or business...if you have not already -done so. 
- -It is important to actively listen for forthcoming information -from your local National Weather Service office and emergency -management agency. - -""" - - # If this is a downgraded warning - if downgradeWarning and not upgradeWarning: - - # Downgraded warning - t+=""" -While the intensity of this storm is no longer expected to be -as strong...there is still a threat to life and property. For -those still under a warning...continue to implement your -hurricane disaster plan specific to your home or business. - -It is important to actively listen for forthcoming information -from your local National Weather Service office and emergency -management agency. - -""" - - # If this is a upgrade and downgraded warning - if downgradeWarning and upgradeWarning: - - # Huh?! warning - t+=""" -There is still a threat to life and property. Continue to -implement your hurricane disaster plan specific to your home or -business. - -It is important to actively listen for forthcoming information -from your local National Weather Service office and emergency -management agency. - -""" - - # Marine zones - if info.anyMarine: - - # If there are no upgrades or downgrades - if not upgradeWarning and not downgradeWarning: - - # Completely new watches/warnings - t+= """ -Mariners are urged to make all necessary preparations to return -to port...seek safe harbor...and secure their craft. Now is the -time to initiate preparations according to your emergency plan -for tropical systems. Monitor weather broadcasts for changes to -the latest forecast and listen for further statements from local -officials. - -""" - - - # If this is an upgraded warning - if upgradeWarning and not downgradeWarning: - - # upgraded warning - t+=""" -Mariners are urged to return to port...seek safe harbor...and -secure their craft. Now is the time to complete preparations -according to your emergency plan for tropical systems. 
Monitor -weather broadcasts for changes to the latest forecast and -listen for further statements from local officials. - -""" - - # If this is a downgraded warning - if downgradeWarning and not upgradeWarning: - - # Downgraded warning - t+=""" -While the intensity of this storm is no longer expected to be -as strong...there is still a threat to life and property. -Mariners are urged to remain in port and secure their craft. -Continue to implement your emergency plan for tropical systems. -Monitor weather broadcasts for changes to the latest forecast -and listen for further statements from local officials. - -""" - - # If this is a upgrade and downgraded warning - if downgradeWarning and upgradeWarning: - - # Huh?! warning - t+=""" -There is still a threat to life and property. Continue to -implement your emergency plan for tropical systems. Monitor -weather broadcasts for changes to the latest forecast and -listen for further statements from local officials. - -""" - - return self._frame(t.strip()) - - - def _overview_pp_watch(self, info): - t="" - public_A= self._checkHazard(info.hazardHdlns, - [("HU","A"),("TR","A"),("TY","A")], ["land"]) - coastal_A=self._checkHazard(info.hazardHdlns, - [("HU","A"),("TR","A"),("TY","A")], ["coastal"]) - marine_A=self._checkHazard(info.hazardHdlns, - [("HU","A"),("TR","A"),("TY","A")], ["marine"]) - if public_A: - t+= """ -For those under a watch...now is the time to begin preparing your -home or business according to your hurricane disaster plan. -Listen for possible warnings and be ready to evacuate if -necessary. Heed the advice of local officials and comply with any -orders that are issued. - -""" - if coastal_A: - t+= """ -For interests at ports...docks...and marinas...it is recommended -that you perform the prescribed preparations according to your -emergency operations plan for tropical cyclones. If you live on a -boat...begin to safely secure your craft and make plans to leave -it for adequate land based shelter. 
Listen for possible warnings. - -""" - if coastal_A or marine_A: - t+= """ -Regarding the coastal waters under a watch...small craft should -return to port or seek safe harbor. - -Closely monitor NOAA weather radio or other local news outlets -for official storm information. Listen for possible changes to -the forecast. - -""" - if public_A: - t+= """ -For additional precautionary and preparedness information... -Please refer to the detailed recommendations relative to your -location as further described by your local National Weather -Service office and your local emergency management. - -""" - return self._frame(t.strip()) - - def _overview_pp_warning(self, info): - t="" - public_W= self._checkHazard(info.hazardHdlns, - [("HU","W"),("TR","W"),("TY","W")], ["land"]) - coastal_W=self._checkHazard(info.hazardHdlns, - [("HU","W"),("TR","W"),("TY","W")], ["coastal"]) - marine_W=self._checkHazard(info.hazardHdlns, - [("HU","W"),("TR","W"),("TY","W")], ["marine"]) - public_A= self._checkHazard(info.hazardHdlns, - [("HU","A"),("TR","A"),("TY","A")], ["land"]) - coastal_A=self._checkHazard(info.hazardHdlns, - [("HU","A"),("TR","A"),("TY","A")], ["coastal"]) - marine_A=self._checkHazard(info.hazardHdlns, - [("HU","A"),("TR","A"),("TY","A")], ["marine"]) - if public_W: - t+= """ -For those under a warning...now is the time to rush to completion -preparations for the protection of life and property. Evacuate if -directed to do so by local officials...or if your home is -vulnerable to high winds or flooding. - -""" - if coastal_W: - t+= """ -For interests at ports...docks...and marinas...urgently complete -prescribed preparations according to your emergency operations -plan for tropical cyclones. If you live on a boat...make final -preparations for securing your craft before leaving it. Be sure -to account for the possible closure of bridges and causeways. 
- -""" - if coastal_W or marine_W: - t+= """ -Regarding any coastal waters under a warning...small craft should -remain in port and well secured. - -""" - if public_A: - t+= """ -For those under a watch...continue with your preparations and -listen for possible warnings. - -""" - if coastal_A or marine_A: - t+= """ -Regarding any coastal waters under a watch...small craft should -return to port or seek safe harbor. Determine the best strategy -for securing your craft. - -Closely monitor NOAA weather radio or other local news outlets -for official storm information. Listen for possible changes to -the forecast. - -""" - if public_W: - t+= """ -For additional precautionary and preparedness information... -Please refer to the detailed recommendations relative to your -location as further described by your local National Weather -Service office and local emergency management. - -""" - return self._frame(t.strip()) - - def _overview_pp_conditions(self, info): - t="" - if info.anyLand: - t+= """ -During the storm...stay inside and away from windows. Do not -venture outside when high winds are occurring or during temporary -lulls as flying debris can easily...and suddenly...cause serious -injury. - -Have a well-charged cell phone nearby...keeping network -communications as open as possible for emergencies. - -Closely monitor NOAA weather radio or other local news outlets -for official storm information. Listen for possible changes to -the forecast. - -""" - if info.anyMarine: - t+= """ -For small craft who failed to make it to safe harbor or port... -And are now in distress...radio your situation according to -maritime protocol. If appropriate...deploy your emergency -distress beacon. Ensure that everyone is wearing life jackets... -And survival suits if available. - -""" - return self._frame(t.strip()) - - def _overview_pp_postEvent(self, info): - t="" - if info.anyLand: - t+= """ -Many casualties occur after a storm has passed. Be smart and use -caution. 
Continue to heed the advice of local officials as they -conduct rescue and recovery efforts. Wait for the all-clear -signal before re-entering evacuation zones or any area that -received significant damage or flooding. - -Pay attention for possible road closures and stay away from -downed power lines. Listen for any boil water alerts. - -""" - if info.anyCoastal or info.anyMarine: - t+= """ -Mariners should check the latest coastal waters forecast before -making any definite plans. - -""" - return self._frame(t.strip()) - - def _overview_pp_postTropical(self, info): - t="" - if info.anyLand: - t+= """ -Everyone is urged to stay informed of the situation. Remain -diligent in your efforts to protect life and property. - -""" - if info.anyCoastal or info.anyMarine: - t+= """ -Mariners are advised to keep their guard up while closely -monitoring the latest coastal waters forecast. Small craft should -remain in port until this storm passes. - -""" - return self._frame(t.strip()) - -##################################################################################### - def NextUpdate(self, title, sectionDict, info): - t = title - wfo = self._wfoCity - if self._NextUpdate == "Shortly": - t+= "The next local statement will be issued by the National Weather Service in " - t+= wfo - t+= " shortly. It will provide important details regarding the evolving tropical cyclone threats and their potential impacts upon the area. " - elif self._NextUpdate == "Enter": - t+="The next local statement will be issued by the National Weather Service in " - t+= wfo - t+=" around " - t+= self._NextUpdate_entry - t+="...or sooner if conditions warrant. " - elif self._NextUpdate == "Conditions": - t+="The next local statement will be issued by the National Weather Service in " - t+=wfo - t+=" as conditions warrant. 
" - elif self._NextUpdate == "LastIssuance": - t+="As it pertains to this event...this will be the last local statement issued by the National Weather Service in " - t+=wfo - t+=" regarding the effects of tropical cyclone hazards upon the area. " - return t - - ##################################################################################### - ##################################################################################### - ####### SEGMENT Sections - - def NewInformation(self, title, argDict, segment, section, info): - t="" - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - if situation=="Abbreviated": - hdlns = info.hazardHdlns - #print "\n Headlines" - reported = 0 - for hazardHdln in hdlns: - key, landList, marineList, coastalList, inlandList = hazardHdln - #print "hazard", hazardHdln - hdln, act, phen, sig = key - if phen == "HU" and sig == "S": - continue - if act in self._ignoreActions(): - continue - if hdlns.index(hazardHdln) > 0: - t+= " and " - t+= "A " + hdln - reported += 1 - if reported > 0: - if reported > 1: t+= " have " - else: t+= " has " - t+="now been issued. 
" - t+="A more detailed statement will follow shortly.\n" - else: - t+= self._frame("Please enter new information here.") + "\n" - return title + t - - ##################################################################################### - def PrecautionaryPreparednessActions(self, title, argDict, segment, section, info): - t="" - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - # NonEvent - if situation == "NonEvent": - if scenario=="ActiveNonEvent": - if info.anyLand: - t+=self._pp_dict("NonEvent", ["ActiveNonEvent", "land"]) - if info.anyCoastal or info.anyMarine: - t+=self._pp_dict("NonEvent", ["ActiveNonEvent", "marine"]) - elif scenario=="EndNonEvent": - if info.anyLand or info.anyMarine: - t+=self._pp_dict("NonEvent", ["EndNonEvent", "everywhere"]) - - # PreEvent - elif situation=="PreEvent": - if scenario=="Advancing": - if info.anyLand: - t+=self._pp_dict("PreEvent", ["Advancing", "land"]) - if info.anyCoastal or info.anyMarine: - t+=self._pp_dict("PreEvent", ["Advancing", "marine"]) - elif scenario=="Peripheral": - if info.anyLand: - t+=self._pp_dict("PreEvent", ["Peripheral", "land"]) - if info.anyCoastal or info.anyMarine: - t+=self._pp_dict("PreEvent", ["Peripheral", "marine"]) - elif scenario=="InSitu": - if info.anyLand: - t+=self._pp_dict("PreEvent", ["InSitu", "land"]) - if info.anyCoastal or info.anyMarine: - t+=self._pp_dict("PreEvent", ["InSitu", "marine"]) - - # Abbreviated - elif situation=="Abbreviated": - HU_A = self._checkHazard(info.hazardHdlns, [("HU","A"),("TY","A")]) - TR_W = self._checkHazard(info.hazardHdlns, [("TR","W")]) - if self._checkHazard(info.hazardHdlns, [("HU","W"),("TY","W")]): - if info.anyLand: - t+= self._pp_dict("Abbreviated", ["HU_W", "land"]) - if info.anyMarine: - t+=self._pp_dict("Abbreviated", ["HU_W", "marine"]) - elif HU_A and TR_W: - if info.anyLand: - t+=self._pp_dict("Abbreviated", ["TR_W_HU_A", "land"]) - if info.anyMarine: - t+=self._pp_dict("Abbreviated", ["TR_W_HU_A", 
"marine"]) - elif self._checkHazard(info.hazardHdlns, [("HU","A")]): - if info.anyLand: - t+=self._pp_dict("Abbreviated", ["HU_A", "land"]) - if info.anyMarine: - t+=self._pp_dict("Abbreviated", ["HU_A", "marine"]) - elif TR_W: - if info.anyLand: - t+=self._pp_dict("Abbreviated", ["TR_W", "land"]) - if info.anyMarine: - t+=self._pp_dict("Abbreviated", ["TR_W", "marine"]) - elif self._checkHazard(info.hazardHdlns, [("TR","A")]): - if info.anyLand: - t+=self._pp_dict("Abbreviated", ["TR_A", "land"]) - if info.anyMarine: - t+=self._pp_dict("Abbreviated", ["TR_A", "marine"]) - - - elif situation=="Watch": - if self._checkHazard(info.hazardHdlns, [("HU","A"),("TY","A")]): - if scenario == "Advancing": - if info.anyLand: - t+=self._pp_dict("Watch", ["HU_A", "Advancing", "land"]) - if info.anyMarine: - t+=self._pp_dict("Watch", ["HU_A", "Advancing", "marine"]) - elif scenario == "Peripheral": - if info.anyLand: - t+=self._pp_dict("Watch", ["HU_A", "Peripheral", "land"]) - if info.anyMarine: - t+=self._pp_dict("Watch", ["HU_A", "Peripheral", "land"]) - else: # In Situ - if info.anyLand: - t+=self._pp_dict("Watch", ["HU_A", "InSitu", "land"]) - if info.anyMarine: - t+=self._pp_dict("Watch", ["HU_A", "InSitu", "marine"]) - if self._checkHazard(info.hazardHdlns, [("TR","A")]): - if scenario == "Advancing": - if info.anyLand: - t+=self._pp_dict("Watch", ["TR_A", "Advancing", "land"]) - if info.anyMarine: - t+=self._pp_dict("Watch", ["TR_A", "Advancing", "marine"]) - elif scenario == "Peripheral": - if info.anyLand: - t+=self._pp_dict("Watch", ["TR_A", "Peripheral", "land"]) - if info.anyMarine: - t+=self._pp_dict("Watch", ["TR_A", "Peripheral", "land"]) - else: # In Situ - if info.anyLand: - t+=self._pp_dict("Watch", ["TR_A", "InSitu", "land"]) - if info.anyMarine: - t+=self._pp_dict("Watch", ["TR_A", "InSitu", "marine"]) - # Warning - elif situation=="Warning": - HU_W = self._checkHazard(info.hazardHdlns, [("HU","W"),("TY","W")]) - TR_W = self._checkHazard(info.hazardHdlns, 
[("TR","W")]) - HU_A = self._checkHazard(info.hazardHdlns, [("HU","A"),("TY","A")]) - if HU_W: - if scenario == "Advancing": - if info.anyLand: - t+=self._pp_dict("Warning", ["HU_W", "Advancing", "land"]) - if info.anyMarine: - t+=self._pp_dict("Warning", ["HU_W", "Advancing", "marine"]) - elif scenario == "Peripheral": - if info.anyLand: - t+=self._pp_dict("Warning", ["HU_W", "Peripheral", "land"]) - if info.anyMarine: - t+=self._pp_dict("Warning", ["HU_W", "Peripheral", "land"]) - else: # In Situ - if info.anyLand: - t+=self._pp_dict("Warning", ["HU_W", "InSitu", "land"]) - if info.anyMarine: - t+=self._pp_dict("Warning", ["HU_W", "InSitu", "marine"]) - elif TR_W and HU_A: - if scenario == "Advancing": - if info.anyLand: - t+=self._pp_dict("Warning", ["TR_W_HU_A", "Advancing", "land"]) - if info.anyMarine: - t+=self._pp_dict("Warning", ["TR_W_HU_A", "Advancing", "marine"]) - elif scenario == "Peripheral": - if info.anyLand: - t+=self._pp_dict("Warning", ["TR_W_HU_A", "Peripheral", "land"]) - if info.anyMarine: - t+=self._pp_dict("Warning", ["TR_W_HU_A", "Peripheral", "land"]) - else: # In Situ - if info.anyLand: - t+=self._pp_dict("Warning", ["TR_W_HU_A", "InSitu", "land"]) - if info.anyMarine: - t+=self._pp_dict("Warning", ["TR_W_HU_A", "InSitu", "marine"]) - elif TR_W: - if scenario == "Advancing": - if info.anyLand: - t+=self._pp_dict("Warning", ["TR_W", "Advancing", "land"]) - if info.anyMarine: - t+=self._pp_dict("Warning", ["TR_W", "Advancing", "marine"]) - elif scenario == "Peripheral": - if info.anyLand: - t+=self._pp_dict("Warning", ["TR_W", "Peripheral", "land"]) - if info.anyMarine: - t+=self._pp_dict("Warning", ["TR_W", "Peripheral", "land"]) - else: # In Situ - if info.anyLand: - t+=self._pp_dict("Warning", ["TR_W", "InSitu", "land"]) - if info.anyMarine: - t+=self._pp_dict("Warning", ["TR_W", "InSitu", "marine"]) - - # Conditions - elif situation=="Conditions": - if scenario=="Imminent": - if self._checkCategory(info.maxWind, "Cat3"): - if 
info.anyLand: - t+=self._pp_dict("Conditions", ["Imminent", "Cat3", "land"]) - if info.anyMarine: - t+=self._pp_dict("Conditions", ["Imminent", "Cat3", "marine"]) - elif self._checkCategory(info.maxWind, "Cat1"): - if info.anyLand: - t+=self._pp_dict("Conditions", ["Imminent", "Cat1", "land"]) - if info.anyMarine: - t+=self._pp_dict("Conditions", ["Imminent", "Cat1", "marine"]) - elif info.maxWind >= 34: - if info.anyLand: - t+=self._pp_dict("Conditions", ["Imminent", "34", "land"]) - if info.anyMarine: - t+=self._pp_dict("Conditions", ["Imminent", "34", "marine"]) - - elif scenario == "Ongoing": - if self._checkCategory(info.maxWind, "Cat3"): - if info.anyLand: - t+=self._pp_dict("Conditions", ["Ongoing", "Cat3", "land"]) - if info.anyMarine: - t+=self._pp_dict("Conditions", ["Ongoing", "Cat3", "marine"]) - elif self._checkCategory(info.maxWind, "Cat1"): - if info.anyLand: - t+=self._pp_dict("Conditions", ["Ongoing", "Cat1", "land"]) - if info.anyMarine: - t+=self._pp_dict("Conditions", ["Ongoing", "Cat1", "marine"]) - elif info.maxWind >= 34: - if info.anyLand: - t+=self._pp_dict("Conditions", ["Ongoing", "34", "land"]) - if info.anyMarine: - t+=self._pp_dict("Conditions", ["Ongoing", "34", "marine"]) - - elif scenario == "Diminishing": - if info.anyLand: - if info.maxWind >= 64: - desc = "Hurricane" - somewhat = "" - elif info.maxWind >= 34: - desc = "Tropical storm" - somewhat = "Somewhat " - else: - desc = "Strong wind" - somewhat = "Somewhat " - landStr=self._pp_dict("Conditions", ["Diminishing", "land"]) - landStr = landStr.replace("{desc}", desc) - landStr = landStr.replace("{somewhat} ", somewhat) - t+=landStr - if info.anyMarine: - t+=self._pp_dict("Conditions", ["Diminishing", "marine"]) - - # PostEvent - elif situation=="PostEvent": - if scenario=="Immediate": - if info.anyLand: - t+=self._pp_dict("PostEvent", ["Immediate", "land"]) - if info.anyCoastal or info.anyMarine: - t+=self._pp_dict("PostEvent", ["Immediate", "marine"]) - elif scenario== 
"NoImpact": - if info.anyLand or info.anyMarine: - t+=self._pp_dict("PostEvent", ["NoImpact", "general"]) - elif scenario=="LongTerm": - if info.anyLand: - t+=self._pp_dict("PostEvent", ["LongTerm", "land"]) - if info.anyCoastal or info.anyMarine: - t+=self._pp_dict("PostEvent", ["LongTerm", "marine"]) - - # PostTropical - elif situation=="PostTropical": - if scenario=="InProgress": - t+=self._pp_dict("PostTropical", ["InProgress"]) - else: - t+=self._pp_dict("PostTropical", ["Completed"]) - - return title + t - - ##################################################################################### - def Probability(self, title, argDict, segment, section, info): - t="" - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - if situation=="NonEvent": - t+=self._frame("As currently assessed...the onset of either tropical storm or hurricane conditions is unlikely to occur.")+ "\n" - - elif situation=="PreEvent": - if scenario=="Advancing": t+=self._prob_stmts(info) + "\n" - elif scenario=="Peripheral": t+=self._prob_stmts(info, ifWording=True) + "\n" - else: - t+="At this time...the probability of either tropical storm or hurricane conditions cannot be determined until the system becomes an active tropical cyclone. However...based on the latest outlook...the chance of tropical cyclone formation is " - t+= self._frame("low/medium/high from twoxxx. 
") - elif situation=="Abbreviated": - pass - elif situation in ["Watch", "Warning"]: - if scenario=="Advancing": t+=self._prob_stmts(info) + "\n" - elif scenario in ["Peripheral", "InSitu"]: - t+=self._prob_stmts(info, ifWording=True) + "\n" - elif situation in ["Conditions", "PostEvent", "PostTropical"]: - pass - - return title + t - - ##################################################################################### - def Wind(self, title, argDict, segment, section, info): - t="" - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - # NonEvent - if situation=="NonEvent": - t+=self._wind_NonEvent(info)+ "\n" - # PreEvent - elif situation=="PreEvent": - if scenario=="Advancing": - t+=self._wind_PreEvent_Advancing(info) - elif scenario=="Peripheral": - t+=self._wind_PreEvent_Peripheral(info) - else: # In Situ - t+=self._wind_PreEvent_InSitu(info) - t+=self._genericImpact_stmt(info) + "\n" - - # Abbreviated - elif situation=="Abbreviated": - pass - - # Watch - elif situation=="Watch": - if scenario=="Advancing": - t+=self._wind_Watch_Advancing(info) - elif scenario=="Peripheral": - t+=self._wind_Watch_Peripheral(info) - else: # In Situ - t+=self._wind_Watch_InSitu(info) - t+=self._genericImpact_stmt(info) + "\n" - - # Warning - elif situation=="Warning": - if scenario=="Advancing": - t+=self._wind_Warning_Advancing(info) - elif scenario=="Peripheral": - t+=self._wind_Warning_Peripheral(info) - else: # In Situ - t+=self._wind_Warning_InSitu(info) - t+=self._potentialImpact_stmt(info) + "\n" - - # Conditions - elif situation=="Conditions": - if scenario=="Imminent": - t+=self._wind_Conditions_Imminent(info) - elif scenario == "Ongoing": - t+=self._wind_Conditions_Ongoing(info) - elif scenario == "Diminishing": - t+=self._wind_Conditions_Diminishing(info) - t+=self._potentialImpact_stmt(info) + "\n" - - # PostEvent - elif situation=="PostEvent": - t+= self._wind_PostEvent(info, scenario) + "\n" - - # PostTropical - elif 
situation=="PostTropical": - if scenario=="InProgress": - t+=self._wind_PostTropical_InProgress(info) - elif scenario == "Completed": - t+=self._wind_PostTropical_Completed(info) - t+=self._potentialImpact_stmt(info) + "\n" - - if info.anyMarine: - t+=self._frame("Add Wording for Seas Here") + "\n" - - return title + t - - ##################################################################################### - def _optionalSection_template(self, argDict, segment, info, hazardList, listenList=[], - checkAreaTypes=[]): - t="" - if hazardList != []: - try: - hazardHdlns = self._hazardHdlns - except: - self._hazardHdlns, self._huAreas = self._getAdditionalHazards(info) - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - hdlnList = self._checkHazard( - self._hazardHdlns, hazardList, checkAreas=segmentAreas, returnList=True) - if len(hdlnList) > 0: - t+=self._headlines(info, hdlnList, self._entirePortions, ending="") - t+="See latest forecast for latest information. " - for listen in listenList: - t+=listen + "\n" - t+=self._frame("Additional free edit area with relevant info here.") + "\n" - t+=self._frame("Potential impact statement from impact library for specific hazard.")+ "\n" - t+="\n" - return t - - ##################################################################################### - def StormSurgeTide(self, title, argDict, segment, section, info): -# hazards = [("CF","W"), ("CF","A"), ("CF","Y"), ("SU","W"),("SU","Y")] -# listenList = [] -# t=self._optionalSection_template(argDict, segment, info, hazards, listenList, -# checkAreaTypes=["coastal"]) - - if info.inundationMax is None: - return title + self._frame("Enter surge text here") - - t= "" - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - # NonEvent - if situation=="NonEvent": - pass # t+=self._surge_NonEvent(info)+ "\n" ?? 
- # PreEvent - elif situation=="PreEvent": - if scenario=="Advancing": - t+=self._surge_PreEvent_Advancing(info) - elif scenario=="Peripheral": - t+=self._surge_PreEvent_Peripheral(info) - else: # In Situ - t+=self._surge_PreEvent_InSitu(info) - t+="\n" - - # Abbreviated - elif situation=="Abbreviated": - pass - - # Watch - elif situation=="Watch": - if scenario=="Advancing": - t+=self._surge_Watch_Advancing(info) - elif scenario=="Peripheral": - t+=self._surge_Watch_Peripheral(info) - else: # In Situ - t+=self._surge_Watch_InSitu(info) - if info.inundationMax > 0 and scenario != "InSitu": - t+= self._surge_Watch_Impact_stmt(info, segment) - t+= "\n" - - # Warning - elif situation=="Warning": - if scenario=="Advancing": - t+=self._surge_Warning_Advancing(info) - elif scenario=="Peripheral": - t+=self._surge_Warning_Peripheral(info) - else: # In Situ - t+=self._surge_Warning_InSitu(info) - if info.inundationMax > 0 and scenario != "InSitu": - t+=self._surge_Impact_stmt(info, segment) - t+= "\n" - - # Conditions - elif situation=="Conditions": - if scenario=="Imminent": - t+=self._surge_Conditions_Imminent(info) - elif scenario == "Ongoing": - t+=self._surge_Conditions_Ongoing(info) - elif scenario == "Diminishing": - t+=self._surge_Conditions_Diminishing(info) - if info.inundationMax > 0 and scenario != "Diminishing": - t+=self._surge_Impact_stmt(info, segment) - t+="\n" - - # PostEvent - elif situation=="PostEvent": - t+= self._surge_PostEvent(info, scenario) - t+= "\n" - - # PostTropical - elif situation=="PostTropical": - if scenario=="InProgress": - t+=self._surge_PostTropical_InProgress(info) - elif scenario == "Completed": - t+=self._surge_PostTropical_Completed(info) - if info.inundationMax > 0: - t+=self._surge_Impact_stmt(info, segment) - t+= "\n" - - return title + t - - ##################################################################################### - def InlandFlooding(self, title, argDict, segment, section, info): - hazards = [("FF", "A"), 
("FA","A")] - listenList = [ - "Listen for possible flood warnings for your location...and be ready to act if flooding rains occur. " - ] - t=self._optionalSection_template(argDict, segment, info, hazards, listenList, - checkAreaTypes=["land"]) - return title + t - - ##################################################################################### - def Tornadoes(self, title, argDict, segment, section, info): - hazards = [("TO", "A")] - listenList = [ - "Listen for possible Tornado Warnings for your location...and be ready to act quickly if a tornado approaches. " - ] - t=self._optionalSection_template(argDict, segment, info, hazards, listenList) - return title + t - - ##################################################################################### - def Marine(self, title, argDict, segment, section, info): - hazards = [('SR','W'), ('SR','A'), ('GL','W'), ('GL','A'), ('RB','Y'), - ('SC','Y'), ('SI','Y'), ('SW','Y'), ('HF','W'), ('HF','A')] - listenList = [] - t=self._optionalSection_template(argDict, segment, info, hazards, listenList, - checkAreaTypes=["marine"]) - return title + t - - ##################################################################################### - def _extractTitle(self, info, title): - # Extract correct title for Public vs. Marine segments - if type(title) is types.TupleType: - if info.anyMarine: title = title[1] - else: title = title[0] - return title - - - ##################################################################################### - ## Precautionary Preparedness Statement Dictionaries - ## - ## To keep from cluttering the code, the text is in these dictionaries - ## That way, the code logic can be more easily seen - - def _pp_dict(self, situation, keys): - exec "textDict = self._" + situation + "_textDict()" - return self._accessDict(textDict, keys) - - def _NonEvent_textDict(self): - return { - "ActiveNonEvent":{ - "land":self._frame("Take advantage of this opportunity to review your hurricane disaster plan. 
If you do not have a plan, make one. If you need assistance with your plan, contact the National Weather Service, local emergency management, or American Red Cross.\n\nStore adequate food and drink supplies for each member of the family for at least three to five days. Replace batteries in flashlights and portable radios. Fix loose and clogged rain gutters and downspouts. Trim overhanging trees and shrubbery. Also, acquire plywood or other materials to protect your home or business. Review your insurance policy, updating it if necessary.")+"\n", - "marine":self._frame("Boat owners and captains of small craft should take this opportunity to review their emergency operations plan for tropical cyclones and evaluate their state of readiness for this season.")+"\n", - }, - "EndNonEvent":{ - "everywhere":self._frame("THIS EVENT IS NO LONGER EXPECTED TO HAVE AN IMPACT ACROSS THE AREA AT THIS TIME.\n\nadd other wording here.")+"\n", - } - } - - def _PreEvent_textDict(self): - return { - "Advancing": { - "land":self._frame("Everyone is strongly urged to stay informed. If early evacuation orders are issued for your area, stay calm and take the necessary steps to leave as soon as possible and in an orderly fashion.\n\nMake plans to evacuate if you live on the immediate coast and barrier islands, or in a high rise building, or in a mobile home, or in a place that floods easily. Be ready to act if a watch or warning is issued for your area.") + "\n", - "marine":self._frame("As soon as possible, small craft are urged to return to port or to seek safe harbor. Take early steps to secure your craft.") + "\n", - }, - "Peripheral": { - "land":self._frame("Stay informed and listen for changes to the forecast. Be ready to act if watches or warnings become necessary for your area.")+"\n", - "marine":self._frame("Small craft should consider returning to port or seeking safe harbor.")+"\n", - }, - "InSitu": { - "land":self._frame("Stay informed of the latest forecast. 
Do not get caught off guard and be ready to act quickly if watches or warnings become necessary for your area.")+"\n", - "marine":self._frame("As soon as possible, small craft are urged to return to port or to seek safe harbor. Take early steps to secure your craft.")+"\n", - } - } - - def _Abbreviated_textDict(self): - return { - "HU_A": { - "land":self._frame("Now is the time to begin implementing your hurricane disaster plan. Additional recommendations for your area will be offered shortly.") + "\n", - "marine":self._frame("For marine interests, implement actions according to your emergency operations plan for possible hurricane conditions.") + "\n", - }, - "TR_A": { - "land":self._frame("This is a good time to begin implementing your disaster plan for possible tropical storm conditions. Additional recommendations for your area will be offered shortly.") + "\n", - "marine":self._frame("For marine interests, implement actions according to your emergency operations plan for possible tropical storm conditions.")+"\n", - }, - "HU_W": { - "land":self._frame("According to your hurricane disaster plan, preparations to protect life and property should be nearing completion. Additional recommendations for your area will be offered shortly.")+"\n", - "marine":self._frame("For marine interests, urgently complete actions according to your emergency operations plan for hurricane conditions.") + "\n", - }, - "TR_W": { - "land":self._frame("According to your disaster plan for tropical storm conditions, preparations to protect life and property should be nearing completion. Additional recommendations for your area will be offered shortly.")+"\n", - "marine":self._frame("For marine interests, urgently complete actions according to your emergency operations plan for tropical storm conditions.")+"\n", - }, - "TR_W_HU_A": { - "land":self._frame("Diligently complete actions according to your hurricane disaster plan for tropical storm warnings. 
Be ready to implement your plan for hurricane warnings should this warning be upgraded in the future. Additional recommendations for your area will be offered shortly.")+"\n", - "marine":self._frame("For marine interests, urgently complete actions according to your mariners emergency operations plan for tropical storm warnings, but also be ready to implment your plan for hurricane warnings should this warning be upgraded in the future.")+"\n", - } - } - - def _Watch_textDict(self): - return { - "HU_A": { - "Advancing": { - "land":self._frame("Stay calm and keep informed. Comply with any evacuation orders that are issued for your area. If your home is vulnerable to high winds, or you live in a surge zone or any location prone to flooding, evacuate to a designated shelter or ride out the storm in the sturdy home of family or friends outside of evacuation zones.\n\nItems to bring to a shelter include a first aid kit, medicines and prescriptions, baby food and diapers, games and books, toiletries, a battery powered radio, a cell phone, flashlights with extra batteries, a blanket or sleeping bag for each person, personal identification, copies of key papers such as insurance policies, available cash and credit cards. Remember, pets are not allowed in most public shelters, so check ahead with your intended shelter.\n\nRegarding your home or business, cover all windows and doors with shutters or plywood. Move patio furniture and other loose objects indoors. Brace all exterior doors, including garage doors. Do this as early as possible.\n\nIf you need to make a trip to the hardware store, the grocery store, or the gas station, do so as early as possible.")+"\n", - "marine": self._frame("Boat owners and captains of small craft need to determine the best strategy for securing their craft.")+"\n", - }, - "Peripheral": { - "land":self._frame("Keep informed and listen for possible changes to the forecast. Comply with any evacuation orders issued for your area. 
If you live in a mobile home, make plans to evacuate.\n\nGather clothes, important papers, medicines, and small valuables and keep them ready to go on short notice. Gas up your vehicles and have extra cash on hand.\n\nRegarding your home or business, cover all windows and doors with shutters or plywood. Move patio furniture and other loose objects indoors. Brace all exterior doors, including garage doors.")+ "\n", - "marine":self._frame("Boat owners and captains of small craft need to determine the best strategy for securing their craft.")+"\n", - }, - "InSitu": { - "land":self._frame("Do not get caught unprepared as conditions are subject to change rapidly. The potential impacts are simply too great to ignore the threat. Err on the side of caution and take appropriate actions for possible hurricane conditions.")+"\n", - "marine":self._frame("Boat owners and captains of small craft should not allow themselves to get caught unprepared. Err on the side of caution and take protective actions. Determine the best strategy for securing their craft.")+"\n", - }, - }, - "TR_A": { - "Advancing": { - "land": self._frame("Preparations should be made as soon as possible, before conditions deteriorate. Keep informed while listening for possible warnings. Secure loose outdoor objects which can be blown around. Strongly consider evacuating if you live in a mobile home, and do so if ordered by local officials.") + "\n", - "marine":self._frame("Boat owners and captains of small craft need to determine the best strategy for securing their craft.") + "\n", - }, - "Peripheral":{ - "land": self._frame("Stay informed and listen for possible changes to the forecast. Preparations for this storm should be made as soon as possible.") + "\n", - "marine": self._frame("Boat owners and captains of small craft need to determine the best strategy for securing their craft.") + "\n", - }, - "InSitu":{ - "land": self._frame("Do not get caught unprepared. 
Err on the side of caution and take appropriate actions for possible tropical storm conditions.") + "\n", - "marine": self._frame("Boat owners and captains of small craft should not allow themselves to get caught unprepared. Err on the side of caution and take protective actions. Determine the best strategy for securing their craft.") + "\n", - }, - }, - } - - def _Warning_textDict(self): - return { - "HU_W": { - "Advancing": { - "land":self._frame("Make the final preparations to protect life and property. Rush to completion the hardening of your home or business by closing shutters and bracing garage doors.\n\nIf evacuating, leave as soon as possible. Guard against being stuck out on roadways when dangerous winds and heavy rains arrive. Again, do not stay in a mobile or manufactured home. Remember, pets are not allowed in most official shelters, so check ahead with your intended shelter.\n\nIf staying in a home, turn the refrigerator to maximum cold and keep it closed. Turn off propane tanks and unplug small appliances. Fill the bathtub with water in case the tap water becomes unavailable after the storm. This is for cleaning and flushing purposes. Do not drink it.") +"\n", - "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", - }, - "Peripheral": { - "land":self._frame("Make preparations to protect life and property. Complete the hardening of your home or business by closing shutters and bracing garage doors.\n\nIf evacuating, leave as soon as possible. Guard against being stuck out on roadways when dangerous winds and heavy rains arrive. Again, do not stay in a mobile or manufactured home. 
Remember, pets are not allowed in most official shelters, so check ahead with your intended shelter.") +"\n", - "marine":self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", - }, - "InSitu": { - "land":self._frame("This is a dangerous and rapidly developing situation. Err on the side of caution and urgently take actions to protect life and property. Comply with any evacuation orders issued by local authorities for your area. If you live in a mobile home, leave it for more substantial shelter.") +"\n", - "marine":self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", - }, - }, - "TR_W": { - "Advancing": { - "land": self._frame("Final preparations to protect life and property should be completed before conditions deteriorate. The onset of gusty winds and heavy rains can cause outside activities to become dangerous. Secure loose outdoor objects which can be blown around. If you live in a mobile home, leave it for more substantial shelter.") +"\n", - "marine":self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", - }, - "Peripheral":{ - "land": self._frame("Outside preparations should be completed as soon as possible before the onset of gusty winds and heavy rains which can cause outside activities to become dangerous.") +"\n", - "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", - }, - "InSitu":{ - "land": self._frame("This is a potentially dangerous and rapidly developing situation. 
Err on the side of caution and complete preparations for tropical storm conditions.") +"\n", - "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", - }, - }, - "TR_W_HU_A": { - "Advancing": { - "land":self._frame("Final actions to protect life and property should be completed before conditions deteriorate. Cover windows and doors with shutters or plywood. Move patio furniture and other loose outdoor objects inside. Brace all exterior doors, including garage doors.\n\nComply with any evacuation orders issued for your area. If you live in a mobile home, leave it for more substantial shelter. If your home is vulnerable to high winds, or you live in a surge zone or any location prone to flooding, evacuate to a designated shelter or ride out the storm in the sturdy home of family or friends outside of evacuation zones.") +"\n", - "marine":self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", - }, - "Peripheral":{ - "land": self._frame("Preparations to protect life and property should be completed as soon as possible since the onset of gusty winds and heavy rains can cause outside activities to become dangerous. Cover windows and doors with shutters or plywood. Move patio furniture and other loose outdoor objects inside. Brace all exterior doors, including garage doors.\n\nComply with any evacuation orders issued for your area. If you live in a mobile home, leave it for more substantial shelter.") +"\n", - "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", - }, - "InSitu":{ - "land": self._frame("This is a potentially dangerous and rapidly developing situation. Do not get caught unprepared. 
Err on the side of caution and complete preparations for tropical storm conditions and possible hurricane conditions.") +"\n", - "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", - }, - }, - } - - def _Conditions_textDict(self): - return { - "Imminent": { - "Cat3": { - "land": self._frame("Very dangerous conditions will soon occur. Move to an interior room on the lowest floor of your home or shelter, and stay away from windows and external doors. Listen for extreme wind warnings which are issued for the imminent onset of extreme winds greater than 115 mph. If issued, act quickly to take that final step to protect yourself and others, and possibly save lives.")+"\n", - "marine":self._frame("Small craft should already be in port and well secured. Crews should be inside land based shelters. Do not attempt to ride out this storm on your vessel.")+"\n", - }, - "Cat1": { - "land":self._frame("Dangerous hurricane conditions will soon occur. Everyone should be quickly moving to safety within their home or shelter. Once inside, ensure all windows and doors are secured before dangerous winds arrive. Move to an interior room on the lowest floor.\n\nDo not go outside into the eye of hurricanes. Within the eye, conditions can become temporarily calm, which can be misleading. Once the eye passes, the winds will change direction and quickly increase again to dangerous speeds.\n\nBe aware that the loss of commercial power can happen quickly. Keep emergency gear handy.") + "\n", - "marine":self._frame("Small craft should already be in port and well secured. Captains of small craft and their crews should already be safely within land based shelters. Do not attempt to ride out this storm on your vessel.")+"\n", - }, - "34": { - "land":self._frame("Tropical storm conditions will soon occur. All evacuees should quickly arrive to their designated shelter. 
Everyone should remain alert and move inside.\n\nListen for possible flood or tornado warnings.") + "\n", - "marine":self._frame("Small craft should already be in port and well secured. Captains of small craft and their crews should already be safely within land based shelters.")+"\n", - }, - }, - - "Ongoing": { - "Cat3": { - "land":self._frame("Very dangerous conditions are occurring now. Go to the safest place within your home or shelter and stay there. Be ready to protect your head and body in case your shelter fails.") + "\n", - "marine":self._frame("Small craft should be in port and well secured. Crews should be inside land based shelters. Do not attempt to ride out this storm on your vessel.")+"\n", - }, - "Cat1": { - "land":self._frame("Dangerous hurricane conditions are occurring now. Remain in an interior room on the lowest floor. Stay away from windows and external doors. Keep emergency gear handy.") + "\n", - "marine":self._frame("Small craft should be in port and well secured. Crews should be inside land based shelters. Do not attempt to ride out this storm on your vessel.")+"\n", - }, - "34": { - "land":self._frame("Tropical storm conditions are occurring. Remain alert and stay inside.\n\nListen for possible flood or tornado warnings.") + "\n", - "marine":self._frame("Small craft should be in port and well secured.")+"\n", - }, - }, - "Diminishing": { - "land":self._frame("As {desc} conditions diminish, do not go outside to check for damage or to implement temporary repairs as the wind situation will remain {somewhat} dangerous until high winds fully subside. Do not open the doors of your home or shelter. 
Wait for the all-clear signal.\n\nStay inside and listen for possible flood and tornado warnings.")+"\n", - "marine":self._frame("Small craft should stay in port and remain well secured.")+"\n", - }, - } - - def _PostEvent_textDict(self): - return { - "Immediate": { - "land":self._frame("If you or someone else needs emergency help, call 9 1 1.\n\nAs soon as you are able, check in with your points of contact among family and friends. Inform them of your status and condition. Be a good samaritan and check in on your neighbors.\n\nListen to NOAA weather radio and other local news media for the latest information on storm impacts.\n\nIf you are using a portable generator, observe all safety precautions to avoid carbon monoxide poisoning, electrocution, or fires. Portable generators should be operated outdoors, in a dry and well ventilated place. Do not store fuel inside your home or garage.\n\nIf you received roof damage, do not go up on the roof until the threat of gusty winds and heavy rain has fully subsided. If operating chain saws and portable generators, review the operators manual and observe all safety precautions.\n\nStay out of flooded areas as the water may be contaminated or the road might have been washed away. Test drinking water before using, particularly from wells. Stay away from downed power lines too.")+"\n", - "marine":self._frame("Small craft should remain in port or safe harbor until winds and seas subside. For any small craft who are in distress, or if you see someone else in distress, radio your situation according to maritime protocol. If appropriate, deploy your emergency distress beacon.")+"\n", - }, - "NoImpact": { - "general": self._frame("This event is no longer expected to have an impact across the area at this time. 
Use the opportunity to revise preparedness plans and remain prepared for future events.\n\nAdd other wrap-up wording here.")+"\n", - }, - "LongTerm": { - "land": self._frame("Continue to listen to NOAA weather radio and other local news media for the latest information on storm impacts.\n\nIf you are using a portable generator, observe all safety precautions to avoid carbon monoxide poisoning, electrocution, or fires. Portable generators should be operated outdoors, in a dry and well ventilated place. Do not store fuel inside your home or garage.\n\nChain saws can be very helpful when removing fallen trees and large branches. Yet, operating a chain saw is dangerous work. Be sure to review operating procedures for safe cutting. To reduce the chance of mishap or injury, work with another person who has experience.\n\nDo not go sight seeing into areas which have been hardest hit as you may hinder ongoing rescue and recovery operations.\n\nStay out of flooded areas as the water may be contaminated or the road might have been washed away. Test drinking water before using, particularly from wells. Stay away from downed power lines too.")+"\n\n", - "marine": self._frame("Small craft should ensure that winds and seas have fully subsided before venturing out.")+"\n\n", - "general": self._frame("For the latest information regarding the threat of hazardous weather of any type, listen to NOAA weather radio or visit your local National Weather Service web site.")+"\n", - }, - } - - def _PostTropical_textDict(self): - return { - "InProgress": self._frame( -""" -Although the system is losing its tropical characteristics, the -potential impacts are similar to those previously indicated -regardless of its nature. Continue with readiness actions as -recommended."""), - "Completed": self._frame( -""" -Although the system has become non-tropical, the potential -impacts are similar to those previously indicated. 
Continue with -readiness actions as recommended."""), - } - - ##################################################################################### - ## Wind Situation/Scenario methods - - ############ - - def _wind_NonEvent(self, info): - t="" - t+=self._frame("Tropical cyclone watches or warnings are currently not in effect, nor are they likely under present circumstances.\n\nThe latest forecast is for maximum winds to remain below tropical storm force. At this time, remain calm and stay informed.") - return t - ############## - - def _wind_PreEvent_Advancing(self, info): - t="" - t+="Tropical cyclone watches or warnings are likely to be issued in the near future. As " + self._stormTypeName + " moves closer, the threat for sustained high winds will likely increase. " - t+=self._wind_stmt(info)+ ". " - t+=self._beginWind_stmt(info.maxWind, 50, info.windDur[34], end=". ") - return t - - def _wind_Watch_Advancing(self, info): - t="" - t+="AS "+self._stormTypeName+" moves closer, the threat for sustained high winds is likely to increase. " - t+=self._wind_stmt(info)+". " - t+=self._beginWind_stmt(info.maxWind, 50, info.windDur[34], end=". ") - return t - - def _wind_Warning_Advancing(self, info): - t="" - if self._formatPeriod(info.windDur[34]) == "": - t+="|* these zones are not within the 34kt radii. Rerun and choose peripheral. *|" - else: - t+="AS "+self._stormTypeName+" approaches, sustained tropical storm force winds are expected to begin " - t+=self._formatPeriod(info.windDur[34]) - if info.maxWind >= 64: - t+= " and hurricane force winds " + self._formatPeriod(info.windDur[64]) + ". " - t+=self._specific_wind_stmt(info, intro="Hurricane force winds are forecast to last", - duration=True, reportWindValues=False, - windDur=info.windDur[64], end=". ") - else: - t+= ". " - - t+=self._specific_wind_stmt(info, intro="Maximum winds are forecast to be in the ", - addRange=True) - t+=". 
" - return t - - ############ - def _wind_PreEvent_Peripheral(self,info): - t= "" - t+="At this time, the issuance of tropical cyclone watches or warnings is uncertain. As " + self._stormTypeName + " passes nearby, the threat for sustained high winds should not increase. However, some tropical storm force gusts may still occur. Since there is still uncertainty, closely monitor the forecast for any significant changes. " - return t - - def _wind_Watch_Peripheral(self, info): - t= "" - t+="AS "+self._stormTypeName+" passes nearby, the threat for sustained high winds should not increase. However, there is still some possibility for tropical storm force winds. Since there is still uncertainty, closely monitor the forecast for any significant changes. " - return t - - def _wind_Warning_Peripheral(self, info): - t="" - t+=self._specific_wind_stmt(info) + ". " - t+="However, as "+self._stormTypeName+" approaches, stronger winds are still possible. Continue to closely monitor the forecast for any significant changes and be ready to act. " - return t - - ############ - def _wind_PreEvent_InSitu(self, info): - t="" - t+="Tropical cyclone watches or warnings are currently not in effect for the area. However, if tropical cyclone development becomes likely then they could be quickly needed.\n\n" - t+=self._wind_stmt(info)+ ". " - t+="Since there is still uncertainty, closely monitor the forecast for any significant changes. " - return t - - def _wind_Watch_InSitu(self, info): - t="" - t+="AS "+self._stormTypeName+" develops, the threat for sustained high winds may increase. Since there is still uncertainty, closely monitor the forecast for any significant changes. " - return t - - def _wind_Warning_InSitu(self, info): - t="" - t+="AS "+self._stormTypeName+" continues to develop, the threat for sustained high winds may increase soon. " - t+=self._specific_wind_stmt(info)+ ". " - t+="Since there is still uncertainty, closely monitor the forecast for any significant changes. 
" - return t - - ################ - def _wind_Conditions_Imminent(self, info): - t="" - if self._checkCategory(info.maxWind, "Cat3"): - catInfo = self._getCategoryInfo(info.maxWind) - t+="As the center of "+self._stormTypeName+" approaches, "+catInfo - t+=" winds are imminent. " - t+=self._specific_wind_stmt( - info, intro="Maximum winds of ", end=" are expected. ") - t+=self._fallBelow_stmt(info, end=". ") - - elif info.maxWind >= 34: - catInfo = self._getCategoryInfo(info.maxWind) - t+="AS "+self._stormTypeName+" approaches, sustained "+catInfo - t+="Winds are imminent. " - t+=self._specific_wind_stmt( - info, intro="Maximum winds of ",end=" are expected. ") - t+=self._fallBelow_stmt(info, end=". ") - return t - - def _wind_Conditions_Ongoing(self, info): - t="" - period = info.windDur[info.maxWind] - if self._checkCategory(info.maxWind, "Cat3"): - catInfo = self._getCategoryInfo(info.maxWind) - t+=self._windContinue_stmt(info, period, catInfo + "Will continue ", end=". ") - if info.maxWind >= 50: t+=self._fallBelow_stmt(info, end=". ") - - elif info.maxWind >= 34: - t+=self._specific_wind_stmt(info, intro="Sustained winds of ") - t+=self._windContinue_stmt(info, period, intro=" will continue ", end=". ") - if info.maxWind>= 50: t+=self._fallBelow_stmt(info, end=". ") - return t - - def _wind_Conditions_Diminishing(self, info): - t="" - t+="AS "+self._stormTypeName+" exits the area, high winds will continue to diminish. Warnings will be discontinued as soon as the threat completely subsides. " - return t - - ############## - def _wind_PostEvent(self, info, scenario): - t="" - if scenario=="Immediate": - t+="Tropical cyclone warnings have been discontinued. Sustained high winds are no longer expected but strong wind gusts may still occur. " - else: - t+="Sustained high winds or wind gusts are no longer expected. Please refer to the latest National Weather Service forecast for wind information. 
" - return t - - ############ - def _wind_PostTropical_InProgress(self, info): - t="" - if info.anyLand: - t+="The remnants of "+self._stormTypeName - if info.maxWind >= 34: - t+=" will still impact the region with sustained winds equivalent to " - t+=self._windDesc(info) + "Winds. " - else: - t+=" could still impact the region with tropical storm force winds. " - t+=self._specific_wind_stmt(info, intro="Maximum winds of ", end=" are expected. ") - t+=self._fallBelow_stmt(info, end=". ") - - if not info.anyLand and info.anyMarine: - t+="The remnants of "+self._stormTypeName - if info.maxWind >= 34: - t+=" will still impact the region with sustained winds equivalent to " - t+=self._marineWindDesc(info) + "Winds. " - else: - t+=" could still impact the region with gale force winds. " - t+=self._specific_wind_stmt(info, intro="Maximum winds of ", end=" are expected. ") - t+=self._fallBelow_stmt(info, end=". ") - return t - - def _wind_PostTropical_Completed(self, info): - t="" - if info.anyLand: - t+="As the remnants of "+self._stormTypeName+" affect the area, " - if info.maxWind >= 34: - t+=" sustained winds equivalent to " - windDesc = self._windDesc(info) - t+=windDesc + "winds are still possible. " - else: - "Tropical storm force winds could still impact the region. " - t+=self._specific_wind_stmt(info, intro="Maximum winds of ", end=" are expected. ") - t+=self._fallBelow_stmt(info, end=". ") - - if not info.anyLand and info.anyMarine: - t+="As the remnants of "+self._stormTypeName+" affect the area, " - if info.maxWind >= 34: - t+=" sustained winds equivalent to " - windDesc = self._marineWindDesc(info) +" winds are still expected. " - else: - t+=" gale force winds could still impact the region. " - t+=self._specific_wind_stmt(info, intro="Winds of ", end=" are expected")+ ". " - t+=self._fallBelow_stmt(info, marine=True, end=". 
") - return t - - - ##################################################################################### - ## Storm Surge and Storm Tide Situation/Scenario methods - - def _surge_PreEvent_Advancing(self, info): - t = "" - t += "It is too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by """ + self._stormTypeName - t += ". Much depends on the precise size, intensity and track of this system as it approaches the coast. At this time, there is a general concern for the chance of " - t += self._frame("( minor | moderate | major -- you should base this on your MEOWS)") + " coastal flooding." - return t - - def _surge_PreEvent_Peripheral(self, info): - t = "" - t+= "It is too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by " - t+= self._stormTypeName - t+= ". Much depends on the precise size, intensity and track of this system as it passes nearby. At this time, there is a general concern for the chance of " - t+= self._frame("( minor | moderate | major -- you should base this on your MEOWS)") + " coastal flooding." - return t - - def _surge_PreEvent_InSitu(self, info): - t = "" - t+= "It is too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by " - t+= self._stormTypeName - t+= ". Much depends on the precise size, intensity, and track of the system if it more fully develops. Since there is considerable uncertainty, closely monitor the latest forecast." - - return t - - ############## - - # Changed 6/8/2017 - Modified to use above ground only and remove all MSL - - def _surge_Watch_Advancing(self, info): - t="" - t+="It is still too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by " + self._stormTypeName - t+=". 
Much depends on the precise size, intensity and track of the system as it approaches the coast. " - if info.inundationMax > 0: - t+="Given the latest forecast, there is a reasonable worst case potential flood inundation" - if info.inundationMax > 2: - t+=" of " + `info.deltaSurge`+" to "+`info.inundationMax`+" feet above ground somewhere within the surge zone." - else: - t+="up to" + `info.inundationMax`+" feet above ground somewhere within the surge zone." - else: - t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. Please further describe your coastal flooding concerns here or delete this paragraph or consider deleting the whole storm surge and tide section.") - return t - - def _surge_Watch_Peripheral(self, info): - t="" - t+="It is still too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by " + self._stormTypeName - t+=". Much depends on the precise size, intensity and track of the system as it approaches the coast and passes nearby. " - if info.inundationMax > 0: - t+="Given the latest forecast, there is a reasonable worst case potential flood inundation" - if info.inundationMax > 2: - t+=" of " + `info.deltaSurge`+" to "+`info.inundationMax`+" feet above ground somewhere within the surge zone." - else: - t+="up to" + `info.inundationMax`+" feet above ground somewhere within the surge zone." - else: - t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. Please further describe your coastal flooding concerns here or delete this paragraph or consider deleting the whole storm surge and tide section.") - return t - - - def _surge_Watch_InSitu(self, info): - t="" - t+="It is too early to determine if there will be any appreciable coastal flooding within the forecast area from combined storm surge and tide waters associated with " - t+=self._stormTypeName + ". 
Much depends on the precise size, intensity, and track of the system if it more fully develops. Since there is considerable uncertainty, closely monitor the latest forecast. " - return t - - - ################ - - def _surge_Warning_Advancing(self, info): - t = "" - if info.inundationMax > 0: - - t+="As "+self._stormTypeName+" approaches the coast, there is an increasing chance for potential flood inundation " - - if info.inundationMax > 2: - t+="of " +`info.deltaSurge`+" to "+`info.inundationMax`+" feet above ground somewhere within the surge zone." - else: - t+="up to "+`info.inundationMax`+" feet above ground somewhere within the surge zone." - - t+= "\n\nThe locations most likely to realize the greatest flooding include " - t+=self._frame("Relative to the segment, explicitly list locations of greatest concern relative to inundation as that is what the impact statement below is based on for the worst affected area, include inland reach of the inundation waters. Further describe inundation elsewhere within the surge zone as applicable. Be aware that locations experiencing the highest storm surge and tide may not realize the greatest inundation. ") - t+="The most likely period of impact will be " - t+=self._frame("Be sure to cite the expected period of onset. Remember surge waters often arrive well before the core winds and can rise very quickly. ") - - else: - t+="The impact from combined storm surge and tide waters is expected to be minimal. " - t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together. 
") - - return t - - - def _surge_Warning_Peripheral(self, info): - t = "" - if info.inundationMax > 0: - - t+="Although the core of "+self._stormTypeName+" is not currently forecast to move across coastal sections of the forecast area at this time, " - t+="there is still a chance for potential flood inundation " - - if info.inundationMax > 2: - t+="of " +`info.deltaSurge`+" to "+`info.inundationMax`+" feet above ground somewhere within the surge zone." - else: - t+="up to "+`info.inundationMax`+" feet above ground somewhere within the surge zone." - - t+= "\n\nThe locations most likely to realize the greatest flooding include " - t+=self._frame("Relative to the segment, explicitly list locations of greatest concern relative to inundation as that is what the impact statement below is based on for the worst affected area, include inland reach of the inundation waters. Further describe inundation elsewhere within the surge zone as applicable. Be aware that locations experiencing the highest storm surge and tide may not realize the greatest inundation. ") - t+="The most likely period of impact will be " - t+=self._frame("Be sure to cite the expected period of onset. Remember surge waters often arrive well before the core winds and can rise very quickly. ") - - else: - t+="The impact from combined storm surge and tide waters is expected to be minimal. " - t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together. ") - - return t - - def _surge_Warning_InSitu(self, info): - t="" - t+="As "+self._stormTypeName+" continues to develop, combined storm surge and tide waters may increase suddenly. Since there is considerable uncertainty, continue to closely monitor the latest forecast. 
" - t+="At this time, there is a general concern for the chance of " - t+=self._frame("(minor| moderate| major)")+ " coastal flooding. " - return t - - ############### - - def _surge_Conditions_Imminent(self, info): - t = "" - if info.inundationMax > 0: - t+="With the imminent arrival of "+self._stormTypeName+", potential flood inundation " - - if info.inundationMax > 2: - t+="of "+`info.deltaSurge`+" to "+`info.inundationMax`+" feet above ground is likely somewhere within the surge zone." - else: - t+="up to "+`info.inundationMax`+" feet above ground is likely somewhere within parts of the surge zone." - - t+="\n\nThe locations most likely to realize the greatest flooding include " - t+=self._frame("Relative to the segment, explicitly list locations of greatest concern relative to inundation as that is what the impact statement below is based on for the worst affected area, include inland reach of the inundation waters; further describe inundation elsewhere within the surge zone as applicable; be aware that locations experiencing the highest storm surge and tide may not realize the greatest inundation. Also stress the rapid water rises that are likely. ") - else: - t+="The impact from combined storm surge and tide waters is expected to be minimal. " - t+=self._frame("According to the latest surge grids, coastal flooding is negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") - return t - - def _surge_Conditions_Ongoing(self, info): - t = "" - if info.inundationMax > 0: - t+="Expect flood inundation " - - if info.inundationMax > 2: - t+="of "+`info.deltaSurge`+" to "+`info.inundationMax`+" feet above ground somewhere within the surge zone." - else: - t+="up to "+`info.inundationMax`+" feet above ground somewhere within the surge zone." 
- - t+="\n\nThe locations most likely realizing the greatest flooding include " - t+=self._frame("Relative to the segment, explicitly list locations of greatest concern relative to inundation as that is what the impact statement below is based on for the worst affected area, include inland reach of the inundation waters; further describe inundation elsewhere within the surge zone as applicable; be aware that locations experiencing the highest storm surge and tide may not realize the greatest inundation.") - else: - t+= "Minimal storm tide impacts are being observed. " - t+=self._frame("According to the latest surge grids, coastal flooding is negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") - return t - - - def _surge_Conditions_Diminishing(self, info): - t = "" - if info.inundationMax > 0: - t+="Although coastal flood waters will soon begin to partially recede, " - t+="do not attempt to return to evacuated areas until official confirmation is " - t+="received that it is safe to do so. " - t+="\n\nContinued coastal inundation " - - if info.inundationMax > 2: - t+="of "+`info.deltaSurge`+" to "+`info.inundationMax`+" feet above ground can be expected somewhere within the surge zone." - else: - t+="up to "+`info.inundationMax`+" feet above ground can be expected somewhere within the surge zone." - - else: - t+="Minimal storm tide impacts are being observed. " - t+=self._frame("According to the latest surge grids, coastal flooding is negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") - return t - - - ############# - def _surge_PostEvent(self, info, scenario): - t = "" - if scenario == "Immediate": - if info.inundationMax > 0: - t+="As wind conditions associated with "+self._stormTypeName+" continue to improve, coastal flood waters will be slower to recede. 
Certain areas may still be inundated. Do not attempt to return to evacuated areas until official confirmation is received that it is safe to do so." - t+="\n\nThe locations which realized the greatest flooding include " - t+=self._frame("Relative to the segment, explicitly list locations that experienced greatest inundation flooding remember that in the absence of tidal or other kind of observations the real time slosh run at the time of landfall is likely your best source of information here, not necessarily the psurge data; further describe inundation elsehwere within the surge zone as applicable; describe any known impacts.") - else: - t+="Minimal storm tide impacts are being observed. " - t+=self._frame("According to the latest surge grids, coastal flooding is negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") - return t - - ############# - def _surge_PostTropical_InProgress(self, info): - t = "" - if info.inundationMax > 0: - t+="As "+self._stormTypeName+" impacts the forecast area, potential flood inundation " - if info.inundationMax > 2: - t+="of "+`info.deltaSurge`+" to "+`info.inundationMax` - else: - t+="up to "+`info.inundationMax` - t+=" feet above ground is likely somewhere within the surge zone. " - t+="\n\nThe locations which will likely realize the greatest flooding include " - t+=self._frame("Relative to the segment, explicitly list locations of greatest inundation concerns, including inland reach; further describe inundation elsewhere within the surge zone as applicable.") - else: - t+="The impact from combined storm surge and tide waters is expected to be minimal. " - t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. 
Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") - return t - - def _surge_PostTropical_Completed(self, info): - t = "" - if info.inundationMax > 0: - t+="As former "+self._stormTypeName+" impacts the forecast area, potential flood inundation " - if info.inundationMax > 2: - t+="of "+`info.deltaSurge`+" to "+`info.inundationMax` - else: - t+="up to "+`info.inundationMax` - t+=" feet above ground is likely somewhere within the surge zone. " - t+="\n\nThe locations which will likely realize the greatest flooding include " - t+=self._frame("Relative to the segment, explicitly list locations of greatest inundation concerns, including inland reach; further describe inundation elsewhere within the surge zone as applicable.") - else: - t+="The impact from combined storm surge and tide waters is expected to be minimal. " - t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. 
Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") - return t - - ############## - ####### Total Water Level thresholds and statements - ####### NOTE: Thresholds are being compared to the InundationMax values - ############## Impact Statements - - def _surge_Watch_Impact_stmt(self, info, segment): - t="" - water_dict = self._totalWaterLevel_dict(info, segment) - if info.inundationMax >= water_dict.get("Extreme", 7): - damage="Widespread major" - - elif info.inundationMax >= water_dict.get("High", 5): - damage="Areas of major" - - elif info.inundationMax >= water_dict.get("Moderate", 3): - damage="Areas of moderate" - - elif info.inundationMax >= water_dict.get("Low", 1): - damage="Areas of minor" - else: - damage = None - if damage is not None: - t+="\n\n"+self._frame("At this time, there is a general concern" + - " for the chance of "+ damage + - " coastal flooding.") - return t - - def _surge_Impact_stmt(self, info, segment): - t="" - water_dict = self._totalWaterLevel_dict(info, segment) - if info.inundationMax >= water_dict.get("Extreme", 7): - damage= self._totalWaterLevel_Extreme_stmt(info, segment) - - elif info.inundationMax >= water_dict.get("High", 5): - damage= self._totalWaterLevel_High_stmt(info, segment) - - elif info.inundationMax >= water_dict.get("Moderate", 3): - damage= self._totalWaterLevel_Moderate_stmt(info, segment) - - elif info.inundationMax >= water_dict.get("Low", 1): - damage= self._totalWaterLevel_Low_stmt(info, segment) - else: - damage ="Minor coastal flood damage" - t+="\n\n"+self._frame(damage) - return t - - def _totalWaterLevel_byZone_dict(self): - # Enter customized values for land and marine zones - return { - "zone1": { - "Extreme": 7, - "High": 5, - "Moderate": 3, - "Low": 1, - }, - "default": { - "Extreme": 7, - "High": 5, - "Moderate": 3, - "Low": 1, - }, - } - - def _totalWaterLevel_dict(self, info, segment): - # SurgeHtPlusTide 
thresholds for Total Water Level statements. - # The threshold values for the segment will be determined by - # examining the thresholds for each zone in the segment and choosing - # the values for the zone which has the *lowest* Extreme value - segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment - twl_dict = self._totalWaterLevel_byZone_dict() - return_dict = None - for zoneName in segmentAreas: - zone_dict = twl_dict.get(zoneName, twl_dict["default"]) - if return_dict is None: return_dict = zone_dict - elif zone_dict["Extreme"] < return_dict["Extreme"]: - return_dict = zone_dict - return return_dict - - def _totalWaterLevel_Extreme_stmt(self, info, segment): - t = "" - t+= """ -There is an extreme threat to life and property from coastal -flooding, potentially having a catastrophic impact. The concern -is for the chance of widespread major coastal flooding to occur -within the surge zone, resulting in devastating and life- -threatening inundation. If realized, people within the -threatened areas who failed to heed official evacuation orders -will likely die. - -Coastal communities will likely be devastated, with numerous -homes and businesses near the shore completely destroyed. -Shoreside condominiums and hotels may also be destroyed, -Especially those with inadequate structural support. Flood waters -are likely to extend well inland, further expanding the overall -threat to life and property. Vehicles of any type will likely be -submerged or swept away. - -Roads and bridges will likely be damaged or washed out by the -combined effects of storm surge and tide waters, battering -waves, and floating debris. This could leave entire flood-prone -coastal communities cut off, perhaps for several weeks or -more, and with no power or water. -""" - return t - - def _totalWaterLevel_High_stmt(self, info, segment): - t = "" - t+= """ -There is a critical threat to life and property from coastal -flooding, potentially having a high impact. 
The concern is for -the chance of major coastal flooding to occur in areas within the -surge zone, resulting in very damaging and life-threatening -inundation. If realized, people within the threatened areas who -failed to heed official evacuation orders will have needlessly -placed their lives in grave danger and may be swept away. - -Most coastal communities will likely be heavily damaged, with -many homes and businesses near the shore destroyed by battering -waves and floating debris. Some shoreside condominiums and hotels -may also be damaged, especially those with inadequate structural -support. Flood waters are likely to extend well inland, further -expanding the overall threat to life and property. Most vehicles -of any type will likely be submerged or swept away. - -Severe beach erosion will occur. Most roads and some bridges will -likely be damaged or washed out, leaving entire flood-prone -coastal communities cut off, perhaps for a week or more, and -with no power or water. -""" - return t - - def _totalWaterLevel_Moderate_stmt(self, info, segment): - t = "" - t+= """ -There is a significant threat to life and property from coastal -flooding, potentially having a moderate impact. The concern is -for the chance of moderate coastal flooding to occur in areas -within the surge zone, resulting in damaging and -life-threatening inundation. If realized, people within the -threatened areas who failed to heed official evacuation orders -will have needlessly placed their lives in danger. This is -especially true for those staying behind in vulnerable locations -such as homes and businesses near the shore, and one story -dwellings in flood-prone areas. - -Several coastal communities will likely be damaged, with those -structures not raised or protected by a seawall being subject to -significant flooding, especially during high tide. Large waves -and pounding surf will accentuate property damage in exposed -locations. Flood waters may extend well inland in spots. 
Many -cars will likely be submerged or washed away. - -Substantial beach erosion will occur. Many roads will likely be -damaged or washed out by the flood waters, leaving sections of -coastal communities in flood prone areas temporarily cut off. -Roadway travel may be dangerous with several roads closed. -""" - return t - - def _totalWaterLevel_Low_stmt(self, info, segment): - t = "" - t+= """ -There is an elevated threat to life and property from coastal -flooding, potentially having a low but notable impact. The -concern is for the chance of minor coastal flooding to occur in -areas within the surge zone, resulting in shallow inundation. If -realized, people within the threatened areas who failed to act -according to their personal disaster plan will have needlessly -placed themselves at some measure of risk. - -Many homes and businesses along the shoreline, or in flood-prone -areas, will likely experience some water entering inside, -Especially for those structures not raised or protected by a -seawall. Higher waves and pounding surf will increase the -likelihood of property damage near the coast, especially in -exposed locations. Some cars may take on water or even become -displaced. - -Moderate beach erosion will occur, which may become substantial -if conditions extend through multiple high tides. Several roads -in flood-prone areas will likely be closed. -""" - return t - - ##################################################################################### - ### Segment statements and thresholds -- these are the templates for phrasing - ### I am calling them stmts to distinguish from the text product "phrases" that - ### use the "tree, node" infrastructure. - ### These stmts are simpler (at least at this point)... - - ## In general, "stmt" methods do not add periods or carriage returns - ## It is up to the calling method to do so - - def _prob_stmts(self, info, ifWording=False): - t="" - probHurricane = self._probHurricane_stmt(info, end=". 
") - probTropStorm = self._probTropStorm_stmt(info, end=". ") - if probHurricane != "" and probTropStorm != "": - t+=probHurricane + "Also, " + probTropStorm - else: - t+= probHurricane - t+= probTropStorm - t+=self._probTrend_stmt(info, end=". ") - if self._checkOnsetTime(info): - t+=self._onsetTropStorm_stmt(info, ifWording, end=". ") - t+=self._onsetHurricane_stmt(info, ifWording, end=". ") - return t - - def _probHurricane_thresholds(self): - return { - "littleChance": 3, - "chance": 6, - "onset": 6, # To trigger onset statement - } - - def _probTropStorm_thresholds(self): - return { - "littleChance":10, - "chance": 20, - "onset": 20, # To trigger onset statement - } - - def _prob_threshold(self): - # If the difference between min and max probabilities are greater - # than this, a range will not be reported. For example, - # Instead of "20 to 50 PERCENT", we would say "up to 50 percent" - return 10 - - def _probStorm_stmt(self, info, thresholds, minProb, maxProb, - conditions="Hurricane", end=""): - t="" - if minProb is None or maxProb is None: - return t - little = thresholds.get('littleChance', 3) - chance = thresholds.get('chance', 6) - minProb = int(minProb) - maxProb = int(maxProb) - if maxProb < little: - t+="There is little chance for "+conditions+" conditions at this time" - else: - t+="The chance for "+conditions+" conditions at this time is " - if maxProb > chance: - if minProb < little: - t+="less than or equal to "+ `maxProb` - elif minProb >= maxProb: - t+= `maxProb` - elif maxProb-minProb <= self._prob_threshold(): - t+= `minProb` + ' TO '+ `maxProb` - else: - t+= 'UP TO ' + `maxProb` - t+= " percent" - else: t+="very small" - return t + end - - def _probHurricane_stmt(self, info, end=""): - thresholds = self._probHurricane_thresholds() - return self._probStorm_stmt( - info, self._probHurricane_thresholds(), info.minProb64, info.maxProb64, - conditions="Hurricane", end=end) - - def _probTropStorm_stmt(self, info, end=""): - thresholds = 
self._probTropStorm_thresholds() - return self._probStorm_stmt( - info, self._probTropStorm_thresholds(), info.minProb34, info.maxProb34, - conditions="Tropical Storm", end=end) - - def _probTrend_stmt(self, info, end=""): - t="" - if info.pwstrend is None: - t+=self._frame("This represents a general {upward/downward/steady} trend since the last forecast" + end) - - else: - t+="This represents a general " - if info.pwstrend > 3: t+= " upward" - elif info.pwstrend > -3 and info.pwstrend < 3: t+= " steady" - else: t+= " downward" - t+=" trend since the last forecast" + end - return t - - def _checkOnsetTime(self, info): - # Check onset time. If <= 24 hours from current time, return False - if info.wind34Time is None: - return False - curTime = AbsTime.AbsTime(self._argDict["creationTime"]) - if info.wind34Time.startTime() <= curTime + 24*3600: - return False - return True - - def _onsetHurricane_stmt(self, info, ifWording=False, end=""): - thresholds = self._probHurricane_thresholds() - t="" - if ifWording: - condition = info.maxProb64 > thresholds.get('onset', 6) - else: - condition = info.maxWind >= 64 - if condition: - if ifWording: - t+="If hurricane conditions were to occur, the most likely period of onset is " - else: - #t+="The most likely period of onset of hurricane conditions is " - t+="The onset of hurricane conditions could start as early as " - t+=self._formatPeriod(info.maxINTprob64, resolution=6) - t+=end - return t - - def _onsetTropStorm_stmt(self, info, ifWording=False, end=""): - thresholds = self._probTropStorm_thresholds() - t="" - if ifWording: - condition = info.maxProb34 > thresholds.get('onset', 20) - else: - condition = info.maxWind >= 34 - - if condition: - if ifWording: - t+="If tropical storm conditions were to occur, the most likely period of onset is " - else: - #t+="The most likely period of onset of tropical storm conditions is " - t+="The onset of tropical storm conditions could start as early as " - 
t+=self._formatPeriod(info.maxINTprob34, resolution=6) - t+=end - return t - - def _beginWind_stmt(self, value, threshold, timeRange, intro=None, end=""): - t="" - if value >= threshold: - if intro is None: - intro="Tropical storm force winds are currently forecast to begin affecting the area " - t+=intro + self._formatPeriod(timeRange) - t+=end - return t - - def _windContinue_stmt(self, info, period, intro=None, end=""): - t="" - if intro is not None: t+=intro - else: t+="Winds will continue " - if period is None: return t + end - t+="through " + self._formatPeriod(period, useEndTime=True) + end - return t - - def _fallBelow_stmt(self, info, intro=None, marine=False, end=""): - t= "" - if info.windDur[64] is None and info.windDur[34] is None: return t - if intro is None: intro = "Winds are not forecast to fall below " - hurricane = False - t+=intro - if info.maxWind >= 64 and info.windDur[64] is not None: - t+="hurricane force until "+ self._formatPeriod(info.windDur[64], useEndTime=True) - hurricane = True - if info.windDur[34] is not None: - if hurricane: t+=", and below " - if marine: t+="gale force until " - else: t+="tropical storm force until " - t+=self._formatPeriod(info.windDur[34], useEndTime=True) - t+=end - return t - - def _formatPeriod(self, period, wholePeriod=False, shiftToLocal=True, useEndTime=False, - resolution=3): - # Format period (a timeRange) resulting in - # DAY + MORNING / AFTERNOON / EVENING / OVERNIGHT. - # If wholePeriod, format FROM ... TO... 
- - #print "\nFormat period", wholePeriod, period - if period is None: return "" - if useEndTime: - startTime = period.endTime() - else: - startTime = period.startTime() - result = self._getTimeDesc(startTime, resolution, shiftToLocal) - #print "result", result - if wholePeriod: - endResult = self._getTimeDesc(period.endTime(), resolution, shiftToLocal) - #print "endResult", endResult - if result != endResult: - result=result + " TO "+ endResult - return result - - def _getTimeDesc(self, startTime, resolution=3, shiftToLocal=True): - # Create phrase such as Tuesday morning - # Handle today/tonight and "this" morning/afternoon/etc.. - # - print "\n\n**************Formatting Period for GMT starttime ", startTime - labels = self.Labels()["SimpleWorded"] - currentTime = self._issueTime - print " currentTime", currentTime - if shiftToLocal: - currentLocalTime, shift = self.determineTimeShift() - startTime = startTime + shift - currentTime = currentTime + shift - print " shift, shifted start, current", shift/3600, startTime, currentTime - hour = startTime.hour - prevDay = False - prevDay, partOfDay = self._getPartOfDay(hour, resolution) - if prevDay: - startTime = startTime - 24*3600 - todayFlag = currentTime.day == startTime.day - if todayFlag: - if partOfDay.find("midnight")>0: todayWord = "Tonight" - else: todayWord = "This" - weekday = todayWord - else: - weekday = labels["Weekday"][startTime.weekday()] - if partOfDay.find("") >= 0: - result = partOfDay.replace('', weekday) - else: - result = weekday + " " + partOfDay - print "Result", result - return result - - def _getPartOfDay(self, hour, resolution): - prevDay = False - if resolution == 3: - if hour < 3: - prevDay = True - partOfDay = "After midnight" - elif hour < 6: - partOfDay = "early morning" - elif hour < 9: - partOfDay = "morning" - elif hour < 12: - partOfDay = "late morning" - elif hour < 15: - partOfDay = "early afternoon" - elif hour < 18: - partOfDay = "late afternoon" - elif hour < 21: - partOfDay = 
"early evening" - else: - partOfDay = "late evening" - else: - if hour < 6: - prevDay = True - partOfDay = "After midnight" - elif hour < 12: partOfDay = "Morning" - elif hour < 18: partOfDay = "Afternoon" - else: partOfDay = "Evening" - return prevDay, partOfDay - - def _wind_stmt_type(self): - # return "categorical" - return "specific" - - def _wind_stmt(self, info, intro=None, units=None, withTiming=True): - t="" - if intro is None: - intro="The latest forecast is for " - t+=intro - descriptor, duration = self._categorical_wind_info(info) - t+= descriptor - if withTiming and duration is not None: - t+= " from " + self._formatPeriod(duration, wholePeriod=True) - return t - - def _categorical_wind_info(self, info): - t="" - if info.maxWind >= 64: - t+="Hurricane force winds" - duration = info.windDur[64] - elif info.maxWind >=50: - t+="Strong tropical storm force winds" - duration = info.windDur[50] - elif info.maxWind >=34: - t+="Tropical storm force winds" - duration = info.windDur[34] - else: - t+="Winds to remain below tropical storm force" - duration = None - return t, duration - - def _specific_wind_stmt(self, info, units=None, intro=None, duration=False, windDur=None, - addRange=False, end=None, reportWindValues=True): - t="" - if info.maxWind is None: return t - if intro is None: - intro = "The latest area forecast is for maximum winds of " - t+= intro - - if reportWindValues: - t+=self._formatWindRange(info, info.maxWind, units, "Wind") - if addRange: t+= " range" - if info.maxGust is not None: - t+=" with gusts to " - t+=self._formatWindValue(info, info.maxGust, units, "WindGust") - if windDur is None: - windDur = info.windDur[info.maxWind] - if duration and windDur is not None: - t+= " for " - duration = windDur.duration()/3600 - if duration <= 3: t+= "a few " - elif duration <= 6: t+= "several " - else: t+= "many " - t+= "hours" - if end is not None: t+=end - return t - - def _formatWindValue(self, info, value, units=None, element="Wind"): - if value 
is None: return "" - if self._getUnits(info, units) == "mph": - value = self._ktToMph(value, element) - units = " mph" - else: - units = " knots" - return `int(value)` + units - - def _getUnits(self, info, units=None): - #{UNIT} = equal to MPH if public zone segment or KNOTS if marine segment. - # If in Overview or a combined segment (Not possible this season) then default to MPH. - # If called from overview, set units == "mph" - if units is not None: return units - if info.anyLand: return "mph" - else: return "kts" - - def _formatWindRange(self, info, windKts, units, element): - # Add a range to hiVal and report it - if windKts is None: return "" - units = self._getUnits(info, units) - if units == "mph": - hiVal = self._ktToMph(windKts, element) - unitStr = " mph" - else: - hiVal = windKts - unitStr = " knots" - lowVal = self._windRange_value(windKts, hiVal) - return `int(lowVal)` + " TO " + `int(hiVal)` + unitStr - - def _windRange_value(self, windKts, windValue): - # Given windValue in kts, return the lower range value - if windKts > 52: return windValue - 20 - elif windKts > 34: return windValue - 10 - return windKts - 5 - - def _hurricaneWind_categories(self): - # Dictionary representing wind thresholds in kts - # for category 1, 2, 3, 4 or 5 hurricanes. 
- return { - 'Cat1': (64, 83), - 'Cat2': (83, 96), - 'Cat3': (96, 114), - 'Cat4': (114, 136), - 'Cat5': (136, 250), - } - - def _checkCategory(self, wind, category): - minVal, maxVal = self._hurricaneWind_categories().get(category, (None, None)) - if wind >=minVal: - return True - return False - - def _getCategoryInfo(self, wind): - catDict = self._hurricaneWind_categories() - - for key, label in [ - ("Cat5","Catastrophic category 5 hurricane force "), - ("Cat4", "Destructive category 4 hurricane force "), - ("Cat3","Very dangerous category 3 hurricane force "), - ("Cat2", "Category 2 hurricane force "), - ("Cat1", "Category 1 hurricane force "), - ]: - minVal, maxVal = catDict[key] - if wind >= minVal: - return label - if wind >= 50: - return "Strong tropical storm force " - elif wind >= 34: - return "Tropical storm force " - return "Strong " - - def _getCategoryDamage(self, wind): - # Convert from knots to mph - wind_mph = self._ktToMph(wind, "Wind") - if wind_mph > 130: - return "Catastrophic damage" - elif wind_mph > 110: - return "Devastating damage" - elif wind_mph > 90: - return "At least extensive damage" - elif wind_mph > 75: - return "At least widespread damage" - elif wind_mph > 60: - return "At least damaging winds" - elif wind_mph > 50: - return "At least damaging winds likely" - elif wind_mph > 40: - return "At least minor to locally moderate damage" - elif wind > 30: - return "At least minor damage" - else: - return "" - -## catDict = self._hurricaneWind_categories() -## for key, label in [ -## ("Cat5","Catastrophic damage"), -## ("Cat4", "At least devastating damage"), -## ("Cat3", "At least extensive damage"), -## ("Cat2", "At least widespread damage"), -## ("Cat1", "At least moderate damage"), -## ]: -## minVal, maxVal = catDict[key] -## if wind >= minVal: -## return label -## if wind >= 50: -## return "At least minor to locally moderate" -## elif wind >= 34: -## return "At least minor damage" -## return "Damage" - - def _windDesc(self, info): - 
if info.maxWind >= 64: - return "hurricane force " - elif info.maxWind >= 50: - return "strong tropical storm force " - elif info.maxWind >= 34: - return "tropical storm force " - else: - return "strong " - - def _marineWindDesc(self, info): - if info.maxWind >= 64: - return "hurricane force " - elif info.maxWind >= 48: - return "storm force " - elif info.maxWind >= 34: - return "gale force " - else: - return "strong " - - def _potentialImpact_thresholds(self): - # Units are mph - return { - 'noImpact': 30, - 'minor': 40, - 'moderate': 50, - 'damageLikely': 60, - 'damageExpected': 75, - 'danger': 90, - 'extremeDanger': 110, - 'devastating': 130, - } - - def _potentialImpact_stmt(self, info): - if info.allMarine: # No impact statements for marine yet. - return "" - - thresholds = self._potentialImpact_thresholds() - t="" - if info.maxWind is None: return t - # Convert to mph -- use avg Wind value - wind_mph = self._ktToMph(info.maxWind, "Wind") - if wind_mph <= thresholds.get('noImpact', 30): - return t - t+="\n" - if wind_mph <= thresholds.get('minor', 40): - t+="Minor damage may occur to older mobile homes. Residents should move loose items indoors, such as garbage cans and outdoor furniture, as they will be blown around. Newly planted or young trees and shrubs may be uprooted if not secured properly. Isolated power outages will be possible.\n" - elif wind_mph <= thresholds.get('moderate', 50): - t+="Minor to moderate damage is likely to many mobile homes, especially those that have canopies, awnings, or carports. Poorly constructed homes may sustain minor wall damage and partial roof removal. Other homes may have minor roof and siding damage. Some loose outdoor items will be tossed around and may cause additional damage. A few power lines will be knocked down resulting in scattered power outages. Some large branches of healthy trees will be snapped. 
Most newly planted trees and shrubs will be damaged or uprooted.\n" - elif wind_mph <= thresholds.get('damageLikely', 60): - t+="Damaging winds are likely. Most poorly anchored mobile homes will be damaged, some severely. Other homes may have damage to shingles, siding, gutters and windows, especially if these items are not properly secured. Loose outdoor items will become airborne, causing additional damage and possible injury. Some power lines will be knocked down by falling trees, resulting in scattered power outages. Many large branches of trees will be snapped, and a few trees will be uprooted.\n" - elif wind_mph <= thresholds.get('damageExpected', 75): - t+="Damaging winds are expected. Poorly anchored mobile homes may be destroyed, along with those of old or poor construction. Some well anchored mobile homes will have substantial damage to roofs, walls, and windows, and could become uninhabitable. Some homes of frame construction will sustain partial wall and roof failure, and possibly blown out windows. Loose outdoor items will become projectiles, causing additional damage and possible injury. Many areas will experience power outages with some downed power poles. Numerous large branches of healthy trees will snap. Some trees will be uprooted, especially where the ground is saturated.\n" - elif wind_mph <= thresholds.get('danger', 90): - t+="Very dangerous winds will produce widespread damage. Airborne debris will cause damage. Persons struck by debris may be injured or possibly killed. The majority of mobile homes will be severely damaged, overturned and uninhabitable. Some homes of frame construction will experience major damage, including roofs being lifted off and walls partially collapsing, leaving them uninhabitable. Well constructed homes will have damage to shingles, siding, and gutters. Windows will be blown out if not properly covered. 
Partial roof failure is expected at some industrial parks, especially to those buildings with light weight steel and aluminum coverings. Some low rise apartment building roofs may be torn off, along with siding and shingle damage. A number of glass windows in high rise buildings will be blown out. Loose outdoor items will become projectiles, causing additional damage and possible injury. Extensive damage to power lines and poles will likely result in widespread power outages that could last from several days to weeks. Numerous large branches will break. Many trees will be uprooted or snapped.\n" - elif wind_mph <= thresholds.get('extremeDanger', 110): - t+="Extremely dangerous winds will cause extensive damage. Structural collapse of some homes could cause severe injuries or possible death. Persons struck by airborne debris risk injury and possible death. Most mobile homes will be destroyed. Numerous homes of poor to average construction will be destroyed or severely damaged, leaving them uninhabitable. Considerable damage to well constructed homes is expected. A number of roofs and exterior walls will fail. Many metal roofs will be torn off buildings at industrial parks. Partial roof and exterior wall failures are likely at low rise apartment buildings. Many windows in high rise buildings will be blown out. Falling and broken glass will pose a significant danger even after the storm. Near total power loss is expected. Potable water could become scarce as filtration systems begin to fail. Many trees will be snapped or uprooted and block numerous roads.\n" - elif wind_mph <= thresholds.get('devastating', 130): - t+="Devastating damage is expected. Collapse of some residential structures will put lives at risk. Airborne debris will cause extensive damage. Persons, pets, and livestock struck by the wind blown debris will be injured or killed. Nearly all mobile homes will be destroyed. 
Most homes will sustain severe damage with potential for complete roof failure and wall collapse. Most industrial buildings will be destroyed, with others experiencing partial roof and wall damage. Most low rise apartment buildings will be severely damaged or destroyed, and others will have partial roof and wall failure. Numerous windows will be blown out of high rise buildings resulting in falling glass, which will pose a threat for days to weeks after the storm. Considerable structural damage to large buildings is possible. Electricity and water will be unavailable for days and perhaps weeks after the storm passes. Most trees will be snapped or uprooted. Fallen trees may cut off residential areas for days to weeks.\n" - else: - t+="Catastrophic damage is expected. Collapse of residential structures will put lives at risk. Severe injury or death is likely for persons, pets, and livestock struck by wind blown debris. Most of the area will be uninhabitable for weeks, perhaps longer. Most homes will be destroyed, with total roof failure and wall collapse. Nearly all industrial buildings and low rise apartment buildings will be severely damaged or destroyed. Nearly all windows will be blown out of high rise buildings resulting in falling glass, which will pose a threat for days to weeks after the storm. Considerable structural damage to large buildings is likely. Nearly all trees will be snapped or uprooted and power poles downed. Fallen trees and power poles will isolate residential areas. Power outages will last for weeks to possibly months. 
Long term water shortages will increase human suffering.\n" - return self._frame(t) - - def _genericImpact_stmt(self, info): - if info.allMarine: - return "" - t="" - damage = self._getCategoryDamage(info.maxWind) - if damage.strip() == "": return t - t+="\n" - t+=self._frame("A general concern should be for the possibility of " + damage + " somewhere within the area.") - return t - - ############################################################### - ### Example TCP product for automated testing - ############################################################### - ## Used for testing and debugging - def _useTestTCP(self): - #return True - return False - - def _TCP_Product(self): - return""" -ZCZC MIATCPAT2 ALL -TTAA00 KNHC DDHHMM -BULLETIN -HURRICANE KATRINA ADVISORY NUMBER 10 -NWS TPC/NATIONAL HURRICANE CENTER Miami FL -11 PM EDT Thu Aug 25 2005 - -...Eye of Katrina moving southwestward across Miami-Dade county... - - -Summary of 1100 PM EDT...0300 UTC...information ------------------------------------------------ -Location...25.5N 80.7W -About 35 miles...55 km SW of Miami Florida -About 20 miles...30 km NW of Homestead Florida -Maximum sustained winds...75 mph...120 km/hr -Present movement...west-southwest or 265 degrees at 8 mph...13 km/hr -Minimum central pressure...984 mb...29.06 inches - - -Watches and Warnings --------------------- -Changes with this advisory... - -*The tropical storm warning and tropical storm watch along the east -coast of Florida north of Jupiter have been discontinued. - - -Summary of Warnings and Watches in effect... - -A Hurricane Warning is in effect for... -*The southeast Florida coast from Jupiter Inlet southward to -Florida City, including Lake Okeechobee. Preparations to protect -life and property should have been completed. - -A tropical storm warning is in effect for... -*All the Florida Keys and Florida Bay from Key West northward -*The gulf coast of Florida from Longboat Key south and eastward -to south of Florida City. 
- -A Tropical Storm Watch is in effect for... -*The Florida west coast from north of Longboat Key to Anclote Key. - -Interests elsewhere along the gulf coast of the United States should -monitor the progress of Katrina. - -For storm information specific to your area, including possible -inland watches and warnings, please monitor products issued -by your local weather office. - - -Discussion and 48-hour outlook ------------------------------- -At 11 PM EDT...0300 UTC...the eye of hurricane Katrina was located -near latitude 25.5 north, longitude 80.7 west. Katrina is moving -toward the southwest near 8 mph...13 km/hr and this motion is -expected to continue during the next several hours. Katrina is -expected to move over the Gulf of Mexico Friday and Saturday. - -Maximum sustained winds are near 75 mph...130 km/hr with higher -gusts. Katrina is a category one hurricane on the Saffir-Simpson -scale. Some additional weakening is anticipated while Katrina is -over land, and it could weaken to a tropical storm early on Friday. -Restrengthening is expected on Friday or Saturday, and Katrina -could become a dangerous hurricane in the Gulf of Mexico in 2 to -3 days. - -Hurricane force winds extend outward up to 10 miles from the -center, and tropical storm force winds extend outward up to -70 miles. A wind gust to 87 mph...140 km/hr was recorded at Miami -National Weather Service Forecast Office/National Hurricane Center -and 81 mph...131 km/hr at the TaMiami airport this evening. - -Estimated minimum central pressure is 984 mb...29.06 inches. - - -Storm hazards -------------- -Storm surge flooding...2 to 4 feet above normal tide levels, can be -expected along the west coast of Florida in areas of onshore flow -south of Venice and in Florida bay. Storm surge should begin to -decrease along the east coast of Florida. - -Rainfall...Katrina is expected to produce a significant heavy -rainfall event over south Florida and the Florida Keys. 
Total -rainfall accumulations of 6 to 10 inches with isolated maximum -amounts of 15 to 20 inches are possible. - -Tornadoes...isolated tornadoes will also be possible over eastern -Florida and the Florida Keys. - - -Next advisory --------------- -Next intermediate advisories...100 AM and 300 AM EDT. -Next complete advisory...500 AM EDT. - -$$ -forecaster Avila - -NNNN -""" -## return """ -##ZCZC MIATCPEP5 ALL -##TTAA00 KNHC DDHHMM -##BULLETIN -##HURRICANE LINDA ADVISORY NUMBER 12 -##NWS TPC/NATIONAL HURRICANE CENTER MIAMI FL EP152009 -##800 PM PDT Wed Sep 09 2009 -## -##...Linda becomes a hurricane, the sixth hurricane of the eastern -##Pacific season... -## -## -##Summary of 800 PM PDT...0300 UTC...information -##---------------------------------------------- -##Location...17.1n 129.4w -##About 1325 miles...2135 km wsw of the southern tip of Baja California -##Maximum sustained winds...80 mph...130 km/hr -##Present movement...northwest or 320 degrees at 6 mph...9 km/hr -##Minimum central pressure...984 mb...29.06 inches -## -## -##Watches and Warnings -##-------------------- -##There are no coastal tropical cyclone watches or warnings in effect. -## -## -##Discussion and 48-hour outlook -##------------------------------ -##At 800 PM PDT...0300 UTC, the center of hurricane Linda was located -##near latitude 17.1 north, longitude 129.4 west. Linda is moving -##toward the northwest near 6 mph...9 km/hr, and this general motion -##is expected to continue for the next couple of days. -## -##Maximum sustained winds are near 80 mph...130 km/hr, with higher -##gusts. Little change in strength is expected tonight and Thursday, -##with Linda forecast to weaken Thursday night and Friday. -## -##Hurricane force winds extend outward up to 25 miles...35 km...from -##the center, and tropical storm force winds extend outward up to -##125 miles...205 km. -## -##Estimated minimum central pressure is 984 mb...29.06 inches. 
-## -## -##Storm Hazards -##------------- -##None affecting land. -## -## -##Next advisory -##-------------- -##Next complete advisory...200 AM PDT. -## -##$$ -##forecaster Beven -##NNNN -##""" - - - - ##################################################################################### - ##################################################### - ### HLS GUI Processing and Configurable Dictionaries for - # the Overview GUI, Situations and Scenarios - - def _processVariableList(self, definition, parent): - # Get Definition variables - for key in definition.keys(): - exec "self._" + key + "= definition[key]" - - segmentList, argDict = self._determineSegments(definition, parent) - if len(segmentList) == 0: - return {("segments", "segments"):[]} - - # Overview GUI - while True: - overviewDict = self._displayGUI(argDict, segmentList, "Overview") - if overviewDict == "UsePrev": - return {("UsePrev", "UsePrev"): True} - elif overviewDict == "Reset": - continue # Display Overview GUI again - if overviewDict is None: - return None - break - - # Situation GUI (per segment) - situationDict = self._displayGUI(argDict, segmentList, "Situation", overviewDict) - if situationDict is None: - return None - - # Scenario GUI (per segment) - scenarioDict = self._displayGUI(argDict, segmentList, "Scenario", situationDict) - if scenarioDict is None: - return None - - # Consolidate information from GUI's - varDict = overviewDict - varDict[("segments:","segments")] = scenarioDict["segments"] - return varDict - - def _determineSegments(self, definition, parent): - # Get the segments based on hazards "overlaid" with combinations file - argDict = {} - - dataMgr = parent - argDict['dataMgr'] = dataMgr - argDict["databaseID"] = self._getDbId(dataMgr, definition['database']) - argDict["ifpClient"] = PyFPClient(VizApp.getWsId(), dataMgr.getSiteID()) - import VTECMessageType - vtecMode = VTECMessageType.getVTECMessageType(self._pil) - argDict["vtecMode"] = vtecMode - gfeMode = 
dataMgr.getOpMode().name() - if gfeMode == "PRACTICE": - argDict["vtecActiveTable"] = "PRACTICE" - else: - argDict["vtecActiveTable"] = "active" - argDict['creationTime'] = int(time.time()/60)*60.0 - argDict["definition"] = definition - accessor = ModuleAccessor.ModuleAccessor() - dfEditAreas = self._defaultEditAreas -# print "dfEditAreas", dfEditAreas - dfEditAreas = accessor.variable(dfEditAreas, "Combinations") - if dfEditAreas is None: - LogStream.logVerbose("Combination file not found: " + dfEditAreas) - return [], None - - # Need to check hazards against all edit areas in the CWA MAOR - allAreas = self._inlandAreas()+self._marineAreas()+self._coastalAreas() - argDict["combinations"]= [(allAreas,"Region1")] - #print "\n****************determineSegments calling getHazardsTable" - hazards = self._getHazardsTable(argDict, self.filterMethod) - argDict["hazards"] = hazards - - # Get the segments resulting from Hazards - - #print "\nRaw Analyzed", hazards.rawAnalyzedTable() - hazSegments = self.organizeHazards(hazards.rawAnalyzedTable()) - print "\nSegments from HazardsTable organizeHazards", hazSegments - combos = dfEditAreas - print "\nSegments from Zone Combiner", combos - # "Overlay" the forecaster-entered combinations onto the segments - segmentList = self._refineSegments(hazSegments, combos) - print "\nNew segments", segmentList - - # Check for all CON - allCON = True - segmentAreas = [] - for segmentAreas in hazSegments: - hazardList = hazards.getHazardList(segmentAreas) - for hazard in hazardList: - action = hazard['act'] - #print "hazard", hazard - if action != "CON": - allCON = False - break - if not allCON: break - argDict["allCON"] = allCON - #print "allCON", allCON - - # Determine if we should have Event Context limited to Abbreviated. 
- # Here are the rules: - # --If there are no Continuations, limit to abbreviated UNLESS - # --If all are HU.S, do not limit to abbreviated, but do limit to Pre or Non Event - # --IF all are CAN, UPG (ignoreActions), do not limit to abbreviated - - noCON = True - allHUS = True - allIgnoreActions = True - - segmentAreas = [] - for segmentAreas in hazSegments: - hazardList = hazards.getHazardList(segmentAreas) - for hazard in hazardList: - action = hazard['act'] - sig = hazard['sig'] - #print "hazard", hazard - if action == "CON": - noCON = False - if sig != "S": - allHUS = False - if action not in self._ignoreActions(): - allIgnoreActions = False - forceAbbrev = noCON - if allHUS or allIgnoreActions: forceAbbrev = False - argDict["forceAbbrev"] = forceAbbrev - argDict["allHUS"] = allHUS - #print "noCON", noCON - #print "allHUS", allHUS - #print "allIgnoreActions", allIgnoreActions - #print "forceAbbrev", forceAbbrev - - # Determine if sigs are watches and/or statements to limit - - watchEC = True - segmentAreas = [] - for segmentAreas in hazSegments: - hazardList = hazards.getHazardList(segmentAreas) - for hazard in hazardList: - sig = hazard['sig'] - if sig == "W": - watchEC = False - break - if not watchEC: break - argDict["watchEC"] = watchEC - - - ### Determine if all actions are cancel to limit to Post Event ot Tropical - - allCAN = True - segmentAreas = [] - for segmentAreas in hazSegments: - hazardList = hazards.getHazardList(segmentAreas) - for hazard in hazardList: - action = hazard['act'] - #print "hazard", hazard - if action != "CAN": - allCAN = False - break - if not allCAN: break - argDict["allCAN"] = allCAN - - return segmentList, argDict - - def _refineSegments(self, hazSegments, combos): - """Break down each segment further according to combos given. - Make sure the resulting segments follow the ordering of the combos. 
- """ - if combos == []: - return hazSegments - newSegments = [] # list of lists - newAreas = [] - for combo, label in combos: - # Each combination will be tested to see if it can stay intact - # i.e. if all areas in the combo are in the same segment - # else split it into like segments - # - # segmentMapping is a list where each entry is - # the hazSegment in which the corresponding combo area appears. - # (We need to define self._segmentList for the mapping function - # to use) - self._segmentList = hazSegments - segmentMapping = map(self._findSegment, combo) - #print " segmentMapping", segmentMapping - - # segmentDict keys will be the hazSegments and - # we will gather all the areas of the combos that appear - # in each of these hazSegments - segmentDict = {} - keyList = [] - for areaName in combo: - #print " Adding", areaName - key = tuple(segmentMapping[combo.index(areaName)]) - if key == (): # If no hazard for area, do not include - continue - if key not in keyList: - keyList.append(key) - segmentDict.setdefault(key,[]).append(areaName) - #print " segmentDict", segmentDict - - # Keep track of the areas that we are including - for key in keyList: - segAreas = segmentDict[key] - newAreas = newAreas + segAreas - newSegments.append(segAreas) - #print " newSegments", newSegments - # Now add in the hazAreas that have not been accounted for - # in the combinations - hazAreas = [] - for hazSegment in hazSegments: hazAreas = hazAreas + hazSegment - for hazSegment in hazSegments: - newSeg = [] - for hazArea in hazSegment: - if hazArea not in newAreas: - newSeg.append(hazArea) - if newSeg != []: - newSegments.append(newSeg) - return newSegments - - def _getDbId(self, dataMgr, db): - pm = dataMgr.getParmManager() - if db in ['Fcst', 'Fcst_Prac','Fcst_Test']: return str(pm.getMutableDatabase()) - elif db == 'Official': return str(pm.getProductDB()) - elif db == 'ISC': - dbs = pm.getIscDatabases() - if len(dbs): - iscDB = str(dbs[-1]) #last one is the real one by 
convention - else: - iscDB = str(DatabaseID.databaseID_default().toJavaObj()) - - return iscDB - - def _findSegment(self, areaName): - for segment in self._segmentList: - if areaName in segment: - return segment - return [] - - def _getHazardsTable(self, argDict, filterMethod, editAreas=None): - # Set up edit areas as list of lists - # Need to check hazards against all edit areas in the CWA MAOR - allAreas = self._inlandAreas()+self._marineAreas()+self._coastalAreas() - argDict["combinations"]= [(allAreas,"Region1")] - dfEditAreas = argDict["combinations"] - editAreas = [] - for area, label in dfEditAreas: - if type(area) is types.ListType: - editAreas.append(area) - elif type(area) is types.TupleType: #LatLon - editAreas.append([self.__getLatLonAreaName(area)]) - else: - editAreas.append([area]) - # Get Product ID and other info for HazardsTable - pil = self._pil - stationID4 = self._fullStationID - productCategory = pil[0:3] #part of the pil - definition = argDict['definition'] - sampleThreshold = definition.get("hazardSamplingThreshold", (10, None)) - # Process the hazards - accurateCities = definition.get('accurateCities', 0) - cityRefData = [] - import HazardsTable - hazards = HazardsTable.HazardsTable( - argDict["ifpClient"], editAreas, productCategory, filterMethod, - argDict["databaseID"], - stationID4, argDict["vtecActiveTable"], argDict["vtecMode"], sampleThreshold, - creationTime=argDict["creationTime"], accurateCities=accurateCities, - cityEditAreas=cityRefData, dataMgr=argDict['dataMgr']) - return hazards - - ################################################################################### - ################################################################################### - ## TK GUI Classes - ## - ## IF you want to override the GUI, you must include all the code - ## from here on. 
This includes the calling method _displayGUI - - def _displayGUI(self, argDict, segmentList, dialogName, infoDict=None): - if dialogName == "Overview": - dialogClass = HLS_Overview - elif dialogName == "Situation": - dialogClass = HLS_Situation - elif dialogName == "Scenario": - dialogClass = HLS_Scenario - dialog = dialogClass(self, argDict, segmentList, infoDict) - status = dialog.status() - LogStream.logVerbose("status="+status) - if status == "Cancel": - return None - elif status in ["Reset", "UsePrev"]: - return status - else: - return dialog.getVarDict() - - -import Tkinter, copy, re - - -class AutoScrollbar(Tkinter.Scrollbar): - # a scrollbar that hides itself if it's not needed. only - # works if you use the grid geometry manager. - def set(self, lo, hi): - if float(lo) <= 0.0 and float(hi) >= 1.0: - # grid_remove is currently missing from Tkinter! - self.tk.call("grid", "remove", self) - else: - self.grid() - Tkinter.Scrollbar.set(self, lo, hi) - def pack(self, **kw): - raise Tkinter.TclError, "cannot use pack with this widget" - def place(self, **kw): - raise Tkinter.TclError, "cannot use place with this widget" - - -class ScrolledBox(Tkinter.Frame,): - def __init__(self, parent=None, side='right', **kw): - """Scrolled Box widget with vertical scrollbar on the right or left. 
- """ - Tkinter.Frame.__init__(self, parent, **kw) - if side == 'right': - ysbCol=1 - csbCol=0 - else: - ysbCol=0 - csbCol=1 - - self.grid_rowconfigure(0, weight=1) - self.grid_columnconfigure(csbCol, weight=1) - - xscrollbar = AutoScrollbar(self, orient=Tkinter.HORIZONTAL) - xscrollbar.grid(row=1, column=csbCol, sticky=Tkinter.E+Tkinter.W) - - yscrollbar = AutoScrollbar(self) - yscrollbar.grid(row=0, column=ysbCol, sticky=Tkinter.N+Tkinter.S) - - canvas = Tkinter.Canvas(self, bd=0,relief=Tkinter.SUNKEN, - xscrollcommand=xscrollbar.set, - yscrollcommand=yscrollbar.set) - self._interior = Tkinter.Frame(canvas) - canvas.create_window(0, 0, window=self._interior, anchor='nw') - - self._canvas = canvas - canvas.bind('', self.configCB) - canvas.grid(row=0, column=csbCol, sticky='nsew') - - xscrollbar.config(command=canvas.xview) - yscrollbar.config(command=canvas.yview) - - def interior(self): - return self._interior - - def configCB(self, event): - self._canvas.config(scrollregion=self._canvas.bbox(Tkinter.ALL)) - - -class HLS_Dialog(StartupDialog.IFPDialog): - def __init__(self, parent, argDict, segmentList, infoDict=None): - self._status = "Cancel" # exception, or user-cancels - self._tkObject_dict = {} # place to store reference to tk objects - self._varDict = {} # all end results must be saved here - self._argDict = argDict - self._segmentList = segmentList - self._infoDict = infoDict - self._parent = parent - StartupDialog.IFPDialog.__init__(self, parent=None, title="HLS") - - def getVarDict(self): - return self._varDict - - def _makeRadioOrCheckList(self, master, label, elementList, default=None, - buttonSide=Tkinter.TOP, frameSide=Tkinter.LEFT, entryField=None, - headerFG=None, headerFont=None, boxType="radio", - listFrameRelief=Tkinter.GROOVE): - listFrame = Tkinter.Frame(master, relief=listFrameRelief, borderwidth=1) - - if label != "": - listLabel = Tkinter.Label(listFrame, text=label, fg=headerFG, font=headerFont) - listLabel.pack(side=Tkinter.TOP, 
fill=Tkinter.X, expand=Tkinter.NO, padx=10) - - ivar = Tkinter.IntVar() - defaultIndex = 0 - ivarList = [] - for element in elementList: - index = elementList.index(element) - if type(element) is types.TupleType: - element, key = element - if boxType== "radio": - button = Tkinter.Radiobutton(listFrame, variable=ivar, text=element, value=index) - else: - ivar = Tkinter.IntVar() - if default is not None and element in default: ivar.set(1) - else: ivar.set(0) - button= Tkinter.Checkbutton(listFrame, variable=ivar, text=element) - ivarList.append(ivar) - button.pack(side=buttonSide, anchor=Tkinter.W, expand=Tkinter.YES, padx=4) - # Look for default - if element == default: - defaultIndex = index - - entryObject = None - if entryField is not None: - entryObject = self._makeEntry(listFrame, entryField) - # packing - listFrame.pack(side=frameSide, expand=Tkinter.NO, fill=Tkinter.Y) #, anchor=Tkinter.N) - #listFrame.pack(side=frameSide, expand=Tkinter.YES, fill=Tkinter.Y, anchor=Tkinter.N) - - if boxType == "radio": - ivar.set(defaultIndex) # set the default - if boxType == "check": - ivar = ivarList - return ivar, entryObject - - def _makeCheckList(self, master, label, elementList, side=Tkinter.TOP, - segmentAreas=None, maxLen=None, colLabels=True): - """Custom Checklists for Scenario GUI, Step 9b. - """ - listFrame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=2) - - if len(label)>0: - listLabel = Tkinter.Label(listFrame, text=label) - listLabel.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO, padx=10) - - # Frames for each column. Nothing fancy, make all the same with optional - # label at the top. Define columns and labels in colL dict. Frames will - # be put in colF dict to use below. Order of columns will be done at the end - # when the columns are packed. 
- colL={'order': '', - 'prev':'Prev', - 'import':'Import', - 'section':'Section', - } - colF = {} - for c in colL.keys(): - colF[c]=Tkinter.Frame(listFrame, relief=Tkinter.FLAT, borderwidth=0) - if colLabels: - Tkinter.Label(colF[c],text=colL[c]).pack(side=Tkinter.TOP,anchor=Tkinter.W) - - # Do for all rows, filling in all columns. - ivarList = [] - for eleDict in elementList: - name = eleDict.get('name', "") - label = eleDict.get('label', "") - #print "********* _makeCheckList dict=",eleDict - - #--- Order Entry box - frame=colF['order'] - if eleDict.get("orderBox", False): - iOrder = Tkinter.Entry(frame, relief=Tkinter.SUNKEN, width=2) - else: - iOrder = Tkinter.Label(frame,text='', width=2) - iOrder.pack(side=Tkinter.TOP, anchor=Tkinter.W,expand=Tkinter.YES) - - #--- usePrev Checkbutton - frame=colF['prev'] - if eleDict.get("usePrev", False): - prevVar = Tkinter.IntVar() - prevVar.set(0) - iPrev = Tkinter.Checkbutton(frame, variable=prevVar, text="") - else: - iPrev = Tkinter.Label(frame,text='', width=3) - prevVar = None - iPrev.pack(side=Tkinter.TOP, anchor=Tkinter.W,expand=Tkinter.NO) - - #--- importMethod - Checkbutton - frame=colF['import'] - if eleDict.get("importMethod", False) or eleDict.get("importPIL", False): - importVar = Tkinter.IntVar() - importVar.set(0) - iImport = Tkinter.Checkbutton(frame, variable=importVar,text="") - else: - iImport = Tkinter.Label(frame,text='', width=1) - importVar = None - iImport.pack(side=Tkinter.TOP, anchor=Tkinter.W,expand=Tkinter.YES) - - #--- Section name Checkbutton - frame=colF['section'] - ivar = Tkinter.IntVar() - defaultOn = eleDict.get("defaultOn", 0) - if type(defaultOn) is types.MethodType: - defaultOn = defaultOn(name, segmentAreas) - ivar.set(defaultOn) - button = Tkinter.Checkbutton(frame, variable=ivar, text=label) - button.pack(side=Tkinter.TOP, anchor=Tkinter.W, expand=Tkinter.YES, padx=0) - - #print "\nAppending", name, button, iOrder, iPrev - ivarList.append((name, ivar, iOrder, prevVar, importVar)) - 
- # packing - listFrame.pack(side=Tkinter.LEFT,expand=Tkinter.YES,fill=Tkinter.Y,anchor=Tkinter.N) - - # Change the order of the colums by the order of list - for c in ['order','prev','section', 'import']: - #for c in ['order','prev','import','section']: - colF[c].pack(side=Tkinter.LEFT,expand=Tkinter.YES, - fill=Tkinter.Y,anchor=Tkinter.N) - return ivarList - - def _makeEntry(self, frame, text, width=20): - label = Tkinter.Label(frame, text=text) - label.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.NO) - entry = Tkinter.Entry(frame, relief=Tkinter.SUNKEN, width=width) - entry.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.NO) - return entry - - def _makeSegmentColumns(self, segNum, frame, segmentAreas): - # Need standard widths so the columns line up across segments - sn_width = 4 - hz_width = 15 - - widgets = [] - - segNumFrame = Tkinter.Frame(frame, relief=Tkinter.FLAT, width=sn_width) - label = Tkinter.Label(segNumFrame, text=`segNum`) - label.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.YES) - #segNumFrame.pack(side=Tkinter.LEFT, expand=Tkinter.NO, - # fill=Tkinter.Y, anchor=Tkinter.N) - widgets.append(segNumFrame) - - landAreas = self._parent._inlandAreas() + self._parent._coastalAreas() - areaDisplayType_land, width1 = self._parent._areaDisplayType_land() - areaDisplayType_marine, width2 = self._parent._areaDisplayType_marine() - zf_width = max(width1, width2) - -# sb = ScrolledBox(frame) - sb = ScrolledBox(frame, side="left") - interior = sb.interior() - - zoneFrame = Tkinter.Frame(interior, relief=Tkinter.FLAT, width=zf_width) - if areaDisplayType_land != 'ugcCode' or areaDisplayType_marine != 'ugcCode': - accessor = ModuleAccessor.ModuleAccessor() - areaDict = accessor.variable(self._parent._areaDictionary,"AreaDictionary") - - segmentAreas.sort() - for area in segmentAreas: - if area in landAreas: - areaDisplayType = areaDisplayType_land - else: - areaDisplayType = areaDisplayType_marine - if areaDisplayType != "ugcCode": - 
try: area = areaDict[area].get(areaDisplayType) - except: pass - if area is None: - area = "" - area= self._linebreak(area, zf_width) - label = Tkinter.Label(zoneFrame, text=area, width=zf_width, anchor=Tkinter.W) - label.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) - zoneFrame.pack(side=Tkinter.LEFT, expand=Tkinter.NO, - fill=Tkinter.Y, anchor=Tkinter.N) - #sb.pack(side=Tkinter.LEFT, fill=Tkinter.BOTH, expand=Tkinter.NO) - interior.update() - h=interior.winfo_reqheight() - sizeDict = self._parent._GUI_sizing_dict() - zoneLines = sizeDict["zoneLines"] - charSize = sizeDict["charSize"] - heightLimit = zoneLines * charSize - if h > heightLimit: - h = heightLimit - w=interior.winfo_reqwidth() - sb._canvas["height"] = h - sb._canvas["width"] = w - widgets.append(sb) - #sb.pack(side=Tkinter.LEFT) - - hazardFrame = Tkinter.Frame(frame, relief=Tkinter.FLAT, width=hz_width) - hazardTable = self._argDict["hazards"] - hazards = hazardTable.getHazardList(segmentAreas) - if hazards == []: - hazards = [{'phensig':'None'}] - hazardKeys = [] - addEntry=False - # Updated code below to make Situation selection smarter - for hazard in hazards: - hazKey = hazard['phensig'] + " " + hazard['act'] - sitKey = hazard['act'] + hazard['phen'] + "." 
+ hazard['sig'] - hazardKeys.append(hazard['phen']+"."+hazard['sig']) - label = Tkinter.Label(hazardFrame, text=hazKey,width=hz_width) - label.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) - if hazard['phen'] == "HU" and hazard['sig'] == "S": - addEntry = True - #hazardFrame.pack(side=Tkinter.LEFT, expand=Tkinter.NO, - # fill=Tkinter.Y, anchor=Tkinter.N) - widgets.append(hazardFrame) - print "\n\n***********************" - print "sitKey is ", sitKey - return widgets, hazardKeys, sitKey, addEntry - - def _linebreak(self, phrase, linelength, breakStr=[" ", "..."]): - # Break phrase into lines the given linelength - if len(phrase) <= linelength: return phrase - start = 0 - str = "" - further = 0 - while start < len(phrase): - end = start + linelength + further - if end >= len(phrase): - str = str + phrase[start:len(phrase)] + "\n" - break - breakFound = 0 - #search for break characters in string - for breakChars in breakStr: - ind = string.rfind(phrase, breakChars, start, end) - if ind >= 0: - breakFound = 1 - break - #if not found, then we need to search further, this makes the - #line too long, but it is better than simply splitting a word - #in the middle of it. 
- if breakFound == 0: - further = further + 1 - continue - - if breakChars != " ": - # We want to preserve the break characters, not drop them - includeInd = ind + len(breakChars) - else: - includeInd = ind - - str = str + phrase[start:includeInd] + "\n" - start = ind + len(breakChars) - further = 0 - return str - - def _makeLine(self, interior, row, columnspan, width=200, char="-"): - row = row+1 - lineFrame = Tkinter.Frame(interior, relief=Tkinter.FLAT) - text="" - for i in range(width): text = text + char - label = Tkinter.Label(lineFrame, text=text) - label.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.NO) - lineFrame.grid(row=row, columnspan=columnspan) - return row - - def cancelCB(self): - self._status = "Cancel" - #self.cancel() - self.withdraw() - self.destroy() - - def _entryName(self, name): - return name+"_entry" - - def _makeTuple(self,str): - str = re.sub('(?im)[^_a-z]', '', str) - return (str+":",str) - - def _setVarDict(self, key, value, options=None): - if options is not None: - value = options[value] - if type(value) is types.TupleType: - value = value[1] - self._varDict[self._makeTuple(key)] = value - - def status(self): - return self._status - - def buttonbox(self): - # override the existing ok/cancel button box, removing it. - # we do this so that we can attach our own hooks into the functions. 
- pass - - -class HLS_Overview(HLS_Dialog): - def __init__(self, parent, argDict, segmentList, infoDict=None): - HLS_Dialog.__init__(self, parent, argDict, segmentList, infoDict) - - def body(self, master): - # build the main display dialog - tkObject_dict = self._tkObject_dict - overviewList = self._parent._overview_list(self._argDict) - endInstructions = self._parent._overviewEndInstructions() - fontDict = self._parent._font_GUI_dict() - - # OVERVIEW header - headerFG, headerFont = fontDict["headers"] - frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - label = Tkinter.Label(frame, text="OVERVIEW", fg=headerFG, font=headerFont) - label.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.NO) - frame.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) - - numBoxes = 3 - - boxes = [] - for i in range(numBoxes): - newBox = Tkinter.Frame(master) - newBox.pack(side=Tkinter.TOP, expand=Tkinter.NO, - fill=Tkinter.Y, anchor=Tkinter.W) - boxes.append(newBox) - - for infoDict in overviewList: - name = infoDict["name"] - label = infoDict["label"] - options = infoDict.get("options", []) - entryField = infoDict.get("entryField", None) - default = infoDict.get("default", None) - optionType = infoDict.get("optionType", "radio") - - index = overviewList.index(infoDict) - if index < 3: - boxNum = 0 - buttonSide=Tkinter.TOP - frameSide = Tkinter.LEFT - elif index in [3,4]: - boxNum = 1 - buttonSide=Tkinter.LEFT - frameSide=Tkinter.TOP - elif index in [5,6]: - boxNum = 2 - buttonSide=Tkinter.TOP - frameSide=Tkinter.LEFT - - box = boxes[boxNum] - - if name == "MainHeadline": entryField = None - - tkObject_dict[name], entryObject = self._makeRadioOrCheckList( - box, label, options, default, buttonSide=buttonSide, frameSide=frameSide, - entryField=entryField, headerFG=headerFG, - headerFont=headerFont, boxType=optionType) - if entryObject is not None: - tkObject_dict[self._entryName(name)] = entryObject - - if name == "MainHeadline": - frame = 
Tkinter.Frame(box, relief=Tkinter.GROOVE, borderwidth=1) - tkObject_dict[self._entryName(name)] = self._makeEntry(frame, "", 80) - frame.pack(fill=Tkinter.X, expand=Tkinter.YES) - - # End Instructions and Buttons - fg, font = fontDict["instructions"] - frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - label = Tkinter.Label(frame, text=endInstructions, fg=fg, font=font) - label.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.NO) - self._makeButtons(frame) - frame.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) - -############## Graying PreviousHLS button - def _makeButtons(self, master): - frame = Tkinter.Frame(master) - buttonList = self._parent._GUI1_configDict().get("buttonList", []) - for button, label in buttonList: - state = Tkinter.NORMAL - if button == "PreviousHLS": - command = self.previousCB - allCON = self._argDict.get("allCON", False) - if not allCON: state = Tkinter.DISABLED - elif button == "Reset": - command = self.resetCB - elif button == "Next": - command = self.okCB - else: # Cancel - command = self.cancelCB - Tkinter.Button(frame, text=label, command=command, width=10, - state=state).pack(side=Tkinter.LEFT, pady=5, padx=10) - frame.pack() - - def resetCB(self): - self._status = "Reset" - self.ok() - - def previousCB(self): - self._status = "UsePrev" - self.ok() - - def okCB(self): - # pull the data from the tkObject_dict before they get toasted - tkObject_dict = self._tkObject_dict - overviewList = self._parent._overview_list(self._argDict) - for infoDict in overviewList: - name = infoDict["name"] - label = infoDict["label"] - options = infoDict.get("options", []) - entryField = infoDict.get("entryField", None) - default = infoDict.get("default", None) - optionType = infoDict.get("optionType", "radio") - - if optionType == "check": - checkList = [] - ivarList = tkObject_dict[name] - for i in range(len(options)): - if ivarList[i].get(): - checkList.append(options[i]) - value = checkList - self._setVarDict(name, 
value) - else: - value = tkObject_dict[name].get() - self._setVarDict(name, value, options) - - if entryField is not None: - entryName = self._entryName(name) - self._setVarDict(entryName, tkObject_dict[entryName].get()) - # close window and set status "Ok" - self._status = "Ok" - self.ok() - -class HLS_Situation(HLS_Dialog): - def __init__(self, parent, argDict, segmentList, infoDict=None): - HLS_Dialog.__init__(self, parent, argDict, segmentList, infoDict) - - def body(self, master): - tkObject_dict = self._tkObject_dict - situations = self._parent._situation_list() - self._situationLabels = [entry['label'] for entry in situations] - fontDict = self._parent._font_GUI_dict() - headerFG, headerFont = fontDict["headers"] - guiLabels = self._parent._GUI_labels() - - sizeDict = self._parent._GUI_sizing_dict() - heightLimit = sizeDict["GUI_height_limit"] - width = sizeDict["GUI_2_width"] - zoneLines = sizeDict["zoneLines"] - - sb = ScrolledBox(master) - interior = sb.interior() - - row = 0 - columns=4 - # SITUATION header - frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - label = Tkinter.Label(frame, text="SITUATIONS", fg=headerFG, font=headerFont) - label.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.NO) - frame.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) -## frame.grid(row=0, columnspan=columns, sticky=Tkinter.W) -## row = row + 1 - - # Labels - frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - text = guiLabels['GUI_2'] - label = Tkinter.Label(frame, fg=headerFG, font=headerFont, text=text) - label.pack(side=Tkinter.LEFT, fill=Tkinter.X, expand=Tkinter.NO) - frame.pack(side=Tkinter.TOP,fill=Tkinter.X, expand=Tkinter.NO) -## frame.grid(row=row, columnspan=columns, sticky=Tkinter.W) -## row=row+1 - - uiSegments = [] - self._segNum = 0 - for segmentAreas in self._segmentList: - widgets, uiSegment = self._makeSegmentFrame(interior, segmentAreas) - uiSegments.append(uiSegment) - column = 0 - for widget in 
widgets: - widget.grid(sticky=Tkinter.N+Tkinter.W, row=row, column=column) - column = column +1 - row=self._makeLine(interior, row, columnspan=columns, width=160) - row = row+1 - tkObject_dict["segments"] = uiSegments - #segBox.pack(fill=Tkinter.X, expand=Tkinter.YES) - bframe = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=0) - self._makeButtons(bframe, row=row, columnspan=columns) - - # Get the requested size of the interior frame. - # While winfo_reqheight should get the size the widget is requesting, - # It seems you still have to call update first, and calling update - # certainly won't hurt - PJ - interior.update() - h=interior.winfo_reqheight() - if h > heightLimit: - h = heightLimit - w=interior.winfo_reqwidth() - if w > width: - w = width - sb._canvas["height"] = h - sb._canvas["width"] = w - - sb.pack(side=Tkinter.TOP, fill=Tkinter.BOTH, expand=Tkinter.YES) - bframe.pack(side=Tkinter.BOTTOM,fill=Tkinter.X, expand=Tkinter.NO) - - def _makeSegmentFrame(self, master, segmentAreas): - #frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - self._segNum+=1 - segNum = self._segNum - widgets, hazardKeys, sitKey, addEntry = self._makeSegmentColumns(segNum, master, segmentAreas) - - # Find situations for segment hazards - # IF the Event Context is "Abbreviated", "NonEvent" or "PostEvent" - # only allow that matching situation - # Further refine the choices based on the action/phen/sig combo - # and the EC - - ec = self._infoDict[("EventContext:", "EventContext")] - onlySituation = None - if ec in ["Abbreviated", "NonEvent", "PostEvent"]: - onlySituation = ec - situationDicts = self._parent._situation_list() - situations = [] - for sitDict in situationDicts: - sitEC = sitDict.get("ec", []) - sitPairs = sitDict.get("hazPairings", []) -## sitActions = sitDict.get("action", []) - if sitKey in sitPairs and ec in sitEC: -## for hazardKey in hazardKeys: -## if hazardKey in sitHazards: - if sitDict not in situations: - sitName = sitDict['name'] - 
if onlySituation and sitName != onlySituation: - continue - situations.append(sitDict) - - print "situations are: ", situations - situationLabels = [sitDict['label'] for sitDict in situations] - situationEntryFrame = Tkinter.Frame(master, relief=Tkinter.FLAT, borderwidth=1) - situationFrame = Tkinter.Frame(situationEntryFrame, relief=Tkinter.FLAT, borderwidth=1) - uiSituation, entryObj = self._makeRadioOrCheckList( - situationFrame, " ",situationLabels, buttonSide=Tkinter.LEFT, frameSide=Tkinter.LEFT, - listFrameRelief=Tkinter.FLAT) - situationFrame.pack(side=Tkinter.TOP, expand=Tkinter.YES, - fill=Tkinter.Y, anchor=Tkinter.W) - - if addEntry: - # Add an entry field for headline plus option to Use previous - entryFrame = Tkinter.Frame(situationEntryFrame, relief=Tkinter.GROOVE, borderwidth=1) - uiEntry = self._makeEntry(entryFrame, "Headline", width=45) - ivarList = self._makeCheckList( - entryFrame, "", [{"name":"Use Prev", "label":"UsePrev"}], colLabels=False) - name, uiUsePrev, iOrder, iPrev, iImport = ivarList[0] - entryFrame.pack(side=Tkinter.TOP) - else: - uiEntry = None - uiUsePrev = None - #situationEntryFrame.pack(side=Tkinter.LEFT, expand=Tkinter.YES, - # fill=Tkinter.Y, anchor=Tkinter.W) - widgets.append(situationEntryFrame) -## -## frame.pack(fill=Tkinter.X, expand=Tkinter.YES) - return widgets, (segNum, segmentAreas, uiSituation, situations, uiEntry, uiUsePrev) - - def _makeButtons(self, master, row, columnspan): - frame = Tkinter.Frame(master) - buttonList = self._parent._GUI2_configDict().get("buttonList", []) - for button, label in buttonList: - if button == "Next": - command = self.okCB - else: # button == "Cancel": - command = self.cancelCB - Tkinter.Button(frame, text=label, command=command, width=10, - state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5, padx=10) - frame.grid(row=row, columnspan=columnspan) - - def okCB(self): - # pull the data from the tkObject_dict before they get toasted - tkObject_dict = self._tkObject_dict - segments = [] 
- #print "\nsegments", tkObject_dict["segments"] - for segNum, segment, uiSituation, situationDicts, uiEntry, uiUsePrev in tkObject_dict["segments"]: - index = uiSituation.get() - sitDict = copy.deepcopy(situationDicts[index]) - if uiEntry is not None: sitDict['userHeadline_HU_S'] = uiEntry.get() - if uiUsePrev is not None: sitDict['usePrev_HU_S_Headline'] = uiUsePrev.get() - segments.append((segNum, segment, sitDict)) - self._varDict["segments"] = segments - - #print "varDict", self._varDict - # close window and set status "Ok" - self._status = "Ok" - self.ok() - -class HLS_Scenario(HLS_Dialog): - def __init__(self, parent, argDict, segmentList, infoDict=None): - HLS_Dialog.__init__(self, parent, argDict, segmentList, infoDict) - - def body(self, master): - - sizeDict = self._parent._GUI_sizing_dict() - heightLimit = sizeDict["GUI_height_limit"] - width = sizeDict["GUI_3_width"] - segments = self._infoDict["segments"] - - sb = ScrolledBox(master) - interior = sb.interior() - - # build the main display dialog - columns=6 - tkObject_dict = self._tkObject_dict - #box = Tkinter.Frame(interior) - row, tkObject_dict["segments"] = self._makeScenarioGUI(interior) - bframe = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=0) - self._makeButtons(bframe, row=row, columnspan=columns) - #box.pack(side=Tkinter.TOP, fill=Tkinter.BOTH, expand=Tkinter.YES) - - interior.update() - h=interior.winfo_reqheight() - if h > heightLimit: - h = heightLimit - w=interior.winfo_reqwidth() - if w > width: - w = width - sb._canvas["height"] = h - sb._canvas["width"] = w - - sb.pack(side=Tkinter.TOP, fill=Tkinter.BOTH, expand=Tkinter.YES) - bframe.pack(side=Tkinter.BOTTOM,fill=Tkinter.X, expand=Tkinter.NO) - - def _makeScenarioGUI(self, master): - # Entry for each segment - - #segBox = Tkinter.Frame(master) - # Labels - row=0 - columns=6 - segments = self._infoDict["segments"] - guiLabels = self._parent._GUI_labels() - fontDict = self._parent._font_GUI_dict() - headerFG, headerFont = 
fontDict["headers"] - texta = guiLabels['GUI_3a'] - textb = guiLabels['GUI_3b'] - labela = Tkinter.Label(master, fg=headerFG, font=headerFont, text=texta) - labela.grid(row=0, columnspan=5) - labelb = Tkinter.Label(master, fg=headerFG, font=headerFont, text=textb) - labelb.grid(row=0, column=5, sticky=Tkinter.W) - #frame.pack(fill=Tkinter.X, expand=Tkinter.YES) - #frame.grid(row=0, columnspan=columns, sticky=Tkinter.W) - row=row+1 - - # Segments - uiSegments = [] - #print "\n\nInfoDict", self._infoDict - for segment in segments: - widgets, uiSegment = self._makeSegmentFrame(master, segment) - uiSegments.append((uiSegment)) - column=0 - for widget in widgets: - widget.grid(sticky=Tkinter.N+Tkinter.W, row=row, column=column) - column = column +1 - row=self._makeLine(master, row, columnspan=columns, width=190) - row = row+1 - #segBox.pack(fill=Tkinter.X, expand=Tkinter.YES) - return row, uiSegments - - def _makeSegmentFrame(self, master, segment): - #frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - segNum, areas, situation = segment - situationLabel = situation['label'] - situationName = situation['name'] - - widgets, addEntry, sitKey, hazardKeys = self._makeSegmentColumns(segNum, master, areas) - - # Situation - situationFrame = Tkinter.Frame(master, relief=Tkinter.FLAT,borderwidth=1) - if len(situationLabel) > 14: - situationLabel = situationLabel[0:14] + "\n" + situationLabel[14:] - label = Tkinter.Label(situationFrame, text=situationLabel, width=15) - label.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) - #situationFrame.pack(side=Tkinter.LEFT, expand=Tkinter.NO, - # fill=Tkinter.Y, anchor=Tkinter.N) - widgets.append(situationFrame) - - # Scenarios - scenarioFrame = Tkinter.Frame(master, relief=Tkinter.FLAT, borderwidth=1, width=30) - scenarios = situation["scenarios"] - uiScenario = self._makeRadioOrCheckList( - scenarioFrame, "", scenarios, buttonSide=Tkinter.TOP, frameSide=Tkinter.LEFT, - listFrameRelief=Tkinter.FLAT) - 
#scenarioFrame.pack(side=Tkinter.LEFT, expand=Tkinter.YES, fill=Tkinter.Y, - # anchor=Tkinter.W) - widgets.append(scenarioFrame) - - # Sections - # If Abbreviated, no sections will be shown - sectionFrame = Tkinter.Frame(master, relief=Tkinter.FLAT, borderwidth=1) - if situationLabel == "Abbreviated": - uiSections = None - text = " " - label = Tkinter.Label(sectionFrame, text=text) - label.pack(side=Tkinter.TOP, expand=Tkinter.NO,fill=Tkinter.Y, anchor=Tkinter.W) - else: - sectionList = self._parent._segmentSections() - # Determine maximum label length -## maxLen = 0 -## for section in sectionList: -## label=section.get('label') -## labelLen = len(label) -## if labelLen > maxLen: maxLen = labelLen -## text = str.ljust(" Prev", maxLen+40) + "Import" -## print "text", maxLen, len(text), text -## usePrevLabel = Tkinter.Label(sectionFrame, text=text, width=maxLen+2) - #usePrevLabel = Tkinter.Label(sectionFrame, text="Prev Import", width=11) - #usePrevLabel.pack(side=Tkinter.TOP, expand=Tkinter.NO,fill=Tkinter.Y, anchor=Tkinter.W) - # Filter for sections to be displayed - sections = [] - for section in sectionList: - inSegments = section.get('inSegments', None) - if inSegments in [None, 'always']: - continue - # Check excludeFromSituations - excluded = section.get('excludeFromSituations', []) - if situationName in excluded: - continue - # Check for whether or not to include - # If ANY zone meets the "includeFor" criteria for the section, - # Then it will be included as an option - includeFor = section.get("includeFor", None) - if includeFor is None: - include = True - elif type(includeFor) is types.MethodType: - name = section.get('name') - include = includeFor(name, areas) - else: - include = False - for area in areas: - if area in includeFor: - include=True - break - if include: sections.append(section) - uiSections = self._makeCheckList(sectionFrame, "", sections, segmentAreas=areas) - #sectionFrame.pack(side=Tkinter.LEFT, expand=Tkinter.NO, - # fill=Tkinter.Y, 
anchor=Tkinter.N) - widgets.append(sectionFrame) - - #frame.pack(fill=Tkinter.X, expand=Tkinter.YES) - return widgets, (segNum, areas, situation, uiScenario, scenarios, uiSections) - - def _makeButtons(self, master, row, columnspan): - # create the basic dialog buttons the user sees (Ok, Cancel) - frame = Tkinter.Frame(master) - buttonList = self._parent._GUI3_configDict().get("buttonList", []) - for button, label in buttonList: - if button == "Ok": - command = self.okCB - else: # button == "Cancel": - command = self.cancelCB - Tkinter.Button(frame, text=label, command=command, width=10, - state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5, padx=10) - #frame.pack() - frame.grid(row=row, columnspan=columnspan) - - def okCB(self): - # pull the data before they get toasted - tkObject_dict = self._tkObject_dict - - segments = [] - for segNum, areas, situation, uiScenario, scenarios, uiSections in tkObject_dict["segments"]: - extraInfo = { - "usePrev_HU_S_Headline":situation.get("usePrev_HU_S_Headline",None), - "userHeadline_HU_S":situation.get("userHeadline_HU_S",None), - } - # Only need the situation name for varDict - situation= situation["name"] - scenarioObj, entryObj = uiScenario - label, scenario = scenarios[scenarioObj.get()] - sections = [] - if uiSections is not None: - for name, iCheck, iOrder, prevVar, importVar in uiSections: - if iCheck.get(): - try: - order = iOrder.get() - except: - order = None - try: - usePrev = prevVar.get() - except: - usePrev = 0 - try: - useImport = importVar.get() - except: - useImport = None - sections.append((name, order, usePrev, - useImport)) - #print "usePrev, useImport", name, usePrev, useImport - segments.append((segNum, areas, situation, scenario, sections, extraInfo)) - self._varDict["segments"] = segments - LogStream.logVerbose("varDict=",self._varDict) - - # close window and set status "Ok" - self._status = "Ok" - self.ok() - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to 
Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Hazard_HLS +# Produces HLS product. +# +# Author: (Initial) Matt Davis/ARX +# OB9.2 Tracy Hansen +# OB9.3 Shannon White/Tracy Hansen/Matt Belk +# OB17.3.1 Shannon White (updated to use InundationMax and remove all references to MSL) +# +# +# Version 6/8/2017 +# ---------------------------------------------------------------------------- + +import GenericHazards +import string, time, re, os, glob, types, copy, LogStream +import ModuleAccessor, SampleAnalysis +from math import * +import AbsTime, DatabaseID, StartupDialog +from com.raytheon.uf.viz.core import VizApp +from com.raytheon.uf.common.gfe.ifpclient import PyFPClient + + +DEG_TO_RAD = 0.017453292 + +from com.raytheon.uf.common.dataplugin.gfe.reference import ReferenceData, ReferenceID +CoordinateType = ReferenceData.CoordinateType + + +import sys, types +sys.argv = [__name__] + +class TextProduct(GenericHazards.TextProduct): + Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) + + Definition["displayName"] = "None" + Definition["outputFile"] = "{prddir}/TEXT/Hazard_HLS.txt" + 
Definition["database"] = "Official" # Source database + Definition["debug"] = 1 + Definition["mapNameForCombinations"] = ["Zones_"] + #Definition["mapNameForCombinations"] = ["Zones_","Marine_Zones_"] + Definition["defaultEditAreas"] = "EditAreas_PublicZones_" + Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display + + Definition["productName"] = "Tropical Cyclone Local Statement" + + Definition["fullStationID" ] = "" + Definition["wmoID" ] = "" + Definition["wfoCityState" ] = "" + Definition["pil" ] = "" + Definition["textdbPil" ] = "" + Definition["awipsWANPil" ] = "" + Definition["site"] = "" + Definition["wfoCity"] = "" + + Definition["areaName"] = "" #optional area name for product + Definition["areaDictionary"] = "AreaDictionary" + Definition["language"] = "english" + Definition["lineLength"] = 66 #Maximum line length + + Definition["purgeTime"] = 8 # Default Expiration in hours if + Definition["includeCities"] = 0 # Cities not included in area header + Definition["cityDescriptor"] = "Including the cities of" + Definition["includeZoneNames"] = 1 # Zone names will be included in the area header + Definition["includeIssueTime"] = 0 # Issue Time will be included in the area header + Definition["easPhrase"] = \ + "URGENT - IMMEDIATE BROADCAST REQUESTED" # Optional EAS phrase to be include in product header + Definition["callToAction"] = 1 + + def __init__(self): + GenericHazards.TextProduct.__init__(self) + + ##################################################################### + ##################################################################### + ### Organization of Formatter Code + + ############################################################### + ### MUST OVERRIDE ZONE DEFINITIONS !!! 
+ ### _inlandAreas, _coastalAreas, _marineAreas, _cwa + ############################################################### + + + ############################################################### + ### Optional Overrides, HLS GUI options and Configuration for + ### Situations and Scenarios + # + # _areaDisplayType_land and _marine -- how zones are displayed in GUI's + # _GUI_labels -- wording for GUI titles + # _font_GUI_dict -- font for GUI titles + # + # _overview_list -- list of Overview GUI frames (GUI 1) + # _overviewEndInstructions + # _overviewSections + # + # _situation_list -- list of situations (each is a dictionary) + # _segmentSections -- list of segment sections (each is a dictionary) + ############################################################### + + ############################################################### + ### Hazards and Additional Hazards + ### allowedHazards is used for segmentation e.g. HU.W, TR.W... + ### allowedHeadlines are additional hazards reported in overview + ### e.g. CF.W, FA.A, TO.A... + ############################################################### + + ############################################################### + # CODE + ############################################################### + ### High level flow of formatter + ### generateForecast, determineTimeRanges, sampleData, + ### preProcessProduct, makeProduct, postProcessProduct... 
+ ############################################################### + + ############################################################### + ### Helper methods -- Getting statistics from grids, + ### summarizing hazards found, determining inland/coastal/marine + ### _getSegmentInfo, _checkHazard, _orderSections, + ### _findInDictList, _accessDict + ### _analysisList_HLS + ###################################################################### + ### Previous Product Helper methods + ###################################################################### + + ###################################################################### + ### OVERVIEW Sections + ###################################################################### + ### SEGMENT Sections + #################################################### + ### Precautionary Preparedness Statement Dictionaries + ###################################################################### + ### Wind Situation/Scenario methods + ###################################################################### + ### Segment statements and thresholds e.g. Wind Statements + ##################################################### + + ############################################################### + ### Example TCP product for automated testing + ############################################################### + + ##################################################### + ### HLS GUI Processing + # + ##################################################################### + ## TK GUI Classes + ##################################################################### + ##################################################################### + + ############################################################### + ### MUST OVERRIDE these methods! 
+ + def _inlandAreas(self): + return [ + #"FLZ052", "FLZ056", "FLZ057", "FLZ061", "FLZ043", + ] + + def _coastalAreas(self): + return [ + #"FLZ039", "FLZ042", "FLZ048", "FLZ049", "FLZ050", "FLZ051", "FLZ055", "FLZ060", + #"FLZ062", "FLZ065", + ] + def _marineAreas(self): + return [ + #"GMZ830", "GMZ850", "GMZ853", "GMZ856", "GMZ856", "GMZ870","GMZ873","GMZ876" + ] + + def _cwa(self): + return "" #"TBW" + + def _cwa_descriptor(self): + return "" # "central west Florida" + + def _maor_descriptor(self): + return "" # "west central Florida waters and the Gulf of Mexico" + + def _cwa_maor_descriptor(self): + return "" #"west Florida waters and the Gulf of Mexico" + + def _localReferencePoints(self): + # Give the name and lat/lon for each local reference point + return [ + #("Tampa Bay, FL", (28.01, -82.48)), + #("Cape Coral, FL", (26.63, -82.00)), + #("Lakeland, FL", (28.04, -81.95)), + #("Sarasota, FL", (27.37, -82.55)), + ] + + def _localReferencePoints_defaults(self): + # Give a list of the local reference point names to be + # turned on by default + return [] # ["Tampa Bay, FL", "Sarasota, FL"] + + ##################################################################################### + ##################################################################################### + ### Optional Overrides, HLS GUI options and Configuration for + ### Situations and Scenarios + + def _overviewFormat(self): + # For overview headlines specify "listAreas" if you want specific + # locations listed. + # Otherwise, specify "generic" for a general "All" or "Portions of" + # the CWA + return { + "land": "listAreas", + "marine": "generic", + } + + def _areaDisplayType_land(self): + # You can set this to any key within the AreaDictionary. + # e.g. 
'ugcName', 'altName', ' + # Also include the width of the display window + #return ('ugcCode', 10) + return ('ugcName', 15) + + def _referencePointLimit(self): + # Give the number of reference points allowed to be chosen + # Also give a label (e.g. "two") for the GUI + return (2, "two") + + def _areaDisplayType_marine(self): + # You can set this to any key within the AreaDictionary. + # e.g. 'ugcName', 'altName', ' + # Also include the width of the display window + return ('ugcCode', 10) + #return ('ugcName', 15) + + ################# + + # 02/28/2011 (SW/MHB) - Modified the GUI behavior so that the ECs are limited to the + # appropriate options. + # + def _overview_list(self, argDict): + allCON = argDict.get("allCON", False) + forceAbbrev = argDict.get("forceAbbrev", False) + allHUS = argDict.get("allHUS", False) + watchEC = argDict.get("watchEC", False) + allCAN = argDict.get("allCAN", False) + step1Options = [] + step6Options = [] + step7Options = [] + if allCON: + step1Options = [ + ("Use This GUI to Create Overview Text", "CreateFromGUI"), + ("Use Previous Situation Overview Text", "UsePrev"), + ] + else: + step1Options = [ + ("Use This GUI to Create Overview Text", "CreateFromGUI"), + ] + if forceAbbrev: + step6Options = [ + ("Abbreviated Issuance (WWA First Issuance Everywhere at the same time)", "Abbreviated"), + ] + step7Options = [ + ("Shortly (for Abbreviated Issuances)", "Shortly"), + ] + elif allCAN: + step6Options = [ + ("Post-Event (All hazards over everywhere)", "PostEvent"), + ("Post-Tropical", "PostTropical"), + ] + step7Options = [ + ("Last Issuance", "LastIssuance"), + ] + elif allHUS: + step6Options = [ + ("Non-Event (WWA Not Expected)", "NonEvent"), + ("Pre-Event (WWA Possible Soon; Early Evacuations)", "PreEvent"), + ("Post-Event (WWA Over, Statements Still Needed)", "PostEvent"), + ] + step7Options = [ + ("As Conditions Warrant", "Conditions"), + ("Enter Approximate Time (below)", "Enter"), + ] + elif watchEC: + step6Options = [ + ("Watches 
(No Warnings)", "Watch"), + ] + step7Options = [ + ("As Conditions Warrant", "Conditions"), + ("Enter Approximate Time (below)", "Enter"), + ] + else: + step6Options = [ + ("Warnings (With or Without Watches)", "Warning"), + ("Conditions Occurring (With Warnings)", "Conditions"), + ("Post-Event (WWA Ended and replaced by HU.S)", "PostEvent"), + ] + step7Options = [ + ("As Conditions Warrant", "Conditions"), + ("Enter Approximate Time (below)", "Enter"), + ] + + return [ + { + "name": "OverviewEditMode", + "label":"Step 1. Choose Overview Edit Mode", + "options": step1Options, + }, + { + "name": "StormInfo", + "label": "Step 2. Obtain Storm Type/Name/Info", + "options": [ + "TCPAT1", "TCPAT2", "TCPAT3", "TCPAT4", "TCPAT5", + "Enter PIL below (e.g. TCPEP1):", + ], + "entryField": " ", + }, + { + "name": "Uncertainty", + "label": "Step 3. Declare Degree of Uncertainty", + "options": [ + ("Smaller Degree", "Low"), + ("Average Degree", "Average"), + ("Larger Degree", "High"), + ("N/A", "N/A"), + ], + "default": "N/A", + }, + { + "name":"LocalReferencePoints", + "label": "Step 4. Locate Storm Relative to Local Reference Points (choose at most "\ + +self._referencePointLimit()[1]+")", + "optionType": "check", + "options": self._localReferencePoints(), + "default": self._localReferencePoints_defaults(), + }, + { + "name": "MainHeadline", + "label": "Step 5. Input Main Headline (required) ", + "options": [ + ("Enter Unique Headline (below)", "Enter"), + ("Use Previous HLS Headline", "UsePrev"), + ("Use Latest TCP Headline", "UseTCP"), + ], + "entryField": "", + }, + { + "name":"EventContext", + "label": "Step 6. Establish Event Context for CWA/MAOR (related to TC WWAs only)", + "options": step6Options, + }, + { + "name": "NextUpdate", + "label": "Step 7. Indicate Next Update Time", + "options": step7Options, + "default": "Enter Approximate Time (below)", + "entryField": " e.g. 
6 AM EDT", + }, + ] + + def _overviewEndInstructions(self): + return """Note: Please enter the necessary Overview (CWA/MAOR) information \n above before continuing to the Segmented (Zone Group) information. """ + + def _overviewSections(self): + # A list of dictionaries -- each dictionary represents a section. + # The order of the list is the order the sections will appear in the GUI. + # Fields in the dictionary can be: + # name -- name of section -- THIS should not be changed by the user since + # the code logic keys off this name + # label -- label for the section to appear in the GUI + # title -- text to appear in the product for the section + # endStr -- text to appear at the end of the section. + # NOTE: We are assuming the endStr is UNIQUE within the section and will + # not appear except at the end of the section!! + return [ + { + "name": "Overview_NewInformation", + "label": "New Information", + "title": ".NEW INFORMATION...\n", + }, + { + "name": "AreasAffected", + "label": "Areas Affected", + "title": ".AREAS AFFECTED...\n", + }, + { + "name":"WatchesWarnings", + "label":"Watches/Warnings", + "title":".Watches/Warnings...\n", + }, + { + "name":"StormInformation", + "label":"Storm Information", + "title": ".STORM INFORMATION...\n", + }, + { + "name":"SituationOverview", + "label":"Situation Overview", + "title": ".SITUATION OVERVIEW...\n" + }, + { + "name": "Overview_PrecautionaryPreparednessActions", + "label": "PRECAUTIONARY/PREPAREDNESS ACTIONS", + "title": ".PRECAUTIONARY/PREPAREDNESS ACTIONS...\nPRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n", + "endStr": "\n&&" + }, + { + "name": "NextUpdate", + "label": "Next Update", + "title": ".NEXT UPDATE...\n", + }, + ] + + def _situation_list(self): + # List of dictionaries where each dictionary represents a situation + # Entries in the dictionary can be: + # name -- name of situation -- THIS should not be changed by the user since + # the code logic keys off this name + # label -- label for the situation to 
appear in the GUI + # hazPairings -- list of action/phen/sig combo that need to exist to trigger an option + # ec -- list of a event contexts which will need to overlap with hazPairings + # scenarios -- list of possible scenarios for this + # situation and conditions in (label, name) form. + # MODIFIED 3/7 to add wrap-up scenarios for non-events to Non-Event and Post-Event + # MODIFIED 8/22 to add UPGTR.W to Abbreviated + # NOTE: You can change the scenario labels, but + # The scenario names should not be changed as + # they are tied directly to the code! + # + + return [ + { + "name": "NonEvent", + "label":"Non-Event", +## "action": ["NEW", "CON", "EXA"], +## "hazards": ["HU.S"], + "hazPairings": ["NEWHU.S", "CONHU.S", "EXAHU.S", "CANHU.S"], + "ec": ["NonEvent", "PreEvent", "Watch", "Warning", + "Conditions", "PostTropical"], + "scenarios": [ + ("Non-Event Wind Threat", "ActiveNonEvent"), + ("Cancel Non-Event", "EndNonEvent"), + ], + }, + { + "name": "PreEvent", + "label":"Pre-Event", +## "action": ["NEW", "CON", "EXA"], +## "hazards": ["HU.S"], + "hazPairings": ["NEWHU.S", "CONHU.S", "EXAHU.S"], + "ec": ["PreEvent", "Watch", "Warning", "Conditions"], + "scenarios": [ + ("Advancing Wind Threat", "Advancing"), + ("Peripheral Wind Threat", "Peripheral"), + ("In Situ Developing Wind Threat", "InSitu"), + ], + }, + { + "name": "Abbreviated", + "label": "Abbreviated", +## "action": ["NEW", "EXA", "UPG", "CAN"], +## "hazards": ["TR.A", "HU.A", "TR.W", "HU.W", "TY.A", "TY.W", "HU.S"], + "hazPairings": ["NEWHU.S","EXAHU.S","NEWHU.A","EXAHU.A","NEWTY.A","EXATY.A", + "NEWTR.A","EXATR.A","NEWHU.W","EXAHU.W","NEWTY.W","EXATY.W", + "NEWTR.W","EXATR.W","CANHU.S","UPGHU.A","UPGTY.A","UPGTR.A", + "UPGTR.W","CANHU.W","CANTY.W","CANTR.W", + "CANTR.A","CANHU.A","CANTY.A"], + "ec": ["Abbreviated", "Watch", "Warning", "Conditions"], + "scenarios": [ + ("First Issuance", "FirstIssuance"), + ], + }, + { + "name": "Watch", + "label": "Watch", +## "action": ["NEW","CON", "UPG"], +## 
"hazards": ["TR.A", "HU.A", "TY.A"], + "hazPairings": ["CONHU.A","CONTY.A","CONTR.A"], + "ec": ["Watch", "Warning", "Conditions"], + "scenarios": [ + ("Advancing Wind Threat", "Advancing"), + ("Peripheral Wind Threat", "Peripheral"), + ("In Situ Developing Wind Threat", "InSitu"), + ], + }, + { + "name": "Warning", + "label": "Warning", +## "action": ["NEW","CON", "UPG", "CAN"], +## "hazards": ["TR.W", "HU.W", "TY.W"], + "hazPairings": ["CONHU.W", "CONTY.W", "CONTR.W", "CANHU.A", "CANTY.A"], + "ec": ["Warning", "Conditions"], + "scenarios": [ + ("Advancing Wind Threat", "Advancing"), + ("Peripheral Wind Threat", "Peripheral"), + ("In Situ Developing Wind Threat", "InSitu"), + ], + }, + { + "name": "Conditions", + "label": "Conditions", +## "action": ["NEW","CON", "UPG", "CAN"], +## "hazards": ["TR.W", "HU.W", "TY.W"], + "hazPairings": ["CONHU.W", "CONTY.W", "CONTR.W"], + "ec": ["Conditions"], + "scenarios": [ + ("Imminent Wind Threat", "Imminent"), + ("Ongoing Wind Threat", "Ongoing"), + ("Diminishing Wind Threat", "Diminishing"), + ], + }, + { + "name": "PostEvent", + "label": "Post-Event", +## "action": ["NEW", "EXA", "CON", "CAN"], +## "hazards": ["TR.A", "HU.A", "TR.W", "HU.W", "TY.A", "TY.W", "HU.S"], + "hazPairings": ["CANHU.W", "CANTY.W", "CANTR.W", "NEWHU.S", "CONHU.S", + "EXAHU.S","CANHU.S", "CANHU.A", "CANTY.A", "CANTR.A"], + "ec": ["Warning", "Conditions", "PostEvent"], + "scenarios": [ + ("Immediate Rescue/Recovery", "Immediate"), + ("Minor/No Impact", "NoImpact"), + ("Longer-term Rescue/Recovery","LongTerm"), + ], + }, + { + "name": "PostTropical", + "label": "Post-Tropical", +## "action": ["CAN"], +## "hazards": ["TR.W", "HU.W"], + "hazPairings": ["CANHU.W", "CANTR.W"], + "ec": ["PostTropical"], + "scenarios": [ + ("In Progress", "InProgress"), + ("Completed Transition", "Completed"), + ], + }, + ] + + def importMethod(self, argDict, segment): + # This is a dummy method for importing text + # Enter code here to get text from a flat file or previous 
product if desired + # Then specify this in the "importMethod" entry for the desired segment + # in the _segmentSections set up + return "" + + def _segmentSections(self): + # A list of dictionaries -- each dictionary represents a section. + # The order of the list is the order the sections will appear in the GUI. + # Fields in the dictionary can be: + # name -- name of section -- THIS should not be changed by the user since + # the code logic keys off this name + # label -- label for the section to appear in the GUI + # inSegments -- "always", "optional" or situation-specific + # excludeFromSituations -- list of situations for which this section is NOT + # to be included + # includeFor -- can be list of areas for which to include this section as an option such that + # if ANY segment area meets the criteria, the section will be included as an + # option OR + # a method to be called with 2 arguments: (name, segmentAreas) which + # should return True/False for the section to be included as an option. + # defaultOn -- IF included as an option in the GUI, specify whether it should be defaulted ON + # Set to True/False OR specify a method to be called. + # The method will be called with 2 arguments: (name, segmentAreas) + # usePrev -- if True, include a "Use Previous" check box on GUI + # importPIL -- specify a product PIL from which to get section information + # If present, a check box will appear on the GUI for importing + # importMethod -- optional method for importing information from an external source. + # Specify a method (see example above) for getting text from + # an external source. + # If present, a check box will appear on the GUI for importing + # NOTE: If both importPIL and importMethod are present, the importPIL will be used. + # orderBox -- if True, include a text box on GUI to enter an order number + # title -- This is the section title that will appear in the product. 
+ # It can be a tuple consisting of: + # (Title for Public zones, Title for Marine Zones) + # For example: + # ("...Winds...\n","...Winds and Seas...\n") + # + # To ADD a new section, you must + # --Add a dictionary for the section in this list + # --Provide a method for producing the contents of the section. + # The name of the method must match the "name" field for the new + # section. (Look at the "Tornadoes" method for an example.) + # For example, if you add a section to this list: + # + # { + # "name": "CoastalHazards", + # "label": "Coastal Hazards", + # "defaultOn": True, + # "includeFor": ["coastal"], + # "orderBox": True, + # "usePrev": True, + # "inSegments": "optional", + # "importMethod": None, + # "title": "...Coastal Hazards...\n", + # }, + + # Then you must have a method which returns a text string: + # + # def CoastalHazards(self, title, argDict, segment, section, info): + # + return [ + { + "name": "NewInformation", + "label": "New Information", + "defaultOn": False, + "includeFor": self._allAreas(), + "usePrev": False, + "inSegments": "optional", + "importMethod": None, + "importPIL": None, + "title": "...New Information...\n", + }, + { + "name": "PrecautionaryPreparednessActions", + "label": "PRECAUTIONARY/PREPAREDNESS ACTIONS", + "defaultOn": True, + "includeFor": self._allAreas(), + "usePrev": True, + "inSegments": "optional", + "importMethod": None, + "importPIL": None, + "title": "...PRECAUTIONARY/PREPAREDNESS ACTIONS...\nPRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n", + "endStr":"\n&&", + }, + { + "name": "Probability", + "label": "Probability of Tropical Storm/Hurricane Conditions", + "defaultOn": False, + "includeFor": self._allAreas(), + "inSegments": "optional", + "excludeFromSituations": ["Conditions", "PostEvent", "PostTropical"], + "importMethod": None, + "importPIL": None, + "title": "...Probability of tropical storm/hurricane conditions...\n", + }, + { + "name": "Wind", + "label": "Winds and Seas", + "defaultOn": True, + "includeFor": 
self._allAreas(), + "orderBox": True, + "usePrev": True, + "inSegments": "optional", + "importMethod": None, + "importPIL": None, + "title": ("...Winds...\n","...Winds and Seas...\n"), + }, + { + "name": "StormSurgeTide", + "label": "Storm Surge and Storm Tide", + "defaultOn": True, + "includeFor": self._coastalAreas(), + "orderBox": True, + "usePrev": True, + "inSegments": "optional", + "importMethod": None, + "importPIL": None, + "title":"...Storm surge and storm tide...\n" , + }, + { + "name": "InlandFlooding", + "label": "Inland Flooding", + "defaultOn": True, + "includeFor": self._inlandAreas()+self._coastalAreas(), + "orderBox": True, + "usePrev": True, + "inSegments": "optional", + "importMethod": None, + "importPIL": None, + "title": "...Inland flooding...\n", + }, + { + "name": "Tornadoes", + "label": "Tornadoes and Waterspouts", + "defaultOn": False, + "includeFor": self._allAreas(), + "orderBox": True, + "usePrev": True, + "inSegments": "optional", + "importMethod": None, + "importPIL": None, + "title": ("...Tornadoes...\n","...Tornadoes and Waterspouts...\n") + }, +# { +# "name": "Marine", +# "label": "Marine", +# "defaultOn": True, +# "includeFor": self._marineAreas(), +# "orderBox": True, +# "usePrev": True, +# "inSegments": "optional", +# "importMethod": None, +# "importPIL": None, +# "title":"...Marine...\n" , +# }, + ] + + def _defaultOn_StormSurgeTide(self, name, segmentAreas): + # Default logic will set StormSurgeTide to ON if there are any coastal zones. + # Local offices can add to the list of accepted areas (e.g. if some inland + # zones should have the Storm Surge Tide section defaulted on) + # OR change the logic as in any way desired. 
+ defaultOn = False + for area in segmentAreas: + if area in self._coastalAreas(): + defaultOn = True + return defaultOn + + def _allAreas(self): + return self._inlandAreas() + self._coastalAreas() + self._marineAreas() + + ########## GUI Configuration + + def _GUI_sizing_dict(self): + # This contains values that adjust the GUI sizing. + return { + "GUI_height_limit": 800, # limit to GUI height in canvas pixels + #"GUI_2_width": 820, # width for GUI 2 + "GUI_2_width": 1200, # width for GUI 2 + #"GUI_3_width": 970, # width for GUI 3 + "GUI_3_width": 1200, # width for GUI 3 + "zoneLines": 10, # number of zones to display without scrolling + "charSize": 9, + } + + def _GUI1_configDict(self): + return { + # Order and inclusion of GUI1 buttons + # Each entry is (name of button in GUI code, desired label on GUI) + "buttonList":[ + ("PreviousHLS","PreviousHLS"), + ("Reset","Reset"), + ("Next","Next"), + ("Cancel","Cancel"), + ], + } + + def _GUI2_configDict(self): + return { + # Order and inclusion of GUI1 buttons + # Each entry is (name of button in GUI code, desired label on GUI) + "buttonList":[ + ("Next", "Next"), + ("Cancel", "Cancel"), + ], + } + + def _GUI3_configDict(self): + return { + # Order and inclusion of GUI1 buttons + # Each entry is (name of button in GUI code, desired label on GUI) + "buttonList":[ + ("Ok", "Ok"), + ("Cancel","Cancel"), + ], + } + + def _GUI_labels(self): + return { + 'GUI_2': "Step 8. Choose Situation Per Zone Group", + 'GUI_3a': "Step 9a. Choose Scenario Per Zone Group", + 'GUI_3b':"Step 9b. 
Identify & Order Sections", + } + + def _font_GUI_dict(self): + return { + "headers": ("blue", ("Helvetica", 14, "bold")), + "instructions": (None, ("Helvetica", 12, "italic")), + } + + + ##################################################################################### + + + ############################################################### + ### Hazards and Additional Hazards + # allowedHazards is used for segmentation + # allowedHeadlines are additional hazards reported in overview + + def allowedHazards(self): + tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", "EXP"] + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] + return [ + ('HU.A',allActions,'Hurricane'), + ('HU.W',allActions,'Hurricane'), + ('HU.S',allActions,'Hurricane'), + ('TY.A',allActions,'Typhoon'), + ('TY.W',allActions,'Typhoon'), + ('TR.A',allActions,'Tropical'), + ('TR.W',allActions,'Tropical'), + ] + + def allowedHeadlines(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + return [ + ('FF.A', allActions, 'Flood'), # FLASH FLOOD WATCH + ('FA.A', allActions, 'Flood'), # FLOOD WATCH + ('CF.W', allActions, 'CoastalFlood'), # COASTAL FLOOD WARNING + ('CF.Y', allActions, 'CoastalFlood'), # COASTAL FLOOD ADVISORY + ('CF.A', allActions, 'CoastalFlood'), # COASTAL FLOOD WATCH + ('SU.W', allActions, 'HighSurf'), # HIGH SURF WARNING + ('SU.Y', allActions, 'HighSurf'), # HIGH SURF ADVISORY + ('RP.S', allActions, 'Rip'), # HIGH RIP CURRENT RISK + ('TO.A', allActions, 'Convective'), # TORNADO WATCH + ('SR.W', allActions, 'Marine'), + ('SR.A', allActions, 'Marine'), + ('GL.W', allActions, 'Marine'), + ('GL.A', allActions, 'Marine'), + ('SC.Y', allActions, 'Marine'), + ('SI.Y', allActions, 'Marine'), + ('SW.Y', allActions, 'Marine'), + ('RB.Y', allActions, 'Marine'), + ('HF.W', allActions, 'Marine'), + ('HF.A', allActions, 'Marine'), + ] + + def _ignoreActions(self): + # Ignore hazards with these 
action codes in the overview headlines + # NOTE: the VTEC and segments will still include them correctly. + return ['CAN', 'UPG'] + + ############################################################### + ### NOTES + + ## HANDLING HLS SEGMENTATION + ## + ## Problem: + ## The system is set up to sample hazards using the combinations file + ## edit areas i.e. do not sample zones not in the combinations + ## segmenting strictly according to the hazards i.e. all zones + ## in a combination with the same hazard will be in a + ## segment. + ## + ## The HLS formatter uses the combinations file differently. + ## Segmenting is initially done according to the hazards as + ## above, but IF a forecaster wants to further split the + ## segment, he/she can set up a combination in the zone + ## combiner to do so. This however, is optional and all + ## areas (land and marine) in the WFO need to be sampled for + ## hazards regardless of what's in the combinations file. + ## + ## Solution: + ## The HLS code has several relatively independent pieces, and + ## each has to sample the hazards correctly: + ## + ## --GUI code: Sampled by "_determineSegments" and stored in + ## argDict['hazards'] + ## + ## --Formatter Logic code: Sampled by "_getProductInfo" and + ## stored in argDict['hazards']. Note: we can't re-use the + ## hazards set by the GUI code because between the time the + ## GUI is called and the formatter code is invoked, the + ## TextFormatter infrastructure has re-sampled the hazards + ## using the combinations file as above and reset the + ## argDict['hazards'] entry. + ## + ## --Override DiscretePhrases "getHazardList" to use the hazards + ## stored in argDict rather than using the one automatically + ## generated by TextFormatter infrastructure which uses the + ## combinations file as above. 
+ + ############################################################### + ### High level flow of formatter + + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Use previous for entire product + try: + if self._UsePrev: + return self.getPreviousProduct(self._textdbPil) + except: + pass + + #print "\n\nvarDict", argDict["varDict"] + segmentList = [areas for segNum, areas, situation, scenario, + sections, extraInfo in self._segments] + #print "\n\nSegment Information", self._segments, "\n\n" + if len(segmentList) == 0: + return "No hazards to report" + + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + self._sampleData(argDict) + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each segment in the segmentList + fraction = 0 + fractionOne = 1.0/float(len(segmentList)) + percent = 50.0 + self.setProgressPercentage(50) + for segment in self._segments: + self.progressMessage(fraction, percent, "Making Product for Segment") + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + fcst = self._preProcessArea(fcst, segmentAreas, self._expireTime, argDict) + fcst = self._makeProduct(fcst, segment, argDict) + fcst = self._postProcessArea(fcst, segmentAreas, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + ######### Time ranges + + def _resolution(self): + return 3 + + def _determineTimeRanges(self, argDict): + # Set up the time range for 0-120 hours + self._issueTime = AbsTime.AbsTime(argDict['creationTime']) + + # Create a time range from the issuanceHour out 120 hours + # First get the current local time + localTime = time.localtime(argDict['creationTime']) + year = localTime[0] + month = localTime[1] + day = 
localTime[2] + hour = localTime[3] + # Now "truncate" to a 6-hourly boundary and compute startTime in local Time. + hour = int (int(hour/6) * 6) + startTime = AbsTime.absTimeYMD(year, month, day, hour) + # Finally, convert back to GMT + localTime, shift = self.determineTimeShift() + startTime = startTime - shift + self._timeRange = self.makeTimeRange(startTime, startTime+120*3600) + + # Determine the time range list, making sure they are on hour boundaries + # w.r.t. midnight today according to the resolution + subRanges = self.divideRange(self._timeRange, self._resolution()) + trList = [] + for tr in subRanges: + # print tr + trList.append((tr, "Label")) + self._timeRangeList = trList + + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._currentTime = argDict['creationTime'] + self._expireTime = self._issueTime + self._purgeTime*3600 + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + return None + + ######### Sample Data + ## Since the segments are determined by user input, + ## we need to determine combinations now (usually done automatically + ## by TextFormatter infrastructure.) + + def _sampleData(self, argDict): + # Sample the data + editAreas = self._makeSegmentEditAreas(argDict) + areas = self._marineAreas() + areas.append(self._cwa()) + cwa_maor = self._makeCombination(argDict, areas) + editAreas.append(cwa_maor) + self._cwaMaorArea, self._cwaMaorLabel = cwa_maor + self._sampler = self.getSampler(argDict, + (self._analysisList_HLS(), self._timeRangeList, editAreas)) + + def _makeSegmentEditAreas(self, argDict): + areasList = [segmentAreas + for segmentNum, segmentAreas, situation, scenario, + sections, extraInfo in self._segments] + #print "areaList", areasList + editAreas = [] + self._editAreaDict = {} + for areas in areasList: + if len(areas)>1: + # Make a combination on the fly + editArea, label = self._makeCombination(argDict, areas) + # e.g. 
editArea, Combo1 + self._editAreaDict[tuple(areas)] = editArea + editAreas.append((editArea, label)) + else: + area = areas[0] + self._editAreaDict[tuple(areas)] = area + editAreas.append((area, area)) + return editAreas + + def _getComboNumber(self): + try: + self.__comboNumber = self.__comboNumber + 1 + except: + self.__comboNumber = 1 + return self.__comboNumber + + def _makeCombination(self, argDict, areaNames): + # Given a list of area names, return a combination edit area + gridLoc = argDict["ifpClient"].getDBGridLocation() + comboList = [] + for areaName in areaNames: + newArea = self.getEditArea(areaName, argDict) + if areaNames.index(areaName) == 0: + comboNumber = self._getComboNumber() + label = "Combo"+repr(comboNumber) + refId = ReferenceID(label) + #area = AFPS.ReferenceData( + # gridLoc, refId, newArea.polygons(), + # AFPS.ReferenceData.LATLON) + #area.convertToAWIPS() + area = ReferenceData(gridLoc, refId, newArea.getPolygons(CoordinateType.LATLON), CoordinateType.LATLON) + comboList.append(newArea.getId().getName()) + area = self.unionAreas(label, area, newArea) + return area, label + + ###### Generate headers and Overview sections + + def _preProcessProduct(self, fcst, argDict): + + self._prevHLS = self.getPreviousProduct(self._textdbPil) + + info = self._getProductInfo(argDict) + self._getStormInfo(argDict, info) + if self._stormTypeName.find("|*")>=0: sn = "Tropical Cyclone" + else: sn = self._stormTypeName + actualProductName = sn + " Local Statement" + actualProductName = self.checkTestMode(argDict, actualProductName) + + # Product header + if self._areaName != "": + self._areaName = " for " + self._areaName + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, actualProductName + self._areaName) + + if len(self._easPhrase) != 0: + eas = self._easPhrase.upper() + '\n' + else: + eas = '' + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + 
s.upper() + + s = eas + productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + # Main Headline + mh = self._MainHeadline + if mh == "Enter": + hl = self._MainHeadline_entry + elif mh == "UsePrev": + hl = self._grabHeadline(self._prevHLS) + elif mh == "UseTCP": + try: # If unnamed or downgraded, we won't have a TCP product + hl = self._grabHeadline(self._TCP) + except: + hl = "" + + if hl == "": + hl = self._frame("Enter headline here") + hl = self._addEllipses(hl) + fcst = fcst + hl + "\n\n" + self._overview(argDict, info) + return fcst + + # Modified 4/21/09 (MHB) - Fixed a problem with the construction of the + # overview when using previous text. This will fix the problem with + # getting multiple copies of the first zone segment header. The + # _grabSection method is not capable of recognizing the end of the + # "Next Update" overview section on its own. Implemented the + # _grabOverview method (already defined in the baseline) to parse out the + # entire overview, with which _grabSection will work correctly. + # + # Modified 12/24/10 (MHB) - Added capability to specify which sections + # of the overview can use previous text. All other sections will be + # forced to update. This involves a new call to the _grabSection method. 
+ + def _overview(self, argDict, info): + + # Establish previous HLS text for testing - if needed + #if len(self._prevHLS.strip()) == 0: + # self._prevHLS = self._testPrevHLS() + + overview = "" + if self._OverviewEditMode == "FormatFree": + return self._frame("Enter Overview Information") + "\n\n" + + if self._OverviewEditMode == "UsePrev": + usePrev = True + + # Set aside to overview text, so we don't have to search the + # entire HLS product + prevOverview = self._grabOverview(self._prevHLS) + else: + usePrev = False + +## print "prev = '%s'" % (prevOverview) + + # Get the list of sections which must be present, in order + sections = self._overviewSections() + for sectionDict in sections: + title = sectionDict.get("title", '') + + # Start out with a blank text for this section + sectionText = "" + + # If we are requested to use previous text, and this is a section + # we can use it for + if usePrev and \ + title.strip() not in self._noPrevTextOverviewSections(): +# print "Looking for previous '%s'" % (title) + + # Get the previous text for this section + sectionText = self._grabSection(prevOverview, title, True) + +# print usePrev, len(sectionText.strip()) +# print "'%s'" % (sectionText.strip()) + + # If we are not using the previous text, or we could not find + # the previous section text + if not usePrev or len(sectionText.strip()) == 0: + exec("sectionText = self." + sectionDict["name"] + "(title, sectionDict, info)") + + # Ensure the grabbed text is wrapped to the correct product length + sectionText = self.endline(sectionText, self._lineLength) + + # Add this section text to the current overview + overview = overview + sectionText + "\n\n" + + # Return completed overview + return overview + + ########## Produce Segment Sections + +## # Modified 12/24/10 (MHB) - Added capability to specify which sections +## # of the segment can use previous text. All other sections will be +## # forced to update. This involves a new call to the _grabSection method. 
+## + def _makeProduct(self, fcst, segment, argDict): + argDict["language"] = self._language + self._stormPrevHLS = False + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + info = self._getSegmentInfo([segment]) + self._setStats(info, argDict, segmentAreas, self._editAreaDict, + self._sampler, self._analysisList_HLS(), self._timeRangeList) + + # + # This section generates the segment sections + # + hazardsC = argDict['hazards'] + listOfHazards = hazardsC.getHazardList(segmentAreas) + if listOfHazards == []: + return fcst + +# LogStream.logProblem("=== VARS ===", +# "\nhazardsC=",hazardsC, +# "\nlistOfHazards=",listOfHazards, +# "\nsegmentAreas=",segmentAreas, +# "\nphen.sig=",listOfHazards[0]['phensig'], +# ) + LogStream.logEvent("=== VARS ===", + "\nhazardsC=",hazardsC, + "\nlistOfHazards=",listOfHazards, + "\nsegmentAreas=",segmentAreas, + "\nphen.sig=",listOfHazards[0]['phensig'], + ) + + prevHLS = self._getPrevText(listOfHazards) + + # This section generates the headline on the segment + # + # stuff argDict with the segmentAreas for DiscretePhrases + argDict['segmentAreas'] = segmentAreas + editArea = segmentAreas[0] + # Stuff the headline for HU.S in extraInfo + self._setUp_HU_S_Headline(extraInfo, prevHLS) + argDict["extraInfo"] = extraInfo + areaLabel = editArea + + fcst = fcst + self.generateProduct("Hazards", argDict, area=editArea, + areaLabel=areaLabel, timeRange=self._timeRange) + + # self._segments = + # (segNum, areas, chosen situationName, chosen scenarioName, + # sections, extraInfo) + # For example: +## (1, ['FLZ052'],'Warning', 'Advancing', +## # list of sections: name, order, usePrev, useImport +## [ +## ('PrecautionaryPreparednessActions', None, 0, None), +## ('Probability', '', 0, None), +## ('Winds', '', 0, None), +## ('StormSurgeTide', '', 0, None), +## ('InlandFlooding', '', 0, None), +## ('Tornadoes', '', 0, None), +## ('Marine', '', 0, None) +## ] +## # Extra information for HU.S headlines +## 
{'userHeadline_HU_S': 'Headline for HU.S', +## 'usePrev_HU_S_Headline':0}, +## ), + + sections = self._orderSections(sections) + # iterate over the sections for this segment + for section in sections: + + # Initialize text for this section + sectionText = '' + + # Get info about this section + sectionName, order, usePrev, useImport = section + #print "section", sectionName, order, usePrev, useImport + title = self._findInDictList( + self._segmentSections(), "name", sectionName, "title") + title = self._extractTitle(info, title) + + # If we should use previous section text, and this is a section + # permitted to use previous text + if usePrev and \ + title.strip() not in self._noPrevTextSegmentSections(): + + # If we will also be importing text for this section + if useImport: + # Frame the previous text to force forecaster to review it + sectionText = self._grabSection(prevHLS, title, True) + else: + + # Just get the previous section text without framing codes + sectionText = self._grabSection(prevHLS, title) + + # If we are not using previous text, or could not find it + if not usePrev or len(sectionText.strip()) == 0: + # Make a blank shell section as a place-holder + exec("sectionText = self." 
+ sectionName + "(title, argDict, segment, section, info)") + + # If we should also import text + if useImport: + importText = self._getImportText(argDict, segment, sectionName, + title) + if importText.strip() != "": + + print("\n\n" + "*"*80) + print("sectionText = '%s'" % (sectionText)) + + # Look to see if there are any section headers and framing + # codes with dummy text in the existing section text + sectionMatch = re.search("(?is)^(\.{3}.+\.{3}.+?)\|\*" + + " *(additional free|enter|add)", + sectionText) + + + # If we are not using the previous text, and the text + # contains both a section header and dummy text in framing + # codes + if sectionMatch is not None: + + # Keep the section header then add the imported text + sectionText = sectionMatch.group(1).strip() + "\n" + \ + importText + + # Otherwise, append imported text to the end of section + else: + sectionText = sectionText + "\n\n" + importText + + # Add endStr + endStr = self._findInDictList( + self._segmentSections(), "name", sectionName, "endStr") + #print "\n***********endStr", endStr, sectionName + if endStr is not None: + # Remove first in case an endStr was added from previous or imported text + # Note that we're assuming endStr's are unique within the section!! + sectionText = sectionText.replace(endStr, "") + # Now put the endStr at the end + sectionText = sectionText + endStr + + # Add this section to the segment + sectionText = sectionText + "\n\n" + fcst = fcst + sectionText + + # Word wrap this segment + fcst = self.endline(fcst, linelength=self._lineLength, + breakStr=[" ", "-", "..."]) + return fcst + + # Added 12/24/10 (MHB) - Define a method to specify which overview + # sections are not permitted to use text from a previous HLS. This means + # these sections will always have new text created/imported for it. The + # section titles must match those as defined in the _overviewSections + # method (although any final "\n" can be ignored. 
+ def _noPrevTextOverviewSections(self): + return [".NEW INFORMATION...", ".AREAS AFFECTED...", + ".WATCHES/WARNINGS...", ".STORM INFORMATION...", + ".NEXT UPDATE...", + ".PRECAUTIONARY/PREPAREDNESS ACTIONS...\n" + + "PRECAUTIONARY/PREPAREDNESS ACTIONS..." + ] + + # Added 12/24/10 (MHB) - Define a method to specify which segment + # sections are not permitted to use text from a previous HLS. This means + # these sections will always have new text created/imported for it. The + # section titles must match those as defined in the _segmentSections + # method (although any final "\n" can be ignored. + def _noPrevTextSegmentSections(self): + return ["...New Information...", + "...Probability of tropical storm/hurricane conditions...", + ] + + def _getImportText(self, argDict, segment, sectionName, title): + importText = "" + # Look for importPIL + importPil = self._findInDictList( + self._segmentSections(), "name", sectionName, "importPIL") + if importPil is not None: + importProduct = self.getPreviousProduct(importPil) + importText = self._grabSection(importProduct, title) + # Remove the title + importText = importText.replace(title, "") + else: # Try importMethod + importMethod = self._findInDictList( + self._segmentSections(), "name", sectionName, "importMethod") + if importMethod is not None: + importText = importMethod(argDict, segment) + if len(importText.strip()) > 0: + # Clean up and word-wrap imported text + importText = self._cleanText(importText.strip()) + # Add the imported text to this section + importText = self._frame(importText) + "\n\n" + return importText + + # Modified 4/22/09 (MHB) - fixed logging options of search as it could + # lead to debugging confusion. Only want log info after the loop has + # completed. 
+ def _getPrevText(self, listOfHazards): + #======================================================================= + # Set aside the previous text for this segment - if we can + + prevHLS = '' + + # Look through all hazards for this segment + for hazIndex in range(len(listOfHazards)): + + # See if this hazard has a previous text text key + if 'prevText' in listOfHazards[hazIndex]: + + # Try to get the previous text from this hazard + prevHLS = listOfHazards[hazIndex]['prevText'] + + # If there is actually something there + if len(prevHLS) > 0: + + # No point in continuing - we found the previous text + break + + # If there is still not something there + if prevHLS == '': +# LogStream.logProblem("No 'prevText' found for this segment") + LogStream.logEvent("No 'prevText' found for this segment") + else: +# LogStream.logProblem("\nprevText=", prevHLS) + LogStream.logEvent("\nprevText=", prevHLS) + + return prevHLS + + ############ Clean up + + def _postProcessProduct(self, fcst, argDict): + fcst = self.endline(fcst, linelength=self._lineLength, + breakStr=[" ", "-", "..."]) + fcst = fcst.replace("\n ","\n") + fcst = fcst.replace("&&", "\n&&\n") + + # Prevent empty Call to Action Tags + fcst = re.sub(r'\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\s*&&\n', \ + "", fcst) + # + # Clean up multiple line feeds + # + fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) + fcst = fixMultiLF.sub(r'\1', fcst) + # finish progress meter + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ############################################################### + ### Helper methods for getting information about the segments + + + ## From the tropical formatters -- want to use the same configuration + + def moderated_dict(self, parmHisto, timeRange, componentName): + """ + Modifed to lower the high end filter threshold from 20 MPH to + 15 MPH for Tropical. 
+ """ + # COMMENT: This dictionary defines the low and high limit at which + # outliers will be removed when calculating moderated stats. + # By convention the first value listed is the percentage + # allowed for low values and second the percentage allowed + # for high values. The thresholds chosen below gave best results + # during testing with 2004 and 2005 tropical cyclones. This dict + # is used with the moderatedMinMax analysis method specified in the + # TropicalPeriod definitions specified further down for use with + # tropical cyclones with wind parameters. + + # Get Baseline thresholds + dict = SampleAnalysis.SampleAnalysis.moderated_dict( + self, parmHisto, timeRange, componentName) + + # Change thresholds for Wind, WindGust, WaveHeight and Swell + # For entire area, we want the actual maximum + if parmHisto.area().getId().getName() == self._cwaMaorLabel: + dict["Wind"] = (0,0) + else: + dict["Wind"] = (0, 15) + dict["WindGust"] = (0, 15) + dict["WaveHeight"] = (0, 15) + dict["Swell"] = (0, 15) + dict["InundationMax"] = (0,2) + return dict + + # This is a very simple way to round values -- if we need + # something more sophisticated, we'll add it later. 
+ def _increment(self, element): + dict = { + "Wind": 5, + "WindGust": 5, + "InundationMax": 1, + } + return dict.get(element, 0) + + def _ktToMph(self, value, element): + newVal = self.ktToMph(value) + newVal = self.round(newVal, "Nearest", self._increment(element)) + return newVal + + class SegInfo: + def __init__(self): + pass + + def _getSegmentInfo(self, segments): + # Used to handle all the information required for + # both overview and segment sections + # Determines + # kinds of areas included (land, marine, coastal, inland) + # hazards included + # and for segments, the wind and probability data from the grids + # + # All of this can then be passed in the "info" object + # to the section methods for reporting + # + if type(segments) is not list: + segments = [segments] + allAreas = [] + for segment in segments: + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + allAreas = allAreas + segmentAreas + + info = self.SegInfo() + + # Determine the types of areas included in segments + # anyXX means that there is at least one area in the segment that is XX + # allXX means that all the XX areas are in the segment + info.anyInland, info.allInland, info.inlandAreas = self._checkAreas( + allAreas, self._inlandAreas()) + info.anyCoastal, info.allCoastal, info.coastalAreas = self._checkAreas( + allAreas, self._coastalAreas()) + info.anyMarine, info.allMarine, info.marineAreas = self._checkAreas( + allAreas, self._marineAreas()) + info.anyLand = info.anyInland or info.anyCoastal + info.allLand = info.allInland and info.allCoastal + info.landAreas = info.inlandAreas + info.coastalAreas + info.allAreas = info.inlandAreas + info.coastalAreas + info.marineAreas + info.cwa= self._generalAreas(info.landAreas + info.marineAreas) + info.cwaShort = info.cwa.rstrip(".\n") + info.all_cwa_maor_areas = self._inlandAreas() + self._coastalAreas() + self._marineAreas() + self._determineHazards(info, segments) + return info + + def _determineHazards(self, 
info, segments): + # Return a list of hazards from the given segments in the form: + # (key, landList, marineList, coastalList, inlandList) + # where key is (hdln, act, phen, sig) and the lists show which areas + # contain the hazard separated by category + info.hazardHdlns = [] + hazAreaList = [] + for segment in segments: + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + hazardTable = self._argDict["hazards"] + hazards = hazardTable.getHazardList(segmentAreas) + for hazard in hazards: + action = hazard['act'] + hazAreaList.append((hazard, segmentAreas)) + # Consolidate hazards (there could be multiple segments with the same phen/sig/act) + hazardDict = {} + hazardList = [] + for hazard, segmentAreas in hazAreaList: + key = (hazard['hdln'], hazard['act'], hazard['phen'], hazard['sig']) + if key not in list(hazardDict.keys()): + hazardDict[key] = segmentAreas + hazardList.append(key) + else: + hazardDict[key] = hazardDict[key]+segmentAreas + # Now we have areas that have the same headline and must split them into land/marine + for key in hazardList: + landAreas = [] + marineAreas = [] + coastalAreas = [] + inlandAreas = [] + hdln, act, phen, sig = key + hazAreas = hazardDict[key] + for area in hazAreas: + if area in info.landAreas: landAreas.append(area) + if area in info.marineAreas: marineAreas.append(area) + if area in info.coastalAreas: coastalAreas.append(area) + if area in info.inlandAreas: inlandAreas.append(area) + info.hazardHdlns.append(( + key, landAreas, marineAreas, coastalAreas, inlandAreas)) + #print "\nhazardList", info.hazardHdlns + + def _getWindStats(self, argDict, sampler, analysisList, timeRangeList, editArea): + statList = self.getStatList(sampler, analysisList, timeRangeList, editArea) + maxWind = 0 + print("\nGetting CWA MAOR wind stats") + for i in range(len(statList)): + tr, label = timeRangeList[i] + statDict = statList[i] + wind = self._getStatValue(statDict, "Wind", "Max", self.VECTOR()) + print(" wind 
value", wind, tr) + if wind > maxWind: + maxWind = wind + print(" returning maxWind", maxWind) + return maxWind + + def _setStats(self, info, argDict, segmentAreas, editAreaDict, + sampler, analysisList, timeRangeList): + # Get statistics for this segment and attach them to the info object + editArea = editAreaDict[tuple(segmentAreas)] + statList = self.getStatList(sampler, analysisList, timeRangeList, editArea) + #print "statList", statList + + # Determine the MaxWind and MaxWindGust values and duration of maxWind + info.maxWind = None + info.minWind = None + info.avgWind = None + info.maxGust = None + + # Determine first grid that has Wind >= 34 + info.wind34Time = None + + # Determine min and max probabilities for 34, 64 + info.minProb34 = None + info.maxProb34 = None + info.minProb64 = None + info.maxProb64 = None + + # Time of maximum pws34int and pws64int + info.maxINTprob34 = None + info.maxINTprob64 = None + + pws34Max = None + pws64Max = None + info.pwstrend = None + + # These need to be initialized to None so we'll know if NO grids are present + info.inundationMax = None + + # Pass 1: Determine maximum values + for i in range(len(statList)): + tr, label = timeRangeList[i] + statDict = statList[i] + #print "\ntr", tr + + stats = self.getStats(statDict, "prob34") + if stats is not None: + min34, max34 = stats + if info.maxProb34 is None: info.maxProb34 = max34 + if info.minProb34 is None: info.minProb34 = min34 + if min34 < info.minProb34: + info.minProb34 = min34 + if max34 > info.maxProb34: + info.maxProb34 = max34 + + stats = self.getStats(statDict, "prob64") + if stats is not None: + min64, max64 = stats + if info.minProb64 is None: info.minProb64 = min64 + if info.maxProb64 is None: info.maxProb64 = max64 + if min64 < info.minProb64: + info.minProb64 = min64 + if max64 > info.maxProb64: + info.maxProb64 = max64 + + pws34int = self._getStatValue(statDict, "pws34int", "Max") + if pws34int is not None: + if pws34int > pws34Max: + info.maxINTprob34 = tr + 
pws34Max = pws34int + pws64int = self._getStatValue(statDict, "pws64int", "Max") + if pws64int is not None: + if pws64int > pws64Max: + info.maxINTprob64 = tr + pws64Max = pws64int + + # Get wind and gust values -- + wind = self._getStatValue(statDict, "Wind", "MinMax", self.VECTOR()) + if wind is not None: + minWind, maxWind = wind + #print "minWind, maxWind", minWind, maxWind + if info.maxWind is None: + info.minWind = minWind + info.maxWind = maxWind + else: # Check for maxWind increasing or decreasing + if maxWind > info.maxWind: + info.maxWind = maxWind + if minWind < info.minWind: + info.minWind = minWind + if info.wind34Time is None and info.maxWind >= 34: + info.wind34Time = tr + windGust = self._getStatValue(statDict, "WindGust", "Max") + if windGust is not None: + if info.maxGust is None: + info.maxGust = windGust + if windGust > info.maxGust: + info.maxGust = windGust + + info.inundationMax = self._pickupMaxStats( + statDict, info.inundationMax, "InundationMax") + + # Round to increment + if info.maxWind is not None and info.minWind is not None: + info.avgWind = (info.maxWind + info.minWind)/2.0 + info.maxWind = int(self.round(info.maxWind, "Nearest", self._increment("Wind"))) + info.minWind = int(self.round(info.minWind, "Nearest", self._increment("Wind"))) + info.avgWind = self.round(info.avgWind, "Nearest", self._increment("Wind")) + if info.maxGust is not None: + info.maxGust = int(self.round(info.maxGust, "Nearest", self._increment("WindGust"))) + if info.inundationMax is not None: + info.inundationMax = int(self.round( + info.inundationMax,"Nearest", self._increment("InundationMax"))) + if info.inundationMax > 10: + info.deltaSurge = info.inundationMax - 4 + elif info.inundationMax > 6: + info.deltaSurge = info.inundationMax - 3 + elif info.inundationMax > 2: + info.deltaSurge = info.inundationMax - 2 + + print("\n\nStats for segment", segmentAreas) + print(" maxWind, maxWindGust", info.maxWind, info.maxGust) + print(" minWind, avgWind", 
info.minWind, info.avgWind) + print() + print(" 34 Info") + print(" min, max prob34", info.minProb34, info.maxProb34) + print(" maxINTprob34", info.maxINTprob34) + print(" 64 Info") + print(" min, max, prob64", info.minProb64, info.maxProb64) + print(" maxINTprob64", info.maxINTprob64) + print(" InundationMax", info.inundationMax) + + # Make additional passes to determine durations + # These are values for which we need to calculate durations + # In addition, we'll calculate for the maxWind value + windDurValues = [34, 50, 64] + info.windDur = {} + print("Durations") + for durVal in windDurValues + [info.maxWind]: + info.windDur[durVal] = self._determineDuration(durVal, statList, timeRangeList) + print(" ", durVal, info.windDur[durVal]) + print() + + def _pickupMaxStats(self, statDict, curValue, element): + # Given an element and curValue, pick up the stats from the statDict + # If stats are greater than the curValue, replace and return curValue + # Assumes that curValue is initialized to None + stats = self.getStats(statDict, element) + if stats is not None: + if curValue is None: + curValue = 0 + if stats > curValue: + curValue = stats + return curValue + + def _determineDuration(self, durValue, statList, timeRangeList): + # Determine maxWind, wind34, and wind64 durations, and end time of 34, 64 + # This will be the first time range that a value goes from above 34, 64 to below + #print "\n Determine Duration for", durValue + hit = False + beg = None + end = None + for i in range(len(statList)): + tr, label = timeRangeList[i] + statDict = statList[i] + # Get wind stats + wind = self._getStatValue(statDict, "Wind", "MinMax", self.VECTOR()) + if wind is not None: + minWind, maxWind = wind + #print " minWind, maxWind", minWind, maxWind + maxWind = int(self.round(maxWind, "Nearest", self._increment("Wind"))) + #print " new maxWind", maxWind + if end is None: + if hit and maxWind < durValue: + end = tr + elif not hit and maxWind >= durValue: + hit = True + beg = tr + 
#print "beg, end", beg, end, "\n" + if beg is not None: + if end is None: + end = tr + newTR = self.makeTimeRange(beg.startTime(), end.startTime()) + return newTR + return None + + def _getStatValue(self, statDict, element,method=None, dataType=None): + stats = statDict.get(element, None) + if stats is None: return None + if type(stats) is list: + stats = stats[0] + stats, tr = stats + if dataType==self.VECTOR(): + stats, dir = stats + return self.getValue(stats, method) + + def _determineDescriptor(self, info, areas): + # Return the descriptor for the type of areas given plus + # the comparison list to determine if the areas cover all or portions + # of the descriptor + # If all marine, return maor_descriptor and marineAreas + # If all land, return cwa_descriptor and landAreas + # else return cwa_maor_descriptor and all land and marine areas + any, all, areas = self._checkAreas(areas, self._marineAreas()) + if all: + return self._maor_descriptor(), self._marineAreas() + landAreas = self._inlandAreas()+self._coastalAreas() + any, all, areas = self._checkAreas(areas, landAreas) + if all: + return self._cwa_descriptor(), landAreas + return self._cwa_maor_descriptor(), info.all_cwa_maor_areas + + def _checkAreas(self, segmentAreas, checkAreas): + # all is True if ALL checkAreas are in the segmentAreas + # e.g. 
all land areas are in the segment areas + all = True + any = False + areas = [] + for area in segmentAreas: + if area in checkAreas: + any = True + areas.append(area) + else: + all = False + return any, all, areas + + def _areaType(self, areas): + inland = False + coastal = False + marine = False + for area in areas: + if area in self._inlandAreas(): + inland = True + break + for area in areas: + if area in self._coastalAreas(): + coastal=True + break + for area in areas: + if area in self._marineAreas(): + marine=True + break + return inland, coastal, marine + + def _checkHazard(self, hazardHdlns, phenSigList, checkAreaTypes=None, + checkAreas=None, returnList=False, mode="any", includeCAN=False): + # Given a list of hazards in the form + # (key, landList, marineList, coastalList, inlandList) + # where key is (hdln, act, phen, sig) and the lists show which areas + # contain the hazard + # If mode == "any": + # Check to see if any of the given phenSigList = [(phen, sig), (phen, sig)] + # are found + # If mode == "all": + # Check to see if all of the given phenSigList are found + # IF checkAreaTypes is given, then check against that particular area type(s) i.e. + # "land", "marine", etc. + # IF checkAreas is given, only return areas that are in that list + # IF returnList=True, returns a list of (key, areas) that meet the criteria + # IF includeCAN is True then CAN hazards will be included as well. + # Otherwise, they are ignored. + # + # E.g. 
hdlnList = self._checkHazard(hazardHdlns, [("FA","W")], returnList=True) +# print "phenSigList is ", phenSigList + chosen = [] + for key, landList, marineList, coastalList, inlandList in hazardHdlns: +# print "what is mode?", mode + hazAreas = landList+marineList + hazValue = (key, hazAreas) +# print "hazValue is ", hazValue + hdln, act, phen, sig = key + if not includeCAN and act == "CAN": + continue + for checkPhen, checkSig in phenSigList: +# print "checkPhen is ", checkPhen +# print "checkSig is ", checkSig + if phen == checkPhen and sig == checkSig: + if checkAreaTypes is not None: + # Check for land, marine, etc. + for checkAreaType in checkAreaTypes: + exec("testList = " + checkAreaType + "List") +# print "testList is", testList + if testList != []: + chosen.append(hazValue) +# print "chosen is ", chosen + elif checkAreas is not None: + acceptedAreas=[] + for hazArea in hazAreas: + if hazArea in checkAreas: + acceptedAreas.append(hazArea) + if acceptedAreas!=[]: + chosen.append((key, acceptedAreas)) + else: + chosen.append(hazValue) + if not returnList and chosen!=[]: break + if not returnList: + return chosen!=[] + return chosen + + #### Handling of HU.S headlines per segment + def _setUp_HU_S_Headline(self, extraInfo, prevHLS): + # Stuff the headline for HU.S in extraInfo + usePrev = extraInfo.get("usePrev_HU_S_Headline") + if usePrev: + headline = self._grabHeadline(prevHLS) + else: + headline = extraInfo.get("userHeadline_HU_S") + if headline == "": + headline = self._frame("Enter headline here") + extraInfo["headline_HU_S"] = headline + + # OVERRIDE from DiscretePhrases -- Must use argDict hazards set up + # by the HLS rather than the one generated by the Text Formatter + # infrastructure which uses the combinations file differently than + # we want for the HLS. See "Handling HLS segmentation" note + # above. 
+ + # Returns a formatted string announcing the hazards that are valid with + # timing phrases + def getHazardString(self, tree, node, fcstArea): + if len(fcstArea) <= 0: + return "" + #hazardTable = self._hazards.getHazardList(fcstArea) + argDict = tree.get("argDict") + hazardList = argDict["hazards"].getHazardList(fcstArea) + returnStr = "" + issuanceTime = self._issueTime.unixTime() + returnStr = self.makeHeadlinePhrases(tree, node, hazardList, + issuanceTime) + #Test mode? + returnStr = self.headlinePhraseTESTcheck(tree.get("argDict"), + returnStr) + return returnStr + + # OVERRIDE from DiscretePhrases + # USES the HU_S headline stuffed into argDict["extraInfo"] + # Makes multiple headlines based on the hazards list and returns + # the lot. + def makeHeadlinePhrases(self, tree, node, hazardList, issuanceTime, + testMode=0): + returnStr = "" + # make a deepcopy since we plan to mess with it. + hList = copy.deepcopy(hazardList) + + # sort headlines in appropriate order + if len(hList): + if hList[0]['pil'] in ['CWF','NSH','OFF','GLF']: + hList.sort(self.marineSortHazardAlg) + else: + hList.sort(self.regularSortHazardAlg) + numHdlns = len(hList) + + while len(hList) > 0: + hazard = hList[0] + + # Check for HU.S headline + # Only report cancelled HU.S if it is a singleton hazard + hazStr = None + if hazard['hdln'] == "" and hazard['phen']=="HU" and hazard['sig']=="S": + if hazard['act'] != "CAN" or numHdlns == 1: + argDict= tree.get("argDict") + extraInfo = argDict.get("extraInfo", None) + if extraInfo is not None: + hdln = extraInfo.get("headline_HU_S", None) + if hdln is not None: + hazard['hdln'] = hdln + hazStr = hazard['hdln'] + # Strip ellipses since they will be added later + hazStr = hazStr.rstrip("...").lstrip("...") + + # Can't make phrases with hazards with no 'hdln' entry + if hazard['hdln'] == "": + hList.remove(hazard) + continue + + phenSig = hazard['phen'] + "." 
+ hazard['sig'] + actionCodeList = self.getAllowedActionCodes(phenSig) + + # if the action is not in the actionCodeList, skip it + if hazard['sig'] != "": # it's not locally defined + if not hazard['act'] in actionCodeList: + print("...Ignoring action code:", hazard['act'], \ + hazard['hdln']) + hList.remove(hazard) + continue + + # get the headline phrase + if hazStr is None: + hazStr = self.makeStandardPhrase(hazard, issuanceTime) + if len(hazStr): + # Call user hook + localStr = self.addSpace(self.hazard_hook( + tree, node, hazard['phen'], hazard['sig'], hazard['act'], + hazard['startTime'], hazard['endTime']), "leading") + returnStr = returnStr + "..." + hazStr + localStr + "...\n" + + # always remove the main hazard from the list + hList.remove(hazard) + + return returnStr + #### END Handling of HU.S headlines per segment + + def _frame(self, text): + return "|* " + text + " *| " + + def _orderSections(self, sections): + # We are assuming that IF someone orders one section, then + # ALL must be ordered, order numbers are correct, and proper integers 1-(xxx) + # AND the ordered sections follow the sections that cannot be ordered + # Otherwise, we revert to default ordering. 
+ + # If order is None, it means that the section should be in the order it appears in the list + # If order is a number, then order it + + #print "\nOrdering sections" + # Gather the sections that need to be ordered + ordered = [] + unordered = [] + orderError = False + for section in sections: + sectionName, order, usePrev, useImport = section + #print sectionName + if order is not None: + try: + index = int(order)-1 + except: + orderError=True + # We will punt ordering + break + ordered.append((section, index)) + else: + unordered.append(section) + + #print "ordered", ordered + #print "unordered", unordered + + # Order the sections correctly + #print "orderError", orderError + if not orderError: + orderedSections = [] + for i in range(len(ordered)): orderedSections.append(None) + error = False + for section, index in ordered: + try: + if orderedSections[index] != None: + error = True + except: error = True + if error: break + orderedSections[index] = section + # Add them back to the end of the unordered sections + if error: newSections = sections + else: newSections = unordered + orderedSections + else: + newSections = sections + + #print "\nreordered", newSections + + # Now add in required sections that did not show up in the GUI + # Assume that the required sections will appear before the optional ones. 
+ requiredSections = [] + for sectionEntry in self._segmentSections(): + if sectionEntry["inSegments"] == "always": + sectionName = sectionEntry.get('name') + requiredSections.append((sectionName, None, None, None)) + finalSections = requiredSections + newSections + #print "\nfinalSections", finalSections, "\n" + return finalSections + +# def headlineRegExpr(self): +# # modify this to change how "previous HLS" catches the 2nd headline +# # the first headline will be defined by the required headline +# headlineRegEx = r'^((?:.\.\.[^\n]+?\.\.\.)|(?:\$\$))$' +# return headlineRegEx + + def _findInDictList(self, dictList, identifier, value, field): + for dictionary in dictList: + if dictionary[identifier] == value: + return dictionary.get(field, None) + return None + + def _accessDict(self, dictionary, keys): + value = dictionary + for key in keys: value = value[key] + return value + + def _addEllipses(self, string): + # Add beginning and ending ellipses to non-null string + # (if not there already) + if string != "": + string = string.rstrip("...") + string = string.lstrip("...") + string = "..." + string + "..." + return string + + def _analysisList_HLS(self): + # 120 hours = time period of prob34, 64 grids + # prob34 and prob64 are 120 hour grids, so just sample one value (maximum) for the + # whole time period + # MaxINTProb34 = time period when the 6 hourly pws34int is maximum + # MaxINTProb64 = time period when the 6 hourly pws64int is maximum + # XXX : if <=32: 5, 32-42: 10, 50: 20? 
(same as Tropical formatters) + # + + # Sample over 120 hours beginning at current time (OR time of prob34/prob64) + return [ + ("Wind", self.vectorModeratedMinMax, [6]), + ("WindGust", self.moderatedMinMax, [6]), + ("prob34", self.minMax), # 120 hour value + ("prob64", self.minMax), # 120 hour value + ("pws34int", self.maximum, [6]), + ("pws64int", self.maximum, [6]), + ("InundationMax", self.moderatedMax), + ] + + ##################################################################################### + ##################################################################################### + ### Previous Product Helper methods + + def _grabStormInfo(self, tcp): + # Get the storm information from the selected TCP + # return a dictionary + # Initialize a dictionary to hold the information we want + dict = {"StormType" : "|* fill in storm type here *|", + "StormName" : "|* fill in storm name here *|", + "StormTime" : "|* Enter storm time *| ", + "StormLat": "", + "StormLon": "", + "StormReference": "", + "StormIntensity": "", + "StormMotion": "", + "StormInfo": "", + "StormCenter": "", + } + #======================================================================= + # If we got the latest public advisory + + if tcp is not None and len(tcp) > 0: + + #=================================================================== + # Try to determine the storm type and name automatically + + # Updated version to handle WFO GUM advisories. This pattern will + # handle multiple word names (including certain special characters) + # This is for the NHC format. 
+ mndSearch = re.search("(?im)^.*?(HURRICANE|(SUB|POST.?)?TROPICAL " + + "(STORM|DEPRESSION)|(SUPER )?TYPHOON|" + + "REMNANTS OF) ([A-Z0-9\-\(\) ]+?)" + + "(SPECIAL |INTERMEDIATE )?ADVISORY", tcp) + + # Display some debug info - if flag is set + self.debug_print("mndSearch = '%s'" % (mndSearch)) + + # If we found the storm type and name in the MND header + if mndSearch is not None: + + # Pick off the storm type and name + dict["StormType"] = mndSearch.group(1).strip() + dict["StormName"] = mndSearch.group(5).strip() + + #################################################################### + #################################################################### + # 12/15/2010 (MHB) - we should not need this anymore, but will + # leave it for the 2011 season as a fail-safe. + + # Look for the HPC format instead + else: + + mndSearch = re.search("(?im)^PUBLIC ADVISORY.+?FOR REMNANTS " + + "OF ([A-Z0-9\-\(\) ]+)", tcp) + + # If we found the storm type and name in the MND header + if mndSearch is not None: + + # Pick off the storm type and name + dict["StormType"] = "Remnants of" + dict["StormName"] = mndSearch.group(1).strip() + + # end possible removal - 12/15/2010 (MHB) + #################################################################### + #################################################################### + + #=================================================================== + # Clean up the product for easier parsing + + tcp = self._cleanText(tcp) + + #=================================================================== + # Now try to grab the latest storm information + + # Look for the new NHC format first + summarySearch = re.search("(?is)SUMMARY OF (.+?)\.{3}.+?" + + "LOCATION\.{3}(.+?[NS]) +(.+?[WE]).+?" + + "(ABOUT .+?)MAXIMUM SUSTAINED WIND.+?" 
+ + "(\d+ MPH).+?PRESENT MOVEMENT\.{3}" + + "(.+?)\.{3}", tcp) + + #-------------------------------------------------------------------- + # If we found the NHC summary section + + if summarySearch is not None: + + # Set aside some information we'll need later on + dict["StormTime"] = summarySearch.group(1).strip() + dict["StormLat"] = summarySearch.group(2).strip() + dict["StormLon"] = summarySearch.group(3).strip() + dict["StormReference"] = summarySearch.group(4).strip() + dict["StormIntensity"] = summarySearch.group(5).strip() + dict["StormMotion"] = summarySearch.group(6).strip() + + #================================================================ + # Use the remaining summary groups to contruct a paragraph + # similar to the "old" TCP format, and save that for later use + + # Start the paragraph with the advisory time + dict["StormCenter"] = "AT %s...THE CENTER OF " % \ + (dict["StormTime"]) + + # Now add some phrasing to maintain proper grammar, if needed + if dict["StormType"] == "Remnants of": + dict["StormCenter"] = "%s THE" % (dict["StormCenter"]) + + # Now add the storm type and storm name + dict["StormCenter"] = "%s %s %s " % (dict["StormCenter"], + dict["StormType"], + dict["StormName"]) + + # Now add the storm position + dict["StormCenter"] = \ + "%s WAS LOCATED AT LATITUDE %s...LONGITUDE %s." 
% \ + (dict["StormCenter"], dict["StormLat"], dict["StormLon"]) + + #---------------------------------------------------------------- + # Now add the primary NHC geographic reference + + # Get all the NHC references - starting with the word 'about' + # after the first one + referenceIndex = dict["StormReference"][4:].find('about') + + # Assume we only have one NHC reference point by default + nhcReference = dict["StormReference"] + +## print "referenceIndex = ", referenceIndex + + # If we have more than one NHC reference point + if referenceIndex != -1: + + # Adjust this index to account for the first 'about' + referenceIndex += 4 + + # Only keep the first NHC reference location + nhcReference = dict["StormReference"][:referenceIndex] + + # Convert any abbreviated bearings to full words + nhcReference = nhcReference.replace(' N ', ' north ') + nhcReference = nhcReference.replace(' NNE ', ' north-northeast ') + nhcReference = nhcReference.replace(' NE ', ' northeast ') + nhcReference = nhcReference.replace(' ENE ', ' east-northeast ') + nhcReference = nhcReference.replace(' E ', ' east ') + nhcReference = nhcReference.replace(' ESE ', ' east-southeast ') + nhcReference = nhcReference.replace(' SE ', ' southeast ') + nhcReference = nhcReference.replace(' SSE ', ' south-southeast ') + nhcReference = nhcReference.replace(' S ', ' south ') + nhcReference = nhcReference.replace(' SSW ', ' south-southwest ') + nhcReference = nhcReference.replace(' SW ', ' southwest ') + nhcReference = nhcReference.replace(' WSW ', ' west-southwest ') + nhcReference = nhcReference.replace(' W ', ' west ') + nhcReference = nhcReference.replace(' WNW ', ' west-northwest ') + nhcReference = nhcReference.replace(' NW ', ' northwest ') + nhcReference = nhcReference.replace(' NNW ', ' north-northwest ') + + # Add only first one to the summary paragraph for brevity + dict["StormCenter"] = "%s this was %s. 
" % \ + (dict["StormCenter"], + self._removeKM(nhcReference.strip())) + + #---------------------------------------------------------------- + # Add the maximum sustained wind speed phrase + + dict["StormCenter"] = "%s maximum sustained winds were %s." % \ + (dict["StormCenter"], + self._removeKM(dict["StormIntensity"])) + + #---------------------------------------------------------------- + # Now add the storm motion + + dict["StormCenter"] = "%s the storm motion was %s." % \ + (dict["StormCenter"], + self._removeKM(dict["StormMotion"])) + + #################################################################### + #################################################################### + # 12/15/2010 (MHB) - we should not need this anymore, but will + # leave it for the 2011 season as a fail-safe. + #-------------------------------------------------------------------- + # Search the product for the legacy storm info section - in case + # the new NHC style was not found + + stormInfoSearch = \ + re.search('(?is)(AT +(\d+ +[AP]M [AECMPH][DS]T)' + + '\.{3}\d+ *(Z|UTC)\.{3}THE (CENTER|REMNANTS|EYE) .+)', + tcp) + + # Display some debug info - if flag is set + self.debug_print("storminfoSearch = '%s'" % (stormInfoSearch)) +## print stormInfoSearch.groups() + + # If we found the storm info section of the product + if stormInfoSearch is not None: +# for group in stormInfoSearch.groups(): +# print '\t' + '-'*50 +# print "%s\n" % (group) + + # Clean this section up a bit. Keep each paragraph separate + # by a single , but remove all others as well as extra + # spaces. 
Then store this text in the TCP dictionary + dict["StormInfo"] = stormInfoSearch.group(1).strip() + + # Set aside the first paragraph of the storm info since it + # contains the TPC-provided reference point - if we haven't + # already found this information + if len(dict["StormCenter"].strip()) == 0: + dict["StormCenter"] = dict["StormInfo"].split('\n')[0] + + # If we have not already found the advisory time - get it from + # the legacy format + if dict["StormTime"] == "|* Enter storm time *| ": + dict["StormTime"] = stormInfoSearch.group(2).strip() + + # Set aside the first paragraph of the storm info since it + # contains the TPC-provided reference point - if we haven't + # already found this information + if len(dict["StormCenter"].strip()) == 0: + dict["StormCenter"] = dict["StormInfo"].split('\n')[0] + + #=================================================================== + # Now try to grab the repeated storm information summary + + repeatInfo = re.search("(?is)(\.{3}SUMMARY.+?\.)\n *\n", + tcp) + # If we cannot find the summary, try to find a "repeating" section + if repeatInfo is None: + repeatInfo = re.search("(?is)(REPEATING.+?\.)\n *\n", tcp) +## print repeatInfo + + # If we found the repeated storm information summary + if repeatInfo is not None: + + # Clean up this paragraph + summary = repeatInfo.group(1).strip() + + #=============================================================== + # Now try to grab the latest storm location - if we need it + + if dict["StormLat"] == "" or dict["StormLon"] == "": + + # Search the product for the storm location section + locationSearch = \ + re.search('(?is).+LOCATION.*?(\d+\.\d+ *N).+?' 
+ + '(\d+\.\d+ *[EW])', summary) + + # Display some debug info - if flag is set + self.debug_print("locationSearch = '%s'" % (locationSearch)) +## print locationSearch.groups() + + # If we found the storm location section of the product + if locationSearch is not None: + + # Pick off the storm latitude and longitude + dict["StormLat"] = locationSearch.group(1).strip() + dict["StormLon"] = locationSearch.group(2).strip() + + #=============================================================== + # Now try to grab the latest storm intensity - if we need it + + if dict["StormIntensity"] == "": + + # Search the product for the storm intensity section + intensitySearch = \ + re.search('(?i).+MAXIMUM SUST.+?(\d+ *MPH)', summary) + + # Display some debug info - if flag is set + self.debug_print("intensitySearch = '%s'" % + (intensitySearch)) + + # If we found the storm intensity section of the product + if intensitySearch is not None: + + # Pick off the storm intensity + dict["StormIntensity"] = intensitySearch.group(1).strip() + + #=============================================================== + # Now try to grab the latest storm motion - if we need it + + if dict["StormMotion"] == "": + + # Search the product for the storm motion section + motionSearch = re.search('(?i).+MOVEMENT\.{3}(.+?\d+ MPH)', + summary) + if motionSearch is None: + motionSearch = re.search('(?i).+MOVEMENT(.+?\d+.+?)\.', + summary) + + # Display some debug info - if flag is set + self.debug_print("motionSearch = '%s'" % (motionSearch)) + + # If we found the storm motion section of the product + if motionSearch is not None: + + # Pick off the storm motion + motion = motionSearch.group(1).strip() + + # Fix the motion (i.e no '...') + dict["StormMotion"] = re.sub('(?i)\.{3}', ' the ', + motion) + + # end possible removal - 12/15/2010 (MHB) + #################################################################### + #################################################################### + + 
#======================================================================== + # Display final decoded information from TCP + +## print "\n\n" + "*" *80 +## print "Final TCP Info...\n" +## print 'dict["StormType"] = ', dict["StormType"] +## print 'dict["StormName"] = ', dict["StormName"] +## print 'dict["StormTime"] = ', dict["StormTime"] +## print 'dict["StormLat"] = ', dict["StormLat"] +## print 'dict["StormLon"] = ', dict["StormLon"] +## print 'dict["StormReference"] = ', dict["StormReference"] +## print 'dict["StormIntensity"] = ', dict["StormIntensity"] +## print 'dict["StormMotion"] = ', dict["StormMotion"] +## print 'dict["StormInfo"] = ', dict["StormInfo"] +## print 'dict["StormCenter"] = ', dict["StormCenter"] + + # Return the dictionary will all the information we found in the TCP + return dict + + def _cleanText(self, text=''): + # Cleans up text for easier string searches, but retains paragraphs + + # Replace all single characters with a space + text = re.sub("\n(?! *\n)", " ", text) + + # Ensure all text is only single-spaced + text = re.sub(" +", " ", text) + + # Remove all spaces at the start of a new paragraph + text = re.sub("(?m)^ +", "", text) + + # Do not allow any spaces after an ellipsis + text = re.sub("\.{3} +", "...", text) + + # Finally, ensure the paragraphs are put back + text = re.sub("\n", "\n\n", text) + + # Return the cleaned-up text + return text + + def _grabHeadline(self, text=''): + # Get first headline found in text and return it as a string + + # Fixed pattern to grab headline (MHB 04/08/2009) + # See if there is a headline in this text + headlineSearch = re.findall("(?ism)^(\.{3}.+?\.{3}) *\n", text) + + self.debug_print("headlineSearch = %s" % (headlineSearch)) + + # If we found a headline + if len(headlineSearch) > 0: + + # Return the first cleaned-up headline string we found + return self._cleanText(headlineSearch[0]) + + # Otherwise, return an indicator there is no headline in this text + else: + return '' # Changed to an 
null string instead of None + # (MHB 04/08/2009) + + # Modified 4/22/09 (MHB) - fixed pattern to grab entire synopsis. + # Previous version only seemed to grab the last section of the overview. + def _grabOverview(self, text=''): + # Grab the overview section of a previous HLS from the overall + # overall headline to the start of the first zone segment + + # See if there is an overwiew in this text + overviewSearch = re.search("(?is)(\.+.+)?[A-Z]{2}Z\d{3}", text) + + # If we found a headline + if overviewSearch is not None: + + # Remove any zone blocks we may have grabbed by accident + overview = re.sub("(?is)[A-Z]{2}Z\d{3}.+", "", + overviewSearch.group(1).strip()) + + # Return the cleaned-up overview string + return overview + + # Otherwise, return an indicator there is no overview in this text + else: + return '' + + # Modified 12/15/2010 (MHB) - added a new flag which will cause the + # grabbed section text to be wrapped in framing codes if set to True. + def _grabSection(self, text='', section='', useFrameCodes=False): + # Grab the specified subsection of text from the overall text + + #print "\n\nGRABBING SECTION", section + #print "'%s'" % (text) + + # If a subsection header was defined + if section != '': + + # Add ending text so that last section can be found. + text = text + "\n..." + + # See if we can find it + sectionSearch = re.search("(?ism).*^%s(.+?)^\." 
% (section), text) + + # If we found the specified subsection + if sectionSearch is not None: + +# print sectionSearch.groups() +# print "'%s'" % (self._cleanText(sectionSearch.group(1).strip())) + # Clean it up + sectionText = self._cleanText(sectionSearch.group(1).strip()) + + # If we should wrap framing codes around this text + if useFrameCodes: + sectionText = self._frame(sectionText) # do it + + # Return the cleaned-up subsection + return section + sectionText + + # If we made it this far, return a null string + return '' + + def _getProductInfo(self, argDict): + # The current argDict['hazards'] is set automatically by TextFormatter.py + # to use the zone combinations. We need to re-do it to have all the + # hazards from the entire area + #print "\n***************getProductInfo calling getHazardsTable" + allAreas = self._inlandAreas()+self._marineAreas()+self._coastalAreas() + argDict["combinations"]= [(allAreas,"Region1")] + argDict['definition'] = self._definition + hazards = self._getHazardsTable(argDict, self.filterMethod) + argDict['hazards'] = hazards + + # Set up the areaDictionary for all to use + accessor = ModuleAccessor.ModuleAccessor() + self._areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") + # Get the statistics and general information for the segments + # to be used for the Overview sections + info = self._getSegmentInfo(self._segments) + info.maxWind_CWA_MAOR = self._getWindStats( + argDict, self._sampler, self._analysisList_HLS(), + self._timeRangeList, self._cwaMaorLabel) + return info + + # Modified 12/15/2010 (MHB) - fixed a potential problem with the + # _stormTypeName variable. If used as a failsafe it would have come out + # as "Cyclone Tropical" instead of "Tropical Cyclone". Also disabled + # the "Unnamed" option. 
+ def _getStormInfo(self, argDict, info): + # Get the Storm information + st = self._StormInfo + self._stormType = "Tropical" + self._stormName = "Cyclone" + self._stormTypeName = self._stormType + " " +self._stormName + + # Get the name +# if st.find("N/A") >=0: +# self._stormTypeName = self._StormInfo_entry +# if len(self._stormTypeName.strip()) == 0: +# self._stormTypeName = self._frame("Enter Storm Name") +# return + + # Get the product + if st == "Enter PIL below (e.g. TCPEP1):": + productID = self._StormInfo_entry + else: productID = self._StormInfo + if self._useTestTCP(): + self._TCP = self._TCP_Product() + else: + self._TCP = self.getPreviousProduct(productID) + stormDict = self._grabStormInfo(self._TCP) + self._stormName = stormDict.get("StormName", "") + self._stormType = stormDict.get("StormType", "") + self._stormTypeName = self._stormType + " " + self._stormName + self._decodeStormInfo(stormDict, info) + # Storm movement in mph and the stated movement trend + self._stormMovementTrend = "Storm Motion was " + stormDict.get("StormMotion","") + # Storm intensity in mph and the stated intensity trend. + self._stormIntensityTrend = "Storm Intensity was " + stormDict.get("StormIntensity","") + + ## New version from MHB 1/13/10 + def _decodeStormInfo(self, stormDict, info): + self._stormTime = "|* Enter Storm Time *| " + self._stormLat = "|* Enter Storm Lat *| " + self._stormLon = "|* Enter Storm Lon *| " + self._stormLocation = "|* Enter Storm Location *| " + self._stormReference = "" + self._stormLocalReferences = "" + para = stormDict.get("StormCenter", "") + # print "\npara", len(para), para + if len(para)<= 0: + return + + # Create the time string + self._stormTime = self._formatLocalTime(para, info.allAreas) + + # Find stormLat, stormLon and stormLocation + # e.g. 
LATITUDE 15.7 NORTH...LONGITUDE 80.0 WEST + stormLocation ="" + stormLat = None + stormLon = None + + # Make a pattern to find the latest storm location + coordPtn = re.compile("(?i)(LATITUDE ([\d\.]+) ?((N|S)(O[RU]TH)?))..." + + "(AND )?(LONGITUDE ([\d\.]+) ?((W|E)([AE]ST)?)).+?") +## + "OR ((ABOUT )?.+)") + + # Make a pattern to find the NHC reference location + refPtn = re.compile("(?i)(WAS|OR) ((ABOUT )?\d+ MILES.+?" + + "(NORTH|SOUTH|EAST|WEST).+?)\.") + + # Try to find these patterns in the text + coordPtnMatch = coordPtn.search(para) +## print "+" * 90 +## print "\ncoordinate search..." +## print coordPtnMatch.groups() + + refPtnMatch = refPtn.search(para) +## print "\nreference search..." +## print refPtnMatch.groups() + + # If we found the coordinates we were after + if coordPtnMatch is not None: + + # If we have the correct paragraph, set aside the latitude and + # longitude info as numbers + self._stormLat = float(coordPtnMatch.group(2)) + self._stormLon = float(coordPtnMatch.group(8)) # was 7 + + # Adjust latitude and longitude as need for "other" hemispheres + if coordPtnMatch.group(4) in ["S", "s"]: + self._stormLat *= -1.0 + + if coordPtnMatch.group(10) in ["W", "w"]: + self._stormLon *= -1.0 + + # Construct the storm location subphrase + self._stormLocation = "%s...%s" % (coordPtnMatch.group(1), + coordPtnMatch.group(7)) # was 6 + + # If we found the primary NHC reference we were after + if refPtnMatch is not None: + + # Set aside all the geographic reference text +## stormReference = coordPtnMatch.group(11) + stormReference = refPtnMatch.group(2) + + # Watch out for some grammar gotchas with this reference + stormReference = re.sub("(?i)^(WAS|OR) ", "", stormReference) + + # See if there are multiple geographic references + if re.search('(?i) and ', stormReference) is not None: + + # Yes there are multiple references, so only keep the + # first one + stormReference = re.sub("(?i) AND .+", "", stormReference) + + # Also remove any metric distances + 
self._stormReference = self._removeKM(stormReference) + + # Miles/km from chosen local reference + self._stormLocalReferences = self._calcLocalReferences( + self._stormLat, self._stormLon) + +## print "stormLocalRefs = ", self._stormLocalReferences + + # Compare the NHC reference to the local references + for localRef in self._stormLocalReferences: + +## print self._stormReference, localRef + + # Get the locations from these statements + nhcRef = re.search('(?i)(north|south|east|west) of (.+)', + self._stormReference) + testRef = re.search('(?i)(north|south|east|west) of (.+)', + localRef) + +## print "nhcRef = '%s'\ttestRef = '%s'" % (nhcRef.group(2), testRef.group(2)) + + # If we have a local reference that matches the national + # center reference + if testRef is not None and nhcRef is not None and \ + re.search("(?i)%s" % (testRef.group(2).strip()), + nhcRef.group(2)) is not None: + + # Do not include the national reference + self._stormReference = "" + + # Modified 12/15/2010 (MHB) - modified to recognize the new way NHC will + # present metric speeds. Will continue to recognize the "old" way for + # testing purposes as well. + def _removeKM(self, words): + # Remove references to KM e.g. + # 420 KM... 100 KM/HR... + +# print "words = '%s'" % (words) + + kmSearch = re.compile("\.\.\. *[0-9]+ +(KM|KM/HR?) *\.?\.?\.?") + + # Replace metric reference with a space to keep words from mashing + # together. + words = kmSearch.sub(" ", words) + + # Make sure we don't have any double space issues with this text + doubleSpaces = re.findall(' +', words) + for doubleSpace in doubleSpaces: + words = re.sub(doubleSpace, ' ', words) + +# print "\tfinal words = '%s'" % (words) + return words + + + def _formatLocalTime(self, para, areas): + # Create a time string in local time + # e.g. 
2 AM EDT + # Get the Z time hour + timeSearch = re.compile("...([0-9]+) *(Z|UTC)...") + timeStr = timeSearch.search(para) + +## gmtStr = para[timeStr.start():timeStr.end()] +## gmt = gmtStr.strip("...").replace("Z","") +## gmtHour = int(gmt)/100 + + # This code could bomb in the unlikely event we don't find a UTC + # time. We should probably add some kind of default hour here, + # keyed off the current hour, to prevent this. (MHB) + try: + # Convert the hour portion of the time string to an integer + gmtHour = int(timeStr.group(1)[:2]) + except: + gmtHour = time.gmtime().tm_hour + + gmtTR = self.createTimeRange(gmtHour, gmtHour+1, "Zulu") + gmtTime = gmtTR.startTime().unixTime() + + # Now make a string for each time zone + zoneList = self._getTimeZoneList(areas) + timeStrs = [] + timeDesc = "" + for timeZone in zoneList: + timeStr = self.formatTimeString(gmtTime, "%I %p %Z ", timeZone) + timeStr = string.replace(timeStr, " ", " ") + timeStr = string.strip(timeStr) + timeStr = timeStr.lstrip("0") + if timeStr not in timeStrs: + if len(timeStrs) > 0: + timeDesc += "...OR " + timeStrs.append(timeStr) + timeDesc += timeStr + return timeDesc + + def _getTimeZoneList(self, areaList): + # NOTE -- this code was taken from the middle of getAreaHeader + # in Header.py -- it really should be put back in and used + # in Header.py, but to avoid confusion, I'm repeating it here + # get this time zone + thisTimeZone = os.environ["TZ"] + zoneList = [] + # check to see if we have any areas outside our time zone + for areaName in areaList: + if areaName in list(self._areaDict.keys()): + entry = self._areaDict[areaName] + if "ugcTimeZone" not in entry: #add your site tz + if thisTimeZone not in zoneList: + zoneList.append(thisTimeZone) + continue # skip this entry + timeZoneList = entry["ugcTimeZone"] + if type(timeZoneList) is bytes: # a single value + timeZoneList = [timeZoneList] # make it into a list + for timeZone in timeZoneList: + if timeZone not in zoneList: + 
zoneList.append(timeZone) + # if the resulting zoneList is empty, put in our time zone + if len(zoneList) == 0: + zoneList.append(thisTimeZone) + # if the resulting zoneList has our time zone in it, be sure it + # is the first one in the list + try: + index = zoneList.index(thisTimeZone) + if index != 0: + del zoneList[index] + zoneList.insert(0, thisTimeZone) + except: + pass + return zoneList + + def _calcLocalReferences(self, lat0, lon0): + localRefs = [] + refList = self._LocalReferencePoints + #refList.append(("Grand Cayman", (19.2, -81.4))) + # Limit reference points + refLimit = self._referencePointLimit() + if len(refList) > refLimit: + refList = refList[0:refLimit] + for label, latLon in refList: + lat, lon = latLon + localRef = self._calcReference(lat0, lon0, lat, lon) + localRef = localRef + " OF " + label + localRef = localRef.replace(",","") + localRefs.append(localRef) + return localRefs + + def _oldCalcReference(self, lat0, lon0, lat1, lon1): + RAD_TO_DEG = 57.296083 + #print "\ncalcReference", lat0, lon0, lat1, lon1 + #lat1 = lat0 + 1.0 + #lon1 = lon0 + 1.0 + latDist = (lat0-lat1) * 111.0 + avgLat = abs(lat0+lat1) / 2.0 + lonDist = (lon0-lon1) * 111.0 * cos(avgLat/RAD_TO_DEG) + #lonDist = 111.0 + #latDist = 111.0 + distKm = sqrt((latDist*latDist)+(lonDist*lonDist)) + distMph = distKm * 0.62 + # Round to nearest 10 + distMph = self.round(distMph, "Nearest", 10) + distMph_str = repr(int((distMph/10)*10)) + distKm_str = repr(int((distKm/10)*10)) + direct = atan2(lon0-lon1, lat0-lat1) * RAD_TO_DEG + direction = self._dirInEnglish(direct) + localRef ="About "+distMph_str+" miles "+direction + print("localRef", localRef) + return localRef + + def _calcReference(self, lat0, lon0, lat1, lon1): + #return self._oldCalcReference(lat0, lon0, lat1, lon1) + distKm = self._distanceFromLatLon(lat0, lon0, lat1, lon1) + distMph = distKm * 0.62 + # Round to nearest 10 + distMph = self.round(distMph, "Nearest", 10) + distMph_str = repr(int((distMph/10)*10)) + 
#distKm_str = `int((distKm/10)*10)` + direction = self._bearing(lat1, lon1, lat0, lon0) + direction = self._dirInEnglish(direction) + localRef ="About "+distMph_str+" miles "+direction + #print "localRef", localRef + return localRef + + # Returns the distance from lat0, lon0 to lat1, lon1 in kilometers + def _distanceFromLatLon(self, lat0, lon0, lat1, lon1): + R = 6371.0 + lat0 = lat0 * DEG_TO_RAD + lon0 = lon0 * DEG_TO_RAD + lat1 = lat1 * DEG_TO_RAD + lon1 = lon1 * DEG_TO_RAD + dist = acos(sin(lat0) * sin(lat1) + cos(lat0) * cos(lat1) * cos(lon1 - lon0)) * R + return dist + + def _bearing(self, lat0, lon0, lat1, lon1): + + dlat = (lat0 - lat1) * DEG_TO_RAD + dlon = (lon0 - lon1) * DEG_TO_RAD + + y = sin(dlon) * cos(lat1 * DEG_TO_RAD) + x = cos(lat0 * DEG_TO_RAD) * sin(lat1 * DEG_TO_RAD) - \ + (sin(lat0 * DEG_TO_RAD) * cos(lat1 * DEG_TO_RAD) * cos(dlon)) + + direction = (atan2(x, y) / DEG_TO_RAD) - 90.0 + if direction < 0.0: + direction = direction + 360.0 + direction = direction % 360 + + return direction + +## lat0 = 30.0 +## lat1 = 20.0 +## lon0 = -80.0 +## lon1 = -90.0 + +## print "complex dist:", distComplex(lat0, lon0, lat1, lon1) +## print "bearing:", bearing(lat0, lon0, lat1, lon1) + + + def _dirInEnglish(self, direction): + dirList = ["north", "north-northeast", "northeast", "east-northeast", + "east", "east-southeast", "southeast", "south-southeast", + "south", "south-southwest", "southwest", "west-southwest", + "west", "west-northwest", "northwest", "north-northwest"] + dirIndex = int((direction + 11.25) / 22.5) + if dirIndex > 15: + dirIndex = dirIndex - 16 + return dirList[dirIndex] + + ##################################################################################### + ##################################################################################### + ####### OVERVIEW Sections + +## def Overview_NewInformation(self, title, sectionDict, info): +## t="" +## ec = self._EventContext +## if ec =="Abbreviated": +## t+="New watches and or warnings 
have been issued. \n" +## else: +## t+= self._frame("Please enter new information here. Keep it concise.") + "\n" +## return title + t + + def Overview_NewInformation(self, title, sectionDict, info): + t="" + ec = self._EventContext + print("info.hazardHdlns = ", info.hazardHdlns) + + if ec =="Abbreviated": + hdlns = info.hazardHdlns + #print "\n Headlines" + reported = 0 + for hazardHdln in hdlns: + key, landList, marineList, coastalList, inlandList = hazardHdln + #print "hazard", hazardHdln + hdln, act, phen, sig = key + if phen == "HU" and sig == "S": + continue + if act in self._ignoreActions(): + continue + if hdlns.index(hazardHdln) > 0: + t+= " and " + t+= "A " + hdln + reported += 1 + if reported > 0: + if reported > 1: t+= " have " + else: t+= " has " + t+="now been issued. " + elif ec == "PostEvent": + t+="Warnings have been discontinued.\n" + + else: + t+= self._frame("Please enter new information here. Keep it concise.") + "\n" + return title + t + +############################################################################################ + + def AreasAffected(self, title, sectionDict, info): + t = title + + if info.anyLand and info.anyMarine: + t+= "This local statement provides important information and recommended actions for people and marine interests in " + t+=self._all_select(info.allLand and info.allMarine) + t+= " locations and coastal water of "+self._cwa_maor_descriptor()+ ". " + + else: + if info.anyLand: + t+= "This local statement provides important information and recommended actions for people in " + t+=self._all_select(info.allLand) + t+= " locations within " + self._cwa_descriptor() + ". " + + elif info.anyMarine: + t+= "This local statement offers guidance and recommendations for mariners...as well as other marine interests...along " + t+= self._all_select(info.allMarine) + t+= " coastal water of " + self._maor_descriptor() + ". 
" + return t + "\n" + + def _all_select(self, value): + if value: return "All" + else: return "Select" + + def _generalAreas(self, segmentAreas): + """This method formats the general area description given the list of segmentAreas. + """ + # This method could grab information from a file formatted elsewhere. + # To use this capability, call this method with the appropriate + # argument: + # + # text = self._ingestExternalFile("") + + text = '' + + # Make the general area Phrase - similar to HWO + generalAreas = self.getGeneralAreaList(segmentAreas, areaDictName=self._areaDictionary) + + # Make a list of all general areas we found + #parts of the states + areaList = [] + for state, partOfState, names in generalAreas: + if partOfState == '' or partOfState == ' ': + areaList.append(state) + else: + areaList.append(partOfState + " " + state) + + # Add this general area to the text + areaPhrase = self.punctuateList(areaList) + + # If we found any text - finish it up + if len(areaPhrase.strip()) > 0: + text = "%s.\n\n" % (areaPhrase) + + # Return the completed text + return text + + ##################################################################################### + def WatchesWarnings(self, title, sectionDict, info): + t= title + ec = self._EventContext + fmtDict = self._overviewFormat() + + # Any WW will be False if there are no Watches or Warnings in the CWA or MAOR + anyWW = self._checkHazard( + info.hazardHdlns, [("HU","W"),("TY", "W"),("TR","W"), ("HU","A"),("TY", "A"),("TR","A")]) + + # Find HU_S headlines and separate into "land" and "marine" + # There will only be ONE HU S entry in hazardHdlns since they are + # consolidated across segments + HU_S_Hdlns = [] + HU_S_landList = [] + HU_S_marineList = [] + for key, landList, marineList, coastalList, inlandList in info.hazardHdlns: + hdln, act, phen, sig = key + if act in self._ignoreActions(): + continue + if phen == "HU" and sig == "S": + HU_S_Hdlns.append((key, coastalList + inlandList, "land")) + 
HU_S_Hdlns.append((key, marineList, "marine")) + if len(coastalList + inlandList) > 0: + HU_S_landList = HU_S_landList + coastalList + inlandList + if len(marineList) > 0: + HU_S_marineList = HU_S_marineList + marineList + + if ec == "NonEvent" and not anyWW and len(HU_S_Hdlns)>0: + t+="Tropical cyclone watches and warnings are not in effect anywhere across " + t+=self._cwa_maor_descriptor() + ".\n" + + elif ec == "PreEvent" and not anyWW: + if len(HU_S_landList) > 0: + t+="Although tropical cyclone watches or warnings are not in effect anywhere across " + t+=self._cwa_descriptor() + t+="...possible impacts from related hazards are becoming a concern for " + if fmtDict["land"] == "listAreas": + t+= self._describeLocations(info, HU_S_landList, end="...")+ ".\n" + else: + t+="portions of the area.\n" + + if len(HU_S_landList) > 0 and len(HU_S_marineList)>0: t+="\n" + + if len(HU_S_marineList)>0: + t+="For marine interests...although tropical cyclone watches or warnings are not in effect anywhere across " + t+=self._maor_descriptor() + t+="...possible impacts from related hazards are becoming a concern for " + if fmtDict["marine"] == "listAreas": + t+= self._describeLocations(info, HU_S_marineList, end="...")+ ".\n" + else: + t+="portions of the "+ self._maor_descriptor() + ".\n" + + elif ec == "PostEvent" and not anyWW: # and (len(HU_S_landList)>0 or len(HU_S_marineList)>0): + t+="Tropical cyclone watches and warnings are no longer in effect anywhere across " + t+=self._cwa_maor_descriptor() + ".\n" + + elif ec == "PostTropical" and not anyWW: # and (len(HU_S_landList)>0 or len(HU_S_marineList)>0): + t+="Tropical cyclone watches and warnings are no longer in effect anywhere across " + t+=self._cwa_maor_descriptor() + t+=". 
The issuance of tropical cyclone watches and warnings is being transitioned over to watches and warnings traditionally issued for non-tropical cyclone events.\n" + else: + t+=self._overview_headlines(info) + if ec == "Abbreviated": + t+=self._definition_stmt(info) + if anyWW: t+=self._overview_HU_S_headlines(info, HU_S_Hdlns) + t+=self._additional_headlines(info) + return t + + def _definition_stmt(self, info): + t = "" + foundwatch = False + foundwarning = False + desc = " means that " + descwatch = " conditions are possible within the next 48 hours somewhere within the specified " + \ + "areas.\n\n" + descwarning = " conditions are expected within the next 36 hours somewhere within the specified " + \ + "areas.\n\n" + ppwatch = " All persons in the watch areas should review their preparedness plan and be ready to " + \ + "implement it should a warning be issued for their area.\n\n" + ppwarning = " All persons in the warning areas should already have preparations underway to protect " + \ + "life and property.\n\n" + + # Initialize a new dictionary to pair phenSig codes with their action + hazardDict = {} + + # Iterate over all of the hazards + for hazardTuple in info.hazardHdlns: + print("\n\n" + "*"*80) + print("hazardTuple is:", hazardTuple) + + # Grab the phenomena code + hazard = hazardTuple[0] + print("hazard is:", hazard) + + # Split up the phenomena code + (title, action, phen, sig) = hazard + + # Store the action for this phenomena + hazardDict["%s.%s" % (phen, sig)] = action + + #----------------------------------------------------------------------- + # Look at each of the hazards + if self._checkHazard(info.hazardHdlns, [("HU","W")]) and \ + hazardDict["HU.W"] not in ["CAN", "UPG"]: + hazardPhen = "Hurricane" + hazardSig = "Warning" + hazardPhenSig = hazardPhen+" "+hazardSig + foundwarning = True + t+= "A "+hazardPhenSig + desc + hazardPhen + descwarning + + if self._checkHazard(info.hazardHdlns, [("TY", "W")]) and \ + hazardDict["TY.W"] not in ["CAN", 
"UPG"]: + hazardPhen = "Typhoon" + hazardSig = "Warning" + hazardPhenSig = hazardPhen+" "+hazardSig + foundwarning = True + t+= "A "+hazardPhenSig + desc + hazardPhen + descwarning + + if self._checkHazard(info.hazardHdlns, [("TR","W")]) and \ + hazardDict["TR.W"] not in ["CAN", "UPG"]: + hazardPhen = "Tropical Storm" + hazardSig = "Warning" + hazardPhenSig = hazardPhen+" "+hazardSig + foundwarning = True + t+= "A "+hazardPhenSig + desc + hazardPhen + descwarning + + if foundwarning: + t+= ppwarning + + if self._checkHazard(info.hazardHdlns, [("HU","A")]) and \ + hazardDict["HU.A"] not in ["CAN", "UPG"]: + hazardPhen = "Hurricane" + hazardSig = "Watch" + hazardPhenSig = hazardPhen+" "+hazardSig + foundwatch = True + t+= "A "+hazardPhenSig + desc + hazardPhen + descwatch + + if self._checkHazard(info.hazardHdlns, [("TY", "A")]) and \ + hazardDict["TY.A"] not in ["CAN", "UPG"]: + hazardPhen = "Typhoon" + hazardSig = "Watch" + hazardPhenSig = hazardPhen+" "+hazardSig + foundwatch = True + t+= "A "+hazardPhenSig + desc + hazardPhen + descwatch + + if self._checkHazard(info.hazardHdlns, [("TR","A")]) and \ + hazardDict["TR.A"] not in ["CAN", "UPG"]: + hazardPhen = "Tropical Storm" + hazardSig = "Watch" + hazardPhenSig = hazardPhen+" "+hazardSig + foundwatch = True + t+= "A "+hazardPhenSig + desc + hazardPhen + descwatch + + if foundwatch: + t+= ppwatch + + t+= "In order to make the best decisions...be sure that you understand the terminology and " + \ + "definitions associated with tropical cyclone events.\n\n" + + + return t + + # In order to have the HazardsTable use the allowedHeadlines list, + # we need to supply a filterMethod that uses allowedHeadlines instead of allowedHazards + def _getAllowedHazardList(self, allowedHazardList=None): + if allowedHazardList is None: + allowedHazardList = self.allowedHazards() + hazardList = [] + for h in allowedHazardList: + if type(h) is tuple: + hazardList.append(h[0]) + else: + hazardList.append(h) + return hazardList + + def 
_altFilterMethod(self, hazardTable, allowedHazardsOnly=False): + # Remove hazards not in allowedHeadlines list + allowedHazardList = self._getAllowedHazardList(self.allowedHeadlines()) + return self._filterHazards(hazardTable, allowedHazardList, + allowedHazardsOnly) + + def _filterHazards(self, hazardTable, allowedHazardList, + allowedHazardsOnly=False): + newTable = [] + hazStr = "" + for i in range(len(hazardTable)): + if hazardTable[i]['sig'] != "": # VTEC + hazStr = hazardTable[i]['phen'] + "." + hazardTable[i]['sig'] + else: #non-VTEC + hazStr = hazardTable[i]['phen'] + + if hazStr in allowedHazardList: + newTable.append(hazardTable[i]) + if allowedHazardsOnly: + return newTable + # get a raw list of unique edit areas + zoneList = [] + for t in newTable: + if t['id'] not in zoneList: + zoneList.append(t['id']) + for zone in zoneList: + # Remove lower priority hazards of the same type + self.filterZoneHazards(zone, newTable) + return newTable + + def _overview_headline_groups(self): + landAreas = self._inlandAreas()+ self._coastalAreas() + return [ + (["HU.W"], landAreas, "Hurricane Warning"), + (["TY.W"], landAreas, "Typhoon Warning"), + (["HU.W"], self._marineAreas(), "For marine interests...a Hurricane Warning"), + (["TY.W"], self._marineAreas(), "For marine interests...a Typhoon Warning"), + + (["TR.W", "HU.A"], landAreas, "Tropical storm warning and a Hurricane Watch"), + (["TR.W", "TY.A"], landAreas, "Tropical storm warning and a Typhoon Watch"), + (["TR.W", "HU.A"], self._marineAreas(), + "For marine interests...a tropical storm warning and a Hurricane Watch"), + (["TR.W", "TY.A"], self._marineAreas(), + "For marine interests...a tropical storm warning and a Typhoon Watch"), + + (["TR.W"], landAreas, "Tropical storm warning"), + (["TR.W"], self._marineAreas(), "For marine interests...a Tropical Storm Warning"), + + (["HU.A"], landAreas, "Hurricane watch"), + (["TY.A"], landAreas, "Typhoon watch"), + (["HU.A"], self._marineAreas(), "For marine 
interests...a Hurricane Watch"), + (["TY.A"], self._marineAreas(), "For marine interests...a Typhoon Watch"), + + (["TR.A"], landAreas, "Tropical storm watch"), + (["TR.A"], self._marineAreas(), "For marine interests...a Tropical Storm Watch"), + + ] + + def _overview_headlines(self, info): + # Put together Watches and Warnings + # Need to group hazards + hazardGroups = self._overview_headline_groups() + hdlns = [] + for hazards, areas, hdln in hazardGroups: + hdlns = hdlns + self._getHazardHdlns(info, hazards, hdln, areas) + t="" + t+=self._headlines(info, hdlns, qualifier=True) + return t + + def _getHazardHdlns(self, info, hazards, hdln, areas): + # For the overview -- + # Return a list of (key, areaList) tuples for the given hazards + # where key is (hdln, act, phen, sig) + # Use ignoreActions and then separate w.r.t. NEW, etc versus CON + hazardTable = self._argDict["hazards"] + newAreas = [] + conAreas = [] + sortedHazards = copy.deepcopy(hazards) + sortedHazards.sort() + if len(hazards) > 1: + # If we are testing for more than one hazard (e.g. TR.W and HU.A) + # If an area has both hazards with the same action, then the normal + # algorithm will catch it. + # However, if one is CON and the other NEW, then we will allow + # mixing of CON and NEW + tryMixingConNew_flag = True + else: + tryMixingConNew_flag = False + #print "\n Checking for ", hazards + for area in areas: + #print " Area ", area + # For each area determine the set of newHazards and conHazards + areaHazards = hazardTable.getHazardList([area]) + #print " hazards", areaHazards + newHazards = [] + conHazards = [] + for areaHaz in areaHazards: + act = areaHaz['act'] + if act in self._ignoreActions(): + continue + phenSig = areaHaz['phen'] + "." 
+ areaHaz['sig'] + if phenSig == "HU.S": + continue + if act == "CON": conHazards.append(phenSig) + else: newHazards.append(phenSig) + newHazards.sort() + conHazards.sort() + if newHazards == sortedHazards: + newAreas.append(area) + elif conHazards == sortedHazards: + conAreas.append(area) + elif tryMixingConNew_flag: + newHazards = newHazards + conHazards + newHazards.sort() + if newHazards == sortedHazards: + newAreas.append(area) + #print "new con Areas", newAreas, conAreas + # Compose hdln lists + new = [] + con = [] + phen, sig = hazards[0].split('.') + if len(newAreas) > 0: + key = (hdln, "NEW", phen, sig) + new = [(key, newAreas)] + if len(conAreas) > 0: + key = (hdln, "CON", phen, sig) + con = [(key, conAreas)] + #print "new, con", new, con + return new + con + + def _overview_HU_S_headlines(self, info, HU_S_Hdlns): + # Gather and report the HU_S headlines + t = "" + fmtDict = self._overviewFormat() + if fmtDict["land"]=="generic" and fmtDict["marine"]=="generic": + return t + + if len(HU_S_Hdlns) == 0: + return t + + for key, areaList, areaType in HU_S_Hdlns: + # Report only if there is a non-empty areaList and + # overview format is "listAreas" i.e. specific + if len(areaList) == 0 or fmtDict[areaType] == "generic": + continue + if areaType == "marine": t+="\nFor marine interests..." 
+ else: t+="\n" + t+="Although tropical cyclone watches or warnings are not in effect for " + t+= self._describeLocations(info, areaList, end="...") + t+= "possible impacts from related hazards are still a concern.\n" + return t + + def _additional_headlines(self, info): + # Report additional headlines + t="" + self._hazardHdlns, self._huAreas = self._getAdditionalHazards(info) + t+=self._getAdditionalHeadlines(self._hazardHdlns, self._huAreas, info) + return t + + def _getAdditionalHazards(self, info): + argDict = self._argDict + argDict['definition'] = self._definition + altHazards = self._getHazardsTable(argDict, self._altFilterMethod) + conTable = altHazards.consolidatedTableByID() + + # Consolidate across action codes + hazDict = {} + for hazard in conTable: + hdln=hazard['hdln'] + phen=hazard['phen'] + sig=hazard['sig'] + act=hazard['act'] + if act in self._ignoreActions(): + continue + for area in hazard['id']: + hazDict.setdefault((hdln, phen, sig), []).append(area) + + #print "hazDict", hazDict + hazardHdlns=[] + huAreas = [] +# print "\nAdditional Hazard Headlines" + for key in list(hazDict.keys()): + hdln, phen, sig = key + huAreas = huAreas + hazDict[key] + hazardHdln = ((hdln, "NEW", phen,sig), hazDict[key], [],[],[]) + #print " ", hazardHdln, hazDict[key] + hazardHdlns.append(hazardHdln) + return hazardHdlns, huAreas + + def _getAdditionalHeadlines(self, hazardHdlns, huAreas, info): + # We have a list of hazardHdlns and can use checkHazards + # Additional Hazards + t="" + hdlnList = self._checkHazard(hazardHdlns, [("FA","A"),("FF","A")], returnList=True) + print("hdlnList", hdlnList) + if len(hdlnList) > 0: + t+="\n" + t+=self._headlines(info, hdlnList, self._allPortions, ending=". 
") + t+="Please listen closely for any Flood Warnings that might be in effect for your area.\n" + + hdlnList = self._checkHazard(hazardHdlns, [("TO","A")], returnList=True) + print("hdlnList", hdlnList) + if len(hdlnList) > 0: + t+="\n" + t+=self._headlines(info, hdlnList, self._allPortions, ending=". ") + t+="Please listen closely for any Tornado Warnings that might be in effect for your area.\n" + + # Check additional hazards + checkHazards = [("CF","W"), ("CF","A"),("CF","Y"),("RP","S"),("SU","W"),("SU","A"),("SU","Y"), + ("SR","W"),("SR","A"),("GL","W"),("GL","A"), + ("SC","Y"),("SI","Y"),("SW","Y"), ("RB","Y")] + hazList = self._checkHazard(hazardHdlns, checkHazards, returnList=True) + if len(hazList) > 0: + t+= "\nPlease check the latest public and marine forecasts for detailed information about additional hazards.\n" + return t + + def _allPortions(self, info, hazAreas, prefix="", suffix=""): + # Used for overview headlines + descriptor, checkAreas = self._determineDescriptor(info, hazAreas) + portions = prefix + "portions of " + suffix + allPortions = self._checkAreaInclusion(checkAreas, hazAreas, "all of ", portions) + return allPortions + descriptor + + def _allParts(self, info, hazAreas): + # Used for overview additional headlines + descriptor, checkAreas = self._determineDescriptor(info, hazAreas) + allParts = self._checkAreaInclusion(checkAreas, hazAreas, "all ", "part ") + return allParts + "OF " + descriptor + ". " + + def _entirePortions(self, info, hazAreas): + # Used by the optional template for optional sections + return self._checkAreaInclusion( + info.allAreas, hazAreas, "the entire area. ", "portions of the area. 
") + + def _checkAreaInclusion(self, compareAreas, hazAreas, allWords, partWords): + words = allWords + for area in compareAreas: + if area not in hazAreas: + words = partWords + break + return words + + def _headlines(self, info, headlineList, areaWordMethod=None, + ending="\n\n", qualifier=False): + # Create the headlines from list of (key, hazAreas) + # where key is (hdln, act, phen, sig) + t = "" + for key, hazAreas in headlineList: + hdln, act, phen, sig = key + if act == "CON": actWords = " continues for " + else: actWords = " is in effect for " + # Skip HU.S headines + if (phen =='HU' and sig =='S'): + continue + if hdln[0] in ["A","I"]:a='an ' + elif hdln.find("for") == 0: a = ' ' + else: a ='a ' + + #print "\n Headline", hdln, phen + if areaWordMethod is not None: + areaWords=areaWordMethod(info, hazAreas) + else: + areaWords=self._describeLocations(info, hazAreas, qualifier=qualifier) + t+= a+hdln + actWords + areaWords + ending + return t + + def _areaWords(self, areas): + if areas == []: + return "" + names = [] + areaDict = self._areaDict + areas.sort() + for area in areas: + name = areaDict[area].get('altName', areaDict[area].get('ugcName', '')) + names.append(name) + areaTypeWords = "" + areaWords = self.formatCountyString("", names)[1:] + return areaWords + + def _describeLocations(self, info, areaList, end=". 
", prefix="", + suffix="", qualifier=False): + t = "" + fmtDict = self._overviewFormat() + inland, coastal, marine = self._areaType(areaList) + #print "inland, coastal, marine", inland, coastal, marine, areaList + if inland or coastal: fmt = fmtDict["land"] + else: fmt = fmtDict["marine"] + + if fmt == "generic": + suffix = "" + if qualifier: + if inland: suffix = "inland " + elif coastal: suffix = "coastal " + t+= self._allPortions(info, areaList, prefix, suffix) + else: + t+= "The following locations, " + self._areaWords(areaList) + t+= end + return t + + ##################################################################################### + def StormInformation(self, title, sectionDict, info): + t = title + + st = self._StormInfo +# if st.find("N/A (unnamed)") >= 0: +# t+="Although the system of concern has not been named..." +# t+="it is being actively monitored for signs of tropical cyclone development. " +# +# elif st.find("N/A (downgraded)")>= 0: +# t+=self._stormTypeName+" has been downgraded to below tropical storm strength..." +# t+="but will continue to be monitored until it no longer threatens the area. " +# +# else: + t+="At "+ self._stormTime + "...the center of " + + # Fix the grammar if dealing with "remnants" + if re.search("(?i)remnants", self._stormTypeName) is not None: + t+="The " + + t+=self._stormTypeName + " was located near " + t+=self._stormLocation + + # if we kept the national reference + if self._stormReference.strip() != "": + t+= "...OR " + self._stormReference + + # Finish off the storm location sentence + t+= ". " + + # Now add the local references + localRefs = self._stormLocalReferences + if len(localRefs) > 0: + t+= "This was " + for localRef in self._stormLocalReferences: + if localRefs.index(localRef) > 0: + orStr = "...or " + else: + orStr = "" + t+= orStr + localRef + t+= ". 
" + + # Do not place storm motion and intensity on separate lines of text +# t+="\n" + t = t.replace("miles...", "miles ") + sm = self._stormMovementTrend + si = self._stormIntensityTrend + + # Combine the storm motion and intensity before we frame them + + smi = "" + if sm != "": smi += sm + '.' + if si != "": smi += ' ' + si + '.' +## t+= self._frame(smi) + t += smi + return t + + ##################################################################################### + def SituationOverview(self, title, sectionDict, info): + t = title + un = self._Uncertainty + ec = self._EventContext + if ec == "Abbreviated": + hdlns = info.hazardHdlns + #print "\n Headlines" + reported = 0 + for hazardHdln in hdlns: + key, landList, marineList, coastalList, inlandList = hazardHdln + #print "hazard", hazardHdln + hdln, act, phen, sig = key + if phen == "HU" and sig == "S": + continue + if act in self._ignoreActions(): + continue + if hdlns.index(hazardHdln) > 0: + t+= " and " + t+= "A " + hdln + reported += 1 + if reported > 0: + if reported > 1: t+= " have " + else: t+= " has " + t+="now been issued. " + t+="A more detailed statement will follow shortly.\n" + + if ec in ["PreEvent","Watch","Warning"]: + if un=="High": + t+="It is vital that you do not focus on the exact forecast track. " + t+="To do so could result in bad decisions and place you or those you are " + t+="responsible for at greater risk. " + elif un == "Average": + t+="When making decisions...do not focus on the exact forecast track. " + + if ec != "Abbreviated": t+=self._frame("Succinctly describe the expected evolution of the event for the CWA & MAOR; which hazards are of greater (or lesser) concern, forecast focus, etc.")+ "\n" + + if ec in ["PreEvent", "Watch"]: + if info.anyLand: + t+="It is too early to provide exact wind and surge forecast values for specific locations. 
" + damage = self._getCategoryDamage(info.maxWind_CWA_MAOR) + if damage.strip() != "": + t+="A general concern should be for the possibility of "+damage+" somewhere within "\ + + self._cwa_descriptor() + ". " + + return t + ##################################################################################### + def Overview_PrecautionaryPreparednessActions(self, title, sectionDict, info): + t = title + ec = self._EventContext + if ec == "NonEvent": t+=self.overview_pp_nonEvent(info) + elif ec == "PreEvent": t+= self.overview_pp_preEvent(info) + elif ec == "Abbreviated": t+=self._overview_pp_abbrev(info) + elif ec == "Watch": t+=self._overview_pp_watch(info) + elif ec == "Warning": t+=self._overview_pp_warning(info) + elif ec == "Conditions": t+=self._overview_pp_conditions(info) + elif ec == "PostEvent": t+=self._overview_pp_postEvent(info) + elif ec == "PostTropical": t+=self._overview_pp_postTropical(info) + endStr = sectionDict.get("endStr", "") + return t + endStr + + def overview_pp_nonEvent(self, info): + t="" + if info.anyInland or info.anyCoastal: + t+= """ +People are urged to remain informed and listen for any +significant changes to the forecast. Do not listen to rumors or +uninformed opinions. Rather...seek authoritative information from +your local National Weather Service office and emergency +management. + +""" + + if info.anyCoastal or info.anyMarine: + t+= """ +Mariners should keep informed of the latest coastal waters +forecast. +""" + return self._frame(t.strip()) + + def overview_pp_preEvent(self, info): + t = "" + if info.anyInland or info.anyCoastal: + t+= """ +Even before the issuance of watches or warnings...it may become +necessary for local authorities to render evacuation orders. If +told to leave...do so as soon as possible. + +This is a good time for residents to go over their hurricane +disaster plan. Visitors are encouraged to check with hotel +management or with local officials regarding any actions they +should take. 
+ +The following are suggested actions that can be taken at this +time... +- check batteries for radios and flashlights. +- stock up on drinking water and canned or dried food. +- ensure you have a manual can opener. +- have enough for at least three to five days per person. +- gather medicines...toiletries...and first aid supplies. +- have a sufficient amount of cash on hand since credit cards and + automated cash machines do not work without power. +- check fuel levels on automobiles...generators...and chain saws. +- if you need to make a trip to the hardware store...the grocery + store...or the gas station...do so as early as possible. +- determine where you should seek shelter if the storm approaches + your area. +- consider whether you live in a potential evacuation zone. If + so...identify prescribed evacuation routes which lead out of the + threatened areas. +- learn the locations of official shelters. + +Please visit www.ready.gov for a more complete list of items to +include in an emergency preparedness kit. + +In all cases...heed the advice of local officials and comply with +any orders that are issued. + +""" + if info.anyCoastal or info.anyMarine: + t+= """ +Mariners should monitor the coastal waters forecast for unsafe +conditions. Consider early steps for securing your craft. If +small craft must go out and current conditions allow...do not +venture far from port and do not stay out very long. Return to +port quickly if a watch or warning is issued. 
+ +""" + return self._frame(t.strip()) + + def _overview_pp_abbrev(self, info): + t="" + +## print "\n\n" + "*"*80 +## +## print info.hazardHdlns + + #----------------------------------------------------------------------- + # Determine if this is a downgrade + + downgradeWarning = 0 # flag to track any downgrades of a warning + upgradeWarning = 0 # flag to track any downgrades of a warning + + # If we have more than one hazard + if len(info.hazardHdlns) > 1: + + # Set aside the hazard info for comparison + (baseTitle, baseAction, basePhen, baseSig) = info.hazardHdlns[0][0] + baseAreas =[] + + # Combine all the areas affected by this hazard into one list + for areaList in info.hazardHdlns[0][1:]: + baseAreas = baseAreas + areaList + + # Look through all the hazards we have - after the first one + for hazard in range(1, len(info.hazardHdlns)): + + print("\nworking on hazard index -> ", hazard) + print(info.hazardHdlns[hazard], "\n") + + # Split up the hazard info for this hazard + (title, action, phen, sig) = info.hazardHdlns[hazard][0] + areas =[] + + # Combine all the areas affected by this hazard into one list + for areaList in info.hazardHdlns[hazard][1:]: + areas = areas + areaList + + print("baseAreas = ", baseAreas) + print("areas = ", areas) + print("basePhen = ", basePhen, " baseSig = ", baseSig, " baseAction = ", baseAction) + print(" phen = ", phen, " sig = ", sig, " action = ", action) + + # Look specifically for the case where we are downgrading from + # a hurricane/typhoon warning to a tropical storm warning + if ((basePhen in ["HU", "TY"] and baseSig == "W" and + baseAction == "CAN" and phen == "TR" and sig == "W" and + action in ["NEW", "EXA"]) or + (basePhen == "TR" and baseSig == "W" and + baseAction in ["NEW", "EXA"] and phen in ["HU", "TY"] and + sig == "W" and action == "CAN")): + + print("\nWorking on an downgrade here.") + + # See if the current zone combination is part of downgrade + for area in areas: + + # If this zone segment is part of 
the downgrade + if area in baseAreas: + + # Indicate the downgrade and move on + downgradeWarning = 1 + break + + # Look specifically for the case where we are upgrading from + # a tropical storm warning to a hurricane warning + if ((basePhen == "TR" and baseSig == "W" and + baseAction == "UPG" and phen in ["HU", "TY"] and + sig == "W" and action in ["NEW", "EXA"]) or + (basePhen in ["HU", "TY"] and baseSig == "W" and + baseAction in ["NEW", "EXA"] and phen== "TR" and + sig == "W" and action == "UPG")): + + print("\nWorking on an upgrade here.") + + # See if the current zone combination is part of downgrade + for area in areas: + + # If this zone segment is part of the downgrade + if area in baseAreas: + + # Indicate the downgrade and move on + upgradeWarning = 1 + break + + print("upgrade = ", upgradeWarning, "\tdowngrade = ", downgradeWarning) + # If there are and land or coastal sites + if info.anyInland or info.anyCoastal: + + # If there are no upgrades or downgrades + if not upgradeWarning and not downgradeWarning: + + # Completely new watches/warnings + t+=""" +For those under a watch or warning...now is the time to initiate +preparations according to your hurricane disaster plan specific +to your home or business. + +For those nearby...review your hurricane disaster plan and +become ready to act if a watch or a warning is later issued for +your area. + +It is important to actively listen for forthcoming information +from your local National Weather Service office and emergency +management agency. + +""" + + # If this is an upgraded warning + if upgradeWarning and not downgradeWarning: + + # upgraded warning + t+=""" +For those now under the new warning...now is the time to +initiate preparations according to your hurricane disaster plan +specific to your home or business...if you have not already +done so. + +It is important to actively listen for forthcoming information +from your local National Weather Service office and emergency +management agency. 
+ +""" + + # If this is a downgraded warning + if downgradeWarning and not upgradeWarning: + + # Downgraded warning + t+=""" +While the intensity of this storm is no longer expected to be +as strong...there is still a threat to life and property. For +those still under a warning...continue to implement your +hurricane disaster plan specific to your home or business. + +It is important to actively listen for forthcoming information +from your local National Weather Service office and emergency +management agency. + +""" + + # If this is a upgrade and downgraded warning + if downgradeWarning and upgradeWarning: + + # Huh?! warning + t+=""" +There is still a threat to life and property. Continue to +implement your hurricane disaster plan specific to your home or +business. + +It is important to actively listen for forthcoming information +from your local National Weather Service office and emergency +management agency. + +""" + + # Marine zones + if info.anyMarine: + + # If there are no upgrades or downgrades + if not upgradeWarning and not downgradeWarning: + + # Completely new watches/warnings + t+= """ +Mariners are urged to make all necessary preparations to return +to port...seek safe harbor...and secure their craft. Now is the +time to initiate preparations according to your emergency plan +for tropical systems. Monitor weather broadcasts for changes to +the latest forecast and listen for further statements from local +officials. + +""" + + + # If this is an upgraded warning + if upgradeWarning and not downgradeWarning: + + # upgraded warning + t+=""" +Mariners are urged to return to port...seek safe harbor...and +secure their craft. Now is the time to complete preparations +according to your emergency plan for tropical systems. Monitor +weather broadcasts for changes to the latest forecast and +listen for further statements from local officials. 
+ +""" + + # If this is a downgraded warning + if downgradeWarning and not upgradeWarning: + + # Downgraded warning + t+=""" +While the intensity of this storm is no longer expected to be +as strong...there is still a threat to life and property. +Mariners are urged to remain in port and secure their craft. +Continue to implement your emergency plan for tropical systems. +Monitor weather broadcasts for changes to the latest forecast +and listen for further statements from local officials. + +""" + + # If this is a upgrade and downgraded warning + if downgradeWarning and upgradeWarning: + + # Huh?! warning + t+=""" +There is still a threat to life and property. Continue to +implement your emergency plan for tropical systems. Monitor +weather broadcasts for changes to the latest forecast and +listen for further statements from local officials. + +""" + + return self._frame(t.strip()) + + + def _overview_pp_watch(self, info): + t="" + public_A= self._checkHazard(info.hazardHdlns, + [("HU","A"),("TR","A"),("TY","A")], ["land"]) + coastal_A=self._checkHazard(info.hazardHdlns, + [("HU","A"),("TR","A"),("TY","A")], ["coastal"]) + marine_A=self._checkHazard(info.hazardHdlns, + [("HU","A"),("TR","A"),("TY","A")], ["marine"]) + if public_A: + t+= """ +For those under a watch...now is the time to begin preparing your +home or business according to your hurricane disaster plan. +Listen for possible warnings and be ready to evacuate if +necessary. Heed the advice of local officials and comply with any +orders that are issued. + +""" + if coastal_A: + t+= """ +For interests at ports...docks...and marinas...it is recommended +that you perform the prescribed preparations according to your +emergency operations plan for tropical cyclones. If you live on a +boat...begin to safely secure your craft and make plans to leave +it for adequate land based shelter. Listen for possible warnings. 
+ +""" + if coastal_A or marine_A: + t+= """ +Regarding the coastal waters under a watch...small craft should +return to port or seek safe harbor. + +Closely monitor NOAA weather radio or other local news outlets +for official storm information. Listen for possible changes to +the forecast. + +""" + if public_A: + t+= """ +For additional precautionary and preparedness information... +Please refer to the detailed recommendations relative to your +location as further described by your local National Weather +Service office and your local emergency management. + +""" + return self._frame(t.strip()) + + def _overview_pp_warning(self, info): + t="" + public_W= self._checkHazard(info.hazardHdlns, + [("HU","W"),("TR","W"),("TY","W")], ["land"]) + coastal_W=self._checkHazard(info.hazardHdlns, + [("HU","W"),("TR","W"),("TY","W")], ["coastal"]) + marine_W=self._checkHazard(info.hazardHdlns, + [("HU","W"),("TR","W"),("TY","W")], ["marine"]) + public_A= self._checkHazard(info.hazardHdlns, + [("HU","A"),("TR","A"),("TY","A")], ["land"]) + coastal_A=self._checkHazard(info.hazardHdlns, + [("HU","A"),("TR","A"),("TY","A")], ["coastal"]) + marine_A=self._checkHazard(info.hazardHdlns, + [("HU","A"),("TR","A"),("TY","A")], ["marine"]) + if public_W: + t+= """ +For those under a warning...now is the time to rush to completion +preparations for the protection of life and property. Evacuate if +directed to do so by local officials...or if your home is +vulnerable to high winds or flooding. + +""" + if coastal_W: + t+= """ +For interests at ports...docks...and marinas...urgently complete +prescribed preparations according to your emergency operations +plan for tropical cyclones. If you live on a boat...make final +preparations for securing your craft before leaving it. Be sure +to account for the possible closure of bridges and causeways. + +""" + if coastal_W or marine_W: + t+= """ +Regarding any coastal waters under a warning...small craft should +remain in port and well secured. 
+ +""" + if public_A: + t+= """ +For those under a watch...continue with your preparations and +listen for possible warnings. + +""" + if coastal_A or marine_A: + t+= """ +Regarding any coastal waters under a watch...small craft should +return to port or seek safe harbor. Determine the best strategy +for securing your craft. + +Closely monitor NOAA weather radio or other local news outlets +for official storm information. Listen for possible changes to +the forecast. + +""" + if public_W: + t+= """ +For additional precautionary and preparedness information... +Please refer to the detailed recommendations relative to your +location as further described by your local National Weather +Service office and local emergency management. + +""" + return self._frame(t.strip()) + + def _overview_pp_conditions(self, info): + t="" + if info.anyLand: + t+= """ +During the storm...stay inside and away from windows. Do not +venture outside when high winds are occurring or during temporary +lulls as flying debris can easily...and suddenly...cause serious +injury. + +Have a well-charged cell phone nearby...keeping network +communications as open as possible for emergencies. + +Closely monitor NOAA weather radio or other local news outlets +for official storm information. Listen for possible changes to +the forecast. + +""" + if info.anyMarine: + t+= """ +For small craft who failed to make it to safe harbor or port... +And are now in distress...radio your situation according to +maritime protocol. If appropriate...deploy your emergency +distress beacon. Ensure that everyone is wearing life jackets... +And survival suits if available. + +""" + return self._frame(t.strip()) + + def _overview_pp_postEvent(self, info): + t="" + if info.anyLand: + t+= """ +Many casualties occur after a storm has passed. Be smart and use +caution. Continue to heed the advice of local officials as they +conduct rescue and recovery efforts. 
Wait for the all-clear +signal before re-entering evacuation zones or any area that +received significant damage or flooding. + +Pay attention for possible road closures and stay away from +downed power lines. Listen for any boil water alerts. + +""" + if info.anyCoastal or info.anyMarine: + t+= """ +Mariners should check the latest coastal waters forecast before +making any definite plans. + +""" + return self._frame(t.strip()) + + def _overview_pp_postTropical(self, info): + t="" + if info.anyLand: + t+= """ +Everyone is urged to stay informed of the situation. Remain +diligent in your efforts to protect life and property. + +""" + if info.anyCoastal or info.anyMarine: + t+= """ +Mariners are advised to keep their guard up while closely +monitoring the latest coastal waters forecast. Small craft should +remain in port until this storm passes. + +""" + return self._frame(t.strip()) + +##################################################################################### + def NextUpdate(self, title, sectionDict, info): + t = title + wfo = self._wfoCity + if self._NextUpdate == "Shortly": + t+= "The next local statement will be issued by the National Weather Service in " + t+= wfo + t+= " shortly. It will provide important details regarding the evolving tropical cyclone threats and their potential impacts upon the area. " + elif self._NextUpdate == "Enter": + t+="The next local statement will be issued by the National Weather Service in " + t+= wfo + t+=" around " + t+= self._NextUpdate_entry + t+="...or sooner if conditions warrant. " + elif self._NextUpdate == "Conditions": + t+="The next local statement will be issued by the National Weather Service in " + t+=wfo + t+=" as conditions warrant. " + elif self._NextUpdate == "LastIssuance": + t+="As it pertains to this event...this will be the last local statement issued by the National Weather Service in " + t+=wfo + t+=" regarding the effects of tropical cyclone hazards upon the area. 
" + return t + + ##################################################################################### + ##################################################################################### + ####### SEGMENT Sections + + def NewInformation(self, title, argDict, segment, section, info): + t="" + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + if situation=="Abbreviated": + hdlns = info.hazardHdlns + #print "\n Headlines" + reported = 0 + for hazardHdln in hdlns: + key, landList, marineList, coastalList, inlandList = hazardHdln + #print "hazard", hazardHdln + hdln, act, phen, sig = key + if phen == "HU" and sig == "S": + continue + if act in self._ignoreActions(): + continue + if hdlns.index(hazardHdln) > 0: + t+= " and " + t+= "A " + hdln + reported += 1 + if reported > 0: + if reported > 1: t+= " have " + else: t+= " has " + t+="now been issued. " + t+="A more detailed statement will follow shortly.\n" + else: + t+= self._frame("Please enter new information here.") + "\n" + return title + t + + ##################################################################################### + def PrecautionaryPreparednessActions(self, title, argDict, segment, section, info): + t="" + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + # NonEvent + if situation == "NonEvent": + if scenario=="ActiveNonEvent": + if info.anyLand: + t+=self._pp_dict("NonEvent", ["ActiveNonEvent", "land"]) + if info.anyCoastal or info.anyMarine: + t+=self._pp_dict("NonEvent", ["ActiveNonEvent", "marine"]) + elif scenario=="EndNonEvent": + if info.anyLand or info.anyMarine: + t+=self._pp_dict("NonEvent", ["EndNonEvent", "everywhere"]) + + # PreEvent + elif situation=="PreEvent": + if scenario=="Advancing": + if info.anyLand: + t+=self._pp_dict("PreEvent", ["Advancing", "land"]) + if info.anyCoastal or info.anyMarine: + t+=self._pp_dict("PreEvent", ["Advancing", "marine"]) + elif scenario=="Peripheral": + if info.anyLand: + 
t+=self._pp_dict("PreEvent", ["Peripheral", "land"]) + if info.anyCoastal or info.anyMarine: + t+=self._pp_dict("PreEvent", ["Peripheral", "marine"]) + elif scenario=="InSitu": + if info.anyLand: + t+=self._pp_dict("PreEvent", ["InSitu", "land"]) + if info.anyCoastal or info.anyMarine: + t+=self._pp_dict("PreEvent", ["InSitu", "marine"]) + + # Abbreviated + elif situation=="Abbreviated": + HU_A = self._checkHazard(info.hazardHdlns, [("HU","A"),("TY","A")]) + TR_W = self._checkHazard(info.hazardHdlns, [("TR","W")]) + if self._checkHazard(info.hazardHdlns, [("HU","W"),("TY","W")]): + if info.anyLand: + t+= self._pp_dict("Abbreviated", ["HU_W", "land"]) + if info.anyMarine: + t+=self._pp_dict("Abbreviated", ["HU_W", "marine"]) + elif HU_A and TR_W: + if info.anyLand: + t+=self._pp_dict("Abbreviated", ["TR_W_HU_A", "land"]) + if info.anyMarine: + t+=self._pp_dict("Abbreviated", ["TR_W_HU_A", "marine"]) + elif self._checkHazard(info.hazardHdlns, [("HU","A")]): + if info.anyLand: + t+=self._pp_dict("Abbreviated", ["HU_A", "land"]) + if info.anyMarine: + t+=self._pp_dict("Abbreviated", ["HU_A", "marine"]) + elif TR_W: + if info.anyLand: + t+=self._pp_dict("Abbreviated", ["TR_W", "land"]) + if info.anyMarine: + t+=self._pp_dict("Abbreviated", ["TR_W", "marine"]) + elif self._checkHazard(info.hazardHdlns, [("TR","A")]): + if info.anyLand: + t+=self._pp_dict("Abbreviated", ["TR_A", "land"]) + if info.anyMarine: + t+=self._pp_dict("Abbreviated", ["TR_A", "marine"]) + + + elif situation=="Watch": + if self._checkHazard(info.hazardHdlns, [("HU","A"),("TY","A")]): + if scenario == "Advancing": + if info.anyLand: + t+=self._pp_dict("Watch", ["HU_A", "Advancing", "land"]) + if info.anyMarine: + t+=self._pp_dict("Watch", ["HU_A", "Advancing", "marine"]) + elif scenario == "Peripheral": + if info.anyLand: + t+=self._pp_dict("Watch", ["HU_A", "Peripheral", "land"]) + if info.anyMarine: + t+=self._pp_dict("Watch", ["HU_A", "Peripheral", "land"]) + else: # In Situ + if info.anyLand: + 
t+=self._pp_dict("Watch", ["HU_A", "InSitu", "land"]) + if info.anyMarine: + t+=self._pp_dict("Watch", ["HU_A", "InSitu", "marine"]) + if self._checkHazard(info.hazardHdlns, [("TR","A")]): + if scenario == "Advancing": + if info.anyLand: + t+=self._pp_dict("Watch", ["TR_A", "Advancing", "land"]) + if info.anyMarine: + t+=self._pp_dict("Watch", ["TR_A", "Advancing", "marine"]) + elif scenario == "Peripheral": + if info.anyLand: + t+=self._pp_dict("Watch", ["TR_A", "Peripheral", "land"]) + if info.anyMarine: + t+=self._pp_dict("Watch", ["TR_A", "Peripheral", "land"]) + else: # In Situ + if info.anyLand: + t+=self._pp_dict("Watch", ["TR_A", "InSitu", "land"]) + if info.anyMarine: + t+=self._pp_dict("Watch", ["TR_A", "InSitu", "marine"]) + # Warning + elif situation=="Warning": + HU_W = self._checkHazard(info.hazardHdlns, [("HU","W"),("TY","W")]) + TR_W = self._checkHazard(info.hazardHdlns, [("TR","W")]) + HU_A = self._checkHazard(info.hazardHdlns, [("HU","A"),("TY","A")]) + if HU_W: + if scenario == "Advancing": + if info.anyLand: + t+=self._pp_dict("Warning", ["HU_W", "Advancing", "land"]) + if info.anyMarine: + t+=self._pp_dict("Warning", ["HU_W", "Advancing", "marine"]) + elif scenario == "Peripheral": + if info.anyLand: + t+=self._pp_dict("Warning", ["HU_W", "Peripheral", "land"]) + if info.anyMarine: + t+=self._pp_dict("Warning", ["HU_W", "Peripheral", "land"]) + else: # In Situ + if info.anyLand: + t+=self._pp_dict("Warning", ["HU_W", "InSitu", "land"]) + if info.anyMarine: + t+=self._pp_dict("Warning", ["HU_W", "InSitu", "marine"]) + elif TR_W and HU_A: + if scenario == "Advancing": + if info.anyLand: + t+=self._pp_dict("Warning", ["TR_W_HU_A", "Advancing", "land"]) + if info.anyMarine: + t+=self._pp_dict("Warning", ["TR_W_HU_A", "Advancing", "marine"]) + elif scenario == "Peripheral": + if info.anyLand: + t+=self._pp_dict("Warning", ["TR_W_HU_A", "Peripheral", "land"]) + if info.anyMarine: + t+=self._pp_dict("Warning", ["TR_W_HU_A", "Peripheral", "land"]) + 
else: # In Situ + if info.anyLand: + t+=self._pp_dict("Warning", ["TR_W_HU_A", "InSitu", "land"]) + if info.anyMarine: + t+=self._pp_dict("Warning", ["TR_W_HU_A", "InSitu", "marine"]) + elif TR_W: + if scenario == "Advancing": + if info.anyLand: + t+=self._pp_dict("Warning", ["TR_W", "Advancing", "land"]) + if info.anyMarine: + t+=self._pp_dict("Warning", ["TR_W", "Advancing", "marine"]) + elif scenario == "Peripheral": + if info.anyLand: + t+=self._pp_dict("Warning", ["TR_W", "Peripheral", "land"]) + if info.anyMarine: + t+=self._pp_dict("Warning", ["TR_W", "Peripheral", "land"]) + else: # In Situ + if info.anyLand: + t+=self._pp_dict("Warning", ["TR_W", "InSitu", "land"]) + if info.anyMarine: + t+=self._pp_dict("Warning", ["TR_W", "InSitu", "marine"]) + + # Conditions + elif situation=="Conditions": + if scenario=="Imminent": + if self._checkCategory(info.maxWind, "Cat3"): + if info.anyLand: + t+=self._pp_dict("Conditions", ["Imminent", "Cat3", "land"]) + if info.anyMarine: + t+=self._pp_dict("Conditions", ["Imminent", "Cat3", "marine"]) + elif self._checkCategory(info.maxWind, "Cat1"): + if info.anyLand: + t+=self._pp_dict("Conditions", ["Imminent", "Cat1", "land"]) + if info.anyMarine: + t+=self._pp_dict("Conditions", ["Imminent", "Cat1", "marine"]) + elif info.maxWind >= 34: + if info.anyLand: + t+=self._pp_dict("Conditions", ["Imminent", "34", "land"]) + if info.anyMarine: + t+=self._pp_dict("Conditions", ["Imminent", "34", "marine"]) + + elif scenario == "Ongoing": + if self._checkCategory(info.maxWind, "Cat3"): + if info.anyLand: + t+=self._pp_dict("Conditions", ["Ongoing", "Cat3", "land"]) + if info.anyMarine: + t+=self._pp_dict("Conditions", ["Ongoing", "Cat3", "marine"]) + elif self._checkCategory(info.maxWind, "Cat1"): + if info.anyLand: + t+=self._pp_dict("Conditions", ["Ongoing", "Cat1", "land"]) + if info.anyMarine: + t+=self._pp_dict("Conditions", ["Ongoing", "Cat1", "marine"]) + elif info.maxWind >= 34: + if info.anyLand: + 
t+=self._pp_dict("Conditions", ["Ongoing", "34", "land"]) + if info.anyMarine: + t+=self._pp_dict("Conditions", ["Ongoing", "34", "marine"]) + + elif scenario == "Diminishing": + if info.anyLand: + if info.maxWind >= 64: + desc = "Hurricane" + somewhat = "" + elif info.maxWind >= 34: + desc = "Tropical storm" + somewhat = "Somewhat " + else: + desc = "Strong wind" + somewhat = "Somewhat " + landStr=self._pp_dict("Conditions", ["Diminishing", "land"]) + landStr = landStr.replace("{desc}", desc) + landStr = landStr.replace("{somewhat} ", somewhat) + t+=landStr + if info.anyMarine: + t+=self._pp_dict("Conditions", ["Diminishing", "marine"]) + + # PostEvent + elif situation=="PostEvent": + if scenario=="Immediate": + if info.anyLand: + t+=self._pp_dict("PostEvent", ["Immediate", "land"]) + if info.anyCoastal or info.anyMarine: + t+=self._pp_dict("PostEvent", ["Immediate", "marine"]) + elif scenario== "NoImpact": + if info.anyLand or info.anyMarine: + t+=self._pp_dict("PostEvent", ["NoImpact", "general"]) + elif scenario=="LongTerm": + if info.anyLand: + t+=self._pp_dict("PostEvent", ["LongTerm", "land"]) + if info.anyCoastal or info.anyMarine: + t+=self._pp_dict("PostEvent", ["LongTerm", "marine"]) + + # PostTropical + elif situation=="PostTropical": + if scenario=="InProgress": + t+=self._pp_dict("PostTropical", ["InProgress"]) + else: + t+=self._pp_dict("PostTropical", ["Completed"]) + + return title + t + + ##################################################################################### + def Probability(self, title, argDict, segment, section, info): + t="" + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + if situation=="NonEvent": + t+=self._frame("As currently assessed...the onset of either tropical storm or hurricane conditions is unlikely to occur.")+ "\n" + + elif situation=="PreEvent": + if scenario=="Advancing": t+=self._prob_stmts(info) + "\n" + elif scenario=="Peripheral": t+=self._prob_stmts(info, ifWording=True) + "\n" 
+ else: + t+="At this time...the probability of either tropical storm or hurricane conditions cannot be determined until the system becomes an active tropical cyclone. However...based on the latest outlook...the chance of tropical cyclone formation is " + t+= self._frame("low/medium/high from twoxxx. ") + elif situation=="Abbreviated": + pass + elif situation in ["Watch", "Warning"]: + if scenario=="Advancing": t+=self._prob_stmts(info) + "\n" + elif scenario in ["Peripheral", "InSitu"]: + t+=self._prob_stmts(info, ifWording=True) + "\n" + elif situation in ["Conditions", "PostEvent", "PostTropical"]: + pass + + return title + t + + ##################################################################################### + def Wind(self, title, argDict, segment, section, info): + t="" + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + # NonEvent + if situation=="NonEvent": + t+=self._wind_NonEvent(info)+ "\n" + # PreEvent + elif situation=="PreEvent": + if scenario=="Advancing": + t+=self._wind_PreEvent_Advancing(info) + elif scenario=="Peripheral": + t+=self._wind_PreEvent_Peripheral(info) + else: # In Situ + t+=self._wind_PreEvent_InSitu(info) + t+=self._genericImpact_stmt(info) + "\n" + + # Abbreviated + elif situation=="Abbreviated": + pass + + # Watch + elif situation=="Watch": + if scenario=="Advancing": + t+=self._wind_Watch_Advancing(info) + elif scenario=="Peripheral": + t+=self._wind_Watch_Peripheral(info) + else: # In Situ + t+=self._wind_Watch_InSitu(info) + t+=self._genericImpact_stmt(info) + "\n" + + # Warning + elif situation=="Warning": + if scenario=="Advancing": + t+=self._wind_Warning_Advancing(info) + elif scenario=="Peripheral": + t+=self._wind_Warning_Peripheral(info) + else: # In Situ + t+=self._wind_Warning_InSitu(info) + t+=self._potentialImpact_stmt(info) + "\n" + + # Conditions + elif situation=="Conditions": + if scenario=="Imminent": + t+=self._wind_Conditions_Imminent(info) + elif scenario == "Ongoing": + 
t+=self._wind_Conditions_Ongoing(info) + elif scenario == "Diminishing": + t+=self._wind_Conditions_Diminishing(info) + t+=self._potentialImpact_stmt(info) + "\n" + + # PostEvent + elif situation=="PostEvent": + t+= self._wind_PostEvent(info, scenario) + "\n" + + # PostTropical + elif situation=="PostTropical": + if scenario=="InProgress": + t+=self._wind_PostTropical_InProgress(info) + elif scenario == "Completed": + t+=self._wind_PostTropical_Completed(info) + t+=self._potentialImpact_stmt(info) + "\n" + + if info.anyMarine: + t+=self._frame("Add Wording for Seas Here") + "\n" + + return title + t + + ##################################################################################### + def _optionalSection_template(self, argDict, segment, info, hazardList, listenList=[], + checkAreaTypes=[]): + t="" + if hazardList != []: + try: + hazardHdlns = self._hazardHdlns + except: + self._hazardHdlns, self._huAreas = self._getAdditionalHazards(info) + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + hdlnList = self._checkHazard( + self._hazardHdlns, hazardList, checkAreas=segmentAreas, returnList=True) + if len(hdlnList) > 0: + t+=self._headlines(info, hdlnList, self._entirePortions, ending="") + t+="See latest forecast for latest information. 
" + for listen in listenList: + t+=listen + "\n" + t+=self._frame("Additional free edit area with relevant info here.") + "\n" + t+=self._frame("Potential impact statement from impact library for specific hazard.")+ "\n" + t+="\n" + return t + + ##################################################################################### + def StormSurgeTide(self, title, argDict, segment, section, info): +# hazards = [("CF","W"), ("CF","A"), ("CF","Y"), ("SU","W"),("SU","Y")] +# listenList = [] +# t=self._optionalSection_template(argDict, segment, info, hazards, listenList, +# checkAreaTypes=["coastal"]) + + if info.inundationMax is None: + return title + self._frame("Enter surge text here") + + t= "" + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + # NonEvent + if situation=="NonEvent": + pass # t+=self._surge_NonEvent(info)+ "\n" ?? + # PreEvent + elif situation=="PreEvent": + if scenario=="Advancing": + t+=self._surge_PreEvent_Advancing(info) + elif scenario=="Peripheral": + t+=self._surge_PreEvent_Peripheral(info) + else: # In Situ + t+=self._surge_PreEvent_InSitu(info) + t+="\n" + + # Abbreviated + elif situation=="Abbreviated": + pass + + # Watch + elif situation=="Watch": + if scenario=="Advancing": + t+=self._surge_Watch_Advancing(info) + elif scenario=="Peripheral": + t+=self._surge_Watch_Peripheral(info) + else: # In Situ + t+=self._surge_Watch_InSitu(info) + if info.inundationMax > 0 and scenario != "InSitu": + t+= self._surge_Watch_Impact_stmt(info, segment) + t+= "\n" + + # Warning + elif situation=="Warning": + if scenario=="Advancing": + t+=self._surge_Warning_Advancing(info) + elif scenario=="Peripheral": + t+=self._surge_Warning_Peripheral(info) + else: # In Situ + t+=self._surge_Warning_InSitu(info) + if info.inundationMax > 0 and scenario != "InSitu": + t+=self._surge_Impact_stmt(info, segment) + t+= "\n" + + # Conditions + elif situation=="Conditions": + if scenario=="Imminent": + t+=self._surge_Conditions_Imminent(info) 
+ elif scenario == "Ongoing": + t+=self._surge_Conditions_Ongoing(info) + elif scenario == "Diminishing": + t+=self._surge_Conditions_Diminishing(info) + if info.inundationMax > 0 and scenario != "Diminishing": + t+=self._surge_Impact_stmt(info, segment) + t+="\n" + + # PostEvent + elif situation=="PostEvent": + t+= self._surge_PostEvent(info, scenario) + t+= "\n" + + # PostTropical + elif situation=="PostTropical": + if scenario=="InProgress": + t+=self._surge_PostTropical_InProgress(info) + elif scenario == "Completed": + t+=self._surge_PostTropical_Completed(info) + if info.inundationMax > 0: + t+=self._surge_Impact_stmt(info, segment) + t+= "\n" + + return title + t + + ##################################################################################### + def InlandFlooding(self, title, argDict, segment, section, info): + hazards = [("FF", "A"), ("FA","A")] + listenList = [ + "Listen for possible flood warnings for your location...and be ready to act if flooding rains occur. " + ] + t=self._optionalSection_template(argDict, segment, info, hazards, listenList, + checkAreaTypes=["land"]) + return title + t + + ##################################################################################### + def Tornadoes(self, title, argDict, segment, section, info): + hazards = [("TO", "A")] + listenList = [ + "Listen for possible Tornado Warnings for your location...and be ready to act quickly if a tornado approaches. 
" + ] + t=self._optionalSection_template(argDict, segment, info, hazards, listenList) + return title + t + + ##################################################################################### + def Marine(self, title, argDict, segment, section, info): + hazards = [('SR','W'), ('SR','A'), ('GL','W'), ('GL','A'), ('RB','Y'), + ('SC','Y'), ('SI','Y'), ('SW','Y'), ('HF','W'), ('HF','A')] + listenList = [] + t=self._optionalSection_template(argDict, segment, info, hazards, listenList, + checkAreaTypes=["marine"]) + return title + t + + ##################################################################################### + def _extractTitle(self, info, title): + # Extract correct title for Public vs. Marine segments + if type(title) is tuple: + if info.anyMarine: title = title[1] + else: title = title[0] + return title + + + ##################################################################################### + ## Precautionary Preparedness Statement Dictionaries + ## + ## To keep from cluttering the code, the text is in these dictionaries + ## That way, the code logic can be more easily seen + + def _pp_dict(self, situation, keys): + exec("textDict = self._" + situation + "_textDict()") + return self._accessDict(textDict, keys) + + def _NonEvent_textDict(self): + return { + "ActiveNonEvent":{ + "land":self._frame("Take advantage of this opportunity to review your hurricane disaster plan. If you do not have a plan, make one. If you need assistance with your plan, contact the National Weather Service, local emergency management, or American Red Cross.\n\nStore adequate food and drink supplies for each member of the family for at least three to five days. Replace batteries in flashlights and portable radios. Fix loose and clogged rain gutters and downspouts. Trim overhanging trees and shrubbery. Also, acquire plywood or other materials to protect your home or business. 
Review your insurance policy, updating it if necessary.")+"\n", + "marine":self._frame("Boat owners and captains of small craft should take this opportunity to review their emergency operations plan for tropical cyclones and evaluate their state of readiness for this season.")+"\n", + }, + "EndNonEvent":{ + "everywhere":self._frame("THIS EVENT IS NO LONGER EXPECTED TO HAVE AN IMPACT ACROSS THE AREA AT THIS TIME.\n\nadd other wording here.")+"\n", + } + } + + def _PreEvent_textDict(self): + return { + "Advancing": { + "land":self._frame("Everyone is strongly urged to stay informed. If early evacuation orders are issued for your area, stay calm and take the necessary steps to leave as soon as possible and in an orderly fashion.\n\nMake plans to evacuate if you live on the immediate coast and barrier islands, or in a high rise building, or in a mobile home, or in a place that floods easily. Be ready to act if a watch or warning is issued for your area.") + "\n", + "marine":self._frame("As soon as possible, small craft are urged to return to port or to seek safe harbor. Take early steps to secure your craft.") + "\n", + }, + "Peripheral": { + "land":self._frame("Stay informed and listen for changes to the forecast. Be ready to act if watches or warnings become necessary for your area.")+"\n", + "marine":self._frame("Small craft should consider returning to port or seeking safe harbor.")+"\n", + }, + "InSitu": { + "land":self._frame("Stay informed of the latest forecast. Do not get caught off guard and be ready to act quickly if watches or warnings become necessary for your area.")+"\n", + "marine":self._frame("As soon as possible, small craft are urged to return to port or to seek safe harbor. Take early steps to secure your craft.")+"\n", + } + } + + def _Abbreviated_textDict(self): + return { + "HU_A": { + "land":self._frame("Now is the time to begin implementing your hurricane disaster plan. 
Additional recommendations for your area will be offered shortly.") + "\n", + "marine":self._frame("For marine interests, implement actions according to your emergency operations plan for possible hurricane conditions.") + "\n", + }, + "TR_A": { + "land":self._frame("This is a good time to begin implementing your disaster plan for possible tropical storm conditions. Additional recommendations for your area will be offered shortly.") + "\n", + "marine":self._frame("For marine interests, implement actions according to your emergency operations plan for possible tropical storm conditions.")+"\n", + }, + "HU_W": { + "land":self._frame("According to your hurricane disaster plan, preparations to protect life and property should be nearing completion. Additional recommendations for your area will be offered shortly.")+"\n", + "marine":self._frame("For marine interests, urgently complete actions according to your emergency operations plan for hurricane conditions.") + "\n", + }, + "TR_W": { + "land":self._frame("According to your disaster plan for tropical storm conditions, preparations to protect life and property should be nearing completion. Additional recommendations for your area will be offered shortly.")+"\n", + "marine":self._frame("For marine interests, urgently complete actions according to your emergency operations plan for tropical storm conditions.")+"\n", + }, + "TR_W_HU_A": { + "land":self._frame("Diligently complete actions according to your hurricane disaster plan for tropical storm warnings. Be ready to implement your plan for hurricane warnings should this warning be upgraded in the future. 
Additional recommendations for your area will be offered shortly.")+"\n", + "marine":self._frame("For marine interests, urgently complete actions according to your mariners emergency operations plan for tropical storm warnings, but also be ready to implment your plan for hurricane warnings should this warning be upgraded in the future.")+"\n", + } + } + + def _Watch_textDict(self): + return { + "HU_A": { + "Advancing": { + "land":self._frame("Stay calm and keep informed. Comply with any evacuation orders that are issued for your area. If your home is vulnerable to high winds, or you live in a surge zone or any location prone to flooding, evacuate to a designated shelter or ride out the storm in the sturdy home of family or friends outside of evacuation zones.\n\nItems to bring to a shelter include a first aid kit, medicines and prescriptions, baby food and diapers, games and books, toiletries, a battery powered radio, a cell phone, flashlights with extra batteries, a blanket or sleeping bag for each person, personal identification, copies of key papers such as insurance policies, available cash and credit cards. Remember, pets are not allowed in most public shelters, so check ahead with your intended shelter.\n\nRegarding your home or business, cover all windows and doors with shutters or plywood. Move patio furniture and other loose objects indoors. Brace all exterior doors, including garage doors. Do this as early as possible.\n\nIf you need to make a trip to the hardware store, the grocery store, or the gas station, do so as early as possible.")+"\n", + "marine": self._frame("Boat owners and captains of small craft need to determine the best strategy for securing their craft.")+"\n", + }, + "Peripheral": { + "land":self._frame("Keep informed and listen for possible changes to the forecast. Comply with any evacuation orders issued for your area. 
If you live in a mobile home, make plans to evacuate.\n\nGather clothes, important papers, medicines, and small valuables and keep them ready to go on short notice. Gas up your vehicles and have extra cash on hand.\n\nRegarding your home or business, cover all windows and doors with shutters or plywood. Move patio furniture and other loose objects indoors. Brace all exterior doors, including garage doors.")+ "\n", + "marine":self._frame("Boat owners and captains of small craft need to determine the best strategy for securing their craft.")+"\n", + }, + "InSitu": { + "land":self._frame("Do not get caught unprepared as conditions are subject to change rapidly. The potential impacts are simply too great to ignore the threat. Err on the side of caution and take appropriate actions for possible hurricane conditions.")+"\n", + "marine":self._frame("Boat owners and captains of small craft should not allow themselves to get caught unprepared. Err on the side of caution and take protective actions. Determine the best strategy for securing their craft.")+"\n", + }, + }, + "TR_A": { + "Advancing": { + "land": self._frame("Preparations should be made as soon as possible, before conditions deteriorate. Keep informed while listening for possible warnings. Secure loose outdoor objects which can be blown around. Strongly consider evacuating if you live in a mobile home, and do so if ordered by local officials.") + "\n", + "marine":self._frame("Boat owners and captains of small craft need to determine the best strategy for securing their craft.") + "\n", + }, + "Peripheral":{ + "land": self._frame("Stay informed and listen for possible changes to the forecast. Preparations for this storm should be made as soon as possible.") + "\n", + "marine": self._frame("Boat owners and captains of small craft need to determine the best strategy for securing their craft.") + "\n", + }, + "InSitu":{ + "land": self._frame("Do not get caught unprepared. 
Err on the side of caution and take appropriate actions for possible tropical storm conditions.") + "\n", + "marine": self._frame("Boat owners and captains of small craft should not allow themselves to get caught unprepared. Err on the side of caution and take protective actions. Determine the best strategy for securing their craft.") + "\n", + }, + }, + } + + def _Warning_textDict(self): + return { + "HU_W": { + "Advancing": { + "land":self._frame("Make the final preparations to protect life and property. Rush to completion the hardening of your home or business by closing shutters and bracing garage doors.\n\nIf evacuating, leave as soon as possible. Guard against being stuck out on roadways when dangerous winds and heavy rains arrive. Again, do not stay in a mobile or manufactured home. Remember, pets are not allowed in most official shelters, so check ahead with your intended shelter.\n\nIf staying in a home, turn the refrigerator to maximum cold and keep it closed. Turn off propane tanks and unplug small appliances. Fill the bathtub with water in case the tap water becomes unavailable after the storm. This is for cleaning and flushing purposes. Do not drink it.") +"\n", + "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", + }, + "Peripheral": { + "land":self._frame("Make preparations to protect life and property. Complete the hardening of your home or business by closing shutters and bracing garage doors.\n\nIf evacuating, leave as soon as possible. Guard against being stuck out on roadways when dangerous winds and heavy rains arrive. Again, do not stay in a mobile or manufactured home. 
Remember, pets are not allowed in most official shelters, so check ahead with your intended shelter.") +"\n", + "marine":self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", + }, + "InSitu": { + "land":self._frame("This is a dangerous and rapidly developing situation. Err on the side of caution and urgently take actions to protect life and property. Comply with any evacuation orders issued by local authorities for your area. If you live in a mobile home, leave it for more substantial shelter.") +"\n", + "marine":self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", + }, + }, + "TR_W": { + "Advancing": { + "land": self._frame("Final preparations to protect life and property should be completed before conditions deteriorate. The onset of gusty winds and heavy rains can cause outside activities to become dangerous. Secure loose outdoor objects which can be blown around. If you live in a mobile home, leave it for more substantial shelter.") +"\n", + "marine":self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", + }, + "Peripheral":{ + "land": self._frame("Outside preparations should be completed as soon as possible before the onset of gusty winds and heavy rains which can cause outside activities to become dangerous.") +"\n", + "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", + }, + "InSitu":{ + "land": self._frame("This is a potentially dangerous and rapidly developing situation. 
Err on the side of caution and complete preparations for tropical storm conditions.") +"\n", + "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", + }, + }, + "TR_W_HU_A": { + "Advancing": { + "land":self._frame("Final actions to protect life and property should be completed before conditions deteriorate. Cover windows and doors with shutters or plywood. Move patio furniture and other loose outdoor objects inside. Brace all exterior doors, including garage doors.\n\nComply with any evacuation orders issued for your area. If you live in a mobile home, leave it for more substantial shelter. If your home is vulnerable to high winds, or you live in a surge zone or any location prone to flooding, evacuate to a designated shelter or ride out the storm in the sturdy home of family or friends outside of evacuation zones.") +"\n", + "marine":self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", + }, + "Peripheral":{ + "land": self._frame("Preparations to protect life and property should be completed as soon as possible since the onset of gusty winds and heavy rains can cause outside activities to become dangerous. Cover windows and doors with shutters or plywood. Move patio furniture and other loose outdoor objects inside. Brace all exterior doors, including garage doors.\n\nComply with any evacuation orders issued for your area. If you live in a mobile home, leave it for more substantial shelter.") +"\n", + "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", + }, + "InSitu":{ + "land": self._frame("This is a potentially dangerous and rapidly developing situation. Do not get caught unprepared. 
Err on the side of caution and complete preparations for tropical storm conditions and possible hurricane conditions.") +"\n", + "marine": self._frame("Boat owners and captains of small craft should rush to completion the securing of their craft.") +"\n", + }, + }, + } + + def _Conditions_textDict(self): + return { + "Imminent": { + "Cat3": { + "land": self._frame("Very dangerous conditions will soon occur. Move to an interior room on the lowest floor of your home or shelter, and stay away from windows and external doors. Listen for extreme wind warnings which are issued for the imminent onset of extreme winds greater than 115 mph. If issued, act quickly to take that final step to protect yourself and others, and possibly save lives.")+"\n", + "marine":self._frame("Small craft should already be in port and well secured. Crews should be inside land based shelters. Do not attempt to ride out this storm on your vessel.")+"\n", + }, + "Cat1": { + "land":self._frame("Dangerous hurricane conditions will soon occur. Everyone should be quickly moving to safety within their home or shelter. Once inside, ensure all windows and doors are secured before dangerous winds arrive. Move to an interior room on the lowest floor.\n\nDo not go outside into the eye of hurricanes. Within the eye, conditions can become temporarily calm, which can be misleading. Once the eye passes, the winds will change direction and quickly increase again to dangerous speeds.\n\nBe aware that the loss of commercial power can happen quickly. Keep emergency gear handy.") + "\n", + "marine":self._frame("Small craft should already be in port and well secured. Captains of small craft and their crews should already be safely within land based shelters. Do not attempt to ride out this storm on your vessel.")+"\n", + }, + "34": { + "land":self._frame("Tropical storm conditions will soon occur. All evacuees should quickly arrive to their designated shelter. 
Everyone should remain alert and move inside.\n\nListen for possible flood or tornado warnings.") + "\n", + "marine":self._frame("Small craft should already be in port and well secured. Captains of small craft and their crews should already be safely within land based shelters.")+"\n", + }, + }, + + "Ongoing": { + "Cat3": { + "land":self._frame("Very dangerous conditions are occurring now. Go to the safest place within your home or shelter and stay there. Be ready to protect your head and body in case your shelter fails.") + "\n", + "marine":self._frame("Small craft should be in port and well secured. Crews should be inside land based shelters. Do not attempt to ride out this storm on your vessel.")+"\n", + }, + "Cat1": { + "land":self._frame("Dangerous hurricane conditions are occurring now. Remain in an interior room on the lowest floor. Stay away from windows and external doors. Keep emergency gear handy.") + "\n", + "marine":self._frame("Small craft should be in port and well secured. Crews should be inside land based shelters. Do not attempt to ride out this storm on your vessel.")+"\n", + }, + "34": { + "land":self._frame("Tropical storm conditions are occurring. Remain alert and stay inside.\n\nListen for possible flood or tornado warnings.") + "\n", + "marine":self._frame("Small craft should be in port and well secured.")+"\n", + }, + }, + "Diminishing": { + "land":self._frame("As {desc} conditions diminish, do not go outside to check for damage or to implement temporary repairs as the wind situation will remain {somewhat} dangerous until high winds fully subside. Do not open the doors of your home or shelter. 
Wait for the all-clear signal.\n\nStay inside and listen for possible flood and tornado warnings.")+"\n", + "marine":self._frame("Small craft should stay in port and remain well secured.")+"\n", + }, + } + + def _PostEvent_textDict(self): + return { + "Immediate": { + "land":self._frame("If you or someone else needs emergency help, call 9 1 1.\n\nAs soon as you are able, check in with your points of contact among family and friends. Inform them of your status and condition. Be a good samaritan and check in on your neighbors.\n\nListen to NOAA weather radio and other local news media for the latest information on storm impacts.\n\nIf you are using a portable generator, observe all safety precautions to avoid carbon monoxide poisoning, electrocution, or fires. Portable generators should be operated outdoors, in a dry and well ventilated place. Do not store fuel inside your home or garage.\n\nIf you received roof damage, do not go up on the roof until the threat of gusty winds and heavy rain has fully subsided. If operating chain saws and portable generators, review the operators manual and observe all safety precautions.\n\nStay out of flooded areas as the water may be contaminated or the road might have been washed away. Test drinking water before using, particularly from wells. Stay away from downed power lines too.")+"\n", + "marine":self._frame("Small craft should remain in port or safe harbor until winds and seas subside. For any small craft who are in distress, or if you see someone else in distress, radio your situation according to maritime protocol. If appropriate, deploy your emergency distress beacon.")+"\n", + }, + "NoImpact": { + "general": self._frame("This event is no longer expected to have an impact across the area at this time. 
Use the opportunity to revise preparedness plans and remain prepared for future events.\n\nAdd other wrap-up wording here.")+"\n", + }, + "LongTerm": { + "land": self._frame("Continue to listen to NOAA weather radio and other local news media for the latest information on storm impacts.\n\nIf you are using a portable generator, observe all safety precautions to avoid carbon monoxide poisoning, electrocution, or fires. Portable generators should be operated outdoors, in a dry and well ventilated place. Do not store fuel inside your home or garage.\n\nChain saws can be very helpful when removing fallen trees and large branches. Yet, operating a chain saw is dangerous work. Be sure to review operating procedures for safe cutting. To reduce the chance of mishap or injury, work with another person who has experience.\n\nDo not go sight seeing into areas which have been hardest hit as you may hinder ongoing rescue and recovery operations.\n\nStay out of flooded areas as the water may be contaminated or the road might have been washed away. Test drinking water before using, particularly from wells. Stay away from downed power lines too.")+"\n\n", + "marine": self._frame("Small craft should ensure that winds and seas have fully subsided before venturing out.")+"\n\n", + "general": self._frame("For the latest information regarding the threat of hazardous weather of any type, listen to NOAA weather radio or visit your local National Weather Service web site.")+"\n", + }, + } + + def _PostTropical_textDict(self): + return { + "InProgress": self._frame( +""" +Although the system is losing its tropical characteristics, the +potential impacts are similar to those previously indicated +regardless of its nature. Continue with readiness actions as +recommended."""), + "Completed": self._frame( +""" +Although the system has become non-tropical, the potential +impacts are similar to those previously indicated. 
Continue with +readiness actions as recommended."""), + } + + ##################################################################################### + ## Wind Situation/Scenario methods + + ############ + + def _wind_NonEvent(self, info): + t="" + t+=self._frame("Tropical cyclone watches or warnings are currently not in effect, nor are they likely under present circumstances.\n\nThe latest forecast is for maximum winds to remain below tropical storm force. At this time, remain calm and stay informed.") + return t + ############## + + def _wind_PreEvent_Advancing(self, info): + t="" + t+="Tropical cyclone watches or warnings are likely to be issued in the near future. As " + self._stormTypeName + " moves closer, the threat for sustained high winds will likely increase. " + t+=self._wind_stmt(info)+ ". " + t+=self._beginWind_stmt(info.maxWind, 50, info.windDur[34], end=". ") + return t + + def _wind_Watch_Advancing(self, info): + t="" + t+="AS "+self._stormTypeName+" moves closer, the threat for sustained high winds is likely to increase. " + t+=self._wind_stmt(info)+". " + t+=self._beginWind_stmt(info.maxWind, 50, info.windDur[34], end=". ") + return t + + def _wind_Warning_Advancing(self, info): + t="" + if self._formatPeriod(info.windDur[34]) == "": + t+="|* these zones are not within the 34kt radii. Rerun and choose peripheral. *|" + else: + t+="AS "+self._stormTypeName+" approaches, sustained tropical storm force winds are expected to begin " + t+=self._formatPeriod(info.windDur[34]) + if info.maxWind >= 64: + t+= " and hurricane force winds " + self._formatPeriod(info.windDur[64]) + ". " + t+=self._specific_wind_stmt(info, intro="Hurricane force winds are forecast to last", + duration=True, reportWindValues=False, + windDur=info.windDur[64], end=". ") + else: + t+= ". " + + t+=self._specific_wind_stmt(info, intro="Maximum winds are forecast to be in the ", + addRange=True) + t+=". 
" + return t + + ############ + def _wind_PreEvent_Peripheral(self,info): + t= "" + t+="At this time, the issuance of tropical cyclone watches or warnings is uncertain. As " + self._stormTypeName + " passes nearby, the threat for sustained high winds should not increase. However, some tropical storm force gusts may still occur. Since there is still uncertainty, closely monitor the forecast for any significant changes. " + return t + + def _wind_Watch_Peripheral(self, info): + t= "" + t+="AS "+self._stormTypeName+" passes nearby, the threat for sustained high winds should not increase. However, there is still some possibility for tropical storm force winds. Since there is still uncertainty, closely monitor the forecast for any significant changes. " + return t + + def _wind_Warning_Peripheral(self, info): + t="" + t+=self._specific_wind_stmt(info) + ". " + t+="However, as "+self._stormTypeName+" approaches, stronger winds are still possible. Continue to closely monitor the forecast for any significant changes and be ready to act. " + return t + + ############ + def _wind_PreEvent_InSitu(self, info): + t="" + t+="Tropical cyclone watches or warnings are currently not in effect for the area. However, if tropical cyclone development becomes likely then they could be quickly needed.\n\n" + t+=self._wind_stmt(info)+ ". " + t+="Since there is still uncertainty, closely monitor the forecast for any significant changes. " + return t + + def _wind_Watch_InSitu(self, info): + t="" + t+="AS "+self._stormTypeName+" develops, the threat for sustained high winds may increase. Since there is still uncertainty, closely monitor the forecast for any significant changes. " + return t + + def _wind_Warning_InSitu(self, info): + t="" + t+="AS "+self._stormTypeName+" continues to develop, the threat for sustained high winds may increase soon. " + t+=self._specific_wind_stmt(info)+ ". " + t+="Since there is still uncertainty, closely monitor the forecast for any significant changes. 
" + return t + + ################ + def _wind_Conditions_Imminent(self, info): + t="" + if self._checkCategory(info.maxWind, "Cat3"): + catInfo = self._getCategoryInfo(info.maxWind) + t+="As the center of "+self._stormTypeName+" approaches, "+catInfo + t+=" winds are imminent. " + t+=self._specific_wind_stmt( + info, intro="Maximum winds of ", end=" are expected. ") + t+=self._fallBelow_stmt(info, end=". ") + + elif info.maxWind >= 34: + catInfo = self._getCategoryInfo(info.maxWind) + t+="AS "+self._stormTypeName+" approaches, sustained "+catInfo + t+="Winds are imminent. " + t+=self._specific_wind_stmt( + info, intro="Maximum winds of ",end=" are expected. ") + t+=self._fallBelow_stmt(info, end=". ") + return t + + def _wind_Conditions_Ongoing(self, info): + t="" + period = info.windDur[info.maxWind] + if self._checkCategory(info.maxWind, "Cat3"): + catInfo = self._getCategoryInfo(info.maxWind) + t+=self._windContinue_stmt(info, period, catInfo + "Will continue ", end=". ") + if info.maxWind >= 50: t+=self._fallBelow_stmt(info, end=". ") + + elif info.maxWind >= 34: + t+=self._specific_wind_stmt(info, intro="Sustained winds of ") + t+=self._windContinue_stmt(info, period, intro=" will continue ", end=". ") + if info.maxWind>= 50: t+=self._fallBelow_stmt(info, end=". ") + return t + + def _wind_Conditions_Diminishing(self, info): + t="" + t+="AS "+self._stormTypeName+" exits the area, high winds will continue to diminish. Warnings will be discontinued as soon as the threat completely subsides. " + return t + + ############## + def _wind_PostEvent(self, info, scenario): + t="" + if scenario=="Immediate": + t+="Tropical cyclone warnings have been discontinued. Sustained high winds are no longer expected but strong wind gusts may still occur. " + else: + t+="Sustained high winds or wind gusts are no longer expected. Please refer to the latest National Weather Service forecast for wind information. 
" + return t + + ############ + def _wind_PostTropical_InProgress(self, info): + t="" + if info.anyLand: + t+="The remnants of "+self._stormTypeName + if info.maxWind >= 34: + t+=" will still impact the region with sustained winds equivalent to " + t+=self._windDesc(info) + "Winds. " + else: + t+=" could still impact the region with tropical storm force winds. " + t+=self._specific_wind_stmt(info, intro="Maximum winds of ", end=" are expected. ") + t+=self._fallBelow_stmt(info, end=". ") + + if not info.anyLand and info.anyMarine: + t+="The remnants of "+self._stormTypeName + if info.maxWind >= 34: + t+=" will still impact the region with sustained winds equivalent to " + t+=self._marineWindDesc(info) + "Winds. " + else: + t+=" could still impact the region with gale force winds. " + t+=self._specific_wind_stmt(info, intro="Maximum winds of ", end=" are expected. ") + t+=self._fallBelow_stmt(info, end=". ") + return t + + def _wind_PostTropical_Completed(self, info): + t="" + if info.anyLand: + t+="As the remnants of "+self._stormTypeName+" affect the area, " + if info.maxWind >= 34: + t+=" sustained winds equivalent to " + windDesc = self._windDesc(info) + t+=windDesc + "winds are still possible. " + else: + t+="Tropical storm force winds could still impact the region. " + t+=self._specific_wind_stmt(info, intro="Maximum winds of ", end=" are expected. ") + t+=self._fallBelow_stmt(info, end=". ") + + if not info.anyLand and info.anyMarine: + t+="As the remnants of "+self._stormTypeName+" affect the area, " + if info.maxWind >= 34: + t+=" sustained winds equivalent to " + t+=self._marineWindDesc(info) +" winds are still expected. " + else: + t+=" gale force winds could still impact the region. " + t+=self._specific_wind_stmt(info, intro="Winds of ", end=" are expected")+ ". " + t+=self._fallBelow_stmt(info, marine=True, end=". 
") + return t + + + ##################################################################################### + ## Storm Surge and Storm Tide Situation/Scenario methods + + def _surge_PreEvent_Advancing(self, info): + t = "" + t += "It is too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by " + self._stormTypeName + t += ". Much depends on the precise size, intensity and track of this system as it approaches the coast. At this time, there is a general concern for the chance of " + t += self._frame("( minor | moderate | major -- you should base this on your MEOWS)") + " coastal flooding." + return t + + def _surge_PreEvent_Peripheral(self, info): + t = "" + t+= "It is too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by " + t+= self._stormTypeName + t+= ". Much depends on the precise size, intensity and track of this system as it passes nearby. At this time, there is a general concern for the chance of " + t+= self._frame("( minor | moderate | major -- you should base this on your MEOWS)") + " coastal flooding." + return t + + def _surge_PreEvent_InSitu(self, info): + t = "" + t+= "It is too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by " + t+= self._stormTypeName + t+= ". Much depends on the precise size, intensity, and track of the system if it more fully develops. Since there is considerable uncertainty, closely monitor the latest forecast." + + return t + + ############## + + # Changed 6/8/2017 - Modified to use above ground only and remove all MSL + + def _surge_Watch_Advancing(self, info): + t="" + t+="It is still too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by " + self._stormTypeName + t+=". 
Much depends on the precise size, intensity and track of the system as it approaches the coast. " + if info.inundationMax > 0: + t+="Given the latest forecast, there is a reasonable worst case potential flood inundation" + if info.inundationMax > 2: + t+=" of " + repr(info.deltaSurge)+" to "+repr(info.inundationMax)+" feet above ground somewhere within the surge zone." + else: + t+=" up to " + repr(info.inundationMax)+" feet above ground somewhere within the surge zone." + else: + t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. Please further describe your coastal flooding concerns here or delete this paragraph or consider deleting the whole storm surge and tide section.") + return t + + def _surge_Watch_Peripheral(self, info): + t="" + t+="It is still too early to determine the exact heights of combined storm surge and tide waters for specific locations within the forecast area to be caused by " + self._stormTypeName + t+=". Much depends on the precise size, intensity and track of the system as it approaches the coast and passes nearby. " + if info.inundationMax > 0: + t+="Given the latest forecast, there is a reasonable worst case potential flood inundation" + if info.inundationMax > 2: + t+=" of " + repr(info.deltaSurge)+" to "+repr(info.inundationMax)+" feet above ground somewhere within the surge zone." + else: + t+=" up to " + repr(info.inundationMax)+" feet above ground somewhere within the surge zone." + else: + t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. Please further describe your coastal flooding concerns here or delete this paragraph or consider deleting the whole storm surge and tide section.") + return t + + + def _surge_Watch_InSitu(self, info): + t="" + t+="It is too early to determine if there will be any appreciable coastal flooding within the forecast area from combined storm surge and tide waters associated with " + t+=self._stormTypeName + ". 
Much depends on the precise size, intensity, and track of the system if it more fully develops. Since there is considerable uncertainty, closely monitor the latest forecast. " + return t + + + ################ + + def _surge_Warning_Advancing(self, info): + t = "" + if info.inundationMax > 0: + + t+="As "+self._stormTypeName+" approaches the coast, there is an increasing chance for potential flood inundation " + + if info.inundationMax > 2: + t+="of " +repr(info.deltaSurge)+" to "+repr(info.inundationMax)+" feet above ground somewhere within the surge zone." + else: + t+="up to "+repr(info.inundationMax)+" feet above ground somewhere within the surge zone." + + t+= "\n\nThe locations most likely to realize the greatest flooding include " + t+=self._frame("Relative to the segment, explicitly list locations of greatest concern relative to inundation as that is what the impact statement below is based on for the worst affected area, include inland reach of the inundation waters. Further describe inundation elsewhere within the surge zone as applicable. Be aware that locations experiencing the highest storm surge and tide may not realize the greatest inundation. ") + t+="The most likely period of impact will be " + t+=self._frame("Be sure to cite the expected period of onset. Remember surge waters often arrive well before the core winds and can rise very quickly. ") + + else: + t+="The impact from combined storm surge and tide waters is expected to be minimal. " + t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together. 
") + + return t + + + def _surge_Warning_Peripheral(self, info): + t = "" + if info.inundationMax > 0: + + t+="Although the core of "+self._stormTypeName+" is not currently forecast to move across coastal sections of the forecast area at this time, " + t+="there is still a chance for potential flood inundation " + + if info.inundationMax > 2: + t+="of " +repr(info.deltaSurge)+" to "+repr(info.inundationMax)+" feet above ground somewhere within the surge zone." + else: + t+="up to "+repr(info.inundationMax)+" feet above ground somewhere within the surge zone." + + t+= "\n\nThe locations most likely to realize the greatest flooding include " + t+=self._frame("Relative to the segment, explicitly list locations of greatest concern relative to inundation as that is what the impact statement below is based on for the worst affected area, include inland reach of the inundation waters. Further describe inundation elsewhere within the surge zone as applicable. Be aware that locations experiencing the highest storm surge and tide may not realize the greatest inundation. ") + t+="The most likely period of impact will be " + t+=self._frame("Be sure to cite the expected period of onset. Remember surge waters often arrive well before the core winds and can rise very quickly. ") + + else: + t+="The impact from combined storm surge and tide waters is expected to be minimal. " + t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together. ") + + return t + + def _surge_Warning_InSitu(self, info): + t="" + t+="As "+self._stormTypeName+" continues to develop, combined storm surge and tide waters may increase suddenly. Since there is considerable uncertainty, continue to closely monitor the latest forecast. 
" + t+="At this time, there is a general concern for the chance of " + t+=self._frame("(minor| moderate| major)")+ " coastal flooding. " + return t + + ############### + + def _surge_Conditions_Imminent(self, info): + t = "" + if info.inundationMax > 0: + t+="With the imminent arrival of "+self._stormTypeName+", potential flood inundation " + + if info.inundationMax > 2: + t+="of "+repr(info.deltaSurge)+" to "+repr(info.inundationMax)+" feet above ground is likely somewhere within the surge zone." + else: + t+="up to "+repr(info.inundationMax)+" feet above ground is likely somewhere within parts of the surge zone." + + t+="\n\nThe locations most likely to realize the greatest flooding include " + t+=self._frame("Relative to the segment, explicitly list locations of greatest concern relative to inundation as that is what the impact statement below is based on for the worst affected area, include inland reach of the inundation waters; further describe inundation elsewhere within the surge zone as applicable; be aware that locations experiencing the highest storm surge and tide may not realize the greatest inundation. Also stress the rapid water rises that are likely. ") + else: + t+="The impact from combined storm surge and tide waters is expected to be minimal. " + t+=self._frame("According to the latest surge grids, coastal flooding is negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") + return t + + def _surge_Conditions_Ongoing(self, info): + t = "" + if info.inundationMax > 0: + t+="Expect flood inundation " + + if info.inundationMax > 2: + t+="of "+repr(info.deltaSurge)+" to "+repr(info.inundationMax)+" feet above ground somewhere within the surge zone." + else: + t+="up to "+repr(info.inundationMax)+" feet above ground somewhere within the surge zone." 
+ + t+="\n\nThe locations most likely realizing the greatest flooding include " + t+=self._frame("Relative to the segment, explicitly list locations of greatest concern relative to inundation as that is what the impact statement below is based on for the worst affected area, include inland reach of the inundation waters; further describe inundation elsewhere within the surge zone as applicable; be aware that locations experiencing the highest storm surge and tide may not realize the greatest inundation.") + else: + t+= "Minimal storm tide impacts are being observed. " + t+=self._frame("According to the latest surge grids, coastal flooding is negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") + return t + + + def _surge_Conditions_Diminishing(self, info): + t = "" + if info.inundationMax > 0: + t+="Although coastal flood waters will soon begin to partially recede, " + t+="do not attempt to return to evacuated areas until official confirmation is " + t+="received that it is safe to do so. " + t+="\n\nContinued coastal inundation " + + if info.inundationMax > 2: + t+="of "+repr(info.deltaSurge)+" to "+repr(info.inundationMax)+" feet above ground can be expected somewhere within the surge zone." + else: + t+="up to "+repr(info.inundationMax)+" feet above ground can be expected somewhere within the surge zone." + + else: + t+="Minimal storm tide impacts are being observed. " + t+=self._frame("According to the latest surge grids, coastal flooding is negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") + return t + + + ############# + def _surge_PostEvent(self, info, scenario): + t = "" + if scenario == "Immediate": + if info.inundationMax > 0: + t+="As wind conditions associated with "+self._stormTypeName+" continue to improve, coastal flood waters will be slower to recede. 
Certain areas may still be inundated. Do not attempt to return to evacuated areas until official confirmation is received that it is safe to do so." + t+="\n\nThe locations which realized the greatest flooding include " + t+=self._frame("Relative to the segment, explicitly list locations that experienced greatest inundation flooding remember that in the absence of tidal or other kind of observations the real time slosh run at the time of landfall is likely your best source of information here, not necessarily the psurge data; further describe inundation elsehwere within the surge zone as applicable; describe any known impacts.") + else: + t+="Minimal storm tide impacts are being observed. " + t+=self._frame("According to the latest surge grids, coastal flooding is negligible. Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") + return t + + ############# + def _surge_PostTropical_InProgress(self, info): + t = "" + if info.inundationMax > 0: + t+="As "+self._stormTypeName+" impacts the forecast area, potential flood inundation " + if info.inundationMax > 2: + t+="of "+repr(info.deltaSurge)+" to "+repr(info.inundationMax) + else: + t+="up to "+repr(info.inundationMax) + t+=" feet above ground is likely somewhere within the surge zone. " + t+="\n\nThe locations which will likely realize the greatest flooding include " + t+=self._frame("Relative to the segment, explicitly list locations of greatest inundation concerns, including inland reach; further describe inundation elsewhere within the surge zone as applicable.") + else: + t+="The impact from combined storm surge and tide waters is expected to be minimal. " + t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. 
Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") + return t + + def _surge_PostTropical_Completed(self, info): + t = "" + if info.inundationMax > 0: + t+="As former "+self._stormTypeName+" impacts the forecast area, potential flood inundation " + if info.inundationMax > 2: + t+="of "+repr(info.deltaSurge)+" to "+repr(info.inundationMax) + else: + t+="up to "+repr(info.inundationMax) + t+=" feet above ground is likely somewhere within the surge zone. " + t+="\n\nThe locations which will likely realize the greatest flooding include " + t+=self._frame("Relative to the segment, explicitly list locations of greatest inundation concerns, including inland reach; further describe inundation elsewhere within the surge zone as applicable.") + else: + t+="The impact from combined storm surge and tide waters is expected to be minimal. " + t+=self._frame("According to the latest surge grids, coastal flooding is likely to be negligible. 
Please further describe your coastal flooding concerns here, leave this statement as is or delete the storm surge section all together.") + return t + + ############## + ####### Total Water Level thresholds and statements + ####### NOTE: Thresholds are being compared to the InundationMax values + ############## Impact Statements + + def _surge_Watch_Impact_stmt(self, info, segment): + t="" + water_dict = self._totalWaterLevel_dict(info, segment) + if info.inundationMax >= water_dict.get("Extreme", 7): + damage="Widespread major" + + elif info.inundationMax >= water_dict.get("High", 5): + damage="Areas of major" + + elif info.inundationMax >= water_dict.get("Moderate", 3): + damage="Areas of moderate" + + elif info.inundationMax >= water_dict.get("Low", 1): + damage="Areas of minor" + else: + damage = None + if damage is not None: + t+="\n\n"+self._frame("At this time, there is a general concern" + + " for the chance of "+ damage + + " coastal flooding.") + return t + + def _surge_Impact_stmt(self, info, segment): + t="" + water_dict = self._totalWaterLevel_dict(info, segment) + if info.inundationMax >= water_dict.get("Extreme", 7): + damage= self._totalWaterLevel_Extreme_stmt(info, segment) + + elif info.inundationMax >= water_dict.get("High", 5): + damage= self._totalWaterLevel_High_stmt(info, segment) + + elif info.inundationMax >= water_dict.get("Moderate", 3): + damage= self._totalWaterLevel_Moderate_stmt(info, segment) + + elif info.inundationMax >= water_dict.get("Low", 1): + damage= self._totalWaterLevel_Low_stmt(info, segment) + else: + damage ="Minor coastal flood damage" + t+="\n\n"+self._frame(damage) + return t + + def _totalWaterLevel_byZone_dict(self): + # Enter customized values for land and marine zones + return { + "zone1": { + "Extreme": 7, + "High": 5, + "Moderate": 3, + "Low": 1, + }, + "default": { + "Extreme": 7, + "High": 5, + "Moderate": 3, + "Low": 1, + }, + } + + def _totalWaterLevel_dict(self, info, segment): + # SurgeHtPlusTide 
thresholds for Total Water Level statements. + # The threshold values for the segment will be determined by + # examining the thresholds for each zone in the segment and choosing + # the values for the zone which has the *lowest* Extreme value + segmentNum, segmentAreas, situation, scenario, sections, extraInfo = segment + twl_dict = self._totalWaterLevel_byZone_dict() + return_dict = None + for zoneName in segmentAreas: + zone_dict = twl_dict.get(zoneName, twl_dict["default"]) + if return_dict is None: return_dict = zone_dict + elif zone_dict["Extreme"] < return_dict["Extreme"]: + return_dict = zone_dict + return return_dict + + def _totalWaterLevel_Extreme_stmt(self, info, segment): + t = "" + t+= """ +There is an extreme threat to life and property from coastal +flooding, potentially having a catastrophic impact. The concern +is for the chance of widespread major coastal flooding to occur +within the surge zone, resulting in devastating and life- +threatening inundation. If realized, people within the +threatened areas who failed to heed official evacuation orders +will likely die. + +Coastal communities will likely be devastated, with numerous +homes and businesses near the shore completely destroyed. +Shoreside condominiums and hotels may also be destroyed, +Especially those with inadequate structural support. Flood waters +are likely to extend well inland, further expanding the overall +threat to life and property. Vehicles of any type will likely be +submerged or swept away. + +Roads and bridges will likely be damaged or washed out by the +combined effects of storm surge and tide waters, battering +waves, and floating debris. This could leave entire flood-prone +coastal communities cut off, perhaps for several weeks or +more, and with no power or water. +""" + return t + + def _totalWaterLevel_High_stmt(self, info, segment): + t = "" + t+= """ +There is a critical threat to life and property from coastal +flooding, potentially having a high impact. 
The concern is for +the chance of major coastal flooding to occur in areas within the +surge zone, resulting in very damaging and life-threatening +inundation. If realized, people within the threatened areas who +failed to heed official evacuation orders will have needlessly +placed their lives in grave danger and may be swept away. + +Most coastal communities will likely be heavily damaged, with +many homes and businesses near the shore destroyed by battering +waves and floating debris. Some shoreside condominiums and hotels +may also be damaged, especially those with inadequate structural +support. Flood waters are likely to extend well inland, further +expanding the overall threat to life and property. Most vehicles +of any type will likely be submerged or swept away. + +Severe beach erosion will occur. Most roads and some bridges will +likely be damaged or washed out, leaving entire flood-prone +coastal communities cut off, perhaps for a week or more, and +with no power or water. +""" + return t + + def _totalWaterLevel_Moderate_stmt(self, info, segment): + t = "" + t+= """ +There is a significant threat to life and property from coastal +flooding, potentially having a moderate impact. The concern is +for the chance of moderate coastal flooding to occur in areas +within the surge zone, resulting in damaging and +life-threatening inundation. If realized, people within the +threatened areas who failed to heed official evacuation orders +will have needlessly placed their lives in danger. This is +especially true for those staying behind in vulnerable locations +such as homes and businesses near the shore, and one story +dwellings in flood-prone areas. + +Several coastal communities will likely be damaged, with those +structures not raised or protected by a seawall being subject to +significant flooding, especially during high tide. Large waves +and pounding surf will accentuate property damage in exposed +locations. Flood waters may extend well inland in spots. 
Many +cars will likely be submerged or washed away. + +Substantial beach erosion will occur. Many roads will likely be +damaged or washed out by the flood waters, leaving sections of +coastal communities in flood prone areas temporarily cut off. +Roadway travel may be dangerous with several roads closed. +""" + return t + + def _totalWaterLevel_Low_stmt(self, info, segment): + t = "" + t+= """ +There is an elevated threat to life and property from coastal +flooding, potentially having a low but notable impact. The +concern is for the chance of minor coastal flooding to occur in +areas within the surge zone, resulting in shallow inundation. If +realized, people within the threatened areas who failed to act +according to their personal disaster plan will have needlessly +placed themselves at some measure of risk. + +Many homes and businesses along the shoreline, or in flood-prone +areas, will likely experience some water entering inside, +Especially for those structures not raised or protected by a +seawall. Higher waves and pounding surf will increase the +likelihood of property damage near the coast, especially in +exposed locations. Some cars may take on water or even become +displaced. + +Moderate beach erosion will occur, which may become substantial +if conditions extend through multiple high tides. Several roads +in flood-prone areas will likely be closed. +""" + return t + + ##################################################################################### + ### Segment statements and thresholds -- these are the templates for phrasing + ### I am calling them stmts to distinguish from the text product "phrases" that + ### use the "tree, node" infrastructure. + ### These stmts are simpler (at least at this point)... + + ## In general, "stmt" methods do not add periods or carriage returns + ## It is up to the calling method to do so + + def _prob_stmts(self, info, ifWording=False): + t="" + probHurricane = self._probHurricane_stmt(info, end=". 
") + probTropStorm = self._probTropStorm_stmt(info, end=". ") + if probHurricane != "" and probTropStorm != "": + t+=probHurricane + "Also, " + probTropStorm + else: + t+= probHurricane + t+= probTropStorm + t+=self._probTrend_stmt(info, end=". ") + if self._checkOnsetTime(info): + t+=self._onsetTropStorm_stmt(info, ifWording, end=". ") + t+=self._onsetHurricane_stmt(info, ifWording, end=". ") + return t + + def _probHurricane_thresholds(self): + return { + "littleChance": 3, + "chance": 6, + "onset": 6, # To trigger onset statement + } + + def _probTropStorm_thresholds(self): + return { + "littleChance":10, + "chance": 20, + "onset": 20, # To trigger onset statement + } + + def _prob_threshold(self): + # If the difference between min and max probabilities are greater + # than this, a range will not be reported. For example, + # Instead of "20 to 50 PERCENT", we would say "up to 50 percent" + return 10 + + def _probStorm_stmt(self, info, thresholds, minProb, maxProb, + conditions="Hurricane", end=""): + t="" + if minProb is None or maxProb is None: + return t + little = thresholds.get('littleChance', 3) + chance = thresholds.get('chance', 6) + minProb = int(minProb) + maxProb = int(maxProb) + if maxProb < little: + t+="There is little chance for "+conditions+" conditions at this time" + else: + t+="The chance for "+conditions+" conditions at this time is " + if maxProb > chance: + if minProb < little: + t+="less than or equal to "+ repr(maxProb) + elif minProb >= maxProb: + t+= repr(maxProb) + elif maxProb-minProb <= self._prob_threshold(): + t+= repr(minProb) + ' TO '+ repr(maxProb) + else: + t+= 'UP TO ' + repr(maxProb) + t+= " percent" + else: t+="very small" + return t + end + + def _probHurricane_stmt(self, info, end=""): + thresholds = self._probHurricane_thresholds() + return self._probStorm_stmt( + info, self._probHurricane_thresholds(), info.minProb64, info.maxProb64, + conditions="Hurricane", end=end) + + def _probTropStorm_stmt(self, info, end=""): + 
thresholds = self._probTropStorm_thresholds() + return self._probStorm_stmt( + info, self._probTropStorm_thresholds(), info.minProb34, info.maxProb34, + conditions="Tropical Storm", end=end) + + def _probTrend_stmt(self, info, end=""): + t="" + if info.pwstrend is None: + t+=self._frame("This represents a general {upward/downward/steady} trend since the last forecast" + end) + + else: + t+="This represents a general " + if info.pwstrend > 3: t+= " upward" + elif info.pwstrend > -3 and info.pwstrend < 3: t+= " steady" + else: t+= " downward" + t+=" trend since the last forecast" + end + return t + + def _checkOnsetTime(self, info): + # Check onset time. If <= 24 hours from current time, return False + if info.wind34Time is None: + return False + curTime = AbsTime.AbsTime(self._argDict["creationTime"]) + if info.wind34Time.startTime() <= curTime + 24*3600: + return False + return True + + def _onsetHurricane_stmt(self, info, ifWording=False, end=""): + thresholds = self._probHurricane_thresholds() + t="" + if ifWording: + condition = info.maxProb64 > thresholds.get('onset', 6) + else: + condition = info.maxWind >= 64 + if condition: + if ifWording: + t+="If hurricane conditions were to occur, the most likely period of onset is " + else: + #t+="The most likely period of onset of hurricane conditions is " + t+="The onset of hurricane conditions could start as early as " + t+=self._formatPeriod(info.maxINTprob64, resolution=6) + t+=end + return t + + def _onsetTropStorm_stmt(self, info, ifWording=False, end=""): + thresholds = self._probTropStorm_thresholds() + t="" + if ifWording: + condition = info.maxProb34 > thresholds.get('onset', 20) + else: + condition = info.maxWind >= 34 + + if condition: + if ifWording: + t+="If tropical storm conditions were to occur, the most likely period of onset is " + else: + #t+="The most likely period of onset of tropical storm conditions is " + t+="The onset of tropical storm conditions could start as early as " + 
t+=self._formatPeriod(info.maxINTprob34, resolution=6) + t+=end + return t + + def _beginWind_stmt(self, value, threshold, timeRange, intro=None, end=""): + t="" + if value >= threshold: + if intro is None: + intro="Tropical storm force winds are currently forecast to begin affecting the area " + t+=intro + self._formatPeriod(timeRange) + t+=end + return t + + def _windContinue_stmt(self, info, period, intro=None, end=""): + t="" + if intro is not None: t+=intro + else: t+="Winds will continue " + if period is None: return t + end + t+="through " + self._formatPeriod(period, useEndTime=True) + end + return t + + def _fallBelow_stmt(self, info, intro=None, marine=False, end=""): + t= "" + if info.windDur[64] is None and info.windDur[34] is None: return t + if intro is None: intro = "Winds are not forecast to fall below " + hurricane = False + t+=intro + if info.maxWind >= 64 and info.windDur[64] is not None: + t+="hurricane force until "+ self._formatPeriod(info.windDur[64], useEndTime=True) + hurricane = True + if info.windDur[34] is not None: + if hurricane: t+=", and below " + if marine: t+="gale force until " + else: t+="tropical storm force until " + t+=self._formatPeriod(info.windDur[34], useEndTime=True) + t+=end + return t + + def _formatPeriod(self, period, wholePeriod=False, shiftToLocal=True, useEndTime=False, + resolution=3): + # Format period (a timeRange) resulting in + # DAY + MORNING / AFTERNOON / EVENING / OVERNIGHT. + # If wholePeriod, format FROM ... TO... 
+ + #print "\nFormat period", wholePeriod, period + if period is None: return "" + if useEndTime: + startTime = period.endTime() + else: + startTime = period.startTime() + result = self._getTimeDesc(startTime, resolution, shiftToLocal) + #print "result", result + if wholePeriod: + endResult = self._getTimeDesc(period.endTime(), resolution, shiftToLocal) + #print "endResult", endResult + if result != endResult: + result=result + " TO "+ endResult + return result + + def _getTimeDesc(self, startTime, resolution=3, shiftToLocal=True): + # Create phrase such as Tuesday morning + # Handle today/tonight and "this" morning/afternoon/etc.. + # + print("\n\n**************Formatting Period for GMT starttime ", startTime) + labels = self.Labels()["SimpleWorded"] + currentTime = self._issueTime + print(" currentTime", currentTime) + if shiftToLocal: + currentLocalTime, shift = self.determineTimeShift() + startTime = startTime + shift + currentTime = currentTime + shift + print(" shift, shifted start, current", shift/3600, startTime, currentTime) + hour = startTime.hour + prevDay = False + prevDay, partOfDay = self._getPartOfDay(hour, resolution) + if prevDay: + startTime = startTime - 24*3600 + todayFlag = currentTime.day == startTime.day + if todayFlag: + if partOfDay.find("midnight")>0: todayWord = "Tonight" + else: todayWord = "This" + weekday = todayWord + else: + weekday = labels["Weekday"][startTime.weekday()] + if partOfDay.find("") >= 0: + result = partOfDay.replace('', weekday) + else: + result = weekday + " " + partOfDay + print("Result", result) + return result + + def _getPartOfDay(self, hour, resolution): + prevDay = False + if resolution == 3: + if hour < 3: + prevDay = True + partOfDay = "After midnight" + elif hour < 6: + partOfDay = "early morning" + elif hour < 9: + partOfDay = "morning" + elif hour < 12: + partOfDay = "late morning" + elif hour < 15: + partOfDay = "early afternoon" + elif hour < 18: + partOfDay = "late afternoon" + elif hour < 21: + 
partOfDay = "early evening" + else: + partOfDay = "late evening" + else: + if hour < 6: + prevDay = True + partOfDay = "After midnight" + elif hour < 12: partOfDay = "Morning" + elif hour < 18: partOfDay = "Afternoon" + else: partOfDay = "Evening" + return prevDay, partOfDay + + def _wind_stmt_type(self): + # return "categorical" + return "specific" + + def _wind_stmt(self, info, intro=None, units=None, withTiming=True): + t="" + if intro is None: + intro="The latest forecast is for " + t+=intro + descriptor, duration = self._categorical_wind_info(info) + t+= descriptor + if withTiming and duration is not None: + t+= " from " + self._formatPeriod(duration, wholePeriod=True) + return t + + def _categorical_wind_info(self, info): + t="" + if info.maxWind >= 64: + t+="Hurricane force winds" + duration = info.windDur[64] + elif info.maxWind >=50: + t+="Strong tropical storm force winds" + duration = info.windDur[50] + elif info.maxWind >=34: + t+="Tropical storm force winds" + duration = info.windDur[34] + else: + t+="Winds to remain below tropical storm force" + duration = None + return t, duration + + def _specific_wind_stmt(self, info, units=None, intro=None, duration=False, windDur=None, + addRange=False, end=None, reportWindValues=True): + t="" + if info.maxWind is None: return t + if intro is None: + intro = "The latest area forecast is for maximum winds of " + t+= intro + + if reportWindValues: + t+=self._formatWindRange(info, info.maxWind, units, "Wind") + if addRange: t+= " range" + if info.maxGust is not None: + t+=" with gusts to " + t+=self._formatWindValue(info, info.maxGust, units, "WindGust") + if windDur is None: + windDur = info.windDur[info.maxWind] + if duration and windDur is not None: + t+= " for " + duration = windDur.duration()/3600 + if duration <= 3: t+= "a few " + elif duration <= 6: t+= "several " + else: t+= "many " + t+= "hours" + if end is not None: t+=end + return t + + def _formatWindValue(self, info, value, units=None, element="Wind"): 
+ if value is None: return "" + if self._getUnits(info, units) == "mph": + value = self._ktToMph(value, element) + units = " mph" + else: + units = " knots" + return repr(int(value)) + units + + def _getUnits(self, info, units=None): + #{UNIT} = equal to MPH if public zone segment or KNOTS if marine segment. + # If in Overview or a combined segment (Not possible this season) then default to MPH. + # If called from overview, set units == "mph" + if units is not None: return units + if info.anyLand: return "mph" + else: return "kts" + + def _formatWindRange(self, info, windKts, units, element): + # Add a range to hiVal and report it + if windKts is None: return "" + units = self._getUnits(info, units) + if units == "mph": + hiVal = self._ktToMph(windKts, element) + unitStr = " mph" + else: + hiVal = windKts + unitStr = " knots" + lowVal = self._windRange_value(windKts, hiVal) + return repr(int(lowVal)) + " TO " + repr(int(hiVal)) + unitStr + + def _windRange_value(self, windKts, windValue): + # Given windValue in kts, return the lower range value + if windKts > 52: return windValue - 20 + elif windKts > 34: return windValue - 10 + return windKts - 5 + + def _hurricaneWind_categories(self): + # Dictionary representing wind thresholds in kts + # for category 1, 2, 3, 4 or 5 hurricanes. 
+ return { + 'Cat1': (64, 83), + 'Cat2': (83, 96), + 'Cat3': (96, 114), + 'Cat4': (114, 136), + 'Cat5': (136, 250), + } + + def _checkCategory(self, wind, category): + minVal, maxVal = self._hurricaneWind_categories().get(category, (None, None)) + if wind >=minVal: + return True + return False + + def _getCategoryInfo(self, wind): + catDict = self._hurricaneWind_categories() + + for key, label in [ + ("Cat5","Catastrophic category 5 hurricane force "), + ("Cat4", "Destructive category 4 hurricane force "), + ("Cat3","Very dangerous category 3 hurricane force "), + ("Cat2", "Category 2 hurricane force "), + ("Cat1", "Category 1 hurricane force "), + ]: + minVal, maxVal = catDict[key] + if wind >= minVal: + return label + if wind >= 50: + return "Strong tropical storm force " + elif wind >= 34: + return "Tropical storm force " + return "Strong " + + def _getCategoryDamage(self, wind): + # Convert from knots to mph + wind_mph = self._ktToMph(wind, "Wind") + if wind_mph > 130: + return "Catastrophic damage" + elif wind_mph > 110: + return "Devastating damage" + elif wind_mph > 90: + return "At least extensive damage" + elif wind_mph > 75: + return "At least widespread damage" + elif wind_mph > 60: + return "At least damaging winds" + elif wind_mph > 50: + return "At least damaging winds likely" + elif wind_mph > 40: + return "At least minor to locally moderate damage" + elif wind > 30: + return "At least minor damage" + else: + return "" + +## catDict = self._hurricaneWind_categories() +## for key, label in [ +## ("Cat5","Catastrophic damage"), +## ("Cat4", "At least devastating damage"), +## ("Cat3", "At least extensive damage"), +## ("Cat2", "At least widespread damage"), +## ("Cat1", "At least moderate damage"), +## ]: +## minVal, maxVal = catDict[key] +## if wind >= minVal: +## return label +## if wind >= 50: +## return "At least minor to locally moderate" +## elif wind >= 34: +## return "At least minor damage" +## return "Damage" + + def _windDesc(self, info): + 
if info.maxWind >= 64: + return "hurricane force " + elif info.maxWind >= 50: + return "strong tropical storm force " + elif info.maxWind >= 34: + return "tropical storm force " + else: + return "strong " + + def _marineWindDesc(self, info): + if info.maxWind >= 64: + return "hurricane force " + elif info.maxWind >= 48: + return "storm force " + elif info.maxWind >= 34: + return "gale force " + else: + return "strong " + + def _potentialImpact_thresholds(self): + # Units are mph + return { + 'noImpact': 30, + 'minor': 40, + 'moderate': 50, + 'damageLikely': 60, + 'damageExpected': 75, + 'danger': 90, + 'extremeDanger': 110, + 'devastating': 130, + } + + def _potentialImpact_stmt(self, info): + if info.allMarine: # No impact statements for marine yet. + return "" + + thresholds = self._potentialImpact_thresholds() + t="" + if info.maxWind is None: return t + # Convert to mph -- use avg Wind value + wind_mph = self._ktToMph(info.maxWind, "Wind") + if wind_mph <= thresholds.get('noImpact', 30): + return t + t+="\n" + if wind_mph <= thresholds.get('minor', 40): + t+="Minor damage may occur to older mobile homes. Residents should move loose items indoors, such as garbage cans and outdoor furniture, as they will be blown around. Newly planted or young trees and shrubs may be uprooted if not secured properly. Isolated power outages will be possible.\n" + elif wind_mph <= thresholds.get('moderate', 50): + t+="Minor to moderate damage is likely to many mobile homes, especially those that have canopies, awnings, or carports. Poorly constructed homes may sustain minor wall damage and partial roof removal. Other homes may have minor roof and siding damage. Some loose outdoor items will be tossed around and may cause additional damage. A few power lines will be knocked down resulting in scattered power outages. Some large branches of healthy trees will be snapped. 
Most newly planted trees and shrubs will be damaged or uprooted.\n" + elif wind_mph <= thresholds.get('damageLikely', 60): + t+="Damaging winds are likely. Most poorly anchored mobile homes will be damaged, some severely. Other homes may have damage to shingles, siding, gutters and windows, especially if these items are not properly secured. Loose outdoor items will become airborne, causing additional damage and possible injury. Some power lines will be knocked down by falling trees, resulting in scattered power outages. Many large branches of trees will be snapped, and a few trees will be uprooted.\n" + elif wind_mph <= thresholds.get('damageExpected', 75): + t+="Damaging winds are expected. Poorly anchored mobile homes may be destroyed, along with those of old or poor construction. Some well anchored mobile homes will have substantial damage to roofs, walls, and windows, and could become uninhabitable. Some homes of frame construction will sustain partial wall and roof failure, and possibly blown out windows. Loose outdoor items will become projectiles, causing additional damage and possible injury. Many areas will experience power outages with some downed power poles. Numerous large branches of healthy trees will snap. Some trees will be uprooted, especially where the ground is saturated.\n" + elif wind_mph <= thresholds.get('danger', 90): + t+="Very dangerous winds will produce widespread damage. Airborne debris will cause damage. Persons struck by debris may be injured or possibly killed. The majority of mobile homes will be severely damaged, overturned and uninhabitable. Some homes of frame construction will experience major damage, including roofs being lifted off and walls partially collapsing, leaving them uninhabitable. Well constructed homes will have damage to shingles, siding, and gutters. Windows will be blown out if not properly covered. 
Partial roof failure is expected at some industrial parks, especially to those buildings with light weight steel and aluminum coverings. Some low rise apartment building roofs may be torn off, along with siding and shingle damage. A number of glass windows in high rise buildings will be blown out. Loose outdoor items will become projectiles, causing additional damage and possible injury. Extensive damage to power lines and poles will likely result in widespread power outages that could last from several days to weeks. Numerous large branches will break. Many trees will be uprooted or snapped.\n" + elif wind_mph <= thresholds.get('extremeDanger', 110): + t+="Extremely dangerous winds will cause extensive damage. Structural collapse of some homes could cause severe injuries or possible death. Persons struck by airborne debris risk injury and possible death. Most mobile homes will be destroyed. Numerous homes of poor to average construction will be destroyed or severely damaged, leaving them uninhabitable. Considerable damage to well constructed homes is expected. A number of roofs and exterior walls will fail. Many metal roofs will be torn off buildings at industrial parks. Partial roof and exterior wall failures are likely at low rise apartment buildings. Many windows in high rise buildings will be blown out. Falling and broken glass will pose a significant danger even after the storm. Near total power loss is expected. Potable water could become scarce as filtration systems begin to fail. Many trees will be snapped or uprooted and block numerous roads.\n" + elif wind_mph <= thresholds.get('devastating', 130): + t+="Devastating damage is expected. Collapse of some residential structures will put lives at risk. Airborne debris will cause extensive damage. Persons, pets, and livestock struck by the wind blown debris will be injured or killed. Nearly all mobile homes will be destroyed. 
Most homes will sustain severe damage with potential for complete roof failure and wall collapse. Most industrial buildings will be destroyed, with others experiencing partial roof and wall damage. Most low rise apartment buildings will be severely damaged or destroyed, and others will have partial roof and wall failure. Numerous windows will be blown out of high rise buildings resulting in falling glass, which will pose a threat for days to weeks after the storm. Considerable structural damage to large buildings is possible. Electricity and water will be unavailable for days and perhaps weeks after the storm passes. Most trees will be snapped or uprooted. Fallen trees may cut off residential areas for days to weeks.\n" + else: + t+="Catastrophic damage is expected. Collapse of residential structures will put lives at risk. Severe injury or death is likely for persons, pets, and livestock struck by wind blown debris. Most of the area will be uninhabitable for weeks, perhaps longer. Most homes will be destroyed, with total roof failure and wall collapse. Nearly all industrial buildings and low rise apartment buildings will be severely damaged or destroyed. Nearly all windows will be blown out of high rise buildings resulting in falling glass, which will pose a threat for days to weeks after the storm. Considerable structural damage to large buildings is likely. Nearly all trees will be snapped or uprooted and power poles downed. Fallen trees and power poles will isolate residential areas. Power outages will last for weeks to possibly months. 
Long term water shortages will increase human suffering.\n" + return self._frame(t) + + def _genericImpact_stmt(self, info): + if info.allMarine: + return "" + t="" + damage = self._getCategoryDamage(info.maxWind) + if damage.strip() == "": return t + t+="\n" + t+=self._frame("A general concern should be for the possibility of " + damage + " somewhere within the area.") + return t + + ############################################################### + ### Example TCP product for automated testing + ############################################################### + ## Used for testing and debugging + def _useTestTCP(self): + #return True + return False + + def _TCP_Product(self): + return""" +ZCZC MIATCPAT2 ALL +TTAA00 KNHC DDHHMM +BULLETIN +HURRICANE KATRINA ADVISORY NUMBER 10 +NWS TPC/NATIONAL HURRICANE CENTER Miami FL +11 PM EDT Thu Aug 25 2005 + +...Eye of Katrina moving southwestward across Miami-Dade county... + + +Summary of 1100 PM EDT...0300 UTC...information +----------------------------------------------- +Location...25.5N 80.7W +About 35 miles...55 km SW of Miami Florida +About 20 miles...30 km NW of Homestead Florida +Maximum sustained winds...75 mph...120 km/hr +Present movement...west-southwest or 265 degrees at 8 mph...13 km/hr +Minimum central pressure...984 mb...29.06 inches + + +Watches and Warnings +-------------------- +Changes with this advisory... + +*The tropical storm warning and tropical storm watch along the east +coast of Florida north of Jupiter have been discontinued. + + +Summary of Warnings and Watches in effect... + +A Hurricane Warning is in effect for... +*The southeast Florida coast from Jupiter Inlet southward to +Florida City, including Lake Okeechobee. Preparations to protect +life and property should have been completed. + +A tropical storm warning is in effect for... +*All the Florida Keys and Florida Bay from Key West northward +*The gulf coast of Florida from Longboat Key south and eastward +to south of Florida City. 
+ +A Tropical Storm Watch is in effect for... +*The Florida west coast from north of Longboat Key to Anclote Key. + +Interests elsewhere along the gulf coast of the United States should +monitor the progress of Katrina. + +For storm information specific to your area, including possible +inland watches and warnings, please monitor products issued +by your local weather office. + + +Discussion and 48-hour outlook +------------------------------ +At 11 PM EDT...0300 UTC...the eye of hurricane Katrina was located +near latitude 25.5 north, longitude 80.7 west. Katrina is moving +toward the southwest near 8 mph...13 km/hr and this motion is +expected to continue during the next several hours. Katrina is +expected to move over the Gulf of Mexico Friday and Saturday. + +Maximum sustained winds are near 75 mph...130 km/hr with higher +gusts. Katrina is a category one hurricane on the Saffir-Simpson +scale. Some additional weakening is anticipated while Katrina is +over land, and it could weaken to a tropical storm early on Friday. +Restrengthening is expected on Friday or Saturday, and Katrina +could become a dangerous hurricane in the Gulf of Mexico in 2 to +3 days. + +Hurricane force winds extend outward up to 10 miles from the +center, and tropical storm force winds extend outward up to +70 miles. A wind gust to 87 mph...140 km/hr was recorded at Miami +National Weather Service Forecast Office/National Hurricane Center +and 81 mph...131 km/hr at the TaMiami airport this evening. + +Estimated minimum central pressure is 984 mb...29.06 inches. + + +Storm hazards +------------- +Storm surge flooding...2 to 4 feet above normal tide levels, can be +expected along the west coast of Florida in areas of onshore flow +south of Venice and in Florida bay. Storm surge should begin to +decrease along the east coast of Florida. + +Rainfall...Katrina is expected to produce a significant heavy +rainfall event over south Florida and the Florida Keys. 
Total +rainfall accumulations of 6 to 10 inches with isolated maximum +amounts of 15 to 20 inches are possible. + +Tornadoes...isolated tornadoes will also be possible over eastern +Florida and the Florida Keys. + + +Next advisory +-------------- +Next intermediate advisories...100 AM and 300 AM EDT. +Next complete advisory...500 AM EDT. + +$$ +forecaster Avila + +NNNN +""" +## return """ +##ZCZC MIATCPEP5 ALL +##TTAA00 KNHC DDHHMM +##BULLETIN +##HURRICANE LINDA ADVISORY NUMBER 12 +##NWS TPC/NATIONAL HURRICANE CENTER MIAMI FL EP152009 +##800 PM PDT Wed Sep 09 2009 +## +##...Linda becomes a hurricane, the sixth hurricane of the eastern +##Pacific season... +## +## +##Summary of 800 PM PDT...0300 UTC...information +##---------------------------------------------- +##Location...17.1n 129.4w +##About 1325 miles...2135 km wsw of the southern tip of Baja California +##Maximum sustained winds...80 mph...130 km/hr +##Present movement...northwest or 320 degrees at 6 mph...9 km/hr +##Minimum central pressure...984 mb...29.06 inches +## +## +##Watches and Warnings +##-------------------- +##There are no coastal tropical cyclone watches or warnings in effect. +## +## +##Discussion and 48-hour outlook +##------------------------------ +##At 800 PM PDT...0300 UTC, the center of hurricane Linda was located +##near latitude 17.1 north, longitude 129.4 west. Linda is moving +##toward the northwest near 6 mph...9 km/hr, and this general motion +##is expected to continue for the next couple of days. +## +##Maximum sustained winds are near 80 mph...130 km/hr, with higher +##gusts. Little change in strength is expected tonight and Thursday, +##with Linda forecast to weaken Thursday night and Friday. +## +##Hurricane force winds extend outward up to 25 miles...35 km...from +##the center, and tropical storm force winds extend outward up to +##125 miles...205 km. +## +##Estimated minimum central pressure is 984 mb...29.06 inches. 
+## +## +##Storm Hazards +##------------- +##None affecting land. +## +## +##Next advisory +##-------------- +##Next complete advisory...200 AM PDT. +## +##$$ +##forecaster Beven +##NNNN +##""" + + + + ##################################################################################### + ##################################################### + ### HLS GUI Processing and Configurable Dictionaries for + # the Overview GUI, Situations and Scenarios + + def _processVariableList(self, definition, parent): + # Get Definition variables + for key in list(definition.keys()): + exec("self._" + key + "= definition[key]") + + segmentList, argDict = self._determineSegments(definition, parent) + if len(segmentList) == 0: + return {("segments", "segments"):[]} + + # Overview GUI + while True: + overviewDict = self._displayGUI(argDict, segmentList, "Overview") + if overviewDict == "UsePrev": + return {("UsePrev", "UsePrev"): True} + elif overviewDict == "Reset": + continue # Display Overview GUI again + if overviewDict is None: + return None + break + + # Situation GUI (per segment) + situationDict = self._displayGUI(argDict, segmentList, "Situation", overviewDict) + if situationDict is None: + return None + + # Scenario GUI (per segment) + scenarioDict = self._displayGUI(argDict, segmentList, "Scenario", situationDict) + if scenarioDict is None: + return None + + # Consolidate information from GUI's + varDict = overviewDict + varDict[("segments:","segments")] = scenarioDict["segments"] + return varDict + + def _determineSegments(self, definition, parent): + # Get the segments based on hazards "overlaid" with combinations file + argDict = {} + + dataMgr = parent + argDict['dataMgr'] = dataMgr + argDict["databaseID"] = self._getDbId(dataMgr, definition['database']) + argDict["ifpClient"] = PyFPClient(VizApp.getWsId(), dataMgr.getSiteID()) + import VTECMessageType + vtecMode = VTECMessageType.getVTECMessageType(self._pil) + argDict["vtecMode"] = vtecMode + gfeMode = 
dataMgr.getOpMode().name() + if gfeMode == "PRACTICE": + argDict["vtecActiveTable"] = "PRACTICE" + else: + argDict["vtecActiveTable"] = "active" + argDict['creationTime'] = int(time.time()/60)*60.0 + argDict["definition"] = definition + accessor = ModuleAccessor.ModuleAccessor() + dfEditAreas = self._defaultEditAreas +# print "dfEditAreas", dfEditAreas + dfEditAreas = accessor.variable(dfEditAreas, "Combinations") + if dfEditAreas is None: + LogStream.logVerbose("Combination file not found: " + dfEditAreas) + return [], None + + # Need to check hazards against all edit areas in the CWA MAOR + allAreas = self._inlandAreas()+self._marineAreas()+self._coastalAreas() + argDict["combinations"]= [(allAreas,"Region1")] + #print "\n****************determineSegments calling getHazardsTable" + hazards = self._getHazardsTable(argDict, self.filterMethod) + argDict["hazards"] = hazards + + # Get the segments resulting from Hazards + + #print "\nRaw Analyzed", hazards.rawAnalyzedTable() + hazSegments = self.organizeHazards(hazards.rawAnalyzedTable()) + print("\nSegments from HazardsTable organizeHazards", hazSegments) + combos = dfEditAreas + print("\nSegments from Zone Combiner", combos) + # "Overlay" the forecaster-entered combinations onto the segments + segmentList = self._refineSegments(hazSegments, combos) + print("\nNew segments", segmentList) + + # Check for all CON + allCON = True + segmentAreas = [] + for segmentAreas in hazSegments: + hazardList = hazards.getHazardList(segmentAreas) + for hazard in hazardList: + action = hazard['act'] + #print "hazard", hazard + if action != "CON": + allCON = False + break + if not allCON: break + argDict["allCON"] = allCON + #print "allCON", allCON + + # Determine if we should have Event Context limited to Abbreviated. 
+ # Here are the rules: + # --If there are no Continuations, limit to abbreviated UNLESS + # --If all are HU.S, do not limit to abbreviated, but do limit to Pre or Non Event + # --IF all are CAN, UPG (ignoreActions), do not limit to abbreviated + + noCON = True + allHUS = True + allIgnoreActions = True + + segmentAreas = [] + for segmentAreas in hazSegments: + hazardList = hazards.getHazardList(segmentAreas) + for hazard in hazardList: + action = hazard['act'] + sig = hazard['sig'] + #print "hazard", hazard + if action == "CON": + noCON = False + if sig != "S": + allHUS = False + if action not in self._ignoreActions(): + allIgnoreActions = False + forceAbbrev = noCON + if allHUS or allIgnoreActions: forceAbbrev = False + argDict["forceAbbrev"] = forceAbbrev + argDict["allHUS"] = allHUS + #print "noCON", noCON + #print "allHUS", allHUS + #print "allIgnoreActions", allIgnoreActions + #print "forceAbbrev", forceAbbrev + + # Determine if sigs are watches and/or statements to limit + + watchEC = True + segmentAreas = [] + for segmentAreas in hazSegments: + hazardList = hazards.getHazardList(segmentAreas) + for hazard in hazardList: + sig = hazard['sig'] + if sig == "W": + watchEC = False + break + if not watchEC: break + argDict["watchEC"] = watchEC + + + ### Determine if all actions are cancel to limit to Post Event ot Tropical + + allCAN = True + segmentAreas = [] + for segmentAreas in hazSegments: + hazardList = hazards.getHazardList(segmentAreas) + for hazard in hazardList: + action = hazard['act'] + #print "hazard", hazard + if action != "CAN": + allCAN = False + break + if not allCAN: break + argDict["allCAN"] = allCAN + + return segmentList, argDict + + def _refineSegments(self, hazSegments, combos): + """Break down each segment further according to combos given. + Make sure the resulting segments follow the ordering of the combos. 
+ """ + if combos == []: + return hazSegments + newSegments = [] # list of lists + newAreas = [] + for combo, label in combos: + # Each combination will be tested to see if it can stay intact + # i.e. if all areas in the combo are in the same segment + # else split it into like segments + # + # segmentMapping is a list where each entry is + # the hazSegment in which the corresponding combo area appears. + # (We need to define self._segmentList for the mapping function + # to use) + self._segmentList = hazSegments + segmentMapping = list(map(self._findSegment, combo)) + #print " segmentMapping", segmentMapping + + # segmentDict keys will be the hazSegments and + # we will gather all the areas of the combos that appear + # in each of these hazSegments + segmentDict = {} + keyList = [] + for areaName in combo: + #print " Adding", areaName + key = tuple(segmentMapping[combo.index(areaName)]) + if key == (): # If no hazard for area, do not include + continue + if key not in keyList: + keyList.append(key) + segmentDict.setdefault(key,[]).append(areaName) + #print " segmentDict", segmentDict + + # Keep track of the areas that we are including + for key in keyList: + segAreas = segmentDict[key] + newAreas = newAreas + segAreas + newSegments.append(segAreas) + #print " newSegments", newSegments + # Now add in the hazAreas that have not been accounted for + # in the combinations + hazAreas = [] + for hazSegment in hazSegments: hazAreas = hazAreas + hazSegment + for hazSegment in hazSegments: + newSeg = [] + for hazArea in hazSegment: + if hazArea not in newAreas: + newSeg.append(hazArea) + if newSeg != []: + newSegments.append(newSeg) + return newSegments + + def _getDbId(self, dataMgr, db): + pm = dataMgr.getParmManager() + if db in ['Fcst', 'Fcst_Prac','Fcst_Test']: return str(pm.getMutableDatabase()) + elif db == 'Official': return str(pm.getProductDB()) + elif db == 'ISC': + dbs = pm.getIscDatabases() + if len(dbs): + iscDB = str(dbs[-1]) #last one is the real one by 
convention + else: + iscDB = str(DatabaseID.databaseID_default().toJavaObj()) + + return iscDB + + def _findSegment(self, areaName): + for segment in self._segmentList: + if areaName in segment: + return segment + return [] + + def _getHazardsTable(self, argDict, filterMethod, editAreas=None): + # Set up edit areas as list of lists + # Need to check hazards against all edit areas in the CWA MAOR + allAreas = self._inlandAreas()+self._marineAreas()+self._coastalAreas() + argDict["combinations"]= [(allAreas,"Region1")] + dfEditAreas = argDict["combinations"] + editAreas = [] + for area, label in dfEditAreas: + if type(area) is list: + editAreas.append(area) + elif type(area) is tuple: #LatLon + editAreas.append([self.__getLatLonAreaName(area)]) + else: + editAreas.append([area]) + # Get Product ID and other info for HazardsTable + pil = self._pil + stationID4 = self._fullStationID + productCategory = pil[0:3] #part of the pil + definition = argDict['definition'] + sampleThreshold = definition.get("hazardSamplingThreshold", (10, None)) + # Process the hazards + accurateCities = definition.get('accurateCities', 0) + cityRefData = [] + import HazardsTable + hazards = HazardsTable.HazardsTable( + argDict["ifpClient"], editAreas, productCategory, filterMethod, + argDict["databaseID"], + stationID4, argDict["vtecActiveTable"], argDict["vtecMode"], sampleThreshold, + creationTime=argDict["creationTime"], accurateCities=accurateCities, + cityEditAreas=cityRefData, dataMgr=argDict['dataMgr']) + return hazards + + ################################################################################### + ################################################################################### + ## TK GUI Classes + ## + ## IF you want to override the GUI, you must include all the code + ## from here on. 
This includes the calling method _displayGUI + + def _displayGUI(self, argDict, segmentList, dialogName, infoDict=None): + if dialogName == "Overview": + dialogClass = HLS_Overview + elif dialogName == "Situation": + dialogClass = HLS_Situation + elif dialogName == "Scenario": + dialogClass = HLS_Scenario + dialog = dialogClass(self, argDict, segmentList, infoDict) + status = dialog.status() + LogStream.logVerbose("status="+status) + if status == "Cancel": + return None + elif status in ["Reset", "UsePrev"]: + return status + else: + return dialog.getVarDict() + + +import tkinter, copy, re + + +class AutoScrollbar(tkinter.Scrollbar): + # a scrollbar that hides itself if it's not needed. only + # works if you use the grid geometry manager. + def set(self, lo, hi): + if float(lo) <= 0.0 and float(hi) >= 1.0: + # grid_remove is currently missing from Tkinter! + self.tk.call("grid", "remove", self) + else: + self.grid() + tkinter.Scrollbar.set(self, lo, hi) + def pack(self, **kw): + raise tkinter.TclError("cannot use pack with this widget") + def place(self, **kw): + raise tkinter.TclError("cannot use place with this widget") + + +class ScrolledBox(tkinter.Frame,): + def __init__(self, parent=None, side='right', **kw): + """Scrolled Box widget with vertical scrollbar on the right or left. 
+ """ + tkinter.Frame.__init__(self, parent, **kw) + if side == 'right': + ysbCol=1 + csbCol=0 + else: + ysbCol=0 + csbCol=1 + + self.grid_rowconfigure(0, weight=1) + self.grid_columnconfigure(csbCol, weight=1) + + xscrollbar = AutoScrollbar(self, orient=tkinter.HORIZONTAL) + xscrollbar.grid(row=1, column=csbCol, sticky=tkinter.E+tkinter.W) + + yscrollbar = AutoScrollbar(self) + yscrollbar.grid(row=0, column=ysbCol, sticky=tkinter.N+tkinter.S) + + canvas = tkinter.Canvas(self, bd=0,relief=tkinter.SUNKEN, + xscrollcommand=xscrollbar.set, + yscrollcommand=yscrollbar.set) + self._interior = tkinter.Frame(canvas) + canvas.create_window(0, 0, window=self._interior, anchor='nw') + + self._canvas = canvas + canvas.bind('', self.configCB) + canvas.grid(row=0, column=csbCol, sticky='nsew') + + xscrollbar.config(command=canvas.xview) + yscrollbar.config(command=canvas.yview) + + def interior(self): + return self._interior + + def configCB(self, event): + self._canvas.config(scrollregion=self._canvas.bbox(tkinter.ALL)) + + +class HLS_Dialog(StartupDialog.IFPDialog): + def __init__(self, parent, argDict, segmentList, infoDict=None): + self._status = "Cancel" # exception, or user-cancels + self._tkObject_dict = {} # place to store reference to tk objects + self._varDict = {} # all end results must be saved here + self._argDict = argDict + self._segmentList = segmentList + self._infoDict = infoDict + self._parent = parent + StartupDialog.IFPDialog.__init__(self, parent=None, title="HLS") + + def getVarDict(self): + return self._varDict + + def _makeRadioOrCheckList(self, master, label, elementList, default=None, + buttonSide=tkinter.TOP, frameSide=tkinter.LEFT, entryField=None, + headerFG=None, headerFont=None, boxType="radio", + listFrameRelief=tkinter.GROOVE): + listFrame = tkinter.Frame(master, relief=listFrameRelief, borderwidth=1) + + if label != "": + listLabel = tkinter.Label(listFrame, text=label, fg=headerFG, font=headerFont) + listLabel.pack(side=tkinter.TOP, 
fill=tkinter.X, expand=tkinter.NO, padx=10) + + ivar = tkinter.IntVar() + defaultIndex = 0 + ivarList = [] + for element in elementList: + index = elementList.index(element) + if type(element) is tuple: + element, key = element + if boxType== "radio": + button = tkinter.Radiobutton(listFrame, variable=ivar, text=element, value=index) + else: + ivar = tkinter.IntVar() + if default is not None and element in default: ivar.set(1) + else: ivar.set(0) + button= tkinter.Checkbutton(listFrame, variable=ivar, text=element) + ivarList.append(ivar) + button.pack(side=buttonSide, anchor=tkinter.W, expand=tkinter.YES, padx=4) + # Look for default + if element == default: + defaultIndex = index + + entryObject = None + if entryField is not None: + entryObject = self._makeEntry(listFrame, entryField) + # packing + listFrame.pack(side=frameSide, expand=tkinter.NO, fill=tkinter.Y) #, anchor=Tkinter.N) + #listFrame.pack(side=frameSide, expand=Tkinter.YES, fill=Tkinter.Y, anchor=Tkinter.N) + + if boxType == "radio": + ivar.set(defaultIndex) # set the default + if boxType == "check": + ivar = ivarList + return ivar, entryObject + + def _makeCheckList(self, master, label, elementList, side=tkinter.TOP, + segmentAreas=None, maxLen=None, colLabels=True): + """Custom Checklists for Scenario GUI, Step 9b. + """ + listFrame = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=2) + + if len(label)>0: + listLabel = tkinter.Label(listFrame, text=label) + listLabel.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO, padx=10) + + # Frames for each column. Nothing fancy, make all the same with optional + # label at the top. Define columns and labels in colL dict. Frames will + # be put in colF dict to use below. Order of columns will be done at the end + # when the columns are packed. 
+ colL={'order': '', + 'prev':'Prev', + 'import':'Import', + 'section':'Section', + } + colF = {} + for c in list(colL.keys()): + colF[c]=tkinter.Frame(listFrame, relief=tkinter.FLAT, borderwidth=0) + if colLabels: + tkinter.Label(colF[c],text=colL[c]).pack(side=tkinter.TOP,anchor=tkinter.W) + + # Do for all rows, filling in all columns. + ivarList = [] + for eleDict in elementList: + name = eleDict.get('name', "") + label = eleDict.get('label', "") + #print "********* _makeCheckList dict=",eleDict + + #--- Order Entry box + frame=colF['order'] + if eleDict.get("orderBox", False): + iOrder = tkinter.Entry(frame, relief=tkinter.SUNKEN, width=2) + else: + iOrder = tkinter.Label(frame,text='', width=2) + iOrder.pack(side=tkinter.TOP, anchor=tkinter.W,expand=tkinter.YES) + + #--- usePrev Checkbutton + frame=colF['prev'] + if eleDict.get("usePrev", False): + prevVar = tkinter.IntVar() + prevVar.set(0) + iPrev = tkinter.Checkbutton(frame, variable=prevVar, text="") + else: + iPrev = tkinter.Label(frame,text='', width=3) + prevVar = None + iPrev.pack(side=tkinter.TOP, anchor=tkinter.W,expand=tkinter.NO) + + #--- importMethod - Checkbutton + frame=colF['import'] + if eleDict.get("importMethod", False) or eleDict.get("importPIL", False): + importVar = tkinter.IntVar() + importVar.set(0) + iImport = tkinter.Checkbutton(frame, variable=importVar,text="") + else: + iImport = tkinter.Label(frame,text='', width=1) + importVar = None + iImport.pack(side=tkinter.TOP, anchor=tkinter.W,expand=tkinter.YES) + + #--- Section name Checkbutton + frame=colF['section'] + ivar = tkinter.IntVar() + defaultOn = eleDict.get("defaultOn", 0) + if type(defaultOn) is types.MethodType: + defaultOn = defaultOn(name, segmentAreas) + ivar.set(defaultOn) + button = tkinter.Checkbutton(frame, variable=ivar, text=label) + button.pack(side=tkinter.TOP, anchor=tkinter.W, expand=tkinter.YES, padx=0) + + #print "\nAppending", name, button, iOrder, iPrev + ivarList.append((name, ivar, iOrder, prevVar, 
importVar)) + + # packing + listFrame.pack(side=tkinter.LEFT,expand=tkinter.YES,fill=tkinter.Y,anchor=tkinter.N) + + # Change the order of the colums by the order of list + for c in ['order','prev','section', 'import']: + #for c in ['order','prev','import','section']: + colF[c].pack(side=tkinter.LEFT,expand=tkinter.YES, + fill=tkinter.Y,anchor=tkinter.N) + return ivarList + + def _makeEntry(self, frame, text, width=20): + label = tkinter.Label(frame, text=text) + label.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.NO) + entry = tkinter.Entry(frame, relief=tkinter.SUNKEN, width=width) + entry.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.NO) + return entry + + def _makeSegmentColumns(self, segNum, frame, segmentAreas): + # Need standard widths so the columns line up across segments + sn_width = 4 + hz_width = 15 + + widgets = [] + + segNumFrame = tkinter.Frame(frame, relief=tkinter.FLAT, width=sn_width) + label = tkinter.Label(segNumFrame, text=repr(segNum)) + label.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.YES) + #segNumFrame.pack(side=Tkinter.LEFT, expand=Tkinter.NO, + # fill=Tkinter.Y, anchor=Tkinter.N) + widgets.append(segNumFrame) + + landAreas = self._parent._inlandAreas() + self._parent._coastalAreas() + areaDisplayType_land, width1 = self._parent._areaDisplayType_land() + areaDisplayType_marine, width2 = self._parent._areaDisplayType_marine() + zf_width = max(width1, width2) + +# sb = ScrolledBox(frame) + sb = ScrolledBox(frame, side="left") + interior = sb.interior() + + zoneFrame = tkinter.Frame(interior, relief=tkinter.FLAT, width=zf_width) + if areaDisplayType_land != 'ugcCode' or areaDisplayType_marine != 'ugcCode': + accessor = ModuleAccessor.ModuleAccessor() + areaDict = accessor.variable(self._parent._areaDictionary,"AreaDictionary") + + segmentAreas.sort() + for area in segmentAreas: + if area in landAreas: + areaDisplayType = areaDisplayType_land + else: + areaDisplayType = areaDisplayType_marine + if areaDisplayType 
!= "ugcCode": + try: area = areaDict[area].get(areaDisplayType) + except: pass + if area is None: + area = "" + area= self._linebreak(area, zf_width) + label = tkinter.Label(zoneFrame, text=area, width=zf_width, anchor=tkinter.W) + label.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) + zoneFrame.pack(side=tkinter.LEFT, expand=tkinter.NO, + fill=tkinter.Y, anchor=tkinter.N) + #sb.pack(side=Tkinter.LEFT, fill=Tkinter.BOTH, expand=Tkinter.NO) + interior.update() + h=interior.winfo_reqheight() + sizeDict = self._parent._GUI_sizing_dict() + zoneLines = sizeDict["zoneLines"] + charSize = sizeDict["charSize"] + heightLimit = zoneLines * charSize + if h > heightLimit: + h = heightLimit + w=interior.winfo_reqwidth() + sb._canvas["height"] = h + sb._canvas["width"] = w + widgets.append(sb) + #sb.pack(side=Tkinter.LEFT) + + hazardFrame = tkinter.Frame(frame, relief=tkinter.FLAT, width=hz_width) + hazardTable = self._argDict["hazards"] + hazards = hazardTable.getHazardList(segmentAreas) + if hazards == []: + hazards = [{'phensig':'None'}] + hazardKeys = [] + addEntry=False + # Updated code below to make Situation selection smarter + for hazard in hazards: + hazKey = hazard['phensig'] + " " + hazard['act'] + sitKey = hazard['act'] + hazard['phen'] + "." 
+ hazard['sig'] + hazardKeys.append(hazard['phen']+"."+hazard['sig']) + label = tkinter.Label(hazardFrame, text=hazKey,width=hz_width) + label.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) + if hazard['phen'] == "HU" and hazard['sig'] == "S": + addEntry = True + #hazardFrame.pack(side=Tkinter.LEFT, expand=Tkinter.NO, + # fill=Tkinter.Y, anchor=Tkinter.N) + widgets.append(hazardFrame) + print("\n\n***********************") + print("sitKey is ", sitKey) + return widgets, hazardKeys, sitKey, addEntry + + def _linebreak(self, phrase, linelength, breakStr=[" ", "..."]): + # Break phrase into lines the given linelength + if len(phrase) <= linelength: return phrase + start = 0 + str = "" + further = 0 + while start < len(phrase): + end = start + linelength + further + if end >= len(phrase): + str = str + phrase[start:len(phrase)] + "\n" + break + breakFound = 0 + #search for break characters in string + for breakChars in breakStr: + ind = string.rfind(phrase, breakChars, start, end) + if ind >= 0: + breakFound = 1 + break + #if not found, then we need to search further, this makes the + #line too long, but it is better than simply splitting a word + #in the middle of it. 
+ if breakFound == 0: + further = further + 1 + continue + + if breakChars != " ": + # We want to preserve the break characters, not drop them + includeInd = ind + len(breakChars) + else: + includeInd = ind + + str = str + phrase[start:includeInd] + "\n" + start = ind + len(breakChars) + further = 0 + return str + + def _makeLine(self, interior, row, columnspan, width=200, char="-"): + row = row+1 + lineFrame = tkinter.Frame(interior, relief=tkinter.FLAT) + text="" + for i in range(width): text = text + char + label = tkinter.Label(lineFrame, text=text) + label.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.NO) + lineFrame.grid(row=row, columnspan=columnspan) + return row + + def cancelCB(self): + self._status = "Cancel" + #self.cancel() + self.withdraw() + self.destroy() + + def _entryName(self, name): + return name+"_entry" + + def _makeTuple(self,str): + str = re.sub('(?im)[^_a-z]', '', str) + return (str+":",str) + + def _setVarDict(self, key, value, options=None): + if options is not None: + value = options[value] + if type(value) is tuple: + value = value[1] + self._varDict[self._makeTuple(key)] = value + + def status(self): + return self._status + + def buttonbox(self): + # override the existing ok/cancel button box, removing it. + # we do this so that we can attach our own hooks into the functions. 
+ pass + + +class HLS_Overview(HLS_Dialog): + def __init__(self, parent, argDict, segmentList, infoDict=None): + HLS_Dialog.__init__(self, parent, argDict, segmentList, infoDict) + + def body(self, master): + # build the main display dialog + tkObject_dict = self._tkObject_dict + overviewList = self._parent._overview_list(self._argDict) + endInstructions = self._parent._overviewEndInstructions() + fontDict = self._parent._font_GUI_dict() + + # OVERVIEW header + headerFG, headerFont = fontDict["headers"] + frame = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=1) + label = tkinter.Label(frame, text="OVERVIEW", fg=headerFG, font=headerFont) + label.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.NO) + frame.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) + + numBoxes = 3 + + boxes = [] + for i in range(numBoxes): + newBox = tkinter.Frame(master) + newBox.pack(side=tkinter.TOP, expand=tkinter.NO, + fill=tkinter.Y, anchor=tkinter.W) + boxes.append(newBox) + + for infoDict in overviewList: + name = infoDict["name"] + label = infoDict["label"] + options = infoDict.get("options", []) + entryField = infoDict.get("entryField", None) + default = infoDict.get("default", None) + optionType = infoDict.get("optionType", "radio") + + index = overviewList.index(infoDict) + if index < 3: + boxNum = 0 + buttonSide=tkinter.TOP + frameSide = tkinter.LEFT + elif index in [3,4]: + boxNum = 1 + buttonSide=tkinter.LEFT + frameSide=tkinter.TOP + elif index in [5,6]: + boxNum = 2 + buttonSide=tkinter.TOP + frameSide=tkinter.LEFT + + box = boxes[boxNum] + + if name == "MainHeadline": entryField = None + + tkObject_dict[name], entryObject = self._makeRadioOrCheckList( + box, label, options, default, buttonSide=buttonSide, frameSide=frameSide, + entryField=entryField, headerFG=headerFG, + headerFont=headerFont, boxType=optionType) + if entryObject is not None: + tkObject_dict[self._entryName(name)] = entryObject + + if name == "MainHeadline": + frame = 
tkinter.Frame(box, relief=tkinter.GROOVE, borderwidth=1) + tkObject_dict[self._entryName(name)] = self._makeEntry(frame, "", 80) + frame.pack(fill=tkinter.X, expand=tkinter.YES) + + # End Instructions and Buttons + fg, font = fontDict["instructions"] + frame = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=1) + label = tkinter.Label(frame, text=endInstructions, fg=fg, font=font) + label.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.NO) + self._makeButtons(frame) + frame.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) + +############## Graying PreviousHLS button + def _makeButtons(self, master): + frame = tkinter.Frame(master) + buttonList = self._parent._GUI1_configDict().get("buttonList", []) + for button, label in buttonList: + state = tkinter.NORMAL + if button == "PreviousHLS": + command = self.previousCB + allCON = self._argDict.get("allCON", False) + if not allCON: state = tkinter.DISABLED + elif button == "Reset": + command = self.resetCB + elif button == "Next": + command = self.okCB + else: # Cancel + command = self.cancelCB + tkinter.Button(frame, text=label, command=command, width=10, + state=state).pack(side=tkinter.LEFT, pady=5, padx=10) + frame.pack() + + def resetCB(self): + self._status = "Reset" + self.ok() + + def previousCB(self): + self._status = "UsePrev" + self.ok() + + def okCB(self): + # pull the data from the tkObject_dict before they get toasted + tkObject_dict = self._tkObject_dict + overviewList = self._parent._overview_list(self._argDict) + for infoDict in overviewList: + name = infoDict["name"] + label = infoDict["label"] + options = infoDict.get("options", []) + entryField = infoDict.get("entryField", None) + default = infoDict.get("default", None) + optionType = infoDict.get("optionType", "radio") + + if optionType == "check": + checkList = [] + ivarList = tkObject_dict[name] + for i in range(len(options)): + if ivarList[i].get(): + checkList.append(options[i]) + value = checkList + self._setVarDict(name, 
value) + else: + value = tkObject_dict[name].get() + self._setVarDict(name, value, options) + + if entryField is not None: + entryName = self._entryName(name) + self._setVarDict(entryName, tkObject_dict[entryName].get()) + # close window and set status "Ok" + self._status = "Ok" + self.ok() + +class HLS_Situation(HLS_Dialog): + def __init__(self, parent, argDict, segmentList, infoDict=None): + HLS_Dialog.__init__(self, parent, argDict, segmentList, infoDict) + + def body(self, master): + tkObject_dict = self._tkObject_dict + situations = self._parent._situation_list() + self._situationLabels = [entry['label'] for entry in situations] + fontDict = self._parent._font_GUI_dict() + headerFG, headerFont = fontDict["headers"] + guiLabels = self._parent._GUI_labels() + + sizeDict = self._parent._GUI_sizing_dict() + heightLimit = sizeDict["GUI_height_limit"] + width = sizeDict["GUI_2_width"] + zoneLines = sizeDict["zoneLines"] + + sb = ScrolledBox(master) + interior = sb.interior() + + row = 0 + columns=4 + # SITUATION header + frame = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=1) + label = tkinter.Label(frame, text="SITUATIONS", fg=headerFG, font=headerFont) + label.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.NO) + frame.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) +## frame.grid(row=0, columnspan=columns, sticky=Tkinter.W) +## row = row + 1 + + # Labels + frame = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=1) + text = guiLabels['GUI_2'] + label = tkinter.Label(frame, fg=headerFG, font=headerFont, text=text) + label.pack(side=tkinter.LEFT, fill=tkinter.X, expand=tkinter.NO) + frame.pack(side=tkinter.TOP,fill=tkinter.X, expand=tkinter.NO) +## frame.grid(row=row, columnspan=columns, sticky=Tkinter.W) +## row=row+1 + + uiSegments = [] + self._segNum = 0 + for segmentAreas in self._segmentList: + widgets, uiSegment = self._makeSegmentFrame(interior, segmentAreas) + uiSegments.append(uiSegment) + column = 0 + for widget in 
widgets: + widget.grid(sticky=tkinter.N+tkinter.W, row=row, column=column) + column = column +1 + row=self._makeLine(interior, row, columnspan=columns, width=160) + row = row+1 + tkObject_dict["segments"] = uiSegments + #segBox.pack(fill=Tkinter.X, expand=Tkinter.YES) + bframe = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=0) + self._makeButtons(bframe, row=row, columnspan=columns) + + # Get the requested size of the interior frame. + # While winfo_reqheight should get the size the widget is requesting, + # It seems you still have to call update first, and calling update + # certainly won't hurt - PJ + interior.update() + h=interior.winfo_reqheight() + if h > heightLimit: + h = heightLimit + w=interior.winfo_reqwidth() + if w > width: + w = width + sb._canvas["height"] = h + sb._canvas["width"] = w + + sb.pack(side=tkinter.TOP, fill=tkinter.BOTH, expand=tkinter.YES) + bframe.pack(side=tkinter.BOTTOM,fill=tkinter.X, expand=tkinter.NO) + + def _makeSegmentFrame(self, master, segmentAreas): + #frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) + self._segNum+=1 + segNum = self._segNum + widgets, hazardKeys, sitKey, addEntry = self._makeSegmentColumns(segNum, master, segmentAreas) + + # Find situations for segment hazards + # IF the Event Context is "Abbreviated", "NonEvent" or "PostEvent" + # only allow that matching situation + # Further refine the choices based on the action/phen/sig combo + # and the EC + + ec = self._infoDict[("EventContext:", "EventContext")] + onlySituation = None + if ec in ["Abbreviated", "NonEvent", "PostEvent"]: + onlySituation = ec + situationDicts = self._parent._situation_list() + situations = [] + for sitDict in situationDicts: + sitEC = sitDict.get("ec", []) + sitPairs = sitDict.get("hazPairings", []) +## sitActions = sitDict.get("action", []) + if sitKey in sitPairs and ec in sitEC: +## for hazardKey in hazardKeys: +## if hazardKey in sitHazards: + if sitDict not in situations: + sitName = sitDict['name'] + 
if onlySituation and sitName != onlySituation: + continue + situations.append(sitDict) + + print("situations are: ", situations) + situationLabels = [sitDict['label'] for sitDict in situations] + situationEntryFrame = tkinter.Frame(master, relief=tkinter.FLAT, borderwidth=1) + situationFrame = tkinter.Frame(situationEntryFrame, relief=tkinter.FLAT, borderwidth=1) + uiSituation, entryObj = self._makeRadioOrCheckList( + situationFrame, " ",situationLabels, buttonSide=tkinter.LEFT, frameSide=tkinter.LEFT, + listFrameRelief=tkinter.FLAT) + situationFrame.pack(side=tkinter.TOP, expand=tkinter.YES, + fill=tkinter.Y, anchor=tkinter.W) + + if addEntry: + # Add an entry field for headline plus option to Use previous + entryFrame = tkinter.Frame(situationEntryFrame, relief=tkinter.GROOVE, borderwidth=1) + uiEntry = self._makeEntry(entryFrame, "Headline", width=45) + ivarList = self._makeCheckList( + entryFrame, "", [{"name":"Use Prev", "label":"UsePrev"}], colLabels=False) + name, uiUsePrev, iOrder, iPrev, iImport = ivarList[0] + entryFrame.pack(side=tkinter.TOP) + else: + uiEntry = None + uiUsePrev = None + #situationEntryFrame.pack(side=Tkinter.LEFT, expand=Tkinter.YES, + # fill=Tkinter.Y, anchor=Tkinter.W) + widgets.append(situationEntryFrame) +## +## frame.pack(fill=Tkinter.X, expand=Tkinter.YES) + return widgets, (segNum, segmentAreas, uiSituation, situations, uiEntry, uiUsePrev) + + def _makeButtons(self, master, row, columnspan): + frame = tkinter.Frame(master) + buttonList = self._parent._GUI2_configDict().get("buttonList", []) + for button, label in buttonList: + if button == "Next": + command = self.okCB + else: # button == "Cancel": + command = self.cancelCB + tkinter.Button(frame, text=label, command=command, width=10, + state=tkinter.NORMAL).pack(side=tkinter.LEFT, pady=5, padx=10) + frame.grid(row=row, columnspan=columnspan) + + def okCB(self): + # pull the data from the tkObject_dict before they get toasted + tkObject_dict = self._tkObject_dict + segments = [] 
+ #print "\nsegments", tkObject_dict["segments"] + for segNum, segment, uiSituation, situationDicts, uiEntry, uiUsePrev in tkObject_dict["segments"]: + index = uiSituation.get() + sitDict = copy.deepcopy(situationDicts[index]) + if uiEntry is not None: sitDict['userHeadline_HU_S'] = uiEntry.get() + if uiUsePrev is not None: sitDict['usePrev_HU_S_Headline'] = uiUsePrev.get() + segments.append((segNum, segment, sitDict)) + self._varDict["segments"] = segments + + #print "varDict", self._varDict + # close window and set status "Ok" + self._status = "Ok" + self.ok() + +class HLS_Scenario(HLS_Dialog): + def __init__(self, parent, argDict, segmentList, infoDict=None): + HLS_Dialog.__init__(self, parent, argDict, segmentList, infoDict) + + def body(self, master): + + sizeDict = self._parent._GUI_sizing_dict() + heightLimit = sizeDict["GUI_height_limit"] + width = sizeDict["GUI_3_width"] + segments = self._infoDict["segments"] + + sb = ScrolledBox(master) + interior = sb.interior() + + # build the main display dialog + columns=6 + tkObject_dict = self._tkObject_dict + #box = Tkinter.Frame(interior) + row, tkObject_dict["segments"] = self._makeScenarioGUI(interior) + bframe = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=0) + self._makeButtons(bframe, row=row, columnspan=columns) + #box.pack(side=Tkinter.TOP, fill=Tkinter.BOTH, expand=Tkinter.YES) + + interior.update() + h=interior.winfo_reqheight() + if h > heightLimit: + h = heightLimit + w=interior.winfo_reqwidth() + if w > width: + w = width + sb._canvas["height"] = h + sb._canvas["width"] = w + + sb.pack(side=tkinter.TOP, fill=tkinter.BOTH, expand=tkinter.YES) + bframe.pack(side=tkinter.BOTTOM,fill=tkinter.X, expand=tkinter.NO) + + def _makeScenarioGUI(self, master): + # Entry for each segment + + #segBox = Tkinter.Frame(master) + # Labels + row=0 + columns=6 + segments = self._infoDict["segments"] + guiLabels = self._parent._GUI_labels() + fontDict = self._parent._font_GUI_dict() + headerFG, headerFont = 
fontDict["headers"] + texta = guiLabels['GUI_3a'] + textb = guiLabels['GUI_3b'] + labela = tkinter.Label(master, fg=headerFG, font=headerFont, text=texta) + labela.grid(row=0, columnspan=5) + labelb = tkinter.Label(master, fg=headerFG, font=headerFont, text=textb) + labelb.grid(row=0, column=5, sticky=tkinter.W) + #frame.pack(fill=Tkinter.X, expand=Tkinter.YES) + #frame.grid(row=0, columnspan=columns, sticky=Tkinter.W) + row=row+1 + + # Segments + uiSegments = [] + #print "\n\nInfoDict", self._infoDict + for segment in segments: + widgets, uiSegment = self._makeSegmentFrame(master, segment) + uiSegments.append((uiSegment)) + column=0 + for widget in widgets: + widget.grid(sticky=tkinter.N+tkinter.W, row=row, column=column) + column = column +1 + row=self._makeLine(master, row, columnspan=columns, width=190) + row = row+1 + #segBox.pack(fill=Tkinter.X, expand=Tkinter.YES) + return row, uiSegments + + def _makeSegmentFrame(self, master, segment): + #frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) + segNum, areas, situation = segment + situationLabel = situation['label'] + situationName = situation['name'] + + widgets, addEntry, sitKey, hazardKeys = self._makeSegmentColumns(segNum, master, areas) + + # Situation + situationFrame = tkinter.Frame(master, relief=tkinter.FLAT,borderwidth=1) + if len(situationLabel) > 14: + situationLabel = situationLabel[0:14] + "\n" + situationLabel[14:] + label = tkinter.Label(situationFrame, text=situationLabel, width=15) + label.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) + #situationFrame.pack(side=Tkinter.LEFT, expand=Tkinter.NO, + # fill=Tkinter.Y, anchor=Tkinter.N) + widgets.append(situationFrame) + + # Scenarios + scenarioFrame = tkinter.Frame(master, relief=tkinter.FLAT, borderwidth=1, width=30) + scenarios = situation["scenarios"] + uiScenario = self._makeRadioOrCheckList( + scenarioFrame, "", scenarios, buttonSide=tkinter.TOP, frameSide=tkinter.LEFT, + listFrameRelief=tkinter.FLAT) + 
#scenarioFrame.pack(side=Tkinter.LEFT, expand=Tkinter.YES, fill=Tkinter.Y, + # anchor=Tkinter.W) + widgets.append(scenarioFrame) + + # Sections + # If Abbreviated, no sections will be shown + sectionFrame = tkinter.Frame(master, relief=tkinter.FLAT, borderwidth=1) + if situationLabel == "Abbreviated": + uiSections = None + text = " " + label = tkinter.Label(sectionFrame, text=text) + label.pack(side=tkinter.TOP, expand=tkinter.NO,fill=tkinter.Y, anchor=tkinter.W) + else: + sectionList = self._parent._segmentSections() + # Determine maximum label length +## maxLen = 0 +## for section in sectionList: +## label=section.get('label') +## labelLen = len(label) +## if labelLen > maxLen: maxLen = labelLen +## text = str.ljust(" Prev", maxLen+40) + "Import" +## print "text", maxLen, len(text), text +## usePrevLabel = Tkinter.Label(sectionFrame, text=text, width=maxLen+2) + #usePrevLabel = Tkinter.Label(sectionFrame, text="Prev Import", width=11) + #usePrevLabel.pack(side=Tkinter.TOP, expand=Tkinter.NO,fill=Tkinter.Y, anchor=Tkinter.W) + # Filter for sections to be displayed + sections = [] + for section in sectionList: + inSegments = section.get('inSegments', None) + if inSegments in [None, 'always']: + continue + # Check excludeFromSituations + excluded = section.get('excludeFromSituations', []) + if situationName in excluded: + continue + # Check for whether or not to include + # If ANY zone meets the "includeFor" criteria for the section, + # Then it will be included as an option + includeFor = section.get("includeFor", None) + if includeFor is None: + include = True + elif type(includeFor) is types.MethodType: + name = section.get('name') + include = includeFor(name, areas) + else: + include = False + for area in areas: + if area in includeFor: + include=True + break + if include: sections.append(section) + uiSections = self._makeCheckList(sectionFrame, "", sections, segmentAreas=areas) + #sectionFrame.pack(side=Tkinter.LEFT, expand=Tkinter.NO, + # fill=Tkinter.Y, 
anchor=Tkinter.N) + widgets.append(sectionFrame) + + #frame.pack(fill=Tkinter.X, expand=Tkinter.YES) + return widgets, (segNum, areas, situation, uiScenario, scenarios, uiSections) + + def _makeButtons(self, master, row, columnspan): + # create the basic dialog buttons the user sees (Ok, Cancel) + frame = tkinter.Frame(master) + buttonList = self._parent._GUI3_configDict().get("buttonList", []) + for button, label in buttonList: + if button == "Ok": + command = self.okCB + else: # button == "Cancel": + command = self.cancelCB + tkinter.Button(frame, text=label, command=command, width=10, + state=tkinter.NORMAL).pack(side=tkinter.LEFT, pady=5, padx=10) + #frame.pack() + frame.grid(row=row, columnspan=columnspan) + + def okCB(self): + # pull the data before they get toasted + tkObject_dict = self._tkObject_dict + + segments = [] + for segNum, areas, situation, uiScenario, scenarios, uiSections in tkObject_dict["segments"]: + extraInfo = { + "usePrev_HU_S_Headline":situation.get("usePrev_HU_S_Headline",None), + "userHeadline_HU_S":situation.get("userHeadline_HU_S",None), + } + # Only need the situation name for varDict + situation= situation["name"] + scenarioObj, entryObj = uiScenario + label, scenario = scenarios[scenarioObj.get()] + sections = [] + if uiSections is not None: + for name, iCheck, iOrder, prevVar, importVar in uiSections: + if iCheck.get(): + try: + order = iOrder.get() + except: + order = None + try: + usePrev = prevVar.get() + except: + usePrev = 0 + try: + useImport = importVar.get() + except: + useImport = None + sections.append((name, order, usePrev, + useImport)) + #print "usePrev, useImport", name, usePrev, useImport + segments.append((segNum, areas, situation, scenario, sections, extraInfo)) + self._varDict["segments"] = segments + LogStream.logVerbose("varDict=",self._varDict) + + # close window and set status "Ok" + self._status = "Ok" + self.ok() + diff --git 
a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_HWO_MultiPil.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_HWO_MultiPil.py index 43e4df748e..42c41d6821 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_HWO_MultiPil.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_HWO_MultiPil.py @@ -1,256 +1,256 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-## - -# ---------------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -# -# Hazard_HWO -# Produces HWO product. -# -# Author: Matt Davis/ARX -# ---------------------------------------------------------------------------- - -import GenericReport -import TextRules -import string, time, re, os, types, copy - -class TextProduct(GenericReport.TextProduct): - Definition = copy.deepcopy(GenericReport.TextProduct.Definition) - - #Definition['displayName'] = None - Definition['displayName'] = "BaselineHazard_HWO_ (Hazardous Weather Outlook)" - - Definition["outputFile"] = "{prddir}/TEXT/HWO_.txt" - Definition["database"] = "Official" # Source database - - Definition["debug"] = 0 - - # Name of map background for creating Combinations - Definition["mapNameForCombinations"] = ["Zones_", - "Marine_Zones_"] - - ## Edit Areas: Create Combinations file with edit area combinations. - Definition["defaultEditAreas"] = "Combinations_HWO__" - Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display - - # product identifiers - Definition["productName"] = "Hazardous Weather Outlook" # product name - Definition["fullStationID" ] = "" # 4 letter station ID - Definition["wmoID" ] = "" # WMO code - Definition["wfoCityState" ] = "" # Location of WFO - Definition["pil" ] = "" # product pil - Definition["textdbPil" ] = "" # pil: storing to AWIPS textdb - Definition["awipsWANPil" ] = "" # pil: transmitting to WAN. 
- Definition["wfoSiteID"] = "" - Definition["areaName"] = "" #optional area name for product - - # Area Dictionary -- Descriptive information about zones - Definition["areaDictionary"] = "AreaDictionary" - - # Language - Definition["language"] = "english" - Definition["lineLength"] = 66 #Maximum line length - - # Expirations - Definition["purgeTime"] = 24 # Default Expiration in hours if - - # Header format - Definition["includeCities"] = 0 # Cities not included in area header - Definition ["cityDescriptor"] = "Including the cities of" - Definition["includeZoneNames"] = 1 # Zone names will be included in the area header - - # - # The below is used for NIL HWOS - # - - VariableList = [ - (("HWO Type", "hwoType") , "No Active Weather", "radio", - ["No Active Weather", "Active Weather"]) - ] - - def __init__(self): - GenericReport.TextProduct.__init__(self) - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - - # - # Get combinations to be used in the header - # - - combinations = argDict["combinations"] - if combinations is not None: - headerAreaList = self.getCurrentAreaNames(argDict, areaLabel) - usingCombo = 1 - else: - for editArea, label in self._defaultEditAreas: - if label == areaLabel: - headerAreaList = [editArea] - - # This is the header for an edit area combination - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas, - cityDescriptor=self._cityDescriptor, areaList=headerAreaList, - includeCities=self._includeCities, - includeZoneNames = self._includeZoneNames) - fcst = fcst + areaHeader - return fcst - - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - - # - # Get combinations to be used for the segment - # - - combinations = argDict["combinations"] - if combinations is not None: - areaList = self.getCurrentAreaNames(argDict, areaLabel) - usingCombo = 1 - else: - for editArea, label in self._defaultEditAreas: - if label == areaLabel: - 
areaList = [editArea] - - # - # Make the general area Phrase - # - - generalAreas = self.getGeneralAreaList(areaList, areaDictName=self._areaDictionary) - areaPhrase = "This Hazardous Weather Outlook is for portions of " - - # Make a list of all general areas we found - #parts of the states - areaList = [] - for state, partOfState, names in generalAreas: - if partOfState == '' or partOfState == ' ': - areaList.append(state) - else: - areaList.append(partOfState + " " + state) - - # Add this general area to the text - areaPhrase += self.punctuateList(areaList) - fcst = fcst + areaPhrase + ".\n\n" - - # - # Make the time period headers - # - - currentTime = argDict['creationTime'] - currentHour = time.strftime("%H", time.localtime(currentTime)) - currentHour = int(currentHour) - - - if currentHour < 3: - baseTime = currentTime - 10800 - else: - baseTime = currentTime - - tommorow = time.strftime("%A", time.localtime(baseTime + 86400)) - daySeven = time.strftime("%A", time.localtime(baseTime + 518400)) - - - if currentHour >= 16 or currentHour < 3: - dayOnePhrase = ".DAY ONE...Tonight" - elif currentHour >= 3 and currentHour < 11: - dayOnePhrase = ".DAY ONE...Today and Tonight" - elif currentHour >= 11 and currentHour < 16: - dayOnePhrase = ".DAY ONE...This Afternoon and Tonight" - - dayTwoSevenPhrase = ".DAYS TWO THROUGH SEVEN..." 
+ tommorow +\ - " through " + daySeven - - # - # Check for the NIL HWO - # - - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - dayOneText = "" - dayTwoSevenText = "" - spotterText = "" - - if self._hwoType == "No Active Weather": - dayOneText = self.getDayOneText() - dayTwoSevenText = self.getDayTwoSevenText() - spotterText = self.getSpotterText() - - spotterInfoStmt = self.getSpotterInfoStmt() - - fcst = fcst + dayOnePhrase + "\n\n" + dayOneText + "\n\n" +\ - dayTwoSevenPhrase + "\n\n" + dayTwoSevenText + "\n\n" +\ - spotterInfoStmt + "\n\n" +\ - spotterText + "\n\n" - - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst + "\n$$\n\n" - - def _postProcessProduct(self, fcst, argDict): - - fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "-", "..."]) - - # - # Clean up multiple line feeds - # - - fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) - fcst = fixMultiLF.sub(r'\1', fcst) - - # - # Finish Progress Meter - # - - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - # REQUIRED OVERRIDES - - - # OPTIONAL OVERRIDES - def getDayOneText(self): - return "No hazardous weather is expected at this time." - - def getDayTwoSevenText(self): - return "No hazardous weather is expected at this time." - - def getSpotterText(self): - return "Spotter activation will not be needed." - - def getSpotterInfoStmt(self): - return ".SPOTTER INFORMATION STATEMENT..." - - # PATCHES: To be removed with each new release - - - - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. 
+## + +# ---------------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +# +# Hazard_HWO +# Produces HWO product. +# +# Author: Matt Davis/ARX +# ---------------------------------------------------------------------------- + +import GenericReport +import TextRules +import string, time, re, os, types, copy + +class TextProduct(GenericReport.TextProduct): + Definition = copy.deepcopy(GenericReport.TextProduct.Definition) + + #Definition['displayName'] = None + Definition['displayName'] = "BaselineHazard_HWO_ (Hazardous Weather Outlook)" + + Definition["outputFile"] = "{prddir}/TEXT/HWO_.txt" + Definition["database"] = "Official" # Source database + + Definition["debug"] = 0 + + # Name of map background for creating Combinations + Definition["mapNameForCombinations"] = ["Zones_", + "Marine_Zones_"] + + ## Edit Areas: Create Combinations file with edit area combinations. + Definition["defaultEditAreas"] = "Combinations_HWO__" + Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display + + # product identifiers + Definition["productName"] = "Hazardous Weather Outlook" # product name + Definition["fullStationID" ] = "" # 4 letter station ID + Definition["wmoID" ] = "" # WMO code + Definition["wfoCityState" ] = "" # Location of WFO + Definition["pil" ] = "" # product pil + Definition["textdbPil" ] = "" # pil: storing to AWIPS textdb + Definition["awipsWANPil" ] = "" # pil: transmitting to WAN. 
+ Definition["wfoSiteID"] = "" + Definition["areaName"] = "" #optional area name for product + + # Area Dictionary -- Descriptive information about zones + Definition["areaDictionary"] = "AreaDictionary" + + # Language + Definition["language"] = "english" + Definition["lineLength"] = 66 #Maximum line length + + # Expirations + Definition["purgeTime"] = 24 # Default Expiration in hours if + + # Header format + Definition["includeCities"] = 0 # Cities not included in area header + Definition ["cityDescriptor"] = "Including the cities of" + Definition["includeZoneNames"] = 1 # Zone names will be included in the area header + + # + # The below is used for NIL HWOS + # + + VariableList = [ + (("HWO Type", "hwoType") , "No Active Weather", "radio", + ["No Active Weather", "Active Weather"]) + ] + + def __init__(self): + GenericReport.TextProduct.__init__(self) + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + + # + # Get combinations to be used in the header + # + + combinations = argDict["combinations"] + if combinations is not None: + headerAreaList = self.getCurrentAreaNames(argDict, areaLabel) + usingCombo = 1 + else: + for editArea, label in self._defaultEditAreas: + if label == areaLabel: + headerAreaList = [editArea] + + # This is the header for an edit area combination + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas, + cityDescriptor=self._cityDescriptor, areaList=headerAreaList, + includeCities=self._includeCities, + includeZoneNames = self._includeZoneNames) + fcst = fcst + areaHeader + return fcst + + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + + # + # Get combinations to be used for the segment + # + + combinations = argDict["combinations"] + if combinations is not None: + areaList = self.getCurrentAreaNames(argDict, areaLabel) + usingCombo = 1 + else: + for editArea, label in self._defaultEditAreas: + if label == areaLabel: + 
areaList = [editArea] + + # + # Make the general area Phrase + # + + generalAreas = self.getGeneralAreaList(areaList, areaDictName=self._areaDictionary) + areaPhrase = "This Hazardous Weather Outlook is for portions of " + + # Make a list of all general areas we found + #parts of the states + areaList = [] + for state, partOfState, names in generalAreas: + if partOfState == '' or partOfState == ' ': + areaList.append(state) + else: + areaList.append(partOfState + " " + state) + + # Add this general area to the text + areaPhrase += self.punctuateList(areaList) + fcst = fcst + areaPhrase + ".\n\n" + + # + # Make the time period headers + # + + currentTime = argDict['creationTime'] + currentHour = time.strftime("%H", time.localtime(currentTime)) + currentHour = int(currentHour) + + + if currentHour < 3: + baseTime = currentTime - 10800 + else: + baseTime = currentTime + + tommorow = time.strftime("%A", time.localtime(baseTime + 86400)) + daySeven = time.strftime("%A", time.localtime(baseTime + 518400)) + + + if currentHour >= 16 or currentHour < 3: + dayOnePhrase = ".DAY ONE...Tonight" + elif currentHour >= 3 and currentHour < 11: + dayOnePhrase = ".DAY ONE...Today and Tonight" + elif currentHour >= 11 and currentHour < 16: + dayOnePhrase = ".DAY ONE...This Afternoon and Tonight" + + dayTwoSevenPhrase = ".DAYS TWO THROUGH SEVEN..." 
+ tommorow +\ + " through " + daySeven + + # + # Check for the NIL HWO + # + + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + dayOneText = "" + dayTwoSevenText = "" + spotterText = "" + + if self._hwoType == "No Active Weather": + dayOneText = self.getDayOneText() + dayTwoSevenText = self.getDayTwoSevenText() + spotterText = self.getSpotterText() + + spotterInfoStmt = self.getSpotterInfoStmt() + + fcst = fcst + dayOnePhrase + "\n\n" + dayOneText + "\n\n" +\ + dayTwoSevenPhrase + "\n\n" + dayTwoSevenText + "\n\n" +\ + spotterInfoStmt + "\n\n" +\ + spotterText + "\n\n" + + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + "\n$$\n\n" + + def _postProcessProduct(self, fcst, argDict): + + fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "-", "..."]) + + # + # Clean up multiple line feeds + # + + fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) + fcst = fixMultiLF.sub(r'\1', fcst) + + # + # Finish Progress Meter + # + + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + # REQUIRED OVERRIDES + + + # OPTIONAL OVERRIDES + def getDayOneText(self): + return "No hazardous weather is expected at this time." + + def getDayTwoSevenText(self): + return "No hazardous weather is expected at this time." + + def getSpotterText(self): + return "Spotter activation will not be needed." + + def getSpotterInfoStmt(self): + return ".SPOTTER INFORMATION STATEMENT..." 
+ + # PATCHES: To be removed with each new release + + + + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_RFW_MultiPil.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_RFW_MultiPil.py index c65ae5e1b2..e1c53e1d39 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_RFW_MultiPil.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_RFW_MultiPil.py @@ -1,925 +1,925 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-## - -######################################################################## -# Hazard_RFW.py -# Updated for OB9.3 by -## Brian Brong/REV, Ron Miller/OTX, Jeff Zeltwagner/NWSTC, Shannon White/FDTB -########################################################################## -import LogStream -import GenericHazards -import string, time, re, os, types, copy, sets -import CallToActions -import ModuleAccessor, StringUtils - -class TextProduct(GenericHazards.TextProduct): - VariableList = [ - (("Select RFW Type", "rfwType"), [], "check", []), - (("Source for Headline and \nAffected Area Bullet", "elevationSource"), - "Grids", "radio", ["Grids", "Previous Text"]), - ] - - Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) - - Definition['displayName'] = None - Definition['displayName'] = "BaselineHazard_RFW_ (FireWx Watch/Warning)" - - Definition["defaultEditAreas"] = "EditAreas_FireWx__" - Definition["mapNameForCombinations"] = "FireWxZones_" # Map background for creating Combinations - - #Special multiple product domains for certain sites: - if "" == "AFG": - if "_" == "_AFG": - Definition["subDomainUGCs"] = ["AKZ218","AKZ219","AKZ220","AKZ221", - "AKZ222","AKZ223","AKZ224","AKZ225", - "AKZ226"] - elif "_" == "_NSB": - Definition["subDomainUGCs"] = ["AKZ201","AKZ202","AKZ203","AKZ204", - "AKZ205","AKZ206"] - elif "_" == "_WCZ": - Definition["subDomainUGCs"] = ["AKZ207","AKZ208","AKZ209","AKZ210", - "AKZ211","AKZ212","AKZ213","AKZ214", - "AKZ215","AKZ216","AKZ217","AKZ227"] - - # Header configuration items - Definition["productName"] = "URGENT - FIRE WEATHER MESSAGE" # name of product - Definition["fullStationID"] = "" # full station identifier (4letter) - Definition["wmoID"] = "" # WMO ID - Definition["pil"] = "" # product pil - #Definition["areaName"] = "Statename" # Name of state, such as "Georgia" - Definition["wfoCityState"] = "" # Location of WFO - city state - Definition["wfoCity"] = "" # WFO Name as it should appear in a text product - 
Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. - Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. - Definition["outputFile"] = "{prddir}/TEXT/RFW_.txt" - - # OPTIONAL CONFIGURATION ITEMS - #Definition["database"] = "Official" # Source database. "Official", "Fcst", or "ISC" - #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished - #Definition["debug"] = 1 - #Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines - - Definition["purgeTime"] = 8 # Maximum hours for expireTime from issueTime - Definition["includeCities"] = 0 # Cities included in area header - Definition["accurateCities"] = 0 # If 1, cities are based on grids; - # otherwise full list is included - Definition["cityLocation"] = "CityLocation" # City lat/lon dictionary to use - Definition["cityDescriptor"] = "Including the cities of" - Definition["includeZoneNames"] = 1 # Zone names will be included in the area header - Definition["includeIssueTime"] = 1 # This should be set to zero for products - # that do not include a time lime below the UGC - #Definition["easPhrase"] = "" # Optional EAS phrase to be include in product header - Definition["lineLength"] = 66 - Definition["includeOverviewHeadline"] = 1 #If 1, the overview header is templated - Definition["includeOverview"] = 1 #If 1, the overview section is templated - Definition["bulletProd"] = 1 #If 1, the product is bulletted - - #Definition["hazardSamplingThreshold"] = (10, None) #(%cov, #points) - - Definition["numInHeadline"] = 1 - Definition["GenericBullets"] = ["AFFECTED AREA", "WIND", "HUMIDITY", "THUNDERSTORMS", "HIGHEST THREAT", "IMPACTS"] - Definition["locationsBullet"] = "AFFECTED AREA" - Definition["noNameInBullet"] = 1 - Definition["includeStateName"] = 0 - Definition["urlText"] = "" - - def __init__(self): - GenericHazards.TextProduct.__init__(self) - - def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", 
"EXT", "CAN", "CON", "EXP"] - return [ - ('FW.W', allActions, 'FireWx'), - ('FW.A', allActions, 'FireWx') - ] - - def _preProcessArea(self, fcst, segmentAreas, expireTime, argDict): - - # This is the header for an edit area combination - areaHeader = self.makeAreaHeader( - argDict, "", self._issueTime, expireTime, - self._areaDictionary, None, cityDescriptor=self._cityDescriptor, - areaList=segmentAreas, includeCities=self._includeCities, - includeZoneNames = self._includeZoneNames, - includeIssueTime = self._includeIssueTime, - accurateCities = self._accurateCities) - - fcst = fcst + areaHeader + "\n" - return fcst - - # - # Overridden to allow for attribution statement - # - def hazardBodyText(self, hazardList, argDict): - - hazardBodyPhrase = '' - - # - # First, sort the hazards for this segment by importance - # - - sortedHazardList = [] - for each in ['W', 'Y', 'A', 'S']: - for eachHazard in hazardList: - if eachHazard['sig'] == each: - if eachHazard not in sortedHazardList: - sortedHazardList.append(eachHazard) - - # - # Next, break them into individual lists based on action - # - - newList = [] - canList = [] - expList = [] - extList = [] - conList = [] - upgList = [] - statementList = [] - - for eachHazard in sortedHazardList: - if eachHazard['act'] in ['NEW', 'EXA', 'EXB'] and eachHazard['sig'] != 'S': - newList.append(eachHazard) - elif eachHazard['act'] in ['CAN'] and eachHazard['sig'] != 'S': - canList.append(eachHazard) - elif eachHazard['act'] in ['EXP'] and eachHazard['sig'] != 'S': - expList.append(eachHazard) - elif eachHazard['act'] in ['EXT'] and eachHazard['sig'] != 'S': - extList.append(eachHazard) - elif eachHazard['act'] in ['UPG'] and eachHazard['sig'] != 'S': - upgList.append(eachHazard) - elif eachHazard['sig'] != 'S': - conList.append(eachHazard) - elif eachHazard['sig'] == 'S': - statementList.append(eachHazard) - - # - # Now, go through each list and build the phrases - # - - nwsIntroUsed = 0 - - # - # This is for the new hazards - # - 
- phraseCount = 0 - lastHdln = None - for eachHazard in newList: - hdln = eachHazard['hdln'] - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) - hazNameA = self.hazardName(eachHazard['hdln'], argDict, True) - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - - if nwsIntroUsed == 0: - hazardBodyPhrase = "The National Weather Service in " + self._wfoCity - nwsIntroUsed = 1 - if phraseCount == 0: - phraseCount = 1 - hazardBodyPhrase = hazardBodyPhrase + " has issued " + \ - hazNameA + ", which is in effect" + endTimePhrase + ". " - elif phraseCount == 1: - phraseCount = 2 - if hdln != lastHdln: - hazardBodyPhrase = hazardBodyPhrase + hazNameA + \ - " has also been issued. This " + hazName + \ - " is in effect" + endTimePhrase + ". " - else: - hazardBodyPhrase = hazardBodyPhrase + hazNameA + \ - " has also been issued" + endTimePhrase + ". " - else: - hazardBodyPhrase = hazardBodyPhrase + "In addition..." + \ - hazNameA + " has been issued. This " + hazName + \ - " is in effect" + endTimePhrase + ". " - lastHdln = hdln - - # - # This is for the can hazards - # - - for eachHazard in canList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - - if nwsIntroUsed == 0: - hazardBodyPhrase = "The National Weather Service in " +\ - self._wfoCity - nwsIntroUsed = 1 - hazardBodyPhrase = hazardBodyPhrase + \ - " has cancelled the " + hazName + ". " - else: - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " has been cancelled. 
" - - # - # This is for the exp hazards - # - - phraseCount = 0 - for eachHazard in expList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - if self._bulletProd: - continue # No attribution for this case if it is a bullet product - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - - if eachHazard['endTime'] <= argDict['creationTime']: - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " is no longer in effect. " - else: - expTimeCurrent = argDict['creationTime'] - timeWords = self.getTimingPhrase(eachHazard, expTimeCurrent) - - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " will expire " + timeWords + ". " - - # - # This is for ext hazards - # - - for eachHazard in extList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - if self._bulletProd: - continue # No attribution for this case if it is a bullet product - endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " is now in effect" + endTimePhrase + ". " - - # - # This is for upgrade hazards - # - - for eachHazard in upgList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - hazName = self.hazardName(eachHazard['hdln'], argDict, False) - - hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ - " is no longer in effect. " - - # - # This is for con hazards - # - - for eachHazard in conList: - if len(eachHazard['hdln']) == 0: - continue #no defined headline, skip phrase - if self._bulletProd: - continue # No attribution for this case if it is a bullet product - endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) - hazNameA = self.hazardName(eachHazard['hdln'], argDict, True) - hazardBodyPhrase = hazardBodyPhrase + hazNameA + \ - " remains in effect" + endTimePhrase + ". 
" - - # - # This is for statement hazards - # - - #we will add in text later either by text capture or - #framing codes as needed - #for eachHazard in statementList: - # hazardBodyPhrase = hazardBodyPhrase + "|* Statement text *|." - - # - # This adds segment text - # - - segmentText = '' - - # - # Check that this segment codes to determine capture or not, - # and frame captured text or not - # - incTextFlag, incFramingCodes, skipCTAs, forceCTAList = \ - self.useCaptureText(sortedHazardList) - - # - # - # Check that the previous text exists - # - - foundCTAs = [] - for eachHazard in sortedHazardList: - if eachHazard.has_key('prevText'): - prevText = eachHazard['prevText'] - - if eachHazard['sig'] == 'S': - startPara = 0 - else: - startPara = 1 - segmentText, foundCTAs = self.cleanCapturedText(prevText, - startPara, addFramingCodes = incFramingCodes, - skipCTAs = skipCTAs) - tester = segmentText[0] - if tester == '*': - startPara = 1 - else: - startPara = 2 - - segmentText, foundCTAs = self.cleanCapturedText(prevText, - startPara, addFramingCodes = incFramingCodes, - skipCTAs = skipCTAs) - - # split the current bullets - split_bullets = segmentText.split("\n\n") - # reset the segmentText variable - segmentText = "" - for current_bullet in split_bullets: - - - # if the user wants to use the gridded data for the elevation and location info... - if self._elevationSource == "Grids": - # check to see if this is the ELEVATION bullet. If so, replace it with - # the elevation from the grid. - - ### Modification of Ron Miller's code to remove the check for the elevations bullet - ### since it could be the same as the locationsBullet. - ### Caused issues during testing at SLC. Thanks Linda! - ### bsb 5-9-10. - ### - ### locationsBullet is defined in the configuration section - locBullet = "* " + self._locationsBullet + "..." - # check to see if this is the LOCATIONS bullet. If so, replace it with - # the locations from the grid. 
- if current_bullet.find(locBullet) >= 0: - new_locations = self._getLocationsList(self._areaDictionary,argDict,eachHazard['seg']) - current_bullet = self.indentText(self._locationsBullet + "..." + new_locations, \ - indentFirstString="* ", indentNextString=" ", \ - maxWidth=65,breakStrings=[" ","..."]) - - # for all bullets, ensure that it's indented - if current_bullet.find("* ") == 0: - current_bullet = self.indentText(current_bullet.replace("* ",""), \ - indentFirstString="* ", indentNextString=" ", \ - maxWidth=65,breakStrings=[" ","..."]) - - # now add the bullet back to the list. check to make sure that it's - # not just a blank line. - if len(current_bullet) > 1: - segmentText = segmentText + current_bullet + "\n\n" - - - ################################################################################# - # End Ron Miller's added code - ################################################################################# - - # - # Check that the segment text isn't very short or blank - # - - if len(segmentText) < 6: - incTextFlag = 0 - # - # If segment passes the above checks, add the text - # - - if incTextFlag: - hazardBodyPhrase = hazardBodyPhrase + "\n\n" + \ - segmentText + '\n\n' -# added below for DR21194 -## else: -## if eachHazard['act'] != 'CAN': -## ### get the default bullets from the bullet dictionary -## bullets = self._getBullets(eachHazard, argDict, self._areaDictionary) -## hazardBodyPhrase = hazardBodyPhrase + "\n\n" + bullets -## else: -## hazardBodyPhrase = hazardBodyPhrase + \ -## "\n\n|* Cancellation text goes here *|.\n" - elif self._bulletProd: - forceList = ['HW','DS','EH','EC','BZ','WS','IS'] - for h in newList: - if h['phen'] in forceList: - eachHazard = h - if eachHazard['act'] == 'CAN': - hazardBodyPhrase = hazardBodyPhrase + \ - "\n\n|* Wrap-up text goes here *|.\n" - elif eachHazard['act'] == 'EXP': - hazardBodyPhrase = hazardBodyPhrase + \ - "\n\n|* Wrap-up text goes here *|.\n" - else: - ### get the default bullets from the 
bullet dictionary - bullets = self._getBullets(eachHazard, argDict, self._areaDictionary) - hazardBodyPhrase = hazardBodyPhrase + "\n\n" + bullets - else: - hazardBodyPhrase = hazardBodyPhrase + \ - "\n\n|* Segment text goes here *|.\n\n" -# end addition - - # - # This adds the call to action statements. This is only performed - # if the segment is 'NEW' or if the previous text has been discarded - # due to a CAN/EXP/UPG segment - # - - # remove items from forceCTAList if they exist in foundCTAs. Note - # that the formats of these lists are different, thus this code - # is more complicated - for ent in foundCTAs: - #only process CTAs that are vtec phen/sig based - if ent.find('.') == 2: - phensig = (ent[0:2], ent[3]) #phen.sig - if phensig in forceCTAList: - del forceCTAList[forceCTAList.index(phensig)] - - hazardBodyPhrase = hazardBodyPhrase + '\n\n' - ctas = [] - for (phen,sig) in forceCTAList: - hazardPhenSig = phen + "." + sig - cta = self.defaultCTA(hazardPhenSig) - if cta not in ctas: - ctas.append(cta) - - if len(ctas) > 0: - hazardBodyPhrase = hazardBodyPhrase + \ - 'PRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n' - for c in ctas: - hazardBodyPhrase = hazardBodyPhrase + c + '\n\n' - hazardBodyPhrase = hazardBodyPhrase + '&&\n\n' - - # Make sure there is only one CAP tag pairs - hazardBodyPhrase = re.sub(r'&&\s*PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n', \ - "", hazardBodyPhrase) - - return hazardBodyPhrase - - ####################################################################### - ### - ### Modified to create additive data for the RFW headlines. 
- ### For now returns the headline from the previous product for CON, - ### and EXT issuances - ### - ####################################################################### - - def hazard_hook(self, tree, node, hazard, hazardPhen, hazardSig, hazardAct, - hazardStart, hazardEnd, hazardSeg=""): - ### create the additive data for the fire headlines - - ### check for the source - if self._elevationSource == "Grids": - - ### initialize the phrase - le = " FOR " - ### Set the phrase from the forecaster selections - if len(self._rfwType) > 0: - ### add the event type - phraseDict = self._bulletDict() ### get the phrase/bullet dictionary - for t in self._rfwType: - le = le + phraseDict.get(t)[0] - ### add zone numbers or generic location description to headline - if self._numInHeadline == 0: - le = le + "FOR |* location description *|" - else: - le = le + self._headlineNumbers(hazard['id']) - else: - ### if no event type selected make a generic phrase - if self._numInHeadline == 0: - le = le + "|* event type *| FOR |* location description *|" - else: - le = le + "|* event type *| " + self._headlineNumbers(hazard['id']) - else: - ### get the additive data from the previous product - le = self._buildForPhrase(hazard) - return le - - - ########################################################################### - ### - ### From DiscretePhrases - ### RJM modified this routine to pass the hazard segment number to the - ### hazard_hook routine. Makes multiple headlines based on the hazards list - ### and returns the lot. - ### - ########################################################################### - - def makeHeadlinePhrases(self, tree, node, hazardList, issuanceTime, testMode=0): - returnStr = "" - # make a deepcopy since we plan to mess with it. 
- hList = copy.deepcopy(hazardList) - - # sort headlines in appropriate order - if len(hList): - if hList[0]['pil'] in ['CWF','NSH','OFF','GLF']: - hList.sort(self.marineSortHazardAlg) - else: - hList.sort(self.regularSortHazardAlg) - - while len(hList) > 0: - hazard = hList[0] - - # Can't make phrases with hazards with no 'hdln' entry - if hazard['hdln'] == "": - hList.remove(hazard) - continue - - phenSig = hazard['phen'] + "." + hazard['sig'] - actionCodeList = self.getAllowedActionCodes(phenSig) - - # if the action is not in the actionCodeList, skip it - if hazard['sig'] != "": # it's not locally defined - if not hazard['act'] in actionCodeList: - hList.remove(hazard) - continue - - # get the headline phrase - hazStr = self.makeStandardPhrase(hazard, issuanceTime) - if len(hazStr): - # Call user hook - localStr = self.hazard_hook(tree, node, hazard, hazard['phen'], hazard['sig'], hazard['act'], - ######################################################################## - # modified by RJM to pass the hazard_segment to the hazard_hook routine. - # hazard['start'], hazard['end']), "leading") - hazard['startTime'], hazard['endTime'], hazard['seg']) - ######################################################################## - returnStr = returnStr + "..." 
+ hazStr.upper() + localStr + "...\n" - - # always remove the main hazard from the list - hList.remove(hazard) - - ### get rid of any spaces in the ellipses - returnStr = returnStr.replace(" ...","...") - - return returnStr - - ########################################################################### - ### - ### to insert zone numbers into the headlines - ### - ########################################################################### - - def _headlineNumbers(self, idList): - numList = [] - ### get rid of the state ids (NVZ, CAZ) in idlist - for i in range (len(idList)): - numList.append(idList[i].replace(idList[i][:3], '')) - ### sort for increasing order - numList.sort() - ### initialize the zone number list - if len(numList) > 1: - numStr = "FOR FIRE WEATHER ZONES " - else: - numStr = "FOR FIRE WEATHER ZONE " - - numStr += self.punctuateList(numList).upper() - - return numStr - - - ########################################################################### - ### - ### inserted to grab the zone name and number and insert into the locations - ### bullet - ### - ########################################################################### - - def _getLocationsList(self, areaDictionary, argDict, seg): - - # Access the UGC information for the area(s) if available - accessor = ModuleAccessor.ModuleAccessor() - areaDict = accessor.variable(areaDictionary, "AreaDictionary") - areaList = argDict['segmentAreas'] - ugcList = [] - zoneNameList = [] - stateList = [] - nameString = "" - - # Cycle through each zone in this segment - for areaName in areaList: - if areaName in areaDict.keys(): - if areaDict.has_key(areaName): - entry = areaDict[areaName] - else: - entry = {} - LogStream.logProblem(\ - "AreaDictionary missing definition for [" + \ - areaName + "].") - if entry.has_key('ugcName'): - ugcName = entry['ugcName'] - else: - ugcName = areaName #missing UGCname - LogStream.logProblem(\ - "AreaDictionary missing ugcName definition for [" + \ - areaName + "].") - if 
entry.has_key('ugcCode'): - ugcCode = entry['ugcCode'] - else: - ugcCode = areaName #missing UGCcode - LogStream.logProblem(\ - "AreaDictionary missing ugcCode definition for [" + \ - areaName + "].") - if entry.has_key('fullStateName'): - ugcState = entry['fullStateName'] - else: - ugcState = areaName #missing fullStateName - LogStream.logEvent(\ - "AreaDictionary missing fullStateName definition for [" + \ - areaName + "].") - if ugcName not in ugcList: - ugcList.append((ugcState, ugcName, ugcCode[3:])) - if ugcState not in stateList: - stateList.append(ugcState) - - - ### sort ugclist by state - ugcList.sort() - stateList.sort() - - ### check the length of stateList for multiple states - if len(stateList) <= 1: ### only one state - - ### include state name - if self._includeStateName == 1: - nameString += "In " + stateList[0] + "..." - - ### sort based on zone number - ugcList = sorted(ugcList, key=lambda ugc: ugc[2]) - - if self._noNameInBullet == 0: ### include zone names and numbers - zoneList = ["Fire weather zone " + ugc[2] + " " + ugc[1] for ugc in ugcList] - - else: ### include zone numbers - if len(ugcList) > 1: - nameString += "Fire weather zones " - else: - nameString += "Fire weather zone " - - zoneList = [ugc[2] for ugc in ugcList] - - nameString += self.punctuateList(zoneList) + "." - - else: ### more than one state - - for state in stateList: - - ### include state name - if self._includeStateName == 1: - nameString = nameString + "In " + state + "..." - - newList = [] ### split up ugcList for each state. 
- for st, name, num in ugcList: - if st == state: - newList.append((num, name)) - - ### sort for zone numbers - newList.sort() - - if self._noNameInBullet == 0: ### include zone names - zoneList = ["Fire weather zone " + ugc[0] + " " + ugc[1] for ugc in newList] - else: ### don't include zone names - if len(newList) > 1: - nameString += "Fire weather zones " - else: - nameString += "Fire weather zone " - - zoneList = [ugc[0] for ugc in newList] - - nameString += self.punctuateList(zoneList) + "." - - ### get rid of any spaces in the ellipses - nameString = nameString.replace("... ","...") - nameString = nameString.replace(" ...","...") - - return nameString - - ####################################################################### - ### - ### Override the GenericHazards method for special RFW case - ### - ####################################################################### - - def _getBullets(self, eachHazard, argDict, areaDictionary): - - ### - ### set up the bullet list - ### - bList = [] - - ### get the list from the GUI if the forecaster entered anything - if len(self._rfwType) > 0: - for b in self._rfwType: - dict = self._bulletDict() - bList = bList + dict.get(b)[1] - - bList.append("Impacts") - - ### get the default configured list - else: - ### Use GenericBullets defined locally to throw in some generic bullets - bList = self._GenericBullets - - ### remove any duplicate entries in the bList - ### removeDups is in CommonUtils - bList = self.removeDups(bList) - - ### initialize the bullet output - bullets = "" - - ### loop through the bullets and format the output - for b in bList: - b = b.upper() - if b == self._locationsBullet.upper(): - locations = self._getLocationsList(areaDictionary, argDict, eachHazard['seg']) - bullets = bullets + StringUtils.StringUtils().indentText(b+"..."+locations, \ - indentFirstString="* ", indentNextString=" ", \ - maxWidth=65,breakStrings=[" ","..."]) + "\n\n" - elif b == "Extreme grassland fire danger".upper(): - bullets 
= bullets + "* " + b + "...is forecast.\n\n" - - elif b == "Highest threat".upper(): - bullets = bullets + "|* * " + b + "...is located (optional bullet)*|\n\n" - - elif b == "Impacts".upper(): - bullets = bullets + "* " + b + "...any fires that develop will likely spread rapidly." - bullets = bullets + " Outdoor burning is not recommended.\n\n" - else: - bullets = bullets + "* " + b + "...|* Enter bullet text *|\n\n" - - return bullets - - ############################################################################################ - ### - ### code from PQR to return additive data from previous RFW headlines - ### - ############################################################################################ - - def _buildForPhrase (self, eHazard): - forPhrase = " for |* enter reason for RFW *|" - - if eHazard.has_key('prevText'): - prevProduct = eHazard['prevText'] - - #Get the fire wx info from the initial issuance product - # Find the start of the text using the beginSearchString - - previousProduct = "" - previousProduct = re.sub(r'\n([^\n])', r' \1', prevProduct) - beginSearchString = "FOR (.*)" - endSearchString = "\.\.\.\n" - matchObjstart = re.search(beginSearchString, previousProduct, re.DOTALL) - ### - ### added the next two lines as a trap since if the search found nothing - ### it caused the RFW formatter to fail - if matchObjstart is None: - return "" - matchObjend = re.search(endSearchString, matchObjstart.group(1),re.DOTALL) - if matchObjend != None: - hazardTypeForWeather = matchObjstart.group(1)[:matchObjend.start(0)] - - else: - hazardTypeForWeather = "" - forPhrase = " for " + hazardTypeForWeather - return forPhrase - - - def _makeProduct(self, fcst, segmentAreas, argDict): - argDict["language"] = self._language - - # - # This section generates the headline on the segment - # - - # stuff argDict with the segmentAreas for DiscretePhrases - argDict['segmentAreas'] = segmentAreas - - editArea = segmentAreas[0] - areaLabel = editArea - headlines = 
self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - - # - # This section generates the attribution statements and calls-to-action - # - - hazardsC = argDict['hazards'] - listOfHazards = hazardsC.getHazardList(segmentAreas) - fcst = fcst + self.hazardBodyText(listOfHazards, argDict) - - # - # If an overview exists for this product, calculate it - # - - self.overviewText(listOfHazards, "RFW") - - # - # Clean up and return - # - - fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "-", "..."]) - return fcst - - ########################################################################### - ### - ## Added code to add URL to end of product - ### - ########################################################################### - - def _postProcessProduct(self, fcst, argDict): - # - # If an overview exists for this product, insert it - # - overview = self.finalOverviewText() - overviewSearch = re.compile(r'Default overview section', re.DOTALL) - fcst = overviewSearch.sub(overview, fcst) - - # - # Added to place line feeds in the CAP tags to keep separate from CTAs - - fcst = string.replace(fcst, \ - r"PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", \ - r"\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n") - # Commented out following line to prevent it from changing bullet indentation - #fcst = string.replace(fcst, "\n ","\n") - fcst = string.replace(fcst, "&&", "\n&&\n") - - # Prevent empty Call to Action Tags - fcst = re.sub(r'\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\s*&&\n', \ - "", fcst) - - fcst = self._indentBulletText(fcst) - - # - # Clean up multiple line feeds - # - fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) - fcst = fixMultiLF.sub(r'\1', fcst) - ### add the url text from the configuration section - fcst = fcst + "\n" + self._urlText - - # finish progress meter - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return 
fcst - - def cleanCapturedText(self, text, paragraphs, addFramingCodes = False, skipCTAs = False): - # - # This method takes a block of text, wraps it preserving blank lines, - # then returns the part after 'paragaphs'. So, if paragraphs is 0, it - # returns the whole thing, if it's 2, it retunrs paragraphs 2 -> end, etc. - # Headlines are always removed. - # Framing codes are added if specified. - # - paras = self.convertSingleParas(text) #single paragraphs - - # keep track of any call to actions found - foundCTAs = [] - - # Process the paragraphs, keep only the interested ones - paraCount = 0 - processedText = '' - for eachPara in paras: - if paraCount >= paragraphs: - found = self.ctasFound(eachPara) #get list of ctas found - if skipCTAs and len(found): - pass - else: - processedText = processedText + eachPara + '\n\n' - #keep track of remaining CTAs in processed text - for f in found: - if f not in foundCTAs: - foundCTAs.append(f) - if eachPara.find('...') == 0: - pass #ignore headlines - paraCount = paraCount + 1 - - # Add framing codes - if addFramingCodes: - processedText = processedText.rstrip() - ### - ### added the \n to get the framing code on its own line - ### - processedText = "|*\n\n" + processedText + "\n*|\n\n" - - # Wrap - processedText = self.endline(processedText, - linelength=self._lineLength, breakStr=[" ", "-", "..."]) - - - return processedText, foundCTAs +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. 
+# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +######################################################################## +# Hazard_RFW.py +# Updated for OB9.3 by +## Brian Brong/REV, Ron Miller/OTX, Jeff Zeltwagner/NWSTC, Shannon White/FDTB +########################################################################## +import LogStream +import GenericHazards +import string, time, re, os, types, copy, sets +import CallToActions +import ModuleAccessor, StringUtils + +class TextProduct(GenericHazards.TextProduct): + VariableList = [ + (("Select RFW Type", "rfwType"), [], "check", []), + (("Source for Headline and \nAffected Area Bullet", "elevationSource"), + "Grids", "radio", ["Grids", "Previous Text"]), + ] + + Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) + + Definition['displayName'] = None + Definition['displayName'] = "BaselineHazard_RFW_ (FireWx Watch/Warning)" + + Definition["defaultEditAreas"] = "EditAreas_FireWx__" + Definition["mapNameForCombinations"] = "FireWxZones_" # Map background for creating Combinations + + #Special multiple product domains for certain sites: + if "" == "AFG": + if "_" == "_AFG": + Definition["subDomainUGCs"] = ["AKZ218","AKZ219","AKZ220","AKZ221", + "AKZ222","AKZ223","AKZ224","AKZ225", + "AKZ226"] + elif "_" == "_NSB": + Definition["subDomainUGCs"] = ["AKZ201","AKZ202","AKZ203","AKZ204", + "AKZ205","AKZ206"] + elif "_" == "_WCZ": + Definition["subDomainUGCs"] = ["AKZ207","AKZ208","AKZ209","AKZ210", + "AKZ211","AKZ212","AKZ213","AKZ214", + "AKZ215","AKZ216","AKZ217","AKZ227"] + + # Header configuration items + Definition["productName"] = "URGENT - FIRE WEATHER MESSAGE" # name of product + Definition["fullStationID"] = "" # full station 
identifier (4letter) + Definition["wmoID"] = "" # WMO ID + Definition["pil"] = "" # product pil + #Definition["areaName"] = "Statename" # Name of state, such as "Georgia" + Definition["wfoCityState"] = "" # Location of WFO - city state + Definition["wfoCity"] = "" # WFO Name as it should appear in a text product + Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. + Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. + Definition["outputFile"] = "{prddir}/TEXT/RFW_.txt" + + # OPTIONAL CONFIGURATION ITEMS + #Definition["database"] = "Official" # Source database. "Official", "Fcst", or "ISC" + #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished + #Definition["debug"] = 1 + #Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines + + Definition["purgeTime"] = 8 # Maximum hours for expireTime from issueTime + Definition["includeCities"] = 0 # Cities included in area header + Definition["accurateCities"] = 0 # If 1, cities are based on grids; + # otherwise full list is included + Definition["cityLocation"] = "CityLocation" # City lat/lon dictionary to use + Definition["cityDescriptor"] = "Including the cities of" + Definition["includeZoneNames"] = 1 # Zone names will be included in the area header + Definition["includeIssueTime"] = 1 # This should be set to zero for products + # that do not include a time lime below the UGC + #Definition["easPhrase"] = "" # Optional EAS phrase to be include in product header + Definition["lineLength"] = 66 + Definition["includeOverviewHeadline"] = 1 #If 1, the overview header is templated + Definition["includeOverview"] = 1 #If 1, the overview section is templated + Definition["bulletProd"] = 1 #If 1, the product is bulletted + + #Definition["hazardSamplingThreshold"] = (10, None) #(%cov, #points) + + Definition["numInHeadline"] = 1 + Definition["GenericBullets"] = ["AFFECTED AREA", "WIND", "HUMIDITY", "THUNDERSTORMS", 
"HIGHEST THREAT", "IMPACTS"] + Definition["locationsBullet"] = "AFFECTED AREA" + Definition["noNameInBullet"] = 1 + Definition["includeStateName"] = 0 + Definition["urlText"] = "" + + def __init__(self): + GenericHazards.TextProduct.__init__(self) + + def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + return [ + ('FW.W', allActions, 'FireWx'), + ('FW.A', allActions, 'FireWx') + ] + + def _preProcessArea(self, fcst, segmentAreas, expireTime, argDict): + + # This is the header for an edit area combination + areaHeader = self.makeAreaHeader( + argDict, "", self._issueTime, expireTime, + self._areaDictionary, None, cityDescriptor=self._cityDescriptor, + areaList=segmentAreas, includeCities=self._includeCities, + includeZoneNames = self._includeZoneNames, + includeIssueTime = self._includeIssueTime, + accurateCities = self._accurateCities) + + fcst = fcst + areaHeader + "\n" + return fcst + + # + # Overridden to allow for attribution statement + # + def hazardBodyText(self, hazardList, argDict): + + hazardBodyPhrase = '' + + # + # First, sort the hazards for this segment by importance + # + + sortedHazardList = [] + for each in ['W', 'Y', 'A', 'S']: + for eachHazard in hazardList: + if eachHazard['sig'] == each: + if eachHazard not in sortedHazardList: + sortedHazardList.append(eachHazard) + + # + # Next, break them into individual lists based on action + # + + newList = [] + canList = [] + expList = [] + extList = [] + conList = [] + upgList = [] + statementList = [] + + for eachHazard in sortedHazardList: + if eachHazard['act'] in ['NEW', 'EXA', 'EXB'] and eachHazard['sig'] != 'S': + newList.append(eachHazard) + elif eachHazard['act'] in ['CAN'] and eachHazard['sig'] != 'S': + canList.append(eachHazard) + elif eachHazard['act'] in ['EXP'] and eachHazard['sig'] != 'S': + expList.append(eachHazard) + elif eachHazard['act'] in ['EXT'] and eachHazard['sig'] != 'S': + extList.append(eachHazard) + elif eachHazard['act'] in ['UPG'] 
and eachHazard['sig'] != 'S': + upgList.append(eachHazard) + elif eachHazard['sig'] != 'S': + conList.append(eachHazard) + elif eachHazard['sig'] == 'S': + statementList.append(eachHazard) + + # + # Now, go through each list and build the phrases + # + + nwsIntroUsed = 0 + + # + # This is for the new hazards + # + + phraseCount = 0 + lastHdln = None + for eachHazard in newList: + hdln = eachHazard['hdln'] + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) + hazNameA = self.hazardName(eachHazard['hdln'], argDict, True) + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + + if nwsIntroUsed == 0: + hazardBodyPhrase = "The National Weather Service in " + self._wfoCity + nwsIntroUsed = 1 + if phraseCount == 0: + phraseCount = 1 + hazardBodyPhrase = hazardBodyPhrase + " has issued " + \ + hazNameA + ", which is in effect" + endTimePhrase + ". " + elif phraseCount == 1: + phraseCount = 2 + if hdln != lastHdln: + hazardBodyPhrase = hazardBodyPhrase + hazNameA + \ + " has also been issued. This " + hazName + \ + " is in effect" + endTimePhrase + ". " + else: + hazardBodyPhrase = hazardBodyPhrase + hazNameA + \ + " has also been issued" + endTimePhrase + ". " + else: + hazardBodyPhrase = hazardBodyPhrase + "In addition..." + \ + hazNameA + " has been issued. This " + hazName + \ + " is in effect" + endTimePhrase + ". " + lastHdln = hdln + + # + # This is for the can hazards + # + + for eachHazard in canList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + + if nwsIntroUsed == 0: + hazardBodyPhrase = "The National Weather Service in " +\ + self._wfoCity + nwsIntroUsed = 1 + hazardBodyPhrase = hazardBodyPhrase + \ + " has cancelled the " + hazName + ". " + else: + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " has been cancelled. 
" + + # + # This is for the exp hazards + # + + phraseCount = 0 + for eachHazard in expList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + if self._bulletProd: + continue # No attribution for this case if it is a bullet product + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + + if eachHazard['endTime'] <= argDict['creationTime']: + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " is no longer in effect. " + else: + expTimeCurrent = argDict['creationTime'] + timeWords = self.getTimingPhrase(eachHazard, expTimeCurrent) + + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " will expire " + timeWords + ". " + + # + # This is for ext hazards + # + + for eachHazard in extList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + if self._bulletProd: + continue # No attribution for this case if it is a bullet product + endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " is now in effect" + endTimePhrase + ". " + + # + # This is for upgrade hazards + # + + for eachHazard in upgList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + hazName = self.hazardName(eachHazard['hdln'], argDict, False) + + hazardBodyPhrase = hazardBodyPhrase + "The " + hazName + \ + " is no longer in effect. " + + # + # This is for con hazards + # + + for eachHazard in conList: + if len(eachHazard['hdln']) == 0: + continue #no defined headline, skip phrase + if self._bulletProd: + continue # No attribution for this case if it is a bullet product + endTimePhrase = self.hazardTimePhrases(eachHazard, argDict) + hazNameA = self.hazardName(eachHazard['hdln'], argDict, True) + hazardBodyPhrase = hazardBodyPhrase + hazNameA + \ + " remains in effect" + endTimePhrase + ". 
" + + # + # This is for statement hazards + # + + #we will add in text later either by text capture or + #framing codes as needed + #for eachHazard in statementList: + # hazardBodyPhrase = hazardBodyPhrase + "|* Statement text *|." + + # + # This adds segment text + # + + segmentText = '' + + # + # Check that this segment codes to determine capture or not, + # and frame captured text or not + # + incTextFlag, incFramingCodes, skipCTAs, forceCTAList = \ + self.useCaptureText(sortedHazardList) + + # + # + # Check that the previous text exists + # + + foundCTAs = [] + for eachHazard in sortedHazardList: + if 'prevText' in eachHazard: + prevText = eachHazard['prevText'] + + if eachHazard['sig'] == 'S': + startPara = 0 + else: + startPara = 1 + segmentText, foundCTAs = self.cleanCapturedText(prevText, + startPara, addFramingCodes = incFramingCodes, + skipCTAs = skipCTAs) + tester = segmentText[0] + if tester == '*': + startPara = 1 + else: + startPara = 2 + + segmentText, foundCTAs = self.cleanCapturedText(prevText, + startPara, addFramingCodes = incFramingCodes, + skipCTAs = skipCTAs) + + # split the current bullets + split_bullets = segmentText.split("\n\n") + # reset the segmentText variable + segmentText = "" + for current_bullet in split_bullets: + + + # if the user wants to use the gridded data for the elevation and location info... + if self._elevationSource == "Grids": + # check to see if this is the ELEVATION bullet. If so, replace it with + # the elevation from the grid. + + ### Modification of Ron Miller's code to remove the check for the elevations bullet + ### since it could be the same as the locationsBullet. + ### Caused issues during testing at SLC. Thanks Linda! + ### bsb 5-9-10. + ### + ### locationsBullet is defined in the configuration section + locBullet = "* " + self._locationsBullet + "..." + # check to see if this is the LOCATIONS bullet. If so, replace it with + # the locations from the grid. 
+ if current_bullet.find(locBullet) >= 0: + new_locations = self._getLocationsList(self._areaDictionary,argDict,eachHazard['seg']) + current_bullet = self.indentText(self._locationsBullet + "..." + new_locations, \ + indentFirstString="* ", indentNextString=" ", \ + maxWidth=65,breakStrings=[" ","..."]) + + # for all bullets, ensure that it's indented + if current_bullet.find("* ") == 0: + current_bullet = self.indentText(current_bullet.replace("* ",""), \ + indentFirstString="* ", indentNextString=" ", \ + maxWidth=65,breakStrings=[" ","..."]) + + # now add the bullet back to the list. check to make sure that it's + # not just a blank line. + if len(current_bullet) > 1: + segmentText = segmentText + current_bullet + "\n\n" + + + ################################################################################# + # End Ron Miller's added code + ################################################################################# + + # + # Check that the segment text isn't very short or blank + # + + if len(segmentText) < 6: + incTextFlag = 0 + # + # If segment passes the above checks, add the text + # + + if incTextFlag: + hazardBodyPhrase = hazardBodyPhrase + "\n\n" + \ + segmentText + '\n\n' +# added below for DR21194 +## else: +## if eachHazard['act'] != 'CAN': +## ### get the default bullets from the bullet dictionary +## bullets = self._getBullets(eachHazard, argDict, self._areaDictionary) +## hazardBodyPhrase = hazardBodyPhrase + "\n\n" + bullets +## else: +## hazardBodyPhrase = hazardBodyPhrase + \ +## "\n\n|* Cancellation text goes here *|.\n" + elif self._bulletProd: + forceList = ['HW','DS','EH','EC','BZ','WS','IS'] + for h in newList: + if h['phen'] in forceList: + eachHazard = h + if eachHazard['act'] == 'CAN': + hazardBodyPhrase = hazardBodyPhrase + \ + "\n\n|* Wrap-up text goes here *|.\n" + elif eachHazard['act'] == 'EXP': + hazardBodyPhrase = hazardBodyPhrase + \ + "\n\n|* Wrap-up text goes here *|.\n" + else: + ### get the default bullets from the 
bullet dictionary + bullets = self._getBullets(eachHazard, argDict, self._areaDictionary) + hazardBodyPhrase = hazardBodyPhrase + "\n\n" + bullets + else: + hazardBodyPhrase = hazardBodyPhrase + \ + "\n\n|* Segment text goes here *|.\n\n" +# end addition + + # + # This adds the call to action statements. This is only performed + # if the segment is 'NEW' or if the previous text has been discarded + # due to a CAN/EXP/UPG segment + # + + # remove items from forceCTAList if they exist in foundCTAs. Note + # that the formats of these lists are different, thus this code + # is more complicated + for ent in foundCTAs: + #only process CTAs that are vtec phen/sig based + if ent.find('.') == 2: + phensig = (ent[0:2], ent[3]) #phen.sig + if phensig in forceCTAList: + del forceCTAList[forceCTAList.index(phensig)] + + hazardBodyPhrase = hazardBodyPhrase + '\n\n' + ctas = [] + for (phen,sig) in forceCTAList: + hazardPhenSig = phen + "." + sig + cta = self.defaultCTA(hazardPhenSig) + if cta not in ctas: + ctas.append(cta) + + if len(ctas) > 0: + hazardBodyPhrase = hazardBodyPhrase + \ + 'PRECAUTIONARY/PREPAREDNESS ACTIONS...\n\n' + for c in ctas: + hazardBodyPhrase = hazardBodyPhrase + c + '\n\n' + hazardBodyPhrase = hazardBodyPhrase + '&&\n\n' + + # Make sure there is only one CAP tag pairs + hazardBodyPhrase = re.sub(r'&&\s*PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n', \ + "", hazardBodyPhrase) + + return hazardBodyPhrase + + ####################################################################### + ### + ### Modified to create additive data for the RFW headlines. 
+ ### For now returns the headline from the previous product for CON, + ### and EXT issuances + ### + ####################################################################### + + def hazard_hook(self, tree, node, hazard, hazardPhen, hazardSig, hazardAct, + hazardStart, hazardEnd, hazardSeg=""): + ### create the additive data for the fire headlines + + ### check for the source + if self._elevationSource == "Grids": + + ### initialize the phrase + le = " FOR " + ### Set the phrase from the forecaster selections + if len(self._rfwType) > 0: + ### add the event type + phraseDict = self._bulletDict() ### get the phrase/bullet dictionary + for t in self._rfwType: + le = le + phraseDict.get(t)[0] + ### add zone numbers or generic location description to headline + if self._numInHeadline == 0: + le = le + "FOR |* location description *|" + else: + le = le + self._headlineNumbers(hazard['id']) + else: + ### if no event type selected make a generic phrase + if self._numInHeadline == 0: + le = le + "|* event type *| FOR |* location description *|" + else: + le = le + "|* event type *| " + self._headlineNumbers(hazard['id']) + else: + ### get the additive data from the previous product + le = self._buildForPhrase(hazard) + return le + + + ########################################################################### + ### + ### From DiscretePhrases + ### RJM modified this routine to pass the hazard segment number to the + ### hazard_hook routine. Makes multiple headlines based on the hazards list + ### and returns the lot. + ### + ########################################################################### + + def makeHeadlinePhrases(self, tree, node, hazardList, issuanceTime, testMode=0): + returnStr = "" + # make a deepcopy since we plan to mess with it. 
+ hList = copy.deepcopy(hazardList) + + # sort headlines in appropriate order + if len(hList): + if hList[0]['pil'] in ['CWF','NSH','OFF','GLF']: + hList.sort(self.marineSortHazardAlg) + else: + hList.sort(self.regularSortHazardAlg) + + while len(hList) > 0: + hazard = hList[0] + + # Can't make phrases with hazards with no 'hdln' entry + if hazard['hdln'] == "": + hList.remove(hazard) + continue + + phenSig = hazard['phen'] + "." + hazard['sig'] + actionCodeList = self.getAllowedActionCodes(phenSig) + + # if the action is not in the actionCodeList, skip it + if hazard['sig'] != "": # it's not locally defined + if not hazard['act'] in actionCodeList: + hList.remove(hazard) + continue + + # get the headline phrase + hazStr = self.makeStandardPhrase(hazard, issuanceTime) + if len(hazStr): + # Call user hook + localStr = self.hazard_hook(tree, node, hazard, hazard['phen'], hazard['sig'], hazard['act'], + ######################################################################## + # modified by RJM to pass the hazard_segment to the hazard_hook routine. + # hazard['start'], hazard['end']), "leading") + hazard['startTime'], hazard['endTime'], hazard['seg']) + ######################################################################## + returnStr = returnStr + "..." 
+ hazStr.upper() + localStr + "...\n" + + # always remove the main hazard from the list + hList.remove(hazard) + + ### get rid of any spaces in the ellipses + returnStr = returnStr.replace(" ...","...") + + return returnStr + + ########################################################################### + ### + ### to insert zone numbers into the headlines + ### + ########################################################################### + + def _headlineNumbers(self, idList): + numList = [] + ### get rid of the state ids (NVZ, CAZ) in idlist + for i in range (len(idList)): + numList.append(idList[i].replace(idList[i][:3], '')) + ### sort for increasing order + numList.sort() + ### initialize the zone number list + if len(numList) > 1: + numStr = "FOR FIRE WEATHER ZONES " + else: + numStr = "FOR FIRE WEATHER ZONE " + + numStr += self.punctuateList(numList).upper() + + return numStr + + + ########################################################################### + ### + ### inserted to grab the zone name and number and insert into the locations + ### bullet + ### + ########################################################################### + + def _getLocationsList(self, areaDictionary, argDict, seg): + + # Access the UGC information for the area(s) if available + accessor = ModuleAccessor.ModuleAccessor() + areaDict = accessor.variable(areaDictionary, "AreaDictionary") + areaList = argDict['segmentAreas'] + ugcList = [] + zoneNameList = [] + stateList = [] + nameString = "" + + # Cycle through each zone in this segment + for areaName in areaList: + if areaName in list(areaDict.keys()): + if areaName in areaDict: + entry = areaDict[areaName] + else: + entry = {} + LogStream.logProblem(\ + "AreaDictionary missing definition for [" + \ + areaName + "].") + if 'ugcName' in entry: + ugcName = entry['ugcName'] + else: + ugcName = areaName #missing UGCname + LogStream.logProblem(\ + "AreaDictionary missing ugcName definition for [" + \ + areaName + "].") + if 'ugcCode' 
in entry: + ugcCode = entry['ugcCode'] + else: + ugcCode = areaName #missing UGCcode + LogStream.logProblem(\ + "AreaDictionary missing ugcCode definition for [" + \ + areaName + "].") + if 'fullStateName' in entry: + ugcState = entry['fullStateName'] + else: + ugcState = areaName #missing fullStateName + LogStream.logEvent(\ + "AreaDictionary missing fullStateName definition for [" + \ + areaName + "].") + if ugcName not in ugcList: + ugcList.append((ugcState, ugcName, ugcCode[3:])) + if ugcState not in stateList: + stateList.append(ugcState) + + + ### sort ugclist by state + ugcList.sort() + stateList.sort() + + ### check the length of stateList for multiple states + if len(stateList) <= 1: ### only one state + + ### include state name + if self._includeStateName == 1: + nameString += "In " + stateList[0] + "..." + + ### sort based on zone number + ugcList = sorted(ugcList, key=lambda ugc: ugc[2]) + + if self._noNameInBullet == 0: ### include zone names and numbers + zoneList = ["Fire weather zone " + ugc[2] + " " + ugc[1] for ugc in ugcList] + + else: ### include zone numbers + if len(ugcList) > 1: + nameString += "Fire weather zones " + else: + nameString += "Fire weather zone " + + zoneList = [ugc[2] for ugc in ugcList] + + nameString += self.punctuateList(zoneList) + "." + + else: ### more than one state + + for state in stateList: + + ### include state name + if self._includeStateName == 1: + nameString = nameString + "In " + state + "..." + + newList = [] ### split up ugcList for each state. 
+ for st, name, num in ugcList: + if st == state: + newList.append((num, name)) + + ### sort for zone numbers + newList.sort() + + if self._noNameInBullet == 0: ### include zone names + zoneList = ["Fire weather zone " + ugc[0] + " " + ugc[1] for ugc in newList] + else: ### don't include zone names + if len(newList) > 1: + nameString += "Fire weather zones " + else: + nameString += "Fire weather zone " + + zoneList = [ugc[0] for ugc in newList] + + nameString += self.punctuateList(zoneList) + "." + + ### get rid of any spaces in the ellipses + nameString = nameString.replace("... ","...") + nameString = nameString.replace(" ...","...") + + return nameString + + ####################################################################### + ### + ### Override the GenericHazards method for special RFW case + ### + ####################################################################### + + def _getBullets(self, eachHazard, argDict, areaDictionary): + + ### + ### set up the bullet list + ### + bList = [] + + ### get the list from the GUI if the forecaster entered anything + if len(self._rfwType) > 0: + for b in self._rfwType: + dict = self._bulletDict() + bList = bList + dict.get(b)[1] + + bList.append("Impacts") + + ### get the default configured list + else: + ### Use GenericBullets defined locally to throw in some generic bullets + bList = self._GenericBullets + + ### remove any duplicate entries in the bList + ### removeDups is in CommonUtils + bList = self.removeDups(bList) + + ### initialize the bullet output + bullets = "" + + ### loop through the bullets and format the output + for b in bList: + b = b.upper() + if b == self._locationsBullet.upper(): + locations = self._getLocationsList(areaDictionary, argDict, eachHazard['seg']) + bullets = bullets + StringUtils.StringUtils().indentText(b+"..."+locations, \ + indentFirstString="* ", indentNextString=" ", \ + maxWidth=65,breakStrings=[" ","..."]) + "\n\n" + elif b == "Extreme grassland fire danger".upper(): + bullets 
= bullets + "* " + b + "...is forecast.\n\n" + + elif b == "Highest threat".upper(): + bullets = bullets + "|* * " + b + "...is located (optional bullet)*|\n\n" + + elif b == "Impacts".upper(): + bullets = bullets + "* " + b + "...any fires that develop will likely spread rapidly." + bullets = bullets + " Outdoor burning is not recommended.\n\n" + else: + bullets = bullets + "* " + b + "...|* Enter bullet text *|\n\n" + + return bullets + + ############################################################################################ + ### + ### code from PQR to return additive data from previous RFW headlines + ### + ############################################################################################ + + def _buildForPhrase (self, eHazard): + forPhrase = " for |* enter reason for RFW *|" + + if 'prevText' in eHazard: + prevProduct = eHazard['prevText'] + + #Get the fire wx info from the initial issuance product + # Find the start of the text using the beginSearchString + + previousProduct = "" + previousProduct = re.sub(r'\n([^\n])', r' \1', prevProduct) + beginSearchString = "FOR (.*)" + endSearchString = "\.\.\.\n" + matchObjstart = re.search(beginSearchString, previousProduct, re.DOTALL) + ### + ### added the next two lines as a trap since if the search found nothing + ### it caused the RFW formatter to fail + if matchObjstart is None: + return "" + matchObjend = re.search(endSearchString, matchObjstart.group(1),re.DOTALL) + if matchObjend != None: + hazardTypeForWeather = matchObjstart.group(1)[:matchObjend.start(0)] + + else: + hazardTypeForWeather = "" + forPhrase = " for " + hazardTypeForWeather + return forPhrase + + + def _makeProduct(self, fcst, segmentAreas, argDict): + argDict["language"] = self._language + + # + # This section generates the headline on the segment + # + + # stuff argDict with the segmentAreas for DiscretePhrases + argDict['segmentAreas'] = segmentAreas + + editArea = segmentAreas[0] + areaLabel = editArea + headlines = 
self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + + # + # This section generates the attribution statements and calls-to-action + # + + hazardsC = argDict['hazards'] + listOfHazards = hazardsC.getHazardList(segmentAreas) + fcst = fcst + self.hazardBodyText(listOfHazards, argDict) + + # + # If an overview exists for this product, calculate it + # + + self.overviewText(listOfHazards, "RFW") + + # + # Clean up and return + # + + fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "-", "..."]) + return fcst + + ########################################################################### + ### + ## Added code to add URL to end of product + ### + ########################################################################### + + def _postProcessProduct(self, fcst, argDict): + # + # If an overview exists for this product, insert it + # + overview = self.finalOverviewText() + overviewSearch = re.compile(r'Default overview section', re.DOTALL) + fcst = overviewSearch.sub(overview, fcst) + + # + # Added to place line feeds in the CAP tags to keep separate from CTAs + + fcst = string.replace(fcst, \ + r"PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", \ + r"\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n") + # Commented out following line to prevent it from changing bullet indentation + #fcst = string.replace(fcst, "\n ","\n") + fcst = string.replace(fcst, "&&", "\n&&\n") + + # Prevent empty Call to Action Tags + fcst = re.sub(r'\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\s*&&\n', \ + "", fcst) + + fcst = self._indentBulletText(fcst) + + # + # Clean up multiple line feeds + # + fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) + fcst = fixMultiLF.sub(r'\1', fcst) + ### add the url text from the configuration section + fcst = fcst + "\n" + self._urlText + + # finish progress meter + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return 
fcst + + def cleanCapturedText(self, text, paragraphs, addFramingCodes = False, skipCTAs = False): + # + # This method takes a block of text, wraps it preserving blank lines, + # then returns the part after 'paragaphs'. So, if paragraphs is 0, it + # returns the whole thing, if it's 2, it retunrs paragraphs 2 -> end, etc. + # Headlines are always removed. + # Framing codes are added if specified. + # + paras = self.convertSingleParas(text) #single paragraphs + + # keep track of any call to actions found + foundCTAs = [] + + # Process the paragraphs, keep only the interested ones + paraCount = 0 + processedText = '' + for eachPara in paras: + if paraCount >= paragraphs: + found = self.ctasFound(eachPara) #get list of ctas found + if skipCTAs and len(found): + pass + else: + processedText = processedText + eachPara + '\n\n' + #keep track of remaining CTAs in processed text + for f in found: + if f not in foundCTAs: + foundCTAs.append(f) + if eachPara.find('...') == 0: + pass #ignore headlines + paraCount = paraCount + 1 + + # Add framing codes + if addFramingCodes: + processedText = processedText.rstrip() + ### + ### added the \n to get the framing code on its own line + ### + processedText = "|*\n\n" + processedText + "\n*|\n\n" + + # Wrap + processedText = self.endline(processedText, + linelength=self._lineLength, breakStr=[" ", "-", "..."]) + + + return processedText, foundCTAs diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_TCV.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_TCV.py index ea97038170..478d859cb7 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_TCV.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_TCV.py @@ -1,3980 +1,3980 @@ -# -# This software was developed and / or modified by 
Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -# Version 2018.6.5-0 - -import GenericHazards -import JsonSupport -import LocalizationSupport -import time, types, copy, LogStream, collections -import ModuleAccessor -import math -import TimeRange -from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID - - -from AbsTime import * -AWIPS_ENVIRON = "AWIPS2" - -import HLSTCV_Common - -class TextProduct(HLSTCV_Common.TextProduct): - Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) - - Definition["displayName"] = "None" - Definition["outputFile"] = "{prddir}/TEXT/TCV.txt" - Definition["database"] = "Official" # Source database - Definition["mapNameForCombinations"] = "Zones_" - Definition["defaultEditAreas"] = "Combinations_TCV_" - Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display - - Definition["productName"] = "Local Watch/Warning Statement" - - Definition["fullStationID" ] = "" - Definition["wmoID" ] = "" - Definition["wfoCityState" ] = "" - Definition["pil" ] = "" - Definition["textdbPil" ] = "" - Definition["awipsWANPil" ] = "" - Definition["site"] = "" - Definition["wfoCity"] = "" - - Definition["areaName"] = "" #optional area name for product - Definition["areaDictionary"] = "AreaDictionary" - Definition["language"] = "english" - Definition["lineLength"] = 71 #Maximum 
line length - Definition["tabLength"] = 4 - - Definition["purgeTime"] = 8 # Default Expiration in hours if - Definition["includeZoneNames"] = 1 # Zone names will be included in the area header - Definition["includeIssueTime"] = 0 # Issue Time will be included in the area header - Definition["easPhrase"] = \ - "URGENT - IMMEDIATE BROADCAST REQUESTED" # Optional EAS phrase to be include in product header - Definition["callToAction"] = 1 - Definition["hazardSamplingThreshold"] = (3, None) - - - Definition["threatPhrase"] = { - "Wind": { - "Extreme": "Potential for wind greater than 110 mph", - "High": "Potential for wind 74 to 110 mph", - "Moderate": "Potential for wind 58 to 73 mph", - "Elevated": "Potential for wind 39 to 57 mph", - "None": "Wind less than 39 mph" - }, - "Storm Surge": { - "Extreme": "Potential for storm surge flooding greater than 9 feet above ground", - "High": "Potential for storm surge flooding greater than 6 feet above ground", - "Moderate": "Potential for storm surge flooding greater than 3 feet above ground", - "Elevated": "Potential for storm surge flooding greater than 1 foot above ground", - "None": "Little to no storm surge flooding" - }, - "Flooding Rain": { - "Extreme": "Potential for extreme flooding rain", - "High": "Potential for major flooding rain", - "Moderate": "Potential for moderate flooding rain", - "Elevated": "Potential for localized flooding rain", - "None": "Little or no potential for flooding rain" - }, - "Tornado": { - "Extreme": "Potential for an outbreak of tornadoes", - "High": "Potential for many tornadoes", - "Moderate": "Potential for several tornadoes", - "Elevated": "Potential for a few tornadoes", - "None": "Tornadoes not expected" - } - } - - Definition["debug"] = { - #TextProduct - "__init__": 0, - "_inlandAreas": 0, - "_coastalAreas": 0, - "_cwa": 0, - "_productParts_TCV": 0, - "_segmentParts_TCV": 0, - "_analysisList": 0, - "_intersectAnalysisList": 0, - "_extraRainfallAnalysisList": 0, - "generateForecast": 
0, - "_initializeVariables": 0, - "_performGridChecks": 0, - "_isCorrectNumGrids": 0, - "_checkContinuousDuration": 0, - "_noOpParts": 0, - "_easMessage": 0, - "_setup_segment": 0, - "_vtecRecords": 0, - "_areaList": 0, - "_issuanceTimeDate": 0, - "_summaryHeadlines": 0, - "_locationsAffected": 0, - "_fcstConfidence": 0, - "_infoSection": 0, - "_endSection": 0, - "_hazardDefinition": 0, - "_threatKeyOrder": 0, - "_sampleData": 0, - "_getStats": 0, - "_determineSegments": 0, - "_getRefinedHazardSegments": 0, - "_refineSegments": 0, - "_makeSegmentEditAreas": 0, - "_findSegment": 0, - "_getAllVTECRecords": 0, - "_getHazardsForHLS": 0, - "_convertToISO": 0, - "_convertToDatetime": 0, - "_initializeSegmentZoneData": 0, - "_archiveCurrentAdvisory": 0, - "_saveAdvisory": 0, - "_overview_list": 0, - "_displayGUI": 0, - - #HLSTCV_Common - "allowedHazards": 0, - "allowedHeadlines": 0, - "_initializeVariables": 0, - "moderated_dict": 0, - "_wmoHeader": 0, - "_productHeader": 0, - "_ugcHeader": 0, - "_processProductParts": 0, - "_createProductDictionary": 0, - "_initializeProductDictionary": 0, - "_formatProductDictionary": 0, - "_getStatValue": 0, - "_allAreas": 0, - "_groupSegments": 0, - "_getSegmentVTECRecordsTuples": 0, - "_computeIntersectAreas": 0, - "_initializeHazardsTable": 0, - "_getHazardsTable": 0, - "_ignoreActions": 0, - "_setVTECActiveTable": 0, - "_getVtecRecords": 0, - "_getAllowedHazardList": 0, - "_altFilterMethod": 0, - "_filterHazards": 0, - "_getAdditionalHazards": 0, - "_checkHazard": 0, - "_initializeTimeVariables": 0, - "_determineTimeRanges": 0, - "_createPeriodList": 0, - "_calculateStartTime": 0, - "_formatPeriod": 0, - "_getTimeDesc": 0, - "_getPartOfDay": 0, - "_initializeStormInformation": 0, - "_parseTCP": 0, - "_getStormTypeFromTCP": 0, - "_getStormNameFromTCP": 0, - "_getAdvisoryTypeFromTCP": 0, - "_getAdvisoryNumberStringFromTCP": 0, - "_getStormNumberStringFromTCP": 0, - "_getStormIDStringFromTCP": 0, - "_useTestTCP": 0, - "_testTCP": 0, - 
"_initializeAdvisories": 0, - "_synchronizeAdvisories": 0, - "_getLocalAdvisoryDirectoryPath": 0, - "_getStormAdvisoryNames": 0, - "_loadLastTwoAdvisories": 0, - "_loadAdvisory": 0, - "_getAdvisoryPath": 0, - "_getAdvisoryFilename": 0, - "_processVariableList": 0, - "_GUI_sizing_dict": 0, - "_GUI1_configDict": 0, - "_font_GUI_dict": 0, - - #Overview_Dialog - "body": 0, - "_makeButtons": 0, - "okCB": 0, - - #Common_Dialog - "getVarDict": 0, - "_makeRadioOrCheckList": 0, - "_makeEntry": 0, - "cancelCB": 0, - "_entryName": 0, - "_makeTuple": 0, - "_setVarDict": 0, - "status": 0, - "buttonbox": 0, - - #SectionCommon - "_setProductPartValue": 0, - "_finalSectionParts": 0, - "_sectionHeader": 0, - "_lifePropertyThreatSummary": 0, - "_getThreatTrendSentence": 0, - "_getThreatTrendValue": 0, - "_threatDifference": 0, - "_isThreatDecreasing": 0, - "_isThreatIncreasing": 0, - "_advisoryHasValidKey": 0, - "_isMagnitudeIncreasing": 0, - "_calculateThreatStatementTr": 0, - "_pastThreatsNotNone": 0, - "_setThreatStatementsProductParts": 0, - "_getThreatStatements": 0, - "_potentialImpactsSummary": 0, - "_getPotentialImpactsSummaryText": 0, - "_potentialImpactsStatements": 0, - "_getPotentialImpactsStatements": 0, - "_preparationStatement": 0, - - #Unique to each section, but common method name - "sectionParts": 0, - "_forecastSubsection": 0, - "_latestForecastSummary": 0, - "_threatSubsection": 0, - "_threatTrend": 0, - "_threatStatements": 0, - "_impactsSubsection": 0, - "_setStats": 0, - - #WindSection - "_peakWind": 0, - "_windowTS": 0, - "_windowHU": 0, - "_moderatedMaxWindMph_categories": 0, - "_ktToMph": 0, - "_increment": 0, - - #StormSurgeSection - "_peakSurge": 0, - "_surgeWindow": 0, - - #FloodingRainSection - "_peakRain": 0, - "_rainRange": 0, - - #TornadoSection - "_tornadoSituation": 0, - - #SectionCommonStats - "_initializeSegmentAdvisories": 0, - "_updateThreatStats": 0, - "_calculateHourOffset": 0, - - #WindSectionStats - "_determineCurrentPeriod": 0, - 
"_updateStatsForPwsXXint": 0, - "_updateStatsForPwsTXX": 0, - "_updateStatsForWind": 0, - "_updateMaxWindGust": 0, - "_calculateProbOnset": 0, - "_calculateMaxPwsXXintTr": 0, - "_computeWindOnsetAndEnd": 0, - "_createWindowText": 0, - "_getConfiguredTime": 0, - "_calculateUTCandLocalHourOffset": 0, - "_isValidDayTime": 0, - - #Unique to each formatter, but common method name - "execute": 0, - - #XMLFormatter - "xmlKeys": 0, - "sectionKeys": 0, - "getSectionKey": 0, - "dictionary": 0, - "list": 0, - - #LegacyFormatter - "_processProductParts": 0, - "processWmoHeader": 0, - "processProductHeader": 0, - "processLocationsAffected": 0, - "processSubsection": 0, - "processThreatStatements": 0, - "processImpactsStatements": 0, - "processInfoSection": 0, - "_buildInfoSection": 0, - "processSummaryHeadlines": 0, - "processSubParts": 0, - - #TextProductCommon - "setUp": 0, - "hazardTimeZones": 0, - "getExpireTime": 0, - "getHeadlinesAndSections": 0, - "formatUGCs": 0, - "getFormattedTime": 0, - "formatUGC_names": 0, - "formatNameString": 0, - "getVal": 0, - "formatDatetime": 0, - "flush": 0, - "makeUGCString": 0, - "checkLastArrow": 0, - } -# Definition["debug"] = 1 # turn on ALL debug messages - Definition["debug"] = 0 # turn off ALL debug messages - - - def __init__(self): - HLSTCV_Common.TextProduct.__init__(self) - - ##################################################################### - ##################################################################### - ### Organization of Formatter Code - - ############################################################### - ### MUST OVERRIDE DEFINITIONS !!! 
- ### _inlandAreas, _coastalAreas, _cwa - ############################################################### - - ############################################################### - ### TCV Product and Segment Parts Definition - ############################################################### - - ############################################################### - ### Analysis Lists, SampleAnalysis Overrides and other - ### analysis related methods - ############################################################### - - ############################################################### - # CODE - ############################################################### - ### High level flow of formatter - ### generateForecast, _initializeVariables, - ### _determineSegments, _determineTimeRanges, _sampleData, - ### _createProductDictionary, _formatProductDictionary, - ### _archiveCurrentAdvisory - ############################################################### - - ############################################################### - ### Product Parts Implementation - ############################################################### - - ############################################################### - ### Sampling and Statistics related methods - ############################################################### - - ############################################################### - ### Area, Zone and Segment related methods - ############################################################### - - ############################################################### - ### Hazards related methods - ############################################################### - - ############################################################### - ### Time related methods - ############################################################### - - ############################################################### - ### Advisory related methods - ############################################################### - - 
############################################################### - ### GUI related methods - ############################################################### - - - ############################################################### - ### MUST OVERRIDE DEFINITIONS !!! - - def _inlandAreas(self): - return [ - #"FLZ052", "FLZ056", "FLZ057", "FLZ061", "FLZ043", - ] - - def _coastalAreas(self): - return [ - #"FLZ039", "FLZ042", "FLZ048", "FLZ049", "FLZ050", "FLZ051", "FLZ055", "FLZ060", - #"FLZ062", - ] - - def _cwa(self): - return "" #"MFL" - - ############################################################### - ### TCV Product and Segment Parts Definition - - def _productParts_TCV(self, segment_vtecRecords_tuples): - segmentParts = [] - for segment_vtecRecords_tuple in segment_vtecRecords_tuples: - segmentParts.append(self._segmentParts_TCV(segment_vtecRecords_tuple)) - return { - 'partsList': [ - 'wmoHeader', - 'easMessage', - 'productHeader', - ('segments', segmentParts), - ] - } - - def _segmentParts_TCV(self, segment_vtecRecords_tuple): - segment, _ = segment_vtecRecords_tuple - - windSection = 'windSection[\'' + segment + '\']' - stormSurgeSection = 'stormSurgeSection[\'' + segment + '\']' - floodingRainSection = 'floodingRainSection[\'' + segment + '\']' - tornadoSection = 'tornadoSection[\'' + segment + '\']' - - partsList = [ - 'setup_segment', - 'ugcHeader', - 'vtecRecords', - 'areaList', - 'issuanceTimeDate', - 'summaryHeadlines', - 'locationsAffected', - 'fcstConfidence', - (windSection, self._windSection[segment].sectionParts(segment_vtecRecords_tuple)), - ] - - # The storm surge section should never be inserted into - # "inland" zones, since there will never be a surge impact. 
- if segment not in self._inlandAreas(): - partsList.append( - (stormSurgeSection, self._stormSurgeSection[segment].sectionParts(segment_vtecRecords_tuple))) - - partsList.extend([ - (floodingRainSection, self._floodingRainSection[segment].sectionParts(segment_vtecRecords_tuple)), - (tornadoSection, self._tornadoSection[segment].sectionParts(segment_vtecRecords_tuple)), - 'infoSection', - 'endSection']) - - return { - 'arguments': segment_vtecRecords_tuple, - 'partsList': partsList - } - - ############################################################### - ### Analysis Lists, SampleAnalysis Overrides and other - ### analysis related methods - - def _analysisList(self): - # Sample over 120 hours beginning at current time - analysisList = [ - # Wind Section - ("Wind", self.vectorModeratedMax, [3]), - ("WindGust", self.moderatedMax, [3]), - ("WindThreat", self.mostSignificantDiscreteValue), - ("pws34int", self.moderatedMax, [3]), - ("pws64int", self.moderatedMax, [3]), - ("pwsD34", self.moderatedMax), - ("pwsN34", self.moderatedMax), - ("pwsD64", self.moderatedMax), - ("pwsN64", self.moderatedMax), - - # Flooding Rain Section - ("QPF", self.accumSum, [72]), - ("FloodingRainThreat", self.mostSignificantDiscreteValue), - - # Tornado Section - ("TornadoThreat", self.mostSignificantDiscreteValue), - ] - - return analysisList - - def _intersectAnalysisList(self): - # The grids for the Surge Section will be intersected with a special edit area - analysisList = [ - ("InundationMax", self.moderatedMax), - ("InundationTiming", self.moderatedMax, [6]), - ] - - return analysisList - - def _extraRainfallAnalysisList(self): - analysisList = [ - ("QPF", self.accumSum), - ] - - return analysisList - - ############################################################### - ### High level flow of formatter - - def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - self.debug_print("argDict = %s" % (self._pp.pformat(argDict)), 1) - - error = 
self._initializeVariables(argDict) - if error is not None: - return error - - if self._stormName is None or self._stormName == "": - return "Could not determine the storm name" - - self._segmentList = self._determineSegments() - self.debug_print("Segment Information: %s" % (self._pp.pformat(self._segmentList)), 1) - if len(self._segmentList) == 0: - return "No hazards to report" - - # Determine time ranges - self._determineTimeRanges(argDict) - - # Make sure we have all of the necessary grids before continuing - error = self._performGridChecks(argDict) - if error is not None: - return error - - # Sample the data - self._sampleData(argDict) - - # Create the product dictionary and format it to create the output - productDict = self._createProductDictionary(self._productParts_TCV, - self._segmentList, - areProductPartsSegmented=True) - productOutput = self._formatProductDictionary(LegacyFormatter, productDict) - - self._archiveCurrentAdvisory() - - return productOutput - - def _initializeVariables(self, argDict): - error = HLSTCV_Common.TextProduct._initializeVariables(self, argDict) - if error is not None: - return error - - self._windSection = dict() - self._stormSurgeSection = dict() - self._floodingRainSection = dict() - self._tornadoSection = dict() - - self._initializeAdvisories() - - return None - - def _performGridChecks(self, argDict): - gridChecks = [(self._isCorrectNumGrids, "FloodingRainThreat", 1, argDict), - (self._isCorrectNumGrids, "TornadoThreat", 1, argDict), - (self._isContinuousDuration, "QPF", 72, argDict),] - - if self._WSPGridsAvailable: - gridChecks += [(self._isCorrectNumGrids, "WindThreat", 1, argDict), - (self._isContinuousDuration, "Wind", 120, argDict), - (self._isContinuousDuration, "WindGust", 120, argDict), - (self._isContinuousDuration, "pws34int", 114, argDict), - (self._isContinuousDuration, "pws64int", 114, argDict), - (self._isCombinedContinuousDuration, "pwsD34", "pwsN34", 102, argDict), - (self._isCombinedContinuousDuration, 
"pwsD64", "pwsN64", 102, argDict),] - - if self._PopulateSurge and len(self._coastalAreas()) != 0: - gridChecks += [(self._isCorrectNumGrids, "InundationMax", 1, argDict), - (self._isCorrectNumGrids, "InundationTiming", 12, argDict),] - - missingGridErrors = [] - for gridCheck in gridChecks: - # The first element is the grid check function to call and - # the rest of the elements are the arguments to the function - if not gridCheck[0](*gridCheck[1:]): - error = "" - if gridCheck[0] == self._isCorrectNumGrids: - if gridCheck[2] == 1: - error = "%s needs at least 1 grid" % (gridCheck[1]) - else: - error = "%s needs at least %s grids" % (gridCheck[1], gridCheck[2]) - elif gridCheck[0] == self._isContinuousDuration: - error = "%s needs at least %s continuous hours worth of data" % \ - (gridCheck[1], gridCheck[2]) - else: - error = "%s and %s combined need at least %s continuous hours worth of data" % \ - (gridCheck[1], gridCheck[2], gridCheck[3]) - - missingGridErrors.append(error) - - if len(missingGridErrors) != 0: - error = "There were problems with the following weather elements:\n" - - for gridError in missingGridErrors: - error += "\t" + gridError + "\n" - - return error - - return None - - def _isCorrectNumGrids(self, weatherElement, expectedNumGrids, argDict): - ifpClient = argDict["ifpClient"] - dbId = argDict["databaseID"] - parmId = ParmID(weatherElement, dbId) - times = ifpClient.getGridInventory(parmId) - - self.debug_print("_isCorrectNumGrids test for element: %s" % weatherElement, 1) - self.debug_print("Expected number of grids: %s" % expectedNumGrids, 1) - - gridTimes = [] - for index in range(len(times)): - gridTime = TimeRange.TimeRange(times[index]) - - if (gridTime.endTime() <= self._timeRange1Hour.startTime() or - gridTime.startTime() >= self._timeRange1Hour.endTime()): - -# prettyStartTime = self._pp.pformat(str(gridTime.startTime())) -# prettyEndTime = self._pp.pformat(str(gridTime.endTime())) -# self.debug_print("skipping grid %s (%s - %s): 
outside of time range" -# % (index, prettyStartTime, prettyEndTime), 1) - pass - else: - gridTimes.append(gridTime) - - self.debug_print("Actual number of grids: %s" % len(gridTimes), 1) - - retval = len(gridTimes) >= expectedNumGrids - if not retval: - self.debug_print("_isCorrectNumGrids test failed", 1) - self.debug_print("self._timeRange1Hour: %s" % str(self._timeRange1Hour), 1) - self.debug_print("times: %s" % str(times), 1) - return retval - - def _isContinuousDuration(self, weatherElement, minimumNumHours, argDict): - return self._checkContinuousDuration([weatherElement], minimumNumHours, argDict) - - def _isCombinedContinuousDuration(self, weatherElement1, weatherElement2, minimumNumHours, argDict): - return self._checkContinuousDuration([weatherElement1, weatherElement2], minimumNumHours, argDict) - - def _checkContinuousDuration(self, weatherElementList, minimumNumHours, argDict): - - self.debug_print("_checkContinuousDuration for elements: %s" % \ - self._pp.pformat(weatherElementList), 1) - self.debug_print("Minimum Number of Hours: %s" % minimumNumHours, 1) - - ifpClient = argDict["ifpClient"] - dbId = argDict["databaseID"] - - gridTimes = [] - inventoryDict = {} - for weatherElement in weatherElementList: - parmId = ParmID(weatherElement, dbId) - times = ifpClient.getGridInventory(parmId) - inventoryDict[weatherElement] = times - - for index in range(times.size()): - gridTimes.append(TimeRange.TimeRange(times[index])) - - if len(gridTimes) == 0: - # No grids - self.debug_print("No grids found.", 1) - return False - - gridTimes = sorted(gridTimes, key= lambda gridTime: gridTime.startTime()) - - totalHours = 0 - previousEndTime = None - for gridTime in gridTimes: - if gridTime.endTime() <= self._timeRange1Hour.startTime(): -# prettyEndTime = self._pp.pformat(str(gridTime.endTime())) -# prettyStartTime = self._pp.pformat(str(self._timeRange1Hour.startTime())) -# self.debug_print("skipping: grid end time (%s) before time range start time (%s)" -# % 
(prettyEndTime, prettyStartTime), 1) - continue - - if gridTime.startTime() >= self._timeRange1Hour.endTime(): -# prettyStartTime = self._pp.pformat(str(gridTime.startTime())) -# prettyEndTime = self._pp.pformat(str(self._timeRange1Hour.endTime())) -# self.debug_print("done: grid start time (%s) after time range end time (%s)" -# % (prettyStartTime, prettyEndTime), 1) - break - - if previousEndTime is None: - previousEndTime = gridTime.startTime() - - if previousEndTime != gridTime.startTime(): - break - - previousEndTime = gridTime.endTime() - totalHours += gridTime.duration() / 3600 # Convert from seconds to hours - - self.debug_print("Total Hours of continuous grids: %s" % totalHours, 1) - - retval = totalHours >= minimumNumHours - if not retval: - self.debug_print("_checkContinuousDuration failed.", 1) - self.debug_print("self._timeRange1Hour: %s" % self._pp.pformat(self._timeRange1Hour), 1) - for we in inventoryDict: - self.debug_print("times for %s: %s" % (we, str(inventoryDict[we])), 1) - self.debug_print("Not continuous at: %s" % str(previousEndTime), 1) - - return retval - - ############################################################### - ### Product Parts Implementation - - def _noOpParts(self): - ''' - These represent product parts that should be skipped when calling product part methods. - They will be handled automatically by the formatters. 
- ''' - return ['CR', 'endProduct', 'endSegment', 'doubleAmpersand'] - - ################# Product Level - - def _easMessage(self, productDict, productSegmentGroup, arguments=None): - productDict['easMessage'] = self._easPhrase - - ################# Segment Level - - def _setup_segment(self, segmentDict, productSegmentGroup, productSegment): - segment, vtecRecords = productSegment - self.debug_print('setup_segment productSegment %s' % (self._pp.pformat(productSegment)), 1) - # NOTE -- using _getVtecRecords to change to milliseconds - segmentVtecRecords = self._getVtecRecords(segment) - - # UGCs and Expire Time - # Assume that the geoType is the same for all hazard events in the segment i.e. area or point - self._ugcs = [segment] - self._timeZones = self._tpc.hazardTimeZones(self._ugcs) - - # In order to compute the expire time, the VTEC record times - # need to be in milliseconds. - recordsInMS = [] - for record in segmentVtecRecords: - recordInMS = copy.copy(record) - - recordInMS["startTime"] = recordInMS["startTime"] * 1000 - recordInMS["endTime"] = recordInMS["endTime"] * 1000 - if recordInMS.has_key("purgeTime"): - recordInMS["purgeTime"] = recordInMS["purgeTime"] * 1000 - if recordInMS.has_key("issueTime"): - recordInMS["issueTime"] = recordInMS["issueTime"] * 1000 - - recordsInMS.append(recordInMS) - - # Get the expire time in milliseconds since the epoch - self._expireTime = self._tpc.getExpireTime( - self._issueTime_ms, self._purgeHours, recordsInMS) - # Then convert it to a date - segmentDict['expireTime'] = self._convertToISO(self._expireTime) - - # Don't show UPG headlines - nonUPGrecords = [] - for record in segmentVtecRecords: - if record['act'] != "UPG": - nonUPGrecords.append(record) - self._summaryHeadlines_value, _ = self._tpc.getHeadlinesAndSections( - nonUPGrecords, self._productID, self._issueTime_secs) - - def _vtecRecords(self, segmentDict, productSegmentGroup, productSegment): - segment, vtecRecords = productSegment - records = [] - for 
vtecRecord in vtecRecords: - vstr = vtecRecord["vtecstr"] - - self.debug_print("vtecRecord = %s" % (self._pp.pformat(vtecRecord)), 1) - - self.debug_print("final vstr = %s" % vstr, 1) - records.append(vstr) - segmentDict['vtecRecords'] = records - - def _areaList(self, segmentDict, productSegmentGroup, productSegment): - # Area String - segmentDict['areaList'] = self._tpc.formatUGC_names(self._ugcs) - - def _issuanceTimeDate(self, segmentDict, productSegmentGroup, productSegment): - segmentDict['issuanceTimeDate'] = self._timeLabel - - def _summaryHeadlines(self, segmentDict, productSegmentGroup, productSegment): - segment, vtecRecords = productSegment - definitions = [] - hazardsFound = [] - - for (phenSig, actions, name) in self.allowedHazards(): - for vtecRecord in vtecRecords: - # The 'phensig' in the VTEC record could contain an - # ETN. As such, we need to strip the ETN before doing a - # comparison with the allowedHazards. - if vtecRecord["phensig"].split(":")[0] == phenSig and \ - phenSig not in hazardsFound and \ - vtecRecord["act"] in ["NEW", "EXA"]: - - hazardsFound.append(phenSig) - definition = self._hazardDefinition(phenSig) - if definition != "": - definitions.append(definition) - - summaryDict = collections.OrderedDict() - headlines = self._summaryHeadlines_value.split("\n") - headlinesInEffect = [] - for headline in headlines: - if len(headline) != 0: - headlinesInEffect.append(headline) - summaryDict['headlinesInEffect'] = headlinesInEffect - summaryDict['headlineDefinitions'] = definitions - segmentDict['summaryHeadlines'] = summaryDict - - def _locationsAffected(self, segmentDict, productSegmentGroup, productSegment): - segment, vtecRecords = productSegment - import TCVAreaDictionary - tcv_AreaDictionary = TCVAreaDictionary.TCV_AreaDictionary - - segmentDict['locationsAffected'] = [] - if segment in tcv_AreaDictionary: - segmentDict['locationsAffected'] = tcv_AreaDictionary[segment]["locationsAffected"] - - def _fcstConfidence(self, segmentDict, 
productSegmentGroup, productSegment): - # TODO - Get this from the TCM product potentially? Not included until provided from NHC - return "" - - def _infoSection(self, segmentDict, productSegmentGroup, productSegment): - segment, vtecRecords = productSegment - import TCVAreaDictionary - tcv_AreaDictionary = TCVAreaDictionary.TCV_AreaDictionary - - segment, vtecRecords = productSegment - infoSection = [] - if segment in tcv_AreaDictionary: - infoSection = tcv_AreaDictionary[segment]["infoSection"] - - segmentDict['infoSection'] = infoSection - - def _endSection(self, segmentDict, productSegmentGroup, productSegment): - segmentDict['endSection'] = "\n$$" - - ################# Product Parts Helper Methods - - def _hazardDefinition(self, phenSig): - import VTECTable - - phen, sig = phenSig.split('.') - headline = VTECTable.VTECTable[phenSig]["hdln"] - - definition = "A " + headline + " means " - - if phenSig == "HU.W": - definition += "hurricane-force winds are expected" - - elif phenSig == "HU.A": - definition += "hurricane-force winds are possible" - - elif phenSig == "TR.W": - definition += "tropical storm-force winds are expected" - - elif phenSig == "TR.A": - definition += "tropical storm-force winds are possible" - - elif phenSig == "SS.W": - definition += "there is a danger of life-threatening inundation, from rising water moving inland from the coastline," - - elif phenSig == "SS.A": - definition += "life-threatening inundation, from rising water moving inland from the coastline, is possible" - - else: - return "" - - if sig == "W": # Warning - definition += " somewhere within this area within the next 36 hours" - elif sig == "A": # Watch - definition += " somewhere within this area within the next 48 hours" - - return definition - - ############################################################### - ### Sampling and Statistics related methods - - def _threatKeyOrder(self): - return [None, "None", "Elevated", "Mod", "High", "Extreme"] - - def _sampleData(self, 
argDict): - # Sample the data - self._createSamplers(argDict) - - # We need to preserve the ordering of the zones based off the zone combiner ordering - sortedAreas = sorted(self._allAreas(), - key=lambda x: self._segmentList.index(x) if x in self._segmentList else 9999) - for segment in sortedAreas: - self._initializeSegmentZoneData(segment) - - # We need stats for all zones to be saved in the advisory, - # regardless of whether or not it has a hazard in it. Getting - # the stats causes them to be added to the advisory. - windStats, stormSurgeStats, floodingRainStats, tornadoStats = \ - self._getStats(self._argDict, - segment, - self._editAreaDict, - self._timeRangeList1Hour, - self._timeRangeList3Hour, - self._timeRangeList6Hour) - - # Only show zones with hazards in the output - if segment in self._segmentList: - # These segment sections will be added to the product parts - self._windSection[segment] = WindSection(self, segment, windStats) - self._stormSurgeSection[segment] = StormSurgeSection(self, segment, stormSurgeStats) - self._floodingRainSection[segment] = FloodingRainSection(self, segment, floodingRainStats) - self._tornadoSection[segment] = TornadoSection(self, segment, tornadoStats) - - def _createSamplers(self, argDict): - # Create the samplers used for sampling the data - editAreas = self._makeSegmentEditAreas(argDict) - - # The sampler used for Wind section related stats - self._sampler1Hour = self.getSampler(argDict, - (self._analysisList(), self._timeRangeList1Hour, editAreas)) - - # The sampler used for Flooding Rain and Storm Surge section related stats - self._sampler3Hour = self.getSampler(argDict, - (self._analysisList(), self._timeRangeList3Hour, editAreas)) - - # For storm surge, the edit areas are intersected with a special edit area. - # If there aren't any coastal areas, they won't have the special edit area - # though so don't execute this code in that case. 
- if len(self._coastalAreas()) > 0: - intersectAreas = self._computeIntersectAreas(editAreas, argDict) - self._intersectSampler = self.getSampler(argDict, - (self._intersectAnalysisList(), self._timeRangeList6Hour, intersectAreas)) - - # Make a sample period for the previous rainfall - self._previousRainfallTR = [(self._extraSampleTimeRange, "PrevRainfall")] - self._extraRainfallSampler = self.getSampler(argDict, - (self._extraRainfallAnalysisList(), self._previousRainfallTR, - editAreas)) - - def _getStats(self, argDict, segment, editAreaDict, timeRangeList1Hour, timeRangeList3Hour, timeRangeList6Hour): - # Get statistics for this segment - - editArea = editAreaDict[segment] - - statList1Hour = self.getStatList(self._sampler1Hour, - self._analysisList(), - timeRangeList1Hour, - editArea) - - statList3Hour = self.getStatList(self._sampler3Hour, - self._analysisList(), - timeRangeList3Hour, - editArea) - - self.debug_print("*"*80, 1) - self.debug_print("editArea =" + editArea, 1) - self.debug_print("timeRangeList1Hour = %s" % (self._pp.pformat(timeRangeList1Hour)), 1) - self.debug_print("timeRangeList3Hour = %s" % (self._pp.pformat(timeRangeList3Hour)), 1) - self.debug_print("timeRangeList6Hour = %s" % (self._pp.pformat(timeRangeList6Hour)), 1) - self.debug_print("statList1Hour = %s" % (self._pp.pformat(statList1Hour)), 1) - self.debug_print("statList3Hour = %s" % (self._pp.pformat(statList3Hour)), 1) - self.debug_print("-"*40, 1) - - windStats = WindSectionStats(self, segment, statList1Hour, timeRangeList1Hour) - - # The surge section needs sampling done with an intersected edit area - if editArea in self._coastalAreas(): - intersectEditArea = "intersect_"+editArea - intersectStatList = self.getStatList(self._intersectSampler, - self._intersectAnalysisList(), - timeRangeList6Hour, - intersectEditArea) - else: - intersectStatList = "InlandArea" - - self.debug_print("intersectStatList = %s" % (self._pp.pformat(intersectStatList)), 1) - self.debug_print("-"*40, 1) - - 
stormSurgeStats = StormSurgeSectionStats(self, segment, intersectStatList, timeRangeList6Hour) - - # These stats are for handling the extra rainfall - extraRainfallStatList = self.getStatList(self._extraRainfallSampler, - self._extraRainfallAnalysisList(), - self._previousRainfallTR, - editArea) - - floodingRainStats = FloodingRainSectionStats(self, segment, - statList3Hour, timeRangeList3Hour, - extraRainfallStatList, self._previousRainfallTR) - tornadoStats = TornadoSectionStats(self, segment, statList3Hour, timeRangeList3Hour) - - return (windStats, stormSurgeStats, floodingRainStats, tornadoStats) - - ############################################################### - ### Area, Zone and Segment related methods - - def _determineSegments(self): - # Get the segments based on hazards "overlaid" with combinations file - - # Get the forecaster entered combinations - accessor = ModuleAccessor.ModuleAccessor() - self.debug_print("self._defaultEditAreas = %s" % (self._pp.pformat(self._defaultEditAreas)), 1) - combos = accessor.variable(self._defaultEditAreas, "Combinations") - # combos is a list of tuples. Each tuple is a grouping of zones (a list of zones, combo name). - if combos is None: - LogStream.logVerbose("Combination file not found: " + self._defaultEditAreas) - return [] - self.debug_print("Segments from Zone Combiner = %s" % (self._pp.pformat(combos)), 1) - - # "Overlay" the forecaster-entered combinations onto the segments - # so that the zones are ordered and grouped (as much as possible) - # as indicated in the zone combiner. - refinedHazardSegments = self._getRefinedHazardSegments(combos) - - # Instead of a segment being a group of zones, it will be just a single zone. - # So collapse this list of lists down to a list of zones (aka. 
segments) - segments = [] - for segment in refinedHazardSegments: - segments += segment - - return segments - - def _getRefinedHazardSegments(self, combos): - # Get a list of list of zones that are ordered and grouped - # based off of hazards and the provided zone combinations. - - # Get the raw analyzed table (a list of VTEC records) and organize the hazards - # to get a list of lists of zones that have the same hazards - self.debug_print("Raw Analyzed %s" % (self._pp.pformat(self._hazardsTable.rawAnalyzedTable())), 1) - hazSegments = self.organizeHazards(self._hazardsTable.rawAnalyzedTable()) - self.debug_print("Segments from HazardsTable organizeHazards %s" % (self._pp.pformat(hazSegments)), 1) - - # "Overlay" the forecaster-entered combinations onto the segments - # so that the zones are ordered and grouped (as much as possible) - # as indicated in the zone combiner. - refinedSegments = self._refineSegments(hazSegments, combos) - self.debug_print("New segments = %s" % (self._pp.pformat(refinedSegments)), 1) - - return refinedSegments - - def _refineSegments(self, hazSegments, combos): - """Reorder and regroup (as much as possible) the hazard segments - based off of the ordering and grouping in combos. Zones will - only be combined into groups if they share the same hazards - (regardless of whether they are grouped together in combos). - """ - if combos == []: - return hazSegments - newSegments = [] # list of lists - newAreas = [] - for combo, label in combos: - # Each combination will be tested to see if it can stay intact - # i.e. if all areas in the combo are in the same segment - # else split it into like segments - # - # segmentMapping is a list where each entry is - # the hazSegment in which the corresponding combo area appears. 
- # (We need to define self._segmentList for the mapping function - # to use) - self._segmentList = hazSegments - self.debug_print("self._segmentList = %s" % (self._pp.pformat(self._segmentList)), 1) - self.debug_print("current combo = %s" % (self._pp.pformat(combo)), 1) - segmentMapping = map(self._findSegment, combo) - self.debug_print(" segmentMapping = %s" % (self._pp.pformat(segmentMapping)), 1) - - # segmentDict keys will be the hazSegments and - # we will gather all the areas of the combos that appear - # in each of these hazSegments - segmentDict = {} - keyList = [] - for areaName in combo: - self.debug_print(" Adding %s" % (areaName), 1) - key = tuple(segmentMapping[combo.index(areaName)]) - if key == (): # If no hazard for area, do not include - continue - if key not in keyList: - keyList.append(key) - segmentDict.setdefault(key,[]).append(areaName) - self.debug_print(" segmentDict = %s" % (self._pp.pformat(segmentDict)), 1) - - # Keep track of the areas that we are including - for key in keyList: - segAreas = segmentDict[key] - newAreas = newAreas + segAreas - newSegments.append(segAreas) - self.debug_print(" newAreas = %s" % (self._pp.pformat(newAreas)), 1) - self.debug_print(" newSegments = %s" % (self._pp.pformat(newSegments)), 1) - self.debug_print(" newSegments = %s" % (self._pp.pformat(newSegments)), 1) - # Now add in the hazAreas that have not been accounted for - # in the combinations - for hazSegment in hazSegments: - newSeg = [] - for hazArea in hazSegment: - if hazArea not in newAreas: - newSeg.append(hazArea) - if newSeg != []: - newSegments.append(newSeg) - self.debug_print(" final newSegments = %s" % (self._pp.pformat(newSegments)), 1) - return newSegments - - def _makeSegmentEditAreas(self, argDict): - # Create the edit areas that will be sampled - areasList = self._allAreas() - self.debug_print("areasList = %s" % (self._pp.pformat(areasList)), 1) - editAreas = [] - self._editAreaDict = {} - for area in areasList: - 
self._editAreaDict[area] = area - editAreas.append((area, area)) - return editAreas - - def _findSegment(self, areaName): - # Determine which hazard group a zone belongs to - for segment in self._segmentList: - if areaName in segment: - return segment - return [] - - ############################################################### - ### Hazards related methods - - def _getAllVTECRecords(self): - allRecords = [] - # Only the segments in _segmentList contain hazards so no - # need to check everything in _allAreas() - for segment in self._segmentList: - allRecords += self._getVtecRecords(segment) - - return allRecords - - def _getHazardsForHLS(self): - # Get all the hazards so that the HLS will have access to them. - # Areas that share the same hazards are grouped together - # into a single hazard. - hazardTable = self._argDict["hazards"] - - # Create a single grouping of all zones. This will make it so that - # the hazards are grouped together as much as possible so that we - # don't repeat hazard information for zones in HLS. - combos = [([self._allAreas()], "AllAreas")] - - # "Overlay" this group of all zones onto the segments - # so that we get as few groups of zones as possible. 
- refinedHazardSegments = self._getRefinedHazardSegments(combos) - - allHazards = [] - for segment in refinedHazardSegments: - hazardsList = hazardTable.getHazardList(segment) - for hazard in hazardsList: - # If this is a correction, don't generate new hazards, - # use the previous ones - if hazard['act'] == 'COR': - return self._previousAdvisory["HazardsForHLS"] - else: - # Tropical hazards shouldn't ever have EXT and EXB actions since - # they are "until further notice" - if hazard["act"] == "EXT": - hazard["act"] = "CON" - elif hazard["act"] == "EXB": - hazard["act"] = "EXA" - - allHazards.append(hazard) - - return allHazards - - ############################################################### - ### Time related methods - - def _convertToISO(self, time_ms, local=None): - # Convert milliseconds since the epoch to a date - import datetime - dt = datetime.datetime.fromtimestamp(time_ms / 1000) - if local: - timeZone = self._timeZones[0] - else: - timeZone = None - return self._tpc.formatDatetime(dt, timeZone=timeZone) - - def _convertToDatetime(self, time_ms): - import datetime - return datetime.datetime.fromtimestamp(time_ms / 1000) - - ############################################################### - ### Advisory related methods - - def _initializeSegmentZoneData(self, segment): - # The current advisory will be populated when getting a section's stats - self._currentAdvisory['ZoneData'][segment] = { - "WindThreat": None, - "WindForecast": None, - "WindHighestPhaseReached": None, - "highestHunkerDownWindThreat": "None", - "StormSurgeThreat": None, - "StormSurgeForecast": None, - "StormSurgeHighestPhaseReached": None, - "highestHunkerDownSurgeThreat": "None", - "FloodingRainThreat": None, - "FloodingRainForecast": None, - "TornadoThreat": None, - } - - # Make sure our highest threats and phases aren't lost - previousSegmentAdvisory = None - if self._previousAdvisory is not None: - previousSegmentAdvisory = self._previousAdvisory['ZoneData'][segment] - - if 
previousSegmentAdvisory is not None: - currentSegmentAdvisory = self._currentAdvisory['ZoneData'][segment] - - currentSegmentAdvisory["WindHighestPhaseReached"] = \ - previousSegmentAdvisory["WindHighestPhaseReached"] - - currentSegmentAdvisory["highestHunkerDownWindThreat"] = \ - previousSegmentAdvisory["highestHunkerDownWindThreat"] - - currentSegmentAdvisory["StormSurgeHighestPhaseReached"] = \ - previousSegmentAdvisory["StormSurgeHighestPhaseReached"] - - currentSegmentAdvisory["highestHunkerDownSurgeThreat"] = \ - previousSegmentAdvisory["highestHunkerDownSurgeThreat"] - - def _getPreviousAdvisories(self): - stormAdvisories = self._getStormAdvisoryNames() - - self.debug_print("DEBUG: stormAdvisories = %s" % - (self._pp.pformat(stormAdvisories)), 1) - - previousAdvisories = [] - - # Get the current storm number from the TCP (ie. AL092016) - curStormNumber = self._getStormNumberStringFromTCP() - self.debug_print("DEBUG: curStormNumber = %s" % - (curStormNumber), 1) - - # Filter out the advisories we wish to process further - for advisory in stormAdvisories: - - # If this was an advisory for the current storm - if advisory.startswith(curStormNumber): - - # Load this advisory for this storm - curAdvisory = self._loadAdvisory(advisory) - - if curAdvisory is not None: - previousAdvisories.append(curAdvisory) - - - self.debug_print("DEBUG: previous advisories = %s" % - (self._pp.pformat(previousAdvisories)), 1) - - return previousAdvisories - - def _archiveCurrentAdvisory(self): - ### Determine if all actions are canceled - allCAN = True - for vtecRecord in self._getAllVTECRecords(): - action = vtecRecord['act'] - if action != "CAN": - allCAN = False - break - - self._currentAdvisory["AllCAN"] = allCAN - self._currentAdvisory["CreationTime"] = self._issueTime_secs - self._currentAdvisory["Transmitted"] = False - self._currentAdvisory["StormName"] = self._getStormNameFromTCP() - self._currentAdvisory["StormNumber"] = self._getStormNumberStringFromTCP() - 
self._currentAdvisory["StormID"] = self._getStormIDStringFromTCP() - self._currentAdvisory["AdvisoryNumber"] = self._getAdvisoryNumberStringFromTCP() - self._currentAdvisory["HazardsForHLS"] = self._getHazardsForHLS() - - self._saveAdvisory("pending", self._currentAdvisory) - - def _saveAdvisory(self, advisoryName, advisoryDict): - self._synchronizeAdvisories() - fileName = self._getAdvisoryFilename(advisoryName) - - self.debug_print("Saving %s to %s" % (advisoryName, fileName), 1) - self.debug_print("advisoryDict: %s" % (self._pp.pformat(advisoryDict)), 1) - - try: - JsonSupport.saveToJson(LocalizationSupport.CAVE_STATIC, - self._site, - fileName, - advisoryDict) - except Exception, e: - LogStream.logProblem("Exception saving %s: %s" % (fileName, LogStream.exc())) - else: # No exceptions occurred - self.debug_print("Wrote file contents for: %s" % (fileName), 1) - - # Purposely allow this to throw - self._synchronizeAdvisories() - - ############################################################### - ### GUI related methods - - def _overview_list(self): - if self._site == "HFO": - stormInfoOptions = ["TCPCP1", "TCPCP2", "TCPCP3", "TCPCP4", "TCPCP5"] - else: - stormInfoOptions = ["TCPAT1", "TCPAT2", "TCPAT3", "TCPAT4", "TCPAT5"] - - stormInfoOptions.append("Enter PIL below (e.g. 
WRKTCP):") - - return [ - { - "name": "StormInfo", - "label": "Obtain Storm Type/Name/Info", - "options": stormInfoOptions, - "entryField": " ", - }, - { - "name": "PopulateSurge", - "label": "Populate Surge Section", - "options": [ - ("Populate", True), - ("Do not populate", False), - ], - "default": "Populate", - }, - { - "name": "WSPGridsAvailable", - "label": "Are WSP grids available?", - "options": [ - ("Yes", True), - ("No", False), - ], - "default": "Yes", - }, - ] - - def _displayGUI(self, infoDict=None): - dialog = Overview_Dialog(self, "TCV", infoDict) - status = dialog.status() - LogStream.logVerbose("status="+status) - if status == "Cancel": - return None - else: - return dialog.getVarDict() - -import Tkinter -class Overview_Dialog(HLSTCV_Common.Common_Dialog): - def __init__(self, parent, title, infoDict=None): - HLSTCV_Common.Common_Dialog.__init__(self, parent, title, infoDict) - - def body(self, master): - # build the main display dialog - tkObject_dict = self._tkObject_dict - overviewList = self._parent._overview_list() - fontDict = self._parent._font_GUI_dict() - - # OVERVIEW header - headerFG, headerFont = fontDict["headers"] - frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - frame.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) - - numBoxes = 2 - - boxes = [] - for i in range(numBoxes): - newBox = Tkinter.Frame(master) - newBox.pack(side=Tkinter.TOP, expand=Tkinter.NO, - fill=Tkinter.Y, anchor=Tkinter.W) - boxes.append(newBox) - - for infoDict in overviewList: - name = infoDict["name"] - label = infoDict["label"] - options = infoDict.get("options", []) - entryField = infoDict.get("entryField", None) - default = infoDict.get("default", None) - optionType = infoDict.get("optionType", "radio") - - index = overviewList.index(infoDict) - if index == 0: - boxNum = 0 - buttonSide=Tkinter.TOP - frameSide = Tkinter.LEFT - else: - boxNum = 1 - buttonSide=Tkinter.LEFT - frameSide=Tkinter.TOP - - box = boxes[boxNum] - - 
tkObject_dict[name], entryObject = self._makeRadioOrCheckList( - box, label, options, default, buttonSide=buttonSide, frameSide=frameSide, - entryField=entryField, headerFG=headerFG, - headerFont=headerFont, boxType=optionType) - if entryObject is not None: - tkObject_dict[self._entryName(name)] = entryObject - - # End Instructions and Button - frame = Tkinter.Frame(master, relief=Tkinter.GROOVE, borderwidth=1) - self._makeButtons(frame) - frame.pack(side=Tkinter.TOP, fill=Tkinter.X, expand=Tkinter.NO) - - def _makeButtons(self, master): - frame = Tkinter.Frame(master) - buttonList = self._parent._GUI1_configDict().get("buttonList", []) - for button, label in buttonList: - if button == "Run": - command = self.okCB - else: # Cancel - command = self.cancelCB - Tkinter.Button(frame, text=label, command=command, width=10, - state=Tkinter.NORMAL).pack(side=Tkinter.LEFT, pady=5, padx=10) - frame.pack() - - def okCB(self): - # pull the data from the tkObject_dict before they get toasted - tkObject_dict = self._tkObject_dict - overviewList = self._parent._overview_list() - for infoDict in overviewList: - name = infoDict["name"] - label = infoDict["label"] - options = infoDict.get("options", []) - entryField = infoDict.get("entryField", None) - default = infoDict.get("default", None) - optionType = infoDict.get("optionType", "radio") - - if optionType == "check": - checkList = [] - ivarList = tkObject_dict[name] - for i in range(len(options)): - if ivarList[i].get(): - checkList.append(options[i]) - value = checkList - self._setVarDict(name, value) - else: - value = tkObject_dict[name].get() - self._setVarDict(name, value, options) - - if entryField is not None: - entryName = self._entryName(name) - self._setVarDict(entryName, tkObject_dict[entryName].get()) - # close window and set status "Ok" - self._status = "Ok" - self.withdraw() - self.ok() - - -class SectionCommon(): - def __init__(self, textProduct, segment, sectionHeaderName): - self._textProduct = textProduct - 
self._sectionHeaderName = sectionHeaderName - self._segment = segment - self._tr = None - self.isThreatNoneForEntireStorm = False - - def _isThreatNoneForEntireStorm(self, threatName): - previousAdvisories = self._textProduct._getPreviousAdvisories() - - # For the first advisory, this needs to be false otherwise - # potential impacts could be wrong - if len(previousAdvisories) == 0: - return False - - for advisory in previousAdvisories: - if advisory["ZoneData"][self._segment][threatName] != "None": - return False - - return False - - def _setProductPartValue(self, dictionary, productPartName, value): - dictionary[self._sectionName + '._' + productPartName] = value - - def _finalSectionParts(self, segment_vtecRecords_tuple, parts): - finalParts = [] - for partName in parts: - if partName not in self._textProduct._noOpParts(): - finalParts.append(self._sectionName + '._' + partName) - else: - finalParts.append(partName) - - return [{ - 'arguments': segment_vtecRecords_tuple, - 'partsList': finalParts - }] - - def _sectionHeader(self, segmentDict, productSegmentGroup, productSegment): - self._setProductPartValue(segmentDict, 'sectionHeader', self._sectionHeaderName) - - def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): - if self._stats._maxThreat is not None: - threatLevel = self._stats._maxThreat - if threatLevel == "Mod": - threatLevel = "Moderate" - - threatStatement = \ - self._textProduct._threatPhrase[self._sectionHeaderName][threatLevel] - - self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', "POTENTIAL THREAT TO LIFE AND PROPERTY: " + threatStatement) - - # This new method will convert the single word threat trend into - # an appropriate sentence - def _getThreatTrendSentence(self, section, threatTrendValue): - - if threatTrendValue.upper() == "INCREASING": - text = "The %s threat has increased" % (section) - elif threatTrendValue.upper() == "DECREASING": - text = "The %s threat has decreased" % (section) 
- elif threatTrendValue.upper() == "NEARLY STEADY": - text = "The %s threat has remained nearly steady" % (section) - - return text + " from the previous assessment." - - def _getThreatTrendValue(self, elementName, magnitudeIncreaseThreshold): - threatKey = elementName + "Threat" - forecastKey = elementName + "Forecast" - - self._textProduct.debug_print("THREAT DEBUG for %s" % (elementName), 1) - - self._textProduct.debug_print("getThreatTrendValue _currentAdvisory =\n%s" % (self._textProduct._pp.pformat(self._stats._currentAdvisory)), 1) - self._textProduct.debug_print("getThreatTrendValue _previousAdvisory =\n%s" % (self._textProduct._pp.pformat(self._stats._previousAdvisory)), 1) - - if (self._stats._currentAdvisory is None) or (self._stats._previousAdvisory is None): - # Only compute a threat trend if we have 2 or more advisories - return None - - currentThreat = self._stats._currentAdvisory[threatKey] - previousThreat = self._stats._previousAdvisory[threatKey] - shorterTermTrendDifference = self._threatDifference(currentThreat, previousThreat) - - self._textProduct.debug_print("currentThreat = %s" % (self._textProduct._pp.pformat(currentThreat)), 1) - self._textProduct.debug_print("previousThreat = %s" % (self._textProduct._pp.pformat(previousThreat)), 1) - self._textProduct.debug_print("shorterTermTrendDifference = %s" % (shorterTermTrendDifference), 1) - - previousPreviousThreat = None - longerTermTrendDifference = None - if self._stats._previousPreviousAdvisory is not None: - self._textProduct.debug_print("_previousPreviousAdvisory is not None", 1) - previousPreviousThreat = self._stats._previousPreviousAdvisory[threatKey] - self._textProduct.debug_print("previousPreviousThreat = %s" % (self._textProduct._pp.pformat(previousPreviousThreat)), 1) - longerTermTrendDifference = self._threatDifference(currentThreat, previousPreviousThreat) - self._textProduct.debug_print("longerTermTrendDifference = %s" % (longerTermTrendDifference), 1) - - threatTrendValue = 
"NEARLY STEADY" - self._textProduct.debug_print("magnitudeIncreaseThreshold = %s forecastKey = '%s'" % (magnitudeIncreaseThreshold, forecastKey), 1) - if self._isThreatDecreasing(shorterTermTrendDifference, longerTermTrendDifference): - self._textProduct.debug_print("threat is decreasing", 1) - threatTrendValue = "DECREASING" - elif self._isThreatIncreasing(shorterTermTrendDifference, longerTermTrendDifference): - self._textProduct.debug_print("threat is increasing", 1) - threatTrendValue = "INCREASING" - # NOTE: Modified so more threat levels can be classified as increasing when forecast has increased - elif currentThreat in ["Mod", "High", "Extreme"] and \ - self._isMagnitudeIncreasing(forecastKey, magnitudeIncreaseThreshold): - self._textProduct.debug_print("Increasing based on magnitude", 1) - threatTrendValue = "INCREASING" - - return threatTrendValue - - def _threatDifference(self, threat1, threat2): - threatLevels = self._textProduct._threatKeyOrder() - self._textProduct.debug_print("threat1 index = %s" % (threatLevels.index(threat1)), 1) - self._textProduct.debug_print("threat2 index = %s" % (threatLevels.index(threat2)), 1) - return threatLevels.index(threat1) - threatLevels.index(threat2) - - def _isThreatDecreasing(self, shorterTermTrendDifference, longerTermTrendDifference): - #If the current threat is at least 1 category lower than both previous advisories - if (shorterTermTrendDifference < 0 and \ - longerTermTrendDifference is not None and \ - longerTermTrendDifference < 0): - self._textProduct.debug_print("the current threat is at least 1 category lower than both previous advisories", 1) - return True - #Or if the current threat decreased by more than 1 category - elif shorterTermTrendDifference < -1: - self._textProduct.debug_print("the current threat decreased by more than 1 category", 1) - return True - else: - self._textProduct.debug_print("the current threat is not decreasing", 1) - return False - - def _isThreatIncreasing(self, 
shorterTermTrendDifference, longerTermTrendDifference): - #If the current threat is at least 1 category higher than both previous advisories - if (shorterTermTrendDifference > 0 and \ - longerTermTrendDifference is not None and \ - longerTermTrendDifference > 0): - self._textProduct.debug_print("the current threat is at least 1 category higher than both previous advisories", 1) - return True - #Or if the current threat increased by more than 1 category - elif shorterTermTrendDifference > 1: - self._textProduct.debug_print("the current threat increased by more than 1 category", 1) - return True - else: - self._textProduct.debug_print("the current threat is not increasing", 1) - return False - - def _advisoryHasValidKey(self, advisory, key): - return (advisory is not None) and \ - (advisory.has_key(key)) and \ - (advisory[key] is not None) - - def _isMagnitudeIncreasing(self, forecastKey, threshold): -# currentValue, previousValue, previousPreviousValue - self._textProduct.debug_print("_isMagnitudeIncreasing", 1) - self._textProduct.debug_print("forecastKey = %s" % (forecastKey), 1) - self._textProduct.debug_print("threshold = %s" % (threshold), 1) - - if self._advisoryHasValidKey(self._stats._currentAdvisory, forecastKey) and \ - self._advisoryHasValidKey(self._stats._previousAdvisory, forecastKey): - currentValue = self._stats._currentAdvisory[forecastKey] - previousValue = self._stats._previousAdvisory[forecastKey] - self._textProduct.debug_print("currentValue = %s" % (currentValue), 1) - self._textProduct.debug_print("previousValue = %s" % (previousValue), 1) - - if (currentValue - previousValue) >= threshold: - self._textProduct.debug_print("the current magnitude has increased by more than the threshold since the last advisory", 1) - return True - elif self._advisoryHasValidKey(self._stats._previousPreviousAdvisory, forecastKey): - previousPreviousValue = self._stats._previousPreviousAdvisory[forecastKey] - self._textProduct.debug_print("previousPreviousValue = 
%s" % (previousPreviousValue), 1) - - if (currentValue - previousPreviousValue) >= threshold: - self._textProduct.debug_print("the current magnitude has increased by more than the threshold since the previous previous advisory", 1) - return True - else: - self._textProduct.debug_print("the current magnitude does not meet the requirements to be considered increasing", 1) - return False - else: - self._textProduct.debug_print("the current magnitude did not increase past threshold and could not look at the previous previous advisory", 1) - return False - else: - self._textProduct.debug_print("the current advisory and/or previous advisory did not have key: %s" % (forecastKey), 1) - return False - - def _calculateThreatStatementTr(self, onsetHour, endHour, section): - phase = "default" - - self._textProduct.debug_print("section = %s" % (section), 1) - self._textProduct.debug_print("onset hour = %s" % (onsetHour), 1) - self._textProduct.debug_print("end hour = %s" % (endHour), 1) - - if section == "Wind": - threatGrid = "WindThreat" - highestHunkerDownThreatKey = "highestHunkerDownWindThreat" - elif section == "Surge": - threatGrid = "StormSurgeThreat" - highestHunkerDownThreatKey = "highestHunkerDownSurgeThreat" - - previousSegmentAdvisory = None - if self._textProduct._previousAdvisory is not None: - previousSegmentAdvisory = \ - self._textProduct._previousAdvisory['ZoneData'][self._segment] - currentSegmentAdvisory = \ - self._textProduct._currentAdvisory['ZoneData'][self._segment] - - if (onsetHour is not None): - if onsetHour > 36: - phase = "check plans" - elif onsetHour > 6: - phase = "complete preparations" - elif (onsetHour <= 6) and (endHour is not None) and (endHour > 0): - phase = "hunker down" - - previousHighestHunkerDownThreat = None - if previousSegmentAdvisory is not None: - previousHighestHunkerDownThreat = \ - previousSegmentAdvisory[highestHunkerDownThreatKey] - - self._textProduct.debug_print( - "%s previous highest hunker down threat is -> %s for 
%s" - % (section, previousHighestHunkerDownThreat, self._segment), 1) - - currentHunkerDownThreat = currentSegmentAdvisory[threatGrid] - - threatSeverity = {threat:severity for severity,threat in - enumerate(self._textProduct._threatKeyOrder())} - - if threatSeverity.get(currentHunkerDownThreat) > \ - threatSeverity.get(previousHighestHunkerDownThreat): - - currentSegmentAdvisory[highestHunkerDownThreatKey] = \ - currentHunkerDownThreat - - self._textProduct.debug_print( - "%s current highest hunker down threat is -> %s for %s" - % (section, currentSegmentAdvisory[highestHunkerDownThreatKey], self._segment), 1) - - self._textProduct.debug_print( - "Before default phase handling. %s phase is currently -> %s for %s" - % (section, phase, self._segment), 1) - - # We are here because we had no onset time - if phase == "default": - if currentSegmentAdvisory[threatGrid] in \ - ["Elevated", "Mod", "High", "Extreme"]: - - phase = "check plans" - - # Checking to see if we ever had a threat. If so, set to recovery - elif self._pastThreatsNotNone(threatGrid): - phase = "recovery" - - # If we are still default, that means we have no onset and have - # never had any threat - if phase == "default": - phase = "check plans" - - self._textProduct.debug_print( - "After default phase handling. 
%s phase is -> %s for %s" - % (section, phase, self._segment), 1) - - # --------------------------------------------------------------------- - - # "default" isn't ordered because it can occur at multiple points - # before the recovery phase - phaseOrder = [None, "check plans", "complete preparations", - "hunker down", "recovery"] - - if self._sectionHeaderName == "Storm Surge": - highestPhaseReachedKey = "StormSurgeHighestPhaseReached" - else: - # Flooding Rain and Tornado are tied to Wind so that's why they use - # Wind's phase - highestPhaseReachedKey = "WindHighestPhaseReached" - - previousHighestPhaseReached = None - if previousSegmentAdvisory is not None: - previousHighestPhaseReached = \ - previousSegmentAdvisory[highestPhaseReachedKey] - - self._textProduct.debug_print( - "%s previous highestPhaseReached is -> '%s' for '%s'" % - (self._sectionHeaderName, previousHighestPhaseReached, self._segment), 1) - - # Don't allow the event to regress to an earlier phase - if previousHighestPhaseReached == "recovery": - phase = "recovery" - - previousHighestPhaseIndex = phaseOrder.index(previousHighestPhaseReached) - currentPhaseIndex = phaseOrder.index(phase) - - if currentPhaseIndex > previousHighestPhaseIndex: - currentSegmentAdvisory[highestPhaseReachedKey] = phase - - currentHighestPhaseReached = currentSegmentAdvisory[highestPhaseReachedKey] - self._textProduct.debug_print( - "End of method. %s current phase is -> %s for %s" % - (section, phase, self._segment), 1) - self._textProduct.debug_print( - "End of method. 
%s current highestPhaseReached is -> %s for %s" % - (section, currentHighestPhaseReached, self._segment), 1) - - return currentHighestPhaseReached - - def _pastThreatsNotNone(self, threatGrid): - - # Will need to modify this to be both Wind and Surge once SS codes are added - previousAdvisories = self._textProduct._getPreviousAdvisories() - - # If there are NOT any advisories to process - no need to continue - if len(previousAdvisories) == 0: - return False - - # Look at all past advisories for this storm - for advisory in previousAdvisories: - - # We had a threat previously - if advisory["ZoneData"][self._segment][threatGrid] in ["Elevated", "Mod", "High", "Extreme"]: - return True - - return False - - def _setThreatStatementsProductParts(self, segmentDict, productSegment, tr): - - self._textProduct.debug_print("tr = %s %s" % - (self._textProduct._pp.pformat(tr), self._sectionHeaderName), 1) -# if tr is not None and self._stats._maxThreat is not None: - if tr is not None: - (planning, action, preparation) = self._getThreatStatements(productSegment, - self._sectionHeaderName, - self._stats._maxThreat, - tr) - - self._setProductPartValue(segmentDict, 'threatStatements', - [planning, action, preparation]) - else: - self._textProduct.debug_print("this is not a valid time range", 1) - return - - def _getThreatStatements(self, productSegment, sectionName, maxThreat, tr): -# import TCVDictionary -# threatStatements = TCVDictionary.ThreatStatements - - with open("/awips2/cave/etc/gfe/userPython/utilities/TCVDictionary.py", 'r') as pythonFile: - fileContents = pythonFile.read() - exec(fileContents) - - # ThreatStatements comes from TCVDictionary.py when it is exec'ed - threatStatements = ThreatStatements - - if tr == "recovery": - if "Surge" in sectionName: - maxThreat = self._textProduct._currentAdvisory['ZoneData'][self._segment]['highestHunkerDownSurgeThreat'] - elif "Wind" in sectionName: - maxThreat = 
self._textProduct._currentAdvisory['ZoneData'][self._segment]['highestHunkerDownWindThreat'] - - - self._textProduct.debug_print(40*"-", 1) - self._textProduct.debug_print("sectionName = %s, maxThreat = %s, tr = %s" % - (sectionName, maxThreat, self._textProduct._pp.pformat(tr)), 1) - -# if maxThreat is None: -# maxThreat = "None" - - statements = threatStatements[sectionName][maxThreat][tr] - planning = statements["planning"] - preparation = statements["preparation"] - action = statements["action"] - - return (planning, preparation, action) - - def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): - if self._stats._maxThreat is not None: - summary = self._getPotentialImpactsSummaryText(self._stats._maxThreat) - self._setProductPartValue(segmentDict, 'potentialImpactsSummary', summary) - - def _getPotentialImpactsSummaryText(self, maxThreat): - if self.isThreatNoneForEntireStorm: - return "POTENTIAL IMPACTS: Little to None" - if self._tr is not None and self._sectionHeaderName in ["Wind", "Storm Surge"]: - if self._tr == "hunker down": - return "POTENTIAL IMPACTS: Unfolding" - elif self._tr == "recovery": - return "REALIZED IMPACTS: Being Assessed" - - if maxThreat == "Extreme": - impactLevel = "Devastating to Catastrophic" - elif maxThreat == "High": - impactLevel = "Extensive" - elif maxThreat == "Mod": - impactLevel = "Significant" - elif maxThreat == "Elevated": - impactLevel = "Limited" - else: - impactLevel = "Little to None" - - return "POTENTIAL IMPACTS: " + impactLevel - - def _potentialImpactsStatements(self, segmentDict, productSegmentGroup, productSegment): - self._textProduct.debug_print("segment = %s, elementName = %s, maxThreat = %s" % - (productSegment[0], self._sectionHeaderName, self._stats._maxThreat), 1) - if self._stats._maxThreat is not None: - statements = self._getPotentialImpactsStatements(productSegment, self._sectionHeaderName, self._stats._maxThreat) - self._setProductPartValue(segmentDict, 
'potentialImpactsStatements', statements) - - def _getPotentialImpactsStatements(self, productSegment, elementName, maxThreat): - import TCVDictionary - potentialImpactStatements = TCVDictionary.PotentialImpactStatements - statements = potentialImpactStatements[elementName][maxThreat] - - import TCVAreaDictionary - tcv_AreaDictionary = TCVAreaDictionary.TCV_AreaDictionary - - segment, vtecRecords = productSegment - - self._textProduct.debug_print("zone number = %s, elementName = %s, maxThreat = %s, tr = %s" % - (segment, elementName, maxThreat, self._tr), 1) - - if segment in tcv_AreaDictionary: - potentialImpactStatements = tcv_AreaDictionary[segment]["potentialImpactsStatements"] - - # Check for any overrides - try: - statements = potentialImpactStatements[elementName][maxThreat] - except KeyError: - pass - - if self.isThreatNoneForEntireStorm: - return statements - - if self._tr is not None: - specialStatements = self._specialImpactsStatements() - if self._tr in specialStatements.keys(): - if self._tr in ["recovery", "hunker down"] and self.isThreatNoneForEntireStorm: - return statements - else: - return specialStatements[self._tr] - - # If this is the "default" case - if self._tr == "default" and len(statements) > 0: - if elementName in ["Wind", "Storm Surge"]: - if statements[0].find("If realized, ") == -1: - statements[0] = "If realized, " + statements[0][0].lower() + statements[0][1:] - - return statements - - # Specific hazard sections can override this to provide special impacts statements - def _specialImpactsStatements(self): - return {} - - def _preparationStatement(self, severityString): - preparationStatement = "" - if severityString == "Devastating" or severityString == "Extensive impacts": - preparationStatement += "Aggressive " - - preparationStatement += "preparations should be made for chance of " - - if severityString == "Devastating": - preparationStatement += "devastating to catastrophic" - elif severityString == "Extensive impacts": - 
preparationStatement += "extensive" - elif severityString == "Significant": - preparationStatement += "significant" - elif severityString == "Limited": - preparationStatement += "limited" - - preparationStatement += " impacts based on latest threat" - - return preparationStatement - -class WindSection(SectionCommon): - def __init__(self, textProduct, segment, stats): - SectionCommon.__init__(self, textProduct, segment, "Wind") - self._sectionName = 'windSection[\'' + segment + '\']' - self._stats = stats - self.isThreatNoneForEntireStorm = self._isThreatNoneForEntireStorm("WindThreat") - - def sectionParts(self, segment_vtecRecords_tuple): - parts = [ - 'sectionHeader', - 'forecastSubsection', - 'threatSubsection', - 'impactsSubsection', - ] - - return self._finalSectionParts(segment_vtecRecords_tuple, parts) - - def _forecastSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._latestForecastSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._WSPGridsAvailable: - self._peakWind(subsectionDict, productSegmentGroup, productSegment) - self._windowTS(subsectionDict, productSegmentGroup, productSegment) - self._windowHU(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'forecastSubsection', subsectionDict) - - def _latestForecastSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._WSPGridsAvailable: - self._setProductPartValue(segmentDict, 'latestForecastSummary', - "LATEST LOCAL FORECAST: Not available at this time. 
To be updated shortly.") - elif self._stats._maxWind is None: - self._setProductPartValue(segmentDict, 'latestForecastSummary', - "No wind forecast") - else: - categoryLabel = None - categories = self._moderatedMaxWindMph_categories() - moderatedMaxWind = self._ktToMph(self._stats._maxWind, "Wind") - for key in categories.keys(): - minVal, maxVal = categories[key] - if minVal <= moderatedMaxWind and moderatedMaxWind < maxVal: - categoryLabel = key - break - - forecastText = "LATEST LOCAL FORECAST: " - if categoryLabel is not None: - forecastText += "Equivalent " + categoryLabel + " force wind" - else: - segment, vtecRecords = productSegment - numRecords = len(vtecRecords) - possibleHazardsFound = False - - for i in range(numRecords): - vtecRecord = vtecRecords[i] - if (vtecRecord["phensig"] in ["HU.A", "HU.W", "TR.A", "TR.W"] or \ - self._stats._windowTS is not None) and \ - vtecRecord["act"] != "CAN": - forecastText += "Tropical storm force winds remain possible" - possibleHazardsFound = True - break - if not possibleHazardsFound: - forecastText += "Below tropical storm force wind" - - self._setProductPartValue(segmentDict, 'latestForecastSummary', forecastText) - - def _peakWind(self, segmentDict, productSegmentGroup, productSegment): - if self._stats._maxWind is not None: - windText = "Peak Wind Forecast: " - moderatedMaxWind = self._ktToMph(self._stats._maxWind, "Wind") - if moderatedMaxWind >= 74: - maxRange = 20 - elif moderatedMaxWind >= 58: - maxRange = 15 - elif moderatedMaxWind >= 20: - maxRange = 10 - else: - maxRange = 5 - - windText += str(int(moderatedMaxWind - maxRange)) + "-" + str(int(moderatedMaxWind)) + " mph" - if self._stats._maxGust is not None: - moderatedMaxWindGust = self._ktToMph(self._stats._maxGust, "WindGust") - -# # We want to round the wind gust to the nearest 5 kt -# moderatedMaxWindGust = \ -# self._textProduct.round(moderatedMaxWindGust, "Nearest", 5) - - windText += " with gusts to " + str(int(moderatedMaxWindGust)) + " mph" - - 
self._setProductPartValue(segmentDict, 'peakWind', windText) - - def _windowTS(self, segmentDict, productSegmentGroup, productSegment): - if self._stats._windowTS is not None: - self._setProductPartValue(segmentDict, 'windowTS', self._stats._windowTS) - - def _windowHU(self, segmentDict, productSegmentGroup, productSegment): - if self._stats._windowHU is not None: - self._setProductPartValue(segmentDict, 'windowHU', self._stats._windowHU) - - def _threatSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._lifePropertyThreatSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._WSPGridsAvailable: - self._threatTrend(subsectionDict, productSegmentGroup, productSegment) - self._threatStatements(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'threatSubsection', subsectionDict) - - def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._WSPGridsAvailable: - self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', - "Threat to Life and Property: Not available at this time. 
To be updated shortly.") - else: - SectionCommon._lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment) - - def _threatTrend(self, segmentDict, productSegmentGroup, productSegment): - threatTrendValue = \ - self._getThreatTrendValue("Wind", - magnitudeIncreaseThreshold=self._textProduct.mphToKt(15)) - - if threatTrendValue is not None: - # Convert the threat trend to a sentence - threatTrendSentence = \ - self._getThreatTrendSentence("wind", threatTrendValue) - - self._setProductPartValue(segmentDict, 'threatTrend', - threatTrendSentence) - - def _threatStatements(self, segmentDict, productSegmentGroup, productSegment): - self._tr = self._calculateThreatStatementTr(self._stats._onset34Hour, - self._stats._end34Hour, "Wind") - self._textProduct.debug_print("in _threatStatements tr = %s" % - (self._textProduct._pp.pformat(self._tr)), 1) - - if not hasattr(self._textProduct, "_windThreatStatementsTr"): - self._textProduct._windThreatStatementsTr = dict() - - self._textProduct._windThreatStatementsTr[self._segment] = self._tr - - self._setThreatStatementsProductParts(segmentDict, productSegment, - self._tr) - - def _impactsSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._potentialImpactsSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._WSPGridsAvailable: - self._potentialImpactsStatements(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict) - - def _specialImpactsStatements(self): - return {"hunker down": ["Potential impacts from the main wind event are unfolding.", - # "The extent of realized impacts will depend on the actual strength, duration, and exposure of the wind as experienced at particular locations.", - ], - "recovery": ["Little to no additional wind impacts expected. 
Community officials are now assessing the extent of actual wind impacts accordingly.", - ], - } - - def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._WSPGridsAvailable: - self._setProductPartValue(segmentDict, 'potentialImpactsSummary', - "POTENTIAL IMPACTS: Not available at this time. To be updated shortly.") - else: - SectionCommon._potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment) - - ### Supporting functions - def _moderatedMaxWindMph_categories(self): - # Dictionary representing wind thresholds in kts - # for category 1, 2, 3, 4 or 5 hurricanes. - return { - 'Cat 5 Hurricane': (157, 999), - 'Cat 4 Hurricane': (130, 157), - 'Cat 3 Hurricane': (111, 130), - 'Cat 2 Hurricane': ( 96, 111), - 'Cat 1 Hurricane': ( 74, 96), - 'Strong Tropical Storm': ( 58, 73), - 'Tropical Storm': ( 39, 58), - } - - def _ktToMph(self, value, element): - newVal = self._textProduct.ktToMph(value) - newVal = self._textProduct.round(newVal, "Nearest", self._increment(element)) - return newVal - - # This is a very simple way to round values -- if we need - # something more sophisticated, we'll add it later. 
- def _increment(self, element): - dict = { - "Wind": 5, - "WindGust": 5, - "InundationMax": 0.1, - } - return dict.get(element, 0) - -class StormSurgeSection(SectionCommon): - def __init__(self, textProduct, segment, stats): - SectionCommon.__init__(self, textProduct, segment, "Storm Surge") - self._sectionName = 'stormSurgeSection[\'' + segment + '\']' - self._stats = stats - self.isThreatNoneForEntireStorm = self._isThreatNoneForEntireStorm("StormSurgeThreat") - - def sectionParts(self, segment_vtecRecords_tuple): - parts = [ - 'sectionHeader', - 'forecastSubsection', - 'threatSubsection', - 'impactsSubsection', - ] - - return self._finalSectionParts(segment_vtecRecords_tuple, parts) - - def _forecastSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._latestForecastSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._PopulateSurge: - self._peakSurge(subsectionDict, productSegmentGroup, productSegment) - self._surgeWindow(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'forecastSubsection', subsectionDict) - - def _latestForecastSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._PopulateSurge: - self._setProductPartValue(segmentDict, 'latestForecastSummary', - "LATEST LOCAL FORECAST: Not available at this time. 
To be updated shortly.") - - elif "None" in self._stats._windowSurge or \ - self._stats._inundationMax is None or \ - self._stats._inundationMax <= 1: - self._setProductPartValue(segmentDict, 'latestForecastSummary', - "No storm surge inundation forecast") - else: - max = self._stats._inundationMax - summary = "LATEST LOCAL FORECAST: " - - if 1 < max and max < 4: - summary += "Localized" - elif 4 <= max and max < 12: - summary += "Life-threatening" - else: - summary += "Life-threatening and historic" - - self._setProductPartValue(segmentDict, 'latestForecastSummary', - summary + " storm surge possible") - - def _peakSurge(self, segmentDict, productSegmentGroup, productSegment): - self._textProduct.debug_print("_peakSurge _inundationMax = %s" % (self._stats._inundationMax), 1) - - # DR 17727: To make the output consistent, max threat should be calculated here - self._stats._maxThreat = "None" - - if self._stats._inundationMax is not None and self._stats._inundationMax > 1: - max = self._stats._inundationMax - if max > 10: - maxRange = 4 - self._stats._maxThreat = "Extreme" - elif max > 6: - maxRange = 3 - if max > 9: - self._stats._maxThreat = "Extreme" - else: - self._stats._maxThreat = "High" - elif max >= 3: - maxRange = 2 - if max > 3: - self._stats._maxThreat = "Mod" - else: - self._stats._maxThreat = "Elevated" - else: - maxRange = None - if max > 1: - self._stats._maxThreat = "Elevated" - - self._textProduct.debug_print("_peakSurge maxRange = %s" % (maxRange), 1) - self._textProduct.debug_print("_peakSurge _maxThreat = %s" % (self._stats._maxThreat), 1) - - # Save off the surge threat to the advisory - self._textProduct._currentAdvisory['ZoneData'][self._segment]["StormSurgeThreat"] = self._stats._maxThreat - - if maxRange is not None: - words = str(int(max - maxRange)) + "-" + str(int(max)) + " feet above ground" - elif max > 0: - - # We were getting really weird values of peak surge - # (e.g. "up to 1.70000004768 feet"). 
This fix will round up - # to the nearest integer value -# words = "up to " + str(max) + " feet above ground" - words = "up to " + str(int(max + 0.5)) + " feet above ground" - else: - words = "" - - if len(words) > 0: - self._setProductPartValue(segmentDict, 'peakSurge', - "Peak Storm Surge Inundation: The potential for " + words + " somewhere within surge prone areas") - else: - self._setProductPartValue(segmentDict, 'peakSurge', - "Peak Storm Surge Inundation: The potential for little to no storm surge inundation") - - def _surgeWindow(self, segmentDict, productSegmentGroup, productSegment): - if "None" not in self._stats._windowSurge: - self._setProductPartValue(segmentDict, 'surgeWindow', self._stats._windowSurge) - - def _threatSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._lifePropertyThreatSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._PopulateSurge: - self._threatTrend(subsectionDict, productSegmentGroup, productSegment) - self._threatStatements(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'threatSubsection', subsectionDict) - - def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._PopulateSurge: - self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', - "Threat to Life and Property: Not available at this time. 
To be updated shortly.") - else: - SectionCommon._lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment) - - def _threatTrend(self, segmentDict, productSegmentGroup, productSegment): - threatTrendValue = self._getThreatTrendValue("StormSurge", magnitudeIncreaseThreshold=4) - - if threatTrendValue is not None: - # Convert the threat trend to a sentence - threatTrendSentence = \ - self._getThreatTrendSentence("storm surge", threatTrendValue) - - self._setProductPartValue(segmentDict, 'threatTrend', - threatTrendSentence) - - def _threatStatements(self, segmentDict, productSegmentGroup, productSegment): - self._textProduct.debug_print("Surge Threat Statements", 1) - self._tr = self._calculateThreatStatementTr(self._stats._onsetSurgeHour, - self._stats._endSurgeHour, "Surge") - - self._setThreatStatementsProductParts(segmentDict, productSegment, - self._tr) - - def _impactsSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._potentialImpactsSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._PopulateSurge: - self._potentialImpactsStatements(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict) - - def _specialImpactsStatements(self): - return {"hunker down": ["Potential impacts from the main surge event are unfolding.", - # "The extent of realized impacts will depend on the actual height of storm surge moving onshore and the resulting depth of coastal flooding as experienced at particular locations.", - ], - "recovery": ["Little to no additional surge impacts expected. 
Community officials are now assessing the extent of actual surge impacts accordingly.", - ], - } - - def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._PopulateSurge: - self._setProductPartValue(segmentDict, 'potentialImpactsSummary', - "POTENTIAL IMPACTS: Not available at this time. To be updated shortly.") - else: - SectionCommon._potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment) - - -class FloodingRainSection(SectionCommon): - def __init__(self, textProduct, segment, stats): - SectionCommon.__init__(self, textProduct, segment, "Flooding Rain") - self._sectionName = 'floodingRainSection[\'' + segment + '\']' - self._stats = stats - self.isThreatNoneForEntireStorm = self._isThreatNoneForEntireStorm("FloodingRainThreat") - - def sectionParts(self, segment_vtecRecords_tuple): - parts = [ - 'sectionHeader', - 'forecastSubsection', - 'threatSubsection', - 'impactsSubsection', - ] - - return self._finalSectionParts(segment_vtecRecords_tuple, parts) - - def _forecastSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._latestForecastSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._WSPGridsAvailable: - self._peakRain(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'forecastSubsection', subsectionDict) - - def _latestForecastSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._WSPGridsAvailable: - self._setProductPartValue(segmentDict, 'latestForecastSummary', - "LATEST LOCAL FORECAST: Not available at this time. 
To be updated shortly.") - else: - summary = "" # was "No Flood Watch is in effect" - segment, vtecRecords = productSegment - - headlines, _ = self._textProduct._getAdditionalHazards() - headlineList = self._textProduct._checkHazard(headlines, - [("FA","A"),("FF","A")], - returnList = True) - - if len(headlineList) != 0: - # Extract the first flood headline out (there will only be 1 in effect at a time) - (key, areaList) = headlineList[0] - (headline, _, _, _) = key - - # Make sure it is for our zone - if self._segment in areaList: - summary = headline + " is in effect" - - self._setProductPartValue(segmentDict, 'latestForecastSummary', - "LATEST LOCAL FORECAST: " + summary) - - def _peakRain(self, segmentDict, productSegmentGroup, productSegment): - if self._stats._sumAccum is not None: - words = self._rainRange(int(self._stats._sumAccum + 0.5)) - - # If we have previous rainfall - if self._stats._prevAccum not in [0.0, None] and (int(self._stats._sumAccum + 0.5)) != 0: - words = "Additional " + words - self._setProductPartValue(segmentDict, 'peakRain', "Peak Rainfall Amounts: " + words) - - def _rainRange(self, sumAccum): - minAccum = 0 - maxAccum = 0 - - if sumAccum == 0 and self._stats._prevAccum not in [0.0, None]: - return "No additional significant rainfall forecast" - elif sumAccum == 0 and self._stats._prevAccum in [0.0, None]: - return "No significant rainfall forecast" - elif sumAccum == 1: - return "around 1 inch" - elif sumAccum == 2: - minAccum, maxAccum = (1, 3) - elif sumAccum == 3: - minAccum, maxAccum = (2, 4) - elif sumAccum in [4,5]: - minAccum, maxAccum = (3, 6) - elif sumAccum in [6,7]: - minAccum, maxAccum = (4, 8) - elif sumAccum in [8,9]: - minAccum, maxAccum = (6, 10) - elif sumAccum in [10,11]: - minAccum, maxAccum = (8, 12) - elif sumAccum in [12,13,14]: - minAccum, maxAccum = (10, 15) - elif sumAccum in [15,16,17]: - minAccum, maxAccum = (12, 18) - elif 17 < sumAccum and sumAccum < 25: - minAccum, maxAccum = (18, 24) - else: - return 
"More than two feet" - - return "%d-%d inches, with locally higher amounts" % (minAccum, maxAccum) - - def _threatSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._lifePropertyThreatSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._WSPGridsAvailable: - self._threatTrend(subsectionDict, productSegmentGroup, productSegment) - self._threatStatements(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'threatSubsection', subsectionDict) - - def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._WSPGridsAvailable: - self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', - "Threat to Life and Property: Not available at this time. To be updated shortly.") - else: - SectionCommon._lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment) - - def _threatTrend(self, segmentDict, productSegmentGroup, productSegment): - threatTrendValue = self._getThreatTrendValue("FloodingRain", magnitudeIncreaseThreshold=4) - - if threatTrendValue is not None: - # Convert the threat trend to a sentence - threatTrendSentence = \ - self._getThreatTrendSentence("flooding rain", threatTrendValue) - - self._setProductPartValue(segmentDict, 'threatTrend', - threatTrendSentence) - - def _threatStatements(self, segmentDict, productSegmentGroup, productSegment): - self._tr = self._textProduct._windThreatStatementsTr[self._segment] - - self._setThreatStatementsProductParts(segmentDict, productSegment, self._tr) - - def _impactsSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._potentialImpactsSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._WSPGridsAvailable: - self._potentialImpactsStatements(subsectionDict, 
productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict) - - def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._WSPGridsAvailable: - self._setProductPartValue(segmentDict, 'potentialImpactsSummary', - "POTENTIAL IMPACTS: Not available at this time. To be updated shortly.") - else: - SectionCommon._potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment) - -class TornadoSection(SectionCommon): - def __init__(self, textProduct, segment, stats): - SectionCommon.__init__(self, textProduct, segment, "Tornado") - self._sectionName = 'tornadoSection[\'' + segment + '\']' - self._stats = stats - self.isThreatNoneForEntireStorm = self._isThreatNoneForEntireStorm("TornadoThreat") - - def sectionParts(self, segment_vtecRecords_tuple): - parts = [ - 'sectionHeader', - 'forecastSubsection', - 'threatSubsection', - 'impactsSubsection', - ] - - return self._finalSectionParts(segment_vtecRecords_tuple, parts) - - def _forecastSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._latestForecastSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._WSPGridsAvailable: - self._tornadoSituation(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'forecastSubsection', subsectionDict) - - def _latestForecastSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._WSPGridsAvailable: - self._setProductPartValue(segmentDict, 'latestForecastSummary', - "LATEST LOCAL FORECAST: Not available at this time. 
To be updated shortly.") - else: - summary = "" - segment, vtecRecords = productSegment - - headlines, _ = self._textProduct._getAdditionalHazards() - headlineList = self._textProduct._checkHazard(headlines, - [("TO","A")], - returnList = True) - if len(headlineList) != 0: - # Extract the first tornado headline out (there will only be 1 in effect at a time) - (key, areaList) = headlineList[0] - (headline, _, _, _) = key - - # Make sure it is for our zone - if self._segment in areaList: - summary = "Tornado Watch is in effect" - - self._setProductPartValue(segmentDict, 'latestForecastSummary', - "LATEST LOCAL FORECAST: " + summary) - - def _tornadoSituation(self, segmentDict, productSegmentGroup, productSegment): - - # Now add the bullet about tornado situation - if self._stats._maxThreat in ["Extreme", "High"]: - qualifier = "very favorable" - elif self._stats._maxThreat in ["Mod"]: - qualifier = "favorable" - elif self._stats._maxThreat in ["Elevated"]: - qualifier = "somewhat favorable" - else: - qualifier = "unfavorable" - - words = "Situation is %s for tornadoes" % (qualifier) - - self._setProductPartValue(segmentDict, 'tornadoSituation', words) - - - def _threatSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._lifePropertyThreatSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._WSPGridsAvailable: - self._threatTrend(subsectionDict, productSegmentGroup, productSegment) - self._threatStatements(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'threatSubsection', subsectionDict) - - def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._WSPGridsAvailable: - self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', - "Threat to Life and Property: Not available at this time. 
To be updated shortly.") - else: - SectionCommon._lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment) - - def _threatTrend(self, segmentDict, productSegmentGroup, productSegment): - threatTrendValue = self._getThreatTrendValue("Tornado", - magnitudeIncreaseThreshold=None) - - if threatTrendValue is not None: - # Convert the threat trend to a sentence - threatTrendSentence = \ - self._getThreatTrendSentence("tornado", threatTrendValue) - - self._setProductPartValue(segmentDict, 'threatTrend', - threatTrendSentence) - - def _threatStatements(self, segmentDict, productSegmentGroup, productSegment): - self._tr = self._textProduct._windThreatStatementsTr[self._segment] - - self._setThreatStatementsProductParts(segmentDict, productSegment, self._tr) - - def _impactsSubsection(self, segmentDict, productSegmentGroup, productSegment): - subsectionDict = collections.OrderedDict() - self._potentialImpactsSummary(subsectionDict, productSegmentGroup, productSegment) - - if self._textProduct._WSPGridsAvailable: - self._potentialImpactsStatements(subsectionDict, productSegmentGroup, productSegment) - - if len(subsectionDict) > 0: - self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict) - - def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): - if not self._textProduct._WSPGridsAvailable: - self._setProductPartValue(segmentDict, 'potentialImpactsSummary', - "POTENTIAL IMPACTS: Not available at this time. 
To be updated shortly.") - else: - SectionCommon._potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment) - - -############################################################### -### TCV Statistics Classes - -class SectionCommonStats(): - def __init__(self, textProduct, segment): - self._textProduct = textProduct - self._segment = segment - - self._initializeSegmentAdvisories() - - # The maximum threat level during the entire advisory - self._maxThreat = None - - - def _initializeSegmentAdvisories(self): - self._currentAdvisory = self._textProduct._currentAdvisory['ZoneData'][self._segment] - - self._previousAdvisory = None - self._textProduct.debug_print("textProduct._previousAdvisory = '%s'" % (self._textProduct._previousAdvisory)) - if self._textProduct._previousAdvisory is not None: - if self._textProduct._previousAdvisory['ZoneData'].has_key(self._segment): - self._previousAdvisory = self._textProduct._previousAdvisory['ZoneData'][self._segment] - - self._textProduct.debug_print("textProduct._previousPreviousAdvisory = '%s'" % \ - (self._textProduct._previousPreviousAdvisory)) - self._previousPreviousAdvisory = None - if self._textProduct._previousPreviousAdvisory is not None: - self._previousPreviousAdvisory = self._textProduct._previousPreviousAdvisory['ZoneData'][self._segment] - - def _updateThreatStats(self, tr, statDict, threatGridName): - self._textProduct.debug_print("In _updateThreatStats for %s" % (threatGridName), 1) - self._textProduct.debug_print("maxThreat = %s" % (self._maxThreat), 1) - - threatLevel = self._textProduct._getStatValue(statDict, threatGridName) - if threatLevel is not None: - threatLevels = self._textProduct._threatKeyOrder() - self._textProduct.debug_print("current threatLevel = %s" % (threatLevel), 1) - if self._maxThreat is None or \ - threatLevels.index(threatLevel) > threatLevels.index(self._maxThreat): - self._textProduct.debug_print("updating max threat to = %s" % (threatLevel), 1) - self._maxThreat = 
threatLevel - - def _calculateHourOffset(self, targetTime): - self._textProduct.debug_print("Calculating hours from issuance time for %s" - % (self._textProduct._pp.pformat(targetTime)), 1) - self._textProduct.debug_print("target unix time = %s" - % (self._textProduct._pp.pformat(targetTime.unixTime())), 1) - self._textProduct.debug_print("issuance unix time = %s" - % (self._textProduct._pp.pformat(self._textProduct._issueTime_secs)), 1) - - seconds = targetTime.unixTime() - self._textProduct._issueTime_secs - hour = int(round(seconds/60.0/60.0)) - self._textProduct.debug_print("hour offset = %s" % (hour), 1) - if hour < 0: - hour = 0 - - self._textProduct.debug_print("final hour offset = %s" % (hour), 1) - - return hour - -class WindSectionStats(SectionCommonStats): - def __init__(self, textProduct, segment, statList, timeRangeList): - SectionCommonStats.__init__(self, textProduct, segment) - # The maximum wind speed that occurs during the entire advisory. - self._maxWind = None - - # The maximum wind gust speed that occurs during the entire advisory. - self._maxGust = None - - # The number of hours since the issuance time when the wind first becomes >= 34 kts. - self._onset34Hour = None - - # The number of hours since the issuance time when the wind drops below 34 kts. - self._end34Hour = None - - # Text describing when tropical storm force winds (>= 34 kts) start and end. - self._windowTS = None - - # Text describing when hurricane force winds (>= 64 kts) start and end. - self._windowHU = None - - # Only gather stats if we have the wind speed probability grids available - if self._textProduct._WSPGridsAvailable: - self._textProduct.debug_print("#"*90) - self._textProduct.debug_print("Setting wind stats for %s" % (segment), 1) - - self._setStats(statList, timeRangeList) - self._textProduct.debug_print("#"*90) - - # pws34int and pws64int grids give you the probability of 34/64 kt winds - # occurring during the grid time range. 
The grids are 6 hours long so they - # give you a more specific starting or ending time which allows for better - # descriptions of when events start. - class PwsXXintStats(): - def __init__(self): - # The maximum value in pws34/64int grids across the entire advisory. - self.max = None - - # The number of hours since the issuance time when this maximum value first occurs. - self.onsetHour = None - - # pwsD34, pwsN34, pwsD64 and pwsN64 grids give you the probability of 34/64 - # kt winds occurring during the grid time range. They are 12 hour long day - # and night grids that match ZPF periods. They give you a ball park idea of - # when an event will start or end and if it's day or night time and then - # the pwsXXint grids can be used to narrow down the time frame. - class PwsTXXStats(): - def __init__(self): - # Depending on when the issuance time is, there may be a day or night - # grid that we need to drop at the beginning so that we start with the - # grid that occurs during our issuance time so that our windows are - # accurate. - - # We need to do special logic the first time around so record if this - # is the first run through the loop or not. - self.firstRun = True - - # Indicates if we need to possibly drop the first grid or not. - self.dropFirstGridType = None - - # Indicates if we actually did drop the first grid. Sometimes we will - # determine that we need to drop the grid if it exists but it doesn't - # end up existing so we don't actually drop anything in some cases. - self.droppedFirstGrid = False - - # Indicate the period (actually a 0-based index into a list of periods) - # that contains the first correct grid. - self.periodWithFirstCorrectGrid = None - - # The AbsTime of when the grids first met or exceeded the threshold. - self.onsetTime = None - - # The AbsTime of when the grids last met or exceeded the threshold. - self.endTime = None - - # Start and end hour information from the Wind grids. 
- class WindStats(): - def __init__(self): - # The number of hours since issuance time when the wind first gets >= 34/64 knots. - self.onsetHour = None - # The number of hours since issuance time when the wind is last >= 34/64 knots. - self.endHour = None - - # Information needed for creating the wind window text. - class WindowInfo(): - def __init__(self, eventType): - # The type (as a string) of the event this window is for (Tropical Storm or Hurricane). - self.eventType = eventType - # The number of hours since issuance time when the tropical storm or hurricane starts. - self.onsetHour = None - # The number of hours since issuance time when the tropical storm or hurricane ends. - self.endHour = None - # The resolution to use when determining the wording for the end time of the window. - self.endTimeResolution = None - # Determines if we should create window text for this event (did wind exceed threshold?) - self.shouldCreateWindowText = True - # The constructed window text. - self.windowText = None - - def _setStats(self, statList, timeRangeList): - pws34intStats = self.PwsXXintStats() - pws64intStats = self.PwsXXintStats() - pwsT34Stats = self.PwsTXXStats() - pwsT64Stats = self.PwsTXXStats() - wind34timeInfo = self.WindStats() - wind64timeInfo = self.WindStats() - prob34Onset = None - - for index in range(len(statList)): - tr, _ = timeRangeList[index] - statDict = statList[index] - - self._textProduct.debug_print("="*90, 1) - self._textProduct.debug_print("\n\ntr = %s" % (tr), 1) - - self._textProduct.debug_print("*"*90, 1) - currentPeriod = self._determineCurrentPeriod(tr) - - self._textProduct.debug_print("*"*90, 1) - self._updateStatsForPwsXXint(tr, statDict, "pws34int", pws34intStats) - self._textProduct.debug_print("-"*45, 1) - self._updateStatsForPwsXXint(tr, statDict, "pws64int", pws64intStats) - - self._textProduct.debug_print("*"*90, 1) - self._updateStatsForPwsTXX(tr, statDict, "pwsD34", "pwsN34", pwsT34Stats, currentPeriod) - 
self._textProduct.debug_print("-"*45, 1) - self._updateStatsForPwsTXX(tr, statDict, "pwsD64", "pwsN64", pwsT64Stats, currentPeriod) - - # Calculate an additional probabilistic onset hour for scenarios where we weren't - # able to calculate the onset the usual way. This is only done for tropical - # storms to help determine the correct TR (check plans, etc.) - if prob34Onset is None and pwsT34Stats.onsetTime is not None: - self._textProduct.debug_print("*"*90, 1) - self._textProduct.debug_print("Found pwsD/N34 onset time, calculating prob34Onset", 1) - prob34Onset = self._calculateProbOnset(timeRangeList, statList, index, "pws34int") - - self._textProduct.debug_print("*"*90, 1) - self._updateStatsForWind(tr, statDict, wind34timeInfo, speed=34) - self._textProduct.debug_print("-"*45, 1) - self._updateStatsForWind(tr, statDict, wind64timeInfo, speed=64) - - self._textProduct.debug_print("*"*90, 1) - self._updateMaxWindGust(statDict) - - self._textProduct.debug_print("*"*90, 1) - self._updateThreatStats(tr, statDict, "WindThreat") - - self._textProduct.debug_print("="*90, 1) - - #Tropical Storm - self._textProduct.debug_print("Tropical Storm Window:", 1) - tropicalStormWindow = self.WindowInfo("Tropical Storm") - tropicalStormWindow = self._computeWindOnsetAndEnd(tropicalStormWindow, - wind34timeInfo, - pws34intStats, - pwsT34Stats, - prob34Onset) - tropicalStormWindow = self._createWindowText(tropicalStormWindow) - # The tropical storm onset and end hours will be used for calculating threat statements - self._onset34Hour = tropicalStormWindow.onsetHour - self._end34Hour = tropicalStormWindow.endHour - self._windowTS = tropicalStormWindow.windowText - - #Hurricane - self._textProduct.debug_print("-"*45, 1) - self._textProduct.debug_print("Hurricane Window:", 1) - hurricaneWindow = self.WindowInfo("Hurricane") - hurricaneWindow = self._computeWindOnsetAndEnd(hurricaneWindow, - wind64timeInfo, - pws64intStats, - pwsT64Stats) - - # Make sure the hurricane window end time 
resolution is the same - # resolution used for tropical storms so that hurricanes don't appear - # to end after tropical storms - hurricaneWindow.endTimeResolution = tropicalStormWindow.endTimeResolution - - hurricaneWindow = self._createWindowText(hurricaneWindow) - self._windowHU = hurricaneWindow.windowText - - self._textProduct.debug_print("-"*45, 1) - self._currentAdvisory["WindThreat"] = self._maxThreat - self._currentAdvisory["WindForecast"] = self._maxWind - - self._textProduct.debug_print("+"*60, 1) - self._textProduct.debug_print("In WindSectionStats._setStats", 1) - self._textProduct.debug_print("pws34intStats.max = %s" % (pws34intStats.max), 1) - self._textProduct.debug_print("pws64intStats.max = %s" % (pws64intStats.max), 1) - self._textProduct.debug_print("pwsT34Stats.periodWithFirstCorrectGrid = %s" % (pwsT34Stats.periodWithFirstCorrectGrid), 1) - self._textProduct.debug_print("pwsT34Stats.endTime = '%s'" % (pwsT34Stats.endTime), 1) - self._textProduct.debug_print("pwsT64Stats.periodWithFirstCorrectGrid = %s" % (pwsT64Stats.periodWithFirstCorrectGrid), 1) - self._textProduct.debug_print("pwsT64Stats.endTime = '%s'" % (pwsT64Stats.endTime), 1) - self._textProduct.debug_print("self._maxWind = %s" % (self._maxWind), 1) - self._textProduct.debug_print("self._maxGust = %s" % (self._maxGust), 1) - self._textProduct.debug_print("self._maxThreat = %s" % (self._maxThreat), 1) - - def _determineCurrentPeriod(self, tr): - currentPeriod = None - for periodIndex, periodTr in enumerate(self._textProduct._periodList): - self._textProduct.debug_print("\n\nperiodIndex = %d periodList tr = %s" - % (periodIndex, repr(periodTr)), 1) - - if (periodIndex == 0) and (tr.startTime().unixTime() < periodTr.startTime().unixTime()): - # If the tr is before the first period, use the first period - currentPeriod = periodIndex - break - elif (periodIndex == len(self._textProduct._periodList) - 1) and \ - (tr.startTime().unixTime() >= periodTr.endTime().unixTime()): - # If the tr is 
after (or at the end of) the last period, use the last period - currentPeriod = periodIndex - break - elif periodTr.contains(tr.startTime()): - currentPeriod = periodIndex - break - - self._textProduct.debug_print("\n\ncurrentPeriod index = %s" % (currentPeriod), 1) - self._textProduct.debug_print("\n\ncurrentPeriod tr = %s" - % (self._textProduct._periodList[currentPeriod]), 1) - - return currentPeriod - - def _updateStatsForPwsXXint(self, tr, statDict, gridName, pwsXXintStats): - pwsXXint = self._textProduct._getStatValue(statDict, gridName, "Max") - - self._textProduct.debug_print("Wind Window Debug: pwsXXintStats gridName = %s" % (gridName), 1) - self._textProduct.debug_print("Wind Window Debug: pwsXXintStats pwsXXint = %s" % (pwsXXint), 1) - - if pwsXXint is not None: - if pwsXXintStats.max is None or pwsXXint > pwsXXintStats.max: - pwsXXintStats.max = pwsXXint - pwsXXintStats.onsetHour = self._calculateHourOffset(tr.startTime()) - - self._textProduct.debug_print("Wind Window Debug: pwsXXintStats Found a new max value!", 1) - self._textProduct.debug_print("Wind Window Debug: pwsXXintStats onsetHour = %s" % (pwsXXintStats.onsetHour), 1) - - def _updateStatsForPwsTXX(self, tr, statDict, dayGridName, nightGridName, pwsTXXStats, period): - if pwsTXXStats.firstRun: - self._textProduct.debug_print("first run for _updateStatsForPwsTXX!", 1) - self._textProduct.debug_print("grids: %s %s" % (dayGridName, nightGridName), 1) - pwsTXXStats.firstRun = False - localtime = time.localtime(self._textProduct._issueTime_secs) - self._textProduct.debug_print("localtime = %s" % (localtime), 1) - - if localtime.tm_hour >= 15: # 3PM to midnight - self._textProduct.debug_print("between 3PM and midnight!", 1) - pwsTXXStats.dropFirstGridType = "day" - self._textProduct.debug_print("need to drop the day grid(s) if they come first", 1) - elif localtime.tm_hour >= 3 and localtime.tm_hour < 12: # 3AM to noon - self._textProduct.debug_print("between 3AM and noon!", 1) - 
pwsTXXStats.dropFirstGridType = "night" - self._textProduct.debug_print("need to drop the night grid(s) if they come first", 1) - else: - self._textProduct.debug_print("not dropping any grids!", 1) - - - pwsDXX = self._textProduct._getStatValue(statDict, dayGridName, "Max") - pwsNXX = self._textProduct._getStatValue(statDict, nightGridName, "Max") - - maxPws = None - self._textProduct.debug_print("%s pwsDXX = %s pwsNXX = %s " % - (self._textProduct._pp.pformat(tr),pwsDXX, pwsNXX), 1) - - # Determine coversion factor to get DAY and NIGHT in UTC - utcHourOffset = self._calculateUTCandLocalHourOffset() - - # See if this hour a valid DAYtime hour - isValidDay = self._isValidDayTime(tr.startTime().hour, - self._textProduct.DAY() + utcHourOffset, - self._textProduct.NIGHT() + utcHourOffset) - - # If we have pwsD data, and this is a time period it applies to - if pwsDXX is not None and isValidDay: - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats DAY", 1) - - if pwsTXXStats.dropFirstGridType == "day": - self._textProduct.debug_print("Wind Window Debug: dropping a day grid", 1) - self._textProduct.debug_print("Wind Window Debug: tr = %s, period = %s" % (tr, period), 1) - pwsTXXStats.droppedFirstGrid = True - return - elif pwsTXXStats.dropFirstGridType == "night": - # We dropped all the necessary grids now that we found a day grid so stop dropping - pwsTXXStats.dropFirstGridType = None - pwsTXXStats.periodWithFirstCorrectGrid = period - self._textProduct.debug_print("Wind Window Debug: found day grid; done dropping night grids", 1) - self._textProduct.debug_print("Wind Window Debug: tr = %s, period = %s" % (tr, period), 1) - - maxPws = pwsDXX - - # If we have pwsN data, and this is a time period it applies to - elif pwsNXX is not None and not isValidDay: - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats NIGHT", 1) - - if pwsTXXStats.dropFirstGridType == "night": - self._textProduct.debug_print("Wind Window Debug: dropping a night grid", 1) - 
self._textProduct.debug_print("Wind Window Debug: tr = %s, period = %s" % (tr, period), 1) - pwsTXXStats.droppedFirstGrid = True - return - elif pwsTXXStats.dropFirstGridType == "day": - # We dropped all the necessary grids now that we found a night grid so stop dropping - pwsTXXStats.dropFirstGridType = None - pwsTXXStats.periodWithFirstCorrectGrid = period - self._textProduct.debug_print("Wind Window Debug: found night grid; done dropping day grids", 1) - self._textProduct.debug_print("Wind Window Debug: tr = %s, period = %s" % (tr, period), 1) - - maxPws = pwsNXX - - # These two statements will need to be reevaluated when this product is - # expanded to the Pacific basin (MHB - 02/03/2015) - elif pwsDXX is not None and tr.startTime().hour in [21, 0, 3]: - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats DAY ignored", 1) - - elif pwsNXX is not None and tr.startTime().hour in [9, 12, 15]: - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats NIGHT ignored", 1) - - threshold34index = 0 - threshold64index = 1 - if maxPws is not None: - # Don't shift if the period with the first correct grid is period 0 - if pwsTXXStats.droppedFirstGrid and pwsTXXStats.periodWithFirstCorrectGrid != 0: - period = period - 1 # We dropped the first grid so we are off-by-one - self._textProduct.debug_print("shifting period back 1...new period = %s" % - (period), 1) - - # Just set the first correct period to period zero, if it hasn't - # been set yet, so the missing grid check will not fail - if pwsTXXStats.periodWithFirstCorrectGrid is None: - pwsTXXStats.periodWithFirstCorrectGrid = 0 - - if "64" in dayGridName: - index = threshold64index - else: #if "34" - index = threshold34index - - threshold = None - thresholds = self._textProduct.windSpdProb_thresholds(threshold, threshold) - if period == 0: - (thresholdLow, thresholdHigh) = thresholds[period][index] - threshold = thresholdLow - else: - threshold = thresholds[period][index] - 
self._textProduct.debug_print("Probability threshold for period %s = %s" - % (period, threshold), 1) - - if maxPws > threshold: - if pwsTXXStats.onsetTime is None: - pwsTXXStats.onsetTime = tr.startTime() - - trEndTime = tr.endTime() - periodEndTime = self._textProduct._periodList[period].endTime() - - # Don't go past the end of the period - if trEndTime <= periodEndTime: - pwsTXXStats.endTime = trEndTime - else: - pwsTXXStats.endTime = periodEndTime - - self._textProduct.debug_print("Wind Window Debug: probability threshold = %s (period index %s)" % (threshold, period), 1) - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats dayGridName = %s" % (dayGridName), 1) - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats nightGridName = %s" % (nightGridName), 1) - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats original tr = %s" % (self._textProduct._pp.pformat(tr)), 1) - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats maxPws = %s" %(self._textProduct._pp.pformat(maxPws)), 1) - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats onsetTime = %s" % (self._textProduct._pp.pformat(pwsTXXStats.onsetTime)), 1) - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats endTime = %s" % (self._textProduct._pp.pformat(pwsTXXStats.endTime)), 1) - self._textProduct.debug_print("Wind Window Debug: period tr = %s" % (self._textProduct._pp.pformat(self._textProduct._periodList[period])), 1) - - def _updateStatsForWind(self, tr, statDict, timeInfo, speed): - self._textProduct.debug_print("Wind Window Debug: In _updateStatsForWind", 1) - self._textProduct.debug_print("Wind Window Debug: timeInfo tr = %s" % (self._textProduct._pp.pformat(tr)), 1) - self._textProduct.debug_print("Wind Window Debug: timeInfo speed threshold = %s" % (speed), 1) - self._textProduct.debug_print("Wind Window Debug: timeInfo maxWind = %s" % (self._maxWind), 1) - - wind = self._textProduct._getStatValue(statDict, "Wind", "Max", 
self._textProduct.VECTOR()) - self._textProduct.debug_print("Wind Window Debug: current wind value = %s" % (wind), 1) - - if wind is not None: - if self._maxWind is None or wind > self._maxWind: - self._textProduct.debug_print("Wind Window Debug: Found new max wind value!", 1) - self._maxWind = wind - - if wind >= speed: - self._textProduct.debug_print("Wind Window Debug: current wind >= speed!", 1) - - if timeInfo.onsetHour is None: - timeInfo.onsetHour = self._calculateHourOffset(tr.startTime()) - - self._textProduct.debug_print("Wind Window Debug: onsetHour was None", 1) - self._textProduct.debug_print("Wind Window Debug: timeInfo onsetHour = %s" % (timeInfo.onsetHour), 1) - - # Always update the end time (it's the last time we exceeded the speed) - timeInfo.endHour = self._calculateHourOffset(tr.endTime()) - self._textProduct.debug_print("Wind Window Debug: timeInfo endHour = %s" % (timeInfo.endHour), 1) - - def _updateMaxWindGust(self, statDict): - windGust = self._textProduct._getStatValue(statDict, "WindGust", "Max") - self._textProduct.debug_print("Wind Window Debug: current windGust value = %s" % (windGust), 1) - - if windGust is not None: - if self._maxGust is None or windGust > self._maxGust: - self._textProduct.debug_print("Wind Window Debug: Found new max windGust value!", 1) - self._maxGust = windGust - - def _calculateProbOnset(self, timeRangeList, statList, index, pwsXXintGridName): - self._textProduct.debug_print("Wind Window Debug: in _calculateProbOnset", 1) - - # Calculate corresponding maximum intersecting pwsXXint tr - maxPwsXXintTr = self._calculateMaxPwsXXintTr(timeRangeList, statList, index, - pwsXXintGridName) - self._textProduct.debug_print("Wind Window Debug: maxPwsXXintTr = %s" % (maxPwsXXintTr), 1) - - # Calculate hours since issuance time to start time - probOnset = self._calculateHourOffset(maxPwsXXintTr.startTime()) - self._textProduct.debug_print("Wind Window Debug: probOnset = %s" % (probOnset), 1) - - return probOnset - - def 
_calculateMaxPwsXXintTr(self, timeRangeList, statList, index, gridName): - self._textProduct.debug_print("Wind Window Debug: gridName = %s" % (gridName), 1) - - # The current tr is always the first pwsXXint grid that intersects the onset pwsTXX grid - currTr, _ = timeRangeList[index] - currStatDict = statList[index] - currPwsXXint = self._textProduct._getStatValue(currStatDict, gridName, "Max") - self._textProduct.debug_print("Wind Window Debug: currTr = %s" % (currTr), 1) - self._textProduct.debug_print("Wind Window Debug: currPwsXXint = %s" % (currPwsXXint), 1) - - # Now try to find the next intersecting pwsXXint grid. - # pwsXXint grids are 6-hours long with times: 00-06, 06-12, 12-18, 18-00 GMT - if 0 <= currTr.startTime().hour < 6: - nextTrStartHour = 6 - elif 6 <= currTr.startTime().hour < 12: - nextTrStartHour = 12 - elif 12 <= currTr.startTime().hour < 18: - nextTrStartHour = 18 - else: - nextTrStartHour = 0 - - nextTr = None - nextPwsXXint = None - - for nextIndex in range(index + 1, len(statList)): - nextTr, _ = timeRangeList[nextIndex] - if nextTr.startTime().hour != nextTrStartHour: - continue - - nextStatDict = statList[nextIndex] - nextPwsXXint = self._textProduct._getStatValue(nextStatDict, gridName, "Max") - self._textProduct.debug_print("Wind Window Debug: nextTr = %s" % (nextTr), 1) - self._textProduct.debug_print("Wind Window Debug: nextPwsXXint = %s" % (nextPwsXXint), 1) - - if (nextPwsXXint is None) or (currPwsXXint >= nextPwsXXint): - return currTr - else: - return nextTr - - def _computeWindOnsetAndEnd(self, windowInfo, windTimeInfo, pwsXXintStats, pwsTXXStats, probOnset=None): - self._textProduct.debug_print("Wind Window Debug: In _computeWindOnsetAndEnd", 1) - self._textProduct.debug_print("Wind Window Debug: windTimeInfo.onsetHour = %s" % (windTimeInfo.onsetHour), 1) - self._textProduct.debug_print("Wind Window Debug: pwsXXintStats.onsetHour = %s" % (pwsXXintStats.onsetHour), 1) - self._textProduct.debug_print("Wind Window Debug: 
windTimeInfo.endHour = %s" % (windTimeInfo.endHour), 1) - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats.endTime = %s" % (pwsTXXStats.endTime), 1) - if pwsTXXStats.endTime is not None: - self._textProduct.debug_print("Wind Window Debug: pwsTXXStats end hour = %s" % (self._calculateHourOffset(pwsTXXStats.endTime)), 1) - self._textProduct.debug_print("Wind Window Debug: probOnset = %s" % (probOnset), 1) - - if windTimeInfo.onsetHour is not None: - if windTimeInfo.onsetHour < 6: - self._textProduct.debug_print("onsetHour for wind is < 6, using that as window onset hour", 1) - windowInfo.onsetHour = windTimeInfo.onsetHour - self._textProduct.debug_print("onsetHour = %s" % (windowInfo.onsetHour), 1) - elif pwsXXintStats.onsetHour is not None: - self._textProduct.debug_print("onsetHour for pwsXXintStats is not None", 1) - self._textProduct.debug_print("using min onset hour betweeen wind and pwsXXintStats", 1) - windowInfo.onsetHour = min(windTimeInfo.onsetHour, pwsXXintStats.onsetHour) - self._textProduct.debug_print("onsetHour = %s" % (windowInfo.onsetHour), 1) - else: - self._textProduct.debug_print("ERROR: onsetHour for pwsXXintStats is None. 
Check the grids.", 1) - return windowInfo - else: - self._textProduct.debug_print("windTimeInfo.onsetHour was None, using probOnset (%s) instead" - % probOnset, 1) - windowInfo.onsetHour = probOnset - self._textProduct.debug_print("onsetHour = %s" % (windowInfo.onsetHour), 1) - - self._textProduct.debug_print("Since wind threshold not exceeded, will not create window text", 1) - windowInfo.shouldCreateWindowText = False - - if windowInfo.onsetHour is None: - # We won't have a timing window - self._textProduct.debug_print("onsetHour for wind is None", 1) - return windowInfo - - windEndHourExists = windTimeInfo.endHour is not None - windEndHourOutOfRange = windTimeInfo.endHour > 114 or windTimeInfo.endHour < 6 - pwsTXXEndTimeExists = pwsTXXStats.endTime is not None - - if (not windEndHourExists or windEndHourOutOfRange) or \ - (windEndHourExists and not pwsTXXEndTimeExists): - self._textProduct.debug_print("using Wind end hour for the window wind hour", 1) - self._textProduct.debug_print("\twind end hour exists? %s" % windEndHourExists, 1) - self._textProduct.debug_print("\twind end hour out of range? %s" % windEndHourOutOfRange, 1) - self._textProduct.debug_print("\tpwsTXX end time exists? 
%s" % pwsTXXEndTimeExists, 1) - windowInfo.endHour = windTimeInfo.endHour - self._textProduct.debug_print("endHour = %s" % (windowInfo.endHour), 1) - elif pwsTXXEndTimeExists: - self._textProduct.debug_print("endTime for pwsTXXStats is not None", 1) - self._textProduct.debug_print("converting endTime to a configured time", 1) - configuredTime = self._getConfiguredTime(pwsTXXStats.endTime) - - probEndHour = self._calculateHourOffset(configuredTime) - - self._textProduct.debug_print("using rounded average betweeen wind end hour and configured pwsTXXStats end hour", 1) - windowInfo.endHour = int(round(self._textProduct.average(windTimeInfo.endHour, probEndHour))) - self._textProduct.debug_print("endHour = %s" % (windowInfo.endHour), 1) - - return windowInfo - - def _createWindowText(self, windowInfo): - windowInfo.windowText = "Window for " + windowInfo.eventType + " force winds: " - self._textProduct.debug_print("In _createWindowText", 1) - self._textProduct.debug_print("window stats:", 1) - self._textProduct.debug_print("onsetHour = %s" % (windowInfo.onsetHour), 1) - self._textProduct.debug_print("endHour = %s" % (windowInfo.endHour), 1) - self._textProduct.debug_print("endTimeResolution = %s" % (windowInfo.endTimeResolution), 1) - self._textProduct.debug_print("shouldCreateWindowText = %s" % (windowInfo.shouldCreateWindowText), 1) - - if windowInfo.onsetHour is None or not windowInfo.shouldCreateWindowText: - # We do not want a statement of a non-existent window - windowInfo.windowText = None - else: - startTime = AbsTime(self._textProduct._issueTime_secs + windowInfo.onsetHour*60*60) - if windowInfo.endHour is not None: - endTime = AbsTime(self._textProduct._issueTime_secs + windowInfo.endHour*60*60) - windowPeriod = self._textProduct.makeTimeRange(startTime, endTime) - else: - windowPeriod = self._textProduct.makeTimeRange(startTime, startTime + 1) - self._textProduct.debug_print("window period = %s" % (windowPeriod), 1) - - startTimeDescriptor = "" - if 
windowInfo.onsetHour >= 18: - startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, resolution = 6) - elif 6 <= windowInfo.onsetHour < 18: - startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, resolution = 3) - - if len(startTimeDescriptor) == 0 and windowInfo.endHour is None: - windowInfo.windowText = None - elif len(startTimeDescriptor) != 0 and windowInfo.endHour > 114: - windowInfo.windowText += "Begins " + startTimeDescriptor - else: - connector = "through " - endTimeDescriptor = "the next few hours" - - if windowInfo.endHour is not None: - if windowInfo.endTimeResolution is None: - if windowInfo.endHour >= 18: - windowInfo.endTimeResolution = 6 - elif 6 <= windowInfo.endHour < 18: - windowInfo.endTimeResolution = 3 - - if windowInfo.endTimeResolution is not None: - endTimeDescriptor = \ - self._textProduct._formatPeriod(windowPeriod, - useEndTime = True, - resolution = windowInfo.endTimeResolution) - - # If we are not talking about the next few hours - if endTimeDescriptor != "the next few hours": - connector = "until " - - if len(startTimeDescriptor) != 0: - connector = " " + connector - windowInfo.windowText += startTimeDescriptor + connector + endTimeDescriptor - - return windowInfo - - def _getConfiguredTime(self, originalTime): - self._textProduct.debug_print("original time = %s" % - (self._textProduct._pp.pformat(originalTime)), 1) - - unixTime = originalTime.unixTime() - localTime = time.localtime(unixTime) - self._textProduct.debug_print("original time in local time is %s" % - (self._textProduct._pp.pformat(localTime)), 1) - utcHourOffset = self._calculateUTCandLocalHourOffset() - self._textProduct.debug_print("utcHourOffset = %s" % (utcHourOffset), 1) - - # Remember these times are in local time zone, so hour 0 is - # midnight of the current calendar day. - if localTime.tm_hour > 6 and localTime.tm_hour <= 18: - # It's daytime, so use the end of the daytime period (18 = 6PM). 
- # NIGHT returns the start of the nighttime period which is the - # end of the daytime period. - configuredTime = absTimeYMD(originalTime.year, - originalTime.month, - originalTime.day, - self._textProduct.NIGHT()) - else: - # It's nighttime, so use the end of the nighttime period (6 = 6AM). - # DAY returns the start of the daytime period which is the end of - # the nighttime period. - configuredTime = absTimeYMD(originalTime.year, - originalTime.month, - originalTime.day, - self._textProduct.DAY()) - self._textProduct.debug_print("configuredTime (local time) = %s" % - (self._textProduct._pp.pformat(configuredTime)), 1) - - # The configured time is local time so we need to add an offset to make the entire date UTC - configuredUnixTime = configuredTime.unixTime() + (utcHourOffset * 3600) - configuredTime = AbsTime(configuredUnixTime) - self._textProduct.debug_print("configuredTime (UTC time) = %s" % - (self._textProduct._pp.pformat(configuredTime)), 1) - - return configuredTime - - def _calculateUTCandLocalHourOffset(self): - if time.daylight: - # This is daylight savings time so it needs to be handled differently - return int(time.altzone // 3600) - else: - utc = time.gmtime() - local = time.localtime() - - diffInSeconds = time.mktime(utc) - time.mktime(local) - return int(diffInSeconds // 3600) - - def _isValidDayTime(self, trStartHour, utcDay, utcNight): - - # Handle case where "night" starts at an "earlier" UTC hour than "day" - # (e.g. 
DAY = 18Z and NIGHT = 06Z) - if (utcNight < utcDay) and \ - (trStartHour >= utcDay or trStartHour < utcNight): - - # If we are toward the end of the daytime, and more than 1 hour - # from its end - if (trStartHour < utcNight) and (utcNight - trStartHour) > 1: - return True - elif trStartHour >= utcDay: - return True - - # Handle "normal" case where "day" starts before "night" in UTC - elif trStartHour >= utcDay and trStartHour < utcNight and \ - (utcNight - trStartHour) > 1: - return True - - # If we made it this far, this is not a valid "day" hour - return False - - -class StormSurgeSectionStats(SectionCommonStats): - def __init__(self, textProduct, segment, intersectStatList, timeRangeList): - SectionCommonStats.__init__(self, textProduct, segment) - self._inundationMax = None - self._onsetSurgeHour = None - self._endSurgeHour = None - self._windowSurge = None - - # Only gather stats if we are populating the surge section - if self._textProduct._PopulateSurge: - self._setStats(intersectStatList, timeRangeList) - - def _setStats(self, statList, timeRangeList): - windows = [] - phishStartTime = None - phishEndTime = None - - # If this is an inland area, just move on - if statList == "InlandArea": - return - - self._textProduct.debug_print("*"*100, 1) - self._textProduct.debug_print("Setting Surge Section stats for %s" % self._segment, 1) - - statDict = statList[0] - self._textProduct.debug_print("StatDict %s" % statDict, 1) - - self._inundationMax = self._textProduct._getStatValue(statDict, "InundationMax", "Max") - self._textProduct.debug_print("Raw self._inundationMax = %s" % (repr(self._inundationMax)), 1) - - if self._inundationMax is not None: - self._inundationMax = round(self._inundationMax) - self._textProduct.debug_print("self._inundationMax = %s" % (self._inundationMax), 1) - - self._textProduct.debug_print("length of statList = %s" % (len(statList)), 1) - for period in range(len(statList)): - tr, _ = timeRangeList[period] - statDict = statList[period] - 
self._textProduct.debug_print("-"*50, 1) - self._textProduct.debug_print("tr = %s" % (self._textProduct._pp.pformat(tr)), 1) - self._textProduct.debug_print("statDict = %s" % (self._textProduct._pp.pformat(statDict)), 1) - - - curPhish = self._textProduct._getStatValue(statDict, "InundationTiming", "Max") - self._textProduct.debug_print("curPhish = '%s'" % (str(curPhish)), 1) - self._textProduct.debug_print("phishStartTime = %s phishEndTime = %s" % - (str(phishStartTime), str(phishEndTime)), 1) - - if (curPhish is None) or (curPhish == 'None'): - self._textProduct.debug_print("Done: Reached end of grids (curPhish was None)", 1) - break - - - # For start time: - # If inundationMax > 3: - # Looking for 2 consecutive grids with a surge height > 1 - # Start will be the start time of the FIRST of the 2 consecutive grids - # If 1 < inundationMax <= 3: - # Looking for 1 grid with a surge height > 1 - # Start will be the start time of this grid - # - # For end time: - # Looking for 2 consecutive grids with a surge height <= 1 - # End will be the start time of the FIRST of the 2 consecutive grids - - # If we have another period after this one, we may need to look at the two - # consecutive periods for start and end time conditions - isLastPeriod = True - if period < len(statList) - 1: - isLastPeriod = False - nextTr, _ = timeRangeList[period+1] - nextStatDict = statList[period+1] - nextPhish = self._textProduct._getStatValue(nextStatDict, "InundationTiming", "Max") - - self._textProduct.debug_print("nextTr = %s" % (self._textProduct._pp.pformat(nextTr)), 1) - self._textProduct.debug_print("nextStatDict = %s" % (self._textProduct._pp.pformat(nextStatDict)), 1) - self._textProduct.debug_print("nextPhish = '%s'" % (str(nextPhish)), 1) - - # Set what the condition is for determining the start time - if (self._inundationMax > 3) and (not isLastPeriod): - startCondition = (curPhish > 1) and (nextPhish > 1) - self._textProduct.debug_print("startCondition looking at 2 periods", 1) 
- elif 1 < self._inundationMax <= 3: - startCondition = curPhish > 1 - self._textProduct.debug_print("startCondition looking at 1 period", 1) - else: - startCondition = False - self._textProduct.debug_print("no startCondition, done", 1) - break - - # Set what the condition is for determining the end time - if not isLastPeriod: - endCondition = (curPhish <= 1) and (nextPhish <= 1) - self._textProduct.debug_print("endCondition looking at 2 periods", 1) - else: - endCondition = False - self._textProduct.debug_print("this is the last period, no endCondition possible", 1) - - if startCondition and (phishStartTime is None): - phishStartTime = tr.startTime() - elif endCondition and (phishStartTime is not None) and (phishEndTime is None): - phishEndTime = tr.startTime() - - # We found a new window, save it, reset and look for any additional windows - self._textProduct.debug_print("Found a new window:", 1) - self._textProduct.debug_print("window phishStartTime = %s window phishEndTime = %s" % - (str(phishStartTime), str(phishEndTime)), 1) - - windows.append((phishStartTime, phishEndTime)) - phishStartTime = None - phishEndTime = None - - self._textProduct.debug_print("Looking for additional windows", 1) - - self._textProduct.debug_print("new phishStartTime = %s new phishEndTime = %s" % - (str(phishStartTime), str(phishEndTime)), 1) - - # Check for the case where a window doesn't end - if (phishStartTime is not None) and (phishEndTime is None): - self._textProduct.debug_print("Found a never-ending window:", 1) - self._textProduct.debug_print("window phishStartTime = %s window phishEndTime = %s" % - (str(phishStartTime), str(phishEndTime)), 1) - windows.append((phishStartTime, None)) - - # Create the final window - if len(windows) == 0: - phishStartTime = None - phishEndTime = None - else: - phishStartTime = windows[0][0] # Start time of first window - phishEndTime = windows[-1][1] # End time of last window - - self._textProduct.debug_print("Constructed the final window:", 1) 
- self._textProduct.debug_print("final phishStartTime = %s final phishEndTime = %s" % - (str(phishStartTime), str(phishEndTime)), 1) - - self._windowSurge = "Window of concern: " - - if phishStartTime is None: - if self._inundationMax is None or self._inundationMax <= 1: - self._windowSurge += "None" - else: - self._windowSurge += "Around high tide" - else: - self._onsetSurgeHour = self._calculateHourOffset(phishStartTime) - startTime = AbsTime(self._textProduct._issueTime_secs + self._onsetSurgeHour*60*60) - - self._textProduct.debug_print("surge startTime = %s self._onsetSurgeHour = %s " % - (self._textProduct._pp.pformat(startTime), self._onsetSurgeHour), 1) - if phishEndTime is not None: - self._endSurgeHour = self._calculateHourOffset(phishEndTime) - endTime = AbsTime(self._textProduct._issueTime_secs + self._endSurgeHour*60*60) - windowPeriod = self._textProduct.makeTimeRange(startTime, endTime) - else: - windowPeriod = self._textProduct.makeTimeRange(startTime, startTime + 1) - self._textProduct.debug_print("surge window period = %s" % (windowPeriod), 1) - - startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod) - - if phishEndTime is None: - self._windowSurge += "Begins " + startTimeDescriptor - else: - endTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, useEndTime = True) - - if self._onsetSurgeHour > 12: - self._windowSurge += startTimeDescriptor +\ - " until " +\ - endTimeDescriptor - else: - self._windowSurge += "through " + endTimeDescriptor - - if self._inundationMax is not None: - # inundationMax is already rounded but should be stored as an int and not a float - self._currentAdvisory["StormSurgeForecast"] = int(self._inundationMax) - - self._textProduct.debug_print("+"*60, 1) - self._textProduct.debug_print("Done in StormSurgeSectionStats._setStats:", 1) - self._textProduct.debug_print("self._inundationMax = '%s'" % - (self._inundationMax), 1) - self._textProduct.debug_print("self._onsetSurgeHour = '%s'" % - 
(self._onsetSurgeHour), 1) - self._textProduct.debug_print("self._endSurgeHour = '%s'" % - (self._endSurgeHour), 1) - self._textProduct.debug_print("self._windowSurge = '%s'" % - (self._windowSurge), 1) - self._textProduct.debug_print("self._maxThreat = '%s'" % - (self._maxThreat), 1) - self._textProduct.debug_print("+"*60, 1) - - -class FloodingRainSectionStats(SectionCommonStats): - def __init__(self, textProduct, segment, statList, timeRangeList, - extraRainfallStatList, previousRainfallTRlist): - SectionCommonStats.__init__(self, textProduct, segment) - self._sumAccum = None - self._prevAccum = 0.00 - - self._setStats(statList, timeRangeList, extraRainfallStatList, - previousRainfallTRlist) - - def _setStats(self, statList, timeRangeList, extraRainfallStatList, - previousRainfallTRlist): - for period in range(len(statList)): - tr, _ = timeRangeList[period] - statDict = statList[period] - - value = self._textProduct._getStatValue(statDict, "QPF") - - if value is not None: - if self._sumAccum is None: - self._sumAccum = value - else: - self._sumAccum += value - - self._updateThreatStats(tr, statDict, "FloodingRainThreat") - - self._currentAdvisory["FloodingRainThreat"] = self._maxThreat - if self._sumAccum is not None: - # Round so that we don't end up with stats like 4.03143835067749 - self._currentAdvisory["FloodingRainForecast"] = \ - self._textProduct.round(self._sumAccum, "Nearest", 0.5) - - # Now compute the previous rainfall - for period in range(len(extraRainfallStatList)): - tr, _ = timeRangeList[period] - prevStatDict = extraRainfallStatList[period] - - prevStats = self._textProduct._getStatValue(prevStatDict, "QPF") - self._textProduct.debug_print("prevStats = %s" % (prevStats), 1) - if prevStats is not None: - - if self._prevAccum is not None: - self._prevAccum += prevStats - else: - self._prevAccum = prevStats - else: - self._prevAccum = 0.00 - - if self._prevAccum is not None and self._prevAccum >= 0.10: - # Round so that we don't end up with stats 
like 4.03143835067749 - self._currentAdvisory["PreviousRainfall"] = \ - self._textProduct.round(self._prevAccum, "Nearest", 0.1) - else: - # Otherwise, do not consider this sgnificant rainfall - self._currentAdvisory["PreviousRainfall"] = 0.00 - - self._textProduct.debug_print("+"*60, 1) - self._textProduct.debug_print("In FloodingRainSectionStats._setStats", 1) - self._textProduct.debug_print("self._sumAccum = '%s'" % (self._sumAccum), 1) - self._textProduct.debug_print("self._maxThreat = '%s'" % (self._maxThreat), 1) - - -class TornadoSectionStats(SectionCommonStats): - def __init__(self, textProduct, segment, statList, timeRangeList): - SectionCommonStats.__init__(self, textProduct, segment) - - self._setStats(statList, timeRangeList) - - def _setStats(self, statList, timeRangeList): - for period in range(len(statList)): - tr, _ = timeRangeList[period] - statDict = statList[period] - - self._updateThreatStats(tr, statDict, "TornadoThreat") - - self._currentAdvisory["TornadoThreat"] = self._maxThreat - - self._textProduct.debug_print("+"*60, 1) - self._textProduct.debug_print("In TornadoSectionStats._setStats", 1) - self._textProduct.debug_print("self._maxThreat = '%s'" % (self._maxThreat), 1) - - -from xml.etree.ElementTree import Element, SubElement, tostring, dump -import xml.dom.minidom as minidom -import re -class XMLFormatter(): - def __init__(self, textProduct): - self._textProduct = textProduct - - def execute(self, productDict): - xml = Element('product') - self.dictionary(xml, productDict) - self._textProduct.debug_print("XML = %s" % (xml), 1) - self._textProduct.debug_print("XML dump = %s", dump(xml), 1) - prettyXML = minidom.parseString(tostring(xml)) - return prettyXML.toprettyxml() #tostring(xml) - - def xmlKeys(self): - return [ - 'wmoHeader', - 'TTAAii', - 'originatingOffice', - 'productID', - 'siteID', - 'fullStationID', - 'ddhhmmTime', - 'easMessage', - 'productHeader', - 'disclaimer', - 'cityState', - 'stormNumber', - 'productName', - 
'stormName', - 'advisoryType', - 'advisoryNumber', - 'issuedByString', - 'issuanceTimeDate', - - 'segments', - 'ugcHeader', - 'vtecRecords', - 'areaList', - 'issuanceTimeDate', - 'summaryHeadlines', - 'headlinesInEffect', - 'headlineDefinitions', - 'locationsAffected', - 'fcstConfidence', - #section keys will be inserted here (see sectionKeys) - 'infoSection', - - 'endProduct', - ] - - def sectionKeys(self): - return [ - 'windSection', - 'sectionHeader', - 'forecastSubsection', - 'latestForecastSummary', - 'peakWind', - 'windowTS', - 'windowHU', - 'threatSubsection', - 'lifePropertyThreatSummary', - 'threatTrend', - 'threatStatements', - 'impactsSubsection', - 'potentialImpactsSummary', - 'potentialImpactsStatements', - - 'stormSurgeSection', - 'sectionHeader', - 'forecastSubsection', - 'latestForecastSummary', - 'peakSurge', - 'surgeWindow', - 'threatSubsection', - 'lifePropertyThreatSummary', - 'threatTrend', - 'threatStatements', - 'impactsSubsection', - 'potentialImpactsSummary', - 'potentialImpactsStatements', - - 'floodingRainSection', - 'sectionHeader', - 'forecastSubsection', - 'latestForecastSummary', - 'peakRain', - 'threatSubsection', - 'lifePropertyThreatSummary', - 'threatTrend', - 'threatStatements', - 'impactsSubsection', - 'potentialImpactsSummary', - 'potentialImpactsStatements', - - 'tornadoSection', - 'sectionHeader', - 'forecastSubsection', - 'latestForecastSummary', - 'tornadoSituation', - 'threatSubsection', - 'lifePropertyThreatSummary', - 'threatStatements', - 'impactsSubsection', - 'potentialImpactsSummary', - 'potentialImpactsStatements', - ] - - def getSectionKey(self, key): - sectionKey = re.sub("\['......'\]", "", key) - - if "._" in sectionKey: - sectionKey = re.sub(".*\._", "", sectionKey) - - self._textProduct.debug_print("sectionKey = %s" % (sectionKey), 1) - return sectionKey - - def dictionary(self, xml, productDict): - ''' - Returns the dictionary in XML format. 
- @param productDict: dictionary values - @return: Returns the dictionary in XML format. - ''' - if productDict is not None: - for key in productDict: - value = productDict[key] - editable = False -# if isinstance(key, KeyInfo): -# editable = key.isEditable() -# key = key.getName() - - if key not in self.xmlKeys(): - sectionKey = self.getSectionKey(key) - if sectionKey not in self.sectionKeys(): - self._textProduct.debug_print("skipping '%s' in XML" % (key), 1) - continue - else: - key = sectionKey - if isinstance(value, dict): - subElement = SubElement(xml,key) - self.dictionary(subElement, value) - elif isinstance(value, list): - if key == 'cityList': - subElement = SubElement(xml,'cityList') - if editable: - subElement.attrib['editable'] = 'true' - self.list(subElement, 'city', value) -# elif key == 'infoSection': -# subElement = SubElement(xml, key) -# legacyFormatter = LegacyFormatter(self._textProduct) -# legacyText = legacyFormatter.processInfoSection(value) -# legacyText = legacyText.encode('string-escape') -# subElement.text = legacyText -# if editable: -# subElement.attrib['editable'] = 'true' - else: - self.list(xml, key, value) - else: - subElement = SubElement(xml,key) - subElement.text = value - if editable: - subElement.attrib['editable'] = 'true' - - def list(self, xml, key, data): - ''' - Returns the list in XML format. - @param data: list of values - @return: Returns the list in XML format. 
- ''' - editable = False -# if isinstance(key, KeyInfo): -# editable = key.isEditable() -# key = key.getName() - if data is not None: - if 'info' in key and 'Section' in key: - subElement = SubElement(xml, key) - self._textProduct.debug_print("info key = '%s'" % (key), 1) - self._textProduct.debug_print("value = %s" % (data), 1) - if isinstance(data, list): - subkey = 'info' + 'Sub' + key[4:] - for value in data: - self.list(subElement, subkey, value) - else: - subElement.text = data - else: - for value in data: - - subElement = SubElement(xml, key) - if editable: - subElement.attrib['editable'] = 'true' - - if isinstance(value, dict): - self.dictionary(subElement, value) - elif isinstance(value, list): - if key == 'cityList': - subElement = SubElement(xml,'cityList') - if editable: - subElement.attrib['editable'] = 'true' - self.list(subElement, 'city', value) - else: - self.list(xml, key, value) - else: - subElement.text = value - - -class LegacyFormatter(): - def __init__(self, textProduct): - self._textProduct = textProduct - self.TAB = " "*self._textProduct._tabLength - self._tpc = HLSTCV_Common.TextProductCommon() - - def execute(self, productDict): - self.productDict = productDict - productParts = self._tpc.getVal(productDict, 'productParts', []) - text = self._processProductParts(productDict, productParts.get('partsList')) - return text - - def _processProductParts(self, productDict, productParts, skipParts=[]): - ''' - Adds the product parts to the product - @param productDict -- dictionary of information -- could be the product dictionary or a sub-part such as a segment - @param skipParts -- necessary to avoid repetition when calling this method recursively - @param productParts -- list of instances of the ProductPart class with information about how to format each product part - @return text -- product string - ''' - text = '' - self._textProduct.debug_print("productParts = %s" % (self._textProduct._pp.pformat(productParts)), 1) - for part in 
productParts: - valtype = type(part) - if valtype is str: - name = part - elif valtype is tuple: - name = part[0] - infoDicts = part[1] - newtext = self.processSubParts(productDict.get(name), infoDicts) - text += newtext - continue - elif valtype is list: - self._tpc.flush() - # TODO THIS SHOULD BE REMOVED AFTER THE REFACTOR OF HazardServicesProductGenerationHandler.JAVA - tup = (part[0], part[1]) - part = tup - name = part[0] - - - if name == 'wmoHeader': - text += self.processWmoHeader(productDict['wmoHeader']) + '\n' - elif name == 'easMessage': - text += productDict['easMessage'] + '\n' - elif name == 'productHeader': - text += self.processProductHeader(productDict['productHeader']) - elif name == 'vtecRecords': - for vtecString in productDict['vtecRecords']: - text += vtecString + '\n' - elif name == 'areaList': - text += self._textProduct.indentText(productDict['areaList'], '', '', - maxWidth=self._textProduct._lineLength) - elif name == 'issuanceTimeDate': - text += productDict['issuanceTimeDate'] + '\n\n' - elif name == 'summaryHeadlines': - text += self.processSummaryHeadlines(productDict['summaryHeadlines']) - elif name == 'locationsAffected': - text += self.processLocationsAffected(productDict['locationsAffected']) - elif 'sectionHeader' in name: - text += "* " + productDict[name].upper() + "\n" - elif 'Subsection' in name: - text += self.processSubsection(productDict[name]) - elif name == 'infoSection': - text += self.processInfoSection(productDict['infoSection']) - elif name in ['endProduct', 'endSection']: - text += '$$\n' - elif name == 'CR': - text += '\n' - elif name == 'doubleAmpersand': - text += '&&\n' - elif name not in self._noOpParts(): - textStr = productDict.get(name) - if textStr: - text += textStr + '\n' - - # Cleanup the case of the last segment which will wind up with two sets - # of '$$' - text = re.sub("\$\$\n+\$\$", "$$\n", text) - - # Return completed text - return text - - def _noOpParts(self): - ''' - These represent product parts 
that should be skipped when calling product part methods. - They will be handled automatically by the formatters. - ''' - return ["setup_segment"] #['CR', 'endProduct', 'endSegment', 'issuanceDateTime', 'doubleAmpersand'] - - def processWmoHeader(self, wmoHeader): - text = wmoHeader['TTAAii'] + ' ' + wmoHeader['fullStationID'] + ' ' + wmoHeader['ddhhmmTime'] + '\n' - text += wmoHeader['productID'] + wmoHeader['siteID'] + '\n' - return text - - def processProductHeader(self, headerDict): - text = headerDict['stormName'] + ' ' + headerDict['productName'] - - advisoryText = '' - if headerDict['advisoryType'] is not None and \ - headerDict['advisoryType'].lower() in ["intermediate", "special"]: - advisoryText = headerDict['advisoryType'] + ' ' - - if headerDict['advisoryNumber'] is not None: - advisoryText += 'Advisory Number ' + headerDict['advisoryNumber'] - - if len(advisoryText) > 0: - if len(text + "/" + advisoryText) > self._textProduct._lineLength: - text += '\n' - else: - text += '/' - - text += advisoryText + '\n' - else: - text += '\n' - - text += "National Weather Service " + headerDict['cityState'] + " " + headerDict['stormNumber'] + '\n' - text += headerDict['issuanceTimeDate'] + '\n\n' - - return text - - def processLocationsAffected(self, locationsAffectedList): - if len(locationsAffectedList) == 0: - return "" - - text = "* LOCATIONS AFFECTED\n" - for location in locationsAffectedList: - text += self.TAB + "- " + location + "\n" - return text + "\n" - - def processSubsection(self, subsectionOrderedDict): - text = "" - for partName in subsectionOrderedDict: - if "Summary" in partName: - firstIndentText = self.TAB + "- " - nextIndentText = self.TAB + " " - text += self._textProduct.indentText(subsectionOrderedDict[partName], - firstIndentText, - nextIndentText, - maxWidth = self._textProduct._lineLength) - else: - firstIndentText = self.TAB*2 + "- " - nextIndentText = self.TAB*2 + " " - if "threatStatements" in partName: - text += 
self.processThreatStatements(firstIndentText, - nextIndentText, - subsectionOrderedDict[partName]) - elif "potentialImpactsStatements" in partName: - text += self.processImpactsStatements(firstIndentText, - nextIndentText, - subsectionOrderedDict[partName]) - else: - text += self._textProduct.indentText(subsectionOrderedDict[partName], - firstIndentText, - nextIndentText, - maxWidth=self._textProduct._lineLength) - - return text + "\n" - - def processThreatStatements(self, firstIndentText, nextIndentText, threatStatements): - planning = threatStatements[0] - text = self._textProduct.indentText(planning, - firstIndentText, - nextIndentText, - maxWidth=self._textProduct._lineLength) - - preparation = threatStatements[1] - text += self._textProduct.indentText(preparation, - firstIndentText, - nextIndentText, - maxWidth=self._textProduct._lineLength) - - action = threatStatements[2] - text += self._textProduct.indentText(action, - firstIndentText, - nextIndentText, - maxWidth=self._textProduct._lineLength) - - return text - - def processImpactsStatements(self, firstIndentText, nextIndentText, statements): - text = "" - - for statement in statements: - text += self._textProduct.indentText(statement, - firstIndentText, - nextIndentText, - maxWidth=self._textProduct._lineLength) - - return text - - def processInfoSection(self, infoSection): - if len(infoSection) == 0: - return "" - - text = "* FOR MORE INFORMATION:\n" - text += self._buildInfoSection(infoSection, tabLevel=1) - return text + "\n$$\n\n" - - def _buildInfoSection(self, infoSection, tabLevel): - text = "" - for component in infoSection: - if type(component) is str: - text += self.TAB*tabLevel + "- " + component + "\n" - elif type(component) is list: - text += self._buildInfoSection(component, tabLevel+1) - return text - - def processSummaryHeadlines(self, summaryDict): - text = "" - for headline in summaryDict['headlinesInEffect']: - text += headline.upper() + "\n" - - text += "\n" - - for definition in 
summaryDict['headlineDefinitions']: - text += self._textProduct.indentText(definition, - maxWidth=self._textProduct._lineLength) \ - + "\n" - return text - - def processSubParts(self, subParts, infoDicts): - """ - Generates Legacy text from a list of subParts e.g. segments or sections - @param subParts: a list of dictionaries for each subPart - @param partsLists: a list of Product Parts for each segment - @return: Returns the legacy text of the subParts - """ - text = '' - for i in range(len(subParts)): - newtext = self._processProductParts(subParts[i], infoDicts[i].get('partsList')) - text += newtext - return text - +# +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. 
+## + +# Version 2018.6.5-0 + +import GenericHazards +import JsonSupport +import LocalizationSupport +import time, types, copy, LogStream, collections +import ModuleAccessor +import math +import TimeRange +from com.raytheon.uf.common.dataplugin.gfe.db.objects import ParmID + + +from AbsTime import * +AWIPS_ENVIRON = "AWIPS2" + +import HLSTCV_Common + +class TextProduct(HLSTCV_Common.TextProduct): + Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) + + Definition["displayName"] = "None" + Definition["outputFile"] = "{prddir}/TEXT/TCV.txt" + Definition["database"] = "Official" # Source database + Definition["mapNameForCombinations"] = "Zones_" + Definition["defaultEditAreas"] = "Combinations_TCV_" + Definition["showZoneCombiner"] = 1 # 1 to cause zone combiner to display + + Definition["productName"] = "Local Watch/Warning Statement" + + Definition["fullStationID" ] = "" + Definition["wmoID" ] = "" + Definition["wfoCityState" ] = "" + Definition["pil" ] = "" + Definition["textdbPil" ] = "" + Definition["awipsWANPil" ] = "" + Definition["site"] = "" + Definition["wfoCity"] = "" + + Definition["areaName"] = "" #optional area name for product + Definition["areaDictionary"] = "AreaDictionary" + Definition["language"] = "english" + Definition["lineLength"] = 71 #Maximum line length + Definition["tabLength"] = 4 + + Definition["purgeTime"] = 8 # Default Expiration in hours if + Definition["includeZoneNames"] = 1 # Zone names will be included in the area header + Definition["includeIssueTime"] = 0 # Issue Time will be included in the area header + Definition["easPhrase"] = \ + "URGENT - IMMEDIATE BROADCAST REQUESTED" # Optional EAS phrase to be include in product header + Definition["callToAction"] = 1 + Definition["hazardSamplingThreshold"] = (3, None) + + + Definition["threatPhrase"] = { + "Wind": { + "Extreme": "Potential for wind greater than 110 mph", + "High": "Potential for wind 74 to 110 mph", + "Moderate": "Potential for wind 58 to 73 mph", + 
"Elevated": "Potential for wind 39 to 57 mph", + "None": "Wind less than 39 mph" + }, + "Storm Surge": { + "Extreme": "Potential for storm surge flooding greater than 9 feet above ground", + "High": "Potential for storm surge flooding greater than 6 feet above ground", + "Moderate": "Potential for storm surge flooding greater than 3 feet above ground", + "Elevated": "Potential for storm surge flooding greater than 1 foot above ground", + "None": "Little to no storm surge flooding" + }, + "Flooding Rain": { + "Extreme": "Potential for extreme flooding rain", + "High": "Potential for major flooding rain", + "Moderate": "Potential for moderate flooding rain", + "Elevated": "Potential for localized flooding rain", + "None": "Little or no potential for flooding rain" + }, + "Tornado": { + "Extreme": "Potential for an outbreak of tornadoes", + "High": "Potential for many tornadoes", + "Moderate": "Potential for several tornadoes", + "Elevated": "Potential for a few tornadoes", + "None": "Tornadoes not expected" + } + } + + Definition["debug"] = { + #TextProduct + "__init__": 0, + "_inlandAreas": 0, + "_coastalAreas": 0, + "_cwa": 0, + "_productParts_TCV": 0, + "_segmentParts_TCV": 0, + "_analysisList": 0, + "_intersectAnalysisList": 0, + "_extraRainfallAnalysisList": 0, + "generateForecast": 0, + "_initializeVariables": 0, + "_performGridChecks": 0, + "_isCorrectNumGrids": 0, + "_checkContinuousDuration": 0, + "_noOpParts": 0, + "_easMessage": 0, + "_setup_segment": 0, + "_vtecRecords": 0, + "_areaList": 0, + "_issuanceTimeDate": 0, + "_summaryHeadlines": 0, + "_locationsAffected": 0, + "_fcstConfidence": 0, + "_infoSection": 0, + "_endSection": 0, + "_hazardDefinition": 0, + "_threatKeyOrder": 0, + "_sampleData": 0, + "_getStats": 0, + "_determineSegments": 0, + "_getRefinedHazardSegments": 0, + "_refineSegments": 0, + "_makeSegmentEditAreas": 0, + "_findSegment": 0, + "_getAllVTECRecords": 0, + "_getHazardsForHLS": 0, + "_convertToISO": 0, + "_convertToDatetime": 0, + 
"_initializeSegmentZoneData": 0, + "_archiveCurrentAdvisory": 0, + "_saveAdvisory": 0, + "_overview_list": 0, + "_displayGUI": 0, + + #HLSTCV_Common + "allowedHazards": 0, + "allowedHeadlines": 0, + "_initializeVariables": 0, + "moderated_dict": 0, + "_wmoHeader": 0, + "_productHeader": 0, + "_ugcHeader": 0, + "_processProductParts": 0, + "_createProductDictionary": 0, + "_initializeProductDictionary": 0, + "_formatProductDictionary": 0, + "_getStatValue": 0, + "_allAreas": 0, + "_groupSegments": 0, + "_getSegmentVTECRecordsTuples": 0, + "_computeIntersectAreas": 0, + "_initializeHazardsTable": 0, + "_getHazardsTable": 0, + "_ignoreActions": 0, + "_setVTECActiveTable": 0, + "_getVtecRecords": 0, + "_getAllowedHazardList": 0, + "_altFilterMethod": 0, + "_filterHazards": 0, + "_getAdditionalHazards": 0, + "_checkHazard": 0, + "_initializeTimeVariables": 0, + "_determineTimeRanges": 0, + "_createPeriodList": 0, + "_calculateStartTime": 0, + "_formatPeriod": 0, + "_getTimeDesc": 0, + "_getPartOfDay": 0, + "_initializeStormInformation": 0, + "_parseTCP": 0, + "_getStormTypeFromTCP": 0, + "_getStormNameFromTCP": 0, + "_getAdvisoryTypeFromTCP": 0, + "_getAdvisoryNumberStringFromTCP": 0, + "_getStormNumberStringFromTCP": 0, + "_getStormIDStringFromTCP": 0, + "_useTestTCP": 0, + "_testTCP": 0, + "_initializeAdvisories": 0, + "_synchronizeAdvisories": 0, + "_getLocalAdvisoryDirectoryPath": 0, + "_getStormAdvisoryNames": 0, + "_loadLastTwoAdvisories": 0, + "_loadAdvisory": 0, + "_getAdvisoryPath": 0, + "_getAdvisoryFilename": 0, + "_processVariableList": 0, + "_GUI_sizing_dict": 0, + "_GUI1_configDict": 0, + "_font_GUI_dict": 0, + + #Overview_Dialog + "body": 0, + "_makeButtons": 0, + "okCB": 0, + + #Common_Dialog + "getVarDict": 0, + "_makeRadioOrCheckList": 0, + "_makeEntry": 0, + "cancelCB": 0, + "_entryName": 0, + "_makeTuple": 0, + "_setVarDict": 0, + "status": 0, + "buttonbox": 0, + + #SectionCommon + "_setProductPartValue": 0, + "_finalSectionParts": 0, + 
"_sectionHeader": 0, + "_lifePropertyThreatSummary": 0, + "_getThreatTrendSentence": 0, + "_getThreatTrendValue": 0, + "_threatDifference": 0, + "_isThreatDecreasing": 0, + "_isThreatIncreasing": 0, + "_advisoryHasValidKey": 0, + "_isMagnitudeIncreasing": 0, + "_calculateThreatStatementTr": 0, + "_pastThreatsNotNone": 0, + "_setThreatStatementsProductParts": 0, + "_getThreatStatements": 0, + "_potentialImpactsSummary": 0, + "_getPotentialImpactsSummaryText": 0, + "_potentialImpactsStatements": 0, + "_getPotentialImpactsStatements": 0, + "_preparationStatement": 0, + + #Unique to each section, but common method name + "sectionParts": 0, + "_forecastSubsection": 0, + "_latestForecastSummary": 0, + "_threatSubsection": 0, + "_threatTrend": 0, + "_threatStatements": 0, + "_impactsSubsection": 0, + "_setStats": 0, + + #WindSection + "_peakWind": 0, + "_windowTS": 0, + "_windowHU": 0, + "_moderatedMaxWindMph_categories": 0, + "_ktToMph": 0, + "_increment": 0, + + #StormSurgeSection + "_peakSurge": 0, + "_surgeWindow": 0, + + #FloodingRainSection + "_peakRain": 0, + "_rainRange": 0, + + #TornadoSection + "_tornadoSituation": 0, + + #SectionCommonStats + "_initializeSegmentAdvisories": 0, + "_updateThreatStats": 0, + "_calculateHourOffset": 0, + + #WindSectionStats + "_determineCurrentPeriod": 0, + "_updateStatsForPwsXXint": 0, + "_updateStatsForPwsTXX": 0, + "_updateStatsForWind": 0, + "_updateMaxWindGust": 0, + "_calculateProbOnset": 0, + "_calculateMaxPwsXXintTr": 0, + "_computeWindOnsetAndEnd": 0, + "_createWindowText": 0, + "_getConfiguredTime": 0, + "_calculateUTCandLocalHourOffset": 0, + "_isValidDayTime": 0, + + #Unique to each formatter, but common method name + "execute": 0, + + #XMLFormatter + "xmlKeys": 0, + "sectionKeys": 0, + "getSectionKey": 0, + "dictionary": 0, + "list": 0, + + #LegacyFormatter + "_processProductParts": 0, + "processWmoHeader": 0, + "processProductHeader": 0, + "processLocationsAffected": 0, + "processSubsection": 0, + 
"processThreatStatements": 0, + "processImpactsStatements": 0, + "processInfoSection": 0, + "_buildInfoSection": 0, + "processSummaryHeadlines": 0, + "processSubParts": 0, + + #TextProductCommon + "setUp": 0, + "hazardTimeZones": 0, + "getExpireTime": 0, + "getHeadlinesAndSections": 0, + "formatUGCs": 0, + "getFormattedTime": 0, + "formatUGC_names": 0, + "formatNameString": 0, + "getVal": 0, + "formatDatetime": 0, + "flush": 0, + "makeUGCString": 0, + "checkLastArrow": 0, + } +# Definition["debug"] = 1 # turn on ALL debug messages + Definition["debug"] = 0 # turn off ALL debug messages + + + def __init__(self): + HLSTCV_Common.TextProduct.__init__(self) + + ##################################################################### + ##################################################################### + ### Organization of Formatter Code + + ############################################################### + ### MUST OVERRIDE DEFINITIONS !!! + ### _inlandAreas, _coastalAreas, _cwa + ############################################################### + + ############################################################### + ### TCV Product and Segment Parts Definition + ############################################################### + + ############################################################### + ### Analysis Lists, SampleAnalysis Overrides and other + ### analysis related methods + ############################################################### + + ############################################################### + # CODE + ############################################################### + ### High level flow of formatter + ### generateForecast, _initializeVariables, + ### _determineSegments, _determineTimeRanges, _sampleData, + ### _createProductDictionary, _formatProductDictionary, + ### _archiveCurrentAdvisory + ############################################################### + + ############################################################### + ### Product Parts Implementation 
+ ############################################################### + + ############################################################### + ### Sampling and Statistics related methods + ############################################################### + + ############################################################### + ### Area, Zone and Segment related methods + ############################################################### + + ############################################################### + ### Hazards related methods + ############################################################### + + ############################################################### + ### Time related methods + ############################################################### + + ############################################################### + ### Advisory related methods + ############################################################### + + ############################################################### + ### GUI related methods + ############################################################### + + + ############################################################### + ### MUST OVERRIDE DEFINITIONS !!! 
+ + def _inlandAreas(self): + return [ + #"FLZ052", "FLZ056", "FLZ057", "FLZ061", "FLZ043", + ] + + def _coastalAreas(self): + return [ + #"FLZ039", "FLZ042", "FLZ048", "FLZ049", "FLZ050", "FLZ051", "FLZ055", "FLZ060", + #"FLZ062", + ] + + def _cwa(self): + return "" #"MFL" + + ############################################################### + ### TCV Product and Segment Parts Definition + + def _productParts_TCV(self, segment_vtecRecords_tuples): + segmentParts = [] + for segment_vtecRecords_tuple in segment_vtecRecords_tuples: + segmentParts.append(self._segmentParts_TCV(segment_vtecRecords_tuple)) + return { + 'partsList': [ + 'wmoHeader', + 'easMessage', + 'productHeader', + ('segments', segmentParts), + ] + } + + def _segmentParts_TCV(self, segment_vtecRecords_tuple): + segment, _ = segment_vtecRecords_tuple + + windSection = 'windSection[\'' + segment + '\']' + stormSurgeSection = 'stormSurgeSection[\'' + segment + '\']' + floodingRainSection = 'floodingRainSection[\'' + segment + '\']' + tornadoSection = 'tornadoSection[\'' + segment + '\']' + + partsList = [ + 'setup_segment', + 'ugcHeader', + 'vtecRecords', + 'areaList', + 'issuanceTimeDate', + 'summaryHeadlines', + 'locationsAffected', + 'fcstConfidence', + (windSection, self._windSection[segment].sectionParts(segment_vtecRecords_tuple)), + ] + + # The storm surge section should never be inserted into + # "inland" zones, since there will never be a surge impact. 
+ if segment not in self._inlandAreas(): + partsList.append( + (stormSurgeSection, self._stormSurgeSection[segment].sectionParts(segment_vtecRecords_tuple))) + + partsList.extend([ + (floodingRainSection, self._floodingRainSection[segment].sectionParts(segment_vtecRecords_tuple)), + (tornadoSection, self._tornadoSection[segment].sectionParts(segment_vtecRecords_tuple)), + 'infoSection', + 'endSection']) + + return { + 'arguments': segment_vtecRecords_tuple, + 'partsList': partsList + } + + ############################################################### + ### Analysis Lists, SampleAnalysis Overrides and other + ### analysis related methods + + def _analysisList(self): + # Sample over 120 hours beginning at current time + analysisList = [ + # Wind Section + ("Wind", self.vectorModeratedMax, [3]), + ("WindGust", self.moderatedMax, [3]), + ("WindThreat", self.mostSignificantDiscreteValue), + ("pws34int", self.moderatedMax, [3]), + ("pws64int", self.moderatedMax, [3]), + ("pwsD34", self.moderatedMax), + ("pwsN34", self.moderatedMax), + ("pwsD64", self.moderatedMax), + ("pwsN64", self.moderatedMax), + + # Flooding Rain Section + ("QPF", self.accumSum, [72]), + ("FloodingRainThreat", self.mostSignificantDiscreteValue), + + # Tornado Section + ("TornadoThreat", self.mostSignificantDiscreteValue), + ] + + return analysisList + + def _intersectAnalysisList(self): + # The grids for the Surge Section will be intersected with a special edit area + analysisList = [ + ("InundationMax", self.moderatedMax), + ("InundationTiming", self.moderatedMax, [6]), + ] + + return analysisList + + def _extraRainfallAnalysisList(self): + analysisList = [ + ("QPF", self.accumSum), + ] + + return analysisList + + ############################################################### + ### High level flow of formatter + + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + self.debug_print("argDict = %s" % (self._pp.pformat(argDict)), 1) + + error = 
self._initializeVariables(argDict) + if error is not None: + return error + + if self._stormName is None or self._stormName == "": + return "Could not determine the storm name" + + self._segmentList = self._determineSegments() + self.debug_print("Segment Information: %s" % (self._pp.pformat(self._segmentList)), 1) + if len(self._segmentList) == 0: + return "No hazards to report" + + # Determine time ranges + self._determineTimeRanges(argDict) + + # Make sure we have all of the necessary grids before continuing + error = self._performGridChecks(argDict) + if error is not None: + return error + + # Sample the data + self._sampleData(argDict) + + # Create the product dictionary and format it to create the output + productDict = self._createProductDictionary(self._productParts_TCV, + self._segmentList, + areProductPartsSegmented=True) + productOutput = self._formatProductDictionary(LegacyFormatter, productDict) + + self._archiveCurrentAdvisory() + + return productOutput + + def _initializeVariables(self, argDict): + error = HLSTCV_Common.TextProduct._initializeVariables(self, argDict) + if error is not None: + return error + + self._windSection = dict() + self._stormSurgeSection = dict() + self._floodingRainSection = dict() + self._tornadoSection = dict() + + self._initializeAdvisories() + + return None + + def _performGridChecks(self, argDict): + gridChecks = [(self._isCorrectNumGrids, "FloodingRainThreat", 1, argDict), + (self._isCorrectNumGrids, "TornadoThreat", 1, argDict), + (self._isContinuousDuration, "QPF", 72, argDict),] + + if self._WSPGridsAvailable: + gridChecks += [(self._isCorrectNumGrids, "WindThreat", 1, argDict), + (self._isContinuousDuration, "Wind", 120, argDict), + (self._isContinuousDuration, "WindGust", 120, argDict), + (self._isContinuousDuration, "pws34int", 114, argDict), + (self._isContinuousDuration, "pws64int", 114, argDict), + (self._isCombinedContinuousDuration, "pwsD34", "pwsN34", 102, argDict), + (self._isCombinedContinuousDuration, 
"pwsD64", "pwsN64", 102, argDict),] + + if self._PopulateSurge and len(self._coastalAreas()) != 0: + gridChecks += [(self._isCorrectNumGrids, "InundationMax", 1, argDict), + (self._isCorrectNumGrids, "InundationTiming", 12, argDict),] + + missingGridErrors = [] + for gridCheck in gridChecks: + # The first element is the grid check function to call and + # the rest of the elements are the arguments to the function + if not gridCheck[0](*gridCheck[1:]): + error = "" + if gridCheck[0] == self._isCorrectNumGrids: + if gridCheck[2] == 1: + error = "%s needs at least 1 grid" % (gridCheck[1]) + else: + error = "%s needs at least %s grids" % (gridCheck[1], gridCheck[2]) + elif gridCheck[0] == self._isContinuousDuration: + error = "%s needs at least %s continuous hours worth of data" % \ + (gridCheck[1], gridCheck[2]) + else: + error = "%s and %s combined need at least %s continuous hours worth of data" % \ + (gridCheck[1], gridCheck[2], gridCheck[3]) + + missingGridErrors.append(error) + + if len(missingGridErrors) != 0: + error = "There were problems with the following weather elements:\n" + + for gridError in missingGridErrors: + error += "\t" + gridError + "\n" + + return error + + return None + + def _isCorrectNumGrids(self, weatherElement, expectedNumGrids, argDict): + ifpClient = argDict["ifpClient"] + dbId = argDict["databaseID"] + parmId = ParmID(weatherElement, dbId) + times = ifpClient.getGridInventory(parmId) + + self.debug_print("_isCorrectNumGrids test for element: %s" % weatherElement, 1) + self.debug_print("Expected number of grids: %s" % expectedNumGrids, 1) + + gridTimes = [] + for index in range(len(times)): + gridTime = TimeRange.TimeRange(times[index]) + + if (gridTime.endTime() <= self._timeRange1Hour.startTime() or + gridTime.startTime() >= self._timeRange1Hour.endTime()): + +# prettyStartTime = self._pp.pformat(str(gridTime.startTime())) +# prettyEndTime = self._pp.pformat(str(gridTime.endTime())) +# self.debug_print("skipping grid %s (%s - %s): 
outside of time range" +# % (index, prettyStartTime, prettyEndTime), 1) + pass + else: + gridTimes.append(gridTime) + + self.debug_print("Actual number of grids: %s" % len(gridTimes), 1) + + retval = len(gridTimes) >= expectedNumGrids + if not retval: + self.debug_print("_isCorrectNumGrids test failed", 1) + self.debug_print("self._timeRange1Hour: %s" % str(self._timeRange1Hour), 1) + self.debug_print("times: %s" % str(times), 1) + return retval + + def _isContinuousDuration(self, weatherElement, minimumNumHours, argDict): + return self._checkContinuousDuration([weatherElement], minimumNumHours, argDict) + + def _isCombinedContinuousDuration(self, weatherElement1, weatherElement2, minimumNumHours, argDict): + return self._checkContinuousDuration([weatherElement1, weatherElement2], minimumNumHours, argDict) + + def _checkContinuousDuration(self, weatherElementList, minimumNumHours, argDict): + + self.debug_print("_checkContinuousDuration for elements: %s" % \ + self._pp.pformat(weatherElementList), 1) + self.debug_print("Minimum Number of Hours: %s" % minimumNumHours, 1) + + ifpClient = argDict["ifpClient"] + dbId = argDict["databaseID"] + + gridTimes = [] + inventoryDict = {} + for weatherElement in weatherElementList: + parmId = ParmID(weatherElement, dbId) + times = ifpClient.getGridInventory(parmId) + inventoryDict[weatherElement] = times + + for index in range(times.size()): + gridTimes.append(TimeRange.TimeRange(times[index])) + + if len(gridTimes) == 0: + # No grids + self.debug_print("No grids found.", 1) + return False + + gridTimes = sorted(gridTimes, key= lambda gridTime: gridTime.startTime()) + + totalHours = 0 + previousEndTime = None + for gridTime in gridTimes: + if gridTime.endTime() <= self._timeRange1Hour.startTime(): +# prettyEndTime = self._pp.pformat(str(gridTime.endTime())) +# prettyStartTime = self._pp.pformat(str(self._timeRange1Hour.startTime())) +# self.debug_print("skipping: grid end time (%s) before time range start time (%s)" +# % 
(prettyEndTime, prettyStartTime), 1) + continue + + if gridTime.startTime() >= self._timeRange1Hour.endTime(): +# prettyStartTime = self._pp.pformat(str(gridTime.startTime())) +# prettyEndTime = self._pp.pformat(str(self._timeRange1Hour.endTime())) +# self.debug_print("done: grid start time (%s) after time range end time (%s)" +# % (prettyStartTime, prettyEndTime), 1) + break + + if previousEndTime is None: + previousEndTime = gridTime.startTime() + + if previousEndTime != gridTime.startTime(): + break + + previousEndTime = gridTime.endTime() + totalHours += gridTime.duration() / 3600 # Convert from seconds to hours + + self.debug_print("Total Hours of continuous grids: %s" % totalHours, 1) + + retval = totalHours >= minimumNumHours + if not retval: + self.debug_print("_checkContinuousDuration failed.", 1) + self.debug_print("self._timeRange1Hour: %s" % self._pp.pformat(self._timeRange1Hour), 1) + for we in inventoryDict: + self.debug_print("times for %s: %s" % (we, str(inventoryDict[we])), 1) + self.debug_print("Not continuous at: %s" % str(previousEndTime), 1) + + return retval + + ############################################################### + ### Product Parts Implementation + + def _noOpParts(self): + ''' + These represent product parts that should be skipped when calling product part methods. + They will be handled automatically by the formatters. 
+ ''' + return ['CR', 'endProduct', 'endSegment', 'doubleAmpersand'] + + ################# Product Level + + def _easMessage(self, productDict, productSegmentGroup, arguments=None): + productDict['easMessage'] = self._easPhrase + + ################# Segment Level + + def _setup_segment(self, segmentDict, productSegmentGroup, productSegment): + segment, vtecRecords = productSegment + self.debug_print('setup_segment productSegment %s' % (self._pp.pformat(productSegment)), 1) + # NOTE -- using _getVtecRecords to change to milliseconds + segmentVtecRecords = self._getVtecRecords(segment) + + # UGCs and Expire Time + # Assume that the geoType is the same for all hazard events in the segment i.e. area or point + self._ugcs = [segment] + self._timeZones = self._tpc.hazardTimeZones(self._ugcs) + + # In order to compute the expire time, the VTEC record times + # need to be in milliseconds. + recordsInMS = [] + for record in segmentVtecRecords: + recordInMS = copy.copy(record) + + recordInMS["startTime"] = recordInMS["startTime"] * 1000 + recordInMS["endTime"] = recordInMS["endTime"] * 1000 + if "purgeTime" in recordInMS: + recordInMS["purgeTime"] = recordInMS["purgeTime"] * 1000 + if "issueTime" in recordInMS: + recordInMS["issueTime"] = recordInMS["issueTime"] * 1000 + + recordsInMS.append(recordInMS) + + # Get the expire time in milliseconds since the epoch + self._expireTime = self._tpc.getExpireTime( + self._issueTime_ms, self._purgeHours, recordsInMS) + # Then convert it to a date + segmentDict['expireTime'] = self._convertToISO(self._expireTime) + + # Don't show UPG headlines + nonUPGrecords = [] + for record in segmentVtecRecords: + if record['act'] != "UPG": + nonUPGrecords.append(record) + self._summaryHeadlines_value, _ = self._tpc.getHeadlinesAndSections( + nonUPGrecords, self._productID, self._issueTime_secs) + + def _vtecRecords(self, segmentDict, productSegmentGroup, productSegment): + segment, vtecRecords = productSegment + records = [] + for vtecRecord in 
vtecRecords: + vstr = vtecRecord["vtecstr"] + + self.debug_print("vtecRecord = %s" % (self._pp.pformat(vtecRecord)), 1) + + self.debug_print("final vstr = %s" % vstr, 1) + records.append(vstr) + segmentDict['vtecRecords'] = records + + def _areaList(self, segmentDict, productSegmentGroup, productSegment): + # Area String + segmentDict['areaList'] = self._tpc.formatUGC_names(self._ugcs) + + def _issuanceTimeDate(self, segmentDict, productSegmentGroup, productSegment): + segmentDict['issuanceTimeDate'] = self._timeLabel + + def _summaryHeadlines(self, segmentDict, productSegmentGroup, productSegment): + segment, vtecRecords = productSegment + definitions = [] + hazardsFound = [] + + for (phenSig, actions, name) in self.allowedHazards(): + for vtecRecord in vtecRecords: + # The 'phensig' in the VTEC record could contain an + # ETN. As such, we need to strip the ETN before doing a + # comparison with the allowedHazards. + if vtecRecord["phensig"].split(":")[0] == phenSig and \ + phenSig not in hazardsFound and \ + vtecRecord["act"] in ["NEW", "EXA"]: + + hazardsFound.append(phenSig) + definition = self._hazardDefinition(phenSig) + if definition != "": + definitions.append(definition) + + summaryDict = collections.OrderedDict() + headlines = self._summaryHeadlines_value.split("\n") + headlinesInEffect = [] + for headline in headlines: + if len(headline) != 0: + headlinesInEffect.append(headline) + summaryDict['headlinesInEffect'] = headlinesInEffect + summaryDict['headlineDefinitions'] = definitions + segmentDict['summaryHeadlines'] = summaryDict + + def _locationsAffected(self, segmentDict, productSegmentGroup, productSegment): + segment, vtecRecords = productSegment + import TCVAreaDictionary + tcv_AreaDictionary = TCVAreaDictionary.TCV_AreaDictionary + + segmentDict['locationsAffected'] = [] + if segment in tcv_AreaDictionary: + segmentDict['locationsAffected'] = tcv_AreaDictionary[segment]["locationsAffected"] + + def _fcstConfidence(self, segmentDict, 
productSegmentGroup, productSegment): + # TODO - Get this from the TCM product potentially? Not included until provided from NHC + return "" + + def _infoSection(self, segmentDict, productSegmentGroup, productSegment): + segment, vtecRecords = productSegment + import TCVAreaDictionary + tcv_AreaDictionary = TCVAreaDictionary.TCV_AreaDictionary + + segment, vtecRecords = productSegment + infoSection = [] + if segment in tcv_AreaDictionary: + infoSection = tcv_AreaDictionary[segment]["infoSection"] + + segmentDict['infoSection'] = infoSection + + def _endSection(self, segmentDict, productSegmentGroup, productSegment): + segmentDict['endSection'] = "\n$$" + + ################# Product Parts Helper Methods + + def _hazardDefinition(self, phenSig): + import VTECTable + + phen, sig = phenSig.split('.') + headline = VTECTable.VTECTable[phenSig]["hdln"] + + definition = "A " + headline + " means " + + if phenSig == "HU.W": + definition += "hurricane-force winds are expected" + + elif phenSig == "HU.A": + definition += "hurricane-force winds are possible" + + elif phenSig == "TR.W": + definition += "tropical storm-force winds are expected" + + elif phenSig == "TR.A": + definition += "tropical storm-force winds are possible" + + elif phenSig == "SS.W": + definition += "there is a danger of life-threatening inundation, from rising water moving inland from the coastline," + + elif phenSig == "SS.A": + definition += "life-threatening inundation, from rising water moving inland from the coastline, is possible" + + else: + return "" + + if sig == "W": # Warning + definition += " somewhere within this area within the next 36 hours" + elif sig == "A": # Watch + definition += " somewhere within this area within the next 48 hours" + + return definition + + ############################################################### + ### Sampling and Statistics related methods + + def _threatKeyOrder(self): + return [None, "None", "Elevated", "Mod", "High", "Extreme"] + + def _sampleData(self, 
argDict): + # Sample the data + self._createSamplers(argDict) + + # We need to preserve the ordering of the zones based off the zone combiner ordering + sortedAreas = sorted(self._allAreas(), + key=lambda x: self._segmentList.index(x) if x in self._segmentList else 9999) + for segment in sortedAreas: + self._initializeSegmentZoneData(segment) + + # We need stats for all zones to be saved in the advisory, + # regardless of whether or not it has a hazard in it. Getting + # the stats causes them to be added to the advisory. + windStats, stormSurgeStats, floodingRainStats, tornadoStats = \ + self._getStats(self._argDict, + segment, + self._editAreaDict, + self._timeRangeList1Hour, + self._timeRangeList3Hour, + self._timeRangeList6Hour) + + # Only show zones with hazards in the output + if segment in self._segmentList: + # These segment sections will be added to the product parts + self._windSection[segment] = WindSection(self, segment, windStats) + self._stormSurgeSection[segment] = StormSurgeSection(self, segment, stormSurgeStats) + self._floodingRainSection[segment] = FloodingRainSection(self, segment, floodingRainStats) + self._tornadoSection[segment] = TornadoSection(self, segment, tornadoStats) + + def _createSamplers(self, argDict): + # Create the samplers used for sampling the data + editAreas = self._makeSegmentEditAreas(argDict) + + # The sampler used for Wind section related stats + self._sampler1Hour = self.getSampler(argDict, + (self._analysisList(), self._timeRangeList1Hour, editAreas)) + + # The sampler used for Flooding Rain and Storm Surge section related stats + self._sampler3Hour = self.getSampler(argDict, + (self._analysisList(), self._timeRangeList3Hour, editAreas)) + + # For storm surge, the edit areas are intersected with a special edit area. + # If there aren't any coastal areas, they won't have the special edit area + # though so don't execute this code in that case. 
+ if len(self._coastalAreas()) > 0: + intersectAreas = self._computeIntersectAreas(editAreas, argDict) + self._intersectSampler = self.getSampler(argDict, + (self._intersectAnalysisList(), self._timeRangeList6Hour, intersectAreas)) + + # Make a sample period for the previous rainfall + self._previousRainfallTR = [(self._extraSampleTimeRange, "PrevRainfall")] + self._extraRainfallSampler = self.getSampler(argDict, + (self._extraRainfallAnalysisList(), self._previousRainfallTR, + editAreas)) + + def _getStats(self, argDict, segment, editAreaDict, timeRangeList1Hour, timeRangeList3Hour, timeRangeList6Hour): + # Get statistics for this segment + + editArea = editAreaDict[segment] + + statList1Hour = self.getStatList(self._sampler1Hour, + self._analysisList(), + timeRangeList1Hour, + editArea) + + statList3Hour = self.getStatList(self._sampler3Hour, + self._analysisList(), + timeRangeList3Hour, + editArea) + + self.debug_print("*"*80, 1) + self.debug_print("editArea =" + editArea, 1) + self.debug_print("timeRangeList1Hour = %s" % (self._pp.pformat(timeRangeList1Hour)), 1) + self.debug_print("timeRangeList3Hour = %s" % (self._pp.pformat(timeRangeList3Hour)), 1) + self.debug_print("timeRangeList6Hour = %s" % (self._pp.pformat(timeRangeList6Hour)), 1) + self.debug_print("statList1Hour = %s" % (self._pp.pformat(statList1Hour)), 1) + self.debug_print("statList3Hour = %s" % (self._pp.pformat(statList3Hour)), 1) + self.debug_print("-"*40, 1) + + windStats = WindSectionStats(self, segment, statList1Hour, timeRangeList1Hour) + + # The surge section needs sampling done with an intersected edit area + if editArea in self._coastalAreas(): + intersectEditArea = "intersect_"+editArea + intersectStatList = self.getStatList(self._intersectSampler, + self._intersectAnalysisList(), + timeRangeList6Hour, + intersectEditArea) + else: + intersectStatList = "InlandArea" + + self.debug_print("intersectStatList = %s" % (self._pp.pformat(intersectStatList)), 1) + self.debug_print("-"*40, 1) + + 
stormSurgeStats = StormSurgeSectionStats(self, segment, intersectStatList, timeRangeList6Hour) + + # These stats are for handling the extra rainfall + extraRainfallStatList = self.getStatList(self._extraRainfallSampler, + self._extraRainfallAnalysisList(), + self._previousRainfallTR, + editArea) + + floodingRainStats = FloodingRainSectionStats(self, segment, + statList3Hour, timeRangeList3Hour, + extraRainfallStatList, self._previousRainfallTR) + tornadoStats = TornadoSectionStats(self, segment, statList3Hour, timeRangeList3Hour) + + return (windStats, stormSurgeStats, floodingRainStats, tornadoStats) + + ############################################################### + ### Area, Zone and Segment related methods + + def _determineSegments(self): + # Get the segments based on hazards "overlaid" with combinations file + + # Get the forecaster entered combinations + accessor = ModuleAccessor.ModuleAccessor() + self.debug_print("self._defaultEditAreas = %s" % (self._pp.pformat(self._defaultEditAreas)), 1) + combos = accessor.variable(self._defaultEditAreas, "Combinations") + # combos is a list of tuples. Each tuple is a grouping of zones (a list of zones, combo name). + if combos is None: + LogStream.logVerbose("Combination file not found: " + self._defaultEditAreas) + return [] + self.debug_print("Segments from Zone Combiner = %s" % (self._pp.pformat(combos)), 1) + + # "Overlay" the forecaster-entered combinations onto the segments + # so that the zones are ordered and grouped (as much as possible) + # as indicated in the zone combiner. + refinedHazardSegments = self._getRefinedHazardSegments(combos) + + # Instead of a segment being a group of zones, it will be just a single zone. + # So collapse this list of lists down to a list of zones (aka. 
segments) + segments = [] + for segment in refinedHazardSegments: + segments += segment + + return segments + + def _getRefinedHazardSegments(self, combos): + # Get a list of list of zones that are ordered and grouped + # based off of hazards and the provided zone combinations. + + # Get the raw analyzed table (a list of VTEC records) and organize the hazards + # to get a list of lists of zones that have the same hazards + self.debug_print("Raw Analyzed %s" % (self._pp.pformat(self._hazardsTable.rawAnalyzedTable())), 1) + hazSegments = self.organizeHazards(self._hazardsTable.rawAnalyzedTable()) + self.debug_print("Segments from HazardsTable organizeHazards %s" % (self._pp.pformat(hazSegments)), 1) + + # "Overlay" the forecaster-entered combinations onto the segments + # so that the zones are ordered and grouped (as much as possible) + # as indicated in the zone combiner. + refinedSegments = self._refineSegments(hazSegments, combos) + self.debug_print("New segments = %s" % (self._pp.pformat(refinedSegments)), 1) + + return refinedSegments + + def _refineSegments(self, hazSegments, combos): + """Reorder and regroup (as much as possible) the hazard segments + based off of the ordering and grouping in combos. Zones will + only be combined into groups if they share the same hazards + (regardless of whether they are grouped together in combos). + """ + if combos == []: + return hazSegments + newSegments = [] # list of lists + newAreas = [] + for combo, label in combos: + # Each combination will be tested to see if it can stay intact + # i.e. if all areas in the combo are in the same segment + # else split it into like segments + # + # segmentMapping is a list where each entry is + # the hazSegment in which the corresponding combo area appears. 
+ # (We need to define self._segmentList for the mapping function + # to use) + self._segmentList = hazSegments + self.debug_print("self._segmentList = %s" % (self._pp.pformat(self._segmentList)), 1) + self.debug_print("current combo = %s" % (self._pp.pformat(combo)), 1) + segmentMapping = list(map(self._findSegment, combo)) + self.debug_print(" segmentMapping = %s" % (self._pp.pformat(segmentMapping)), 1) + + # segmentDict keys will be the hazSegments and + # we will gather all the areas of the combos that appear + # in each of these hazSegments + segmentDict = {} + keyList = [] + for areaName in combo: + self.debug_print(" Adding %s" % (areaName), 1) + key = tuple(segmentMapping[combo.index(areaName)]) + if key == (): # If no hazard for area, do not include + continue + if key not in keyList: + keyList.append(key) + segmentDict.setdefault(key,[]).append(areaName) + self.debug_print(" segmentDict = %s" % (self._pp.pformat(segmentDict)), 1) + + # Keep track of the areas that we are including + for key in keyList: + segAreas = segmentDict[key] + newAreas = newAreas + segAreas + newSegments.append(segAreas) + self.debug_print(" newAreas = %s" % (self._pp.pformat(newAreas)), 1) + self.debug_print(" newSegments = %s" % (self._pp.pformat(newSegments)), 1) + self.debug_print(" newSegments = %s" % (self._pp.pformat(newSegments)), 1) + # Now add in the hazAreas that have not been accounted for + # in the combinations + for hazSegment in hazSegments: + newSeg = [] + for hazArea in hazSegment: + if hazArea not in newAreas: + newSeg.append(hazArea) + if newSeg != []: + newSegments.append(newSeg) + self.debug_print(" final newSegments = %s" % (self._pp.pformat(newSegments)), 1) + return newSegments + + def _makeSegmentEditAreas(self, argDict): + # Create the edit areas that will be sampled + areasList = self._allAreas() + self.debug_print("areasList = %s" % (self._pp.pformat(areasList)), 1) + editAreas = [] + self._editAreaDict = {} + for area in areasList: + 
self._editAreaDict[area] = area + editAreas.append((area, area)) + return editAreas + + def _findSegment(self, areaName): + # Determine which hazard group a zone belongs to + for segment in self._segmentList: + if areaName in segment: + return segment + return [] + + ############################################################### + ### Hazards related methods + + def _getAllVTECRecords(self): + allRecords = [] + # Only the segments in _segmentList contain hazards so no + # need to check everything in _allAreas() + for segment in self._segmentList: + allRecords += self._getVtecRecords(segment) + + return allRecords + + def _getHazardsForHLS(self): + # Get all the hazards so that the HLS will have access to them. + # Areas that share the same hazards are grouped together + # into a single hazard. + hazardTable = self._argDict["hazards"] + + # Create a single grouping of all zones. This will make it so that + # the hazards are grouped together as much as possible so that we + # don't repeat hazard information for zones in HLS. + combos = [([self._allAreas()], "AllAreas")] + + # "Overlay" this group of all zones onto the segments + # so that we get as few groups of zones as possible. 
+ refinedHazardSegments = self._getRefinedHazardSegments(combos) + + allHazards = [] + for segment in refinedHazardSegments: + hazardsList = hazardTable.getHazardList(segment) + for hazard in hazardsList: + # If this is a correction, don't generate new hazards, + # use the previous ones + if hazard['act'] == 'COR': + return self._previousAdvisory["HazardsForHLS"] + else: + # Tropical hazards shouldn't ever have EXT and EXB actions since + # they are "until further notice" + if hazard["act"] == "EXT": + hazard["act"] = "CON" + elif hazard["act"] == "EXB": + hazard["act"] = "EXA" + + allHazards.append(hazard) + + return allHazards + + ############################################################### + ### Time related methods + + def _convertToISO(self, time_ms, local=None): + # Convert milliseconds since the epoch to a date + import datetime + dt = datetime.datetime.fromtimestamp(time_ms / 1000) + if local: + timeZone = self._timeZones[0] + else: + timeZone = None + return self._tpc.formatDatetime(dt, timeZone=timeZone) + + def _convertToDatetime(self, time_ms): + import datetime + return datetime.datetime.fromtimestamp(time_ms / 1000) + + ############################################################### + ### Advisory related methods + + def _initializeSegmentZoneData(self, segment): + # The current advisory will be populated when getting a section's stats + self._currentAdvisory['ZoneData'][segment] = { + "WindThreat": None, + "WindForecast": None, + "WindHighestPhaseReached": None, + "highestHunkerDownWindThreat": "None", + "StormSurgeThreat": None, + "StormSurgeForecast": None, + "StormSurgeHighestPhaseReached": None, + "highestHunkerDownSurgeThreat": "None", + "FloodingRainThreat": None, + "FloodingRainForecast": None, + "TornadoThreat": None, + } + + # Make sure our highest threats and phases aren't lost + previousSegmentAdvisory = None + if self._previousAdvisory is not None: + previousSegmentAdvisory = self._previousAdvisory['ZoneData'][segment] + + if 
previousSegmentAdvisory is not None: + currentSegmentAdvisory = self._currentAdvisory['ZoneData'][segment] + + currentSegmentAdvisory["WindHighestPhaseReached"] = \ + previousSegmentAdvisory["WindHighestPhaseReached"] + + currentSegmentAdvisory["highestHunkerDownWindThreat"] = \ + previousSegmentAdvisory["highestHunkerDownWindThreat"] + + currentSegmentAdvisory["StormSurgeHighestPhaseReached"] = \ + previousSegmentAdvisory["StormSurgeHighestPhaseReached"] + + currentSegmentAdvisory["highestHunkerDownSurgeThreat"] = \ + previousSegmentAdvisory["highestHunkerDownSurgeThreat"] + + def _getPreviousAdvisories(self): + stormAdvisories = self._getStormAdvisoryNames() + + self.debug_print("DEBUG: stormAdvisories = %s" % + (self._pp.pformat(stormAdvisories)), 1) + + previousAdvisories = [] + + # Get the current storm number from the TCP (ie. AL092016) + curStormNumber = self._getStormNumberStringFromTCP() + self.debug_print("DEBUG: curStormNumber = %s" % + (curStormNumber), 1) + + # Filter out the advisories we wish to process further + for advisory in stormAdvisories: + + # If this was an advisory for the current storm + if advisory.startswith(curStormNumber): + + # Load this advisory for this storm + curAdvisory = self._loadAdvisory(advisory) + + if curAdvisory is not None: + previousAdvisories.append(curAdvisory) + + + self.debug_print("DEBUG: previous advisories = %s" % + (self._pp.pformat(previousAdvisories)), 1) + + return previousAdvisories + + def _archiveCurrentAdvisory(self): + ### Determine if all actions are canceled + allCAN = True + for vtecRecord in self._getAllVTECRecords(): + action = vtecRecord['act'] + if action != "CAN": + allCAN = False + break + + self._currentAdvisory["AllCAN"] = allCAN + self._currentAdvisory["CreationTime"] = self._issueTime_secs + self._currentAdvisory["Transmitted"] = False + self._currentAdvisory["StormName"] = self._getStormNameFromTCP() + self._currentAdvisory["StormNumber"] = self._getStormNumberStringFromTCP() + 
self._currentAdvisory["StormID"] = self._getStormIDStringFromTCP() + self._currentAdvisory["AdvisoryNumber"] = self._getAdvisoryNumberStringFromTCP() + self._currentAdvisory["HazardsForHLS"] = self._getHazardsForHLS() + + self._saveAdvisory("pending", self._currentAdvisory) + + def _saveAdvisory(self, advisoryName, advisoryDict): + self._synchronizeAdvisories() + fileName = self._getAdvisoryFilename(advisoryName) + + self.debug_print("Saving %s to %s" % (advisoryName, fileName), 1) + self.debug_print("advisoryDict: %s" % (self._pp.pformat(advisoryDict)), 1) + + try: + JsonSupport.saveToJson(LocalizationSupport.CAVE_STATIC, + self._site, + fileName, + advisoryDict) + except Exception as e: + LogStream.logProblem("Exception saving %s: %s" % (fileName, LogStream.exc())) + else: # No exceptions occurred + self.debug_print("Wrote file contents for: %s" % (fileName), 1) + + # Purposely allow this to throw + self._synchronizeAdvisories() + + ############################################################### + ### GUI related methods + + def _overview_list(self): + if self._site == "HFO": + stormInfoOptions = ["TCPCP1", "TCPCP2", "TCPCP3", "TCPCP4", "TCPCP5"] + else: + stormInfoOptions = ["TCPAT1", "TCPAT2", "TCPAT3", "TCPAT4", "TCPAT5"] + + stormInfoOptions.append("Enter PIL below (e.g. 
WRKTCP):") + + return [ + { + "name": "StormInfo", + "label": "Obtain Storm Type/Name/Info", + "options": stormInfoOptions, + "entryField": " ", + }, + { + "name": "PopulateSurge", + "label": "Populate Surge Section", + "options": [ + ("Populate", True), + ("Do not populate", False), + ], + "default": "Populate", + }, + { + "name": "WSPGridsAvailable", + "label": "Are WSP grids available?", + "options": [ + ("Yes", True), + ("No", False), + ], + "default": "Yes", + }, + ] + + def _displayGUI(self, infoDict=None): + dialog = Overview_Dialog(self, "TCV", infoDict) + status = dialog.status() + LogStream.logVerbose("status="+status) + if status == "Cancel": + return None + else: + return dialog.getVarDict() + +import tkinter +class Overview_Dialog(HLSTCV_Common.Common_Dialog): + def __init__(self, parent, title, infoDict=None): + HLSTCV_Common.Common_Dialog.__init__(self, parent, title, infoDict) + + def body(self, master): + # build the main display dialog + tkObject_dict = self._tkObject_dict + overviewList = self._parent._overview_list() + fontDict = self._parent._font_GUI_dict() + + # OVERVIEW header + headerFG, headerFont = fontDict["headers"] + frame = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=1) + frame.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) + + numBoxes = 2 + + boxes = [] + for i in range(numBoxes): + newBox = tkinter.Frame(master) + newBox.pack(side=tkinter.TOP, expand=tkinter.NO, + fill=tkinter.Y, anchor=tkinter.W) + boxes.append(newBox) + + for infoDict in overviewList: + name = infoDict["name"] + label = infoDict["label"] + options = infoDict.get("options", []) + entryField = infoDict.get("entryField", None) + default = infoDict.get("default", None) + optionType = infoDict.get("optionType", "radio") + + index = overviewList.index(infoDict) + if index == 0: + boxNum = 0 + buttonSide=tkinter.TOP + frameSide = tkinter.LEFT + else: + boxNum = 1 + buttonSide=tkinter.LEFT + frameSide=tkinter.TOP + + box = boxes[boxNum] + + 
tkObject_dict[name], entryObject = self._makeRadioOrCheckList( + box, label, options, default, buttonSide=buttonSide, frameSide=frameSide, + entryField=entryField, headerFG=headerFG, + headerFont=headerFont, boxType=optionType) + if entryObject is not None: + tkObject_dict[self._entryName(name)] = entryObject + + # End Instructions and Button + frame = tkinter.Frame(master, relief=tkinter.GROOVE, borderwidth=1) + self._makeButtons(frame) + frame.pack(side=tkinter.TOP, fill=tkinter.X, expand=tkinter.NO) + + def _makeButtons(self, master): + frame = tkinter.Frame(master) + buttonList = self._parent._GUI1_configDict().get("buttonList", []) + for button, label in buttonList: + if button == "Run": + command = self.okCB + else: # Cancel + command = self.cancelCB + tkinter.Button(frame, text=label, command=command, width=10, + state=tkinter.NORMAL).pack(side=tkinter.LEFT, pady=5, padx=10) + frame.pack() + + def okCB(self): + # pull the data from the tkObject_dict before they get toasted + tkObject_dict = self._tkObject_dict + overviewList = self._parent._overview_list() + for infoDict in overviewList: + name = infoDict["name"] + label = infoDict["label"] + options = infoDict.get("options", []) + entryField = infoDict.get("entryField", None) + default = infoDict.get("default", None) + optionType = infoDict.get("optionType", "radio") + + if optionType == "check": + checkList = [] + ivarList = tkObject_dict[name] + for i in range(len(options)): + if ivarList[i].get(): + checkList.append(options[i]) + value = checkList + self._setVarDict(name, value) + else: + value = tkObject_dict[name].get() + self._setVarDict(name, value, options) + + if entryField is not None: + entryName = self._entryName(name) + self._setVarDict(entryName, tkObject_dict[entryName].get()) + # close window and set status "Ok" + self._status = "Ok" + self.withdraw() + self.ok() + + +class SectionCommon(): + def __init__(self, textProduct, segment, sectionHeaderName): + self._textProduct = textProduct + 
self._sectionHeaderName = sectionHeaderName + self._segment = segment + self._tr = None + self.isThreatNoneForEntireStorm = False + + def _isThreatNoneForEntireStorm(self, threatName): + previousAdvisories = self._textProduct._getPreviousAdvisories() + + # For the first advisory, this needs to be false otherwise + # potential impacts could be wrong + if len(previousAdvisories) == 0: + return False + + for advisory in previousAdvisories: + if advisory["ZoneData"][self._segment][threatName] != "None": + return False + + return False + + def _setProductPartValue(self, dictionary, productPartName, value): + dictionary[self._sectionName + '._' + productPartName] = value + + def _finalSectionParts(self, segment_vtecRecords_tuple, parts): + finalParts = [] + for partName in parts: + if partName not in self._textProduct._noOpParts(): + finalParts.append(self._sectionName + '._' + partName) + else: + finalParts.append(partName) + + return [{ + 'arguments': segment_vtecRecords_tuple, + 'partsList': finalParts + }] + + def _sectionHeader(self, segmentDict, productSegmentGroup, productSegment): + self._setProductPartValue(segmentDict, 'sectionHeader', self._sectionHeaderName) + + def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): + if self._stats._maxThreat is not None: + threatLevel = self._stats._maxThreat + if threatLevel == "Mod": + threatLevel = "Moderate" + + threatStatement = \ + self._textProduct._threatPhrase[self._sectionHeaderName][threatLevel] + + self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', "POTENTIAL THREAT TO LIFE AND PROPERTY: " + threatStatement) + + # This new method will convert the single word threat trend into + # an appropriate sentence + def _getThreatTrendSentence(self, section, threatTrendValue): + + if threatTrendValue.upper() == "INCREASING": + text = "The %s threat has increased" % (section) + elif threatTrendValue.upper() == "DECREASING": + text = "The %s threat has decreased" % (section) 
+ elif threatTrendValue.upper() == "NEARLY STEADY": + text = "The %s threat has remained nearly steady" % (section) + + return text + " from the previous assessment." + + def _getThreatTrendValue(self, elementName, magnitudeIncreaseThreshold): + threatKey = elementName + "Threat" + forecastKey = elementName + "Forecast" + + self._textProduct.debug_print("THREAT DEBUG for %s" % (elementName), 1) + + self._textProduct.debug_print("getThreatTrendValue _currentAdvisory =\n%s" % (self._textProduct._pp.pformat(self._stats._currentAdvisory)), 1) + self._textProduct.debug_print("getThreatTrendValue _previousAdvisory =\n%s" % (self._textProduct._pp.pformat(self._stats._previousAdvisory)), 1) + + if (self._stats._currentAdvisory is None) or (self._stats._previousAdvisory is None): + # Only compute a threat trend if we have 2 or more advisories + return None + + currentThreat = self._stats._currentAdvisory[threatKey] + previousThreat = self._stats._previousAdvisory[threatKey] + shorterTermTrendDifference = self._threatDifference(currentThreat, previousThreat) + + self._textProduct.debug_print("currentThreat = %s" % (self._textProduct._pp.pformat(currentThreat)), 1) + self._textProduct.debug_print("previousThreat = %s" % (self._textProduct._pp.pformat(previousThreat)), 1) + self._textProduct.debug_print("shorterTermTrendDifference = %s" % (shorterTermTrendDifference), 1) + + previousPreviousThreat = None + longerTermTrendDifference = None + if self._stats._previousPreviousAdvisory is not None: + self._textProduct.debug_print("_previousPreviousAdvisory is not None", 1) + previousPreviousThreat = self._stats._previousPreviousAdvisory[threatKey] + self._textProduct.debug_print("previousPreviousThreat = %s" % (self._textProduct._pp.pformat(previousPreviousThreat)), 1) + longerTermTrendDifference = self._threatDifference(currentThreat, previousPreviousThreat) + self._textProduct.debug_print("longerTermTrendDifference = %s" % (longerTermTrendDifference), 1) + + threatTrendValue = 
"NEARLY STEADY" + self._textProduct.debug_print("magnitudeIncreaseThreshold = %s forecastKey = '%s'" % (magnitudeIncreaseThreshold, forecastKey), 1) + if self._isThreatDecreasing(shorterTermTrendDifference, longerTermTrendDifference): + self._textProduct.debug_print("threat is decreasing", 1) + threatTrendValue = "DECREASING" + elif self._isThreatIncreasing(shorterTermTrendDifference, longerTermTrendDifference): + self._textProduct.debug_print("threat is increasing", 1) + threatTrendValue = "INCREASING" + # NOTE: Modified so more threat levels can be classified as increasing when forecast has increased + elif currentThreat in ["Mod", "High", "Extreme"] and \ + self._isMagnitudeIncreasing(forecastKey, magnitudeIncreaseThreshold): + self._textProduct.debug_print("Increasing based on magnitude", 1) + threatTrendValue = "INCREASING" + + return threatTrendValue + + def _threatDifference(self, threat1, threat2): + threatLevels = self._textProduct._threatKeyOrder() + self._textProduct.debug_print("threat1 index = %s" % (threatLevels.index(threat1)), 1) + self._textProduct.debug_print("threat2 index = %s" % (threatLevels.index(threat2)), 1) + return threatLevels.index(threat1) - threatLevels.index(threat2) + + def _isThreatDecreasing(self, shorterTermTrendDifference, longerTermTrendDifference): + #If the current threat is at least 1 category lower than both previous advisories + if (shorterTermTrendDifference < 0 and \ + longerTermTrendDifference is not None and \ + longerTermTrendDifference < 0): + self._textProduct.debug_print("the current threat is at least 1 category lower than both previous advisories", 1) + return True + #Or if the current threat decreased by more than 1 category + elif shorterTermTrendDifference < -1: + self._textProduct.debug_print("the current threat decreased by more than 1 category", 1) + return True + else: + self._textProduct.debug_print("the current threat is not decreasing", 1) + return False + + def _isThreatIncreasing(self, 
shorterTermTrendDifference, longerTermTrendDifference): + #If the current threat is at least 1 category higher than both previous advisories + if (shorterTermTrendDifference > 0 and \ + longerTermTrendDifference is not None and \ + longerTermTrendDifference > 0): + self._textProduct.debug_print("the current threat is at least 1 category higher than both previous advisories", 1) + return True + #Or if the current threat increased by more than 1 category + elif shorterTermTrendDifference > 1: + self._textProduct.debug_print("the current threat increased by more than 1 category", 1) + return True + else: + self._textProduct.debug_print("the current threat is not increasing", 1) + return False + + def _advisoryHasValidKey(self, advisory, key): + return (advisory is not None) and \ + (key in advisory) and \ + (advisory[key] is not None) + + def _isMagnitudeIncreasing(self, forecastKey, threshold): +# currentValue, previousValue, previousPreviousValue + self._textProduct.debug_print("_isMagnitudeIncreasing", 1) + self._textProduct.debug_print("forecastKey = %s" % (forecastKey), 1) + self._textProduct.debug_print("threshold = %s" % (threshold), 1) + + if self._advisoryHasValidKey(self._stats._currentAdvisory, forecastKey) and \ + self._advisoryHasValidKey(self._stats._previousAdvisory, forecastKey): + currentValue = self._stats._currentAdvisory[forecastKey] + previousValue = self._stats._previousAdvisory[forecastKey] + self._textProduct.debug_print("currentValue = %s" % (currentValue), 1) + self._textProduct.debug_print("previousValue = %s" % (previousValue), 1) + + if (currentValue - previousValue) >= threshold: + self._textProduct.debug_print("the current magnitude has increased by more than the threshold since the last advisory", 1) + return True + elif self._advisoryHasValidKey(self._stats._previousPreviousAdvisory, forecastKey): + previousPreviousValue = self._stats._previousPreviousAdvisory[forecastKey] + self._textProduct.debug_print("previousPreviousValue = %s" % 
(previousPreviousValue), 1) + + if (currentValue - previousPreviousValue) >= threshold: + self._textProduct.debug_print("the current magnitude has increased by more than the threshold since the previous previous advisory", 1) + return True + else: + self._textProduct.debug_print("the current magnitude does not meet the requirements to be considered increasing", 1) + return False + else: + self._textProduct.debug_print("the current magnitude did not increase past threshold and could not look at the previous previous advisory", 1) + return False + else: + self._textProduct.debug_print("the current advisory and/or previous advisory did not have key: %s" % (forecastKey), 1) + return False + + def _calculateThreatStatementTr(self, onsetHour, endHour, section): + phase = "default" + + self._textProduct.debug_print("section = %s" % (section), 1) + self._textProduct.debug_print("onset hour = %s" % (onsetHour), 1) + self._textProduct.debug_print("end hour = %s" % (endHour), 1) + + if section == "Wind": + threatGrid = "WindThreat" + highestHunkerDownThreatKey = "highestHunkerDownWindThreat" + elif section == "Surge": + threatGrid = "StormSurgeThreat" + highestHunkerDownThreatKey = "highestHunkerDownSurgeThreat" + + previousSegmentAdvisory = None + if self._textProduct._previousAdvisory is not None: + previousSegmentAdvisory = \ + self._textProduct._previousAdvisory['ZoneData'][self._segment] + currentSegmentAdvisory = \ + self._textProduct._currentAdvisory['ZoneData'][self._segment] + + if (onsetHour is not None): + if onsetHour > 36: + phase = "check plans" + elif onsetHour > 6: + phase = "complete preparations" + elif (onsetHour <= 6) and (endHour is not None) and (endHour > 0): + phase = "hunker down" + + previousHighestHunkerDownThreat = None + if previousSegmentAdvisory is not None: + previousHighestHunkerDownThreat = \ + previousSegmentAdvisory[highestHunkerDownThreatKey] + + self._textProduct.debug_print( + "%s previous highest hunker down threat is -> %s for %s" + % 
(section, previousHighestHunkerDownThreat, self._segment), 1) + + currentHunkerDownThreat = currentSegmentAdvisory[threatGrid] + + threatSeverity = {threat:severity for severity,threat in + enumerate(self._textProduct._threatKeyOrder())} + + if threatSeverity.get(currentHunkerDownThreat) > \ + threatSeverity.get(previousHighestHunkerDownThreat): + + currentSegmentAdvisory[highestHunkerDownThreatKey] = \ + currentHunkerDownThreat + + self._textProduct.debug_print( + "%s current highest hunker down threat is -> %s for %s" + % (section, currentSegmentAdvisory[highestHunkerDownThreatKey], self._segment), 1) + + self._textProduct.debug_print( + "Before default phase handling. %s phase is currently -> %s for %s" + % (section, phase, self._segment), 1) + + # We are here because we had no onset time + if phase == "default": + if currentSegmentAdvisory[threatGrid] in \ + ["Elevated", "Mod", "High", "Extreme"]: + + phase = "check plans" + + # Checking to see if we ever had a threat. If so, set to recovery + elif self._pastThreatsNotNone(threatGrid): + phase = "recovery" + + # If we are still default, that means we have no onset and have + # never had any threat + if phase == "default": + phase = "check plans" + + self._textProduct.debug_print( + "After default phase handling. 
%s phase is -> %s for %s" + % (section, phase, self._segment), 1) + + # --------------------------------------------------------------------- + + # "default" isn't ordered because it can occur at multiple points + # before the recovery phase + phaseOrder = [None, "check plans", "complete preparations", + "hunker down", "recovery"] + + if self._sectionHeaderName == "Storm Surge": + highestPhaseReachedKey = "StormSurgeHighestPhaseReached" + else: + # Flooding Rain and Tornado are tied to Wind so that's why they use + # Wind's phase + highestPhaseReachedKey = "WindHighestPhaseReached" + + previousHighestPhaseReached = None + if previousSegmentAdvisory is not None: + previousHighestPhaseReached = \ + previousSegmentAdvisory[highestPhaseReachedKey] + + self._textProduct.debug_print( + "%s previous highestPhaseReached is -> '%s' for '%s'" % + (self._sectionHeaderName, previousHighestPhaseReached, self._segment), 1) + + # Don't allow the event to regress to an earlier phase + if previousHighestPhaseReached == "recovery": + phase = "recovery" + + previousHighestPhaseIndex = phaseOrder.index(previousHighestPhaseReached) + currentPhaseIndex = phaseOrder.index(phase) + + if currentPhaseIndex > previousHighestPhaseIndex: + currentSegmentAdvisory[highestPhaseReachedKey] = phase + + currentHighestPhaseReached = currentSegmentAdvisory[highestPhaseReachedKey] + self._textProduct.debug_print( + "End of method. %s current phase is -> %s for %s" % + (section, phase, self._segment), 1) + self._textProduct.debug_print( + "End of method. 
%s current highestPhaseReached is -> %s for %s" % + (section, currentHighestPhaseReached, self._segment), 1) + + return currentHighestPhaseReached + + def _pastThreatsNotNone(self, threatGrid): + + # Will need to modify this to be both Wind and Surge once SS codes are added + previousAdvisories = self._textProduct._getPreviousAdvisories() + + # If there are NOT any advisories to process - no need to continue + if len(previousAdvisories) == 0: + return False + + # Look at all past advisories for this storm + for advisory in previousAdvisories: + + # We had a threat previously + if advisory["ZoneData"][self._segment][threatGrid] in ["Elevated", "Mod", "High", "Extreme"]: + return True + + return False + + def _setThreatStatementsProductParts(self, segmentDict, productSegment, tr): + + self._textProduct.debug_print("tr = %s %s" % + (self._textProduct._pp.pformat(tr), self._sectionHeaderName), 1) +# if tr is not None and self._stats._maxThreat is not None: + if tr is not None: + (planning, action, preparation) = self._getThreatStatements(productSegment, + self._sectionHeaderName, + self._stats._maxThreat, + tr) + + self._setProductPartValue(segmentDict, 'threatStatements', + [planning, action, preparation]) + else: + self._textProduct.debug_print("this is not a valid time range", 1) + return + + def _getThreatStatements(self, productSegment, sectionName, maxThreat, tr): +# import TCVDictionary +# threatStatements = TCVDictionary.ThreatStatements + + with open("/awips2/cave/etc/gfe/userPython/utilities/TCVDictionary.py", 'r') as pythonFile: + fileContents = pythonFile.read() + exec(fileContents) + + # ThreatStatements comes from TCVDictionary.py when it is exec'ed + threatStatements = ThreatStatements + + if tr == "recovery": + if "Surge" in sectionName: + maxThreat = self._textProduct._currentAdvisory['ZoneData'][self._segment]['highestHunkerDownSurgeThreat'] + elif "Wind" in sectionName: + maxThreat = 
self._textProduct._currentAdvisory['ZoneData'][self._segment]['highestHunkerDownWindThreat'] + + + self._textProduct.debug_print(40*"-", 1) + self._textProduct.debug_print("sectionName = %s, maxThreat = %s, tr = %s" % + (sectionName, maxThreat, self._textProduct._pp.pformat(tr)), 1) + +# if maxThreat is None: +# maxThreat = "None" + + statements = threatStatements[sectionName][maxThreat][tr] + planning = statements["planning"] + preparation = statements["preparation"] + action = statements["action"] + + return (planning, preparation, action) + + def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): + if self._stats._maxThreat is not None: + summary = self._getPotentialImpactsSummaryText(self._stats._maxThreat) + self._setProductPartValue(segmentDict, 'potentialImpactsSummary', summary) + + def _getPotentialImpactsSummaryText(self, maxThreat): + if self.isThreatNoneForEntireStorm: + return "POTENTIAL IMPACTS: Little to None" + if self._tr is not None and self._sectionHeaderName in ["Wind", "Storm Surge"]: + if self._tr == "hunker down": + return "POTENTIAL IMPACTS: Unfolding" + elif self._tr == "recovery": + return "REALIZED IMPACTS: Being Assessed" + + if maxThreat == "Extreme": + impactLevel = "Devastating to Catastrophic" + elif maxThreat == "High": + impactLevel = "Extensive" + elif maxThreat == "Mod": + impactLevel = "Significant" + elif maxThreat == "Elevated": + impactLevel = "Limited" + else: + impactLevel = "Little to None" + + return "POTENTIAL IMPACTS: " + impactLevel + + def _potentialImpactsStatements(self, segmentDict, productSegmentGroup, productSegment): + self._textProduct.debug_print("segment = %s, elementName = %s, maxThreat = %s" % + (productSegment[0], self._sectionHeaderName, self._stats._maxThreat), 1) + if self._stats._maxThreat is not None: + statements = self._getPotentialImpactsStatements(productSegment, self._sectionHeaderName, self._stats._maxThreat) + self._setProductPartValue(segmentDict, 
'potentialImpactsStatements', statements) + + def _getPotentialImpactsStatements(self, productSegment, elementName, maxThreat): + import TCVDictionary + potentialImpactStatements = TCVDictionary.PotentialImpactStatements + statements = potentialImpactStatements[elementName][maxThreat] + + import TCVAreaDictionary + tcv_AreaDictionary = TCVAreaDictionary.TCV_AreaDictionary + + segment, vtecRecords = productSegment + + self._textProduct.debug_print("zone number = %s, elementName = %s, maxThreat = %s, tr = %s" % + (segment, elementName, maxThreat, self._tr), 1) + + if segment in tcv_AreaDictionary: + potentialImpactStatements = tcv_AreaDictionary[segment]["potentialImpactsStatements"] + + # Check for any overrides + try: + statements = potentialImpactStatements[elementName][maxThreat] + except KeyError: + pass + + if self.isThreatNoneForEntireStorm: + return statements + + if self._tr is not None: + specialStatements = self._specialImpactsStatements() + if self._tr in list(specialStatements.keys()): + if self._tr in ["recovery", "hunker down"] and self.isThreatNoneForEntireStorm: + return statements + else: + return specialStatements[self._tr] + + # If this is the "default" case + if self._tr == "default" and len(statements) > 0: + if elementName in ["Wind", "Storm Surge"]: + if statements[0].find("If realized, ") == -1: + statements[0] = "If realized, " + statements[0][0].lower() + statements[0][1:] + + return statements + + # Specific hazard sections can override this to provide special impacts statements + def _specialImpactsStatements(self): + return {} + + def _preparationStatement(self, severityString): + preparationStatement = "" + if severityString == "Devastating" or severityString == "Extensive impacts": + preparationStatement += "Aggressive " + + preparationStatement += "preparations should be made for chance of " + + if severityString == "Devastating": + preparationStatement += "devastating to catastrophic" + elif severityString == "Extensive impacts": + 
preparationStatement += "extensive" + elif severityString == "Significant": + preparationStatement += "significant" + elif severityString == "Limited": + preparationStatement += "limited" + + preparationStatement += " impacts based on latest threat" + + return preparationStatement + +class WindSection(SectionCommon): + def __init__(self, textProduct, segment, stats): + SectionCommon.__init__(self, textProduct, segment, "Wind") + self._sectionName = 'windSection[\'' + segment + '\']' + self._stats = stats + self.isThreatNoneForEntireStorm = self._isThreatNoneForEntireStorm("WindThreat") + + def sectionParts(self, segment_vtecRecords_tuple): + parts = [ + 'sectionHeader', + 'forecastSubsection', + 'threatSubsection', + 'impactsSubsection', + ] + + return self._finalSectionParts(segment_vtecRecords_tuple, parts) + + def _forecastSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._latestForecastSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._WSPGridsAvailable: + self._peakWind(subsectionDict, productSegmentGroup, productSegment) + self._windowTS(subsectionDict, productSegmentGroup, productSegment) + self._windowHU(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'forecastSubsection', subsectionDict) + + def _latestForecastSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._WSPGridsAvailable: + self._setProductPartValue(segmentDict, 'latestForecastSummary', + "LATEST LOCAL FORECAST: Not available at this time. 
To be updated shortly.") + elif self._stats._maxWind is None: + self._setProductPartValue(segmentDict, 'latestForecastSummary', + "No wind forecast") + else: + categoryLabel = None + categories = self._moderatedMaxWindMph_categories() + moderatedMaxWind = self._ktToMph(self._stats._maxWind, "Wind") + for key in list(categories.keys()): + minVal, maxVal = categories[key] + if minVal <= moderatedMaxWind and moderatedMaxWind < maxVal: + categoryLabel = key + break + + forecastText = "LATEST LOCAL FORECAST: " + if categoryLabel is not None: + forecastText += "Equivalent " + categoryLabel + " force wind" + else: + segment, vtecRecords = productSegment + numRecords = len(vtecRecords) + possibleHazardsFound = False + + for i in range(numRecords): + vtecRecord = vtecRecords[i] + if (vtecRecord["phensig"] in ["HU.A", "HU.W", "TR.A", "TR.W"] or \ + self._stats._windowTS is not None) and \ + vtecRecord["act"] != "CAN": + forecastText += "Tropical storm force winds remain possible" + possibleHazardsFound = True + break + if not possibleHazardsFound: + forecastText += "Below tropical storm force wind" + + self._setProductPartValue(segmentDict, 'latestForecastSummary', forecastText) + + def _peakWind(self, segmentDict, productSegmentGroup, productSegment): + if self._stats._maxWind is not None: + windText = "Peak Wind Forecast: " + moderatedMaxWind = self._ktToMph(self._stats._maxWind, "Wind") + if moderatedMaxWind >= 74: + maxRange = 20 + elif moderatedMaxWind >= 58: + maxRange = 15 + elif moderatedMaxWind >= 20: + maxRange = 10 + else: + maxRange = 5 + + windText += str(int(moderatedMaxWind - maxRange)) + "-" + str(int(moderatedMaxWind)) + " mph" + if self._stats._maxGust is not None: + moderatedMaxWindGust = self._ktToMph(self._stats._maxGust, "WindGust") + +# # We want to round the wind gust to the nearest 5 kt +# moderatedMaxWindGust = \ +# self._textProduct.round(moderatedMaxWindGust, "Nearest", 5) + + windText += " with gusts to " + str(int(moderatedMaxWindGust)) + " mph" 
+ + self._setProductPartValue(segmentDict, 'peakWind', windText) + + def _windowTS(self, segmentDict, productSegmentGroup, productSegment): + if self._stats._windowTS is not None: + self._setProductPartValue(segmentDict, 'windowTS', self._stats._windowTS) + + def _windowHU(self, segmentDict, productSegmentGroup, productSegment): + if self._stats._windowHU is not None: + self._setProductPartValue(segmentDict, 'windowHU', self._stats._windowHU) + + def _threatSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._lifePropertyThreatSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._WSPGridsAvailable: + self._threatTrend(subsectionDict, productSegmentGroup, productSegment) + self._threatStatements(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'threatSubsection', subsectionDict) + + def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._WSPGridsAvailable: + self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', + "Threat to Life and Property: Not available at this time. 
To be updated shortly.") + else: + SectionCommon._lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment) + + def _threatTrend(self, segmentDict, productSegmentGroup, productSegment): + threatTrendValue = \ + self._getThreatTrendValue("Wind", + magnitudeIncreaseThreshold=self._textProduct.mphToKt(15)) + + if threatTrendValue is not None: + # Convert the threat trend to a sentence + threatTrendSentence = \ + self._getThreatTrendSentence("wind", threatTrendValue) + + self._setProductPartValue(segmentDict, 'threatTrend', + threatTrendSentence) + + def _threatStatements(self, segmentDict, productSegmentGroup, productSegment): + self._tr = self._calculateThreatStatementTr(self._stats._onset34Hour, + self._stats._end34Hour, "Wind") + self._textProduct.debug_print("in _threatStatements tr = %s" % + (self._textProduct._pp.pformat(self._tr)), 1) + + if not hasattr(self._textProduct, "_windThreatStatementsTr"): + self._textProduct._windThreatStatementsTr = dict() + + self._textProduct._windThreatStatementsTr[self._segment] = self._tr + + self._setThreatStatementsProductParts(segmentDict, productSegment, + self._tr) + + def _impactsSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._potentialImpactsSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._WSPGridsAvailable: + self._potentialImpactsStatements(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict) + + def _specialImpactsStatements(self): + return {"hunker down": ["Potential impacts from the main wind event are unfolding.", + # "The extent of realized impacts will depend on the actual strength, duration, and exposure of the wind as experienced at particular locations.", + ], + "recovery": ["Little to no additional wind impacts expected. 
Community officials are now assessing the extent of actual wind impacts accordingly.", + ], + } + + def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._WSPGridsAvailable: + self._setProductPartValue(segmentDict, 'potentialImpactsSummary', + "POTENTIAL IMPACTS: Not available at this time. To be updated shortly.") + else: + SectionCommon._potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment) + + ### Supporting functions + def _moderatedMaxWindMph_categories(self): + # Dictionary representing wind thresholds in kts + # for category 1, 2, 3, 4 or 5 hurricanes. + return { + 'Cat 5 Hurricane': (157, 999), + 'Cat 4 Hurricane': (130, 157), + 'Cat 3 Hurricane': (111, 130), + 'Cat 2 Hurricane': ( 96, 111), + 'Cat 1 Hurricane': ( 74, 96), + 'Strong Tropical Storm': ( 58, 73), + 'Tropical Storm': ( 39, 58), + } + + def _ktToMph(self, value, element): + newVal = self._textProduct.ktToMph(value) + newVal = self._textProduct.round(newVal, "Nearest", self._increment(element)) + return newVal + + # This is a very simple way to round values -- if we need + # something more sophisticated, we'll add it later. 
+ def _increment(self, element): + dict = { + "Wind": 5, + "WindGust": 5, + "InundationMax": 0.1, + } + return dict.get(element, 0) + +class StormSurgeSection(SectionCommon): + def __init__(self, textProduct, segment, stats): + SectionCommon.__init__(self, textProduct, segment, "Storm Surge") + self._sectionName = 'stormSurgeSection[\'' + segment + '\']' + self._stats = stats + self.isThreatNoneForEntireStorm = self._isThreatNoneForEntireStorm("StormSurgeThreat") + + def sectionParts(self, segment_vtecRecords_tuple): + parts = [ + 'sectionHeader', + 'forecastSubsection', + 'threatSubsection', + 'impactsSubsection', + ] + + return self._finalSectionParts(segment_vtecRecords_tuple, parts) + + def _forecastSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._latestForecastSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._PopulateSurge: + self._peakSurge(subsectionDict, productSegmentGroup, productSegment) + self._surgeWindow(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'forecastSubsection', subsectionDict) + + def _latestForecastSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._PopulateSurge: + self._setProductPartValue(segmentDict, 'latestForecastSummary', + "LATEST LOCAL FORECAST: Not available at this time. 
To be updated shortly.") + + elif "None" in self._stats._windowSurge or \ + self._stats._inundationMax is None or \ + self._stats._inundationMax <= 1: + self._setProductPartValue(segmentDict, 'latestForecastSummary', + "No storm surge inundation forecast") + else: + max = self._stats._inundationMax + summary = "LATEST LOCAL FORECAST: " + + if 1 < max and max < 4: + summary += "Localized" + elif 4 <= max and max < 12: + summary += "Life-threatening" + else: + summary += "Life-threatening and historic" + + self._setProductPartValue(segmentDict, 'latestForecastSummary', + summary + " storm surge possible") + + def _peakSurge(self, segmentDict, productSegmentGroup, productSegment): + self._textProduct.debug_print("_peakSurge _inundationMax = %s" % (self._stats._inundationMax), 1) + + # DR 17727: To make the output consistent, max threat should be calculated here + self._stats._maxThreat = "None" + + if self._stats._inundationMax is not None and self._stats._inundationMax > 1: + max = self._stats._inundationMax + if max > 10: + maxRange = 4 + self._stats._maxThreat = "Extreme" + elif max > 6: + maxRange = 3 + if max > 9: + self._stats._maxThreat = "Extreme" + else: + self._stats._maxThreat = "High" + elif max >= 3: + maxRange = 2 + if max > 3: + self._stats._maxThreat = "Mod" + else: + self._stats._maxThreat = "Elevated" + else: + maxRange = None + if max > 1: + self._stats._maxThreat = "Elevated" + + self._textProduct.debug_print("_peakSurge maxRange = %s" % (maxRange), 1) + self._textProduct.debug_print("_peakSurge _maxThreat = %s" % (self._stats._maxThreat), 1) + + # Save off the surge threat to the advisory + self._textProduct._currentAdvisory['ZoneData'][self._segment]["StormSurgeThreat"] = self._stats._maxThreat + + if maxRange is not None: + words = str(int(max - maxRange)) + "-" + str(int(max)) + " feet above ground" + elif max > 0: + + # We were getting really weird values of peak surge + # (e.g. "up to 1.70000004768 feet"). 
This fix will round up + # to the nearest integer value +# words = "up to " + str(max) + " feet above ground" + words = "up to " + str(int(max + 0.5)) + " feet above ground" + else: + words = "" + + if len(words) > 0: + self._setProductPartValue(segmentDict, 'peakSurge', + "Peak Storm Surge Inundation: The potential for " + words + " somewhere within surge prone areas") + else: + self._setProductPartValue(segmentDict, 'peakSurge', + "Peak Storm Surge Inundation: The potential for little to no storm surge inundation") + + def _surgeWindow(self, segmentDict, productSegmentGroup, productSegment): + if "None" not in self._stats._windowSurge: + self._setProductPartValue(segmentDict, 'surgeWindow', self._stats._windowSurge) + + def _threatSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._lifePropertyThreatSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._PopulateSurge: + self._threatTrend(subsectionDict, productSegmentGroup, productSegment) + self._threatStatements(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'threatSubsection', subsectionDict) + + def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._PopulateSurge: + self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', + "Threat to Life and Property: Not available at this time. 
To be updated shortly.") + else: + SectionCommon._lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment) + + def _threatTrend(self, segmentDict, productSegmentGroup, productSegment): + threatTrendValue = self._getThreatTrendValue("StormSurge", magnitudeIncreaseThreshold=4) + + if threatTrendValue is not None: + # Convert the threat trend to a sentence + threatTrendSentence = \ + self._getThreatTrendSentence("storm surge", threatTrendValue) + + self._setProductPartValue(segmentDict, 'threatTrend', + threatTrendSentence) + + def _threatStatements(self, segmentDict, productSegmentGroup, productSegment): + self._textProduct.debug_print("Surge Threat Statements", 1) + self._tr = self._calculateThreatStatementTr(self._stats._onsetSurgeHour, + self._stats._endSurgeHour, "Surge") + + self._setThreatStatementsProductParts(segmentDict, productSegment, + self._tr) + + def _impactsSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._potentialImpactsSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._PopulateSurge: + self._potentialImpactsStatements(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict) + + def _specialImpactsStatements(self): + return {"hunker down": ["Potential impacts from the main surge event are unfolding.", + # "The extent of realized impacts will depend on the actual height of storm surge moving onshore and the resulting depth of coastal flooding as experienced at particular locations.", + ], + "recovery": ["Little to no additional surge impacts expected. 
Community officials are now assessing the extent of actual surge impacts accordingly.", + ], + } + + def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._PopulateSurge: + self._setProductPartValue(segmentDict, 'potentialImpactsSummary', + "POTENTIAL IMPACTS: Not available at this time. To be updated shortly.") + else: + SectionCommon._potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment) + + +class FloodingRainSection(SectionCommon): + def __init__(self, textProduct, segment, stats): + SectionCommon.__init__(self, textProduct, segment, "Flooding Rain") + self._sectionName = 'floodingRainSection[\'' + segment + '\']' + self._stats = stats + self.isThreatNoneForEntireStorm = self._isThreatNoneForEntireStorm("FloodingRainThreat") + + def sectionParts(self, segment_vtecRecords_tuple): + parts = [ + 'sectionHeader', + 'forecastSubsection', + 'threatSubsection', + 'impactsSubsection', + ] + + return self._finalSectionParts(segment_vtecRecords_tuple, parts) + + def _forecastSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._latestForecastSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._WSPGridsAvailable: + self._peakRain(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'forecastSubsection', subsectionDict) + + def _latestForecastSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._WSPGridsAvailable: + self._setProductPartValue(segmentDict, 'latestForecastSummary', + "LATEST LOCAL FORECAST: Not available at this time. 
To be updated shortly.") + else: + summary = "" # was "No Flood Watch is in effect" + segment, vtecRecords = productSegment + + headlines, _ = self._textProduct._getAdditionalHazards() + headlineList = self._textProduct._checkHazard(headlines, + [("FA","A"),("FF","A")], + returnList = True) + + if len(headlineList) != 0: + # Extract the first flood headline out (there will only be 1 in effect at a time) + (key, areaList) = headlineList[0] + (headline, _, _, _) = key + + # Make sure it is for our zone + if self._segment in areaList: + summary = headline + " is in effect" + + self._setProductPartValue(segmentDict, 'latestForecastSummary', + "LATEST LOCAL FORECAST: " + summary) + + def _peakRain(self, segmentDict, productSegmentGroup, productSegment): + if self._stats._sumAccum is not None: + words = self._rainRange(int(self._stats._sumAccum + 0.5)) + + # If we have previous rainfall + if self._stats._prevAccum not in [0.0, None] and (int(self._stats._sumAccum + 0.5)) != 0: + words = "Additional " + words + self._setProductPartValue(segmentDict, 'peakRain', "Peak Rainfall Amounts: " + words) + + def _rainRange(self, sumAccum): + minAccum = 0 + maxAccum = 0 + + if sumAccum == 0 and self._stats._prevAccum not in [0.0, None]: + return "No additional significant rainfall forecast" + elif sumAccum == 0 and self._stats._prevAccum in [0.0, None]: + return "No significant rainfall forecast" + elif sumAccum == 1: + return "around 1 inch" + elif sumAccum == 2: + minAccum, maxAccum = (1, 3) + elif sumAccum == 3: + minAccum, maxAccum = (2, 4) + elif sumAccum in [4,5]: + minAccum, maxAccum = (3, 6) + elif sumAccum in [6,7]: + minAccum, maxAccum = (4, 8) + elif sumAccum in [8,9]: + minAccum, maxAccum = (6, 10) + elif sumAccum in [10,11]: + minAccum, maxAccum = (8, 12) + elif sumAccum in [12,13,14]: + minAccum, maxAccum = (10, 15) + elif sumAccum in [15,16,17]: + minAccum, maxAccum = (12, 18) + elif 17 < sumAccum and sumAccum < 25: + minAccum, maxAccum = (18, 24) + else: + return 
"More than two feet" + + return "%d-%d inches, with locally higher amounts" % (minAccum, maxAccum) + + def _threatSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._lifePropertyThreatSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._WSPGridsAvailable: + self._threatTrend(subsectionDict, productSegmentGroup, productSegment) + self._threatStatements(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'threatSubsection', subsectionDict) + + def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._WSPGridsAvailable: + self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', + "Threat to Life and Property: Not available at this time. To be updated shortly.") + else: + SectionCommon._lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment) + + def _threatTrend(self, segmentDict, productSegmentGroup, productSegment): + threatTrendValue = self._getThreatTrendValue("FloodingRain", magnitudeIncreaseThreshold=4) + + if threatTrendValue is not None: + # Convert the threat trend to a sentence + threatTrendSentence = \ + self._getThreatTrendSentence("flooding rain", threatTrendValue) + + self._setProductPartValue(segmentDict, 'threatTrend', + threatTrendSentence) + + def _threatStatements(self, segmentDict, productSegmentGroup, productSegment): + self._tr = self._textProduct._windThreatStatementsTr[self._segment] + + self._setThreatStatementsProductParts(segmentDict, productSegment, self._tr) + + def _impactsSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._potentialImpactsSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._WSPGridsAvailable: + self._potentialImpactsStatements(subsectionDict, 
productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict) + + def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._WSPGridsAvailable: + self._setProductPartValue(segmentDict, 'potentialImpactsSummary', + "POTENTIAL IMPACTS: Not available at this time. To be updated shortly.") + else: + SectionCommon._potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment) + +class TornadoSection(SectionCommon): + def __init__(self, textProduct, segment, stats): + SectionCommon.__init__(self, textProduct, segment, "Tornado") + self._sectionName = 'tornadoSection[\'' + segment + '\']' + self._stats = stats + self.isThreatNoneForEntireStorm = self._isThreatNoneForEntireStorm("TornadoThreat") + + def sectionParts(self, segment_vtecRecords_tuple): + parts = [ + 'sectionHeader', + 'forecastSubsection', + 'threatSubsection', + 'impactsSubsection', + ] + + return self._finalSectionParts(segment_vtecRecords_tuple, parts) + + def _forecastSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._latestForecastSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._WSPGridsAvailable: + self._tornadoSituation(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'forecastSubsection', subsectionDict) + + def _latestForecastSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._WSPGridsAvailable: + self._setProductPartValue(segmentDict, 'latestForecastSummary', + "LATEST LOCAL FORECAST: Not available at this time. 
To be updated shortly.") + else: + summary = "" + segment, vtecRecords = productSegment + + headlines, _ = self._textProduct._getAdditionalHazards() + headlineList = self._textProduct._checkHazard(headlines, + [("TO","A")], + returnList = True) + if len(headlineList) != 0: + # Extract the first tornado headline out (there will only be 1 in effect at a time) + (key, areaList) = headlineList[0] + (headline, _, _, _) = key + + # Make sure it is for our zone + if self._segment in areaList: + summary = "Tornado Watch is in effect" + + self._setProductPartValue(segmentDict, 'latestForecastSummary', + "LATEST LOCAL FORECAST: " + summary) + + def _tornadoSituation(self, segmentDict, productSegmentGroup, productSegment): + + # Now add the bullet about tornado situation + if self._stats._maxThreat in ["Extreme", "High"]: + qualifier = "very favorable" + elif self._stats._maxThreat in ["Mod"]: + qualifier = "favorable" + elif self._stats._maxThreat in ["Elevated"]: + qualifier = "somewhat favorable" + else: + qualifier = "unfavorable" + + words = "Situation is %s for tornadoes" % (qualifier) + + self._setProductPartValue(segmentDict, 'tornadoSituation', words) + + + def _threatSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._lifePropertyThreatSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._WSPGridsAvailable: + self._threatTrend(subsectionDict, productSegmentGroup, productSegment) + self._threatStatements(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'threatSubsection', subsectionDict) + + def _lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._WSPGridsAvailable: + self._setProductPartValue(segmentDict, 'lifePropertyThreatSummary', + "Threat to Life and Property: Not available at this time. 
To be updated shortly.") + else: + SectionCommon._lifePropertyThreatSummary(self, segmentDict, productSegmentGroup, productSegment) + + def _threatTrend(self, segmentDict, productSegmentGroup, productSegment): + threatTrendValue = self._getThreatTrendValue("Tornado", + magnitudeIncreaseThreshold=None) + + if threatTrendValue is not None: + # Convert the threat trend to a sentence + threatTrendSentence = \ + self._getThreatTrendSentence("tornado", threatTrendValue) + + self._setProductPartValue(segmentDict, 'threatTrend', + threatTrendSentence) + + def _threatStatements(self, segmentDict, productSegmentGroup, productSegment): + self._tr = self._textProduct._windThreatStatementsTr[self._segment] + + self._setThreatStatementsProductParts(segmentDict, productSegment, self._tr) + + def _impactsSubsection(self, segmentDict, productSegmentGroup, productSegment): + subsectionDict = collections.OrderedDict() + self._potentialImpactsSummary(subsectionDict, productSegmentGroup, productSegment) + + if self._textProduct._WSPGridsAvailable: + self._potentialImpactsStatements(subsectionDict, productSegmentGroup, productSegment) + + if len(subsectionDict) > 0: + self._setProductPartValue(segmentDict, 'impactsSubsection', subsectionDict) + + def _potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment): + if not self._textProduct._WSPGridsAvailable: + self._setProductPartValue(segmentDict, 'potentialImpactsSummary', + "POTENTIAL IMPACTS: Not available at this time. 
To be updated shortly.") + else: + SectionCommon._potentialImpactsSummary(self, segmentDict, productSegmentGroup, productSegment) + + +############################################################### +### TCV Statistics Classes + +class SectionCommonStats(): + def __init__(self, textProduct, segment): + self._textProduct = textProduct + self._segment = segment + + self._initializeSegmentAdvisories() + + # The maximum threat level during the entire advisory + self._maxThreat = None + + + def _initializeSegmentAdvisories(self): + self._currentAdvisory = self._textProduct._currentAdvisory['ZoneData'][self._segment] + + self._previousAdvisory = None + self._textProduct.debug_print("textProduct._previousAdvisory = '%s'" % (self._textProduct._previousAdvisory)) + if self._textProduct._previousAdvisory is not None: + if self._segment in self._textProduct._previousAdvisory['ZoneData']: + self._previousAdvisory = self._textProduct._previousAdvisory['ZoneData'][self._segment] + + self._textProduct.debug_print("textProduct._previousPreviousAdvisory = '%s'" % \ + (self._textProduct._previousPreviousAdvisory)) + self._previousPreviousAdvisory = None + if self._textProduct._previousPreviousAdvisory is not None: + self._previousPreviousAdvisory = self._textProduct._previousPreviousAdvisory['ZoneData'][self._segment] + + def _updateThreatStats(self, tr, statDict, threatGridName): + self._textProduct.debug_print("In _updateThreatStats for %s" % (threatGridName), 1) + self._textProduct.debug_print("maxThreat = %s" % (self._maxThreat), 1) + + threatLevel = self._textProduct._getStatValue(statDict, threatGridName) + if threatLevel is not None: + threatLevels = self._textProduct._threatKeyOrder() + self._textProduct.debug_print("current threatLevel = %s" % (threatLevel), 1) + if self._maxThreat is None or \ + threatLevels.index(threatLevel) > threatLevels.index(self._maxThreat): + self._textProduct.debug_print("updating max threat to = %s" % (threatLevel), 1) + self._maxThreat = 
threatLevel + + def _calculateHourOffset(self, targetTime): + self._textProduct.debug_print("Calculating hours from issuance time for %s" + % (self._textProduct._pp.pformat(targetTime)), 1) + self._textProduct.debug_print("target unix time = %s" + % (self._textProduct._pp.pformat(targetTime.unixTime())), 1) + self._textProduct.debug_print("issuance unix time = %s" + % (self._textProduct._pp.pformat(self._textProduct._issueTime_secs)), 1) + + seconds = targetTime.unixTime() - self._textProduct._issueTime_secs + hour = int(round(seconds/60.0/60.0)) + self._textProduct.debug_print("hour offset = %s" % (hour), 1) + if hour < 0: + hour = 0 + + self._textProduct.debug_print("final hour offset = %s" % (hour), 1) + + return hour + +class WindSectionStats(SectionCommonStats): + def __init__(self, textProduct, segment, statList, timeRangeList): + SectionCommonStats.__init__(self, textProduct, segment) + # The maximum wind speed that occurs during the entire advisory. + self._maxWind = None + + # The maximum wind gust speed that occurs during the entire advisory. + self._maxGust = None + + # The number of hours since the issuance time when the wind first becomes >= 34 kts. + self._onset34Hour = None + + # The number of hours since the issuance time when the wind drops below 34 kts. + self._end34Hour = None + + # Text describing when tropical storm force winds (>= 34 kts) start and end. + self._windowTS = None + + # Text describing when hurricane force winds (>= 64 kts) start and end. + self._windowHU = None + + # Only gather stats if we have the wind speed probability grids available + if self._textProduct._WSPGridsAvailable: + self._textProduct.debug_print("#"*90) + self._textProduct.debug_print("Setting wind stats for %s" % (segment), 1) + + self._setStats(statList, timeRangeList) + self._textProduct.debug_print("#"*90) + + # pws34int and pws64int grids give you the probability of 34/64 kt winds + # occurring during the grid time range. 
The grids are 6 hours long so they + # give you a more specific starting or ending time which allows for better + # descriptions of when events start. + class PwsXXintStats(): + def __init__(self): + # The maximum value in pws34/64int grids across the entire advisory. + self.max = None + + # The number of hours since the issuance time when this maximum value first occurs. + self.onsetHour = None + + # pwsD34, pwsN34, pwsD64 and pwsN64 grids give you the probability of 34/64 + # kt winds occurring during the grid time range. They are 12 hour long day + # and night grids that match ZPF periods. They give you a ball park idea of + # when an event will start or end and if it's day or night time and then + # the pwsXXint grids can be used to narrow down the time frame. + class PwsTXXStats(): + def __init__(self): + # Depending on when the issuance time is, there may be a day or night + # grid that we need to drop at the beginning so that we start with the + # grid that occurs during our issuance time so that our windows are + # accurate. + + # We need to do special logic the first time around so record if this + # is the first run through the loop or not. + self.firstRun = True + + # Indicates if we need to possibly drop the first grid or not. + self.dropFirstGridType = None + + # Indicates if we actually did drop the first grid. Sometimes we will + # determine that we need to drop the grid if it exists but it doesn't + # end up existing so we don't actually drop anything in some cases. + self.droppedFirstGrid = False + + # Indicate the period (actually a 0-based index into a list of periods) + # that contains the first correct grid. + self.periodWithFirstCorrectGrid = None + + # The AbsTime of when the grids first met or exceeded the threshold. + self.onsetTime = None + + # The AbsTime of when the grids last met or exceeded the threshold. + self.endTime = None + + # Start and end hour information from the Wind grids. 
+ class WindStats(): + def __init__(self): + # The number of hours since issuance time when the wind first gets >= 34/64 knots. + self.onsetHour = None + # The number of hours since issuance time when the wind is last >= 34/64 knots. + self.endHour = None + + # Information needed for creating the wind window text. + class WindowInfo(): + def __init__(self, eventType): + # The type (as a string) of the event this window is for (Tropical Storm or Hurricane). + self.eventType = eventType + # The number of hours since issuance time when the tropical storm or hurricane starts. + self.onsetHour = None + # The number of hours since issuance time when the tropical storm or hurricane ends. + self.endHour = None + # The resolution to use when determining the wording for the end time of the window. + self.endTimeResolution = None + # Determines if we should create window text for this event (did wind exceed threshold?) + self.shouldCreateWindowText = True + # The constructed window text. + self.windowText = None + + def _setStats(self, statList, timeRangeList): + pws34intStats = self.PwsXXintStats() + pws64intStats = self.PwsXXintStats() + pwsT34Stats = self.PwsTXXStats() + pwsT64Stats = self.PwsTXXStats() + wind34timeInfo = self.WindStats() + wind64timeInfo = self.WindStats() + prob34Onset = None + + for index in range(len(statList)): + tr, _ = timeRangeList[index] + statDict = statList[index] + + self._textProduct.debug_print("="*90, 1) + self._textProduct.debug_print("\n\ntr = %s" % (tr), 1) + + self._textProduct.debug_print("*"*90, 1) + currentPeriod = self._determineCurrentPeriod(tr) + + self._textProduct.debug_print("*"*90, 1) + self._updateStatsForPwsXXint(tr, statDict, "pws34int", pws34intStats) + self._textProduct.debug_print("-"*45, 1) + self._updateStatsForPwsXXint(tr, statDict, "pws64int", pws64intStats) + + self._textProduct.debug_print("*"*90, 1) + self._updateStatsForPwsTXX(tr, statDict, "pwsD34", "pwsN34", pwsT34Stats, currentPeriod) + 
self._textProduct.debug_print("-"*45, 1) + self._updateStatsForPwsTXX(tr, statDict, "pwsD64", "pwsN64", pwsT64Stats, currentPeriod) + + # Calculate an additional probabilistic onset hour for scenarios where we weren't + # able to calculate the onset the usual way. This is only done for tropical + # storms to help determine the correct TR (check plans, etc.) + if prob34Onset is None and pwsT34Stats.onsetTime is not None: + self._textProduct.debug_print("*"*90, 1) + self._textProduct.debug_print("Found pwsD/N34 onset time, calculating prob34Onset", 1) + prob34Onset = self._calculateProbOnset(timeRangeList, statList, index, "pws34int") + + self._textProduct.debug_print("*"*90, 1) + self._updateStatsForWind(tr, statDict, wind34timeInfo, speed=34) + self._textProduct.debug_print("-"*45, 1) + self._updateStatsForWind(tr, statDict, wind64timeInfo, speed=64) + + self._textProduct.debug_print("*"*90, 1) + self._updateMaxWindGust(statDict) + + self._textProduct.debug_print("*"*90, 1) + self._updateThreatStats(tr, statDict, "WindThreat") + + self._textProduct.debug_print("="*90, 1) + + #Tropical Storm + self._textProduct.debug_print("Tropical Storm Window:", 1) + tropicalStormWindow = self.WindowInfo("Tropical Storm") + tropicalStormWindow = self._computeWindOnsetAndEnd(tropicalStormWindow, + wind34timeInfo, + pws34intStats, + pwsT34Stats, + prob34Onset) + tropicalStormWindow = self._createWindowText(tropicalStormWindow) + # The tropical storm onset and end hours will be used for calculating threat statements + self._onset34Hour = tropicalStormWindow.onsetHour + self._end34Hour = tropicalStormWindow.endHour + self._windowTS = tropicalStormWindow.windowText + + #Hurricane + self._textProduct.debug_print("-"*45, 1) + self._textProduct.debug_print("Hurricane Window:", 1) + hurricaneWindow = self.WindowInfo("Hurricane") + hurricaneWindow = self._computeWindOnsetAndEnd(hurricaneWindow, + wind64timeInfo, + pws64intStats, + pwsT64Stats) + + # Make sure the hurricane window end time 
resolution is the same + # resolution used for tropical storms so that hurricanes don't appear + # to end after tropical storms + hurricaneWindow.endTimeResolution = tropicalStormWindow.endTimeResolution + + hurricaneWindow = self._createWindowText(hurricaneWindow) + self._windowHU = hurricaneWindow.windowText + + self._textProduct.debug_print("-"*45, 1) + self._currentAdvisory["WindThreat"] = self._maxThreat + self._currentAdvisory["WindForecast"] = self._maxWind + + self._textProduct.debug_print("+"*60, 1) + self._textProduct.debug_print("In WindSectionStats._setStats", 1) + self._textProduct.debug_print("pws34intStats.max = %s" % (pws34intStats.max), 1) + self._textProduct.debug_print("pws64intStats.max = %s" % (pws64intStats.max), 1) + self._textProduct.debug_print("pwsT34Stats.periodWithFirstCorrectGrid = %s" % (pwsT34Stats.periodWithFirstCorrectGrid), 1) + self._textProduct.debug_print("pwsT34Stats.endTime = '%s'" % (pwsT34Stats.endTime), 1) + self._textProduct.debug_print("pwsT64Stats.periodWithFirstCorrectGrid = %s" % (pwsT64Stats.periodWithFirstCorrectGrid), 1) + self._textProduct.debug_print("pwsT64Stats.endTime = '%s'" % (pwsT64Stats.endTime), 1) + self._textProduct.debug_print("self._maxWind = %s" % (self._maxWind), 1) + self._textProduct.debug_print("self._maxGust = %s" % (self._maxGust), 1) + self._textProduct.debug_print("self._maxThreat = %s" % (self._maxThreat), 1) + + def _determineCurrentPeriod(self, tr): + currentPeriod = None + for periodIndex, periodTr in enumerate(self._textProduct._periodList): + self._textProduct.debug_print("\n\nperiodIndex = %d periodList tr = %s" + % (periodIndex, repr(periodTr)), 1) + + if (periodIndex == 0) and (tr.startTime().unixTime() < periodTr.startTime().unixTime()): + # If the tr is before the first period, use the first period + currentPeriod = periodIndex + break + elif (periodIndex == len(self._textProduct._periodList) - 1) and \ + (tr.startTime().unixTime() >= periodTr.endTime().unixTime()): + # If the tr is 
after (or at the end of) the last period, use the last period + currentPeriod = periodIndex + break + elif periodTr.contains(tr.startTime()): + currentPeriod = periodIndex + break + + self._textProduct.debug_print("\n\ncurrentPeriod index = %s" % (currentPeriod), 1) + self._textProduct.debug_print("\n\ncurrentPeriod tr = %s" + % (self._textProduct._periodList[currentPeriod]), 1) + + return currentPeriod + + def _updateStatsForPwsXXint(self, tr, statDict, gridName, pwsXXintStats): + pwsXXint = self._textProduct._getStatValue(statDict, gridName, "Max") + + self._textProduct.debug_print("Wind Window Debug: pwsXXintStats gridName = %s" % (gridName), 1) + self._textProduct.debug_print("Wind Window Debug: pwsXXintStats pwsXXint = %s" % (pwsXXint), 1) + + if pwsXXint is not None: + if pwsXXintStats.max is None or pwsXXint > pwsXXintStats.max: + pwsXXintStats.max = pwsXXint + pwsXXintStats.onsetHour = self._calculateHourOffset(tr.startTime()) + + self._textProduct.debug_print("Wind Window Debug: pwsXXintStats Found a new max value!", 1) + self._textProduct.debug_print("Wind Window Debug: pwsXXintStats onsetHour = %s" % (pwsXXintStats.onsetHour), 1) + + def _updateStatsForPwsTXX(self, tr, statDict, dayGridName, nightGridName, pwsTXXStats, period): + if pwsTXXStats.firstRun: + self._textProduct.debug_print("first run for _updateStatsForPwsTXX!", 1) + self._textProduct.debug_print("grids: %s %s" % (dayGridName, nightGridName), 1) + pwsTXXStats.firstRun = False + localtime = time.localtime(self._textProduct._issueTime_secs) + self._textProduct.debug_print("localtime = %s" % (localtime), 1) + + if localtime.tm_hour >= 15: # 3PM to midnight + self._textProduct.debug_print("between 3PM and midnight!", 1) + pwsTXXStats.dropFirstGridType = "day" + self._textProduct.debug_print("need to drop the day grid(s) if they come first", 1) + elif localtime.tm_hour >= 3 and localtime.tm_hour < 12: # 3AM to noon + self._textProduct.debug_print("between 3AM and noon!", 1) + 
pwsTXXStats.dropFirstGridType = "night" + self._textProduct.debug_print("need to drop the night grid(s) if they come first", 1) + else: + self._textProduct.debug_print("not dropping any grids!", 1) + + + pwsDXX = self._textProduct._getStatValue(statDict, dayGridName, "Max") + pwsNXX = self._textProduct._getStatValue(statDict, nightGridName, "Max") + + maxPws = None + self._textProduct.debug_print("%s pwsDXX = %s pwsNXX = %s " % + (self._textProduct._pp.pformat(tr),pwsDXX, pwsNXX), 1) + + # Determine coversion factor to get DAY and NIGHT in UTC + utcHourOffset = self._calculateUTCandLocalHourOffset() + + # See if this hour a valid DAYtime hour + isValidDay = self._isValidDayTime(tr.startTime().hour, + self._textProduct.DAY() + utcHourOffset, + self._textProduct.NIGHT() + utcHourOffset) + + # If we have pwsD data, and this is a time period it applies to + if pwsDXX is not None and isValidDay: + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats DAY", 1) + + if pwsTXXStats.dropFirstGridType == "day": + self._textProduct.debug_print("Wind Window Debug: dropping a day grid", 1) + self._textProduct.debug_print("Wind Window Debug: tr = %s, period = %s" % (tr, period), 1) + pwsTXXStats.droppedFirstGrid = True + return + elif pwsTXXStats.dropFirstGridType == "night": + # We dropped all the necessary grids now that we found a day grid so stop dropping + pwsTXXStats.dropFirstGridType = None + pwsTXXStats.periodWithFirstCorrectGrid = period + self._textProduct.debug_print("Wind Window Debug: found day grid; done dropping night grids", 1) + self._textProduct.debug_print("Wind Window Debug: tr = %s, period = %s" % (tr, period), 1) + + maxPws = pwsDXX + + # If we have pwsN data, and this is a time period it applies to + elif pwsNXX is not None and not isValidDay: + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats NIGHT", 1) + + if pwsTXXStats.dropFirstGridType == "night": + self._textProduct.debug_print("Wind Window Debug: dropping a night grid", 1) + 
self._textProduct.debug_print("Wind Window Debug: tr = %s, period = %s" % (tr, period), 1) + pwsTXXStats.droppedFirstGrid = True + return + elif pwsTXXStats.dropFirstGridType == "day": + # We dropped all the necessary grids now that we found a night grid so stop dropping + pwsTXXStats.dropFirstGridType = None + pwsTXXStats.periodWithFirstCorrectGrid = period + self._textProduct.debug_print("Wind Window Debug: found night grid; done dropping day grids", 1) + self._textProduct.debug_print("Wind Window Debug: tr = %s, period = %s" % (tr, period), 1) + + maxPws = pwsNXX + + # These two statements will need to be reevaluated when this product is + # expanded to the Pacific basin (MHB - 02/03/2015) + elif pwsDXX is not None and tr.startTime().hour in [21, 0, 3]: + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats DAY ignored", 1) + + elif pwsNXX is not None and tr.startTime().hour in [9, 12, 15]: + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats NIGHT ignored", 1) + + threshold34index = 0 + threshold64index = 1 + if maxPws is not None: + # Don't shift if the period with the first correct grid is period 0 + if pwsTXXStats.droppedFirstGrid and pwsTXXStats.periodWithFirstCorrectGrid != 0: + period = period - 1 # We dropped the first grid so we are off-by-one + self._textProduct.debug_print("shifting period back 1...new period = %s" % + (period), 1) + + # Just set the first correct period to period zero, if it hasn't + # been set yet, so the missing grid check will not fail + if pwsTXXStats.periodWithFirstCorrectGrid is None: + pwsTXXStats.periodWithFirstCorrectGrid = 0 + + if "64" in dayGridName: + index = threshold64index + else: #if "34" + index = threshold34index + + threshold = None + thresholds = self._textProduct.windSpdProb_thresholds(threshold, threshold) + if period == 0: + (thresholdLow, thresholdHigh) = thresholds[period][index] + threshold = thresholdLow + else: + threshold = thresholds[period][index] + 
self._textProduct.debug_print("Probability threshold for period %s = %s" + % (period, threshold), 1) + + if maxPws > threshold: + if pwsTXXStats.onsetTime is None: + pwsTXXStats.onsetTime = tr.startTime() + + trEndTime = tr.endTime() + periodEndTime = self._textProduct._periodList[period].endTime() + + # Don't go past the end of the period + if trEndTime <= periodEndTime: + pwsTXXStats.endTime = trEndTime + else: + pwsTXXStats.endTime = periodEndTime + + self._textProduct.debug_print("Wind Window Debug: probability threshold = %s (period index %s)" % (threshold, period), 1) + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats dayGridName = %s" % (dayGridName), 1) + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats nightGridName = %s" % (nightGridName), 1) + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats original tr = %s" % (self._textProduct._pp.pformat(tr)), 1) + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats maxPws = %s" %(self._textProduct._pp.pformat(maxPws)), 1) + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats onsetTime = %s" % (self._textProduct._pp.pformat(pwsTXXStats.onsetTime)), 1) + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats endTime = %s" % (self._textProduct._pp.pformat(pwsTXXStats.endTime)), 1) + self._textProduct.debug_print("Wind Window Debug: period tr = %s" % (self._textProduct._pp.pformat(self._textProduct._periodList[period])), 1) + + def _updateStatsForWind(self, tr, statDict, timeInfo, speed): + self._textProduct.debug_print("Wind Window Debug: In _updateStatsForWind", 1) + self._textProduct.debug_print("Wind Window Debug: timeInfo tr = %s" % (self._textProduct._pp.pformat(tr)), 1) + self._textProduct.debug_print("Wind Window Debug: timeInfo speed threshold = %s" % (speed), 1) + self._textProduct.debug_print("Wind Window Debug: timeInfo maxWind = %s" % (self._maxWind), 1) + + wind = self._textProduct._getStatValue(statDict, "Wind", "Max", 
self._textProduct.VECTOR()) + self._textProduct.debug_print("Wind Window Debug: current wind value = %s" % (wind), 1) + + if wind is not None: + if self._maxWind is None or wind > self._maxWind: + self._textProduct.debug_print("Wind Window Debug: Found new max wind value!", 1) + self._maxWind = wind + + if wind >= speed: + self._textProduct.debug_print("Wind Window Debug: current wind >= speed!", 1) + + if timeInfo.onsetHour is None: + timeInfo.onsetHour = self._calculateHourOffset(tr.startTime()) + + self._textProduct.debug_print("Wind Window Debug: onsetHour was None", 1) + self._textProduct.debug_print("Wind Window Debug: timeInfo onsetHour = %s" % (timeInfo.onsetHour), 1) + + # Always update the end time (it's the last time we exceeded the speed) + timeInfo.endHour = self._calculateHourOffset(tr.endTime()) + self._textProduct.debug_print("Wind Window Debug: timeInfo endHour = %s" % (timeInfo.endHour), 1) + + def _updateMaxWindGust(self, statDict): + windGust = self._textProduct._getStatValue(statDict, "WindGust", "Max") + self._textProduct.debug_print("Wind Window Debug: current windGust value = %s" % (windGust), 1) + + if windGust is not None: + if self._maxGust is None or windGust > self._maxGust: + self._textProduct.debug_print("Wind Window Debug: Found new max windGust value!", 1) + self._maxGust = windGust + + def _calculateProbOnset(self, timeRangeList, statList, index, pwsXXintGridName): + self._textProduct.debug_print("Wind Window Debug: in _calculateProbOnset", 1) + + # Calculate corresponding maximum intersecting pwsXXint tr + maxPwsXXintTr = self._calculateMaxPwsXXintTr(timeRangeList, statList, index, + pwsXXintGridName) + self._textProduct.debug_print("Wind Window Debug: maxPwsXXintTr = %s" % (maxPwsXXintTr), 1) + + # Calculate hours since issuance time to start time + probOnset = self._calculateHourOffset(maxPwsXXintTr.startTime()) + self._textProduct.debug_print("Wind Window Debug: probOnset = %s" % (probOnset), 1) + + return probOnset + + def 
_calculateMaxPwsXXintTr(self, timeRangeList, statList, index, gridName): + self._textProduct.debug_print("Wind Window Debug: gridName = %s" % (gridName), 1) + + # The current tr is always the first pwsXXint grid that intersects the onset pwsTXX grid + currTr, _ = timeRangeList[index] + currStatDict = statList[index] + currPwsXXint = self._textProduct._getStatValue(currStatDict, gridName, "Max") + self._textProduct.debug_print("Wind Window Debug: currTr = %s" % (currTr), 1) + self._textProduct.debug_print("Wind Window Debug: currPwsXXint = %s" % (currPwsXXint), 1) + + # Now try to find the next intersecting pwsXXint grid. + # pwsXXint grids are 6-hours long with times: 00-06, 06-12, 12-18, 18-00 GMT + if 0 <= currTr.startTime().hour < 6: + nextTrStartHour = 6 + elif 6 <= currTr.startTime().hour < 12: + nextTrStartHour = 12 + elif 12 <= currTr.startTime().hour < 18: + nextTrStartHour = 18 + else: + nextTrStartHour = 0 + + nextTr = None + nextPwsXXint = None + + for nextIndex in range(index + 1, len(statList)): + nextTr, _ = timeRangeList[nextIndex] + if nextTr.startTime().hour != nextTrStartHour: + continue + + nextStatDict = statList[nextIndex] + nextPwsXXint = self._textProduct._getStatValue(nextStatDict, gridName, "Max") + self._textProduct.debug_print("Wind Window Debug: nextTr = %s" % (nextTr), 1) + self._textProduct.debug_print("Wind Window Debug: nextPwsXXint = %s" % (nextPwsXXint), 1) + + if (nextPwsXXint is None) or (currPwsXXint >= nextPwsXXint): + return currTr + else: + return nextTr + + def _computeWindOnsetAndEnd(self, windowInfo, windTimeInfo, pwsXXintStats, pwsTXXStats, probOnset=None): + self._textProduct.debug_print("Wind Window Debug: In _computeWindOnsetAndEnd", 1) + self._textProduct.debug_print("Wind Window Debug: windTimeInfo.onsetHour = %s" % (windTimeInfo.onsetHour), 1) + self._textProduct.debug_print("Wind Window Debug: pwsXXintStats.onsetHour = %s" % (pwsXXintStats.onsetHour), 1) + self._textProduct.debug_print("Wind Window Debug: 
windTimeInfo.endHour = %s" % (windTimeInfo.endHour), 1) + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats.endTime = %s" % (pwsTXXStats.endTime), 1) + if pwsTXXStats.endTime is not None: + self._textProduct.debug_print("Wind Window Debug: pwsTXXStats end hour = %s" % (self._calculateHourOffset(pwsTXXStats.endTime)), 1) + self._textProduct.debug_print("Wind Window Debug: probOnset = %s" % (probOnset), 1) + + if windTimeInfo.onsetHour is not None: + if windTimeInfo.onsetHour < 6: + self._textProduct.debug_print("onsetHour for wind is < 6, using that as window onset hour", 1) + windowInfo.onsetHour = windTimeInfo.onsetHour + self._textProduct.debug_print("onsetHour = %s" % (windowInfo.onsetHour), 1) + elif pwsXXintStats.onsetHour is not None: + self._textProduct.debug_print("onsetHour for pwsXXintStats is not None", 1) + self._textProduct.debug_print("using min onset hour betweeen wind and pwsXXintStats", 1) + windowInfo.onsetHour = min(windTimeInfo.onsetHour, pwsXXintStats.onsetHour) + self._textProduct.debug_print("onsetHour = %s" % (windowInfo.onsetHour), 1) + else: + self._textProduct.debug_print("ERROR: onsetHour for pwsXXintStats is None. 
Check the grids.", 1) + return windowInfo + else: + self._textProduct.debug_print("windTimeInfo.onsetHour was None, using probOnset (%s) instead" + % probOnset, 1) + windowInfo.onsetHour = probOnset + self._textProduct.debug_print("onsetHour = %s" % (windowInfo.onsetHour), 1) + + self._textProduct.debug_print("Since wind threshold not exceeded, will not create window text", 1) + windowInfo.shouldCreateWindowText = False + + if windowInfo.onsetHour is None: + # We won't have a timing window + self._textProduct.debug_print("onsetHour for wind is None", 1) + return windowInfo + + windEndHourExists = windTimeInfo.endHour is not None + windEndHourOutOfRange = windTimeInfo.endHour > 114 or windTimeInfo.endHour < 6 + pwsTXXEndTimeExists = pwsTXXStats.endTime is not None + + if (not windEndHourExists or windEndHourOutOfRange) or \ + (windEndHourExists and not pwsTXXEndTimeExists): + self._textProduct.debug_print("using Wind end hour for the window wind hour", 1) + self._textProduct.debug_print("\twind end hour exists? %s" % windEndHourExists, 1) + self._textProduct.debug_print("\twind end hour out of range? %s" % windEndHourOutOfRange, 1) + self._textProduct.debug_print("\tpwsTXX end time exists? 
%s" % pwsTXXEndTimeExists, 1) + windowInfo.endHour = windTimeInfo.endHour + self._textProduct.debug_print("endHour = %s" % (windowInfo.endHour), 1) + elif pwsTXXEndTimeExists: + self._textProduct.debug_print("endTime for pwsTXXStats is not None", 1) + self._textProduct.debug_print("converting endTime to a configured time", 1) + configuredTime = self._getConfiguredTime(pwsTXXStats.endTime) + + probEndHour = self._calculateHourOffset(configuredTime) + + self._textProduct.debug_print("using rounded average betweeen wind end hour and configured pwsTXXStats end hour", 1) + windowInfo.endHour = int(round(self._textProduct.average(windTimeInfo.endHour, probEndHour))) + self._textProduct.debug_print("endHour = %s" % (windowInfo.endHour), 1) + + return windowInfo + + def _createWindowText(self, windowInfo): + windowInfo.windowText = "Window for " + windowInfo.eventType + " force winds: " + self._textProduct.debug_print("In _createWindowText", 1) + self._textProduct.debug_print("window stats:", 1) + self._textProduct.debug_print("onsetHour = %s" % (windowInfo.onsetHour), 1) + self._textProduct.debug_print("endHour = %s" % (windowInfo.endHour), 1) + self._textProduct.debug_print("endTimeResolution = %s" % (windowInfo.endTimeResolution), 1) + self._textProduct.debug_print("shouldCreateWindowText = %s" % (windowInfo.shouldCreateWindowText), 1) + + if windowInfo.onsetHour is None or not windowInfo.shouldCreateWindowText: + # We do not want a statement of a non-existent window + windowInfo.windowText = None + else: + startTime = AbsTime(self._textProduct._issueTime_secs + windowInfo.onsetHour*60*60) + if windowInfo.endHour is not None: + endTime = AbsTime(self._textProduct._issueTime_secs + windowInfo.endHour*60*60) + windowPeriod = self._textProduct.makeTimeRange(startTime, endTime) + else: + windowPeriod = self._textProduct.makeTimeRange(startTime, startTime + 1) + self._textProduct.debug_print("window period = %s" % (windowPeriod), 1) + + startTimeDescriptor = "" + if 
windowInfo.onsetHour >= 18: + startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, resolution = 6) + elif 6 <= windowInfo.onsetHour < 18: + startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, resolution = 3) + + if len(startTimeDescriptor) == 0 and windowInfo.endHour is None: + windowInfo.windowText = None + elif len(startTimeDescriptor) != 0 and windowInfo.endHour > 114: + windowInfo.windowText += "Begins " + startTimeDescriptor + else: + connector = "through " + endTimeDescriptor = "the next few hours" + + if windowInfo.endHour is not None: + if windowInfo.endTimeResolution is None: + if windowInfo.endHour >= 18: + windowInfo.endTimeResolution = 6 + elif 6 <= windowInfo.endHour < 18: + windowInfo.endTimeResolution = 3 + + if windowInfo.endTimeResolution is not None: + endTimeDescriptor = \ + self._textProduct._formatPeriod(windowPeriod, + useEndTime = True, + resolution = windowInfo.endTimeResolution) + + # If we are not talking about the next few hours + if endTimeDescriptor != "the next few hours": + connector = "until " + + if len(startTimeDescriptor) != 0: + connector = " " + connector + windowInfo.windowText += startTimeDescriptor + connector + endTimeDescriptor + + return windowInfo + + def _getConfiguredTime(self, originalTime): + self._textProduct.debug_print("original time = %s" % + (self._textProduct._pp.pformat(originalTime)), 1) + + unixTime = originalTime.unixTime() + localTime = time.localtime(unixTime) + self._textProduct.debug_print("original time in local time is %s" % + (self._textProduct._pp.pformat(localTime)), 1) + utcHourOffset = self._calculateUTCandLocalHourOffset() + self._textProduct.debug_print("utcHourOffset = %s" % (utcHourOffset), 1) + + # Remember these times are in local time zone, so hour 0 is + # midnight of the current calendar day. + if localTime.tm_hour > 6 and localTime.tm_hour <= 18: + # It's daytime, so use the end of the daytime period (18 = 6PM). 
+ # NIGHT returns the start of the nighttime period which is the + # end of the daytime period. + configuredTime = absTimeYMD(originalTime.year, + originalTime.month, + originalTime.day, + self._textProduct.NIGHT()) + else: + # It's nighttime, so use the end of the nighttime period (6 = 6AM). + # DAY returns the start of the daytime period which is the end of + # the nighttime period. + configuredTime = absTimeYMD(originalTime.year, + originalTime.month, + originalTime.day, + self._textProduct.DAY()) + self._textProduct.debug_print("configuredTime (local time) = %s" % + (self._textProduct._pp.pformat(configuredTime)), 1) + + # The configured time is local time so we need to add an offset to make the entire date UTC + configuredUnixTime = configuredTime.unixTime() + (utcHourOffset * 3600) + configuredTime = AbsTime(configuredUnixTime) + self._textProduct.debug_print("configuredTime (UTC time) = %s" % + (self._textProduct._pp.pformat(configuredTime)), 1) + + return configuredTime + + def _calculateUTCandLocalHourOffset(self): + if time.daylight: + # This is daylight savings time so it needs to be handled differently + return int(time.altzone // 3600) + else: + utc = time.gmtime() + local = time.localtime() + + diffInSeconds = time.mktime(utc) - time.mktime(local) + return int(diffInSeconds // 3600) + + def _isValidDayTime(self, trStartHour, utcDay, utcNight): + + # Handle case where "night" starts at an "earlier" UTC hour than "day" + # (e.g. 
DAY = 18Z and NIGHT = 06Z) + if (utcNight < utcDay) and \ + (trStartHour >= utcDay or trStartHour < utcNight): + + # If we are toward the end of the daytime, and more than 1 hour + # from its end + if (trStartHour < utcNight) and (utcNight - trStartHour) > 1: + return True + elif trStartHour >= utcDay: + return True + + # Handle "normal" case where "day" starts before "night" in UTC + elif trStartHour >= utcDay and trStartHour < utcNight and \ + (utcNight - trStartHour) > 1: + return True + + # If we made it this far, this is not a valid "day" hour + return False + + +class StormSurgeSectionStats(SectionCommonStats): + def __init__(self, textProduct, segment, intersectStatList, timeRangeList): + SectionCommonStats.__init__(self, textProduct, segment) + self._inundationMax = None + self._onsetSurgeHour = None + self._endSurgeHour = None + self._windowSurge = None + + # Only gather stats if we are populating the surge section + if self._textProduct._PopulateSurge: + self._setStats(intersectStatList, timeRangeList) + + def _setStats(self, statList, timeRangeList): + windows = [] + phishStartTime = None + phishEndTime = None + + # If this is an inland area, just move on + if statList == "InlandArea": + return + + self._textProduct.debug_print("*"*100, 1) + self._textProduct.debug_print("Setting Surge Section stats for %s" % self._segment, 1) + + statDict = statList[0] + self._textProduct.debug_print("StatDict %s" % statDict, 1) + + self._inundationMax = self._textProduct._getStatValue(statDict, "InundationMax", "Max") + self._textProduct.debug_print("Raw self._inundationMax = %s" % (repr(self._inundationMax)), 1) + + if self._inundationMax is not None: + self._inundationMax = round(self._inundationMax) + self._textProduct.debug_print("self._inundationMax = %s" % (self._inundationMax), 1) + + self._textProduct.debug_print("length of statList = %s" % (len(statList)), 1) + for period in range(len(statList)): + tr, _ = timeRangeList[period] + statDict = statList[period] + 
self._textProduct.debug_print("-"*50, 1) + self._textProduct.debug_print("tr = %s" % (self._textProduct._pp.pformat(tr)), 1) + self._textProduct.debug_print("statDict = %s" % (self._textProduct._pp.pformat(statDict)), 1) + + + curPhish = self._textProduct._getStatValue(statDict, "InundationTiming", "Max") + self._textProduct.debug_print("curPhish = '%s'" % (str(curPhish)), 1) + self._textProduct.debug_print("phishStartTime = %s phishEndTime = %s" % + (str(phishStartTime), str(phishEndTime)), 1) + + if (curPhish is None) or (curPhish == 'None'): + self._textProduct.debug_print("Done: Reached end of grids (curPhish was None)", 1) + break + + + # For start time: + # If inundationMax > 3: + # Looking for 2 consecutive grids with a surge height > 1 + # Start will be the start time of the FIRST of the 2 consecutive grids + # If 1 < inundationMax <= 3: + # Looking for 1 grid with a surge height > 1 + # Start will be the start time of this grid + # + # For end time: + # Looking for 2 consecutive grids with a surge height <= 1 + # End will be the start time of the FIRST of the 2 consecutive grids + + # If we have another period after this one, we may need to look at the two + # consecutive periods for start and end time conditions + isLastPeriod = True + if period < len(statList) - 1: + isLastPeriod = False + nextTr, _ = timeRangeList[period+1] + nextStatDict = statList[period+1] + nextPhish = self._textProduct._getStatValue(nextStatDict, "InundationTiming", "Max") + + self._textProduct.debug_print("nextTr = %s" % (self._textProduct._pp.pformat(nextTr)), 1) + self._textProduct.debug_print("nextStatDict = %s" % (self._textProduct._pp.pformat(nextStatDict)), 1) + self._textProduct.debug_print("nextPhish = '%s'" % (str(nextPhish)), 1) + + # Set what the condition is for determining the start time + if (self._inundationMax > 3) and (not isLastPeriod): + startCondition = (curPhish > 1) and (nextPhish > 1) + self._textProduct.debug_print("startCondition looking at 2 periods", 1) 
+ elif 1 < self._inundationMax <= 3: + startCondition = curPhish > 1 + self._textProduct.debug_print("startCondition looking at 1 period", 1) + else: + startCondition = False + self._textProduct.debug_print("no startCondition, done", 1) + break + + # Set what the condition is for determining the end time + if not isLastPeriod: + endCondition = (curPhish <= 1) and (nextPhish <= 1) + self._textProduct.debug_print("endCondition looking at 2 periods", 1) + else: + endCondition = False + self._textProduct.debug_print("this is the last period, no endCondition possible", 1) + + if startCondition and (phishStartTime is None): + phishStartTime = tr.startTime() + elif endCondition and (phishStartTime is not None) and (phishEndTime is None): + phishEndTime = tr.startTime() + + # We found a new window, save it, reset and look for any additional windows + self._textProduct.debug_print("Found a new window:", 1) + self._textProduct.debug_print("window phishStartTime = %s window phishEndTime = %s" % + (str(phishStartTime), str(phishEndTime)), 1) + + windows.append((phishStartTime, phishEndTime)) + phishStartTime = None + phishEndTime = None + + self._textProduct.debug_print("Looking for additional windows", 1) + + self._textProduct.debug_print("new phishStartTime = %s new phishEndTime = %s" % + (str(phishStartTime), str(phishEndTime)), 1) + + # Check for the case where a window doesn't end + if (phishStartTime is not None) and (phishEndTime is None): + self._textProduct.debug_print("Found a never-ending window:", 1) + self._textProduct.debug_print("window phishStartTime = %s window phishEndTime = %s" % + (str(phishStartTime), str(phishEndTime)), 1) + windows.append((phishStartTime, None)) + + # Create the final window + if len(windows) == 0: + phishStartTime = None + phishEndTime = None + else: + phishStartTime = windows[0][0] # Start time of first window + phishEndTime = windows[-1][1] # End time of last window + + self._textProduct.debug_print("Constructed the final window:", 1) 
+ self._textProduct.debug_print("final phishStartTime = %s final phishEndTime = %s" % + (str(phishStartTime), str(phishEndTime)), 1) + + self._windowSurge = "Window of concern: " + + if phishStartTime is None: + if self._inundationMax is None or self._inundationMax <= 1: + self._windowSurge += "None" + else: + self._windowSurge += "Around high tide" + else: + self._onsetSurgeHour = self._calculateHourOffset(phishStartTime) + startTime = AbsTime(self._textProduct._issueTime_secs + self._onsetSurgeHour*60*60) + + self._textProduct.debug_print("surge startTime = %s self._onsetSurgeHour = %s " % + (self._textProduct._pp.pformat(startTime), self._onsetSurgeHour), 1) + if phishEndTime is not None: + self._endSurgeHour = self._calculateHourOffset(phishEndTime) + endTime = AbsTime(self._textProduct._issueTime_secs + self._endSurgeHour*60*60) + windowPeriod = self._textProduct.makeTimeRange(startTime, endTime) + else: + windowPeriod = self._textProduct.makeTimeRange(startTime, startTime + 1) + self._textProduct.debug_print("surge window period = %s" % (windowPeriod), 1) + + startTimeDescriptor = self._textProduct._formatPeriod(windowPeriod) + + if phishEndTime is None: + self._windowSurge += "Begins " + startTimeDescriptor + else: + endTimeDescriptor = self._textProduct._formatPeriod(windowPeriod, useEndTime = True) + + if self._onsetSurgeHour > 12: + self._windowSurge += startTimeDescriptor +\ + " until " +\ + endTimeDescriptor + else: + self._windowSurge += "through " + endTimeDescriptor + + if self._inundationMax is not None: + # inundationMax is already rounded but should be stored as an int and not a float + self._currentAdvisory["StormSurgeForecast"] = int(self._inundationMax) + + self._textProduct.debug_print("+"*60, 1) + self._textProduct.debug_print("Done in StormSurgeSectionStats._setStats:", 1) + self._textProduct.debug_print("self._inundationMax = '%s'" % + (self._inundationMax), 1) + self._textProduct.debug_print("self._onsetSurgeHour = '%s'" % + 
(self._onsetSurgeHour), 1) + self._textProduct.debug_print("self._endSurgeHour = '%s'" % + (self._endSurgeHour), 1) + self._textProduct.debug_print("self._windowSurge = '%s'" % + (self._windowSurge), 1) + self._textProduct.debug_print("self._maxThreat = '%s'" % + (self._maxThreat), 1) + self._textProduct.debug_print("+"*60, 1) + + +class FloodingRainSectionStats(SectionCommonStats): + def __init__(self, textProduct, segment, statList, timeRangeList, + extraRainfallStatList, previousRainfallTRlist): + SectionCommonStats.__init__(self, textProduct, segment) + self._sumAccum = None + self._prevAccum = 0.00 + + self._setStats(statList, timeRangeList, extraRainfallStatList, + previousRainfallTRlist) + + def _setStats(self, statList, timeRangeList, extraRainfallStatList, + previousRainfallTRlist): + for period in range(len(statList)): + tr, _ = timeRangeList[period] + statDict = statList[period] + + value = self._textProduct._getStatValue(statDict, "QPF") + + if value is not None: + if self._sumAccum is None: + self._sumAccum = value + else: + self._sumAccum += value + + self._updateThreatStats(tr, statDict, "FloodingRainThreat") + + self._currentAdvisory["FloodingRainThreat"] = self._maxThreat + if self._sumAccum is not None: + # Round so that we don't end up with stats like 4.03143835067749 + self._currentAdvisory["FloodingRainForecast"] = \ + self._textProduct.round(self._sumAccum, "Nearest", 0.5) + + # Now compute the previous rainfall + for period in range(len(extraRainfallStatList)): + tr, _ = timeRangeList[period] + prevStatDict = extraRainfallStatList[period] + + prevStats = self._textProduct._getStatValue(prevStatDict, "QPF") + self._textProduct.debug_print("prevStats = %s" % (prevStats), 1) + if prevStats is not None: + + if self._prevAccum is not None: + self._prevAccum += prevStats + else: + self._prevAccum = prevStats + else: + self._prevAccum = 0.00 + + if self._prevAccum is not None and self._prevAccum >= 0.10: + # Round so that we don't end up with stats 
like 4.03143835067749 + self._currentAdvisory["PreviousRainfall"] = \ + self._textProduct.round(self._prevAccum, "Nearest", 0.1) + else: + # Otherwise, do not consider this sgnificant rainfall + self._currentAdvisory["PreviousRainfall"] = 0.00 + + self._textProduct.debug_print("+"*60, 1) + self._textProduct.debug_print("In FloodingRainSectionStats._setStats", 1) + self._textProduct.debug_print("self._sumAccum = '%s'" % (self._sumAccum), 1) + self._textProduct.debug_print("self._maxThreat = '%s'" % (self._maxThreat), 1) + + +class TornadoSectionStats(SectionCommonStats): + def __init__(self, textProduct, segment, statList, timeRangeList): + SectionCommonStats.__init__(self, textProduct, segment) + + self._setStats(statList, timeRangeList) + + def _setStats(self, statList, timeRangeList): + for period in range(len(statList)): + tr, _ = timeRangeList[period] + statDict = statList[period] + + self._updateThreatStats(tr, statDict, "TornadoThreat") + + self._currentAdvisory["TornadoThreat"] = self._maxThreat + + self._textProduct.debug_print("+"*60, 1) + self._textProduct.debug_print("In TornadoSectionStats._setStats", 1) + self._textProduct.debug_print("self._maxThreat = '%s'" % (self._maxThreat), 1) + + +from xml.etree.ElementTree import Element, SubElement, tostring, dump +import xml.dom.minidom as minidom +import re +class XMLFormatter(): + def __init__(self, textProduct): + self._textProduct = textProduct + + def execute(self, productDict): + xml = Element('product') + self.dictionary(xml, productDict) + self._textProduct.debug_print("XML = %s" % (xml), 1) + self._textProduct.debug_print("XML dump = %s", dump(xml), 1) + prettyXML = minidom.parseString(tostring(xml)) + return prettyXML.toprettyxml() #tostring(xml) + + def xmlKeys(self): + return [ + 'wmoHeader', + 'TTAAii', + 'originatingOffice', + 'productID', + 'siteID', + 'fullStationID', + 'ddhhmmTime', + 'easMessage', + 'productHeader', + 'disclaimer', + 'cityState', + 'stormNumber', + 'productName', + 
'stormName', + 'advisoryType', + 'advisoryNumber', + 'issuedByString', + 'issuanceTimeDate', + + 'segments', + 'ugcHeader', + 'vtecRecords', + 'areaList', + 'issuanceTimeDate', + 'summaryHeadlines', + 'headlinesInEffect', + 'headlineDefinitions', + 'locationsAffected', + 'fcstConfidence', + #section keys will be inserted here (see sectionKeys) + 'infoSection', + + 'endProduct', + ] + + def sectionKeys(self): + return [ + 'windSection', + 'sectionHeader', + 'forecastSubsection', + 'latestForecastSummary', + 'peakWind', + 'windowTS', + 'windowHU', + 'threatSubsection', + 'lifePropertyThreatSummary', + 'threatTrend', + 'threatStatements', + 'impactsSubsection', + 'potentialImpactsSummary', + 'potentialImpactsStatements', + + 'stormSurgeSection', + 'sectionHeader', + 'forecastSubsection', + 'latestForecastSummary', + 'peakSurge', + 'surgeWindow', + 'threatSubsection', + 'lifePropertyThreatSummary', + 'threatTrend', + 'threatStatements', + 'impactsSubsection', + 'potentialImpactsSummary', + 'potentialImpactsStatements', + + 'floodingRainSection', + 'sectionHeader', + 'forecastSubsection', + 'latestForecastSummary', + 'peakRain', + 'threatSubsection', + 'lifePropertyThreatSummary', + 'threatTrend', + 'threatStatements', + 'impactsSubsection', + 'potentialImpactsSummary', + 'potentialImpactsStatements', + + 'tornadoSection', + 'sectionHeader', + 'forecastSubsection', + 'latestForecastSummary', + 'tornadoSituation', + 'threatSubsection', + 'lifePropertyThreatSummary', + 'threatStatements', + 'impactsSubsection', + 'potentialImpactsSummary', + 'potentialImpactsStatements', + ] + + def getSectionKey(self, key): + sectionKey = re.sub("\['......'\]", "", key) + + if "._" in sectionKey: + sectionKey = re.sub(".*\._", "", sectionKey) + + self._textProduct.debug_print("sectionKey = %s" % (sectionKey), 1) + return sectionKey + + def dictionary(self, xml, productDict): + ''' + Returns the dictionary in XML format. 
+ @param productDict: dictionary values + @return: Returns the dictionary in XML format. + ''' + if productDict is not None: + for key in productDict: + value = productDict[key] + editable = False +# if isinstance(key, KeyInfo): +# editable = key.isEditable() +# key = key.getName() + + if key not in self.xmlKeys(): + sectionKey = self.getSectionKey(key) + if sectionKey not in self.sectionKeys(): + self._textProduct.debug_print("skipping '%s' in XML" % (key), 1) + continue + else: + key = sectionKey + if isinstance(value, dict): + subElement = SubElement(xml,key) + self.dictionary(subElement, value) + elif isinstance(value, list): + if key == 'cityList': + subElement = SubElement(xml,'cityList') + if editable: + subElement.attrib['editable'] = 'true' + self.list(subElement, 'city', value) +# elif key == 'infoSection': +# subElement = SubElement(xml, key) +# legacyFormatter = LegacyFormatter(self._textProduct) +# legacyText = legacyFormatter.processInfoSection(value) +# legacyText = legacyText.encode('string-escape') +# subElement.text = legacyText +# if editable: +# subElement.attrib['editable'] = 'true' + else: + self.list(xml, key, value) + else: + subElement = SubElement(xml,key) + subElement.text = value + if editable: + subElement.attrib['editable'] = 'true' + + def list(self, xml, key, data): + ''' + Returns the list in XML format. + @param data: list of values + @return: Returns the list in XML format. 
+ ''' + editable = False +# if isinstance(key, KeyInfo): +# editable = key.isEditable() +# key = key.getName() + if data is not None: + if 'info' in key and 'Section' in key: + subElement = SubElement(xml, key) + self._textProduct.debug_print("info key = '%s'" % (key), 1) + self._textProduct.debug_print("value = %s" % (data), 1) + if isinstance(data, list): + subkey = 'info' + 'Sub' + key[4:] + for value in data: + self.list(subElement, subkey, value) + else: + subElement.text = data + else: + for value in data: + + subElement = SubElement(xml, key) + if editable: + subElement.attrib['editable'] = 'true' + + if isinstance(value, dict): + self.dictionary(subElement, value) + elif isinstance(value, list): + if key == 'cityList': + subElement = SubElement(xml,'cityList') + if editable: + subElement.attrib['editable'] = 'true' + self.list(subElement, 'city', value) + else: + self.list(xml, key, value) + else: + subElement.text = value + + +class LegacyFormatter(): + def __init__(self, textProduct): + self._textProduct = textProduct + self.TAB = " "*self._textProduct._tabLength + self._tpc = HLSTCV_Common.TextProductCommon() + + def execute(self, productDict): + self.productDict = productDict + productParts = self._tpc.getVal(productDict, 'productParts', []) + text = self._processProductParts(productDict, productParts.get('partsList')) + return text + + def _processProductParts(self, productDict, productParts, skipParts=[]): + ''' + Adds the product parts to the product + @param productDict -- dictionary of information -- could be the product dictionary or a sub-part such as a segment + @param skipParts -- necessary to avoid repetition when calling this method recursively + @param productParts -- list of instances of the ProductPart class with information about how to format each product part + @return text -- product string + ''' + text = '' + self._textProduct.debug_print("productParts = %s" % (self._textProduct._pp.pformat(productParts)), 1) + for part in 
productParts: + valtype = type(part) + if valtype is str: + name = part + elif valtype is tuple: + name = part[0] + infoDicts = part[1] + newtext = self.processSubParts(productDict.get(name), infoDicts) + text += newtext + continue + elif valtype is list: + self._tpc.flush() + # TODO THIS SHOULD BE REMOVED AFTER THE REFACTOR OF HazardServicesProductGenerationHandler.JAVA + tup = (part[0], part[1]) + part = tup + name = part[0] + + + if name == 'wmoHeader': + text += self.processWmoHeader(productDict['wmoHeader']) + '\n' + elif name == 'easMessage': + text += productDict['easMessage'] + '\n' + elif name == 'productHeader': + text += self.processProductHeader(productDict['productHeader']) + elif name == 'vtecRecords': + for vtecString in productDict['vtecRecords']: + text += vtecString + '\n' + elif name == 'areaList': + text += self._textProduct.indentText(productDict['areaList'], '', '', + maxWidth=self._textProduct._lineLength) + elif name == 'issuanceTimeDate': + text += productDict['issuanceTimeDate'] + '\n\n' + elif name == 'summaryHeadlines': + text += self.processSummaryHeadlines(productDict['summaryHeadlines']) + elif name == 'locationsAffected': + text += self.processLocationsAffected(productDict['locationsAffected']) + elif 'sectionHeader' in name: + text += "* " + productDict[name].upper() + "\n" + elif 'Subsection' in name: + text += self.processSubsection(productDict[name]) + elif name == 'infoSection': + text += self.processInfoSection(productDict['infoSection']) + elif name in ['endProduct', 'endSection']: + text += '$$\n' + elif name == 'CR': + text += '\n' + elif name == 'doubleAmpersand': + text += '&&\n' + elif name not in self._noOpParts(): + textStr = productDict.get(name) + if textStr: + text += textStr + '\n' + + # Cleanup the case of the last segment which will wind up with two sets + # of '$$' + text = re.sub("\$\$\n+\$\$", "$$\n", text) + + # Return completed text + return text + + def _noOpParts(self): + ''' + These represent product parts 
that should be skipped when calling product part methods. + They will be handled automatically by the formatters. + ''' + return ["setup_segment"] #['CR', 'endProduct', 'endSegment', 'issuanceDateTime', 'doubleAmpersand'] + + def processWmoHeader(self, wmoHeader): + text = wmoHeader['TTAAii'] + ' ' + wmoHeader['fullStationID'] + ' ' + wmoHeader['ddhhmmTime'] + '\n' + text += wmoHeader['productID'] + wmoHeader['siteID'] + '\n' + return text + + def processProductHeader(self, headerDict): + text = headerDict['stormName'] + ' ' + headerDict['productName'] + + advisoryText = '' + if headerDict['advisoryType'] is not None and \ + headerDict['advisoryType'].lower() in ["intermediate", "special"]: + advisoryText = headerDict['advisoryType'] + ' ' + + if headerDict['advisoryNumber'] is not None: + advisoryText += 'Advisory Number ' + headerDict['advisoryNumber'] + + if len(advisoryText) > 0: + if len(text + "/" + advisoryText) > self._textProduct._lineLength: + text += '\n' + else: + text += '/' + + text += advisoryText + '\n' + else: + text += '\n' + + text += "National Weather Service " + headerDict['cityState'] + " " + headerDict['stormNumber'] + '\n' + text += headerDict['issuanceTimeDate'] + '\n\n' + + return text + + def processLocationsAffected(self, locationsAffectedList): + if len(locationsAffectedList) == 0: + return "" + + text = "* LOCATIONS AFFECTED\n" + for location in locationsAffectedList: + text += self.TAB + "- " + location + "\n" + return text + "\n" + + def processSubsection(self, subsectionOrderedDict): + text = "" + for partName in subsectionOrderedDict: + if "Summary" in partName: + firstIndentText = self.TAB + "- " + nextIndentText = self.TAB + " " + text += self._textProduct.indentText(subsectionOrderedDict[partName], + firstIndentText, + nextIndentText, + maxWidth = self._textProduct._lineLength) + else: + firstIndentText = self.TAB*2 + "- " + nextIndentText = self.TAB*2 + " " + if "threatStatements" in partName: + text += 
self.processThreatStatements(firstIndentText, + nextIndentText, + subsectionOrderedDict[partName]) + elif "potentialImpactsStatements" in partName: + text += self.processImpactsStatements(firstIndentText, + nextIndentText, + subsectionOrderedDict[partName]) + else: + text += self._textProduct.indentText(subsectionOrderedDict[partName], + firstIndentText, + nextIndentText, + maxWidth=self._textProduct._lineLength) + + return text + "\n" + + def processThreatStatements(self, firstIndentText, nextIndentText, threatStatements): + planning = threatStatements[0] + text = self._textProduct.indentText(planning, + firstIndentText, + nextIndentText, + maxWidth=self._textProduct._lineLength) + + preparation = threatStatements[1] + text += self._textProduct.indentText(preparation, + firstIndentText, + nextIndentText, + maxWidth=self._textProduct._lineLength) + + action = threatStatements[2] + text += self._textProduct.indentText(action, + firstIndentText, + nextIndentText, + maxWidth=self._textProduct._lineLength) + + return text + + def processImpactsStatements(self, firstIndentText, nextIndentText, statements): + text = "" + + for statement in statements: + text += self._textProduct.indentText(statement, + firstIndentText, + nextIndentText, + maxWidth=self._textProduct._lineLength) + + return text + + def processInfoSection(self, infoSection): + if len(infoSection) == 0: + return "" + + text = "* FOR MORE INFORMATION:\n" + text += self._buildInfoSection(infoSection, tabLevel=1) + return text + "\n$$\n\n" + + def _buildInfoSection(self, infoSection, tabLevel): + text = "" + for component in infoSection: + if type(component) is str: + text += self.TAB*tabLevel + "- " + component + "\n" + elif type(component) is list: + text += self._buildInfoSection(component, tabLevel+1) + return text + + def processSummaryHeadlines(self, summaryDict): + text = "" + for headline in summaryDict['headlinesInEffect']: + text += headline.upper() + "\n" + + text += "\n" + + for definition in 
summaryDict['headlineDefinitions']: + text += self._textProduct.indentText(definition, + maxWidth=self._textProduct._lineLength) \ + + "\n" + return text + + def processSubParts(self, subParts, infoDicts): + """ + Generates Legacy text from a list of subParts e.g. segments or sections + @param subParts: a list of dictionaries for each subPart + @param partsLists: a list of Product Parts for each segment + @return: Returns the legacy text of the subParts + """ + text = '' + for i in range(len(subParts)): + newtext = self._processProductParts(subParts[i], infoDicts[i].get('partsList')) + text += newtext + return text + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_TCVNHC_MultiPil.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_TCVNHC_MultiPil.py index 645cdcef94..4ab2ac226e 100755 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_TCVNHC_MultiPil.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_TCVNHC_MultiPil.py @@ -1,630 +1,630 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-## - -######################################################################## -# Hazard_TCVNHC.py -#------------------------------------------------------------------------- -# Description: This product will generate a Tropical Cyclone -# Watch/Warning (TCV) product for areas covered by the U.S. National -# Weather Service. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# Hazard_TCV, Hazard_TCV_Definition -#------------------------------------------------------------------------- -# Version: 2015.4.7 (04/07/2015) -#------------------------------------------------------------------------- -# Weather Elements Needed: Hazards -#------------------------------------------------------------------------- -# Edit Areas Needed: One sample area for each segment between -# official NHC breakpoints -#------------------------------------------------------------------------- -# Programmers and Support including product team leader's email: -# Matthew H. 
Belk NOAA/NWS Taunton, MA Matthew.Belk@noaa.gov -#------------------------------------------------------------------------- -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------- -------- --------- -------------------------------------------- -# Apr 07, 2015 mbelk Initial creation -# Sep 19, 2016 19293 randerso Initial baseline check in -# Oct 31, 2016 25946 randerso Changed to keep Hazard_TCVNHC from -# overwriting Hazard_TCV -# Nov 11, 2016 19293 randerso Fix issuing office line -# Jun 26, 2017 6325 randerso Remove Breakpoints from product header -# -######################################################################## - - -import string, time, re, os, types, copy, sets, pprint - -import GenericHazards -import LogStream -import TropicalHazards - - -#=============================================================================== -# Define a series of commands to get data for all available active storms -##class TextProduct(GenericHazards.TextProduct): -class TextProduct(TropicalHazards.TropicalHazards, GenericHazards.TextProduct): - Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) - - Definition['displayName'] = "Hazard_" - Definition["outputFile"] = "{prddir}/TEXT/.txt" - Definition["database"] = "Official" - Definition["mapNameForCombinations"] = "Zones_" - Definition["defaultEditAreas"] = "Combinations__" - Definition["showZoneCombiner"] = 0 - - # Header configuration items - Definition["productName"] = "PROTOTYPE TCV" # Warning! DO NOT CHANGE. - # The productName gets substituted later in the formatter! - - Definition["fullStationID" ] = "" - Definition["wmoID" ] = "" - Definition["wfoCityState" ] = "" - Definition["pil" ] = "" - Definition["textdbPil" ] = "" - Definition["awipsWANPil" ] = "" - Definition["site"] = "" - Definition["wfoCity"] = "" - - # OPTIONAL CONFIGURATION ITEMS - - # Source database. 
"Official", "Fcst", or "ISC" - #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished - #Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines - - Definition["purgeTime"] = 8 # Maximum hours for expireTime from issueTime - Definition["includeCities"] = 0 # Cities included in area header - Definition["accurateCities"] = 0 # If 1, cities are based on grids; - # otherwise full list is included - Definition["cityLocation"] = "" # City lat/lon dictionary to use - #Definition["cityDescriptor"] = "" - Definition["includeZoneNames"] = 0 # Zone names will be included in the area header - Definition["lineLength"] = 68 # line length - Definition["easPhrase"] = "" - Definition["includeOverviewHeadline"] = 0 #If 1, the overview header is templated - Definition["includeOverview"] = 0 #If 1, the overview section is template - Definition["bulletProd"] = 0 #If 1, the product will have a bullet format - - ### Survey text to insert below the last $$ of the product - Definition["urlText"] = "" - ### - -# Definition["areaDictionary"] = "TropicalAreaDictionary" - - - Definition["hazardSamplingThreshold"] = (3, None) #(%cov, #points) - - - Definition["debug"] = {"generateForecast": 0, - "_preProcessProduct":0, - "_makeProduct":0, - "_postProcessProduct":0, - "_formatTCVline":0, -# "":0, - } - - - #=========================================================================== - # Define a variableList to get answers to questions we need - -# VariableList = [(("Storm Name","stormName"),"","radio", stormNames)] - - - def __init__(self): - GenericHazards.TextProduct.__init__(self) - TropicalHazards.TropicalHazards.__init__(self) - - - def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the sampled hazards - self._sampledHazards = argDict["hazards"] - - print "="*70 - print "raw analyzed 
hazards" - print pprint.pformat(self._sampledHazards.rawAnalyzedTable()) - print "="*70 - - # Get the combinations where there are any tropical hazards - segmentList = self.organizeHazards(self._sampledHazards.rawAnalyzedTable()) - - # If there is nothing to do - leave now - if len(segmentList) == 0: - return "NO COASTAL TROPICAL WATCHES OR WARNINGS TO REPORT" - - #======================================================================= - # Determine time ranges - error = self._determineTimeRanges(argDict) - - if error is not None: - return error - - #======================================================================= - # Now organize all hazards by phenomena and significance - - rawHazards = self._sampledHazards.rawAnalyzedTable() - - hazardPhenSig = \ - self._sampledHazards._HazardsTable__organizeByPhenSig(rawHazards) - - #======================================================================= - # Initialize the output string - - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - #======================================================================= - # Make the shortcut hazard dictionary along with its sorted keys - - (self._hazardAreaDict, self._hazardAreaDictKeyList) = \ - self._constructHazardDict(hazardPhenSig, self._filterETN) - - - #======================================================================= - # Make a list of segments we need to process - - segmentList = self._constructSegmentList(self._hazardAreaDict, - self._hazardAreaDictKeyList) - - # Generate the product for each segment in the segmentList - fraction = 0 - if len(segmentList) > 0: - fractionOne = 1.0/float(len(segmentList)) - percent = 50.0 - self.setProgressPercentage(50) - for segmentAreas in segmentList: - - # Separate out the various edit area types - (segments, ugcZones, zones, islands, water) = segmentAreas - - # If this is the international TCV - if "Intl" in self._displayName: - - # Do not permit any UGC zones - ugcZones = [] - - self.debug_print("\n" + "*"*90, 1) 
- self.debug_print("In generateForecast\n", 1) - self.debug_print("segments:\n %s" % (pprint.pformat(segments)), 1) - self.debug_print("ugcZones:\n %s" % (pprint.pformat(ugcZones)), 1) - self.debug_print("zones:\n %s" % (pprint.pformat(zones)), 1) - self.debug_print("islands:\n %s" % (pprint.pformat(islands)), 1) - self.debug_print("water:\n %s" % (pprint.pformat(water)), 1) - - self.progressMessage(fraction, percent, "Making Product for Segment") - - #------------------------------------------------------------------- - # Decide which set of zones we want to process for header - - if len(ugcZones) > 0: - activeZones = ugcZones - else: - activeZones = zones - - self.debug_print("+"*90, 1) - self.debug_print("header zones =\n%s" % - (pprint.pformat(activeZones)), 1) - - # If there are no zones to process - move on to the next segment - if len(activeZones) == 0: - print "No active zones - moving on" - continue - - # Format UGC header using the UGC zones - fcst = self._preProcessArea(fcst, activeZones, self._expireTime, - argDict) - - #------------------------------------------------------------------- - # Decide which set of areas we want to process for body - - # CHANGE to get zones, not breakpoint segments - if len(ugcZones) > 0: - activeAreas = ugcZones - else: - activeAreas = zones - - self.debug_print("body zones =\n%s" % (activeAreas), 1) - - # Make the product body using the segment areas - fcst = self._makeProduct(fcst, activeAreas, argDict) - - # Finish off the product block using the zones - fcst = self._postProcessArea(fcst, activeAreas, argDict) - - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - - # - # Overridden to produce product header using info for specific storms - # - - def _preProcessProduct(self, fcst, argDict): - - # Product header - if self._areaName != "": - self._areaName = " FOR " + self._areaName - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, - 
self._productName + self._areaName) - - # Get the storm info that goes with this PIL - pil = self._pil[-3:] - self.debug_print("pil '%s'" % (pil), 1) - - # Pull needed storm info from the stormDict based on selected stormName - stormDict = self._loadAdvisory(pil) - - # if we do not have the correct storm information - if stormDict is None: - - # There is no point continuing - fcst = "Could not find valid storm information for the " + \ - "TCV%s product. " % (pil) + \ - "Please run the StormInfo procedure for this storm, " + \ - "if needed, and try again." - return fcst - - # Pull needed storm info from the stormDict based on selected stormName - stormName = stormDict["stormName"] - stormType = stormDict["stormType"] - advType = stormDict["advisoryType"] - advNum = stormDict["advisoryNumber"] - - # Ensure the VTEC ETN is correct for a national center - stormNum = stormDict["stormNumber"] - self._filterETN = int(stormNum) - if self._filterETN < 1000: - self._filterETN += 1000 - - self.debug_print("*" * 80, 1) - self.debug_print("my ETN is: %s" % (self._filterETN), 1) - self.debug_print("*" * 80, 1) - - # Correct the WMO header based on selected PIL - pilNumber = pil[-1:] - wmoID = self._wmoID[:-1] + pilNumber - - # Modify the product name to include the advisory package info - productName = re.sub("(?i)PROTOTYPE TCV", - "%s Watch/Warning Routine Advisory Number %s" % (stormName, advNum), - productName) - - # Handle intermediate or special advisories - if advType in ["Special", "Intermediate"]: - - productName = re.sub("(?i)Routine", advType, productName) - - # Otherwise, remove the "routine" wording - else: - - productName = re.sub("(?i)Routine ", "", productName) - - - # Insert the EAS phrase - if it exists - if len(self._easPhrase) != 0: - eas = self._easPhrase + '\n' - else: - eas = '' - - # Format a storm code for this product - stormCode = "AL%02d%4d" % (int(stormNum), time.gmtime().tm_year) - - # Set aside a variable to track impacted WFOs - self._wfoList = [] 
- - # Construct the MND header text - s = wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + "TCV" + pil + "\n\n" - - fcst = fcst + s.upper() +\ - eas + productName + "\n" +\ - "NWS " + self._wfoCityState + \ - " %s\n" % (stormCode) + issuedByString + self._timeLabel + \ - "\n\n" - - fcst = fcst + ".%s %s\n\n" % (stormType, stormName) - - # Now add the disclaimer - fcst += """ -Caution...this product only approximately conveys the extent of -tropical cyclone wind and surge watches and warnings. Please see -the latest public advisory from the National Hurricane Center for -the precise lateral extent of wind watches and warnings along the -coast...as well as the approximate lateral extent of surge watches -and warnings. The precise extent of surge watches and warnings -can be found in the NWS National Digital Forecast Database Hazard -grids. - -""" - -#---|----1----|----2----|----3----|----4----|----5----|----6----|----7 - # Return the completed text we have so far - return fcst - - # - # Overridden to produce breakpoint end points within each segment - # - - def _makeProduct(self, fcst, segmentAreas, argDict): - - # Set the language - argDict["language"] = self._language - - # Make a list of any impacted WFOs from all breakpoint segments - self._getAffectedWFOs(segmentAreas) - - # Get ready to define the segment text for this group - startLine = "" - endLine = "" - - #=================================================================== - # Determine if we just want to list the end points of this segment, - # or all of the segments in this list - - self.debug_print("\nLast chance, segmentAreas =\n%s" % - (pprint.pformat(segmentAreas)), 1) - - # If this segment is something other than an island - if len(segmentAreas) > 1: - - self.debug_print("using endpoints", 1) - - # Start the display list with the first segment in this list - displayList = [segmentAreas[0]] - - # If the last segment in the list is a different segment - if segmentAreas[-1] not 
in displayList: - - # Add this segment as well - displayList.append(segmentAreas[-1]) - - # Otherwise, display all of the points - else: - displayList = segmentAreas[:] - - self.debug_print("\ndisplayList =\n%s" % - (pprint.pformat(displayList)), 1) - - - # Do not include non-UGC codes in mainland USA TCV - self.debug_print("displayName = '%s'" % (self._displayName), 1) - if self._displayName.find("Intl") == -1 and displayList != [] and \ - len(displayList[0]) >= 3 and displayList[0][3] != "Z": - - self.debug_print("Do not display non-Z codes in USA TCV", 1) - displayList = [] - - - #=================================================================== - # Look through each zone of this hazard group - - for id in displayList: - - index = displayList.index(id) - result = abs(len(displayList) - index) - - # Get the entry for this area - if self._tropicalAreaDict.has_key(id): - entry = self._tropicalAreaDict[id] - else: - entry = {} - LogStream.logProblem(\ - "AreaDictionary missing definition for [" + id + "].") - - #--------------------------------------------------------------- - # If we have not already constructed the info for the starting - # point of this segment, or this is an island point - - if len(startLine) == 0: # or result > 1: - - # Construct it now - startLine += self._formatTCVline(entry) - - #--------------------------------------------------------------- - # If we need the ending point info - - elif len(endLine) == 0: # or result == 1: - - # Construct it now - endLine = self._formatTCVline(entry, "end") - - # Prevent both lines from being the same thing - if startLine == endLine: - - # Reset the endline - we don't need it - endLine = "" - - return fcst - - - # - # Overridden to include affected WFO list - # - - def _postProcessProduct(self, fcst, argDict): - # - # If an overview exists for this product, insert it - # - overview = self.finalOverviewText() - overviewSearch = re.compile(r'DEFAULT OVERVIEW SECTION', re.DOTALL) - fcst = 
overviewSearch.sub(overview, fcst) - # - # Added to place line feeds in the CAP tags to keep separate from CTAs - fcst = fcst.replace(r"PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", \ - r"\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n") - fcst = fcst.replace(".:", ".") - fcst = fcst.replace("\n ","\n") - fcst = fcst.replace("&&", "\n&&\n") - - # Add the impacted WFO list at the bottom of the TCV - wfoListText = "ATTN...WFO..." - - self._wfoList.sort() - - for wfo in self._wfoList: - wfoListText += "%s..." % (wfo) - - fcst += "\n" + self.endline(wfoListText) + "\n" - - - # Now handle the EAS urgency coding - urgent = 0 - followup = 1 - prodNameKey = '' - fullKeyList = [] - newList = ['NEW', 'EXA', 'EXB'] - - # Remove EAS line if not urgent - if urgent == 0 and len(self._easPhrase): - fcst = fcst.replace(self._easPhrase + '\n', '', 1) - - # Prevent empty Call to Action Tags - fcst = re.sub(r'\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\s*&&\n', \ - "", fcst) - - # Remove any empty framing code - fcst = re.sub("\|\*\s*\*\|", "", fcst) - - # Indent the bullet text - fcst = self._indentBulletText(fcst) - - # - # Clean up multiple line feeds - # - - fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) - fcst = fixMultiLF.sub(r'\1', fcst) - - # - # Clean up the JSON file for this storm - if it is all over - # - if self._allCAN: - # set allCAN flag in json file - advisoryName = self._pil[-3:] - advisoryDict = self._loadAdvisory(advisoryName) - advisoryDict['AllCAN'] = True - self._saveAdvisory(advisoryName, advisoryDict) - - # - # Finish Progress Meter - # - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - - # Add the url text from the configuration section - fcst = fcst + "\n" + self._urlText - - return fcst - - -################################################################################ -# New common utility methods for tropical hazard formatters -################################################################################ - 
- #=========================================================================== - # Define a method to format a breakpoint text line within a TCV segment - - def _formatTCVline(self, entry, type="start"): - """TropicalHazards addition of _formatTCVline. - - This method will produce a list of NWS WFOs impacted by tropical - hazards with the specified identifiers. - - Arguments: - entry -> TropicalAreaDictionary entry for an edit area - type -> type of breakpoint to produce (optional, defaults to start) - """ - - # If this is not an ending point - if type != "end": - - # Get the information we need - point = entry["startBreakpoint"].strip() - lat = entry["startLat"].strip() - lon = entry["startLon"].strip() - state = entry["startState"].strip() - - # Otherwise - get the end point info for this segment - else: - - # Get the information we need - point = entry["endBreakpoint"].strip() - lat = entry["endLat"].strip() - lon = entry["endLon"].strip() - state = entry["endState"].strip() - - - # Clean up the state so there are no spaces or dashes - state = re.sub("[ _-]+", "", state) - - #----------------------------------------------------------------------- - # If this is not the border of a state or country - - if re.search("(?i)border", point) is None: - - # Add the state/country - point += "-" + state - - #----------------------------------------------------------------------- - # Append the appropriate hemisphere of the latitude - - if lat.find("-") != -1: - lat += "S" - lat = lat.replace("-", "") - else: - lat += "N" - - #----------------------------------------------------------------------- - # Append the appropriate hemisphere of the longitude - - if lon.find("-") != -1: - lon += "W" - lon = lon.replace("-", "") - else: - lon += "E" - - #----------------------------------------------------------------------- - # Now construct the final formatted line - - text = "%-36s%6s%7s\n" % (re.sub("[ _]+", "-", point) + " ", lat, lon) - - # Return the text - return text - +## 
+# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +######################################################################## +# Hazard_TCVNHC.py +#------------------------------------------------------------------------- +# Description: This product will generate a Tropical Cyclone +# Watch/Warning (TCV) product for areas covered by the U.S. National +# Weather Service. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. 
+#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# Hazard_TCV, Hazard_TCV_Definition +#------------------------------------------------------------------------- +# Version: 2015.4.7 (04/07/2015) +#------------------------------------------------------------------------- +# Weather Elements Needed: Hazards +#------------------------------------------------------------------------- +# Edit Areas Needed: One sample area for each segment between +# official NHC breakpoints +#------------------------------------------------------------------------- +# Programmers and Support including product team leader's email: +# Matthew H. Belk NOAA/NWS Taunton, MA Matthew.Belk@noaa.gov +#------------------------------------------------------------------------- +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------- -------- --------- -------------------------------------------- +# Apr 07, 2015 mbelk Initial creation +# Sep 19, 2016 19293 randerso Initial baseline check in +# Oct 31, 2016 25946 randerso Changed to keep Hazard_TCVNHC from +# overwriting Hazard_TCV +# Nov 11, 2016 19293 randerso Fix issuing office line +# Jun 26, 2017 6325 randerso Remove Breakpoints from product header +# +######################################################################## + + +import string, time, re, os, types, copy, sets, pprint + +import GenericHazards +import LogStream +import TropicalHazards + + +#=============================================================================== +# Define a series of commands to get data for all available active storms +##class TextProduct(GenericHazards.TextProduct): +class TextProduct(TropicalHazards.TropicalHazards, GenericHazards.TextProduct): + Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) + + Definition['displayName'] = "Hazard_" + Definition["outputFile"] = "{prddir}/TEXT/.txt" + Definition["database"] = "Official" + 
Definition["mapNameForCombinations"] = "Zones_" + Definition["defaultEditAreas"] = "Combinations__" + Definition["showZoneCombiner"] = 0 + + # Header configuration items + Definition["productName"] = "PROTOTYPE TCV" # Warning! DO NOT CHANGE. + # The productName gets substituted later in the formatter! + + Definition["fullStationID" ] = "" + Definition["wmoID" ] = "" + Definition["wfoCityState" ] = "" + Definition["pil" ] = "" + Definition["textdbPil" ] = "" + Definition["awipsWANPil" ] = "" + Definition["site"] = "" + Definition["wfoCity"] = "" + + # OPTIONAL CONFIGURATION ITEMS + + # Source database. "Official", "Fcst", or "ISC" + #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished + #Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines + + Definition["purgeTime"] = 8 # Maximum hours for expireTime from issueTime + Definition["includeCities"] = 0 # Cities included in area header + Definition["accurateCities"] = 0 # If 1, cities are based on grids; + # otherwise full list is included + Definition["cityLocation"] = "" # City lat/lon dictionary to use + #Definition["cityDescriptor"] = "" + Definition["includeZoneNames"] = 0 # Zone names will be included in the area header + Definition["lineLength"] = 68 # line length + Definition["easPhrase"] = "" + Definition["includeOverviewHeadline"] = 0 #If 1, the overview header is templated + Definition["includeOverview"] = 0 #If 1, the overview section is template + Definition["bulletProd"] = 0 #If 1, the product will have a bullet format + + ### Survey text to insert below the last $$ of the product + Definition["urlText"] = "" + ### + +# Definition["areaDictionary"] = "TropicalAreaDictionary" + + + Definition["hazardSamplingThreshold"] = (3, None) #(%cov, #points) + + + Definition["debug"] = {"generateForecast": 0, + "_preProcessProduct":0, + "_makeProduct":0, + "_postProcessProduct":0, + "_formatTCVline":0, +# "":0, + } + + + 
#=========================================================================== + # Define a variableList to get answers to questions we need + +# VariableList = [(("Storm Name","stormName"),"","radio", stormNames)] + + + def __init__(self): + GenericHazards.TextProduct.__init__(self) + TropicalHazards.TropicalHazards.__init__(self) + + + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the sampled hazards + self._sampledHazards = argDict["hazards"] + + print("="*70) + print("raw analyzed hazards") + print(pprint.pformat(self._sampledHazards.rawAnalyzedTable())) + print("="*70) + + # Get the combinations where there are any tropical hazards + segmentList = self.organizeHazards(self._sampledHazards.rawAnalyzedTable()) + + # If there is nothing to do - leave now + if len(segmentList) == 0: + return "NO COASTAL TROPICAL WATCHES OR WARNINGS TO REPORT" + + #======================================================================= + # Determine time ranges + error = self._determineTimeRanges(argDict) + + if error is not None: + return error + + #======================================================================= + # Now organize all hazards by phenomena and significance + + rawHazards = self._sampledHazards.rawAnalyzedTable() + + hazardPhenSig = \ + self._sampledHazards._HazardsTable__organizeByPhenSig(rawHazards) + + #======================================================================= + # Initialize the output string + + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + #======================================================================= + # Make the shortcut hazard dictionary along with its sorted keys + + (self._hazardAreaDict, self._hazardAreaDictKeyList) = \ + self._constructHazardDict(hazardPhenSig, self._filterETN) + + + #======================================================================= + # Make 
a list of segments we need to process + + segmentList = self._constructSegmentList(self._hazardAreaDict, + self._hazardAreaDictKeyList) + + # Generate the product for each segment in the segmentList + fraction = 0 + if len(segmentList) > 0: + fractionOne = 1.0/float(len(segmentList)) + percent = 50.0 + self.setProgressPercentage(50) + for segmentAreas in segmentList: + + # Separate out the various edit area types + (segments, ugcZones, zones, islands, water) = segmentAreas + + # If this is the international TCV + if "Intl" in self._displayName: + + # Do not permit any UGC zones + ugcZones = [] + + self.debug_print("\n" + "*"*90, 1) + self.debug_print("In generateForecast\n", 1) + self.debug_print("segments:\n %s" % (pprint.pformat(segments)), 1) + self.debug_print("ugcZones:\n %s" % (pprint.pformat(ugcZones)), 1) + self.debug_print("zones:\n %s" % (pprint.pformat(zones)), 1) + self.debug_print("islands:\n %s" % (pprint.pformat(islands)), 1) + self.debug_print("water:\n %s" % (pprint.pformat(water)), 1) + + self.progressMessage(fraction, percent, "Making Product for Segment") + + #------------------------------------------------------------------- + # Decide which set of zones we want to process for header + + if len(ugcZones) > 0: + activeZones = ugcZones + else: + activeZones = zones + + self.debug_print("+"*90, 1) + self.debug_print("header zones =\n%s" % + (pprint.pformat(activeZones)), 1) + + # If there are no zones to process - move on to the next segment + if len(activeZones) == 0: + print("No active zones - moving on") + continue + + # Format UGC header using the UGC zones + fcst = self._preProcessArea(fcst, activeZones, self._expireTime, + argDict) + + #------------------------------------------------------------------- + # Decide which set of areas we want to process for body + + # CHANGE to get zones, not breakpoint segments + if len(ugcZones) > 0: + activeAreas = ugcZones + else: + activeAreas = zones + + self.debug_print("body zones =\n%s" % 
(activeAreas), 1) + + # Make the product body using the segment areas + fcst = self._makeProduct(fcst, activeAreas, argDict) + + # Finish off the product block using the zones + fcst = self._postProcessArea(fcst, activeAreas, argDict) + + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + + # + # Overridden to produce product header using info for specific storms + # + + def _preProcessProduct(self, fcst, argDict): + + # Product header + if self._areaName != "": + self._areaName = " FOR " + self._areaName + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, + self._productName + self._areaName) + + # Get the storm info that goes with this PIL + pil = self._pil[-3:] + self.debug_print("pil '%s'" % (pil), 1) + + # Pull needed storm info from the stormDict based on selected stormName + stormDict = self._loadAdvisory(pil) + + # if we do not have the correct storm information + if stormDict is None: + + # There is no point continuing + fcst = "Could not find valid storm information for the " + \ + "TCV%s product. " % (pil) + \ + "Please run the StormInfo procedure for this storm, " + \ + "if needed, and try again." 
+ return fcst + + # Pull needed storm info from the stormDict based on selected stormName + stormName = stormDict["stormName"] + stormType = stormDict["stormType"] + advType = stormDict["advisoryType"] + advNum = stormDict["advisoryNumber"] + + # Ensure the VTEC ETN is correct for a national center + stormNum = stormDict["stormNumber"] + self._filterETN = int(stormNum) + if self._filterETN < 1000: + self._filterETN += 1000 + + self.debug_print("*" * 80, 1) + self.debug_print("my ETN is: %s" % (self._filterETN), 1) + self.debug_print("*" * 80, 1) + + # Correct the WMO header based on selected PIL + pilNumber = pil[-1:] + wmoID = self._wmoID[:-1] + pilNumber + + # Modify the product name to include the advisory package info + productName = re.sub("(?i)PROTOTYPE TCV", + "%s Watch/Warning Routine Advisory Number %s" % (stormName, advNum), + productName) + + # Handle intermediate or special advisories + if advType in ["Special", "Intermediate"]: + + productName = re.sub("(?i)Routine", advType, productName) + + # Otherwise, remove the "routine" wording + else: + + productName = re.sub("(?i)Routine ", "", productName) + + + # Insert the EAS phrase - if it exists + if len(self._easPhrase) != 0: + eas = self._easPhrase + '\n' + else: + eas = '' + + # Format a storm code for this product + stormCode = "AL%02d%4d" % (int(stormNum), time.gmtime().tm_year) + + # Set aside a variable to track impacted WFOs + self._wfoList = [] + + # Construct the MND header text + s = wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + "TCV" + pil + "\n\n" + + fcst = fcst + s.upper() +\ + eas + productName + "\n" +\ + "NWS " + self._wfoCityState + \ + " %s\n" % (stormCode) + issuedByString + self._timeLabel + \ + "\n\n" + + fcst = fcst + ".%s %s\n\n" % (stormType, stormName) + + # Now add the disclaimer + fcst += """ +Caution...this product only approximately conveys the extent of +tropical cyclone wind and surge watches and warnings. 
Please see +the latest public advisory from the National Hurricane Center for +the precise lateral extent of wind watches and warnings along the +coast...as well as the approximate lateral extent of surge watches +and warnings. The precise extent of surge watches and warnings +can be found in the NWS National Digital Forecast Database Hazard +grids. + +""" + +#---|----1----|----2----|----3----|----4----|----5----|----6----|----7 + # Return the completed text we have so far + return fcst + + # + # Overridden to produce breakpoint end points within each segment + # + + def _makeProduct(self, fcst, segmentAreas, argDict): + + # Set the language + argDict["language"] = self._language + + # Make a list of any impacted WFOs from all breakpoint segments + self._getAffectedWFOs(segmentAreas) + + # Get ready to define the segment text for this group + startLine = "" + endLine = "" + + #=================================================================== + # Determine if we just want to list the end points of this segment, + # or all of the segments in this list + + self.debug_print("\nLast chance, segmentAreas =\n%s" % + (pprint.pformat(segmentAreas)), 1) + + # If this segment is something other than an island + if len(segmentAreas) > 1: + + self.debug_print("using endpoints", 1) + + # Start the display list with the first segment in this list + displayList = [segmentAreas[0]] + + # If the last segment in the list is a different segment + if segmentAreas[-1] not in displayList: + + # Add this segment as well + displayList.append(segmentAreas[-1]) + + # Otherwise, display all of the points + else: + displayList = segmentAreas[:] + + self.debug_print("\ndisplayList =\n%s" % + (pprint.pformat(displayList)), 1) + + + # Do not include non-UGC codes in mainland USA TCV + self.debug_print("displayName = '%s'" % (self._displayName), 1) + if self._displayName.find("Intl") == -1 and displayList != [] and \ + len(displayList[0]) >= 3 and displayList[0][3] != "Z": + + 
self.debug_print("Do not display non-Z codes in USA TCV", 1) + displayList = [] + + + #=================================================================== + # Look through each zone of this hazard group + + for id in displayList: + + index = displayList.index(id) + result = abs(len(displayList) - index) + + # Get the entry for this area + if id in self._tropicalAreaDict: + entry = self._tropicalAreaDict[id] + else: + entry = {} + LogStream.logProblem(\ + "AreaDictionary missing definition for [" + id + "].") + + #--------------------------------------------------------------- + # If we have not already constructed the info for the starting + # point of this segment, or this is an island point + + if len(startLine) == 0: # or result > 1: + + # Construct it now + startLine += self._formatTCVline(entry) + + #--------------------------------------------------------------- + # If we need the ending point info + + elif len(endLine) == 0: # or result == 1: + + # Construct it now + endLine = self._formatTCVline(entry, "end") + + # Prevent both lines from being the same thing + if startLine == endLine: + + # Reset the endline - we don't need it + endLine = "" + + return fcst + + + # + # Overridden to include affected WFO list + # + + def _postProcessProduct(self, fcst, argDict): + # + # If an overview exists for this product, insert it + # + overview = self.finalOverviewText() + overviewSearch = re.compile(r'DEFAULT OVERVIEW SECTION', re.DOTALL) + fcst = overviewSearch.sub(overview, fcst) + # + # Added to place line feeds in the CAP tags to keep separate from CTAs + fcst = fcst.replace(r"PRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.", \ + r"\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\n") + fcst = fcst.replace(".:", ".") + fcst = fcst.replace("\n ","\n") + fcst = fcst.replace("&&", "\n&&\n") + + # Add the impacted WFO list at the bottom of the TCV + wfoListText = "ATTN...WFO..." + + self._wfoList.sort() + + for wfo in self._wfoList: + wfoListText += "%s..." 
% (wfo) + + fcst += "\n" + self.endline(wfoListText) + "\n" + + + # Now handle the EAS urgency coding + urgent = 0 + followup = 1 + prodNameKey = '' + fullKeyList = [] + newList = ['NEW', 'EXA', 'EXB'] + + # Remove EAS line if not urgent + if urgent == 0 and len(self._easPhrase): + fcst = fcst.replace(self._easPhrase + '\n', '', 1) + + # Prevent empty Call to Action Tags + fcst = re.sub(r'\nPRECAUTIONARY/PREPAREDNESS ACTIONS\.\.\.\s*&&\n', \ + "", fcst) + + # Remove any empty framing code + fcst = re.sub("\|\*\s*\*\|", "", fcst) + + # Indent the bullet text + fcst = self._indentBulletText(fcst) + + # + # Clean up multiple line feeds + # + + fixMultiLF = re.compile(r'(\n\n)\n*', re.DOTALL) + fcst = fixMultiLF.sub(r'\1', fcst) + + # + # Clean up the JSON file for this storm - if it is all over + # + if self._allCAN: + # set allCAN flag in json file + advisoryName = self._pil[-3:] + advisoryDict = self._loadAdvisory(advisoryName) + advisoryDict['AllCAN'] = True + self._saveAdvisory(advisoryName, advisoryDict) + + # + # Finish Progress Meter + # + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + + # Add the url text from the configuration section + fcst = fcst + "\n" + self._urlText + + return fcst + + +################################################################################ +# New common utility methods for tropical hazard formatters +################################################################################ + + #=========================================================================== + # Define a method to format a breakpoint text line within a TCV segment + + def _formatTCVline(self, entry, type="start"): + """TropicalHazards addition of _formatTCVline. + + This method will produce a list of NWS WFOs impacted by tropical + hazards with the specified identifiers. 
+ + Arguments: + entry -> TropicalAreaDictionary entry for an edit area + type -> type of breakpoint to produce (optional, defaults to start) + """ + + # If this is not an ending point + if type != "end": + + # Get the information we need + point = entry["startBreakpoint"].strip() + lat = entry["startLat"].strip() + lon = entry["startLon"].strip() + state = entry["startState"].strip() + + # Otherwise - get the end point info for this segment + else: + + # Get the information we need + point = entry["endBreakpoint"].strip() + lat = entry["endLat"].strip() + lon = entry["endLon"].strip() + state = entry["endState"].strip() + + + # Clean up the state so there are no spaces or dashes + state = re.sub("[ _-]+", "", state) + + #----------------------------------------------------------------------- + # If this is not the border of a state or country + + if re.search("(?i)border", point) is None: + + # Add the state/country + point += "-" + state + + #----------------------------------------------------------------------- + # Append the appropriate hemisphere of the latitude + + if lat.find("-") != -1: + lat += "S" + lat = lat.replace("-", "") + else: + lat += "N" + + #----------------------------------------------------------------------- + # Append the appropriate hemisphere of the longitude + + if lon.find("-") != -1: + lon += "W" + lon = lon.replace("-", "") + else: + lon += "E" + + #----------------------------------------------------------------------- + # Now construct the final formatted line + + text = "%-36s%6s%7s\n" % (re.sub("[ _]+", "-", point) + " ", lat, lon) + + # Return the text + return text + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_WCN_MultiPil.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_WCN_MultiPil.py index 3086100384..b3ac08c2a7 100644 --- 
a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_WCN_MultiPil.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/Hazard_WCN_MultiPil.py @@ -1,728 +1,728 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-## - -####################################################################### -# Hazard_WCN.py -# -## -########################################################################## -## -# -# SOFTWARE HISTORY -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# Oct 20, 2014 #3685 randerso Changed to support mixed case -# Jul 15, 2016 #5749 randerso Replaced ellipses with commas -# -## - -import GenericHazards -import string, time, re, os, types, copy, sets -import ModuleAccessor, LogStream -import VTECTable - -class TextProduct(GenericHazards.TextProduct): - Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) - - Definition['displayName'] = None - Definition['displayName'] = "BaselineHazard_WCN_ (Convective Watch)" - - Definition["defaultEditAreas"] = "EditAreas_FIPS__" - Definition["mapNameForCombinations"] = ["FIPS_", "Marine_Zones_"] - # Map background for creating Combinations - - # Header configuration items - Definition["productName"] = "Watch County Notification" # name of product - Definition["fullStationID"] = "" # full station identifier (4letter) - Definition["wmoID"] = "" # WMO ID - Definition["pil"] = "" # product pil - #Definition["areaName"] = "Statename" # Name of state, such as "Georgia" - Definition["wfoCityState"] = "" # Location of WFO - city state - Definition["wfoCity"] = "" # WFO Name as it should appear in a text product - Definition["textdbPil"] = "" # Product ID for storing to AWIPS text database. - Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. - Definition["outputFile"] = "{prddir}/TEXT/WCN_.txt" - - # OPTIONAL CONFIGURATION ITEMS - #Definition["database"] = "Official" # Source database. 
"Official", "Fcst", or "ISC" - #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished - #Definition["debug"] = 1 - #Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines - - Definition["purgeTime"] = 15 # Maximum hours for expireTime from issueTime - Definition["includeCities"] = 0 # Cities included in area header - Definition["cityDescriptor"] = "Including the cities of" - Definition["includeZoneNames"] = 0 # Zone names will be included in the area header - Definition["includeIssueTime"] = 0 # This should be set to zero for products - # that do not include a time lime below the UGC - #Definition["easPhrase"] = "" # Optional EAS phrase to be include in product header - Definition["lineLength"] = 66 - - #Definition["hazardSamplingThreshold"] = (10, None) #(%cov, #points) - - Definition["statePartMode"] = "byState" #"byState" or "byPart" formatting - #options. byState summarizes - #count by state. "byPart" - #counts by part of state. 
- - - def __init__(self): - GenericHazards.TextProduct.__init__(self) - - def _preProcessProduct(self, fcst, argDict): - - # - # The code below determines the set of ETNs for the header - # - - self._hazards = argDict['hazards'] - hazards = self._hazards.rawAnalyzedTable() - allWatchList = [] - for hazard in hazards: - if hazard['etn'] not in allWatchList: - allWatchList.append(hazard['etn']) - - if len(allWatchList) == 1: - watchPhrase = " for Watch " + str(allWatchList[0]) - else: - watchPhrase = " for Watches " - allWatchList.sort() - for x in xrange(len(allWatchList)): - watchPhrase = watchPhrase + str(allWatchList[x]) - if x != len(allWatchList) - 1: - watchPhrase = watchPhrase + "/" - - # - # Special Product header code to add watch number determined above - # - - if self._areaName != "": - self._areaName = " for " + self._areaName - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, - self._productName + watchPhrase) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - - s = productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n" + self._easPhrase + "\n" - fcst = fcst + s - - return fcst - - - def _preProcessArea(self, fcst, segmentAreas, expireTime, argDict): - - # - # This is the header for an edit area combination - # - - editArea = segmentAreas[0] - areaLabel = editArea - areaHeader = self.makeAreaHeader( - argDict, "", self._issueTime, expireTime, - self._areaDictionary, None, cityDescriptor=self._cityDescriptor, - areaList=segmentAreas, includeCities=self._includeCities, - includeZoneNames = self._includeZoneNames, includeIssueTime = self._includeIssueTime) - fcst = fcst + areaHeader + "\n" - return fcst - - - def _makeProduct(self, fcst, segmentAreas, argDict): - argDict["language"] = self._language - - #issuance time - issuanceTime = self._issueTime.unixTime() - - # 
- # Set up the edit areas being dealt with - # - - editArea = segmentAreas[0] - areaLabel = editArea - - # - # Build a list of the merged hazards being returned - # - - listOfHazards = self._hazards.getHazardList(segmentAreas) - - # Ensure hdln is defined, since other products can reset this - for h in listOfHazards: - if len(h['hdln']) == 0: - phensig = h['phen'] + '.' + h['sig'] - if VTECTable.VTECTable.has_key(phensig): - h['hdln'] = VTECTable.VTECTable[phensig]['hdln'] - - # - # Prepare to build phrases - # - - attrPhrase = "" - actionTest = [] - hazardListLength = len(listOfHazards) - - # - # Start building phrases - # - - phraseType = "" #CANCEL, NEW, REPLACE, EXPIRE (zone listing wording) - - # - # First check to see if this segment contains a CAN and a NEW - # - - if hazardListLength == 2: - - phraseType = "REPLACE" - - activeActions = ['NEW','EXB','EXA','EXT','CON'] - - # - # Element 0 is is CAN, UPG. Element 1 is active actions - # - - if listOfHazards[1]['act'] in activeActions and \ - listOfHazards[0]['act'] in ['CAN', 'UPG']: - #change forces next block to execute, code savings - listOfHazards.reverse() - - # - # Element 0 is active actions, Element 1 is CAN, UPG - # - if listOfHazards[1]['act'] in ['CAN','UPG'] and \ - listOfHazards[0]['act'] in activeActions: - newWatch = listOfHazards[0] - oldWatch = listOfHazards[1] - newWatchName = self.hazardName(newWatch['hdln'], argDict, - False) + " " + str(newWatch['etn']) - oldWatchName = self.hazardName(oldWatch['hdln'], argDict, - False) + " " + str(oldWatch['etn']) - validTime = self.getTimingPhrase(newWatch, issuanceTime) - - attrPhrase = "The National Weather Service has issued " + \ - newWatchName + " " + validTime + \ - " which replaces a portion of " + oldWatchName + '. 
' + \ - "The new watch is valid for the following areas" - - - # - # Element 0 is EXP, Element 1 is active actions - # - - if listOfHazards[1]['act'] in activeActions and \ - listOfHazards[0]['act'] == 'EXP': - #change forces next block to execute, code savings - listOfHazards.reverse() - - # - # Element 0 is is active actions. Element 1 is EXP - # - - if listOfHazards[1]['act'] == 'EXP' and \ - listOfHazards[0]['act'] in activeActions: - newWatch = listOfHazards[0] - oldWatch = listOfHazards[1] - newWatchName = self.hazardName(newWatch['hdln'], argDict, - False) + " " + str(newWatch['etn']) - oldWatchName = self.hazardName(oldWatch['hdln'], argDict, - False) + " " + str(oldWatch['etn']) - validTime = self.getTimingPhrase(newWatch, issuanceTime) - - if oldWatch['endTime'] > argDict['creationTime']: - expirePhrase = "will be allowed to expire." - else: - expirePhrase = "has expired." - - attrPhrase = "The National Weather Service has issued " + \ - newWatchName + ' ' + validTime + ". " + \ - oldWatchName + " " + expirePhrase + \ - " The new watch is valid for the following areas" - - # - # Else if the hazardListLength isn't 2 - # - - else: - for eachHazard in listOfHazards: - etnString = str(eachHazard['etn']) - watchName = self.hazardName(eachHazard['hdln'], argDict, - False) + " " + etnString #complete name and etn - validTime = self.getTimingPhrase(eachHazard, issuanceTime) - - # - # Phrase for NEW - # - - if eachHazard['act'] == 'NEW': - attrPhrase = "The National Weather Service has issued " +\ - watchName + " in effect " +\ - validTime + " for the following areas" - phraseType = "NEW" - - # - # Phrase for CON - # - - elif eachHazard['act'] == 'CON': - attrPhrase = watchName + " remains valid " + validTime + \ - " for the following areas" - phraseType = "NEW" - - # - # Phrase for EXP - # - - elif eachHazard['act'] == 'EXP': - if eachHazard['endTime'] > argDict['creationTime']: - attrPhrase = "The National Weather Service" + \ - " will allow " + watchName + " to 
expire " +\ - validTime + " for the following areas" - else: - attrPhrase = "The National Weather Service" + \ - " has allowed " + watchName + " to expire" +\ - " for the following areas" - phraseType = "EXPIRE" - - # - # Phrase for CAN - # - - elif eachHazard['act'] == 'CAN': - attrPhrase = "The National Weather Service" +\ - " has cancelled " + watchName + \ - " for the following areas" - phraseType = "CANCEL" - - # - # Phrase for EXA and EXB - # - - elif eachHazard['act'] in ['EXA', 'EXB']: - attrPhrase="The National Weather Service has extended " +\ - watchName + " to include the following areas " + \ - validTime - phraseType = "NEW" - - # - # Phrase for EXT - # - - elif eachHazard['act'] == 'EXT': - phraseType = "NEW" - #prevExpPhrase = self.getHourAMPMTimeZone(\ - # eachHazard['previousEnd'], eachHazard['id']) - prevRec = copy.deepcopy(eachHazard) - prevRec['endTime'] = eachHazard['previousEnd'] - prevExpPhrase = self.getTimingPhrase(prevRec, issuanceTime) - - attrPhrase = watchName + ", previously in effect " +\ - prevExpPhrase + ", is now in effect " + \ - validTime + " for the following areas" - - # - # Generic Phrase...should never reach this point - # - - else: - startingPhrase = "The National Weather Service" + \ - " has issued |* watch type *| |* watch number *|" + \ - " until |* watch end time *| for the following areas" - attrPhrase = startingPhrase - phraseType = "NEW" - - # - # Add phrase to forecast - # - - fcst = fcst + attrPhrase + '\n\n' - - - # Get the phrasing set up for the type of event - if phraseType == "NEW": - county1 = "In {area} this watch includes {number} {placeType}" - county2 = "In {area} this watch includes {number} {placeTypes}" - indepCity1 = "In {area} this watch includes {number} " + \ - "independent city" - indepCity2 = "In {area} this watch includes {number} " + \ - "independent cities" - marine = "This watch includes the following adjacent coastal waters" - - elif phraseType == "CANCEL": - county1 = "In {area} this cancels 
{number} {placeType}" - county2 = "In {area} this cancels {number} {placeTypes}" - indepCity1 = "In {area} this cancels {number} INDEPENDENT CITY" - indepCity2 = "In {area} this cancels {number} INDEPENDENT CITIES" - marine = "This cancels the following adjacent coastal waters" - - elif phraseType == "EXPIRE": - county1 = "In {area} this allows to expire {number} {placeType}" - county2 = "In {area} this allows to expire {number} {placeTypes}" - indepCity1 = "In {area} this allows to expire {number} " +\ - "independent city" - indepCity2 = "In {area} this allows to expire {number} " +\ - "independent cities" - marine = "This allows to expire the following adjacent coastal waters" - - elif phraseType == "REPLACE": - county1 = "In {area} the new watch includes {number} {placeType}" - county2 = "In {area} the new watch includes {number} {placeTypes}" - indepCity1 = "In {area} the new watch includes {number} " + \ - "independent city" - indepCity2 = "In {area} the new watch includes {number} " + \ - "independent cities" - marine = "The new watch includes the following adjacent coastal waters" - - else: - raise Exception, "Illegal phraseType in WCN formatter. " +\ - "Expected NEW, CANCEL, EXPIRE, or REPLACE. 
Got " + phraseType - - - # Add the list of counties - countyTuple = self._getFilteredAreaList( - segmentAreas, mode="COUNTY", areaDictName=self._areaDictionary) - fcst = fcst + self._makeTextFromCountyTuple(countyTuple, - mainFormatSingular = county1, mainFormatPlural = county2, - mode=self._statePartMode) - - # Add the lists of independent cities - countyTuple = self._getFilteredAreaList( - segmentAreas, mode="CITY", areaDictName=self._areaDictionary) - fcst = fcst + self._makeTextFromCountyTuple(countyTuple, - mainFormatSingular = indepCity1, mainFormatPlural = indepCity2, - mode=self._statePartMode) - - # Add the lists of marine zones - countyTuple = self._getFilteredAreaList( - segmentAreas, mode="ZONE", areaDictName=self._areaDictionary) - fcst = fcst + self._makeTextFromMarineTuple(countyTuple, - mainFormat = marine) - - # Add the lists of cities - fcst = fcst + "\n\n" + self.getCityList( - segmentAreas, areaDictName = self._areaDictionary, addPeriod=True, - forceAlphaSort=True) - - # - # Line Wrap - # - - fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "...", "-"]) - - # - # Finished - # - - return fcst - - def _postProcessArea(self, fcst, segmentAreas, argDict): - fcst = fcst + "$$\n\n" - return fcst - - - def _countFilteredAreaList(self, countyTuples, index): - #Returns a dictionary. dictionary is based on the 'index' element - # of the tuple (key) and is a count of the number of those - # records found. 
- dict = {} - for values in countyTuples: - key = values[index] - count = dict.get(key, 0) - count = count + 1 - dict[key] = count - - return dict - - - def _getFilteredAreaList(self, areaList, areaDictName="AreaDictionary", - mode="COUNTY"): - #returns list of sorted tuples: - # [(state, partOfState, partOfState State, zonename)] - #mode='COUNTY','ZONE','CITY' - - # Access the UGC information for the area(s) if available - areaDict = ModuleAccessor.ModuleAccessor().variable(areaDictName, - "AreaDictionary") - if areaDict is None: - return [] - - # sort by zone name - if mode == "ZONE": - areaList.sort() - - # Make a list of (state, partOfStateAndState, county) tuples - countyList = [] - for areaName in areaList: - if areaDict.has_key(areaName): - entry = areaDict[areaName] - else: - entry = {} - LogStream.logProblem(\ - "AreaDictionary missing definition for [" + areaName + "].") - - if mode == "COUNTY": - if len(areaName) == 6 and areaName[2] != "C": #not ssCnnn - continue #not a county fips - if entry.has_key("independentCity") and \ - entry["independentCity"] == 1: - continue #independent city, when in county mode - elif mode == "CITY": - if len(areaName) == 6 and areaName[2] != "C": #not ssCnnn - continue #not a county/city fips - if not entry.has_key("independentCity") or \ - entry["independentCity"] == 0: - continue #not independent city, when in city mode - elif mode == "ZONE": - if len(areaName) == 6 and areaName[2] != "Z": #not ssZnnn - continue #not a zone code - else: - raise Exception, "Illegal mode specified " + mode - - if entry.has_key("ugcName") and len(entry['ugcName']): - # Get fullStateName - state = areaName[0:2] - if entry.has_key("fullStateName") and \ - len(entry['fullStateName']): - state = entry["fullStateName"] - else: - state = "" - LogStream.logProblem("AreaDictionary does not contain " +\ - 'fullStateName definition for ', areaName) - - - # Get part-of-state information with state (not for Zones) - if mode == "ZONE": #marine - 
partOfState = "" - else: - if entry.has_key("partOfState") and \ - len(entry['partOfState']): - partOfState = entry["partOfState"] + ' ' + state - else: - partOfState = " " + state - LogStream.logProblem(\ - "AreaDictionary does not contain " +\ - 'partOfState definition for ', areaName) - - # Get county name - county = entry["ugcName"] - - # Eliminate the name County and others, if in the name - if mode == "COUNTY": - val = ['County','Counties','Parish','Parishes'] - for v in val: - county = county.replace(" " + v, "") - countyList.append((state, partOfState, county)) - - #missing ugcName - else: - countyList.append(("", "", areaName)) - LogStream.logProblem("AreaDictionary does not contain " +\ - 'ugcName definition for ', areaName) - - - # Sort by state, part of state, then county - if mode != "ZONE": - countyList.sort() #state, partOfState, county - return countyList - - def _makeTextFromMarineTuple(self, countyTuple, lineLength=66, colWidth=22, - mainFormat="This watch includes the following adjacent coastal waters"): - - #countyTuple: (state, partOfStateAndState, name) - #extract out the marine zones - mzones = [] - for state, partOfState, name in countyTuple: - mzones.append(name) - - if len(mzones) == 0: - return "" - - return mainFormat + "\n\n" + \ - self.formatCountyColumns(mzones, colWidth, lineLength) + '\n\n' - - - def _makeTextFromCountyTuple(self, countyTuple, lineLength=66, colWidth=22, - mainFormatSingular="In {area} this watch includes {number} {placeType}", - mainFormatPlural="In {area} this watch includes {number} {placeTypes}", - subFormat="In {area}", mode="byState"): - - #countyTuple: (state, partOfStateAndState, name) - #The type of text depends upon the mode: "byState" or "byPart" - # "byState" formatting: - # mainFormatSingular/mainFormatPlural (for each state) - # subFormat (for each partOfState) - # column formatting of names - # - # "byPart" formatting: - # (subFormat not used): - # mainFormatSingular/mainFormatPlural (for each 
partOfState State) - # column formatting of names - - # Format - if mode == "byState": - return self._byStateTextFromCountyTuple(countyTuple, lineLength, - colWidth, mainFormatSingular, mainFormatPlural, subFormat) - elif mode == "byPart": - return self._byPartTextFromCountyTuple(countyTuple, lineLength, - colWidth, mainFormatSingular, mainFormatPlural) - else: - raise Exception, "Illegal mode in makeTextFromCountyTuple(): " +\ - `mode` - - def _byStateTextFromCountyTuple(self, countyTuple, lineLength, - colWidth, mainFormatSingular, mainFormatPlural, subFormat): - - #Determine counts for each area - counts = self._countFilteredAreaList(countyTuple, 0) - - # Convert countyTuple into format that follows the text format - # byState: [(state, [(partOfStateAndState, [names])]] - geoList = [] - geoPList = [] - names = [] - curState = None - curPart = None - for state, partState, name in countyTuple: - if curState == state: - if curPart == partState: - names.append(name) - else: - if len(names): - geoPList.append((curPart, names)) - names = [name] - curPart = partState - else: - if len(names): - geoPList.append((curPart, names)) - if len(geoPList): - geoList.append((curState, geoPList)) - geoPList = [] - names = [name] - curPart = partState - curState = state - - if len(names): - geoPList.append((curPart, names)) - geoList.append((curState, geoPList)) - - # Now Format the text - result = '' - for state, partStateNames in geoList: - - #special District of Columbia, no parts of state descriptors - if state == "District of Columbia": - result = result + "The District of Columbia\n\n" - continue - - ccount = counts.get(state, 0) - if ccount > 1: - header = mainFormatPlural - else: - header = mainFormatSingular - header = string.replace(header, '{area}', state) - header = string.replace(header, '{number}', str(ccount)) - if state == "Louisiana": - header = string.replace(header, '{placeType}', "parish") - header = string.replace(header, '{placeTypes}', "parishes") - else: - 
header = string.replace(header, '{placeType}', "county") - header = string.replace(header, '{placeTypes}', "counties") - - - result = result + header + '\n\n' - - for partState, names in partStateNames: - subheader = subFormat - subheader = string.replace(subheader, '{area}', partState) - result = result + subheader + '\n\n' - - counties = self.formatCountyColumns(names, colWidth, - lineLength) - result = result + counties + '\n\n' - - return result - - - def _byPartTextFromCountyTuple(self, countyTuple, lineLength, - colWidth, mainFormatSingular, mainFormatPlural): - - #Determine counts for each area - counts = self._countFilteredAreaList(countyTuple, 1) - - # Convert countyTuple into format that follows the text format - # byPart: [(partOfStateAndState, [names])] - geoList = [] - names = [] - curSection = None #partState - for state, partState, name in countyTuple: - if partState == curSection: - names.append(name) - else: - if len(names): - geoList.append((curSection, names)) - names = [name] - curSection = partState - if len(names): - geoList.append((curSection, names)) - - # Now Format the text - result = '' - for partState, names in geoList: - - #special District of Columbia - if partState.find("District of Columbia") != -1: - result = result + "The District of Columbia\n\n" - continue - - ccount = counts.get(partState, 0) - if ccount > 1: - header = mainFormatPlural - else: - header = mainFormatSingular - header = string.replace(header, '{area}', partState) - header = string.replace(header, '{number}', str(ccount)) - if partState.find("Louisiana") != -1: - header = string.replace(header, '{placeType}', "parish") - header = string.replace(header, '{placeTypes}', "parishes") - else: - header = string.replace(header, '{placeType}', "county") - header = string.replace(header, '{placeTypes}', "counties") - - counties = self.formatCountyColumns(names, colWidth, lineLength) - - result = result + header + '\n\n' + counties + '\n\n' - - return result - - - def 
allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - return [ - ('TO.A', allActions, 'Convective'), - ('SV.A', allActions, 'Convective') - ] +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +####################################################################### +# Hazard_WCN.py +# +## +########################################################################## +## +# +# SOFTWARE HISTORY +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# Oct 20, 2014 #3685 randerso Changed to support mixed case +# Jul 15, 2016 #5749 randerso Replaced ellipses with commas +# +## + +import GenericHazards +import string, time, re, os, types, copy, sets +import ModuleAccessor, LogStream +import VTECTable + +class TextProduct(GenericHazards.TextProduct): + Definition = copy.deepcopy(GenericHazards.TextProduct.Definition) + + Definition['displayName'] = None + Definition['displayName'] = "BaselineHazard_WCN_ (Convective Watch)" + + Definition["defaultEditAreas"] = "EditAreas_FIPS__" + Definition["mapNameForCombinations"] = ["FIPS_", "Marine_Zones_"] + # Map background for creating Combinations + + # Header configuration items + Definition["productName"] = "Watch County Notification" # name of product + Definition["fullStationID"] = "" # full station identifier (4letter) + Definition["wmoID"] = "" # WMO ID + Definition["pil"] = "" # product pil + #Definition["areaName"] = "Statename" # Name of state, such as "Georgia" + Definition["wfoCityState"] = "" # Location of WFO - city state + Definition["wfoCity"] = "" # WFO Name as it should appear in a text product + Definition["textdbPil"] = "" # Product ID for storing to AWIPS text 
database. + Definition["awipsWANPil"] = "" # Product ID for transmitting to AWIPS WAN. + Definition["outputFile"] = "{prddir}/TEXT/WCN_.txt" + + # OPTIONAL CONFIGURATION ITEMS + #Definition["database"] = "Official" # Source database. "Official", "Fcst", or "ISC" + #Definition["displayOutputDialog"] = 0 # If 1 will display results when finished + #Definition["debug"] = 1 + #Definition["headlineEditAreaGroup"] = "Zones" # Name of EditAreaGroup for sampling headlines + + Definition["purgeTime"] = 15 # Maximum hours for expireTime from issueTime + Definition["includeCities"] = 0 # Cities included in area header + Definition["cityDescriptor"] = "Including the cities of" + Definition["includeZoneNames"] = 0 # Zone names will be included in the area header + Definition["includeIssueTime"] = 0 # This should be set to zero for products + # that do not include a time lime below the UGC + #Definition["easPhrase"] = "" # Optional EAS phrase to be include in product header + Definition["lineLength"] = 66 + + #Definition["hazardSamplingThreshold"] = (10, None) #(%cov, #points) + + Definition["statePartMode"] = "byState" #"byState" or "byPart" formatting + #options. byState summarizes + #count by state. "byPart" + #counts by part of state. 
+ + + def __init__(self): + GenericHazards.TextProduct.__init__(self) + + def _preProcessProduct(self, fcst, argDict): + + # + # The code below determines the set of ETNs for the header + # + + self._hazards = argDict['hazards'] + hazards = self._hazards.rawAnalyzedTable() + allWatchList = [] + for hazard in hazards: + if hazard['etn'] not in allWatchList: + allWatchList.append(hazard['etn']) + + if len(allWatchList) == 1: + watchPhrase = " for Watch " + str(allWatchList[0]) + else: + watchPhrase = " for Watches " + allWatchList.sort() + for x in range(len(allWatchList)): + watchPhrase = watchPhrase + str(allWatchList[x]) + if x != len(allWatchList) - 1: + watchPhrase = watchPhrase + "/" + + # + # Special Product header code to add watch number determined above + # + + if self._areaName != "": + self._areaName = " for " + self._areaName + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, + self._productName + watchPhrase) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + + s = productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n" + self._easPhrase + "\n" + fcst = fcst + s + + return fcst + + + def _preProcessArea(self, fcst, segmentAreas, expireTime, argDict): + + # + # This is the header for an edit area combination + # + + editArea = segmentAreas[0] + areaLabel = editArea + areaHeader = self.makeAreaHeader( + argDict, "", self._issueTime, expireTime, + self._areaDictionary, None, cityDescriptor=self._cityDescriptor, + areaList=segmentAreas, includeCities=self._includeCities, + includeZoneNames = self._includeZoneNames, includeIssueTime = self._includeIssueTime) + fcst = fcst + areaHeader + "\n" + return fcst + + + def _makeProduct(self, fcst, segmentAreas, argDict): + argDict["language"] = self._language + + #issuance time + issuanceTime = self._issueTime.unixTime() + + # + 
# Set up the edit areas being dealt with + # + + editArea = segmentAreas[0] + areaLabel = editArea + + # + # Build a list of the merged hazards being returned + # + + listOfHazards = self._hazards.getHazardList(segmentAreas) + + # Ensure hdln is defined, since other products can reset this + for h in listOfHazards: + if len(h['hdln']) == 0: + phensig = h['phen'] + '.' + h['sig'] + if phensig in VTECTable.VTECTable: + h['hdln'] = VTECTable.VTECTable[phensig]['hdln'] + + # + # Prepare to build phrases + # + + attrPhrase = "" + actionTest = [] + hazardListLength = len(listOfHazards) + + # + # Start building phrases + # + + phraseType = "" #CANCEL, NEW, REPLACE, EXPIRE (zone listing wording) + + # + # First check to see if this segment contains a CAN and a NEW + # + + if hazardListLength == 2: + + phraseType = "REPLACE" + + activeActions = ['NEW','EXB','EXA','EXT','CON'] + + # + # Element 0 is is CAN, UPG. Element 1 is active actions + # + + if listOfHazards[1]['act'] in activeActions and \ + listOfHazards[0]['act'] in ['CAN', 'UPG']: + #change forces next block to execute, code savings + listOfHazards.reverse() + + # + # Element 0 is active actions, Element 1 is CAN, UPG + # + if listOfHazards[1]['act'] in ['CAN','UPG'] and \ + listOfHazards[0]['act'] in activeActions: + newWatch = listOfHazards[0] + oldWatch = listOfHazards[1] + newWatchName = self.hazardName(newWatch['hdln'], argDict, + False) + " " + str(newWatch['etn']) + oldWatchName = self.hazardName(oldWatch['hdln'], argDict, + False) + " " + str(oldWatch['etn']) + validTime = self.getTimingPhrase(newWatch, issuanceTime) + + attrPhrase = "The National Weather Service has issued " + \ + newWatchName + " " + validTime + \ + " which replaces a portion of " + oldWatchName + '. 
' + \ + "The new watch is valid for the following areas" + + + # + # Element 0 is EXP, Element 1 is active actions + # + + if listOfHazards[1]['act'] in activeActions and \ + listOfHazards[0]['act'] == 'EXP': + #change forces next block to execute, code savings + listOfHazards.reverse() + + # + # Element 0 is is active actions. Element 1 is EXP + # + + if listOfHazards[1]['act'] == 'EXP' and \ + listOfHazards[0]['act'] in activeActions: + newWatch = listOfHazards[0] + oldWatch = listOfHazards[1] + newWatchName = self.hazardName(newWatch['hdln'], argDict, + False) + " " + str(newWatch['etn']) + oldWatchName = self.hazardName(oldWatch['hdln'], argDict, + False) + " " + str(oldWatch['etn']) + validTime = self.getTimingPhrase(newWatch, issuanceTime) + + if oldWatch['endTime'] > argDict['creationTime']: + expirePhrase = "will be allowed to expire." + else: + expirePhrase = "has expired." + + attrPhrase = "The National Weather Service has issued " + \ + newWatchName + ' ' + validTime + ". " + \ + oldWatchName + " " + expirePhrase + \ + " The new watch is valid for the following areas" + + # + # Else if the hazardListLength isn't 2 + # + + else: + for eachHazard in listOfHazards: + etnString = str(eachHazard['etn']) + watchName = self.hazardName(eachHazard['hdln'], argDict, + False) + " " + etnString #complete name and etn + validTime = self.getTimingPhrase(eachHazard, issuanceTime) + + # + # Phrase for NEW + # + + if eachHazard['act'] == 'NEW': + attrPhrase = "The National Weather Service has issued " +\ + watchName + " in effect " +\ + validTime + " for the following areas" + phraseType = "NEW" + + # + # Phrase for CON + # + + elif eachHazard['act'] == 'CON': + attrPhrase = watchName + " remains valid " + validTime + \ + " for the following areas" + phraseType = "NEW" + + # + # Phrase for EXP + # + + elif eachHazard['act'] == 'EXP': + if eachHazard['endTime'] > argDict['creationTime']: + attrPhrase = "The National Weather Service" + \ + " will allow " + watchName + " to 
expire " +\ + validTime + " for the following areas" + else: + attrPhrase = "The National Weather Service" + \ + " has allowed " + watchName + " to expire" +\ + " for the following areas" + phraseType = "EXPIRE" + + # + # Phrase for CAN + # + + elif eachHazard['act'] == 'CAN': + attrPhrase = "The National Weather Service" +\ + " has cancelled " + watchName + \ + " for the following areas" + phraseType = "CANCEL" + + # + # Phrase for EXA and EXB + # + + elif eachHazard['act'] in ['EXA', 'EXB']: + attrPhrase="The National Weather Service has extended " +\ + watchName + " to include the following areas " + \ + validTime + phraseType = "NEW" + + # + # Phrase for EXT + # + + elif eachHazard['act'] == 'EXT': + phraseType = "NEW" + #prevExpPhrase = self.getHourAMPMTimeZone(\ + # eachHazard['previousEnd'], eachHazard['id']) + prevRec = copy.deepcopy(eachHazard) + prevRec['endTime'] = eachHazard['previousEnd'] + prevExpPhrase = self.getTimingPhrase(prevRec, issuanceTime) + + attrPhrase = watchName + ", previously in effect " +\ + prevExpPhrase + ", is now in effect " + \ + validTime + " for the following areas" + + # + # Generic Phrase...should never reach this point + # + + else: + startingPhrase = "The National Weather Service" + \ + " has issued |* watch type *| |* watch number *|" + \ + " until |* watch end time *| for the following areas" + attrPhrase = startingPhrase + phraseType = "NEW" + + # + # Add phrase to forecast + # + + fcst = fcst + attrPhrase + '\n\n' + + + # Get the phrasing set up for the type of event + if phraseType == "NEW": + county1 = "In {area} this watch includes {number} {placeType}" + county2 = "In {area} this watch includes {number} {placeTypes}" + indepCity1 = "In {area} this watch includes {number} " + \ + "independent city" + indepCity2 = "In {area} this watch includes {number} " + \ + "independent cities" + marine = "This watch includes the following adjacent coastal waters" + + elif phraseType == "CANCEL": + county1 = "In {area} this cancels 
{number} {placeType}" + county2 = "In {area} this cancels {number} {placeTypes}" + indepCity1 = "In {area} this cancels {number} INDEPENDENT CITY" + indepCity2 = "In {area} this cancels {number} INDEPENDENT CITIES" + marine = "This cancels the following adjacent coastal waters" + + elif phraseType == "EXPIRE": + county1 = "In {area} this allows to expire {number} {placeType}" + county2 = "In {area} this allows to expire {number} {placeTypes}" + indepCity1 = "In {area} this allows to expire {number} " +\ + "independent city" + indepCity2 = "In {area} this allows to expire {number} " +\ + "independent cities" + marine = "This allows to expire the following adjacent coastal waters" + + elif phraseType == "REPLACE": + county1 = "In {area} the new watch includes {number} {placeType}" + county2 = "In {area} the new watch includes {number} {placeTypes}" + indepCity1 = "In {area} the new watch includes {number} " + \ + "independent city" + indepCity2 = "In {area} the new watch includes {number} " + \ + "independent cities" + marine = "The new watch includes the following adjacent coastal waters" + + else: + raise Exception("Illegal phraseType in WCN formatter. " +\ + "Expected NEW, CANCEL, EXPIRE, or REPLACE. 
Got " + phraseType) + + + # Add the list of counties + countyTuple = self._getFilteredAreaList( + segmentAreas, mode="COUNTY", areaDictName=self._areaDictionary) + fcst = fcst + self._makeTextFromCountyTuple(countyTuple, + mainFormatSingular = county1, mainFormatPlural = county2, + mode=self._statePartMode) + + # Add the lists of independent cities + countyTuple = self._getFilteredAreaList( + segmentAreas, mode="CITY", areaDictName=self._areaDictionary) + fcst = fcst + self._makeTextFromCountyTuple(countyTuple, + mainFormatSingular = indepCity1, mainFormatPlural = indepCity2, + mode=self._statePartMode) + + # Add the lists of marine zones + countyTuple = self._getFilteredAreaList( + segmentAreas, mode="ZONE", areaDictName=self._areaDictionary) + fcst = fcst + self._makeTextFromMarineTuple(countyTuple, + mainFormat = marine) + + # Add the lists of cities + fcst = fcst + "\n\n" + self.getCityList( + segmentAreas, areaDictName = self._areaDictionary, addPeriod=True, + forceAlphaSort=True) + + # + # Line Wrap + # + + fcst = self.endline(fcst, linelength=self._lineLength, breakStr=[" ", "...", "-"]) + + # + # Finished + # + + return fcst + + def _postProcessArea(self, fcst, segmentAreas, argDict): + fcst = fcst + "$$\n\n" + return fcst + + + def _countFilteredAreaList(self, countyTuples, index): + #Returns a dictionary. dictionary is based on the 'index' element + # of the tuple (key) and is a count of the number of those + # records found. 
+ dict = {} + for values in countyTuples: + key = values[index] + count = dict.get(key, 0) + count = count + 1 + dict[key] = count + + return dict + + + def _getFilteredAreaList(self, areaList, areaDictName="AreaDictionary", + mode="COUNTY"): + #returns list of sorted tuples: + # [(state, partOfState, partOfState State, zonename)] + #mode='COUNTY','ZONE','CITY' + + # Access the UGC information for the area(s) if available + areaDict = ModuleAccessor.ModuleAccessor().variable(areaDictName, + "AreaDictionary") + if areaDict is None: + return [] + + # sort by zone name + if mode == "ZONE": + areaList.sort() + + # Make a list of (state, partOfStateAndState, county) tuples + countyList = [] + for areaName in areaList: + if areaName in areaDict: + entry = areaDict[areaName] + else: + entry = {} + LogStream.logProblem(\ + "AreaDictionary missing definition for [" + areaName + "].") + + if mode == "COUNTY": + if len(areaName) == 6 and areaName[2] != "C": #not ssCnnn + continue #not a county fips + if "independentCity" in entry and \ + entry["independentCity"] == 1: + continue #independent city, when in county mode + elif mode == "CITY": + if len(areaName) == 6 and areaName[2] != "C": #not ssCnnn + continue #not a county/city fips + if "independentCity" not in entry or \ + entry["independentCity"] == 0: + continue #not independent city, when in city mode + elif mode == "ZONE": + if len(areaName) == 6 and areaName[2] != "Z": #not ssZnnn + continue #not a zone code + else: + raise Exception("Illegal mode specified " + mode) + + if "ugcName" in entry and len(entry['ugcName']): + # Get fullStateName + state = areaName[0:2] + if "fullStateName" in entry and \ + len(entry['fullStateName']): + state = entry["fullStateName"] + else: + state = "" + LogStream.logProblem("AreaDictionary does not contain " +\ + 'fullStateName definition for ', areaName) + + + # Get part-of-state information with state (not for Zones) + if mode == "ZONE": #marine + partOfState = "" + else: + if 
"partOfState" in entry and \ + len(entry['partOfState']): + partOfState = entry["partOfState"] + ' ' + state + else: + partOfState = " " + state + LogStream.logProblem(\ + "AreaDictionary does not contain " +\ + 'partOfState definition for ', areaName) + + # Get county name + county = entry["ugcName"] + + # Eliminate the name County and others, if in the name + if mode == "COUNTY": + val = ['County','Counties','Parish','Parishes'] + for v in val: + county = county.replace(" " + v, "") + countyList.append((state, partOfState, county)) + + #missing ugcName + else: + countyList.append(("", "", areaName)) + LogStream.logProblem("AreaDictionary does not contain " +\ + 'ugcName definition for ', areaName) + + + # Sort by state, part of state, then county + if mode != "ZONE": + countyList.sort() #state, partOfState, county + return countyList + + def _makeTextFromMarineTuple(self, countyTuple, lineLength=66, colWidth=22, + mainFormat="This watch includes the following adjacent coastal waters"): + + #countyTuple: (state, partOfStateAndState, name) + #extract out the marine zones + mzones = [] + for state, partOfState, name in countyTuple: + mzones.append(name) + + if len(mzones) == 0: + return "" + + return mainFormat + "\n\n" + \ + self.formatCountyColumns(mzones, colWidth, lineLength) + '\n\n' + + + def _makeTextFromCountyTuple(self, countyTuple, lineLength=66, colWidth=22, + mainFormatSingular="In {area} this watch includes {number} {placeType}", + mainFormatPlural="In {area} this watch includes {number} {placeTypes}", + subFormat="In {area}", mode="byState"): + + #countyTuple: (state, partOfStateAndState, name) + #The type of text depends upon the mode: "byState" or "byPart" + # "byState" formatting: + # mainFormatSingular/mainFormatPlural (for each state) + # subFormat (for each partOfState) + # column formatting of names + # + # "byPart" formatting: + # (subFormat not used): + # mainFormatSingular/mainFormatPlural (for each partOfState State) + # column formatting of 
names + + # Format + if mode == "byState": + return self._byStateTextFromCountyTuple(countyTuple, lineLength, + colWidth, mainFormatSingular, mainFormatPlural, subFormat) + elif mode == "byPart": + return self._byPartTextFromCountyTuple(countyTuple, lineLength, + colWidth, mainFormatSingular, mainFormatPlural) + else: + raise Exception("Illegal mode in makeTextFromCountyTuple(): " +\ + repr(mode)) + + def _byStateTextFromCountyTuple(self, countyTuple, lineLength, + colWidth, mainFormatSingular, mainFormatPlural, subFormat): + + #Determine counts for each area + counts = self._countFilteredAreaList(countyTuple, 0) + + # Convert countyTuple into format that follows the text format + # byState: [(state, [(partOfStateAndState, [names])]] + geoList = [] + geoPList = [] + names = [] + curState = None + curPart = None + for state, partState, name in countyTuple: + if curState == state: + if curPart == partState: + names.append(name) + else: + if len(names): + geoPList.append((curPart, names)) + names = [name] + curPart = partState + else: + if len(names): + geoPList.append((curPart, names)) + if len(geoPList): + geoList.append((curState, geoPList)) + geoPList = [] + names = [name] + curPart = partState + curState = state + + if len(names): + geoPList.append((curPart, names)) + geoList.append((curState, geoPList)) + + # Now Format the text + result = '' + for state, partStateNames in geoList: + + #special District of Columbia, no parts of state descriptors + if state == "District of Columbia": + result = result + "The District of Columbia\n\n" + continue + + ccount = counts.get(state, 0) + if ccount > 1: + header = mainFormatPlural + else: + header = mainFormatSingular + header = string.replace(header, '{area}', state) + header = string.replace(header, '{number}', str(ccount)) + if state == "Louisiana": + header = string.replace(header, '{placeType}', "parish") + header = string.replace(header, '{placeTypes}', "parishes") + else: + header = string.replace(header, 
'{placeType}', "county") + header = string.replace(header, '{placeTypes}', "counties") + + + result = result + header + '\n\n' + + for partState, names in partStateNames: + subheader = subFormat + subheader = string.replace(subheader, '{area}', partState) + result = result + subheader + '\n\n' + + counties = self.formatCountyColumns(names, colWidth, + lineLength) + result = result + counties + '\n\n' + + return result + + + def _byPartTextFromCountyTuple(self, countyTuple, lineLength, + colWidth, mainFormatSingular, mainFormatPlural): + + #Determine counts for each area + counts = self._countFilteredAreaList(countyTuple, 1) + + # Convert countyTuple into format that follows the text format + # byPart: [(partOfStateAndState, [names])] + geoList = [] + names = [] + curSection = None #partState + for state, partState, name in countyTuple: + if partState == curSection: + names.append(name) + else: + if len(names): + geoList.append((curSection, names)) + names = [name] + curSection = partState + if len(names): + geoList.append((curSection, names)) + + # Now Format the text + result = '' + for partState, names in geoList: + + #special District of Columbia + if partState.find("District of Columbia") != -1: + result = result + "The District of Columbia\n\n" + continue + + ccount = counts.get(partState, 0) + if ccount > 1: + header = mainFormatPlural + else: + header = mainFormatSingular + header = string.replace(header, '{area}', partState) + header = string.replace(header, '{number}', str(ccount)) + if partState.find("Louisiana") != -1: + header = string.replace(header, '{placeType}', "parish") + header = string.replace(header, '{placeTypes}', "parishes") + else: + header = string.replace(header, '{placeType}', "county") + header = string.replace(header, '{placeTypes}', "counties") + + counties = self.formatCountyColumns(names, colWidth, lineLength) + + result = result + header + '\n\n' + counties + '\n\n' + + return result + + + def allowedHazards(self): + allActions = 
["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + return [ + ('TO.A', allActions, 'Convective'), + ('SV.A', allActions, 'Convective') + ] diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/MVF.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/MVF.py index 9d97cb6c83..2e1261e770 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/MVF.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/MVF.py @@ -1,409 +1,409 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: The produces a Marine Verification Forecast. 
-#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# MVF.py, MVF___Definition, MVF__Overrides -#------------------------------------------------------------------------- -# Customization Points in Local File: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas, default is Combinations -# -# productName defines name of product e.g. "Marine Verification Forecast" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "MVFBOS" -# zoneCode ZONE code for product header, such as "NYZ001>025" -# stateName State name for product header, such as "Western New York" -# wfoCityState WFO location, such as "Buffalo NY" -# -# Optional Configuration Items -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. 
-#------------------------------------------------------------------------- -# Weather Elements Needed: -# Wind -# WaveHeight -#------------------------------------------------------------------------- -# Edit Areas Needed: area1, area2 -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -# None -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up additional tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". -#------------------------------------------------------------------------- -# Additional Information: -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Marine Services. -####################################################################### - -import TextRules -import SampleAnalysis -import string, time, types -import math - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - (("Forecaster Number", "forecasterNumber"), 99, "alphaNumeric"), - # (("Tropical Storm Winds", "tropicalStorm"), "no", "radio", ["NO", "YES"]), - ] - Definition = { - "type": "smart", - "displayName": "None", # for Product Generation Menu - "database": "Official", - # Defines output location of finished product. 
- "outputFile": "{prddir}/TEXT/MVF_.txt", - "debug": 0, - - # Label is Buoy/C-MAN identifier - "defaultEditAreas": [("area1", "45004"), - ("area2", "45005"), - ], - # product identifiers - "productName": "Marine Verification Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "zoneCode": "stZALL", # Zone Code, such as "GAZ025-056" - "stateName": "", # Name of state, such as "Georgia" - "wfoCityState": "", # Location of WFO - city state - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def generateForecast(self, argDict): - # Generate formatted product for a list of edit areas - - # Get variables from varDict and Definition - self._getVariables(argDict) - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." 
- - # Determine time ranges - self._determineTimeRanges(argDict) - - # Sample the data - self._sampleData(argDict) - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Get variables from VariableList - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - try: - if self._tropicalStorm == "YES": - self._tropicalStorm = 1 - else: - self._tropicalStorm = 0 - except: - self._tropicalStorm = 0 - - try: - forecasterNumber = int(self._forecasterNumber) - if forecasterNumber < 10: - self._forecasterNumber = "0" + `forecasterNumber` - else: - self._forecasterNumber = `forecasterNumber` - except: - pass - - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - self._expirationTimeOffset = 12 #hours - return None - -## ## OLD ALGORITHM -## # Determine time ranges for product -## # Set up self._timeRangeList -## # Based on currentLocalTime time: -## # if prior to 12Z, use 18Z today and 6Z tomorrow -## # else use 6Z tomorrow and 18Z tomorrow -## currentLocalTime, shift = self.determineTimeShift() -## if currentLocalTime.hour() < 12: -## startHour = 18 
-## label1 = "18" -## label2 = "06" -## else: -## startHour = 24+6 -## label1 = "06" -## label2 = "18" -## tr1 = self.createTimeRange(startHour, 12) -## tr2 = self.createTimeRange(startHour + 12, 12) -## self._timeRangeList = [(tr1, label1), (tr2,label2)] - - def _determineTimeRanges(self, argDict): - # Determine time ranges for product - # Set up self._timeRangeList - # based on current time. - - # Assume that: Verification is done for a 5 hour window - # centered around 06Z and 18Z. (Belk) - # So the time ranges are 04z up to 09Z and 16Z up to 21Z. - # 09Z and 21Z are not included. - - # If current time is prior to 12Z, - # use 18Z today and 6Z tomorrow - # else use 6Z tomorrow and 18Z tomorrow - gmTime = time.gmtime(argDict['creationTime']) - gmHour = gmTime[3] - if gmHour < 12: - startHour = 16 - label1 = "18" - label2 = "06" - else: - startHour = 24+4 - label1 = "06" - label2 = "18" - tr1 = self.createTimeRange(startHour, startHour + 5, mode="Zulu") - tr2 = self.createTimeRange(startHour + 12, startHour + 12 + 5, mode="Zulu") - self._timeRangeList = [(tr1, label1), (tr2,label2)] - - #print self._timeRangeList - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - - # Determine expiration time - expireTime = argDict['creationTime'] + self._expirationTimeOffset*3600 - self._expireTime = time.strftime("%d%H%M",time.gmtime(expireTime)) - - return - - def _sampleData(self, argDict): - # Sample the data - self._sampler = self.getSampler(argDict, - (self._getAnalysisList(), self._timeRangeList, self._areaList)) - - def _preProcessProduct(self, fcst, argDict): - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - #self._issueTime = 
self.getCurrentTime(argDict) - #fcst = fcst + areaLabel + "\n" + self._issueTime + "\n\n" - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - statList = self.getStatList(self._sampler, self._getAnalysisList(),\ - self._timeRangeList, editArea) - - fcst = fcst + "%%F" - fcst = fcst + self._forecasterNumber - fcst = fcst + " " + areaLabel + " " - - index = 0 - for statDict in statList: - fcst = fcst + self._timeRangeList[index][1] + "/" - - # Warning Status - str = self._warningStatus(statDict, argDict) - fcst = fcst + str + "/" - - # Wind direction and speed : ddff - str = self._windDirSpeed(statDict, argDict) - fcst = fcst + str + "/" - - # Wave Height - str = self._sigWaveHeight(statDict, argDict) - fcst = fcst + str + "/" - - index += 1 - - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - fcst = fcst + "\n" - return fcst - - def _postProcessProduct(self, fcst, argDict): - fcst = fcst + "\n\n$$" - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _getCurrentTimeString(self): - return time.strftime("%d%M%H", time.gmtime()) - - def _getAnalysisList(self): - return [ - ("Wind", self.vectorAvg), - ("WaveHeight", self.avg), - ] - - def _warningStatus(self, statDict, argDict): - # Return a warning status - wind = self.getStats(statDict, "Wind") - - # Need to use total or 'combined' seas - waves = self.getStats(statDict, "WaveHeight") # fixed - - if wind is None: - return "NO" - mag, dir = wind - - #non-tropical - if self._tropicalStorm == 0: - if mag < 25 and waves < 5: - return "NO" - elif mag < 34: # gales start at 34 kts - return "SC" - elif mag < 48: # storms start at 48 kts - return "GL" - elif mag < 63: - return "ST" - else: - return "HF" - #tropical - else: - 
if mag < 25 and waves < 5: - return "NO" - elif mag < 34: # gales start at 34 kts - return "SC" - elif mag < 63: # TS/HR winds - return "TS" - else: - return "HR" - - - def _windDirSpeed(self, statDict, argDict): - # Return a wind direction and speed - - wind = self.getStats(statDict, "Wind") - if wind is None: - return "9999" - mag, dir = wind - #print "wind mag:", mag, " dir:", dir - - # initial direction - if self._variableWinds(mag, dir) == 1: #variable winds - dirTens = 99 - else: - dir = self.round(dir, "Nearest", 10) - dirTens = int(dir/10.0) - if dirTens == 0: - dirTens = 36 #want 36 for N, not 0 - mag = int(self.round(mag, "Nearest", 1)) - - intvalue = dirTens*100 + mag%100 #output value as int - - # check to see if mag >= 100, a special case - # add 50 to the direction value - while mag >= 100: - mag = mag - 100 - intvalue = intvalue + 5000 - - #format as string - return "%04i" % intvalue - - def _variableWinds(self, mag, dir): - # This method can be easily overridden with - # criteria for variable winds - if mag < 5: - return 1 - else: - return 0 - - def _sigWaveHeight(self, statDict, argDict): - # Return a significant WaveHeight - wave = self.getStats(statDict, "WaveHeight") - if wave is None: - return "99" - if wave < 10: - return "0" + `int(wave)` - else: - return `int(wave)` +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: The produces a Marine Verification Forecast. 
+#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# MVF.py, MVF___Definition, MVF__Overrides +#------------------------------------------------------------------------- +# Customization Points in Local File: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas, default is Combinations +# +# productName defines name of product e.g. "Marine Verification Forecast" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "MVFBOS" +# zoneCode ZONE code for product header, such as "NYZ001>025" +# stateName State name for product header, such as "Western New York" +# wfoCityState WFO location, such as "Buffalo NY" +# +# Optional Configuration Items +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. 
+#------------------------------------------------------------------------- +# Weather Elements Needed: +# Wind +# WaveHeight +#------------------------------------------------------------------------- +# Edit Areas Needed: area1, area2 +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +# None +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up additional tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Additional Information: +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Marine Services. +####################################################################### + +import TextRules +import SampleAnalysis +import string, time, types +import math + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + (("Forecaster Number", "forecasterNumber"), 99, "alphaNumeric"), + # (("Tropical Storm Winds", "tropicalStorm"), "no", "radio", ["NO", "YES"]), + ] + Definition = { + "type": "smart", + "displayName": "None", # for Product Generation Menu + "database": "Official", + # Defines output location of finished product. 
+ "outputFile": "{prddir}/TEXT/MVF_.txt", + "debug": 0, + + # Label is Buoy/C-MAN identifier + "defaultEditAreas": [("area1", "45004"), + ("area2", "45005"), + ], + # product identifiers + "productName": "Marine Verification Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "zoneCode": "stZALL", # Zone Code, such as "GAZ025-056" + "stateName": "", # Name of state, such as "Georgia" + "wfoCityState": "", # Location of WFO - city state + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def generateForecast(self, argDict): + # Generate formatted product for a list of edit areas + + # Get variables from varDict and Definition + self._getVariables(argDict) + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + self._determineTimeRanges(argDict) + + # Sample the data + self._sampleData(argDict) + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Get variables from VariableList + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + try: + if self._tropicalStorm == "YES": + self._tropicalStorm = 1 + else: + self._tropicalStorm = 0 + except: + self._tropicalStorm = 0 + + try: + forecasterNumber = int(self._forecasterNumber) + if forecasterNumber < 10: + self._forecasterNumber = "0" + repr(forecasterNumber) + else: + self._forecasterNumber = repr(forecasterNumber) + except: + pass + + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + self._expirationTimeOffset = 12 #hours + return None + +## ## OLD ALGORITHM +## # Determine time ranges for product +## # Set up self._timeRangeList +## # Based on currentLocalTime time: +## # if prior to 12Z, use 18Z today and 6Z tomorrow +## # else use 6Z tomorrow and 18Z tomorrow +## currentLocalTime, shift = self.determineTimeShift() +## if currentLocalTime.hour() < 12: +## 
startHour = 18 +## label1 = "18" +## label2 = "06" +## else: +## startHour = 24+6 +## label1 = "06" +## label2 = "18" +## tr1 = self.createTimeRange(startHour, 12) +## tr2 = self.createTimeRange(startHour + 12, 12) +## self._timeRangeList = [(tr1, label1), (tr2,label2)] + + def _determineTimeRanges(self, argDict): + # Determine time ranges for product + # Set up self._timeRangeList + # based on current time. + + # Assume that: Verification is done for a 5 hour window + # centered around 06Z and 18Z. (Belk) + # So the time ranges are 04z up to 09Z and 16Z up to 21Z. + # 09Z and 21Z are not included. + + # If current time is prior to 12Z, + # use 18Z today and 6Z tomorrow + # else use 6Z tomorrow and 18Z tomorrow + gmTime = time.gmtime(argDict['creationTime']) + gmHour = gmTime[3] + if gmHour < 12: + startHour = 16 + label1 = "18" + label2 = "06" + else: + startHour = 24+4 + label1 = "06" + label2 = "18" + tr1 = self.createTimeRange(startHour, startHour + 5, mode="Zulu") + tr2 = self.createTimeRange(startHour + 12, startHour + 12 + 5, mode="Zulu") + self._timeRangeList = [(tr1, label1), (tr2,label2)] + + #print self._timeRangeList + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + + # Determine expiration time + expireTime = argDict['creationTime'] + self._expirationTimeOffset*3600 + self._expireTime = time.strftime("%d%H%M",time.gmtime(expireTime)) + + return + + def _sampleData(self, argDict): + # Sample the data + self._sampler = self.getSampler(argDict, + (self._getAnalysisList(), self._timeRangeList, self._areaList)) + + def _preProcessProduct(self, fcst, argDict): + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + #self._issueTime = 
self.getCurrentTime(argDict) + #fcst = fcst + areaLabel + "\n" + self._issueTime + "\n\n" + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + statList = self.getStatList(self._sampler, self._getAnalysisList(),\ + self._timeRangeList, editArea) + + fcst = fcst + "%%F" + fcst = fcst + self._forecasterNumber + fcst = fcst + " " + areaLabel + " " + + index = 0 + for statDict in statList: + fcst = fcst + self._timeRangeList[index][1] + "/" + + # Warning Status + str = self._warningStatus(statDict, argDict) + fcst = fcst + str + "/" + + # Wind direction and speed : ddff + str = self._windDirSpeed(statDict, argDict) + fcst = fcst + str + "/" + + # Wave Height + str = self._sigWaveHeight(statDict, argDict) + fcst = fcst + str + "/" + + index += 1 + + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + fcst = fcst + "\n" + return fcst + + def _postProcessProduct(self, fcst, argDict): + fcst = fcst + "\n\n$$" + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _getCurrentTimeString(self): + return time.strftime("%d%M%H", time.gmtime()) + + def _getAnalysisList(self): + return [ + ("Wind", self.vectorAvg), + ("WaveHeight", self.avg), + ] + + def _warningStatus(self, statDict, argDict): + # Return a warning status + wind = self.getStats(statDict, "Wind") + + # Need to use total or 'combined' seas + waves = self.getStats(statDict, "WaveHeight") # fixed + + if wind is None: + return "NO" + mag, dir = wind + + #non-tropical + if self._tropicalStorm == 0: + if mag < 25 and waves < 5: + return "NO" + elif mag < 34: # gales start at 34 kts + return "SC" + elif mag < 48: # storms start at 48 kts + return "GL" + elif mag < 63: + return "ST" + else: + return "HF" + #tropical + else: + 
if mag < 25 and waves < 5: + return "NO" + elif mag < 34: # gales start at 34 kts + return "SC" + elif mag < 63: # TS/HR winds + return "TS" + else: + return "HR" + + + def _windDirSpeed(self, statDict, argDict): + # Return a wind direction and speed + + wind = self.getStats(statDict, "Wind") + if wind is None: + return "9999" + mag, dir = wind + #print "wind mag:", mag, " dir:", dir + + # initial direction + if self._variableWinds(mag, dir) == 1: #variable winds + dirTens = 99 + else: + dir = self.round(dir, "Nearest", 10) + dirTens = int(dir/10.0) + if dirTens == 0: + dirTens = 36 #want 36 for N, not 0 + mag = int(self.round(mag, "Nearest", 1)) + + intvalue = dirTens*100 + mag%100 #output value as int + + # check to see if mag >= 100, a special case + # add 50 to the direction value + while mag >= 100: + mag = mag - 100 + intvalue = intvalue + 5000 + + #format as string + return "%04i" % intvalue + + def _variableWinds(self, mag, dir): + # This method can be easily overridden with + # criteria for variable winds + if mag < 5: + return 1 + else: + return 0 + + def _sigWaveHeight(self, statDict, argDict): + # Return a significant WaveHeight + wave = self.getStats(statDict, "WaveHeight") + if wave is None: + return "99" + if wave < 10: + return "0" + repr(int(wave)) + else: + return repr(int(wave)) diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/NSH.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/NSH.py index 2520c5d7a9..4673792263 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/NSH.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/NSH.py @@ -1,797 +1,797 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. 
+## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: This product creates a Near Shore Marine product. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# NSH, NSH___Definition, NSH__Overrides -#------------------------------------------------------------------------- -# Customization Points: -# -# REQUIRED OVERRIDE: -# _lakeStmt -- override with correct lake name(s) for your site -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas, default is Combinations -# -# productName defines name of product e.g. "Zone Forecast Product" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "SFTBOS" -# areaName (opt.) Area name for product header, such as "Western New York" -# wfoCityState WFO location, such as "Buffalo NY" -# -# Optional Configuration Items -# editAreaSuffix default None. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined or the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. 
-# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the AWIPS product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -# -# periodCombining If 1, components an attempt will be made to combine components -# or time periods into one. Otherwise no period combining will -# will be done. -# useAbbreviations -# If 1, use marine abbreviations e.g. TSTM instead of THUNDERSTORM, NW instead of NORTHWEST -# (See marine_abbreviateText in the TextRules module) -# areaDictionary Modify the AreaDictionary utility with UGC information about zones -# useHolidays Set to 1 to use holidays in the time period labels -# -# Weather-related flags -# hoursSChcEnds - specifies hours past the beginning of the first -# first period of the product to stop including 'Slight -# Chance' or 'Isolated' weather types (ERH policy -# allows values of 1-5 * 12 hour periods) -# -# Trouble-shooting items -# passLimit -- Limit on passes allowed through Narrative Tree -# trace -- Set to 1 to turn on trace through Narrative Tree -# -# NARRATIVE CUSTOMIZATION POINTS -# The phrases in this product can be customized in many ways by overriding -# infrastructure methods in the Local file. -# You will see common overrides in the Local file and you may change them -# in that there. -# For further customization, you can determine which phrases your product is -# using by examining the Component Product Definitions below. 
-# Then, you can look up the phrase in the Text Product User Guide which will -# describe the all the relevant override methods associated with the phrase. -# Refer to the Customization section of the Text Product User Guide -# for step-by-step information. -#------------------------------------------------------------------------- -# Weather Elements Needed: -# To 2 days: -# Wind (every 3 hours) -# WaveHeight and/or WindWaveHgt (every 6 hours) -# Wx (every 6 hours) -# Sky (every 6 hours) -# Optional: -# WindGust (every 3 hours) -#------------------------------------------------------------------------- -# Edit Areas Needed: -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -# Combinations -#------------------------------------------------------------------------- -# Component Products: -# Hazards (optional): If Discrete grid provided, headlines will be generated. -# NSHPeriod -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". -#------------------------------------------------------------------------- -# Additional Information: -# -# COMMON OVERRIDES -# from CWF: -# _Text1 -# _Text2 -# _Text3 -# _issuance_list -# from ConfigVariables: -# maximum_range_nlValue_dict -# minimum_range_nlValue_dict -# phrase_descriptor_dict -# scalar_difference_nlValue_dict -# -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Marine Services. 
- -import TextRules -import SampleAnalysis -import ForecastNarrative -import time, string, re, types - -### adding import of os for the MWW turnkey code at the end of the file -import os - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [] - Definition = { - "type": "smart", - "displayName": "None", - "database": "Official", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/NSH_.txt", - "debug": 0, - # Name of map background for creating Combinations - "mapNameForCombinations": "Marine_Zones_", - - ## Edit Areas: Create Combinations file with edit area combinations. - "showZoneCombiner" : 1, # 1 to cause zone combiner to display - "defaultEditAreas" : "Combinations_NSH__", - "editAreaSuffix": None, - - # product identifiers - "productName": "Nearshore Marine Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "Georgia" - "wfoCityState": "", # Location of WFO - city state - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. 
- - "hazardSamplingThreshold": (10, None), #(%cov, #points) - - "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time - - "periodCombining" : 0, # If 1, combine periods, if possible - - - "lineLength": 66, # product line length - "useAbbreviations": 0, # Use marine abbreviations - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - "useHolidays": 0, # Set to 1 to use holidays in the time period labels - - # Weather-related flags - "hoursSChcEnds": 24, - - # Language - "language": "english", - # Trouble-shooting items - "passLimit": 20, # Limit on passes allowed through - # Narrative Tree - "trace": 0, # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - self._outlookflag = 0 - - # TO BE OVERRIDDEN IN LOCAL FILE - def _Text1(self): - return "" - - def _Text2(self): - return "" - - def _Text3(self): - return "" - - def _lakeStmt(self, argDict): - return "For waters within five nautical miles of shore on Lake (name)" - - ######################################################################## - # OVERRIDING THRESHOLDS AND VARIABLES - ######################################################################## - - ### THRESHOLDS AND VARIABLES - ### Analysis Class - ### To override, override the associated method in your text product class. - def temporalCoverage_threshold(self, parmHisto, timeRange, componentName): - # Replaces IN_RANGE_THRESHOLD -- Note that this threshold is now used - # differently i.e. 
it is the percentage of the TIMERANGE covered by the - # grid in order to include it in the analysis - # Percentage of temporal coverage default value (if not found in temporalCoverage_dict) - # Used by temporalCoverage_flag - return 5.0 - - def temporalCoverage_dict(self, parmHisto, timeRange, componentName): - # Replaces IN_RANGE_DICT -- Note that this these thresholds are now used - return { - "LAL": 0, - "MinRH": 0, - "MaxRH": 0, - "MinT": 50, - "MaxT": 10, - "Haines": 0, - "Wx": 15, - "PoP" : 50, - } - - # Uncomment any combinations you wish to collapse. - # For example, if the first entry is uncommented, - # the phrase: scattered rain showers and widespread rain - # will collapse to: scattered rain showers. - def wxCombinations(self): - return [ - ("RW", "R"), - ("SW", "S"), - ## ("T","RW"), - ] - - def vector_dir_difference_dict(self, tree, node): - # Direction difference. If the difference between directions - # for 2 sub-periods is greater than this value, - # the different directions will be noted in the phrase. - # Units are degrees - return { - "Wind": 40, # degrees - "TransWind": 60, # mph - "FreeWind": 60, # mph - "Swell":60, # degrees - "Swell2":60, # degrees - } - - def phrase_descriptor_dict(self, tree, node): - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - dict["Wind"] = "wind" - dict["around"] = "" - return dict - - def null_nlValue_dict(self, tree, node): - # Threshold below which values are considered "null" and not reported. - # Units depend on the element and product - dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) - dict["Wind"] = 5 - return dict - - def first_null_phrase_dict(self, tree, node): - # Phrase to use if values THROUGHOUT the period or - # in the first period are Null (i.e. below threshold OR NoWx) - # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. 
- dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) - dict["Wind"] = "variable winds 10 knots or less" - return dict - - def null_phrase_dict(self, tree, node): - # Phrase to use for null values in subPhrases other than the first - # Can be an empty string - # E.g. "NORTH 20 to 25 KNOTS BECOMING LIGHT" - dict = TextRules.TextRules.null_phrase_dict(self, tree, node) - dict["Wind"] = "variable 10 knots or less" - dict["Wx"] = "" - return dict - - def marine_wind_flag(self, tree, node): - # If 1, Wind combining and wording will reflect the - # crossing of significant thresholds such as gales - return 1 - - def marine_wind_combining_flag(self, tree, node): - # If 1, Wind combining will reflect the - # crossing of significant thresholds such as gales. - # E.g. "Hurricane force winds to 100 knots." instead of - # "north hurricane force winds to 100 knots easing to - # hurricane force winds to 80 knots in the afternoon." - return 1 - - def postProcessPhrases(self, tree, node): - words = node.get("words") - if words is not None: - words = string.replace(words, "thunderstorms and rain showers", - "showers and thunderstorms") - words = string.replace(words, "snow showers and rain showers", "rain and snow showers") - words = string.replace(words, "rain showers and snow showers", "rain and snow showers") - #print "words = ", words - words = string.replace(words, "light rain showers", "rain showers") - words = string.replace(words, "rain showers", "showers") - #print "words 2= ", words - words = string.replace(words, "winds hurricane", "hurricane") - words = string.replace(words, "winds gales", "gales") - words = string.replace(words, "winds storm", "storm") - words = string.replace(words, "to to", "to") - words = string.replace(words, "winds 10 knots", "winds around 10 knots") - words = string.replace(words, "winds 5 knots", "winds around 5 knots") - words = string.replace(words, "and chance of", "and a chance of") - return self.setWords(node, words) - - def 
rounding_method_dict(self, tree, node): - # Special rounding methods - # - return { - "Wind": self.marineRounding, - } - - def scalar_difference_nlValue_dict(self, tree, node): - # Scalar difference. If the difference between scalar values - # for 2 sub-periods is greater than or equal to this value, - # the different values will be noted in the phrase. - dict = TextRules.TextRules.scalar_difference_nlValue_dict(self, tree, node) - dict["WaveHeight"] = { - (0, 6) : 1, - (6, 20) : 5, - 'default': 10, - } - return dict - - def minimum_range_nlValue_dict(self, tree, node): - # This threshold is the "smallest" min/max difference allowed between values reported. - # For example, if threshold is set to 5 for "MaxT", and the min value is 45 - # and the max value is 46, the range will be adjusted to at least a 5 degree - # range e.g. 43-48. These are the values that are then submitted for phrasing - # such as: - # HIGHS IN THE MID 40S - dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) - dict["Wind"] = { - (0,5) : 0, # will be reported as "null" - (5, 8) : 5, - "default" : 10, - } - return dict - - ######################################################################## - # COMPONENT PRODUCT DEFINITIONS - ######################################################################## - - def _PoP_analysisMethod(self, componentName): - # Alternative PoP analysis methods for consistency between PoP and Wx - return self.stdDevMaxAvg - #return self.maxMode - #return self.maximum - - def NSHFirstPeriod(self): - return { - "type": "phrase", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.postProcessPhrases, - self.wordWrap, - ], - "analysisList": [ - #("Wind", self.vectorMinMax, [3]), - ("Wind", self.vectorMinMax, [4]), - #("WindGust", self.maximum, [3]), - ("WindGust", self.maximum, [6]), - ("WaveHeight", self.minMax, [3]), - ("Wx", self.rankedWx, [6]), - ("T", self.minMax), - ("Sky", self.avg, [6]), - ("PoP", 
self._PoP_analysisMethod("NSHFirstPeriod"), [6]), - ("PoP", self.binnedPercent, [6]), - ], - "phraseList":[ - # WINDS - self.marine_wind_withGusts_phrase, - # Alternative: - #self.marine_wind_phrase, - #self.gust_phrase, - # WEATHER - self.weather_orSky_phrase, - self.visibility_phrase, - # WAVES - self.waveHeight_phrase, - # Optional: - #self.chop_phrase, - ], - "lineLength": 66, - "runTimeEditArea": "yes", - } - - - def NSHPeriod(self): - return { - "type": "phrase", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.postProcessPhrases, - self.wordWrap, - ], - "analysisList": [ - #("Wind", self.vectorMinMax, [3]), - ("Wind", self.vectorMinMax, [4]), - #("WindGust", self.maximum, [3]), - ("WindGust", self.maximum, [6]), - ("WaveHeight", self.minMax, [3]), - ("Wx", self.rankedWx, [6]), - ("T", self.minMax), - ("Sky", self.avg, [6]), - ("PoP", self._PoP_analysisMethod("NSHPeriod"), [6]), - ("PoP", self.binnedPercent, [6]), - ], - "phraseList":[ - # WINDS - self.marine_wind_withGusts_phrase, - # Alternative: - #self.marine_wind_phrase, - #self.gust_phrase, - # WEATHER - self.weather_orSky_phrase, - self.visibility_phrase, - # WAVES - self.waveHeight_phrase, - # Optional: - #self.chop_phrase, - #outlook phrase - self._warnOutlook_phrase, - ], - "lineLength": 66, - "runTimeEditArea": "yes", - } - - def generateForecast(self, argDict): - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." 
- - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - #part below is to eliminate time prompt added by Meade - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - localtime = time.localtime(argDict['creationTime']) - localHour = localtime[3] - self._setProductIssuance(localHour) - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._format = "Standard" - self._extended = "Without Extended" - self._language = argDict["language"] - - # Make argDict accessible - self.__argDict = argDict - - return None - - def _determineTimeRanges(self, argDict): - # Set up the Narrative Definition and initial Time Range - self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict)) - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._expireTime = 
self._issuanceInfo.expireTime() - self._issueTime = self._issuanceInfo.issueTime() - self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - if self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - self._definition["priorPeriod"] = 24 - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - return None - - def _sampleData(self, argDict): - # Sample and analyze the data for the narrative - self._narrativeProcessor = ForecastNarrative.ForecastNarrative() - error = self._narrativeProcessor.getNarrativeData( - argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) - if error is not None: - return error - return None - - def _preProcessProduct(self, fcst, argDict): - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - fcst = fcst + self._lakeStmt(argDict) + "\n\n" - fcst = fcst + self._Text1() - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas) - fcst = fcst + areaHeader - - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - 
headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - # Produce Headline product - argDict["language"] = self._language - # Generate Narrative Forecast for Edit Area - fcstSegment = self._narrativeProcessor.generateForecast( - argDict, editArea, areaLabel) - - # Handle abbreviations - if self._useAbbreviations == 1: - fcstSegment = self.marine_abbreviateText(fcstSegment) - fcstSegment = re.sub(r'\n', r' ',fcstSegment) - fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) - fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) - fcst = fcst + fcstSegment - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - fcst = fcst + self._Text2() - fcst = fcst + self._Text3() - return fcst + "\n$$\n\n" - - def _postProcessProduct(self, fcst, argDict): - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _setProductIssuance(self, localHour): - if localHour >= 0 and localHour <= 8: - self._productIssuance = "430 AM" - elif localHour > 8 and localHour <= 13: - self._productIssuance = "Morning Update" - elif localHour > 13 and localHour <= 18: - self._productIssuance = "430 PM" - else: - self._productIssuance = "Evening Update" - - def _issuance_list(self, argDict): - # This method sets up configurable issuance times with associated - # narrative definitions. See the Text Product User Guide for documentation. 
- narrativeDefAM = [ - ("NSHFirstPeriod", "period1"), ("NSHPeriod", 12), ("NSHPeriod", 12), ("NSHPeriod", 12), -# ("NSHExtended", 24), ("NSHExtended", 24), ("NSHExtended", 24), - ] - narrativeDefPM = [ - ("NSHFirstPeriod", "period1"), ("NSHPeriod", 12), ("NSHPeriod", 12), ("NSHPeriod", 12), -# ("NSHExtended", 24), ("NSHExtended", 24), ("NSHExtended", 24), - ] - return [ - ("430 AM", self.DAY(), self.NIGHT(), 11, - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning Update", "issuanceHour", self.NIGHT(), 17, - ".THIS AFTERNOON...", "early", "late", - 1, narrativeDefAM), - # End times are tomorrow: - ("430 PM", self.NIGHT(), 24 + self.DAY(), 23, - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDefPM), - ("Evening Update", "issuanceHour", 24 + self.DAY(), 24+5, - ".OVERNIGHT...", "toward daybreak", "early in the evening", - 1, narrativeDefPM), - ] - - - def lateDay_descriptor(self, tree, node, timeRange): - # If time range is in the first period, return period1 descriptor for - # late day -- default 3pm-6pm - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateDayPhrase() - else: - return "late in the afternoon" - - def lateNight_descriptor(self, tree, node, timeRange): - # If time range is in the first period, return period1 descriptor for - # late night -- default 3am-6am - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateNightPhrase() - else: - return "early in the morning" - - def splitDay24HourLabel_flag(self, tree, node): - # Return 0 to have the TimeDescriptor module label 24 hour periods - # with simply the weekday name (e.g. Saturday) - # instead of including the day and night periods - # (e.g. Saturday and Saturday night) - # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" - # is set to zero. 
- # NOTE: This applied only to periods that are exactly 24-hours in length. - # Periods longer than that will always be split into day and night labels - # (e.g. SUNDAY THROUGH MONDAY NIGHT) - compName = node.getComponentName() - if compName == "NSHExtended": - return 0 - else: - return 1 - - def significant_wx_visibility_subkeys(self, tree, node): - # Weather values that constitute significant weather to - # be reported regardless of visibility. - # If your visibility_wx_threshold is None, you do not need - # to set up these subkeys since weather will always be - # reported. - # Set of tuples of weather key search tuples in the form: - # (cov type inten) - # Wildcards are permitted. - return [("* *")] - - ######################################################################## - # OVERRIDING METHODS - ######################################################################## - - def _warnOutlook_phrase(self): - return { - "phraseMethods": [ - self._warnOutlook_words, # phrase.words - ], - } - def _warnOutlook_words(self, tree, phrase): - # will put an outlook phrase in the text - - windStats = tree.stats.get("Wind", phrase.getTimeRange(), mergeMethod="Max") - if windStats is None: - return self.setWords(phrase, "") - - max, dir = windStats - words = "" - if max >= 23 and (self._outlookflag != 1): - words = "a small craft advisory may be needed" - self._outlookflag = 1 - if max >= 34 and (self._outlookflag != 2): - words = "a gale warning may be needed" - self._outlookflag = 2 - if max >= 48 and (self._outlookflag != 3): - words = "a storm warning may be needed" - self._outlookflag = 3 - if max >= 64 and (self._outlookflag != 4): - self._outlookflag = 4 - words = "a hurricane force wind warning may be needed" - if max < 23: - words = "" - self._outlookflag = 0 - return self.setWords(phrase, words) - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. 
- def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] - - - return [ - ('HF.A', marineActions, 'Marine'), # HURRICANE FORCE WIND WATCH - ('SR.A', marineActions, 'Marine'), # STORM WATCH - ('GL.A', marineActions, 'Marine'), # GALE WATCH - ('SE.A', marineActions, 'Marine'), # HAZARDOUS SEAS - ('UP.A', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH - ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING - ('SR.W', marineActions, 'Marine'), # STORM WARNING - ('GL.W', marineActions, 'Marine'), # GALE WARNING - ('SE.W', marineActions, 'Marine'), # HAZARDOUS SEAS - ('UP.W', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING - ('RB.Y', allActions, 'Marine'), #ROUGH BAR - ('SI.Y', allActions, 'Marine'), #SMALL CRAFT ADVISORY - ('SC.Y', allActions, 'Marine'), # SMALL CRAFT ADVISORY - ('SW.Y', allActions, 'Marine'), # SMALL CRAFT ADVISORY - ('BW.Y', allActions, 'Marine'), # BRISK WIND ADVISORY - ('MF.Y', allActions, 'Fog'), # DENSE FOG ADVISORY - ('MS.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY - ('UP.Y', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY ADVISORY - ('MH.W', allActions, 'Ashfall'), # VOLCANIC ASHFALL WARNING - ('MH.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY - ('LO.Y', allActions, 'LowWater'), # LOW WATER ADVISORY - ('TO.A', allActions, 'Convective'), # TORNADO WATCH - ('SV.A', allActions, 'Convective'), # SEVERE THUNDERSTORM WATCH - ] +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: This product creates a Near Shore Marine product. 
+#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# NSH, NSH___Definition, NSH__Overrides +#------------------------------------------------------------------------- +# Customization Points: +# +# REQUIRED OVERRIDE: +# _lakeStmt -- override with correct lake name(s) for your site +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas, default is Combinations +# +# productName defines name of product e.g. "Zone Forecast Product" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "SFTBOS" +# areaName (opt.) Area name for product header, such as "Western New York" +# wfoCityState WFO location, such as "Buffalo NY" +# +# Optional Configuration Items +# editAreaSuffix default None. Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined or the GFE zone combiner +# database Source database for product. 
Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the AWIPS product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +# +# periodCombining If 1, components an attempt will be made to combine components +# or time periods into one. Otherwise no period combining will +# will be done. +# useAbbreviations +# If 1, use marine abbreviations e.g. TSTM instead of THUNDERSTORM, NW instead of NORTHWEST +# (See marine_abbreviateText in the TextRules module) +# areaDictionary Modify the AreaDictionary utility with UGC information about zones +# useHolidays Set to 1 to use holidays in the time period labels +# +# Weather-related flags +# hoursSChcEnds - specifies hours past the beginning of the first +# first period of the product to stop including 'Slight +# Chance' or 'Isolated' weather types (ERH policy +# allows values of 1-5 * 12 hour periods) +# +# Trouble-shooting items +# passLimit -- Limit on passes allowed through Narrative Tree +# trace -- Set to 1 to turn on trace through Narrative Tree +# +# NARRATIVE CUSTOMIZATION POINTS +# The phrases in this product can be customized in many ways by overriding +# infrastructure methods in the Local file. +# You will see common overrides in the Local file and you may change them +# in that there. 
+# For further customization, you can determine which phrases your product is +# using by examining the Component Product Definitions below. +# Then, you can look up the phrase in the Text Product User Guide which will +# describe the all the relevant override methods associated with the phrase. +# Refer to the Customization section of the Text Product User Guide +# for step-by-step information. +#------------------------------------------------------------------------- +# Weather Elements Needed: +# To 2 days: +# Wind (every 3 hours) +# WaveHeight and/or WindWaveHgt (every 6 hours) +# Wx (every 6 hours) +# Sky (every 6 hours) +# Optional: +# WindGust (every 3 hours) +#------------------------------------------------------------------------- +# Edit Areas Needed: +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +# Combinations +#------------------------------------------------------------------------- +# Component Products: +# Hazards (optional): If Discrete grid provided, headlines will be generated. +# NSHPeriod +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Additional Information: +# +# COMMON OVERRIDES +# from CWF: +# _Text1 +# _Text2 +# _Text3 +# _issuance_list +# from ConfigVariables: +# maximum_range_nlValue_dict +# minimum_range_nlValue_dict +# phrase_descriptor_dict +# scalar_difference_nlValue_dict +# +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Marine Services. 
+ +import TextRules +import SampleAnalysis +import ForecastNarrative +import time, string, re, types + +### adding import of os for the MWW turnkey code at the end of the file +import os + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [] + Definition = { + "type": "smart", + "displayName": "None", + "database": "Official", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/NSH_.txt", + "debug": 0, + # Name of map background for creating Combinations + "mapNameForCombinations": "Marine_Zones_", + + ## Edit Areas: Create Combinations file with edit area combinations. + "showZoneCombiner" : 1, # 1 to cause zone combiner to display + "defaultEditAreas" : "Combinations_NSH__", + "editAreaSuffix": None, + + # product identifiers + "productName": "Nearshore Marine Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "Georgia" + "wfoCityState": "", # Location of WFO - city state + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. 
+ + "hazardSamplingThreshold": (10, None), #(%cov, #points) + + "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time + + "periodCombining" : 0, # If 1, combine periods, if possible + + + "lineLength": 66, # product line length + "useAbbreviations": 0, # Use marine abbreviations + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + "useHolidays": 0, # Set to 1 to use holidays in the time period labels + + # Weather-related flags + "hoursSChcEnds": 24, + + # Language + "language": "english", + # Trouble-shooting items + "passLimit": 20, # Limit on passes allowed through + # Narrative Tree + "trace": 0, # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + self._outlookflag = 0 + + # TO BE OVERRIDDEN IN LOCAL FILE + def _Text1(self): + return "" + + def _Text2(self): + return "" + + def _Text3(self): + return "" + + def _lakeStmt(self, argDict): + return "For waters within five nautical miles of shore on Lake (name)" + + ######################################################################## + # OVERRIDING THRESHOLDS AND VARIABLES + ######################################################################## + + ### THRESHOLDS AND VARIABLES + ### Analysis Class + ### To override, override the associated method in your text product class. + def temporalCoverage_threshold(self, parmHisto, timeRange, componentName): + # Replaces IN_RANGE_THRESHOLD -- Note that this threshold is now used + # differently i.e. 
it is the percentage of the TIMERANGE covered by the + # grid in order to include it in the analysis + # Percentage of temporal coverage default value (if not found in temporalCoverage_dict) + # Used by temporalCoverage_flag + return 5.0 + + def temporalCoverage_dict(self, parmHisto, timeRange, componentName): + # Replaces IN_RANGE_DICT -- Note that this these thresholds are now used + return { + "LAL": 0, + "MinRH": 0, + "MaxRH": 0, + "MinT": 50, + "MaxT": 10, + "Haines": 0, + "Wx": 15, + "PoP" : 50, + } + + # Uncomment any combinations you wish to collapse. + # For example, if the first entry is uncommented, + # the phrase: scattered rain showers and widespread rain + # will collapse to: scattered rain showers. + def wxCombinations(self): + return [ + ("RW", "R"), + ("SW", "S"), + ## ("T","RW"), + ] + + def vector_dir_difference_dict(self, tree, node): + # Direction difference. If the difference between directions + # for 2 sub-periods is greater than this value, + # the different directions will be noted in the phrase. + # Units are degrees + return { + "Wind": 40, # degrees + "TransWind": 60, # mph + "FreeWind": 60, # mph + "Swell":60, # degrees + "Swell2":60, # degrees + } + + def phrase_descriptor_dict(self, tree, node): + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + dict["Wind"] = "wind" + dict["around"] = "" + return dict + + def null_nlValue_dict(self, tree, node): + # Threshold below which values are considered "null" and not reported. + # Units depend on the element and product + dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) + dict["Wind"] = 5 + return dict + + def first_null_phrase_dict(self, tree, node): + # Phrase to use if values THROUGHOUT the period or + # in the first period are Null (i.e. below threshold OR NoWx) + # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. 
+ dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) + dict["Wind"] = "variable winds 10 knots or less" + return dict + + def null_phrase_dict(self, tree, node): + # Phrase to use for null values in subPhrases other than the first + # Can be an empty string + # E.g. "NORTH 20 to 25 KNOTS BECOMING LIGHT" + dict = TextRules.TextRules.null_phrase_dict(self, tree, node) + dict["Wind"] = "variable 10 knots or less" + dict["Wx"] = "" + return dict + + def marine_wind_flag(self, tree, node): + # If 1, Wind combining and wording will reflect the + # crossing of significant thresholds such as gales + return 1 + + def marine_wind_combining_flag(self, tree, node): + # If 1, Wind combining will reflect the + # crossing of significant thresholds such as gales. + # E.g. "Hurricane force winds to 100 knots." instead of + # "north hurricane force winds to 100 knots easing to + # hurricane force winds to 80 knots in the afternoon." + return 1 + + def postProcessPhrases(self, tree, node): + words = node.get("words") + if words is not None: + words = string.replace(words, "thunderstorms and rain showers", + "showers and thunderstorms") + words = string.replace(words, "snow showers and rain showers", "rain and snow showers") + words = string.replace(words, "rain showers and snow showers", "rain and snow showers") + #print "words = ", words + words = string.replace(words, "light rain showers", "rain showers") + words = string.replace(words, "rain showers", "showers") + #print "words 2= ", words + words = string.replace(words, "winds hurricane", "hurricane") + words = string.replace(words, "winds gales", "gales") + words = string.replace(words, "winds storm", "storm") + words = string.replace(words, "to to", "to") + words = string.replace(words, "winds 10 knots", "winds around 10 knots") + words = string.replace(words, "winds 5 knots", "winds around 5 knots") + words = string.replace(words, "and chance of", "and a chance of") + return self.setWords(node, words) + + def 
rounding_method_dict(self, tree, node): + # Special rounding methods + # + return { + "Wind": self.marineRounding, + } + + def scalar_difference_nlValue_dict(self, tree, node): + # Scalar difference. If the difference between scalar values + # for 2 sub-periods is greater than or equal to this value, + # the different values will be noted in the phrase. + dict = TextRules.TextRules.scalar_difference_nlValue_dict(self, tree, node) + dict["WaveHeight"] = { + (0, 6) : 1, + (6, 20) : 5, + 'default': 10, + } + return dict + + def minimum_range_nlValue_dict(self, tree, node): + # This threshold is the "smallest" min/max difference allowed between values reported. + # For example, if threshold is set to 5 for "MaxT", and the min value is 45 + # and the max value is 46, the range will be adjusted to at least a 5 degree + # range e.g. 43-48. These are the values that are then submitted for phrasing + # such as: + # HIGHS IN THE MID 40S + dict = TextRules.TextRules.minimum_range_nlValue_dict(self, tree, node) + dict["Wind"] = { + (0,5) : 0, # will be reported as "null" + (5, 8) : 5, + "default" : 10, + } + return dict + + ######################################################################## + # COMPONENT PRODUCT DEFINITIONS + ######################################################################## + + def _PoP_analysisMethod(self, componentName): + # Alternative PoP analysis methods for consistency between PoP and Wx + return self.stdDevMaxAvg + #return self.maxMode + #return self.maximum + + def NSHFirstPeriod(self): + return { + "type": "phrase", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.postProcessPhrases, + self.wordWrap, + ], + "analysisList": [ + #("Wind", self.vectorMinMax, [3]), + ("Wind", self.vectorMinMax, [4]), + #("WindGust", self.maximum, [3]), + ("WindGust", self.maximum, [6]), + ("WaveHeight", self.minMax, [3]), + ("Wx", self.rankedWx, [6]), + ("T", self.minMax), + ("Sky", self.avg, [6]), + ("PoP", 
self._PoP_analysisMethod("NSHFirstPeriod"), [6]), + ("PoP", self.binnedPercent, [6]), + ], + "phraseList":[ + # WINDS + self.marine_wind_withGusts_phrase, + # Alternative: + #self.marine_wind_phrase, + #self.gust_phrase, + # WEATHER + self.weather_orSky_phrase, + self.visibility_phrase, + # WAVES + self.waveHeight_phrase, + # Optional: + #self.chop_phrase, + ], + "lineLength": 66, + "runTimeEditArea": "yes", + } + + + def NSHPeriod(self): + return { + "type": "phrase", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.postProcessPhrases, + self.wordWrap, + ], + "analysisList": [ + #("Wind", self.vectorMinMax, [3]), + ("Wind", self.vectorMinMax, [4]), + #("WindGust", self.maximum, [3]), + ("WindGust", self.maximum, [6]), + ("WaveHeight", self.minMax, [3]), + ("Wx", self.rankedWx, [6]), + ("T", self.minMax), + ("Sky", self.avg, [6]), + ("PoP", self._PoP_analysisMethod("NSHPeriod"), [6]), + ("PoP", self.binnedPercent, [6]), + ], + "phraseList":[ + # WINDS + self.marine_wind_withGusts_phrase, + # Alternative: + #self.marine_wind_phrase, + #self.gust_phrase, + # WEATHER + self.weather_orSky_phrase, + self.visibility_phrase, + # WAVES + self.waveHeight_phrase, + # Optional: + #self.chop_phrase, + #outlook phrase + self._warnOutlook_phrase, + ], + "lineLength": 66, + "runTimeEditArea": "yes", + } + + def generateForecast(self, argDict): + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + #part below is to eliminate time prompt added by Meade + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + localtime = time.localtime(argDict['creationTime']) + localHour = localtime[3] + self._setProductIssuance(localHour) + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._format = "Standard" + self._extended = "Without Extended" + self._language = argDict["language"] + + # Make argDict accessible + self.__argDict = argDict + + return None + + def _determineTimeRanges(self, argDict): + # Set up the Narrative Definition and initial Time Range + self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict)) + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._expireTime = 
self._issuanceInfo.expireTime() + self._issueTime = self._issuanceInfo.issueTime() + self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + if self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + self._definition["priorPeriod"] = 24 + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + return None + + def _sampleData(self, argDict): + # Sample and analyze the data for the narrative + self._narrativeProcessor = ForecastNarrative.ForecastNarrative() + error = self._narrativeProcessor.getNarrativeData( + argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) + if error is not None: + return error + return None + + def _preProcessProduct(self, fcst, argDict): + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + fcst = fcst + self._lakeStmt(argDict) + "\n\n" + fcst = fcst + self._Text1() + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas) + fcst = fcst + areaHeader + + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + 
headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + # Produce Headline product + argDict["language"] = self._language + # Generate Narrative Forecast for Edit Area + fcstSegment = self._narrativeProcessor.generateForecast( + argDict, editArea, areaLabel) + + # Handle abbreviations + if self._useAbbreviations == 1: + fcstSegment = self.marine_abbreviateText(fcstSegment) + fcstSegment = re.sub(r'\n', r' ',fcstSegment) + fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) + fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) + fcst = fcst + fcstSegment + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + fcst = fcst + self._Text2() + fcst = fcst + self._Text3() + return fcst + "\n$$\n\n" + + def _postProcessProduct(self, fcst, argDict): + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _setProductIssuance(self, localHour): + if localHour >= 0 and localHour <= 8: + self._productIssuance = "430 AM" + elif localHour > 8 and localHour <= 13: + self._productIssuance = "Morning Update" + elif localHour > 13 and localHour <= 18: + self._productIssuance = "430 PM" + else: + self._productIssuance = "Evening Update" + + def _issuance_list(self, argDict): + # This method sets up configurable issuance times with associated + # narrative definitions. See the Text Product User Guide for documentation. 
+ narrativeDefAM = [ + ("NSHFirstPeriod", "period1"), ("NSHPeriod", 12), ("NSHPeriod", 12), ("NSHPeriod", 12), +# ("NSHExtended", 24), ("NSHExtended", 24), ("NSHExtended", 24), + ] + narrativeDefPM = [ + ("NSHFirstPeriod", "period1"), ("NSHPeriod", 12), ("NSHPeriod", 12), ("NSHPeriod", 12), +# ("NSHExtended", 24), ("NSHExtended", 24), ("NSHExtended", 24), + ] + return [ + ("430 AM", self.DAY(), self.NIGHT(), 11, + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning Update", "issuanceHour", self.NIGHT(), 17, + ".THIS AFTERNOON...", "early", "late", + 1, narrativeDefAM), + # End times are tomorrow: + ("430 PM", self.NIGHT(), 24 + self.DAY(), 23, + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDefPM), + ("Evening Update", "issuanceHour", 24 + self.DAY(), 24+5, + ".OVERNIGHT...", "toward daybreak", "early in the evening", + 1, narrativeDefPM), + ] + + + def lateDay_descriptor(self, tree, node, timeRange): + # If time range is in the first period, return period1 descriptor for + # late day -- default 3pm-6pm + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateDayPhrase() + else: + return "late in the afternoon" + + def lateNight_descriptor(self, tree, node, timeRange): + # If time range is in the first period, return period1 descriptor for + # late night -- default 3am-6am + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateNightPhrase() + else: + return "early in the morning" + + def splitDay24HourLabel_flag(self, tree, node): + # Return 0 to have the TimeDescriptor module label 24 hour periods + # with simply the weekday name (e.g. Saturday) + # instead of including the day and night periods + # (e.g. Saturday and Saturday night) + # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" + # is set to zero. 
+ # NOTE: This applied only to periods that are exactly 24-hours in length. + # Periods longer than that will always be split into day and night labels + # (e.g. SUNDAY THROUGH MONDAY NIGHT) + compName = node.getComponentName() + if compName == "NSHExtended": + return 0 + else: + return 1 + + def significant_wx_visibility_subkeys(self, tree, node): + # Weather values that constitute significant weather to + # be reported regardless of visibility. + # If your visibility_wx_threshold is None, you do not need + # to set up these subkeys since weather will always be + # reported. + # Set of tuples of weather key search tuples in the form: + # (cov type inten) + # Wildcards are permitted. + return [("* *")] + + ######################################################################## + # OVERRIDING METHODS + ######################################################################## + + def _warnOutlook_phrase(self): + return { + "phraseMethods": [ + self._warnOutlook_words, # phrase.words + ], + } + def _warnOutlook_words(self, tree, phrase): + # will put an outlook phrase in the text + + windStats = tree.stats.get("Wind", phrase.getTimeRange(), mergeMethod="Max") + if windStats is None: + return self.setWords(phrase, "") + + max, dir = windStats + words = "" + if max >= 23 and (self._outlookflag != 1): + words = "a small craft advisory may be needed" + self._outlookflag = 1 + if max >= 34 and (self._outlookflag != 2): + words = "a gale warning may be needed" + self._outlookflag = 2 + if max >= 48 and (self._outlookflag != 3): + words = "a storm warning may be needed" + self._outlookflag = 3 + if max >= 64 and (self._outlookflag != 4): + self._outlookflag = 4 + words = "a hurricane force wind warning may be needed" + if max < 23: + words = "" + self._outlookflag = 0 + return self.setWords(phrase, words) + + # Returns a list of the Hazards allowed for this product in VTEC format. + # These are sorted in priority order - most important first. 
+ def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] + + + return [ + ('HF.A', marineActions, 'Marine'), # HURRICANE FORCE WIND WATCH + ('SR.A', marineActions, 'Marine'), # STORM WATCH + ('GL.A', marineActions, 'Marine'), # GALE WATCH + ('SE.A', marineActions, 'Marine'), # HAZARDOUS SEAS + ('UP.A', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WATCH + ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING + ('SR.W', marineActions, 'Marine'), # STORM WARNING + ('GL.W', marineActions, 'Marine'), # GALE WARNING + ('SE.W', marineActions, 'Marine'), # HAZARDOUS SEAS + ('UP.W', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING + ('RB.Y', allActions, 'Marine'), #ROUGH BAR + ('SI.Y', allActions, 'Marine'), #SMALL CRAFT ADVISORY + ('SC.Y', allActions, 'Marine'), # SMALL CRAFT ADVISORY + ('SW.Y', allActions, 'Marine'), # SMALL CRAFT ADVISORY + ('BW.Y', allActions, 'Marine'), # BRISK WIND ADVISORY + ('MF.Y', allActions, 'Fog'), # DENSE FOG ADVISORY + ('MS.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY + ('UP.Y', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY ADVISORY + ('MH.W', allActions, 'Ashfall'), # VOLCANIC ASHFALL WARNING + ('MH.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY + ('LO.Y', allActions, 'LowWater'), # LOW WATER ADVISORY + ('TO.A', allActions, 'Convective'), # TORNADO WATCH + ('SV.A', allActions, 'Convective'), # SEVERE THUNDERSTORM WATCH + ] diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/OFF.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/OFF.py index fb616d3c57..f836dde156 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/OFF.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/OFF.py @@ -1,1093 
+1,1093 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: OFF (Offshore Forecast) -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# OFF.py, OFF ___Definition, OFF__Override -#------------------------------------------------------------------------- -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# defaultEditAreas defines edit areas, default is Combinations -# -# productName defines name of product e.g. "Coastal Waters Rorecast" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "OFFBOS" -# areaName (opt.) 
Area name for product header, such as "Western New York" -# wfoCityState City,state that the WFO is located in, such as "Buffalo, NY" -# -# synopsisUGC UGC code for Synopsis -# synopsisHeading Heading for Synopsis -# -# Optional Configuration Items -# -# editAreaSuffix default None. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined or the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# hazardSamplingThreshold Defines the percentage coverage or number of -# grid points in a zone that must contain the hazard -# in order for it to be considered. Tuple (percent, points) -# -# periodCombining If 1, an attempt will be made to combine components -# or time periods into one. Otherwise no period -# combining will will be done. 
-# includeEveningPeriod Include a 6 hour Evening period on the 3rd day -# useAbbreviations -# If 1, use marine abbreviations e.g. TSTM instead of THUNDERSTORM, -# NW instead of NORTHWEST -# (See marine_abbreviateText in the TextRules module) -# -# Weather-related flags -# hoursSChcEnds - specifies hours past the beginning of the first -# first period of the product to stop including 'Slight -# Chance' or 'Isolated' weather types (ERH policy -# allows values of 1-5 * 12 hour periods) -# -# areaDictionary Modify the AreaDictionary utility with UGC -# information about zones -# -# useHolidays Set to 1 to use holidays in the time period labels -# -# Trouble-shooting items -# passLimit -- Limit on passes allowed through Narrative Tree -# trace -- Set to 1 to turn on trace through Narrative Tree -# -# OVERRIDES -# -# Required Overrides -# -# _Text1(), _Text2() Descriptive text for header -# -# NARRATIVE CUSTOMIZATION POINTS -# The phrases in this product can be customized in many ways by overriding -# infrastructure methods in the Local file. -# You will see common overrides in the Local file and you may change them -# in that there. -# For further customization, you can determine which phrases your product is -# using by examining the Component Product Definitions below. -# Then, you can look up the phrase in the Text Product User Guide which will -# describe the all the relevant override methods associated with the phrase. -# Refer to the Customization section of the Text Product User Guide -# for step-by-step information. 
-# -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Wind (every 3 hours to 3 days, then every 6 hours to 7 days) -# WaveHeight and/or WindWaveHgt -# (every 6 hours to 3 days, then every 12 hours to 7 days) -# Wx (every 6 hours to 3 days, then every 12 hours to 7 days) -# Optional: -# WindGust (every 3 hours to 7 days) -# Swell, Swell2, Period, Period2 (every 6 hours to 7 days) -#------------------------------------------------------------------------- -# Edit Areas Needed: None -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: -# Combinations -#------------------------------------------------------------------------- -# Component Products: -# OFFPeriod (component) -# OFFPeriodMid (component) -# OFFExtended (component) -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". 
-#------------------------------------------------------------------------- -# Additional Information: -# -# COMMON OVERRIDES -# from OFF: -# _Text1 -# _Text2 -# _issuance_list -# riverBarForecast_dict -# from MarinePhrases -# inlandWatersAreas -# inlandWatersWave_element -# seasWaveHeight_element -# seasWindWave_element -# waveHeight_wind_threshold -# marine_wind_flag -# marine_wind_combining_flag -# marine_wind_verbose_flag -# from ConfigVariables -# phrase_descriptor_dict -# phrase_connector_dict -# null_nlValue_dict -# first_null_phrase_dict -# null_phrase_dict -# maximum_range_nlValue_dict -# combine_singleValues_flag_dict -# from WxPhrases: -# embedded_visibility_flag -# visibility_wx_threshold -# significant_wx_visibility_subkeys -# wxCoverageDescriptors -# wxTypeDescriptors -# wxAttributeDescriptors -# wxIntensityDescriptors -# wxCombinations -# combine_T_RW -# from SampleAnalysis -# moderated_dict -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Marine Services. -#------------------------------------------------------------------------- - -import TextRules -import SampleAnalysis -import ForecastNarrative -import time, string, re, types -import TimeRange - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [] - Definition = { - "type": "smart", - "displayName": "None", - "database": "Official", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/OFF_.txt", - "debug": 0, - # Name of map background for creating Combinations - "mapNameForCombinations": "Marine_Zones_", - - "lineLength": 66, - ## Edit Areas: Create Combinations file with edit area combinations. 
- "showZoneCombiner" : 1, # 1 to cause zone combiner to display - "defaultEditAreas" : "Combinations_OFF__", - "editAreaSuffix": None, - # product identifiers - "productName": "Offshore Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "Georgia" -- optional - "wfoCityState": "", # Location of WFO - city state - - "synopsisUGC": "", # UGC code for synopsis - "synopsisHeading": ".SYNOPSIS...",# Heading for synopsis - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - - "hazardSamplingThreshold": (10, None), #(%cov, #points) - - "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time - - "periodCombining" : 0, # If 1, combine periods, if possible - # Product-specific variables: - # Set to one if you want a 6-hour evening period instead of - # 18-hour period without lows - "includeEveningPeriod": 1, - "useAbbreviations": 0, - - # Weather-related flags - "hoursSChcEnds": 24, - - # Area Dictionary -- Descriptive information about zones - "areaDictionary": "AreaDictionary", - "useHolidays": 0, # Set to 1 to use holidays in the time period labels - # Language - "language": "english", - - # Trouble-shooting items - "passLimit": 20, # Limit on passes allowed through - # Narrative Tree - "trace": 0, # Set to 1 to turn on trace through - # Narrative Tree for trouble-shooting - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - ######################################################################## - # OVERRIDING METHODS, THRESHOLDS AND VARIABLES - ######################################################################## - # MUST BE OVERRIDDEN IN LOCAL FILE - def _Text1(self): - return "" - - def _Text2(self): - synopsis = "" - - # Try to get Synopsis from previous CWF - #productID = 
"BOSCWFBOS" - #synopsis = self.getPreviousProduct(productID, "SYNOPSIS") - # Clean up the previous synopsis - #synopsis = re.sub(r'\n', r' ', synopsis) - #synopsis = self.endline(synopsis, linelength=66, breakStr=" ") - - # Convert absolute time pointer to a tuple of values like that - # returned by time.gmtime() - #expTuple = time.strptime('%s' % (self._expireTime), - # '%b %d %y %H:%M:%S GMT') - expTuple = self._expireTime.utctimetuple() - - # Format expiration time for inclusion in synopsis header - expTime = time.strftime('%d%H%M', expTuple) - - return "%s-%s-\n" % (self._synopsisUGC, expTime) + \ - self._timeLabel + "\n\n" + \ - self._synopsisHeading + "\n" + \ - synopsis + "\n$$\n\n" - - ######################################################################## - - # SampleAnalysis overrides - def moderated_dict(self, parmHisto, timeRange, componentName): - # This dictionary defines the low and high limit at which - # outliers will be removed when calculating moderated stats. - # By convention the first value listed is the percentage - # allowed for low values and second the percentage allowed - # for high values. - dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, timeRange, componentName) - dict["Wind"] = (0, 20) - dict["WaveHeight"] = (5,5) - return dict - - def null_nlValue_dict(self, tree, node): - # Threshold below which values are considered "null" and not reported. - # Units depend on the element and product - dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) - dict["WaveHeight"] = 6 - dict["WindWaveHgt"] = 6 - dict["Wind"] = 15 - dict["WindGust"] = 120 - dict["Swell"] = 5 - dict["Visibility"] = 5 # in nautical miles. Report if less than this value. 
- return dict - - # ConfigVariables Overrides - def phrase_descriptor_dict(self, tree, node): - # Descriptors for phrases - dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) - dict["Wind"] = "winds" - dict["WaveHeight"] = "seas" - dict["seas"] = "seas" - dict["mixed swell"] = "mixed swell" - dict["waves"] = "seas" - dict["dominant period"] = "dominant period" - # Apply only if marine_wind_flag (see above) is set to 1: - dict["hurricane force winds to"] = "hurricane force winds to" - dict["storm force winds to"] = "storm force winds to" - dict["gales to"] = "gales to" - dict["up to"] = "up to" - dict["around"] = "" - return dict - - def first_null_phrase_dict(self, tree, node): - # Phrase to use if values THROUGHOUT the period or - # in the first period are Null (i.e. below threshold OR NoWx) - # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. - dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) - dict["WaveHeight"] = "seas 6 feet or less" - dict["WindWaveHgt"] = "seas 6 feet or less" - dict["Wind"] = "winds 15 knots or less" - dict["Swell"] = "" - return dict - - def null_phrase_dict(self, tree, node): - # Phrase to use for null values in subPhrases other than the first - # Can be an empty string - # E.g. 
"NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" - dict = TextRules.TextRules.null_phrase_dict(self, tree, node) - dict["WaveHeight"] = "6 feet or less" - dict["WindWaveHgt"] = "6 feet or less" - dict["Wind"] = "less than 15 knots" - dict["Wx"] = "" - dict["Swell"] = "light" - return dict - - def phrase_connector_dict(self, tree, node): - # Dictionary of connecting phrases for various - # weather element phrases - # The value for an element may be a phrase or a method - # If a method, it will be called with arguments: - # tree, node - dict = TextRules.TextRules.phrase_connector_dict(self, tree, node) - dict["rising to"] = { - "Wind": ", rising to ", - "Swell": ", building to ", - "Swell2": ", building to ", - "WaveHeight": ", building to ", - "WindWaveHgt": ", building to ", - } - - dict["easing to"] = { - "Wind": ", diminishing to ", - "Swell": ", subsiding to ", - "Swell2": ", subsiding to ", - "WaveHeight": ", subsiding to ", - "WindWaveHgt": ", subsiding to ", - } - dict["backing"] = { - "Wind": ", becoming ", - "Swell": ", becoming ", - "Swell2": ", becoming ", - "WaveHeight": ", becoming ", - "WindWaveHgt": ", becoming ", - } - - dict["veering"] = { - "Wind": ", becoming ", - "Swell": ", becoming ", - "Swell2": ", becoming ", - "WaveHeight": ", becoming ", - "WindWaveHgt": ", becoming ", - } - - dict["becoming"] = ", becoming " - dict["increasing to"] = { - "Wind": ", rising to ", - "Swell": ", building to ", - "Swell2": ", building to ", - "WaveHeight": ", building to ", - "WindWaveHgt": ", building to ", - } - dict["decreasing to"] = { - "Wind": ", diminishing to ", - "Swell": ", subsiding to ", - "Swell2": ", subsiding to ", - "WaveHeight": ", subsiding to ", - "WindWaveHgt": ", subsiding to ", - } - dict["shifting to the"] = ", shifting to the " - dict["becoming onshore"] = " becoming onshore " - dict["then"] = {"Wx": ". 
", - "Vector": ", becoming ", - "Scalar": ", becoming ", - "otherwise": ", becoming ", - } - return dict - - def maximum_range_nlValue_dict(self, tree, node): - # Maximum range to be reported within a phrase - # e.g. 5 to 10 mph - # Units depend on the product - dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) - dict["Wind"] = 10 - dict["Swell"] = 5 - dict["Swell2"] = 5 - dict["WaveHeight"] = 4 - dict["WindWaveHgt"] = 2 - return dict - - def rounding_method_dict(self, tree, node): - # Special rounding methods - # - return { - "Wind": self.marineRounding, - } - - def vector_mag_difference_nlValue_dict(self, tree, node): - # Replaces WIND_THRESHOLD - # Magnitude difference. If the difference between magnitudes - # for sub-ranges is greater than or equal to this value, - # the different magnitudes will be noted in the phrase. - # Units can vary depending on the element and product - return { - "Wind": 10, - "Swell": 5, # ft - "Swell2": 5, # ft - } - - def vector_dir_difference_dict(self, tree, node): - # Replaces WIND_DIR_DIFFERENCE - # Direction difference. If the difference between directions - # for sub-ranges is greater than or equal to this value, - # the different directions will be noted in the phrase. - # Units are degrees - return { - "Wind": 90, # degrees - "Swell":60, # degrees - "Swell2":60, # degrees - } - - def element_outUnits_dict(self, tree, node): - dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) - dict["Visibility"] = "NM" - return dict - - def scalar_difference_nlValue_dict(self, tree, node): - # Scalar difference. If the difference between scalar values - # for 2 sub-periods is greater than or equal to this value, - # the different values will be noted in the phrase. 
- return { - "WindGust": 20, # knots or mph depending on product - "Period": 5, # seconds - "WaveHeight": self.waveht_scalar_value ,#0, # in feet - "WindWaveHgt": 5, # feet - } - - def waveht_scalar_value(self,tree,node,elementName,elementName1): - # calculating the scalar value for changes based on wave height - wave = tree.stats.get("WaveHeight", node.getTimeRange(), node.getAreaLabel(), - mergeMethod="Max") -# print wave, "Wave!" - if wave is None: - return 10 - if wave <= 6: - rtnval = 6 - else: - val = wave * .25 - rtnval = int(val+0.5) - - def periodCombining_elementList(self, tree, node): - # Weather Elements to determine whether to combine periods - #return ["Sky", "Wind", "Wx", "PoP", "MaxT", "MinT"] - # Marine - return ["WaveHeight", "Wind", "Wx"] - - # WxPhrases Overrides - def pop_wx_lower_threshold(self, tree, node): - # Always report weather - return 0 - - # MarinePhrases Overrides - def seasWaveHeight_element(self, tree, node): - # Weather element to use for reporting seas - # "combined seas 10 to 15 feet." - # IF above wind or swell thresholds - return "WaveHeight" - - def waveHeight_wind_threshold(self, tree, node): - # wind value above which waveHeight is reported vs. wind waves - # Unit is knots - return 0 - - def splitDay24HourLabel_flag(self, tree, node): - # Return 0 to have the TimeDescriptor module label 24 hour periods - # with simply the weekday name (e.g. Saturday) - # instead of including the day and night periods - # (e.g. Saturday and Saturday night) - # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" - # is set to zero. - # NOTE: This applied only to periods that are exactly 24-hours in length. - # Periods longer than that will always be split into day and night labels - # (e.g. 
SUNDAY THROUGH MONDAY NIGHT) - compName = node.getComponentName() - if compName == "OFFExtended": - return 0 - else: - return 1 - - def _skipAreas(self, argDict): - # These are edit areas that the formatter will skip - return [] - - def inlandWatersAreas(self, tree, node): - # List of edit area names that are inland or bay waters - # as opposed to "seas" - # The phrasing for these areas will be treated differently - # (see the waveRange_phrase) - # - # e.g. - # return ["TampaBayWaters"] - return ["area3"] - - ######################################################################## - # COMPONENT PRODUCT DEFINITIONS - ######################################################################## - - def _PoP_analysisMethod(self, componentName): - # Alternative PoP analysis methods for consistency between PoP and Wx - #return self.maxMode - #return self.maximum - return self.stdDevMaxAvg - - def OFFPeriod(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - - "analysisList": [ - # NOTE: Choose from the following analysis options. - # Do not remove the "vectorMinMax" analysis for - # "Wind". This is necessary to get an absolute max if - # the useWindsForGusts flag is on. - - # Use the following if you want moderated ranges - # (e.g. N WIND 10 to 20 KT) - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. 
- ("Wind", self.vectorModeratedMinMax, [3]), - ("Wind", self.vectorMinMax, [12]), - ("WindGust", self.moderatedMax, [3]), - ("WaveHeight", self.moderatedMinMax, [6]), - ("WindWaveHgt", self.moderatedMinMax, [6]), - ("Swell", self.vectorModeratedMinMax, [6]), - ("Swell2", self.vectorModeratedMinMax, [6]), - ("Period", self.moderatedMinMax, [6]), - ("Period2", self.moderatedMinMax, [6]), - ("Wx", self.rankedWx, [6]), - ("T", self.minMax), - ("PoP", self._PoP_analysisMethod("OFFPeriod"), [6]), - ("PoP", self.binnedPercent, [6]), - - # Use the following if you want moderated - # single values (e.g. N WIND 20 KT). - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # NOTE: If you use these methods, include and uncomment - # the "combine_singleValues_flag_dict" in your Local file (see below) - #("Wind", self.vectorModeratedMax, [3]), - #("Wind", self.vectorMinMax, [12]), - #("WindGust", self.moderatedMax, [3]), - #("WaveHeight", self.moderatedMax, [6]), - #("WindWaveHgt", self.moderatedMax, [6]), - #("Swell", self.vectorModeratedMax, [6]), - #("Swell2", self.vectorModeratedMax, [6]), - #("Period", self.moderatedMax, [6]), - #("Period2", self.moderatedMax, [6]), - #("Wx", self.rankedWx, [6]), - #("T", self.minMax), - #("PoP", self._PoP_analysisMethod("OFFPeriod")), - #("PoP", self.binnedPercent, [6]), - - # Use the following if you want absolute ranges. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. 
- # Split time range in quarters for Wind and WindGust - #("Wind", self.vectorMinMax, [3]), - #("Wind", self.vectorMinMax, [12]), - #("WindGust", self.maximum, [3]), - #("WaveHeight", self.minMax, [6]), - #("WindWaveHgt", self.minMax, [6]), - # Split time range in half for Wx and Swell - #("Swell", self.vectorMinMax, [6]), - #("Swell2", self.vectorMinMax, [6]), - #("Period", self.avg, [6]), - #("Period2", self.avg, [6]), - #("Wx", self.rankedWx, [6]), - #("T", self.minMax), - #("PoP", self._PoP_analysisMethod("OFFPeriod")), - #("PoP", self.binnedPercent, [6]), - ], - - "phraseList":[ - # WINDS - self.marine_wind_withGusts_phrase, - # Alternative: - #self.marine_wind_phrase, - #self.gust_phrase, - # WAVES - self.wave_withPeriods_phrase, - # Alternative: - #self.wave_phrase, - # SWELLS AND PERIODS - self.swell_withPeriods_phrase, - # Alternative: - #self.swell_phrase, - #self.period_phrase, - # WEATHER - self.weather_phrase, - self.visibility_phrase, - ], - - } - - def OFFPeriodMid(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - - "analysisList": [ - # NOTE: Choose from the following analysis options. - # Do not remove the "vectorMinMax" analysis for - # "Wind". This is necessary to get an absolute max if - # the useWindsForGusts flag is on. - - # Use the following if you want moderated ranges - # (e.g. N WIND 10 to 20 KT) - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - ("Wind", self.vectorModeratedMinMax, [12]), - ("Wind", self.vectorMinMax, [12]), - ("WindGust", self.moderatedMax, [12]), - ("WaveHeight", self.moderatedMinMax, [12]), - ("WindWaveHgt", self.moderatedMinMax, [12]), - ("Swell", self.vectorModeratedMinMax, [12]), - ("Swell2", self.vectorModeratedMinMax, [12]), - - # Use the following if you want moderated - # single values (e.g. 
N WIND 20 KT). - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # NOTE: If you use these methods, include and uncomment - # the "combine_singleValues_flag_dict" in your Local file (see below) - #("Wind", self.vectorModeratedMax, [12]), - #("Wind", self.vectorMinMax, [12]), - #("WindGust", self.moderatedMax, [12]), - #("WaveHeight", self.moderatedMax, [12]), - #("WindWaveHgt", self.moderatedMax, [12]), - #("Swell", self.vectorModeratedMax, [12]), - #("Swell2", self.vectorModeratedMax, [12]), - - # Use the following if you want absolute ranges. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - # Split time range in quarters for Wind and WindGust - #("Wind", self.vectorMinMax, [12]), - #("Wind", self.vectorMinMax, [12]), - #("WindGust", self.maximum, [3]), - #("WaveHeight", self.minMax, [12]), - #("WindWaveHgt", self.minMax, [12]), - # Split time range in half for Wx and Swell - #("Swell", self.vectorMinMax, [12]), - #("Swell2", self.vectorMinMax, [12]), - ], - - "phraseList":[ - # WINDS - self.marine_wind_withGusts_phrase, - # Alternative: - #self.marine_wind_phrase, - #self.gust_phrase, - # WAVES - #self.wave_withPeriods_phrase, - # Alternative: - self.wave_phrase, - # SWELLS AND PERIODS - self.swell_phrase, - ], - } - - def combine_singleValues_flag_dict(self, tree, node): - # Dictionary of weather elements to combine using single values - # rather than ranges. If you are using single value statistics - # for a weather element, you will want to set this flag to 1. - # If there is no entry for an element, min/max combining will - # be done. 
- # The value for an element may be a phrase or a method - # If a method, it will be called with arguments: - # tree, node - dict = TextRules.TextRules.combine_singleValues_flag_dict(self, tree, node) - #dict["Wind"] = 1 - #dict["WindGust"] = 1 - #dict["Swell"] = 1 - #dict["Swell2"] = 1 - #dict["WindWaveHgt"] = 1 - #dict["WaveHeight"] = 1 - return dict - - def OFFExtended(self): - return { "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [ - # NOTE: Choose from the following analysis options. - # Do not remove the "vectorMinMax" analysis for - # "Wind". This is necessary to get an absolute max if - # the useWindsForGusts flag is on. - - # Use the following if you want moderated ranges - # (e.g. N WIND 10 to 20 KT) - # Set the moderating percentage in the "moderated_dict" - # dictionary module. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - ("Wind", self.vectorModeratedMinMax, [24]), - ("WindGust", self.moderatedMinMax, [24]), - ("WaveHeight", self.moderatedMinMax, [24]), - ("WindWaveHgt", self.moderatedMinMax, [24]), - #("Wx", self.rankedWx), - #("T", self.minMax), # needed for weather_phrase - #("PoP", self._PoP_analysisMethod("OFFExtended")), - #("PoP", self.binnedPercent), - #("Swell", self.vectorModeratedMinMax, [12]), - #("Swell2", self.vectorModeratedMinMax, [12]), - - # Use the following if you want moderated - # single values (e.g. N WIND 20 KT). - # Set the moderating percentage in the "moderated_dict" - # dictionary module. 
- # NOTE: If you use these methods, include and uncomment - # the "combine_singleValues_flag_dict" in your Local file (see below) - #("Wind", self.vectorModeratedMax, [6]), - #("WindGust", self.moderatedMax, [12]), - #("WaveHeight", self.moderatedMax, [12]), - #("WindWaveHgt", self.moderatedMax, [12]), - #("Wx", self.rankedWx), - #("T", self.minMax), - #("PoP", self._PoP_analysisMethod("OFFExtended")), - #("PoP", self.binnedPercent), - #("Swell", self.vectorModeratedMax, [12]), - #("Swell2", self.vectorModeratedMax, [12]), - - # Use the following if you want absolute ranges. - # Set the maximum range values in the "maximum_range_nlValue_dict" - # dictionary module. - # dictionary module. - #("Wind", self.vectorMinMax, [6]), - #("WindGust", self.minMax, [12]), - #("WaveHeight", self.minMax, [12]), - #("WindWaveHgt", self.minMax, [12]), - #("Wx", self.rankedWx), - #("T", self.minMax), - #("PoP", self._PoP_analysisMethod("OFFExtended")), - #("PoP", self.binnedPercent), - #("Swell", self.vectorMinMax, [12]), - #("Swell2", self.vectorMinMax, [12]), - ], - "phraseList":[ - # WIND - self.marine_wind_phrase, - # WAVEHEIGHT - #self.wave_withPeriods_phrase, - # Alternative: - self.wave_phrase, - # SWELLS AND PERIODS - #self.swell_withPeriods_phrase, - # Alternative: - #self.swell_phrase, - #self.period_phrase, - # WEATHER - #self.weather_phrase, - #self.visibility_phrase, - ], - } - - def generateForecast(self, argDict): - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." 
- - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - skipAreas = self._skipAreas(argDict) - argDict["editArea"] = (editArea, areaLabel) - if self.currentAreaContains(argDict, skipAreas): - continue - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._language = argDict["language"] - return None - - def _determineTimeRanges(self, argDict): - # Set up the Narrative Definition and initial Time Range - self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict)) - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._expireTime = self._issuanceInfo.expireTime() - self._issueTime = self._issuanceInfo.issueTime() - 
self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - if self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - staticIssueTime=re.sub(r'(\d{3,4} [AP]M).*',r'\1',self._productIssuance) - self._timeLabel = staticIssueTime + " " + self.getCurrentTime( - argDict, " %Z %a %b %e %Y", stripLeading=1) - # Re-calculate issueTime - self._issueTime = self.strToGMT(staticIssueTime) - expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) - self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") - return None - - def _sampleData(self, argDict): - # Sample and analyze the data for the narrative - self._narrativeProcessor = ForecastNarrative.ForecastNarrative() - error = self._narrativeProcessor.getNarrativeData( - argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) - if error is not None: - return error - return None - - def _preProcessProduct(self, fcst, argDict): - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - - issuedByString = self.getIssuedByString() - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - fcst = fcst + self._Text1() - try: - text2 = self._Text2(argDict["host"]) - except: - text2 = self._Text2() - fcst = fcst + text2 - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area combination - print "Generating Forecast 
for", areaLabel - areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas) - fcst = fcst + areaHeader - - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - - argDict["language"] = self._language - # Generate Narrative Forecast for Edit Area - fcstSegment = self._narrativeProcessor.generateForecast( - argDict, editArea, areaLabel) - - # Handle abbreviations - if self._useAbbreviations == 1: - fcstSegment = self.marine_abbreviateText(fcstSegment) - fcstSegment = re.sub(r'\n', r' ',fcstSegment) - fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) - fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) - fcst = fcst + fcstSegment - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst + "\n$$\n\n" - - def _postProcessProduct(self, fcst, argDict): - #fcst = fcst + """NNNN """ - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - def _issuance_list(self, argDict): - # This method sets up configurable issuance times with associated - # narrative definitions. See the Text Product User Guide for documentation. 
- if self._definition["includeEveningPeriod"] == 1: - narrativeDefAM = [ - ("OFFPeriod", "period1"), - ("OFFPeriod", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), - ("OFFPeriodMid", 12), - ("OFFExtended", 24), ("OFFExtended", 24) - ] - narrativeDefPM = [ - ("OFFPeriod", "period1"), - ("OFFPeriod", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), - ("OFFPeriodMid", 12), - ("OFFExtended", 24), ("OFFExtended", 24) - ] - else: - narrativeDefAM = [ - ("OFFPeriod", "period1"), - ("OFFPeriod", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 24), - ("OFFExtended", 24), ("OFFExtended", 24) - ] - narrativeDefPM = [ - ("OFFPeriod", "period1"), - ("OFFPeriod", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 24), - ("OFFExtended", 24), ("OFFExtended", 24) - ] - - return [ - ("400 AM", self.DAY(), self.NIGHT(), 16, - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - #("1030 AM", "issuanceHour", self.NIGHT(), 16, - # ".TODAY...", "early in the morning", "late in the afternoon", - # 1, narrativeDefAM), - # End times are tomorrow: - ("400 PM", self.NIGHT(), 24 + self.DAY(), 24 + 4, - ".TONIGHT...", "late in the night", "early in the evening", - 1, narrativeDefPM), - #("1030 PM", "issuanceHour", 24 + self.DAY(), 24 + 4, - # ".TONIGHT...", "late in the night", "early in the evening", - # 1, narrativeDefPM), - ] - - - def lateDay_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late day -- default 3pm-6pm - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateDayPhrase() - else: - return "late in the afternoon" - - def lateNight_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late night -- default 3am-6am - if 
self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateNightPhrase() - else: - return "early in the morning" - - def significant_wx_visibility_subkeys(self, tree, node): - # Weather values that constitute significant weather to - # be reported regardless of visibility. - # If your visibility_wx_threshold is None, you do not need - # to set up these subkeys since weather will always be - # reported. - # Set of tuples of weather key search tuples in the form: - # (cov type inten) - # Wildcards are permitted. - return [("* *")] - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. - def allowedHazards(self): - - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", - "EXP"] - marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] - return [ - ('HU.W', tropicalActions, 'Tropical'), # HURRICANE WARNING - ('TY.W', tropicalActions, 'Tropical'), # TYPHOON WARNING - ('TR.W', tropicalActions, 'Tropical'), # TROPICAL STORM WARNING - ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING - ('SR.W', marineActions, 'Marine'), # STORM WARNING - ('GL.W', marineActions, 'Marine'), # GALE WARNING - ('SE.W', marineActions, 'Marine'), # HAZARDOUS SEAS - ('UP.W', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING - ('FG.Y', allActions, 'Fog'), # DENSE FOG ADVISORY - ('SM.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY - ('UP.Y', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY ADVISORY - ('AF.W', allActions, 'Ashfall'), # VOLCANIC ASHFALL WARNING - ('AF.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY - ] +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. 
EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: OFF (Offshore Forecast) +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# OFF.py, OFF ___Definition, OFF__Override +#------------------------------------------------------------------------- +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# defaultEditAreas defines edit areas, default is Combinations +# +# productName defines name of product e.g. "Coastal Waters Rorecast" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "OFFBOS" +# areaName (opt.) Area name for product header, such as "Western New York" +# wfoCityState City,state that the WFO is located in, such as "Buffalo, NY" +# +# synopsisUGC UGC code for Synopsis +# synopsisHeading Heading for Synopsis +# +# Optional Configuration Items +# +# editAreaSuffix default None. 
Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined or the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# hazardSamplingThreshold Defines the percentage coverage or number of +# grid points in a zone that must contain the hazard +# in order for it to be considered. Tuple (percent, points) +# +# periodCombining If 1, an attempt will be made to combine components +# or time periods into one. Otherwise no period +# combining will will be done. +# includeEveningPeriod Include a 6 hour Evening period on the 3rd day +# useAbbreviations +# If 1, use marine abbreviations e.g. 
TSTM instead of THUNDERSTORM, +# NW instead of NORTHWEST +# (See marine_abbreviateText in the TextRules module) +# +# Weather-related flags +# hoursSChcEnds - specifies hours past the beginning of the first +# first period of the product to stop including 'Slight +# Chance' or 'Isolated' weather types (ERH policy +# allows values of 1-5 * 12 hour periods) +# +# areaDictionary Modify the AreaDictionary utility with UGC +# information about zones +# +# useHolidays Set to 1 to use holidays in the time period labels +# +# Trouble-shooting items +# passLimit -- Limit on passes allowed through Narrative Tree +# trace -- Set to 1 to turn on trace through Narrative Tree +# +# OVERRIDES +# +# Required Overrides +# +# _Text1(), _Text2() Descriptive text for header +# +# NARRATIVE CUSTOMIZATION POINTS +# The phrases in this product can be customized in many ways by overriding +# infrastructure methods in the Local file. +# You will see common overrides in the Local file and you may change them +# in that there. +# For further customization, you can determine which phrases your product is +# using by examining the Component Product Definitions below. +# Then, you can look up the phrase in the Text Product User Guide which will +# describe the all the relevant override methods associated with the phrase. +# Refer to the Customization section of the Text Product User Guide +# for step-by-step information. 
+# +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Wind (every 3 hours to 3 days, then every 6 hours to 7 days) +# WaveHeight and/or WindWaveHgt +# (every 6 hours to 3 days, then every 12 hours to 7 days) +# Wx (every 6 hours to 3 days, then every 12 hours to 7 days) +# Optional: +# WindGust (every 3 hours to 7 days) +# Swell, Swell2, Period, Period2 (every 6 hours to 7 days) +#------------------------------------------------------------------------- +# Edit Areas Needed: None +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: +# Combinations +#------------------------------------------------------------------------- +# Component Products: +# OFFPeriod (component) +# OFFPeriodMid (component) +# OFFExtended (component) +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". 
+#------------------------------------------------------------------------- +# Additional Information: +# +# COMMON OVERRIDES +# from OFF: +# _Text1 +# _Text2 +# _issuance_list +# riverBarForecast_dict +# from MarinePhrases +# inlandWatersAreas +# inlandWatersWave_element +# seasWaveHeight_element +# seasWindWave_element +# waveHeight_wind_threshold +# marine_wind_flag +# marine_wind_combining_flag +# marine_wind_verbose_flag +# from ConfigVariables +# phrase_descriptor_dict +# phrase_connector_dict +# null_nlValue_dict +# first_null_phrase_dict +# null_phrase_dict +# maximum_range_nlValue_dict +# combine_singleValues_flag_dict +# from WxPhrases: +# embedded_visibility_flag +# visibility_wx_threshold +# significant_wx_visibility_subkeys +# wxCoverageDescriptors +# wxTypeDescriptors +# wxAttributeDescriptors +# wxIntensityDescriptors +# wxCombinations +# combine_T_RW +# from SampleAnalysis +# moderated_dict +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Marine Services. +#------------------------------------------------------------------------- + +import TextRules +import SampleAnalysis +import ForecastNarrative +import time, string, re, types +import TimeRange + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [] + Definition = { + "type": "smart", + "displayName": "None", + "database": "Official", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/OFF_.txt", + "debug": 0, + # Name of map background for creating Combinations + "mapNameForCombinations": "Marine_Zones_", + + "lineLength": 66, + ## Edit Areas: Create Combinations file with edit area combinations. 
+ "showZoneCombiner" : 1, # 1 to cause zone combiner to display + "defaultEditAreas" : "Combinations_OFF__", + "editAreaSuffix": None, + # product identifiers + "productName": "Offshore Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "Georgia" -- optional + "wfoCityState": "", # Location of WFO - city state + + "synopsisUGC": "", # UGC code for synopsis + "synopsisHeading": ".SYNOPSIS...",# Heading for synopsis + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + + "hazardSamplingThreshold": (10, None), #(%cov, #points) + + "fixedExpire": 1, #ensure VTEC actions don't affect segment expiration time + + "periodCombining" : 0, # If 1, combine periods, if possible + # Product-specific variables: + # Set to one if you want a 6-hour evening period instead of + # 18-hour period without lows + "includeEveningPeriod": 1, + "useAbbreviations": 0, + + # Weather-related flags + "hoursSChcEnds": 24, + + # Area Dictionary -- Descriptive information about zones + "areaDictionary": "AreaDictionary", + "useHolidays": 0, # Set to 1 to use holidays in the time period labels + # Language + "language": "english", + + # Trouble-shooting items + "passLimit": 20, # Limit on passes allowed through + # Narrative Tree + "trace": 0, # Set to 1 to turn on trace through + # Narrative Tree for trouble-shooting + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + ######################################################################## + # OVERRIDING METHODS, THRESHOLDS AND VARIABLES + ######################################################################## + # MUST BE OVERRIDDEN IN LOCAL FILE + def _Text1(self): + return "" + + def _Text2(self): + synopsis = "" + + # Try to get Synopsis from previous CWF + #productID = 
"BOSCWFBOS" + #synopsis = self.getPreviousProduct(productID, "SYNOPSIS") + # Clean up the previous synopsis + #synopsis = re.sub(r'\n', r' ', synopsis) + #synopsis = self.endline(synopsis, linelength=66, breakStr=" ") + + # Convert absolute time pointer to a tuple of values like that + # returned by time.gmtime() + #expTuple = time.strptime('%s' % (self._expireTime), + # '%b %d %y %H:%M:%S GMT') + expTuple = self._expireTime.utctimetuple() + + # Format expiration time for inclusion in synopsis header + expTime = time.strftime('%d%H%M', expTuple) + + return "%s-%s-\n" % (self._synopsisUGC, expTime) + \ + self._timeLabel + "\n\n" + \ + self._synopsisHeading + "\n" + \ + synopsis + "\n$$\n\n" + + ######################################################################## + + # SampleAnalysis overrides + def moderated_dict(self, parmHisto, timeRange, componentName): + # This dictionary defines the low and high limit at which + # outliers will be removed when calculating moderated stats. + # By convention the first value listed is the percentage + # allowed for low values and second the percentage allowed + # for high values. + dict = SampleAnalysis.SampleAnalysis.moderated_dict(self, parmHisto, timeRange, componentName) + dict["Wind"] = (0, 20) + dict["WaveHeight"] = (5,5) + return dict + + def null_nlValue_dict(self, tree, node): + # Threshold below which values are considered "null" and not reported. + # Units depend on the element and product + dict = TextRules.TextRules.null_nlValue_dict(self, tree, node) + dict["WaveHeight"] = 6 + dict["WindWaveHgt"] = 6 + dict["Wind"] = 15 + dict["WindGust"] = 120 + dict["Swell"] = 5 + dict["Visibility"] = 5 # in nautical miles. Report if less than this value. 
+ return dict + + # ConfigVariables Overrides + def phrase_descriptor_dict(self, tree, node): + # Descriptors for phrases + dict = TextRules.TextRules.phrase_descriptor_dict(self, tree, node) + dict["Wind"] = "winds" + dict["WaveHeight"] = "seas" + dict["seas"] = "seas" + dict["mixed swell"] = "mixed swell" + dict["waves"] = "seas" + dict["dominant period"] = "dominant period" + # Apply only if marine_wind_flag (see above) is set to 1: + dict["hurricane force winds to"] = "hurricane force winds to" + dict["storm force winds to"] = "storm force winds to" + dict["gales to"] = "gales to" + dict["up to"] = "up to" + dict["around"] = "" + return dict + + def first_null_phrase_dict(self, tree, node): + # Phrase to use if values THROUGHOUT the period or + # in the first period are Null (i.e. below threshold OR NoWx) + # E.g. LIGHT WINDS. or LIGHT WINDS BECOMING N 5 MPH. + dict = TextRules.TextRules.first_null_phrase_dict(self, tree, node) + dict["WaveHeight"] = "seas 6 feet or less" + dict["WindWaveHgt"] = "seas 6 feet or less" + dict["Wind"] = "winds 15 knots or less" + dict["Swell"] = "" + return dict + + def null_phrase_dict(self, tree, node): + # Phrase to use for null values in subPhrases other than the first + # Can be an empty string + # E.g. 
"NORTH WINDS 20 to 25 KNOTS BECOMING LIGHT" + dict = TextRules.TextRules.null_phrase_dict(self, tree, node) + dict["WaveHeight"] = "6 feet or less" + dict["WindWaveHgt"] = "6 feet or less" + dict["Wind"] = "less than 15 knots" + dict["Wx"] = "" + dict["Swell"] = "light" + return dict + + def phrase_connector_dict(self, tree, node): + # Dictionary of connecting phrases for various + # weather element phrases + # The value for an element may be a phrase or a method + # If a method, it will be called with arguments: + # tree, node + dict = TextRules.TextRules.phrase_connector_dict(self, tree, node) + dict["rising to"] = { + "Wind": ", rising to ", + "Swell": ", building to ", + "Swell2": ", building to ", + "WaveHeight": ", building to ", + "WindWaveHgt": ", building to ", + } + + dict["easing to"] = { + "Wind": ", diminishing to ", + "Swell": ", subsiding to ", + "Swell2": ", subsiding to ", + "WaveHeight": ", subsiding to ", + "WindWaveHgt": ", subsiding to ", + } + dict["backing"] = { + "Wind": ", becoming ", + "Swell": ", becoming ", + "Swell2": ", becoming ", + "WaveHeight": ", becoming ", + "WindWaveHgt": ", becoming ", + } + + dict["veering"] = { + "Wind": ", becoming ", + "Swell": ", becoming ", + "Swell2": ", becoming ", + "WaveHeight": ", becoming ", + "WindWaveHgt": ", becoming ", + } + + dict["becoming"] = ", becoming " + dict["increasing to"] = { + "Wind": ", rising to ", + "Swell": ", building to ", + "Swell2": ", building to ", + "WaveHeight": ", building to ", + "WindWaveHgt": ", building to ", + } + dict["decreasing to"] = { + "Wind": ", diminishing to ", + "Swell": ", subsiding to ", + "Swell2": ", subsiding to ", + "WaveHeight": ", subsiding to ", + "WindWaveHgt": ", subsiding to ", + } + dict["shifting to the"] = ", shifting to the " + dict["becoming onshore"] = " becoming onshore " + dict["then"] = {"Wx": ". 
", + "Vector": ", becoming ", + "Scalar": ", becoming ", + "otherwise": ", becoming ", + } + return dict + + def maximum_range_nlValue_dict(self, tree, node): + # Maximum range to be reported within a phrase + # e.g. 5 to 10 mph + # Units depend on the product + dict = TextRules.TextRules.maximum_range_nlValue_dict(self, tree, node) + dict["Wind"] = 10 + dict["Swell"] = 5 + dict["Swell2"] = 5 + dict["WaveHeight"] = 4 + dict["WindWaveHgt"] = 2 + return dict + + def rounding_method_dict(self, tree, node): + # Special rounding methods + # + return { + "Wind": self.marineRounding, + } + + def vector_mag_difference_nlValue_dict(self, tree, node): + # Replaces WIND_THRESHOLD + # Magnitude difference. If the difference between magnitudes + # for sub-ranges is greater than or equal to this value, + # the different magnitudes will be noted in the phrase. + # Units can vary depending on the element and product + return { + "Wind": 10, + "Swell": 5, # ft + "Swell2": 5, # ft + } + + def vector_dir_difference_dict(self, tree, node): + # Replaces WIND_DIR_DIFFERENCE + # Direction difference. If the difference between directions + # for sub-ranges is greater than or equal to this value, + # the different directions will be noted in the phrase. + # Units are degrees + return { + "Wind": 90, # degrees + "Swell":60, # degrees + "Swell2":60, # degrees + } + + def element_outUnits_dict(self, tree, node): + dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) + dict["Visibility"] = "NM" + return dict + + def scalar_difference_nlValue_dict(self, tree, node): + # Scalar difference. If the difference between scalar values + # for 2 sub-periods is greater than or equal to this value, + # the different values will be noted in the phrase. 
+ return { + "WindGust": 20, # knots or mph depending on product + "Period": 5, # seconds + "WaveHeight": self.waveht_scalar_value ,#0, # in feet + "WindWaveHgt": 5, # feet + } + + def waveht_scalar_value(self,tree,node,elementName,elementName1): + # calculating the scalar value for changes based on wave height + wave = tree.stats.get("WaveHeight", node.getTimeRange(), node.getAreaLabel(), + mergeMethod="Max") +# print wave, "Wave!" + if wave is None: + return 10 + if wave <= 6: + rtnval = 6 + else: + val = wave * .25 + rtnval = int(val+0.5) + + def periodCombining_elementList(self, tree, node): + # Weather Elements to determine whether to combine periods + #return ["Sky", "Wind", "Wx", "PoP", "MaxT", "MinT"] + # Marine + return ["WaveHeight", "Wind", "Wx"] + + # WxPhrases Overrides + def pop_wx_lower_threshold(self, tree, node): + # Always report weather + return 0 + + # MarinePhrases Overrides + def seasWaveHeight_element(self, tree, node): + # Weather element to use for reporting seas + # "combined seas 10 to 15 feet." + # IF above wind or swell thresholds + return "WaveHeight" + + def waveHeight_wind_threshold(self, tree, node): + # wind value above which waveHeight is reported vs. wind waves + # Unit is knots + return 0 + + def splitDay24HourLabel_flag(self, tree, node): + # Return 0 to have the TimeDescriptor module label 24 hour periods + # with simply the weekday name (e.g. Saturday) + # instead of including the day and night periods + # (e.g. Saturday and Saturday night) + # NOTE: If you set this flag to 1, make sure the "nextDay24HourLabel_flag" + # is set to zero. + # NOTE: This applied only to periods that are exactly 24-hours in length. + # Periods longer than that will always be split into day and night labels + # (e.g. 
SUNDAY THROUGH MONDAY NIGHT) + compName = node.getComponentName() + if compName == "OFFExtended": + return 0 + else: + return 1 + + def _skipAreas(self, argDict): + # These are edit areas that the formatter will skip + return [] + + def inlandWatersAreas(self, tree, node): + # List of edit area names that are inland or bay waters + # as opposed to "seas" + # The phrasing for these areas will be treated differently + # (see the waveRange_phrase) + # + # e.g. + # return ["TampaBayWaters"] + return ["area3"] + + ######################################################################## + # COMPONENT PRODUCT DEFINITIONS + ######################################################################## + + def _PoP_analysisMethod(self, componentName): + # Alternative PoP analysis methods for consistency between PoP and Wx + #return self.maxMode + #return self.maximum + return self.stdDevMaxAvg + + def OFFPeriod(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + + "analysisList": [ + # NOTE: Choose from the following analysis options. + # Do not remove the "vectorMinMax" analysis for + # "Wind". This is necessary to get an absolute max if + # the useWindsForGusts flag is on. + + # Use the following if you want moderated ranges + # (e.g. N WIND 10 to 20 KT) + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. 
+ ("Wind", self.vectorModeratedMinMax, [3]), + ("Wind", self.vectorMinMax, [12]), + ("WindGust", self.moderatedMax, [3]), + ("WaveHeight", self.moderatedMinMax, [6]), + ("WindWaveHgt", self.moderatedMinMax, [6]), + ("Swell", self.vectorModeratedMinMax, [6]), + ("Swell2", self.vectorModeratedMinMax, [6]), + ("Period", self.moderatedMinMax, [6]), + ("Period2", self.moderatedMinMax, [6]), + ("Wx", self.rankedWx, [6]), + ("T", self.minMax), + ("PoP", self._PoP_analysisMethod("OFFPeriod"), [6]), + ("PoP", self.binnedPercent, [6]), + + # Use the following if you want moderated + # single values (e.g. N WIND 20 KT). + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # NOTE: If you use these methods, include and uncomment + # the "combine_singleValues_flag_dict" in your Local file (see below) + #("Wind", self.vectorModeratedMax, [3]), + #("Wind", self.vectorMinMax, [12]), + #("WindGust", self.moderatedMax, [3]), + #("WaveHeight", self.moderatedMax, [6]), + #("WindWaveHgt", self.moderatedMax, [6]), + #("Swell", self.vectorModeratedMax, [6]), + #("Swell2", self.vectorModeratedMax, [6]), + #("Period", self.moderatedMax, [6]), + #("Period2", self.moderatedMax, [6]), + #("Wx", self.rankedWx, [6]), + #("T", self.minMax), + #("PoP", self._PoP_analysisMethod("OFFPeriod")), + #("PoP", self.binnedPercent, [6]), + + # Use the following if you want absolute ranges. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. 
+ # Split time range in quarters for Wind and WindGust + #("Wind", self.vectorMinMax, [3]), + #("Wind", self.vectorMinMax, [12]), + #("WindGust", self.maximum, [3]), + #("WaveHeight", self.minMax, [6]), + #("WindWaveHgt", self.minMax, [6]), + # Split time range in half for Wx and Swell + #("Swell", self.vectorMinMax, [6]), + #("Swell2", self.vectorMinMax, [6]), + #("Period", self.avg, [6]), + #("Period2", self.avg, [6]), + #("Wx", self.rankedWx, [6]), + #("T", self.minMax), + #("PoP", self._PoP_analysisMethod("OFFPeriod")), + #("PoP", self.binnedPercent, [6]), + ], + + "phraseList":[ + # WINDS + self.marine_wind_withGusts_phrase, + # Alternative: + #self.marine_wind_phrase, + #self.gust_phrase, + # WAVES + self.wave_withPeriods_phrase, + # Alternative: + #self.wave_phrase, + # SWELLS AND PERIODS + self.swell_withPeriods_phrase, + # Alternative: + #self.swell_phrase, + #self.period_phrase, + # WEATHER + self.weather_phrase, + self.visibility_phrase, + ], + + } + + def OFFPeriodMid(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + + "analysisList": [ + # NOTE: Choose from the following analysis options. + # Do not remove the "vectorMinMax" analysis for + # "Wind". This is necessary to get an absolute max if + # the useWindsForGusts flag is on. + + # Use the following if you want moderated ranges + # (e.g. N WIND 10 to 20 KT) + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + ("Wind", self.vectorModeratedMinMax, [12]), + ("Wind", self.vectorMinMax, [12]), + ("WindGust", self.moderatedMax, [12]), + ("WaveHeight", self.moderatedMinMax, [12]), + ("WindWaveHgt", self.moderatedMinMax, [12]), + ("Swell", self.vectorModeratedMinMax, [12]), + ("Swell2", self.vectorModeratedMinMax, [12]), + + # Use the following if you want moderated + # single values (e.g. 
N WIND 20 KT). + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # NOTE: If you use these methods, include and uncomment + # the "combine_singleValues_flag_dict" in your Local file (see below) + #("Wind", self.vectorModeratedMax, [12]), + #("Wind", self.vectorMinMax, [12]), + #("WindGust", self.moderatedMax, [12]), + #("WaveHeight", self.moderatedMax, [12]), + #("WindWaveHgt", self.moderatedMax, [12]), + #("Swell", self.vectorModeratedMax, [12]), + #("Swell2", self.vectorModeratedMax, [12]), + + # Use the following if you want absolute ranges. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + # Split time range in quarters for Wind and WindGust + #("Wind", self.vectorMinMax, [12]), + #("Wind", self.vectorMinMax, [12]), + #("WindGust", self.maximum, [3]), + #("WaveHeight", self.minMax, [12]), + #("WindWaveHgt", self.minMax, [12]), + # Split time range in half for Wx and Swell + #("Swell", self.vectorMinMax, [12]), + #("Swell2", self.vectorMinMax, [12]), + ], + + "phraseList":[ + # WINDS + self.marine_wind_withGusts_phrase, + # Alternative: + #self.marine_wind_phrase, + #self.gust_phrase, + # WAVES + #self.wave_withPeriods_phrase, + # Alternative: + self.wave_phrase, + # SWELLS AND PERIODS + self.swell_phrase, + ], + } + + def combine_singleValues_flag_dict(self, tree, node): + # Dictionary of weather elements to combine using single values + # rather than ranges. If you are using single value statistics + # for a weather element, you will want to set this flag to 1. + # If there is no entry for an element, min/max combining will + # be done. 
+ # The value for an element may be a phrase or a method + # If a method, it will be called with arguments: + # tree, node + dict = TextRules.TextRules.combine_singleValues_flag_dict(self, tree, node) + #dict["Wind"] = 1 + #dict["WindGust"] = 1 + #dict["Swell"] = 1 + #dict["Swell2"] = 1 + #dict["WindWaveHgt"] = 1 + #dict["WaveHeight"] = 1 + return dict + + def OFFExtended(self): + return { "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [ + # NOTE: Choose from the following analysis options. + # Do not remove the "vectorMinMax" analysis for + # "Wind". This is necessary to get an absolute max if + # the useWindsForGusts flag is on. + + # Use the following if you want moderated ranges + # (e.g. N WIND 10 to 20 KT) + # Set the moderating percentage in the "moderated_dict" + # dictionary module. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + ("Wind", self.vectorModeratedMinMax, [24]), + ("WindGust", self.moderatedMinMax, [24]), + ("WaveHeight", self.moderatedMinMax, [24]), + ("WindWaveHgt", self.moderatedMinMax, [24]), + #("Wx", self.rankedWx), + #("T", self.minMax), # needed for weather_phrase + #("PoP", self._PoP_analysisMethod("OFFExtended")), + #("PoP", self.binnedPercent), + #("Swell", self.vectorModeratedMinMax, [12]), + #("Swell2", self.vectorModeratedMinMax, [12]), + + # Use the following if you want moderated + # single values (e.g. N WIND 20 KT). + # Set the moderating percentage in the "moderated_dict" + # dictionary module. 
+ # NOTE: If you use these methods, include and uncomment + # the "combine_singleValues_flag_dict" in your Local file (see below) + #("Wind", self.vectorModeratedMax, [6]), + #("WindGust", self.moderatedMax, [12]), + #("WaveHeight", self.moderatedMax, [12]), + #("WindWaveHgt", self.moderatedMax, [12]), + #("Wx", self.rankedWx), + #("T", self.minMax), + #("PoP", self._PoP_analysisMethod("OFFExtended")), + #("PoP", self.binnedPercent), + #("Swell", self.vectorModeratedMax, [12]), + #("Swell2", self.vectorModeratedMax, [12]), + + # Use the following if you want absolute ranges. + # Set the maximum range values in the "maximum_range_nlValue_dict" + # dictionary module. + # dictionary module. + #("Wind", self.vectorMinMax, [6]), + #("WindGust", self.minMax, [12]), + #("WaveHeight", self.minMax, [12]), + #("WindWaveHgt", self.minMax, [12]), + #("Wx", self.rankedWx), + #("T", self.minMax), + #("PoP", self._PoP_analysisMethod("OFFExtended")), + #("PoP", self.binnedPercent), + #("Swell", self.vectorMinMax, [12]), + #("Swell2", self.vectorMinMax, [12]), + ], + "phraseList":[ + # WIND + self.marine_wind_phrase, + # WAVEHEIGHT + #self.wave_withPeriods_phrase, + # Alternative: + self.wave_phrase, + # SWELLS AND PERIODS + #self.swell_withPeriods_phrase, + # Alternative: + #self.swell_phrase, + #self.period_phrase, + # WEATHER + #self.weather_phrase, + #self.visibility_phrase, + ], + } + + def generateForecast(self, argDict): + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + skipAreas = self._skipAreas(argDict) + argDict["editArea"] = (editArea, areaLabel) + if self.currentAreaContains(argDict, skipAreas): + continue + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._language = argDict["language"] + return None + + def _determineTimeRanges(self, argDict): + # Set up the Narrative Definition and initial Time Range + self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict)) + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._expireTime = self._issuanceInfo.expireTime() + self._issueTime = self._issuanceInfo.issueTime() + 
self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + if self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + staticIssueTime=re.sub(r'(\d{3,4} [AP]M).*',r'\1',self._productIssuance) + self._timeLabel = staticIssueTime + " " + self.getCurrentTime( + argDict, " %Z %a %b %e %Y", stripLeading=1) + # Re-calculate issueTime + self._issueTime = self.strToGMT(staticIssueTime) + expireTimeRange = TimeRange.TimeRange(self._expireTime, self._expireTime + 3600) + self._expireTimeStr = self.timeDisplay(expireTimeRange, "", "", "%d%H%M", "") + return None + + def _sampleData(self, argDict): + # Sample and analyze the data for the narrative + self._narrativeProcessor = ForecastNarrative.ForecastNarrative() + error = self._narrativeProcessor.getNarrativeData( + argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) + if error is not None: + return error + return None + + def _preProcessProduct(self, fcst, argDict): + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + + issuedByString = self.getIssuedByString() + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + fcst = fcst + self._Text1() + try: + text2 = self._Text2(argDict["host"]) + except: + text2 = self._Text2() + fcst = fcst + text2 + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area combination + print(("Generating 
Forecast for", areaLabel)) + areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas) + fcst = fcst + areaHeader + + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + + argDict["language"] = self._language + # Generate Narrative Forecast for Edit Area + fcstSegment = self._narrativeProcessor.generateForecast( + argDict, editArea, areaLabel) + + # Handle abbreviations + if self._useAbbreviations == 1: + fcstSegment = self.marine_abbreviateText(fcstSegment) + fcstSegment = re.sub(r'\n', r' ',fcstSegment) + fcstSegment = re.sub(r' (\.[A-Za-z])', r'\n\1',fcstSegment) + fcstSegment = self.endline(fcstSegment, linelength=self._lineLength) + fcst = fcst + fcstSegment + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + "\n$$\n\n" + + def _postProcessProduct(self, fcst, argDict): + #fcst = fcst + """NNNN """ + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + def _issuance_list(self, argDict): + # This method sets up configurable issuance times with associated + # narrative definitions. See the Text Product User Guide for documentation. 
+ if self._definition["includeEveningPeriod"] == 1: + narrativeDefAM = [ + ("OFFPeriod", "period1"), + ("OFFPeriod", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), + ("OFFPeriodMid", 12), + ("OFFExtended", 24), ("OFFExtended", 24) + ] + narrativeDefPM = [ + ("OFFPeriod", "period1"), + ("OFFPeriod", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), + ("OFFPeriodMid", 12), + ("OFFExtended", 24), ("OFFExtended", 24) + ] + else: + narrativeDefAM = [ + ("OFFPeriod", "period1"), + ("OFFPeriod", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 24), + ("OFFExtended", 24), ("OFFExtended", 24) + ] + narrativeDefPM = [ + ("OFFPeriod", "period1"), + ("OFFPeriod", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 12), ("OFFPeriodMid", 24), + ("OFFExtended", 24), ("OFFExtended", 24) + ] + + return [ + ("400 AM", self.DAY(), self.NIGHT(), 16, + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + #("1030 AM", "issuanceHour", self.NIGHT(), 16, + # ".TODAY...", "early in the morning", "late in the afternoon", + # 1, narrativeDefAM), + # End times are tomorrow: + ("400 PM", self.NIGHT(), 24 + self.DAY(), 24 + 4, + ".TONIGHT...", "late in the night", "early in the evening", + 1, narrativeDefPM), + #("1030 PM", "issuanceHour", 24 + self.DAY(), 24 + 4, + # ".TONIGHT...", "late in the night", "early in the evening", + # 1, narrativeDefPM), + ] + + + def lateDay_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late day -- default 3pm-6pm + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateDayPhrase() + else: + return "late in the afternoon" + + def lateNight_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late night -- default 3am-6am + if 
self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateNightPhrase() + else: + return "early in the morning" + + def significant_wx_visibility_subkeys(self, tree, node): + # Weather values that constitute significant weather to + # be reported regardless of visibility. + # If your visibility_wx_threshold is None, you do not need + # to set up these subkeys since weather will always be + # reported. + # Set of tuples of weather key search tuples in the form: + # (cov type inten) + # Wildcards are permitted. + return [("* *")] + + # Returns a list of the Hazards allowed for this product in VTEC format. + # These are sorted in priority order - most important first. + def allowedHazards(self): + + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + tropicalActions = ["NEW", "EXA", "EXB", "EXT", "UPG", "CAN", "CON", + "EXP"] + marineActions = ["NEW", "EXA", "EXB", "EXT", "CON"] + return [ + ('HU.W', tropicalActions, 'Tropical'), # HURRICANE WARNING + ('TY.W', tropicalActions, 'Tropical'), # TYPHOON WARNING + ('TR.W', tropicalActions, 'Tropical'), # TROPICAL STORM WARNING + ('HF.W', marineActions, 'Marine'), # HURRICANE FORCE WIND WARNING + ('SR.W', marineActions, 'Marine'), # STORM WARNING + ('GL.W', marineActions, 'Marine'), # GALE WARNING + ('SE.W', marineActions, 'Marine'), # HAZARDOUS SEAS + ('UP.W', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY WARNING + ('FG.Y', allActions, 'Fog'), # DENSE FOG ADVISORY + ('SM.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY + ('UP.Y', allActions, 'IceAccr'), # HEAVY FREEZING SPRAY ADVISORY + ('AF.W', allActions, 'Ashfall'), # VOLCANIC ASHFALL WARNING + ('AF.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY + ] diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/PFM.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/PFM.py index 9fe842e25b..0de17b570b 
100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/PFM.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/PFM.py @@ -1,1965 +1,1965 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# File Name: PFM.py -# Description: This product creates an AFM or PFM table containing -# times across the top and weather element as rows. Depending on -# the Local file, the user can generate an AFM or PFM. User can control -# the point at which entries are displayed as ranges vs. single values, -# and can optionally include Heat Index and Wind Chill. 
-#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# PFM, AFM___Definition, AFM__Overrides -# PFM___Definition, PFM__Overrides -#------------------------------------------------------------------------- -# User Configurable Variables: -# Definition Section: -# displayName If not None, defines how product appears in GFE GUI -# -# fullStationID Full station identifier, 4 letter, such as KSLC -# -# wfoCityState Identifier for the location of your WFO, such as -# "Cleveland, Ohio" -# -# wmoID WMO ID for product header, such as "FOUS51" -# -# pil Product pil, such as "AFMBOS" -# -# productType Product Type, must be "AFM" or "PFM" -# -# editAreaSuffix default None. For AFM only. Allows for generating the body of the product for -# an edit area that is a subset (e.g. population areas) of the -# edit areas specified in the defaultEditAreas. So given the edit area, -# "COZ035" and the editAreaSuffix is "_pt", then the edit area that -# will be sampled and reported for the body of the product will be -# "COZ035_pt". If no such edit area exists, the system will simply -# use the original edit area. -# Note that Hazards will always be generated for the entire edit area. -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined or the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. 
-# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# defaultEditAreas defines edit area names, ugc code and area descriptor -# (editAreaName, "ugc1\nareaDescription1") -# areaDictionary area dictionary format defining geographical codes -# (used if combinations file is specified in -# default edit areas) -# -# tempRangeThreshold If the data range of temperature exceeds this threshold, -# then a range of temperatures will be output rather -# than a single value. -# -# qpfRangeThreshold If the data range of QPF exceeds this threshold, -# then a range of qpf values will be output rather -# than a single value. -# -# snowRangeThreshold If the data range of snow exceeds this threshold, -# then a range of snow values will be output rather -# than a single value. -# -# includeSnowAmt Set to 1 to include SnowAmt, 0 to exclude. -# -# includeHeatIndex Set to 1 to include HeatIndex, 0 to exclude. Heat -# Index is only included when Heat Index grids are -# present, this option is 1, and the Heat Index -# thresholds are exceeded. -# -# heatIndexDifference Defines the threshold HeatIndex-T before reporting -# heat index. Heat Index must be this many degrees -# above the temperature before a heat index is reported. -# -# heatIndexLimit Defines the absolute threshold below which no -# heat index temperatures will be reported. -# -# includeWindChill Set to 1 to include WindChill, 0 to exclude. Wind -# Chill is only included when the Wind Chill grids -# are present, this option is 1, and the Wind Chill -# thresholds are exceeded. -# -# windChillDifference Defines the threshold T-WindChill before reporting -# wind chill. 
WindChill must be this many degrees -# below the temperature before a wind chill is reported. -# -# windChillLimit Defines the absolute threshold above which no -# wind chill temperatures will be reported. -# -# separateByTimeZone Normally set to "effectiveTZ" to ensure that zones -# combined in the product all have the same effective -# timezone, e.g., EST5, EST5EDT are considered the same -# in the winter since the effective tz is EST5. -# Can also be None to not separate out -# zones, or set to "actualTZ" to separate by actual -# TZ environment variable, e.g., EST5, EST5EDT are -# considered different in the winter. -# -#------------------------------------------------------------------------- -# Weather Elements Needed: -# To 66 hours: WindGust, QPF, SnowAmt, HeatIndex (opt.), WindChill (opt.). -# To 7 days: MaxT, MinT, T, Td, Wind, Sky, PoP, and Wx. -#------------------------------------------------------------------------- -# Edit Areas Needed: Typically area or point edit areas are required, -# depending upon whether an AFM or PFM is being created. The -# required format of the edit area naming string for the AFM is: -# (editAreaName, "areaName\nareaDescriptor") -# The required format for the PFM is: -# (editAreaName, "ugc code\npoint description\nyy.yyN xxx.xxW") -# where the editAreaName is the name of the sampling edit area, -# areaName is the UGC codes for the area, such as "COZ043" -# areaDescriptor is the descriptive name of that area. -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. 
Combinations file: None -#------------------------------------------------------------------------- -# Component Products: None -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -#------------------------------------------------------------------------- -# Additional Information: -# Sampling is complicated in this product since the column headers mean -# different things to different weather elements. In some cases, the -# column header denotes the ending time, in other cases, the starting -# time. There are 3hr, 6hr, and 12hr elements in the top portion of -# the product, and 6hr and 12hr elements in the extended (bottom) portion -# of the product. -# -# If HeatIndex is enabled, the HeatIndex row (and values) will only appear -# if the threshold values are exceeded from the HeatIndex grid. If -# HeatIndex appears, then Max Heat will appear in the output. -# -# If WindChill is enabled, the WindChill row (and values) will only appear -# if the threshold values are exceeded from the WindChill grid. If -# Wind Chill appears, then Min Chill will appear in the output. -# -# WindGust will only appear if WindGust grids are present and the wind -# gust values exceed the wind values by at least 10mph. -# -# Fields are blanked in the output if those times are before the -# product issuance time. -# -# Note that no headers will appear for Day 3 on the Afternoon issuance due to -# inadequate space. 
-# -#------------------------------------------------------------------------- -# Example Output: -# -## FOUS51 KRLX 311833 -## AFMRLX - -## AREA FORECAST MATRICES -## NATIONAL WEATHER SERVICE CHARLESTON WV -## 1233 PM MDT WED JUL 31 2002 - -## Area 1-010333 -## 1233 PM MDT WED JUL 31 2002 - -## Date Wed 07/31/02 Thu 08/01/02 Fri 08/02/02 -## UTC 3hrly 09 12 15 18 21 00 03 06 09 12 15 18 21 00 03 06 09 12 15 18 21 00 -## MDT 3hrly 03 06 09 12 15 18 21 00 03 06 09 12 15 18 21 00 03 06 09 12 15 18 - -## Max/Min 92 94 96 62 64 66 77 79 81 61 63 65 81 83 85 -## Temp 81 75 86 94 74 76 73 69 66 64 67 72 76 79 76 70 66 63 67 73 79 83 -## Dewpt 34 32 29 26 52 52 52 51 51 50 49 49 49 49 50 51 52 53 52 50 49 48 -## RH 18 20 12 8 46 43 47 52 58 60 52 44 38 34 40 50 60 69 58 44 34 29 -## Wind dir NW NW NW N S SE SE S S S S S S SE S S S S S S S S -## Wind spd 3 6 5 5 8 10 11 10 11 11 11 10 12 12 13 14 14 13 14 14 13 13 -## Wind gust -## Clouds CL CL CL CL MM OV OV OV SC SC SC FW FW FW SC SC BK OV BK SC FW FW -## PoP 12hr 0 0 28 0 16 -## QPF 12hr 0.04 0.00 0.03 0.00 0.00 -## Snow 12hr 00-00 00-00 00-00 00-00 00-00 -## Rain shwrs SC SC -## Rain WP WP WP WP - -## Date Sat 08/03/02 Sun 08/04/02 Mon 08/05/02 Tue 08/06/02 -## UTC 6hrly 06 12 18 00 06 12 18 00 06 12 18 00 06 12 18 00 06 -## MDT 6hrly 00 06 12 18 00 06 12 18 00 06 12 18 00 06 12 18 00 - -## Max/Min 65 94 64 79 63 83 65 87 -## Temp 74 65 72 76 69 64 72 79 70 63 73 83 74 65 72 87 -## Dewpt 49 50 48 52 51 50 49 49 51 53 50 48 49 50 48 45 -## PWind dir S S S S S S S S -## Wind char BZ BZ GN GN BZ BZ BZ BZ -## Avg clouds FW FW SC SC BK BK FW FW SC SC SC SC FW SC SC -## PoP 12hr 0 1 0 28 0 16 0 1 -## Rain shwrs SC -## Rain WP - -## $$ - -########################################################################### - -import TextRules -import SampleAnalysis -import LogStream -import string, time, types, os -import TimeRange, AbsTime - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - 
(("Product Issuance", "productIssuance"), "Morning", "radio", - ["Morning","Afternoon"]), - # (("Tropical Storm Winds", "tropicalStorm"), "NO", "radio", ["NO", "YES"]), - ] - Definition = { - "type": "smart", - "displayName": "None", - # Source database for product. Can be "Official", "Fcst" or "ISC" - "database": "Official", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/PFM_.txt", - "debug": 0, - - "defaultEditAreas" : "Combinations_PFM__", - "editAreaSuffix": None, - # Name of map background for creating Combinations - "mapNameForCombinations": "Zones_", - - "runTimeEditAreas" : "no", # if yes, ask user at run time - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - - # Product-specific variables: - "tempRangeThreshold": 5, # determines range vs. single value output - "qpfRangeThreshold": 0.05, # determines range vs. single value output - "snowRangeThreshold": 3, # determines range vs. 
single value output - - # Options for product - "includeSnowAmt": 1, # set to 1 to include snow amount in output - "includeHeatIndex": 1, # set to 1 to include heat index in output - "includeWindChill": 1, # set to 1 to include wind chill in output - "windChillDifference": 5, # T-WindChill difference before reporting - "windChillLimit": 40, # don't report wind chills above this value - "heatIndexDifference": 0, # indicates HI-T difference before reporting - "heatIndexLimit": 80, # don't report heat index below this value - - # Area Dictionary - Descriptive info about zones (AFM only) - "areaDictionary": "AreaDictionary", - - # Identifiers for product - "fullStationID": "", # full station identifier (4letter) - "wfoCityState": "", # city,state of wfo for header - "wmoID": "", # WMO ID code - "pil": "", # product pil - "productType": "PFM", # Denotes product type, "AFM", or "PFM" - - # purge time - "fixedExpire": 1, #ensure VTEC actions don't affect segment time - - "separateByTimeZone": "effectiveTZ", #separate segments based on tz - - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def generateForecast(self, argDict): - # Generate formatted product for a list of edit areas - - # get current time - self._currentTime = argDict['creationTime'] - - # Get variables from varDict and Definition - self._getVariables(argDict) - - # Get the areaList -- derived fromEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." 
- - # Determine time ranges, for each possible time zone - self._areaTZ = self.getAreaTZ(argDict, self._areaList) #all TimeZones - tzDictTR = {} - for key in self._areaTZ.keys(): - tz = self._areaTZ[key] - if not tzDictTR.has_key(tz): - tzDictTR[tz] = self._determineTimeRanges(argDict, tz) - self._determineZuluTimeRanges(argDict, tzDictTR) - - - # Sample the data. - sampleInfo = [] - # determine all of the areas using the same time zone - for timeZone in tzDictTR.keys(): - areasInTimeZone = [] - for area in self._areaList: - areaLabel = area[1] #(editArea, areaLabel) - if self._areaTZ[areaLabel] == timeZone: - areasInTimeZone.append(area) - sampleInfo += self._sampleData(argDict, tzDictTR[timeZone], - areasInTimeZone) - self._sampler = self.getSampler(argDict, sampleInfo) - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - tz = self._areaTZ[areaLabel] - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict, - tzDictTR[tz]['timeLabel']) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict, - tzDictTR[tz], tz) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Set up product-specific variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - try: - if self._tropicalStorm == 
"YES": - self._tropicalStorm = 1 - else: - self._tropicalStorm = 0 - except: - self._tropicalStorm = 0 - - - self._lineLength = 66 #only will apply to UGC header - - # Basic widths for product - self._rowLabelWidth = 13 #width of row label - self._top3hrWidth = 3 #top part of product, 3hrly width - self._top6hrWidth = 6 #top part of product, 6hrly width - self._top12hrWidth = 12 # top part of product, 12hrly width - self._bottom6hrWidth = 3 # bottom part, 6hr width - self._bottom12hrWidth = 6 # bottom part, 12hr width - self._extraDayWidth = 2 # extra spaces between days, bottom part - - - def getAreaTZ(self, argDict, areaList): - #gets the list of areas and their time zones for the product - #the areas are defined by the "areaList" and aren't the actual - #zones UGCs, e.g., "Combo0" and not "FLZ050". Only 1 time zone - #is permitted in the area. Returns the time zone environmental - #variable, e.g., EST5EDT, with the effective TZ, e.g., EST, in a - #dictionary keyed by the areaLabel. - import ModuleAccessor - accessor = ModuleAccessor.ModuleAccessor() - areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") - tzDir = {} - localTZ = os.environ['TZ'] #current WFO time zone - localTZid = time.strftime("%Z", time.localtime(argDict['creationTime'])) - for editArea, areaLabel in areaList: - areas = self.getCurrentAreaNames(argDict, areaLabel) #get areas - tzs = [] - for area in areas: - #extract 1st string out for PFM. The AFM is in correct format. - if self._productType == "PFM": - areaStrings = string.split(area, '\n') - area = areaStrings[0] #1st line is the UGC code - try: - zoneTZ = areaDict[area]['ugcTimeZone'] - prevTZ = os.environ['TZ'] - os.environ['TZ'] = zoneTZ - time.tzset() - tzid = time.strftime("%Z", - time.localtime(argDict['creationTime'])) - os.environ['TZ'] = prevTZ - time.tzset() - except: - zoneTZ = localTZ - tzid = localTZid - LogStream.logEvent("WARNING: Entry " + area + - " does not have time zone info in AreaDictionary. 
Using default time zone.", LogStream.exc()) - - if (zoneTZ, tzid) not in tzs: - tzs.append((zoneTZ, tzid)) - #should only be 1 effective time zone at this point. - if len(tzs) == 0: - tzDir[areaLabel] = localTZ #force localTZ - elif len(tzs) == 1: - tzDir[areaLabel] = tzs[0][0] #use the 1 time zone - else: - tzid = tzs[0][1] #1st one, get the effective id - for x in xrange(1, len(tzs)): - if tzs[x][1] != tzid: - LogStream.logProblem(\ - "WARNING: Multiple Effective Time Zones in segment." + - str(tzs) + " for " + str(areas) + " -- using first one.") - tzDir[areaLabel] = tzs[0][0] #use the 1 time zone - - return tzDir - - def _determineZuluTimeRanges(self, argDict, tzDictTR): - # Determine time ranges that deal with Zulu-time, i.e., not - # specific times. The tzDictTR is the group of dictionaries - # returned from determineTimeRanges(). - - ################################## - # Setup Time Labels - ################################## - # Sets up the product's time labels - - self._ddhhmmTime = time.strftime("%d%H%M", - time.gmtime(self._currentTime)) - - # Sets up the expiration time - self._expireTime, self._ddhhmmTimeExpire = \ - self.getExpireTimeFromLToffset(self._currentTime, - self.expireOffset(), "") - - # time label for the MND header - self._mndTimeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - - # the overall time range - earliest = None - latest = None - for key in tzDictTR.keys(): - d = tzDictTR[key] - if earliest is None: - earliest = d['top_3hr'][0][0].startTime() - latest = d['bottom_12hr'][-1][0].endTime() - else: - earliest = min(d['top_3hr'][0][0].startTime(),earliest) - latest = max(d['bottom_12hr'][-1][0].endTime(),latest) - - self._timeRange = TimeRange.TimeRange(earliest, latest) - argDict["productTimeRange"] = self._timeRange - - def _shortenPeriods(self, periods): - #Shorten the period containing the current time so that we don't - #sample the entire period. 
Periods is from getPeriods(), returns - #modified version. - thisHour = int(self._currentTime / 3600) * 3600 #truncated to hh:00 - for x in xrange(len(periods)): - tr, label = periods[x] - startT = tr.startTime().unixTime() - endT = tr.endTime().unixTime() - if startT < thisHour and thisHour < endT: - tr = TimeRange.TimeRange(AbsTime.AbsTime(thisHour), AbsTime.AbsTime(endT)) - periods[x] = (tr, label) - elif thisHour < startT: - break - return periods - - def _creTR(self, baseTime, offset): - # creates and returns a python TimeRange, based on the AbsTime baseTime, - # and the offset (in hours). The length of the TimeRange is one hour. - # This is a substitute for the createTimeRange() in determineTimeRanges - startTime = baseTime + 3600*offset - return TimeRange.TimeRange(startTime, startTime + 3600) - - def _determineTimeRanges(self, argDict, timeZone): - # Determine time ranges for product - fairly complicated since - # multiple samplings and two sets of tables (short-term, long-term) - # Sets up: - # top_3hr - top row of AFM, 3 hourly LT periods - # top_3hr_snap - top row of AFM, 3 hourly LT periods/labels - # top_6hr - top row of AFM, 6 hourly LT periods - # top_6hrShort - top row of AFM, 6 hourly LT periods, start now - # top_12hr - top row of AFM, 12 hourly sample periods - # top_12hrShort - top row of AFM, 12 hourly sample periods, start now - # bottom_6hr - bottom row of AFM, 6 hourly LT periods - # bottom_6hr_snap - bottom row of AFM, 6 hourly, snapshots, labels - # bottom_12hr - bottom row of AFM, 12 hourly sample periods - # - # Returns dictionary for the particular timezone containing the - # above keys and values. - tzDict = {} - - # change the time zone - prevTZ = os.environ['TZ'] - os.environ['TZ'] = timeZone - time.tzset() - - # determine the optimal time for the zulu-based product columns, - # that most closely mirror 3AM LT and 3PM LT. The final baseTime is - # the column label for the first column. 
- if self._productIssuance == "Afternoon": - #check for update for Afternoon issuance after midnight - if time.localtime(self._currentTime)[3] >= 4: - tr = self.createTimeRange(15, 15+1) #3PM target (not update) - else: - tr = self.createTimeRange(15-24, 15-24+1) #3PM target yesterday - else: - tr = self.createTimeRange(3, 3+1) #3AM target - - # determine offset between potential times, want 3z,9z,15,21z - baseTime = (((tr.startTime().unixTime() - 3600*3) - / 3600 / 6 ) * 3600*6) + 3600*3 - offsetFromBaseTime = ((((tr.startTime().unixTime() - 3600*3) - / 3600.0 / 6.0 ) * 3600*6) + 3600*3 - baseTime) / 3600 - offsetFromBaseTime = int(offsetFromBaseTime) - if offsetFromBaseTime > 3: #over halfway to next basetime - baseTime = baseTime + 3600*6 #move to next 6 hour increment - baseTime = AbsTime.AbsTime(baseTime) #convert to AbsTime - - # Set up the beginning Time Range - note they are different for the - # 3/6hrly, and the 12hrly sampling. Note that the 6hr is offset by - # 1 hour to allow the minChill/maxHeat to include the hour indicated - # by the column. Comments are for offsets for sampling periods from - # first column's labeled time. 
Length of created time range is 1 hour - topTimeRange3hr = self._creTR(baseTime, -3) #-3h to 0h - topTimeRange6hr = self._creTR(baseTime, -2) #-2h to 4h - topTimeRange3hrSnap = self._creTR(baseTime, 0) #0h to 1h - topTimeRange12hr = self._creTR(baseTime, 3) #3h to 15h - bottomTimeRange6hr = self._creTR(baseTime, -3+66) #63h to 66h - bottomTimeRange6hrSnap = self._creTR(baseTime, -3+72) #69h to 70h - bottomTimeRange12hr = self._creTR(baseTime, 3+60) #63h to 75h - - ################################## - # Set up 3hr, 6hr, and 12hr elements in top portion - ################################## - timePeriod = 3 - timeSpan = 3 - numPeriods = 22 - tzDict['top_3hr'] = self.getPeriods(topTimeRange3hr, timePeriod, - timeSpan, numPeriods) - timeSpan = 1 - tzDict['top_3hr_snap'] = self.getPeriods(topTimeRange3hrSnap, - timePeriod, timeSpan, numPeriods, - self._hour24localLabel) - timePeriod = 6 - timeSpan = 6 - numPeriods = 11 - tzDict['top_6hr'] = self.getPeriods(topTimeRange6hr, timePeriod, - timeSpan, numPeriods) - periods = self.getPeriods(topTimeRange6hr, timePeriod, - timeSpan, numPeriods) - tzDict['top_6hrShort'] = self._shortenPeriods(periods) - - - timePeriod = 12 - timeSpan = 12 - numPeriods = 5 - tzDict['top_12hr'] = self.getPeriods(topTimeRange12hr, - timePeriod, timeSpan, numPeriods) - periods = self.getPeriods(topTimeRange12hr, - timePeriod, timeSpan, numPeriods) - tzDict['top_12hrShort'] = self._shortenPeriods(periods) - - ################################## - # Set up 6hr and 12hr elements in bottom portion - ################################## - timePeriod = 6 - timeSpan = 6 - if self._productIssuance == "Morning": - numPeriods = 16 - else: - numPeriods = 18 - tzDict['bottom_6hr'] = self.getPeriods(bottomTimeRange6hr, - timePeriod, timeSpan, numPeriods) - timeSpan = 1 - tzDict['bottom_6hr_snap'] = self.getPeriods( - bottomTimeRange6hrSnap, - timePeriod, timeSpan, numPeriods, - self._hour24localLabel) - - timePeriod = 12 - timeSpan = 12 - if self._productIssuance 
== "Morning": - numPeriods = 8 - else: - numPeriods = 9 - tzDict['bottom_12hr'] = self.getPeriods(bottomTimeRange12hr, - timePeriod, timeSpan, numPeriods, - self._hour24localLabel) - - - # timeLabel is the spelled-out version of the current time - # for each UGC header - tzDict['timeLabel'] = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - - # restore the time zone - os.environ['TZ'] = prevTZ - time.tzset() - - return tzDict - - def _sampleData(self, argDict, tp, areas): - # Sample the data. (tp) Time Periods is a dictionary containing - # all of the time periods and categories to sample for a single time - # zone. areas are the areas that have this time zone. - # Different sampler lists are required due to different time periods - # and different elements. Sets up the following to sample: - # 3hr_top = 3hrly, top of product, 3 hr durations - # 3hr_snap_top = 3hrly, top of product, 1 hr duration - # 6hr_top = 6hrly, top of product, 6 hr durations - # 12hr_top = 12hrly, top of product - # 6hr_bottom = 6 hrly, bottom of product, 6 hr durations - # 6hr_snap_bottom = 6 hrly, bottom of product, 1 hr durations - # 12hr_bottom = 12hrly, bottom of product - - # the analysis method is called "self._analysisList_" plus the - # name of the sample period, which is the key. 
- # Each entry: (analysisList, periods, areaList) - - - sampleInfo = [ - (self._analysisList_top_3hr(), tp['top_3hr'], areas), - (self._analysisList_top_3hr_snap(), tp['top_3hr_snap'], areas), - (self._analysisList_top_6hr(), tp['top_6hr'], areas), - (self._analysisList_top_6hrShort(), tp['top_6hrShort'], areas), - (self._analysisList_top_12hr(), tp['top_12hr'], areas), - (self._analysisList_top_12hrShort(), tp['top_12hrShort'], areas), - (self._analysisList_bottom_6hr(), tp['bottom_6hr'], areas), - (self._analysisList_bottom_6hr_snap(), tp['bottom_6hr_snap'], areas), - (self._analysisList_bottom_12hr(), tp['bottom_12hr'], areas), - ] - - return sampleInfo - - def _preProcessProduct(self, fcst, argDict): - # Add product heading to fcst string - if self._productType == "AFM": - productDescription = "Area Forecast Matrices" - else: - productDescription = "Point Forecast Matrices" - - issuedByString = self.getIssuedByString() - - productName = self.checkTestMode(argDict, - productDescription) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" + \ - "National Weather Service " + \ - self._wfoCityState +"\n" + \ - issuedByString + \ - self._mndTimeLabel + "\n\n" - fcst = fcst + s - - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict, timeLabel): - # extract out the ugc codes and the area descriptors - # AFM: combinations filename, or (ugc, descriptionlines) - # PFM: areaLabel length 4 (ugc, description, latlon, elevation) - areaStrings = string.split(areaLabel, '\n') - - #AFM setup with combinations file - if self._productType == "AFM" and len(areaStrings) == 1: - areaHeader = self.makeAreaHeader( - argDict, areaLabel, AbsTime.AbsTime(self._currentTime), - AbsTime.AbsTime(self._expireTime), self._areaDictionary, - self._defaultEditAreas) - - fcst = fcst + areaHeader - - - #No Combinations File or PFM product - else: - if self._productType 
== "PFM": - if len(areaStrings) != 4 and len(areaStrings) != 3: - raise SyntaxError, """ -PFM requires defaultEditArea format of (editAreaName, -ugcLine\\narea description line\\nll.llN lll.llW\\nElev -editAreaName is the name of the edit area for sampling -ugcLine is the ugc code representing the area, and is used for timezone info -area description line describes the area -ll.llN lll.llW is the latitude and longitude for the area -Elev is a string representing the station elevation, e.g., 423. -Found description: """ + areaLabel - - latlon = areaStrings[2] - if not self._latlonCheck(latlon): - raise SyntaxError, "PFM lat/lon format must be " +\ - "exactly ll.llN lll.llW found:" + latlon - - ugcCode = areaStrings[0] - s = ugcCode + "-" + self._ddhhmmTimeExpire + "-\n" - - # descriptor lines, lat/lon lines, elevation - if len(areaStrings) == 4: - s += areaStrings[1] + "\n" + areaStrings[2] + " Elev. " + \ - areaStrings[3] + " ft" + "\n" - #old OB8.2 format - else: - s += areaStrings[1] + "\n" + areaStrings[2] + " Elev. " + \ - "???? ft" + "\n" - #log "old" format - LogStream.logProblem("WARNING: Old defaultEditArea format " + """ -PFM requires defaultEditArea format of (editAreaName, -ugcLine\\narea description line\\nll.llN lll.llW\\nElev -editAreaName is the name of the edit area for sampling -ugcLine is the ugc code representing the area, and is used for timezone info -area description line describes the area -ll.llN lll.llW is the latitude and longitude for the area -Elev is a string representing the station elevation, e.g., 423. 
-Found description: """ + areaLabel) - - fcst = fcst + s + timeLabel + "\n\n" - - # setup to handle the hazards - self._hazards = argDict['hazards'] - self._combinations = argDict['combinations'] - - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict, timePeriods, - timeZone): - - ############################################################### - # TOP PART OF PRODUCT - valid current time out to around 60hr) - ############################################################### - - # Day, Period Label (UTC), Period Label (LT) - dateLabel, utcLabel, ltLabel = self._calcPeriodLabels(\ - timePeriods['top_3hr_snap'], self._top3hrWidth, - self._rowLabelWidth, 3, timeZone) - fcst = fcst + dateLabel + "\n" + ltLabel + "\n" + utcLabel + "\n\n" - - # Create statLists - statList_3hr = self.getStatList( - self._sampler, self._analysisList_top_3hr(), - timePeriods['top_3hr'], editArea) - statList_3hr_snap = self.getStatList( - self._sampler, self._analysisList_top_3hr_snap(), - timePeriods['top_3hr_snap'], editArea) - statList_6hr = self.getStatList( - self._sampler, self._analysisList_top_6hr(), - timePeriods['top_6hr'], editArea) - statList_6hrShort = self.getStatList( - self._sampler, self._analysisList_top_6hrShort(), - timePeriods['top_6hrShort'], editArea) - statList_12hr = self.getStatList(self._sampler, - self._analysisList_top_12hr(), timePeriods['top_12hr'], editArea) - statList_12hrShort = self.getStatList(self._sampler, - self._analysisList_top_12hrShort(), timePeriods['top_12hrShort'], - editArea) - - # Max/Min - tpmaxmin = timePeriods['top_12hr'][0][0] - if self.getPeriod(tpmaxmin, shiftToLocal=1) == self.DAYTIME(): - label = "Max/Min" - else: - label = "Min/Max" - fcst=fcst+ self.makeRow( - label, self._top12hrWidth, timePeriods['top_12hr'], - statList_12hr, self._mxmnValue, [1], - self._rowLabelWidth, 18) - - # Temp - fcst=fcst+ self.makeRow( - "Temp", self._top3hrWidth, timePeriods['top_3hr_snap'], - statList_3hr_snap, self._tempValue, ['T'], 
self._rowLabelWidth) - - # DewPt - fcst=fcst+ self.makeRow( - "Dewpt", self._top3hrWidth, timePeriods['top_3hr_snap'], - statList_3hr_snap, self._tempValue, ['Td'], self._rowLabelWidth) - - #RH - fcst=fcst+ self.makeRow( - "RH", self._top3hrWidth, timePeriods['top_3hr_snap'], - statList_3hr_snap, self._rhValue, [], self._rowLabelWidth) - - # Wind direction - fcst=fcst+ self.makeRow( - "Wind dir", self._top3hrWidth, timePeriods['top_3hr_snap'], - statList_3hr_snap, self._windValue, ["dir"], - self._rowLabelWidth) - - # Windspd - fcst=fcst+ self.makeRow( - "Wind spd", self._top3hrWidth, timePeriods['top_3hr_snap'], - statList_3hr_snap, self._windValue, ["speed"], - self._rowLabelWidth) - - # Wind Gust - gustRow = self.makeRow( - "Wind gust", self._top3hrWidth, timePeriods['top_3hr_snap'], - statList_3hr_snap, self._windGustValue, [], - self._rowLabelWidth) - if string.strip(gustRow) != "Wind gust": - fcst=fcst+gustRow - - # Sky - fcst=fcst+ self.makeRow( - "Clouds", self._top3hrWidth, timePeriods['top_3hr_snap'], - statList_3hr_snap, self._skyValue, [], self._rowLabelWidth) - - # Pop - fcst=fcst+ self.makeRow( - "PoP 12hr", self._top12hrWidth, timePeriods['top_12hr'], - statList_12hrShort, self._popValue, [], self._rowLabelWidth, - 18) - - # QPF - fcst=fcst+ self.makeRow( - "QPF 12hr", self._top12hrWidth, timePeriods['top_12hr'], - statList_12hrShort, self._qpfValue, [], - self._rowLabelWidth, 18) - - #Max qpf - #fcst=fcst+ self.makeRow( - #"MAX QPF", self._top12hrWidth, timePeriods['top_12hr'], - #statList_12hrShort, None, [], self._rowLabelWidth, 18) - - # SnowAmt - if self._includeSnowAmt: - snowBasetime = \ - ((timePeriods['top_3hr_snap'][0])[0]).startTime().unixTime() - fcst=fcst+ self.makeRow( - "Snow 12hr", self._top12hrWidth, timePeriods['top_12hr'], - statList_12hrShort, self._snowValue, [snowBasetime], - self._rowLabelWidth, 18) - - # Weather - fcst = self._createWxRows(fcst, timePeriods['top_3hr_snap'], - self._sampler, - self._analysisList_top_3hr_snap(), 
editArea, - self._top3hrWidth) - - # OBVIS - fcst = self._createObVisRows(fcst, timePeriods['top_3hr_snap'], - self._sampler, - self._analysisList_top_3hr_snap(), editArea, - self._top3hrWidth) - - # Wind Chill - if self._includeWindChill: - row = self.makeRow( - "Wind chill", self._top3hrWidth, timePeriods['top_3hr_snap'], - statList_3hr_snap, self._windChillValue, [], - self._rowLabelWidth) - if string.strip(row) != "Wind chill": - fcst=fcst+row - fcst = fcst + self.makeRow( - "Min chill", self._top6hrWidth, timePeriods['top_6hrShort'], - statList_6hrShort, self._minWindChillValue, [], - self._rowLabelWidth) - - - # Heat Index - if self._includeHeatIndex: - row = self.makeRow( - "Heat index", self._top3hrWidth, timePeriods['top_3hr_snap'], - statList_3hr_snap, self._heatIndexValue, [], - self._rowLabelWidth) - if string.strip(row) != "Heat index": - fcst=fcst+row - fcst = fcst + self.makeRow( - "Max heat", self._top6hrWidth, timePeriods['top_6hrShort'], - statList_6hrShort, self._maxHeatIndexValue, [], - self._rowLabelWidth) - - # Hazards (WWA) - #need to make list of actual edit areas first - combinations = argDict["combinations"] - if combinations is not None: - areaList = self.getCurrentAreaNames(argDict, areaLabel) - else: - areaList = [editArea.getId().getName()] - fcst = self._createWWARows(fcst, timePeriods['top_3hr_snap'], - areaList, self._top3hrWidth) - - fcst = fcst + "\n\n" - - ###################################### - # Extended Forecast - ###################################### - # Determine the column spacing - 3 normally, 5 between days LT - # Determine change in day by labels. 
- colSpacing6hr = [] - colSpacing12hr = [] - - # set the time zone - prevTZ = os.environ['TZ'] - os.environ['TZ'] = timeZone - time.tzset() - - # determine zulu alignment for bottom section, gather all of the - # possible zulu hours (can't be more than 2 due to 12h intervals) - zuluHours = [] - for x in xrange(0, 2): - period, label = timePeriods['bottom_12hr'][x] - zuluHours.append(period.endTime().hour) - - # add extra space for the first column in each day, except 1st one - # We treat midnight at the end of the day so we subtract 1 to get - # the right day for extra space insertion. We use startTime, since - # the values are valid at the label time. - dayOfMonthProcessed6 = None - firstTime = True - for period, label in timePeriods['bottom_6hr_snap']: - dayOfMonth = (period.startTime() -1 + self.determineShift()).day - if dayOfMonth != dayOfMonthProcessed6 and not firstTime: - colSpacing6hr.append(self._bottom6hrWidth + - self._extraDayWidth) - else: - colSpacing6hr.append(self._bottom6hrWidth) - dayOfMonthProcessed6 = dayOfMonth - firstTime = False - - # now determine the bottom 12hr periods and their spacing - runningTotal = 0 - for x in xrange(len(timePeriods['bottom_6hr_snap'])): - period, label = timePeriods['bottom_6hr_snap'][x] - space6 = colSpacing6hr[x] - runningTotal += space6 - if period.startTime().hour in zuluHours: - colSpacing12hr.append(runningTotal) - runningTotal = 0 - - # reset the time zone - os.environ['TZ'] = prevTZ - time.tzset() - - # Create statLists for bottom portion - statList_6hr_snap = self.getStatList( - self._sampler, - self._analysisList_bottom_6hr_snap(), - timePeriods['bottom_6hr_snap'], editArea) - statList_6hr = self.getStatList( - self._sampler, - self._analysisList_bottom_6hr(), - timePeriods['bottom_6hr'], editArea) - statList_12hr = self.getStatList(self._sampler, - self._analysisList_bottom_12hr(), - timePeriods['bottom_12hr'], editArea) - - # Day, Period Label (UTC), Period Label (LT) - dateLabel, utcLabel, ltLabel = 
self._calcPeriodLabels(\ - timePeriods['bottom_6hr_snap'], colSpacing6hr, - self._rowLabelWidth, 6, timeZone) - fcst = fcst + dateLabel + "\n" + ltLabel + "\n" + utcLabel + "\n\n" - - # Max/MinT - tpmaxmin = timePeriods['bottom_12hr'][0][0] - if self.getPeriod(tpmaxmin, shiftToLocal=1) == self.DAYTIME(): - label = "Max/Min" - else: - label = "Min/Max" - fcst=fcst+ self.makeRow( - label, colSpacing12hr, timePeriods['bottom_12hr'], - statList_12hr, self._mxmnValue, [0], - self._rowLabelWidth) - - # Temp - fcst=fcst+ self.makeRow( - "Temp", colSpacing6hr, timePeriods['bottom_6hr_snap'], - statList_6hr_snap, self._tempValue, ["T"], self._rowLabelWidth) - - - # Dewpt - fcst=fcst+ self.makeRow( - "Dewpt", colSpacing6hr, timePeriods['bottom_6hr_snap'], - statList_6hr_snap, self._tempValue, ["Td"], self._rowLabelWidth) - - # Predominant Wind direction - fcst=fcst+ self.makeRow( - "PWind dir", colSpacing12hr, timePeriods['bottom_12hr'], - statList_12hr, self._windValue, ["dir"], - self._rowLabelWidth) - - # Wind character - fcst=fcst+ self.makeRow( - "Wind char", colSpacing12hr, timePeriods['bottom_12hr'], - statList_12hr, self._windCharValue, ["Wind", "speed", 0], - self._rowLabelWidth) - - # Average Clouds - 6hrly - fcst=fcst+ self.makeRow( - "Avg clouds", colSpacing6hr, timePeriods['bottom_6hr'], - statList_6hr, self._skyValue, [], self._rowLabelWidth) - - - # Pop - fcst=fcst+ self.makeRow( - "PoP 12hr", colSpacing12hr, timePeriods['bottom_12hr'], - statList_12hr, self._popValue, [], self._rowLabelWidth) - - # Weather - fcst = self._createWxRows(fcst, timePeriods['bottom_6hr'], - self._sampler, - self._analysisList_bottom_6hr(), editArea, - colSpacing6hr) - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst + "\n$$\n" - - def _postProcessProduct(self, fcst, argDict): - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - # provides expiration time offset from today's 
midnight based on issuance. - def expireOffset(self): - if self._productIssuance == "Morning": - #4pm today - return 16 - else: - #4am tomorrow - return 24+4 - - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _analysisList_top_3hr(self): #None needed - return [ - - ] - - def _analysisList_top_3hr_snap(self): - return [ - ("T", self.avg), - ("Wind", self.vectorAvg), - ("WindGust", self.maximum), - ("WindChill", self.avg), - ("HeatIndex", self.avg), - ("Sky", self.avg), - ("Td", self.avg), - ("RH", self.avg), - ("Wx", self.dominantWx), - ] - - def _analysisList_top_6hr(self): - return [ - ] - - def _analysisList_top_6hrShort(self): - return [ - ("HeatIndex", self.minMax), - ("WindChill", self.minMax), - ] - - - def _analysisList_top_12hr(self): - return [ - ("MaxT", self.minMaxAvg), - ("MinT", self.minMaxAvg), - ] - - def _analysisList_top_12hrShort(self): - return [ - ("PoP",self.stdDevMaxAvg), - ("QPF",self.minMaxSum), - ("SnowAmt", self.minMaxSum), - ] - - def _analysisList_bottom_6hr(self): - return [ - ("Sky", self.avg), - ("Wx", self.dominantWx), - ] - - def _analysisList_bottom_6hr_snap(self): - return [ - ("T", self.avg), - ("Td", self.avg), - ("RH", self.avg), - ] - - def _analysisList_bottom_12hr(self): - return [ - ("Wind", self.vectorAvg), - ("PoP",self.stdDevMaxAvg), - ("MaxT", self.avg), - ("MinT", self.avg), - ("Sky", self.avg) - ] - - def _hour24zuluLabel(self, timeRange): - # returns the starting time of the timeRange in zulu, such as "03" - label = timeRange.startTime().stringFmt("%H") - return string.rjust(label, self._top3hrWidth) - - def _hour24localLabel(self, timeRange): - # returns the starting time of the timeRange in localtime, such as "06" - start = timeRange.startTime() + self.determineShift() - label = start.stringFmt("%H") - return string.rjust(label, self._top3hrWidth) - - def _tempValue(self, 
statDict, timeRange, argList): - # return a string for the temperatures, such as "85" - # can return MM for no data, blanks if timeRange is earlier than now - if timeRange.startTime().unixTime() < self._currentTime: - return "" - val = self.getStats(statDict, argList[0]) - if val is None: - return "MM" - return `int(round(val))` - - def _rhValue(self, statDict, timeRange, argList): - # return a string for the rh, such as "85" - # can return MM for no data, blanks if timeRange is earlier than now - if timeRange.startTime().unixTime() < self._currentTime: - return "" - # get the T and Td - tval = self.getStats(statDict, "T") - if tval is None: - return "MM" - tdval = self.getStats(statDict, "Td") - if tdval is None: - return "MM" - t = round(tval) - td = round(tdval) - - #calc RH from T and Td - Tc = .556 * (t - 32.0) - Tdc = .556 * (td - 32.0) - Vt = 6.11 * pow(10,(Tc * 7.5 / (Tc + 237.3))) - Vd = 6.11 * pow(10,(Tdc * 7.5 / (Tdc + 237.3))) - RH = (Vd / Vt) * 100.0 - - return `int(round(RH))` - - def _popValue(self, statDict, timeRange, argList): - # return a string for the pop, such as "80" - # PoP is rounded to nearest 10%, plus the 5% single value is allowed - # can return MM for no data, blanks if timeRange is earlier than now - if timeRange.endTime().unixTime() < self._currentTime: - return "" - val = self.getStats(statDict, "PoP__stdDevMaxAvg") - if val is None: - return "MM" - popMax5=int(self.round(val,"Nearest",5)) - if popMax5 == 5: - return "5" - popMax10=int(self.round(val,"Nearest",10)) - return `int(popMax10)` - - def _qpfValue(self, statDict, timeRange, argList): - # Return a string for the QPF, such as 0, 0.05, or 0.25-0.49 - # can return "MM" for missing data, - # blanks if timeRange earlier than now - # PFMs don't have ranges ever. 
- if timeRange.endTime().unixTime() < self._currentTime: - return "" - val = self.getStats(statDict, "QPF__minMaxSum") - if val is None: - return "MM" - minV, maxV, sumV = val - if maxV - minV > self._qpfRangeThreshold and self._productType == "AFM": - minString = string.strip("%5.2f" %minV) - maxString = string.strip("%5.2f" %maxV) - return minString+"-"+maxString - elif sumV < 0.005: - return "0" - else: - return string.strip("%5.2f" %sumV) - - - def _snowValue(self, statDict, timeRange, argList): - # Return a string for the Snow, such as 00-00, 5, or 5-9 - # Can return "MM" for missing data, blanks if timeRange - # earlier than now, or if greater than 36 hrs from the base time. - # PFMs don't have ranges ever. - # -- basetime is the argList[0] - if timeRange.endTime().unixTime() < self._currentTime: - return "" - basetime = argList[0] - if timeRange.startTime().unixTime() >= basetime + 36 * 3600: - return "" - val = self.getStats(statDict, "SnowAmt__minMaxSum") - if val is None: - return "MM" - minV, maxV, sumV = val - if maxV - minV > self._snowRangeThreshold and \ - self._productType == "AFM": - minString = `int(round(minV))` - maxString = `int(round(maxV))` - return minString+"-"+maxString - elif sumV < 0.1: - return "00-00" - elif sumV < 0.5: - return "T" - else: - return `int(round(sumV))` - - def _mxmnValue(self, statDict, timeRange, argList): - # Return a string for the max or min temperatures. - # String may be a single value, such as 75, or a range 60 65 70 - # where the values are min, ave, max. Can return MM for missing data - # Blanks if timeRange earlier than now. - # PFMs don't ever have ranges - if timeRange.endTime().unixTime() < self._currentTime: - return "" - dayNight = self.getPeriod(timeRange, shiftToLocal=1) - if dayNight == self.DAYTIME(): - val = self.getStats(statDict, "MaxT") - else: - val = self.getStats(statDict, "MinT") - if val == None: - return "MM" - - # did we get a tuple, or just a single value? 
- if type(val) is types.TupleType: - minV, maxV, aveV = val - if maxV - minV > self._tempRangeThreshold and \ - self._productType == "AFM": - minString = string.rjust(`int(round(minV))`, self._top3hrWidth) - aveString = string.rjust(`int(round(aveV))`, self._top3hrWidth) - maxString = string.rjust(`int(round(maxV))`, self._top3hrWidth) - return minString+aveString+maxString - else: - return `int(round(aveV))` - else: - return `int(round(val))` - - - def _windChillValue(self, statDict, timeRange, argList): - # Returns string for WindChill, such as "25" - # Returns "" for missing data, blanks if data earlier than now. - # Returns "" if wind chill not below a certain value. - if timeRange.startTime().unixTime() < self._currentTime: - return "" - - chill = self.getStats(statDict,"WindChill") - temp = self.getStats(statDict,"T") - if chill is None or temp is None: - return "" - - if chill < temp and chill < self._windChillLimit and \ - (temp - chill) > self._windChillDifference: - return `int(round(chill))` - else: - return "" - - def _heatIndexValue(self, statDict, timeRange, argList): - # Returns string for HeatIndex, such as "85" - # Returns "" for missing data, blanks if data earlier than now. - # Returns "" if heat index not above thresholds - if timeRange.startTime().unixTime() < self._currentTime: - return "" - - heat = self.getStats(statDict,"HeatIndex") - temp = self.getStats(statDict,"T") - if heat is None or temp is None: - return "" - - if heat >= self._heatIndexLimit and \ - (heat - temp) >= self._heatIndexDifference: - return `int(round(heat))` - else: - return "" - - def _minWindChillValue(self, statDict, timeRange, argList): - # Returns string for WindChill, such as "25" - # Returns "" for missing data, blanks if data earlier than now. - # Returns wind chill only if below user-set wind chill limit. 
- if timeRange.endTime().unixTime() < self._currentTime: - return "" - - chill = self.getStats(statDict,"WindChill") - if chill is None: - return "" - minV, maxV = chill - if minV < self._windChillLimit: - return `int(round(minV))` - else: - return "" - - def _maxHeatIndexValue(self, statDict, timeRange, argList): - # Returns string for HeatIndex, such as "85" - # Returns "" for missing data, blanks if data earlier than now. - if timeRange.endTime().unixTime() < self._currentTime: - return "" - - heat = self.getStats(statDict,"HeatIndex") - if heat is None: - return "" - - minV, maxV = heat - if maxV >= self._heatIndexLimit: - return `int(round(maxV))` - return "" - - - def _windGustValue(self, statDict, timeRange, argList): - # Returns string for WindGust, such as "25" - # Returns "" for missing data, blanks if data earlier than now. - # Returns "" if gust doesn't exceed normal wind by 10 - # If tropical winds, symbols are used for wind gust. - if timeRange.startTime().unixTime() < self._currentTime: - return "" - - windGustVal = self.getStats(statDict,"WindGust") - windVal = self.getStats(statDict,"Wind") - if windGustVal is None or windVal is None: - return "" - - maxGust = windGustVal - mag, dir = windVal - if maxGust - mag < (10/1.15): - return "" # gust is not significant - speed = round(maxGust) * 1.15 # convert to MPH - - # hurricane force - fcstTime = timeRange.endTime().unixTime() - self._currentTime - if self._tropicalStorm and fcstTime > 24*3600 and speed >= 74: - return 'HG' - - return `int(speed)` - - - def _windValue(self, statDict, timeRange, argList): - # Returns string for Wind, either direction or speed - # Format is "25" for speed, "NW" for direction - # Can return "MM" for missing data, blanks if data earlier than now. - # If tropical storm, and high winds, different symbols are used. 
- # argList[0] for type(dir,speed) - if timeRange.startTime().unixTime() < self._currentTime: - return "" - windType = argList[0] - windVal = self.getStats(statDict,"Wind") - if windVal is None: - return "MM" - mag, dir = windVal - - # hurricane force - 64kt limits - fcstTime = timeRange.endTime().unixTime() - self._currentTime - if self._tropicalStorm and fcstTime > 24*3600 and mag >= 64: - return 'HU' - - if windType == "dir": - return self.convertDirection(dir) - else: - speed = round(mag) * 1.15 # convert to MPH - return `int(speed)` - - def _windCharValue(self, statDict, timeRange, argList): - # Returns wind character (speed characteristic), such as "WY" - # Can return "MM" for missing data, blanks if timeRange - # earlier than now. - if timeRange.endTime().unixTime() < self._currentTime: - return "" - value = self.getStats(statDict, "Wind") - if value is None: - return "MM" - maxV, dir = value - # convert to MPH - speed = maxV * 1.15 - - # hurricane force - fcstTime = timeRange.endTime().unixTime() - self._currentTime - if self._tropicalStorm and fcstTime > 24*3600 and speed >= 74: - return 'HU' - - # Convert the speed into a text value category. Table shows the - # max speed allowed for the symbol. - codes = [("LT", 8), ("GN", 15), ("BZ", 23), ("WY", 31), ("VW", 40), - ("SD", 74), ("HF", 300) - ] - for symbol, threshold in codes: - if speed < threshold: - return symbol - return "HF" - - def _skyValue(self, statDict, timeRange, argList): - # Returns string for sky condition, such as "SC" - # Can return "MM" for missing data, blanks if timeRange - # earlier than now - if timeRange.startTime().unixTime() < self._currentTime: - return "" - value = self.getStats(statDict, "Sky") - if value is None: - return "MM" - # Convert the sky cover percentage into a text value. 
- # Table shows the max cloud cover % for that symbol - codes = [("CL", 5), ("FW", 25), ("SC", 50), ("B1", 69), - ("B2", 87), ("OV", 200)] - for symbol, threshold in codes: - if value <= threshold: - return symbol - - def _createWxRows(self, fcst, periods, sampler, analysisList, - area, colWidth): - # Create the row(s) for weather types with codes as column values - # Create a dictionary of values (Snow, Rain, etc.) with - # a list of the periods in which they occurred - # Can return "MM" for missing data, blanks for period earlier than now - wxDict = {} - for period, label in periods: - - # Get the Statistics - statDict = self.getStatDict(sampler, analysisList, period, area) - - - # Wx - wxValues = self._getWxValues(statDict["Wx"]) - for wxVal, covCode in wxValues: - if wxDict.has_key(wxVal): - wxDict[wxVal].append((period, covCode)) - else: - wxDict[wxVal] = [(period, covCode)] - - # Create a row for each weather value in the dictionary - sortedKeys = wxDict.keys() - sortedKeys.sort() - for wxVal in sortedKeys: - if wxVal == "": - continue - fcst = self.addRowLabel(fcst, wxVal, self._rowLabelWidth) - values = wxDict[wxVal] - - # Add a column for each period - for x in xrange(len(periods)): - period, label = periods[x] - if type(colWidth) is types.ListType: - width = colWidth[x] - else: - width = colWidth - value = "" - if period.startTime().unixTime() >= self._currentTime: - for dictPeriod, covCode in values: - if period == dictPeriod: - value = covCode - fcst = self.addColValue(fcst, value, width) - fcst = fcst + "\n" - return fcst - - def _getWxValues(self, subkeyList): - if subkeyList is None: - return "" - wxValues = [] - for x in xrange(len(subkeyList)): - wxKey = subkeyList[x] - wxValue = "" - wxCov = "" - wxType = wxKey.wxType() - # The following table relates GFE weather types to words used - # in the AFM/PFM. 
- codes = [("R","Rain"), ("RW", "Rain shwrs"), ("T","Tstms"), - ("L","Drizzle"), ("S", "Snow"), ("SW", "Snowshwrs"), - ("IP", "Sleet"), ("ZR", "Frzg rain"), - ("ZL", "Frzg drzl") - ] - - # These special codes are used for very light intensities. - veryLightCodes = [("RW","Sprinkles"), ("SW", "Flurries")] - for gfecode, symbol in veryLightCodes: - if wxType == gfecode and wxKey.intensity() == "--": - wxValue = symbol - break - if wxValue == '': - for gfecode, symbol in codes: - if wxType == gfecode: - wxValue = symbol - break - - # determine the coverage codes - note that these are - # different for the AFM and the PFM (since PFM is a point) - coverage = wxKey.coverage() - - - # The following table relates the gfe coverage/probability code - # with the AFM coverage/probability code. - afmCodes = { - 'T': {"Iso": "IS", "Sct": "SC", "Num": "NM", "Wide": "EC", - "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", - "Def": "D", 'Frq': "O", 'Brf': "O", 'Pds': "O", - 'Inter': "O"}, - 'R': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", - 'Pds': "O", 'Inter': "O"}, - 'RW': {"Iso": "IS", "Sct": "SC", "Num": "NM", "Wide": "EC", - "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", - "Def": "D", 'Frq': "O", 'Brf': "O", 'Pds': "O", - 'Inter': "O"}, - 'L': {"Wide": "WD", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", "Areas": "AR", - "Patchy": "PA", 'Frq': "O", 'Brf': "O", 'Pds': "O", - 'Inter': "O"}, - 'ZL': {"Wide": "WD", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", "Areas": "AR", - "Patchy": "PA", 'Frq': "O", 'Brf': "O", 'Pds': "O", - 'Inter': "O"}, - 'ZR': {"Wide": "WD", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", - 'Pds': "O", 'Inter': "O"}, - 'S': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", - 'Pds': "O", 'Inter': "O"}, - 'SW': {"Iso": "IS", "Sct": "SC", "Num": "NM", "Wide": "EC", 
- "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", - "Def": "D", 'Frq': "O", 'Brf': "O", 'Pds': "O", - 'Inter': "O"}, - 'IP': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", - 'Pds': "O", 'Inter': "O"} - } - - # The following table relates the gfe coverage/probability code - # with the PFM coverage/probability code. Note that some codes - # for the pfm have been remapped to probability terms due to - # the point nature of the pfm. - pfmCodes = { - 'T': {"Iso": "S", "Sct": "C", "Num": "L", "Wide": "O", - "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", - "Def": "D", 'Frq': "O", 'Brf': "O", - "Pds": "O", 'Inter': "O"}, - 'R': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", - "Pds": "O", 'Inter': "O"}, - 'RW': {"Iso": "S", "Sct": "C", "Num": "L", "Wide": "O", - "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", - "Def": "D", 'Frq': "O", 'Brf': "O", - "Pds": "O", 'Inter': "O"}, - 'L': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", "Areas": "AR", "Patchy": "O", - 'Frq': "O", 'Brf': "O", "Pds": "O", 'Inter': "O"}, - 'ZL': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", "Areas": "AR", "Patchy": "O", - 'Frq': "O", 'Brf': "O", "Pds": "O", 'Inter': "O"}, - 'ZR': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", - "Pds": "O", 'Inter': "O"}, - 'S': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", - "Pds": "O", 'Inter': "O"}, - 'SW': {"Iso": "S", "Sct": "C", "Num": "L", "Wide": "O", - "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", - "Def": "D", 'Frq': "O", 'Brf': "O", - "Pds": "O", 'Inter': "O"}, - 'IP': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", - "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", - "Pds": "O", 'Inter': "O"} - } - - - # now map the codes - if self._productType == "AFM": 
- if afmCodes.has_key(wxType) and \ - afmCodes[wxType].has_key(coverage): - wxCov = afmCodes[wxType][coverage] - else: - wxCov = "?" - elif self._productType == "PFM": - if pfmCodes.has_key(wxType) and \ - pfmCodes[wxType].has_key(coverage): - wxCov = pfmCodes[wxType][coverage] - else: - wxCov = "?" - - if wxValue != "": - wxValues.append((wxValue, wxCov)) - - return wxValues - - - def _createWWARows(self, fcst, periods, editAreaList, colWidth): - # Create the row(s) for WWAs with codes as column values - # Create a dictionary of values (TO.A, SV.A, etc.) with - # a list of the periods in which they occurred - # Blanks for period earlier than now - hazards = self._hazards.getHazardList(editAreaList) - allowedActions = ['NEW','CON','EXT','EXA','EXB'] - - wwaDict = {} - for period, label in periods: - - # filter out records to this time period - hazRecords = [] - for h in hazards: - if h['act'] in allowedActions and \ - self.__overlaps((h['startTime'],h['endTime']), - (period.startTime().unixTime(),period.endTime().unixTime())): - hazRecords.append(h) - - # Hazards - create the row data - wwaValues = self._getWWAValues(hazRecords) - for wwaVal, sigfCode in wwaValues: - if wwaDict.has_key(wwaVal): - wwaDict[wwaVal].append((period, sigfCode)) - else: - wwaDict[wwaVal] = [(period, sigfCode)] - - # Create a row for each hazard value in the dictionary - sortedKeys = wwaDict.keys() - sortedKeys.sort() - for wwaVal in sortedKeys: - fcst = self.addRowLabel(fcst, wwaVal, self._rowLabelWidth) - values = wwaDict[wwaVal] - - # Add a column for each period - for x in xrange(len(periods)): - period, label = periods[x] - if type(colWidth) is types.ListType: - width = colWidth[x] - else: - width = colWidth - value = "" - if period.startTime().unixTime() >= self._currentTime: - for dictPeriod, sigfCode in values: - if period == dictPeriod: - value = sigfCode - fcst = self.addColValue(fcst, value, width) - fcst = fcst + "\n" - return fcst - - def _getWWAValues(self, hazRecs): - 
wwaValues = [] - - # The following table relates VTEC phens with row labels - # in the AFM/PFM. - codes = { - "AF":"Ashfall", "AS":"Air Stag", "BS":"Blowing Snow", - "BW":"Brisk Wind", "BZ":"Blizzard", "CF":"Coast Flood", - "DU":"Blowing Dust", "EC":"Extreme Cold", - "EH":"Excess Heat", "FA":"Flood", "FF":"Flash Flood", - "FG":"Dense Fog", "FL": "Flood", "FR":"Frost", "FZ":"Freeze", - "HF":"Hurr Frc Wnd", "HI":"Inland Hurr", "HS":"Heavy Snow", - "HT":"Heat", "HU":"Hurricane", "HW":"High Wind", "HZ": "Hard Freeze", - "IP":"Sleet", "IS":"Ice Storm", "LB":"LkEff SnBlSn", - "LE":"Lk Eff Snow", "LS":"Lkshore Fld", "UP":"Ice Accre", - "LW":"Lake Wind", "SB":"Snow BloSnow", "SM":"Dense Smoke", - "SN":"Snow", "SU":"High Surf", "SV":"Svr Tstorm", - "TI":"Inl Trp Strm", "TO":"Tornado", "TR":"Trop Storm", - "TS":"Tsunami", "TY":"Typhoon", "WC":"Wind Chill", "WI":"Wind", - "WS":"Winter Storm", "WW":"Winter Weath", "ZF":"Freezing Fog", - "ZR":"Frzng Rain", "FW.W": "Red Flag", "FW.A": "Fire Weather", - "ZL":"Frzg Drzl"} - - for rec in hazRecs: - phen = rec['phen'] - sig = rec['sig'] - phensig = rec['phen'] + '.' + rec['sig'] - if codes.has_key(phen): - wwaValues.append((codes[phen], sig)) - elif codes.has_key(phensig): - wwaValues.append((codes[phensig], sig)) - - return wwaValues - - def _createObVisRows(self, fcst, periods, sampler, analysisList, - area, colWidth): - # creates and adds to "fcst" the OBVIS row. OBVIS row included only - # if there are OBVIS present. - # determine the obvis values for each period - obvisValues = [] - for period, label in periods: - - # Get the Statistics - statDict = self.getStatDict(sampler, analysisList, period, area) - - # Wx - obvisVal = self._getObVisValues(statDict["Wx"]) - if period.startTime().unixTime() >= self._currentTime: - obvisValues.append(obvisVal) - else: - obvisValues.append("") # period earlier than now so blank it - - # Any OBVIS values at all? 
- any = 0 - for o in obvisValues: - if o != "": - any = 1 - break; - if any == 0: # no OBVIS, so skip the row - return fcst - - fcst = self.addRowLabel(fcst, "Obvis", self._rowLabelWidth) - for x in xrange(len(obvisValues)): - if type(colWidth) is types.ListType: - width = colWidth[x] - else: - width = colWidth - fcst = fcst + string.rjust(obvisValues[x], width) - fcst = fcst + "\n" - return fcst - - def _getObVisValues(self, subkeyList): - # Returns the obvis code given the weather subkey list - if subkeyList is None: - return "" - wxValues = [] - for x in xrange(len(subkeyList)): - wxKey = subkeyList[x] - wxInten = wxKey.intensity() - wxCov = wxKey.coverage() - wxType = wxKey.wxType() - - # Various types of Fog - if wxType in ["F", "IF", "ZF"]: - if wxInten == "+": - if wxCov == "Patchy": - return "PF+" - else: - return "F+" - elif wxCov == "Patchy": - return "PF" - else: - return "F" - - # Other obvis checks - else: - # The following table relates gfe wx type codes to AFM/PFM - # type codes. 
- codes = [("H","H"), ("BS", "BS"), ("K","K"), ("BD", "BD"), - ("VA", "AF")] - for gfecode, symbol in codes: - if wxType == gfecode: - return symbol - - - return "" - - def _calcPeriodLabels(self, periods, colWidth, startPoint, intervalHours, - timeZone): - # Calculate the period labels and returns as (date, utc, lt) strings - #Date THU 08/01/02 FRI 08/02/02 - #UTC 3hrly 09 12 15 18 21 00 03 06 09 12 15 18 21 00 03 06 - #MDT 3hrly 03 06 09 12 15 18 21 00 03 06 09 12 15 18 21 00 - - # determine the column widths - colWidths = [] - if type(colWidth) is types.ListType: - colWidths = colWidth - else: - for p in periods: - colWidths.append(colWidth) - - # calculate the zulu labels - zuluLabels = [] - for period,label in periods: - zuluLabels.append(self._hour24zuluLabel(period)) - - # zulu string - zulu = "UTC " + `intervalHours` + "hrly " - zulu = string.ljust(zulu, startPoint) - for x in xrange(len(zuluLabels)): - zulu = self.addColValue(zulu, zuluLabels[x], colWidths[x]) - - # set the time zone - prevTZ = os.environ['TZ'] - os.environ['TZ'] = timeZone - time.tzset() - - # date and LT string (beginning) - dateS = string.ljust('Date', startPoint) - ltZone = time.strftime("%Z",time.localtime(self._currentTime)) - lt = string.ljust(ltZone, 4) + `intervalHours` + "hrly " - lt = string.ljust(lt, startPoint) - - # remainder of Date and LT strings - dayOfMonthProcessed = None - for x in xrange(len(periods)): - timePeriod, label = periods[x] - hour = int(label) - - # 00LT is considered the end of the day for labeling, so subtract 1 - dayOfMonth = (timePeriod.startTime() - 1 + self.determineShift()).day - - # add in local time string - prevLTlen = len(lt) - lt = self.addColValue(lt, label, colWidths[x]) - - # calculate amount of room to write data - colAvail = 0 - for y in xrange(x+1,len(periods)): - colAvail = colAvail + colWidths[y] - - # handle the Date string - if dayOfMonth != dayOfMonthProcessed: - if intervalHours == 3: #top section - if colAvail < 3: - format = "%a" - 
elif hour < 6: - continue # label too early in the day - elif hour >= 6 and hour < 15: - format = "%a %m/%d/%y" - elif hour >= 15 and hour < 19: - format = " %m/%d/%y" - else: - continue #not enough remaining room - elif intervalHours == 6: #bottom section - if colAvail < 3: - format = "%a" - elif hour < 1: - continue # label too early in the day - elif hour >= 1 and hour < 7: - format = "%a %m/%d/%y" - elif hour >= 7 and hour < 13: - format = " %m/%d/%y" - elif hour >= 13 and hour < 17: - format = " %m/%d" - else: - continue #not enough remaining room - else: - raise Exception, "Expected 3 or 6 intervalHours" - - index = 0 - nfill = prevLTlen - len(dateS) -1 + colWidths[x] - 1 - dateS = dateS + string.ljust(' ',nfill) - dayTime = timePeriod.startTime() + self.determineShift() - dString = dayTime.stringFmt(format) - dateS = dateS + dString - - dayOfMonthProcessed = dayOfMonth #mark this date processed - - - # reset time zone - os.environ['TZ'] = prevTZ - time.tzset() - - return (dateS, zulu, lt) - - def _latlonCheck(self, latlon): - # Routine checks the latlon format for PFM. Returns true if ok. - # Format must be ll.llN lll.llW - if len(latlon) != 14: - return 0 - if latlon[5] != 'S' and latlon[5] != 'N': - return 0 - if latlon[13] != 'W' and latlon[13] != 'E': - return 0 - if latlon[2] != '.' or latlon[10] != '.': - return 0 - digits = [0,1,3,4,8,9,11,12] - for d in digits: - if not latlon[d].isdigit(): - return 0 - if latlon[7] != ' ' and latlon[7] != "1": - return 0 - return 1 - - - - # Returns a list of the Hazards allowed for this product in VTEC format. - # These are sorted in priority order - most important first. 
- #### Removed inland tropical hazards in OB9.3 - def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CON"] - return [ - ('HU.W', allActions, 'Tropical'), # HURRICANE WARNING - ('TY.W', allActions, 'Tropical'), # TYPHOON WARNING - ('TR.W', allActions, 'Tropical1'), # TROPICAL STORM WARNING - ('HU.A', allActions, 'Tropical'), # HURRICANE WATCH - ('TY.A', allActions, 'Tropical'), # TYPHOON WATCH - ('TR.A', allActions, 'Tropical1'), # TROPICAL STORM WATCH - ('HF.W', allActions, 'Marine'), # HURRICANE FORCE WIND WARNING - ('BZ.W', allActions, 'WinterWx'), # BLIZZARD WARNING - ('IS.W', allActions, 'WinterWx'), # ICE STORM WARNING - ('LE.W', allActions, 'WinterWx'), # LAKE EFFECT SNOW WARNING - ('WS.W', allActions, 'WinterWx'), # WINTER STORM WARNING - ('WW.Y', allActions, 'WinterWx'), # WINTER WEATHER ADVISORY - ('WS.A', allActions, 'WinterWx'), # WINTER STORM WATCH - ('WC.W', allActions, 'WindChill'), # WIND CHILL WARNING - ('WC.Y', allActions, 'WindChill'), # WIND CHILL ADVISORY - ('WC.A', allActions, 'WindChill'), # WIND CHILL WATCH - ('DU.W', allActions, 'Dust'), # BLOWING DUST WARNING - ('DU.Y', allActions, 'Dust'), # BLOWING DUST ADVISORY - ('EC.W', allActions, 'Cold'), # EXTREME COLD WARNING - ('EC.A', allActions, 'Cold'), # EXTREME COLD WATCH - ('EH.W', allActions, 'Heat'), # EXCESSIVE HEAT WARNING - ('EH.A', allActions, 'Heat'), # EXCESSIVE HEAT WATCH - ('HT.Y', allActions, 'Heat'), # HEAT ADVISORY - ('FG.Y', allActions, 'Fog'), # DENSE FOG ADVISORY - ('HZ.W', allActions, 'FrostFreeze'), # HARD FREEZE WARNING - ('FZ.W', allActions, 'FrostFreeze'), # FREEZE WARNING - ('FR.Y', allActions, 'FrostFreeze'), # FROST ADVISORY - ('HZ.A', allActions, 'FrostFreeze'), # HARD FREEZE WATCH - ('FZ.A', allActions, 'FrostFreeze'), # FREEZE WATCH - ('HW.W', allActions, 'Wind'), # HIGH WIND WARNING - ('WI.Y', allActions, 'Wind'), # WIND ADVISORY - ('LW.Y', allActions, 'Wind'), # LAKE WIND ADVISORY - ('HW.A', allActions, 'Wind'), # HIGH WIND WATCH - ('SM.Y', 
allActions, 'Smoke'), # DENSE SMOKE ADVISORY - ('ZF.Y', allActions, 'FreezeFog'), # FREEZING FOG ADVISORY - ('FF.A', allActions, 'Flood'), # FLASH FLOOD WATCH - ('FA.A', allActions, 'Flood'), # FLOOD WATCH - ('FA.W', allActions, 'Flood'), # FLOOD WARNING - ('FA.Y', allActions, 'Flood'), # FLOOD ADVISORY - ('CF.W', allActions, 'CoastalFlood'), # COASTAL FLOOD WARNING - ('LS.W', allActions, 'CoastalFlood'), # LAKESHORE FLOOD WARNING - ('CF.Y', allActions, 'CoastalFlood'), # COASTAL FLOOD ADVISORY - ('LS.Y', allActions, 'CoastalFlood'), # LAKESHORE FLOOD ADVISORY - ('CF.A', allActions, 'CoastalFlood'), # COASTAL FLOOD WATCH - ('LS.A', allActions, 'CoastalFlood'), # LAKESHORE FLOOD WATCH - ('UP.W', allActions, 'IceAcc'), # ICE ACCREATION WARNING - ('UP.Y', allActions, 'IceAcc'), # ICE ACCREATION ADVISORY - ('AS.Y', allActions, 'AirStag'), # AIR STAGNATION ADVISORY - ('AS.O', allActions, 'AirStag'), # AIR STAGNATION OUTLOOK - ('SU.W', allActions, 'HighSurf'), # HIGH SURF WARNING - ('SU.Y', allActions, 'HighSurf'), # HIGH SURF ADVISORY - ('AF.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY - ('TO.A', allActions, 'Convective'), # TORNADO WATCH - ('SV.A', allActions, 'Convective'), # SEVERE THUNDERSTORM WATCH - ] - - ######################################################################## - # UTILITY FUNCTIONS - ######################################################################## - # time contains, if time range (tr) contains time (t), return 1 - def __containsT(self, tr, t): - return (t >= tr[0] and t < tr[1]) - - # time overlaps, if tr1 overlaps tr2 (adjacent is not an overlap) - def __overlaps(self, tr1, tr2): - if self.__containsT(tr2, tr1[0]) or self.__containsT(tr1, tr2[0]): - return 1 - return 0 - - - ######################################################################## - # OVERRIDING THRESHOLDS AND VARIABLES - ######################################################################## - - def getDefaultPercentage(self, parmName): - return 5.0 - - +# 
Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# File Name: PFM.py +# Description: This product creates an AFM or PFM table containing +# times across the top and weather element as rows. Depending on +# the Local file, the user can generate an AFM or PFM. User can control +# the point at which entries are displayed as ranges vs. single values, +# and can optionally include Heat Index and Wind Chill. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# PFM, AFM___Definition, AFM__Overrides +# PFM___Definition, PFM__Overrides +#------------------------------------------------------------------------- +# User Configurable Variables: +# Definition Section: +# displayName If not None, defines how product appears in GFE GUI +# +# fullStationID Full station identifier, 4 letter, such as KSLC +# +# wfoCityState Identifier for the location of your WFO, such as +# "Cleveland, Ohio" +# +# wmoID WMO ID for product header, such as "FOUS51" +# +# pil Product pil, such as "AFMBOS" +# +# productType Product Type, must be "AFM" or "PFM" +# +# editAreaSuffix default None. For AFM only. Allows for generating the body of the product for +# an edit area that is a subset (e.g. population areas) of the +# edit areas specified in the defaultEditAreas. 
So given the edit area, +# "COZ035" and the editAreaSuffix is "_pt", then the edit area that +# will be sampled and reported for the body of the product will be +# "COZ035_pt". If no such edit area exists, the system will simply +# use the original edit area. +# Note that Hazards will always be generated for the entire edit area. +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined or the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# defaultEditAreas defines edit area names, ugc code and area descriptor +# (editAreaName, "ugc1\nareaDescription1") +# areaDictionary area dictionary format defining geographical codes +# (used if combinations file is specified in +# default edit areas) +# +# tempRangeThreshold If the data range of temperature exceeds this threshold, +# then a range of temperatures will be output rather +# than a single value. +# +# qpfRangeThreshold If the data range of QPF exceeds this threshold, +# then a range of qpf values will be output rather +# than a single value. +# +# snowRangeThreshold If the data range of snow exceeds this threshold, +# then a range of snow values will be output rather +# than a single value. +# +# includeSnowAmt Set to 1 to include SnowAmt, 0 to exclude. +# +# includeHeatIndex Set to 1 to include HeatIndex, 0 to exclude. 
Heat +# Index is only included when Heat Index grids are +# present, this option is 1, and the Heat Index +# thresholds are exceeded. +# +# heatIndexDifference Defines the threshold HeatIndex-T before reporting +# heat index. Heat Index must be this many degrees +# above the temperature before a heat index is reported. +# +# heatIndexLimit Defines the absolute threshold below which no +# heat index temperatures will be reported. +# +# includeWindChill Set to 1 to include WindChill, 0 to exclude. Wind +# Chill is only included when the Wind Chill grids +# are present, this option is 1, and the Wind Chill +# thresholds are exceeded. +# +# windChillDifference Defines the threshold T-WindChill before reporting +# wind chill. WindChill must be this many degrees +# below the temperature before a wind chill is reported. +# +# windChillLimit Defines the absolute threshold above which no +# wind chill temperatures will be reported. +# +# separateByTimeZone Normally set to "effectiveTZ" to ensure that zones +# combined in the product all have the same effective +# timezone, e.g., EST5, EST5EDT are considered the same +# in the winter since the effective tz is EST5. +# Can also be None to not separate out +# zones, or set to "actualTZ" to separate by actual +# TZ environment variable, e.g., EST5, EST5EDT are +# considered different in the winter. +# +#------------------------------------------------------------------------- +# Weather Elements Needed: +# To 66 hours: WindGust, QPF, SnowAmt, HeatIndex (opt.), WindChill (opt.). +# To 7 days: MaxT, MinT, T, Td, Wind, Sky, PoP, and Wx. +#------------------------------------------------------------------------- +# Edit Areas Needed: Typically area or point edit areas are required, +# depending upon whether an AFM or PFM is being created. 
The +# required format of the edit area naming string for the AFM is: +# (editAreaName, "areaName\nareaDescriptor") +# The required format for the PFM is: +# (editAreaName, "ugc code\npoint description\nyy.yyN xxx.xxW") +# where the editAreaName is the name of the sampling edit area, +# areaName is the UGC codes for the area, such as "COZ043" +# areaDescriptor is the descriptive name of that area. +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: None +#------------------------------------------------------------------------- +# Component Products: None +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +#------------------------------------------------------------------------- +# Additional Information: +# Sampling is complicated in this product since the column headers mean +# different things to different weather elements. In some cases, the +# column header denotes the ending time, in other cases, the starting +# time. There are 3hr, 6hr, and 12hr elements in the top portion of +# the product, and 6hr and 12hr elements in the extended (bottom) portion +# of the product. +# +# If HeatIndex is enabled, the HeatIndex row (and values) will only appear +# if the threshold values are exceeded from the HeatIndex grid. If +# HeatIndex appears, then Max Heat will appear in the output. +# +# If WindChill is enabled, the WindChill row (and values) will only appear +# if the threshold values are exceeded from the WindChill grid. If +# Wind Chill appears, then Min Chill will appear in the output. +# +# WindGust will only appear if WindGust grids are present and the wind +# gust values exceed the wind values by at least 10mph. +# +# Fields are blanked in the output if those times are before the +# product issuance time. 
+# +# Note that no headers will appear for Day 3 on the Afternoon issuance due to +# inadequate space. +# +#------------------------------------------------------------------------- +# Example Output: +# +## FOUS51 KRLX 311833 +## AFMRLX + +## AREA FORECAST MATRICES +## NATIONAL WEATHER SERVICE CHARLESTON WV +## 1233 PM MDT WED JUL 31 2002 + +## Area 1-010333 +## 1233 PM MDT WED JUL 31 2002 + +## Date Wed 07/31/02 Thu 08/01/02 Fri 08/02/02 +## UTC 3hrly 09 12 15 18 21 00 03 06 09 12 15 18 21 00 03 06 09 12 15 18 21 00 +## MDT 3hrly 03 06 09 12 15 18 21 00 03 06 09 12 15 18 21 00 03 06 09 12 15 18 + +## Max/Min 92 94 96 62 64 66 77 79 81 61 63 65 81 83 85 +## Temp 81 75 86 94 74 76 73 69 66 64 67 72 76 79 76 70 66 63 67 73 79 83 +## Dewpt 34 32 29 26 52 52 52 51 51 50 49 49 49 49 50 51 52 53 52 50 49 48 +## RH 18 20 12 8 46 43 47 52 58 60 52 44 38 34 40 50 60 69 58 44 34 29 +## Wind dir NW NW NW N S SE SE S S S S S S SE S S S S S S S S +## Wind spd 3 6 5 5 8 10 11 10 11 11 11 10 12 12 13 14 14 13 14 14 13 13 +## Wind gust +## Clouds CL CL CL CL MM OV OV OV SC SC SC FW FW FW SC SC BK OV BK SC FW FW +## PoP 12hr 0 0 28 0 16 +## QPF 12hr 0.04 0.00 0.03 0.00 0.00 +## Snow 12hr 00-00 00-00 00-00 00-00 00-00 +## Rain shwrs SC SC +## Rain WP WP WP WP + +## Date Sat 08/03/02 Sun 08/04/02 Mon 08/05/02 Tue 08/06/02 +## UTC 6hrly 06 12 18 00 06 12 18 00 06 12 18 00 06 12 18 00 06 +## MDT 6hrly 00 06 12 18 00 06 12 18 00 06 12 18 00 06 12 18 00 + +## Max/Min 65 94 64 79 63 83 65 87 +## Temp 74 65 72 76 69 64 72 79 70 63 73 83 74 65 72 87 +## Dewpt 49 50 48 52 51 50 49 49 51 53 50 48 49 50 48 45 +## PWind dir S S S S S S S S +## Wind char BZ BZ GN GN BZ BZ BZ BZ +## Avg clouds FW FW SC SC BK BK FW FW SC SC SC SC FW SC SC +## PoP 12hr 0 1 0 28 0 16 0 1 +## Rain shwrs SC +## Rain WP + +## $$ + +########################################################################### + +import TextRules +import SampleAnalysis +import LogStream +import string, time, types, os +import TimeRange, 
AbsTime + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + (("Product Issuance", "productIssuance"), "Morning", "radio", + ["Morning","Afternoon"]), + # (("Tropical Storm Winds", "tropicalStorm"), "NO", "radio", ["NO", "YES"]), + ] + Definition = { + "type": "smart", + "displayName": "None", + # Source database for product. Can be "Official", "Fcst" or "ISC" + "database": "Official", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/PFM_.txt", + "debug": 0, + + "defaultEditAreas" : "Combinations_PFM__", + "editAreaSuffix": None, + # Name of map background for creating Combinations + "mapNameForCombinations": "Zones_", + + "runTimeEditAreas" : "no", # if yes, ask user at run time + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + + # Product-specific variables: + "tempRangeThreshold": 5, # determines range vs. single value output + "qpfRangeThreshold": 0.05, # determines range vs. single value output + "snowRangeThreshold": 3, # determines range vs. 
single value output + + # Options for product + "includeSnowAmt": 1, # set to 1 to include snow amount in output + "includeHeatIndex": 1, # set to 1 to include heat index in output + "includeWindChill": 1, # set to 1 to include wind chill in output + "windChillDifference": 5, # T-WindChill difference before reporting + "windChillLimit": 40, # don't report wind chills above this value + "heatIndexDifference": 0, # indicates HI-T difference before reporting + "heatIndexLimit": 80, # don't report heat index below this value + + # Area Dictionary - Descriptive info about zones (AFM only) + "areaDictionary": "AreaDictionary", + + # Identifiers for product + "fullStationID": "", # full station identifier (4letter) + "wfoCityState": "", # city,state of wfo for header + "wmoID": "", # WMO ID code + "pil": "", # product pil + "productType": "PFM", # Denotes product type, "AFM", or "PFM" + + # purge time + "fixedExpire": 1, #ensure VTEC actions don't affect segment time + + "separateByTimeZone": "effectiveTZ", #separate segments based on tz + + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def generateForecast(self, argDict): + # Generate formatted product for a list of edit areas + + # get current time + self._currentTime = argDict['creationTime'] + + # Get variables from varDict and Definition + self._getVariables(argDict) + + # Get the areaList -- derived fromEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges, for each possible time zone + self._areaTZ = self.getAreaTZ(argDict, self._areaList) #all TimeZones + tzDictTR = {} + for key in list(self._areaTZ.keys()): + tz = self._areaTZ[key] + if tz not in tzDictTR: + tzDictTR[tz] = self._determineTimeRanges(argDict, tz) + self._determineZuluTimeRanges(argDict, tzDictTR) + + + # Sample the data. + sampleInfo = [] + # determine all of the areas using the same time zone + for timeZone in list(tzDictTR.keys()): + areasInTimeZone = [] + for area in self._areaList: + areaLabel = area[1] #(editArea, areaLabel) + if self._areaTZ[areaLabel] == timeZone: + areasInTimeZone.append(area) + sampleInfo += self._sampleData(argDict, tzDictTR[timeZone], + areasInTimeZone) + self._sampler = self.getSampler(argDict, sampleInfo) + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + tz = self._areaTZ[areaLabel] + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict, + tzDictTR[tz]['timeLabel']) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict, + tzDictTR[tz], tz) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Set up product-specific variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + try: + if 
self._tropicalStorm == "YES": + self._tropicalStorm = 1 + else: + self._tropicalStorm = 0 + except: + self._tropicalStorm = 0 + + + self._lineLength = 66 #only will apply to UGC header + + # Basic widths for product + self._rowLabelWidth = 13 #width of row label + self._top3hrWidth = 3 #top part of product, 3hrly width + self._top6hrWidth = 6 #top part of product, 6hrly width + self._top12hrWidth = 12 # top part of product, 12hrly width + self._bottom6hrWidth = 3 # bottom part, 6hr width + self._bottom12hrWidth = 6 # bottom part, 12hr width + self._extraDayWidth = 2 # extra spaces between days, bottom part + + + def getAreaTZ(self, argDict, areaList): + #gets the list of areas and their time zones for the product + #the areas are defined by the "areaList" and aren't the actual + #zones UGCs, e.g., "Combo0" and not "FLZ050". Only 1 time zone + #is permitted in the area. Returns the time zone environmental + #variable, e.g., EST5EDT, with the effective TZ, e.g., EST, in a + #dictionary keyed by the areaLabel. + import ModuleAccessor + accessor = ModuleAccessor.ModuleAccessor() + areaDict = accessor.variable(self._areaDictionary, "AreaDictionary") + tzDir = {} + localTZ = os.environ['TZ'] #current WFO time zone + localTZid = time.strftime("%Z", time.localtime(argDict['creationTime'])) + for editArea, areaLabel in areaList: + areas = self.getCurrentAreaNames(argDict, areaLabel) #get areas + tzs = [] + for area in areas: + #extract 1st string out for PFM. The AFM is in correct format. 
+ if self._productType == "PFM": + areaStrings = string.split(area, '\n') + area = areaStrings[0] #1st line is the UGC code + try: + zoneTZ = areaDict[area]['ugcTimeZone'] + prevTZ = os.environ['TZ'] + os.environ['TZ'] = zoneTZ + time.tzset() + tzid = time.strftime("%Z", + time.localtime(argDict['creationTime'])) + os.environ['TZ'] = prevTZ + time.tzset() + except: + zoneTZ = localTZ + tzid = localTZid + LogStream.logEvent("WARNING: Entry " + area + + " does not have time zone info in AreaDictionary. Using default time zone.", LogStream.exc()) + + if (zoneTZ, tzid) not in tzs: + tzs.append((zoneTZ, tzid)) + #should only be 1 effective time zone at this point. + if len(tzs) == 0: + tzDir[areaLabel] = localTZ #force localTZ + elif len(tzs) == 1: + tzDir[areaLabel] = tzs[0][0] #use the 1 time zone + else: + tzid = tzs[0][1] #1st one, get the effective id + for x in range(1, len(tzs)): + if tzs[x][1] != tzid: + LogStream.logProblem(\ + "WARNING: Multiple Effective Time Zones in segment." + + str(tzs) + " for " + str(areas) + " -- using first one.") + tzDir[areaLabel] = tzs[0][0] #use the 1 time zone + + return tzDir + + def _determineZuluTimeRanges(self, argDict, tzDictTR): + # Determine time ranges that deal with Zulu-time, i.e., not + # specific times. The tzDictTR is the group of dictionaries + # returned from determineTimeRanges(). 
+ + ################################## + # Setup Time Labels + ################################## + # Sets up the product's time labels + + self._ddhhmmTime = time.strftime("%d%H%M", + time.gmtime(self._currentTime)) + + # Sets up the expiration time + self._expireTime, self._ddhhmmTimeExpire = \ + self.getExpireTimeFromLToffset(self._currentTime, + self.expireOffset(), "") + + # time label for the MND header + self._mndTimeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + + # the overall time range + earliest = None + latest = None + for key in list(tzDictTR.keys()): + d = tzDictTR[key] + if earliest is None: + earliest = d['top_3hr'][0][0].startTime() + latest = d['bottom_12hr'][-1][0].endTime() + else: + earliest = min(d['top_3hr'][0][0].startTime(),earliest) + latest = max(d['bottom_12hr'][-1][0].endTime(),latest) + + self._timeRange = TimeRange.TimeRange(earliest, latest) + argDict["productTimeRange"] = self._timeRange + + def _shortenPeriods(self, periods): + #Shorten the period containing the current time so that we don't + #sample the entire period. Periods is from getPeriods(), returns + #modified version. + thisHour = int(self._currentTime / 3600) * 3600 #truncated to hh:00 + for x in range(len(periods)): + tr, label = periods[x] + startT = tr.startTime().unixTime() + endT = tr.endTime().unixTime() + if startT < thisHour and thisHour < endT: + tr = TimeRange.TimeRange(AbsTime.AbsTime(thisHour), AbsTime.AbsTime(endT)) + periods[x] = (tr, label) + elif thisHour < startT: + break + return periods + + def _creTR(self, baseTime, offset): + # creates and returns a python TimeRange, based on the AbsTime baseTime, + # and the offset (in hours). The length of the TimeRange is one hour. 
+ # This is a substitute for the createTimeRange() in determineTimeRanges + startTime = baseTime + 3600*offset + return TimeRange.TimeRange(startTime, startTime + 3600) + + def _determineTimeRanges(self, argDict, timeZone): + # Determine time ranges for product - fairly complicated since + # multiple samplings and two sets of tables (short-term, long-term) + # Sets up: + # top_3hr - top row of AFM, 3 hourly LT periods + # top_3hr_snap - top row of AFM, 3 hourly LT periods/labels + # top_6hr - top row of AFM, 6 hourly LT periods + # top_6hrShort - top row of AFM, 6 hourly LT periods, start now + # top_12hr - top row of AFM, 12 hourly sample periods + # top_12hrShort - top row of AFM, 12 hourly sample periods, start now + # bottom_6hr - bottom row of AFM, 6 hourly LT periods + # bottom_6hr_snap - bottom row of AFM, 6 hourly, snapshots, labels + # bottom_12hr - bottom row of AFM, 12 hourly sample periods + # + # Returns dictionary for the particular timezone containing the + # above keys and values. + tzDict = {} + + # change the time zone + prevTZ = os.environ['TZ'] + os.environ['TZ'] = timeZone + time.tzset() + + # determine the optimal time for the zulu-based product columns, + # that most closely mirror 3AM LT and 3PM LT. The final baseTime is + # the column label for the first column. 
+ if self._productIssuance == "Afternoon": + #check for update for Afternoon issuance after midnight + if time.localtime(self._currentTime)[3] >= 4: + tr = self.createTimeRange(15, 15+1) #3PM target (not update) + else: + tr = self.createTimeRange(15-24, 15-24+1) #3PM target yesterday + else: + tr = self.createTimeRange(3, 3+1) #3AM target + + # determine offset between potential times, want 3z,9z,15,21z + baseTime = (((tr.startTime().unixTime() - 3600*3) + / 3600 / 6 ) * 3600*6) + 3600*3 + offsetFromBaseTime = ((((tr.startTime().unixTime() - 3600*3) + / 3600.0 / 6.0 ) * 3600*6) + 3600*3 - baseTime) / 3600 + offsetFromBaseTime = int(offsetFromBaseTime) + if offsetFromBaseTime > 3: #over halfway to next basetime + baseTime = baseTime + 3600*6 #move to next 6 hour increment + baseTime = AbsTime.AbsTime(baseTime) #convert to AbsTime + + # Set up the beginning Time Range - note they are different for the + # 3/6hrly, and the 12hrly sampling. Note that the 6hr is offset by + # 1 hour to allow the minChill/maxHeat to include the hour indicated + # by the column. Comments are for offsets for sampling periods from + # first column's labeled time. 
Length of created time range is 1 hour + topTimeRange3hr = self._creTR(baseTime, -3) #-3h to 0h + topTimeRange6hr = self._creTR(baseTime, -2) #-2h to 4h + topTimeRange3hrSnap = self._creTR(baseTime, 0) #0h to 1h + topTimeRange12hr = self._creTR(baseTime, 3) #3h to 15h + bottomTimeRange6hr = self._creTR(baseTime, -3+66) #63h to 66h + bottomTimeRange6hrSnap = self._creTR(baseTime, -3+72) #69h to 70h + bottomTimeRange12hr = self._creTR(baseTime, 3+60) #63h to 75h + + ################################## + # Set up 3hr, 6hr, and 12hr elements in top portion + ################################## + timePeriod = 3 + timeSpan = 3 + numPeriods = 22 + tzDict['top_3hr'] = self.getPeriods(topTimeRange3hr, timePeriod, + timeSpan, numPeriods) + timeSpan = 1 + tzDict['top_3hr_snap'] = self.getPeriods(topTimeRange3hrSnap, + timePeriod, timeSpan, numPeriods, + self._hour24localLabel) + timePeriod = 6 + timeSpan = 6 + numPeriods = 11 + tzDict['top_6hr'] = self.getPeriods(topTimeRange6hr, timePeriod, + timeSpan, numPeriods) + periods = self.getPeriods(topTimeRange6hr, timePeriod, + timeSpan, numPeriods) + tzDict['top_6hrShort'] = self._shortenPeriods(periods) + + + timePeriod = 12 + timeSpan = 12 + numPeriods = 5 + tzDict['top_12hr'] = self.getPeriods(topTimeRange12hr, + timePeriod, timeSpan, numPeriods) + periods = self.getPeriods(topTimeRange12hr, + timePeriod, timeSpan, numPeriods) + tzDict['top_12hrShort'] = self._shortenPeriods(periods) + + ################################## + # Set up 6hr and 12hr elements in bottom portion + ################################## + timePeriod = 6 + timeSpan = 6 + if self._productIssuance == "Morning": + numPeriods = 16 + else: + numPeriods = 18 + tzDict['bottom_6hr'] = self.getPeriods(bottomTimeRange6hr, + timePeriod, timeSpan, numPeriods) + timeSpan = 1 + tzDict['bottom_6hr_snap'] = self.getPeriods( + bottomTimeRange6hrSnap, + timePeriod, timeSpan, numPeriods, + self._hour24localLabel) + + timePeriod = 12 + timeSpan = 12 + if self._productIssuance 
== "Morning": + numPeriods = 8 + else: + numPeriods = 9 + tzDict['bottom_12hr'] = self.getPeriods(bottomTimeRange12hr, + timePeriod, timeSpan, numPeriods, + self._hour24localLabel) + + + # timeLabel is the spelled-out version of the current time + # for each UGC header + tzDict['timeLabel'] = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + + # restore the time zone + os.environ['TZ'] = prevTZ + time.tzset() + + return tzDict + + def _sampleData(self, argDict, tp, areas): + # Sample the data. (tp) Time Periods is a dictionary containing + # all of the time periods and categories to sample for a single time + # zone. areas are the areas that have this time zone. + # Different sampler lists are required due to different time periods + # and different elements. Sets up the following to sample: + # 3hr_top = 3hrly, top of product, 3 hr durations + # 3hr_snap_top = 3hrly, top of product, 1 hr duration + # 6hr_top = 6hrly, top of product, 6 hr durations + # 12hr_top = 12hrly, top of product + # 6hr_bottom = 6 hrly, bottom of product, 6 hr durations + # 6hr_snap_bottom = 6 hrly, bottom of product, 1 hr durations + # 12hr_bottom = 12hrly, bottom of product + + # the analysis method is called "self._analysisList_" plus the + # name of the sample period, which is the key. 
+ # Each entry: (analysisList, periods, areaList) + + + sampleInfo = [ + (self._analysisList_top_3hr(), tp['top_3hr'], areas), + (self._analysisList_top_3hr_snap(), tp['top_3hr_snap'], areas), + (self._analysisList_top_6hr(), tp['top_6hr'], areas), + (self._analysisList_top_6hrShort(), tp['top_6hrShort'], areas), + (self._analysisList_top_12hr(), tp['top_12hr'], areas), + (self._analysisList_top_12hrShort(), tp['top_12hrShort'], areas), + (self._analysisList_bottom_6hr(), tp['bottom_6hr'], areas), + (self._analysisList_bottom_6hr_snap(), tp['bottom_6hr_snap'], areas), + (self._analysisList_bottom_12hr(), tp['bottom_12hr'], areas), + ] + + return sampleInfo + + def _preProcessProduct(self, fcst, argDict): + # Add product heading to fcst string + if self._productType == "AFM": + productDescription = "Area Forecast Matrices" + else: + productDescription = "Point Forecast Matrices" + + issuedByString = self.getIssuedByString() + + productName = self.checkTestMode(argDict, + productDescription) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" + \ + "National Weather Service " + \ + self._wfoCityState +"\n" + \ + issuedByString + \ + self._mndTimeLabel + "\n\n" + fcst = fcst + s + + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict, timeLabel): + # extract out the ugc codes and the area descriptors + # AFM: combinations filename, or (ugc, descriptionlines) + # PFM: areaLabel length 4 (ugc, description, latlon, elevation) + areaStrings = string.split(areaLabel, '\n') + + #AFM setup with combinations file + if self._productType == "AFM" and len(areaStrings) == 1: + areaHeader = self.makeAreaHeader( + argDict, areaLabel, AbsTime.AbsTime(self._currentTime), + AbsTime.AbsTime(self._expireTime), self._areaDictionary, + self._defaultEditAreas) + + fcst = fcst + areaHeader + + + #No Combinations File or PFM product + else: + if self._productType 
== "PFM": + if len(areaStrings) != 4 and len(areaStrings) != 3: + raise SyntaxError(""" +PFM requires defaultEditArea format of (editAreaName, +ugcLine\\narea description line\\nll.llN lll.llW\\nElev +editAreaName is the name of the edit area for sampling +ugcLine is the ugc code representing the area, and is used for timezone info +area description line describes the area +ll.llN lll.llW is the latitude and longitude for the area +Elev is a string representing the station elevation, e.g., 423. +Found description: """ + areaLabel) + + latlon = areaStrings[2] + if not self._latlonCheck(latlon): + raise SyntaxError("PFM lat/lon format must be " +\ + "exactly ll.llN lll.llW found:" + latlon) + + ugcCode = areaStrings[0] + s = ugcCode + "-" + self._ddhhmmTimeExpire + "-\n" + + # descriptor lines, lat/lon lines, elevation + if len(areaStrings) == 4: + s += areaStrings[1] + "\n" + areaStrings[2] + " Elev. " + \ + areaStrings[3] + " ft" + "\n" + #old OB8.2 format + else: + s += areaStrings[1] + "\n" + areaStrings[2] + " Elev. " + \ + "???? ft" + "\n" + #log "old" format + LogStream.logProblem("WARNING: Old defaultEditArea format " + """ +PFM requires defaultEditArea format of (editAreaName, +ugcLine\\narea description line\\nll.llN lll.llW\\nElev +editAreaName is the name of the edit area for sampling +ugcLine is the ugc code representing the area, and is used for timezone info +area description line describes the area +ll.llN lll.llW is the latitude and longitude for the area +Elev is a string representing the station elevation, e.g., 423. 
+Found description: """ + areaLabel) + + fcst = fcst + s + timeLabel + "\n\n" + + # setup to handle the hazards + self._hazards = argDict['hazards'] + self._combinations = argDict['combinations'] + + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict, timePeriods, + timeZone): + + ############################################################### + # TOP PART OF PRODUCT - valid current time out to around 60hr) + ############################################################### + + # Day, Period Label (UTC), Period Label (LT) + dateLabel, utcLabel, ltLabel = self._calcPeriodLabels(\ + timePeriods['top_3hr_snap'], self._top3hrWidth, + self._rowLabelWidth, 3, timeZone) + fcst = fcst + dateLabel + "\n" + ltLabel + "\n" + utcLabel + "\n\n" + + # Create statLists + statList_3hr = self.getStatList( + self._sampler, self._analysisList_top_3hr(), + timePeriods['top_3hr'], editArea) + statList_3hr_snap = self.getStatList( + self._sampler, self._analysisList_top_3hr_snap(), + timePeriods['top_3hr_snap'], editArea) + statList_6hr = self.getStatList( + self._sampler, self._analysisList_top_6hr(), + timePeriods['top_6hr'], editArea) + statList_6hrShort = self.getStatList( + self._sampler, self._analysisList_top_6hrShort(), + timePeriods['top_6hrShort'], editArea) + statList_12hr = self.getStatList(self._sampler, + self._analysisList_top_12hr(), timePeriods['top_12hr'], editArea) + statList_12hrShort = self.getStatList(self._sampler, + self._analysisList_top_12hrShort(), timePeriods['top_12hrShort'], + editArea) + + # Max/Min + tpmaxmin = timePeriods['top_12hr'][0][0] + if self.getPeriod(tpmaxmin, shiftToLocal=1) == self.DAYTIME(): + label = "Max/Min" + else: + label = "Min/Max" + fcst=fcst+ self.makeRow( + label, self._top12hrWidth, timePeriods['top_12hr'], + statList_12hr, self._mxmnValue, [1], + self._rowLabelWidth, 18) + + # Temp + fcst=fcst+ self.makeRow( + "Temp", self._top3hrWidth, timePeriods['top_3hr_snap'], + statList_3hr_snap, self._tempValue, ['T'], 
self._rowLabelWidth) + + # DewPt + fcst=fcst+ self.makeRow( + "Dewpt", self._top3hrWidth, timePeriods['top_3hr_snap'], + statList_3hr_snap, self._tempValue, ['Td'], self._rowLabelWidth) + + #RH + fcst=fcst+ self.makeRow( + "RH", self._top3hrWidth, timePeriods['top_3hr_snap'], + statList_3hr_snap, self._rhValue, [], self._rowLabelWidth) + + # Wind direction + fcst=fcst+ self.makeRow( + "Wind dir", self._top3hrWidth, timePeriods['top_3hr_snap'], + statList_3hr_snap, self._windValue, ["dir"], + self._rowLabelWidth) + + # Windspd + fcst=fcst+ self.makeRow( + "Wind spd", self._top3hrWidth, timePeriods['top_3hr_snap'], + statList_3hr_snap, self._windValue, ["speed"], + self._rowLabelWidth) + + # Wind Gust + gustRow = self.makeRow( + "Wind gust", self._top3hrWidth, timePeriods['top_3hr_snap'], + statList_3hr_snap, self._windGustValue, [], + self._rowLabelWidth) + if string.strip(gustRow) != "Wind gust": + fcst=fcst+gustRow + + # Sky + fcst=fcst+ self.makeRow( + "Clouds", self._top3hrWidth, timePeriods['top_3hr_snap'], + statList_3hr_snap, self._skyValue, [], self._rowLabelWidth) + + # Pop + fcst=fcst+ self.makeRow( + "PoP 12hr", self._top12hrWidth, timePeriods['top_12hr'], + statList_12hrShort, self._popValue, [], self._rowLabelWidth, + 18) + + # QPF + fcst=fcst+ self.makeRow( + "QPF 12hr", self._top12hrWidth, timePeriods['top_12hr'], + statList_12hrShort, self._qpfValue, [], + self._rowLabelWidth, 18) + + #Max qpf + #fcst=fcst+ self.makeRow( + #"MAX QPF", self._top12hrWidth, timePeriods['top_12hr'], + #statList_12hrShort, None, [], self._rowLabelWidth, 18) + + # SnowAmt + if self._includeSnowAmt: + snowBasetime = \ + ((timePeriods['top_3hr_snap'][0])[0]).startTime().unixTime() + fcst=fcst+ self.makeRow( + "Snow 12hr", self._top12hrWidth, timePeriods['top_12hr'], + statList_12hrShort, self._snowValue, [snowBasetime], + self._rowLabelWidth, 18) + + # Weather + fcst = self._createWxRows(fcst, timePeriods['top_3hr_snap'], + self._sampler, + self._analysisList_top_3hr_snap(), 
editArea, + self._top3hrWidth) + + # OBVIS + fcst = self._createObVisRows(fcst, timePeriods['top_3hr_snap'], + self._sampler, + self._analysisList_top_3hr_snap(), editArea, + self._top3hrWidth) + + # Wind Chill + if self._includeWindChill: + row = self.makeRow( + "Wind chill", self._top3hrWidth, timePeriods['top_3hr_snap'], + statList_3hr_snap, self._windChillValue, [], + self._rowLabelWidth) + if string.strip(row) != "Wind chill": + fcst=fcst+row + fcst = fcst + self.makeRow( + "Min chill", self._top6hrWidth, timePeriods['top_6hrShort'], + statList_6hrShort, self._minWindChillValue, [], + self._rowLabelWidth) + + + # Heat Index + if self._includeHeatIndex: + row = self.makeRow( + "Heat index", self._top3hrWidth, timePeriods['top_3hr_snap'], + statList_3hr_snap, self._heatIndexValue, [], + self._rowLabelWidth) + if string.strip(row) != "Heat index": + fcst=fcst+row + fcst = fcst + self.makeRow( + "Max heat", self._top6hrWidth, timePeriods['top_6hrShort'], + statList_6hrShort, self._maxHeatIndexValue, [], + self._rowLabelWidth) + + # Hazards (WWA) + #need to make list of actual edit areas first + combinations = argDict["combinations"] + if combinations is not None: + areaList = self.getCurrentAreaNames(argDict, areaLabel) + else: + areaList = [editArea.getId().getName()] + fcst = self._createWWARows(fcst, timePeriods['top_3hr_snap'], + areaList, self._top3hrWidth) + + fcst = fcst + "\n\n" + + ###################################### + # Extended Forecast + ###################################### + # Determine the column spacing - 3 normally, 5 between days LT + # Determine change in day by labels. 
+ colSpacing6hr = [] + colSpacing12hr = [] + + # set the time zone + prevTZ = os.environ['TZ'] + os.environ['TZ'] = timeZone + time.tzset() + + # determine zulu alignment for bottom section, gather all of the + # possible zulu hours (can't be more than 2 due to 12h intervals) + zuluHours = [] + for x in range(0, 2): + period, label = timePeriods['bottom_12hr'][x] + zuluHours.append(period.endTime().hour) + + # add extra space for the first column in each day, except 1st one + # We treat midnight at the end of the day so we subtract 1 to get + # the right day for extra space insertion. We use startTime, since + # the values are valid at the label time. + dayOfMonthProcessed6 = None + firstTime = True + for period, label in timePeriods['bottom_6hr_snap']: + dayOfMonth = (period.startTime() -1 + self.determineShift()).day + if dayOfMonth != dayOfMonthProcessed6 and not firstTime: + colSpacing6hr.append(self._bottom6hrWidth + + self._extraDayWidth) + else: + colSpacing6hr.append(self._bottom6hrWidth) + dayOfMonthProcessed6 = dayOfMonth + firstTime = False + + # now determine the bottom 12hr periods and their spacing + runningTotal = 0 + for x in range(len(timePeriods['bottom_6hr_snap'])): + period, label = timePeriods['bottom_6hr_snap'][x] + space6 = colSpacing6hr[x] + runningTotal += space6 + if period.startTime().hour in zuluHours: + colSpacing12hr.append(runningTotal) + runningTotal = 0 + + # reset the time zone + os.environ['TZ'] = prevTZ + time.tzset() + + # Create statLists for bottom portion + statList_6hr_snap = self.getStatList( + self._sampler, + self._analysisList_bottom_6hr_snap(), + timePeriods['bottom_6hr_snap'], editArea) + statList_6hr = self.getStatList( + self._sampler, + self._analysisList_bottom_6hr(), + timePeriods['bottom_6hr'], editArea) + statList_12hr = self.getStatList(self._sampler, + self._analysisList_bottom_12hr(), + timePeriods['bottom_12hr'], editArea) + + # Day, Period Label (UTC), Period Label (LT) + dateLabel, utcLabel, ltLabel = 
self._calcPeriodLabels(\ + timePeriods['bottom_6hr_snap'], colSpacing6hr, + self._rowLabelWidth, 6, timeZone) + fcst = fcst + dateLabel + "\n" + ltLabel + "\n" + utcLabel + "\n\n" + + # Max/MinT + tpmaxmin = timePeriods['bottom_12hr'][0][0] + if self.getPeriod(tpmaxmin, shiftToLocal=1) == self.DAYTIME(): + label = "Max/Min" + else: + label = "Min/Max" + fcst=fcst+ self.makeRow( + label, colSpacing12hr, timePeriods['bottom_12hr'], + statList_12hr, self._mxmnValue, [0], + self._rowLabelWidth) + + # Temp + fcst=fcst+ self.makeRow( + "Temp", colSpacing6hr, timePeriods['bottom_6hr_snap'], + statList_6hr_snap, self._tempValue, ["T"], self._rowLabelWidth) + + + # Dewpt + fcst=fcst+ self.makeRow( + "Dewpt", colSpacing6hr, timePeriods['bottom_6hr_snap'], + statList_6hr_snap, self._tempValue, ["Td"], self._rowLabelWidth) + + # Predominant Wind direction + fcst=fcst+ self.makeRow( + "PWind dir", colSpacing12hr, timePeriods['bottom_12hr'], + statList_12hr, self._windValue, ["dir"], + self._rowLabelWidth) + + # Wind character + fcst=fcst+ self.makeRow( + "Wind char", colSpacing12hr, timePeriods['bottom_12hr'], + statList_12hr, self._windCharValue, ["Wind", "speed", 0], + self._rowLabelWidth) + + # Average Clouds - 6hrly + fcst=fcst+ self.makeRow( + "Avg clouds", colSpacing6hr, timePeriods['bottom_6hr'], + statList_6hr, self._skyValue, [], self._rowLabelWidth) + + + # Pop + fcst=fcst+ self.makeRow( + "PoP 12hr", colSpacing12hr, timePeriods['bottom_12hr'], + statList_12hr, self._popValue, [], self._rowLabelWidth) + + # Weather + fcst = self._createWxRows(fcst, timePeriods['bottom_6hr'], + self._sampler, + self._analysisList_bottom_6hr(), editArea, + colSpacing6hr) + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + "\n$$\n" + + def _postProcessProduct(self, fcst, argDict): + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + # provides expiration time offset from today's 
midnight based on issuance. + def expireOffset(self): + if self._productIssuance == "Morning": + #4pm today + return 16 + else: + #4am tomorrow + return 24+4 + + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _analysisList_top_3hr(self): #None needed + return [ + + ] + + def _analysisList_top_3hr_snap(self): + return [ + ("T", self.avg), + ("Wind", self.vectorAvg), + ("WindGust", self.maximum), + ("WindChill", self.avg), + ("HeatIndex", self.avg), + ("Sky", self.avg), + ("Td", self.avg), + ("RH", self.avg), + ("Wx", self.dominantWx), + ] + + def _analysisList_top_6hr(self): + return [ + ] + + def _analysisList_top_6hrShort(self): + return [ + ("HeatIndex", self.minMax), + ("WindChill", self.minMax), + ] + + + def _analysisList_top_12hr(self): + return [ + ("MaxT", self.minMaxAvg), + ("MinT", self.minMaxAvg), + ] + + def _analysisList_top_12hrShort(self): + return [ + ("PoP",self.stdDevMaxAvg), + ("QPF",self.minMaxSum), + ("SnowAmt", self.minMaxSum), + ] + + def _analysisList_bottom_6hr(self): + return [ + ("Sky", self.avg), + ("Wx", self.dominantWx), + ] + + def _analysisList_bottom_6hr_snap(self): + return [ + ("T", self.avg), + ("Td", self.avg), + ("RH", self.avg), + ] + + def _analysisList_bottom_12hr(self): + return [ + ("Wind", self.vectorAvg), + ("PoP",self.stdDevMaxAvg), + ("MaxT", self.avg), + ("MinT", self.avg), + ("Sky", self.avg) + ] + + def _hour24zuluLabel(self, timeRange): + # returns the starting time of the timeRange in zulu, such as "03" + label = timeRange.startTime().stringFmt("%H") + return string.rjust(label, self._top3hrWidth) + + def _hour24localLabel(self, timeRange): + # returns the starting time of the timeRange in localtime, such as "06" + start = timeRange.startTime() + self.determineShift() + label = start.stringFmt("%H") + return string.rjust(label, self._top3hrWidth) + + def _tempValue(self, 
statDict, timeRange, argList): + # return a string for the temperatures, such as "85" + # can return MM for no data, blanks if timeRange is earlier than now + if timeRange.startTime().unixTime() < self._currentTime: + return "" + val = self.getStats(statDict, argList[0]) + if val is None: + return "MM" + return repr(int(round(val))) + + def _rhValue(self, statDict, timeRange, argList): + # return a string for the rh, such as "85" + # can return MM for no data, blanks if timeRange is earlier than now + if timeRange.startTime().unixTime() < self._currentTime: + return "" + # get the T and Td + tval = self.getStats(statDict, "T") + if tval is None: + return "MM" + tdval = self.getStats(statDict, "Td") + if tdval is None: + return "MM" + t = round(tval) + td = round(tdval) + + #calc RH from T and Td + Tc = .556 * (t - 32.0) + Tdc = .556 * (td - 32.0) + Vt = 6.11 * pow(10,(Tc * 7.5 / (Tc + 237.3))) + Vd = 6.11 * pow(10,(Tdc * 7.5 / (Tdc + 237.3))) + RH = (Vd / Vt) * 100.0 + + return repr(int(round(RH))) + + def _popValue(self, statDict, timeRange, argList): + # return a string for the pop, such as "80" + # PoP is rounded to nearest 10%, plus the 5% single value is allowed + # can return MM for no data, blanks if timeRange is earlier than now + if timeRange.endTime().unixTime() < self._currentTime: + return "" + val = self.getStats(statDict, "PoP__stdDevMaxAvg") + if val is None: + return "MM" + popMax5=int(self.round(val,"Nearest",5)) + if popMax5 == 5: + return "5" + popMax10=int(self.round(val,"Nearest",10)) + return repr(int(popMax10)) + + def _qpfValue(self, statDict, timeRange, argList): + # Return a string for the QPF, such as 0, 0.05, or 0.25-0.49 + # can return "MM" for missing data, + # blanks if timeRange earlier than now + # PFMs don't have ranges ever. 
+ if timeRange.endTime().unixTime() < self._currentTime: + return "" + val = self.getStats(statDict, "QPF__minMaxSum") + if val is None: + return "MM" + minV, maxV, sumV = val + if maxV - minV > self._qpfRangeThreshold and self._productType == "AFM": + minString = string.strip("%5.2f" %minV) + maxString = string.strip("%5.2f" %maxV) + return minString+"-"+maxString + elif sumV < 0.005: + return "0" + else: + return string.strip("%5.2f" %sumV) + + + def _snowValue(self, statDict, timeRange, argList): + # Return a string for the Snow, such as 00-00, 5, or 5-9 + # Can return "MM" for missing data, blanks if timeRange + # earlier than now, or if greater than 36 hrs from the base time. + # PFMs don't have ranges ever. + # -- basetime is the argList[0] + if timeRange.endTime().unixTime() < self._currentTime: + return "" + basetime = argList[0] + if timeRange.startTime().unixTime() >= basetime + 36 * 3600: + return "" + val = self.getStats(statDict, "SnowAmt__minMaxSum") + if val is None: + return "MM" + minV, maxV, sumV = val + if maxV - minV > self._snowRangeThreshold and \ + self._productType == "AFM": + minString = repr(int(round(minV))) + maxString = repr(int(round(maxV))) + return minString+"-"+maxString + elif sumV < 0.1: + return "00-00" + elif sumV < 0.5: + return "T" + else: + return repr(int(round(sumV))) + + def _mxmnValue(self, statDict, timeRange, argList): + # Return a string for the max or min temperatures. + # String may be a single value, such as 75, or a range 60 65 70 + # where the values are min, ave, max. Can return MM for missing data + # Blanks if timeRange earlier than now. + # PFMs don't ever have ranges + if timeRange.endTime().unixTime() < self._currentTime: + return "" + dayNight = self.getPeriod(timeRange, shiftToLocal=1) + if dayNight == self.DAYTIME(): + val = self.getStats(statDict, "MaxT") + else: + val = self.getStats(statDict, "MinT") + if val == None: + return "MM" + + # did we get a tuple, or just a single value? 
+ if type(val) is tuple: + minV, maxV, aveV = val + if maxV - minV > self._tempRangeThreshold and \ + self._productType == "AFM": + minString = string.rjust(repr(int(round(minV))), self._top3hrWidth) + aveString = string.rjust(repr(int(round(aveV))), self._top3hrWidth) + maxString = string.rjust(repr(int(round(maxV))), self._top3hrWidth) + return minString+aveString+maxString + else: + return repr(int(round(aveV))) + else: + return repr(int(round(val))) + + + def _windChillValue(self, statDict, timeRange, argList): + # Returns string for WindChill, such as "25" + # Returns "" for missing data, blanks if data earlier than now. + # Returns "" if wind chill not below a certain value. + if timeRange.startTime().unixTime() < self._currentTime: + return "" + + chill = self.getStats(statDict,"WindChill") + temp = self.getStats(statDict,"T") + if chill is None or temp is None: + return "" + + if chill < temp and chill < self._windChillLimit and \ + (temp - chill) > self._windChillDifference: + return repr(int(round(chill))) + else: + return "" + + def _heatIndexValue(self, statDict, timeRange, argList): + # Returns string for HeatIndex, such as "85" + # Returns "" for missing data, blanks if data earlier than now. + # Returns "" if heat index not above thresholds + if timeRange.startTime().unixTime() < self._currentTime: + return "" + + heat = self.getStats(statDict,"HeatIndex") + temp = self.getStats(statDict,"T") + if heat is None or temp is None: + return "" + + if heat >= self._heatIndexLimit and \ + (heat - temp) >= self._heatIndexDifference: + return repr(int(round(heat))) + else: + return "" + + def _minWindChillValue(self, statDict, timeRange, argList): + # Returns string for WindChill, such as "25" + # Returns "" for missing data, blanks if data earlier than now. + # Returns wind chill only if below user-set wind chill limit. 
+ if timeRange.endTime().unixTime() < self._currentTime: + return "" + + chill = self.getStats(statDict,"WindChill") + if chill is None: + return "" + minV, maxV = chill + if minV < self._windChillLimit: + return repr(int(round(minV))) + else: + return "" + + def _maxHeatIndexValue(self, statDict, timeRange, argList): + # Returns string for HeatIndex, such as "85" + # Returns "" for missing data, blanks if data earlier than now. + if timeRange.endTime().unixTime() < self._currentTime: + return "" + + heat = self.getStats(statDict,"HeatIndex") + if heat is None: + return "" + + minV, maxV = heat + if maxV >= self._heatIndexLimit: + return repr(int(round(maxV))) + return "" + + + def _windGustValue(self, statDict, timeRange, argList): + # Returns string for WindGust, such as "25" + # Returns "" for missing data, blanks if data earlier than now. + # Returns "" if gust doesn't exceed normal wind by 10 + # If tropical winds, symbols are used for wind gust. + if timeRange.startTime().unixTime() < self._currentTime: + return "" + + windGustVal = self.getStats(statDict,"WindGust") + windVal = self.getStats(statDict,"Wind") + if windGustVal is None or windVal is None: + return "" + + maxGust = windGustVal + mag, dir = windVal + if maxGust - mag < (10/1.15): + return "" # gust is not significant + speed = round(maxGust) * 1.15 # convert to MPH + + # hurricane force + fcstTime = timeRange.endTime().unixTime() - self._currentTime + if self._tropicalStorm and fcstTime > 24*3600 and speed >= 74: + return 'HG' + + return repr(int(speed)) + + + def _windValue(self, statDict, timeRange, argList): + # Returns string for Wind, either direction or speed + # Format is "25" for speed, "NW" for direction + # Can return "MM" for missing data, blanks if data earlier than now. + # If tropical storm, and high winds, different symbols are used. 
+ # argList[0] for type(dir,speed) + if timeRange.startTime().unixTime() < self._currentTime: + return "" + windType = argList[0] + windVal = self.getStats(statDict,"Wind") + if windVal is None: + return "MM" + mag, dir = windVal + + # hurricane force - 64kt limits + fcstTime = timeRange.endTime().unixTime() - self._currentTime + if self._tropicalStorm and fcstTime > 24*3600 and mag >= 64: + return 'HU' + + if windType == "dir": + return self.convertDirection(dir) + else: + speed = round(mag) * 1.15 # convert to MPH + return repr(int(speed)) + + def _windCharValue(self, statDict, timeRange, argList): + # Returns wind character (speed characteristic), such as "WY" + # Can return "MM" for missing data, blanks if timeRange + # earlier than now. + if timeRange.endTime().unixTime() < self._currentTime: + return "" + value = self.getStats(statDict, "Wind") + if value is None: + return "MM" + maxV, dir = value + # convert to MPH + speed = maxV * 1.15 + + # hurricane force + fcstTime = timeRange.endTime().unixTime() - self._currentTime + if self._tropicalStorm and fcstTime > 24*3600 and speed >= 74: + return 'HU' + + # Convert the speed into a text value category. Table shows the + # max speed allowed for the symbol. + codes = [("LT", 8), ("GN", 15), ("BZ", 23), ("WY", 31), ("VW", 40), + ("SD", 74), ("HF", 300) + ] + for symbol, threshold in codes: + if speed < threshold: + return symbol + return "HF" + + def _skyValue(self, statDict, timeRange, argList): + # Returns string for sky condition, such as "SC" + # Can return "MM" for missing data, blanks if timeRange + # earlier than now + if timeRange.startTime().unixTime() < self._currentTime: + return "" + value = self.getStats(statDict, "Sky") + if value is None: + return "MM" + # Convert the sky cover percentage into a text value. 
+ # Table shows the max cloud cover % for that symbol + codes = [("CL", 5), ("FW", 25), ("SC", 50), ("B1", 69), + ("B2", 87), ("OV", 200)] + for symbol, threshold in codes: + if value <= threshold: + return symbol + + def _createWxRows(self, fcst, periods, sampler, analysisList, + area, colWidth): + # Create the row(s) for weather types with codes as column values + # Create a dictionary of values (Snow, Rain, etc.) with + # a list of the periods in which they occurred + # Can return "MM" for missing data, blanks for period earlier than now + wxDict = {} + for period, label in periods: + + # Get the Statistics + statDict = self.getStatDict(sampler, analysisList, period, area) + + + # Wx + wxValues = self._getWxValues(statDict["Wx"]) + for wxVal, covCode in wxValues: + if wxVal in wxDict: + wxDict[wxVal].append((period, covCode)) + else: + wxDict[wxVal] = [(period, covCode)] + + # Create a row for each weather value in the dictionary + sortedKeys = list(wxDict.keys()) + sortedKeys.sort() + for wxVal in sortedKeys: + if wxVal == "": + continue + fcst = self.addRowLabel(fcst, wxVal, self._rowLabelWidth) + values = wxDict[wxVal] + + # Add a column for each period + for x in range(len(periods)): + period, label = periods[x] + if type(colWidth) is list: + width = colWidth[x] + else: + width = colWidth + value = "" + if period.startTime().unixTime() >= self._currentTime: + for dictPeriod, covCode in values: + if period == dictPeriod: + value = covCode + fcst = self.addColValue(fcst, value, width) + fcst = fcst + "\n" + return fcst + + def _getWxValues(self, subkeyList): + if subkeyList is None: + return "" + wxValues = [] + for x in range(len(subkeyList)): + wxKey = subkeyList[x] + wxValue = "" + wxCov = "" + wxType = wxKey.wxType() + # The following table relates GFE weather types to words used + # in the AFM/PFM. 
+ codes = [("R","Rain"), ("RW", "Rain shwrs"), ("T","Tstms"), + ("L","Drizzle"), ("S", "Snow"), ("SW", "Snowshwrs"), + ("IP", "Sleet"), ("ZR", "Frzg rain"), + ("ZL", "Frzg drzl") + ] + + # These special codes are used for very light intensities. + veryLightCodes = [("RW","Sprinkles"), ("SW", "Flurries")] + for gfecode, symbol in veryLightCodes: + if wxType == gfecode and wxKey.intensity() == "--": + wxValue = symbol + break + if wxValue == '': + for gfecode, symbol in codes: + if wxType == gfecode: + wxValue = symbol + break + + # determine the coverage codes - note that these are + # different for the AFM and the PFM (since PFM is a point) + coverage = wxKey.coverage() + + + # The following table relates the gfe coverage/probability code + # with the AFM coverage/probability code. + afmCodes = { + 'T': {"Iso": "IS", "Sct": "SC", "Num": "NM", "Wide": "EC", + "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", + "Def": "D", 'Frq': "O", 'Brf': "O", 'Pds': "O", + 'Inter': "O"}, + 'R': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", + 'Pds': "O", 'Inter': "O"}, + 'RW': {"Iso": "IS", "Sct": "SC", "Num": "NM", "Wide": "EC", + "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", + "Def": "D", 'Frq': "O", 'Brf': "O", 'Pds': "O", + 'Inter': "O"}, + 'L': {"Wide": "WD", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", "Areas": "AR", + "Patchy": "PA", 'Frq': "O", 'Brf': "O", 'Pds': "O", + 'Inter': "O"}, + 'ZL': {"Wide": "WD", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", "Areas": "AR", + "Patchy": "PA", 'Frq': "O", 'Brf': "O", 'Pds': "O", + 'Inter': "O"}, + 'ZR': {"Wide": "WD", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", + 'Pds': "O", 'Inter': "O"}, + 'S': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", + 'Pds': "O", 'Inter': "O"}, + 'SW': {"Iso": "IS", "Sct": "SC", "Num": "NM", "Wide": "EC", 
+ "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", + "Def": "D", 'Frq': "O", 'Brf': "O", 'Pds': "O", + 'Inter': "O"}, + 'IP': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", + 'Pds': "O", 'Inter': "O"} + } + + # The following table relates the gfe coverage/probability code + # with the PFM coverage/probability code. Note that some codes + # for the pfm have been remapped to probability terms due to + # the point nature of the pfm. + pfmCodes = { + 'T': {"Iso": "S", "Sct": "C", "Num": "L", "Wide": "O", + "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", + "Def": "D", 'Frq': "O", 'Brf': "O", + "Pds": "O", 'Inter': "O"}, + 'R': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", + "Pds": "O", 'Inter': "O"}, + 'RW': {"Iso": "S", "Sct": "C", "Num": "L", "Wide": "O", + "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", + "Def": "D", 'Frq': "O", 'Brf': "O", + "Pds": "O", 'Inter': "O"}, + 'L': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", "Areas": "AR", "Patchy": "O", + 'Frq': "O", 'Brf': "O", "Pds": "O", 'Inter': "O"}, + 'ZL': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", "Areas": "AR", "Patchy": "O", + 'Frq': "O", 'Brf': "O", "Pds": "O", 'Inter': "O"}, + 'ZR': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", + "Pds": "O", 'Inter': "O"}, + 'S': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", + "Pds": "O", 'Inter': "O"}, + 'SW': {"Iso": "S", "Sct": "C", "Num": "L", "Wide": "O", + "Ocnl": "O", "SChc": "S", "Chc": "C", "Lkly": "L", + "Def": "D", 'Frq': "O", 'Brf': "O", + "Pds": "O", 'Inter': "O"}, + 'IP': {"Wide": "O", "Ocnl": "O", "SChc": "S", "Chc": "C", + "Lkly": "L", "Def": "D", 'Frq': "O", 'Brf': "O", + "Pds": "O", 'Inter': "O"} + } + + + # now map the codes + if self._productType == "AFM": 
+ if wxType in afmCodes and \ + coverage in afmCodes[wxType]: + wxCov = afmCodes[wxType][coverage] + else: + wxCov = "?" + elif self._productType == "PFM": + if wxType in pfmCodes and \ + coverage in pfmCodes[wxType]: + wxCov = pfmCodes[wxType][coverage] + else: + wxCov = "?" + + if wxValue != "": + wxValues.append((wxValue, wxCov)) + + return wxValues + + + def _createWWARows(self, fcst, periods, editAreaList, colWidth): + # Create the row(s) for WWAs with codes as column values + # Create a dictionary of values (TO.A, SV.A, etc.) with + # a list of the periods in which they occurred + # Blanks for period earlier than now + hazards = self._hazards.getHazardList(editAreaList) + allowedActions = ['NEW','CON','EXT','EXA','EXB'] + + wwaDict = {} + for period, label in periods: + + # filter out records to this time period + hazRecords = [] + for h in hazards: + if h['act'] in allowedActions and \ + self.__overlaps((h['startTime'],h['endTime']), + (period.startTime().unixTime(),period.endTime().unixTime())): + hazRecords.append(h) + + # Hazards - create the row data + wwaValues = self._getWWAValues(hazRecords) + for wwaVal, sigfCode in wwaValues: + if wwaVal in wwaDict: + wwaDict[wwaVal].append((period, sigfCode)) + else: + wwaDict[wwaVal] = [(period, sigfCode)] + + # Create a row for each hazard value in the dictionary + sortedKeys = list(wwaDict.keys()) + sortedKeys.sort() + for wwaVal in sortedKeys: + fcst = self.addRowLabel(fcst, wwaVal, self._rowLabelWidth) + values = wwaDict[wwaVal] + + # Add a column for each period + for x in range(len(periods)): + period, label = periods[x] + if type(colWidth) is list: + width = colWidth[x] + else: + width = colWidth + value = "" + if period.startTime().unixTime() >= self._currentTime: + for dictPeriod, sigfCode in values: + if period == dictPeriod: + value = sigfCode + fcst = self.addColValue(fcst, value, width) + fcst = fcst + "\n" + return fcst + + def _getWWAValues(self, hazRecs): + wwaValues = [] + + # The following table 
relates VTEC phens with row labels + # in the AFM/PFM. + codes = { + "AF":"Ashfall", "AS":"Air Stag", "BS":"Blowing Snow", + "BW":"Brisk Wind", "BZ":"Blizzard", "CF":"Coast Flood", + "DU":"Blowing Dust", "EC":"Extreme Cold", + "EH":"Excess Heat", "FA":"Flood", "FF":"Flash Flood", + "FG":"Dense Fog", "FL": "Flood", "FR":"Frost", "FZ":"Freeze", + "HF":"Hurr Frc Wnd", "HI":"Inland Hurr", "HS":"Heavy Snow", + "HT":"Heat", "HU":"Hurricane", "HW":"High Wind", "HZ": "Hard Freeze", + "IP":"Sleet", "IS":"Ice Storm", "LB":"LkEff SnBlSn", + "LE":"Lk Eff Snow", "LS":"Lkshore Fld", "UP":"Ice Accre", + "LW":"Lake Wind", "SB":"Snow BloSnow", "SM":"Dense Smoke", + "SN":"Snow", "SU":"High Surf", "SV":"Svr Tstorm", + "TI":"Inl Trp Strm", "TO":"Tornado", "TR":"Trop Storm", + "TS":"Tsunami", "TY":"Typhoon", "WC":"Wind Chill", "WI":"Wind", + "WS":"Winter Storm", "WW":"Winter Weath", "ZF":"Freezing Fog", + "ZR":"Frzng Rain", "FW.W": "Red Flag", "FW.A": "Fire Weather", + "ZL":"Frzg Drzl"} + + for rec in hazRecs: + phen = rec['phen'] + sig = rec['sig'] + phensig = rec['phen'] + '.' + rec['sig'] + if phen in codes: + wwaValues.append((codes[phen], sig)) + elif phensig in codes: + wwaValues.append((codes[phensig], sig)) + + return wwaValues + + def _createObVisRows(self, fcst, periods, sampler, analysisList, + area, colWidth): + # creates and adds to "fcst" the OBVIS row. OBVIS row included only + # if there are OBVIS present. + # determine the obvis values for each period + obvisValues = [] + for period, label in periods: + + # Get the Statistics + statDict = self.getStatDict(sampler, analysisList, period, area) + + # Wx + obvisVal = self._getObVisValues(statDict["Wx"]) + if period.startTime().unixTime() >= self._currentTime: + obvisValues.append(obvisVal) + else: + obvisValues.append("") # period earlier than now so blank it + + # Any OBVIS values at all? 
+ any = 0 + for o in obvisValues: + if o != "": + any = 1 + break; + if any == 0: # no OBVIS, so skip the row + return fcst + + fcst = self.addRowLabel(fcst, "Obvis", self._rowLabelWidth) + for x in range(len(obvisValues)): + if type(colWidth) is list: + width = colWidth[x] + else: + width = colWidth + fcst = fcst + string.rjust(obvisValues[x], width) + fcst = fcst + "\n" + return fcst + + def _getObVisValues(self, subkeyList): + # Returns the obvis code given the weather subkey list + if subkeyList is None: + return "" + wxValues = [] + for x in range(len(subkeyList)): + wxKey = subkeyList[x] + wxInten = wxKey.intensity() + wxCov = wxKey.coverage() + wxType = wxKey.wxType() + + # Various types of Fog + if wxType in ["F", "IF", "ZF"]: + if wxInten == "+": + if wxCov == "Patchy": + return "PF+" + else: + return "F+" + elif wxCov == "Patchy": + return "PF" + else: + return "F" + + # Other obvis checks + else: + # The following table relates gfe wx type codes to AFM/PFM + # type codes. + codes = [("H","H"), ("BS", "BS"), ("K","K"), ("BD", "BD"), + ("VA", "AF")] + for gfecode, symbol in codes: + if wxType == gfecode: + return symbol + + + return "" + + def _calcPeriodLabels(self, periods, colWidth, startPoint, intervalHours, + timeZone): + # Calculate the period labels and returns as (date, utc, lt) strings + #Date THU 08/01/02 FRI 08/02/02 + #UTC 3hrly 09 12 15 18 21 00 03 06 09 12 15 18 21 00 03 06 + #MDT 3hrly 03 06 09 12 15 18 21 00 03 06 09 12 15 18 21 00 + + # determine the column widths + colWidths = [] + if type(colWidth) is list: + colWidths = colWidth + else: + for p in periods: + colWidths.append(colWidth) + + # calculate the zulu labels + zuluLabels = [] + for period,label in periods: + zuluLabels.append(self._hour24zuluLabel(period)) + + # zulu string + zulu = "UTC " + repr(intervalHours) + "hrly " + zulu = string.ljust(zulu, startPoint) + for x in range(len(zuluLabels)): + zulu = self.addColValue(zulu, zuluLabels[x], colWidths[x]) + + # set the time zone 
+ prevTZ = os.environ['TZ'] + os.environ['TZ'] = timeZone + time.tzset() + + # date and LT string (beginning) + dateS = string.ljust('Date', startPoint) + ltZone = time.strftime("%Z",time.localtime(self._currentTime)) + lt = string.ljust(ltZone, 4) + repr(intervalHours) + "hrly " + lt = string.ljust(lt, startPoint) + + # remainder of Date and LT strings + dayOfMonthProcessed = None + for x in range(len(periods)): + timePeriod, label = periods[x] + hour = int(label) + + # 00LT is considered the end of the day for labeling, so subtract 1 + dayOfMonth = (timePeriod.startTime() - 1 + self.determineShift()).day + + # add in local time string + prevLTlen = len(lt) + lt = self.addColValue(lt, label, colWidths[x]) + + # calculate amount of room to write data + colAvail = 0 + for y in range(x+1,len(periods)): + colAvail = colAvail + colWidths[y] + + # handle the Date string + if dayOfMonth != dayOfMonthProcessed: + if intervalHours == 3: #top section + if colAvail < 3: + format = "%a" + elif hour < 6: + continue # label too early in the day + elif hour >= 6 and hour < 15: + format = "%a %m/%d/%y" + elif hour >= 15 and hour < 19: + format = " %m/%d/%y" + else: + continue #not enough remaining room + elif intervalHours == 6: #bottom section + if colAvail < 3: + format = "%a" + elif hour < 1: + continue # label too early in the day + elif hour >= 1 and hour < 7: + format = "%a %m/%d/%y" + elif hour >= 7 and hour < 13: + format = " %m/%d/%y" + elif hour >= 13 and hour < 17: + format = " %m/%d" + else: + continue #not enough remaining room + else: + raise Exception("Expected 3 or 6 intervalHours") + + index = 0 + nfill = prevLTlen - len(dateS) -1 + colWidths[x] - 1 + dateS = dateS + string.ljust(' ',nfill) + dayTime = timePeriod.startTime() + self.determineShift() + dString = dayTime.stringFmt(format) + dateS = dateS + dString + + dayOfMonthProcessed = dayOfMonth #mark this date processed + + + # reset time zone + os.environ['TZ'] = prevTZ + time.tzset() + + return (dateS, zulu, 
lt) + + def _latlonCheck(self, latlon): + # Routine checks the latlon format for PFM. Returns true if ok. + # Format must be ll.llN lll.llW + if len(latlon) != 14: + return 0 + if latlon[5] != 'S' and latlon[5] != 'N': + return 0 + if latlon[13] != 'W' and latlon[13] != 'E': + return 0 + if latlon[2] != '.' or latlon[10] != '.': + return 0 + digits = [0,1,3,4,8,9,11,12] + for d in digits: + if not latlon[d].isdigit(): + return 0 + if latlon[7] != ' ' and latlon[7] != "1": + return 0 + return 1 + + + + # Returns a list of the Hazards allowed for this product in VTEC format. + # These are sorted in priority order - most important first. + #### Removed inland tropical hazards in OB9.3 + def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CON"] + return [ + ('HU.W', allActions, 'Tropical'), # HURRICANE WARNING + ('TY.W', allActions, 'Tropical'), # TYPHOON WARNING + ('TR.W', allActions, 'Tropical1'), # TROPICAL STORM WARNING + ('HU.A', allActions, 'Tropical'), # HURRICANE WATCH + ('TY.A', allActions, 'Tropical'), # TYPHOON WATCH + ('TR.A', allActions, 'Tropical1'), # TROPICAL STORM WATCH + ('HF.W', allActions, 'Marine'), # HURRICANE FORCE WIND WARNING + ('BZ.W', allActions, 'WinterWx'), # BLIZZARD WARNING + ('IS.W', allActions, 'WinterWx'), # ICE STORM WARNING + ('LE.W', allActions, 'WinterWx'), # LAKE EFFECT SNOW WARNING + ('WS.W', allActions, 'WinterWx'), # WINTER STORM WARNING + ('WW.Y', allActions, 'WinterWx'), # WINTER WEATHER ADVISORY + ('WS.A', allActions, 'WinterWx'), # WINTER STORM WATCH + ('WC.W', allActions, 'WindChill'), # WIND CHILL WARNING + ('WC.Y', allActions, 'WindChill'), # WIND CHILL ADVISORY + ('WC.A', allActions, 'WindChill'), # WIND CHILL WATCH + ('DU.W', allActions, 'Dust'), # BLOWING DUST WARNING + ('DU.Y', allActions, 'Dust'), # BLOWING DUST ADVISORY + ('EC.W', allActions, 'Cold'), # EXTREME COLD WARNING + ('EC.A', allActions, 'Cold'), # EXTREME COLD WATCH + ('EH.W', allActions, 'Heat'), # EXCESSIVE HEAT WARNING + ('EH.A', 
allActions, 'Heat'), # EXCESSIVE HEAT WATCH + ('HT.Y', allActions, 'Heat'), # HEAT ADVISORY + ('FG.Y', allActions, 'Fog'), # DENSE FOG ADVISORY + ('HZ.W', allActions, 'FrostFreeze'), # HARD FREEZE WARNING + ('FZ.W', allActions, 'FrostFreeze'), # FREEZE WARNING + ('FR.Y', allActions, 'FrostFreeze'), # FROST ADVISORY + ('HZ.A', allActions, 'FrostFreeze'), # HARD FREEZE WATCH + ('FZ.A', allActions, 'FrostFreeze'), # FREEZE WATCH + ('HW.W', allActions, 'Wind'), # HIGH WIND WARNING + ('WI.Y', allActions, 'Wind'), # WIND ADVISORY + ('LW.Y', allActions, 'Wind'), # LAKE WIND ADVISORY + ('HW.A', allActions, 'Wind'), # HIGH WIND WATCH + ('SM.Y', allActions, 'Smoke'), # DENSE SMOKE ADVISORY + ('ZF.Y', allActions, 'FreezeFog'), # FREEZING FOG ADVISORY + ('FF.A', allActions, 'Flood'), # FLASH FLOOD WATCH + ('FA.A', allActions, 'Flood'), # FLOOD WATCH + ('FA.W', allActions, 'Flood'), # FLOOD WARNING + ('FA.Y', allActions, 'Flood'), # FLOOD ADVISORY + ('CF.W', allActions, 'CoastalFlood'), # COASTAL FLOOD WARNING + ('LS.W', allActions, 'CoastalFlood'), # LAKESHORE FLOOD WARNING + ('CF.Y', allActions, 'CoastalFlood'), # COASTAL FLOOD ADVISORY + ('LS.Y', allActions, 'CoastalFlood'), # LAKESHORE FLOOD ADVISORY + ('CF.A', allActions, 'CoastalFlood'), # COASTAL FLOOD WATCH + ('LS.A', allActions, 'CoastalFlood'), # LAKESHORE FLOOD WATCH + ('UP.W', allActions, 'IceAcc'), # ICE ACCREATION WARNING + ('UP.Y', allActions, 'IceAcc'), # ICE ACCREATION ADVISORY + ('AS.Y', allActions, 'AirStag'), # AIR STAGNATION ADVISORY + ('AS.O', allActions, 'AirStag'), # AIR STAGNATION OUTLOOK + ('SU.W', allActions, 'HighSurf'), # HIGH SURF WARNING + ('SU.Y', allActions, 'HighSurf'), # HIGH SURF ADVISORY + ('AF.Y', allActions, 'Ashfall'), # VOLCANIC ASHFALL ADVISORY + ('TO.A', allActions, 'Convective'), # TORNADO WATCH + ('SV.A', allActions, 'Convective'), # SEVERE THUNDERSTORM WATCH + ] + + ######################################################################## + # UTILITY FUNCTIONS + 
######################################################################## + # time contains, if time range (tr) contains time (t), return 1 + def __containsT(self, tr, t): + return (t >= tr[0] and t < tr[1]) + + # time overlaps, if tr1 overlaps tr2 (adjacent is not an overlap) + def __overlaps(self, tr1, tr2): + if self.__containsT(tr2, tr1[0]) or self.__containsT(tr1, tr2[0]): + return 1 + return 0 + + + ######################################################################## + # OVERRIDING THRESHOLDS AND VARIABLES + ######################################################################## + + def getDefaultPercentage(self, parmName): + return 5.0 + + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SAF_Site_MultiPil.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SAF_Site_MultiPil.py index ffdadb8d89..127f5ad501 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SAF_Site_MultiPil.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SAF_Site_MultiPil.py @@ -1,137 +1,137 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. 
persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -# --------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without -# technical support, and with no warranty, express or implied, as to -# its usefulness for any purpose. -# --------------------------------------------------------------------- -# -# SAF__ -# -# This file should not be edited by the site. -# Site changes should go in SAF__Overrides for methods and -# SAF__ Definition to set up Product Definition Settings -# -# -# --------------------------------------------------------------------- - -import AreaFcst -import sys, copy, types - - -# Construct the names of the definition and override TextUtilities -siteDefinition = "SAF___Definition" -siteOverrides = "SAF__Overrides" -regionOverrides = "SAF__Overrides" -ZFP_siteOverrides = "ZFP__Overrides" -ZFP_regionOverrides = "ZFP__Overrides" - -# Import the local site's Product Definition specifications -exec "import "+siteDefinition - -# Import the local site's Overrides -exec "import "+siteOverrides -exec "import "+ZFP_siteOverrides - -# Import Regional Overrides -exec "import "+regionOverrides -exec "import "+ZFP_regionOverrides - -# Patches -import Patch_Overrides -# Special SAF overrides -import SAF_Overrides - -# These statements get the class object for the region and site overrides class -# The class and the module name (the file name) must be the same! 
-regionOverrides_object=sys.modules[regionOverrides].__dict__[regionOverrides] -siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] -ZFP_regionOverrides_object=sys.modules[ZFP_regionOverrides].__dict__[ZFP_regionOverrides] -ZFP_siteOverrides_object=sys.modules[ZFP_siteOverrides].__dict__[ZFP_siteOverrides] - -# Get the region and site definitions into a known variable name -exec "localDefinition = " + siteDefinition + ".Definition" -exec "regionDefinition = " + regionOverrides + ".Definition" -exec "ZFP_regionDefinition = " + ZFP_regionOverrides + ".Definition" -exec "SAF_Definition = SAF_Overrides.Definition" - -class TextProduct( - siteOverrides_object, - regionOverrides_object, - SAF_Overrides.SAF_Overrides, - ZFP_siteOverrides_object, - ZFP_regionOverrides_object, - Patch_Overrides.Patch_Overrides, - AreaFcst.TextProduct - ): - Definition = copy.deepcopy(AreaFcst.TextProduct.Definition) - - # Get ZFP Regional Definition settings - for key in ZFP_regionDefinition.keys(): - Definition[key] = ZFP_regionDefinition[key] - - # Get SAF Definition settings - for key in SAF_Definition.keys(): - Definition[key] = SAF_Definition[key] - - # Get Regional Definition settings - for key in regionDefinition.keys(): - Definition[key] = regionDefinition[key] - - # Get the Site Definition Settings - for key in localDefinition.keys(): - Definition[key] = localDefinition[key] - - # Get the VariableList if overridden in ZFP Region - try: - exec "VariableList = "+ZFP_regionOverrides+".VariableList" - except: - pass - - # Get the VariableList if overridden in ZFP Region - try: - VariableList = SAF_Overrides.VariableList - except: - pass - - # Get the VariableList if overridden in Region - try: - exec "VariableList = "+regionOverrides+".VariableList" - except: - pass - - # Get the VariableList if overridden in Site - try: - exec "VariableList = "+siteDefinition+".VariableList" - except: - pass - - # Definition overrides should really go in SAF__ Definition - # but 
may be put here for testing. - # Most common would be need to set unique display name - ##Definition["displayName"] = "Test_SAF_" - - def __init__(self): - AreaFcst.TextProduct.__init__(self) +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +# --------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without +# technical support, and with no warranty, express or implied, as to +# its usefulness for any purpose. +# --------------------------------------------------------------------- +# +# SAF__ +# +# This file should not be edited by the site. +# Site changes should go in SAF__Overrides for methods and +# SAF__ Definition to set up Product Definition Settings +# +# +# --------------------------------------------------------------------- + +import AreaFcst +import sys, copy, types + + +# Construct the names of the definition and override TextUtilities +siteDefinition = "SAF___Definition" +siteOverrides = "SAF__Overrides" +regionOverrides = "SAF__Overrides" +ZFP_siteOverrides = "ZFP__Overrides" +ZFP_regionOverrides = "ZFP__Overrides" + +# Import the local site's Product Definition specifications +exec("import "+siteDefinition) + +# Import the local site's Overrides +exec("import "+siteOverrides) +exec("import "+ZFP_siteOverrides) + +# Import Regional Overrides +exec("import "+regionOverrides) +exec("import "+ZFP_regionOverrides) + +# Patches +import Patch_Overrides +# Special SAF overrides +import SAF_Overrides + +# These statements get the class object for the region and site overrides class +# The class and the module name (the file name) must be the same! 
+regionOverrides_object=sys.modules[regionOverrides].__dict__[regionOverrides] +siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] +ZFP_regionOverrides_object=sys.modules[ZFP_regionOverrides].__dict__[ZFP_regionOverrides] +ZFP_siteOverrides_object=sys.modules[ZFP_siteOverrides].__dict__[ZFP_siteOverrides] + +# Get the region and site definitions into a known variable name +exec("localDefinition = " + siteDefinition + ".Definition") +exec("regionDefinition = " + regionOverrides + ".Definition") +exec("ZFP_regionDefinition = " + ZFP_regionOverrides + ".Definition") +exec("SAF_Definition = SAF_Overrides.Definition") + +class TextProduct( + siteOverrides_object, + regionOverrides_object, + SAF_Overrides.SAF_Overrides, + ZFP_siteOverrides_object, + ZFP_regionOverrides_object, + Patch_Overrides.Patch_Overrides, + AreaFcst.TextProduct + ): + Definition = copy.deepcopy(AreaFcst.TextProduct.Definition) + + # Get ZFP Regional Definition settings + for key in list(ZFP_regionDefinition.keys()): + Definition[key] = ZFP_regionDefinition[key] + + # Get SAF Definition settings + for key in list(SAF_Definition.keys()): + Definition[key] = SAF_Definition[key] + + # Get Regional Definition settings + for key in list(regionDefinition.keys()): + Definition[key] = regionDefinition[key] + + # Get the Site Definition Settings + for key in list(localDefinition.keys()): + Definition[key] = localDefinition[key] + + # Get the VariableList if overridden in ZFP Region + try: + exec("VariableList = "+ZFP_regionOverrides+".VariableList") + except: + pass + + # Get the VariableList if overridden in ZFP Region + try: + VariableList = SAF_Overrides.VariableList + except: + pass + + # Get the VariableList if overridden in Region + try: + exec("VariableList = "+regionOverrides+".VariableList") + except: + pass + + # Get the VariableList if overridden in Site + try: + exec("VariableList = "+siteDefinition+".VariableList") + except: + pass + + # Definition overrides should really go 
in SAF__ Definition + # but may be put here for testing. + # Most common would be need to set unique display name + ##Definition["displayName"] = "Test_SAF_" + + def __init__(self): + AreaFcst.TextProduct.__init__(self) diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SAF_Site_MultiPil_Baseline.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SAF_Site_MultiPil_Baseline.py index 74bc6532f5..94b2bd29a2 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SAF_Site_MultiPil_Baseline.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SAF_Site_MultiPil_Baseline.py @@ -1,136 +1,136 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. 
-## - -## -# This is a base file that is not intended to be overridden. -## - -# --------------------------------------------------------------------- -# This software is in the public domain, furnished "as is", without -# technical support, and with no warranty, express or implied, as to -# its usefulness for any purpose. -# --------------------------------------------------------------------- -# -# SAF___Baseline -# -# This file should not be edited by the site. -# Site changes should go in SAF__Overrides for methods and -# SAF___Definition to set up Product Definition Settings -# -# -# --------------------------------------------------------------------- - -import AreaFcst -import sys, copy, types - - -# Construct the names of the definition and override TextUtilities -siteDefinition = "SAF___Definition" -siteOverrides = "SAF__Overrides" -regionOverrides = "SAF__Overrides" -ZFP_siteOverrides = "ZFP__Overrides" -ZFP_regionOverrides = "ZFP__Overrides" - -# Import the local site's Product Definition specifications -exec "import "+siteDefinition - -# Import the local site's Overrides -exec "import "+siteOverrides -exec "import "+ZFP_siteOverrides - -# Import Regional Overrides -exec "import "+regionOverrides -exec "import "+ZFP_regionOverrides - -# Patches -import Patch_Overrides -# Special SAF overrides -import SAF_Overrides - -# These statements get the class object for the region and site overrides class -# The class and the module name (the file name) must be the same! 
-#regionOverrides_object=sys.modules[regionOverrides].__dict__[regionOverrides] -#siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] -#ZFP_regionOverrides_object=sys.modules[ZFP_regionOverrides].__dict__[ZFP_regionOverrides] -#ZFP_siteOverrides_object=sys.modules[ZFP_siteOverrides].__dict__[ZFP_siteOverrides] - -# Get the region and site definitions into a known variable name -exec "localDefinition = " + siteDefinition + ".Definition" -exec "regionDefinition = " + regionOverrides + ".Definition" -exec "ZFP_regionDefinition = " + ZFP_regionOverrides + ".Definition" -exec "SAF_Definition = SAF_Overrides.Definition" - -class TextProduct( - #siteOverrides_object, - #regionOverrides_object, - SAF_Overrides.SAF_Overrides, - #ZFP_siteOverrides_object, - #ZFP_regionOverrides_object, - Patch_Overrides.Patch_Overrides, - AreaFcst.TextProduct - ): - Definition = copy.deepcopy(AreaFcst.TextProduct.Definition) - - # Get ZFP Regional Definition settings - #for key in ZFP_regionDefinition.keys(): - # Definition[key] = ZFP_regionDefinition[key] - - # Get SAF Definition settings - for key in SAF_Definition.keys(): - Definition[key] = SAF_Definition[key] - - # Get Regional Definition settings - #for key in regionDefinition.keys(): - # Definition[key] = regionDefinition[key] - - # Get the Site Definition Settings - for key in localDefinition.keys(): - Definition[key] = localDefinition[key] - - # Get the VariableList if overridden in ZFP Region - #try: - # exec "VariableList = "+ZFP_regionOverrides+".VariableList" - #except: - # pass - - # Get the VariableList if overridden in ZFP Region - try: - VariableList = SAF_Overrides.VariableList - except: - pass - - # Get the VariableList if overridden in Region - #try: - # exec "VariableList = "+regionOverrides+".VariableList" - #except: - # pass - - # Get the VariableList if overridden in Site - try: - exec "VariableList = "+siteDefinition+".VariableList" - except: - pass - - # To turn on this product for testing, - # 
set the display name. - Definition["displayName"] = "Baseline_SAF_" - - def __init__(self): - AreaFcst.TextProduct.__init__(self) +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +# --------------------------------------------------------------------- +# This software is in the public domain, furnished "as is", without +# technical support, and with no warranty, express or implied, as to +# its usefulness for any purpose. +# --------------------------------------------------------------------- +# +# SAF___Baseline +# +# This file should not be edited by the site. +# Site changes should go in SAF__Overrides for methods and +# SAF___Definition to set up Product Definition Settings +# +# +# --------------------------------------------------------------------- + +import AreaFcst +import sys, copy, types + + +# Construct the names of the definition and override TextUtilities +siteDefinition = "SAF___Definition" +siteOverrides = "SAF__Overrides" +regionOverrides = "SAF__Overrides" +ZFP_siteOverrides = "ZFP__Overrides" +ZFP_regionOverrides = "ZFP__Overrides" + +# Import the local site's Product Definition specifications +exec("import "+siteDefinition) + +# Import the local site's Overrides +exec("import "+siteOverrides) +exec("import "+ZFP_siteOverrides) + +# Import Regional Overrides +exec("import "+regionOverrides) +exec("import "+ZFP_regionOverrides) + +# Patches +import Patch_Overrides +# Special SAF overrides +import SAF_Overrides + +# These statements get the class object for the region and site overrides class +# The class and the module name (the file name) must be the same! 
+#regionOverrides_object=sys.modules[regionOverrides].__dict__[regionOverrides] +#siteOverrides_object=sys.modules[siteOverrides].__dict__[siteOverrides] +#ZFP_regionOverrides_object=sys.modules[ZFP_regionOverrides].__dict__[ZFP_regionOverrides] +#ZFP_siteOverrides_object=sys.modules[ZFP_siteOverrides].__dict__[ZFP_siteOverrides] + +# Get the region and site definitions into a known variable name +exec("localDefinition = " + siteDefinition + ".Definition") +exec("regionDefinition = " + regionOverrides + ".Definition") +exec("ZFP_regionDefinition = " + ZFP_regionOverrides + ".Definition") +exec("SAF_Definition = SAF_Overrides.Definition") + +class TextProduct( + #siteOverrides_object, + #regionOverrides_object, + SAF_Overrides.SAF_Overrides, + #ZFP_siteOverrides_object, + #ZFP_regionOverrides_object, + Patch_Overrides.Patch_Overrides, + AreaFcst.TextProduct + ): + Definition = copy.deepcopy(AreaFcst.TextProduct.Definition) + + # Get ZFP Regional Definition settings + #for key in ZFP_regionDefinition.keys(): + # Definition[key] = ZFP_regionDefinition[key] + + # Get SAF Definition settings + for key in list(SAF_Definition.keys()): + Definition[key] = SAF_Definition[key] + + # Get Regional Definition settings + #for key in regionDefinition.keys(): + # Definition[key] = regionDefinition[key] + + # Get the Site Definition Settings + for key in list(localDefinition.keys()): + Definition[key] = localDefinition[key] + + # Get the VariableList if overridden in ZFP Region + #try: + # exec "VariableList = "+ZFP_regionOverrides+".VariableList" + #except: + # pass + + # Get the VariableList if overridden in ZFP Region + try: + VariableList = SAF_Overrides.VariableList + except: + pass + + # Get the VariableList if overridden in Region + #try: + # exec "VariableList = "+regionOverrides+".VariableList" + #except: + # pass + + # Get the VariableList if overridden in Site + try: + exec("VariableList = "+siteDefinition+".VariableList") + except: + pass + + # To turn on this product 
for testing, + # set the display name. + Definition["displayName"] = "Baseline_SAF_" + + def __init__(self): + AreaFcst.TextProduct.__init__(self) diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SFT.py b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SFT.py index ed1dc658f0..9ce3ea5c43 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SFT.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SFT.py @@ -1,667 +1,667 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. # -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. 
-## - -#------------------------------------------------------------------------- -# Description: This product creates the Tabular State Forecast, which -# consists of a 7-day forecast over multiple areas, with the following -# elements: MaxT/MinT, daytime weather, PoP, QPF, and Snow. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. -#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# SFT, SFT___Definition, SFT__Overrides -#------------------------------------------------------------------------- -# User Configurable Variables: -# Definition Section: -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# -# defaultEditAreas defines edit area names, region names, and city/area -# names in one of two formats, depending upon whether -# you are supporting regional headers. Choose one and -# use it throughout the product. -# (editAreaName, "Regionlabel\nCitylabel") -# (editAreaName, "Citylabel") -# -# productName defines name of product e.g. "Tabular State Forecast" -# -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# -# wmoID WMO ID code for product header, such as "FOUS45" -# -# pil Product pil, such as "SFTBOS" -# -# zoneCode ZONE code for product header, such as "NYZ001>025" -# -# stateName State name for product header, such as "Western New York" -# -# wfoCityState WFO location, such as "Buffalo NY" -# -# Optional Configuration Items -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. 
-# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Out to 7 days: MinT, MaxT, PoP, Wx, Sky, Wind, QPF (opt.) -#------------------------------------------------------------------------- -# Edit Areas Needed: area1, area2, area3 -#------------------------------------------------------------------------- -# Associated Utilities Files e.g. Combinations file: None -#------------------------------------------------------------------------- -# Component Products: None -#------------------------------------------------------------------------- -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# -# To look up additional tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". -#------------------------------------------------------------------------- -# Additional Information: -# Note that time sampling will not be correct if product is updated -# after midnight for the PM issuance. -#------------------------------------------------------------------------- -# Example Output: -# -## FOUS45 Kxxx 241925 -## SFTxxx -## stZALL-251025- - -## TABULAR STATE FORECAST FOR STATENAME -## NATIONAL WEATHER SERVICE WfoCity WfoState -## 125 PM MDT TUE JUN 24 2003 - -## ROWS INCLUDE... 
-## DAILY PREDOMINANT DAYTIME WEATHER 6AM-6PM -## FORECAST TEMPERATURES...EARLY MORNING LOW/DAYTIME HIGH -## PROBABILITY OF PRECIPITATION 6AM-6PM/DAYTIME 6AM-6PM -## - INDICATES TEMPERATURES BELOW ZERO -## MM INDICATES MISSING DATA - - -## FCST FCST FCST FCST FCST FCST FCST -## TODAY WED THU FRI SAT SUN MON -## JUN 24 JUN 25 JUN 26 JUN 27 JUN 28 JUN 29 JUN 30 - -## ...REGION1... -## CITY1 -## SUNNY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY -## /77 46/61 46/70 53/75 60/75 60/79 66/86 -## /00 20/30 30/40 30/40 30/40 30/40 30/40 - -## CITY2 -## SUNNY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY -## /77 46/61 46/70 53/75 60/75 60/79 66/86 -## /00 20/30 30/40 30/40 30/40 30/40 30/40 - -## ...REGION2... -## CITY3 -## SUNNY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY -## /77 46/61 46/70 53/75 60/75 60/79 66/86 -## /00 20/30 30/40 30/40 30/40 30/40 30/40 - - -## $$ -# -######################################################################## - -import TextRules -import SampleAnalysis -import string, time, types - -from WxMethods import * - - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - VariableList = [ - (("Product Issuance", "productIssuance") , "Morning", "radio", - ["Morning","Afternoon"]), - ] - Definition = { - "type": "smart", - "displayName": "None", # for Product Generation Menu - "database": "ISC", - # Defines output location of finished product. - "outputFile": "{prddir}/TEXT/SFT_.txt", - "debug": 0, - - "defaultEditAreas": [("area1", "Region1\nCity1"), - ("area2", "Region1\nCity2"), - ("area3", "Region2\nCity3"), - ], - - # product identifiers - "productName": "Tabular State Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "zoneCode": "stZ000", # Zone Code, such as "GAZ025-056" - "stateName": "", # Name of state, such as "Georgia" - "wfoCityState": "", # Location of WFO - city state - "textdbPil": "", # Product ID for storing to AWIPS text database. 
- "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. - - # Product expiration/purge time - "fixedExpire": 1, #ensure VTEC actions don't affect expiration time - - - # options - "alwaysIncludePoP": 1, # include PoP, 0=no,1=yes - "alwaysIncludeQPF": 0, # include QFP, 0=no,1=yes - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - - def generateForecast(self, argDict): - # Generate formatted product for a list of edit areas - - # Get variables from varDict and Definition - self._getVariables(argDict) - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." - - # Determine time ranges - self._determineTimeRanges(argDict) - - # Sample the data - self._sampleData(argDict) - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, - "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - - fcst = self._postProcessProduct(fcst, argDict) - return fcst - - def _getVariables(self, argDict): - # Determine whether AM or PM product type - - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in 
self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # row alignments - self._rowLabelWidth = 3 - self._columnWidth = 9 - - def _determineTimeRanges(self, argDict): - # Determine time ranges for product - # Sets up self._popPeriods, self._snowPeriods, - # self._tempPeriods, self._codePeriods which - # are lists of tuples (timeRange, label). - - # Calculate ddhhmm string value - self._currentTime = argDict['creationTime'] #ZULU - self._ddhhmmTime = time.strftime("%d%H%M",time.gmtime( - self._currentTime)) - - # Determine expiration time - self._expireTime, self._ddhhmmTimeExpire = \ - self.getExpireTimeFromLToffset(self._currentTime, - self.expireOffset(), "") - - # timeLabel is the spelled-out version of the current time - self._timeLabel = self.getCurrentTime( - argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) - - # Number of days to sample (7 for AM, 7 for PM). Since PoP is - # sampled every 12 hours, we have either 13 or 14 periods to sample. - numPeriods = 7 - if self._productIssuance == "Morning": - numPeriodsPoP = 13 - else: - numPeriodsPoP = 14 - - # PM issuances always start tomorrow, thus we offset by 24 hours for - # most of the items, but only 12 for the PoP, since we want to catch - # tonights PoP. 
- if self._productIssuance == "Morning": - pmOffset = 0 - pmPopOffset = 0 - else: - pmOffset = 24 - pmPopOffset = 12 - - # table header ranges: only used for the labels - timePeriod = 24 - timeSpan = 12 - tableStartTR = self.createTimeRange(6+pmOffset, 6+pmOffset+11) - self._tablePeriods = self.getPeriods(tableStartTR, timePeriod, - timeSpan, numPeriods) - - # PoP Time ranges : - # 7 days of 12-hour periods - day and night - sync'd to 6am/6pm - # If AM, begin at 6am of issue day, then every 12h - # If PM, begin at 6pm of issue day, then every 12h - timePeriod = 12 - timeSpan = 12 - popStartTR = self.createTimeRange(6+pmPopOffset, 6+pmPopOffset+1) #6am - self._popPeriods = self.getPeriods(popStartTR, timePeriod, - timeSpan, numPeriodsPoP) - - - # QPF and Snow Time Ranges, 24hr summaries, 7 periods - # midnight to midnight, today for AM issuance, starting tmrw for PM - timePeriod = 24 - timeSpan = 24 - accStartTR = self.createTimeRange(0+pmOffset, 1+pmOffset) #midnight - self._qpfPeriods = self.getPeriods(accStartTR, timePeriod, timeSpan, - numPeriods) - - # Temp Time ranges : 7 or 8 periods, 24 hours apart, 12 hour span - # to get both the Max and Min Temps - # This is to catch the correct Max/Min temp grid - # Always begin with 3am LT of issue day to catch MinT - - timePeriod = 24 - timeSpan = 12 - tempStartTR = self.createTimeRange(3+pmOffset, 4+pmOffset) - self._tempPeriods = self.getPeriods(tempStartTR, timePeriod, timeSpan, - numPeriods) - - # Code Time ranges : - # 7 or 8 non-consecutive DAYLIGHT 12 hour periods - # If AM, begin at 6amLT of issue day - # If PM, begin at 6amLT tomorrow - timePeriod = 24 - timeSpan = 12 - codeStartTR = self.createTimeRange(6+pmOffset, 7+pmOffset) - self._codePeriods = self.getPeriods(codeStartTR, timePeriod, timeSpan, - numPeriods) - - return - - def _sampleData(self, argDict): - # Sample the data. 
- # Sets up self._sampler including sampling for - # pop, snow, temp, and code - sampleList = [] - sampleList.append((self._analysisListPoP(), self._popPeriods)) - sampleList.append((self._analysisListQPF(), self._qpfPeriods)) - sampleList.append((self._analysisListTemp(), self._tempPeriods)) - sampleList.append((self._analysisListCode(), self._codePeriods)) - sampleInfo = [] - for analList, periods in sampleList: - sampleInfo.append((analList, periods, self._areaList)) - self._sampler = self.getSampler(argDict, sampleInfo) - return - - def _preProcessProduct(self, fcst, argDict): - # initialize variable showing last region value in body - self._lastRegion = None - - # Add product heading to fcst string - issuedByString = self.getIssuedByString() - - productName = self._productName + " for " + self._stateName - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n" + \ - self._zoneCode + "-" + self._ddhhmmTimeExpire + "-\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - return fcst + self._rowDescription() + "\n\n" + self._tableHeader() + "\n\n" - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # determine the region and area names, separated by a new line - # only print out the region if it is different from the last one - regarea = string.split(areaLabel, '\n') - if len(regarea) > 1: - region = regarea[0] - area = regarea[1] - - # region has changed, need to output it - if self._lastRegion is None or region != self._lastRegion: - fcst = fcst + "..." 
+ region.upper() + "...\n " + area + "\n" - self._lastRegion = region - return fcst - else: - fcst = fcst + " " + area + "\n" - else: - fcst = fcst + regarea[0] + "\n" - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - - noRowLabel = "" - - # Weather Code - statList = self.getStatList(self._sampler, - self._analysisListCode(), self._codePeriods, editArea) - fcst = fcst + self.makeRow(noRowLabel, self._columnWidth, - self._codePeriods, statList, - self._getCCFCode, [], self._rowLabelWidth, - justify = 'l') - - # Max/Min Temperatures - statList = self.getStatList(self._sampler, - self._analysisListTemp(), self._tempPeriods, editArea) - fcst = fcst + self.makeRow( - noRowLabel, self._columnWidth, self._tempPeriods, - statList, self._mxmnValue, [], - self._rowLabelWidth, justify = 'l') - - # PoP - if self._alwaysIncludePoP != 0: - statList = self.getStatList(self._sampler, - self._analysisListPoP(), self._popPeriods, editArea) - - #need to make our own popColumnWidth for ###/### (###, /###xx) - popColumnWidth = [] - justifyList = [] - slashPeriods = [] - # special case, AM and 1st period, no value so slide entries right - if self._productIssuance == "Morning": - offset = 1 - popRowLabelWidth = self._rowLabelWidth + 3 - else: - offset = 0 - popRowLabelWidth = self._rowLabelWidth - - for x in xrange(len(self._popPeriods)): - if x%2 == offset: - wid = 3 - justifyMode = 'r' - else: - wid = self._columnWidth - 3 - justifyMode = 'l' - slashPeriods.append(self._popPeriods[x]) - popColumnWidth.append(wid) - justifyList.append(justifyMode) - - fcst = fcst + self.makeRow(noRowLabel, popColumnWidth, - self._popPeriods, statList, self._popValue, [slashPeriods], - popRowLabelWidth, justify = justifyList) - - - # QPF - if self._alwaysIncludeQPF != 0: - statList = self.getStatList(self._sampler, - self._analysisListQPF(), self._qpfPeriods, editArea) - fcst = fcst + self.makeRow( - noRowLabel, self._columnWidth, self._qpfPeriods, statList, - 
self._qpfValue, [], self._rowLabelWidth, justify = 'l') - - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - return fcst + "\n" - - def _postProcessProduct(self, fcst, argDict): - fcst = string.replace(fcst,"!"," ") - fcst = fcst + "\n$$\n\n" - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - return fcst - - # provides expiration time offset from today's midnight based on issuance. - def expireOffset(self): - if self._productIssuance == "Morning": - #4pm today - return 17 - else: - #4am tomorrow - return 24+5 - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - - def _analysisListTemp(self): - return [ - ("MinT", self.avg), - ("MaxT", self.avg), - ] - - def _analysisListCode(self): - return [ - ("MinT", self.avg), - ("MaxT", self.avg), - ("PoP", self.stdDevMaxAvg), - ("Wx", self.dominantWx), - ("Sky", self.avg), - ("Wind", self.vectorAvg) - ] - - def _analysisListQPF(self): - return [ - ("QPF", self.minMaxSum), - ] - - def _analysisListPoP(self): - return [ - ("PoP", self.stdDevMaxAvg), - ] - - def _popValue(self, statDict, timeRange, argList): - #arglist[0] is a list of timeRanges for which a leading "/" is required. - slashList = argList[0] - # return a string for the pop, such as "80" - # can return MM for no data - - val = self.getStats(statDict, "PoP__stdDevMaxAvg") - if val is None: - popString = "MM" - else: - popMax10=int(self.round(val,"Nearest",10)) - popString = `popMax10` - if popString == "0": - popString = "00" #requires a leading zero - - # leading '/' required? 
- if (timeRange, "") in slashList: - return "/" + popString - else: - return popString - - def _qpfValue(self, statDict, timeRange, argList): - # Return a string for the QPF, such as 0, 0.05 - # can return "--" for missing data - val = self.getStats(statDict, "QPF__minMaxSum") - if val is None: - return "MM" - minV, maxV, sumV = val - if maxV < 0.01: - return "0.00" - else: - return string.strip("%5.2f" %sumV) - - def _getCCFCode(self, statDict, timeRange, argList): - translateCodes = [("P","BLZZRD"),("T","TSTRMS"),("O","RNSNOW"), - ("R","RAIN"),("S","SNOW"),("W","SHWRS"),("J","SNOSHWR"), - ("L","DRZL"),("X","SLEET"),("Y","FZRAIN"),("Z","FZDRZL"), - ("M","FLRRYS"),("Q","BLGSNO"),("N","WINDY"),("F","FOGGY"), - ("G","VRYHOT"),("I","VRYCLD"),("D","DUST"),("H","HAZE"), - ("K","SMOKE"),("C","CLOUDY"),("E","MOCLDY"),("B","PTCLDY"), - ("U","SUNNY"),("A","FAIR"),("V","CLEAR") - ] - # Return the SFT weather code - code = self.getCode(statDict, timeRange) - if code is None: - return "MM" - else: - for symbol, word in translateCodes: - if code == symbol: - return word - return "MM" - - - def _mxmnValue(self, statDict, timeRange, argList): - # Return a string for the max or min temperatures. 
- # String may be a single value, such as /75, or 54/75 (Min/Max) - # Can return MM for missing data - # Get the numbers - maxval = self.getStats(statDict, "MaxT") - if maxval == None: - maxString = "MM" - else: - maxString = `int(round(maxval))` - minval = self.getStats(statDict, "MinT") - if minval == None: - minString = "MM" - else: - minString = `int(round(minval))` - - # special case, AM and 1st period, only 1 number (MaxT) goes here - if timeRange == self._tempPeriods[0][0] and self._productIssuance == "Morning": - return "!!/" + maxString - - # normal cases, two numbers (Min/Max) - else: - return minString + "/" + maxString - - - def _tableHeader(self): - # Put in the table header with the dates - # qpfPeriods are midnight to midnight - convenient for labeling - fcstString, dayNumString, dateString = self._calcPeriodLabels( - self._tablePeriods, self._columnWidth, self._rowLabelWidth) - return fcstString + "\n" + dayNumString + "\n" + dateString - - def _popTimeLabel(self): - # Returns the valid time for the daily POP field - return " nighttime 6PM-6AM/daytime 6AM-6PM" - - def _qpfTimeLabel(self): - # Returns the valid time for the daily qpf field - return " 12AM-12AM" - - def _rowDescription(self): - # Returns the row description between the product title and - # table header. Override in your local file as needed. 
- - ident = " " - - # s is the built-up string containing the description - s = "ROWS INCLUDE...\n" - - - # Weather - s = s + ident + "Daily predominant daytime weather 6AM-6PM" - - # Temps - s = s + "\n" + ident + \ - "Forecast temperatures...early morning low/daytime high" - - # PoP - if self._alwaysIncludePoP: - s = s + "\n" + ident + ident + ident + \ - "Probability of precipitation" + self._popTimeLabel() - - # other stuff - s = s + "\n" + \ - ident + ident + ident + " - indicates temperatures below zero\n" + \ - ident + ident + ident + "MM indicates missing data" - - #QPF - if self._alwaysIncludeQPF: - s = s + "\n" + ident + \ - "Quantitative precipitation - inches -" + \ - self._qpfTimeLabel() - - - return s + "\n" - - def _calcPeriodLabels(self, periods, colWidth, startPoint): - fcstString = string.ljust(" ", startPoint) - dayNumString = string.ljust(" ", startPoint) - dateString = string.ljust(" ", startPoint) - - for index in xrange(len(periods)): - fcstString = fcstString + string.ljust("FCST", colWidth) - dayNumString = dayNumString + string.ljust( - self._periodNumString(index, periods[index][0]), colWidth) - timeRange, label = periods[index] - dateString = dateString + string.ljust( - self._formatDate(timeRange), colWidth) - - return (fcstString, dayNumString, dateString) - - def _formatDate(self, timeRange): - # calculate the local time - startTime = timeRange.startTime() + self.determineShift() - label = startTime.stringFmt("%b %d") - return label - - def _periodNumString(self, num, period): - #AM: Today, Tmrw, NextDay, Day 4, 5, 6, 7 - #PM: Tngt, Tmrw, NextDay, Day 3, 4, 5, 6 - if num == 0 and self._productIssuance == "Morning": - return "Today" - else: - # calculate the day of the week based on the period - periodTime = period.startTime().unixTime() - return time.strftime("%a", time.localtime(periodTime)) - +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# 
+# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: This product creates the Tabular State Forecast, which +# consists of a 7-day forecast over multiple areas, with the following +# elements: MaxT/MinT, daytime weather, PoP, QPF, and Snow. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# SFT, SFT___Definition, SFT__Overrides +#------------------------------------------------------------------------- +# User Configurable Variables: +# Definition Section: +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# +# defaultEditAreas defines edit area names, region names, and city/area +# names in one of two formats, depending upon whether +# you are supporting regional headers. Choose one and +# use it throughout the product. +# (editAreaName, "Regionlabel\nCitylabel") +# (editAreaName, "Citylabel") +# +# productName defines name of product e.g. "Tabular State Forecast" +# +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# +# wmoID WMO ID code for product header, such as "FOUS45" +# +# pil Product pil, such as "SFTBOS" +# +# zoneCode ZONE code for product header, such as "NYZ001>025" +# +# stateName State name for product header, such as "Western New York" +# +# wfoCityState WFO location, such as "Buffalo NY" +# +# Optional Configuration Items +# database Source database for product. 
Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Out to 7 days: MinT, MaxT, PoP, Wx, Sky, Wind, QPF (opt.) +#------------------------------------------------------------------------- +# Edit Areas Needed: area1, area2, area3 +#------------------------------------------------------------------------- +# Associated Utilities Files e.g. Combinations file: None +#------------------------------------------------------------------------- +# Component Products: None +#------------------------------------------------------------------------- +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# +# To look up additional tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Additional Information: +# Note that time sampling will not be correct if product is updated +# after midnight for the PM issuance. +#------------------------------------------------------------------------- +# Example Output: +# +## FOUS45 Kxxx 241925 +## SFTxxx +## stZALL-251025- + +## TABULAR STATE FORECAST FOR STATENAME +## NATIONAL WEATHER SERVICE WfoCity WfoState +## 125 PM MDT TUE JUN 24 2003 + +## ROWS INCLUDE... 
+## DAILY PREDOMINANT DAYTIME WEATHER 6AM-6PM +## FORECAST TEMPERATURES...EARLY MORNING LOW/DAYTIME HIGH +## PROBABILITY OF PRECIPITATION 6AM-6PM/DAYTIME 6AM-6PM +## - INDICATES TEMPERATURES BELOW ZERO +## MM INDICATES MISSING DATA + + +## FCST FCST FCST FCST FCST FCST FCST +## TODAY WED THU FRI SAT SUN MON +## JUN 24 JUN 25 JUN 26 JUN 27 JUN 28 JUN 29 JUN 30 + +## ...REGION1... +## CITY1 +## SUNNY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY +## /77 46/61 46/70 53/75 60/75 60/79 66/86 +## /00 20/30 30/40 30/40 30/40 30/40 30/40 + +## CITY2 +## SUNNY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY +## /77 46/61 46/70 53/75 60/75 60/79 66/86 +## /00 20/30 30/40 30/40 30/40 30/40 30/40 + +## ...REGION2... +## CITY3 +## SUNNY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY PTCLDY +## /77 46/61 46/70 53/75 60/75 60/79 66/86 +## /00 20/30 30/40 30/40 30/40 30/40 30/40 + + +## $$ +# +######################################################################## + +import TextRules +import SampleAnalysis +import string, time, types + +from WxMethods import * + + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + VariableList = [ + (("Product Issuance", "productIssuance") , "Morning", "radio", + ["Morning","Afternoon"]), + ] + Definition = { + "type": "smart", + "displayName": "None", # for Product Generation Menu + "database": "ISC", + # Defines output location of finished product. + "outputFile": "{prddir}/TEXT/SFT_.txt", + "debug": 0, + + "defaultEditAreas": [("area1", "Region1\nCity1"), + ("area2", "Region1\nCity2"), + ("area3", "Region2\nCity3"), + ], + + # product identifiers + "productName": "Tabular State Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "zoneCode": "stZ000", # Zone Code, such as "GAZ025-056" + "stateName": "", # Name of state, such as "Georgia" + "wfoCityState": "", # Location of WFO - city state + "textdbPil": "", # Product ID for storing to AWIPS text database. 
+ "awipsWANPil": "", # Product ID for transmitting to AWIPS WAN. + + # Product expiration/purge time + "fixedExpire": 1, #ensure VTEC actions don't affect expiration time + + + # options + "alwaysIncludePoP": 1, # include PoP, 0=no,1=yes + "alwaysIncludeQPF": 0, # include QFP, 0=no,1=yes + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + + def generateForecast(self, argDict): + # Generate formatted product for a list of edit areas + + # Get variables from varDict and Definition + self._getVariables(argDict) + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." + + # Determine time ranges + self._determineTimeRanges(argDict) + + # Sample the data + self._sampleData(argDict) + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, + "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + + fcst = self._postProcessProduct(fcst, argDict) + return fcst + + def _getVariables(self, argDict): + # Determine whether AM or PM product type + + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in 
list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # row alignments + self._rowLabelWidth = 3 + self._columnWidth = 9 + + def _determineTimeRanges(self, argDict): + # Determine time ranges for product + # Sets up self._popPeriods, self._snowPeriods, + # self._tempPeriods, self._codePeriods which + # are lists of tuples (timeRange, label). + + # Calculate ddhhmm string value + self._currentTime = argDict['creationTime'] #ZULU + self._ddhhmmTime = time.strftime("%d%H%M",time.gmtime( + self._currentTime)) + + # Determine expiration time + self._expireTime, self._ddhhmmTimeExpire = \ + self.getExpireTimeFromLToffset(self._currentTime, + self.expireOffset(), "") + + # timeLabel is the spelled-out version of the current time + self._timeLabel = self.getCurrentTime( + argDict, "%l%M %p %Z %a %b %e %Y", stripLeading=1) + + # Number of days to sample (7 for AM, 7 for PM). Since PoP is + # sampled every 12 hours, we have either 13 or 14 periods to sample. + numPeriods = 7 + if self._productIssuance == "Morning": + numPeriodsPoP = 13 + else: + numPeriodsPoP = 14 + + # PM issuances always start tomorrow, thus we offset by 24 hours for + # most of the items, but only 12 for the PoP, since we want to catch + # tonights PoP. 
+ if self._productIssuance == "Morning": + pmOffset = 0 + pmPopOffset = 0 + else: + pmOffset = 24 + pmPopOffset = 12 + + # table header ranges: only used for the labels + timePeriod = 24 + timeSpan = 12 + tableStartTR = self.createTimeRange(6+pmOffset, 6+pmOffset+11) + self._tablePeriods = self.getPeriods(tableStartTR, timePeriod, + timeSpan, numPeriods) + + # PoP Time ranges : + # 7 days of 12-hour periods - day and night - sync'd to 6am/6pm + # If AM, begin at 6am of issue day, then every 12h + # If PM, begin at 6pm of issue day, then every 12h + timePeriod = 12 + timeSpan = 12 + popStartTR = self.createTimeRange(6+pmPopOffset, 6+pmPopOffset+1) #6am + self._popPeriods = self.getPeriods(popStartTR, timePeriod, + timeSpan, numPeriodsPoP) + + + # QPF and Snow Time Ranges, 24hr summaries, 7 periods + # midnight to midnight, today for AM issuance, starting tmrw for PM + timePeriod = 24 + timeSpan = 24 + accStartTR = self.createTimeRange(0+pmOffset, 1+pmOffset) #midnight + self._qpfPeriods = self.getPeriods(accStartTR, timePeriod, timeSpan, + numPeriods) + + # Temp Time ranges : 7 or 8 periods, 24 hours apart, 12 hour span + # to get both the Max and Min Temps + # This is to catch the correct Max/Min temp grid + # Always begin with 3am LT of issue day to catch MinT + + timePeriod = 24 + timeSpan = 12 + tempStartTR = self.createTimeRange(3+pmOffset, 4+pmOffset) + self._tempPeriods = self.getPeriods(tempStartTR, timePeriod, timeSpan, + numPeriods) + + # Code Time ranges : + # 7 or 8 non-consecutive DAYLIGHT 12 hour periods + # If AM, begin at 6amLT of issue day + # If PM, begin at 6amLT tomorrow + timePeriod = 24 + timeSpan = 12 + codeStartTR = self.createTimeRange(6+pmOffset, 7+pmOffset) + self._codePeriods = self.getPeriods(codeStartTR, timePeriod, timeSpan, + numPeriods) + + return + + def _sampleData(self, argDict): + # Sample the data. 
+ # Sets up self._sampler including sampling for + # pop, snow, temp, and code + sampleList = [] + sampleList.append((self._analysisListPoP(), self._popPeriods)) + sampleList.append((self._analysisListQPF(), self._qpfPeriods)) + sampleList.append((self._analysisListTemp(), self._tempPeriods)) + sampleList.append((self._analysisListCode(), self._codePeriods)) + sampleInfo = [] + for analList, periods in sampleList: + sampleInfo.append((analList, periods, self._areaList)) + self._sampler = self.getSampler(argDict, sampleInfo) + return + + def _preProcessProduct(self, fcst, argDict): + # initialize variable showing last region value in body + self._lastRegion = None + + # Add product heading to fcst string + issuedByString = self.getIssuedByString() + + productName = self._productName + " for " + self._stateName + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n" + \ + self._zoneCode + "-" + self._ddhhmmTimeExpire + "-\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + return fcst + self._rowDescription() + "\n\n" + self._tableHeader() + "\n\n" + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # determine the region and area names, separated by a new line + # only print out the region if it is different from the last one + regarea = string.split(areaLabel, '\n') + if len(regarea) > 1: + region = regarea[0] + area = regarea[1] + + # region has changed, need to output it + if self._lastRegion is None or region != self._lastRegion: + fcst = fcst + "..." 
+ region.upper() + "...\n " + area + "\n" + self._lastRegion = region + return fcst + else: + fcst = fcst + " " + area + "\n" + else: + fcst = fcst + regarea[0] + "\n" + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + + noRowLabel = "" + + # Weather Code + statList = self.getStatList(self._sampler, + self._analysisListCode(), self._codePeriods, editArea) + fcst = fcst + self.makeRow(noRowLabel, self._columnWidth, + self._codePeriods, statList, + self._getCCFCode, [], self._rowLabelWidth, + justify = 'l') + + # Max/Min Temperatures + statList = self.getStatList(self._sampler, + self._analysisListTemp(), self._tempPeriods, editArea) + fcst = fcst + self.makeRow( + noRowLabel, self._columnWidth, self._tempPeriods, + statList, self._mxmnValue, [], + self._rowLabelWidth, justify = 'l') + + # PoP + if self._alwaysIncludePoP != 0: + statList = self.getStatList(self._sampler, + self._analysisListPoP(), self._popPeriods, editArea) + + #need to make our own popColumnWidth for ###/### (###, /###xx) + popColumnWidth = [] + justifyList = [] + slashPeriods = [] + # special case, AM and 1st period, no value so slide entries right + if self._productIssuance == "Morning": + offset = 1 + popRowLabelWidth = self._rowLabelWidth + 3 + else: + offset = 0 + popRowLabelWidth = self._rowLabelWidth + + for x in range(len(self._popPeriods)): + if x%2 == offset: + wid = 3 + justifyMode = 'r' + else: + wid = self._columnWidth - 3 + justifyMode = 'l' + slashPeriods.append(self._popPeriods[x]) + popColumnWidth.append(wid) + justifyList.append(justifyMode) + + fcst = fcst + self.makeRow(noRowLabel, popColumnWidth, + self._popPeriods, statList, self._popValue, [slashPeriods], + popRowLabelWidth, justify = justifyList) + + + # QPF + if self._alwaysIncludeQPF != 0: + statList = self.getStatList(self._sampler, + self._analysisListQPF(), self._qpfPeriods, editArea) + fcst = fcst + self.makeRow( + noRowLabel, self._columnWidth, self._qpfPeriods, statList, + self._qpfValue, 
[], self._rowLabelWidth, justify = 'l') + + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + return fcst + "\n" + + def _postProcessProduct(self, fcst, argDict): + fcst = string.replace(fcst,"!"," ") + fcst = fcst + "\n$$\n\n" + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + return fcst + + # provides expiration time offset from today's midnight based on issuance. + def expireOffset(self): + if self._productIssuance == "Morning": + #4pm today + return 17 + else: + #4am tomorrow + return 24+5 + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + + def _analysisListTemp(self): + return [ + ("MinT", self.avg), + ("MaxT", self.avg), + ] + + def _analysisListCode(self): + return [ + ("MinT", self.avg), + ("MaxT", self.avg), + ("PoP", self.stdDevMaxAvg), + ("Wx", self.dominantWx), + ("Sky", self.avg), + ("Wind", self.vectorAvg) + ] + + def _analysisListQPF(self): + return [ + ("QPF", self.minMaxSum), + ] + + def _analysisListPoP(self): + return [ + ("PoP", self.stdDevMaxAvg), + ] + + def _popValue(self, statDict, timeRange, argList): + #arglist[0] is a list of timeRanges for which a leading "/" is required. + slashList = argList[0] + # return a string for the pop, such as "80" + # can return MM for no data + + val = self.getStats(statDict, "PoP__stdDevMaxAvg") + if val is None: + popString = "MM" + else: + popMax10=int(self.round(val,"Nearest",10)) + popString = repr(popMax10) + if popString == "0": + popString = "00" #requires a leading zero + + # leading '/' required? 
+ if (timeRange, "") in slashList: + return "/" + popString + else: + return popString + + def _qpfValue(self, statDict, timeRange, argList): + # Return a string for the QPF, such as 0, 0.05 + # can return "--" for missing data + val = self.getStats(statDict, "QPF__minMaxSum") + if val is None: + return "MM" + minV, maxV, sumV = val + if maxV < 0.01: + return "0.00" + else: + return string.strip("%5.2f" %sumV) + + def _getCCFCode(self, statDict, timeRange, argList): + translateCodes = [("P","BLZZRD"),("T","TSTRMS"),("O","RNSNOW"), + ("R","RAIN"),("S","SNOW"),("W","SHWRS"),("J","SNOSHWR"), + ("L","DRZL"),("X","SLEET"),("Y","FZRAIN"),("Z","FZDRZL"), + ("M","FLRRYS"),("Q","BLGSNO"),("N","WINDY"),("F","FOGGY"), + ("G","VRYHOT"),("I","VRYCLD"),("D","DUST"),("H","HAZE"), + ("K","SMOKE"),("C","CLOUDY"),("E","MOCLDY"),("B","PTCLDY"), + ("U","SUNNY"),("A","FAIR"),("V","CLEAR") + ] + # Return the SFT weather code + code = self.getCode(statDict, timeRange) + if code is None: + return "MM" + else: + for symbol, word in translateCodes: + if code == symbol: + return word + return "MM" + + + def _mxmnValue(self, statDict, timeRange, argList): + # Return a string for the max or min temperatures. 
+ # String may be a single value, such as /75, or 54/75 (Min/Max) + # Can return MM for missing data + # Get the numbers + maxval = self.getStats(statDict, "MaxT") + if maxval == None: + maxString = "MM" + else: + maxString = repr(int(round(maxval))) + minval = self.getStats(statDict, "MinT") + if minval == None: + minString = "MM" + else: + minString = repr(int(round(minval))) + + # special case, AM and 1st period, only 1 number (MaxT) goes here + if timeRange == self._tempPeriods[0][0] and self._productIssuance == "Morning": + return "!!/" + maxString + + # normal cases, two numbers (Min/Max) + else: + return minString + "/" + maxString + + + def _tableHeader(self): + # Put in the table header with the dates + # qpfPeriods are midnight to midnight - convenient for labeling + fcstString, dayNumString, dateString = self._calcPeriodLabels( + self._tablePeriods, self._columnWidth, self._rowLabelWidth) + return fcstString + "\n" + dayNumString + "\n" + dateString + + def _popTimeLabel(self): + # Returns the valid time for the daily POP field + return " nighttime 6PM-6AM/daytime 6AM-6PM" + + def _qpfTimeLabel(self): + # Returns the valid time for the daily qpf field + return " 12AM-12AM" + + def _rowDescription(self): + # Returns the row description between the product title and + # table header. Override in your local file as needed. 
+ + ident = " " + + # s is the built-up string containing the description + s = "ROWS INCLUDE...\n" + + + # Weather + s = s + ident + "Daily predominant daytime weather 6AM-6PM" + + # Temps + s = s + "\n" + ident + \ + "Forecast temperatures...early morning low/daytime high" + + # PoP + if self._alwaysIncludePoP: + s = s + "\n" + ident + ident + ident + \ + "Probability of precipitation" + self._popTimeLabel() + + # other stuff + s = s + "\n" + \ + ident + ident + ident + " - indicates temperatures below zero\n" + \ + ident + ident + ident + "MM indicates missing data" + + #QPF + if self._alwaysIncludeQPF: + s = s + "\n" + ident + \ + "Quantitative precipitation - inches -" + \ + self._qpfTimeLabel() + + + return s + "\n" + + def _calcPeriodLabels(self, periods, colWidth, startPoint): + fcstString = string.ljust(" ", startPoint) + dayNumString = string.ljust(" ", startPoint) + dateString = string.ljust(" ", startPoint) + + for index in range(len(periods)): + fcstString = fcstString + string.ljust("FCST", colWidth) + dayNumString = dayNumString + string.ljust( + self._periodNumString(index, periods[index][0]), colWidth) + timeRange, label = periods[index] + dateString = dateString + string.ljust( + self._formatDate(timeRange), colWidth) + + return (fcstString, dayNumString, dateString) + + def _formatDate(self, timeRange): + # calculate the local time + startTime = timeRange.startTime() + self.determineShift() + label = startTime.stringFmt("%b %d") + return label + + def _periodNumString(self, num, period): + #AM: Today, Tmrw, NextDay, Day 4, 5, 6, 7 + #PM: Tngt, Tmrw, NextDay, Day 3, 4, 5, 6 + if num == 0 and self._productIssuance == "Morning": + return "Today" + else: + # calculate the day of the week based on the period + periodTime = period.startTime().unixTime() + return time.strftime("%a", time.localtime(periodTime)) + diff --git a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SRF.py 
b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SRF.py index 89a7962825..ec474f19b4 100644 --- a/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SRF.py +++ b/edexOsgi/com.raytheon.edex.plugin.gfe/utility/common_static/base/gfe/textproducts/templates/product/SRF.py @@ -1,1962 +1,1962 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -## -# This is a base file that is not intended to be overridden. -## - -#------------------------------------------------------------------------- -# Description: This produces a Surf Zone Forecast. -#------------------------------------------------------------------------- -# Copying: -# This software is in the public domain, furnished "as is", without technical -# support, and with no warranty, express or implied, as to its usefulness for -# any purpose. 
-#------------------------------------------------------------------------- -# Standard and Local file names and Locations: -# SRF, SRF_TBW_Definition, SRF_TBW_Overrides -#------------------------------------------------------------------------- -# Customization Points: -# -# DEFINITION SECTION -# -# Required Configuration Items: -# -# displayName If not None, defines how product appears in GFE GUI -# -# areaDictionary Create a SurfAreaDictionary with entries as described below. -# The SurfAreaDictionary appears in the TextUtilities window -# of the DefineTextProducts dialog. You can use it as a -# starting point to create your own. -# -# The SRF formatter is unique in that the various phrases are based on -# different edit areas. The SurfAreaDictionary specifies which areas -# to use for each type of phrase. -# -# For example, the sky, weather, and wind information -# typically comes from a coastal land/sea edit area which you -# will need to create while the swell information comes from a -# coastal waters area. Meanwhile, each segment of the product is -# based on a public zone. -# -# The "additionalAreasDict", which can be overridden, lists -# the weather elements, analysis, and phrases for each type of edit area. -# -# The SurfAreaDictionary has an entry for each public zone -# for which you want a SRF product segment. -# You can copy the information from the AreaDictionary as a starting point. -# Then add the following information for each zone: -# -# "landSeaArea": An edit area you need to create which contains grid -# points along the coast, including both land and sea. -# "marineArea": Typically, the coastal waters area. -# "surfAreas": The surfAreas entry is an optional list of edit areas and labels -# for which to create a surf (waveHeight) phrase. 
-# For example, If you have: -# -# surfAreas: [ -# ("WestCoast", "Surf along west facing reefs.............."), -# ("NorthCoast", "Surf along north facing reefs............."), -# ("EastCoast", "Surf along east facing reefs.............."), -# ("SouthCoast", "Surf along south facing reefs............."), -# ] -# -# You would get a surfHeight report for each surfArea listed: -# -# Surf along west facing reefs................10 to 12 feet. -# Surf along north facing reefs...............4 to 6 feet. -# Surf along east facing reefs................2 to 3 feet. -# Surf along south facing reefs...............4 to 6 feet. -# -# If the list is empty, you will simply get surfHeight reported -# for the current value of the WaveHeight Grid sampled from the -# landSea edit area: -# -# Surf................1 TO 2 feet. -# -# "uviCity": The UVI index is take from the previous UVI product (pil is UVI). -# This specifies the city name from which to report the uvi index. -# "tideTables": A list of the tide tables from which you want tides reported -# for each public zone. -# Tide information is taken from files that you must set-up locally. -# Make a file for each tide table that you want reported. -# You can include a year's worth of tide information and update the files once -# a year. -# Then list the tide tables and corresponding files in the "tideFiles" -# Definition entry (see below). -# Tide tables should be in the format found at the website: -# http://co-ops.nos.noaa.gov/tides05/ -# -# defaultEditAreas defines edit areas, default is Combinations. -# Note that zones can be combined for the SRF product. -# If so, the corresponding landSeaAreas, marineAreas -# will be combined, and the surfAreas and tideTables -# handled correctly. -# Note that if you always have the same combinations, -# you need only list this additional information for -# one of the zones in each combination. -# productName defines name of product e.g. 
"Zone Forecast Product" -# fullStationID Full station identifier, 4 letter, such as "KSLC". -# wmoID WMO ID code for product header, such as "FOUS45" -# pil Product pil, such as "SFTBOS" -# areaName (opt.) Area name for product header, such as "Western New York" -# wfoCityState WFO location, such as "Buffalo NY" -# -# Optional Configuration Items -# mapNameForCombinations Name of the map background that is used for -# creating/editing the combinations file. This must -# be defined for the GFE zone combiner -# database Source database for product. Can be "Official", -# "Fcst" or "ISC" -# outputFile Defines the output location of the finished product -# when saved from the Formatter Launcher. -# debug If on, debug_print statements will appear. -# textdbPil Defines the awips product identifier -# (e.g., DENCCFDEN) that is used to store the product -# in the AWIPS text database. -# This value is also used for the default GUI entry for -# storage. -# awipsWANPil Defines the awips product identifier -# (e.g., KBOUCCFDEN) that is used to transmit the -# product to the AWIPS WAN. -# This value is also used for the default GUI -# entry for storage. -# periodCombining If 1, an attempt will be made to combine components -# or time periods into one. Otherwise no period -# combining will will be done. -# individualExtended If individualExtended == 1, an extended forecast will be -# generated for each individual area -# extendedLabel If extendedLabel== 1, a label will be included for each -# individual extended -# tempLocalEffects Set to 1 to after defining edit areas Inland -# and Coastal to enable local effects for temperature -# windLocalEffects Set to 1 after defining edit areas Inland and Coastal -# to enable local effects for wind. -# If you change these edit area names, -# make sure that the names in the "getIntersectAreas" -# and "valleyRidgeAreaNames" are set to the new names. -# surfGrid Name of grid to use for the waveHeight_phrase. 
Default is SurfHeight, -# but can be set to WaveHeight for sites not creating a SurfHeight grid. -# ripGrid If set to the name of a rip current grid, this will be used -# for the rip_phrase instead of using Wind Grid and SurfHeight/WaveHeight -# Grid values to calculate a rip_phrase value. -# waterSpoutGrid If you want a waterSpout phrase, set this to your Water Spout grid -# name. Also, make sure to add this to the analysisList for -# "marineArea" in the "_additionalAreasDict". -# includeOutlook If 1, OUTLOOK section will be included. The OUTLOOK section is -# is a hand-editied narrative outlook. -# tideFiles Make a file for each tide table that you want reported. -# Tide tables should be in the format given at the website: -# http://co-ops.nos.noaa.gov/tides05/ -# includeTideHeights If 1, tide heights will be included in the tide output. -# extraAreaPrefix Prefix for extra temporary edit areas that must be created to run -# this product e.g. combinations of landSeaAreas -# surfDebug If 1, produces information showing the extra edit areas created -# and which areas are used for which phrases. -# -# Other Important Configuration Items: -# -# --To Add or Remove phrases from the SRF, override the getSurfPeriod_phraseList method -# -# --To Change the analysisList OR to specify which areas to use for which phrases, -# override the _additionalAreasDict. -# -# --If you want to get the previous synopsis, and _uvi statement, -# set Definition section "statePil" -# -# --Override "seaBreeze_thresholds" (ConfigVariables) -# to set up offshore wind values for the chop_phrase -# -#------------------------------------------------------------------------- -# Weather Elements Needed: -# Hazards (optional): If provided, headlines will be generated. 
-# Sky, Wind (6 hourly), PoP, MaxT, MinT, Sky, Wind, T, HeatIndex, LAL -# Wx, WaveHeight OR SurfHeight, Swell, Period, Swell2, Period2 -# Optional: -# rip current grid (scalar values 1-3 for LOW, MODERATE, HIGH) -# water spout grid (scalar values 1-5) -# -#------------------------------------------------------------------------- -# OTHER INPUTS -# Tides from file, UVI from Awips -#------------------------------------------------------------------------- -# Component Products: -# SurfPeriod (component) -# ExtendedLabel(component) -# SurfExtended (component) -#------------------------------------------------------------------------- -# Programmers and Support including product team leader's email: -# Product Team POC: Charlie Paxton -# FSL POC: Tracy Hansen -#------------------------------------------------------------------------- -# Development tasks that are identified and in progress: -# To look up tasks and their status, see the Text Product User Guide -# Section on "Tkgnats: Task Reporting System". -#------------------------------------------------------------------------- -# Example Output: -# Refer to the NWS Directives for Marine Services. -#------------------------------------------------------------------------- -# Implementation Notes -# -# The _sampleData method must first call _getCombinedAdditionalAreas -# to create combinations of additional edit areas based on the -# combinations the user selected for the public zones for which the -# segments of the product are reported. -# These combinations are stored in the _combinedAreaDict: -# {combo: {areaType: areaLabel} -# Then _sampleData calls ForecastNarrative.getNarrativeData to interpret -# the narrative definition, the components of which can now use -# the _combinedAreaDict in creating their "additionalAreas", -# "additionalAreasAnalysisList", intersectAreas", intersectWithAreas", -# and "intersectAnalysisList" entries. 
-# Finally, each phrase setUp method will call _setSurfAreaLabel to look -# up and set the areaLabel for the phrase using the configurable -# _additionaAreasDict: -# {areaType: {analysisList: [], phraseList: []}} -# and the _combinedAreaDict. -# -#------------------------------------------------------------------------- - -import TextRules -import SampleAnalysis -import ForecastNarrative -import time, string, types, os, re, copy - -class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): - -## VariableList = [ -## (("Issuance Type", "issuanceType") , "ROUTINE", "radio", -## ["ROUTINE", "UPDATE", "CORRECTION"]), -## ] - - Definition = { - "type": "smart", - "displayName": "None", - "statePil": "MIA", - "outputFile": "{prddir}/TEXT/SRF_.txt", - -########################################################################## -##### Edit Areas: Create Combinations file with edit area combinations. - # Name of map background for creating Combinations - "mapNameForCombinations": "Zones_TBW", - "showZoneCombiner" : 1, # 1 to cause zone combiner to display - "defaultEditAreas" : "Combinations_SRF__", - -########################################################################## -##### Product identifiers - # Source database for product. Can be "Official", "Fcst" or "ISC" - "database": "Official", - "debug": 0, - "productName": "Surfzone Forecast", # product name - "fullStationID": "", # full station identifier (4letter) - "wmoID": "", # WMO ID - "pil": "", # Product pil - "areaName": "", # Name of state, such as "Georgia" -- optional - "wfoCityState": "", # Location of WFO - city and state name - - "textdbPil": "", # Product ID for storing to AWIPS text database. - "awipsWANPil":"" , # Product ID for transmitting to AWIPS WAN. 
- "srfPil": "SRFTBW", - "uviPil": "UVITBW", - - "periodCombining" : 1, # If 1, combine periods, if possible - "tempLocalEffects": 0, # Set to 1 to enable Temp local effects AFTER - # creating Inland and Coastal edit areas - "windLocalEffects": 0, # Set to 1 to enable wind local effects AFTER - # creating Inland and Coastal edit areas - #"surfGrid": "SurfHeight", # Use grid for waveHeight_phrase - "surfGrid": "WaveHeight", # Use grid for waveHeight_phrase - "ripGrid": "", # Use grid for rip_phrase - "waterSpoutGrid": "", # Use grid for waterSpout_phrase - "includeOutlook": 0, # If 1, OUTLOOK section included - "outLookText": "\n.OUTLOOK...",# Text for OUTLOOK section - "tideFiles": { # For each tide table, list the file where it can - # be found - "Venice Inlet": "/data/local/localapps/tides/VeniceInlet.txt", - "Saint Petersburg": "/data/local/localapps/tides/SaintPetersburg.txt", - "Fort Myers": "/data/local/localapps/tides/FortMyers.txt", - "Cedar Key": "/data/local/localapps/tides/CedarKey.txt", - }, - "includeTideHeights": 0, - "extraAreaPrefix": "__ExtraSurfArea", - "surfDebug": 1, - - #"purgeTime": 15, - -########################################################################## -##### Product variables - # To include a MultipleElementTable (e.g. 
Temp Pop Table) - # for each area in the current Combination: - # Set "includeMultipleElement" to 1 - # Set the "elementList" and "singleValueFormat" flag if desired - # "elementList" may include "Temp", "PoP", and/or "Humidity" - # "singleValueFormat" lists only one value per element - # Make sure you are using a Combinations file - # Modify the CityDictionary TextUtility file or create a new one - "includeMultipleElementTable": 0, - "includeMultipleElementTable_perPeriod": 0, - "elementList" : ["Temp", "Humidity", "PoP"], - "singleValueFormat": 0, - "cityDictionary": "CityDictionary", - "areaDictionary": "SurfAreaDictionary", - "language": "english", - "synopsisUGC": "", # UGC code for synopsis - "synopsisHeading": ".SYNOPSIS...",# Heading for synopsis - # If individualExtended == 1, an extended forecast will be - # generated for each individual area - # If extendedLabel == 1, a label will be included for each - # individual extended - "individualExtended": 1, - "extendedLabel": 0, - "useHolidays": 1, # Set to 1 to use holidays in the time period labels - "sampleFromServer": 0, # If 1, sample directly from server - # Trouble-shooting items - # Trouble-shooting items - "passLimit": 20, # Limit on passes allowed through - # Narrative Tree - "trace": 0, # Set to 1 to turn on trace through - "lineLength": 70, # Narrative Tree for trouble-shooting - } - - def __init__(self): - TextRules.TextRules.__init__(self) - SampleAnalysis.SampleAnalysis.__init__(self) - -############################################################################ -##### OVERRIDING THRESHOLDS AND VARIABLES - - def minMax_std_deviation(self, parmHisto, timeRange, componentName): - # Replaces MINMAX_STD_DEVIATION - # Number of standard deviations to compute around the weighted - # average for a stdDev_MinMax - return 1.4 - - def element_outUnits_dict(self, tree, node): - dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) - dict["Wind"] = "mph" - dict["WindGust"] = "mph" - return 
dict - - def phrase_descriptor_dict(self, tree, node): - # Dictionary of descriptors for various weather elements in phrases - # The value for an element may be a phrase or a method - # If a method, it will be called with arguments: - # tree, node, key, element - return { - "WaveHeight" : "Surf................", - "Swell": "swell", - "Swell2": "swell", - "LabelSwell": "Swell...............", - "LabelSwell2": "Secondary swell.....", - "Period": "Period..............", - "Period2":"Secondary period....", - "chop" : "Water condition.....", - "rip" : "Rip current risk....", - "HeatIndex": "Heat index..........", - "20-foot winds......." : "Beach winds.........", - "MaxT_FireWx":"Max temperature.....", - "Sky/weather.........": "Sky/weather.........", - "sst" : "Water temperature...", - "uvi" : "UVI index...........", - "LAL" : "Lightning threat....", - "WaterSpout" : "Waterspout threat...", - "PoP" : "Chance of...........", - "MinT":"lows", - "MaxT":"highs", - "Wind": "winds", - # Used for Headlines - "EXPECTED" : "expected", - "IN EFFECT" : "in effect", - # Used for single values - "around": "around ", - " Valleys/lwr slopes...": " Inland...............", - " Ridges/upr slopes....": " Coastal..............", - } - -############################################################################ -###### COMPONENT PRODUCT DEFINITIONS -###### -############################################################################ -##### SURF PERIOD AND ISSUANCE LISTS - - def _getSurfPeriod_phraseList(self): - # Override this to add or remove phrases from the SRF product. 
- phraseList = [ - self.skyWeather_byTimeRange_compoundPhrase, - self.popMax_phrase, - (self.dayOrNight_phrase, ["MaxT", "MinT", 1, 1], self._tempLocalEffects_list()), - self.severeWeather_phrase, - self.fireWind_compoundPhrase, - self.fireWind_label_phrase, - self.fireValleyWind_compoundPhrase, - self.fireRidgeWind_compoundPhrase, - self.waveHeight_phrase, - self.swell_compoundPhrase, - self.period_phrase, - self.swell2_compoundPhrase, - self.period2_phrase, - self.chop_phrase, - self._sst_phrase, - self._uvi_phrase, - self.rip_phrase, - self.heatIndex_phrase, - self._lal_phrase, - ] - if self._waterSpoutGrid != "": - phraseList.append(self._waterSpout_phrase) - # Add multipleElementTable - if self._includeMultipleElementTable_perPeriod: - phraseList.append(self.multipleElementTable_perPeriod_phrase) - return phraseList - - def _additionalAreasDict(self, component): - # This dictionary is used to build the SurfPeriod. - # Override this to - # --specify which areas to use for which phrases, - # --change the analysisLists - # {areaType: {analysisList: [], phraseList: []}} - if component == "SurfPeriod": - return self._surfPeriodAdditionalAreasDict() - elif component == "SurfExtended": - return self._surfExtendedAdditionalAreasDict() - elif component == "ExtraSampling": - return self._extraSamplingAdditionalAreasDict() - - def _surfPeriodAdditionalAreasDict(self): - return { - "landSeaArea": { - "analysisList": [ - ("Sky", self.minMax, [0]), - ("PoP", self._PoP_analysisMethod("SurfPeriod")), - ("Wx", self.rankedWx), - ("WindGust", self.moderatedMax, [3]), - ("WaveHeight", self.maximum, [6]), - ("SurfHeight", self.maximum, [6]), - ("WindWaveHgt", self.maximum, [6]), - ("Wind", self.vectorAvg, [6]), - ("MaxT", self.minMax), - ("MinT", self.minMax), - ("HeatIndex", self.minMax), - ("T", self.minMax), - ("LAL", self.minMax), - ], - "phraseList": [ - self.skyWeather_byTimeRange_compoundPhrase, - self.popMax_phrase, - self.dayOrNight_phrase, - self.severeWeather_phrase, - 
self.fireWind_compoundPhrase, - self.fireWind_label_phrase, - self.fireValleyWind_compoundPhrase, - self.fireRidgeWind_compoundPhrase, - self.waveHeight_phrase, - self.chop_phrase, - self._sst_phrase, - self._uvi_phrase, - self.rip_phrase, - self.heatIndex_phrase, - self._lal_phrase, - ], - }, - "marineArea": { - "analysisList": [ - ("Swell", self.vectorModeratedMinMax, [6]), - ("Swell2", self.vectorModeratedMinMax, [6]), - ("Period", self.moderatedMinMax, [6]), - ("Period2", self.moderatedMinMax, [6]), - ], - "phraseList": [ - self.swell_compoundPhrase, - self.period_phrase, - self.swell2_compoundPhrase, - self.period2_phrase, - self._waterSpout_phrase, - ], - }, - } - - def _surfExtendedAdditionalAreasDict(self): - return { - "landSeaArea": { - "analysisList": [ - ("MinT", self.firstAvg), - ("MaxT", self.avg), - ("T", self.hourlyTemp), - ("Sky", self.avg, [12]), - ("Wind", self.vectorAvg, [12]), - ("Wx", self.rankedWx, [12]), - ("PoP", self._PoP_analysisMethod("SurfExtended")), - ], - "phraseList": [ - self.wind_summary, - self.sky_phrase, - self.weather_phrase, - self.reportTrends, - self.lows_phrase, - self.highs_phrase, - self.wind_phrase, - ], - }, - } - - def _extraSamplingAdditionalAreasDict(self): - return { - "landSeaArea": { - "analysisList": [ - ("MinT", self.firstAvg), - ("MaxT", self.avg), - ], - "phraseList": [], - }, - } - - def _PoP_analysisMethod(self, componentName): - # Alternative PoP analysis methods for consistency between PoP and Wx - return self.stdDevMaxAvg - #return self.maximum - # Use "mode" if you have non-continuous PoP values - #return self.mode - - def SurfPeriod(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assembleIndentedPhrases, - ], - "analysisList": [], # Public area is used only for header - "phraseList": self._getSurfPeriod_phraseList(), - "additionalAreas": self._getSurfPeriod_additionalAreas(), - "additionalAnalysisList": self._getSurfPeriod_additionalAnalysisList(), - 
"intersectAreas": self._getSurfPeriod_intersectAreas(), - "intersectWithAreas": self._getSurfPeriod_intersectWithAreas(), - "intersectAnalysisList": self._getSurfPeriod_intersectAnalysisList(), - } - - def _getSurfPeriod_additionalAreas(self): - # Return a list of (element, list of areas) - # representing the areas in addition to the public zone - # to be analyzed and added to the StatisticsDictionary - # for the generation of phrases. - additionalAreas = self._getAdditionalAreas("SurfPeriod") - # Add in the surfAreas - surfList = [] - for combo in self._combinedAreaDict.keys(): - surfList += self._combinedAreaDict[combo]["surfAreas"] - areas = [] - for surfArea, label in surfList: - areas.append(surfArea) - additionalAreas += [(self._surfGrid, areas)] - #print "\nadditionalAreas", additionalAreas - return additionalAreas - - def _getAdditionalAreas(self, component): - # List (element, list of areas) - additionalAreas = [] - areaDict = self._additionalAreasDict(component) - # Create dictionary of {element: [areaTypes]} - # e.g. {"MaxT": ['landSeaArea']} - elementDict = {} - for areaType in areaDict.keys(): - for entry in areaDict[areaType]["analysisList"]: - element = entry[0] - if elementDict.has_key(element): - elementDict[element].append(areaType) - else: - elementDict[element] = [areaType] - # For each element, get the areas listed in the SurfAreaDictionary - # for the given areaTypes and then make an additionalAreas entry - # e.g. 
("MaxT", ["SRF_586"]) - for element in elementDict.keys(): - areaTypes = elementDict[element] - addAreaList = [] - for areaType in areaTypes: - for combo in self._combinedAreaDict.keys(): - addAreaList += [self._combinedAreaDict[combo][areaType]] - additionalAreas.append((element, addAreaList)) - #print "\nadditionalAreas", additionalAreas - return additionalAreas - - def _getSurfPeriod_additionalAnalysisList(self): - # List all possible analysis here - analysisList = self._getAdditionalAnalysisList("SurfPeriod") - # Add in extra elements - if self._ripGrid != "": - analysisList.append((self._ripGrid, self.maximum)) - if self._waterSpoutGrid != "": - analysisList.append((self._waterSpoutGrid, self.maximum)) - return analysisList - - def _getAdditionalAnalysisList(self, component): - # Return the concatenation of analysisLists from the - # additionalAreasDict. - # NOTE: This is not the most efficient implementation - # since all additional areas will be sampled and analyzed - # for all weather elements in the additionalAreasDict. - analysisList = [] - areaDict = self._additionalAreasDict(component) - for key in areaDict.keys(): - analysisList += areaDict[key]["analysisList"] - return analysisList - - def _getSurfPeriod_intersectAreas(self): - # This is for local effects. 
- tempList = [] - windList = [] - if self._tempLocalEffects == 1: - tempList = [ - ("MinT", ["Inland", "Coastal"]), - ("MaxT", ["Inland", "Coastal"]), - ] - if self._windLocalEffects == 1: - windList = [("Wind", ["Inland", "Coastal"])] - return tempList + windList - - def _getSurfPeriod_intersectWithAreas(self): - # Return all the landSeaArea combinations from the _combinedAreaDict - intersectWithAreas = [] - for key in self._combinedAreaDict.keys(): - if self._combinedAreaDict[key].has_key('landSeaArea'): - intersectWithAreas.append( - self._combinedAreaDict[key]['landSeaArea']) - #print "returning INTERSECT WITH AREAS", intersectWithAreas - return intersectWithAreas - - def _getSurfPeriod_intersectAnalysisList(self): - analysisList = [ - ("WindGust", self.moderatedMax, [3]), - ("Wind", self.vectorAvg, [6]), - ("MaxT", self.minMax), - ("MinT", self.minMax), - ("T", self.minMax), - ] - return analysisList - - def _tempLocalEffects_list(self): - leArea1 = self.LocalEffectArea("Inland", "") - leArea2 = self.LocalEffectArea("Coastal", " near the coast") - return [self.LocalEffect([leArea1, leArea2], 8, ", except ")] - - def ExtendedLabel(self): - return { - "type": "component", - "methodList": [self.setLabel], - "analysisList": [], - "phraseList":[], - } - def setLabel(self, tree, component): - component.set("words", "\n.EXTENDED...\n") - return self.DONE() - - def SurfExtended(self): - return { - "type": "component", - "methodList": [ - self.consolidateSubPhrases, - self.assemblePhrases, - self.wordWrap, - ], - "analysisList": [], - "phraseList":[ - self.wind_summary, - self.sky_phrase, - self.weather_phrase, - self.reportTrends, - self.lows_phrase, - self.highs_phrase, - self.wind_phrase, - ], - "additionalAreas": self._getAdditionalAreas("SurfExtended"), - "additionalAnalysisList": self._getAdditionalAnalysisList("SurfExtended"), - } - - def ExtraSampling(self): - return { - "type": "component", - "methodList": [self.noWords], - "analysisList": [ - ("MaxT", 
self.avg, [0]), - ("MinT", self.firstAvg, [0]), - ], - "additionalAreas": self._getAdditionalAreas("ExtraSampling"), - "additionalAnalysisList": self._getAdditionalAnalysisList("ExtraSampling"), - } - - def ExtendedNarrative(self): - return { - "type": "narrative", - "methodList": [self.assembleChildWords], - ## Components - "narrativeDef": [ - ("SurfExtended",12),("SurfExtended",24), - ("SurfExtended",24), ("SurfExtended",24), - ], - } - - def removeEmptyPhrase(self, tree, node): - # If an indented phrase is empty, do not include the entry for it - if node.getAncestor('name') in ["fireWind_label_phrase"]: - return 0 - return 1 - - def _setSurfAreaLabel(self, tree, node, phrase): - # Look up the area label for the phrase and set the node's label - # This is to handle landSeaArea and marineArea phrases - # Use the additionalAreasDict: {areaType: {analysisList: [], phraseList: []}} - # and the combinedAreaDict {combo: {areaType: areaLabel} - # - # First, find the phrase in the additionalAreasDict and determine the - # areaType that we should use for this phrase. - # E.g. is this a "landSeaArea" or "marineArea" phrase? - areasDict = self._additionalAreasDict(node.getComponentName()) - useAreaType = None - for areaType in areasDict.keys(): - phraseList = areasDict[areaType]["phraseList"] - if type(phrase) is types.ListType: - for p in phrase: - if p in phraseList: - useAreaType = areaType - break - else: - if phrase in phraseList: - useAreaType = areaType - break - if useAreaType is None: - print "Warning!! 
Trying to set areaLabel for ", phrase - print " Entry not found in _additionalAreasDict" - # Leave areaLabel alone and use current land zone - return - # Now, set the areaLabel for the node to the area for the areaType - try: - areaLabel = self._combinedAreaDict[node.getAreaLabel()][useAreaType] - if self._surfDebug: - print "Setting label", useAreaType, areaLabel, \ - node.getComponentName(), node.getAncestor("name") - node.set("areaLabel", areaLabel) - except: - if self._surfDebug: - print "Leaving area alone", node.getAreaLabel(), node.getComponentName(),\ - node.getAncestor("name") - -######################################################################################### -### Generate Product - - def generateForecast(self, argDict): - # Generate Text Phrases for a list of edit areas - - # Get variables - error = self._getVariables(argDict) - if error is not None: - return error - - # Get the areaList -- derived from defaultEditAreas and - # may be solicited at run-time from user if desired - self._areaList = self.getAreaList(argDict) - if len(self._areaList) == 0: - return "WARNING -- No Edit Areas Specified to Generate Product." 
- - # Determine time ranges - error = self._determineTimeRanges(argDict) - if error is not None: - return error - - # Sample the data - error = self._sampleData(argDict) - if error is not None: - return error - - # Initialize the output string - fcst = "" - fcst = self._preProcessProduct(fcst, argDict) - - # Generate the product for each edit area in the list - fraction = 0 - fractionOne = 1.0/float(len(self._areaList)) - percent = 50.0 - self.setProgressPercentage(percent) - for editArea, areaLabel in self._areaList: - self.progressMessage(fraction, percent, "Making Product for " + areaLabel) - fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) - fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) - fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) - fraction = fractionOne - fcst = self._postProcessProduct(fcst, argDict) - - return fcst - - def _getVariables(self, argDict): - # Make argDict accessible - self.__argDict = argDict - - # Get Definition variables - self._definition = argDict["forecastDef"] - for key in self._definition.keys(): - exec "self._" + key + "= self._definition[key]" - - # Get VariableList and _issuance_list variables - varDict = argDict["varDict"] - for key in varDict.keys(): - if type(key) is types.TupleType: - label, variable = key - exec "self._" + variable + "= varDict[key]" - - self._language = argDict["language"] - return None - - def _determineTimeRanges(self, argDict): - # Set up the Narrative Definition and initial Time Range - self._issuanceInfo = self.getIssuanceInfo( - self._productIssuance, self._issuance_list(argDict)) - self._timeRange = self._issuanceInfo.timeRange() - argDict["productTimeRange"] = self._timeRange - self._expireTime = self._issuanceInfo.expireTime() - self._issueTime = self._issuanceInfo.issueTime() - self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() - #expireTime = time.time() + self._purgeTime*3600 - #self._expireTime = expireTime - 
#self._ddhhmmTimeExpire = time.strftime("%d%H%M", - # time.gmtime(expireTime)) - - if self._periodCombining: - self._definition["methodList"] = \ - [self.combineComponentStats, self.assembleChildWords] - else: - self._definition["methodList"] = [self.assembleChildWords] - self._definition["priorPeriod"] = 24 - - # Calculate current times - self._ddhhmmTime = self.getCurrentTime( - argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) - self._timeLabel = self.getCurrentTime( - argDict, "600 AM %Z %a %b %e %Y", stripLeading=1) - return None - - def _sampleData(self, argDict): - # Sample and analyze the data for the narrative - components = ["SurfPeriod", "SurfExtended"] - self._combinedAreaDict = self._getCombinedAdditionalAreas(argDict, components) - self._narrativeProcessor = ForecastNarrative.ForecastNarrative() - error = self._narrativeProcessor.getNarrativeData( - argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) - if error is not None: - return error - return None - - def _preProcessProduct(self, fcst, argDict,): - # Product header - if self._areaName != "": - productName = self._productName.strip() + " for " + \ - self._areaName.strip() - else: - productName = self._productName.strip() - - issuedByString = self.getIssuedByString() - productName = self.checkTestMode(argDict, productName) - - s = self._wmoID + " " + self._fullStationID + " " + \ - self._ddhhmmTime + "\n" + self._pil + "\n\n" - fcst = fcst + s.upper() - - s = productName + "\n" +\ - "National Weather Service " + self._wfoCityState + \ - "\n" + issuedByString + self._timeLabel + "\n\n" - fcst = fcst + s - - # Try to get Synopsis from previous SRF - srfPil = self._statePil + self._srfPil - synopsis = self.getPreviousProduct(srfPil, "SYNOPSIS") - discussion = self._synopsisHeading + synopsis + "\n$$\n\n" - fcst = fcst + discussion - return fcst - - def _preProcessArea(self, fcst, editArea, areaLabel, argDict): - # This is the header for an edit area combination - 
areaHeader = self.makeAreaHeader( - argDict, areaLabel, self._issueTime, self._expireTime, - self._areaDictionary, self._defaultEditAreas, - cityDescriptor = "Including the beaches of") - - fcst = fcst + areaHeader - # get the hazards text - self._hazards = argDict['hazards'] - self._combinations = argDict["combinations"] - - headlines = self.generateProduct("Hazards", argDict, area = editArea, - areaLabel=areaLabel, - timeRange = self._timeRange) - fcst = fcst + headlines - return fcst - - def _makeProduct(self, fcst, editArea, areaLabel, argDict): - argDict["language"] = self._language - fcst = fcst + self._narrativeProcessor.generateForecast( - argDict, editArea, areaLabel) - if self._includeMultipleElementTable == 1: - fcst = fcst + self.makeMultipleElementTable(areaLabel, self._timeRange, argDict) - return fcst - - def _postProcessArea(self, fcst, editArea, areaLabel, argDict): - if self._includeOutlook: - fcst = fcst + self._outLookText + "\n" - # Add Tide information - fcst = self._getTideInfo(fcst, editArea, areaLabel, argDict) - fcst = fcst + "\n$$\n" - return fcst - - def _postProcessProduct(self, fcst, argDict): - self.setProgressPercentage(100) - self.progressMessage(0, 100, self._displayName + " Complete") - # Clean out extra areas that had to be saved - try: - if self._savedAreas: - self.cleanOutEditAreas(self._extraAreaPrefix) - except: - pass - return fcst - - ######################################################################## - # PRODUCT-SPECIFIC METHODS - ######################################################################## - def _getCombinedAdditionalAreas(self, argDict, components): - # Called before sampling data to create appropriate combinations - # for the additional areas based on the user's combinations - # for the public zones. - # - # Create a dictionary with an entry for each areaList combination - # to let us know the combined landSea, marine, surf and tide areas - # e.g. 
{"Combo0": { - # "landSeaArea": "Extra0", - # "marineArea": "Extra1", - # "surfAreas": [ - # ('NorthCoast', 'Surf along north facing reefs.............'), - # ('SouthCoast', 'Surf along south facing reefs.............') - # ], - # "tideTables": ["Cedar Key", "Venice Inlet"], - # }, - # } - # For example, if GUZ001 has landSeaArea of SRF_001 - # and it is combined with GUZ002 with landSeaArea of SRF_002, - # we need to create a union of SRF_001 and SRF_002 and list - # it as the landSeaArea for the combination of GUZ001 and GUZ002. - # - # This works similarly for marineAreas. - # Any surfAreas that have the same label need to be unioned. - # The tideTables are not edit areas and thus do not need to be combined. - # However, all must be listed. - # - import ModuleAccessor - accessor = ModuleAccessor.ModuleAccessor() - self._surfAreaDict = accessor.variable(self._areaDictionary, "AreaDictionary") - combinations = argDict["combinations"] - if combinations is None: - # Use the areaList to create a combinations entry - combinations = [] - for editArea, label in self._areaList: - combinations.append(([label], label)) - extraAreas = {} - areaTypes = [] - for comp in components: - areaTypes += self._additionalAreasDict(comp).keys() - areaTypes = self.removeDups(areaTypes) - for comboList, label in combinations: - extraAreas[label] = {} - for areaType in areaTypes: - allAreas = self.getAreaDictEntry( - comboList, self._surfAreaDict, areaType) - if self._surfDebug: - print "\nGetting Areas", areaType, comboList - print " ", allAreas - if len(allAreas) > 1: - area = self.getUnion(argDict, allAreas, self._extraAreaPrefix) - if self._surfDebug: - print " Saving Union", area.getId().getName() - self.saveEditAreas([area]) - self._savedAreas = 1 - areaName = area.getId().getName() - elif len(allAreas) == 1: - areaName = allAreas[0] - else: - areaName = label # Use combined area itself - extraAreas[label][areaType] = areaName - # Handle surf areas - extraAreas[label]["surfAreas"] = 
self._getCombinedSurfAreas( - argDict, components, comboList, label) - # Handle tide areas - extraAreas[label]["tideTables"] = self.getAreaDictEntry( - comboList, self._surfAreaDict, "tideTables") - #print "extraAreas", extraAreas - return extraAreas - - def _getCombinedSurfAreas(self, argDict, components, comboList, comboLabel): - # Get the surfAreas for the combination. - # Combine surfAreas that have the same label. - # If a surfArea has a different label in different entries, - # just use the first label. - surfAreasList = self.getAreaDictEntry( - comboList, self._surfAreaDict, "surfAreas") - if self._surfDebug: - print "\nGetting surfAreas" - print " Original areas:" - # If any surfAreas have the same labels, combine them - # First, make a dictionary of surfLabel: surfAreas - labelDict = {} - for surfArea, surfLabel in surfAreasList: - if self._surfDebug: - print " surfArea, surfLabel", surfArea, surfLabel - self.addToDictionary(labelDict, surfLabel, surfArea) - #print "\nLabelDict", labelDict - # Make combinations - for surfLabel in labelDict.keys(): - surfAreas = labelDict[surfLabel] - if len(surfAreas) > 1: - area = self.getUnion(argDict, surfAreas, self._extraAreaPrefix) - if self._surfDebug: - print " Saving Union", area.getId().getName() - self.saveEditAreas([area]) - self._savedAreas = 1 - areaName = area.getId().getName() - else: - areaName = surfAreas[0] - labelDict[surfLabel] = areaName - newList = [] - if self._surfDebug: - print " New surfAreas" - for surfArea, surfLabel in surfAreasList: - if labelDict.has_key(surfLabel): - surfArea = labelDict[surfLabel] - # Remove this entry so we don't get duplicate entries - del labelDict[surfLabel] - else: - # We already handled this label - continue - if self._surfDebug: - print " ", surfArea, surfLabel - newList.append((surfArea, surfLabel)) - return newList - - def _issuance_list(self, argDict): - narrativeDefAM = [ - ("SurfPeriod", "period1"), - ] - narrativeDefPM = [ - ("SurfPeriod", "period1"), - ] - 
extended = [ - ("SurfExtended", 12), - ("SurfExtended", 24), - ("SurfExtended", 24), - ("SurfExtended", 24), - ] - - try: - if self._individualExtended == 1: - if self._extendedLabel == 1: - narrativeDefAM.append(("ExtendedLabel",0)) - narrativeDefPM.append(("ExtendedLabel",0)) - narrativeDefAM = narrativeDefAM + extended - narrativeDefPM = narrativeDefPM + extended - except: - pass - - narrativeDefAM += [("Custom", ("ExtraSampling", (-12, 12)))] - narrativeDefPM += [("Custom", ("ExtraSampling", (12, 36)))] - - return [ - ("Morning", self.DAY(), self.NIGHT(), 16, - ".TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Morning Update", "issuanceHour", self.NIGHT(), 16, - ".REST OF TODAY...", "early in the morning", "late in the afternoon", - 1, narrativeDefAM), - ("Afternoon Update", "issuanceHour", self.NIGHT(), 16, - ".REST OF TODAY...", "early in the morning","late in the afternoon", - 1, narrativeDefAM), - ("Evening (for tomorrow)", self.DAY()+24, self.NIGHT()+24, 16+24, - ".TOMORROW...", "early in the morning","late in the afternoon", - 0, narrativeDefPM), - ] - - def lateDay_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late day -- default 3pm-6pm - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateDayPhrase() - else: - return "late in the afternoon" - - def lateNight_descriptor(self, statDict, argDict, timeRange): - # If time range is in the first period, return period1 descriptor for - # late night -- default 3am-6am - if self._issuanceInfo.period1TimeRange().contains(timeRange): - return self._issuanceInfo.period1LateNightPhrase() - else: - return "early in the morning" - -######################################################################### - - def valleyRidgeAreaNames(self, tree, node): - # This was taken from the FWF, so the areas are named - # Valleys and Ridges. 
- # For the SRF, the default names are Inland and Coastal. - # These areas are to be intersected with the current edit area for - # reporting inland and coastal winds, respectively. - # NOTE: If you change these area names, you will also - # need to change the names in the SurfPeriod "intersectAreas" - # section. - return "Inland", "Coastal" - - def untilPhrasing_flag_dict(self, tree, node): - # If set to 1, "until" time descriptor phrasing will be used. - # E.g. "North winds 20 mph until 10 am, then 35 mph" - dict = TextRules.TextRules.untilPhrasing_flag_dict(self, tree, node) - componentName = node.getComponent().get("name") - if componentName == "SurfPeriod": - dict["Sky"] = 1 - dict["Wx"] = 1 - return dict - - def untilPhrasing_format_dict(self, tree, node): - # Format for "until" time descriptors. - # If "military": UNTIL 1000 - # If "standard": UNTIL 10 AM - return { - "otherwise": "standard", - #"Wind": "standard", - } - -########################################################################### -# PHRASES SET-UP needs to use correct edit area -########################################################################### - - def fireSky_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.skyWeather_byTimeRange_compoundPhrase) - elementInfoList = [self.ElementInfo("Sky", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def weather_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, - [self.skyWeather_byTimeRange_compoundPhrase, - self.weather_phrase]) - resolution = node.get("resolution") - if resolution is not None: - mergeMethod = "Average" - else: - mergeMethod = "List" - elementInfoList = [self.ElementInfo("Wx", mergeMethod, self.WEATHER())] - self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector, - resolution) - node.set("allTimeDescriptors", 1) - if self.areal_sky_flag(tree, node): - self.disableSkyRelatedWx(tree, node) - return self.DONE() - - def 
popMax_setUp(self, tree, node): - # NOTE: The method is set to "Average" instead of "List" so - # that the PoP phrase will always cover the full period. - # It doesn't matter what method (other than List) we choose - # since the popMax_words method gets its PoP value directly from - # the "matchToWx" method. - self._setSurfAreaLabel(tree, node, self.popMax_phrase) - elementInfoList = [self.ElementInfo("PoP", "Average")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def dayOrNight_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.dayOrNight_phrase) - dayElement, nightElement, indent, endWithPeriod = node.get("args") - elementName = self.dayOrNight_element(tree, node, dayElement, nightElement) - indentName = elementName+"_FireWx" - method = "MinMax" - if elementName == "RH": - dayNight = self.getPeriod(node.getTimeRange(), 1) - if dayNight == self.DAYTIME(): - indentName = "MinRH_FireWx" - method = "Min" - else: - indentName = "MaxRH_FireWx" - method = "Max" - elementInfoList = [self.ElementInfo(elementName, method)] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", indentName) - return self.DONE() - - def severeWeather_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.severeWeather_phrase) - elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] - self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector) - # Set this flag used by the "checkWeatherSimilarity" method - node.set("noIntensityCombining", 1) - self.determineSevereTimeDescriptors(tree, node) - return self.DONE() - - def wind_setUp(self, tree, node, gustFlag=0, element="Wind", connectorMethod=None): - self._setSurfAreaLabel(tree, node, [self.fireWind_compoundPhrase, self.wind_phrase]) - wind = self.ElementInfo(element, "List", self.VECTOR()) - elementInfoList = [wind] - if gustFlag: - windGust = self.ElementInfo( - 
"WindGust", "Max", phraseDef=self.gust_phrase) - elementInfoList.append(windGust) - node.set("gustFlag", 1) - if connectorMethod is None: - connectorMethod = self.vectorConnector - self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) - return self.DONE() - - def wind_summary_setUp(self, tree, node): - self._setSurfAreaLabel( - tree, node, [self.fireWind_compoundPhrase, self.wind_summary]) - elementInfoList = [] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def fireWind_label_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.fireWind_label_phrase) - if self.currentAreaContains( - tree, self.ridgeValleyAreas(tree, node)) == 0: - return self.setWords(node, "") - self.setWords(node, "") - node.set("descriptor", "") - node.set("indentLabel", "20-foot winds.......") - return self.DONE() - - def fireRidgeValleyWind_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, - [self.fireValleyWind_compoundPhrase, - self.fireRidgeWind_compoundPhrase]) - # Used for set-up of fireRidgeWind_compoundPhrase as well. - if self.currentAreaContains( - tree, self.ridgeValleyAreas(tree, node)) == 0: - return self.setWords(node, "") - # Set up intersect area to be used for the node - areaName = node.getAreaLabel() - phraseName = node.get("name") - valleys, ridges = self.valleyRidgeAreaNames(tree, node) - if phraseName.find("Valley") >= 0: - area = valleys - else: - area = ridges - intersectName = self.getIntersectName(areaName, area) - #print "setting intersect", intersectName - node.set("areaLabel", intersectName) - return self.DONE() - - ## WaveHeight Phrase Overrides (MarinePhrases) - # Need override to set up as indented phrase - def waveHeight_setUp(self, tree, node): - # Need to determine if we will have one simple phrase OR - # spawn separate phrases for each surfArea for the current combination - - #print "\nWaveHeight phrase" - # Are we dealing with a spawned phrase? 
- args = node.get("args") - if args is not None: - # Set the label for the spawned phrase - #print "Has Args", args - surfArea, label = args - node.set("areaLabel", surfArea) - indentLabel = label - # If not, check to see if we need to spawn phrases - else: - #print "Does not have args" - indentLabel = "WaveHeight" - try: - surfAreas = self._combinedAreaDict[node.getAreaLabel()]["surfAreas"] - except: - surfAreas = [] - #print "surfAreas", surfAreas - if surfAreas == []: - # Set label for simple phrase - #print "Getting label" - self._setSurfAreaLabel(tree, node, self.waveHeight_phrase) - else: - #print "Spawning" - # Spawn separate phrases - # We need to reverse the list since we are adding phrases - # just after the current node and we want the resulting - # spawned phrases to be in the order of surfAreas - surfAreas.reverse() - for surfArea, label in surfAreas: - newPhrase = tree.addPhraseDef(node, self.waveHeight_phrase) - newPhrase.set("args", (surfArea, label)) - return self.setWords(node, "") - - # Proceed with phrase - if self._surfDebug: - print "Proceeding with wave height phrase", node.getAreaLabel(), self._surfGrid - elementInfoList = [self.ElementInfo(self._surfGrid, "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - # See if we are dealing with a surfArea - node.set("descriptor", "") - node.set("indentLabel", indentLabel) - return self.DONE() - - def waveHeight_words(self, tree, node): - "Create phrase for waves" - statDict = node.getStatDict() - stats = self.getStats(statDict, self._surfGrid) - if stats is None: - nodataPhrase = self.noWaveHeight_phrase( - tree, node, "WaveHeight", "WaveHeight") - return self.setWords(node.parent, nodataPhrase) - - min, max = self.getValue(stats, "MinMax") - avg = (min + max)/2 - words = self.wave_range(avg) - return self.setWords(node, words) - - def sky_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.sky_phrase) - sky = self.ElementInfo("Sky", "List") - 
elementInfoList = [sky] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def reportTrends_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.reportTrends) - timeRange = node.getTimeRange() - dayNight = self.getPeriod(timeRange, 1) - if dayNight == self.NIGHTTIME(): - eleInfo = self.ElementInfo("MinT", "Min") - else: - eleInfo = self.ElementInfo("MaxT", "Max") - elementName = "MaxT" - elementInfoList = [eleInfo] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - return self.DONE() - - def lows_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.lows_phrase) - elementInfoList = [self.ElementInfo("MinT", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def highs_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.highs_phrase) - elementInfoList = [self.ElementInfo("MaxT", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - -####################################################################### -##### LAL Phrase Methods - - def _lal_phrase(self): - return { - "setUpMethod": self.lal_setUp, - "wordMethod": self.lal_words, - "phraseMethods": self.standard_phraseMethods() - } - - def lal_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self._lal_phrase) - lal = self.ElementInfo("LAL", "Max", self.SCALAR()) - - # comment the following line if you do not want chop - # subPhrases e.g. "A light chop in morning." 
- lal = self.ElementInfo("LAL", "List", self.SCALAR()) - elementInfoList = [lal] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "LAL") - return self.DONE() - - def lal_words(self, tree, node): - "Create phrase for lal" - statDict = node.getStatDict() - stats = self.getStats(statDict, "LAL") - if stats is None: - return self.setWords(node, "") - lal = self.getValue(stats, "Max", self.SCALAR()) - if lal == 1: - value = "No lightning" - elif lal == 2: - value = "Very infrequent deadly lightning" - elif lal == 3: - value = "Infrequent deadly lightning" - elif lal == 4: - value = "Frequent deadly lightning" - elif lal == 5: - value = "Extreme deadly lightning" - else: - value = "!!!LAL phrase problem!!!" - return self.setWords(node, value) - -####################################################################### - -####################################################################### -##### Waterspout Phrase Methods - - def _waterSpout_phrase(self): - return { - "setUpMethod": self._waterSpout_setUp, - "wordMethod": self._waterSpout_words, - "phraseMethods": self.standard_phraseMethods() - } - - def _waterSpout_setUp(self, tree, node): - if self._waterSpoutGrid == "": - return self.setWords(node, "") - self._setSurfAreaLabel(tree, node, self._waterSpout_phrase) - waterSpout = self.ElementInfo(self._waterSpoutGrid, "Max", self.SCALAR()) - # comment the following line if you do not want chop - # subPhrases e.g. "A light chop in morning." 
- waterSpout = self.ElementInfo(self._waterSpoutGrid, "List", self.SCALAR()) - elementInfoList = [waterSpout] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "WaterSpout") - return self.DONE() - - def _waterSpout_words(self, tree, node): - "Create phrase for waterSpout" - statDict = node.getStatDict() - stats = self.getStats(statDict, self._waterSpoutGrid) - if stats is None: - return self.setWords(node, "") - waterSpout = self.getValue(stats, "Max", self.SCALAR()) - #print "LLLLLLLLLLLLLLLL", waterSpout - if waterSpout == 1: - value = "No waterspouts" - elif waterSpout == 2: - value = "Very slight chance of waterspouts" - elif waterSpout == 3: - value = "Very slight chance of waterspouts" - elif waterSpout == 4: - value = "Moderate chance of waterspouts" - elif waterSpout == 5: - value = "Very good chance of waterspouts" - else: - value = "!!!WATERSPOUT phrase problem!!!" - return self.setWords(node, value) - -########################################################################## -###### Wx Phrase Methods - def pop_lower_threshold(self, tree, node): - # Pop values below this amount will not be reported - return 0 - - def pop_wx_lower_threshold(self, tree, node): - # Pop-related Wx will not be reported if Pop is below this threshold - return 0 - - def pop_upper_threshold(self, tree, node): - # Pop values above this amount will not be reported - return 100 - -####################################################################### -##### CHOP Phrase Methods - - def chop_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.chop_phrase) - chop = self.ElementInfo("Wind", "Max", self.VECTOR()) - # comment the following line if you do not want chop - # subPhrases e.g. "A light chop in morning." 
- chop = self.ElementInfo("Wind", "List", self.VECTOR()) - elementInfoList = [chop] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "chop") - return self.DONE() - - def chop_words(self, tree, node): - "Create phrase for chop" - statDict = node.getStatDict() - stats = self.getStats(statDict, "Wind") - if stats is None: - return self.setWords(node, "") - maxWind, dir = self.getValue(stats, "Max", self.VECTOR()) - offshore1, offshore2, onshore1, onshore2 = self.seaBreeze_thresholds(tree, node) - if dir > offshore1 and dir < offshore2: - # For offshore winds - value = "smooth" - else: - if maxWind <= 7: - value = "smooth" - elif maxWind > 7 and maxWind <= 12: - value = "a light chop" - elif maxWind > 12 and maxWind <= 17: - value = "a moderate chop" - elif maxWind > 17 and maxWind <= 22: - value = "choppy" - elif maxWind > 22 and maxWind <= 27: - value = "rough" - elif maxWind > 27 and maxWind <= 32: - value = "very rough" - elif maxWind > 32: - value = "extremely rough" - else: - value = "!!!Chop phrase problem!!!" - return self.setWords(node, value) - -####################################################################### -############################################################################ -######### UVI These are read from a daily tide file with various -######### tide locations and tides. 
- def _uvi_phrase(self): - return { - "setUpMethod": self._uvi_setUp, - "wordMethod": self._uvi_words, - "phraseMethods": self.standard_phraseMethods() - } - def _uvi_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self._uvi_phrase) - sst = self.ElementInfo("Wind", "Max", self.VECTOR()) - elementInfoList = [sst] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "uvi") - # Get the uvi product - productID = self._statePil + self._uviPil - uviProduct = self.getPreviousProduct(productID, "") - self._uviLines = string.split(uviProduct, "\n") - return self.DONE() - - def _uvi_words(self, tree, node): - # Get Synopsis from previous forecast - # Product has lines such as: - # ALBUQUERQUE NM 7 LITTLE ROCK AR 6 - uviCity = self.getAreaDictEntry(tree, self._surfAreaDict, "uviCity", firstOnly=1) - uviState = self.getAreaDictEntry(tree, self._surfAreaDict, "stateAbbr", firstOnly=1) - if uviCity == []: - msg = self._areaDictErrorMsg(node.getAreaLabel(), "uviCity") - return self.setWords(node, msg) - #print "uvicity, state", uviCity, uviState - uviValue = None - for line in self._uviLines: - tokens = line.split() - # Look for the city - for token in tokens: - if token.upper() == uviCity.upper(): - index = tokens.index(token) - state = tokens[index + 1] - if state == uviState: - # The value is after the next token - uviValue = int(tokens[index + 2]) - break - print "uviValue", uviValue - if uviValue is not None: - uviWords = self._uvi_value(tree, node, uviValue) - else: - uviWords = "" - return self.setWords(node, uviWords) - - def _uvi_value(self, tree, node, uviValue): - if uviValue >= 0 and uviValue <= 2: - uviWords = "Low" - elif uviValue >= 3 and uviValue <= 5: - uviWords = "Moderate" - elif uviValue >= 6 and uviValue <= 7: - uviWords = "High" - elif uviValue >= 8 and uviValue <= 10: - uviWords = "Very High" - else: - uviWords = "Extreme" - return uviWords - 
-########################################################################## - ### HeatIndex (ScalarPhrases) - - def heatIndex_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.heatIndex_phrase) - elementInfoList = [self.ElementInfo("HeatIndex", "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "HeatIndex") - return self.DONE() - -########################################################################## -####### Tides These are read from a daily tide file with various -####### tide locations and tides. - - def _getTideInfo(self, fcst, editArea, areaLabel, argDict): - # Get a list of tideTables lists from the current edit area combination - labels = self._combinedAreaDict[areaLabel]["tideTables"] - if labels == []: - return fcst - - fcst = fcst + "\nTIDE INFORMATION...\n" - # Get day/month/year - creationTime = time.localtime(argDict["creationTime"]) - currentDate = time.strftime("%m/%d/%Y", creationTime) - #print "currentDate", currentDate, type(currentDate) - for label in labels: - success, tideTable = self._getTideTable(label) - if not success: - # Add error message to fcst - fcst = fcst + tideTable - continue - fcst = fcst + "\nAT " + label.upper() + "...\n\n" - for line in tideTable: - if line.find(currentDate) == 0: - # Get the tide info - # Line format has currentDate then series of low/high tide times: - # 05/02/2005 Mon 07:04AM LDT -0.0 L 02:18PM LDT 0.9 H 06:31PM LDT 0.5 L - # date day, time, timeDescriptor, height, low/high - tokens = line.split() - tideList = [] - index = 2 - while index < len(tokens)-1: - tideList.append(tuple(tokens[index:index+4])) - index += 4 - #print "tideList", tideList - for timeStr, timeDesc, height, lowHigh in tideList: - if lowHigh == "L": - tideType = "Low" - else: - tideType = "High" - hrMin = timeStr[0:5].lstrip("0") - amPm = timeStr[5:] - if self._includeTideHeights: - height = " " + height + " feet" - else: - height = 
"" - fcst = fcst + tideType+" tide"+height+" at "+hrMin+" "+amPm+ "\n" - - return fcst - - def _getTideTable(self, label): - fn = "" - try: - fn = self._tideFiles[label] - tideTable = open(fn, "r").readlines() - except: - msg = "\nWARNING:Cannot find Tide File for " + label + " " + fn\ - + " \nPlease check the tideFiles set up in the Site Definition file!!!\n" - return 0, msg - return 1, tideTable - -####################################################################### -####### SST Phrase This is mainly bogus framework to add an input line -####### for the SST. - def _sst_phrase(self): - return { - "setUpMethod": self._sst_setUp, - "wordMethod": self._sst_words, - "phraseMethods": self.standard_phraseMethods() - } - def _sst_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self._sst_phrase) - sst = self.ElementInfo("Wind", "Max", self.VECTOR()) - elementInfoList = [sst] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - descriptor = self.phrase_descriptor(tree, node, "sst", "sst") - node.set("descriptor", descriptor) - return self.DONE() - - def _sst_words(self, tree, node): - sstphrase = "|* !!!!!insert water temperature here!!!!!!!!!! *|" - return self.setWords(node,sstphrase) - - def _areaDictErrorMsg(self, entryName, areaLabel): - msg = "WARNING: Cannot find " + entryName + " for areas in " + areaLabel \ - + " Please check the SurfAreaDictionary for this information!!!" 
- return msg - -###################################################################### -### MarinePhrases TO BE PUT IN BASELINE -- EXCEPT KEEP SET-UP METHODS -###################################################################### -##### Swell methods: compound phrase and summary phrase - - def swell_compoundPhrase(self): - return { - "phraseList": [ - self.swell_summary, - self.simple_swell_phrase, - ], - "phraseMethods": [ - self.consolidateSubPhrases, - self.assembleSentences, - self.swell_finishUp - ], - } - def swell2_compoundPhrase(self): - return { - "phraseList": [ - self.swell2_summary, - self.simple_swell2_phrase, - ], - "phraseMethods": [ - self.consolidateSubPhrases, - self.assembleSentences, - self.swell2_finishUp - ], - } - def swell_finishUp(self, tree, node): - return self.get_swell_finishUp(tree, node, "Swell") - - def swell2_finishUp(self, tree, node): - return self.get_swell_finishUp(tree, node, "Swell2") - - def get_swell_finishUp(self, tree, node, elementName): - words = node.get("words") - if words is None: - return - if words == "": - words = "MISSING" - node.set("descriptor", "") - node.set("indentLabel", "Label"+elementName) - node.set("compound", 1) - return self.setWords(node, words) - - ### Swell Summary methods for Swell and Swell2 - def swell_summary(self): - return { - "setUpMethod": self.swell_summary_setUp, - "wordMethod": self.swell_summary_words, - "phraseMethods": self.standard_phraseMethods(), - } - def swell2_summary(self): - return { - "setUpMethod": self.swell_summary_setUp, - "wordMethod": self.swell2_summary_words, - "phraseMethods": self.standard_phraseMethods(), - } - def swell_summary_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, [self.swell_compoundPhrase, - self.swell2_compoundPhrase]) - elementInfoList = [] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - return self.DONE() - - def swell_summary_words(self, tree, node): - # Uses vectorAvg, vectorMedian, vectorMinMax - words = 
self.vector_summary(tree, node, "Swell") - return self.setWords(node, words) - - def swell2_summary_words(self, tree, node): - # Uses vectorAvg, vectorMedian, vectorMinMax - words = self.vector_summary(tree, node, "Swell2") - return self.setWords(node, words) - - ## Simple Swell Phrases - - def simple_swell_phrase(self): - return { - "setUpMethod": self.simple_swell_setUp, - "wordMethod": self.vector_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - def simple_swell2_phrase(self): - return { - "setUpMethod": self.simple_swell2_setUp, - "wordMethod": self.vector_words, - "phraseMethods": self.standard_vector_phraseMethods(), - } - - def simple_swell_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.swell_compoundPhrase) - return self.get_swell_setUp(tree, node, "Swell") - - def simple_swell2_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.swell2_compoundPhrase) - return self.get_swell_setUp(tree, node, "Swell2") - - def get_swell_setUp(self, tree, node, element="Swell", connectorMethod=None): - swell = self.ElementInfo(element, "List", self.VECTOR()) - elementInfoList = [swell] - if connectorMethod is None: - connectorMethod = self.vectorConnector - self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) - return self.DONE() - - def vector_summary_valueStr(self, value, elementName): - # Thresholds and corresponding phrases - # Defaults are for Winds converted to mph - words = "" - if elementName in ["Swell", "Swell2"]: - return self.swell_summary_valueStr(value, elementName) - else: - return self.wind_summary_valueStr(value, elementName) - - def swell_summary_valueStr(self, value, elementName): - if value < 10: - words = "" - elif value < 20: - words = "moderate" - else: - words = "large swells" - return words - - def wind_summary_valueStr(self, value, elementName): - if value < 25: - words = "" - elif value < 30: - words = "breezy" - elif value < 40: - words = "windy" - elif value < 50: - words = 
"very windy" - elif value < 74: - words = "strong winds" - else: - words = "hurricane force winds" - return words - -######################################################################## -####### Period Phrases - - def period_phrase(self): - return { - "setUpMethod": self.period_setUp, - "wordMethod": self.period_words, - "phraseMethods": self.standard_phraseMethods() - } - def period_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.period_phrase) - return self.get_period_setUp(tree, node, "Period") - - def period2_phrase(self): - return { - "setUpMethod": self.period2_setUp, - "wordMethod": self.period_words, - "phraseMethods": self.standard_phraseMethods() - } - def period2_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.period2_phrase) - return self.get_period_setUp(tree, node, "Period2") - - def get_period_setUp(self, tree, node, element): - elementInfoList = [self.ElementInfo(element, "List")] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", element) - return self.DONE() - - def period_words(self, tree, node): - element = node.getAncestor("elementName") - statDict = node.getStatDict() - stats = self.getStats(statDict, element) - if stats is None: - return self.setWords(node, "") - periodValue = int(self.getValue(stats)) - outUnits = self.element_outUnits(tree, node, element, element) - units = self.units_descriptor(tree, node, "units", outUnits) - unit = self.units_descriptor(tree, node, "unit", outUnits) - if periodValue == 1: - units = unit - return self.setWords(node, `periodValue` + " " + units) - -######################################################################## -####### Rip Phrase - - def rip_phrase(self): - return { - "setUpMethod": self.rip_setUp, - "wordMethod": self.rip_words, - "phraseMethods": self.standard_phraseMethods() - } - def rip_setUp(self, tree, node): - self._setSurfAreaLabel(tree, node, self.rip_phrase) - if 
self._ripGrid != "": - rip = self.ElementInfo(self._ripGrid, "Max") - else: - rip = self.ElementInfo("Wind", "Max", self.VECTOR()) - elementInfoList = [rip] - self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) - node.set("descriptor", "") - node.set("indentLabel", "rip") - return self.DONE() - - def rip_words(self, tree, phrase): - timeRange = phrase.getTimeRange() - areaLabel = phrase.getAreaLabel() - if self._ripGrid != "": - stats = tree.stats.get(self._ripGrid, timeRange, areaLabel, mergeMethod="Max") - if stats is None: - return self.setWords(phrase, "") - else: - return self.setWords(phrase, self.ripGrid_value(stats)) - else: - maxWave = tree.stats.get(self._surfGrid, timeRange, areaLabel, mergeMethod="Max") - windStats = tree.stats.get("Wind", timeRange, areaLabel, mergeMethod="Max") - - ###This will provide average winds - #windStats = tree.stats.get("Wind", timeRange, areaLabel, mergeMethod="Avg") - #print "avg", windStats - - if maxWave is None or windStats is None: - return self.setWords(phrase, "") - - maxWind, dir = windStats - words = self.rip_value(maxWave, maxWind, dir) - if len(words) > 0: - words = words - return self.setWords(phrase, words) - - def ripPhrases(self): - return { - "lowPhrase" : "Low...however...strong currents can occur near piers and jetties. Always have a flotation device with you in the water", - "modPhrase" : "Moderate. A moderate risk of rip currents means wind and or wave conditions support stronger or more frequent rip currents. Always have a flotation device with you in the water", - "highPhrase" : "High. High surf and large swells will produce dangerous pounding surf and rip currents at the beaches. 
People visiting the beaches should stay out of the high surf", - } - - def ripGrid_value(self, value): - phrase = "" - ripPhrases = self.ripPhrases() - if value == 1: - words = ripPhrases["lowPhrase"] - elif value == 2: - words = ripPhrases["modPhrase"] - elif value == 3: - words = ripPhrases["highPhrase"] - else: - words = "RIP phrase problem!!!" - return words - - def rip_value(self, maxWave, maxWind, dir): - words = "" - ripPhrases = self.ripPhrases() - if dir > 150: - if maxWind < 15: - words = ripPhrases["lowPhrase"] - if maxWind >= 15 and maxWave >= 3: - words = ripPhrases["modPhrase"] - elif maxWind >= 15 and maxWave < 3: - words = ripPhrases["lowPhrase"] - if maxWind >= 20 and maxWave >= 6: - words = ripPhrases["highPhrase"] - - elif maxWind >= 20 and maxWave < 6: - words = modPhrase - - else: - if maxWave < 6: - words = ripPhrases["lowPhrase"] - if maxWave >= 6: - words = ripPhrases["modPhrase"] - if maxWave > 8: - words = ripPhrases["highPhrase"] - return words - - def allowedHazards(self): - allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] - return [ - ('SU.W', allActions, 'Marine'), # HIGH SURF WARNING - ('SU.Y', allActions, 'Marine'), # HIGH SURF ADVISORY - ('RP.S', allActions, 'Rip'), # High Rip Threat - ('BH.S', allActions, 'BeachHaz'), # Beach Hazards Statement - ('TS.W', allActions, 'Tsunami'), # TSUNAMI WARNING - ('TS.Y', allActions, 'Tsunami'), # TSUNAMI ADVISORY - ('TS.A', allActions, 'Tsunami'), # TSUNAMI WATCH - ] - +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. 
+# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +## +# This is a base file that is not intended to be overridden. +## + +#------------------------------------------------------------------------- +# Description: This produces a Surf Zone Forecast. +#------------------------------------------------------------------------- +# Copying: +# This software is in the public domain, furnished "as is", without technical +# support, and with no warranty, express or implied, as to its usefulness for +# any purpose. +#------------------------------------------------------------------------- +# Standard and Local file names and Locations: +# SRF, SRF_TBW_Definition, SRF_TBW_Overrides +#------------------------------------------------------------------------- +# Customization Points: +# +# DEFINITION SECTION +# +# Required Configuration Items: +# +# displayName If not None, defines how product appears in GFE GUI +# +# areaDictionary Create a SurfAreaDictionary with entries as described below. +# The SurfAreaDictionary appears in the TextUtilities window +# of the DefineTextProducts dialog. You can use it as a +# starting point to create your own. +# +# The SRF formatter is unique in that the various phrases are based on +# different edit areas. The SurfAreaDictionary specifies which areas +# to use for each type of phrase. +# +# For example, the sky, weather, and wind information +# typically comes from a coastal land/sea edit area which you +# will need to create while the swell information comes from a +# coastal waters area. Meanwhile, each segment of the product is +# based on a public zone. +# +# The "additionalAreasDict", which can be overridden, lists +# the weather elements, analysis, and phrases for each type of edit area. 
+# +# The SurfAreaDictionary has an entry for each public zone +# for which you want a SRF product segment. +# You can copy the information from the AreaDictionary as a starting point. +# Then add the following information for each zone: +# +# "landSeaArea": An edit area you need to create which contains grid +# points along the coast, including both land and sea. +# "marineArea": Typically, the coastal waters area. +# "surfAreas": The surfAreas entry is an optional list of edit areas and labels +# for which to create a surf (waveHeight) phrase. +# For example, If you have: +# +# surfAreas: [ +# ("WestCoast", "Surf along west facing reefs.............."), +# ("NorthCoast", "Surf along north facing reefs............."), +# ("EastCoast", "Surf along east facing reefs.............."), +# ("SouthCoast", "Surf along south facing reefs............."), +# ] +# +# You would get a surfHeight report for each surfArea listed: +# +# Surf along west facing reefs................10 to 12 feet. +# Surf along north facing reefs...............4 to 6 feet. +# Surf along east facing reefs................2 to 3 feet. +# Surf along south facing reefs...............4 to 6 feet. +# +# If the list is empty, you will simply get surfHeight reported +# for the current value of the WaveHeight Grid sampled from the +# landSea edit area: +# +# Surf................1 TO 2 feet. +# +# "uviCity": The UVI index is take from the previous UVI product (pil is UVI). +# This specifies the city name from which to report the uvi index. +# "tideTables": A list of the tide tables from which you want tides reported +# for each public zone. +# Tide information is taken from files that you must set-up locally. +# Make a file for each tide table that you want reported. +# You can include a year's worth of tide information and update the files once +# a year. +# Then list the tide tables and corresponding files in the "tideFiles" +# Definition entry (see below). 
+# Tide tables should be in the format found at the website: +# http://co-ops.nos.noaa.gov/tides05/ +# +# defaultEditAreas defines edit areas, default is Combinations. +# Note that zones can be combined for the SRF product. +# If so, the corresponding landSeaAreas, marineAreas +# will be combined, and the surfAreas and tideTables +# handled correctly. +# Note that if you always have the same combinations, +# you need only list this additional information for +# one of the zones in each combination. +# productName defines name of product e.g. "Zone Forecast Product" +# fullStationID Full station identifier, 4 letter, such as "KSLC". +# wmoID WMO ID code for product header, such as "FOUS45" +# pil Product pil, such as "SFTBOS" +# areaName (opt.) Area name for product header, such as "Western New York" +# wfoCityState WFO location, such as "Buffalo NY" +# +# Optional Configuration Items +# mapNameForCombinations Name of the map background that is used for +# creating/editing the combinations file. This must +# be defined for the GFE zone combiner +# database Source database for product. Can be "Official", +# "Fcst" or "ISC" +# outputFile Defines the output location of the finished product +# when saved from the Formatter Launcher. +# debug If on, debug_print statements will appear. +# textdbPil Defines the awips product identifier +# (e.g., DENCCFDEN) that is used to store the product +# in the AWIPS text database. +# This value is also used for the default GUI entry for +# storage. +# awipsWANPil Defines the awips product identifier +# (e.g., KBOUCCFDEN) that is used to transmit the +# product to the AWIPS WAN. +# This value is also used for the default GUI +# entry for storage. +# periodCombining If 1, an attempt will be made to combine components +# or time periods into one. Otherwise no period +# combining will will be done. 
+# individualExtended If individualExtended == 1, an extended forecast will be +# generated for each individual area +# extendedLabel If extendedLabel== 1, a label will be included for each +# individual extended +# tempLocalEffects Set to 1 to after defining edit areas Inland +# and Coastal to enable local effects for temperature +# windLocalEffects Set to 1 after defining edit areas Inland and Coastal +# to enable local effects for wind. +# If you change these edit area names, +# make sure that the names in the "getIntersectAreas" +# and "valleyRidgeAreaNames" are set to the new names. +# surfGrid Name of grid to use for the waveHeight_phrase. Default is SurfHeight, +# but can be set to WaveHeight for sites not creating a SurfHeight grid. +# ripGrid If set to the name of a rip current grid, this will be used +# for the rip_phrase instead of using Wind Grid and SurfHeight/WaveHeight +# Grid values to calculate a rip_phrase value. +# waterSpoutGrid If you want a waterSpout phrase, set this to your Water Spout grid +# name. Also, make sure to add this to the analysisList for +# "marineArea" in the "_additionalAreasDict". +# includeOutlook If 1, OUTLOOK section will be included. The OUTLOOK section is +# is a hand-editied narrative outlook. +# tideFiles Make a file for each tide table that you want reported. +# Tide tables should be in the format given at the website: +# http://co-ops.nos.noaa.gov/tides05/ +# includeTideHeights If 1, tide heights will be included in the tide output. +# extraAreaPrefix Prefix for extra temporary edit areas that must be created to run +# this product e.g. combinations of landSeaAreas +# surfDebug If 1, produces information showing the extra edit areas created +# and which areas are used for which phrases. 
+# +# Other Important Configuration Items: +# +# --To Add or Remove phrases from the SRF, override the getSurfPeriod_phraseList method +# +# --To Change the analysisList OR to specify which areas to use for which phrases, +# override the _additionalAreasDict. +# +# --If you want to get the previous synopsis, and _uvi statement, +# set Definition section "statePil" +# +# --Override "seaBreeze_thresholds" (ConfigVariables) +# to set up offshore wind values for the chop_phrase +# +#------------------------------------------------------------------------- +# Weather Elements Needed: +# Hazards (optional): If provided, headlines will be generated. +# Sky, Wind (6 hourly), PoP, MaxT, MinT, Sky, Wind, T, HeatIndex, LAL +# Wx, WaveHeight OR SurfHeight, Swell, Period, Swell2, Period2 +# Optional: +# rip current grid (scalar values 1-3 for LOW, MODERATE, HIGH) +# water spout grid (scalar values 1-5) +# +#------------------------------------------------------------------------- +# OTHER INPUTS +# Tides from file, UVI from Awips +#------------------------------------------------------------------------- +# Component Products: +# SurfPeriod (component) +# ExtendedLabel(component) +# SurfExtended (component) +#------------------------------------------------------------------------- +# Programmers and Support including product team leader's email: +# Product Team POC: Charlie Paxton +# FSL POC: Tracy Hansen +#------------------------------------------------------------------------- +# Development tasks that are identified and in progress: +# To look up tasks and their status, see the Text Product User Guide +# Section on "Tkgnats: Task Reporting System". +#------------------------------------------------------------------------- +# Example Output: +# Refer to the NWS Directives for Marine Services. 
+#------------------------------------------------------------------------- +# Implementation Notes +# +# The _sampleData method must first call _getCombinedAdditionalAreas +# to create combinations of additional edit areas based on the +# combinations the user selected for the public zones for which the +# segments of the product are reported. +# These combinations are stored in the _combinedAreaDict: +# {combo: {areaType: areaLabel} +# Then _sampleData calls ForecastNarrative.getNarrativeData to interpret +# the narrative definition, the components of which can now use +# the _combinedAreaDict in creating their "additionalAreas", +# "additionalAreasAnalysisList", intersectAreas", intersectWithAreas", +# and "intersectAnalysisList" entries. +# Finally, each phrase setUp method will call _setSurfAreaLabel to look +# up and set the areaLabel for the phrase using the configurable +# _additionaAreasDict: +# {areaType: {analysisList: [], phraseList: []}} +# and the _combinedAreaDict. +# +#------------------------------------------------------------------------- + +import TextRules +import SampleAnalysis +import ForecastNarrative +import time, string, types, os, re, copy + +class TextProduct(TextRules.TextRules, SampleAnalysis.SampleAnalysis): + +## VariableList = [ +## (("Issuance Type", "issuanceType") , "ROUTINE", "radio", +## ["ROUTINE", "UPDATE", "CORRECTION"]), +## ] + + Definition = { + "type": "smart", + "displayName": "None", + "statePil": "MIA", + "outputFile": "{prddir}/TEXT/SRF_.txt", + +########################################################################## +##### Edit Areas: Create Combinations file with edit area combinations. 
+ # Name of map background for creating Combinations + "mapNameForCombinations": "Zones_TBW", + "showZoneCombiner" : 1, # 1 to cause zone combiner to display + "defaultEditAreas" : "Combinations_SRF__", + +########################################################################## +##### Product identifiers + # Source database for product. Can be "Official", "Fcst" or "ISC" + "database": "Official", + "debug": 0, + "productName": "Surfzone Forecast", # product name + "fullStationID": "", # full station identifier (4letter) + "wmoID": "", # WMO ID + "pil": "", # Product pil + "areaName": "", # Name of state, such as "Georgia" -- optional + "wfoCityState": "", # Location of WFO - city and state name + + "textdbPil": "", # Product ID for storing to AWIPS text database. + "awipsWANPil":"" , # Product ID for transmitting to AWIPS WAN. + "srfPil": "SRFTBW", + "uviPil": "UVITBW", + + "periodCombining" : 1, # If 1, combine periods, if possible + "tempLocalEffects": 0, # Set to 1 to enable Temp local effects AFTER + # creating Inland and Coastal edit areas + "windLocalEffects": 0, # Set to 1 to enable wind local effects AFTER + # creating Inland and Coastal edit areas + #"surfGrid": "SurfHeight", # Use grid for waveHeight_phrase + "surfGrid": "WaveHeight", # Use grid for waveHeight_phrase + "ripGrid": "", # Use grid for rip_phrase + "waterSpoutGrid": "", # Use grid for waterSpout_phrase + "includeOutlook": 0, # If 1, OUTLOOK section included + "outLookText": "\n.OUTLOOK...",# Text for OUTLOOK section + "tideFiles": { # For each tide table, list the file where it can + # be found + "Venice Inlet": "/data/local/localapps/tides/VeniceInlet.txt", + "Saint Petersburg": "/data/local/localapps/tides/SaintPetersburg.txt", + "Fort Myers": "/data/local/localapps/tides/FortMyers.txt", + "Cedar Key": "/data/local/localapps/tides/CedarKey.txt", + }, + "includeTideHeights": 0, + "extraAreaPrefix": "__ExtraSurfArea", + "surfDebug": 1, + + #"purgeTime": 15, + 
+########################################################################## +##### Product variables + # To include a MultipleElementTable (e.g. Temp Pop Table) + # for each area in the current Combination: + # Set "includeMultipleElement" to 1 + # Set the "elementList" and "singleValueFormat" flag if desired + # "elementList" may include "Temp", "PoP", and/or "Humidity" + # "singleValueFormat" lists only one value per element + # Make sure you are using a Combinations file + # Modify the CityDictionary TextUtility file or create a new one + "includeMultipleElementTable": 0, + "includeMultipleElementTable_perPeriod": 0, + "elementList" : ["Temp", "Humidity", "PoP"], + "singleValueFormat": 0, + "cityDictionary": "CityDictionary", + "areaDictionary": "SurfAreaDictionary", + "language": "english", + "synopsisUGC": "", # UGC code for synopsis + "synopsisHeading": ".SYNOPSIS...",# Heading for synopsis + # If individualExtended == 1, an extended forecast will be + # generated for each individual area + # If extendedLabel == 1, a label will be included for each + # individual extended + "individualExtended": 1, + "extendedLabel": 0, + "useHolidays": 1, # Set to 1 to use holidays in the time period labels + "sampleFromServer": 0, # If 1, sample directly from server + # Trouble-shooting items + # Trouble-shooting items + "passLimit": 20, # Limit on passes allowed through + # Narrative Tree + "trace": 0, # Set to 1 to turn on trace through + "lineLength": 70, # Narrative Tree for trouble-shooting + } + + def __init__(self): + TextRules.TextRules.__init__(self) + SampleAnalysis.SampleAnalysis.__init__(self) + +############################################################################ +##### OVERRIDING THRESHOLDS AND VARIABLES + + def minMax_std_deviation(self, parmHisto, timeRange, componentName): + # Replaces MINMAX_STD_DEVIATION + # Number of standard deviations to compute around the weighted + # average for a stdDev_MinMax + return 1.4 + + def element_outUnits_dict(self, 
tree, node): + dict = TextRules.TextRules.element_outUnits_dict(self, tree, node) + dict["Wind"] = "mph" + dict["WindGust"] = "mph" + return dict + + def phrase_descriptor_dict(self, tree, node): + # Dictionary of descriptors for various weather elements in phrases + # The value for an element may be a phrase or a method + # If a method, it will be called with arguments: + # tree, node, key, element + return { + "WaveHeight" : "Surf................", + "Swell": "swell", + "Swell2": "swell", + "LabelSwell": "Swell...............", + "LabelSwell2": "Secondary swell.....", + "Period": "Period..............", + "Period2":"Secondary period....", + "chop" : "Water condition.....", + "rip" : "Rip current risk....", + "HeatIndex": "Heat index..........", + "20-foot winds......." : "Beach winds.........", + "MaxT_FireWx":"Max temperature.....", + "Sky/weather.........": "Sky/weather.........", + "sst" : "Water temperature...", + "uvi" : "UVI index...........", + "LAL" : "Lightning threat....", + "WaterSpout" : "Waterspout threat...", + "PoP" : "Chance of...........", + "MinT":"lows", + "MaxT":"highs", + "Wind": "winds", + # Used for Headlines + "EXPECTED" : "expected", + "IN EFFECT" : "in effect", + # Used for single values + "around": "around ", + " Valleys/lwr slopes...": " Inland...............", + " Ridges/upr slopes....": " Coastal..............", + } + +############################################################################ +###### COMPONENT PRODUCT DEFINITIONS +###### +############################################################################ +##### SURF PERIOD AND ISSUANCE LISTS + + def _getSurfPeriod_phraseList(self): + # Override this to add or remove phrases from the SRF product. 
+ phraseList = [ + self.skyWeather_byTimeRange_compoundPhrase, + self.popMax_phrase, + (self.dayOrNight_phrase, ["MaxT", "MinT", 1, 1], self._tempLocalEffects_list()), + self.severeWeather_phrase, + self.fireWind_compoundPhrase, + self.fireWind_label_phrase, + self.fireValleyWind_compoundPhrase, + self.fireRidgeWind_compoundPhrase, + self.waveHeight_phrase, + self.swell_compoundPhrase, + self.period_phrase, + self.swell2_compoundPhrase, + self.period2_phrase, + self.chop_phrase, + self._sst_phrase, + self._uvi_phrase, + self.rip_phrase, + self.heatIndex_phrase, + self._lal_phrase, + ] + if self._waterSpoutGrid != "": + phraseList.append(self._waterSpout_phrase) + # Add multipleElementTable + if self._includeMultipleElementTable_perPeriod: + phraseList.append(self.multipleElementTable_perPeriod_phrase) + return phraseList + + def _additionalAreasDict(self, component): + # This dictionary is used to build the SurfPeriod. + # Override this to + # --specify which areas to use for which phrases, + # --change the analysisLists + # {areaType: {analysisList: [], phraseList: []}} + if component == "SurfPeriod": + return self._surfPeriodAdditionalAreasDict() + elif component == "SurfExtended": + return self._surfExtendedAdditionalAreasDict() + elif component == "ExtraSampling": + return self._extraSamplingAdditionalAreasDict() + + def _surfPeriodAdditionalAreasDict(self): + return { + "landSeaArea": { + "analysisList": [ + ("Sky", self.minMax, [0]), + ("PoP", self._PoP_analysisMethod("SurfPeriod")), + ("Wx", self.rankedWx), + ("WindGust", self.moderatedMax, [3]), + ("WaveHeight", self.maximum, [6]), + ("SurfHeight", self.maximum, [6]), + ("WindWaveHgt", self.maximum, [6]), + ("Wind", self.vectorAvg, [6]), + ("MaxT", self.minMax), + ("MinT", self.minMax), + ("HeatIndex", self.minMax), + ("T", self.minMax), + ("LAL", self.minMax), + ], + "phraseList": [ + self.skyWeather_byTimeRange_compoundPhrase, + self.popMax_phrase, + self.dayOrNight_phrase, + self.severeWeather_phrase, + 
self.fireWind_compoundPhrase, + self.fireWind_label_phrase, + self.fireValleyWind_compoundPhrase, + self.fireRidgeWind_compoundPhrase, + self.waveHeight_phrase, + self.chop_phrase, + self._sst_phrase, + self._uvi_phrase, + self.rip_phrase, + self.heatIndex_phrase, + self._lal_phrase, + ], + }, + "marineArea": { + "analysisList": [ + ("Swell", self.vectorModeratedMinMax, [6]), + ("Swell2", self.vectorModeratedMinMax, [6]), + ("Period", self.moderatedMinMax, [6]), + ("Period2", self.moderatedMinMax, [6]), + ], + "phraseList": [ + self.swell_compoundPhrase, + self.period_phrase, + self.swell2_compoundPhrase, + self.period2_phrase, + self._waterSpout_phrase, + ], + }, + } + + def _surfExtendedAdditionalAreasDict(self): + return { + "landSeaArea": { + "analysisList": [ + ("MinT", self.firstAvg), + ("MaxT", self.avg), + ("T", self.hourlyTemp), + ("Sky", self.avg, [12]), + ("Wind", self.vectorAvg, [12]), + ("Wx", self.rankedWx, [12]), + ("PoP", self._PoP_analysisMethod("SurfExtended")), + ], + "phraseList": [ + self.wind_summary, + self.sky_phrase, + self.weather_phrase, + self.reportTrends, + self.lows_phrase, + self.highs_phrase, + self.wind_phrase, + ], + }, + } + + def _extraSamplingAdditionalAreasDict(self): + return { + "landSeaArea": { + "analysisList": [ + ("MinT", self.firstAvg), + ("MaxT", self.avg), + ], + "phraseList": [], + }, + } + + def _PoP_analysisMethod(self, componentName): + # Alternative PoP analysis methods for consistency between PoP and Wx + return self.stdDevMaxAvg + #return self.maximum + # Use "mode" if you have non-continuous PoP values + #return self.mode + + def SurfPeriod(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assembleIndentedPhrases, + ], + "analysisList": [], # Public area is used only for header + "phraseList": self._getSurfPeriod_phraseList(), + "additionalAreas": self._getSurfPeriod_additionalAreas(), + "additionalAnalysisList": self._getSurfPeriod_additionalAnalysisList(), + 
"intersectAreas": self._getSurfPeriod_intersectAreas(), + "intersectWithAreas": self._getSurfPeriod_intersectWithAreas(), + "intersectAnalysisList": self._getSurfPeriod_intersectAnalysisList(), + } + + def _getSurfPeriod_additionalAreas(self): + # Return a list of (element, list of areas) + # representing the areas in addition to the public zone + # to be analyzed and added to the StatisticsDictionary + # for the generation of phrases. + additionalAreas = self._getAdditionalAreas("SurfPeriod") + # Add in the surfAreas + surfList = [] + for combo in list(self._combinedAreaDict.keys()): + surfList += self._combinedAreaDict[combo]["surfAreas"] + areas = [] + for surfArea, label in surfList: + areas.append(surfArea) + additionalAreas += [(self._surfGrid, areas)] + #print "\nadditionalAreas", additionalAreas + return additionalAreas + + def _getAdditionalAreas(self, component): + # List (element, list of areas) + additionalAreas = [] + areaDict = self._additionalAreasDict(component) + # Create dictionary of {element: [areaTypes]} + # e.g. {"MaxT": ['landSeaArea']} + elementDict = {} + for areaType in list(areaDict.keys()): + for entry in areaDict[areaType]["analysisList"]: + element = entry[0] + if element in elementDict: + elementDict[element].append(areaType) + else: + elementDict[element] = [areaType] + # For each element, get the areas listed in the SurfAreaDictionary + # for the given areaTypes and then make an additionalAreas entry + # e.g. 
("MaxT", ["SRF_586"]) + for element in list(elementDict.keys()): + areaTypes = elementDict[element] + addAreaList = [] + for areaType in areaTypes: + for combo in list(self._combinedAreaDict.keys()): + addAreaList += [self._combinedAreaDict[combo][areaType]] + additionalAreas.append((element, addAreaList)) + #print "\nadditionalAreas", additionalAreas + return additionalAreas + + def _getSurfPeriod_additionalAnalysisList(self): + # List all possible analysis here + analysisList = self._getAdditionalAnalysisList("SurfPeriod") + # Add in extra elements + if self._ripGrid != "": + analysisList.append((self._ripGrid, self.maximum)) + if self._waterSpoutGrid != "": + analysisList.append((self._waterSpoutGrid, self.maximum)) + return analysisList + + def _getAdditionalAnalysisList(self, component): + # Return the concatenation of analysisLists from the + # additionalAreasDict. + # NOTE: This is not the most efficient implementation + # since all additional areas will be sampled and analyzed + # for all weather elements in the additionalAreasDict. + analysisList = [] + areaDict = self._additionalAreasDict(component) + for key in list(areaDict.keys()): + analysisList += areaDict[key]["analysisList"] + return analysisList + + def _getSurfPeriod_intersectAreas(self): + # This is for local effects. 
+ tempList = [] + windList = [] + if self._tempLocalEffects == 1: + tempList = [ + ("MinT", ["Inland", "Coastal"]), + ("MaxT", ["Inland", "Coastal"]), + ] + if self._windLocalEffects == 1: + windList = [("Wind", ["Inland", "Coastal"])] + return tempList + windList + + def _getSurfPeriod_intersectWithAreas(self): + # Return all the landSeaArea combinations from the _combinedAreaDict + intersectWithAreas = [] + for key in list(self._combinedAreaDict.keys()): + if 'landSeaArea' in self._combinedAreaDict[key]: + intersectWithAreas.append( + self._combinedAreaDict[key]['landSeaArea']) + #print "returning INTERSECT WITH AREAS", intersectWithAreas + return intersectWithAreas + + def _getSurfPeriod_intersectAnalysisList(self): + analysisList = [ + ("WindGust", self.moderatedMax, [3]), + ("Wind", self.vectorAvg, [6]), + ("MaxT", self.minMax), + ("MinT", self.minMax), + ("T", self.minMax), + ] + return analysisList + + def _tempLocalEffects_list(self): + leArea1 = self.LocalEffectArea("Inland", "") + leArea2 = self.LocalEffectArea("Coastal", " near the coast") + return [self.LocalEffect([leArea1, leArea2], 8, ", except ")] + + def ExtendedLabel(self): + return { + "type": "component", + "methodList": [self.setLabel], + "analysisList": [], + "phraseList":[], + } + def setLabel(self, tree, component): + component.set("words", "\n.EXTENDED...\n") + return self.DONE() + + def SurfExtended(self): + return { + "type": "component", + "methodList": [ + self.consolidateSubPhrases, + self.assemblePhrases, + self.wordWrap, + ], + "analysisList": [], + "phraseList":[ + self.wind_summary, + self.sky_phrase, + self.weather_phrase, + self.reportTrends, + self.lows_phrase, + self.highs_phrase, + self.wind_phrase, + ], + "additionalAreas": self._getAdditionalAreas("SurfExtended"), + "additionalAnalysisList": self._getAdditionalAnalysisList("SurfExtended"), + } + + def ExtraSampling(self): + return { + "type": "component", + "methodList": [self.noWords], + "analysisList": [ + ("MaxT", 
self.avg, [0]), + ("MinT", self.firstAvg, [0]), + ], + "additionalAreas": self._getAdditionalAreas("ExtraSampling"), + "additionalAnalysisList": self._getAdditionalAnalysisList("ExtraSampling"), + } + + def ExtendedNarrative(self): + return { + "type": "narrative", + "methodList": [self.assembleChildWords], + ## Components + "narrativeDef": [ + ("SurfExtended",12),("SurfExtended",24), + ("SurfExtended",24), ("SurfExtended",24), + ], + } + + def removeEmptyPhrase(self, tree, node): + # If an indented phrase is empty, do not include the entry for it + if node.getAncestor('name') in ["fireWind_label_phrase"]: + return 0 + return 1 + + def _setSurfAreaLabel(self, tree, node, phrase): + # Look up the area label for the phrase and set the node's label + # This is to handle landSeaArea and marineArea phrases + # Use the additionalAreasDict: {areaType: {analysisList: [], phraseList: []}} + # and the combinedAreaDict {combo: {areaType: areaLabel} + # + # First, find the phrase in the additionalAreasDict and determine the + # areaType that we should use for this phrase. + # E.g. is this a "landSeaArea" or "marineArea" phrase? + areasDict = self._additionalAreasDict(node.getComponentName()) + useAreaType = None + for areaType in list(areasDict.keys()): + phraseList = areasDict[areaType]["phraseList"] + if type(phrase) is list: + for p in phrase: + if p in phraseList: + useAreaType = areaType + break + else: + if phrase in phraseList: + useAreaType = areaType + break + if useAreaType is None: + print(("Warning!! 
Trying to set areaLabel for ", phrase)) + print(" Entry not found in _additionalAreasDict") + # Leave areaLabel alone and use current land zone + return + # Now, set the areaLabel for the node to the area for the areaType + try: + areaLabel = self._combinedAreaDict[node.getAreaLabel()][useAreaType] + if self._surfDebug: + print(("Setting label", useAreaType, areaLabel, \ + node.getComponentName(), node.getAncestor("name"))) + node.set("areaLabel", areaLabel) + except: + if self._surfDebug: + print(("Leaving area alone", node.getAreaLabel(), node.getComponentName(),\ + node.getAncestor("name"))) + +######################################################################################### +### Generate Product + + def generateForecast(self, argDict): + # Generate Text Phrases for a list of edit areas + + # Get variables + error = self._getVariables(argDict) + if error is not None: + return error + + # Get the areaList -- derived from defaultEditAreas and + # may be solicited at run-time from user if desired + self._areaList = self.getAreaList(argDict) + if len(self._areaList) == 0: + return "WARNING -- No Edit Areas Specified to Generate Product." 
+ + # Determine time ranges + error = self._determineTimeRanges(argDict) + if error is not None: + return error + + # Sample the data + error = self._sampleData(argDict) + if error is not None: + return error + + # Initialize the output string + fcst = "" + fcst = self._preProcessProduct(fcst, argDict) + + # Generate the product for each edit area in the list + fraction = 0 + fractionOne = 1.0/float(len(self._areaList)) + percent = 50.0 + self.setProgressPercentage(percent) + for editArea, areaLabel in self._areaList: + self.progressMessage(fraction, percent, "Making Product for " + areaLabel) + fcst = self._preProcessArea(fcst, editArea, areaLabel, argDict) + fcst = self._makeProduct(fcst, editArea, areaLabel, argDict) + fcst = self._postProcessArea(fcst, editArea, areaLabel, argDict) + fraction = fractionOne + fcst = self._postProcessProduct(fcst, argDict) + + return fcst + + def _getVariables(self, argDict): + # Make argDict accessible + self.__argDict = argDict + + # Get Definition variables + self._definition = argDict["forecastDef"] + for key in list(self._definition.keys()): + exec("self._" + key + "= self._definition[key]") + + # Get VariableList and _issuance_list variables + varDict = argDict["varDict"] + for key in list(varDict.keys()): + if type(key) is tuple: + label, variable = key + exec("self._" + variable + "= varDict[key]") + + self._language = argDict["language"] + return None + + def _determineTimeRanges(self, argDict): + # Set up the Narrative Definition and initial Time Range + self._issuanceInfo = self.getIssuanceInfo( + self._productIssuance, self._issuance_list(argDict)) + self._timeRange = self._issuanceInfo.timeRange() + argDict["productTimeRange"] = self._timeRange + self._expireTime = self._issuanceInfo.expireTime() + self._issueTime = self._issuanceInfo.issueTime() + self._definition["narrativeDef"] = self._issuanceInfo.narrativeDef() + #expireTime = time.time() + self._purgeTime*3600 + #self._expireTime = expireTime + 
#self._ddhhmmTimeExpire = time.strftime("%d%H%M", + # time.gmtime(expireTime)) + + if self._periodCombining: + self._definition["methodList"] = \ + [self.combineComponentStats, self.assembleChildWords] + else: + self._definition["methodList"] = [self.assembleChildWords] + self._definition["priorPeriod"] = 24 + + # Calculate current times + self._ddhhmmTime = self.getCurrentTime( + argDict, "%d%H%M", shiftToLocal=0, stripLeading=0) + self._timeLabel = self.getCurrentTime( + argDict, "600 AM %Z %a %b %e %Y", stripLeading=1) + return None + + def _sampleData(self, argDict): + # Sample and analyze the data for the narrative + components = ["SurfPeriod", "SurfExtended"] + self._combinedAreaDict = self._getCombinedAdditionalAreas(argDict, components) + self._narrativeProcessor = ForecastNarrative.ForecastNarrative() + error = self._narrativeProcessor.getNarrativeData( + argDict, self._definition, self._timeRange, self._areaList, self._issuanceInfo) + if error is not None: + return error + return None + + def _preProcessProduct(self, fcst, argDict,): + # Product header + if self._areaName != "": + productName = self._productName.strip() + " for " + \ + self._areaName.strip() + else: + productName = self._productName.strip() + + issuedByString = self.getIssuedByString() + productName = self.checkTestMode(argDict, productName) + + s = self._wmoID + " " + self._fullStationID + " " + \ + self._ddhhmmTime + "\n" + self._pil + "\n\n" + fcst = fcst + s.upper() + + s = productName + "\n" +\ + "National Weather Service " + self._wfoCityState + \ + "\n" + issuedByString + self._timeLabel + "\n\n" + fcst = fcst + s + + # Try to get Synopsis from previous SRF + srfPil = self._statePil + self._srfPil + synopsis = self.getPreviousProduct(srfPil, "SYNOPSIS") + discussion = self._synopsisHeading + synopsis + "\n$$\n\n" + fcst = fcst + discussion + return fcst + + def _preProcessArea(self, fcst, editArea, areaLabel, argDict): + # This is the header for an edit area combination + 
areaHeader = self.makeAreaHeader( + argDict, areaLabel, self._issueTime, self._expireTime, + self._areaDictionary, self._defaultEditAreas, + cityDescriptor = "Including the beaches of") + + fcst = fcst + areaHeader + # get the hazards text + self._hazards = argDict['hazards'] + self._combinations = argDict["combinations"] + + headlines = self.generateProduct("Hazards", argDict, area = editArea, + areaLabel=areaLabel, + timeRange = self._timeRange) + fcst = fcst + headlines + return fcst + + def _makeProduct(self, fcst, editArea, areaLabel, argDict): + argDict["language"] = self._language + fcst = fcst + self._narrativeProcessor.generateForecast( + argDict, editArea, areaLabel) + if self._includeMultipleElementTable == 1: + fcst = fcst + self.makeMultipleElementTable(areaLabel, self._timeRange, argDict) + return fcst + + def _postProcessArea(self, fcst, editArea, areaLabel, argDict): + if self._includeOutlook: + fcst = fcst + self._outLookText + "\n" + # Add Tide information + fcst = self._getTideInfo(fcst, editArea, areaLabel, argDict) + fcst = fcst + "\n$$\n" + return fcst + + def _postProcessProduct(self, fcst, argDict): + self.setProgressPercentage(100) + self.progressMessage(0, 100, self._displayName + " Complete") + # Clean out extra areas that had to be saved + try: + if self._savedAreas: + self.cleanOutEditAreas(self._extraAreaPrefix) + except: + pass + return fcst + + ######################################################################## + # PRODUCT-SPECIFIC METHODS + ######################################################################## + def _getCombinedAdditionalAreas(self, argDict, components): + # Called before sampling data to create appropriate combinations + # for the additional areas based on the user's combinations + # for the public zones. + # + # Create a dictionary with an entry for each areaList combination + # to let us know the combined landSea, marine, surf and tide areas + # e.g. 
{"Combo0": { + # "landSeaArea": "Extra0", + # "marineArea": "Extra1", + # "surfAreas": [ + # ('NorthCoast', 'Surf along north facing reefs.............'), + # ('SouthCoast', 'Surf along south facing reefs.............') + # ], + # "tideTables": ["Cedar Key", "Venice Inlet"], + # }, + # } + # For example, if GUZ001 has landSeaArea of SRF_001 + # and it is combined with GUZ002 with landSeaArea of SRF_002, + # we need to create a union of SRF_001 and SRF_002 and list + # it as the landSeaArea for the combination of GUZ001 and GUZ002. + # + # This works similarly for marineAreas. + # Any surfAreas that have the same label need to be unioned. + # The tideTables are not edit areas and thus do not need to be combined. + # However, all must be listed. + # + import ModuleAccessor + accessor = ModuleAccessor.ModuleAccessor() + self._surfAreaDict = accessor.variable(self._areaDictionary, "AreaDictionary") + combinations = argDict["combinations"] + if combinations is None: + # Use the areaList to create a combinations entry + combinations = [] + for editArea, label in self._areaList: + combinations.append(([label], label)) + extraAreas = {} + areaTypes = [] + for comp in components: + areaTypes += list(self._additionalAreasDict(comp).keys()) + areaTypes = self.removeDups(areaTypes) + for comboList, label in combinations: + extraAreas[label] = {} + for areaType in areaTypes: + allAreas = self.getAreaDictEntry( + comboList, self._surfAreaDict, areaType) + if self._surfDebug: + print(("\nGetting Areas", areaType, comboList)) + print((" ", allAreas)) + if len(allAreas) > 1: + area = self.getUnion(argDict, allAreas, self._extraAreaPrefix) + if self._surfDebug: + print((" Saving Union", area.getId().getName())) + self.saveEditAreas([area]) + self._savedAreas = 1 + areaName = area.getId().getName() + elif len(allAreas) == 1: + areaName = allAreas[0] + else: + areaName = label # Use combined area itself + extraAreas[label][areaType] = areaName + # Handle surf areas + 
extraAreas[label]["surfAreas"] = self._getCombinedSurfAreas( + argDict, components, comboList, label) + # Handle tide areas + extraAreas[label]["tideTables"] = self.getAreaDictEntry( + comboList, self._surfAreaDict, "tideTables") + #print "extraAreas", extraAreas + return extraAreas + + def _getCombinedSurfAreas(self, argDict, components, comboList, comboLabel): + # Get the surfAreas for the combination. + # Combine surfAreas that have the same label. + # If a surfArea has a different label in different entries, + # just use the first label. + surfAreasList = self.getAreaDictEntry( + comboList, self._surfAreaDict, "surfAreas") + if self._surfDebug: + print("\nGetting surfAreas") + print(" Original areas:") + # If any surfAreas have the same labels, combine them + # First, make a dictionary of surfLabel: surfAreas + labelDict = {} + for surfArea, surfLabel in surfAreasList: + if self._surfDebug: + print((" surfArea, surfLabel", surfArea, surfLabel)) + self.addToDictionary(labelDict, surfLabel, surfArea) + #print "\nLabelDict", labelDict + # Make combinations + for surfLabel in list(labelDict.keys()): + surfAreas = labelDict[surfLabel] + if len(surfAreas) > 1: + area = self.getUnion(argDict, surfAreas, self._extraAreaPrefix) + if self._surfDebug: + print((" Saving Union", area.getId().getName())) + self.saveEditAreas([area]) + self._savedAreas = 1 + areaName = area.getId().getName() + else: + areaName = surfAreas[0] + labelDict[surfLabel] = areaName + newList = [] + if self._surfDebug: + print(" New surfAreas") + for surfArea, surfLabel in surfAreasList: + if surfLabel in labelDict: + surfArea = labelDict[surfLabel] + # Remove this entry so we don't get duplicate entries + del labelDict[surfLabel] + else: + # We already handled this label + continue + if self._surfDebug: + print((" ", surfArea, surfLabel)) + newList.append((surfArea, surfLabel)) + return newList + + def _issuance_list(self, argDict): + narrativeDefAM = [ + ("SurfPeriod", "period1"), + ] + 
narrativeDefPM = [ + ("SurfPeriod", "period1"), + ] + extended = [ + ("SurfExtended", 12), + ("SurfExtended", 24), + ("SurfExtended", 24), + ("SurfExtended", 24), + ] + + try: + if self._individualExtended == 1: + if self._extendedLabel == 1: + narrativeDefAM.append(("ExtendedLabel",0)) + narrativeDefPM.append(("ExtendedLabel",0)) + narrativeDefAM = narrativeDefAM + extended + narrativeDefPM = narrativeDefPM + extended + except: + pass + + narrativeDefAM += [("Custom", ("ExtraSampling", (-12, 12)))] + narrativeDefPM += [("Custom", ("ExtraSampling", (12, 36)))] + + return [ + ("Morning", self.DAY(), self.NIGHT(), 16, + ".TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Morning Update", "issuanceHour", self.NIGHT(), 16, + ".REST OF TODAY...", "early in the morning", "late in the afternoon", + 1, narrativeDefAM), + ("Afternoon Update", "issuanceHour", self.NIGHT(), 16, + ".REST OF TODAY...", "early in the morning","late in the afternoon", + 1, narrativeDefAM), + ("Evening (for tomorrow)", self.DAY()+24, self.NIGHT()+24, 16+24, + ".TOMORROW...", "early in the morning","late in the afternoon", + 0, narrativeDefPM), + ] + + def lateDay_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late day -- default 3pm-6pm + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateDayPhrase() + else: + return "late in the afternoon" + + def lateNight_descriptor(self, statDict, argDict, timeRange): + # If time range is in the first period, return period1 descriptor for + # late night -- default 3am-6am + if self._issuanceInfo.period1TimeRange().contains(timeRange): + return self._issuanceInfo.period1LateNightPhrase() + else: + return "early in the morning" + +######################################################################### + + def valleyRidgeAreaNames(self, tree, node): + # This was taken from the FWF, so the areas are 
named + # Valleys and Ridges. + # For the SRF, the default names are Inland and Coastal. + # These areas are to be intersected with the current edit area for + # reporting inland and coastal winds, respectively. + # NOTE: If you change these area names, you will also + # need to change the names in the SurfPeriod "intersectAreas" + # section. + return "Inland", "Coastal" + + def untilPhrasing_flag_dict(self, tree, node): + # If set to 1, "until" time descriptor phrasing will be used. + # E.g. "North winds 20 mph until 10 am, then 35 mph" + dict = TextRules.TextRules.untilPhrasing_flag_dict(self, tree, node) + componentName = node.getComponent().get("name") + if componentName == "SurfPeriod": + dict["Sky"] = 1 + dict["Wx"] = 1 + return dict + + def untilPhrasing_format_dict(self, tree, node): + # Format for "until" time descriptors. + # If "military": UNTIL 1000 + # If "standard": UNTIL 10 AM + return { + "otherwise": "standard", + #"Wind": "standard", + } + +########################################################################### +# PHRASES SET-UP needs to use correct edit area +########################################################################### + + def fireSky_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.skyWeather_byTimeRange_compoundPhrase) + elementInfoList = [self.ElementInfo("Sky", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def weather_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, + [self.skyWeather_byTimeRange_compoundPhrase, + self.weather_phrase]) + resolution = node.get("resolution") + if resolution is not None: + mergeMethod = "Average" + else: + mergeMethod = "List" + elementInfoList = [self.ElementInfo("Wx", mergeMethod, self.WEATHER())] + self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector, + resolution) + node.set("allTimeDescriptors", 1) + if self.areal_sky_flag(tree, node): + self.disableSkyRelatedWx(tree, node) + return 
self.DONE() + + def popMax_setUp(self, tree, node): + # NOTE: The method is set to "Average" instead of "List" so + # that the PoP phrase will always cover the full period. + # It doesn't matter what method (other than List) we choose + # since the popMax_words method gets its PoP value directly from + # the "matchToWx" method. + self._setSurfAreaLabel(tree, node, self.popMax_phrase) + elementInfoList = [self.ElementInfo("PoP", "Average")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def dayOrNight_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.dayOrNight_phrase) + dayElement, nightElement, indent, endWithPeriod = node.get("args") + elementName = self.dayOrNight_element(tree, node, dayElement, nightElement) + indentName = elementName+"_FireWx" + method = "MinMax" + if elementName == "RH": + dayNight = self.getPeriod(node.getTimeRange(), 1) + if dayNight == self.DAYTIME(): + indentName = "MinRH_FireWx" + method = "Min" + else: + indentName = "MaxRH_FireWx" + method = "Max" + elementInfoList = [self.ElementInfo(elementName, method)] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", indentName) + return self.DONE() + + def severeWeather_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.severeWeather_phrase) + elementInfoList = [self.ElementInfo("Wx", "List", self.WEATHER())] + self.subPhraseSetUp(tree, node, elementInfoList, self.wxConnector) + # Set this flag used by the "checkWeatherSimilarity" method + node.set("noIntensityCombining", 1) + self.determineSevereTimeDescriptors(tree, node) + return self.DONE() + + def wind_setUp(self, tree, node, gustFlag=0, element="Wind", connectorMethod=None): + self._setSurfAreaLabel(tree, node, [self.fireWind_compoundPhrase, self.wind_phrase]) + wind = self.ElementInfo(element, "List", self.VECTOR()) + elementInfoList = [wind] + if gustFlag: + windGust = 
self.ElementInfo( + "WindGust", "Max", phraseDef=self.gust_phrase) + elementInfoList.append(windGust) + node.set("gustFlag", 1) + if connectorMethod is None: + connectorMethod = self.vectorConnector + self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) + return self.DONE() + + def wind_summary_setUp(self, tree, node): + self._setSurfAreaLabel( + tree, node, [self.fireWind_compoundPhrase, self.wind_summary]) + elementInfoList = [] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def fireWind_label_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.fireWind_label_phrase) + if self.currentAreaContains( + tree, self.ridgeValleyAreas(tree, node)) == 0: + return self.setWords(node, "") + self.setWords(node, "") + node.set("descriptor", "") + node.set("indentLabel", "20-foot winds.......") + return self.DONE() + + def fireRidgeValleyWind_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, + [self.fireValleyWind_compoundPhrase, + self.fireRidgeWind_compoundPhrase]) + # Used for set-up of fireRidgeWind_compoundPhrase as well. + if self.currentAreaContains( + tree, self.ridgeValleyAreas(tree, node)) == 0: + return self.setWords(node, "") + # Set up intersect area to be used for the node + areaName = node.getAreaLabel() + phraseName = node.get("name") + valleys, ridges = self.valleyRidgeAreaNames(tree, node) + if phraseName.find("Valley") >= 0: + area = valleys + else: + area = ridges + intersectName = self.getIntersectName(areaName, area) + #print "setting intersect", intersectName + node.set("areaLabel", intersectName) + return self.DONE() + + ## WaveHeight Phrase Overrides (MarinePhrases) + # Need override to set up as indented phrase + def waveHeight_setUp(self, tree, node): + # Need to determine if we will have one simple phrase OR + # spawn separate phrases for each surfArea for the current combination + + #print "\nWaveHeight phrase" + # Are we dealing with a spawned phrase? 
+ args = node.get("args") + if args is not None: + # Set the label for the spawned phrase + #print "Has Args", args + surfArea, label = args + node.set("areaLabel", surfArea) + indentLabel = label + # If not, check to see if we need to spawn phrases + else: + #print "Does not have args" + indentLabel = "WaveHeight" + try: + surfAreas = self._combinedAreaDict[node.getAreaLabel()]["surfAreas"] + except: + surfAreas = [] + #print "surfAreas", surfAreas + if surfAreas == []: + # Set label for simple phrase + #print "Getting label" + self._setSurfAreaLabel(tree, node, self.waveHeight_phrase) + else: + #print "Spawning" + # Spawn separate phrases + # We need to reverse the list since we are adding phrases + # just after the current node and we want the resulting + # spawned phrases to be in the order of surfAreas + surfAreas.reverse() + for surfArea, label in surfAreas: + newPhrase = tree.addPhraseDef(node, self.waveHeight_phrase) + newPhrase.set("args", (surfArea, label)) + return self.setWords(node, "") + + # Proceed with phrase + if self._surfDebug: + print(("Proceeding with wave height phrase", node.getAreaLabel(), self._surfGrid)) + elementInfoList = [self.ElementInfo(self._surfGrid, "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + # See if we are dealing with a surfArea + node.set("descriptor", "") + node.set("indentLabel", indentLabel) + return self.DONE() + + def waveHeight_words(self, tree, node): + "Create phrase for waves" + statDict = node.getStatDict() + stats = self.getStats(statDict, self._surfGrid) + if stats is None: + nodataPhrase = self.noWaveHeight_phrase( + tree, node, "WaveHeight", "WaveHeight") + return self.setWords(node.parent, nodataPhrase) + + min, max = self.getValue(stats, "MinMax") + avg = (min + max)/2 + words = self.wave_range(avg) + return self.setWords(node, words) + + def sky_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.sky_phrase) + sky = self.ElementInfo("Sky", "List") + 
elementInfoList = [sky] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def reportTrends_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.reportTrends) + timeRange = node.getTimeRange() + dayNight = self.getPeriod(timeRange, 1) + if dayNight == self.NIGHTTIME(): + eleInfo = self.ElementInfo("MinT", "Min") + else: + eleInfo = self.ElementInfo("MaxT", "Max") + elementName = "MaxT" + elementInfoList = [eleInfo] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + return self.DONE() + + def lows_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.lows_phrase) + elementInfoList = [self.ElementInfo("MinT", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def highs_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.highs_phrase) + elementInfoList = [self.ElementInfo("MaxT", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + +####################################################################### +##### LAL Phrase Methods + + def _lal_phrase(self): + return { + "setUpMethod": self.lal_setUp, + "wordMethod": self.lal_words, + "phraseMethods": self.standard_phraseMethods() + } + + def lal_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self._lal_phrase) + lal = self.ElementInfo("LAL", "Max", self.SCALAR()) + + # comment the following line if you do not want chop + # subPhrases e.g. "A light chop in morning." 
+ lal = self.ElementInfo("LAL", "List", self.SCALAR()) + elementInfoList = [lal] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "LAL") + return self.DONE() + + def lal_words(self, tree, node): + "Create phrase for lal" + statDict = node.getStatDict() + stats = self.getStats(statDict, "LAL") + if stats is None: + return self.setWords(node, "") + lal = self.getValue(stats, "Max", self.SCALAR()) + if lal == 1: + value = "No lightning" + elif lal == 2: + value = "Very infrequent deadly lightning" + elif lal == 3: + value = "Infrequent deadly lightning" + elif lal == 4: + value = "Frequent deadly lightning" + elif lal == 5: + value = "Extreme deadly lightning" + else: + value = "!!!LAL phrase problem!!!" + return self.setWords(node, value) + +####################################################################### + +####################################################################### +##### Waterspout Phrase Methods + + def _waterSpout_phrase(self): + return { + "setUpMethod": self._waterSpout_setUp, + "wordMethod": self._waterSpout_words, + "phraseMethods": self.standard_phraseMethods() + } + + def _waterSpout_setUp(self, tree, node): + if self._waterSpoutGrid == "": + return self.setWords(node, "") + self._setSurfAreaLabel(tree, node, self._waterSpout_phrase) + waterSpout = self.ElementInfo(self._waterSpoutGrid, "Max", self.SCALAR()) + # comment the following line if you do not want chop + # subPhrases e.g. "A light chop in morning." 
+ waterSpout = self.ElementInfo(self._waterSpoutGrid, "List", self.SCALAR()) + elementInfoList = [waterSpout] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "WaterSpout") + return self.DONE() + + def _waterSpout_words(self, tree, node): + "Create phrase for waterSpout" + statDict = node.getStatDict() + stats = self.getStats(statDict, self._waterSpoutGrid) + if stats is None: + return self.setWords(node, "") + waterSpout = self.getValue(stats, "Max", self.SCALAR()) + #print "LLLLLLLLLLLLLLLL", waterSpout + if waterSpout == 1: + value = "No waterspouts" + elif waterSpout == 2: + value = "Very slight chance of waterspouts" + elif waterSpout == 3: + value = "Very slight chance of waterspouts" + elif waterSpout == 4: + value = "Moderate chance of waterspouts" + elif waterSpout == 5: + value = "Very good chance of waterspouts" + else: + value = "!!!WATERSPOUT phrase problem!!!" + return self.setWords(node, value) + +########################################################################## +###### Wx Phrase Methods + def pop_lower_threshold(self, tree, node): + # Pop values below this amount will not be reported + return 0 + + def pop_wx_lower_threshold(self, tree, node): + # Pop-related Wx will not be reported if Pop is below this threshold + return 0 + + def pop_upper_threshold(self, tree, node): + # Pop values above this amount will not be reported + return 100 + +####################################################################### +##### CHOP Phrase Methods + + def chop_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.chop_phrase) + chop = self.ElementInfo("Wind", "Max", self.VECTOR()) + # comment the following line if you do not want chop + # subPhrases e.g. "A light chop in morning." 
+ chop = self.ElementInfo("Wind", "List", self.VECTOR()) + elementInfoList = [chop] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "chop") + return self.DONE() + + def chop_words(self, tree, node): + "Create phrase for chop" + statDict = node.getStatDict() + stats = self.getStats(statDict, "Wind") + if stats is None: + return self.setWords(node, "") + maxWind, dir = self.getValue(stats, "Max", self.VECTOR()) + offshore1, offshore2, onshore1, onshore2 = self.seaBreeze_thresholds(tree, node) + if dir > offshore1 and dir < offshore2: + # For offshore winds + value = "smooth" + else: + if maxWind <= 7: + value = "smooth" + elif maxWind > 7 and maxWind <= 12: + value = "a light chop" + elif maxWind > 12 and maxWind <= 17: + value = "a moderate chop" + elif maxWind > 17 and maxWind <= 22: + value = "choppy" + elif maxWind > 22 and maxWind <= 27: + value = "rough" + elif maxWind > 27 and maxWind <= 32: + value = "very rough" + elif maxWind > 32: + value = "extremely rough" + else: + value = "!!!Chop phrase problem!!!" + return self.setWords(node, value) + +####################################################################### +############################################################################ +######### UVI These are read from a daily tide file with various +######### tide locations and tides. 
+ def _uvi_phrase(self): + return { + "setUpMethod": self._uvi_setUp, + "wordMethod": self._uvi_words, + "phraseMethods": self.standard_phraseMethods() + } + def _uvi_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self._uvi_phrase) + sst = self.ElementInfo("Wind", "Max", self.VECTOR()) + elementInfoList = [sst] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "uvi") + # Get the uvi product + productID = self._statePil + self._uviPil + uviProduct = self.getPreviousProduct(productID, "") + self._uviLines = string.split(uviProduct, "\n") + return self.DONE() + + def _uvi_words(self, tree, node): + # Get Synopsis from previous forecast + # Product has lines such as: + # ALBUQUERQUE NM 7 LITTLE ROCK AR 6 + uviCity = self.getAreaDictEntry(tree, self._surfAreaDict, "uviCity", firstOnly=1) + uviState = self.getAreaDictEntry(tree, self._surfAreaDict, "stateAbbr", firstOnly=1) + if uviCity == []: + msg = self._areaDictErrorMsg(node.getAreaLabel(), "uviCity") + return self.setWords(node, msg) + #print "uvicity, state", uviCity, uviState + uviValue = None + for line in self._uviLines: + tokens = line.split() + # Look for the city + for token in tokens: + if token.upper() == uviCity.upper(): + index = tokens.index(token) + state = tokens[index + 1] + if state == uviState: + # The value is after the next token + uviValue = int(tokens[index + 2]) + break + print(("uviValue", uviValue)) + if uviValue is not None: + uviWords = self._uvi_value(tree, node, uviValue) + else: + uviWords = "" + return self.setWords(node, uviWords) + + def _uvi_value(self, tree, node, uviValue): + if uviValue >= 0 and uviValue <= 2: + uviWords = "Low" + elif uviValue >= 3 and uviValue <= 5: + uviWords = "Moderate" + elif uviValue >= 6 and uviValue <= 7: + uviWords = "High" + elif uviValue >= 8 and uviValue <= 10: + uviWords = "Very High" + else: + uviWords = "Extreme" + return uviWords + 
+########################################################################## + ### HeatIndex (ScalarPhrases) + + def heatIndex_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.heatIndex_phrase) + elementInfoList = [self.ElementInfo("HeatIndex", "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "HeatIndex") + return self.DONE() + +########################################################################## +####### Tides These are read from a daily tide file with various +####### tide locations and tides. + + def _getTideInfo(self, fcst, editArea, areaLabel, argDict): + # Get a list of tideTables lists from the current edit area combination + labels = self._combinedAreaDict[areaLabel]["tideTables"] + if labels == []: + return fcst + + fcst = fcst + "\nTIDE INFORMATION...\n" + # Get day/month/year + creationTime = time.localtime(argDict["creationTime"]) + currentDate = time.strftime("%m/%d/%Y", creationTime) + #print "currentDate", currentDate, type(currentDate) + for label in labels: + success, tideTable = self._getTideTable(label) + if not success: + # Add error message to fcst + fcst = fcst + tideTable + continue + fcst = fcst + "\nAT " + label.upper() + "...\n\n" + for line in tideTable: + if line.find(currentDate) == 0: + # Get the tide info + # Line format has currentDate then series of low/high tide times: + # 05/02/2005 Mon 07:04AM LDT -0.0 L 02:18PM LDT 0.9 H 06:31PM LDT 0.5 L + # date day, time, timeDescriptor, height, low/high + tokens = line.split() + tideList = [] + index = 2 + while index < len(tokens)-1: + tideList.append(tuple(tokens[index:index+4])) + index += 4 + #print "tideList", tideList + for timeStr, timeDesc, height, lowHigh in tideList: + if lowHigh == "L": + tideType = "Low" + else: + tideType = "High" + hrMin = timeStr[0:5].lstrip("0") + amPm = timeStr[5:] + if self._includeTideHeights: + height = " " + height + " feet" + else: + height = 
"" + fcst = fcst + tideType+" tide"+height+" at "+hrMin+" "+amPm+ "\n" + + return fcst + + def _getTideTable(self, label): + fn = "" + try: + fn = self._tideFiles[label] + tideTable = open(fn, "r").readlines() + except: + msg = "\nWARNING:Cannot find Tide File for " + label + " " + fn\ + + " \nPlease check the tideFiles set up in the Site Definition file!!!\n" + return 0, msg + return 1, tideTable + +####################################################################### +####### SST Phrase This is mainly bogus framework to add an input line +####### for the SST. + def _sst_phrase(self): + return { + "setUpMethod": self._sst_setUp, + "wordMethod": self._sst_words, + "phraseMethods": self.standard_phraseMethods() + } + def _sst_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self._sst_phrase) + sst = self.ElementInfo("Wind", "Max", self.VECTOR()) + elementInfoList = [sst] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + descriptor = self.phrase_descriptor(tree, node, "sst", "sst") + node.set("descriptor", descriptor) + return self.DONE() + + def _sst_words(self, tree, node): + sstphrase = "|* !!!!!insert water temperature here!!!!!!!!!! *|" + return self.setWords(node,sstphrase) + + def _areaDictErrorMsg(self, entryName, areaLabel): + msg = "WARNING: Cannot find " + entryName + " for areas in " + areaLabel \ + + " Please check the SurfAreaDictionary for this information!!!" 
+ return msg + +###################################################################### +### MarinePhrases TO BE PUT IN BASELINE -- EXCEPT KEEP SET-UP METHODS +###################################################################### +##### Swell methods: compound phrase and summary phrase + + def swell_compoundPhrase(self): + return { + "phraseList": [ + self.swell_summary, + self.simple_swell_phrase, + ], + "phraseMethods": [ + self.consolidateSubPhrases, + self.assembleSentences, + self.swell_finishUp + ], + } + def swell2_compoundPhrase(self): + return { + "phraseList": [ + self.swell2_summary, + self.simple_swell2_phrase, + ], + "phraseMethods": [ + self.consolidateSubPhrases, + self.assembleSentences, + self.swell2_finishUp + ], + } + def swell_finishUp(self, tree, node): + return self.get_swell_finishUp(tree, node, "Swell") + + def swell2_finishUp(self, tree, node): + return self.get_swell_finishUp(tree, node, "Swell2") + + def get_swell_finishUp(self, tree, node, elementName): + words = node.get("words") + if words is None: + return + if words == "": + words = "MISSING" + node.set("descriptor", "") + node.set("indentLabel", "Label"+elementName) + node.set("compound", 1) + return self.setWords(node, words) + + ### Swell Summary methods for Swell and Swell2 + def swell_summary(self): + return { + "setUpMethod": self.swell_summary_setUp, + "wordMethod": self.swell_summary_words, + "phraseMethods": self.standard_phraseMethods(), + } + def swell2_summary(self): + return { + "setUpMethod": self.swell_summary_setUp, + "wordMethod": self.swell2_summary_words, + "phraseMethods": self.standard_phraseMethods(), + } + def swell_summary_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, [self.swell_compoundPhrase, + self.swell2_compoundPhrase]) + elementInfoList = [] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + return self.DONE() + + def swell_summary_words(self, tree, node): + # Uses vectorAvg, vectorMedian, vectorMinMax + words = 
self.vector_summary(tree, node, "Swell") + return self.setWords(node, words) + + def swell2_summary_words(self, tree, node): + # Uses vectorAvg, vectorMedian, vectorMinMax + words = self.vector_summary(tree, node, "Swell2") + return self.setWords(node, words) + + ## Simple Swell Phrases + + def simple_swell_phrase(self): + return { + "setUpMethod": self.simple_swell_setUp, + "wordMethod": self.vector_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + def simple_swell2_phrase(self): + return { + "setUpMethod": self.simple_swell2_setUp, + "wordMethod": self.vector_words, + "phraseMethods": self.standard_vector_phraseMethods(), + } + + def simple_swell_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.swell_compoundPhrase) + return self.get_swell_setUp(tree, node, "Swell") + + def simple_swell2_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.swell2_compoundPhrase) + return self.get_swell_setUp(tree, node, "Swell2") + + def get_swell_setUp(self, tree, node, element="Swell", connectorMethod=None): + swell = self.ElementInfo(element, "List", self.VECTOR()) + elementInfoList = [swell] + if connectorMethod is None: + connectorMethod = self.vectorConnector + self.subPhraseSetUp(tree, node, elementInfoList, connectorMethod) + return self.DONE() + + def vector_summary_valueStr(self, value, elementName): + # Thresholds and corresponding phrases + # Defaults are for Winds converted to mph + words = "" + if elementName in ["Swell", "Swell2"]: + return self.swell_summary_valueStr(value, elementName) + else: + return self.wind_summary_valueStr(value, elementName) + + def swell_summary_valueStr(self, value, elementName): + if value < 10: + words = "" + elif value < 20: + words = "moderate" + else: + words = "large swells" + return words + + def wind_summary_valueStr(self, value, elementName): + if value < 25: + words = "" + elif value < 30: + words = "breezy" + elif value < 40: + words = "windy" + elif value < 50: + words = 
"very windy" + elif value < 74: + words = "strong winds" + else: + words = "hurricane force winds" + return words + +######################################################################## +####### Period Phrases + + def period_phrase(self): + return { + "setUpMethod": self.period_setUp, + "wordMethod": self.period_words, + "phraseMethods": self.standard_phraseMethods() + } + def period_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.period_phrase) + return self.get_period_setUp(tree, node, "Period") + + def period2_phrase(self): + return { + "setUpMethod": self.period2_setUp, + "wordMethod": self.period_words, + "phraseMethods": self.standard_phraseMethods() + } + def period2_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.period2_phrase) + return self.get_period_setUp(tree, node, "Period2") + + def get_period_setUp(self, tree, node, element): + elementInfoList = [self.ElementInfo(element, "List")] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", element) + return self.DONE() + + def period_words(self, tree, node): + element = node.getAncestor("elementName") + statDict = node.getStatDict() + stats = self.getStats(statDict, element) + if stats is None: + return self.setWords(node, "") + periodValue = int(self.getValue(stats)) + outUnits = self.element_outUnits(tree, node, element, element) + units = self.units_descriptor(tree, node, "units", outUnits) + unit = self.units_descriptor(tree, node, "unit", outUnits) + if periodValue == 1: + units = unit + return self.setWords(node, repr(periodValue) + " " + units) + +######################################################################## +####### Rip Phrase + + def rip_phrase(self): + return { + "setUpMethod": self.rip_setUp, + "wordMethod": self.rip_words, + "phraseMethods": self.standard_phraseMethods() + } + def rip_setUp(self, tree, node): + self._setSurfAreaLabel(tree, node, self.rip_phrase) + if 
self._ripGrid != "": + rip = self.ElementInfo(self._ripGrid, "Max") + else: + rip = self.ElementInfo("Wind", "Max", self.VECTOR()) + elementInfoList = [rip] + self.subPhraseSetUp(tree, node, elementInfoList, self.scalarConnector) + node.set("descriptor", "") + node.set("indentLabel", "rip") + return self.DONE() + + def rip_words(self, tree, phrase): + timeRange = phrase.getTimeRange() + areaLabel = phrase.getAreaLabel() + if self._ripGrid != "": + stats = tree.stats.get(self._ripGrid, timeRange, areaLabel, mergeMethod="Max") + if stats is None: + return self.setWords(phrase, "") + else: + return self.setWords(phrase, self.ripGrid_value(stats)) + else: + maxWave = tree.stats.get(self._surfGrid, timeRange, areaLabel, mergeMethod="Max") + windStats = tree.stats.get("Wind", timeRange, areaLabel, mergeMethod="Max") + + ###This will provide average winds + #windStats = tree.stats.get("Wind", timeRange, areaLabel, mergeMethod="Avg") + #print "avg", windStats + + if maxWave is None or windStats is None: + return self.setWords(phrase, "") + + maxWind, dir = windStats + words = self.rip_value(maxWave, maxWind, dir) + if len(words) > 0: + words = words + return self.setWords(phrase, words) + + def ripPhrases(self): + return { + "lowPhrase" : "Low...however...strong currents can occur near piers and jetties. Always have a flotation device with you in the water", + "modPhrase" : "Moderate. A moderate risk of rip currents means wind and or wave conditions support stronger or more frequent rip currents. Always have a flotation device with you in the water", + "highPhrase" : "High. High surf and large swells will produce dangerous pounding surf and rip currents at the beaches. 
People visiting the beaches should stay out of the high surf", + } + + def ripGrid_value(self, value): + phrase = "" + ripPhrases = self.ripPhrases() + if value == 1: + words = ripPhrases["lowPhrase"] + elif value == 2: + words = ripPhrases["modPhrase"] + elif value == 3: + words = ripPhrases["highPhrase"] + else: + words = "RIP phrase problem!!!" + return words + + def rip_value(self, maxWave, maxWind, dir): + words = "" + ripPhrases = self.ripPhrases() + if dir > 150: + if maxWind < 15: + words = ripPhrases["lowPhrase"] + if maxWind >= 15 and maxWave >= 3: + words = ripPhrases["modPhrase"] + elif maxWind >= 15 and maxWave < 3: + words = ripPhrases["lowPhrase"] + if maxWind >= 20 and maxWave >= 6: + words = ripPhrases["highPhrase"] + + elif maxWind >= 20 and maxWave < 6: + words = modPhrase + + else: + if maxWave < 6: + words = ripPhrases["lowPhrase"] + if maxWave >= 6: + words = ripPhrases["modPhrase"] + if maxWave > 8: + words = ripPhrases["highPhrase"] + return words + + def allowedHazards(self): + allActions = ["NEW", "EXA", "EXB", "EXT", "CAN", "CON", "EXP"] + return [ + ('SU.W', allActions, 'Marine'), # HIGH SURF WARNING + ('SU.Y', allActions, 'Marine'), # HIGH SURF ADVISORY + ('RP.S', allActions, 'Rip'), # High Rip Threat + ('BH.S', allActions, 'BeachHaz'), # Beach Hazards Statement + ('TS.W', allActions, 'Tsunami'), # TSUNAMI WARNING + ('TS.Y', allActions, 'Tsunami'), # TSUNAMI ADVISORY + ('TS.A', allActions, 'Tsunami'), # TSUNAMI WATCH + ] + diff --git a/edexOsgi/com.raytheon.uf.tools.gfesuite/cli/src/activeTable/sendAT.py b/edexOsgi/com.raytheon.uf.tools.gfesuite/cli/src/activeTable/sendAT.py index 2b5c350f12..6cd2d06614 100644 --- a/edexOsgi/com.raytheon.uf.tools.gfesuite/cli/src/activeTable/sendAT.py +++ b/edexOsgi/com.raytheon.uf.tools.gfesuite/cli/src/activeTable/sendAT.py @@ -1,122 +1,122 @@ -## -# This software was developed and / or modified by Raytheon Company, -# pursuant to Contract DG133W-05-CQ-1067 with the US Government. -# -# U.S. 
EXPORT CONTROLLED TECHNICAL DATA -# This software product contains export-restricted data whose -# export/transfer/disclosure is restricted by U.S. law. Dissemination -# to non-U.S. persons whether in the United States or abroad requires -# an export license or other authorization. -# -# Contractor Name: Raytheon Company -# Contractor Address: 6825 Pine Street, Suite 340 -# Mail Stop B8 -# Omaha, NE 68106 -# 402.291.0100 -# -# See the AWIPS II Master Rights File ("Master Rights File.pdf") for -# further licensing information. -## - -# sendAT - sends active table to remote site -# sendAT -s reqSite -a mhsSite -f filterSite -f filterSite -f filterSite... -# [-c countDict] [-t timeStamp] -v vtecData [-X serverXMLInfo] -# -H serverhost -P serverPort -L serverProtocol -M serverMHS -S serverSite -# -x xmtScript - -# -# Port of sendAT code from AWIPS1 -# -# -# SOFTWARE HISTORY -# -# Date Ticket# Engineer Description -# ------------ ---------- ----------- -------------------------- -# 01/28/13 1447 dgilling Initial Creation. 
-# -# - - -import logging -import sys - -from dynamicserialize.dstypes.com.raytheon.uf.common.activetable.request import SendActiveTableRequest -from awips import ThriftClient -from awips import UsageArgumentParser - - -logging.basicConfig(format="%(asctime)s %(name)s %(levelname)s: %(message)s", - datefmt="%H:%M:%S", - level=logging.INFO) -log = logging.getLogger('sendtAT') - - -#-------------------------------------------------------------------- -# decode command line, -s siteToSend -f filterSite -a mhsSite -# -c countDict (siteid:records) -t issueTime -# -v vtecData [-X serverXMLInfo] -x transmitScript -#-------------------------------------------------------------------- -def process_command_line(): - parser = UsageArgumentParser.UsageArgumentParser(prog='sendAT', conflict_handler="resolve") - parser.add_argument("-s", action="append", dest="sites", - metavar="siteToSend") - parser.add_argument("-f", action="append", dest="filterSites", - metavar="filterSite") - parser.add_argument("-a", action="append", dest="mhsSites", - metavar="mhsSite") - parser.add_argument("-t", action="store", type=float, dest="issueTime", - metavar="issueTime") - parser.add_argument("-c", action="store", dest="countDict", - metavar="countDict") - parser.add_argument("-v", action="store", dest="fname", - metavar="vtecData") - parser.add_argument("-X", action="store", dest="xmlIncoming", - metavar="serverXMLInfo") - parser.add_argument("-H", action="store", dest="myServerHost", - metavar="ourHost") - parser.add_argument("-P", action="store", type=int, dest="myServerPort", - metavar="ourPort") - parser.add_argument("-L", action="store", dest="myServerProtocol", - metavar="ourProto") - parser.add_argument("-M", action="store", dest="myServerMHSID", - metavar="ourMHSID") - parser.add_argument("-S", action="store", dest="myServerSite", - metavar="ourSiteID") - parser.add_argument("-x", action="store", dest="xmtScript", - metavar="transmitScript") - - args = parser.parse_args() - if 
args.countDict is not None: - exec "countDict = " + args.countDict - setattr(args, "countDict", countDict) - return args - -def build_request(args): - req = SendActiveTableRequest(args.myServerHost, args.myServerPort, - args.myServerProtocol, args.myServerSite, - args.myServerMHSID, args.sites, - args.filterSites, args.mhsSites, - args.issueTime, args.countDict, args.fname, - args.xmlIncoming, args.xmtScript) - return req - -def main(): - options = process_command_line() - log.debug("Command-line options: " + repr(options)) - - req = build_request(options) - log.debug("Request: " + repr(req)) - - thriftClient = ThriftClient.ThriftClient(host=options.myServerHost) - try: - response = thriftClient.sendRequest(req) - except: - log.exception("Error posting request.") - sys.exit(1) - - if not response.getTaskSuccess(): - log.error("Error executing sendAT: " + response.getErrorMessage()) - sys.exit(1) - -if __name__ == '__main__': - main() +## +# This software was developed and / or modified by Raytheon Company, +# pursuant to Contract DG133W-05-CQ-1067 with the US Government. +# +# U.S. EXPORT CONTROLLED TECHNICAL DATA +# This software product contains export-restricted data whose +# export/transfer/disclosure is restricted by U.S. law. Dissemination +# to non-U.S. persons whether in the United States or abroad requires +# an export license or other authorization. +# +# Contractor Name: Raytheon Company +# Contractor Address: 6825 Pine Street, Suite 340 +# Mail Stop B8 +# Omaha, NE 68106 +# 402.291.0100 +# +# See the AWIPS II Master Rights File ("Master Rights File.pdf") for +# further licensing information. +## + +# sendAT - sends active table to remote site +# sendAT -s reqSite -a mhsSite -f filterSite -f filterSite -f filterSite... 
+# [-c countDict] [-t timeStamp] -v vtecData [-X serverXMLInfo] +# -H serverhost -P serverPort -L serverProtocol -M serverMHS -S serverSite +# -x xmtScript + +# +# Port of sendAT code from AWIPS1 +# +# +# SOFTWARE HISTORY +# +# Date Ticket# Engineer Description +# ------------ ---------- ----------- -------------------------- +# 01/28/13 1447 dgilling Initial Creation. +# +# + + +import logging +import sys + +from dynamicserialize.dstypes.com.raytheon.uf.common.activetable.request import SendActiveTableRequest +from awips import ThriftClient +from awips import UsageArgumentParser + + +logging.basicConfig(format="%(asctime)s %(name)s %(levelname)s: %(message)s", + datefmt="%H:%M:%S", + level=logging.INFO) +log = logging.getLogger('sendtAT') + + +#-------------------------------------------------------------------- +# decode command line, -s siteToSend -f filterSite -a mhsSite +# -c countDict (siteid:records) -t issueTime +# -v vtecData [-X serverXMLInfo] -x transmitScript +#-------------------------------------------------------------------- +def process_command_line(): + parser = UsageArgumentParser.UsageArgumentParser(prog='sendAT', conflict_handler="resolve") + parser.add_argument("-s", action="append", dest="sites", + metavar="siteToSend") + parser.add_argument("-f", action="append", dest="filterSites", + metavar="filterSite") + parser.add_argument("-a", action="append", dest="mhsSites", + metavar="mhsSite") + parser.add_argument("-t", action="store", type=float, dest="issueTime", + metavar="issueTime") + parser.add_argument("-c", action="store", dest="countDict", + metavar="countDict") + parser.add_argument("-v", action="store", dest="fname", + metavar="vtecData") + parser.add_argument("-X", action="store", dest="xmlIncoming", + metavar="serverXMLInfo") + parser.add_argument("-H", action="store", dest="myServerHost", + metavar="ourHost") + parser.add_argument("-P", action="store", type=int, dest="myServerPort", + metavar="ourPort") + parser.add_argument("-L", 
action="store", dest="myServerProtocol", + metavar="ourProto") + parser.add_argument("-M", action="store", dest="myServerMHSID", + metavar="ourMHSID") + parser.add_argument("-S", action="store", dest="myServerSite", + metavar="ourSiteID") + parser.add_argument("-x", action="store", dest="xmtScript", + metavar="transmitScript") + + args = parser.parse_args() + if args.countDict is not None: + exec("countDict = " + args.countDict) + setattr(args, "countDict", countDict) + return args + +def build_request(args): + req = SendActiveTableRequest(args.myServerHost, args.myServerPort, + args.myServerProtocol, args.myServerSite, + args.myServerMHSID, args.sites, + args.filterSites, args.mhsSites, + args.issueTime, args.countDict, args.fname, + args.xmlIncoming, args.xmtScript) + return req + +def main(): + options = process_command_line() + log.debug("Command-line options: " + repr(options)) + + req = build_request(options) + log.debug("Request: " + repr(req)) + + thriftClient = ThriftClient.ThriftClient(host=options.myServerHost) + try: + response = thriftClient.sendRequest(req) + except: + log.exception("Error posting request.") + sys.exit(1) + + if not response.getTaskSuccess(): + log.error("Error executing sendAT: " + response.getErrorMessage()) + sys.exit(1) + +if __name__ == '__main__': + main() From 6131280f471ca6cb5790e09ab99d0f6d78a2b190 Mon Sep 17 00:00:00 2001 From: Michael James Date: Fri, 4 Jan 2019 10:01:32 -0700 Subject: [PATCH 08/11] cleanup gridslice project --- ...7138.Bryan Kowal.user => gridslice.vcproj} | 130 +++++++++--------- 1 file changed, 65 insertions(+), 65 deletions(-) rename nativeLib/gridslice/src/windows/project/{gridslice.vcproj.ISFL017138.Bryan Kowal.user => gridslice.vcproj} (95%) diff --git a/nativeLib/gridslice/src/windows/project/gridslice.vcproj.ISFL017138.Bryan Kowal.user b/nativeLib/gridslice/src/windows/project/gridslice.vcproj similarity index 95% rename from nativeLib/gridslice/src/windows/project/gridslice.vcproj.ISFL017138.Bryan 
Kowal.user rename to nativeLib/gridslice/src/windows/project/gridslice.vcproj index 177164f9c7..cf0414a1c6 100644 --- a/nativeLib/gridslice/src/windows/project/gridslice.vcproj.ISFL017138.Bryan Kowal.user +++ b/nativeLib/gridslice/src/windows/project/gridslice.vcproj @@ -1,65 +1,65 @@ - - - - - - - - - - - + + + + + + + + + + + From 0b31360ea3a8dae1b537d45ad13a238ee338dc43 Mon Sep 17 00:00:00 2001 From: Michael James Date: Fri, 4 Jan 2019 10:01:52 -0700 Subject: [PATCH 09/11] Windows developer.product customization --- cave/com.raytheon.viz.product.awips/developer.product | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/cave/com.raytheon.viz.product.awips/developer.product b/cave/com.raytheon.viz.product.awips/developer.product index 5dcd0ccf24..084ed59ca3 100644 --- a/cave/com.raytheon.viz.product.awips/developer.product +++ b/cave/com.raytheon.viz.product.awips/developer.product @@ -16,7 +16,7 @@ -data @user.home/caveData -user @user.home/caveData -clean -consoleLog - -Xmx4536M + -Xmx8536M -XX:+UseG1GC -Dosgi.instance.area.readOnly=true -Dorg.eclipse.update.reconcile=false @@ -24,11 +24,11 @@ -Dqpid.dest_syntax=BURL -Dlogback.configurationFile=logback-viz-core-developer.xml -Dlogback.statusListenerClass=com.raytheon.uf.common.logback.UFLogbackInternalStatusListener --Dthrift.stream.maxsize=200 +-Dthrift.stream.maxsize=400 -Dviz.memory.warn.threshold=10M -Dorg.eclipse.swt.internal.gtk.cairoGraphics=false +-Djava.library.path="C:\Miniconda\Lib\site-packages\jep\" -Dhttps.certificate.check=false --Djava.library.path=C:\Users\mjames\Miniconda2\Lib\site-packages\jep -XX:MaxDirectMemorySize=1G -XX:+UnlockExperimentalVMOptions -XX:G1HeapRegionSize=4M @@ -38,7 +38,7 @@ -XX:G1OldCSetRegionThresholdPercent=25 -XX:G1HeapWastePercent=5 -DvizVersion=18.1.1 --DbuildSystem=RHEL/Eclipse +-DbuildSystem=Windows10 -XX:SoftRefLRUPolicyMSPerMB=1000 -Dfile.encoding=UTF-8 From 713e16fca7d819365b8b932a615e5064852e3e99 Mon Sep 17 00:00:00 2001 From: Michael James 
Date: Thu, 24 Jan 2019 17:36:08 -0700 Subject: [PATCH 10/11] menus and bundles for goes16/17 Procedures handled by Bundles --- .../uf/viz/d2d/ui/perspectives/D2D5Pane.java | 2 +- .../menus/lightning/lightningMenuItems.xml | 115 +- .../localization/menus/obs/baseSurface.xml | 2 + .../localization/bundles/DefaultSatellite.xml | 24 - .../localization/bundles/GOES16Display.xml | 935 +++++++++++++++ .../localization/bundles/GOES17Display.xml | 1019 +++++++++++++++++ .../menus/satellite/goesr/goesrMenu.xml | 141 ++- .../localization/bundles/AllWWA.xml | 143 ++- .../menus/warnings/baseWarnings.xml | 25 +- 9 files changed, 2228 insertions(+), 178 deletions(-) create mode 100644 cave/com.raytheon.viz.satellite/localization/bundles/GOES16Display.xml create mode 100644 cave/com.raytheon.viz.satellite/localization/bundles/GOES17Display.xml diff --git a/cave/com.raytheon.uf.viz.d2d.ui/src/com/raytheon/uf/viz/d2d/ui/perspectives/D2D5Pane.java b/cave/com.raytheon.uf.viz.d2d.ui/src/com/raytheon/uf/viz/d2d/ui/perspectives/D2D5Pane.java index 97761edead..c5bc083823 100644 --- a/cave/com.raytheon.uf.viz.d2d.ui/src/com/raytheon/uf/viz/d2d/ui/perspectives/D2D5Pane.java +++ b/cave/com.raytheon.uf.viz.d2d.ui/src/com/raytheon/uf/viz/d2d/ui/perspectives/D2D5Pane.java @@ -66,7 +66,7 @@ public class D2D5Pane implements IPerspectiveFactory { private static final String BASE_VIEW_ID_PREFIX = SideView.ID + UiUtil.SECONDARY_ID_SEPARATOR + "sideView"; - private static final float FIVE_PANE_WIDTH = 0.1f; + private static final float FIVE_PANE_WIDTH = 0.2f; private static final float ZERO_PANE_WIDTH = 0.0f; diff --git a/cave/com.raytheon.viz.lightning/localization/menus/lightning/lightningMenuItems.xml b/cave/com.raytheon.viz.lightning/localization/menus/lightning/lightningMenuItems.xml index eac5c154e7..49d4975cb2 100644 --- a/cave/com.raytheon.viz.lightning/localization/menus/lightning/lightningMenuItems.xml +++ b/cave/com.raytheon.viz.lightning/localization/menus/lightning/lightningMenuItems.xml 
@@ -1,36 +1,43 @@ - - + + + + + + + + + + + + + + + + + + + + + + + + + - - - - - - - - - - - - - - - - - - - - - + + + + + @@ -39,32 +46,34 @@ - - + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/cave/com.raytheon.viz.pointdata/localization/menus/obs/baseSurface.xml b/cave/com.raytheon.viz.pointdata/localization/menus/obs/baseSurface.xml index 4d8560ee18..aa47e049fd 100644 --- a/cave/com.raytheon.viz.pointdata/localization/menus/obs/baseSurface.xml +++ b/cave/com.raytheon.viz.pointdata/localization/menus/obs/baseSurface.xml @@ -1,5 +1,7 @@ + diff --git a/cave/com.raytheon.viz.satellite/localization/bundles/DefaultSatellite.xml b/cave/com.raytheon.viz.satellite/localization/bundles/DefaultSatellite.xml index 2a1b853ce2..11ac13d681 100644 --- a/cave/com.raytheon.viz.satellite/localization/bundles/DefaultSatellite.xml +++ b/cave/com.raytheon.viz.satellite/localization/bundles/DefaultSatellite.xml @@ -1,28 +1,4 @@ - - + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + World + mapdata.world
+ name not in ('CANADA', 'UNITED STATES') + the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + Canada + mapdata.canada
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + County Boundaries + mapdata.county
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + State Boundaries Zoom + mapdata.states
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + State Boundaries + mapdata.states
+ the_geom +
+
+ State/County Boundaries +
+
+ 2147483647 + + PROJCS["Geostationary", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Geostationary"], + PARAMETER["semi_major", 6371229.0], + PARAMETER["semi_minor", 6371229.0], + PARAMETER["central_meridian", -75.0], + PARAMETER["latitude_of_origin", 0.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + PARAMETER["orbital_height", 35800000.0], + PARAMETER["sweep_axis", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+ + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + World + mapdata.world
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + Canada + mapdata.canada
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + County Boundaries + mapdata.county
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + State Boundaries Zoom + mapdata.states
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + State Boundaries + mapdata.states
+ the_geom +
+
+ State/County Boundaries +
+
+ 8 + + PROJCS["Geostationary", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Geostationary"], + PARAMETER["semi_major", 6371229.0], + PARAMETER["semi_minor", 6371229.0], + PARAMETER["central_meridian", -75.0], + PARAMETER["latitude_of_origin", 0.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + PARAMETER["orbital_height", 35800000.0], + PARAMETER["sweep_axis", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + 1 + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+ + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + World + mapdata.world
+ name not in ('CANADA', 'UNITED STATES') + the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + Canada + mapdata.canada
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + County Boundaries + mapdata.county
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + State Boundaries Zoom + mapdata.states
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + State Boundaries + mapdata.states
+ the_geom +
+
+ State/County Boundaries +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + + Cities + mapdata.city
+ the_geom + prog_disc +
+
+ 8 + + PROJCS["Geostationary", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Geostationary"], + PARAMETER["semi_major", 6371229.0], + PARAMETER["semi_minor", 6371229.0], + PARAMETER["central_meridian", -75.0], + PARAMETER["latitude_of_origin", 0.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + PARAMETER["orbital_height", 35800000.0], + PARAMETER["sweep_axis", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + 20 + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+ + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + World + mapdata.world
+ name not in ('CANADA', 'UNITED STATES') + the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + Canada + mapdata.canada
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + County Boundaries + mapdata.county
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + State Boundaries Zoom + mapdata.states
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + State Boundaries + mapdata.states
+ the_geom +
+
+ State/County Boundaries +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + + Cities + mapdata.city
+ the_geom + prog_disc +
+
+ 8 + + PROJCS["Geostationary", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Geostationary"], + PARAMETER["semi_major", 6371229.0], + PARAMETER["semi_minor", 6371229.0], + PARAMETER["central_meridian", -75.0], + PARAMETER["latitude_of_origin", 0.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + PARAMETER["orbital_height", 35800000.0], + PARAMETER["sweep_axis", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + 20 + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+ + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + World + mapdata.world
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + County Boundaries + mapdata.county
+ the_geom +
+
+ State/County Boundaries +
+
+ 8 + + PROJCS["Mercator (SP: 20.0, CM: 0.0)", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Mercator_2SP"], + PARAMETER["semi_major", 6371229.0], + PARAMETER["semi_minor", 6371229.0], + PARAMETER["standard_parallel_1", 20.0], + PARAMETER["latitude_of_origin", 0.0], + PARAMETER["central_meridian", 0.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + 1 + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+
+ <?xml version="1.0" encoding="UTF-8"?> +<perspectiveLayout> +<info part="PartStack@492d38a2"/> +</perspectiveLayout> +
diff --git a/cave/com.raytheon.viz.satellite/localization/bundles/GOES17Display.xml b/cave/com.raytheon.viz.satellite/localization/bundles/GOES17Display.xml new file mode 100644 index 0000000000..ec1ebb3ac8 --- /dev/null +++ b/cave/com.raytheon.viz.satellite/localization/bundles/GOES17Display.xml @@ -0,0 +1,1019 @@ + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + World + mapdata.world
+ name not in ('CANADA', 'UNITED STATES') + the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + Canada + mapdata.canada
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + County Boundaries + mapdata.county
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + State Boundaries Zoom + mapdata.states
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + State Boundaries + mapdata.states
+ the_geom +
+
+ State/County Boundaries +
+
+ 2147483647 + + PROJCS["Geostationary", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Geostationary"], + PARAMETER["semi_major", 6371229.0], + PARAMETER["semi_minor", 6371229.0], + PARAMETER["central_meridian", -138.0], + PARAMETER["latitude_of_origin", 0.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + PARAMETER["orbital_height", 35800000.0], + PARAMETER["sweep_axis", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+ + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + World + mapdata.world
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + Canada + mapdata.canada
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + County Boundaries + mapdata.county
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + State Boundaries Zoom + mapdata.states
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + State Boundaries + mapdata.states
+ the_geom +
+
+ State/County Boundaries +
+
+ 8 + + PROJCS["Geostationary", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Geostationary"], + PARAMETER["semi_major", 6371229.0], + PARAMETER["semi_minor", 6371229.0], + PARAMETER["central_meridian", -138.0], + PARAMETER["latitude_of_origin", 0.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + PARAMETER["orbital_height", 35800000.0], + PARAMETER["sweep_axis", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+ + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + World + mapdata.world
+ name not in ('CANADA', 'UNITED STATES') + the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + Canada + mapdata.canada
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + County Boundaries + mapdata.county
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + State Boundaries Zoom + mapdata.states
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + State Boundaries + mapdata.states
+ the_geom +
+
+ State/County Boundaries +
+
+ 8 + + PROJCS["Geostationary", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Geostationary"], + PARAMETER["semi_major", 6371229.0], + PARAMETER["semi_minor", 6371229.0], + PARAMETER["central_meridian", -138.0], + PARAMETER["latitude_of_origin", 0.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + PARAMETER["orbital_height", 35800000.0], + PARAMETER["sweep_axis", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+ + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + State Boundaries + mapdata.states
+ the_geom +
+
+ State/County Boundaries +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + Latitude/Longitude + mapdata.latlon10
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + + latlon10.lpi + + + Latitude/Longitude +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + + Cities + mapdata.city
+ the_geom + prog_disc +
+
+ + + PLAN_VIEW + + + + + + + + + + + basemaps/raob.spi + RAOB Station Locations + + + 8 + + PROJCS["Mercator_1SP", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Mercator_1SP"], + PARAMETER["semi_major", 6371200.0], + PARAMETER["semi_minor", 6371200.0], + PARAMETER["latitude_of_origin", 0.0], + PARAMETER["central_meridian", -158.5], + PARAMETER["scale_factor", 1.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + 1 + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+ + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + World + mapdata.world
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + Canada + mapdata.canada
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + County Boundaries + mapdata.county
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + State Boundaries Zoom + mapdata.states
+ the_geom +
+
+ State/County Boundaries +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + + Cities + mapdata.city
+ the_geom + prog_disc +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + Latitude/Longitude + mapdata.latlon10
+ the_geom +
+
+ + + PLAN_VIEW + + + + + + + + + + + + + + latlon10.lpi + + + Latitude/Longitude +
+
+ 8 + + PROJCS["AWIPS Polar Stereographic (SP: 60.0, CM: -150.0)", + GEOGCS["WGS84(DD)", + DATUM["WGS84", + SPHEROID["WGS84", 6378137.0, 298.257223563]], + PRIMEM["Greenwich", 0.0], + UNIT["degree", 0.017453292519943295], + AXIS["Geodetic longitude", EAST], + AXIS["Geodetic latitude", NORTH]], + PROJECTION["Stereographic_North_Pole"], + PARAMETER["semi_major", 6371229.0], + PARAMETER["semi_minor", 6371229.0], + PARAMETER["central_meridian", -150.0], + PARAMETER["Standard_Parallel_1", 60.0], + PARAMETER["scale_factor", 1.0], + PARAMETER["false_easting", 0.0], + PARAMETER["false_northing", 0.0], + UNIT["m", 1.0], + AXIS["Easting", EAST], + AXIS["Northing", NORTH]] + + +
+
+
+ + 250 + 1050 + 700 + 1500 + Forward + true + +
+
+ <?xml version="1.0" encoding="UTF-8"?> +<perspectiveLayout> +<info part="PartStack@492d38a2"/> +</perspectiveLayout> +
diff --git a/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml b/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml index 09ebb93690..1d4c857683 100644 --- a/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml +++ b/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml @@ -1,36 +1,61 @@ - - - - - - - - - - - - - - - - - - + + + + - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + @@ -47,13 +72,54 @@ - + + + + + + + + + + + + + + + + + + + + + + + + + + + + - + + + + + + @@ -91,27 +157,4 @@ - - - - - - - - - - - - - - - - - - - diff --git a/cave/com.raytheon.viz.warnings/localization/bundles/AllWWA.xml b/cave/com.raytheon.viz.warnings/localization/bundles/AllWWA.xml index a6cb75cedc..bb8efe04a8 100644 --- a/cave/com.raytheon.viz.warnings/localization/bundles/AllWWA.xml +++ b/cave/com.raytheon.viz.warnings/localization/bundles/AllWWA.xml @@ -1,31 +1,112 @@ - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + + + PLAN_VIEW + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/cave/com.raytheon.viz.warnings/localization/menus/warnings/baseWarnings.xml b/cave/com.raytheon.viz.warnings/localization/menus/warnings/baseWarnings.xml index f6165748e9..d62b5625fc 100644 --- a/cave/com.raytheon.viz.warnings/localization/menus/warnings/baseWarnings.xml +++ b/cave/com.raytheon.viz.warnings/localization/menus/warnings/baseWarnings.xml @@ -1,29 +1,14 @@ - + + - Date: Tue, 29 Jan 2019 14:11:13 -0700 Subject: [PATCH 11/11] better menu organization --- .../localization/menus/obs/baseSurface.xml | 2 +- .../localization/bundles/GOES16Display.xml | 12 ++++++------ .../localization/bundles/GOES17Display.xml | 3 +++ 
.../localization/menus/satellite/goesr/goesrMenu.xml | 10 +++++----- 4 files changed, 15 insertions(+), 12 deletions(-) diff --git a/cave/com.raytheon.viz.pointdata/localization/menus/obs/baseSurface.xml b/cave/com.raytheon.viz.pointdata/localization/menus/obs/baseSurface.xml index aa47e049fd..a0459170ce 100644 --- a/cave/com.raytheon.viz.pointdata/localization/menus/obs/baseSurface.xml +++ b/cave/com.raytheon.viz.pointdata/localization/menus/obs/baseSurface.xml @@ -1,6 +1,6 @@ - diff --git a/cave/com.raytheon.viz.satellite/localization/bundles/GOES16Display.xml b/cave/com.raytheon.viz.satellite/localization/bundles/GOES16Display.xml index bada8b4e3b..04d65c539c 100644 --- a/cave/com.raytheon.viz.satellite/localization/bundles/GOES16Display.xml +++ b/cave/com.raytheon.viz.satellite/localization/bundles/GOES16Display.xml @@ -189,7 +189,7 @@ true
- + @@ -377,7 +377,7 @@ true - + @@ -574,7 +574,7 @@ AXIS["Easting", EAST], AXIS["Northing", NORTH]] - 20 + 1 @@ -588,7 +588,7 @@ true - + @@ -785,7 +785,7 @@ AXIS["Easting", EAST], AXIS["Northing", NORTH]] - 20 + 1 @@ -799,7 +799,7 @@ true - + diff --git a/cave/com.raytheon.viz.satellite/localization/bundles/GOES17Display.xml b/cave/com.raytheon.viz.satellite/localization/bundles/GOES17Display.xml index ec1ebb3ac8..280ffc26ca 100644 --- a/cave/com.raytheon.viz.satellite/localization/bundles/GOES17Display.xml +++ b/cave/com.raytheon.viz.satellite/localization/bundles/GOES17Display.xml @@ -363,6 +363,7 @@ AXIS["Easting", EAST], AXIS["Northing", NORTH]] + 1 @@ -551,6 +552,7 @@ AXIS["Easting", EAST], AXIS["Northing", NORTH]] + 1 @@ -998,6 +1000,7 @@ AXIS["Easting", EAST], AXIS["Northing", NORTH]] + 1 diff --git a/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml b/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml index 1d4c857683..ead1583f90 100644 --- a/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml +++ b/cave/com.raytheon.viz.satellite/localization/menus/satellite/goesr/goesrMenu.xml @@ -50,29 +50,29 @@ - - - - -